diff --git a/.github/ISSUE_TEMPLATE/crash.md b/.github/ISSUE_TEMPLATE/crash.md
index 5a4cfe3f5e15..c742875616e2 100644
--- a/.github/ISSUE_TEMPLATE/crash.md
+++ b/.github/ISSUE_TEMPLATE/crash.md
@@ -25,6 +25,9 @@ println("hello, world")
```
## Output (click arrow to expand)
+
```scala
diff --git a/.github/ISSUE_TEMPLATE/feature.md b/.github/ISSUE_TEMPLATE/feature.md
index d622f4439f9d..52f8010c372e 100644
--- a/.github/ISSUE_TEMPLATE/feature.md
+++ b/.github/ISSUE_TEMPLATE/feature.md
@@ -1,10 +1,10 @@
---
name: "\U0001F389 Suggest a feature"
-about: Please create a feature request here https://github.com/lampepfl/dotty-feature-requests
+about: Please create a feature request here https://github.com/lampepfl/dotty/discussions/new?category=feature-requests
title: ''
labels: ''
assignees: ''
---
-Please create a feature request here: [lampepfl/dotty-feature-requests](https://github.com/lampepfl/dotty-feature-requests).
+Please create a feature request in the [Dotty Discussions](https://github.com/lampepfl/dotty/discussions/new?category=feature-requests).
diff --git a/.github/ISSUE_TEMPLATE/improve-error.md b/.github/ISSUE_TEMPLATE/improve-error.md
new file mode 100644
index 000000000000..918196e1ec53
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/improve-error.md
@@ -0,0 +1,55 @@
+---
+name: "\U0001F615 Error/Warning message report"
+about: Report an error/warning message that was confusing/unhelpful
+title: ''
+labels: itype:enhancement, area:reporting, better-errors, stat:needs triage
+assignees: ''
+
+---
+
+## Compiler version
+
+If you're not sure what version you're using, run `print scalaVersion` from sbt
+(if you're running scalac manually, use `scalac -version` instead).
+
+## Minimized example
+
+
+
+```scala
+printl("hello, world")
+```
+
+## Output Error/Warning message
+
+
+
+```scala
+-- [E006] Not Found Error: ----------------
+1 |printl("hello, world")
+ |^^^^^^
+ |Not found: printl
+1 error found
+```
+
+## Why this Error/Warning was not helpful
+
+
+
+The message was unhelpful because...
+
+## Suggested improvement
+
+
+
+It could be made more helpful by...
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 370b66854051..fd1fe30d3fa9 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -18,6 +18,8 @@ on:
- '*'
branches-ignore:
- 'gh-readonly-queue/**'
+ - 'release-**'
+ - 'lts-**'
pull_request:
merge_group:
schedule:
@@ -70,13 +72,13 @@ jobs:
run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true
- name: Checkout cleanup script
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Cleanup
run: .github/workflows/cleanup.sh
- name: Git Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Add SBT proxy repositories
run: cp -vf .github/workflows/repositories /root/.sbt/ ; true
@@ -121,20 +123,20 @@ jobs:
run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true
- name: Checkout cleanup script
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Cleanup
run: .github/workflows/cleanup.sh
- name: Git Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Add SBT proxy repositories
run: cp -vf .github/workflows/repositories /root/.sbt/ ; true
- name: Cmd Tests
run: |
- ./project/scripts/sbt ";dist/pack; scala3-bootstrapped/compile; scala3-bootstrapped/test ;sbt-test/scripted scala2-compat/* ;stdlib-bootstrapped/test:run ;stdlib-bootstrapped-tasty-tests/test; scala3-compiler-bootstrapped/scala3CompilerCoursierTest:test"
+ ./project/scripts/sbt ";dist/pack; scala3-bootstrapped/compile; scala3-bootstrapped/test ;sbt-test/scripted scala2-compat/* ;scala2-library-tasty-tests/run ;scala2-library-tasty-tests/test; scala3-compiler-bootstrapped/scala3CompilerCoursierTest:test"
./project/scripts/cmdTests
./project/scripts/bootstrappedOnlyCmdTests
@@ -142,6 +144,51 @@ jobs:
run: |
./project/scripts/sbt ";sjsSandbox/run ;sjsSandbox/test ;sjsJUnitTests/test ;set sjsJUnitTests/scalaJSLinkerConfig ~= switchToESModules ;sjsJUnitTests/test ;sjsCompilerTests/test"
+ - name: Test with Scala 2 library TASTy (fast)
+    run: ./project/scripts/sbt ";set ThisBuild/Build.useScala2LibraryTasty := true ;scala3-bootstrapped/testCompilation i5; scala3-bootstrapped/testCompilation tests/run/typelevel-peano.scala; scala3-bootstrapped/testOnly dotty.tools.backend.jvm.DottyBytecodeTests" # only test a subset of tests to avoid doubling the CI execution time
+
+ test_scala2_library_tasty:
+ runs-on: [self-hosted, Linux]
+ container:
+ image: lampepfl/dotty:2021-03-22
+ options: --cpu-shares 4096
+ volumes:
+ - ${{ github.workspace }}/../../cache/sbt:/root/.sbt
+ - ${{ github.workspace }}/../../cache/ivy:/root/.ivy2/cache
+ - ${{ github.workspace }}/../../cache/general:/root/.cache
+ if: "github.event_name == 'schedule' && github.repository == 'lampepfl/dotty'
+ || (
+ github.event_name == 'pull_request'
+ && contains(github.event.pull_request.body, '[test_scala2_library_tasty]')
+ )
+ || (
+ github.event_name == 'workflow_dispatch'
+ && github.repository == 'lampepfl/dotty'
+ )"
+
+ steps:
+ - name: Set JDK 16 as default
+ run: echo "/usr/lib/jvm/java-16-openjdk-amd64/bin" >> $GITHUB_PATH
+
+ - name: Reset existing repo
+ run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true
+
+ - name: Checkout cleanup script
+ uses: actions/checkout@v4
+
+ - name: Cleanup
+ run: .github/workflows/cleanup.sh
+
+ - name: Git Checkout
+ uses: actions/checkout@v4
+
+ - name: Add SBT proxy repositories
+ run: cp -vf .github/workflows/repositories /root/.sbt/ ; true
+
+ - name: Test with Scala 2 library TASTy
+ run: ./project/scripts/sbt ";set ThisBuild/Build.useScala2LibraryTasty := true ;scala3-bootstrapped/test"
+
+
test_windows_fast:
runs-on: [self-hosted, Windows]
if: "(
@@ -161,7 +208,7 @@ jobs:
shell: cmd
- name: Git Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Test
run: sbt ";scala3-bootstrapped/compile"
@@ -203,7 +250,7 @@ jobs:
shell: cmd
- name: Git Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Test
run: sbt ";dist/pack ;scala3-bootstrapped/compile ;scala3-bootstrapped/test"
@@ -240,20 +287,25 @@ jobs:
run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true
- name: Checkout cleanup script
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Cleanup
run: .github/workflows/cleanup.sh
- name: Git Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Add SBT proxy repositories
run: cp -vf .github/workflows/repositories /root/.sbt/ ; true
- name: MiMa
run: |
- ./project/scripts/sbt ";scala3-interfaces/mimaReportBinaryIssues ;scala3-library-bootstrapped/mimaReportBinaryIssues ;scala3-library-bootstrappedJS/mimaReportBinaryIssues; tasty-core-bootstrapped/mimaReportBinaryIssues"
+ ./project/scripts/sbt ";scala3-interfaces/mimaReportBinaryIssues ;scala3-library-bootstrapped/mimaReportBinaryIssues ;scala3-library-bootstrappedJS/mimaReportBinaryIssues; tasty-core-bootstrapped/mimaReportBinaryIssues; scala2-library-bootstrapped/mimaReportBinaryIssues"
+
+ - name: TASTy MiMa
+ run: |
+ # This script cleans the compiler and recompiles it from scratch (keep as last run)
+ ./project/scripts/scala2-library-tasty-mima.sh
community_build_a:
runs-on: [self-hosted, Linux]
@@ -283,13 +335,13 @@ jobs:
run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true
- name: Checkout cleanup script
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Cleanup
run: .github/workflows/cleanup.sh
- name: Git Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Add SBT proxy repositories
run: cp -vf .github/workflows/repositories /root/.sbt/ ; true
@@ -332,13 +384,13 @@ jobs:
run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true
- name: Checkout cleanup script
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Cleanup
run: .github/workflows/cleanup.sh
- name: Git Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Add SBT proxy repositories
run: cp -vf .github/workflows/repositories /root/.sbt/ ; true
@@ -381,13 +433,13 @@ jobs:
run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true
- name: Checkout cleanup script
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Cleanup
run: .github/workflows/cleanup.sh
- name: Git Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Add SBT proxy repositories
run: cp -vf .github/workflows/repositories /root/.sbt/ ; true
@@ -428,13 +480,13 @@ jobs:
run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true
- name: Checkout cleanup script
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Cleanup
run: .github/workflows/cleanup.sh
- name: Git Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Add SBT proxy repositories
run: cp -vf .github/workflows/repositories /root/.sbt/ ; true
@@ -475,20 +527,20 @@ jobs:
run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true
- name: Checkout cleanup script
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Cleanup
run: .github/workflows/cleanup.sh
- name: Git Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Add SBT proxy repositories
run: cp -vf .github/workflows/repositories /root/.sbt/ ; true
- name: Test
run: |
- ./project/scripts/sbt ";dist/pack ;scala3-bootstrapped/compile ;scala3-bootstrapped/test ;sbt-test/scripted scala2-compat/* ;stdlib-bootstrapped/test:run ;stdlib-bootstrapped-tasty-tests/test"
+ ./project/scripts/sbt ";dist/pack ;scala3-bootstrapped/compile ;scala3-bootstrapped/test ;sbt-test/scripted scala2-compat/* ;scala2-library-tasty-tests/run ;scala2-library-tasty-tests/test"
./project/scripts/cmdTests
./project/scripts/bootstrappedOnlyCmdTests
@@ -519,13 +571,13 @@ jobs:
run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true
- name: Checkout cleanup script
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Cleanup
run: .github/workflows/cleanup.sh
- name: Git Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Add SBT proxy repositories
run: cp -vf .github/workflows/repositories /root/.sbt/ ; true
@@ -573,13 +625,13 @@ jobs:
run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true
- name: Checkout cleanup script
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Cleanup
run: .github/workflows/cleanup.sh
- name: Git Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Add SBT proxy repositories
run: cp -vf .github/workflows/repositories /root/.sbt/ ; true
@@ -623,13 +675,13 @@ jobs:
run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true
- name: Checkout cleanup script
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Cleanup
run: .github/workflows/cleanup.sh
- name: Git Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Add SBT proxy repositories
run: cp -vf .github/workflows/repositories /root/.sbt/ ; true
@@ -696,7 +748,7 @@ jobs:
if: "failure() && github.event_name == 'schedule'"
steps:
- name: Checkout issue template
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Open an issue
uses: JasonEtco/create-an-issue@v2
diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml
index bb1aec1290c0..f370cb2b541c 100644
--- a/.github/workflows/cla.yml
+++ b/.github/workflows/cla.yml
@@ -15,7 +15,7 @@ jobs:
check:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- run: ./project/scripts/check-cla.sh
if: github.event_name == 'pull_request'
env:
diff --git a/.github/workflows/dependency-graph.yml b/.github/workflows/dependency-graph.yml
index f8facc0453ca..e96c3efbc8aa 100644
--- a/.github/workflows/dependency-graph.yml
+++ b/.github/workflows/dependency-graph.yml
@@ -8,5 +8,5 @@ jobs:
name: Update Dependency Graph
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- uses: scalacenter/sbt-dependency-submission@v2
diff --git a/.github/workflows/language-reference.yaml b/.github/workflows/language-reference.yaml
index ec134ec35ffe..786785eaa4a2 100644
--- a/.github/workflows/language-reference.yaml
+++ b/.github/workflows/language-reference.yaml
@@ -24,14 +24,14 @@ jobs:
run: echo "date=$(date +'%Y-%m-%d')" >> $GITHUB_OUTPUT
- name: Git Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
path: 'dotty'
fetch-depth: 0
ssh-key: ${{ secrets.DOCS_KEY }}
- name: Set up JDK 17
- uses: actions/setup-java@v3
+ uses: actions/setup-java@v4
with:
distribution: 'temurin'
java-version: 17
@@ -46,7 +46,7 @@ jobs:
- name: Push changes to scala3-reference-docs
if: github.event_name == 'push'
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
repository: lampepfl/scala3-reference-docs
fetch-depth: 0
@@ -74,7 +74,7 @@ jobs:
runs-on: ubuntu-latest
if: github.event_name == 'push' || github.event_name == 'workflow_dispatch'
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- uses: repo-sync/pull-request@v2
with:
destination_branch: main
diff --git a/.github/workflows/lts-backport.yaml b/.github/workflows/lts-backport.yaml
new file mode 100644
index 000000000000..8e8added1c03
--- /dev/null
+++ b/.github/workflows/lts-backport.yaml
@@ -0,0 +1,22 @@
+name: Add to backporting project
+
+on:
+ push:
+ branches:
+ - main
+
+jobs:
+ add-to-backporting-project:
+    if: "!contains(github.event.head_commit.message, '[Next only]')"
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+ - uses: coursier/cache-action@v6
+ - uses: VirtusLab/scala-cli-setup@v1.1.0
+ - run: scala-cli ./project/scripts/addToBackportingProject.scala -- ${{ github.sha }}
+ env:
+ GRAPHQL_API_TOKEN: ${{ secrets.GRAPHQL_API_TOKEN }}
+
diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml
index ba4bae0456d0..f2cd0706cfe7 100644
--- a/.github/workflows/releases.yml
+++ b/.github/workflows/releases.yml
@@ -24,7 +24,7 @@ jobs:
run: .github/workflows/cleanup.sh
- name: Git Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Publish to SDKMAN
run: .github/workflows/scripts/publish-sdkman.sh
diff --git a/.github/workflows/scaladoc.yaml b/.github/workflows/scaladoc.yaml
index 3108f2b94562..98ce94718fe5 100644
--- a/.github/workflows/scaladoc.yaml
+++ b/.github/workflows/scaladoc.yaml
@@ -27,10 +27,10 @@ jobs:
steps:
- name: Git Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Set up JDK 17
- uses: actions/setup-java@v3
+ uses: actions/setup-java@v4
with:
distribution: 'temurin'
java-version: 17
@@ -62,21 +62,6 @@ jobs:
- name: Generate documentation for example project using dotty-sbt
run: ./project/scripts/sbt "sbt-test/scripted sbt-dotty/scaladoc"
- - name: Generate index file
- run: scaladoc/scripts/mk-index.sh scaladoc/output > scaladoc/output/index.html
-
- - name: Upload documentation to server
- uses: azure/CLI@v1
- if: env.AZURE_STORAGE_SAS_TOKEN
- env:
- PR_NUMBER: ${{ github.event.pull_request.number }}
- with:
- inlineScript: |
- DOC_DEST=$(echo pr-${PR_NUMBER:-${GITHUB_REF##*/}} | tr -d -c "[-A-Za-z0-9]")
- echo uplading docs to https://scala3doc.virtuslab.com/$DOC_DEST
- az storage container create --name $DOC_DEST --account-name scala3docstorage --public-access container
- az storage blob upload-batch --overwrite true -s scaladoc/output -d $DOC_DEST --account-name scala3docstorage
-
stdlib-sourcelinks-test:
runs-on: ubuntu-latest
# if false - disable flaky test
@@ -90,10 +75,10 @@ jobs:
steps:
- name: Git Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Set up JDK 17
- uses: actions/setup-java@v3
+ uses: actions/setup-java@v4
with:
distribution: 'temurin'
java-version: 17
diff --git a/.github/workflows/spec.yml b/.github/workflows/spec.yml
new file mode 100644
index 000000000000..0c09ec170986
--- /dev/null
+++ b/.github/workflows/spec.yml
@@ -0,0 +1,56 @@
+name: Specification
+
+on:
+ push:
+ tags:
+ - '*'
+ branches-ignore:
+ - 'gh-readonly-queue/**'
+ pull_request:
+ merge_group:
+ workflow_dispatch:
+
+env:
+ DOTTY_CI_RUN: true
+
+jobs:
+ specification:
+ runs-on: ubuntu-latest
+ defaults:
+ run:
+ working-directory: ./docs/_spec
+
+ steps:
+ - uses: actions/checkout@v4
+
+ # Keep in sync with ./docs/_spec/Dockerfile
+ - uses: ruby/setup-ruby@v1
+ with:
+ ruby-version: '2.7'
+ - name: Install required gems
+ run: |
+ gem install "rubygems-update:<3.5" --no-document
+ update_rubygems
+ gem install sass-embedded -v 1.58.0
+ gem install bundler:1.17.2 jekyll
+ bundle install
+ npm install bower
+
+ - name: Build the specification
+ run: |
+ bundle exec jekyll build
+
+ # Deploy
+ - name: Deployment
+ env:
+ USER_FOR_TEST: ${{ secrets.SPEC_DEPLOY_USER }}
+ if: ${{ env.USER_FOR_TEST != '' }}
+ uses: burnett01/rsync-deployments@6.0.0
+ with:
+ switches: -rzv
+ path: docs/_spec/_site/
+ remote_path: ${{ secrets.SPEC_DEPLOY_PATH }}
+ remote_host: ${{ secrets.SPEC_DEPLOY_HOST }}
+ remote_user: ${{ secrets.SPEC_DEPLOY_USER }}
+ remote_key: ${{ secrets.SPEC_DEPLOY_KEY }}
+ remote_key_pass: ${{ secrets.SPEC_DEPLOY_PASS }}
diff --git a/.gitignore b/.gitignore
index 5240662741bb..3d44cdefb941 100644
--- a/.gitignore
+++ b/.gitignore
@@ -37,6 +37,7 @@ metals.sbt
# scala-cli
.scala-build
+sbt-launch.jar
# Partest
dotty.jar
@@ -63,8 +64,8 @@ testlogs/
local/
compiler/test/debug/Gen.jar
-compiler/before-pickling.txt
-compiler/after-pickling.txt
+before-pickling.txt
+after-pickling.txt
bench/compile.txt
community-build/scala3-bootstrapped.version
@@ -95,3 +96,10 @@ contributors.js
content-contributors.css
docs/_spec/_site/
docs/_spec/.jekyll-metadata
+
+# scaladoc related
+scaladoc/output/
+
+#coverage
+coverage/
+
diff --git a/.gitmodules b/.gitmodules
index 4d87dd214e9c..8f87e992013a 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -21,17 +21,13 @@
url = https://github.com/dotty-staging/fastparse
[submodule "community-build/community-projects/stdLib213"]
path = community-build/community-projects/stdLib213
- url = https://github.com/dotty-staging/scala
+ url = https://github.com/dotty-staging/scala213
[submodule "community-build/community-projects/sourcecode"]
path = community-build/community-projects/sourcecode
url = https://github.com/dotty-staging/sourcecode
[submodule "community-build/community-projects/scala-xml"]
path = community-build/community-projects/scala-xml
url = https://github.com/dotty-staging/scala-xml
-[submodule "community-build/community-projects/shapeless"]
- path = community-build/community-projects/shapeless
- url = https://github.com/dotty-staging/shapeless
- branch = shapeless-3-staging
[submodule "community-build/community-projects/xml-interpolator"]
path = community-build/community-projects/xml-interpolator
url = https://github.com/dotty-staging/xml-interpolator.git
@@ -222,3 +218,6 @@
[submodule "community-build/community-projects/parboiled2"]
path = community-build/community-projects/parboiled2
url = https://github.com/dotty-staging/parboiled2.git
+[submodule "community-build/community-projects/shapeless-3"]
+ path = community-build/community-projects/shapeless-3
+ url = https://github.com/dotty-staging/shapeless-3.git
diff --git a/.vscode-template/settings.json b/.vscode-template/settings.json
index 8cf2d29e3bae..257da27b118f 100644
--- a/.vscode-template/settings.json
+++ b/.vscode-template/settings.json
@@ -9,6 +9,7 @@
"**/*.class": true,
"**/*.tasty": true,
"**/target/": true,
- "community-build/community-projects": true
+ "community-build/community-projects": true,
+ "tests/pos-with-compiler-cc/dotc/**/*.scala": true
}
}
diff --git a/MAINTENANCE.md b/MAINTENANCE.md
index 54e74f7cb7ca..1e80f891e987 100644
--- a/MAINTENANCE.md
+++ b/MAINTENANCE.md
@@ -16,6 +16,11 @@ The issue supervisor is responsible for:
- Attempting to reproduce the issue (or label “stat:cannot reproduce”)
- Further minimizing the issue or asking the reporter of the issue to minimize it correctly (or label “stat:needs minimization”)
- Identifying which issues are of considerable importance and bringing them to the attention of the team during the Dotty meeting, where they can be filtered and added to the [Future Versions](https://github.com/lampepfl/dotty/milestone/46) milestone.
+ - Identifying if a report is really a feature request and if so, converting it to
+ a [feature request discussion](https://github.com/lampepfl/dotty/discussions/categories/feature-requests).
+- Keeping an eye on new
+[discussions](https://github.com/lampepfl/dotty/discussions), making sure they
+don't go unanswered and also correctly labeling new feature requests.
Other core teammates are responsible for providing information to the issue supervisor in a timely manner when it is requested if they have that information.
@@ -32,7 +37,6 @@ The issue supervisor schedule is maintained in the [Issue Supervisor Statistics
An issue supervisor needs to have all the accesses and privileges required to get their job done. This might include:
- Admin rights in lampepfl/dotty repository
-- Admin rights in lampepfl/dotty-feature-requests repository
- Permission to create new repositories in lampepfl organization (needed to fork repositories for the community build)
- Access to the LAMP slack to be able to ask for help with the infrastructure, triaging and such
@@ -62,24 +66,25 @@ At the end of their supervision period, the supervisor reports to the team durin
The following is the list of all the principal areas of the compiler and the core team members who are responsible for their maintenance:
### Compiler
-- Parser: @odersky
-- Typer: @odersky, @smarter, (@dwijnand)
+- Parser: @odersky, @hamzaremmal
+- Typer: @odersky, @smarter, (@dwijnand), @noti0nal
- Erasure: @smarter, @odersky
- Enums: @bishabosha
-- Derivation & Mirrors: @bishabosha, (@dwijnand)
+- Derivation & Mirrors: @bishabosha, (@dwijnand), @EugeneFlesselle
- Export: @bishabosha, @odersky
- Pattern Matching: @dwijnand, (@liufengyun), @sjrd
-- Inline: @nicolasstucki, @odersky
-- Metaprogramming (Quotes, Reflect, Staging): @nicolasstucki, @jchyb
-- Match types: @sjrd, @dwijnand, @Decel
+- Inline: @nicolasstucki, @odersky, @hamzaremmal
+- Metaprogramming (Quotes, Reflect, Staging): @nicolasstucki, @jchyb, @hamzaremmal
+- Match types: @sjrd, @dwijnand, @Decel, @Linyxus
- GADT: @dwijnand, @Linyxus
- Initialization checker: @olhotak, @liufengyun
- Safe nulls: @noti0na1, @olhotak
-- Lazy vals: @szymon-rd, @sjrd
+- Transforms: @szymon-rd, @sjrd, @odersky, @smarter
- tailrec: @sjrd, @mbovel
- JS backend: @sjrd
- JVM backend: @sjrd
-- Java-compat: @smarter
+- Java-compat: @smarter, @dwijnand
+- Capture checker: @odersky, @Linyxus
### Tooling
- REPL: @dwijnand, @prolativ
diff --git a/NOTICE.md b/NOTICE.md
index f4d0e6ed2b5a..64c1ede1a5eb 100644
--- a/NOTICE.md
+++ b/NOTICE.md
@@ -89,15 +89,19 @@ major authors were omitted by oversight.
details.
* dotty.tools.dotc.coverage: Coverage instrumentation utilities have been
- adapted from the scoverage plugin for scala 2 [5], which is under the
+ adapted from the scoverage plugin for scala 2 [4], which is under the
Apache 2.0 license.
+ * dotty.tools.pc: Presentation compiler implementation adapted from
+ scalameta/metals [5] mtags module, which is under the Apache 2.0 license.
+
* The Dotty codebase contains parts which are derived from
- the ScalaPB protobuf library [4], which is under the Apache 2.0 license.
+ the ScalaPB protobuf library [6], which is under the Apache 2.0 license.
[1] https://github.com/scala/scala
[2] https://github.com/adriaanm/scala/tree/sbt-api-consolidate/src/compiler/scala/tools/sbt
[3] https://github.com/sbt/sbt/tree/0.13/compile/interface/src/main/scala/xsbt
-[4] https://github.com/lampepfl/dotty/pull/5783/files
-[5] https://github.com/scoverage/scalac-scoverage-plugin
+[4] https://github.com/scoverage/scalac-scoverage-plugin
+[5] https://github.com/scalameta/metals
+[6] https://github.com/lampepfl/dotty/pull/5783/files
diff --git a/bench-run/src/main/scala/dotty/tools/benchmarks/Main.scala b/bench-run/src/main/scala/dotty/tools/benchmarks/Main.scala
index ac26e57c7381..97dbf95556bf 100644
--- a/bench-run/src/main/scala/dotty/tools/benchmarks/Main.scala
+++ b/bench-run/src/main/scala/dotty/tools/benchmarks/Main.scala
@@ -68,7 +68,7 @@ object Bench {
println()
println("Usage:")
println()
- println("dotty-bench-run/jmh:run [] [] [] [|--] [
+
+## `-experimental` compiler flag
+
+This flag enables the use of any experimental language feature in the project.
+It does this by adding an `@experimental` annotation to all top-level definitions.
+Hence, dependent projects also have to be experimental.
diff --git a/docs/_docs/reference/other-new-features/export.md b/docs/_docs/reference/other-new-features/export.md
index e8482cb343d9..98e9a7d3d711 100644
--- a/docs/_docs/reference/other-new-features/export.md
+++ b/docs/_docs/reference/other-new-features/export.md
@@ -78,8 +78,6 @@ A member is _eligible_ if all of the following holds:
It is a compile-time error if a simple or renaming selector does not identify
any eligible members.
-It is a compile-time error if a simple or renaming selector does not identify any eligible members.
-
Type members are aliased by type definitions, and term members are aliased by method definitions. For instance:
```scala
object O:
diff --git a/docs/_docs/reference/syntax.md b/docs/_docs/reference/syntax.md
index a705c5a3fd79..bf2c27d57863 100644
--- a/docs/_docs/reference/syntax.md
+++ b/docs/_docs/reference/syntax.md
@@ -274,7 +274,7 @@ ColonArgument ::= colon [LambdaStart]
LambdaStart ::= FunParams (‘=>’ | ‘?=>’)
| HkTypeParamClause ‘=>’
Quoted ::= ‘'’ ‘{’ Block ‘}’
- | ‘'’ ‘[’ Type ‘]’
+ | ‘'’ ‘[’ TypeBlock ‘]’
ExprSplice ::= spliceId -- if inside quoted block
| ‘$’ ‘{’ Block ‘}’ -- unless inside quoted pattern
| ‘$’ ‘{’ Pattern ‘}’ -- when inside quoted pattern
@@ -293,6 +293,8 @@ BlockStat ::= Import
| Extension
| Expr1
| EndMarker
+TypeBlock ::= {TypeBlockStat semi} Type
+TypeBlockStat ::= ‘type’ {nl} TypeDcl
ForExpr ::= ‘for’ ‘(’ Enumerators0 ‘)’ {nl} [‘do‘ | ‘yield’] Expr
| ‘for’ ‘{’ Enumerators0 ‘}’ {nl} [‘do‘ | ‘yield’] Expr
@@ -316,7 +318,7 @@ Pattern1 ::= PatVar ‘:’ RefinedType
| [‘-’] integerLiteral ‘:’ RefinedType
| [‘-’] floatingPointLiteral ‘:’ RefinedType
| Pattern2
-Pattern2 ::= [id ‘@’] InfixPattern [‘*’]
+Pattern2 ::= [id ‘@’] InfixPattern
InfixPattern ::= SimplePattern { id [nl] SimplePattern }
SimplePattern ::= PatVar
| Literal
@@ -391,10 +393,10 @@ Export ::= ‘export’ ImportExpr {‘,’ ImportExpr}
ImportExpr ::= SimpleRef {‘.’ id} ‘.’ ImportSpec
| SimpleRef ‘as’ id
ImportSpec ::= NamedSelector
- | WildcardSelector
+ | WildCardSelector
| ‘{’ ImportSelectors) ‘}’
NamedSelector ::= id [‘as’ (id | ‘_’)]
-WildCardSelector ::= ‘*' | ‘given’ [InfixType]
+WildCardSelector ::= ‘*’ | ‘given’ [InfixType]
ImportSelectors ::= NamedSelector [‘,’ ImportSelectors]
| WildCardSelector {‘,’ WildCardSelector}
diff --git a/docs/_spec/01-lexical-syntax.md b/docs/_spec/01-lexical-syntax.md
index de11de10402f..7dfcea87bd2d 100644
--- a/docs/_spec/01-lexical-syntax.md
+++ b/docs/_spec/01-lexical-syntax.md
@@ -27,8 +27,6 @@ The principle of optional braces is that any keyword that can be followed by `{`
The lexical analyzer inserts `indent` and `outdent` tokens that represent regions of indented code [at certain points](./other-new-features/indentation.md).
-´\color{red}{\text{TODO SCALA3: Port soft-modifier.md and link it here.}}´
-
In the context-free productions below we use the notation `<<< ts >>>` to indicate a token sequence `ts` that is either enclosed in a pair of braces `{ ts }` or that constitutes an indented region `indent ts outdent`.
Analogously, the notation `:<<< ts >>>` indicates a token sequence `ts` that is either enclosed in a pair of braces `{ ts }` or that constitutes an indented region `indent ts outdent` that follows a `colon` token.
@@ -45,17 +43,17 @@ colon ::= ':' -- with side conditions explained above
## Identifiers
```ebnf
-op ::= opchar {opchar}
-varid ::= lower idrest
-boundvarid ::= varid
- | ‘`’ varid ‘`’
-alphaid ::= upper idrest
- | varid
-plainid ::= alphaid
- | op
-id ::= plainid
- | ‘`’ { charNoBackQuoteOrNewline | escapeSeq } ‘`’
-idrest ::= {letter | digit} [‘_’ op]
+op ::= opchar {opchar}
+varid ::= lower idrest
+boundvarid ::= varid
+ | ‘`’ varid ‘`’
+alphaid ::= upper idrest
+ | varid
+plainid ::= alphaid
+ | op
+id ::= plainid
+ | ‘`’ { charNoBackQuoteOrNewline | escapeSeq } ‘`’
+idrest ::= {letter | digit} [‘_’ op]
escapeSeq ::= UnicodeEscape | charEscapeSeq
UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit
hexDigit ::= ‘0’ | ... | ‘9’ | ‘A’ | ... | ‘F’ | ‘a’ | ... | ‘f’
@@ -85,7 +83,7 @@ For this purpose, lower case letters include not only a-z, but also all characte
The following are examples of variable identifiers:
-> ```scala
+> ```
> x maxIndex p2p empty_?
> `yield` αρετη _y dot_product_*
> __system _MAX_LEN_
@@ -94,7 +92,7 @@ The following are examples of variable identifiers:
Some examples of constant identifiers are
-> ```scala
+> ```
> + Object $reserved Džul ǂnûm
> ⅰ_ⅲ Ⅰ_Ⅲ ↁelerious ǃqhàà ʹthatsaletter
> ```
@@ -106,7 +104,7 @@ User programs should not define identifiers that contain ‘$’ characters.
The following names are reserved words instead of being members of the syntactic class `id` of lexical identifiers.
-```scala
+```
abstract case catch class def do else
enum export extends false final finally for
given if implicit import lazy match new
@@ -121,12 +119,35 @@ type val var while with yield
Additionally, the following soft keywords are reserved only in some situations.
-´\color{red}{\text{TODO SCALA3: Port soft-modifier.md and link it here.}}´
-
```
-as derives end extension infix inline opaque open transparent using | * + -
+as derives end extension infix inline opaque
+open transparent using
+| * + -
```
+A soft modifier is one of the identifiers `infix`, `inline`, `opaque`, `open` and `transparent`.
+
+A soft keyword is a soft modifier, or one of `as`, `derives`, `end`, `extension`, `using`, `|`, `+`, `-`, `*`.
+
+A soft modifier is treated as an actual modifier of a definition if it is followed by a hard modifier or a keyword combination starting a definition (`def`, `val`, `var`, `type`, `given`, `class`, `trait`, `object`, `enum`, `case class`, `case object`).
+Between the two words, there may be a sequence of newline tokens and/or other soft modifiers.
+
+Otherwise, soft keywords are treated as actual keywords in the following situations:
+
+ - `as`, if it appears in a renaming import clause.
+ - `derives`, if it appears after an extension clause or after the name and possibly parameters of a class, trait, object, or enum definition.
+ - `end`, if it appears at the start of a line following a statement (i.e. definition or toplevel expression) and is followed on the same line by a single non-comment token that is:
+ - one of the keywords `for`, `given`, `if`, `match`, `new`, `this`, `throw`, `try`, `val`, `while`, or
+ - an identifier.
+ - `extension`, if it appears at the start of a statement and is followed by `(` or `[`.
+ - `inline`, if it is followed by any token that can start an expression.
+ - `using`, if it appears at the start of a parameter or argument list.
+ - `|`, if it separates two patterns in an alternative.
+ - `+`, `-`, if they appear in front of a type parameter.
+ - `*`, if it appears in a wildcard import, or if it follows the type of a parameter, or if it appears in a vararg splice `x*`.
+
+Everywhere else, a soft keyword is treated as a normal identifier.
+
> When one needs to access Java identifiers that are reserved words in Scala, use backquote-enclosed strings.
@@ -143,26 +164,32 @@ Scala is a line-oriented language where statements may be terminated by semi-col
A newline in a Scala source text is treated as the special token “nl” if the three following criteria are satisfied:
1. The token immediately preceding the newline can terminate a statement.
-1. The token immediately following the newline can begin a statement.
+1. The token immediately following the newline can begin a statement and is not a _leading infix operator_.
1. The token appears in a region where newlines are enabled.
The tokens that can terminate a statement are: literals, identifiers and the following delimiters and reserved words:
-```scala
-this null true false return type
-_ ) ] }
+```
+this null true false return type given
+_ ) ] } outdent
```
The tokens that can begin a statement are all Scala tokens _except_ the following delimiters and reserved words:
-```scala
-catch else extends finally forSome match
-with yield , . ; : = => <- <: <%
->: # [ ) ] }
+```
+catch do else extends finally forSome macro
+match then with yield
+, . ; : = => <- <: <% >: # =>> ?=>
+) ] } outdent
```
-A `case` token can begin a statement only if followed by a
-`class` or `object` token.
+A _leading infix operator_ is a symbolic identifier such as `+`, or `approx_==`, or an identifier in backticks that:
+
+- starts a new line, and
+- is not following a blank line, and
+- is followed by at least one whitespace character (including new lines) and a token that can start an expression.
+
+Furthermore, if the operator appears on its own line, the next line must have at least the same indentation width as the operator.
Newlines are enabled in:
@@ -189,13 +216,13 @@ Multiple newline tokens are accepted in the following places (note that a semico
- between the condition of a [conditional expression](06-expressions.html#conditional-expressions) or [while loop](06-expressions.html#while-loop-expressions) and the next following expression,
- between the enumerators of a [for-comprehension](06-expressions.html#for-comprehensions-and-for-loops) and the next following expression, and
-- after the initial `type` keyword in a [type definition or declaration](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases).
+- after the initial `type` keyword in a [type definition](04-basic-definitions.html#type-member-definitions).
A single new line token is accepted
- in front of an opening brace ‘{’, if that brace is a legal continuation of the current statement or expression,
- after an [infix operator](06-expressions.html#prefix,-infix,-and-postfix-operations), if the first token on the next line can start an expression,
-- in front of a [parameter clause](04-basic-declarations-and-definitions.html#function-declarations-and-definitions), and
+- in front of a [parameter clause](04-basic-definitions.html#method-definitions), and
- after an [annotation](11-annotations.html#user-defined-annotations).
> The newline tokens between the two lines are not treated as statement separators.
@@ -305,8 +332,7 @@ Literal ::= [‘-’] integerLiteral
### Integer Literals
```ebnf
-integerLiteral ::= (decimalNumeral | hexNumeral)
- [‘L’ | ‘l’]
+integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’]
decimalNumeral ::= ‘0’ | digit [{digit | ‘_’} digit]
hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit [{hexDigit | ‘_’} hexDigit]
```
@@ -337,11 +363,10 @@ The digits of a numeric literal may be separated by arbitrarily many underscores
### Floating Point Literals
```ebnf
-floatingPointLiteral
- ::= [decimalNumeral] ‘.’ digit [{digit | ‘_’} digit] [exponentPart] [floatType]
- | decimalNumeral exponentPart [floatType]
- | decimalNumeral floatType
-exponentPart ::= (‘E’ | ‘e’) [‘+’ | ‘-’] digit [{digit | ‘_’} digit]
+floatingPointLiteral ::= [decimalNumeral] ‘.’ digit [{digit | ‘_’} digit] [exponentPart] [floatType]
+ | decimalNumeral exponentPart [floatType]
+ | decimalNumeral floatType
+exponentPart ::= (‘E’ | ‘e’) [‘+’ | ‘-’] digit [{digit | ‘_’} digit]
```
Floating point literals are of type `Float` when followed by a floating point type suffix `F` or `f`, and are of type `Double` otherwise.
@@ -423,7 +448,7 @@ Characters must not necessarily be printable; newlines or other control characte
>
> This would produce the string:
>
-> ```scala
+> ```
> the present string
> spans three
> lines.
@@ -440,7 +465,7 @@ Characters must not necessarily be printable; newlines or other control characte
>
> evaluates to
>
-> ```scala
+> ```
> the present string
> spans three
> lines.
diff --git a/docs/_spec/02-identifiers-names-and-scopes.md b/docs/_spec/02-identifiers-names-and-scopes.md
index 2b34ae8844cf..551781e911d0 100644
--- a/docs/_spec/02-identifiers-names-and-scopes.md
+++ b/docs/_spec/02-identifiers-names-and-scopes.md
@@ -8,15 +8,15 @@ chapter: 2
Names in Scala identify types, values, methods, and classes which are collectively called _entities_.
Names are introduced by local
-[definitions and declarations](04-basic-declarations-and-definitions.html#basic-declarations-and-definitions),
+[definitions](04-basic-definitions.html#basic-definitions),
[inheritance](05-classes-and-objects.html#class-members),
-[import clauses](04-basic-declarations-and-definitions.html#import-clauses), or
+[import clauses](04-basic-definitions.html#import-clauses), or
[package clauses](09-top-level-definitions.html#packagings)
which are collectively called _bindings_.
Bindings of different kinds have precedence defined on them:
-1. Definitions and declarations that are local, inherited, or made available by a package clause and also defined in the same compilation unit as the reference to them, have the highest precedence.
+1. Definitions that are local, inherited, or made available by a package clause and also defined in the same compilation unit as the reference to them, have the highest precedence.
1. Explicit imports have the next highest precedence.
1. Wildcard imports have the next highest precedence.
1. Definitions made available by a package clause, but not also defined in the same compilation unit as the reference to them, as well as imports which are supplied by the compiler but not explicitly written in source code, have the lowest precedence.
@@ -48,12 +48,12 @@ A reference to an unqualified (type- or term-) identifier ´x´ is bound by the
It is an error if no such binding exists.
If ´x´ is bound by an import clause, then the simple name ´x´ is taken to be equivalent to the qualified name to which ´x´ is mapped by the import clause.
-If ´x´ is bound by a definition or declaration, then ´x´ refers to the entity introduced by that binding.
+If ´x´ is bound by a definition, then ´x´ refers to the entity introduced by that binding.
In that case, the type of ´x´ is the type of the referenced entity.
A reference to a qualified (type- or term-) identifier ´e.x´ refers to the member of the type ´T´ of ´e´ which has the name ´x´ in the same namespace as the identifier.
It is an error if ´T´ is not a [value type](03-types.html#value-types).
-The type of ´e.x´ is the member type of the referenced entity in ´T´.
+The type of ´e.x´ is specified as a [type designator](03-types.html#type-designators).
Binding precedence implies that the way source is bundled in files affects name resolution.
In particular, imported names have higher precedence than names, defined in other files, that might otherwise be visible because they are defined in either the current package or an enclosing package.
@@ -74,11 +74,11 @@ The compiler supplies imports in a preamble to every source file.
This preamble conceptually has the following form, where braces indicate nested scopes:
```scala
-import java.lang._
+import java.lang.*
{
- import scala._
+ import scala.*
{
- import Predef._
+ import Predef.*
{ /* source */ }
}
}
@@ -95,8 +95,8 @@ This allows redundant type aliases to be imported without introducing an ambigui
object X { type T = annotation.tailrec }
object Y { type T = annotation.tailrec }
object Z {
- import X._, Y._, annotation.{tailrec => T} // OK, all T mean tailrec
- @T def f: Int = { f ; 42 } // error, f is not tail recursive
+ import X.*, Y.*, annotation.tailrec as T // OK, all T mean tailrec
+ @T def f: Int = { f ; 42 } // error, f is not tail recursive
}
```
@@ -107,7 +107,7 @@ Similarly, imported aliases of names introduced by package statements are allowe
package p { class C }
// xy.scala
-import p._
+import p.*
package p { class X extends C }
package q { class Y extends C }
```
@@ -132,27 +132,32 @@ package q {
The following program illustrates different kinds of bindings and precedences between them.
```scala
-package p { // `X' bound by package clause
-import Console._ // `println' bound by wildcard import
-object Y {
- println(s"L4: $X") // `X' refers to `p.X' here
- locally {
- import q._ // `X' bound by wildcard import
- println(s"L7: $X") // `X' refers to `q.X' here
- import X._ // `x' and `y' bound by wildcard import
- println(s"L9: $x") // `x' refers to `q.X.x' here
+package p { // `X' bound by package clause
+ import Console.* // `println' bound by wildcard import
+ object Y {
+ println(s"L4: $X") // `X' refers to `p.X' here
locally {
- val x = 3 // `x' bound by local definition
- println(s"L12: $x") // `x' refers to constant `3' here
+ import q.* // `X' bound by wildcard import
+ println(s"L7: $X") // `X' refers to `q.X' here
+ import X.* // `x' and `y' bound by wildcard import
+ println(s"L9: $x") // `x' refers to `q.X.x' here
locally {
- import q.X._ // `x' and `y' bound by wildcard import
-// println(s"L15: $x") // reference to `x' is ambiguous here
- import X.y // `y' bound by explicit import
- println(s"L17: $y") // `y' refers to `q.X.y' here
+ val x = 3 // `x' bound by local definition
+ println(s"L12: $x") // `x' refers to constant `3' here
locally {
- val x = "abc" // `x' bound by local definition
- import p.X._ // `x' and `y' bound by wildcard import
-// println(s"L21: $y") // reference to `y' is ambiguous here
- println(s"L22: $x") // `x' refers to string "abc" here
-}}}}}}
+ import q.X.* // `x' and `y' bound by wildcard import
+// println(s"L15: $x") // reference to `x' is ambiguous here
+ import X.y // `y' bound by explicit import
+ println(s"L17: $y") // `y' refers to `q.X.y' here
+ locally {
+ val x = "abc" // `x' bound by local definition
+ import p.X.* // `x' and `y' bound by wildcard import
+// println(s"L21: $y") // reference to `y' is ambiguous here
+ println(s"L22: $x") // `x' refers to string "abc" here
+ }
+ }
+ }
+ }
+ }
+}
```
diff --git a/docs/_spec/03-types.md b/docs/_spec/03-types.md
index bbaac5de03a0..407a69b8c8c5 100644
--- a/docs/_spec/03-types.md
+++ b/docs/_spec/03-types.md
@@ -7,172 +7,507 @@ chapter: 3
# Types
```ebnf
- Type ::= FunctionArgTypes ‘=>’ Type
- | TypeLambdaParams ‘=>>’ Type
- | InfixType
- FunctionArgTypes ::= InfixType
- | ‘(’ [ ParamType {‘,’ ParamType } ] ‘)’
- TypeLambdaParams ::= ‘[’ TypeLambdaParam {‘,’ TypeLambdaParam} ‘]’
- TypeLambdaParam ::= {Annotation} (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type]
- InfixType ::= CompoundType {id [nl] CompoundType}
- CompoundType ::= AnnotType {‘with’ AnnotType} [Refinement]
- | Refinement
- AnnotType ::= SimpleType {Annotation}
- SimpleType ::= SimpleType TypeArgs
- | SimpleType ‘#’ id
- | StableId
- | Path ‘.’ ‘type’
- | Literal
- | ‘(’ Types ‘)’
- TypeArgs ::= ‘[’ Types ‘]’
- Types ::= Type {‘,’ Type}
-```
-
-We distinguish between proper types and type constructors, which take type parameters and yield types.
-All types have a _kind_, either the kind of proper types or a _higher kind_.
-A subset of proper types called _value types_ represents sets of (first-class) values.
-Types are either _concrete_ or _abstract_.
-
-Every concrete value type can be represented as a _class type_, i.e. a [type designator](#type-designators) that refers to a [class or a trait](05-classes-and-objects.html#class-definitions) [^1], or as a [compound type](#compound-types) representing an intersection of types, possibly with a [refinement](#compound-types) that further constrains the types of its members.
-
-
-Abstract types are introduced by [type parameters](04-basic-declarations-and-definitions.html#type-parameters) and [abstract type bindings](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases).
-Parentheses in types can be used for grouping.
-
-[^1]: We assume that objects and packages also implicitly
- define a class (of the same name as the object or package, but
- inaccessible to user programs).
-
-Non-value types capture properties of identifiers that [are not values](#non-value-types).
-For example, a [type constructor](#type-constructors) does not directly specify a type of values.
-However, when a type constructor is applied to the correct type arguments, it yields a proper type, which may be a value type.
-
-Non-value types are expressed indirectly in Scala.
-E.g., a method type is described by writing down a method signature, which in itself is not a real type, although it gives rise to a corresponding [method type](#method-types).
-Type constructors are another example, as one can write `type Swap[m[_, _], a,b] = m[b, a]`, but there is no syntax to write the corresponding anonymous type function directly.
-
-`AnyKind` is the super type of all types in the Scala type system.
-It has all possible kinds to encode [kind polymorphism](#kind-polymorphism).
-As such, it is neither a value type nor a type constructor.
-
-## Paths
+Type ::= FunType
+ | TypeLambda
+ | InfixType
+FunType ::= FunTypeArgs ‘=>’ Type
+                   | TypeLambdaParams ‘=>’ Type
+TypeLambda ::= TypeLambdaParams ‘=>>’ Type
+InfixType ::= RefinedType
+ | RefinedTypeOrWildcard id [nl] RefinedTypeOrWildcard {id [nl] RefinedTypeOrWildcard}
+RefinedType ::= AnnotType {[nl] Refinement}
+AnnotType ::= SimpleType {Annotation}
+SimpleType ::= SimpleLiteral
+ | SimpleType1
+SimpleType1 ::= id
+ | Singleton ‘.’ id
+ | Singleton ‘.’ ‘type’
+ | ‘(’ TypesOrWildcards ‘)’
+ | Refinement
+ | SimpleType1 TypeArgs
+ | SimpleType1 ‘#’ id
+Singleton ::= SimpleRef
+ | SimpleLiteral
+ | Singleton ‘.’ id
+SimpleRef ::= id
+ | [id ‘.’] ‘this’
+ | [id ‘.’] ‘super’ [‘[’ id ‘]’] ‘.’ id
+ParamType ::= [‘=>’] ParamValueType
+ParamValueType ::= ParamValueType [‘*’]
+TypeArgs ::= ‘[’ TypesOrWildcards ‘]’
+Refinement ::= :<<< [RefineDef] {semi [RefineDef]} >>>
+
+FunTypeArgs ::= InfixType
+ | ‘(’ [ FunArgTypes ] ‘)’
+ | FunParamClause
+FunArgTypes ::= FunArgType { ‘,’ FunArgType }
+FunArgType ::= Type
+ | ‘=>’ Type
+FunParamClause ::= ‘(’ TypedFunParam {‘,’ TypedFunParam } ‘)’
+TypedFunParam ::= id ‘:’ Type
+
+TypeLambdaParams ::= ‘[’ TypeLambdaParam {‘,’ TypeLambdaParam} ‘]’
+TypeLambdaParam ::= {Annotation} (id | ‘_’) [TypeParamClause] TypeBounds
+TypeParamClause ::= ‘[’ VariantTypeParam {‘,’ VariantTypeParam} ‘]’
+VariantTypeParam ::= {Annotation} [‘+’ | ‘-’] (id | ‘_’) [TypeParamClause] TypeBounds
+
+RefineDef ::= ‘val’ ValDef
+ | ‘def’ DefDef
+ | ‘type’ {nl} TypeDef
+
+TypeBounds ::= [‘>:’ Type] [‘<:’ Type]
+
+TypesOrWildcards ::= TypeOrWildcard {‘,’ TypeOrWildcard}
+TypeOrWildcard ::= Type
+ | WildcardType
+RefinedTypeOrWildcard ::= RefinedType
+ | WildcardType
+WildcardType ::= (‘?‘ | ‘_‘) TypeBounds
+```
+
+The above grammar describes the concrete syntax of types that can be written in user code.
+Semantic operations on types in the Scala type system are better defined in terms of _internal types_, which are desugared from the concrete type syntax.
+
+## Internal Types
+
+The following _abstract grammar_ defines the shape of _internal types_.
+In this specification, unless otherwise noted, "types" refer to internal types.
+Internal types abstract away irrelevant details such as precedence and grouping, and contain shapes of types that cannot be directly expressed using the concrete syntax.
+They also contain simplified, decomposed shapes for complex concrete syntax types, such as refined types.
+
+```ebnf
+Type ::= ‘AnyKind‘
+ | ‘Nothing‘
+ | TypeLambda
+ | DesignatorType
+ | ParameterizedType
+ | ThisType
+ | SuperType
+ | LiteralType
+ | ByNameType
+ | AnnotatedType
+ | RefinedType
+ | RecursiveType
+ | RecursiveThis
+ | UnionType
+ | IntersectionType
+ | SkolemType
+
+TypeLambda ::= ‘[‘ TypeParams ‘]‘ ‘=>>‘ Type
+TypeParams ::= TypeParam {‘,‘ TypeParam}
+TypeParam ::= ParamVariance id TypeBounds
+ParamVariance ::= ε | ‘+‘ | ‘-‘
+
+DesignatorType ::= Prefix ‘.‘ id
+Prefix ::= Type
+ | PackageRef
+ | ε
+PackageRef ::= id {‘.‘ id}
+
+ParameterizedType ::= Type ‘[‘ TypeArgs ‘]‘
+TypeArgs ::= TypeArg {‘,‘ TypeArg}
+TypeArg ::= Type
+                      | WildcardTypeArg
+WildcardTypeArg ::= ‘?‘ TypeBounds
+
+ThisType ::= classid ‘.‘ ‘this‘
+SuperType ::= classid ‘.‘ ‘super‘ ‘[‘ classid ‘]‘
+LiteralType ::= SimpleLiteral
+ByNameType ::= ‘=>‘ Type
+AnnotatedType ::= Type Annotation
+
+RefinedType ::= Type ‘{‘ Refinement ‘}‘
+Refinement ::= ‘type‘ id TypeAliasOrBounds
+ | ‘def‘ id ‘:‘ TypeOrMethodic
+ | ‘val‘ id ‘:‘ Type
+
+RecursiveType ::= ‘{‘ recid ‘=>‘ Type ‘}‘
+RecursiveThis ::= recid ‘.‘ ‘this‘
+
+UnionType ::= Type ‘|‘ Type
+IntersectionType ::= Type ‘&‘ Type
+
+SkolemType ::= ‘∃‘ skolemid ‘:‘ Type
+
+TypeOrMethodic ::= Type
+ | MethodicType
+MethodicType ::= MethodType
+ | PolyType
+
+MethodType ::= ‘(‘ MethodTypeParams ‘)‘ TypeOrMethodic
+MethodTypeParams ::= ε
+ | MethodTypeParam {‘,‘ MethodTypeParam}
+MethodTypeParam ::= id ‘:‘ Type
+
+PolyType ::= ‘[‘ PolyTypeParams ‘]‘ TypeOrMethodic
+PolyTypeParams ::= PolyTypeParam {‘,‘ PolyTypeParam}
+PolyTypeParam ::= ‘id‘ TypeBounds
+
+TypeAliasOrBounds ::= TypeAlias
+ | TypeBounds
+TypeAlias ::= ‘=‘ Type
+TypeBounds ::= ‘<:‘ Type ‘>:‘ Type
+```
+
+### Translation of Concrete Types into Internal Types
+
+Concrete types are recursively translated, or desugared, into internal types.
+Most shapes of concrete types have a one-to-one translation to shapes of internal types.
+We elaborate hereafter on the translation of the other ones.
+
+### Infix Types
```ebnf
-Path ::= StableId
- | [id ‘.’] this
-StableId ::= id
- | Path ‘.’ id
- | [id ‘.’] ‘super’ [ClassQualifier] ‘.’ id
-ClassQualifier ::= ‘[’ id ‘]’
+InfixType ::= CompoundType {id [nl] CompoundType}
```
-
-Paths are not types themselves, but they can be a part of named types and in that function form a central role in Scala's type system.
-A path is one of the following.
+A concrete _infix type_ ´T_1´ `op` ´T_2´ consists of an infix operator `op` which gets applied to two type operands ´T_1´ and ´T_2´.
+The type is translated to the internal type application `op`´[T_1, T_2]´.
+The infix operator `op` may be an arbitrary identifier.
-- The empty path ε (which cannot be written explicitly in user programs).
-- ´C.´`this`, where ´C´ references a class.
- The path `this` is taken as a shorthand for ´C.´`this` where ´C´ is the name of the class directly enclosing the reference.
-- ´p.x´ where ´p´ is a path and ´x´ is a stable member of ´p´.
- _Stable members_ are packages or members introduced by object definitions or by value definitions of [non-volatile types](#volatile-types).
-- ´C.´`super`´.x´ or ´C.´`super`´[M].x´
- where ´C´ references a class and ´x´ references a stable member of the super class or designated parent class ´M´ of ´C´.
- The prefix `super` is taken as a shorthand for ´C.´`super` where ´C´ is the name of the class directly enclosing the reference.
+Type operators follow the same [precedence and associativity as term operators](06-expressions.html#prefix-infix-and-postfix-operations).
+For example, `A + B * C` parses as `A + (B * C)` and `A | B & C` parses as `A | (B & C)`.
+Type operators ending in a colon ‘:’ are right-associative; all other operators are left-associative.
-A _stable identifier_ is a path which ends in an identifier.
+In a sequence of consecutive type infix operations ´t_0 \, \mathit{op_1} \, t_1 \, \mathit{op_2} \, ... \, \mathit{op_n} \, t_n´, all operators ´\mathit{op}\_1, ..., \mathit{op}\_n´ must have the same associativity.
+If they are all left-associative, the sequence is interpreted as ´(... (t_0 \mathit{op_1} t_1) \mathit{op_2} ...) \mathit{op_n} t_n´, otherwise it is interpreted as ´t_0 \mathit{op_1} (t_1 \mathit{op_2} ( ... \mathit{op_n} t_n) ...)´.
-## Value Types
+Under `-source:future`, if the type name is alphanumeric and the target type is not marked [`infix`](./05-classes-and-objects.html#infix), a deprecation warning is emitted.
-Every value in Scala has a type which is of one of the following forms.
+The type operators `|` and `&` are not really special.
+Nevertheless, unless shadowed, they resolve to [the fundamental type aliases `scala.|` and `scala.&`](./12-the-scala-standard-library.html#fundamental-type-aliases), which represent [union and intersection types](#union-and-intersection-types), respectively.
-### Singleton Types
+### Function Types
```ebnf
-SimpleType ::= Path ‘.’ ‘type’
+Type ::= FunTypeArgs ‘=>’ Type
+FunTypeArgs ::= InfixType
+ | ‘(’ [ FunArgTypes ] ‘)’
+ | FunParamClause
+FunArgTypes ::= FunArgType { ‘,’ FunArgType }
+FunArgType ::= Type
+ | ‘=>’ Type
+FunParamClause ::= ‘(’ TypedFunParam {‘,’ TypedFunParam } ‘)’
+TypedFunParam ::= id ‘:’ Type
```
-A _singleton type_ is of the form ´p.´`type`.
-Where ´p´ is a path pointing to a value which [conforms](06-expressions.html#expression-typing) to `scala.AnyRef`, the type denotes the set of values consisting of `null` and the value denoted by ´p´ (i.e., the value ´v´ for which `v eq p`).
-Where the path does not conform to `scala.AnyRef` the type denotes the set consisting of only the value denoted by ´p´.
+The concrete function type ´(T_1, ..., T_n) \Rightarrow R´ represents the set of function values that take arguments of types ´T_1, ..., T_n´ and yield results of type ´R´.
+The case of exactly one argument type ´T \Rightarrow R´ is a shorthand for ´(T) \Rightarrow R´.
+An argument type of the form ´\Rightarrow T´ represents a [call-by-name parameter](04-basic-definitions.html#by-name-parameters) of type ´T´.
+
+Function types associate to the right, e.g. ´S \Rightarrow T \Rightarrow R´ is the same as ´S \Rightarrow (T \Rightarrow R)´.
+
+Function types are [covariant](04-basic-definitions.md#variance-annotations) in their result type and [contravariant](04-basic-definitions.md#variance-annotations) in their argument types.
-
+Function types translate into internal class types that define an `apply` method.
+Specifically, the ´n´-ary function type ´(T_1, ..., T_n) \Rightarrow R´ translates to the internal class type `scala.Function´_n´[´T_1´, ..., ´T_n´, ´R´]`.
+In particular ´() \Rightarrow R´ is a shorthand for class type `scala.Function´_0´[´R´]`.
-### Literal Types
+Such class types behave as if they were instances of the following trait:
+
+```scala
+trait Function´_n´[-´T_1´, ..., -´T_n´, +´R´]:
+ def apply(´x_1´: ´T_1´, ..., ´x_n´: ´T_n´): ´R´
+```
+
+Their exact supertype and implementation can be consulted in the [function classes section](./12-the-scala-standard-library.md#the-function-classes) of the standard library page in this document.
+
+_Dependent function types_ are function types whose parameters are named and can be referred to in result types.
+In the concrete type ´(x_1: T_1, ..., x_n: T_n) \Rightarrow R´, ´R´ can refer to the parameters ´x_i´, notably to form path-dependent types.
+It translates to the internal [refined type](#refined-types)
+```scala
+scala.Function´_n´[´T_1´, ..., ´T_n´, ´S´] {
+ def apply(´x_1´: ´T_1´, ..., ´x_n´: ´T_n´): ´R´
+}
+```
+where ´S´ is the least super type of ´R´ that does not mention any of the ´x_i´.
+
+_Polymorphic function types_ are function types that take type arguments.
+Their result type must be a function type.
+In the concrete type ´[a_1 >: L_1 <: H_1, ..., a_n >: L_n <: H_n] => (T_1, ..., T_m) => R´, the types ´T_j´ and ´R´ can refer to the type parameters ´a_i´.
+It translates to the internal refined type
+```scala
+scala.PolyFunction {
+  def apply[´a_1 >: L_1 <: H_1, ..., a_n >: L_n <: H_n´](´x_1´: ´T_1´, ..., ´x_n´: ´T_n´): ´R´
+}
+```
+
+### Tuple Types
```ebnf
-SimpleType ::= Literal
+SimpleType1 ::= ...
+ | ‘(’ TypesOrWildcards ‘)’
```
-A literal type `lit` is a special kind of singleton type which denotes the single literal value `lit`.
-Thus, the type ascription `1: 1` gives the most precise type to the literal value `1`: the literal type `1`.
+A _tuple type_ ´(T_1, ..., T_n)´ where ´n \geq 2´ is sugar for the type `´T_1´ *: ... *: ´T_n´ *: scala.EmptyTuple`, which is itself a series of nested infix types which are sugar for `*:[´T_1´, *:[´T_2´, ... *:[´T_n´, scala.EmptyTuple]]]`.
+The ´T_i´ can be wildcard type arguments.
-At run time, an expression `e` is considered to have literal type `lit` if `e == lit`.
-Concretely, the result of `e.isInstanceOf[lit]` and `e match { case _ : lit => }` is determined by evaluating `e == lit`.
+Notes:
+
+- `(´T_1´)` is the type ´T_1´, and not `´T_1´ *: scala.EmptyTuple` (´T_1´ cannot be a wildcard type argument in that case).
+- `()` is not a valid type (i.e. it is not desugared to `scala.EmptyTuple`).
+
+### Concrete Refined Types
+
+```ebnf
+RefinedType ::= AnnotType {[nl] Refinement}
+SimpleType1 ::= ...
+ | Refinement
+Refinement ::= :<<< [RefineDef] {semi [RefineDef]} >>>
+
+RefineDef ::= ‘val’ ValDef
+ | ‘def’ DefDef
+ | ‘type’ {nl} TypeDef
+```
+
+In the concrete syntax of types, refinements can contain several refined definitions.
+They must all be abstract.
+Moreover, the refined definitions can refer to each other as well as to members of the parent type, i.e., they have access to `this`.
+
+In the internal types, each refinement defines exactly one refined definition, and references to `this` must be made explicit in a recursive type.
+
+The conversion from the concrete syntax to the abstract syntax works as follows:
-Literal types are available for all types for which there is dedicated syntax except `Unit`.
-This includes the numeric types (other than `Byte` and `Short` which don't currently have syntax), `Boolean`, `Char` and `String`.
+1. Create a fresh recursive this name ´\alpha´.
+2. Replace every implicit or explicit reference to `this` in the refinement definitions by ´\alpha´.
+3. Create nested [refined types](#refined-types), one for every refined definition.
+4. Unless ´\alpha´ was never actually used, wrap the result in a [recursive type](#recursive-types) `{ ´\alpha´ => ´...´ }`.
-### Stable Types
-A _stable type_ is a singleton type, a literal type, or a type that is declared to be a subtype of trait `scala.Singleton`.
+### Concrete Type Lambdas
-### Type Projection
+```ebnf
+TypeLambda ::= TypeLambdaParams ‘=>>’ Type
+TypeLambdaParams ::= ‘[’ TypeLambdaParam {‘,’ TypeLambdaParam} ‘]’
+TypeLambdaParam ::= {Annotation} (id | ‘_’) [TypeParamClause] TypeBounds
+TypeParamClause ::= ‘[’ VariantTypeParam {‘,’ VariantTypeParam} ‘]’
+VariantTypeParam ::= {Annotation} [‘+’ | ‘-’] (id | ‘_’) [TypeParamClause] TypeBounds
+```
+
+At the top level of concrete type lambda parameters, variance annotations are not allowed.
+However, in internal types, all type lambda parameters have explicit variance annotations.
+
+When translating a concrete type lambda into an internal one, the variance of each type parameter is _inferred_ from its usages in the body of the type lambda.
+
+## Definitions
+
+From here onwards, we refer to internal types by default.
+
+### Kinds
+
+The Scala type system is fundamentally higher-kinded.
+_Types_ are either _proper types_, _type constructors_ or _poly-kinded types_.
+
+- Proper types are the types of _terms_.
+- Type constructors are type-level functions from types to types.
+- Poly-kinded types can take various kinds.
+
+All types live in a single lattice with respect to a [_conformance_](#conformance) relationship ´<:´.
+The _top type_ is `AnyKind` and the _bottom type_ is `Nothing`: all types conform to `AnyKind`, and `Nothing` conforms to all types.
+They can be referred to with [the fundamental type aliases `scala.AnyKind` and `scala.Nothing`](./12-the-scala-standard-library.html#fundamental-type-aliases), respectively.
+
+Types can be _concrete_ or _abstract_.
+An abstract type ´T´ always has lower and upper bounds ´L´ and ´H´ such that ´L >: T´ and ´T <: H´.
+A concrete type ´T´ is considered to have itself as both lower and upper bound.
+
+The kind of a type is indicated by its (transitive) upper bound:
+
+- A type `´T <:´ scala.Any` is a proper type.
+- A type `´T <: K´` where ´K´ is a [_type lambda_](#type-lambdas) (of the form `[´\pm a_1 >: L_1 <: H_1´, ..., ´\pm a_n >: L_n <: H_n´] =>> ´U´`) is a type constructor.
+- Other types are poly-kinded; they are neither proper types nor type constructors.
+
+As a consequence, `AnyKind` itself is poly-kinded.
+`Nothing` is _universally-kinded_: it has all kinds at the same time, since it conforms to all types.
+
+With this representation, it is rarely necessary to explicitly talk about the kinds of types.
+Usually, the kinds of types are implicit through their bounds.
+
+Another way to look at it is that type bounds _are_ kinds.
+They represent sets of types: ´>: L <: H´ denotes the set of types ´T´ such that ´L <: T´ and ´T <: H´.
+A set of types can be seen as a _type of types_, i.e., as a _kind_.
+
+#### Conventions
+
+Type bounds are formally always of the form `´>: L <: H´`.
+By convention, we can omit either of both bounds in writing.
+
+- When omitted, the lower bound ´L´ is `Nothing`.
+- When omitted, the higher bound ´H´ is `Any` (_not_ `AnyKind`).
+
+These conventions correspond to the defaults in the concrete syntax.
+
+### Proper Types
+
+Proper types are also called _value types_, as they represent sets of _values_.
+
+_Stable types_ are value types that contain exactly one non-`null` value.
+Stable types can be used as prefixes in named [designator types](#designator-types).
+The stable types are
+
+- designator types referencing a stable term,
+- this types,
+- super types,
+- literal types,
+- recursive this types, and
+- skolem types.
+
+Every stable type ´T´ is concrete and has an _underlying_ type ´U´ such that ´T <: U´.
+
+### Type Constructors
+
+To each type constructor corresponds an _inferred type parameter clause_ which is computed as follows:
+
+- For a [type lambda](#type-lambdas), its type parameter clause (including variance annotations).
+- For a [polymorphic class type](#type-designators), the type parameter clause of the referenced class definition.
+- For a non-class [type designator](#type-designators), the inferred clause of its upper bound.
+
+### Type Definitions
+
+A _type definition_ ´D´ represents the right-hand-side of a `type` member definition or the bounds of a type parameter.
+It is either:
+
+- a type alias of the form ´= U´, or
+- an abstract type definition with bounds ´>: L <: H´.
+
+All type definitions have a lower bound ´L´ and an upper bound ´H´, which are types.
+For type aliases, ´L = H = U´.
+
+The type definition of a type parameter is never a type alias.
+
+## Types
+
+### Type Lambdas
```ebnf
-SimpleType ::= SimpleType ‘#’ id
+TypeLambda ::= ‘[‘ TypeParams ‘]‘ ‘=>>‘ Type
+TypeParams ::= TypeParam {‘,‘ TypeParam}
+TypeParam ::= ParamVariance id TypeBounds
+ParamVariance ::= ε | ‘+‘ | ‘-‘
```
-A _type projection_ ´T´#´x´ references the type member named ´x´ of type ´T´.
+A _type lambda_ of the form `[´\pm a_1 >: L_1 <: H_1´, ..., ´\pm a_n >: L_n <: H_n´] =>> ´U´` is a direct representation of a type constructor with ´n´ type parameters.
+When applied to ´n´ type arguments that conform to the specified bounds, it produces another type ´U´.
+Type lambdas are always concrete types.
+
+The scope of a type parameter extends over the result type ´U´ as well as the bounds of the type parameters themselves.
+
+All type constructors conform to some type lambda.
+
+The type bounds of the parameters of a type lambda are in contravariant position, while its result type is in covariant position.
+If some type constructor `´T <:´ [´\pm a_1 >: L_1 <: H_1´, ..., ´\pm a_n >: L_n <: H_n´] =>> ´U´`, then ´T´'s ´i´th type parameter bounds contain the bounds ´>: L_i <: H_i´, and its result type conforms to ´U´.
+
+Note: the concrete syntax of type lambdas does not allow specifying variances for type parameters.
+Instead, variances are inferred from the body of the lambda to be as general as possible.
+
+##### Example
+
+```scala
+type Lst = [T] =>> List[T] // T is inferred to be covariant with bounds >: Nothing <: Any
+type Fn = [A <: Seq[?], B] =>> (A => B) // A is inferred to be contravariant, B covariant
+
+val x: Lst[Int] = List(1) // ok, Lst[Int] expands to List[Int]
+val f: Fn[List[Int], Int] = (x: List[Int]) => x.head // ok
-
+val g: Fn[Int, Int] = (x: Int) => x // error: Int does not conform to the bound Seq[?]
+
+def liftPair[F <: [T] =>> Any](f: F[Int]): Any = f
+liftPair[Lst](List(1)) // ok, Lst <: ([T] =>> Any)
+```
-### Type Designators
+### Designator Types
```ebnf
-SimpleType ::= StableId
+DesignatorType ::= Prefix ‘.‘ id
+Prefix ::= Type
+ | PackageRef
+ | ε
+PackageRef ::= id {‘.‘ id}
```
-A _type designator_ refers to a named value type.
-It can be simple or qualified.
-All such type designators are shorthands for type projections.
+A designator type (or designator for short) is a reference to a definition.
+Term designators refer to term definitions, while type designators refer to type definitions.
-Specifically, the unqualified type name ´t´ where ´t´ is bound in some class, object, or package ´C´ is taken as a shorthand for
-´C.´`this.type#`´t´.
-If ´t´ is not bound in a class, object, or package, then ´t´ is taken as a shorthand for ε`.type#`´t´.
+In the abstract syntax, the `id` retains whether it is a term or type.
+In the concrete syntax, an `id` refers to a *type* designator, while `id.type` refers to a *term* designator.
+In that context, term designators are often called _singleton types_.
-A qualified type designator has the form `p.t` where `p` is a [path](#paths) and _t_ is a type name.
-Such a type designator is equivalent to the type projection `p.type#t`.
+Designators with an empty prefix ´\epsilon´ are called direct designators.
+They refer to local definitions available in the scope:
-###### Example
+- Local `type`, `object`, `val`, `lazy val`, `var` or `def` definitions
+- Term or type parameters
+
+The `id`s of direct designators are protected from accidental shadowing in the abstract syntax.
+They retain the identity of the exact definition they refer to, rather than relying on scope-based name resolution. [^debruijnoralpha]
+
+[^debruijnoralpha]: In the literature, this is often achieved through De Bruijn indices or through alpha-renaming when needed. In a concrete implementation, this is often achieved through retaining *symbolic* references in a symbol table.
+
+The ´\epsilon´ prefix cannot be written in the concrete syntax.
+A bare `id` is used instead and resolved based on scopes.
+
+Named designators refer to *member* definitions of a non-empty prefix:
+
+- Top-level definitions, including top-level classes, have a package ref prefix
+- Class member definitions and refinements have a type prefix
+
+#### Term Designators
+
+A term designator ´p.x´ referring to a term definition `t` has an _underlying type_ ´U´.
+If ´p = \epsilon´ or ´p´ is a package ref, the underlying type ´U´ is the _declared type_ of `t` and ´p.x´ is a stable type if and only if `t` is a `val` or `object` definition.
+Otherwise, the underlying type ´U´ and whether ´p.x´ is a stable type are determined by [`memberType`](#member-type)`(´p´, ´x´)`.
+
+All term designators are concrete types.
+If `scala.Null ´<: U´`, the term designator denotes the set of values consisting of `null` and the value denoted by ´t´, i.e., the value ´v´ for which `t eq v`.
+Otherwise, the designator denotes the singleton set only containing ´v´.
+
+#### Type Designators
-Some type designators and their expansions are listed below.
-We assume a local type parameter ´t´, a value `maintable` with a type member `Node` and the standard class `scala.Int`,
+A type designator ´p.C´ referring to a _class_ definition (including traits and hidden object classes) is a _class type_.
+If the class is monomorphic, the type designator is a value type denoting the set of instances of ´C´ or any of its subclasses.
+Otherwise it is a type constructor with the same type parameters as the class definition.
+All class types are concrete, non-stable types.
-| Designator | Expansion |
-|-------------------- | --------------------------|
-|t | ε.type#t |
-|Int | scala.type#Int |
-|scala.Int | scala.type#Int |
-|data.maintable.Node | data.maintable.type#Node |
+If a type designator ´p.T´ is not a class type, it refers to a type definition `T` (a type parameter or a `type` member definition) and has an _underlying [type definition](#type-definitions)_.
+If ´p = \epsilon´ or ´p´ is a package ref, the underlying type definition is the _declared type definition_ of `T`.
+Otherwise, it is determined by [`memberType`](#member-type)`(´p´, ´T´)`.
+A non-class type designator is concrete (resp. stable) if and only if its underlying type definition is an alias ´U´ and ´U´ is itself concrete (resp. stable).
### Parameterized Types
```ebnf
-SimpleType ::= SimpleType TypeArgs
-TypeArgs ::= ‘[’ Types ‘]’
+ParameterizedType ::= Type ‘[‘ TypeArgs ‘]‘
+TypeArgs ::= TypeArg {‘,‘ TypeArg}
+TypeArg ::= Type
+                  | WildcardTypeArg
+WildcardTypeArg ::= ‘?‘ TypeBounds
```
-A _parameterized type_ ´T[ T_1, ..., T_n ]´ consists of a type designator ´T´ and type arguments ´T_1, ..., T_n´ where ´n \geq 1´.
-´T´ must refer to a type constructor which takes ´n´ type parameters ´a_1, ..., a_n´.
+A _parameterized type_ ´T[T_1, ..., T_n]´ consists of a type constructor ´T´ and type arguments ´T_1, ..., T_n´ where ´n \geq 1´.
+The parameterized type is well-formed if
-
-Say the type parameters have lower bounds ´L_1, ..., L_n´ and upper bounds ´U_1, ..., U_n´.
-The parameterized type is well-formed if each type argument _conforms to its bounds_, i.e. ´\sigma L_i <: T_i <: \sigma U_i´ where ´\sigma´ is the substitution ´[ a_1 := T_1, ..., a_n := T_n ]´.
+- ´T´ is a type constructor which takes ´n´ type parameters ´a_1, ..., a_n´, i.e., it must conform to a type lambda of the form ´[\pm a_1 >: L_1 <: H_1, ..., \pm a_n >: L_n <: H_n] =>> U´, and
+- if ´T´ is an abstract type constructor, none of the type arguments is a wildcard type argument, and
+- each type argument _conforms to its bounds_, i.e., given ´\sigma´ the substitution ´[a_1 := T_1, ..., a_n := T_n]´, for each ´i´, either:
+  - ´T_i´ is a type and ´\sigma L_i <: T_i <: \sigma H_i´, or
+  - ´T_i´ is a wildcard type argument ´? >: L_{Ti} <: H_{Ti}´ and ´\sigma L_i <: L_{Ti}´ and ´H_{Ti} <: \sigma H_i´.
+
+´T[T_1, ..., T_n]´ is a _parameterized class type_ if and only if ´T´ is a [class type](#type-designators).
+All parameterized class types are value types.
+
+In the concrete syntax of wildcard type arguments, if both bounds are omitted, the real bounds are inferred from the bounds of the corresponding type parameter in the target type constructor (which must be concrete).
+If only one bound is omitted, `Nothing` or `Any` is used, as usual.
+
+Also in the concrete syntax, `_` can be used instead of `?` for compatibility reasons, with the same meaning.
+This alternative will be deprecated in the future, and is already deprecated under `-source:future`.
+
+#### Simplification Rules
+
+Wildcard type arguments used in covariant or contravariant positions can always be simplified to regular types.
+
+Let ´T[T_1, ..., T_n]´ be a parameterized type for a concrete type constructor.
+Then, applying a wildcard type argument ´? >: L <: H´ at the ´i´'th position obeys the following equivalences:
+
+- If the type parameter ´T_i´ is declared covariant, then ´T[..., ? >: L <: H, ...] =:= T[..., H, ...]´.
+- If the type parameter ´T_i´ is declared contravariant, then ´T[..., ? >: L <: H, ...] =:= T[..., L, ...]´.
#### Example Parameterized Types
@@ -180,12 +515,12 @@ Given the partial type definitions:
```scala
class TreeMap[A <: Comparable[A], B] { ... }
-class List[A] { ... }
+class List[+A] { ... }
class I extends Comparable[I] { ... }
-class F[M[A], X] { ... }
+class F[M[A], X] { ... } // M[A] desugars to M <: [A] =>> Any
class S[K <: String] { ... }
-class G[M[Z <: I], I] { ... }
+class G[M[Z <: I], I] { ... } // M[Z <: I] desugars to M <: [Z <: I] =>> Any
```
the following parameterized types are well-formed:
@@ -196,7 +531,13 @@ List[I]
List[List[Boolean]]
F[List, Int]
+F[[X] =>> List[X], Int]
G[S, String]
+
+List[?] // ? inferred as ? >: Nothing <: Any, equivalent to List[Any]
+List[? <: String] // equivalent to List[String]
+S[? <: String]
+F[?, Boolean] // ? inferred as ? >: Nothing <: [A] =>> Any
```
and the following types are ill-formed:
@@ -204,96 +545,100 @@ and the following types are ill-formed:
```scala
TreeMap[I] // illegal: wrong number of parameters
TreeMap[List[I], Int] // illegal: type parameter not within bound
+List[[X] =>> List[X]]  // illegal: List expects a proper type argument,
+                       // not a type constructor
F[Int, Boolean] // illegal: Int is not a type constructor
F[TreeMap, Int] // illegal: TreeMap takes two parameters,
// F expects a constructor taking one
+F[[X, Y] =>> (X, Y)]   // illegal: the type lambda takes two parameters,
+                       // F expects a constructor taking one
G[S, Int] // illegal: S constrains its parameter to
// conform to String,
// G expects type constructor with a parameter
// that conforms to Int
```
-#### Wildcard Type Argument
+The following code also contains an ill-formed type:
-
-```ebnf
-WildcardType ::= ‘_’ TypeBounds
+```scala
+trait H[F[A]]: // F[A] desugars to F <: [A] =>> Any, which is abstract
+ def f: F[_] // illegal : an abstract type constructor
+ // cannot be applied to wildcard arguments.
```
-A _wildcard type argument_ is of the form `_´\;´>:´\,L\,´<:´\,U´`.
-A wildcard type must appear as a type argument of a parameterized type.
-The parameterized type to which the wildcard type is applied cannot be an abstract type constructor.
-
-Both bound clauses may be omitted.
-If both bounds are omitted, the real bounds are inferred from the bounds of the corresponding type parameter in the target type constructor.
-Otherwise, if a lower bound clause `>:´\,L´` is missing, `>:´\,´scala.Nothing` is assumed.
-Otherwise, if an upper bound clause `<:´\,U´` is missing, `<:´\,´scala.Any` is assumed.
+### This Types
-Given the [above type definitions](#example-parameterized-types), the following types are well-formed:
-
-```scala
-List[_] // inferred as List[_ >: Nothing <: Any]
-List[_ <: java.lang.Number]
-S[_ <: String]
-F[_, Boolean]
+```ebnf
+ThisType ::= classid ‘.‘ ‘this‘
```
-and the following code contains an ill-formed type:
+A _this type_ `´C´.this` denotes the `this` value of class ´C´ within ´C´.
-```scala
-trait H[F[A]]:
- def f: F[_] // illegal : an abstract type constructor
- // cannot be applied to wildcard arguments.
-```
+This types often appear implicitly as the prefix of [designator types](#designator-types) referring to members of ´C´.
+They play a particular role in the type system, since they are affected by the [as seen from](#as-seen-from) operation on types.
-Wildcard types may also appear as parts of [infix types](#infix-types), [function types](#function-types), or [tuple types](#tuple-types).
-Their expansion is then the expansion in the equivalent parameterized type.
+This types are stable types.
+The underlying type of `´C´.this` is the [self type](05-classes-and-objects.html#templates) of ´C´.
-##### Simplification Rules
+### Super Types
-Let ´T[T_1, ..., T_n]´ be a parameterized type.
-Then, applying a wildcard type argument ´t´ of the form ´\\_ >: L <: U´ at the ´i´'th position obeys the following equivalences:
+```ebnf
+SuperType ::= classid ‘.‘ ‘super‘ ‘[‘ classid ‘]‘
+```
-- If the type parameter ´T_i´ is declared covariant, then ´t \equiv U´
-- If the type parameter ´T_i´ is declared contravariant, then ´t \equiv L´
+A _super type_ `´C´.super[´D´]` denotes the `this` value of class ´C´ within ´C´, but "widened" to only see members coming from a parent class or trait ´D´.
-### Tuple Types
+Super types exist for compatibility with Scala 2, which allows shadowing of inner classes.
+In a Scala 3-only context, a super type can always be replaced by the corresponding [this type](#this-types).
+Therefore, we omit further discussion of super types in this specification.
+
+### Literal Types
```ebnf
-SimpleType ::= ‘(’ Types ‘)’
+LiteralType ::= SimpleLiteral
```
-A _tuple type_ ´(T_1, ..., T_n)´ where ´n \geq 2´ is an alias for the type `´T_1´ *: ... *: ´T_n´ *: scala.EmptyTuple`.
+A literal type `lit` denotes the single literal value `lit`.
+Thus, the type ascription `1: 1` gives the most precise type to the literal value `1`: the literal type `1`.
-Notes:
-- `(´T´)` is just the type ´T´, and not `´T´ *: scala.EmptyTuple`.
-- `()` is not a valid type, and not `scala.EmptyTuple`.
+At run time, an expression `e` is considered to have literal type `lit` if `e == lit`.
+Concretely, the result of `e.isInstanceOf[lit]` and `e match { case _ : lit => }` is determined by evaluating `e == lit`.
-If ´n \leq 22´, the type `´T_1´ *: ... *: ´T_n´ *: scala.EmptyTuple` is both a subtype and a supertype of tuple class `scala.Tuple´_n´[´T_1´, ..., ´T_n´]`.
+Literal types are available for all primitive types, as well as for `String`.
+However, only literal types for `Int`, `Long`, `Float`, `Double`, `Boolean`, `Char` and `String` can be expressed in the concrete syntax.
-Tuple classes are case classes whose fields can be accessed using selectors `_1`, ..., `_n`.
-Their functionality is abstracted in the corresponding `scala.Product_´n´` trait.
-The _n_-ary tuple class and product trait are defined at least as follows in the standard Scala library (they might also add other methods and implement other traits).
+Literal types are stable types.
+Their underlying type is the primitive type containing their value.
+
+##### Example
```scala
-case class Tuple´_n´[+´T_1´, ..., +´T_n´](_1: ´T_1´, ..., _n: ´T_n´)
-extends Product´_n´[´T_1´, ..., ´T_n´]
+val x: 1 = 1
+val y: false = false
+val z: false = y
+val int: Int = x
+
+val badX: 1 = int // error: Int is not a subtype of 1
+val badY: false = true // error: true is not a subtype of false
+```
-trait Product´_n´[+´T_1´, ..., +´T_n´] extends Product:
- override def productArity = ´n´
- def _1: ´T_1´
- ...
- def _n: ´T_n´
+### By-Name Types
+
+```ebnf
+ByNameType ::= ‘=>‘ Type
```
+A by-name type ´=> T´ denotes the declared type of a by-name term parameter.
+By-name types can only appear as the types of parameters in method types, and as type arguments in [parameterized types](#parameterized-types).
+
+
+
### Annotated Types
```ebnf
-AnnotType ::= SimpleType {Annotation}
+AnnotatedType ::= Type Annotation
```
-An _annotated type_ ´T´ ´a_1, ..., a_n´ attaches [annotations](11-annotations.html#user-defined-annotations) ´a_1, ..., a_n´ to the type ´T´.
+An _annotated type_ ´T a´ attaches the [annotation](11-annotations.html#user-defined-annotations) ´a´ to the type ´T´.
###### Example
@@ -303,83 +648,121 @@ The following type adds the `@suspendable` annotation to the type `String`:
String @suspendable
```
-### Compound Types
+### Refined Types
```ebnf
-CompoundType ::= AnnotType {‘with’ AnnotType} [Refinement]
- | Refinement
-Refinement ::= [nl] ‘{’ RefineStat {semi RefineStat} ‘}’
-RefineStat ::= Dcl
- | ‘type’ TypeDef
- |
+RefinedType ::= Type ‘{‘ Refinement ‘}‘
+Refinement ::= ‘type‘ id TypeAliasOrBounds
+ | ‘def‘ id ‘:‘ TypeOrMethodic
+ | ‘val‘ id ‘:‘ Type
```
-A _compound type_ ´T_1´ `with` ... `with` ´T_n \\{ R \\}´ represents objects with members as given in the component types ´T_1, ..., T_n´ and the refinement ´\\{ R \\}´.
-A refinement ´\\{ R \\}´ contains declarations and type definitions.
-If a declaration or definition overrides a declaration or definition in one of the component types ´T_1, ..., T_n´, the usual rules for [overriding](05-classes-and-objects.html#overriding) apply; otherwise the declaration or definition is said to be “structural” [^2].
+A _refined type_ ´T { R }´ denotes the set of values that belong to ´T´ and also have a _member_ conforming to the refinement ´R´.
-[^2]: A reference to a structurally defined member (method call or access to a value or variable) may generate binary code that is significantly slower than an equivalent code to a non-structural member.
+The refined type ´T { R }´ is well-formed if:
+
+- ´T´ is a proper type, and
+- if ´R´ is a term (`def` or `val`) refinement, the refined type is a proper type, and
+- if ´R´ overrides a member of ´T´, the usual rules for [overriding](05-classes-and-objects.html#overriding) apply, and
+- if ´R´ is a `def` refinement with a [polymorphic method type](#polymorphic-method-types), then ´R´ overrides a member definition of ´T´.
+
+As an exception to the last rule, a polymorphic method type refinement is allowed if `´T <:´ scala.PolyFunction` and ´id´ is the name `apply`.
-Within a method declaration in a structural refinement, the type of any value parameter may only refer to type parameters or abstract types that are contained inside the refinement.
-That is, it must refer either to a type parameter of the method itself, or to a type definition within the refinement.
-This restriction does not apply to the method's result type.
+If the refinement ´R´ overrides no member of ´T´ and is not an occurrence of the `scala.PolyFunction` exception, the refinement is said to be “structural” [^2].
-If no refinement is given, the empty refinement is implicitly added, i.e. ´T_1´ `with` ... `with` ´T_n´ is a shorthand for ´T_1´ `with` ... `with` ´T_n \\{\\}´.
+[^2]: A reference to a structurally defined member (method call or access to a value or variable) may generate binary code that is significantly slower than an equivalent code to a non-structural member.
-A compound type may also consist of just a refinement ´\\{ R \\}´ with no preceding component types.
-Such a type is equivalent to `AnyRef` ´\\{ R \\}´.
+Note: since a refinement does not define a _class_, it is not possible to use a [this type](#this-types) to reference term and type members of the parent type ´T´ within the refinement.
+When the surface syntax of refined types makes such references, a [recursive type](#recursive-types) wraps the refined type, giving access to members of self through a recursive-this type.
###### Example
-The following example shows how to declare and use a method which has a parameter type that contains a refinement with structural declarations.
+Given the following class definitions:
```scala
-case class Bird (val name: String) extends Object {
- def fly(height: Int) = ...
-...
-}
-case class Plane (val callsign: String) extends Object {
- def fly(height: Int) = ...
-...
-}
-def takeoff(
- runway: Int,
- r: { val callsign: String; def fly(height: Int) }) = {
- tower.print(r.callsign + " requests take-off on runway " + runway)
- tower.read(r.callsign + " is clear for take-off")
- r.fly(1000)
-}
-val bird = new Bird("Polly the parrot"){ val callsign = name }
-val a380 = new Plane("TZ-987")
-takeoff(42, bird)
-takeoff(89, a380)
+trait T:
+ type X <: Option[Any]
+ def foo: Any
+ def fooPoly[A](x: A): Any
+
+trait U extends T:
+ override def foo: Int
+ override def fooPoly[A](x: A): A
+
+trait V extends T:
+ type X = Some[Int]
+ def bar: Int
+ def barPoly[A](x: A): A
```
-Although `Bird` and `Plane` do not share any parent class other than `Object`, the parameter _r_ of method `takeoff` is defined using a refinement with structural declarations to accept any object that declares a value `callsign` and a `fly` method.
+We get the following conformance relationships:
-### Infix Types
+- `U <: T { def foo: Int }`
+- `U <: T { def fooPoly[A](x: A): A }`
+- `U <: (T { def foo: Int }) { def fooPoly[A](x: A): A }` (we can chain refined types to refine multiple members)
+- `V <: T { type X <: Some[Any] }`
+- `V <: T { type X >: Some[Nothing] }`
+- `V <: T { type X = Some[Int] }`
+- `V <: T { def bar: Any }` (a structural refinement)
+
+The following refined types are not well-formed:
+
+- `T { def barPoly[A](x: A): A }` (structural refinement for a polymorphic method type)
+- `T { type X <: List[Any] }` (does not satisfy overriding rules)
+- `List { def head: Int }` (the parent type `List` is not a proper type)
+- `T { def foo: List }` (the refined type `List` is not a proper type)
+- `T { def foo: T.this.X }` (`T.this` is not allowed outside the body of `T`)
+
+### Recursive Types
```ebnf
-InfixType ::= CompoundType {id [nl] CompoundType}
+RecursiveType ::= ‘{‘ recid ‘=>‘ Type ‘}‘
+RecursiveThis ::= recid ‘.‘ ‘this‘
```
-An _infix type_ ´T_1´ `op` ´T_2´ consists of an infix operator `op` which gets applied to two type operands ´T_1´ and ´T_2´.
-The type is equivalent to the type application `op`´[T_1, T_2]´.
-The infix operator `op` may be an arbitrary identifier.
+A _recursive type_ of the form `{ ´\alpha´ => ´T´ }` represents the same values as ´T´, while offering ´T´ access to its _recursive this_ type `´\alpha´`.
-Type operators follow the same [precedence and associativity as term operators](06-expressions.html#prefix-infix-and-postfix-operations).
-For example, `A + B * C` parses as `A + (B * C)` and `A | B & C` parses as `A | (B & C)`.
-Type operators ending in a colon ‘:’ are right-associative; all other operators are left-associative.
+Recursive types cannot directly be expressed in the concrete syntax.
+They are created as needed when a refined type in the concrete syntax contains a refinement that needs access to the `this` value.
+Each recursive type defines a unique self-reference `´\alpha´`, distinct from any other recursive type in the system.
-In a sequence of consecutive type infix operations ´t_0 \, \mathit{op} \, t_1 \, \mathit{op_2} \, ... \, \mathit{op_n} \, t_n´, all operators ´\mathit{op}\_1, ..., \mathit{op}\_n´ must have the same associativity.
-If they are all left-associative, the sequence is interpreted as ´(... (t_0 \mathit{op_1} t_1) \mathit{op_2} ...) \mathit{op_n} t_n´, otherwise it is interpreted as ´t_0 \mathit{op_1} (t_1 \mathit{op_2} ( ... \mathit{op_n} t_n) ...)´.
+Recursive types can be unfolded during subtyping as needed, replacing references to its `´\alpha´` by a stable reference to the other side of the conformance relationship.
-The type operators `|` and `&` are not really special.
-Nevertheless, unless shadowed, they resolve to `scala.|` and `scala.&`, which represent [union and intersection types](#union-and-intersection-types), respectively.
+##### Example
+
+Given the class definitions in the [refined types](#refined-types) section, we can write the following refined type in the source syntax:
+
+```scala
+T { def foo: X }
+// equivalent to
+T { def foo: this.X }
+```
+
+This type is not directly expressible as a refined type alone, as the refinement cannot access the `this` value.
+Instead, in the abstract syntax of types, it is translated to `{ ´\alpha´ => ´T´ { def foo: ´\alpha´.X } }`.
+
+Given the following definitions:
+
+```scala
+trait Z extends T:
+ type X = Option[Int]
+ def foo: Option[Int] = Some(5)
+
+val z: Z
+```
+
+we can check that `z ´<:´ { ´\alpha´ => ´T´ { def foo: ´\alpha´.X } }`.
+We first unfold the recursive type, substituting ´z´ for ´\alpha´, resulting in `z ´<:´ T { def foo: z.X }`.
+Since the underlying type of ´z´ is ´Z´, we can resolve `z.X` to mean `Option[Int]`, and then validate that `z ´<:´ T` and that `z` has a member `def foo: Option[Int]`.
### Union and Intersection Types
-Syntactically, the types `S | T` and `S & T` are infix types, where the infix operators are `|` and `&`, respectively (see above).
+```ebnf
+UnionType ::= Type ‘|‘ Type
+IntersectionType ::= Type ‘&‘ Type
+```
+
+Syntactically, the types `S | T` and `S & T` are infix types, where the infix operators are `|` and `&`, respectively (see [infix types](#infix-types)).
However, in this specification, ´S | T´ and ´S & T´ refer to the underlying core concepts of *union and intersection types*, respectively.
@@ -390,22 +773,21 @@ From the [conformance rules](#conformance) rules on union and intersection types
Moreover, `&` is distributive over `|`.
For any type ´A´, ´B´ and ´C´, all of the following relationships hold:
-- ´A & B \equiv B & A´,
-- ´A | B \equiv B | A´,
-- ´(A & B) & C \equiv A & (B & C)´,
-- ´(A | B) | C \equiv A | (B | C)´, and
-- ´A & (B | C) \equiv (A & B) | (A & C)´.
+- ´A & B =:= B & A´,
+- ´A | B =:= B | A´,
+- ´(A & B) & C =:= A & (B & C)´,
+- ´(A | B) | C =:= A | (B | C)´, and
+- ´A & (B | C) =:= (A & B) | (A & C)´.
-If ´C´ is a type constructor, then ´C[A] & C[B]´ can be simplified using the following three rules:
+If ´C´ is a co- or contravariant type constructor, ´C[A] & C[B]´ can be simplified using the following rules:
-- If ´C´ is covariant, ´C[A] & C[B] \equiv C[A & B]´
-- If ´C´ is contravariant, ´C[A] & C[B] \equiv C[A | B]´
-- If ´C´ is invariant, emit a compile error
+- If ´C´ is covariant, ´C[A] & C[B] =:= C[A & B]´
+- If ´C´ is contravariant, ´C[A] & C[B] =:= C[A | B]´
-From the above rules, we can derive the following conformance relationships:
+The right-to-left validity of the above two rules can be derived from the definition of covariance and contravariance and the conformance rules of union and intersection types:
-- When ´C´ is covariant, ´C[A & B] <: C[A] & C[B]´.
-- When ´C´ is contravariant, ´C[A | B] <: C[A] & C[B]´.
+- When ´C´ is covariant, we can derive ´C[A & B] <: C[A] & C[B]´.
+- When ´C´ is contravariant, we can derive ´C[A | B] <: C[A] & C[B]´.
#### Join of a union type
@@ -425,56 +807,61 @@ class B extends C[B] with D with E
The join of ´A | B´ is ´C[A | B] & D´
-### Function Types
+### Skolem Types
```ebnf
-Type ::= FunctionArgs ‘=>’ Type
-FunctionArgs ::= InfixType
- | ‘(’ [ ParamType {‘,’ ParamType } ] ‘)’
+SkolemType ::= ‘∃‘ skolemid ‘:‘ Type
```
-The type ´(T_1, ..., T_n) \Rightarrow R´ represents the set of function values that take arguments of types ´T_1, ..., Tn´ and yield results of type ´R´.
-The case of exactly one argument type ´T \Rightarrow R´ is a shorthand for ´(T) \Rightarrow R´.
-An argument type of the form ´\Rightarrow T´ represents a [call-by-name parameter](04-basic-declarations-and-definitions.md#by-name-parameters) of type ´T´.
-
-Function types associate to the right, e.g. ´S \Rightarrow T \Rightarrow R´ is the same as ´S \Rightarrow (T \Rightarrow R)´.
-
-Function types are [covariant](04-basic-declarations-and-definitions.md#variance-annotations) in their result type and [contravariant](04-basic-declarations-and-definitions.md#variance-annotations) in their argument types.
+Skolem types cannot directly be written in the concrete syntax.
+Moreover, although they are proper types, they can never be inferred to be part of the types of term definitions (`val`s, `var`s and `def`s).
+They are exclusively used temporarily during subtyping derivations.
-Function types are shorthands for class types that define an `apply` method.
-Specifically, the ´n´-ary function type ´(T_1, ..., T_n) \Rightarrow R´ is a shorthand for the class type `Function´_n´[´T_1´, ..., ´T_n´, ´R´]`.
-In particular ´() \Rightarrow R´ is a shorthand for class type `Function´_0´[´R´]`.
+Skolem types are stable types.
+A skolem type of the form ´∃ \alpha : T´ represents a stable reference to an unknown value of type ´T´.
+The identifier ´\alpha´ is chosen uniquely every time a skolem type is created.
+However, as a skolem type is stable, it can be substituted in several occurrences in other types.
+When "copied" through substitution, all the copies retain the same ´\alpha´, and are therefore equivalent.
-Such class types behave as if they were instances of the following trait:
+## Methodic Types
-```scala
-trait Function´_n´[-´T_1´, ..., -´T_n´, +´R´]:
- def apply(´x_1´: ´T_1´, ..., ´x_n´: ´T_n´): ´R´
+```ebnf
+TypeOrMethodic ::= Type
+ | MethodicType
+MethodicType ::= MethodType
+ | PolyType
```
-Their exact supertype and implementation can be consulted in the [function classes section](./12-the-scala-standard-library.md#the-function-classes) of the standard library page in this document.
+Methodic types are not real types.
+They are not part of the type lattice.
-## Non-Value Types
+However, they share some meta-properties with types.
+In particular, when contained within other types that undertake some substitution, the substitution carries to the types within methodic types.
+It is therefore often convenient to think about them as types themselves.
-The types explained in the following do not denote sets of values.
+Methodic types are used as the "declared type" of `def` definitions that have at least one term or type parameter list.
### Method Types
+```ebnf
+MethodType ::= ‘(‘ MethodTypeParams ‘)‘ TypeOrMethodic
+MethodTypeParams ::= ε
+ | MethodTypeParam {‘,‘ MethodTypeParam}
+MethodTypeParam ::= id ‘:‘ Type
+```
+
A _method type_ is denoted internally as ´(\mathit{Ps})U´, where ´(\mathit{Ps})´ is a sequence of parameter names and types ´(p_1:T_1, ..., p_n:T_n)´ for some ´n \geq 0´ and ´U´ is a (value or method) type.
This type represents named methods that take arguments named ´p_1, ..., p_n´ of types ´T_1, ..., T_n´ and that return a result of type ´U´.
Method types associate to the right: ´(\mathit{Ps}\_1)(\mathit{Ps}\_2)U´ is treated as ´(\mathit{Ps}\_1)((\mathit{Ps}\_2)U)´.
-A special case are types of methods without any parameters.
-They are written here `=> T`. Parameterless methods name expressions that are re-evaluated each time the parameterless method name is referenced.
-
Method types do not exist as types of values.
If a method name is used as a value, its type is [implicitly converted](06-expressions.html#implicit-conversions) to a corresponding function type.
###### Example
-The declarations
+The definitions
```scala
def a: Int
@@ -485,19 +872,25 @@ def c (x: Int) (y: String, z: String): String
produce the typings
```scala
-a: => Int
+a: Int
b: (Int) Boolean
c: (Int) (String, String) String
```
### Polymorphic Method Types
-A polymorphic method type is denoted internally as `[´\mathit{tps}\,´]´T´` where `[´\mathit{tps}\,´]` is a type parameter section `[´a_1´ >: ´L_1´ <: ´U_1, ..., a_n´ >: ´L_n´ <: ´U_n´]` for some ´n \geq 0´ and ´T´ is a (value or method) type.
+```ebnf
+PolyType ::= ‘[‘ PolyTypeParams ‘]‘ TypeOrMethodic
+PolyTypeParams ::= PolyTypeParam {‘,‘ PolyTypeParam}
+PolyTypeParam ::= ‘id‘ TypeBounds
+```
+
+A polymorphic method type, or _poly type_ for short, is denoted internally as `[´\mathit{tps}\,´]´T´` where `[´\mathit{tps}\,´]` is a type parameter section `[´a_1´ >: ´L_1´ <: ´U_1, ..., a_n´ >: ´L_n´ <: ´U_n´]` for some ´n \geq 0´ and ´T´ is a (value or method) type.
This type represents named methods that take type arguments `´S_1, ..., S_n´` which [conform](#parameterized-types) to the lower bounds `´L_1, ..., L_n´` and the upper bounds `´U_1, ..., U_n´` and that yield results of type ´T´.
###### Example
-The declarations
+The definitions
```scala
def empty[A]: List[A]
@@ -511,153 +904,176 @@ empty : [A >: Nothing <: Any] List[A]
union : [A >: Nothing <: Comparable[A]] (x: Set[A], xs: Set[A]) Set[A]
```
-### Type Constructors
+## Operations on Types
-```
-Type ::= ... | TypeLambdaParams ‘=>>’ Type
-TypeParamClause ::= ‘[’ TypeParam {‘,’ TypeParam} ‘]’
-TypeLambdaParams ::= ‘[’ TypeLambdaParam {‘,’ TypeLambdaParam} ‘]’
-TypeLambdaParam ::= {Annotation} (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type]
-```
+This section defines a few meta-functions on types and methodic types.
-
+- [`baseType(´T´, ´C´)`](#base-type): computes the smallest type ´U´ of the form `´p´.´C´[´T_1, ..., T_n´]` such that ´T <: U´.
+- [`asSeenFrom(´T´, ´C´, ´p´)`](#as-seen-from): rebases the type ´T´ visible inside the class ´C´ "as seen from" the prefix ´p´.
+- [`memberType(´T´, ´id´)`](#member-type): finds a member of a type (`T.id`) and computes its underlying type or bounds.
-A _type constructor_ is either:
-- a _type lambda_, of the form `[´\mathit{tps}\,´] =>> ´T´` where `[´\mathit{tps}\,´]` is a type parameter clause `[´a_1´ >: ´L_1´ <: ´U_1, ..., a_n´ >: ´L_n´ <: ´U_n´]` for some ´n \gt 0´ and ´T´ is either a value type
-or another type lambda.
-- a reference to a [desugared type declaration](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases) upper-bounded by a type lambda.
-- a reference to a [polymorphic class](05-classes-and-objects.html##class-definitions).
+These meta-functions are mutually recursive.
-Each type parameter ´a_i´ of a type lambda has a variance ´v_i´ which cannot be written down by the user but is inferred from the body of the type lambda to maximize the number of types that conform to the type lambda.
-
+### Base Type
-#### Inferred type parameter clause
+The meta-function `baseType(´T´, ´C´)`, where ´T´ is a proper type and ´C´ is a class identifier, computes the smallest type ´U´ of the form `´p.C´` or `´p.C´[´U_1, ..., U_n´]` such that ´T <: U´.
+If no such type exists, the function is not defined.
+The main purpose of `baseType` is to substitute prefixes and class type parameters along the inheritance chain.
-To each type constructor corresponds an _inferred type parameter clause_ which is computed as follow:
-- For a type lambda, its type parameter clause (including variance annotations).
-- For a type declaration upper-bounded by a type lambda ´T´, the inferred clause of ´T´.
-- For a polymorphic class, its type parameter clause.
+We define `baseType(´T´, ´C´)` as follows.
+For brevity, we write `´p.X´[´U_1, ..., U_n´]` instead of `´p.X´` with ´n = 0´.
-
+Note that the cases of `superType` do not overlap with each other nor with any `baseType` case other than the `superType`-based one.
+The cases of `baseType` therefore do not overlap with each other either.
+That makes `baseType` an algorithmic partial function.
-## Kind Polymorphism
+`meet(´p.C[T_1, ..., T_n]´, ´q.C[U_1, ..., U_n]´)` computes an intersection of two (parameterized) class types for the same class, and `join` computes a union:
-Type parameters are normally partitioned into _kinds_, indicated by the top type of which it is a subtype.
-Proper types are the types of values and are subtypes of `Any`.
-Higher-kinded types are type constructors such as `List` or `Map`.
-Covariant single argument type constructors such as `List` are subtypes of `[+X] =>> Any`.
-The `Map` type constructor is a subtype of `[X, +Y] =>> Any`.
+- if `´p =:= q´` is false, then it is not defined
+- otherwise, let ´W_i´ for ´i \in 1, ..., n´ be:
+ - ´T_i & U_i´ for `meet` (resp. ´T_i | U_i´ for `join`) if the ´i´th type parameter of ´C´ is covariant
+ - ´T_i | U_i´ for `meet` (resp. ´T_i & U_i´ for `join`) if the ´i´th type parameter of ´C´ is contravariant
+ - ´T_i´ if ´T_i =:= U_i´ and the ´i´th type parameter of ´C´ is invariant
+ - not defined otherwise
+- if any of the ´W_i´ are not defined, the result is not defined
+- otherwise, the result is `´p.C[W_1, ..., W_n]´`
-A type can be used only as prescribed by its kind.
-Subtypes of `Any` cannot be applied to type arguments whereas subtypes of `[X] =>> Any` _must_ be applied to a type argument, unless they are passed to type parameters of the same kind.
+We generalize `meet(´T_1, ..., T_n´)` for a sequence as:
-A type parameter whose upper bound is [`scala.AnyKind`](https://scala-lang.org/api/3.x/scala/AnyKind.html) can have any kind and is called an _any-kinded type_.
+- not defined for ´n = 0´
+- ´T_1´ if ´n = 1´
+- `meet(meet(´T_1, ..., T_{n-1}´), ´T_n´)` if `meet(´T_1, ..., T_{n-1}´)` is defined
+- not defined otherwise
-```scala
-def f[T <: AnyKind] = ...
-```
+##### Examples
-The actual type arguments of `f` can then be types of arbitrary kinds.
-So the following are all legal:
+Given the following definitions:
```scala
-f[Int]
-f[List]
-f[Map]
-f[[X] =>> String]
-```
-
-Since the actual kind of an any-kinded type is unknown, its usage is heavily restricted.
-An any-kinded type can neither be the type of a value, nor be instantiated with type parameters.
-The only thing one can do with an any-kinded type is to pass it to another any-kinded type argument.
-
-`AnyKind` plays a special role in Scala's subtype system.
-It is a supertype of all other types, no matter what their kind is.
-It is also assumed to be kind-compatible with all other types.
-Furthermore, `AnyKind` is itself an any-kinded type, so it cannot be the type of values and it cannot be instantiated.
-
-## Base Types and Member Definitions
-
-Types of class members depend on the way the members are referenced.
-Central here are three notions, namely:
-1. the notion of the set of base types of a type ´T´,
-1. the notion of a type ´T´ in some class ´C´ seen from some
- prefix type ´S´,
-1. the notion of the set of member bindings of some type ´T´.
-
-These notions are defined mutually recursively as follows.
-
-1. The set of _base types_ of a type is a set of class types,
- given as follows.
- - The base types of a class type ´C´ with parents ´T_1, ..., T_n´ are ´C´ itself, as well as the base types of the compound type `´T_1´ with ... with ´T_n´ { ´R´ }`.
- - The base types of an aliased type are the base types of its alias.
- - The base types of an abstract type are the base types of its upper bound.
- - The base types of a parameterized type `´C´[´T_1, ..., T_n´]` are the base types of type ´C´, where every occurrence of a type parameter ´a_i´ of ´C´ has been replaced by the corresponding parameter type ´T_i´.
- - The base types of a singleton type `´p´.type` are the base types of the type of ´p´.
- - The base types of a compound type `´T_1´ with ... with ´T_n´ { ´R´ }` are the _reduced union_ of the base classes of all ´T_i´'s.
- This means: Let the multi-set ´\mathscr{S}´ be the multi-set-union of the base types of all ´T_i´'s.
- If ´\mathscr{S}´ contains several type instances of the same class, say `´S^i´#´C´[´T^i_1, ..., T^i_n´]` ´(i \in I)´, then all those instances are replaced by one of them which conforms to all others.
- It is an error if no such instance exists.
- It follows that the reduced union, if it exists, produces a set of class types, where different types are instances of different classes.
- - The base types of a type selection `´S´#´T´` are determined as follows.
- If ´T´ is an alias or abstract type, the previous clauses apply.
- Otherwise, ´T´ must be a (possibly parameterized) class type, which is defined in some class ´B´.
- Then the base types of `´S´#´T´` are the base types of ´T´ in ´B´ seen from the prefix type ´S´.
-
-1. The notion of a type ´T´ _in class ´C´ seen from some prefix type ´S´_ makes sense only if the prefix type ´S´ has a type instance of class ´C´ as a base type, say `´S'´#´C´[´T_1, ..., T_n´]`.
-Then we define as follows.
- - If `´S´ = ´\epsilon´.type`, then ´T´ in ´C´ seen from ´S´ is ´T´ itself.
- - Otherwise, if ´T´ is the ´i´'th type parameter of some class ´D´, then
- - If ´S´ has a base type `´D´[´U_1, ..., U_n´]`, for some type parameters `[´U_1, ..., U_n´]`, then ´T´ in ´C´ seen from ´S´ is ´U_i´.
- - Otherwise, if ´C´ is defined in a class ´C'´, then ´T´ in ´C´ seen from ´S´ is the same as ´T´ in ´C'´ seen from ´S'´.
- - Otherwise, if ´C´ is not defined in another class, then ´T´ in ´C´ seen from ´S´ is ´T´ itself.
- - Otherwise, if ´T´ is the singleton type `´D´.this.type` for some class ´D´ then
- - If ´D´ is a subclass of ´C´ and ´S´ has a type instance of class ´D´ among its base types, then ´T´ in ´C´ seen from ´S´ is ´S´.
- - Otherwise, if ´C´ is defined in a class ´C'´, then ´T´ in ´C´ seen from ´S´ is the same as ´T´ in ´C'´ seen from ´S'´.
- - Otherwise, if ´C´ is not defined in another class, then ´T´ in ´C´ seen from ´S´ is ´T´ itself.
- - If ´T´ is some other type, then the described mapping is performed to all its type components.
-
-If ´T´ is a possibly parameterized class type, where ´T´'s class is defined in some other class ´D´, and ´S´ is some prefix type, then we use "´T´ seen from ´S´" as a shorthand for "´T´ in ´D´ seen from ´S´".
-
-1. The _member bindings_ of a type ´T´ are
- 1. all bindings ´d´ such that there exists a type instance of some class ´C´ among the base types of ´T´ and there exists a definition or declaration ´d'´ in ´C´ such that ´d´ results from ´d'´ by replacing every type ´T'´ in ´d'´ by ´T'´ in ´C´ seen from ´T´, and
- 2. all bindings of the type's [refinement](#compound-types), if it has one.
-2. The member bindinds of ´S & T´ are all the binds of ´S´ *and* all the bindins of ´T´.
-3. The member bindings of ´S | T´ are the member bindings of its [join](#join-of-a-union-type).
-
-The _definition_ of a type projection `S#T` is the member binding ´d_T´ of the type `T` in `S`.
-In that case, we also say that `S#T` _is defined by_ ´d_T´.
+trait Iterable[+A]
+trait List[+A] extends Iterable[A]
+trait Map[K, +V] extends Iterable[(K, V)]
+trait Foo
+```
+
+we have the following `baseType` results:
+
+- `baseType(List[Int], List) = List[Int]`
+- `baseType(List[Int], Iterable) = Iterable[Int]`
+- `baseType(List[A] & Iterable[B], Iterable) = meet(Iterable[A], Iterable[B]) = Iterable[A & B]`
+- `baseType(List[A] & Foo, Iterable) = Iterable[A]` (because `baseType(Foo, Iterable)` is not defined)
+- `baseType(Int, Iterable)` is not defined
+- `baseType(Map[Int, String], Iterable) = Iterable[(Int, String)]`
+- `baseType(Map[Int, String] & Map[String, String], Map)` is not defined (because `K` is invariant)
+
+### As Seen From
+
+The meta-function `asSeenFrom(´T´, ´C´, ´p´)`, where ´T´ is a type or methodic type visible inside the class ´C´ and ´p´ is a stable type, rebases the type ´T´ "as seen from" the prefix ´p´.
+Essentially, it substitutes this-types and class type parameters in ´T´ to appropriate types visible from outside.
+Since ´T´ is visible inside ´C´, it can contain this-types and class type parameters of ´C´ itself as well as of all its enclosing classes.
+This-types of enclosing classes must be mapped to appropriate subprefixes of ´p´, while class type parameters must be mapped to appropriate concrete type arguments.
+
+`asSeenFrom(´T´, ´C´, ´p´)` only makes sense if ´p´ has a base type for ´C´, i.e., if `baseType(´p´, ´C´)` is defined.
+
+We define `asSeenFrom(´T´, ´C´, ´p´)` where `baseType(´p´, ´C´) = ´q.C[U_1, ..., U_n]´` as follows:
+
+- If ´T´ is a reference to the ´i´th class type parameter of some class ´D´:
+ - If `baseType(´p´, ´D´) ´= r.D[W_1, ..., W_m]´` is defined, then ´W_i´
+ - Otherwise, if ´q = \epsilon´ or ´q´ is a package ref, then ´T´
+ - Otherwise, ´q´ is a type, ´C´ must be defined in another class ´B´ and `baseType(´q´, ´B´)` must be defined, then `asSeenFrom(´T´, ´B´, ´q´)`
+- Otherwise, if ´T´ is a this-type `´D´.this`:
+ - If ´D´ is a subclass of ´C´ and `baseType(´p´, ´D´)` is defined, then ´p´ (this is always the case when ´D = C´)
+ - Otherwise, if ´q = \epsilon´ or ´q´ is a package ref, then ´T´
+ - Otherwise, ´q´ is a type, ´C´ must be defined in another class ´B´ and `baseType(´q´, ´B´)` must be defined, then `asSeenFrom(´T´, ´B´, ´q´)`
+- Otherwise, ´T´ where each of its type components ´T_i´ is mapped to `asSeenFrom(´T_i´, ´C´, ´p´)`.
+
+For convenience, we generalize `asSeenFrom` to _type definitions_ ´D´.
+
+- If ´D´ is an alias ´= U´, then `asSeenFrom(´D´, ´C´, ´p´) = asSeenFrom(´U´, ´C´, ´p´)`.
+- If ´D´ is an abstract type definition with bounds ´>: L <: H´, then `asSeenFrom(´D´, ´C´, ´p´) = ´>:´ asSeenFrom(´L´, ´C´, ´p´) ´<:´ asSeenFrom(´H´, ´C´, ´p´)`.
+
+### Member Type
+
+The meta-function `memberType(´T´, ´id´, ´p´)`, where ´T´ is a proper type, ´id´ is a term or type identifier, and ´p´ is a stable type, finds a member of a type (`T.id`) and computes its underlying type (for a term) or type definition (for a type) as seen from the prefix ´p´.
+For a term, it also computes whether the term is _stable_.
+`memberType` is the fundamental operation that computes the _underlying type_ or _underlying type definition_ of a [named designator type](#designator-types).
+
+The result ´M´ of a `memberType` is one of:
+
+- undefined,
+- a term result with underlying type or methodic type ´U´ and a _stable_ flag,
+- a class result with class ´C´, or
+- a type result with underlying type definition ´D´.
+
+As short-hand, we define `memberType(´T´, ´id´)` to be the same as `memberType(´T´, ´id´, ´T´)` when ´T´ is a stable type.
+
+We define `memberType(´T´, ´id´, ´p´)` as follows:
+
+- If ´T´ is a possibly parameterized class type of the form ´q.C[T_1, ..., T_n]´ (with ´n \geq 0´):
+ - Let ´m´ be the [class member](05-classes-and-objects.html#class-members) of ´C´ with name ´id´.
+ - If ´m´ is not defined, the result is undefined.
+ - If ´m´ is a class definition, the result is a class result with class ´m´.
+ - If ´m´ is a term definition in class ´D´ with declared type ´U´, the result is a term result with underlying type [`asSeenFrom`](#as-seen-from)`(´U´, ´D´, ´p´)` and stable flag true if and only if ´m´ is stable.
+ - If ´m´ is a type member definition in class ´D´, the result is a type result with underlying type definition [`asSeenFrom`](#as-seen-from)`(´U´, ´D´, ´p´)` where ´U´ is defined as follows:
+ - If ´m´ is an opaque type alias member definition with declared definition ´>: L <: H = V´, then
+ - ´U´ is ´= V´ if `´p = D.´this` or if we are computing `memberType` in a [_transparent mode_](#type-erasure),
+ - ´U´ is ´>: L <: H´ otherwise.
+ - ´U´ is the declared type definition of ´m´ otherwise.
+- If ´T´ is another monomorphic type designator of the form ´q.X´:
+ - Let ´U´ be `memberType(´q´, ´X´)`
+ - Let ´H´ be the upper bound of ´U´
+ - The result is `memberType(´H´, ´id´, ´p´)`
+- If ´T´ is another parameterized type designator of the form ´q.X[T_1, ..., T_n]´ (with ´n \geq 0´):
+ - Let ´U´ be `memberType(´q´, ´X´)`
+ - Let ´H´ be the upper bound of ´U´
+ - The result is `memberType(´H[T_1, ..., T_n]´, ´id´, ´p´)`
+- If ´T´ is a parameterized type lambda of the form `´([\pm a_1 >: L_1 <: H_1, ..., \pm a_n >: L_n <: H_n]´ =>> ´U)[T_1, ..., T_n]´`:
+ - The result is `memberType(´[a_1 := T_1, ..., a_n := T_n] U´, ´id´, ´p´)`, i.e., we beta-reduce the type redex.
+- If ´T´ is a refined type of the form `´T_1´ { ´R´ }`:
+ - Let ´M_1´ be the result of `memberType(´T_1´, ´id´, ´p´)`.
+ - If the name of the refinement ´R´ is not ´id´, let ´M_2´ be undefined.
+ - Otherwise, let ´M_2´ be the type or type definition of the refinement ´R´, as well as whether it is stable.
+ - The result is `mergeMemberType(´M_1´, ´M_2´)`.
+- If ´T´ is a union type of the form ´T_1 | T_2´:
+ - Let ´J´ be the [join](#join-of-a-union-type) of ´T´.
+ - The result is `memberType(´J´, ´id´, ´p´)`.
+- If ´T´ is an intersection type of the form ´T_1 & T_2´:
+ - Let ´M_1´ be the result of `memberType(´T_1´, ´id´, ´p´)`.
+ - Let ´M_2´ be the result of `memberType(´T_2´, ´id´, ´p´)`.
+ - The result is `mergeMemberType(´M_1´, ´M_2´)`.
+- If ´T´ is a recursive type of the form `{ ´\alpha´ => ´T_1´ }`:
+  - The result is `memberType(´T_1´, ´id´, ´p´)`.
+- If ´T´ is a stable type:
+ - Let ´U´ be the underlying type of ´T´.
+ - The result is `memberType(´U´, ´id´, ´p´)`.
+- Otherwise, the result is undefined.
+
+We define the helper function `mergeMemberType(´M_1´, ´M_2´)` as:
+
+- If either ´M_1´ or ´M_2´ is undefined, the result is the other one.
+- Otherwise, if either ´M_1´ or ´M_2´ is a class result, the result is that one.
+- Otherwise, ´M_1´ and ´M_2´ must either both be term results or both be type results.
+ - If they are term results with underlying types ´U_1´ and ´U_2´ and stable flags ´s_1´ and ´s_2´, the result is a term result whose underlying type is `meet(´U_1´, ´U_2´)` and whose stable flag is ´s_1 \lor s_2´.
+ - If they are type results with underlying type definitions ´D_1´ and ´D_2´, the result is a type result whose underlying type definition is `intersect(´D_1´, ´D_2´)`.
## Relations between types
@@ -665,80 +1081,93 @@ We define the following relations between types.
| Name | Symbolically | Interpretation |
|------------------|----------------|----------------------------------------------------|
-| Equivalence | ´T \equiv U´ | ´T´ and ´U´ are interchangeable in all contexts. |
| Conformance | ´T <: U´ | Type ´T´ conforms to ("is a subtype of") type ´U´. |
+| Equivalence | ´T =:= U´ | ´T´ and ´U´ conform to each other. |
| Weak Conformance | ´T <:_w U´ | Augments conformance for primitive numeric types. |
| Compatibility | | Type ´T´ conforms to type ´U´ after conversions. |
-### Equivalence
-
-´\color{red}{\text{TODO SCALA3: Redefine equivalence as mutual conformance?}}´
-
-Equivalence ´(\equiv)´ between types is the smallest congruence [^congruence] such that the following holds:
-
-- If ´t´ is defined by a type alias `type ´t´ = ´T´`, then ´t´ is equivalent to ´T´.
-- If a path ´p´ has a singleton type `´q´.type`, then `´p´.type ´\equiv q´.type`.
-- If ´O´ is defined by an object definition, and ´p´ is a path consisting only of package or object selectors and ending in ´O´, then `´O´.this.type ´\equiv p´.type`.
-- Two [compound types](#compound-types) are equivalent if the sequences of their component are pairwise equivalent, and occur in the same order, and their refinements are equivalent. Two refinements are equivalent if they bind the same names and the modifiers, types and bounds of every declared entity are equivalent in both refinements.
-- Two [method types](#method-types) are equivalent if:
- - neither are implicit, or they both are [^implicit];
- - they have equivalent result types;
- - they have the same number of parameters; and
- - corresponding parameters have equivalent types.
- Note that the names of parameters do not matter for method type equivalence.
-- Two [polymorphic method types](#polymorphic-method-types) are equivalent if they have the same number of type parameters, and, after renaming one set of type parameters by another, the result types as well as lower and upper bounds of corresponding type parameters are equivalent.
-- Two [type constructors](#type-constructors) are equivalent if they have the same number of type parameters, and, after renaming one list of type parameters by another, the result types as well as variances, lower and upper bounds of corresponding type parameters are equivalent.
-
-[^congruence]: A congruence is an equivalence relation which is closed under formation of contexts.
-[^implicit]: A method type is implicit if the parameter section that defines it starts with the `implicit` keyword.
-
### Conformance
-The conformance relation ´(<:)´ is the smallest transitive relation that satisfies the following conditions.
-
-- Conformance includes equivalence. If ´T \equiv U´ then ´T <: U´.
-- For every type `´T´` (of any kind), `scala.Nothing <: ´T´ <: scala.AnyKind`.
-- For every value type `´T´`, `´T´ <: scala.Any`.
-- For every type constructor `´T´` with type parameters `[´U_1´, ..., ´U_n´]`, `[´U_1´, ..., ´U_n´] =>> scala.Nothing <: ´T´ <: [´U_1´, ..., ´U_n´] =>> scala.Any`.
-- For every value type ´T´, `scala.Null <: ´T´` unless `´T´ <: scala.AnyVal`.
-- A type variable or abstract type ´t´ conforms to its upper bound and its lower bound conforms to ´t´.
-- A class type or parameterized type conforms to any of its base-types.
-- A singleton type `´p´.type` conforms to the type of the path ´p´.
-- A singleton type `´p´.type` conforms to the type `scala.Singleton`.
-- A type projection `´T´#´t´` conforms to `´U´#´t´` if ´T´ conforms to ´U´.
-- A parameterized type `´T´[´T_1´, ..., ´T_n´]` conforms to `´T´[´U_1´, ..., ´U_n´]` if the following conditions hold for ´i \in \{ 1, ..., n \}´:
- 1. If the ´i´'th type parameter of ´T´ is declared covariant, then ´T_i <: U_i´. [^argisnotwildcard]
- 1. If the ´i´'th type parameter of ´T´ is declared contravariant, then ´U_i <: T_i´. [^argisnotwildcard]
- 1. If the ´i´'th type parameter of ´T´ is declared neither covariant nor contravariant:
- 1. If neither ´T_i´ nor ´U_i´ are wildcard type arguments, then ´U_i \equiv T_i´.
- 1. If ´T_i´ is a wildcard type argument of the form ´\\_ >: L_1 <: U_1´ and ´U_i´ is a wildcard argument of the form ´\\_ >: L_2 <: U_2´, then ´L_2 <: L_1´ and ´H_1 <: H_2´ (i.e., the ´T_i´ "interval" is contained in the ´U_i´ "interval").
- 1. If ´U_i´ is a wildcard type argument of the form ´\\_ >: L_2 <: U_2´, then ´L_2 <: T_i´ and ´T_i <: U_2´.
-- A compound type `´T_1´ with ... with ´T_n´ {´R\,´}` conforms to each of its component types ´T_i´.
-- If ´T <: U_i´ for ´i \in \{ 1, ..., n \}´ and for every binding ´d´ of a type or value ´x´ in ´R´ there exists a member binding of ´x´ in ´T´ which subsumes ´d´, then ´T´ conforms to the compound type `´U_1´ with ... with ´U_n´ {´R\,´}`.
-- If ´T <: U´, then ´T <: U | W´ and ´T <: W | U´.
-- If ´T <: W´ and ´U <: W´, then ´T | U <: W´.
-- If ´T <: U´ and ´T <: W´, then ´T <: U & W´.
-- If ´T <: W´, then ´T & U <: W´ and ´U & T <: W´.
-- If ´T_i \equiv T_i'´ for ´i \in \{ 1, ..., n\}´ and ´U´ conforms to ´U'´ then the method type ´(p_1:T_1, ..., p_n:T_n) U´ conforms to ´(p_1':T_1', ..., p_n':T_n') U'´.
-- The polymorphic type ´[a_1 >: L_1 <: U_1, ..., a_n >: L_n <: U_n] T´ conforms to the polymorphic type ´[a_1 >: L_1' <: U_1', ..., a_n >: L_n' <: U_n'] T'´ if, assuming ´L_1' <: a_1 <: U_1', ..., L_n' <: a_n <: U_n'´ one has ´T <: T'´ and ´L_i <: L_i'´ and ´U_i' <: U_i´ for ´i \in \{ 1, ..., n \}´.
-- Type constructors ´T´ and ´T'´ follow a similar discipline.
-We characterize ´T´ and ´T'´ by their [inferred type parameter clauses](#inferred-type-parameter-clause) ´[a_1, ..., a_n]´ and ´[a_1', ..., a_n']´.
-Then, ´T´ conforms to ´T'´ if any list ´[t_1, ..., t_n]´ -- with declared variances, bounds and higher-order type parameter clauses -- of valid type arguments for ´T'´ is also a valid list of type arguments for ´T´ and ´T[t_1, ..., t_n] <: T'[t_1, ..., t_n]´.
-Note that this entails that:
- - The bounds on ´a_i´ must be weaker than the corresponding bounds declared for ´a'_i´.
- - The variance of ´a_i´ must match the variance of ´a'_i´, where covariance matches covariance, contravariance matches contravariance and any variance matches invariance.
- - Recursively, these restrictions apply to the corresponding higher-order type parameter clauses of ´a_i´ and ´a'_i´.
+The conformance relation ´(<:)´ is the smallest relation such that ´S <: T´ is true if any of the following conditions hold.
+Note that the conditions are not all mutually exclusive.
+
+- ´S = T´ (i.e., conformance is reflexive by definition).
+- ´S´ is `Nothing`.
+- ´T´ is `AnyKind`.
+- ´S´ is a stable type with underlying type ´S_1´ and ´S_1 <: T´.
+- ´S = p.x´ and ´T = q.x´ are term designators and
+ - `isSubPrefix(´p´, ´q´)`.
+- ´S = p.X[S_1, ..., S_n]´ and ´T = q.X[T_1, ..., T_n]´ are possibly parameterized type designators with ´n \geq 0´ and:
+ - `isSubPrefix(´p´, ´q´)`, and
+  - it is not the case that ´p.X´ and ´q.X´ are class type designators for different classes, and
+ - for each ´i \in \{ 1, ..., n \}´:
+ - the ´i´th type parameter of ´q.X´ is covariant and ´S_i <: T_i´ [^argisnotwildcard], or
+ - the ´i´th type parameter of ´q.X´ is contravariant and ´T_i <: S_i´ [^argisnotwildcard], or
+ - the ´i´th type parameter of ´q.X´ is invariant and:
+ - ´S_i´ and ´T_i´ are types and ´S_i =:= T_i´, or
+ - ´S_i´ is a type and ´T_i´ is a wildcard type argument of the form ´? >: L_2 <: H_2´ and ´L_2 <: S_i´ and ´S_i <: H_2´, or
+ - ´S_i´ is a wildcard type argument of the form ´? >: L_1 <: H_1´ and ´T_i´ is a wildcard type argument of the form ´? >: L_2 <: H_2´ and ´L_2 <: L_1´ and ´H_1 <: H_2´ (i.e., the ´S_i´ "interval" is contained in the ´T_i´ "interval").
+- ´T = q.C[T_1, ..., T_n]´ with ´n \geq 0´ and `baseType(´S´, ´C´)` is defined and `baseType(´S´, ´C´) ´<: T´`.
+- ´S = p.X[S_1, ..., S_n]´ and ´p.X´ is a non-class type designator and ´H <: T´ where ´H´ is the upper bound of the underlying type definition of ´p.X´.
+- ´S = p.C´ and `´T = C´.this` and ´C´ is the hidden class of an `object` and:
+ - ´p = \epsilon´ or ´p´ is a package ref, or
+ - `isSubPrefix(´p´, ´D´.this)` where ´D´ is the enclosing class of ´C´.
+- `´S = C´.this` and ´T = q.C´ and ´C´ is the hidden class of an `object` and:
+ - either ´q = \epsilon´ or ´q´ is a package ref, or
+ - `isSubPrefix(´D´.this, ´q´)` where ´D´ is the enclosing class of ´C´.
+- ´S = S_1 | S_2´ and ´S_1 <: T´ and ´S_2 <: T´.
+- ´T = T_1 | T_2´ and either ´S <: T_1´ or ´S <: T_2´.
+- ´T = T_1 & T_2´ and ´S <: T_1´ and ´S <: T_2´.
+- ´S = S_1 & S_2´ and either ´S_1 <: T´ or ´S_2 <: T´.
+- `´S = S_1´ @a` and ´S_1 <: T´.
+- `´T = T_1´ @a` and ´S <: T_1´ (i.e., annotations can be dropped).
+- ´T = q.X´ and ´q.X´ is a non-class type designator and ´S <: L´ where ´L´ is the lower bound of the underlying type definition of ´q.X´.
+- ´S = p.X´ and ´p.X´ is a non-class type designator and ´H <: T´ where ´H´ is the upper bound of the underlying type definition of ´p.X´.
+- `´S = [\pm a_1 >: L_1 <: H_1, ..., \pm a_n >: L_n <: H_n]´ =>> ´S_1´` and `´T = [\pm b_1 >: M_1 <: G_1, ..., \pm b_n >: M_n <: G_n]´ =>> ´T_1´`, and given ´\sigma = [b_1 := a_1, ..., b_n := a_n]´:
+ - ´S_1 <: \sigma T_1´, and
+ - for each ´i \in \{ 1, ..., n \}´:
+ - the variance of ´a_i´ conforms to the variance of ´b_i´ (´+´ conforms to ´+´ and ´\epsilon´, ´-´ conforms to ´-´ and ´\epsilon´, and ´\epsilon´ conforms to ´\epsilon´), and
+ - ´\sigma (>: M_i <: G_i)´ is contained in ´>: L_i <: H_i´ (i.e., ´L_i <: \sigma M_i´ and ´\sigma G_i <: H_i´).
+- ´S = p.X´ and `´T = [\pm b_1 >: M_1 <: G_1, ..., \pm b_n >: M_n <: G_n]´ =>> ´T_1´` and ´S´ is a type constructor with ´n´ type parameters and:
+ - `´([\pm a_1 >: L_1 <: H_1, ..., \pm a_n >: L_n <: H_n]´ =>> ´S[a_1, ..., a_n]) <: T´` where the ´a_i´ are copies of the type parameters of ´S´ (i.e., we can eta-expand ´S´ to compare it to a type lambda).
+- `´T = T_1´ { ´R´ }` and ´S <: T_1´ and, given ´p = S´ if ´S´ is a stable type and ´p = ∃ \alpha : S´ otherwise:
+ - `´R =´ type ´X >: L <: H´` and `memberType(´p´, ´X´)` is a class result for ´C´ and ´L <: p.C´ and ´p.C <: H´, or
+ - `´R =´ type ´X >: L_2 <: H_2´` and `memberType(´p´, ´X´)` is a type result with bounds ´>: L_1 <: H_1´ and ´L_2 <: L_1´ and ´H_1 <: H_2´, or
+ - `´R =´ val ´X: T_2´` and `memberType(´p´, ´X´)` is a stable term result with type ´S_2´ and ´S_2 <: T_2´, or
+ - `´R =´ def ´X: T_2´` and `memberType(´p´, ´X´)` is a term result with type ´S_2´ and ´T_2´ is a type and ´S_2 <: T_2´, or
+ - `´R =´ def ´X: T_2´` and `memberType(´p´, ´X´)` is a term result with methodic type ´S_2´ and ´T_2´ is a methodic type and `matches(´S_2´, ´T_2´)`.
+- `´S = S_1´ { ´R´ }` and ´S_1 <: T´.
+- `´S =´ { ´\alpha´ => ´S_1´ }` and `´T =´ { ´\beta´ => ´T_1´ }` and ´S_1 <: [\beta := \alpha]T_1´.
+- `´T =´ { ´\beta´ => ´T_1´ }` and ´S´ is a proper type but not a recursive type and ´p' <: [\beta := p]T_1´ where:
+ - ´p´ is ´S´ if ´S´ is a stable type and ´∃ \alpha : S´ otherwise, and
+ - ´p'´ is the result of replacing any top-level recursive type `{ ´\gamma´ => ´Z´ }` in ´p´ with ´[\gamma := p]Z´ (TODO specify this better).
+- `´S = (´=> ´S_1)´` and `´T = (´=> ´T_1)´` and ´S_1 <: T_1´.
+- `´S =´ scala.Null` and:
+ - ´T = q.C[T_1, ..., T_n]´ with ´n \geq 0´ and ´C´ does not derive from `scala.AnyVal` and ´C´ is not the hidden class of an `object`, or
+ - ´T = q.x´ is a term designator with underlying type ´U´ and `scala.Null ´<: U´`, or
+ - `´T = T_1´ { ´R´ }` and `scala.Null ´<: T_1´`, or
+ - `´T =´ { ´\beta´ => ´T_1´ }` and `scala.Null ´<: T_1´`.
+- ´S´ is a stable type and ´T = q.x´ is a term designator with underlying type ´T_1´ and ´T_1´ is a stable type and ´S <: T_1´.
+- `´S = S_1´ { ´R´ }` and ´S_1 <: T´.
+- `´S =´ { ´\alpha´ => ´S_1´ }` and ´S_1 <: T´.
+- `´T =´ scala.Tuple´_n[T_1, ..., T_n]´` with ´1 \leq n \leq 22´, and `´S <: T_1´ *: ... *: ´T_n´ *: scala.EmptyTuple`.
+
+We define `isSubPrefix(´p´, ´q´)` where ´p´ and ´q´ are prefixes as:
+
+- If both ´p´ and ´q´ are types, then ´p <: q´.
+- Otherwise, ´p = q´ (for empty prefixes and package refs).
+
+We define `matches(´S´, ´T´)` where ´S´ and ´T´ are types or methodic types as:
+
+- If ´S´ and ´T´ are types, then ´S <: T´.
+- If ´S´ and ´T´ are method types ´(a_1: S_1, ..., a_n: S_n)S'´ and ´(b_1: T_1, ..., b_n: T_n)T'´, then ´\sigma S_i =:= T_i´ for each ´i´ and `matches(´\sigma S'´, ´T'´)`, where ´\sigma = [a_1 := b_1, ..., a_n := b_n]´.
+- If ´S´ and ´T´ are poly types ´[a_1 >: L_{s1} <: H_{s1}, ..., a_n >: L_{sn} <: H_{sn}]S'´ and ´[b_1 >: L_{t1} <: H_{t1}, ..., b_n >: L_{tn} <: H_{tn}]T'´, then ´\sigma L_{si} =:= L_{ti}´ and ´\sigma H_{si} =:= H_{ti}´ for each ´i´ and `matches(´\sigma S'´, ´T'´)`, where ´\sigma = [a_1 := b_1, ..., a_n := b_n]´.
+
+Note that conformance in Scala is _not_ transitive.
+Given two abstract types ´A´ and ´B´, and one abstract `type ´C >: A <: B´` available on prefix ´p´, we have ´A <: p.C´ and ´p.C <: B´ but not necessarily ´A <: B´.
[^argisnotwildcard]: In these cases, if `T_i` and/or `U_i` are wildcard type arguments, the [simplification rules](#simplification-rules) for parameterized types allow to reduce them to real types.
-A declaration or definition in some compound type of class type ´C´ _subsumes_ another declaration of the same name in some compound type or class type ´C'´, if one of the following holds.
-
-- A value declaration or definition that defines a name ´x´ with type ´T´ subsumes a value or method declaration that defines ´x´ with type ´T'´, provided ´T <: T'´.
-- A method declaration or definition that defines a name ´x´ with type ´T´ subsumes a method declaration that defines ´x´ with type ´T'´, provided ´T <: T'´.
-- A type alias `type ´t´[´T_1´, ..., ´T_n´] = ´T´` subsumes a type alias `type ´t´[´T_1´, ..., ´T_n´] = ´T'´` if ´T \equiv T'´.
-- A type declaration `type ´t´[´T_1´, ..., ´T_n´] >: ´L´ <: ´U´` subsumes a type declaration `type ´t´[´T_1´, ..., ´T_n´] >: ´L'´ <: ´U'´` if ´L' <: L´ and ´U <: U'´.
-- A type or class definition that binds a type name ´t´ subsumes an abstract type declaration `type t[´T_1´, ..., ´T_n´] >: L <: U` if ´L <: t <: U´.
-
#### Least upper bounds and greatest lower bounds
The ´(<:)´ relation forms pre-order between types, i.e. it is transitive and reflexive.
@@ -749,6 +1178,12 @@ This allows us to define _least upper bounds_ and _greatest lower bounds_ of a s
By construction, for all types `A` and `B`, the least upper bound of `A` and `B` is `A | B`, and their greatest lower bound is `A & B`.
+### Equivalence
+
+Equivalence is defined as mutual conformance.
+
+´S =:= T´ if and only if both ´S <: T´ and ´T <: S´.
+
### Weak Conformance
In some situations Scala uses a more general conformance relation.
@@ -790,25 +1225,28 @@ The application `foo((x: Int) => x.toString)` [resolves](06-expressions.html#ove
- `Int => String` is compatible to `ToString` -- when expecting a value of type `ToString`, you may pass a function literal from `Int` to `String`, as it will be SAM-converted to said function;
- `ToString` is not compatible to `Int => String` -- when expecting a function from `Int` to `String`, you may not pass a `ToString`.
-## Volatile Types
-
-Type volatility approximates the possibility that a type parameter or abstract type instance of a type does not have any non-null values.
-A value member of a volatile type cannot appear in a [path](#paths).
-
-A type is _volatile_ if it falls into one of four categories:
+## Realizability
-A compound type `´T_1´ with ... with ´T_n´ {´R\,´}` is volatile if one of the following three conditions hold.
+A type ´T´ is _realizable_ if and only if it is inhabited by non-null values.
+It is defined as:
-1. One of ´T_2, ..., T_n´ is a type parameter or abstract type, or
-1. ´T_1´ is an abstract type and either the refinement ´R´ or a type ´T_j´ for ´j > 1´ contributes an abstract member to the compound type, or
-1. one of ´T_1, ..., T_n´ is a singleton type.
+- A term designator ´p.x´ with underlying type ´U´ is realizable if ´p´ is ´\epsilon´ or a package ref or a realizable type and
+ - `memberType(´p´, ´x´)` has the stable flag, or
+ - the type returned by `memberType(´p´, ´x´)` is realizable.
+- A stable type that is not a term designator is realizable.
+- Another type ´T´ is realizable if
+ - ´T´ is concrete, and
+ - ´T´ has good bounds.
-Here, a type ´S´ _contributes an abstract member_ to a type ´T´ if ´S´ contains an abstract member that is also a member of ´T´.
-A refinement ´R´ contributes an abstract member to a type ´T´ if ´R´ contains an abstract declaration which is also a member of ´T´.
+A concrete type ´T´ has good bounds if all of the following apply:
-A type designator is volatile if it is an alias of a volatile type, or if it designates a type parameter or abstract type that has a volatile type as its upper bound.
+- all its non-class type members have good bounds, i.e., their bounds ´L´ and ´H´ are such that ´L <: H´,
+- all its type refinements have good bounds, and
+- for all base classes ´C´ of ´T´:
+ - `baseType(´T´, ´C´)` is defined with some result ´p.C[T_1, ..., T_n]´, and
+ - for all ´i \in \{ 1, ..., n \}´, ´T_i´ is a real type or (when it is a wildcard type argument) it has good bounds.
-A singleton type `´p´.type` is volatile, if the underlying type of path ´p´ is volatile.
+Note: it is possible for `baseType(´T´, ´C´)` not to be defined because of the `meet` computation, which may fail to merge prefixes and/or invariant type arguments.
## Type Erasure
@@ -816,15 +1254,18 @@ A type is called _generic_ if it contains type arguments or type variables.
_Type erasure_ is a mapping from (possibly generic) types to non-generic types.
We write ´|T|´ for the erasure of type ´T´.
The erasure mapping is defined as follows.
+Internal computations are performed in a _transparent mode_, which has an effect on how [`memberType`](#member-type) behaves for opaque type aliases.
-- The erasure of `scala.AnyKind` is `Object`.
-- The erasure of an alias type is the erasure of its right-hand side.
-- The erasure of an abstract type is the erasure of its upper bound.
+- The erasure of `AnyKind` is `Object`.
+- The erasure of a non-class type designator is the erasure of its underlying upper bound.
+- The erasure of a term designator is the erasure of its underlying type.
- The erasure of the parameterized type `scala.Array´[T_1]´` is `scala.Array´[|T_1|]´`.
- The erasure of every other parameterized type ´T[T_1, ..., T_n]´ is ´|T|´.
-- The erasure of a singleton type `´p´.type` is the erasure of the type of ´p´.
-- The erasure of a type projection `´T´#´x´` is `|´T´|#´x´`.
-- The erasure of a compound type `´T_1´ with ... with ´T_n´ {´R\,´}` is the erasure of the intersection dominator of ´T_1, ..., T_n´.
+- The erasure of a stable type `´p´` is the erasure of the underlying type of ´p´.
+- The erasure of a by-name type `=> ´T_1´` is `scala.Function0`.
+- The erasure of an annotated type ´T_1 a´ is ´|T_1|´.
+- The erasure of a refined type `´T_1´ { ´R´ }` is ´|T_1|´.
+- The erasure of a recursive type `{ ´\alpha´ => ´T_1´ }` and the associated recursive this type ´\alpha´ is ´|T_1|´.
- The erasure of a union type ´S | T´ is the _erased least upper bound_ (_elub_) of the erasures of ´S´ and ´T´.
- The erasure of an intersection type ´S & T´ is the _eglb_ (erased greatest lower bound) of the erasures of ´S´ and ´T´.
diff --git a/docs/_spec/04-basic-declarations-and-definitions.md b/docs/_spec/04-basic-declarations-and-definitions.md
deleted file mode 100644
index 5c45cc5c7819..000000000000
--- a/docs/_spec/04-basic-declarations-and-definitions.md
+++ /dev/null
@@ -1,758 +0,0 @@
----
-title: Basic Declarations & Definitions
-layout: default
-chapter: 4
----
-
-# Basic Declarations and Definitions
-
-```ebnf
-Dcl ::= ‘val’ ValDcl
- | ‘var’ VarDcl
- | ‘def’ FunDcl
- | ‘type’ {nl} TypeDcl
-PatVarDef ::= ‘val’ PatDef
- | ‘var’ VarDef
-Def ::= PatVarDef
- | ‘def’ FunDef
- | ‘type’ {nl} TypeDef
- | TmplDef
-```
-
-A _declaration_ introduces names and assigns them types.
-It can form part of a [class definition](05-classes-and-objects.html#templates) or of a refinement in a [compound type](03-types.html#compound-types).
-
-A _definition_ introduces names that denote terms or types.
-It can form part of an object or class definition or it can be local to a block.
-Both declarations and definitions produce _bindings_ that associate type names with type definitions or bounds, and that associate term names with types.
-
-The scope of a name introduced by a declaration or definition is the whole statement sequence containing the binding.
-However, there is a restriction on forward references in blocks:
-In a statement sequence ´s_1 ... s_n´ making up a block, if a simple name in ´s_i´ refers to an entity defined by ´s_j´ where ´j \geq i´, then for all ´s_k´ between and including ´s_i´ and ´s_j´,
-
-- ´s_k´ cannot be a variable definition.
-- If ´s_k´ is a value definition, it must be lazy.
-
-
-
-## Value Declarations and Definitions
-
-```ebnf
-Dcl ::= ‘val’ ValDcl
-ValDcl ::= ids ‘:’ Type
-PatVarDef ::= ‘val’ PatDef
-PatDef ::= Pattern2 {‘,’ Pattern2} [‘:’ Type] ‘=’ Expr
-ids ::= id {‘,’ id}
-```
-
-A value declaration `val ´x´: ´T´` introduces ´x´ as a name of a value of type ´T´.
-
-A value definition `val ´x´: ´T´ = ´e´` defines ´x´ as a name of the value that results from the evaluation of ´e´.
-If the value definition is not recursive, the type ´T´ may be omitted, in which case the [packed type](06-expressions.html#expression-typing) of expression ´e´ is assumed.
-If a type ´T´ is given, then ´e´ is expected to conform to it.
-
-Evaluation of the value definition implies evaluation of its right-hand side ´e´, unless it has the modifier `lazy`.
-The effect of the value definition is to bind ´x´ to the value of ´e´
-converted to type ´T´.
-A `lazy` value definition evaluates its right hand side ´e´ the first time the value is accessed.
-
-A _constant value definition_ is of the form
-
-```scala
-final val x = e
-```
-
-where `e` is a [constant expression](06-expressions.html#constant-expressions).
-The `final` modifier must be present and no type annotation may be given.
-References to the constant value `x` are themselves treated as constant expressions; in the generated code they are replaced by the definition's right-hand side `e`.
-
-Value definitions can alternatively have a [pattern](08-pattern-matching.html#patterns) as left-hand side.
-If ´p´ is some pattern other than a simple name or a name followed by a colon and a type, then the value definition `val ´p´ = ´e´` is expanded as follows:
-
-1. If the pattern ´p´ has bound variables ´x_1, ..., x_n´, where ´n > 1´:
-
-```scala
-val ´\$x´ = ´e´ match {case ´p´ => (´x_1, ..., x_n´)}
-val ´x_1´ = ´\$x´._1
-...
-val ´x_n´ = ´\$x´._n
-```
-
-Here, ´\$x´ is a fresh name.
-
-2. If ´p´ has a unique bound variable ´x´:
-
-```scala
-val ´x´ = ´e´ match { case ´p´ => ´x´ }
-```
-
-3. If ´p´ has no bound variables:
-
-```scala
-´e´ match { case ´p´ => ()}
-```
-
-###### Example
-
-The following are examples of value definitions
-
-```scala
-val pi = 3.1415
-val pi: Double = 3.1415 // equivalent to first definition
-val Some(x) = f() // a pattern definition
-val x :: xs = mylist // an infix pattern definition
-```
-
-The last two definitions have the following expansions.
-
-```scala
-val x = f() match { case Some(x) => x }
-
-val x´\$´ = mylist match { case x :: xs => (x, xs) }
-val x = x´\$´._1
-val xs = x´\$´._2
-```
-
-The name of any declared or defined value may not end in `_=`.
-
-A value declaration `val ´x_1, ..., x_n´: ´T´` is a shorthand for the sequence of value declarations `val ´x_1´: ´T´; ...; val ´x_n´: ´T´`.
-A value definition `val ´p_1, ..., p_n´ = ´e´` is a shorthand for the sequence of value definitions `val ´p_1´ = ´e´; ...; val ´p_n´ = ´e´`.
-A value definition `val ´p_1, ..., p_n: T´ = ´e´` is a shorthand for the sequence of value definitions `val ´p_1: T´ = ´e´; ...; val ´p_n: T´ = ´e´`.
-
-## Variable Declarations and Definitions
-
-```ebnf
-Dcl ::= ‘var’ VarDcl
-PatVarDef ::= ‘var’ VarDef
-VarDcl ::= ids ‘:’ Type
-VarDef ::= PatDef
- | ids ‘:’ Type ‘=’ ‘_’
-```
-
-A variable declaration `var ´x´: ´T´` is equivalent to the declarations of both a _getter method_ ´x´ *and* a _setter method_ `´x´_=`:
-
-```scala
-def ´x´: ´T´
-def ´x´_= (´y´: ´T´): Unit
-```
-
-An implementation of a class may _define_ a declared variable using a variable definition, or by defining the corresponding setter and getter methods.
-
-A variable definition `var ´x´: ´T´ = ´e´` introduces a mutable variable with type ´T´ and initial value as given by the expression ´e´.
-The type ´T´ can be omitted, in which case the type of ´e´ is assumed.
-If ´T´ is given, then ´e´ is expected to [conform to it](06-expressions.html#expression-typing).
-
-Variable definitions can alternatively have a [pattern](08-pattern-matching.html#patterns) as left-hand side.
-A variable definition `var ´p´ = ´e´` where ´p´ is a pattern other than a simple name or a name followed by a colon and a type is expanded in the same way as a [value definition](#value-declarations-and-definitions) `val ´p´ = ´e´`, except that the free names in ´p´ are introduced as mutable variables, not values.
-
-The name of any declared or defined variable may not end in `_=`.
-
-A variable definition `var ´x´: ´T´ = _` can appear only as a member of a template.
-It introduces a mutable field with type ´T´ and a default initial value.
-The default value depends on the type ´T´ as follows:
-
-| default | type ´T´ |
-|----------|------------------------------------|
-|`0` | `Int` or one of its subrange types |
-|`0L` | `Long` |
-|`0.0f` | `Float` |
-|`0.0d` | `Double` |
-|`false` | `Boolean` |
-|`()` | `Unit` |
-|`null` | all other types |
-
-When they occur as members of a template, both forms of variable definition also introduce a getter method ´x´ which returns the value currently assigned to the variable, as well as a setter method `´x´_=` which changes the value currently assigned to the variable.
-The methods have the same signatures as for a variable declaration.
-The template then has these getter and setter methods as members, whereas the original variable cannot be accessed directly as a template member.
-
-###### Example
-
-The following example shows how _properties_ can be simulated in Scala.
-It defines a class `TimeOfDayVar` of time values with updatable integer fields representing hours, minutes, and seconds.
-Its implementation contains tests that allow only legal values to be assigned to these fields.
-The user code, on the other hand, accesses these fields just like normal variables.
-
-```scala
-class TimeOfDayVar {
- private var h: Int = 0
- private var m: Int = 0
- private var s: Int = 0
-
- def hours = h
- def hours_= (h: Int) = if (0 <= h && h < 24) this.h = h
- else throw new DateError()
-
- def minutes = m
- def minutes_= (m: Int) = if (0 <= m && m < 60) this.m = m
- else throw new DateError()
-
- def seconds = s
- def seconds_= (s: Int) = if (0 <= s && s < 60) this.s = s
- else throw new DateError()
-}
-val d = new TimeOfDayVar
-d.hours = 8; d.minutes = 30; d.seconds = 0
-d.hours = 25 // throws a DateError exception
-```
-
-A variable declaration `var ´x_1, ..., x_n´: ´T´` is a shorthand for the sequence of variable declarations `var ´x_1´: ´T´; ...; var ´x_n´: ´T´`.
-A variable definition `var ´x_1, ..., x_n´ = ´e´` is a shorthand for the sequence of variable definitions `var ´x_1´ = ´e´; ...; var ´x_n´ = ´e´`.
-A variable definition `var ´x_1, ..., x_n: T´ = ´e´` is a shorthand for the sequence of variable definitions `var ´x_1: T´ = ´e´; ...; var ´x_n: T´ = ´e´`.
-
-## Type Declarations and Type Aliases
-
-
-
-```ebnf
-Dcl ::= ‘type’ {nl} TypeDcl
-TypeDcl ::= id [TypeParamClause] [‘>:’ Type] [‘<:’ Type]
-Def ::= ‘type’ {nl} TypeDef
-TypeDef ::= id [TypeParamClause] ‘=’ Type
-```
-
-### Desugaring of parameterized type declarations
-A parameterized type declaration is desugared into an unparameterized type declaration
-whose bounds are type lambdas with explicit variance annotations.
-
-#### Abstract Type
-An abstract type
-```scala
-type ´t´[´\mathit{tps}\,´] >: ´L´ <: ´U´
-```
-is desugared into an unparameterized abstract type as follow:
-- If `L` conforms to `Nothing`, then,
-
- ```scala
-type ´t´ >: Nothing
- <: [´\mathit{tps'}\,´] =>> ´U´
- ```
-- otherwise,
-
- ```scala
-type ´t´ >: [´\mathit{tps'}\,´] =>> ´L´
- <: [´\mathit{tps'}\,´] =>> ´U´
- ```
-
-If at least one of the ´\mathit{tps}´ contains an explicit variance annotation, then ´\mathit{tps'} = \mathit{tps}´, otherwise we infer the variance of each type parameter as with the user-written type lambda `[´\mathit{tps}\,´] =>> ´U´`.
-
-The same desugaring applies to type parameters. For instance,
-```scala
-[F[X] <: Coll[X]]
-```
-is treated as a shorthand for
-```scala
-[F >: Nothing <: [X] =>> Coll[X]]
-```
-
-#### Type Alias
-A parameterized type alias
-```scala
-type ´t´[´\mathit{tps}\,´] = ´T´
-```
-is desugared into an unparameterized type alias
-```scala
-type ´t´ = [´\mathit{tps'}\,´] =>> ´T´
-```
-where ´\mathit{tps'}´ is computed as in the previous case.
-
-´\color{red}{\text{TODO SCALA3: Everything else in this section (and the next one
-on type parameters) needs to be rewritten to take into account the desugaring described above.}}´
-
-A _type declaration_ `type ´t´[´\mathit{tps}\,´] >: ´L´ <: ´U´` declares ´t´ to be an abstract type with lower bound type ´L´ and upper bound type ´U´.
-If the type parameter clause `[´\mathit{tps}\,´]` is omitted, ´t´ abstracts over a proper type, otherwise ´t´ stands for a type constructor that accepts type arguments as described by the type parameter clause.
-
-If a type declaration appears as a member declaration of a type, implementations of the type may implement ´t´ with any type ´T´ for which ´L <: T <: U´.
-It is a compile-time error if ´L´ does not conform to ´U´.
-Either or both bounds may be omitted.
-If the lower bound ´L´ is absent, the bottom type `scala.Nothing` is assumed.
-If the upper bound ´U´ is absent, the top type `scala.Any` is assumed.
-
-A type constructor declaration imposes additional restrictions on the concrete types for which ´t´ may stand.
-Besides the bounds ´L´ and ´U´, the type parameter clause may impose higher-order bounds and variances, as governed by the [conformance of type constructors](03-types.html#conformance).
-
-The scope of a type parameter extends over the bounds `>: ´L´ <: ´U´` and the type parameter clause ´\mathit{tps}´ itself.
-A higher-order type parameter clause (of an abstract type constructor ´tc´) has the same kind of scope, restricted to the declaration of the type parameter ´tc´.
-
-To illustrate nested scoping, these declarations are all equivalent: `type t[m[x] <: Bound[x], Bound[x]]`, `type t[m[x] <: Bound[x], Bound[y]]` and `type t[m[x] <: Bound[x], Bound[_]]`, as the scope of, e.g., the type parameter of ´m´ is limited to the declaration of ´m´.
-In all of them, ´t´ is an abstract type member that abstracts over two type constructors: ´m´ stands for a type constructor that takes one type parameter and that must be a subtype of ´Bound´, ´t´'s second type constructor parameter.
-`t[MutableList, Iterable]` is a valid use of ´t´.
-
-A _type alias_ `type ´t´ = ´T´` defines ´t´ to be an alias name for the type ´T´.
-The left hand side of a type alias may have a type parameter clause, e.g. `type ´t´[´\mathit{tps}\,´] = ´T´`.
-The scope of a type parameter extends over the right hand side ´T´ and the type parameter clause ´\mathit{tps}´ itself.
-
-The scope rules for [definitions](#basic-declarations-and-definitions) and [type parameters](#method-declarations-and-definitions) make it possible that a type name appears in its own bound or in its right-hand side.
-However, it is a static error if a type alias refers recursively to the defined type constructor itself.
-That is, the type ´T´ in a type alias `type ´t´[´\mathit{tps}\,´] = ´T´` may not refer directly or indirectly to the name ´t´.
-It is also an error if an abstract type is directly or indirectly its own upper or lower bound.
-
-###### Example
-
-The following are legal type declarations and definitions:
-
-```scala
-type IntList = List[Integer]
-type T <: Comparable[T]
-type Two[A] = Tuple2[A, A]
-type MyCollection[+X] <: Iterable[X]
-```
-
-The following are illegal:
-
-```scala
-type Abs = Comparable[Abs] // recursive type alias
-
-type S <: T // S, T are bounded by themselves.
-type T <: S
-
-type T >: Comparable[T.That] // Cannot select from T.
- // T is a type, not a value
-type MyCollection <: Iterable // Type constructor members must explicitly
- // state their type parameters.
-```
-
-If a type alias `type ´t´[´\mathit{tps}\,´] = ´S´` refers to a class type ´S´, the name ´t´ can also be used as a constructor for objects of type ´S´.
-
-###### Example
-
-Suppose we make `Pair` an alias of the parameterized class `Tuple2`, as follows:
-
-```scala
-type Pair[+A, +B] = Tuple2[A, B]
-object Pair {
- def apply[A, B](x: A, y: B) = Tuple2(x, y)
- def unapply[A, B](x: Tuple2[A, B]): Option[Tuple2[A, B]] = Some(x)
-}
-```
-
-As a consequence, for any two types ´S´ and ´T´, the type `Pair[´S´, ´T\,´]` is equivalent to the type `Tuple2[´S´, ´T\,´]`.
-`Pair` can also be used as a constructor instead of `Tuple2`, as in:
-
-```scala
-val x: Pair[Int, String] = new Pair(1, "abc")
-```
-
-## Type Parameters
-
-```ebnf
-TypeParamClause ::= ‘[’ VariantTypeParam {‘,’ VariantTypeParam} ‘]’
-VariantTypeParam ::= {Annotation} [‘+’ | ‘-’] TypeParam
-TypeParam ::= (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type] [‘:’ Type]
-```
-
-Type parameters appear in type definitions, class definitions, and method definitions.
-In this section we consider only type parameter definitions with lower bounds `>: ´L´` and upper bounds `<: ´U´` whereas a discussion of context bounds `: ´U´` and view bounds `<% ´U´` is deferred to [here](07-implicits.html#context-bounds-and-view-bounds).
-
-The most general form of a proper type parameter is
-`´@a_1 ... @a_n´ ´\pm´ ´t´ >: ´L´ <: ´U´`.
-Here, ´L´, and ´U´ are lower and upper bounds that constrain possible type arguments for the parameter.
-It is a compile-time error if ´L´ does not conform to ´U´.
-´\pm´ is a _variance_, i.e. an optional prefix of either `+`, or `-`. One or more annotations may precede the type parameter.
-
-
-
-
-
-The names of all type parameters must be pairwise different in their enclosing type parameter clause.
-The scope of a type parameter includes in each case the whole type parameter clause.
-Therefore it is possible that a type parameter appears as part of its own bounds or the bounds of other type parameters in the same clause.
-However, a type parameter may not be bounded directly or indirectly by itself.
-
-A type constructor parameter adds a nested type parameter clause to the type parameter.
-The most general form of a type constructor parameter is `´@a_1 ... @a_n \pm t[\mathit{tps}\,]´ >: ´L´ <: ´U´`.
-
-The above scoping restrictions are generalized to the case of nested type parameter clauses, which declare higher-order type parameters.
-Higher-order type parameters (the type parameters of a type parameter ´t´) are only visible in their immediately surrounding parameter clause (possibly including clauses at a deeper nesting level) and in the bounds of ´t´.
-Therefore, their names must only be pairwise different from the names of other visible parameters.
-Since the names of higher-order type parameters are thus often irrelevant, they may be denoted with a `‘_’`, which is nowhere visible.
-
-###### Example
-Here are some well-formed type parameter clauses:
-
-```scala
-[S, T]
-[@specialized T, U]
-[Ex <: Throwable]
-[A <: Comparable[B], B <: A]
-[A, B >: A, C >: A <: B]
-[M[X], N[X]]
-[M[_], N[_]] // equivalent to previous clause
-[M[X <: Bound[X]], Bound[_]]
-[M[+X] <: Iterable[X]]
-```
-
-The following type parameter clauses are illegal:
-
-```scala
-[A >: A] // illegal, `A' has itself as bound
-[A <: B, B <: C, C <: A] // illegal, `A' has itself as bound
-[A, B, C >: A <: B] // illegal lower bound `A' of `C' does
- // not conform to upper bound `B'.
-```
-
-## Variance Annotations
-
-Variance annotations indicate how instances of parameterized types vary with respect to [subtyping](03-types.html#conformance).
-A ‘+’ variance indicates a covariant dependency, a ‘-’ variance indicates a contravariant dependency, and a missing variance indication indicates an invariant dependency.
-
-A variance annotation constrains the way the annotated type variable may appear in the type or class which binds the type parameter.
-In a type definition `type ´T´[´\mathit{tps}\,´] = ´S´`, or a type declaration `type ´T´[´\mathit{tps}\,´] >: ´L´ <: ´U´` type parameters labeled ‘+’ must only appear in covariant position whereas type parameters labeled ‘-’ must only appear in contravariant position.
-Analogously, for a class definition `class ´C´[´\mathit{tps}\,´](´\mathit{ps}\,´) extends ´T´ { ´x´: ´S´ => ...}`, type parameters labeled ‘+’ must only appear in covariant position in the self type ´S´ and the template ´T´, whereas type parameters labeled ‘-’ must only appear in contravariant position.
-
-The variance position of a type parameter in a type or template is defined as follows.
-Let the opposite of covariance be contravariance, and the opposite of invariance be itself.
-The top-level of the type or template is always in covariant position.
-The variance position changes at the following constructs.
-
-- The variance position of a method parameter is the opposite of the variance position of the enclosing parameter clause.
-- The variance position of a type parameter is the opposite of the variance position of the enclosing type parameter clause.
-- The variance position of the lower bound of a type declaration or type parameter is the opposite of the variance position of the type declaration or parameter.
-- The type of a mutable variable is always in invariant position.
-- The right-hand side of a type alias is always in invariant position.
-- The prefix ´S´ of a type selection `´S´#´T´` is always in invariant position.
-- For a type argument ´T´ of a type `´S´[´... T ...´ ]`:
-If the corresponding type parameter is invariant, then ´T´ is in invariant position.
-If the corresponding type parameter is contravariant, the variance position of ´T´ is the opposite of the variance position of the enclosing type `´S´[´... T ...´ ]`.
-
-
-
-References to the type parameters in [object-private or object-protected values, types, variables, or methods](05-classes-and-objects.html#modifiers) of the class are not checked for their variance position.
-In these members the type parameter may appear anywhere without restricting its legal variance annotations.
-
-###### Example
-The following variance annotation is legal.
-
-```scala
-abstract class P[+A, +B] {
- def fst: A; def snd: B
-}
-```
-
-With this variance annotation, type instances of ´P´ subtype covariantly with respect to their arguments.
-For instance,
-
-```scala
-P[IOException, String] <: P[Throwable, AnyRef]
-```
-
-If the members of ´P´ are mutable variables, the same variance annotation becomes illegal.
-
-```scala
-abstract class Q[+A, +B](x: A, y: B) {
- var fst: A = x // **** error: illegal variance:
- var snd: B = y // `A', `B' occur in invariant position.
-}
-```
-
-If the mutable variables are object-private, the class definition becomes legal again:
-
-```scala
-abstract class R[+A, +B](x: A, y: B) {
- private[this] var fst: A = x // OK
- private[this] var snd: B = y // OK
-}
-```
-
-###### Example
-
-The following variance annotation is illegal, since ´a´ appears in contravariant position in the parameter of `append`:
-
-```scala
-abstract class Sequence[+A] {
- def append(x: Sequence[A]): Sequence[A]
- // **** error: illegal variance:
- // `A' occurs in contravariant position.
-}
-```
-
-The problem can be avoided by generalizing the type of `append` by means of a lower bound:
-
-```scala
-abstract class Sequence[+A] {
- def append[B >: A](x: Sequence[B]): Sequence[B]
-}
-```
-
-###### Example
-
-```scala
-abstract class OutputChannel[-A] {
- def write(x: A): Unit
-}
-```
-
-With that annotation, we have that `OutputChannel[AnyRef]` conforms to `OutputChannel[String]`.
-That is, a channel on which one can write any object can substitute for a channel on which one can write only strings.
-
-## Method Declarations and Definitions
-
-```ebnf
-Dcl ::= ‘def’ FunDcl
-FunDcl ::= FunSig ‘:’ Type
-Def ::= ‘def’ FunDef
-FunDef ::= FunSig [‘:’ Type] ‘=’ Expr
-FunSig ::= id [FunTypeParamClause] ParamClauses
-FunTypeParamClause ::= ‘[’ TypeParam {‘,’ TypeParam} ‘]’
-ParamClauses ::= {ParamClause} [[nl] ‘(’ ‘implicit’ Params ‘)’]
-ParamClause ::= [nl] ‘(’ [Params] ‘)’
-Params ::= Param {‘,’ Param}
-Param ::= {Annotation} id [‘:’ ParamType] [‘=’ Expr]
-ParamType ::= Type
- | ‘=>’ Type
- | Type ‘*’
-```
-
-A _method declaration_ has the form `def ´f\,\mathit{psig}´: ´T´`, where ´f´ is the method's name, ´\mathit{psig}´ is its parameter signature and ´T´ is its result type.
-A _method definition_ `def ´f\,\mathit{psig}´: ´T´ = ´e´` also includes a _method body_ ´e´, i.e. an expression which defines the method's result.
-A parameter signature consists of an optional type parameter clause `[´\mathit{tps}\,´]`, followed by zero or more value parameter clauses `(´\mathit{ps}_1´)...(´\mathit{ps}_n´)`.
-Such a declaration or definition introduces a value with a (possibly polymorphic) method type whose parameter types and result type are as given.
-
-The type of the method body is expected to [conform](06-expressions.html#expression-typing) to the method's declared result type, if one is given.
-If the method definition is not recursive, the result type may be omitted, in which case it is determined from the packed type of the method body.
-
-A _type parameter clause_ ´\mathit{tps}´ consists of one or more [type declarations](#type-declarations-and-type-aliases), which introduce type parameters, possibly with bounds.
-The scope of a type parameter includes the whole signature, including any of the type parameter bounds as well as the method body, if it is present.
-
-A _value parameter clause_ ´\mathit{ps}´ consists of zero or more formal parameter bindings such as `´x´: ´T´` or `´x: T = e´`, which bind value parameters and associate them with their types.
-
-### Default Arguments
-
-Each value parameter declaration may optionally define a default argument.
-The default argument expression ´e´ is type-checked with an expected type ´T'´ obtained by replacing all occurrences of the method's type parameters in ´T´ by the undefined type.
-
-For every parameter ´p_{i,j}´ with a default argument a method named `´f\$´default´\$´n` is generated which computes the default argument expression.
-Here, ´n´ denotes the parameter's position in the method declaration.
-These methods are parametrized by the type parameter clause `[´\mathit{tps}\,´]` and all value parameter clauses `(´\mathit{ps}_1´)...(´\mathit{ps}_{i-1}´)` preceding ´p_{i,j}´.
-The `´f\$´default´\$´n` methods are inaccessible for user programs.
-
-###### Example
-In the method
-
-```scala
-def compare[T](a: T = 0)(b: T = a) = (a == b)
-```
-
-the default expression `0` is type-checked with an undefined expected
-type.
-When applying `compare()`, the default value `0` is inserted and `T` is instantiated to `Int`.
-The methods computing the default arguments have the form:
-
-```scala
-def compare´\$´default´\$´1[T]: Int = 0
-def compare´\$´default´\$´2[T](a: T): T = a
-```
-
-The scope of a formal value parameter name ´x´ comprises all subsequent parameter clauses, as well as the method return type and the method body, if they are given.
-Both type parameter names and value parameter names must be pairwise distinct.
-
-A default value which depends on earlier parameters uses the actual arguments if they are provided, not the default arguments.
-
-```scala
-def f(a: Int = 0)(b: Int = a + 1) = b // OK
-// def f(a: Int = 0, b: Int = a + 1) // "error: not found: value a"
-f(10)() // returns 11 (not 1)
-```
-
-If an [implicit argument](07-implicits.html#implicit-parameters) is not found by implicit search, it may be supplied using a default argument.
-
-```scala
-implicit val i: Int = 2
-def f(implicit x: Int, s: String = "hi") = s * x
-f // "hihi"
-```
-
-### By-Name Parameters
-
-```ebnf
-ParamType ::= ‘=>’ Type
-```
-
-The type of a value parameter may be prefixed by `=>`, e.g. `´x´: => ´T´`.
-The type of such a parameter is then the parameterless method type `=> ´T´`.
-This indicates that the corresponding argument is not evaluated at the point of method application, but instead is evaluated at each use within the method.
-That is, the argument is evaluated using _call-by-name_.
-
-The by-name modifier is disallowed for parameters of classes that carry a `val` or `var` prefix, including parameters of case classes for which a `val` prefix is implicitly generated.
-
-###### Example
-The declaration
-
-```scala
-def whileLoop (cond: => Boolean) (stat: => Unit): Unit
-```
-
-indicates that both parameters of `whileLoop` are evaluated using call-by-name.
-
-### Repeated Parameters
-
-```ebnf
-ParamType ::= Type ‘*’
-```
-
-The last value parameter of a parameter section may be suffixed by `'*'`, e.g. `(..., ´x´:´T´*)`.
-The type of such a _repeated_ parameter inside the method is then the sequence type `scala.Seq[´T´]`.
-Methods with repeated parameters `´T´*` take a variable number of arguments of type ´T´.
-That is, if a method ´m´ with type `(´p_1:T_1, ..., p_n:T_n, p_s:S´*)´U´` is applied to arguments ´(e_1, ..., e_k)´ where ´k \geq n´, then ´m´ is taken in that application to have type ´(p_1:T_1, ..., p_n:T_n, p_s:S, ..., p_{s'}:S)U´, with ´k - n´ occurrences of type ´S´ where any parameter names beyond ´p_s´ are fresh. The only exception to this rule is if the last argument is marked to be a _sequence argument_ via a `_*` type annotation.
-If ´m´ above is applied to arguments `(´e_1, ..., e_n, e'´: _*)`, then the type of ´m´ in that application is taken to be `(´p_1:T_1, ... , p_n:T_n,p_{s}:´scala.Seq[´S´])`.
-
-It is not allowed to define any default arguments in a parameter section with a repeated parameter.
-
-###### Example
-The following method definition computes the sum of the squares of a variable number of integer arguments.
-
-```scala
-def sum(args: Int*) = {
- var result = 0
- for (arg <- args) result += arg
- result
-}
-```
-
-The following applications of this method yield `0`, `1`, `6`, in that order.
-
-```scala
-sum()
-sum(1)
-sum(1, 2, 3)
-```
-
-Furthermore, assume the definition:
-
-```scala
-val xs = List(1, 2, 3)
-```
-
-The following application of method `sum` is ill-formed:
-
-```scala
-sum(xs) // ***** error: expected: Int, found: List[Int]
-```
-
-By contrast, the following application is well formed and yields again the result `6`:
-
-```scala
-sum(xs: _*)
-```
-
-### Method Return Type Inference
-
-A class member definition ´m´ that overrides some other method ´m'´ in a base class of ´C´ may leave out the return type, even if it is recursive.
-In this case, whether or not `m` is recursive, its return type will be the return type of ´m'´.
-
-###### Example
-Assume the following definitions:
-
-```scala
-trait I {
- def factorial(x: Int): Int
-}
-class C extends I {
- def factorial(x: Int) = if (x == 0) 1 else x * factorial(x - 1)
-}
-```
-
-Here, it is OK to leave out the result type of `factorial` in `C`, even though the method is recursive.
-
-
-
-## Import Clauses
-
-```ebnf
-Import ::= ‘import’ ImportExpr {‘,’ ImportExpr}
-ImportExpr ::= StableId ‘.’ (id | ‘_’ | ImportSelectors)
-ImportSelectors ::= ‘{’ {ImportSelector ‘,’}
- (ImportSelector | ‘_’) ‘}’
-ImportSelector ::= id [‘=>’ id | ‘=>’ ‘_’]
-```
-
-An import clause has the form `import ´p´.´I´` where ´p´ is a [stable identifier](03-types.html#paths) and ´I´ is an import expression.
-The import expression determines a set of names of importable members of ´p´ which are made available without qualification.
-A member ´m´ of ´p´ is _importable_ if it is [accessible](05-classes-and-objects.html#modifiers).
-The most general form of an import expression is a list of _import selectors_
-
-```scala
-{ ´x_1´ => ´y_1, ..., x_n´ => ´y_n´, _ }
-```
-
-for ´n \geq 0´, where the final wildcard `‘_’` may be absent.
-It makes available each importable member `´p´.´x_i´` under the unqualified name ´y_i´. I.e. every import selector `´x_i´ => ´y_i´` renames `´p´.´x_i´` to
-´y_i´.
-If a final wildcard is present, all importable members ´z´ of ´p´ other than `´x_1, ..., x_n,y_1, ..., y_n´` are also made available under their own unqualified names.
-
-Import selectors work in the same way for type and term members.
-For instance, an import clause `import ´p´.{´x´ => ´y\,´}` renames the term
-name `´p´.´x´` to the term name ´y´ and the type name `´p´.´x´` to the type name ´y´.
-At least one of these two names must reference an importable member of ´p´.
-
-If the target in an import selector is a wildcard, the import selector hides access to the source member.
-For instance, the import selector `´x´ => _` “renames” ´x´ to the wildcard symbol (which is unaccessible as a name in user programs), and thereby effectively prevents unqualified access to ´x´.
-This is useful if there is a final wildcard in the same import selector list, which imports all members not mentioned in previous import selectors.
-
-The scope of a binding introduced by an import-clause starts immediately after the import clause and extends to the end of the enclosing block, template, package clause, or compilation unit, whichever comes first.
-
-Several shorthands exist. An import selector may be just a simple name ´x´.
-In this case, ´x´ is imported without renaming, so the import selector is equivalent to `´x´ => ´x´`.
-Furthermore, it is possible to replace the whole import selector list by a single identifier or wildcard.
-The import clause `import ´p´.´x´` is equivalent to `import ´p´.{´x\,´}`, i.e. it makes available without qualification the member ´x´ of ´p´. The import clause `import ´p´._` is equivalent to `import ´p´.{_}`, i.e. it makes available without qualification all members of ´p´ (this is analogous to `import ´p´.*` in Java).
-
-An import clause with multiple import expressions `import ´p_1´.´I_1, ..., p_n´.´I_n´` is interpreted as a sequence of import clauses `import ´p_1´.´I_1´; ...; import ´p_n´.´I_n´`.
-
-###### Example
-Consider the object definition:
-
-```scala
-object M {
- def z = 0, one = 1
- def add(x: Int, y: Int): Int = x + y
-}
-```
-
-Then the block
-
-```scala
-{ import M.{one, z => zero, _}; add(zero, one) }
-```
-
-is equivalent to the block
-
-```scala
-{ M.add(M.z, M.one) }
-```
diff --git a/docs/_spec/04-basic-definitions.md b/docs/_spec/04-basic-definitions.md
new file mode 100644
index 000000000000..369709b52bff
--- /dev/null
+++ b/docs/_spec/04-basic-definitions.md
@@ -0,0 +1,814 @@
+---
+title: Basic Definitions
+layout: default
+chapter: 4
+---
+
+# Basic Definitions
+
+```ebnf
+PatVarDef ::= ‘val’ PatDef
+ | ‘var’ VarDef
+Def ::= PatVarDef
+ | ‘def’ FunDef
+ | ‘type’ {nl} TypeDef
+ | ‘opaque‘ ‘type‘ {nl} OpaqueTypeDef
+ | TmplDef
+```
+
+A _definition_ introduces names that denote terms and assigns them types, or that denote types and assigns them [type definitions](./03-types.html#type-definitions).
+It can form part of an object or [class definition](05-classes-and-objects.html#templates) or it can be local to a block.
+
+The scope of a name introduced by a definition is the whole statement sequence containing the definition.
+However, there is a restriction on forward references in blocks:
+In a statement sequence ´s_1 ... s_n´ making up a block, if a simple name in ´s_i´ refers to an entity defined by ´s_j´ where ´j \geq i´, then for all ´s_k´ between and including ´s_i´ and ´s_j´,
+
+- ´s_k´ cannot be a variable definition.
+- If ´s_k´ is a value definition, it must be lazy.
+
+Moreover, in a block, all term definitions must be concrete, and opaque type alias definitions are not allowed.
+
+
+
+## Value Definitions
+
+```ebnf
+PatVarDef ::= ‘val’ PatDef
+PatDef ::= Pattern2 {‘,’ Pattern2} [‘:’ Type] [‘=’ Expr]
+ids ::= id {‘,’ id}
+```
+
+An abstract value definition `val ´x´: ´T´` introduces ´x´ as a name of a value of _declared type_ ´T´.
+´T´ must be explicitly specified and must be a [proper type](03-types.html#proper-types).
+
+A concrete value definition `val ´x´: ´T´ = ´e´` defines ´x´ as a name of the value that results from the evaluation of ´e´.
+If the value definition is not recursive, the declared type ´T´ may be omitted, in which case the [packed type](06-expressions.html#expression-typing) of the expression ´e´ is assumed.
+If a type ´T´ is given, then it must be a [proper type](03-types.html#proper-types) and ´e´ is expected to [conform to it](06-expressions.html#expression-typing).
+
+Evaluation of the value definition implies evaluation of its right-hand side ´e´, unless it has the modifier `lazy`.
+The effect of the value definition is to bind ´x´ to the value of ´e´ converted to type ´T´.
+A `lazy` value definition evaluates its right hand side ´e´ the first time the value is accessed.
+
+A _constant value definition_ is of the form
+
+```scala
+final val x = e
+```
+
+where `e` is a [constant expression](06-expressions.html#constant-expressions).
+The `final` modifier must be present and no type annotation may be given.
+References to the constant value `x` are themselves treated as constant expressions; in the generated code they are replaced by the definition's right-hand side `e`.
+
+Concrete value definitions can alternatively have a [pattern](08-pattern-matching.html#patterns) as left-hand side.
+If ´p´ is some pattern other than a simple name or a name followed by a colon and a type, then the value definition `val ´p´ = ´e´` is expanded as follows:
+
+1. If the pattern ´p´ has bound variables ´x_1, ..., x_n´, where ´n > 1´:
+
+```scala
+val ´\$x´ = ´e´ match {case ´p´ => (´x_1, ..., x_n´)}
+val ´x_1´ = ´\$x´._1
+...
+val ´x_n´ = ´\$x´._n
+```
+
+Here, ´\$x´ is a fresh name.
+
+2. If ´p´ has a unique bound variable ´x´:
+
+```scala
+val ´x´ = ´e´ match { case ´p´ => ´x´ }
+```
+
+3. If ´p´ has no bound variables:
+
+```scala
+´e´ match { case ´p´ => () }
+```
+
+###### Example
+
+The following are examples of value definitions
+
+```scala
+val foo: Int // abstract value definition
+val pi = 3.1415
+val pi: Double = 3.1415 // equivalent to first definition
+val Some(x) = f() // a pattern definition
+val x :: xs = mylist // an infix pattern definition
+```
+
+The last two definitions have the following expansions.
+
+```scala
+val x = f() match { case Some(x) => x }
+
+val ´\$x´ = mylist match { case x :: xs => (x, xs) }
+val x = ´\$x´._1
+val xs = ´\$x´._2
+```
+
+The name of any defined value may not end in `_=`.
+
+A value definition `val ´x_1, ..., x_n´: ´T´` is a shorthand for the sequence of value definitions `val ´x_1´: ´T´; ...; val ´x_n´: ´T´`.
+A value definition `val ´p_1, ..., p_n´ = ´e´` is a shorthand for the sequence of value definitions `val ´p_1´ = ´e´; ...; val ´p_n´ = ´e´`.
+A value definition `val ´p_1, ..., p_n: T´ = ´e´` is a shorthand for the sequence of value definitions `val ´p_1: T´ = ´e´; ...; val ´p_n: T´ = ´e´`.
+
+## Variable Definitions
+
+```ebnf
+Dcl ::= ‘var’ VarDcl
+PatVarDef ::= ‘var’ VarDef
+VarDcl ::= ids ‘:’ Type
+VarDef ::= PatDef
+ | ids ‘:’ Type ‘=’ ‘_’
+```
+
+An abstract variable definition `var ´x´: ´T´` is equivalent to the definition of both a _getter method_ ´x´ *and* a _setter method_ `´x´_=`:
+
+```scala
+def ´x´: ´T´
+def ´x´_= (´y´: ´T´): Unit
+```
+
+An implementation of a class may implement a defined abstract variable using a concrete variable definition, or by defining the corresponding setter and getter methods.
+
+A concrete variable definition `var ´x´: ´T´ = ´e´` introduces a mutable variable with type ´T´ and initial value as given by the expression ´e´.
+The type ´T´ can be omitted, in which case the type of ´e´ is assumed.
+If ´T´ is given, then it must be a [proper type](03-types.html#proper-types) and ´e´ is expected to [conform to it](06-expressions.html#expression-typing).
+
+Variable definitions can alternatively have a [pattern](08-pattern-matching.html#patterns) as left-hand side.
+A variable definition `var ´p´ = ´e´` where ´p´ is a pattern other than a simple name or a name followed by a colon and a type is expanded in the same way as a [value definition](#value-definitions) `val ´p´ = ´e´`, except that the free names in ´p´ are introduced as mutable variables, not values.
+
+The name of any defined variable may not end in `_=`.
+
+The right-hand-side of a mutable variable definition that is a member of a template can be the special reference `scala.compiletime.uninitialized`: `var ´x´: ´T´ = scala.compiletime.uninitialized`.
+It introduces a mutable field with type ´T´ and a default initial value.
+The default value depends on the type ´T´ as follows:
+
+| default | type ´T´ |
+|----------|------------------------------------|
+|`0` | `Int` or one of its subrange types |
+|`0L` | `Long` |
+|`0.0f` | `Float` |
+|`0.0d` | `Double` |
+|`false` | `Boolean` |
+|`()` | `Unit` |
+|`null` | all other types |
+
+`scala.compiletime.uninitialized` can never appear anywhere else.
+For compatibility with Scala 2, the syntax `var ´x´: ´T´ = _` is accepted as equivalent to using `uninitialized`.
+
+When they occur as members of a template, both forms of concrete variable definition also introduce a setter method `´x´_=` which changes the value currently assigned to the variable.
+The setter has the same signature as for an abstract variable definition.
+It is then not possible to directly modify the value assigned to the variable; mutations always go through the corresponding setter.
+
+###### Example
+
+The following example shows how _properties_ can be simulated in Scala.
+It defines a class `TimeOfDayVar` of time values with updatable integer fields representing hours, minutes, and seconds.
+Its implementation contains tests that allow only legal values to be assigned to these fields.
+The user code, on the other hand, accesses these fields just like normal variables.
+
+```scala
+class TimeOfDayVar {
+ private var h: Int = 0
+ private var m: Int = 0
+ private var s: Int = 0
+
+ def hours = h
+ def hours_= (h: Int) = if (0 <= h && h < 24) this.h = h
+ else throw new DateError()
+
+ def minutes = m
+ def minutes_= (m: Int) = if (0 <= m && m < 60) this.m = m
+ else throw new DateError()
+
+ def seconds = s
+ def seconds_= (s: Int) = if (0 <= s && s < 60) this.s = s
+ else throw new DateError()
+}
+val d = new TimeOfDayVar
+d.hours = 8; d.minutes = 30; d.seconds = 0
+d.hours = 25 // throws a DateError exception
+```
+
+A variable definition `var ´x_1, ..., x_n´: ´T´` is a shorthand for the sequence of variable definitions `var ´x_1´: ´T´; ...; var ´x_n´: ´T´`.
+A variable definition `var ´x_1, ..., x_n´ = ´e´` is a shorthand for the sequence of variable definitions `var ´x_1´ = ´e´; ...; var ´x_n´ = ´e´`.
+A variable definition `var ´x_1, ..., x_n: T´ = ´e´` is a shorthand for the sequence of variable definitions `var ´x_1: T´ = ´e´; ...; var ´x_n: T´ = ´e´`.
+
+## Type Member Definitions
+
+```ebnf
+Dcl ::= ‘type’ {nl} TypeDcl
+TypeDcl ::= id [TypeParamClause] [‘>:’ Type] [‘<:’ Type]
+Def ::= ‘type’ {nl} TypeDef
+ | ‘opaque‘ ‘type‘ {nl} OpaqueTypeDef
+TypeDef ::= id [TypeParamClause] ‘=’ Type
+OpaqueTypeDef ::= id [TypeParamClause] [‘>:’ Type] [‘<:’ Type] ‘=’ Type
+```
+
+_Type members_ can be abstract type members, type aliases, or opaque type aliases.
+
+A possibly parameterized _abstract type member_ definition `type ´t´[´\mathit{tps}\,´] >: ´L´ <: ´H´` declares ´t´ to be an abstract type.
+If omitted, ´L´ and ´H´ are implied to be `Nothing` and `scala.Any`, respectively.
+
+A possibly parameterized _type alias_ definition `type ´t´[´\mathit{tps}\,´] = ´T´` defines ´t´ to be a concrete type member.
+
+A possibly parameterized _opaque type alias_ definition `opaque type ´t´[´\mathit{tps}\,´] >: ´L´ <: ´H´ = ´T´` defines ´t´ to be an opaque type alias with public bounds `>: ´L´ <: ´H´` and a private alias `= ´T´`.
+
+If a type parameter clause `[´\mathit{tps}\,´]` is present, it is desugared away according to the rules in the following section.
+
+### Desugaring of parameterized type definitions
+
+A parameterized type definition is desugared into an unparameterized type definition whose bounds are [type lambdas](03-types.html#type-lambdas) with explicit variance annotations.
+
+The scope of a type parameter extends over the bounds `>: ´L´ <: ´U´` or the alias `= ´T´` and the type parameter clause ´\mathit{tps}´ itself.
+A higher-order type parameter clause (of an abstract type constructor ´tc´) has the same kind of scope, restricted to the definition of the type parameter ´tc´.
+
+To illustrate nested scoping, these definitions are all equivalent: `type t[m[x] <: Bound[x], Bound[x]]`, `type t[m[x] <: Bound[x], Bound[y]]` and `type t[m[x] <: Bound[x], Bound[_]]`, as the scope of, e.g., the type parameter of ´m´ is limited to the definition of ´m´.
+In all of them, ´t´ is an abstract type member that abstracts over two type constructors: ´m´ stands for a type constructor that takes one type parameter and that must be a subtype of `Bound`, ´t´'s second type constructor parameter.
+`t[MutableList, Iterable]` is a valid use of ´t´.
+
+#### Abstract Type
+
+A parameterized abstract type
+```scala
+type ´t´[´\mathit{tps}\,´] >: ´L´ <: ´H´
+```
+is desugared into an unparameterized abstract type as follows:
+- If `L` conforms to `Nothing`, then,
+
+ ```scala
+type ´t´ >: Nothing
+ <: [´\mathit{tps'}\,´] =>> ´H´
+ ```
+- otherwise,
+
+ ```scala
+type ´t´ >: [´\mathit{tps'}\,´] =>> ´L´
+ <: [´\mathit{tps'}\,´] =>> ´H´
+ ```
+
+If at least one of the ´\mathit{tps}´ contains an explicit variance annotation, then ´\mathit{tps'} = \mathit{tps}´, otherwise we infer the variance of each type parameter as with the user-written type lambda `[´\mathit{tps}\,´] =>> ´H´`.
+
+The same desugaring applies to type parameters.
+For instance,
+```scala
+[F[X] <: Coll[X]]
+```
+is treated as a shorthand for
+```scala
+[F >: Nothing <: [X] =>> Coll[X]]
+```
+
+#### Type Alias
+
+A parameterized type alias
+```scala
+type ´t´[´\mathit{tps}\,´] = ´T´
+```
+is desugared into an unparameterized type alias
+```scala
+type ´t´ = [´\mathit{tps'}\,´] =>> ´T´
+```
+where ´\mathit{tps'}´ is computed as in the previous case.
+
+#### Opaque Type Alias
+
+A parameterized type alias
+```scala
+type ´t´[´\mathit{tps}\,´] >: ´L´ <: ´H´ = ´T´
+```
+is desugared into an unparameterized opaque type alias as follows:
+- If `L` conforms to `Nothing`, then,
+
+ ```scala
+type ´t´ >: Nothing <: [´\mathit{tps'}\,´] =>> ´H´ = [´\mathit{tps'}\,´] =>> ´T´
+ ```
+- otherwise,
+
+ ```scala
+type ´t´ >: [´\mathit{tps'}\,´] =>> ´L´ <: [´\mathit{tps'}\,´] =>> ´H´ = [´\mathit{tps'}\,´] =>> ´T´
+ ```
+where ´\mathit{tps'}´ is computed as in the previous cases.
+
+### Non-Parameterized Type Member Definitions
+
+An _abstract type member_ definition `type ´t´ >: ´L´ <: ´H´` declares ´t´ to be an abstract type whose [type definition](03-types.html#type-definitions) has the lower bound type ´L´ and upper bound type ´H´.
+
+If a type definition appears as a member definition of a type, implementations of the type may implement ´t´ with any type ´T´ for which ´L <: T <: H´.
+It is a compile-time error if ´L´ does not conform to ´H´.
+
+A _type alias_ definition `type ´t´ = ´T´` defines ´t´ to be an alias name for the type ´T´.
+
+An _opaque type alias_ definition `opaque type ´t´ >: ´L´ <: ´H´ = ´T´` defines ´t´ to be an opaque type alias with public bounds `>: ´L´ <: ´H´` and a private alias `= ´T´`.
+An opaque type alias can only be declared within a [template](./05-classes-and-objects.html#templates).
+It cannot be `private` and cannot be overridden in subclasses.
+In order for the definition to be valid, ´T´ must satisfy some constraints:
+
+- ´L <: T´ and ´T <: H´ must be true,
+- ´T´ must not be a context function type, and
+- If ´T´ is a type lambda, its result must be a proper type (i.e., it cannot be a curried type lambda).
+
+When viewed from within its enclosing template, an opaque type alias behaves as a type alias with type definition `= ´T´`.
+When viewed from anywhere else, it behaves as an abstract type member with type definition `>: ´L´ <: ´H´`.
+See [`memberType`](./03-types.html#member-type) for the precise mechanism that governs this dual view.
+
+The scope rules for [definitions](#basic-definitions) and [type parameters](#method-definitions) make it possible that a type name appears in its own bounds or in its right-hand side.
+However, it is a static error if a type alias refers recursively to the defined type itself.
+That is, the type ´T´ in a type alias `type ´t´[´\mathit{tps}\,´] = ´T´` may not refer directly or indirectly to the name ´t´.
+It is also an error if an abstract type is directly or indirectly its own upper or lower bound.
+
+###### Example
+
+The following are legal type definitions:
+
+```scala
+type IntList = List[Integer]
+type T <: Comparable[T]
+type Two[A] = Tuple2[A, A] // desugars to Two = [A] =>> Tuple2[A, A]
+type MyCollection[+X] <: Iterable[X] // desugars to MyCollection <: [+X] =>> Iterable[X]
+```
+
+The following are illegal:
+
+```scala
+type Abs = Comparable[Abs] // recursive type alias
+
+type S <: T // S, T are bounded by themselves.
+type T <: S
+
+type T >: Comparable[T.That] // Cannot select from T.
+ // T is a type, not a value
+type MyCollection <: Iterable // The reference to the type constructor
+ // Iterable must explicitly state its type arguments.
+```
+
+If a type alias `type ´t´ = ´S´` refers to a class type ´S´ (or to a type lambda that is the eta-expansion of class type ´S´), the name ´t´ can also be used as a constructor for objects of type ´S´.
+
+###### Example
+
+Suppose we make `Pair` an alias of the parameterized class `Tuple2`, as follows:
+
+```scala
+type Pair[+A, +B] = Tuple2[A, B]
+object Pair {
+ def apply[A, B](x: A, y: B) = Tuple2(x, y)
+ def unapply[A, B](x: Tuple2[A, B]): Option[Tuple2[A, B]] = Some(x)
+}
+```
+
+As a consequence, for any two types ´S´ and ´T´, the type `Pair[´S´, ´T\,´]` is equivalent to the type `Tuple2[´S´, ´T\,´]`.
+`Pair` can also be used as a constructor instead of `Tuple2`, as in:
+
+```scala
+val x: Pair[Int, String] = new Pair(1, "abc")
+```
+
+## Type Parameters
+
+```ebnf
+TypeParamClause ::= ‘[’ VariantTypeParam {‘,’ VariantTypeParam} ‘]’
+VariantTypeParam ::= {Annotation} [‘+’ | ‘-’] TypeParam
+TypeParam ::= (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type] [‘:’ Type]
+```
+
+Type parameters appear in type definitions, class definitions, and method definitions.
+In this section we consider only type parameter definitions with lower bounds `>: ´L´` and upper bounds `<: ´U´` whereas a discussion of context bounds `: ´U´` and view bounds `<% ´U´` is deferred to [here](07-implicits.html#context-bounds-and-view-bounds).
+
+The most general form of a proper type parameter is
+`´@a_1 ... @a_n´ ´\pm´ ´t´ >: ´L´ <: ´U´`.
+Here, ´L´, and ´U´ are lower and upper bounds that constrain possible type arguments for the parameter.
+It is a compile-time error if ´L´ does not conform to ´U´.
+´\pm´ is a _variance_, i.e. an optional prefix of either `+`, or `-`. One or more annotations may precede the type parameter.
+
+
+
+
+
+The names of all type parameters must be pairwise different in their enclosing type parameter clause.
+The scope of a type parameter includes in each case the whole type parameter clause.
+Therefore it is possible that a type parameter appears as part of its own bounds or the bounds of other type parameters in the same clause.
+However, a type parameter may not be bounded directly or indirectly by itself.
+
+A type constructor parameter adds a nested type parameter clause to the type parameter.
+The most general form of a type constructor parameter is `´@a_1 ... @a_n \pm t[\mathit{tps}\,]´ >: ´L´ <: ´U´`.
+
+The above scoping restrictions are generalized to the case of nested type parameter clauses, which declare higher-order type parameters.
+Higher-order type parameters (the type parameters of a type parameter ´t´) are only visible in their immediately surrounding parameter clause (possibly including clauses at a deeper nesting level) and in the bounds of ´t´.
+Therefore, their names must only be pairwise different from the names of other visible parameters.
+Since the names of higher-order type parameters are thus often irrelevant, they may be denoted with a `‘_’`, which is nowhere visible.
+
+###### Example
+Here are some well-formed type parameter clauses:
+
+```scala
+[S, T]
+[@specialized T, U]
+[Ex <: Throwable]
+[A <: Comparable[B], B <: A]
+[A, B >: A, C >: A <: B]
+[M[X], N[X]]
+[M[_], N[_]] // equivalent to previous clause
+[M[X <: Bound[X]], Bound[_]]
+[M[+X] <: Iterable[X]]
+```
+
+The following type parameter clauses are illegal:
+
+```scala
+[A >: A] // illegal, `A' has itself as bound
+[A <: B, B <: C, C <: A] // illegal, `A' has itself as bound
+[A, B, C >: A <: B] // illegal lower bound `A' of `C' does
+ // not conform to upper bound `B'.
+```
+
+## Variance Annotations
+
+Variance annotations indicate how instances of parameterized types vary with respect to [subtyping](03-types.html#conformance).
+A ‘+’ variance indicates a covariant dependency, a ‘-’ variance indicates a contravariant dependency, and a missing variance indication indicates an invariant dependency.
+
+A variance annotation constrains the way the annotated type variable may appear in the type or class which binds the type parameter.
+In a type definition `type ´T´[´\mathit{tps}\,´] = ´S´`, `type ´T´[´\mathit{tps}\,´] >: ´L´ <: ´U´` or `opaque type ´T´[´\mathit{tps}\,´] >: ´L´ <: ´U´ = ´S´`, type parameters labeled ‘+’ must only appear in covariant position whereas type parameters labeled ‘-’ must only appear in contravariant position.
+Analogously, for a class definition `class ´C´[´\mathit{tps}\,´](´\mathit{ps}\,´) extends ´T´ { ´x´: ´S´ => ...}`, type parameters labeled ‘+’ must only appear in covariant position in the self type ´S´ and the template ´T´, whereas type parameters labeled ‘-’ must only appear in contravariant position.
+
+The variance position of a type parameter in a type or template is defined as follows.
+Let the opposite of covariance be contravariance, and the opposite of invariance be itself.
+The top-level of the type or template is always in covariant position.
+The variance position changes at the following constructs.
+
+- The variance position of a method parameter is the opposite of the variance position of the enclosing parameter clause.
+- The variance position of a type parameter is the opposite of the variance position of the enclosing type parameter clause.
+- The variance position of the lower bound of a type definition or type parameter is the opposite of the variance position of the type definition or parameter.
+- The type of a mutable variable is always in invariant position.
+- The right-hand side of a type alias is always in invariant position.
+- The prefix ´p´ of a type selection `´p.T´` is always in invariant position.
+- For a type argument ´T´ of a type `´S´[´..., T, ...´]`:
+ - If the corresponding type parameter of ´S´ is invariant, then ´T´ is in invariant position.
+ - If the corresponding type parameter of ´S´ is contravariant, the variance position of ´T´ is the opposite of the variance position of the enclosing type `´S´[´..., T, ...´]`.
+
+References to the type parameters in [object-private values, types, variables, or methods](05-classes-and-objects.html#modifiers) of the class are not checked for their variance position.
+In these members the type parameter may appear anywhere without restricting its legal variance annotations.
+
+###### Example
+The following variance annotation is legal.
+
+```scala
+abstract class P[+A, +B] {
+ def fst: A
+ def snd: B
+}
+```
+
+With this variance annotation, type instances of ´P´ subtype covariantly with respect to their arguments.
+For instance,
+
+```scala
+P[IOException, String] <: P[Throwable, AnyRef]
+```
+
+If the members of ´P´ are mutable variables, the same variance annotation becomes illegal.
+
+```scala
+abstract class Q[+A, +B](x: A, y: B) {
+ var fst: A = x // **** error: illegal variance:
+ var snd: B = y // `A', `B' occur in invariant position.
+}
+```
+
+If the mutable variables are object-private, the class definition becomes legal again:
+
+```scala
+abstract class R[+A, +B](x: A, y: B) {
+ private var fst: A = x // OK
+ private var snd: B = y // OK
+}
+```
+
+###### Example
+
+The following variance annotation is illegal, since ´A´ appears in contravariant position in the parameter of `append`:
+
+```scala
+abstract class Sequence[+A] {
+ def append(x: Sequence[A]): Sequence[A]
+ // **** error: illegal variance:
+ // `A' occurs in contravariant position.
+}
+```
+
+The problem can be avoided by generalizing the type of `append` by means of a lower bound:
+
+```scala
+abstract class Sequence[+A] {
+ def append[B >: A](x: Sequence[B]): Sequence[B]
+}
+```
+
+###### Example
+
+```scala
+abstract class OutputChannel[-A] {
+ def write(x: A): Unit
+}
+```
+
+With that annotation, we have that `OutputChannel[AnyRef]` conforms to `OutputChannel[String]`.
+That is, a channel on which one can write any object can substitute for a channel on which one can write only strings.
+
+## Method Definitions
+
+```ebnf
+Def ::= ‘def’ FunDef
+FunDef ::= FunSig [‘:’ Type] [‘=’ Expr]
+FunSig ::= id [FunTypeParamClause] ParamClauses
+FunTypeParamClause ::= ‘[’ TypeParam {‘,’ TypeParam} ‘]’
+ParamClauses ::= {ParamClause} [[nl] ‘(’ ‘implicit’ Params ‘)’]
+ParamClause ::= [nl] ‘(’ [Params] ‘)’
+Params ::= Param {‘,’ Param}
+Param ::= {Annotation} id [‘:’ ParamType] [‘=’ Expr]
+ParamType ::= Type
+ | ‘=>’ Type
+ | Type ‘*’
+```
+
+An _abstract method definition_ has the form `def ´f\,\mathit{psig}´: ´T´`, where ´f´ is the method's name, ´\mathit{psig}´ is its parameter signature and ´T´ is its result type.
+A _concrete method definition_ `def ´f\,\mathit{psig}´: ´T´ = ´e´` also includes a _method body_ ´e´, i.e. an expression which defines the method's result.
+A parameter signature consists of an optional type parameter clause `[´\mathit{tps}\,´]`, followed by zero or more value parameter clauses `(´\mathit{ps}_1´)...(´\mathit{ps}_n´)`.
+
+If there is no type or term parameter clause, a method definition introduces a method with a proper type, which is also its result type.
+Otherwise, it introduces a method with a methodic type whose parameter types and result type are as given.
+
+The type of the method body is expected to [conform](06-expressions.html#expression-typing) to the method's declared result type, if one is given.
+If the method definition is not recursive, the result type may be omitted, in which case it is determined from the packed type of the method body.
+
+A _type parameter clause_ ´\mathit{tps}´ consists of one or more [type definitions](#type-definitions), which introduce type parameters, possibly with bounds.
+The scope of a type parameter includes the whole signature, including any of the type parameter bounds as well as the method body, if it is present.
+
+A _value parameter clause_ ´\mathit{ps}´ consists of zero or more formal parameter bindings such as `´x´: ´T´` or `´x: T = e´`, which bind value parameters and associate them with their types.
+
+A unary operator must not have explicit parameter lists even if they are empty.
+A unary operator is a method named `"unary_´op´"` where ´op´ is one of `+`, `-`, `!`, or `~`.
+
+### Default Arguments
+
+Each value parameter may optionally define a default argument.
+The default argument expression ´e´ is type-checked with an expected type ´T'´ obtained by replacing all occurrences of the method's type parameters in the parameter's declared type ´T´ by the undefined type.
+
+For every parameter ´p_{i,j}´ with a default argument, a method named `´f\$´default´\$´n` is generated which computes the default argument expression.
+Here, ´n´ denotes the parameter's position in the method definition.
+These methods are parametrized by the type parameter clause `[´\mathit{tps}\,´]` and all value parameter clauses `(´\mathit{ps}_1´)...(´\mathit{ps}_{i-1}´)` preceding ´p_{i,j}´.
+The `´f\$´default´\$´n` methods are inaccessible for user programs.
+
+###### Example
+In the method
+
+```scala
+def compare[T](a: T = 0)(b: T = a) = (a == b)
+```
+
+the default expression `0` is type-checked with an undefined expected type.
+When applying `compare()`, the default value `0` is inserted and `T` is instantiated to `Int`.
+The methods computing the default arguments have the form:
+
+```scala
+def compare´\$´default´\$´1[T]: Int = 0
+def compare´\$´default´\$´2[T](a: T): T = a
+```
+
+The scope of a formal value parameter name ´x´ comprises all subsequent parameter clauses, as well as the method return type and the method body, if they are given.
+Both type parameter names and value parameter names must be pairwise distinct.
+
+A default value which depends on earlier parameters uses the actual arguments if they are provided, not the default arguments.
+
+```scala
+def f(a: Int = 0)(b: Int = a + 1) = b // OK
+// def f(a: Int = 0, b: Int = a + 1) // "error: not found: value a"
+f(10)() // returns 11 (not 1)
+```
+
+If an [implicit argument](07-implicits.html#implicit-parameters) is not found by implicit search, it may be supplied using a default argument.
+
+```scala
+implicit val i: Int = 2
+def f(implicit x: Int, s: String = "hi") = s * x
+f // "hihi"
+```
+
+### By-Name Parameters
+
+```ebnf
+ParamType ::= ‘=>’ Type
+```
+
+The type of a value parameter may be prefixed by `=>`, e.g. `´x´: => ´T´`.
+The type of such a parameter is then the [by-name type](./03-types.html#by-name-types) `=> ´T´`.
+This indicates that the corresponding argument is not evaluated at the point of method application, but instead is evaluated at each use within the method.
+That is, the argument is evaluated using _call-by-name_.
+
+The by-name modifier is disallowed for parameters of classes that carry a `val` or `var` prefix, including parameters of case classes for which a `val` prefix is implicitly generated.
+
+###### Example
+The definition
+
+```scala
+def whileLoop (cond: => Boolean) (stat: => Unit): Unit
+```
+
+indicates that both parameters of `whileLoop` are evaluated using call-by-name.
+
+### Repeated Parameters
+
+```ebnf
+ParamType ::= Type ‘*’
+```
+
+The last value parameter of a parameter section may be suffixed by `'*'`, e.g. `(..., ´x´:´T´*)`.
+The type of such a _repeated_ parameter inside the method is then the sequence type `scala.Seq[´T´]`.
+Methods with repeated parameters `´T´*` take a variable number of arguments of type ´T´.
+That is, if a method ´m´ with type `(´p_1:T_1, ..., p_n:T_n, p_s:S´*)´U´` is applied to arguments ´(e_1, ..., e_k)´ where ´k \geq n´, then ´m´ is taken in that application to have type ´(p_1:T_1, ..., p_n:T_n, p_s:S, ..., p_{s'}:S)U´, with ´k - n´ occurrences of type ´S´ where any parameter names beyond ´p_s´ are fresh. The only exception to this rule is if the last argument is marked to be a _sequence argument_ via a `_*` type annotation.
+If ´m´ above is applied to arguments `(´e_1, ..., e_n, e'´: _*)`, then the type of ´m´ in that application is taken to be `(´p_1:T_1, ... , p_n:T_n, p_s:´scala.Seq[´S´]´)U´`.
+
+It is not allowed to define any default arguments in a parameter section with a repeated parameter.
+
+###### Example
+The following method definition computes the sum of the squares of a variable number of integer arguments.
+
+```scala
+def sum(args: Int*) = {
+ var result = 0
+ for (arg <- args) result += arg
+ result
+}
+```
+
+The following applications of this method yield `0`, `1`, `6`, in that order.
+
+```scala
+sum()
+sum(1)
+sum(1, 2, 3)
+```
+
+Furthermore, assume the definition:
+
+```scala
+val xs = List(1, 2, 3)
+```
+
+The following application of method `sum` is ill-formed:
+
+```scala
+sum(xs) // ***** error: expected: Int, found: List[Int]
+```
+
+By contrast, the following application is well formed and yields again the result `6`:
+
+```scala
+sum(xs: _*)
+```
+
+### Method Return Type Inference
+
+A class member definition ´m´ that overrides some other method ´m'´ in a base class of ´C´ may leave out the return type, even if it is recursive.
+In this case, whether or not ´m´ is recursive, its return type will be the return type of ´m'´.
+
+###### Example
+Assume the following definitions:
+
+```scala
+trait I {
+ def factorial(x: Int): Int
+}
+class C extends I {
+ def factorial(x: Int) = if (x == 0) 1 else x * factorial(x - 1)
+}
+```
+
+Here, it is OK to leave out the result type of `factorial` in `C`, even though the method is recursive.
+
+
+
+## Import Clauses
+
+```
+Import ::= ‘import’ ImportExpr {‘,’ ImportExpr}
+ImportExpr ::= SimpleRef {‘.’ id} ‘.’ ImportSpecifier
+ | SimpleRef `as` id
+ImportSpecifier ::= NamedSelector
+ | WildcardSelector
+ | ‘{’ ImportSelectors ‘}’
+NamedSelector ::= id [(‘as’ | ‘=>’) (id | ‘_’)]
+WildcardSelector ::= ‘*’ | ‘_’ | ‘given’ [InfixType]
+ImportSelectors ::= NamedSelector [‘,’ ImportSelectors]
+ | WildcardSelector {‘,’ WildcardSelector}
+```
+
+- In a `NamedSelector`, `=>` can only be used when inside an `ImportSelectors` and is then equivalent to `as`, to be deprecated in the future.
+- In a `WildcardSelector`, `_` is equivalent to `*`, to be deprecated in the future.
+
+An `ImportSpecifier` that is a single `NamedSelector` or `WildcardSelector` is equivalent to an `‘{’ ImportSelectors ‘}’` list with that single selector.
+
+An import clause with multiple import expressions `import ´p_1´.´I_1, ..., p_n´.´I_n´` is interpreted as a sequence of import clauses `import ´p_1´.´I_1´; ...; import ´p_n´.´I_n´`.
+
+An import clause with a single import expression has the form `import ´p´.´I´` where ´p´ is a [prefix](03-types.html#designator-types) and ´I´ is an import specifier.
+The import specifier determines a set of names of importable members of ´p´ which are made available without qualification as well as a set of importable `given` members which are made available in the implicit scope.
+A member ´m´ of ´p´ is _importable_ if it is [accessible](05-classes-and-objects.html#modifiers).
+The most general form of an import specifier is a list of _import selectors_
+
+```scala
+{ ´x_1´ as ´y_1, ..., x_n´ as ´y_n´, *, given ´T_1´, ..., given ´T_m´, given }
+```
+
+for ´n \geq 0´ and ´m \geq 0´, where the wildcards `‘*’` and `‘given’` may be absent.
+They are decomposed into non-given selectors and given selectors.
+
+### Non-given Imports
+
+Non-given selectors make available each importable member `´p´.´x_i´` under the unqualified name ´y_i´.
+In other words, every import selector `´x_i´ as ´y_i´` renames `´p´.´x_i´` to ´y_i´.
+When `as ´y_i´` is omitted, ´y_i´ is assumed to be ´x_i´.
+If a final wildcard `‘*’` is present, all non-`given` importable members ´z´ of ´p´ other than `´x_1, ..., x_n, y_1, ..., y_n´` are also made available under their own unqualified names.
+
+Non-given import selectors work in the same way for type and term members.
+For instance, an import clause `import ´p´.´x´ as ´y´` renames the term name `´p´.´x´` to the term name ´y´ and the type name `´p´.´x´` to the type name ´y´.
+At least one of these two names must reference an importable member of ´p´.
+
+If the target in an import selector is an underscore `as _`, the import selector hides access to the source member instead of importing it.
+For instance, the import selector `´x´ as _` “renames” ´x´ to the underscore symbol (which is not accessible as a name in user programs), and thereby effectively prevents unqualified access to ´x´.
+This is useful if there is a final wildcard in the same import selector list, which imports all members not mentioned in previous import selectors.
+
+The scope of a binding introduced by a non-given import clause starts immediately after the import clause and extends to the end of the enclosing block, template, package clause, or compilation unit, whichever comes first.
+
+### Given Imports
+
+Given selectors make available in the implicit scope all the importable `given` and `implicit` members `´p´.´x´` such that `´p.x´` is a subtype of ´T_i´.
+A bare `given` selector without type is equivalent to `given scala.Any`.
+
+The names of the given members are irrelevant for the selection, and are not made available in the normal scope of unqualified names.
+
+###### Example
+Consider the object definition:
+
+```scala
+object M {
+ def z = 0
+ def one = 1
+ def add(x: Int, y: Int): Int = x + y
+}
+```
+
+Then the block
+
+```scala
+{
+ import M.{one, z as zero, *}
+ add(zero, one)
+}
+```
+
+is equivalent to the block
+
+```scala
+{
+ M.add(M.z, M.one)
+}
+```
diff --git a/docs/_spec/05-classes-and-objects.md b/docs/_spec/05-classes-and-objects.md
index 6feda780417a..e1d4ace3d81f 100644
--- a/docs/_spec/05-classes-and-objects.md
+++ b/docs/_spec/05-classes-and-objects.md
@@ -46,8 +46,8 @@ It is forbidden for a template's superclass constructor ´sc´ to be an [enum cl
The _least proper supertype_ of a template is the class type or [compound type](03-types.html#compound-types) consisting of all its parent class types.
The statement sequence ´\mathit{stats}´ contains member definitions that define new members or overwrite members in the parent classes.
-If the template forms part of an abstract class or trait definition, the statement part ´\mathit{stats}´ may also contain declarations of abstract members.
-If the template forms part of a concrete class definition, ´\mathit{stats}´ may still contain declarations of abstract type members, but not of abstract term members.
+If the template forms part of an abstract class or trait definition, the statement part ´\mathit{stats}´ may also contain definitions of abstract members.
+If the template forms part of a concrete class definition, ´\mathit{stats}´ may still contain definitions of abstract type members, but not of abstract term members.
Furthermore, ´\mathit{stats}´ may in any case also contain expressions; these are executed in the order they are given as part of the initialization of a template.
The sequence of template statements may be prefixed with a formal parameter definition and an arrow, e.g. `´x´ =>`, or `´x´:´T´ =>`.
@@ -310,6 +310,7 @@ LocalModifier ::= ‘abstract’
| ‘sealed’
| ‘implicit’
| ‘lazy’
+ | ‘infix’
AccessModifier ::= (‘private’ | ‘protected’) [AccessQualifier]
AccessQualifier ::= ‘[’ (id | ‘this’) ‘]’
```
@@ -320,7 +321,7 @@ Modifiers preceding a repeated definition apply to all constituent definitions.
The rules governing the validity and meaning of a modifier are as follows.
### `private`
-The `private` modifier can be used with any definition or declaration in a template.
+The `private` modifier can be used with any definition in a template.
Private members of a template can be accessed only from within the directly enclosing template and its companion module or [companion class](#object-definitions).
The `private` modifier is also valid for [top-level](09-top-level-definitions.html#packagings) templates.
@@ -358,18 +359,17 @@ A different form of qualification is `protected[this]`.
A member ´M´ marked with this modifier is called _object-protected_; it can be accessed only from within the object in which it is defined. That is, a selection ´p.M´ is only legal if the prefix is `this` or `´O´.this`, for some class ´O´ enclosing the reference. In addition, the restrictions for unqualified `protected` apply.
### `override`
-The `override` modifier applies to class member definitions or declarations.
-It is mandatory for member definitions or declarations that override some other concrete member definition in a parent class.
-If an `override` modifier is given, there must be at least one overridden member definition or declaration (either concrete or abstract).
+The `override` modifier applies to class member definitions.
+It is mandatory for member definitions that override some other concrete member definition in a parent class.
+If an `override` modifier is given, there must be at least one overridden member definition (either concrete or abstract).
### `abstract override`
The `override` modifier has an additional significance when combined with the `abstract` modifier.
That modifier combination is only allowed for value members of traits.
-We call a member ´M´ of a template _incomplete_ if it is either abstract (i.e. defined by a declaration), or it is labeled `abstract` and `override` and every member overridden by ´M´ is again incomplete.
+We call a member ´M´ of a template _incomplete_ if it is either abstract, or it is labeled `abstract` and `override` and every member overridden by ´M´ is again incomplete.
Note that the `abstract override` modifier combination does not influence the concept whether a member is concrete or abstract.
-A member is _abstract_ if only a declaration is given for it; it is _concrete_ if a full definition is given.
### `abstract`
The `abstract` modifier is used in class definitions.
@@ -386,7 +386,7 @@ A `final` class member definition may not be overridden in subclasses.
A `final` class may not be inherited by a template.
`final` is redundant for object definitions.
Members of final classes or objects are implicitly also final, so the `final` modifier is generally redundant for them, too.
-Note, however, that [constant value definitions](04-basic-declarations-and-definitions.html#value-declarations-and-definitions) do require an explicit `final` modifier, even if they are defined in a final class or object.
+Note, however, that [constant value definitions](04-basic-definitions.html#value-definitions) do require an explicit `final` modifier, even if they are defined in a final class or object.
`final` is permitted for abstract classes but it may not be applied to traits or incomplete members, and it may not be combined in one modifier list with `sealed`.
### `sealed`
@@ -401,6 +401,31 @@ happen at all).
Attempting to access a lazy value during its initialization might lead to looping behavior.
If an exception is thrown during initialization, the value is considered uninitialized, and a later access will retry to evaluate its right hand side.
+### `infix`
+The `infix` modifier applies to method definitions and type definitions.
+It signals that the method or type is intended for use in infix position, even if it has an alphanumeric name.
+
+If a method overrides another, their `infix` annotations must agree. Either both are annotated with `infix`, or none of them are.
+
+The first non-receiver parameter list of an `infix` method must define exactly one parameter. Examples:
+
+```scala
+infix def op1(x: S): R // ok
+infix def op2[T](x: T)(y: S): R // ok
+infix def op3[T](x: T, y: S): R // error: two parameters
+extension (x: A)
+ infix def op4(y: B): R // ok
+ infix def op5(y1: B, y2: B): R // error: two parameters
+```
+
+`infix` modifiers can also be given to type, trait or class definitions that have exactly two type parameters. An infix type like
+
+```scala
+infix type op[X, Y]
+```
+
+can be applied using infix syntax, i.e., `A op B`.
+
###### Example
The following code illustrates the use of qualified private:
@@ -481,15 +506,15 @@ Here,
If a class has no formal parameter section that is not implicit, an empty parameter section `()` is assumed.
- If a formal parameter declaration ´x: T´ is preceded by a `val` or `var` keyword, an accessor (getter) [definition](04-basic-declarations-and-definitions.html#variable-declarations-and-definitions) for this parameter is implicitly added to the class.
+ If a formal parameter definition ´x: T´ is preceded by a `val` or `var` keyword, an accessor [definition](04-basic-definitions.html#value-definitions) for this parameter is implicitly added to the class.
- The getter introduces a value member ´x´ of class ´c´ that is defined as an alias of the parameter.
- If the introducing keyword is `var`, a setter accessor [`´x´_=`](04-basic-declarations-and-definitions.html#variable-declarations-and-definitions) is also implicitly added to the class.
- In invocation of that setter `´x´_=(´e´)` changes the value of the parameter to the result of evaluating ´e´.
+ The accessor introduces a value member ´x´ of class ´c´ that is defined as an alias of the parameter.
+ If the introducing keyword is `var`, a setter accessor [`´x´_=`](04-basic-definitions.html#variable-definitions) is also implicitly added to the class.
+ An invocation of that setter `´x´_=(´e´)` changes the value of the parameter to the result of evaluating ´e´.
- The formal parameter declaration may contain modifiers, which then carry over to the accessor definition(s).
+ The formal parameter definition may contain modifiers, which then carry over to the accessor definition(s).
When access modifiers are given for a parameter, but no `val` or `var` keyword, `val` is assumed.
- A formal parameter prefixed by `val` or `var` may not at the same time be a [call-by-name parameter](04-basic-declarations-and-definitions.html#by-name-parameters).
+ A formal parameter prefixed by `val` or `var` may not at the same time be a [call-by-name parameter](04-basic-definitions.html#by-name-parameters).
- ´t´ is a [template](#templates) of the form
@@ -607,7 +632,7 @@ If the case class definition contains an empty value parameter list, the `unappl
def unapply[´\mathit{tps}\,´](´x´: ´c´[´\mathit{tps}\,´]) = x ne null
```
-The name of the `unapply` method is changed to `unapplySeq` if the first parameter section ´\mathit{ps}_1´ of ´c´ ends in a [repeated parameter](04-basic-declarations-and-definitions.html#repeated-parameters).
+The name of the `unapply` method is changed to `unapplySeq` if the first parameter section ´\mathit{ps}_1´ of ´c´ ends in a [repeated parameter](04-basic-definitions.html#repeated-parameters).
A method named `copy` is implicitly added to every case class unless the class already has a member (directly defined or inherited) with that name, or the class has a repeated parameter.
The method is defined as follows:
@@ -872,14 +897,14 @@ Such a class ´C´ is conceptually seen as a pair of a Scala class that contains
Generally, a _companion module_ of a class is an object which has the same name as the class and is defined in the same scope and compilation unit.
Conversely, the class is called the _companion class_ of the module.
-Very much like a concrete class definition, an object definition may still contain declarations of abstract type members, but not of abstract term members.
+Very much like a concrete class definition, an object definition may still contain definitions of abstract type members, but not of abstract term members.
## Enum Definitions
```ebnf
TmplDef ::= ‘enum’ EnumDef
-EnumDef ::= id ClassConstr [‘extends’ [ConstrApps]] EnumBody
+EnumDef ::= id ClassConstr [‘extends’ ConstrApps] EnumBody
EnumBody ::= [nl] ‘{’ [SelfType] EnumStat {semi EnumStat} ‘}’
EnumStat ::= TemplateStat
| {Annotation [nl]} {Modifier} EnumCase
@@ -900,18 +925,15 @@ First, some terminology and notational conventions:
- We use `<...>` for syntactic constructs that in some circumstances might be empty.
For instance, `` represents one or more parameter lists `(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)` or nothing at all.
- Enum classes fall into two categories:
- - _parameterized_ enum classes have at least one of the following:
- - a type parameter section, denoted as `[´\mathit{tps}\,´]`;
- - one or more (possibly empty) parameter sections, denoted as `(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)`.
- - _unparameterized_ enum classes have no type parameter sections and no parameter sections.
+ - _parameterized_ enum classes have at least one or more (possibly empty) term parameter clauses, denoted as `(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)`.
+ - _unparameterized_ enum classes have no term parameter clauses, but may optionally have a type parameter clause, denoted as `[´\mathit{tps}\,´]`.
- Enum cases fall into three categories:
-
- - _Class cases_ are those cases that are parameterized, either with a type parameter section `[´\mathit{tps}\,´]` or with one or more (possibly empty) parameter sections `(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)`.
- - _Simple cases_ are cases of an unparameterized enum that have neither parameters nor an extends clause or body.
+ - _Class enum cases_ are those cases that possibly have a type parameter clause `[´\mathit{tps}\,´]`, and necessarily have one or more (possibly empty) parameter clauses `(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)`.
+ - _Simple enum cases_ are those cases that have no parameter clauses and no extends clause.
That is, they consist of a name only.
- - _Value cases_ are all cases that do not have a parameter section but that do have a (possibly generated) `extends` clause and/or a body.
+ - _Value enum cases_ are those cases that have no parameter clauses but that do have a (possibly generated) `extends` clause.
-- Simple cases and value cases are collectively called _singleton cases_.
+- Simple enum cases and value enum cases are collectively called _singleton enum cases_.
###### Example
@@ -945,13 +967,11 @@ enum Option[+T]:
### Lowering of Enum Definitions
###### Summary
-An enum class is represented as a `sealed` class that extends the `scala.reflect.Enum` trait.
+An enum class is represented as a `sealed abstract` class that extends the `scala.reflect.Enum` trait.
Enum cases are represented as follows:
-- a class case is mapped to a `case class`,
-- a singleton case is mapped to a `val` definition, where
- - Simple cases all share a single implementation class.
- - Value cases will each be implemented by a unique class.
+- a class enum case is mapped to a `case class` member of the enum class' companion object,
+- a singleton enum case is mapped to a `val` member of the enum class' companion object, implemented by a local class definition. Whether that local class is shared with other singleton cases, and which ones, is left as an implementation detail.
###### Precise rules
The `scala.reflect.Enum` trait defines a single public method, `ordinal`:
@@ -964,106 +984,119 @@ transparent trait Enum extends Any, Product, Serializable:
```
There are nine desugaring rules.
Rule (1) desugars enum definitions.
-Rules (2) and (3) desugar simple cases.
-Rules (4) to (6) define `extends` clauses for cases that are missing them.
-Rules (7) to (9) define how such cases with `extends` clauses map into `case class`es or `val`s.
+Rule (2) desugars cases of comma-separated names to simple enum cases.
+Rules (3) to (7) desugar inferrable details of enum cases.
+Rules (8) and (9) define how fully-desugared enum cases map into `case class`es or `val`s.
+Explicit `extends` clauses must be provided in the following cases, where rules (2) to (6) do not apply:
+- any enum case of a parameterized enum,
+- any singleton enum case of an unparameterized enum with non-variant type parameters,
+- any class enum case of an enum with type parameters, where the case also has type parameters.
1. An `enum` definition
```scala
- enum ´E´ ... { }
+ enum ´E´ extends { }
```
expands to a `sealed abstract` class that extends the `scala.reflect.Enum` trait and an associated companion object that contains the defined cases, expanded according to rules (2 - 8).
The enum class starts with a compiler-generated import that imports the names `` of all cases so that they can be used without prefix in the class.
```scala
- sealed abstract class ´E´ ... extends with scala.reflect.Enum {
- import ´E´.{ }
-
+ sealed abstract class ´E´
+ extends with scala.reflect.Enum {
+ import ´E´.{ }
+
}
object ´E´ { }
```
-2. A singleton case consisting of a comma-separated list of enum names
+2. A simple enum case consisting of a comma-separated list of names
```scala
case ´C_1´, ..., ´C_n´
```
- expands to
+ expands to the following simple enum cases
```scala
case ´C_1´; ...; case ´C_n´
```
Any modifiers or annotations on the original case extend to all expanded cases.
- This result is then further rewritten by either (3 or 4).
+
This result is then further rewritten by either (3 or 4).
-3. A singleton case without an extends clause
+3. A simple enum case `´C´` of an unparameterized enum `´E´` without type parameters
```scala
case ´C´
```
- of an unparameterized enum `´E´` expands to the following simple enum case in `´E´`'s companion object:
+ expands to the following value enum case:
```scala
- val ´C´ = $new(n, "C")
+ case ´C´ extends ´E´
```
- Here, `$new` is a private method that creates an instance of ´E´ (see below).
+ This result is then further rewritten with rule (8).
-4. A singleton case without an extends clause
+4. A simple enum case `´C´` of an unparameterized enum `´E´[´\mathit{tps}´]` with type parameters
```scala
case ´C´
```
- of an enum `´E´` with type parameters
+ where `´\mathit{tps}´` are of the following form
```scala
´\mathit{v}_1´ ´T_1´ >: ´L_1´ <: ´U_1´ , ... , ´\mathit{v}_n´ ´T_n´ >: ´L_n´ <: ´U_n´ (n > 0)
```
- where each of the variances `´\mathit{v}_i´` is either `'+'` or `'-'`, expands to the following value enum case:
+ and where each of the variances `´\mathit{v}_i´` is either `'+'` or `'-'`, expands to the following value enum case:
```scala
case ´C´ extends ´E´[´B_1´, ..., ´B_n´]
```
where `´B_i´` is `´L_i´` if `´\mathit{v}_i´ = '+'` and `´U_i´` if `´\mathit{v}_i´ = '-'`.
- This result is then further rewritten with rule (8).
- **NOTE:** It is not permitted for enums with non-variant type parameters to have singleton cases without an extends clause.
+
This result is then further rewritten with rule (8).
-5. A class case without an extends clause
+5. A class enum case with type parameters, but without an extends clause
```scala
- case ´C´
+ case ´C´[´\mathit{tps}´](´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)
```
- of an enum `´E´` that does not take type parameters expands to
+ of an unparameterized enum `´E´` without type parameters expands to
```scala
- case ´C´ extends ´E´
+ case ´C´[´\mathit{tps}´](´\mathit{ps}_1\,´)...(´\mathit{ps}_n´) extends ´E´
```
This result is then further rewritten with rule (9).
-6. If `´E´` is an enum with type parameters `´\mathit{tps}´`, a class case with neither type parameters nor an extends clause
+6. A class enum case without type parameters or an extends clause
```scala
- case ´C´
+ case ´C´(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)
```
- expands to
+ of an unparameterized enum `´E´[´\mathit{tps}´]` with type parameters expands to
```scala
- case ´C´[´\mathit{tps}´] extends ´E´[´\mathit{tps}´]
+ case ´C´(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´) extends ´E´[´\mathit{tps}´]
```
- This result is then further rewritten with rule (9).
- For class cases that have type parameters themselves, an extends clause needs to be given explicitly.
-
+ This result is then further rewritten with rule (7).
-7. If `´E´` is an enum with type parameters `´\mathit{tps}´`, a class case without type parameters but with an extends clause
+7. A class enum case without type parameters, but has an extends clause
```scala
- case ´C´ extends
+ case ´C´(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´) extends
```
- expands to
+ of an enum `´E´[´\mathit{tps}´]` with type parameters expands to
```scala
- case ´C´[´\mathit{tps}´] extends
+ case ´C´[´\mathit{tps}´](´\mathit{ps}_1\,´)...(´\mathit{ps}_n´) extends
```
- provided at least one of the parameters `´\mathit{tps}´` is mentioned in a parameter type in `` or in a type argument in ``.
+ provided at least one of the parameters `´\mathit{tps}´` is mentioned in a parameter type in `(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)` or in a type argument in ``.
+
+ This result is then further rewritten with rule (9).
-8. A value case
+8. A singleton enum case
```scala
case ´C´ extends
```
expands to the following `val` definition in `´E´`'s companion object:
```scala
- val ´C´ = new { ; def ordinal = ´\mathit{n}´ }
+ val ´C´ = $factory(_$ordinal = ´\mathit{n}´, $name = "C")
```
where `´\mathit{n}´` is the ordinal number of the case in the companion object, starting from 0.
+ `$factory` is a placeholder that expands its arguments into an expression that produces something equivalent to
+ a new instance of the following (possibly shared) anonymous class:
+ ```scala
+ new {
+ def ordinal: Int = _$ordinal
+ override def toString: String = $name
+ }
+ ```
The anonymous class also implements the abstract `Product` methods that it inherits from `Enum`.
+
**NOTE:** It is an error if a value case refers to a type parameter of `´E´` in a type argument within ``.
-9. A class case
+9. A class enum case
```scala
case ´C´ extends
```
@@ -1074,6 +1107,7 @@ Rules (7) to (9) define how such cases with `extends` clauses map into `case cla
}
```
where `´\mathit{n}´` is the ordinal number of the case in the companion object, starting from 0.
+
**NOTE:** It is an error if a class case refers to a type parameter of `´E´` in a parameter type in `` or `` or in a type argument of ``, unless that parameter is already a type parameter of the case, i.e. the parameter name is defined in ``.
###### Superclass of an enum case
@@ -1106,34 +1140,6 @@ private def $new(_$ordinal: Int, $name: String) =
override def toString = $name
```
-
-###### Example
-
-Consider the more complex enumeration `Color`, consisting of value enum cases:
-```scala
-enum Color(val rgb: Int):
- case Red extends Color(0xFF0000)
- case Green extends Color(0x00FF00)
- case Blue extends Color(0x0000FF)
-```
-
-The three value cases will expand as follows in the companion of `Color`:
-
-```scala
-val Red = new Color(0xFF0000):
- def ordinal: Int = 0
- override def productPrefix: String = "Red"
- override def toString: String = "Red"
-val Green = new Color(0x00FF00):
- def ordinal: Int = 1
- override def productPrefix: String = "Green"
- override def toString: String = "Green"
-val Blue = new Color(0x0000FF):
- def ordinal: Int = 2
- override def productPrefix: String = "Blue"
- override def toString: String = "Blue"
-```
-
### Widening of enum cases post-construction
The compiler-generated `apply` and `copy` methods of an class enum case
```scala
@@ -1151,20 +1157,6 @@ An enum `´E´` (possibly generic) that defines one or more singleton cases, and
It returns the singleton case value whose identifier is `name`.
- A method `values` which returns an `Array[´E'´]` of all singleton case values defined by `E`, in the order of their definitions.
-### Factory method for simple enum cases
-
-If an enum `´E´` contains at least one simple case, its companion object will define in addition:
-
- - A private method `$new` which defines a new simple case value with given ordinal number and name.
- This method can be thought as being defined as follows.
-
- ```scala
- private def $new(_$ordinal: Int, $name: String): ´E´ with runtime.EnumValue
- ```
- - `$new` returns a new instance of an anonymous class which implements the abstract `Product` methods that it inherits from `Enum`.
- - if `´E´` inherits from `java.lang.Enum` the anonymous class does not override the `ordinal` or `toString` methods, as these are final in `java.lang.Enum`.
- Additionally `productPrefix` will delegate to `this.name`.
-
### Translation of Java-compatible enums
A Java-compatible enum is an enum that extends `java.lang.Enum`.
@@ -1211,4 +1203,4 @@ A correctly typed version would use an _explicit_, _invariant_ type parameter `
```scala
enum View[-´T´]:
case Refl[´R´](f: ´R´ => ´R´) extends View[´R´]
-```
\ No newline at end of file
+```
diff --git a/docs/_spec/06-expressions.md b/docs/_spec/06-expressions.md
index fa21b4330728..5043e752ebe6 100644
--- a/docs/_spec/06-expressions.md
+++ b/docs/_spec/06-expressions.md
@@ -10,22 +10,26 @@ chapter: 6
Expr ::= (Bindings | id | ‘_’) ‘=>’ Expr
| Expr1
Expr1 ::= ‘if’ ‘(’ Expr ‘)’ {nl} Expr [[semi] ‘else’ Expr]
+ | ‘if’ Expr ‘then’ Expr [[semi] ‘else’ Expr]
| ‘while’ ‘(’ Expr ‘)’ {nl} Expr
- | ‘try’ Expr [‘catch’ Expr] [‘finally’ Expr]
- | ‘for’ (‘(’ Enumerators ‘)’ | ‘{’ Enumerators ‘}’) {nl} [‘yield’] Expr
+ | ‘while’ Expr ‘do’ Expr
+ | ‘try’ Expr [Catches] [‘finally’ Expr]
+ | ‘for’ (‘(’ Enumerators ‘)’ | ‘{’ Enumerators ‘}’) {nl} [‘do’ | ‘yield’] Expr
+ | ‘for’ Enumerators (‘do’ | ‘yield’) Expr
| ‘throw’ Expr
| ‘return’ [Expr]
| [SimpleExpr ‘.’] id ‘=’ Expr
| SimpleExpr1 ArgumentExprs ‘=’ Expr
| PostfixExpr
| PostfixExpr Ascription
- | PostfixExpr ‘match’ ‘{’ CaseClauses ‘}’
PostfixExpr ::= InfixExpr [id [nl]]
InfixExpr ::= PrefixExpr
| InfixExpr id [nl] InfixExpr
+ | InfixExpr MatchClause
PrefixExpr ::= [‘-’ | ‘+’ | ‘~’ | ‘!’] SimpleExpr
SimpleExpr ::= ‘new’ (ClassTemplate | TemplateBody)
| BlockExpr
+ | SimpleExpr ‘.’ MatchClause
| SimpleExpr1 [‘_’]
SimpleExpr1 ::= Literal
| Path
@@ -36,6 +40,7 @@ SimpleExpr1 ::= Literal
| SimpleExpr1 ArgumentExprs
| XmlExpr
Exprs ::= Expr {‘,’ Expr}
+MatchClause ::= ‘match’ ‘{’ CaseClauses ‘}’
BlockExpr ::= ‘{’ CaseClauses ‘}’
| ‘{’ Block ‘}’
Block ::= BlockStat {semi BlockStat} [ResultExpr]
@@ -44,6 +49,7 @@ ResultExpr ::= Expr1
Ascription ::= ‘:’ InfixType
| ‘:’ Annotation {Annotation}
| ‘:’ ‘_’ ‘*’
+Catches ::= ‘catch’ (Expr | ExprCaseClause)
```
Expressions are composed of operators and operands.
@@ -85,7 +91,7 @@ This object implements methods in class `scala.AnyRef` as follows:
- `eq(´x\,´)` and `==(´x\,´)` return `true` iff the argument ´x´ is also the "null" object.
- `ne(´x\,´)` and `!=(´x\,´)` return true iff the argument x is not also the "null" object.
- `isInstanceOf[´T\,´]` always returns `false`.
-- `asInstanceOf[´T\,´]` returns the [default value](04-basic-declarations-and-definitions.html#value-declarations-and-definitions) of type ´T´.
+- `asInstanceOf[´T\,´]` returns the [default value](04-basic-definitions.html#value-definitions) of type ´T´.
- `##` returns ``0``.
A reference to any other member of the "null" object causes a `NullPointerException` to be thrown.
@@ -100,7 +106,7 @@ SimpleExpr ::= Path
A designator refers to a named term. It can be a _simple name_ or a _selection_.
A simple name ´x´ refers to a value as specified [here](02-identifiers-names-and-scopes.html#identifiers,-names-and-scopes).
-If ´x´ is bound by a definition or declaration in an enclosing class or object ´C´, it is taken to be equivalent to the selection `´C´.this.´x´` where ´C´ is taken to refer to the class containing ´x´ even if the type name ´C´ is [shadowed](02-identifiers-names-and-scopes.html#identifiers,-names-and-scopes) at the occurrence of ´x´.
+If ´x´ is bound by a definition in an enclosing class or object ´C´, it is taken to be equivalent to the selection `´C´.this.´x´` where ´C´ is taken to refer to the class containing ´x´ even if the type name ´C´ is [shadowed](02-identifiers-names-and-scopes.html#identifiers,-names-and-scopes) at the occurrence of ´x´.
If ´r´ is a [stable identifier](03-types.html#paths) of type ´T´, the selection ´r.x´ refers statically to a term member ´m´ of ´r´ that is identified in ´T´ by the name ´x´.
@@ -205,8 +211,9 @@ An application `´f(e_1, ..., e_m)´` applies the method `´f´` to the argument
For this expression to be well-typed, the method must be *applicable* to its arguments:
If ´f´ has a method type `(´p_1´:´T_1, ..., p_n´:´T_n´)´U´`, each argument expression ´e_i´ is typed with the corresponding parameter type ´T_i´ as expected type.
-Let ´S_i´ be the type of argument ´e_i´ ´(i = 1, ..., m)´.
+Let ´S_i´ be the type of argument ´e_i´ ´(i = 1, ..., n)´.
The method ´f´ must be _applicable_ to its arguments ´e_1, ..., e_n´ of types ´S_1, ..., S_n´.
+If the last parameter type of ´f´ is [repeated](04-basic-definitions.html#repeated-parameters), [harmonization](#harmonization) is attempted on the suffix ´e_m, ..., e_n´ of the expression list that matches the repeated parameter.
We say that an argument expression ´e_i´ is a _named_ argument if it has the form `´x_i=e'_i´` and `´x_i´` is one of the parameter names `´p_1, ..., p_n´`.
Once the types ´S_i´ have been determined, the method ´f´ of the above method type is said to be applicable if all of the following conditions hold:
@@ -235,7 +242,7 @@ The behavior of by-name parameters is preserved if the application is transforme
In this case, the local value for that parameter has the form `val ´y_i´ = () => ´e´` and the argument passed to the method is `´y_i´()`.
The last argument in an application may be marked as a sequence argument, e.g. `´e´: _*`.
-Such an argument must correspond to a [repeated parameter](04-basic-declarations-and-definitions.html#repeated-parameters) of type `´S´*` and it must be the only argument matching this parameter (i.e. the number of formal parameters and actual arguments must be the same).
+Such an argument must correspond to a [repeated parameter](04-basic-definitions.html#repeated-parameters) of type `´S´*` and it must be the only argument matching this parameter (i.e. the number of formal parameters and actual arguments must be the same).
Furthermore, the type of ´e´ must conform to `scala.Seq[´T´]`, for some type ´T´ which conforms to ´S´.
In this case, the argument list is transformed by replacing the sequence ´e´ with its elements.
When the application uses named arguments, the vararg parameter has to be specified exactly once.
@@ -304,7 +311,7 @@ The result of transforming ´f´ is a block of the form
where every argument in ´(\mathit{args}\_1), ..., (\mathit{args}\_l)´ is a reference to one of the values ´x_1, ..., x_k´.
To integrate the current application into the block, first a value definition using a fresh name ´y_i´ is created for every argument in ´e_1, ..., e_m´, which is initialised to ´e_i´ for positional arguments and to ´e'_i´ for named arguments of the form `´x_i=e'_i´`.
-Then, for every parameter which is not specified by the argument list, a value definition using a fresh name ´z_i´ is created, which is initialized using the method computing the [default argument](04-basic-declarations-and-definitions.html#method-declarations-and-definitions) of this parameter.
+Then, for every parameter which is not specified by the argument list, a value definition using a fresh name ´z_i´ is created, which is initialized using the method computing the [default argument](04-basic-definitions.html#method-definitions) of this parameter.
Let ´\mathit{args}´ be a permutation of the generated names ´y_i´ and ´z_i´ such such that the position of each name matches the position of its corresponding parameter in the method type `(´p_1:T_1, ..., p_n:T_n´)´U´`.
The final result of the transformation is a block of the form
@@ -453,7 +460,7 @@ Block ::= BlockStat {semi BlockStat} [ResultExpr]
```
A _block expression_ `{´s_1´; ...; ´s_n´; ´e\,´}` is constructed from a sequence of block statements ´s_1, ..., s_n´ and a final expression ´e´.
-The statement sequence may not contain two definitions or declarations that bind the same name in the same namespace.
+The statement sequence may not contain two definitions that bind the same name in the same namespace.
The final expression can be omitted, in which case the unit value `()` is assumed.
The expected type of the final expression ´e´ is the expected type of the block.
@@ -544,6 +551,8 @@ This expression is then interpreted as ´e.\mathit{op}(e_1,...,e_n)´.
A left-associative binary operation ´e_1;\mathit{op};e_2´ is interpreted as ´e_1.\mathit{op}(e_2)´. If ´\mathit{op}´ is right-associative and its parameter is passed by name, the same operation is interpreted as ´e_2.\mathit{op}(e_1)´.
If ´\mathit{op}´ is right-associative and its parameter is passed by value, it is interpreted as `{ val ´x´=´e_1´; ´e_2´.´\mathit{op}´(´x\,´) }`, where ´x´ is a fresh name.
+Under `-source:future`, if the method name is alphanumeric and the target method is not marked [`infix`](./05-classes-and-objects.html#infix), a deprecation warning is emitted.
+
### Assignment Operators
An _assignment operator_ is an operator symbol (syntax category `op` in [Identifiers](01-lexical-syntax.html#identifiers)) that ends in an equals character “`=`”, with the following exceptions:
@@ -676,12 +685,14 @@ def matmul(xss: Array[Array[Double]], yss: Array[Array[Double]]) = {
```ebnf
Expr1 ::= ‘if’ ‘(’ Expr ‘)’ {nl} Expr [[semi] ‘else’ Expr]
+ | ‘if’ Expr ‘then’ Expr [[semi] ‘else’ Expr]
```
The _conditional expression_ `if (´e_1´) ´e_2´ else ´e_3´` chooses one of the values of ´e_2´ and ´e_3´, depending on the value of ´e_1´.
The condition ´e_1´ is expected to conform to type `Boolean`.
The then-part ´e_2´ and the else-part ´e_3´ are both expected to conform to the expected type of the conditional expression.
-The type of the conditional expression is the [weak least upper bound](03-types.html#weak-conformance) of the types of ´e_2´ and ´e_3´.
+If there is no expected type, [harmonization](#harmonization) is attempted on ´e_2´ and ´e_3´.
+The type of the conditional expression is the [least upper bound](03-types.html#least-upper-bounds-and-greatest-lower-bounds) of the types of ´e_2´ and ´e_3´ after harmonization.
A semicolon preceding the `else` symbol of a conditional expression is ignored.
The conditional expression is evaluated by evaluating first ´e_1´.
@@ -694,6 +705,7 @@ The conditional expression `if (´e_1´) ´e_2´` is evaluated as if it was `if
```ebnf
Expr1 ::= ‘while’ ‘(’ Expr ‘)’ {nl} Expr
+ | ‘while’ Expr ‘do’ Expr
```
The _while loop expression_ `while (´e_1´) ´e_2´` is typed and evaluated as if it was an application of `whileLoop (´e_1´) (´e_2´)` where the hypothetical method `whileLoop` is defined as follows.
@@ -841,7 +853,11 @@ The type of a return expression is `scala.Nothing`.
The expression ´e´ may be omitted.
The return expression `return` is type-checked and evaluated as if it were `return ()`.
-Returning from the method from within a nested function may be implemented by throwing and catching a `scala.runtime.NonLocalReturnControl`.
+### Non-Local Returns (deprecated)
+
+Returning from a method from within a nested function is deprecated.
+
+It is implemented by throwing and catching a `scala.runtime.NonLocalReturnControl`.
Any exception catches between the point of return and the enclosing methods might see and catch that exception.
A key comparison makes sure that this exception is only caught by the method instance which is terminated by the return.
@@ -864,15 +880,19 @@ The type of a throw expression is `scala.Nothing`.
## Try Expressions
```ebnf
-Expr1 ::= ‘try’ Expr [‘catch’ Expr] [‘finally’ Expr]
+Expr1 ::= ‘try’ Expr [Catches] [‘finally’ Expr]
+
+Catches ::= ‘catch’ (Expr | ExprCaseClause)
```
-A _try expression_ is of the form `try { ´b´ } catch ´h´` where the handler ´h´ is usually a [pattern matching anonymous function](08-pattern-matching.html#pattern-matching-anonymous-functions)
+A _try expression_ is of the form `try ´b´ catch ´h´` where the handler ´h´ is usually a [pattern matching anonymous function](08-pattern-matching.html#pattern-matching-anonymous-functions)
```scala
{ case ´p_1´ => ´b_1´ ... case ´p_n´ => ´b_n´ }
```
+If the handler is a single `ExprCaseClause`, it is a shorthand for that `ExprCaseClause` wrapped in a pattern matching anonymous function.
+
This expression is evaluated by evaluating the block ´b´.
If evaluation of ´b´ does not cause an exception to be thrown, the result of ´b´ is returned.
Otherwise the handler ´h´ is applied to the thrown exception.
@@ -881,11 +901,11 @@ If the handler contains no case matching the thrown exception, the exception is
More generally, if the handler is a `PartialFunction`, it is applied only if it is defined at the given exception.
Let ´\mathit{pt}´ be the expected type of the try expression.
-The block ´b´ is expected to conform to ´\mathit{pt}´.
+The expression ´b´ is expected to conform to ´\mathit{pt}´.
The handler ´h´ is expected conform to type `scala.Function[scala.Throwable, ´\mathit{pt}\,´]`.
-The type of the try expression is the [weak least upper bound](03-types.html#weak-conformance) of the type of ´b´ and the result type of ´h´.
+The type of the try expression is the [least upper bound](03-types.html#least-upper-bounds-and-greatest-lower-bounds) of the type of ´b´ and the result type of ´h´.
-A try expression `try { ´b´ } finally ´e´` evaluates the block ´b´.
+A try expression `try ´b´ finally ´e´` evaluates the expression ´b´.
If evaluation of ´b´ does not cause an exception to be thrown, the expression ´e´ is evaluated.
If an exception is thrown during evaluation of ´e´, the evaluation of the try expression is aborted with the thrown exception.
If no exception is thrown during evaluation of ´e´, the result of ´b´ is returned as the result of the try expression.
@@ -893,10 +913,10 @@ If no exception is thrown during evaluation of ´e´, the result of ´b´ is ret
If an exception is thrown during evaluation of ´b´, the finally block ´e´ is also evaluated.
If another exception ´e´ is thrown during evaluation of ´e´, evaluation of the try expression is aborted with the thrown exception.
If no exception is thrown during evaluation of ´e´, the original exception thrown in ´b´ is re-thrown once evaluation of ´e´ has completed.
-The block ´b´ is expected to conform to the expected type of the try expression.
+The expression ´b´ is expected to conform to the expected type of the try expression.
The finally expression ´e´ is expected to conform to type `Unit`.
-A try expression `try { ´b´ } catch ´e_1´ finally ´e_2´` is a shorthand for `try { try { ´b´ } catch ´e_1´ } finally ´e_2´`.
+A try expression `try ´b´ catch ´e_1´ finally ´e_2´` is a shorthand for `try { try ´b´ catch ´e_1´ } finally ´e_2´`.
## Anonymous Functions
@@ -1011,7 +1031,7 @@ The definition of "constant expression" depends on the platform, but they includ
- A class constructed with [`Predef.classOf`](12-the-scala-standard-library.html#the-predef-object)
- An element of an enumeration from the underlying platform
- A literal array, of the form `Array´(c_1, ..., c_n)´`, where all of the ´c_i´'s are themselves constant expressions
-- An identifier defined by a [constant value definition](04-basic-declarations-and-definitions.html#value-declarations-and-definitions).
+- An identifier defined by a [constant value definition](04-basic-definitions.html#value-definitions).
## Statements
@@ -1030,7 +1050,6 @@ TemplateStat ::= Import
Statements occur as parts of blocks and templates.
A _statement_ can be an import, a definition or an expression, or it can be empty.
-Statements used in the template of a class definition can also be declarations.
An expression that is used as a statement can have an arbitrary value type.
An expression statement ´e´ is evaluated by evaluating ´e´ and discarding the result of the evaluation.
@@ -1042,6 +1061,29 @@ When prefixing a class or object definition, modifiers `abstract`, `final`, and
Evaluation of a statement sequence entails evaluation of the statements in the order they are written.
+## Harmonization
+
+_Harmonization_ of a list of expressions tries to adapt `Int` literals to match the types of sibling trees.
+For example, when writing
+
+```scala
+scala.collection.mutable.ArrayBuffer(5.4, 6, 6.4)
+```
+
+the inferred element type would be `AnyVal` without harmonization.
+Harmonization turns the integer literal `6` into the double literal `6.0` so that the element type becomes `Double`.
+
+Formally, given a list of expressions ´e_1, ..., e_n´ with types ´T_1, ..., T_n´, harmonization behaves as follows:
+
+1. If there is an expected type, return the original list.
+2. Otherwise, if there exists ´T_i´ that is not a primitive numeric type (`Char`, `Byte`, `Short`, `Int`, `Long`, `Float`, `Double`), return the original list.
+3. Otherwise,
+ 1. Partition the ´e_i´ into the integer literals ´f_j´ and the other expressions ´g_k´.
+ 2. If all the ´g_k´ have the same numeric type ´T´, possibly after widening, and if all the integer literals ´f_j´ can be converted without loss of precision to ´T´, return the list of ´e_i´ where every int literal is converted to ´T´.
+ 3. Otherwise, return the original list.
+
+Harmonization is used in [conditional expressions](#conditional-expressions) and [pattern matches](./08-pattern-matching.html), as well as in [local type inference](#local-type-inference).
+
## Implicit Conversions
Implicit conversions can be applied to expressions whose type does not match their expected type, to qualifiers in selections, and to unapplied methods.
@@ -1063,14 +1105,10 @@ An expression ´e´ of polymorphic type
which does not appear as the function part of a type application is converted to a type instance of ´T´ by determining with [local type inference](#local-type-inference) instance types `´T_1, ..., T_n´` for the type variables `´a_1, ..., a_n´` and implicitly embedding ´e´ in the [type application](#type-applications) `´e´[´T_1, ..., T_n´]`.
-###### Numeric Widening
-If ´e´ has a primitive number type which [weakly conforms](03-types.html#weak-conformance) to the expected type, it is widened to the expected type using one of the numeric conversion methods `toShort`, `toChar`, `toInt`, `toLong`, `toFloat`, `toDouble` defined [in the standard library](12-the-scala-standard-library.html#numeric-value-types).
-
-Since conversions from `Int` to `Float` and from `Long` to `Float` or `Double` may incur a loss of precision, those implicit conversions are deprecated.
-The conversion is permitted for literals if the original value can be recovered, that is, if conversion back to the original type produces the original value.
+###### Numeric Literal Conversion
+If the expected type is `Byte`, `Short`, `Long` or `Char`, and the expression ´e´ is an `Int` literal fitting in the range of that type, it is converted to the same literal in that type.
-###### Numeric Literal Narrowing
-If the expected type is `Byte`, `Short` or `Char`, and the expression ´e´ is an integer literal fitting in the range of that type, it is converted to the same literal in that type.
+Likewise, if the expected type is `Float` or `Double`, and the expression ´e´ is a numeric literal (of any type) fitting in the range of that type, it is converted to the same literal in that type.
###### Value Discarding
If ´e´ has some value type and the expected type is `Unit`, ´e´ is converted to the expected type by embedding it in the term `{ ´e´; () }`.
@@ -1255,7 +1293,7 @@ Solving means finding a substitution ´\sigma´ of types ´T_i´ for the type pa
It is a compile time error if no such substitution exists.
If several substitutions exist, local-type inference will choose for each type variable ´a_i´ a minimal or maximal type ´T_i´ of the solution space.
-A _maximal_ type ´T_i´ will be chosen if the type parameter ´a_i´ appears [contravariantly](04-basic-declarations-and-definitions.html#variance-annotations) in the type ´T´ of the expression.
+A _maximal_ type ´T_i´ will be chosen if the type parameter ´a_i´ appears [contravariantly](04-basic-definitions.html#variance-annotations) in the type ´T´ of the expression.
A _minimal_ type ´T_i´ will be chosen in all other situations, i.e. if the variable appears covariantly, non-variantly or not at all in the type ´T´.
We call such a substitution an _optimal solution_ of the given constraint system for the type ´T´.
diff --git a/docs/_spec/07-implicits.md b/docs/_spec/07-implicits.md
index 2cd80f227cd4..dacc0c0c277e 100644
--- a/docs/_spec/07-implicits.md
+++ b/docs/_spec/07-implicits.md
@@ -49,7 +49,7 @@ However, if such a method misses arguments for its implicit parameters, such arg
The actual arguments that are eligible to be passed to an implicit parameter of type ´T´ fall into two categories.
First, eligible are all identifiers ´x´ that can be accessed at the point of the method call without a prefix and that denote an [implicit definition](#the-implicit-modifier) or an implicit parameter.
-To be accessible without a prefix, an identifier must be a local name, a member of an enclosing template or a name introduced by an [import clause](04-basic-declarations-and-definitions.html#import-clauses).
+To be accessible without a prefix, an identifier must be a local name, a member of an enclosing template or a name introduced by an [import clause](04-basic-definitions.html#import-clauses).
If there are no eligible identifiers under this rule, then, second, eligible are also all `implicit` members of some object that belongs to the implicit scope of the implicit parameter's type, ´T´.
The _implicit scope_ of a type ´T´ consists of all [companion modules](05-classes-and-objects.html#object-definitions) of classes that are associated with the implicit parameter's type.
diff --git a/docs/_spec/08-pattern-matching.md b/docs/_spec/08-pattern-matching.md
index 1d50b814ee24..4a34ae8631c4 100644
--- a/docs/_spec/08-pattern-matching.md
+++ b/docs/_spec/08-pattern-matching.md
@@ -276,7 +276,7 @@ SimplePattern ::= StableId ‘(’ [Patterns ‘,’] [varid ‘@’] ‘_’
```
A _pattern sequence_ ´p_1, ..., p_n´ appears in two contexts.
-First, in a constructor pattern ´c(q_1, ..., q_m, p_1, ..., p_n)´, where ´c´ is a case class which has ´m+1´ primary constructor parameters, ending in a [repeated parameter](04-basic-declarations-and-definitions.html#repeated-parameters) of type `S*`.
+First, in a constructor pattern ´c(q_1, ..., q_m, p_1, ..., p_n)´, where ´c´ is a case class which has ´m+1´ primary constructor parameters, ending in a [repeated parameter](04-basic-definitions.html#repeated-parameters) of type `S*`.
Second, in an extractor pattern ´x(q_1, ..., q_m, p_1, ..., p_n)´ if the extractor object ´x´ does not have an `unapply` method, but it does define an `unapplySeq` method with a result type that is an extractor type for type `(T_1, ... , T_m, Seq[S])` (if `m = 0`, an extractor type for the type `Seq[S]` is also accepted). The expected type for the patterns ´p_i´ is ´S´.
The last pattern in a pattern sequence may be a _sequence wildcard_ `_*`.
@@ -484,9 +484,12 @@ Therefore, the right hand side of the case clause, `y.n`, of type `Int`, is foun
## Pattern Matching Expressions
```ebnf
+ InfixExpr ::= InfixExpr MatchClause
+ SimpleExpr ::= SimpleExpr ‘.’ MatchClause
Expr ::= PostfixExpr ‘match’ ‘{’ CaseClauses ‘}’
CaseClauses ::= CaseClause {CaseClause}
CaseClause ::= ‘case’ Pattern [Guard] ‘=>’ Block
+ ExprCaseClause ::= ‘case’ Pattern [Guard] ‘=>’ Expr
```
A _pattern matching expression_
@@ -518,7 +521,8 @@ If no such bounds can be found, a compile time error results.
If such bounds are found, the pattern matching clause starting with ´p´ is then typed under the assumption that each ´a_i´ has lower bound ´L_i'´ instead of ´L_i´ and has upper bound ´U_i'´ instead of ´U_i´.
The expected type of every block ´b_i´ is the expected type of the whole pattern matching expression.
-The type of the pattern matching expression is then the [weak least upper bound](03-types.html#weak-conformance) of the types of all blocks ´b_i´.
+If there is no expected type, [harmonization](./06-expressions.html#harmonization) is attempted on the list of all blocks ´b_i´.
+The type of the pattern matching expression is then the [least upper bound](03-types.html#least-upper-bounds-and-greatest-lower-bounds) of the types of all blocks ´b_i´ after harmonization.
When applying a pattern matching expression to a selector value, patterns are tried in sequence until one is found which matches the [selector value](#patterns).
Say this case is `case ´p_i \Rightarrow b_i´`.
@@ -595,7 +599,7 @@ If the expected type is [SAM-convertible](06-expressions.html#sam-conversion) to
```
Here, each ´x_i´ is a fresh name.
-As was shown [here](06-expressions.html#anonymous-functions), this anonymous function is in turn equivalent to the following instance creation expression, where ´T´ is the weak least upper bound of the types of all ´b_i´.
+As was shown [here](06-expressions.html#anonymous-functions), this anonymous function is in turn equivalent to the following instance creation expression, where ´T´ is the least upper bound of the types of all ´b_i´.
```scala
new scala.Function´k´[´S_1, ..., S_k´, ´T´] {
@@ -619,7 +623,7 @@ new scala.PartialFunction[´S´, ´T´] {
}
```
-Here, ´x´ is a fresh name and ´T´ is the weak least upper bound of the types of all ´b_i´.
+Here, ´x´ is a fresh name and ´T´ is the least upper bound of the types of all ´b_i´.
The final default case in the `isDefinedAt` method is omitted if one of the patterns ´p_1, ..., p_n´ is already a variable or wildcard pattern.
###### Example
diff --git a/docs/_spec/11-annotations.md b/docs/_spec/11-annotations.md
index 3388d55318ea..3381856d78fb 100644
--- a/docs/_spec/11-annotations.md
+++ b/docs/_spec/11-annotations.md
@@ -17,8 +17,8 @@ Annotations associate meta-information with definitions.
A simple annotation has the form `@´c´` or `@´c(a_1, ..., a_n)´`.
Here, ´c´ is a constructor of a class ´C´, which must conform to the class `scala.Annotation`.
-Annotations may apply to definitions or declarations, types, or expressions.
-An annotation of a definition or declaration appears in front of that definition.
+Annotations may apply to definitions, types, or expressions.
+An annotation of a definition appears in front of that definition.
An annotation of a type appears after that type.
An annotation of an expression ´e´ appears after the expression ´e´, separated by a colon.
More than one annotation clause may apply to an entity.
@@ -86,7 +86,7 @@ def f(x: Option[Int]) = (x: @unchecked) match {
```
Without the `@unchecked` annotation, a Scala compiler could infer that the pattern match is non-exhaustive, and could produce a warning because `Option` is a `sealed` class.
-* `@uncheckedStable` When applied a value declaration or definition, it allows the defined value to appear in a path, even if its type is [volatile](03-types.html#volatile-types).
+* `@uncheckedStable` When applied to a value definition, it allows the defined value to appear in a path, even if its type is [volatile](03-types.html#volatile-types).
For instance, the following member definitions are legal:
```scala
type A { type T }
@@ -97,7 +97,7 @@ val y: x.T // OK since `x' is still a path
Without the `@uncheckedStable` annotation, the designator `x` would not be a path since its type `A with B` is volatile.
Hence, the reference `x.T` would be malformed.
-When applied to value declarations or definitions that have non-volatile types, the annotation has no effect.
+When applied to value definitions that have non-volatile types, the annotation has no effect.
* `@specialized` When applied to the definition of a type parameter, this annotation causes the compiler to generate specialized definitions for primitive types.
An optional list of primitive types may be given, in which case specialization takes into account only those types.
diff --git a/docs/_spec/12-the-scala-standard-library.md b/docs/_spec/12-the-scala-standard-library.md
index 441955df9b4f..df8626b5119c 100644
--- a/docs/_spec/12-the-scala-standard-library.md
+++ b/docs/_spec/12-the-scala-standard-library.md
@@ -12,10 +12,23 @@ Some of these classes are described in the following.
![Class hierarchy of Scala](public/images/classhierarchy.png)
+
+## Fundamental Type Aliases
+
+The `scala` package provides the following fundamental type aliases, which expose to user code some forms of [types](./03-types.html) that cannot otherwise be written:
+
+```scala
+type AnyKind = ´x´ // where ´x´ is the internal AnyKind type
+type Nothing = ´x´ // where ´x´ is the internal Nothing type
+type | = [A, B] =>> A ´|´ B // where | is the internal union type operator
+type & = [A, B] =>> A ´&´ B // where & is the internal intersection type operator
+```
+
## Root Classes
-The root of this hierarchy is formed by class `Any`.
+The root of this hierarchy is formed by class `scala.Any`.
Every class in a Scala execution environment inherits directly or indirectly from this class.
+By definition, `Any` is also the top [proper type](./03-types.html#proper-types).
Class `Any` has two direct subclasses: `AnyRef` and `AnyVal`.
The subclass `AnyRef` represents all values which are represented as objects in the underlying host system.
@@ -304,42 +317,42 @@ case class Tuple´n´[+T_1, ..., +T_n](_1: T_1, ..., _´n´: T_´n´) {
-->
### The `Function` Classes
-For each class type `Function´n´` where ´n = 0, ..., 22´, Scala defines the following function class:
+For each natural ´n \geq 0´, the `scala` package defines the following function class:
```scala
package scala
trait Function´_n´[-´T_1´, ..., -´T_n´, +´R´]:
def apply(´x_1´: ´T_1´, ..., ´x_n´: ´T_n´): ´R´
- override def toString = ""
- def curried: ´T_1´ => ... => ´T_n´ => R = ...
- def tupled: ((´T_1´, ..., ´T_n´)) => R = ...
```
-For function types `Function´n´` where ´n > 22´, Scala defines a unique function class:
+These classes participate in the desugaring of [concrete function types](./03-types.html#function-types).
+For values of ´n \leq 22´, the `Function´_n´` classes define additional methods:
```scala
package scala
-trait FunctionXXL:
- def apply(xs: IArray[Object]): Object
- override def toString = ""
+trait Function´_n´[-´T_1´, ..., -´T_n´, +´R´]:
+ ...
+ override def toString = ""
+ def curried: ´T_1´ => ... => ´T_n´ => R = ...
+ def tupled: ((´T_1´, ..., ´T_n´)) => R = ...
```
-There is no loss of type safety, as the internal representation is still `Function´n´` for all ´n´.
-However this means methods `curried` and `tupled` are not available on functions with more than 22 parameters.
-
The implicitly imported [`Predef`](#the-predef-object) object defines the name
`Function` as an alias of `Function1`.
-
The `PartialFunction` subclass of `Function1` represents functions that (indirectly) specify their domain.
Use the `isDefined` method to query whether the partial function is defined for a given input (i.e., whether the input is part of the function's domain).
```scala
class PartialFunction[-A, +B] extends Function1[A, B] {
def isDefinedAt(x: A): Boolean
+
+ ... // various derived methods
}
```
+`PartialFunction` participates in the desugaring of [pattern matching anonymous functions](08-pattern-matching.html#pattern-matching-anonymous-functions).
+
### Trait `Product`
@@ -349,6 +362,58 @@ All case classes automatically extend the `Product` trait (and generate syntheti
All enum definitions automatically extend the `reflect.Enum` trait (and generate synthetic methods to conform to it).
+### Tuple Classes
+
+Tuples are a form of _HLists_ defined by the following classes:
+
+```scala
+/** Superclass of all tuples. */
+sealed trait Tuple extends Product:
+ /** Return a new tuple by prepending the element to `this` tuple. */
+ inline def *: [H, This >: this.type <: Tuple] (x: H): H *: This = ...
+ ...
+
+object Tuple:
+ /** Type of the element at position N in the tuple X. */
+ type Elem[X <: Tuple, N <: Int] = ...
+ ...
+
+/** A tuple of 0 elements. */
+type EmptyTuple = EmptyTuple.type
+
+/** A tuple of 0 elements. */
+case object EmptyTuple extends Tuple:
+ override def toString(): String = "()"
+
+/** Tuple of arbitrary non-zero arity */
+sealed trait NonEmptyTuple extends Tuple:
+ /** Get the i-th element of this tuple. */
+ inline def apply[This >: this.type <: NonEmptyTuple](n: Int): Elem[This, n.type] = ...
+ ...
+
+sealed abstract class *:[+H, +T <: Tuple] extends NonEmptyTuple
+
+object `*:` :
+ def unapply[H, T <: Tuple](x: H *: T): (H, T) = (x.head, x.tail)
+```
+
+For ´1 \leq n \leq 22´, the concrete implementations of `*:` are instances of `Tuple´_n´` classes, which also implement corresponding `Product´_n´` traits.
+They are defined at least as follows in the standard Scala library (they might also add other methods and implement other traits).
+
+```scala
+trait Product´_n´[+´T_1´, ..., +´T_n´] extends Product:
+ override def productArity: Int = ´n´
+ def _1: ´T_1´
+ ...
+ def _n: ´T_n´
+
+final case class Tuple´_n´[+´T_1´, ..., +´T_n´](_1: ´T_1´, ..., _n: ´T_n´)
+ extends *:[´T_1´, ´T_2´ *: ... *: ´T_n´ *: EmptyTuple]
+ with Product´_n´[´T_1´, ..., ´T_n´]
+```
+
+For ´n > 22´, the concrete implementations of ´*:´ are instances of implementation-specific private classes.
+
### Class `Array`
All operations on arrays desugar to the corresponding operations of the underlying platform.
diff --git a/docs/_spec/TODOreference/changed-features/imports.md b/docs/_spec/APPLIEDreference/changed-features/imports.md
similarity index 100%
rename from docs/_spec/TODOreference/changed-features/imports.md
rename to docs/_spec/APPLIEDreference/changed-features/imports.md
diff --git a/docs/_spec/TODOreference/changed-features/interpolation-escapes.md b/docs/_spec/APPLIEDreference/changed-features/interpolation-escapes.md
similarity index 100%
rename from docs/_spec/TODOreference/changed-features/interpolation-escapes.md
rename to docs/_spec/APPLIEDreference/changed-features/interpolation-escapes.md
diff --git a/docs/_spec/TODOreference/changed-features/match-syntax.md b/docs/_spec/APPLIEDreference/changed-features/match-syntax.md
similarity index 100%
rename from docs/_spec/TODOreference/changed-features/match-syntax.md
rename to docs/_spec/APPLIEDreference/changed-features/match-syntax.md
diff --git a/docs/_spec/TODOreference/changed-features/operators.md b/docs/_spec/APPLIEDreference/changed-features/operators.md
similarity index 100%
rename from docs/_spec/TODOreference/changed-features/operators.md
rename to docs/_spec/APPLIEDreference/changed-features/operators.md
diff --git a/docs/_spec/TODOreference/changed-features/wildcards.md b/docs/_spec/APPLIEDreference/changed-features/wildcards.md
similarity index 100%
rename from docs/_spec/TODOreference/changed-features/wildcards.md
rename to docs/_spec/APPLIEDreference/changed-features/wildcards.md
diff --git a/docs/_spec/TODOreference/contextual/given-imports.md b/docs/_spec/APPLIEDreference/contextual/given-imports.md
similarity index 100%
rename from docs/_spec/TODOreference/contextual/given-imports.md
rename to docs/_spec/APPLIEDreference/contextual/given-imports.md
diff --git a/docs/_spec/APPLIEDreference/dropped-features/existential-types.md b/docs/_spec/APPLIEDreference/dropped-features/existential-types.md
index 6ef815152cd0..a7c491dfb3b3 100644
--- a/docs/_spec/APPLIEDreference/dropped-features/existential-types.md
+++ b/docs/_spec/APPLIEDreference/dropped-features/existential-types.md
@@ -10,7 +10,7 @@ have been dropped. The reasons for dropping them are:
- Existential types violate a type soundness principle on which DOT
and Scala 3 are constructed. That principle says that every
- prefix (`p`, respectvely `S`) of a type selection `p.T` or `S#T`
+ prefix (`p`, respectively `S`) of a type selection `p.T` or `S#T`
must either come from a value constructed at runtime or refer to a
type that is known to have only good bounds.
diff --git a/docs/_spec/APPLIEDreference/dropped-features/nonlocal-returns.md b/docs/_spec/APPLIEDreference/dropped-features/nonlocal-returns.md
new file mode 100644
index 000000000000..b7dae17f5a77
--- /dev/null
+++ b/docs/_spec/APPLIEDreference/dropped-features/nonlocal-returns.md
@@ -0,0 +1,23 @@
+---
+layout: doc-page
+title: "Deprecated: Nonlocal Returns"
+
+nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/nonlocal-returns.html
+---
+
+Returning from nested anonymous functions is deprecated since Scala 3.2.0.
+
+Nonlocal returns are implemented by throwing and catching `scala.runtime.NonLocalReturnException`-s. This is rarely what is intended by the programmer. It can be problematic because of the hidden performance cost of throwing and catching exceptions. Furthermore, it is a leaky implementation: a catch-all exception handler can intercept a `NonLocalReturnException`.
+
+A better alternative to nonlocal returns and also the `scala.util.control.Breaks` API is provided by [`scala.util.boundary` and `boundary.break`](http://dotty.epfl.ch/api/scala/util/boundary$.html).
+
+Example:
+
+```scala
+import scala.util.boundary, boundary.break
+def firstIndex[T](xs: List[T], elem: T): Int =
+ boundary:
+ for (x, i) <- xs.zipWithIndex do
+ if x == elem then break(i)
+ -1
+```
diff --git a/docs/_spec/TODOreference/dropped-features/weak-conformance.md b/docs/_spec/APPLIEDreference/dropped-features/weak-conformance.md
similarity index 91%
rename from docs/_spec/TODOreference/dropped-features/weak-conformance.md
rename to docs/_spec/APPLIEDreference/dropped-features/weak-conformance.md
index b1478326b2c9..03760642293d 100644
--- a/docs/_spec/TODOreference/dropped-features/weak-conformance.md
+++ b/docs/_spec/APPLIEDreference/dropped-features/weak-conformance.md
@@ -44,4 +44,5 @@ Therefore, Scala 3 drops the general notion of weak conformance, and
instead keeps one rule: `Int` literals are adapted to other numeric
types if necessary.
-[More details](weak-conformance-spec.md)
+For more details, see Sections "Types > Weak Conformance" and "Expressions > Harmonization" in the specification.
+TODO Link to the spec when it is published.
diff --git a/docs/_spec/TODOreference/dropped-features/wildcard-init.md b/docs/_spec/APPLIEDreference/dropped-features/wildcard-init.md
similarity index 100%
rename from docs/_spec/TODOreference/dropped-features/wildcard-init.md
rename to docs/_spec/APPLIEDreference/dropped-features/wildcard-init.md
diff --git a/docs/_spec/TODOreference/other-new-features/control-syntax.md b/docs/_spec/APPLIEDreference/other-new-features/control-syntax.md
similarity index 100%
rename from docs/_spec/TODOreference/other-new-features/control-syntax.md
rename to docs/_spec/APPLIEDreference/other-new-features/control-syntax.md
diff --git a/docs/_spec/TODOreference/other-new-features/opaques.md b/docs/_spec/APPLIEDreference/other-new-features/opaques.md
similarity index 87%
rename from docs/_spec/TODOreference/other-new-features/opaques.md
rename to docs/_spec/APPLIEDreference/other-new-features/opaques.md
index d8c4d37bcb3b..e6d614b3931d 100644
--- a/docs/_spec/TODOreference/other-new-features/opaques.md
+++ b/docs/_spec/APPLIEDreference/other-new-features/opaques.md
@@ -176,4 +176,28 @@ l1.mul(x, z) // error: found l2.Logarithm, required l1.Logarithm
```
In general, one can think of an opaque type as being only transparent in the scope of `private[this]`.
-[More details](opaques-details.md)
+## Top-level Opaque Types
+
+An opaque type alias on the top-level is transparent in all other top-level definitions in the source file where it appears, but is opaque in nested
+objects and classes and in all other source files. Example:
+```scala
+// in test1.scala
+opaque type A = String
+val x: A = "abc"
+
+object obj:
+ val y: A = "abc" // error: found: "abc", required: A
+
+// in test2.scala
+def z: String = x // error: found: A, required: String
+```
+This behavior becomes clear if one recalls that top-level definitions are placed in their own synthetic object. For instance, the code in `test1.scala` would expand to
+```scala
+object test1$package:
+ opaque type A = String
+ val x: A = "abc"
+
+object obj:
+ val y: A = "abc" // error: cannot assign "abc" to opaque type alias A
+```
+The opaque type alias `A` is transparent in its scope, which includes the definition of `x`, but not the definitions of `obj` and `y`.
diff --git a/docs/_spec/Dockerfile b/docs/_spec/Dockerfile
index 1fc28081c59f..6f0c349da396 100644
--- a/docs/_spec/Dockerfile
+++ b/docs/_spec/Dockerfile
@@ -1,3 +1,5 @@
+# Keep in sync with relevant parts of .github/workflows/spec.yml
+
FROM ruby:2.7
RUN apt-get install -y curl \
diff --git a/docs/_spec/README.md b/docs/_spec/README.md
index b9eba413f8a2..f8a59e86896c 100644
--- a/docs/_spec/README.md
+++ b/docs/_spec/README.md
@@ -1,6 +1,6 @@
# WIP Scala 3 Language Specification
-**This is still a work in progress, and should *not* be regarded as a source of truth.**
+**This is still a work in progress. There are still Scala 3 features missing, as well as some areas that have not been updated since 2.13 yet.**
First of all, the language specification is meant to be correct, precise and clear.
@@ -25,8 +25,8 @@ To preview locally, run the following commands in the docs/_spec subfolder:
env UID="$(id -u)" GID="$(id -g)" docker-compose up
```
-and open http://0.0.0.0:4000/files/archive/spec/2.13/ to view the spec. Jekyll will rebuild as you edit the markdown, but make sure to restart it when you change `_config.yml`.
-
+ cve{"Is this
+ a fix for a CVE?"}
+ -- yes --> ocb
+
+ subgraph "CVE"
+ ocb{"Does it cause
+ any new failures
+ in the full CB?"}
+
+ -- yes -->
+ regFix[\"Try to provide
+ a followup fix for
+ a regression"/]
+
+ -- failure -->
+ debate[\"Possible workarounds
+ for new regressions are discussed
+ by the compiler team"/]
+
+ regFix -- success --> ocb
+ end
+ ocb -- no --> acc
+ debate -->|"decision on
+ the recommended
+ workarounds"| acc
+
+ cve -- no -->
+ incompat{"Does the fix
+ break forward
+ compatibility?"}
+ -- yes --> reject
+
+ incompat -- no -->
+
+ regression{"Is this a fix for
+ a regression present
+ also in LTS?"}
+ -- yes -->
+
+ regIsLTS{"Was the last version
+ affected by
+ the regression released
+ before 3.3.0?"}
+ -- yes --> ocbReg
+
+ subgraph "LTS Regression"
+ ocbReg{"Does it cause
+ any new failures
+ in the full CB?"}
+
+ -- yes -->
+ regFixReg[\"Try to provide
+ a followup fix for
+ a regression"/]
+
+ -- failure -->
+ debateReg[\"Impact of both new and old regression
+ and possible workarounds
+ are discussed by the compiler team."/]
+
+ regFixReg -- success --> ocbReg
+ end
+ ocbReg -- no --> acc
+ debateReg -->|"decision on
+ the recommended
+ workarounds for
+ the new regression"| acc
+ debateReg -->|"decision on
+ the recommended
+ workarounds for
+ the old regression"| reject
+
+ regression -- no --> types
+ regIsLTS -- no --> types
+ types{"Can the fix
+ change types
+ in any correct
+ Scala 3 code?"}
+ -- yes --> request
+ types -- no --> ocbOther
+
+ request{"Is backport
+ of the fix
+ heavily requested?"}
+ -- yes --> debateReq
+ request -- no --> reject
+
+ debateReq[\"Possibility of
+ the backport is discussed
+ by the compiler team"/]
+ --> |"backport is rejected"| reject
+ debateReq --> |"backport is accepted"| ocbOther
+
+ subgraph "Other Fixes"
+ ocbOther{"Does it cause
+ any new failures
+ in the full CB?"}
+
+ -- yes -->
+ regFixOther[\"Try to provide
+ a followup fix for
+ a regression"/]
+ -- success --> ocbOther
+
+ ocbOther -- no -->
+ lint{"Does it introduce
+ any new warnings
+ behind flags?"}
+ -- yes -->
+ lintOcb{"Does it cause any
+ new failures in the full CB
+ after forcing a new flag?"}
+ -- yes --> regFixOther
+ end
+
+ lint -- no --> acc
+ lintOcb -- no --> acc
+ regFixOther -- failure --> reject
+
+ acc(["The PR is backported"])
+ reject(["The PR is not backported"])
+```
+
+CVE stands for Common Vulnerabilities and Exposures and in the chart above it means an issue with a CVE-ID assigned to it.
+
+CB stands for Community Build, and by full CB we mean the full run of [the Scala 3 Open Community Build](https://github.com/VirtusLab/community-build3).
+
+## How should things be backported?
+
+The backporting process is tracked by [a GitHub Project](https://github.com/orgs/lampepfl/projects/6) in the lampepfl organization. Every PR merged to the `main` branch is automatically added to the `Needs Assessment` column. Those PRs are reviewed by the release officer or other appointed person. They can decide to remove them from the project (backport rejected) or to move them to the `Backporting in progress` column. If the PR with the backport has any differences from the original PR, the person doing the backport will ask the author and reviewers of the original change to review the backport. After merging the backport, the PR will be moved to the `Backport done` column and, after the release, to the `Released` column.
+
+Maintainers can request backporting the entirety or a part of a previously rejected PR by adding it to the `Backport requested` column.
+
+Labels from the `backport:*` family are used only for backports that target versions with already released RCs. They can be used to mark changes on the main branch that fix a critical bug present in the Scala Next RC release or changes that were backported to the future Scala LTS versions that should also be backported to the current RCs.
+
+## The release cycles
+
+Two separate lines of the compiler require two intertwined release cycles.
+
+Scala Next strictly follows a six-week release train model. Every six weeks, a release candidate for the next version is published. During the next six weeks, we may release subsequent RCs containing fixes to critical bugs found in the previous RCs. A bug may be considered critical only if it is a regression; that is, some code that was correctly passing compilation in any earlier version of Scala 3 is now either failing compilation, crashing the compiler, or generating incorrect output (bytecode or TASTy). The compiler team decides which regression is considered a critical bug that requires a new RC and which can be fixed in the next release. After six weeks, the clock resets, the last released RC is promoted to a stable release, and the RC for the next version is published.
+
+If there is less than a week left before the release, and the last RC still contains critical bugs, the compiler team may decide to postpone publishing the stable version. There will always be at least one whole week between publishing the last RC and promoting it to the status of a stable release. This delay doesn't affect the RC1 date for the next version. It will be released six weeks after the previous version's RC1. The goal is to ensure that delay in releasing one version doesn't cause future releases to be larger in terms of the number of merged PRs, as it can make regressions inside of them more complex to pinpoint and fix, leading to the accumulation of delays for future versions.
+
+Scala LTS has a more relaxed release model. RC1 for the next version is published after the stable release of the previous version. Similar to Scala Next, we may release more RCs, fixing bugs. Unlike Scala Next, the bug doesn't need to be considered critical to guarantee the new RC. For Scala LTS, our primary goal is stability, so delays are acceptable. We guarantee that a stable release is at least six weeks after the first RC and at least one week after the last RC.
+
+The two release cycles are not synchronized in any way, as any synchronization would be broken on any delay in the Scala LTS cycle.
+
+The compiler team may pause the release cycles for a week or two on occasions such as New Year or a conference that most of the team is attending.
+
+### What is being released?
+
+For the Scala LTS, what is released as an RC is always the current head of the release branch for the next release.
+
+For the Scala Next minor releases RC, by default, it is the head of the `main` branch. Based on the Open Community Build results, the compiler team may decide to base the release on some earlier state of the branch.
diff --git a/project/ScaladocGeneration.scala b/project/ScaladocGeneration.scala
index fd972311da1d..ade9b65d5445 100644
--- a/project/ScaladocGeneration.scala
+++ b/project/ScaladocGeneration.scala
@@ -97,6 +97,10 @@ object ScaladocGeneration {
def key: String = "-no-link-warnings"
}
+ case class NoLinkAssetWarnings(value: Boolean) extends Arg[Boolean] {
+ def key: String = "-no-link-asset-warnings"
+ }
+
case class VersionsDictionaryUrl(value: String) extends Arg[String] {
def key: String = "-versions-dictionary-url"
}
diff --git a/project/TastyMiMaFilters.scala b/project/TastyMiMaFilters.scala
new file mode 100644
index 000000000000..0d2ed387da33
--- /dev/null
+++ b/project/TastyMiMaFilters.scala
@@ -0,0 +1,93 @@
+import java.util.Arrays.asList
+import tastymima.intf._
+
+object TastyMiMaFilters {
+ val StdlibBootstrapped: java.util.List[ProblemMatcher] = asList(
+ // Probably OK
+ ProblemMatcher.make(ProblemKind.IncompatibleSelfTypeChange, "scala.*"),
+
+ // Probably OK: Case class with varargs
+ ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.StringContext.parts"), // before: scala.[Predef.String]; after: scala.collection.immutable.Seq[Predef.String] @scala.annotation.internal.Repeated
+
+ // Probably OK: ConstantType for `null` versus `scala.Null`
+ // Calls to the default getter seem to link correctly.
+ // Tested in scala2-library-bootstrapped/test/scala/collection/UnrolledBufferTest.scala
+ ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.mutable.UnrolledBuffer.Unrolled.$default$4"),
+
+ // Probably OK: Overriding java method (`public abstract Object underlying();` with `def underlying: Object`)
+ // Calls to the underlying seem to link correctly.
+ // Tested in scala2-library-bootstrapped/test/Main.scala
+ ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.math.Big*.underlying"),
+ ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.math.ScalaNumericConversions.underlying"),
+
+ // Problem: super accessors
+ // In Scala 3 these accessors are added in the `postyper` phase.
+ // In Scala 2 these accessors are added in the `superaccessors` phase after typer.
+ // Are these accessors in the Scala 2 pickles? If so, it implies that TASTy Query/MiMa is ignoring them in Scala 2 but not Scala 3.
+ // Otherwise, if these are not in the Scala 2 pickles, we might need to remove them when compiling with -Ycompile-scala2-library
+ ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.immutable.IndexedSeqOps.superscala$collection$immutable$IndexedSeqOps$$slice"),
+ ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.immutable.StrictOptimizedSeqOps.superscala$collection$immutable$StrictOptimizedSeqOps$$sorted"),
+ ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.immutable.IndexedSeq.superscala$collection$immutable$IndexedSeq$$*"/* sameElements, canEqual */),
+ ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.SortedSetOps.superscala$collection$SortedSetOps$$*"/* min, max */),
+ ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.SortedSet.superscala$collection$SortedSet$$equals"),
+ ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.LinearSeqOps.superscala$collection$LinearSeqOps$$sameElements"),
+ ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.SortedMap.superscala$collection$SortedMap$$equals"),
+ ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.SeqOps.superscala$collection$SeqOps$$*"/* concat, sizeCompare */),
+ ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.BitSetOps.superscala$collection$BitSetOps$$*"/* min, intersect, concat, diff, max */),
+ ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.mutable.Cloneable.superscala$collection$mutable$Cloneable$$clone"), // The member scala.collection.mutable.Cloneable.superscala$collection$mutable$Cloneable$$clone was concrete or did not exist but is abstract in current version
+ ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.util.control.NoStackTrace.superscala$util$control$NoStackTrace$$fillInStackTrace"),
+
+ // TASTy-MiMa bug (probably OK): `private[scala] var` in case class
+ // This is probably because we can only access the next field from the scala library.
+ ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.collection.immutable.::.next$access$1"),
+
+ // Probably OK: Problem Missing setter for `protected var`
+ // All the classes that contain these `protected var`s are private in `collection` or `convert`
+ ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.BinaryTreeStepperBase.index_="),
+ ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.BinaryTreeStepperBase.myCurrent_="),
+ ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.BinaryTreeStepperBase.maxLength_="),
+ ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.BinaryTreeStepperBase.stack_="),
+ ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.ChampStepperBase.maxSize_="), // The member scala.collection.convert.impl.ChampStepperBase.maxSize_= with signature (scala.Int):scala.Unit was concrete or did not exist but is abstract in current version
+ ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.IndexedStepperBase.iN_="),
+ ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.IndexedStepperBase.i0_="),
+ ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.InOrderStepperBase.iN_="),
+ ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.InOrderStepperBase.i0_="),
+ ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.TableStepperBase.i0_="),
+ ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.TableStepperBase.maxLength_="),
+
+ // Problem: ???
+ ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.Predef.nn"), // The member scala.Predef.nn with signature (1,java.lang.Object):java.lang.Object does not have a correspondant in current version
+ ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.Predef.ne"), // The member scala.Predef.ne with signature (java.lang.Object,java.lang.Object):scala.Boolean does not have a correspondant in current version
+ ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.Predef.eq"), // The member scala.Predef.eq with signature (java.lang.Object,java.lang.Object):scala.Boolean does not have a correspondant in current version
+
+ // Problem: protected lazy val (processThread, (futureThread, futureValue), destroyer) = { ... }
+ // https://github.com/scala/scala/blob/cff8a9af4da67658d8e1e32f929e1aff03ffa384/src/library/scala/sys/process/ProcessImpl.scala#L99C5-L99C83
+ ProblemMatcher.make(ProblemKind.IncompatibleKindChange, "scala.sys.process.ProcessImpl.CompoundProcess.destroyer"), // before: lazy val; after: def
+ ProblemMatcher.make(ProblemKind.IncompatibleKindChange, "scala.sys.process.ProcessImpl.CompoundProcess.futureThread"), // before: lazy val; after: def
+ ProblemMatcher.make(ProblemKind.IncompatibleKindChange, "scala.sys.process.ProcessImpl.CompoundProcess.processThread"), // before: lazy val; after: def
+ ProblemMatcher.make(ProblemKind.IncompatibleKindChange, "scala.sys.process.ProcessImpl.CompoundProcess.futureValue"), // before: lazy val; after: def
+
+ // Problem?
+ // https://github.com/scala/scala/blob/2.13.x/src/library/scala/collection/convert/JavaCollectionWrappers.scala#L66-L71
+ ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.collection.convert.JavaCollectionWrappers.IterableWrapperTrait.iterator"), // The member scala.collection.convert.JavaCollectionWrappers.IterableWrapperTrait.iterator with signature ():scala.collection.convert.JavaCollectionWrappers.IteratorWrapper does not have a correspondant in current version
+
+ // Problem?
+ // https://github.com/scala/scala/blob/2.13.x/src/library/scala/collection/mutable/ArrayBuilder.scala#L504C1-L504C87
+ ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.collection.mutable.ArrayBuilder.ofUnit.addAll"), // The member scala.collection.mutable.ArrayBuilder.ofUnit.addAll with signature (java.lang.Object,scala.Int,scala.Int):scala.collection.mutable.ArrayBuilder$.ofUnit does not have a correspondant in current version
+
+ // Probably OK (TASTy MiMa bug): Patched Predef members
+ ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.Predef.valueOf"), // The member scala.Predef.valueOf with signature (1):java.lang.Object does not have a correspondant in current version
+ ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.Predef.summon"), // The member scala.Predef.summon with signature (1,java.lang.Object):java.lang.Object does not have a correspondant in current version
+
+ // TASTy-MiMa bugs
+ ProblemMatcher.make(ProblemKind.InternalError, "scala.collection.SeqView.appendedAll"),
+ ProblemMatcher.make(ProblemKind.InternalError, "scala.collection.SeqView.concat"),
+ ProblemMatcher.make(ProblemKind.InternalError, "scala.collection.SeqView.prependedAll"),
+ ProblemMatcher.make(ProblemKind.InternalError, "scala.concurrent.duration.package.*"),
+
+ // Problems introduced in 2.13.11: Implicit classes with complex signatures
+ ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.BuildFromLowPriority1.buildFromSortedSetOps"), // The symbol scala.collection.BuildFromLowPriority1.buildFromSortedSetOps has an incompatible type in current version: before: [CC <: ([X] =>> (scala.collection.SortedSet[X] & scala.collection.SortedSetOps[X, CC, ?])), A0, A](evidence$3: scala.package.Ordering[A])scala.collection.BuildFrom[(CC[A0] & scala.collection.SortedSet[A0]), A, (CC[A] & scala.collection.SortedSet[A])]; after: [CC >: ([X] =>> scala.Nothing) <: ([X] =>> scala.&[scala.collection.SortedSet[X], scala.collection.SortedSetOps[X, CC, ?]]), A0, A](evidence$3: scala.package.Ordering[A])scala.collection.BuildFrom[scala.&[CC[A0], scala.collection.SortedSet[A0]], A, scala.&[CC[A], scala.collection.SortedSet[A]]]
+ ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.BuildFrom.buildFromMapOps"), // The symbol scala.collection.BuildFrom.buildFromMapOps has an incompatible type in current version: before: [CC <: ([X, Y] =>> (scala.collection.Map[X, Y] & scala.collection.MapOps[X, Y, CC, ?])), K0, V0, K, V]scala.collection.BuildFrom[(CC[K0, V0] & scala.collection.Map[K0, V0]), scala.Tuple2[K, V], (CC[K, V] & scala.collection.Map[K, V])]; after: [CC >: ([X, Y] =>> scala.Nothing) <: ([X, Y] =>> scala.&[scala.collection.Map[X, Y], scala.collection.MapOps[X, Y, CC, ?]]), K0, V0, K, V]scala.collection.BuildFrom[scala.&[CC[K0, V0], scala.collection.Map[K0, V0]], scala.Tuple2[K, V], scala.&[CC[K, V], scala.collection.Map[K, V]]]
+ ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.BuildFrom.buildFromSortedMapOps"), // The symbol scala.collection.BuildFrom.buildFromSortedMapOps has an incompatible type in current version: before: [CC <: ([X, Y] =>> (scala.collection.SortedMap[X, Y] & scala.collection.SortedMapOps[X, Y, CC, ?])), K0, V0, K, V](evidence$1: scala.package.Ordering[K])scala.collection.BuildFrom[(CC[K0, V0] & scala.collection.SortedMap[K0, V0]), scala.Tuple2[K, V], (CC[K, V] & scala.collection.SortedMap[K, V])]; after: [CC >: ([X, Y] =>> scala.Nothing) <: ([X, Y] =>> scala.&[scala.collection.SortedMap[X, Y], scala.collection.SortedMapOps[X, Y, CC, ?]]), K0, V0, K, V](evidence$1: scala.package.Ordering[K])scala.collection.BuildFrom[scala.&[CC[K0, V0], scala.collection.SortedMap[K0, V0]], scala.Tuple2[K, V], scala.&[CC[K, V], scala.collection.SortedMap[K, V]]]
+ )
+}
diff --git a/project/build.properties b/project/build.properties
index 46e43a97ed86..52413ab79a18 100644
--- a/project/build.properties
+++ b/project/build.properties
@@ -1 +1 @@
-sbt.version=1.8.2
+sbt.version=1.9.3
diff --git a/project/plugins.sbt b/project/plugins.sbt
index ccbcdeed22fc..c94d4d5afe8d 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -8,14 +8,16 @@ libraryDependencySchemes +=
addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.12.0")
-addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.20")
+addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.21")
addSbtPlugin("com.github.sbt" % "sbt-pgp" % "2.2.1")
addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.17")
-addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3")
+addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.5")
addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0")
addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.2")
+
+addSbtPlugin("ch.epfl.scala" % "sbt-tasty-mima" % "1.0.0")
diff --git a/project/scripts/addToBackportingProject.scala b/project/scripts/addToBackportingProject.scala
new file mode 100644
index 000000000000..2c1929972791
--- /dev/null
+++ b/project/scripts/addToBackportingProject.scala
@@ -0,0 +1,84 @@
+//> using scala 3.3.1
+//> using toolkit 0.2.1
+//> using lib pro.kordyjan::pytanie:0.1.7
+
+import pytanie.*
+import sttp.client4.*
+
+lazy val apiToken =
+ System.getenv("GRAPHQL_API_TOKEN")
+
+case class ID(value: String) derives WrapperVariable
+
+val PROJECT_ID = ID("PVT_kwDOACj3ec4AWSoi")
+val FIELD_ID = ID("PVTF_lADOACj3ec4AWSoizgO7uJ4")
+
+@main def run(commitSha: String) =
+ val (id, date) = getPrData(commitSha)
+ val newId = addItem(id)
+ timestampItem(newId, date)
+
+def getPrData(commitSha: String): (ID, String) =
+ val res = query"""
+ |query prForCommit {
+ | repository(owner:"lampepfl", name:"dotty") {
+ | object(expression: $commitSha){
+ | __typename
+ | ... on Commit {
+ | associatedPullRequests(first: 1) {
+ | nodes {
+ | number
+ | id
+ | mergedAt
+ | }
+ | }
+ | }
+ | }
+ | }
+ |}
+ """.send(
+ uri"https://api.github.com/graphql",
+ "DummyUser",
+ apiToken
+ )
+ val pr = res.repository.`object`.asCommit.get.associatedPullRequests.nodes.head
+ (ID(pr.id), pr.mergedAt)
+
+def timestampItem(id: ID, date: String) =
+ query"""
+ |mutation editField {
+ | updateProjectV2ItemFieldValue(input: {
+ | projectId: $PROJECT_ID,
+ | itemId: $id,
+ | fieldId: $FIELD_ID,
+ | value: { text: $date }
+ | }) {
+ | projectV2Item {
+ | updatedAt
+ | }
+ | }
+ |}
+ """.send(
+ uri"https://api.github.com/graphql",
+ "DummyUser",
+ apiToken
+ )
+
+def addItem(id: ID) =
+ val res = query"""
+ |mutation addItem {
+ | addProjectV2ItemById(input: {
+ | projectId: $PROJECT_ID,
+ | contentId: $id
+ | }) {
+ | item {
+ | id
+ | }
+ | }
+ |}
+ """.send(
+ uri"https://api.github.com/graphql",
+ "DummyUser",
+ apiToken
+ )
+ ID(res.addProjectV2ItemById.item.id)
diff --git a/project/scripts/bisect.scala b/project/scripts/bisect.scala
index 2e554a885c79..dbb14f2c4587 100755
--- a/project/scripts/bisect.scala
+++ b/project/scripts/bisect.scala
@@ -235,10 +235,10 @@ class CommitBisect(validationScript: File, shouldFail: Boolean, bootstrapped: Bo
val scala3CompilerProject = if bootstrapped then "scala3-compiler-bootstrapped" else "scala3-compiler"
val scala3Project = if bootstrapped then "scala3-bootstrapped" else "scala3"
val validationCommandStatusModifier = if shouldFail then "! " else "" // invert the process status if failure was expected
- val bisectRunScript = s"""
+ val bisectRunScript = raw"""
|scalaVersion=$$(sbt "print ${scala3CompilerProject}/version" | tail -n1)
|rm -r out
- |sbt "clean; ${scala3Project}/publishLocal"
+ |sbt "clean; set every doc := new File(\"unused\"); set scaladoc/Compile/resourceGenerators := (\`${scala3Project}\`/Compile/resourceGenerators).value; ${scala3Project}/publishLocal"
|${validationCommandStatusModifier}${validationScript.getAbsolutePath} "$$scalaVersion"
""".stripMargin
"git bisect start".!
diff --git a/project/scripts/check-cla.sh b/project/scripts/check-cla.sh
index 1a91363f5079..e4e489830f11 100755
--- a/project/scripts/check-cla.sh
+++ b/project/scripts/check-cla.sh
@@ -2,7 +2,7 @@
set -eux
echo "Pull request submitted by $AUTHOR";
-if [ "$AUTHOR" = "github-actions[bot]" ] ; then
+if [[ "$AUTHOR" == "github-actions[bot]" || "$AUTHOR" == "dependabot[bot]" ]] ; then
echo "CLA check for $AUTHOR successful";
else
signed=$(curl -s "https://www.lightbend.com/contribute/cla/scala/check/$AUTHOR" | jq -r ".signed");
diff --git a/project/scripts/cmdScaladocTests b/project/scripts/cmdScaladocTests
index 2168e3e8e334..e9403d988b98 100755
--- a/project/scripts/cmdScaladocTests
+++ b/project/scripts/cmdScaladocTests
@@ -23,8 +23,8 @@ SOURCE_LINKS_VERSION="${GITHUB_SHA:-$DOTTY_BOOTSTRAPPED_VERSION}"
dist/target/pack/bin/scaladoc \
-d "$OUT1" \
-project "scaladoc testcases" \
- -source-links:out/bootstrap/stdlib-bootstrapped/scala-"${DOTTY_NONBOOTSTRAPPED_VERSION}"/src_managed/main/scala-library-src=github://scala/scala/v"${STDLIB_VERSION}"#src/library \
- -source-links:out/bootstrap/stdlib-bootstrapped/scala-"${DOTTY_NONBOOTSTRAPPED_VERSION}"/src_managed/main/dotty-library-src=github://"${SOURCE_LINKS_REPOSITORY}"/"${SOURCE_LINKS_VERSION}"\#library/src \
+ -source-links:out/bootstrap/scala2-library-bootstrapped/scala-"${DOTTY_NONBOOTSTRAPPED_VERSION}"/src_managed/main/scala-library-src=github://scala/scala/v"${STDLIB_VERSION}"#src/library \
+ -source-links:out/bootstrap/scala2-library-bootstrapped/scala-"${DOTTY_NONBOOTSTRAPPED_VERSION}"/src_managed/main/dotty-library-src=github://"${SOURCE_LINKS_REPOSITORY}"/"${SOURCE_LINKS_VERSION}"\#library/src \
-source-links:github://"${SOURCE_LINKS_REPOSITORY}"/"${SOURCE_LINKS_VERSION}" \
"-external-mappings:.*scala/.*::scaladoc3::https://dotty.epfl.ch/api/,.*java/.*::javadoc::https://docs.oracle.com/javase/8/docs/api/" \
"-skip-by-regex:.+\.internal($|\..+)" \
@@ -37,7 +37,7 @@ dist/target/pack/bin/scaladoc \
"-snippet-compiler:scaladoc-testcases/docs=compile" \
"-comment-syntax:scaladoc-testcases/src/example/comment-md=markdown,scaladoc-testcases/src/example/comment-wiki=wiki" \
-siteroot scaladoc-testcases/docs \
- -project-footer "Copyright (c) 2002-2023, LAMP/EPFL" \
+ -project-footer "Copyright (c) 2002-$(date +%Y), LAMP/EPFL" \
-default-template static-site-main \
-author -groups -revision main -project-version "${DOTTY_BOOTSTRAPPED_VERSION}" \
"-quick-links:Learn::https://docs.scala-lang.org/,Install::https://www.scala-lang.org/download/,Playground::https://scastie.scala-lang.org,Find A Library::https://index.scala-lang.org,Community::https://www.scala-lang.org/community/,Blog::https://www.scala-lang.org/blog/," \
diff --git a/project/scripts/cmdTests b/project/scripts/cmdTests
index 3405c06b056f..453590084b00 100755
--- a/project/scripts/cmdTests
+++ b/project/scripts/cmdTests
@@ -25,9 +25,9 @@ grep -qe "$EXPECTED_OUTPUT" "$tmp"
echo "testing sbt scalac -print-tasty"
clear_out "$OUT"
-"$SBT" ";scalac $SOURCE -d $OUT ;scalac -print-tasty -color:never $TASTY" > "$tmp"
+"$SBT" ";scalac $SOURCE -d $OUT ;scalac -print-tasty -color:never $OUT/$TASTY" > "$tmp"
grep -qe "0: ASTs" "$tmp"
-grep -qe "0: tests/pos/HelloWorld.scala" "$tmp"
+grep -qe "0: 41 \[tests/pos/HelloWorld.scala\]" "$tmp"
echo "testing that paths SourceFile annotations are relativized"
clear_out "$OUT"
diff --git a/project/scripts/scala2-library-tasty-mima.sh b/project/scripts/scala2-library-tasty-mima.sh
new file mode 100755
index 000000000000..7118ee28c2f3
--- /dev/null
+++ b/project/scripts/scala2-library-tasty-mima.sh
@@ -0,0 +1,23 @@
+#!/usr/bin/env bash
+set -eux
+
+source $(dirname $0)/cmdTestsCommon.inc.sh
+
+TASTY_FROMAT_FILE="tasty/src/dotty/tools/tasty/TastyFormat.scala"
+MINOR_TASTY_VERSION_SUPPORTED_BY_TASTY_MIMA=3
+MINOR_TASTY_VERSION=$(grep -oE 'val MinorVersion: Int = ([0-9]+)' $TASTY_FROMAT_FILE | grep -oE '[0-9]+')
+EXPERIMENTAL_TASTY_VERSION=$(grep -oE 'val ExperimentalVersion: Int = ([0-9]+)' $TASTY_FROMAT_FILE | grep -oE '[0-9]+')
+
+setTastyVersion() {
+ sed -i -E -e "s/val MinorVersion: Int = [0-9]+/val MinorVersion: Int = $1/" -e "s/val ExperimentalVersion: Int = [0-9]+/val ExperimentalVersion: Int = $2/" $TASTY_FROMAT_FILE
+}
+
+setTastyVersion $MINOR_TASTY_VERSION_SUPPORTED_BY_TASTY_MIMA 0
+
+# Run scala2-library-bootstrapped/tastyMiMaReportIssues using a custom TASTy version.
+# We clean before to make sure all sources are recompiled using the new TASTY version.
+# We clean after to make sure no other test will use the TASTy generated with this version.
+# We set -Ycheck:all to check that -Ycompile-scala2-library does not generate inconsistent trees.
+"$SBT" 'clean; scala2-library-bootstrapped/clean; reload; set `scala2-library-bootstrapped`/scalacOptions += "-Ycheck:all"; scala2-library-bootstrapped/tastyMiMaReportIssues; clean; scala2-library-bootstrapped/clean'
+
+setTastyVersion $MINOR_TASTY_VERSION $EXPERIMENTAL_TASTY_VERSION
diff --git a/sbt-bridge/src/dotty/tools/xsbt/Action.java b/sbt-bridge/src/dotty/tools/xsbt/Action.java
new file mode 100644
index 000000000000..2a1818fef78c
--- /dev/null
+++ b/sbt-bridge/src/dotty/tools/xsbt/Action.java
@@ -0,0 +1,28 @@
+package dotty.tools.xsbt;
+
+import java.util.Optional;
+
+final public class Action implements xsbti.Action {
+ private final String _title;
+ private final Optional _description;
+ private final WorkspaceEdit _edit;
+
+ public Action(String title, Optional description, WorkspaceEdit edit) {
+ super();
+ this._title = title;
+ this._description = description;
+ this._edit = edit;
+ }
+
+ public String title() {
+ return _title;
+ }
+
+ public Optional description() {
+ return _description;
+ }
+
+ public WorkspaceEdit edit() {
+ return _edit;
+ }
+}
diff --git a/sbt-bridge/src/dotty/tools/xsbt/CompilerBridge.java b/sbt-bridge/src/dotty/tools/xsbt/CompilerBridge.java
index 92b8062700c4..6e2095a9df1e 100644
--- a/sbt-bridge/src/dotty/tools/xsbt/CompilerBridge.java
+++ b/sbt-bridge/src/dotty/tools/xsbt/CompilerBridge.java
@@ -19,6 +19,6 @@ public final class CompilerBridge implements CompilerInterface2 {
public void run(VirtualFile[] sources, DependencyChanges changes, String[] options, Output output,
AnalysisCallback callback, Reporter delegate, CompileProgress progress, Logger log) {
CompilerBridgeDriver driver = new CompilerBridgeDriver(options, output);
- driver.run(sources, callback, log, delegate);
+ driver.run(sources, callback, log, delegate, progress);
}
}
diff --git a/sbt-bridge/src/dotty/tools/xsbt/CompilerBridgeDriver.java b/sbt-bridge/src/dotty/tools/xsbt/CompilerBridgeDriver.java
index 12291120b157..2d54d4e83404 100644
--- a/sbt-bridge/src/dotty/tools/xsbt/CompilerBridgeDriver.java
+++ b/sbt-bridge/src/dotty/tools/xsbt/CompilerBridgeDriver.java
@@ -10,15 +10,26 @@
import dotty.tools.dotc.ScalacCommand;
import dotty.tools.dotc.config.Properties;
import dotty.tools.dotc.core.Contexts;
+import dotty.tools.dotc.util.SourceFile;
import dotty.tools.io.AbstractFile;
+import dotty.tools.io.PlainFile;
+import dotty.tools.io.Path;
+import dotty.tools.io.Streamable;
import scala.collection.mutable.ListBuffer;
+import scala.jdk.javaapi.CollectionConverters;
import scala.io.Codec;
import xsbti.Problem;
import xsbti.*;
import xsbti.compile.Output;
+import xsbti.compile.CompileProgress;
import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
import java.util.Comparator;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
import java.util.Arrays;
public class CompilerBridgeDriver extends Driver {
@@ -50,15 +61,72 @@ public boolean sourcesRequired() {
return false;
}
- synchronized public void run(VirtualFile[] sources, AnalysisCallback callback, Logger log, Reporter delegate) {
- DelegatingReporter reporter = new DelegatingReporter(delegate);
+ private static VirtualFile asVirtualFile(SourceFile sourceFile, DelegatingReporter reporter,
+ HashMap lookup) {
+ return lookup.computeIfAbsent(sourceFile.file(), path -> {
+ reportMissingFile(reporter, sourceFile);
+ if (sourceFile.file().jpath() != null)
+ return new FallbackPathBasedFile(sourceFile);
+ else
+ return new FallbackVirtualFile(sourceFile);
+ });
+ }
+
+ private static void reportMissingFile(DelegatingReporter reporter, SourceFile sourceFile) {
+ String underline = String.join("", Collections.nCopies(sourceFile.path().length(), "^"));
+ String message =
+ sourceFile.path() + ": Missing Zinc virtual file\n" +
+ underline + "\n" +
+ " Falling back to placeholder for the given source file (of class " + sourceFile.getClass().getName() + ")\n" +
+ " This is likely a bug in incremental compilation for the Scala 3 compiler.\n" +
+ " Please report it to the Scala 3 maintainers at https://github.com/lampepfl/dotty/issues.";
+ reporter.reportBasicWarning(message);
+ }
+
+ synchronized public void run(
+ VirtualFile[] sources, AnalysisCallback callback, Logger log, Reporter delegate, CompileProgress progress) {
+ VirtualFile[] sortedSources = new VirtualFile[sources.length];
+ System.arraycopy(sources, 0, sortedSources, 0, sources.length);
+ Arrays.sort(sortedSources, (x0, x1) -> x0.id().compareTo(x1.id()));
+
+ ListBuffer sourcesBuffer = new ListBuffer<>();
+ HashMap lookup = new HashMap<>(sources.length, 0.25f);
+
+ for (int i = 0; i < sources.length; i++) {
+ VirtualFile source = sortedSources[i];
+ AbstractFile abstractFile = asDottyFile(source);
+ sourcesBuffer.append(abstractFile);
+ lookup.put(abstractFile, source);
+ }
+
+ DelegatingReporter reporter = new DelegatingReporter(delegate, sourceFile -> {
+ // TODO: possible situation here where we use -from-tasty and TASTy source files but
+ // the reporter log is associated to a Scala source file?
+
+ // Zinc will use the output of this function to possibly lookup a mapped virtual file,
+ // e.g. convert `${ROOT}/Foo.scala` to `/path/to/Foo.scala` if it exists in the lookup map.
+ VirtualFile vf = lookup.get(sourceFile.file());
+ if (vf != null)
+ return vf.id();
+ else
+ // follow Zinc, which uses the path of the source file as a fallback.
+ return sourceFile.path();
+ });
+
+ ProgressCallbackImpl progressCallback = new ProgressCallbackImpl(progress);
+
+ IncrementalCallback incCallback = new IncrementalCallback(callback, sourceFile ->
+ asVirtualFile(sourceFile, reporter, lookup)
+ );
+
try {
log.debug(this::infoOnCachedCompiler);
Contexts.Context initialCtx = initCtx()
.fresh()
.setReporter(reporter)
- .setSbtCallback(callback);
+ .setIncCallback(incCallback)
+ .setProgressCallback(progressCallback);
Contexts.Context context = setup(args, initialCtx).map(t -> t._2).getOrElse(() -> initialCtx);
@@ -70,28 +138,28 @@ synchronized public void run(VirtualFile[] sources, AnalysisCallback callback, L
log.debug(this::prettyPrintCompilationArguments);
Compiler compiler = newCompiler(context);
- VirtualFile[] sortedSources = new VirtualFile[sources.length];
- System.arraycopy(sources, 0, sortedSources, 0, sources.length);
- Arrays.sort(
- sortedSources,
- new Comparator() {
- @Override
- public int compare(VirtualFile x0, VirtualFile x1) {
- return x0.id().compareTo(x1.id());
- }
- }
- );
-
- ListBuffer sourcesBuffer = new ListBuffer<>();
- for (VirtualFile file: sortedSources)
- sourcesBuffer.append(asDottyFile(file));
doCompile(compiler, sourcesBuffer.toList(), context);
for (xsbti.Problem problem: delegate.problems()) {
- callback.problem(problem.category(), problem.position(), problem.message(), problem.severity(),
- true);
+ try {
+ AnalysisCallback2 callback2 = (AnalysisCallback2)callback;
+ callback2.problem2(
+ problem.category(),
+ problem.position(),
+ problem.message(),
+ problem.severity(),
+ true, // reported
+ problem.rendered(),
+ problem.diagnosticCode(),
+ problem.diagnosticRelatedInformation(),
+ problem.actions()
+ );
+ } catch (NoClassDefFoundError e) {
+ callback.problem(problem.category(), problem.position(), problem.message(), problem.severity(),
+ true);
+ }
}
- } else {
+ } else {
delegate.printSummary();
}
@@ -105,11 +173,28 @@ public int compare(VirtualFile x0, VirtualFile x1) {
}
private static AbstractFile asDottyFile(VirtualFile virtualFile) {
- if (virtualFile instanceof PathBasedFile)
- return new ZincPlainFile((PathBasedFile) virtualFile);
+ if (virtualFile instanceof PathBasedFile) {
+ java.nio.file.Path path = ((PathBasedFile) virtualFile).toPath();
+ return new PlainFile(new Path(path));
+ }
try {
- return new ZincVirtualFile(virtualFile);
+ return new dotty.tools.io.VirtualFile(virtualFile.name(), virtualFile.id()) {
+ {
+ // fill in the content
+ try (OutputStream output = output()) {
+ try (InputStream input = virtualFile.input()) {
+ Streamable.Bytes bytes = new Streamable.Bytes() {
+ @Override
+ public InputStream inputStream() {
+ return input;
+ }
+ };
+ output.write(bytes.toByteArray());
+ }
+ }
+ }
+ };
} catch (IOException e) {
throw new IllegalArgumentException("invalid file " + virtualFile.name(), e);
}
diff --git a/sbt-bridge/src/dotty/tools/xsbt/DelegatingReporter.java b/sbt-bridge/src/dotty/tools/xsbt/DelegatingReporter.java
index 25b934000144..3bcff72601a7 100644
--- a/sbt-bridge/src/dotty/tools/xsbt/DelegatingReporter.java
+++ b/sbt-bridge/src/dotty/tools/xsbt/DelegatingReporter.java
@@ -3,11 +3,15 @@
*/
package dotty.tools.xsbt;
+import java.util.List;
+
import scala.Tuple2;
import scala.collection.mutable.HashMap;
+import scala.jdk.javaapi.CollectionConverters;
import dotty.tools.dotc.core.Contexts.Context;
import dotty.tools.dotc.reporting.AbstractReporter;
+import dotty.tools.dotc.reporting.CodeAction;
import dotty.tools.dotc.reporting.Diagnostic;
import dotty.tools.dotc.reporting.Message;
import dotty.tools.dotc.util.SourceFile;
@@ -15,12 +19,21 @@
import xsbti.Position;
import xsbti.Severity;
+import java.util.Collections;
+import java.util.function.*;
+
final public class DelegatingReporter extends AbstractReporter {
private xsbti.Reporter delegate;
- public DelegatingReporter(xsbti.Reporter delegate) {
+ // A function that can lookup the `id` of the VirtualFile
+ // associated with a SourceFile. If there is not an associated virtual file,
+ // then it is the path of the SourceFile as a String.
+ private final Function lookupVirtualFileId;
+
+ public DelegatingReporter(xsbti.Reporter delegate, Function lookupVirtualFileId) {
super();
this.delegate = delegate;
+ this.lookupVirtualFileId = lookupVirtualFileId;
}
public void dropDelegate() {
@@ -35,20 +48,25 @@ public void printSummary(Context ctx) {
public void doReport(Diagnostic dia, Context ctx) {
Severity severity = severityOf(dia.level());
Position position = positionOf(dia.pos().nonInlined());
-
- StringBuilder rendered = new StringBuilder();
- rendered.append(messageAndPos(dia, ctx));
Message message = dia.msg();
- StringBuilder messageBuilder = new StringBuilder();
- messageBuilder.append(message.message());
+ String text;
+ if (Diagnostic.shouldExplain(dia, ctx) && !message.explanation().isEmpty())
+ text = message.message() + System.lineSeparator() + explanation(message, ctx);
+ else
+ text = message.message();
+ String rendered = messageAndPos(dia, ctx);
String diagnosticCode = String.valueOf(message.errorId().errorNumber());
- boolean shouldExplain = Diagnostic.shouldExplain(dia, ctx);
- if (shouldExplain && !message.explanation().isEmpty()) {
- rendered.append(explanation(message, ctx));
- messageBuilder.append(System.lineSeparator()).append(explanation(message, ctx));
- }
+ List actions = CollectionConverters.asJava(message.actions(ctx));
+ Problem problem = new Problem(position, text, severity, rendered, diagnosticCode, actions, lookupVirtualFileId);
+ delegate.log(problem);
+ }
- delegate.log(new Problem(position, messageBuilder.toString(), severity, rendered.toString(), diagnosticCode));
+ public void reportBasicWarning(String message) {
+ Position position = PositionBridge.noPosition;
+ Severity severity = Severity.Warn;
+ String diagnosticCode = "-1"; // no error code
+ List actions = Collections.emptyList();
+ delegate.log(new Problem(position, message, severity, message, diagnosticCode, actions, lookupVirtualFileId));
}
private static Severity severityOf(int level) {
@@ -63,9 +81,9 @@ private static Severity severityOf(int level) {
return severity;
}
- private static Position positionOf(SourcePosition pos) {
- if (pos.exists()){
- return new PositionBridge(pos, pos.source());
+ private Position positionOf(SourcePosition pos) {
+ if (pos.exists()) {
+ return new PositionBridge(pos, lookupVirtualFileId.apply(pos.source()));
} else {
return PositionBridge.noPosition;
}
diff --git a/sbt-bridge/src/dotty/tools/xsbt/FallbackPathBasedFile.java b/sbt-bridge/src/dotty/tools/xsbt/FallbackPathBasedFile.java
new file mode 100644
index 000000000000..28c2170d2b50
--- /dev/null
+++ b/sbt-bridge/src/dotty/tools/xsbt/FallbackPathBasedFile.java
@@ -0,0 +1,20 @@
+package dotty.tools.xsbt;
+
+import dotty.tools.dotc.util.SourceFile;
+
+/**A basic implementation of PathBasedFile that is only used when
+ * the real virtual file can not be found.
+ *
+ * See FallbackVirtualFile for more details.
+ */
+public class FallbackPathBasedFile extends FallbackVirtualFile implements xsbti.PathBasedFile {
+
+ public FallbackPathBasedFile(SourceFile sourceFile) {
+ super(sourceFile);
+ }
+
+ public java.nio.file.Path toPath() {
+ return sourceFile.file().jpath();
+ }
+
+}
diff --git a/sbt-bridge/src/dotty/tools/xsbt/FallbackVirtualFile.java b/sbt-bridge/src/dotty/tools/xsbt/FallbackVirtualFile.java
new file mode 100644
index 000000000000..6fcb6ef73e1f
--- /dev/null
+++ b/sbt-bridge/src/dotty/tools/xsbt/FallbackVirtualFile.java
@@ -0,0 +1,36 @@
+package dotty.tools.xsbt;
+
+import dotty.tools.dotc.util.SourceFile;
+import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
+
+/**A basic implementation of VirtualFile that is only used when
+ * the real virtual file can not be found.
+ *
+ * This has a very basic implementation of contentHash that is almost certainly colliding more than the implementation
+ * in Zinc. It does not matter anyway as Zinc will recompile the associated source file, because it did not recieve the
+ * same virtual file back.
+ */
+public class FallbackVirtualFile extends xsbti.BasicVirtualFileRef implements xsbti.VirtualFile {
+
+ protected final SourceFile sourceFile;
+
+ public FallbackVirtualFile(SourceFile sourceFile) {
+ super(sourceFile.path());
+ this.sourceFile = sourceFile;
+ }
+
+ private static byte[] toBytes(char[] chars) {
+ return new String(chars).getBytes(StandardCharsets.UTF_8);
+ }
+
+ public InputStream input() {
+ return new java.io.ByteArrayInputStream(toBytes(sourceFile.content()));
+ }
+
+ public long contentHash() {
+ int murmurHash3 = scala.util.hashing.MurmurHash3$.MODULE$.bytesHash(toBytes(sourceFile.content()));
+ return (long) murmurHash3;
+ }
+
+}
diff --git a/sbt-bridge/src/dotty/tools/xsbt/IncrementalCallback.java b/sbt-bridge/src/dotty/tools/xsbt/IncrementalCallback.java
new file mode 100644
index 000000000000..3c3d33c1c1fe
--- /dev/null
+++ b/sbt-bridge/src/dotty/tools/xsbt/IncrementalCallback.java
@@ -0,0 +1,60 @@
+package dotty.tools.xsbt;
+
+import dotty.tools.dotc.util.SourceFile;
+import java.util.function.Function;
+
+public final class IncrementalCallback implements dotty.tools.dotc.sbt.interfaces.IncrementalCallback {
+
+ private final xsbti.AnalysisCallback delegate;
+ private final Function asVirtualFile;
+
+ public IncrementalCallback(xsbti.AnalysisCallback delegate, Function asVirtualFile) {
+ this.delegate = delegate;
+ this.asVirtualFile = asVirtualFile;
+ }
+
+ @Override
+ public void api(SourceFile sourceFile, xsbti.api.ClassLike classApi) {
+ delegate.api(asVirtualFile.apply(sourceFile), classApi);
+ }
+
+ @Override
+ public void startSource(SourceFile sourceFile) {
+ delegate.startSource(asVirtualFile.apply(sourceFile));
+ }
+
+ @Override
+ public void mainClass(SourceFile sourceFile, String className) {
+ delegate.mainClass(asVirtualFile.apply(sourceFile), className);
+ }
+
+ @Override
+ public boolean enabled() {
+ return delegate.enabled();
+ }
+
+ @Override
+ public void usedName(String className, String name, java.util.EnumSet useScopes) {
+ delegate.usedName(className, name, useScopes);
+ }
+
+ @Override
+ public void binaryDependency(java.nio.file.Path onBinaryEntry, String onBinaryClassName, String fromClassName, SourceFile fromSourceFile, xsbti.api.DependencyContext context) {
+ delegate.binaryDependency(onBinaryEntry, onBinaryClassName, fromClassName, asVirtualFile.apply(fromSourceFile), context);
+ }
+
+ @Override
+ public void classDependency(String onClassName, String sourceClassName, xsbti.api.DependencyContext context) {
+ delegate.classDependency(onClassName, sourceClassName, context);
+ }
+
+ @Override
+ public void generatedLocalClass(SourceFile source, java.nio.file.Path classFile) {
+ delegate.generatedLocalClass(asVirtualFile.apply(source), classFile);
+ }
+
+ @Override
+ public void generatedNonLocalClass(SourceFile source, java.nio.file.Path classFile, String binaryClassName, String srcClassName) {
+ delegate.generatedNonLocalClass(asVirtualFile.apply(source), classFile, binaryClassName, srcClassName);
+ }
+}
diff --git a/sbt-bridge/src/dotty/tools/xsbt/OldIncrementalCallback.java b/sbt-bridge/src/dotty/tools/xsbt/OldIncrementalCallback.java
new file mode 100644
index 000000000000..597a964eb944
--- /dev/null
+++ b/sbt-bridge/src/dotty/tools/xsbt/OldIncrementalCallback.java
@@ -0,0 +1,74 @@
+package dotty.tools.xsbt;
+
+import dotty.tools.dotc.util.SourceFile;
+import java.util.function.Function;
+import java.util.Optional;
+
+import java.io.File;
+
+/** To be compatible with the Zinc 1.3 API */
+public final class OldIncrementalCallback implements dotty.tools.dotc.sbt.interfaces.IncrementalCallback {
+
+ private final xsbti.AnalysisCallback delegate;
+
+ public OldIncrementalCallback(xsbti.AnalysisCallback delegate) {
+ this.delegate = delegate;
+ }
+
+ private static File asJavaFile(SourceFile sourceFile) {
+ File jfileOrNull = sourceFile.file().file();
+ if (jfileOrNull != null) return jfileOrNull;
+ throw new IllegalArgumentException("SourceFile " + sourceFile + " is not backed by a java.io.File");
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public void api(SourceFile sourceFile, xsbti.api.ClassLike classApi) {
+ delegate.api(asJavaFile(sourceFile), classApi);
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public void startSource(SourceFile sourceFile) {
+ delegate.startSource(asJavaFile(sourceFile));
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public void mainClass(SourceFile sourceFile, String className) {
+ delegate.mainClass(asJavaFile(sourceFile), className);
+ }
+
+ @Override
+ public boolean enabled() {
+ return delegate.enabled();
+ }
+
+ @Override
+ public void usedName(String className, String name, java.util.EnumSet useScopes) {
+ delegate.usedName(className, name, useScopes);
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public void binaryDependency(java.nio.file.Path onBinaryEntry, String onBinaryClassName, String fromClassName, SourceFile fromSourceFile, xsbti.api.DependencyContext context) {
+ delegate.binaryDependency(onBinaryEntry.toFile(), onBinaryClassName, fromClassName, asJavaFile(fromSourceFile), context);
+ }
+
+ @Override
+ public void classDependency(String onClassName, String sourceClassName, xsbti.api.DependencyContext context) {
+ delegate.classDependency(onClassName, sourceClassName, context);
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public void generatedLocalClass(SourceFile source, java.nio.file.Path classFile) {
+ delegate.generatedLocalClass(asJavaFile(source), classFile.toFile());
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public void generatedNonLocalClass(SourceFile source, java.nio.file.Path classFile, String binaryClassName, String srcClassName) {
+ delegate.generatedNonLocalClass(asJavaFile(source), classFile.toFile(), binaryClassName, srcClassName);
+ }
+}
diff --git a/sbt-bridge/src/dotty/tools/xsbt/PositionBridge.java b/sbt-bridge/src/dotty/tools/xsbt/PositionBridge.java
index 6b3c25e2e27c..eb01da25ba1c 100644
--- a/sbt-bridge/src/dotty/tools/xsbt/PositionBridge.java
+++ b/sbt-bridge/src/dotty/tools/xsbt/PositionBridge.java
@@ -12,10 +12,12 @@
import java.io.File;
import java.util.Optional;
+import java.util.function.Function;
public class PositionBridge implements Position {
private final SourcePosition pos;
private final SourceFile src;
+ private final String pathId;
public static final Position noPosition = new Position() {
public Optional sourceFile() {
@@ -45,9 +47,10 @@ public String toString() {
}
};
- public PositionBridge(SourcePosition pos, SourceFile src) {
+ public PositionBridge(SourcePosition pos, String path) {
this.pos = pos;
- this.src = src;
+ this.src = pos.source();
+ this.pathId = path;
}
@Override
@@ -82,17 +85,7 @@ public Optional offset() {
@Override
public Optional sourcePath() {
- if (!src.exists())
- return Optional.empty();
-
- AbstractFile sourceFile = pos.source().file();
- if (sourceFile instanceof ZincPlainFile) {
- return Optional.of(((ZincPlainFile) sourceFile).underlying().id());
- } else if (sourceFile instanceof ZincVirtualFile) {
- return Optional.of(((ZincVirtualFile) sourceFile).underlying().id());
- } else {
- return Optional.of(sourceFile.path());
- }
+ return Optional.of(pathId);
}
@Override
@@ -131,7 +124,7 @@ public String toString() {
else
return path;
}
-
+
@Override
public Optional startOffset() {
if (src.content().length == 0)
diff --git a/sbt-bridge/src/dotty/tools/xsbt/Problem.java b/sbt-bridge/src/dotty/tools/xsbt/Problem.java
index 29d64cc26c4a..532bb35786c4 100644
--- a/sbt-bridge/src/dotty/tools/xsbt/Problem.java
+++ b/sbt-bridge/src/dotty/tools/xsbt/Problem.java
@@ -1,8 +1,23 @@
package dotty.tools.xsbt;
+import java.util.List;
import java.util.Optional;
+import java.util.function.Function;
+
+import static java.util.stream.Collectors.toList;
+
+import dotty.tools.dotc.reporting.CodeAction;
+import dotty.tools.dotc.rewrites.Rewrites.ActionPatch;
+import dotty.tools.dotc.util.SourcePosition;
+import dotty.tools.dotc.util.SourceFile;
+
+import scala.jdk.javaapi.CollectionConverters;
+import scala.jdk.javaapi.OptionConverters;
+
import xsbti.Position;
import xsbti.Severity;
+import xsbti.VirtualFile;
+
final public class Problem implements xsbti.Problem {
private final Position _position;
@@ -10,14 +25,23 @@ final public class Problem implements xsbti.Problem {
private final Severity _severity;
private final Optional _rendered;
private final String _diagnosticCode;
+ private final List _actions;
+
+ // A function that can lookup the `id` of the VirtualFile
+ // associated with a SourceFile. If there is not an associated virtual file,
+ // then it is the path of the SourceFile as a String.
+ private final Function _lookupVirtualFileId;
- public Problem(Position position, String message, Severity severity, String rendered, String diagnosticCode) {
+ public Problem(Position position, String message, Severity severity, String rendered, String diagnosticCode, List actions,
+ Function lookupVirtualFileId) {
super();
this._position = position;
this._message = message;
this._severity = severity;
this._rendered = Optional.of(rendered);
this._diagnosticCode = diagnosticCode;
+ this._actions = actions;
+ this._lookupVirtualFileId = lookupVirtualFileId;
}
public String category() {
@@ -56,6 +80,38 @@ public Optional diagnosticCode() {
}
}
+ public List actions() {
+ if (_actions.isEmpty()) {
+ return java.util.Collections.emptyList();
+ } else {
+ // Same as with diagnosticCode, we need to ensure we don't create the actual
+ // Action until we are here to ensure that when using an older version of sbt/zinc
+ // with the new versions of the compiler, this doesn't blow up because this is
+ // never getting called.
+ return _actions
+ .stream()
+ .map(action -> new Action(action.title(), OptionConverters.toJava(action.description()), toWorkspaceEdit(CollectionConverters.asJava(action.patches()), _lookupVirtualFileId)))
+ .collect(toList());
+ }
+ }
+
+ private static WorkspaceEdit toWorkspaceEdit(List patches, Function lookupVirtualFileId) {
+ return new WorkspaceEdit(
+ patches
+ .stream()
+ .map(patch -> new TextEdit(positionOf(patch.srcPos(), lookupVirtualFileId), patch.replacement()))
+ .collect(toList())
+ );
+ }
+
+ private static Position positionOf(SourcePosition pos, Function lookupVirtualFileId) {
+ if (pos.exists()){
+ return new PositionBridge(pos, lookupVirtualFileId.apply(pos.source()));
+ } else {
+ return PositionBridge.noPosition;
+ }
+ }
+
@Override
public String toString() {
return "Problem(" + _position + ", " + _message + ", " + _severity + ", " + _rendered + ", " + _diagnosticCode + ")";
diff --git a/sbt-bridge/src/dotty/tools/xsbt/ProgressCallbackImpl.java b/sbt-bridge/src/dotty/tools/xsbt/ProgressCallbackImpl.java
new file mode 100644
index 000000000000..f5fb78f12bb1
--- /dev/null
+++ b/sbt-bridge/src/dotty/tools/xsbt/ProgressCallbackImpl.java
@@ -0,0 +1,35 @@
+package dotty.tools.xsbt;
+
+import dotty.tools.dotc.sbt.interfaces.ProgressCallback;
+import dotty.tools.dotc.CompilationUnit;
+
+import xsbti.compile.CompileProgress;
+
+public final class ProgressCallbackImpl implements ProgressCallback {
+ private boolean _cancelled = false; // TODO: atomic boolean?
+ private final CompileProgress _progress;
+
+ public ProgressCallbackImpl(CompileProgress progress) {
+ _progress = progress;
+ }
+
+ @Override
+ public void cancel() {
+ _cancelled = true;
+ }
+
+ @Override
+ public boolean isCancelled() {
+ return _cancelled;
+ }
+
+ @Override
+ public void informUnitStarting(String phase, CompilationUnit unit) {
+ _progress.startUnit(phase, unit.source().file().path());
+ }
+
+ @Override
+ public boolean progress(int current, int total, String currPhase, String nextPhase) {
+ return _progress.advance(current, total, currPhase, nextPhase);
+ }
+}
diff --git a/sbt-bridge/src/dotty/tools/xsbt/TextEdit.java b/sbt-bridge/src/dotty/tools/xsbt/TextEdit.java
new file mode 100644
index 000000000000..df717446b2f2
--- /dev/null
+++ b/sbt-bridge/src/dotty/tools/xsbt/TextEdit.java
@@ -0,0 +1,23 @@
+package dotty.tools.xsbt;
+
+import xsbti.Position;
+
+final public class TextEdit implements xsbti.TextEdit {
+ private final Position _position;
+ private final String _newText;
+
+ public TextEdit(Position position, String newText) {
+ super();
+ this._position = position;
+ this._newText = newText;
+ }
+
+ public Position position() {
+ return _position;
+ }
+
+ public String newText() {
+ return _newText;
+ }
+
+}
diff --git a/sbt-bridge/src/dotty/tools/xsbt/WorkspaceEdit.java b/sbt-bridge/src/dotty/tools/xsbt/WorkspaceEdit.java
new file mode 100644
index 000000000000..153de63e3765
--- /dev/null
+++ b/sbt-bridge/src/dotty/tools/xsbt/WorkspaceEdit.java
@@ -0,0 +1,20 @@
+package dotty.tools.xsbt;
+
+import java.util.List;
+
+import xsbti.TextEdit;
+
+final public class WorkspaceEdit implements xsbti.WorkspaceEdit {
+
+ private final List _changes;
+
+ public WorkspaceEdit(List changes) {
+ super();
+ this._changes = changes;
+ }
+
+ public List changes() {
+ return _changes;
+ }
+
+}
diff --git a/sbt-bridge/src/dotty/tools/xsbt/ZincPlainFile.java b/sbt-bridge/src/dotty/tools/xsbt/ZincPlainFile.java
deleted file mode 100644
index 68b3494cb84b..000000000000
--- a/sbt-bridge/src/dotty/tools/xsbt/ZincPlainFile.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Zinc - The incremental compiler for Scala.
- * Copyright Lightbend, Inc. and Mark Harrah
- */
-
-package dotty.tools.xsbt;
-
-import xsbti.PathBasedFile;
-
-public class ZincPlainFile extends dotty.tools.io.PlainFile {
- private final PathBasedFile _underlying;
-
- public ZincPlainFile(PathBasedFile underlying) {
- super(new dotty.tools.io.Path(underlying.toPath()));
- this._underlying = underlying;
- }
-
- public PathBasedFile underlying() {
- return _underlying;
- }
-}
\ No newline at end of file
diff --git a/sbt-bridge/src/dotty/tools/xsbt/ZincVirtualFile.java b/sbt-bridge/src/dotty/tools/xsbt/ZincVirtualFile.java
deleted file mode 100644
index a79686270f34..000000000000
--- a/sbt-bridge/src/dotty/tools/xsbt/ZincVirtualFile.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Zinc - The incremental compiler for Scala.
- * Copyright Lightbend, Inc. and Mark Harrah
- */
-
-package dotty.tools.xsbt;
-
-import dotty.tools.io.Streamable;
-import xsbti.VirtualFile;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
-public class ZincVirtualFile extends dotty.tools.io.VirtualFile {
- private final VirtualFile _underlying;
-
- public ZincVirtualFile(VirtualFile underlying) throws IOException {
- super(underlying.name(), underlying.id());
- this._underlying = underlying;
-
- // fill in the content
- OutputStream output = output();
- try {
- Streamable.Bytes bytes = new Streamable.Bytes() {
- @Override
- public InputStream inputStream() {
- return underlying.input();
- }
- };
- output.write(bytes.toByteArray());
- } finally {
- output.close();
- }
- }
-
- public VirtualFile underlying() {
- return _underlying;
- }
-}
diff --git a/sbt-bridge/src/xsbt/CachedCompilerImpl.java b/sbt-bridge/src/xsbt/CachedCompilerImpl.java
index 0b876475e51e..8b7779f9c9cb 100644
--- a/sbt-bridge/src/xsbt/CachedCompilerImpl.java
+++ b/sbt-bridge/src/xsbt/CachedCompilerImpl.java
@@ -13,6 +13,9 @@
import dotty.tools.dotc.Main;
import dotty.tools.xsbt.InterfaceCompileFailed;
import dotty.tools.xsbt.DelegatingReporter;
+import dotty.tools.xsbt.OldIncrementalCallback;
+
+import dotty.tools.dotc.sbt.interfaces.IncrementalCallback;
// deprecation warnings are suppressed because scala3-sbt-bridge must stay compatible with Zinc 1.3
// see https://github.com/lampepfl/dotty/issues/10816
@@ -60,9 +63,11 @@ synchronized public void run(File[] sources, DependencyChanges changes, Analysis
return msg;
});
+ IncrementalCallback incCallback = new OldIncrementalCallback(callback);
+
Context ctx = new ContextBase().initialCtx().fresh()
- .setSbtCallback(callback)
- .setReporter(new DelegatingReporter(delegate));
+ .setIncCallback(incCallback)
+ .setReporter(new DelegatingReporter(delegate, source -> source.file().absolutePath()));
dotty.tools.dotc.reporting.Reporter reporter = Main.process(commandArguments(sources), ctx);
if (reporter.hasErrors()) {
diff --git a/sbt-bridge/src/xsbt/CompilerInterface.java b/sbt-bridge/src/xsbt/CompilerInterface.java
index 3f26036eee6d..c48ee4c9d909 100644
--- a/sbt-bridge/src/xsbt/CompilerInterface.java
+++ b/sbt-bridge/src/xsbt/CompilerInterface.java
@@ -54,6 +54,7 @@ private boolean isClassLoaderValid() {
}
}
+ @SuppressWarnings("deprecation")
public void run(File[] sources, DependencyChanges changes, AnalysisCallback callback, Logger log,
Reporter delegate, CompileProgress progress, CachedCompiler cached) {
cached.run(sources, changes, callback, log, delegate, progress);
diff --git a/sbt-bridge/src/xsbt/DottydocRunner.java b/sbt-bridge/src/xsbt/DottydocRunner.java
index e4c35a317e71..a91ff087cea9 100644
--- a/sbt-bridge/src/xsbt/DottydocRunner.java
+++ b/sbt-bridge/src/xsbt/DottydocRunner.java
@@ -53,7 +53,7 @@ public void run() {
args = retained.toArray(new String[retained.size()]);
Context ctx = new ContextBase().initialCtx().fresh()
- .setReporter(new DelegatingReporter(delegate));
+ .setReporter(new DelegatingReporter(delegate, source -> source.file().absolutePath()));
try {
Class> dottydocMainClass = Class.forName("dotty.tools.dottydoc.Main");
diff --git a/sbt-bridge/test/xsbt/CompileProgressSpecification.scala b/sbt-bridge/test/xsbt/CompileProgressSpecification.scala
new file mode 100644
index 000000000000..bcdac0547e75
--- /dev/null
+++ b/sbt-bridge/test/xsbt/CompileProgressSpecification.scala
@@ -0,0 +1,79 @@
+package xsbt
+
+import org.junit.{ Test, Ignore }
+import org.junit.Assert._
+
+/**Only does some rudimentary checks to assert compat with sbt.
+ * More thorough tests are found in compiler/test/dotty/tools/dotc/sbt/ProgressCallbackTest.scala
+ */
+class CompileProgressSpecification {
+
+ @Test
+ def totalIsMoreWhenSourcePath = {
+ val srcA = """class A"""
+ val srcB = """class B"""
+ val extraC = """trait C""" // will only exist in the `-sourcepath`, causing a late compile
+ val extraD = """trait D""" // will only exist in the `-sourcepath`, causing a late compile
+ val srcE = """class E extends C""" // depends on class in the sourcepath
+ val srcF = """class F extends C, D""" // depends on classes in the sourcepath
+
+ val compilerForTesting = new ScalaCompilerForUnitTesting
+
+ val totalA = compilerForTesting.extractTotal(srcA)()
+ assertTrue("expected more than 1 unit of work for a single file", totalA > 1)
+
+ val totalB = compilerForTesting.extractTotal(srcA, srcB)()
+ assertEquals("expected twice the work for two sources", totalA * 2, totalB)
+
+ val totalC = compilerForTesting.extractTotal(srcA, srcE)(extraC)
+ assertEquals("expected 2x+1 the work for two sources, and 1 late compile", totalA * 2 + 1, totalC)
+
+ val totalD = compilerForTesting.extractTotal(srcA, srcF)(extraC, extraD)
+ assertEquals("expected 2x+2 the work for two sources, and 2 late compiles", totalA * 2 + 2, totalD)
+ }
+
+ @Test
+ def multipleFilesVisitSamePhases = {
+ val srcA = """class A"""
+ val srcB = """class B"""
+ val compilerForTesting = new ScalaCompilerForUnitTesting
+ val Seq(phasesA, phasesB) = compilerForTesting.extractEnteredPhases(srcA, srcB)
+ assertTrue("expected some phases, was empty", phasesA.nonEmpty)
+ assertEquals(phasesA, phasesB)
+ }
+
+ @Test
+ def multipleFiles = {
+ val srcA = """class A"""
+ val srcB = """class B"""
+ val compilerForTesting = new ScalaCompilerForUnitTesting
+ val allPhases = compilerForTesting.extractProgressPhases(srcA, srcB)
+ assertTrue("expected some phases, was empty", allPhases.nonEmpty)
+ val someExpectedPhases = // just check some "fundamental" phases, don't put all phases to avoid brittleness
+ Set(
+ "parser",
+ "typer[indexing]", "typer[typechecking]", "typer[checkingJava]",
+ "sbt-deps",
+ "posttyper",
+ "sbt-api",
+ "SetRootTree",
+ "pickler",
+ "inlining",
+ "postInlining",
+ "staging",
+ "splicing",
+ "pickleQuotes",
+ "MegaPhase{pruneErasedDefs,...,arrayConstructors}",
+ "erasure",
+ "constructors",
+ "genSJSIR",
+ "genBCode"
+ )
+ val missingExpectedPhases = someExpectedPhases -- allPhases.toSet
+ val msgIfMissing =
+ s"missing expected phases: $missingExpectedPhases. " +
+ s"Either the compiler phases changed, or the encoding of Run.SubPhases.subPhase"
+ assertTrue(msgIfMissing, missingExpectedPhases.isEmpty)
+ }
+
+}
diff --git a/sbt-bridge/test/xsbt/DependencySpecification.scala b/sbt-bridge/test/xsbt/DependencySpecification.scala
index a3fec950e120..54d37048dd09 100644
--- a/sbt-bridge/test/xsbt/DependencySpecification.scala
+++ b/sbt-bridge/test/xsbt/DependencySpecification.scala
@@ -209,4 +209,4 @@ class DependencySpecification {
compilerForTesting.extractDependenciesFromSrcs(srcA, srcB, srcC, srcD)
classDependencies
}
-}
\ No newline at end of file
+}
diff --git a/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala b/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala
index 819bedec3cbc..2b2b7d26c716 100644
--- a/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala
+++ b/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala
@@ -1,6 +1,7 @@
package xsbt
import xsbti.UseScope
+import ScalaCompilerForUnitTesting.Callbacks
import org.junit.{ Test, Ignore }
import org.junit.Assert._
@@ -226,7 +227,7 @@ class ExtractUsedNamesSpecification {
def findPatMatUsages(in: String): Set[String] = {
val compilerForTesting = new ScalaCompilerForUnitTesting
- val (_, callback) =
+ val (_, Callbacks(callback, _)) =
compilerForTesting.compileSrcs(List(List(sealedClass, in)))
val clientNames = callback.usedNamesAndScopes.view.filterKeys(!_.startsWith("base."))
diff --git a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala
index e58f9fefd92d..f17be692ee50 100644
--- a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala
+++ b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala
@@ -13,20 +13,40 @@ import dotty.tools.io.PlainFile.toPlainFile
import dotty.tools.xsbt.CompilerBridge
import TestCallback.ExtractedClassDependencies
+import ScalaCompilerForUnitTesting.Callbacks
+
+object ScalaCompilerForUnitTesting:
+ case class Callbacks(analysis: TestCallback, progress: TestCompileProgress)
/**
* Provides common functionality needed for unit tests that require compiling
* source code using Scala compiler.
*/
class ScalaCompilerForUnitTesting {
- import scala.language.reflectiveCalls
+
+ def extractEnteredPhases(srcs: String*): Seq[List[String]] = {
+ val (tempSrcFiles, Callbacks(_, testProgress)) = compileSrcs(srcs*)
+ val run = testProgress.runs.head
+ tempSrcFiles.map(src => run.unitPhases(src.id))
+ }
+
+ def extractTotal(srcs: String*)(extraSourcePath: String*): Int = {
+ val (tempSrcFiles, Callbacks(_, testProgress)) = compileSrcs(List(srcs.toList), extraSourcePath.toList)
+ val run = testProgress.runs.head
+ run.total
+ }
+
+ def extractProgressPhases(srcs: String*): List[String] = {
+ val (_, Callbacks(_, testProgress)) = compileSrcs(srcs*)
+ testProgress.runs.head.phases
+ }
/**
* Compiles given source code using Scala compiler and returns API representation
* extracted by ExtractAPI class.
*/
def extractApiFromSrc(src: String): Seq[ClassLike] = {
- val (Seq(tempSrcFile), analysisCallback) = compileSrcs(src)
+ val (Seq(tempSrcFile), Callbacks(analysisCallback, _)) = compileSrcs(src)
analysisCallback.apis(tempSrcFile)
}
@@ -35,7 +55,7 @@ class ScalaCompilerForUnitTesting {
* extracted by ExtractAPI class.
*/
def extractApisFromSrcs(srcs: List[String]*): Seq[Seq[ClassLike]] = {
- val (tempSrcFiles, analysisCallback) = compileSrcs(srcs.toList)
+ val (tempSrcFiles, Callbacks(analysisCallback, _)) = compileSrcs(srcs.toList)
tempSrcFiles.map(analysisCallback.apis)
}
@@ -53,7 +73,7 @@ class ScalaCompilerForUnitTesting {
assertDefaultScope: Boolean = true
): Map[String, Set[String]] = {
// we drop temp src file corresponding to the definition src file
- val (Seq(_, tempSrcFile), analysisCallback) = compileSrcs(definitionSrc, actualSrc)
+ val (Seq(_, tempSrcFile), Callbacks(analysisCallback, _)) = compileSrcs(definitionSrc, actualSrc)
if (assertDefaultScope) for {
(className, used) <- analysisCallback.usedNamesAndScopes
@@ -71,7 +91,7 @@ class ScalaCompilerForUnitTesting {
* Only the names used in the last src file are returned.
*/
def extractUsedNamesFromSrc(sources: String*): Map[String, Set[String]] = {
- val (srcFiles, analysisCallback) = compileSrcs(sources: _*)
+ val (srcFiles, Callbacks(analysisCallback, _)) = compileSrcs(sources*)
srcFiles
.map { srcFile =>
val classesInSrc = analysisCallback.classNames(srcFile).map(_._1)
@@ -93,7 +113,7 @@ class ScalaCompilerForUnitTesting {
* file system-independent way of testing dependencies between source code "files".
*/
def extractDependenciesFromSrcs(srcs: List[List[String]]): ExtractedClassDependencies = {
- val (_, testCallback) = compileSrcs(srcs)
+ val (_, Callbacks(testCallback, _)) = compileSrcs(srcs)
val memberRefDeps = testCallback.classDependencies collect {
case (target, src, DependencyByMemberRef) => (src, target)
@@ -122,50 +142,60 @@ class ScalaCompilerForUnitTesting {
* The sequence of temporary files corresponding to passed snippets and analysis
* callback is returned as a result.
*/
- def compileSrcs(groupedSrcs: List[List[String]]): (Seq[File], TestCallback) = {
+ def compileSrcs(groupedSrcs: List[List[String]], sourcePath: List[String] = Nil): (Seq[VirtualFile], Callbacks) = {
val temp = IO.createTemporaryDirectory
val analysisCallback = new TestCallback
+ val testProgress = new TestCompileProgress
val classesDir = new File(temp, "classes")
classesDir.mkdir()
val bridge = new CompilerBridge
- val files = for ((compilationUnit, unitId) <- groupedSrcs.zipWithIndex) yield {
- val srcFiles = compilationUnit.toSeq.zipWithIndex.map {
+ val files = for ((compilationUnits, unitId) <- groupedSrcs.zipWithIndex) yield {
+ val extraFiles = sourcePath.toSeq.zipWithIndex.map {
+ case (src, i) =>
+ val fileName = s"Extra-$unitId-$i.scala"
+ prepareSrcFile(temp, fileName, src)
+ }
+ val srcFiles = compilationUnits.toSeq.zipWithIndex.map {
(src, i) =>
val fileName = s"Test-$unitId-$i.scala"
prepareSrcFile(temp, fileName, src)
}
- val virtualSrcFiles = srcFiles.map(file => TestVirtualFile(file.toPath)).toArray
+ val virtualSrcFiles = srcFiles.toArray
val classesDirPath = classesDir.getAbsolutePath.toString
val output = new SingleOutput:
def getOutputDirectory() = classesDir
+ val maybeSourcePath = if extraFiles.isEmpty then Nil else List("-sourcepath", temp.getAbsolutePath.toString)
+
bridge.run(
- virtualSrcFiles.toArray,
+ virtualSrcFiles,
new TestDependencyChanges,
- Array("-Yforce-sbt-phases", "-classpath", classesDirPath, "-usejavacp", "-d", classesDirPath),
+ Array("-Yforce-sbt-phases", "-classpath", classesDirPath, "-usejavacp", "-d", classesDirPath) ++ maybeSourcePath,
output,
analysisCallback,
new TestReporter,
- new CompileProgress {},
+ testProgress,
new TestLogger
)
+ testProgress.completeRun()
+
srcFiles
}
- (files.flatten.toSeq, analysisCallback)
+ (files.flatten.toSeq, Callbacks(analysisCallback, testProgress))
}
- def compileSrcs(srcs: String*): (Seq[File], TestCallback) = {
+ def compileSrcs(srcs: String*): (Seq[VirtualFile], Callbacks) = {
compileSrcs(List(srcs.toList))
}
- private def prepareSrcFile(baseDir: File, fileName: String, src: String): File = {
+ private def prepareSrcFile(baseDir: File, fileName: String, src: String): VirtualFile = {
val srcFile = new File(baseDir, fileName)
IO.write(srcFile, src)
- srcFile
+ new TestVirtualFile(srcFile.toPath)
}
}
diff --git a/sbt-bridge/test/xsbt/TestVirtualFile.scala b/sbt-bridge/test/xsbt/TestVirtualFile.scala
index db00038272a8..2c7729d0a6cd 100644
--- a/sbt-bridge/test/xsbt/TestVirtualFile.scala
+++ b/sbt-bridge/test/xsbt/TestVirtualFile.scala
@@ -1,6 +1,6 @@
package xsbt
-import xsbti.PathBasedFile
+import xsbti.{PathBasedFile, VirtualFileRef}
import java.nio.file.{Files, Path}
import scala.io.Source
import scala.io.Codec
@@ -8,7 +8,17 @@ import scala.io.Codec
class TestVirtualFile(path: Path) extends PathBasedFile:
override def contentHash(): Long = ???
override def input(): java.io.InputStream = Files.newInputStream(path)
- override def id(): String = name()
+ lazy val absolutePath: String = path.toAbsolutePath.toString()
+ override def id(): String = absolutePath
override def name(): String = path.toFile.getName
override def names(): Array[String] = ???
override def toPath(): Path = path
+
+
+ override def hashCode(): Int = absolutePath.hashCode()
+
+ override def equals(x: Any): Boolean = this.eq(x.asInstanceOf[AnyRef]) || x.match {
+ case vf: VirtualFileRef => vf.id() == id()
+ }
+
+
diff --git a/sbt-bridge/test/xsbti/TestCallback.scala b/sbt-bridge/test/xsbti/TestCallback.scala
index a0919dc69bc4..3398590b169a 100644
--- a/sbt-bridge/test/xsbti/TestCallback.scala
+++ b/sbt-bridge/test/xsbti/TestCallback.scala
@@ -1,4 +1,4 @@
-/** Copied from https://github.com/sbt/sbt/blob/0.13/interface/src/test/scala/xsbti/TestCallback.scala */
+// Taken from https://github.com/sbt/zinc/blob/aa1c04f445092e87f76aaceee4da61ea0724419e/internal/zinc-testing/src/main/scala/xsbti/TestCallback.scala
package xsbti
import java.io.File
@@ -8,61 +8,121 @@ import xsbti.VirtualFileRef
import xsbti.api.ClassLike
import xsbti.api.DependencyContext
import DependencyContext._
-import java.util.EnumSet
+import java.{util => ju}
+import ju.Optional
+
+class TestCallback extends AnalysisCallback2 {
+ case class TestUsedName(name: String, scopes: ju.EnumSet[UseScope])
-class TestCallback extends AnalysisCallback
-{
- case class TestUsedName(name: String, scopes: EnumSet[UseScope])
val classDependencies = new ArrayBuffer[(String, String, DependencyContext)]
- val binaryDependencies = new ArrayBuffer[(File, String, String, File, DependencyContext)]
- val products = new ArrayBuffer[(File, File)]
- val usedNamesAndScopes = scala.collection.mutable.Map.empty[String, Set[TestUsedName]].withDefaultValue(Set.empty)
- val classNames = scala.collection.mutable.Map.empty[File, Set[(String, String)]].withDefaultValue(Set.empty)
- val apis: scala.collection.mutable.Map[File, Seq[ClassLike]] = scala.collection.mutable.Map.empty
+ val binaryDependencies =
+ new ArrayBuffer[(Path, String, String, VirtualFileRef, DependencyContext)]
+ val productClassesToSources =
+ scala.collection.mutable.Map.empty[Path, VirtualFileRef]
+ val usedNamesAndScopes = scala.collection.mutable.Map
+ .empty[String, Set[TestUsedName]]
+ .withDefaultValue(Set.empty)
+ val classNames = scala.collection.mutable.Map
+ .empty[VirtualFileRef, Set[(String, String)]]
+ .withDefaultValue(Set.empty)
+ val apis: scala.collection.mutable.Map[VirtualFileRef, Seq[ClassLike]] =
+ scala.collection.mutable.Map.empty
def usedNames = usedNamesAndScopes.view.mapValues(_.map(_.name)).toMap
- override def startSource(source: File): Unit = {
- assert(!apis.contains(source), s"startSource can be called only once per source file: $source")
+ override def startSource(source: File): Unit = ???
+ override def startSource(source: VirtualFile): Unit = {
+ assert(
+ !apis.contains(source),
+ s"startSource can be called only once per source file: $source"
+ )
apis(source) = Seq.empty
}
- override def startSource(source: VirtualFile): Unit = ???
- override def binaryDependency(binary: File, name: String, fromClassName: String, source: File, context: DependencyContext): Unit = {
+ override def binaryDependency(
+ binary: File,
+ name: String,
+ fromClassName: String,
+ source: File,
+ context: DependencyContext
+ ): Unit = ???
+ override def binaryDependency(
+ binary: Path,
+ name: String,
+ fromClassName: String,
+ source: VirtualFileRef,
+ context: DependencyContext
+ ): Unit = {
binaryDependencies += ((binary, name, fromClassName, source, context))
}
- override def binaryDependency(binary: Path, name: String, fromClassName: String, source: VirtualFileRef, context: DependencyContext): Unit = ???
-
- override def generatedNonLocalClass(source: File,
- module: File,
- binaryClassName: String,
- srcClassName: String): Unit = {
- products += ((source, module))
- classNames(source) += ((srcClassName, binaryClassName))
+
+ override def generatedNonLocalClass(
+ source: File,
+ module: File,
+ binaryClassName: String,
+ srcClassName: String
+ ): Unit = ???
+
+ override def generatedNonLocalClass(
+ sourceFile: VirtualFileRef,
+ classFile: Path,
+ binaryClassName: String,
+ srcClassName: String
+ ): Unit = {
+ productClassesToSources += ((classFile, sourceFile))
+ classNames(sourceFile) += ((srcClassName, binaryClassName))
()
}
- override def generatedNonLocalClass(source: VirtualFileRef, module: Path, binaryClassName: String, srcClassName: String): Unit = ???
- override def generatedLocalClass(source: File, module: File): Unit = {
- products += ((source, module))
+ override def generatedLocalClass(source: File, module: File): Unit = ???
+ override def generatedLocalClass(
+ sourceFile: VirtualFileRef,
+ classFile: Path
+ ): Unit = {
+ productClassesToSources += ((classFile, sourceFile))
()
}
- override def generatedLocalClass(source: VirtualFileRef, module: Path): Unit = ???
- override def classDependency(onClassName: String, sourceClassName: String, context: DependencyContext): Unit = {
- if (onClassName != sourceClassName) classDependencies += ((onClassName, sourceClassName, context))
+ override def classDependency(
+ onClassName: String,
+ sourceClassName: String,
+ context: DependencyContext
+ ): Unit = {
+ if (onClassName != sourceClassName)
+ classDependencies += ((onClassName, sourceClassName, context))
}
- override def usedName(className: String, name: String, scopes: EnumSet[UseScope]): Unit = {
+ override def usedName(
+ className: String,
+ name: String,
+ scopes: ju.EnumSet[UseScope]
+ ): Unit = {
usedNamesAndScopes(className) += TestUsedName(name, scopes)
}
- override def api(source: File, classApi: ClassLike): Unit = {
+ override def api(source: File, classApi: ClassLike): Unit = ???
+ override def api(source: VirtualFileRef, classApi: ClassLike): Unit = {
apis(source) = classApi +: apis(source)
}
- override def api(source: VirtualFileRef, classApi: ClassLike): Unit = ???
- override def problem(category: String, pos: xsbti.Position, message: String, severity: xsbti.Severity, reported: Boolean): Unit = ()
+ override def problem(
+ category: String,
+ pos: xsbti.Position,
+ message: String,
+ severity: xsbti.Severity,
+ reported: Boolean
+ ): Unit = ()
+ override def problem2(
+ category: String,
+ pos: Position,
+ msg: String,
+ severity: Severity,
+ reported: Boolean,
+ rendered: Optional[String],
+ diagnosticCode: Optional[xsbti.DiagnosticCode],
+ diagnosticRelatedInformation: ju.List[xsbti.DiagnosticRelatedInformation],
+ actions: ju.List[xsbti.Action]
+ ): Unit = ()
override def dependencyPhaseCompleted(): Unit = ()
override def apiPhaseCompleted(): Unit = ()
override def enabled(): Boolean = true
@@ -71,29 +131,39 @@ class TestCallback extends AnalysisCallback
override def mainClass(source: VirtualFileRef, className: String): Unit = ???
override def classesInOutputJar(): java.util.Set[String] = ???
- override def getPickleJarPair(): java.util.Optional[xsbti.T2[Path, Path]] = ???
+ override def getPickleJarPair(): java.util.Optional[xsbti.T2[Path, Path]] =
+ ???
override def isPickleJava(): Boolean = ???
}
object TestCallback {
- case class ExtractedClassDependencies(memberRef: Map[String, Set[String]],
- inheritance: Map[String, Set[String]],
- localInheritance: Map[String, Set[String]])
+ case class ExtractedClassDependencies(
+ memberRef: Map[String, Set[String]],
+ inheritance: Map[String, Set[String]],
+ localInheritance: Map[String, Set[String]]
+ )
object ExtractedClassDependencies {
def fromPairs(
- memberRefPairs: collection.Seq[(String, String)],
- inheritancePairs: collection.Seq[(String, String)],
- localInheritancePairs: collection.Seq[(String, String)]
- ): ExtractedClassDependencies = {
- ExtractedClassDependencies(pairsToMultiMap(memberRefPairs),
+ memberRefPairs: collection.Seq[(String, String)],
+ inheritancePairs: collection.Seq[(String, String)],
+ localInheritancePairs: collection.Seq[(String, String)]
+ ): ExtractedClassDependencies = {
+ ExtractedClassDependencies(
+ pairsToMultiMap(memberRefPairs),
pairsToMultiMap(inheritancePairs),
- pairsToMultiMap(localInheritancePairs))
+ pairsToMultiMap(localInheritancePairs)
+ )
}
- private def pairsToMultiMap[A, B](pairs: collection.Seq[(A, B)]): Map[A, Set[B]] = {
- pairs.groupBy(_._1).view.mapValues(values => values.map(_._2).toSet)
- .toMap.withDefaultValue(Set.empty)
+ private def pairsToMultiMap[A, B](
+ pairs: collection.Seq[(A, B)]
+ ): Map[A, Set[B]] = {
+ pairs
+ .groupBy(_._1)
+ .view
+ .mapValues(values => values.map(_._2).toSet)
+ .toMap
+ .withDefaultValue(Set.empty)
}
}
}
-
diff --git a/sbt-bridge/test/xsbti/TestCompileProgress.scala b/sbt-bridge/test/xsbti/TestCompileProgress.scala
new file mode 100644
index 000000000000..d5dc81dfda24
--- /dev/null
+++ b/sbt-bridge/test/xsbti/TestCompileProgress.scala
@@ -0,0 +1,33 @@
+package xsbti
+
+import xsbti.compile.CompileProgress
+
+import scala.collection.mutable
+
+class TestCompileProgress extends CompileProgress:
+ class Run:
+ private[TestCompileProgress] val _phases: mutable.Set[String] = mutable.LinkedHashSet.empty
+ private[TestCompileProgress] val _unitPhases: mutable.Map[String, mutable.Set[String]] = mutable.LinkedHashMap.empty
+ private[TestCompileProgress] var _latestTotal: Int = 0
+
+ def phases: List[String] = _phases.toList
+ def unitPhases: collection.MapView[String, List[String]] = _unitPhases.view.mapValues(_.toList)
+ def total: Int = _latestTotal
+
+ private val _runs: mutable.ListBuffer[Run] = mutable.ListBuffer.empty
+ private var _currentRun: Run = new Run
+
+ def runs: List[Run] = _runs.toList
+
+ def completeRun(): Unit =
+ _runs += _currentRun
+ _currentRun = new Run
+
+ override def startUnit(phase: String, unitPath: String): Unit =
+ _currentRun._unitPhases.getOrElseUpdate(unitPath, mutable.LinkedHashSet.empty) += phase
+
+ override def advance(current: Int, total: Int, prevPhase: String, nextPhase: String): Boolean =
+ _currentRun._phases += prevPhase
+ _currentRun._phases += nextPhase
+ _currentRun._latestTotal = total
+ true
diff --git a/sbt-test/compilerReporter/i14576/Test.scala b/sbt-test/compilerReporter/i14576/Test.scala
index d94a49145f81..4f65c2267134 100644
--- a/sbt-test/compilerReporter/i14576/Test.scala
+++ b/sbt-test/compilerReporter/i14576/Test.scala
@@ -10,8 +10,5 @@ object Test:
def f(x: Text) = println(x.str)
f("abc")
- // private[this] and = _ are deprecated under -source:future
- private[this] var x: AnyRef = _
-
- // under -source:future, `_` is deprecated for wildcard arguments of types: use `?` instead
- val xs: List[_] = Nil
+ @deprecated("", "") def deprecatedFun(): Unit = ()
+ deprecatedFun()
diff --git a/sbt-test/compilerReporter/i14576/build.sbt b/sbt-test/compilerReporter/i14576/build.sbt
index 9831c23c103e..cc0402a7ba5e 100644
--- a/sbt-test/compilerReporter/i14576/build.sbt
+++ b/sbt-test/compilerReporter/i14576/build.sbt
@@ -10,7 +10,7 @@ lazy val resetMessages = taskKey[Unit]("empties the messages list")
lazy val root = (project in file("."))
.settings(
- scalacOptions += "-source:future",
+ scalacOptions += "-source:future-migration",
extraAppenders := { s => Seq(ConsoleAppender(FakePrintWriter)) },
assertFeatureSummary := {
assert {
@@ -24,7 +24,7 @@ lazy val root = (project in file("."))
},
assertDeprecationSummary := {
assert {
- FakePrintWriter.messages.exists(_.contains("there were 3 deprecation warnings; re-run with -deprecation for details"))
+ FakePrintWriter.messages.exists(_.contains("there was 1 deprecation warning; re-run with -deprecation for details"))
}
},
assertNoDeprecationSummary := {
diff --git a/sbt-test/compilerReporter/simple/Source.scala b/sbt-test/compilerReporter/simple/Source.scala
index 6f06785990c3..fcfd8672475b 100644
--- a/sbt-test/compilerReporter/simple/Source.scala
+++ b/sbt-test/compilerReporter/simple/Source.scala
@@ -7,4 +7,7 @@ trait Wr {
object Er {
val a = er1
-}
\ No newline at end of file
+
+ def f: Int = 1
+ val x = f _
+}
diff --git a/sbt-test/compilerReporter/simple/project/Reporter.scala b/sbt-test/compilerReporter/simple/project/Reporter.scala
index 6c3b60cebb3a..a22b5cfb904d 100644
--- a/sbt-test/compilerReporter/simple/project/Reporter.scala
+++ b/sbt-test/compilerReporter/simple/project/Reporter.scala
@@ -2,6 +2,8 @@ import sbt._
import Keys._
import KeyRanks.DTask
+import scala.jdk.CollectionConverters.*
+
object Reporter {
import xsbti.{Reporter, Problem, Position, Severity}
@@ -27,27 +29,62 @@ object Reporter {
check := (Compile / compile).failure.map(_ => {
val problems = reporter.problems
println(problems.toList)
- assert(problems.size == 1)
- // make sure position reported by zinc are proper
- val mainProblem = problems.head
+ problems match {
+ case Array(err, warning) =>
+ // Checking the error reported
+ val eline = err.position().line()
+ assert(eline.isPresent() == true)
+ assert(eline.get() == 9)
+
+ val ediagnosticCode = err.diagnosticCode()
+ assert(ediagnosticCode.isPresent() == true)
+ val ecode = ediagnosticCode.get().code()
+ assert(ecode == "6")
+
+ val epointer = err.position().pointer()
+ assert(epointer.isPresent() == true)
+ assert(epointer.get() == 10)
+
+ assert(err.position.offset.isPresent)
+
+ assert(err.severity == Severity.Error) // not found: er1,
+
+ // Checking the warning reported
+
+ val wline = warning.position().line()
+ assert(wline.isPresent() == true)
+ assert(wline.get() == 12)
+
+ val wdiagnosticCode = warning.diagnosticCode()
+ assert(wdiagnosticCode.isPresent() == true)
+ val wcode = wdiagnosticCode.get().code()
+ assert(wcode == "99")
+
+ val wpointer = warning.position().pointer()
+ assert(wpointer.isPresent() == true)
+ assert(wpointer.get() == 12)
+
+ assert(warning.position.offset.isPresent)
+
+ assert(warning.severity == Severity.Warn) // Only function types can be followed by _ but the current expression has type Int
+
+ val actions = warning.actions().asScala.toList
+
+ assert(actions.size == 1)
+
+ val action = actions.head
- val line = mainProblem.position().line()
- assert(line.isPresent() == true)
- assert(line.get() == 9)
+ assert(action.title() == "Rewrite to function value")
- val diagnosticCode = mainProblem.diagnosticCode()
- assert(diagnosticCode.isPresent() == true)
- val code = diagnosticCode.get()
- assert(diagnosticCode.get().code() == "6")
+ val edits = action.edit().changes().asScala.toList
- val pointer = mainProblem.position().pointer()
- assert(pointer.isPresent() == true)
- assert(pointer.get() == 10)
+ assert(edits.size == 2)
- assert(problems.forall(_.position.offset.isPresent))
+ case somethingElse =>
+ assert(false, s"Only expected to have a single error and a single warning, but instead got: ${somethingElse.toString}")
- assert(problems.count(_.severity == Severity.Error) == 1) // not found: er1,
+ }
}).value
)
}
diff --git a/sbt-test/java-compat/i18764/Test.scala b/sbt-test/java-compat/i18764/Test.scala
new file mode 100644
index 000000000000..030afb46b953
--- /dev/null
+++ b/sbt-test/java-compat/i18764/Test.scala
@@ -0,0 +1,4 @@
+
+import org.jooq.impl.TableRecordImpl
+
+class TRecord extends TableRecordImpl[TRecord](null) {}
diff --git a/sbt-test/java-compat/i18764/build.sbt b/sbt-test/java-compat/i18764/build.sbt
new file mode 100644
index 000000000000..2ad74478d52b
--- /dev/null
+++ b/sbt-test/java-compat/i18764/build.sbt
@@ -0,0 +1,9 @@
+
+scalaVersion := sys.props("plugin.scalaVersion")
+
+lazy val dependencies = Seq(
+ "org.jooq" % "jooq-codegen" % "3.18.7",
+)
+
+lazy val jooqtest = (project in file("."))
+ .settings(libraryDependencies ++= dependencies)
diff --git a/sbt-test/java-compat/i18764/test b/sbt-test/java-compat/i18764/test
new file mode 100644
index 000000000000..5df2af1f3956
--- /dev/null
+++ b/sbt-test/java-compat/i18764/test
@@ -0,0 +1 @@
+> compile
diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/a/src/main/scala/a/A.java b/sbt-test/pipelining/Yjava-tasty-annotation/a/src/main/scala/a/A.java
new file mode 100644
index 000000000000..49c55a7c4d9c
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-annotation/a/src/main/scala/a/A.java
@@ -0,0 +1,11 @@
+// this test ensures that it is possible to read a java annotation from TASTy.
+package a;
+
+import java.lang.annotation.*;
+
+
+@Documented
+@Retention(RetentionPolicy.CLASS)
+@Target({ ElementType.METHOD, ElementType.CONSTRUCTOR, ElementType.FIELD, ElementType.TYPE, ElementType.PACKAGE })
+public @interface A {
+}
diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/a/src/main/scala/a/package.scala b/sbt-test/pipelining/Yjava-tasty-annotation/a/src/main/scala/a/package.scala
new file mode 100644
index 000000000000..93f99e9892fe
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-annotation/a/src/main/scala/a/package.scala
@@ -0,0 +1,2 @@
+// THIS FILE EXISTS SO THAT `A.java` WILL BE COMPILED BY SCALAC
+package a
diff --git a/scaladoc/scripts/tocheck.txt b/sbt-test/pipelining/Yjava-tasty-annotation/b-alt/.keep
similarity index 100%
rename from scaladoc/scripts/tocheck.txt
rename to sbt-test/pipelining/Yjava-tasty-annotation/b-alt/.keep
diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Yjava-tasty-annotation/b/src/main/scala/b/B.scala
new file mode 100644
index 000000000000..51c7322bf264
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-annotation/b/src/main/scala/b/B.scala
@@ -0,0 +1,9 @@
+package b
+
+import a.A
+
+object B {
+ @A
+ val foo = 23
+}
+
diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/build.sbt b/sbt-test/pipelining/Yjava-tasty-annotation/build.sbt
new file mode 100644
index 000000000000..18f6b8224968
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-annotation/build.sbt
@@ -0,0 +1,21 @@
+lazy val a = project.in(file("a"))
+ .settings(
+ scalacOptions += "-Yjava-tasty", // enable pickling of java signatures
+ scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-annotation-java-tasty.jar").toString),
+ scalacOptions += "-Ycheck:all",
+ Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-annotation-classes"), // send classfiles to a different directory
+ )
+
+lazy val b = project.in(file("b"))
+ .settings(
+ Compile / unmanagedClasspath := Seq(Attributed.blank((ThisBuild / baseDirectory).value / "a-annotation-java-tasty.jar")),
+ scalacOptions += "-Ycheck:all",
+ )
+
+// same as b, but adds the real classes to the classpath instead of the tasty jar
+lazy val bAlt = project.in(file("b-alt"))
+ .settings(
+ Compile / sources := (b / Compile / sources).value,
+ Compile / unmanagedClasspath := Seq(Attributed.blank((ThisBuild / baseDirectory).value / "a-annotation-classes")),
+ scalacOptions += "-Ycheck:all",
+ )
diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Yjava-tasty-annotation/project/DottyInjectedPlugin.scala
new file mode 100644
index 000000000000..69f15d168bfc
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-annotation/project/DottyInjectedPlugin.scala
@@ -0,0 +1,12 @@
+import sbt._
+import Keys._
+
+object DottyInjectedPlugin extends AutoPlugin {
+ override def requires = plugins.JvmPlugin
+ override def trigger = allRequirements
+
+ override val projectSettings = Seq(
+ scalaVersion := sys.props("plugin.scalaVersion"),
+ scalacOptions += "-source:3.0-migration"
+ )
+}
diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/test b/sbt-test/pipelining/Yjava-tasty-annotation/test
new file mode 100644
index 000000000000..6f7f57e91ab1
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-annotation/test
@@ -0,0 +1,5 @@
+> a/compile
+# Test depending on a java compiled annotation through TASTy
+> b/compile
+# double check against the real java classes
+> bAlt/compile
diff --git a/sbt-test/pipelining/Yjava-tasty-enum/a/src/main/scala/a/A.java b/sbt-test/pipelining/Yjava-tasty-enum/a/src/main/scala/a/A.java
new file mode 100644
index 000000000000..26bf8a246774
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-enum/a/src/main/scala/a/A.java
@@ -0,0 +1,7 @@
+// this test ensures that ExtractAPI does not cause a crash
+// when looking at sealedDescendants of a Java enum.
+package a;
+
+public enum A {
+ X, Y, Z;
+}
diff --git a/sbt-test/pipelining/Yjava-tasty-enum/a/src/main/scala/a/package.scala b/sbt-test/pipelining/Yjava-tasty-enum/a/src/main/scala/a/package.scala
new file mode 100644
index 000000000000..93f99e9892fe
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-enum/a/src/main/scala/a/package.scala
@@ -0,0 +1,2 @@
+// THIS FILE EXISTS SO THAT `A.java` WILL BE COMPILED BY SCALAC
+package a
diff --git a/tests/neg-custom-args/fatal-warnings/i6190a.check b/sbt-test/pipelining/Yjava-tasty-enum/b-alt/.keep
similarity index 100%
rename from tests/neg-custom-args/fatal-warnings/i6190a.check
rename to sbt-test/pipelining/Yjava-tasty-enum/b-alt/.keep
diff --git a/sbt-test/pipelining/Yjava-tasty-enum/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Yjava-tasty-enum/b/src/main/scala/b/B.scala
new file mode 100644
index 000000000000..a648bb4e83d6
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-enum/b/src/main/scala/b/B.scala
@@ -0,0 +1,17 @@
+package b
+
+import a.A
+
+object B {
+
+ def formattedEnum(e: A): String = e match {
+ case A.X => "X"
+ case A.Y => "Y"
+ case A.Z => "Z"
+ }
+
+ @main def test =
+ assert(A.values.toList == List(A.X, A.Y, A.Z))
+ assert(A.values.toList.map(formattedEnum) == List("X", "Y", "Z"))
+}
+
diff --git a/sbt-test/pipelining/Yjava-tasty-enum/build.sbt b/sbt-test/pipelining/Yjava-tasty-enum/build.sbt
new file mode 100644
index 000000000000..aca2391987e9
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-enum/build.sbt
@@ -0,0 +1,31 @@
+lazy val a = project.in(file("a"))
+ .settings(
+ compileOrder := CompileOrder.Mixed, // ensure we send java sources to Scala compiler
+ scalacOptions += "-Yjava-tasty", // enable pickling of java signatures
+ scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty.jar").toString),
+ scalacOptions += "-Ycheck:all",
+ Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-enum-classes"), // send classfiles to a different directory
+ )
+
+
+lazy val b = project.in(file("b"))
+ .settings(
+ Compile / unmanagedClasspath := Seq(Attributed.blank((ThisBuild / baseDirectory).value / "a-enum-java-tasty.jar")),
+ scalacOptions += "-Ycheck:all",
+ )
+ .settings(
+ fork := true, // we have to fork the JVM if we actually want to run the code with correct failure semantics
+ Runtime / unmanagedClasspath += Attributed.blank((ThisBuild / baseDirectory).value / "a-enum-classes"), // make sure the java classes are visible at runtime
+ )
+
+// same as b, but adds the real classes to the classpath instead of the tasty jar
+lazy val bAlt = project.in(file("b-alt"))
+ .settings(
+ Compile / sources := (b / Compile / sources).value,
+ Compile / unmanagedClasspath := Seq(Attributed.blank((ThisBuild / baseDirectory).value / "a-enum-classes")),
+ scalacOptions += "-Ycheck:all",
+ )
+ .settings(
+ fork := true, // we have to fork the JVM if we actually want to run the code with correct failure semantics
+ Runtime / unmanagedClasspath += Attributed.blank((ThisBuild / baseDirectory).value / "a-enum-classes"), // make sure the java classes are visible at runtime
+ )
diff --git a/sbt-test/pipelining/Yjava-tasty-enum/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Yjava-tasty-enum/project/DottyInjectedPlugin.scala
new file mode 100644
index 000000000000..69f15d168bfc
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-enum/project/DottyInjectedPlugin.scala
@@ -0,0 +1,12 @@
+import sbt._
+import Keys._
+
+object DottyInjectedPlugin extends AutoPlugin {
+ override def requires = plugins.JvmPlugin
+ override def trigger = allRequirements
+
+ override val projectSettings = Seq(
+ scalaVersion := sys.props("plugin.scalaVersion"),
+ scalacOptions += "-source:3.0-migration"
+ )
+}
diff --git a/sbt-test/pipelining/Yjava-tasty-enum/test b/sbt-test/pipelining/Yjava-tasty-enum/test
new file mode 100644
index 000000000000..fa53c47aea59
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-enum/test
@@ -0,0 +1,5 @@
+> a/compile
+# test depending on a java compiled enum through TASTy
+> b/run
+# double check against the real java classes
+> bAlt/run
diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/a/src/main/scala/a/A.java b/sbt-test/pipelining/Yjava-tasty-from-tasty/a/src/main/scala/a/A.java
new file mode 100644
index 000000000000..381da612df90
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-from-tasty/a/src/main/scala/a/A.java
@@ -0,0 +1,5 @@
+package a;
+
+public class A {
+ public String VALUE = "A";
+}
diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/a/src/main/scala/a/package.scala b/sbt-test/pipelining/Yjava-tasty-from-tasty/a/src/main/scala/a/package.scala
new file mode 100644
index 000000000000..8cfc7fa44d87
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-from-tasty/a/src/main/scala/a/package.scala
@@ -0,0 +1,2 @@
+// THE PURPOSE OF THIS FILE IS TO MAKE SBT SEND A.java TO THE SCALA COMPILER
+package a
diff --git a/tests/run-custom-args/erased/erased-select-prefix.check b/sbt-test/pipelining/Yjava-tasty-from-tasty/a_from_tasty/.keep
similarity index 100%
rename from tests/run-custom-args/erased/erased-select-prefix.check
rename to sbt-test/pipelining/Yjava-tasty-from-tasty/a_from_tasty/.keep
diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/b-alt/.keep b/sbt-test/pipelining/Yjava-tasty-from-tasty/b-alt/.keep
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Yjava-tasty-from-tasty/b/src/main/scala/b/B.scala
new file mode 100644
index 000000000000..43a45ae53ce2
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-from-tasty/b/src/main/scala/b/B.scala
@@ -0,0 +1,9 @@
+package b
+
+object B {
+ val A_VALUE = (new a.A).VALUE
+
+ @main def test = {
+ assert(A_VALUE == "A", s"actually was $A_VALUE")
+ }
+}
diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/build.sbt b/sbt-test/pipelining/Yjava-tasty-from-tasty/build.sbt
new file mode 100644
index 000000000000..e4b15d3d9c7e
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-from-tasty/build.sbt
@@ -0,0 +1,49 @@
+// `a` contains mixed java/scala sources so sbt will send java sources to Scala compiler.
+lazy val a = project.in(file("a"))
+ .settings(
+ compileOrder := CompileOrder.Mixed, // ensure we send java sources to Scala compiler
+ scalacOptions += "-Yjava-tasty", // enable pickling of java signatures
+ scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-pre-java-tasty.jar").toString),
+ scalacOptions += "-Ycheck:all",
+ Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-pre-classes"), // send classfiles to a different directory
+ )
+
+// recompile `a` with `-from-tasty` flag to test idempotent read/write java signatures.
+// Requires -Yjava-tasty to be set in order to read them.
+lazy val a_from_tasty = project.in(file("a_from_tasty"))
+ .settings(
+ Compile / sources := Seq((ThisBuild / baseDirectory).value / "a-pre-java-tasty.jar"),
+ Compile / unmanagedClasspath := Seq(Attributed.blank((ThisBuild / baseDirectory).value / "a-pre-java-tasty.jar")),
+ scalacOptions += "-from-tasty", // read the jar file tasties as the source files
+ scalacOptions += "-Yjava-tasty",
+ scalacOptions += "-Yallow-outline-from-tasty", // allow outline signatures to be read with -from-tasty
+ scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a_from_tasty-java-tasty.jar").toString),
+ scalacOptions += "-Ycheck:all",
+ Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a_from_tasty-classes"), // send classfiles to a different directory
+ )
+
+lazy val b = project.in(file("b"))
+ .settings(
+ scalacOptions += "-Ycheck:all",
+ Compile / unmanagedClasspath := Seq(Attributed.blank((ThisBuild / baseDirectory).value / "a_from_tasty-java-tasty.jar")),
+ )
+ .settings(
+ // we have to fork the JVM if we actually want to run the code with correct failure semantics
+ fork := true,
+ // make sure the java classes are visible at runtime
+ Runtime / unmanagedClasspath += Attributed.blank((ThisBuild / baseDirectory).value / "a-pre-classes"),
+ )
+
+// same as b, but adds the real classes to the classpath instead of the tasty jar
+lazy val bAlt = project.in(file("b-alt"))
+ .settings(
+ scalacOptions += "-Ycheck:all",
+ Compile / sources := (b / Compile / sources).value,
+ Compile / unmanagedClasspath := Seq(Attributed.blank((ThisBuild / baseDirectory).value / "a-pre-classes")),
+ )
+ .settings(
+ // we have to fork the JVM if we actually want to run the code with correct failure semantics
+ fork := true,
+ // make sure the java classes are visible at runtime
+ Runtime / unmanagedClasspath += Attributed.blank((ThisBuild / baseDirectory).value / "a-pre-classes"),
+ )
diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Yjava-tasty-from-tasty/project/DottyInjectedPlugin.scala
new file mode 100644
index 000000000000..69f15d168bfc
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-from-tasty/project/DottyInjectedPlugin.scala
@@ -0,0 +1,12 @@
+import sbt._
+import Keys._
+
+object DottyInjectedPlugin extends AutoPlugin {
+ override def requires = plugins.JvmPlugin
+ override def trigger = allRequirements
+
+ override val projectSettings = Seq(
+ scalaVersion := sys.props("plugin.scalaVersion"),
+ scalacOptions += "-source:3.0-migration"
+ )
+}
diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/test b/sbt-test/pipelining/Yjava-tasty-from-tasty/test
new file mode 100644
index 000000000000..b4ce2965b995
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-from-tasty/test
@@ -0,0 +1,7 @@
+> a/compile
+# test reading java tasty with -from-tasty
+> a_from_tasty/compile
+# test java tasty is still written even with -from-tasty
+> b/run
+# double check against the real java classes
+> bAlt/run
diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/a-check/.keep b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/a-check/.keep
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/a/src/main/scala/a/A.java b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/a/src/main/scala/a/A.java
new file mode 100644
index 000000000000..b798a9dedce9
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/a/src/main/scala/a/A.java
@@ -0,0 +1,56 @@
+// this test ensures that Object can accept Any from Scala
+// see Definitions.FromJavaObjectSymbol
+package a;
+
+public class A {
+
+ public static class Inner extends Object {
+ public T field1;
+ public T getter1() { return field1; }
+ public Object field2;
+ public Object getter2() { return field2; }
+
+ public Inner(T param1, Object param2) {
+ this.field1 = param1;
+ this.field2 = param2;
+ }
+
+ public void meth1(T arg) {}
+ public void meth2(U arg) {}
+ }
+
+ public static class Inner_sel extends java.lang.Object {
+ public T field1;
+ public T getter1() { return field1; }
+ public java.lang.Object field2;
+ public java.lang.Object getter2() { return field2; }
+
+ public Inner_sel(T param1, java.lang.Object param2) {
+ this.field1 = param1;
+ this.field2 = param2;
+ }
+
+ public void meth1(T arg) {}
+ public void meth2(U arg) {}
+ }
+
+ // 1. At the top level:
+ public void meth1(Object arg) {}
+ public void meth1_sel(java.lang.Object arg) {}
+ public void meth2(T arg) {} // T implicitly extends Object
+
+ // 2. In a class type parameter:
+ public void meth3(scala.collection.immutable.List