diff --git a/.github/ISSUE_TEMPLATE/crash.md b/.github/ISSUE_TEMPLATE/crash.md index 5a4cfe3f5e15..c742875616e2 100644 --- a/.github/ISSUE_TEMPLATE/crash.md +++ b/.github/ISSUE_TEMPLATE/crash.md @@ -25,6 +25,9 @@ println("hello, world") ``` ## Output (click arrow to expand) +
```scala diff --git a/.github/ISSUE_TEMPLATE/feature.md b/.github/ISSUE_TEMPLATE/feature.md index d622f4439f9d..52f8010c372e 100644 --- a/.github/ISSUE_TEMPLATE/feature.md +++ b/.github/ISSUE_TEMPLATE/feature.md @@ -1,10 +1,10 @@ --- name: "\U0001F389 Suggest a feature" -about: Please create a feature request here https://github.com/lampepfl/dotty-feature-requests +about: Please create a feature request here https://github.com/lampepfl/dotty/discussions/new?category=feature-requests title: '' labels: '' assignees: '' --- -Please create a feature request here: [lampepfl/dotty-feature-requests](https://github.com/lampepfl/dotty-feature-requests). +Please create a feature request in the [Dotty Discussions](https://github.com/lampepfl/dotty/discussions/new?category=feature-requests). diff --git a/.github/ISSUE_TEMPLATE/improve-error.md b/.github/ISSUE_TEMPLATE/improve-error.md new file mode 100644 index 000000000000..918196e1ec53 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/improve-error.md @@ -0,0 +1,55 @@ +--- +name: "\U0001F615 Error/Warning message report" +about: Report an error/warning message that was confusing/unhelpful +title: '' +labels: itype:enhancement, area:reporting, better-errors, stat:needs triage +assignees: '' + +--- + +## Compiler version + +If you're not sure what version you're using, run `print scalaVersion` from sbt +(if you're running scalac manually, use `scalac -version` instead). + +## Minimized example + + + +```Scala +printl("hello, world") +``` + +## Output Error/Warning message + + + +```scala +-- [E006] Not Found Error: ---------------- +1 |printl("hello, world") + |^^^^^^ + |Not found: printl +1 error found +``` + +## Why this Error/Warning was not helpful + + + +The message was unhelpful because... + +## Suggested improvement + + + +It could be made more helpful by... 
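(Aside on the error/warning-message template added above: the `printl` call in its minimized example is misspelled on purpose, so that compiling it reproduces the `[E006] Not Found` output the template then quotes. A minimal sketch of such a self-contained reproduction, assuming any recent Scala 3 release and a hypothetical file name `repro.scala`:)

```scala
// repro.scala -- hypothetical file name, for illustration only.
// The call below is deliberately misspelled (`printl` instead of `println`):
// compiling it, e.g. with `scalac repro.scala`, fails with
//   [E006] Not Found Error: ... Not found: printl
// which is the kind of compiler output the template asks reporters to paste.
@main def repro(): Unit =
  printl("hello, world")
```

Correcting `printl` to `println` makes the file compile and print `hello, world`.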
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 370b66854051..fd1fe30d3fa9 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -18,6 +18,8 @@ on: - '*' branches-ignore: - 'gh-readonly-queue/**' + - 'release-**' + - 'lts-**' pull_request: merge_group: schedule: @@ -70,13 +72,13 @@ jobs: run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true - name: Checkout cleanup script - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Cleanup run: .github/workflows/cleanup.sh - name: Git Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Add SBT proxy repositories run: cp -vf .github/workflows/repositories /root/.sbt/ ; true @@ -121,20 +123,20 @@ jobs: run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true - name: Checkout cleanup script - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Cleanup run: .github/workflows/cleanup.sh - name: Git Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Add SBT proxy repositories run: cp -vf .github/workflows/repositories /root/.sbt/ ; true - name: Cmd Tests run: | - ./project/scripts/sbt ";dist/pack; scala3-bootstrapped/compile; scala3-bootstrapped/test ;sbt-test/scripted scala2-compat/* ;stdlib-bootstrapped/test:run ;stdlib-bootstrapped-tasty-tests/test; scala3-compiler-bootstrapped/scala3CompilerCoursierTest:test" + ./project/scripts/sbt ";dist/pack; scala3-bootstrapped/compile; scala3-bootstrapped/test ;sbt-test/scripted scala2-compat/* ;scala2-library-tasty-tests/run ;scala2-library-tasty-tests/test; scala3-compiler-bootstrapped/scala3CompilerCoursierTest:test" ./project/scripts/cmdTests ./project/scripts/bootstrappedOnlyCmdTests @@ -142,6 +144,51 @@ jobs: run: | ./project/scripts/sbt ";sjsSandbox/run ;sjsSandbox/test ;sjsJUnitTests/test ;set sjsJUnitTests/scalaJSLinkerConfig ~= switchToESModules ;sjsJUnitTests/test ;sjsCompilerTests/test" + - name: Test with Scala 2 library TASTy (fast) + run: ./project/scripts/sbt ";set ThisBuild/Build.useScala2LibraryTasty := true ;scala3-bootstrapped/testCompilation i5; scala3-bootstrapped/testCompilation tests/run/typelevel-peano.scala; scala3-bootstrapped/testOnly dotty.tools.backend.jvm.DottyBytecodeTests" # only test a subset of tests to avoid doubling the CI execution time + + test_scala2_library_tasty: + runs-on: [self-hosted, Linux] + container: + image: lampepfl/dotty:2021-03-22 + options: --cpu-shares 4096 + volumes: + - ${{ github.workspace }}/../../cache/sbt:/root/.sbt + - ${{ github.workspace }}/../../cache/ivy:/root/.ivy2/cache + - ${{ github.workspace }}/../../cache/general:/root/.cache + if: "github.event_name == 'schedule' && github.repository == 'lampepfl/dotty' + || ( + github.event_name == 'pull_request' + && contains(github.event.pull_request.body, '[test_scala2_library_tasty]') + ) + || ( + github.event_name == 'workflow_dispatch' + && github.repository == 'lampepfl/dotty' + )" + + steps: + - name: Set JDK 16 as default + run: echo "/usr/lib/jvm/java-16-openjdk-amd64/bin" >> $GITHUB_PATH + + - name: Reset existing repo + run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true + + - name: Checkout cleanup script + uses: actions/checkout@v4 + + - name: Cleanup + run:
.github/workflows/cleanup.sh + + - name: Git Checkout + uses: actions/checkout@v4 + + - name: Add SBT proxy repositories + run: cp -vf .github/workflows/repositories /root/.sbt/ ; true + + - name: Test with Scala 2 library TASTy + run: ./project/scripts/sbt ";set ThisBuild/Build.useScala2LibraryTasty := true ;scala3-bootstrapped/test" + + test_windows_fast: runs-on: [self-hosted, Windows] if: "( @@ -161,7 +208,7 @@ jobs: shell: cmd - name: Git Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Test run: sbt ";scala3-bootstrapped/compile" @@ -203,7 +250,7 @@ jobs: shell: cmd - name: Git Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Test run: sbt ";dist/pack ;scala3-bootstrapped/compile ;scala3-bootstrapped/test" @@ -240,20 +287,25 @@ jobs: run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true - name: Checkout cleanup script - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Cleanup run: .github/workflows/cleanup.sh - name: Git Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Add SBT proxy repositories run: cp -vf .github/workflows/repositories /root/.sbt/ ; true - name: MiMa run: | - ./project/scripts/sbt ";scala3-interfaces/mimaReportBinaryIssues ;scala3-library-bootstrapped/mimaReportBinaryIssues ;scala3-library-bootstrappedJS/mimaReportBinaryIssues; tasty-core-bootstrapped/mimaReportBinaryIssues" + ./project/scripts/sbt ";scala3-interfaces/mimaReportBinaryIssues ;scala3-library-bootstrapped/mimaReportBinaryIssues ;scala3-library-bootstrappedJS/mimaReportBinaryIssues; tasty-core-bootstrapped/mimaReportBinaryIssues; scala2-library-bootstrapped/mimaReportBinaryIssues" + + - name: TASTy MiMa + run: | + # This script cleans the compiler and recompiles it from scratch (keep as last run) + ./project/scripts/scala2-library-tasty-mima.sh community_build_a: runs-on: [self-hosted, Linux] @@ -283,13 +335,13 @@ jobs: run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true - name: Checkout cleanup script - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Cleanup run: .github/workflows/cleanup.sh - name: Git Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Add SBT proxy repositories run: cp -vf .github/workflows/repositories /root/.sbt/ ; true @@ -332,13 +384,13 @@ jobs: run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true - name: Checkout cleanup script - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Cleanup run: .github/workflows/cleanup.sh - name: Git Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Add SBT proxy repositories run: cp -vf .github/workflows/repositories /root/.sbt/ ; true @@ -381,13 +433,13 @@ jobs: run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true - name: Checkout cleanup script - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Cleanup run: .github/workflows/cleanup.sh - name: Git Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Add SBT proxy repositories run: cp -vf .github/workflows/repositories /root/.sbt/ ; true @@ -428,13 +480,13 @@ jobs: run: git -c 
"http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true - name: Checkout cleanup script - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Cleanup run: .github/workflows/cleanup.sh - name: Git Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Add SBT proxy repositories run: cp -vf .github/workflows/repositories /root/.sbt/ ; true @@ -475,20 +527,20 @@ jobs: run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true - name: Checkout cleanup script - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Cleanup run: .github/workflows/cleanup.sh - name: Git Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Add SBT proxy repositories run: cp -vf .github/workflows/repositories /root/.sbt/ ; true - name: Test run: | - ./project/scripts/sbt ";dist/pack ;scala3-bootstrapped/compile ;scala3-bootstrapped/test ;sbt-test/scripted scala2-compat/* ;stdlib-bootstrapped/test:run ;stdlib-bootstrapped-tasty-tests/test" + ./project/scripts/sbt ";dist/pack ;scala3-bootstrapped/compile ;scala3-bootstrapped/test ;sbt-test/scripted scala2-compat/* ;scala2-library-tasty-tests/run ;scala2-library-tasty-tests/test" ./project/scripts/cmdTests ./project/scripts/bootstrappedOnlyCmdTests @@ -519,13 +571,13 @@ jobs: run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true - name: Checkout cleanup script - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Cleanup run: .github/workflows/cleanup.sh - name: Git Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Add SBT proxy repositories run: cp -vf .github/workflows/repositories /root/.sbt/ ; true @@ -573,13 +625,13 @@ jobs: run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true - name: Checkout cleanup script - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Cleanup run: .github/workflows/cleanup.sh - name: Git Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Add SBT proxy repositories run: cp -vf .github/workflows/repositories /root/.sbt/ ; true @@ -623,13 +675,13 @@ jobs: run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true - name: Checkout cleanup script - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Cleanup run: .github/workflows/cleanup.sh - name: Git Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Add SBT proxy repositories run: cp -vf .github/workflows/repositories /root/.sbt/ ; true @@ -696,7 +748,7 @@ jobs: if: "failure() && github.event_name == 'schedule'" steps: - name: Checkout issue template - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Open an issue uses: JasonEtco/create-an-issue@v2 diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml index bb1aec1290c0..f370cb2b541c 100644 --- a/.github/workflows/cla.yml +++ b/.github/workflows/cla.yml @@ -15,7 +15,7 @@ jobs: check: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - run: ./project/scripts/check-cla.sh if: github.event_name == 'pull_request' env: diff --git 
a/.github/workflows/dependency-graph.yml b/.github/workflows/dependency-graph.yml index f8facc0453ca..e96c3efbc8aa 100644 --- a/.github/workflows/dependency-graph.yml +++ b/.github/workflows/dependency-graph.yml @@ -8,5 +8,5 @@ jobs: name: Update Dependency Graph runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: scalacenter/sbt-dependency-submission@v2 diff --git a/.github/workflows/language-reference.yaml b/.github/workflows/language-reference.yaml index ec134ec35ffe..786785eaa4a2 100644 --- a/.github/workflows/language-reference.yaml +++ b/.github/workflows/language-reference.yaml @@ -24,14 +24,14 @@ jobs: run: echo "date=$(date +'%Y-%m-%d')" >> $GITHUB_OUTPUT - name: Git Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: path: 'dotty' fetch-depth: 0 ssh-key: ${{ secrets.DOCS_KEY }} - name: Set up JDK 17 - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: distribution: 'temurin' java-version: 17 @@ -46,7 +46,7 @@ jobs: - name: Push changes to scala3-reference-docs if: github.event_name == 'push' - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: lampepfl/scala3-reference-docs fetch-depth: 0 @@ -74,7 +74,7 @@ jobs: runs-on: ubuntu-latest if: github.event_name == 'push' || github.event_name == 'workflow_dispatch' steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: repo-sync/pull-request@v2 with: destination_branch: main diff --git a/.github/workflows/lts-backport.yaml b/.github/workflows/lts-backport.yaml new file mode 100644 index 000000000000..8e8added1c03 --- /dev/null +++ b/.github/workflows/lts-backport.yaml @@ -0,0 +1,22 @@ +name: Add to backporting project + +on: + push: + branches: + - main + +jobs: + add-to-backporting-project: + if: "!contains(github.event.push.head_commit.message, '[Next only]')" + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: coursier/cache-action@v6 + - uses: VirtusLab/scala-cli-setup@v1.1.0 + - run: scala-cli ./project/scripts/addToBackportingProject.scala -- ${{ github.sha }} + env: + GRAPHQL_API_TOKEN: ${{ secrets.GRAPHQL_API_TOKEN }} + diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index ba4bae0456d0..f2cd0706cfe7 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -24,7 +24,7 @@ jobs: run: .github/workflows/cleanup.sh - name: Git Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Publish to SDKMAN run: .github/workflows/scripts/publish-sdkman.sh diff --git a/.github/workflows/scaladoc.yaml b/.github/workflows/scaladoc.yaml index 3108f2b94562..98ce94718fe5 100644 --- a/.github/workflows/scaladoc.yaml +++ b/.github/workflows/scaladoc.yaml @@ -27,10 +27,10 @@ jobs: steps: - name: Git Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up JDK 17 - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: distribution: 'temurin' java-version: 17 @@ -62,21 +62,6 @@ jobs: - name: Generate documentation for example project using dotty-sbt run: ./project/scripts/sbt "sbt-test/scripted sbt-dotty/scaladoc" - - name: Generate index file - run: scaladoc/scripts/mk-index.sh scaladoc/output > scaladoc/output/index.html - - - name: Upload documentation to server - uses: azure/CLI@v1 - if: env.AZURE_STORAGE_SAS_TOKEN - env: - PR_NUMBER: ${{ github.event.pull_request.number }} - with: - inlineScript: | - DOC_DEST=$(echo pr-${PR_NUMBER:-${GITHUB_REF##*/}} | tr -d -c "[-A-Za-z0-9]") - 
echo uplading docs to https://scala3doc.virtuslab.com/$DOC_DEST - az storage container create --name $DOC_DEST --account-name scala3docstorage --public-access container - az storage blob upload-batch --overwrite true -s scaladoc/output -d $DOC_DEST --account-name scala3docstorage - stdlib-sourcelinks-test: runs-on: ubuntu-latest # if false - disable flaky test @@ -90,10 +75,10 @@ jobs: steps: - name: Git Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up JDK 17 - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: distribution: 'temurin' java-version: 17 diff --git a/.github/workflows/spec.yml b/.github/workflows/spec.yml new file mode 100644 index 000000000000..0c09ec170986 --- /dev/null +++ b/.github/workflows/spec.yml @@ -0,0 +1,56 @@ +name: Specification + +on: + push: + tags: + - '*' + branches-ignore: + - 'gh-readonly-queue/**' + pull_request: + merge_group: + workflow_dispatch: + +env: + DOTTY_CI_RUN: true + +jobs: + specification: + runs-on: ubuntu-latest + defaults: + run: + working-directory: ./docs/_spec + + steps: + - uses: actions/checkout@v4 + + # Keep in sync with ./docs/_spec/Dockerfile + - uses: ruby/setup-ruby@v1 + with: + ruby-version: '2.7' + - name: Install required gems + run: | + gem install "rubygems-update:<3.5" --no-document + update_rubygems + gem install sass-embedded -v 1.58.0 + gem install bundler:1.17.2 jekyll + bundle install + npm install bower + + - name: Build the specification + run: | + bundle exec jekyll build + + # Deploy + - name: Deployment + env: + USER_FOR_TEST: ${{ secrets.SPEC_DEPLOY_USER }} + if: ${{ env.USER_FOR_TEST != '' }} + uses: burnett01/rsync-deployments@6.0.0 + with: + switches: -rzv + path: docs/_spec/_site/ + remote_path: ${{ secrets.SPEC_DEPLOY_PATH }} + remote_host: ${{ secrets.SPEC_DEPLOY_HOST }} + remote_user: ${{ secrets.SPEC_DEPLOY_USER }} + remote_key: ${{ secrets.SPEC_DEPLOY_KEY }} + remote_key_pass: ${{ secrets.SPEC_DEPLOY_PASS }} diff --git a/.gitignore b/.gitignore index 5240662741bb..3d44cdefb941 100644 --- a/.gitignore +++ b/.gitignore @@ -37,6 +37,7 @@ metals.sbt # scala-cli .scala-build +sbt-launch.jar # Partest dotty.jar @@ -63,8 +64,8 @@ testlogs/ local/ compiler/test/debug/Gen.jar -compiler/before-pickling.txt -compiler/after-pickling.txt +before-pickling.txt +after-pickling.txt bench/compile.txt community-build/scala3-bootstrapped.version @@ -95,3 +96,10 @@ contributors.js content-contributors.css docs/_spec/_site/ docs/_spec/.jekyll-metadata + +# scaladoc related +scaladoc/output/ + +#coverage +coverage/ + diff --git a/.gitmodules b/.gitmodules index 4d87dd214e9c..8f87e992013a 100644 --- a/.gitmodules +++ b/.gitmodules @@ -21,17 +21,13 @@ url = https://github.com/dotty-staging/fastparse [submodule "community-build/community-projects/stdLib213"] path = community-build/community-projects/stdLib213 - url = https://github.com/dotty-staging/scala + url = https://github.com/dotty-staging/scala213 [submodule "community-build/community-projects/sourcecode"] path = community-build/community-projects/sourcecode url = https://github.com/dotty-staging/sourcecode [submodule "community-build/community-projects/scala-xml"] path = community-build/community-projects/scala-xml url = https://github.com/dotty-staging/scala-xml -[submodule "community-build/community-projects/shapeless"] - path = community-build/community-projects/shapeless - url = https://github.com/dotty-staging/shapeless - branch = shapeless-3-staging [submodule "community-build/community-projects/xml-interpolator"] path = 
community-build/community-projects/xml-interpolator url = https://github.com/dotty-staging/xml-interpolator.git @@ -222,3 +218,6 @@ [submodule "community-build/community-projects/parboiled2"] path = community-build/community-projects/parboiled2 url = https://github.com/dotty-staging/parboiled2.git +[submodule "community-build/community-projects/shapeless-3"] + path = community-build/community-projects/shapeless-3 + url = https://github.com/dotty-staging/shapeless-3.git diff --git a/.vscode-template/settings.json b/.vscode-template/settings.json index 8cf2d29e3bae..257da27b118f 100644 --- a/.vscode-template/settings.json +++ b/.vscode-template/settings.json @@ -9,6 +9,7 @@ "**/*.class": true, "**/*.tasty": true, "**/target/": true, - "community-build/community-projects": true + "community-build/community-projects": true, + "tests/pos-with-compiler-cc/dotc/**/*.scala": true } } diff --git a/MAINTENANCE.md b/MAINTENANCE.md index 54e74f7cb7ca..1e80f891e987 100644 --- a/MAINTENANCE.md +++ b/MAINTENANCE.md @@ -16,6 +16,11 @@ The issue supervisor is responsible for: - Attempting to reproduce the issue (or label “stat:cannot reproduce”) - Further minimizing the issue or asking the reporter of the issue to minimize it correctly (or label “stat:needs minimization”) - Identifying which issues are of considerable importance and bringing them to the attention of the team during the Dotty meeting, where they can be filtered and added to the [Future Versions](https://github.com/lampepfl/dotty/milestone/46) milestone. + - Identifying if a report is really a feature request and if so, converting it to + a [feature request discussion](https://github.com/lampepfl/dotty/discussions/categories/feature-requests). +- Keeping an eye on new +[discussions](https://github.com/lampepfl/dotty/discussions), making sure they +don't go unanswered and also correctly labeling new feature requests. Other core teammates are responsible for providing information to the issue supervisor in a timely manner when it is requested if they have that information. @@ -32,7 +37,6 @@ The issue supervisor schedule is maintained in the [Issue Supervisor Statistics An issue supervisor needs to have all the accesses and privileges required to get their job done. 
This might include: - Admin rights in lampepfl/dotty repository -- Admin rights in lampepfl/dotty-feature-requests repository - Permission to create new repositories in lampepfl organization (needed to fork repositories for the community build) - Access to the LAMP slack to be able to ask for help with the infrastructure, triaging and such @@ -62,24 +66,25 @@ At the end of their supervision period, the supervisor reports to the team durin The following is the list of all the principal areas of the compiler and the core team members who are responsible for their maintenance: ### Compiler -- Parser: @odersky +- Parser: @odersky, @hamzaremmal -- Typer: @odersky, @smarter, (@dwijnand) +- Typer: @odersky, @smarter, (@dwijnand), @noti0na1 - Erasure: @smarter, @odersky - Enums: @bishabosha -- Derivation & Mirrors: @bishabosha, (@dwijnand) +- Derivation & Mirrors: @bishabosha, (@dwijnand), @EugeneFlesselle - Export: @bishabosha, @odersky - Pattern Matching: @dwijnand, (@liufengyun), @sjrd -- Inline: @nicolasstucki, @odersky +- Inline: @nicolasstucki, @odersky, @hamzaremmal -- Metaprogramming (Quotes, Reflect, Staging): @nicolasstucki, @jchyb +- Metaprogramming (Quotes, Reflect, Staging): @nicolasstucki, @jchyb, @hamzaremmal -- Match types: @sjrd, @dwijnand, @Decel +- Match types: @sjrd, @dwijnand, @Decel, @Linyxus - GADT: @dwijnand, @Linyxus - Initialization checker: @olhotak, @liufengyun - Safe nulls: @noti0na1, @olhotak -- Lazy vals: @szymon-rd, @sjrd +- Transforms: @szymon-rd, @sjrd, @odersky, @smarter - tailrec: @sjrd, @mbovel - JS backend: @sjrd - JVM backend: @sjrd -- Java-compat: @smarter +- Java-compat: @smarter, @dwijnand +- Capture checker: @odersky, @Linyxus ### Tooling - REPL: @dwijnand, @prolativ diff --git a/NOTICE.md b/NOTICE.md index f4d0e6ed2b5a..64c1ede1a5eb 100644 --- a/NOTICE.md +++ b/NOTICE.md @@ -89,15 +89,19 @@ major authors were omitted by oversight. details. * dotty.tools.dotc.coverage: Coverage instrumentation utilities have been - adapted from the scoverage plugin for scala 2 [5], which is under the + adapted from the scoverage plugin for scala 2 [4], which is under the Apache 2.0 license. + * dotty.tools.pc: Presentation compiler implementation adapted from + scalameta/metals [5] mtags module, which is under the Apache 2.0 license. + * The Dotty codebase contains parts which are derived from - the ScalaPB protobuf library [4], which is under the Apache 2.0 license. + the ScalaPB protobuf library [6], which is under the Apache 2.0 license. [1] https://github.com/scala/scala [2] https://github.com/adriaanm/scala/tree/sbt-api-consolidate/src/compiler/scala/tools/sbt [3] https://github.com/sbt/sbt/tree/0.13/compile/interface/src/main/scala/xsbt -[4] https://github.com/lampepfl/dotty/pull/5783/files -[5] https://github.com/scoverage/scalac-scoverage-plugin +[4] https://github.com/scoverage/scalac-scoverage-plugin +[5] https://github.com/scalameta/metals +[6] https://github.com/lampepfl/dotty/pull/5783/files diff --git a/bench-run/src/main/scala/dotty/tools/benchmarks/Main.scala b/bench-run/src/main/scala/dotty/tools/benchmarks/Main.scala index ac26e57c7381..97dbf95556bf 100644 --- a/bench-run/src/main/scala/dotty/tools/benchmarks/Main.scala +++ b/bench-run/src/main/scala/dotty/tools/benchmarks/Main.scala @@ -68,7 +68,7 @@ object Bench { println() println("Usage:") println() - println("dotty-bench-run/jmh:run [] [] [] [|--] []") + println("scala3-bench-run/jmh:run [] [] [] [|--] []") println() println("warmup: warmup iterations.
defaults to 20.") println("iterations: benchmark iterations. defaults to 20.") diff --git a/bench/scripts/collection-vector.sh b/bench/scripts/collection-vector.sh index fb23a4a709db..e9395cfdd2ea 100755 --- a/bench/scripts/collection-vector.sh +++ b/bench/scripts/collection-vector.sh @@ -1,2 +1,2 @@ #!/usr/bin/env bash -sbt "dotty-bench-bootstrapped/jmh:run 40 40 3 bench/tests/Vector.scala" +sbt "scala3-bench-bootstrapped/jmh:run 40 40 3 bench/tests/Vector.scala" diff --git a/bench/scripts/compiler-cold.sh b/bench/scripts/compiler-cold.sh index 06c1e3823a50..73a19c47036e 100755 --- a/bench/scripts/compiler-cold.sh +++ b/bench/scripts/compiler-cold.sh @@ -1,2 +1,2 @@ #!/usr/bin/env bash -find compiler/src/ -type f \( -name "*.scala" -or -name "*.java" \) -exec echo "dotty-bench-bootstrapped/jmh:run 0 1 10" {} + | sbt +find compiler/src/ -type f \( -name "*.scala" -or -name "*.java" \) -exec echo "scala3-bench-bootstrapped/jmh:run 0 1 10" {} + | sbt diff --git a/bench/scripts/compiler.sh b/bench/scripts/compiler.sh index 0d1491d88676..f4864d027b74 100755 --- a/bench/scripts/compiler.sh +++ b/bench/scripts/compiler.sh @@ -1,2 +1,2 @@ #!/usr/bin/env bash -find compiler/src/ -type f \( -name "*.scala" -or -name "*.java" \) -exec echo "dotty-bench-bootstrapped/jmh:run 5 10" {} + | sbt +find compiler/src/ -type f \( -name "*.scala" -or -name "*.java" \) -exec echo "scala3-bench-bootstrapped/jmh:run 5 10" {} + | sbt diff --git a/bench/scripts/library-cold.sh b/bench/scripts/library-cold.sh index 349be0cc282f..5ebfbf68ddd9 100755 --- a/bench/scripts/library-cold.sh +++ b/bench/scripts/library-cold.sh @@ -1,2 +1,2 @@ #!/usr/bin/env bash -find library/src/ -type f \( -name "*.scala" -or -name "*.java" \) -exec echo "dotty-bench-bootstrapped/jmh:run 0 1 10" {} + | sbt +find library/src/ -type f \( -name "*.scala" -or -name "*.java" \) -exec echo "scala3-bench-bootstrapped/jmh:run 0 1 10" {} + | sbt diff --git a/bench/scripts/library.sh b/bench/scripts/library.sh index b811349f85ec..dd6bb733604c 100755 --- a/bench/scripts/library.sh +++ b/bench/scripts/library.sh @@ -1,2 +1,2 @@ #!/usr/bin/env bash -find library/src/ -type f \( -name "*.scala" -or -name "*.java" \) -exec echo "dotty-bench-bootstrapped/jmh:run 40 30" {} + | sbt +find library/src/ -type f \( -name "*.scala" -or -name "*.java" \) -exec echo "scala3-bench-bootstrapped/jmh:run 40 30" {} + | sbt diff --git a/build.sbt b/build.sbt index 80a36739d5e8..9b6ebc194ea0 100644 --- a/build.sbt +++ b/build.sbt @@ -14,8 +14,9 @@ val `scala3-language-server` = Build.`scala3-language-server` val `scala3-bench` = Build.`scala3-bench` val `scala3-bench-bootstrapped` = Build.`scala3-bench-bootstrapped` val `scala3-bench-micro` = Build.`scala3-bench-micro` -val `stdlib-bootstrapped` = Build.`stdlib-bootstrapped` -val `stdlib-bootstrapped-tasty-tests` = Build.`stdlib-bootstrapped-tasty-tests` +val `scala2-library-bootstrapped` = Build.`scala2-library-bootstrapped` +val `scala2-library-tasty` = Build.`scala2-library-tasty` +val `scala2-library-tasty-tests` = Build.`scala2-library-tasty-tests` val `tasty-core` = Build.`tasty-core` val `tasty-core-bootstrapped` = Build.`tasty-core-bootstrapped` val `tasty-core-scala2` = Build.`tasty-core-scala2` @@ -28,6 +29,8 @@ val `scala3-bench-run` = Build.`scala3-bench-run` val dist = Build.dist val `community-build` = Build.`community-build` val `sbt-community-build` = Build.`sbt-community-build` +val `scala3-presentation-compiler` = Build.`scala3-presentation-compiler` +val `scala3-presentation-compiler-bootstrapped` = 
Build.`scala3-presentation-compiler-bootstrapped` val sjsSandbox = Build.sjsSandbox val sjsJUnitTests = Build.sjsJUnitTests diff --git a/changelogs/3.4.0-RC1.md b/changelogs/3.4.0-RC1.md new file mode 100644 index 000000000000..79695cad83f9 --- /dev/null +++ b/changelogs/3.4.0-RC1.md @@ -0,0 +1,466 @@ +# Highlights of the release + +- Make polymorphic functions more efficient and expressive [#17548](https://github.com/lampepfl/dotty/pull/17548) +- SIP-56: Better foundations for match types [#18262](https://github.com/lampepfl/dotty/pull/18262) +- Make SIP 54 (Multi-Source Extension Overloads) a standard feature [#17441](https://github.com/lampepfl/dotty/pull/17441) +- Value parameter inference for polymorphic lambdas [#18041](https://github.com/lampepfl/dotty/pull/18041) +- Add `@publicInBinary` annotation and `-WunstableInlineAccessors` linting flag [#18402](https://github.com/lampepfl/dotty/pull/18402) +- Stabilize Quotes `defn.PolyFunction` [#18480](https://github.com/lampepfl/dotty/pull/18480) +- Stabilize Quotes `Flags.AbsOverride` [#18482](https://github.com/lampepfl/dotty/pull/18482) +- Add `-experimental` compiler flags [#18571](https://github.com/lampepfl/dotty/pull/18571) +- Stabilize SIP-53 (quote pattern explicit type variable syntax) [#18574](https://github.com/lampepfl/dotty/pull/18574) +- Add reflect TypeRepr.dealiasKeepOpaques [#18583](https://github.com/lampepfl/dotty/pull/18583) +- Add attributes section to TASTy and use it for Stdlib TASTy [#18599](https://github.com/lampepfl/dotty/pull/18599) +- Error when reading class file with unknown newer jdk version [#18618](https://github.com/lampepfl/dotty/pull/18618) +- Add support for xsbti.compile.CompileProgress [#18739](https://github.com/lampepfl/dotty/pull/18739) +- Improve type inference for functions like fold [#18780](https://github.com/lampepfl/dotty/pull/18780) +- Improve error message for mismatched tasty versions, allow configuration of header unpickler [#18828](https://github.com/lampepfl/dotty/pull/18828) +- In 3.4 make refutable patterns in a for comprehension an error [#18842](https://github.com/lampepfl/dotty/pull/18842) +- Disallow use of PolyFunction in user code [#18920](https://github.com/lampepfl/dotty/pull/18920) +- Store source file in TASTY attributes [#18948](https://github.com/lampepfl/dotty/pull/18948) +- First step to pipelining support - enable reading Java symbols from TASTy [#19074](https://github.com/lampepfl/dotty/pull/19074) +- Activate constrainResult fix in 3.4 [#19253](https://github.com/lampepfl/dotty/pull/19253) +- Parallelise JVM backend - Scala 2 port [#15392](https://github.com/lampepfl/dotty/pull/15392) + +## Deprecation warnings for old syntax + +- `_` type wildcards [#18813](https://github.com/lampepfl/dotty/pull/18813) +- `private[this]` [#18819](https://github.com/lampepfl/dotty/pull/18819) +- `var x = _` [#18821](https://github.com/lampepfl/dotty/pull/18821) +- `with` as a type operator [#18837](https://github.com/lampepfl/dotty/pull/18837) +- `xs: _*` varargs [#18872](https://github.com/lampepfl/dotty/pull/18872) +- trailing `_` to force eta expansion [#18926](https://github.com/lampepfl/dotty/pull/18926) + +# Other changes and fixes + +## Backend + +- Count size of parameters for platform limit check [#18464](https://github.com/lampepfl/dotty/pull/18464) +- Don't emit line number for synthetic unit value [#18717](https://github.com/lampepfl/dotty/pull/18717) +- Avoid too eager transform of $outer for lhs & accessor rhs [#18949](https://github.com/lampepfl/dotty/pull/18949) +- 
Make more anonymous functions static [#19251](https://github.com/lampepfl/dotty/pull/19251) +- Fix deadlock in initialization of CoreBTypes using Lazy container [#19298](https://github.com/lampepfl/dotty/pull/19298) +- Fix #18769: Allow HK type args in Java signatures. [#18883](https://github.com/lampepfl/dotty/pull/18883) +- Loading symbols from TASTy files directly [#17594](https://github.com/lampepfl/dotty/pull/17594) +- Use dedicated equals method for universal equality of chars [#18770](https://github.com/lampepfl/dotty/pull/18770) + +## Erasure + +- Get generic signature of fields entered after erasure from their accessor [#19207](https://github.com/lampepfl/dotty/pull/19207) +- Detect case where two alternatives are the same after widening ExprTypes [#18787](https://github.com/lampepfl/dotty/pull/18787) +- Improve erased params logic [#18433](https://github.com/lampepfl/dotty/pull/18433) + +## Experimental: Capture Checking + +- Fix capture set variable installation in Setup [#18885](https://github.com/lampepfl/dotty/pull/18885) +- Don't follow opaque aliases when transforming sym info for cc [#18929](https://github.com/lampepfl/dotty/pull/18929) +- Reset `comparersInUse` to zero in `ContextState.reset` [#18915](https://github.com/lampepfl/dotty/pull/18915) +- Special handling of experimental.captureChecking import [#17427](https://github.com/lampepfl/dotty/pull/17427) +- Change handling of curried function types in capture checking [#18131](https://github.com/lampepfl/dotty/pull/18131) +- Fix #18246: correctly compute capture sets in `TypeComparer.glb` [#18254](https://github.com/lampepfl/dotty/pull/18254) +- New capture escape checking based on levels [#18463](https://github.com/lampepfl/dotty/pull/18463) +- A more robust scheme for resetting denotations after Recheck [#18534](https://github.com/lampepfl/dotty/pull/18534) +- A more flexible scheme for handling the universal capability [#18699](https://github.com/lampepfl/dotty/pull/18699) +- Fix potential soundness hole when adding references to a mapped capture set [#18758](https://github.com/lampepfl/dotty/pull/18758) +- Alternative scheme for cc encapsulation [#18899](https://github.com/lampepfl/dotty/pull/18899) +- Make reach refinement shallow [#19171](https://github.com/lampepfl/dotty/pull/19171) + +## F-bounds + +- Don't check bounds of Java applications in Java units [#18054](https://github.com/lampepfl/dotty/pull/18054) + +## GADTs + +- Avoid embedding SelectionProtos in Conversions [#17755](https://github.com/lampepfl/dotty/pull/17755) +- Freeze constraints while calculating GADT full bounds [#18222](https://github.com/lampepfl/dotty/pull/18222) + +## Implicits + +- Followup fix to transparent inline conversion [#18130](https://github.com/lampepfl/dotty/pull/18130) +- Select local implicits over name-imported over wildcard imported [#18203](https://github.com/lampepfl/dotty/pull/18203) +- Fix how implicit candidates are combined [#18321](https://github.com/lampepfl/dotty/pull/18321) +- Improve error message about missing type of context function parameter [#18788](https://github.com/lampepfl/dotty/pull/18788) +- Support implicit arguments before extractor method [#18671](https://github.com/lampepfl/dotty/pull/18671) +- Tweak convertible implicits fix [#18727](https://github.com/lampepfl/dotty/pull/18727) + +## Incremental Compilation + +- Make incremental compilation aware of synthesized mirrors [#18310](https://github.com/lampepfl/dotty/pull/18310) + +## Inference + +- Honour hard unions in lubbing and param replacing
[#18680](https://github.com/lampepfl/dotty/pull/18680) + +## Infrastructure + +- Use -Yscala2-library-tasty to add Scala 2 lib TASTY to scalac (internal only) [#18613](https://github.com/lampepfl/dotty/pull/18613) +- Rename `stdlib-bootstrapped-tasty` to `scala2-library-tasty` [#18615](https://github.com/lampepfl/dotty/pull/18615) +- Fix #19286: Freeze rubygems-update at < 3.5.0. [#19288](https://github.com/lampepfl/dotty/pull/19288) + +## Initialization + +- Fix #17997: Handle intersection type as this type of super type [#18069](https://github.com/lampepfl/dotty/pull/18069) +- Add test for issue #17997 affecting the global object initialization checker [#18141](https://github.com/lampepfl/dotty/pull/18141) +- Fix i18624 and add test case for it [#18859](https://github.com/lampepfl/dotty/pull/18859) +- Treat new Array(0) as immutable [#19192](https://github.com/lampepfl/dotty/pull/19192) +- Fix #18407: Ignore Quote/Slice in init checker [#18848](https://github.com/lampepfl/dotty/pull/18848) +- Check safe initialization of static objects [#16970](https://github.com/lampepfl/dotty/pull/16970) +- Pattern match support in checking global objects [#18127](https://github.com/lampepfl/dotty/pull/18127) +- Fix crash in global object initialization checker when select target has no source [#18627](https://github.com/lampepfl/dotty/pull/18627) +- Fix warning underlining in global init checker [#18668](https://github.com/lampepfl/dotty/pull/18668) +- Fix i18629 [#18839](https://github.com/lampepfl/dotty/pull/18839) +- I18628 [#18841](https://github.com/lampepfl/dotty/pull/18841) +- Make safe init checker skip global objects [#18906](https://github.com/lampepfl/dotty/pull/18906) +- Handle local lazy vals properly [#18998](https://github.com/lampepfl/dotty/pull/18998) + +## Inline + +- Fix regression: inline match crash when rhs uses private inlined methods [#18595](https://github.com/lampepfl/dotty/pull/18595) +- Add structural classes of dynamicApply before inlining [#18766](https://github.com/lampepfl/dotty/pull/18766) +- Set missing expansion span for copied inlined node [#18229](https://github.com/lampepfl/dotty/pull/18229) +- Fix `callTrace` of inlined methods [#18738](https://github.com/lampepfl/dotty/pull/18738) + +## Linting + +- Keep tree of type ascriptions of quote pattern splices [#18412](https://github.com/lampepfl/dotty/pull/18412) +- Fix false positive in WUnused for renamed path-dependent imports [#18468](https://github.com/lampepfl/dotty/pull/18468) +- Fix false positive in WUnused for renamed path-dependent imports (2) [#18617](https://github.com/lampepfl/dotty/pull/18617) +- Fix wunused false positive on CanEqual [#18641](https://github.com/lampepfl/dotty/pull/18641) +- Implement -Xlint:private-shadow, type-parameter-shadow [#17622](https://github.com/lampepfl/dotty/pull/17622) +- Fix: reversed wconf parsing order to mirror scala 2 [#18503](https://github.com/lampepfl/dotty/pull/18503) +- Revert Fix false positive in WUnused for renamed path-dependent imports [#18514](https://github.com/lampepfl/dotty/pull/18514) + +## Macro Annotations + +- Enter missing symbols generated by the MacroAnnotation expansion [#18826](https://github.com/lampepfl/dotty/pull/18826) + +## Match Types + +- Allow Tuple.Head and Tuple.Tail to work with EmptyTuple [#17189](https://github.com/lampepfl/dotty/pull/17189) +- Fix match type reduction with avoided types [#18043](https://github.com/lampepfl/dotty/pull/18043) +- Strip LazyRef before calling simplified, in MT reduction 
[#18218](https://github.com/lampepfl/dotty/pull/18218) +- Fix MT separate compilation bug [#18398](https://github.com/lampepfl/dotty/pull/18398) +- Do not show deprecation warning for `_` in type match case [#18887](https://github.com/lampepfl/dotty/pull/18887) + +## Nullability + +- Improve logic when to emit pattern type error [#18093](https://github.com/lampepfl/dotty/pull/18093) +- Allow nullability flow typing even in presence of pattern match [#18206](https://github.com/lampepfl/dotty/pull/18206) +- Fix #11967: flow typing nullability in pattern matches [#18212](https://github.com/lampepfl/dotty/pull/18212) +- Fix #18282: consider Predef.eq/ne in nullability flow typing [#18299](https://github.com/lampepfl/dotty/pull/18299) +- Make `this.type` nullable again (unless under -Yexplicit-nulls). [#18399](https://github.com/lampepfl/dotty/pull/18399) + +## Opaque Types + +- Type ascribe trees that require opaque type usage [#18101](https://github.com/lampepfl/dotty/pull/18101) + +## Parser + +- Fix selecting terms using _root_ [#18335](https://github.com/lampepfl/dotty/pull/18335) +- Tweak java getlitch not to skip zero [#18491](https://github.com/lampepfl/dotty/pull/18491) +- Fix i18518 [#18520](https://github.com/lampepfl/dotty/pull/18520) +- Only apply `future` patches on `future-migration` [#18820](https://github.com/lampepfl/dotty/pull/18820) +- Parser simple expression error recovery change from `null` to `???` [#19103](https://github.com/lampepfl/dotty/pull/19103) + +## Pattern Matching + +- Fix syntax and parsing of vararg patterns [#18055](https://github.com/lampepfl/dotty/pull/18055) +- Avoid over widening in SpaceEngine [#18252](https://github.com/lampepfl/dotty/pull/18252) +- Fix regression in exhaustivity of HK types [#18303](https://github.com/lampepfl/dotty/pull/18303) +- Fix missing case in isSubspace, which broke reachability [#18326](https://github.com/lampepfl/dotty/pull/18326) +- Unsuppress unchecked warnings [#18377](https://github.com/lampepfl/dotty/pull/18377) +- Consider extension methods in Space isSameUnapply [#18642](https://github.com/lampepfl/dotty/pull/18642) +- Fix unreachable warning in deeply nested sealed hierarchy [#18706](https://github.com/lampepfl/dotty/pull/18706) +- Remove unnecessary and recursive Space decomposition [#19216](https://github.com/lampepfl/dotty/pull/19216) +- Prioritise sequence-matches over product-sequence-matches [#19260](https://github.com/lampepfl/dotty/pull/19260) +- Propagate constant in result of inline match [#18455](https://github.com/lampepfl/dotty/pull/18455) +- Disable match analysis in inlined trees [#19190](https://github.com/lampepfl/dotty/pull/19190) +- Teach provablyDisjoint about AnyKind [#18510](https://github.com/lampepfl/dotty/pull/18510) +- Warn about unchecked type tests in primitive catch cases [#19206](https://github.com/lampepfl/dotty/pull/19206) +- Reprioritise seq-match over product-seq-match [#19277](https://github.com/lampepfl/dotty/pull/19277) +- Fix exhaustivity due to separate TypeVar lambdas [#18616](https://github.com/lampepfl/dotty/pull/18616) + +## Presentation Compiler + +- Support completions for extension definition parameter [#18331](https://github.com/lampepfl/dotty/pull/18331) +- Fix: Don't collect map, flatMap, withFilter in for-comprehension [#18430](https://github.com/lampepfl/dotty/pull/18430) +- Bugfix: Catch exception from the compiler for broken shadowed pickles [#18502](https://github.com/lampepfl/dotty/pull/18502) +- Bugfix: highlight for enum type params
[#18528](https://github.com/lampepfl/dotty/pull/18528) +- Bugfix: No signature help for local methods [#18594](https://github.com/lampepfl/dotty/pull/18594) +- Bugfix: add `moduleClass` imported symbols in `IndexedContext` [#18620](https://github.com/lampepfl/dotty/pull/18620) +- Bugfix: Named args completions with default values [#18633](https://github.com/lampepfl/dotty/pull/18633) +- Fix: match completions for type aliases [#18667](https://github.com/lampepfl/dotty/pull/18667) +- Bugfix: add multiline comment completion [#18703](https://github.com/lampepfl/dotty/pull/18703) +- Bugfix: Backticked named arguments [#18704](https://github.com/lampepfl/dotty/pull/18704) +- Bugfix: [metals] Case completions for tuple type [#18751](https://github.com/lampepfl/dotty/pull/18751) +- Completions should prepend, not replace as it is for Scala 2 [#18803](https://github.com/lampepfl/dotty/pull/18803) +- Bugfix: rename end marker [#18838](https://github.com/lampepfl/dotty/pull/18838) +- Presentation compiler: Bugfix for semantic tokens and synthetic decorations [#18955](https://github.com/lampepfl/dotty/pull/18955) +- Show documentation for value forwarders in completions [#19200](https://github.com/lampepfl/dotty/pull/19200) +- Bugfix: Document highlight on class constructors [#19209](https://github.com/lampepfl/dotty/pull/19209) +- Bugfix: Completions for extension methods with name conflict [#19225](https://github.com/lampepfl/dotty/pull/19225) + +## Polyfunctions + +- Check user defined PolyFunction refinements [#18457](https://github.com/lampepfl/dotty/pull/18457) +- Support polymorphic functions with erased parameters [#18293](https://github.com/lampepfl/dotty/pull/18293) +- Use `PolyFunction` instead of `ErasedFunction` [#18295](https://github.com/lampepfl/dotty/pull/18295) + +## Quotes + +- Support type variable with bounds in quoted pattern [#16910](https://github.com/lampepfl/dotty/pull/16910) +- Add new EXPLICITtpt to TASTy format [#17298](https://github.com/lampepfl/dotty/pull/17298) +- Inhibit typer to insert contextual arguments when it is inside arguments of HOAS patterns [#18040](https://github.com/lampepfl/dotty/pull/18040) +- Compile quote patterns directly into QuotePattern AST [#18133](https://github.com/lampepfl/dotty/pull/18133) +- Add missing span to synthesized product mirror [#18354](https://github.com/lampepfl/dotty/pull/18354) +- Improve non-static macro implementation error message [#18405](https://github.com/lampepfl/dotty/pull/18405) +- Fix scala 2 macros in traits with type parameters [#18663](https://github.com/lampepfl/dotty/pull/18663) +- Patch `underlyingArgument` to avoid mapping into modules [#18923](https://github.com/lampepfl/dotty/pull/18923) +- Fallback erasing term references [#18731](https://github.com/lampepfl/dotty/pull/18731) +- Fix ignored type variable bound warning in type quote pattern [#18199](https://github.com/lampepfl/dotty/pull/18199) +- Splice hole with singleton captures [#18357](https://github.com/lampepfl/dotty/pull/18357) +- Fix macros with erased arguments [#18431](https://github.com/lampepfl/dotty/pull/18431) +- Deprecate 3-arg `FunctionClass` constructor [#18472](https://github.com/lampepfl/dotty/pull/18472) +- Deprecate `Quotes` `{MethodType,TermParamClause}.isErased` [#18479](https://github.com/lampepfl/dotty/pull/18479) +- Avoid crashes on missing positions [#19250](https://github.com/lampepfl/dotty/pull/19250) + +## Reflection + +- Add reflect.ValOrDefDef [#16974](https://github.com/lampepfl/dotty/pull/16974) +- Check New tree for 
ill-formed module instantiations [#17553](https://github.com/lampepfl/dotty/pull/17553) +- Add reflect `TypeLambda.paramVariances` [#17568](https://github.com/lampepfl/dotty/pull/17568) +- Make check flags for `newMethod`, `newVal` and `newBind` in Quotes API less restrictive [#18217](https://github.com/lampepfl/dotty/pull/18217) +- Normalise mirrorType for mirror Synthesis [#19199](https://github.com/lampepfl/dotty/pull/19199) +- Add reflect `defn.FunctionClass` overloads [#16849](https://github.com/lampepfl/dotty/pull/16849) +- Stabilize reflect flag `JavaAnnotation` [#19267](https://github.com/lampepfl/dotty/pull/19267) +- Stabilize reflect `paramVariance` [#19268](https://github.com/lampepfl/dotty/pull/19268) + +## Reporting + +- Take into account the result type of inline implicit conversions unless they are transparent [#17924](https://github.com/lampepfl/dotty/pull/17924) +- Check if a fatal warning issued in typer is silenced, before converting it into an error [#18089](https://github.com/lampepfl/dotty/pull/18089) +- Elide companion defs to an `object` extending `AnyVal` [#18451](https://github.com/lampepfl/dotty/pull/18451) +- Add regression test for issue i18493 [#18497](https://github.com/lampepfl/dotty/pull/18497) +- Add better explanation to error message [#18665](https://github.com/lampepfl/dotty/pull/18665) +- Better error message when accessing private members [#18690](https://github.com/lampepfl/dotty/pull/18690) +- Improve message for discarded pure non-Unit values [#18723](https://github.com/lampepfl/dotty/pull/18723) +- Better error message when a pattern match extractor is not found. [#18725](https://github.com/lampepfl/dotty/pull/18725) +- Give "did you mean ...?" hints also for simple identifiers [#18747](https://github.com/lampepfl/dotty/pull/18747) +- Better error for definition followed by keyword [#18752](https://github.com/lampepfl/dotty/pull/18752) +- Better explain message for 'pattern expected' [#18753](https://github.com/lampepfl/dotty/pull/18753) +- Improve failure message of enum `fromOrdinal`/`valueOf` [#19182](https://github.com/lampepfl/dotty/pull/19182) +- Fix type mismatch error confusion between types with same simple name [#19204](https://github.com/lampepfl/dotty/pull/19204) +- Add hint for nested quotes missing staged `Quotes` [#18755](https://github.com/lampepfl/dotty/pull/18755) +- Better error messages for missing commas and more [#18785](https://github.com/lampepfl/dotty/pull/18785) +- Fix imported twice error messages [#18102](https://github.com/lampepfl/dotty/pull/18102) +- Improve error message for inaccessible types [#18406](https://github.com/lampepfl/dotty/pull/18406) +- Future migration warning for `with` type operator [#18818](https://github.com/lampepfl/dotty/pull/18818) +- Improve assertion error message for `Apply` and `TypeApply` [#18700](https://github.com/lampepfl/dotty/pull/18700) +- Shorten traces for TypeMismatch errors under -explain [#18742](https://github.com/lampepfl/dotty/pull/18742) +- Improve `with` in type migration warning [#18852](https://github.com/lampepfl/dotty/pull/18852) +- Future migration warning for alphanumeric infix operator [#18908](https://github.com/lampepfl/dotty/pull/18908) +- Make sure that trace is shown correctly in the presence of invalid line numbers [#18930](https://github.com/lampepfl/dotty/pull/18930) +- Add migration warning for XML literals in language future [#19101](https://github.com/lampepfl/dotty/pull/19101) +- Avoid diagnostic message forcing crashing the compiler
[#19113](https://github.com/lampepfl/dotty/pull/19113) +- Make sure that the stacktrace is shown with `-Ydebug-unpickling` [#19115](https://github.com/lampepfl/dotty/pull/19115) +- Improve `asExprOf` cast error formatting [#19195](https://github.com/lampepfl/dotty/pull/19195) +- Do not warn on underscore wildcard type in pattern [#19249](https://github.com/lampepfl/dotty/pull/19249) + +## Scala-JS + +- Fix #18658: Handle varargs of generic types in `JSExportsGen`. [#18659](https://github.com/lampepfl/dotty/pull/18659) + +## Scaladoc + +- Fix incorrect comment parser used in nightly scaladoc [#18523](https://github.com/lampepfl/dotty/pull/18523) + +## SemanticDB + +- Export diagnostics (including unused warnings) to SemanticDB [#17835](https://github.com/lampepfl/dotty/pull/17835) +- Bugfix: Incorrect semanticdb span on Selectable [#18576](https://github.com/lampepfl/dotty/pull/18576) +- Bugfix: in semanticdb make synthetic apply disambiguator consistent w/ Scala 2 implicit [#17341](https://github.com/lampepfl/dotty/pull/17341) + +## Standard Library + +- Intrinsify `constValueTuple` and `summonAll` [#18013](https://github.com/lampepfl/dotty/pull/18013) +- Fix #18609: Add language.`3.4` and language.`3.4-migration`. [#18610](https://github.com/lampepfl/dotty/pull/18610) + +## TASTy format + +- Eliminate FromJavaObject from TASTy of Java sources [#19259](https://github.com/lampepfl/dotty/pull/19259) +- Add new HOLETYPES to TASTy format [#17225](https://github.com/lampepfl/dotty/pull/17225) +- Add capture checking attributes to TASTy [#19033](https://github.com/lampepfl/dotty/pull/19033) +- Add TASTyInfo abstraction [#19089](https://github.com/lampepfl/dotty/pull/19089) +- Add UTF8 abstraction in the TASTy format [#19090](https://github.com/lampepfl/dotty/pull/19090) + +## Tooling + +- Don't add explanation twice [#18779](https://github.com/lampepfl/dotty/pull/18779) +- ExtractDependencies uses more efficient caching [#18403](https://github.com/lampepfl/dotty/pull/18403) +- Introduce the SourceVersions 3.4 and 3.4-migration; make 3.4 the default. [#18501](https://github.com/lampepfl/dotty/pull/18501) +- Bugfix: Completions for named args in wrong order [#18702](https://github.com/lampepfl/dotty/pull/18702) +- Align unpickled Scala 2 accessors encoding with Scala 3 [#18874](https://github.com/lampepfl/dotty/pull/18874) +- Reinterpret Scala 2 case accessors `xyz$access$idx` [#18907](https://github.com/lampepfl/dotty/pull/18907) +- Presentation-compiler: Add synthetic decorations [#18951](https://github.com/lampepfl/dotty/pull/18951) +- Add compilation unit info to `ClassSymbol` [#19010](https://github.com/lampepfl/dotty/pull/19010) +- Make sure that patches for 3.0 are also applied in later versions [#19018](https://github.com/lampepfl/dotty/pull/19018) + +## Transform + +- Also consider @targetName when checking private overrides [#18361](https://github.com/lampepfl/dotty/pull/18361) +- Teach PostTyper to handle untupled context closures [#17739](https://github.com/lampepfl/dotty/pull/17739) +- Properly dealias tuple types when specializing [#18724](https://github.com/lampepfl/dotty/pull/18724) +- Fix condition in prefixIsElidable to prevent compiler crash [#18924](https://github.com/lampepfl/dotty/pull/18924) +- Fix #18816: Transfer the span of rewired `This` nodes in `fullyParameterizedDef`. [#18840](https://github.com/lampepfl/dotty/pull/18840) +- List(...) 
optimization to avoid intermediate array [#17166](https://github.com/lampepfl/dotty/pull/17166) +- Make Array.apply an intrinsic [#18537](https://github.com/lampepfl/dotty/pull/18537) +- Add missing span to extension method select [#18557](https://github.com/lampepfl/dotty/pull/18557) + +## Tuples + +- Handle TupleXXL in match analysis [#19212](https://github.com/lampepfl/dotty/pull/19212) +- Add `reverse` method to `NonEmptyTuple` [#13752](https://github.com/lampepfl/dotty/pull/13752) +- Refine handling of pattern binders for large tuples [#19085](https://github.com/lampepfl/dotty/pull/19085) +- Introduce `Tuple.ReverseOnto` and use it in `Tuple.reverse` [#19183](https://github.com/lampepfl/dotty/pull/19183) + +## Typeclass Derivation + +- Consider all parents when checking access to the children of a sum [#19083](https://github.com/lampepfl/dotty/pull/19083) + +## Typer + +- Fix logic when comparing var/def bindings with val refinements [#18049](https://github.com/lampepfl/dotty/pull/18049) +- Fix variance checking in refinements [#18053](https://github.com/lampepfl/dotty/pull/18053) +- Fix accessibleType for package object prefixes [#18057](https://github.com/lampepfl/dotty/pull/18057) +- Refix avoid GADT casting with ProtoTypes [#18085](https://github.com/lampepfl/dotty/pull/18085) +- Avoid shadowing by private definitions in more situations [#18142](https://github.com/lampepfl/dotty/pull/18142) +- Refine infoDependsOnPrefix [#18204](https://github.com/lampepfl/dotty/pull/18204) +- Fix spurious subtype check pruning when both sides have unions [#18213](https://github.com/lampepfl/dotty/pull/18213) +- Reimplement support for type aliases in SAM types [#18317](https://github.com/lampepfl/dotty/pull/18317) +- Fix adaptation of constants to constant type aliases [#18360](https://github.com/lampepfl/dotty/pull/18360) +- Issue "positional after named argument" errors [#18363](https://github.com/lampepfl/dotty/pull/18363) +- Deprecate `ops.long.S` [#18426](https://github.com/lampepfl/dotty/pull/18426) +- Tweak selection from self types [#18467](https://github.com/lampepfl/dotty/pull/18467) +- Use the unwidened type when casting structural calls [#18527](https://github.com/lampepfl/dotty/pull/18527) +- Fix #18649: Use loBound of param types when materializing a context function. 
[#18651](https://github.com/lampepfl/dotty/pull/18651) +- Identify structural trees on Match Type qualifiers [#18765](https://github.com/lampepfl/dotty/pull/18765) +- Tweak approximation of type variables when computing default types [#18798](https://github.com/lampepfl/dotty/pull/18798) +- Admit parametric aliases of classes in parent typing [#18849](https://github.com/lampepfl/dotty/pull/18849) +- Also add privateWithin when creating constructor proxies [#18893](https://github.com/lampepfl/dotty/pull/18893) +- Revert part of `Simplify defn.FunctionOf.unapply` [#19012](https://github.com/lampepfl/dotty/pull/19012) +- Check @targetName when subtyping Refined Types [#19081](https://github.com/lampepfl/dotty/pull/19081) +- Record failures to adapt application arguments [#18269](https://github.com/lampepfl/dotty/pull/18269) +- Improve handling of AndTypes on the LHS of subtype comparisons [#18235](https://github.com/lampepfl/dotty/pull/18235) +- Allow inferred parameter types always, when eta-expanding [#18771](https://github.com/lampepfl/dotty/pull/18771) +- Fix failing bounds check on default getter [#18419](https://github.com/lampepfl/dotty/pull/18419) +- Use constructor's default getters in case class synthetic `apply` methods [#18716](https://github.com/lampepfl/dotty/pull/18716) +- Keep qualifier of Ident when selecting setter [#18714](https://github.com/lampepfl/dotty/pull/18714) +- Retract SynthesizeExtMethodReceiver mode when going deeper in overloading resolution [#18759](https://github.com/lampepfl/dotty/pull/18759) +- Constant fold all the number conversion methods [#17446](https://github.com/lampepfl/dotty/pull/17446) +- Refine criterion when to widen types [#17180](https://github.com/lampepfl/dotty/pull/17180) +- Run all MatchType reduction under Mode.Type [#17937](https://github.com/lampepfl/dotty/pull/17937) +- Force consistent MT post-redux normalisation, disallow infinite match types [#18073](https://github.com/lampepfl/dotty/pull/18073) +- Fix #17467: Limit isNullable widening to stable TermRefs; remove under explicit nulls. [#17470](https://github.com/lampepfl/dotty/pull/17470) +- Disallow naming the root package, except for selections [#18187](https://github.com/lampepfl/dotty/pull/18187) +- Contextual varargs parameters [#18186](https://github.com/lampepfl/dotty/pull/18186) +- Encode the name of the attribute in Selectable.selectDynamic [#18928](https://github.com/lampepfl/dotty/pull/18928) +- Remove linearization requirement for override ref checks from java classes [#18953](https://github.com/lampepfl/dotty/pull/18953) +- Fix type inferencing (constraining) regressions [#19189](https://github.com/lampepfl/dotty/pull/19189) +- Repeated params must correspond in override [#16836](https://github.com/lampepfl/dotty/pull/16836) +- Convert SAM result types to function types [#17740](https://github.com/lampepfl/dotty/pull/17740) +- Disallow `infix` objects [#17966](https://github.com/lampepfl/dotty/pull/17966) +- Fix hasMatchingMember handling NoDenotation [#17977](https://github.com/lampepfl/dotty/pull/17977) +- Fix: disallow toplevel infix definitions for vals, vars, givens, methods and implicits [#17994](https://github.com/lampepfl/dotty/pull/17994) +- Curried methods are not valid SAM methods [#18110](https://github.com/lampepfl/dotty/pull/18110) +- Fix #17115: Try to normalize while computing `typeSize`.
[#18386](https://github.com/lampepfl/dotty/pull/18386) +- Add default arguments to derived refined type [#18435](https://github.com/lampepfl/dotty/pull/18435) +- Handle dependent context functions [#18443](https://github.com/lampepfl/dotty/pull/18443) +- Fix variance loophole for private vars [#18693](https://github.com/lampepfl/dotty/pull/18693) +- Avoid crash arising from trying to find conversions from polymorphic singleton types [#18760](https://github.com/lampepfl/dotty/pull/18760) +- Allow inner classes of universal traits [#18796](https://github.com/lampepfl/dotty/pull/18796) +- Prevent crash when extension not found [#18830](https://github.com/lampepfl/dotty/pull/18830) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.3.1..3.4.0-RC1` these are: + +``` + 458 Martin Odersky + 291 Nicolas Stucki + 132 Fengyun Liu + 118 Dale Wijnand + 77 Jamie Thompson + 69 Sébastien Doeraene + 49 Paweł Marks + 32 Chris Kipp + 27 Guillaume Martres + 26 Rikito Taniguchi + 21 Yichen Xu + 19 EnzeXing + 14 Szymon Rodziewicz + 13 Lucas Leblanc + 12 Jakub Ciesluk + 12 Jędrzej Rochala + 12 Katarzyna Marek + 11 Carl + 10 David Hua + 9 Florian3k + 9 Wojciech Mazur + 8 Eugene Flesselle + 8 ghostbuster91 + 7 Hamza Remmal + 7 Ondrej Lhotak + 7 Quentin Bernet + 6 Jan Chyb + 6 Julien Richard-Foy + 6 Kacper Korban + 6 Seth Tisue + 5 Lorenzo Gabriele + 5 Matt Bovel + 5 Som Snytt + 5 Yuito Murase + 5 dependabot[bot] + 3 David + 3 Lucas + 3 Pascal Weisenburger + 3 Tomasz Godzik + 2 Aleksander Rainko + 2 Decel + 2 Guillaume Raffin + 2 Ondřej Lhoták + 2 Oron Port + 2 danecek + 2 rochala + 1 Adam Dąbrowski + 1 Aleksey Troitskiy + 1 Arnout Engelen + 1 Ausmarton Zarino Fernandes + 1 Bjorn Regnell + 1 Daniel Esik + 1 Eugene Yokota + 1 François Monniot + 1 Jakub Cieśluk + 1 John Duffell + 1 John M. Higgins + 1 Justin Reardon + 1 Kai + 1 Kisaragi + 1 Lucas Nouguier + 1 Lukas Rytz + 1 LydiaSkuse + 1 Martin Kucera + 1 Martin Kučera + 1 Matthew Rooney + 1 Matthias Kurz + 1 Mikołaj Fornal + 1 Nicolas Almerge + 1 Preveen P + 1 Shardul Chiplunkar + 1 Stefan Wachter + 1 philippus + 1 q-ata + 1 slim +``` diff --git a/changelogs/3.4.0-RC2.md b/changelogs/3.4.0-RC2.md new file mode 100644 index 000000000000..84d85e19efb0 --- /dev/null +++ b/changelogs/3.4.0-RC2.md @@ -0,0 +1,22 @@ +# Backported fixes + +- Fix expandParam's use of argForParam/isArgPrefixOf. 
[#19412](https://github.com/lampepfl/dotty/pull/19412) +- Remove ascriptionVarargsUnpacking as we never used it [#19399](https://github.com/lampepfl/dotty/pull/19399) +- Make explicit arguments for context bounds an error from 3.5 [#19316](https://github.com/lampepfl/dotty/pull/19316) +- Avoid generating given definitions that loop [#19282](https://github.com/lampepfl/dotty/pull/19282) +- Turn given loop prevention on for -source future [#19392](https://github.com/lampepfl/dotty/pull/19392) +- Fix algorithm to prevent recursive givens [#19411](https://github.com/lampepfl/dotty/pull/19411) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.4.0-RC1..3.4.0-RC2` these are: + +``` + 15 Martin Odersky + 4 Nicolas Stucki + 3 Paweł Marks + 1 Dale Wijnand + 1 Jan Chyb +``` diff --git a/changelogs/3.4.0-RC3.md b/changelogs/3.4.0-RC3.md new file mode 100644 index 000000000000..57b360d2399c --- /dev/null +++ b/changelogs/3.4.0-RC3.md @@ -0,0 +1,17 @@ +# Backported fixes + +- Sync language.scala with main and backport "Add tests for context bounds migration" [#19515](https://github.com/lampepfl/dotty/pull/19515) +- Handle default implicits to context parameters under -3.4-migration [#19512](https://github.com/lampepfl/dotty/pull/19512) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.4.0-RC2..3.4.0-RC3` these are: + +``` + 4 Paweł Marks + 1 Martin Odersky + 1 Nicolas Stucki +``` diff --git a/changelogs/3.4.0-RC4.md b/changelogs/3.4.0-RC4.md new file mode 100644 index 000000000000..ecbcdabdd586 --- /dev/null +++ b/changelogs/3.4.0-RC4.md @@ -0,0 +1,14 @@ +# Backported fixes + +- Update jsoup dependency of Scaladoc to 7.2 [#19584](https://github.com/lampepfl/dotty/pull/19584) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.4.0-RC3..3.4.0-RC4` these are: + +``` + 2 Paweł Marks + 1 Fabián Heredia Montiel +``` diff --git a/changelogs/3.4.0.md b/changelogs/3.4.0.md new file mode 100644 index 000000000000..cf6ee8d010d5 --- /dev/null +++ b/changelogs/3.4.0.md @@ -0,0 +1,474 @@ +# Highlights of the release + +- Make polymorphic functions more efficient and expressive [#17548](https://github.com/lampepfl/dotty/pull/17548) +- SIP-56: Better foundations for match types [#18262](https://github.com/lampepfl/dotty/pull/18262) +- Make SIP 54 (Multi-Source Extension Overloads) a standard feature [#17441](https://github.com/lampepfl/dotty/pull/17441) +- Value parameter inference for polymorphic lambdas [#18041](https://github.com/lampepfl/dotty/pull/18041) +- Add `@publicInBinary` annotation and `-WunstableInlineAccessors` linting flag [#18402](https://github.com/lampepfl/dotty/pull/18402) +- Stabilize Quotes `defn.PolyFunction` [#18480](https://github.com/lampepfl/dotty/pull/18480) +- Stabilize Quotes `Flags.AbsOverride` [#18482](https://github.com/lampepfl/dotty/pull/18482) +- Add `-experimental` compiler flags [#18571](https://github.com/lampepfl/dotty/pull/18571) +- Stabilize SIP-53 (quote pattern explicit type variable syntax) [#18574](https://github.com/lampepfl/dotty/pull/18574) +- Add reflect TypeRepr.dealiasKeepOpaques [#18583](https://github.com/lampepfl/dotty/pull/18583) +- Add attributes section to TASTy and use it for Stdlib TASTy [#18599](https://github.com/lampepfl/dotty/pull/18599) +- Error when reading class file with unknown newer jdk version
[#18618](https://github.com/lampepfl/dotty/pull/18618) +- Add support for xsbti.compile.CompileProgress [#18739](https://github.com/lampepfl/dotty/pull/18739) +- Improve type inference for functions like fold [#18780](https://github.com/lampepfl/dotty/pull/18780) +- Improve error message for mismatched tasty versions, allow configuration of header unpickler [#18828](https://github.com/lampepfl/dotty/pull/18828) +- In 3.4 make refutable patterns in a for comprehension an error [#18842](https://github.com/lampepfl/dotty/pull/18842) +- Disallow use of PolyFunction in user code [#18920](https://github.com/lampepfl/dotty/pull/18920) +- Store source file in TASTY attributes [#18948](https://github.com/lampepfl/dotty/pull/18948) +- First step to pipelining support - enable reading Java symbols from TASTy [#19074](https://github.com/lampepfl/dotty/pull/19074) +- Activate constrainResult fix in 3.4 [#19253](https://github.com/lampepfl/dotty/pull/19253) +- Parallelise JVM backend - Scala 2 port [#15392](https://github.com/lampepfl/dotty/pull/15392) +- Avoid generating given definitions that loop [#19282](https://github.com/lampepfl/dotty/pull/19282) + +## Deprecation warnings for old syntax + +- `_` type wildcards [#18813](https://github.com/lampepfl/dotty/pull/18813) +- `private[this]` [#18819](https://github.com/lampepfl/dotty/pull/18819) +- `var x = _` [#18821](https://github.com/lampepfl/dotty/pull/18821) +- `with` as a type operator [#18837](https://github.com/lampepfl/dotty/pull/18837) +- `xs: _*` varargs [#18872](https://github.com/lampepfl/dotty/pull/18872) +- trailing `_` to force eta expansion [#18926](https://github.com/lampepfl/dotty/pull/18926) +- Make explicit arguments for context bounds an error from 3.5 [#19316](https://github.com/lampepfl/dotty/pull/19316) + +# Other changes and fixes + +## Backend + +- Count size of parameters for platform limit check [#18464](https://github.com/lampepfl/dotty/pull/18464) +- Don't emit line number for synthetic unit value [#18717](https://github.com/lampepfl/dotty/pull/18717) +- Avoid too eager transform of $outer for lhs & accessor rhs [#18949](https://github.com/lampepfl/dotty/pull/18949) +- Make more anonymous functions static [#19251](https://github.com/lampepfl/dotty/pull/19251) +- Fix deadlock in initialization of CoreBTypes using Lazy container [#19298](https://github.com/lampepfl/dotty/pull/19298) +- Fix #18769: Allow HK type args in Java signatures. 
[#18883](https://github.com/lampepfl/dotty/pull/18883) +- Loading symbols from TASTy files directly [#17594](https://github.com/lampepfl/dotty/pull/17594) +- Use dedicated equals method for universal equality of chars [#18770](https://github.com/lampepfl/dotty/pull/18770) + +## Erasure + +- Get generic signature of fields entered after erasure from their accessor [#19207](https://github.com/lampepfl/dotty/pull/19207) +- Detect case where two alternatives are the same after widening ExprTypes [#18787](https://github.com/lampepfl/dotty/pull/18787) +- Improve erased params logic [#18433](https://github.com/lampepfl/dotty/pull/18433) + +## Experimental: Capture Checking + +- Fix capture set variable installation in Setup [#18885](https://github.com/lampepfl/dotty/pull/18885) +- Don't follow opaque aliases when transforming sym info for cc [#18929](https://github.com/lampepfl/dotty/pull/18929) +- Reset `comparersInUse` to zero in `ContextState.reset` [#18915](https://github.com/lampepfl/dotty/pull/18915) +- Special handling of experimental.captureChecking import [#17427](https://github.com/lampepfl/dotty/pull/17427) +- Change handling of curried function types in capture checking [#18131](https://github.com/lampepfl/dotty/pull/18131) +- Fix #18246: correctly compute capture sets in `TypeComparer.glb` [#18254](https://github.com/lampepfl/dotty/pull/18254) +- New capture escape checking based on levels [#18463](https://github.com/lampepfl/dotty/pull/18463) +- A more robust scheme for resetting denotations after Recheck [#18534](https://github.com/lampepfl/dotty/pull/18534) +- A more flexible scheme for handling the universal capability [#18699](https://github.com/lampepfl/dotty/pull/18699) +- Fix potential soundness hole when adding references to a mapped capture set [#18758](https://github.com/lampepfl/dotty/pull/18758) +- Alternative scheme for cc encapsulation [#18899](https://github.com/lampepfl/dotty/pull/18899) +- Make reach refinement shallow [#19171](https://github.com/lampepfl/dotty/pull/19171) + +## F-bounds + +- Don't check bounds of Java applications in Java units [#18054](https://github.com/lampepfl/dotty/pull/18054) + +## GADTs + +- Avoid embedding SelectionProtos in Conversions [#17755](https://github.com/lampepfl/dotty/pull/17755) +- Freeze constraints while calculating GADT full bounds [#18222](https://github.com/lampepfl/dotty/pull/18222) + +## Implicits + +- Followup fix to transparent inline conversion [#18130](https://github.com/lampepfl/dotty/pull/18130) +- Select local implicits over name-imported over wildcard imported [#18203](https://github.com/lampepfl/dotty/pull/18203) +- Fix how implicit candidates are combined [#18321](https://github.com/lampepfl/dotty/pull/18321) +- Improve error message about missing type of context function parameter [#18788](https://github.com/lampepfl/dotty/pull/18788) +- Support implicit arguments before extractor method [#18671](https://github.com/lampepfl/dotty/pull/18671) +- Tweak convertible implicits fix [#18727](https://github.com/lampepfl/dotty/pull/18727) +- Turn given loop prevention on for -source future [#19392](https://github.com/lampepfl/dotty/pull/19392) +- Fix algorithm to prevent recursive givens [#19411](https://github.com/lampepfl/dotty/pull/19411) +- Handle default implicits to context parameters under -3.4-migration [#19512](https://github.com/lampepfl/dotty/pull/19512) + +## Incremental Compilation + +- Make incremental compilation aware of synthesized mirrors [#18310](https://github.com/lampepfl/dotty/pull/18310) + +## 
Inference + +- Honour hard unions in lubbing and param replacing [#18680](https://github.com/lampepfl/dotty/pull/18680) + +## Infrastructure + +- Use -Yscala2-library-tasty to add Scala 2 lib TASTY to scalac (internal only) [#18613](https://github.com/lampepfl/dotty/pull/18613) +- Rename `stdlib-bootstrapped-tasty` to `scala2-library-tasty` [#18615](https://github.com/lampepfl/dotty/pull/18615) +- Fix #19286: Freeze rubygems-update at < 3.5.0. [#19288](https://github.com/lampepfl/dotty/pull/19288) + +## Initialization + +- Fix #17997: Handle intersection type as this type of super type [#18069](https://github.com/lampepfl/dotty/pull/18069) +- Add test for issue #17997 affecting the global object initialization checker [#18141](https://github.com/lampepfl/dotty/pull/18141) +- Fix i18624 and add test case for it [#18859](https://github.com/lampepfl/dotty/pull/18859) +- Treat new Array(0) as immutable [#19192](https://github.com/lampepfl/dotty/pull/19192) +- Fix #18407: Ignore Quote/Slice in init checker [#18848](https://github.com/lampepfl/dotty/pull/18848) +- Check safe initialization of static objects [#16970](https://github.com/lampepfl/dotty/pull/16970) +- Pattern match support in checking global objects [#18127](https://github.com/lampepfl/dotty/pull/18127) +- Fix crash in global object initialization checker when select target has no source [#18627](https://github.com/lampepfl/dotty/pull/18627) +- Fix warning underlining in global init checker [#18668](https://github.com/lampepfl/dotty/pull/18668) +- Fix i18629 [#18839](https://github.com/lampepfl/dotty/pull/18839) +- I18628 [#18841](https://github.com/lampepfl/dotty/pull/18841) +- Make safe init checker skip global objects [#18906](https://github.com/lampepfl/dotty/pull/18906) +- Handle local lazy vals properly [#18998](https://github.com/lampepfl/dotty/pull/18998) + +## Inline + +- Fix regression: inline match crash when rhs uses private inlined methods [#18595](https://github.com/lampepfl/dotty/pull/18595) +- Add structural classes of dynamicApply before inlining [#18766](https://github.com/lampepfl/dotty/pull/18766) +- Set missing expansion span for copied inlined node [#18229](https://github.com/lampepfl/dotty/pull/18229) +- Fix `callTrace` of inlined methods [#18738](https://github.com/lampepfl/dotty/pull/18738) + +## Linting + +- Keep tree of type ascriptions of quote pattern splices [#18412](https://github.com/lampepfl/dotty/pull/18412) +- Fix false positive in WUnused for renamed path-dependent imports [#18468](https://github.com/lampepfl/dotty/pull/18468) +- Fix false positive in WUnused for renamed path-dependent imports (2) [#18617](https://github.com/lampepfl/dotty/pull/18617) +- Fix wunused false positive on CanEqual [#18641](https://github.com/lampepfl/dotty/pull/18641) +- Implement -Xlint:private-shadow, type-parameter-shadow [#17622](https://github.com/lampepfl/dotty/pull/17622) +- Fix: reversed wconf parsing order to mirror scala 2 [#18503](https://github.com/lampepfl/dotty/pull/18503) +- Revert Fix false positive in WUnused for renamed path-dependent imports [#18514](https://github.com/lampepfl/dotty/pull/18514) + +## Macro Annotations + +- Enter missing symbols generated by the MacroAnnotation expansion [#18826](https://github.com/lampepfl/dotty/pull/18826) + +## Match Types + +- Allow Tuple.Head and Tuple.Tail to work with EmptyTuple [#17189](https://github.com/lampepfl/dotty/pull/17189) +- Fix match type reduction with avoided types [#18043](https://github.com/lampepfl/dotty/pull/18043) +- Strip LazyRef before 
calling simplified, in MT reduction [#18218](https://github.com/lampepfl/dotty/pull/18218) +- Fix MT separate compilation bug [#18398](https://github.com/lampepfl/dotty/pull/18398) +- Do not show deprecation warning for `_` in type match case [#18887](https://github.com/lampepfl/dotty/pull/18887) + +## Nullability + +- Improve logic when to emit pattern type error [#18093](https://github.com/lampepfl/dotty/pull/18093) +- Allow nullability flow typing even in presence of pattern match [#18206](https://github.com/lampepfl/dotty/pull/18206) +- Fix #11967: flow typing nullability in pattern matches [#18212](https://github.com/lampepfl/dotty/pull/18212) +- Fix #18282: consider Predef.eq/ne in nullability flow typing [#18299](https://github.com/lampepfl/dotty/pull/18299) +- Make `this.type` nullable again (unless under -Yexplicit-nulls). [#18399](https://github.com/lampepfl/dotty/pull/18399) + +## Opaque Types + +- Type ascribe trees that require opaque type usage [#18101](https://github.com/lampepfl/dotty/pull/18101) + +## Parser + +- Fix selecting terms using _root_ [#18335](https://github.com/lampepfl/dotty/pull/18335) +- Tweak java getlitch not to skip zero [#18491](https://github.com/lampepfl/dotty/pull/18491) +- Fix i18518 [#18520](https://github.com/lampepfl/dotty/pull/18520) +- Only apply `future` patches on `future-migration` [#18820](https://github.com/lampepfl/dotty/pull/18820) +- Parser simple expression error recovery change from `null` to `???` [#19103](https://github.com/lampepfl/dotty/pull/19103) + +## Pattern Matching + +- Fix syntax and parsing of vararg patterns [#18055](https://github.com/lampepfl/dotty/pull/18055) +- Avoid over widening in SpaceEngine [#18252](https://github.com/lampepfl/dotty/pull/18252) +- Fix regression in exhaustivity of HK types [#18303](https://github.com/lampepfl/dotty/pull/18303) +- Fix missing case in isSubspace, which broke reachability [#18326](https://github.com/lampepfl/dotty/pull/18326) +- Unsuppress unchecked warnings [#18377](https://github.com/lampepfl/dotty/pull/18377) +- Consider extension methods in Space isSameUnapply [#18642](https://github.com/lampepfl/dotty/pull/18642) +- Fix unreachable warning in deeply nested sealed hierarchy [#18706](https://github.com/lampepfl/dotty/pull/18706) +- Remove unnecessary and recursive Space decomposition [#19216](https://github.com/lampepfl/dotty/pull/19216) +- Prioritise sequence-matches over product-sequence-matches [#19260](https://github.com/lampepfl/dotty/pull/19260) +- Propagate constant in result of inline match [#18455](https://github.com/lampepfl/dotty/pull/18455) +- Disable match analysis in inlined trees [#19190](https://github.com/lampepfl/dotty/pull/19190) +- Teach provablyDisjoint about AnyKind [#18510](https://github.com/lampepfl/dotty/pull/18510) +- Warn about unchecked type tests in primitive catch cases [#19206](https://github.com/lampepfl/dotty/pull/19206) +- Reprioritise seq-match over product-seq-match [#19277](https://github.com/lampepfl/dotty/pull/19277) +- Fix exhaustivity due to separate TypeVar lambdas [#18616](https://github.com/lampepfl/dotty/pull/18616) + +## Presentation Compiler + +- Support completions for extension definition parameter [#18331](https://github.com/lampepfl/dotty/pull/18331) +- Fix: Don't collect map, flatMap, withFilter in for-comprehension [#18430](https://github.com/lampepfl/dotty/pull/18430) +- Bugfix: Catch exception from the compiler for broken shadowed pickles [#18502](https://github.com/lampepfl/dotty/pull/18502) +- Bugfix: highlight for enum
type params [#18528](https://github.com/lampepfl/dotty/pull/18528) +- Bugfix: No signature help for local methods [#18594](https://github.com/lampepfl/dotty/pull/18594) +- Bugfix: add `moduleClass` imported symbols in `IndexedContext` [#18620](https://github.com/lampepfl/dotty/pull/18620) +- Bugfix: Named args completions with default values [#18633](https://github.com/lampepfl/dotty/pull/18633) +- Fix: match completions for type aliases [#18667](https://github.com/lampepfl/dotty/pull/18667) +- Bugfix: add multiline comment completion [#18703](https://github.com/lampepfl/dotty/pull/18703) +- Bugfix: Backticked named arguments [#18704](https://github.com/lampepfl/dotty/pull/18704) +- Bugfix: [metals] Case completions for tuple type [#18751](https://github.com/lampepfl/dotty/pull/18751) +- Completions should prepend, not replace as it is for Scala 2 [#18803](https://github.com/lampepfl/dotty/pull/18803) +- Bugfix: rename end marker [#18838](https://github.com/lampepfl/dotty/pull/18838) +- Presentation compiler: Bugfix for semantic tokens and synthetic decorations [#18955](https://github.com/lampepfl/dotty/pull/18955) +- Show documentation for value forwarders in completions [#19200](https://github.com/lampepfl/dotty/pull/19200) +- Bugfix: Document highlight on class constructors [#19209](https://github.com/lampepfl/dotty/pull/19209) +- Bugfix: Completions for extension methods with name conflict [#19225](https://github.com/lampepfl/dotty/pull/19225) + +## Polyfunctions + +- Check user defined PolyFunction refinements [#18457](https://github.com/lampepfl/dotty/pull/18457) +- Support polymorphic functions with erased parameters [#18293](https://github.com/lampepfl/dotty/pull/18293) +- Use `PolyFunction` instead of `ErasedFunction` [#18295](https://github.com/lampepfl/dotty/pull/18295) + +## Quotes + +- Support type variable with bounds in quoted pattern [#16910](https://github.com/lampepfl/dotty/pull/16910) +- Add new EXPLICITtpt to TASTy format [#17298](https://github.com/lampepfl/dotty/pull/17298) +- Inhibit typer to insert contextual arguments when it is inside arguments of HOAS patterns [#18040](https://github.com/lampepfl/dotty/pull/18040) +- Compile quote patterns directly into QuotePattern AST [#18133](https://github.com/lampepfl/dotty/pull/18133) +- Add missing span to synthesized product mirror [#18354](https://github.com/lampepfl/dotty/pull/18354) +- Improve non-static macro implementation error message [#18405](https://github.com/lampepfl/dotty/pull/18405) +- Fix scala 2 macros in traits with type parameters [#18663](https://github.com/lampepfl/dotty/pull/18663) +- Patch `underlyingArgument` to avoid mapping into modules [#18923](https://github.com/lampepfl/dotty/pull/18923) +- Fallback erasing term references [#18731](https://github.com/lampepfl/dotty/pull/18731) +- Fix ignored type variable bound warning in type quote pattern [#18199](https://github.com/lampepfl/dotty/pull/18199) +- Splice hole with singleton captures [#18357](https://github.com/lampepfl/dotty/pull/18357) +- Fix macros with erased arguments [#18431](https://github.com/lampepfl/dotty/pull/18431) +- Deprecate 3-arg `FunctionClass` constructor [#18472](https://github.com/lampepfl/dotty/pull/18472) +- Deprecate `Quotes` `{MethodType,TermParamClause}.isErased` [#18479](https://github.com/lampepfl/dotty/pull/18479) +- Avoid crashes on missing positions [#19250](https://github.com/lampepfl/dotty/pull/19250) + +## Reflection + +- Add reflect.ValOrDefDef [#16974](https://github.com/lampepfl/dotty/pull/16974) +- Check New 
tree for ill-formed module instantiations [#17553](https://github.com/lampepfl/dotty/pull/17553) +- Add reflect `TypeLambda.paramVariances` [#17568](https://github.com/lampepfl/dotty/pull/17568) +- Make check flags for `newMethod`, `newVal` and `newBind` in Quotes API less restrictive [#18217](https://github.com/lampepfl/dotty/pull/18217) +- Normalise mirrorType for mirror Synthesis [#19199](https://github.com/lampepfl/dotty/pull/19199) +- Add reflect `defn.FunctionClass` overloads [#16849](https://github.com/lampepfl/dotty/pull/16849) +- Stabilize reflect flag `JavaAnnotation` [#19267](https://github.com/lampepfl/dotty/pull/19267) +- Stabilize reflect `paramVariance` [#19268](https://github.com/lampepfl/dotty/pull/19268) + +## Reporting + +- Take into account the result type of inline implicit conversions unless they are transparent [#17924](https://github.com/lampepfl/dotty/pull/17924) +- Check if a fatal warning issued in typer is silenced, before converting it into an error [#18089](https://github.com/lampepfl/dotty/pull/18089) +- Elide companion defs to an `object` extending `AnyVal` [#18451](https://github.com/lampepfl/dotty/pull/18451) +- Add regression test for issue i18493 [#18497](https://github.com/lampepfl/dotty/pull/18497) +- Add better explanation to error message [#18665](https://github.com/lampepfl/dotty/pull/18665) +- Better error message when accessing private members [#18690](https://github.com/lampepfl/dotty/pull/18690) +- Improve message for discarded pure non-Unit values [#18723](https://github.com/lampepfl/dotty/pull/18723) +- Better error message when a pattern match extractor is not found. [#18725](https://github.com/lampepfl/dotty/pull/18725) +- Give "did you mean ...?" hints also for simple identifiers [#18747](https://github.com/lampepfl/dotty/pull/18747) +- Better error for definition followed by keyword [#18752](https://github.com/lampepfl/dotty/pull/18752) +- Better explain message for 'pattern expected' [#18753](https://github.com/lampepfl/dotty/pull/18753) +- Improve failure message of enum `fromOrdinal`/`valueOf` [#19182](https://github.com/lampepfl/dotty/pull/19182) +- Fix type mismatch error confusion between types with same simple name [#19204](https://github.com/lampepfl/dotty/pull/19204) +- Add hint for nested quotes missing staged `Quotes` [#18755](https://github.com/lampepfl/dotty/pull/18755) +- Better error messages for missing commas and more [#18785](https://github.com/lampepfl/dotty/pull/18785) +- Fix imported twice error messages [#18102](https://github.com/lampepfl/dotty/pull/18102) +- Improve error message for inaccessible types [#18406](https://github.com/lampepfl/dotty/pull/18406) +- Future migration warning for `with` type operator [#18818](https://github.com/lampepfl/dotty/pull/18818) +- Improve assertion error message for `Apply` and `TypeApply` [#18700](https://github.com/lampepfl/dotty/pull/18700) +- Shorten traces for TypeMismatch errors under -explain [#18742](https://github.com/lampepfl/dotty/pull/18742) +- Improve `with` in type migration warning [#18852](https://github.com/lampepfl/dotty/pull/18852) +- Future migration warning for alphanumeric infix operator [#18908](https://github.com/lampepfl/dotty/pull/18908) +- Make sure that trace is shown correctly in the presence of invalid line numbers [#18930](https://github.com/lampepfl/dotty/pull/18930) +- Add migration warning for XML literals in language future [#19101](https://github.com/lampepfl/dotty/pull/19101) +- Avoid diagnostic message forcing crashing the compiler
[#19113](https://github.com/lampepfl/dotty/pull/19113) +- Make sure that the stacktrace is shown with `-Ydebug-unpickling` [#19115](https://github.com/lampepfl/dotty/pull/19115) +- Improve `asExprOf` cast error formatting [#19195](https://github.com/lampepfl/dotty/pull/19195) +- Do not warn on underscore wildcard type in pattern [#19249](https://github.com/lampepfl/dotty/pull/19249) + +## Scala-JS + +- Fix #18658: Handle varargs of generic types in `JSExportsGen`. [#18659](https://github.com/lampepfl/dotty/pull/18659) + +## Scaladoc + +- Fix incorrect comment parser used in nightly scaladoc [#18523](https://github.com/lampepfl/dotty/pull/18523) +- Update jsoup dependency of Scaladoc to 7.2 [#19584](https://github.com/lampepfl/dotty/pull/19584) + +## SemanticDB + +- Export diagnostics (including unused warnings) to SemanticDB [#17835](https://github.com/lampepfl/dotty/pull/17835) +- Bugfix: Incorrect semanticdb span on Selectable [#18576](https://github.com/lampepfl/dotty/pull/18576) +- Bugfix: in semanticdb make synthetic apply disambiguator consistent w/ Scala 2 implicit [#17341](https://github.com/lampepfl/dotty/pull/17341) + +## Standard Library + +- Intrinsify `constValueTuple` and `summonAll` [#18013](https://github.com/lampepfl/dotty/pull/18013) +- Fix #18609: Add language.`3.4` and language.`3.4-migration`. [#18610](https://github.com/lampepfl/dotty/pull/18610) + +## TASTy format + +- Eliminate FromJavaObject from TASTy of Java sources [#19259](https://github.com/lampepfl/dotty/pull/19259) +- Add new HOLETYPES to TASTy format [#17225](https://github.com/lampepfl/dotty/pull/17225) +- Add capture checking attributes to TASTy [#19033](https://github.com/lampepfl/dotty/pull/19033) +- Add TASTyInfo abstraction [#19089](https://github.com/lampepfl/dotty/pull/19089) +- Add UTF8 abstraction in the TASTy format [#19090](https://github.com/lampepfl/dotty/pull/19090) + +## Tooling + +- Don't add explanation twice [#18779](https://github.com/lampepfl/dotty/pull/18779) +- ExtractDependencies uses more efficient caching [#18403](https://github.com/lampepfl/dotty/pull/18403) +- Introduce the SourceVersions 3.4 and 3.4-migration; make 3.4 the default. [#18501](https://github.com/lampepfl/dotty/pull/18501) +- Bugfix: Completions for named args in wrong order [#18702](https://github.com/lampepfl/dotty/pull/18702) +- Align unpickled Scala 2 accessors encoding with Scala 3 [#18874](https://github.com/lampepfl/dotty/pull/18874) +- Reinterpret Scala 2 case accessors `xyz$access$idx` [#18907](https://github.com/lampepfl/dotty/pull/18907) +- Presentation-compiler: Add synthetic decorations [#18951](https://github.com/lampepfl/dotty/pull/18951) +- Add compilation unit info to `ClassSymbol` [#19010](https://github.com/lampepfl/dotty/pull/19010) +- Make sure that patches for 3.0 are also applied in later versions [#19018](https://github.com/lampepfl/dotty/pull/19018) + +## Transform + +- Also consider @targetName when checking private overrides [#18361](https://github.com/lampepfl/dotty/pull/18361) +- Teach PostTyper to handle untupled context closures [#17739](https://github.com/lampepfl/dotty/pull/17739) +- Properly dealias tuple types when specializing [#18724](https://github.com/lampepfl/dotty/pull/18724) +- Fix condition in prefixIsElidable to prevent compiler crash [#18924](https://github.com/lampepfl/dotty/pull/18924) +- Fix #18816: Transfer the span of rewired `This` nodes in `fullyParameterizedDef`. [#18840](https://github.com/lampepfl/dotty/pull/18840) +- List(...) 
optimization to avoid intermediate array [#17166](https://github.com/lampepfl/dotty/pull/17166) +- Make Array.apply an intrinsic [#18537](https://github.com/lampepfl/dotty/pull/18537) +- Add missing span to extension method select [#18557](https://github.com/lampepfl/dotty/pull/18557) + +## Tuples + +- Handle TupleXXL in match analysis [#19212](https://github.com/lampepfl/dotty/pull/19212) +- Add `reverse` method to `NonEmptyTuple` [#13752](https://github.com/lampepfl/dotty/pull/13752) +- Refine handling of pattern binders for large tuples [#19085](https://github.com/lampepfl/dotty/pull/19085) +- Introduce `Tuple.ReverseOnto` and use it in `Tuple.reverse` [#19183](https://github.com/lampepfl/dotty/pull/19183) + +## Typeclass Derivation + +- Consider all parents when checking access to the children of a sum [#19083](https://github.com/lampepfl/dotty/pull/19083) + +## Typer + +- Fix logic when comparing var/def bindings with val refinements [#18049](https://github.com/lampepfl/dotty/pull/18049) +- Fix variance checking in refinements [#18053](https://github.com/lampepfl/dotty/pull/18053) +- Fix accessibleType for package object prefixes [#18057](https://github.com/lampepfl/dotty/pull/18057) +- Refix avoid GADT casting with ProtoTypes [#18085](https://github.com/lampepfl/dotty/pull/18085) +- Avoid shadowing by private definitions in more situations [#18142](https://github.com/lampepfl/dotty/pull/18142) +- Refine infoDependsOnPrefix [#18204](https://github.com/lampepfl/dotty/pull/18204) +- Fix spurious subtype check pruning when both sides have unions [#18213](https://github.com/lampepfl/dotty/pull/18213) +- Reimplement support for type aliases in SAM types [#18317](https://github.com/lampepfl/dotty/pull/18317) +- Fix adaptation of constants to constant type aliases [#18360](https://github.com/lampepfl/dotty/pull/18360) +- Issue "positional after named argument" errors [#18363](https://github.com/lampepfl/dotty/pull/18363) +- Deprecate `ops.long.S` [#18426](https://github.com/lampepfl/dotty/pull/18426) +- Tweak selection from self types [#18467](https://github.com/lampepfl/dotty/pull/18467) +- Use the unwidened type when casting structural calls [#18527](https://github.com/lampepfl/dotty/pull/18527) +- Fix #18649: Use loBound of param types when materializing a context function. 
[#18651](https://github.com/lampepfl/dotty/pull/18651) +- Identify structural trees on Match Type qualifiers [#18765](https://github.com/lampepfl/dotty/pull/18765) +- Tweak approximation of type variables when computing default types [#18798](https://github.com/lampepfl/dotty/pull/18798) +- Admit parametric aliases of classes in parent typing [#18849](https://github.com/lampepfl/dotty/pull/18849) +- Also add privateWithin when creating constructor proxies [#18893](https://github.com/lampepfl/dotty/pull/18893) +- Revert part of `Simplify defn.FunctionOf.unapply` [#19012](https://github.com/lampepfl/dotty/pull/19012) +- Check @targetName when subtyping Refined Types [#19081](https://github.com/lampepfl/dotty/pull/19081) +- Record failures to adapt application arguments [#18269](https://github.com/lampepfl/dotty/pull/18269) +- Improve handling of AndTypes on the LHS of subtype comparisons [#18235](https://github.com/lampepfl/dotty/pull/18235) +- Allow inferred parameter types always, when eta-expanding [#18771](https://github.com/lampepfl/dotty/pull/18771) +- Fix failing bounds check on default getter [#18419](https://github.com/lampepfl/dotty/pull/18419) +- Use constructor's default getters in case class synthetic `apply` methods [#18716](https://github.com/lampepfl/dotty/pull/18716) +- Keep qualifier of Ident when selecting setter [#18714](https://github.com/lampepfl/dotty/pull/18714) +- Retract SynthesizeExtMethodReceiver mode when going deeper in overloading resolution [#18759](https://github.com/lampepfl/dotty/pull/18759) +- Constant fold all the number conversion methods [#17446](https://github.com/lampepfl/dotty/pull/17446) +- Refine criterion when to widen types [#17180](https://github.com/lampepfl/dotty/pull/17180) +- Run all MatchType reduction under Mode.Type [#17937](https://github.com/lampepfl/dotty/pull/17937) +- Force consistent MT post-redux normalisation, disallow infinite match types [#18073](https://github.com/lampepfl/dotty/pull/18073) +- Fix #17467: Limit isNullable widening to stable TermRefs; remove under explicit nulls. [#17470](https://github.com/lampepfl/dotty/pull/17470) +- Disallow naming the root package, except for selections [#18187](https://github.com/lampepfl/dotty/pull/18187) +- Contextual varargs parameters [#18186](https://github.com/lampepfl/dotty/pull/18186) +- Encode the name of the attribute in Selectable.selectDynamic [#18928](https://github.com/lampepfl/dotty/pull/18928) +- Remove linearization requirement for override ref checks from java classes [#18953](https://github.com/lampepfl/dotty/pull/18953) +- Fix type inferencing (constraining) regressions [#19189](https://github.com/lampepfl/dotty/pull/19189) +- Repeated params must correspond in override [#16836](https://github.com/lampepfl/dotty/pull/16836) +- Convert SAM result types to function types [#17740](https://github.com/lampepfl/dotty/pull/17740) +- Disallow `infix` objects [#17966](https://github.com/lampepfl/dotty/pull/17966) +- Fix hasMatchingMember handling NoDenotation [#17977](https://github.com/lampepfl/dotty/pull/17977) +- Fix: disallow toplevel infix definitions for vals, vars, givens, methods and implicits [#17994](https://github.com/lampepfl/dotty/pull/17994) +- Curried methods are not valid SAM methods [#18110](https://github.com/lampepfl/dotty/pull/18110) +- Fix #17115: Try to normalize while computing `typeSize`.
[#18386](https://github.com/lampepfl/dotty/pull/18386) +- Add default arguments to derived refined type [#18435](https://github.com/lampepfl/dotty/pull/18435) +- Handle dependent context functions [#18443](https://github.com/lampepfl/dotty/pull/18443) +- Fix variance loophole for private vars [#18693](https://github.com/lampepfl/dotty/pull/18693) +- Avoid crash arising from trying to find conversions from polymorphic singleton types [#18760](https://github.com/lampepfl/dotty/pull/18760) +- Allow inner classes of universal traits [#18796](https://github.com/lampepfl/dotty/pull/18796) +- Prevent crash when extension not found [#18830](https://github.com/lampepfl/dotty/pull/18830) +- Fix expandParam's use of argForParam/isArgPrefixOf. [#19412](https://github.com/lampepfl/dotty/pull/19412) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.3.1..3.4.0` these are: + +``` + 474 Martin Odersky + 296 Nicolas Stucki + 132 Fengyun Liu + 119 Dale Wijnand + 77 Jamie Thompson + 69 Sébastien Doeraene + 60 Paweł Marks + 32 Chris Kipp + 27 Guillaume Martres + 26 Rikito Taniguchi + 21 Yichen Xu + 19 EnzeXing + 14 Szymon Rodziewicz + 13 Lucas Leblanc + 12 Jakub Ciesluk + 12 Jędrzej Rochala + 12 Katarzyna Marek + 11 Carl + 10 David Hua + 9 Florian3k + 9 Wojciech Mazur + 8 Eugene Flesselle + 8 ghostbuster91 + 7 Hamza Remmal + 7 Jan Chyb + 7 Ondrej Lhotak + 7 Quentin Bernet + 6 Julien Richard-Foy + 6 Kacper Korban + 6 Seth Tisue + 5 Lorenzo Gabriele + 5 Matt Bovel + 5 Som Snytt + 5 Yuito Murase + 5 dependabot[bot] + 3 David + 3 Lucas + 3 Pascal Weisenburger + 3 Tomasz Godzik + 2 Aleksander Rainko + 2 Decel + 2 Guillaume Raffin + 2 Ondřej Lhoták + 2 Oron Port + 2 danecek + 2 rochala + 1 Adam Dąbrowski + 1 Aleksey Troitskiy + 1 Arnout Engelen + 1 Ausmarton Zarino Fernandes + 1 Bjorn Regnell + 1 Daniel Esik + 1 Eugene Yokota + 1 Fabián Heredia Montiel + 1 François Monniot + 1 Jakub Cieśluk + 1 John Duffell + 1 John M. Higgins + 1 Justin Reardon + 1 Kai + 1 Kisaragi + 1 Lucas Nouguier + 1 Lukas Rytz + 1 LydiaSkuse + 1 Martin Kucera + 1 Martin Kučera + 1 Matthew Rooney + 1 Matthias Kurz + 1 Mikołaj Fornal + 1 Nicolas Almerge + 1 Preveen P + 1 Shardul Chiplunkar + 1 Stefan Wachter + 1 philippus + 1 q-ata + 1 slim +``` diff --git a/community-build/README.md b/community-build/README.md index 6db2e70fd96b..1067abbe22b0 100644 --- a/community-build/README.md +++ b/community-build/README.md @@ -1,75 +1,5 @@ # Scala 3 Community Build -This project contains tests to build and test a corpus of open sources Scala -projects against the latest version of Scala 3. - -## Running it locally - -To run the community build on a local machine, first fetch all the git -submodules with `git submodule update --init` and run `sbt community-build/test` -from the root of the dotty repo. - -To run a single project, you can use the usual syntax for running a single JUnit -test, for example `community-build/testOnly -- *shapeless` - -In CI the community build is split up into 3 seperate groups: A, B, and C. To -run one specific build you can also use the same JUnit syntax as above targeting -the individual group. For example: - -``` -sbt "community-build/testOnly dotty.communitybuild.CommunityBuildTestA" -``` - -## Adding your project - -The community build is able to handle both Mill and sbt projects. To add your -project to the community build you can follow these steps: - -1. Ensure your project is compiling with Scala 3. 
If you need help make sure to - check out the [Scala 3 Migration - Guide](https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html). - You can see the submodules in - [community-projects](https://github.com/lampepfl/dotty/tree/main/community-build/community-projects/) - for examples of projects that compile with Scala 3. - -2. Open a PR against this repo that: - - Adds your project as a new git submodule - - `git submodule add https://github.com/dotty-staging/XYZ.git community-build/community-projects/XYZ` - - Add the project to [projects.scala](https://github.com/lampepfl/dotty/blob/main/community-build/src/scala/dotty/communitybuild/projects.scala) - - Adds a test in [CommunityBuildTest.scala](https://github.com/lampepfl/dotty/blob/main/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala) - -3. Once the CI is green, someone from the Dotty team will fork your repo and add - it to [dotty-staging](https://github.com/dotty-staging). This enables us to - make changes to your fork if necessary to keep the community build running - smoothly. - -4. Once the fork is created, please update your PR to point to this new fork - instead of your repo. - -## Updating a project - -The projects included in the community build are all forked and located in -[dotty-staging](https://github.com/dotty-staging). When something needs to be -bumped the process is as follows: - -1. Fork the dotty staging repo and sync it with the upstream project. - -2. Once you've verified that the tests are all passing you can then either - request in your PR that the dotty-staging fork be synced or in the - [scala-contributors](https://discord.com/channels/632150470000902164/632628489719382036) - discord channel. - -### Some helpful tips - -- If you're unfamiliar with Git Submodules you can find a nice guide to get - familiar with them [here](https://git-scm.com/book/en/v2/Git-Tools-Submodules). -- Keep in mind that many projects are interrelated. So when you bump one that - change may cascade through multiple different projects causing you to have - to bump multiple. Plan accordingly and at times it's best to pin it to a - stable release version, especially if it's a root library that many others - in the community build are relying on. - -## Looking for the "unmanaged" Scala 3 community build? - -You can find this [here](https://github.com/VirtusLab/community-build3). - +For information on the community build check out the [Community Build section of +the Contributing +Docs](https://dotty.epfl.ch/docs/contributing/community-build.html). 
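For contributors landing here from the old README: its step 2 asked for the project to be registered in `projects.scala`. A minimal sketch of such an entry, modeled on the `SbtCommunityProject` definitions visible elsewhere in this diff; the project name, sbt commands, and module name below are hypothetical placeholders, not a real community-build project.

```scala
// Hypothetical entry for community-build/src/scala/dotty/communitybuild/projects.scala.
// "my-library" and its commands are placeholders; the shape follows the
// shapeless3 definition added further down in this diff.
lazy val myLibrary = SbtCommunityProject(
  project        = "my-library",      // must match the submodule directory name
  sbtTestCommand = "test",            // sbt command the community build runs
  sbtDocCommand  = forceDoc("core"),  // optionally force scaladoc for chosen modules
  // start from the shared defaults, dropping any flag the project cannot support yet:
  scalacOptions  = SbtCommunityProject.scalacOptions.filter(_ != "-Ysafe-init"),
)
```

The project is then wired into CI by appending it to `allProjects` and giving it a JUnit entry point, as the `shapeless3` changes below illustrate.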
diff --git a/community-build/community-projects/izumi-reflect b/community-build/community-projects/izumi-reflect index 540f08283069..c0756faa7311 160000 --- a/community-build/community-projects/izumi-reflect +++ b/community-build/community-projects/izumi-reflect @@ -1 +1 @@ -Subproject commit 540f08283069aefd8a81fec1f3493c70217b6099 +Subproject commit c0756faa7311f70c6da6af29b8cb25506634bf09 diff --git a/community-build/community-projects/munit b/community-build/community-projects/munit index 92f3ad9e8261..c18fddb143b9 160000 --- a/community-build/community-projects/munit +++ b/community-build/community-projects/munit @@ -1 +1 @@ -Subproject commit 92f3ad9e8261b4c142a551baaf61ef5fed84d36a +Subproject commit c18fddb143b98e4c026dc118687410d52b187d88 diff --git a/community-build/community-projects/perspective b/community-build/community-projects/perspective index 365383df6a7b..ec19c412651e 160000 --- a/community-build/community-projects/perspective +++ b/community-build/community-projects/perspective @@ -1 +1 @@ -Subproject commit 365383df6a7b0c2eeb81742f479aa56269a4b557 +Subproject commit ec19c412651e7edbca10cbc90aa3e219e58b01fb diff --git a/community-build/community-projects/scala-xml b/community-build/community-projects/scala-xml index ba33a89bdeee..105c3dac8835 160000 --- a/community-build/community-projects/scala-xml +++ b/community-build/community-projects/scala-xml @@ -1 +1 @@ -Subproject commit ba33a89bdeee67089ff486c66ead93ab35f9250a +Subproject commit 105c3dac883549eca1182b04fc5a18fe4f5ad51a diff --git a/community-build/community-projects/scalatest b/community-build/community-projects/scalatest index 39370e391342..d430625d9621 160000 --- a/community-build/community-projects/scalatest +++ b/community-build/community-projects/scalatest @@ -1 +1 @@ -Subproject commit 39370e391342eb3d3ecfa847be16734f2fb1f3a2 +Subproject commit d430625d96218c9031b1434cc0c2110f3740fa1c diff --git a/community-build/community-projects/scalaz b/community-build/community-projects/scalaz index 6e7f3d9caf64..97cccf3b3fcb 160000 --- a/community-build/community-projects/scalaz +++ b/community-build/community-projects/scalaz @@ -1 +1 @@ -Subproject commit 6e7f3d9caf64d8ad1c82804cf418882345f41930 +Subproject commit 97cccf3b3fcb71885a32b2e567171c0f70b06104 diff --git a/community-build/community-projects/shapeless b/community-build/community-projects/shapeless deleted file mode 160000 index 04e8cebc6097..000000000000 --- a/community-build/community-projects/shapeless +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 04e8cebc6097357598b15caf428e86b78dde0888 diff --git a/community-build/community-projects/shapeless-3 b/community-build/community-projects/shapeless-3 new file mode 160000 index 000000000000..d27c5ba1ae51 --- /dev/null +++ b/community-build/community-projects/shapeless-3 @@ -0,0 +1 @@ +Subproject commit d27c5ba1ae5111b85df2cfb65a26b9246c52570c diff --git a/community-build/community-projects/specs2 b/community-build/community-projects/specs2 index 789f23b75db1..ba01cca013d9 160000 --- a/community-build/community-projects/specs2 +++ b/community-build/community-projects/specs2 @@ -1 +1 @@ -Subproject commit 789f23b75db1cf7961d04468b21a2cc0d7ba32d8 +Subproject commit ba01cca013d9d99e390d17619664bdedd716e0d7 diff --git a/community-build/community-projects/stdLib213 b/community-build/community-projects/stdLib213 index 1a2521996bad..6243e902928c 160000 --- a/community-build/community-projects/stdLib213 +++ b/community-build/community-projects/stdLib213 @@ -1 +1 @@ -Subproject commit 
1a2521996badfe4cb3d9b8cdecefacb1251faeb9 +Subproject commit 6243e902928c344fb0e82e21120bb257f08a2af2 diff --git a/community-build/community-projects/verify b/community-build/community-projects/verify index 073921a373e0..ae37d7e153fc 160000 --- a/community-build/community-projects/verify +++ b/community-build/community-projects/verify @@ -1 +1 @@ -Subproject commit 073921a373e05bcfdc863769f676089ab889a002 +Subproject commit ae37d7e153fc62d64c40a72c45f810511aef2e01 diff --git a/community-build/src/scala/dotty/communitybuild/projects.scala b/community-build/src/scala/dotty/communitybuild/projects.scala index 1349c3adc3b9..767667b491ce 100644 --- a/community-build/src/scala/dotty/communitybuild/projects.scala +++ b/community-build/src/scala/dotty/communitybuild/projects.scala @@ -25,7 +25,7 @@ def exec(projectDir: Path, binary: String, arguments: Seq[String], environment: import scala.jdk.CollectionConverters._ val command = binary +: arguments log(command.mkString(" ")) - val builder = new ProcessBuilder(command: _*).directory(projectDir.toFile).inheritIO() + val builder = new ProcessBuilder(command*).directory(projectDir.toFile).inheritIO() builder.environment.putAll(environment.asJava) val process = builder.start() val exitCode = process.waitFor() @@ -140,7 +140,7 @@ final case class SbtCommunityProject( case Some(ivyHome) => List(s"-Dsbt.ivy.home=$ivyHome") case _ => Nil extraSbtArgs ++ sbtProps ++ List( - "-sbt-version", "1.8.2", + "-sbt-version", "1.9.3", "-Dsbt.supershell=false", s"-Ddotty.communitybuild.dir=$communitybuildDir", s"--addPluginSbtFile=$sbtPluginFilePath" @@ -358,21 +358,11 @@ object projects: // sbtDocCommand = "dotty-community-build/doc" ) - lazy val stdLib213 = SbtCommunityProject( - project = "stdLib213", - extraSbtArgs = List("-Dscala.build.compileWithDotty=true"), - sbtTestCommand = """set Global / fatalWarnings := false; library/compile""", - sbtPublishCommand = """set Global / fatalWarnings := false; set library/Compile/packageDoc/publishArtifact := false; library/publishLocal""", - // sbtDocCommand = "library/doc" // Does no compile? 
No idea :/ - ) - - - lazy val shapeless = SbtCommunityProject( - project = "shapeless", - sbtTestCommand = """set deriving/scalacOptions -= "-Xfatal-warnings"; set typeable/scalacOptions -= "-Xfatal-warnings"; test""", - // selectively disable -Xfatal-warnings due to deprecations - sbtDocCommand = forceDoc("typeable", "deriving", "data"), - scalacOptions = Nil // disable -Ysafe-init, due to -Xfatal-warnings + lazy val shapeless3 = SbtCommunityProject( + project = "shapeless-3", + sbtTestCommand = "testJVM; testJS", + sbtDocCommand = forceDoc("typeable", "deriving"), + scalacOptions = "-source" :: "3.3" :: SbtCommunityProject.scalacOptions.filter(_ != "-Ysafe-init"), // due to -Xfatal-warnings ) lazy val xmlInterpolator = SbtCommunityProject( @@ -414,7 +404,7 @@ object projects: project = "zio", sbtTestCommand = "testJVMDotty", sbtDocCommand = forceDoc("coreJVM"), - scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Xcheck-macros"), + scalacOptions = "-source" :: "3.3" :: SbtCommunityProject.scalacOptions.filter(_ != "-Xcheck-macros"), dependencies =List(izumiReflect) ) @@ -682,7 +672,7 @@ object projects: sbtTestCommand = "runCommunityBuild", sbtPublishCommand = "publishLocal", dependencies = List(scalatest), - scalacOptions = List("-language:implicitConversions"), // disabled -Ysafe-init, due to bug in macro + scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Xcheck-macros") :+ "-language:implicitConversions", // disabled -Xcheck-macros, due to bug in macro ) lazy val onnxScala = SbtCommunityProject( @@ -795,8 +785,7 @@ def allProjects = List( projects.scalaPB, projects.minitest, projects.fastparse, - projects.stdLib213, - projects.shapeless, + projects.shapeless3, projects.xmlInterpolator, projects.effpi, projects.sconfig, diff --git a/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala b/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala index bf6b6d431509..6a4f832ce05a 100644 --- a/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala +++ b/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala @@ -91,7 +91,7 @@ class CommunityBuildTestC: @Test def scalaz = projects.scalaz.run() @Test def scas = projects.scas.run() @Test def sconfig = projects.sconfig.run() - @Test def shapeless = projects.shapeless.run() + @Test def shapeless3 = projects.shapeless3.run() @Test def sourcecode = projects.sourcecode.run() @Test def specs2 = projects.specs2.run() diff --git a/compiler/src/dotty/tools/MainGenericRunner.scala b/compiler/src/dotty/tools/MainGenericRunner.scala index 6f4366a00b77..1540cc86d7a6 100644 --- a/compiler/src/dotty/tools/MainGenericRunner.scala +++ b/compiler/src/dotty/tools/MainGenericRunner.scala @@ -148,7 +148,7 @@ object MainGenericRunner { case (o @ javaOption(striped)) :: tail => processArgs(tail, settings.withJavaArgs(striped).withScalaArgs(o)) case (o @ scalaOption(_*)) :: tail => - val remainingArgs = (CommandLineParser.expandArg(o) ++ tail).toList + val remainingArgs = CommandLineParser.expandArg(o) ++ tail processArgs(remainingArgs, settings) case (o @ colorOption(_*)) :: tail => processArgs(tail, settings.withScalaArgs(o)) @@ -195,7 +195,7 @@ object MainGenericRunner { case ExecuteMode.PossibleRun => val newClasspath = (settings.classPath :+ ".").flatMap(_.split(classpathSeparator).filter(_.nonEmpty)).map(File(_).toURI.toURL) - import dotty.tools.runner.RichClassLoader._ + import dotty.tools.runner.RichClassLoader.* val newClassLoader = 
ScalaClassLoader.fromURLsParallelCapable(newClasspath) val targetToRun = settings.possibleEntryPaths.to(LazyList).find { entryPath => newClassLoader.tryToLoadClass(entryPath).orElse { diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala b/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala index d95638be2695..4027cf9fb564 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala @@ -4,8 +4,8 @@ package jvm import scala.language.unsafeNulls -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.report /** diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala b/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala index e7b5a0dad1bf..db52a74300ef 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala @@ -13,16 +13,15 @@ import BCodeHelpers.InvokeStyle import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.CompilationUnit -import dotty.tools.dotc.core.Constants._ +import dotty.tools.dotc.core.Constants.* import dotty.tools.dotc.core.Flags.{Label => LabelFlag, _} -import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.core.StdNames.{nme, str} -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.transform.Erasure -import dotty.tools.dotc.transform.SymUtils._ -import dotty.tools.dotc.util.Spans._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Phases._ +import dotty.tools.dotc.util.Spans.* +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Phases.* import dotty.tools.dotc.core.Decorators.em import dotty.tools.dotc.report @@ -33,13 +32,13 @@ import dotty.tools.dotc.report * */ trait BCodeBodyBuilder extends BCodeSkelBuilder { - // import global._ - // import definitions._ - import tpd._ + // import global.* + // import definitions.* + import tpd.* import int.{_, given} import DottyBackendInterface.symExtensions - import bTypes._ - import coreBTypes._ + import bTypes.* + import coreBTypes.* protected val primitives: DottyPrimitives @@ -79,14 +78,14 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { tree match { case Assign(lhs @ DesugaredSelect(qual, _), rhs) => - val savedStackHeight = stackHeight + val savedStackSize = stack.recordSize() val isStatic = lhs.symbol.isStaticMember if (!isStatic) { - genLoadQualifier(lhs) - stackHeight += 1 + val qualTK = genLoad(qual) + stack.push(qualTK) } genLoad(rhs, symInfoTK(lhs.symbol)) - stackHeight = savedStackHeight + stack.restoreSize(savedStackSize) lineNumber(tree) // receiverClass is used in the bytecode to access the field. 
using sym.owner may lead to IllegalAccessError val receiverClass = qual.tpe.typeSymbol @@ -126,7 +125,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { assert(resKind.isNumericType || (resKind == BOOL), s"$resKind is not a numeric or boolean type [operation: ${fun.symbol}]") - import ScalaPrimitivesOps._ + import ScalaPrimitivesOps.* args match { // unary operation @@ -150,9 +149,9 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { } genLoad(larg, resKind) - stackHeight += resKind.size + stack.push(resKind) genLoad(rarg, if (isShift) INT else resKind) - stackHeight -= resKind.size + stack.pop() (code: @switch) match { case ADD => bc add resKind @@ -179,7 +178,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { def genArrayOp(tree: Tree, code: Int, expectedType: BType): BType = tree match{ case Apply(DesugaredSelect(arrayObj, _), args) => - import ScalaPrimitivesOps._ + import ScalaPrimitivesOps.* val k = tpeTK(arrayObj) genLoad(arrayObj, k) val elementType = typeOfArrayOp.getOrElse[bTypes.BType](code, abort(s"Unknown operation on arrays: $tree code: $code")) @@ -189,19 +188,19 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { if (isArrayGet(code)) { // load argument on stack assert(args.length == 1, s"Too many arguments for array get operation: $tree"); - stackHeight += 1 + stack.push(k) genLoad(args.head, INT) - stackHeight -= 1 + stack.pop() generatedType = k.asArrayBType.componentType bc.aload(elementType) } else if (isArraySet(code)) { val List(a1, a2) = args - stackHeight += 1 + stack.push(k) genLoad(a1, INT) - stackHeight += 1 + stack.push(INT) genLoad(a2) - stackHeight -= 2 + stack.pop(2) generatedType = UNIT bc.astore(elementType) } else { @@ -235,7 +234,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val resKind = if (hasUnitBranch) UNIT else tpeTK(tree) val postIf = new asm.Label - genLoadTo(thenp, resKind, LoadDestination.Jump(postIf, stackHeight)) + genLoadTo(thenp, resKind, LoadDestination.Jump(postIf, stack.recordSize())) markProgramPoint(failure) genLoadTo(elsep, resKind, LoadDestination.FallThrough) markProgramPoint(postIf) @@ -262,7 +261,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val code = primitives.getPrimitive(tree, receiver.tpe) - import ScalaPrimitivesOps._ + import ScalaPrimitivesOps.* if (isArithmeticOp(code)) genArithmeticOp(tree, code) else if (code == CONCAT) genStringConcat(tree) @@ -294,8 +293,10 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { ) } - def genLoad(tree: Tree): Unit = { - genLoad(tree, tpeTK(tree)) + def genLoad(tree: Tree): BType = { + val generatedType = tpeTK(tree) + genLoad(tree, generatedType) + generatedType } /* Generate code for trees that produce values on the stack */ @@ -364,6 +365,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { case t @ Ident(_) => (t, Nil) } + val savedStackSize = stack.recordSize() if (!fun.symbol.isStaticMember) { // load receiver of non-static implementation of lambda @@ -372,10 +374,12 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { // AbstractValidatingLambdaMetafactory.validateMetafactoryArgs val DesugaredSelect(prefix, _) = fun: @unchecked - genLoad(prefix) + val prefixTK = genLoad(prefix) + stack.push(prefixTK) } genLoadArguments(env, fun.symbol.info.firstParamTypes map toTypeKind) + stack.restoreSize(savedStackSize) generatedType = genInvokeDynamicLambda(NoSymbol, fun.symbol, env.size, functionalInterface) case app @ Apply(_, _) => @@ -494,9 +498,9 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { dest match case LoadDestination.FallThrough => () - case 
LoadDestination.Jump(label, targetStackHeight) => - if targetStackHeight < stackHeight then - val stackDiff = stackHeight - targetStackHeight + case LoadDestination.Jump(label, targetStackSize) => + val stackDiff = stack.heightDiffWrt(targetStackSize) + if stackDiff != 0 then if expectedType == UNIT then bc dropMany stackDiff else @@ -599,7 +603,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { if dest == LoadDestination.FallThrough then val resKind = tpeTK(tree) val jumpTarget = new asm.Label - registerJumpDest(labelSym, resKind, LoadDestination.Jump(jumpTarget, stackHeight)) + registerJumpDest(labelSym, resKind, LoadDestination.Jump(jumpTarget, stack.recordSize())) genLoad(expr, resKind) markProgramPoint(jumpTarget) resKind @@ -657,7 +661,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { markProgramPoint(loop) if isInfinite then - val dest = LoadDestination.Jump(loop, stackHeight) + val dest = LoadDestination.Jump(loop, stack.recordSize()) genLoadTo(body, UNIT, dest) dest else @@ -672,7 +676,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val failure = new asm.Label genCond(cond, success, failure, targetIfNoJump = success) markProgramPoint(success) - genLoadTo(body, UNIT, LoadDestination.Jump(loop, stackHeight)) + genLoadTo(body, UNIT, LoadDestination.Jump(loop, stack.recordSize())) markProgramPoint(failure) end match LoadDestination.FallThrough @@ -765,10 +769,10 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { // on the stack (contrary to what the type in the AST says). // scala/bug#10290: qual can be `this.$outer()` (not just `this`), so we call genLoad (not just ALOAD_0) - genLoad(superQual) - stackHeight += 1 + val superQualTK = genLoad(superQual) + stack.push(superQualTK) genLoadArguments(args, paramTKs(app)) - stackHeight -= 1 + stack.pop() generatedType = genCallMethod(fun.symbol, InvokeStyle.Super, app.span) // 'new' constructor call: Note: since constructors are @@ -790,9 +794,10 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { assert(classBTypeFromSymbol(ctor.owner) == rt, s"Symbol ${ctor.owner.showFullName} is different from $rt") mnode.visitTypeInsn(asm.Opcodes.NEW, rt.internalName) bc dup generatedType - stackHeight += 2 + stack.push(rt) + stack.push(rt) genLoadArguments(args, paramTKs(app)) - stackHeight -= 2 + stack.pop(2) genCallMethod(ctor, InvokeStyle.Special, app.span) case _ => @@ -825,12 +830,11 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { else if (app.hasAttachment(BCodeHelpers.UseInvokeSpecial)) InvokeStyle.Special else InvokeStyle.Virtual - val savedStackHeight = stackHeight + val savedStackSize = stack.recordSize() if invokeStyle.hasInstance then - genLoadQualifier(fun) - stackHeight += 1 + stack.push(genLoadQualifier(fun)) genLoadArguments(args, paramTKs(app)) - stackHeight = savedStackHeight + stack.restoreSize(savedStackSize) val DesugaredSelect(qual, name) = fun: @unchecked // fun is a Select, also checked in genLoadQualifier val isArrayClone = name == nme.clone_ && qual.tpe.widen.isInstanceOf[JavaArrayType] @@ -888,7 +892,10 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { bc iconst elems.length bc newarray elmKind - stackHeight += 3 // during the genLoad below, there is the result, its dup, and the index + // during the genLoad below, there is the result, its dup, and the index + stack.push(generatedType) + stack.push(generatedType) + stack.push(INT) var i = 0 var rest = elems @@ -901,7 +908,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { i = i + 1 } - stackHeight -= 3 + stack.pop(3) generatedType } @@ -917,7 
+924,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val (generatedType, postMatch, postMatchDest) = if dest == LoadDestination.FallThrough then val postMatch = new asm.Label - (tpeTK(tree), postMatch, LoadDestination.Jump(postMatch, stackHeight)) + (tpeTK(tree), postMatch, LoadDestination.Jump(postMatch, stack.recordSize())) else (expectedType, null, dest) @@ -1179,7 +1186,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { } /* Emit code to Load the qualifier of `tree` on top of the stack. */ - def genLoadQualifier(tree: Tree): Unit = { + def genLoadQualifier(tree: Tree): BType = { lineNumber(tree) tree match { case DesugaredSelect(qualifier, _) => genLoad(qualifier) @@ -1188,6 +1195,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { case Some(sel) => genLoadQualifier(sel) case None => assert(t.symbol.owner == this.claszSymbol) + UNIT } case _ => abort(s"Unknown qualifier $tree") } @@ -1200,14 +1208,14 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { btpes match case btpe :: btpes1 => genLoad(arg, btpe) - stackHeight += btpe.size + stack.push(btpe) loop(args1, btpes1) case _ => case _ => - val savedStackHeight = stackHeight + val savedStackSize = stack.recordSize() loop(args, btpes) - stackHeight = savedStackHeight + stack.restoreSize(savedStackSize) end genLoadArguments def genLoadModule(tree: Tree): BType = { @@ -1258,7 +1266,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { /* Generate coercion denoted by "code" */ def genCoercion(code: Int): Unit = { - import ScalaPrimitivesOps._ + import ScalaPrimitivesOps.* (code: @switch) match { case B2B | S2S | C2C | I2I | L2L | F2F | D2D => () case _ => @@ -1307,13 +1315,13 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { }.sum bc.genNewStringBuilder(approxBuilderSize) - stackHeight += 1 // during the genLoad below, there is a reference to the StringBuilder on the stack + stack.push(jlStringBuilderRef) // during the genLoad below, there is a reference to the StringBuilder on the stack for (elem <- concatArguments) { val elemType = tpeTK(elem) genLoad(elem, elemType) bc.genStringBuilderAppend(elemType) } - stackHeight -= 1 + stack.pop() bc.genStringBuilderEnd } else { @@ -1331,7 +1339,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { var totalArgSlots = 0 var countConcats = 1 // ie. 
1 + how many times we spilled - val savedStackHeight = stackHeight + val savedStackSize = stack.recordSize() for (elem <- concatArguments) { val tpe = tpeTK(elem) @@ -1339,7 +1347,9 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { // Unlikely spill case if (totalArgSlots + elemSlots >= MaxIndySlots) { - stackHeight = savedStackHeight + countConcats + stack.restoreSize(savedStackSize) + for _ <- 0 until countConcats do + stack.push(StringRef) bc.genIndyStringConcat(recipe.toString, argTypes.result(), constVals.result()) countConcats += 1 totalArgSlots = 0 @@ -1364,10 +1374,10 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val tpe = tpeTK(elem) argTypes += tpe.toASMType genLoad(elem, tpe) - stackHeight += 1 + stack.push(tpe) } } - stackHeight = savedStackHeight + stack.restoreSize(savedStackSize) bc.genIndyStringConcat(recipe.toString, argTypes.result(), constVals.result()) // If we spilled, generate one final concat @@ -1432,7 +1442,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val mdescr = bmType.descriptor val isInterface = isEmittedInterface(receiverClass) - import InvokeStyle._ + import InvokeStyle.* if (style == Super) { if (isInterface && !method.is(JavaDefined)) { val args = new Array[BType](bmType.argumentTypes.length + 1) @@ -1486,7 +1496,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { } else if (tk.isRef) { // REFERENCE(_) | ARRAY(_) bc.emitIF_ACMP(op, success) } else { - import Primitives._ + import Primitives.* def useCmpG = if (negated) op == GT || op == GE else op == LT || op == LE (tk: @unchecked) match { case LONG => emit(asm.Opcodes.LCMP) @@ -1501,7 +1511,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { /* Emits code to compare (and consume) stack-top and zero using the 'op' operator */ private def genCZJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType, targetIfNoJump: asm.Label, negated: Boolean = false): Unit = { - import Primitives._ + import Primitives.* if (targetIfNoJump == success) genCZJUMP(failure, success, op.negate(), tk, targetIfNoJump, negated = !negated) else { if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT @@ -1562,9 +1572,9 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { } else { val tk = tpeTK(l).maxType(tpeTK(r)) genLoad(l, tk) - stackHeight += tk.size + stack.push(tk) genLoad(r, tk) - stackHeight -= tk.size + stack.pop() genCJUMP(success, failure, op, tk, targetIfNoJump) } } @@ -1673,15 +1683,15 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val equalsMethod: Symbol = { if (l.tpe <:< defn.BoxedNumberClass.info) { if (r.tpe <:< defn.BoxedNumberClass.info) defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumNum) - else if (r.tpe <:< defn.BoxedCharClass.info) NoSymbol // ctx.requiredMethod(BoxesRunTimeTypeRef, nme.equalsNumChar) // this method is private + else if (r.tpe <:< defn.BoxedCharClass.info) defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumChar) else defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumObject) } else defn.BoxesRunTimeModule_externalEquals } genLoad(l, ObjectRef) - stackHeight += 1 + stack.push(ObjectRef) genLoad(r, ObjectRef) - stackHeight -= 1 + stack.pop() genCallMethod(equalsMethod, InvokeStyle.Static) genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump) } @@ -1697,9 +1707,9 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { } else if (isNonNullExpr(l)) { // SI-7852 Avoid null check if L is statically non-null. 
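The hunks above and below all make the same mechanical substitution: the integer `stackHeight` bookkeeping becomes operations on a typed `stack` (the `BTypesStack` defined later in `BCodeSkelBuilder.scala`). A condensed, self-contained sketch of the idea follows; the names are illustrative, not the compiler's. It shows why tracking the *types* on the operand stack rather than a plain height pays off: wide types occupy two JVM slots, and recorded entries can later be turned into typed temporaries.

```scala
// Illustrative sketch only (assumed simplification of BTypesStack).
object StackSketch:
  enum Kind(val slots: Int):
    case INT  extends Kind(1)
    case LONG extends Kind(2)   // long/double take two JVM slots
    case REF  extends Kind(1)

  final class TypedStack:
    private var elems: List[Kind] = Nil
    def push(k: Kind): Unit = elems = k :: elems
    def pop(count: Int = 1): Unit = elems = elems.drop(count)
    def recordSize(): Int = elems.length              // number of entries
    def restoreSize(n: Int): Unit = elems = elems.takeRight(n)
    def height: Int = elems.map(_.slots).sum          // height in JVM slots

  @main def stackDemo(): Unit =
    val stack = TypedStack()
    stack.push(Kind.REF)
    stack.push(Kind.LONG)
    println(stack.height)      // 3 slots from only 2 entries
    val saved = stack.recordSize()
    stack.push(Kind.INT)
    stack.restoreSize(saved)   // drop everything pushed after `saved`
    println(stack.height)      // back to 3
```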
genLoad(l, ObjectRef) - stackHeight += 1 + stack.push(ObjectRef) genLoad(r, ObjectRef) - stackHeight -= 1 + stack.pop() genCallMethod(defn.Any_equals, InvokeStyle.Virtual) genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump) } else { @@ -1709,9 +1719,9 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val lNonNull = new asm.Label genLoad(l, ObjectRef) - stackHeight += 1 + stack.push(ObjectRef) genLoad(r, ObjectRef) - stackHeight -= 1 + stack.pop() locals.store(eqEqTempLocal) bc dup ObjectRef genCZJUMP(lNull, lNonNull, Primitives.EQ, ObjectRef, targetIfNoJump = lNull) @@ -1814,7 +1824,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { else jliLambdaMetaFactoryMetafactoryHandle - bc.jmethod.visitInvokeDynamicInsn(methodName, desc, metafactory, bsmArgs: _*) + bc.jmethod.visitInvokeDynamicInsn(methodName, desc, metafactory, bsmArgs*) generatedType } diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala index c36c8c546635..2ad58fea4cd1 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala @@ -9,24 +9,25 @@ import scala.tools.asm import scala.tools.asm.AnnotationVisitor import scala.tools.asm.ClassWriter import scala.collection.mutable +import scala.compiletime.uninitialized import dotty.tools.dotc.CompilationUnit import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.ast.Trees -import dotty.tools.dotc.core.Annotations._ -import dotty.tools.dotc.core.Constants._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Phases._ -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.Annotations.* +import dotty.tools.dotc.core.Constants.* +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Phases.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.Names.Name import dotty.tools.dotc.core.NameKinds.ExpandedName import dotty.tools.dotc.core.Signature -import dotty.tools.dotc.core.StdNames._ +import dotty.tools.dotc.core.StdNames.* import dotty.tools.dotc.core.NameKinds -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.core.Types -import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.core.TypeErasure import dotty.tools.dotc.transform.GenericSignatures import dotty.tools.dotc.transform.ElimErasedValueType @@ -44,12 +45,12 @@ import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions */ trait BCodeHelpers extends BCodeIdiomatic { // for some reason singleton types aren't allowed in constructor calls. will need several casts in code to enforce - //import global._ - import bTypes._ - import tpd._ - import coreBTypes._ + //import global.* + import bTypes.* + import tpd.* + import coreBTypes.* import int.{_, given} - import DottyBackendInterface._ + import DottyBackendInterface.* // We need to access GenBCode phase to get access to post-processor components. // At this point it should always be initialized already. 
@@ -291,7 +292,7 @@ trait BCodeHelpers extends BCodeIdiomatic { } case Ident(nme.WILDCARD) => // An underscore argument indicates that we want to use the default value for this parameter, so do not emit anything - case t: tpd.RefTree if t.symbol.owner.linkedClass.isAllOf(JavaEnumTrait) => + case t: tpd.RefTree if t.symbol.owner.linkedClass.isAllOf(JavaEnum) => val edesc = innerClasesStore.typeDescriptor(t.tpe) // the class descriptor of the enumeration class. val evalue = t.symbol.javaSimpleName // value the actual enumeration value. av.visitEnum(name, edesc, evalue) @@ -396,6 +397,9 @@ trait BCodeHelpers extends BCodeIdiomatic { atPhase(erasurePhase) { val memberTpe = if (sym.is(Method)) sym.denot.info + else if sym.denot.validFor.phaseId > erasurePhase.id && sym.isField && sym.getter.exists then + // Memoization field of getter entered after erasure, see run/i17069 for an example + sym.getter.denot.info.resultType else owner.denot.thisType.memberInfo(sym) getGenericSignatureHelper(sym, owner, memberTpe).orNull } @@ -576,7 +580,7 @@ trait BCodeHelpers extends BCodeIdiomatic { /* builder of mirror classes */ class JMirrorBuilder extends JCommonBuilder { - private var cunit: CompilationUnit = _ + private var cunit: CompilationUnit = uninitialized def getCurrentCUnit(): CompilationUnit = cunit; /* Generate a mirror class for a top-level module. A mirror class is a class @@ -700,10 +704,10 @@ trait BCodeHelpers extends BCodeIdiomatic { * classes. */ private def typeToTypeKind(tp: Type)(ct: BCodeHelpers)(storage: ct.BCInnerClassGen): ct.bTypes.BType = { - import ct.bTypes._ + import ct.bTypes.* val defn = ctx.definitions - import coreBTypes._ - import Types._ + import coreBTypes.* + import Types.* /** * Primitive types are represented as TypeRefs to the class symbol of, for example, scala.Int. * The `primitiveTypeMap` maps those class symbols to the corresponding PrimitiveBType. 
@@ -851,7 +855,7 @@ trait BCodeHelpers extends BCodeIdiomatic { object BCodeHelpers { class InvokeStyle(val style: Int) extends AnyVal { - import InvokeStyle._ + import InvokeStyle.* def isVirtual: Boolean = this == Virtual def isStatic : Boolean = this == Static def isSpecial: Boolean = this == Special diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala b/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala index 42f8ef7f4ef6..9938b7415da7 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala @@ -22,8 +22,8 @@ trait BCodeIdiomatic { val bTypes: BTypesFromSymbols[int.type] import int.{_, given} - import bTypes._ - import coreBTypes._ + import bTypes.* + import coreBTypes.* lazy val JavaStringBuilderClassName = jlStringBuilderRef.internalName @@ -247,9 +247,9 @@ trait BCodeIdiomatic { ): Unit = { jmethod.visitInvokeDynamicInsn( "makeConcatWithConstants", - asm.Type.getMethodDescriptor(StringRef.toASMType, argTypes:_*), + asm.Type.getMethodDescriptor(StringRef.toASMType, argTypes*), coreBTypes.jliStringConcatFactoryMakeConcatWithConstantsHandle, - (recipe +: constants):_* + (recipe +: constants)* ) } @@ -522,7 +522,7 @@ trait BCodeIdiomatic { i += 1 } assert(oldPos == keys.length, "emitSWITCH") - jmethod.visitTableSwitchInsn(keyMin, keyMax, defaultBranch, newBranches: _*) + jmethod.visitTableSwitchInsn(keyMin, keyMax, defaultBranch, newBranches*) } else { jmethod.visitLookupSwitchInsn(defaultBranch, keys, branches) } @@ -617,7 +617,7 @@ trait BCodeIdiomatic { /* Constant-valued val-members of JCodeMethodN at the companion object, so as to avoid re-initializing them multiple times. */ object JCodeMethodN { - import asm.Opcodes._ + import asm.Opcodes.* // ---------------- conversions ---------------- @@ -651,7 +651,7 @@ trait BCodeIdiomatic { * can-multi-thread */ final def coercionFrom(code: Int): BType = { - import ScalaPrimitivesOps._ + import ScalaPrimitivesOps.* (code: @switch) match { case B2B | B2C | B2S | B2I | B2L | B2F | B2D => BYTE case S2B | S2S | S2C | S2I | S2L | S2F | S2D => SHORT @@ -668,7 +668,7 @@ trait BCodeIdiomatic { * can-multi-thread */ final def coercionTo(code: Int): BType = { - import ScalaPrimitivesOps._ + import ScalaPrimitivesOps.* (code: @switch) match { case B2B | C2B | S2B | I2B | L2B | F2B | D2B => BYTE case B2C | C2C | S2C | I2C | L2C | F2C | D2C => CHAR diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala b/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala index 0a11fb898b48..0ab9ed85b6cf 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala @@ -3,26 +3,24 @@ package backend package jvm import scala.language.unsafeNulls - import scala.annotation.tailrec - -import scala.collection.{ mutable, immutable } - +import scala.collection.{immutable, mutable} import scala.tools.asm import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.ast.TreeTypeMap import dotty.tools.dotc.CompilationUnit -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.NameKinds._ +import dotty.tools.dotc.ast.Trees.SyntheticUnit +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.core.NameKinds.* import dotty.tools.dotc.core.Names.TermName -import dotty.tools.dotc.core.Symbols._ -import 
dotty.tools.dotc.core.Types._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.util.Spans._ +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Types.* +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.util.Spans.* import dotty.tools.dotc.report -import dotty.tools.dotc.transform.SymUtils._ + /* * @@ -33,19 +31,81 @@ import dotty.tools.dotc.transform.SymUtils._ trait BCodeSkelBuilder extends BCodeHelpers { import int.{_, given} import DottyBackendInterface.{symExtensions, _} - import tpd._ - import bTypes._ - import coreBTypes._ - import bCodeAsmCommon._ + import tpd.* + import bTypes.* + import coreBTypes.* + import bCodeAsmCommon.* lazy val NativeAttr: Symbol = requiredClass[scala.native] + final class BTypesStack: + // Anecdotally, growing past 16 to 32 is common; growing past 32 is rare + private var stack = new Array[BType](32) + private var size = 0 + + def isEmpty: Boolean = size == 0 + + def push(btype: BType): Unit = + if size == stack.length then + stack = java.util.Arrays.copyOf(stack, stack.length * 2) + stack(size) = btype + size += 1 + + def pop(): Unit = pop(1) + + def pop(count: Int): Unit = + assert(size >= count) + size -= count + + def height: Int = heightBetween(0, size) + + private def heightBetween(start: Int, end: Int): Int = + var result = 0 + var i = start + while i != end do + result += stack(i).size + i += 1 + result + + def recordSize(): BTypesStack.Size = BTypesStack.intToSize(size) + + def restoreSize(targetSize: BTypesStack.Size): Unit = + val targetSize1 = BTypesStack.sizeToInt(targetSize) + assert(size >= targetSize1) + size = targetSize1 + + def heightDiffWrt(targetSize: BTypesStack.Size): Int = + val targetSize1 = BTypesStack.sizeToInt(targetSize) + assert(size >= targetSize1) + heightBetween(targetSize1, size) + + def clear(): Unit = + size = 0 + + def acquireFullStack(): IArray[BType] = + val res = IArray.unsafeFromArray(stack.slice(0, size)) + size = 0 + res + + def restoreFullStack(fullStack: IArray[BType]): Unit = + assert(size == 0 && stack.length >= fullStack.length) + fullStack.copyToArray(stack) + size = fullStack.length + end BTypesStack + + object BTypesStack: + opaque type Size = Int + + private def intToSize(size: Int): Size = size + private def sizeToInt(size: Size): Int = size + end BTypesStack + /** The destination of a value generated by `genLoadTo`. */ enum LoadDestination: /** The value is put on the stack, and control flows through to the next opcode. */ case FallThrough /** The value is put on the stack, and control flow is transferred to the given `label`. */ - case Jump(label: asm.Label, targetStackHeight: Int) + case Jump(label: asm.Label, targetStackSize: BTypesStack.Size) /** The value is RETURN'ed from the enclosing method. */ case Return /** The value is ATHROW'n. 
*/ @@ -369,7 +429,7 @@ trait BCodeSkelBuilder extends BCodeHelpers { var earlyReturnVar: Symbol = null var shouldEmitCleanup = false // stack tracking - var stackHeight = 0 + val stack = new BTypesStack // line numbers var lastEmittedLineNr = -1 @@ -562,16 +622,17 @@ trait BCodeSkelBuilder extends BCodeHelpers { case _ => a } - if (!emitLines || !tree.span.exists) return; - val nr = ctx.source.offsetToLine(tree.span.point) + 1 - if (nr != lastEmittedLineNr) { - lastEmittedLineNr = nr - getNonLabelNode(lastInsn) match { - case lnn: asm.tree.LineNumberNode => - // overwrite previous landmark as no instructions have been emitted for it - lnn.line = nr - case _ => - mnode.visitLineNumber(nr, currProgramPoint()) + if (emitLines && tree.span.exists && !tree.hasAttachment(SyntheticUnit)) { + val nr = ctx.source.offsetToLine(tree.span.point) + 1 + if (nr != lastEmittedLineNr) { + lastEmittedLineNr = nr + getNonLabelNode(lastInsn) match { + case lnn: asm.tree.LineNumberNode => + // overwrite previous landmark as no instructions have been emitted for it + lnn.line = nr + case _ => + mnode.visitLineNumber(nr, currProgramPoint()) + } } } } @@ -589,7 +650,7 @@ trait BCodeSkelBuilder extends BCodeHelpers { earlyReturnVar = null shouldEmitCleanup = false - stackHeight = 0 + stack.clear() lastEmittedLineNr = -1 } @@ -763,9 +824,14 @@ trait BCodeSkelBuilder extends BCodeHelpers { for (p <- params) { locals.makeLocal(p.symbol) } // debug assert((params.map(p => locals(p.symbol).tk)) == asmMethodType(methSymbol).getArgumentTypes.toList, "debug") - if (params.size > MaximumJvmParameters) { + val paramsSize = params.map { param => + val tpeTym = param.symbol.info.typeSymbol + if tpeTym == defn.LongClass || tpeTym == defn.DoubleClass then 2 else 1 + }.sum + if (paramsSize > MaximumJvmParameters) { // SI-7324 - report.error(em"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.", ctx.source.atSpan(methSymbol.span)) + val info = if paramsSize == params.length then "" else " (Long and Double count as 2)" // https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.3.3 + report.error(em"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters$info.", ctx.source.atSpan(methSymbol.span)) return } diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeSyncAndTry.scala b/compiler/src/dotty/tools/backend/jvm/BCodeSyncAndTry.scala index b5ed27511e7e..4e2ea6dd52b8 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeSyncAndTry.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeSyncAndTry.scala @@ -9,7 +9,7 @@ import scala.tools.asm import dotty.tools.dotc.CompilationUnit import dotty.tools.dotc.core.StdNames.nme -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.ast.tpd /* @@ -20,9 +20,9 @@ import dotty.tools.dotc.ast.tpd */ trait BCodeSyncAndTry extends BCodeBodyBuilder { import int.given - import tpd._ - import bTypes._ - import coreBTypes._ + import tpd.* + import bTypes.* + import coreBTypes.* /* * Functionality to lower `synchronized` and `try` expressions. */ @@ -118,6 +118,11 @@ trait BCodeSyncAndTry extends BCodeBodyBuilder { /* * Emitting try-catch is easy, emitting try-catch-finally not quite so. + * + * For a try-catch, the only thing we need to care about is to stash the stack away + * in local variables and load them back in afterwards, in case the incoming stack + * is not empty. 
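To make the stash/unstash order concrete: values are stored from the top of the stack down (a store always consumes the current top) and reloaded from the bottom up, which reconstructs the original stack order; reference slots are nulled out after reloading so they do not pin garbage. A small simulation of that ordering, illustrative only and not compiler code:

```scala
object StashOrder:
  @main def stashDemo(): Unit =
    var stack = List("c", "b", "a")            // head = top of the operand stack
    val locals = new Array[String](stack.length)
    // stash: from the top of the stack down to the bottom
    for i <- (locals.length - 1) to 0 by -1 do
      locals(i) = stack.head                   // a store consumes the stack top
      stack = stack.tail
    assert(stack.isEmpty)                      // the try-catch runs on an empty stack
    // unstash: from the bottom of the stack to the top
    for i <- 0 until locals.length do
      stack = locals(i) :: stack
      locals(i) = null                         // cf. the ACONST_NULL stores: let refs be GC'd
    println(stack)                             // List(c, b, a): original order restored
```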
+ * * A finally-block (which always has type Unit, thus leaving the operand stack unchanged) * affects control-transfer from protected regions, as follows: * @@ -190,7 +195,7 @@ trait BCodeSyncAndTry extends BCodeBodyBuilder { } } - // ------ (0) locals used later ------ + // ------ locals used later ------ /* * `postHandlers` is a program point denoting: @@ -203,6 +208,13 @@ trait BCodeSyncAndTry extends BCodeBodyBuilder { */ val postHandlers = new asm.Label + // stack stash + val needStackStash = !stack.isEmpty && !caseHandlers.isEmpty + val acquiredStack = if needStackStash then stack.acquireFullStack() else null + val stashLocals = + if acquiredStack == null then null + else acquiredStack.uncheckedNN.filter(_ != UNIT).map(btpe => locals.makeTempLocal(btpe)) + val hasFinally = (finalizer != tpd.EmptyTree) /* @@ -222,6 +234,17 @@ trait BCodeSyncAndTry extends BCodeBodyBuilder { */ val finCleanup = if (hasFinally) new asm.Label else null + /* ------ (0) Stash the stack into local variables, if necessary. + * From top of the stack down to the bottom. + * ------ + */ + + if stashLocals != null then + val stashLocalsNN = stashLocals.uncheckedNN // why is this necessary? + for i <- (stashLocalsNN.length - 1) to 0 by -1 do + val local = stashLocalsNN(i) + bc.store(local.idx, local.tk) + /* ------ (1) try-block, protected by: * (1.a) the EHs due to case-clauses, emitted in (2), * (1.b) the EH due to finally-clause, emitted in (3.A) @@ -367,6 +390,39 @@ trait BCodeSyncAndTry extends BCodeBodyBuilder { emitFinalizer(finalizer, tmp, isDuplicate = false) // the only invocation of emitFinalizer with `isDuplicate == false` } + /* ------ (5) Unstash the stack, if it was stashed before. + * From bottom of the stack to the top. + * If there is a non-UNIT result, we need to temporarily store + * that one in a local variable while we unstash. + * ------ + */ + + if stashLocals != null then + val stashLocalsNN = stashLocals.uncheckedNN // why is this necessary? + + val resultLoc = + if kind == UNIT then null + else if tmp != null then locals(tmp) // reuse the same local + else locals.makeTempLocal(kind) + if resultLoc != null then + bc.store(resultLoc.idx, kind) + + for i <- 0 until stashLocalsNN.size do + val local = stashLocalsNN(i) + bc.load(local.idx, local.tk) + if local.tk.isRef then + bc.emit(asm.Opcodes.ACONST_NULL) + bc.store(local.idx, local.tk) + + stack.restoreFullStack(acquiredStack.nn) + + if resultLoc != null then + bc.load(resultLoc.idx, kind) + if kind.isRef then + bc.emit(asm.Opcodes.ACONST_NULL) + bc.store(resultLoc.idx, kind) + end if // stashLocals != null + kind } // end of genLoadTry() diff --git a/compiler/src/dotty/tools/backend/jvm/BTypes.scala b/compiler/src/dotty/tools/backend/jvm/BTypes.scala index 5539bf44aa17..dff7353b761e 100644 --- a/compiler/src/dotty/tools/backend/jvm/BTypes.scala +++ b/compiler/src/dotty/tools/backend/jvm/BTypes.scala @@ -16,8 +16,6 @@ import scala.tools.asm */ abstract class BTypes { self => val frontendAccess: PostProcessorFrontendAccess - import frontendAccess.{frontendSynch} - val int: DottyBackendInterface import int.given /** @@ -31,8 +29,7 @@ abstract class BTypes { self => * Concurrent because stack map frames are computed when in the class writer, which might run * on multiple classes concurrently. 
*/ - protected def classBTypeFromInternalNameMap: collection.concurrent.Map[String, ClassBType] - // NOTE: Should be a lazy val but scalac does not allow abstract lazy vals (dotty does) + protected lazy val classBTypeFromInternalNameMap: collection.concurrent.Map[String, ClassBType] /** * Obtain a previously constructed ClassBType for a given internal name. @@ -40,7 +37,7 @@ abstract class BTypes { self => def classBTypeFromInternalName(internalName: String) = classBTypeFromInternalNameMap(internalName) val coreBTypes: CoreBTypes { val bTypes: self.type} - import coreBTypes._ + import coreBTypes.* /** * A BType is either a primitive type, a ClassBType, an ArrayBType of one of these, or a MethodType diff --git a/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala b/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala index 884dd19ee64f..b8d7ee04c870 100644 --- a/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala +++ b/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala @@ -7,12 +7,12 @@ import scala.annotation.threadUnsafe import scala.collection.mutable import scala.collection.mutable.Clearable -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Phases._ -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Phases.* +import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.core.Phases.Phase -import dotty.tools.dotc.transform.SymUtils._ + import dotty.tools.dotc.core.StdNames import dotty.tools.dotc.core.Phases @@ -29,12 +29,12 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce lazy val VolatileAttr = requiredClass[scala.volatile] val bCodeAsmCommon: BCodeAsmCommon[int.type ] = new BCodeAsmCommon(int) - import bCodeAsmCommon._ + import bCodeAsmCommon.* val coreBTypes = new CoreBTypesFromSymbols[I]{ val bTypes: BTypesFromSymbols.this.type = BTypesFromSymbols.this } - import coreBTypes._ + import coreBTypes.* @threadUnsafe protected lazy val classBTypeFromInternalNameMap = collection.concurrent.TrieMap.empty[String, ClassBType] @@ -55,14 +55,15 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce (classSym != defn.NothingClass && classSym != defn.NullClass), s"Cannot create ClassBType for special class symbol ${classSym.showFullName}") - convertedClasses.getOrElse(classSym, { - val internalName = classSym.javaBinaryName - // We first create and add the ClassBType to the hash map before computing its info. This - // allows initializing cyclic dependencies, see the comment on variable ClassBType._info. - val classBType = new ClassBType(internalName) - convertedClasses(classSym) = classBType - setClassInfo(classSym, classBType) - }) + convertedClasses.synchronized: + convertedClasses.getOrElse(classSym, { + val internalName = classSym.javaBinaryName + // We first create and add the ClassBType to the hash map before computing its info. This + // allows initializing cyclic dependencies, see the comment on variable ClassBType._info.
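The pattern being wrapped in `synchronized` here, register a placeholder before computing its info so that cyclic lookups succeed, with the check-then-insert made atomic, looks roughly as follows in stripped-down form. The names below are hypothetical, not the compiler's:

```scala
import scala.collection.concurrent.TrieMap

final class Node(val name: String):
  var info: List[Node] = Nil                  // filled in after registration

object Registry:
  private val cache = TrieMap.empty[String, Node]
  def nodeFor(name: String)(computeInfo: Node => List[Node]): Node =
    cache.synchronized {
      cache.getOrElse(name, {
        val n = Node(name)
        cache(name) = n                       // registered first, so cyclic lookups made
        n.info = computeInfo(n)               // from computeInfo find the entry instead
        n                                     // of recursing forever
      })
    }
```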
+ val classBType = new ClassBType(internalName) + convertedClasses(classSym) = classBType + setClassInfo(classSym, classBType) + }) } final def mirrorClassBTypeFromSymbol(moduleClassSym: Symbol): ClassBType = { @@ -285,7 +286,7 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce val finalFlag = sym.is(Final) && !toDenot(sym).isClassConstructor && !sym.is(Mutable, butNot = Accessor) && !sym.enclosingClass.is(Trait) - import asm.Opcodes._ + import asm.Opcodes.* import GenBCodeOps.addFlagIf 0 .addFlagIf(privateFlag, ACC_PRIVATE) .addFlagIf(!privateFlag, ACC_PUBLIC) @@ -303,7 +304,7 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce .addFlagIf(sym.is(Bridge), ACC_BRIDGE | ACC_SYNTHETIC) .addFlagIf(sym.is(Artifact), ACC_SYNTHETIC) .addFlagIf(sym.isClass && !sym.isInterface, ACC_SUPER) - .addFlagIf(sym.isAllOf(JavaEnumTrait), ACC_ENUM) + .addFlagIf(sym.isAllOf(JavaEnum), ACC_ENUM) .addFlagIf(sym.is(JavaVarargs), ACC_VARARGS) .addFlagIf(sym.is(Synchronized), ACC_SYNCHRONIZED) .addFlagIf(sym.isDeprecated, ACC_DEPRECATED) @@ -311,7 +312,7 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce } def javaFieldFlags(sym: Symbol) = { - import asm.Opcodes._ + import asm.Opcodes.* import GenBCodeOps.addFlagIf javaFlags(sym) .addFlagIf(sym.hasAnnotation(TransientAttr), ACC_TRANSIENT) diff --git a/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala b/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala index 2eaaccdd441d..2f8a469169cc 100644 --- a/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala +++ b/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala @@ -5,7 +5,7 @@ import scala.tools.asm.Handle import scala.tools.asm.tree.InvokeDynamicInsnNode import asm.tree.ClassNode import scala.collection.mutable -import scala.jdk.CollectionConverters._ +import scala.jdk.CollectionConverters.* import dotty.tools.dotc.report import scala.language.unsafeNulls @@ -36,6 +36,7 @@ class BackendUtils(val postProcessor: PostProcessor) { case "19" => asm.Opcodes.V19 case "20" => asm.Opcodes.V20 case "21" => asm.Opcodes.V21 + case "22" => asm.Opcodes.V22 } lazy val extraProc: Int = { @@ -91,9 +92,9 @@ class BackendUtils(val postProcessor: PostProcessor) { * methods. */ def addLambdaDeserialize(classNode: ClassNode, implMethodsArray: Array[Handle]): Unit = { - import asm.Opcodes._ - import bTypes._ - import coreBTypes._ + import asm.Opcodes.* + import bTypes.* + import coreBTypes.* val cw = classNode @@ -103,12 +104,10 @@ class BackendUtils(val postProcessor: PostProcessor) { // stack map frames and invokes the `getCommonSuperClass` method. This method expects all // ClassBTypes mentioned in the source code to exist in the map. - val serlamObjDesc = MethodBType(jliSerializedLambdaRef :: Nil, ObjectRef).descriptor - - val mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$deserializeLambda$", serlamObjDesc, null, null) + val mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$deserializeLambda$", serializedLamdaObjDesc, null, null) def emitLambdaDeserializeIndy(targetMethods: Seq[Handle]): Unit = { mv.visitVarInsn(ALOAD, 0) - mv.visitInvokeDynamicInsn("lambdaDeserialize", serlamObjDesc, jliLambdaDeserializeBootstrapHandle, targetMethods: _*) + mv.visitInvokeDynamicInsn("lambdaDeserialize", serializedLamdaObjDesc, jliLambdaDeserializeBootstrapHandle, targetMethods*) } val targetMethodGroupLimit = 255 - 1 - 3 // JVM limit. 
See MAX_MH_ARITY in CallSite.java @@ -133,6 +132,11 @@ class BackendUtils(val postProcessor: PostProcessor) { mv.visitInsn(ARETURN) } + private lazy val serializedLamdaObjDesc = { + import coreBTypes.{ObjectRef, jliSerializedLambdaRef} + MethodBType(jliSerializedLambdaRef :: Nil, ObjectRef).descriptor + } + /** * Visit the class node and collect all referenced nested classes. */ diff --git a/compiler/src/dotty/tools/backend/jvm/ClassfileWriter.scala b/compiler/src/dotty/tools/backend/jvm/ClassfileWriter.scala deleted file mode 100644 index 08e84de92dca..000000000000 --- a/compiler/src/dotty/tools/backend/jvm/ClassfileWriter.scala +++ /dev/null @@ -1,142 +0,0 @@ -package dotty.tools.backend.jvm - -import java.io.{DataOutputStream, IOException, PrintWriter, StringWriter} -import java.nio.file.Files -import java.util.jar.Attributes.Name - -import scala.tools.asm.ClassReader -import scala.tools.asm.tree.ClassNode -import dotty.tools.io.* -import dotty.tools.dotc.core.Decorators.* -import dotty.tools.dotc.util.NoSourcePosition -import java.nio.charset.StandardCharsets -import java.nio.channels.ClosedByInterruptException -import BTypes.InternalName -import scala.language.unsafeNulls - -class ClassfileWriter(frontendAccess: PostProcessorFrontendAccess) { - import frontendAccess.{backendReporting, compilerSettings} - - // if non-null, classfiles are additionally written to this directory - private val dumpOutputDir: AbstractFile = getDirectoryOrNull(compilerSettings.dumpClassesDirectory) - - // if non-null, classfiles are written to a jar instead of the output directory - private val jarWriter: JarWriter | Null = compilerSettings.outputDirectory match { - case jar: JarArchive => - val mainClass = compilerSettings.mainClass.orElse { - // If no main class was specified, see if there's only one - // entry point among the classes going into the jar. - frontendAccess.getEntryPoints match { - case name :: Nil => - backendReporting.log(i"Unique entry point: setting Main-Class to $name") - Some(name) - case names => - if names.isEmpty then backendReporting.warning(em"No Main-Class designated or discovered.") - else backendReporting.warning(em"No Main-Class due to multiple entry points:\n ${names.mkString("\n ")}") - None - } - } - jar.underlyingSource.map{ source => - if jar.isEmpty then - val jarMainAttrs = mainClass.map(Name.MAIN_CLASS -> _).toList - new Jar(source.file).jarWriter(jarMainAttrs: _*) - else - // Writing to non-empty JAR might be an undefined behaviour, e.g.
in case if other files where - // created using `AbstractFile.bufferedOutputStream`instead of JarWritter - backendReporting.warning(em"Tried to write to non-empty JAR: $source") - null - }.orNull - - case _ => null - } - - private def getDirectoryOrNull(dir: Option[String]): AbstractFile = - dir.map(d => new PlainDirectory(Directory(d))).orNull - - private def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = { - if (base.file != null) { - fastGetFile(base, clsName, suffix) - } else { - def ensureDirectory(dir: AbstractFile): AbstractFile = - if (dir.isDirectory) dir - else throw new FileConflictException(s"${base.path}/$clsName$suffix: ${dir.path} is not a directory", dir) - var dir = base - val pathParts = clsName.split("[./]").toList - for (part <- pathParts.init) dir = ensureDirectory(dir) subdirectoryNamed part - ensureDirectory(dir) fileNamed pathParts.last + suffix - } - } - - private def fastGetFile(base: AbstractFile, clsName: String, suffix: String) = { - val index = clsName.lastIndexOf('/') - val (packageName, simpleName) = if (index > 0) { - (clsName.substring(0, index), clsName.substring(index + 1)) - } else ("", clsName) - val directory = base.file.toPath.resolve(packageName) - new PlainFile(Path(directory.resolve(simpleName + suffix))) - } - - private def writeBytes(outFile: AbstractFile, bytes: Array[Byte]): Unit = { - if (outFile.file != null) { - val outPath = outFile.file.toPath - try Files.write(outPath, bytes) - catch { - case _: java.nio.file.NoSuchFileException => - Files.createDirectories(outPath.getParent) - Files.write(outPath, bytes) - } - } else { - val out = new DataOutputStream(outFile.bufferedOutput) - try out.write(bytes, 0, bytes.length) - finally out.close() - } - } - - def writeClass(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): AbstractFile | Null = try { - // val writeStart = Statistics.startTimer(BackendStats.bcodeWriteTimer) - val outFile = writeToJarOrFile(className, bytes, ".class") - // Statistics.stopTimer(BackendStats.bcodeWriteTimer, writeStart) - - if (dumpOutputDir != null) { - val dumpFile = getFile(dumpOutputDir, className, ".class") - writeBytes(dumpFile, bytes) - } - outFile - } catch { - case e: FileConflictException => - backendReporting.error(em"error writing $className: ${e.getMessage}") - null - case e: java.nio.file.FileSystemException => - if compilerSettings.debug then e.printStackTrace() - backendReporting.error(em"error writing $className: ${e.getClass.getName} ${e.getMessage}") - null - } - - def writeTasty(className: InternalName, bytes: Array[Byte]): Unit = - writeToJarOrFile(className, bytes, ".tasty") - - private def writeToJarOrFile(className: InternalName, bytes: Array[Byte], suffix: String): AbstractFile | Null = { - if jarWriter == null then - val outFolder = compilerSettings.outputDirectory - val outFile = getFile(outFolder, className, suffix) - try writeBytes(outFile, bytes) - catch case ex: ClosedByInterruptException => - try outFile.delete() // don't leave an empty or half-written files around after an interrupt - catch case _: Throwable => () - finally throw ex - outFile - else - val path = className + suffix - val out = jarWriter.newOutputStream(path) - try out.write(bytes, 0, bytes.length) - finally out.flush() - null - } - - def close(): Unit = { - if (jarWriter != null) jarWriter.close() - } -} - -/** Can't output a file due to the state of the file system. 
*/ -class FileConflictException(msg: String, val file: AbstractFile) extends IOException(msg) diff --git a/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala b/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala new file mode 100644 index 000000000000..ec251b4aa3f0 --- /dev/null +++ b/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala @@ -0,0 +1,288 @@ +package dotty.tools.backend.jvm + +import java.io.{DataOutputStream, IOException, BufferedOutputStream, FileOutputStream} +import java.nio.ByteBuffer +import java.nio.channels.{ClosedByInterruptException, FileChannel} +import java.nio.charset.StandardCharsets.UTF_8 +import java.nio.file.* +import java.nio.file.attribute.FileAttribute +import java.util +import java.util.concurrent.ConcurrentHashMap +import java.util.zip.{CRC32, Deflater, ZipEntry, ZipOutputStream} + +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.em +import dotty.tools.io.{AbstractFile, PlainFile} +import dotty.tools.io.PlainFile.toPlainFile +import BTypes.InternalName +import scala.util.chaining.* +import dotty.tools.io.JarArchive + +import scala.language.unsafeNulls + + +class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { + type NullableFile = AbstractFile | Null + import frontendAccess.{compilerSettings, backendReporting} + + sealed trait TastyWriter { + def writeTasty(name: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): Unit + } + + /** + * The interface to writing classfiles. GeneratedClassHandler calls these methods to generate the + * output directories and files, and eventually calls `close` when the writing is complete. + * + * The companion object is responsible for constructing an appropriate and optimal implementation for + * the supplied settings. + * + * Operations are thread-safe. + */ + sealed trait ClassfileWriter extends TastyWriter { + /** + * Write a classfile + */ + def writeClass(name: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): NullableFile + + + /** + * Close the writer. Behavior is undefined after a call to `close`.
+ */ + def close(): Unit + + protected def classRelativePath(className: InternalName, suffix: String = ".class"): String = + className.replace('.', '/').nn + suffix + } + + object ClassfileWriter { + private def getDirectory(dir: String): Path = Paths.get(dir).nn + + def apply(): ClassfileWriter = { + val jarManifestMainClass: Option[String] = compilerSettings.mainClass.orElse { + frontendAccess.getEntryPoints match { + case List(name) => Some(name) + case es => + if es.isEmpty then backendReporting.log("No Main-Class designated or discovered.") + else backendReporting.log(s"No Main-Class due to multiple entry points:\n ${es.mkString("\n ")}") + None + } + } + + // In Scala 2, depending on the cardinality of distinct output dirs, a MultiClassWriter could have been used + // In Dotty we always use a single output directory + val basicClassWriter = new SingleClassWriter( + FileWriter(compilerSettings.outputDirectory, jarManifestMainClass) + ) + + val withAdditionalFormats = + compilerSettings.dumpClassesDirectory + .map(getDirectory) + .filter{path => Files.exists(path).tap{ok => if !ok then backendReporting.error(em"Output dir does not exist: ${path.toString}")}} + .map(out => FileWriter(out.toPlainFile, None)) + .fold[ClassfileWriter](basicClassWriter)(new DebugClassWriter(basicClassWriter, _)) + + // val enableStats = settings.areStatisticsEnabled && settings.YaddBackendThreads.value == 1 + // if (enableStats) new WithStatsWriter(withAdditionalFormats) else + withAdditionalFormats + } + + private final class SingleClassWriter(underlying: FileWriter) extends ClassfileWriter { + override def writeClass(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): NullableFile = { + underlying.writeFile(classRelativePath(className), bytes) + } + override def writeTasty(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): Unit = { + underlying.writeFile(classRelativePath(className, ".tasty"), bytes) + } + + + override def close(): Unit = underlying.close() + } + + private final class DebugClassWriter(basic: ClassfileWriter, dump: FileWriter) extends ClassfileWriter { + override def writeClass(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): NullableFile = { + val outFile = basic.writeClass(className, bytes, sourceFile) + dump.writeFile(classRelativePath(className), bytes) + outFile + } + + override def writeTasty(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): Unit = { + basic.writeTasty(className, bytes, sourceFile) + } + + override def close(): Unit = { + basic.close() + dump.close() + } + } + } + + sealed trait FileWriter { + def writeFile(relativePath: String, bytes: Array[Byte]): NullableFile + def close(): Unit + } + + object FileWriter { + def apply(file: AbstractFile, jarManifestMainClass: Option[String]): FileWriter = + if (file.isInstanceOf[JarArchive]) { + val jarCompressionLevel = compilerSettings.jarCompressionLevel + // Writing to non-empty JAR might be an undefined behaviour, e.g.
in case other files were + // created using `AbstractFile.bufferedOutputStream` instead of JarWriter + val jarFile = file.underlyingSource.getOrElse{ + throw new IllegalStateException("No underlying source for jar") + } + assert(file.isEmpty, s"Unsafe writing to non-empty JAR: $jarFile") + new JarEntryWriter(jarFile, jarManifestMainClass, jarCompressionLevel) + } + else if (file.isVirtual) new VirtualFileWriter(file) + else if (file.isDirectory) new DirEntryWriter(file.file.toPath.nn) + else throw new IllegalStateException(s"don't know how to handle an output of $file [${file.getClass}]") + } + + private final class JarEntryWriter(file: AbstractFile, mainClass: Option[String], compressionLevel: Int) extends FileWriter { + //keep these imports local - avoid confusion with scala naming + import java.util.jar.Attributes.Name.{MANIFEST_VERSION, MAIN_CLASS} + import java.util.jar.{JarOutputStream, Manifest} + + val storeOnly = compressionLevel == Deflater.NO_COMPRESSION + + val jarWriter: JarOutputStream = { + import scala.util.Properties.* + val manifest = new Manifest + val attrs = manifest.getMainAttributes.nn + attrs.put(MANIFEST_VERSION, "1.0") + attrs.put(ScalaCompilerVersion, versionNumberString) + mainClass.foreach(c => attrs.put(MAIN_CLASS, c)) + + val jar = new JarOutputStream(new BufferedOutputStream(new FileOutputStream(file.file), 64000), manifest) + jar.setLevel(compressionLevel) + if (storeOnly) jar.setMethod(ZipOutputStream.STORED) + jar + } + + lazy val crc = new CRC32 + + override def writeFile(relativePath: String, bytes: Array[Byte]): NullableFile = this.synchronized { + val entry = new ZipEntry(relativePath) + if (storeOnly) { + // When using compression method `STORED`, the ZIP spec requires the CRC and compressed/ + // uncompressed sizes to be written before the data. The JarOutputStream could compute the + // values while writing the data, but not patch them into the stream after the fact. So we + // need to pre-compute them here. The compressed size is taken from size. + // https://stackoverflow.com/questions/1206970/how-to-create-uncompressed-zip-archive-in-java/5868403 + // With compression method `DEFLATED` JarOutputStream computes and sets the values.
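A standalone illustration of the point made in the comment above: with method `STORED`, `ZipOutputStream` rejects an entry unless its size and CRC are set up front, because they go into the local header ahead of the data. This is a sketch, not compiler code:

```scala
import java.io.ByteArrayOutputStream
import java.util.zip.{CRC32, ZipEntry, ZipOutputStream}

@main def storedEntryDemo(): Unit =
  val bytes = "hello".getBytes("UTF-8")
  val out = new ZipOutputStream(new ByteArrayOutputStream())
  out.setMethod(ZipOutputStream.STORED)
  val entry = new ZipEntry("Hello.class")
  entry.setSize(bytes.length)        // uncompressed size; equals the stored size
  val crc = new CRC32
  crc.update(bytes)
  entry.setCrc(crc.getValue)         // without this, putNextEntry throws ZipException
  out.putNextEntry(entry)
  out.write(bytes)
  out.closeEntry()
  out.close()
```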
+ entry.setSize(bytes.length) + crc.reset() + crc.update(bytes) + entry.setCrc(crc.getValue) + } + jarWriter.putNextEntry(entry) + try jarWriter.write(bytes, 0, bytes.length) + finally jarWriter.flush() + null + } + + override def close(): Unit = this.synchronized(jarWriter.close()) + } + + private final class DirEntryWriter(base: Path) extends FileWriter { + val builtPaths = new ConcurrentHashMap[Path, java.lang.Boolean]() + val noAttributes = Array.empty[FileAttribute[?]] + private val isWindows = scala.util.Properties.isWin + + private def checkName(component: Path): Unit = if (isWindows) { + val specials = raw"(?i)CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9]".r + val name = component.toString + def warnSpecial(): Unit = backendReporting.warning(em"path component is special Windows device: ${name}") + specials.findPrefixOf(name).foreach(prefix => if (prefix.length == name.length || name(prefix.length) == '.') warnSpecial()) + } + + def ensureDirForPath(baseDir: Path, filePath: Path): Unit = { + import java.lang.Boolean.TRUE + val parent = filePath.getParent + if (!builtPaths.containsKey(parent)) { + parent.iterator.forEachRemaining(checkName) + try Files.createDirectories(parent, noAttributes*) + catch { + case e: FileAlreadyExistsException => + // `createDirectories` reports this exception if `parent` is an existing symlink to a directory + // but that's fine for us (and common enough, `scalac -d /tmp` on mac targets a symlink). + if (!Files.isDirectory(parent)) + throw new FileConflictException(s"Can't create directory $parent; there is an existing (non-directory) file in its path", e) + } + builtPaths.put(baseDir, TRUE) + var current = parent + while ((current ne null) && (null ne builtPaths.put(current, TRUE))) { + current = current.getParent + } + } + checkName(filePath.getFileName()) + } + + // the common case is that we are creating a new file, and on MS Windows the create-and-truncate is expensive + // because there is no option in the Windows API that corresponds to this, so the truncate is applied as a separate call + // even if the file is new.
+ // as this is rare, it's best to always try to create a new file and, if that fails, open with truncate + + private val fastOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE) + private val fallbackOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING) + + override def writeFile(relativePath: String, bytes: Array[Byte]): NullableFile = { + val path = base.resolve(relativePath) + try { + ensureDirForPath(base, path) + val os = if (isWindows) { + try FileChannel.open(path, fastOpenOptions) + catch { + case _: FileAlreadyExistsException => FileChannel.open(path, fallbackOpenOptions) + } + } else FileChannel.open(path, fallbackOpenOptions) + + try os.write(ByteBuffer.wrap(bytes), 0L) + catch { + case ex: ClosedByInterruptException => + try Files.deleteIfExists(path) // don't leave an empty or half-written classfile around after an interrupt + catch { case _: Throwable => () } + throw ex + } + os.close() + } catch { + case e: FileConflictException => + backendReporting.error(em"error writing ${path.toString}: ${e.getMessage}") + case e: java.nio.file.FileSystemException => + if (compilerSettings.debug) e.printStackTrace() + backendReporting.error(em"error writing ${path.toString}: ${e.getClass.getName} ${e.getMessage}") + } + AbstractFile.getFile(path) + } + + override def close(): Unit = () + } + + private final class VirtualFileWriter(base: AbstractFile) extends FileWriter { + private def getFile(base: AbstractFile, path: String): AbstractFile = { + def ensureDirectory(dir: AbstractFile): AbstractFile = + if (dir.isDirectory) dir + else throw new FileConflictException(s"${base.path}/${path}: ${dir.path} is not a directory") + val components = path.split('/') + var dir = base + for (i <- 0 until components.length - 1) dir = ensureDirectory(dir) subdirectoryNamed components(i).toString + ensureDirectory(dir) fileNamed components.last.toString + } + + private def writeBytes(outFile: AbstractFile, bytes: Array[Byte]): Unit = { + val out = new DataOutputStream(outFile.bufferedOutput) + try out.write(bytes, 0, bytes.length) + finally out.close() + } + + override def writeFile(relativePath: String, bytes: Array[Byte]):NullableFile = { + val outFile = getFile(base, relativePath) + writeBytes(outFile, bytes) + outFile + } + override def close(): Unit = () + } + + /** Can't output a file due to the state of the file system.
*/ + class FileConflictException(msg: String, cause: Throwable = null) extends IOException(msg, cause) +} diff --git a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala index c9f9e4e23d90..b48df60d4c1a 100644 --- a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala +++ b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala @@ -8,39 +8,48 @@ import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.core.Phases.Phase import scala.collection.mutable -import scala.jdk.CollectionConverters._ -import dotty.tools.dotc.transform.SymUtils._ +import scala.jdk.CollectionConverters.* + import dotty.tools.dotc.interfaces import dotty.tools.dotc.report import java.util.Optional import dotty.tools.dotc.sbt.ExtractDependencies -import dotty.tools.dotc.core._ -import Contexts._ -import Phases._ -import Symbols._ +import dotty.tools.dotc.core.* +import Contexts.* +import Phases.* +import Symbols.* import StdNames.nme import java.io.DataOutputStream import java.nio.channels.ClosedByInterruptException import dotty.tools.tasty.{ TastyBuffer, TastyHeaderUnpickler } +import dotty.tools.dotc.core.tasty.TastyUnpickler import scala.tools.asm -import scala.tools.asm.tree._ -import tpd._ +import scala.tools.asm.tree.* +import tpd.* import dotty.tools.io.AbstractFile +import dotty.tools.dotc.util import dotty.tools.dotc.util.NoSourcePosition class CodeGen(val int: DottyBackendInterface, val primitives: DottyPrimitives)( val bTypes: BTypesFromSymbols[int.type]) { self => import DottyBackendInterface.symExtensions - import bTypes._ - import int.given + import bTypes.* private lazy val mirrorCodeGen = Impl.JMirrorBuilder() - def genUnit(unit: CompilationUnit): GeneratedDefs = { + private def genBCode(using Context) = Phases.genBCodePhase.asInstanceOf[GenBCode] + private def postProcessor(using Context) = genBCode.postProcessor + private def generatedClassHandler(using Context) = genBCode.generatedClassHandler + + /** + * Generate ASM ClassNodes for classes found in a compilation unit. The resulting classes are + * passed to the `GenBCode.generatedClassHandler`. 
+ */ + def genUnit(unit: CompilationUnit)(using ctx: Context): Unit = { val generatedClasses = mutable.ListBuffer.empty[GeneratedClass] val generatedTasty = mutable.ListBuffer.empty[GeneratedTasty] @@ -49,25 +58,32 @@ class CodeGen(val int: DottyBackendInterface, val primitives: DottyPrimitives)( val sym = cd.symbol val sourceFile = unit.source.file - def registerGeneratedClass(classNode: ClassNode, isArtifact: Boolean): Unit = - generatedClasses += GeneratedClass(classNode, sourceFile, isArtifact, onFileCreated(classNode, sym, unit.source)) - val plainC = genClass(cd, unit) - registerGeneratedClass(plainC, isArtifact = false) - - val attrNode = - if !sym.isTopLevelModuleClass then plainC - else if sym.companionClass == NoSymbol then - val mirrorC = genMirrorClass(sym, unit) - registerGeneratedClass(mirrorC, isArtifact = true) - mirrorC + val mainClassNode = genClass(cd, unit) + val mirrorClassNode = + if !sym.isTopLevelModuleClass then null + else if sym.companionClass == NoSymbol then genMirrorClass(sym, unit) else report.log(s"No mirror class for module with linked class: ${sym.fullName}", NoSourcePosition) - plainC + null if sym.isClass then - genTastyAndSetAttributes(sym, attrNode) + val tastyAttrNode = if (mirrorClassNode ne null) mirrorClassNode else mainClassNode + genTastyAndSetAttributes(sym, tastyAttrNode) + + def registerGeneratedClass(classNode: ClassNode, isArtifact: Boolean): Unit = + if classNode ne null then + generatedClasses += GeneratedClass(classNode, + sourceClassName = sym.javaClassName, + position = sym.srcPos.sourcePos, + isArtifact = isArtifact, + onFileCreated = onFileCreated(classNode, sym, unit.source) + ) + + registerGeneratedClass(mainClassNode, isArtifact = false) + registerGeneratedClass(mirrorClassNode, isArtifact = true) catch + case ex: InterruptedException => throw ex case ex: Throwable => ex.printStackTrace() report.error(s"Error while emitting ${unit.source}\n${ex.getMessage}", NoSourcePosition) @@ -78,7 +94,7 @@ class CodeGen(val int: DottyBackendInterface, val primitives: DottyPrimitives)( for (binary <- unit.pickled.get(claszSymbol.asClass)) { generatedTasty += GeneratedTasty(store, binary) val tasty = - val uuid = new TastyHeaderUnpickler(binary()).readHeader() + val uuid = new TastyHeaderUnpickler(TastyUnpickler.scala3CompilerConfig, binary()).readHeader() val lo = uuid.getMostSignificantBits val hi = uuid.getLeastSignificantBits @@ -98,28 +114,28 @@ class CodeGen(val int: DottyBackendInterface, val primitives: DottyPrimitives)( case EmptyTree => () case PackageDef(_, stats) => stats foreach genClassDefs case ValDef(_, _, _) => () // module val not emitted - case td: TypeDef => genClassDef(td) + case td: TypeDef => frontendAccess.frontendSynch(genClassDef(td)) } genClassDefs(unit.tpdTree) - GeneratedDefs(generatedClasses.toList, generatedTasty.toList) + generatedClassHandler.process( + GeneratedCompilationUnit(unit.source.file, generatedClasses.toList, generatedTasty.toList) + ) } // Creates a callback that will be evaluated in PostProcessor after creating a file - private def onFileCreated(cls: ClassNode, claszSymbol: Symbol, sourceFile: interfaces.SourceFile): AbstractFile => Unit = clsFile => { + private def onFileCreated(cls: ClassNode, claszSymbol: Symbol, sourceFile: util.SourceFile)(using Context): AbstractFile => Unit = { val (fullClassName, isLocal) = atPhase(sbtExtractDependenciesPhase) { (ExtractDependencies.classNameAsString(claszSymbol), claszSymbol.isLocal) } - - val className = cls.name.replace('/', '.') - if (ctx.compilerCallback 
!= null) - ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(clsFile), className) - - if (ctx.sbtCallback != null) { - val jSourceFile = sourceFile.jfile.orElse(null) - val cb = ctx.sbtCallback - if (isLocal) cb.generatedLocalClass(jSourceFile, clsFile.file) - else cb.generatedNonLocalClass(jSourceFile, clsFile.file, className, fullClassName) + clsFile => { + val className = cls.name.replace('/', '.') + if (ctx.compilerCallback != null) + ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(clsFile), className) + + ctx.withIncCallback: cb => + if (isLocal) cb.generatedLocalClass(sourceFile, clsFile.jpath) + else cb.generatedNonLocalClass(sourceFile, clsFile.jpath, className, fullClassName) } } @@ -134,48 +150,20 @@ class CodeGen(val int: DottyBackendInterface, val primitives: DottyPrimitives)( } private def genClass(cd: TypeDef, unit: CompilationUnit): ClassNode = { - val b = new Impl.PlainClassBuilder(unit) + val b = new Impl.SyncAndTryBuilder(unit) {} b.genPlainClass(cd) - val cls = b.cnode - checkForCaseConflict(cls.name, cd.symbol) - cls + b.cnode } private def genMirrorClass(classSym: Symbol, unit: CompilationUnit): ClassNode = { - val cls = mirrorCodeGen.genMirrorClass(classSym, unit) - checkForCaseConflict(cls.name, classSym) - cls + mirrorCodeGen.genMirrorClass(classSym, unit) } - private val lowerCaseNames = mutable.HashMap.empty[String, Symbol] - private def checkForCaseConflict(javaClassName: String, classSymbol: Symbol) = { - val lowerCaseName = javaClassName.toLowerCase - lowerCaseNames.get(lowerCaseName) match { - case None => - lowerCaseNames.put(lowerCaseName, classSymbol) - case Some(dupClassSym) => - // Order is not deterministic so we enforce lexicographic order between the duplicates for error-reporting - val (cl1, cl2) = - if (classSymbol.effectiveName.toString < dupClassSym.effectiveName.toString) (classSymbol, dupClassSym) - else (dupClassSym, classSymbol) - val same = classSymbol.effectiveName.toString == dupClassSym.effectiveName.toString - atPhase(typerPhase) { - if same then - // FIXME: This should really be an error, but then FromTasty tests fail - report.warning(s"${cl1.show} and ${cl2.showLocated} produce classes that overwrite one another", cl1.sourcePos) - else - report.warning(s"${cl1.show} differs only in case from ${cl2.showLocated}. 
" + - "Such classes will overwrite one another on case-insensitive filesystems.", cl1.sourcePos) - } - } - } sealed transparent trait ImplEarlyInit{ val int: self.int.type = self.int val bTypes: self.bTypes.type = self.bTypes protected val primitives: DottyPrimitives = self.primitives } - object Impl extends ImplEarlyInit with BCodeSyncAndTry { - class PlainClassBuilder(unit: CompilationUnit) extends SyncAndTryBuilder(unit) - } + object Impl extends ImplEarlyInit with BCodeSyncAndTry } diff --git a/compiler/src/dotty/tools/backend/jvm/CollectSuperCalls.scala b/compiler/src/dotty/tools/backend/jvm/CollectSuperCalls.scala index 299c1c75d6cf..94a946989d23 100644 --- a/compiler/src/dotty/tools/backend/jvm/CollectSuperCalls.scala +++ b/compiler/src/dotty/tools/backend/jvm/CollectSuperCalls.scala @@ -1,9 +1,9 @@ package dotty.tools.backend.jvm import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Phases._ -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Phases.* +import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.core.Flags.Trait import dotty.tools.dotc.transform.MegaPhase.MiniPhase @@ -18,7 +18,7 @@ import dotty.tools.dotc.transform.MegaPhase.MiniPhase * the redundant mixin class could be required as a parent by the JVM. */ class CollectSuperCalls extends MiniPhase { - import tpd._ + import tpd.* override def phaseName: String = CollectSuperCalls.name diff --git a/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala b/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala index 30ad6b29b9f0..e0ddd055fccb 100644 --- a/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala +++ b/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala @@ -3,15 +3,16 @@ package backend package jvm -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.transform.Erasure import scala.tools.asm.{Handle, Opcodes} import dotty.tools.dotc.core.StdNames import BTypes.InternalName +import PostProcessorFrontendAccess.Lazy abstract class CoreBTypes { val bTypes: BTypes - import bTypes._ + import bTypes.* def primitiveTypeMap: Map[Symbol, PrimitiveBType] @@ -55,16 +56,17 @@ abstract class CoreBTypes { abstract class CoreBTypesFromSymbols[I <: DottyBackendInterface] extends CoreBTypes { val bTypes: BTypesFromSymbols[I] - import bTypes._ - import int.given - import DottyBackendInterface._ + import bTypes.* + import DottyBackendInterface.* import dotty.tools.dotc.core.Contexts.Context - + import frontendAccess.perRunLazy /** * Maps primitive types to their corresponding PrimitiveBType. The map is defined lexically above * the first use of `classBTypeFromSymbol` because that method looks at the map. */ - lazy val primitiveTypeMap: Map[Symbol, PrimitiveBType] = Map( + override def primitiveTypeMap: Map[Symbol, bTypes.PrimitiveBType] = _primitiveTypeMap.get + private lazy val _primitiveTypeMap: Lazy[Map[Symbol, PrimitiveBType]] = perRunLazy: + Map( defn.UnitClass -> UNIT, defn.BooleanClass -> BOOL, defn.CharClass -> CHAR, @@ -80,7 +82,8 @@ abstract class CoreBTypesFromSymbols[I <: DottyBackendInterface] extends CoreBTy * Map from primitive types to their boxed class type. Useful when pushing class literals onto the * operand stack (ldc instruction taking a class literal), see genConstant. 
*/ - lazy val boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType] = Map( + override def boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType] = _boxedClassOfPrimitive.get + private lazy val _boxedClassOfPrimitive: Lazy[Map[PrimitiveBType, ClassBType]] = perRunLazy(Map( UNIT -> classBTypeFromSymbol(requiredClass[java.lang.Void]), BOOL -> classBTypeFromSymbol(requiredClass[java.lang.Boolean]), BYTE -> classBTypeFromSymbol(requiredClass[java.lang.Byte]), @@ -90,7 +93,7 @@ abstract class CoreBTypesFromSymbols[I <: DottyBackendInterface] extends CoreBTy LONG -> classBTypeFromSymbol(requiredClass[java.lang.Long]), FLOAT -> classBTypeFromSymbol(requiredClass[java.lang.Float]), DOUBLE -> classBTypeFromSymbol(requiredClass[java.lang.Double]) - ) + )) lazy val boxedClasses: Set[ClassBType] = boxedClassOfPrimitive.values.toSet @@ -98,7 +101,8 @@ abstract class CoreBTypesFromSymbols[I <: DottyBackendInterface] extends CoreBTy * Maps the method symbol for a box method to the boxed type of the result. For example, the * method symbol for `Byte.box()` is mapped to the ClassBType `java/lang/Byte`. */ - lazy val boxResultType: Map[Symbol, ClassBType] = { + override def boxResultType: Map[Symbol, ClassBType] = _boxResultType.get + private lazy val _boxResultType: Lazy[Map[Symbol, ClassBType]] = perRunLazy{ val boxMethods = defn.ScalaValueClasses().map{x => // @darkdimius Are you sure this should be a def? (x, Erasure.Boxing.boxMethod(x.asClass)) }.toMap @@ -109,7 +113,8 @@ abstract class CoreBTypesFromSymbols[I <: DottyBackendInterface] extends CoreBTy /** * Maps the method symbol for an unbox method to the primitive type of the result. * For example, the method symbol for `Byte.unbox()` is mapped to the PrimitiveBType BYTE. */ - lazy val unboxResultType: Map[Symbol, PrimitiveBType] = { + override def unboxResultType: Map[Symbol, PrimitiveBType] = _unboxResultType.get + private lazy val _unboxResultType = perRunLazy[Map[Symbol, PrimitiveBType]]{ val unboxMethods: Map[Symbol, Symbol] = defn.ScalaValueClasses().map(x => (x, Erasure.Boxing.unboxMethod(x.asClass))).toMap for ((valueClassSym, unboxMethodSym) <- unboxMethods) @@ -124,35 +129,76 @@ abstract class CoreBTypesFromSymbols[I <: DottyBackendInterface] extends CoreBTy * names of NothingClass and NullClass can't be emitted as-is. * TODO @lry Once there's a 2.11.3 starr, use the commented argument list. 
The current starr crashes on the type literal `scala.runtime.Nothing$` */ - lazy val srNothingRef : ClassBType = classBTypeFromSymbol(requiredClass("scala.runtime.Nothing$")) - lazy val srNullRef : ClassBType = classBTypeFromSymbol(requiredClass("scala.runtime.Null$")) - - lazy val ObjectRef : ClassBType = classBTypeFromSymbol(defn.ObjectClass) - lazy val StringRef : ClassBType = classBTypeFromSymbol(defn.StringClass) - - lazy val jlStringBuilderRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.StringBuilder]) - lazy val jlStringBufferRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.StringBuffer]) - lazy val jlCharSequenceRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.CharSequence]) - lazy val jlClassRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Class[_]]) - lazy val jlThrowableRef : ClassBType = classBTypeFromSymbol(defn.ThrowableClass) - lazy val jlCloneableRef : ClassBType = classBTypeFromSymbol(defn.JavaCloneableClass) - lazy val jiSerializableRef : ClassBType = classBTypeFromSymbol(requiredClass[java.io.Serializable]) - lazy val jlClassCastExceptionRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.ClassCastException]) - lazy val jlIllegalArgExceptionRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.IllegalArgumentException]) - lazy val jliSerializedLambdaRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.SerializedLambda]) - - lazy val srBoxesRuntimeRef: ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.BoxesRunTime]) - - private lazy val jliCallSiteRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.CallSite]) - private lazy val jliLambdaMetafactoryRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.LambdaMetafactory]) - private lazy val jliMethodHandleRef : ClassBType = classBTypeFromSymbol(defn.MethodHandleClass) - private lazy val jliMethodHandlesLookupRef : ClassBType = classBTypeFromSymbol(defn.MethodHandlesLookupClass) - private lazy val jliMethodTypeRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.MethodType]) - private lazy val jliStringConcatFactoryRef : ClassBType = classBTypeFromSymbol(requiredClass("java.lang.invoke.StringConcatFactory")) // since JDK 9 - - lazy val srLambdaDeserialize : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.LambdaDeserialize]) - - lazy val jliLambdaMetaFactoryMetafactoryHandle = new Handle( + override def srNothingRef: ClassBType = _srNothingRef.get + private lazy val _srNothingRef: Lazy[ClassBType] = perRunLazy(classBTypeFromSymbol(requiredClass("scala.runtime.Nothing$"))) + + override def srNullRef: ClassBType = _srNullRef.get + private lazy val _srNullRef: Lazy[ClassBType] = perRunLazy(classBTypeFromSymbol(requiredClass("scala.runtime.Null$"))) + + override def ObjectRef: ClassBType = _ObjectRef.get + private lazy val _ObjectRef: Lazy[ClassBType] = perRunLazy(classBTypeFromSymbol(defn.ObjectClass)) + + override def StringRef: ClassBType = _StringRef.get + private lazy val _StringRef: Lazy[ClassBType] = perRunLazy(classBTypeFromSymbol(defn.StringClass)) + + override def jlStringBuilderRef: ClassBType = _jlStringBuilderRef.get + private lazy val _jlStringBuilderRef: Lazy[ClassBType] = perRunLazy(classBTypeFromSymbol(requiredClass[java.lang.StringBuilder])) + + override def jlStringBufferRef: ClassBType = _jlStringBufferRef.get + private lazy val _jlStringBufferRef: Lazy[ClassBType] = 
perRunLazy(classBTypeFromSymbol(requiredClass[java.lang.StringBuffer])) + + override def jlCharSequenceRef: ClassBType = _jlCharSequenceRef.get + private lazy val _jlCharSequenceRef: Lazy[ClassBType] = perRunLazy(classBTypeFromSymbol(requiredClass[java.lang.CharSequence])) + + override def jlClassRef: ClassBType = _jlClassRef.get + private lazy val _jlClassRef: Lazy[ClassBType] = perRunLazy(classBTypeFromSymbol(requiredClass[java.lang.Class[?]])) + + override def jlThrowableRef: ClassBType = _jlThrowableRef.get + private lazy val _jlThrowableRef: Lazy[ClassBType] = perRunLazy(classBTypeFromSymbol(defn.ThrowableClass)) + + override def jlCloneableRef: ClassBType = _jlCloneableRef.get + private lazy val _jlCloneableRef: Lazy[ClassBType] = perRunLazy(classBTypeFromSymbol(defn.JavaCloneableClass)) + + override def jiSerializableRef: ClassBType = _jiSerializableRef.get + private lazy val _jiSerializableRef: Lazy[ClassBType] = perRunLazy(classBTypeFromSymbol(requiredClass[java.io.Serializable])) + + override def jlClassCastExceptionRef: ClassBType = _jlClassCastExceptionRef.get + private lazy val _jlClassCastExceptionRef: Lazy[ClassBType] = perRunLazy(classBTypeFromSymbol(requiredClass[java.lang.ClassCastException])) + + override def jlIllegalArgExceptionRef: ClassBType = _jlIllegalArgExceptionRef.get + private lazy val _jlIllegalArgExceptionRef: Lazy[ClassBType] = perRunLazy(classBTypeFromSymbol(requiredClass[java.lang.IllegalArgumentException])) + + override def jliSerializedLambdaRef: ClassBType = _jliSerializedLambdaRef.get + private lazy val _jliSerializedLambdaRef: Lazy[ClassBType] = perRunLazy(classBTypeFromSymbol(requiredClass[java.lang.invoke.SerializedLambda])) + + override def srBoxesRuntimeRef: ClassBType = _srBoxesRuntimeRef.get + private lazy val _srBoxesRuntimeRef: Lazy[ClassBType] = perRunLazy(classBTypeFromSymbol(requiredClass[scala.runtime.BoxesRunTime])) + + private def jliCallSiteRef: ClassBType = _jliCallSiteRef.get + private lazy val _jliCallSiteRef: Lazy[ClassBType] = perRunLazy(classBTypeFromSymbol(requiredClass[java.lang.invoke.CallSite])) + + private def jliLambdaMetafactoryRef: ClassBType = _jliLambdaMetafactoryRef.get + private lazy val _jliLambdaMetafactoryRef: Lazy[ClassBType] = perRunLazy(classBTypeFromSymbol(requiredClass[java.lang.invoke.LambdaMetafactory])) + + private def jliMethodHandleRef: ClassBType = _jliMethodHandleRef.get + private lazy val _jliMethodHandleRef: Lazy[ClassBType] = perRunLazy(classBTypeFromSymbol(defn.MethodHandleClass)) + + private def jliMethodHandlesLookupRef: ClassBType = _jliMethodHandlesLookupRef.get + private lazy val _jliMethodHandlesLookupRef: Lazy[ClassBType] = perRunLazy(classBTypeFromSymbol(defn.MethodHandlesLookupClass)) + + private def jliMethodTypeRef: ClassBType = _jliMethodTypeRef.get + private lazy val _jliMethodTypeRef: Lazy[ClassBType] = perRunLazy(classBTypeFromSymbol(requiredClass[java.lang.invoke.MethodType])) + + // since JDK 9 + private def jliStringConcatFactoryRef: ClassBType = _jliStringConcatFactoryRef.get + private lazy val _jliStringConcatFactoryRef: Lazy[ClassBType] = perRunLazy(classBTypeFromSymbol(requiredClass("java.lang.invoke.StringConcatFactory"))) + + private def srLambdaDeserialize: ClassBType = _srLambdaDeserialize.get + private lazy val _srLambdaDeserialize: Lazy[ClassBType] = perRunLazy(classBTypeFromSymbol(requiredClass[scala.runtime.LambdaDeserialize])) + + + override def jliLambdaMetaFactoryMetafactoryHandle = _jliLambdaMetaFactoryMetafactoryHandle.get + private lazy val 
_jliLambdaMetaFactoryMetafactoryHandle: Lazy[Handle] = perRunLazy{new Handle( Opcodes.H_INVOKESTATIC, jliLambdaMetafactoryRef.internalName, "metafactory", @@ -160,9 +206,10 @@ abstract class CoreBTypesFromSymbols[I <: DottyBackendInterface] extends CoreBTy List(jliMethodHandlesLookupRef, StringRef, jliMethodTypeRef, jliMethodTypeRef, jliMethodHandleRef, jliMethodTypeRef), jliCallSiteRef ).descriptor, - /* itf = */ false) + /* itf = */ false)} - lazy val jliLambdaMetaFactoryAltMetafactoryHandle = new Handle( + override def jliLambdaMetaFactoryAltMetafactoryHandle = _jliLambdaMetaFactoryAltMetafactoryHandle.get + private lazy val _jliLambdaMetaFactoryAltMetafactoryHandle: Lazy[Handle] = perRunLazy{ new Handle( Opcodes.H_INVOKESTATIC, jliLambdaMetafactoryRef.internalName, "altMetafactory", @@ -170,9 +217,10 @@ abstract class CoreBTypesFromSymbols[I <: DottyBackendInterface] extends CoreBTy List(jliMethodHandlesLookupRef, StringRef, jliMethodTypeRef, ArrayBType(ObjectRef)), jliCallSiteRef ).descriptor, - /* itf = */ false) + /* itf = */ false)} - lazy val jliLambdaDeserializeBootstrapHandle: Handle = new Handle( + override def jliLambdaDeserializeBootstrapHandle: Handle = _jliLambdaDeserializeBootstrapHandle.get + private lazy val _jliLambdaDeserializeBootstrapHandle: Lazy[Handle] = perRunLazy{ new Handle( Opcodes.H_INVOKESTATIC, srLambdaDeserialize.internalName, "bootstrap", @@ -180,9 +228,10 @@ abstract class CoreBTypesFromSymbols[I <: DottyBackendInterface] extends CoreBTy List(jliMethodHandlesLookupRef, StringRef, jliMethodTypeRef, ArrayBType(jliMethodHandleRef)), jliCallSiteRef ).descriptor, - /* itf = */ false) + /* itf = */ false)} - lazy val jliStringConcatFactoryMakeConcatWithConstantsHandle = new Handle( + override def jliStringConcatFactoryMakeConcatWithConstantsHandle = _jliStringConcatFactoryMakeConcatWithConstantsHandle.get + private lazy val _jliStringConcatFactoryMakeConcatWithConstantsHandle: Lazy[Handle] = perRunLazy{ new Handle( Opcodes.H_INVOKESTATIC, jliStringConcatFactoryRef.internalName, "makeConcatWithConstants", @@ -190,10 +239,11 @@ abstract class CoreBTypesFromSymbols[I <: DottyBackendInterface] extends CoreBTy List(jliMethodHandlesLookupRef, StringRef, jliMethodTypeRef, StringRef, ArrayBType(ObjectRef)), jliCallSiteRef ).descriptor, - /* itf = */ false) + /* itf = */ false)} /** * Methods in scala.runtime.BoxesRuntime + * No need to wrap in Lazy to synchronize access, symbols won't change */ lazy val asmBoxTo : Map[BType, MethodNameAndType] = Map( BOOL -> MethodNameAndType("boxToBoolean", MethodBType(List(BOOL), boxedClassOfPrimitive(BOOL))), @@ -218,7 +268,7 @@ abstract class CoreBTypesFromSymbols[I <: DottyBackendInterface] extends CoreBTy ) lazy val typeOfArrayOp: Map[Int, BType] = { - import dotty.tools.backend.ScalaPrimitivesOps._ + import dotty.tools.backend.ScalaPrimitivesOps.* Map( (List(ZARRAY_LENGTH, ZARRAY_GET, ZARRAY_SET) map (_ -> BOOL)) ++ (List(BARRAY_LENGTH, BARRAY_GET, BARRAY_SET) map (_ -> BYTE)) ++ @@ -228,7 +278,7 @@ abstract class CoreBTypesFromSymbols[I <: DottyBackendInterface] extends CoreBTy (List(LARRAY_LENGTH, LARRAY_GET, LARRAY_SET) map (_ -> LONG)) ++ (List(FARRAY_LENGTH, FARRAY_GET, FARRAY_SET) map (_ -> FLOAT)) ++ (List(DARRAY_LENGTH, DARRAY_GET, DARRAY_SET) map (_ -> DOUBLE)) ++ - (List(OARRAY_LENGTH, OARRAY_GET, OARRAY_SET) map (_ -> ObjectRef)) : _* + (List(OARRAY_LENGTH, OARRAY_GET, OARRAY_SET) map (_ -> ObjectRef)) * ) } } diff --git a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala 
b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala index b2278c3f0ce8..37045bda17ec 100644 --- a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala +++ b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala @@ -3,23 +3,23 @@ package dotty.tools.backend.jvm import scala.language.unsafeNulls import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.core.Flags.* + import java.io.{File => _} import scala.reflect.ClassTag import dotty.tools.io.AbstractFile -import dotty.tools.dotc.core._ -import Contexts._ -import Types._ -import Symbols._ -import Phases._ +import dotty.tools.dotc.core.* +import Contexts.* +import Types.* +import Symbols.* +import Phases.* import Decorators.em import dotty.tools.dotc.util.ReadOnlyMap import dotty.tools.dotc.report -import tpd._ +import tpd.* import StdNames.nme import NameKinds.{LazyBitMapName, LazyLocalName} @@ -93,7 +93,7 @@ class DottyBackendInterface(val superCallsMap: ReadOnlyMap[Symbol, Set[ClassSymb object DottyBackendInterface { - private def erasureString(clazz: Class[_]): String = { + private def erasureString(clazz: Class[?]): String = { if (clazz.isArray) "Array[" + erasureString(clazz.getComponentType) + "]" else clazz.getName } diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala index 469a6ea57679..8d467529d60e 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala @@ -3,12 +3,13 @@ package dotty.tools.backend.jvm import dotty.tools.dotc.CompilationUnit import dotty.tools.dotc.core.Phases.Phase import dotty.tools.dotc.report -import dotty.tools.dotc.core._ +import dotty.tools.dotc.core.* import dotty.tools.dotc.interfaces.CompilerCallback -import Contexts._ -import Symbols._ -import dotty.tools.io._ +import Contexts.* +import Symbols.* +import dotty.tools.io.* import scala.collection.mutable +import scala.compiletime.uninitialized class GenBCode extends Phase { self => @@ -25,7 +26,7 @@ class GenBCode extends Phase { self => private val entryPoints = new mutable.HashSet[String]() def registerEntryPoint(s: String): Unit = entryPoints += s - private var _backendInterface: DottyBackendInterface = _ + private var _backendInterface: DottyBackendInterface = uninitialized def backendInterface(using ctx: Context): DottyBackendInterface = { if _backendInterface eq null then // Enforce usage of FreshContext so we would be able to modify compilation unit between runs @@ -36,7 +37,7 @@ class GenBCode extends Phase { self => _backendInterface } - private var _codeGen: CodeGen = _ + private var _codeGen: CodeGen = uninitialized def codeGen(using Context): CodeGen = { if _codeGen eq null then val int = backendInterface @@ -45,46 +46,51 @@ class GenBCode extends Phase { self => _codeGen } - private var _bTypes: BTypesFromSymbols[DottyBackendInterface] = _ + private var _bTypes: BTypesFromSymbols[DottyBackendInterface] = uninitialized def bTypes(using Context): BTypesFromSymbols[DottyBackendInterface] = { if _bTypes eq null then _bTypes = BTypesFromSymbols(backendInterface, frontendAccess) _bTypes } - private var _frontendAccess: PostProcessorFrontendAccess | Null = _ + private var _frontendAccess: PostProcessorFrontendAccess | Null = uninitialized def frontendAccess(using Context): PostProcessorFrontendAccess = { if _frontendAccess eq null then _frontendAccess = 
PostProcessorFrontendAccess.Impl(backendInterface, entryPoints) _frontendAccess.nn } - private var _postProcessor: PostProcessor | Null = _ + private var _postProcessor: PostProcessor | Null = uninitialized def postProcessor(using Context): PostProcessor = { if _postProcessor eq null then _postProcessor = new PostProcessor(frontendAccess, bTypes) _postProcessor.nn } - override def run(using ctx: Context): Unit = - // CompilationUnit is the only component that will differ between each run invocation - // We need to update it to have correct source positions. - // FreshContext is always enforced when creating backend interface - backendInterface.ctx + private var _generatedClassHandler: GeneratedClassHandler | Null = uninitialized + def generatedClassHandler(using Context): GeneratedClassHandler = { + if _generatedClassHandler eq null then + _generatedClassHandler = GeneratedClassHandler(postProcessor) + _generatedClassHandler.nn + } + + override def run(using Context): Unit = + frontendAccess.frontendSynchWithoutContext { + backendInterface.ctx .asInstanceOf[FreshContext] .setCompilationUnit(ctx.compilationUnit) - val generated = codeGen.genUnit(ctx.compilationUnit) - // In Scala 2, the backend might use global optimizations which might delay post-processing to build the call graph. - // In Scala 3, we don't perform backend optimizations and always perform post-processing immediately. - // https://github.com/scala/scala/pull/6057 - postProcessor.postProcessAndSendToDisk(generated) + } + codeGen.genUnit(ctx.compilationUnit) (ctx.compilerCallback: CompilerCallback | Null) match { case cb: CompilerCallback => cb.onSourceCompiled(ctx.source) case null => () } override def runOn(units: List[CompilationUnit])(using ctx:Context): List[CompilationUnit] = { - try super.runOn(units) + try + val result = super.runOn(units) + generatedClassHandler.complete() + result finally // frontendAccess and postProcessor are created lazily, clean them up only if they were initialized if _frontendAccess ne null then @@ -100,6 +106,7 @@ class GenBCode extends Phase { self => } if _postProcessor ne null then postProcessor.classfileWriter.close() + generatedClassHandler.close() } } diff --git a/compiler/src/dotty/tools/backend/jvm/GeneratedClassHandler.scala b/compiler/src/dotty/tools/backend/jvm/GeneratedClassHandler.scala new file mode 100644 index 000000000000..bf2ae9a131aa --- /dev/null +++ b/compiler/src/dotty/tools/backend/jvm/GeneratedClassHandler.scala @@ -0,0 +1,192 @@ +package dotty.tools.backend.jvm + +import java.nio.channels.ClosedByInterruptException +import java.util.concurrent.ThreadPoolExecutor.CallerRunsPolicy +import java.util.concurrent.* + +import scala.collection.mutable.ListBuffer +import scala.concurrent.duration.Duration +import scala.concurrent.{Await, ExecutionContext, ExecutionContextExecutor, Future} +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.io.AbstractFile +import dotty.tools.dotc.profile.ThreadPoolFactory +import scala.util.control.NonFatal +import dotty.tools.dotc.core.Phases +import dotty.tools.dotc.core.Decorators.em + +import scala.language.unsafeNulls +import scala.compiletime.uninitialized + +/** + * Interface to handle post-processing and classfile writing (see [[PostProcessor]]) of generated + * classes, potentially in parallel. 
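// Aside (hypothetical sketch, not the compiler's API): the handler introduced in
// this file submits one post-processing task per compilation unit and later
// drains them in submission order, so buffered diagnostics replay deterministically.
import scala.collection.mutable.ListBuffer
import scala.concurrent.{Await, ExecutionContext, Future}
import scala.concurrent.duration.Duration

final class OrderedHandler(using ExecutionContext):
  private case class Pending(name: String, task: Future[List[String]])
  private val pending = ListBuffer.empty[Pending]

  def process(name: String)(work: => List[String]): Unit =
    pending += Pending(name, Future(work)) // may run on any worker thread

  def complete(report: String => Unit): Unit =
    for p <- pending.toList do
      // Block on each task in submission order, not in completion order.
      val messages = Await.result(p.task, Duration.Inf)
      messages.foreach(m => report(s"${p.name}: $m"))
    pending.clear()

@main def orderedHandlerDemo(): Unit =
  given ExecutionContext = ExecutionContext.global
  val handler = OrderedHandler()
  handler.process("A.scala") { Thread.sleep(50); List("warning in A") }
  handler.process("B.scala") { List("warning in B") }
  handler.complete(println) // A's messages always print before B's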
+ */ +private[jvm] sealed trait GeneratedClassHandler { + val postProcessor: PostProcessor + + /** + * Pass the result of code generation for a compilation unit to this handler for post-processing + */ + def process(unit: GeneratedCompilationUnit): Unit + + /** + * If running in parallel, block until all generated classes are handled + */ + def complete(): Unit + + /** + * Invoked at the end of the jvm phase + */ + def close(): Unit = () +} + +private[jvm] object GeneratedClassHandler { + def apply(postProcessor: PostProcessor)(using ictx: Context): GeneratedClassHandler = { + val compilerSettings = postProcessor.frontendAccess.compilerSettings + val handler = compilerSettings.backendParallelism match { + case 1 => new SyncWritingClassHandler(postProcessor) + + case maxThreads => + // if (settings.areStatisticsEnabled) + // runReporting.warning( + // NoPosition, + // "JVM statistics are not reliable with multi-threaded JVM class writing.\n" + + // "To collect compiler statistics remove the " + settings.YaddBackendThreads.name + " setting.", + // WarningCategory.Other, + // site = "" + // ) + val additionalThreads = maxThreads - 1 + // The thread pool queue is limited in size. When it's full, the `CallerRunsPolicy` causes + // a new task to be executed on the main thread, which provides back-pressure. + // The queue size is large enough to ensure that the backend workers cannot drain it in less + // time than it takes the main thread to run a single task. + val queueSize = compilerSettings.backendMaxWorkerQueue.getOrElse(maxThreads * 2) + val threadPoolFactory = ThreadPoolFactory(Phases.genBCodePhase) + val javaExecutor = threadPoolFactory.newBoundedQueueFixedThreadPool(additionalThreads, queueSize, new CallerRunsPolicy, "non-ast") + new AsyncWritingClassHandler(postProcessor, javaExecutor) + } + + // if (settings.optInlinerEnabled || settings.optClosureInvocations) new GlobalOptimisingGeneratedClassHandler(postProcessor, handler) + // else + handler + } + + sealed abstract class WritingClassHandler(val javaExecutor: Executor) extends GeneratedClassHandler { + import postProcessor.bTypes.frontendAccess + + def tryStealing: Option[Runnable] + + private val processingUnits = ListBuffer.empty[CompilationUnitInPostProcess] + + def process(unit: GeneratedCompilationUnit): Unit = { + val unitInPostProcess = new CompilationUnitInPostProcess(unit.classes, unit.tasty, unit.sourceFile)(using unit.ctx) + postProcessUnit(unitInPostProcess) + processingUnits += unitInPostProcess + } + + protected implicit val executionContext: ExecutionContextExecutor = ExecutionContext.fromExecutor(javaExecutor) + + final def postProcessUnit(unitInPostProcess: CompilationUnitInPostProcess): Unit = { + unitInPostProcess.task = Future: + frontendAccess.withThreadLocalReporter(unitInPostProcess.bufferedReporting): + // we 'take' classes to reduce the memory pressure + // as soon as the class is consumed and written, we release its data + unitInPostProcess.takeClasses().foreach: + postProcessor.sendToDisk(_, unitInPostProcess.sourceFile) + unitInPostProcess.takeTasty().foreach: + postProcessor.sendToDisk(_, unitInPostProcess.sourceFile) + } + + protected def takeProcessingUnits(): List[CompilationUnitInPostProcess] = { + val result = processingUnits.result() + processingUnits.clear() + result + } + + final def complete(): Unit = { + import frontendAccess.directBackendReporting + + def stealWhileWaiting(unitInPostProcess: CompilationUnitInPostProcess): Unit = { + val task = unitInPostProcess.task + while 
(!task.isCompleted) + tryStealing match { + case Some(r) => r.run() + case None => Await.ready(task, Duration.Inf) + } + } + + /* + * Go through each task in submission order, wait for it to finish and report its messages. + * When finding a task that has not completed, steal work from the executor's queue and run + * it on the main thread (which we are on here), until the task is done. + * + * We could consume the results when they are ready, via use of a [[java.util.concurrent.CompletionService]] + * or something similar, but that would lead to non-deterministic reports from backend threads, as the + * compilation units could complete in a different order than they were submitted, and thus the relayed + * reports would be in a different order. + * To avoid that non-determinism we read the results in order of submission, at a potentially small performance + * cost, since memory for a task may be retained longer than it otherwise would be. + * Most of the memory in the CompilationUnitInPostProcess is reclaimable anyway, as the classes are dereferenced after use. + */ + takeProcessingUnits().foreach { unitInPostProcess => + try + stealWhileWaiting(unitInPostProcess) + unitInPostProcess.bufferedReporting.relayReports(directBackendReporting) + // We know the future is complete, throw the exception if it completed with a failure + unitInPostProcess.task.value.get.get + catch + case _: ClosedByInterruptException => throw new InterruptedException() + case NonFatal(t) => + t.printStackTrace() + frontendAccess.backendReporting.error(em"unable to write ${unitInPostProcess.sourceFile} $t") + } + } + } + + private final class SyncWritingClassHandler(val postProcessor: PostProcessor) + extends WritingClassHandler(_.nn.run()) { + + override def toString: String = "SyncWriting" + + def tryStealing: Option[Runnable] = None + } + + private final class AsyncWritingClassHandler(val postProcessor: PostProcessor, override val javaExecutor: ThreadPoolExecutor) + extends WritingClassHandler(javaExecutor) { + + override def toString: String = s"AsyncWriting[additional threads:${javaExecutor.getMaximumPoolSize}]" + + override def close(): Unit = { + super.close() + javaExecutor.shutdownNow() + } + + def tryStealing: Option[Runnable] = Option(javaExecutor.getQueue.poll()) + } + +} + +/** + * State for a compilation unit being post-processed. 
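// Aside (hypothetical demo, not the compiler's ThreadPoolFactory): the executor
// wiring described above -- a fixed pool with a bounded queue, where a full queue
// makes the submitting thread run the task itself (back-pressure), and a waiting
// thread may "steal" queued work instead of idling -- in plain java.util.concurrent:
import java.util.concurrent.{ArrayBlockingQueue, ThreadPoolExecutor, TimeUnit}
import java.util.concurrent.ThreadPoolExecutor.CallerRunsPolicy

@main def boundedPoolDemo(): Unit =
  val workers   = 2
  val queueSize = workers * 2
  val pool = new ThreadPoolExecutor(
    workers, workers, 0L, TimeUnit.MILLISECONDS,
    new ArrayBlockingQueue[Runnable](queueSize),
    new CallerRunsPolicy) // rejected task => run it on the caller: back-pressure

  for i <- 1 to 10 do
    pool.execute(() => println(s"task $i on ${Thread.currentThread.getName}"))

  // "Stealing": drain queued tasks on the main thread rather than blocking idly.
  var stolen = pool.getQueue.poll()
  while stolen != null do
    stolen.run()
    stolen = pool.getQueue.poll()

  pool.shutdown()
  pool.awaitTermination(10, TimeUnit.SECONDS)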
+ * - Holds the classes to post-process (released for GC when no longer used) + * - Keeps a reference to the future that runs the post-processor + * - Buffers messages reported during post-processing + */ +final private class CompilationUnitInPostProcess(private var classes: List[GeneratedClass], private var tasty: List[GeneratedTasty], val sourceFile: AbstractFile)(using Context) { + def takeClasses(): List[GeneratedClass] = { + val c = classes + classes = Nil + c + } + + def takeTasty(): List[GeneratedTasty] = { + val v = tasty + tasty = Nil + v + } + + /** the main async task submitted onto the scheduler */ + var task: Future[Unit] = uninitialized + + val bufferedReporting = new PostProcessorFrontendAccess.BufferingBackendReporting() +} \ No newline at end of file diff --git a/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala b/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala index c16bc70fc3b0..00d7dc598509 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala @@ -3,12 +3,12 @@ package dotty.tools.backend.jvm import scala.language.unsafeNulls import scala.tools.asm.{ClassReader, Type, Handle } -import scala.tools.asm.tree._ +import scala.tools.asm.tree.* import scala.collection.mutable import scala.util.control.{NoStackTrace, NonFatal} -import scala.annotation._ -import scala.jdk.CollectionConverters._ +import scala.annotation.* +import scala.jdk.CollectionConverters.* // Backported from scala/scala, commit sha: 724be0e9425b9ad07c244d25efdad695d75abbcf // https://github.com/scala/scala/blob/724be0e9425b9ad07c244d25efdad695d75abbcf/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala#L928 @@ -315,7 +315,7 @@ abstract class NestedClassesCollector[T](nestedOnly: Boolean) extends GenericSig if (annot.values != null) annot.values.asScala foreach visitConstant } - def visitAnnotations(annots: java.util.List[_ <: AnnotationNode]) = if (annots != null) annots.asScala foreach visitAnnotation + def visitAnnotations(annots: java.util.List[? 
<: AnnotationNode]) = if (annots != null) annots.asScala foreach visitAnnotation def visitAnnotationss(annotss: Array[java.util.List[AnnotationNode]]) = if (annotss != null) annotss foreach visitAnnotations def visitHandle(handle: Handle): Unit = { diff --git a/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala b/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala index 606b5645aa24..45c6d6ecad44 100644 --- a/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala +++ b/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala @@ -1,5 +1,7 @@ package dotty.tools.backend.jvm +import java.util.concurrent.ConcurrentHashMap + import scala.collection.mutable.ListBuffer import dotty.tools.dotc.util.{SourcePosition, NoSourcePosition} import dotty.tools.io.AbstractFile @@ -14,41 +16,69 @@ import scala.tools.asm.tree.ClassNode */ class PostProcessor(val frontendAccess: PostProcessorFrontendAccess, val bTypes: BTypes) { self => - import bTypes.* + import bTypes.{classBTypeFromInternalName} import frontendAccess.{backendReporting, compilerSettings} - import int.given val backendUtils = new BackendUtils(this) - val classfileWriter = ClassfileWriter(frontendAccess) - - def postProcessAndSendToDisk(generatedDefs: GeneratedDefs): Unit = { - val GeneratedDefs(classes, tasty) = generatedDefs - for (GeneratedClass(classNode, sourceFile, isArtifact, onFileCreated) <- classes) { - val bytes = - try - if !isArtifact then setSerializableLambdas(classNode) - setInnerClasses(classNode) - serializeClass(classNode) - catch - case e: java.lang.RuntimeException if e.getMessage != null && e.getMessage.nn.contains("too large!") => - backendReporting.error(em"Could not write class ${classNode.name} because it exceeds JVM code size limits. ${e.getMessage}") - null - case ex: Throwable => - ex.printStackTrace() - backendReporting.error(em"Error while emitting ${classNode.name}\n${ex.getMessage}") - null - - if (bytes != null) { - if (AsmUtils.traceSerializedClassEnabled && classNode.name.nn.contains(AsmUtils.traceSerializedClassPattern)) + val classfileWriters = new ClassfileWriters(frontendAccess) + val classfileWriter = classfileWriters.ClassfileWriter() + + type ClassnamePosition = (String, SourcePosition) + private val caseInsensitively = new ConcurrentHashMap[String, ClassnamePosition] + + def sendToDisk(clazz: GeneratedClass, sourceFile: AbstractFile): Unit = if !compilerSettings.outputOnlyTasty then { + val classNode = clazz.classNode + val internalName = classNode.name.nn + val bytes = + try + if !clazz.isArtifact then setSerializableLambdas(classNode) + warnCaseInsensitiveOverwrite(clazz) + setInnerClasses(classNode) + serializeClass(classNode) + catch + case e: java.lang.RuntimeException if e.getMessage != null && e.getMessage.nn.contains("too large!") => + backendReporting.error(em"Could not write class $internalName because it exceeds JVM code size limits. 
${e.getMessage}") + null + case ex: Throwable => + if compilerSettings.debug then ex.printStackTrace() + backendReporting.error(em"Error while emitting $internalName\n${ex.getMessage}") + null + + if bytes != null then + if (AsmUtils.traceSerializedClassEnabled && internalName.contains(AsmUtils.traceSerializedClassPattern)) AsmUtils.traceClass(bytes) + val clsFile = classfileWriter.writeClass(internalName, bytes, sourceFile) + if clsFile != null then clazz.onFileCreated(clsFile) + } - val clsFile = classfileWriter.writeClass(classNode.name.nn, bytes, sourceFile) - if clsFile != null then onFileCreated(clsFile) - } - } + def sendToDisk(tasty: GeneratedTasty, sourceFile: AbstractFile): Unit = { + val GeneratedTasty(classNode, tastyGenerator) = tasty + val internalName = classNode.name.nn + classfileWriter.writeTasty(classNode.name.nn, tastyGenerator(), sourceFile) + } - for (GeneratedTasty(classNode, binaryGen) <- tasty){ - classfileWriter.writeTasty(classNode.name.nn, binaryGen()) + private def warnCaseInsensitiveOverwrite(clazz: GeneratedClass) = { + val name = clazz.classNode.name.nn + val lowerCaseJavaName = name.nn.toLowerCase + val clsPos = clazz.position + caseInsensitively.putIfAbsent(lowerCaseJavaName, (name, clsPos)) match { + case null => () + case (dupName, dupPos) => + // Order is not deterministic so we enforce lexicographic order between the duplicates for error-reporting + val ((pos1, pos2), (name1, name2)) = + if (name < dupName) ((clsPos, dupPos), (name, dupName)) + else ((dupPos, clsPos), (dupName, name)) + val locationAddendum = + if pos1.source.path == pos2.source.path then "" + else s" (defined in ${pos2.source.file.name})" + def nicify(name: String): String = name.replace('/', '.').nn + if name1 == name2 then + backendReporting.warning( + em"${nicify(name1)} and ${nicify(name2)} produce classes that overwrite one another", pos1) + else + backendReporting.warning( + em"""Generated class ${nicify(name1)} differs only in case from ${nicify(name2)}$locationAddendum. + | Such classes will overwrite one another on case-insensitive filesystems.""", pos1) } } @@ -106,12 +136,12 @@ class PostProcessor(val frontendAccess: PostProcessorFrontendAccess, val bTypes: /** * The result of code generation. [[isArtifact]] is `true` for mirror. 
*/ -case class GeneratedClass(classNode: ClassNode, sourceFile: AbstractFile, isArtifact: Boolean, onFileCreated: AbstractFile => Unit) +case class GeneratedClass( + classNode: ClassNode, + sourceClassName: String, + position: SourcePosition, + isArtifact: Boolean, + onFileCreated: AbstractFile => Unit) case class GeneratedTasty(classNode: ClassNode, tastyGen: () => Array[Byte]) -case class GeneratedDefs(classes: List[GeneratedClass], tasty: List[GeneratedTasty]) +case class GeneratedCompilationUnit(sourceFile: AbstractFile, classes: List[GeneratedClass], tasty: List[GeneratedTasty])(using val ctx: Context) -// Temporary class, will be refactored in a future commit -trait ClassWriterForPostProcessor { - type InternalName = String - def write(bytes: Array[Byte], className: InternalName, sourceFile: AbstractFile): Unit -} diff --git a/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala b/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala index 80ee68bc94c3..f7955cbb350c 100644 --- a/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala +++ b/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala @@ -8,23 +8,47 @@ import java.util.{Collection => JCollection, Map => JMap} import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.report import dotty.tools.dotc.core.Phases +import scala.compiletime.uninitialized /** * Functionality needed in the post-processor whose implementation depends on the compiler * frontend. All methods are synchronized. */ -sealed abstract class PostProcessorFrontendAccess { - import PostProcessorFrontendAccess._ +sealed abstract class PostProcessorFrontendAccess(backendInterface: DottyBackendInterface) { + import PostProcessorFrontendAccess.* def compilerSettings: CompilerSettings + + def withThreadLocalReporter[T](reporter: BackendReporting)(fn: => T): T def backendReporting: BackendReporting + def directBackendReporting: BackendReporting + def getEntryPoints: List[String] private val frontendLock: AnyRef = new Object() - inline final def frontendSynch[T](inline x: => T): T = frontendLock.synchronized(x) + inline final def frontendSynch[T](inline x: Context ?=> T): T = frontendLock.synchronized(x(using backendInterface.ctx)) + inline final def frontendSynchWithoutContext[T](inline x: T): T = frontendLock.synchronized(x) + inline def perRunLazy[T](inline init: Context ?=> T): Lazy[T] = new Lazy(init)(using this) } object PostProcessorFrontendAccess { + /* A container for a value with lazy initialization, synchronized on the compiler frontend. + * Used for sharing, between different threads, variables that require a Context for initialization. + * Similar to Scala 2's BTypes.LazyVar, but without the re-initialization of BTypes.LazyWithLock; these were not moved to PostProcessorFrontendAccess only because of problematic architectural decisions. 
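// Aside (stand-alone sketch of the Lazy container defined just below): a @volatile
// flag guards an unsynchronized fast path, while initialization runs under a shared
// lock; a plain object monitor stands in here for the compiler's frontend lock.
import scala.compiletime.uninitialized

final class LazyCell[T](init: => T, lock: AnyRef):
  @volatile private var isInit = false
  private var v: T = uninitialized

  def get: T =
    if isInit then v
    else lock.synchronized {
      if !isInit then
        v = init
        isInit = true
      v
    }

@main def lazyCellDemo(): Unit =
  val lock = new Object
  val cell = LazyCell({ println("computed once"); 42 }, lock)
  println(cell.get) // prints "computed once", then 42
  println(cell.get) // prints 42 only, via the unsynchronized fast path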
+ */ + class Lazy[T](init: Context ?=> T)(using frontendAccess: PostProcessorFrontendAccess) { + @volatile private var isInit: Boolean = false + private var v: T = uninitialized + + def get: T = + if isInit then v + else frontendAccess.frontendSynch { + if !isInit then v = init + isInit = true + v + } + } + sealed trait CompilerSettings { def debug: Boolean def target: String // javaOutputVersion @@ -33,25 +57,59 @@ object PostProcessorFrontendAccess { def outputDirectory: AbstractFile def mainClass: Option[String] + + def jarCompressionLevel: Int + def backendParallelism: Int + def backendMaxWorkerQueue: Option[Int] + def outputOnlyTasty: Boolean } sealed trait BackendReporting { - def error(message: Context ?=> Message): Unit - def warning(message: Context ?=> Message): Unit - def log(message: Context ?=> String): Unit + def error(message: Context ?=> Message, position: SourcePosition): Unit + def warning(message: Context ?=> Message, position: SourcePosition): Unit + def log(message: String): Unit + + def error(message: Context ?=> Message): Unit = error(message, NoSourcePosition) + def warning(message: Context ?=> Message): Unit = warning(message, NoSourcePosition) } - class Impl[I <: DottyBackendInterface](val int: I, entryPoints: HashSet[String]) extends PostProcessorFrontendAccess { - import int.given - lazy val compilerSettings: CompilerSettings = buildCompilerSettings() + final class BufferingBackendReporting(using Context) extends BackendReporting { + // We optimise access to the buffered reports for the common case - that there are no warnings/errors to report + // We could use a ListBuffer etc., but that would be extra allocation in the common case + // Note - all access is externally synchronized, as this allows the reports to be generated in one thread and + // consumed in another + private var bufferedReports = List.empty[Report] + enum Report(val relay: BackendReporting => Unit): + case Error(message: Message, position: SourcePosition) extends Report(_.error(message, position)) + case Warning(message: Message, position: SourcePosition) extends Report(_.warning(message, position)) + case Log(message: String) extends Report(_.log(message)) + + def error(message: Context ?=> Message, position: SourcePosition): Unit = synchronized: + bufferedReports ::= Report.Error(message, position) + + def warning(message: Context ?=> Message, position: SourcePosition): Unit = synchronized: + bufferedReports ::= Report.Warning(message, position) + + def log(message: String): Unit = synchronized: + bufferedReports ::= Report.Log(message) - private def buildCompilerSettings(): CompilerSettings = new CompilerSettings { + def relayReports(toReporting: BackendReporting): Unit = synchronized: + if bufferedReports.nonEmpty then + bufferedReports.reverse.foreach(_.relay(toReporting)) + bufferedReports = Nil + } + + + class Impl[I <: DottyBackendInterface](int: I, entryPoints: HashSet[String]) extends PostProcessorFrontendAccess(int) { + override def compilerSettings: CompilerSettings = _compilerSettings.get + private lazy val _compilerSettings: Lazy[CompilerSettings] = perRunLazy(buildCompilerSettings) + + private def buildCompilerSettings(using ctx: Context): CompilerSettings = new CompilerSettings { extension [T](s: dotty.tools.dotc.config.Settings.Setting[T]) - def valueSetByUser: Option[T] = - Option(s.value).filter(_ != s.default) - def s = ctx.settings + def valueSetByUser: Option[T] = Option(s.value).filter(_ != s.default) + inline def s = ctx.settings - lazy val target = + override val target = 
val releaseValue = Option(s.javaOutputVersion.value).filter(_.nonEmpty) val targetValue = Option(s.XuncheckedJavaOutputVersion.value).filter(_.nonEmpty) (releaseValue, targetValue) match @@ -62,18 +120,39 @@ object PostProcessorFrontendAccess { release case (None, None) => "8" // lowest supported version by default - lazy val debug: Boolean = ctx.debug - lazy val dumpClassesDirectory: Option[String] = s.Ydumpclasses.valueSetByUser - lazy val outputDirectory: AbstractFile = s.outputDir.value - lazy val mainClass: Option[String] = s.XmainClass.valueSetByUser + override val debug: Boolean = ctx.debug + override val dumpClassesDirectory: Option[String] = s.Ydumpclasses.valueSetByUser + override val outputDirectory: AbstractFile = s.outputDir.value + override val mainClass: Option[String] = s.XmainClass.valueSetByUser + override val jarCompressionLevel: Int = s.YjarCompressionLevel.value + override val backendParallelism: Int = s.YbackendParallelism.value + override val backendMaxWorkerQueue: Option[Int] = s.YbackendWorkerQueue.valueSetByUser + override val outputOnlyTasty: Boolean = s.YoutputOnlyTasty.value + } + + private lazy val localReporter = new ThreadLocal[BackendReporting] + + override def withThreadLocalReporter[T](reporter: BackendReporting)(fn: => T): T = { + val old = localReporter.get() + localReporter.set(reporter) + try fn + finally + if old eq null then localReporter.remove() + else localReporter.set(old) } - object backendReporting extends BackendReporting { - def error(message: Context ?=> Message): Unit = frontendSynch(report.error(message)) - def warning(message: Context ?=> Message): Unit = frontendSynch(report.warning(message)) - def log(message: Context ?=> String): Unit = frontendSynch(report.log(message)) + override def backendReporting: BackendReporting = { + val local = localReporter.get() + if local eq null then directBackendReporting + else local.nn + } + + override object directBackendReporting extends BackendReporting { + def error(message: Context ?=> Message, position: SourcePosition): Unit = frontendSynch(report.error(message, position)) + def warning(message: Context ?=> Message, position: SourcePosition): Unit = frontendSynch(report.warning(message, position)) + def log(message: String): Unit = frontendSynch(report.log(message)) } def getEntryPoints: List[String] = frontendSynch(entryPoints.toList) } -} \ No newline at end of file +} diff --git a/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala b/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala index bc453aec17af..262b5df43362 100644 --- a/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala +++ b/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala @@ -2,10 +2,10 @@ package dotty.tools package backend.jvm import dotc.ast.Trees.Select -import dotc.ast.tpd._ -import dotc.core._ -import Contexts._ -import Names.TermName, StdNames._ +import dotc.ast.tpd.* +import dotc.core.* +import Contexts.* +import Names.TermName, StdNames.* import Types.{JavaArrayType, UnspecifiedErrorType, Type} import Symbols.{Symbol, NoSymbol} import Decorators.em @@ -32,7 +32,7 @@ import scala.annotation.threadUnsafe * Inspired by the `scalac` compiler. 
*/ class DottyPrimitives(ictx: Context) { - import dotty.tools.backend.ScalaPrimitivesOps._ + import dotty.tools.backend.ScalaPrimitivesOps.* @threadUnsafe private lazy val primitives: ReadOnlyMap[Symbol, Int] = init diff --git a/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala b/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala index 1579b4577933..2c5a6639dc8b 100644 --- a/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala +++ b/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala @@ -1,8 +1,8 @@ package dotty.tools.backend.sjs -import dotty.tools.dotc.core._ -import Contexts._ -import Phases._ +import dotty.tools.dotc.core.* +import Contexts.* +import Phases.* /** Generates Scala.js IR files for the compilation unit. */ class GenSJSIR extends Phase { diff --git a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala index eee791852fde..54af9f8dd088 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala @@ -8,20 +8,20 @@ import scala.collection.mutable import dotty.tools.FatalError import dotty.tools.dotc.CompilationUnit import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core._ -import Contexts._ -import Decorators._ -import Flags._ -import Names._ +import dotty.tools.dotc.core.* +import Contexts.* +import Decorators.* +import Flags.* +import Names.* import NameKinds.DefaultGetterName -import Types._ -import Symbols._ -import Phases._ -import StdNames._ +import Types.* +import Symbols.* +import Phases.* +import StdNames.* import TypeErasure.ErasedValueType import dotty.tools.dotc.transform.{Erasure, ValueClasses} -import dotty.tools.dotc.transform.SymUtils._ + import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.report @@ -32,10 +32,11 @@ import org.scalajs.ir.OriginalName import org.scalajs.ir.OriginalName.NoOriginalName import org.scalajs.ir.Trees.OptimizerHints -import dotty.tools.dotc.transform.sjs.JSSymUtils._ +import dotty.tools.dotc.transform.sjs.JSSymUtils.* -import JSEncoding._ +import JSEncoding.* import ScopedVar.withScopedVars +import scala.reflect.NameTransformer /** Main codegen for Scala.js IR. * @@ -54,15 +55,15 @@ import ScopedVar.withScopedVars * - `genStatOrExpr()` and everything else generate the bodies of methods. */ class JSCodeGen()(using genCtx: Context) { - import JSCodeGen._ - import tpd._ + import JSCodeGen.* + import tpd.* val sjsPlatform = dotty.tools.dotc.config.SJSPlatform.sjsPlatform val jsdefn = JSDefinitions.jsdefn private val primitives = new JSPrimitives(genCtx) val positionConversions = new JSPositions()(using genCtx) - import positionConversions._ + import positionConversions.* private val jsExportsGen = new JSExportsGen(this) @@ -1089,7 +1090,7 @@ class JSCodeGen()(using genCtx: Context) { val exports = List.newBuilder[jsExportsGen.Exported] val jsClassCaptures = List.newBuilder[js.ParamDef] - def add(tree: ConstructorTree[_ <: JSCtor]): Unit = { + def add(tree: ConstructorTree[? <: JSCtor]): Unit = { val (e, c) = genJSClassCtorDispatch(tree.ctor.sym, tree.ctor.paramsAndInfo, tree.overloadNum) exports += e @@ -1270,7 +1271,7 @@ class JSCodeGen()(using genCtx: Context) { * here we use the property from building the trees, that a set of * descendants always has a range of overload numbers. 
*/ - def ifOverload(tree: ConstructorTree[_], body: js.Tree): js.Tree = body match { + def ifOverload(tree: ConstructorTree[?], body: js.Tree): js.Tree = body match { case js.Skip() => js.Skip() case body => @@ -1827,7 +1828,7 @@ class JSCodeGen()(using genCtx: Context) { } case Literal(value) => - import Constants._ + import Constants.* value.tag match { case UnitTag => js.Skip() @@ -2525,7 +2526,7 @@ class JSCodeGen()(using genCtx: Context) { /** Gen JS code for a primitive method call. */ private def genPrimitiveOp(tree: Apply, isStat: Boolean): js.Tree = { - import dotty.tools.backend.ScalaPrimitivesOps._ + import dotty.tools.backend.ScalaPrimitivesOps.* implicit val pos = tree.span @@ -2565,7 +2566,7 @@ class JSCodeGen()(using genCtx: Context) { /** Gen JS code for a simple unary operation. */ private def genSimpleUnaryOp(tree: Apply, arg: Tree, code: Int): js.Tree = { - import dotty.tools.backend.ScalaPrimitivesOps._ + import dotty.tools.backend.ScalaPrimitivesOps.* implicit val pos = tree.span @@ -2606,7 +2607,7 @@ class JSCodeGen()(using genCtx: Context) { /** Gen JS code for a simple binary operation. */ private def genSimpleBinaryOp(tree: Apply, lhs: Tree, rhs: Tree, code: Int): js.Tree = { - import dotty.tools.backend.ScalaPrimitivesOps._ + import dotty.tools.backend.ScalaPrimitivesOps.* implicit val pos: SourcePosition = tree.sourcePos @@ -2646,7 +2647,7 @@ class JSCodeGen()(using genCtx: Context) { } else if (code == ZAND) { js.If(lsrc, rsrc, js.BooleanLiteral(false))(jstpe.BooleanType) } else { - import js.BinaryOp._ + import js.BinaryOp.* (opType: @unchecked) match { case jstpe.IntType => @@ -2768,7 +2769,7 @@ class JSCodeGen()(using genCtx: Context) { */ private def genConversion(from: jstpe.Type, to: jstpe.Type, value: js.Tree)( implicit pos: Position): js.Tree = { - import js.UnaryOp._ + import js.UnaryOp.* if (from == to || from == jstpe.NothingType) { value @@ -2823,7 +2824,7 @@ class JSCodeGen()(using genCtx: Context) { private def genUniversalEqualityOp(ltpe: Type, rtpe: Type, lhs: js.Tree, rhs: js.Tree, code: Int)( implicit pos: SourcePosition): js.Tree = { - import dotty.tools.backend.ScalaPrimitivesOps._ + import dotty.tools.backend.ScalaPrimitivesOps.* val bypassEqEq = { // Do not call equals if we have a literal null at either side. 
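// Aside (demonstration of the literal-null bypass this comment describes):
// comparing against a literal null compiles down to a reference check, so a
// user-defined equals is never consulted and cannot be observed here.
final class Loud:
  override def equals(other: Any): Boolean =
    throw new AssertionError("equals was called")

@main def nullBypassDemo(): Unit =
  val x = new Loud
  println(x == null) // false, and Loud.equals is not invoked
  println(null == x) // false
  println(x != null) // true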
@@ -2845,7 +2846,7 @@ class JSCodeGen()(using genCtx: Context) { private lazy val externalEqualsNumNum: Symbol = defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumNum) private lazy val externalEqualsNumChar: Symbol = - NoSymbol // requiredMethod(BoxesRunTimeTypeRef, nme.equalsNumChar) // this method is private + defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumChar) private lazy val externalEqualsNumObject: Symbol = defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumObject) private lazy val externalEquals: Symbol = @@ -2885,7 +2886,7 @@ class JSCodeGen()(using genCtx: Context) { val ptfm = ctx.platform if (lsym.derivesFrom(defn.BoxedNumberClass)) { if (rsym.derivesFrom(defn.BoxedNumberClass)) externalEqualsNumNum - else if (rsym.derivesFrom(defn.BoxedCharClass)) externalEqualsNumObject // will be externalEqualsNumChar in 2.12, SI-9030 + else if (rsym.derivesFrom(defn.BoxedCharClass)) externalEqualsNumChar else externalEqualsNumObject } else externalEquals } @@ -2931,7 +2932,7 @@ class JSCodeGen()(using genCtx: Context) { /** Gen JS code for an array operation (get, set or length) */ private def genArrayOp(tree: Tree, code: Int): js.Tree = { - import dotty.tools.backend.ScalaPrimitivesOps._ + import dotty.tools.backend.ScalaPrimitivesOps.* implicit val pos = tree.span @@ -3766,7 +3767,7 @@ class JSCodeGen()(using genCtx: Context) { private def genJSPrimitive(tree: Apply, args: List[Tree], code: Int, isStat: Boolean): js.Tree = { - import JSPrimitives._ + import JSPrimitives.* implicit val pos = tree.span @@ -4218,7 +4219,7 @@ class JSCodeGen()(using genCtx: Context) { } } - val methodName = MethodName.reflectiveProxy(methodNameStr, formalParamTypeRefs) + val methodName = MethodName.reflectiveProxy(NameTransformer.encode(methodNameStr), formalParamTypeRefs) js.Apply(js.ApplyFlags.empty, selectedValueTree, js.MethodIdent(methodName), actualArgs)(jstpe.AnyType) } @@ -4696,7 +4697,7 @@ class JSCodeGen()(using genCtx: Context) { } private def computeJSNativeLoadSpecOfInPhase(sym: Symbol)(using Context): js.JSNativeLoadSpec = { - import js.JSNativeLoadSpec._ + import js.JSNativeLoadSpec.* val symOwner = sym.owner diff --git a/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala b/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala index 5336d60129ac..ab7f9a89f9c5 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala @@ -4,14 +4,15 @@ import scala.language.unsafeNulls import scala.annotation.threadUnsafe -import dotty.tools.dotc.core._ -import Names._ -import Types._ -import Contexts._ -import Symbols._ -import StdNames._ +import dotty.tools.dotc.core.* +import Names.* +import Types.* +import Contexts.* +import Symbols.* +import StdNames.* import dotty.tools.dotc.config.SJSPlatform +import scala.compiletime.uninitialized object JSDefinitions { /** The Scala.js-specific definitions for the current context. 
*/ @@ -249,7 +250,7 @@ final class JSDefinitions()(using Context) { @threadUnsafe lazy val Selectable_reflectiveSelectableFromLangReflectiveCallsR = SelectableModule.requiredMethodRef("reflectiveSelectableFromLangReflectiveCalls") def Selectable_reflectiveSelectableFromLangReflectiveCalls(using Context) = Selectable_reflectiveSelectableFromLangReflectiveCallsR.symbol - private var allRefClassesCache: Set[Symbol] = _ + private var allRefClassesCache: Set[Symbol] = uninitialized def allRefClasses(using Context): Set[Symbol] = { if (allRefClassesCache == null) { val baseNames = List("Object", "Boolean", "Character", "Byte", "Short", diff --git a/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala b/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala index 73a150c60290..f2b90d5b1161 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala @@ -4,16 +4,16 @@ import scala.language.unsafeNulls import scala.collection.mutable -import dotty.tools.dotc.core._ -import Contexts._ -import Flags._ -import Types._ -import Symbols._ -import NameOps._ -import Names._ -import StdNames._ +import dotty.tools.dotc.core.* +import Contexts.* +import Flags.* +import Types.* +import Symbols.* +import NameOps.* +import Names.* +import StdNames.* -import dotty.tools.dotc.transform.sjs.JSSymUtils._ +import dotty.tools.dotc.transform.sjs.JSSymUtils.* import org.scalajs.ir import org.scalajs.ir.{Trees => js, Types => jstpe} @@ -61,7 +61,7 @@ object JSEncoding { // Fresh local name generator ---------------------------------------------- class LocalNameGenerator { - import LocalNameGenerator._ + import LocalNameGenerator.* private val usedLocalNames = mutable.Set.empty[LocalName] private val localSymbolNames = mutable.Map.empty[Symbol, LocalName] diff --git a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala index 78412999bb34..8c72f03e7cc4 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala @@ -5,18 +5,18 @@ import scala.language.unsafeNulls import scala.annotation.tailrec import scala.collection.mutable -import dotty.tools.dotc.core._ +import dotty.tools.dotc.core.* -import Contexts._ -import Decorators._ -import Denotations._ -import Flags._ -import Names._ +import Contexts.* +import Decorators.* +import Denotations.* +import Flags.* +import Names.* import NameKinds.DefaultGetterName -import NameOps._ -import Phases._ -import Symbols._ -import Types._ +import NameOps.* +import Phases.* +import Symbols.* +import Types.* import TypeErasure.ErasedValueType import dotty.tools.dotc.util.{SourcePosition, SrcPos} @@ -28,14 +28,14 @@ import org.scalajs.ir.OriginalName.NoOriginalName import org.scalajs.ir.Position.NoPosition import org.scalajs.ir.Trees.OptimizerHints -import dotty.tools.dotc.transform.sjs.JSExportUtils._ -import dotty.tools.dotc.transform.sjs.JSSymUtils._ +import dotty.tools.dotc.transform.sjs.JSExportUtils.* +import dotty.tools.dotc.transform.sjs.JSSymUtils.* -import JSEncoding._ +import JSEncoding.* final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { - import jsCodeGen._ - import positionConversions._ + import jsCodeGen.* + import positionConversions.* /** Info for a non-member export. 
*/ sealed trait ExportInfo { @@ -154,7 +154,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { (info, tups) <- exports.groupBy(_._1) kind <- checkSameKind(tups) } yield { - import ExportKind._ + import ExportKind.* implicit val pos = info.pos @@ -201,7 +201,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { implicit val pos = info.pos - import ExportKind._ + import ExportKind.* kind match { case Method => diff --git a/compiler/src/dotty/tools/backend/sjs/JSPositions.scala b/compiler/src/dotty/tools/backend/sjs/JSPositions.scala index 2fd007165952..3b25187b0acd 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSPositions.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSPositions.scala @@ -4,8 +4,8 @@ import scala.language.unsafeNulls import java.net.{URI, URISyntaxException} -import dotty.tools.dotc.core._ -import Contexts._ +import dotty.tools.dotc.core.* +import Contexts.* import Decorators.em import dotty.tools.dotc.report @@ -17,7 +17,7 @@ import org.scalajs.ir /** Conversion utilities from dotty Positions to IR Positions. */ class JSPositions()(using Context) { - import JSPositions._ + import JSPositions.* private val sourceURIMaps: List[URIMap] = { ctx.settings.scalajsMapSourceURI.value.flatMap { option => @@ -64,7 +64,7 @@ class JSPositions()(using Context) { sourceAndSpan2irPos(sourcePos.source, sourcePos.span) private object span2irPosCache { - import dotty.tools.dotc.util._ + import dotty.tools.dotc.util.* private var lastDotcSource: SourceFile = null private var lastIRSource: ir.Position.SourceFile = null diff --git a/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala b/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala index 029273aed54b..a3a37795826a 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala @@ -1,13 +1,13 @@ package dotty.tools.backend.sjs -import dotty.tools.dotc.core._ +import dotty.tools.dotc.core.* import Names.TermName -import Types._ -import Contexts._ -import Symbols._ +import Types.* +import Contexts.* +import Symbols.* import Decorators.em -import dotty.tools.dotc.ast.tpd._ +import dotty.tools.dotc.ast.tpd.* import dotty.tools.backend.jvm.DottyPrimitives import dotty.tools.dotc.report import dotty.tools.dotc.util.ReadOnlyMap @@ -64,7 +64,7 @@ object JSPrimitives { } class JSPrimitives(ictx: Context) extends DottyPrimitives(ictx) { - import JSPrimitives._ + import JSPrimitives.* private lazy val jsPrimitives: ReadOnlyMap[Symbol, Int] = initJSPrimitives(using ictx) diff --git a/compiler/src/dotty/tools/backend/sjs/ScopedVar.scala b/compiler/src/dotty/tools/backend/sjs/ScopedVar.scala index be5b8e7bb416..fd271685989c 100644 --- a/compiler/src/dotty/tools/backend/sjs/ScopedVar.scala +++ b/compiler/src/dotty/tools/backend/sjs/ScopedVar.scala @@ -28,7 +28,7 @@ object ScopedVar { implicit def toValue[T](scVar: ScopedVar[T]): T = scVar.get - def withScopedVars[T](ass: Assignment[_]*)(body: => T): T = { + def withScopedVars[T](ass: Assignment[?]*)(body: => T): T = { val stack = ass.map(_.push()) try body finally stack.reverse.foreach(_.pop()) diff --git a/compiler/src/dotty/tools/dotc/Bench.scala b/compiler/src/dotty/tools/dotc/Bench.scala index 5f5e9fc799b5..cbc490919cfe 100644 --- a/compiler/src/dotty/tools/dotc/Bench.scala +++ b/compiler/src/dotty/tools/dotc/Bench.scala @@ -1,11 +1,12 @@ package dotty.tools package dotc -import core.Contexts._ +import core.Contexts.* import reporting.Reporter import io.AbstractFile import 
scala.annotation.internal.sharable +import scala.compiletime.uninitialized /** A main class for running compiler benchmarks. Can instantiate a given * number of compilers and run each (sequentially) a given number of times * @@ -17,7 +18,7 @@ object Bench extends Driver: @sharable private var numCompilers = 1 @sharable private var waitAfter = -1 @sharable private var curCompiler = 0 - @sharable private var times: Array[Int] = _ + @sharable private var times: Array[Int] = uninitialized override def doCompile(compiler: Compiler, files: List[AbstractFile])(using Context): Reporter = var reporter: Reporter = emptyReporter diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala index 8415646eb16c..78773a518b67 100644 --- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala +++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala @@ -1,25 +1,25 @@ package dotty.tools package dotc -import core._ -import Contexts._ +import core.* +import Contexts.* import SymDenotations.ClassDenotation -import Symbols._ +import Symbols.* +import Comments.Comment import util.{FreshNameCreator, SourceFile, NoSource} import util.Spans.Span import ast.{tpd, untpd} import tpd.{Tree, TreeTraverser} import ast.Trees.{Import, Ident} import typer.Nullables -import transform.SymUtils._ -import core.Decorators._ +import core.Decorators.* import config.{SourceVersion, Feature} import StdNames.nme import scala.annotation.internal.sharable import scala.util.control.NoStackTrace import transform.MacroAnnotations -class CompilationUnit protected (val source: SourceFile) { +class CompilationUnit protected (val source: SourceFile, val info: CompilationUnitInfo | Null) { override def toString: String = source.toString @@ -30,6 +30,13 @@ class CompilationUnit protected (val source: SourceFile) { /** Is this the compilation unit of a Java file */ def isJava: Boolean = source.file.name.endsWith(".java") + /** Is this the compilation unit of a Java file, or TASTy derived from a Java file */ + def typedAsJava = isJava || { + val infoNN = info + infoNN != null && infoNN.tastyInfo.exists(_.attributes.isJava) + } + + /** The source version for this unit, as determined by a language import */ var sourceVersion: Option[SourceVersion] = None @@ -69,11 +76,23 @@ class CompilationUnit protected (val source: SourceFile) { /** Can this compilation unit be suspended */ def isSuspendable: Boolean = true + /** List of all comments present in this compilation unit */ + var comments: List[Comment] = Nil + + /** This is used to record dependencies to invalidate during incremental + * compilation, but only if `ctx.runZincPhases` is true. + */ + val depRecorder: sbt.DependencyRecorder = sbt.DependencyRecorder() + /** Suspends the compilation unit by throwing a SuspendException * and recording the suspended compilation unit */ def suspend()(using Context): Nothing = assert(isSuspendable) + // Clear references to symbols that may become stale. No need to call + // `depRecorder.sendToZinc()` since all compilation phases will be rerun + // when this unit is unsuspended.
+ depRecorder.clear() if !suspended then if (ctx.settings.XprintSuspension.value) report.echo(i"suspended: $this") @@ -94,7 +113,7 @@ class CompilationUnit protected (val source: SourceFile) { myAssignmentSpans.nn } -@sharable object NoCompilationUnit extends CompilationUnit(NoSource) { +@sharable object NoCompilationUnit extends CompilationUnit(NoSource, info = null) { override def isJava: Boolean = false @@ -110,13 +129,14 @@ object CompilationUnit { /** Make a compilation unit for top class `clsd` with the contents of the `unpickled` tree */ def apply(clsd: ClassDenotation, unpickled: Tree, forceTrees: Boolean)(using Context): CompilationUnit = - val file = clsd.symbol.associatedFile.nn - apply(SourceFile(file, Array.empty[Char]), unpickled, forceTrees) + val compilationUnitInfo = clsd.symbol.compilationUnitInfo.nn + val file = compilationUnitInfo.associatedFile + apply(SourceFile(file, Array.empty[Char]), unpickled, forceTrees, compilationUnitInfo) /** Make a compilation unit, given pickled bytes and unpickled tree */ - def apply(source: SourceFile, unpickled: Tree, forceTrees: Boolean)(using Context): CompilationUnit = { + def apply(source: SourceFile, unpickled: Tree, forceTrees: Boolean, info: CompilationUnitInfo)(using Context): CompilationUnit = { assert(!unpickled.isEmpty, unpickled) - val unit1 = new CompilationUnit(source) + val unit1 = new CompilationUnit(source, info) unit1.tpdTree = unpickled if (forceTrees) { val force = new Force @@ -144,7 +164,8 @@ object CompilationUnit { NoSource } else source - new CompilationUnit(src) + val info = if src.exists then CompilationUnitInfo(src.file) else null + new CompilationUnit(src, info) } /** Force the tree to be loaded */ diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala index a6118732d4ae..aaa14a052936 100644 --- a/compiler/src/dotty/tools/dotc/Compiler.scala +++ b/compiler/src/dotty/tools/dotc/Compiler.scala @@ -1,13 +1,12 @@ package dotty.tools package dotc -import core._ -import Contexts._ +import core.* +import Contexts.* import typer.{TyperPhase, RefChecks} -import cc.CheckCaptures import parsing.Parser import Phases.Phase -import transform._ +import transform.* import dotty.tools.backend import backend.jvm.{CollectSuperCalls, GenBCode} import localopt.StringInterpolatorOpt @@ -35,10 +34,11 @@ class Compiler { protected def frontendPhases: List[List[Phase]] = List(new Parser) :: // Compiler frontend: scanner, parser List(new TyperPhase) :: // Compiler frontend: namer, typer - List(new CheckUnused.PostTyper) :: // Check for unused elements + List(new CheckUnused.PostTyper) :: // Check for unused elements + List(new CheckShadowing) :: // Check shadowing elements List(new YCheckPositions) :: // YCheck positions List(new sbt.ExtractDependencies) :: // Sends information on classes' dependencies to sbt via callbacks - List(new semanticdb.ExtractSemanticDB) :: // Extract info into .semanticdb files + List(new semanticdb.ExtractSemanticDB.ExtractSemanticInfo) :: // Extract info into .semanticdb files List(new PostTyper) :: // Additional checks and cleanups after type checking List(new sjs.PrepJSInterop) :: // Additional checks and transformations for Scala.js (Scala.js only) List(new sbt.ExtractAPI) :: // Sends a representation of the API of classes to sbt via callbacks @@ -71,6 +71,7 @@ class Compiler { new ExpandSAMs, // Expand single abstract method closures to anonymous classes new ElimRepeated, // Rewrite vararg parameters and arguments new RefChecks) :: // Various checks
mostly related to abstract members and overriding + List(new semanticdb.ExtractSemanticDB.AppendDiagnostics) :: // Attach warnings to extracted SemanticDB and write to .semanticdb file List(new init.Checker) :: // Check initialization of objects List(new ProtectedAccessors, // Add accessors for protected members new ExtensionMethods, // Expand methods of value classes with extension methods @@ -83,8 +84,8 @@ class Compiler { new PatternMatcher) :: // Compile pattern matches List(new TestRecheck.Pre) :: // Test only: run rechecker, enabled under -Yrecheck-test List(new TestRecheck) :: // Test only: run rechecker, enabled under -Yrecheck-test - List(new CheckCaptures.Pre) :: // Preparations for check captures phase, enabled under captureChecking - List(new CheckCaptures) :: // Check captures, enabled under captureChecking + List(new cc.Setup) :: // Preparations for check captures phase, enabled under captureChecking + List(new cc.CheckCaptures) :: // Check captures, enabled under captureChecking List(new ElimOpaque, // Turn opaque into normal aliases new sjs.ExplicitJSClasses, // Make all JS classes explicit (Scala.js only) new ExplicitOuter, // Add accessors to outer classes from nested ones. @@ -100,7 +101,6 @@ class Compiler { new Getters, // Replace non-private vals and vars with getter defs (fields are added later) new SpecializeFunctions, // Specialized Function{0,1,2} by replacing super with specialized super new SpecializeTuples, // Specializes Tuples by replacing tuple construction and selection trees - new LiftTry, // Put try expressions that might execute on non-empty stacks into their own methods new CollectNullableFields, // Collect fields that can be nulled out after use in lazy initialization new ElimOuterSelect, // Expand outer selections new ResolveSuper, // Implement super accessors diff --git a/compiler/src/dotty/tools/dotc/Driver.scala b/compiler/src/dotty/tools/dotc/Driver.scala index e548cae55ddd..4207baa57470 100644 --- a/compiler/src/dotty/tools/dotc/Driver.scala +++ b/compiler/src/dotty/tools/dotc/Driver.scala @@ -3,12 +3,12 @@ package dotty.tools.dotc import dotty.tools.FatalError import config.CompilerCommand import core.Comments.{ContextDoc, ContextDocstrings} -import core.Contexts._ +import core.Contexts.* import core.{MacroClassLoader, TypeError} import dotty.tools.dotc.ast.Positioned import dotty.tools.io.AbstractFile -import reporting._ -import core.Decorators._ +import reporting.* +import core.Decorators.* import config.Feature import scala.util.control.NonFatal diff --git a/compiler/src/dotty/tools/dotc/Resident.scala b/compiler/src/dotty/tools/dotc/Resident.scala index 0b9bca0dc75b..481d321773c6 100644 --- a/compiler/src/dotty/tools/dotc/Resident.scala +++ b/compiler/src/dotty/tools/dotc/Resident.scala @@ -1,7 +1,7 @@ package dotty.tools package dotc -import core.Contexts._ +import core.Contexts.* import reporting.Reporter import java.io.EOFException import scala.annotation.tailrec diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index 944ae794c94f..bec1c89d7216 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -1,20 +1,22 @@ package dotty.tools package dotc -import core._ -import Contexts._ -import Periods._ -import Symbols._ -import Scopes._ +import core.* +import Contexts.* +import Periods.* +import Symbols.* +import Scopes.* import Names.Name import Denotations.Denotation import typer.Typer import typer.ImportInfo.withRootImports -import Decorators._ 
+import Decorators.* import io.AbstractFile -import Phases.unfusedPhases +import Phases.{unfusedPhases, Phase} -import util._ +import sbt.interfaces.ProgressCallback + +import util.* import reporting.{Suppression, Action, Profile, ActiveProfile, NoProfile} import reporting.Diagnostic import reporting.Diagnostic.Warning @@ -32,6 +34,10 @@ import scala.collection.mutable import scala.util.control.NonFatal import scala.io.Codec +import Run.Progress +import scala.compiletime.uninitialized +import dotty.tools.dotc.transform.MegaPhase + /** A compiler run. Exports various methods to compile source files */ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with ConstraintRunInfo { @@ -59,7 +65,7 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint private var myUnits: List[CompilationUnit] = Nil private var myUnitsCached: List[CompilationUnit] = Nil - private var myFiles: Set[AbstractFile] = _ + private var myFiles: Set[AbstractFile] = uninitialized // `@nowarn` annotations by source file, populated during typer private val mySuppressions: mutable.LinkedHashMap[SourceFile, mutable.ListBuffer[Suppression]] = mutable.LinkedHashMap.empty @@ -155,7 +161,7 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint } /** The source files of all late entered symbols, as a set */ - private var lateFiles = mutable.Set[AbstractFile]() + private val lateFiles = mutable.Set[AbstractFile]() /** A cache for static references to packages and classes */ val staticRefs = util.EqHashMap[Name, Denotation](initialCapacity = 1024) @@ -163,15 +169,76 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint /** Actions that need to be performed at the end of the current compilation run */ private var finalizeActions = mutable.ListBuffer[() => Unit]() + private var _progress: Progress | Null = null // Set if progress reporting is enabled + + private inline def trackProgress(using Context)(inline op: Context ?=> Progress => Unit): Unit = + foldProgress(())(op) + + private inline def foldProgress[T](using Context)(inline default: T)(inline op: Context ?=> Progress => T): T = + val local = _progress + if local != null then + op(using ctx)(local) + else + default + + def didEnterUnit(unit: CompilationUnit)(using Context): Boolean = + foldProgress(true /* should progress by default */)(_.tryEnterUnit(unit)) + + def canProgress()(using Context): Boolean = + foldProgress(true /* not cancelled by default */)(p => !p.checkCancellation()) + + def doAdvanceUnit()(using Context): Unit = + trackProgress: progress => + progress.currentUnitCount += 1 // trace that we completed a unit in the current (sub)phase + progress.refreshProgress() + + def doAdvanceLate()(using Context): Unit = + trackProgress: progress => + progress.currentLateUnitCount += 1 // trace that we completed a late compilation + progress.refreshProgress() + + private def doEnterPhase(currentPhase: Phase)(using Context): Unit = + trackProgress: progress => + progress.enterPhase(currentPhase) + + /** interrupt the thread and set cancellation state */ + private def cancelInterrupted(): Unit = + try + trackProgress(_.cancel()) + finally + Thread.currentThread().nn.interrupt() + + private def doAdvancePhase(currentPhase: Phase, wasRan: Boolean)(using Context): Unit = + trackProgress: progress => + progress.currentUnitCount = 0 // reset unit count in current (sub)phase + progress.currentCompletedSubtraversalCount = 0 // reset subphase index to initial + progress.seenPhaseCount 
+= 1 // trace that we've seen a (sub)phase + if wasRan then + // add an extra traversal now that we completed a (sub)phase + progress.completedTraversalCount += 1 + else + // no subphases were run, remove traversals from expected total + progress.totalTraversals -= currentPhase.traversals + + private def tryAdvanceSubPhase()(using Context): Unit = + trackProgress: progress => + if progress.canAdvanceSubPhase then + progress.currentUnitCount = 0 // reset unit count in current (sub)phase + progress.seenPhaseCount += 1 // trace that we've seen a (sub)phase + progress.completedTraversalCount += 1 // add an extra traversal now that we completed a (sub)phase + progress.currentCompletedSubtraversalCount += 1 // record that we've seen a subphase + if !progress.isCancelled() then + progress.tickSubphase() + /** Will be set to true if any of the compiled compilation units contains * a pureFunctions language import. */ var pureFunsImportEncountered = false - /** Will be set to true if any of the compiled compilation units contains - * a captureChecking language import. + /** Will be set to true if experimental.captureChecking is enabled + * or any of the compiled compilation units contains a captureChecking language import. */ - var ccImportEncountered = false + var ccEnabledSomewhere = Feature.enabledBySetting(Feature.captureChecking)(using ictx) private var myEnrichedErrorMessage = false @@ -233,22 +300,26 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint if ctx.settings.YnoDoubleBindings.value then ctx.base.checkNoDoubleBindings = true - def runPhases(using Context) = { + def runPhases(allPhases: Array[Phase])(using Context) = { var lastPrintedTree: PrintedTree = NoPrintedTree val profiler = ctx.profiler var phasesWereAdjusted = false - for (phase <- ctx.base.allPhases) - if (phase.isRunnable) - Stats.trackTime(s"$phase ms ") { + for phase <- allPhases do + doEnterPhase(phase) + val phaseWillRun = phase.isRunnable + if phaseWillRun then + Stats.trackTime(s"phase time ms/$phase") { val start = System.currentTimeMillis val profileBefore = profiler.beforePhase(phase) - units = phase.runOn(units) + try units = phase.runOn(units) + catch case _: InterruptedException => cancelInterrupted() profiler.afterPhase(phase, profileBefore) if (ctx.settings.Xprint.value.containsPhase(phase)) for (unit <- units) - lastPrintedTree = - printTree(lastPrintedTree)(using ctx.fresh.setPhase(phase.next).setCompilationUnit(unit)) + def printCtx(unit: CompilationUnit) = phase.printingContext( + ctx.fresh.setPhase(phase.next).setCompilationUnit(unit)) + lastPrintedTree = printTree(lastPrintedTree)(using printCtx(unit)) report.informTime(s"$phase ", start) Stats.record(s"total trees at end of $phase", ast.Trees.ntrees) for (unit <- units) @@ -260,18 +331,25 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint if !Feature.ccEnabledSomewhere then ctx.base.unlinkPhaseAsDenotTransformer(Phases.checkCapturesPhase.prev) ctx.base.unlinkPhaseAsDenotTransformer(Phases.checkCapturesPhase) - + end if + end if + end if + doAdvancePhase(phase, wasRan = phaseWillRun) + end for profiler.finished() } val runCtx = ctx.fresh runCtx.setProfiler(Profiler()) unfusedPhases.foreach(_.initContext(runCtx)) - runPhases(using runCtx) + val fusedPhases = runCtx.base.allPhases + runCtx.withProgressCallback: cb => + _progress = Progress(cb, this, fusedPhases.map(_.traversals).sum) + runPhases(allPhases = fusedPhases)(using runCtx) if (!ctx.reporter.hasErrors) Rewrites.writeBack()
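The `runPhases` rewrite above threads every phase through `doEnterPhase`/`doAdvancePhase`, wraps `runOn` so an `InterruptedException` turns into cancellation, and installs a `Progress` tracker only when `withProgressCallback` finds a registered callback. As a rough sketch of the consumer side — the method signatures below are inferred from their call sites in this patch (`informUnitStarting`, `progress`, `cancel`, `isCancelled`), and the interface path from the relative import `sbt.interfaces.ProgressCallback`, so treat both as assumptions rather than the published API:

```scala
import dotty.tools.dotc.CompilationUnit
import dotty.tools.dotc.sbt.interfaces.ProgressCallback

// Hypothetical build-tool callback; signatures inferred from usage above.
class LoggingCallback extends ProgressCallback:
  private var cancelled = false

  override def cancel(): Unit = cancelled = true
  override def isCancelled(): Boolean = cancelled

  // Invoked by Progress.tryEnterUnit before a unit is traversed.
  override def informUnitStarting(phase: String, unit: CompilationUnit): Unit =
    println(s"[$phase] ${unit.source}")

  // Invoked by Progress.refreshProgress; returning false cancels the run.
  override def progress(current: Int, total: Int, currPhase: String, nextPhase: String): Boolean =
    println(s"$current / $total ($currPhase -> $nextPhase)")
    !cancelled
```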
suppressions.runFinished(hasErrors = ctx.reporter.hasErrors) - while (finalizeActions.nonEmpty) { + while (finalizeActions.nonEmpty && canProgress()) { val action = finalizeActions.remove(0) action() } @@ -293,10 +371,9 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint .withRootImports def process()(using Context) = - ctx.typer.lateEnterUnit(doTypeCheck => - if typeCheck then - if compiling then finalizeActions += doTypeCheck - else doTypeCheck() + ctx.typer.lateEnterUnit(typeCheck)(doTypeCheck => + if compiling then finalizeActions += doTypeCheck + else doTypeCheck() ) process()(using unitCtx) @@ -308,7 +385,7 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint private def printTree(last: PrintedTree)(using Context): PrintedTree = { val unit = ctx.compilationUnit - val fusedPhase = ctx.phase.prevMega + val fusedPhase = ctx.phase.prev.megaPhase val echoHeader = f"[[syntax trees at end of $fusedPhase%25s]] // ${unit.source}" val tree = if ctx.isAfterTyper then unit.tpdTree else unit.untpdTree val treeString = fusedPhase.show(tree) @@ -399,7 +476,129 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint } object Run { + + case class SubPhase(val name: String): + override def toString: String = name + + class SubPhases(val phase: Phase): + require(phase.exists) + + private def baseName: String = phase match + case phase: MegaPhase => phase.shortPhaseName + case phase => phase.phaseName + + val all = IArray.from(phase.subPhases.map(sub => s"$baseName[$sub]")) + + def next(using Context): Option[SubPhases] = + val next0 = phase.megaPhase.next.megaPhase + if next0.exists then Some(SubPhases(next0)) + else None + + def size: Int = all.size + + def subPhase(index: Int) = + if index < all.size then all(index) + else baseName + + + private class Progress(cb: ProgressCallback, private val run: Run, val initialTraversals: Int): + export cb.{cancel, isCancelled} + + var totalTraversals: Int = initialTraversals // track how many phases we expect to run + var currentUnitCount: Int = 0 // current unit count in the current (sub)phase + var currentLateUnitCount: Int = 0 // current late unit count + var completedTraversalCount: Int = 0 // completed traversals over all files + var currentCompletedSubtraversalCount: Int = 0 // completed subphases in the current phase + var seenPhaseCount: Int = 0 // how many phases we've seen so far + + private var currPhase: Phase = uninitialized // initialized by enterPhase + private var subPhases: SubPhases = uninitialized // initialized by enterPhase + private var currPhaseName: String = uninitialized // initialized by enterPhase + private var nextPhaseName: String = uninitialized // initialized by enterPhase + + /** Enter into a new real phase, setting the current and next (sub)phases */ + def enterPhase(newPhase: Phase)(using Context): Unit = + if newPhase ne currPhase then + currPhase = newPhase + subPhases = SubPhases(newPhase) + tickSubphase() + + def canAdvanceSubPhase: Boolean = + currentCompletedSubtraversalCount + 1 < subPhases.size + + /** Compute the current (sub)phase name and next (sub)phase name */ + def tickSubphase()(using Context): Unit = + val index = currentCompletedSubtraversalCount + val s = subPhases + currPhaseName = s.subPhase(index) + nextPhaseName = + if index + 1 < s.all.size then s.subPhase(index + 1) + else s.next match + case None => "" + case Some(next0) => next0.subPhase(0) + if seenPhaseCount > 0 then + refreshProgress() + + + /** Counts the 
number of completed full traversals over files, plus the number of units in the current phase */ + private def currentProgress(): Int = + completedTraversalCount * work() + currentUnitCount + currentLateUnitCount + + /** Total progress is computed as the sum of + * - the number of traversals we expect to make over all files + * - the number of late compilations + */ + private def totalProgress(): Int = + totalTraversals * work() + run.lateFiles.size + + private def work(): Int = run.files.size + + private def requireInitialized(): Unit = + require((currPhase: Phase | Null) != null, "enterPhase was not called") + + def checkCancellation(): Boolean = + if Thread.interrupted() then cancel() + isCancelled() + + /** trace that we are beginning a unit in the current (sub)phase, unless cancelled */ + def tryEnterUnit(unit: CompilationUnit): Boolean = + if checkCancellation() then false + else + requireInitialized() + cb.informUnitStarting(currPhaseName, unit) + true + + /** trace the current progress out of the total, in the current (sub)phase, reporting the next (sub)phase */ + def refreshProgress()(using Context): Unit = + requireInitialized() + val total = totalProgress() + if total > 0 && !cb.progress(currentProgress(), total, currPhaseName, nextPhaseName) then + cancel() + extension (run: Run | Null) + + /** record that the current phase has begun for the compilation unit of the current Context */ + def enterUnit(unit: CompilationUnit)(using Context): Boolean = + if run != null then run.didEnterUnit(unit) + else true // don't check cancellation if we're not tracking progress + + /** check progress cancellation, true if not cancelled */ + def enterRegion()(using Context): Boolean = + if run != null then run.canProgress() + else true // don't check cancellation if we're not tracking progress + + /** advance the unit count and record progress in the current phase */ + def advanceUnit()(using Context): Unit = + if run != null then run.doAdvanceUnit() + + /** if there exists another subphase, switch to it and record progress */ + def enterNextSubphase()(using Context): Unit = + if run != null then run.tryAdvanceSubPhase() + + /** advance the late count and record progress in the current phase */ + def advanceLate()(using Context): Unit = + if run != null then run.doAdvanceLate() + def enrichedErrorMessage: Boolean = if run == null then false else run.myEnrichedErrorMessage def enrichErrorMessage(errorMessage: String)(using Context): String = if run == null then diff --git a/compiler/src/dotty/tools/dotc/ScalacCommand.scala b/compiler/src/dotty/tools/dotc/ScalacCommand.scala index 2e0d9a08f25d..5f7f80a262d8 100644 --- a/compiler/src/dotty/tools/dotc/ScalacCommand.scala +++ b/compiler/src/dotty/tools/dotc/ScalacCommand.scala @@ -1,6 +1,6 @@ package dotty.tools.dotc -import config.Properties._ +import config.Properties.* import config.CompilerCommand object ScalacCommand extends CompilerCommand: diff --git a/compiler/src/dotty/tools/dotc/ast/CheckTrees.scala.disabled b/compiler/src/dotty/tools/dotc/ast/CheckTrees.scala.disabled index 6bf7530faf24..08d409772331 100644 --- a/compiler/src/dotty/tools/dotc/ast/CheckTrees.scala.disabled +++ b/compiler/src/dotty/tools/dotc/ast/CheckTrees.scala.disabled @@ -2,14 +2,14 @@ package dotty tools package dotc package ast -import core._ -import util.Spans._, Types._, Contexts._, Constants._, Names._, Flags._ +import core.* +import util.Spans.*, Types.*, Contexts.*, Constants.*, Names.*,
Flags.* +import SymDenotations.*, Symbols.*, StdNames.*, Annotations.*, Trees.* // TODO: revise, integrate in a checking phase. object CheckTrees { - import tpd._ + import tpd.* def check(p: Boolean, msg: => String = "")(using Context): Unit = assert(p, msg) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index f0580c29e762..36f2d593de1c 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -2,27 +2,29 @@ package dotty.tools package dotc package ast -import core._ -import util.Spans._, Types._, Contexts._, Constants._, Names._, NameOps._, Flags._ -import Symbols._, StdNames._, Trees._, ContextOps._ -import Decorators._, transform.SymUtils._ +import core.* +import util.Spans.*, Types.*, Contexts.*, Constants.*, Names.*, NameOps.*, Flags.* +import Symbols.*, StdNames.*, Trees.*, ContextOps.* +import Decorators.* import Annotations.Annotation -import NameKinds.{UniqueName, EvidenceParamName, DefaultGetterName, WildcardParamName} +import NameKinds.{UniqueName, ContextBoundParamName, ContextFunctionParamName, DefaultGetterName, WildcardParamName} import typer.{Namer, Checking} import util.{Property, SourceFile, SourcePosition, Chars} import config.Feature.{sourceVersion, migrateTo3, enabled} -import config.SourceVersion._ +import config.SourceVersion.* import collection.mutable.ListBuffer -import reporting._ +import reporting.* import annotation.constructorOnly import printing.Formatting.hl import config.Printers +import parsing.Parsers import scala.annotation.internal.sharable +import scala.annotation.threadUnsafe object desugar { - import untpd._ - import DesugarEnums._ + import untpd.* + import DesugarEnums.* /** An attachment for companion modules of classes that have a `derives` clause. 
* The position value indicates the start position of the template of the @@ -142,8 +144,13 @@ object desugar { /** A value definition copied from `vdef` with a tpt typetree derived from it */ def derivedTermParam(vdef: ValDef)(using Context): ValDef = + derivedTermParam(vdef, vdef.unforcedRhs) + + def derivedTermParam(vdef: ValDef, rhs: LazyTree)(using Context): ValDef = cpy.ValDef(vdef)( - tpt = DerivedFromParamTree().withSpan(vdef.tpt.span).watching(vdef)) + tpt = DerivedFromParamTree().withSpan(vdef.tpt.span).watching(vdef), + rhs = rhs + ) // ----- Desugar methods ------------------------------------------------- @@ -202,10 +209,14 @@ object desugar { else vdef1 end valDef - def makeImplicitParameters(tpts: List[Tree], implicitFlag: FlagSet, forPrimaryConstructor: Boolean = false)(using Context): List[ValDef] = - for (tpt <- tpts) yield { + def makeImplicitParameters( + tpts: List[Tree], implicitFlag: FlagSet, + mkParamName: Int => TermName, + forPrimaryConstructor: Boolean = false + )(using Context): List[ValDef] = + for (tpt, i) <- tpts.zipWithIndex yield { val paramFlags: FlagSet = if (forPrimaryConstructor) LocalParamAccessor else Param - val epname = EvidenceParamName.fresh() + val epname = mkParamName(i) ValDef(epname, tpt, EmptyTree).withFlags(paramFlags | implicitFlag) } @@ -239,17 +250,27 @@ object desugar { val DefDef(_, paramss, tpt, rhs) = meth val evidenceParamBuf = ListBuffer[ValDef]() + var seenContextBounds: Int = 0 def desugarContextBounds(rhs: Tree): Tree = rhs match case ContextBounds(tbounds, cxbounds) => val iflag = if sourceVersion.isAtLeast(`future`) then Given else Implicit evidenceParamBuf ++= makeImplicitParameters( - cxbounds, iflag, forPrimaryConstructor = isPrimaryConstructor) + cxbounds, iflag, + // Just like with `makeSyntheticParameter` on nameless parameters of + // using clauses, we only need names that are unique among the + // parameters of the method since shadowing does not affect + // implicit resolution in Scala 3. + mkParamName = i => + val index = seenContextBounds + 1 // Start at 1 like FreshNameCreator. + val ret = ContextBoundParamName(EmptyTermName, index) + seenContextBounds += 1 + ret, + forPrimaryConstructor = isPrimaryConstructor) tbounds case LambdaTypeTree(tparams, body) => cpy.LambdaTypeTree(rhs)(tparams, desugarContextBounds(body)) case _ => rhs - val paramssNoContextBounds = mapParamss(paramss) { tparam => cpy.TypeDef(tparam)(rhs = desugarContextBounds(tparam.rhs)) @@ -363,6 +384,34 @@ object desugar { adaptToExpectedTpt(tree) } + /** Split out the quoted pattern type variable definition from the pattern. + * + * Type variable definitions are all the `type t` defined at the start of a quoted pattern. + * Where name `t` is a pattern type variable name (i.e. lower case letters). 
+ * + * ``` + * type t1; ...; type tn; <pattern> + * ``` + * is split into + * ``` + * (List(<type t1>; ...; <type tn>), <pattern>) + * ``` + */ + def quotedPatternTypeVariables(tree: untpd.Tree)(using Context): (List[untpd.TypeDef], untpd.Tree) = + tree match + case untpd.Block(stats, expr) => + val (untpdTypeVariables, otherStats) = stats.span { + case tdef @ untpd.TypeDef(name, _) => !tdef.isBackquoted && name.isVarPattern + case _ => false + } + val untpdCaseTypeVariables = untpdTypeVariables.asInstanceOf[List[untpd.TypeDef]].map { + tdef => tdef.withMods(tdef.mods | Case) + } + val pattern = if otherStats.isEmpty then expr else untpd.cpy.Block(tree)(otherStats, expr) + (untpdCaseTypeVariables, pattern) + case _ => + (Nil, tree) + /** Add all evidence parameters in `params` as implicit parameters to `meth`. * If the parameters of `meth` end in an implicit parameter list or using clause, * evidence parameters are added in front of that list. Otherwise they are added @@ -380,11 +429,11 @@ object desugar { meth.paramss :+ evidenceParams cpy.DefDef(meth)(paramss = paramss1) - /** The implicit evidence parameters of `meth`, as generated by `desugar.defDef` */ + /** The parameters generated from the contextual bounds of `meth`, as generated by `desugar.defDef` */ private def evidenceParams(meth: DefDef)(using Context): List[ValDef] = meth.paramss.reverse match { case ValDefs(vparams @ (vparam :: _)) :: _ if vparam.mods.isOneOf(GivenOrImplicit) => - vparams.takeWhile(_.name.is(EvidenceParamName)) + vparams.takeWhile(_.name.is(ContextBoundParamName)) case _ => Nil } @@ -394,7 +443,7 @@ object desugar { private def toDefParam(tparam: TypeDef, keepAnnotations: Boolean): TypeDef = { var mods = tparam.rawMods if (!keepAnnotations) mods = mods.withAnnotations(Nil) - tparam.withMods(mods & EmptyFlags | Param) + tparam.withMods(mods & (EmptyFlags | Sealed) | Param) } private def toDefParam(vparam: ValDef, keepAnnotations: Boolean, keepDefault: Boolean): ValDef = { var mods = vparam.rawMods @@ -462,6 +511,7 @@ object desugar { def isNonEnumCase = !isEnumCase && (isCaseClass || isCaseObject) val isValueClass = parents.nonEmpty && isAnyVal(parents.head) // This is not watertight, but `extends AnyVal` will be replaced by `inline` later.
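For context on the `EvidenceParamName` → `ContextBoundParamName` switch above (with indices starting at 1, mirroring `FreshNameCreator`): these are the synthetic names given to the evidence parameters that context bounds desugar into. A rough illustration, where the `evidence$N` spelling is assumed rather than taken from this patch:

```scala
// As written by the user:
def max[T: Ordering](xs: List[T]): T = xs.max

// Roughly what the desugaring produces (synthetic name assumed):
def maxDesugared[T](xs: List[T])(using evidence$1: Ordering[T]): T =
  xs.max(using evidence$1)
```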
+ val caseClassInScala2Library = isCaseClass && ctx.settings.YcompileScala2Library.value val originalTparams = constr1.leadingTypeParams val originalVparamss = asTermOnly(constr1.trailingParamss) @@ -500,8 +550,11 @@ object desugar { constrTparams.zipWithConserve(impliedTparams)((tparam, impliedParam) => derivedTypeParam(tparam).withAnnotations(impliedParam.mods.annotations)) val derivedVparamss = - constrVparamss.nestedMap(vparam => - derivedTermParam(vparam).withAnnotations(Nil)) + constrVparamss.nestedMap: vparam => + val derived = + if ctx.compilationUnit.isJava then derivedTermParam(vparam, Parsers.unimplementedExpr) + else derivedTermParam(vparam) + derived.withAnnotations(Nil) val constr = cpy.DefDef(constr1)(paramss = joinParams(constrTparams, constrVparamss)) @@ -621,7 +674,8 @@ object desugar { val nu = vparamss.foldLeft(makeNew(classTypeRef)) { (nu, vparams) => val app = Apply(nu, vparams.map(refOfDef)) vparams match { - case vparam :: _ if vparam.mods.is(Given) => app.setApplyKind(ApplyKind.Using) + case vparam :: _ if vparam.mods.is(Given) || vparam.name.is(ContextBoundParamName) => + app.setApplyKind(ApplyKind.Using) case _ => app } } @@ -640,18 +694,20 @@ object desugar { DefDef(name, Nil, tpt, rhs).withMods(synthetic) def productElemMeths = - val caseParams = derivedVparamss.head.toArray - val selectorNamesInBody = normalizedBody.collect { - case vdef: ValDef if vdef.name.isSelectorName => - vdef.name - case ddef: DefDef if ddef.name.isSelectorName && ddef.paramss.isEmpty => - ddef.name - } - for i <- List.range(0, arity) - selName = nme.selectorName(i) - if (selName ne caseParams(i).name) && !selectorNamesInBody.contains(selName) - yield syntheticProperty(selName, caseParams(i).tpt, - Select(This(EmptyTypeIdent), caseParams(i).name)) + if caseClassInScala2Library then Nil + else + val caseParams = derivedVparamss.head.toArray + val selectorNamesInBody = normalizedBody.collect { + case vdef: ValDef if vdef.name.isSelectorName => + vdef.name + case ddef: DefDef if ddef.name.isSelectorName && ddef.paramss.isEmpty => + ddef.name + } + for i <- List.range(0, arity) + selName = nme.selectorName(i) + if (selName ne caseParams(i).name) && !selectorNamesInBody.contains(selName) + yield syntheticProperty(selName, caseParams(i).tpt, + Select(This(EmptyTypeIdent), caseParams(i).name)) def enumCaseMeths = if isEnumCase then @@ -668,12 +724,14 @@ object desugar { cpy.ValDef(vparam)(rhs = refOfDef(vparam))) val copyRestParamss = derivedVparamss.tail.nestedMap(vparam => cpy.ValDef(vparam)(rhs = EmptyTree)) + var flags = Synthetic | constr1.mods.flags & copiedAccessFlags + if ctx.settings.YcompileScala2Library.value then flags &~= Private DefDef( nme.copy, joinParams(derivedTparams, copyFirstParams :: copyRestParamss), TypeTree(), creatorExpr - ).withMods(Modifiers(Synthetic | constr1.mods.flags & copiedAccessFlags, constr1.mods.privateWithin)) :: Nil + ).withMods(Modifiers(flags, constr1.mods.privateWithin)) :: Nil } } @@ -727,7 +785,9 @@ object desugar { if (mods.is(Abstract)) Nil else { val appMods = - Modifiers(Synthetic | constr1.mods.flags & copiedAccessFlags).withPrivateWithin(constr1.mods.privateWithin) + var flags = Synthetic | constr1.mods.flags & copiedAccessFlags + if ctx.settings.YcompileScala2Library.value then flags &~= Private + Modifiers(flags).withPrivateWithin(constr1.mods.privateWithin) val appParamss = derivedVparamss.nestedZipWithConserve(constrVparamss)((ap, cp) => ap.withMods(ap.mods | (cp.mods.flags & HasDefault))) @@ -735,13 +795,27 @@ object desugar { 
.withMods(appMods) :: Nil } val unapplyMeth = { + def scala2LibCompatUnapplyRhs(unapplyParamName: Name) = + assert(arity <= Definitions.MaxTupleArity, "Unexpected case class with tuple larger than 22: "+ cdef.show) + derivedVparamss.head match + case vparam :: Nil => + Apply(scalaDot(nme.Option), Select(Ident(unapplyParamName), vparam.name)) + case vparams => + val tupleApply = Select(Ident(nme.scala), s"Tuple$arity".toTermName) + val members = vparams.map(vparam => Select(Ident(unapplyParamName), vparam.name)) + Apply(scalaDot(nme.Option), Apply(tupleApply, members)) + val hasRepeatedParam = constrVparamss.head.exists { case ValDef(_, tpt, _) => isRepeated(tpt) } val methName = if (hasRepeatedParam) nme.unapplySeq else nme.unapply val unapplyParam = makeSyntheticParameter(tpt = classTypeRef) - val unapplyRHS = if (arity == 0) Literal(Constant(true)) else Ident(unapplyParam.name) + val unapplyRHS = + if (arity == 0) Literal(Constant(true)) + else if caseClassInScala2Library then scala2LibCompatUnapplyRhs(unapplyParam.name) + else Ident(unapplyParam.name) val unapplyResTp = if (arity == 0) Literal(Constant(true)) else TypeTree() + DefDef( methName, joinParams(derivedTparams, (unapplyParam :: Nil) :: Nil), @@ -756,7 +830,7 @@ object desugar { } else if (companionMembers.nonEmpty || companionDerived.nonEmpty || isEnum) companionDefs(anyRef, companionMembers) - else if (isValueClass) + else if isValueClass && !isObject then companionDefs(anyRef, Nil) else Nil @@ -796,12 +870,13 @@ object desugar { // TODO: drop this once we do not silently insert empty class parameters anymore case paramss => paramss } + val finalFlag = if ctx.settings.YcompileScala2Library.value then EmptyFlags else Final // implicit wrapper is typechecked in same scope as constructor, so // we can reuse the constructor parameters; no derived params are needed. DefDef( className.toTermName, joinParams(constrTparams, defParamss), classTypeRef, creatorExpr) - .withMods(companionMods | mods.flags.toTermFlags & (GivenOrImplicit | Inline) | Final) + .withMods(companionMods | mods.flags.toTermFlags & (GivenOrImplicit | Inline) | finalFlag) .withSpan(cdef.span) :: Nil } @@ -966,7 +1041,7 @@ object desugar { * if the type has a pattern variable name */ def quotedPatternTypeDef(tree: TypeDef)(using Context): TypeDef = { - assert(ctx.mode.is(Mode.QuotedPattern)) + assert(ctx.mode.isQuotedPattern) if tree.name.isVarPattern && !tree.isBackquoted then val patternTypeAnnot = New(ref(defn.QuotedRuntimePatterns_patternTypeAnnot.typeRef)).withSpan(tree.span) val mods = tree.mods.withAddedAnnotation(patternTypeAnnot) @@ -1020,6 +1095,42 @@ object desugar { name } + /** Strip parens and empty blocks around the body of `tree`. */ + def normalizePolyFunction(tree: PolyFunction)(using Context): PolyFunction = + def stripped(body: Tree): Tree = body match + case Parens(body1) => + stripped(body1) + case Block(Nil, body1) => + stripped(body1) + case _ => body + cpy.PolyFunction(tree)(tree.targs, stripped(tree.body)).asInstanceOf[PolyFunction] + + /** Desugar [T_1, ..., T_M] => (P_1, ..., P_N) => R + * Into scala.PolyFunction { def apply[T_1, ..., T_M](x$1: P_1, ..., x$N: P_N): R } + */ + def makePolyFunctionType(tree: PolyFunction)(using Context): RefinedTypeTree = + val PolyFunction(tparams: List[untpd.TypeDef] @unchecked, fun @ untpd.Function(vparamTypes, res)) = tree: @unchecked + val paramFlags = fun match + case fun: FunctionWithMods => + // TODO: make use of this in the desugaring when pureFuns is enabled. 
* // val isImpure = funFlags.is(Impure) + + // Function flags to be propagated to each parameter in the desugared method type. + val givenFlag = fun.mods.flags.toTermFlags & Given + fun.erasedParams.map(isErased => if isErased then givenFlag | Erased else givenFlag) + case _ => + vparamTypes.map(_ => EmptyFlags) + + val vparams = vparamTypes.lazyZip(paramFlags).zipWithIndex.map { + case ((p: ValDef, paramFlags), n) => p.withAddedFlags(paramFlags) + case ((p, paramFlags), n) => makeSyntheticParameter(n + 1, p).withAddedFlags(paramFlags) + }.toList + + RefinedTypeTree(ref(defn.PolyFunctionType), List( + DefDef(nme.apply, tparams :: vparams :: Nil, res, EmptyTree).withFlags(Synthetic) + )).withSpan(tree.span) + end makePolyFunctionType + /** Invent a name for an anonymous given of type or template `impl`. */ def inventGivenOrExtensionName(impl: Tree)(using Context): SimpleName = val str = impl match @@ -1281,7 +1392,7 @@ object desugar { case tree: ValDef => valDef(tree) case tree: TypeDef => if (tree.isClassDef) classDef(tree) - else if (ctx.mode.is(Mode.QuotedPattern)) quotedPatternTypeDef(tree) + else if (ctx.mode.isQuotedPattern) quotedPatternTypeDef(tree) else tree case tree: DefDef => if (tree.name.isConstructorName) tree // was already handled by enclosing classDef @@ -1413,17 +1524,20 @@ } /** Make closure corresponding to function. - * params => body + * [tparams] => params => body * ==> - * def $anonfun(params) = body + * def $anonfun[tparams](params) = body * Closure($anonfun) */ - def makeClosure(params: List[ValDef], body: Tree, tpt: Tree | Null = null, isContextual: Boolean, span: Span)(using Context): Block = + def makeClosure(tparams: List[TypeDef], vparams: List[ValDef], body: Tree, tpt: Tree | Null = null, span: Span)(using Context): Block = + val paramss: List[ParamClause] = + if tparams.isEmpty then vparams :: Nil + else tparams :: vparams :: Nil Block( - DefDef(nme.ANON_FUN, params :: Nil, if (tpt == null) TypeTree() else tpt, body) + DefDef(nme.ANON_FUN, paramss, if (tpt == null) TypeTree() else tpt, body) .withSpan(span) .withMods(synthetic | Artifact), - Closure(Nil, Ident(nme.ANON_FUN), if (isContextual) ContextualEmptyTree else EmptyTree)) + Closure(Nil, Ident(nme.ANON_FUN), EmptyTree)) /** If `nparams` == 1, expand partial function * @@ -1498,9 +1612,12 @@ object desugar { case vd: ValDef => vd } - def makeContextualFunction(formals: List[Tree], body: Tree, erasedParams: List[Boolean])(using Context): Function = { + def makeContextualFunction(formals: List[Tree], paramNamesOrNil: List[TermName], body: Tree, erasedParams: List[Boolean])(using Context): Function = { val mods = Given - val params = makeImplicitParameters(formals, mods) + val params = makeImplicitParameters(formals, mods, + mkParamName = i => + if paramNamesOrNil.isEmpty then ContextFunctionParamName.fresh() + else paramNamesOrNil(i)) FunctionWithMods(params, body, Modifiers(mods), erasedParams) } @@ -1712,62 +1829,6 @@ object desugar { } } - def makePolyFunction(targs: List[Tree], body: Tree, pt: Type): Tree = body match { - case Parens(body1) => - makePolyFunction(targs, body1, pt) - case Block(Nil, body1) => - makePolyFunction(targs, body1, pt) - case Function(vargs, res) => - assert(targs.nonEmpty) - // TODO: Figure out if we need a `PolyFunctionWithMods` instead.
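`makePolyFunctionType` above (together with the removal of the old `makePolyFunction`, whose deleted body continues below) centralizes the encoding of polymorphic function types as refinements of `scala.PolyFunction`, per the schema in its doc comment. A small concrete instance of that schema:

```scala
// A polymorphic function type as written by the user:
type Id = [T] => T => T

// ...is encoded, per the doc comment of makePolyFunctionType, roughly as:
//   scala.PolyFunction { def apply[T](x$1: T): T }

// A value inhabiting the type, for illustration:
val id: Id = [T] => (x: T) => x
```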
- val mods = body match { - case body: FunctionWithMods => body.mods - case _ => untpd.EmptyModifiers - } - val polyFunctionTpt = ref(defn.PolyFunctionType) - val applyTParams = targs.asInstanceOf[List[TypeDef]] - if (ctx.mode.is(Mode.Type)) { - // Desugar [T_1, ..., T_M] -> (P_1, ..., P_N) => R - // Into scala.PolyFunction { def apply[T_1, ..., T_M](x$1: P_1, ..., x$N: P_N): R } - - val applyVParams = vargs.zipWithIndex.map { - case (p: ValDef, _) => p.withAddedFlags(mods.flags) - case (p, n) => makeSyntheticParameter(n + 1, p).withAddedFlags(mods.flags.toTermFlags) - } - RefinedTypeTree(polyFunctionTpt, List( - DefDef(nme.apply, applyTParams :: applyVParams :: Nil, res, EmptyTree).withFlags(Synthetic) - )) - } - else { - // Desugar [T_1, ..., T_M] -> (x_1: P_1, ..., x_N: P_N) => body - // with pt [S_1, ..., S_M] -> (O_1, ..., O_N) => R - // Into new scala.PolyFunction { def apply[T_1, ..., T_M](x_1: P_1, ..., x_N: P_N): R2 = body } - // where R2 is R, with all references to S_1..S_M replaced with T1..T_M. - - def typeTree(tp: Type) = tp match - case RefinedType(parent, nme.apply, PolyType(_, mt)) if parent.typeSymbol eq defn.PolyFunctionClass => - var bail = false - def mapper(tp: Type, topLevel: Boolean = false): Tree = tp match - case tp: TypeRef => ref(tp) - case tp: TypeParamRef => Ident(applyTParams(tp.paramNum).name) - case AppliedType(tycon, args) => AppliedTypeTree(mapper(tycon), args.map(mapper(_))) - case _ => if topLevel then TypeTree() else { bail = true; genericEmptyTree } - val mapped = mapper(mt.resultType, topLevel = true) - if bail then TypeTree() else mapped - case _ => TypeTree() - - val applyVParams = vargs.asInstanceOf[List[ValDef]] - .map(varg => varg.withAddedFlags(mods.flags | Param)) - New(Template(emptyConstructor, List(polyFunctionTpt), Nil, EmptyValDef, - List(DefDef(nme.apply, applyTParams :: applyVParams :: Nil, typeTree(pt), res)) - )) - } - case _ => - // may happen for erroneous input. An error will already have been reported. - assert(ctx.reporter.errorsReported) - EmptyTree - } - // begin desugar // Special case for `Parens` desugaring: unlike all the desugarings below, @@ -1780,8 +1841,6 @@ object desugar { } val desugared = tree match { - case PolyFunction(targs, body) => - makePolyFunction(targs, body, pt) orElse tree case SymbolLit(str) => Apply( ref(defn.ScalaSymbolClass.companionModule.termRef), @@ -1795,7 +1854,7 @@ object desugar { case ts: Thicket => ts.trees.tail case t => Nil } map { - case Block(Nil, EmptyTree) => Literal(Constant(())) // for s"... ${} ..." + case Block(Nil, EmptyTree) => unitLiteral // for s"... ${} ..." 
case Block(Nil, expr) => expr // important for interpolated string as patterns, see i1773.scala case t => t } @@ -1809,6 +1868,8 @@ object desugar { Annotated( AppliedTypeTree(ref(defn.SeqType), t), New(ref(defn.RepeatedAnnot.typeRef), Nil :: Nil)) + else if op.name == nme.CC_REACH then + Apply(ref(defn.Caps_reachCapability), t :: Nil) else assert(ctx.mode.isExpr || ctx.reporter.errorsReported || ctx.mode.is(Mode.Interactive), ctx.mode) Select(t, op.name) @@ -1823,7 +1884,7 @@ object desugar { val pats1 = if (tpt.isEmpty) pats else pats map (Typed(_, tpt)) flatTree(pats1 map (makePatDef(tree, mods, _, rhs))) case ext: ExtMethods => - Block(List(ext), Literal(Constant(())).withSpan(ext.span)) + Block(List(ext), unitLiteral.withSpan(ext.span)) case f: FunctionWithMods if f.hasErasedParams => makeFunctionWithValDefs(f, pt) } desugared.withSpan(tree.span) diff --git a/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala b/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala index a1c3c0ed0775..7268ec720ce2 100644 --- a/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala +++ b/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala @@ -2,19 +2,19 @@ package dotty.tools package dotc package ast -import core._ -import util.Spans._, Types._, Contexts._, Constants._, Names._, Flags._ -import Symbols._, StdNames._, Trees._ -import Decorators._ +import core.* +import util.Spans.*, Types.*, Contexts.*, Constants.*, Names.*, Flags.* +import Symbols.*, StdNames.*, Trees.* +import Decorators.* import util.{Property, SourceFile} -import typer.ErrorReporting._ +import typer.ErrorReporting.* import transform.SyntheticMembers.ExtendsSingletonMirror import scala.annotation.internal.sharable /** Helper methods to desugar enums */ object DesugarEnums { - import untpd._ + import untpd.* enum CaseKind: case Simple, Object, Class @@ -99,7 +99,7 @@ object DesugarEnums { val clazzOf = TypeApply(ref(defn.Predef_classOf.termRef), tpt :: Nil) val ctag = Apply(TypeApply(ref(defn.ClassTagModule_apply.termRef), tpt :: Nil), clazzOf :: Nil) val apply = Select(ref(defn.ArrayModule.termRef), nme.apply) - Apply(Apply(TypeApply(apply, tpt :: Nil), values), ctag :: Nil) + Apply(Apply(TypeApply(apply, tpt :: Nil), values), ctag :: Nil).setApplyKind(ApplyKind.Using) /** The following lists of definitions for an enum type E and known value cases e_0, ..., e_n: * @@ -126,7 +126,7 @@ object DesugarEnums { val valuesOfBody: Tree = val defaultCase = - val msg = Apply(Select(Literal(Constant("enum case not found: ")), nme.PLUS), Ident(nme.nameDollar)) + val msg = Apply(Select(Literal(Constant(s"enum ${enumClass.fullName} has no case with name: ")), nme.PLUS), Ident(nme.nameDollar)) CaseDef(Ident(nme.WILDCARD), EmptyTree, Throw(New(TypeTree(defn.IllegalArgumentExceptionType), List(msg :: Nil)))) val stringCases = enumValues.map(enumValue => @@ -148,7 +148,8 @@ object DesugarEnums { def valueCtor: List[Tree] = if constraints.requiresCreator then enumValueCreator :: Nil else Nil def fromOrdinal: Tree = def throwArg(ordinal: Tree) = - Throw(New(TypeTree(defn.NoSuchElementExceptionType), List(Select(ordinal, nme.toString_) :: Nil))) + val msg = Apply(Select(Literal(Constant(s"enum ${enumClass.fullName} has no case with ordinal: ")), nme.PLUS), Select(ordinal, nme.toString_)) + Throw(New(TypeTree(defn.NoSuchElementExceptionType), List(msg :: Nil))) if !constraints.cached then fromOrdinalMeth(throwArg) else diff --git a/compiler/src/dotty/tools/dotc/ast/MainProxies.scala b/compiler/src/dotty/tools/dotc/ast/MainProxies.scala index 
c0cf2c0d1b81..8ee75cbf364b 100644 --- a/compiler/src/dotty/tools/dotc/ast/MainProxies.scala +++ b/compiler/src/dotty/tools/dotc/ast/MainProxies.scala @@ -1,10 +1,10 @@ package dotty.tools.dotc package ast -import core._ -import Symbols._, Types._, Contexts._, Decorators._, util.Spans._, Flags._, Constants._ +import core.* +import Symbols.*, Types.*, Contexts.*, Decorators.*, util.Spans.*, Flags.*, Constants.* import StdNames.{nme, tpnme} -import ast.Trees._ +import ast.Trees.* import Names.Name import Comments.Comment import NameKinds.DefaultGetterName @@ -24,7 +24,7 @@ object MainProxies { * * would be translated to something like * - * import CommandLineParser._ + * import CommandLineParser.* * class f { * @static def main(args: Array[String]): Unit = * try @@ -36,7 +36,7 @@ object MainProxies { * } */ private def mainProxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { - import tpd._ + import tpd.* def mainMethods(stats: List[Tree]): List[Symbol] = stats.flatMap { case stat: DefDef if stat.symbol.hasAnnotation(defn.MainAnnot) => stat.symbol :: Nil @@ -48,7 +48,7 @@ object MainProxies { mainMethods(stats).flatMap(mainProxy) } - import untpd._ + import untpd.* private def mainProxy(mainFun: Symbol)(using Context): List[TypeDef] = { val mainAnnotSpan = mainFun.getAnnotation(defn.MainAnnot).get.tree.span def pos = mainFun.sourcePos @@ -172,7 +172,7 @@ object MainProxies { * } */ private def mainAnnotationProxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { - import tpd._ + import tpd.* /** * Computes the symbols of the default values of the function. Since they cannot be inferred anymore at this @@ -405,7 +405,7 @@ object MainProxies { /** A class responsible for extracting the docstrings of a method. */ private class Documentation(docComment: Option[Comment]): - import util.CommentParsing._ + import util.CommentParsing.* /** The main part of the documentation. 
*/ lazy val mainDoc: String = _mainDoc diff --git a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala index ace396d1e583..2960af8fcdec 100644 --- a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala +++ b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala @@ -1,9 +1,9 @@ package dotty.tools.dotc package ast -import core.Contexts._ -import core.Decorators._ -import util.Spans._ +import core.Contexts.* +import core.Decorators.* +import util.Spans.* import Trees.{MemberDef, DefTree, WithLazyFields} import dotty.tools.dotc.core.Types.AnnotatedType import dotty.tools.dotc.core.Types.ImportType diff --git a/compiler/src/dotty/tools/dotc/ast/Positioned.scala b/compiler/src/dotty/tools/dotc/ast/Positioned.scala index dd783be7a9e1..d8017783f47f 100644 --- a/compiler/src/dotty/tools/dotc/ast/Positioned.scala +++ b/compiler/src/dotty/tools/dotc/ast/Positioned.scala @@ -2,23 +2,25 @@ package dotty.tools package dotc package ast -import util.Spans._ +import util.Spans.* import util.{SourceFile, SourcePosition, SrcPos} -import core.Contexts._ -import core.Decorators._ -import core.NameOps._ +import core.Contexts.* +import core.Decorators.* +import core.NameOps.* import core.Flags.{JavaDefined, ExtensionMethod} import core.StdNames.nme import ast.Trees.mods import annotation.constructorOnly import annotation.internal.sharable +import scala.compiletime.uninitialized + /** A base class for things that have positions (currently: modifiers and trees) */ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends SrcPos, Product, Cloneable { import Positioned.{ids, nextId, debugId} - private var mySpan: Span = _ + private var mySpan: Span = uninitialized private var mySource: SourceFile = src @@ -163,7 +165,7 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src * - If item is a non-empty tree, it has a position */ def checkPos(nonOverlapping: Boolean)(using Context): Unit = try { - import untpd._ + import untpd.* val last = LastPosRef() def check(p: Any): Unit = p match { case p: Positioned => diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 2d335d1ed380..5ded0e1262e4 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -2,13 +2,13 @@ package dotty.tools package dotc package ast -import core._ -import Flags._, Trees._, Types._, Contexts._ -import Names._, StdNames._, NameOps._, Symbols._ +import core.* +import Flags.*, Trees.*, Types.*, Contexts.* +import Names.*, StdNames.*, NameOps.*, Symbols.* import typer.ConstFold import reporting.trace -import dotty.tools.dotc.transform.SymUtils._ -import Decorators._ + +import Decorators.* import Constants.Constant import scala.collection.mutable @@ -242,7 +242,7 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => /** Does this list contain a named argument tree? */ def hasNamedArg(args: List[Any]): Boolean = args exists isNamedArg - val isNamedArg: Any => Boolean = (arg: Any) => arg.isInstanceOf[Trees.NamedArg[_]] + val isNamedArg: Any => Boolean = (arg: Any) => arg.isInstanceOf[Trees.NamedArg[?]] /** Is this pattern node a catch-all (wildcard or variable) pattern? */ def isDefaultCase(cdef: CaseDef): Boolean = cdef match { @@ -330,6 +330,9 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => case _ => p(tree) } + /** The tree stripped of the possibly nested applications (term and type). 
+ * The original tree if it's not an application. + */ def appliedCore(tree: Tree): Tree = tree match { case Apply(fn, _) => appliedCore(fn) case TypeApply(fn, _) => appliedCore(fn) case _ => tree } @@ -376,7 +379,7 @@ } trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] => - import untpd._ + import untpd.* /** The underlying tree when stripping any TypedSplice or Parens nodes */ override def unsplice(tree: Tree): Tree = tree match { @@ -417,10 +420,7 @@ trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] case Closure(_, meth, _) => true case Block(Nil, expr) => isContextualClosure(expr) case Block(DefDef(nme.ANON_FUN, params :: _, _, _) :: Nil, cl: Closure) => - if params.isEmpty then - cl.tpt.eq(untpd.ContextualEmptyTree) || defn.isContextFunctionType(cl.tpt.typeOpt) - else - isUsingClause(params) + isUsingClause(params) case _ => false } @@ -484,8 +484,8 @@ } trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => - import TreeInfo._ - import tpd._ + import TreeInfo.* + import tpd.* /** The purity level of this statement. * @return Pure if statement has no side effects @@ -799,20 +799,52 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => } } + /** An extractor for the method of a closure contained in the block of the closure, + * possibly with type ascriptions. + */ + object possiblyTypedClosureDef: + def unapply(tree: Tree)(using Context): Option[DefDef] = tree match + case Typed(expr, _) => unapply(expr) + case _ => closureDef.unapply(tree) + /** If tree is a closure, its body, otherwise tree itself */ def closureBody(tree: Tree)(using Context): Tree = tree match { case closureDef(meth) => meth.rhs case _ => tree } + /** Is `mdef` an eta-expansion of a method reference? To recognize this, we use + * the following criterion: A method definition is an eta expansion if + * it contains at least one term parameter, the parameter has a zero-extent span, + * and the right-hand side is either an application or a closure with + * an anonymous method that is itself characterized as an eta expansion. + */ + def isEtaExpansion(mdef: DefDef)(using Context): Boolean = + !rhsOfEtaExpansion(mdef).isEmpty + + def rhsOfEtaExpansion(mdef: DefDef)(using Context): Tree = mdef.paramss match + case (param :: _) :: _ if param.asInstanceOf[Tree].span.isZeroExtent => + mdef.rhs match + case rhs: Apply => rhs + case closureDef(mdef1) => rhsOfEtaExpansion(mdef1) + case _ => EmptyTree + case _ => EmptyTree + /** The variables defined by a pattern, in reverse order of their appearance.
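As an aside on the `isEtaExpansion` criterion above, this is what an eta-expanded method reference looks like at the source level (an illustration only, not compiler code; the synthetic parameter names are invented):

```scala
def add(x: Int, y: Int): Int = x + y

// `val f = add` is eta-expanded by the compiler to roughly:
val f: (Int, Int) => Int = (x$1, x$2) => add(x$1, x$2)
// The synthetic parameters x$1 and x$2 carry zero-extent spans, and the
// right-hand side is an application, so the generated anonymous method
// satisfies the criterion described in the doc comment.
```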
*/ def patVars(tree: Tree)(using Context): List[Symbol] = { - val acc = new TreeAccumulator[List[Symbol]] { + val acc = new TreeAccumulator[List[Symbol]] { outer => def apply(syms: List[Symbol], tree: Tree)(using Context) = tree match { case Bind(_, body) => apply(tree.symbol :: syms, body) case Annotated(tree, id @ Ident(tpnme.BOUNDTYPE_ANNOT)) => apply(id.symbol :: syms, tree) + case QuotePattern(bindings, body, _) => quotePatVars(bindings.map(_.symbol) ::: syms, body) case _ => foldOver(syms, tree) } + private object quotePatVars extends TreeAccumulator[List[Symbol]] { + def apply(syms: List[Symbol], tree: Tree)(using Context) = tree match { + case SplicePattern(pat, _) => outer.apply(syms, pat) + case _ => foldOver(syms, tree) + } + } } acc(Nil, tree) } @@ -947,6 +979,10 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => def isStructuralTermSelectOrApply(tree: Tree)(using Context): Boolean = { def isStructuralTermSelect(tree: Select) = def hasRefinement(qualtpe: Type): Boolean = qualtpe.dealias match + case defn.FunctionTypeOfMethod(_) => + false + case tp: MatchType => + hasRefinement(tp.tryNormalize) case RefinedType(parent, rname, rinfo) => rname == tree.name || hasRefinement(parent) case tp: TypeProxy => @@ -959,20 +995,12 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => false !tree.symbol.exists && tree.isTerm - && { - val qualType = tree.qualifier.tpe - hasRefinement(qualType) && !defn.isRefinedFunctionType(qualType) - } - def loop(tree: Tree): Boolean = tree match - case TypeApply(fun, _) => - loop(fun) - case Apply(fun, _) => - loop(fun) + && hasRefinement(tree.qualifier.tpe) + funPart(tree) match case tree: Select => isStructuralTermSelect(tree) case _ => false - loop(tree) } /** Return a pair consisting of (supercall, rest) @@ -1021,7 +1049,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => def assertAllPositioned(tree: Tree)(using Context): Unit = tree.foreachSubTree { - case t: WithoutTypeOrPos[_] => + case t: WithoutTypeOrPos[?] => case t => assert(t.span.exists, i"$t") } @@ -1048,6 +1076,21 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => if tree.symbol.isTypeSplice then Some(tree.qualifier) else None } + extension (tree: tpd.Quote) + /** Type of the quoted expression as seen from outside the quote */ + def bodyType(using Context): Type = + val quoteType = tree.tpe // `Quotes ?=> Expr[T]` or `Quotes ?=> Type[T]` + val exprType = quoteType.argInfos.last // `Expr[T]` or `Type[T]` + exprType.argInfos.head // T + end extension + + extension (tree: tpd.QuotePattern) + /** Type of the quoted pattern */ + def bodyType(using Context): Type = + val quoteType = tree.tpe // `Expr[T]` or `Type[T]` + quoteType.argInfos.head // T + end extension + /** Extractor for not-null assertions. * A not-null assertion for reference `x` has the form `x.$asInstanceOf$[x.type & T]`. 
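The new `QuotePattern` case in `patVars` covers variables bound inside quoted patterns. A minimal user-level illustration (standard `scala.quoted` pattern syntax; the method name is hypothetical):

```scala
import scala.quoted.*

def describe(e: Expr[Any])(using Quotes): String = e match
  case '{ type t; $xs: List[t] } =>
    // This pattern binds the type variable `t` and the term `xs`;
    // these are the kinds of variables patVars now collects from a QuotePattern.
    "a list"
  case _ => "something else"
```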
*/ diff --git a/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala b/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala index ae674c25dc3d..5603a422a77b 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala @@ -1,11 +1,11 @@ package dotty.tools.dotc package ast -import Trees._ -import core.Contexts._ +import Trees.* +import core.Contexts.* import core.ContextOps.enter -import core.Flags._ -import core.Symbols._ +import core.Flags.* +import core.Symbols.* import core.TypeError /** A TreeMap that maintains the necessary infrastructure to support @@ -14,7 +14,7 @@ import core.TypeError * This includes implicits defined in scope as well as imported implicits. */ class TreeMapWithImplicits extends tpd.TreeMapWithPreciseStatContexts { - import tpd._ + import tpd.* def transformSelf(vd: ValDef)(using Context): ValDef = cpy.ValDef(vd)(tpt = transform(vd.tpt)) diff --git a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala index 955892b2ae22..668daea5f1fd 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala @@ -2,11 +2,11 @@ package dotty.tools package dotc package ast -import core._ -import Types._, Contexts._, Flags._ -import Symbols._, Annotations._, Trees._, Symbols._, Constants.Constant -import Decorators._ -import dotty.tools.dotc.transform.SymUtils._ +import core.* +import Types.*, Contexts.*, Flags.* +import Symbols.*, Annotations.*, Trees.*, Symbols.*, Constants.Constant +import Decorators.* + /** A map that applies three functions and a substitution together to a tree and * makes sure they are coordinated so that the result is well-typed.
The functions are @@ -39,7 +39,7 @@ class TreeTypeMap( val substFrom: List[Symbol] = Nil, val substTo: List[Symbol] = Nil, cpy: tpd.TreeCopier = tpd.cpy)(using Context) extends tpd.TreeMap(cpy) { - import tpd._ + import tpd.* def copy( typeMap: Type => Type, @@ -105,7 +105,8 @@ class TreeTypeMap( tree1.withType(mapType(tree1.tpe)) match { case id: Ident => if needsSelect(id.tpe) then - ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span) + try ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span) + catch case ex: TypeError => super.transform(id) else super.transform(id) case sel: Select => diff --git a/compiler/src/dotty/tools/dotc/ast/Trees.scala b/compiler/src/dotty/tools/dotc/ast/Trees.scala index 54c15b9909fa..4ec41b95a90b 100644 --- a/compiler/src/dotty/tools/dotc/ast/Trees.scala +++ b/compiler/src/dotty/tools/dotc/ast/Trees.scala @@ -2,10 +2,10 @@ package dotty.tools package dotc package ast -import core._ -import Types._, Names._, NameOps._, Flags._, util.Spans._, Contexts._, Constants._ +import core.* +import Types.*, Names.*, NameOps.*, Flags.*, util.Spans.*, Contexts.*, Constants.* import typer.{ ConstFold, ProtoTypes } -import SymDenotations._, Symbols._, Denotations._, StdNames._, Comments._ +import SymDenotations.*, Symbols.*, Denotations.*, StdNames.*, Comments.* import collection.mutable.ListBuffer import printing.Printer import printing.Texts.Text @@ -16,7 +16,7 @@ import annotation.internal.sharable import annotation.unchecked.uncheckedVariance import annotation.constructorOnly import compiletime.uninitialized -import Decorators._ +import Decorators.* import staging.StagingLevel.* object Trees { @@ -32,6 +32,8 @@ object Trees { /** Property key for backquoted identifiers and definitions */ val Backquoted: Property.StickyKey[Unit] = Property.StickyKey() + val SyntheticUnit: Property.StickyKey[Unit] = Property.StickyKey() + /** Trees take a parameter indicating what the type of their `tpe` field * is. Two choices: `Type` or `Untyped`. * Untyped trees have type `Tree[Untyped]`. @@ -455,7 +457,7 @@ object Trees { val point = span.point if name.toTermName == nme.ERROR then Span(point) - else if qualifier.span.start > span.start then // right associative + else if qualifier.span.exists && qualifier.span.start > span.point then // right associative val realName = name.stripModuleClassSuffix.lastPart Span(span.start, span.start + realName.length, point) else @@ -673,6 +675,9 @@ object Trees { */ case class Inlined[+T <: Untyped] private[ast] (call: tpd.Tree, bindings: List[MemberDef[T]], expansion: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { + + def inlinedFromOuterScope: Boolean = call.isEmpty + type ThisTree[+T <: Untyped] = Inlined[T] override def isTerm = expansion.isTerm override def isType = expansion.isType @@ -686,9 +691,9 @@ object Trees { * phases. After `pickleQuotes` phase, the only quotes that exist are in `inline` * methods. These are dropped when we remove the inline method implementations. * - * Type quotes `'[body]` from the parser are desugared into quote patterns (using a `Type.of[T]]`) - * when type checking. TASTy files will not contain type quotes. Type quotes are used again - * in the `staging` phase to represent the reification of `Type.of[T]]`. + * Type quotes `'[body]` from the parser are typed into `QuotePattern`s when type checking. + * TASTy files will not contain type quotes. Type quotes are used again in the `staging` + * phase to represent the reification of `Type.of[T]`.
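For readers unfamiliar with type quotes, the `'[body]` syntax referred to above appears in user code as follows (a standard `scala.quoted` sketch, independent of this change):

```scala
import scala.quoted.*

def describe[T: Type](using Quotes): String = Type.of[T] match
  case '[Int]     => "an Int"
  case '[List[t]] => "a list"    // `t` is a quote pattern type variable
  case _          => "something else"
```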
* * Type tags `tags` are always empty before the `staging` phase. Tags for stage inconsistent * types are added in the `staging` phase to level 0 quotes. Tags for types that refer to * definitions in an outer quote are added in the `pickleQuotes` phase @@ -704,12 +709,6 @@ object Trees { /** Is this a type quote `'[tpe]` */ def isTypeQuote = body.isType - /** Type of the quoted expression as seen from outside the quote */ - def bodyType(using Context): Type = - val quoteType = typeOpt // `Quotes ?=> Expr[T]` or `Quotes ?=> Type[T]` - val exprType = quoteType.argInfos.last // `Expr[T]` or `Type[T]` - exprType.argInfos.head // T - /** Set the type of the body of the quote */ def withBodyType(tpe: Type)(using Context): Quote[Type] = val exprType = // `Expr[T]` or `Type[T]` @@ -737,13 +736,30 @@ object Trees { type ThisTree[+T <: Untyped] = Splice[T] } + /** A tree representing a quote pattern `'{ type binding1; ...; body }` or `'[ type binding1; ...; body ]`. + * `QuotePattern`s are created by the type checker when typing an `untpd.Quote` in a pattern context. + * + * `QuotePattern`s are encoded into `unapply`s in the `staging` phase. + * + * The `bindings` contain the list of quote pattern type variable definitions (`Bind`s) in the order in + * which they are defined in the source. + * + * @param bindings Type variable definitions (`Bind` tree) + * @param body Quoted pattern (without type variable definitions) + * @param quotes A reference to the given `Quotes` instance in scope + */ + case class QuotePattern[+T <: Untyped] private[ast] (bindings: List[Tree[T]], body: Tree[T], quotes: Tree[T])(implicit @constructorOnly src: SourceFile) + extends PatternTree[T] { + type ThisTree[+T <: Untyped] = QuotePattern[T] + } + /** A tree representing a pattern splice `${ pattern }`, `$ident` or `$ident(args*)` in a quote pattern. * * Parser will only create `${ pattern }` and `$ident`, hence they will not have args. * While typing `$ident(args*)`, the args are identified and desugared into a `SplicePattern` * containing them. * - * SplicePattern are removed after typing the pattern and are not present in TASTy. + * `SplicePattern` can only be contained within a `QuotePattern`. * * @param body The tree that was spliced * @param args The arguments of the splice (the HOAS arguments) @@ -756,14 +772,14 @@ object Trees { /** A type tree that represents an existing or inferred type */ case class TypeTree[+T <: Untyped]()(implicit @constructorOnly src: SourceFile) extends DenotingTree[T] with TypTree[T] { - type ThisTree[+T <: Untyped] = TypeTree[T] + type ThisTree[+T <: Untyped] <: TypeTree[T] override def isEmpty: Boolean = !hasType override def toString: String = s"TypeTree${if (hasType) s"[$typeOpt]" else ""}" } /** Tree that replaces a level 1 splice in pickled (level 0) quotes. - * It is only used when picking quotes (will never be in a TASTy file). + * It is only used when pickling quotes (will never be in a TASTy file). * * @param isTerm If this hole is a term, otherwise it is a type hole. * @param idx The index of the hole in its enclosing level 0 quote. @@ -780,7 +796,8 @@ object Trees { * - as a (result-)type of an inferred ValDef or DefDef. * Every TypeVar is created as the type of one InferredTypeTree.
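Since the HOAS `args` of a `SplicePattern` may be unfamiliar, here is the user-facing shape of such a pattern (standard quoted-pattern code following the published metaprogramming examples, not compiler internals):

```scala
import scala.quoted.*

def inlineLet(e: Expr[Int])(using Quotes): Expr[Int] = e match
  case '{ val x: Int = $init; $body(x): Int } =>
    // `$body(x)` is a HOAS splice: `body` is bound to an Expr of function
    // type and is re-applied to the extracted binding via betaReduce.
    Expr.betaReduce('{ $body($init) })
  case _ => e
```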
*/ - class InferredTypeTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends TypeTree[T] + class InferredTypeTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends TypeTree[T]: + type ThisTree[+T <: Untyped] <: InferredTypeTree[T] /** ref.type */ case class SingletonTypeTree[+T <: Untyped] private[ast] (ref: Tree[T])(implicit @constructorOnly src: SourceFile) @@ -915,11 +932,11 @@ object Trees { def rhs(using Context): Tree[T] = { forceFields(); preRhs.asInstanceOf[Tree[T]] } def leadingTypeParams(using Context): List[TypeDef[T]] = paramss match - case (tparams @ (tparam: TypeDef[_]) :: _) :: _ => tparams.asInstanceOf[List[TypeDef[T]]] + case (tparams @ (tparam: TypeDef[?]) :: _) :: _ => tparams.asInstanceOf[List[TypeDef[T]]] case _ => Nil def trailingParamss(using Context): List[ParamClause[T]] = paramss match - case ((tparam: TypeDef[_]) :: _) :: paramss1 => paramss1 + case ((tparam: TypeDef[?]) :: _) :: paramss1 => paramss1 case _ => paramss def termParamss(using Context): List[List[ValDef[T]]] = @@ -1163,6 +1180,7 @@ object Trees { type Inlined = Trees.Inlined[T] type Quote = Trees.Quote[T] type Splice = Trees.Splice[T] + type QuotePattern = Trees.QuotePattern[T] type SplicePattern = Trees.SplicePattern[T] type TypeTree = Trees.TypeTree[T] type InferredTypeTree = Trees.InferredTypeTree[T] @@ -1192,7 +1210,6 @@ object Trees { @sharable val EmptyTree: Thicket = genericEmptyTree @sharable val EmptyValDef: ValDef = genericEmptyValDef - @sharable val ContextualEmptyTree: Thicket = new EmptyTree() // an empty tree marking a contextual closure // ----- Auxiliary creation methods ------------------ @@ -1330,10 +1347,17 @@ object Trees { case tree: SeqLiteral if (elems eq tree.elems) && (elemtpt eq tree.elemtpt) => tree case _ => finalize(tree, untpd.SeqLiteral(elems, elemtpt)(sourceFile(tree))) } - def Inlined(tree: Tree)(call: tpd.Tree, bindings: List[MemberDef], expansion: Tree)(using Context): Inlined = tree match { - case tree: Inlined if (call eq tree.call) && (bindings eq tree.bindings) && (expansion eq tree.expansion) => tree - case _ => finalize(tree, untpd.Inlined(call, bindings, expansion)(sourceFile(tree))) - } + // Positions of trees are automatically pushed down except when we reach an Inlined tree. Therefore, we + // make sure the new expansion has a position by copying the one of the original Inlined tree. + def Inlined(tree: Inlined)(call: tpd.Tree, bindings: List[MemberDef], expansion: Tree)(using Context): Inlined = + if (call eq tree.call) && (bindings eq tree.bindings) && (expansion eq tree.expansion) then tree + else + // Copy the span from the original Inlined tree if the new expansion doesn't have a span. 
+ val expansionWithSpan = + if expansion.span.exists then expansion + else expansion.withSpan(tree.expansion.span) + finalize(tree, untpd.Inlined(call, bindings, expansionWithSpan)(sourceFile(tree))) + def Quote(tree: Tree)(body: Tree, tags: List[Tree])(using Context): Quote = tree match { case tree: Quote if (body eq tree.body) && (tags eq tree.tags) => tree case _ => finalize(tree, untpd.Quote(body, tags)(sourceFile(tree))) } @@ -1342,6 +1366,10 @@ case tree: Splice if (expr eq tree.expr) => tree case _ => finalize(tree, untpd.Splice(expr)(sourceFile(tree))) } + def QuotePattern(tree: Tree)(bindings: List[Tree], body: Tree, quotes: Tree)(using Context): QuotePattern = tree match { + case tree: QuotePattern if (bindings eq tree.bindings) && (body eq tree.body) && (quotes eq tree.quotes) => tree + case _ => finalize(tree, untpd.QuotePattern(bindings, body, quotes)(sourceFile(tree))) + } def SplicePattern(tree: Tree)(body: Tree, args: List[Tree])(using Context): SplicePattern = tree match { case tree: SplicePattern if (body eq tree.body) && (args eq tree.args) => tree case _ => finalize(tree, untpd.SplicePattern(body, args)(sourceFile(tree))) } @@ -1464,7 +1492,7 @@ * innermost enclosing call for which the inlined version is currently * processed. */ - protected def inlineContext(call: tpd.Tree)(using Context): Context = ctx + protected def inlineContext(tree: Inlined)(using Context): Context = ctx /** The context to use when mapping or accumulating over a tree */ def localCtx(tree: Tree)(using Context): Context @@ -1534,8 +1562,8 @@ cpy.Try(tree)(transform(block), transformSub(cases), transform(finalizer)) case SeqLiteral(elems, elemtpt) => cpy.SeqLiteral(tree)(transform(elems), transform(elemtpt)) - case Inlined(call, bindings, expansion) => - cpy.Inlined(tree)(call, transformSub(bindings), transform(expansion)(using inlineContext(call))) + case tree @ Inlined(call, bindings, expansion) => + cpy.Inlined(tree)(call, transformSub(bindings), transform(expansion)(using inlineContext(tree))) case TypeTree() => tree case SingletonTypeTree(ref) => @@ -1571,6 +1599,9 @@ case tree @ TypeDef(name, rhs) => cpy.TypeDef(tree)(name, transform(rhs)) case tree @ Template(constr, parents, self, _) if tree.derived.isEmpty => + // Currently we do not have cases where `tree.derived` is expected to contain trees for typed trees. + // If it does, we will fall into `transformMoreCases` and throw an exception there. + // In the future we might keep the `derived` clause after typing; in that case we might want to start handling it here.
cpy.Template(tree)(transformSub(constr), transform(tree.parents), Nil, transformSub(self), transformStats(tree.body, tree.symbol)) case Import(expr, selectors) => cpy.Import(tree)(transform(expr), selectors) @@ -1587,6 +1618,8 @@ object Trees { cpy.Quote(tree)(transform(body)(using quoteContext), transform(tags)) case tree @ Splice(expr) => cpy.Splice(tree)(transform(expr)(using spliceContext)) + case tree @ QuotePattern(bindings, body, quotes) => + cpy.QuotePattern(tree)(transform(bindings), transform(body)(using quoteContext), transform(quotes)) case tree @ SplicePattern(body, args) => cpy.SplicePattern(tree)(transform(body)(using spliceContext), transform(args)) case tree @ Hole(isTerm, idx, args, content) => @@ -1676,8 +1709,8 @@ object Trees { this(this(this(x, block), handler), finalizer) case SeqLiteral(elems, elemtpt) => this(this(x, elems), elemtpt) - case Inlined(call, bindings, expansion) => - this(this(x, bindings), expansion)(using inlineContext(call)) + case tree @ Inlined(call, bindings, expansion) => + this(this(x, bindings), expansion)(using inlineContext(tree)) case TypeTree() => x case SingletonTypeTree(ref) => @@ -1734,6 +1767,8 @@ object Trees { this(this(x, body)(using quoteContext), tags) case Splice(expr) => this(x, expr)(using spliceContext) + case QuotePattern(bindings, body, quotes) => + this(this(this(x, bindings), body)(using quoteContext), quotes) case SplicePattern(body, args) => this(this(x, body)(using spliceContext), args) case Hole(_, _, args, content) => diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index 76e16cc00a90..44fa4e9b22fd 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -4,13 +4,11 @@ package ast import dotty.tools.dotc.transform.{ExplicitOuter, Erasure} import typer.ProtoTypes -import transform.SymUtils._ -import transform.TypeUtils._ -import core._ +import core.* import Scopes.newScope -import util.Spans._, Types._, Contexts._, Constants._, Names._, Flags._, NameOps._ -import Symbols._, StdNames._, Annotations._, Trees._, Symbols._ -import Decorators._, DenotTransformers._ +import util.Spans.*, Types.*, Contexts.*, Constants.*, Names.*, Flags.*, NameOps.* +import Symbols.*, StdNames.*, Annotations.*, Trees.*, Symbols.* +import Decorators.*, DenotTransformers.* import collection.{immutable, mutable} import util.{Property, SourceFile} import NameKinds.{TempResultName, OuterSelectName} @@ -18,6 +16,7 @@ import typer.ConstFold import scala.annotation.tailrec import scala.collection.mutable.ListBuffer +import scala.compiletime.uninitialized /** Some creators for typed trees */ object tpd extends Trees.Instance[Type] with TypedTreeInfo { @@ -57,14 +56,14 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { case _: RefTree | _: GenericApply => ta.assignType(untpd.TypeApply(fn, args), fn, args) case _ => - assert(ctx.reporter.errorsReported) + assert(ctx.reporter.errorsReported, s"unexpected tree for type application: $fn") ta.assignType(untpd.TypeApply(fn, args), fn, args) def Literal(const: Constant)(using Context): Literal = ta.assignType(untpd.Literal(const)) def unitLiteral(using Context): Literal = - Literal(Constant(())) + Literal(Constant(())).withAttachment(SyntheticUnit, ()) def nullLiteral(using Context): Literal = Literal(Constant(null)) @@ -173,6 +172,9 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def Quote(body: Tree, tags: List[Tree])(using Context): Quote = untpd.Quote(body, 
tags).withBodyType(body.tpe) + def QuotePattern(bindings: List[Tree], body: Tree, quotes: Tree, proto: Type)(using Context): QuotePattern = + ta.assignType(untpd.QuotePattern(bindings, body, quotes), proto) + def Splice(expr: Tree, tpe: Type)(using Context): Splice = untpd.Splice(expr).withType(tpe) @@ -346,24 +348,27 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { /** An anonymous class * - * new parents { forwarders } + * new parents { termForwarders; typeAliases } + * + * @param parents a non-empty list of class types + * @param termForwarders a non-empty list of forwarding definitions specified by their name and the definition they forward to. + * @param typeMembers a possibly-empty list of type members specified by their name and their right hand side. * - * where `forwarders` contains forwarders for all functions in `fns`. - * @param parents a non-empty list of class types - * @param fns a non-empty of functions for which forwarders should be defined in the class. - * The class has the same owner as the first function in `fns`. - * Its position is the union of all functions in `fns`. + * The class has the same owner as the first forwarded symbol in `termForwarders`. + * Its position is the union of the spans of all symbols in `termForwarders`. */ - def AnonClass(parents: List[Type], fns: List[TermSymbol], methNames: List[TermName])(using Context): Block = { - AnonClass(fns.head.owner, parents, fns.map(_.span).reduceLeft(_ union _)) { cls => - def forwarder(fn: TermSymbol, name: TermName) = { + def AnonClass(parents: List[Type], termForwarders: List[(TermName, TermSymbol)], + typeMembers: List[(TypeName, TypeBounds)] = Nil)(using Context): Block = { + AnonClass(termForwarders.head._2.owner, parents, termForwarders.map(_._2.span).reduceLeft(_ union _)) { cls => + def forwarder(name: TermName, fn: TermSymbol) = { val fwdMeth = fn.copy(cls, name, Synthetic | Method | Final).entered.asTerm for overridden <- fwdMeth.allOverriddenSymbols do if overridden.is(Extension) then fwdMeth.setFlag(Extension) if !overridden.is(Deferred) then fwdMeth.setFlag(Override) DefDef(fwdMeth, ref(fn).appliedToArgss(_)) } - fns.lazyZip(methNames).map(forwarder) + termForwarders.map((name, sym) => forwarder(name, sym)) ++ + typeMembers.map((name, info) => TypeDef(newSymbol(cls, name, Synthetic, info).entered)) } } @@ -407,7 +412,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { case pre: ThisType => tp.isType || pre.cls.isStaticOwner || - tp.symbol.isParamOrAccessor && !pre.cls.is(Trait) && ctx.owner.enclosingClass == pre.cls + tp.symbol.isParamOrAccessor && !pre.cls.is(Trait) && !tp.symbol.owner.is(Trait) && ctx.owner.enclosingClass == pre.cls // was ctx.owner.enclosingClass.derivesFrom(pre.cls) which was not tight enough // and was spuriously triggered in case inner class would inherit from outer one // eg anonymous TypeMap inside TypeMap.andThen @@ -745,7 +750,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { } - override def Inlined(tree: Tree)(call: Tree, bindings: List[MemberDef], expansion: Tree)(using Context): Inlined = { + override def Inlined(tree: Inlined)(call: Tree, bindings: List[MemberDef], expansion: Tree)(using Context): Inlined = { val tree1 = untpdCpy.Inlined(tree)(call, bindings, expansion) tree match { case tree: Inlined if sameTypes(bindings, tree.bindings) && (expansion.tpe eq tree.expansion.tpe) => @@ -1146,7 +1151,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def etaExpandCFT(using Context): Tree = def expand(target: Tree, tp: Type)(using
Context): Tree = tp match - case defn.ContextFunctionType(argTypes, resType, _) => + case defn.ContextFunctionType(argTypes, resType) => val anonFun = newAnonFun( ctx.owner, MethodType.companion(isContextual = true)(argTypes, resType), @@ -1255,7 +1260,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { */ private class MapToUnderlying extends TreeMap { override def transform(tree: Tree)(using Context): Tree = tree match { - case tree: Ident if isBinding(tree.symbol) && skipLocal(tree.symbol) => + case tree: Ident if isBinding(tree.symbol) && skipLocal(tree.symbol) && !tree.symbol.is(Module) => tree.symbol.defTree match { case defTree: ValOrDefDef => val rhs = defTree.rhs @@ -1278,6 +1283,21 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { !(sym.is(Method) && sym.info.isInstanceOf[MethodOrPoly]) // if is a method it is parameterless } + /** A tree traverser that generates the same import contexts as the original typer for statements. + * TODO: Should we align TreeMapWithPreciseStatContexts and also keep track of exprOwners? + */ + abstract class TreeTraverserWithPreciseImportContexts extends TreeTraverser: + override def apply(x: Unit, trees: List[Tree])(using Context): Unit = + def recur(trees: List[Tree]): Unit = trees match + case (imp: Import) :: rest => + traverse(rest)(using ctx.importContext(imp, imp.symbol)) + case tree :: rest => + traverse(tree) + traverse(rest) + case Nil => + recur(trees) + end TreeTraverserWithPreciseImportContexts + extension (xs: List[tpd.Tree]) def tpes: List[Type] = xs match { case x :: xs1 => x.tpe :: xs1.tpes @@ -1288,7 +1308,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { trait TreeProvider { protected def computeRootTrees(using Context): List[Tree] - private var myTrees: List[Tree] | Null = _ + private var myTrees: List[Tree] | Null = uninitialized /** Get trees defined by this provider. Cache them if -Yretain-trees is set. */ def rootTrees(using Context): List[Tree] = @@ -1389,17 +1409,17 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { * EmptyTree calls (for parameters) cancel the next-enclosing call in the list instead of being added to it. * We assume parameters are never nested inside parameters. */ - override def inlineContext(call: Tree)(using Context): Context = { + override def inlineContext(tree: Inlined)(using Context): Context = { // We assume enclosingInlineds is already normalized, and only process the new call with the head. val oldIC = enclosingInlineds val newIC = - if call.isEmpty then + if tree.inlinedFromOuterScope then oldIC match case t1 :: ts2 => ts2 case _ => oldIC else - call :: oldIC + tree.call :: oldIC val ctx1 = ctx.fresh.setProperty(InlinedCalls, newIC) if oldIC.isEmpty then ctx1.setProperty(InlinedTrees, new Counter) else ctx1 @@ -1514,6 +1534,25 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { } } + /** Creates the tuple containing the given elements */ + def tupleTree(elems: List[Tree])(using Context): Tree = { + val arity = elems.length + if arity == 0 then + ref(defn.EmptyTupleModule) + else if arity <= Definitions.MaxTupleArity then + // TupleN[elem1Tpe, ...](elem1, ...) + ref(defn.TupleType(arity).nn.typeSymbol.companionModule) + .select(nme.apply) + .appliedToTypes(elems.map(_.tpe.widenIfUnstable)) + .appliedToArgs(elems) + else + // TupleXXL.apply(elems*) // TODO add and use Tuple.apply(elems*) ?
+ ref(defn.TupleXXLModule) + .select(nme.apply) + .appliedToVarargs(elems.map(_.asInstance(defn.ObjectType)), TypeTree(defn.ObjectType)) + .asInstance(defn.tupleType(elems.map(elem => elem.tpe.widenIfUnstable))) + } + /** Creates the tuple type tree representation of the type trees in `ts` */ def tupleTypeTree(elems: List[Tree])(using Context): Tree = { val arity = elems.length diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index e3488034fef8..aabfdd97d7bd 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -2,15 +2,15 @@ package dotty.tools package dotc package ast -import core._ -import Types._, Contexts._, Constants._, Names._, Flags._ +import core.* +import Types.*, Contexts.*, Constants.*, Names.*, Flags.* import dotty.tools.dotc.typer.ProtoTypes -import Symbols._, StdNames._, Trees._ +import Symbols.*, StdNames.*, Trees.* import util.{Property, SourceFile, NoSource} import util.Spans.Span import annotation.constructorOnly import annotation.internal.sharable -import Decorators._ +import Decorators.* object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { @@ -42,7 +42,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { /** mods object name impl */ case class ModuleDef(name: TermName, impl: Template)(implicit @constructorOnly src: SourceFile) extends MemberDef { - type ThisTree[+T <: Untyped] <: Trees.NameTree[T] with Trees.MemberDef[T] with ModuleDef + type ThisTree[+T <: Untyped] <: Trees.NameTree[T] & Trees.MemberDef[T] & ModuleDef def withName(name: Name)(using Context): ModuleDef = cpy.ModuleDef(this)(name.toTermName, impl) } @@ -137,6 +137,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { val rename: TermName = renamed match case Ident(rename: TermName) => rename case _ => name + + def isUnimport = rename == nme.WILDCARD } case class Number(digits: String, kind: NumberKind)(implicit @constructorOnly src: SourceFile) extends TermTree @@ -147,11 +149,21 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case Floating } - /** {x1, ..., xN} T (only relevant under captureChecking) */ + /** {x1, ..., xN} T (only relevant under captureChecking) + * Created when parsing function types so that the capture set and result type + * are combined in a single node. + */ case class CapturesAndResult(refs: List[Tree], parent: Tree)(implicit @constructorOnly src: SourceFile) extends TypTree - /** Short-lived usage in typer, does not need copy/transform/fold infrastructure */ - case class DependentTypeTree(tp: List[Symbol] => Type)(implicit @constructorOnly src: SourceFile) extends Tree + /** A type tree appearing somewhere in the untyped DefDef of a lambda; it will be typed using `tpFun`. + * + * @param isResult Is this the result type of the lambda? This is handled specially in `Namer#valOrDefDefSig`. + * @param tpFun Compute the type of the type tree given the parameters of the lambda. + * A lambda has at most one type parameter list followed by exactly one term parameter list. + * + * Note: This is only used briefly in Typer and does not need the copy/transform/fold infrastructure.
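A quick illustration of the new `isUnimport` helper from the user's perspective (the object and member names are invented for the example):

```scala
object stuff:
  val bar = 1
  val baz = 2

// The selector `bar as _` is an unimport: its rename is the wildcard,
// so `bar` is excluded while everything else is imported.
import stuff.{bar as _, *}

val onlyBaz = baz
```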
+ */ + case class InLambdaTypeTree(isResult: Boolean, tpFun: (List[TypeSymbol], List[TermSymbol]) => Type)(implicit @constructorOnly src: SourceFile) extends Tree @sharable object EmptyTypeIdent extends Ident(tpnme.EMPTY)(NoSource) with WithoutTypeOrPos[Untyped] { override def isEmpty: Boolean = true @@ -399,6 +411,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def Inlined(call: tpd.Tree, bindings: List[MemberDef], expansion: Tree)(implicit src: SourceFile): Inlined = new Inlined(call, bindings, expansion) def Quote(body: Tree, tags: List[Tree])(implicit src: SourceFile): Quote = new Quote(body, tags) def Splice(expr: Tree)(implicit src: SourceFile): Splice = new Splice(expr) + def QuotePattern(bindings: List[Tree], body: Tree, quotes: Tree)(implicit src: SourceFile): QuotePattern = new QuotePattern(bindings, body, quotes) def SplicePattern(body: Tree, args: List[Tree])(implicit src: SourceFile): SplicePattern = new SplicePattern(body, args) def TypeTree()(implicit src: SourceFile): TypeTree = new TypeTree() def InferredTypeTree()(implicit src: SourceFile): TypeTree = new InferredTypeTree() @@ -479,7 +492,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def InferredTypeTree(tpe: Type)(using Context): TypedSplice = TypedSplice(new InferredTypeTree().withTypeUnchecked(tpe)) - def unitLiteral(implicit src: SourceFile): Literal = Literal(Constant(())) + def unitLiteral(implicit src: SourceFile): Literal = Literal(Constant(())).withAttachment(SyntheticUnit, ()) def ref(tp: NamedType)(using Context): Tree = TypedSplice(tpd.ref(tp)) @@ -497,11 +510,13 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def scalaRuntimeDot(name: Name)(using SourceFile): Select = Select(scalaDot(nme.runtime), name) def scalaUnit(implicit src: SourceFile): Select = scalaDot(tpnme.Unit) def scalaAny(implicit src: SourceFile): Select = scalaDot(tpnme.Any) - def javaDotLangDot(name: Name)(implicit src: SourceFile): Select = Select(Select(Ident(nme.java), nme.lang), name) def captureRoot(using Context): Select = Select(scalaDot(nme.caps), nme.CAPTURE_ROOT) + def captureRootIn(using Context): Select = + Select(scalaDot(nme.caps), nme.capIn) + def makeRetaining(parent: Tree, refs: List[Tree], annotName: TypeName)(using Context): Annotated = Annotated(parent, New(scalaAnnotationDot(annotName), List(refs))) diff --git a/compiler/src/dotty/tools/dotc/cc/BoxedTypeCache.scala b/compiler/src/dotty/tools/dotc/cc/BoxedTypeCache.scala deleted file mode 100644 index 56b3f5ba5047..000000000000 --- a/compiler/src/dotty/tools/dotc/cc/BoxedTypeCache.scala +++ /dev/null @@ -1,19 +0,0 @@ -package dotty.tools -package dotc -package cc - -import core.* -import Types.*, Symbols.*, Contexts.* - -/** A one-element cache for the boxed version of an unboxed capturing type */ -class BoxedTypeCache: - private var boxed: Type = compiletime.uninitialized - private var unboxed: Type = NoType - - def apply(tp: AnnotatedType)(using Context): Type = - if tp ne unboxed then - unboxed = tp - val CapturingType(parent, refs) = tp: @unchecked - boxed = CapturingType(parent, refs, boxed = true) - boxed -end BoxedTypeCache \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureAnnotation.scala b/compiler/src/dotty/tools/dotc/cc/CaptureAnnotation.scala index fd89159e2076..e437a8ad5d5f 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureAnnotation.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureAnnotation.scala @@ -24,8 +24,17 @@ case class 
CaptureAnnotation(refs: CaptureSet, boxed: Boolean)(cls: Symbol) exte import CaptureAnnotation.* import tpd.* - /** A cache for boxed version of a capturing type with this annotation */ - val boxedType = BoxedTypeCache() + /** A cache for the version of this annotation which differs in its boxed status. */ + var boxDual: CaptureAnnotation | Null = null + + /** A boxed annotation which is either the same annotation or its boxDual */ + def boxedAnnot(using Context): CaptureAnnotation = + if boxed then this + else if boxDual != null then boxDual.nn + else + val dual = CaptureAnnotation(refs, boxed = true)(cls) + dual.boxDual = this + dual /** Reconstitute annotation tree from capture set */ override def tree(using Context) = @@ -54,7 +63,7 @@ case class CaptureAnnotation(refs: CaptureSet, boxed: Boolean)(cls: Symbol) exte val elems = refs.elems.toList val elems1 = elems.mapConserve(tm) if elems1 eq elems then this - else if elems1.forall(_.isInstanceOf[CaptureRef]) + else if elems1.forall(_.isTrackableRef) then derivedAnnotation(CaptureSet(elems1.asInstanceOf[List[CaptureRef]]*), boxed) else EmptyAnnotation diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 3ba26c92cab5..a0d874007728 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -4,34 +4,87 @@ package cc import core.* import Types.*, Symbols.*, Contexts.*, Annotations.*, Flags.* +import Names.TermName import ast.{tpd, untpd} import Decorators.*, NameOps.* import config.SourceVersion import config.Printers.capt import util.Property.Key import tpd.* +import StdNames.nme import config.Feature +import collection.mutable private val Captures: Key[CaptureSet] = Key() -private val BoxedType: Key[BoxedTypeCache] = Key() -/** The arguments of a @retains or @retainsByName annotation */ -private[cc] def retainedElems(tree: Tree)(using Context): List[Tree] = tree match - case Apply(_, Typed(SeqLiteral(elems, _), _) :: Nil) => elems - case _ => Nil +object ccConfig: -def allowUniversalInBoxed(using Context) = - Feature.sourceVersion.isAtLeast(SourceVersion.`3.3`) + /** If true, allow mapping capture set variables under captureChecking with maps that are neither + * bijective nor idempotent. We currently do not know how to do this correctly in all + * cases, though. + */ + inline val allowUnsoundMaps = false + + /** If true, use `sealed` as an encapsulation mechanism instead of the + * previous global restriction that `cap` can't be boxed or unboxed. + */ + def allowUniversalInBoxed(using Context) = + Feature.sourceVersion.isAtLeast(SourceVersion.`3.3`) +end ccConfig + + +/** Are we at the checkCaptures phase? */ +def isCaptureChecking(using Context): Boolean = + ctx.phaseId == Phases.checkCapturesPhase.id + +/** Are we at the checkCaptures or Setup phase? */ +def isCaptureCheckingOrSetup(using Context): Boolean = + val ccId = Phases.checkCapturesPhase.id + val ctxId = ctx.phaseId + ctxId == ccId || ctxId == ccId - 1 + +/** A dependent function type with given arguments and result type + * TODO Move somewhere else where we treat all function type related ops together.
+ */ +def depFun(args: List[Type], resultType: Type, isContextual: Boolean, paramNames: List[TermName] = Nil)(using Context): Type = + val make = MethodType.companion(isContextual = isContextual) + val mt = + if paramNames.length == args.length then make(paramNames, args, resultType) + else make(args, resultType) + mt.toFunctionType(alwaysDependent = true) /** An exception thrown if a @retains argument is not syntactically a CaptureRef */ -class IllegalCaptureRef(tpe: Type) extends Exception +class IllegalCaptureRef(tpe: Type) extends Exception(tpe.toString) + +/** Capture checking state, which is known to other capture checking components */ +class CCState: + + /** The last pair of capture reference and capture set where + * the reference could not be added to the set due to a level conflict. + */ + var levelError: Option[CaptureSet.CompareResult.LevelError] = None + +end CCState + +/** The currently valid CCState */ +def ccState(using Context) = + Phases.checkCapturesPhase.asInstanceOf[CheckCaptures].ccState + +class NoCommonRoot(rs: Symbol*)(using Context) extends Exception( + i"No common capture root nested in ${rs.mkString(" and ")}" +) extension (tree: Tree) /** Map tree with CaptureRef type to its type, throw IllegalCaptureRef otherwise */ - def toCaptureRef(using Context): CaptureRef = tree.tpe match - case ref: CaptureRef => ref - case tpe => throw IllegalCaptureRef(tpe) + def toCaptureRef(using Context): CaptureRef = tree match + case ReachCapabilityApply(arg) => + arg.toCaptureRef.reach + case _ => tree.tpe match + case ref: CaptureRef if ref.isTrackableRef => + ref + case tpe => + throw IllegalCaptureRef(tpe) // if this was compiled from cc syntax, the problem should have been reported at Typer /** Convert a @retains or @retainsByName annotation tree to the capture set it represents. * For efficiency, the result is cached as an Attachment on the tree. @@ -40,26 +93,15 @@ extension (tree: Tree) tree.getAttachment(Captures) match case Some(refs) => refs case None => - val refs = CaptureSet(retainedElems(tree).map(_.toCaptureRef)*) + val refs = CaptureSet(tree.retainedElems.map(_.toCaptureRef)*) .showing(i"toCaptureSet $tree --> $result", capt) tree.putAttachment(Captures, refs) refs - /** Under pureFunctions, add a @retainsByName(*)` annotation to the argument of - * a by name parameter type, turning the latter into an impure by name parameter type.
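For context, `depFun` builds the compiler-internal form of a dependent function type. At the source level such types look like the following (plain Scala 3, unrelated to capture checking):

```scala
trait Entry { type Key; val key: Key }

// A dependent function type: the result type refers to the parameter.
val extractKey: (e: Entry) => e.Key = (e: Entry) => e.key
```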
- */ - def adaptByNameArgUnderPureFuns(using Context): Tree = - if Feature.pureFunsEnabledSomewhere then - val rbn = defn.RetainsByNameAnnot - Annotated(tree, - New(rbn.typeRef).select(rbn.primaryConstructor).appliedTo( - Typed( - SeqLiteral(ref(defn.captureRoot) :: Nil, TypeTree(defn.AnyType)), - TypeTree(defn.RepeatedParamType.appliedTo(defn.AnyType)) - ) - ) - ) - else tree + /** The arguments of a @retains or @retainsByName annotation */ + def retainedElems(using Context): List[Tree] = tree match + case Apply(_, Typed(SeqLiteral(elems, _), _) :: Nil) => elems + case _ => Nil extension (tp: Type) @@ -76,27 +118,24 @@ extension (tp: Type) def boxed(using Context): Type = tp.dealias match case tp @ CapturingType(parent, refs) if !tp.isBoxed && !refs.isAlwaysEmpty => tp.annot match - case ann: CaptureAnnotation => - ann.boxedType(tp) - case ann => - ann.tree.getAttachment(BoxedType) match - case None => ann.tree.putAttachment(BoxedType, BoxedTypeCache()) - case _ => - ann.tree.attachment(BoxedType)(tp) + case ann: CaptureAnnotation => AnnotatedType(parent, ann.boxedAnnot) + case ann => tp case tp: RealTypeBounds => tp.derivedTypeBounds(tp.lo.boxed, tp.hi.boxed) case _ => tp - /** If `sym` is a type parameter, the boxed version of `tp`, otherwise `tp` */ - def boxedIfTypeParam(sym: Symbol)(using Context) = - if sym.is(TypeParam) then tp.boxed else tp - - /** The boxed version of `tp`, unless `tycon` is a function symbol */ - def boxedUnlessFun(tycon: Type)(using Context) = - if ctx.phase != Phases.checkCapturesPhase || defn.isFunctionSymbol(tycon.typeSymbol) - then tp - else tp.boxed + /** If this is a boxed capturing type with a nonempty capture set, its unboxed version. + * Or, if the type is a TypeBounds of capturing types, the version where the bounds are unboxed. + * The identity for all other types. + */ + def unboxed(using Context): Type = tp.dealias match + case tp @ CapturingType(parent, refs) if tp.isBoxed && !refs.isAlwaysEmpty => + CapturingType(parent, refs) + case tp: RealTypeBounds => + tp.derivedTypeBounds(tp.lo.unboxed, tp.hi.unboxed) + case _ => + tp /** The capture set consisting of all top-level captures of `tp` that appear under a box. * Unlike for `boxed` this also considers parents of capture types, unions and @@ -124,7 +163,7 @@ extension (tp: Type) def forceBoxStatus(boxed: Boolean)(using Context): Type = tp.widenDealias match case tp @ CapturingType(parent, refs) if tp.isBoxed != boxed => val refs1 = tp match - case ref: CaptureRef if ref.isTracked => ref.singletonCaptureSet + case ref: CaptureRef if ref.isTracked || ref.isReach => ref.singletonCaptureSet case _ => refs CapturingType(parent, refs1, boxed) case _ => @@ -141,39 +180,6 @@ extension (tp: Type) case _ => tp - /** Under pureFunctions, map regular function type to impure function type - */ - def adaptFunctionTypeUnderPureFuns(using Context): Type = tp match - case AppliedType(fn, args) - if Feature.pureFunsEnabledSomewhere && defn.isFunctionClass(fn.typeSymbol) => - val fname = fn.typeSymbol.name - defn.FunctionType( - fname.functionArity, - isContextual = fname.isContextFunction, - isImpure = true).appliedTo(args) - case _ => - tp - - /** Under pureFunctions, add a @retainsByName(*)` annotation to the argument of - * a by name parameter type, turning the latter into an impure by name parameter type.
- */ - def adaptByNameArgUnderPureFuns(using Context): Type = - if Feature.pureFunsEnabledSomewhere then - AnnotatedType(tp, - CaptureAnnotation(CaptureSet.universal, boxed = false)(defn.RetainsByNameAnnot)) - else - tp - - def isCapturingType(using Context): Boolean = - tp match - case CapturingType(_, _) => true - case _ => false - - def isEventuallyCapturingType(using Context): Boolean = - tp match - case EventuallyCapturingType(_, _) => true - case _ => false - /** Is type known to be always pure by its class structure, * so that adding a capture set to it would not make sense? */ @@ -193,15 +199,121 @@ extension (tp: Type) case _ => false + def isCapabilityClassRef(using Context) = tp.dealiasKeepAnnots match + case _: TypeRef | _: AppliedType => tp.typeSymbol.hasAnnotation(defn.CapabilityAnnot) + case _ => false + + /** Drop @retains annotations everywhere */ + def dropAllRetains(using Context): Type = // TODO we should drop retains from inferred types before unpickling + val tm = new TypeMap: + def apply(t: Type) = t match + case AnnotatedType(parent, annot) if annot.symbol == defn.RetainsAnnot => + apply(parent) + case _ => + mapOver(t) + tm(tp) + + /** If `x` is a capture ref, its reach capability `x*`, represented internally + * as `x @reachCapability`. `x*` stands for all capabilities reachable through `x`. + * We have `{x} <: {x*} <: dcs(x)` where the deep capture set `dcs(x)` of `x` + * is the union of all capture sets that appear in covariant position in the + * type of `x`. If `x` and `y` are different variables then `{x*}` and `{y*}` + * are unrelated. + */ + def reach(using Context): CaptureRef = + assert(tp.isTrackableRef) + AnnotatedType(tp, Annotation(defn.ReachCapabilityAnnot, util.Spans.NoSpan)) + + /** If `ref` is a trackable capture ref, and `tp` has only covariant occurrences of a + * universal capture set, replace all these occurrences by `{ref*}`. This implements + * the new aspect of the (Var) rule, which can now be stated as follows: + * + * x: T in E + * ----------- + * E |- x: T' + * + * where T' is T with (1) the toplevel capture set replaced by `{x}` and + * (2) all covariant occurrences of cap replaced by `x*`, provided there + * are no occurrences in `T` at other variances. (1) is standard, whereas + * (2) is new. + * + * For (2), multiply-flipped covariant occurrences of cap won't be replaced. + * In other words, + * + * - For xs: List[File^] ==> List[File^{xs*}], the cap is replaced; + * - while f: [R] -> (op: File^ => R) -> R remains unchanged. + * + * Without this restriction, the signature of functions like withFile: + * + * (path: String) -> [R] -> (op: File^ => R) -> R + * + * could be refined to + * + * (path: String) -> [R] -> (op: File^{withFile*} => R) -> R + * + * which is clearly unsound. + * + * Why is this sound? Covariant occurrences of cap must represent capabilities + * that are reachable from `x`, so they are included in the meaning of `{x*}`. + * At the same time, encapsulation is still maintained since no covariant + * occurrences of cap are allowed in instance types of type variables. + */ + def withReachCaptures(ref: Type)(using Context): Type = + class CheckContraCaps extends TypeTraverser: + var ok = true + def traverse(t: Type): Unit = + if ok then + t match + case CapturingType(_, cs) if cs.isUniversal && variance <= 0 => + ok = false + case _ => + traverseChildren(t) + + object narrowCaps extends TypeMap: + /** Has the variance been flipped at this point?
*/ + private var isFlipped: Boolean = false + + def apply(t: Type) = + val saved = isFlipped + try + if variance <= 0 then isFlipped = true + t.dealias match + case t1 @ CapturingType(p, cs) if cs.isUniversal && !isFlipped => + t1.derivedCapturingType(apply(p), ref.reach.singletonCaptureSet) + case _ => t match + case t @ CapturingType(p, cs) => + t.derivedCapturingType(apply(p), cs) // don't map capture set variables + case t => + mapOver(t) + finally isFlipped = saved + ref match + case ref: CaptureRef if ref.isTrackableRef => + val checker = new CheckContraCaps + checker.traverse(tp) + if checker.ok then + val tp1 = narrowCaps(tp) + if tp1 ne tp then capt.println(i"narrow $tp of $ref to $tp1") + tp1 + else + capt.println(i"cannot narrow $tp of $ref") + tp + case _ => + tp + extension (cls: ClassSymbol) def pureBaseClass(using Context): Option[Symbol] = - cls.baseClasses.find(bc => + cls.baseClasses.find: bc => defn.pureBaseClasses.contains(bc) - || { - val selfType = bc.givenSelfType - selfType.exists && selfType.captureSet.isAlwaysEmpty - }) + || bc.is(CaptureChecked) + && bc.givenSelfType.dealiasKeepAnnots.match + case CapturingType(_, refs) => refs.isAlwaysEmpty + case RetainingType(_, refs) => refs.isEmpty + case selfType => + isCaptureChecking // At Setup we have not processed self types yet, so + // unless a self type is explicitly given, we can't tell + // and err on the side of impure. + && selfType.exists && selfType.captureSet.isAlwaysEmpty extension (sym: Symbol) @@ -247,18 +359,63 @@ extension (sym: Symbol) && sym != defn.Caps_unsafeBox && sym != defn.Caps_unsafeUnbox + /** Does this symbol define a level where we do not want to let local variables + * escape into outer capture sets? + */ + def isLevelOwner(using Context): Boolean = + sym.isClass + || sym.is(Method, butNot = Accessor) + + /** The owner of the current level. Qualifying owners are + * - methods other than constructors and anonymous functions + * - anonymous functions, provided they either define a local + * root of type caps.Cap, or they are the rhs of a val definition. + * - classes, if they are not staticOwners + * - _root_ + */ + def levelOwner(using Context): Symbol = + def recur(sym: Symbol): Symbol = + if !sym.exists || sym.isRoot || sym.isStaticOwner then defn.RootClass + else if sym.isLevelOwner then sym + else recur(sym.owner) + recur(sym) + + /** The outermost symbol owned by both `sym` and `other`. if none exists + * since the owning scopes of `sym` and `other` are not nested, invoke + * `onConflict` to return a symbol. + */ + def maxNested(other: Symbol, onConflict: (Symbol, Symbol) => Context ?=> Symbol)(using Context): Symbol = + if !sym.exists || other.isContainedIn(sym) then other + else if !other.exists || sym.isContainedIn(other) then sym + else onConflict(sym, other) + + /** The innermost symbol owning both `sym` and `other`. + */ + def minNested(other: Symbol)(using Context): Symbol = + if !other.exists || other.isContainedIn(sym) then sym + else if !sym.exists || sym.isContainedIn(other) then other + else sym.owner.minNested(other.owner) + extension (tp: AnnotatedType) /** Is this a boxed capturing type? */ def isBoxed(using Context): Boolean = tp.annot match case ann: CaptureAnnotation => ann.boxed case _ => false -extension (ts: List[Type]) - /** Equivalent to ts.mapconserve(_.boxedUnlessFun(tycon)) but more efficient where - * it is the identity. 
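To make the contract of the `maxNested`/`minNested` helpers added in this hunk concrete, here is a small self-contained model of the nesting relation (a toy, not compiler code; it assumes containment is simply reachability along the owner chain, as in the compiler):

```scala
enum Owner:
  case Root
  case Nested(parent: Owner)

  def isContainedIn(other: Owner): Boolean =
    this == other || (this match
      case Owner.Nested(p) => p.isContainedIn(other)
      case Owner.Root      => false)

// The more deeply nested of two owners, or `onConflict` if neither contains the other.
def maxNested(a: Owner, b: Owner)(onConflict: (Owner, Owner) => Owner): Owner =
  if b.isContainedIn(a) then b
  else if a.isContainedIn(b) then a
  else onConflict(a, b)
```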
- */ - def boxedUnlessFun(tycon: Type)(using Context) = - if ctx.phase != Phases.checkCapturesPhase || defn.isFunctionClass(tycon.typeSymbol) - then ts - else ts.mapconserve(_.boxed) +/** An extractor for `caps.reachCapability(ref)`, which is used to express a reach + * capability as a tree in a @retains annotation. + */ +object ReachCapabilityApply: + def unapply(tree: Apply)(using Context): Option[Tree] = tree match + case Apply(reach, arg :: Nil) if reach.symbol == defn.Caps_reachCapability => Some(arg) + case _ => None + +/** An extractor for `ref @annotation.internal.reachCapability`, which is used to express + * the reach capability `ref*` as a type. + */ +object ReachCapability: + def unapply(tree: AnnotatedType)(using Context): Option[SingletonCaptureRef] = tree match + case AnnotatedType(parent: SingletonCaptureRef, ann) + if ann.symbol == defn.ReachCapabilityAnnot => Some(parent) + case _ => None diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index fdc4f66beafa..fb086ca0399b 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -4,7 +4,7 @@ package cc import core.* import Types.*, Symbols.*, Flags.*, Contexts.*, Decorators.* -import config.Printers.capt +import config.Printers.{capt, captDebug} import Annotations.Annotation import annotation.threadUnsafe import annotation.constructorOnly @@ -13,9 +13,9 @@ import reporting.trace import printing.{Showable, Printer} import printing.Texts.* import util.{SimpleIdentitySet, Property} +import typer.ErrorReporting.Addenda import util.common.alwaysTrue import scala.collection.mutable -import config.Config.ccAllowUnsoundMaps /** A class for capture sets. Capture sets can be constants or variables. * Capture sets support inclusion constraints <:< where <:< is subcapturing. @@ -55,6 +55,11 @@ sealed abstract class CaptureSet extends Showable: */ def isAlwaysEmpty: Boolean + /** An optional level limit, or NoSymbol if none exists. All elements of the set + * must be in scopes visible from the level limit. + */ + def levelLimit: Symbol + /** Is this capture set definitely non-empty? */ final def isNotEmpty: Boolean = !elems.isEmpty @@ -72,21 +77,63 @@ sealed abstract class CaptureSet extends Showable: /** Does this capture set contain the root reference `cap` as element? */ final def isUniversal(using Context) = - elems.exists { - case ref: TermRef => ref.symbol == defn.captureRoot - case _ => false - } + elems.exists(_.isRootCapability) - /** Add new elements to this capture set if allowed. - * @pre `newElems` is not empty and does not overlap with `this.elems`. - * Constant capture sets never allow to add new elements. - * Variables allow it if and only if the new elements can be included - * in all their dependent sets. - * @param origin The set where the elements come from, or `empty` if not known. - * @return CompareResult.OK if elements were added, or a conflicting - * capture set that prevents addition otherwise. + /** Try to include an element in this capture set. + * @param elem The element to be added + * @param origin The set that originated the request, or `empty` if the request came from outside. + * + * If the set already accounts for the element, return OK. + * Otherwise, try to add a new element to the set. 
This is OK if + * - the set is a variable, and + * - the element is not at a deeper nesting level than the set, and + * - the element can also be added (in mapped/filtered form) to all + * dependent sets. + * If the `origin` is the same as the `source` of the set variable, the + * element might be filtered or mapped according to the class of the variable. + * Otherwise, the element might have to be back-propagated to the source + * of the variable. + * + * If the element itself cannot be added to the set for some reason, and the + * element is not the root capability, try instead to include its underlying + * capture set. */ - protected def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult + protected def tryInclude(elem: CaptureRef, origin: CaptureSet)(using Context, VarState): CompareResult = + if accountsFor(elem) then CompareResult.OK + else addNewElem(elem) + + /** Try to include all elements of `refs` in this capture set. */ + protected final def tryInclude(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = + (CompareResult.OK /: newElems): (r, elem) => + r.andAlso(tryInclude(elem, origin)) + + /** Add an element to this capture set, assuming it is not already accounted for, + * and omitting any mapping or filtering. + * + * If the element itself cannot be added to the set for some reason, and the + * element is not the root capability, try instead to include its underlying + * capture set. + */ + protected final def addNewElem(elem: CaptureRef)(using Context, VarState): CompareResult = + if elem.isRootCapability || summon[VarState] == FrozenState then + addThisElem(elem) + else + addThisElem(elem).orElse: + val underlying = elem.captureSetOfInfo + tryInclude(underlying.elems, this).andAlso: + underlying.addDependent(this) + CompareResult.OK + + /** Add new elements one by one using `addNewElem`, abort on first failure */ + protected final def addNewElems(newElems: Refs)(using Context, VarState): CompareResult = + (CompareResult.OK /: newElems): (r, elem) => + r.andAlso(addNewElem(elem)) + + /** Add a specific element, assuming it is not already accounted for, + * and omitting any mapping or filtering, without possibility to backtrack + * to the underlying capture set. + */ + protected def addThisElem(elem: CaptureRef)(using Context, VarState): CompareResult /** If this is a variable, add `cs` as a dependent set */ protected def addDependent(cs: CaptureSet)(using Context, VarState): CompareResult @@ -96,37 +143,34 @@ sealed abstract class CaptureSet extends Showable: cs.addDependent(this)(using ctx, UnrecordedState) this - /** Try to include all references of `elems` that are not yet accounted for by this - * capture set. Inclusion is via `addNewElems`. - * @param origin The set where the elements come from, or `empty` if not known. - * @return CompareResult.OK if all unaccounted elements could be added, - * capture set that prevents addition otherwise.
+ /** x subsumes x + * this subsumes this.f + * x subsumes y ==> x* subsumes y + * x subsumes y ==> x* subsumes y* */ - protected final def tryInclude(elems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = - val unaccounted = elems.filter(!accountsFor(_)) - if unaccounted.isEmpty then CompareResult.OK - else addNewElems(unaccounted, origin) - - /** Equivalent to `tryInclude({elem}, origin)`, but more efficient */ - protected final def tryInclude(elem: CaptureRef, origin: CaptureSet)(using Context, VarState): CompareResult = - if accountsFor(elem) then CompareResult.OK - else addNewElems(elem.singletonCaptureSet.elems, origin) - - /* x subsumes y if x is the same as y, or x is a this reference and y refers to a field of x */ - extension (x: CaptureRef) private def subsumes(y: CaptureRef) = - (x eq y) - || y.match - case y: TermRef => y.prefix eq x - case _ => false + extension (x: CaptureRef) + private def subsumes(y: CaptureRef)(using Context): Boolean = + (x eq y) + || x.isRootCapability + || y.match + case y: TermRef => !y.isReach && (y.prefix eq x) + case _ => false + || x.match + case ReachCapability(x1) => x1.subsumes(y.stripReach) + case _ => false /** {x} <:< this where <:< is subcapturing, but treating all variables * as frozen. */ def accountsFor(x: CaptureRef)(using Context): Boolean = - reporting.trace(i"$this accountsFor $x, ${x.captureSetOfInfo}?", show = true) { - elems.exists(_.subsumes(x)) - || !x.isRootCapability && x.captureSetOfInfo.subCaptures(this, frozen = true).isOK - } + if comparer.isInstanceOf[ExplainingTypeComparer] then // !!! DEBUG + reporting.trace.force(i"$this accountsFor $x, ${x.captureSetOfInfo}?", show = true): + elems.exists(_.subsumes(x)) + || !x.isRootCapability && x.captureSetOfInfo.subCaptures(this, frozen = true).isOK + else + reporting.trace(i"$this accountsFor $x, ${x.captureSetOfInfo}?", show = true): + elems.exists(_.subsumes(x)) + || !x.isRootCapability && x.captureSetOfInfo.subCaptures(this, frozen = true).isOK /** A more optimistic version of accountsFor, which does not take variable supersets * of the `x` reference into account. 
A set might account for `x` if it accounts @@ -137,7 +181,7 @@ sealed abstract class CaptureSet extends Showable: */ def mightAccountFor(x: CaptureRef)(using Context): Boolean = reporting.trace(i"$this mightAccountFor $x, ${x.captureSetOfInfo}?", show = true) { - elems.exists(elem => elem.subsumes(x) || elem.isRootCapability) + elems.exists(_.subsumes(x)) || !x.isRootCapability && { val elems = x.captureSetOfInfo.elems @@ -162,20 +206,14 @@ sealed abstract class CaptureSet extends Showable: /** The subcapturing test, using a given VarState */ private def subCaptures(that: CaptureSet)(using Context, VarState): CompareResult = - def recur(elems: List[CaptureRef]): CompareResult = elems match - case elem :: elems1 => - var result = that.tryInclude(elem, this) - if !result.isOK && !elem.isRootCapability && summon[VarState] != FrozenState then - result = elem.captureSetOfInfo.subCaptures(that) - if result.isOK then - recur(elems1) - else - varState.rollBack() - result - case Nil => - addDependent(that) - recur(elems.toList) - .showing(i"subcaptures $this <:< $that = ${result.show}", capt) + val result = that.tryInclude(elems, this) + if result.isOK then + addDependent(that) + else + ccState.levelError = ccState.levelError.orElse(result.levelError) + varState.rollBack() + result + //.showing(i"subcaptures $this <:< $that = ${result.show}", capt) /** Two capture sets are considered =:= equal if they mutually subcapture each other * in a frozen state. @@ -191,7 +229,10 @@ sealed abstract class CaptureSet extends Showable: if this.subCaptures(that, frozen = true).isOK then that else if that.subCaptures(this, frozen = true).isOK then this else if this.isConst && that.isConst then Const(this.elems ++ that.elems) - else Var(this.elems ++ that.elems).addAsDependentTo(this).addAsDependentTo(that) + else Var( + this.levelLimit.maxNested(that.levelLimit, onConflict = (sym1, sym2) => sym1), + this.elems ++ that.elems) + .addAsDependentTo(this).addAsDependentTo(that) /** The smallest superset (via <:<) of this capture set that also contains `ref`. */ @@ -210,10 +251,11 @@ sealed abstract class CaptureSet extends Showable: * any of the elements in the constant capture set `that` */ def -- (that: CaptureSet.Const)(using Context): CaptureSet = - val elems1 = elems.filter(!that.accountsFor(_)) - if elems1.size == elems.size then this - else if this.isConst then Const(elems1) - else Diff(asVar, that) + if this.isConst then + val elems1 = elems.filter(!that.accountsFor(_)) + if elems1.size == elems.size then this else Const(elems1) + else + if that.isAlwaysEmpty then this else Diff(asVar, that) /** The largest subset (via <:<) of this capture set that does not account for `ref` */ def - (ref: CaptureRef)(using Context): CaptureSet = @@ -275,9 +317,11 @@ sealed abstract class CaptureSet extends Showable: if isUniversal then handler() this - /** Invoke handler on the elements to check wellformedness of the capture set */ - def ensureWellformed(handler: List[CaptureRef] => Context ?=> Unit)(using Context): this.type = - handler(elems.toList) + /** Invoke handler on the elements to ensure wellformedness of the capture set. + * The handler might add additional elements to the capture set. + */ + def ensureWellformed(handler: CaptureRef => Context ?=> Unit)(using Context): this.type = + elems.foreach(handler(_)) this /** An upper approximation of this capture set, i.e. 
a constant set that is
@@ -299,15 +343,18 @@ sealed abstract class CaptureSet extends Showable:
   /** This capture set with a description that tells where it comes from */
   def withDescription(description: String): CaptureSet

-  /** The provided description (using `withDescription`) for this capture set or else "" */
+  /** The provided description (set via `withDescription`) for this capture set or else "" */
   def description: String

+  /** More info enabled by -Y flags */
+  def optionalInfo(using Context): String = ""
+
   /** A regular @retains or @retainsByName annotation with the elements of this set as arguments. */
   def toRegularAnnotation(cls: Symbol)(using Context): Annotation =
     Annotation(CaptureAnnotation(this, boxed = false)(cls).tree)

   override def toText(printer: Printer): Text =
-    Str("{") ~ Text(elems.toList.map(printer.toTextCaptureRef), ", ") ~ Str("}") ~~ description
+    printer.toTextCaptureSet(this) ~~ description

 object CaptureSet:
   type Refs = SimpleIdentitySet[CaptureRef]
@@ -343,8 +390,8 @@ object CaptureSet:
     def isConst = true
     def isAlwaysEmpty = elems.isEmpty

-    def addNewElems(elems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult =
-      CompareResult.fail(this)
+    def addThisElem(elem: CaptureRef)(using Context, VarState): CompareResult =
+      CompareResult.Fail(this :: Nil)

     def addDependent(cs: CaptureSet)(using Context, VarState) = CompareResult.OK

@@ -352,17 +399,38 @@ object CaptureSet:

     def withDescription(description: String): Const = Const(elems, description)

+    def levelLimit = NoSymbol
+
     override def toString = elems.toString
   end Const

+  /** A special capture set that gets added to the types of symbols that were not
+   *  themselves capture checked, in order to admit arbitrary corresponding capture
+   *  sets in subcapturing comparisons. Similar to platform types for explicit
+   *  nulls, this provides more lenient checking against compilation units that
+   *  were not yet compiled with capture checking on.
+   */
+  object Fluid extends Const(emptySet):
+    override def isAlwaysEmpty = false
+    override def addThisElem(elem: CaptureRef)(using Context, VarState) = CompareResult.OK
+    override def accountsFor(x: CaptureRef)(using Context): Boolean = true
+    override def mightAccountFor(x: CaptureRef)(using Context): Boolean = true
+    override def toString = ""
+  end Fluid
+
   /** The subclass of capture set variables with given initial elements */
-  class Var(initialElems: Refs = emptySet) extends CaptureSet:
+  class Var(directOwner: Symbol, initialElems: Refs = emptySet)(using @constructorOnly ictx: Context) extends CaptureSet:

     /** A unique identification number for diagnostics */
     val id =
       varId += 1
       varId

+    //assert(id != 40)
+
+    override val levelLimit =
+      if directOwner.exists then directOwner.levelOwner else NoSymbol
+
     /** A variable is solved if it is approximated to a from-then-on constant set.
*/ private var isSolved: Boolean = false @@ -380,8 +448,10 @@ object CaptureSet: /** A handler to be invoked if the root reference `cap` is added to this set */ var rootAddedHandler: () => Context ?=> Unit = () => () + private[CaptureSet] var noUniversal = false + /** A handler to be invoked when new elems are added to this set */ - var newElemAddedHandler: List[CaptureRef] => Context ?=> Unit = _ => () + var newElemAddedHandler: CaptureRef => Context ?=> Unit = _ => () var description: String = "" @@ -409,17 +479,37 @@ object CaptureSet: def resetDeps()(using state: VarState): Unit = deps = state.deps(this) - def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = - if !isConst && recordElemsState() then - elems ++= newElems - if isUniversal then rootAddedHandler() - newElemAddedHandler(newElems.toList) + final def addThisElem(elem: CaptureRef)(using Context, VarState): CompareResult = + if isConst || !recordElemsState() then + CompareResult.Fail(this :: Nil) // fail if variable is solved or given VarState is frozen + else if !levelOK(elem) then + CompareResult.LevelError(this, elem) + else + //if id == 34 then assert(!elem.isUniversalRootCapability) + elems += elem + if elem.isRootCapability then + rootAddedHandler() + newElemAddedHandler(elem) // assert(id != 5 || elems.size != 3, this) - (CompareResult.OK /: deps) { (r, dep) => - r.andAlso(dep.tryInclude(newElems, this)) - } - else // fail if variable is solved or given VarState is frozen - CompareResult.fail(this) + val res = (CompareResult.OK /: deps): (r, dep) => + r.andAlso(dep.tryInclude(elem, this)) + res.orElse: + elems -= elem + res.addToTrace(this) + + private def levelOK(elem: CaptureRef)(using Context): Boolean = + if elem.isRootCapability then !noUniversal + else elem match + case elem: TermRef if levelLimit.exists => + var sym = elem.symbol + if sym.isLevelOwner then sym = sym.owner + levelLimit.isContainedIn(sym.levelOwner) + case elem: ThisType if levelLimit.exists => + levelLimit.isContainedIn(elem.cls.levelOwner) + case ReachCapability(elem1) => + levelOK(elem1) + case _ => + true def addDependent(cs: CaptureSet)(using Context, VarState): CompareResult = if (cs eq this) || cs.isUniversal || isConst then @@ -428,13 +518,14 @@ object CaptureSet: deps += cs CompareResult.OK else - CompareResult.fail(this) + CompareResult.Fail(this :: Nil) override def disallowRootCapability(handler: () => Context ?=> Unit)(using Context): this.type = + noUniversal = true rootAddedHandler = handler super.disallowRootCapability(handler) - override def ensureWellformed(handler: List[CaptureRef] => (Context) ?=> Unit)(using Context): this.type = + override def ensureWellformed(handler: CaptureRef => (Context) ?=> Unit)(using Context): this.type = newElemAddedHandler = handler super.ensureWellformed(handler) @@ -445,8 +536,10 @@ object CaptureSet: * of this set. The universal set {cap} is a sound fallback. 
*/ final def upperApprox(origin: CaptureSet)(using Context): CaptureSet = - if computingApprox then universal - else if isConst then this + if isConst then + this + else if elems.exists(_.isRootCapability) || computingApprox then + universal else computingApprox = true try computeApprox(origin).ensuring(_.isConst) @@ -463,9 +556,10 @@ object CaptureSet: def solve()(using Context): Unit = if !isConst then val approx = upperApprox(empty) + .showing(i"solve $this = $result", capt) //println(i"solving var $this $approx ${approx.isConst} deps = ${deps.toList}") val newElems = approx.elems -- elems - if newElems.isEmpty || addNewElems(newElems, empty)(using ctx, VarState()).isOK then + if tryInclude(newElems, empty)(using ctx, VarState()).isOK then markSolved() /** Mark set as solved and propagate this info to all dependent sets */ @@ -474,11 +568,24 @@ object CaptureSet: deps.foreach(_.propagateSolved()) def withDescription(description: String): this.type = - this.description = - if this.description.isEmpty then description - else s"${this.description} and $description" + this.description = this.description.join(" and ", description) this + /** Adds variables to the ShownVars context property if that exists, which + * establishes a record of all variables printed in an error message. + * Returns variable `ids` under -Ycc-debug, and owner/nesting level info + * under -Yprint-level. + */ + override def optionalInfo(using Context): String = + for vars <- ctx.property(ShownVars) do vars += this + val debugInfo = + if !isConst && ctx.settings.YccDebug.value then ids else "" + val limitInfo = + if ctx.settings.YprintLevel.value && levelLimit.exists + then i"" + else "" + debugInfo ++ limitInfo + /** Used for diagnostics and debugging: A string that traces the creation * history of a variable by following source links. Each variable on the * path is characterized by the variable's id and the first letter of the @@ -489,23 +596,21 @@ object CaptureSet: val trail = this.match case dv: DerivedVar => dv.source.ids case _ => "" - s"$id${getClass.getSimpleName.nn.take(1)}$trail" - - /** Adds variables to the ShownVars context property if that exists, which - * establishes a record of all variables printed in an error message. - * Prints variables wih ids under -Ycc-debug. - */ - override def toText(printer: Printer): Text = inContext(printer.printerContext) { - for vars <- ctx.property(ShownVars) do vars += this - super.toText(printer) ~ (Str(ids) provided !isConst && ctx.settings.YccDebug.value) - } - + val descr = getClass.getSimpleName.nn.take(1) + s"$id$descr$trail" override def toString = s"Var$id$elems" end Var + /** Variables that represent refinements of class parameters can have the universal + * capture set, since they represent only what is the result of the constructor. + * Test case: Without that tweak, logger.scala would not compile. + */ + class RefiningVar(directOwner: Symbol)(using Context) extends Var(directOwner): + override def disallowRootCapability(handler: () => Context ?=> Unit)(using Context) = this + /** A variable that is derived from some other variable via a map or filter. */ - abstract class DerivedVar(initialElems: Refs)(using @constructorOnly ctx: Context) - extends Var(initialElems): + abstract class DerivedVar(owner: Symbol, initialElems: Refs)(using @constructorOnly ctx: Context) + extends Var(owner, initialElems): // For debugging: A trace where a set was created. 
Note that logically it would make more
    // sense to place this variable in Mapped, but that runs afoul of the initialization checker.
@@ -531,12 +636,12 @@ object CaptureSet:
    */
  class Mapped private[CaptureSet] (
    val source: Var, tm: TypeMap, variance: Int, initial: CaptureSet)(using @constructorOnly ctx: Context)
-  extends DerivedVar(initial.elems):
+  extends DerivedVar(source.levelLimit, initial.elems):
    addAsDependentTo(initial) // initial mappings could change by propagation

    private def mapIsIdempotent = tm.isInstanceOf[IdempotentCaptRefMap]

-    assert(ccAllowUnsoundMaps || mapIsIdempotent, tm.getClass)
+    assert(ccConfig.allowUnsoundMaps || mapIsIdempotent, tm.getClass)

    private def whereCreated(using Context): String =
      if stack == null then ""
@@ -544,39 +649,77 @@
         |Stack trace of variable creation:"
         |${stack.mkString("\n")}"""

-    override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult =
-      val added =
-        if origin eq source then // elements have to be mapped
-          mapRefs(newElems, tm, variance)
+    override def tryInclude(elem: CaptureRef, origin: CaptureSet)(using Context, VarState): CompareResult =
+      def propagate: CompareResult =
+        if (origin ne source) && (origin ne initial) && mapIsIdempotent then
+          // `tm` is idempotent, propagate back elems from image set.
+          // This is sound, since we know that for `r in newElems: tm(r) = r`, hence
+          // `r` is _one_ possible solution in `source` that would make an `r` appear in this set.
+          // It's not necessarily the only possible solution, so the scheme is incomplete.
+          source.tryInclude(elem, this)
+        else if ccConfig.allowUnsoundMaps && !mapIsIdempotent
+          && variance <= 0 && !origin.isConst && (origin ne initial) && (origin ne source)
+        then
+          // The map is neither a BiTypeMap nor an idempotent type map.
+          // In that case there's not much we can do.
+          // The scheme then does not propagate added elements back to source and rejects adding
+          // elements from variable sources in contra- and non-variant positions. In essence,
+          // we approximate types resulting from such maps by returning a possible super type
+          // from the actual type. But this is neither sound nor complete.
+          report.warning(em"trying to add $elem from unrecognized source $origin of mapped set $this$whereCreated")
+          CompareResult.Fail(this :: Nil)
        else
-          // elements are added by subcapturing propagation with this Mapped set
-          // as superset; no mapping is necessary or allowed.
-          Const(newElems)
-      super.addNewElems(added.elems, origin)
-        .andAlso {
-          if added.isConst then CompareResult.OK
-          else if added.asVar.recordDepsState() then { addAsDependentTo(added); CompareResult.OK }
-          else CompareResult.fail(this)
-        }
-        .andAlso {
-          if (origin ne source) && (origin ne initial) && mapIsIdempotent then
-            // `tm` is idempotent, propagate back elems from image set.
-            // This is sound, since we know that for `r in newElems: tm(r) = r`, hence
-            // `r` is _one_ possible solution in `source` that would make an `r` appear in this set.
-            // It's not necessarily the only possible solution, so the scheme is incomplete.
-            source.tryInclude(newElems, this)
-          else if !mapIsIdempotent && variance <= 0 && !origin.isConst && (origin ne initial) && (origin ne source) then
-            // The map is neither a BiTypeMap nor an idempotent type map.
-            // In that case there's no much we can do.
-            // The scheme then does not propagate added elements back to source and rejects adding
-            // elements from variable sources in contra- and non-variant positions.
In essence, - // we approximate types resulting from such maps by returning a possible super type - // from the actual type. But this is neither sound nor complete. - report.warning(em"trying to add elems ${CaptureSet(newElems)} from unrecognized source $origin of mapped set $this$whereCreated") - CompareResult.fail(this) - else - CompareResult.OK - } + CompareResult.OK + def propagateIf(cond: Boolean): CompareResult = + if cond then propagate else CompareResult.OK + + val mapped = extrapolateCaptureRef(elem, tm, variance) + def isFixpoint = + mapped.isConst && mapped.elems.size == 1 && mapped.elems.contains(elem) + + def addMapped = + val added = mapped.elems.filter(!accountsFor(_)) + addNewElems(added) + .andAlso: + if mapped.isConst then CompareResult.OK + else if mapped.asVar.recordDepsState() then { addAsDependentTo(mapped); CompareResult.OK } + else CompareResult.Fail(this :: Nil) + .andAlso: + propagateIf(!added.isEmpty) + + def failNoFixpoint = + val reason = + if variance <= 0 then i"the set's variance is $variance" + else i"$elem gets mapped to $mapped, which is not a supercapture." + report.warning(em"""trying to add $elem from unrecognized source $origin of mapped set $this$whereCreated + |The reference cannot be added since $reason""") + CompareResult.Fail(this :: Nil) + + if origin eq source then // elements have to be mapped + addMapped + .andAlso: + if mapped.isConst then CompareResult.OK + else if mapped.asVar.recordDepsState() then { addAsDependentTo(mapped); CompareResult.OK } + else CompareResult.Fail(this :: Nil) + else if !isFixpoint then + // We did not yet observe the !isFixpoint condition in our tests, but it's a + // theoretical possibility. In that case, it would be inconsistent to both + // add `elem` to the set and back-propagate it. But if `{elem} <:< tm(elem)` + // and the variance of the set is positive, we can soundly add `tm(ref)` to + // the set while back-propagating `ref` as before. Otherwise there's nothing + // obvious left to do except fail (which is always sound). + if variance > 0 + && elem.singletonCaptureSet.subCaptures(mapped, frozen = true).isOK then + // widen to fixpoint. mapped is known to be a fixpoint since tm is idempotent. + // The widening is sound, but loses completeness. 
+ addMapped + else + failNoFixpoint + else if accountsFor(elem) then + CompareResult.OK + else + addNewElem(elem).andAlso(propagate) + end tryInclude override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = if source eq origin then @@ -597,17 +740,20 @@ object CaptureSet: */ final class BiMapped private[CaptureSet] (val source: Var, bimap: BiTypeMap, initialElems: Refs)(using @constructorOnly ctx: Context) - extends DerivedVar(initialElems): + extends DerivedVar(source.levelLimit, initialElems): - override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = + override def tryInclude(elem: CaptureRef, origin: CaptureSet)(using Context, VarState): CompareResult = if origin eq source then - super.addNewElems(newElems.map(bimap.forward), origin) + val mappedElem = bimap.forward(elem) + if accountsFor(mappedElem) then CompareResult.OK + else addNewElem(mappedElem) + else if accountsFor(elem) then + CompareResult.OK else - super.addNewElems(newElems, origin) - .andAlso { - source.tryInclude(newElems.map(bimap.backward), this) - .showing(i"propagating new elems ${CaptureSet(newElems)} backward from $this to $source", capt) - } + source.tryInclude(bimap.backward(elem), this) + .showing(i"propagating new elem $elem backward from $this to $source = $result", capt) + .andAlso: + addNewElem(elem) /** For a BiTypeMap, supertypes of the mapped type also constrain * the source via the inverse type mapping and vice versa. That is, if @@ -618,7 +764,7 @@ object CaptureSet: */ override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = val supApprox = super.computeApprox(this) - if source eq origin then supApprox.map(bimap.inverseTypeMap) + if source eq origin then supApprox.map(bimap.inverse) else source.upperApprox(this).map(bimap) ** supApprox override def toString = s"BiMapped$id($source, elems = $elems)" @@ -627,20 +773,19 @@ object CaptureSet: /** A variable with elements given at any time as { x <- source.elems | p(x) } */ class Filtered private[CaptureSet] (val source: Var, p: Context ?=> CaptureRef => Boolean)(using @constructorOnly ctx: Context) - extends DerivedVar(source.elems.filter(p)): + extends DerivedVar(source.levelLimit, source.elems.filter(p)): - override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = - val filtered = newElems.filter(p) - if origin eq source then - super.addNewElems(filtered, origin) + override def tryInclude(elem: CaptureRef, origin: CaptureSet)(using Context, VarState): CompareResult = + if accountsFor(elem) then + CompareResult.OK + else if origin eq source then + if p(elem) then addNewElem(elem) + else CompareResult.OK else // Filtered elements have to be back-propagated to source. // Elements that don't satisfy `p` are not allowed. 
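// ---------------------------------------------------------------------------
// Illustrative sketch, not from the patch itself: a runnable toy of the
// propagation discipline used by derived sets such as Filtered here. An
// element added to the view must satisfy the predicate and is back-propagated
// to the source; elements failing the predicate are rejected, mirroring
// CompareResult.Fail. ToySet and all other names are invented.
// ---------------------------------------------------------------------------
object FilteredToy:
  class ToySet:
    var elems: Set[String] = Set.empty
    var dependents: List[ToySet] = Nil
    def include(x: String): Boolean =
      elems(x) || {                       // already accounted for?
        elems += x
        dependents.forall(_.include(x))   // propagate forward to dependent sets
      }

  class FilteredView(source: ToySet, p: String => Boolean) extends ToySet:
    source.dependents ::= this
    override def include(x: String): Boolean =
      elems(x) || (p(x) && { elems += x; source.include(x) }) // back-propagate

  @main def filteredToyDemo(): Unit =
    val src = ToySet()
    val view = FilteredView(src, _.startsWith("x"))
    println(view.include("x1")) // true: lands in both the view and the source
    println(view.include("y1")) // false: rejected by the filter
    println(src.include("x2"))  // true: flows forward into the view too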
- super.addNewElems(newElems, origin) - .andAlso { - if filtered.size == newElems.size then source.tryInclude(newElems, this) - else CompareResult.fail(this) - } + if p(elem) then source.tryInclude(elem, this) + else CompareResult.Fail(this :: Nil) override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = if source eq origin then @@ -658,20 +803,19 @@ object CaptureSet: extends Filtered(source, !other.accountsFor(_)) class Intersected(cs1: CaptureSet, cs2: CaptureSet)(using Context) - extends Var(elemIntersection(cs1, cs2)): + extends Var(cs1.levelLimit.minNested(cs2.levelLimit), elemIntersection(cs1, cs2)): addAsDependentTo(cs1) addAsDependentTo(cs2) deps += cs1 deps += cs2 - override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = - val added = - if origin eq cs1 then newElems.filter(cs2.accountsFor) - else if origin eq cs2 then newElems.filter(cs1.accountsFor) - else newElems - // If origin is not cs1 or cs2, then newElems will be propagated to - // cs1, cs2 since they are in deps. - super.addNewElems(added, origin) + override def tryInclude(elem: CaptureRef, origin: CaptureSet)(using Context, VarState): CompareResult = + val present = + if origin eq cs1 then cs2.accountsFor(elem) + else if origin eq cs2 then cs1.accountsFor(elem) + else true + if present && !accountsFor(elem) then addNewElem(elem) + else CompareResult.OK override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = if (origin eq cs1) || (origin eq cs2) then @@ -703,6 +847,7 @@ object CaptureSet: upper.isAlwaysEmpty || upper.isConst && upper.elems.size == 1 && upper.elems.contains(r1) if variance > 0 || isExact then upper else if variance < 0 then CaptureSet.empty + else if ctx.mode.is(Mode.Printing) then upper else assert(false, i"trying to add $upper from $r via ${tm.getClass} in a non-variant setting") /** Apply `f` to each element in `xs`, and join result sets with `++` */ @@ -736,25 +881,47 @@ object CaptureSet: /** A TypeMap that is the identity on capture references */ trait IdentityCaptRefMap extends TypeMap - type CompareResult = CompareResult.TYPE - - /** The result of subcapturing comparisons is an opaque type CompareResult.TYPE. - * This is either OK, indicating success, or - * another capture set, indicating failure. The failure capture set - * is the one that did not allow propagaton of elements into it. 
-  */
-  object CompareResult:
-    opaque type TYPE = CaptureSet
-    val OK: TYPE = Const(emptySet)
-    def fail(cs: CaptureSet): TYPE = cs
-
-    extension (result: TYPE)
-      /** The result is OK */
-      def isOK: Boolean = result eq OK
-      /** If not isOK, the blocking capture set */
-      def blocking: CaptureSet = result
-      inline def andAlso(op: Context ?=> TYPE)(using Context): TYPE = if result.isOK then op else result
-      def show(using Context): String = if result.isOK then "OK" else i"$result"
+  enum CompareResult extends Showable:
+    case OK
+    case Fail(trace: List[CaptureSet])
+    case LevelError(cs: CaptureSet, elem: CaptureRef)
+
+    override def toText(printer: Printer): Text =
+      inContext(printer.printerContext):
+        this match
+          case OK => Str("OK")
+          case Fail(trace) =>
+            if ctx.settings.YccDebug.value then printer.toText(trace, ", ")
+            else blocking.show
+          case LevelError(cs: CaptureSet, elem: CaptureRef) =>
+            Str(i"($elem at wrong level for $cs in ${cs.levelLimit})")
+
+    /** The result is OK */
+    def isOK: Boolean = this == OK
+
+    /** If not isOK, the blocking capture set */
+    def blocking: CaptureSet = (this: @unchecked) match
+      case Fail(cs) => cs.last
+      case LevelError(cs, _) => cs
+
+    /** This result in an `Option` if it is a level error, `None` otherwise */
+    def levelError: Option[LevelError] = this match
+      case result: LevelError => Some(result)
+      case _ => None
+
+    inline def andAlso(op: Context ?=> CompareResult)(using Context): CompareResult =
+      if isOK then op else this
+
+    inline def orElse(op: Context ?=> CompareResult)(using Context): CompareResult =
+      if isOK then this
+      else
+        val alt = op
+        if alt.isOK then alt
+        else this
+
+    inline def addToTrace(cs: CaptureSet): CompareResult = this match
+      case Fail(trace) => Fail(cs :: trace)
+      case _ => this
  end CompareResult

  /** A VarState serves as a snapshot mechanism that can undo
@@ -842,39 +1009,61 @@
      css.foldLeft(empty)(_ ++ _)
  */

-  /** The capture set of the type underlying a CaptureRef */
+  /** The capture set of the type underlying the reference `ref` */
  def ofInfo(ref: CaptureRef)(using Context): CaptureSet = ref match
    case ref: TermRef if ref.isRootCapability =>
      ref.singletonCaptureSet
-    case _ => ofType(ref.underlying)
+    case ReachCapability(ref1) => deepCaptureSet(ref1.widen)
+      .showing(i"Deep capture set of $ref: ${ref1.widen} = $result", capt)
+    case _ => ofType(ref.underlying, followResult = true)

  /** Capture set of a type */
-  def ofType(tp: Type)(using Context): CaptureSet =
-    def recur(tp: Type): CaptureSet = tp.dealias match
-      case tp: TermRef =>
-        tp.captureSet
-      case tp: TermParamRef =>
-        tp.captureSet
-      case _: TypeRef =>
-        if tp.classSymbol.hasAnnotation(defn.CapabilityAnnot) then universal else empty
-      case _: TypeParamRef =>
-        empty
-      case CapturingType(parent, refs) =>
-        recur(parent) ++ refs
-      case AppliedType(tycon, args) =>
-        val cs = recur(tycon)
-        tycon.typeParams match
-          case tparams @ (LambdaParam(tl, _) :: _) => cs.substParams(tl, args)
-          case _ => cs
-      case tp: TypeProxy =>
-        recur(tp.underlying)
-      case AndType(tp1, tp2) =>
-        recur(tp1) ** recur(tp2)
-      case OrType(tp1, tp2) =>
-        recur(tp1) ++ recur(tp2)
-      case _ =>
-        empty
+  def ofType(tp: Type, followResult: Boolean)(using Context): CaptureSet =
+    def recur(tp: Type): CaptureSet = trace(i"ofType $tp, ${tp.getClass} $followResult", show = true):
+      tp.dealias match
+        case tp: TermRef =>
+          tp.captureSet
+        case tp: TermParamRef =>
+          tp.captureSet
+        case tp: TypeRef =>
+          if tp.typeSymbol == defn.Caps_Cap then universal else empty
+        case _: TypeParamRef =>
+          empty
+        case
CapturingType(parent, refs) => + recur(parent) ++ refs + case tpd @ defn.RefinedFunctionOf(rinfo: MethodType) if followResult => + ofType(tpd.parent, followResult = false) // pick up capture set from parent type + ++ (recur(rinfo.resType) // add capture set of result + -- CaptureSet(rinfo.paramRefs.filter(_.isTracked)*)) // but disregard bound parameters + case tpd @ AppliedType(tycon, args) => + if followResult && defn.isNonRefinedFunction(tpd) then + recur(args.last) + // must be (pure) FunctionN type since ImpureFunctions have already + // been eliminated in selector's dealias. Use capture set of result. + else + val cs = recur(tycon) + tycon.typeParams match + case tparams @ (LambdaParam(tl, _) :: _) => cs.substParams(tl, args) + case _ => cs + case tp: TypeProxy => + recur(tp.underlying) + case AndType(tp1, tp2) => + recur(tp1) ** recur(tp2) + case OrType(tp1, tp2) => + recur(tp1) ++ recur(tp2) + case _ => + empty recur(tp) - .showing(i"capture set of $tp = $result", capt) + .showing(i"capture set of $tp = $result", captDebug) + + private def deepCaptureSet(tp: Type)(using Context): CaptureSet = + val collect = new TypeAccumulator[CaptureSet]: + def apply(cs: CaptureSet, t: Type) = t.dealias match + case t @ CapturingType(p, cs1) => + val cs2 = apply(cs, p) + if variance > 0 then cs2 ++ cs1 else cs2 + case _ => + foldOver(cs, t) + collect(CaptureSet.empty, tp) private val ShownVars: Property.Key[mutable.Set[Var]] = Property.Key() @@ -908,4 +1097,23 @@ object CaptureSet: println(i" ${cv.show.padTo(20, ' ')} :: ${cv.deps.toList}%, %") } else op + + def levelErrors: Addenda = new Addenda: + override def toAdd(using Context) = + for CompareResult.LevelError(cs, ref) <- ccState.levelError.toList yield + ccState.levelError = None + if ref.isRootCapability then + i""" + | + |Note that the universal capability `cap` + |cannot be included in capture set $cs""" + else + val levelStr = ref match + case ref: TermRef => i", defined in ${ref.symbol.maybeOwner}" + case _ => "" + i""" + | + |Note that reference ${ref}$levelStr + |cannot be included in outer capture set $cs which is associated with ${cs.levelLimit}""" + end CaptureSet diff --git a/compiler/src/dotty/tools/dotc/cc/CapturingType.scala b/compiler/src/dotty/tools/dotc/cc/CapturingType.scala index a7c283f4cc3b..2a5cb91b45d3 100644 --- a/compiler/src/dotty/tools/dotc/cc/CapturingType.scala +++ b/compiler/src/dotty/tools/dotc/cc/CapturingType.scala @@ -4,6 +4,7 @@ package cc import core.* import Types.*, Symbols.*, Contexts.* +import Decorators.i /** A (possibly boxed) capturing type. This is internally represented as an annotated type with a @retains * or @retainsByName annotation, but the extractor will succeed only at phase CheckCaptures. @@ -25,58 +26,62 @@ import Types.*, Symbols.*, Contexts.* */ object CapturingType: - /** Smart constructor that drops empty capture sets and fuses compatible capturiong types. + /** Smart constructor that + * - drops empty capture sets + * - drops a capability class expansion if it is further refined with another capturing type + * - fuses compatible capturing types. * An outer type capturing type A can be fused with an inner capturing type B if their * boxing status is the same or if A is boxed. 
*/
  def apply(parent: Type, refs: CaptureSet, boxed: Boolean = false)(using Context): Type =
    if refs.isAlwaysEmpty then parent
    else parent match
+      case parent @ CapturingType(parent1, refs1) if refs1 eq defn.expandedUniversalSet =>
+        apply(parent1, refs, boxed)
      case parent @ CapturingType(parent1, refs1) if boxed || !parent.isBoxed =>
        apply(parent1, refs ++ refs1, boxed)
      case _ =>
        AnnotatedType(parent, CaptureAnnotation(refs, boxed)(defn.RetainsAnnot))

-  /** An extractor that succeeds only during CheckCapturingPhase. Boxing statis is
-   *  returned separately by CaptureOps.isBoxed.
+  /** An extractor for CapturingTypes. Capturing types are recognized if
+   *  - the annotation is a CaptureAnnotation and we are not past CheckCapturingPhase, or
+   *  - the annotation is a @retains and we are in CheckCapturingPhase,
+   *  but not if the IgnoreCaptures mode is set.
+   *  Boxing status is returned separately by CaptureOps.isBoxed.
    */
  def unapply(tp: AnnotatedType)(using Context): Option[(Type, CaptureSet)] =
-    if ctx.phase == Phases.checkCapturesPhase
-      && tp.annot.symbol == defn.RetainsAnnot
-      && !ctx.mode.is(Mode.IgnoreCaptures)
-    then
-      EventuallyCapturingType.unapply(tp)
-    else None
+    if ctx.mode.is(Mode.IgnoreCaptures) then None
+    else decomposeCapturingType(tp)
+
+  /** Decompose `tp` as a capturing type without taking IgnoreCaptures into account */
+  def decomposeCapturingType(tp: Type)(using Context): Option[(Type, CaptureSet)] = tp match
+    case AnnotatedType(parent, ann: CaptureAnnotation)
+    if isCaptureCheckingOrSetup =>
+      Some((parent, ann.refs))
+    case AnnotatedType(parent, ann)
+    if ann.symbol == defn.RetainsAnnot && isCaptureChecking =>
+      // There are some circumstances where we cannot map annotated types
+      // with retains annotations to capturing types, so this second recognizer
+      // path still has to exist. One example is when checking capture sets
+      // of dependent function type results for well-formedness. E.g. in
+      // `(x: C^{f}) -> () ->{x} Unit` we need to check that the capture set of
+      // `x` is not empty. We use the original, untransformed type for that
+      // since the transformed type already normalizes capture sets which would
+      // drop subsumed references. But the original type refers to the untransformed
+      // type `C^{f}` which does not have a capture annotation yet. The transformed
+      // type would be in a copy of the dependent function type, but it is useless
+      // since we need to check the original reference.
+      try Some((parent, ann.tree.toCaptureSet))
+      catch case ex: IllegalCaptureRef => None
+    case _ =>
+      None

  /** Check whether a type is uncachable when computing `baseType`.
-   *  - Avoid caching all the types during the setup phase, since at that point
-   *    the capture set variables are not fully installed yet.
-   *  - Avoid caching capturing types when IgnoreCaptures mode is set, since the
-   *    capture sets may be thrown away in the computed base type.
-   */
+   *  We avoid caching capturing types when IgnoreCaptures mode is set.
+   */
  def isUncachable(tp: Type)(using Context): Boolean =
-    ctx.phase == Phases.checkCapturesPhase &&
-      (Setup.isDuringSetup || ctx.mode.is(Mode.IgnoreCaptures) && tp.isEventuallyCapturingType)
+    ctx.mode.is(Mode.IgnoreCaptures) && decomposeCapturingType(tp).isDefined

end CapturingType

-/** An extractor for types that will be capturing types at phase CheckCaptures. Also
- *  included are types that indicate captures on enclosing call-by-name parameters
- *  before phase ElimByName.
- */ -object EventuallyCapturingType: - - def unapply(tp: AnnotatedType)(using Context): Option[(Type, CaptureSet)] = - val sym = tp.annot.symbol - if sym == defn.RetainsAnnot || sym == defn.RetainsByNameAnnot then - tp.annot match - case ann: CaptureAnnotation => - Some((tp.parent, ann.refs)) - case ann => - try Some((tp.parent, ann.tree.toCaptureSet)) - catch case ex: IllegalCaptureRef => None - else None - -end EventuallyCapturingType - diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 380b6ce5fb81..d635096b2318 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -6,56 +6,45 @@ import core.* import Phases.*, DenotTransformers.*, SymDenotations.* import Contexts.*, Names.*, Flags.*, Symbols.*, Decorators.* import Types.*, StdNames.*, Denotations.* -import config.Printers.{capt, recheckr} +import config.Printers.{capt, recheckr, noPrinter} import config.{Config, Feature} import ast.{tpd, untpd, Trees} import Trees.* import typer.RefChecks.{checkAllOverrides, checkSelfAgainstParents, OverridingPairsChecker} import typer.Checking.{checkBounds, checkAppliedTypesIn} -import util.{SimpleIdentitySet, EqHashMap, SrcPos} -import transform.SymUtils.* -import transform.{Recheck, PreRecheck} +import typer.ErrorReporting.{Addenda, err} +import typer.ProtoTypes.{AnySelectionProto, LhsProto} +import util.{SimpleIdentitySet, EqHashMap, EqHashSet, SrcPos, Property} +import transform.{Recheck, PreRecheck, CapturedVars} import Recheck.* import scala.collection.mutable -import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap} +import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap, CompareResult} import StdNames.nme -import NameKinds.DefaultGetterName +import NameKinds.{DefaultGetterName, WildcardParamName} import reporting.trace /** The capture checker */ object CheckCaptures: import ast.tpd.* - class Pre extends PreRecheck, SymTransformer: - - override def isEnabled(using Context) = true - - /** Reset `private` flags of parameter accessors so that we can refine them - * in Setup if they have non-empty capture sets. Special handling of some - * symbols defined for case classes. - */ - def transformSym(sym: SymDenotation)(using Context): SymDenotation = - if sym.isAllOf(PrivateParamAccessor) && !sym.hasAnnotation(defn.ConstructorOnlyAnnot) then - sym.copySymDenotation(initFlags = sym.flags &~ Private | Recheck.ResetPrivate) - else if Synthetics.needsTransform(sym) then - Synthetics.transformToCC(sym) - else - sym - end Pre + enum EnvKind: + case Regular // normal case + case NestedInOwner // environment is a temporary one nested in the owner's environment, + // and does not have a different actual owner symbol + // (this happens when doing box adaptation). + case ClosureResult // environment is for the result of a closure + case Boxed // environment is inside a box (in which case references are not counted) /** A class describing environments. - * @param owner the current owner - * @param nestedInOwner true if the environment is a temporary one nested in the owner's environment, - * and does not have a different actual owner symbol (this happens when doing box adaptation). 
-   *  @param captured      the caputure set containing all references to tracked free variables outside of boxes
-   *  @param isBoxed       true if the environment is inside a box (in which case references are not counted)
-   *  @param outer0        the next enclosing environment
+   *  @param owner         the current owner
+   *  @param kind          the environment's kind
+   *  @param captured      the capture set containing all references to tracked free variables outside of boxes
+   *  @param outer0        the next enclosing environment
    */
  case class Env(
      owner: Symbol,
-      nestedInOwner: Boolean,
+      kind: EnvKind,
      captured: CaptureSet,
-      isBoxed: Boolean,
      outer0: Env | Null):

    def outer = outer0.nn
@@ -63,17 +52,24 @@ object CheckCaptures:
    def isOutermost = outer0 == null

    /** If an environment is open it tracks free references */
-    def isOpen = !captured.isAlwaysEmpty && !isBoxed
+    def isOpen = !captured.isAlwaysEmpty && kind != EnvKind.Boxed
+
+    def outersIterator: Iterator[Env] = new:
+      private var cur = Env.this
+      def hasNext = !cur.isOutermost
+      def next(): Env =
+        val res = cur
+        cur = cur.outer
+        res
  end Env

  /** Similar to normal substParams, but this is an approximating type map that
   *  maps parameters in contravariant capture sets to the empty set.
-   *  TODO: check what happens with non-variant.
   */
  final class SubstParamsMap(from: BindingType, to: List[Type])(using Context)
  extends ApproximatingTypeMap, IdempotentCaptRefMap:
    /** This SubstParamsMap is exact if `to` only contains `CaptureRef`s. */
-    private val isExactSubstitution: Boolean = to.forall(_.isInstanceOf[CaptureRef])
+    private val isExactSubstitution: Boolean = to.forall(_.isTrackableRef)

    /** As long as this substitution is exact, there is no need to create `Range`s when mapping invariant positions. */
    override protected def needsRangeIfInvariant(refs: CaptureSet): Boolean = !isExactSubstitution
@@ -89,65 +85,112 @@
          tp
      case _ =>
        mapOver(tp)
+  end SubstParamsMap
+
+  final class SubstParamsBiMap(from: LambdaType, to: List[Type])(using Context)
+  extends BiTypeMap:
+    thisMap =>
+
+    def apply(tp: Type): Type = tp match
+      case tp: ParamRef =>
+        if tp.binder == from then to(tp.paramNum) else tp
+      case tp: NamedType =>
+        if tp.prefix `eq` NoPrefix then tp
+        else tp.derivedSelect(apply(tp.prefix))
+      case _: ThisType =>
+        tp
+      case _ =>
+        mapOver(tp)
+
+    lazy val inverse = new BiTypeMap:
+      def apply(tp: Type): Type = tp match
+        case tp: NamedType =>
+          var idx = 0
+          var to1 = to
+          while idx < to.length && (tp ne to(idx)) do
+            idx += 1
+            to1 = to1.tail
+          if idx < to.length then from.paramRefs(idx)
+          else if tp.prefix `eq` NoPrefix then tp
+          else tp.derivedSelect(apply(tp.prefix))
+        case _: ThisType =>
+          tp
+        case _ =>
+          mapOver(tp)
+      def inverse = thisMap
+  end SubstParamsBiMap

  /** Check that a @retains annotation only mentions references that can be tracked.
   *  This check is performed at Typer.
*/ - def checkWellformed(ann: Tree)(using Context): Unit = - for elem <- retainedElems(ann) do - elem.tpe match + def checkWellformed(parent: Tree, ann: Tree)(using Context): Unit = + parent.tpe match + case _: SingletonType => + report.error(em"Singleton type $parent cannot have capture set", parent.srcPos) + case _ => + for elem <- ann.retainedElems do + val elem1 = elem match + case ReachCapabilityApply(arg) => arg + case _ => elem + elem1.tpe match case ref: CaptureRef => - if !ref.canBeTracked then + if !ref.isTrackableRef then report.error(em"$elem cannot be tracked since it is not a parameter or local value", elem.srcPos) case tpe => report.error(em"$elem: $tpe is not a legal element of a capture set", elem.srcPos) - /** If `tp` is a capturing type, check that all references it mentions have non-empty - * capture sets. Also: warn about redundant capture annotations. - * This check is performed after capture sets are computed in phase cc. - */ - def checkWellformedPost(tp: Type, pos: SrcPos)(using Context): Unit = tp match - case CapturingType(parent, refs) => - for ref <- refs.elems do - if ref.captureSetOfInfo.elems.isEmpty then - report.error(em"$ref cannot be tracked since its capture set is empty", pos) - else if parent.captureSet.accountsFor(ref) then - report.warning(em"redundant capture: $parent already accounts for $ref", pos) - case _ => - - /** Warn if `ann`, which is a tree of a @retains annotation, defines some elements that - * are already accounted for by other elements of the same annotation. - * Note: We need to perform the check on the original annotation rather than its - * capture set since the conversion to a capture set already eliminates redundant elements. + /** Report an error if some part of `tp` contains the root capability in its capture set + * or if it refers to an unsealed type parameter that could possibly be instantiated with + * cap in a way that's visible at the type. */ - def warnIfRedundantCaptureSet(ann: Tree)(using Context): Unit = - // The lists `elems(i) :: prev.reverse :: elems(0),...,elems(i-1),elems(i+1),elems(n)` - // where `n == elems.length-1`, i <- 0..n`. - // I.e. - // choices(Nil, elems) = [[elems(i), elems(0), ..., elems(i-1), elems(i+1), .... elems(n)] | i <- 0..n] - def choices(prev: List[Tree], elems: List[Tree]): List[List[Tree]] = elems match - case Nil => Nil - case elem :: elems => - List(elem :: (prev reverse_::: elems)) ++ choices(elem :: prev, elems) - for case first :: others <- choices(Nil, retainedElems(ann)) do - val firstRef = first.toCaptureRef - val remaining = CaptureSet(others.map(_.toCaptureRef)*) - if remaining.accountsFor(firstRef) then - report.warning(em"redundant capture: $remaining already accounts for $firstRef", ann.srcPos) - - def disallowRootCapabilitiesIn(tp: Type, what: String, have: String, addendum: String, pos: SrcPos)(using Context) = + private def disallowRootCapabilitiesIn(tp: Type, carrier: Symbol, what: String, have: String, addendum: String, pos: SrcPos)(using Context) = val check = new TypeTraverser: + + private val seen = new EqHashSet[TypeRef] + + /** Check that there is at least one method containing carrier and defined + * in the scope of tparam. E.g. this is OK: + * def f[T] = { ... var x: T ... 
} + * So is this: + * class C[T] { def f() = { class D { var x: T }}} + * But this is not OK: + * class C[T] { object o { var x: T }} + */ + extension (tparam: Symbol) def isParametricIn(carrier: Symbol): Boolean = + carrier.exists && { + val encl = carrier.owner.enclosingMethodOrClass + if encl.isClass then tparam.isParametricIn(encl) + else + def recur(encl: Symbol): Boolean = + if tparam.owner == encl then true + else if encl.isStatic || !encl.exists then false + else recur(encl.owner.enclosingMethodOrClass) + recur(encl) + } + def traverse(t: Type) = - if variance >= 0 then - t.captureSet.disallowRootCapability: () => - def part = if t eq tp then "" else i"the part $t of " - report.error( - em"""$what cannot $have $tp since - |${part}that type captures the root capability `cap`. - |$addendum""", - pos) - traverseChildren(t) + t.dealiasKeepAnnots match + case t: TypeRef => + if !seen.contains(t) then + seen += t + traverseChildren(t) + case AnnotatedType(_, ann) if ann.symbol == defn.UncheckedCapturesAnnot => + () + case t => + if variance >= 0 then + t.captureSet.disallowRootCapability: () => + def part = if t eq tp then "" else i"the part $t of " + report.error( + em"""$what cannot $have $tp since + |${part}that type captures the root capability `cap`. + |$addendum""", + pos) + traverseChildren(t) check.traverse(tp) + end disallowRootCapabilitiesIn + + /** Attachment key for bodies of closures, provided they are values */ + val ClosureBodyValue = Property.Key[Unit] class CheckCaptures extends Recheck, SymTransformer: thisPhase => @@ -156,7 +199,8 @@ class CheckCaptures extends Recheck, SymTransformer: import CheckCaptures.* def phaseName: String = "cc" - override def isEnabled(using Context) = true + + override def isRunnable(using Context) = super.isRunnable && Feature.ccEnabledSomewhere def newRechecker()(using Context) = CaptureChecker(ctx) @@ -164,12 +208,9 @@ class CheckCaptures extends Recheck, SymTransformer: if Feature.ccEnabled then super.run - override def transformSym(sym: SymDenotation)(using Context): SymDenotation = - if Synthetics.needsTransform(sym) then Synthetics.transformFromCC(sym) - else super.transformSym(sym) + val ccState = new CCState class CaptureChecker(ictx: Context) extends Rechecker(ictx): - import ast.tpd.* override def keepType(tree: Tree) = super.keepType(tree) @@ -180,20 +221,16 @@ class CheckCaptures extends Recheck, SymTransformer: */ private def interpolator(startingVariance: Int = 1)(using Context) = new TypeTraverser: variance = startingVariance - override def traverse(t: Type) = - t match - case CapturingType(parent, refs: CaptureSet.Var) => - if variance < 0 then - capt.println(i"solving $t") - refs.solve() - traverse(parent) - case t @ RefinedType(_, nme.apply, rinfo) if defn.isFunctionOrPolyType(t) => - traverse(rinfo) - case tp: TypeVar => - case tp: TypeRef => - traverse(tp.prefix) - case _ => - traverseChildren(t) + override def traverse(t: Type) = t match + case t @ CapturingType(parent, refs) => + refs match + case refs: CaptureSet.Var if variance < 0 => refs.solve() + case _ => + traverse(parent) + case t @ defn.RefinedFunctionOf(rinfo) => + traverse(rinfo) + case _ => + traverseChildren(t) /** If `tpt` is an inferred type, interpolate capture set variables appearing contra- * variantly in it. 
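// ---------------------------------------------------------------------------
// Illustrative sketch, not from the patch itself: a runnable model of the Env
// chain whose use is refined in the following hunk. A free reference is
// recorded in every open enclosing environment up to the scope that defines
// it; Boxed environments do not count references. All names are invented.
// ---------------------------------------------------------------------------
import scala.collection.mutable

object EnvToy:
  enum Kind { case Regular, Boxed }
  final case class Env(owner: String, kind: Kind, captured: mutable.Set[String], outer: Option[Env])

  def markFree(env: Option[Env], ref: String, definedIn: String): Unit = env match
    case Some(e) if e.owner != definedIn =>
      if e.kind != Kind.Boxed then e.captured += ref   // boxed envs ignore references
      markFree(e.outer, ref, definedIn)
    case _ => ()

  @main def envToyDemo(): Unit =
    val method = Env("method", Kind.Regular, mutable.Set.empty[String], None)
    val lambda = Env("lambda", Kind.Regular, mutable.Set.empty[String], Some(method))
    markFree(Some(lambda), "fs", definedIn = "toplevel")
    println((lambda.captured, method.captured)) // both record the free reference "fs"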
@@ -207,46 +244,82 @@ class CheckCaptures extends Recheck, SymTransformer:
    def assertSub(cs1: CaptureSet, cs2: CaptureSet)(using Context) =
      assert(cs1.subCaptures(cs2, frozen = false).isOK, i"$cs1 is not a subset of $cs2")

-    /** Check subcapturing `{elem} <: cs`, report error on failure */
-    def checkElem(elem: CaptureRef, cs: CaptureSet, pos: SrcPos)(using Context) =
-      val res = elem.singletonCaptureSet.subCaptures(cs, frozen = false)
+    def checkOK(res: CompareResult, prefix: => String, pos: SrcPos, provenance: => String = "")(using Context): Unit =
      if !res.isOK then
-        report.error(em"$elem cannot be referenced here; it is not included in the allowed capture set ${res.blocking}", pos)
+        def toAdd: String = CaptureSet.levelErrors.toAdd.mkString
+        def descr: String =
+          val d = res.blocking.description
+          if d.isEmpty then provenance else ""
+        report.error(em"$prefix included in the allowed capture set ${res.blocking}$descr$toAdd", pos)
+
+    /** Check subcapturing `{elem} <: cs`, report error on failure */
+    def checkElem(elem: CaptureRef, cs: CaptureSet, pos: SrcPos, provenance: => String = "")(using Context) =
+      checkOK(
+          elem.singletonCaptureSet.subCaptures(cs, frozen = false),
+          i"$elem cannot be referenced here; it is not",
+          pos, provenance)

    /** Check subcapturing `cs1 <: cs2`, report error on failure */
-    def checkSubset(cs1: CaptureSet, cs2: CaptureSet, pos: SrcPos)(using Context) =
-      val res = cs1.subCaptures(cs2, frozen = false)
-      if !res.isOK then
-        def header =
-          if cs1.elems.size == 1 then i"reference ${cs1.elems.toList}%, % is not"
-          else i"references $cs1 are not all"
-        report.error(em"$header included in allowed capture set ${res.blocking}", pos)
+    def checkSubset(cs1: CaptureSet, cs2: CaptureSet, pos: SrcPos,
+        provenance: => String = "", cs1description: String = "")(using Context) =
+      checkOK(
+          cs1.subCaptures(cs2, frozen = false),
+          if cs1.elems.size == 1 then i"reference ${cs1.elems.toList.head}$cs1description is not"
+          else i"references $cs1$cs1description are not all",
+          pos, provenance)

    /** The current environment */
-    private var curEnv: Env = Env(NoSymbol, nestedInOwner = false, CaptureSet.empty, isBoxed = false, null)
+    private val rootEnv: Env = inContext(ictx):
+      Env(defn.RootClass, EnvKind.Regular, CaptureSet.empty, null)
+    private var curEnv = rootEnv
+
+    /** Currently checked closures and their expected types, used for error reporting */
+    private var openClosures: List[(Symbol, Type)] = Nil

    private val myCapturedVars: util.EqHashMap[Symbol, CaptureSet] = EqHashMap()

+    /** A list of actions to perform at postCheck. The reason to defer these actions
+     *  is that it is sometimes better for type inference to not constrain too early
+     *  with a checkConformsExpr.
+     */
+    private var todoAtPostCheck = new mutable.ListBuffer[() => Unit]
+
    /** If `sym` is a class or method nested inside a term, a capture set variable representing
     *  the captured variables of the environment associated with `sym`.
     */
-    def capturedVars(sym: Symbol)(using Context) =
+    def capturedVars(sym: Symbol)(using Context): CaptureSet =
      myCapturedVars.getOrElseUpdate(sym,
-        if sym.ownersIterator.exists(_.isTerm) then CaptureSet.Var()
+        if sym.ownersIterator.exists(_.isTerm)
+        then CaptureSet.Var(sym.owner)
        else CaptureSet.empty)

-    /** For all nested environments up to `limit` perform `op` */
+    /** For all nested environments up to `limit` or a closed environment perform `op`,
+     *  but skip environments directly enclosing environments of kind ClosureResult.
+ */ def forallOuterEnvsUpTo(limit: Symbol)(op: Env => Unit)(using Context): Unit = - def recur(env: Env): Unit = + def recur(env: Env, skip: Boolean): Unit = if env.isOpen && env.owner != limit then - op(env) + if !skip then op(env) if !env.isOutermost then var nextEnv = env.outer if env.owner.isConstructor then if nextEnv.owner != limit && !nextEnv.isOutermost then - recur(nextEnv.outer) - else recur(nextEnv) - recur(curEnv) + nextEnv = nextEnv.outer + recur(nextEnv, skip = env.kind == EnvKind.ClosureResult) + recur(curEnv, skip = false) + + /** A description where this environment comes from */ + private def provenance(env: Env)(using Context): String = + val owner = env.owner + if owner.isAnonymousFunction then + val expected = openClosures + .find(_._1 == owner) + .map(_._2) + .getOrElse(owner.info.toFunctionType()) + i"\nof an enclosing function literal with expected type $expected" + else + i"\nof the enclosing ${owner.showLocated}" + /** Include `sym` in the capture sets of all enclosing environments nested in the * the environment in which `sym` is defined. @@ -255,10 +328,9 @@ class CheckCaptures extends Recheck, SymTransformer: if sym.exists then val ref = sym.termRef if ref.isTracked then - forallOuterEnvsUpTo(sym.enclosure) { env => + forallOuterEnvsUpTo(sym.enclosure): env => capt.println(i"Mark $sym with cs ${ref.captureSet} free in ${env.owner}") - checkElem(ref, env.captured, pos) - } + checkElem(ref, env.captured, pos, provenance(env)) /** Make sure (projected) `cs` is a subset of the capture sets of all enclosing * environments. At each stage, only include references from `cs` that are outside @@ -266,34 +338,35 @@ class CheckCaptures extends Recheck, SymTransformer: */ def markFree(cs: CaptureSet, pos: SrcPos)(using Context): Unit = if !cs.isAlwaysEmpty then - forallOuterEnvsUpTo(ctx.owner.topLevelClass) { env => - val included = cs.filter { - case ref: TermRef => - (env.nestedInOwner || env.owner != ref.symbol.owner) - && env.owner.isContainedIn(ref.symbol.owner) - case ref: ThisType => - (env.nestedInOwner || env.owner != ref.cls) - && env.owner.isContainedIn(ref.cls) + forallOuterEnvsUpTo(ctx.owner.topLevelClass): env => + def isVisibleFromEnv(sym: Symbol) = + (env.kind == EnvKind.NestedInOwner || env.owner != sym) + && env.owner.isContainedIn(sym) + val included = cs.filter: + case ref: TermRef => isVisibleFromEnv(ref.symbol.owner) + case ref: ThisType => isVisibleFromEnv(ref.cls) case _ => false - } capt.println(i"Include call capture $included in ${env.owner}") - checkSubset(included, env.captured, pos) - } + checkSubset(included, env.captured, pos, provenance(env)) /** Include references captured by the called method in the current environment stack */ def includeCallCaptures(sym: Symbol, pos: SrcPos)(using Context): Unit = if sym.exists && curEnv.isOpen then markFree(capturedVars(sym), pos) - override def recheckIdent(tree: Ident)(using Context): Type = - if tree.symbol.is(Method) then includeCallCaptures(tree.symbol, tree.srcPos) - else markFree(tree.symbol, tree.srcPos) - super.recheckIdent(tree) + override def recheckIdent(tree: Ident, pt: Type)(using Context): Type = + if tree.symbol.is(Method) then + if tree.symbol.info.isParameterless then + // there won't be an apply; need to include call captures now + includeCallCaptures(tree.symbol, tree.srcPos) + else + markFree(tree.symbol, tree.srcPos) + super.recheckIdent(tree, pt) /** A specialized implementation of the selection rule. 
*
-   *  E |- f: Cf f { m: Cr R }
-   *  ------------------------
-   *  E |- f.m: C R
+   *  E |- f: f{ m: Cr R }^Cf
+   *  -----------------------
+   *  E |- f.m: R^C
   *
   *  The implementation picks as `C` one of `{f}` or `Cr`, depending on the
   *  outcome of a `mightSubcapture` test. It picks `{f}` if this might subcapture Cr
@@ -305,7 +378,7 @@ class CheckCaptures extends Recheck, SymTransformer:
      // This case can arise when we try to merge multiple types that have different
      // capture sets on some part. For instance an asSeenFrom might produce
      // a bi-mapped capture set arising from a substitution. Applying the same substitution
-      // to the same type twice will nevertheless produce different capture setsw which can
+      // to the same type twice will nevertheless produce different capture sets which can
      // lead to a failure in disambiguation since neither alternative is better than the
      // other in a frozen constraint. An example test case is disambiguate-select.scala.
      // We address the problem by disambiguating while ignoring all capture sets as a fallback.
@@ -316,11 +389,15 @@
      val selType = recheckSelection(tree, qualType, name, disambiguate)
      val selCs = selType.widen.captureSet
-      if selCs.isAlwaysEmpty || selType.widen.isBoxedCapturing || qualType.isBoxedCapturing then
+      if selCs.isAlwaysEmpty
+          || selType.widen.isBoxedCapturing
+          || qualType.isBoxedCapturing
+          || pt == LhsProto
+      then
        selType
      else
        val qualCs = qualType.captureSet
-        capt.println(i"intersect $qualType, ${selType.widen}, $qualCs, $selCs in $tree")
+        capt.println(i"pick one of $qualType, ${selType.widen}, $qualCs, $selCs in $tree")
        if qualCs.mightSubcapture(selCs)
            && !selCs.mightSubcapture(qualCs)
            && !pt.stripCapturing.isInstanceOf[SingletonType]
@@ -333,10 +410,10 @@

  /** A specialized implementation of the apply rule.
   *
-   *  E |- f: Cf (Ra -> Cr Rr)
-   *  E |- a: Ca Ra
-   *  ------------------------
-   *  E |- f a: C Rr
+   *  E |- f: Ra ->Cf Rr^Cr
+   *  E |- a: Ra^Ca
+   *  ---------------------
+   *  E |- f a: Rr^C
   *
   *  The implementation picks as `C` one of `{f, a}` or `Cr`, depending on the
   *  outcome of a `mightSubcapture` test.
It picks `{f, a}` if this might subcapture Cr @@ -345,21 +422,37 @@ class CheckCaptures extends Recheck, SymTransformer: override def recheckApply(tree: Apply, pt: Type)(using Context): Type = val meth = tree.fun.symbol includeCallCaptures(meth, tree.srcPos) + + // Unsafe box/unbox handling, only for versions < 3.3 def mapArgUsing(f: Type => Type) = val arg :: Nil = tree.args: @unchecked val argType0 = f(recheckStart(arg, pt)) val argType = super.recheckFinish(argType0, arg, pt) super.recheckFinish(argType, tree, pt) - if meth == defn.Caps_unsafeBox then + if meth == defn.Caps_unsafeAssumePure then + val arg :: Nil = tree.args: @unchecked + val argType0 = recheck(arg, pt.capturing(CaptureSet.universal)) + val argType = + if argType0.captureSet.isAlwaysEmpty then argType0 + else argType0.widen.stripCapturing + capt.println(i"rechecking $arg with $pt: $argType") + super.recheckFinish(argType, tree, pt) + else if meth == defn.Caps_unsafeBox then mapArgUsing(_.forceBoxStatus(true)) else if meth == defn.Caps_unsafeUnbox then mapArgUsing(_.forceBoxStatus(false)) else if meth == defn.Caps_unsafeBoxFunArg then - mapArgUsing { - case defn.FunctionOf(paramtpe :: Nil, restpe, isContectual) => - defn.FunctionOf(paramtpe.forceBoxStatus(true) :: Nil, restpe, isContectual) - } + def forceBox(tp: Type): Type = tp.strippedDealias match + case defn.FunctionOf(paramtpe :: Nil, restpe, isContextual) => + defn.FunctionOf(paramtpe.forceBoxStatus(true) :: Nil, restpe, isContextual) + case tp @ RefinedType(parent, rname, rinfo: MethodType) => + tp.derivedRefinedType(parent, rname, + rinfo.derivedLambdaType( + paramInfos = rinfo.paramInfos.map(_.forceBoxStatus(true)))) + case tp @ CapturingType(parent, refs) => + tp.derivedCapturingType(forceBox(parent), refs) + mapArgUsing(forceBox) else super.recheckApply(tree, pt) match case appType @ CapturingType(appType1, refs) => @@ -371,14 +464,18 @@ class CheckCaptures extends Recheck, SymTransformer: && qual.tpe.captureSet.mightSubcapture(refs) && tree.args.forall(_.tpe.captureSet.mightSubcapture(refs)) => - val callCaptures = tree.args.foldLeft(qual.tpe.captureSet)((cs, arg) => - cs ++ arg.tpe.captureSet) + val callCaptures = tree.args.foldLeft(qual.tpe.captureSet): (cs, arg) => + cs ++ arg.tpe.captureSet appType.derivedCapturingType(appType1, callCaptures) .showing(i"narrow $tree: $appType, refs = $refs, qual = ${qual.tpe.captureSet} --> $result", capt) case _ => appType case appType => appType end recheckApply + private def isDistinct(xs: List[Type]): Boolean = xs match + case x :: xs1 => xs1.isEmpty || !xs1.contains(x) && isDistinct(xs1) + case Nil => true + /** Handle an application of method `sym` with type `mt` to arguments of types `argTypes`. * This means: * - Instantiate result type with actual arguments @@ -386,11 +483,19 @@ class CheckCaptures extends Recheck, SymTransformer: * - remember types of arguments corresponding to tracked * parameters in refinements. * - add capture set of instantiated class to capture set of result type. + * If all argument types are mutually different trackable capture references, use a BiTypeMap, + * since that is more precise. Otherwise use a normal idempotent map, which might lose information + * in the case where the result type contains capture set variables that are further + * constrained afterwards.
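+ * For example (an illustrative sketch; `pick`, `a` and `b` are assumed names):
+ * {{{
+ * def pick(x: A^, y: A^): x.type
+ * pick(a, b)  // a, b distinct trackable refs: instantiate via SubstParamsBiMap
+ * pick(a, a)  // repeated argument: fall back to the idempotent SubstParamsMap
+ * }}}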
*/ override def instantiate(mt: MethodType, argTypes: List[Type], sym: Symbol)(using Context): Type = val ownType = - if mt.isResultDependent then SubstParamsMap(mt, argTypes)(mt.resType) - else mt.resType + if !mt.isResultDependent then + mt.resType + else if argTypes.forall(_.isTrackableRef) && isDistinct(argTypes) then + SubstParamsBiMap(mt, argTypes)(mt.resType) + else + SubstParamsMap(mt, argTypes)(mt.resType) if sym.isConstructor then val cls = sym.owner.asClass @@ -403,15 +508,19 @@ class CheckCaptures extends Recheck, SymTransformer: * Second half: union of all capture sets of arguments to tracked parameters. */ def addParamArgRefinements(core: Type, initCs: CaptureSet): (Type, CaptureSet) = - mt.paramNames.lazyZip(argTypes).foldLeft((core, initCs)) { (acc, refine) => - val (core, allCaptures) = acc - val (getterName, argType) = refine + var refined: Type = core + var allCaptures: CaptureSet = initCs + for (getterName, argType) <- mt.paramNames.lazyZip(argTypes) do val getter = cls.info.member(getterName).suchThat(_.is(ParamAccessor)).symbol - if getter.termRef.isTracked && !getter.is(Private) - then (RefinedType(core, getterName, argType), allCaptures ++ argType.captureSet) - else (core, allCaptures) - } - + if getter.termRef.isTracked && !getter.is(Private) then + refined = RefinedType(refined, getterName, argType) + allCaptures ++= argType.captureSet + (refined, allCaptures) + + /** Augment result type of constructor with refinements and captures. + * @param core The result type of the constructor + * @param initCs The initial capture set to add, not yet counting capture sets from arguments + */ def augmentConstructorType(core: Type, initCs: CaptureSet): Type = core match case core: MethodType => // more parameters to follow; augment result type @@ -424,70 +533,113 @@ class CheckCaptures extends Recheck, SymTransformer: val (refined, cs) = addParamArgRefinements(core, initCs) refined.capturing(cs) - augmentConstructorType(ownType, CaptureSet.empty) match - case augmented: MethodType => - augmented - case augmented => - // add capture sets of class and constructor to final result of constructor call - augmented.capturing(capturedVars(cls) ++ capturedVars(sym)) - .showing(i"constr type $mt with $argTypes%, % in $cls = $result", capt) + augmentConstructorType(ownType, capturedVars(cls) ++ capturedVars(sym)) + .showing(i"constr type $mt with $argTypes%, % in $cls = $result", capt) else ownType end instantiate - override def recheckClosure(tree: Closure, pt: Type)(using Context): Type = + override def recheckTypeApply(tree: TypeApply, pt: Type)(using Context): Type = + if ccConfig.allowUniversalInBoxed then + val TypeApply(fn, args) = tree + val polyType = atPhase(thisPhase.prev): + fn.tpe.widen.asInstanceOf[TypeLambda] + for case (arg: TypeTree, formal, pname) <- args.lazyZip(polyType.paramRefs).lazyZip((polyType.paramNames)) do + if !tree.symbol.isTypeTestOrCast then + def where = if fn.symbol.exists then i" in an argument of ${fn.symbol}" else "" + disallowRootCapabilitiesIn(arg.knownType, NoSymbol, + i"Sealed type variable $pname", "be instantiated to", + i"This is often caused by a local capability$where\nleaking as part of its result.", + tree.srcPos) + super.recheckTypeApply(tree, pt) + + override def recheckClosure(tree: Closure, pt: Type, forceDependent: Boolean)(using Context): Type = val cs = capturedVars(tree.meth.symbol) capt.println(i"typing closure $tree with cvs $cs") - super.recheckClosure(tree, pt).capturing(cs) - .showing(i"rechecked $tree / $pt = $result", capt) - 
- /** Additionally to normal processing, update types of closures if the expected type - * is a function with only pure parameters. In that case, make the anonymous function - * also have the same parameters as the prototype. - * TODO: Develop a clearer rationale for this. - * TODO: Can we generalize this to arbitrary parameters? - * Currently some tests fail if we do this. (e.g. neg.../stackAlloc.scala, others) + super.recheckClosure(tree, pt, forceDependent).capturing(cs) + .showing(i"rechecked closure $tree / $pt = $result", capt) + + override def recheckClosureBlock(mdef: DefDef, expr: Closure, pt: Type)(using Context): Type = + mdef.rhs match + case rhs @ closure(_, _, _) => + // In a curried closure `x => y => e` don't leak capabilities retained by + // the second closure `y => e` into the first one. This is an approximation + // of the CC rule which says that a closure contributes captures to its + // environment only if a let-bound reference to the closure is used. + mdef.rhs.putAttachment(ClosureBodyValue, ()) + case _ => + + // Constrain closure's parameters and result from the expected type before + // rechecking the body. + openClosures = (mdef.symbol, pt) :: openClosures + try + val res = recheckClosure(expr, pt, forceDependent = true) + if !isEtaExpansion(mdef) then + // If closure is an eta expanded method reference it's better to not constrain + // its internals early since that would give error messages in generated code + // which are less intelligible. + // Example is the line `a = x` in neg-custom-args/captures/vars.scala. + // For all other closures, early constraints are preferred since they + // give more localized error messages. + checkConformsExpr(res, pt, expr) + recheckDef(mdef, mdef.symbol) + res + finally + openClosures = openClosures.tail + end recheckClosureBlock + + /** Maps mutable variables to the symbols that capture them (in the + * CheckCaptures sense, i.e. symbol is referred to from a different method + * than the one it is defined in). */ - override def recheckBlock(block: Block, pt: Type)(using Context): Type = - block match - case closureDef(mdef) => - pt.dealias match - case defn.FunctionOf(ptformals, _, _) - if ptformals.nonEmpty && ptformals.forall(_.captureSet.isAlwaysEmpty) => - // Redo setup of the anonymous function so that formal parameters don't - // get capture sets. This is important to avoid false widenings to `cap` - // when taking the base type of the actual closures's dependent function - // type so that it conforms to the expected non-dependent function type. - // See withLogFile.scala for a test case. - val meth = mdef.symbol - // First, undo the previous setup which installed a completer for `meth`. 
- atPhase(preRecheckPhase.prev)(meth.denot.copySymDenotation()) - .installAfter(preRecheckPhase) - - // Next, update all parameter symbols to match expected formals - meth.paramSymss.head.lazyZip(ptformals).foreach { (psym, pformal) => - psym.updateInfoBetween(preRecheckPhase, thisPhase, pformal.mapExprType) - } - // Next, update types of parameter ValDefs - mdef.paramss.head.lazyZip(ptformals).foreach { (param, pformal) => - val ValDef(_, tpt, _) = param: @unchecked - tpt.rememberTypeAlways(pformal) - } - // Next, install a new completer reflecting the new parameters for the anonymous method - val mt = meth.info.asInstanceOf[MethodType] - val completer = new LazyType: - def complete(denot: SymDenotation)(using Context) = - denot.info = mt.companion(ptformals, mdef.tpt.knownType) - .showing(i"simplify info of $meth to $result", capt) - recheckDef(mdef, meth) - meth.updateInfoBetween(preRecheckPhase, thisPhase, completer) - case _ => + private val capturedBy = util.HashMap[Symbol, Symbol]() + + /** Maps anonymous functions appearing as function arguments to + * the function that is called. + */ + private val anonFunCallee = util.HashMap[Symbol, Symbol]() + + /** Populates `capturedBy` and `anonFunCallee`. Called by `checkUnit`. + */ + private def collectCapturedMutVars(using Context) = new TreeTraverser: + def traverse(tree: Tree)(using Context) = tree match + case id: Ident => + val sym = id.symbol + if sym.is(Mutable, butNot = Method) && sym.owner.isTerm then + val enclMeth = ctx.owner.enclosingMethod + if sym.enclosingMethod != enclMeth then + capturedBy(sym) = enclMeth + case Apply(fn, args) => + for case closureDef(mdef) <- args do + anonFunCallee(mdef.symbol) = fn.symbol + traverseChildren(tree) + case Inlined(_, bindings, expansion) => + traverse(bindings) + traverse(expansion) + case mdef: DefDef => + if !mdef.symbol.isInlineMethod then traverseChildren(tree) case _ => - super.recheckBlock(block, pt) + traverseChildren(tree) - override def recheckValDef(tree: ValDef, sym: Symbol)(using Context): Unit = + override def recheckValDef(tree: ValDef, sym: Symbol)(using Context): Type = try - if !sym.is(Module) then // Modules are checked by checking the module class - super.recheckValDef(tree, sym) + if sym.is(Module) then sym.info // Modules are checked by checking the module class + else + if sym.is(Mutable) && !sym.hasAnnotation(defn.UncheckedCapturesAnnot) then + val (carrier, addendum) = capturedBy.get(sym) match + case Some(encl) => + val enclStr = + if encl.isAnonymousFunction then + val location = anonFunCallee.get(encl) match + case Some(meth) if meth.exists => i" argument in a call to $meth" + case _ => "" + s"an anonymous function$location" + else encl.show + (NoSymbol, i"\nNote that $sym does not count as local since it is captured by $enclStr") + case _ => + (sym, "") + disallowRootCapabilitiesIn( + tree.tpt.knownType, carrier, i"Mutable $sym", "have type", addendum, sym.srcPos) + checkInferredResult(super.recheckValDef(tree, sym), tree) finally if !sym.is(Param) then // Parameters with inferred types belong to anonymous methods. We need to wait @@ -496,38 +648,93 @@ class CheckCaptures extends Recheck, SymTransformer: // function is compiled since we do not propagate expected types into blocks. 
 interpolateVarsIn(tree.tpt) - override def recheckDefDef(tree: DefDef, sym: Symbol)(using Context): Unit = - if !Synthetics.isExcluded(sym) then + override def recheckDefDef(tree: DefDef, sym: Symbol)(using Context): Type = + if Synthetics.isExcluded(sym) then sym.info + else val saved = curEnv val localSet = capturedVars(sym) - if !localSet.isAlwaysEmpty then curEnv = Env(sym, nestedInOwner = false, localSet, isBoxed = false, curEnv) - try super.recheckDefDef(tree, sym) + if !localSet.isAlwaysEmpty then + curEnv = Env(sym, EnvKind.Regular, localSet, curEnv) + try checkInferredResult(super.recheckDefDef(tree, sym), tree) finally - interpolateVarsIn(tree.tpt) + if !sym.isAnonymousFunction then + // Anonymous functions propagate their type to the enclosing environment + // so it is not in general sound to interpolate their types. + interpolateVarsIn(tree.tpt) curEnv = saved + /** If val or def definition with inferred (result) type is visible + * in other compilation units, check that the actual inferred type + * conforms to the expected type where all inferred capture sets are dropped. + * This ensures that if files compile separately, they will also compile + * in a joint compilation. + */ + def checkInferredResult(tp: Type, tree: ValOrDefDef)(using Context): Type = + val sym = tree.symbol + + def isLocal = + sym.owner.ownersIterator.exists(_.isTerm) + || sym.accessBoundary(defn.RootClass).isContainedIn(sym.topLevelClass) + + def canUseInferred = // If canUseInferred is false, all capturing types in the type of `sym` need to be given explicitly + sym.is(Private) // private symbols can always have inferred types + || sym.name.is(DefaultGetterName) // default getters are exempted since otherwise it would be + // too annoying. This is a hole since a default getter's result type + // might leak into a type variable. + || // non-local symbols cannot have inferred types since external capture types are not inferred + isLocal // local symbols still need explicit types if + && !sym.owner.is(Trait) // they are defined in a trait, since we do OverridingPairs checking before capture inference + + def addenda(expected: Type) = new Addenda: + override def toAdd(using Context) = + def result = if tree.isInstanceOf[ValDef] then "" else " result" + i""" + | + |Note that the expected type $expected + |is the previously inferred$result type of $sym + |which is also the type seen in separately compiled sources. + |The new inferred type $tp + |must conform to this type.""" :: Nil + + tree.tpt match + case tpt: InferredTypeTree if !canUseInferred => + val expected = tpt.tpe.dropAllRetains + todoAtPostCheck += (() => checkConformsExpr(tp, expected, tree.rhs, addenda(expected))) + case _ => + tp + end checkInferredResult + /** Class-specific capture set relations: * 1. The capture set of a class includes the capture sets of its parents. * 2. The capture set of the self type of a class includes the capture set of the class. * 3. The capture set of the self type of a class includes the capture set of every class parameter, * unless the parameter is marked @constructorOnly. + * 4. If the class extends a pure base class, the capture set of the self type must be empty.
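+ * For illustration of (3) and (4), a sketch (`Pure` and `Proc` are assumed names):
+ * {{{
+ * trait Pure                        // assume a pure base class
+ * class C(op: Proc^) extends Pure   // (3) forces {op} into C's self type,
+ *                                   // (4) requires that self type to be pure: error
+ * }}}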
*/ override def recheckClassDef(tree: TypeDef, impl: Template, cls: ClassSymbol)(using Context): Type = val saved = curEnv val localSet = capturedVars(cls) for parent <- impl.parents do // (1) - checkSubset(capturedVars(parent.tpe.classSymbol), localSet, parent.srcPos) - if !localSet.isAlwaysEmpty then curEnv = Env(cls, nestedInOwner = false, localSet, isBoxed = false, curEnv) + checkSubset(capturedVars(parent.tpe.classSymbol), localSet, parent.srcPos, + i"\nof the references allowed to be captured by $cls") + if !localSet.isAlwaysEmpty then + curEnv = Env(cls, EnvKind.Regular, localSet, curEnv) try val thisSet = cls.classInfo.selfType.captureSet.withDescription(i"of the self type of $cls") checkSubset(localSet, thisSet, tree.srcPos) // (2) for param <- cls.paramGetters do if !param.hasAnnotation(defn.ConstructorOnlyAnnot) then checkSubset(param.termRef.captureSet, thisSet, param.srcPos) // (3) - for pureBase <- cls.pureBaseClass do + for pureBase <- cls.pureBaseClass do // (4) + def selfType = impl.body + .collect: + case TypeDef(tpnme.SELF, rhs) => rhs + .headOption + .getOrElse(tree) + .orElse(tree) checkSubset(thisSet, CaptureSet.empty.withDescription(i"of pure base class $pureBase"), - tree.srcPos) + selfType.srcPos, cs1description = " captured by this self type") super.recheckClassDef(tree, impl, cls) finally curEnv = saved @@ -548,9 +755,9 @@ class CheckCaptures extends Recheck, SymTransformer: override def recheckTry(tree: Try, pt: Type)(using Context): Type = val tp = super.recheckTry(tree, pt) - if allowUniversalInBoxed && Feature.enabled(Feature.saferExceptions) then - disallowRootCapabilitiesIn(tp, - "Result of `try`", "have type", + if ccConfig.allowUniversalInBoxed && Feature.enabled(Feature.saferExceptions) then + disallowRootCapabilitiesIn(tp, ctx.owner, + "result of `try`", "have type", "This is often caused by a locally generated exception capability leaking as part of its result.", tree.srcPos) tp @@ -577,78 +784,105 @@ class CheckCaptures extends Recheck, SymTransformer: * adding all references in the boxed capture set to the current environment. */ override def recheck(tree: Tree, pt: Type = WildcardType)(using Context): Type = - if tree.isTerm && pt.isBoxedCapturing then - val saved = curEnv - - tree match - case _: RefTree | closureDef(_) => - curEnv = Env(curEnv.owner, nestedInOwner = false, CaptureSet.Var(), isBoxed = true, curEnv) - case _ => - - try super.recheck(tree, pt) + val saved = curEnv + tree match + case _: RefTree | closureDef(_) if pt.isBoxedCapturing => + curEnv = Env(curEnv.owner, EnvKind.Boxed, CaptureSet.Var(curEnv.owner), curEnv) + case _ if tree.hasAttachment(ClosureBodyValue) => + curEnv = Env(curEnv.owner, EnvKind.ClosureResult, CaptureSet.Var(curEnv.owner), curEnv) + case _ => + val res = + try + if capt eq noPrinter then + super.recheck(tree, pt) + else + trace.force(i"rechecking $tree with pt = $pt", recheckr, show = true): + super.recheck(tree, pt) + catch case ex: NoCommonRoot => + report.error(ex.getMessage.nn) + tree.tpe finally curEnv = saved - else - val res = super.recheck(tree, pt) - if tree.isTerm then markFree(res.boxedCaptureSet, tree.srcPos) - res + if tree.isTerm && !pt.isBoxedCapturing then + markFree(res.boxedCaptureSet, tree.srcPos) + res - /** If `tree` is a reference or an application where the result type refers - * to an enclosing class or method parameter of the reference, check that the result type - * does not capture the universal capability. 
This is justified since the - * result type would have to be implicitly unboxed. - * TODO: Can we find a cleaner way to achieve this? Logically, this should be part - * of simulated boxing and unboxing. - */ override def recheckFinish(tpe: Type, tree: Tree, pt: Type)(using Context): Type = - val typeToCheck = tree match - case _: Ident | _: Select | _: Apply | _: TypeApply if tree.symbol.unboxesResult => - tpe - case _: Try => - tpe - case _ => - NoType + def needsUniversalCheck = tree match + case _: RefTree | _: Apply | _: TypeApply => tree.symbol.unboxesResult + case _: Try => true + case _ => false def checkNotUniversal(tp: Type): Unit = tp.widenDealias match case wtp @ CapturingType(parent, refs) => refs.disallowRootCapability { () => - val kind = if tree.isInstanceOf[ValDef] then "mutable variable" else "expression" report.error( - em"""The $kind's type $wtp is not allowed to capture the root capability `cap`. + em"""The expression's type $wtp is not allowed to capture the root capability `cap`. |This usually means that a capability persists longer than its allowed lifetime.""", tree.srcPos) } checkNotUniversal(parent) case _ => - if !allowUniversalInBoxed then checkNotUniversal(typeToCheck) + if !ccConfig.allowUniversalInBoxed && needsUniversalCheck then + checkNotUniversal(tpe) super.recheckFinish(tpe, tree, pt) - - /** Massage `actual` and `expected` types using the methods below before checking conformance */ - override def checkConformsExpr(actual: Type, expected: Type, tree: Tree)(using Context): Unit = + end recheckFinish + + // ------------------ Adaptation ------------------------------------- + // + // Adaptations before checking conformance of actual vs expected: + // + // - Convert function to dependent function if expected type is a dependent function type + // (cf. alignDependentFunction). + // - Relax expected capture set containing `this.type`s by adding references only + // accessible through those types (cf. addOuterRefs, also #14930 for a discussion). + // - Adapt box status and environment capture sets by simulating box/unbox operations. + // - Instantiate covariant occurrences of `cap` in actual to reach capabilities. + + private inline val debugSuccesses = false + + /** Massage `actual` and `expected` types before checking conformance. + * Massaging is done by the methods following this one: + * - align dependent function types and add outer references in the expected type + * - adapt boxing in the actual type + * If the resulting types are not compatible, try again with an actual type + * where local capture roots are instantiated to root variables.
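+ * For example, box adaptation at work (a hedged sketch; `io` is an assumed
+ * capability):
+ * {{{
+ * val xs: List[() ->{io} Unit] = ...  // the element type is boxed in List[+A]
+ * val h: () ->{io} Unit = xs.head     // checking inserts an unbox step and
+ *                                     // charges {io} to the environment
+ * }}}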
+ */ + override def checkConformsExpr(actual: Type, expected: Type, tree: Tree, addenda: Addenda)(using Context): Type = val expected1 = alignDependentFunction(addOuterRefs(expected, actual), actual.stripCapturing) - val actual1 = adaptBoxed(actual, expected1, tree.srcPos) - //println(i"check conforms $actual1 <<< $expected1") - super.checkConformsExpr(actual1, expected1, tree) - - private def toDepFun(args: List[Type], resultType: Type, isContextual: Boolean)(using Context): Type = - MethodType.companion(isContextual = isContextual)(args, resultType) - .toFunctionType(isJava = false, alwaysDependent = true) + val actualBoxed = adaptBoxed(actual, expected1, tree.srcPos) + //println(i"check conforms $actualBoxed <<< $expected1") + if isCompatible(actualBoxed, expected1) then + if debugSuccesses then tree match + case Ident(_) => + println(i"SUCCESS $tree:\n${TypeComparer.explained(_.isSubType(actual, expected))}") + case _ => + actualBoxed + else + capt.println(i"conforms failed for ${tree}: $actual vs $expected") + err.typeMismatch(tree.withType(actualBoxed), expected1, addenda ++ CaptureSet.levelErrors) + actual + end checkConformsExpr /** Turn `expected` into a dependent function when `actual` is dependent. */ private def alignDependentFunction(expected: Type, actual: Type)(using Context): Type = def recur(expected: Type): Type = expected.dealias match - case expected @ CapturingType(eparent, refs) => - CapturingType(recur(eparent), refs, boxed = expected.isBoxed) + case expected0 @ CapturingType(eparent, refs) => + val eparent1 = recur(eparent) + if eparent1 eq eparent then expected + else CapturingType(eparent1, refs, boxed = expected0.isBoxed) case expected @ defn.FunctionOf(args, resultType, isContextual) - if defn.isNonRefinedFunction(expected) && defn.isFunctionType(actual) && !defn.isNonRefinedFunction(actual) => - val expected1 = toDepFun(args, resultType, isContextual) - expected1 - case _ => - expected + if defn.isNonRefinedFunction(expected) => + actual match + case RefinedType(parent, nme.apply, rinfo: MethodType) + if defn.isFunctionNType(actual) => + depFun(args, resultType, isContextual, rinfo.paramNames) + case _ => expected + case _ => expected recur(expected) /** For the expected type, implement the rule outlined in #14390: - * - when checking an expression `a: Ca Ta` against an expected type `Ce Te`, + * - when checking an expression `a: Ta^Ca` against an expected type `Te^Ce`, * - where the capture set `Ce` contains Cls.this, - * - and where and all method definitions enclosing `a` inside class `Cls` + * - and where all method definitions enclosing `a` inside class `Cls` * have only pure parameters, * - add to `Ce` all references to variables or this-references in `Ca` * that are outside `Cls`. These are all accessed through `Cls.this`, @@ -656,16 +890,21 @@ class CheckCaptures extends Recheck, SymTransformer: * them explicitly to `Ce` changes nothing. 
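+ * For example (an illustrative sketch; `FileSystem` and `use` are assumed):
+ * {{{
+ * def test(fs: FileSystem^) =
+ *   class Cls:
+ *     def f(): () ->{this} Unit =
+ *       () => use(fs)  // Ca = {fs}, with fs outside Cls; fs is accessed
+ *                      // through Cls.this, so it is added to Ce = {this}
+ * }}}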
 */ private def addOuterRefs(expected: Type, actual: Type)(using Context): Type = + def isPure(info: Type): Boolean = info match case info: PolyType => isPure(info.resType) case info: MethodType => info.paramInfos.forall(_.captureSet.isAlwaysEmpty) && isPure(info.resType) case _ => true + def isPureContext(owner: Symbol, limit: Symbol): Boolean = if owner == limit then true else if !owner.exists then false else isPure(owner.info) && isPureContext(owner.owner, limit) + + // Augment expected capture set `erefs` by all references in actual capture + // set `arefs` that are outside some `this.type` reference in `erefs` def augment(erefs: CaptureSet, arefs: CaptureSet): CaptureSet = - (erefs /: erefs.elems) { (erefs, eref) => + (erefs /: erefs.elems): (erefs, eref) => eref match case eref: ThisType if isPureContext(ctx.owner, eref.cls) => erefs ++ arefs.filter { @@ -675,7 +914,7 @@ class CheckCaptures extends Recheck, SymTransformer: } case _ => erefs - } + expected match case CapturingType(ecore, erefs) => val erefs1 = augment(erefs, actual.captureSet) @@ -684,6 +923,8 @@ class CheckCaptures extends Recheck, SymTransformer: expected.derivedCapturingType(ecore, erefs1) case _ => expected + end addOuterRefs + /** Adapt `actual` type to `expected` type by inserting boxing and unboxing conversions * @@ -691,23 +932,26 @@ class CheckCaptures extends Recheck, SymTransformer: */ def adaptBoxed(actual: Type, expected: Type, pos: SrcPos, alwaysConst: Boolean = false)(using Context): Type = + inline def inNestedEnv[T](boxed: Boolean)(op: => T): T = + val saved = curEnv + curEnv = Env(curEnv.owner, EnvKind.NestedInOwner, CaptureSet.Var(curEnv.owner), if boxed then null else curEnv) + try op + finally curEnv = saved + /** Adapt function type `actual`, which is `aargs -> ares` (possibly with dependencies) * to `expected` type. - * It returns the adapted type along with the additionally captured variable - * during adaptation. + * It returns the adapted type along with a capture set consisting of the references + * that were additionally captured during adaptation. * @param reconstruct how to rebuild the adapted function type */ def adaptFun(actual: Type, aargs: List[Type], ares: Type, expected: Type, covariant: Boolean, boxed: Boolean, reconstruct: (List[Type], Type) => Type): (Type, CaptureSet) = - val saved = curEnv - curEnv = Env(curEnv.owner, nestedInOwner = true, CaptureSet.Var(), isBoxed = false, if boxed then null else curEnv) - - try + inNestedEnv(boxed): val (eargs, eres) = expected.dealias.stripCapturing match case defn.FunctionOf(eargs, eres, _) => (eargs, eres) case expected: MethodType => (expected.paramInfos, expected.resType) - case expected @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionType(expected) => (rinfo.paramInfos, rinfo.resType) + case expected @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionNType(expected) => (rinfo.paramInfos, rinfo.resType) case _ => (aargs.map(_ => WildcardType), WildcardType) val aargs1 = aargs.zipWithConserve(eargs) { (aarg, earg) => adapt(aarg, earg, !covariant) } val ares1 = adapt(ares, eres, covariant) @@ -717,8 +961,7 @@ class CheckCaptures extends Recheck, SymTransformer: else reconstruct(aargs1, ares1) (resTp, curEnv.captured) - finally - curEnv = saved + end adaptFun /** Adapt type function type `actual` to the expected type.
* @see [[adaptFun]] */ def adaptTypeFun( actual: Type, ares: Type, expected: Type, covariant: Boolean, boxed: Boolean, reconstruct: Type => Type): (Type, CaptureSet) = - val saved = curEnv - curEnv = Env(curEnv.owner, nestedInOwner = true, CaptureSet.Var(), isBoxed = false, if boxed then null else curEnv) - - try + inNestedEnv(boxed): val eres = expected.dealias.stripCapturing match - case RefinedType(_, _, rinfo: PolyType) => rinfo.resType + case defn.PolyFunctionOf(rinfo: PolyType) => rinfo.resType case expected: PolyType => expected.resType case _ => WildcardType @@ -743,8 +983,6 @@ class CheckCaptures extends Recheck, SymTransformer: else reconstruct(ares1) (resTp, curEnv.captured) - finally - curEnv = saved end adaptTypeFun def adaptInfo(actual: Type, expected: Type, covariant: Boolean): String = @@ -769,21 +1007,21 @@ class CheckCaptures extends Recheck, SymTransformer: case actual @ AppliedType(tycon, args) if defn.isNonRefinedFunction(actual) => adaptFun(actual, args.init, args.last, expected, covariant, insertBox, (aargs1, ares1) => actual.derivedAppliedType(tycon, aargs1 :+ ares1)) - case actual @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionOrPolyType(actual) => + case actual @ defn.RefinedFunctionOf(rinfo: MethodType) => // TODO Find a way to combine handling of generic and dependent function types (here and elsewhere) adaptFun(actual, rinfo.paramInfos, rinfo.resType, expected, covariant, insertBox, (aargs1, ares1) => rinfo.derivedLambdaType(paramInfos = aargs1, resType = ares1) - .toFunctionType(isJava = false, alwaysDependent = true)) + .toFunctionType(alwaysDependent = true)) case actual: MethodType => adaptFun(actual, actual.paramInfos, actual.resType, expected, covariant, insertBox, (aargs1, ares1) => actual.derivedLambdaType(paramInfos = aargs1, resType = ares1)) - case actual @ RefinedType(p, nme, rinfo: PolyType) if defn.isFunctionOrPolyType(actual) => + case actual @ defn.RefinedFunctionOf(rinfo: PolyType) => adaptTypeFun(actual, rinfo.resType, expected, covariant, insertBox, ares1 => val rinfo1 = rinfo.derivedLambdaType(rinfo.paramNames, rinfo.paramInfos, ares1) - val actual1 = actual.derivedRefinedType(p, nme, rinfo1) + val actual1 = actual.derivedRefinedType(refinedInfo = rinfo1) actual1 ) case _ => @@ -791,17 +1029,25 @@ class CheckCaptures extends Recheck, SymTransformer: } // Capture set of the term after adaptation - val cs1 = cs ++ leaked + val cs1 = + if covariant then cs ++ leaked + else + if !leaked.subCaptures(cs, frozen = false).isOK then + report.error( + em"""$expected cannot be box-converted to $actual + |since the additional capture set $leaked resulting from box conversion is not allowed in $actual""", pos) + cs // Compute the adapted type def adaptedType(resultBoxed: Boolean) = - styp1.capturing(if alwaysConst then CaptureSet(cs1.elems) else cs1).forceBoxStatus(resultBoxed) + if (styp1 eq styp) && leaked.isAlwaysEmpty && boxed == resultBoxed then actual + else styp1.capturing(if alwaysConst then CaptureSet(cs1.elems) else cs1).forceBoxStatus(resultBoxed) if needsAdaptation then val criticalSet = // the set which is not allowed to have `cap` if covariant then cs1 // can't box with `cap` else expected.captureSet // can't unbox with `cap` - if criticalSet.isUniversal && expected.isValueType && !allowUniversalInBoxed then + if criticalSet.isUniversal && expected.isValueType && !ccConfig.allowUniversalInBoxed then // We can't box/unbox the universal capability.
Leave `actual` as it is // so we get an error in checkConforms. This tends to give better error // messages than disallowing the root capability in `criticalSet`. @@ -809,7 +1055,7 @@ class CheckCaptures extends Recheck, SymTransformer: println(i"cannot box/unbox $actual vs $expected") actual else - if !allowUniversalInBoxed then + if !ccConfig.allowUniversalInBoxed then // Disallow future addition of `cap` to `criticalSet`. criticalSet.disallowRootCapability { () => report.error( @@ -824,20 +1070,24 @@ class CheckCaptures extends Recheck, SymTransformer: adaptedType(boxed) } - var actualw = actual.widenDealias - actual match - case ref: CaptureRef if ref.isTracked => - actualw match - case CapturingType(p, refs) => - actualw = actualw.derivedCapturingType(p, ref.singletonCaptureSet) - // given `a: C T`, improve `C T` to `{a} T` - case _ => - case _ => - val adapted = adapt(actualw, expected, covariant = true) - if adapted ne actualw then - capt.println(i"adapt boxed $actual vs $expected ===> $adapted") - adapted - else actual + if expected == LhsProto || expected.isSingleton && actual.isSingleton then + actual + else + var actualw = actual.widenDealias + actual match + case ref: CaptureRef if ref.isTracked => + actualw match + case CapturingType(p, refs) if ref.singletonCaptureSet.mightSubcapture(refs) => + actualw = actualw.derivedCapturingType(p, ref.singletonCaptureSet) + .showing(i"improve $actualw to $result", capt) + // given `a: T^C`, improve `T^C` to `T^{a}` + case _ => + case _ => + val adapted = adapt(actualw.withReachCaptures(actual), expected, covariant = true) + if adapted ne actualw then + capt.println(i"adapt boxed $actual vs $expected ===> $adapted") + adapted + else actual end adaptBoxed /** Check overrides again, taking capture sets into account. @@ -846,7 +1096,7 @@ class CheckCaptures extends Recheck, SymTransformer: * But maybe we can then elide the check during the RefChecks phase under captureChecking? */ def checkOverrides = new TreeTraverser: - class OverridingPairsCheckerCC(clazz: ClassSymbol, self: Type, srcPos: SrcPos)(using Context) extends OverridingPairsChecker(clazz, self) { + class OverridingPairsCheckerCC(clazz: ClassSymbol, self: Type, srcPos: SrcPos)(using Context) extends OverridingPairsChecker(clazz, self): /** Check subtype with box adaptation. * This function is passed to RefChecks to check the compatibility of overriding pairs. * @param sym symbol of the field definition that is being checked @@ -856,7 +1106,7 @@ class CheckCaptures extends Recheck, SymTransformer: val actual1 = val saved = curEnv try - curEnv = Env(clazz, nestedInOwner = true, capturedVars(clazz), isBoxed = false, outer0 = curEnv) + curEnv = Env(clazz, EnvKind.NestedInOwner, capturedVars(clazz), outer0 = curEnv) val adapted = adaptBoxed(actual, expected1, srcPos, alwaysConst = true) actual match case _: MethodType => @@ -868,7 +1118,12 @@ class CheckCaptures extends Recheck, SymTransformer: case _ => adapted finally curEnv = saved actual1 frozen_<:< expected1 - } + + override def needsCheck(overriding: Symbol, overridden: Symbol)(using Context): Boolean = + !setup.isPreCC(overriding) && !setup.isPreCC(overridden) + + override def checkInheritedTraitParameters: Boolean = false + end OverridingPairsCheckerCC def traverse(t: Tree)(using Context) = t match @@ -877,17 +1132,48 @@ class CheckCaptures extends Recheck, SymTransformer: case _ => traverseChildren(t) + /** Check a ValDef or DefDef as an action performed in a completer. 
Since + * these checks can appear out of order, we need to first create the correct + * environment for checking the definition. + */ + def completeDef(tree: ValOrDefDef, sym: Symbol)(using Context): Type = + val saved = curEnv + try + // Setup environment to reflect the new owner. + val envForOwner: Map[Symbol, Env] = curEnv.outersIterator + .takeWhile(e => !capturedVars(e.owner).isAlwaysEmpty) // no refs can leak beyond this point + .map(e => (e.owner, e)) + .toMap + def restoreEnvFor(sym: Symbol): Env = + val localSet = capturedVars(sym) + if localSet.isAlwaysEmpty then rootEnv + else envForOwner.get(sym) match + case Some(e) => e + case None => Env(sym, EnvKind.Regular, localSet, restoreEnvFor(sym.owner)) + curEnv = restoreEnvFor(sym.owner) + capt.println(i"Complete $sym in ${curEnv.outersIterator.toList.map(_.owner)}") + recheckDef(tree, sym) + finally + curEnv = saved + + private val setup: SetupAPI = thisPhase.prev.asInstanceOf[Setup] + override def checkUnit(unit: CompilationUnit)(using Context): Unit = - Setup(preRecheckPhase, thisPhase, recheckDef)(ctx.compilationUnit.tpdTree) - //println(i"SETUP:\n${Recheck.addRecheckedTypes.transform(ctx.compilationUnit.tpdTree)}") - withCaptureSetsExplained { + setup.setupUnit(unit.tpdTree, completeDef) + collectCapturedMutVars.traverse(unit.tpdTree) + + if ctx.settings.YccPrintSetup.value then + val echoHeader = "[[syntax tree at end of cc setup]]" + val treeString = show(unit.tpdTree) + report.echo(s"$echoHeader\n$treeString\n") + + withCaptureSetsExplained: super.checkUnit(unit) checkOverrides.traverse(unit.tpdTree) checkSelfTypes(unit.tpdTree) postCheck(unit.tpdTree) if ctx.settings.YccDebug.value then show(unit.tpdTree) // this does not print tree, but makes its variables visible for dependency printing - } /** Check that self types of subclasses conform to self types of super classes. * (See comment below how this is achieved). The check assumes that classes @@ -918,28 +1204,29 @@ class CheckCaptures extends Recheck, SymTransformer: } assert(roots.nonEmpty) for case root: ClassSymbol <- roots do - checkSelfAgainstParents(root, root.baseClasses) - val selfType = root.asClass.classInfo.selfType - interpolator(startingVariance = -1).traverse(selfType) - if !root.isEffectivelySealed then - def matchesExplicitRefsInBaseClass(refs: CaptureSet, cls: ClassSymbol): Boolean = - cls.baseClasses.tail.exists { psym => - val selfType = psym.asClass.givenSelfType - selfType.exists && selfType.captureSet.elems == refs.elems - } - selfType match - case CapturingType(_, refs: CaptureSet.Var) - if !refs.isUniversal && !matchesExplicitRefsInBaseClass(refs, root) => - // Forbid inferred self types unless they are already implied by an explicit - // self type in a parent.
- report.error( - em"""$root needs an explicitly declared self type since its - |inferred self type $selfType - |is not visible in other compilation units that define subclasses.""", - root.srcPos) - case _ => - parentTrees -= root - capt.println(i"checked $root with $selfType") + inContext(ctx.fresh.setOwner(root)): + checkSelfAgainstParents(root, root.baseClasses) + val selfType = root.asClass.classInfo.selfType + interpolator(startingVariance = -1).traverse(selfType) + if !root.isEffectivelySealed then + def matchesExplicitRefsInBaseClass(refs: CaptureSet, cls: ClassSymbol): Boolean = + cls.baseClasses.tail.exists { psym => + val selfType = psym.asClass.givenSelfType + selfType.exists && selfType.captureSet.elems == refs.elems + } + selfType match + case CapturingType(_, refs: CaptureSet.Var) + if !refs.elems.exists(_.isRootCapability) && !matchesExplicitRefsInBaseClass(refs, root) => + // Forbid inferred self types unless they are already implied by an explicit + // self type in a parent. + report.error( + em"""$root needs an explicitly declared self type since its + |inferred self type $selfType + |is not visible in other compilation units that define subclasses.""", + root.srcPos) + case _ => + parentTrees -= root + capt.println(i"checked $root with $selfType") end checkSelfTypes /** Heal ill-formed capture sets in the type parameter. @@ -948,9 +1235,9 @@ class CheckCaptures extends Recheck, SymTransformer: * that this type parameter can't see. * For example, when capture checking the following expression: * - * def usingLogFile[T](op: (f: {cap} File) => T): T = ... + * def usingLogFile[T](op: File^ => T): T = ... * - * usingLogFile[box ?1 () -> Unit] { (f: {cap} File) => () => { f.write(0) } } + * usingLogFile[box ?1 () -> Unit] { (f: File^) => () => { f.write(0) } } * * We may propagate `f` into ?1, making ?1 ill-formed. * This also causes soundness issues, since `f` in ?1 should be widened to `cap`, @@ -960,43 +1247,45 @@ class CheckCaptures extends Recheck, SymTransformer: * compensate this by pushing the widened capture set of `f` into ?1. * This solves the soundness issue caused by the ill-formedness of ?1. */ - private def healTypeParam(tree: Tree)(using Context): Unit = + private def healTypeParam(tree: Tree, paramName: TypeName, meth: Symbol)(using Context): Unit = val checker = new TypeTraverser: + private var allowed: SimpleIdentitySet[TermParamRef] = SimpleIdentitySet.empty + private def isAllowed(ref: CaptureRef): Boolean = ref match case ref: TermParamRef => allowed.contains(ref) case _ => true - // Widen the given term parameter refs x₁ : C₁ S₁ , ⋯ , xₙ : Cₙ Sₙ to their capture sets C₁ , ⋯ , Cₙ. - // - // If in these capture sets there are any capture references that are term parameter references we should avoid, - // we will widen them recursively.
- private def widenParamRefs(refs: List[TermParamRef]): List[CaptureSet] = - @scala.annotation.tailrec - def recur(todos: List[TermParamRef], acc: List[CaptureSet]): List[CaptureSet] = - todos match - case Nil => acc - case ref :: rem => - val cs = ref.captureSetOfInfo - val nextAcc = cs.filter(isAllowed(_)) :: acc - val nextRem: List[TermParamRef] = (cs.elems.toList.filter(!isAllowed(_)) ++ rem).asInstanceOf - recur(nextRem, nextAcc) - recur(refs, Nil) - private def healCaptureSet(cs: CaptureSet): Unit = - def avoidance(elems: List[CaptureRef])(using Context): Unit = - val toInclude = widenParamRefs(elems.filter(!isAllowed(_)).asInstanceOf) - //println(i"HEAL $cs by widening to $toInclude") - toInclude.foreach(checkSubset(_, cs, tree.srcPos)) - cs.ensureWellformed(avoidance) - - private var allowed: SimpleIdentitySet[TermParamRef] = SimpleIdentitySet.empty + cs.ensureWellformed: elem => + ctx ?=> + var seen = new util.HashSet[CaptureRef] + def recur(ref: CaptureRef): Unit = ref.stripReach match + case ref: TermParamRef + if !allowed.contains(ref) && !seen.contains(ref) => + seen += ref + if ref.underlying.isRef(defn.Caps_Cap) then + report.error(i"escaping local reference $ref", tree.srcPos) + else + val widened = ref.captureSetOfInfo + val added = widened.filter(isAllowed(_)) + capt.println(i"heal $ref in $cs by widening to $added") + if !added.subCaptures(cs, frozen = false).isOK then + val location = if meth.exists then i" of $meth" else "" + val debugSetInfo = if ctx.settings.YccDebug.value then i" $cs" else "" + report.error( + i"local reference ${ref.paramName} leaks into outer capture set$debugSetInfo of type parameter $paramName$location", + tree.srcPos) + else + widened.elems.foreach(recur) + case _ => + recur(elem) def traverse(tp: Type) = tp match case CapturingType(parent, refs) => healCaptureSet(refs) traverse(parent) - case tp @ RefinedType(parent, rname, rinfo: MethodType) if defn.isFunctionOrPolyType(tp) => + case defn.RefinedFunctionOf(rinfo: MethodType) => traverse(rinfo) case tp: TermLambda => val saved = allowed @@ -1011,76 +1300,58 @@ class CheckCaptures extends Recheck, SymTransformer: checker.traverse(tree.knownType) end healTypeParam + def checkArraysAreSealedIn(tp: Type, pos: SrcPos)(using Context): Unit = + val check = new TypeTraverser: + def traverse(t: Type): Unit = + t match + case AppliedType(tycon, arg :: Nil) if tycon.typeSymbol == defn.ArrayClass => + if !(pos.span.isSynthetic && ctx.reporter.errorsReported) + && !arg.typeSymbol.name.is(WildcardParamName) + then + CheckCaptures.disallowRootCapabilitiesIn(arg, NoSymbol, + "Array", "have element type", + "Since arrays are mutable, they have to be treated like variables,\nso their element type must be sealed.", + pos) + traverseChildren(t) + case defn.RefinedFunctionOf(rinfo: MethodType) => + traverse(rinfo) + case _ => + traverseChildren(t) + check.traverse(tp) + /** Perform the following kinds of checks * - Check all explicitly written capturing types for well-formedness using `checkWellFormedPost`. - * - Check that externally visible `val`s or `def`s have empty capture sets. If not, - * suggest an explicit type. This is so that separate compilation (where external - * symbols have empty capture sets) gives the same results as joint compilation. * - Check that arguments of TypeApplys and AppliedTypes conform to their bounds. * - Heal ill-formed capture sets of type parameters. See `healTypeParam`. 
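+ * In addition, `check` calls `checkArraysAreSealedIn` on type trees so that
+ * array element types do not capture the root capability. An illustrative sketch:
+ * {{{
+ * val xs: Array[() => Unit] = ...  // rejected: the element type captures `cap`,
+ *                                  // but array element types must be sealed
+ * }}}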
*/ def postCheck(unit: tpd.Tree)(using Context): Unit = val checker = new TreeTraverser: def traverse(tree: Tree)(using Context): Unit = - traverseChildren(tree) - check(tree) - def check(tree: Tree) = tree match - case _: InferredTypeTree => - case tree: TypeTree if !tree.span.isZeroExtent => - tree.knownType.foreachPart { tp => - checkWellformedPost(tp, tree.srcPos) - tp match - case AnnotatedType(_, annot) if annot.symbol == defn.RetainsAnnot => - warnIfRedundantCaptureSet(annot.tree) - case _ => - } - case t: ValOrDefDef - if t.tpt.isInstanceOf[InferredTypeTree] && !Synthetics.isExcluded(t.symbol) => - val sym = t.symbol - val isLocal = - sym.owner.ownersIterator.exists(_.isTerm) - || sym.accessBoundary(defn.RootClass).isContainedIn(sym.topLevelClass) - def canUseInferred = // If canUseInferred is false, all capturing types in the type of `sym` need to be given explicitly - sym.is(Private) // private symbols can always have inferred types - || sym.name.is(DefaultGetterName) // default getters are exempted since otherwise it would be - // too annoying. This is a hole since a defualt getter's result type - // might leak into a type variable. - || // non-local symbols cannot have inferred types since external capture types are not inferred - isLocal // local symbols still need explicit types if - && !sym.owner.is(Trait) // they are defined in a trait, since we do OverridingPairs checking before capture inference - def isNotPureThis(ref: CaptureRef) = ref match { - case ref: ThisType => !ref.cls.isPureClass - case _ => true - } - if !canUseInferred then - val inferred = t.tpt.knownType - def checkPure(tp: Type) = tp match - case CapturingType(_, refs) - if !refs.elems.filter(isNotPureThis).isEmpty => - val resultStr = if t.isInstanceOf[DefDef] then " result" else "" - report.error( - em"""Non-local $sym cannot have an inferred$resultStr type - |$inferred - |with non-empty capture set $refs. 
- |The type needs to be declared explicitly.""".withoutDisambiguation(), - t.srcPos) - case _ => - inferred.foreachPart(checkPure, StopAt.Static) - case t @ TypeApply(fun, args) => + val lctx = tree match + case _: DefTree | _: TypeDef if tree.symbol.exists => ctx.withOwner(tree.symbol) + case _ => ctx + trace(i"post check $tree"): + traverseChildren(tree)(using lctx) + check(tree) + def check(tree: Tree)(using Context) = tree match + case TypeApply(fun, args) => fun.knownType.widen match case tl: PolyType => - val normArgs = args.lazyZip(tl.paramInfos).map { (arg, bounds) => + val normArgs = args.lazyZip(tl.paramInfos).map: (arg, bounds) => arg.withType(arg.knownType.forceBoxStatus( bounds.hi.isBoxedCapturing | bounds.lo.isBoxedCapturing)) - } checkBounds(normArgs, tl) + args.lazyZip(tl.paramNames).foreach(healTypeParam(_, _, fun.symbol)) case _ => - - args.foreach(healTypeParam(_)) + case tree: TypeTree => + checkArraysAreSealedIn(tree.tpe, tree.srcPos) case _ => end check end checker - checker.traverse(unit) + checker.traverse(unit)(using ctx.withOwner(defn.RootClass)) + for chk <- todoAtPostCheck do chk() + setup.postCheck() + if !ctx.reporter.errorsReported then // We don't report errors here if previous errors were reported, because other // errors often result in bad applied types, but flagging these bad types gives diff --git a/compiler/src/dotty/tools/dotc/cc/RetainingType.scala b/compiler/src/dotty/tools/dotc/cc/RetainingType.scala new file mode 100644 index 000000000000..7902b03445fb --- /dev/null +++ b/compiler/src/dotty/tools/dotc/cc/RetainingType.scala @@ -0,0 +1,35 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Types.*, Symbols.*, Contexts.* +import ast.tpd.* +import Annotations.Annotation +import Decorators.i + +/** A builder and extractor for annotated types with @retains or @retainsByName annotations.
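+ * For example (a sketch): `RetainingType(tp, ref :: Nil)` builds `tp @retains(ref)`,
+ * and the extractor recovers the parts:
+ * {{{
+ * RetainingType(tp, ref :: Nil) match
+ *   case RetainingType(parent, refs) => ...  // parent is `tp`, refs is `List(ref)`
+ * }}}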
+ */ +object RetainingType: + + def apply(tp: Type, refs: List[Tree], byName: Boolean = false)(using Context): Type = + val annotCls = if byName then defn.RetainsByNameAnnot else defn.RetainsAnnot + val annotTree = + New(annotCls.typeRef, + Typed( + SeqLiteral(refs, TypeTree(defn.AnyType)), + TypeTree(defn.RepeatedParamClass.typeRef.appliedTo(defn.AnyType))) :: Nil) + AnnotatedType(tp, Annotation(annotTree)) + + def unapply(tp: AnnotatedType)(using Context): Option[(Type, List[Tree])] = + val sym = tp.annot.symbol + if sym == defn.RetainsAnnot || sym == defn.RetainsByNameAnnot then + tp.annot match + case _: CaptureAnnotation => + assert(ctx.mode.is(Mode.IgnoreCaptures), s"bad retains $tp at ${ctx.phase}") + None + case ann => + Some((tp.parent, ann.tree.retainedElems)) + else + None +end RetainingType diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index bbe54f14b86c..74e67bda5fab 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -2,17 +2,39 @@ package dotty.tools package dotc package cc -import core._ +import core.* import Phases.*, DenotTransformers.*, SymDenotations.* import Contexts.*, Names.*, Flags.*, Symbols.*, Decorators.* import Types.*, StdNames.* -import config.Printers.capt -import ast.tpd -import transform.Recheck.* -import CaptureSet.IdentityCaptRefMap +import Annotations.Annotation +import config.Feature +import config.Printers.{capt, captDebug} +import ast.tpd, tpd.* +import transform.{PreRecheck, Recheck}, Recheck.* +import CaptureSet.{IdentityCaptRefMap, IdempotentCaptRefMap} import Synthetics.isExcluded import util.Property -import dotty.tools.dotc.core.Annotations.Annotation +import printing.{Printer, Texts}, Texts.{Text, Str} +import collection.mutable + +/** Operations accessed from CheckCaptures */ +trait SetupAPI: + type DefRecheck = (tpd.ValOrDefDef, Symbol) => Context ?=> Type + def setupUnit(tree: Tree, recheckDef: DefRecheck)(using Context): Unit + def isPreCC(sym: Symbol)(using Context): Boolean + def postCheck()(using Context): Unit + +object Setup: + + /** Recognizer for `res $throws exc`, returning `(res, exc)` in case of success */ + object throwsAlias: + def unapply(tp: Type)(using Context): Option[(Type, Type)] = tp match + case AppliedType(tycon, res :: exc :: Nil) if tycon.typeSymbol == defn.throwsAlias => + Some((res, exc)) + case _ => + None +end Setup +import Setup.* /** A tree traverser that prepares a compilation unit to be capture checked. * It does the following: @@ -26,38 +48,90 @@ import dotty.tools.dotc.core.Annotations.Annotation * are boxed on access). * - Link the external types of val and def symbols with the inferred types based on their parameter symbols. */ -class Setup( - preRecheckPhase: DenotTransformer, - thisPhase: DenotTransformer, - recheckDef: (tpd.ValOrDefDef, Symbol) => Context ?=> Unit) -extends tpd.TreeTraverser: - import tpd.* - - /** Create dependent function with underlying function class `tycon` and given - * arguments `argTypes` and result `resType`. 
+class Setup extends PreRecheck, SymTransformer, SetupAPI: + thisPhase => + + override def isRunnable(using Context) = + super.isRunnable && Feature.ccEnabledSomewhere + + private val toBeUpdated = new mutable.HashSet[Symbol] + + private def newFlagsFor(symd: SymDenotation)(using Context): FlagSet = + if symd.isAllOf(PrivateParamAccessor) && symd.owner.is(CaptureChecked) && !symd.hasAnnotation(defn.ConstructorOnlyAnnot) + then symd.flags &~ Private | Recheck.ResetPrivate + else symd.flags + + def isPreCC(sym: Symbol)(using Context): Boolean = + sym.isTerm && sym.maybeOwner.isClass + && !sym.is(Module) + && !sym.owner.is(CaptureChecked) + && !defn.isFunctionSymbol(sym.owner) + + private def fluidify(using Context) = new TypeMap with IdempotentCaptRefMap: + def apply(t: Type): Type = t match + case t: MethodType => + mapOver(t) + case t: TypeLambda => + t.derivedLambdaType(resType = this(t.resType)) + case CapturingType(_, _) => + t + case _ => + val t1 = t match + case t @ defn.RefinedFunctionOf(rinfo: MethodType) => + t.derivedRefinedType(t.parent, t.refinedName, this(rinfo)) + case _ => + mapOver(t) + if variance > 0 then t1 + else decorate(t1, addedSet = Function.const(CaptureSet.Fluid)) + + /** - Reset `private` flags of parameter accessors so that we can refine them + * in Setup if they have non-empty capture sets. + * - Special handling of some symbols defined for case classes. + * Enabled only until recheck is finished, and provided some compilation unit + * is CC-enabled. */ - private def depFun(tycon: Type, argTypes: List[Type], resType: Type)(using Context): Type = - MethodType.companion( - isContextual = defn.isContextFunctionClass(tycon.classSymbol), - )(argTypes, resType) - .toFunctionType(isJava = false, alwaysDependent = true) + def transformSym(symd: SymDenotation)(using Context): SymDenotation = + if !pastRecheck && Feature.ccEnabledSomewhere then + val sym = symd.symbol + def mappedInfo = + if toBeUpdated.contains(sym) then symd.info + else transformExplicitType(symd.info) + if Synthetics.needsTransform(symd) then + Synthetics.transform(symd, mappedInfo) + else if isPreCC(sym) then + symd.copySymDenotation(info = fluidify(sym.info)) + else if symd.owner.isTerm || symd.is(CaptureChecked) || symd.owner.is(CaptureChecked) then + val newFlags = newFlagsFor(symd) + val newInfo = mappedInfo + if sym.isClass then + sym.thisType.asInstanceOf[ThisType].invalidateCaches() + if newFlags != symd.flags || (newInfo ne sym.info) + then symd.copySymDenotation(initFlags = newFlags, info = newInfo) + else symd + else symd + else symd + end transformSym /** If `tp` is an unboxed capturing type or a function returning an unboxed capturing type, * convert it to be boxed. 
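+ * For example, in the notation the capture checker prints (an illustrative sketch):
+ * {{{
+ * box( C^{r} )       =  box C^{r}
+ * box( A -> C^{r} )  =  A -> box C^{r}   // boxing recurses into function results
+ * }}}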
*/ private def box(tp: Type)(using Context): Type = - def recur(tp: Type): Type = tp.dealias match - case tp @ CapturingType(parent, refs) if !tp.isBoxed => - tp.boxed + def recur(tp: Type): Type = tp.dealiasKeepAnnotsAndOpaques match + case tp @ CapturingType(parent, refs) => + if tp.isBoxed then tp else tp.boxed + case tp @ AnnotatedType(parent, ann) => + if ann.symbol == defn.RetainsAnnot + then CapturingType(parent, ann.tree.toCaptureSet, boxed = true) + else tp.derivedAnnotatedType(box(parent), ann) case tp1 @ AppliedType(tycon, args) if defn.isNonRefinedFunction(tp1) => val res = args.last val boxedRes = recur(res) if boxedRes eq res then tp else tp1.derivedAppliedType(tycon, args.init :+ boxedRes) - case tp1 @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionOrPolyType(tp1) => + case tp1 @ defn.RefinedFunctionOf(rinfo: MethodType) => val boxedRinfo = recur(rinfo) if boxedRinfo eq rinfo then tp - else boxedRinfo.toFunctionType(isJava = false, alwaysDependent = true) + else boxedRinfo.toFunctionType(alwaysDependent = true) case tp1: MethodOrPoly => val res = tp1.resType val boxedRes = recur(res) @@ -77,284 +151,170 @@ extends tpd.TreeTraverser: * pos.../lists.scala and pos/...curried-shorthands.scala fail. * Need to figure out why. * 3. Refine other class types C by adding capture set variables to their parameter getters - * (see addCaptureRefinements) + * (see addCaptureRefinements), provided `refine` is true. * 4. Add capture set variables to all types that can be tracked * * Polytype bounds are only cleaned using step 1, but not otherwise transformed. */ - private def mapInferred(using Context) = new TypeMap: - - /** Drop @retains annotations everywhere */ - object cleanup extends TypeMap: - def apply(t: Type) = t match - case AnnotatedType(parent, annot) if annot.symbol == defn.RetainsAnnot => - apply(parent) - case _ => - mapOver(t) - - /** Refine a possibly applied class type C where the class has tracked parameters - * x_1: T_1, ..., x_n: T_n to C { val x_1: CV_1 T_1, ..., val x_n: CV_n T_n } - * where CV_1, ..., CV_n are fresh capture sets. - */ - def addCaptureRefinements(tp: Type): Type = tp match - case _: TypeRef | _: AppliedType if tp.typeParams.isEmpty => - tp.typeSymbol match - case cls: ClassSymbol - if !defn.isFunctionClass(cls) && !cls.is(JavaDefined) => - // We assume that Java classes can refer to capturing Scala types only indirectly, - // using type parameters. Hence, no need to refine them. - cls.paramGetters.foldLeft(tp) { (core, getter) => - if getter.termRef.isTracked then - val getterType = tp.memberInfo(getter).strippedDealias - RefinedType(core, getter.name, CapturingType(getterType, CaptureSet.Var())) - .showing(i"add capture refinement $tp --> $result", capt) + private def transformInferredType(tp: Type)(using Context): Type = + def mapInferred(refine: Boolean): TypeMap = new TypeMap: + override def toString = "map inferred" + + /** Refine a possibly applied class type C where the class has tracked parameters + * x_1: T_1, ..., x_n: T_n to C { val x_1: CV_1 T_1, ..., val x_n: CV_n T_n } + * where CV_1, ..., CV_n are fresh capture sets. 
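+ * For instance (a sketch; `Ref` and `Proc` are assumed names):
+ * {{{
+ * class Ref(init: Proc)   // with `init` a tracked parameter
+ * // an inferred occurrence of Ref is refined to
+ * //   Ref { val init: CV Proc }   where CV is a fresh capture set variable
+ * }}}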
+ */ + def addCaptureRefinements(tp: Type): Type = tp match + case _: TypeRef | _: AppliedType if refine && tp.typeParams.isEmpty => + tp.typeSymbol match + case cls: ClassSymbol + if !defn.isFunctionClass(cls) && cls.is(CaptureChecked) => + cls.paramGetters.foldLeft(tp) { (core, getter) => + if atPhase(thisPhase.next)(getter.termRef.isTracked) then + val getterType = + mapInferred(refine = false)(tp.memberInfo(getter)).strippedDealias + RefinedType(core, getter.name, + CapturingType(getterType, CaptureSet.RefiningVar(ctx.owner))) + .showing(i"add capture refinement $tp --> $result", capt) + else + core + } + case _ => tp + case _ => tp + + private var isTopLevel = true + + private def mapNested(ts: List[Type]): List[Type] = + val saved = isTopLevel + isTopLevel = false + try ts.mapConserve(this) + finally isTopLevel = saved + + def apply(tp: Type) = + val tp1 = tp match + case AnnotatedType(parent, annot) if annot.symbol == defn.RetainsAnnot => + // Drop explicit retains annotations + apply(parent) + case tp @ AppliedType(tycon, args) => + val tycon1 = this(tycon) + if defn.isNonRefinedFunction(tp) then + // Convert toplevel generic function types to dependent functions + if !defn.isFunctionSymbol(tp.typeSymbol) && (tp.dealias ne tp) then + // This type is a function after dealiasing, so we dealias and recurse. + // See #15925. + this(tp.dealias) else - core - } - case _ => tp - case _ => tp - - private def superTypeIsImpure(tp: Type): Boolean = { - tp.dealias match - case CapturingType(_, refs) => - !refs.isAlwaysEmpty - case tp: (TypeRef | AppliedType) => - val sym = tp.typeSymbol - if sym.isClass then - sym == defn.AnyClass - // we assume Any is a shorthand of {cap} Any, so if Any is an upper - // bound, the type is taken to be impure. - else superTypeIsImpure(tp.superType) - case tp: (RefinedOrRecType | MatchType) => - superTypeIsImpure(tp.underlying) - case tp: AndType => - superTypeIsImpure(tp.tp1) || needsVariable(tp.tp2) - case tp: OrType => - superTypeIsImpure(tp.tp1) && superTypeIsImpure(tp.tp2) - case _ => - false - }.showing(i"super type is impure $tp = $result", capt) - - /** Should a capture set variable be added on type `tp`? */ - def needsVariable(tp: Type): Boolean = { - tp.typeParams.isEmpty && tp.match - case tp: (TypeRef | AppliedType) => - val tp1 = tp.dealias - if tp1 ne tp then needsVariable(tp1) - else - val sym = tp1.typeSymbol - if sym.isClass then - !sym.isPureClass && sym != defn.AnyClass - else superTypeIsImpure(tp1) - case tp: (RefinedOrRecType | MatchType) => - needsVariable(tp.underlying) - case tp: AndType => - needsVariable(tp.tp1) && needsVariable(tp.tp2) - case tp: OrType => - needsVariable(tp.tp1) || needsVariable(tp.tp2) - case CapturingType(parent, refs) => - needsVariable(parent) - && refs.isConst // if refs is a variable, no need to add another - && !refs.isUniversal // if refs is {cap}, an added variable would not change anything - case _ => - false - }.showing(i"can have inferred capture $tp = $result", capt) - - /** Add a capture set variable to `tp` if necessary, or maybe pull out - * an embedded capture set variable from a part of `tp`. 
- */ - def addVar(tp: Type) = tp match - case tp @ RefinedType(parent @ CapturingType(parent1, refs), rname, rinfo) => - CapturingType(tp.derivedRefinedType(parent1, rname, rinfo), refs, parent.isBoxed) - case tp: RecType => - tp.parent match - case parent @ CapturingType(parent1, refs) => - CapturingType(tp.derivedRecType(parent1), refs, parent.isBoxed) - case _ => - tp // can return `tp` here since unlike RefinedTypes, RecTypes are never created - // by `mapInferred`. Hence if the underlying type admits capture variables - // a variable was already added, and the first case above would apply. - case AndType(tp1 @ CapturingType(parent1, refs1), tp2 @ CapturingType(parent2, refs2)) => - assert(refs1.asVar.elems.isEmpty) - assert(refs2.asVar.elems.isEmpty) - assert(tp1.isBoxed == tp2.isBoxed) - CapturingType(AndType(parent1, parent2), refs1 ** refs2, tp1.isBoxed) - case tp @ OrType(tp1 @ CapturingType(parent1, refs1), tp2 @ CapturingType(parent2, refs2)) => - assert(refs1.asVar.elems.isEmpty) - assert(refs2.asVar.elems.isEmpty) - assert(tp1.isBoxed == tp2.isBoxed) - CapturingType(OrType(parent1, parent2, tp.isSoft), refs1 ++ refs2, tp1.isBoxed) - case tp @ OrType(tp1 @ CapturingType(parent1, refs1), tp2) => - CapturingType(OrType(parent1, tp2, tp.isSoft), refs1, tp1.isBoxed) - case tp @ OrType(tp1, tp2 @ CapturingType(parent2, refs2)) => - CapturingType(OrType(tp1, parent2, tp.isSoft), refs2, tp2.isBoxed) - case _ if needsVariable(tp) => - val cs = tp.dealias match - case CapturingType(_, refs) => CaptureSet.Var(refs.elems) - case _ => CaptureSet.Var() - CapturingType(tp, cs) - case _ => - tp - - private var isTopLevel = true - - private def mapNested(ts: List[Type]): List[Type] = - val saved = isTopLevel - isTopLevel = false - try ts.mapConserve(this) finally isTopLevel = saved - - def apply(t: Type) = - val tp = expandThrowsAlias(t) - val tp1 = tp match - case AnnotatedType(parent, annot) if annot.symbol == defn.RetainsAnnot => - // Drop explicit retains annotations - apply(parent) - case tp @ AppliedType(tycon, args) => - val tycon1 = this(tycon) - if defn.isNonRefinedFunction(tp) then - // Convert toplevel generic function types to dependent functions - if !defn.isFunctionSymbol(tp.typeSymbol) && (tp.dealias ne tp) then - // This type is a function after dealiasing, so we dealias and recurse. - // See #15925. 
- this(tp.dealias) + val args0 = args.init + var res0 = args.last + val args1 = mapNested(args0) + val res1 = this(res0) + if isTopLevel then + depFun(args1, res1, + isContextual = defn.isContextFunctionClass(tycon1.classSymbol)) + .showing(i"add function refinement $tp ($tycon1, $args1, $res1) (${tp.dealias}) --> $result", capt) + else if (tycon1 eq tycon) && (args1 eq args0) && (res1 eq res0) then + tp + else + tp.derivedAppliedType(tycon1, args1 :+ res1) else - val args0 = args.init - var res0 = args.last - val args1 = mapNested(args0) - val res1 = this(res0) - if isTopLevel then - depFun(tycon1, args1, res1) - .showing(i"add function refinement $tp ($tycon1, $args1, $res1) (${tp.dealias}) --> $result", capt) - else if (tycon1 eq tycon) && (args1 eq args0) && (res1 eq res0) then - tp - else - tp.derivedAppliedType(tycon1, args1 :+ res1) - else - tp.derivedAppliedType(tycon1, args.mapConserve(arg => this(arg))) - case tp @ RefinedType(core, rname, rinfo: MethodType) if defn.isFunctionOrPolyType(tp) => - val rinfo1 = apply(rinfo) - if rinfo1 ne rinfo then rinfo1.toFunctionType(isJava = false, alwaysDependent = true) - else tp - case tp: MethodType => - tp.derivedLambdaType( - paramInfos = mapNested(tp.paramInfos), - resType = this(tp.resType)) - case tp: TypeLambda => - // Don't recurse into parameter bounds, just cleanup any stray retains annotations - tp.derivedLambdaType( - paramInfos = tp.paramInfos.mapConserve(cleanup(_).bounds), - resType = this(tp.resType)) - case _ => - mapOver(tp) - addVar(addCaptureRefinements(tp1)) - end apply - end mapInferred - - private def transformInferredType(tp: Type, boxed: Boolean)(using Context): Type = - val tp1 = mapInferred(tp) - if boxed then box(tp1) else tp1 - - /** Expand some aliases of function types to the underlying functions. - * Right now, these are only $throws aliases, but this could be generalized. - */ - private def expandThrowsAlias(tp: Type)(using Context) = tp match - case AppliedType(tycon, res :: exc :: Nil) if tycon.typeSymbol == defn.throwsAlias => - // hard-coded expansion since $throws aliases in stdlib are defined with `?=>` rather than `?->` - defn.FunctionOf( - AnnotatedType( - defn.CanThrowClass.typeRef.appliedTo(exc), - Annotation(defn.ErasedParamAnnot, defn.CanThrowClass.span)) :: Nil, - res, - isContextual = true - ) - case _ => tp - - private def expandThrowsAliases(using Context) = new TypeMap: - def apply(t: Type) = t match - case _: AppliedType => - val t1 = expandThrowsAlias(t) - if t1 ne t then apply(t1) else mapOver(t) - case _: LazyRef => - t - case t @ AnnotatedType(t1, ann) => - // Don't map capture sets, since that would implicitly normalize sets that - // are not well-formed. - t.derivedAnnotatedType(apply(t1), ann) - case _ => - mapOver(t) - - /** Fill in capture sets of curried function types from left to right, using - * a combination of the following two rules: - * - * 1. Expand `{c} (x: A) -> (y: B) -> C` - * to `{c} (x: A) -> {c} (y: B) -> C` - * 2. Expand `(x: A) -> (y: B) -> C` where `x` is tracked - * to `(x: A) -> {x} (y: B) -> C` - * - * TODO: Should we also propagate capture sets to the left? - */ - private def expandAbbreviations(using Context) = new TypeMap: - - /** Propagate `outerCs` as well as all tracked parameters as capture set to the result type - * of the dependent function type `tp`. 
-     */
-    def propagateDepFunctionResult(tp: Type, outerCs: CaptureSet): Type = tp match
-      case RefinedType(parent, nme.apply, rinfo: MethodType) =>
-        val localCs = CaptureSet(rinfo.paramRefs.filter(_.isTracked)*)
-        val rinfo1 = rinfo.derivedLambdaType(
-          resType = propagateEnclosing(rinfo.resType, CaptureSet.empty, outerCs ++ localCs))
-        if rinfo1 ne rinfo then rinfo1.toFunctionType(isJava = false, alwaysDependent = true)
+              tp.derivedAppliedType(tycon1, args.mapConserve(arg => box(this(arg))))
+          case defn.RefinedFunctionOf(rinfo: MethodType) =>
+            val rinfo1 = apply(rinfo)
+            if rinfo1 ne rinfo then rinfo1.toFunctionType(alwaysDependent = true)
+            else tp
+          case tp: MethodType =>
+            tp.derivedLambdaType(
+              paramInfos = mapNested(tp.paramInfos),
+              resType = this(tp.resType))
+          case tp: TypeLambda =>
+            // Don't recurse into parameter bounds, just cleanup any stray retains annotations
+            tp.derivedLambdaType(
+              paramInfos = tp.paramInfos.mapConserve(_.dropAllRetains.bounds),
+              resType = this(tp.resType))
+          case _ =>
+            mapOver(tp)
+        addVar(addCaptureRefinements(normalizeCaptures(tp1)), ctx.owner)
+      end apply
+    end mapInferred
+
+    mapInferred(refine = true)(tp)
+  end transformInferredType
+
+  private def transformExplicitType(tp: Type, tptToCheck: Option[Tree] = None)(using Context): Type =
+    val expandAliases = new DeepTypeMap:
+      override def toString = "expand aliases"
+
+      /** Expand $throws aliases. This is hard-coded here since $throws aliases in stdlib
+       *  are defined with `?=>` rather than `?->`.
+       *  We also have to add a capture set to the last expanded throws alias. I.e.
+       *        T $throws E1 $throws E2
+       *  expands to
+       *     (erased x$0: CanThrow[E1]) ?-> (erased x$1: CanThrow[E2]) ?->{x$0} T
+       */
+      private def expandThrowsAlias(res: Type, exc: Type, encl: List[MethodType]): Type =
+        val paramType = AnnotatedType(
+            defn.CanThrowClass.typeRef.appliedTo(exc),
+            Annotation(defn.ErasedParamAnnot, defn.CanThrowClass.span))
+        val resDecomposed = throwsAlias.unapply(res)
+        val paramName = nme.syntheticParamName(encl.length)
+        val mt = ContextualMethodType(paramName :: Nil)(
+            _ => paramType :: Nil,
+            mt => resDecomposed match
+              case Some((res1, exc1)) => expandThrowsAlias(res1, exc1, mt :: encl)
+              case _ => res
+          )
+        val fntpe = defn.PolyFunctionOf(mt)
+        if !encl.isEmpty && resDecomposed.isEmpty then
+          val cs = CaptureSet(encl.map(_.paramRefs.head)*)
+          CapturingType(fntpe, cs, boxed = false)
+        else fntpe
+
+      /** Map references to capability classes C to C^ */
+      private def expandCapabilityClass(tp: Type): Type =
+        if tp.isCapabilityClassRef
+        then CapturingType(tp, defn.expandedUniversalSet, boxed = false)
+        else tp

-    /** If `tp` is a function type:
-     *   - add `outerCs` as its capture set,
-     *   - propagate `currentCs`, `outerCs`, and all tracked parameters of `tp` to the right.
- */ - def propagateEnclosing(tp: Type, currentCs: CaptureSet, outerCs: CaptureSet): Type = tp match - case tp @ AppliedType(tycon, args) if defn.isFunctionClass(tycon.typeSymbol) => - val tycon1 = this(tycon) - val args1 = args.init.mapConserve(this) - val tp1 = - if args1.exists(!_.captureSet.isAlwaysEmpty) then - val propagated = propagateDepFunctionResult( - depFun(tycon, args1, args.last), currentCs ++ outerCs) - propagated match - case RefinedType(_, _, mt: MethodType) => - if mt.isCaptureDependent then propagated - else - // No need to introduce dependent type, switch back to generic function type - tp.derivedAppliedType(tycon1, args1 :+ mt.resType) - else - val resType1 = propagateEnclosing( - args.last, CaptureSet.empty, currentCs ++ outerCs) - tp.derivedAppliedType(tycon1, args1 :+ resType1) - tp1.capturing(outerCs) - case tp @ RefinedType(parent, nme.apply, rinfo: MethodType) if defn.isFunctionOrPolyType(tp) => - propagateDepFunctionResult(mapOver(tp), currentCs ++ outerCs) - .capturing(outerCs) - case _ => - mapOver(tp) - - def apply(tp: Type): Type = tp match - case CapturingType(parent, cs) => - tp.derivedCapturingType(propagateEnclosing(parent, cs, CaptureSet.empty), cs) - case _ => - propagateEnclosing(tp, CaptureSet.empty, CaptureSet.empty) - end expandAbbreviations - - private def transformExplicitType(tp: Type, boxed: Boolean)(using Context): Type = - val tp1 = expandThrowsAliases(if boxed then box(tp) else tp) - if tp1 ne tp then capt.println(i"expanded: $tp --> $tp1") - if ctx.settings.YccNoAbbrev.value then tp1 - else expandAbbreviations(tp1) + private def recur(t: Type): Type = normalizeCaptures(mapOver(t)) + + def apply(t: Type) = + t match + case t @ CapturingType(parent, refs) => + t.derivedCapturingType(this(parent), refs) + case t @ AnnotatedType(parent, ann) => + val parent1 = this(parent) + if ann.symbol == defn.RetainsAnnot then + for tpt <- tptToCheck do + checkWellformedLater(parent1, ann.tree, tpt) + CapturingType(parent1, ann.tree.toCaptureSet) + else + t.derivedAnnotatedType(parent1, ann) + case throwsAlias(res, exc) => + this(expandThrowsAlias(res, exc, Nil)) + case t: LazyRef => + val t1 = this(t.ref) + if t1 ne t.ref then t1 else t + case t: TypeVar => + this(t.underlying) + case t => + if t.isCapabilityClassRef + then CapturingType(t, defn.expandedUniversalSet, boxed = false) + else recur(t) + end expandAliases + + val tp1 = expandAliases(tp) // TODO: Do we still need to follow aliases? + if tp1 ne tp then capt.println(i"expanded in ${ctx.owner}: $tp --> $tp1") + tp1 + end transformExplicitType /** Transform type of type tree, and remember the transformed type as the type the tree */ private def transformTT(tree: TypeTree, boxed: Boolean, exact: Boolean)(using Context): Unit = if !tree.hasRememberedType then - tree.rememberType( + val transformed = if tree.isInstanceOf[InferredTypeTree] && !exact - then transformInferredType(tree.tpe, boxed) - else transformExplicitType(tree.tpe, boxed)) + then transformInferredType(tree.tpe) + else transformExplicitType(tree.tpe, tptToCheck = Some(tree)) + tree.rememberType(if boxed then box(transformed) else transformed) /** Substitute parameter symbols in `from` to paramRefs in corresponding * method or poly types `to`. We use a single BiTypeMap to do everything. 
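For intuition about the `expandThrowsAlias` rewrite in the hunk above: `$throws` is what the infix type `A throws E` desugars to under the experimental `saferExceptions` feature, and Setup now expands it into contextual functions over erased `CanThrow` capabilities, adding a capture set on the last alias. A minimal user-level sketch of the feature this transformation serves (illustrative only, not part of this patch; `DivByZero` and `safeDiv` are made-up names):

```scala
import language.experimental.saferExceptions

class DivByZero extends Exception

// `Int throws DivByZero` expands internally along the lines of
// (erased x$0: CanThrow[DivByZero]) ?-> Int, as the comment above describes.
def safeDiv(x: Int, y: Int): Int throws DivByZero =
  if y == 0 then throw DivByZero() else x / y

@main def demo =
  // the try/catch provides the erased CanThrow[DivByZero] capability
  try println(safeDiv(10, 2))
  catch case _: DivByZero => println("division by zero")
```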
@@ -366,87 +326,143 @@ extends tpd.TreeTraverser: def apply(t: Type): Type = t match case t: NamedType => - val sym = t.symbol - def outer(froms: List[List[Symbol]], tos: List[LambdaType]): Type = - def inner(from: List[Symbol], to: List[ParamRef]): Type = - if from.isEmpty then outer(froms.tail, tos.tail) - else if sym eq from.head then to.head - else inner(from.tail, to.tail) - if tos.isEmpty then t - else inner(froms.head, tos.head.paramRefs) - outer(from, to) + if t.prefix == NoPrefix then + val sym = t.symbol + def outer(froms: List[List[Symbol]], tos: List[LambdaType]): Type = + def inner(from: List[Symbol], to: List[ParamRef]): Type = + if from.isEmpty then outer(froms.tail, tos.tail) + else if sym eq from.head then to.head + else inner(from.tail, to.tail) + if tos.isEmpty then t + else inner(froms.head, tos.head.paramRefs) + outer(from, to) + else t.derivedSelect(apply(t.prefix)) case _ => mapOver(t) - def inverse(t: Type): Type = t match - case t: ParamRef => - def recur(from: List[LambdaType], to: List[List[Symbol]]): Type = - if from.isEmpty then t - else if t.binder eq from.head then to.head(t.paramNum).namedType - else recur(from.tail, to.tail) - recur(to, from) - case _ => - mapOver(t) + lazy val inverse = new BiTypeMap: + override def toString = "SubstParams.inverse" + def apply(t: Type): Type = t match + case t: ParamRef => + def recur(from: List[LambdaType], to: List[List[Symbol]]): Type = + if from.isEmpty then t + else if t.binder eq from.head then to.head(t.paramNum).namedType + else recur(from.tail, to.tail) + recur(to, from) + case _ => + mapOver(t) + def inverse = SubstParams.this end SubstParams /** Update info of `sym` for CheckCaptures phase only */ private def updateInfo(sym: Symbol, info: Type)(using Context) = - sym.updateInfoBetween(preRecheckPhase, thisPhase, info) - - def traverse(tree: Tree)(using Context): Unit = - tree match - case tree: DefDef => - if isExcluded(tree.symbol) then - return - tree.tpt match - case tpt: TypeTree if tree.symbol.allOverriddenSymbols.hasNext => - tree.paramss.foreach(traverse) - transformTT(tpt, boxed = false, exact = true) + toBeUpdated += sym + sym.updateInfo(thisPhase, info, newFlagsFor(sym)) + toBeUpdated -= sym + sym.namedType match + case ref: CaptureRef => ref.invalidateCaches() // TODO: needed? 
+      case _ =>
+
+  extension (sym: Symbol) def nextInfo(using Context): Type =
+    atPhase(thisPhase.next)(sym.info)
+
+  def setupTraverser(recheckDef: DefRecheck) = new TreeTraverserWithPreciseImportContexts:
+
+    def transformResultType(tpt: TypeTree, sym: Symbol)(using Context): Unit =
+      transformTT(tpt,
+          boxed = !ccConfig.allowUniversalInBoxed && sym.is(Mutable, butNot = Method),
+            // types of mutable variables are boxed in pre 3.3 code
+          exact = sym.allOverriddenSymbols.hasNext,
+            // types of symbols that override a parent don't get a capture set TODO drop
+        )
+      val addDescription = new TypeTraverser:
+        def traverse(tp: Type) = tp match
+          case tp @ CapturingType(parent, refs) =>
+            if !refs.isConst then refs.withDescription(i"of $sym")
+            traverse(parent)
+          case _ =>
+            traverseChildren(tp)
+      addDescription.traverse(tpt.knownType)
+
+    def traverse(tree: Tree)(using Context): Unit =
+      tree match
+        case tree @ DefDef(_, paramss, tpt: TypeTree, _) =>
+          val meth = tree.symbol
+          if isExcluded(meth) then
+            return
+
+          inContext(ctx.withOwner(meth)):
+            paramss.foreach(traverse)
+            transformResultType(tpt, meth)
             traverse(tree.rhs)
             //println(i"TYPE of ${tree.symbol.showLocated} = ${tpt.knownType}")
-          case _ =>
+
+        case tree @ ValDef(_, tpt: TypeTree, _) =>
+          val sym = tree.symbol
+          val defCtx = if sym.isOneOf(TermParamOrAccessor) then ctx else ctx.withOwner(sym)
+          inContext(defCtx):
+            transformResultType(tpt, sym)
+            capt.println(i"mapped $tree = ${tpt.knownType}")
+            traverse(tree.rhs)
+
+        case tree @ TypeApply(fn, args) =>
+          traverse(fn)
+          for case arg: TypeTree <- args do
+            transformTT(arg, boxed = true, exact = false) // type arguments in type applications are boxed
+
+        case tree: TypeDef if tree.symbol.isClass =>
+          inContext(ctx.withOwner(tree.symbol)):
             traverseChildren(tree)
-      case tree @ ValDef(_, tpt: TypeTree, _) =>
-        transformTT(tpt,
-          boxed = tree.symbol.is(Mutable), // types of mutable variables are boxed
-          exact = tree.symbol.allOverriddenSymbols.hasNext // types of symbols that override a parent don't get a capture set
-        )
-        if allowUniversalInBoxed && tree.symbol.is(Mutable)
-            && !tree.symbol.hasAnnotation(defn.UncheckedCapturesAnnot)
-        then
-          CheckCaptures.disallowRootCapabilitiesIn(tpt.knownType,
-            i"Mutable variable ${tree.symbol.name}", "have type",
-            "This restriction serves to prevent local capabilities from escaping the scope where they are defined.",
-            tree.srcPos)
-        traverse(tree.rhs)
-      case tree @ TypeApply(fn, args) =>
-        traverse(fn)
-        for case arg: TypeTree <- args do
-          transformTT(arg, boxed = true, exact = false) // type arguments in type applications are boxed
-
-        if allowUniversalInBoxed then
-          val polyType = fn.tpe.widen.asInstanceOf[TypeLambda]
-          for case (arg: TypeTree, pinfo, pname) <- args.lazyZip(polyType.paramInfos).lazyZip((polyType.paramNames)) do
-            if pinfo.bounds.hi.hasAnnotation(defn.Caps_SealedAnnot) then
-              def where = if fn.symbol.exists then i" in the body of ${fn.symbol}" else ""
-              CheckCaptures.disallowRootCapabilitiesIn(arg.knownType,
-                i"Sealed type variable $pname", " be instantiated to",
-                i"This is often caused by a local capability$where\nleaking as part of its result.",
-                tree.srcPos)
-      case _ =>
-        traverseChildren(tree)
-    tree match
+
+        case tree @ SeqLiteral(elems, tpt: TypeTree) =>
+          traverse(elems)
+          transformTT(tpt, boxed = true, exact = false)
+
+        case _ =>
+          traverseChildren(tree)
+      postProcess(tree)
+    end traverse
+
+    def postProcess(tree: Tree)(using Context): Unit = tree match
       case tree: TypeTree =>
-        transformTT(tree, boxed = false, exact
= false) // other types are not boxed + transformTT(tree, boxed = false, exact = false) case tree: ValOrDefDef => val sym = tree.symbol - // replace an existing symbol info with inferred types where capture sets of + /** The return type of a constructor instantiated with local type and value + * parameters. Constructors have `unit` result type, that's why we can't + * get this type by reading the result type tree, and have to construct it + * explicitly. + */ + def constrReturnType(info: Type, psymss: List[List[Symbol]]): Type = info match + case info: MethodOrPoly => + constrReturnType(info.instantiate(psymss.head.map(_.namedType)), psymss.tail) + case _ => + info + + /** The local result type, which is the known type of the result type tree, + * with special treatment for constructors. + */ + def localReturnType = + if sym.isConstructor then constrReturnType(sym.info, sym.paramSymss) + else tree.tpt.knownType + + def paramSignatureChanges = tree.match + case tree: DefDef => tree.paramss.nestedExists: + case param: ValDef => param.tpt.hasRememberedType + case param: TypeDef => param.rhs.hasRememberedType + case _ => false + + def signatureChanges = + tree.tpt.hasRememberedType && !sym.isConstructor || paramSignatureChanges + + // Replace an existing symbol info with inferred types where capture sets of // TypeParamRefs and TermParamRefs put in correspondence by BiTypeMaps with the // capture sets of the types of the method's parameter symbols and result type. def integrateRT( info: Type, // symbol info to replace psymss: List[List[Symbol]], // the local (type and term) parameter symbols corresponding to `info` + resType: Type, // the locally computed return type prevPsymss: List[List[Symbol]], // the local parameter symbols seen previously in reverse order prevLambdas: List[LambdaType] // the outer method and polytypes generated previously in reverse order ): Type = @@ -455,66 +471,235 @@ extends tpd.TreeTraverser: val psyms = psymss.head mt.companion(mt.paramNames)( mt1 => - if !psyms.exists(_.isUpdatedAfter(preRecheckPhase)) && !mt.isParamDependent && prevLambdas.isEmpty then + if !paramSignatureChanges && !mt.isParamDependent && prevLambdas.isEmpty then mt.paramInfos else val subst = SubstParams(psyms :: prevPsymss, mt1 :: prevLambdas) - psyms.map(psym => subst(psym.info).asInstanceOf[mt.PInfo]), + psyms.map(psym => subst(psym.nextInfo).asInstanceOf[mt.PInfo]), mt1 => - integrateRT(mt.resType, psymss.tail, psyms :: prevPsymss, mt1 :: prevLambdas) + integrateRT(mt.resType, psymss.tail, resType, psyms :: prevPsymss, mt1 :: prevLambdas) ) case info: ExprType => info.derivedExprType(resType = - integrateRT(info.resType, psymss, prevPsymss, prevLambdas)) - case _ => - val restp = tree.tpt.knownType - if prevLambdas.isEmpty then restp - else SubstParams(prevPsymss, prevLambdas)(restp) - - if tree.tpt.hasRememberedType && !sym.isConstructor then - val newInfo = integrateRT(sym.info, sym.paramSymss, Nil, Nil) - .showing(i"update info $sym: ${sym.info} --> $result", capt) + integrateRT(info.resType, psymss, resType, prevPsymss, prevLambdas)) + case info => + if prevLambdas.isEmpty then resType + else SubstParams(prevPsymss, prevLambdas)(resType) + + if sym.exists && signatureChanges then + val newInfo = integrateRT(sym.info, sym.paramSymss, localReturnType, Nil, Nil) + .showing(i"update info $sym: ${sym.info} = $result", capt) if newInfo ne sym.info then - val completer = new LazyType: - def complete(denot: SymDenotation)(using Context) = - denot.info = newInfo - recheckDef(tree, sym) - 
updateInfo(sym, completer) + val updatedInfo = + if sym.isAnonymousFunction + || sym.is(Param) + || sym.is(ParamAccessor) + || sym.isPrimaryConstructor + then + // closures are handled specially; the newInfo is constrained from + // the expected type and only afterwards we recheck the definition + newInfo + else new LazyType: + def complete(denot: SymDenotation)(using Context) = + // infos of other methods are determined from their definitions which + // are checked on demand + assert(ctx.phase == thisPhase.next, i"$sym") + capt.println(i"forcing $sym, printing = ${ctx.mode.is(Mode.Printing)}") + //if ctx.mode.is(Mode.Printing) then new Error().printStackTrace() + denot.info = newInfo + recheckDef(tree, sym) + updateInfo(sym, updatedInfo) + case tree: Bind => val sym = tree.symbol - updateInfo(sym, transformInferredType(sym.info, boxed = false)) + updateInfo(sym, transformInferredType(sym.info)) case tree: TypeDef => tree.symbol match case cls: ClassSymbol => val cinfo @ ClassInfo(prefix, _, ps, decls, selfInfo) = cls.classInfo - if (selfInfo eq NoType) || cls.is(ModuleClass) && !cls.isStatic then - // add capture set to self type of nested classes if no self type is given explicitly - val localRefs = CaptureSet.Var() - val newInfo = ClassInfo(prefix, cls, ps, decls, - CapturingType(cinfo.selfType, localRefs) - .showing(i"inferred self type for $cls: $result", capt)) + if ((selfInfo eq NoType) || cls.is(ModuleClass) && !cls.isStatic) + && !cls.isPureClass + then + // add capture set to self type of nested classes if no self type is given explicitly. + val newSelfType = CapturingType(cinfo.selfType, CaptureSet.Var(cls)) + val ps1 = inContext(ctx.withOwner(cls)): + ps.mapConserve(transformExplicitType(_)) + val newInfo = ClassInfo(prefix, cls, ps1, decls, newSelfType) updateInfo(cls, newInfo) + capt.println(i"update class info of $cls with parents $ps selfinfo $selfInfo to $newInfo") cls.thisType.asInstanceOf[ThisType].invalidateCaches() if cls.is(ModuleClass) then // if it's a module, the capture set of the module reference is the capture set of the self type val modul = cls.sourceModule - updateInfo(modul, CapturingType(modul.info, localRefs)) + updateInfo(modul, CapturingType(modul.info, newSelfType.captureSet)) modul.termRef.invalidateCaches() case _ => - val info = atPhase(preRecheckPhase)(tree.symbol.info) - val newInfo = transformExplicitType(info, boxed = false) - if newInfo ne info then - updateInfo(tree.symbol, newInfo) - capt.println(i"update info of ${tree.symbol} from $info to $newInfo") case _ => - end traverse - - def apply(tree: Tree)(using Context): Unit = - traverse(tree)(using ctx.withProperty(Setup.IsDuringSetupKey, Some(()))) - -object Setup: - val IsDuringSetupKey = new Property.Key[Unit] - - def isDuringSetup(using Context): Boolean = - ctx.property(IsDuringSetupKey).isDefined -end Setup \ No newline at end of file + end postProcess + end setupTraverser + + /** Checks whether an abstract type could be impure. See also: [[needsVariable]]. 
*/ + private def instanceCanBeImpure(tp: Type)(using Context): Boolean = { + tp.dealiasKeepAnnots match + case CapturingType(_, refs) => + !refs.isAlwaysEmpty + case RetainingType(parent, refs) => + !refs.isEmpty + case tp: (TypeRef | AppliedType) => + val sym = tp.typeSymbol + if sym.isClass then + !sym.isPureClass + else + sym != defn.Caps_Cap && instanceCanBeImpure(tp.superType) + case tp: (RefinedOrRecType | MatchType) => + instanceCanBeImpure(tp.underlying) + case tp: AndType => + instanceCanBeImpure(tp.tp1) || instanceCanBeImpure(tp.tp2) + case tp: OrType => + instanceCanBeImpure(tp.tp1) && instanceCanBeImpure(tp.tp2) + case _ => + false + }.showing(i"instance can be impure $tp = $result", capt) + + /** Should a capture set variable be added on type `tp`? */ + def needsVariable(tp: Type)(using Context): Boolean = { + tp.typeParams.isEmpty && tp.match + case tp: (TypeRef | AppliedType) => + val sym = tp.typeSymbol + if sym.isClass then + !sym.isPureClass && sym != defn.AnyClass + else + val tp1 = tp.dealiasKeepAnnotsAndOpaques + if tp1 ne tp then needsVariable(tp1) + else instanceCanBeImpure(tp1) + case tp: (RefinedOrRecType | MatchType) => + needsVariable(tp.underlying) + case tp: AndType => + needsVariable(tp.tp1) && needsVariable(tp.tp2) + case tp: OrType => + needsVariable(tp.tp1) || needsVariable(tp.tp2) + case CapturingType(parent, refs) => + needsVariable(parent) + && refs.isConst // if refs is a variable, no need to add another + && !refs.isUniversal // if refs is {cap}, an added variable would not change anything + case RetainingType(parent, refs) => + needsVariable(parent) + && !refs.tpes.exists: + case ref: TermRef => ref.isRootCapability + case _ => false + case AnnotatedType(parent, _) => + needsVariable(parent) + case _ => + false + }.showing(i"can have inferred capture $tp = $result", captDebug) + + /** Pull out an embedded capture set from a part of `tp` */ + def normalizeCaptures(tp: Type)(using Context): Type = tp match + case tp @ RefinedType(parent @ CapturingType(parent1, refs), rname, rinfo) => + CapturingType(tp.derivedRefinedType(parent1, rname, rinfo), refs, parent.isBoxed) + case tp: RecType => + tp.parent match + case parent @ CapturingType(parent1, refs) => + CapturingType(tp.derivedRecType(parent1), refs, parent.isBoxed) + case _ => + tp // can return `tp` here since unlike RefinedTypes, RecTypes are never created + // by `mapInferred`. Hence if the underlying type admits capture variables + // a variable was already added, and the first case above would apply. 
+ case AndType(tp1 @ CapturingType(parent1, refs1), tp2 @ CapturingType(parent2, refs2)) => + assert(tp1.isBoxed == tp2.isBoxed) + CapturingType(AndType(parent1, parent2), refs1 ** refs2, tp1.isBoxed) + case tp @ OrType(tp1 @ CapturingType(parent1, refs1), tp2 @ CapturingType(parent2, refs2)) => + assert(tp1.isBoxed == tp2.isBoxed) + CapturingType(OrType(parent1, parent2, tp.isSoft), refs1 ++ refs2, tp1.isBoxed) + case tp @ OrType(tp1 @ CapturingType(parent1, refs1), tp2) => + CapturingType(OrType(parent1, tp2, tp.isSoft), refs1, tp1.isBoxed) + case tp @ OrType(tp1, tp2 @ CapturingType(parent2, refs2)) => + CapturingType(OrType(tp1, parent2, tp.isSoft), refs2, tp2.isBoxed) + case tp @ AppliedType(tycon, args) if !defn.isFunctionClass(tp.dealias.typeSymbol) => + tp.derivedAppliedType(tycon, args.mapConserve(box)) + case tp: RealTypeBounds => + tp.derivedTypeBounds(tp.lo, box(tp.hi)) + case tp: LazyRef => + normalizeCaptures(tp.ref) + case _ => + tp + + /** Add a capture set variable to `tp` if necessary, or maybe pull out + * an embedded capture set variable from a part of `tp`. + */ + def decorate(tp: Type, addedSet: Type => CaptureSet)(using Context): Type = + if tp.typeSymbol == defn.FromJavaObjectSymbol then + // For capture checking, we assume Object from Java is the same as Any + tp + else + def maybeAdd(target: Type, fallback: Type) = + if needsVariable(target) then CapturingType(target, addedSet(target)) + else fallback + val dealiased = tp.dealiasKeepAnnotsAndOpaques + if dealiased ne tp then + val transformed = transformInferredType(dealiased) + maybeAdd(transformed, if transformed ne dealiased then transformed else tp) + else maybeAdd(tp, tp) + + /** Add a capture set variable to `tp` if necessary, or maybe pull out + * an embedded capture set variable from a part of `tp`. + */ + def addVar(tp: Type, owner: Symbol)(using Context): Type = + decorate(tp, + addedSet = _.dealias.match + case CapturingType(_, refs) => CaptureSet.Var(owner, refs.elems) + case _ => CaptureSet.Var(owner)) + + def setupUnit(tree: Tree, recheckDef: DefRecheck)(using Context): Unit = + setupTraverser(recheckDef).traverse(tree)(using ctx.withPhase(thisPhase)) + + // ------ Checks to run after main capture checking -------------------------- + + /** A list of actions to perform at postCheck */ + private val todoAtPostCheck = new mutable.ListBuffer[Context => Unit] + + /** If `tp` is a capturing type, check that all references it mentions have non-empty + * capture sets. + * Also: warn about redundant capture annotations. + * This check is performed after capture sets are computed in phase cc. + * Note: We need to perform the check on the original annotation rather than its + * capture set since the conversion to a capture set already eliminates redundant elements. 
+ */ + private def checkWellformedPost(parent: Type, ann: Tree, tpt: Tree)(using Context): Unit = + capt.println(i"checkWF post $parent ${ann.retainedElems} in $tpt") + var retained = ann.retainedElems.toArray + for i <- 0 until retained.length do + val refTree = retained(i) + val ref = refTree.toCaptureRef + + def pos = + if refTree.span.exists then refTree.srcPos + else if ann.span.exists then ann.srcPos + else tpt.srcPos + + def check(others: CaptureSet, dom: Type | CaptureSet): Unit = + if others.accountsFor(ref) then + report.warning(em"redundant capture: $dom already accounts for $ref", pos) + + if ref.captureSetOfInfo.elems.isEmpty then + report.error(em"$ref cannot be tracked since its capture set is empty", pos) + if parent.captureSet ne defn.expandedUniversalSet then + check(parent.captureSet, parent) + + val others = + for j <- 0 until retained.length if j != i yield retained(j).toCaptureRef + val remaining = CaptureSet(others*) + check(remaining, remaining) + end for + end checkWellformedPost + + /** Check well formed at post check time */ + private def checkWellformedLater(parent: Type, ann: Tree, tpt: Tree)(using Context): Unit = + if !tpt.span.isZeroExtent then + todoAtPostCheck += (ctx1 => + checkWellformedPost(parent, ann, tpt)(using ctx1.withOwner(ctx.owner))) + + def postCheck()(using Context): Unit = + for chk <- todoAtPostCheck do chk(ctx) + todoAtPostCheck.clear() +end Setup diff --git a/compiler/src/dotty/tools/dotc/cc/Synthetics.scala b/compiler/src/dotty/tools/dotc/cc/Synthetics.scala index 5fe68dd6a7ac..1372ebafe82f 100644 --- a/compiler/src/dotty/tools/dotc/cc/Synthetics.scala +++ b/compiler/src/dotty/tools/dotc/cc/Synthetics.scala @@ -7,11 +7,10 @@ import Symbols.*, SymDenotations.*, Contexts.*, Flags.*, Types.*, Decorators.* import StdNames.nme import Names.Name import NameKinds.DefaultGetterName -import Phases.checkCapturesPhase import config.Printers.capt -/** Classification and transformation methods for synthetic - * case class methods that need to be treated specially. +/** Classification and transformation methods for function methods and + * synthetic case class methods that need to be treated specially. * In particular, compute capturing types for some of these methods which * have inferred (result-)types that need to be established under separate * compilation. @@ -27,6 +26,9 @@ object Synthetics: case DefaultGetterName(nme.copy, _) => sym.is(Synthetic) && sym.owner.isClass && sym.owner.is(Case) case _ => false + private val functionCombinatorNames = Set[Name]( + nme.andThen, nme.compose, nme.curried, nme.tupled) + /** Is `sym` a synthetic apply, copy, or copy default getter method? * The types of these symbols are transformed in a special way without * looking at the definitions's RHS @@ -37,6 +39,7 @@ object Synthetics: || isSyntheticCopyDefaultGetterMethod(symd) || (symd.symbol eq defn.Object_eq) || (symd.symbol eq defn.Object_ne) + || defn.isFunctionClass(symd.owner) && functionCombinatorNames.contains(symd.name) /** Method is excluded from regular capture checking. * Excluded are synthetic class members @@ -52,138 +55,114 @@ object Synthetics: || isSyntheticCompanionMethod(sym, nme.fromProduct) || needsTransform(sym)) - /** Add capture dependencies to the type of the `apply` or `copy` method of a case class. 
- * An apply method in a case class like this: - * case class CC(a: {d} A, b: B, {cap} c: C) - * would get type - * def apply(a': {d} A, b: B, {cap} c': C): {a', c'} CC { val a = {a'} A, val c = {c'} C } - * where `'` is used to indicate the difference between parameter symbol and refinement name. - * Analogous for the copy method. - */ - private def addCaptureDeps(info: Type)(using Context): Type = info match - case info: MethodType => - val trackedParams = info.paramRefs.filter(atPhase(checkCapturesPhase)(_.isTracked)) - def augmentResult(tp: Type): Type = tp match - case tp: MethodOrPoly => - tp.derivedLambdaType(resType = augmentResult(tp.resType)) - case _ => - val refined = trackedParams.foldLeft(tp) { (parent, pref) => - RefinedType(parent, pref.paramName, - CapturingType( - atPhase(ctx.phase.next)(pref.underlying.stripCapturing), - CaptureSet(pref))) - } - CapturingType(refined, CaptureSet(trackedParams*)) - if trackedParams.isEmpty then info - else augmentResult(info).showing(i"augment apply/copy type $info to $result", capt) - case info: PolyType => - info.derivedLambdaType(resType = addCaptureDeps(info.resType)) - case _ => - info - - /** Drop capture dependencies from the type of `apply` or `copy` method of a case class */ - private def dropCaptureDeps(tp: Type)(using Context): Type = tp match - case tp: MethodOrPoly => - tp.derivedLambdaType(resType = dropCaptureDeps(tp.resType)) - case CapturingType(parent, _) => - dropCaptureDeps(parent) - case RefinedType(parent, _, _) => - dropCaptureDeps(parent) - case _ => - tp - - /** Add capture information to the type of the default getter of a case class copy method */ - private def addDefaultGetterCapture(info: Type, owner: Symbol, idx: Int)(using Context): Type = info match - case info: MethodOrPoly => - info.derivedLambdaType(resType = addDefaultGetterCapture(info.resType, owner, idx)) - case info: ExprType => - info.derivedExprType(addDefaultGetterCapture(info.resType, owner, idx)) - case EventuallyCapturingType(parent, _) => - addDefaultGetterCapture(parent, owner, idx) - case info @ AnnotatedType(parent, annot) => - info.derivedAnnotatedType(addDefaultGetterCapture(parent, owner, idx), annot) - case _ if idx < owner.asClass.paramGetters.length => - val param = owner.asClass.paramGetters(idx) - val pinfo = param.info - atPhase(ctx.phase.next) { - if pinfo.captureSet.isAlwaysEmpty then info - else CapturingType(pinfo.stripCapturing, CaptureSet(param.termRef)) - } - case _ => - info - - /** Drop capture information from the type of the default getter of a case class copy method */ - private def dropDefaultGetterCapture(info: Type)(using Context): Type = info match - case info: MethodOrPoly => - info.derivedLambdaType(resType = dropDefaultGetterCapture(info.resType)) - case CapturingType(parent, _) => - parent - case info @ AnnotatedType(parent, annot) => - info.derivedAnnotatedType(dropDefaultGetterCapture(parent), annot) - case _ => - info - - /** Augment an unapply of type `(x: C): D` to `(x: {cap} C): {x} D` */ - private def addUnapplyCaptures(info: Type)(using Context): Type = info match - case info: MethodType => - val paramInfo :: Nil = info.paramInfos: @unchecked - val newParamInfo = - CapturingType(paramInfo, CaptureSet.universal) - val trackedParam = info.paramRefs.head - def newResult(tp: Type): Type = tp match - case tp: MethodOrPoly => - tp.derivedLambdaType(resType = newResult(tp.resType)) - case _ => - CapturingType(tp, CaptureSet(trackedParam)) - info.derivedLambdaType(paramInfos = newParamInfo :: Nil, resType = 
newResult(info.resType)) - .showing(i"augment unapply type $info to $result", capt) - case info: PolyType => - info.derivedLambdaType(resType = addUnapplyCaptures(info.resType)) - - /** Drop added capture information from the type of an `unapply` */ - private def dropUnapplyCaptures(info: Type)(using Context): Type = info match - case info: MethodType => - info.paramInfos match - case CapturingType(oldParamInfo, _) :: Nil => - def oldResult(tp: Type): Type = tp match - case tp: MethodOrPoly => - tp.derivedLambdaType(resType = oldResult(tp.resType)) - case CapturingType(tp, _) => - tp - info.derivedLambdaType(paramInfos = oldParamInfo :: Nil, resType = oldResult(info.resType)) - case _ => - info - case info: PolyType => - info.derivedLambdaType(resType = dropUnapplyCaptures(info.resType)) - - /** If `sym` refers to a synthetic apply, unapply, copy, or copy default getter method - * of a case class, transform it to account for capture information. - * The method is run in phase CheckCaptures.Pre - * @pre needsTransform(sym) - */ - def transformToCC(sym: SymDenotation)(using Context): SymDenotation = sym.name match - case DefaultGetterName(nme.copy, n) => - sym.copySymDenotation(info = addDefaultGetterCapture(sym.info, sym.owner, n)) - case nme.unapply => - sym.copySymDenotation(info = addUnapplyCaptures(sym.info)) - case nme.apply | nme.copy => - sym.copySymDenotation(info = addCaptureDeps(sym.info)) - case n if n == nme.eq || n == nme.ne => - sym.copySymDenotation(info = - MethodType(defn.ObjectType.capturing(CaptureSet.universal) :: Nil, defn.BooleanType)) - - /** If `sym` refers to a synthetic apply, unapply, copy, or copy default getter method - * of a case class, transform it back to what it was before the CC phase. - * @pre needsTransform(sym) + /** Transform the type of a method either to its type under capture checking + * or back to its previous type. + * @param sym The method to transform @pre needsTransform(sym) must hold. + * @param info The possibly already mapped info of sym */ - def transformFromCC(sym: SymDenotation)(using Context): SymDenotation = sym.name match - case DefaultGetterName(nme.copy, n) => - sym.copySymDenotation(info = dropDefaultGetterCapture(sym.info)) - case nme.unapply => - sym.copySymDenotation(info = dropUnapplyCaptures(sym.info)) - case nme.apply | nme.copy => - sym.copySymDenotation(info = dropCaptureDeps(sym.info)) - case n if n == nme.eq || n == nme.ne => - sym.copySymDenotation(info = defn.methOfAnyRef(defn.BooleanType)) + def transform(symd: SymDenotation, info: Type)(using Context): SymDenotation = + + /** Add capture dependencies to the type of the `apply` or `copy` method of a case class. + * An apply method in a case class like this: + * case class CC(a: A^{d}, b: B, c: C^{cap}) + * would get type + * def apply(a': A^{d}, b: B, c': C^{cap}): CC^{a', c'} { val a = A^{a'}, val c = C^{c'} } + * where `'` is used to indicate the difference between parameter symbol and refinement name. + * Analogous for the copy method. 
+ */ + def addCaptureDeps(info: Type): Type = info match + case info: MethodType => + val trackedParams = info.paramRefs.filter(atPhase(Phases.checkCapturesPhase)(_.isTracked)) + def augmentResult(tp: Type): Type = tp match + case tp: MethodOrPoly => + tp.derivedLambdaType(resType = augmentResult(tp.resType)) + case _ => + val refined = trackedParams.foldLeft(tp) { (parent, pref) => + RefinedType(parent, pref.paramName, + CapturingType( + atPhase(ctx.phase.next)(pref.underlying.stripCapturing), + CaptureSet(pref))) + } + CapturingType(refined, CaptureSet(trackedParams*)) + if trackedParams.isEmpty then info + else augmentResult(info).showing(i"augment apply/copy type $info to $result", capt) + case info: PolyType => + info.derivedLambdaType(resType = addCaptureDeps(info.resType)) + case _ => + info + + /** Add capture information to the type of the default getter of a case class copy method + */ + def transformDefaultGetterCaptures(info: Type, owner: Symbol, idx: Int)(using Context): Type = info match + case info: MethodOrPoly => + info.derivedLambdaType(resType = transformDefaultGetterCaptures(info.resType, owner, idx)) + case info: ExprType => + info.derivedExprType(transformDefaultGetterCaptures(info.resType, owner, idx)) + case CapturingType(parent, _) => + transformDefaultGetterCaptures(parent, owner, idx) + case info @ AnnotatedType(parent, annot) => + info.derivedAnnotatedType(transformDefaultGetterCaptures(parent, owner, idx), annot) + case _ if idx < owner.asClass.paramGetters.length => + val param = owner.asClass.paramGetters(idx) + val pinfo = param.info + atPhase(ctx.phase.next) { + if pinfo.captureSet.isAlwaysEmpty then info + else CapturingType(pinfo.stripCapturing, CaptureSet(param.termRef)) + } + case _ => + info + + /** Augment an unapply of type `(x: C): D` to `(x: C^{cap}): D^{x}` */ + def transformUnapplyCaptures(info: Type)(using Context): Type = info match + case info: MethodType => + val paramInfo :: Nil = info.paramInfos: @unchecked + val newParamInfo = CapturingType(paramInfo, CaptureSet.universal) + val trackedParam = info.paramRefs.head + def newResult(tp: Type): Type = tp match + case tp: MethodOrPoly => + tp.derivedLambdaType(resType = newResult(tp.resType)) + case _ => + CapturingType(tp, CaptureSet(trackedParam)) + info.derivedLambdaType(paramInfos = newParamInfo :: Nil, resType = newResult(info.resType)) + .showing(i"augment unapply type $info to $result", capt) + case info: PolyType => + info.derivedLambdaType(resType = transformUnapplyCaptures(info.resType)) + + def transformComposeCaptures(info: Type, owner: Symbol) = + val (pt: PolyType) = info: @unchecked + val (mt: MethodType) = pt.resType: @unchecked + val (enclThis: ThisType) = owner.thisType: @unchecked + pt.derivedLambdaType(resType = MethodType(mt.paramNames)( + mt1 => mt.paramInfos.map(_.capturing(CaptureSet.universal)), + mt1 => CapturingType(mt.resType, CaptureSet(enclThis, mt1.paramRefs.head)))) + + def transformCurriedTupledCaptures(info: Type, owner: Symbol) = + val (et: ExprType) = info: @unchecked + val (enclThis: ThisType) = owner.thisType: @unchecked + def mapFinalResult(tp: Type, f: Type => Type): Type = + val defn.FunctionNOf(args, res, isContextual) = tp: @unchecked + if defn.isFunctionNType(res) then + defn.FunctionNOf(args, mapFinalResult(res, f), isContextual) + else + f(tp) + ExprType(mapFinalResult(et.resType, CapturingType(_, CaptureSet(enclThis)))) + + def transformCompareCaptures = + MethodType(defn.ObjectType.capturing(CaptureSet.universal) :: Nil, defn.BooleanType) + + 
symd.copySymDenotation(info = symd.name match + case DefaultGetterName(nme.copy, n) => + transformDefaultGetterCaptures(info, symd.owner, n) + case nme.unapply => + transformUnapplyCaptures(info) + case nme.apply | nme.copy => + addCaptureDeps(info) + case nme.andThen | nme.compose => + transformComposeCaptures(info, symd.owner) + case nme.curried | nme.tupled => + transformCurriedTupledCaptures(info, symd.owner) + case n if n == nme.eq || n == nme.ne => + transformCompareCaptures) + end transform end Synthetics \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala index 51b261583feb..5fbe7212a674 100644 --- a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala @@ -56,7 +56,7 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { override def asClassPathStrings: Seq[String] = aggregates.map(_.asClassPathString).distinct - override def asSourcePathString: String = ClassPath.join(aggregates map (_.asSourcePathString): _*) + override def asSourcePathString: String = ClassPath.join(aggregates map (_.asSourcePathString)*) override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = { val aggregatedPackages = aggregates.flatMap(_.packages(inPackage)).distinct diff --git a/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala index ac8b69381938..0b66f339bf53 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala @@ -4,9 +4,9 @@ package dotty.tools.dotc.classpath import dotty.tools.io.{AbstractFile, VirtualDirectory} -import FileUtils._ +import FileUtils.* import dotty.tools.io.ClassPath -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* /** * Provides factory methods for classpath. When creating classpath instances for a given path, diff --git a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala index 1411493bcbfd..26ed2734890e 100644 --- a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala @@ -11,10 +11,10 @@ import java.nio.file.{FileSystems, Files} import dotty.tools.dotc.classpath.PackageNameUtils.{packageContains, separatePkgAndClassNames} import dotty.tools.io.{AbstractFile, PlainFile, ClassPath, ClassRepresentation, EfficientClassPath, JDK9Reflectors} -import FileUtils._ +import FileUtils.* import PlainFile.toPlainFile -import scala.jdk.CollectionConverters._ +import scala.jdk.CollectionConverters.* import scala.collection.immutable.ArraySeq import scala.util.control.NonFatal @@ -126,9 +126,9 @@ trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends Directo } object JrtClassPath { - import java.nio.file._, java.net.URI + import java.nio.file.*, java.net.URI def apply(release: Option[String]): Option[ClassPath] = { - import scala.util.Properties._ + import scala.util.Properties.* if (!isJavaAtLeast("9")) None else { // Longer term we'd like an official API for this in the JDK @@ -165,7 +165,7 @@ object JrtClassPath { * The implementation assumes that no classes exist in the empty package. 
*/ final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with NoSourcePaths { - import java.nio.file.Path, java.nio.file._ + import java.nio.file.Path, java.nio.file.* type F = Path private val dir: Path = fs.getPath("/packages") @@ -214,7 +214,7 @@ final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with No * Implementation `ClassPath` based on the \$JAVA_HOME/lib/ct.sym backing http://openjdk.java.net/jeps/247 */ final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends ClassPath with NoSourcePaths { - import java.nio.file.Path, java.nio.file._ + import java.nio.file.Path, java.nio.file.* private val fileSystem: FileSystem = FileSystems.newFileSystem(ctSym, null: ClassLoader) private val root: Path = fileSystem.getRootDirectories.iterator.next @@ -278,15 +278,17 @@ case class DirectoryClassPath(dir: JFile) extends JFileDirectoryLookup[ClassFile def findClassFile(className: String): Option[AbstractFile] = { val relativePath = FileUtils.dirPath(className) - val classFile = new JFile(dir, relativePath + ".class") - if (classFile.exists) { - Some(classFile.toPath.toPlainFile) - } - else None + val tastyFile = new JFile(dir, relativePath + ".tasty") + if tastyFile.exists then Some(tastyFile.toPath.toPlainFile) + else + val classFile = new JFile(dir, relativePath + ".class") + if classFile.exists then Some(classFile.toPath.toPlainFile) + else None } protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file) - protected def isMatchingFile(f: JFile): Boolean = f.isClass + protected def isMatchingFile(f: JFile): Boolean = + f.isTasty || (f.isClass && f.classToTasty.isEmpty) private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = files(inPackage) } diff --git a/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala b/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala index d6fa6fb78d07..8c31faa43186 100644 --- a/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala +++ b/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala @@ -17,9 +17,17 @@ object FileUtils { extension (file: AbstractFile) { def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.name) - def isClass: Boolean = !file.isDirectory && file.hasExtension("class") && !file.name.endsWith("$class.class") + def isClass: Boolean = !file.isDirectory && hasClassExtension && !file.name.endsWith("$class.class") // FIXME: drop last condition when we stop being compatible with Scala 2.11 + def hasClassExtension: Boolean = file.hasExtension("class") + + def hasTastyExtension: Boolean = file.hasExtension("tasty") + + def isTasty: Boolean = !file.isDirectory && hasTastyExtension + + def isScalaBinary: Boolean = file.isClass || file.isTasty + def isScalaOrJavaSource: Boolean = !file.isDirectory && (file.hasExtension("scala") || file.hasExtension("java")) // TODO do we need to check also other files using ZipMagicNumber like in scala.tools.nsc.io.Jar.isJarOrZip? 
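The classpath changes in this file, in `DirectoryClassPath` above, and in the zip and virtual-directory lookups below all implement the same policy: prefer a `.tasty` entry over its sibling `.class` entry, and only surface a class file when no corresponding TASTy file exists. A standalone sketch of that lookup order (illustrative only; `findScalaBinary` is a made-up helper name, not part of the patch):

```scala
import java.io.File

// Prefer the TASTy artifact, fall back to the class file --
// the same order DirectoryClassPath.findClassFile uses in the hunk above.
def findScalaBinary(dir: File, relativePath: String): Option[File] =
  val tasty = File(dir, relativePath + ".tasty")
  if tasty.exists then Some(tasty)
  else
    val cls = File(dir, relativePath + ".class")
    if cls.exists then Some(cls) else None
```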
@@ -30,17 +38,34 @@ object FileUtils { * and returning given default value in other case */ def toURLs(default: => Seq[URL] = Seq.empty): Seq[URL] = if (file.file == null) default else Seq(file.toURL) + + /** Returns the tasty file associated with this class file */ + def classToTasty: Option[AbstractFile] = + assert(file.isClass, s"non-class: $file") + val tastyName = classNameToTasty(file.name) + Option(file.resolveSibling(tastyName)) } extension (file: JFile) { def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.getName) - def isClass: Boolean = file.isFile && file.getName.endsWith(".class") && !file.getName.endsWith("$class.class") - // FIXME: drop last condition when we stop being compatible with Scala 2.11 + def isClass: Boolean = file.isFile && file.getName.endsWith(SUFFIX_CLASS) && !file.getName.endsWith("$class.class") + // FIXME: drop last condition when we stop being compatible with Scala 2.11 + + def isTasty: Boolean = file.isFile && file.getName.endsWith(SUFFIX_TASTY) + + /** Returns the tasty file associated with this class file */ + def classToTasty: Option[JFile] = + assert(file.isClass, s"non-class: $file") + val tastyName = classNameToTasty(file.getName.stripSuffix(".class")) + val tastyPath = file.toPath.resolveSibling(tastyName) + if java.nio.file.Files.exists(tastyPath) then Some(tastyPath.toFile) else None + } private val SUFFIX_CLASS = ".class" private val SUFFIX_SCALA = ".scala" + private val SUFFIX_TASTY = ".tasty" private val SUFFIX_JAVA = ".java" private val SUFFIX_SIG = ".sig" @@ -81,4 +106,15 @@ object FileUtils { def mkFileFilter(f: JFile => Boolean): FileFilter = new FileFilter { def accept(pathname: JFile): Boolean = f(pathname) } + + /** Transforms a .class file name to a .tasty file name */ + private def classNameToTasty(fileName: String): String = + val classOrModuleName = fileName.stripSuffix(".class") + val className = + if classOrModuleName.endsWith("$") + && classOrModuleName != "Null$" // scala.runtime.Null$ + && classOrModuleName != "Nothing$" // scala.runtime.Nothing$ + then classOrModuleName.stripSuffix("$") + else classOrModuleName + className + SUFFIX_TASTY } diff --git a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala index e750d9ccacc0..f520cd97767e 100644 --- a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala @@ -4,7 +4,7 @@ import scala.language.unsafeNulls import dotty.tools.io.{ClassPath, ClassRepresentation} import dotty.tools.io.{AbstractFile, VirtualDirectory} -import FileUtils._ +import FileUtils.* import java.net.{URI, URL} case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath with DirectoryLookup[ClassFileEntryImpl] with NoSourcePaths { @@ -41,12 +41,17 @@ case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath wi override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl.apply def findClassFile(className: String): Option[AbstractFile] = { - val relativePath = FileUtils.dirPath(className) + ".class" - Option(lookupPath(dir)(relativePath.split(java.io.File.separator).toIndexedSeq, directory = false)) + val pathSeq = FileUtils.dirPath(className).split(java.io.File.separator) + val parentDir = lookupPath(dir)(pathSeq.init.toSeq, directory = true) + if parentDir == null then return None + else + 
Option(lookupPath(parentDir)(pathSeq.last + ".tasty" :: Nil, directory = false)) + .orElse(Option(lookupPath(parentDir)(pathSeq.last + ".class" :: Nil, directory = false))) } private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = files(inPackage) protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file) - protected def isMatchingFile(f: AbstractFile): Boolean = f.isClass + protected def isMatchingFile(f: AbstractFile): Boolean = + f.isTasty || (f.isClass && f.classToTasty.isEmpty) } diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala index 865f95551a0b..0ca996db4812 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala @@ -13,8 +13,8 @@ import java.nio.file.attribute.{BasicFileAttributes, FileTime} import scala.annotation.tailrec import dotty.tools.io.{AbstractFile, ClassPath, ClassRepresentation, FileZipArchive, ManifestResources} -import dotty.tools.dotc.core.Contexts._ -import FileUtils._ +import dotty.tools.dotc.core.Contexts.* +import FileUtils.* /** * A trait providing an optional cache for classpath entries obtained from zip and jar files. @@ -44,21 +44,21 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { extends ZipArchiveFileLookup[ClassFileEntryImpl] with NoSourcePaths { - override def findClassFile(className: String): Option[AbstractFile] = { - val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) - file(PackageName(pkg), simpleClassName + ".class").map(_.file) - } + override def findClassFile(className: String): Option[AbstractFile] = + findClass(className).map(_.file) // This method is performance sensitive as it is used by SBT's ExtractDependencies phase. 
- override def findClass(className: String): Option[ClassRepresentation] = { + override def findClass(className: String): Option[ClassFileEntryImpl] = { val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) - file(PackageName(pkg), simpleClassName + ".class") + val binaries = files(PackageName(pkg), simpleClassName + ".tasty", simpleClassName + ".class") + binaries.find(_.file.isTasty).orElse(binaries.find(_.file.isClass)) } override private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = files(inPackage) override protected def createFileEntry(file: FileZipArchive#Entry): ClassFileEntryImpl = ClassFileEntryImpl(file) - override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isClass + override protected def isRequiredFileType(file: AbstractFile): Boolean = + file.isTasty || (file.isClass && file.classToTasty.isEmpty) } /** diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala b/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala index e241feee8244..ca8636e3884f 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala @@ -9,7 +9,7 @@ import java.io.File import java.net.URL import dotty.tools.io.{ AbstractFile, FileZipArchive } -import FileUtils._ +import FileUtils.* import dotty.tools.io.{EfficientClassPath, ClassRepresentation} /** @@ -43,6 +43,15 @@ trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends Efficie } yield createFileEntry(entry) + protected def files(inPackage: PackageName, names: String*): Seq[FileEntryType] = + for { + dirEntry <- findDirEntry(inPackage).toSeq + name <- names + entry <- Option(dirEntry.lookupName(name, directory = false)) + if isRequiredFileType(entry) + } + yield createFileEntry(entry) + protected def file(inPackage: PackageName, name: String): Option[FileEntryType] = for { dirEntry <- findDirEntry(inPackage) diff --git a/compiler/src/dotty/tools/dotc/config/CliCommand.scala b/compiler/src/dotty/tools/dotc/config/CliCommand.scala index 914df040fbf7..5c24dd57eeba 100644 --- a/compiler/src/dotty/tools/dotc/config/CliCommand.scala +++ b/compiler/src/dotty/tools/dotc/config/CliCommand.scala @@ -3,8 +3,8 @@ package config import scala.language.unsafeNulls -import Settings._ -import core.Contexts._ +import Settings.* +import core.Contexts.* import printing.Highlighting import scala.util.chaining.given @@ -12,7 +12,7 @@ import scala.PartialFunction.cond trait CliCommand: - type ConcreteSettings <: CommonScalaSettings with Settings.SettingGroup + type ConcreteSettings <: CommonScalaSettings & Settings.SettingGroup def versionMsg: String diff --git a/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala b/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala index 41e123472a75..2ffe900fbdbf 100644 --- a/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala +++ b/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala @@ -1,8 +1,8 @@ package dotty.tools.dotc package config -import Settings._ -import core.Contexts._ +import Settings.* +import core.Contexts.* abstract class CompilerCommand extends CliCommand: type ConcreteSettings = ScalaSettings @@ -21,6 +21,6 @@ abstract class CompilerCommand extends CliCommand: else "" final def isHelpFlag(using settings: ScalaSettings)(using SettingsState): Boolean = - import settings._ + import settings.* val flags = Set(help, Vhelp, Whelp, Xhelp, Yhelp, showPlugins, XshowPhases) 
flags.exists(_.value) || allSettings.exists(isHelping) diff --git a/compiler/src/dotty/tools/dotc/config/Config.scala b/compiler/src/dotty/tools/dotc/config/Config.scala index 247fa28efbda..2746476261e5 100644 --- a/compiler/src/dotty/tools/dotc/config/Config.scala +++ b/compiler/src/dotty/tools/dotc/config/Config.scala @@ -235,15 +235,4 @@ object Config { */ inline val checkLevelsOnConstraints = false inline val checkLevelsOnInstantiation = true - - /** If true, print capturing types in the form `{c} T`. - * If false, print them in the form `T @retains(c)`. - */ - inline val printCaptureSetsAsPrefix = true - - /** If true, allow mappping capture set variables under captureChecking with maps that are neither - * bijective nor idempotent. We currently do now know how to do this correctly in all - * cases, though. - */ - inline val ccAllowUnsoundMaps = false } diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index e5ab8f65f55b..2798828ad9a7 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -2,12 +2,12 @@ package dotty.tools package dotc package config -import core._ -import Contexts._, Symbols._, Names._ +import core.* +import Contexts.*, Symbols.*, Names.* import StdNames.nme import Decorators.* import util.{SrcPos, NoSourcePosition} -import SourceVersion._ +import SourceVersion.* import reporting.Message import NameKinds.QualifiedName @@ -29,7 +29,6 @@ object Feature: val fewerBraces = experimental("fewerBraces") val saferExceptions = experimental("saferExceptions") val clauseInterleaving = experimental("clauseInterleaving") - val relaxedExtensionImports = experimental("relaxedExtensionImports") val pureFunctions = experimental("pureFunctions") val captureChecking = experimental("captureChecking") val into = experimental("into") @@ -103,8 +102,8 @@ object Feature: /** Is captureChecking enabled for any of the currently compiled compilation units? */ def ccEnabledSomewhere(using Context) = - enabledBySetting(captureChecking) - || ctx.run != null && ctx.run.nn.ccImportEncountered + if ctx.run != null then ctx.run.nn.ccEnabledSomewhere + else enabledBySetting(captureChecking) def sourceVersionSetting(using Context): SourceVersion = SourceVersion.valueOf(ctx.settings.source.value) @@ -134,18 +133,28 @@ object Feature: def checkExperimentalFeature(which: String, srcPos: SrcPos, note: => String = "")(using Context) = if !isExperimentalEnabled then - report.error(em"Experimental $which may only be used with a nightly or snapshot version of the compiler$note", srcPos) + report.error( + em"""Experimental $which may only be used under experimental mode: + | 1. In a definition marked as @experimental + | 2. Compiling with the -experimental compiler flag + | 3. 
With a nightly or snapshot version of the compiler$note + """, srcPos) + + private def ccException(sym: Symbol)(using Context): Boolean = + ccEnabled && defn.ccExperimental.contains(sym) def checkExperimentalDef(sym: Symbol, srcPos: SrcPos)(using Context) = if !isExperimentalEnabled then - val symMsg = - if sym.hasAnnotation(defn.ExperimentalAnnot) then - i"$sym is marked @experimental" - else if sym.owner.hasAnnotation(defn.ExperimentalAnnot) then - i"${sym.owner} is marked @experimental" - else - i"$sym inherits @experimental" - report.error(em"$symMsg and therefore may only be used in an experimental scope.", srcPos) + val experimentalSym = + if sym.hasAnnotation(defn.ExperimentalAnnot) then sym + else if sym.owner.hasAnnotation(defn.ExperimentalAnnot) then sym.owner + else NoSymbol + if !ccException(experimentalSym) then + val symMsg = + if experimentalSym.exists + then i"$experimentalSym is marked @experimental" + else i"$sym inherits @experimental" + report.error(em"$symMsg and therefore may only be used in an experimental scope.", srcPos) /** Check that experimental compiler options are only set for snapshot or nightly compiler versions. */ def checkExperimentalSettings(using Context): Unit = @@ -154,7 +163,7 @@ object Feature: do checkExperimentalFeature(s"feature $setting", NoSourcePosition) def isExperimentalEnabled(using Context): Boolean = - Properties.experimental && !ctx.settings.YnoExperimental.value + (Properties.experimental || ctx.settings.experimental.value) && !ctx.settings.YnoExperimental.value /** Handle language import `import language..` if it is one * of the global imports `pureFunctions` or `captureChecking`. In this case @@ -169,7 +178,7 @@ object Feature: true else if fullFeatureName == captureChecking then ctx.compilationUnit.needsCaptureChecking = true - if ctx.run != null then ctx.run.nn.ccImportEncountered = true + if ctx.run != null then ctx.run.nn.ccEnabledSomewhere = true true else false diff --git a/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala b/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala index 2b2f35e49451..f3c2f295ce82 100644 --- a/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala +++ b/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala @@ -2,12 +2,12 @@ package dotty.tools package dotc package config -import io._ +import io.* import classpath.AggregateClassPath -import core._ -import Symbols._, Types._, Contexts._, StdNames._ -import Flags._ -import transform.ExplicitOuter, transform.SymUtils._ +import core.* +import Symbols.*, Types.*, Contexts.*, StdNames.* +import Flags.* +import transform.ExplicitOuter class JavaPlatform extends Platform { @@ -52,7 +52,7 @@ class JavaPlatform extends Platform { */ def isMaybeBoxed(sym: ClassSymbol)(using Context): Boolean = { val d = defn - import d._ + import d.* (sym == ObjectClass) || (sym == JavaSerializableClass) || (sym == ComparableClass) || @@ -66,4 +66,7 @@ class JavaPlatform extends Platform { def newClassLoader(bin: AbstractFile)(using Context): SymbolLoader = new ClassfileLoader(bin) + + def newTastyLoader(bin: AbstractFile)(using Context): SymbolLoader = + new TastyLoader(bin) } diff --git a/compiler/src/dotty/tools/dotc/config/MigrationVersion.scala b/compiler/src/dotty/tools/dotc/config/MigrationVersion.scala new file mode 100644 index 000000000000..4dd9d065395b --- /dev/null +++ b/compiler/src/dotty/tools/dotc/config/MigrationVersion.scala @@ -0,0 +1,51 @@ +package dotty.tools +package dotc +package config + +import SourceVersion.* +import Feature.* +import 
core.Contexts.Context + +class MigrationVersion( + val warnFrom: SourceVersion, + val errorFrom: SourceVersion): + require(warnFrom.ordinal <= errorFrom.ordinal) + + def needsPatch(using Context): Boolean = + sourceVersion.isMigrating && sourceVersion.isAtLeast(warnFrom) + + def patchFrom: SourceVersion = + warnFrom.prevMigrating + +object MigrationVersion: + + val Scala2to3 = MigrationVersion(`3.0`, `3.0`) + + val OverrideValParameter = MigrationVersion(`3.0`, future) + + // we tighten for-comprehension without `case` to error in 3.4, + // but we keep pat-defs as warnings for now ("@unchecked"), + // until we propose an alternative way to assert exhaustivity to the typechecker. + val ForComprehensionPatternWithoutCase = MigrationVersion(`3.2`, `3.4`) + val ForComprehensionUncheckedPathDefs = MigrationVersion(`3.2`, future) + + val NonLocalReturns = MigrationVersion(`3.2`, future) + + val AscriptionAfterPattern = MigrationVersion(`3.3`, future) + + val ExplicitContextBoundArgument = MigrationVersion(`3.4`, `3.5`) + + val AlphanumericInfix = MigrationVersion(`3.4`, future) + val RemoveThisQualifier = MigrationVersion(`3.4`, future) + val UninitializedVars = MigrationVersion(`3.4`, future) + val VarargSpliceAscription = MigrationVersion(`3.4`, future) + val WildcardType = MigrationVersion(`3.4`, future) + val WithOperator = MigrationVersion(`3.4`, future) + val FunctionUnderscore = MigrationVersion(`3.4`, future) + + val ImportWildcard = MigrationVersion(future, future) + val ImportRename = MigrationVersion(future, future) + val ParameterEnclosedByParenthesis = MigrationVersion(future, future) + val XmlLiteral = MigrationVersion(future, future) + +end MigrationVersion diff --git a/compiler/src/dotty/tools/dotc/config/OutputDirs.scala b/compiler/src/dotty/tools/dotc/config/OutputDirs.scala index 0411c5604768..ba121d06e35a 100644 --- a/compiler/src/dotty/tools/dotc/config/OutputDirs.scala +++ b/compiler/src/dotty/tools/dotc/config/OutputDirs.scala @@ -4,7 +4,7 @@ package config import scala.language.unsafeNulls -import io._ +import io.* /** A class for holding mappings from source directories to * their output location. This functionality can be accessed diff --git a/compiler/src/dotty/tools/dotc/config/PathResolver.scala b/compiler/src/dotty/tools/dotc/config/PathResolver.scala index 8b4eedb0e9d2..29e6e35855c8 100644 --- a/compiler/src/dotty/tools/dotc/config/PathResolver.scala +++ b/compiler/src/dotty/tools/dotc/config/PathResolver.scala @@ -9,15 +9,15 @@ import io.{ClassPath, Directory, Path} import classpath.{AggregateClassPath, ClassPathFactory, JrtClassPath} import ClassPath.split import PartialFunction.condOpt -import core.Contexts._ -import Settings._ +import core.Contexts.* +import Settings.* import dotty.tools.io.File object PathResolver { // Imports property/environment functions which suppress // security exceptions. - import AccessControl._ + import AccessControl.* def firstNonEmpty(xs: String*): String = xs find (_ != "") getOrElse "" @@ -208,7 +208,7 @@ class PathResolver(using c: Context) { if (!settings.classpath.isDefault) settings.classpath.value else sys.env.getOrElse("CLASSPATH", ".") - import classPathFactory._ + import classPathFactory.* // Assemble the elements! 
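To make the new `MigrationVersion` table above easier to review: each entry pairs the source version that starts *warning* about a construct with the version that starts *rejecting* it, and `patchFrom`/`prevMigrating` (the latter added to `SourceVersion` further down in this diff) locate the `-migration` version whose automatic rewrites still apply. A hedged sketch of how a pair of this shape classifies a construct; the actual warn-vs-error decision lives at each use site in the compiler, and `classify` with its integer stand-ins for `SourceVersion` ordinals is invented here purely for illustration:

```scala
// Invented illustration (not compiler code): plain Ints stand in for
// SourceVersion ordinals, e.g. 32 for `3.2`, 34 for `3.4`.
enum Status:
  case Allowed, Warned, Errored

def classify(source: Int, warnFrom: Int, errorFrom: Int): Status =
  require(warnFrom <= errorFrom) // the same invariant MigrationVersion requires
  if source >= errorFrom then Status.Errored
  else if source >= warnFrom then Status.Warned
  else Status.Allowed

@main def migrationDemo(): Unit =
  // e.g. ForComprehensionPatternWithoutCase = MigrationVersion(`3.2`, `3.4`):
  assert(classify(30, 32, 34) == Status.Allowed) // fine under -source 3.0
  assert(classify(32, 32, 34) == Status.Warned)  // warns from 3.2 on
  assert(classify(34, 32, 34) == Status.Errored) // errors from 3.4 on
```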
def basis: List[Iterable[ClassPath]] = diff --git a/compiler/src/dotty/tools/dotc/config/Platform.scala b/compiler/src/dotty/tools/dotc/config/Platform.scala index 0faacf1bcebb..2a0b207e68c1 100644 --- a/compiler/src/dotty/tools/dotc/config/Platform.scala +++ b/compiler/src/dotty/tools/dotc/config/Platform.scala @@ -3,7 +3,7 @@ package dotc package config import io.{ClassPath, AbstractFile} -import core.Contexts._, core.Symbols._ +import core.Contexts.*, core.Symbols.* import core.SymbolLoader import core.StdNames.nme import core.Flags.Module @@ -36,6 +36,9 @@ abstract class Platform { /** Create a new class loader to load class file `bin` */ def newClassLoader(bin: AbstractFile)(using Context): SymbolLoader + /** Create a new TASTy loader to load the TASTy file `bin` */ + def newTastyLoader(bin: AbstractFile)(using Context): SymbolLoader + /** The given symbol is a method with the right name and signature to be a runnable program. */ def isMainMethod(sym: Symbol)(using Context): Boolean diff --git a/compiler/src/dotty/tools/dotc/config/Printers.scala b/compiler/src/dotty/tools/dotc/config/Printers.scala index 63d616e1ce3d..81fd60497025 100644 --- a/compiler/src/dotty/tools/dotc/config/Printers.scala +++ b/compiler/src/dotty/tools/dotc/config/Printers.scala @@ -1,4 +1,6 @@ -package dotty.tools.dotc.config +package dotty.tools.dotc +package config +import core.Contexts.{Context, ctx} object Printers { @@ -12,7 +14,18 @@ object Printers { val default = new Printer - val capt = noPrinter + /** Enabled via Ycc-log flag. This is not super-efficient but helps debug + * variants of capture checking faster. + * TODO: Revert to static scheme once capture checking has stabilized + */ + def capt(using Context): Printer = + if ctx.settings.YccLog.value then captActive else noPrinter + val captActive = new Printer + + def captDebug(using Context): Printer = + if ctx.settings.YccDebug.value then captDebugActive else noPrinter + val captDebugActive = new Printer + val constr = noPrinter val core = noPrinter val checks = noPrinter diff --git a/compiler/src/dotty/tools/dotc/config/SJSPlatform.scala b/compiler/src/dotty/tools/dotc/config/SJSPlatform.scala index 0275e0d6a227..f6e29754ef94 100644 --- a/compiler/src/dotty/tools/dotc/config/SJSPlatform.scala +++ b/compiler/src/dotty/tools/dotc/config/SJSPlatform.scala @@ -1,8 +1,8 @@ package dotty.tools.dotc.config -import dotty.tools.dotc.core._ -import Contexts._ -import Symbols._ +import dotty.tools.dotc.core.* +import Contexts.* +import Symbols.* import dotty.tools.backend.sjs.JSDefinitions diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index c06aa304ef72..d1b91f77f933 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -6,18 +6,21 @@ import scala.language.unsafeNulls import dotty.tools.dotc.config.PathResolver.Defaults import dotty.tools.dotc.config.Settings.{Setting, SettingGroup} import dotty.tools.dotc.config.SourceVersion -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.rewrites.Rewrites -import dotty.tools.io.{AbstractFile, Directory, JDK9Reflectors, PlainDirectory} +import dotty.tools.io.{AbstractFile, Directory, JDK9Reflectors, PlainDirectory, NoAbstractFile} +import Setting.ChoiceWithHelp -import scala.util.chaining._ +import scala.util.chaining.* + +import java.util.zip.Deflater class ScalaSettings extends SettingGroup with 
AllScalaSettings object ScalaSettings: // Keep synchronized with `classfileVersion` in `BackendUtils` private val minTargetVersion = 8 - private val maxTargetVersion = 21 + private val maxTargetVersion = 22 def supportedTargetVersions: List[String] = (minTargetVersion to maxTargetVersion).toList.map(_.toString) @@ -54,6 +57,7 @@ trait AllScalaSettings extends CommonScalaSettings, PluginSettings, VerboseSetti /* Path related settings */ val semanticdbTarget: Setting[String] = PathSetting("-semanticdb-target", "Specify an alternative output directory for SemanticDB files.", "") + val semanticdbText: Setting[Boolean] = BooleanSetting("-semanticdb-text", "Specifies whether to include source code in SemanticDB files or not.") val source: Setting[String] = ChoiceSetting("-source", "source version", "source version", ScalaSettings.supportedSourceVersions, SourceVersion.defaultSourceVersion.toString, aliases = List("--source")) val uniqid: Setting[Boolean] = BooleanSetting("-uniqid", "Uniquely tag all identifiers in debugging output.", aliases = List("--unique-id")) @@ -70,8 +74,8 @@ trait AllScalaSettings extends CommonScalaSettings, PluginSettings, VerboseSetti val printLines: Setting[Boolean] = BooleanSetting("-print-lines", "Show source code line numbers.", aliases = List("--print-lines")) /* Scala.js-related settings */ - val scalajsGenStaticForwardersForNonTopLevelObjects: Setting[Boolean] = BooleanSetting("-scalajs-genStaticForwardersForNonTopLevelObjects", "Generate static forwarders even for non-top-level objects (Scala.js only)") - val scalajsMapSourceURI: Setting[List[String]] = MultiStringSetting("-scalajs-mapSourceURI", "uri1[->uri2]", "rebases source URIs from uri1 to uri2 (or to a relative URI) for source maps (Scala.js only)") + val scalajsGenStaticForwardersForNonTopLevelObjects: Setting[Boolean] = BooleanSetting("-scalajs-genStaticForwardersForNonTopLevelObjects", "Generate static forwarders even for non-top-level objects (Scala.js only).") + val scalajsMapSourceURI: Setting[List[String]] = MultiStringSetting("-scalajs-mapSourceURI", "uri1[->uri2]", "rebases source URIs from uri1 to uri2 (or to a relative URI) for source maps (Scala.js only).") val projectUrl: Setting[String] = StringSetting ( "-project-url", @@ -118,6 +122,7 @@ trait CommonScalaSettings: val explainTypes: Setting[Boolean] = BooleanSetting("-explain-types", "Explain type errors in more detail (deprecated, use -explain instead).", aliases = List("--explain-types", "-explaintypes")) val unchecked: Setting[Boolean] = BooleanSetting("-unchecked", "Enable additional warnings where generated code depends on assumptions.", initialValue = true, aliases = List("--unchecked")) val language: Setting[List[String]] = MultiStringSetting("-language", "feature", "Enable one or more language features.", aliases = List("--language")) + val experimental: Setting[Boolean] = BooleanSetting("-experimental", "Annotate all top-level definitions with @experimental. 
This enables the use of experimental features anywhere in the project.")    /* Coverage settings */   val coverageOutputDir = PathSetting("-coverage-out", "Destination for coverage classfiles and instrumentation data.", "", aliases = List("--coverage-out")) @@ -155,13 +160,13 @@ private sealed trait VerboseSettings:  */ private sealed trait WarningSettings:   self: SettingGroup => -  import Setting.ChoiceWithHelp    val Whelp: Setting[Boolean] = BooleanSetting("-W", "Print a synopsis of warning options.")   val XfatalWarnings: Setting[Boolean] = BooleanSetting("-Werror", "Fail the compilation if there are any warnings.", aliases = List("-Xfatal-warnings"))   val WvalueDiscard: Setting[Boolean] = BooleanSetting("-Wvalue-discard", "Warn when non-Unit expression results are unused.")   val WNonUnitStatement = BooleanSetting("-Wnonunit-statement", "Warn when block statements are non-Unit expressions.") - +  val WimplausiblePatterns = BooleanSetting("-Wimplausible-patterns", "Warn if comparison with a pattern value looks like it might always fail.") +  val WunstableInlineAccessors = BooleanSetting("-WunstableInlineAccessors", "Warn when an inline method has references to non-stable binary APIs.")   val Wunused: Setting[List[ChoiceWithHelp[String]]] = MultiChoiceHelpSetting(     name = "-Wunused",     helpArg = "warning", @@ -306,6 +311,27 @@ private sealed trait XSettings:   }    val XmacroSettings: Setting[List[String]] = MultiStringSetting("-Xmacro-settings", "setting1,setting2,..settingN", "List of settings which exposed to the macros") + +  val Xlint: Setting[List[ChoiceWithHelp[String]]] = UncompleteMultiChoiceHelpSetting( +    name = "-Xlint", +    helpArg = "advanced warning", +    descr = "Enable or disable specific `lint` warnings", +    choices = List( +      ChoiceWithHelp("all", ""), +      ChoiceWithHelp("private-shadow", "Warn if a private field or class parameter shadows a superclass field"), +      ChoiceWithHelp("type-parameter-shadow", "Warn when a type parameter shadows a type already in the scope"), +    ), +    default = Nil +  ) + +  object XlintHas: +    def allOr(s: String)(using Context) = +      Xlint.value.pipe(us => us.contains("all") || us.contains(s)) +    def privateShadow(using Context) = +      allOr("private-shadow") +    def typeParameterShadow(using Context) = +      allOr("type-parameter-shadow") +  end XSettings  /** -Y "Forking" as in forked tongue or "Private" settings */ @@ -338,6 +364,9 @@ private sealed trait YSettings:   val YnoPredef: Setting[Boolean] = BooleanSetting("-Yno-predef", "Compile without importing Predef.")   val Yskip: Setting[List[String]] = PhasesSetting("-Yskip", "Skip")   val Ydumpclasses: Setting[String] = StringSetting("-Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "") +  val YjarCompressionLevel: Setting[Int] = IntChoiceSetting("-Yjar-compression-level", "compression level to use when writing jar files", Deflater.DEFAULT_COMPRESSION to Deflater.BEST_COMPRESSION, Deflater.DEFAULT_COMPRESSION) +  val YbackendParallelism: Setting[Int] = IntChoiceSetting("-Ybackend-parallelism", "maximum worker threads for backend", 1 to 16, 1) +  val YbackendWorkerQueue: Setting[Int] = IntChoiceSetting("-Ybackend-worker-queue", "backend threads worker queue size", 0 to 1000, 0)   val YstopAfter: Setting[List[String]] = PhasesSetting("-Ystop-after", "Stop after", aliases = List("-stop")) // backward compat   val YstopBefore: Setting[List[String]] = PhasesSetting("-Ystop-before", "Stop before") // stop before erasure as long as we have not debugged it fully   val 
YshowSuppressedErrors: Setting[Boolean] = BooleanSetting("-Yshow-suppressed-errors", "Also show follow-on errors and warnings that are normally suppressed.") @@ -353,6 +382,7 @@ private sealed trait YSettings: val YprintDebugOwners: Setting[Boolean] = BooleanSetting("-Yprint-debug-owners", "When printing trees, print owners of definitions.") val YprintLevel: Setting[Boolean] = BooleanSetting("-Yprint-level", "print nesting levels of symbols and type variables.") val YshowPrintErrors: Setting[Boolean] = BooleanSetting("-Yshow-print-errors", "Don't suppress exceptions thrown during tree printing.") + val YprintTasty: Setting[Boolean] = BooleanSetting("-Yprint-tasty", "Prints the generated TASTY to stdout.") val YtestPickler: Setting[Boolean] = BooleanSetting("-Ytest-pickler", "Self-test for pickling functionality; should be used with -Ystop-after:pickler.") val YcheckReentrant: Setting[Boolean] = BooleanSetting("-Ycheck-reentrant", "Check that compiled program does not contain vars that can be accessed from a global root.") val YdropComments: Setting[Boolean] = BooleanSetting("-Ydrop-docs", "Drop documentation when scanning source files.", aliases = List("-Ydrop-comments")) @@ -364,9 +394,11 @@ private sealed trait YSettings: val YcheckConstraintDeps: Setting[Boolean] = BooleanSetting("-Ycheck-constraint-deps", "Check dependency tracking in constraints (used for testing the algorithm).") val YretainTrees: Setting[Boolean] = BooleanSetting("-Yretain-trees", "Retain trees for top-level classes, accessible from ClassSymbol#tree") val YshowTreeIds: Setting[Boolean] = BooleanSetting("-Yshow-tree-ids", "Uniquely tag all tree nodes in debugging output.") - val YfromTastyIgnoreList: Setting[List[String]] = MultiStringSetting("-Yfrom-tasty-ignore-list", "file", "List of `tasty` files in jar files that will not be loaded when using -from-tasty") - val YnoExperimental: Setting[Boolean] = BooleanSetting("-Yno-experimental", "Disable experimental language features") - val YlegacyLazyVals: Setting[Boolean] = BooleanSetting("-Ylegacy-lazy-vals", "Use legacy (pre 3.3.0) implementation of lazy vals") + val YfromTastyIgnoreList: Setting[List[String]] = MultiStringSetting("-Yfrom-tasty-ignore-list", "file", "List of `tasty` files in jar files that will not be loaded when using -from-tasty.") + val YnoExperimental: Setting[Boolean] = BooleanSetting("-Yno-experimental", "Disable experimental language features.") + val YlegacyLazyVals: Setting[Boolean] = BooleanSetting("-Ylegacy-lazy-vals", "Use legacy (pre 3.3.0) implementation of lazy vals.") + val YcompileScala2Library: Setting[Boolean] = BooleanSetting("-Ycompile-scala2-library", "Used when compiling the Scala 2 standard library.") + val YoutputOnlyTasty: Setting[Boolean] = BooleanSetting("-Youtput-only-tasty", "Used to only generate the TASTy file without the classfiles") val YprofileEnabled: Setting[Boolean] = BooleanSetting("-Yprofile-enabled", "Enable profiling.") val YprofileDestination: Setting[String] = StringSetting("-Yprofile-destination", "file", "Where to send profiling output - specify a file, default is to the console.", "") @@ -379,11 +411,14 @@ private sealed trait YSettings: // Experimental language features val YnoKindPolymorphism: Setting[Boolean] = BooleanSetting("-Yno-kind-polymorphism", "Disable kind polymorphism.") val YexplicitNulls: Setting[Boolean] = BooleanSetting("-Yexplicit-nulls", "Make reference types non-nullable. Nullable types can be expressed with unions: e.g. 
String|Null.") - val YcheckInit: Setting[Boolean] = BooleanSetting("-Ysafe-init", "Ensure safe initialization of objects") - val YrequireTargetName: Setting[Boolean] = BooleanSetting("-Yrequire-targetName", "Warn if an operator is defined without a @targetName annotation") - val YrecheckTest: Setting[Boolean] = BooleanSetting("-Yrecheck-test", "Run basic rechecking (internal test only)") - val YccDebug: Setting[Boolean] = BooleanSetting("-Ycc-debug", "Used in conjunction with captureChecking language import, debug info for captured references") - val YccNoAbbrev: Setting[Boolean] = BooleanSetting("-Ycc-no-abbrev", "Used in conjunction with captureChecking language import, suppress type abbreviations") + val YcheckInit: Setting[Boolean] = BooleanSetting("-Ysafe-init", "Ensure safe initialization of objects.") + val YcheckInitGlobal: Setting[Boolean] = BooleanSetting("-Ysafe-init-global", "Check safe initialization of global objects.") + val YrequireTargetName: Setting[Boolean] = BooleanSetting("-Yrequire-targetName", "Warn if an operator is defined without a @targetName annotation.") + val YrecheckTest: Setting[Boolean] = BooleanSetting("-Yrecheck-test", "Run basic rechecking (internal test only).") + val YccDebug: Setting[Boolean] = BooleanSetting("-Ycc-debug", "Used in conjunction with captureChecking language import, debug info for captured references.") + val YccNew: Setting[Boolean] = BooleanSetting("-Ycc-new", "Used in conjunction with captureChecking language import, try out new variants (debug option)") + val YccLog: Setting[Boolean] = BooleanSetting("-Ycc-log", "Used in conjunction with captureChecking language import, print tracing and debug info") + val YccPrintSetup: Setting[Boolean] = BooleanSetting("-Ycc-print-setup", "Used in conjunction with captureChecking language import, print trees after cc.Setup phase") /** Area-specific debug output */ val YexplainLowlevel: Setting[Boolean] = BooleanSetting("-Yexplain-lowlevel", "When explaining type errors, show types at a lower level.") @@ -391,10 +426,17 @@ private sealed trait YSettings: val YshowVarBounds: Setting[Boolean] = BooleanSetting("-Yshow-var-bounds", "Print type variables with their bounds.") val YnoDecodeStacktraces: Setting[Boolean] = BooleanSetting("-Yno-decode-stacktraces", "Show raw StackOverflow stacktraces, instead of decoding them into triggering operations.") + val YnoEnrichErrorMessages: Setting[Boolean] = BooleanSetting("-Yno-enrich-error-messages", "Show raw error messages, instead of enriching them with contextual information.") val Yinstrument: Setting[Boolean] = BooleanSetting("-Yinstrument", "Add instrumentation code that counts allocations and closure creations.") val YinstrumentDefs: Setting[Boolean] = BooleanSetting("-Yinstrument-defs", "Add instrumentation code that counts method calls; needs -Yinstrument to be set, too.") val YforceInlineWhileTyping: Setting[Boolean] = BooleanSetting("-Yforce-inline-while-typing", "Make non-transparent inline methods inline when typing. Emulates the old inlining behavior of 3.0.0-M3.") -end YSettings + val YdebugMacros: Setting[Boolean] = BooleanSetting("-Ydebug-macros", "Show debug info when quote pattern match fails") + + // Pipeline compilation options + val YjavaTasty: Setting[Boolean] = BooleanSetting("-Yjava-tasty", "Pickler phase should compute pickles for .java defined symbols for use by build tools") + val YjavaTastyOutput: Setting[AbstractFile] = OutputSetting("-Yjava-tasty-output", "directory|jar", "(Internal use only!) 
destination for generated .tasty files containing Java type signatures.", NoAbstractFile) + val YallowOutlineFromTasty: Setting[Boolean] = BooleanSetting("-Yallow-outline-from-tasty", "Allow outline TASTy to be loaded with the -from-tasty option.") +end YSettings diff --git a/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala b/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala index 7fdf57478f1a..9f603e6792be 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala @@ -89,7 +89,7 @@ case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBu def isInt(s: String) = Try(toInt(s)).isSuccess - import ScalaBuild._ + import ScalaBuild.* def toBuild(s: String) = s match { case null | "FINAL" => Final diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index 34e5582e8a91..79eb2b882f8f 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -3,7 +3,7 @@ package config import scala.language.unsafeNulls -import core.Contexts._ +import core.Contexts.* import dotty.tools.io.{AbstractFile, Directory, JarArchive, PlainDirectory} @@ -24,7 +24,7 @@ object Settings: val OutputTag: ClassTag[AbstractFile] = ClassTag(classOf[AbstractFile]) class SettingsState(initialValues: Seq[Any]): - private val values = ArrayBuffer(initialValues: _*) + private val values = ArrayBuffer(initialValues*) private var _wasRead: Boolean = false override def toString: String = s"SettingsState(values: ${values.toList})" @@ -62,6 +62,7 @@ object Settings: prefix: String = "", aliases: List[String] = Nil, depends: List[(Setting[?], Any)] = Nil, + ignoreInvalidArgs: Boolean = false, propertyClass: Option[Class[?]] = None)(private[Settings] val idx: Int) { private var changed: Boolean = false @@ -104,8 +105,16 @@ object Settings: def fail(msg: String, args: List[String]) = ArgsSummary(sstate, args, errors :+ msg, warnings) + def warn(msg: String, args: List[String]) = + ArgsSummary(sstate, args, errors, warnings :+ msg) + def missingArg = - fail(s"missing argument for option $name", args) + val msg = s"missing argument for option $name" + if ignoreInvalidArgs then warn(msg + ", the tag was ignored", args) else fail(msg, args) + + def invalidChoices(invalid: List[String]) = + val msg = s"invalid choice(s) for $name: ${invalid.mkString(",")}" + if ignoreInvalidArgs then warn(msg + ", the tag was ignored", args) else fail(msg, args) def setBoolean(argValue: String, args: List[String]) = if argValue.equalsIgnoreCase("true") || argValue.isEmpty then update(true, args) @@ -144,7 +153,7 @@ object Settings: choices match case Some(valid) => strings.filterNot(valid.contains) match case Nil => update(strings, args) - case invalid => fail(s"invalid choice(s) for $name: ${invalid.mkString(",")}", args) + case invalid => invalidChoices(invalid) case _ => update(strings, args) case (StringTag, _) if argRest.nonEmpty || choices.exists(_.contains("")) => setString(argRest, args) @@ -287,6 +296,9 @@ object Settings: def MultiChoiceHelpSetting(name: String, helpArg: String, descr: String, choices: List[ChoiceWithHelp[String]], default: List[ChoiceWithHelp[String]], aliases: List[String] = Nil): Setting[List[ChoiceWithHelp[String]]] = publish(Setting(name, descr, default, helpArg, Some(choices), aliases = aliases)) + def UncompleteMultiChoiceHelpSetting(name: String, helpArg: String, descr: String, choices: 
List[ChoiceWithHelp[String]], default: List[ChoiceWithHelp[String]], aliases: List[String] = Nil): Setting[List[ChoiceWithHelp[String]]] = + publish(Setting(name, descr, default, helpArg, Some(choices), aliases = aliases, ignoreInvalidArgs = true)) + def IntSetting(name: String, descr: String, default: Int, aliases: List[String] = Nil): Setting[Int] = publish(Setting(name, descr, default, aliases = aliases)) diff --git a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala index b8fa7994ce0c..7a464d331930 100644 --- a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala +++ b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala @@ -9,6 +9,9 @@ enum SourceVersion: case `3.0-migration`, `3.0`, `3.1` // Note: do not add `3.1-migration` here, 3.1 is the same language as 3.0. case `3.2-migration`, `3.2` case `3.3-migration`, `3.3` + case `3.4-migration`, `3.4` + case `3.5-migration`, `3.5` + // !!! Keep in sync with scala.runtime.stdlibPatches.language !!! case `future-migration`, `future` val isMigrating: Boolean = toString.endsWith("-migration") @@ -16,12 +19,15 @@ enum SourceVersion: def stable: SourceVersion = if isMigrating then SourceVersion.values(ordinal + 1) else this + def prevMigrating: SourceVersion = + if isMigrating then this else SourceVersion.values(ordinal - 1).prevMigrating + def isAtLeast(v: SourceVersion) = stable.ordinal >= v.ordinal def isAtMost(v: SourceVersion) = stable.ordinal <= v.ordinal object SourceVersion extends Property.Key[SourceVersion]: - def defaultSourceVersion = `3.3` + def defaultSourceVersion = `3.4` /** language versions that may appear in a language import, are deprecated, but not removed from the standard library. */ val illegalSourceVersionNames = List("3.1-migration").map(_.toTermName) diff --git a/compiler/src/dotty/tools/dotc/config/WrappedProperties.scala b/compiler/src/dotty/tools/dotc/config/WrappedProperties.scala index 5b79432a97e7..20304b74c1da 100644 --- a/compiler/src/dotty/tools/dotc/config/WrappedProperties.scala +++ b/compiler/src/dotty/tools/dotc/config/WrappedProperties.scala @@ -22,7 +22,7 @@ trait WrappedProperties extends PropertiesTrait { override def envOrNone(name: String): Option[String] = wrap(super.envOrNone(name)).flatten def systemProperties: Iterator[(String, String)] = { - import scala.jdk.CollectionConverters._ + import scala.jdk.CollectionConverters.* wrap(System.getProperties.asScala.iterator) getOrElse Iterator.empty } } diff --git a/compiler/src/dotty/tools/dotc/core/Annotations.scala b/compiler/src/dotty/tools/dotc/core/Annotations.scala index 202f3eb26e41..ac02baa429b4 100644 --- a/compiler/src/dotty/tools/dotc/core/Annotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Annotations.scala @@ -2,7 +2,7 @@ package dotty.tools package dotc package core -import Symbols._, Types._, Contexts._, Constants._, Phases.* +import Symbols.*, Types.*, Contexts.*, Constants.*, Phases.* import ast.tpd, tpd.* import util.Spans.Span import printing.{Showable, Printer} @@ -244,9 +244,6 @@ object Annotations { } else None } - - def makeSourceFile(path: String, span: Span)(using Context): Annotation = - apply(defn.SourceFileAnnot, Literal(Constant(path)), span) } @sharable val EmptyAnnotation = Annotation(EmptyTree) diff --git a/compiler/src/dotty/tools/dotc/core/Atoms.scala b/compiler/src/dotty/tools/dotc/core/Atoms.scala index bcaaf6794107..a68a07947965 100644 --- a/compiler/src/dotty/tools/dotc/core/Atoms.scala +++ 
b/compiler/src/dotty/tools/dotc/core/Atoms.scala @@ -2,7 +2,7 @@ package dotty.tools package dotc package core -import Types._ +import Types.* /** Indicates the singleton types that a type must or may consist of. * @param lo The lower bound: singleton types in this set are guaranteed diff --git a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala index a61701eee2d7..060189016828 100644 --- a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala +++ b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala @@ -2,9 +2,9 @@ package dotty.tools package dotc package core -import Contexts._, Types._, Symbols._, Names._, Flags._ +import Contexts.*, Types.*, Symbols.*, Names.*, Flags.* import Denotations.SingleDenotation -import Decorators._ +import Decorators.* import collection.mutable import config.SourceVersion.future import config.Feature.sourceVersion @@ -62,7 +62,7 @@ object CheckRealizable { * Type.isStable). */ class CheckRealizable(using Context) { - import CheckRealizable._ + import CheckRealizable.* /** A set of all fields that have already been checked. Used * to avoid infinite recursions when analyzing recursive types. diff --git a/compiler/src/dotty/tools/dotc/core/Comments.scala b/compiler/src/dotty/tools/dotc/core/Comments.scala index 1b20b75ad8ac..92160c97973d 100644 --- a/compiler/src/dotty/tools/dotc/core/Comments.scala +++ b/compiler/src/dotty/tools/dotc/core/Comments.scala @@ -5,10 +5,10 @@ package core import scala.language.unsafeNulls import ast.{ untpd, tpd } -import Symbols._, Contexts._ +import Symbols.*, Contexts.* import util.{SourceFile, ReadOnlyMap} -import util.Spans._ -import util.CommentParsing._ +import util.Spans.* +import util.CommentParsing.* import util.Property.Key import parsing.Parsers.Parser import reporting.ProperDefinitionNotFound @@ -17,8 +17,7 @@ object Comments { val ContextDoc: Key[ContextDocstrings] = new Key[ContextDocstrings] /** Decorator for getting docbase out of context */ - given CommentsContext: AnyRef with - extension (c: Context) def docCtx: Option[ContextDocstrings] = c.property(ContextDoc) + extension (c: Context) def docCtx: Option[ContextDocstrings] = c.property(ContextDoc) /** Context for Docstrings, contains basic functionality for getting * docstrings via `Symbol` and expanding templates diff --git a/compiler/src/dotty/tools/dotc/core/CompilationUnitInfo.scala b/compiler/src/dotty/tools/dotc/core/CompilationUnitInfo.scala new file mode 100644 index 000000000000..d030182a5d7a --- /dev/null +++ b/compiler/src/dotty/tools/dotc/core/CompilationUnitInfo.scala @@ -0,0 +1,22 @@ +package dotty.tools.dotc.core + +import dotty.tools.io.AbstractFile +import dotty.tools.tasty.TastyVersion + +/** Information about the compilation unit of a class symbol. + * + * @param associatedFile The source or class file from which this class or + * the class containing this symbol was generated, + * null if not applicable. + * @param tastyInfo Information about the TASTy from which this class was loaded. 
+ * None if not loaded from TASTy, + */ +case class CompilationUnitInfo( + associatedFile: AbstractFile, + tastyInfo: Option[TastyInfo], +) + +object CompilationUnitInfo: + def apply(assocFile: AbstractFile | Null): CompilationUnitInfo | Null = + if assocFile == null then null + else new CompilationUnitInfo(assocFile, tastyInfo = None) diff --git a/compiler/src/dotty/tools/dotc/core/Constants.scala b/compiler/src/dotty/tools/dotc/core/Constants.scala index f45e9e5217de..63acfbe55701 100644 --- a/compiler/src/dotty/tools/dotc/core/Constants.scala +++ b/compiler/src/dotty/tools/dotc/core/Constants.scala @@ -2,7 +2,7 @@ package dotty.tools package dotc package core -import Types._, Symbols._, Contexts._ +import Types.*, Symbols.*, Contexts.* import printing.Printer import printing.Texts.Text @@ -210,7 +210,7 @@ object Constants { } override def hashCode: Int = { - import scala.util.hashing.MurmurHash3._ + import scala.util.hashing.MurmurHash3.* val seed = 17 var h = seed h = mix(h, tag.##) // include tag in the hash, otherwise 0, 0d, 0L, 0f collide. diff --git a/compiler/src/dotty/tools/dotc/core/Constraint.scala b/compiler/src/dotty/tools/dotc/core/Constraint.scala index c634f847e510..b5979221e2c6 100644 --- a/compiler/src/dotty/tools/dotc/core/Constraint.scala +++ b/compiler/src/dotty/tools/dotc/core/Constraint.scala @@ -2,7 +2,7 @@ package dotty.tools package dotc package core -import Types._, Contexts._ +import Types.*, Contexts.* import printing.Showable import util.{SimpleIdentitySet, SimpleIdentityMap} @@ -138,6 +138,9 @@ abstract class Constraint extends Showable { /** The same as this constraint, but with `tv` marked as hard. */ def withHard(tv: TypeVar)(using Context): This + /** Mark toplevel type vars in `tp` as hard. */ + def hardenTypeVars(tp: Type)(using Context): This + /** Gives for each instantiated type var that does not yet have its `inst` field * set, the instance value stored in the constraint. Storing instances in constraints * is done only in a temporary way for contexts that may be retracted diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 9ffe2bda73cb..6be6ec94c1c3 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -2,11 +2,11 @@ package dotty.tools package dotc package core -import Types._ -import Contexts._ -import Symbols._ -import Decorators._ -import Flags._ +import Types.* +import Contexts.* +import Symbols.* +import Decorators.* +import Flags.* import config.Config import config.Printers.typr import typer.ProtoTypes.{newTypeVar, representedParamRef} @@ -54,7 +54,7 @@ trait ConstraintHandling { protected var homogenizeArgs: Boolean = false /** We are currently comparing type lambdas. Used as a flag for - * optimization: when `false`, no need to do an expensive `pruneLambdaParams` + * optimization: when `false`, no need to do an expensive `avoidLambdaParams` */ protected var comparedTypeLambdas: Set[TypeLambda] = Set.empty @@ -64,6 +64,14 @@ trait ConstraintHandling { */ protected var canWidenAbstract: Boolean = true + /** + * Used for match type reduction. + * When an abstract type may not be widened, according to `widenAbstractOKFor`, + * we record it in this set, so that we can ultimately fail the reduction, but + * with all the information that comes out from continuing to widen the abstract type. 
+ */ + protected var poisoned: Set[TypeParamRef] = Set.empty + protected var myNecessaryConstraintsOnly = false /** When collecting the constraints needed for a particular subtyping * judgment to be true, we sometimes need to approximate the constraint @@ -102,7 +110,7 @@ trait ConstraintHandling { * * If we trust bounds, then the lower bound of `X` is `x.M` since `x.M >: 1`. * Then even if we correct levels on instantiation to eliminate the local `x`, - * it is alreay too late, we'd get `Int & String` as instance, which does not + * it is already too late, we'd get `Int & String` as instance, which does not * satisfy the original constraint `X >: 1`. * * But if `trustBounds` is false, we do not conclude the `x.M >: 1` since @@ -558,13 +566,6 @@ trait ConstraintHandling { inst end approximation - private def isTransparent(tp: Type, traitOnly: Boolean)(using Context): Boolean = tp match - case AndType(tp1, tp2) => - isTransparent(tp1, traitOnly) && isTransparent(tp2, traitOnly) - case _ => - val cls = tp.underlyingClassRef(refinementOK = false).typeSymbol - cls.isTransparentClass && (!traitOnly || cls.is(Trait)) - /** If `tp` is an intersection such that some operands are transparent trait instances * and others are not, replace as many transparent trait instances as possible with Any * as long as the result is still a subtype of `bound`. But fall back to the @@ -577,7 +578,7 @@ trait ConstraintHandling { var dropped: List[Type] = List() // the types dropped so far, last one on top def dropOneTransparentTrait(tp: Type): Type = - if isTransparent(tp, traitOnly = true) && !kept.contains(tp) then + if tp.isTransparent(traitOnly = true) && !kept.contains(tp) then dropped = tp :: dropped defn.AnyType else tp match @@ -650,7 +651,7 @@ trait ConstraintHandling { def widenOr(tp: Type) = if widenUnions then val tpw = tp.widenUnion - if (tpw ne tp) && !isTransparent(tpw, traitOnly = false) && (tpw <:< bound) then tpw else tp + if (tpw ne tp) && !tpw.isTransparent() && (tpw <:< bound) then tpw else tp else tp.hardenUnions def widenSingle(tp: Type) = @@ -681,7 +682,7 @@ trait ConstraintHandling { case tp: AndType => tp.derivedAndType(tp.tp1.hardenUnions, tp.tp2.hardenUnions) case tp: RefinedType => - tp.derivedRefinedType(tp.parent.hardenUnions, tp.refinedName, tp.refinedInfo) + tp.derivedRefinedType(parent = tp.parent.hardenUnions) case tp: RecType => tp.rebind(tp.parent.hardenUnions) case tp: HKTypeLambda => @@ -708,8 +709,8 @@ trait ConstraintHandling { // Widening can add extra constraints, in particular the widened type might // be a type variable which is now instantiated to `param`, and therefore // cannot be used as an instantiation of `param` without creating a loop. - // If that happens, we run `instanceType` again to find a new instantation. - // (we do not check for non-toplevel occurences: those should never occur + // If that happens, we run `instanceType` again to find a new instantiation. + // (we do not check for non-toplevel occurrences: those should never occur // since `addOneBound` disallows recursive lower bounds). 
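For reviewers less familiar with `dropOneTransparentTrait`/`widenOr` above: their user-visible contract is that transparent traits should not surface in inferred types. A small example adapted from the Scala 3 reference documentation on transparent traits; the inferred type noted in the comment reflects that documented behavior, not something verified against this exact diff:

```scala
transparent trait S
trait Kind
object Var extends Kind, S
object Val extends Kind, S

// The least upper bound of Val.type and Var.type includes S, but because
// S is transparent it is dropped from the inferred type: `kinds` is
// inferred as Set[Kind] rather than Set[Kind & S].
val kinds = Set(Val, Var)
val first: Kind = kinds.head
```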
if constraint.occursAtToplevel(param, widened) then instanceType(param, fromBelow, widenUnions, maxLevel) diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala b/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala index d2b1246a8149..8ec38d52e725 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala @@ -1,12 +1,13 @@ package dotty.tools.dotc package core -import Contexts._ +import Contexts.* import config.Printers.{default, typr} +import scala.compiletime.uninitialized trait ConstraintRunInfo { self: Run => private var maxSize = 0 - private var maxConstraint: Constraint | Null = _ + private var maxConstraint: Constraint | Null = uninitialized def recordConstraintSize(c: Constraint, size: Int): Unit = if (size > maxSize) { maxSize = size diff --git a/compiler/src/dotty/tools/dotc/core/ContextOps.scala b/compiler/src/dotty/tools/dotc/core/ContextOps.scala index aa85f714a8e5..920da377f9b4 100644 --- a/compiler/src/dotty/tools/dotc/core/ContextOps.scala +++ b/compiler/src/dotty/tools/dotc/core/ContextOps.scala @@ -1,8 +1,8 @@ package dotty.tools.dotc package core -import Contexts._, Symbols._, Types._, Flags._ -import Denotations._, SymDenotations._ +import Contexts.*, Symbols.*, Types.*, Flags.* +import Denotations.*, SymDenotations.* import Names.Name, StdNames.nme import ast.untpd @@ -80,12 +80,19 @@ object ContextOps: } /** A fresh local context with given tree and owner. - * Owner might not exist (can happen for self valdefs), in which case - * no owner is set in result context - */ + * + * #19019 Self valdefs must always keep their enclosing ctx.owner. They + * can be NoSymbol or having a symbol with the SelfName flag, depending on + * whether they have an explicit name or not. In either case, we avoid + * `setOwner`. + * + * The owner might also not exist for other kinds of trees, such as + * `LambdaTypeTree` and `TermLambdaTypeTree`. In these cases, we also + * keep the enclosing owner. + */ def localContext(tree: untpd.Tree, owner: Symbol): FreshContext = inContext(ctx) { val freshCtx = ctx.fresh.setTree(tree) - if owner.exists then freshCtx.setOwner(owner) else freshCtx + if owner.exists && !owner.is(SelfName) then freshCtx.setOwner(owner) else freshCtx } /** Context where `sym` is defined, assuming we are in a nested context. 
*/ diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index e0e43169820a..ee288a08b53f 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -3,28 +3,28 @@ package dotc package core import interfaces.CompilerCallback -import Decorators._ -import Periods._ -import Names._ -import Phases._ -import Types._ -import Symbols._ -import Scopes._ -import Uniques._ -import ast.Trees._ +import Decorators.* +import Periods.* +import Names.* +import Phases.* +import Types.* +import Symbols.* +import Scopes.* +import Uniques.* +import ast.Trees.* import ast.untpd import util.{NoSource, SimpleIdentityMap, SourceFile, HashSet, ReusableInstance} import typer.{Implicits, ImportInfo, SearchHistory, SearchRoot, TypeAssigner, Typer, Nullables} import inlines.Inliner -import Nullables._ +import Nullables.* import Implicits.ContextualImplicits -import config.Settings._ +import config.Settings.* import config.Config -import reporting._ +import reporting.* import io.{AbstractFile, NoAbstractFile, PlainFile, Path} import scala.io.Codec import collection.mutable -import printing._ +import printing.* import config.{JavaPlatform, SJSPlatform, Platform, ScalaSettings} import classfile.ReusableDataReader import StdNames.nme @@ -34,27 +34,28 @@ import scala.annotation.internal.sharable import DenotTransformers.DenotTransformer import dotty.tools.dotc.profile.Profiler +import dotty.tools.dotc.sbt.interfaces.{IncrementalCallback, ProgressCallback} import util.Property.Key import util.Store -import xsbti.AnalysisCallback -import plugins._ +import plugins.* import java.util.concurrent.atomic.AtomicInteger import java.nio.file.InvalidPathException object Contexts { - private val (compilerCallbackLoc, store1) = Store.empty.newLocation[CompilerCallback]() - private val (sbtCallbackLoc, store2) = store1.newLocation[AnalysisCallback]() - private val (printerFnLoc, store3) = store2.newLocation[Context => Printer](new RefinedPrinter(_)) - private val (settingsStateLoc, store4) = store3.newLocation[SettingsState]() - private val (compilationUnitLoc, store5) = store4.newLocation[CompilationUnit]() - private val (runLoc, store6) = store5.newLocation[Run | Null]() - private val (profilerLoc, store7) = store6.newLocation[Profiler]() - private val (notNullInfosLoc, store8) = store7.newLocation[List[NotNullInfo]]() - private val (importInfoLoc, store9) = store8.newLocation[ImportInfo | Null]() - private val (typeAssignerLoc, store10) = store9.newLocation[TypeAssigner](TypeAssigner) + private val (compilerCallbackLoc, store1) = Store.empty.newLocation[CompilerCallback]() + private val (incCallbackLoc, store2) = store1.newLocation[IncrementalCallback | Null]() + private val (printerFnLoc, store3) = store2.newLocation[Context => Printer](new RefinedPrinter(_)) + private val (settingsStateLoc, store4) = store3.newLocation[SettingsState]() + private val (compilationUnitLoc, store5) = store4.newLocation[CompilationUnit]() + private val (runLoc, store6) = store5.newLocation[Run | Null]() + private val (profilerLoc, store7) = store6.newLocation[Profiler]() + private val (notNullInfosLoc, store8) = store7.newLocation[List[NotNullInfo]]() + private val (importInfoLoc, store9) = store8.newLocation[ImportInfo | Null]() + private val (typeAssignerLoc, store10) = store9.newLocation[TypeAssigner](TypeAssigner) + private val (progressCallbackLoc, store11) = store10.newLocation[ProgressCallback | Null]() - private val 
initialStore = store10 + private val initialStore = store11 /** The current context */ inline def ctx(using ctx: Context): Context = ctx @@ -164,8 +165,26 @@ object Contexts { /** The compiler callback implementation, or null if no callback will be called. */ def compilerCallback: CompilerCallback = store(compilerCallbackLoc) - /** The sbt callback implementation if we are run from sbt, null otherwise */ - def sbtCallback: AnalysisCallback = store(sbtCallbackLoc) + /** The Zinc callback implementation if we are run from Zinc, null otherwise */ + def incCallback: IncrementalCallback | Null = store(incCallbackLoc) + + /** Run `op` if there exists an incremental callback */ + inline def withIncCallback(inline op: IncrementalCallback => Unit): Unit = + val local = incCallback + if local != null then op(local) + + def runZincPhases: Boolean = + def forceRun = settings.YdumpSbtInc.value || settings.YforceSbtPhases.value + val local = incCallback + local != null && local.enabled || forceRun + + /** The Zinc compile progress callback implementation if we are run from Zinc, null otherwise */ + def progressCallback: ProgressCallback | Null = store(progressCallbackLoc) + + /** Run `op` if there exists a Zinc progress callback */ + inline def withProgressCallback(inline op: ProgressCallback => Unit): Unit = + val local = progressCallback + if local != null then op(local) /** The current plain printer */ def printerFn: Context => Printer = store(printerFnLoc) @@ -395,7 +414,7 @@ object Contexts { * from constructor parameters to class parameter accessors. */ def superCallContext: Context = { - val locals = newScopeWith(owner.typeParams ++ owner.asClass.paramAccessors: _*) + val locals = newScopeWith(owner.typeParams ++ owner.asClass.paramAccessors*) superOrThisCallContext(owner.primaryConstructor, locals) } @@ -539,7 +558,7 @@ object Contexts { private var _owner: Symbol = uninitialized final def owner: Symbol = _owner - private var _tree: Tree[?]= _ + private var _tree: Tree[?] = uninitialized final def tree: Tree[?] 
= _tree private var _scope: Scope = uninitialized @@ -664,7 +683,8 @@ object Contexts { } def setCompilerCallback(callback: CompilerCallback): this.type = updateStore(compilerCallbackLoc, callback) - def setSbtCallback(callback: AnalysisCallback): this.type = updateStore(sbtCallbackLoc, callback) + def setIncCallback(callback: IncrementalCallback): this.type = updateStore(incCallbackLoc, callback) + def setProgressCallback(callback: ProgressCallback): this.type = updateStore(progressCallbackLoc, callback) def setPrinterFn(printer: Context => Printer): this.type = updateStore(printerFnLoc, printer) def setSettings(settingsState: SettingsState): this.type = updateStore(settingsStateLoc, settingsState) def setRun(run: Run | Null): this.type = updateStore(runLoc, run) @@ -732,20 +752,18 @@ object Contexts { c end FreshContext - given ops: AnyRef with - extension (c: Context) - def addNotNullInfo(info: NotNullInfo) = - c.withNotNullInfos(c.notNullInfos.extendWith(info)) + extension (c: Context) + def addNotNullInfo(info: NotNullInfo) = + c.withNotNullInfos(c.notNullInfos.extendWith(info)) - def addNotNullRefs(refs: Set[TermRef]) = - c.addNotNullInfo(NotNullInfo(refs, Set())) + def addNotNullRefs(refs: Set[TermRef]) = + c.addNotNullInfo(NotNullInfo(refs, Set())) - def withNotNullInfos(infos: List[NotNullInfo]): Context = - if c.notNullInfos eq infos then c else c.fresh.setNotNullInfos(infos) + def withNotNullInfos(infos: List[NotNullInfo]): Context = + if c.notNullInfos eq infos then c else c.fresh.setNotNullInfos(infos) - def relaxedOverrideContext: Context = - c.withModeBits(c.mode &~ Mode.SafeNulls | Mode.RelaxedOverriding) - end ops + def relaxedOverrideContext: Context = + c.withModeBits(c.mode &~ Mode.SafeNulls | Mode.RelaxedOverriding) // TODO: Fix issue when converting ModeChanges and FreshModeChanges to extension givens extension (c: Context) { @@ -805,7 +823,7 @@ object Contexts { * Note: plain TypeComparers always take on the kind of the outer comparer if they are in the same context. * In other words: tracking or explaining is a sticky property in the same context. 
*/ - private def comparer(using Context): TypeComparer = + def comparer(using Context): TypeComparer = util.Stats.record("comparing") val base = ctx.base if base.comparersInUse > 0 @@ -895,7 +913,7 @@ object Contexts { def next()(using Context): FreshContext = val base = ctx.base - import base._ + import base.* val nestedCtx = if inUse < pool.size then pool(inUse).reuseIn(ctx) @@ -1043,6 +1061,7 @@ object Contexts { sources.clear() files.clear() comparers.clear() // forces re-evaluation of top and bottom classes in TypeComparer + comparersInUse = 0 // Test that access is single threaded diff --git a/compiler/src/dotty/tools/dotc/core/Decorators.scala b/compiler/src/dotty/tools/dotc/core/Decorators.scala index 4ef0dbc9a43b..29d4b3fa4052 100644 --- a/compiler/src/dotty/tools/dotc/core/Decorators.scala +++ b/compiler/src/dotty/tools/dotc/core/Decorators.scala @@ -6,8 +6,8 @@ import scala.annotation.tailrec import scala.collection.mutable.ListBuffer import scala.util.control.NonFatal -import Contexts._, Names._, Phases._, Symbols._ -import printing.{ Printer, Showable }, printing.Formatting._, printing.Texts._ +import Contexts.*, Names.*, Phases.*, Symbols.* +import printing.{ Printer, Showable }, printing.Formatting.*, printing.Texts.* import transform.MegaPhase import reporting.{Message, NoExplanation} @@ -56,6 +56,11 @@ object Decorators { def indented(width: Int): String = val padding = " " * width padding + s.replace("\n", "\n" + padding) + + def join(sep: String, other: String) = + if s.isEmpty then other + else if other.isEmpty then s + else s + sep + other end extension /** Convert lazy string to message. To be used with caution, since no message-defined @@ -234,6 +239,9 @@ object Decorators { def nestedExists(p: T => Boolean): Boolean = xss match case xs :: xss1 => xs.exists(p) || xss1.nestedExists(p) case nil => false + def nestedFind(p: T => Boolean): Option[T] = xss match + case xs :: xss1 => xs.find(p).orElse(xss1.nestedFind(p)) + case nil => None end extension extension (text: Text) @@ -279,7 +287,7 @@ object Decorators { catch case ex: CyclicReference => "... (caught cyclic reference) ..." case NonFatal(ex) - if !ctx.mode.is(Mode.PrintShowExceptions) && !ctx.settings.YshowPrintErrors.value => + if !ctx.settings.YshowPrintErrors.value => s"... (cannot display due to ${ex.className} ${ex.getMessage}) ..."
case _ => String.valueOf(x).nn diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 027aec16e9a3..e08672c693b9 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -3,22 +3,23 @@ package dotc package core import scala.annotation.{threadUnsafe => tu} -import Types._, Contexts._, Symbols._, SymDenotations._, StdNames._, Names._, Phases._ -import Flags._, Scopes._, Decorators._, NameOps._, Periods._, NullOpsDecorator._ +import Types.*, Contexts.*, Symbols.*, SymDenotations.*, StdNames.*, Names.*, Phases.* +import Flags.*, Scopes.*, Decorators.*, NameOps.*, Periods.*, NullOpsDecorator.* import unpickleScala2.Scala2Unpickler.ensureConstructor import scala.collection.mutable import collection.mutable import Denotations.{SingleDenotation, staticRef} import util.{SimpleIdentityMap, SourceFile, NoSource} import typer.ImportInfo.RootRef -import Comments.CommentsContext -import Comments.Comment +import Comments.{Comment, docCtx} import util.Spans.NoSpan import config.Feature import Symbols.requiredModuleRef -import cc.{CapturingType, CaptureSet, EventuallyCapturingType} +import cc.{CaptureSet, RetainingType} +import ast.tpd.ref import scala.annotation.tailrec +import scala.compiletime.uninitialized object Definitions { @@ -41,9 +42,9 @@ object Definitions { * */ class Definitions { - import Definitions._ + import Definitions.* - private var initCtx: Context = _ + private var initCtx: Context = uninitialized private given currentContext[Dummy_so_its_a_def]: Context = initCtx private def newPermanentSymbol[N <: Name](owner: Symbol, name: N, flags: FlagSet, info: Type) = @@ -121,8 +122,8 @@ class Definitions { denot.info = TypeAlias( HKTypeLambda(argParamNames :+ "R".toTypeName, argVariances :+ Covariant)( tl => List.fill(arity + 1)(TypeBounds.empty), - tl => CapturingType(underlyingClass.typeRef.appliedTo(tl.paramRefs), - CaptureSet.universal) + tl => RetainingType(underlyingClass.typeRef.appliedTo(tl.paramRefs), + ref(captureRoot.termRef) :: Nil) )) else val cls = denot.asClass.classSymbol @@ -243,8 +244,10 @@ class Definitions { @tu lazy val Compiletime_requireConst : Symbol = CompiletimePackageClass.requiredMethod("requireConst") @tu lazy val Compiletime_constValue : Symbol = CompiletimePackageClass.requiredMethod("constValue") @tu lazy val Compiletime_constValueOpt: Symbol = CompiletimePackageClass.requiredMethod("constValueOpt") + @tu lazy val Compiletime_constValueTuple: Symbol = CompiletimePackageClass.requiredMethod("constValueTuple") @tu lazy val Compiletime_summonFrom : Symbol = CompiletimePackageClass.requiredMethod("summonFrom") - @tu lazy val Compiletime_summonInline : Symbol = CompiletimePackageClass.requiredMethod("summonInline") + @tu lazy val Compiletime_summonInline : Symbol = CompiletimePackageClass.requiredMethod("summonInline") + @tu lazy val Compiletime_summonAll : Symbol = CompiletimePackageClass.requiredMethod("summonAll") @tu lazy val CompiletimeTestingPackage: Symbol = requiredPackage("scala.compiletime.testing") @tu lazy val CompiletimeTesting_typeChecks: Symbol = CompiletimeTestingPackage.requiredMethod("typeChecks") @tu lazy val CompiletimeTesting_typeCheckErrors: Symbol = CompiletimeTestingPackage.requiredMethod("typeCheckErrors") @@ -513,14 +516,16 @@ class Definitions { methodNames.map(getWrapVarargsArrayModule.requiredMethod(_)) }) - @tu lazy val ListClass: Symbol = requiredClass("scala.collection.immutable.List") - def 
ListType: TypeRef = ListClass.typeRef - @tu lazy val ListModule: Symbol = requiredModule("scala.collection.immutable.List") - @tu lazy val NilModule: Symbol = requiredModule("scala.collection.immutable.Nil") - def NilType: TermRef = NilModule.termRef - @tu lazy val ConsClass: Symbol = requiredClass("scala.collection.immutable.::") - def ConsType: TypeRef = ConsClass.typeRef - @tu lazy val SeqFactoryClass: Symbol = requiredClass("scala.collection.SeqFactory") + @tu lazy val ListClass: Symbol = requiredClass("scala.collection.immutable.List") + def ListType: TypeRef = ListClass.typeRef + @tu lazy val ListModule: Symbol = requiredModule("scala.collection.immutable.List") + @tu lazy val ListModule_apply: Symbol = ListModule.requiredMethod(nme.apply) + def ListModuleAlias: Symbol = ScalaPackageClass.requiredMethod(nme.List) + @tu lazy val NilModule: Symbol = requiredModule("scala.collection.immutable.Nil") + def NilType: TermRef = NilModule.termRef + @tu lazy val ConsClass: Symbol = requiredClass("scala.collection.immutable.::") + def ConsType: TypeRef = ConsClass.typeRef + @tu lazy val SeqFactoryClass: Symbol = requiredClass("scala.collection.SeqFactory") @tu lazy val SingletonClass: ClassSymbol = // needed as a synthetic class because Scala 2.x refers to it in classfiles @@ -530,8 +535,11 @@ class Definitions { List(AnyType), EmptyScope) @tu lazy val SingletonType: TypeRef = SingletonClass.typeRef - @tu lazy val CollectionSeqType: TypeRef = requiredClassRef("scala.collection.Seq") - @tu lazy val SeqType: TypeRef = requiredClassRef("scala.collection.immutable.Seq") + @tu lazy val CollectionSeqType: TypeRef = requiredClassRef("scala.collection.Seq") + @tu lazy val SeqType: TypeRef = requiredClassRef("scala.collection.immutable.Seq") + @tu lazy val SeqModule: Symbol = requiredModule("scala.collection.immutable.Seq") + @tu lazy val SeqModule_apply: Symbol = SeqModule.requiredMethod(nme.apply) + def SeqModuleAlias: Symbol = ScalaPackageClass.requiredMethod(nme.Seq) def SeqClass(using Context): ClassSymbol = SeqType.symbol.asClass @tu lazy val Seq_apply : Symbol = SeqClass.requiredMethod(nme.apply) @tu lazy val Seq_head : Symbol = SeqClass.requiredMethod(nme.head) @@ -539,7 +547,6 @@ class Definitions { @tu lazy val Seq_lengthCompare: Symbol = SeqClass.requiredMethod(nme.lengthCompare, List(IntType)) @tu lazy val Seq_length : Symbol = SeqClass.requiredMethod(nme.length) @tu lazy val Seq_toSeq : Symbol = SeqClass.requiredMethod(nme.toSeq) - @tu lazy val SeqModule: Symbol = requiredModule("scala.collection.immutable.Seq") @tu lazy val StringOps: Symbol = requiredClass("scala.collection.StringOps") @@ -744,6 +751,7 @@ class Definitions { @tu lazy val StringContextModule_processEscapes: Symbol = StringContextModule.requiredMethod(nme.processEscapes) @tu lazy val PartialFunctionClass: ClassSymbol = requiredClass("scala.PartialFunction") + @tu lazy val PartialFunction_apply: Symbol = PartialFunctionClass.requiredMethod(nme.apply) @tu lazy val PartialFunction_isDefinedAt: Symbol = PartialFunctionClass.requiredMethod(nme.isDefinedAt) @tu lazy val PartialFunction_applyOrElse: Symbol = PartialFunctionClass.requiredMethod(nme.applyOrElse) @@ -853,9 +861,9 @@ class Definitions { @tu lazy val QuoteMatchingClass: ClassSymbol = requiredClass("scala.quoted.runtime.QuoteMatching") @tu lazy val QuoteMatching_ExprMatch: Symbol = QuoteMatchingClass.requiredMethod("ExprMatch") - @tu lazy val QuoteMatching_ExprMatchModule: Symbol = QuoteMatchingClass.requiredClass("ExprMatchModule") + @tu lazy val 
QuoteMatching_ExprMatch_unapply: Symbol = QuoteMatchingClass.requiredClass("ExprMatchModule").requiredMethod(nme.unapply) @tu lazy val QuoteMatching_TypeMatch: Symbol = QuoteMatchingClass.requiredMethod("TypeMatch") - @tu lazy val QuoteMatching_TypeMatchModule: Symbol = QuoteMatchingClass.requiredClass("TypeMatchModule") + @tu lazy val QuoteMatching_TypeMatch_unapply: Symbol = QuoteMatchingClass.requiredClass("TypeMatchModule").requiredMethod(nme.unapply) @tu lazy val QuoteMatchingModule: Symbol = requiredModule("scala.quoted.runtime.QuoteMatching") @tu lazy val QuoteMatching_KNil: Symbol = QuoteMatchingModule.requiredType("KNil") @tu lazy val QuoteMatching_KCons: Symbol = QuoteMatchingModule.requiredType("KCons") @@ -880,7 +888,6 @@ class Definitions { @tu lazy val QuotedRuntimePatterns: Symbol = requiredModule("scala.quoted.runtime.Patterns") @tu lazy val QuotedRuntimePatterns_patternHole: Symbol = QuotedRuntimePatterns.requiredMethod("patternHole") - @tu lazy val QuotedRuntimePatterns_patternHigherOrderHole: Symbol = QuotedRuntimePatterns.requiredMethod("patternHigherOrderHole") @tu lazy val QuotedRuntimePatterns_higherOrderHole: Symbol = QuotedRuntimePatterns.requiredMethod("higherOrderHole") @tu lazy val QuotedRuntimePatterns_patternTypeAnnot: ClassSymbol = QuotedRuntimePatterns.requiredClass("patternType") @tu lazy val QuotedRuntimePatterns_fromAboveAnnot: ClassSymbol = QuotedRuntimePatterns.requiredClass("fromAbove") @@ -932,6 +939,8 @@ class Definitions { @tu lazy val TupleTypeRef: TypeRef = requiredClassRef("scala.Tuple") def TupleClass(using Context): ClassSymbol = TupleTypeRef.symbol.asClass @tu lazy val Tuple_cons: Symbol = TupleClass.requiredMethod("*:") + @tu lazy val TupleModule: Symbol = requiredModule("scala.Tuple") + @tu lazy val EmptyTupleClass: Symbol = requiredClass("scala.EmptyTuple") @tu lazy val EmptyTupleModule: Symbol = requiredModule("scala.EmptyTuple") @tu lazy val NonEmptyTupleTypeRef: TypeRef = requiredClassRef("scala.NonEmptyTuple") def NonEmptyTupleClass(using Context): ClassSymbol = NonEmptyTupleTypeRef.symbol.asClass @@ -942,6 +951,7 @@ class Definitions { def TupleXXLModule(using Context): Symbol = TupleXXLClass.companionModule def TupleXXL_fromIterator(using Context): Symbol = TupleXXLModule.requiredMethod("fromIterator") + def TupleXXL_unapplySeq(using Context): Symbol = TupleXXLModule.requiredMethod(nme.unapplySeq) @tu lazy val RuntimeTupleMirrorTypeRef: TypeRef = requiredClassRef("scala.runtime.TupleMirror") @@ -970,20 +980,27 @@ class Definitions { @tu lazy val CapsModule: Symbol = requiredModule("scala.caps") @tu lazy val captureRoot: TermSymbol = CapsModule.requiredValue("cap") + @tu lazy val Caps_Cap: TypeSymbol = CapsModule.requiredType("Cap") + @tu lazy val Caps_reachCapability: TermSymbol = CapsModule.requiredMethod("reachCapability") @tu lazy val CapsUnsafeModule: Symbol = requiredModule("scala.caps.unsafe") + @tu lazy val Caps_unsafeAssumePure: Symbol = CapsUnsafeModule.requiredMethod("unsafeAssumePure") @tu lazy val Caps_unsafeBox: Symbol = CapsUnsafeModule.requiredMethod("unsafeBox") @tu lazy val Caps_unsafeUnbox: Symbol = CapsUnsafeModule.requiredMethod("unsafeUnbox") @tu lazy val Caps_unsafeBoxFunArg: Symbol = CapsUnsafeModule.requiredMethod("unsafeBoxFunArg") - @tu lazy val Caps_SealedAnnot: ClassSymbol = requiredClass("scala.caps.Sealed") + @tu lazy val expandedUniversalSet: CaptureSet = CaptureSet(captureRoot.termRef) + + @tu lazy val PureClass: Symbol = requiredClass("scala.Pure") // Annotation base classes @tu lazy val 
AnnotationClass: ClassSymbol = requiredClass("scala.annotation.Annotation") @tu lazy val StaticAnnotationClass: ClassSymbol = requiredClass("scala.annotation.StaticAnnotation") @tu lazy val RefiningAnnotationClass: ClassSymbol = requiredClass("scala.annotation.RefiningAnnotation") + @tu lazy val JavaAnnotationClass: ClassSymbol = requiredClass("java.lang.annotation.Annotation") // Annotation classes @tu lazy val AllowConversionsAnnot: ClassSymbol = requiredClass("scala.annotation.allowConversions") @tu lazy val AnnotationDefaultAnnot: ClassSymbol = requiredClass("scala.annotation.internal.AnnotationDefault") + @tu lazy val AssignedNonLocallyAnnot: ClassSymbol = requiredClass("scala.annotation.internal.AssignedNonLocally") @tu lazy val BeanPropertyAnnot: ClassSymbol = requiredClass("scala.beans.BeanProperty") @tu lazy val BooleanBeanPropertyAnnot: ClassSymbol = requiredClass("scala.beans.BooleanBeanProperty") @tu lazy val BodyAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Body") @@ -1025,7 +1042,6 @@ class Definitions { @tu lazy val UncheckedVarianceAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedVariance") @tu lazy val UncheckedCapturesAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedCaptures") @tu lazy val VolatileAnnot: ClassSymbol = requiredClass("scala.volatile") - @tu lazy val WithPureFunsAnnot: ClassSymbol = requiredClass("scala.annotation.internal.WithPureFuns") @tu lazy val BeanGetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.beanGetter") @tu lazy val BeanSetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.beanSetter") @tu lazy val FieldMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.field") @@ -1039,15 +1055,24 @@ class Definitions { @tu lazy val TargetNameAnnot: ClassSymbol = requiredClass("scala.annotation.targetName") @tu lazy val VarargsAnnot: ClassSymbol = requiredClass("scala.annotation.varargs") @tu lazy val SinceAnnot: ClassSymbol = requiredClass("scala.annotation.since") + @tu lazy val ReachCapabilityAnnot = requiredClass("scala.annotation.internal.reachCapability") @tu lazy val RequiresCapabilityAnnot: ClassSymbol = requiredClass("scala.annotation.internal.requiresCapability") @tu lazy val RetainsAnnot: ClassSymbol = requiredClass("scala.annotation.retains") @tu lazy val RetainsByNameAnnot: ClassSymbol = requiredClass("scala.annotation.retainsByName") + @tu lazy val PublicInBinaryAnnot: ClassSymbol = requiredClass("scala.annotation.publicInBinary") @tu lazy val JavaRepeatableAnnot: ClassSymbol = requiredClass("java.lang.annotation.Repeatable") + // Initialization annotations + @tu lazy val InitModule: Symbol = requiredModule("scala.annotation.init") + @tu lazy val InitWidenAnnot: ClassSymbol = InitModule.requiredClass("widen") + @tu lazy val InitRegionMethod: Symbol = InitModule.requiredMethod("region") + // A list of meta-annotations that are relevant for fields and accessors @tu lazy val NonBeanMetaAnnots: Set[Symbol] = Set(FieldMetaAnnot, GetterMetaAnnot, ParamMetaAnnot, SetterMetaAnnot, CompanionClassMetaAnnot, CompanionMethodMetaAnnot) + @tu lazy val NonBeanParamAccessorAnnots: Set[Symbol] = + Set(PublicInBinaryAnnot) @tu lazy val MetaAnnots: Set[Symbol] = NonBeanMetaAnnots + BeanGetterMetaAnnot + BeanSetterMetaAnnot @@ -1098,27 +1123,99 @@ class Definitions { // - .linkedClass: the ClassSymbol of the enumeration (class E) sym.owner.linkedClass.typeRef + object FunctionTypeOfMethod { + /** Matches a `FunctionN[...]`/`ContextFunctionN[...]` or refined 
`PolyFunction`/`FunctionN[...]`/`ContextFunctionN[...]`. + * Extracts the method type and apply info. + */ + def unapply(ft: Type)(using Context): Option[MethodOrPoly] = { + ft match + case RefinedType(parent, nme.apply, mt: MethodOrPoly) + if parent.derivesFrom(defn.PolyFunctionClass) || (mt.isInstanceOf[MethodType] && isFunctionNType(parent)) => + Some(mt) + case AppliedType(parent, targs) if isFunctionNType(ft) => + val isContextual = ft.typeSymbol.name.isContextFunction + val methodType = if isContextual then ContextualMethodType else MethodType + Some(methodType(targs.init, targs.last)) + case _ => + None + } + } + object FunctionOf { def apply(args: List[Type], resultType: Type, isContextual: Boolean = false)(using Context): Type = val mt = MethodType.companion(isContextual, false)(args, resultType) - if mt.hasErasedParams then - RefinedType(ErasedFunctionClass.typeRef, nme.apply, mt) - else - FunctionType(args.length, isContextual).appliedTo(args ::: resultType :: Nil) + if mt.hasErasedParams then RefinedType(PolyFunctionClass.typeRef, nme.apply, mt) + else FunctionNOf(args, resultType, isContextual) + def unapply(ft: Type)(using Context): Option[(List[Type], Type, Boolean)] = { - ft.dealias match - case RefinedType(parent, nme.apply, mt: MethodType) if isErasedFunctionType(parent) => + ft match + case PolyFunctionOf(mt: MethodType) => Some(mt.paramInfos, mt.resType, mt.isContextualMethod) + case AppliedType(parent, targs) if isFunctionNType(ft) => + Some(targs.init, targs.last, ft.typeSymbol.name.isContextFunction) case _ => - val tsym = ft.dealias.typeSymbol - if isFunctionSymbol(tsym) && ft.isRef(tsym) then - val targs = ft.dealias.argInfos - if (targs.isEmpty) None - else Some(targs.init, targs.last, tsym.name.isContextFunction) - else None + None } } + object FunctionNOf { + /** Create a `FunctionN` or `ContextFunctionN` type applied to the arguments and result type */ + def apply(args: List[Type], resultType: Type, isContextual: Boolean = false)(using Context): Type = + FunctionType(args.length, isContextual).appliedTo(args ::: resultType :: Nil) + + /** Matches a (possibly aliased) `FunctionN[...]` or `ContextFunctionN[...]`. + * Extracts the list of function argument types, the result type and whether the function is contextual. + */ + def unapply(tpe: AppliedType)(using Context): Option[(List[Type], Type, Boolean)] = { + if !isFunctionNType(tpe) then None + else Some(tpe.args.init, tpe.args.last, tpe.typeSymbol.name.isContextFunction) + } + } + + object RefinedFunctionOf { + /** Matches a refined `PolyFunction`/`FunctionN[...]`/`ContextFunctionN[...]`. + * Extracts the method type and apply info. + */ + def unapply(tpe: RefinedType)(using Context): Option[MethodOrPoly] = { + tpe.refinedInfo match + case mt: MethodOrPoly + if tpe.refinedName == nme.apply + && (tpe.parent.derivesFrom(defn.PolyFunctionClass) || isFunctionNType(tpe.parent)) => + Some(mt) + case _ => None + } + } + + object PolyFunctionOf { + + /** Creates a refined `PolyFunction` with an `apply` method with the given info. */ + def apply(mt: MethodOrPoly)(using Context): Type = + assert(isValidPolyFunctionInfo(mt), s"Not a valid PolyFunction refinement: $mt") + RefinedType(PolyFunctionClass.typeRef, nme.apply, mt) + + /** Matches a refined `PolyFunction` type and extracts the apply info.
+ * + * Pattern: `PolyFunction { def apply: $mt }` + */ + def unapply(tpe: RefinedType)(using Context): Option[MethodOrPoly] = + tpe.refinedInfo match + case mt: MethodOrPoly + if tpe.refinedName == nme.apply && tpe.parent.derivesFrom(defn.PolyFunctionClass) => + Some(mt) + case _ => None + + def isValidPolyFunctionInfo(info: Type)(using Context): Boolean = + def isValidMethodType(info: Type) = info match + case info: MethodType => + !info.resType.isInstanceOf[MethodOrPoly] && // Has only one parameter list + !info.isVarArgsMethod && + !info.isMethodWithByNameArgs // No by-name parameters + case _ => false + info match + case info: PolyType => isValidMethodType(info.resType) + case _ => isValidMethodType(info) + } + object PartialFunctionOf { def apply(arg: Type, result: Type)(using Context): Type = PartialFunctionClass.typeRef.appliedTo(arg :: result :: Nil) @@ -1191,8 +1288,8 @@ class Definitions { */ object ByNameFunction: def apply(tp: Type)(using Context): Type = tp match - case tp @ EventuallyCapturingType(tp1, refs) if tp.annot.symbol == RetainsByNameAnnot => - CapturingType(apply(tp1), refs) + case tp @ RetainingType(tp1, refs) if tp.annot.symbol == RetainsByNameAnnot => + RetainingType(apply(tp1), refs) case _ => defn.ContextFunction0.typeRef.appliedTo(tp :: Nil) def unapply(tp: Type)(using Context): Option[Type] = tp match @@ -1317,7 +1414,7 @@ class Definitions { ), privateWithin = patch.privateWithin, coord = denot.symbol.coord, - assocFile = denot.symbol.associatedFile + compUnitInfo = denot.symbol.compilationUnitInfo ) def makeNonClassSymbol(patch: Symbol) = @@ -1362,7 +1459,9 @@ class Definitions { denot.sourceModule.info = denot.typeRef // we run into a cyclic reference when patching if this line is omitted patch2(denot, patchCls) - if denot.name == tpnme.Predef.moduleClassName && denot.symbol == ScalaPredefModuleClass then + if ctx.settings.YcompileScala2Library.value then + () + else if denot.name == tpnme.Predef.moduleClassName && denot.symbol == ScalaPredefModuleClass then patchWith(ScalaPredefModuleClassPatch) else if denot.name == tpnme.language.moduleClassName && denot.symbol == LanguageModuleClass then patchWith(LanguageModuleClassPatch) @@ -1377,11 +1476,12 @@ class Definitions { /** Base classes that are assumed to be pure for the purposes of capture checking. * Every class inheriting from a pure baseclass is pure. 
*/ - @tu lazy val pureBaseClasses = Set(defn.AnyValClass, defn.ThrowableClass) + @tu lazy val pureBaseClasses = Set(ThrowableClass, PureClass) /** Non-inheritable classes that are assumed to be pure for the purposes of capture checking. */ - @tu lazy val pureSimpleClasses = Set(StringClass, NothingClass, NullClass) + @tu lazy val pureSimpleClasses = + Set(StringClass, NothingClass, NullClass) ++ ScalaValueClasses() @tu lazy val AbstractFunctionType: Array[TypeRef] = mkArityArray("scala.runtime.AbstractFunction", MaxImplementedFunctionArity, 0).asInstanceOf[Array[TypeRef]] val AbstractFunctionClassPerRun: PerRun[Array[Symbol]] = new PerRun(AbstractFunctionType.map(_.symbol.asClass)) @@ -1465,8 +1565,6 @@ class Definitions { lazy val PolyFunctionClass = requiredClass("scala.PolyFunction") def PolyFunctionType = PolyFunctionClass.typeRef - lazy val ErasedFunctionClass = requiredClass("scala.runtime.ErasedFunction") - /** If `cls` is a class in the scala package, its name, otherwise EmptyTypeName */ def scalaClassName(cls: Symbol)(using Context): TypeName = cls.denot match case clsd: ClassDenotation if clsd.owner eq ScalaPackageClass => @@ -1529,8 +1627,6 @@ class Definitions { /** Is a synthetic function class * - FunctionN for N > 22 * - ContextFunctionN for N >= 0 - * - ErasedFunctionN for N > 0 - * - ErasedContextFunctionN for N > 0 */ def isSyntheticFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isSyntheticFunction @@ -1546,8 +1642,6 @@ class Definitions { * - FunctionN for 22 > N >= 0 remains as FunctionN * - ContextFunctionN for N > 22 becomes FunctionXXL * - ContextFunctionN for N <= 22 becomes FunctionN - * - ErasedFunctionN becomes Function0 - * - ImplicitErasedFunctionN becomes Function0 * - anything else becomes a NoType */ def functionTypeErasure(cls: Symbol): Type = @@ -1660,26 +1754,6 @@ class Definitions { else TypeOps.nestedPairs(elems) } - def tupleTypes(tp: Type, bound: Int = Int.MaxValue)(using Context): Option[List[Type]] = { - @tailrec def rec(tp: Type, acc: List[Type], bound: Int): Option[List[Type]] = tp.normalized.dealias match { - case _ if bound < 0 => Some(acc.reverse) - case tp: AppliedType if PairClass == tp.classSymbol => rec(tp.args(1), tp.args.head :: acc, bound - 1) - case tp: AppliedType if isTupleNType(tp) => Some(acc.reverse ::: tp.args) - case tp: TermRef if tp.symbol == defn.EmptyTupleModule => Some(acc.reverse) - case _ => None - } - rec(tp.stripTypeVar, Nil, bound) - } - - def isSmallGenericTuple(tp: Type)(using Context): Boolean = - if tp.derivesFrom(defn.PairClass) && !defn.isTupleNType(tp.widenDealias) then - // If this is a generic tuple we need to cast it to make the TupleN/ members accessible. - // This works only for generic tuples of known size up to 22. - defn.tupleTypes(tp.widenTermRefExpr) match - case Some(elems) if elems.length <= Definitions.MaxTupleArity => true - case _ => false - else false - def isProductSubType(tp: Type)(using Context): Boolean = tp.derivesFrom(ProductClass) /** Is `tp` (an alias) of either a scala.FunctionN or a scala.ContextFunctionN @@ -1700,25 +1774,22 @@ class Definitions { * - scala.FunctionN * - scala.ContextFunctionN */ - def isFunctionType(tp: Type)(using Context): Boolean = + def isFunctionNType(tp: Type)(using Context): Boolean = isNonRefinedFunction(tp.dropDependentRefinement) - /** Is `tp` a specialized, refined function type? Either an `ErasedFunction` or a `PolyFunction`.
*/ - def isRefinedFunctionType(tp: Type)(using Context): Boolean = - tp.derivesFrom(defn.PolyFunctionClass) || isErasedFunctionType(tp) - /** Returns whether `tp` is an instance or a refined instance of: * - scala.FunctionN * - scala.ContextFunctionN - * - ErasedFunction * - PolyFunction */ - def isFunctionOrPolyType(tp: Type)(using Context): Boolean = - isFunctionType(tp) || isRefinedFunctionType(tp) + def isFunctionType(tp: Type)(using Context): Boolean = + isFunctionNType(tp) + || tp.derivesFrom(defn.PolyFunctionClass) // TODO check for refinement? private def withSpecMethods(cls: ClassSymbol, bases: List[Name], paramTypes: Set[TypeRef]) = - for base <- bases; tp <- paramTypes do - cls.enter(newSymbol(cls, base.specializedName(List(tp)), Method, ExprType(tp))) + if !ctx.settings.YcompileScala2Library.value then + for base <- bases; tp <- paramTypes do + cls.enter(newSymbol(cls, base.specializedName(List(tp)), Method, ExprType(tp))) cls @tu lazy val Tuple1: ClassSymbol = withSpecMethods(requiredClass("scala.Tuple1"), List(nme._1), Tuple1SpecializedParamTypes) @@ -1731,27 +1802,27 @@ class Definitions { @tu lazy val Tuple2SpecializedParamClasses: PerRun[Set[Symbol]] = new PerRun(Tuple2SpecializedParamTypes.map(_.symbol)) // Specialized type parameters defined for scala.Function{0,1,2}. - @tu lazy val Function1SpecializedParamTypes: collection.Set[TypeRef] = - Set(IntType, LongType, FloatType, DoubleType) - @tu lazy val Function2SpecializedParamTypes: collection.Set[TypeRef] = - Set(IntType, LongType, DoubleType) - @tu lazy val Function0SpecializedReturnTypes: collection.Set[TypeRef] = - ScalaNumericValueTypeList.toSet + UnitType + BooleanType - @tu lazy val Function1SpecializedReturnTypes: collection.Set[TypeRef] = - Set(UnitType, BooleanType, IntType, FloatType, LongType, DoubleType) - @tu lazy val Function2SpecializedReturnTypes: collection.Set[TypeRef] = + @tu lazy val Function1SpecializedParamTypes: List[TypeRef] = + List(IntType, LongType, FloatType, DoubleType) + @tu lazy val Function2SpecializedParamTypes: List[TypeRef] = + List(IntType, LongType, DoubleType) + @tu lazy val Function0SpecializedReturnTypes: List[TypeRef] = + ScalaNumericValueTypeList :+ UnitType :+ BooleanType + @tu lazy val Function1SpecializedReturnTypes: List[TypeRef] = + List(UnitType, BooleanType, IntType, FloatType, LongType, DoubleType) + @tu lazy val Function2SpecializedReturnTypes: List[TypeRef] = Function1SpecializedReturnTypes @tu lazy val Function1SpecializedParamClasses: PerRun[collection.Set[Symbol]] = - new PerRun(Function1SpecializedParamTypes.map(_.symbol)) + new PerRun(Function1SpecializedParamTypes.toSet.map(_.symbol)) @tu lazy val Function2SpecializedParamClasses: PerRun[collection.Set[Symbol]] = - new PerRun(Function2SpecializedParamTypes.map(_.symbol)) + new PerRun(Function2SpecializedParamTypes.toSet.map(_.symbol)) @tu lazy val Function0SpecializedReturnClasses: PerRun[collection.Set[Symbol]] = - new PerRun(Function0SpecializedReturnTypes.map(_.symbol)) + new PerRun(Function0SpecializedReturnTypes.toSet.map(_.symbol)) @tu lazy val Function1SpecializedReturnClasses: PerRun[collection.Set[Symbol]] = - new PerRun(Function1SpecializedReturnTypes.map(_.symbol)) + new PerRun(Function1SpecializedReturnTypes.toSet.map(_.symbol)) @tu lazy val Function2SpecializedReturnClasses: PerRun[collection.Set[Symbol]] = - new PerRun(Function2SpecializedReturnTypes.map(_.symbol)) + new PerRun(Function2SpecializedReturnTypes.toSet.map(_.symbol)) def isSpecializableTuple(base: Symbol, args: List[Type])(using 
Context): Boolean = args.length <= 2 && base.isClass && TupleSpecializedClasses.exists(base.asClass.derivesFrom) && args.match @@ -1759,6 +1830,7 @@ class Definitions { case List(x, y) => Tuple2SpecializedParamClasses().contains(x.classSymbol) && Tuple2SpecializedParamClasses().contains(y.classSymbol) case _ => false && base.owner.denot.info.member(base.name.specializedName(args)).exists // when dotc compiles the stdlib there are no specialised classes + && !ctx.settings.YcompileScala2Library.value // We do not add the specialized TupleN methods/classes when compiling the stdlib def isSpecializableFunction(cls: ClassSymbol, paramTypes: List[Type], retType: Type)(using Context): Boolean = paramTypes.length <= 2 @@ -1780,19 +1852,20 @@ class Definitions { case _ => false }) + && !ctx.settings.YcompileScala2Library.value // We do not add the specialized FunctionN methods/classes when compiling the stdlib - @tu lazy val Function0SpecializedApplyNames: collection.Set[TermName] = + @tu lazy val Function0SpecializedApplyNames: List[TermName] = for r <- Function0SpecializedReturnTypes yield nme.apply.specializedFunction(r, Nil).asTermName - @tu lazy val Function1SpecializedApplyNames: collection.Set[TermName] = + @tu lazy val Function1SpecializedApplyNames: List[TermName] = for r <- Function1SpecializedReturnTypes t1 <- Function1SpecializedParamTypes yield nme.apply.specializedFunction(r, List(t1)).asTermName - @tu lazy val Function2SpecializedApplyNames: collection.Set[TermName] = + @tu lazy val Function2SpecializedApplyNames: List[TermName] = for r <- Function2SpecializedReturnTypes t1 <- Function2SpecializedParamTypes @@ -1801,7 +1874,7 @@ class Definitions { nme.apply.specializedFunction(r, List(t1, t2)).asTermName @tu lazy val FunctionSpecializedApplyNames: collection.Set[Name] = - Function0SpecializedApplyNames ++ Function1SpecializedApplyNames ++ Function2SpecializedApplyNames + Set.concat(Function0SpecializedApplyNames, Function1SpecializedApplyNames, Function2SpecializedApplyNames) def functionArity(tp: Type)(using Context): Int = tp.functionArgInfos.length - 1 @@ -1815,10 +1888,10 @@ class Definitions { tp.stripTypeVar.dealias match case tp1: TypeParamRef if ctx.typerState.constraint.contains(tp1) => asContextFunctionType(TypeComparer.bounds(tp1).hiBound) - case tp1 @ RefinedType(parent, nme.apply, mt: MethodType) if isErasedFunctionType(parent) && mt.isContextualMethod => + case tp1 @ PolyFunctionOf(mt: MethodType) if mt.isContextualMethod => tp1 case tp1 => - if tp1.typeSymbol.name.isContextFunction && isFunctionType(tp1) then tp1 + if tp1.typeSymbol.name.isContextFunction && isFunctionNType(tp1) then tp1 else NoType /** Is `tp` a context function type? */ @@ -1830,28 +1903,14 @@ class Definitions { * types `As`, the result type `B` and whether the type is an erased context function. */ object ContextFunctionType: - def unapply(tp: Type)(using Context): Option[(List[Type], Type, List[Boolean])] = - if ctx.erasedTypes then - atPhase(erasurePhase)(unapply(tp)) - else - asContextFunctionType(tp) match - case RefinedType(parent, nme.apply, mt: MethodType) if isErasedFunctionType(parent) => - Some((mt.paramInfos, mt.resType, mt.erasedParams)) - case tp1 if tp1.exists => - val args = tp1.functionArgInfos - val erasedParams = erasedFunctionParameters(tp1) - Some((args.init, args.last, erasedParams)) - case _ => None - - /* Returns a list of erased booleans marking whether parameters are erased, for a function type.
*/ - def erasedFunctionParameters(tp: Type)(using Context): List[Boolean] = tp.dealias match { - case RefinedType(parent, nme.apply, mt: MethodType) => mt.erasedParams - case tp if isFunctionType(tp) => List.fill(functionArity(tp)) { false } - case _ => Nil - } - - def isErasedFunctionType(tp: Type)(using Context): Boolean = - tp.derivesFrom(defn.ErasedFunctionClass) + def unapply(tp: Type)(using Context): Option[(List[Type], Type)] = + asContextFunctionType(tp) match + case PolyFunctionOf(mt: MethodType) => + Some((mt.paramInfos, mt.resType)) + case tp1 if tp1.exists => + val args = tp1.functionArgInfos + Some((args.init, args.last)) + case _ => None /** A whitelist of Scala-2 classes that are known to be pure */ def isAssuredNoInits(sym: Symbol): Boolean = @@ -1944,11 +2003,19 @@ class Definitions { case Some(pkgs) => pkgs.contains(sym.owner) case none => false + /** Experimental definitions that can nevertheless be accessed from a stable + * compiler if capture checking is enabled. + */ + @tu lazy val ccExperimental: Set[Symbol] = Set( + CapsModule, CapsModule.moduleClass, PureClass, + CapabilityAnnot, RequiresCapabilityAnnot, + RetainsAnnot, RetainsByNameAnnot) + // ----- primitive value class machinery ------------------------------------------ class PerRun[T](generate: Context ?=> T) { private var current: RunId = NoRunId - private var cached: T = _ + private var cached: T = uninitialized def apply()(using Context): T = { if (current != ctx.runId) { cached = generate @@ -1988,20 +2055,21 @@ class Definitions { vcls } + def boxedClass(cls: Symbol): ClassSymbol = + if cls eq ByteClass then BoxedByteClass + else if cls eq ShortClass then BoxedShortClass + else if cls eq CharClass then BoxedCharClass + else if cls eq IntClass then BoxedIntClass + else if cls eq LongClass then BoxedLongClass + else if cls eq FloatClass then BoxedFloatClass + else if cls eq DoubleClass then BoxedDoubleClass + else if cls eq UnitClass then BoxedUnitClass + else if cls eq BooleanClass then BoxedBooleanClass + else sys.error(s"Not a primitive value type: $cls") + /** The type of the boxed class corresponding to primitive value type `tp`. 
*/ - def boxedType(tp: Type)(using Context): TypeRef = { - val cls = tp.classSymbol - if (cls eq ByteClass) BoxedByteClass - else if (cls eq ShortClass) BoxedShortClass - else if (cls eq CharClass) BoxedCharClass - else if (cls eq IntClass) BoxedIntClass - else if (cls eq LongClass) BoxedLongClass - else if (cls eq FloatClass) BoxedFloatClass - else if (cls eq DoubleClass) BoxedDoubleClass - else if (cls eq UnitClass) BoxedUnitClass - else if (cls eq BooleanClass) BoxedBooleanClass - else sys.error(s"Not a primitive value type: $tp") - }.typeRef + def boxedType(tp: Type)(using Context): TypeRef = + boxedClass(tp.classSymbol).typeRef def unboxedType(tp: Type)(using Context): TypeRef = { val cls = tp.classSymbol @@ -2041,15 +2109,17 @@ class Definitions { def isValueSubClass(sym1: Symbol, sym2: Symbol): Boolean = valueTypeEnc(sym2.asClass.name) % valueTypeEnc(sym1.asClass.name) == 0 - @tu lazy val specialErasure: SimpleIdentityMap[Symbol, ClassSymbol] = - SimpleIdentityMap.empty[Symbol] - .updated(AnyClass, ObjectClass) - .updated(MatchableClass, ObjectClass) - .updated(AnyValClass, ObjectClass) - .updated(SingletonClass, ObjectClass) - .updated(TupleClass, ProductClass) - .updated(NonEmptyTupleClass, ProductClass) - .updated(PairClass, ObjectClass) + @tu lazy val specialErasure: collection.Map[Symbol, ClassSymbol] = + val m = mutable.Map[Symbol, ClassSymbol]() + m(AnyClass) = ObjectClass + m(MatchableClass) = ObjectClass + m(PureClass) = ObjectClass + m(AnyValClass) = ObjectClass + m(SingletonClass) = ObjectClass + m(TupleClass) = ProductClass + m(NonEmptyTupleClass) = ProductClass + m(PairClass) = ObjectClass + m // ----- Initialization --------------------------------------------------- @@ -2086,8 +2156,11 @@ class Definitions { this.initCtx = ctx if (!isInitialized) { // force initialization of every symbol that is synthesized or hijacked by the compiler - val forced = - syntheticCoreClasses ++ syntheticCoreMethods ++ ScalaValueClasses() :+ JavaEnumClass + syntheticCoreClasses + syntheticCoreMethods + ScalaValueClasses() + JavaEnumClass + // end force initialization isInitialized = true } addSyntheticSymbolsComments @@ -2166,7 +2239,7 @@ class Definitions { """.stripMargin) add(Any_hashCode, - """/** Calculate a hash code value for the object. + """/** Calculates a hash code value for the object. | * | * The default hashing algorithm is platform dependent. 
| * diff --git a/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala b/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala index 6690cae3a142..59982fb99b5f 100644 --- a/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala +++ b/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala @@ -1,13 +1,13 @@ package dotty.tools.dotc package core -import Periods._ -import SymDenotations._ -import Contexts._ -import Types._ -import Symbols._ -import Denotations._ -import Phases._ +import Periods.* +import SymDenotations.* +import Contexts.* +import Types.* +import Symbols.* +import Denotations.* +import Phases.* object DenotTransformers { diff --git a/compiler/src/dotty/tools/dotc/core/Denotations.scala b/compiler/src/dotty/tools/dotc/core/Denotations.scala index e56cc453d34d..3608f16e3478 100644 --- a/compiler/src/dotty/tools/dotc/core/Denotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Denotations.scala @@ -3,27 +3,29 @@ package dotc package core import SymDenotations.{ SymDenotation, ClassDenotation, NoDenotation, LazyType, stillValid, acceptStale, traceInvalid } -import Contexts._ -import Names._ -import NameKinds._ -import StdNames._ +import Contexts.* +import Names.* +import NameKinds.* +import StdNames.* import Symbols.NoSymbol -import Symbols._ -import Types._ -import Periods._ -import Flags._ -import DenotTransformers._ -import Decorators._ -import Signature.MatchDegree._ -import printing.Texts._ +import Symbols.* +import Types.* +import Periods.* +import Flags.* +import DenotTransformers.* +import Decorators.* +import Signature.MatchDegree.* +import printing.Texts.* import printing.Printer import io.AbstractFile import config.Config import config.Printers.overload -import util.common._ +import util.common.* import typer.ProtoTypes.NoViewsAllowed import collection.mutable.ListBuffer +import scala.compiletime.uninitialized + /** Denotations represent the meaning of symbols and named types. * The following diagram shows how the principal types of denotations * and their denoting entities relate to each other. Lines ending in @@ -121,8 +123,8 @@ object Denotations { /** Map `f` over all single denotations and aggregate the results with `g`. */ def aggregate[T](f: SingleDenotation => T, g: (T, T) => T): T - private var cachedPrefix: Type = _ - private var cachedAsSeenFrom: AsSeenFromResult = _ + private var cachedPrefix: Type = uninitialized + private var cachedAsSeenFrom: AsSeenFromResult = uninitialized private var validAsSeenFrom: Period = Nowhere type AsSeenFromResult <: PreDenotation @@ -261,6 +263,10 @@ object Denotations { /** Does this denotation have an alternative that satisfies the predicate `p`? */ def hasAltWith(p: SingleDenotation => Boolean): Boolean + inline final def hasAltWithInline(inline p: SingleDenotation => Boolean): Boolean = inline this match + case mbr: SingleDenotation => mbr.exists && p(mbr) + case mbr => mbr.hasAltWith(p) + /** The denotation made up from the alternatives of this denotation that * are accessible from prefix `pre`, or NoDenotation if no accessible alternative exists. 
*/ @@ -292,14 +298,13 @@ object Denotations { name: Name, site: Denotation = NoDenotation, args: List[Type] = Nil, - source: AbstractFile | Null = null, generateStubs: Boolean = true) (p: Symbol => Boolean) (using Context): Symbol = disambiguate(p) match { case m @ MissingRef(ownerd, name) if generateStubs => if ctx.settings.YdebugMissingRefs.value then m.ex.printStackTrace() - newStubSymbol(ownerd.symbol, name, source) + newStubSymbol(ownerd.symbol, name) case NoDenotation | _: NoQualifyingRef | _: MissingRef => def argStr = if (args.isEmpty) "" else i" matching ($args%, %)" val msg = @@ -880,7 +885,6 @@ object Denotations { /** Install this denotation to be the result of the given denotation transformer. * This is the implementation of the same-named method in SymDenotations. * It's placed here because it needs access to private fields of SingleDenotation. - * @pre Can only be called in `phase.next`. */ protected def installAfter(phase: DenotTransformer)(using Context): Unit = { val targetId = phase.next.id @@ -888,16 +892,21 @@ object Denotations { else { val current = symbol.current // println(s"installing $this after $phase/${phase.id}, valid = ${current.validFor}") - // printPeriods(current) + // println(current.definedPeriodsString) this.validFor = Period(ctx.runId, targetId, current.validFor.lastPhaseId) if (current.validFor.firstPhaseId >= targetId) current.replaceWith(this) + symbol.denot + // Let symbol point to updated denotation + // Without this we can get problems when we immediately recompute the denotation + // at another phase since the invariant that symbol used to point to a valid + // denotation is lost. else { current.validFor = Period(ctx.runId, current.validFor.firstPhaseId, targetId - 1) insertAfter(current) } + // println(current.definedPeriodsString) } - // printPeriods(this) } /** Apply a transformation `f` to all denotations in this group that start at or after @@ -982,18 +991,18 @@ object Denotations { if (symbol == NoSymbol) symbol.toString else s"" - def definedPeriodsString: String = { + /** Show all defined periods and the info of the denotation at each */ + def definedPeriodsString(using Context): String = { var sb = new StringBuilder() var cur = this var cnt = 0 - while ({ - sb.append(" " + cur.validFor) + while + sb.append(i" ${cur.validFor.toString}:${cur.infoOrCompleter}") cur = cur.nextInRun cnt += 1 if (cnt > MaxPossiblePhaseId) { sb.append(" ..."); cur = this } cur ne this - }) - () + do () sb.toString } diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index 8100bea374eb..8c1b715e3e30 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -188,7 +188,7 @@ object Flags { flag } - def commonFlags(flagss: FlagSet*): FlagSet = union(flagss.map(_.toCommonFlags): _*) + def commonFlags(flagss: FlagSet*): FlagSet = union(flagss.map(_.toCommonFlags)*) /** The empty flag set */ val EmptyFlags: FlagSet = FlagSet(0) @@ -308,8 +308,8 @@ object Flags { */ val (_, StableRealizable @ _, _) = newFlags(24, "") - /** A case parameter accessor */ - val (_, CaseAccessor @ _, _) = newFlags(25, "") + /** A case parameter accessor / an unpickled Scala 2 TASTy (only for Scala 2 stdlib) */ + val (_, CaseAccessor @ _, Scala2Tasty @ _) = newFlags(25, "", "") /** A Scala 2x super accessor / an unpickled Scala 2.x class */ val (SuperParamAliasOrScala2x @ _, SuperParamAlias @ _, Scala2x @ _) = newFlags(26, "", "") @@ -404,10 +404,10 @@ object Flags { /** 
Children were queried on this class */ val (_, _, ChildrenQueried @ _) = newFlags(56, "") - /** A module variable (Scala 2.x only) - * (re-used as a flag for private parameter accessors in Recheck) + /** A module variable (Scala 2.x only) / a capture-checked class + * (Scala2ModuleVar is re-used as a flag for private parameter accessors in Recheck) */ - val (_, Scala2ModuleVar @ _, _) = newFlags(57, "") + val (_, Scala2ModuleVar @ _, CaptureChecked @ _) = newFlags(57, "/") /** A macro */ val (Macro @ _, _, _) = newFlags(58, "") @@ -576,7 +576,7 @@ object Flags { val InlineMethod: FlagSet = Inline | Method val InlineParam: FlagSet = Inline | Param val InlineByNameProxy: FlagSet = InlineProxy | Method - val JavaEnumTrait: FlagSet = JavaDefined | Enum // A Java enum trait + val JavaEnum: FlagSet = JavaDefined | Enum // A Java enum trait val JavaEnumValue: FlagSet = JavaDefined | EnumValue // A Java enum value val StaticProtected: FlagSet = JavaDefined | JavaStatic | Protected // Java symbol which is `protected` and `static` val JavaModule: FlagSet = JavaDefined | Module // A Java companion object diff --git a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala index bb65cce84042..1cbfabc08958 100644 --- a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala @@ -6,7 +6,7 @@ import Contexts.*, Decorators.*, Symbols.*, Types.* import NameKinds.UniqueName import config.Printers.{gadts, gadtsConstr} import util.{SimpleIdentitySet, SimpleIdentityMap} -import printing._ +import printing.* import scala.annotation.tailrec import scala.annotation.internal.sharable @@ -73,28 +73,31 @@ class GadtConstraint private ( def fullLowerBound(param: TypeParamRef)(using Context): Type = val self = externalize(param) - constraint.minLower(param).foldLeft(nonParamBounds(param).lo) { (acc, p) => - externalize(p) match + constraint.minLower(param).foldLeft(nonParamBounds(param).lo) { (acc, loParam) => + externalize(loParam) match // drop any lower param that is a GADT symbol - // and is upper-bounded by a non-Any super-type of the original parameter - // e.g. in pos/i14287.min + // and is upper-bounded by the original parameter + // e.g. in pos/i14287.min: + // case Foo.Bar[B$1](Foo.Bar[B$2](x)) => Foo.Bar(x) + // after pattern type constraining: // B$1 had info <: X and fullBounds >: B$2 <: X, and // B$2 had info <: B$1 and fullBounds <: B$1 - // We can use the info of B$2 to drop the lower-bound of B$1 + // If we keep these fullBounds, it would be a bidirectional definition. + // So instead we can use the info of B$2 to drop the lower-bound of B$1 // and return non-bidirectional bounds B$1 <: X and B$2 <: B$1. 
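To make the `B$1`/`B$2` bookkeeping above concrete, here is a minimal sketch of the kind of nested GADT match involved — a hypothetical reconstruction in the spirit of `pos/i14287.min`, not the actual test file, assuming a covariant `Foo` so the rebuilt value conforms via `B$2 <: B$1 <: X`:

```scala
// Hypothetical minimization (assumed shape, not the real pos/i14287.min).
enum Foo[+X]:
  case Bar[B](x: Foo[B]) extends Foo[B]

def unwrap[X](f: Foo[X]): Foo[X] = f match
  // The outer Bar binds B$1 with B$1 <: X; the inner Bar binds B$2 with
  // B$2 <: B$1. fullLowerBound drops B$2 from B$1's lower bound, keeping
  // the constraint non-bidirectional: B$1 <: X and B$2 <: B$1.
  case Foo.Bar(Foo.Bar(x)) => Foo.Bar(x)
  case other => other
```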
- case tp: TypeRef if tp.symbol.isPatternBound && self =:= tp.info.hiBound => acc - case tp => acc | tp + case lo: TypeRef if lo.symbol.isPatternBound && lo.info.hiBound.frozen_<:<(self) => acc + case lo => acc | lo } def fullUpperBound(param: TypeParamRef)(using Context): Type = val self = externalize(param) - constraint.minUpper(param).foldLeft(nonParamBounds(param).hi) { (acc, u) => - externalize(u) match - case tp: TypeRef if tp.symbol.isPatternBound && self =:= tp.info.loBound => acc // like fullLowerBound - case tp => + constraint.minUpper(param).foldLeft(nonParamBounds(param).hi) { (acc, hiParam) => + externalize(hiParam) match + case hi: TypeRef if hi.symbol.isPatternBound && self.frozen_<:<(hi.info.loBound) => acc // like fullLowerBound + case hi => // Any as the upper bound means "no bound", but if F is higher-kinded, // Any & F = F[_]; this is wrong for us so we need to short-circuit - if acc.isAny then tp else acc & tp + if acc.isAny then hi else acc & hi } def externalize(tp: Type, theMap: TypeMap | Null = null)(using Context): Type = tp match diff --git a/compiler/src/dotty/tools/dotc/core/Hashable.scala b/compiler/src/dotty/tools/dotc/core/Hashable.scala index 79da5f1dcd6f..5ab2d4a2af03 100644 --- a/compiler/src/dotty/tools/dotc/core/Hashable.scala +++ b/compiler/src/dotty/tools/dotc/core/Hashable.scala @@ -1,7 +1,7 @@ package dotty.tools.dotc package core -import Types._ +import Types.* import scala.util.hashing.{ MurmurHash3 => hashing } import annotation.tailrec @@ -40,7 +40,7 @@ object Hashable { } trait Hashable { - import Hashable._ + import Hashable.* protected def hashSeed: Int = getClass.hashCode diff --git a/compiler/src/dotty/tools/dotc/core/JavaNullInterop.scala b/compiler/src/dotty/tools/dotc/core/JavaNullInterop.scala index 60fc4a4274e0..6244923cfb52 100644 --- a/compiler/src/dotty/tools/dotc/core/JavaNullInterop.scala +++ b/compiler/src/dotty/tools/dotc/core/JavaNullInterop.scala @@ -1,11 +1,11 @@ package dotty.tools.dotc package core -import Contexts._ +import Contexts.* import Flags.JavaDefined import StdNames.nme -import Symbols._ -import Types._ +import Symbols.* +import Types.* /** This module defines methods to interpret types of Java symbols, which are implicitly nullable in Java, * as Scala types, which are explicitly nullable. 
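In practice, the interpretation described above means that under `-Yexplicit-nulls` the nullability Java leaves implicit is surfaced in the Scala types of Java members. A minimal user-level sketch (illustrative only, not part of this patch):

```scala
// The Java method `String System.getenv(String)` may return null, so with
// -Yexplicit-nulls it is seen from Scala as returning `String | Null`.
val path: String | Null = System.getenv("PATH")
val len: Int =
  if path == null then 0
  else path.length // flow typing narrows `path` to `String` in this branch
```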
diff --git a/compiler/src/dotty/tools/dotc/core/MacroClassLoader.scala b/compiler/src/dotty/tools/dotc/core/MacroClassLoader.scala index d8f41ef99b11..b6b316ac14d9 100644 --- a/compiler/src/dotty/tools/dotc/core/MacroClassLoader.scala +++ b/compiler/src/dotty/tools/dotc/core/MacroClassLoader.scala @@ -1,6 +1,6 @@ package dotty.tools.dotc.core -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.util.Property import dotty.tools.dotc.reporting.trace import dotty.tools.io.ClassPath diff --git a/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala index 60ebc95e7bed..fb278ab92dc9 100644 --- a/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala +++ b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala @@ -2,7 +2,7 @@ package dotty.tools package dotc package core -import Types._, Contexts._, Symbols._, Decorators._ +import Types.*, Contexts.*, Symbols.*, Decorators.* import util.Property import Names.Name @@ -12,11 +12,10 @@ object MatchTypeTrace: private enum TraceEntry: case TryReduce(scrut: Type) - case NoMatches(scrut: Type, cases: List[Type]) - case Stuck(scrut: Type, stuckCase: Type, otherCases: List[Type]) - case NoInstance(scrut: Type, stuckCase: Type, fails: List[(Name, TypeBounds)]) + case Stuck(scrut: Type, stuckCase: MatchTypeCaseSpec, otherCases: List[MatchTypeCaseSpec]) + case NoInstance(scrut: Type, stuckCase: MatchTypeCaseSpec, fails: List[(Name, TypeBounds)]) case EmptyScrutinee(scrut: Type) - import TraceEntry._ + import TraceEntry.* private class MatchTrace: var entries: List[TraceEntry] = Nil @@ -51,20 +50,14 @@ object MatchTypeTrace: case _ => case _ => - /** Record a failure that scrutinee `scrut` does not match any case in `cases`. - * Only the first failure is recorded. - */ - def noMatches(scrut: Type, cases: List[Type])(using Context) = - matchTypeFail(NoMatches(scrut, cases)) - /** Record a failure that scrutinee `scrut` does not match `stuckCase` but is * not disjoint from it either, which means that the remaining cases `otherCases` * cannot be visited. Only the first failure is recorded. */ - def stuck(scrut: Type, stuckCase: Type, otherCases: List[Type])(using Context) = + def stuck(scrut: Type, stuckCase: MatchTypeCaseSpec, otherCases: List[MatchTypeCaseSpec])(using Context) = matchTypeFail(Stuck(scrut, stuckCase, otherCases)) - def noInstance(scrut: Type, stuckCase: Type, fails: List[(Name, TypeBounds)])(using Context) = + def noInstance(scrut: Type, stuckCase: MatchTypeCaseSpec, fails: List[(Name, TypeBounds)])(using Context) = matchTypeFail(NoInstance(scrut, stuckCase, fails)) /** Record a failure that scrutinee `scrut` is provably empty. 
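For orientation, this is the kind of user-level code whose failed reduction produces the trace entries above (a minimal sketch, not part of this patch):

```scala
// A match type whose reduction can get "stuck" on an abstract scrutinee.
type Elem[X] = X match
  case String   => Char
  case Array[t] => t

// Elem[String] reduces to Char, but inside this body Elem[X] is stuck:
// the abstract X neither matches `String` nor is provably disjoint from
// it, so reduction stops and a Stuck entry can be recorded in the trace.
def head[X](xs: X): Elem[X] = ???
```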
@@ -87,23 +80,21 @@ object MatchTypeTrace: case _ => op + def caseText(spec: MatchTypeCaseSpec)(using Context): String = + caseText(spec.origMatchCase) + def caseText(tp: Type)(using Context): String = tp match case tp: HKTypeLambda => caseText(tp.resultType) case defn.MatchCase(any, body) if any eq defn.AnyType => i"case _ => $body" case defn.MatchCase(pat, body) => i"case $pat => $body" case _ => i"case $tp" - private def casesText(cases: List[Type])(using Context) = + private def casesText(cases: List[MatchTypeCaseSpec])(using Context) = i"${cases.map(caseText)}%\n %" private def explainEntry(entry: TraceEntry)(using Context): String = entry match case TryReduce(scrut: Type) => i" trying to reduce $scrut" - case NoMatches(scrut, cases) => - i""" failed since selector $scrut - | matches none of the cases - | - | ${casesText(cases)}""" case EmptyScrutinee(scrut) => i""" failed since selector $scrut | is uninhabited (there are no values of that type).""" @@ -127,10 +118,16 @@ object MatchTypeTrace: | The computed bounds for the $params are: | ${fails.map((name, bounds) => i"$name$bounds")}%\n %""" - def noMatchesText(scrut: Type, cases: List[Type])(using Context): String = + /** The failure message when the scrutinee `scrut` does not match any case in `cases`. */ + def noMatchesText(scrut: Type, cases: List[MatchTypeCaseSpec])(using Context): String = i"""failed since selector $scrut |matches none of the cases | | ${casesText(cases)}""" + def illegalPatternText(scrut: Type, cas: MatchTypeCaseSpec.LegacyPatMat)(using Context): String = + i"""The match type contains an illegal case: + | ${caseText(cas)} + |(this error can be ignored for now with `-source:3.3`)""" + end MatchTypeTrace diff --git a/compiler/src/dotty/tools/dotc/core/Mode.scala b/compiler/src/dotty/tools/dotc/core/Mode.scala index ea63eb6a419b..71b49394ae14 100644 --- a/compiler/src/dotty/tools/dotc/core/Mode.scala +++ b/compiler/src/dotty/tools/dotc/core/Mode.scala @@ -1,8 +1,16 @@ package dotty.tools.dotc.core -/** A collection of mode bits that are part of a context */ +/** A collection of mode bits that are part of a context. + * + * What's the difference between a boolean setting and a Mode? + * A setting is usually valid for the entire compilation run, whereas a mode is context-specific. + * Changing a setting in a context creates a new SettingsState in that context, which is a relatively big object. + * By comparison, a mode is just an Int. + * But Mode bits are a scarce resource, so for low-priority situations, just reset the state with a setting. + * Also, a setting is externally settable, while a mode isn't. + */ case class Mode(val bits: Int) extends AnyVal { - import Mode._ + import Mode.* def | (that: Mode): Mode = Mode(bits | that.bits) def & (that: Mode): Mode = Mode(bits & that.bits) def &~ (that: Mode): Mode = Mode(bits & ~that.bits) @@ -10,6 +18,9 @@ case class Mode(val bits: Int) extends AnyVal { def isExpr: Boolean = (this & PatternOrTypeBits) == None + /** Are we in the body of a quoted pattern? */ + def isQuotedPattern: Boolean = (this & QuotedPatternBits) != None + override def toString: String = (0 until 31).filter(i => (bits & (1 << i)) != 0).map(modeName).mkString("Mode(", ",", ")") @@ -44,6 +55,14 @@ object Mode { /** Are we looking for cyclic references? */ val CheckCyclic: Mode = newMode(5, "CheckCyclic") + /** We are in the arguments of a HOAS pattern in quote pattern matching + * e.g. x, y, z in a quote pattern '{ ... $a(x, y, z) ...
} + * + * This mode keeps the typer from inserting contextual parameters into a contextual method without arguments. + * (See tests/run-macros/i17905 for motivating examples) + */ + val InQuotePatternHoasArgs: Mode = newMode(6, "InQuotePatternHoasArgs") + /** We are in a pattern alternative */ val InPatternAlternative: Mode = newMode(7, "InPatternAlternative") @@ -61,6 +80,9 @@ object Mode { */ val Printing: Mode = newMode(10, "Printing") + /** Are we in the body of a quoted type pattern? */ + val QuotedTypePattern: Mode = newMode(11, "QuotedTypePattern") + /** We are currently in a `viewExists` check. In that case, ambiguous * implicits checks are disabled and we succeed with the first implicit * found. @@ -98,9 +120,6 @@ object Mode { /** Read original positions when unpickling from TASTY */ val ReadPositions: Mode = newMode(17, "ReadPositions") - /** Don't suppress exceptions thrown during show */ - val PrintShowExceptions: Mode = newMode(18, "PrintShowExceptions") - val PatternOrTypeBits: Mode = Pattern | Type /** We are elaborating the fully qualified name of a package clause. @@ -120,8 +139,10 @@ object Mode { /** Are we trying to find a hidden implicit? */ val FindHiddenImplicits: Mode = newMode(24, "FindHiddenImplicits") - /** Are we in a quote in a pattern? */ - val QuotedPattern: Mode = newMode(25, "QuotedPattern") + /** Are we in the body of a quoted expression pattern? */ + val QuotedExprPattern: Mode = newMode(25, "QuotedExprPattern") + + val QuotedPatternBits: Mode = QuotedExprPattern | QuotedTypePattern /** Are we typechecking the rhs of an extension method? */ val InExtensionMethod: Mode = newMode(26, "InExtensionMethod") @@ -142,6 +163,6 @@ object Mode { */ val RelaxedOverriding: Mode = newMode(30, "RelaxedOverriding") - /** We are checking the original call of an Inlined node */ - val InlinedCall: Mode = newMode(31, "InlinedCall") + /** Skip inlining of methods. */ + val NoInline: Mode = newMode(31, "NoInline") } diff --git a/compiler/src/dotty/tools/dotc/core/NameKinds.scala b/compiler/src/dotty/tools/dotc/core/NameKinds.scala index 2c968ab9446c..d4f009cbbbd5 100644 --- a/compiler/src/dotty/tools/dotc/core/NameKinds.scala +++ b/compiler/src/dotty/tools/dotc/core/NameKinds.scala @@ -2,12 +2,12 @@ package dotty.tools package dotc package core -import Names._ -import NameOps._ -import StdNames._ -import NameTags._ -import Contexts._ -import Decorators._ +import Names.* +import NameOps.* +import StdNames.* +import NameTags.* +import Contexts.* +import Decorators.* import scala.annotation.internal.sharable @@ -278,9 +278,31 @@ object NameKinds { if (underlying.isEmpty) "$" + info.num + "$" else super.mkString(underlying, info) } + /** The name of the term parameter generated for a context bound: + * + * def foo[T: A](...): ... + * + * becomes: + * + * def foo[T](...)(using evidence$1: A[T]): ... + * + * The "evidence$" prefix is a convention copied from Scala 2.
+ */ + val ContextBoundParamName: UniqueNameKind = new UniqueNameKind("evidence$") + + /** The name of an inferred contextual function parameter: + * + * val x: A ?=> B = b + * + * becomes: + * + * val x: A ?=> B = (contextual$1: A) ?=> b + */ + val ContextFunctionParamName: UniqueNameKind = new UniqueNameKind("contextual$") + /** Other unique names */ + val CanThrowEvidenceName: UniqueNameKind = new UniqueNameKind("canThrow$") val TempResultName: UniqueNameKind = new UniqueNameKind("ev$") - val EvidenceParamName: UniqueNameKind = new UniqueNameKind("evidence$") val DepParamName: UniqueNameKind = new UniqueNameKind("(param)") val LazyImplicitName: UniqueNameKind = new UniqueNameKind("$_lazy_implicit_$") val LazyLocalName: UniqueNameKind = new UniqueNameKind("$lzy") @@ -294,7 +316,6 @@ object NameKinds { val TailTempName: UniqueNameKind = new UniqueNameKind("$tmp") val ExceptionBinderName: UniqueNameKind = new UniqueNameKind("ex") val SkolemName: UniqueNameKind = new UniqueNameKind("?") - val LiftedTreeName: UniqueNameKind = new UniqueNameKind("liftedTree") val SuperArgName: UniqueNameKind = new UniqueNameKind("$superArg$") val DocArtifactName: UniqueNameKind = new UniqueNameKind("$doc") val UniqueInlineName: UniqueNameKind = new UniqueNameKind("$i") diff --git a/compiler/src/dotty/tools/dotc/core/NameOps.scala b/compiler/src/dotty/tools/dotc/core/NameOps.scala index 04440c9e9b39..415aa049c587 100644 --- a/compiler/src/dotty/tools/dotc/core/NameOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NameOps.scala @@ -6,11 +6,11 @@ import java.security.MessageDigest import java.nio.CharBuffer import scala.io.Codec import Int.MaxValue -import Names._, StdNames._, Contexts._, Symbols._, Flags._, NameKinds._, Types._ +import Names.*, StdNames.*, Contexts.*, Symbols.*, Flags.*, NameKinds.*, Types.* import util.Chars.{isOperatorPart, digit2int} import Decorators.* -import Definitions._ -import nme._ +import Definitions.* +import nme.* object NameOps { @@ -109,7 +109,7 @@ object NameOps { false } - /** is this the name of an object enclosing packagel-level definitions? */ + /** is this the name of an object enclosing package-level definitions? */ def isPackageObjectName: Boolean = name match { case name: TermName => name == nme.PACKAGE || name.endsWith(str.TOPLEVEL_SUFFIX) case name: TypeName => @@ -119,6 +119,16 @@ object NameOps { } } + /** is this the name of an object enclosing top-level definitions? */ + def isTopLevelPackageObjectName: Boolean = name match { + case name: TermName => name.endsWith(str.TOPLEVEL_SUFFIX) + case name: TypeName => + name.toTermName match { + case ModuleClassName(original) => original.isTopLevelPackageObjectName + case _ => false + } + } + /** Convert this module name to corresponding module class name */ def moduleClassName: TypeName = name.derived(ModuleClassName).toTypeName @@ -236,10 +246,12 @@ object NameOps { */ def isPlainFunction(using Context): Boolean = functionArity >= 0 - /** Is a function name that contains `mustHave` as a substring */ - private def isSpecificFunction(mustHave: String)(using Context): Boolean = + /** Is a function name that contains `mustHave` as a substring + * and has arity `minArity` or greater. 
+ */ + private def isSpecificFunction(mustHave: String, minArity: Int = 0)(using Context): Boolean = val suffixStart = functionSuffixStart - isFunctionPrefix(suffixStart, mustHave) && funArity(suffixStart) >= 0 + isFunctionPrefix(suffixStart, mustHave) && funArity(suffixStart) >= minArity def isContextFunction(using Context): Boolean = isSpecificFunction("Context") def isImpureFunction(using Context): Boolean = isSpecificFunction("Impure") diff --git a/compiler/src/dotty/tools/dotc/core/NamerOps.scala b/compiler/src/dotty/tools/dotc/core/NamerOps.scala index dc09edd79781..75a135826785 100644 --- a/compiler/src/dotty/tools/dotc/core/NamerOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NamerOps.scala @@ -2,7 +2,7 @@ package dotty.tools package dotc package core -import Contexts._, Symbols._, Types._, Flags._, Scopes._, Decorators._, Names._, NameOps._ +import Contexts.*, Symbols.*, Types.*, Flags.*, Scopes.*, Decorators.*, Names.*, NameOps.* import SymDenotations.{LazyType, SymDenotation}, StdNames.nme import TypeApplications.EtaExpansion @@ -111,8 +111,11 @@ object NamerOps: def addConstructorApplies(scope: MutableScope, cls: ClassSymbol, modcls: ClassSymbol)(using Context): scope.type = def proxy(constr: Symbol): Symbol = newSymbol( - modcls, nme.apply, ApplyProxyFlags | (constr.flagsUNSAFE & AccessFlags), - ApplyProxyCompleter(constr), coord = constr.coord) + modcls, nme.apply, + ApplyProxyFlags | (constr.flagsUNSAFE & AccessFlags), + ApplyProxyCompleter(constr), + cls.privateWithin, + constr.coord) for dcl <- cls.info.decls do if dcl.isConstructor then scope.enter(proxy(dcl)) scope @@ -138,7 +141,7 @@ object NamerOps: ConstructorCompanionFlags, ConstructorCompanionFlags, constructorCompanionCompleter(cls), coord = cls.coord, - assocFile = cls.assocFile) + compUnitInfo = cls.compUnitInfo) companion.moduleClass.registerCompanion(cls) cls.registerCompanion(companion.moduleClass) companion @@ -147,7 +150,7 @@ object NamerOps: newSymbol(tsym.owner, tsym.name.toTermName, ConstructorCompanionFlags | StableRealizable | Method, ExprType(prefix.select(proxy)), coord = tsym.coord) - /** Add all necesssary constructor proxy symbols for members of class `cls`. This means: + /** Add all necessary constructor proxy symbols for members of class `cls`. This means: * * - if a member is a class, or type alias, that needs a constructor companion, add one, * provided no member with the same name exists. diff --git a/compiler/src/dotty/tools/dotc/core/Names.scala b/compiler/src/dotty/tools/dotc/core/Names.scala index 1e08379b57f0..3f9667b08067 100644 --- a/compiler/src/dotty/tools/dotc/core/Names.scala +++ b/compiler/src/dotty/tools/dotc/core/Names.scala @@ -13,7 +13,7 @@ import util.{LinearMap, HashSet} import scala.annotation.internal.sharable object Names { - import NameKinds._ + import NameKinds.* /** Things that can be turned into names with `toTermName` and `toTypeName`. * Decorators implements these as extension methods for strings. diff --git a/compiler/src/dotty/tools/dotc/core/NullOpsDecorator.scala b/compiler/src/dotty/tools/dotc/core/NullOpsDecorator.scala index e18271772ff1..4f22f9d31e36 100644 --- a/compiler/src/dotty/tools/dotc/core/NullOpsDecorator.scala +++ b/compiler/src/dotty/tools/dotc/core/NullOpsDecorator.scala @@ -1,8 +1,8 @@ package dotty.tools.dotc package core -import Contexts._ -import Types._ +import Contexts.* +import Types.* /** Defines operations on nullable types and tree. 
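A hedged, user-level sketch of the nullability handling these operations support; `greet` is invented for the example and assumes compilation with `-Yexplicit-nulls`:

```scala
// Under explicit nulls, a reference type does not include `null` unless the
// type says so with a union; a null test then narrows the type by flow typing.
def greet(name: String | Null): String =
  if name != null then s"Hello, $name" // `name` is narrowed to `String` here
  else "Hello, stranger"
```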
*/ object NullOpsDecorator: @@ -49,7 +49,7 @@ object NullOpsDecorator: } end extension - import ast.tpd._ + import ast.tpd.* extension (self: Tree) // cast the type of the tree to a non-nullable type diff --git a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala index 0328cea9b3ca..e11ac26ef93c 100644 --- a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala @@ -2,11 +2,11 @@ package dotty.tools package dotc package core -import Types._, Contexts._, Symbols._, Decorators._, TypeApplications._ +import Types.*, Contexts.*, Symbols.*, Decorators.*, TypeApplications.* import util.{SimpleIdentitySet, SimpleIdentityMap} import collection.mutable import printing.Printer -import printing.Texts._ +import printing.Texts.* import config.Config import config.Printers.constr import reflect.ClassTag @@ -14,6 +14,8 @@ import annotation.tailrec import annotation.internal.sharable import cc.{CapturingType, derivedCapturingType} +import scala.compiletime.uninitialized + object OrderingConstraint { /** If true, use reverse dependencies in `replace` to avoid checking the bounds @@ -124,7 +126,7 @@ object OrderingConstraint { val empty = new OrderingConstraint(SimpleIdentityMap.empty, SimpleIdentityMap.empty, SimpleIdentityMap.empty, SimpleIdentitySet.empty) } -import OrderingConstraint._ +import OrderingConstraint.* /** Constraint over undetermined type parameters that keeps separate maps to * reflect parameter orderings. @@ -748,9 +750,18 @@ class OrderingConstraint(private val boundsMap: ParamBounds, } if isRemovable(param.binder) then current = current.remove(param.binder) current.dropDeps(param) + replacedTypeVar match + case replacedTypeVar: TypeVar if isHard(replacedTypeVar) => current = current.hardenTypeVars(replacement) + case _ => current.checkWellFormed() end replace + def hardenTypeVars(tp: Type)(using Context): OrderingConstraint = tp.dealiasKeepRefiningAnnots match + case tp: TypeVar if contains(tp.origin) => withHard(tp) + case tp: TypeParamRef if contains(tp) => hardenTypeVars(typeVarOfParam(tp)) + case tp: AndOrType => hardenTypeVars(tp.tp1).hardenTypeVars(tp.tp2) + case _ => this + def remove(pt: TypeLambda)(using Context): This = { def removeFromOrdering(po: ParamOrdering) = { def removeFromBoundss(key: TypeLambda, bndss: Array[List[TypeParamRef]]): Array[List[TypeParamRef]] = { @@ -881,7 +892,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, i += 1 } - private var myUninstVars: mutable.ArrayBuffer[TypeVar] | Null = _ + private var myUninstVars: mutable.ArrayBuffer[TypeVar] | Null = uninitialized /** The uninstantiated typevars of this constraint */ def uninstVars: collection.Seq[TypeVar] = { diff --git a/compiler/src/dotty/tools/dotc/core/ParamInfo.scala b/compiler/src/dotty/tools/dotc/core/ParamInfo.scala index e88d6540e64b..e499f718365a 100644 --- a/compiler/src/dotty/tools/dotc/core/ParamInfo.scala +++ b/compiler/src/dotty/tools/dotc/core/ParamInfo.scala @@ -1,7 +1,7 @@ package dotty.tools.dotc.core import Names.Name -import Contexts._ +import Contexts.* import Types.Type import Variances.{Variance, varianceToInt} diff --git a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala index 5e8a960608e6..38f8e19e2737 100644 --- a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala +++ 
b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala @@ -2,14 +2,14 @@ package dotty.tools package dotc package core -import Decorators._ -import Symbols._ -import Types._ -import Flags._ +import Decorators.* +import Symbols.* +import Types.* +import Flags.* import Contexts.ctx import dotty.tools.dotc.reporting.trace import config.Feature.migrateTo3 -import config.Printers._ +import config.Printers.* trait PatternTypeConstrainer { self: TypeComparer => @@ -76,7 +76,7 @@ trait PatternTypeConstrainer { self: TypeComparer => def constrainPatternType(pat: Type, scrut: Type, forceInvariantRefinement: Boolean = false): Boolean = trace(i"constrainPatternType($scrut, $pat)", gadts) { def classesMayBeCompatible: Boolean = { - import Flags._ + import Flags.* val patCls = pat.classSymbol val scrCls = scrut.classSymbol !patCls.exists || !scrCls.exists || { @@ -263,29 +263,22 @@ trait PatternTypeConstrainer { self: TypeComparer => trace(i"constraining simple pattern type $tp >:< $pt", gadts, (res: Boolean) => i"$res gadt = ${ctx.gadt}") { (tp, pt) match { - case (AppliedType(tyconS, argsS), AppliedType(tyconP, argsP)) => - val saved = state.nn.constraint - val result = - ctx.gadtState.rollbackGadtUnless { - tyconS.typeParams.lazyZip(argsS).lazyZip(argsP).forall { (param, argS, argP) => - val variance = param.paramVarianceSign - if variance == 0 || assumeInvariantRefinement || - // As a special case, when pattern and scrutinee types have the same type constructor, - // we infer better bounds for pattern-bound abstract types. - argP.typeSymbol.isPatternBound && patternTp.classSymbol == scrutineeTp.classSymbol - then - val TypeBounds(loS, hiS) = argS.bounds - val TypeBounds(loP, hiP) = argP.bounds - var res = true - if variance < 1 then res &&= isSubType(loS, hiP) - if variance > -1 then res &&= isSubType(loP, hiS) - res - else true - } - } - if !result then - constraint = saved - result + case (AppliedType(tyconS, argsS), AppliedType(tyconP, argsP)) => rollbackConstraintsUnless: + tyconS.typeParams.lazyZip(argsS).lazyZip(argsP).forall { (param, argS, argP) => + val variance = param.paramVarianceSign + if variance == 0 || assumeInvariantRefinement || + // As a special case, when pattern and scrutinee types have the same type constructor, + // we infer better bounds for pattern-bound abstract types. + argP.typeSymbol.isPatternBound && patternTp.classSymbol == scrutineeTp.classSymbol + then + val TypeBounds(loS, hiS) = argS.bounds + val TypeBounds(loP, hiP) = argP.bounds + var res = true + if variance < 1 then res &&= isSubType(loS, hiP) + if variance > -1 then res &&= isSubType(loP, hiS) + res + else true + } case _ => // Give up if we don't get AppliedType, e.g. if we upcasted to Any. 
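The variance-sensitive bounds derived here are what make user-level GADT matches typecheck. A minimal sketch (the `Exp` enum is invented for the example):

```scala
// Matching on a GADT case constrains the type parameter in that branch.
enum Exp[T]:
  case IntLit(i: Int) extends Exp[Int]
  case BoolLit(b: Boolean) extends Exp[Boolean]

def eval[T](e: Exp[T]): T = e match
  case Exp.IntLit(i)  => i // the pattern constrains T =:= Int, so returning an Int is sound
  case Exp.BoolLit(b) => b // likewise, T =:= Boolean in this branch
```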
// Note that this doesn't mean that patternTp, scrutineeTp cannot possibly diff --git a/compiler/src/dotty/tools/dotc/core/Periods.scala b/compiler/src/dotty/tools/dotc/core/Periods.scala index ee877fb538d4..019c5932b3c9 100644 --- a/compiler/src/dotty/tools/dotc/core/Periods.scala +++ b/compiler/src/dotty/tools/dotc/core/Periods.scala @@ -1,6 +1,6 @@ package dotty.tools.dotc.core -import Contexts._ +import Contexts.* import Phases.unfusedPhases object Periods { diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index 3c4c45ab254a..c704846a82da 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -2,17 +2,17 @@ package dotty.tools package dotc package core -import Periods._ -import Contexts._ +import Periods.* +import Contexts.* import dotty.tools.backend.jvm.GenBCode -import DenotTransformers._ -import Denotations._ -import Decorators._ +import DenotTransformers.* +import Denotations.* +import Decorators.* import config.Printers.config import scala.collection.mutable.ListBuffer -import dotty.tools.dotc.transform.MegaPhase._ -import dotty.tools.dotc.transform._ -import Periods._ +import dotty.tools.dotc.transform.MegaPhase.* +import dotty.tools.dotc.transform.* +import Periods.* import parsing.Parser import printing.XprintMode import typer.{TyperPhase, RefChecks} @@ -21,6 +21,7 @@ import typer.ImportInfo.withRootImports import ast.{tpd, untpd} import scala.annotation.internal.sharable import scala.util.control.NonFatal +import scala.compiletime.uninitialized object Phases { @@ -205,30 +206,30 @@ object Phases { if nextDenotTransformerId(i) == phase.id then nextDenotTransformerId(i) = nextDenotTransformerId(phase.id + 1) - private var myParserPhase: Phase = _ - private var myTyperPhase: Phase = _ - private var myPostTyperPhase: Phase = _ - private var mySbtExtractDependenciesPhase: Phase = _ - private var myPicklerPhase: Phase = _ - private var myInliningPhase: Phase = _ - private var myStagingPhase: Phase = _ - private var mySplicingPhase: Phase = _ - private var myFirstTransformPhase: Phase = _ - private var myCollectNullableFieldsPhase: Phase = _ - private var myRefChecksPhase: Phase = _ - private var myPatmatPhase: Phase = _ - private var myElimRepeatedPhase: Phase = _ - private var myElimByNamePhase: Phase = _ - private var myExtensionMethodsPhase: Phase = _ - private var myExplicitOuterPhase: Phase = _ - private var myGettersPhase: Phase = _ - private var myErasurePhase: Phase = _ - private var myElimErasedValueTypePhase: Phase = _ - private var myLambdaLiftPhase: Phase = _ - private var myCountOuterAccessesPhase: Phase = _ - private var myFlattenPhase: Phase = _ - private var myGenBCodePhase: Phase = _ - private var myCheckCapturesPhase: Phase = _ + private var myParserPhase: Phase = uninitialized + private var myTyperPhase: Phase = uninitialized + private var myPostTyperPhase: Phase = uninitialized + private var mySbtExtractDependenciesPhase: Phase = uninitialized + private var myPicklerPhase: Phase = uninitialized + private var myInliningPhase: Phase = uninitialized + private var myStagingPhase: Phase = uninitialized + private var mySplicingPhase: Phase = uninitialized + private var myFirstTransformPhase: Phase = uninitialized + private var myCollectNullableFieldsPhase: Phase = uninitialized + private var myRefChecksPhase: Phase = uninitialized + private var myPatmatPhase: Phase = uninitialized + private var myElimRepeatedPhase: Phase = uninitialized + 
private var myElimByNamePhase: Phase = uninitialized + private var myExtensionMethodsPhase: Phase = uninitialized + private var myExplicitOuterPhase: Phase = uninitialized + private var myGettersPhase: Phase = uninitialized + private var myErasurePhase: Phase = uninitialized + private var myElimErasedValueTypePhase: Phase = uninitialized + private var myLambdaLiftPhase: Phase = uninitialized + private var myCountOuterAccessesPhase: Phase = uninitialized + private var myFlattenPhase: Phase = uninitialized + private var myGenBCodePhase: Phase = uninitialized + private var myCheckCapturesPhase: Phase = uninitialized final def parserPhase: Phase = myParserPhase final def typerPhase: Phase = myTyperPhase @@ -299,6 +300,14 @@ object Phases { */ def phaseName: String + /** This property is queried when phases are first assembled. + * If it is false, the phase will be dropped from the set of phases to traverse. + */ + def isEnabled(using Context): Boolean = true + + /** This property is queried before a phase is run. + * If it is false, the phase is skipped. + */ def isRunnable(using Context): Boolean = !ctx.reporter.hasErrors // TODO: This might test an unintended condition. @@ -306,6 +315,9 @@ object Phases { // run one calls `errorsReported`, not `hasErrors`. // But maybe changing this would prevent useful phases from running? + /** True for all phases except NoPhase */ + def exists: Boolean = true + /** If set, allow missing or superfluous arguments in applications * and type applications. */ @@ -317,19 +329,38 @@ object Phases { /** List of names of phases that should precede this phase */ def runsAfter: Set[String] = Set.empty + /** for purposes of progress tracking, overridden in TyperPhase */ + def subPhases: List[Run.SubPhase] = Nil + final def traversals: Int = if subPhases.isEmpty then 1 else subPhases.length + + /** skip the phase for a Java compilation unit, may depend on -Yjava-tasty */ + def skipIfJava(using Context): Boolean = true + /** @pre `isRunnable` returns true */ def run(using Context): Unit /** @pre `isRunnable` returns true */ - def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = - units.map { unit => - val unitCtx = ctx.fresh.setPhase(this.start).setCompilationUnit(unit).withRootImports - try run(using unitCtx) - catch case ex: Throwable if !ctx.run.enrichedErrorMessage => - println(ctx.run.enrichErrorMessage(s"unhandled exception while running $phaseName on $unit")) - throw ex - unitCtx.compilationUnit - } + def runOn(units: List[CompilationUnit])(using runCtx: Context): List[CompilationUnit] = + val buf = List.newBuilder[CompilationUnit] + // factor out typedAsJava check when not needed + val doSkipJava = ctx.settings.YjavaTasty.value && this <= picklerPhase && skipIfJava + for unit <- units do + given unitCtx: Context = runCtx.fresh.setPhase(this.start).setCompilationUnit(unit).withRootImports + if ctx.run.enterUnit(unit) then + try + if doSkipJava && unit.typedAsJava then + () + else + run + catch case ex: Throwable if !ctx.run.enrichedErrorMessage => + println(ctx.run.enrichErrorMessage(s"unhandled exception while running $phaseName on $unit")) + throw ex + finally ctx.run.advanceUnit() + buf += unitCtx.compilationUnit + end if + end for + buf.result() + end runOn /** Convert a compilation unit's tree to a string; can be overridden */ def show(tree: untpd.Tree)(using Context): String = @@ -360,14 +391,15 @@ object Phases { /** Can this transform change the base types of a type? 
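The new `runOn` above, and the `monitor` helper that follows below, both take the body as a context function so that the freshly created per-unit `Context` is passed along implicitly. A self-contained sketch of that pattern, with a made-up `Ctx` standing in for the compiler's `Context`:

```scala
import scala.util.control.NonFatal

case class Ctx(unitName: String)

// The body receives the given Ctx implicitly, like `body: Context ?=> Unit` above.
def monitorLike(doing: String)(body: Ctx ?=> Unit)(using ctx: Ctx): Boolean =
  try { body; true }
  catch case NonFatal(ex) =>
    println(s"exception occurred while $doing ${ctx.unitName}")
    throw ex

@main def demo(): Unit =
  given Ctx = Ctx("Hello.scala")
  monitorLike("typechecking") {
    println(s"processing ${summon[Ctx].unitName}")
  }
```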
*/ def changesBaseTypes: Boolean = changesParents - def isEnabled(using Context): Boolean = true - - def exists: Boolean = true - def initContext(ctx: FreshContext): Unit = () + /** A hook that allows transforming the usual context passed to the function + * that prints a compilation unit after a phase + */ + def printingContext(ctx: Context): Context = ctx + private var myPeriod: Period = Periods.InvalidPeriod - private var myBase: ContextBase = _ + private var myBase: ContextBase = uninitialized private var myErasedTypes = false private var myFlatClasses = false private var myRefChecked = false @@ -425,8 +457,8 @@ object Phases { final def prev: Phase = if (id > FirstPhaseId) myBase.phases(start - 1) else NoPhase - final def prevMega(using Context): Phase = - ctx.base.fusedContaining(ctx.phase.prev) + final def megaPhase(using Context): Phase = + ctx.base.fusedContaining(this) final def next: Phase = if (hasNext) myBase.phases(end + 1) else NoPhase @@ -436,12 +468,33 @@ object Phases { final def iterator: Iterator[Phase] = Iterator.iterate(this)(_.next) takeWhile (_.hasNext) - final def monitor(doing: String)(body: => Unit)(using Context): Unit = - try body - catch - case NonFatal(ex) => - report.echo(s"exception occurred while $doing ${ctx.compilationUnit}") + /** Cancellable region: if not cancelled, runs the body in the context of the current compilation unit + * and enriches crash messages. + */ + final def monitor(doing: String)(body: Context ?=> Unit)(using Context): Boolean = + val unit = ctx.compilationUnit + if ctx.run.enterUnit(unit) then + try {body; true} + catch case NonFatal(ex) if !ctx.run.enrichedErrorMessage => + report.echo(ctx.run.enrichErrorMessage(s"exception occurred while $doing $unit")) throw ex + finally ctx.run.advanceUnit() + else + false + + inline def runSubPhase[T](id: Run.SubPhase)(inline body: (Run.SubPhase, Context) ?=> T)(using Context): T = + given Run.SubPhase = id + try + body + finally + ctx.run.enterNextSubphase() + + /** Do not run if compile progress has been cancelled */ + final def cancellable(body: Context ?=> Unit)(using Context): Boolean = + if ctx.run.enterRegion() then + {body; true} + else + false override def toString: String = phaseName }
diff --git a/compiler/src/dotty/tools/dotc/core/Scopes.scala b/compiler/src/dotty/tools/dotc/core/Scopes.scala index 99076b422358..7df5a7fa3c09 100644 --- a/compiler/src/dotty/tools/dotc/core/Scopes.scala +++ b/compiler/src/dotty/tools/dotc/core/Scopes.scala @@ -7,18 +7,19 @@ package dotty.tools package dotc package core -import Symbols._ +import Symbols.* import Types.{TermRef, NoPrefix} -import Flags._ -import Names._ -import Contexts._ -import Phases._ -import Denotations._ -import printing.Texts._ +import Flags.* +import Names.* +import Contexts.* +import Phases.* +import Denotations.* +import printing.Texts.* import printing.Printer import SymDenotations.NoDenotation import collection.mutable +import scala.compiletime.uninitialized object Scopes {
diff --git a/compiler/src/dotty/tools/dotc/core/Signature.scala b/compiler/src/dotty/tools/dotc/core/Signature.scala index bd744ec01846..f62d594d639d 100644 --- a/compiler/src/dotty/tools/dotc/core/Signature.scala +++ b/compiler/src/dotty/tools/dotc/core/Signature.scala @@ -3,9 +3,9 @@ package core import scala.annotation.tailrec -import Names._, Types._, Contexts._, StdNames._, Decorators._ +import Names.*, Types.*, Contexts.*, StdNames.*, Decorators.* import TypeErasure.sigName -import Signature._ +import Signature.* /** The signature of a denotation.
* diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index f2624e26cba5..253a45ffd7a8 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -4,9 +4,9 @@ package core import scala.collection.mutable import scala.annotation.switch import scala.annotation.internal.sharable -import Names._ -import Symbols._ -import Contexts._ +import Names.* +import Symbols.* +import Contexts.* object StdNames { @@ -34,7 +34,6 @@ object StdNames { inline val MODULE_INSTANCE_FIELD = "MODULE$" inline val Function = "Function" - inline val ErasedFunction = "ErasedFunction" inline val ContextFunction = "ContextFunction" inline val ErasedContextFunction = "ErasedContextFunction" inline val AbstractFunction = "AbstractFunction" @@ -121,6 +120,7 @@ object StdNames { val BITMAP_TRANSIENT: N = s"${BITMAP_PREFIX}trans$$" // initialization bitmap for transient lazy vals val BITMAP_CHECKINIT: N = s"${BITMAP_PREFIX}init$$" // initialization bitmap for checkinit values val BITMAP_CHECKINIT_TRANSIENT: N = s"${BITMAP_PREFIX}inittrans$$" // initialization bitmap for transient checkinit values + val CC_REACH: N = "$reach" val DEFAULT_GETTER: N = str.DEFAULT_GETTER val DEFAULT_GETTER_INIT: N = "$lessinit$greater" val DO_WHILE_PREFIX: N = "doWhile$" @@ -177,6 +177,7 @@ object StdNames { final val typeTag: N = "typeTag" final val Expr: N = "Expr" final val String: N = "String" + final val Option: N = "Option" final val Annotation: N = "Annotation" // fictions we use as both types and terms @@ -213,7 +214,6 @@ object StdNames { final val Throwable: N = "Throwable" final val IOOBException: N = "IndexOutOfBoundsException" final val FunctionXXL: N = "FunctionXXL" - final val ErasedFunction: N = "ErasedFunction" final val Abs: N = "Abs" final val And: N = "&&" @@ -299,6 +299,7 @@ object StdNames { val SELF: N = "$this" val SKOLEM: N = "" val TRAIT_CONSTRUCTOR: N = "$init$" + val TRY_BLOCK: N = "" val THROWS: N = "$throws" val U2EVT: N = "u2evt$" val ALLARGS: N = "$allArgs" @@ -394,6 +395,7 @@ object StdNames { val UNIT : N = "UNIT" val acc: N = "acc" val adhocExtensions: N = "adhocExtensions" + val andThen: N = "andThen" val annotation: N = "annotation" val any2stringadd: N = "any2stringadd" val anyHash: N = "anyHash" @@ -432,6 +434,7 @@ object StdNames { val bytes: N = "bytes" val canEqual_ : N = "canEqual" val canEqualAny : N = "canEqualAny" + val capIn: N = "capIn" val caps: N = "caps" val captureChecking: N = "captureChecking" val checkInitialized: N = "checkInitialized" @@ -442,11 +445,13 @@ object StdNames { val command: N = "command" val common: N = "common" val compiletime : N = "compiletime" + val compose: N = "compose" val conforms_ : N = "$conforms" val contents: N = "contents" val copy: N = "copy" - val currentMirror: N = "currentMirror" val create: N = "create" + val currentMirror: N = "currentMirror" + val curried: N = "curried" val definitions: N = "definitions" val delayedInit: N = "delayedInit" val delayedInitArg: N = "delayedInit$body" @@ -621,6 +626,7 @@ object StdNames { val transparent : N = "transparent" val tree : N = "tree" val true_ : N = "true" + val tupled: N = "tupled" val typedProductIterator: N = "typedProductIterator" val typeTagToManifest: N = "typeTagToManifest" val unapply: N = "unapply" diff --git a/compiler/src/dotty/tools/dotc/core/Substituters.scala b/compiler/src/dotty/tools/dotc/core/Substituters.scala index 3e32340b21bd..96da91293d91 100644 --- 
a/compiler/src/dotty/tools/dotc/core/Substituters.scala +++ b/compiler/src/dotty/tools/dotc/core/Substituters.scala @@ -1,7 +1,7 @@ package dotty.tools.dotc package core -import Types._, Symbols._, Contexts._ +import Types.*, Symbols.*, Contexts.* import cc.CaptureSet.IdempotentCaptRefMap /** Substitution operations on types. See the corresponding `subst` and @@ -165,7 +165,7 @@ object Substituters: final class SubstBindingMap(from: BindingType, to: BindingType)(using Context) extends DeepTypeMap, BiTypeMap { def apply(tp: Type): Type = subst(tp, from, to, this)(using mapCtx) - def inverse(tp: Type): Type = tp.subst(to, from) + def inverse = SubstBindingMap(to, from) } final class Subst1Map(from: Symbol, to: Type)(using Context) extends DeepTypeMap { @@ -182,14 +182,14 @@ object Substituters: final class SubstSymMap(from: List[Symbol], to: List[Symbol])(using Context) extends DeepTypeMap, BiTypeMap { def apply(tp: Type): Type = substSym(tp, from, to, this)(using mapCtx) - def inverse(tp: Type) = tp.substSym(to, from) // implicitly requires that `to` contains no duplicates. + def inverse = SubstSymMap(to, from) // implicitly requires that `to` contains no duplicates. } final class SubstThisMap(from: ClassSymbol, to: Type)(using Context) extends DeepTypeMap { def apply(tp: Type): Type = substThis(tp, from, to, this)(using mapCtx) } - final class SubstRecThisMap(from: Type, to: Type)(using Context) extends DeepTypeMap { + final class SubstRecThisMap(from: Type, to: Type)(using Context) extends DeepTypeMap, IdempotentCaptRefMap { def apply(tp: Type): Type = substRecThis(tp, from, to, this)(using mapCtx) } diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index b8c17ff61e9e..b1e85f2b4f90 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -2,16 +2,16 @@ package dotty.tools package dotc package core -import Periods._, Contexts._, Symbols._, Denotations._, Names._, NameOps._, Annotations._ -import Types._, Flags._, Decorators._, DenotTransformers._, StdNames._, Scopes._ -import NameOps._, NameKinds._ +import Periods.*, Contexts.*, Symbols.*, Denotations.*, Names.*, NameOps.*, Annotations.* +import Types.*, Flags.*, Decorators.*, DenotTransformers.*, StdNames.*, Scopes.* +import NameOps.*, NameKinds.* import Phases.{Phase, typerPhase, unfusedPhases} import Constants.Constant import TypeApplications.TypeParamInfo import Scopes.Scope import dotty.tools.io.AbstractFile -import Decorators._ -import ast._ +import Decorators.* +import ast.* import ast.Trees.{LambdaTypeTree, TypeBoundsTree} import Trees.Literal import Variances.Variance @@ -21,12 +21,12 @@ import util.Stats import java.util.WeakHashMap import scala.util.control.NonFatal import config.Config -import reporting._ +import reporting.* import collection.mutable -import transform.TypeUtils._ -import cc.{CapturingType, derivedCapturingType, Setup, EventuallyCapturingType, isEventuallyCapturingType} +import cc.{CapturingType, derivedCapturingType} import scala.annotation.internal.sharable +import scala.compiletime.uninitialized object SymDenotations { @@ -672,6 +672,10 @@ object SymDenotations { def isPackageObject(using Context): Boolean = name.isPackageObjectName && owner.is(Package) && this.is(Module) + /** Is this symbol a package object containing top-level definitions? 
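A user-level sketch of what the test defined just below identifies; the file name `utils.scala` and the function `greet` are invented for the example:

```scala
// file: utils.scala
package myapp

// A top-level definition like this...
def greet(name: String): String = s"Hello, $name"
// ...is wrapped by the compiler in a synthetic object whose name ends in the
// "$package" suffix (conceptually `utils$package`), which is what the
// top-level package-object checks recognize.
```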
*/ + def isTopLevelDefinitionsObject(using Context): Boolean = + name.isTopLevelPackageObjectName && owner.is(Package) && this.is(Module) + /** Is this symbol a toplevel definition in a package object? */ def isWrappedToplevelDef(using Context): Boolean = !isConstructor && owner.isPackageObject @@ -864,6 +868,17 @@ object SymDenotations { final def isNullableClassAfterErasure(using Context): Boolean = isClass && !isValueClass && !is(ModuleClass) && symbol != defn.NothingClass + /** Is `pre` the same as C.this, where C is exactly the owner of this symbol, + * or, if this symbol is protected, a subclass of the owner? + */ + def isAccessPrivilegedThisType(pre: Type)(using Context): Boolean = pre match + case pre: ThisType => + (pre.cls eq owner) || this.is(Protected) && pre.cls.derivesFrom(owner) + case pre: TermRef => + pre.symbol.moduleClass == owner + case _ => + false + /** Is this definition accessible as a member of tree with type `pre`? * @param pre The type of the tree from which the selection is made * @param superAccess Access is via super @@ -873,7 +888,7 @@ object SymDenotations { * As a side effect, drop Local flags of members that are not accessed via the ThisType * of their owner. */ - final def isAccessibleFrom(pre: Type, superAccess: Boolean = false, whyNot: StringBuffer | Null = null)(using Context): Boolean = { + final def isAccessibleFrom(pre: Type, superAccess: Boolean = false)(using Context): Boolean = { /** Are we inside definition of `boundary`? * If this symbol is Java defined, package structure is interpreted to be flat. @@ -888,40 +903,15 @@ object SymDenotations { (linked ne NoSymbol) && accessWithin(linked) } - /** Is `pre` the same as C.thisThis, where C is exactly the owner of this symbol, - * or, if this symbol is protected, a subclass of the owner? - */ - def isCorrectThisType(pre: Type): Boolean = pre match { - case pre: ThisType => - (pre.cls eq owner) || this.is(Protected) && pre.cls.derivesFrom(owner) - case pre: TermRef => - pre.symbol.moduleClass == owner - case _ => - false - } - /** Is protected access to target symbol permitted? 
*/ def isProtectedAccessOK: Boolean = - inline def fail(str: String): false = - if whyNot != null then whyNot.nn.append(str) - false val cls = owner.enclosingSubClass if !cls.exists then - if pre.termSymbol.isPackageObject && accessWithin(pre.termSymbol.owner) then - true - else - val encl = if ctx.owner.isConstructor then ctx.owner.enclosingClass.owner.enclosingClass else ctx.owner.enclosingClass - fail(i""" - | Access to protected $this not permitted because enclosing ${encl.showLocated} - | is not a subclass of ${owner.showLocated} where target is defined""") - else if isType || pre.derivesFrom(cls) || isConstructor || owner.is(ModuleClass) then + pre.termSymbol.isPackageObject && accessWithin(pre.termSymbol.owner) + else // allow accesses to types from arbitrary subclasses fixes #4737 // don't perform this check for static members - true - else - fail(i""" - | Access to protected ${symbol.show} not permitted because prefix type ${pre.widen.show} - | does not conform to ${cls.showLocated} where the access takes place""") + isType || pre.derivesFrom(cls) || isConstructor || owner.is(ModuleClass) end isProtectedAccessOK if pre eq NoPrefix then true @@ -933,7 +923,7 @@ object SymDenotations { || boundary.isRoot || (accessWithin(boundary) || accessWithinLinked(boundary)) && ( !this.is(Local) - || isCorrectThisType(pre) + || isAccessPrivilegedThisType(pre) || canBeLocal(name, flags) && { resetFlag(Local) @@ -1030,7 +1020,7 @@ object SymDenotations { /** Is this a Scala 2 macro defined */ final def isScala2MacroInScala3(using Context): Boolean = - is(Macro, butNot = Inline) && is(Erased) + is(Macro, butNot = Inline) && flagsUNSAFE.is(Erased) // flag is set initially for macros - we check if it's a scala 2 macro before completing the type constructor so do not force the info to check the flag // Consider the macros of StringContext as plain Scala 2 macros when // compiling the standard library with Dotty. // This should be removed on Scala 3.x @@ -1041,6 +1031,10 @@ object SymDenotations { isOneOf(EffectivelyErased) || is(Inline) && !isRetainedInline && !hasAnnotation(defn.ScalaStaticAnnot) + /** Is this a member that will become public in the generated binary */ + def hasPublicInBinary(using Context): Boolean = + isTerm && hasAnnotation(defn.PublicInBinaryAnnot) + /** ()T and => T types should be treated as equivalent for this symbol. * Note: For the moment, we treat Scala-2 compiled symbols as loose matching, * because the Scala library does not always follow the right conventions. 
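A user-level sketch of the access rule that the `isProtectedAccessOK` logic above enforces; classes `A`, `B`, and `C` are invented for the example:

```scala
class A:
  protected def p: Int = 1

class B extends A:
  def ok: Int = this.p // fine: the access site is inside a subclass of A

class C:
  def bad(a: A): Int = 0
  // Writing `a.p` here would be rejected: C neither encloses nor derives
  // from A, so the protected-access check fails for this prefix.
```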
@@ -1353,7 +1347,7 @@ object SymDenotations { * * site: Subtype of both inClass and C */ - final def matchingDecl(inClass: Symbol, site: Type)(using Context): Symbol = { + final def matchingDecl(inClass: Symbol, site: Type, name: Name = this.name)(using Context): Symbol = { var denot = inClass.info.nonPrivateDecl(name) if (denot.isTerm) // types of the same name always match denot = denot.matchingDenotation(site, site.memberInfo(symbol), symbol.targetName) @@ -1513,16 +1507,29 @@ object SymDenotations { * See tests/pos/i10769.scala */ def reachableTypeRef(using Context) = - TypeRef(owner.reachableThisType, symbol) + TypeRef(owner.reachablePrefix, symbol) - /** Like termRef, but objects in the prefix are represented by their singleton type, + /** The reachable typeRef with wildcard arguments for each type parameter */ + def reachableRawTypeRef(using Context) = + reachableTypeRef.appliedTo(typeParams.map(_ => TypeBounds.emptyPolyKind)) + + /** Like termRef, if it is addressable from the current context, + * but objects in the prefix are represented by their singleton type, * this means we output `pre.O.member` rather than `pre.O$.this.member`. * * This is required to avoid owner crash in ExplicitOuter. * See tests/pos/i10769.scala + * + * If the reference is to an object that is not accessible from the + * current context since the object is nested in a class that is not an outer + * class of the current context, fall back to a TypeRef to the module class. + * Test case is tests/pos/i17556.scala. + * If the reference is to some other inaccessible object, throw an AssertionError. */ - def reachableTermRef(using Context) = - TermRef(owner.reachableThisType, symbol) + def reachableTermRef(using Context): Type = owner.reachablePrefix match + case pre: SingletonType => TermRef(pre, symbol) + case pre if symbol.is(ModuleVal) => TypeRef(pre, symbol.moduleClass) + case _ => throw AssertionError(i"cannot compute path to TermRef $this from ${ctx.owner}") /** Like thisType, but objects in the type are represented by their singleton type, * this means we output `pre.O.member` rather than `pre.O$.this.member`. @@ -1537,6 +1544,18 @@ object SymDenotations { else ThisType.raw(TypeRef(owner.reachableThisType, symbol.asType)) + /** Like `reachableThisType`, except if that would refer to a class where + * the `this` cannot be accessed. In that case, fall back to the + * rawTypeRef of the class. E.g. instead of `A.this.X` where `A.this` + * is inaccessible, use `A#X`. + */ + def reachablePrefix(using Context): Type = reachableThisType match + case pre: ThisType + if !pre.cls.isStaticOwner && !ctx.owner.isContainedIn(pre.cls) => + pre.cls.reachableRawTypeRef + case pre => + pre + /** The variance of this type parameter or type member as a subset of * {Covariant, Contravariant} */ @@ -1680,7 +1699,7 @@ object SymDenotations { c.ensureCompleted() end completeChildrenIn - if is(Sealed) || isAllOf(JavaEnumTrait) then + if is(Sealed) || isAllOf(JavaEnum) && isClass then if !is(ChildrenQueried) then // Make sure all visible children are completed, so that // they show up in Child annotations. A possible child is visible if it @@ -1999,8 +2018,10 @@ object SymDenotations { * @return The result may contain false positives, but never false negatives. 
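A user-level sketch of the approximation computed by `mayHaveCommonChild` just below; the types `T`, `Open`, `Both`, and `F` are invented for the example:

```scala
trait T
class Open                 // not final: a future subclass could also extend T...
class Both extends Open, T // ...as this one does, so Open and T may share a child

final class F              // final and unrelated to T: no common child is possible
```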
*/ final def mayHaveCommonChild(that: ClassSymbol)(using Context): Boolean = - !this.is(Final) && !that.is(Final) && (this.is(Trait) || that.is(Trait)) || - this.derivesFrom(that) || that.derivesFrom(this.symbol) + this.is(Trait) && !that.isEffectivelyFinal + || that.is(Trait) && !this.isEffectivelyFinal + || this.derivesFrom(that) + || that.derivesFrom(this.symbol) final override def typeParamCreationFlags: FlagSet = ClassTypeParamCreationFlags @@ -2414,7 +2435,7 @@ object SymDenotations { initPrivateWithin: Symbol) extends ClassDenotation(symbol, ownerIfExists, name, initFlags, initInfo, initPrivateWithin) { - private var packageObjsCache: List[ClassDenotation] = _ + private var packageObjsCache: List[ClassDenotation] = uninitialized private var packageObjsRunId: RunId = NoRunId private var ambiguityWarningIssued: Boolean = false @@ -2570,7 +2591,7 @@ object SymDenotations { for (sym <- scope.toList.iterator) // We need to be careful to not force the denotation of `sym` here, // otherwise it will be brought forward to the current run. - if (sym.defRunId != ctx.runId && sym.isClass && sym.asClass.assocFile == file) + if (sym.defRunId != ctx.runId && sym.isClass && sym.asClass.compUnitInfo != null && sym.asClass.compUnitInfo.nn.associatedFile == file) scope.unlink(sym, sym.lastKnownDenotation.name) } } diff --git a/compiler/src/dotty/tools/dotc/transform/SymUtils.scala b/compiler/src/dotty/tools/dotc/core/SymUtils.scala similarity index 95% rename from compiler/src/dotty/tools/dotc/transform/SymUtils.scala rename to compiler/src/dotty/tools/dotc/core/SymUtils.scala index c02a7d90cb8c..65634241b790 100644 --- a/compiler/src/dotty/tools/dotc/transform/SymUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/SymUtils.scala @@ -1,28 +1,28 @@ package dotty.tools.dotc -package transform - -import core._ -import Types._ -import Contexts._ -import Symbols._ -import SymDenotations._ -import Names._ -import NameOps._ -import StdNames._ -import NameKinds._ -import Flags._ -import ValueClasses.isDerivedValueClass -import Decorators._ +package core + +import core.* +import Types.* +import Contexts.* +import Symbols.* +import SymDenotations.* +import Names.* +import NameOps.* +import StdNames.* +import NameKinds.* +import Flags.* +import Decorators.* import Constants.Constant import Annotations.Annotation -import Phases._ +import Phases.* import ast.tpd.Literal +import transform.Mixin import dotty.tools.dotc.transform.sjs.JSSymUtils.sjsNeedsField import scala.annotation.tailrec -object SymUtils: +class SymUtils: extension (self: Symbol) @@ -79,6 +79,14 @@ object SymUtils: self.is(Enum, butNot = Case) && self.info.parents.exists(p => p.typeSymbol == defn.JavaEnumClass) + def isDerivedValueClass(using Context): Boolean = self.isClass && { + val d = self.denot + !d.isRefinementClass && + d.isValueClass && + (d.initial.symbol ne defn.AnyValClass) && // Compare the initial symbol because AnyVal does not exist after erasure + !d.isPrimitiveValueClass + } + /** Is this a case class for which a product mirror is generated? * Excluded are value classes, abstract classes and case classes with more than one * parameter section. 
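A user-level sketch of the distinction drawn in the comment above; `Point` and `Meters` are invented for the example:

```scala
import scala.deriving.Mirror

case class Point(x: Int, y: Int)
val pointMirror = summon[Mirror.ProductOf[Point]] // synthesized for a plain case class

case class Meters(value: Double) extends AnyVal
// No product mirror is synthesized for Meters: per the rule above,
// value classes are excluded.
```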
@@ -100,8 +108,8 @@ object SymUtils: if (!self.is(CaseClass)) "it is not a case class" else if (self.is(Abstract)) "it is an abstract class" else if (self.primaryConstructor.info.paramInfoss.length != 1) "it takes more than one parameter list" - else if (isDerivedValueClass(self)) "it is a value class" - else if (!(companionMirror || canAccessCtor)) s"the constructor of $self is innaccessible from the calling scope." + else if self.isDerivedValueClass then "it is a value class" + else if (!(companionMirror || canAccessCtor)) s"the constructor of $self is inaccessible from the calling scope." else "" end whyNotGenericProduct @@ -173,7 +181,7 @@ object SymUtils: else { val children = self.children val companionMirror = self.useCompanionAsSumMirror - val ownerScope = if pre.isInstanceOf[SingletonType] then pre.classSymbol else NoSymbol + val ownerScope = if pre.isInstanceOf[SingletonType] then pre.classSymbols else Nil def problem(child: Symbol) = { def accessibleMessage(sym: Symbol): String = @@ -183,8 +191,7 @@ object SymUtils: self.isContainedIn(sym) || sym.is(Module) && isVisibleToParent(sym.owner) def isVisibleToScope(sym: Symbol): Boolean = def isReachable: Boolean = ctx.owner.isContainedIn(sym) - def isMemberOfPrefix: Boolean = - ownerScope.exists && inherits(sym, ownerScope) + def isMemberOfPrefix: Boolean = ownerScope.exists(inherits(sym, _)) isReachable || isMemberOfPrefix || sym.is(Module) && isVisibleToScope(sym.owner) if !isVisibleToParent(sym) then i"to its parent $self" else if !companionMirror && !isVisibleToScope(sym) then i"to call site ${ctx.owner}" @@ -333,10 +340,6 @@ object SymUtils: else owner.isLocal } - /** The reachable typeRef with wildcard arguments for each type parameter */ - def reachableRawTypeRef(using Context) = - self.reachableTypeRef.appliedTo(self.typeParams.map(_ => TypeBounds.emptyPolyKind)) - /** Is symbol a type splice operation? */ def isTypeSplice(using Context): Boolean = self == defn.QuotedType_splice @@ -374,8 +377,6 @@ object SymUtils: self.hasAnnotation(defn.ExperimentalAnnot) || isDefaultArgumentOfExperimentalMethod || (!self.is(Package) && self.owner.isInExperimentalScope) - || self.topLevelClass.ownersIterator.exists(p => - p.is(Package) && p.owner.isRoot && p.name == tpnme.dotty) /** The declared self type of this class, as seen from `site`, stripping * all refinements for opaque types. 
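The experimental-scope logic above feeds user-facing checks like the following sketch; the names are invented for the example:

```scala
import scala.annotation.experimental

@experimental
def unstableApi(): Unit = ()

@experimental // dropping this annotation would make the call below an error:
def client(): Unit = unstableApi() // the caller must itself be in an experimental scope
```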
diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index 9eb67b468cfa..75c610b29140 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -7,14 +7,15 @@ import java.nio.channels.ClosedByInterruptException import scala.util.control.NonFatal +import dotty.tools.dotc.classpath.FileUtils.isTasty import dotty.tools.io.{ ClassPath, ClassRepresentation, AbstractFile } import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions -import Contexts._, Symbols._, Flags._, SymDenotations._, Types._, Scopes._, Names._ -import NameOps._ -import StdNames._ -import classfile.ClassfileParser -import Decorators._ +import Contexts.*, Symbols.*, Flags.*, SymDenotations.*, Types.*, Scopes.*, Names.* +import NameOps.* +import StdNames.* +import classfile.{ClassfileParser, ClassfileTastyUUIDParser} +import Decorators.* import util.Stats import reporting.trace @@ -23,10 +24,11 @@ import ast.desugar import parsing.JavaParsers.OutlineJavaParser import parsing.Parsers.OutlineParser - +import dotty.tools.tasty.{TastyHeaderUnpickler, UnpickleException, UnpicklerConfig, TastyVersion} +import dotty.tools.dotc.core.tasty.TastyUnpickler object SymbolLoaders { - import ast.untpd._ + import ast.untpd.* /** A marker trait for a completer that replaces the original * Symbol loader for an unpickled root. @@ -49,7 +51,7 @@ object SymbolLoaders { def enterClass( owner: Symbol, name: PreName, completer: SymbolLoader, flags: FlagSet = EmptyFlags, scope: Scope = EmptyScope)(using Context): Symbol = { - val cls = newClassSymbol(owner, name.toTypeName.unmangleClassName.decode, flags, completer, assocFile = completer.sourceFileOrNull) + val cls = newClassSymbol(owner, name.toTypeName.unmangleClassName.decode, flags, completer, compUnitInfo = completer.compilationUnitInfo) enterNew(owner, cls, completer, scope) } @@ -61,7 +63,7 @@ object SymbolLoaders { val module = newModuleSymbol( owner, name.toTermName.decode, modFlags, clsFlags, (module, _) => completer.proxy.withDecls(newScope).withSourceModule(module), - assocFile = completer.sourceFileOrNull) + compUnitInfo = completer.compilationUnitInfo) enterNew(owner, module, completer, scope) enterNew(owner, module.moduleClass, completer, scope) } @@ -192,10 +194,13 @@ object SymbolLoaders { if (ctx.settings.verbose.value) report.inform("[symloader] picked up newer source file for " + src.path) enterToplevelsFromSource(owner, nameOf(classRep), src) case (None, Some(src)) => - if (ctx.settings.verbose.value) report.inform("[symloader] no class, picked up source file for " + src.path) + if (ctx.settings.verbose.value) report.inform("[symloader] no class or tasty, picked up source file for " + src.path) enterToplevelsFromSource(owner, nameOf(classRep), src) case (Some(bin), _) => - enterClassAndModule(owner, nameOf(classRep), ctx.platform.newClassLoader(bin)) + val completer = + if bin.isTasty then ctx.platform.newTastyLoader(bin) + else ctx.platform.newClassLoader(bin) + enterClassAndModule(owner, nameOf(classRep), completer) } def needCompile(bin: AbstractFile, src: AbstractFile): Boolean = @@ -207,7 +212,7 @@ object SymbolLoaders { /** Load contents of a package */ class PackageLoader(_sourceModule: TermSymbol, classPath: ClassPath) extends SymbolLoader { - override def sourceFileOrNull: AbstractFile | Null = null + def compilationUnitInfo: CompilationUnitInfo | Null = null override def sourceModule(using Context): TermSymbol = 
_sourceModule def description(using Context): String = "package loader " + sourceModule.fullName @@ -311,7 +316,7 @@ abstract class SymbolLoader extends LazyType { self => /** Load source or class file for `root`, return */ def doComplete(root: SymDenotation)(using Context): Unit - def sourceFileOrNull: AbstractFile | Null + def compilationUnitInfo: CompilationUnitInfo | Null /** Description of the resource (ClassPath, AbstractFile) * being processed by this loader @@ -322,7 +327,7 @@ abstract class SymbolLoader extends LazyType { self => * but provides fresh slots for scope/sourceModule/moduleClass */ def proxy: SymbolLoader = new SymbolLoader { - export self.{doComplete, sourceFileOrNull} + export self.{doComplete, compilationUnitInfo} def description(using Context): String = s"proxy to ${self.description}" } @@ -399,25 +404,60 @@ abstract class SymbolLoader extends LazyType { self => class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader { - override def sourceFileOrNull: AbstractFile | Null = classfile + def compilationUnitInfo: CompilationUnitInfo | Null = CompilationUnitInfo(classfile) + def description(using Context): String = "class file " + classfile.toString override def doComplete(root: SymDenotation)(using Context): Unit = - load(root) - - def load(root: SymDenotation)(using Context): Unit = { val (classRoot, moduleRoot) = rootDenots(root.asClass) val classfileParser = new ClassfileParser(classfile, classRoot, moduleRoot)(ctx) - val result = classfileParser.run() - if (mayLoadTreesFromTasty) - result match { - case Some(unpickler: tasty.DottyUnpickler) => - classRoot.classSymbol.rootTreeOrProvider = unpickler - moduleRoot.classSymbol.rootTreeOrProvider = unpickler + classfileParser.run() +} + +class TastyLoader(val tastyFile: AbstractFile) extends SymbolLoader { + + private val unpickler: tasty.DottyUnpickler = + handleUnpicklingExceptions: + val tastyBytes = tastyFile.toByteArray + new tasty.DottyUnpickler(tastyFile, tastyBytes) // reads header and name table + + val compilationUnitInfo: CompilationUnitInfo | Null = unpickler.compilationUnitInfo + + def description(using Context): String = "TASTy file " + tastyFile.toString + + override def doComplete(root: SymDenotation)(using Context): Unit = + handleUnpicklingExceptions: + checkTastyUUID() + val (classRoot, moduleRoot) = rootDenots(root.asClass) + unpickler.enter(roots = Set(classRoot, moduleRoot, moduleRoot.sourceModule))(using ctx.withSource(util.NoSource)) + if mayLoadTreesFromTasty then + classRoot.classSymbol.rootTreeOrProvider = unpickler + moduleRoot.classSymbol.rootTreeOrProvider = unpickler + + private def handleUnpicklingExceptions[T](thunk: =>T): T = + try thunk + catch case e: RuntimeException => + val message = e match + case e: UnpickleException => + s"""TASTy file ${tastyFile.canonicalPath} could not be read, failing with: + | ${Option(e.getMessage).getOrElse("")}""".stripMargin case _ => - } - } + s"""TASTy file ${tastyFile.canonicalPath} is broken, reading aborted with ${e.getClass} + | ${Option(e.getMessage).getOrElse("")}""".stripMargin + throw IOException(message, e) + + + private def checkTastyUUID()(using Context): Unit = + val classfile = + val className = tastyFile.name.stripSuffix(".tasty") + tastyFile.resolveSibling(className + ".class") + if classfile != null then + val tastyUUID = unpickler.unpickler.header.uuid + new ClassfileTastyUUIDParser(classfile)(ctx).checkTastyUUID(tastyUUID) + else + // This will be the case in any of our tests that compile with `-Youtput-only-tasty` + 
report.inform(s"No classfiles found for $tastyFile when checking TASTy UUID") private def mayLoadTreesFromTasty(using Context): Boolean = ctx.settings.YretainTrees.value || ctx.settings.fromTasty.value @@ -425,7 +465,7 @@ class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader { class SourcefileLoader(val srcfile: AbstractFile) extends SymbolLoader { def description(using Context): String = "source file " + srcfile.toString - override def sourceFileOrNull: AbstractFile | Null = srcfile + def compilationUnitInfo: CompilationUnitInfo | Null = CompilationUnitInfo(srcfile) def doComplete(root: SymDenotation)(using Context): Unit = ctx.run.nn.lateCompile(srcfile, typeCheck = ctx.settings.YretainTrees.value) } @@ -433,7 +473,7 @@ class SourcefileLoader(val srcfile: AbstractFile) extends SymbolLoader { /** A NoCompleter which is also a SymbolLoader. */ class NoLoader extends SymbolLoader with NoCompleter { def description(using Context): String = "NoLoader" - override def sourceFileOrNull: AbstractFile | Null = null + def compilationUnitInfo: CompilationUnitInfo | Null = null override def complete(root: SymDenotation)(using Context): Unit = super[NoCompleter].complete(root) def doComplete(root: SymDenotation)(using Context): Unit = diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index 07ac2be90819..ddddaf9b07fb 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -2,23 +2,22 @@ package dotty.tools package dotc package core -import Periods._ -import Names._ -import Scopes._ -import Flags._ -import Decorators._ -import Contexts._ -import Phases._ -import SymDenotations._ -import Denotations._ -import printing.Texts._ +import Periods.* +import Names.* +import Scopes.* +import Flags.* +import Decorators.* +import Contexts.* +import Phases.* +import SymDenotations.* +import Denotations.* +import printing.Texts.* import printing.Printer -import Types._ -import util.Spans._ -import DenotTransformers._ -import StdNames._ -import NameOps._ -import transform.SymUtils._ +import Types.* +import util.Spans.* +import DenotTransformers.* +import StdNames.* +import NameOps.* import NameKinds.LazyImplicitName import ast.tpd import tpd.{Tree, TreeProvider, TreeOps} @@ -31,8 +30,12 @@ import io.AbstractFile import util.{SourceFile, NoSource, Property, SourcePosition, SrcPos, EqHashMap} import scala.annotation.internal.sharable import config.Printers.typr +import dotty.tools.dotc.classpath.FileUtils.isScalaBinary -object Symbols { +import scala.compiletime.uninitialized +import dotty.tools.tasty.TastyVersion + +object Symbols extends SymUtils { implicit def eqSymbol: CanEqual[Symbol, Symbol] = CanEqual.derived @@ -77,16 +80,17 @@ object Symbols { /** Does this symbol retain its definition tree? * A good policy for this needs to balance costs and benefits, where - * costs are mainly memoty leaks, in particular across runs. + * costs are mainly memory leaks, in particular across runs. 
*/ def retainsDefTree(using Context): Boolean = ctx.settings.YretainTrees.value || denot.owner.isTerm || // no risk of leaking memory after a run for these denot.isOneOf(InlineOrProxy) || // need to keep inline info - ctx.settings.YcheckInit.value // initialization check + ctx.settings.YcheckInit.value || // initialization check + ctx.settings.YcheckInitGlobal.value /** The last denotation of this symbol */ - private var lastDenot: SymDenotation = _ + private var lastDenot: SymDenotation = uninitialized private var checkedPeriod: Period = Nowhere private[core] def invalidateDenotCache(): Unit = { checkedPeriod = Nowhere } @@ -150,7 +154,7 @@ object Symbols { * symbols defined by the user in a prior run of the REPL, that are still valid. */ final def isDefinedInSource(using Context): Boolean = - span.exists && isValidInCurrentRun && associatedFileMatches(_.extension != "class") + span.exists && isValidInCurrentRun && associatedFileMatches(!_.isScalaBinary) /** Is symbol valid in current run? */ final def isValidInCurrentRun(using Context): Boolean = @@ -261,17 +265,32 @@ object Symbols { /** The source or class file from which this class or * the class containing this symbol was generated, null if not applicable. - * Note that this the returned classfile might be the top-level class + * Note that the returned classfile might be from the top-level class * containing this symbol instead of the directly enclosing class. - * Overridden in ClassSymbol */ def associatedFile(using Context): AbstractFile | Null = - lastDenot.topLevelClass.associatedFile + val compUnitInfo = compilationUnitInfo + if compUnitInfo == null then (null: AbstractFile | Null) + else compUnitInfo.associatedFile + + /** The compilation unit info (associated file, tasty versions, ...). + * Note that the returned CompilationUnitInfo might be from the top-level class + * containing this symbol instead of the directly enclosing class. + * Overridden in ClassSymbol + */ + def compilationUnitInfo(using Context): CompilationUnitInfo | Null = + lastDenot.topLevelClass.compilationUnitInfo + + /** The info of the TASTy from which this symbol was loaded, None if not applicable. */ + def tastyInfo(using Context): Option[TastyInfo] = + val compUnitInfo = compilationUnitInfo + if compUnitInfo == null then None + else compUnitInfo.tastyInfo /** The class file from which this class was generated, null if not applicable. */ final def binaryFile(using Context): AbstractFile | Null = { val file = associatedFile - if (file != null && file.extension == "class") file else null + if file != null && file.isScalaBinary then file else null } /** A trap to avoid calling x.symbol on something that is already a symbol. @@ -284,7 +303,7 @@ object Symbols { final def source(using Context): SourceFile = { def valid(src: SourceFile): SourceFile = - if (src.exists && src.file.extension != "class") src + if (src.exists && !src.file.isScalaBinary) src else NoSource if (!denot.exists) NoSource @@ -349,7 +368,7 @@ object Symbols { def paramRef(using Context): TypeRef = denot.typeRef /** Copy a symbol, overriding selective fields. - * Note that `coord` and `associatedFile` will be set from the fields in `owner`, not + * Note that `coord` and `compilationUnitInfo` will be set from the fields in `owner`, not * the fields in `sym`. 
*/
def copy(using Context)(
owner: Symbol = this.owner,
@@ -358,13 +377,14 @@
info: Type = this.info,
privateWithin: Symbol = this.privateWithin,
coord: Coord = NoCoord, // Can be `= owner.coord` once we bootstrap
- associatedFile: AbstractFile | Null = null // Can be `= owner.associatedFile` once we bootstrap
+ compUnitInfo: CompilationUnitInfo | Null = null // Can be `= owner.compilationUnitInfo` once we bootstrap
): Symbol = {
val coord1 = if (coord == NoCoord) owner.coord else coord
- val associatedFile1 = if (associatedFile == null) owner.associatedFile else associatedFile
+ val compilationUnitInfo1 = if (compUnitInfo == null) owner.compilationUnitInfo else compUnitInfo
+
if isClass then
- newClassSymbol(owner, name.asTypeName, flags, _ => info, privateWithin, coord1, associatedFile1)
+ newClassSymbol(owner, name.asTypeName, flags, _ => info, privateWithin, coord1, compilationUnitInfo1)
else
newSymbol(owner, name, flags, info, privateWithin, coord1)
}
@@ -392,7 +412,7 @@
type TermSymbol = Symbol { type ThisName = TermName }
type TypeSymbol = Symbol { type ThisName = TypeName }
- class ClassSymbol private[Symbols] (coord: Coord, val assocFile: AbstractFile | Null, id: Int, nestingLevel: Int)
+ class ClassSymbol private[Symbols] (coord: Coord, val compUnitInfo: CompilationUnitInfo | Null, id: Int, nestingLevel: Int)
extends Symbol(coord, id, nestingLevel) {
type ThisName = TypeName
@@ -452,9 +472,9 @@
}
/** The source or class file from which this class was generated, null if not applicable. */
- override def associatedFile(using Context): AbstractFile | Null =
- if assocFile != null || this.is(Package) || this.owner.is(Package) then assocFile
- else super.associatedFile
+ override def compilationUnitInfo(using Context): CompilationUnitInfo | Null =
+ if compUnitInfo != null || this.is(Package) || this.owner.is(Package) then compUnitInfo
+ else super.compilationUnitInfo
private var mySource: SourceFile = NoSource
@@ -462,10 +482,16 @@
if !mySource.exists && !denot.is(Package) then
// this allows sources to be added in annotations after `sourceOfClass` is first called
val file = associatedFile
- if file != null && file.extension != "class" then
+ if file != null && !file.isScalaBinary then
mySource = ctx.getSource(file)
else
mySource = defn.patchSource(this)
+ if !mySource.exists then
+ val compUnitInfo = compilationUnitInfo
+ if compUnitInfo != null then
+ compUnitInfo.tastyInfo.flatMap(_.attributes.sourceFile) match
+ case Some(path) => mySource = ctx.getSource(path)
+ case _ =>
if !mySource.exists then
mySource = atPhaseNoLater(flattenPhase) {
denot.topLevelClass.unforcedAnnotation(defn.SourceFileAnnot) match
@@ -484,7 +510,7 @@
}
@sharable object NoSymbol extends Symbol(NoCoord, 0, 0) {
- override def associatedFile(using Context): AbstractFile | Null = NoSource.file
+ override def compilationUnitInfo(using Context): CompilationUnitInfo | Null = CompilationUnitInfo(NoSource.file)
override def recomputeDenot(lastd: SymDenotation)(using Context): SymDenotation = NoDenotation
}
@@ -533,9 +559,9 @@
infoFn: ClassSymbol => Type,
privateWithin: Symbol = NoSymbol,
coord: Coord = NoCoord,
- assocFile: AbstractFile | Null = null)(using Context): ClassSymbol
+ compUnitInfo: CompilationUnitInfo | Null = null)(using Context): ClassSymbol
= {
- val cls = new ClassSymbol(coord, assocFile, ctx.base.nextSymId, ctx.nestingLevel)
+ val cls = new ClassSymbol(coord, compUnitInfo,
ctx.base.nextSymId, ctx.nestingLevel) val denot = SymDenotation(cls, owner, name, flags, infoFn(cls), privateWithin) cls.denot = denot cls @@ -551,11 +577,11 @@ object Symbols { selfInfo: Type = NoType, privateWithin: Symbol = NoSymbol, coord: Coord = NoCoord, - assocFile: AbstractFile | Null = null)(using Context): ClassSymbol = + compUnitInfo: CompilationUnitInfo | Null = null)(using Context): ClassSymbol = newClassSymbol( owner, name, flags, ClassInfo(owner.thisType, _, parents, decls, selfInfo), - privateWithin, coord, assocFile) + privateWithin, coord, compUnitInfo) /** Same as `newCompleteClassSymbol` except that `parents` can be a list of arbitrary * types which get normalized into type refs and parameter bindings. @@ -568,7 +594,7 @@ object Symbols { selfInfo: Type = NoType, privateWithin: Symbol = NoSymbol, coord: Coord = NoCoord, - assocFile: AbstractFile | Null = null)(using Context): ClassSymbol = { + compUnitInfo: CompilationUnitInfo | Null = null)(using Context): ClassSymbol = { def completer = new LazyType { def complete(denot: SymDenotation)(using Context): Unit = { val cls = denot.asClass.classSymbol @@ -576,7 +602,7 @@ object Symbols { denot.info = ClassInfo(owner.thisType, cls, parentTypes.map(_.dealias), decls, selfInfo) } } - newClassSymbol(owner, name, flags, completer, privateWithin, coord, assocFile) + newClassSymbol(owner, name, flags, completer, privateWithin, coord, compUnitInfo) } def newRefinedClassSymbol(coord: Coord = NoCoord)(using Context): ClassSymbol = @@ -594,7 +620,7 @@ object Symbols { infoFn: (TermSymbol, ClassSymbol) => Type, // typically a ModuleClassCompleterWithDecls privateWithin: Symbol = NoSymbol, coord: Coord = NoCoord, - assocFile: AbstractFile | Null = null)(using Context): TermSymbol + compUnitInfo: CompilationUnitInfo | Null = null)(using Context): TermSymbol = { val base = owner.thisType val modclsFlags = clsFlags | ModuleClassCreationFlags @@ -602,7 +628,7 @@ object Symbols { val module = newSymbol( owner, name, modFlags | ModuleValCreationFlags, NoCompleter, privateWithin, coord) val modcls = newClassSymbol( - owner, modclsName, modclsFlags, infoFn(module, _), privateWithin, coord, assocFile) + owner, modclsName, modclsFlags, infoFn(module, _), privateWithin, coord, compUnitInfo) module.info = if (modcls.isCompleted) TypeRef(owner.thisType, modcls) else new ModuleCompleter(modcls) @@ -623,12 +649,12 @@ object Symbols { decls: Scope, privateWithin: Symbol = NoSymbol, coord: Coord = NoCoord, - assocFile: AbstractFile | Null = null)(using Context): TermSymbol = + compUnitInfo: CompilationUnitInfo | Null = null)(using Context): TermSymbol = newModuleSymbol( owner, name, modFlags, clsFlags, (module, modcls) => ClassInfo( owner.thisType, modcls, parents, decls, TermRef(owner.thisType, module)), - privateWithin, coord, assocFile) + privateWithin, coord, compUnitInfo) /** Same as `newCompleteModuleSymbol` except that `parents` can be a list of arbitrary * types which get normalized into type refs and parameter bindings. 
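The change above is mechanical but easy to lose in the rename noise: every symbol factory that used to thread a bare `assocFile: AbstractFile | Null` now threads a single nullable `CompilationUnitInfo` that bundles the associated file with optional TASTy metadata. A minimal, self-contained sketch of that pattern, using hypothetical stand-in types rather than dotty's real classes:

```scala
// Hypothetical stand-ins for illustration only; dotty's real CompilationUnitInfo,
// TastyInfo and AbstractFile are richer than this.
final case class TastyVersion(major: Int, minor: Int, experimental: Int)
final case class TastyInfo(version: TastyVersion)

// One nullable record now carries what used to be a bare associated file.
final case class CompilationUnitInfo(
  associatedFile: String,             // source file or classfile path
  tastyInfo: Option[TastyInfo] = None // present only when loaded from TASTy
)

// Mirrors how `Symbol.associatedFile` and `Symbol.tastyInfo` are derived above:
def associatedFileOf(info: CompilationUnitInfo | Null): String | Null =
  if info == null then null else info.associatedFile

def tastyInfoOf(info: CompilationUnitInfo | Null): Option[TastyInfo] =
  if info == null then None else info.tastyInfo

@main def unitInfoDemo(): Unit =
  // the version numbers below are made up for the example
  val fromTasty = CompilationUnitInfo("Foo.tasty", Some(TastyInfo(TastyVersion(28, 4, 0))))
  println(associatedFileOf(fromTasty)) // Foo.tasty
  println(tastyInfoOf(null))           // None
```

The design choice worth noting is that only `ClassSymbol` stores the value; other symbols delegate to their top-level class, which is why the diff routes `associatedFile` through `compilationUnitInfo` instead of keeping two parallel fields.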
@@ -642,7 +668,7 @@ object Symbols { decls: Scope, privateWithin: Symbol = NoSymbol, coord: Coord = NoCoord, - assocFile: AbstractFile | Null = null)(using Context): TermSymbol = { + compUnitInfo: CompilationUnitInfo | Null = null)(using Context): TermSymbol = { def completer(module: Symbol) = new LazyType { def complete(denot: SymDenotation)(using Context): Unit = { val cls = denot.asClass.classSymbol @@ -653,7 +679,7 @@ object Symbols { newModuleSymbol( owner, name, modFlags, clsFlags, (module, modcls) => completer(module), - privateWithin, coord, assocFile) + privateWithin, coord, compUnitInfo) } /** Create a package symbol with associated package class @@ -693,17 +719,17 @@ object Symbols { /** Create a stub symbol that will issue a missing reference error * when attempted to be completed. */ - def newStubSymbol(owner: Symbol, name: Name, file: AbstractFile | Null = null)(using Context): Symbol = { + def newStubSymbol(owner: Symbol, name: Name, compUnitInfo: CompilationUnitInfo | Null = null)(using Context): Symbol = { def stubCompleter = new StubInfo() val normalizedOwner = if (owner.is(ModuleVal)) owner.moduleClass else owner - typr.println(s"creating stub for ${name.show}, owner = ${normalizedOwner.denot.debugString}, file = $file") + typr.println(s"creating stub for ${name.show}, owner = ${normalizedOwner.denot.debugString}, compilation unit = $compUnitInfo") typr.println(s"decls = ${normalizedOwner.unforcedDecls.toList.map(_.debugString).mkString("\n ")}") // !!! DEBUG //if (base.settings.debug.value) throw new Error() val stub = name match { case name: TermName => - newModuleSymbol(normalizedOwner, name, EmptyFlags, EmptyFlags, stubCompleter, assocFile = file) + newModuleSymbol(normalizedOwner, name, EmptyFlags, EmptyFlags, stubCompleter, compUnitInfo = compUnitInfo) case name: TypeName => - newClassSymbol(normalizedOwner, name, EmptyFlags, stubCompleter, assocFile = file) + newClassSymbol(normalizedOwner, name, EmptyFlags, stubCompleter, compUnitInfo = compUnitInfo) } stub } diff --git a/compiler/src/dotty/tools/dotc/core/TastyInfo.scala b/compiler/src/dotty/tools/dotc/core/TastyInfo.scala new file mode 100644 index 000000000000..0accd69f0adc --- /dev/null +++ b/compiler/src/dotty/tools/dotc/core/TastyInfo.scala @@ -0,0 +1,11 @@ +package dotty.tools.dotc.core + +import dotty.tools.io.AbstractFile +import dotty.tools.tasty.TastyVersion + +/** Information about the TASTy of a class symbol. 
+ *
+ * @param version The TASTy version (major, minor, experimental)
+ * @param attributes Attributes in the TASTy attributes section
+ */
+case class TastyInfo(version: TastyVersion, attributes: tasty.Attributes)
diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala
index 2e8aee4df96c..f1edd7cd8f8b 100644
--- a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala
+++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala
@@ -2,18 +2,16 @@ package dotty.tools
package dotc
package core
-import Types._
-import Contexts._
-import Symbols._
+import Types.*
+import Contexts.*
+import Symbols.*
import SymDenotations.LazyType
-import Decorators._
-import util.Stats._
-import Names._
+import Decorators.*
+import util.Stats.*
+import Names.*
import StdNames.nme
import Flags.{Module, Provisional}
import dotty.tools.dotc.config.Config
-import cc.boxedUnlessFun
-import dotty.tools.dotc.transform.TypeUtils.isErasedValueType
object TypeApplications {
@@ -37,7 +35,7 @@ object TypeApplications {
def apply(tycon: Type)(using Context): Type =
assert(tycon.typeParams.nonEmpty, tycon)
- tycon.EtaExpand(tycon.typeParamSymbols)
+ tycon.etaExpand(tycon.typeParamSymbols)
/** Test that the parameter bounds in a hk type lambda `[X1,...,Xn] => C[X1, ..., Xn]`
* contain the bounds of the type parameters of `C`. This is necessary to be able to
@@ -72,7 +70,7 @@
*/
def EtaExpandIfHK(tparams: List[TypeParamInfo], args: List[Type])(using Context): List[Type] =
if (tparams.isEmpty) args
- else args.zipWithConserve(tparams)((arg, tparam) => arg.EtaExpandIfHK(tparam.paramInfoOrCompleter))
+ else args.zipWithConserve(tparams)((arg, tparam) => arg.etaExpandIfHK(tparam.paramInfoOrCompleter))
/** A type map that tries to reduce (part of) the result type of the type lambda `tycon`
* with the given `args` (some of which are wildcard arguments represented by type bounds).
@@ -155,7 +153,7 @@ object TypeApplications {
}
}
-import TypeApplications._
+import TypeApplications.*
/** A decorator that provides methods for modeling type application */
class TypeApplications(val self: Type) extends AnyVal {
@@ -246,7 +244,7 @@ class TypeApplications(val self: Type) extends AnyVal {
def topType(using Context): Type =
if self.hasSimpleKind then
defn.AnyType
- else EtaExpand(self.typeParams) match
+ else etaExpand(self.typeParams) match
case tp: HKTypeLambda =>
tp.derivedLambdaType(resType = tp.resultType.topType)
case _ =>
@@ -303,7 +301,7 @@ class TypeApplications(val self: Type) extends AnyVal {
/** Convert a type constructor `TC` which has type parameters `X1, ..., Xn`
* to `[X1, ..., Xn] -> TC[X1, ..., Xn]`.
*/ - def EtaExpand(tparams: List[TypeParamInfo])(using Context): Type = + def etaExpand(tparams: List[TypeParamInfo])(using Context): Type = HKTypeLambda.fromParams(tparams, self.appliedTo(tparams.map(_.paramRef))) //.ensuring(res => res.EtaReduce =:= self, s"res = $res, core = ${res.EtaReduce}, self = $self, hc = ${res.hashCode}") @@ -312,7 +310,7 @@ class TypeApplications(val self: Type) extends AnyVal { if (isLambdaSub) self else EtaExpansion(self) /** Eta expand if `self` is a (non-lambda) class reference and `bound` is a higher-kinded type */ - def EtaExpandIfHK(bound: Type)(using Context): Type = { + def etaExpandIfHK(bound: Type)(using Context): Type = { val hkParams = bound.hkTypeParams if (hkParams.isEmpty) self else self match { @@ -322,6 +320,11 @@ class TypeApplications(val self: Type) extends AnyVal { } } + /** Maps [Ts] => C[Ts] to C */ + def etaCollapse(using Context): Type = self match + case EtaExpansion(classType) => classType + case _ => self + /** The type representing * * T[U1, ..., Un] @@ -354,7 +357,7 @@ class TypeApplications(val self: Type) extends AnyVal { } if ((dealiased eq stripped) || followAlias) try - val instantiated = dealiased.instantiate(args.mapConserve(_.boxedUnlessFun(self))) + val instantiated = dealiased.instantiate(args) if (followAlias) instantiated.normalized else instantiated catch case ex: IndexOutOfBoundsException => @@ -406,7 +409,7 @@ class TypeApplications(val self: Type) extends AnyVal { if (typeParams.nonEmpty) appliedTo(args) else self /** A cycle-safe version of `appliedTo` where computing type parameters do not force - * the typeconstructor. Instead, if the type constructor is completing, we make + * the type constructor. Instead, if the type constructor is completing, we make * up hk type parameters matching the arguments. This is needed when unpickling * Scala2 files such as `scala.collection.generic.Mapfactory`. */ @@ -502,15 +505,14 @@ class TypeApplications(val self: Type) extends AnyVal { * Existential types in arguments are returned as TypeBounds instances. */ final def argInfos(using Context): List[Type] = self.stripped match - case AppliedType(tycon, args) => args.boxedUnlessFun(tycon) + case AppliedType(tycon, args) => args case _ => Nil /** If this is an encoding of a function type, return its arguments, otherwise return Nil. - * Handles `ErasedFunction`s and poly functions gracefully. + * Handles poly functions gracefully. 
*/
final def functionArgInfos(using Context): List[Type] = self.dealias match
- case RefinedType(parent, nme.apply, mt: MethodType) if defn.isErasedFunctionType(parent) => (mt.paramInfos :+ mt.resultType)
- case RefinedType(parent, nme.apply, mt: MethodType) if parent.typeSymbol eq defn.PolyFunctionClass => (mt.paramInfos :+ mt.resultType)
+ case defn.PolyFunctionOf(mt: MethodType) => (mt.paramInfos :+ mt.resultType)
case _ => self.dropDependentRefinement.dealias.argInfos
/** Argument types where existential types in arguments are disallowed */
diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala
index 6857e3da38ed..38f975a8dac8 100644
--- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala
+++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala
@@ -2,33 +2,33 @@ package dotty.tools
package dotc
package core
-import Types._, Contexts._, Symbols._, Flags._, Names._, NameOps._, Denotations._
-import Decorators._
+import Types.*, Contexts.*, Symbols.*, Flags.*, Names.*, NameOps.*, Denotations.*
+import Decorators.*
import Phases.{gettersPhase, elimByNamePhase}
import StdNames.nme
import TypeOps.refineUsingParent
import collection.mutable
import util.{Stats, NoSourcePosition, EqHashMap}
import config.Config
-import config.Feature.migrateTo3
+import config.Feature.{migrateTo3, sourceVersion}
import config.Printers.{subtyping, gadts, matchTypes, noPrinter}
+import config.SourceVersion
import TypeErasure.{erasedLub, erasedGlb}
-import TypeApplications._
+import TypeApplications.*
import Variances.{Variance, variancesConform}
import Constants.Constant
-import transform.TypeUtils._
-import transform.SymUtils._
import scala.util.control.NonFatal
import typer.ProtoTypes.constrained
import typer.Applications.productSelectorTypes
import reporting.trace
import annotation.constructorOnly
-import cc.{CapturingType, derivedCapturingType, CaptureSet, stripCapturing, isBoxedCapturing, boxed, boxedUnlessFun, boxedIfTypeParam, isAlwaysPure}
+import cc.*
+import NameKinds.WildcardParamName
/** Provides methods to compare types. */
class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling, PatternTypeConstrainer {
- import TypeComparer._
+ import TypeComparer.*
Stats.record("TypeComparer")
private var myContext: Context = initctx
@@ -45,6 +45,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling
state = c.typerState
monitored = false
GADTused = false
+ opaquesUsed = false
recCount = 0
needsGc = false
if Config.checkTypeComparerReset then checkReset()
@@ -60,6 +61,9 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling
/** Indicates whether the subtype check used GADT bounds */
private var GADTused: Boolean = false
+ /** Indicates whether the subtype check used opaque types */
+ private var opaquesUsed: Boolean = false
+
private var myInstance: TypeComparer = this
def currentInstance: TypeComparer = myInstance
@@ -141,8 +145,10 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling
def testSubType(tp1: Type, tp2: Type): CompareResult =
GADTused = false
+ opaquesUsed = false
if !topLevelSubType(tp1, tp2) then CompareResult.Fail
else if GADTused then CompareResult.OKwithGADTUsed
+ else if opaquesUsed then CompareResult.OKwithOpaquesUsed // GADTused is checked first, so this also covers the case where both are used
else CompareResult.OK
/** The current approximation state. See `ApproxState`.
*/ @@ -153,7 +159,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * every time we compare components of the previous pair of types. * This type is used for capture conversion in `isSubArgs`. */ - private [this] var leftRoot: Type | Null = null + private var leftRoot: Type | Null = null /** Are we forbidden from recording GADT constraints? */ private var frozenGadt = false @@ -246,10 +252,10 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling //} assert(!ctx.settings.YnoDeepSubtypes.value) if (Config.traceDeepSubTypeRecursions && !this.isInstanceOf[ExplainingTypeComparer]) - report.log(explained(_.isSubType(tp1, tp2, approx))) + report.log(explained(_.isSubType(tp1, tp2, approx), short = false)) } // Eliminate LazyRefs before checking whether we have seen a type before - val normalize = new TypeMap { + val normalize = new TypeMap with CaptureSet.IdempotentCaptRefMap { val DerefLimit = 10 var derefCount = 0 def apply(t: Type) = t match { @@ -471,7 +477,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling tp2.isRef(AnyClass, skipRefined = false) || !tp1.evaluating && recur(tp1.ref, tp2) case AndType(tp11, tp12) => - if (tp11.stripTypeVar eq tp12.stripTypeVar) recur(tp11, tp2) + if tp11.stripTypeVar eq tp12.stripTypeVar then recur(tp11, tp2) else thirdTry case tp1 @ OrType(tp11, tp12) => compareAtoms(tp1, tp2) match @@ -494,17 +500,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling false } - /** Mark toplevel type vars in `tp2` as hard in the current constraint */ - def hardenTypeVars(tp2: Type): Unit = tp2.dealiasKeepRefiningAnnots match - case tvar: TypeVar if constraint.contains(tvar.origin) => - constraint = constraint.withHard(tvar) - case tp2: TypeParamRef if constraint.contains(tp2) => - hardenTypeVars(constraint.typeVarOfParam(tp2)) - case tp2: AndOrType => - hardenTypeVars(tp2.tp1) - hardenTypeVars(tp2.tp2) - case _ => - val res = widenOK || joinOK || recur(tp11, tp2) && recur(tp12, tp2) || containsAnd(tp1) @@ -527,16 +522,23 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling // is marked so that it converts all soft unions in its lower bound to hard unions // before it is instantiated. The reason is that the variable's instance type will // be a supertype of (decomposed and reconstituted) `tp1`. 
- hardenTypeVars(tp2) + constraint = constraint.hardenTypeVars(tp2) res - case CapturingType(parent1, refs1) => - if tp2.isAny then true - else if subCaptures(refs1, tp2.captureSet, frozenConstraint).isOK && sameBoxed(tp1, tp2, refs1) - || !ctx.mode.is(Mode.CheckBoundsOrSelfType) && tp1.isAlwaysPure - then recur(parent1, tp2) - else thirdTry + case tp1 @ CapturingType(parent1, refs1) => + def compareCapturing = + if tp2.isAny then true + else if subCaptures(refs1, tp2.captureSet, frozenConstraint).isOK && sameBoxed(tp1, tp2, refs1) + || !ctx.mode.is(Mode.CheckBoundsOrSelfType) && tp1.isAlwaysPure + then + val tp2a = + if tp1.isBoxedCapturing && !parent1.isBoxedCapturing + then tp2.unboxed + else tp2 + recur(parent1, tp2a) + else thirdTry + compareCapturing case tp1: AnnotatedType if !tp1.isRefining => recur(tp1.parent, tp2) case tp1: MatchType => @@ -567,7 +569,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling || narrowGADTBounds(tp2, tp1, approx, isUpper = false)) && (isBottom(tp1) || GADTusage(tp2.symbol)) - isSubApproxHi(tp1, info2.lo.boxedIfTypeParam(tp2.symbol)) && (trustBounds || isSubApproxHi(tp1, info2.hi)) + isSubApproxHi(tp1, info2.lo) && (trustBounds || isSubApproxHi(tp1, info2.hi)) || compareGADT || tryLiftedToThis2 || fourthTry @@ -634,7 +636,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling def compareRefined: Boolean = val tp1w = tp1.widen - if ctx.phase == Phases.checkCapturesPhase then + if isCaptureCheckingOrSetup then // A relaxed version of subtyping for dependent functions where method types // are treated as contravariant. @@ -648,10 +650,12 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case (info1: MethodType, info2: MethodType) => matchingMethodParams(info1, info2, precise = false) && isSubInfo(info1.resultType, info2.resultType.subst(info2, info1)) - case (info1 @ CapturingType(parent1, refs1), info2: Type) => + case (info1 @ CapturingType(parent1, refs1), info2: Type) + if info2.stripCapturing.isInstanceOf[MethodOrPoly] => subCaptures(refs1, info2.captureSet, frozenConstraint).isOK && sameBoxed(info1, info2, refs1) && isSubInfo(parent1, info2) - case (info1: Type, CapturingType(parent2, refs2)) => + case (info1: Type, CapturingType(parent2, refs2)) + if info1.stripCapturing.isInstanceOf[MethodOrPoly] => val refs1 = info1.captureSet (refs1.isAlwaysEmpty || subCaptures(refs1, refs2, frozenConstraint).isOK) && sameBoxed(info1, info2, refs1) && isSubInfo(info1, parent2) @@ -659,15 +663,14 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling isSubType(info1, info2) if defn.isFunctionType(tp2) then - tp1w.widenDealias match - case tp1: RefinedType => - return isSubInfo(tp1.refinedInfo, tp2.refinedInfo) - case _ => - else if tp2.parent.typeSymbol == defn.PolyFunctionClass then - tp1.member(nme.apply).info match - case info1: PolyType => - return isSubInfo(info1, tp2.refinedInfo) - case _ => + if tp2.derivesFrom(defn.PolyFunctionClass) then + return isSubInfo(tp1.member(nme.apply).info, tp2.refinedInfo) + else + tp1w.widenDealias match + case tp1: RefinedType => + return isSubInfo(tp1.refinedInfo, tp2.refinedInfo) + case _ => + end if val skipped2 = skipMatching(tp1w, tp2) if (skipped2 eq tp2) || !Config.fastPathForRefinedSubtype then @@ -731,7 +734,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case _ => val tparams1 = tp1.typeParams if (tparams1.nonEmpty) - return 
recur(tp1.EtaExpand(tparams1), tp2) || fourthTry
+ return recur(tp1.etaExpand(tparams1), tp2) || fourthTry
tp2 match {
case EtaExpansion(tycon2: TypeRef) if tycon2.symbol.isClass && tycon2.symbol.is(JavaDefined) =>
recur(tp1, tycon2) || fourthTry
@@ -865,19 +868,53 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling
fourthTry
}
- def tryBaseType(cls2: Symbol) = {
- val base = nonExprBaseType(tp1, cls2).boxedIfTypeParam(tp1.typeSymbol)
+ /** Can we widen an abstract type when comparing with `tp`?
+ * We can widen in the following cases:
+ * - if `canWidenAbstract` is true.
+ *
+ * Secondly, if `tp` is a type parameter, we can widen if:
+ * - if `tp` is not a type parameter of the matched-against case lambda
+ * - if `tp` is an invariant or wildcard type parameter
+ * - finally, allow widening, but record the type parameter in `poisoned`,
+ * so that it can be accounted for during the reduction step
+ */
+ def widenAbstractOKFor(tp: Type): Boolean =
+ val acc = new TypeAccumulator[Boolean]:
+ override def apply(x: Boolean, t: Type) =
+ x && t.match
+ case t: TypeParamRef =>
+ variance == 0
+ || (t.binder ne caseLambda)
+ || t.paramName.is(WildcardParamName)
+ || { poisoned += t; true }
+ case _ =>
+ foldOver(x, t)
+
+ canWidenAbstract && acc(true, tp)
+
+ def tryBaseType(cls2: Symbol) =
+ val base = nonExprBaseType(tp1, cls2)
if base.exists && (base ne tp1)
- && (!caseLambda.exists || canWidenAbstract || tp1.widen.underlyingClassRef(refinementOK = true).exists)
+ && (!caseLambda.exists
+ || widenAbstractOKFor(tp2)
+ || tp1.widen.underlyingClassRef(refinementOK = true).exists)
then
- isSubType(base, tp2, if (tp1.isRef(cls2)) approx else approx.addLow)
- && recordGadtUsageIf { MatchType.thatReducesUsingGadt(tp1) }
- || base.isInstanceOf[OrType] && fourthTry
- // if base is a disjunction, this might have come from a tp1 type that
+ def checkBase =
+ isSubType(base, tp2, if tp1.isRef(cls2) then approx else approx.addLow)
+ && recordGadtUsageIf { MatchType.thatReducesUsingGadt(tp1) }
+ if tp1.widenDealias.isInstanceOf[AndType] || base.isInstanceOf[OrType] then
+ // If tp1 is an intersection, it could be that one of the original
+ // branches of the AndType tp1 conforms to tp2, but its base type does
+ // not, or else that its base type for cls2 does not exist, in which case
+ // it would not show up in `base`. In either case, we need to also fall back
+ // to fourthTry. Test cases are i18266.scala and i18226a.scala.
+ // If base is a disjunction, this might have come from a tp1 type that
// expands to a match type. In this case, we should try to reduce the type
// and compare the redux. This is done in fourthTry
+ either(checkBase, fourthTry)
+ else
+ checkBase
else fourthTry
- }
def fourthTry: Boolean = tp1 match {
case tp1: TypeRef =>
@@ -889,8 +926,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling
|| narrowGADTBounds(tp1, tp2, approx, isUpper = true))
&& (tp2.isAny || GADTusage(tp1.symbol))
- (!caseLambda.exists || canWidenAbstract)
- && isSubType(hi1.boxedIfTypeParam(tp1.symbol), tp2, approx.addLow) && (trustBounds || isSubType(lo1, tp2, approx.addLow))
+ (!caseLambda.exists || widenAbstractOKFor(tp2))
+ && isSubType(hi1, tp2, approx.addLow) && (trustBounds || isSubType(lo1, tp2, approx.addLow))
|| compareGADT
|| tryLiftedToThis1
case _ =>
@@ -901,16 +938,27 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling
// However, `null` can always be a value of `T` for Java side.
// So the best solution here is to let `Null` be a subtype of non-primitive // value types temporarily. - def isNullable(tp: Type): Boolean = tp.widenDealias match + def isNullable(tp: Type): Boolean = tp.dealias match case tp: TypeRef => val tpSym = tp.symbol ctx.mode.is(Mode.RelaxedOverriding) && !tpSym.isPrimitiveValueClass || tpSym.isNullableClass + case tp: TermRef => + // https://scala-lang.org/files/archive/spec/2.13/03-types.html#singleton-types + // A singleton type is of the form p.type. Where p is a path pointing to a value which conforms to + // scala.AnyRef [Scala 3: which scala.Null conforms to], the type denotes the set of values consisting + // of null and the value denoted by p (i.e., the value v for which v eq p). [Otherwise,] the type + // denotes the set consisting of only the value denoted by p. + !ctx.explicitNulls && isNullable(tp.underlying) && tp.isStable + case tp: ThisType => + // Same as above; this.type is also a singleton type in spec language + !ctx.explicitNulls && isNullable(tp.underlying) case tp: RefinedOrRecType => isNullable(tp.parent) case tp: AppliedType => isNullable(tp.tycon) case AndType(tp1, tp2) => isNullable(tp1) && isNullable(tp2) case OrType(tp1, tp2) => isNullable(tp1) || isNullable(tp2) case AnnotatedType(tp1, _) => isNullable(tp1) + case ConstantType(c) => c.tag == Constants.NullTag case _ => false val sym1 = tp1.symbol (sym1 eq NothingClass) && tp2.isValueTypeOrLambda || @@ -931,11 +979,15 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling def tp1widened = val tp1w = tp1.underlying.widenExpr - tp1 match - case tp1: CaptureRef if tp1.isTracked => - CapturingType(tp1w.stripCapturing, tp1.singletonCaptureSet) - case _ => - tp1w + if isCaptureCheckingOrSetup then + tp1 + .match + case tp1: CaptureRef if tp1.isTracked => + CapturingType(tp1w.stripCapturing, tp1.singletonCaptureSet) + case _ => + tp1w + .withReachCaptures(tp1) + else tp1w comparePaths || isSubType(tp1widened, tp2, approx.addLow) case tp1: RefinedType => @@ -954,12 +1006,12 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling } } compareHKLambda - case AndType(tp11, tp12) => + case tp1 @ AndType(tp11, tp12) => val tp2a = tp2.dealiasKeepRefiningAnnots if (tp2a ne tp2) // Follow the alias; this might avoid truncating the search space in the either below return recur(tp1, tp2a) - // Rewrite (T111 | T112) & T12 <: T2 to (T111 & T12) <: T2 and (T112 | T12) <: T2 + // Rewrite (T111 | T112) & T12 <: T2 to (T111 & T12) <: T2 and (T112 & T12) <: T2 // and analogously for T11 & (T121 | T122) & T12 <: T2 // `&' types to the left of <: are problematic, because // we have to choose one constraint set or another, which might cut off @@ -974,17 +1026,28 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling return recur(AndType(tp11, tp121), tp2) && recur(AndType(tp11, tp122), tp2) case _ => } - val tp1norm = simplifyAndTypeWithFallback(tp11, tp12, tp1) - if (tp1 ne tp1norm) recur(tp1norm, tp2) + val tp1norm = trySimplify(tp1) + if tp1 ne tp1norm then recur(tp1norm, tp2) else either(recur(tp11, tp2), recur(tp12, tp2)) case tp1: MatchType => def compareMatch = tp2 match { case tp2: MatchType => - isSameType(tp1.scrutinee, tp2.scrutinee) && + // we allow a small number of scrutinee types to be widened: + // * skolems, which may appear from type avoidance, but are widened in the inferred result type + // * inline proxies, which is inlining's solution to the same problem + def widenScrutinee(scrutinee1: 
Type) = scrutinee1 match + case tp: TermRef if tp.symbol.is(InlineProxy) => tp.info + case tp => tp.widenSkolem + def checkScrutinee(scrutinee1: Type): Boolean = + isSameType(scrutinee1, tp2.scrutinee) || { + val widenScrutinee1 = widenScrutinee(scrutinee1) + (widenScrutinee1 ne scrutinee1) && checkScrutinee(widenScrutinee1) + } + checkScrutinee(tp1.scrutinee) && tp1.cases.corresponds(tp2.cases)(isSubType) case _ => false } - recur(tp1.underlying, tp2) || compareMatch + (!caseLambda.exists || canWidenAbstract) && recur(tp1.underlying, tp2) || compareMatch case tp1: AnnotatedType if tp1.isRefining => isNewSubType(tp1.parent) case JavaArrayType(elem1) => @@ -1433,9 +1496,30 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling /** Like tp1 <:< tp2, but returns false immediately if we know that * the case was covered previously during subtyping. + * + * A type has been covered previously in subtype checking if it + * is some combination of TypeRefs that point to classes, where the + * combiners are AppliedTypes, RefinedTypes, RecTypes, And/Or-Types or AnnotatedTypes. + * + * The exception is that if both sides contain OrTypes, the check hasn't been covered. + * See #17465. */ def isNewSubType(tp1: Type): Boolean = - if (isCovered(tp1) && isCovered(tp2)) + def isCovered(tp: Type): CoveredStatus = + tp.dealiasKeepRefiningAnnots.stripTypeVar match + case tp: TypeRef => + if tp.symbol.isClass && tp.symbol != NothingClass && tp.symbol != NullClass + then CoveredStatus.Covered + else CoveredStatus.Uncovered + case tp: AppliedType => isCovered(tp.tycon) + case tp: RefinedOrRecType => isCovered(tp.parent) + case tp: AndType => isCovered(tp.tp1) min isCovered(tp.tp2) + case tp: OrType => isCovered(tp.tp1) min isCovered(tp.tp2) min CoveredStatus.CoveredWithOr + case _ => CoveredStatus.Uncovered + + val covered1 = isCovered(tp1) + val covered2 = isCovered(tp2) + if (covered1 min covered2) >= CoveredStatus.CoveredWithOr && (covered1 max covered2) == CoveredStatus.Covered then //println(s"useless subtype: $tp1 <:< $tp2") false else isSubType(tp1, tp2, approx.addLow) @@ -1445,12 +1529,12 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling def tryLiftedToThis1: Boolean = { val tp1a = liftToThis(tp1) - (tp1a ne tp1) && recur(tp1a, tp2) + (tp1a ne tp1) && recur(tp1a, tp2) && { opaquesUsed = true; true } } def tryLiftedToThis2: Boolean = { val tp2a = liftToThis(tp2) - (tp2a ne tp2) && recur(tp1, tp2a) + (tp2a ne tp2) && recur(tp1, tp2a) && { opaquesUsed = true; true } } // begin recur @@ -1697,7 +1781,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling else if v > 0 then isSubType(arg1, arg2) else isSameType(arg2, arg1) - isSubArg(args1.head.boxedUnlessFun(tp1), args2.head.boxedUnlessFun(tp1)) + isSubArg(args1.head, args2.head) } && recurArgs(args1.tail, args2.tail, tparams2.tail) recurArgs(args1, args2, tparams2) @@ -1729,7 +1813,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling private def fixRecs(anchor: SingletonType, tp: Type): Type = { def fix(tp: Type): Type = tp.stripTypeVar match { case tp: RecType => fix(tp.parent).substRecThis(tp, anchor) - case tp @ RefinedType(parent, rname, rinfo) => tp.derivedRefinedType(fix(parent), rname, rinfo) + case tp: RefinedType => tp.derivedRefinedType(parent = fix(tp.parent)) case tp: TypeParamRef => fixOrElse(bounds(tp).hi, tp) case tp: TypeProxy => fixOrElse(tp.superType, tp) case tp: AndType => tp.derivedAndType(fix(tp.tp1), 
fix(tp.tp2)) @@ -1749,7 +1833,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * any, or no constraint at all. * * Otherwise, we infer _sufficient_ constraints: we try to keep the smaller of - * the two constraints, but if never is smaller than the other, we just pick + * the two constraints, but if neither is smaller than the other, we just pick * the first one. */ protected def either(op1: => Boolean, op2: => Boolean): Boolean = @@ -1899,6 +1983,13 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling else op2 end necessaryEither + inline def rollbackConstraintsUnless(inline op: Boolean): Boolean = + val saved = constraint + var result = false + try result = ctx.gadtState.rollbackGadtUnless(op) + finally if !result then constraint = saved + result + /** Decompose into conjunction of types each of which has only a single refinement */ def decomposeRefinements(tp: Type, refines: List[(Name, Type)]): Type = tp match case RefinedType(parent, rname, rinfo) => @@ -1959,7 +2050,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling // is that if the refinement does not refer to a member symbol, we will have to // resort to reflection to invoke the member. And Java reflection needs to know exact // erased parameter types. See neg/i12211.scala. Other reflection algorithms could - // conceivably dispatch without knowning precise parameter signatures. One can signal + // conceivably dispatch without knowing precise parameter signatures. One can signal // this by inheriting from the `scala.reflect.SignatureCanBeImprecise` marker trait, // in which case the signature test is elided. def sigsOK(symInfo: Type, info2: Type) = @@ -1971,7 +2062,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling def tp1IsSingleton: Boolean = tp1.isInstanceOf[SingletonType] // A relaxed version of isSubType, which compares method types - // under the standard arrow rule which is contravarient in the parameter types, + // under the standard arrow rule which is contravariant in the parameter types, // but under the condition that signatures might have to match (see sigsOK) // This relaxed version is needed to correctly compare dependent function types. // See pos/i12211.scala. 
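The `rollbackConstraintsUnless` helper introduced above is a small but important idiom: the current constraint is an immutable value held in a mutable slot, so it can be snapshotted before a speculative check and reinstated if the check fails. A compilable toy version of the same save/try/restore shape, with a hypothetical `ConstraintSketch` store in place of dotty's `Constraint` and GADT state:

```scala
// Toy model of snapshot-and-rollback; `ConstraintSketch` is a made-up stand-in.
final class ConstraintSketch:
  // An immutable value in a mutable slot, like TypeComparer's `constraint`.
  var constraint: List[String] = Nil

  inline def rollbackConstraintsUnless(inline op: Boolean): Boolean =
    val saved = constraint                     // snapshot the immutable value
    var result = false
    try result = op                            // run the speculative check
    finally if !result then constraint = saved // restore on failure (or exception)
    result

@main def rollbackDemo(): Unit =
  val cs = ConstraintSketch()
  val ok = cs.rollbackConstraintsUnless {
    cs.constraint = "A <: B" :: cs.constraint // tentatively record a constraint
    false                                     // ...but the overall check fails
  }
  println((ok, cs.constraint)) // (false,List()): the tentative constraint was undone
```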
@@ -1988,14 +2079,26 @@
case _ => inFrozenGadtIf(tp1IsSingleton) { isSubType(info1, info2) }
def qualifies(m: SingleDenotation): Boolean =
- val info1 = m.info.widenExpr
- isSubInfo(info1, tp2.refinedInfo.widenExpr, m.symbol.info.orElse(info1))
- || matchAbstractTypeMember(m.info)
- || (tp1.isStable && isSubType(TermRef(tp1, m.symbol), tp2.refinedInfo))
-
- tp1.member(name) match // inlined hasAltWith for performance
- case mbr: SingleDenotation => qualifies(mbr)
- case mbr => mbr hasAltWith qualifies
+ val info2 = tp2.refinedInfo
+ val isExpr2 = info2.isInstanceOf[ExprType]
+ var info1 = m.info match
+ case info1: ValueType if isExpr2 || m.symbol.is(Mutable) =>
+ // OK: { val x: T } <: { def x: T }
+ // OK: { var x: T } <: { def x: T }
+ // NO: { var x: T } <: { val x: T }
+ ExprType(info1)
+ case info1 @ MethodType(Nil) if isExpr2 && m.symbol.is(JavaDefined) =>
+ // OK: { def x(): T } <: { def x: T } // if x is Java defined
+ ExprType(info1.resType)
+ case info1 => info1
+
+ m.symbol.hasTargetName(m.symbol.name) && (
+ isSubInfo(info1, info2, m.symbol.info.orElse(info1))
+ || matchAbstractTypeMember(m.info)
+ || (tp1.isStable && m.symbol.isStableMember && isSubType(TermRef(tp1, m.symbol), tp2.refinedInfo)))
+ end qualifies
+
+ tp1.member(name).hasAltWithInline(qualifies)
}
final def ensureStableSingleton(tp: Type): SingletonType = tp.stripTypeVar match {
@@ -2036,19 +2139,6 @@
tp1.parent.asInstanceOf[RefinedType],
tp2.parent.asInstanceOf[RefinedType], limit))
- /** A type has been covered previously in subtype checking if it
- * is some combination of TypeRefs that point to classes, where the
- * combiners are AppliedTypes, RefinedTypes, RecTypes, And/Or-Types or AnnotatedTypes.
- */
- private def isCovered(tp: Type): Boolean = tp.dealiasKeepRefiningAnnots.stripTypeVar match {
- case tp: TypeRef => tp.symbol.isClass && tp.symbol != NothingClass && tp.symbol != NullClass
- case tp: AppliedType => isCovered(tp.tycon)
- case tp: RefinedOrRecType => isCovered(tp.parent)
- case tp: AndType => isCovered(tp.tp1) && isCovered(tp.tp2)
- case tp: OrType => isCovered(tp.tp1) && isCovered(tp.tp2)
- case _ => false
- }
-
/** Defer constraining type variables when compared against prototypes */
def isMatchedByProto(proto: ProtoType, tp: Type): Boolean = tp.stripTypeVar match {
case tp: TypeParamRef if constraint contains tp => true
@@ -2118,7 +2208,7 @@
val paramsMatch =
if precise then
isSameTypeWhenFrozen(formal1, formal2a)
- else if ctx.phase == Phases.checkCapturesPhase then
+ else if isCaptureCheckingOrSetup then
// allow to constrain capture set variables
isSubType(formal2a, formal1)
else
@@ -2270,7 +2360,6 @@
/** The least upper bound of two types
* @param canConstrain If true, new constraints might be added to simplify the lub.
* @param isSoft If the lub is a union, this determines whether it's a soft union.
- * @note We do not admit singleton types in or-types as lubs.
*/
def lub(tp1: Type, tp2: Type, canConstrain: Boolean = false, isSoft: Boolean = true): Type = /*>|>*/ trace(s"lub(${tp1.show}, ${tp2.show}, canConstrain=$canConstrain, isSoft=$isSoft)", subtyping, show = true) /*<|<*/ {
if (tp1 eq tp2) tp1
@@ -2286,7 +2375,7 @@
case Atoms.Range(lo2, hi2) =>
if hi1.subsetOf(lo2) then return tp2
if hi2.subsetOf(lo1) then return tp1
- if (hi1 & hi2).isEmpty then return orType(tp1, tp2)
+ if (hi1 & hi2).isEmpty then return orType(tp1, tp2, isSoft = isSoft)
case none =>
case none =>
val t1 = mergeIfSuper(tp1, tp2, canConstrain)
@@ -2451,8 +2540,9 @@
final def andType(tp1: Type, tp2: Type, isErased: Boolean = ctx.erasedTypes): Type =
andTypeGen(tp1, tp2, AndType.balanced(_, _), isErased = isErased)
- final def simplifyAndTypeWithFallback(tp1: Type, tp2: Type, fallback: Type): Type =
- andTypeGen(tp1, tp2, (_, _) => fallback)
+ /** Try to simplify AndType, or return the type itself if no simplifying opportunities exist. */
+ private def trySimplify(tp: AndType): Type =
+ andTypeGen(tp.tp1, tp.tp2, (_, _) => tp)
/** Form a normalized conjunction of two types.
* Note: For certain types, `|` is distributed inside the type. This holds for
@@ -2546,12 +2636,11 @@
case tp1: TypeVar if tp1.isInstantiated =>
tp1.underlying & tp2
case CapturingType(parent1, refs1) =>
- if subCaptures(tp2.captureSet, refs1, frozen = true).isOK
+ val refs2 = tp2.captureSet
+ if subCaptures(refs2, refs1, frozen = true).isOK
&& tp1.isBoxedCapturing == tp2.isBoxedCapturing
- then
- parent1 & tp2
- else
- tp1.derivedCapturingType(parent1 & tp2, refs1)
+ then (parent1 & tp2).capturing(refs2)
+ else tp1.derivedCapturingType(parent1 & tp2, refs1)
case tp1: AnnotatedType if !tp1.isRefining =>
tp1.underlying & tp2
case _ =>
@@ -2682,29 +2771,7 @@
false
} || tycon.derivesFrom(defn.PairClass)
- /** Is `tp` an empty type?
- *
- * `true` implies that we found a proof; uncertainty defaults to `false`.
- */
- def provablyEmpty(tp: Type): Boolean =
- tp.dealias match {
- case tp if tp.isExactlyNothing => true
- case AndType(tp1, tp2) => provablyDisjoint(tp1, tp2)
- case OrType(tp1, tp2) => provablyEmpty(tp1) && provablyEmpty(tp2)
- case at @ AppliedType(tycon, args) =>
- args.lazyZip(tycon.typeParams).exists { (arg, tparam) =>
- tparam.paramVarianceSign >= 0
- && provablyEmpty(arg)
- && typeparamCorrespondsToField(tycon, tparam)
- }
- case tp: TypeProxy =>
- provablyEmpty(tp.underlying)
- case _ => false
- }
-
- /** Are `tp1` and `tp2` provablyDisjoint types?
- *
- * `true` implies that we found a proof; uncertainty defaults to `false`.
+ /** Are `tp1` and `tp2` provablyDisjoint types, i.e., is `tp1 ⋔ tp2` true?
*
* Proofs rely on the following properties of Scala types:
*
@@ -2717,156 +2784,268 @@
* Note on soundness: the correctness of match types relies on the
* property that in all possible contexts, the same match type expression
* is either stuck or reduces to the same case.
+ *
+ * This method must adhere to the specification of disjointness in SIP-56:
+ * https://docs.scala-lang.org/sips/match-types-spec.html#disjointness
+ *
+ * The pattern matcher reachability test uses it for its own purposes, so we
+ * generalize it to *also* handle type variables and their GADT bounds.
+ * This is fine because match type reduction always operates under frozen
+ * GADT constraints.
+ *
+ * Other than that generalization, `provablyDisjoint` must not depart from
+ * the specified "provably disjoint" relation. In particular, it is not
+ * allowed to reply `false` instead of "I don't know". It must say `true`
+ * iff the spec says `true` and must say `false` iff the spec says `false`.
*/
- def provablyDisjoint(tp1: Type, tp2: Type)(using Context): Boolean = trace(i"provable disjoint $tp1, $tp2", matchTypes) {
- // println(s"provablyDisjoint(${tp1.show}, ${tp2.show})")
+ def provablyDisjoint(tp1: Type, tp2: Type)(using Context): Boolean =
+ provablyDisjoint(tp1, tp2, null)
- def isEnumValue(ref: TermRef): Boolean =
- val sym = ref.termSymbol
- sym.isAllOf(EnumCase, butNot=JavaDefined)
+ private def provablyDisjoint(tp1: Type, tp2: Type, pending: util.HashSet[(Type, Type)] | Null)(
+ using Context): Boolean = trace(i"provable disjoint $tp1, $tp2", matchTypes) {
+ // println(s"provablyDisjoint(${tp1.show}, ${tp2.show})")
- def isEnumValueOrModule(ref: TermRef): Boolean =
- isEnumValue(ref) || ref.termSymbol.is(Module) || (ref.info match {
- case tp: TermRef => isEnumValueOrModule(tp)
- case _ => false
- })
-
- def fullyInstantiated(tp: Type): Boolean = new TypeAccumulator[Boolean] {
- override def apply(x: Boolean, t: Type) =
- x && {
- t.dealias match {
- case tp: TypeRef if !tp.symbol.isClass => false
- case _: SkolemType | _: TypeVar | _: TypeParamRef | _: TypeBounds => false
- case _ => foldOver(x, t)
- }
- }
- }.apply(true, tp)
-
- (tp1.dealias, tp2.dealias) match {
- case _ if !ctx.erasedTypes && tp2.isFromJavaObject =>
- provablyDisjoint(tp1, defn.AnyType)
- case _ if !ctx.erasedTypes && tp1.isFromJavaObject =>
- provablyDisjoint(defn.AnyType, tp2)
- case (tp1: TypeRef, _) if tp1.symbol == defn.SingletonClass =>
- false
- case (_, tp2: TypeRef) if tp2.symbol == defn.SingletonClass =>
+ // Computes ⌈tp⌉ (see the spec), generalized to handle GADT bounds
+ @scala.annotation.tailrec
+ def disjointnessBoundary(tp: Type): Type = tp match
+ case tp: TypeRef =>
+ tp.symbol match
+ case cls: ClassSymbol =>
+ if cls == defn.SingletonClass then defn.AnyType
+ else if cls.typeParams.nonEmpty then EtaExpansion(tp)
+ else tp
+ case sym =>
+ if !ctx.erasedTypes && sym == defn.FromJavaObjectSymbol then defn.AnyType
+ else
+ val optGadtBounds = gadtBounds(sym)
+ if optGadtBounds != null then disjointnessBoundary(optGadtBounds.hi)
+ else disjointnessBoundary(tp.superTypeNormalized)
+ case tp @ AppliedType(tycon: TypeRef, targs) if tycon.symbol.isClass =>
+ /* The theory says we should just return `tp` here. However, due to how
+ * baseType works (called from `isBaseTypeWithDisjointArguments`),
+ * it can create infinitely growing towers of `AnnotatedType`s. This
+ * defeats the infinite recursion detection with the `pending` set.
+ * Therefore, we eagerly remove all non-refining annotations. We are
+ * allowed to do that because they don't affect subtyping (so cannot
+ * create an ill-kinded `AppliedType`) and would anyway be stripped
+ * later on by the recursive calls to `provablyDisjoint`, through
+ * `disjointnessBoundary`).
+ * See tests/pos/provably-disjoint-infinite-recursion-1.scala for an example.
+ */
+ tp.derivedAppliedType(
+ tycon,
+ targs.mapConserve(_.stripAnnots(keep = _.symbol.derivesFrom(defn.RefiningAnnotationClass)))
+ )
+ case tp: TermRef =>
+ val isEnumValue = tp.termSymbol.isAllOf(EnumCase, butNot = JavaDefined)
+ if isEnumValue then tp
+ else
+ val optGadtBounds = gadtBounds(tp.symbol)
+ if optGadtBounds != null then disjointnessBoundary(optGadtBounds.hi)
+ else disjointnessBoundary(tp.superTypeNormalized)
+ case tp: AndOrType =>
+ tp
+ case tp: ConstantType =>
+ tp
+ case tp: HKTypeLambda =>
+ tp
+ case tp: TypeProxy =>
+ disjointnessBoundary(tp.superTypeNormalized)
+ case tp: WildcardType =>
+ disjointnessBoundary(tp.effectiveBounds.hi)
+ case tp: ErrorType =>
+ defn.AnyType
+ end disjointnessBoundary
+
+ (disjointnessBoundary(tp1), disjointnessBoundary(tp2)) match
+ // Infinite recursion detection
+ case pair if pending != null && pending.contains(pair) => false
+
+ // Cases where there is an intersection or union on the right
+ case (tp1, tp2: OrType) =>
+ provablyDisjoint(tp1, tp2.tp1, pending) && provablyDisjoint(tp1, tp2.tp2, pending)
+ case (tp1, tp2: AndType) =>
+ provablyDisjoint(tp1, tp2.tp1, pending) || provablyDisjoint(tp1, tp2.tp2, pending)
+
+ // Cases where there is an intersection or union on the left but not on the right
+ case (tp1: OrType, tp2) =>
+ provablyDisjoint(tp1.tp1, tp2, pending) && provablyDisjoint(tp1.tp2, tp2, pending)
+ case (tp1: AndType, tp2) =>
+ provablyDisjoint(tp1.tp1, tp2, pending) || provablyDisjoint(tp1.tp2, tp2, pending)
+
+ // Cases involving type lambdas
+ case (tp1: HKTypeLambda, tp2: HKTypeLambda) =>
+ tp1.paramNames.sizeCompare(tp2.paramNames) != 0
+ || provablyDisjoint(tp1.resultType, tp2.resultType, pending)
+ case (tp1: HKTypeLambda, tp2) =>
+ !tp2.isDirectRef(defn.AnyKindClass)
+ case (tp1, tp2: HKTypeLambda) =>
+ !tp1.isDirectRef(defn.AnyKindClass)
+
+ /* Cases where both are unique values (enum cases or constant types)
+ *
+ * When both are TermRef's, we look at the symbols. We do not try to
+ * prove disjointness based on the prefixes.
+ *
+ * Otherwise, we know everything there is to know about our two types.
+ * Therefore, a direct subtype test is enough to decide disjointness.
+ */
+ case (tp1: TermRef, tp2: TermRef) =>
+ tp1.symbol != tp2.symbol
case (tp1: ConstantType, tp2: ConstantType) =>
- tp1 != tp2
- case (tp1: TypeRef, tp2: TypeRef) if tp1.symbol.isClass && tp2.symbol.isClass =>
- val cls1 = tp1.classSymbol
- val cls2 = tp2.classSymbol
- def isDecomposable(tp: Symbol): Boolean =
- tp.is(Sealed) && !tp.hasAnonymousChild
- def decompose(sym: Symbol, tp: Type): List[Type] =
- sym.children.map(x => refineUsingParent(tp, x)).filter(_.exists)
- if (cls1.derivesFrom(cls2) || cls2.derivesFrom(cls1))
- false
- else
- if (cls1.is(Final) || cls2.is(Final))
- // One of these types is final and they are not mutually
- // subtype, so they must be unrelated.
- true
- else if (!cls2.is(Trait) && !cls1.is(Trait))
- // Both of these types are classes and they are not mutually
- // subtype, so they must be unrelated by single inheritance
- // of classes.
- true
- else if (isDecomposable(cls1))
- // At this point, !cls1.derivesFrom(cls2): we know that direct
- // instantiations of `cls1` (terms of the form `new cls1`) are not
- // of type `tp2`. Therefore, we can safely decompose `cls1` using
- // `.children`, even if `cls1` is non abstract.
- decompose(cls1, tp1).forall(x => provablyDisjoint(x, tp2)) - else if (isDecomposable(cls2)) - decompose(cls2, tp2).forall(x => provablyDisjoint(x, tp1)) + tp1.value != tp2.value + case (tp1: SingletonType, tp2: SingletonType) => + true // a TermRef and a ConstantType, in either direction + + /* Cases where one is a unique value and the other a possibly-parameterized + * class type. Again, we do not look at prefixes, so we test whether the + * unique value derives from the class. + */ + case (tp1: SingletonType, tp2) => + !tp1.derivesFrom(tp2.classSymbol) + case (tp1, tp2: SingletonType) => + !tp2.derivesFrom(tp1.classSymbol) + + /* Now both sides are possibly-parameterized class types `p.C[Ts]` and `q.D[Us]`. + * + * First, we try to show that C and D are entirely disjoint, independently + * of the type arguments, based on their `final` status and `class` status. + * + * Otherwise, we look at all the common baseClasses of tp1 and tp2, and + * try to find one common base class `E` such that `baseType(tp1, E)` and + * `baseType(tp2, E)` can be proven disjoint based on the type arguments. + * + * Regardless, we do not look at prefixes. + */ + case tpPair @ (tp1, tp2) => + val cls1 = tp1.classSymbol.asClass + val cls2 = tp2.classSymbol.asClass + + def isBaseTypeWithDisjointArguments(baseClass: ClassSymbol, pending: util.HashSet[(Type, Type)]): Boolean = + if baseClass.typeParams.isEmpty then + // A common mono base class can never be disjoint thanks to type params + false else + (tp1.baseType(baseClass), tp2.baseType(baseClass)) match + case (AppliedType(tycon1, args1), AppliedType(tycon2, args2)) => + provablyDisjointTypeArgs(baseClass, args1, args2, pending) + case _ => + false + end isBaseTypeWithDisjointArguments + + def typeArgsMatch(tp: Type, cls: ClassSymbol): Boolean = + val typeArgs = tp match + case tp: TypeRef => Nil + case AppliedType(_, args) => args + cls.typeParams.sizeCompare(typeArgs) == 0 + + def existsCommonBaseTypeWithDisjointArguments: Boolean = + if !typeArgsMatch(tp1, cls1) || !typeArgsMatch(tp2, cls2) then + /* We have an unapplied polymorphic class type or otherwise not star-kinded one. + * This does not happen with match types, but happens when coming from the Space engine. + * In that case, we cannot prove disjointness based on type arguments. + */ false - case (AppliedType(tycon1, args1), AppliedType(tycon2, args2)) if isSame(tycon1, tycon2) => - // It is possible to conclude that two types applies are disjoint by - // looking at covariant type parameters if the said type parameters - // are disjoin and correspond to fields. - // (Type parameter disjointness is not enough by itself as it could - // lead to incorrect conclusions for phantom type parameters). - def covariantDisjoint(tp1: Type, tp2: Type, tparam: TypeParamInfo): Boolean = - provablyDisjoint(tp1, tp2) && typeparamCorrespondsToField(tycon1, tparam) - - // In the invariant case, we also use a stronger notion of disjointness: - // we consider fully instantiated types not equal wrt =:= to be disjoint - // (under any context). This is fine because it matches the runtime - // semantics of pattern matching. To implement a pattern such as - // `case Inv[T] => ...`, one needs a type tag for `T` and the compiler - // is used at runtime to check it the scrutinee's type is =:= to `T`. - // Note that this is currently a theoretical concern since Dotty - // doesn't have type tags, meaning that users cannot write patterns - // that do type tests on higher kinded types. 
- def invariantDisjoint(tp1: Type, tp2: Type, tparam: TypeParamInfo): Boolean = - provablyDisjoint(tp1, tp2) || - !isSameType(tp1, tp2) && - fullyInstantiated(tp1) && // We can only trust a "no" from `isSameType` when - fullyInstantiated(tp2) // both `tp1` and `tp2` are fully instantiated. - - args1.lazyZip(args2).lazyZip(tycon1.typeParams).exists { - (arg1, arg2, tparam) => - val v = tparam.paramVarianceSign - if (v > 0) - covariantDisjoint(arg1, arg2, tparam) - else if (v < 0) - // Contravariant case: a value where this type parameter is - // instantiated to `Any` belongs to both types. - false - else - invariantDisjoint(arg1, arg2, tparam) - } - case (tp1: HKLambda, tp2: HKLambda) => - provablyDisjoint(tp1.resType, tp2.resType) - case (_: HKLambda, _) => - // The intersection of these two types would be ill kinded, they are therefore provablyDisjoint. + else + /* We search among the common base classes of `cls1` and `cls2`. + * We exclude any base class that is an ancestor of one of the other base classes: + * they are useless, since anything discovered at their level would also be discovered at + * the level of the descendant common base class. + */ + val innerPending = + if pending != null then pending + else util.HashSet[(Type, Type)]() + innerPending += tpPair + + val cls2BaseClassSet = SymDenotations.BaseClassSet(cls2.classDenot.baseClasses) + val commonBaseClasses = cls1.classDenot.baseClasses.filter(cls2BaseClassSet.contains(_)) + def isAncestorOfOtherBaseClass(cls: ClassSymbol): Boolean = + commonBaseClasses.exists(other => (other ne cls) && other.derivesFrom(cls)) + val result = commonBaseClasses.exists { baseClass => + !isAncestorOfOtherBaseClass(baseClass) && isBaseTypeWithDisjointArguments(baseClass, innerPending) + } + + innerPending -= tpPair + result + end existsCommonBaseTypeWithDisjointArguments + + provablyDisjointClasses(cls1, cls2) + || existsCommonBaseTypeWithDisjointArguments + end match + } + + private def provablyDisjointClasses(cls1: Symbol, cls2: Symbol)(using Context): Boolean = + def isDecomposable(cls: Symbol): Boolean = + cls.is(Sealed) && !cls.hasAnonymousChild + + def decompose(cls: Symbol): List[Symbol] = + cls.children.map { child => + if child.isTerm then child.info.classSymbol + else child + }.filter(child => child.exists && child != cls) + + // TODO? Special-case for Nothing and Null? We probably need Nothing/Null disjoint from Nothing/Null + def eitherDerivesFromOther(cls1: Symbol, cls2: Symbol): Boolean = + cls1.derivesFrom(cls2) || cls2.derivesFrom(cls1) + + def smallestNonTraitBase(cls: Symbol): Symbol = + cls.asClass.baseClasses.find(!_.is(Trait)).get + + if cls1 == defn.AnyKindClass || cls2 == defn.AnyKindClass then + // For some reason, A.derivesFrom(AnyKind) returns false, so we have to handle it specially + false + else if (eitherDerivesFromOther(cls1, cls2)) + false + else + if (cls1.is(Final) || cls2.is(Final)) + // One of these types is final and they are not mutually + // subtype, so they must be unrelated. true - case (_, _: HKLambda) => + else if (!eitherDerivesFromOther(smallestNonTraitBase(cls1), smallestNonTraitBase(cls2))) then + // The traits extend a pair of non-trait classes that are not mutually subtypes, + // so they must be unrelated by single inheritance of classes. 
true - case (tp1: OrType, _) => - provablyDisjoint(tp1.tp1, tp2) && provablyDisjoint(tp1.tp2, tp2) - case (_, tp2: OrType) => - provablyDisjoint(tp1, tp2.tp1) && provablyDisjoint(tp1, tp2.tp2) - case (tp1: AndType, _) => - !(tp1 <:< tp2) - && (provablyDisjoint(tp1.tp2, tp2) || provablyDisjoint(tp1.tp1, tp2)) - case (_, tp2: AndType) => - !(tp2 <:< tp1) - && (provablyDisjoint(tp1, tp2.tp2) || provablyDisjoint(tp1, tp2.tp1)) - case (tp1: NamedType, _) if gadtBounds(tp1.symbol) != null => - provablyDisjoint(gadtBounds(tp1.symbol).uncheckedNN.hi, tp2) - || provablyDisjoint(tp1.superTypeNormalized, tp2) - case (_, tp2: NamedType) if gadtBounds(tp2.symbol) != null => - provablyDisjoint(tp1, gadtBounds(tp2.symbol).uncheckedNN.hi) - || provablyDisjoint(tp1, tp2.superTypeNormalized) - case (tp1: TermRef, tp2: TermRef) if isEnumValueOrModule(tp1) && isEnumValueOrModule(tp2) => - tp1.termSymbol != tp2.termSymbol - case (tp1: TermRef, tp2: TypeRef) if isEnumValue(tp1) => - fullyInstantiated(tp2) && !tp1.classSymbols.exists(_.derivesFrom(tp2.symbol)) - case (tp1: TypeRef, tp2: TermRef) if isEnumValue(tp2) => - fullyInstantiated(tp1) && !tp2.classSymbols.exists(_.derivesFrom(tp1.symbol)) - case (tp1: RefinedType, tp2: RefinedType) if tp1.refinedName == tp2.refinedName => - provablyDisjoint(tp1.parent, tp2.parent) || provablyDisjoint(tp1.refinedInfo, tp2.refinedInfo) - case (tp1: TypeAlias, tp2: TypeAlias) => - provablyDisjoint(tp1.alias, tp2.alias) - case (tp1: Type, tp2: Type) if defn.isTupleNType(tp1) => - provablyDisjoint(tp1.toNestedPairs, tp2) - case (tp1: Type, tp2: Type) if defn.isTupleNType(tp2) => - provablyDisjoint(tp1, tp2.toNestedPairs) - case (tp1: TypeProxy, tp2: TypeProxy) => - provablyDisjoint(tp1.superTypeNormalized, tp2) || provablyDisjoint(tp1, tp2.superTypeNormalized) - case (tp1: TypeProxy, _) => - provablyDisjoint(tp1.superTypeNormalized, tp2) - case (_, tp2: TypeProxy) => - provablyDisjoint(tp1, tp2.superTypeNormalized) - case _ => + else if (isDecomposable(cls1)) + // At this point, !cls1.derivesFrom(cls2): we know that direct + // instantiations of `cls1` (terms of the form `new cls1`) are not + // of type `tp2`. Therefore, we can safely decompose `cls1` using + // `.children`, even if `cls1` is non abstract. + decompose(cls1).forall(x => provablyDisjointClasses(x, cls2)) + else if (isDecomposable(cls2)) + decompose(cls2).forall(x => provablyDisjointClasses(cls1, x)) + else false + end provablyDisjointClasses + + private def provablyDisjointTypeArgs(cls: ClassSymbol, args1: List[Type], args2: List[Type], pending: util.HashSet[(Type, Type)])(using Context): Boolean = + // It is possible to conclude that two types applied are disjoint by + // looking at covariant type parameters if the said type parameters + // are disjoint and correspond to fields. + // (Type parameter disjointness is not enough by itself as it could + // lead to incorrect conclusions for phantom type parameters). + def covariantDisjoint(tp1: Type, tp2: Type, tparam: TypeParamInfo): Boolean = + provablyDisjoint(tp1, tp2, pending) && typeparamCorrespondsToField(cls.appliedRef, tparam) + + // In the invariant case, direct type parameter disjointness is enough. 
+ def invariantDisjoint(tp1: Type, tp2: Type, tparam: TypeParamInfo): Boolean = + provablyDisjoint(tp1, tp2, pending) + + args1.lazyZip(args2).lazyZip(cls.typeParams).exists { + (arg1, arg2, tparam) => + val v = tparam.paramVarianceSign + if (v > 0) + covariantDisjoint(arg1, arg2, tparam) + else if (v < 0) + // Contravariant case: a value where this type parameter is + // instantiated to `Any` belongs to both types. + false + else + invariantDisjoint(arg1, arg2, tparam) } - } + end provablyDisjointTypeArgs - protected def explainingTypeComparer = ExplainingTypeComparer(comparerContext) + protected def explainingTypeComparer(short: Boolean) = ExplainingTypeComparer(comparerContext, short) protected def trackingTypeComparer = TrackingTypeComparer(comparerContext) private def inSubComparer[T, Cmp <: TypeComparer](comparer: Cmp)(op: Cmp => T): T = @@ -2876,8 +3055,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling finally myInstance = saved /** The trace of comparison operations when performing `op` */ - def explained[T](op: ExplainingTypeComparer => T, header: String = "Subtype trace:")(using Context): String = - val cmp = explainingTypeComparer + def explained[T](op: ExplainingTypeComparer => T, header: String = "Subtype trace:", short: Boolean)(using Context): String = + val cmp = explainingTypeComparer(short) inSubComparer(cmp)(op) cmp.lastTrace(header) @@ -2888,7 +3067,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling object TypeComparer { enum CompareResult: - case OK, Fail, OKwithGADTUsed + case OK, Fail, OKwithGADTUsed, OKwithOpaquesUsed /** Class for unification variables used in `natValue`. */ private class AnyConstantType extends UncachedGroundType with ValueType { @@ -2936,6 +3115,16 @@ object TypeComparer { end ApproxState type ApproxState = ApproxState.Repr + /** Result of `isCovered` check. 
*/ + private object CoveredStatus: + type Repr = Int + + val Uncovered: Repr = 1 // The type is not covered + val CoveredWithOr: Repr = 2 // The type is covered and contains OrTypes + val Covered: Repr = 3 // The type is covered and free from OrTypes + end CoveredStatus + type CoveredStatus = CoveredStatus.Repr + def topLevelSubType(tp1: Type, tp2: Type)(using Context): Boolean = comparing(_.topLevelSubType(tp1, tp2)) @@ -3036,11 +3225,14 @@ object TypeComparer { def constrainPatternType(pat: Type, scrut: Type, forceInvariantRefinement: Boolean = false)(using Context): Boolean = comparing(_.constrainPatternType(pat, scrut, forceInvariantRefinement)) - def explained[T](op: ExplainingTypeComparer => T, header: String = "Subtype trace:")(using Context): String = - comparing(_.explained(op, header)) + def explained[T](op: ExplainingTypeComparer => T, header: String = "Subtype trace:", short: Boolean = false)(using Context): String = + comparing(_.explained(op, header, short)) def tracked[T](op: TrackingTypeComparer => T)(using Context): T = comparing(_.tracked(op)) + + def subCaptures(refs1: CaptureSet, refs2: CaptureSet, frozen: Boolean)(using Context): CaptureSet.CompareResult = + comparing(_.subCaptures(refs1, refs2, frozen)) } object TrackingTypeComparer: @@ -3048,14 +3240,16 @@ object TrackingTypeComparer: enum MatchResult extends Showable: case Reduced(tp: Type) case Disjoint + case ReducedAndDisjoint case Stuck case NoInstance(fails: List[(Name, TypeBounds)]) def toText(p: Printer): Text = this match - case Reduced(tp) => "Reduced(" ~ p.toText(tp) ~ ")" - case Disjoint => "Disjoint" - case Stuck => "Stuck" - case NoInstance(fails) => "NoInstance(" ~ Text(fails.map(p.toText(_) ~ p.toText(_)), ", ") ~ ")" + case Reduced(tp) => "Reduced(" ~ p.toText(tp) ~ ")" + case Disjoint => "Disjoint" + case ReducedAndDisjoint => "ReducedAndDisjoint" + case Stuck => "Stuck" + case NoInstance(fails) => "NoInstance(" ~ Text(fails.map(p.toText(_) ~ p.toText(_)), ", ") ~ ")" class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { import TrackingTypeComparer.* @@ -3090,7 +3284,10 @@ class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { super.typeVarInstance(tvar) } - def matchCases(scrut: Type, cases: List[Type])(using Context): Type = { + def matchCases(scrut: Type, cases: List[MatchTypeCaseSpec])(using Context): Type = { + // the set of type parameters poisoned during matching, + // kept for use during the reduction step + var poisoned: Set[TypeParamRef] = Set.empty def paramInstances(canApprox: Boolean) = new TypeAccumulator[Array[Type]]: def apply(insts: Array[Type], t: Type) = t match @@ -3102,16 +3299,24 @@ class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { case entry: TypeBounds => val lo = fullLowerBound(param) val hi = fullUpperBound(param) - if isSubType(hi, lo) then lo.simplified else Range(lo, hi) + if !poisoned(param) && isSubType(hi, lo) then lo.simplified else Range(lo, hi) case inst => assert(inst.exists, i"param = $param\nconstraint = $constraint") - inst.simplified + if !poisoned(param) then inst.simplified else Range(inst, inst) insts case _ => foldOver(insts, t) def instantiateParams(insts: Array[Type]) = new ApproximatingTypeMap { variance = 0 + + override def range(lo: Type, hi: Type): Type = + if variance == 0 && (lo eq hi) then + // override the default `lo eq hi` test, which would remove the Range + // and lead to a Reduced result instead of NoInstance + Range(lower(lo), upper(hi)) + else super.range(lo, hi) + 
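As a concrete, user-level illustration of what the disjointness machinery above enables (an ordinary Scala 3 program, not compiler code; the reduction behavior follows the comments on `provablyDisjointClasses` and `provablyDisjointTypeArgs`):

```scala
sealed trait Color
final class Red extends Color
final class Blue extends Color

final case class Box[+A](value: A)

type Name[X] = X match
  case Box[Red]  => "red box"
  case Box[Blue] => "blue box"

// Reducing past the first case requires Box[Blue] to be provably disjoint
// from Box[Red]: Red and Blue are final, unrelated classes, and Box's
// covariant parameter corresponds to a field, so the argument disjointness
// carries over to the applied types.
val n: Name[Box[Blue]] = "blue box"
```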
def apply(t: Type) = t match { case t @ TypeParamRef(b, n) if b `eq` caseLambda => insts(n) case t: LazyRef => apply(t.ref) @@ -3119,53 +3324,254 @@ class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { } } - /** Match a single case. */ - def matchCase(cas: Type): MatchResult = trace(i"$scrut match ${MatchTypeTrace.caseText(cas)}", matchTypes, show = true) { - val cas1 = cas match { - case cas: HKTypeLambda => - caseLambda = constrained(cas) - caseLambda.resultType - case _ => - cas + def instantiateParamsSpec(insts: Array[Type], caseLambda: HKTypeLambda) = new TypeMap { + variance = 0 + + def apply(t: Type) = t match { + case t @ TypeParamRef(b, n) if b `eq` caseLambda => insts(n) + case t: LazyRef => apply(t.ref) + case _ => mapOver(t) } + } + + /** Match a single case. */ + def matchCase(cas: MatchTypeCaseSpec): MatchResult = trace(i"$scrut match ${MatchTypeTrace.caseText(cas)}", matchTypes, show = true) { + cas match + case cas: MatchTypeCaseSpec.SubTypeTest => matchSubTypeTest(cas) + case cas: MatchTypeCaseSpec.SpeccedPatMat => matchSpeccedPatMat(cas) + case cas: MatchTypeCaseSpec.LegacyPatMat => matchLegacyPatMat(cas) + case cas: MatchTypeCaseSpec.MissingCaptures => matchMissingCaptures(cas) + } + + def matchSubTypeTest(spec: MatchTypeCaseSpec.SubTypeTest): MatchResult = + val disjoint = provablyDisjoint(scrut, spec.pattern) + if necessarySubType(scrut, spec.pattern) then + if disjoint then + MatchResult.ReducedAndDisjoint + else + MatchResult.Reduced(spec.body) + else if disjoint then + MatchResult.Disjoint + else + MatchResult.Stuck + end matchSubTypeTest + + // See https://docs.scala-lang.org/sips/match-types-spec.html#matching + def matchSpeccedPatMat(spec: MatchTypeCaseSpec.SpeccedPatMat): MatchResult = + /* Concreteness checking + * + * When following a baseType and reaching a non-wildcard, in-variant-pos type capture, + * we have to make sure that the scrutinee is concrete enough to uniquely determine + * the values of the captures. This comes down to checking that we do not follow any + * upper bound of an abstract type. + * + * See notably neg/wildcard-match.scala for examples of this. + * + * See neg/i13780.scala and neg/i13780-1.scala for ClassCastException + * reproducers if we disable this check. 
+ */ + + def followEverythingConcrete(tp: Type): Type = + val widenedTp = tp.widenDealias + val tp1 = widenedTp.normalized + + def followTp1: Type = + // If both widenDealias and normalized did something, start again + if (tp1 ne widenedTp) && (widenedTp ne tp) then followEverythingConcrete(tp1) + else tp1 + + tp1 match + case tp1: TypeRef => + tp1.info match + case TypeAlias(tl: HKTypeLambda) => tl + case MatchAlias(tl: HKTypeLambda) => tl + case _ => followTp1 + case tp1 @ AppliedType(tycon, args) => + val concreteTycon = followEverythingConcrete(tycon) + if concreteTycon eq tycon then followTp1 + else followEverythingConcrete(concreteTycon.applyIfParameterized(args)) + case _ => + followTp1 + end followEverythingConcrete + + def isConcrete(tp: Type): Boolean = + followEverythingConcrete(tp) match + case tp1: AndOrType => isConcrete(tp1.tp1) && isConcrete(tp1.tp2) + case tp1 => tp1.underlyingClassRef(refinementOK = true).exists + + // Actual matching logic + + val instances = Array.fill[Type](spec.captureCount)(NoType) + + def rec(pattern: MatchTypeCasePattern, scrut: Type, variance: Int, scrutIsWidenedAbstract: Boolean): Boolean = + pattern match + case MatchTypeCasePattern.Capture(num, isWildcard) => + instances(num) = scrut match + case scrut: TypeBounds => + if isWildcard then + // anything will do, as long as it conforms to the bounds for the subsequent `scrut <:< instantiatedPat` test + scrut.hi + else if scrutIsWidenedAbstract then + // always keep the TypeBounds so that we can report the correct NoInstances + scrut + else + variance match + case 1 => scrut.hi + case -1 => scrut.lo + case 0 => scrut + case _ => + if !isWildcard && scrutIsWidenedAbstract && variance != 0 then + // force a TypeBounds to report the correct NoInstances + // the Nothing and Any bounds are used so that they are not displayed; not for themselves in particular + if variance > 0 then TypeBounds(defn.NothingType, scrut) + else TypeBounds(scrut, defn.AnyType) + else + scrut + !instances(num).isError + + case MatchTypeCasePattern.TypeTest(tpe) => + // The actual type test is handled by `scrut <:< instantiatedPat` + true + + case MatchTypeCasePattern.BaseTypeTest(classType, argPatterns, needsConcreteScrut) => + val cls = classType.classSymbol.asClass + scrut.baseType(cls) match + case base @ AppliedType(baseTycon, baseArgs) if baseTycon =:= classType => + val innerScrutIsWidenedAbstract = + scrutIsWidenedAbstract + || (needsConcreteScrut && !isConcrete(scrut)) // no point in checking concreteness if it does not need to be concrete + matchArgs(argPatterns, baseArgs, classType.typeParams, innerScrutIsWidenedAbstract) + case _ => + false - val defn.MatchCase(pat, body) = cas1: @unchecked + case MatchTypeCasePattern.AbstractTypeConstructor(tycon, argPatterns) => + scrut.dealias match + case scrutDealias @ AppliedType(scrutTycon, args) if scrutTycon =:= tycon => + matchArgs(argPatterns, args, tycon.typeParams, scrutIsWidenedAbstract) + case _ => + false + + case MatchTypeCasePattern.CompileTimeS(argPattern) => + natValue(scrut) match + case Some(scrutValue) if scrutValue > 0 => + rec(argPattern, ConstantType(Constant(scrutValue - 1)), variance, scrutIsWidenedAbstract) + case _ => + false + + case MatchTypeCasePattern.TypeMemberExtractor(typeMemberName, capture) => + val stableScrut: SingletonType = scrut match + case scrut: SingletonType => scrut + case _ => SkolemType(scrut) + stableScrut.member(typeMemberName) match + case denot: SingleDenotation if denot.exists => + val info = denot.info match + case 
TypeAlias(alias) => alias // Extract the alias + case ClassInfo(prefix, cls, _, _, _) => prefix.select(cls) // Re-select the class from the prefix + case info => info // Notably, RealTypeBounds, which will eventually give a MatchResult.NoInstances + val infoRefersToSkolem = stableScrut.isInstanceOf[SkolemType] && stableScrut.occursIn(info) + val info1 = info match + case info: TypeBounds => info // Will already trigger a MatchResult.NoInstances + case _ if infoRefersToSkolem => RealTypeBounds(info, info) // Explicitly trigger a MatchResult.NoInstances + case _ => info // We have a match + rec(capture, info1, variance = 0, scrutIsWidenedAbstract) + case _ => + false + end rec + + def matchArgs(argPatterns: List[MatchTypeCasePattern], args: List[Type], tparams: List[TypeParamInfo], scrutIsWidenedAbstract: Boolean): Boolean = + if argPatterns.isEmpty then + true + else + rec(argPatterns.head, args.head, tparams.head.paramVarianceSign, scrutIsWidenedAbstract) + && matchArgs(argPatterns.tail, args.tail, tparams.tail, scrutIsWidenedAbstract) + + // This might not be needed + val constrainedCaseLambda = constrained(spec.origMatchCase, ast.tpd.EmptyTree)._1.asInstanceOf[HKTypeLambda] + + val disjoint = + val defn.MatchCase(origPattern, _) = constrainedCaseLambda.resultType: @unchecked + provablyDisjoint(scrut, origPattern) + + def tryDisjoint: MatchResult = + if disjoint then + MatchResult.Disjoint + else + MatchResult.Stuck + + if rec(spec.pattern, scrut, variance = 1, scrutIsWidenedAbstract = false) then + if instances.exists(_.isInstanceOf[TypeBounds]) then + MatchResult.NoInstance { + constrainedCaseLambda.paramNames.zip(instances).collect { + case (name, bounds: TypeBounds) => (name, bounds) + } + } + else + val defn.MatchCase(instantiatedPat, reduced) = + instantiateParamsSpec(instances, constrainedCaseLambda)(constrainedCaseLambda.resultType): @unchecked + if scrut <:< instantiatedPat then + if disjoint then + MatchResult.ReducedAndDisjoint + else + MatchResult.Reduced(reduced) + else + tryDisjoint + else + tryDisjoint + end matchSpeccedPatMat + + def matchLegacyPatMat(spec: MatchTypeCaseSpec.LegacyPatMat): MatchResult = + val caseLambda = constrained(spec.origMatchCase, ast.tpd.EmptyTree)._1.asInstanceOf[HKTypeLambda] + this.caseLambda = caseLambda + + val defn.MatchCase(pat, body) = caseLambda.resultType: @unchecked def matches(canWidenAbstract: Boolean): Boolean = val saved = this.canWidenAbstract + val savedPoisoned = this.poisoned this.canWidenAbstract = canWidenAbstract + this.poisoned = Set.empty try necessarySubType(scrut, pat) - finally this.canWidenAbstract = saved + finally + poisoned = this.poisoned + this.poisoned = savedPoisoned + this.canWidenAbstract = saved + + val disjoint = provablyDisjoint(scrut, pat) def redux(canApprox: Boolean): MatchResult = - caseLambda match - case caseLambda: HKTypeLambda => - val instances = paramInstances(canApprox)(Array.fill(caseLambda.paramNames.length)(NoType), pat) - instantiateParams(instances)(body) match - case Range(lo, hi) => - MatchResult.NoInstance { - caseLambda.paramNames.zip(instances).collect { - case (name, Range(lo, hi)) => (name, TypeBounds(lo, hi)) - } - } - case redux => - MatchResult.Reduced(redux.simplified) - case _ => - MatchResult.Reduced(body) + val instances = paramInstances(canApprox)(Array.fill(caseLambda.paramNames.length)(NoType), pat) + instantiateParams(instances)(body) match + case Range(lo, hi) => + MatchResult.NoInstance { + caseLambda.paramNames.zip(instances).collect { + case (name, Range(lo, hi)) => (name, 
TypeBounds(lo, hi)) + } + } + case redux => + if disjoint then + MatchResult.ReducedAndDisjoint + else + MatchResult.Reduced(redux) - if caseLambda.exists && matches(canWidenAbstract = false) then + if matches(canWidenAbstract = false) then redux(canApprox = true) else if matches(canWidenAbstract = true) then redux(canApprox = false) - else if (provablyDisjoint(scrut, pat)) + else if (disjoint) // We found a proof that `scrut` and `pat` are incompatible. // The search continues. MatchResult.Disjoint else MatchResult.Stuck - } + end matchLegacyPatMat + + def matchMissingCaptures(spec: MatchTypeCaseSpec.MissingCaptures): MatchResult = + MatchResult.Stuck - def recur(remaining: List[Type]): Type = remaining match + def recur(remaining: List[MatchTypeCaseSpec]): Type = remaining match + case (cas: MatchTypeCaseSpec.LegacyPatMat) :: _ if sourceVersion.isAtLeast(SourceVersion.`3.4`) => + val errorText = MatchTypeTrace.illegalPatternText(scrut, cas) + ErrorType(reporting.MatchTypeLegacyPattern(errorText)) case cas :: remaining1 => matchCase(cas) match case MatchResult.Disjoint => @@ -3177,58 +3583,79 @@ class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { MatchTypeTrace.noInstance(scrut, cas, fails) NoType case MatchResult.Reduced(tp) => - tp + tp.simplified + case MatchResult.ReducedAndDisjoint => + // Empty types break the basic assumption that if a scrutinee and a + // pattern are disjoint it's OK to reduce past that pattern. Indeed, + // an empty type, viewed as a set of values, is a subset of any other + // type. As a result, if a scrutinee both matches a pattern and is + // provably disjoint from it, we prevent reduction. + // See `tests/neg/6570.scala` and `6570-1.scala` for examples that + // exploit emptiness to break match type soundness. + MatchTypeTrace.emptyScrutinee(scrut) + NoType case Nil => val casesText = MatchTypeTrace.noMatchesText(scrut, cases) - throw TypeError(em"Match type reduction $casesText") + ErrorType(reporting.MatchTypeNoCases(casesText)) inFrozenConstraint { - // Empty types break the basic assumption that if a scrutinee and a - // pattern are disjoint it's OK to reduce passed that pattern. Indeed, - // empty types viewed as a set of value is always a subset of any other - // types. As a result, we first check that the scrutinee isn't empty - // before proceeding with reduction. See `tests/neg/6570.scala` and - // `6570-1.scala` for examples that exploit emptiness to break match - // type soundness. - - // If we revered the uncertainty case of this empty check, that is, - // `!provablyNonEmpty` instead of `provablyEmpty`, that would be - // obviously sound, but quite restrictive. With the current formulation, - // we need to be careful that `provablyEmpty` covers all the conditions - // used to conclude disjointness in `provablyDisjoint`. - if (provablyEmpty(scrut)) - MatchTypeTrace.emptyScrutinee(scrut) - NoType + if scrut.isError then + // If the scrutinee is an error type, + // then just return it as the result: + // not doing so will result in the first type case matching, + // because ErrorType (as a FlexType) is <:< any type case. + // This situation can arise from any kind of nesting of match types, + // e.g. 
neg/i12049 `Tuple.Concat[Reverse[ts], (t2, t1)]` + // if Reverse[ts] fails with no matches, + // the error type should be the reduction of the Concat too. + scrut else recur(cases) } } } -/** A type comparer that can record traces of subtype operations */ -class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { - import TypeComparer._ +/** A type comparer that can record traces of subtype operations + * @param short if true, print only failing forward traces; never print successful + * subtraces; never print backtraces starting with `<==`. + */ +class ExplainingTypeComparer(initctx: Context, short: Boolean) extends TypeComparer(initctx) { + import TypeComparer.* init(initctx) - override def explainingTypeComparer = this + override def explainingTypeComparer(short: Boolean) = + if short == this.short then this + else ExplainingTypeComparer(comparerContext, short) private var indent = 0 private val b = new StringBuilder - - private var skipped = false + private var lastForwardGoal: String | Null = null override def traceIndented[T](str: String)(op: => T): T = - if (skipped) op - else { + val str1 = str.replace('\n', ' ') + if short && str1 == lastForwardGoal then + op // repeated goal, skip for clarity + else + lastForwardGoal = str1 + val curLength = b.length indent += 2 - val str1 = str.replace('\n', ' ') b.append("\n").append(" " * indent).append("==> ").append(str1) val res = op - b.append("\n").append(" " * indent).append("<== ").append(str1).append(" = ").append(show(res)) + if short then + if res == false then + if lastForwardGoal != null then // last was the deepest goal that failed + b.append(" = false") + lastForwardGoal = null + else + b.length = curLength // don't show successful subtraces + else + b.append("\n").append(" " * indent).append("<== ").append(str1).append(" = ").append(show(res)) indent -= 2 res - } + + private def traceIndentedIfNotShort[T](str: String)(op: => T): T = + if short then op else traceIndented(str)(op) private def frozenNotice: String = if frozenConstraint then " in frozen constraint" else "" @@ -3239,7 +3666,8 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { then s" ${tp1.getClass} ${tp2.getClass}" else "" val approx = approxState - traceIndented(s"${show(tp1)} <: ${show(tp2)}$moreInfo${approx.show}$frozenNotice") { + def approxStr = if short then "" else approx.show + traceIndented(s"${show(tp1)} <: ${show(tp2)}$moreInfo${approxStr}$frozenNotice") { super.recur(tp1, tp2) } @@ -3249,12 +3677,12 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { } override def lub(tp1: Type, tp2: Type, canConstrain: Boolean, isSoft: Boolean): Type = - traceIndented(s"lub(${show(tp1)}, ${show(tp2)}, canConstrain=$canConstrain, isSoft=$isSoft)") { + traceIndentedIfNotShort(s"lub(${show(tp1)}, ${show(tp2)}, canConstrain=$canConstrain, isSoft=$isSoft)") { super.lub(tp1, tp2, canConstrain, isSoft) } override def glb(tp1: Type, tp2: Type): Type = - traceIndented(s"glb(${show(tp1)}, ${show(tp2)})") { + traceIndentedIfNotShort(s"glb(${show(tp1)}, ${show(tp2)})") { super.glb(tp1, tp2) } diff --git a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala index 9bcb3eca36bb..48559787c6a1 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala @@ -2,17 +2,16 @@ package dotty.tools package dotc package core -import Symbols._, Types._, Contexts._, Flags._, Names._, StdNames._, Phases._ 
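The `short` mode added to `ExplainingTypeComparer` above boils down to "append the goal eagerly, roll the buffer back if it succeeds". A standalone sketch of that buffer discipline (a hypothetical class for illustration, not the compiler's):

```scala
class ShortTracer:
  private val buf = new StringBuilder
  private var depth = 0
  private var deepestKept = false // set once the deepest failing goal is marked

  def traced(goal: String)(op: => Boolean): Boolean =
    val mark = buf.length
    depth += 2
    buf.append("\n").append(" " * depth).append("==> ").append(goal)
    deepestKept = false
    val res = op
    if res then
      buf.setLength(mark)    // successful subtrace: drop it entirely
      deepestKept = false
    else if !deepestKept then
      buf.append(" = false") // deepest failing goal on this path: mark it once
      deepestKept = true
    depth -= 2
    res

  def trace: String = buf.toString

@main def shortTraceDemo(): Unit =
  val t = ShortTracer()
  t.traced("A <: B") { t.traced("A <: C")(true) && t.traced("A <: D")(false) }
  println(t.trace) // only the goals on the failing path remain
```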
+import Symbols.*, Types.*, Contexts.*, Flags.*, Names.*, StdNames.*, Phases.* import Flags.JavaDefined import Uniques.unique import TypeOps.makePackageObjPrefixExplicit import backend.sjs.JSDefinitions -import transform.ExplicitOuter._ -import transform.ValueClasses._ -import transform.TypeUtils._ -import transform.ContextFunctionResults._ +import transform.ExplicitOuter.* +import transform.ValueClasses.* +import transform.ContextFunctionResults.* import unpickleScala2.Scala2Erasure -import Decorators._ +import Decorators.* import Definitions.MaxImplementedFunctionArity import scala.annotation.tailrec @@ -72,7 +71,38 @@ end SourceLanguage object TypeErasure { private def erasureDependsOnArgs(sym: Symbol)(using Context) = - sym == defn.ArrayClass || sym == defn.PairClass || isDerivedValueClass(sym) + sym == defn.ArrayClass || sym == defn.PairClass || sym.isDerivedValueClass + + /** The arity of this tuple type, which can be made up of EmptyTuple, TupleX and `*:` pairs. + * + * NOTE: This method is used to determine how to erase tuples, so it can + * only be changed in very limited ways without breaking + * binary-compatibility. In particular, note that it returns -1 for + * all tuples that end with the `EmptyTuple` type alias instead of + * `EmptyTuple.type` because of a missing dealias, but this is now + * impossible to fix. + * + * @return The arity if it can be determined, or: + * -1 if this type does not have a fixed arity + * -2 if the arity depends on an uninstantiated type variable or WildcardType. + */ + def tupleArity(tp: Type)(using Context): Int = tp/*.dealias*/ match + case AppliedType(tycon, _ :: tl :: Nil) if tycon.isRef(defn.PairClass) => + val arity = tupleArity(tl) + if (arity < 0) arity else arity + 1 + case tp: SingletonType => + if tp.termSymbol == defn.EmptyTupleModule then 0 else -1 + case tp: AndOrType => + val arity1 = tupleArity(tp.tp1) + val arity2 = tupleArity(tp.tp2) + if arity1 == arity2 then arity1 else math.min(-1, math.min(arity1, arity2)) + case tp: WildcardType => -2 + case tp: TypeVar if !tp.isInstantiated => -2 + case _ => + if defn.isTupleNType(tp) then tp.dealias.argInfos.length + else tp.dealias match + case tp: TypeVar if !tp.isInstantiated => -2 + case _ => -1 def normalizeClass(cls: ClassSymbol)(using Context): ClassSymbol = { if (cls.owner == defn.ScalaPackageClass) { @@ -95,7 +125,7 @@ object TypeErasure { case tp: TypeRef => val sym = tp.symbol sym.isClass && - (!erasureDependsOnArgs(sym) || isDerivedValueClass(sym)) && + (!erasureDependsOnArgs(sym) || sym.isDerivedValueClass) && !defn.specialErasure.contains(sym) && !defn.isSyntheticFunctionClass(sym) case _: TermRef => @@ -143,9 +173,9 @@ object TypeErasure { } } - private def erasureIdx(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConstructor: Boolean, isSymbol: Boolean, wildcardOK: Boolean) = + private def erasureIdx(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConstructor: Boolean, isSymbol: Boolean, inSigName: Boolean) = extension (b: Boolean) def toInt = if b then 1 else 0 - wildcardOK.toInt + inSigName.toInt + (isSymbol.toInt << 1) + (isConstructor.toInt << 2) + (semiEraseVCs.toInt << 3) @@ -158,16 +188,16 @@ object TypeErasure { semiEraseVCs <- List(false, true) isConstructor <- List(false, true) isSymbol <- List(false, true) - wildcardOK <- List(false, true) + inSigName <- List(false, true) do - erasures(erasureIdx(sourceLanguage, semiEraseVCs, isConstructor, isSymbol, wildcardOK)) = - new TypeErasure(sourceLanguage, semiEraseVCs, isConstructor, isSymbol, 
wildcardOK) + erasures(erasureIdx(sourceLanguage, semiEraseVCs, isConstructor, isSymbol, inSigName)) = + new TypeErasure(sourceLanguage, semiEraseVCs, isConstructor, isSymbol, inSigName) /** Produces an erasure function. See the documentation of the class [[TypeErasure]] * for a description of each parameter. */ - private def erasureFn(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConstructor: Boolean, isSymbol: Boolean, wildcardOK: Boolean): TypeErasure = - erasures(erasureIdx(sourceLanguage, semiEraseVCs, isConstructor, isSymbol, wildcardOK)) + private def erasureFn(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConstructor: Boolean, isSymbol: Boolean, inSigName: Boolean): TypeErasure = + erasures(erasureIdx(sourceLanguage, semiEraseVCs, isConstructor, isSymbol, inSigName)) /** The current context with a phase no later than erasure */ def preErasureCtx(using Context) = @@ -178,7 +208,7 @@ object TypeErasure { * @param tp The type to erase. */ def erasure(tp: Type)(using Context): Type = - erasureFn(sourceLanguage = SourceLanguage.Scala3, semiEraseVCs = false, isConstructor = false, isSymbol = false, wildcardOK = false)(tp)(using preErasureCtx) + erasureFn(sourceLanguage = SourceLanguage.Scala3, semiEraseVCs = false, isConstructor = false, isSymbol = false, inSigName = false)(tp)(using preErasureCtx) /** The value class erasure of a Scala type, where value classes are semi-erased to * ErasedValueType (they will be fully erased in [[ElimErasedValueType]]). @@ -186,11 +216,11 @@ object TypeErasure { * @param tp The type to erase. */ def valueErasure(tp: Type)(using Context): Type = - erasureFn(sourceLanguage = SourceLanguage.Scala3, semiEraseVCs = true, isConstructor = false, isSymbol = false, wildcardOK = false)(tp)(using preErasureCtx) + erasureFn(sourceLanguage = SourceLanguage.Scala3, semiEraseVCs = true, isConstructor = false, isSymbol = false, inSigName = false)(tp)(using preErasureCtx) /** The erasure that Scala 2 would use for this type. */ def scala2Erasure(tp: Type)(using Context): Type = - erasureFn(sourceLanguage = SourceLanguage.Scala2, semiEraseVCs = true, isConstructor = false, isSymbol = false, wildcardOK = false)(tp)(using preErasureCtx) + erasureFn(sourceLanguage = SourceLanguage.Scala2, semiEraseVCs = true, isConstructor = false, isSymbol = false, inSigName = false)(tp)(using preErasureCtx) /** Like value class erasure, but value classes erase to their underlying type erasure */ def fullErasure(tp: Type)(using Context): Type = @@ -200,7 +230,7 @@ object TypeErasure { def sigName(tp: Type, sourceLanguage: SourceLanguage)(using Context): TypeName = { val normTp = tp.translateFromRepeated(toArray = sourceLanguage.isJava) - val erase = erasureFn(sourceLanguage, semiEraseVCs = !sourceLanguage.isJava, isConstructor = false, isSymbol = false, wildcardOK = true) + val erase = erasureFn(sourceLanguage, semiEraseVCs = !sourceLanguage.isJava, isConstructor = false, isSymbol = false, inSigName = true) erase.sigName(normTp)(using preErasureCtx) } @@ -208,8 +238,7 @@ object TypeErasure { * TermRefs are kept instead of being widened away. 
*/ def erasedRef(tp: Type)(using Context): Type = tp match { - case tp: TermRef => - assert(tp.symbol.exists, tp) + case tp: TermRef if tp.symbol.exists => val tp1 = makePackageObjPrefixExplicit(tp) if (tp1 ne tp) erasedRef(tp1) else TermRef(erasedRef(tp.prefix), tp.symbol.asTerm) @@ -230,7 +259,7 @@ object TypeErasure { def transformInfo(sym: Symbol, tp: Type)(using Context): Type = { val sourceLanguage = SourceLanguage(sym) val semiEraseVCs = !sourceLanguage.isJava // Java sees our value classes as regular classes. - val erase = erasureFn(sourceLanguage, semiEraseVCs, sym.isConstructor, isSymbol = true, wildcardOK = false) + val erase = erasureFn(sourceLanguage, semiEraseVCs, sym.isConstructor, isSymbol = true, inSigName = false) def eraseParamBounds(tp: PolyType): Type = tp.derivedLambdaType( @@ -374,7 +403,6 @@ object TypeErasure { tp1 // After erasure, T | Nothing is just T and C | Null is just C, if C is a reference type. else tp1 match { case JavaArrayType(elem1) => - import dotty.tools.dotc.transform.TypeUtils._ tp2 match { case JavaArrayType(elem2) => if (elem1.isPrimitiveValueType || elem2.isPrimitiveValueType) @@ -410,7 +438,7 @@ object TypeErasure { val candidates = takeUntil(tp2superclasses)(!_.is(Trait)) // Candidates st "no other common superclass or trait derives from S" - // Also, drop `PairClass` since it is not valid after erasue + // Also, drop `PairClass` since it is not valid after erasure val minimums = candidates.filter { cand => cand != defn.PairClass && candidates.forall(x => !x.derivesFrom(cand) || x.eq(cand)) @@ -530,23 +558,18 @@ object TypeErasure { } /** The erasure of `PolyFunction { def apply: $applyInfo }` */ - def erasePolyFunctionApply(applyInfo: Type)(using Context): Type = - assert(applyInfo.isInstanceOf[PolyType]) - val res = applyInfo.resultType - val paramss = res.paramNamess - assert(paramss.length == 1) - erasure(defn.FunctionType(paramss.head.length, - isContextual = res.isImplicitMethod)) - - def eraseErasedFunctionApply(erasedFn: MethodType)(using Context): Type = - val fnType = defn.FunctionType( - n = erasedFn.erasedParams.count(_ == false), - isContextual = erasedFn.isContextualMethod, - ) - erasure(fnType) + def eraseRefinedFunctionApply(applyInfo: Type)(using Context): Type = + def functionType(info: Type): Type = info match { + case info: PolyType => + functionType(info.resultType) + case info: MethodType => + assert(!info.resultType.isInstanceOf[MethodicType]) + defn.FunctionType(n = info.nonErasedParamCount) + } + erasure(functionType(applyInfo)) } -import TypeErasure._ +import TypeErasure.* /** * @param sourceLanguage Adapt our erasure rules to mimic what the given language @@ -556,13 +579,19 @@ import TypeErasure._ * If false, they are erased like normal classes. * @param isConstructor Argument forms part of the type of a constructor * @param isSymbol If true, the type being erased is the info of a symbol. - * @param wildcardOK Wildcards are acceptable (true when using the erasure - * for computing a signature name). + * @param inSigName This eraser is used for `TypeErasure.sigName`, + * see `TypeErasure#apply` for more information. */ -class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConstructor: Boolean, isSymbol: Boolean, wildcardOK: Boolean) { +class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConstructor: Boolean, isSymbol: Boolean, inSigName: Boolean) { - /** The erasure |T| of a type T. This is: + /** The erasure |T| of a type T. 
+ * + * If computing the erasure of T requires erasing a WildcardType or an + * uninstantiated type variable, then an exception signaling an internal + * error will be thrown, unless `inSigName` is set in which case WildcardType + * will be returned. + * + * In all other situations, |T| will be computed as follows: * - For a refined type scala.Array+[T]: * - if T is Nothing or Null, []Object * - otherwise, if T <: Object, []|T| @@ -594,116 +623,129 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst * - For NoType or NoPrefix, the type itself. * - For any other type, exception. */ - private def apply(tp: Type)(using Context): Type = tp match { - case _: ErasedValueType => - tp - case tp: TypeRef => - val sym = tp.symbol - if !sym.isClass then this(checkedSuperType(tp)) - else if semiEraseVCs && isDerivedValueClass(sym) then eraseDerivedValueClass(tp) - else if defn.isSyntheticFunctionClass(sym) then defn.functionTypeErasure(sym) - else eraseNormalClassRef(tp) - case tp: AppliedType => - val tycon = tp.tycon - if (tycon.isRef(defn.ArrayClass)) eraseArray(tp) - else if (tycon.isRef(defn.PairClass)) erasePair(tp) - else if (tp.isRepeatedParam) apply(tp.translateFromRepeated(toArray = sourceLanguage.isJava)) - else if (semiEraseVCs && isDerivedValueClass(tycon.classSymbol)) eraseDerivedValueClass(tp) - else this(checkedSuperType(tp)) - case tp: TermRef => - this(underlyingOfTermRef(tp)) - case _: ThisType => - this(tp.widen) - case SuperType(thistpe, supertpe) => - SuperType(this(thistpe), this(supertpe)) - case ExprType(rt) => - defn.FunctionType(0) - case RefinedType(parent, nme.apply, refinedInfo) if parent.typeSymbol eq defn.PolyFunctionClass => - erasePolyFunctionApply(refinedInfo) - case RefinedType(parent, nme.apply, refinedInfo: MethodType) if defn.isErasedFunctionType(parent) => - eraseErasedFunctionApply(refinedInfo) - case tp: TypeProxy => - this(tp.underlying) - case tp @ AndType(tp1, tp2) => - if sourceLanguage.isJava then - this(tp1) - else if sourceLanguage.isScala2 then - this(Scala2Erasure.intersectionDominator(Scala2Erasure.flattenedParents(tp))) - else - erasedGlb(this(tp1), this(tp2)) - case OrType(tp1, tp2) => - if isSymbol && sourceLanguage.isScala2 && ctx.settings.scalajs.value then - // In Scala2Unpickler we unpickle Scala.js pseudo-unions as if they were - // real unions, but we must still erase them as Scala 2 would to emit - // the correct signatures in SJSIR. - // We only do this when `isSymbol` is true since in other situations we - // cannot distinguish a Scala.js pseudo-union from a Scala 3 union that - // has been substituted into a Scala 2 type (e.g., via `asSeenFrom`), - // erasing these unions as if they were pseudo-unions could have an - // impact on overriding relationships so it's best to leave them - // alone (and this doesn't impact the SJSIR we generate). 
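A recurring pattern in the rewritten `apply` that begins below: whenever one component of a compound type erases to `WildcardType`, the whole erasure becomes `WildcardType` instead of crashing. A self-contained toy model of that "unknown poisons the result" rule (illustrative names, not compiler API):

```scala
// Unknown stands in for WildcardType; Known for a determined erasure.
enum Erased:
  case Known(name: String)
  case Unknown

def combine(e1: Erased, e2: Erased)(mk: (String, String) => String): Erased =
  (e1, e2) match
    case (Erased.Known(a), Erased.Known(b)) => Erased.Known(mk(a, b))
    case _                                  => Erased.Unknown

// Combining the two sides of an intersection, say: if either side is
// underdefined, the combined erasure is underdefined as well.
val res = combine(Erased.Known("List"), Erased.Unknown)((a, b) => s"glb($a, $b)")
// res == Erased.Unknown
```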
- JSDefinitions.jsdefn.PseudoUnionType - else - TypeComparer.orType(this(tp1), this(tp2), isErased = true) - case tp: MethodType => - def paramErasure(tpToErase: Type) = - erasureFn(sourceLanguage, semiEraseVCs, isConstructor, isSymbol, wildcardOK)(tpToErase) - val (names, formals0) = if tp.hasErasedParams then - tp.paramNames - .zip(tp.paramInfos) - .zip(tp.erasedParams) - .collect{ case (param, isErased) if !isErased => param } - .unzip - else (tp.paramNames, tp.paramInfos) - val formals = formals0.mapConserve(paramErasure) - eraseResult(tp.resultType) match { - case rt: MethodType => - tp.derivedLambdaType(names ++ rt.paramNames, formals ++ rt.paramInfos, rt.resultType) - case NoType => - // Can happen if we smuggle in a Nothing in the qualifier. Normally we prevent that - // in Checking.checkMembersOK, but compiler-generated code can bypass this test. - // See i15377.scala for a test case. - NoType - case rt => - tp.derivedLambdaType(names, formals, rt) - } - case tp: PolyType => - this(tp.resultType) - case tp @ ClassInfo(pre, cls, parents, decls, _) => - if (cls.is(Package)) tp - else { - def eraseParent(tp: Type) = tp.dealias match { // note: can't be opaque, since it's a class parent - case tp: AppliedType if tp.tycon.isRef(defn.PairClass) => defn.ObjectType - case _ => apply(tp) + private def apply(tp: Type)(using Context): Type = + val etp = tp match + case _: ErasedValueType => + tp + case tp: TypeRef => + val sym = tp.symbol + if !sym.isClass then this(checkedSuperType(tp)) + else if semiEraseVCs && sym.isDerivedValueClass then eraseDerivedValueClass(tp) + else if defn.isSyntheticFunctionClass(sym) then defn.functionTypeErasure(sym) + else eraseNormalClassRef(tp) + case tp: AppliedType => + val tycon = tp.tycon + if (tycon.isRef(defn.ArrayClass)) eraseArray(tp) + else if (tycon.isRef(defn.PairClass)) erasePair(tp) + else if (tp.isRepeatedParam) apply(tp.translateFromRepeated(toArray = sourceLanguage.isJava)) + else if (semiEraseVCs && tycon.classSymbol.isDerivedValueClass) eraseDerivedValueClass(tp) + else this(checkedSuperType(tp)) + case tp: TermRef => + this(underlyingOfTermRef(tp)) + case _: ThisType => + this(tp.widen) + case SuperType(thistpe, supertpe) => + val eThis = this(thistpe) + val eSuper = this(supertpe) + if eThis.isInstanceOf[WildcardType] || eSuper.isInstanceOf[WildcardType] then WildcardType + else SuperType(eThis, eSuper) + case ExprType(rt) => + defn.FunctionType(0) + case defn.PolyFunctionOf(mt) => + eraseRefinedFunctionApply(mt) + case tp: TypeVar if !tp.isInstantiated => + assert(inSigName, i"Cannot erase uninstantiated type variable $tp") + WildcardType + case tp: TypeProxy => + this(tp.underlying) + case tp @ AndType(tp1, tp2) => + if sourceLanguage.isJava then + this(tp1) + else if sourceLanguage.isScala2 then + this(Scala2Erasure.intersectionDominator(Scala2Erasure.flattenedParents(tp))) + else + val e1 = this(tp1) + val e2 = this(tp2) + if e1.isInstanceOf[WildcardType] || e2.isInstanceOf[WildcardType] then WildcardType + else erasedGlb(e1, e2) + case OrType(tp1, tp2) => + if isSymbol && sourceLanguage.isScala2 && ctx.settings.scalajs.value then + // In Scala2Unpickler we unpickle Scala.js pseudo-unions as if they were + // real unions, but we must still erase them as Scala 2 would to emit + // the correct signatures in SJSIR. 
+ // We only do this when `isSymbol` is true since in other situations we + // cannot distinguish a Scala.js pseudo-union from a Scala 3 union that + // has been substituted into a Scala 2 type (e.g., via `asSeenFrom`), + // erasing these unions as if they were pseudo-unions could have an + // impact on overriding relationships so it's best to leave them + // alone (and this doesn't impact the SJSIR we generate). + JSDefinitions.jsdefn.PseudoUnionType + else + val e1 = this(tp1) + val e2 = this(tp2) + if e1.isInstanceOf[WildcardType] || e2.isInstanceOf[WildcardType] then WildcardType + else TypeComparer.orType(e1, e2, isErased = true) + case tp: MethodType => + def paramErasure(tpToErase: Type) = + erasureFn(sourceLanguage, semiEraseVCs, isConstructor, isSymbol, inSigName = false)(tpToErase) + val (names, formals0) = if tp.hasErasedParams then + tp.paramNames + .zip(tp.paramInfos) + .zip(tp.erasedParams) + .collect{ case (param, isErased) if !isErased => param } + .unzip + else (tp.paramNames, tp.paramInfos) + val formals = formals0.mapConserve(paramErasure) + eraseResult(tp.resultType) match { + case rt: MethodType => + tp.derivedLambdaType(names ++ rt.paramNames, formals ++ rt.paramInfos, rt.resultType) + case NoType => + // Can happen if we smuggle in a Nothing in the qualifier. Normally we prevent that + // in Checking.checkMembersOK, but compiler-generated code can bypass this test. + // See i15377.scala for a test case. + NoType + case rt => + tp.derivedLambdaType(names, formals, rt) } - val erasedParents: List[Type] = - if ((cls eq defn.ObjectClass) || cls.isPrimitiveValueClass) Nil - else parents.mapConserve(eraseParent) match { - case tr :: trs1 => - assert(!tr.classSymbol.is(Trait), i"$cls has bad parents $parents%, %") - val tr1 = if (cls.is(Trait)) defn.ObjectType else tr - tr1 :: trs1.filterNot(_.isAnyRef) - case nil => nil + case tp: PolyType => + this(tp.resultType) + case tp @ ClassInfo(pre, cls, parents, decls, _) => + if (cls.is(Package)) tp + else { + def eraseParent(tp: Type) = tp.dealias match { // note: can't be opaque, since it's a class parent + case tp: AppliedType if tp.tycon.isRef(defn.PairClass) => defn.ObjectType + case _ => apply(tp) } - var erasedDecls = decls.filteredScope(sym => !sym.isType || sym.isClass).openForMutations - for dcl <- erasedDecls.iterator do - if dcl.lastKnownDenotation.unforcedAnnotation(defn.TargetNameAnnot).isDefined - && dcl.targetName != dcl.name - then - if erasedDecls eq decls then erasedDecls = erasedDecls.cloneScope - erasedDecls.unlink(dcl) - erasedDecls.enter(dcl.targetName, dcl) - val selfType1 = if cls.is(Module) then cls.sourceModule.termRef else NoType - tp.derivedClassInfo(NoPrefix, erasedParents, erasedDecls, selfType1) - // can't replace selftype by NoType because this would lose the sourceModule link - } - case _: ErrorType | JavaArrayType(_) => - tp - case tp: WildcardType if wildcardOK => - tp - case tp if (tp `eq` NoType) || (tp `eq` NoPrefix) => - tp - } + val erasedParents: List[Type] = + if ((cls eq defn.ObjectClass) || cls.isPrimitiveValueClass) Nil + else parents.mapConserve(eraseParent) match { + case tr :: trs1 => + assert(!tr.classSymbol.is(Trait), i"$cls has bad parents $parents%, %") + val tr1 = if (cls.is(Trait)) defn.ObjectType else tr + tr1 :: trs1.filterNot(_.isAnyRef) + case nil => nil + } + var erasedDecls = decls.filteredScope(sym => !sym.isType || sym.isClass).openForMutations + for dcl <- erasedDecls.iterator do + if dcl.lastKnownDenotation.unforcedAnnotation(defn.TargetNameAnnot).isDefined + && 
dcl.targetName != dcl.name + then + if erasedDecls eq decls then erasedDecls = erasedDecls.cloneScope + erasedDecls.unlink(dcl) + erasedDecls.enter(dcl.targetName, dcl) + val selfType1 = if cls.is(Module) then cls.sourceModule.termRef else NoType + tp.derivedClassInfo(NoPrefix, erasedParents, erasedDecls, selfType1) + // can't replace selftype by NoType because this would lose the sourceModule link + } + case _: ErrorType | JavaArrayType(_) => + tp + case tp: WildcardType => + assert(inSigName, i"Cannot erase wildcard type $tp") + WildcardType + case tp if (tp `eq` NoType) || (tp `eq` NoPrefix) => + tp + assert(!etp.isInstanceOf[WildcardType] || inSigName, i"Unexpected WildcardType erasure for $tp") + etp /** Like translucentSuperType, but issue a fatal error if it does not exist. */ private def checkedSuperType(tp: TypeProxy)(using Context): Type = @@ -734,17 +776,19 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst val defn.ArrayOf(elemtp) = tp: @unchecked if (isGenericArrayElement(elemtp, isScala2 = sourceLanguage.isScala2)) defn.ObjectType else - try JavaArrayType(erasureFn(sourceLanguage, semiEraseVCs = false, isConstructor, isSymbol, wildcardOK)(elemtp)) + try + val eElem = erasureFn(sourceLanguage, semiEraseVCs = false, isConstructor, isSymbol, inSigName)(elemtp) + if eElem.isInstanceOf[WildcardType] then WildcardType + else JavaArrayType(eElem) catch case ex: Throwable => handleRecursive("erase array type", tp.show, ex) } private def erasePair(tp: Type)(using Context): Type = { - // NOTE: `tupleArity` does not consider TypeRef(EmptyTuple$) equivalent to EmptyTuple.type, - // we fix this for printers, but type erasure should be preserved. - val arity = tp.tupleArity - if (arity < 0) defn.ProductClass.typeRef - else if (arity <= Definitions.MaxTupleArity) defn.TupleType(arity).nn + val arity = tupleArity(tp) + if arity == -2 then WildcardType // erasure depends on an uninstantiated type variable or WildcardType + else if arity == -1 then defn.ProductClass.typeRef + else if arity <= Definitions.MaxTupleArity then defn.TupleType(arity).nn else defn.TupleXXLClass.typeRef } @@ -780,8 +824,9 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst val underlying = tp.select(unbox).widen.resultType // The underlying part of an ErasedValueType cannot be an ErasedValueType itself - val erase = erasureFn(sourceLanguage, semiEraseVCs = false, isConstructor, isSymbol, wildcardOK) + val erase = erasureFn(sourceLanguage, semiEraseVCs = false, isConstructor, isSymbol, inSigName) val erasedUnderlying = erase(underlying) + if erasedUnderlying.isInstanceOf[WildcardType] then return WildcardType // Ideally, we would just use `erasedUnderlying` as the erasure of `tp`, but to // be binary-compatible with Scala 2 we need two special cases for polymorphic @@ -819,21 +864,27 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst // correctly (see SIP-15 and [[Erasure.Boxing.adaptToType]]), so the result type of a // constructor method should not be semi-erased. 
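`erasePair` above now reduces to a three-way decision on the `tupleArity` result. Restated as a small standalone function (a sketch of the documented behavior, using the conventional `MaxTupleArity` of 22; the returned strings are just illustrative class names):

```scala
def erasedPairName(arity: Int): String =
  if arity == -2 then "<underdefined>"          // depends on a type variable or wildcard
  else if arity == -1 then "scala.Product"      // no fixed arity, e.g. an open tail
  else if arity <= 22 then s"scala.Tuple$arity" // fixed arity in the TupleN range (1..22)
  else "scala.runtime.TupleXXL"                 // fixed arity above 22
```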
if semiEraseVCs && isConstructor && !tp.isInstanceOf[MethodOrPoly] then - erasureFn(sourceLanguage, semiEraseVCs = false, isConstructor, isSymbol, wildcardOK).eraseResult(tp) + erasureFn(sourceLanguage, semiEraseVCs = false, isConstructor, isSymbol, inSigName).eraseResult(tp) else tp match case tp: TypeRef => val sym = tp.symbol if (sym eq defn.UnitClass) sym.typeRef - else this(tp) + else apply(tp) case tp: AppliedType => val sym = tp.tycon.typeSymbol if (sym.isClass && !erasureDependsOnArgs(sym)) eraseResult(tp.tycon) - else this(tp) + else apply(tp) case _ => - this(tp) + apply(tp) /** The name of the type as it is used in `Signature`s. - * Need to ensure correspondence with erasure! + * + * If `tp` is WildcardType, or if computing its erasure requires erasing a + * WildcardType or an uninstantiated type variable, then the special name + * `tpnme.Uninstantiated`, which signals an underdefined signature, + * is used. + * + * Note: Need to ensure correspondence with erasure! */ private def sigName(tp: Type)(using Context): TypeName = try tp match { @@ -847,7 +898,7 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst if (!info.exists) assert(false, i"undefined: $tp with symbol $sym") return sigName(info) } - if (semiEraseVCs && isDerivedValueClass(sym)) { + if (semiEraseVCs && sym.isDerivedValueClass) { val erasedVCRef = eraseDerivedValueClass(tp) if (erasedVCRef.exists) return sigName(erasedVCRef) } @@ -873,21 +924,20 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst case ErasedValueType(_, underlying) => sigName(underlying) case JavaArrayType(elem) => - sigName(elem) ++ "[]" + val elemName = sigName(elem) + if elemName eq tpnme.Uninstantiated then elemName + else elemName ++ "[]" case tp: TermRef => sigName(underlyingOfTermRef(tp)) case ExprType(rt) => - sigName(defn.FunctionOf(Nil, rt)) - case tp: TypeVar => - val inst = tp.instanceOpt - if (inst.exists) sigName(inst) else tpnme.Uninstantiated - case tp @ RefinedType(parent, nme.apply, _) if parent.typeSymbol eq defn.PolyFunctionClass => + sigName(defn.FunctionNOf(Nil, rt)) + case tp: TypeVar if !tp.isInstantiated => + tpnme.Uninstantiated + case tp @ defn.PolyFunctionOf(_) => // we need this case rather than falling through to the default // because RefinedTypes <: TypeProxy and it would be caught by // the case immediately below sigName(this(tp)) - case tp @ RefinedType(parent, nme.apply, refinedInfo) if defn.isErasedFunctionType(parent) => - sigName(this(tp)) case tp: TypeProxy => sigName(tp.underlying) case tp: WildcardType => diff --git a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala index 24a207da6836..76be98d9bd65 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala @@ -2,15 +2,15 @@ package dotty.tools package dotc package core -import Types._ -import Symbols._ -import Flags._ -import Names._ -import Contexts._ -import SymDenotations._ -import Denotations._ -import Decorators._ -import reporting._ +import Types.* +import Symbols.* +import Flags.* +import Names.* +import Contexts.* +import SymDenotations.* +import Denotations.* +import Decorators.* +import reporting.* import ast.untpd import config.Printers.{cyclicErrors, noPrinter} @@ -22,7 +22,11 @@ abstract class TypeError(using creationContext: Context) extends Exception(""): * This is expensive and only useful for debugging purposes. 
*/ def computeStackTrace: Boolean = - ctx.debug || (cyclicErrors != noPrinter && this.isInstanceOf[CyclicReference] && !(ctx.mode is Mode.CheckCyclic)) + ctx.debug + || (cyclicErrors != noPrinter && this.isInstanceOf[CyclicReference] && !(ctx.mode is Mode.CheckCyclic)) + || ctx.settings.YdebugTypeError.value + || ctx.settings.YdebugError.value + || ctx.settings.YdebugUnpickling.value override def fillInStackTrace(): Throwable = if computeStackTrace then super.fillInStackTrace().nn @@ -131,14 +135,18 @@ end handleRecursive * so it requires knowing denot already. * @param denot */ -class CyclicReference private (val denot: SymDenotation)(using Context) extends TypeError: +class CyclicReference(val denot: SymDenotation)(using Context) extends TypeError: var inImplicitSearch: Boolean = false - override def toMessage(using Context): Message = - val cycleSym = denot.symbol + val cycleSym = denot.symbol + + // cycleSym.flags would try completing denot and would fail, but here we can use flagsUNSAFE to detect flags + // set by the parser. + def unsafeFlags = cycleSym.flagsUNSAFE + def isMethod = unsafeFlags.is(Method) + def isVal = !isMethod && cycleSym.isTerm - // cycleSym.flags would try completing denot and would fail, but here we can use flagsUNSAFE to detect flags - // set by the parser. + override def toMessage(using Context): Message = val unsafeFlags = cycleSym.flagsUNSAFE val isMethod = unsafeFlags.is(Method) val isVal = !isMethod && cycleSym.isTerm @@ -177,10 +185,21 @@ object CyclicReference: def apply(denot: SymDenotation)(using Context): CyclicReference = val ex = new CyclicReference(denot) if ex.computeStackTrace then - cyclicErrors.println(s"Cyclic reference involving! $denot") + cyclicErrors.println(s"Cyclic reference involving $denot") val sts = ex.getStackTrace.asInstanceOf[Array[StackTraceElement]] for (elem <- sts take 200) cyclicErrors.println(elem.toString) ex end CyclicReference +class UnpicklingError(denot: Denotation, where: String, cause: Throwable)(using Context) extends TypeError: + override def toMessage(using Context): Message = + val debugUnpickling = cause match + case cause: UnpicklingError => "" + case _ => + if ctx.settings.YdebugUnpickling.value then + cause.getStackTrace().nn.mkString("\n ", "\n ", "") + else "\n\nRun with -Ydebug-unpickling to see full stack trace." + em"""Could not read definition $denot$where. 
Caused by the following exception: + |$cause$debugUnpickling""" +end UnpicklingError diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 6809e4b9083c..2005aa702782 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -2,21 +2,20 @@ package dotty.tools package dotc package core -import Contexts._, Types._, Symbols._, Names._, NameKinds.*, Flags._ -import SymDenotations._ -import util.Spans._ +import Contexts.*, Types.*, Symbols.*, Names.*, NameKinds.*, Flags.* +import SymDenotations.* +import util.Spans.* import util.Stats -import Decorators._ -import StdNames._ +import Decorators.* +import StdNames.* import collection.mutable -import ast.tpd._ +import ast.tpd.* import reporting.trace import config.Printers.typr import config.Feature -import transform.SymUtils.* -import typer.ProtoTypes._ +import typer.ProtoTypes.* import typer.ForceDegree -import typer.Inferencing._ +import typer.Inferencing.* import typer.IfBottom import reporting.TestingReporter import cc.{CapturingType, derivedCapturingType, CaptureSet, isBoxed, isBoxedCapturing} @@ -99,7 +98,8 @@ object TypeOps: tp match { case tp: NamedType => val sym = tp.symbol - if (sym.isStatic && !sym.maybeOwner.seesOpaques || (tp.prefix `eq` NoPrefix)) tp + if sym.isStatic && !sym.maybeOwner.seesOpaques || (tp.prefix `eq` NoPrefix) + then tp else derivedSelect(tp, atVariance(variance max 0)(this(tp.prefix))) case tp: LambdaType => mapOverLambda(tp) // special cased common case @@ -147,7 +147,7 @@ object TypeOps: isFullyDefined(tp, ForceDegree.all) case _ => val normed = tp.tryNormalize - if normed.exists then normed else tp.map(simplify(_, theMap)) + if normed.exists then simplify(normed, theMap) else tp.map(simplify(_, theMap)) case tp: TypeParamRef => val tvar = ctx.typerState.constraint.typeVarOfParam(tp) if tvar.exists then tvar else tp @@ -184,7 +184,7 @@ object TypeOps: else tp.derivedAnnotatedType(parent1, annot) case _: MatchType => val normed = tp.tryNormalize - if (normed.exists) normed else mapOver + if (normed.exists) simplify(normed, theMap) else mapOver case tp: MethodicType => // See documentation of `Types#simplified` val addTypeVars = new TypeMap with IdempotentCaptRefMap: @@ -839,33 +839,34 @@ object TypeOps: } } - /** Gather GADT symbols and `ThisType`s found in `tp2`, ie. the scrutinee. */ + /** Gather GADT symbols and singletons found in `tp2`, i.e. the scrutinee. 
*/ object TraverseTp2 extends TypeTraverser: - val thisTypes = util.HashSet[ThisType]() - val gadtSyms = new mutable.ListBuffer[Symbol] + val singletons = util.HashMap[Symbol, SingletonType]() + val gadtSyms = new mutable.ListBuffer[Symbol] - def traverse(tp: Type) = { + def traverse(tp: Type) = try val tpd = tp.dealias if tpd ne tp then traverse(tpd) else tp match - case tp: ThisType if !tp.tref.symbol.isStaticOwner && !thisTypes.contains(tp) => - thisTypes += tp + case tp: ThisType if !singletons.contains(tp.tref.symbol) && !tp.tref.symbol.isStaticOwner => + singletons(tp.tref.symbol) = tp traverseChildren(tp.tref) - case tp: TypeRef if tp.symbol.isAbstractOrParamType => + case tp: TermRef if tp.symbol.is(Param) => + singletons(tp.typeSymbol) = tp + traverseChildren(tp) + case tp: TypeRef if !gadtSyms.contains(tp.symbol) && tp.symbol.isAbstractOrParamType => gadtSyms += tp.symbol traverseChildren(tp) - val owners = Iterator.iterate(tp.symbol)(_.maybeOwner).takeWhile(_.exists) - for sym <- owners do - // add ThisType's for the classes symbols in the ownership of `tp` - // for example, i16451.CanForward.scala, add `Namer.this`, as one of the owners of the type parameter `A1` - if sym.isClass && !sym.isAnonymousClass && !sym.isStaticOwner then - traverse(sym.thisType) + // traverse abstract type infos, to add any singletons + // for example, i16451.CanForward.scala, add `Namer.this`, from the info of the type parameter `A1` + // also, i19031.ci-reg2.scala, add `out`, from the info of the type parameter `A1` (from synthetic applyOrElse) + traverseChildren(tp.info) case _ => traverseChildren(tp) - } + catch case ex: Throwable => handleRecursive("traverseTp2", tp.show, ex) TraverseTp2.traverse(tp2) - val thisTypes = TraverseTp2.thisTypes - val gadtSyms = TraverseTp2.gadtSyms.toList + val singletons = TraverseTp2.singletons + val gadtSyms = TraverseTp2.gadtSyms.toList // Prefix inference, given `p.C.this.Child`: // 1. return it as is, if `C.this` is found in `tp`, i.e. the scrutinee; or @@ -875,15 +876,18 @@ object TypeOps: class InferPrefixMap extends TypeMap { var prefixTVar: Type | Null = null def apply(tp: Type): Type = tp match { - case tp @ ThisType(tref) if !tref.symbol.isStaticOwner => + case tp: TermRef if singletons.contains(tp.symbol) => + prefixTVar = singletons(tp.symbol) // e.g. tests/pos/i19031.ci-reg2.scala, keep out + prefixTVar.uncheckedNN + case ThisType(tref) if !tref.symbol.isStaticOwner => val symbol = tref.symbol - if thisTypes.contains(tp) then - prefixTVar = tp // e.g. tests/pos/i16785.scala, keep Outer.this + if singletons.contains(symbol) then + prefixTVar = singletons(symbol) // e.g. tests/pos/i16785.scala, keep Outer.this prefixTVar.uncheckedNN else if symbol.is(Module) then TermRef(this(tref.prefix), symbol.sourceModule) else if (prefixTVar != null) - this(tref) + this(tref.applyIfParameterized(tref.typeParams.map(_ => WildcardType))) else { prefixTVar = WildcardType // prevent recursive call from assigning it // e.g. 
tests/pos/i15029.more.scala, create a TypeVar for `Instances`' B, so we can disregard `Ints` @@ -913,7 +917,8 @@ object TypeOps: } def instantiate(): Type = { - for tp <- mixins.reverseIterator do protoTp1 <:< tp + for tp <- mixins.reverseIterator do + protoTp1 <:< tp maximizeType(protoTp1, NoSpan) wildApprox(protoTp1) } diff --git a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala new file mode 100644 index 000000000000..c76b5117dc89 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala @@ -0,0 +1,154 @@ +package dotty.tools +package dotc +package core + +import TypeErasure.ErasedValueType +import Types.*, Contexts.*, Symbols.*, Flags.*, Decorators.* +import Names.Name + +class TypeUtils { + /** A decorator that provides methods on types + * that are needed in the transformer pipeline. + */ + extension (self: Type) { + + def isErasedValueType(using Context): Boolean = + self.isInstanceOf[ErasedValueType] + + def isPrimitiveValueType(using Context): Boolean = + self.classSymbol.isPrimitiveValueClass + + def isErasedClass(using Context): Boolean = + self.underlyingClassRef(refinementOK = true).typeSymbol.is(Flags.Erased) + + /** Is this type a checked exception? This is the case if the type + * derives from Exception but not from RuntimeException. According to + * that definition Throwable is unchecked. That makes sense since you should + * neither throw nor catch `Throwable` anyway, so we should not define + * a capability to do so. + */ + def isCheckedException(using Context): Boolean = + self.derivesFrom(defn.ExceptionClass) + && !self.derivesFrom(defn.RuntimeExceptionClass) + + def isByName: Boolean = + self.isInstanceOf[ExprType] + + def ensureMethodic(using Context): Type = self match { + case self: MethodicType => self + case _ => if (ctx.erasedTypes) MethodType(Nil, self) else ExprType(self) + } + + def widenToParents(using Context): Type = self.parents match { + case Nil => self + case ps => ps.reduceLeft(AndType(_, _)) + } + + /** The element types of this tuple type, which can be made up of EmptyTuple, TupleX and `*:` pairs + */ + def tupleElementTypes(using Context): Option[List[Type]] = + tupleElementTypesUpTo(Int.MaxValue) + + /** The element types of this tuple type, which can be made up of EmptyTuple, TupleX and `*:` pairs + * @param bound The maximum number of elements that need generating, minus 1. + * The generation will stop once more than `bound` elements have been generated + * @param normalize If true, normalize and dealias at each step. + * If false, never normalize and dealias only to find *: + * and EmptyTuple types. This is useful for printing. + */ + def tupleElementTypesUpTo(bound: Int, normalize: Boolean = true)(using Context): Option[List[Type]] = + def recur(tp: Type, bound: Int): Option[List[Type]] = + if bound < 0 then Some(Nil) + else (if normalize then tp.normalized else tp).dealias match + case AppliedType(tycon, hd :: tl :: Nil) if tycon.isRef(defn.PairClass) => + recur(tl, bound - 1).map(hd :: _) + case tp: AppliedType if defn.isTupleNType(tp) && normalize => + Some(tp.args) // if normalize is set, use the dealiased tuple + // otherwise rely on the default case below to print unaliased tuples. 
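As a user-level check of the distinction that `tupleElementTypesUpTo` draws (its remaining cases follow below): a closed `*:` chain has a statically recoverable shape, while an open tail does not. A tiny illustration via the public `Tuple.Size` match type:

```scala
type Closed = Int *: String *: EmptyTuple
val two: Tuple.Size[Closed] = 2 // closed chain: element types (and size) are known

def open[T <: Tuple](t: Int *: T): Int *: T = t
// for `Int *: T` the tail is abstract, so no element list can be computed
```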
+ case tp: SingletonType => + if tp.termSymbol == defn.EmptyTupleModule then Some(Nil) else None + case _ => + if defn.isTupleClass(tp.typeSymbol) && !normalize then Some(tp.dealias.argInfos) + else None + recur(self.stripTypeVar, bound) + + /** Is this a generic tuple but not already an instance of one of Tuple1..22? */ + def isGenericTuple(using Context): Boolean = + self.derivesFrom(defn.PairClass) + && !defn.isTupleNType(self.widenDealias) + + /** Is this a generic tuple that would fit into the range 1..22? + * In this case we need to cast it to make the TupleN members accessible. + * This works only for generic tuples of known size up to 22. + */ + def isSmallGenericTuple(using Context): Boolean = genericTupleArityCompare < 0 + + /** Is this a generic tuple with an arity above 22? */ + def isLargeGenericTuple(using Context): Boolean = genericTupleArityCompare > 0 + + /** If this is a generic tuple with element types, compare the arity and return: + * * -1, if the generic tuple is small (<= MaxTupleArity) + * * 1, if the generic tuple is large (> MaxTupleArity) + * * 0, if this isn't a generic tuple with element types + */ + def genericTupleArityCompare(using Context): Int = + if self.isGenericTuple then + self.widenTermRefExpr.tupleElementTypesUpTo(Definitions.MaxTupleArity).match + case Some(elems) => if elems.length <= Definitions.MaxTupleArity then -1 else 1 + case _ => 0 + else 0 + + /** Is this a large generic tuple and is `pat` TupleXXL? + * TupleXXL.unapplySeq extracts values of type TupleXXL + * but large scrutinee terms are typed as large generic tuples. + * This allows them to hold on to their precise element types, + * but it means type-wise, the terms don't conform to the + * extractor's parameter type, so this method identifies that case. + */ + def isTupleXXLExtract(pat: Type)(using Context): Boolean = + pat.typeSymbol == defn.TupleXXLClass && self.isLargeGenericTuple + + /** The `*:` equivalent of an instance of a Tuple class */ + def toNestedPairs(using Context): Type = + tupleElementTypes match + case Some(types) => TypeOps.nestedPairs(types) + case None => throw new AssertionError("not a tuple") + + def refinedWith(name: Name, info: Type)(using Context) = RefinedType(self, name, info) + + /** The TermRef referring to the companion of the underlying class reference + * of this type, while keeping the same prefix. + */ + def mirrorCompanionRef(using Context): TermRef = self match { + case AndType(tp1, tp2) => + val c1 = tp1.classSymbol + val c2 = tp2.classSymbol + if c1.isSubClass(c2) then tp1.mirrorCompanionRef + else tp2.mirrorCompanionRef // precondition: the parts of the AndType have already been checked to be non-overlapping + case self @ TypeRef(prefix, _) if self.symbol.isClass => + prefix.select(self.symbol.companionModule).asInstanceOf[TermRef] + case self: TypeProxy => + self.superType.mirrorCompanionRef + } + + /** Is this type a methodic type that takes at least one parameter? */ + def takesParams(using Context): Boolean = self.stripPoly match + case mt: MethodType => mt.paramNames.nonEmpty || mt.resType.takesParams + case _ => false + + /** Is this type a methodic type that takes implicit parameters (both old and new) at some point? */ + def takesImplicitParams(using Context): Boolean = self.stripPoly match + case mt: MethodType => mt.isImplicitMethod || mt.resType.takesImplicitParams + case _ => false + + /** Is this a type deriving only from transparent classes?
+ * @param traitOnly if true, all class symbols must be transparent traits + */ + def isTransparent(traitOnly: Boolean = false)(using Context): Boolean = self match + case AndType(tp1, tp2) => + tp1.isTransparent(traitOnly) && tp2.isTransparent(traitOnly) + case _ => + val cls = self.underlyingClassRef(refinementOK = false).typeSymbol + cls.isTransparentClass && (!traitOnly || cls.is(Trait)) + } +} diff --git a/compiler/src/dotty/tools/dotc/core/TyperState.scala b/compiler/src/dotty/tools/dotc/core/TyperState.scala index d2df2a2aebef..ef7329c3698d 100644 --- a/compiler/src/dotty/tools/dotc/core/TyperState.scala +++ b/compiler/src/dotty/tools/dotc/core/TyperState.scala @@ -2,18 +2,19 @@ package dotty.tools package dotc package core -import Types._ -import Contexts._ +import Types.* +import Contexts.* import util.SimpleIdentitySet -import reporting._ +import reporting.* import config.Config import config.Printers.constr import collection.mutable import java.lang.ref.WeakReference import util.{Stats, SimpleIdentityMap} -import Decorators._ +import Decorators.* import scala.annotation.internal.sharable +import scala.compiletime.uninitialized object TyperState { @sharable private var nextId: Int = 0 @@ -44,19 +45,19 @@ object TyperState { class TyperState() { import TyperState.LevelMap - private var myId: Int = _ + private var myId: Int = uninitialized def id: Int = myId - private var previous: TyperState | Null = _ + private var previous: TyperState | Null = uninitialized - private var myReporter: Reporter = _ + private var myReporter: Reporter = uninitialized def reporter: Reporter = myReporter /** A fresh type state with the same constraint as this one and the given reporter */ def setReporter(reporter: Reporter): this.type = { myReporter = reporter; this } - private var myConstraint: Constraint = _ + private var myConstraint: Constraint = uninitialized def constraint: Constraint = myConstraint def constraint_=(c: Constraint)(using Context): Unit = { @@ -66,9 +67,9 @@ class TyperState() { c.checkConsistentVars() } - private var previousConstraint: Constraint = _ + private var previousConstraint: Constraint = uninitialized - private var myIsCommittable: Boolean = _ + private var myIsCommittable: Boolean = uninitialized def isCommittable: Boolean = myIsCommittable @@ -79,7 +80,7 @@ class TyperState() { def isGlobalCommittable: Boolean = isCommittable && (previous == null || previous.uncheckedNN.isGlobalCommittable) - private var isCommitted: Boolean = _ + private var isCommitted: Boolean = uninitialized /** The set of uninstantiated type variables which have this state as their owning state. * @@ -87,11 +88,11 @@ class TyperState() { * if `tstate.isCommittable` then * `tstate.ownedVars.contains(tvar)` iff `tvar.owningState.get eq tstate` */ - private var myOwnedVars: TypeVars = _ + private var myOwnedVars: TypeVars = uninitialized def ownedVars: TypeVars = myOwnedVars def ownedVars_=(vs: TypeVars): Unit = myOwnedVars = vs - private var upLevels: LevelMap = _ + private var upLevels: LevelMap = uninitialized /** Initializes all fields except reporter, isCommittable, which need to be * set separately. 
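The `TyperState.scala` hunk above systematically replaces the deprecated wildcard initializer `var x: T = _` with `scala.compiletime.uninitialized`, the Scala 3 spelling of the same "assign later" idiom. A minimal, self-contained sketch of the idiom (the `Cache` class is hypothetical, not part of this patch):

```scala
import scala.compiletime.uninitialized

class Cache:
  // was: `private var data: Array[Int] = _` (deprecated wildcard initializer)
  private var data: Array[Int] = uninitialized

  def init(size: Int): Unit =
    data = new Array[Int](size)
```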
diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index fb66d133c0ba..fba5f3f56648 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -2,50 +2,51 @@ package dotty.tools package dotc package core -import Symbols._ -import Flags._ -import Names._ -import StdNames._, NameOps._ -import NullOpsDecorator._ -import NameKinds.SkolemName -import Scopes._ -import Constants._ -import Contexts._ -import Phases._ -import Annotations._ -import SymDenotations._ -import Decorators._ -import Denotations._ -import Periods._ -import CheckRealizable._ +import Symbols.* +import Flags.* +import Names.* +import StdNames.*, NameOps.* +import NullOpsDecorator.* +import NameKinds.{SkolemName, WildcardParamName} +import Scopes.* +import Constants.* +import Contexts.* +import Phases.* +import Annotations.* +import SymDenotations.* +import Decorators.* +import Denotations.* +import Periods.* +import CheckRealizable.* import Variances.{Variance, setStructuralVariances, Invariant} import typer.Nullables -import util.Stats._ -import util.SimpleIdentitySet -import ast.tpd._ +import util.Stats.* +import util.{SimpleIdentityMap, SimpleIdentitySet} +import ast.tpd.* import ast.TreeTypeMap -import printing.Texts._ +import printing.Texts.* import printing.Printer -import Hashable._ -import Uniques._ +import Hashable.* +import Uniques.* import collection.mutable import config.Config +import config.Feature.sourceVersion +import config.SourceVersion import annotation.{tailrec, constructorOnly} import scala.util.hashing.{ MurmurHash3 => hashing } import config.Printers.{core, typr, matchTypes} import reporting.{trace, Message} import java.lang.ref.WeakReference import compiletime.uninitialized -import cc.{CapturingType, CaptureSet, derivedCapturingType, isBoxedCapturing, EventuallyCapturingType, boxedUnlessFun} +import cc.{CapturingType, CaptureSet, derivedCapturingType, isBoxedCapturing, RetainingType, isCaptureChecking} import CaptureSet.{CompareResult, IdempotentCaptRefMap, IdentityCaptRefMap} import scala.annotation.internal.sharable import scala.annotation.threadUnsafe -import dotty.tools.dotc.transform.SymUtils._ -import dotty.tools.dotc.transform.TypeUtils.isErasedClass -object Types { + +object Types extends TypeUtils { @sharable private var nextId = 0 @@ -109,6 +110,11 @@ object Types { /** Is this type still provisional? This is the case if the type contains, or depends on, * uninstantiated type variables or type symbols that have the Provisional flag set. * This is an antimonotonic property - once a type is not provisional, it stays so forever. + * + * FIXME: The semantics of this flag are broken by the existence of `TypeVar#resetInst`, + * a non-provisional type could go back to being provisional after + * a call to `resetInst`. This means all caches that rely on `isProvisional` + * can likely end up returning stale results. */ def isProvisional(using Context): Boolean = mightBeProvisional && testProvisional @@ -364,7 +370,7 @@ object Types { * (since these are relevant for inference or resolution) but never consider prefixes * (since these often do not constrain the search space anyway). 
*/ - def unusableForInference(using Context): Boolean = widenDealias match + def unusableForInference(using Context): Boolean = try widenDealias match case AppliedType(tycon, args) => tycon.unusableForInference || args.exists(_.unusableForInference) case RefinedType(parent, _, rinfo) => parent.unusableForInference || rinfo.unusableForInference case TypeBounds(lo, hi) => lo.unusableForInference || hi.unusableForInference @@ -374,6 +380,7 @@ object Types { case CapturingType(parent, refs) => parent.unusableForInference || refs.elems.exists(_.unusableForInference) case _: ErrorType => true case _ => false + catch case ex: Throwable => handleRecursive("unusableForInference", show, ex) /** Does the type carry an annotation that is an instance of `cls`? */ @tailrec final def hasAnnotation(cls: ClassSymbol)(using Context): Boolean = stripTypeVar match { @@ -381,6 +388,15 @@ object Types { case _ => false } + /** Returns the annotation that is an instance of `cls` carried by the type. */ + @tailrec final def getAnnotation(cls: ClassSymbol)(using Context): Option[Annotation] = stripTypeVar match { + case AnnotatedType(tp, annot) => + if annot.matches(cls) then Some(annot) + else tp.getAnnotation(cls) + case _ => + None + } + /** Does this type have a supertype with an annotation satisfying given predicate `p`? */ def derivesAnnotWith(p: Annotation => Boolean)(using Context): Boolean = this match { case tp: AnnotatedType => p(tp.annot) || tp.parent.derivesAnnotWith(p) @@ -414,6 +430,12 @@ object Types { case _ => false } + /** Is this the type of a method that has by-name parameters? */ + def isMethodWithByNameArgs(using Context): Boolean = stripPoly match { + case mt: MethodType => mt.paramInfos.exists(_.isInstanceOf[ExprType]) + case _ => false + } + /** Is this the type of a method with a leading empty parameter list? */ def isNullaryMethod(using Context): Boolean = stripPoly match { @@ -444,9 +466,35 @@ object Types { case _ => NoType } - /** Is this a higher-kinded type lambda with given parameter variances? */ + /** Is this a higher-kinded type lambda with given parameter variances? + * These lambdas are used as the RHS of higher-kinded abstract types or + * type aliases. The variance info is strictly needed only for abstract types. + * For type aliases we allow the user to write the variance, and we use it + * to check that the structural variance of the type lambda is compatible + * with the declared variance, and that the declared variance is compatible + * with any abstract types that are overridden. + * + * But it's important to note that the variance of a type parameter in + * a type lambda is strictly determined by how it occurs in the body of + * the lambda. Declared variances have no influence here. For instance + * the following two lambdas are variant, even though no parameter variance + * is indicated: + * + * [X] =>> List[X] // covariant + * [X] =>> X => Unit // contravariant + * + * Why store declared variances in lambdas at all? It's because type symbols are just + * normal symbols, and there is no field in a Symbol that keeps a list of variances. + * Generally we have the design that we store all info that applies to some symbols + * but not others in the symbol's types. + */ def isDeclaredVarianceLambda: Boolean = false + /** Is this type a CaptureRef that can be tracked? + * This is true for all ThisTypes or ParamRefs but only for some NamedTypes. + */ + def isTrackableRef(using Context): Boolean = false + /** Does this type contain wildcard types?
*/ final def containsWildcardTypes(using Context) = existsPart(_.isInstanceOf[WildcardType], StopAt.Static, forceLazy = false) @@ -678,6 +726,16 @@ object Types { findMember(name, pre, required, excluded) } + /** The implicit members with given name. If there are none and the denotation + * contains private members, also look for shadowed non-private implicits. + */ + def implicitMembersNamed(name: Name)(using Context): List[SingleDenotation] = + val d = member(name) + val alts = d.altsWith(_.isOneOf(GivenOrImplicitVal)) + if alts.isEmpty && d.hasAltWith(_.symbol.is(Private)) then + nonPrivateMember(name).altsWith(_.isOneOf(GivenOrImplicitVal)) + else alts + /** Find member of this type with given `name`, all `required` * flags and no `excluded` flag and produce a denotation that contains * the type of the member as seen from given prefix `pre`. @@ -985,7 +1043,7 @@ object Types { final def implicitMembers(using Context): List[TermRef] = { record("implicitMembers") memberDenots(implicitFilter, - (name, buf) => buf ++= member(name).altsWith(_.isOneOf(GivenOrImplicitVal))) + (name, buf) => buf ++= implicitMembersNamed(name)) .toList.map(d => TermRef(this, d.symbol.asTerm)) } @@ -1302,7 +1360,7 @@ object Types { case tp: AndType => tp.derivedAndType(tp.tp1.widenUnionWithoutNull, tp.tp2.widenUnionWithoutNull) case tp: RefinedType => - tp.derivedRefinedType(tp.parent.widenUnion, tp.refinedName, tp.refinedInfo) + tp.derivedRefinedType(parent = tp.parent.widenUnion) case tp: RecType => tp.rebind(tp.parent.widenUnion) case tp: HKTypeLambda => @@ -1353,7 +1411,7 @@ object Types { Atoms.Range(set, set) else Atoms.Unknown - dealias match + dealias.normalized match case tp: SingletonType => tp.underlying.atoms match case as @ Atoms.Range(lo, hi) => @@ -1392,24 +1450,24 @@ object Types { if (tp1.exists) tp1.dealias1(keep, keepOpaques) else tp case tp: AnnotatedType => val parent1 = tp.parent.dealias1(keep, keepOpaques) - tp match + if keep(tp) then tp.derivedAnnotatedType(parent1, tp.annot) + else tp match case tp @ CapturingType(parent, refs) => tp.derivedCapturingType(parent1, refs) case _ => - if keep(tp) then tp.derivedAnnotatedType(parent1, tp.annot) - else parent1 + parent1 case tp: LazyRef => tp.ref.dealias1(keep, keepOpaques) case _ => this } - /** Follow aliases and dereferences LazyRefs, annotated types and instantiated + /** Follow aliases and dereference LazyRefs, annotated types and instantiated * TypeVars until type is no longer alias type, annotated type, LazyRef, * or instantiated type variable. */ final def dealias(using Context): Type = dealias1(keepNever, keepOpaques = false) - /** Follow aliases and dereferences LazyRefs and instantiated TypeVars until type + /** Follow aliases and dereference LazyRefs and instantiated TypeVars until type * is no longer alias type, LazyRef, or instantiated type variable. * Goes through annotated types and rewraps annotations on the result. */ @@ -1418,12 +1476,30 @@ object Types { /** Like `dealiasKeepAnnots`, but keeps only refining annotations */ final def dealiasKeepRefiningAnnots(using Context): Type = dealias1(keepIfRefining, keepOpaques = false) - /** Follow non-opaque aliases and dereferences LazyRefs, annotated types and instantiated - * TypeVars until type is no longer alias type, annotated type, LazyRef, - * or instantiated type variable. + /** Like dealias, but does not follow aliases if symbol is Opaque. 
This is + * necessary if we want to look at the info of a symbol containing opaque + * type aliases but pretend "it's from the outside". For instance, consider: + * + * opaque type IArray[T] = Array[? <: T] + * object IArray: + * def head[T](xs: IArray[T]): T = ??? + * + * If we dealias types in the info of `head`, those types appear with prefix + * IArray.this, where IArray's self type is `IArray { type IArray[T] = Array[? <: T] }`. + * Hence, if we see IArray it will appear as an alias of [T] =>> Array[? <: T]. + * But if we want to see the type from the outside of object IArray we need to + * suppress this dealiasing. A test case where this matters is i18909.scala. + * Here, we dealias symbol infos at the start of capture checking in operation `fluidify`. + * We have to be careful not to accidentally reveal opaque aliases when doing so. */ final def dealiasKeepOpaques(using Context): Type = dealias1(keepNever, keepOpaques = true) + /** Like dealiasKeepAnnots, but does not follow opaque aliases. See `dealiasKeepOpaques` + * for why this is sometimes necessary. + */ + final def dealiasKeepAnnotsAndOpaques(using Context): Type = + dealias1(keepAlways, keepOpaques = true) + /** Approximate this type with a type that does not contain skolem types. */ final def deskolemized(using Context): Type = val deskolemizer = new ApproximatingTypeMap { @@ -1540,7 +1616,7 @@ object Types { } /** The capture set of this type. Overridden and cached in CaptureRef */ - def captureSet(using Context): CaptureSet = CaptureSet.ofType(this) + def captureSet(using Context): CaptureSet = CaptureSet.ofType(this, followResult = false) // ----- Normalizing typerefs over refined types ---------------------------- @@ -1711,11 +1787,9 @@ object Types { if !tf1.exists then tf2 else if !tf2.exists then tf1 else NoType - case t if defn.isNonRefinedFunction(t) => + case t if defn.isFunctionType(t) => t - case t if defn.isErasedFunctionType(t) => - t - case t @ SAMType(_) => + case t @ SAMType(_, _) => t case _ => NoType @@ -1834,24 +1908,29 @@ object Types { /** Turn type into a function type. * @pre this is a method type without parameter dependencies. - * @param dropLast the number of trailing parameters that should be dropped - * when forming the function type. + * @param isJava translate repeated params as java `Array`s? * @param alwaysDependent if true, always create a dependent function type.
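* For example, the non-dependent method type `(x: Int, y: Int): Int` maps to the function type `(Int, Int) => Int`.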
*/ - def toFunctionType(isJava: Boolean, dropLast: Int = 0, alwaysDependent: Boolean = false)(using Context): Type = this match { - case mt: MethodType if !mt.isParamDependent => - val formals1 = if (dropLast == 0) mt.paramInfos else mt.paramInfos dropRight dropLast - val isContextual = mt.isContextualMethod && !ctx.erasedTypes - val result1 = mt.nonDependentResultApprox match { - case res: MethodType => res.toFunctionType(isJava) - case res => res - } - val funType = defn.FunctionOf( - formals1 mapConserve (_.translateFromRepeated(toArray = isJava)), - result1, isContextual) - if alwaysDependent || mt.isResultDependent then - RefinedType(funType, nme.apply, mt) - else funType + def toFunctionType(isJava: Boolean = false, alwaysDependent: Boolean = false)(using Context): Type = this match { + case mt: MethodType => + assert(!mt.isParamDependent) + def nonDependentFunType = + val isContextual = mt.isContextualMethod && !ctx.erasedTypes + val result1 = mt.nonDependentResultApprox match { + case res: MethodType => res.toFunctionType(isJava) + case res => res + } + defn.FunctionNOf( + mt.paramInfos.mapConserve(_.translateFromRepeated(toArray = isJava)), + result1, isContextual) + if mt.hasErasedParams then + defn.PolyFunctionOf(mt) + else if alwaysDependent || mt.isResultDependent then + RefinedType(nonDependentFunType, nme.apply, mt) + else nonDependentFunType + case poly @ PolyType(_, mt: MethodType) => + assert(!mt.isParamDependent) + defn.PolyFunctionOf(poly) } /** The signature of this type. This is by default NotAMethod, @@ -1899,7 +1978,7 @@ object Types { * the two capture sets are combined. */ def capturing(cs: CaptureSet)(using Context): Type = - if cs.isConst && cs.subCaptures(captureSet, frozen = true).isOK then this + if cs.isAlwaysEmpty || cs.isConst && cs.subCaptures(captureSet, frozen = true).isOK then this else this match case CapturingType(parent, cs1) => parent.capturing(cs1 ++ cs) case _ => CapturingType(this, cs) @@ -1941,7 +2020,22 @@ object Types { * this method handles this by never simplifying inside a `MethodicType`, * except for replacing type parameters with associated type variables. */ - def simplified(using Context): Type = TypeOps.simplify(this, null) + def simplified(using Context): Type = + // A recursive match type will have the recursive call + // wrapped in a LazyRef. For example in i18175, the recursive calls + // to IsPiped within the definition of IsPiped are all wrapped in LazyRefs. + // In addition to that, TypeMaps, such as the one that backs TypeOps.simplify, + // by default will rewrap a LazyRef when applying its function. + // The result of those two things means that given a big enough input + // that recurses enough times through one or multiple match types, + // reducing and simplifying the result of the case bodies, + // can end up with a large stack of directly-nested lazy refs. + // And if that nesting level breaches `Config.LogPendingSubTypesThreshold`, + // then TypeComparer will eventually start returning `false` for `isSubType`. + // Or, under -Yno-deep-subtypes, start throwing AssertionErrors. + // So, we eagerly strip that lazy ref here to avoid the stacking. + val tp = stripLazyRef + TypeOps.simplify(tp, null) /** Compare `this == that`, assuming corresponding binders in `bs` are equal. * The normal `equals` should be equivalent to `equals(that, null)`. @@ -2088,22 +2182,23 @@ } /** A trait for references in CaptureSets.
These can be NamedTypes, ThisTypes or ParamRefs */ - trait CaptureRef extends SingletonType: - private var myCaptureSet: CaptureSet | Null = _ + trait CaptureRef extends TypeProxy, ValueType: + private var myCaptureSet: CaptureSet | Null = uninitialized private var myCaptureSetRunId: Int = NoRunId private var mySingletonCaptureSet: CaptureSet.Const | Null = null - /** Can the reference be tracked? This is true for all ThisTypes or ParamRefs - * but only for some NamedTypes. - */ - def canBeTracked(using Context): Boolean - /** Is the reference tracked? This is true if it can be tracked and the capture * set of the underlying type is not always empty. */ - final def isTracked(using Context): Boolean = canBeTracked && !captureSetOfInfo.isAlwaysEmpty + final def isTracked(using Context): Boolean = + isTrackableRef && (isRootCapability || !captureSetOfInfo.isAlwaysEmpty) + + /** Is this a reach reference of the form `x*`? */ + def isReach(using Context): Boolean = false // overridden in AnnotatedType - /** Is this reference the root capability `cap` ? */ + def stripReach(using Context): CaptureRef = this // overridden in AnnotatedType + + /** Is this reference the generic root capability `cap` ? */ def isRootCapability(using Context): Boolean = false /** Normalize reference so that it can be compared with `eq` for equality */ @@ -2122,7 +2217,7 @@ object Types { else myCaptureSet = CaptureSet.Pending val computed = CaptureSet.ofInfo(this) - if ctx.phase != Phases.checkCapturesPhase || underlying.isProvisional then + if !isCaptureChecking || underlying.isProvisional then myCaptureSet = null else myCaptureSet = computed @@ -2134,9 +2229,12 @@ object Types { override def captureSet(using Context): CaptureSet = val cs = captureSetOfInfo - if canBeTracked && !cs.isAlwaysEmpty then singletonCaptureSet else cs + if isTrackableRef && !cs.isAlwaysEmpty then singletonCaptureSet else cs + end CaptureRef + trait SingletonCaptureRef extends SingletonType, CaptureRef + /** A trait for types that bind other types that refer to them. * Instances are: LambdaType, RecType. */ @@ -2202,7 +2300,7 @@ object Types { private var lastSymbol: Symbol | Null = null private var checkedPeriod: Period = Nowhere private var myStableHash: Byte = 0 - private var mySignature: Signature = _ + private var mySignature: Signature = uninitialized private var mySignatureRunId: Int = NoRunId // Invariants: @@ -2239,7 +2337,7 @@ object Types { if ctx.runId != mySignatureRunId then mySignature = computeSignature - if !mySignature.isUnderDefined then mySignatureRunId = ctx.runId + if !mySignature.isUnderDefined && !isProvisional then mySignatureRunId = ctx.runId mySignature end signature @@ -2302,6 +2400,14 @@ object Types { case _ => if (denotationIsCurrent) lastDenotation.nn.symbol else NoSymbol } + /** Like `currentSymbol`, but force the denotation if the symbol isn't valid. + * Compared to `stableInRunSymbol`, this doesn't force the denotation for non-symbolic named types, + * because currentSymbol returns NoSymbol, which is `Permanent`, so always "isValidInCurrentRun". + * Forcing the denotation breaks tests/run/enrich-gentraversable.scala. */ + private def currentValidSymbol(using Context): Symbol = + val sym = currentSymbol + if sym.isValidInCurrentRun then sym else denot.symbol + /** Retrieves currently valid symbol without necessarily updating denotation. * Assumes that symbols do not change between periods in the same run. * Used to get the class underlying a ThisType. 
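The `CaptureRef` rework above replaces the abstract `canBeTracked` with an overridable `isTrackableRef` and makes `isTracked` also accept the root capability. A toy model of just that predicate, with invented names and a plain `Set` standing in for the capture set of the underlying type:

```scala
// Toy model, not compiler API: mirrors
//   isTracked = isTrackableRef && (isRootCapability || !captureSetOfInfo.isAlwaysEmpty)
final case class Ref(
    isTrackableRef: Boolean,   // always true for ThisType/ParamRef, sometimes for NamedType
    isRootCapability: Boolean, // the generic root capability `cap`
    infoCaptures: Set[String]  // stand-in for captureSetOfInfo
):
  def isTracked: Boolean =
    isTrackableRef && (isRootCapability || infoCaptures.nonEmpty)
```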
@@ -2614,7 +2720,7 @@ object Types { if (tparams.head.eq(tparam)) return args.head match { case _: TypeBounds if !widenAbstract => TypeRef(pre, tparam) - case arg => arg.boxedUnlessFun(tycon) + case arg => arg } tparams = tparams.tail args = args.tail @@ -2656,7 +2762,7 @@ object Types { else { if (isType) { val res = - if (currentSymbol.isAllOf(ClassTypeParam)) argForParam(prefix) + if (currentValidSymbol.isAllOf(ClassTypeParam)) argForParam(prefix) else prefix.lookupRefined(name) if (res.exists) return res if (Config.splitProjections) @@ -2730,18 +2836,19 @@ object Types { /** A reference like this one, but with the given prefix. */ final def withPrefix(prefix: Type)(using Context): Type = { def reload(): NamedType = { - val lastSym = lastSymbol.nn - val allowPrivate = !lastSym.exists || lastSym.is(Private) + val sym = stableInRunSymbol + val allowPrivate = !sym.exists || sym.is(Private) var d = memberDenot(prefix, name, allowPrivate) - if (d.isOverloaded && lastSym.exists) + if (d.isOverloaded && sym.exists) d = disambiguate(d, - if (lastSym.signature == Signature.NotAMethod) Signature.NotAMethod - else lastSym.asSeenFrom(prefix).signature, - lastSym.targetName) + if (sym.signature == Signature.NotAMethod) Signature.NotAMethod + else sym.asSeenFrom(prefix).signature, + sym.targetName) NamedType(prefix, name, d) } if (prefix eq this.prefix) this - else if !NamedType.validPrefix(prefix) then UnspecifiedErrorType + else if !NamedType.validPrefix(prefix) then + throw TypeError(em"invalid new prefix $prefix cannot replace ${this.prefix} in type $this") else if (lastDenotation == null) NamedType(prefix, designator) else designator match { case sym: Symbol => @@ -2791,7 +2898,7 @@ object Types { */ abstract case class TermRef(override val prefix: Type, private var myDesignator: Designator) - extends NamedType, ImplicitRef, CaptureRef { + extends NamedType, ImplicitRef, SingletonCaptureRef { type ThisType = TermRef type ThisName = TermName @@ -2822,7 +2929,7 @@ object Types { * They are subsumed in the capture sets of the enclosing class. * TODO: ^^^ What about call-by-name? */ - def canBeTracked(using Context) = + override def isTrackableRef(using Context) = ((prefix eq NoPrefix) || symbol.is(ParamAccessor) && (prefix eq symbol.owner.thisType) || isRootCapability @@ -2832,7 +2939,7 @@ object Types { name == nme.CAPTURE_ROOT && symbol == defn.captureRoot override def normalizedRef(using Context): CaptureRef = - if canBeTracked then symbol.termRef else this + if isTrackableRef then symbol.termRef else this } abstract case class TypeRef(override val prefix: Type, @@ -2843,7 +2950,7 @@ object Types { type ThisName = TypeName private var myCanDropAliasPeriod: Period = Nowhere - private var myCanDropAlias: Boolean = _ + private var myCanDropAlias: Boolean = uninitialized /** Given an alias type `type A = B` where a recursive comparison with `B` yields * `false`, can we conclude that the comparison is definitely false? @@ -2971,7 +3078,8 @@ object Types { * Note: we do not pass a class symbol directly, because symbols * do not survive runs whereas typerefs do. 
*/ - abstract case class ThisType(tref: TypeRef) extends CachedProxyType, CaptureRef { + abstract case class ThisType(tref: TypeRef) + extends CachedProxyType, SingletonCaptureRef { def cls(using Context): ClassSymbol = tref.stableInRunSymbol match { case cls: ClassSymbol => cls case _ if ctx.mode.is(Mode.Interactive) => defn.AnyClass // was observed to happen in IDE mode @@ -2985,7 +3093,7 @@ object Types { // can happen in IDE if `cls` is stale } - def canBeTracked(using Context) = true + override def isTrackableRef(using Context) = true override def computeHash(bs: Binders): Int = doHash(bs, tref) @@ -3016,7 +3124,8 @@ object Types { abstract case class SuperType(thistpe: Type, supertpe: Type) extends CachedProxyType with SingletonType { override def underlying(using Context): Type = supertpe override def superType(using Context): Type = - thistpe.baseType(supertpe.typeSymbol) + if supertpe.typeSymbol.exists then thistpe.baseType(supertpe.typeSymbol) + else super.superType def derivedSuperType(thistpe: Type, supertpe: Type)(using Context): Type = if ((thistpe eq this.thistpe) && (supertpe eq this.supertpe)) this else SuperType(thistpe, supertpe) @@ -3121,7 +3230,9 @@ object Types { def checkInst(using Context): this.type = this // debug hook - def derivedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type)(using Context): Type = + final def derivedRefinedType + (parent: Type = this.parent, refinedName: Name = this.refinedName, refinedInfo: Type = this.refinedInfo) + (using Context): Type = if ((parent eq this.parent) && (refinedName eq this.refinedName) && (refinedInfo eq this.refinedInfo)) this else RefinedType(parent, refinedName, refinedInfo) @@ -3304,8 +3415,8 @@ object Types { abstract case class AndType(tp1: Type, tp2: Type) extends AndOrType { def isAnd: Boolean = true private var myBaseClassesPeriod: Period = Nowhere - private var myBaseClasses: List[ClassSymbol] = _ - /** Base classes of are the merge of the operand base classes. */ + private var myBaseClasses: List[ClassSymbol] = uninitialized + /** Base classes are the merge of the operand base classes. */ override final def baseClasses(using Context): List[ClassSymbol] = { if (myBaseClassesPeriod != ctx.period) { val bcs1 = tp1.baseClasses @@ -3397,8 +3508,8 @@ object Types { def isAnd: Boolean = false def isSoft: Boolean private var myBaseClassesPeriod: Period = Nowhere - private var myBaseClasses: List[ClassSymbol] = _ - /** Base classes of are the intersection of the operand base classes. */ + private var myBaseClasses: List[ClassSymbol] = uninitialized + /** Base classes are the intersection of the operand base classes. 
*/ override final def baseClasses(using Context): List[ClassSymbol] = { if (myBaseClassesPeriod != ctx.period) { val bcs1 = tp1.baseClasses @@ -3426,7 +3537,7 @@ object Types { myFactorCount else 1 - private var myJoin: Type = _ + private var myJoin: Type = uninitialized private var myJoinPeriod: Period = Nowhere /** Replace or type by the closest non-or type above it */ @@ -3440,7 +3551,7 @@ object Types { myJoin } - private var myUnion: Type = _ + private var myUnion: Type = uninitialized private var myUnionPeriod: Period = Nowhere override def widenUnionWithoutNull(using Context): Type = @@ -3455,13 +3566,15 @@ object Types { myUnion private var atomsRunId: RunId = NoRunId - private var myAtoms: Atoms = _ - private var myWidened: Type = _ + private var myAtoms: Atoms = uninitialized + private var myWidened: Type = uninitialized private def computeAtoms()(using Context): Atoms = - if tp1.hasClassSymbol(defn.NothingClass) then tp2.atoms - else if tp2.hasClassSymbol(defn.NothingClass) then tp1.atoms - else tp1.atoms | tp2.atoms + val tp1n = tp1.normalized + val tp2n = tp2.normalized + if tp1n.hasClassSymbol(defn.NothingClass) then tp2.atoms + else if tp2n.hasClassSymbol(defn.NothingClass) then tp1.atoms + else tp1n.atoms | tp2n.atoms private def computeWidenSingletons()(using Context): Type = val tp1w = tp1.widenSingletons @@ -3535,7 +3648,7 @@ object Types { def expectValueTypeOrWildcard(tp: Type, where: => String)(using Context): Unit = if !tp.isValueTypeOrWildcard then - assert(!ctx.isAfterTyper, where) // we check correct kinds at PostTyper + assert(!ctx.isAfterTyper, s"$tp in $where") // we check correct kinds at PostTyper throw TypeError(em"$tp is not a value type, cannot be used $where") /** An extractor object to pattern match against a nullable union. @@ -3607,7 +3720,7 @@ object Types { trait LambdaType extends BindingType with TermType { self => type ThisName <: Name type PInfo <: Type - type This <: LambdaType{type PInfo = self.PInfo} + type This >: this.type <: LambdaType{type PInfo = self.PInfo} type ParamRefType <: ParamRef def paramNames: List[ThisName] @@ -3648,8 +3761,6 @@ object Types { def companion: LambdaTypeCompanion[ThisName, PInfo, This] - def erasedParams(using Context) = List.fill(paramInfos.size)(false) - /** The type `[tparams := paramRefs] tp`, where `tparams` can be * either a list of type parameter symbols or a list of lambda parameters * @@ -3665,7 +3776,7 @@ object Types { final def derivedLambdaType(paramNames: List[ThisName] = this.paramNames, paramInfos: List[PInfo] = this.paramInfos, - resType: Type = this.resType)(using Context): LambdaType = + resType: Type = this.resType)(using Context): This = if ((paramNames eq this.paramNames) && (paramInfos eq this.paramInfos) && (resType eq this.resType)) this else newLikeThis(paramNames, paramInfos, resType) @@ -3696,11 +3807,11 @@ object Types { // (1) mySignatureRunId != NoRunId => mySignature != null // (2) myJavaSignatureRunId != NoRunId => myJavaSignature != null - private var mySignature: Signature = _ + private var mySignature: Signature = uninitialized private var mySignatureRunId: Int = NoRunId - private var myJavaSignature: Signature = _ + private var myJavaSignature: Signature = uninitialized private var myJavaSignatureRunId: Int = NoRunId - private var myScala2Signature: Signature = _ + private var myScala2Signature: Signature = uninitialized private var myScala2SignatureRunId: Int = NoRunId /** If `isJava` is false, the Scala signature of this method. Otherwise, its Java signature. 
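Note the bound on `LambdaType#This` above changing from `type This <: LambdaType{...}` to `type This >: this.type <: LambdaType{...}`; the added lower bound is what lets `derivedLambdaType` return `This` instead of the bare `LambdaType`, since `this` now provably conforms to `This`. A self-contained sketch of the pattern (all names here are illustrative):

```scala
trait Lambda:
  // lower bound `this.type` makes `this` a legal value of type `This`
  type This >: this.type <: Lambda
  def derived(): This = this

class Poly extends Lambda:
  type This = Poly

val p: Poly = Poly().derived() // precise type preserved, no cast needed
```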
@@ -3740,17 +3851,17 @@ object Types { case SourceLanguage.Java => if ctx.runId != myJavaSignatureRunId then myJavaSignature = computeSignature - if !myJavaSignature.isUnderDefined then myJavaSignatureRunId = ctx.runId + if !myJavaSignature.isUnderDefined && !isProvisional then myJavaSignatureRunId = ctx.runId myJavaSignature case SourceLanguage.Scala2 => if ctx.runId != myScala2SignatureRunId then myScala2Signature = computeSignature - if !myScala2Signature.isUnderDefined then myScala2SignatureRunId = ctx.runId + if !myScala2Signature.isUnderDefined && !isProvisional then myScala2SignatureRunId = ctx.runId myScala2Signature case SourceLanguage.Scala3 => if ctx.runId != mySignatureRunId then mySignature = computeSignature - if !mySignature.isUnderDefined then mySignatureRunId = ctx.runId + if !mySignature.isUnderDefined && !isProvisional then mySignatureRunId = ctx.runId mySignature end signature @@ -3781,10 +3892,10 @@ object Types { } trait TermLambda extends LambdaType { thisLambdaType => - import DepStatus._ + import DepStatus.* type ThisName = TermName type PInfo = Type - type This <: TermLambda + type This >: this.type <: TermLambda type ParamRefType = TermParamRef override def resultType(using Context): Type = @@ -3941,13 +4052,18 @@ object Types { final override def isImplicitMethod: Boolean = companion.eq(ImplicitMethodType) || isContextualMethod final override def hasErasedParams(using Context): Boolean = - erasedParams.contains(true) + paramInfos.exists(p => p.hasAnnotation(defn.ErasedParamAnnot)) + final override def isContextualMethod: Boolean = companion.eq(ContextualMethodType) - override def erasedParams(using Context): List[Boolean] = + def erasedParams(using Context): List[Boolean] = paramInfos.map(p => p.hasAnnotation(defn.ErasedParamAnnot)) + def nonErasedParamCount(using Context): Int = + paramInfos.count(p => !p.hasAnnotation(defn.ErasedParamAnnot)) + + protected def prefixString: String = companion.prefixString } @@ -3972,15 +4088,10 @@ object Types { protected def toPInfo(tp: Type)(using Context): PInfo - /** If `tparam` is a sealed type parameter symbol of a polymorphic method, add - * a @caps.Sealed annotation to the upperbound in `tp`. 
- */ - protected def addSealed(tparam: ParamInfo, tp: Type)(using Context): Type = tp - def fromParams[PI <: ParamInfo.Of[N]](params: List[PI], resultType: Type)(using Context): Type = if (params.isEmpty) resultType else apply(params.map(_.paramName))( - tl => params.map(param => toPInfo(addSealed(param, tl.integrate(params, param.paramInfo)))), + tl => params.map(param => toPInfo(tl.integrate(params, param.paramInfo))), tl => tl.integrate(params, resultType)) } @@ -4022,10 +4133,10 @@ object Types { def addInto(tp: Type): Type = tp match case tp @ AppliedType(tycon, args) if tycon.typeSymbol == defn.RepeatedParamClass => tp.derivedAppliedType(tycon, addInto(args.head) :: Nil) - case tp @ AppliedType(tycon, args) if defn.isFunctionType(tp) => + case tp @ AppliedType(tycon, args) if defn.isFunctionNType(tp) => wrapConvertible(tp.derivedAppliedType(tycon, args.init :+ addInto(args.last))) - case tp @ RefinedType(parent, rname, rinfo) if defn.isFunctionOrPolyType(tp) => - wrapConvertible(tp.derivedRefinedType(parent, rname, addInto(rinfo))) + case tp @ defn.RefinedFunctionOf(rinfo) => + wrapConvertible(tp.derivedRefinedType(refinedInfo = addInto(rinfo))) case tp: MethodOrPoly => tp.derivedLambdaType(resType = addInto(tp.resType)) case ExprType(resType) => @@ -4081,7 +4192,7 @@ object Types { trait TypeLambda extends LambdaType { type ThisName = TypeName type PInfo = TypeBounds - type This <: TypeLambda + type This >: this.type <: TypeLambda type ParamRefType = TypeParamRef def isResultDependent(using Context): Boolean = true @@ -4302,16 +4413,6 @@ object Types { resultTypeExp: PolyType => Type)(using Context): PolyType = unique(new PolyType(paramNames)(paramInfosExp, resultTypeExp)) - override protected def addSealed(tparam: ParamInfo, tp: Type)(using Context): Type = - tparam match - case tparam: Symbol if tparam.is(Sealed) => - tp match - case tp @ TypeBounds(lo, hi) => - tp.derivedTypeBounds(lo, - AnnotatedType(hi, Annotation(defn.Caps_SealedAnnot, tparam.span))) - case _ => tp - case _ => tp - def unapply(tl: PolyType): Some[(List[LambdaParam], Type)] = Some((tl.typeParams, tl.resType)) } @@ -4591,11 +4692,12 @@ object Types { /** Only created in `binder.paramRefs`. Use `binder.paramRefs(paramNum)` to * refer to `TermParamRef(binder, paramNum)`. */ - abstract case class TermParamRef(binder: TermLambda, paramNum: Int) extends ParamRef, CaptureRef { + abstract case class TermParamRef(binder: TermLambda, paramNum: Int) + extends ParamRef, SingletonCaptureRef { type BT = TermLambda - def canBeTracked(using Context) = true def kindString: String = "Term" def copyBoundType(bt: BT): Type = bt.paramRefs(paramNum) + override def isTrackableRef(using Context) = true } private final class TermParamRefImpl(binder: TermLambda, paramNum: Int) extends TermParamRef(binder, paramNum) @@ -4716,6 +4818,10 @@ object Types { * is different from the variable's creation state (meaning unrolls are possible) * in the current typer state. * + * FIXME: the "once" in the statement above is not true anymore now that `resetInst` + * exists, this is problematic for caching (see `Type#isProvisional`), + * we should try getting rid of this method. + * * @param origin the parameter that's tracked by the type variable. * @param creatorState the typer state in which the variable was created. * @param initNestingLevel the initial nesting level of the type variable. (c.f. 
nestingLevel) @@ -4798,6 +4904,9 @@ object Types { tp } + def typeToInstantiateWith(fromBelow: Boolean)(using Context): Type = + TypeComparer.instanceType(origin, fromBelow, widenUnions, nestingLevel) + /** Instantiate variable from the constraints over its `origin`. * If `fromBelow` is true, the variable is instantiated to the lub * of its lower bounds in the current constraint; otherwise it is @@ -4806,7 +4915,7 @@ object Types { * is also a singleton type. */ def instantiate(fromBelow: Boolean)(using Context): Type = - val tp = TypeComparer.instanceType(origin, fromBelow, widenUnions, nestingLevel) + val tp = typeToInstantiateWith(fromBelow) if myInst.exists then // The line above might have triggered instantiation of the current type variable myInst else @@ -4842,6 +4951,9 @@ object Types { if (inst.exists) inst else origin } + def wrapInTypeTree(owningTree: Tree)(using Context): InferredTypeTree = + new InferredTypeTree().withSpan(owningTree.span).withType(this) + override def computeHash(bs: Binders): Int = identityHash(bs) override def equals(that: Any): Boolean = this.eq(that.asInstanceOf[AnyRef]) @@ -4880,7 +4992,7 @@ object Types { def underlying(using Context): Type = bound private var myReduced: Type | Null = null - private var reductionContext: util.MutableMap[Type, Type] = _ + private var reductionContext: util.MutableMap[Type, Type] = uninitialized override def tryNormalize(using Context): Type = try @@ -4923,10 +5035,10 @@ object Types { record("MatchType.reduce computed") if (myReduced != null) record("MatchType.reduce cache miss") myReduced = - trace(i"reduce match type $this $hashCode", matchTypes, show = true) { + trace(i"reduce match type $this $hashCode", matchTypes, show = true)(inMode(Mode.Type) { def matchCases(cmp: TrackingTypeComparer): Type = val saved = ctx.typerState.snapshot() - try cmp.matchCases(scrutinee.normalized, cases) + try cmp.matchCases(scrutinee.normalized, cases.map(MatchTypeCaseSpec.analyze(_))) catch case ex: Throwable => handleRecursive("reduce type ", i"$scrutinee match ...", ex) finally @@ -4936,7 +5048,7 @@ object Types { // instantiations during matchtype reduction TypeComparer.tracked(matchCases) - } + }) myReduced.nn } @@ -4978,6 +5090,190 @@ object Types { case _ => None } + enum MatchTypeCasePattern: + case Capture(num: Int, isWildcard: Boolean) + case TypeTest(tpe: Type) + case BaseTypeTest(classType: TypeRef, argPatterns: List[MatchTypeCasePattern], needsConcreteScrut: Boolean) + case CompileTimeS(argPattern: MatchTypeCasePattern) + case AbstractTypeConstructor(tycon: Type, argPatterns: List[MatchTypeCasePattern]) + case TypeMemberExtractor(typeMemberName: TypeName, capture: Capture) + + def isTypeTest: Boolean = + this.isInstanceOf[TypeTest] + + def needsConcreteScrutInVariantPos: Boolean = this match + case Capture(_, isWildcard) => !isWildcard + case TypeTest(_) => false + case _ => true + end MatchTypeCasePattern + + enum MatchTypeCaseSpec: + case SubTypeTest(origMatchCase: Type, pattern: Type, body: Type) + case SpeccedPatMat(origMatchCase: HKTypeLambda, captureCount: Int, pattern: MatchTypeCasePattern, body: Type) + case LegacyPatMat(origMatchCase: HKTypeLambda) + case MissingCaptures(origMatchCase: HKTypeLambda, missing: collection.BitSet) + + def origMatchCase: Type + end MatchTypeCaseSpec + + object MatchTypeCaseSpec: + def analyze(cas: Type)(using Context): MatchTypeCaseSpec = + cas match + case cas: HKTypeLambda if !sourceVersion.isAtLeast(SourceVersion.`3.4`) => + // Always apply the legacy algorithm under -source:3.3 and 
below + LegacyPatMat(cas) + case cas: HKTypeLambda => + val defn.MatchCase(pat, body) = cas.resultType: @unchecked + val missing = checkCapturesPresent(cas, pat) + if !missing.isEmpty then + MissingCaptures(cas, missing) + else + val specPattern = tryConvertToSpecPattern(cas, pat) + if specPattern != null then + SpeccedPatMat(cas, cas.paramNames.size, specPattern, body) + else + LegacyPatMat(cas) + case _ => + val defn.MatchCase(pat, body) = cas: @unchecked + SubTypeTest(cas, pat, body) + end analyze + + /** Checks that all the captures of the case are present in the pattern. + * + * Sometimes, because of earlier substitutions of an abstract type constructor, + * we can end up with patterns that do not mention all their captures anymore. + * This can happen even when the body still refers to these missing captures. + * In that case, we must always consider the case to be unmatchable, i.e., to + * become `Stuck`. + * + * See pos/i12127.scala for an example. + */ + def checkCapturesPresent(cas: HKTypeLambda, pat: Type)(using Context): collection.BitSet = + val captureCount = cas.paramNames.size + val missing = new mutable.BitSet(captureCount) + missing ++= (0 until captureCount) + new CheckCapturesPresent(cas).apply(missing, pat) + + private class CheckCapturesPresent(cas: HKTypeLambda)(using Context) extends TypeAccumulator[mutable.BitSet]: + def apply(missing: mutable.BitSet, tp: Type): mutable.BitSet = tp match + case TypeParamRef(binder, num) if binder eq cas => + missing -= num + case _ => + foldOver(missing, tp) + end CheckCapturesPresent + + /** Tries to convert a match type case pattern in HKTypeLambda form into a spec'ed `MatchTypeCasePattern`. + * + * This method recovers the structure of *legal patterns* as defined in SIP-56 + * from the unstructured `HKTypeLambda` coming from the typer. + * + * It must adhere to the specification of legal patterns defined at + * https://docs.scala-lang.org/sips/match-types-spec.html#legal-patterns + * + * Returns `null` if the pattern in `caseLambda` is not a legal pattern.
+ */ + private def tryConvertToSpecPattern(caseLambda: HKTypeLambda, pat: Type)(using Context): MatchTypeCasePattern | Null = + var typeParamRefsAccountedFor: Int = 0 + + def rec(pat: Type, variance: Int): MatchTypeCasePattern | Null = + pat match + case pat @ TypeParamRef(binder, num) if binder eq caseLambda => + typeParamRefsAccountedFor += 1 + MatchTypeCasePattern.Capture(num, isWildcard = pat.paramName.is(WildcardParamName)) + + case pat @ AppliedType(tycon: TypeRef, args) if variance == 1 => + val tyconSym = tycon.symbol + if tyconSym.isClass then + if tyconSym.name.startsWith("Tuple") && defn.isTupleNType(pat) then + rec(pat.toNestedPairs, variance) + else + recArgPatterns(pat) { argPatterns => + val needsConcreteScrut = argPatterns.zip(tycon.typeParams).exists { + (argPattern, tparam) => tparam.paramVarianceSign != 0 && argPattern.needsConcreteScrutInVariantPos + } + MatchTypeCasePattern.BaseTypeTest(tycon, argPatterns, needsConcreteScrut) + } + else if defn.isCompiletime_S(tyconSym) && args.sizeIs == 1 then + val argPattern = rec(args.head, variance) + if argPattern == null then + null + else if argPattern.isTypeTest then + MatchTypeCasePattern.TypeTest(pat) + else + MatchTypeCasePattern.CompileTimeS(argPattern) + else + tycon.info match + case _: RealTypeBounds => + recAbstractTypeConstructor(pat) + case TypeAlias(tl @ HKTypeLambda(onlyParam :: Nil, resType: RefinedType)) => + /* Unlike for eta-expanded classes, the typer does not automatically + * dealias poly type aliases to refined types. So we have to give them + * a chance here. + * We are quite specific about the shape of type aliases that we are willing + * to dealias this way, because we must not dealias arbitrary type constructors + * that could refine the bounds of the captures; those would amount to + * type-test + capture combos, which fall outside the specced match types. + */ + rec(pat.superType, variance) + case _ => + null + + case pat @ AppliedType(tycon: TypeParamRef, _) if variance == 1 => + recAbstractTypeConstructor(pat) + + case pat @ RefinedType(parent, refinedName: TypeName, TypeAlias(alias @ TypeParamRef(binder, num))) + if variance == 1 && (binder eq caseLambda) => + parent.member(refinedName) match + case refinedMember: SingleDenotation if refinedMember.exists => + // Check that the bounds of the capture contain the bounds of the inherited member + val refinedMemberBounds = refinedMember.info + val captureBounds = caseLambda.paramInfos(num) + if captureBounds.contains(refinedMemberBounds) then + /* In this case, we know that any member we eventually find during reduction + * will have bounds that fit in the bounds of the capture. Therefore, no + * type-test + capture combo is necessary, and we can apply the specced match types.
+ */ + val capture = rec(alias, variance = 0).asInstanceOf[MatchTypeCasePattern.Capture] + MatchTypeCasePattern.TypeMemberExtractor(refinedName, capture) + else + // Otherwise, a type-test + capture combo might be necessary, and we are out of spec + null + case _ => + // If the member does not refine a member of the `parent`, we are out of spec + null + + case _ => + MatchTypeCasePattern.TypeTest(pat) + end rec + + def recAbstractTypeConstructor(pat: AppliedType): MatchTypeCasePattern | Null = + recArgPatterns(pat) { argPatterns => + MatchTypeCasePattern.AbstractTypeConstructor(pat.tycon, argPatterns) + } + end recAbstractTypeConstructor + + def recArgPatterns(pat: AppliedType)(whenNotTypeTest: List[MatchTypeCasePattern] => MatchTypeCasePattern | Null): MatchTypeCasePattern | Null = + val AppliedType(tycon, args) = pat + val tparams = tycon.typeParams + val argPatterns = args.zip(tparams).map { (arg, tparam) => + rec(arg, tparam.paramVarianceSign) + } + if argPatterns.exists(_ == null) then + null + else + val argPatterns1 = argPatterns.asInstanceOf[List[MatchTypeCasePattern]] // they are not null + if argPatterns1.forall(_.isTypeTest) then + MatchTypeCasePattern.TypeTest(pat) + else + whenNotTypeTest(argPatterns1) + end recArgPatterns + + val result = rec(pat, variance = 1) + if typeParamRefsAccountedFor == caseLambda.paramNames.size then result + else null + end tryConvertToSpecPattern + end MatchTypeCaseSpec + // ------ ClassInfo, Type Bounds -------------------------------------------------- type TypeOrSymbol = Type | Symbol @@ -5017,8 +5313,8 @@ object Types { else if (clsd.is(Module)) givenSelf else if (ctx.erasedTypes) appliedRef else givenSelf.dealiasKeepAnnots match - case givenSelf1 @ EventuallyCapturingType(tp, _) => - givenSelf1.derivedAnnotatedType(tp & appliedRef, givenSelf1.annot) + case givenSelf1 @ AnnotatedType(tp, ann) if ann.symbol == defn.RetainsAnnot => + givenSelf1.derivedAnnotatedType(tp & appliedRef, ann) case _ => AndType(givenSelf, appliedRef) } @@ -5235,6 +5531,8 @@ object Types { case that: AliasingBounds => this.isTypeAlias == that.isTypeAlias && alias.eq(that.alias) case _ => false } + + override def toString = s"${getClass.getSimpleName}($alias)" } /** = T @@ -5288,7 +5586,7 @@ object Types { // ----- Annotated and Import types ----------------------------------------------- /** An annotated type tpe @ annot */ - abstract case class AnnotatedType(parent: Type, annot: Annotation) extends CachedProxyType, ValueType { + abstract case class AnnotatedType(parent: Type, annot: Annotation) extends CachedProxyType, CaptureRef { override def underlying(using Context): Type = parent @@ -5307,7 +5605,7 @@ object Types { override def stripped(using Context): Type = parent.stripped private var isRefiningKnown = false - private var isRefiningCache: Boolean = _ + private var isRefiningCache: Boolean = uninitialized def isRefining(using Context): Boolean = { if (!isRefiningKnown) { @@ -5317,6 +5615,23 @@ object Types { isRefiningCache } + override def isTrackableRef(using Context) = + isReach && parent.isTrackableRef + + /** Is this a reach reference of the form `x*`? 
*/ + override def isReach(using Context): Boolean = + annot.symbol == defn.ReachCapabilityAnnot + + override def stripReach(using Context): SingletonCaptureRef = + (if isReach then parent else this).asInstanceOf[SingletonCaptureRef] + + override def normalizedRef(using Context): CaptureRef = + if isReach then AnnotatedType(stripReach.normalizedRef, annot) else this + + override def captureSet(using Context): CaptureSet = + if isReach then super.captureSet + else CaptureSet.ofType(this, followResult = false) + // equals comes from case class; no matching override is needed override def computeHash(bs: Binders): Int = @@ -5389,6 +5704,9 @@ def explanation(using Context): String = msg.message } + /** Note: Make sure an error is reported before constructing this + * as the type of a tree. + */ object ErrorType: def apply(m: Message)(using Context): ErrorType = val et = new PreviousErrorType @@ -5458,104 +5776,136 @@ * A type is a SAM type if it is a reference to a class or trait, which * * - has a single abstract method with a method type (ExprType - * and PolyType not allowed!) whose result type is not an implicit function type - * and which is not marked inline. + * and PolyType not allowed!) according to `possibleSamMethods`. * - can be instantiated without arguments or with just () as argument. * - * The pattern `SAMType(sam)` matches a SAM type, where `sam` is the - * type of the single abstract method. + * Additionally, a SAM type may contain type alias refinements if they refine + * an existing type member. + * + * The pattern `SAMType(samMethod, samParent)` matches a SAM type, where `samMethod` is the + * type of the single abstract method and `samParent` is a subtype of the matched + * SAM type which has been stripped of wildcards to turn it into a valid parent + * type. */ object SAMType { - def zeroParamClass(tp: Type)(using Context): Type = tp match { + /** If possible, return a type which is both a subtype of `origTp` and a (possibly refined) type + * application of `samClass` where none of the type arguments are + * wildcards (thus making it a valid parent type), otherwise return + * NoType. + * + * A wildcard in the original type will be replaced by its upper or lower bound in a way + * that maximizes the number of possible implementations of `samMeth`. For example, + * java.util.function defines an interface equivalent to: + * + * trait Function[T, R]: + * def apply(t: T): R + * + * and it usually appears with wildcards to compensate for the lack of + * definition-site variance in Java: + * + * (x => x.toInt): Function[? >: String, ? <: Int] + * + * When typechecking this lambda, we need to approximate the wildcards to find + * a valid parent type for our lambda to extend. We can see that in `apply`, + * `T` only appears contravariantly and `R` only appears covariantly, so by + * minimizing the first parameter and maximizing the second, we maximize the + * number of valid implementations of `apply` which lets us implement the lambda + * with a closure equivalent to: + * + * new Function[String, Int] { def apply(x: String): Int = x.toInt } + * + * If a type parameter appears invariantly or does not appear at all in `samMeth`, then + * we arbitrarily pick the upper-bound.
+ */ + def samParent(origTp: Type, samClass: Symbol, samMeth: Symbol)(using Context): Type = + val tp0 = origTp.baseType(samClass) + + /** Copy type alias refinements to `toType` from `fromTp` */ + def withRefinements(toType: Type, fromTp: Type): Type = fromTp.dealias match + case RefinedType(fromParent, name, info: TypeAlias) if tp0.member(name).exists => + val parent1 = withRefinements(toType, fromParent) + RefinedType(toType, name, info) + case _ => toType + val tp = withRefinements(tp0, origTp) + + if !(tp <:< origTp) then NoType + else + def approxWildcardArgs(tp: Type): Type = tp match + case tp @ AppliedType(tycon, args) if tp.hasWildcardArg => + val accu = new TypeAccumulator[VarianceMap[Symbol]]: + def apply(vmap: VarianceMap[Symbol], t: Type): VarianceMap[Symbol] = t match + case tp: TypeRef if tp.symbol.isAllOf(ClassTypeParam) => + vmap.recordLocalVariance(tp.symbol, variance) + case _ => + foldOver(vmap, t) + val vmap = accu(VarianceMap.empty, samMeth.info) + val tparams = tycon.typeParamSymbols + val args1 = args.zipWithConserve(tparams): + case (arg @ TypeBounds(lo, hi), tparam) => + val v = vmap.computedVariance(tparam) + if v.uncheckedNN < 0 then lo + else hi + case (arg, _) => arg + tp.derivedAppliedType(tycon, args1) + case tp: RefinedType => + tp.derivedRefinedType(approxWildcardArgs(tp.parent)) + case _ => + tp + approxWildcardArgs(tp) + end samParent + + def samClass(tp: Type)(using Context): Symbol = tp match case tp: ClassInfo => - def zeroParams(tp: Type): Boolean = tp.stripPoly match + def zeroParams(tp: Type): Boolean = tp.stripPoly match case mt: MethodType => mt.paramInfos.isEmpty && !mt.resultType.isInstanceOf[MethodType] case et: ExprType => true case _ => false - } - // `ContextFunctionN` does not have constructors - val ctor = tp.cls.primaryConstructor - if (!ctor.exists || zeroParams(ctor.info)) tp - else NoType + val cls = tp.cls + val validCtor = + val ctor = cls.primaryConstructor + // `ContextFunctionN` does not have constructors + !ctor.exists || zeroParams(ctor.info) + val isInstantiable = !cls.isOneOf(FinalOrSealed) && (tp.appliedRef <:< tp.selfType) + if validCtor && isInstantiable then tp.cls + else NoSymbol case tp: AppliedType => - zeroParamClass(tp.superType) + samClass(tp.superType) case tp: TypeRef => - zeroParamClass(tp.underlying) + samClass(tp.underlying) case tp: RefinedType => - zeroParamClass(tp.underlying) + samClass(tp.underlying) case tp: TypeBounds => - zeroParamClass(tp.underlying) + samClass(tp.underlying) case tp: TypeVar => - zeroParamClass(tp.underlying) + samClass(tp.underlying) case tp: AnnotatedType => - zeroParamClass(tp.underlying) - case _ => - NoType - } - def isInstantiatable(tp: Type)(using Context): Boolean = zeroParamClass(tp) match { - case cinfo: ClassInfo if !cinfo.cls.isOneOf(FinalOrSealed) => - val selfType = cinfo.selfType.asSeenFrom(tp, cinfo.cls) - tp <:< selfType + samClass(tp.underlying) case _ => - false - } - def unapply(tp: Type)(using Context): Option[MethodType] = - if (isInstantiatable(tp)) { - val absMems = tp.possibleSamMethods - if (absMems.size == 1) - absMems.head.info match { - case mt: MethodType if !mt.isParamDependent && - !defn.isContextFunctionType(mt.resultType) => - val cls = tp.classSymbol - - // Given a SAM type such as: - // - // import java.util.function.Function - // Function[? >: String, ? <: Int] - // - // the single abstract method will have type: - // - // (x: Function[? >: String, ? <: Int]#T): Function[? >: String, ?
<: Int]#R
-            //
-            // which is not implementable outside of the scope of Function.
-            //
-            // To avoid this kind of issue, we approximate references to
-            // parameters of the SAM type by their bounds, this way in the
-            // above example we get:
-            //
-            //    (x: String): Int
-            val approxParams = new ApproximatingTypeMap {
-              def apply(tp: Type): Type = tp match {
-                case tp: TypeRef if tp.symbol.isAllOf(ClassTypeParam) && tp.symbol.owner == cls =>
-                  tp.info match {
-                    case info: AliasingBounds =>
-                      mapOver(info.alias)
-                    case TypeBounds(lo, hi) =>
-                      range(atVariance(-variance)(apply(lo)), apply(hi))
-                    case _ =>
-                      range(defn.NothingType, defn.AnyType) // should happen only in error cases
-                  }
-                case _ =>
-                  mapOver(tp)
-              }
-            }
-            val approx =
-              if ctx.owner.isContainedIn(cls) then mt
-              else approxParams(mt).asInstanceOf[MethodType]
-            Some(approx)
+      NoSymbol
+
+  def unapply(tp: Type)(using Context): Option[(MethodType, Type)] =
+    val cls = samClass(tp)
+    if cls.exists then
+      val absMems =
+        if tp.isRef(defn.PartialFunctionClass) then
+          // To maintain compatibility with 2.x, we treat PartialFunction specially,
+          // pretending it is a SAM type. In the future it would be better to merge
+          // Function and PartialFunction, have Function1 contain an isDefinedAt method
+          //   def isDefinedAt(x: T) = true
+          // and overwrite that method whenever the function body is a sequence of
+          // case clauses.
+          List(defn.PartialFunction_apply)
+        else
+          tp.possibleSamMethods.map(_.symbol)
+      if absMems.lengthCompare(1) == 0 then
+        val samMethSym = absMems.head
+        val parent = samParent(tp, cls, samMethSym)
+        samMethSym.asSeenFrom(parent).info match
+          case mt: MethodType if !mt.isParamDependent && mt.resultType.isValueTypeOrWildcard =>
+            Some(mt, parent)
          case _ =>
            None
-          }
-        else if (tp isRef defn.PartialFunctionClass)
-          // To maintain compatibility with 2.x, we treat PartialFunction specially,
-          // pretending it is a SAM type. In the future it would be better to merge
-          // Function and PartialFunction, have Function1 contain a isDefinedAt method
-          //   def isDefinedAt(x: T) = true
-          // and overwrite that method whenever the function body is a sequence of
-          // case clauses.
-          absMems.find(_.symbol.name == nme.apply).map(_.info.asInstanceOf[MethodType])
       else None
-    }
    else None
 }
@@ -5609,23 +5959,16 @@ object Types {

 trait BiTypeMap extends TypeMap:
   thisMap =>

-  /** The inverse of the type map as a function */
-  def inverse(tp: Type): Type
-
-  /** The inverse of the type map as a BiTypeMap map, which
-   *  has the original type map as its own inverse.
-   */
-  def inverseTypeMap(using Context) = new BiTypeMap:
-    def apply(tp: Type) = thisMap.inverse(tp)
-    def inverse(tp: Type) = thisMap.apply(tp)
+  /** The inverse of the type map */
+  def inverse: BiTypeMap

   /** A restriction of this map to a function on tracked CaptureRefs */
   def forward(ref: CaptureRef): CaptureRef = this(ref) match
-    case result: CaptureRef if result.canBeTracked => result
+    case result: CaptureRef if result.isTrackableRef => result

   /** A restriction of the inverse to a function on tracked CaptureRefs */
   def backward(ref: CaptureRef): CaptureRef = inverse(ref) match
-    case result: CaptureRef if result.canBeTracked => result
+    case result: CaptureRef if result.isTrackableRef => result
 end BiTypeMap

 abstract class TypeMap(implicit protected var mapCtx: Context)
@@ -5671,11 +6014,13 @@ object Types {
   protected def derivedLambdaType(tp: LambdaType)(formals: List[tp.PInfo], restpe: Type): Type =
     tp.derivedLambdaType(tp.paramNames, formals, restpe)

+  protected def mapArg(arg: Type, tparam: ParamInfo): Type = arg match
+    case arg: TypeBounds => this(arg)
+    case arg => atVariance(variance * tparam.paramVarianceSign)(this(arg))
+
   protected def mapArgs(args: List[Type], tparams: List[ParamInfo]): List[Type] = args match
     case arg :: otherArgs if tparams.nonEmpty =>
-      val arg1 = arg match
-        case arg: TypeBounds => this(arg)
-        case arg => atVariance(variance * tparams.head.paramVarianceSign)(this(arg))
+      val arg1 = mapArg(arg, tparams.head)
       val otherArgs1 = mapArgs(otherArgs, tparams.tail)
      if ((arg1 eq arg) && (otherArgs1 eq otherArgs)) args
      else arg1 :: otherArgs1
@@ -5815,7 +6160,7 @@ object Types {
       val elems = scope.toList
       val elems1 = mapOver(elems)
       if (elems1 eq elems) scope
-      else newScopeWith(elems1: _*)
+      else newScopeWith(elems1*)
     }

   def mapOver(tree: Tree): Tree = treeTypeMap(tree)
@@ -5831,17 +6176,16 @@ object Types {
   }

   /** A type map that maps also parents and self type of a ClassInfo */
-  abstract class DeepTypeMap(using Context) extends TypeMap {
-    override def mapClassInfo(tp: ClassInfo): ClassInfo = {
+  abstract class DeepTypeMap(using Context) extends TypeMap:
+    override def mapClassInfo(tp: ClassInfo): ClassInfo =
       val prefix1 = this(tp.prefix)
-      val parents1 = tp.declaredParents mapConserve this
-      val selfInfo1: TypeOrSymbol = tp.selfInfo match {
-        case selfInfo: Type => this(selfInfo)
-        case selfInfo => selfInfo
-      }
-      tp.derivedClassInfo(prefix1, parents1, tp.decls, selfInfo1)
-    }
-  }
+      val parents1 = inContext(ctx.withOwner(tp.cls)):
+        tp.declaredParents.mapConserve(this)
+      val selfInfo1: TypeOrSymbol = tp.selfInfo match
+        case selfInfo: Type => inContext(ctx.withOwner(tp.cls))(this(selfInfo))
+        case selfInfo => selfInfo
+      tp.derivedClassInfo(prefix1, parents1, tp.decls, selfInfo1)
+  end DeepTypeMap

 @sharable object IdentityTypeMap extends TypeMap()(NoContext) {
   def apply(tp: Type): Type = tp
@@ -5932,7 +6276,7 @@ object Types {
    */
   def expandParam(tp: NamedType, pre: Type): Type =
     tp.argForParam(pre) match {
-      case arg @ TypeRef(pre, _) if pre.isArgPrefixOf(arg.symbol) =>
+      case arg @ TypeRef(`pre`, _) if pre.isArgPrefixOf(arg.symbol) =>
        arg.info match {
          case argInfo: TypeBounds => expandBounds(argInfo)
          case argInfo => useAlternate(arg)
@@ -6129,7 +6473,7 @@ object Types {
      tp.derivedLambdaType(tp.paramNames, formals, restpe)
   }

-  /** Overridden in TypeOps.avoid */
+  /** Overridden in TypeOps.avoid and in CheckCaptures.substParamsMap */
   protected def needsRangeIfInvariant(refs: CaptureSet): Boolean = true

   override def mapCapturingType(tp: Type, parent: Type, refs:
CaptureSet, v: Int): Type = @@ -6347,7 +6691,9 @@ object Types { seen += tp tp match { case tp: AppliedType => - foldOver(n + 1, tp) + val tpNorm = tp.tryNormalize + if tpNorm.exists then apply(n, tpNorm) + else foldOver(n + 1, tp) case tp: RefinedType => foldOver(n + 1, tp) case tp: TypeRef if tp.info.isTypeAlias => @@ -6388,6 +6734,37 @@ object Types { } } + object VarianceMap: + /** An immutable map representing the variance of keys of type `K` */ + opaque type VarianceMap[K <: AnyRef] <: AnyRef = SimpleIdentityMap[K, Integer] + def empty[K <: AnyRef]: VarianceMap[K] = SimpleIdentityMap.empty[K] + extension [K <: AnyRef](vmap: VarianceMap[K]) + /** The backing map used to implement this VarianceMap. */ + inline def underlying: SimpleIdentityMap[K, Integer] = vmap + + /** Return a new map taking into account that K appears in a + * {co,contra,in}-variant position if `localVariance` is {positive,negative,zero}. + */ + def recordLocalVariance(k: K, localVariance: Int): VarianceMap[K] = + val previousVariance = vmap(k) + if previousVariance == null then + vmap.updated(k, localVariance) + else if previousVariance == localVariance || previousVariance == 0 then + vmap + else + vmap.updated(k, 0) + + /** Return the variance of `k`: + * - A positive value means that `k` appears only covariantly. + * - A negative value means that `k` appears only contravariantly. + * - A zero value means that `k` appears both covariantly and + * contravariantly, or appears invariantly. + * - A null value means that `k` does not appear at all. + */ + def computedVariance(k: K): Integer | Null = + vmap(k) + export VarianceMap.VarianceMap + // ----- Name Filters -------------------------------------------------- /** A name filter selects or discards a member name of a type `pre`. 
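The merge rule implemented by `recordLocalVariance` above can be restated compactly. The following is a minimal standalone sketch (plain Scala 3, using `Option[Int]` in place of the compiler's `SimpleIdentityMap` and `Integer | Null`; the names are illustrative, not compiler API):

```scala
// Variance encoding: +1 = covariant only, -1 = contravariant only,
// 0 = appears both ways (or invariantly), None = does not appear at all.
def mergeVariance(previous: Option[Int], local: Int): Option[Int] = previous match
  case None                            => Some(local) // first occurrence: record it
  case Some(p) if p == local || p == 0 => previous    // nothing new learned
  case _                               => Some(0)     // conflict: collapse to invariant

@main def varianceDemo(): Unit =
  // A parameter seen covariantly, then contravariantly, becomes invariant:
  println(mergeVariance(mergeVariance(None, 1), -1)) // prints Some(0)
```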
diff --git a/compiler/src/dotty/tools/dotc/core/Uniques.scala b/compiler/src/dotty/tools/dotc/core/Uniques.scala index 4078a2b1051a..da6b0aba88bd 100644 --- a/compiler/src/dotty/tools/dotc/core/Uniques.scala +++ b/compiler/src/dotty/tools/dotc/core/Uniques.scala @@ -1,9 +1,9 @@ package dotty.tools.dotc package core -import Types._, Contexts._, util.Stats._, Hashable._, Names._ +import Types.*, Contexts.*, util.Stats.*, Hashable.*, Names.* import config.Config -import Decorators._ +import Decorators.* import util.{WeakHashSet, Stats} import WeakHashSet.Entry import scala.annotation.tailrec diff --git a/compiler/src/dotty/tools/dotc/core/Variances.scala b/compiler/src/dotty/tools/dotc/core/Variances.scala index 2401b43c8e17..e18a31e46769 100644 --- a/compiler/src/dotty/tools/dotc/core/Variances.scala +++ b/compiler/src/dotty/tools/dotc/core/Variances.scala @@ -1,9 +1,9 @@ package dotty.tools.dotc package core -import Types._, Contexts._, Flags._, Symbols._, Annotations._ +import Types.*, Contexts.*, Flags.*, Symbols.*, Annotations.* import TypeApplications.TypeParamInfo -import Decorators._ +import Decorators.* object Variances { diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala index 4aa60d973264..6ad71c5fd1ce 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala @@ -331,7 +331,7 @@ object ClassfileConstants { inline val impdep1 = 0xfe inline val impdep2 = 0xff - import Flags._ + import Flags.* abstract class FlagTranslation { protected def baseFlags(jflags: Int): FlagSet = EmptyFlags diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index 0c701eb03d38..93ebcfeee62a 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -7,12 +7,12 @@ import scala.language.unsafeNulls import dotty.tools.tasty.{ TastyReader, TastyHeaderUnpickler } -import Contexts._, Symbols._, Types._, Names._, StdNames._, NameOps._, Scopes._, Decorators._ -import SymDenotations._, unpickleScala2.Scala2Unpickler._, Constants._, Annotations._, util.Spans._ -import Phases._ +import Contexts.*, Symbols.*, Types.*, Names.*, StdNames.*, NameOps.*, Scopes.*, Decorators.* +import SymDenotations.*, unpickleScala2.Scala2Unpickler.*, Constants.*, Annotations.*, util.Spans.* +import Phases.* import ast.{ tpd, untpd } -import ast.tpd._, util._ -import java.io.{ ByteArrayOutputStream, IOException } +import ast.tpd.*, util.* +import java.io.IOException import java.lang.Integer.toHexString import java.util.UUID @@ -23,8 +23,36 @@ import scala.annotation.switch import typer.Checking.checkNonCyclic import io.{AbstractFile, ZipArchive} import scala.util.control.NonFatal +import dotty.tools.dotc.classpath.FileUtils.classToTasty + +import scala.compiletime.uninitialized object ClassfileParser { + + object Header: + opaque type Version = Long + + object Version: + val Unknown: Version = -1L + + def brokenVersionAddendum(classfileVersion: Version)(using Context): String = + if classfileVersion.exists then + val (maj, min) = (classfileVersion.majorVersion, classfileVersion.minorVersion) + val scalaVersion = config.Properties.versionNumberString + i""" (version $maj.$min), + | please check the JDK compatibility of your Scala version 
($scalaVersion)""" + else + "" + + def apply(major: Int, minor: Int): Version = + (major.toLong << 32) | (minor.toLong & 0xFFFFFFFFL) + extension (version: Version) + def exists: Boolean = version != Unknown + def majorVersion: Int = (version >> 32).toInt + def minorVersion: Int = (version & 0xFFFFFFFFL).toInt + + import ClassfileConstants.* + /** Marker trait for unpicklers that can be embedded in classfiles. */ trait Embedded @@ -50,6 +78,191 @@ object ClassfileParser { mapOver(tp) } } + + private[classfile] def parseHeader(classfile: AbstractFile)(using in: DataReader): Header.Version = { + val magic = in.nextInt + if (magic != JAVA_MAGIC) + throw new IOException(s"class file '${classfile}' has wrong magic number 0x${toHexString(magic)}, should be 0x${toHexString(JAVA_MAGIC)}") + val minorVersion = in.nextChar.toInt + val majorVersion = in.nextChar.toInt + if ((majorVersion < JAVA_MAJOR_VERSION) || + ((majorVersion == JAVA_MAJOR_VERSION) && + (minorVersion < JAVA_MINOR_VERSION))) + throw new IOException( + s"class file '${classfile}' has unknown version $majorVersion.$minorVersion, should be at least $JAVA_MAJOR_VERSION.$JAVA_MINOR_VERSION") + Header.Version(majorVersion, minorVersion) + } + + abstract class AbstractConstantPool(using in: DataReader) { + protected val len = in.nextChar + protected val starts = new Array[Int](len) + protected val values = new Array[AnyRef](len) + protected val internalized = new Array[NameOrString](len) + + { var i = 1 + while (i < starts.length) { + starts(i) = in.bp + i += 1 + (in.nextByte.toInt: @switch) match { + case CONSTANT_UTF8 | CONSTANT_UNICODE => + in.skip(in.nextChar) + case CONSTANT_CLASS | CONSTANT_STRING | CONSTANT_METHODTYPE => + in.skip(2) + case CONSTANT_METHODHANDLE => + in.skip(3) + case CONSTANT_FIELDREF | CONSTANT_METHODREF | CONSTANT_INTFMETHODREF + | CONSTANT_NAMEANDTYPE | CONSTANT_INTEGER | CONSTANT_FLOAT + | CONSTANT_INVOKEDYNAMIC => + in.skip(4) + case CONSTANT_LONG | CONSTANT_DOUBLE => + in.skip(8) + i += 1 + case _ => + errorBadTag(in.bp - 1) + } + } + } + + /** Return the name found at given index. */ + def getName(index: Int)(using in: DataReader): NameOrString = { + if (index <= 0 || len <= index) + errorBadIndex(index) + + values(index) match { + case name: NameOrString => name + case null => + val start = starts(index) + if (in.getByte(start).toInt != CONSTANT_UTF8) errorBadTag(start) + val len = in.getChar(start + 1).toInt + val name = new NameOrString(in.getUTF(start + 1, len + 2)) + values(index) = name + name + } + } + + /** Return the name found at given index in the constant pool, with '/' replaced by '.'. */ + def getExternalName(index: Int)(using in: DataReader): NameOrString = { + if (index <= 0 || len <= index) + errorBadIndex(index) + + if (internalized(index) == null) + internalized(index) = new NameOrString(getName(index).value.replace('/', '.')) + + internalized(index) + } + + def getClassSymbol(index: Int)(using ctx: Context, in: DataReader): Symbol + + /** Return the external name of the class info structure found at 'index'. + * Use 'getClassSymbol' if the class is sure to be a top-level class. + */ + def getClassName(index: Int)(using in: DataReader): NameOrString = { + val start = starts(index) + if (in.getByte(start).toInt != CONSTANT_CLASS) errorBadTag(start) + getExternalName(in.getChar(start + 1)) + } + + /** Return the type of a class constant entry. Since + * arrays are considered to be class types, they might + * appear as entries in 'newarray' or 'cast' opcodes. 
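+     *  For example, a `checkcast` to `int[]` refers to a CONSTANT_CLASS
+     *  entry whose name is the array descriptor `[I` rather than a class name.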
+ */ + def getClassOrArrayType(index: Int)(using ctx: Context, in: DataReader): Type + + def getType(index: Int, isVarargs: Boolean = false)(using Context, DataReader): Type + + def getSuperClass(index: Int)(using Context, DataReader): Symbol = { + assert(index != 0, "attempt to parse java.lang.Object from classfile") + getClassSymbol(index) + } + + def getConstant(index: Int)(using ctx: Context, in: DataReader): Constant = { + if (index <= 0 || len <= index) errorBadIndex(index) + var value = values(index) + if (value eq null) { + val start = starts(index) + value = (in.getByte(start).toInt: @switch) match { + case CONSTANT_STRING => + Constant(getName(in.getChar(start + 1).toInt).value) + case CONSTANT_INTEGER => + Constant(in.getInt(start + 1)) + case CONSTANT_FLOAT => + Constant(in.getFloat(start + 1)) + case CONSTANT_LONG => + Constant(in.getLong(start + 1)) + case CONSTANT_DOUBLE => + Constant(in.getDouble(start + 1)) + case CONSTANT_CLASS => + getClassOrArrayType(index).typeSymbol + case _ => + errorBadTag(start) + } + values(index) = value + } + value match { + case ct: Constant => ct + case cls: Symbol => Constant(cls.typeRef) + case arr: Type => Constant(arr) + } + } + + private def getSubArray(bytes: Array[Byte]): Array[Byte] = { + val decodedLength = ByteCodecs.decode(bytes) + val arr = new Array[Byte](decodedLength) + System.arraycopy(bytes, 0, arr, 0, decodedLength) + arr + } + + def getBytes(index: Int)(using in: DataReader): Array[Byte] = { + if (index <= 0 || len <= index) errorBadIndex(index) + var value = values(index).asInstanceOf[Array[Byte]] + if (value eq null) { + val start = starts(index) + if (in.getByte(start).toInt != CONSTANT_UTF8) errorBadTag(start) + val len = in.getChar(start + 1) + val bytes = new Array[Byte](len) + in.getBytes(start + 3, bytes) + value = getSubArray(bytes) + values(index) = value + } + value + } + + def getBytes(indices: List[Int])(using in: DataReader): Array[Byte] = { + assert(!indices.isEmpty, indices) + var value = values(indices.head).asInstanceOf[Array[Byte]] + if (value eq null) { + val bytesBuffer = ArrayBuffer.empty[Byte] + for (index <- indices) { + if (index <= 0 || AbstractConstantPool.this.len <= index) errorBadIndex(index) + val start = starts(index) + if (in.getByte(start).toInt != CONSTANT_UTF8) errorBadTag(start) + val len = in.getChar(start + 1) + val buf = new Array[Byte](len) + in.getBytes(start + 3, buf) + bytesBuffer ++= buf + } + value = getSubArray(bytesBuffer.toArray) + values(indices.head) = value + } + value + } + + /** Throws an exception signaling a bad constant index. */ + protected def errorBadIndex(index: Int)(using in: DataReader) = + throw new RuntimeException("bad constant pool index: " + index + " at pos: " + in.bp) + + /** Throws an exception signaling a bad tag at given address. 
*/ + protected def errorBadTag(start: Int)(using in: DataReader) = + throw new RuntimeException("bad constant pool tag " + in.getByte(start) + " at byte " + start) + } + + protected class NameOrString(val value: String) { + private var _name: SimpleName = null + def name: SimpleName = { + if (_name eq null) _name = termName(value) + _name + } + } } class ClassfileParser( @@ -57,19 +270,20 @@ class ClassfileParser( classRoot: ClassDenotation, moduleRoot: ClassDenotation)(ictx: Context) { - import ClassfileConstants._ - import ClassfileParser._ + import ClassfileConstants.* + import ClassfileParser.* protected val staticModule: Symbol = moduleRoot.sourceModule(using ictx) - protected val instanceScope: MutableScope = newScope(0) // the scope of all instance definitions - protected val staticScope: MutableScope = newScope(0) // the scope of all static definitions - protected var pool: ConstantPool = _ // the classfile's constant pool + protected val instanceScope: MutableScope = newScope(0) // the scope of all instance definitions + protected val staticScope: MutableScope = newScope(0) // the scope of all static definitions + protected var pool: ConstantPool = uninitialized // the classfile's constant pool - protected var currentClassName: SimpleName = _ // JVM name of the current class + protected var currentClassName: SimpleName = uninitialized // JVM name of the current class protected var classTParams: Map[Name, Symbol] = Map() private var Scala2UnpicklingMode = Mode.Scala2Unpickling + private var classfileVersion: Header.Version = Header.Version.Unknown classRoot.info = NoLoader().withDecls(instanceScope) moduleRoot.info = NoLoader().withDecls(staticScope).withSourceModule(staticModule) @@ -82,7 +296,7 @@ class ClassfileParser( def run()(using Context): Option[Embedded] = try ctx.base.reusableDataReader.withInstance { reader => implicit val reader2 = reader.reset(classfile) report.debuglog("[class] >> " + classRoot.fullName) - parseHeader() + classfileVersion = parseHeader(classfile) this.pool = new ConstantPool val res = parseClass() this.pool = null @@ -91,22 +305,11 @@ class ClassfileParser( catch { case e: RuntimeException => if (ctx.debug) e.printStackTrace() + val addendum = Header.Version.brokenVersionAddendum(classfileVersion) throw new IOException( - i"""class file ${classfile.canonicalPath} is broken, reading aborted with ${e.getClass} - |${Option(e.getMessage).getOrElse("")}""") - } - - private def parseHeader()(using in: DataReader): Unit = { - val magic = in.nextInt - if (magic != JAVA_MAGIC) - throw new IOException(s"class file '${classfile}' has wrong magic number 0x${toHexString(magic)}, should be 0x${toHexString(JAVA_MAGIC)}") - val minorVersion = in.nextChar.toInt - val majorVersion = in.nextChar.toInt - if ((majorVersion < JAVA_MAJOR_VERSION) || - ((majorVersion == JAVA_MAJOR_VERSION) && - (minorVersion < JAVA_MINOR_VERSION))) - throw new IOException( - s"class file '${classfile}' has unknown version $majorVersion.$minorVersion, should be at least $JAVA_MAJOR_VERSION.$JAVA_MINOR_VERSION") + i""" class file ${classfile.canonicalPath} is broken$addendum, + | reading aborted with ${e.getClass}: + | ${Option(e.getMessage).getOrElse("")}""") } /** Return the class symbol of the given name. 
*/ @@ -918,12 +1121,6 @@ class ClassfileParser( Some(unpickler) } - def unpickleTASTY(bytes: Array[Byte]): Some[Embedded] = { - val unpickler = new tasty.DottyUnpickler(bytes) - unpickler.enter(roots = Set(classRoot, moduleRoot, moduleRoot.sourceModule))(using ctx.withSource(util.NoSource)) - Some(unpickler) - } - def parseScalaSigBytes: Array[Byte] = { val tag = in.nextByte.toChar assert(tag == STRING_TAG, tag) @@ -944,54 +1141,11 @@ class ClassfileParser( } if (scan(tpnme.TASTYATTR)) { - val attrLen = in.nextInt - val bytes = in.nextBytes(attrLen) - if (attrLen == 16) { // A tasty attribute with that has only a UUID (16 bytes) implies the existence of the .tasty file - val tastyBytes: Array[Byte] = classfile match { // TODO: simplify when #3552 is fixed - case classfile: io.ZipArchive#Entry => // We are in a jar - val path = classfile.parent.lookupName( - classfile.name.stripSuffix(".class") + ".tasty", directory = false - ) - if (path != null) { - val stream = path.input - try { - val tastyOutStream = new ByteArrayOutputStream() - val buffer = new Array[Byte](1024) - var read = stream.read(buffer, 0, buffer.length) - while (read != -1) { - tastyOutStream.write(buffer, 0, read) - read = stream.read(buffer, 0, buffer.length) - } - tastyOutStream.flush() - tastyOutStream.toByteArray - } finally { - stream.close() - } - } - else { - report.error(em"Could not find $path in ${classfile.underlyingSource}") - Array.empty - } - case _ => - val dir = classfile.container - val name = classfile.name.stripSuffix(".class") + ".tasty" - val tastyFileOrNull = dir.lookupName(name, false) - if (tastyFileOrNull == null) { - report.error(em"Could not find TASTY file $name under $dir") - Array.empty - } else - tastyFileOrNull.toByteArray - } - if (tastyBytes.nonEmpty) { - val reader = new TastyReader(bytes, 0, 16) - val expectedUUID = new UUID(reader.readUncompressedLong(), reader.readUncompressedLong()) - val tastyUUID = new TastyHeaderUnpickler(tastyBytes).readHeader() - if (expectedUUID != tastyUUID) - report.warning(s"$classfile is out of sync with its TASTy file. Loaded TASTy file. Try cleaning the project to fix this issue", NoSourcePosition) - return unpickleTASTY(tastyBytes) - } - } - else return unpickleTASTY(bytes) + val hint = + if classfile.classToTasty.isDefined then "This is likely a bug in the compiler. Please report." + else "This `.tasty` file is missing. Try cleaning the project to fix this issue." + report.error(s"Loading Scala 3 binary from $classfile. It should have been loaded from `.tasty` file. 
$hint", NoSourcePosition) + return None } if scan(tpnme.ScalaATTR) && !scalaUnpickleWhitelist.contains(classRoot.name) @@ -1160,78 +1314,7 @@ class ClassfileParser( private def isStatic(flags: Int) = (flags & JAVA_ACC_STATIC) != 0 private def hasAnnotation(flags: Int) = (flags & JAVA_ACC_ANNOTATION) != 0 - protected class NameOrString(val value: String) { - private var _name: SimpleName = null - def name: SimpleName = { - if (_name eq null) _name = termName(value) - _name - } - } - - def getClassSymbol(name: SimpleName)(using Context): Symbol = - if (name.endsWith("$") && (name ne nme.nothingRuntimeClass) && (name ne nme.nullRuntimeClass)) - // Null$ and Nothing$ ARE classes - requiredModule(name.dropRight(1)) - else classNameToSymbol(name) - - class ConstantPool(using in: DataReader) { - private val len = in.nextChar - private val starts = new Array[Int](len) - private val values = new Array[AnyRef](len) - private val internalized = new Array[NameOrString](len) - - { var i = 1 - while (i < starts.length) { - starts(i) = in.bp - i += 1 - (in.nextByte.toInt: @switch) match { - case CONSTANT_UTF8 | CONSTANT_UNICODE => - in.skip(in.nextChar) - case CONSTANT_CLASS | CONSTANT_STRING | CONSTANT_METHODTYPE => - in.skip(2) - case CONSTANT_METHODHANDLE => - in.skip(3) - case CONSTANT_FIELDREF | CONSTANT_METHODREF | CONSTANT_INTFMETHODREF - | CONSTANT_NAMEANDTYPE | CONSTANT_INTEGER | CONSTANT_FLOAT - | CONSTANT_INVOKEDYNAMIC => - in.skip(4) - case CONSTANT_LONG | CONSTANT_DOUBLE => - in.skip(8) - i += 1 - case _ => - errorBadTag(in.bp - 1) - } - } - } - - /** Return the name found at given index. */ - def getName(index: Int)(using in: DataReader): NameOrString = { - if (index <= 0 || len <= index) - errorBadIndex(index) - - values(index) match { - case name: NameOrString => name - case null => - val start = starts(index) - if (in.getByte(start).toInt != CONSTANT_UTF8) errorBadTag(start) - val len = in.getChar(start + 1).toInt - val name = new NameOrString(in.getUTF(start + 1, len + 2)) - values(index) = name - name - } - } - - /** Return the name found at given index in the constant pool, with '/' replaced by '.'. */ - def getExternalName(index: Int)(using in: DataReader): NameOrString = { - if (index <= 0 || len <= index) - errorBadIndex(index) - - if (internalized(index) == null) - internalized(index) = new NameOrString(getName(index).value.replace('/', '.')) - - internalized(index) - } - + class ConstantPool(using in: DataReader) extends AbstractConstantPool { def getClassSymbol(index: Int)(using ctx: Context, in: DataReader): Symbol = { if (index <= 0 || len <= index) errorBadIndex(index) var c = values(index).asInstanceOf[Symbol] @@ -1245,19 +1328,6 @@ class ClassfileParser( c } - /** Return the external name of the class info structure found at 'index'. - * Use 'getClassSymbol' if the class is sure to be a top-level class. - */ - def getClassName(index: Int)(using in: DataReader): NameOrString = { - val start = starts(index) - if (in.getByte(start).toInt != CONSTANT_CLASS) errorBadTag(start) - getExternalName(in.getChar(start + 1)) - } - - /** Return the type of a class constant entry. Since - * arrays are considered to be class types, they might - * appear as entries in 'newarray' or 'cast' opcodes. 
- */ def getClassOrArrayType(index: Int)(using ctx: Context, in: DataReader): Type = { if (index <= 0 || len <= index) errorBadIndex(index) val value = values(index) @@ -1285,90 +1355,12 @@ class ClassfileParser( def getType(index: Int, isVarargs: Boolean = false)(using Context, DataReader): Type = sigToType(getExternalName(index).value, isVarargs = isVarargs) + } - def getSuperClass(index: Int)(using Context, DataReader): Symbol = { - assert(index != 0, "attempt to parse java.lang.Object from classfile") - getClassSymbol(index) - } - - def getConstant(index: Int)(using ctx: Context, in: DataReader): Constant = { - if (index <= 0 || len <= index) errorBadIndex(index) - var value = values(index) - if (value eq null) { - val start = starts(index) - value = (in.getByte(start).toInt: @switch) match { - case CONSTANT_STRING => - Constant(getName(in.getChar(start + 1).toInt).value) - case CONSTANT_INTEGER => - Constant(in.getInt(start + 1)) - case CONSTANT_FLOAT => - Constant(in.getFloat(start + 1)) - case CONSTANT_LONG => - Constant(in.getLong(start + 1)) - case CONSTANT_DOUBLE => - Constant(in.getDouble(start + 1)) - case CONSTANT_CLASS => - getClassOrArrayType(index).typeSymbol - case _ => - errorBadTag(start) - } - values(index) = value - } - value match { - case ct: Constant => ct - case cls: Symbol => Constant(cls.typeRef) - case arr: Type => Constant(arr) - } - } - - private def getSubArray(bytes: Array[Byte]): Array[Byte] = { - val decodedLength = ByteCodecs.decode(bytes) - val arr = new Array[Byte](decodedLength) - System.arraycopy(bytes, 0, arr, 0, decodedLength) - arr - } - - def getBytes(index: Int)(using in: DataReader): Array[Byte] = { - if (index <= 0 || len <= index) errorBadIndex(index) - var value = values(index).asInstanceOf[Array[Byte]] - if (value eq null) { - val start = starts(index) - if (in.getByte(start).toInt != CONSTANT_UTF8) errorBadTag(start) - val len = in.getChar(start + 1) - val bytes = new Array[Byte](len) - in.getBytes(start + 3, bytes) - value = getSubArray(bytes) - values(index) = value - } - value - } - - def getBytes(indices: List[Int])(using in: DataReader): Array[Byte] = { - assert(!indices.isEmpty, indices) - var value = values(indices.head).asInstanceOf[Array[Byte]] - if (value eq null) { - val bytesBuffer = ArrayBuffer.empty[Byte] - for (index <- indices) { - if (index <= 0 || ConstantPool.this.len <= index) errorBadIndex(index) - val start = starts(index) - if (in.getByte(start).toInt != CONSTANT_UTF8) errorBadTag(start) - val len = in.getChar(start + 1) - val buf = new Array[Byte](len) - in.getBytes(start + 3, buf) - bytesBuffer ++= buf - } - value = getSubArray(bytesBuffer.toArray) - values(indices.head) = value - } - value - } - - /** Throws an exception signaling a bad constant index. */ - private def errorBadIndex(index: Int)(using in: DataReader) = - throw new RuntimeException("bad constant pool index: " + index + " at pos: " + in.bp) + def getClassSymbol(name: SimpleName)(using Context): Symbol = + if (name.endsWith("$") && (name ne nme.nothingRuntimeClass) && (name ne nme.nullRuntimeClass)) + // Null$ and Nothing$ ARE classes + requiredModule(name.dropRight(1)) + else classNameToSymbol(name) - /** Throws an exception signaling a bad tag at given address. 
*/
-    private def errorBadTag(start: Int)(using in: DataReader) =
-      throw new RuntimeException("bad constant pool tag " + in.getByte(start) + " at byte " + start)
-  }
 }
diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileTastyUUIDParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileTastyUUIDParser.scala
new file mode 100644
index 000000000000..e2220e40c6b4
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileTastyUUIDParser.scala
@@ -0,0 +1,109 @@
+package dotty.tools.dotc
+package core.classfile
+
+import scala.language.unsafeNulls
+import scala.compiletime.uninitialized
+
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Decorators.*
+import dotty.tools.dotc.core.Names.*
+import dotty.tools.dotc.core.StdNames.*
+import dotty.tools.dotc.core.Symbols.*
+import dotty.tools.dotc.core.Types.*
+import dotty.tools.dotc.util.*
+import dotty.tools.io.AbstractFile
+import dotty.tools.tasty.TastyReader
+
+import ClassfileParser.Header
+
+import java.io.IOException
+import java.lang.Integer.toHexString
+import java.util.UUID
+
+class ClassfileTastyUUIDParser(classfile: AbstractFile)(ictx: Context) {
+
+  import ClassfileConstants.*
+
+  private var pool: ConstantPool = uninitialized // the classfile's constant pool
+  private var classfileVersion: Header.Version = Header.Version.Unknown
+
+  def checkTastyUUID(tastyUUID: UUID)(using Context): Unit = try ctx.base.reusableDataReader.withInstance { reader =>
+    implicit val reader2 = reader.reset(classfile)
+    this.classfileVersion = ClassfileParser.parseHeader(classfile)
+    this.pool = new ConstantPool
+    checkTastyAttr(tastyUUID)
+    this.pool = null
+  }
+  catch {
+    case e: RuntimeException =>
+      if (ctx.debug) e.printStackTrace()
+      val addendum = Header.Version.brokenVersionAddendum(classfileVersion)
+      throw new IOException(
+        i"""  class file ${classfile.canonicalPath} is broken$addendum,
+           |  reading aborted with ${e.getClass}:
+           |  ${Option(e.getMessage).getOrElse("")}""")
+  }
+
+  private def checkTastyAttr(tastyUUID: UUID)(using ctx: Context, in: DataReader): Unit = {
+    in.nextChar // jflags
+    in.nextChar // nameIdx
+    skipSuperclasses()
+    skipMembers() // fields
+    skipMembers() // methods
+    val attrs = in.nextChar
+    val attrbp = in.bp
+
+    def scan(target: TypeName): Boolean = {
+      in.bp = attrbp
+      var i = 0
+      while (i < attrs && pool.getName(in.nextChar).name.toTypeName != target) {
+        val attrLen = in.nextInt
+        in.skip(attrLen)
+        i += 1
+      }
+      i < attrs
+    }
+
+    if (scan(tpnme.TASTYATTR)) {
+      val attrLen = in.nextInt
+      val bytes = in.nextBytes(attrLen)
+      if (attrLen == 16) { // A tasty attribute that has only a UUID (16 bytes) implies the existence of the .tasty file
+        val expectedUUID =
+          val reader = new TastyReader(bytes, 0, 16)
+          new UUID(reader.readUncompressedLong(), reader.readUncompressedLong())
+        if (expectedUUID != tastyUUID)
+          report.warning(s"$classfile is out of sync with its TASTy file. The TASTy file was loaded anyway. Try cleaning the project to fix this issue", NoSourcePosition)
+      }
+      else
+        // Before 3.0.0 we had a mode where we could embed the TASTY bytes in the classfile. This has not been supported in any stable release.
+        report.error(s"Found a TASTY attribute with a length different from 16 in $classfile. This is likely a bug in the compiler.
Please report.", NoSourcePosition)
+    }
+
+  }
+
+  private def skipAttributes()(using in: DataReader): Unit = {
+    val attrCount = in.nextChar
+    for (i <- 0 until attrCount) {
+      in.skip(2); in.skip(in.nextInt)
+    }
+  }
+
+  private def skipMembers()(using in: DataReader): Unit = {
+    val memberCount = in.nextChar
+    for (i <- 0 until memberCount) {
+      in.skip(6); skipAttributes()
+    }
+  }
+
+  private def skipSuperclasses()(using in: DataReader): Unit = {
+    in.skip(2) // superclass
+    val ifaces = in.nextChar
+    in.skip(2 * ifaces)
+  }
+
+  class ConstantPool(using in: DataReader) extends ClassfileParser.AbstractConstantPool {
+    def getClassOrArrayType(index: Int)(using ctx: Context, in: DataReader): Type = throw new UnsupportedOperationException
+    def getClassSymbol(index: Int)(using ctx: Context, in: DataReader): Symbol = throw new UnsupportedOperationException
+    def getType(index: Int, isVarargs: Boolean)(using Context, DataReader): Type = throw new UnsupportedOperationException
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ReusableDataReader.scala b/compiler/src/dotty/tools/dotc/core/classfile/ReusableDataReader.scala
index eb1649091f77..e9bb7337c948 100644
--- a/compiler/src/dotty/tools/dotc/core/classfile/ReusableDataReader.scala
+++ b/compiler/src/dotty/tools/dotc/core/classfile/ReusableDataReader.scala
@@ -9,10 +9,10 @@ import java.io.{DataInputStream, InputStream}
 import java.nio.{BufferUnderflowException, ByteBuffer}

 final class ReusableDataReader() extends DataReader {
-  private[this] var data = new Array[Byte](32768)
-  private[this] var bb: ByteBuffer = ByteBuffer.wrap(data)
-  private[this] var size = 0
-  private[this] val reader: DataInputStream = {
+  private var data = new Array[Byte](32768)
+  private var bb: ByteBuffer = ByteBuffer.wrap(data)
+  private var size = 0
+  private val reader: DataInputStream = {
     val stream = new InputStream {
       override def read(): Int = try {
        bb.get & 0xff
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/AttributePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/AttributePickler.scala
new file mode 100644
index 000000000000..cd2756f4c752
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/AttributePickler.scala
@@ -0,0 +1,38 @@
+package dotty.tools.dotc.core.tasty
+
+import dotty.tools.dotc.ast.{tpd, untpd}
+
+import dotty.tools.tasty.TastyBuffer
+import dotty.tools.tasty.TastyFormat.*
+
+object AttributePickler:
+
+  def pickleAttributes(
+    attributes: Attributes,
+    pickler: TastyPickler,
+    buf: TastyBuffer
+  ): Unit =
+    pickler.newSection(AttributesSection, buf)
+
+    var lastTag = -1
+    def assertTagOrder(tag: Int): Unit =
+      assert(tag != lastTag, s"duplicate attribute tag: $tag")
+      assert(tag > lastTag, s"attribute tags are not ordered: $tag after $lastTag")
+      lastTag = tag
+
+    for tag <- attributes.booleanTags do
+      assert(isBooleanAttrTag(tag), "Not a boolean attribute tag: " + tag)
+      assertTagOrder(tag)
+      buf.writeByte(tag)
+
+    assert(attributes.stringTagValues.exists(_._1 == SOURCEFILEattr))
+    for (tag, value) <- attributes.stringTagValues do
+      assert(isStringAttrTag(tag), "Not a string attribute tag: " + tag)
+      assertTagOrder(tag)
+      val utf8Ref = pickler.nameBuffer.utf8Index(value)
+      buf.writeByte(tag)
+      buf.writeNat(utf8Ref.index)
+
+  end pickleAttributes
+
+end AttributePickler
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/AttributeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/AttributeUnpickler.scala
new file mode 100644
index 000000000000..43a2bea27216
--- /dev/null
+++ 
b/compiler/src/dotty/tools/dotc/core/tasty/AttributeUnpickler.scala @@ -0,0 +1,38 @@ +package dotty.tools.dotc +package core.tasty + +import scala.language.unsafeNulls +import scala.collection.immutable.BitSet +import scala.collection.immutable.TreeMap + +import dotty.tools.tasty.{TastyFormat, TastyReader, TastyBuffer}, TastyFormat.{isBooleanAttrTag, isStringAttrTag} +import dotty.tools.dotc.core.tasty.TastyUnpickler.NameTable + +class AttributeUnpickler(reader: TastyReader, nameAtRef: NameTable): + import reader._ + + lazy val attributes: Attributes = { + val booleanTags = BitSet.newBuilder + val stringTagValue = List.newBuilder[(Int, String)] + + var lastTag = -1 + while !isAtEnd do + val tag = readByte() + if isBooleanAttrTag(tag) then + booleanTags += tag + else if isStringAttrTag(tag) then + val utf8Ref = readNameRef() + val value = nameAtRef(utf8Ref).toString + stringTagValue += tag -> value + else + assert(false, "unknown attribute tag: " + tag) + + assert(tag != lastTag, s"duplicate attribute tag: $tag") + assert(tag > lastTag, s"attribute tags are not ordered: $tag after $lastTag") + lastTag = tag + end while + + new Attributes(booleanTags.result(), stringTagValue.result()) + } + +end AttributeUnpickler diff --git a/compiler/src/dotty/tools/dotc/core/tasty/Attributes.scala b/compiler/src/dotty/tools/dotc/core/tasty/Attributes.scala new file mode 100644 index 000000000000..9e7c62ea9b5d --- /dev/null +++ b/compiler/src/dotty/tools/dotc/core/tasty/Attributes.scala @@ -0,0 +1,46 @@ +package dotty.tools.dotc.core.tasty + +import dotty.tools.tasty.TastyFormat.* + +import scala.collection.immutable.BitSet +import scala.collection.immutable.TreeMap + +class Attributes private[tasty]( + private[tasty] val booleanTags: BitSet, + private[tasty] val stringTagValues: List[(Int, String)], +) { + def scala2StandardLibrary: Boolean = booleanTags(SCALA2STANDARDLIBRARYattr) + def explicitNulls: Boolean = booleanTags(EXPLICITNULLSattr) + def captureChecked: Boolean = booleanTags(CAPTURECHECKEDattr) + def withPureFuns: Boolean = booleanTags(WITHPUREFUNSattr) + def isJava: Boolean = booleanTags(JAVAattr) + def isOutline: Boolean = booleanTags(OUTLINEattr) + def sourceFile: Option[String] = stringTagValues.find(_._1 == SOURCEFILEattr).map(_._2) +} + +object Attributes: + def apply( + sourceFile: String, + scala2StandardLibrary: Boolean, + explicitNulls: Boolean, + captureChecked: Boolean, + withPureFuns: Boolean, + isJava: Boolean, + isOutline: Boolean, + ): Attributes = + val booleanTags = BitSet.newBuilder + if scala2StandardLibrary then booleanTags += SCALA2STANDARDLIBRARYattr + if explicitNulls then booleanTags += EXPLICITNULLSattr + if captureChecked then booleanTags += CAPTURECHECKEDattr + if withPureFuns then booleanTags += WITHPUREFUNSattr + if isJava then booleanTags += JAVAattr + if isOutline then booleanTags += OUTLINEattr + + val stringTagValues = List.newBuilder[(Int, String)] + stringTagValues += SOURCEFILEattr -> sourceFile + + new Attributes(booleanTags.result(), stringTagValues.result()) + end apply + + val empty: Attributes = + new Attributes(BitSet.empty, Nil) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/CommentPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/CommentPickler.scala index fde6c669045d..10df2a437af6 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/CommentPickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/CommentPickler.scala @@ -22,11 +22,8 @@ object CommentPickler: def pickleComment(addr: Addr, comment: Comment): Unit = if addr != NoAddr 
then - val bytes = comment.raw.getBytes(StandardCharsets.UTF_8).nn - val length = bytes.length buf.writeAddr(addr) - buf.writeNat(length) - buf.writeBytes(bytes, length) + buf.writeUtf8(comment.raw) buf.writeLongInt(comment.span.coords) def traverse(x: Any): Unit = x match diff --git a/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala index eb0d140df51e..d3b5c647b9c5 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala @@ -13,19 +13,15 @@ import TastyBuffer.Addr import java.nio.charset.StandardCharsets class CommentUnpickler(reader: TastyReader) { - import reader._ + import reader.* private[tasty] lazy val comments: HashMap[Addr, Comment] = { val comments = new HashMap[Addr, Comment] while (!isAtEnd) { val addr = readAddr() - val length = readNat() - if (length > 0) { - val bytes = readBytes(length) - val position = new Span(readLongInt()) - val rawComment = new String(bytes, StandardCharsets.UTF_8) - comments(addr) = Comment(position, rawComment) - } + val rawComment = readUtf8() + val position = new Span(readLongInt()) + comments(addr) = Comment(position, rawComment) } comments } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala index b35c5c9f1acc..4f083b09b015 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala @@ -5,25 +5,28 @@ package tasty import scala.language.unsafeNulls -import Contexts._, SymDenotations._, Decorators._ +import Contexts.*, SymDenotations.*, Decorators.* import dotty.tools.dotc.ast.tpd -import TastyUnpickler._ +import TastyUnpickler.* import classfile.ClassfileParser import Names.SimpleName import TreeUnpickler.UnpickleMode import dotty.tools.tasty.TastyReader -import dotty.tools.tasty.TastyFormat.{ASTsSection, PositionsSection, CommentsSection} +import dotty.tools.tasty.TastyFormat.{ASTsSection, PositionsSection, CommentsSection, AttributesSection} +import dotty.tools.tasty.TastyVersion + +import dotty.tools.io.AbstractFile object DottyUnpickler { /** Exception thrown if classfile is corrupted */ class BadSignature(msg: String) extends RuntimeException(msg) - class TreeSectionUnpickler(posUnpickler: Option[PositionUnpickler], commentUnpickler: Option[CommentUnpickler]) + class TreeSectionUnpickler(compilationUnitInfo: CompilationUnitInfo, posUnpickler: Option[PositionUnpickler], commentUnpickler: Option[CommentUnpickler]) extends SectionUnpickler[TreeUnpickler](ASTsSection) { def unpickle(reader: TastyReader, nameAtRef: NameTable): TreeUnpickler = - new TreeUnpickler(reader, nameAtRef, posUnpickler, commentUnpickler) + new TreeUnpickler(reader, nameAtRef, compilationUnitInfo, posUnpickler, commentUnpickler) } class PositionsSectionUnpickler extends SectionUnpickler[PositionUnpickler](PositionsSection) { @@ -35,17 +38,33 @@ object DottyUnpickler { def unpickle(reader: TastyReader, nameAtRef: NameTable): CommentUnpickler = new CommentUnpickler(reader) } + + class AttributesSectionUnpickler extends SectionUnpickler[AttributeUnpickler](AttributesSection) { + def unpickle(reader: TastyReader, nameAtRef: NameTable): AttributeUnpickler = + new AttributeUnpickler(reader, nameAtRef) + } } /** A class for unpickling Tasty trees and symbols. 
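+ *
+ *  Besides the AST section, this reads the Positions, Comments and Attributes
+ *  sections of the TASTy file when they are present (see the section
+ *  unpicklers above).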
+ * @param tastyFile tasty file from which we unpickle (used for CompilationUnitInfo) * @param bytes the bytearray containing the Tasty file from which we unpickle * @param mode the tasty file contains package (TopLevel), an expression (Term) or a type (TypeTree) */ -class DottyUnpickler(bytes: Array[Byte], mode: UnpickleMode = UnpickleMode.TopLevel) extends ClassfileParser.Embedded with tpd.TreeProvider { - import tpd._ - import DottyUnpickler._ +class DottyUnpickler(tastyFile: AbstractFile, bytes: Array[Byte], mode: UnpickleMode = UnpickleMode.TopLevel) extends ClassfileParser.Embedded with tpd.TreeProvider { + import tpd.* + import DottyUnpickler.* val unpickler: TastyUnpickler = new TastyUnpickler(bytes) + + val tastyAttributes: Attributes = + unpickler.unpickle(new AttributesSectionUnpickler) + .map(_.attributes).getOrElse(Attributes.empty) + val compilationUnitInfo: CompilationUnitInfo = + import unpickler.header.{majorVersion, minorVersion, experimentalVersion} + val tastyVersion = TastyVersion(majorVersion, minorVersion, experimentalVersion) + val tastyInfo = TastyInfo(tastyVersion, tastyAttributes) + new CompilationUnitInfo(tastyFile, Some(tastyInfo)) + private val posUnpicklerOpt = unpickler.unpickle(new PositionsSectionUnpickler) private val commentUnpicklerOpt = unpickler.unpickle(new CommentsSectionUnpickler) private val treeUnpickler = unpickler.unpickle(treeSectionUnpickler(posUnpicklerOpt, commentUnpicklerOpt)).get @@ -56,8 +75,11 @@ class DottyUnpickler(bytes: Array[Byte], mode: UnpickleMode = UnpickleMode.TopLe def enter(roots: Set[SymDenotation])(using Context): Unit = treeUnpickler.enter(roots) - protected def treeSectionUnpickler(posUnpicklerOpt: Option[PositionUnpickler], commentUnpicklerOpt: Option[CommentUnpickler]): TreeSectionUnpickler = - new TreeSectionUnpickler(posUnpicklerOpt, commentUnpicklerOpt) + protected def treeSectionUnpickler( + posUnpicklerOpt: Option[PositionUnpickler], + commentUnpicklerOpt: Option[CommentUnpickler], + ): TreeSectionUnpickler = + new TreeSectionUnpickler(compilationUnitInfo, posUnpicklerOpt, commentUnpicklerOpt) protected def computeRootTrees(using Context): List[Tree] = treeUnpickler.unpickle(mode) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala b/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala index 1ddcf9afe1dc..076c37435478 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala @@ -4,17 +4,17 @@ package core package tasty import dotty.tools.tasty.TastyBuffer -import TastyBuffer._ +import TastyBuffer.* import collection.mutable import Names.{Name, chrs, SimpleName, DerivedName, TypeName} -import NameKinds._ -import NameOps._ +import NameKinds.* +import NameOps.* import scala.io.Codec import NameTags.{SIGNED, TARGETSIGNED} class NameBuffer extends TastyBuffer(10000) { - import NameBuffer._ + import NameBuffer.* private val nameRefs = new mutable.LinkedHashMap[Name, NameRef] @@ -49,6 +49,10 @@ class NameBuffer extends TastyBuffer(10000) { } } + def utf8Index(value: String): NameRef = + import Decorators.toTermName + nameIndex(value.toTermName) + private inline def withLength(inline op: Unit, lengthWidth: Int = 1): Unit = { val lengthAddr = currentAddr var i = 0 diff --git a/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala index 924b87bec003..86076517021a 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala +++ 
b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala @@ -5,19 +5,19 @@ package tasty import dotty.tools.tasty.TastyFormat.{SOURCE, PositionsSection} import dotty.tools.tasty.TastyBuffer -import TastyBuffer._ +import TastyBuffer.* -import ast._ +import ast.* import Trees.WithLazyFields import util.{SourceFile, NoSource} -import core._ -import Annotations._, Decorators._ +import core.* +import Annotations.*, Decorators.* import collection.mutable -import util.Spans._ +import util.Spans.* import reporting.Message object PositionPickler: - import ast.tpd._ + import ast.tpd.* // Note: This could be just TreeToAddr => Addr if functions are specialized to value classes. // We use a SAM type to avoid boxing of Addr diff --git a/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala index 9c66e43eae80..975264a288dd 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala @@ -3,20 +3,22 @@ package dotc package core package tasty +import scala.compiletime.uninitialized + import dotty.tools.tasty.{TastyFormat, TastyBuffer, TastyReader} import TastyFormat.SOURCE import TastyBuffer.{Addr, NameRef} -import util.Spans._ +import util.Spans.* import Names.TermName /** Unpickler for tree positions */ class PositionUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName) { - import reader._ + import reader.* - private var myLineSizes: Array[Int] = _ - private var mySpans: util.HashMap[Addr, Span] = _ - private var mySourcePaths: util.HashMap[Addr, String] = _ + private var myLineSizes: Array[Int] = uninitialized + private var mySpans: util.HashMap[Addr, Span] = uninitialized + private var mySourceNameRefs: util.HashMap[Addr, NameRef] = uninitialized private var isDefined = false def ensureDefined(): Unit = { @@ -29,15 +31,14 @@ class PositionUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName) { i += 1 mySpans = util.HashMap[Addr, Span]() - mySourcePaths = util.HashMap[Addr, String]() + mySourceNameRefs = util.HashMap[Addr, NameRef]() var curIndex = 0 var curStart = 0 var curEnd = 0 while (!isAtEnd) { val header = readInt() if (header == SOURCE) { - val path = nameAtRef(readNameRef()).toString - mySourcePaths(Addr(curIndex)) = path + mySourceNameRefs(Addr(curIndex)) = readNameRef() } else { val addrDelta = header >> 3 @@ -62,9 +63,9 @@ class PositionUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName) { mySpans } - private[tasty] def sourcePaths: util.ReadOnlyMap[Addr, String] = { + private[tasty] def sourceNameRefs: util.ReadOnlyMap[Addr, NameRef] = { ensureDefined() - mySourcePaths + mySourceNameRefs } private[tasty] def lineSizes: Array[Int] = { @@ -73,5 +74,5 @@ class PositionUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName) { } def spanAt(addr: Addr): Span = spans.getOrElse(addr, NoSpan) - def sourcePathAt(addr: Addr): String = sourcePaths.getOrElse(addr, "") + def sourcePathAt(addr: Addr): String = sourceNameRefs.get(addr).fold("")(nameAtRef(_).toString) } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/ScratchData.scala b/compiler/src/dotty/tools/dotc/core/tasty/ScratchData.scala index b36c78a77ac6..889cf31a40b0 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/ScratchData.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/ScratchData.scala @@ -10,6 +10,7 @@ class ScratchData: val pickledIndices = new mutable.BitSet val commentBuffer = new TastyBuffer(5000) + val 
attributeBuffer = new TastyBuffer(32) def reset() = assert(delta ne delta1) @@ -17,4 +18,5 @@ class ScratchData: positionBuffer.reset() pickledIndices.clear() commentBuffer.reset() + attributeBuffer.reset() diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyClassName.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyClassName.scala index c938868a3c48..0a7068b65445 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyClassName.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyClassName.scala @@ -5,10 +5,10 @@ package tasty import dotty.tools.tasty.{TastyBuffer, TastyReader} import TastyBuffer.NameRef -import Contexts._, Decorators._ +import Contexts.*, Decorators.* import Names.TermName import StdNames.nme -import TastyUnpickler._ +import TastyUnpickler.* import dotty.tools.tasty.TastyFormat.ASTsSection /** Reads the package and class name of the class contained in this TASTy */ @@ -21,9 +21,9 @@ class TastyClassName(bytes: Array[Byte]) { def readName(): Option[(TermName, TermName)] = unpickle(new TreeSectionUnpickler) class TreeSectionUnpickler extends SectionUnpickler[(TermName, TermName)](ASTsSection) { - import dotty.tools.tasty.TastyFormat._ + import dotty.tools.tasty.TastyFormat.* def unpickle(reader: TastyReader, tastyName: NameTable): (TermName, TermName) = { - import reader._ + import reader.* def readNames(packageName: TermName): (TermName, TermName) = { val tag = readByte() if (tag >= firstLengthTreeTag) { diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala index 4f1e84ac9184..214f7a5f6702 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala @@ -6,21 +6,15 @@ package tasty import scala.language.unsafeNulls import dotty.tools.tasty.{TastyBuffer, TastyFormat, TastyHash} -import TastyFormat._ -import TastyBuffer._ +import TastyFormat.* +import TastyBuffer.* import collection.mutable import core.Symbols.ClassSymbol -import Decorators._ +import Decorators.* -object TastyPickler { - - private val versionStringBytes = { - val compilerString = s"Scala ${config.Properties.simpleVersionString}" - compilerString.getBytes(java.nio.charset.StandardCharsets.UTF_8) - } - -} +object TastyPickler: + private val versionString = s"Scala ${config.Properties.simpleVersionString}" class TastyPickler(val rootCls: ClassSymbol) { @@ -48,13 +42,12 @@ class TastyPickler(val rootCls: ClassSymbol) { val uuidHi: Long = otherSectionHashes.fold(0L)(_ ^ _) val headerBuffer = { - val buf = new TastyBuffer(header.length + TastyPickler.versionStringBytes.length + 32) + val buf = new TastyBuffer(header.length + TastyPickler.versionString.length + 32) for (ch <- header) buf.writeByte(ch.toByte) buf.writeNat(MajorVersion) buf.writeNat(MinorVersion) buf.writeNat(ExperimentalVersion) - buf.writeNat(TastyPickler.versionStringBytes.length) - buf.writeBytes(TastyPickler.versionStringBytes, TastyPickler.versionStringBytes.length) + buf.writeUtf8(TastyPickler.versionString) buf.writeUncompressedLong(uuidLow) buf.writeUncompressedLong(uuidHi) buf diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala index 5876b69edfde..a74607dbc9d5 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala @@ -5,13 +5,17 @@ package tasty import dotty.tools.tasty.{TastyBuffer, TastyReader} 
import TastyBuffer.NameRef -import Contexts._, Decorators._ +import Contexts.*, Decorators.* import Names.Name -import TastyUnpickler._ +import TastyUnpickler.* import util.Spans.offsetToInt -import dotty.tools.tasty.TastyFormat.{ASTsSection, PositionsSection, CommentsSection} +import dotty.tools.tasty.TastyFormat.{ASTsSection, PositionsSection, CommentsSection, AttributesSection} import java.nio.file.{Files, Paths} import dotty.tools.io.{JarArchive, Path} +import dotty.tools.tasty.TastyFormat.header + +import scala.compiletime.uninitialized +import dotty.tools.tasty.TastyBuffer.Addr object TastyPrinter: @@ -60,54 +64,57 @@ object TastyPrinter: class TastyPrinter(bytes: Array[Byte]) { - private val sb: StringBuilder = new StringBuilder + class TastyPrinterUnpickler extends TastyUnpickler(bytes) { + var namesStart: Addr = uninitialized + var namesEnd: Addr = uninitialized + override def readNames() = { + namesStart = reader.currentAddr + super.readNames() + namesEnd = reader.currentAddr + } + } - private val unpickler: TastyUnpickler = new TastyUnpickler(bytes) + private val unpickler: TastyPrinterUnpickler = new TastyPrinterUnpickler import unpickler.{nameAtRef, unpickle} private def nameToString(name: Name): String = name.debugString private def nameRefToString(ref: NameRef): String = nameToString(nameAtRef(ref)) - private def printNames(): Unit = + private def printHeader(sb: StringBuilder): Unit = + val header = unpickler.header + sb.append("Header:\n") + sb.append(s" version: ${header.majorVersion}.${header.minorVersion}.${header.experimentalVersion}\n") + sb.append(" tooling: ").append(header.toolingVersion).append("\n") + sb.append(" UUID: ").append(header.uuid).append("\n") + sb.append("\n") + + private def printNames(sb: StringBuilder): Unit = + sb.append(s"Names (${unpickler.namesEnd.index - unpickler.namesStart.index} bytes, starting from ${unpickler.namesStart.index}):\n") for ((name, idx) <- nameAtRef.contents.zipWithIndex) { - val index = nameStr("%4d".format(idx)) + val index = nameStr("%6d".format(idx)) sb.append(index).append(": ").append(nameToString(name)).append("\n") } def showContents(): String = { - sb.append("Names:\n") - printNames() - sb.append("\n") - sb.append("Trees:\n") - unpickle(new TreeSectionUnpickler) match { - case Some(s) => sb.append(s) - case _ => - } - sb.append("\n\n") - unpickle(new PositionSectionUnpickler) match { - case Some(s) => sb.append(s) - case _ => - } - sb.append("\n\n") - unpickle(new CommentSectionUnpickler) match { - case Some(s) => sb.append(s) - case _ => - } + val sb: StringBuilder = new StringBuilder + printHeader(sb) + printNames(sb) + unpickle(new TreeSectionUnpickler(sb)) + unpickle(new PositionSectionUnpickler(sb)) + unpickle(new CommentSectionUnpickler(sb)) + unpickle(new AttributesSectionUnpickler(sb)) sb.result } - class TreeSectionUnpickler extends SectionUnpickler[String](ASTsSection) { - import dotty.tools.tasty.TastyFormat._ - - private val sb: StringBuilder = new StringBuilder - - def unpickle(reader: TastyReader, tastyName: NameTable): String = { - import reader._ + class TreeSectionUnpickler(sb: StringBuilder) extends SectionUnpickler[Unit](ASTsSection) { + import dotty.tools.tasty.TastyFormat.* + def unpickle(reader: TastyReader, tastyName: NameTable): Unit = { + import reader.* var indent = 0 def newLine() = { - val length = treeStr("%5d".format(index(currentAddr) - index(startAddr))) - sb.append(s"\n $length:" + " " * indent) + val length = treeStr("%6d".format(index(currentAddr) - index(startAddr))) + 
sb.append(s"\n$length:" + " " * indent) } def printNat() = sb.append(treeStr(" " + readNat())) def printName() = { @@ -163,61 +170,74 @@ class TastyPrinter(bytes: Array[Byte]) { } indent -= 2 } - sb.append(s"start = ${reader.startAddr}, base = $base, current = $currentAddr, end = $endAddr\n") - sb.append(s"${endAddr.index - startAddr.index} bytes of AST, base = $currentAddr\n") + sb.append(s"\n\nTrees (${endAddr.index - startAddr.index} bytes, starting from $base):") while (!isAtEnd) { printTree() newLine() } - sb.result } } - class PositionSectionUnpickler extends SectionUnpickler[String](PositionsSection) { - - private val sb: StringBuilder = new StringBuilder - - def unpickle(reader: TastyReader, tastyName: NameTable): String = { + class PositionSectionUnpickler(sb: StringBuilder) extends SectionUnpickler[Unit](PositionsSection) { + def unpickle(reader: TastyReader, tastyName: NameTable): Unit = { + import reader.* val posUnpickler = new PositionUnpickler(reader, tastyName) - sb.append(s" ${reader.endAddr.index - reader.currentAddr.index}") - sb.append(" position bytes:\n") + sb.append(s"\n\nPositions (${reader.endAddr.index - reader.startAddr.index} bytes, starting from $base):\n") val lineSizes = posUnpickler.lineSizes - sb.append(s" lines: ${lineSizes.length}\n") - sb.append(posUnpickler.lineSizes.mkString(" line sizes: ", ", ", "\n")) - sb.append(" positions:\n") + sb.append(s" lines: ${lineSizes.length}\n") + sb.append(s" line sizes:\n") + val windowSize = 20 + for window <-posUnpickler.lineSizes.sliding(windowSize, windowSize) do + sb.append(" ").append(window.mkString(", ")).append("\n") + // sb.append(posUnpickler.lineSizes.mkString(" line sizes: ", ", ", "\n")) + sb.append(" positions:\n") val spans = posUnpickler.spans val sorted = spans.toSeq.sortBy(_._1.index) for ((addr, pos) <- sorted) { - sb.append(treeStr("%10d".format(addr.index))) + sb.append(treeStr("%6d".format(addr.index))) sb.append(s": ${offsetToInt(pos.start)} .. 
${pos.end}\n") } - val sources = posUnpickler.sourcePaths - sb.append(s"\n source paths:\n") + val sources = posUnpickler.sourceNameRefs + sb.append(s"\n source paths:\n") val sortedPath = sources.toSeq.sortBy(_._1.index) - for ((addr, path) <- sortedPath) { - sb.append(treeStr("%10d: ".format(addr.index))) - sb.append(path) + for ((addr, nameRef) <- sortedPath) { + sb.append(treeStr("%6d".format(addr.index))) + sb.append(": ") + sb.append(nameStr(s"${nameRef.index} [${tastyName(nameRef)}]")) sb.append("\n") } - - sb.result } } - class CommentSectionUnpickler extends SectionUnpickler[String](CommentsSection) { - - private val sb: StringBuilder = new StringBuilder - - def unpickle(reader: TastyReader, tastyName: NameTable): String = { - sb.append(s" ${reader.endAddr.index - reader.currentAddr.index}") + class CommentSectionUnpickler(sb: StringBuilder) extends SectionUnpickler[Unit](CommentsSection) { + def unpickle(reader: TastyReader, tastyName: NameTable): Unit = { + import reader.* val comments = new CommentUnpickler(reader).comments - sb.append(s" comment bytes:\n") - val sorted = comments.toSeq.sortBy(_._1.index) - for ((addr, cmt) <- sorted) { - sb.append(treeStr("%10d".format(addr.index))) - sb.append(s": ${cmt.raw} (expanded = ${cmt.isExpanded})\n") - } + if !comments.isEmpty then + sb.append(s"\n\nComments (${reader.endAddr.index - reader.startAddr.index} bytes, starting from $base):\n") + val sorted = comments.toSeq.sortBy(_._1.index) + for ((addr, cmt) <- sorted) { + sb.append(treeStr("%6d".format(addr.index))) + sb.append(s": ${cmt.raw} (expanded = ${cmt.isExpanded})\n") + } + } + } + + class AttributesSectionUnpickler(sb: StringBuilder) extends SectionUnpickler[Unit](AttributesSection) { + import dotty.tools.tasty.TastyFormat.* + def unpickle(reader: TastyReader, tastyName: NameTable): Unit = { + import reader.* + sb.append(s"\n\nAttributes (${reader.endAddr.index - reader.startAddr.index} bytes, starting from $base):\n") + while !isAtEnd do + val tag = readByte() + sb.append(" ").append(attributeTagToString(tag)) + if isBooleanAttrTag(tag) then () + else if isStringAttrTag(tag) then + val utf8Ref = readNameRef() + val value = nameAtRef(utf8Ref).toString + sb.append(nameStr(s" ${utf8Ref.index} [$value]")) + sb.append("\n") sb.result } } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala index 70bdec7780e2..6fe648ee98d3 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala @@ -4,13 +4,15 @@ package tasty import scala.language.unsafeNulls -import dotty.tools.tasty.{TastyFormat, TastyBuffer, TastyReader, TastyHeaderUnpickler} -import TastyFormat.NameTags._, TastyFormat.nameTagToString +import dotty.tools.tasty.{TastyFormat, TastyVersion, TastyBuffer, TastyReader, TastyHeaderUnpickler, UnpicklerConfig} +import TastyFormat.NameTags.*, TastyFormat.nameTagToString import TastyBuffer.NameRef import scala.collection.mutable import Names.{TermName, termName, EmptyTermName} -import NameKinds._ +import NameKinds.* +import dotty.tools.tasty.TastyHeader +import dotty.tools.tasty.TastyBuffer.Addr object TastyUnpickler { @@ -24,12 +26,45 @@ object TastyUnpickler { def apply(ref: NameRef): TermName = names(ref.index) def contents: Iterable[TermName] = names } + + trait Scala3CompilerConfig extends UnpicklerConfig: + private def asScala3Compiler(version: TastyVersion): String = + if (version.major == 28) { + // scala 
3.x.y series
+        if (version.experimental > 0)
+          // scenario here is someone using 3.4.0 to read 3.4.1-RC1-NIGHTLY; in this case, we should show 3.4 nightly.
+          s"the same nightly or snapshot Scala 3.${version.minor - 1} compiler"
+        else s"a Scala 3.${version.minor}.0 compiler or newer"
+      }
+      else if (version.experimental > 0) "the same Scala compiler" // unknown major version, just say same
+      else "a more recent Scala compiler" // unknown major version, just say later
+
+    /** The description of the upgraded scala compiler that can read the given TASTy version */
+    final def upgradedReaderTool(version: TastyVersion): String = asScala3Compiler(version)
+
+    /** The description of the upgraded scala compiler that can produce the given TASTy version */
+    final def upgradedProducerTool(version: TastyVersion): String = asScala3Compiler(version)
+
+    final def recompileAdditionalInfo: String = """
+      | Usually this means that the library dependency containing this file should be updated.""".stripMargin
+
+    final def upgradeAdditionalInfo(fileVersion: TastyVersion): String =
+      if (fileVersion.isExperimental && experimentalVersion == 0) {
+        """
+          | Note that you are using a stable compiler, which cannot read experimental TASTy.""".stripMargin
+      }
+      else ""
+  end Scala3CompilerConfig
+
+  /** A config for the TASTy reader of a scala 3 compiler */
+  val scala3CompilerConfig: UnpicklerConfig = new Scala3CompilerConfig with UnpicklerConfig.DefaultTastyVersion {}
+
 }

-import TastyUnpickler._
+import TastyUnpickler.*

-class TastyUnpickler(reader: TastyReader) {
-  import reader._
+class TastyUnpickler(protected val reader: TastyReader) {
+  import reader.*

   def this(bytes: Array[Byte]) = this(new TastyReader(bytes))
@@ -88,10 +123,13 @@ class TastyUnpickler(reader: TastyReader) {
     result
   }

-  new TastyHeaderUnpickler(reader).readHeader()
+  val header: TastyHeader =
+    new TastyHeaderUnpickler(scala3CompilerConfig, reader).readFullHeader()

-  locally {
+  def readNames(): Unit =
     until(readEnd()) { nameAtRef.add(readNameContents()) }
+
+  def loadSections(): Unit = {
     while (!isAtEnd) {
       val secName = readString()
       val secEnd = readEnd()
@@ -99,6 +137,8 @@ class TastyUnpickler(reader: TastyReader) {
       goto(secEnd)
     }
   }
+  readNames()
+  loadSections()

   def unpickle[R](sec: SectionUnpickler[R]): Option[R] =
     for (reader <- sectionReader.get(sec.name)) yield
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala
index 645c6f81e539..d70b56fca43d 100644
--- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala
@@ -5,30 +5,30 @@ package tasty

 import scala.language.unsafeNulls

-import dotty.tools.tasty.TastyFormat._
-import dotty.tools.tasty.TastyBuffer._
+import dotty.tools.tasty.TastyFormat.*
+import dotty.tools.tasty.TastyBuffer.*

-import ast.Trees._
+import ast.Trees.*
 import ast.{untpd, tpd}
-import Contexts._, Symbols._, Types._, Names._, Constants._, Decorators._, Annotations._, Flags._
-import Comments.{Comment, CommentsContext}
-import NameKinds._
-import StdNames.nme
-import transform.SymUtils._
+import Contexts.*, Symbols.*, Types.*, Names.*, Constants.*, Decorators.*, Annotations.*, Flags.*
+import Comments.{Comment, docCtx}
+import NameKinds.*
+import StdNames.{nme, tpnme}
 import config.Config
 import collection.mutable
 import reporting.{Profile, NoProfile}
 import dotty.tools.tasty.TastyFormat.ASTsSection
+import quoted.QuotePatterns

 object TreePickler:
  class StackSizeExceeded(val mdef: tpd.MemberDef) extends Exception

-class TreePickler(pickler: TastyPickler) {
+class TreePickler(pickler: TastyPickler, attributes: Attributes) {
   val buf: TreeBuffer = new TreeBuffer
   pickler.newSection(ASTsSection, buf)
-  import buf._
+  import buf.*
   import pickler.nameBuffer.nameIndex
-  import tpd._
+  import tpd.*
   import TreePickler.*

   private val symRefs = Symbols.MutableSymbolMap[Addr](256)
@@ -49,6 +49,9 @@ class TreePickler(pickler: TastyPickler) {

   private var profile: Profile = NoProfile

+  private val isOutlinePickle: Boolean = attributes.isOutline
+  private val isJavaPickle: Boolean = attributes.isJava
+
   def treeAnnots(tree: untpd.MemberDef): List[Tree] =
     val ts = annotTrees.lookup(tree)
     if ts == null then Nil else ts.toList
@@ -188,19 +191,19 @@ class TreePickler(pickler: TastyPickler) {
     def pickleExternalRef(sym: Symbol) = {
       val isShadowedRef =
         sym.isClass && tpe.prefix.member(sym.name).symbol != sym
-      if (sym.is(Flags.Private) || isShadowedRef) {
+      if sym.is(Flags.Private) || isShadowedRef then
         writeByte(if (tpe.isType) TYPEREFin else TERMREFin)
         withLength {
           pickleNameAndSig(sym.name, sym.signature, sym.targetName)
           pickleType(tpe.prefix)
           pickleType(sym.owner.typeRef)
         }
-      }
-      else {
+      else if isJavaPickle && sym == defn.FromJavaObjectSymbol then
+        pickleType(defn.ObjectType) // when unpickling Java TASTy, replace by <FromJavaObject>
+      else
         writeByte(if (tpe.isType) TYPEREF else TERMREF)
         pickleNameAndSig(sym.name, tpe.signature, sym.targetName)
         pickleType(tpe.prefix)
-      }
     }
     if (sym.is(Flags.Package)) {
       writeByte(if (tpe.isType) TYPEREFpkg else TERMREFpkg)
@@ -322,6 +325,11 @@ class TreePickler(pickler: TastyPickler) {
     if (!tree.isEmpty) pickleTree(tree)
   }

+  def pickleElidedUnlessEmpty(tree: Tree, tp: Type)(using Context): Unit =
+    if !tree.isEmpty then
+      writeByte(ELIDED)
+      pickleType(tp)
+
   def pickleDef(tag: Int, mdef: MemberDef, tpt: Tree, rhs: Tree = EmptyTree, pickleParams: => Unit = ())(using Context): Unit = {
     val sym = mdef.symbol
@@ -337,7 +345,12 @@ class TreePickler(pickler: TastyPickler) {
       case _: Template | _: Hole => pickleTree(tpt)
       case _ if tpt.isType => pickleTpt(tpt)
     }
-    pickleTreeUnlessEmpty(rhs)
+    if isOutlinePickle && sym.isTerm && isJavaPickle then
+      // TODO: if we introduce outline typing for Scala definitions
+      // then we will need to update the check here
+      pickleElidedUnlessEmpty(rhs, tpt.tpe)
+    else
+      pickleTreeUnlessEmpty(rhs)
     pickleModifiers(sym, mdef)
   }
   catch
@@ -348,7 +361,7 @@ class TreePickler(pickler: TastyPickler) {
       else throw ex
     if sym.is(Method) && sym.owner.isClass then
-      profile.recordMethodSize(sym, currentAddr.index - addr.index, mdef.span)
+      profile.recordMethodSize(sym, (currentAddr.index - addr.index) max 1, mdef.span)
     for docCtx <- ctx.docCtx do
       val comment = docCtx.docstrings.lookup(sym)
       if comment != null then
@@ -527,12 +540,12 @@ class TreePickler(pickler: TastyPickler) {
       case SeqLiteral(elems, elemtpt) =>
         writeByte(REPEATED)
         withLength { pickleTree(elemtpt); elems.foreach(pickleTree) }
-      case Inlined(call, bindings, expansion) =>
+      case tree @ Inlined(call, bindings, expansion) =>
         writeByte(INLINED)
         bindings.foreach(preRegister)
         withLength {
           pickleTree(expansion)
-          if (!call.isEmpty) pickleTree(call)
+          if (!tree.inlinedFromOuterScope) pickleTree(call)
           bindings.foreach { b =>
             assert(b.isInstanceOf[DefDef] || b.isInstanceOf[ValDef])
             pickleTree(b)
@@ -604,7 +617,17 @@ class TreePickler(pickler: TastyPickler) {
             }
           }
         }
-        pickleStats(tree.constr :: rest)
+        if isJavaPickle then
+          val rest0 = rest.dropWhile:
+            case stat: ValOrDefDef =>
stat.symbol.is(Flags.Invisible) + case _ => false + if tree.constr.symbol.is(Flags.Invisible) then + writeByte(SPLITCLAUSE) + pickleStats(rest0) + else + pickleStats(tree.constr :: rest0) + else + pickleStats(tree.constr :: rest) } case Import(expr, selectors) => writeByte(IMPORT) @@ -685,12 +708,20 @@ class TreePickler(pickler: TastyPickler) { .appliedTo(expr) .withSpan(tree.span) ) + case tree: QuotePattern => + // TODO: Add QUOTEPATTERN tag to TASTy + pickleTree(QuotePatterns.encode(tree)) case Hole(_, idx, args, _) => writeByte(HOLE) withLength { writeNat(idx) pickleType(tree.tpe, richTypes = true) - args.foreach(pickleTree) + args.foreach { arg => + arg.tpe match + case _: TermRef if arg.isType => writeByte(EXPLICITtpt) + case _ => + pickleTree(arg) + } } } catch { @@ -699,6 +730,9 @@ class TreePickler(pickler: TastyPickler) { case ex: AssertionError => println(i"error when pickling tree $tree") throw ex + case ex: MatchError => + println(i"error when pickling tree $tree") + throw ex } } @@ -722,7 +756,7 @@ class TreePickler(pickler: TastyPickler) { } def pickleModifiers(sym: Symbol, mdef: MemberDef)(using Context): Unit = { - import Flags._ + import Flags.* var flags = sym.flags val privateWithin = sym.privateWithin if (privateWithin.exists) { @@ -737,7 +771,7 @@ class TreePickler(pickler: TastyPickler) { } def pickleFlags(flags: FlagSet, isTerm: Boolean)(using Context): Unit = { - import Flags._ + import Flags.* def writeModTag(tag: Int) = { assert(isModifierTag(tag)) writeByte(tag) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 98bd7152ff37..b7a25cb75613 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -5,59 +5,62 @@ package tasty import scala.language.unsafeNulls -import Comments.CommentsContext -import Contexts._ -import Symbols._ -import Types._ -import Scopes._ -import SymDenotations._ -import Denotations._ -import Names._ -import NameOps._ -import StdNames._ -import Flags._ -import Constants._ -import Annotations._ -import NameKinds._ -import NamerOps._ -import ContextOps._ +import Comments.docCtx +import Contexts.* +import Symbols.* +import Types.* +import Scopes.* +import SymDenotations.* +import Denotations.* +import Names.* +import NameOps.* +import StdNames.* +import Flags.* +import Constants.* +import Annotations.* +import NameKinds.* +import NamerOps.* +import ContextOps.* import Variances.Invariant import TastyUnpickler.NameTable import typer.ConstFold import typer.Checking.checkNonCyclic -import typer.Nullables._ -import util.Spans._ +import typer.Nullables.* +import util.Spans.* import util.{SourceFile, Property} import ast.{Trees, tpd, untpd} -import Trees._ -import Decorators._ -import transform.SymUtils._ -import cc.{adaptFunctionTypeUnderPureFuns, adaptByNameArgUnderPureFuns} +import Trees.* +import Decorators.* +import dotty.tools.dotc.quoted.QuotePatterns import dotty.tools.tasty.{TastyBuffer, TastyReader} -import TastyBuffer._ +import TastyBuffer.* import scala.annotation.{switch, tailrec} import scala.collection.mutable.ListBuffer import scala.collection.mutable import config.Printers.pickling -import dotty.tools.tasty.TastyFormat._ +import dotty.tools.tasty.TastyFormat.* import scala.annotation.constructorOnly import scala.annotation.internal.sharable +import scala.compiletime.uninitialized /** Unpickler for typed trees * @param reader the reader from which to unpickle + 
*  @param compilationUnitInfo the compilation unit info of the TASTy
  *  @param posUnpicklerOpt the unpickler for positions, if it exists
  *  @param commentUnpicklerOpt the unpickler for comments, if it exists
  */
 class TreeUnpickler(reader: TastyReader,
                     nameAtRef: NameTable,
+                    compilationUnitInfo: CompilationUnitInfo,
                     posUnpicklerOpt: Option[PositionUnpickler],
                     commentUnpicklerOpt: Option[CommentUnpickler]) {
-  import TreeUnpickler._
-  import tpd._
+  import TreeUnpickler.*
+  import tpd.*

   /** A map from addresses of definition entries to the symbols they define */
   private val symAtAddr = new mutable.HashMap[Addr, Symbol]
@@ -88,10 +91,23 @@ class TreeUnpickler(reader: TastyReader,
   private var seenRoots: Set[Symbol] = Set()

   /** The root owner tree. See `OwnerTree` class definition. Set by `enterTopLevel`. */
-  private var ownerTree: OwnerTree = _
+  private var ownerTree: OwnerTree = uninitialized

-  /** Was unpickled class compiled with pureFunctions? */
-  private var knowsPureFuns: Boolean = false
+  /** TASTy attributes */
+  private val attributes: Attributes = compilationUnitInfo.tastyInfo.get.attributes
+
+  /** Was unpickled class compiled with capture checks? */
+  private val withCaptureChecks: Boolean = attributes.captureChecked
+
+  private val unpicklingScala2Library = attributes.scala2StandardLibrary
+
+  /** This dependency was compiled with explicit nulls enabled */
+  // TODO Use this to tag the symbols of this dependency as compiled with explicit nulls (see use of unpicklingScala2Library).
+  private val explicitNulls = attributes.explicitNulls
+
+  private val unpicklingJava = attributes.isJava
+
+  private val isOutline = attributes.isOutline

   private def registerSym(addr: Addr, sym: Symbol) =
     symAtAddr(addr) = sym
@@ -120,7 +136,7 @@ class TreeUnpickler(reader: TastyReader,
   }

   class Completer(reader: TastyReader)(using @constructorOnly _ctx: Context) extends LazyType {
-    import reader._
+    import reader.*
     val owner = ctx.owner
     val mode = ctx.mode
     val source = ctx.source
@@ -129,12 +145,7 @@ class TreeUnpickler(reader: TastyReader,
         def where =
           val f = denot.symbol.associatedFile
           if f == null then "" else s" in $f"
-        if ctx.settings.YdebugUnpickling.value then throw ex
-        else throw TypeError(
-          em"""Could not read definition of $denot$where
-              |An exception was encountered:
-              |  $ex
-              |Run with -Ydebug-unpickling to see full stack trace.""")
+        throw UnpicklingError(denot, where, ex)
       treeAtAddr(currentAddr) =
         try atPhaseBeforeTransforms {
@@ -147,11 +158,16 @@ class TreeUnpickler(reader: TastyReader,
   }

   class TreeReader(val reader: TastyReader) {
-    import reader._
+    import reader.*

     def forkAt(start: Addr): TreeReader = new TreeReader(subReader(start, endAddr))
     def fork: TreeReader = forkAt(currentAddr)

+    def skipParentTree(tag: Int): Unit = {
+      if tag == SPLITCLAUSE then ()
+      else skipTree(tag)
+    }
+    def skipParentTree(): Unit = skipParentTree(readByte())
     def skipTree(tag: Int): Unit = {
       if (tag >= firstLengthTreeTag) goto(readEnd())
       else if (tag >= firstNatASTTreeTag) { readNat(); skipTree() }
@@ -263,7 +279,7 @@ class TreeUnpickler(reader: TastyReader,
     /** Read reference to definition and return symbol created at that definition */
     def readSymRef()(using Context): Symbol = symbolAt(readAddr())

-    /** The symbol at given address; createa new one if none exists yet */
+    /** The symbol at given address; create a new one if none exists yet */
     def symbolAt(addr: Addr)(using Context): Symbol = symAtAddr.get(addr) match { case
Some(sym) => sym @@ -379,7 +395,7 @@ class TreeUnpickler(reader: TastyReader, // Note that the lambda "rt => ..." is not equivalent to a wildcard closure! // Eta expansion of the latter puts readType() out of the expression. case APPLIEDtype => - postProcessFunction(readType().appliedTo(until(end)(readType()))) + readType().appliedTo(until(end)(readType())) case TYPEBOUNDS => val lo = readType() if nothingButMods(end) then @@ -430,7 +446,11 @@ class TreeUnpickler(reader: TastyReader, readPackageRef().termRef case TYPEREF => val name = readName().toTypeName - TypeRef(readType(), name) + val pre = readType() + if unpicklingJava && name == tpnme.Object && (pre.termSymbol eq defn.JavaLangPackageVal) then + defn.FromJavaObjectType + else + TypeRef(pre, name) case TERMREF => val sname = readName() val prefix = readType() @@ -456,8 +476,7 @@ class TreeUnpickler(reader: TastyReader, val ref = readAddr() typeAtAddr.getOrElseUpdate(ref, forkAt(ref).readType()) case BYNAMEtype => - val arg = readType() - ExprType(if knowsPureFuns then arg else arg.adaptByNameArgUnderPureFuns) + ExprType(readType()) case _ => ConstantType(readConstant(tag)) } @@ -491,12 +510,6 @@ class TreeUnpickler(reader: TastyReader, def readTreeRef()(using Context): TermRef = readType().asInstanceOf[TermRef] - /** Under pureFunctions, map all function types to impure function types, - * unless the unpickled class was also compiled with pureFunctions. - */ - private def postProcessFunction(tp: Type)(using Context): Type = - if knowsPureFuns then tp else tp.adaptFunctionTypeUnderPureFuns - // ------ Reading definitions ----------------------------------------------------- private def nothingButMods(end: Addr): Boolean = @@ -517,6 +530,8 @@ class TreeUnpickler(reader: TastyReader, flags |= (if (tag == VALDEF) ModuleValCreationFlags else ModuleClassCreationFlags) if flags.is(Enum, butNot = Method) && name.isTermName then flags |= StableRealizable + if name.isTypeName && withCaptureChecks then + flags |= CaptureChecked if (ctx.owner.isClass) { if (tag == TYPEPARAM) flags |= Param else if (tag == PARAM) { @@ -602,7 +617,11 @@ class TreeUnpickler(reader: TastyReader, val rhsStart = currentAddr val rhsIsEmpty = nothingButMods(end) if (!rhsIsEmpty) skipTree() - val (givenFlags, annotFns, privateWithin) = readModifiers(end) + val (givenFlags0, annotFns, privateWithin) = readModifiers(end) + val givenFlags = + if isClass && unpicklingScala2Library then givenFlags0 | Scala2x | Scala2Tasty + else if unpicklingJava then givenFlags0 | JavaDefined + else givenFlags0 pickling.println(i"creating symbol $name at $start with flags ${givenFlags.flagsString}, isAbsType = $isAbsType, $ttag") val flags = normalizeFlags(tag, givenFlags, name, isAbsType, rhsIsEmpty) def adjustIfModule(completer: LazyType) = @@ -621,8 +640,8 @@ class TreeUnpickler(reader: TastyReader, rootd.symbol case _ => val completer = adjustIfModule(new Completer(subReader(start, end))) - if (isClass) - newClassSymbol(ctx.owner, name.asTypeName, flags, completer, privateWithin, coord) + if isClass then + newClassSymbol(ctx.owner, name.asTypeName, flags, completer, privateWithin, coord, compilationUnitInfo) else newSymbol(ctx.owner, name, flags, completer, privateWithin, coord) } @@ -646,8 +665,8 @@ class TreeUnpickler(reader: TastyReader, } registerSym(start, sym) if (isClass) { - if sym.owner.is(Package) && annots.exists(_.hasSymbol(defn.WithPureFunsAnnot)) then - knowsPureFuns = true + if sym.owner.is(Package) && withCaptureChecks then + sym.setFlag(CaptureChecked) 
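For orientation, the hunks above route several unpickling decisions through the new TASTy attributes: Scala 2 standard-library classes get `Scala2x | Scala2Tasty`, Java-compiled units get `JavaDefined`, and capture-checked type names get `CaptureChecked`. A small self-contained model of that flag derivation, using hypothetical `TastyAttrs`/`SymFlag` stand-ins rather than the compiler's actual `Attributes` and `Flags` types:

```scala
// Illustrative stand-ins, not compiler API.
final case class TastyAttrs(
  isJava: Boolean,
  isOutline: Boolean,
  scala2StandardLibrary: Boolean,
  captureChecked: Boolean
)

enum SymFlag:
  case JavaDefined, Scala2x, Scala2Tasty, CaptureChecked

def extraFlags(attrs: TastyAttrs, isClass: Boolean, isTypeName: Boolean): Set[SymFlag] =
  var flags = Set.empty[SymFlag]
  // Mirrors `givenFlags0 | Scala2x | Scala2Tasty` vs `givenFlags0 | JavaDefined` above:
  if isClass && attrs.scala2StandardLibrary then flags ++= Set(SymFlag.Scala2x, SymFlag.Scala2Tasty)
  else if attrs.isJava then flags += SymFlag.JavaDefined
  // Mirrors `if name.isTypeName && withCaptureChecks then flags |= CaptureChecked`:
  if isTypeName && attrs.captureChecked then flags += SymFlag.CaptureChecked
  flags

@main def showFlags(): Unit =
  println(extraFlags(TastyAttrs(isJava = true, false, false, false), isClass = true, isTypeName = false))
  // Set(JavaDefined)
```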
sym.completer.withDecls(newScope) forkAt(templateStart).indexTemplateParams()(using localContext(sym)) } @@ -997,7 +1016,7 @@ class TreeUnpickler(reader: TastyReader, * parsed in this way as InferredTypeTrees. */ def readParents(withArgs: Boolean)(using Context): List[Tree] = - collectWhile(nextByte != SELFDEF && nextByte != DEFDEF) { + collectWhile({val tag = nextByte; tag != SELFDEF && tag != DEFDEF && tag != SPLITCLAUSE}) { nextUnsharedTag match case APPLY | TYPEAPPLY | BLOCK => if withArgs then readTree() @@ -1024,12 +1043,15 @@ class TreeUnpickler(reader: TastyReader, val bodyFlags = { val bodyIndexer = fork // The first DEFDEF corresponds to the primary constructor - while (bodyIndexer.reader.nextByte != DEFDEF) bodyIndexer.skipTree() + while ({val tag = bodyIndexer.reader.nextByte; tag != DEFDEF && tag != SPLITCLAUSE}) do + bodyIndexer.skipParentTree() bodyIndexer.indexStats(end) } val parentReader = fork val parents = readParents(withArgs = false)(using parentCtx) val parentTypes = parents.map(_.tpe.dealias) + if cls.is(JavaDefined) && parentTypes.exists(_.derivesFrom(defn.JavaAnnotationClass)) then + cls.setFlag(JavaAnnotation) val self = if (nextByte == SELFDEF) { readByte() @@ -1038,10 +1060,41 @@ class TreeUnpickler(reader: TastyReader, else EmptyValDef cls.setNoInitsFlags(parentsKind(parents), bodyFlags) cls.info = ClassInfo( - cls.owner.thisType, cls, parentTypes, cls.unforcedDecls, - selfInfo = if (self.isEmpty) NoType else self.tpt.tpe) - .integrateOpaqueMembers - val constr = readIndexedDef().asInstanceOf[DefDef] + cls.owner.thisType, cls, parentTypes, cls.unforcedDecls, + selfInfo = if (self.isEmpty) NoType else self.tpt.tpe + ).integrateOpaqueMembers + + val constr = + if nextByte == SPLITCLAUSE then + assert(unpicklingJava, s"unexpected SPLITCLAUSE at $start") + val tag = readByte() + def ta = ctx.typeAssigner + val flags = Flags.JavaDefined | Flags.PrivateLocal | Flags.Invisible + val ctorCompleter = new LazyType { + def complete(denot: SymDenotation)(using Context) = + val sym = denot.symbol + val pflags = flags | Flags.Param + val tparamRefs = tparams.map(_.symbol.asType) + lazy val derivedTparamSyms: List[TypeSymbol] = tparams.map: tdef => + val completer = new LazyType { + def complete(denot: SymDenotation)(using Context) = + denot.info = tdef.symbol.asType.info.subst(tparamRefs, derivedTparamRefs) + } + newSymbol(sym, tdef.name, Flags.JavaDefined | Flags.Param, completer, coord = cls.coord) + lazy val derivedTparamRefs: List[Type] = derivedTparamSyms.map(_.typeRef) + val vparamSym = + newSymbol(sym, nme.syntheticParamName(1), pflags, defn.UnitType, coord = cls.coord) + val vparamSymss: List[List[Symbol]] = List(vparamSym) :: Nil + val paramSymss = + if derivedTparamSyms.nonEmpty then derivedTparamSyms :: vparamSymss else vparamSymss + val res = effectiveResultType(sym, paramSymss) + denot.info = methodType(paramSymss, res) + denot.setParamss(paramSymss) + } + val ctorSym = newSymbol(ctx.owner, nme.CONSTRUCTOR, flags, ctorCompleter, coord = coordAt(start)) + tpd.DefDef(ctorSym, EmptyTree).setDefTree // fake primary constructor + else + readIndexedDef().asInstanceOf[DefDef] val mappedParents: LazyTreeList = if parents.exists(_.isInstanceOf[InferredTypeTree]) then // parents were not read fully, will need to be read again later on demand @@ -1162,6 +1215,9 @@ class TreeUnpickler(reader: TastyReader, // ------ Reading trees ----------------------------------------------------- + private def ElidedTree(tpe: Type)(using Context): Tree = + 
untpd.Ident(nme.WILDCARD).withType(tpe)
+
   def readTree()(using Context): Tree = {
     val sctx = sourceChangeContext()
     if (sctx `ne` ctx) return readTree()(using sctx)
@@ -1190,7 +1246,11 @@ class TreeUnpickler(reader: TastyReader,

       def completeSelect(name: Name, sig: Signature, target: Name): Select =
         val qual = readTree()
-        val denot = accessibleDenot(qual.tpe.widenIfUnstable, name, sig, target)
+        val denot =
+          if unpicklingJava && name == tpnme.Object && qual.symbol == defn.JavaLangPackageVal then
+            defn.FromJavaObjectSymbol.denot
+          else
+            accessibleDenot(qual.tpe.widenIfUnstable, name, sig, target)
         makeSelect(qual, name, denot)

       def readQualId(): (untpd.Ident, TypeRef) =
@@ -1209,6 +1269,12 @@ class TreeUnpickler(reader: TastyReader,
           forkAt(readAddr()).readTree()
         case IDENT =>
           untpd.Ident(readName()).withType(readType())
+        case ELIDED =>
+          if !isOutline then
+            val msg =
+              s"Illegal elided tree in unpickler at $start without ${attributeTagToString(OUTLINEattr)}, ${ctx.source}"
+            report.error(msg)
+          ElidedTree(readType())
         case IDENTtpt =>
           untpd.Ident(readName().toTypeName).withType(readType())
         case SELECT =>
@@ -1229,10 +1295,11 @@ class TreeUnpickler(reader: TastyReader,
         case SINGLETONtpt =>
           SingletonTypeTree(readTree())
         case BYNAMEtpt =>
-          val arg = readTpt()
-          ByNameTypeTree(if knowsPureFuns then arg else arg.adaptByNameArgUnderPureFuns)
+          ByNameTypeTree(readTpt())
         case NAMEDARG =>
           NamedArg(readName(), readTree())
+        case EXPLICITtpt =>
+          readTpt()
         case _ =>
           readPathTree()
       }
@@ -1419,7 +1486,11 @@ class TreeUnpickler(reader: TastyReader,
             }
             val patType = readType()
             val argPats = until(end)(readTree())
-            UnApply(fn, implicitArgs, argPats, patType)
+            val unapply = UnApply(fn, implicitArgs, argPats, patType)
+            if fn.symbol == defn.QuoteMatching_ExprMatch_unapply
+               || fn.symbol == defn.QuoteMatching_TypeMatch_unapply
+            then QuotePatterns.decode(unapply)
+            else unapply
           case REFINEDtpt =>
             val refineCls = symAtAddr.getOrElse(start,
               newRefinedClassSymbol(coordAt(start))).asClass
@@ -1436,7 +1507,7 @@ class TreeUnpickler(reader: TastyReader,
             val args = until(end)(readTpt())
             val tree = untpd.AppliedTypeTree(tycon, args)
             val ownType = ctx.typeAssigner.processAppliedType(tree, tycon.tpe.safeAppliedTo(args.tpes))
-            tree.withType(postProcessFunction(ownType))
+            tree.withType(ownType)
           case ANNOTATEDtpt =>
             Annotated(readTpt(), readTree())
           case LAMBDAtpt =>
@@ -1447,17 +1518,29 @@ class TreeUnpickler(reader: TastyReader,
             val fst = readTpt()
             val (bound, scrut) =
               if (nextUnsharedTag == CASEDEF) (EmptyTree, fst) else (fst, readTpt())
-            MatchTypeTree(bound, scrut, readCases(end))
+            val tpt = MatchTypeTree(bound, scrut, readCases(end))
+            // If a match type definition can reduce (e.g. Id in i18261.min)
+            // then it's important to trigger that reduction
+            // before a TypeVar is added to the constraint,
+            // associated to the match type's type parameter.
+            // Otherwise, if the reduction is triggered with that constraint,
+            // the reduction will be simplified,
+            // at which point the TypeVar will replace the type parameter
+            // and then that TypeVar will be cached
+            // as the reduction of the match type definition!
+            //
+            // We also override the type, as that's what Typer does.
+            // The difference here is that a match type that reduces to a non-match type
+            // means the TypeRef for that definition will have a TypeAlias info instead of a MatchAlias.
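The comment above cites `Id` from `i18261.min`; the patch line that follows (`tpt.overwriteType(tpt.tpe.normalized)`) performs the eager normalization it describes. For intuition, a match type of roughly that shape (an illustrative example, not the actual test file) reduces for every concrete scrutinee, so normalizing up front lets the unpickled definition behave like the alias it reduces to:

```scala
// A match type that reduces: Id[Int] normalizes to Int.
type Id[X] = X match
  case Int    => Int
  case String => String

val n: Id[Int] = 42          // OK: Id[Int] normalizes to Int
val s: Id[String] = "hello"  // OK: Id[String] normalizes to String
```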
+ tpt.overwriteType(tpt.tpe.normalized) + tpt case TYPEBOUNDStpt => val lo = readTpt() val hi = if currentAddr == end then lo else readTpt() val alias = if currentAddr == end then EmptyTree else readTpt() createNullableTypeBoundsTree(lo, hi, alias) case HOLE => - val idx = readNat() - val tpe = readType() - val args = until(end)(readTree()) - Hole(true, idx, args, EmptyTree, tpe) + readHole(end, isTerm = true) case _ => readPathTree() } @@ -1488,10 +1571,7 @@ class TreeUnpickler(reader: TastyReader, case HOLE => readByte() val end = readEnd() - val idx = readNat() - val tpe = readType() - val args = until(end)(readTree()) - Hole(false, idx, args, EmptyTree, tpe) + readHole(end, isTerm = false) case _ => if (isTypeTreeTag(nextByte)) readTree() else { @@ -1524,6 +1604,12 @@ class TreeUnpickler(reader: TastyReader, setSpan(start, CaseDef(pat, guard, rhs)) } + def readHole(end: Addr, isTerm: Boolean)(using Context): Tree = + val idx = readNat() + val tpe = readType() + val args = until(end)(readTree()) + Hole(isTerm, idx, args, EmptyTree, tpe) + def readLater[T <: AnyRef](end: Addr, op: TreeReader => Context ?=> T)(using Context): Trees.Lazy[T] = readLaterWithOwner(end, op)(ctx.owner) diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala index 2aeb1bdeefcc..e4c253fddc53 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala @@ -3,7 +3,7 @@ package dotc package core package unpickleScala2 -import Flags._ +import Flags.* /** Variable length byte arrays, with methods for basic pickling and unpickling. * @@ -195,7 +195,7 @@ object PickleBuffer { private type FlagMap = Array[Array[Long]] private val (scalaTermFlagMap, scalaTypeFlagMap) = { - import Scala2Flags._ + import Scala2Flags.* val corr = Map( PROTECTED_PKL -> Protected, diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala index cc2d7dd7ee56..78d1666ad580 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala @@ -3,8 +3,8 @@ package dotc package core package unpickleScala2 -import Symbols._, Types._, Contexts._, Flags._, Names._, StdNames._, Phases._ -import Decorators._ +import Symbols.*, Types.*, Contexts.*, Flags.*, Names.*, StdNames.*, Phases.* +import Decorators.* import scala.collection.mutable.ListBuffer /** Erasure logic specific to Scala 2 symbols. 
*/ diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index deb022d3c261..f876c87e8920 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -9,31 +9,29 @@ import java.io.IOException import java.lang.Float.intBitsToFloat import java.lang.Double.longBitsToDouble -import Contexts._, Symbols._, Types._, Scopes._, SymDenotations._, Names._, NameOps._ -import StdNames._, Denotations._, NameOps._, Flags._, Constants._, Annotations._, Phases._ +import Contexts.*, Symbols.*, Types.*, Scopes.*, SymDenotations.*, Names.*, NameOps.* +import StdNames.*, Denotations.*, NameOps.*, Flags.*, Constants.*, Annotations.*, Phases.* import NameKinds.{Scala2MethodNameKinds, SuperAccessorName, ExpandedName} -import util.Spans._ -import dotty.tools.dotc.ast.{tpd, untpd}, ast.tpd._ +import util.Spans.* +import dotty.tools.dotc.ast.{tpd, untpd}, ast.tpd.* import ast.untpd.Modifiers import backend.sjs.JSDefinitions -import printing.Texts._ +import printing.Texts.* import printing.Printer import io.AbstractFile -import util.common._ +import util.common.* import util.NoSourcePosition import typer.Checking.checkNonCyclic -import typer.Nullables._ -import transform.SymUtils._ -import PickleBuffer._ -import PickleFormat._ -import Decorators._ -import TypeApplications._ +import typer.Nullables.* +import PickleBuffer.* +import PickleFormat.* +import Decorators.* +import TypeApplications.* import classfile.ClassfileParser import scala.collection.mutable import scala.collection.mutable.ListBuffer import scala.annotation.switch -import reporting._ -import cc.{adaptFunctionTypeUnderPureFuns, adaptByNameArgUnderPureFuns} +import reporting.* object Scala2Unpickler { @@ -147,12 +145,13 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas // print("unpickling "); showPickled() // !!! DEBUG - import Scala2Unpickler._ + import Scala2Unpickler.* val moduleRoot: SymDenotation = inContext(ictx) { moduleClassRoot.sourceModule.denot } assert(moduleRoot.isTerm) checkVersion(using ictx) + checkScala2Stdlib(using ictx) private val loadingMirror = defn(using ictx) // was: mirrorThatLoaded(classRoot) @@ -239,6 +238,9 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas " in " + source) } + private def checkScala2Stdlib(using Context): Unit = + assert(!ctx.settings.YcompileScala2Library.value, "No Scala 2 libraries should be unpickled under -Ycompile-scala2-library") + /** The `decls` scope associated with given symbol */ protected def symScope(sym: Symbol): Scope = symScopes.getOrElseUpdate(sym, newScope(0)) @@ -423,7 +425,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas if (slowSearch(name).exists) System.err.println(i"**** slow search found: ${slowSearch(name)}") if (ctx.settings.YdebugMissingRefs.value) Thread.dumpStack() - newStubSymbol(owner, name, source) + newStubSymbol(owner, name, CompilationUnitInfo(source)) } } } @@ -447,10 +449,26 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas // Scala 2 sometimes pickle the same type parameter symbol multiple times // (see i11173 for an example), but we should only unpickle it once. || tag == TYPEsym && flags.is(TypeParam) && symScope(owner).lookup(name.asTypeName).exists + // We discard the private val representing a case accessor. 
We only load the case accessor def.
+         || flags.isAllOf(CaseAccessor | PrivateLocal, butNot = Method)
       then
         // skip this member
         return NoSymbol

+      // Adapt the flags of getters so they become like vals/vars instead.
+      // The info of this symbol is adapted in the `LocalUnpickler`.
+      if flags.isAllOf(Method | Accessor) && !name.toString().endsWith("_$eq") then
+        flags &~= Method | Accessor
+        if !flags.is(StableRealizable) then flags |= Mutable
+
+      // Skip case accessor `<name>$access$<idx>` and keep track of their name to make `<name>` the case accessor
+      if flags.is(CaseAccessor) && name.toString().contains("$access$") then
+        val accessorName = name.toString().split('$').head.toTermName // <name>$access$<idx> -> <name>
+        symScope(owner) // we assume that the `<name>` is listed before the accessor and hence is already entered in the scope
+          .find(decl => decl.isAllOf(ParamAccessor) && decl.name == accessorName)
+          .setFlag(CaseAccessor)
+        return NoSymbol // skip this member
+
     name = name.adjustIfModuleClass(flags)
     if (flags.is(Method))
       name =
@@ -615,7 +633,14 @@
         setClassInfo(denot, tp, fromScala2 = true, selfInfo)
         NamerOps.addConstructorProxies(denot.classSymbol)
       case denot =>
-        val tp1 = translateTempPoly(tp)
+        val tp1 = translateTempPoly(tp) match
+          case ExprType(resultType) if !denot.isOneOf(Param | Method) =>
+            // Adapt the flags of getters so they become like vals/vars instead.
+            // This is the `def` of an accessor that needs to be transformed into
+            // a `val`/`var`. Note that the `Method | Accessor` flags were already
+            // stripped away in `readDisambiguatedSymbol`.
+            resultType
+          case tp1 => tp1
         denot.info =
           if (tag == ALIASsym) TypeAlias(tp1)
           else if (denot.isType) checkNonCyclic(denot.symbol, tp1, reportErrors = false)
@@ -730,8 +755,8 @@
             val info1 = info.symbol.info
             assert(info1.derivesFrom(defn.SingletonClass))
             RefinedType(parent1, name, info1.mapReduceAnd(removeSingleton)(_ & _))
-          case info =>
-            tp.derivedRefinedType(parent1, name, info)
+          case _ =>
+            tp.derivedRefinedType(parent = parent1)
         }
       case tp @ AppliedType(tycon, args) =>
         val tycon1 = tycon.safeDealias
@@ -820,7 +845,7 @@
         }
         val tycon = select(pre, sym)
         val args = until(end, () => readTypeRef())
-        if (sym == defn.ByNameParamClass2x) ExprType(args.head.adaptByNameArgUnderPureFuns)
+        if (sym == defn.ByNameParamClass2x) ExprType(args.head)
         else if (ctx.settings.scalajs.value && args.length == 2 &&
             sym.owner == JSDefinitions.jsdefn.ScalaJSJSPackageClass && sym == JSDefinitions.jsdefn.PseudoUnionClass) {
           // Treat Scala.js pseudo-unions as real unions, this requires a
@@ -829,8 +854,7 @@
         }
         else if args.nonEmpty then
           tycon.safeAppliedTo(EtaExpandIfHK(sym.typeParams, args.map(translateTempPoly)))
-            .adaptFunctionTypeUnderPureFuns
-        else if (sym.typeParams.nonEmpty) tycon.EtaExpand(sym.typeParams)
+        else if (sym.typeParams.nonEmpty) tycon.etaExpand(sym.typeParams)
         else tycon
       case TYPEBOUNDStpe =>
         val lo = readTypeRef()
diff --git a/compiler/src/dotty/tools/dotc/coverage/Coverage.scala b/compiler/src/dotty/tools/dotc/coverage/Coverage.scala
index e41bfcd5d09a..98e67178fb69 100644
--- a/compiler/src/dotty/tools/dotc/coverage/Coverage.scala
+++ b/compiler/src/dotty/tools/dotc/coverage/Coverage.scala
@@ -11,7 +11,12 @@ class Coverage:
   def addStatement(stmt: Statement):
Unit = statementsById(stmt.id) = stmt -/** A statement that can be invoked, and thus counted as "covered" by code coverage tools. */ + +/** + * A statement that can be invoked, and thus counted as "covered" by code coverage tools. + * + * @param line 1-indexed line number + */ case class Statement( location: Location, id: Int, diff --git a/compiler/src/dotty/tools/dotc/coverage/Location.scala b/compiler/src/dotty/tools/dotc/coverage/Location.scala index aa7a586d4b57..88893709b8bd 100644 --- a/compiler/src/dotty/tools/dotc/coverage/Location.scala +++ b/compiler/src/dotty/tools/dotc/coverage/Location.scala @@ -1,7 +1,7 @@ package dotty.tools.dotc package coverage -import ast.tpd._ +import ast.tpd.* import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.core.Flags.* import java.nio.file.Path diff --git a/compiler/src/dotty/tools/dotc/decompiler/DecompilationPrinter.scala b/compiler/src/dotty/tools/dotc/decompiler/DecompilationPrinter.scala index 85a56b9f1d15..16e7cc9c7adc 100644 --- a/compiler/src/dotty/tools/dotc/decompiler/DecompilationPrinter.scala +++ b/compiler/src/dotty/tools/dotc/decompiler/DecompilationPrinter.scala @@ -8,7 +8,7 @@ import java.nio.charset.StandardCharsets import scala.io.Codec -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Phases.Phase import dotty.tools.io.File diff --git a/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala b/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala index c148ff5f9bca..c1bd6b6778fd 100644 --- a/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala +++ b/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala @@ -4,10 +4,10 @@ package decompiler import scala.language.unsafeNulls -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.* import dotty.tools.dotc.core.tasty.TastyHTMLPrinter -import dotty.tools.dotc.reporting._ +import dotty.tools.dotc.reporting.* import dotty.tools.io.AbstractFile import scala.quoted.runtime.impl.QuotesImpl diff --git a/compiler/src/dotty/tools/dotc/decompiler/Main.scala b/compiler/src/dotty/tools/dotc/decompiler/Main.scala index 3cc94f782793..ecd067d3debf 100644 --- a/compiler/src/dotty/tools/dotc/decompiler/Main.scala +++ b/compiler/src/dotty/tools/dotc/decompiler/Main.scala @@ -3,7 +3,7 @@ package dotty.tools.dotc.decompiler import java.nio.file.Files import dotty.tools.dotc -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.io.AbstractFile /** Main class of the `dotc -decompiler` decompiler. 
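Most of the decompiler hunks in this stretch are the mechanical rewrite of Scala 2 wildcard imports (`._`) to the Scala 3 spelling (`.*`). The two forms import exactly the same members, as this self-contained snippet (a hypothetical `WildcardDemo` object, not compiler code) illustrates:

```scala
object WildcardDemo:
  object util:
    val answer = 42
    def twice(n: Int): Int = 2 * n

  // Scala 2 spelling, being phased out: import util._
  import util.*   // Scala 3 spelling, as used throughout this patch

  def main(args: Array[String]): Unit =
    println(twice(answer)) // prints 84
```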
diff --git a/compiler/src/dotty/tools/dotc/decompiler/TASTYDecompiler.scala b/compiler/src/dotty/tools/dotc/decompiler/TASTYDecompiler.scala index 62f3e75d2001..7f396654749e 100644 --- a/compiler/src/dotty/tools/dotc/decompiler/TASTYDecompiler.scala +++ b/compiler/src/dotty/tools/dotc/decompiler/TASTYDecompiler.scala @@ -1,6 +1,6 @@ package dotty.tools.dotc.decompiler -import dotty.tools.dotc.fromtasty._ +import dotty.tools.dotc.fromtasty.* import dotty.tools.dotc.core.Phases.Phase /** Compiler from tasty to user readable high text representation diff --git a/compiler/src/dotty/tools/dotc/fromtasty/AlreadyLoadedCompilationUnit.scala b/compiler/src/dotty/tools/dotc/fromtasty/AlreadyLoadedCompilationUnit.scala index 74c680bda1b7..8700cb730e91 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/AlreadyLoadedCompilationUnit.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/AlreadyLoadedCompilationUnit.scala @@ -7,4 +7,4 @@ import dotty.tools.dotc.util.NoSource * encountered, and attempted to inspect, something that has already been loaded, for example a Scala primitive or a * library class like Option. */ -class AlreadyLoadedCompilationUnit(val className: String) extends CompilationUnit(NoSource) +class AlreadyLoadedCompilationUnit(val className: String) extends CompilationUnit(NoSource, null) diff --git a/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala b/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala index 86ae99b3e0f9..74010b3f64d1 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala @@ -2,16 +2,16 @@ package dotty.tools package dotc package fromtasty -import core._ -import Decorators._ -import Contexts._ +import core.* +import Decorators.* +import Contexts.* import Symbols.{Symbol, ClassSymbol} import SymDenotations.ClassDenotation import Denotations.staticRef -import NameOps._ +import NameOps.* import ast.Trees.Tree import Phases.Phase - +import core.tasty.Attributes /** Load trees from TASTY files */ class ReadTasty extends Phase { @@ -22,7 +22,15 @@ class ReadTasty extends Phase { ctx.settings.fromTasty.value override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = - withMode(Mode.ReadPositions)(units.flatMap(readTASTY(_))) + withMode(Mode.ReadPositions) { + val nextUnits = collection.mutable.ListBuffer.empty[CompilationUnit] + val unitContexts = units.view.map(ctx.fresh.setCompilationUnit) + for unitContext <- unitContexts if addTasty(nextUnits += _)(using unitContext) do () + nextUnits.toList + } + + def addTasty(fn: CompilationUnit => Unit)(using Context): Boolean = monitor(phaseName): + readTASTY(ctx.compilationUnit).foreach(fn) def readTASTY(unit: CompilationUnit)(using Context): Option[CompilationUnit] = unit match { case unit: TASTYCompilationUnit => @@ -39,9 +47,16 @@ class ReadTasty extends Phase { case unpickler: tasty.DottyUnpickler => if (cls.rootTree.isEmpty) None else { - val unit = CompilationUnit(cls, cls.rootTree, forceTrees = true) - unit.pickled += (cls -> (() => unpickler.unpickler.bytes)) - Some(unit) + val attributes = unpickler.tastyAttributes + if attributes.isJava && !ctx.settings.YjavaTasty.value then + // filter out Java compilation units if -Yjava-tasty is not set + None + else if attributes.isOutline && !ctx.settings.YallowOutlineFromTasty.value then + cannotUnpickle("it contains outline signatures and -Yallow-outline-from-tasty is not set.") + else + val unit = CompilationUnit(cls, cls.rootTree, forceTrees = true) + unit.pickled 
+= (cls -> (() => unpickler.unpickler.bytes))
+            Some(unit)
         }
       case tree: Tree[?] =>
         // TODO handle this case correctly to get the tree, or avoid it completely.
@@ -62,8 +77,8 @@
       staticRef(className) match {
         case clsd: ClassDenotation =>
           clsd.infoOrCompleter match {
-            case info: ClassfileLoader =>
-              info.load(clsd) // sets cls.rootTreeOrProvider and cls.moduleClass.treeProvider as a side-effect
+            case info: TastyLoader =>
+              info.doComplete(clsd) // sets cls.rootTreeOrProvider and cls.moduleClass.treeProvider as a side-effect
             case _ =>
           }
           def moduleClass = clsd.owner.info.member(className.moduleClassName).symbol
@@ -77,7 +92,7 @@
         }
       }
     case unit =>
-      Some(unit)
+      Some(unit)
   }

   def run(using Context): Unit = unsupported("run")
diff --git a/compiler/src/dotty/tools/dotc/fromtasty/TASTYCompilationUnit.scala b/compiler/src/dotty/tools/dotc/fromtasty/TASTYCompilationUnit.scala
index 77021efa3050..1d4daff510e7 100644
--- a/compiler/src/dotty/tools/dotc/fromtasty/TASTYCompilationUnit.scala
+++ b/compiler/src/dotty/tools/dotc/fromtasty/TASTYCompilationUnit.scala
@@ -3,6 +3,6 @@ package dotty.tools.dotc.fromtasty
 import dotty.tools.dotc.CompilationUnit
 import dotty.tools.dotc.util.NoSource

-class TASTYCompilationUnit(val className: String) extends CompilationUnit(NoSource) {
+class TASTYCompilationUnit(val className: String) extends CompilationUnit(NoSource, null) {
   override def toString: String = s"class file $className"
 }
diff --git a/compiler/src/dotty/tools/dotc/fromtasty/TASTYCompiler.scala b/compiler/src/dotty/tools/dotc/fromtasty/TASTYCompiler.scala
index 923892b62f13..c0adf454b063 100644
--- a/compiler/src/dotty/tools/dotc/fromtasty/TASTYCompiler.scala
+++ b/compiler/src/dotty/tools/dotc/fromtasty/TASTYCompiler.scala
@@ -2,8 +2,8 @@ package dotty.tools
 package dotc
 package fromtasty

-import core._
-import Contexts._
+import core.*
+import Contexts.*
 import Phases.Phase

 class TASTYCompiler extends Compiler {
diff --git a/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala b/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala
index fb0abe3332ed..2f4ecad8859d 100644
--- a/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala
+++ b/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala
@@ -5,7 +5,7 @@ package fromtasty
 import scala.language.unsafeNulls

 import io.{JarArchive, AbstractFile, Path}
-import core.Contexts._
+import core.Contexts.*
 import core.Decorators.em
 import java.io.File
diff --git a/compiler/src/dotty/tools/dotc/inlines/InlineReducer.scala b/compiler/src/dotty/tools/dotc/inlines/InlineReducer.scala
index ebb76e9e9bf9..26fd52fb7138 100644
--- a/compiler/src/dotty/tools/dotc/inlines/InlineReducer.scala
+++ b/compiler/src/dotty/tools/dotc/inlines/InlineReducer.scala
@@ -5,7 +5,6 @@ package inlines
 import ast.*, core.*
 import Flags.*, Symbols.*, Types.*, Decorators.*, Contexts.*
 import StdNames.nme
-import transform.SymUtils.*
 import typer.*
 import Names.TermName
 import NameKinds.{InlineAccessorName, InlineBinderName, InlineScrutineeName}
@@ -329,7 +328,7 @@ class InlineReducer(inliner:
Inliner)(using Context): } case Alternative(pats) => pats.exists(reducePattern(caseBindingMap, scrut, _)) - case Inlined(EmptyTree, Nil, ipat) => - reducePattern(caseBindingMap, scrut, ipat) + case tree: Inlined if tree.inlinedFromOuterScope => + reducePattern(caseBindingMap, scrut, tree.expansion) case _ => false } } diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala index 73fa2a2871a2..8bd89a71fa50 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala @@ -4,7 +4,6 @@ package inlines import ast.*, core.* import Flags.*, Symbols.*, Types.*, Decorators.*, Constants.*, Contexts.* -import transform.SymUtils.* import StdNames.nme import typer.* import Names.Name @@ -28,7 +27,7 @@ import scala.annotation.constructorOnly /** General support for inlining */ object Inliner: - import tpd._ + import tpd.* private[inlines] type DefBuffer = mutable.ListBuffer[ValOrDefDef] @@ -129,7 +128,7 @@ object Inliner: new InlinerMap(typeMap, treeMap, oldOwners, newOwners, substFrom, substTo) override def transformInlined(tree: Inlined)(using Context) = - if tree.call.isEmpty then + if tree.inlinedFromOuterScope then tree.expansion match case expansion: TypeTree => expansion case _ => tree @@ -143,8 +142,8 @@ end Inliner * @param rhsToInline the body of the inlineable method that replaces the call. */ class Inliner(val call: tpd.Tree)(using Context): - import tpd._ - import Inliner._ + import tpd.* + import Inliner.* private val methPart = funPart(call) protected val callTypeArgs = typeArgss(call).flatten @@ -177,7 +176,7 @@ class Inliner(val call: tpd.Tree)(using Context): /** A map from the classes of (direct and outer) this references in `rhsToInline` * to references of their proxies. * Note that we can't index by the ThisType itself since there are several - * possible forms to express what is logicaly the same ThisType. E.g. + * possible forms to express what is logically the same ThisType. E.g. 
* * ThisType(TypeRef(ThisType(p), cls)) * @@ -338,7 +337,7 @@ class Inliner(val call: tpd.Tree)(using Context): protected def hasOpaqueProxies = opaqueProxies.nonEmpty - /** Map first halfs of opaqueProxies pairs to second halfs, using =:= as equality */ + /** Map first halves of opaqueProxies pairs to second halves, using =:= as equality */ private def mapRef(ref: TermRef): Option[TermRef] = opaqueProxies.collectFirst { case (from, to) if from.symbol == ref.symbol && from =:= ref => to @@ -497,8 +496,8 @@ class Inliner(val call: tpd.Tree)(using Context): // assertAllPositioned(tree) // debug tree.changeOwner(originalOwner, ctx.owner) - def tryConstValue: Tree = - TypeComparer.constValue(callTypeArgs.head.tpe) match { + def tryConstValue(tpe: Type): Tree = + TypeComparer.constValue(tpe) match { case Some(c) => Literal(c).withSpan(call.span) case _ => EmptyTree } @@ -549,7 +548,7 @@ class Inliner(val call: tpd.Tree)(using Context): val inlineTyper = new InlineTyper(ctx.reporter.errorCount) - val inlineCtx = inlineContext(call).fresh.setTyper(inlineTyper).setNewScope + val inlineCtx = inlineContext(Inlined(call, Nil, ref(defn.Predef_undefined))).fresh.setTyper(inlineTyper).setNewScope def inlinedFromOutside(tree: Tree)(span: Span): Tree = Inlined(EmptyTree, Nil, tree)(using ctx.withSource(inlinedMethod.topLevelClass.source)).withSpan(span) @@ -596,7 +595,7 @@ class Inliner(val call: tpd.Tree)(using Context): val inlinedSingleton = singleton(t).withSpan(argSpan) inlinedFromOutside(inlinedSingleton)(tree.span) case Some(t) if tree.isType => - inlinedFromOutside(TypeTree(t).withSpan(argSpan))(tree.span) + inlinedFromOutside(new InferredTypeTree().withType(t).withSpan(argSpan))(tree.span) case _ => tree } case tree @ Select(qual: This, name) if tree.symbol.is(Private) && tree.symbol.isInlineMethod => @@ -649,13 +648,13 @@ class Inliner(val call: tpd.Tree)(using Context): def treeSize(x: Any): Int = var siz = 0 x match - case x: Trees.Inlined[_] => + case x: Trees.Inlined[?] => case x: Positioned => var i = 0 while i < x.productArity do siz += treeSize(x.productElement(i)) i += 1 - case x: List[_] => + case x: List[?] 
=> var xs = x while xs.nonEmpty do siz += treeSize(xs.head) @@ -734,7 +733,7 @@ class Inliner(val call: tpd.Tree)(using Context): */ class InlineTyper(initialErrorCount: Int, @constructorOnly nestingLevel: Int = ctx.nestingLevel + 1) extends ReTyper(nestingLevel): - import reducer._ + import reducer.* override def ensureAccessible(tpe: Type, superAccess: Boolean, pos: SrcPos)(using Context): Type = { tpe match { @@ -771,7 +770,7 @@ class Inliner(val call: tpd.Tree)(using Context): override def typedSelect(tree: untpd.Select, pt: Type)(using Context): Tree = { val locked = ctx.typerState.ownedVars - val qual1 = typed(tree.qualifier, shallowSelectionProto(tree.name, pt, this)) + val qual1 = typed(tree.qualifier, shallowSelectionProto(tree.name, pt, this, tree.nameSpan)) val resNoReduce = untpd.cpy.Select(tree)(qual1, tree.name).withType(tree.typeOpt) val reducedProjection = reducer.reduceProjection(resNoReduce) if reducedProjection.isType then @@ -793,7 +792,7 @@ class Inliner(val call: tpd.Tree)(using Context): typed(tree.cond, defn.BooleanType)(using condCtx) match { case cond1 @ ConstantValue(b: Boolean) => val selected0 = if (b) tree.thenp else tree.elsep - val selected = if (selected0.isEmpty) tpd.Literal(Constant(())) else typed(selected0, pt) + val selected = if (selected0.isEmpty) tpd.unitLiteral else typed(selected0, pt) if (isIdempotentExpr(cond1)) selected else Block(cond1 :: Nil, selected) case cond1 => @@ -877,8 +876,12 @@ class Inliner(val call: tpd.Tree)(using Context): } case _ => rhs0 } - val (usedBindings, rhs2) = dropUnusedDefs(caseBindings, rhs1) - val rhs = seq(usedBindings, rhs2) + val rhs2 = rhs1 match { + case Typed(expr, tpt) if rhs1.span.isSynthetic => constToLiteral(expr) + case _ => constToLiteral(rhs1) + } + val (usedBindings, rhs3) = dropUnusedDefs(caseBindings, rhs2) + val rhs = seq(usedBindings, rhs3) inlining.println(i"""--- reduce: |$tree |--- to: @@ -1047,13 +1050,13 @@ class Inliner(val call: tpd.Tree)(using Context): val evaluatedSplice = inContext(quoted.MacroExpansion.context(inlinedFrom)) { Splicer.splice(body, splicePos, inlinedFrom.srcPos, MacroClassLoader.fromContext) } - val inlinedNormailizer = new TreeMap { + val inlinedNormalizer = new TreeMap { override def transform(tree: tpd.Tree)(using Context): tpd.Tree = tree match { - case Inlined(EmptyTree, Nil, expr) if enclosingInlineds.isEmpty => transform(expr) + case tree @ Inlined(_, Nil, expr) if tree.inlinedFromOuterScope && enclosingInlineds.isEmpty => transform(expr) case _ => super.transform(tree) } } - val normalizedSplice = inlinedNormailizer.transform(evaluatedSplice) + val normalizedSplice = inlinedNormalizer.transform(evaluatedSplice) if (normalizedSplice.isEmpty) normalizedSplice else normalizedSplice.withSpan(splicePos.span) } @@ -1067,6 +1070,8 @@ class Inliner(val call: tpd.Tree)(using Context): tree match { case tree: RefTree if tree.isTerm && level == -1 && tree.symbol.isDefinedInCurrentRun && !tree.symbol.isLocal => foldOver(tree.symbol :: syms, tree) + case _: This if level == -1 && tree.symbol.isDefinedInCurrentRun => + tree.symbol :: syms case _: TypTree => syms case _ => foldOver(syms, tree) } diff --git a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala index 36dc8a642afc..4860913bdc63 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala @@ -4,8 +4,7 @@ package inlines import ast.*, core.* import Flags.*, Symbols.*, Types.*, Decorators.*, Constants.*, 
Contexts.* -import StdNames.tpnme -import transform.SymUtils._ +import StdNames.{tpnme, nme} import typer.* import NameKinds.BodyRetainerName import SymDenotations.SymDenotation @@ -17,12 +16,12 @@ import transform.{PostTyper, Inlining, CrossVersionChecks} import staging.StagingLevel import collection.mutable -import reporting.trace +import reporting.{NotConstant, trace} import util.Spans.Span /** Support for querying inlineable methods and for inlining calls to such methods */ object Inlines: - import tpd._ + import tpd.* /** An exception signalling that an inline info cannot be computed due to a * cyclic reference. i14772.scala shows a case where this happens. @@ -64,6 +63,7 @@ object Inlines: ) && !ctx.typer.hasInliningErrors && !ctx.base.stopInlining + && !ctx.mode.is(Mode.NoInline) } private def needsTransparentInlining(tree: Tree)(using Context): Boolean = @@ -101,7 +101,7 @@ object Inlines: override def transform(t: Tree)(using Context) = if call.span.exists then t match - case Inlined(t, Nil, expr) if t.isEmpty => expr + case t @ Inlined(_, Nil, expr) if t.inlinedFromOuterScope => expr case _ if t.isEmpty => t case _ => super.transform(t.withSpan(call.span)) else t @@ -117,7 +117,7 @@ object Inlines: case Block(stats, expr) => bindings ++= stats.map(liftPos) liftBindings(expr, liftPos) - case Inlined(call, stats, expr) => + case tree @ Inlined(call, stats, expr) => bindings ++= stats.map(liftPos) val lifter = liftFromInlined(call) cpy.Inlined(tree)(call, Nil, liftBindings(expr, liftFromInlined(call).transform(_))) @@ -189,28 +189,33 @@ object Inlines: // transforms the patterns into terms, the `inlinePatterns` phase removes this anonymous class by β-reducing // the call to the `unapply`. - val UnApply(fun, trailingImplicits, patterns) = unapp - - val sym = unapp.symbol - - var unapplySym1: Symbol = NoSymbol // created from within AnonClass() and used afterwards + val fun = unapp.fun + val sym = fun.symbol val newUnapply = AnonClass(ctx.owner, List(defn.ObjectType), sym.coord) { cls => // `fun` is a partially applied method that contains all type applications of the method. // The methodic type `fun.tpe.widen` is the type of the function starting from the scrutinee argument // and its type parameters are instantiated. 
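A use-site sketch of what this β-reduction machinery enables (the `Pos` extractor below is illustrative only, not taken from this patch): with a `transparent inline unapply`, the refined result type computed above is what the synthesized anonymous-class `unapply` exposes to the pattern matcher.

```scala
object Pos:
  // Hypothetical extractor: inlined at each pattern-match site; for the
  // `transparent` case, the code above re-types the synthesized unapply
  // with the more precise type of the inlined body.
  transparent inline def unapply(x: Int): Option[Int] =
    if x > 0 then Some(x) else None

def describe(i: Int): String = i match
  case Pos(n) => s"positive: $n"
  case _      => "non-positive"
```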
- val unapplySym = newSymbol(cls, sym.name.toTermName, Synthetic | Method, fun.tpe.widen, coord = sym.coord).entered - val unapply = DefDef(unapplySym.asTerm, argss => - val body = fun.appliedToArgss(argss).withSpan(unapp.span) - if body.symbol.is(Transparent) then inlineCall(body)(using ctx.withOwner(unapplySym)) - else body - ) - unapplySym1 = unapplySym - List(unapply) + val unapplyInfo = fun.tpe.widen + val unapplySym = newSymbol(cls, sym.name.toTermName, Synthetic | Method, unapplyInfo, coord = sym.coord).entered + + val unapply = DefDef(unapplySym.asTerm, argss => fun.appliedToArgss(argss).withSpan(unapp.span)) + + if sym.is(Transparent) then + // Inline the body and refine the type of the unapply method + val inlinedBody = inlineCall(unapply.rhs)(using ctx.withOwner(unapplySym)) + val refinedResultType = inlinedBody.tpe.widen + def refinedResult(info: Type): Type = info match + case info: LambdaType => info.newLikeThis(info.paramNames, info.paramInfos, refinedResult(info.resultType)) + case _ => refinedResultType + unapplySym.info = refinedResult(unapplyInfo) + List(cpy.DefDef(unapply)(tpt = TypeTree(refinedResultType), rhs = inlinedBody)) + else + List(unapply) } - val newFun = newUnapply.select(unapplySym1).withSpan(unapp.span) - cpy.UnApply(unapp)(newFun, trailingImplicits, patterns) + val newFun = newUnapply.select(sym.name).withSpan(unapp.span) + cpy.UnApply(unapp)(fun = newFun) end inlinedUnapply /** For a retained inline method, another method that keeps track of @@ -390,7 +395,7 @@ object Inlines: * @param rhsToInline the body of the inlineable method that replaces the call. */ private class InlineCall(call: tpd.Tree)(using Context) extends Inliner(call): - import tpd._ + import tpd.* import Inlines.* /** The Inlined node representing the inlined call */ @@ -403,41 +408,72 @@ object Inlines: arg match case ConstantValue(_) | Inlined(_, Nil, Typed(ConstantValue(_), _)) => // ok case _ => report.error(em"expected a constant value but found: $arg", arg.srcPos) - return Literal(Constant(())).withSpan(call.span) + return unitLiteral.withSpan(call.span) else if inlinedMethod == defn.Compiletime_codeOf then return Intrinsics.codeOf(arg, call.srcPos) case _ => - // Special handling of `constValue[T]`, `constValueOpt[T], and summonInline[T]` + // Special handling of `constValue[T]`, `constValueOpt[T]`, `constValueTuple[T]`, `summonInline[T]` and `summonAll[T]` if callTypeArgs.length == 1 then - if (inlinedMethod == defn.Compiletime_constValue) { - val constVal = tryConstValue + + def constValueOrError(tpe: Type): Tree = + val constVal = tryConstValue(tpe) if constVal.isEmpty then - val msg = em"not a constant type: ${callTypeArgs.head}; cannot take constValue" - return ref(defn.Predef_undefined).withSpan(call.span).withType(ErrorType(msg)) + val msg = NotConstant("cannot take constValue", tpe) + ref(defn.Predef_undefined).withSpan(callTypeArgs.head.span).withType(ErrorType(msg)) else - return constVal + constVal + + def searchImplicitOrError(tpe: Type): Tree = + val evTyper = new Typer(ctx.nestingLevel + 1) + val evCtx = ctx.fresh.setTyper(evTyper) + inContext(evCtx) { + val evidence = evTyper.inferImplicitArg(tpe, callTypeArgs.head.span) + evidence.tpe match + case fail: Implicits.SearchFailureType => + errorTree(call, evTyper.missingArgMsg(evidence, tpe, "")) + case _ => + evidence + } + + def unrollTupleTypes(tpe: Type): Option[List[Type]] = tpe.dealias match + case AppliedType(tycon, args) if defn.isTupleClass(tycon.typeSymbol) => + Some(args) + case AppliedType(tycon, head :: 
tail :: Nil) if tycon.isRef(defn.PairClass) => + unrollTupleTypes(tail).map(head :: _) + case tpe: TermRef if tpe.symbol == defn.EmptyTupleModule => + Some(Nil) + case _ => + None + + if (inlinedMethod == defn.Compiletime_constValue) { + return constValueOrError(callTypeArgs.head.tpe) } else if (inlinedMethod == defn.Compiletime_constValueOpt) { - val constVal = tryConstValue + val constVal = tryConstValue(callTypeArgs.head.tpe) return ( if (constVal.isEmpty) ref(defn.NoneModule.termRef) else New(defn.SomeClass.typeRef.appliedTo(constVal.tpe), constVal :: Nil) ) } + else if (inlinedMethod == defn.Compiletime_constValueTuple) { + unrollTupleTypes(callTypeArgs.head.tpe) match + case Some(types) => + val constants = types.map(constValueOrError) + return Typed(tpd.tupleTree(constants), TypeTree(callTypeArgs.head.tpe)).withSpan(call.span) + case _ => + return errorTree(call, em"Tuple element types must be known at compile time") + } else if (inlinedMethod == defn.Compiletime_summonInline) { - def searchImplicit(tpt: Tree) = - val evTyper = new Typer(ctx.nestingLevel + 1) - val evCtx = ctx.fresh.setTyper(evTyper) - inContext(evCtx) { - val evidence = evTyper.inferImplicitArg(tpt.tpe, tpt.span) - evidence.tpe match - case fail: Implicits.SearchFailureType => - errorTree(call, evTyper.missingArgMsg(evidence, tpt.tpe, "")) - case _ => - evidence - } - return searchImplicit(callTypeArgs.head) + return searchImplicitOrError(callTypeArgs.head.tpe) + } + else if (inlinedMethod == defn.Compiletime_summonAll) { + unrollTupleTypes(callTypeArgs.head.tpe) match + case Some(types) => + val implicits = types.map(searchImplicitOrError) + return Typed(tpd.tupleTree(implicits), TypeTree(callTypeArgs.head.tpe)).withSpan(call.span) + case _ => + return errorTree(call, em"Tuple element types must be known at compile time") } end if diff --git a/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala b/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala index 060c8d21f390..1acc6a1c8317 100644 --- a/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala +++ b/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala @@ -3,29 +3,30 @@ package dotc package inlines import dotty.tools.dotc.ast.{Trees, tpd, untpd} -import Trees._ -import core._ -import Flags._ -import Symbols._ -import Flags._ -import Types._ -import Decorators._ +import Trees.* +import core.* +import Flags.* +import Symbols.* +import Flags.* +import Types.* +import Decorators.* import StdNames.nme -import Contexts._ +import Contexts.* import Names.{Name, TermName} import NameKinds.{InlineAccessorName, UniqueInlineName} import inlines.Inlines -import NameOps._ -import Annotations._ +import NameOps.* +import Annotations.* import transform.{AccessProxies, Splicer} import staging.CrossStageSafety -import transform.SymUtils.* import config.Printers.inlining import util.Property import staging.StagingLevel +import dotty.tools.dotc.reporting.Message +import dotty.tools.dotc.util.SrcPos object PrepareInlineable { - import tpd._ + import tpd.* private val InlineAccessorsKey = new Property.Key[InlineAccessors] @@ -72,6 +73,7 @@ object PrepareInlineable { sym.isTerm && (sym.isOneOf(AccessFlags) || sym.privateWithin.exists) && !sym.isContainedIn(inlineSym) && + !sym.hasPublicInBinary && !(sym.isStableMember && sym.info.widenTermRefExpr.isInstanceOf[ConstantType]) && !sym.isInlineMethod && (Inlines.inInlineMethod || StagingLevel.level > 0) @@ -87,6 +89,11 @@ object PrepareInlineable { override def transform(tree: Tree)(using Context): Tree 
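For orientation, the intrinsics special-cased here are the standard `scala.compiletime` operations; a minimal sketch of the call sites this logic must reduce (types and values are illustrative):

```scala
import scala.compiletime.{constValue, constValueTuple, summonAll, summonInline}

val width: Int = constValue[32]                          // reduced to the literal 32
val labels: ("x", "y") = constValueTuple[("x", "y")]     // element-wise constValue, per the tuple unrolling above
val ord: Ordering[Int] = summonInline[Ordering[Int]]     // implicit search at the inline site
val ords = summonAll[(Ordering[Int], Ordering[String])]  // one search per unrolled tuple element
```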
= postTransform(super.transform(preTransform(tree))) + + protected def checkUnstableAccessor(accessedTree: Tree, accessor: Symbol)(using Context): Unit = + if ctx.settings.WunstableInlineAccessors.value then + val accessorTree = accessorDef(accessor, accessedTree.symbol) + report.warning(reporting.UnstableInlineAccessor(accessedTree.symbol, accessorTree), accessedTree) } /** Direct approach: place the accessor with the accessed symbol. This has the @@ -101,7 +108,11 @@ object PrepareInlineable { report.error("Implementation restriction: cannot use private constructors in inline methods", tree.srcPos) tree // TODO: create a proper accessor for the private constructor } - else useAccessor(tree) + else + val accessor = useAccessor(tree) + if tree != accessor then + checkUnstableAccessor(tree, accessor.symbol) + accessor case _ => tree } @@ -180,6 +191,8 @@ object PrepareInlineable { accessorInfo = abstractQualType(addQualType(dealiasMap(accessedType))), accessed = accessed) + checkUnstableAccessor(tree, accessor) + val (leadingTypeArgs, otherArgss) = splitArgs(argss) val argss1 = joinArgs( localRefs.map(TypeTree(_)) ++ leadingTypeArgs, // TODO: pass type parameters in two sections? diff --git a/compiler/src/dotty/tools/dotc/interactive/Completion.scala b/compiler/src/dotty/tools/dotc/interactive/Completion.scala index e4d0cce9f6f9..6e91254c2d72 100644 --- a/compiler/src/dotty/tools/dotc/interactive/Completion.scala +++ b/compiler/src/dotty/tools/dotc/interactive/Completion.scala @@ -1,18 +1,17 @@ package dotty.tools.dotc.interactive -import scala.language.unsafeNulls - import dotty.tools.dotc.ast.untpd +import dotty.tools.dotc.ast.NavigateAST import dotty.tools.dotc.config.Printers.interactiv -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* import dotty.tools.dotc.core.Denotations.SingleDenotation -import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.Names.{Name, TermName} import dotty.tools.dotc.core.NameKinds.SimpleNameKind -import dotty.tools.dotc.core.NameOps._ +import dotty.tools.dotc.core.NameOps.* import dotty.tools.dotc.core.Phases -import dotty.tools.dotc.core.Scopes._ +import dotty.tools.dotc.core.Scopes.* import dotty.tools.dotc.core.Symbols.{NoSymbol, Symbol, defn, newSymbol} import dotty.tools.dotc.core.StdNames.nme import dotty.tools.dotc.core.SymDenotations.SymDenotation @@ -25,6 +24,10 @@ import dotty.tools.dotc.util.SourcePosition import scala.collection.mutable import scala.util.control.NonFatal +import dotty.tools.dotc.core.ContextOps.localContext +import dotty.tools.dotc.core.Names +import dotty.tools.dotc.core.Types +import dotty.tools.dotc.core.Symbols /** * One of the results of a completion query. 
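A sketch of the situation `checkUnstableAccessor` reports on, assuming the `scala.annotation.publicInBinary` annotation that the `hasPublicInBinary` test above tracks (that annotation is not defined in this hunk):

```scala
import scala.annotation.publicInBinary

class Box:
  private[Box] val secret: Int = 42
  inline def leak: Int = secret   // forces a generated accessor: reported by the new
                                  // WunstableInlineAccessors warning as binary-fragile

class SafeBox:
  @publicInBinary private[SafeBox] val secret: Int = 42
  inline def read: Int = secret   // hasPublicInBinary: no accessor generated, nothing to warn about
```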
@@ -37,18 +40,17 @@ import scala.util.control.NonFatal */ case class Completion(label: String, description: String, symbols: List[Symbol]) -object Completion { +object Completion: - import dotty.tools.dotc.ast.tpd._ + import dotty.tools.dotc.ast.tpd.* /** Get possible completions from tree at `pos` * * @return offset and list of symbols for possible completions */ - def completions(pos: SourcePosition)(using Context): (Int, List[Completion]) = { - val path = Interactive.pathTo(ctx.compilationUnit.tpdTree, pos.span) + def completions(pos: SourcePosition)(using Context): (Int, List[Completion]) = + val path: List[Tree] = Interactive.pathTo(ctx.compilationUnit.tpdTree, pos.span) computeCompletions(pos, path)(using Interactive.contextOfPath(path).withPhase(Phases.typerPhase)) - } /** * Inspect `path` to determine what kinds of symbols should be considered. * * If the path starts with: * - a `RefTree`, then accept symbols of the same kind as its name; * - a renaming import, and the cursor is on the renamee, accept both terms and types; * - an import, accept both terms and types; * * Otherwise, provide no completion suggestion. */ - def completionMode(path: List[Tree], pos: SourcePosition): Mode = - path match { - case Ident(_) :: Import(_, _) :: _ => Mode.ImportOrExport - case (ref: RefTree) :: _ => + def completionMode(path: List[untpd.Tree], pos: SourcePosition): Mode = + path match + case untpd.Ident(_) :: untpd.Import(_, _) :: _ => Mode.ImportOrExport + case untpd.Ident(_) :: (_: untpd.ImportSelector) :: _ => Mode.ImportOrExport + case (ref: untpd.RefTree) :: _ => if (ref.name.isTermName) Mode.Term else if (ref.name.isTypeName) Mode.Type else Mode.None @@ -72,9 +75,8 @@ if sel.imported.span.contains(pos.span) then Mode.ImportOrExport else Mode.None // Can't help completing the renaming - case (_: ImportOrExport) :: _ => Mode.ImportOrExport + case (_: untpd.ImportOrExport) :: _ => Mode.ImportOrExport case _ => Mode.None - } /** When dealing with completions in various places we check to see if they are * due to incomplete backticks. If so, we ensure we get the full prefix @@ -101,10 +103,13 @@ case (sel: untpd.ImportSelector) :: _ => completionPrefix(sel.imported :: Nil, pos) + case untpd.Ident(_) :: (sel: untpd.ImportSelector) :: _ if !sel.isGiven => + completionPrefix(sel.imported :: Nil, pos) + case (tree: untpd.ImportOrExport) :: _ => - tree.selectors.find(_.span.contains(pos.span)).map { selector => + tree.selectors.find(_.span.contains(pos.span)).map: selector => completionPrefix(selector :: Nil, pos) - }.getOrElse("") + .getOrElse("") // Foo.`se will result in Select(Ident(Foo), ) case (select: untpd.Select) :: _ if select.name == nme.ERROR => @@ -118,27 +123,65 @@ if (ref.name == nme.ERROR) "" else ref.name.toString.take(pos.span.point - ref.span.point) - case _ => - "" + case _ => "" + end completionPrefix /** Inspect `path` to determine the offset where the completion result should be inserted. */ - def completionOffset(path: List[Tree]): Int = - path match { - case (ref: RefTree) :: _ => ref.span.point + def completionOffset(untpdPath: List[untpd.Tree]): Int = + untpdPath match { + case (ref: untpd.RefTree) :: _ => ref.span.point case _ => 0 } - private def computeCompletions(pos: SourcePosition, path: List[Tree])(using Context): (Int, List[Completion]) = { - val mode = completionMode(path, pos) - val rawPrefix = completionPrefix(path, pos) + /** Some information about the trees is lost after Typer: e.g. an extension method construct + * is expanded into methods. In order to support completions in those cases + * we have to rely on untyped trees, and use typed trees only when types are necessary.
+ */ + def resolveTypedOrUntypedPath(tpdPath: List[Tree], pos: SourcePosition)(using Context): List[untpd.Tree] = + lazy val untpdPath: List[untpd.Tree] = NavigateAST + .pathTo(pos.span, List(ctx.compilationUnit.untpdTree), true).collect: + case untpdTree: untpd.Tree => untpdTree + + tpdPath match + case (_: Bind) :: _ => tpdPath + case (_: untpd.TypTree) :: _ => tpdPath + case _ => untpdPath + + /** Handle the case when the cursor position is inside an extension method construct. + * The extension method construct is then desugared into methods, and construct parameters + * are no longer a part of a typed tree, but instead are prepended to method parameters. + * + * @param untpdPath The typed or untyped path to the tree that is being completed + * @param tpdPath The typed path that will be returned if no extension method construct is found + * @param pos The cursor position + * + * @return Typed path to the parameter of the extension construct if found, or tpdPath otherwise + */ + private def typeCheckExtensionConstructPath( + untpdPath: List[untpd.Tree], tpdPath: List[Tree], pos: SourcePosition + )(using Context): List[Tree] = + untpdPath.collectFirst: + case untpd.ExtMethods(paramss, _) => + val enclosingParam = paramss.flatten.find(_.span.contains(pos.span)) + enclosingParam.map: param => + ctx.typer.index(paramss.flatten) + val typedEnclosingParam = ctx.typer.typed(param) + Interactive.pathTo(typedEnclosingParam, pos.span) + .flatten.getOrElse(tpdPath) + + private def computeCompletions(pos: SourcePosition, tpdPath: List[Tree])(using Context): (Int, List[Completion]) = + val path0 = resolveTypedOrUntypedPath(tpdPath, pos) + val mode = completionMode(path0, pos) + val rawPrefix = completionPrefix(path0, pos) val hasBackTick = rawPrefix.headOption.contains('`') val prefix = if hasBackTick then rawPrefix.drop(1) else rawPrefix val completer = new Completer(mode, prefix, pos) - val completions = path match { + val adjustedPath = typeCheckExtensionConstructPath(path0, tpdPath, pos) + val completions = adjustedPath match // Ignore synthetic select from `This` because in code it was `Ident` // See example in dotty.tools.languageserver.CompletionTest.syntheticThis case Select(qual @ This(_), _) :: _ if qual.span.isSynthetic => completer.scopeCompletions @@ -147,13 +190,12 @@ case (tree: ImportOrExport) :: _ => completer.directMemberCompletions(tree.expr) case (_: untpd.ImportSelector) :: Import(expr, _) :: _ => completer.directMemberCompletions(expr) case _ => completer.scopeCompletions - } val describedCompletions = describeCompletions(completions) val backtickedCompletions = describedCompletions.map(completion => backtickCompletions(completion, hasBackTick)) - val offset = completionOffset(path) + val offset = completionOffset(path0) interactiv.println(i"""completion with pos = $pos, | prefix = ${completer.prefix}, @@ -161,7 +203,6 @@ | type = ${completer.mode.is(Mode.Type)} | results = $backtickedCompletions%, %""") (offset, backtickedCompletions) - } def backtickCompletions(completion: Completion, hasBackTick: Boolean) = if hasBackTick || needsBacktick(completion.label) then @@ -174,17 +215,17 @@ // https://github.com/scalameta/metals/blob/main/mtags/src/main/scala/scala/meta/internal/mtags/KeywordWrapper.scala // https://github.com/com-lihaoyi/Ammonite/blob/73a874173cd337f953a3edc9fb8cb96556638fdd/amm/util/src/main/scala/ammonite/util/Model.scala private def needsBacktick(s: String) = - val chunks = s.split("_", -1) + val chunks = s.split("_", -1).nn
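The desugaring these helpers compensate for, sketched on a toy example (names illustrative): after Typer the `extension` construct is gone, so a completion on the parameter inside the clause has to be recovered from the untyped `ExtMethods` node and re-typed as done above.

```scala
// As written by the user: the untyped tree still contains ExtMethods.
extension (s: String)
  def shout: String = s.toUpperCase

// Roughly the shape Typer leaves behind (a fresh name is used here only
// to keep the sketch compilable): the extension parameter is prepended.
def shoutDesugared(s: String): String = s.toUpperCase
```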
val validChunks = chunks.zipWithIndex.forall { case (chunk, index) => - chunk.forall(Chars.isIdentifierPart) || - (chunk.forall(Chars.isOperatorPart) && + chunk.nn.forall(Chars.isIdentifierPart) || + (chunk.nn.forall(Chars.isOperatorPart) && index == chunks.length - 1 && !(chunks.lift(index - 1).contains("") && index - 1 == 0)) } val validStart = - Chars.isIdentifierStart(s(0)) || chunks(0).forall(Chars.isOperatorPart) + Chars.isIdentifierStart(s(0)) || chunks(0).nn.forall(Chars.isOperatorPart) val valid = validChunks && validStart && !keywords.contains(s) @@ -216,7 +257,7 @@ object Completion { * For the results of all `xyzCompletions` methods term names and type names are always treated as different keys in the same map * and they never conflict with each other. */ - class Completer(val mode: Mode, val prefix: String, pos: SourcePosition) { + class Completer(val mode: Mode, val prefix: String, pos: SourcePosition): /** Completions for terms and types that are currently in scope: * the members of the current class, local definitions and the symbols that have been imported, * recursively adding completions from outer scopes. @@ -230,7 +271,7 @@ object Completion { * (even if the import follows it syntactically) * - a more deeply nested import shadowing a member or a local definition causes an ambiguity */ - def scopeCompletions(using context: Context): CompletionMap = { + def scopeCompletions(using context: Context): CompletionMap = val mappings = collection.mutable.Map.empty[Name, List[ScopedDenotations]].withDefaultValue(List.empty) def addMapping(name: Name, denots: ScopedDenotations) = mappings(name) = mappings(name) :+ denots @@ -302,7 +343,7 @@ object Completion { } resultMappings - } + end scopeCompletions /** Widen only those types which are applied or are exactly nothing */ @@ -335,16 +376,16 @@ object Completion { /** Completions introduced by imports directly in this context. * Completions from outer contexts are not included. */ - private def importedCompletions(using Context): CompletionMap = { + private def importedCompletions(using Context): CompletionMap = val imp = ctx.importInfo - def fromImport(name: Name, nameInScope: Name): Seq[(Name, SingleDenotation)] = - imp.site.member(name).alternatives - .collect { case denot if include(denot, nameInScope) => nameInScope -> denot } - if imp == null then Map.empty else + def fromImport(name: Name, nameInScope: Name): Seq[(Name, SingleDenotation)] = + imp.site.member(name).alternatives + .collect { case denot if include(denot, nameInScope) => nameInScope -> denot } + val givenImports = imp.importedImplicits .map { ref => (ref.implicitName: Name, ref.underlyingRef.denot.asSingleDenotation) } .filter((name, denot) => include(denot, name)) @@ -370,7 +411,7 @@ object Completion { }.toSeq.groupByName givenImports ++ wildcardMembers ++ explicitMembers - } + end importedCompletions /** Completions from implicit conversions including old style extensions using implicit classes */ private def implicitConversionMemberCompletions(qual: Tree)(using Context): CompletionMap = @@ -532,7 +573,6 @@ object Completion { extension [N <: Name](namedDenotations: Seq[(N, SingleDenotation)]) @annotation.targetName("groupByNameTupled") def groupByName: CompletionMap = namedDenotations.groupMap((name, denot) => name)((name, denot) => denot) - } private type CompletionMap = Map[Name, Seq[SingleDenotation]] @@ -545,11 +585,11 @@ object Completion { * The completion mode: defines what kinds of symbols should be included in the completion * results. 
*/ - class Mode(val bits: Int) extends AnyVal { + class Mode(val bits: Int) extends AnyVal: def is(other: Mode): Boolean = (bits & other.bits) == other.bits def |(other: Mode): Mode = new Mode(bits | other.bits) - } - object Mode { + + object Mode: /** No symbol should be included */ val None: Mode = new Mode(0) @@ -561,6 +601,4 @@ object Completion { /** Both term and type symbols are allowed */ val ImportOrExport: Mode = new Mode(4) | Term | Type - } -} diff --git a/compiler/src/dotty/tools/dotc/interactive/Interactive.scala b/compiler/src/dotty/tools/dotc/interactive/Interactive.scala index fd6d426f39bb..3f3e5e25f66e 100644 --- a/compiler/src/dotty/tools/dotc/interactive/Interactive.scala +++ b/compiler/src/dotty/tools/dotc/interactive/Interactive.scala @@ -4,21 +4,20 @@ package interactive import scala.language.unsafeNulls -import scala.collection._ +import scala.collection.* import ast.{NavigateAST, Trees, tpd, untpd} -import core._ -import Decorators._, ContextOps._ -import Contexts._, Flags._, Names._, NameOps._, Symbols._, Trees._, Types._ -import transform.SymUtils._ -import util.Spans._, util.SourceFile, util.SourcePosition +import core.* +import Decorators.*, ContextOps.* +import Contexts.*, Flags.*, Names.*, NameOps.*, Symbols.*, Trees.*, Types.* +import util.Spans.*, util.SourceFile, util.SourcePosition /** High-level API to get information out of typed trees, designed to be used by IDEs. * * @see `InteractiveDriver` to get typed trees from code. */ object Interactive { - import ast.tpd._ + import ast.tpd.* object Include { case class Set private[Include] (val bits: Int) extends AnyVal { diff --git a/compiler/src/dotty/tools/dotc/interactive/InteractiveCompiler.scala b/compiler/src/dotty/tools/dotc/interactive/InteractiveCompiler.scala index 38a93125a342..af1484896c72 100644 --- a/compiler/src/dotty/tools/dotc/interactive/InteractiveCompiler.scala +++ b/compiler/src/dotty/tools/dotc/interactive/InteractiveCompiler.scala @@ -2,10 +2,10 @@ package dotty.tools package dotc package interactive -import core._ -import Phases._ -import parsing._ -import typer._ +import core.* +import Phases.* +import parsing.* +import typer.* class InteractiveCompiler extends Compiler { // TODO: Figure out what phases should be run in IDEs diff --git a/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala b/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala index 132ff162be61..b00cd1036018 100644 --- a/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala +++ b/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala @@ -5,28 +5,28 @@ package interactive import scala.language.unsafeNulls import java.net.URI -import java.io._ -import java.nio.file._ +import java.io.* +import java.nio.file.* import java.nio.file.attribute.BasicFileAttributes import java.nio.charset.StandardCharsets -import java.util.zip._ +import java.util.zip.* -import scala.collection._ +import scala.collection.* import scala.io.Codec import dotty.tools.io.AbstractFile import ast.{Trees, tpd} -import core._, core.Decorators._ -import Contexts._, Names._, NameOps._, Symbols._, SymDenotations._, Trees._, Types._ +import core.*, core.Decorators.* +import Contexts.*, Names.*, NameOps.*, Symbols.*, SymDenotations.*, Trees.*, Types.* import Denotations.staticRef -import classpath._ -import reporting._ -import util._ +import classpath.* +import reporting.* +import util.* /** A Driver subclass designed to be used from IDEs */ class InteractiveDriver(val settings: List[String]) extends Driver { 
- import tpd._ + import tpd.* override def sourcesRequired: Boolean = false @@ -145,10 +145,10 @@ class InteractiveDriver(val settings: List[String]) extends Driver { (fromSource ++ fromClassPath).distinct } - def run(uri: URI, sourceCode: String): List[Diagnostic] = run(uri, toSource(uri, sourceCode)) + def run(uri: URI, sourceCode: String): List[Diagnostic] = run(uri, SourceFile.virtual(uri, sourceCode)) def run(uri: URI, source: SourceFile): List[Diagnostic] = { - import typer.ImportInfo._ + import typer.ImportInfo.* val previousCtx = myCtx try { @@ -297,9 +297,6 @@ class InteractiveDriver(val settings: List[String]) extends Driver { cleanupTree(tree) } - private def toSource(uri: URI, sourceCode: String): SourceFile = - SourceFile.virtual(Paths.get(uri).toString, sourceCode) - /** * Initialize this driver and compiler. * @@ -323,7 +320,7 @@ object InteractiveDriver { else try // We don't use file.file here since it'll be null - // for the VirtualFiles created by InteractiveDriver#toSource + // for the VirtualFiles created by SourceFile#virtual // TODO: To avoid these round trip conversions, we could add an // AbstractFile#toUri method and implement it by returning a constant // passed as a parameter to a constructor of VirtualFile diff --git a/compiler/src/dotty/tools/dotc/interactive/SourceTree.scala b/compiler/src/dotty/tools/dotc/interactive/SourceTree.scala index 60f01396e91e..5480d4a43043 100644 --- a/compiler/src/dotty/tools/dotc/interactive/SourceTree.scala +++ b/compiler/src/dotty/tools/dotc/interactive/SourceTree.scala @@ -4,9 +4,9 @@ package interactive import ast.tpd -import core._ -import Contexts._, NameOps._, Symbols._, StdNames._ -import util._, util.Spans._ +import core.* +import Contexts.*, NameOps.*, Symbols.*, StdNames.* +import util.*, util.Spans.* /** * A `tree` coming from `source` @@ -55,7 +55,7 @@ object SourceTree { !sym.source.exists) // FIXME: We cannot deal with external projects yet Nil else { - import ast.Trees._ + import ast.Trees.* def sourceTreeOfClass(tree: tpd.Tree): Option[SourceTree] = tree match { case PackageDef(_, stats) => stats.flatMap(sourceTreeOfClass).headOption diff --git a/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala b/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala index c63409d0d52b..aa0e9a47f775 100644 --- a/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala +++ b/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala @@ -2,7 +2,9 @@ package dotty.tools package dotc package parsing -import util.Chars._ +import util.Chars.* + +import scala.compiletime.uninitialized abstract class CharArrayReader { self => @@ -16,7 +18,7 @@ abstract class CharArrayReader { self => protected def error(msg: String, offset: Int): Unit /** the last read character */ - var ch: Char = _ + var ch: Char = uninitialized /** The offset one past the last read character */ var charOffset: Int = startFrom diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala index 6ec896dcb200..f7ef86ee5cde 100644 --- a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala @@ -6,31 +6,35 @@ import dotty.tools.dotc.core.Constants.Constant import dotty.tools.dotc.core.Flags import dotty.tools.dotc.core.Flags.FlagSet -import JavaTokens._ -import JavaScanners._ +import JavaTokens.* +import JavaScanners.* import Scanners.Offset -import Parsers._ -import core._ -import Contexts._ -import Names._ -import Types._ 
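As context for the `run` overload change above, a minimal driver session (a sketch: the classpath flag and URI are illustrative; the source now reaches the compiler via `SourceFile.virtual(uri, sourceCode)`):

```scala
import java.net.URI
import dotty.tools.dotc.interactive.InteractiveDriver

val driver = new InteractiveDriver(List("-usejavacp"))
val uri = new URI("file:///Hello.scala")
// Type error on purpose, so the returned diagnostics are non-empty.
val diagnostics = driver.run(uri, "object Hello { val x: Int = \"oops\" }")
diagnostics.foreach(println)
```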
-import ast.Trees._ -import Decorators._ -import StdNames._ -import reporting._ +import Parsers.* +import core.* +import Contexts.* +import Symbols.defn +import Names.* +import Types.* +import ast.Trees.* +import Decorators.* +import StdNames.* +import reporting.* import dotty.tools.dotc.util.SourceFile -import util.Spans._ +import util.Spans.* import scala.collection.mutable.{ListBuffer, LinkedHashMap} object JavaParsers { - import ast.untpd._ + import ast.untpd.* + + + val fakeFlags = Flags.JavaDefined | Flags.PrivateLocal | Flags.Invisible class JavaParser(source: SourceFile)(using Context) extends ParserCommon(source) { val definitions: Definitions = ctx.definitions - import definitions._ + import definitions.* val in: JavaScanner = new JavaScanner(source) @@ -89,16 +93,16 @@ object JavaParsers { // --------- tree building ----------------------------- - def scalaAnnotationDot(name: Name): Select = Select(scalaDot(nme.annotation), name) - def javaDot(name: Name): Tree = Select(rootDot(nme.java), name) def javaLangDot(name: Name): Tree = Select(javaDot(nme.lang), name) - /** Tree representing `java.lang.Object` */ - def javaLangObject(): Tree = javaLangDot(tpnme.Object) + /** Synthetic tree representing `java.lang.Object`. + * The typer will type all references to `java.lang.Object` as `FromJavaObject`. + */ + def ObjectTpt(): Tree = TypeTree(defn.FromJavaObjectType) // javaLangDot(tpnme.Object) /** Tree representing `java.lang.Record` */ def javaLangRecord(): Tree = javaLangDot(tpnme.Record) @@ -107,6 +111,8 @@ object JavaParsers { AppliedTypeTree(scalaDot(tpnme.Array), List(tpt)) def makeTemplate(parents: List[Tree], stats: List[Tree], tparams: List[TypeDef], needsDummyConstr: Boolean): Template = { + def UnitTpt(): Tree = TypeTree(defn.UnitType) + def pullOutFirstConstr(stats: List[Tree]): (Tree, List[Tree]) = stats match { case (meth: DefDef) :: rest if meth.name == nme.CONSTRUCTOR => (meth, rest) case first :: rest => @@ -120,12 +126,12 @@ object JavaParsers { // can call it. // This also avoids clashes between the constructor parameter names and member names. 
if (needsDummyConstr) { - if (constr1 == EmptyTree) constr1 = makeConstructor(List(), Nil) + if (constr1 == EmptyTree) constr1 = makeConstructor(List(), Nil, Parsers.unimplementedExpr) stats1 = constr1 :: stats1 - constr1 = makeConstructor(List(scalaDot(tpnme.Unit)), tparams, Flags.JavaDefined | Flags.PrivateLocal) + constr1 = makeConstructor(List(UnitTpt()), tparams, EmptyTree, fakeFlags) } else if (constr1 == EmptyTree) { - constr1 = makeConstructor(List(), tparams) + constr1 = makeConstructor(List(), tparams, EmptyTree) } Template(constr1.asInstanceOf[DefDef], parents, Nil, EmptyValDef, stats1) } @@ -133,11 +139,11 @@ object JavaParsers { def makeSyntheticParam(count: Int, tpt: Tree): ValDef = makeParam(nme.syntheticParamName(count), tpt) def makeParam(name: TermName, tpt: Tree): ValDef = - ValDef(name, tpt, EmptyTree).withMods(Modifiers(Flags.JavaDefined | Flags.Param)) + ValDef(name, tpt, EmptyTree).withFlags(Flags.JavaDefined | Flags.Param) - def makeConstructor(formals: List[Tree], tparams: List[TypeDef], flags: FlagSet = Flags.JavaDefined): DefDef = { - val vparams = formals.zipWithIndex.map { case (p, i) => makeSyntheticParam(i + 1, p) } - DefDef(nme.CONSTRUCTOR, joinParams(tparams, List(vparams)), TypeTree(), EmptyTree).withMods(Modifiers(flags)) + def makeConstructor(formals: List[Tree], tparams: List[TypeDef], body: Tree, flags: FlagSet = Flags.JavaDefined): DefDef = { + val vparams = formals.zipWithIndex.map { case (p, i) => makeSyntheticParam(i + 1, p).withAddedFlags(flags) } + DefDef(nme.CONSTRUCTOR, joinParams(tparams, List(vparams)), TypeTree(), body).withFlags(flags) } // ------------- general parsing --------------------------- @@ -306,7 +312,7 @@ object JavaParsers { if (in.token == QMARK) { val offset = in.offset in.nextToken() - val hi = if (in.token == EXTENDS) { in.nextToken() ; typ() } else javaLangObject() + val hi = if (in.token == EXTENDS) { in.nextToken() ; typ() } else ObjectTpt() val lo = if (in.token == SUPER) { in.nextToken() ; typ() } else EmptyTree atSpan(offset) { /* @@ -507,7 +513,7 @@ object JavaParsers { atSpan(in.offset) { annotations() val name = identForType() - val hi = if (in.token == EXTENDS) { in.nextToken() ; bound() } else javaLangObject() + val hi = if (in.token == EXTENDS) { in.nextToken() ; bound() } else ObjectTpt() TypeDef(name, TypeBoundsTree(EmptyTree, hi)).withMods(Modifiers(flags)) } @@ -568,7 +574,7 @@ object JavaParsers { if in.token == IDENTIFIER && in.name == jnme.RECORDid then in.token = RECORD - def termDecl(start: Offset, mods: Modifiers, parentToken: Int, parentTParams: List[TypeDef]): List[Tree] = { + def termDecl(start: Offset, mods: Modifiers, parentToken: Int): List[Tree] = { val inInterface = definesInterface(parentToken) val tparams = if (in.token == LT) typeParams(Flags.JavaDefined | Flags.Param) else List() val isVoid = in.token == VOID @@ -740,17 +746,17 @@ object JavaParsers { ValDef(name, tpt2, if (mods.is(Flags.Param)) EmptyTree else unimplementedExpr).withMods(mods1) } - def memberDecl(start: Offset, mods: Modifiers, parentToken: Int, parentTParams: List[TypeDef]): List[Tree] = in.token match + def memberDecl(start: Offset, mods: Modifiers, parentToken: Int): List[Tree] = in.token match case CLASS | ENUM | RECORD | INTERFACE | AT => typeDecl(start, if definesInterface(parentToken) then mods | Flags.JavaStatic else mods) case _ => - termDecl(start, mods, parentToken, parentTParams) + termDecl(start, mods, parentToken) def makeCompanionObject(cdef: TypeDef, statics: List[Tree]): Tree = atSpan(cdef.span) { 
assert(cdef.span.exists) ModuleDef(cdef.name.toTermName, - makeTemplate(List(), statics, List(), false)).withMods((cdef.mods & Flags.RetainedModuleClassFlags).toTermFlags) + makeTemplate(List(), statics, List(), needsDummyConstr = false)).withMods((cdef.mods & Flags.RetainedModuleClassFlags).toTermFlags) } def addCompanionObject(statics: List[Tree], cdef: TypeDef): List[Tree] = @@ -817,11 +823,11 @@ object JavaParsers { typ() } else - javaLangObject() + ObjectTpt() val interfaces = interfacesOpt() - val (statics, body) = typeBody(CLASS, name, tparams) + val (statics, body) = typeBody(CLASS, name) val cls = atSpan(start, nameOffset) { - TypeDef(name, makeTemplate(superclass :: interfaces, body, tparams, true)).withMods(mods) + TypeDef(name, makeTemplate(superclass :: interfaces, body, tparams, needsDummyConstr = true)).withMods(mods) } addCompanionObject(statics, cls) } @@ -834,7 +840,7 @@ object JavaParsers { val header = formalParams() val superclass = javaLangRecord() // records always extend java.lang.Record val interfaces = interfacesOpt() // records may implement interfaces - val (statics, body) = typeBody(RECORD, name, tparams) + val (statics, body) = typeBody(RECORD, name) // We need to generate accessors for every param, if no method with the same name is already defined @@ -864,7 +870,7 @@ object JavaParsers { parents = superclass :: interfaces, stats = canonicalConstructor :: accessors ::: body, tparams = tparams, - true + needsDummyConstr = true ) ).withMods(mods) } @@ -882,24 +888,24 @@ object JavaParsers { repsep(() => typ(), COMMA) } else - List(javaLangObject()) - val (statics, body) = typeBody(INTERFACE, name, tparams) + List(ObjectTpt()) + val (statics, body) = typeBody(INTERFACE, name) val iface = atSpan(start, nameOffset) { TypeDef( name, - makeTemplate(parents, body, tparams, false)).withMods(mods | Flags.JavaInterface) + makeTemplate(parents, body, tparams, needsDummyConstr = false)).withMods(mods | Flags.JavaInterface) } addCompanionObject(statics, iface) } - def typeBody(leadingToken: Int, parentName: Name, parentTParams: List[TypeDef]): (List[Tree], List[Tree]) = { + def typeBody(leadingToken: Int, parentName: Name): (List[Tree], List[Tree]) = { accept(LBRACE) - val defs = typeBodyDecls(leadingToken, parentName, parentTParams) + val defs = typeBodyDecls(leadingToken, parentName) accept(RBRACE) defs } - def typeBodyDecls(parentToken: Int, parentName: Name, parentTParams: List[TypeDef]): (List[Tree], List[Tree]) = { + def typeBodyDecls(parentToken: Int, parentName: Name): (List[Tree], List[Tree]) = { val inInterface = definesInterface(parentToken) val statics = new ListBuffer[Tree] val members = new ListBuffer[Tree] @@ -915,7 +921,7 @@ object JavaParsers { else { adaptRecordIdentifier() if (in.token == ENUM || in.token == RECORD || definesInterface(in.token)) mods |= Flags.JavaStatic - val decls = memberDecl(start, mods, parentToken, parentTParams) + val decls = memberDecl(start, mods, parentToken) (if (mods.is(Flags.JavaStatic) || inInterface && !(decls exists (_.isInstanceOf[DefDef]))) statics else @@ -925,7 +931,7 @@ object JavaParsers { (statics.toList, members.toList) } def annotationParents: List[Tree] = List( - javaLangObject(), + ObjectTpt(), Select(javaLangDot(nme.annotation), tpnme.Annotation) ) def annotationDecl(start: Offset, mods: Modifiers): List[Tree] = { @@ -933,14 +939,14 @@ object JavaParsers { accept(INTERFACE) val nameOffset = in.offset val name = identForType() - val (statics, body) = typeBody(AT, name, List()) + val (statics, body) = 
typeBody(AT, name) val constructorParams = body.collect { case dd: DefDef => makeParam(dd.name, dd.tpt) } val constr = DefDef(nme.CONSTRUCTOR, List(constructorParams), TypeTree(), EmptyTree).withMods(Modifiers(Flags.JavaDefined)) - val templ = makeTemplate(annotationParents, constr :: body, List(), true) + val templ = makeTemplate(annotationParents, constr :: body, List(), needsDummyConstr = true) val annot = atSpan(start, nameOffset) { TypeDef(name, templ).withMods(mods | Flags.JavaInterface | Flags.JavaAnnotation) } @@ -968,7 +974,7 @@ object JavaParsers { val (statics, body) = if (in.token == SEMI) { in.nextToken() - typeBodyDecls(ENUM, name, List()) + typeBodyDecls(ENUM, name) } else (List(), List()) @@ -992,7 +998,7 @@ object JavaParsers { Select(New(javaLangDot(tpnme.Enum)), nme.CONSTRUCTOR), List(enumType)), Nil) val enumclazz = atSpan(start, nameOffset) { TypeDef(name, - makeTemplate(superclazz :: interfaces, body, List(), true)).withMods(mods | Flags.JavaEnumTrait) + makeTemplate(superclazz :: interfaces, body, List(), needsDummyConstr = true)).withMods(mods | Flags.JavaEnum) } addCompanionObject(consts ::: statics ::: predefs, enumclazz) } @@ -1011,7 +1017,7 @@ object JavaParsers { skipAhead() accept(RBRACE) } - ValDef(name.toTermName, enumType, unimplementedExpr).withMods(Modifiers(Flags.JavaEnumTrait | Flags.StableRealizable | Flags.JavaDefined | Flags.JavaStatic)) + ValDef(name.toTermName, enumType, unimplementedExpr).withMods(Modifiers(Flags.JavaEnumValue | Flags.JavaStatic)) } } @@ -1092,7 +1098,7 @@ object JavaParsers { */ class OutlineJavaParser(source: SourceFile)(using Context) extends JavaParser(source) with OutlineParserCommon { override def skipBracesHook(): Option[Tree] = None - override def typeBody(leadingToken: Int, parentName: Name, parentTParams: List[TypeDef]): (List[Tree], List[Tree]) = { + override def typeBody(leadingToken: Int, parentName: Name): (List[Tree], List[Tree]) = { skipBraces() (List(EmptyValDef), List(EmptyTree)) } diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala b/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala index d21d4b85b5df..f50dcdda438c 100644 --- a/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala @@ -2,13 +2,13 @@ package dotty.tools package dotc package parsing -import core.Contexts._ +import core.Contexts.* import core.Names.SimpleName -import Scanners._ +import Scanners.* import util.SourceFile -import JavaTokens._ +import JavaTokens.* import scala.annotation.{switch, tailrec} -import util.Chars._ +import util.Chars.* import PartialFunction.cond import core.Decorators.em @@ -439,6 +439,7 @@ object JavaScanners { } oct.asInstanceOf[Char] end octal + var skip = false def greatEscape: Char = nextChar() if '0' <= ch && ch <= '7' then octal @@ -455,11 +456,12 @@ object JavaScanners { case '\\' => '\\' case CR | LF if inTextBlock => if !scanOnly then nextChar() + skip = true 0 case _ => if !scanOnly then error("invalid escape character", charOffset - 1) ch - if x != 0 then nextChar() + if !skip then nextChar() x end greatEscape @@ -470,7 +472,7 @@ object JavaScanners { val res = ch nextChar() res - if c != 0 && !scanOnly then putChar(c) + if !skip && !scanOnly then putChar(c) end getlitch /** Read a triple-quote delimited text block, starting after the first three double quotes. 
diff --git a/compiler/src/dotty/tools/dotc/parsing/ParserPhase.scala b/compiler/src/dotty/tools/dotc/parsing/ParserPhase.scala index a67bca34cae2..bcabfbd03a1d 100644 --- a/compiler/src/dotty/tools/dotc/parsing/ParserPhase.scala +++ b/compiler/src/dotty/tools/dotc/parsing/ParserPhase.scala @@ -22,7 +22,7 @@ class Parser extends Phase { */ private[dotc] var firstXmlPos: SourcePosition = NoSourcePosition - def parse(using Context) = monitor("parser") { + def parse(using Context): Boolean = monitor("parser") { val unit = ctx.compilationUnit unit.untpdTree = if (unit.isJava) new JavaParsers.JavaParser(unit.source).parse() @@ -30,6 +30,7 @@ class Parser extends Phase { val p = new Parsers.Parser(unit.source) // p.in.debugTokenStream = true val tree = p.parse() + ctx.compilationUnit.comments = p.in.comments if (p.firstXmlPos.exists && !firstXmlPos.exists) firstXmlPos = p.firstXmlPos tree @@ -45,10 +46,15 @@ class Parser extends Phase { report.inform(s"parsing ${unit.source}") ctx.fresh.setCompilationUnit(unit).withRootImports - unitContexts.foreach(parse(using _)) + val unitContexts0 = + for + unitContext <- unitContexts + if parse(using unitContext) + yield unitContext + record("parsedTrees", ast.Trees.ntrees) - unitContexts.map(_.compilationUnit) + unitContexts0.map(_.compilationUnit) } def run(using Context): Unit = unsupported("run") diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 7a29ac3f7a38..47b7ffbbc840 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -8,35 +8,36 @@ import scala.annotation.internal.sharable import scala.collection.mutable.ListBuffer import scala.collection.immutable.BitSet import util.{ SourceFile, SourcePosition, NoSourcePosition } -import Tokens._ -import Scanners._ +import Tokens.* +import Scanners.* import xml.MarkupParsers.MarkupParser -import core._ -import Flags._ -import Contexts._ -import Names._ +import core.* +import Flags.* +import Contexts.* +import Names.* import NameKinds.{WildcardParamName, QualifiedName} -import NameOps._ +import NameOps.* import ast.{Positioned, Trees} -import ast.Trees._ -import StdNames._ -import util.Spans._ -import Constants._ +import ast.Trees.* +import StdNames.* +import util.Spans.* +import Constants.* import Symbols.NoSymbol -import ScriptParsers._ -import Decorators._ +import ScriptParsers.* +import Decorators.* import util.Chars import scala.annotation.tailrec import rewrites.Rewrites.{patch, overlapsPatch} -import reporting._ +import reporting.* import config.Feature import config.Feature.{sourceVersion, migrateTo3, globalOnlyImports} -import config.SourceVersion._ +import config.SourceVersion.* import config.SourceVersion +import dotty.tools.dotc.config.MigrationVersion object Parsers { - import ast.untpd._ + import ast.untpd.* case class OpInfo(operand: Tree, operator: Ident, offset: Offset) @@ -51,7 +52,23 @@ object Parsers { case ElseWhere extends Location(false, false, false) enum ParamOwner: - case Class, Type, TypeParam, Def + case Class // class or trait or enum + case CaseClass // case class or enum case + case Type // type alias or abstract type + case TypeParam // type parameter + case Def // method + case Given // given definition + case ExtensionPrefix // extension clause, up to and including extension parameter + case ExtensionFollow // extension clause, following extension parameter + + def isClass = // owner is a class + this == Class || this == CaseClass + 
def takesOnlyUsingClauses = // only using clauses allowed for this owner + this == Given || this == ExtensionFollow + def acceptsVariance = + this == Class || this == CaseClass || this == Type + + end ParamOwner enum ParseKind: case Expr, Type, Pattern @@ -81,6 +98,9 @@ object Parsers { private val InCond: Region => Region = Scanners.InParens(LPAREN, _) private val InFor : Region => Region = Scanners.InBraces(_) + def unimplementedExpr(using Context): Select = + Select(scalaDot(nme.Predef), nme.???) + abstract class ParserCommon(val source: SourceFile)(using Context) { val in: ScannerCommon @@ -148,9 +168,6 @@ object Parsers { */ def syntaxError(msg: Message, span: Span): Unit = report.error(msg, source.atSpan(span)) - - def unimplementedExpr(using Context): Select = - Select(scalaDot(nme.Predef), nme.???) } trait OutlineParserCommon extends ParserCommon { @@ -382,7 +399,7 @@ object Parsers { false } - def errorTermTree(start: Offset): Literal = atSpan(start, in.offset, in.offset) { Literal(Constant(null)) } + def errorTermTree(start: Offset): Tree = atSpan(start, in.offset, in.offset) { unimplementedExpr } private var inFunReturnType = false private def fromWithinReturnType[T](body: => T): T = { @@ -413,6 +430,14 @@ object Parsers { finally inEnum = saved } + private var inMatchPattern = false + private def withinMatchPattern[T](body: => T): T = { + val saved = inMatchPattern + inMatchPattern = true + try body + finally inMatchPattern = saved + } + private var staged = StageKind.None def withinStaged[T](kind: StageKind)(op: => T): T = { val saved = staged @@ -438,8 +463,8 @@ object Parsers { case t @ Typed(Ident(_), _) => report.errorOrMigrationWarning( em"parentheses are required around the parameter of a lambda${rewriteNotice()}", - in.sourcePos(), from = `3.0`) - if migrateTo3 then + in.sourcePos(), MigrationVersion.Scala2to3) + if MigrationVersion.Scala2to3.needsPatch then patch(source, t.span.startPos, "(") patch(source, t.span.endPos, ")") convertToParam(t, mods) :: Nil @@ -546,8 +571,16 @@ object Parsers { object symbXMLBuilder extends xml.SymbolicXMLBuilder(this, true) // DEBUG choices - def xmlLiteral() : Tree = xmlp.xLiteral - def xmlLiteralPattern() : Tree = xmlp.xLiteralPattern + def xmlLiteral() : Tree = xmlDeprecationWarning(xmlp.xLiteral) + def xmlLiteralPattern() : Tree = xmlDeprecationWarning(xmlp.xLiteralPattern) + + private def xmlDeprecationWarning(tree: Tree): Tree = + report.errorOrMigrationWarning( + em"""XML literals are no longer supported. + |See https://docs.scala-lang.org/scala3/reference/dropped-features/xml.html""", + tree.srcPos, + MigrationVersion.XmlLiteral) + tree /* -------- COMBINATORS -------------------------------------------------------- */ @@ -555,10 +588,29 @@ object Parsers { accept(tok) try body finally accept(tok + 1) + /** Same as enclosed, but if closing token is missing, add `,` to the expected tokens + * in the error message provided the next token could have followed a `,`. 
+ */ + def enclosedWithCommas[T](tok: Token, body: => T): T = + accept(tok) + val closing = tok + 1 + val isEmpty = in.token == closing + val ts = body + if in.token != closing then + val followComma = + if tok == LPAREN then canStartExprTokens3 else canStartTypeTokens + val prefix = if !isEmpty && followComma.contains(in.token) then "',' or " else "" + syntaxErrorOrIncomplete(ExpectedTokenButFound(closing, in.token, prefix)) + if in.token == closing then in.nextToken() + ts + def inParens[T](body: => T): T = enclosed(LPAREN, body) def inBraces[T](body: => T): T = enclosed(LBRACE, body) def inBrackets[T](body: => T): T = enclosed(LBRACKET, body) + def inParensWithCommas[T](body: => T): T = enclosedWithCommas(LPAREN, body) + def inBracketsWithCommas[T](body: => T): T = enclosedWithCommas(LBRACKET, body) + def inBracesOrIndented[T](body: => T, rewriteWithColon: Boolean = false): T = if in.token == INDENT then val rewriteToBraces = in.rewriteNoIndent @@ -970,6 +1022,17 @@ object Parsers { isArrowIndent() else false + /** Can the next lookahead token start an operand as defined by + * leadingOperandTokens, or is postfix ops enabled? + * This is used to decide whether the current token can be an infix operator. + */ + def nextCanFollowOperator(leadingOperandTokens: BitSet): Boolean = + leadingOperandTokens.contains(in.lookahead.token) + || in.postfixOpsEnabled + || in.lookahead.token == COLONop + || in.lookahead.token == EOF // important for REPL completions + || ctx.mode.is(Mode.Interactive) // in interactive mode the next tokens might be missing + /* --------- OPERAND/OPERATOR STACK --------------------------------------- */ var opStack: List[OpInfo] = Nil @@ -1050,7 +1113,11 @@ object Parsers { then recur(top) else top - recur(first) + val res = recur(first) + if isIdent(nme.raw.STAR) && !followingIsVararg() then + syntaxError(em"spread operator `*` not allowed here; must come last in a parameter list") + in.nextToken() + res end infixOps /* -------- IDENTIFIERS AND LITERALS ------------------------------------------- */ @@ -1128,19 +1195,21 @@ object Parsers { if in.token == THIS then handleThis(EmptyTypeIdent) else if in.token == SUPER then handleSuper(EmptyTypeIdent) - else - val t = termIdent() - if in.token == DOT then - def qual = cpy.Ident(t)(t.name.toTypeName) - in.lookahead.token match - case THIS => - in.nextToken() - handleThis(qual) - case SUPER => - in.nextToken() - handleSuper(qual) - case _ => t - else t + else if in.token != INTERPOLATIONID && in.lookahead.token == DOT then + val tok = in.token + val offset = in.offset + val name = ident() + def qual = makeIdent(tok, offset, name.toTypeName) + in.lookahead.token match + case THIS => + in.nextToken() + handleThis(qual) + case SUPER => + in.nextToken() + handleSuper(qual) + case _ => + makeIdent(tok, offset, name) + else termIdent() end simpleRef /** MixinQualifier ::= `[' id `]' @@ -1197,7 +1266,7 @@ object Parsers { case EXPOLIT => return Number(digits, NumberKind.Floating) case _ => } - import scala.util.FromDigits._ + import scala.util.FromDigits.* val value = try token match { case INTLIT => intFromDigits(digits, in.base) @@ -1254,8 +1323,8 @@ object Parsers { |or enclose in braces '{$name} if you want a quoted expression. 
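A sketch of the diagnostic this enables (wording approximated from `ExpectedTokenButFound` plus the new prefix):

```scala
def f(x: Int, y: Int): Int = x + y

// f(1 2)   // before: "')' expected, but integer literal found"
//          // now:    "',' or ')' expected, but integer literal found"
val ok = f(1, 2)
```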
|For now, you can also `import language.deprecated.symbolLiterals` to accept |the idiom, but this possibility might no longer be available in the future.""", - in.sourcePos(), from = `3.0`) - if migrateTo3 then + in.sourcePos(), MigrationVersion.Scala2to3) + if MigrationVersion.Scala2to3.needsPatch then patch(source, Span(in.offset, in.offset + 1), "Symbol(\"") patch(source, Span(in.charOffset - 1), "\")") atSpan(in.skipToken()) { SymbolLit(in.strVal) } @@ -1352,8 +1421,9 @@ object Parsers { em"""This opening brace will start a new statement in Scala 3. |It needs to be indented to the right to keep being treated as |an argument to the previous expression.${rewriteNotice()}""", - in.sourcePos(), from = `3.0`) - patch(source, Span(in.offset), " ") + in.sourcePos(), MigrationVersion.Scala2to3) + if MigrationVersion.Scala2to3.needsPatch then + patch(source, Span(in.offset), " ") def possibleTemplateStart(isNew: Boolean = false): Unit = in.observeColonEOL(inTemplate = true) @@ -1417,34 +1487,26 @@ object Parsers { private def getFunction(tree: Tree): Option[Function] = tree match { case Parens(tree1) => getFunction(tree1) case Block(Nil, tree1) => getFunction(tree1) + case Function(_, _: CapturesAndResult) => + // A function tree like this will be desugared + // into a capturing type in the typer, + // so None is returned. + None case t: Function => Some(t) case _ => None } - private def checkFunctionNotErased(f: Function, context: String) = - def fail(span: Span) = - syntaxError(em"Implementation restriction: erased parameters are not supported in $context", span) - // erased parameter in type - val hasErasedParam = f match - case f: FunctionWithMods => f.hasErasedParams - case _ => false - if hasErasedParam then - fail(f.span) - // erased parameter in term - val hasErasedMods = f.args.collectFirst { - case v: ValDef if v.mods.is(Flags.Erased) => v - } - hasErasedMods match - case Some(param) => fail(param.span) - case _ => - - /** CaptureRef ::= ident | `this` + /** CaptureRef ::= ident | `this` | `cap` [`[` ident `]`] */ def captureRef(): Tree = if in.token == THIS then simpleRef() - else termIdent() match - case Ident(nme.CAPTURE_ROOT) => captureRoot - case id => id + else + val id = termIdent() + if isIdent(nme.raw.STAR) then + in.nextToken() + atSpan(startOffset(id)): + PostfixOp(id, Ident(nme.CC_REACH)) + else id /** CaptureSet ::= `{` CaptureRef {`,` CaptureRef} `}` -- under captureChecking */ @@ -1468,8 +1530,8 @@ object Parsers { * PolyFunType ::= HKTypeParamClause '=>' Type * | HKTypeParamClause ‘->’ [CaptureSet] Type -- under pureFunctions * FunTypeArgs ::= InfixType - * | `(' [ [ ‘[using]’ ‘['erased'] FunArgType {`,' FunArgType } ] `)' - * | '(' [ ‘[using]’ ‘['erased'] TypedFunParam {',' TypedFunParam } ')' + * | `(' [ [ ‘['erased'] FunArgType {`,' FunArgType } ] `)' + * | '(' [ ‘['erased'] TypedFunParam {',' TypedFunParam } ')' */ def typ(): Tree = val start = in.offset @@ -1511,6 +1573,7 @@ object Parsers { TermLambdaTypeTree(params.asInstanceOf[List[ValDef]], resultType) else if imods.isOneOf(Given | Impure) || erasedArgs.contains(true) then if imods.is(Given) && params.isEmpty then + imods &~= Given syntaxError(em"context function types require at least one parameter", paramSpan) FunctionWithMods(params, resultType, imods, erasedArgs.toList) else if !ctx.settings.YkindProjector.isDefault then @@ -1586,7 +1649,6 @@ object Parsers { atSpan(start, arrowOffset) { getFunction(body) match { case Some(f) => - checkFunctionNotErased(f, "poly function") PolyFunction(tparams, body) case 
None => syntaxError(em"Implementation restriction: polymorphic function types must have a value parameter", arrowOffset) @@ -1659,19 +1721,20 @@ object Parsers { /** FunParamClause ::= ‘(’ TypedFunParam {‘,’ TypedFunParam } ‘)’ */ def funParamClause(): List[ValDef] = - inParens(commaSeparated(() => typedFunParam(in.offset, ident()))) + inParensWithCommas(commaSeparated(() => typedFunParam(in.offset, ident()))) def funParamClauses(): List[List[ValDef]] = if in.token == LPAREN then funParamClause() :: funParamClauses() else Nil /** InfixType ::= RefinedType {id [nl] RefinedType} - * | RefinedType `^` + * | RefinedType `^` // under capture checking */ def infixType(): Tree = infixTypeRest(refinedType()) def infixTypeRest(t: Tree): Tree = infixOps(t, canStartInfixTypeTokens, refinedTypeFn, Location.ElseWhere, ParseKind.Type, - isOperator = !followingIsVararg() && !isPureArrow) + isOperator = !followingIsVararg() && !isPureArrow + && nextCanFollowOperator(canStartInfixTypeTokens)) /** RefinedType ::= WithType {[nl] Refinement} [`^` CaptureSet] */ @@ -1721,8 +1784,13 @@ object Parsers { if in.token == LBRACE || in.token == INDENT then t else - if sourceVersion.isAtLeast(future) then - deprecationWarning(DeprecatedWithOperator(), withOffset) + val withSpan = Span(withOffset, withOffset + 4) + report.errorOrMigrationWarning( + DeprecatedWithOperator(rewriteNotice(`3.4-migration`)), + source.atSpan(withSpan), + MigrationVersion.WithOperator) + if MigrationVersion.WithOperator.needsPatch then + patch(source, withSpan, "&") atSpan(startOffset(t)) { makeAndType(t, withType()) } else t @@ -1737,8 +1805,39 @@ object Parsers { }) else t - /** The block in a quote or splice */ - def stagedBlock() = inBraces(block(simplify = true)) + /** TypeBlock ::= {TypeBlockStat semi} Type + */ + def typeBlock(): Tree = + typeBlockStats() match + case Nil => typ() + case tdefs => Block(tdefs, typ()) + + def typeBlockStats(): List[Tree] = + val tdefs = new ListBuffer[Tree] + while in.token == TYPE do tdefs += typeBlockStat() + tdefs.toList + + /** TypeBlockStat ::= ‘type’ {nl} TypeDef + */ + def typeBlockStat(): Tree = + val mods = defAnnotsMods(BitSet()) + val tdef = typeDefOrDcl(in.offset, in.skipToken(mods)) + if in.token == SEMI then in.nextToken() + if in.isNewLine then in.nextToken() + tdef + + /** Quoted ::= ‘'’ ‘{’ Block ‘}’ + * | ‘'’ ‘[’ TypeBlock ‘]’ + */ + def quote(inPattern: Boolean): Tree = + atSpan(in.skipToken()) { + withinStaged(StageKind.Quoted | (if (inPattern) StageKind.QuotedPattern else 0)) { + val body = + if (in.token == LBRACKET) inBrackets(typeBlock()) + else inBraces(block(simplify = true)) + Quote(body, Nil) + } + } /** ExprSplice ::= ‘$’ spliceId -- if inside quoted block * | ‘$’ ‘{’ Block ‘}’ -- unless inside quoted pattern @@ -1754,7 +1853,8 @@ object Parsers { val expr = if (in.name.length == 1) { in.nextToken() - withinStaged(StageKind.Spliced)(if (inPattern) inBraces(pattern()) else stagedBlock()) + val inPattern = (staged & StageKind.QuotedPattern) != 0 + withinStaged(StageKind.Spliced)(inBraces(if inPattern then pattern() else block(simplify = true))) } else atSpan(in.offset + 1) { val id = Ident(in.name.drop(1)) @@ -1789,9 +1889,14 @@ object Parsers { val start = in.skipToken() Ident(tpnme.USCOREkw).withSpan(Span(start, in.lastOffset, start)) else - if sourceVersion.isAtLeast(future) then - deprecationWarning(em"`_` is deprecated for wildcard arguments of types: use `?` instead") - patch(source, Span(in.offset, in.offset + 1), "?") + if !inMatchPattern then + 
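The new `TypeBlock` production is what admits type definitions at the start of a `'[...]` quote; a sketch of the pattern syntax it parses (standard `scala.quoted` API, example types illustrative):

```scala
import scala.quoted.*

def describe[T: Type](using Quotes): String =
  Type.of[T] match
    case '[ type t; List[t] ] => "a List"        // `type t` is consumed by typeBlock()
    case _                    => "something else"
```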
report.errorOrMigrationWarning( + em"`_` is deprecated for wildcard arguments of types: use `?` instead${rewriteNotice(`3.4-migration`)}", + in.sourcePos(), + MigrationVersion.WildcardType) + if MigrationVersion.WildcardType.needsPatch then + patch(source, Span(in.offset, in.offset + 1), "?") + end if val start = in.skipToken() typeBounds().withSpan(Span(start, in.lastOffset, start)) // Allow symbols -_ and +_ through for compatibility with code written using kind-projector in Scala 3 underscore mode. @@ -1807,7 +1912,7 @@ object Parsers { else def singletonArgs(t: Tree): Tree = if in.token == LPAREN && in.featureEnabled(Feature.dependent) - then singletonArgs(AppliedTypeTree(t, inParens(commaSeparated(singleton)))) + then singletonArgs(AppliedTypeTree(t, inParensWithCommas(commaSeparated(singleton)))) else t singletonArgs(simpleType1()) @@ -1823,7 +1928,7 @@ object Parsers { def simpleType1() = simpleTypeRest { if in.token == LPAREN then atSpan(in.offset) { - makeTupleOrParens(inParens(argTypes(namedOK = false, wildOK = true))) + makeTupleOrParens(inParensWithCommas(argTypes(namedOK = false, wildOK = true))) } else if in.token == LBRACE then atSpan(in.offset) { RefinedTypeTree(EmptyTree, refinement(indentOK = false)) } @@ -1976,7 +2081,8 @@ object Parsers { /** TypeArgs ::= `[' Type {`,' Type} `]' * NamedTypeArgs ::= `[' NamedTypeArg {`,' NamedTypeArg} `]' */ - def typeArgs(namedOK: Boolean, wildOK: Boolean): List[Tree] = inBrackets(argTypes(namedOK, wildOK)) + def typeArgs(namedOK: Boolean, wildOK: Boolean): List[Tree] = + inBracketsWithCommas(argTypes(namedOK, wildOK)) /** Refinement ::= `{' RefineStatSeq `}' */ @@ -2012,7 +2118,7 @@ object Parsers { else if in.token == VIEWBOUND then report.errorOrMigrationWarning( em"view bounds `<%' are no longer supported, use a context bound `:' instead", - in.sourcePos(), from = `3.0`) + in.sourcePos(), MigrationVersion.Scala2to3) atSpan(in.skipToken()) { Function(Ident(pname) :: Nil, toplevelTyp()) } :: contextBounds(pname) @@ -2121,7 +2227,6 @@ object Parsers { atSpan(start, arrowOffset) { getFunction(body) match case Some(f) => - checkFunctionNotErased(f, "poly function") PolyFunction(tparams, f) case None => syntaxError(em"Implementation restriction: polymorphic function literals must have a value parameter", arrowOffset) @@ -2162,7 +2267,7 @@ object Parsers { report.errorOrMigrationWarning( em"""`do <body> while <cond>` is no longer supported, |use `while <body> ; <cond> do ()` instead.${rewriteNotice()}""", - in.sourcePos(), from = `3.0`) + in.sourcePos(), MigrationVersion.Scala2to3) val start = in.skipToken() atSpan(start) { val body = expr() @@ -2170,7 +2275,7 @@ object Parsers { val whileStart = in.offset accept(WHILE) val cond = expr() - if migrateTo3 then + if MigrationVersion.Scala2to3.needsPatch then patch(source, Span(start, start + 2), "while ({") patch(source, Span(whileStart, whileStart + 5), ";") cond match { @@ -2180,7 +2285,7 @@ object Parsers { case _ => } patch(source, cond.span.endPos, "}) ()") - WhileDo(Block(body, cond), Literal(Constant(()))) + WhileDo(Block(body, cond), unitLiteral) } case TRY => val tryOffset = in.offset @@ -2210,7 +2315,7 @@ object Parsers { in.nextToken(); val expr = subExpr() if expr.span.exists then expr - else Literal(Constant(())) // finally without an expression + else unitLiteral // finally without an expression } else { if handler.isEmpty then @@ -2273,16 +2378,16 @@ object Parsers { in.nextToken() if isVarargSplice then report.errorOrMigrationWarning( - em"The syntax `x: _*` is no longer supported for vararg splices; 
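The wildcard-type migration above warns only outside match patterns (the new `inMatchPattern` guard) and patches `_` to `?`. A before/after sketch:

```scala
def size(xs: List[_]): Int = xs.size  // reported under MigrationVersion.WildcardType
def size2(xs: List[?]): Int = xs.size // the patched form
```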
use `x*` instead${rewriteNotice(`future-migration`)}", + em"The syntax `x: _*` is no longer supported for vararg splices; use `x*` instead${rewriteNotice(`3.4-migration`)}", in.sourcePos(uscoreStart), - future) - if sourceVersion == `future-migration` then - patch(source, Span(t.span.end, in.lastOffset), " *") + MigrationVersion.VarargSpliceAscription) + if MigrationVersion.VarargSpliceAscription.needsPatch then + patch(source, Span(t.span.end, in.lastOffset), "*") else if opStack.nonEmpty then report.errorOrMigrationWarning( em"""`_*` can be used only for last argument of method application. |It is no longer allowed in operands of infix operations.""", - in.sourcePos(uscoreStart), from = `3.0`) + in.sourcePos(uscoreStart), MigrationVersion.Scala2to3) else syntaxError(SeqWildcardPatternPos(), uscoreStart) Typed(t, atSpan(uscoreStart) { Ident(tpnme.WILDCARD_STAR) }) @@ -2349,13 +2454,12 @@ object Parsers { report.errorOrMigrationWarning( em"This syntax is no longer supported; parameter needs to be enclosed in (...)${rewriteNotice(`future-migration`)}", source.atSpan(Span(start, in.lastOffset)), - from = future) + MigrationVersion.ParameterEnclosedByParenthesis) in.nextToken() val t = infixType() - if (sourceVersion == `future-migration`) { + if MigrationVersion.ParameterEnclosedByParenthesis.needsPatch then patch(source, Span(start), "(") patch(source, Span(in.lastOffset), ")") - } t } else TypeTree() @@ -2413,7 +2517,8 @@ object Parsers { def postfixExprRest(t: Tree, location: Location): Tree = infixOps(t, in.canStartExprTokens, prefixExpr, location, ParseKind.Expr, - isOperator = !(location.inArgs && followingIsVararg())) + isOperator = !(location.inArgs && followingIsVararg()) + && nextCanFollowOperator(canStartInfixExprTokens)) /** PrefixExpr ::= [PrefixOperator'] SimpleExpr * PrefixOperator ::= ‘-’ | ‘+’ | ‘~’ | ‘!’ (if not backquoted) @@ -2472,19 +2577,12 @@ object Parsers { placeholderParams = param :: placeholderParams atSpan(start) { Ident(pname) } case LPAREN => - atSpan(in.offset) { makeTupleOrParens(inParens(exprsInParensOrBindings())) } + atSpan(in.offset) { makeTupleOrParens(inParensWithCommas(exprsInParensOrBindings())) } case LBRACE | INDENT => canApply = false blockExpr() case QUOTE => - atSpan(in.skipToken()) { - withinStaged(StageKind.Quoted | (if (location.inPattern) StageKind.QuotedPattern else 0)) { - val body = - if (in.token == LBRACKET) inBrackets(typ()) - else stagedBlock() - Quote(body, Nil) - } - } + quote(location.inPattern) case NEW => canApply = false newExpr() @@ -2577,15 +2675,15 @@ object Parsers { /** ParArgumentExprs ::= `(' [‘using’] [ExprsInParens] `)' * | `(' [ExprsInParens `,'] PostfixExpr `*' ')' */ - def parArgumentExprs(): (List[Tree], Boolean) = inParens { - if in.token == RPAREN then - (Nil, false) - else if isIdent(nme.using) then - in.nextToken() - (commaSeparated(argumentExpr), true) - else - (commaSeparated(argumentExpr), false) - } + def parArgumentExprs(): (List[Tree], Boolean) = + inParensWithCommas: + if in.token == RPAREN then + (Nil, false) + else if isIdent(nme.using) then + in.nextToken() + (commaSeparated(argumentExpr), true) + else + (commaSeparated(argumentExpr), false) /** ArgumentExprs ::= ParArgumentExprs * | [nl] BlockExpr @@ -2706,7 +2804,7 @@ object Parsers { atSpan(startOffset(pat), accept(LARROW)) { val checkMode = if casePat then GenCheckMode.FilterAlways - else if sourceVersion.isAtLeast(`future`) then GenCheckMode.Check + else if sourceVersion.isAtLeast(`3.4`) then GenCheckMode.Check else if 
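The `VarargSpliceAscription` migration spanning this hunk drops the `: _*` ascription in favour of a postfix `*` splice. Sketch:

```scala
def printAll(xs: String*): Unit = xs.foreach(println)
val names = Seq("a", "b")
printAll(names: _*) // reported; the ascription span is patched to "*"
printAll(names*)    // the resulting Scala 3 syntax
```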
sourceVersion.isAtLeast(`3.2`) then GenCheckMode.CheckAndFilter else GenCheckMode.FilterNow // filter on source version < 3.2, for backward compat GenFrom(pat, subExpr(), checkMode) @@ -2806,7 +2904,7 @@ object Parsers { def caseClause(exprOnly: Boolean = false): CaseDef = atSpan(in.offset) { val (pat, grd) = inSepRegion(InCase) { accept(CASE) - (pattern(), guard()) + (withinMatchPattern(pattern()), guard()) } CaseDef(pat, grd, atSpan(accept(ARROW)) { if exprOnly then @@ -2830,7 +2928,7 @@ object Parsers { val start = in.skipToken() Ident(tpnme.WILDCARD).withSpan(Span(start, in.lastOffset, start)) case _ => - rejectWildcardType(infixType()) + withinMatchPattern(rejectWildcardType(infixType())) } } CaseDef(pat, EmptyTree, atSpan(accept(ARROW)) { @@ -2855,50 +2953,51 @@ object Parsers { if (isIdent(nme.raw.BAR)) { in.nextToken(); pattern1(location) :: patternAlts(location) } else Nil - /** Pattern1 ::= PatVar Ascription - * | [‘-’] integerLiteral Ascription - * | [‘-’] floatingPointLiteral Ascription + /** Pattern1 ::= PatVar `:` RefinedType + * | [‘-’] integerLiteral `:` RefinedType + * | [‘-’] floatingPointLiteral `:` RefinedType * | Pattern2 */ def pattern1(location: Location = Location.InPattern): Tree = - val p = pattern2() + val p = pattern2(location) if in.isColon then val isVariableOrNumber = isVarPattern(p) || p.isInstanceOf[Number] if !isVariableOrNumber then - report.gradualErrorOrMigrationWarning( + report.errorOrMigrationWarning( em"""Type ascriptions after patterns other than: | * variable pattern, e.g. `case x: String =>` | * number literal pattern, e.g. `case 10.5: Double =>` |are no longer supported. Remove the type ascription or move it to a separate variable pattern.""", in.sourcePos(), - warnFrom = `3.3`, - errorFrom = future - ) + MigrationVersion.AscriptionAfterPattern) in.nextToken() ascription(p, location) else p /** Pattern3 ::= InfixPattern - * | PatVar ‘*’ */ - def pattern3(): Tree = + def pattern3(location: Location): Tree = val p = infixPattern() if followingIsVararg() then val start = in.skipToken() - p match - case p @ Ident(name) if name.isVarPattern => - Typed(p, atSpan(start) { Ident(tpnme.WILDCARD_STAR) }) - case _ => - syntaxError(em"`*` must follow pattern variable", start) - p + if location.inArgs then + p match + case p @ Ident(name) if name.isVarPattern => + Typed(p, atSpan(start) { Ident(tpnme.WILDCARD_STAR) }) + case _ => + syntaxError(em"`*` must follow pattern variable", start) + p + else + syntaxError(em"bad use of `*` - sequence pattern not allowed here", start) + p else p /** Pattern2 ::= [id `@'] Pattern3 */ - val pattern2: () => Tree = () => pattern3() match + val pattern2: Location => Tree = location => pattern3(location) match case p @ Ident(name) if in.token == AT => val offset = in.skipToken() - pattern3() match { + pattern3(location) match { case pt @ Bind(nme.WILDCARD, pt1: Typed) if pt.mods.is(Given) => atSpan(startOffset(p), 0) { Bind(name, pt1).withMods(pt.mods) } case Typed(Ident(nme.WILDCARD), pt @ Ident(tpnme.WILDCARD_STAR)) => @@ -2909,18 +3008,13 @@ object Parsers { case p => p - private def warnStarMigration(p: Tree) = - report.errorOrMigrationWarning( - em"The syntax `x: _*` is no longer supported for vararg splices; use `x*` instead", - in.sourcePos(startOffset(p)), - from = future) - /** InfixPattern ::= SimplePattern {id [nl] SimplePattern} */ def infixPattern(): Tree = infixOps( simplePattern(), in.canStartExprTokens, simplePatternFn, Location.InPattern, ParseKind.Pattern, - isOperator = in.name != nme.raw.BAR && 
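Per the `AscriptionAfterPattern` message above, a type ascription may now follow only a variable or number-literal pattern. Illustration (the `Some` case shows the rejected form):

```scala
def f(x: Any): Unit = x match
  case s: String          => () // allowed: variable pattern
  case 10.5: Double       => () // allowed: number literal pattern
  case Some(y): Some[Int] => () // reported: ascription after a constructor pattern
```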
!followingIsVararg()) + isOperator = in.name != nme.raw.BAR && !followingIsVararg() + && nextCanFollowOperator(canStartPatternTokens)) /** SimplePattern ::= PatVar * | Literal @@ -2928,6 +3022,7 @@ object Parsers { * | XmlPattern * | `(' [Patterns] `)' * | SimplePattern1 [TypeArgs] [ArgumentPatterns] + * | ‘given’ RefinedType * SimplePattern1 ::= SimpleRef * | SimplePattern1 `.' id * PatVar ::= id @@ -2941,7 +3036,7 @@ object Parsers { case USCORE => wildcardIdent() case LPAREN => - atSpan(in.offset) { makeTupleOrParens(inParens(patternsOpt())) } + atSpan(in.offset) { makeTupleOrParens(inParensWithCommas(patternsOpt())) } case QUOTE => simpleExpr(Location.InPattern) case XMLSTART => @@ -2987,7 +3082,7 @@ object Parsers { * | ‘(’ [Patterns ‘,’] PatVar ‘*’ ‘)’ */ def argumentPatterns(): List[Tree] = - inParens(patternsOpt(Location.InPatternArgs)) + inParensWithCommas(patternsOpt(Location.InPatternArgs)) /* -------- MODIFIERS and ANNOTATIONS ------------------------------------------- */ @@ -3028,7 +3123,8 @@ object Parsers { val name = in.name val mod = atSpan(in.skipToken()) { modOfToken(tok, name) } - if (mods.isOneOf(mod.flags)) syntaxError(RepeatedModifier(mod.flags.flagsString)) + if mods.isOneOf(mod.flags) then + syntaxError(RepeatedModifier(mod.flags.flagsString, source, mod.span), mod.span) addMod(mods, mod) } @@ -3046,15 +3142,22 @@ object Parsers { if (in.token == LBRACKET) { if (mods.is(Local) || mods.hasPrivateWithin) syntaxError(DuplicatePrivateProtectedQualifier()) - inBrackets { + val startOffset = in.offset + val mods1 = inBrackets { if in.token == THIS then - if sourceVersion.isAtLeast(future) then - deprecationWarning( - em"The [this] qualifier will be deprecated in the future; it should be dropped.") in.nextToken() mods | Local else mods.withPrivateWithin(ident().toTypeName) } + if mods1.is(Local) then + report.errorOrMigrationWarning( + em"""The [this] qualifier will be deprecated in the future; it should be dropped. 
+ |See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html${rewriteNotice(`3.4-migration`)}""", + in.sourcePos(), + MigrationVersion.RemoveThisQualifier) + if MigrationVersion.RemoveThisQualifier.needsPatch then + patch(source, Span(startOffset, in.lastOffset), "") + mods1 } else mods @@ -3130,33 +3233,29 @@ object Parsers { * | UsingParamClause */ def typeOrTermParamClauses( - ownerKind: ParamOwner, - numLeadParams: Int = 0 - ): List[List[TypeDef] | List[ValDef]] = + paramOwner: ParamOwner, numLeadParams: Int = 0): List[List[TypeDef] | List[ValDef]] = - def recur(firstClause: Boolean, numLeadParams: Int, prevIsTypeClause: Boolean): List[List[TypeDef] | List[ValDef]] = + def recur(numLeadParams: Int, firstClause: Boolean, prevIsTypeClause: Boolean): List[List[TypeDef] | List[ValDef]] = newLineOptWhenFollowedBy(LPAREN) newLineOptWhenFollowedBy(LBRACKET) if in.token == LPAREN then val paramsStart = in.offset - val params = termParamClause( - numLeadParams, - firstClause = firstClause) + val params = termParamClause(paramOwner, numLeadParams, firstClause) val lastClause = params.nonEmpty && params.head.mods.flags.is(Implicit) params :: ( if lastClause then Nil - else recur(firstClause = false, numLeadParams + params.length, prevIsTypeClause = false)) + else recur(numLeadParams + params.length, firstClause = false, prevIsTypeClause = false)) else if in.token == LBRACKET then if prevIsTypeClause then syntaxError( em"Type parameter lists must be separated by a term or using parameter list", in.offset ) - typeParamClause(ownerKind) :: recur(firstClause, numLeadParams, prevIsTypeClause = true) + typeParamClause(paramOwner) :: recur(numLeadParams, firstClause, prevIsTypeClause = true) else Nil end recur - recur(firstClause = true, numLeadParams = numLeadParams, prevIsTypeClause = false) + recur(numLeadParams, firstClause = true, prevIsTypeClause = false) end typeOrTermParamClauses @@ -3175,23 +3274,20 @@ object Parsers { * HkTypeParamClause ::= ‘[’ HkTypeParam {‘,’ HkTypeParam} ‘]’ * HkTypeParam ::= {Annotation} [‘+’ | ‘-’] (id [HkTypePamClause] | ‘_’) TypeBounds */ - def typeParamClause(ownerKind: ParamOwner): List[TypeDef] = inBrackets { + def typeParamClause(paramOwner: ParamOwner): List[TypeDef] = inBracketsWithCommas { def checkVarianceOK(): Boolean = - val ok = ownerKind != ParamOwner.Def && ownerKind != ParamOwner.TypeParam + val ok = paramOwner.acceptsVariance if !ok then syntaxError(em"no `+/-` variance annotation allowed here") in.nextToken() ok def typeParam(): TypeDef = { - val isAbstractOwner = ownerKind == ParamOwner.Type || ownerKind == ParamOwner.TypeParam + val isAbstractOwner = paramOwner == ParamOwner.Type || paramOwner == ParamOwner.TypeParam val start = in.offset var mods = annotsAsMods() | Param - if ownerKind == ParamOwner.Class then mods |= PrivateLocal - if Feature.ccEnabled && in.token == SEALED then - if ownerKind == ParamOwner.Def then mods |= Sealed - else syntaxError(em"`sealed` modifier only allowed for method type parameters") - in.nextToken() + if paramOwner == ParamOwner.Class || paramOwner == ParamOwner.CaseClass then + mods |= PrivateLocal if isIdent(nme.raw.PLUS) && checkVarianceOK() then mods |= Covariant else if isIdent(nme.raw.MINUS) && checkVarianceOK() then @@ -3211,16 +3307,16 @@ object Parsers { commaSeparated(() => typeParam()) } - def typeParamClauseOpt(ownerKind: ParamOwner): List[TypeDef] = - if (in.token == LBRACKET) typeParamClause(ownerKind) else Nil + def typeParamClauseOpt(paramOwner: ParamOwner): List[TypeDef] = + 
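The `RemoveThisQualifier` migration above erases the whole `[this]` span when patching, since `private[this]` is equivalent to plain `private` in Scala 3. Sketch:

```scala
class Counter:
  private[this] var count = 0 // reported; `[this]` is patched away under -rewrite
  private var total = 0       // the resulting form
```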
if (in.token == LBRACKET) typeParamClause(paramOwner) else Nil /** ContextTypes ::= FunArgType {‘,’ FunArgType} */ - def contextTypes(ofClass: Boolean, numLeadParams: Int, impliedMods: Modifiers): List[ValDef] = + def contextTypes(paramOwner: ParamOwner, numLeadParams: Int, impliedMods: Modifiers): List[ValDef] = val tps = commaSeparated(funArgType) var counter = numLeadParams def nextIdx = { counter += 1; counter } - val paramFlags = if ofClass then LocalParamAccessor else Param + val paramFlags = if paramOwner.isClass then LocalParamAccessor else Param tps.map(makeSyntheticParameter(nextIdx, _, paramFlags | Synthetic | impliedMods.flags)) /** ClsTermParamClause ::= ‘(’ ClsParams ‘)’ | UsingClsTermParamClause @@ -3242,11 +3338,8 @@ object Parsers { * @return the list of parameter definitions */ def termParamClause( + paramOwner: ParamOwner, numLeadParams: Int, // number of parameters preceding this clause - ofClass: Boolean = false, // owner is a class - ofCaseClass: Boolean = false, // owner is a case class - prefix: Boolean = false, // clause precedes name of an extension method - givenOnly: Boolean = false, // only given parameters allowed firstClause: Boolean = false // clause is the first in regular list of clauses ): List[ValDef] = { var impliedMods: Modifiers = EmptyModifiers @@ -3265,7 +3358,7 @@ object Parsers { var mods = impliedMods.withAnnotations(annotations()) if isErasedKw then mods = addModifier(mods) - if (ofClass) { + if paramOwner.isClass then mods = addFlag(modifiers(start = mods), ParamAccessor) mods = if in.token == VAL then @@ -3277,9 +3370,8 @@ object Parsers { else if (!(mods.flags &~ (ParamAccessor | Inline | Erased | impliedMods.flags)).isEmpty) syntaxError(em"`val` or `var` expected") - if (firstClause && ofCaseClass) mods + if firstClause && paramOwner == ParamOwner.CaseClass then mods else mods | PrivateLocal - } else { if (isIdent(nme.inline) && in.isSoftModifierInParamModifierPosition) mods = addModifier(mods) @@ -3288,7 +3380,7 @@ object Parsers { atSpan(start, nameStart) { val name = ident() acceptColon() - if (in.token == ARROW && ofClass && !mods.is(Local)) + if (in.token == ARROW && paramOwner.isClass && !mods.is(Local)) syntaxError(VarValParametersMayNotBeCallByName(name, mods.is(Mutable))) // needed?, it's checked later anyway val tpt = paramType() @@ -3303,25 +3395,31 @@ object Parsers { def checkVarArgsRules(vparams: List[ValDef]): Unit = vparams match { case Nil => - case _ :: Nil if !prefix => case vparam :: rest => vparam.tpt match { case PostfixOp(_, op) if op.name == tpnme.raw.STAR => - syntaxError(VarArgsParamMustComeLast(), vparam.tpt.span) + if vparam.mods.isOneOf(GivenOrImplicit) then + syntaxError(VarArgsParamCannotBeGiven(vparam.mods.is(Given)), vparam.tpt.span) + if rest.nonEmpty then + syntaxError(VarArgsParamMustComeLast(), vparam.tpt.span) case _ => } checkVarArgsRules(rest) } // begin termParamClause - inParens { - if in.token == RPAREN && !prefix && !impliedMods.is(Given) then Nil + inParensWithCommas { + if in.token == RPAREN && paramOwner != ParamOwner.ExtensionPrefix && !impliedMods.is(Given) + then Nil else val clause = - if prefix && !isIdent(nme.using) && !isIdent(nme.erased) then param() :: Nil + if paramOwner == ParamOwner.ExtensionPrefix + && !isIdent(nme.using) && !isIdent(nme.erased) + then + param() :: Nil else paramMods() - if givenOnly && !impliedMods.is(Given) then + if paramOwner.takesOnlyUsingClauses && !impliedMods.is(Given) then syntaxError(em"`using` expected") val (firstParamMod, isParams) = var mods = 
EmptyModifiers @@ -3335,7 +3433,7 @@ object Parsers { || isIdent && (in.name == nme.inline || in.lookahead.isColon) (mods, isParams) (if isParams then commaSeparated(() => param()) - else contextTypes(ofClass, numLeadParams, impliedMods)) match { + else contextTypes(paramOwner, numLeadParams, impliedMods)) match { case Nil => Nil case (h :: t) => h.withAddedFlags(firstParamMod.flags) :: t } @@ -3349,31 +3447,21 @@ object Parsers { * * @return The parameter definitions */ - def termParamClauses( - ofClass: Boolean = false, - ofCaseClass: Boolean = false, - givenOnly: Boolean = false, - numLeadParams: Int = 0 - ): List[List[ValDef]] = + def termParamClauses(paramOwner: ParamOwner, numLeadParams: Int = 0): List[List[ValDef]] = - def recur(firstClause: Boolean, numLeadParams: Int): List[List[ValDef]] = + def recur(numLeadParams: Int, firstClause: Boolean): List[List[ValDef]] = newLineOptWhenFollowedBy(LPAREN) if in.token == LPAREN then val paramsStart = in.offset - val params = termParamClause( - numLeadParams, - ofClass = ofClass, - ofCaseClass = ofCaseClass, - givenOnly = givenOnly, - firstClause = firstClause) + val params = termParamClause(paramOwner, numLeadParams, firstClause) val lastClause = params.nonEmpty && params.head.mods.flags.is(Implicit) params :: ( if lastClause then Nil - else recur(firstClause = false, numLeadParams + params.length)) + else recur(numLeadParams + params.length, firstClause = false)) else Nil end recur - recur(firstClause = true, numLeadParams) + recur(numLeadParams, firstClause = true) end termParamClauses /* -------- DEFS ------------------------------------------- */ @@ -3388,7 +3476,7 @@ object Parsers { */ def importOrExportClause(leading: Token, mkTree: ImportConstr): List[Tree] = { val offset = accept(leading) - commaSeparated(importExpr(mkTree)) match { + commaSeparated(importExpr(leading, mkTree)) match { case t :: rest => // The first import should start at the start offset of the keyword. 
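The reworked `checkVarArgsRules` in the preceding hunk adds a dedicated error for vararg parameters in `given`/`implicit` clauses alongside the existing last-position rule. Both rejected shapes, sketched:

```scala
def f(xs: Int*, y: Int): Unit = () // error: VarArgsParamMustComeLast
def g(using xs: Int*): Unit = ()   // error: VarArgsParamCannotBeGiven
```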
val firstPos = @@ -3443,16 +3531,22 @@ object Parsers { * NamedSelector ::= id [‘as’ (id | ‘_’)] * WildCardSelector ::= ‘*' | ‘given’ [InfixType] */ - def importExpr(mkTree: ImportConstr): () => Tree = + def importExpr(leading: Token, mkTree: ImportConstr): () => Tree = + + def exprName = + (leading: @unchecked) match + case EXPORT => "export" + case IMPORT => "import" /** ‘*' | ‘_' */ def wildcardSelector() = - if in.token == USCORE && sourceVersion.isAtLeast(future) then + if in.token == USCORE then report.errorOrMigrationWarning( - em"`_` is no longer supported for a wildcard import; use `*` instead${rewriteNotice(`future-migration`)}", + em"`_` is no longer supported for a wildcard $exprName; use `*` instead${rewriteNotice(`future-migration`)}", in.sourcePos(), - from = future) - patch(source, Span(in.offset, in.offset + 1), "*") + MigrationVersion.ImportWildcard) + if MigrationVersion.ImportWildcard.needsPatch then + patch(source, Span(in.offset, in.offset + 1), "*") ImportSelector(atSpan(in.skipToken()) { Ident(nme.WILDCARD) }) /** 'given [InfixType]' */ @@ -3466,14 +3560,15 @@ object Parsers { /** id [‘as’ (id | ‘_’)] */ def namedSelector(from: Ident) = if in.token == ARROW || isIdent(nme.as) then - if in.token == ARROW && sourceVersion.isAtLeast(future) then + if in.token == ARROW then report.errorOrMigrationWarning( - em"The import renaming `a => b` is no longer supported ; use `a as b` instead${rewriteNotice(`future-migration`)}", + em"The $exprName renaming `a => b` is no longer supported ; use `a as b` instead${rewriteNotice(`future-migration`)}", in.sourcePos(), - from = future) - patch(source, Span(in.offset, in.offset + 2), - if testChar(in.offset - 1, ' ') && testChar(in.offset + 2, ' ') then "as" - else " as ") + MigrationVersion.ImportRename) + if MigrationVersion.ImportRename.needsPatch then + patch(source, Span(in.offset, in.offset + 2), + if testChar(in.offset - 1, ' ') && testChar(in.offset + 2, ' ') then "as" + else " as ") atSpan(startOffset(from), in.skipToken()) { val to = if in.token == USCORE then wildcardIdent() else termIdent() ImportSelector(from, if to.name == nme.ERROR then EmptyTree else to) @@ -3488,7 +3583,7 @@ object Parsers { case _ => if isIdent(nme.raw.STAR) then wildcardSelector() else - if !idOK then syntaxError(em"named imports cannot follow wildcard imports") + if !idOK then syntaxError(em"named ${exprName}s cannot follow wildcard ${exprName}s") namedSelector(termIdent()) } @@ -3535,12 +3630,8 @@ object Parsers { /** Def ::= val PatDef * | var VarDef * | def DefDef - * | type {nl} TypeDcl + * | type {nl} TypeDef * | TmplDef - * Dcl ::= val ValDcl - * | var ValDcl - * | def DefDcl - * | type {nl} TypeDcl * EnumCase ::= `case' (id ClassConstr [`extends' ConstrApps] | ids) */ def defOrDcl(start: Int, mods: Modifiers): Tree = in.token match { @@ -3561,15 +3652,15 @@ object Parsers { tmplDef(start, mods) } - /** PatDef ::= ids [‘:’ Type] ‘=’ Expr - * | Pattern2 [‘:’ Type] ‘=’ Expr + /** PatDef ::= ids [‘:’ Type] [‘=’ Expr] + * | Pattern2 [‘:’ Type] [‘=’ Expr] * VarDef ::= PatDef - * | id {`,' id} `:' Type `=' `_' (deprecated in 3.x) - * ValDcl ::= id {`,' id} `:' Type - * VarDcl ::= id {`,' id} `:' Type + * | id {`,' id} `:' Type `=' `_' (deprecated in 3.x) */ def patDefOrDcl(start: Offset, mods: Modifiers): Tree = atSpan(start, nameStart) { - val first = pattern2() + if in.token != USCORE && isKeyword(in.token) then + syntaxError(ExpectedTokenButFound(IDENTIFIER, in.token), Span(in.offset)) + val first = pattern2(Location.InPattern) var lhs = first 
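Since `importExpr` now receives the `leading` token, the wildcard and renaming migrations above can word their diagnostics for both `import` and `export`, and both forms are patched under `-rewrite`. A combined sketch:

```scala
import scala.collection.mutable.{Map => MutableMap, _} // reported: ImportRename, ImportWildcard
import scala.collection.mutable.{Map as MutableMap, *} // the patched form
```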
match { case id: Ident if in.token == COMMA => in.nextToken() @@ -3585,11 +3676,13 @@ object Parsers { subExpr() match case rhs0 @ Ident(name) if placeholderParams.nonEmpty && name == placeholderParams.head.name && !tpt.isEmpty && mods.is(Mutable) && lhs.forall(_.isInstanceOf[Ident]) => - if sourceVersion.isAtLeast(future) then - deprecationWarning( - em"""`= _` has been deprecated; use `= uninitialized` instead. - |`uninitialized` can be imported with `scala.compiletime.uninitialized`.""", - rhsOffset) + report.errorOrMigrationWarning( + em"""`= _` has been deprecated; use `= uninitialized` instead. + |`uninitialized` can be imported with `scala.compiletime.uninitialized`.${rewriteNotice(`3.4-migration`)}""", + in.sourcePos(rhsOffset), + MigrationVersion.UninitializedVars) + if MigrationVersion.UninitializedVars.needsPatch then + patch(source, Span(rhsOffset, rhsOffset + 1), "scala.compiletime.uninitialized") placeholderParams = placeholderParams.tail atSpan(rhs0.span) { Ident(nme.WILDCARD) } case rhs0 => rhs0 @@ -3615,9 +3708,8 @@ object Parsers { } } - /** DefDef ::= DefSig [‘:’ Type] ‘=’ Expr + /** DefDef ::= DefSig [‘:’ Type] [‘=’ Expr] * | this TypelessClauses [DefImplicitClause] `=' ConstrExpr - * DefDcl ::= DefSig `:' Type * DefSig ::= id [DefTypeParamClause] DefTermParamClauses * * if clauseInterleaving is enabled: @@ -3631,16 +3723,17 @@ object Parsers { else ": Unit " // trailing space ensures that `def f()def g()` works. if migrateTo3 then report.errorOrMigrationWarning( - em"Procedure syntax no longer supported; `$toInsert` should be inserted here", - in.sourcePos(), from = `3.0`) - patch(source, Span(in.lastOffset), toInsert) + em"Procedure syntax no longer supported; `$toInsert` should be inserted here${rewriteNotice()}", + in.sourcePos(), MigrationVersion.Scala2to3) + if MigrationVersion.Scala2to3.needsPatch then + patch(source, Span(in.lastOffset), toInsert) true else false if (in.token == THIS) { in.nextToken() - val vparamss = termParamClauses(numLeadParams = numLeadParams) + val vparamss = termParamClauses(ParamOwner.Def, numLeadParams) if (vparamss.isEmpty || vparamss.head.take(1).exists(_.mods.isOneOf(GivenOrImplicit))) in.token match { case LBRACKET => syntaxError(em"no type parameters allowed here") @@ -3661,10 +3754,10 @@ object Parsers { val paramss = if in.featureEnabled(Feature.clauseInterleaving) then // If you are making interleaving stable manually, please refer to the PR introducing it instead, section "How to make non-experimental" - typeOrTermParamClauses(ParamOwner.Def, numLeadParams = numLeadParams) + typeOrTermParamClauses(ParamOwner.Def, numLeadParams) else val tparams = typeParamClauseOpt(ParamOwner.Def) - val vparamss = termParamClauses(numLeadParams = numLeadParams) + val vparamss = termParamClauses(ParamOwner.Def, numLeadParams) joinParams(tparams, vparamss) @@ -3702,10 +3795,10 @@ object Parsers { val stats = selfInvocation() :: ( if (isStatSep) { in.nextToken(); blockStatSeq() } else Nil) - Block(stats, Literal(Constant(()))) + Block(stats, unitLiteral) } } - else Block(selfInvocation() :: Nil, Literal(Constant(()))) + else Block(selfInvocation() :: Nil, unitLiteral) /** SelfInvocation ::= this ArgumentExprs {ArgumentExprs} */ @@ -3715,7 +3808,7 @@ object Parsers { argumentExprss(mkApply(Ident(nme.CONSTRUCTOR), argumentExprs())) } - /** TypeDcl ::= id [TypeParamClause] {FunParamClause} TypeBounds [‘=’ Type] + /** TypeDef ::= id [TypeParamClause] {FunParamClause} TypeBounds [‘=’ Type] */ def typeDefOrDcl(start: Offset, mods: Modifiers): Tree = { 
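The `UninitializedVars` migration above patches the `_` initializer to the fully qualified `scala.compiletime.uninitialized`. Sketch:

```scala
class Cache:
  var data: String = _                               // reported under MigrationVersion.UninitializedVars
  var meta: String = scala.compiletime.uninitialized // the patched form
```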
newLinesOpt() @@ -3758,6 +3851,8 @@ object Parsers { else makeTypeDef(bounds) case SEMI | NEWLINE | NEWLINES | COMMA | RBRACE | OUTDENT | EOF => makeTypeDef(typeBounds()) + case _ if (staged & StageKind.QuotedPattern) != 0 => + makeTypeDef(typeBounds()) case _ => syntaxErrorOrIncomplete(ExpectedTypeBoundOrEquals(in.token)) return EmptyTree // return to avoid setting the span to EmptyTree @@ -3803,16 +3898,16 @@ } def classDefRest(start: Offset, mods: Modifiers, name: TypeName): TypeDef = - val constr = classConstr(isCaseClass = mods.is(Case)) + val constr = classConstr(if mods.is(Case) then ParamOwner.CaseClass else ParamOwner.Class) val templ = templateOpt(constr) finalizeDef(TypeDef(name, templ), mods, start) /** ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsTermParamClauses */ - def classConstr(isCaseClass: Boolean = false): DefDef = atSpan(in.lastOffset) { - val tparams = typeParamClauseOpt(ParamOwner.Class) + def classConstr(paramOwner: ParamOwner): DefDef = atSpan(in.lastOffset) { + val tparams = typeParamClauseOpt(paramOwner) val cmods = fromWithinClassConstr(constrModsOpt()) - val vparamss = termParamClauses(ofClass = true, ofCaseClass = isCaseClass) + val vparamss = termParamClauses(paramOwner) makeConstructor(tparams, vparamss).withMods(cmods) } @@ -3830,7 +3925,9 @@ } private def checkAccessOnly(mods: Modifiers, where: String): Modifiers = - val mods1 = mods & (AccessFlags | Enum) + // We allow `infix` to mark the `enum`'s type as infix. + // Syntax rules disallow the soft infix modifier on `case`s. + val mods1 = mods & (AccessFlags | Enum | Infix) if mods1 ne mods then syntaxError(em"Only access modifiers are allowed on enum $where") mods1 @@ -3841,7 +3938,7 @@ val mods1 = checkAccessOnly(mods, "definitions") val modulName = ident() val clsName = modulName.toTypeName - val constr = classConstr() + val constr = classConstr(ParamOwner.Class) val templ = template(constr, isEnum = true) finalizeDef(TypeDef(clsName, templ), mods1, start) } @@ -3863,7 +3960,7 @@ val caseDef = if (in.token == LBRACKET || in.token == LPAREN || in.token == AT || isModifier) { val clsName = id.name.toTypeName - val constr = classConstr(isCaseClass = true) + val constr = classConstr(ParamOwner.CaseClass) TypeDef(clsName, caseTemplate(constr)) } else @@ -3910,11 +4007,11 @@ val name = if isIdent && followingIsGivenSig() then ident() else EmptyTermName val gdef = - val tparams = typeParamClauseOpt(ParamOwner.Def) + val tparams = typeParamClauseOpt(ParamOwner.Given) newLineOpt() val vparamss = if in.token == LPAREN && in.lookahead.isIdent(nme.using) - then termParamClauses(givenOnly = true) + then termParamClauses(ParamOwner.Given) else Nil newLinesOpt() val noParams = tparams.isEmpty && vparamss.isEmpty @@ -3954,15 +4051,15 @@ */ def extension(): ExtMethods = val start = in.skipToken() - val tparams = typeParamClauseOpt(ParamOwner.Def) + val tparams = typeParamClauseOpt(ParamOwner.ExtensionPrefix) val leadParamss = ListBuffer[List[ValDef]]() def numLeadParams = leadParamss.map(_.length).sum while - val extParams = termParamClause(numLeadParams, prefix = true) + val extParams = termParamClause(ParamOwner.ExtensionPrefix, numLeadParams) leadParamss += extParams isUsingClause(extParams) do () - leadParamss ++= termParamClauses(givenOnly = true, numLeadParams = numLeadParams) + leadParamss ++= termParamClauses(ParamOwner.ExtensionFollow, numLeadParams) if in.isColon then syntaxError(em"no `:` expected 
here") in.nextToken() @@ -4055,7 +4152,7 @@ object Parsers { if (in.token == LBRACE || in.token == COLONeol) { report.errorOrMigrationWarning( em"`extends` must be followed by at least one parent", - in.sourcePos(), from = `3.0`) + in.sourcePos(), MigrationVersion.Scala2to3) Nil } else constrApps() @@ -4202,7 +4299,6 @@ object Parsers { * TemplateStat ::= Import * | Export * | Annotations Modifiers Def - * | Annotations Modifiers Dcl * | Extension * | Expr1 * | @@ -4232,10 +4328,10 @@ object Parsers { } /** RefineStatSeq ::= RefineStat {semi RefineStat} - * RefineStat ::= ‘val’ VarDcl - * | ‘def’ DefDcl - * | ‘type’ {nl} TypeDcl - * (in reality we admit Defs and vars and filter them out afterwards in `checkLegal`) + * RefineStat ::= ‘val’ VarDef + * | ‘def’ DefDef + * | ‘type’ {nl} TypeDef + * (in reality we admit class defs and vars and filter them out afterwards in `checkLegal`) */ def refineStatSeq(): List[Tree] = { val stats = new ListBuffer[Tree] diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala index fac73bfb4992..ea43706e9fdb 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala @@ -4,24 +4,27 @@ package parsing import scala.language.unsafeNulls -import core.Names._, core.Contexts._, core.Decorators._, util.Spans._ -import core.StdNames._, core.Comments._ +import core.Names.*, core.Contexts.*, core.Decorators.*, util.Spans.* +import core.StdNames.*, core.Comments.* import util.SourceFile -import util.Chars._ +import util.Chars.* import util.{SourcePosition, CharBuffer} import util.Spans.Span import config.Config -import Tokens._ +import Tokens.* import scala.annotation.{switch, tailrec} import scala.collection.mutable import scala.collection.immutable.SortedMap import rewrites.Rewrites.patch import config.Feature import config.Feature.{migrateTo3, fewerBracesEnabled} -import config.SourceVersion.`3.0` +import config.SourceVersion.{`3.0`, `3.0-migration`} +import config.MigrationVersion import reporting.{NoProfile, Profile, Message} import java.util.Objects +import dotty.tools.dotc.reporting.Message.rewriteNotice +import dotty.tools.dotc.config.Feature.sourceVersion object Scanners { @@ -227,11 +230,11 @@ object Scanners { */ private var docstringMap: SortedMap[Int, Comment] = SortedMap.empty - /* A Buffer for comment positions */ - private val commentPosBuf = new mutable.ListBuffer[Span] + /* A Buffer for comments */ + private val commentBuf = new mutable.ListBuffer[Comment] - /** Return a list of all the comment positions */ - def commentSpans: List[Span] = commentPosBuf.toList + /** Return a list of all the comments */ + def comments: List[Comment] = commentBuf.toList private def addComment(comment: Comment): Unit = { val lookahead = lookaheadReader() @@ -246,18 +249,19 @@ object Scanners { def getDocComment(pos: Int): Option[Comment] = docstringMap.get(pos) /** A buffer for comments */ - private val commentBuf = CharBuffer(initialCharBufferSize) + private val currentCommentBuf = CharBuffer(initialCharBufferSize) def toToken(identifier: SimpleName): Token = def handleMigration(keyword: Token): Token = if scala3keywords.contains(keyword) && migrateTo3 then val what = tokenString(keyword) report.errorOrMigrationWarning( - em"$what is now a keyword, write `$what` instead of $what to keep it as an identifier", + em"$what is now a keyword, write `$what` instead of $what to keep it as an identifier${rewriteNotice("This", `3.0-migration`)}", 
sourcePos(), - from = `3.0`) - patch(source, Span(offset), "`") - patch(source, Span(offset + identifier.length), "`") + MigrationVersion.Scala2to3) + if MigrationVersion.Scala2to3.needsPatch then + patch(source, Span(offset), "`") + patch(source, Span(offset + identifier.length), "`") IDENTIFIER else keyword val idx = identifier.start @@ -467,7 +471,7 @@ object Scanners { em"""$what starts with an operator; |it is now treated as a continuation of the $previous, |not as a separate statement.""", - sourcePos(), from = `3.0`) + sourcePos(), MigrationVersion.Scala2to3) true } @@ -523,7 +527,7 @@ object Scanners { * * The following tokens can start an indentation region: * - * : = => <- if then else while do try catch + * : = => <- if then else while do try catch * finally for yield match throw return with * * Inserting an INDENT starts a new indentation region with the indentation of the current @@ -1019,7 +1023,7 @@ object Scanners { private def skipComment(): Boolean = { def appendToComment(ch: Char) = - if (keepComments) commentBuf.append(ch) + if (keepComments) currentCommentBuf.append(ch) def nextChar() = { appendToComment(ch) Scanner.this.nextChar() @@ -1047,9 +1051,9 @@ object Scanners { def finishComment(): Boolean = { if (keepComments) { val pos = Span(start, charOffset - 1, start) - val comment = Comment(pos, commentBuf.toString) - commentBuf.clear() - commentPosBuf += pos + val comment = Comment(pos, currentCommentBuf.toString) + currentCommentBuf.clear() + commentBuf += comment if (comment.isDocComment) addComment(comment) @@ -1065,7 +1069,7 @@ object Scanners { else if (ch == '*') { nextChar(); skipComment(); finishComment() } else { // This was not a comment, remove the `/` from the buffer - commentBuf.clear() + currentCommentBuf.clear() false } } diff --git a/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala b/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala index d11db73b0455..d71e4cf11102 100644 --- a/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala @@ -3,9 +3,9 @@ package dotc package parsing import util.SourceFile -import core._ -import Contexts._ -import Parsers._ +import core.* +import Contexts.* +import Parsers.* /**

Performs the following context-free rewritings:

@@ -45,7 +45,7 @@ import Parsers._ */ object ScriptParsers { - import ast.untpd._ + import ast.untpd.* class ScriptParser(source: SourceFile)(using Context) extends Parser(source) { @@ -118,7 +118,7 @@ object ScriptParsers { * } * } */ - import definitions._ + import definitions.* def emptyPkg = atPos(0, 0, 0) { Ident(nme.EMPTY_PACKAGE_NAME) } def emptyInit = DefDef( diff --git a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala index dba0ad3fa2ee..fbf4e8d701dd 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala @@ -5,7 +5,7 @@ package parsing import scala.language.unsafeNulls import collection.immutable.BitSet -import core.Decorators._ +import core.Decorators.* import core.StdNames.nme abstract class TokensCommon { @@ -14,7 +14,7 @@ abstract class TokensCommon { type Token = Int type TokenSet = BitSet - def tokenRange(lo: Int, hi: Int): TokenSet = BitSet(lo to hi: _*) + def tokenRange(lo: Int, hi: Int): TokenSet = BitSet(lo to hi *) val tokenString, debugString: Array[String] = new Array[String](maxToken + 1) @@ -221,10 +221,13 @@ object Tokens extends TokensCommon { final val openParensTokens = BitSet(LBRACE, LPAREN, LBRACKET) - final val canStartExprTokens3: TokenSet = - atomicExprTokens + final val canStartInfixExprTokens = + atomicExprTokens | openParensTokens - | BitSet(INDENT, QUOTE, IF, WHILE, FOR, NEW, TRY, THROW) + | BitSet(QUOTE, NEW) + + final val canStartExprTokens3: TokenSet = + canStartInfixExprTokens | BitSet(INDENT, IF, WHILE, FOR, TRY, THROW) final val canStartExprTokens2: TokenSet = canStartExprTokens3 | BitSet(DO) @@ -233,6 +236,8 @@ object Tokens extends TokensCommon { final val canStartTypeTokens: TokenSet = canStartInfixTypeTokens | BitSet(LBRACE) + final val canStartPatternTokens = atomicExprTokens | openParensTokens | BitSet(USCORE, QUOTE) + final val templateIntroTokens: TokenSet = BitSet(CLASS, TRAIT, OBJECT, ENUM, CASECLASS, CASEOBJECT) final val dclIntroTokens: TokenSet = BitSet(DEF, VAL, VAR, TYPE, GIVEN) diff --git a/compiler/src/dotty/tools/dotc/parsing/package.scala b/compiler/src/dotty/tools/dotc/parsing/package.scala index ee3ecda60aee..0e51b487d7c4 100644 --- a/compiler/src/dotty/tools/dotc/parsing/package.scala +++ b/compiler/src/dotty/tools/dotc/parsing/package.scala @@ -1,9 +1,9 @@ package dotty.tools.dotc -import util.Chars._ +import util.Chars.* import core.Names.Name import core.StdNames.nme -import core.NameOps._ +import core.NameOps.* package object parsing { diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParserCommon.scala b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParserCommon.scala index 0f7d426fbd28..803470fe85a5 100644 --- a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParserCommon.scala +++ b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParserCommon.scala @@ -9,7 +9,7 @@ package dotty.tools.dotc package parsing package xml -import Utility._ +import Utility.* import util.Chars.SU import scala.collection.BufferedIterator diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala index b3f41fab9eaa..22ef15b6f497 100644 --- a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala @@ -4,6 +4,7 @@ package parsing package xml import scala.language.unsafeNulls +import scala.compiletime.uninitialized import scala.collection.mutable import 
scala.collection.BufferedIterator @@ -11,13 +12,13 @@ import core.Contexts.Context import mutable.{ Buffer, ArrayBuffer, ListBuffer } import scala.util.control.ControlThrowable import util.Chars.SU -import Parsers._ -import util.Spans._ -import core._ -import Constants._ +import Parsers.* +import util.Spans.* +import core.* +import Constants.* import Decorators.{em, toMessage} import util.SourceFile -import Utility._ +import Utility.* // XXX/Note: many/most of the functions in here are almost direct cut and pastes @@ -38,7 +39,7 @@ import Utility._ */ object MarkupParsers { - import ast.untpd._ + import ast.untpd.* case object MissingEndTagControl extends ControlThrowable { override def getMessage: String = "start tag was here: " @@ -71,7 +72,7 @@ object MarkupParsers { if (ch == SU) throw TruncatedXMLControl else reportSyntaxError(msg) - var input : CharArrayReader = _ + var input : CharArrayReader = uninitialized def lookahead(): BufferedIterator[Char] = (input.buf drop input.charOffset).iterator.buffered @@ -276,7 +277,7 @@ object MarkupParsers { * | xmlTag1 '/' '>' */ def element: Tree = { - val start = curOffset + val start = curOffset // FIXME should be `curOffset - 1` (scalatest and tests/neg/i19100.scala must be updated) val (qname, attrMap) = xTag(()) if (ch == '/') { // empty element xToken("/>") @@ -434,7 +435,7 @@ object MarkupParsers { * | Name [S] '/' '>' */ def xPattern: Tree = { - var start = curOffset + var start = curOffset // FIXME should be `curOffset - 1` (scalatest and tests/neg/i19100.scala must be updated) val qname = xName debugLastStartElement = (start, qname) :: debugLastStartElement xSpaceOpt() diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/SymbolicXMLBuilder.scala b/compiler/src/dotty/tools/dotc/parsing/xml/SymbolicXMLBuilder.scala index 0e70cc077fa4..d1f2875064d4 100644 --- a/compiler/src/dotty/tools/dotc/parsing/xml/SymbolicXMLBuilder.scala +++ b/compiler/src/dotty/tools/dotc/parsing/xml/SymbolicXMLBuilder.scala @@ -4,14 +4,15 @@ package parsing package xml import scala.language.unsafeNulls +import scala.compiletime.uninitialized import scala.collection.mutable -import core._ -import Decorators._ +import core.* +import Decorators.* import Flags.Mutable -import Names._, StdNames._, ast.Trees._, ast.{tpd, untpd} -import Symbols._, Contexts._ -import util.Spans._ +import Names.*, StdNames.*, ast.Trees.*, ast.{tpd, untpd} +import Symbols.*, Contexts.* +import util.Spans.* import Parsers.Parser /** This class builds instances of `Tree` that represent XML. 
@@ -28,11 +29,11 @@ import Parsers.Parser class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(using Context) { import Constants.Constant - import untpd._ + import untpd.* import parser.atSpan - private[parsing] var isPattern: Boolean = _ + private[parsing] var isPattern: Boolean = uninitialized private object xmltypes extends ScalaTypeNames { val _Comment: TypeName = "Comment" @@ -66,7 +67,7 @@ class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(using Context) { import xmlterms.{_Null, __Elem, __Text, _buf, _md, _plus, _scope, _tmpscope, _xml} // convenience methods - private def LL[A](x: A*): List[List[A]] = List(List(x:_*)) + private def LL[A](x: A*): List[List[A]] = List(x.toList) private def const(x: Any) = Literal(Constant(x)) private def wild = Ident(nme.WILDCARD) private def wildStar = Ident(tpnme.WILDCARD_STAR) @@ -220,7 +221,7 @@ class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(using Context) { if (pre == null) (_scala_xml_UnprefixedAttribute, baseArgs) else (_scala_xml_PrefixedAttribute , const(pre) :: baseArgs) - Assign(Ident(_md), New(clazz, LL(attrArgs: _*))) + Assign(Ident(_md), New(clazz, LL(attrArgs*))) } def handlePrefixedAttribute(pre: String, key: String, value: Tree) = mkAttributeTree(pre, key, value) diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/Utility.scala b/compiler/src/dotty/tools/dotc/parsing/xml/Utility.scala index 87412cf6d69c..6577030ec671 100644 --- a/compiler/src/dotty/tools/dotc/parsing/xml/Utility.scala +++ b/compiler/src/dotty/tools/dotc/parsing/xml/Utility.scala @@ -130,7 +130,7 @@ object Utility { * See [4] and Appendix B of XML 1.0 specification. */ def isNameChar(ch: Char): Boolean = { - import java.lang.Character._ + import java.lang.Character.* // The constants represent groups Mc, Me, Mn, Lm, and Nd. isNameStart(ch) || (getType(ch).toByte match { @@ -151,7 +151,7 @@ object Utility { * See [3] and Appendix B of XML 1.0 specification */ def isNameStart(ch: Char): Boolean = { - import java.lang.Character._ + import java.lang.Character.* getType(ch).toByte match { case LOWERCASE_LETTER | diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugin.scala b/compiler/src/dotty/tools/dotc/plugins/Plugin.scala index 1baf3a06ad9e..ce77a5b9d97a 100644 --- a/compiler/src/dotty/tools/dotc/plugins/Plugin.scala +++ b/compiler/src/dotty/tools/dotc/plugins/Plugin.scala @@ -3,10 +3,10 @@ package plugins import scala.language.unsafeNulls -import core._ -import Contexts._ -import Phases._ -import dotty.tools.io._ +import core.* +import Contexts.* +import Phases.* +import dotty.tools.io.* import transform.MegaPhase.MiniPhase import java.io.InputStream @@ -43,6 +43,9 @@ sealed trait Plugin { /** A standard plugin can be inserted into the normal compilation pipeline */ trait StandardPlugin extends Plugin { /** Non-research plugins should override this method to return the phases + * + * The phases returned must be freshly constructed (not reused + * and returned again on subsequent calls). * * @param options commandline options to the plugin. * @return a list of phases to be added to the phase plan @@ -56,6 +59,9 @@ trait StandardPlugin extends Plugin { */ trait ResearchPlugin extends Plugin { /** Research plugins should override this method to return the new phase plan + * + * Any plugin phases included in the plan must be freshly constructed (not reused + * and returned again on subsequent calls). 
* * @param options commandline options to the plugin, `-P:plugname:opt1,opt2` becomes List(opt1, opt2) * @param plan the given phase plan diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala index c44fe4cf59b4..05b9f0cf75d7 100644 --- a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala +++ b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala @@ -3,13 +3,16 @@ package plugins import scala.language.unsafeNulls -import core._ -import Contexts._ +import core.* +import Contexts.* import Decorators.em import config.{ PathResolver, Feature } -import dotty.tools.io._ -import Phases._ +import dotty.tools.io.* +import Phases.* import config.Printers.plugins.{ println => debug } +import config.Properties + +import scala.compiletime.uninitialized /** Support for run-time loading of compiler plugins. * @@ -44,7 +47,7 @@ trait Plugins { goods map (_.get) } - private var _roughPluginsList: List[Plugin] = _ + private var _roughPluginsList: List[Plugin] = uninitialized protected def roughPluginsList(using Context): List[Plugin] = if (_roughPluginsList == null) { _roughPluginsList = loadRoughPluginsList @@ -96,7 +99,7 @@ trait Plugins { plugs } - private var _plugins: List[Plugin] = _ + private var _plugins: List[Plugin] = uninitialized def plugins(using Context): List[Plugin] = if (_plugins == null) { _plugins = loadPlugins @@ -126,7 +129,7 @@ trait Plugins { val updatedPlan = Plugins.schedule(plan, pluginPhases) // add research plugins - if (Feature.isExperimentalEnabled) + if Properties.experimental && !ctx.settings.YnoExperimental.value then plugins.collect { case p: ResearchPlugin => p }.foldRight(updatedPlan) { (plug, plan) => plug.init(options(plug), plan) } diff --git a/compiler/src/dotty/tools/dotc/printing/Formatting.scala b/compiler/src/dotty/tools/dotc/printing/Formatting.scala index 3f32b29654c9..02f470324e8a 100644 --- a/compiler/src/dotty/tools/dotc/printing/Formatting.scala +++ b/compiler/src/dotty/tools/dotc/printing/Formatting.scala @@ -6,12 +6,12 @@ import scala.language.unsafeNulls import scala.collection.mutable -import core._ -import Texts._, Types._, Flags._, Symbols._, Contexts._ -import Decorators._ +import core.* +import Texts.*, Types.*, Flags.*, Symbols.*, Contexts.* +import Decorators.* import reporting.Message import util.DiffUtil -import Highlighting._ +import Highlighting.* object Formatting { @@ -144,7 +144,7 @@ object Formatting { case Nil => ("", Nil) } val (args1, suffixes1) = args.lazyZip(suffixes).map(treatArg(_, _)).unzip - new StringContext(prefix :: suffixes1.toList: _*).s(args1: _*) + new StringContext(prefix :: suffixes1.toList*).s(args1*) } } diff --git a/compiler/src/dotty/tools/dotc/printing/Highlighting.scala b/compiler/src/dotty/tools/dotc/printing/Highlighting.scala index ceb5afdea750..c9b3e2a5aa83 100644 --- a/compiler/src/dotty/tools/dotc/printing/Highlighting.scala +++ b/compiler/src/dotty/tools/dotc/printing/Highlighting.scala @@ -3,7 +3,7 @@ package dotc package printing import scala.collection.mutable -import core.Contexts._ +import core.Contexts.* object Highlighting { diff --git a/compiler/src/dotty/tools/dotc/printing/MessageLimiter.scala b/compiler/src/dotty/tools/dotc/printing/MessageLimiter.scala index c9ac4a5af4ce..24f02f37956e 100644 --- a/compiler/src/dotty/tools/dotc/printing/MessageLimiter.scala +++ b/compiler/src/dotty/tools/dotc/printing/MessageLimiter.scala @@ -2,8 +2,8 @@ package dotty.tools package dotc package printing -import core._ -import Contexts._ +import core.* 
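Per the contract documented on `StandardPlugin` earlier in this hunk, `init` must return freshly constructed phases on every call. A minimal sketch of a compliant plugin; the names `MyPhase` and `MyPlugin` are illustrative, and this assumes the `init(options)` entry point shown above:

```scala
import dotty.tools.dotc.plugins.{PluginPhase, StandardPlugin}

class MyPhase extends PluginPhase:
  val phaseName = "myPhase"

class MyPlugin extends StandardPlugin:
  val name = "myPlugin"
  val description = "illustrative plugin"
  override def init(options: List[String]): List[PluginPhase] =
    new MyPhase :: Nil // a fresh instance per call, never a cached one
```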
+import Contexts.* import util.Property import Texts.Text diff --git a/compiler/src/dotty/tools/dotc/printing/OutlinePrinter.scala b/compiler/src/dotty/tools/dotc/printing/OutlinePrinter.scala new file mode 100644 index 000000000000..cd8267355201 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/printing/OutlinePrinter.scala @@ -0,0 +1,56 @@ +package dotty.tools +package dotc +package printing + +import core.* +import Texts.* +import Flags.* +import NameOps.* +import StdNames.* +import Contexts.* +import Symbols.* +import ast.{Trees, untpd} +import Trees.* + +object OutlinePrinter: + def apply(_ctx: Context): Printer = new OutlinePrinter(_ctx) + +/** A printer that elides known standard tree forms from the rhs of def and val. + * Typically used for printing Java trees which elide the rhs. + * Note that there may still be some differences if you compare before and after pickling. + */ +class OutlinePrinter private (_ctx: Context) extends RefinedPrinter(_ctx) { + + /** print the symbol infos of type params for the fake java constructor */ + def shouldShowInfo(tsym: Symbol): Boolean = + tsym != NoSymbol && { + val ctor = tsym.owner + ctor.isAllOf(JavaDefined | PrivateLocal | Invisible) && ctor.isConstructor + } + + override def paramsText[T <: Untyped](params: ParamClause[T]): Text = (params: @unchecked) match + case untpd.TypeDefs(tparams) if shouldShowInfo(tparams.head.symbol) => + "[" ~ toText(tparams.map(_.symbol.info), ", ") ~ "]" + case _ => super.paramsText(params) + + /* Typical patterns seen in output of typer for Java code, plus the output of unpickling an ELIDED tree */ + def isElidableExpr[T <: Untyped](tree: Tree[T]): Boolean = tree match { + case tree if tree.isEmpty => false + case tree: Ident[T] if tree.name == nme.WILDCARD => true // `ELIDED exprType` + case tree: Literal[T] => true // e.g. `()` + case tree: Select[T] if tree.symbol == defn.Predef_undefined => true // e.g. `Predef.???` + case Apply(Select(tree: New[T], nme.CONSTRUCTOR), Nil) + if tree.tpt.typeOpt.typeSymbol.is(Module) => true // e.g. 
`new foo.Foo$()` (rhs of a module val) case _ => sys.error(s"Unexpected tree in OutlinePrinter: ${tree.show}, $tree") false } + + override protected def rhsValDef[T <: Untyped](tree: ValDef[T]): Text = + if isElidableExpr(tree.rhs) then " = " ~ "elided" ~ "[" ~ toText(tree.tpt) ~ "]" + else super.rhsValDef(tree) + + override protected def rhsDefDef[T <: Untyped](tree: DefDef[T]): Text = + if isElidableExpr(tree.rhs) then " = " ~ "elided" ~ "[" ~ toText(tree.tpt) ~ "]" + else super.rhsDefDef(tree) +} diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index 700b3fbf525f..83e6a3b204c3 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -1,21 +1,21 @@ package dotty.tools.dotc package printing -import core._ -import Texts._, Types._, Flags._, Names._, Symbols._, NameOps._, Constants._, Denotations._ -import StdNames._ -import Contexts._ +import core.* +import Texts.*, Types.*, Flags.*, Names.*, Symbols.*, NameOps.*, Constants.*, Denotations.* +import StdNames.* +import Contexts.* import Scopes.Scope, Denotations.Denotation, Annotations.Annotation import StdNames.nme -import ast.Trees._ -import typer.Implicits._ +import ast.Trees.* +import typer.Implicits.* import typer.ImportInfo import Variances.varianceSign import util.SourcePosition import scala.util.control.NonFatal import scala.annotation.switch import config.{Config, Feature} -import cc.{CapturingType, EventuallyCapturingType, CaptureSet, isBoxed} +import cc.{CapturingType, RetainingType, CaptureSet, ReachCapability, isBoxed, levelOwner, retainedElems} class PlainPrinter(_ctx: Context) extends Printer { @@ -150,10 +150,23 @@ class PlainPrinter(_ctx: Context) extends Printer { + defn.FromJavaObjectSymbol def toTextCaptureSet(cs: CaptureSet): Text = - if printDebug && !cs.isConst then cs.toString - else if ctx.settings.YccDebug.value then cs.show - else if !cs.isConst && cs.elems.isEmpty then "?" - else "{" ~ Text(cs.elems.toList.map(toTextCaptureRef), ", ") ~ "}" + if printDebug && ctx.settings.YccDebug.value && !cs.isConst then cs.toString + else if cs == CaptureSet.Fluid then "<fluid>" + else + val core: Text = + if !cs.isConst && cs.elems.isEmpty then "?" + else "{" ~ Text(cs.elems.toList.map(toTextCaptureRef), ", ") ~ "}" + // ~ Str("?").provided(!cs.isConst) + core ~ cs.optionalInfo + + private def toTextRetainedElem[T <: Untyped](ref: Tree[T]): Text = ref match + case ref: RefTree[?] if ref.typeOpt.exists => + toTextCaptureRef(ref.typeOpt) + case _ => + toText(ref) + + private def toTextRetainedElems[T <: Untyped](refs: List[Tree[T]]): Text = + "{" ~ Text(refs.map(ref => toTextRetainedElem(ref)), ", ") ~ "}" /** Print capturing type, overridden in RefinedPrinter to account for * capturing function types. 
@@ -163,23 +176,26 @@ class PlainPrinter(_ctx: Context) extends Printer { boxText ~ toTextLocal(parent) ~ "^" ~ (refsText provided refsText != rootSetText) - final protected def rootSetText = Str("{cap}") + final protected def rootSetText = Str("{cap}") // TODO Use disambiguation def toText(tp: Type): Text = controlled { homogenize(tp) match { case tp: TypeType => toTextRHS(tp) case tp: TermRef - if !tp.denotationIsCurrent && !homogenizedView || // always print underlying when testing picklers - tp.symbol.is(Module) || tp.symbol.name == nme.IMPORT => + if !tp.denotationIsCurrent + && !homogenizedView // always print underlying when testing picklers + && !tp.isRootCapability + || tp.symbol.is(Module) + || tp.symbol.name == nme.IMPORT => toTextRef(tp) ~ ".type" case tp: TermRef if tp.denot.isOverloaded => "<overloaded " ~ toTextRef(tp) ~ ">" case tp: TypeRef => if (printWithoutPrefix.contains(tp.symbol)) - toText(tp.name) + selectionString(tp) else - toTextPrefix(tp.prefix) ~ selectionString(tp) + toTextPrefixOf(tp) ~ selectionString(tp) case tp: TermParamRef => ParamRefNameString(tp) ~ lambdaHash(tp.binder) ~ ".type" case tp: TypeParamRef => @@ -219,10 +235,16 @@ class PlainPrinter(_ctx: Context) extends Printer { keywordStr(" match ") ~ "{" ~ casesText ~ "}" ~ (" <: " ~ toText(bound) provided !bound.isAny) }.close - case tp @ EventuallyCapturingType(parent, refs) => + case tp @ CapturingType(parent, refs) => val boxText: Text = Str("box ") provided tp.isBoxed //&& ctx.settings.YccDebug.value - val refsText = if refs.isUniversal then rootSetText else toTextCaptureSet(refs) + val showAsCap = refs.isUniversal && (refs.elems.size == 1 || !printDebug) + val refsText = if showAsCap then rootSetText else toTextCaptureSet(refs) toTextCapturing(parent, refsText, boxText) + case tp @ RetainingType(parent, refs) => + val refsText = refs match + case ref :: Nil if ref.symbol == defn.captureRoot => rootSetText + case _ => toTextRetainedElems(refs) + toTextCapturing(parent, refsText, "") ~ Str("R").provided(printDebug) case tp: PreviousErrorType if ctx.settings.XprintTypes.value => "" // do not print previously reported error messages because they may try to print this error type again recursively case tp: ErrorType => @@ -245,8 +267,10 @@ class PlainPrinter(_ctx: Context) extends Printer { } case ExprType(restp) => def arrowText: Text = restp match - case ct @ EventuallyCapturingType(parent, refs) if ct.annot.symbol == defn.RetainsByNameAnnot => - if refs.isUniversal then Str("=>") else Str("->") ~ toTextCaptureSet(refs) + case AnnotatedType(parent, ann) if ann.symbol == defn.RetainsByNameAnnot => + val refs = ann.tree.retainedElems + if refs.exists(_.symbol == defn.captureRoot) then Str("=>") + else Str("->") ~ toTextRetainedElems(refs) case _ => if Feature.pureFunsEnabled then "->" else "=>" changePrec(GlobalPrec)(arrowText ~ " " ~ toText(restp)) @@ -296,10 +320,10 @@ class PlainPrinter(_ctx: Context) extends Printer { "(" ~ toTextRef(tp) ~ " : " ~ toTextGlobal(tp.underlying) ~ ")" protected def paramsText(lam: LambdaType): Text = { - val erasedParams = lam.erasedParams - def paramText(name: Name, tp: Type, erased: Boolean) = - keywordText("erased ").provided(erased) ~ toText(name) ~ lambdaHash(lam) ~ toTextRHS(tp, isParameter = true) - Text(lam.paramNames.lazyZip(lam.paramInfos).lazyZip(erasedParams).map(paramText), ", ") + def paramText(ref: ParamRef) = + val erased = ref.underlying.hasAnnotation(defn.ErasedParamAnnot) + keywordText("erased ").provided(erased) ~ ParamRefNameString(ref) ~ lambdaHash(lam) ~ toTextRHS(ref.underlying, 
isParameter = true) + Text(lam.paramRefs.map(paramText), ", ") } protected def ParamRefNameString(name: Name): String = nameString(name) @@ -323,7 +347,7 @@ class PlainPrinter(_ctx: Context) extends Printer { */ protected def idString(sym: Symbol): String = (if (showUniqueIds || Printer.debugPrintUnique) "#" + sym.id else "") + - (if (showNestingLevel) "%" + sym.nestingLevel else "") + (if showNestingLevel then "%" + sym.nestingLevel else "") def nameString(sym: Symbol): String = simpleNameString(sym) + idString(sym) // + "<" + (if (sym.exists) sym.owner else "") + ">" @@ -353,7 +377,7 @@ class PlainPrinter(_ctx: Context) extends Printer { def toTextRef(tp: SingletonType): Text = controlled { tp match { case tp: TermRef => - toTextPrefix(tp.prefix) ~ selectionString(tp) + toTextPrefixOf(tp) ~ selectionString(tp) case tp: ThisType => nameString(tp.cls) + ".this" case SuperType(thistpe: SingletonType, _) => @@ -363,7 +387,7 @@ class PlainPrinter(_ctx: Context) extends Printer { case tp @ ConstantType(value) => toText(value) case pref: TermParamRef => - nameString(pref.binder.paramNames(pref.paramNum)) ~ lambdaHash(pref.binder) + ParamRefNameString(pref) ~ lambdaHash(pref.binder) case tp: RecThis => val idx = openRecs.reverse.indexOf(tp.binder) if (idx >= 0) selfRecName(idx + 1) @@ -375,24 +399,25 @@ class PlainPrinter(_ctx: Context) extends Printer { } } - /** The string representation of this type used as a prefix, including separator */ - def toTextPrefix(tp: Type): Text = controlled { - homogenize(tp) match { - case NoPrefix => "" - case tp: SingletonType => toTextRef(tp) ~ "." - case tp => trimPrefix(toTextLocal(tp)) ~ "#" - } - } - def toTextCaptureRef(tp: Type): Text = homogenize(tp) match case tp: TermRef if tp.symbol == defn.captureRoot => Str("cap") case tp: SingletonType => toTextRef(tp) + case ReachCapability(tp1) => toTextRef(tp1) ~ "*" case _ => toText(tp) protected def isOmittablePrefix(sym: Symbol): Boolean = defn.unqualifiedOwnerTypes.exists(_.symbol == sym) || isEmptyPrefix(sym) + /** The string representation of the prefix of `tp`, including the separator */ + def toTextPrefixOf(tp: NamedType): Text = controlled { + homogenize(tp.prefix) match { + case NoPrefix => "" + case tp: SingletonType => toTextRef(tp) ~ "."
+ case tp => trimPrefix(toTextLocal(tp)) ~ "#" + } + } + protected def isEmptyPrefix(sym: Symbol): Boolean = sym.isEffectiveRoot || sym.isAnonymousClass || sym.name.isReplWrapperName @@ -557,6 +582,8 @@ class PlainPrinter(_ctx: Context) extends Printer { def extendedLocationText(sym: Symbol): Text = if (!sym.exists) "" + else if isEmptyPrefix(sym.owner) then + " in the empty package" else { def recur(ownr: Symbol, innerLocation: String): Text = { def nextOuter(innerKind: String): Text = diff --git a/compiler/src/dotty/tools/dotc/printing/Printer.scala b/compiler/src/dotty/tools/dotc/printing/Printer.scala index ab0c867ec31f..8687925ed5fb 100644 --- a/compiler/src/dotty/tools/dotc/printing/Printer.scala +++ b/compiler/src/dotty/tools/dotc/printing/Printer.scala @@ -2,14 +2,15 @@ package dotty.tools package dotc package printing -import core._ -import Texts._, ast.Trees._ -import Types.{Type, SingletonType, LambdaParam}, +import core.* +import Texts.*, ast.Trees.* +import Types.{Type, SingletonType, LambdaParam, NamedType}, Symbols.Symbol, Scopes.Scope, Constants.Constant, Names.Name, Denotations._, Annotations.Annotation, Contexts.Context import typer.Implicits.* import util.SourcePosition import typer.ImportInfo +import cc.CaptureSet import scala.annotation.internal.sharable @@ -101,11 +102,14 @@ abstract class Printer { def toTextRef(tp: SingletonType): Text /** Textual representation of a prefix of some reference, ending in `.` or `#` */ - def toTextPrefix(tp: Type): Text + def toTextPrefixOf(tp: NamedType): Text /** Textual representation of a reference in a capture set */ def toTextCaptureRef(tp: Type): Text + /** Textual representation of a capture set */ + def toTextCaptureSet(cs: CaptureSet): Text + /** Textual representation of symbol's declaration */ def dclText(sym: Symbol): Text diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 51aaa0932e5e..de9e21aa4146 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -2,34 +2,33 @@ package dotty.tools package dotc package printing -import core._ +import core.* import Constants.* -import Texts._ -import Types._ -import Flags._ -import Names._ -import Symbols._ -import NameOps._ +import Texts.* +import Types.* +import Flags.* +import Names.* +import Symbols.* +import NameOps.* import TypeErasure.ErasedValueType -import Contexts._ +import Contexts.* import Annotations.Annotation -import Denotations._ -import SymDenotations._ +import Denotations.* +import SymDenotations.* import StdNames.{nme, tpnme} import ast.{Trees, tpd, untpd} import typer.{Implicits, Namer, Applications} -import typer.ProtoTypes._ -import Trees._ -import TypeApplications._ +import typer.ProtoTypes.* +import Trees.* +import TypeApplications.* import NameKinds.{WildcardParamName, DefaultGetterName} import util.Chars.isOperatorPart -import transform.TypeUtils._ -import transform.SymUtils._ import config.{Config, Feature} import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.ast.untpd.{MemberDef, Modifiers, PackageDef, RefTree, Template, TypeDef, ValOrDefDef} -import cc.{CaptureSet, toCaptureSet, IllegalCaptureRef} +import cc.{CaptureSet, CapturingType, toCaptureSet, IllegalCaptureRef} +import dotty.tools.dotc.parsing.JavaParsers class RefinedPrinter(_ctx: Context) extends
PlainPrinter(_ctx) { nameString(if (ctx.property(XprintMode).isEmpty) sym.initial.name else sym.name) override def fullNameString(sym: Symbol): String = - if !sym.exists || isEmptyPrefix(sym.effectiveOwner) then nameString(sym) + if !sym.exists + || isEmptyPrefix(sym.effectiveOwner) + || !homogenizedView && !sym.is(Package) && isOmittablePrefix(sym.effectiveOwner) + then nameString(sym) else super.fullNameString(sym) override protected def fullNameOwner(sym: Symbol): Symbol = { @@ -106,26 +108,35 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { if (tp.cls.isAnonymousClass) keywordStr("this") if (tp.cls.is(ModuleClass)) fullNameString(tp.cls.sourceModule) else super.toTextRef(tp) + case tp: TermRef if !printDebug => + if tp.symbol.is(Package) then fullNameString(tp.symbol) + else super.toTextRef(tp) case _ => super.toTextRef(tp) } } - override def toTextPrefix(tp: Type): Text = controlled { + override def toTextPrefixOf(tp: NamedType): Text = controlled { def isOmittable(sym: Symbol) = if printDebug then false else if homogenizedView then isEmptyPrefix(sym) // drop <empty> and anonymous classes, but not scala, Predef. else if sym.isPackageObject then isOmittablePrefix(sym.owner) else isOmittablePrefix(sym) - tp match { - case tp: ThisType if isOmittable(tp.cls) => - "" - case tp @ TermRef(pre, _) => - val sym = tp.symbol - if sym.isPackageObject && !homogenizedView && !printDebug then toTextPrefix(pre) - else if (isOmittable(sym)) "" - else super.toTextPrefix(tp) - case _ => super.toTextPrefix(tp) + def isSkippedPackageObject(sym: Symbol) = + sym.isPackageObject && !homogenizedView && !printDebug + + tp.prefix match { + case thisType: ThisType => + val sym = thisType.cls + if isSkippedPackageObject(sym) then toTextPrefixOf(sym.typeRef) + else if isOmittable(sym) then "" + else super.toTextPrefixOf(tp) + case termRef: TermRef => + val sym = termRef.symbol + if isSkippedPackageObject(sym) then toTextPrefixOf(termRef) + else if isOmittable(sym) then "" + else super.toTextPrefixOf(tp) + case _ => super.toTextPrefixOf(tp) } } @@ -168,7 +179,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { ~ " " ~ argText(args.last) } - private def toTextMethodAsFunction(info: Type, isPure: Boolean, refs: Text = Str("")): Text = info match + protected def toTextMethodAsFunction(info: Type, isPure: Boolean, refs: Text = Str("")): Text = info match case info: MethodType => val capturesRoot = refs == rootSetText changePrec(GlobalPrec) { @@ -228,7 +239,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def appliedText(tp: Type): Text = tp match case tp @ AppliedType(tycon, args) => - tp.tupleElementTypes match + tp.tupleElementTypesUpTo(200, normalize = false) match case Some(types) if types.size >= 2 && !printDebug => toTextTuple(types) case _ => val tsym = tycon.typeSymbol @@ -259,9 +270,12 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { if !printDebug && appliedText(tp.asInstanceOf[HKLambda].resType).isEmpty => // don't eta contract if the application would be printed specially toText(tycon) - case tp: RefinedType if defn.isFunctionOrPolyType(tp) && !printDebug => + case tp: RefinedType if defn.isFunctionType(tp) && !printDebug => toTextMethodAsFunction(tp.refinedInfo, - isPure = Feature.pureFunsEnabled && !tp.typeSymbol.name.isImpureFunction) + isPure = Feature.pureFunsEnabled && !tp.typeSymbol.name.isImpureFunction, + refs = tp.parent match + case CapturingType(_, cs) => toTextCaptureSet(cs) + case _ => "") case tp: TypeRef => if
(tp.symbol.isAnonymousClass && !showUniqueIds) toText(tp.info) @@ -295,7 +309,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { toText(tp.argType) ~ " ?=>? " ~ toText(tp.resultType) case tp @ FunProto(args, resultType) => "[applied to (" - ~ keywordText("using ").provided(tp.isContextualMethod) + ~ keywordText("using ").provided(tp.applyKind == ApplyKind.Using) ~ argsTreeText(args) ~ ") returning " ~ toText(resultType) @@ -335,7 +349,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } protected def toTextCore[T <: Untyped](tree: Tree[T]): Text = { - import untpd._ + import untpd.* def isLocalThis(tree: Tree) = tree.typeOpt match { case tp: ThisType => tp.cls == ctx.owner.enclosingClass @@ -421,8 +435,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case id @ Ident(name) => val txt = tree.typeOpt match { case tp: NamedType if name != nme.WILDCARD => - val pre = if (tp.symbol.is(JavaStatic)) tp.prefix.widen else tp.prefix - toTextPrefix(pre) ~ withPos(selectionString(tp), tree.sourcePos) + toTextPrefixOf(tp) ~ withPos(selectionString(tp), tree.sourcePos) case _ => toText(name) } @@ -529,7 +542,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case tree @ Inlined(call, bindings, body) => val bodyText = if bindings.isEmpty then toText(body) else blockText(bindings :+ body) if homogenizedView || !ctx.settings.XprintInline.value then bodyText - else if call.isEmpty then stringText("{{") ~ stringText("/* inlined from outside */") ~ bodyText ~ stringText("}}") + else if tree.inlinedFromOuterScope then stringText("{{") ~ stringText("/* inlined from outside */") ~ bodyText ~ stringText("}}") else keywordText("{{") ~ keywordText("/* inlined from ") ~ toText(call) ~ keywordText(" */") ~ bodyText ~ keywordText("}}") case tpt: untpd.DerivedTypeTree => "" @@ -631,7 +644,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { try changePrec(GlobalPrec)(toText(arg) ~ "^" ~ toTextCaptureSet(captureSet)) catch case ex: IllegalCaptureRef => toTextAnnot if annot.symbol.maybeOwner == defn.RetainsAnnot - && Feature.ccEnabled && Config.printCaptureSetsAsPrefix && !printDebug + && Feature.ccEnabled && !printDebug + && Phases.checkCapturesPhase.exists // might be missing on -Ytest-pickler then toTextRetainsAnnot else toTextAnnot case EmptyTree => @@ -693,7 +707,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { val opPrec = parsing.precedence(op.name) changePrec(opPrec) { toText(l) ~ " " ~ toText(op) ~ " " ~ toText(r) } case PostfixOp(l, op) => - changePrec(InfixPrec) { toText(l) ~ " " ~ toText(op) } + if op.name == nme.CC_REACH then + changePrec(DotPrec) { toText(l) ~ "*" } + else + changePrec(InfixPrec) { toText(l) ~ " " ~ toText(op) } case PrefixOp(op, r) => changePrec(DotPrec) { toText(op) ~ " " ~ toText(r) } case Parens(t) => @@ -728,13 +745,21 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { keywordStr("macro ") ~ toTextGlobal(call) case tree @ Quote(body, tags) => val tagsText = (keywordStr("<") ~ toTextGlobal(tags, ", ") ~ keywordStr(">")).provided(tree.tags.nonEmpty) - val exprTypeText = (keywordStr("[") ~ toTextGlobal(tree.bodyType) ~ keywordStr("]")).provided(printDebug && tree.typeOpt.exists) + val exprTypeText = (keywordStr("[") ~ toTextGlobal(tpd.bodyType(tree.asInstanceOf[tpd.Quote])) ~ keywordStr("]")).provided(printDebug && tree.typeOpt.exists) val open = if (body.isTerm) keywordStr("{") else keywordStr("[") val close = if (body.isTerm) keywordStr("}") else 
keywordStr("]") keywordStr("'") ~ tagsText ~ exprTypeText ~ open ~ toTextGlobal(body) ~ close case Splice(expr) => val spliceTypeText = (keywordStr("[") ~ toTextGlobal(tree.typeOpt) ~ keywordStr("]")).provided(printDebug && tree.typeOpt.exists) keywordStr("$") ~ spliceTypeText ~ keywordStr("{") ~ toTextGlobal(expr) ~ keywordStr("}") + case tree @ QuotePattern(bindings, body, quotes) => + val quotesText = (keywordStr("<") ~ toText(quotes) ~ keywordStr(">")).provided(printDebug) + val bindingsText = bindings.map(binding => { + keywordStr("type ") ~ toText(binding.symbol.name) ~ toText(binding.symbol.info) ~ "; " + }).reduceLeft(_ ~~ _).provided(bindings.nonEmpty) + val open = if (body.isTerm) keywordStr("{") else keywordStr("[") + val close = if (body.isTerm) keywordStr("}") else keywordStr("]") + keywordStr("'") ~ quotesText ~ open ~ bindingsText ~ toTextGlobal(body) ~ close case SplicePattern(pattern, args) => val spliceTypeText = (keywordStr("[") ~ toTextGlobal(tree.typeOpt) ~ keywordStr("]")).provided(printDebug && tree.typeOpt.exists) keywordStr("$") ~ spliceTypeText ~ { @@ -757,13 +782,13 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { override protected def toTextCapturing(tp: Type, refsText: Text, boxText: Text): Text = tp match case tp: AppliedType if defn.isFunctionSymbol(tp.typeSymbol) && !printDebug => boxText ~ toTextFunction(tp, refsText) - case tp: RefinedType if defn.isFunctionOrPolyType(tp) && !printDebug => + case tp: RefinedType if defn.isFunctionType(tp) && !printDebug => boxText ~ toTextMethodAsFunction(tp.refinedInfo, isPure = !tp.typeSymbol.name.isImpureFunction, refsText) case _ => super.toTextCapturing(tp, refsText, boxText) override def toText[T <: Untyped](tree: Tree[T]): Text = controlled { - import untpd._ + import untpd.* var txt = toTextCore(tree) @@ -901,12 +926,12 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { dclTextOr(tree) { modText(tree.mods, tree.symbol, keywordStr(if (tree.mods.is(Mutable)) "var" else "val"), isType = false) ~~ valDefText(nameIdText(tree)) ~ optAscription(tree.tpt) ~ - withEnclosingDef(tree) { optText(tree.rhs)(" = " ~ _) } + withEnclosingDef(tree) { rhsValDef(tree) } } } protected def defDefToText[T <: Untyped](tree: DefDef[T]): Text = { - import untpd._ + import untpd.* dclTextOr(tree) { val defKeyword = modText(tree.mods, tree.symbol, keywordStr("def"), isType = false) val isExtension = tree.hasType && tree.symbol.is(ExtensionMethod) @@ -958,11 +983,19 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { coreSig ~ optAscription(tree.tpt) - ~ optText(tree.rhs)(" = " ~ keywordText("macro ").provided(tree.symbol.isScala2Macro) ~ _) + ~ rhsDefDef(tree) } } } + /** Inspect the rhs of a ValDef, overridden in OutlinePrinter */ + protected def rhsValDef[T <: Untyped](tree: ValDef[T]): Text = + optText(tree.rhs)(" = " ~ _) + + /** Inspect the rhs of a DefDef, overridden in OutlinePrinter */ + protected def rhsDefDef[T <: Untyped](tree: DefDef[T]): Text = + optText(tree.rhs)(" = " ~ keywordText("macro ").provided(tree.symbol.isScala2Macro) ~ _) + protected def toTextTemplate(impl: Template, ofNew: Boolean = false): Text = { val Template(constr @ DefDef(_, paramss, _, _), _, self, _) = impl val tparamsTxt = withEnclosingDef(constr) { @@ -988,10 +1021,18 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { val (params, rest) = impl.body partition { case stat: TypeDef => stat.symbol.is(Param) case stat: ValOrDefDef => - stat.symbol.is(ParamAccessor) && !stat.symbol.isSetter + 
val sym = stat.symbol + sym.is(ParamAccessor) && !sym.isSetter + || sym.isAllOf(JavaParsers.fakeFlags | Param) case _ => false } - params ::: rest + val params0 = + if constr.symbol.isAllOf(JavaParsers.fakeFlags) then + // filter out fake param accessors + params.filterNot(_.symbol.isAllOf(JavaParsers.fakeFlags | Param)) + else + params + params0 ::: rest } else impl.body @@ -1093,13 +1134,18 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { fullNameString(sym) else if (sym.is(ModuleClass) && sym.isPackageObject && sym.name.stripModuleClassSuffix == tpnme.PACKAGE) nameString(sym.owner.name) + else if (sym.is(ModuleClass) && sym.isTopLevelDefinitionsObject) + nameString(sym.owner.name) else if (sym.is(ModuleClass)) nameString(sym.name.stripModuleClassSuffix) + idString(sym) else if (hasMeaninglessName(sym)) simpleNameString(sym.owner) + idString(sym) else nameString(sym) - (keywordText(kindString(sym)) ~~ { + + if sym.is(ModuleClass) && sym.isTopLevelDefinitionsObject then + "the top-level definitions in package " + nameString(sym.owner.name) + else (keywordText(kindString(sym)) ~~ { if (sym.isAnonymousClass) toTextParents(sym.info.parents) ~~ "{...}" else diff --git a/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala b/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala index ea3afef27fae..f02cbf159224 100644 --- a/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala @@ -2,13 +2,13 @@ package dotty.tools.dotc.printing import dotty.tools.dotc.core.Constants import dotty.tools.dotc.core.Constants.Constant -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.NameOps._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.NameOps.* import dotty.tools.dotc.core.Names.Name -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Types._ -import dotty.tools.dotc.printing.Texts._ +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Types.* +import dotty.tools.dotc.printing.Texts.* class ReplPrinter(_ctx: Context) extends RefinedPrinter(_ctx) { diff --git a/compiler/src/dotty/tools/dotc/printing/Showable.scala b/compiler/src/dotty/tools/dotc/printing/Showable.scala index 4a0e68861a1a..4480aa9c76a4 100644 --- a/compiler/src/dotty/tools/dotc/printing/Showable.scala +++ b/compiler/src/dotty/tools/dotc/printing/Showable.scala @@ -1,9 +1,9 @@ package dotty.tools.dotc package printing -import core._ +import core.* -import Contexts._, Texts._, Decorators._ +import Contexts.*, Texts.*, Decorators.* import config.Config.summarizeDepth trait Showable extends Any { diff --git a/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala b/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala index 53e6b9472f5e..6f65320d2c8e 100644 --- a/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala +++ b/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala @@ -4,11 +4,11 @@ package printing import scala.language.unsafeNulls import dotty.tools.dotc.ast.untpd -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.StdNames._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.StdNames.* import dotty.tools.dotc.parsing.Parsers.Parser import dotty.tools.dotc.parsing.Scanners.Scanner -import dotty.tools.dotc.parsing.Tokens._ +import dotty.tools.dotc.parsing.Tokens.* import dotty.tools.dotc.reporting.Reporter import 
dotty.tools.dotc.util.Spans.Span import dotty.tools.dotc.util.SourceFile @@ -83,11 +83,11 @@ object SyntaxHighlighting { } } - for (span <- scanner.commentSpans) - highlightPosition(span, CommentColor) + for (comment <- scanner.comments) + highlightPosition(comment.span, CommentColor) object TreeHighlighter extends untpd.UntypedTreeTraverser { - import untpd._ + import untpd.* def ignored(tree: NameTree) = { val name = tree.name.toTermName diff --git a/compiler/src/dotty/tools/dotc/profile/AsyncHelper.scala b/compiler/src/dotty/tools/dotc/profile/AsyncHelper.scala deleted file mode 100644 index 61bee4d9f32a..000000000000 --- a/compiler/src/dotty/tools/dotc/profile/AsyncHelper.scala +++ /dev/null @@ -1,142 +0,0 @@ -package dotty.tools.dotc.profile - -import scala.language.unsafeNulls - -import java.util.concurrent.ThreadPoolExecutor.AbortPolicy -import java.util.concurrent._ -import java.util.concurrent.atomic.AtomicInteger - -import dotty.tools.dotc.core.Phases.Phase -import dotty.tools.dotc.core.Contexts._ - -sealed trait AsyncHelper { - - def newUnboundedQueueFixedThreadPool - (nThreads: Int, - shortId: String, priority : Int = Thread.NORM_PRIORITY) : ThreadPoolExecutor - def newBoundedQueueFixedThreadPool - (nThreads: Int, maxQueueSize: Int, rejectHandler: RejectedExecutionHandler, - shortId: String, priority : Int = Thread.NORM_PRIORITY) : ThreadPoolExecutor -} - - -object AsyncHelper { - def apply(phase: Phase)(using Context): AsyncHelper = ctx.profiler match { - case NoOpProfiler => new BasicAsyncHelper(phase) - case r: RealProfiler => new ProfilingAsyncHelper(phase, r) - } - - private abstract class BaseAsyncHelper(phase: Phase)(using Context) extends AsyncHelper { - val baseGroup = new ThreadGroup(s"dotc-${phase.phaseName}") - private def childGroup(name: String) = new ThreadGroup(baseGroup, name) - - protected def wrapRunnable(r: Runnable, shortId:String): Runnable - - protected class CommonThreadFactory(shortId: String, - daemon: Boolean = true, - priority: Int) extends ThreadFactory { - private val group: ThreadGroup = childGroup(shortId) - private val threadNumber: AtomicInteger = new AtomicInteger(1) - private val namePrefix = s"${baseGroup.getName}-$shortId-" - - override def newThread(r: Runnable): Thread = { - val wrapped = wrapRunnable(r, shortId) - val t: Thread = new Thread(group, wrapped, namePrefix + threadNumber.getAndIncrement, 0) - if (t.isDaemon != daemon) t.setDaemon(daemon) - if (t.getPriority != priority) t.setPriority(priority) - t - } - } - } - - private final class BasicAsyncHelper(phase: Phase)(using Context) extends BaseAsyncHelper(phase) { - - override def newUnboundedQueueFixedThreadPool(nThreads: Int, shortId: String, priority: Int): ThreadPoolExecutor = { - val threadFactory = new CommonThreadFactory(shortId, priority = priority) - //like Executors.newFixedThreadPool - new ThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue[Runnable], threadFactory) - } - - override def newBoundedQueueFixedThreadPool(nThreads: Int, maxQueueSize: Int, rejectHandler: RejectedExecutionHandler, shortId: String, priority: Int): ThreadPoolExecutor = { - val threadFactory = new CommonThreadFactory(shortId, priority = priority) - //like Executors.newFixedThreadPool - new ThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new ArrayBlockingQueue[Runnable](maxQueueSize), threadFactory, rejectHandler) - } - - override protected def wrapRunnable(r: Runnable, shortId:String): Runnable = r - } - - private class 
ProfilingAsyncHelper(phase: Phase, private val profiler: RealProfiler)(using Context) extends BaseAsyncHelper(phase) { - - override def newUnboundedQueueFixedThreadPool(nThreads: Int, shortId: String, priority: Int): ThreadPoolExecutor = { - val threadFactory = new CommonThreadFactory(shortId, priority = priority) - //like Executors.newFixedThreadPool - new SinglePhaseInstrumentedThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue[Runnable], threadFactory, new AbortPolicy) - } - - override def newBoundedQueueFixedThreadPool(nThreads: Int, maxQueueSize: Int, rejectHandler: RejectedExecutionHandler, shortId: String, priority: Int): ThreadPoolExecutor = { - val threadFactory = new CommonThreadFactory(shortId, priority = priority) - //like Executors.newFixedThreadPool - new SinglePhaseInstrumentedThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new ArrayBlockingQueue[Runnable](maxQueueSize), threadFactory, rejectHandler) - } - - override protected def wrapRunnable(r: Runnable, shortId:String): Runnable = { - () => - val data = new ThreadProfileData - localData.set(data) - - val profileStart = profiler.snapThread(0) - try r.run finally { - val snap = profiler.snapThread(data.idleNs) - val threadRange = ProfileRange(profileStart, snap, phase, shortId, data.taskCount, Thread.currentThread()) - profiler.completeBackground(threadRange) - } - } - - /** - * data for thread run. Not threadsafe, only written from a single thread - */ - final class ThreadProfileData { - var firstStartNs = 0L - var taskCount = 0 - - var idleNs = 0L - var runningNs = 0L - - var lastStartNs = 0L - var lastEndNs = 0L - } - - val localData = new ThreadLocal[ThreadProfileData] - - private class SinglePhaseInstrumentedThreadPoolExecutor - ( corePoolSize: Int, maximumPoolSize: Int, keepAliveTime: Long, unit: TimeUnit, - workQueue: BlockingQueue[Runnable], threadFactory: ThreadFactory, handler: RejectedExecutionHandler - ) extends ThreadPoolExecutor(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory, handler) { - - override def beforeExecute(t: Thread, r: Runnable): Unit = { - val data = localData.get - data.taskCount += 1 - val now = System.nanoTime() - - if (data.firstStartNs == 0) data.firstStartNs = now - else data.idleNs += now - data.lastEndNs - - data.lastStartNs = now - - super.beforeExecute(t, r) - } - - override def afterExecute(r: Runnable, t: Throwable): Unit = { - val now = System.nanoTime() - val data = localData.get - - data.lastEndNs = now - data.runningNs += now - data.lastStartNs - - super.afterExecute(r, t) - } - } - } -} - diff --git a/compiler/src/dotty/tools/dotc/profile/Profiler.scala b/compiler/src/dotty/tools/dotc/profile/Profiler.scala index 64cc08160701..a13c9d41b529 100644 --- a/compiler/src/dotty/tools/dotc/profile/Profiler.scala +++ b/compiler/src/dotty/tools/dotc/profile/Profiler.scala @@ -11,7 +11,7 @@ import javax.management.openmbean.CompositeData import javax.management.{Notification, NotificationEmitter, NotificationListener} import dotty.tools.dotc.core.Phases.Phase -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.io.AbstractFile import annotation.internal.sharable @@ -83,7 +83,7 @@ private [profile] object NoOpProfiler extends Profiler { override def finished(): Unit = () } private [profile] object RealProfiler { - import scala.jdk.CollectionConverters._ + import scala.jdk.CollectionConverters.* val runtimeMx: RuntimeMXBean = ManagementFactory.getRuntimeMXBean 
val memoryMx: MemoryMXBean = ManagementFactory.getMemoryMXBean val gcMx: List[GarbageCollectorMXBean] = ManagementFactory.getGarbageCollectorMXBeans.asScala.toList @@ -106,7 +106,7 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) @nowarn("cat=deprecation") private[profile] def snapThread(idleTimeNanos: Long): ProfileSnap = { - import RealProfiler._ + import RealProfiler.* val current = Thread.currentThread() ProfileSnap( diff --git a/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala b/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala new file mode 100644 index 000000000000..e3ea69d9be06 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala @@ -0,0 +1,151 @@ +package dotty.tools.dotc.profile + +import scala.language.unsafeNulls + +import java.util.concurrent.ThreadPoolExecutor.AbortPolicy +import java.util.concurrent.* +import java.util.concurrent.atomic.AtomicInteger + +import dotty.tools.dotc.core.Phases.Phase +import dotty.tools.dotc.core.Contexts.* + +sealed trait ThreadPoolFactory { + + def newUnboundedQueueFixedThreadPool( + nThreads: Int, + shortId: String, + priority : Int = Thread.NORM_PRIORITY) : ThreadPoolExecutor + + def newBoundedQueueFixedThreadPool( + nThreads: Int, + maxQueueSize: Int, + rejectHandler: RejectedExecutionHandler, + shortId: String, + priority : Int = Thread.NORM_PRIORITY) : ThreadPoolExecutor +} + + +object ThreadPoolFactory { + def apply(phase: Phase)(using Context): ThreadPoolFactory = ctx.profiler match { + case NoOpProfiler => new BasicThreadPoolFactory(phase) + case r: RealProfiler => new ProfilingThreadPoolFactory(phase, r) + } + + private abstract class BaseThreadPoolFactory(phase: Phase) extends ThreadPoolFactory { + val baseGroup = new ThreadGroup(s"dotc-${phase.phaseName}") + + private def childGroup(name: String) = new ThreadGroup(baseGroup, name) + + // Invoked when a new `Worker` is created, see `CommonThreadFactory.newThread` + protected def wrapWorker(worker: Runnable, shortId:String): Runnable = worker + + protected final class CommonThreadFactory( + shortId: String, + daemon: Boolean = true, + priority: Int) extends ThreadFactory { + private val group: ThreadGroup = childGroup(shortId) + private val threadNumber: AtomicInteger = new AtomicInteger(1) + private val namePrefix = s"${baseGroup.getName}-$shortId-" + + // Invoked by the `ThreadPoolExecutor` when creating a new worker thread. The argument + // runnable is the `Worker` (which extends `Runnable`). Its `run` method gets tasks from + // the thread pool and executes them (on the thread created here). 
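+      // Illustrative example (hypothetical values, not part of this change): for a phase named "genBCode" and shortId "worker", threads created here are named "dotc-genBCode-worker-1", "dotc-genBCode-worker-2", ...; the profiling factory additionally wraps each worker via `wrapWorker` so it can snapshot the thread before and after its run.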
+ override def newThread(worker: Runnable): Thread = { + val wrapped = wrapWorker(worker, shortId) + val t: Thread = new Thread(group, wrapped, namePrefix + threadNumber.getAndIncrement, 0) + if (t.isDaemon != daemon) t.setDaemon(daemon) + if (t.getPriority != priority) t.setPriority(priority) + t + } + } + } + + private final class BasicThreadPoolFactory(phase: Phase) extends BaseThreadPoolFactory(phase) { + + override def newUnboundedQueueFixedThreadPool(nThreads: Int, shortId: String, priority: Int): ThreadPoolExecutor = { + val threadFactory = new CommonThreadFactory(shortId, priority = priority) + //like Executors.newFixedThreadPool + new ThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue[Runnable], threadFactory) + } + + override def newBoundedQueueFixedThreadPool(nThreads: Int, maxQueueSize: Int, rejectHandler: RejectedExecutionHandler, shortId: String, priority: Int): ThreadPoolExecutor = { + val threadFactory = new CommonThreadFactory(shortId, priority = priority) + //like Executors.newFixedThreadPool + new ThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new ArrayBlockingQueue[Runnable](maxQueueSize), threadFactory, rejectHandler) + } + } + + private class ProfilingThreadPoolFactory(phase: Phase, private val profiler: RealProfiler) extends BaseThreadPoolFactory(phase) { + + override def newUnboundedQueueFixedThreadPool(nThreads: Int, shortId: String, priority: Int): ThreadPoolExecutor = { + val threadFactory = new CommonThreadFactory(shortId, priority = priority) + //like Executors.newFixedThreadPool + new SinglePhaseInstrumentedThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue[Runnable], threadFactory, new AbortPolicy) + } + + override def newBoundedQueueFixedThreadPool(nThreads: Int, maxQueueSize: Int, rejectHandler: RejectedExecutionHandler, shortId: String, priority: Int): ThreadPoolExecutor = { + val threadFactory = new CommonThreadFactory(shortId, priority = priority) + //like Executors.newFixedThreadPool + new SinglePhaseInstrumentedThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new ArrayBlockingQueue[Runnable](maxQueueSize), threadFactory, rejectHandler) + } + + override protected def wrapWorker(worker: Runnable, shortId: String): Runnable = { + () => + val data = new ThreadProfileData + localData.set(data) + + val profileStart = profiler.snapThread(0) + try worker.run finally { + val snap = profiler.snapThread(data.idleNs) + val threadRange = ProfileRange(profileStart, snap, phase, shortId, data.taskCount, Thread.currentThread()) + profiler.completeBackground(threadRange) + } + } + + /** + * data for thread run. 
Not thread-safe; only written from a single thread + */ + final class ThreadProfileData { + var firstStartNs = 0L + var taskCount = 0 + + var idleNs = 0L + var runningNs = 0L + + var lastStartNs = 0L + var lastEndNs = 0L + } + + val localData = new ThreadLocal[ThreadProfileData] + + private class SinglePhaseInstrumentedThreadPoolExecutor + ( corePoolSize: Int, maximumPoolSize: Int, keepAliveTime: Long, unit: TimeUnit, + workQueue: BlockingQueue[Runnable], threadFactory: ThreadFactory, handler: RejectedExecutionHandler + ) extends ThreadPoolExecutor(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory, handler) { + + override def beforeExecute(t: Thread, r: Runnable): Unit = { + val data = localData.get + data.taskCount += 1 + val now = System.nanoTime() + + if (data.firstStartNs == 0) data.firstStartNs = now + else data.idleNs += now - data.lastEndNs + + data.lastStartNs = now + + super.beforeExecute(t, r) + } + + override def afterExecute(r: Runnable, t: Throwable): Unit = { + val now = System.nanoTime() + val data = localData.get + + data.lastEndNs = now + data.runningNs += now - data.lastStartNs + + super.afterExecute(r, t) + } + } + } +} + diff --git a/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala index c9a77dbfa151..c124e12077fe 100644 --- a/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala +++ b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala @@ -11,19 +11,19 @@ import java.lang.reflect.{InvocationTargetException, Method => JLRMethod} import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.ast.TreeMapWithImplicits -import dotty.tools.dotc.core.Annotations._ -import dotty.tools.dotc.core.Constants._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Annotations.* +import dotty.tools.dotc.core.Constants.* +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* import dotty.tools.dotc.core.Denotations.staticRef -import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.NameKinds.FlatName -import dotty.tools.dotc.core.Names._ -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Names.* +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.core.TypeErasure -import dotty.tools.dotc.core.Types._ -import dotty.tools.dotc.quoted._ +import dotty.tools.dotc.core.Types.* +import dotty.tools.dotc.quoted.* import dotty.tools.dotc.typer.ImportInfo.withRootImports import dotty.tools.dotc.util.SrcPos import dotty.tools.dotc.reporting.Message @@ -32,8 +32,8 @@ import dotty.tools.dotc.core.CyclicReference /** Tree interpreter for metaprogramming constructs */ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): - import Interpreter._ - import tpd._ + import Interpreter.* + import tpd.* val classLoader = if ctx.owner.topLevelClass.name.startsWith(str.REPL_SESSION_LINE) then @@ -47,7 +47,7 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): /** Returns the result of interpreting the code in the tree. * Returns Some of the result or None if the result type is not consistent with the expected type. - * Throws a StopInterpretation if the tree could not be interpreted or a runtime exception ocurred. + * Throws a StopInterpretation if the tree could not be interpreted or a runtime exception occurred.
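+   * For example (an illustrative note, not part of this change): for a tree representing `1 + 2`, `interpret[Int](tree)` yields `Some(3)`, while `interpret[String](tree)` on the same tree yields `None` because the result does not conform to the expected type.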
*/ final def interpret[T](tree: Tree)(using ct: ClassTag[T]): Option[T] = interpretTree(tree)(using emptyEnv) match { @@ -59,7 +59,7 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): } /** Returns the result of interpreting the code in the tree. - * Throws a StopInterpretation if the tree could not be interpreted or a runtime exception ocurred. + * Throws a StopInterpretation if the tree could not be interpreted or a runtime exception occurred. */ protected def interpretTree(tree: Tree)(using Env): Object = tree match { case Literal(Constant(value)) => @@ -126,11 +126,13 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): view.toList fnType.dealias match - case fnType: MethodType if fnType.hasErasedParams => interpretArgs(argss, fnType.resType) case fnType: MethodType => val argTypes = fnType.paramInfos assert(argss.head.size == argTypes.size) - interpretArgsGroup(argss.head, argTypes) ::: interpretArgs(argss.tail, fnType.resType) + val nonErasedArgs = argss.head.lazyZip(fnType.erasedParams).collect { case (arg, false) => arg }.toList + val nonErasedArgTypes = fnType.paramInfos.lazyZip(fnType.erasedParams).collect { case (arg, false) => arg }.toList + assert(nonErasedArgs.size == nonErasedArgTypes.size) + interpretArgsGroup(nonErasedArgs, nonErasedArgTypes) ::: interpretArgs(argss.tail, fnType.resType) case fnType: AppliedType if defn.isContextFunctionType(fnType) => val argTypes :+ resType = fnType.args: @unchecked interpretArgsGroup(argss.head, argTypes) ::: interpretArgs(argss.tail, resType) @@ -169,7 +171,7 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): val clazz = inst.getClass val name = fn.name.asTermName val method = getMethod(clazz, name, paramsSig(fn)) - stopIfRuntimeException(method.invoke(inst, args: _*), method) + stopIfRuntimeException(method.invoke(inst, args*), method) } private def interpretedStaticFieldAccess(sym: Symbol): Object = { @@ -184,8 +186,8 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): private def interpretNew(fn: Symbol, args: List[Object]): Object = { val className = fn.owner.fullName.mangledString.replaceAll("\\$\\.", "\\$") val clazz = loadClass(className) - val constr = clazz.getConstructor(paramsSig(fn): _*) - constr.newInstance(args: _*).asInstanceOf[Object] + val constr = clazz.getConstructor(paramsSig(fn)*) + constr.newInstance(args*).asInstanceOf[Object] } private def unexpectedTree(tree: Tree): Object = @@ -216,7 +218,7 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): private def getMethod(clazz: Class[?], name: Name, paramClasses: List[Class[?]]): JLRMethod = - try clazz.getMethod(name.toString, paramClasses: _*) + try clazz.getMethod(name.toString, paramClasses*) catch { case _: NoSuchMethodException => val msg = em"Could not find method ${clazz.getCanonicalName}.$name with parameters ($paramClasses%, %)" @@ -326,10 +328,10 @@ object Interpreter: class StopInterpretation(val msg: Message, val pos: SrcPos) extends Exception object Call: - import tpd._ + import tpd.* /** Matches an expression that is either a field access or an application - * It retruns a TermRef containing field accessed or a method reference and the arguments passed to it. - */ + * It returns a TermRef for the accessed field or referenced method, together with the arguments passed to it.
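+   * For example (an illustrative note, not part of this change): `foo.bar(1)(2)` is matched as `(foo.bar, List(List(1), List(2)))`: the nested `Call0` extractor accumulates the argument lists innermost-first and `unapply` reverses them back to source order.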
+ */ def unapply(arg: Tree)(using Context): Option[(RefTree, List[List[Tree]])] = Call0.unapply(arg).map((fn, args) => (fn, args.reverse)) @@ -339,10 +341,8 @@ object Interpreter: Some((fn, args)) case fn: Ident => Some((tpd.desugarIdent(fn).withSpan(fn.span), Nil)) case fn: Select => Some((fn, Nil)) - case Apply(f @ Call0(fn, args1), args2) => - if (f.tpe.widenDealias.hasErasedParams) Some((fn, args1)) - else Some((fn, args2 :: args1)) - case TypeApply(Call0(fn, args), _) => Some((fn, args)) + case Apply(f @ Call0(fn, argss), args) => Some((fn, args :: argss)) + case TypeApply(Call0(fn, argss), _) => Some((fn, argss)) case _ => None } } diff --git a/compiler/src/dotty/tools/dotc/quoted/MacroExpansion.scala b/compiler/src/dotty/tools/dotc/quoted/MacroExpansion.scala index 141b349826b4..d8ba1b72ce3b 100644 --- a/compiler/src/dotty/tools/dotc/quoted/MacroExpansion.scala +++ b/compiler/src/dotty/tools/dotc/quoted/MacroExpansion.scala @@ -1,7 +1,7 @@ package dotty.tools.dotc.quoted import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.typer.Typer import dotty.tools.dotc.util.{Property, SourcePosition} diff --git a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala index 7596549fe401..8ebd1f6973f2 100644 --- a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala +++ b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala @@ -1,29 +1,30 @@ package dotty.tools.dotc.quoted -import dotty.tools.dotc.ast.Trees._ +import dotty.tools.dotc.ast.Trees.* import dotty.tools.dotc.ast.{TreeTypeMap, tpd} -import dotty.tools.dotc.config.Printers._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.config.Printers.* +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.Mode -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Types._ -import dotty.tools.dotc.core.tasty.{ PositionPickler, TastyPickler, TastyPrinter, TreePickler } +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Types.* +import dotty.tools.dotc.core.tasty.{ PositionPickler, TastyPickler, TastyPrinter, TreePickler, Attributes } import dotty.tools.dotc.core.tasty.DottyUnpickler import dotty.tools.dotc.core.tasty.TreeUnpickler.UnpickleMode import dotty.tools.dotc.report import dotty.tools.dotc.reporting.Message import scala.quoted.Quotes -import scala.quoted.runtime.impl._ +import scala.quoted.runtime.impl.* import scala.collection.mutable -import QuoteUtils._ +import QuoteUtils.* +import dotty.tools.io.NoAbstractFile object PickledQuotes { - import tpd._ + import tpd.* /** Pickle the tree of the quote into strings */ def pickleQuote(tree: Tree)(using Context): List[String] = @@ -81,12 +82,12 @@ object PickledQuotes { /** Unpickle the tree contained in the TastyExpr */ def unpickleTerm(pickled: String | List[String], typeHole: TypeHole, termHole: ExprHole)(using Context): Tree = { - val unpickled = withMode(Mode.ReadPositions)(unpickle(pickled, isType = false)) - val Inlined(call, Nil, expansion) = unpickled: @unchecked - val inlineCtx = inlineContext(call) - val expansion1 = spliceTypes(expansion, typeHole)(using inlineCtx) - val expansion2 = spliceTerms(expansion1, typeHole, termHole)(using inlineCtx) - cpy.Inlined(unpickled)(call, Nil, expansion2) + 
withMode(Mode.ReadPositions)(unpickle(pickled, isType = false)) match + case tree @ Inlined(call, Nil, expansion) => + val inlineCtx = inlineContext(tree) + val expansion1 = spliceTypes(expansion, typeHole)(using inlineCtx) + val expansion2 = spliceTerms(expansion1, typeHole, termHole)(using inlineCtx) + cpy.Inlined(tree)(call, Nil, expansion2) } @@ -98,7 +99,7 @@ object PickledQuotes { /** Replace all term holes with the spliced terms */ private def spliceTerms(tree: Tree, typeHole: TypeHole, termHole: ExprHole)(using Context): Tree = { - def evaluateHoles = new TreeMap { + def evaluateHoles = new TreeMapWithPreciseStatContexts { override def transform(tree: tpd.Tree)(using Context): tpd.Tree = tree match { case Hole(isTerm, idx, args, _) => inContext(SpliceScope.contextWithNewSpliceScope(tree.sourcePos)) { @@ -217,7 +218,7 @@ object PickledQuotes { private def pickle(tree: Tree)(using Context): Array[Byte] = { quotePickling.println(i"**** pickling quote of\n$tree") val pickler = new TastyPickler(defn.RootClass) - val treePkl = new TreePickler(pickler) + val treePkl = new TreePickler(pickler, Attributes.empty) treePkl.pickle(tree :: Nil) treePkl.compactify() if tree.span.exists then @@ -268,7 +269,7 @@ object PickledQuotes { quotePickling.println(s"**** unpickling quote from TASTY\n${TastyPrinter.showContents(bytes, ctx.settings.color.value == "never")}") val mode = if (isType) UnpickleMode.TypeTree else UnpickleMode.Term - val unpickler = new DottyUnpickler(bytes, mode) + val unpickler = new DottyUnpickler(NoAbstractFile, bytes, mode) unpickler.enter(Set.empty) val tree = unpickler.tree diff --git a/compiler/src/dotty/tools/dotc/quoted/QuotePatterns.scala b/compiler/src/dotty/tools/dotc/quoted/QuotePatterns.scala new file mode 100644 index 000000000000..76961f691617 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/quoted/QuotePatterns.scala @@ -0,0 +1,266 @@ +package dotty.tools.dotc +package quoted + +import dotty.tools.dotc.ast.TreeTypeMap +import dotty.tools.dotc.ast.Trees.* +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.ast.untpd +import dotty.tools.dotc.core.* +import dotty.tools.dotc.core.Annotations.* +import dotty.tools.dotc.core.Constants.* +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.NameKinds.PatMatGivenVarName +import dotty.tools.dotc.core.Names.* +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.TypeOps.* +import dotty.tools.dotc.core.Types.* +import dotty.tools.dotc.reporting.IllegalVariableInPatternAlternative + + +import scala.collection.mutable + +object QuotePatterns: + import tpd.* + + /** Check for restricted patterns */ + def checkPattern(quotePattern: QuotePattern)(using Context): Unit = new tpd.TreeTraverser { + def traverse(tree: Tree)(using Context): Unit = tree match { + case _: SplicePattern => + case tdef: TypeDef if tdef.symbol.isClass => + val kind = if tdef.symbol.is(Module) then "objects" else "classes" + report.error(em"Implementation restriction: cannot match $kind", tree.srcPos) + case tree: NamedDefTree => + if tree.name.is(NameKinds.WildcardParamName) then + report.warning( + "Use of `_` for lambda in quoted pattern. 
Use explicit lambda instead or use `$_` to match any term.", + tree.srcPos) + if tree.name.isTermName && !tree.nameSpan.isSynthetic && tree.name != nme.ANON_FUN && tree.name.startsWith("$") then + report.error("Names cannot start with $ in quote patterns", tree.namePos) + traverseChildren(tree) + case _: Match => + report.error("Implementation restriction: cannot match `match` expressions", tree.srcPos) + case _: Try => + report.error("Implementation restriction: cannot match `try` expressions", tree.srcPos) + case _: Return => + report.error("Implementation restriction: cannot match `return` statements", tree.srcPos) + case _ => + traverseChildren(tree) + } + + }.traverse(quotePattern.body) + + /** Encode the quote pattern into an `unapply` that the pattern matcher can handle. + * + * A quote pattern + * ``` + * '{ + * // type variables (QuotePattern.bindings Bind list) + * type t1 >: l1 <: b1 + * ... + * type tn >: ln <: bn + * // pattern (QuotePattern.body) + * ... $x1: T1 ... ${ F(x2) }: T2 ... $f(a1: A1, ..., an: An): T3 ... + * } // (using quotes) (QuotePattern.quotes) + * ``` + * + * is transformed into the pattern + * + * ``` + * quotes + * .asInstanceOf[QuoteMatching] // scala.quoted.runtime.QuoteMatching + * .ExprMatch // or TypeMatch + * .unapply[ + * KCons[t1 >: l1 <: b1, ...KCons[tn >: ln <: bn, KNil]...], // scala.quoted.runtime.{KCons, KNil} + * (T1, T2, (A1, ..., An) => T3, ...) + * ]( + * '{ + * type t1' >: l1' <: b1' + * ... + * type tn' >: ln' <: bn' + * // scala.quoted.runtime.Patterns.{patternHole, higherOrderHole} + * ... $patternHole[T1] ... $patternHole[T2] ... $higherOrderHole[T3](a1, ..., an) ... + * }, + * quotes + * ) + * ``` + * + * Here ti' is a `TypeDef` that represents `ti` in the (pickled) pattern body. The type bounds + * `>: l1' <: b1'` of `ti'` are the same as the type bounds `>: l1 <: b1`, with all references + * to `tj` replaced by `tj'`.
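+   * For instance (an illustrative sketch, not part of this change): matching `case '{ $x: Int } => ...` involves no type bindings, so the `unapply` is instantiated with an empty binding list and pattern type `Tuple1[Expr[Int]]`, its shape argument is the quoted tree `'{ scala.quoted.runtime.Patterns.patternHole[Int] }`, and `x` is bound by the single sub-pattern.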
+   */ + def encode(quotePattern: QuotePattern)(using Context): UnApply = + val quoteClass = if (quotePattern.body.isTerm) defn.QuotedExprClass else defn.QuotedTypeClass + + val matchModule = if quotePattern.body.isTerm then defn.QuoteMatching_ExprMatch else defn.QuoteMatching_TypeMatch + val unapplySym = if quotePattern.body.isTerm then defn.QuoteMatching_ExprMatch_unapply else defn.QuoteMatching_TypeMatch_unapply + val unapplyFun = quotePattern.quotes.asInstance(defn.QuoteMatchingClass.typeRef).select(matchModule).select(unapplySym) + + val typeBindingsTuple = tpd.hkNestedPairsTypeTree(quotePattern.bindings) + + val (splicePatterns, shape0) = splitQuotePattern(quotePattern.body) + + val shape1 = + if quotePattern.bindings.isEmpty then shape0 + else + val oldBindings = quotePattern.bindings.map(_.symbol) + val newBindings = quotePattern.bindings.map { binding => + val sym = binding.symbol + val typeSym = newSymbol(ctx.owner, sym.name, EmptyFlags, sym.info, NoSymbol, binding.span) + typeSym.addAnnotation(defn.QuotedRuntimePatterns_patternTypeAnnot) + for fromAbove <- sym.getAnnotation(defn.QuotedRuntimePatterns_fromAboveAnnot) do + typeSym.addAnnotation(fromAbove) + typeSym.asType + } + val newBindingsRefs = newBindings.map(_.typeRef) + for newBinding <- newBindings do + newBinding.info = newBinding.info.subst(oldBindings, newBindingsRefs) + + val patternTypes = newBindings.map(sym => TypeDef(sym).withSpan(sym.span)) + Block(patternTypes, shape0.subst(oldBindings, newBindings)) + + val quotedShape = + if (quotePattern.body.isTerm) tpd.Quote(shape1, Nil).select(nme.apply).appliedTo(quotePattern.quotes) + else ref(defn.QuotedTypeModule_of.termRef).appliedToTypeTree(shape1).appliedTo(quotePattern.quotes) + + val givenTypes = quotePattern.bindings.map { binding => + val name = binding.symbol.name.toTypeName + val nameOfSyntheticGiven = PatMatGivenVarName.fresh(name.toTermName) + val tpe = defn.QuotedTypeClass.typeRef.appliedTo(binding.symbol.typeRef) + val givenTypeSym = newPatternBoundSymbol(nameOfSyntheticGiven, tpe, binding.span, flags = Given) + Bind(givenTypeSym, untpd.Ident(nme.WILDCARD).withType(tpe)).withSpan(binding.span) + } + + val patterns = givenTypes ::: splicePatterns + val patternTypes = patterns.map(_.tpe.widenTermRefExpr) + + val splicePat = + if patterns.isEmpty then ref(defn.EmptyTupleModule.termRef) + else if patterns.size <= Definitions.MaxTupleArity then + val tupleNUnapply = + ref(defn.TupleType(patterns.size).nn.typeSymbol.companionModule) + .select(nme.unapply) + .appliedToTypes(patternTypes) + UnApply(tupleNUnapply, Nil, patterns, defn.tupleType(patternTypes)) + else + val tupleXXLUnapplySeq = ref(defn.TupleXXL_unapplySeq) + val unapply = UnApply(tupleXXLUnapplySeq, Nil, patterns, defn.tupleType(patternTypes)) + Typed(unapply, TypeTree(defn.TupleXXLClass.typeRef)) + + val patType = + val quotedTypes = + quotePattern.bindings.map(givenType => defn.QuotedTypeClass.typeRef.appliedTo(givenType.symbol.typeRef)) + val quotedExprs = + splicePatterns.map(_.tpe.widenTermRefExpr) + defn.tupleType(quotedTypes ::: quotedExprs) + + UnApply( + fun = unapplyFun.appliedToTypeTrees(typeBindingsTuple :: TypeTree(patType) :: Nil), + implicits = quotedShape :: Nil, + patterns = splicePat :: Nil, + quotePattern.tpe) + + /** Split a typed quoted pattern into the contents of its splices and replace them with place holders. + * + * A quote pattern + * ``` + * case '{ + * val a: T = ??? + * List[T]( + * $x, + * ${Expr(y)}, + * $f(a) + * ) + * } => ...
+   * ``` + * will return + * ``` + * ( + * List( + * <x: Expr[T]>: Tree, + * <Expr(y): Expr[T]>: Tree, + * <f: Expr[T => T]>: Tree) + * <'{ + * val a: T = ??? + * List[T]( + * scala.quoted.runtime.Patterns.patternHole[T], + * scala.quoted.runtime.Patterns.patternHole[T], + * scala.quoted.runtime.Patterns.higherOrderHole[T](a) + * ) + * }>: Tree, + * ) + * ``` + */ + private def splitQuotePattern(body: Tree)(using Context): (List[Tree], Tree) = { + val patBuf = new mutable.ListBuffer[Tree] + val shape = new tpd.TreeMap { + override def transform(tree: Tree)(using Context) = tree match { + case Typed(splice @ SplicePattern(pat, Nil), tpt) if !tpt.tpe.derivesFrom(defn.RepeatedParamClass) => + transform(tpt) // Collect type bindings + transform(splice) + case SplicePattern(pat, args) => + val patType = pat.tpe.widen + val patType1 = patType.translateFromRepeated(toArray = false) + val pat1 = if (patType eq patType1) pat else pat.withType(patType1) + patBuf += pat1 + if args.isEmpty then ref(defn.QuotedRuntimePatterns_patternHole.termRef).appliedToType(tree.tpe).withSpan(tree.span) + else ref(defn.QuotedRuntimePatterns_higherOrderHole.termRef).appliedToType(tree.tpe).appliedTo(SeqLiteral(args, TypeTree(defn.AnyType))).withSpan(tree.span) + case _ => + super.transform(tree) + } + }.transform(body) + (patBuf.toList, shape) + } + + + /** Decodes an encoded pattern into a QuotePattern. + * + * See the documentation of `encode`; this does the opposite transformation. + */ + def decode(tree: UnApply)(using Context): QuotePattern = + val (fun, implicits, patternTuple) = (tree: @unchecked) match + case UnApply(fun, implicits, patternTuple :: Nil) => (fun, implicits, patternTuple) + val patterns = patternTuple match + case _: Ident => Nil // EmptyTuple + case UnApply(_, _, patterns) => patterns // TupleN + case Typed(UnApply(_, _, patterns), _) => patterns // TupleXXL + val shape = (implicits: @unchecked) match + case Apply(Select(Quote(shape, _), _), _) :: Nil => shape + case List(Apply(TypeApply(_, shape :: Nil), _)) => shape + fun match + // <quotes>.asInstanceOf[QuoteMatching].{ExprMatch,TypeMatch}.unapply[<typeBindings>, <resTypes>] + case TypeApply(Select(Select(TypeApply(Select(quotes, _), _), _), _), typeBindings :: resTypes :: Nil) => + val bindings = unrollBindings(typeBindings) + val addPatternSplice = new TreeMap { + private val patternIterator = patterns.iterator.filter { + case pat: Bind => !pat.symbol.name.is(PatMatGivenVarName) + case _ => true + } + override def transform(tree: tpd.Tree)(using Context): tpd.Tree = tree match + case TypeApply(patternHole, _) if patternHole.symbol == defn.QuotedRuntimePatterns_patternHole => + cpy.SplicePattern(tree)(patternIterator.next(), Nil) + case Apply(patternHole, SeqLiteral(args, _) :: Nil) if patternHole.symbol == defn.QuotedRuntimePatterns_higherOrderHole => + cpy.SplicePattern(tree)(patternIterator.next(), args) + case _ => super.transform(tree) + } + val body = addPatternSplice.transform(shape) match + case block @ Block((tdef: TypeDef) :: rest, expr) if tdef.symbol.hasAnnotation(defn.QuotedRuntimePatterns_patternTypeAnnot) => + val (tdefs, stats) = rest.span { + case tdef: TypeDef => tdef.symbol.hasAnnotation(defn.QuotedRuntimePatterns_patternTypeAnnot) + case _ => false + } + val shapeBindingSyms = tdef.symbol :: tdefs.map(_.symbol) + for (binding, shapeBinding) <- bindings.zip(shapeBindingSyms) do + if shapeBinding.hasAnnotation(defn.QuotedRuntimePatterns_fromAboveAnnot) then + binding.symbol.addAnnotation(defn.QuotedRuntimePatterns_fromAboveAnnot) + val body1 = if stats.isEmpty then expr else cpy.Block(block)(stats,
expr) + body1.subst(shapeBindingSyms, bindings.map(_.symbol)) + case body => body + cpy.QuotePattern(tree)(bindings, body, quotes) + + private def unrollBindings(tree: Tree)(using Context): List[Tree] = tree match + case AppliedTypeTree(tupleN, bindings) if defn.isTupleClass(tupleN.symbol) => bindings // TupleN, 1 <= N <= 22 + case AppliedTypeTree(_, head :: tail :: Nil) => head :: unrollBindings(tail) // KCons or *: + case _ => Nil // KNil or EmptyTuple diff --git a/compiler/src/dotty/tools/dotc/quoted/QuoteUtils.scala b/compiler/src/dotty/tools/dotc/quoted/QuoteUtils.scala index 604c8da3420a..a015c726c59f 100644 --- a/compiler/src/dotty/tools/dotc/quoted/QuoteUtils.scala +++ b/compiler/src/dotty/tools/dotc/quoted/QuoteUtils.scala @@ -1,11 +1,11 @@ package dotty.tools.dotc.quoted import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Symbols.* object QuoteUtils: - import tpd._ + import tpd.* /** Get the owner of a tree if it has one */ def treeOwner(tree: Tree)(using Context): Option[Symbol] = { diff --git a/compiler/src/dotty/tools/dotc/quoted/QuotesCache.scala b/compiler/src/dotty/tools/dotc/quoted/QuotesCache.scala index c063e437cb19..4147e49b87ce 100644 --- a/compiler/src/dotty/tools/dotc/quoted/QuotesCache.scala +++ b/compiler/src/dotty/tools/dotc/quoted/QuotesCache.scala @@ -1,12 +1,12 @@ package dotty.tools.dotc.quoted -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.util.Property import dotty.tools.dotc.ast.tpd object QuotesCache { - import tpd._ + import tpd.* /** A key to be used in a context property that caches the unpickled trees */ private val QuotesCacheKey = new Property.Key[collection.mutable.Map[String | List[String], Tree]] diff --git a/compiler/src/dotty/tools/dotc/quoted/reflect/FromSymbol.scala b/compiler/src/dotty/tools/dotc/quoted/reflect/FromSymbol.scala index 87d0cbb7be08..cfc09a8ed836 100644 --- a/compiler/src/dotty/tools/dotc/quoted/reflect/FromSymbol.scala +++ b/compiler/src/dotty/tools/dotc/quoted/reflect/FromSymbol.scala @@ -3,11 +3,11 @@ package reflect import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.ast.untpd -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Types.* object FromSymbol { diff --git a/compiler/src/dotty/tools/dotc/report.scala b/compiler/src/dotty/tools/dotc/report.scala index 38f2ab347c4c..8e39afdd6e7d 100644 --- a/compiler/src/dotty/tools/dotc/report.scala +++ b/compiler/src/dotty/tools/dotc/report.scala @@ -1,14 +1,15 @@ package dotty.tools.dotc -import reporting._ -import Diagnostic._ +import reporting.* +import Diagnostic.* import util.{SourcePosition, NoSourcePosition, SrcPos} -import core._ -import Contexts._, Flags.*, Symbols._, Decorators._ +import core.* +import Contexts.*, Flags.*, Symbols.*, Decorators.* import config.SourceVersion -import ast._ +import ast.* import config.Feature.sourceVersion import java.lang.System.currentTimeMillis +import dotty.tools.dotc.config.MigrationVersion object report: @@ -80,14 +81,11 @@ object report: if ctx.settings.YdebugError.value then Thread.dumpStack() if 
ctx.settings.YdebugTypeError.value then ex.printStackTrace() - def errorOrMigrationWarning(msg: Message, pos: SrcPos, from: SourceVersion)(using Context): Unit = - if sourceVersion.isAtLeast(from) then - if sourceVersion.isMigrating && sourceVersion.ordinal <= from.ordinal then migrationWarning(msg, pos) - else error(msg, pos) - - def gradualErrorOrMigrationWarning(msg: Message, pos: SrcPos, warnFrom: SourceVersion, errorFrom: SourceVersion)(using Context): Unit = - if sourceVersion.isAtLeast(errorFrom) then errorOrMigrationWarning(msg, pos, errorFrom) - else if sourceVersion.isAtLeast(warnFrom) then warning(msg, pos) + def errorOrMigrationWarning(msg: Message, pos: SrcPos, migrationVersion: MigrationVersion)(using Context): Unit = + if sourceVersion.isAtLeast(migrationVersion.errorFrom) then + if !sourceVersion.isMigrating then error(msg, pos) + else if ctx.settings.rewrite.value.isEmpty then migrationWarning(msg, pos) + else if sourceVersion.isAtLeast(migrationVersion.warnFrom) then warning(msg, pos) def restrictionError(msg: Message, pos: SrcPos = NoSourcePosition)(using Context): Unit = error(msg.mapMsg("Implementation restriction: " + _), pos) @@ -131,61 +129,35 @@ object report: private object messageRendering extends MessageRendering // Should only be called from Run#enrichErrorMessage. - def enrichErrorMessage(errorMessage: String)(using Context): String = try { + def enrichErrorMessage(errorMessage: String)(using Context): String = + if ctx.settings.YnoEnrichErrorMessages.value then errorMessage + else try enrichErrorMessage1(errorMessage) + catch case _: Throwable => errorMessage // don't introduce new errors trying to report errors, so swallow exceptions + + private def enrichErrorMessage1(errorMessage: String)(using Context): String = { + import untpd.*, config.Settings.* def formatExplain(pairs: List[(String, Any)]) = pairs.map((k, v) => f"$k%20s: $v").mkString("\n") val settings = ctx.settings.userSetSettings(ctx.settingsState).sortBy(_.name) - val tree = ctx.tree - val sym = tree.symbol - val pos = tree.sourcePos - val path = pos.source.path - val site = ctx.outersIterator.map(_.owner).filter(sym => !sym.exists || sym.isClass || sym.is(Method)).next() - - import untpd.* - extension (tree: Tree) def summaryString: String = tree match - case Literal(const) => s"Literal($const)" - case Ident(name) => s"Ident(${name.decode})" - case Select(qual, name) => s"Select(${qual.summaryString}, ${name.decode})" - case tree: NameTree => (if tree.isType then "type " else "") + tree.name.decode - case tree => s"${tree.className}${if tree.symbol.exists then s"(${tree.symbol})" else ""}" + def showSetting(s: Setting[?]): String = if s.value == "" then s"${s.name} \"\"" else s"${s.name} ${s.value}" val info1 = formatExplain(List( "while compiling" -> ctx.compilationUnit, - "during phase" -> ctx.phase.prevMega, + "during phase" -> ctx.phase.megaPhase, "mode" -> ctx.mode, "library version" -> scala.util.Properties.versionString, "compiler version" -> dotty.tools.dotc.config.Properties.versionString, - "settings" -> settings.map(s => if s.value == "" then s"${s.name} \"\"" else s"${s.name} ${s.value}").mkString(" "), - )) - val symbolInfos = if sym eq NoSymbol then List("symbol" -> sym) else List( - "symbol" -> sym.showLocated, - "symbol definition" -> s"${sym.showDcl} (a ${sym.className})", - "symbol package" -> sym.enclosingPackageClass.fullName, - "symbol owners" -> sym.showExtendedLocation, - ) - val info2 = formatExplain(List( - "tree" -> tree.summaryString, - "tree position" -> (if 
pos.exists then s"$path:${pos.line + 1}:${pos.column}" else s"$path:"),
- "tree type" -> tree.typeOpt.show,
- ) ::: symbolInfos ::: List(
- "call site" -> s"${site.showLocated} in ${site.enclosingPackageClass}"
+ "settings" -> settings.map(showSetting).mkString(" "),
))
- val context_s = try
- s""" == Source file context for tree position ==
- |
- |${messageRendering.messageAndPos(Diagnostic.Error("", pos))}""".stripMargin
- catch case _: Exception => ""
s"""
| $errorMessage
|
| An unhandled exception was thrown in the compiler.
| Please file a crash report here:
| https://github.com/lampepfl/dotty/issues/new/choose
+ | For non-enriched exceptions, compile with -Yno-enrich-error-messages.
|
|$info1
- |
- |$info2
- |
- |$context_s""".stripMargin
- } catch case _: Throwable => errorMessage // don't introduce new errors trying to report errors, so swallow exceptions
+ |""".stripMargin
+ }
end report
diff --git a/compiler/src/dotty/tools/dotc/reporting/CodeAction.scala b/compiler/src/dotty/tools/dotc/reporting/CodeAction.scala
new file mode 100644
index 000000000000..b2b18cc00104
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/reporting/CodeAction.scala
@@ -0,0 +1,16 @@
+package dotty.tools.dotc.reporting
+
+import dotty.tools.dotc.rewrites.Rewrites.ActionPatch
+
+/** A representation of a code action / fix that can be used by tooling to
+ * apply a fix to the user's code.
+ *
+ * @param title The title of the fix, often shown to the user in their editor.
+ * @param description An optional description of the fix.
+ * @param patches The patches that this fix contains.
+ */
+case class CodeAction(
+ title: String,
+ description: Option[String],
+ patches: List[ActionPatch]
)
diff --git a/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala b/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala
index a95af962c053..ce7477f4da70 100644
--- a/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala
@@ -2,7 +2,7 @@ package dotty.tools
package dotc
package reporting
-import core.Contexts._
+import core.Contexts.*
import java.io.{ BufferedReader, PrintWriter }
import Diagnostic.Error
diff --git a/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala b/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala
index 624aa93924e8..7a8edb233aee 100644
--- a/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala
@@ -5,12 +5,12 @@ package reporting
import scala.language.unsafeNulls
import dotty.tools.dotc.config.Settings.Setting
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Contexts.*
import dotty.tools.dotc.interfaces.Diagnostic.{ERROR, INFO, WARNING}
import dotty.tools.dotc.util.SourcePosition
import java.util.{Collections, Optional, List => JList}
-import scala.util.chaining._
+import scala.util.chaining.*
import core.Decorators.toMessage
object Diagnostic:
diff --git a/compiler/src/dotty/tools/dotc/reporting/DidYouMean.scala b/compiler/src/dotty/tools/dotc/reporting/DidYouMean.scala
new file mode 100644
index 000000000000..04b9b518fd5e
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/reporting/DidYouMean.scala
@@ -0,0 +1,161 @@
+package dotty.tools
+package dotc
+package reporting
+
+import core.*
+import Contexts.*
+import Decorators.*, Symbols.*, Names.*, Types.*, Flags.*
+import typer.ProtoTypes.{FunProto, SelectionProto}
+
+/** A utility object to support "did you mean" hinting */
+object DidYouMean:
+
+ def kindOK(sym: 
Symbol, isType: Boolean, isApplied: Boolean)(using Context): Boolean =
+ if isType then sym.isType
+ else sym.isTerm || isApplied && sym.isClass && !sym.is(ModuleClass)
+ // also count classes if followed by `(` since they have constructor proxies,
+ // but these don't show up separately as members
+ // Note: One needs to be careful here not to complete symbols. For instance,
+ // we run into trouble if we ask whether a symbol is a legal value.
+
+ /** The symbols of all non-synthetic, non-private members of `site`
+ * that are of the same type/term kind as the missing member.
+ */
+ def memberCandidates(site: Type, isType: Boolean, isApplied: Boolean)(using Context): collection.Set[Symbol] =
+ for
+ bc <- site.widen.baseClasses.toSet
+ sym <- bc.info.decls.filter(sym =>
+ kindOK(sym, isType, isApplied)
+ && !sym.isConstructor
+ && !sym.flagsUNSAFE.isOneOf(Synthetic | Private))
+ yield sym
+
+ case class Binding(name: Name, sym: Symbol, site: Type)
+
+ /** The name, symbol, and prefix type of all non-synthetic declarations that are
+ * defined or imported in some enclosing scope and that are of the same type/term
+ * kind as the missing member.
+ */
+ def inScopeCandidates(isType: Boolean, isApplied: Boolean, rootImportOK: Boolean)(using Context): collection.Set[Binding] =
+ val acc = collection.mutable.HashSet[Binding]()
+ def nextInteresting(ctx: Context): Context =
+ if ctx.outer.isImportContext
+ || ctx.outer.scope != ctx.scope
+ || ctx.outer.owner.isClass && ctx.outer.owner != ctx.owner
+ || (ctx.outer eq NoContext)
+ then ctx.outer
+ else nextInteresting(ctx.outer)
+
+ def recur()(using Context): Unit =
+ if ctx eq NoContext then
+ () // done
+ else if ctx.isImportContext then
+ val imp = ctx.importInfo.nn
+ if imp.isRootImport && !rootImportOK then
+ () // done
+ else imp.importSym.info match
+ case ImportType(expr) =>
+ val candidates = memberCandidates(expr.tpe, isType, isApplied)
+ if imp.isWildcardImport then
+ for cand <- candidates if !imp.excluded.contains(cand.name.toTermName) do
+ acc += Binding(cand.name, cand, expr.tpe)
+ for sel <- imp.selectors do
+ val selStr = sel.name.show
+ if sel.name == sel.rename then
+ for cand <- candidates if cand.name.toTermName.show == selStr do
+ acc += Binding(cand.name, cand, expr.tpe)
+ else if !sel.isUnimport then
+ for cand <- candidates if cand.name.toTermName.show == selStr do
+ acc += Binding(sel.rename.likeSpaced(cand.name), cand, expr.tpe)
+ case _ =>
+ recur()(using nextInteresting(ctx))
+ else
+ if ctx.owner.isClass then
+ for sym <- memberCandidates(ctx.owner.typeRef, isType, isApplied) do
+ acc += Binding(sym.name, sym, ctx.owner.thisType)
+ else
+ ctx.scope.foreach: sym =>
+ if kindOK(sym, isType, isApplied)
+ && !sym.isConstructor
+ && !sym.flagsUNSAFE.is(Synthetic)
+ then acc += Binding(sym.name, sym, NoPrefix)
+ recur()(using nextInteresting(ctx))
+ end recur
+
+ recur()
+ acc
+ end inScopeCandidates
+
+ /** The Levenshtein distance between two strings */
+ def distance(s1: String, s2: String): Int =
+ val dist = Array.ofDim[Int](s2.length + 1, s1.length + 1)
+ for
+ j <- 0 to s2.length
+ i <- 0 to s1.length
+ do
+ dist(j)(i) =
+ if j == 0 then i
+ else if i == 0 then j
+ else if s2(j - 1) == s1(i - 1) then dist(j - 1)(i - 1)
+ else (dist(j - 1)(i) min dist(j)(i - 1) min dist(j - 1)(i - 1)) + 1
+ dist(s2.length)(s1.length)
+
+ /** List of possible candidate names with their Levenshtein distances
+ * to the name `str` of the missing member.
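+ * For example, distance("printl", "println") == 1, since a single insertion
+ * of the missing "n" turns the first string into the second; a misspelt
+ * `printl` can therefore be answered with the candidate `println`.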
+ * @param maxDist Maximal number of differences to be considered for a hint + * A distance qualifies if it is at most `maxDist`, shorter than + * the lengths of both the candidate name and the missing member name + * and not greater than half the average of those lengths. + */ + extension [S <: Symbol | Binding](candidates: collection.Set[S]) + def closestTo(str: String, maxDist: Int = 3)(using Context): List[(Int, S)] = + def nameStr(cand: S): String = cand match + case sym: Symbol => sym.name.show + case bdg: Binding => bdg.name.show + candidates + .toList + .map(cand => (distance(nameStr(cand), str), cand)) + .filter((d, cand) => + d <= maxDist + && d * 4 <= str.length + nameStr(cand).length + && d < str.length + && d < nameStr(cand).length) + .sortBy((d, cand) => (d, nameStr(cand))) // sort by distance first, alphabetically second + + def didYouMean(candidates: List[(Int, Binding)], proto: Type, prefix: String)(using Context): String = + + def qualifies(b: Binding)(using Context): Boolean = + try + val valueOK = proto match + case _: SelectionProto => true + case _ => !b.sym.isNoValue + val accessOK = b.sym.isAccessibleFrom(b.site) + valueOK && accessOK + catch case ex: Exception => false + // exceptions might arise when completing (e.g. malformed class file, or cyclic reference) + + def showName(name: Name, sym: Symbol)(using Context): String = + if sym.is(ModuleClass) then s"${name.show}.type" + else name.show + + def alternatives(distance: Int, candidates: List[(Int, Binding)]): List[Binding] = candidates match + case (d, b) :: rest if d == distance => + if qualifies(b) then b :: alternatives(distance, rest) else alternatives(distance, rest) + case _ => + Nil + + def recur(candidates: List[(Int, Binding)]): String = candidates match + case (d, b) :: rest + if d != 0 || b.sym.is(ModuleClass) => // Avoid repeating the same name in "did you mean" + if qualifies(b) then + def hint(b: Binding) = prefix ++ showName(b.name, b.sym) + val alts = alternatives(d, rest).map(hint).take(3) + val suffix = if alts.isEmpty then "" else alts.mkString(" or perhaps ", " or ", "?") + s" - did you mean ${hint(b)}?$suffix" + else + recur(rest) + case _ => "" + + recur(candidates) + end didYouMean +end DidYouMean \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala index fc679210db17..f5e7f9d44f56 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala @@ -135,7 +135,7 @@ enum ErrorMessageID(val isActive: Boolean = true) extends java.lang.Enum[ErrorMe case JavaSymbolIsNotAValueID // errorNumber: 119 case DoubleDefinitionID // errorNumber: 120 case MatchCaseOnlyNullWarningID // errorNumber: 121 - case ImportRenamedTwiceID // errorNumber: 122 + case ImportedTwiceID // errorNumber: 122 case TypeTestAlwaysDivergesID // errorNumber: 123 case TermMemberNeedsNeedsResultTypeForImplicitSearchID // errorNumber: 124 case ClassCannotExtendEnumID // errorNumber: 125 @@ -195,6 +195,17 @@ enum ErrorMessageID(val isActive: Boolean = true) extends java.lang.Enum[ErrorMe case MatchTypeScrutineeCannotBeHigherKindedID // errorNumber: 179 case AmbiguousExtensionMethodID // errorNumber 180 case UnqualifiedCallToAnyRefMethodID // errorNumber: 181 + case NotConstantID // errorNumber: 182 + case ClosureCannotHaveInternalParameterDependenciesID // errorNumber: 183 + case MatchTypeNoCasesID // errorNumber: 184 + case 
UnimportedAndImportedID // errorNumber: 185 + case ImplausiblePatternWarningID // errorNumber: 186 + case SynchronizedCallOnBoxedClassID // errorNumber: 187 + case VarArgsParamCannotBeGivenID // errorNumber: 188 + case ExtractorNotFoundID // errorNumber: 189 + case PureUnitExpressionID // errorNumber: 190 + case MatchTypeLegacyPatternID // errorNumber: 191 + case UnstableInlineAccessorID // errorNumber: 192 def errorNumber = ordinal - 1 diff --git a/compiler/src/dotty/tools/dotc/reporting/ExploringReporter.scala b/compiler/src/dotty/tools/dotc/reporting/ExploringReporter.scala index a2062bd1b2c7..f469c03764c0 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ExploringReporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ExploringReporter.scala @@ -6,7 +6,7 @@ import scala.language.unsafeNulls import collection.mutable import core.Contexts.Context -import Diagnostic._ +import Diagnostic.* /** A re-usable Reporter used in Contexts#test */ class ExploringReporter extends StoreReporter(null, fromTyperState = false): diff --git a/compiler/src/dotty/tools/dotc/reporting/HideNonSensicalMessages.scala b/compiler/src/dotty/tools/dotc/reporting/HideNonSensicalMessages.scala index 9b6a3c75ba5d..81e17c495d90 100644 --- a/compiler/src/dotty/tools/dotc/reporting/HideNonSensicalMessages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/HideNonSensicalMessages.scala @@ -2,7 +2,7 @@ package dotty.tools package dotc package reporting -import core.Contexts._ +import core.Contexts.* /** * This trait implements `isHidden` so that we avoid reporting non-sensical messages. @@ -13,8 +13,8 @@ trait HideNonSensicalMessages extends Reporter { */ override def isHidden(dia: Diagnostic)(using Context): Boolean = super.isHidden(dia) || { - dia.msg.isNonSensical && - hasErrors && // if there are no errors yet, report even if diagnostic is non-sensical - !ctx.settings.YshowSuppressedErrors.value + hasErrors // if there are no errors yet, report even if diagnostic is non-sensical + && dia.msg.isNonSensical // defer forcing the message by calling hasErrors first + && !ctx.settings.YshowSuppressedErrors.value } } diff --git a/compiler/src/dotty/tools/dotc/reporting/Message.scala b/compiler/src/dotty/tools/dotc/reporting/Message.scala index a1fe6773c1d2..484789a7fe45 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Message.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Message.scala @@ -10,7 +10,6 @@ import printing.Formatting.hl import config.SourceVersion import scala.language.unsafeNulls - import scala.annotation.threadUnsafe /** ## Tips for error message generation @@ -51,6 +50,13 @@ object Message: */ private class Seen(disambiguate: Boolean): + /** The set of lambdas that were opened at some point during printing. */ + private val openedLambdas = new collection.mutable.HashSet[LambdaType] + + /** Register that `tp` was opened during printing. 
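+ * Two parameter references are later given the same superscript only if both
+ * of their binders were opened during printing (see `sameSuperscript` below).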
*/ + def openLambda(tp: LambdaType): Unit = + openedLambdas += tp + val seen = new collection.mutable.HashMap[SeenKey, List[Recorded]]: override def default(key: SeenKey) = Nil @@ -82,15 +88,29 @@ object Message: def followAlias(e1: Recorded): Recorded = e1 match { case e1: Symbol if e1.isAliasType => val underlying = e1.typeRef.underlyingClassRef(refinementOK = false).typeSymbol - if (underlying.name == e1.name) underlying else e1 + if (underlying.name == e1.name) underlying else e1.namedType.dealias.typeSymbol case _ => e1 } val key = SeenKey(str, isType) val existing = seen(key) lazy val dealiased = followAlias(entry) - // alts: The alternatives in `existing` that are equal, or follow (an alias of) `entry` - var alts = existing.dropWhile(alt => dealiased ne followAlias(alt)) + /** All lambda parameters with the same name are given the same superscript as + * long as their corresponding binder has been printed. + * See tests/neg/lambda-rename.scala for test cases. + */ + def sameSuperscript(cur: Recorded, existing: Recorded) = + (cur eq existing) || + (cur, existing).match + case (cur: ParamRef, existing: ParamRef) => + (cur.paramName eq existing.paramName) && + openedLambdas.contains(cur.binder) && + openedLambdas.contains(existing.binder) + case _ => + false + + // The length of alts corresponds to the number of superscripts we need to print. + var alts = existing.dropWhile(alt => !sameSuperscript(dealiased, followAlias(alt))) if alts.isEmpty then alts = entry :: existing seen(key) = alts @@ -208,10 +228,20 @@ object Message: case tp: SkolemType => seen.record(tp.repr.toString, isType = true, tp) case _ => super.toTextRef(tp) + override def toTextMethodAsFunction(info: Type, isPure: Boolean, refs: Text): Text = + info match + case info: LambdaType => + seen.openLambda(info) + case _ => + super.toTextMethodAsFunction(info, isPure, refs) + override def toText(tp: Type): Text = if !tp.exists || tp.isErroneous then seen.nonSensical = true tp match case tp: TypeRef if useSourceModule(tp.symbol) => Str("object ") ~ super.toText(tp) + case tp: LambdaType => + seen.openLambda(tp) + super.toText(tp) case _ => super.toText(tp) override def toText(sym: Symbol): Text = @@ -378,12 +408,17 @@ abstract class Message(val errorId: ErrorMessageID)(using Context) { self => override def canExplain = true /** Override with `true` for messages that should always be shown even if their - * position overlaps another messsage of a different class. On the other hand + * position overlaps another message of a different class. On the other hand * multiple messages of the same class with overlapping positions will lead * to only a single message of that class to be issued. */ def showAlways = false + /** A list of actions attached to this message to address the issue this + * message represents. 
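+ * For example, `RepeatedModifier` overrides this to offer a quick fix whose
+ * `ActionPatch` deletes the duplicated modifier.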
+ */ + def actions(using Context): List[CodeAction] = List.empty + override def toString = msg } diff --git a/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala b/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala index f53359fb8b19..6881235e3dc1 100644 --- a/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala +++ b/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala @@ -6,11 +6,11 @@ import scala.language.unsafeNulls import java.lang.System.{lineSeparator => EOL} -import core.Contexts._ -import core.Decorators._ +import core.Contexts.* +import core.Decorators.* import printing.Highlighting.{Blue, Red, Yellow} import printing.SyntaxHighlighting -import Diagnostic._ +import Diagnostic.* import util.{ SourcePosition, NoSourcePosition } import util.Chars.{ LF, CR, FF, SU } import scala.annotation.switch @@ -210,7 +210,7 @@ trait MessageRendering { } private def appendFilterHelp(dia: Diagnostic, sb: mutable.StringBuilder): Unit = - import dia._ + import dia.* val hasId = msg.errorId.errorNumber >= 0 val category = dia match { case _: UncheckedWarning => "unchecked" @@ -228,7 +228,7 @@ trait MessageRendering { /** The whole message rendered from `msg` */ def messageAndPos(dia: Diagnostic)(using Context): String = { - import dia._ + import dia.* val pos1 = pos.nonInlined val inlineStack = inlinePosStack(pos).filter(_ != pos1) val maxLineNumber = diff --git a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala index f5aadac27296..ca4114a82cdc 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala @@ -4,11 +4,11 @@ package reporting import scala.language.unsafeNulls -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Mode import dotty.tools.dotc.core.Symbols.{NoSymbol, Symbol} -import dotty.tools.dotc.reporting.Diagnostic._ -import dotty.tools.dotc.reporting.Message._ +import dotty.tools.dotc.reporting.Diagnostic.* +import dotty.tools.dotc.reporting.Message.* import dotty.tools.dotc.util.NoSourcePosition import java.io.{BufferedReader, PrintWriter} @@ -63,7 +63,7 @@ object Reporter { * error messages. */ abstract class Reporter extends interfaces.ReporterResult { - import Reporter._ + import Reporter.* /** Report a diagnostic */ def doReport(dia: Diagnostic)(using Context): Unit @@ -109,9 +109,14 @@ abstract class Reporter extends interfaces.ReporterResult { private var errors: List[Error] = Nil + private var warnings: List[Warning] = Nil + /** All errors reported by this reporter (ignoring outer reporters) */ def allErrors: List[Error] = errors + /** All warnings reported by this reporter (ignoring outer reporters) */ + def allWarnings: List[Warning] = warnings + /** Were sticky errors reported? Overridden in StoreReporter. 
*/ def hasStickyErrors: Boolean = false @@ -149,15 +154,11 @@ abstract class Reporter extends interfaces.ReporterResult { val key = w.enablingOption.name addUnreported(key, 1) case _ => - // conditional warnings that are not enabled are not fatal - val d = dia match - case w: Warning if ctx.settings.XfatalWarnings.value => w.toError - case _ => dia - if !isHidden(d) then // avoid isHidden test for summarized warnings so that message is not forced - markReported(d) - withMode(Mode.Printing)(doReport(d)) - d match { - case _: Warning => _warningCount += 1 + if !isHidden(dia) then // avoid isHidden test for summarized warnings so that message is not forced + dia match { + case w: Warning => + warnings = w :: warnings + _warningCount += 1 case e: Error => errors = e :: errors _errorCount += 1 @@ -166,16 +167,24 @@ abstract class Reporter extends interfaces.ReporterResult { case _: Info => // nothing to do here // match error if d is something else } + markReported(dia) + withMode(Mode.Printing)(doReport(dia)) end issueUnconfigured def issueIfNotSuppressed(dia: Diagnostic)(using Context): Unit = + def toErrorIfFatal(dia: Diagnostic) = dia match + case w: Warning if ctx.settings.silentWarnings.value => dia + case w: ConditionalWarning if w.isSummarizedConditional => dia + case w: Warning if ctx.settings.XfatalWarnings.value => w.toError + case _ => dia + def go() = - import Action._ + import Action.* dia match - case w: Warning => WConf.parsed.action(w) match + case w: Warning => WConf.parsed.action(dia) match case Error => issueUnconfigured(w.toError) - case Warning => issueUnconfigured(w) - case Verbose => issueUnconfigured(w.setVerbose()) + case Warning => issueUnconfigured(toErrorIfFatal(w)) + case Verbose => issueUnconfigured(toErrorIfFatal(w.setVerbose())) case Info => issueUnconfigured(w.toInfo) case Silent => case _ => issueUnconfigured(dia) diff --git a/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala b/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala index 9783a3208a60..aef5f2c5863b 100644 --- a/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala @@ -2,10 +2,10 @@ package dotty.tools package dotc package reporting -import core.Contexts._ +import core.Contexts.* import collection.mutable import config.Printers.typr -import Diagnostic._ +import Diagnostic.* /** This class implements a Reporter that stores all messages * diff --git a/compiler/src/dotty/tools/dotc/reporting/TestReporter.scala b/compiler/src/dotty/tools/dotc/reporting/TestReporter.scala index 8d7204a93fa2..c0b5ffe8e650 100644 --- a/compiler/src/dotty/tools/dotc/reporting/TestReporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/TestReporter.scala @@ -5,7 +5,7 @@ package reporting import scala.language.unsafeNulls import collection.mutable -import Diagnostic._ +import Diagnostic.* /** A re-usable Reporter used in Contexts#test */ class TestingReporter extends StoreReporter(null, fromTyperState = false): diff --git a/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala b/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala index 153212522541..75c698a28ee4 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala @@ -2,7 +2,7 @@ package dotty.tools package dotc package reporting -import core.Contexts._ +import core.Contexts.* import Diagnostic.Error /** diff --git 
a/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala b/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala
index 98fd7da3032a..d8426aa8781e 100644
--- a/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala
@@ -4,7 +4,7 @@ package reporting
import scala.collection.mutable
import util.SourceFile
-import core.Contexts._
+import core.Contexts.*
/** This trait implements `isHidden` so that multiple messages per position
* are suppressed, unless they are of increasing severity. */
@@ -25,14 +25,14 @@ trait UniqueMessagePositions extends Reporter {
|| dia.pos.exists
&& !ctx.settings.YshowSuppressedErrors.value
- && (dia.pos.start to dia.pos.end).exists(pos =>
- positions.get((ctx.source, pos)).exists(_.hides(dia)))
+ && (dia.pos.start to dia.pos.end).exists: offset =>
+ positions.get((ctx.source, offset)).exists(_.hides(dia))
override def markReported(dia: Diagnostic)(using Context): Unit =
if dia.pos.exists then
- for (pos <- dia.pos.start to dia.pos.end)
- positions.get(ctx.source, pos) match
+ for offset <- dia.pos.start to dia.pos.end do
+ positions.get((ctx.source, offset)) match
case Some(dia1) if dia1.hides(dia) =>
- case _ => positions((ctx.source, pos)) = dia
+ case _ => positions((ctx.source, offset)) = dia
super.markReported(dia)
}
diff --git a/compiler/src/dotty/tools/dotc/reporting/WConf.scala b/compiler/src/dotty/tools/dotc/reporting/WConf.scala
index af1a5c0f0f47..cc0a63cb1532 100644
--- a/compiler/src/dotty/tools/dotc/reporting/WConf.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/WConf.scala
@@ -4,7 +4,7 @@ package reporting
import scala.language.unsafeNulls
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Contexts.*
import dotty.tools.dotc.util.SourcePosition
import java.util.regex.PatternSyntaxException
@@ -36,8 +36,8 @@ final case class WConf(confs: List[(List[MessageFilter], Action)]):
}.getOrElse(Action.Warning)
object WConf:
- import Action._
- import MessageFilter._
+ import Action.*
+ import MessageFilter.*
private type Conf = (List[MessageFilter], Action)
@@ -106,7 +106,7 @@ object WConf:
def fromSettings(settings: List[String]): Either[List[String], WConf] =
if (settings.isEmpty) Right(WConf(Nil))
else
- val parsedConfs: List[Either[List[String], (List[MessageFilter], Action)]] = settings.map(conf =>
+ val parsedConfs: List[Either[List[String], (List[MessageFilter], Action)]] = settings.reverse.map(conf =>
val filtersAndAction = conf.split(':')
if filtersAndAction.length != 2 then Left(List("exactly one `:` expected (<filter>&...&<filter>:<action>)"))
else
@@ -117,7 +117,7 @@ object WConf:
else Right(WConf(configs))
class Suppression(val annotPos: SourcePosition, filters: List[MessageFilter], val start: Int, end: Int, val verbose: Boolean):
- private[this] var _used = false
+ private var _used = false
def used: Boolean = _used
def markUsed(): Unit = { _used = true }
diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala
index d205b816214c..4c7cf003cbc9 100644
--- a/compiler/src/dotty/tools/dotc/reporting/messages.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala
@@ -2,33 +2,38 @@ package dotty.tools
package dotc
package reporting
-import core._
-import Contexts._
-import Decorators._, Symbols._, Names._, NameOps._, Types._, Flags._, Phases._
+import core.*
+import Contexts.*
+import Decorators.*, Symbols.*, Names.*, NameOps.*, Types.*, Flags.*, Phases.*
import Denotations.SingleDenotation import SymDenotations.SymDenotation -import NameKinds.WildcardParamName +import NameKinds.{WildcardParamName, ContextFunctionParamName} import parsing.Scanners.Token import parsing.Tokens -import printing.Highlighting._ +import printing.Highlighting.* import printing.Formatting -import ErrorMessageID._ +import ErrorMessageID.* import ast.Trees import config.{Feature, ScalaVersion} import typer.ErrorReporting.{err, matchReductionAddendum, substitutableTypeSymbolsInScope} -import typer.ProtoTypes.ViewProto +import typer.ProtoTypes.{ViewProto, SelectionProto, FunProto} import typer.Implicits.* import typer.Inferencing import scala.util.control.NonFatal import StdNames.nme import printing.Formatting.hl -import ast.Trees._ +import ast.Trees.* import ast.untpd import ast.tpd -import transform.SymUtils._ import scala.util.matching.Regex import java.util.regex.Matcher.quoteReplacement import cc.CaptureSet.IdentityCaptRefMap +import dotty.tools.dotc.rewrites.Rewrites.ActionPatch +import dotty.tools.dotc.util.Spans.Span +import dotty.tools.dotc.util.SourcePosition +import scala.jdk.CollectionConverters.* +import dotty.tools.dotc.util.SourceFile +import DidYouMean.* /** Messages * ======== @@ -132,10 +137,10 @@ extends EmptyCatchOrFinallyBlock(tryBody, EmptyCatchAndFinallyBlockID) { |its body in a block; no exceptions are handled.""" } -class DeprecatedWithOperator()(using Context) +class DeprecatedWithOperator(rewrite: String)(using Context) extends SyntaxMsg(DeprecatedWithOperatorID) { def msg(using Context) = - i"""${hl("with")} as a type operator has been deprecated; use ${hl("&")} instead""" + i"""${hl("with")} as a type operator has been deprecated; use ${hl("&")} instead$rewrite""" def explain(using Context) = i"""|Dotty introduces intersection types - ${hl("&")} types. These replace the |use of the ${hl("with")} keyword. 
There are a few differences in @@ -155,23 +160,30 @@ extends SyntaxMsg(CaseClassMissingParamListID) { class AnonymousFunctionMissingParamType(param: untpd.ValDef, tree: untpd.Function, - pt: Type) + inferredType: Type, + expectedType: Type, + ) (using Context) extends TypeMsg(AnonymousFunctionMissingParamTypeID) { def msg(using Context) = { - val ofFun = + val paramDescription = if param.name.is(WildcardParamName) - || (MethodType.syntheticParamNames(tree.args.length + 1) contains param.name) - then i" of expanded function:\n$tree" + || param.name.is(ContextFunctionParamName) + || MethodType.syntheticParamNames(tree.args.length + 1).contains(param.name) + then i"\nin expanded function:\n $tree" else "" val inferred = - if (pt == WildcardType) "" - else i"\nWhat I could infer was: $pt" + if inferredType == WildcardType then "" + else i"\nWhat I could infer was: $inferredType" + + val expected = + if expectedType == WildcardType then "" + else i"\nExpected type for the whole anonymous function:\n $expectedType" i"""Missing parameter type - | - |I could not infer the type of the parameter ${param.name}$ofFun.$inferred""" + | + |I could not infer the type of the parameter ${param.name}$paramDescription$inferred$expected""" } def explain(using Context) = "" @@ -234,14 +246,29 @@ extends NamingMsg(DuplicateBindID) { } } -class MissingIdent(tree: untpd.Ident, treeKind: String, val name: Name)(using Context) +class MissingIdent(tree: untpd.Ident, treeKind: String, val name: Name, proto: Type)(using Context) extends NotFoundMsg(MissingIdentID) { - def msg(using Context) = i"Not found: $treeKind$name" + def msg(using Context) = + val missing = name.show + val addendum = + didYouMean( + inScopeCandidates(name.isTypeName, isApplied = proto.isInstanceOf[FunProto], rootImportOK = true) + .closestTo(missing), + proto, "") + + i"Not found: $treeKind$name$addendum" def explain(using Context) = { - i"""|The identifier for `$treeKind$name` is not bound, that is, - |no declaration for this identifier can be found. - |That can happen, for example, if `$name` or its declaration has either been - |misspelt or if an import is missing.""" + i"""|Each identifier in Scala needs a matching declaration. There are two kinds of + |identifiers: type identifiers and value identifiers. Value identifiers are introduced + |by `val`, `def`, or `object` declarations. Type identifiers are introduced by `type`, + |`class`, `enum`, or `trait` declarations. + | + |Identifiers refer to matching declarations in their environment, or they can be + |imported from elsewhere. + | + |Possible reasons why no matching declaration was found: + | - The declaration or the use is mis-spelt. + | - An import is missing.""" } } @@ -300,48 +327,13 @@ class TypeMismatch(found: Type, expected: Type, inTree: Option[untpd.Tree], adde end TypeMismatch -class NotAMember(site: Type, val name: Name, selected: String, addendum: => String = "")(using Context) +class NotAMember(site: Type, val name: Name, selected: String, proto: Type, addendum: => String = "")(using Context) extends NotFoundMsg(NotAMemberID), ShowMatchTrace(site) { //println(i"site = $site, decls = ${site.decls}, source = ${site.typeSymbol.sourceFile}") //DEBUG def msg(using Context) = { - import core.Flags._ - val maxDist = 3 // maximal number of differences to be considered for a hint val missing = name.show - // The symbols of all non-synthetic, non-private members of `site` - // that are of the same type/term kind as the missing member. 
- def candidates: Set[Symbol] = - for - bc <- site.widen.baseClasses.toSet - sym <- bc.info.decls.filter(sym => - sym.isType == name.isTypeName - && !sym.isConstructor - && !sym.flagsUNSAFE.isOneOf(Synthetic | Private)) - yield sym - - // Calculate Levenshtein distance - def distance(s1: String, s2: String): Int = - val dist = Array.ofDim[Int](s2.length + 1, s1.length + 1) - for - j <- 0 to s2.length - i <- 0 to s1.length - do - dist(j)(i) = - if j == 0 then i - else if i == 0 then j - else if s2(j - 1) == s1(i - 1) then dist(j - 1)(i - 1) - else (dist(j - 1)(i) min dist(j)(i - 1) min dist(j - 1)(i - 1)) + 1 - dist(s2.length)(s1.length) - - // A list of possible candidate symbols with their Levenstein distances - // to the name of the missing member - def closest: List[(Int, Symbol)] = candidates - .toList - .map(sym => (distance(sym.name.show, missing), sym)) - .filter((d, sym) => d <= maxDist && d < missing.length && d < sym.name.show.length) - .sortBy((d, sym) => (d, sym.name.show)) // sort by distance first, alphabetically second - val enumClause = if ((name eq nme.values) || (name eq nme.valueOf)) && site.classSymbol.companionClass.isEnumClass then val kind = if name eq nme.values then i"${nme.values} array" else i"${nme.valueOf} lookup method" @@ -358,17 +350,18 @@ extends NotFoundMsg(NotAMemberID), ShowMatchTrace(site) { val finalAddendum = if addendum.nonEmpty then prefixEnumClause(addendum) - else closest match - case (d, sym) :: _ => - val siteName = site match - case site: NamedType => site.name.show - case site => i"$site" - val showName = - // Add .type to the name if it is a module - if sym.is(ModuleClass) then s"${sym.name.show}.type" - else sym.name.show - s" - did you mean $siteName.$showName?$enumClause" - case Nil => prefixEnumClause("") + else + val hint = didYouMean( + memberCandidates(site, name.isTypeName, isApplied = proto.isInstanceOf[FunProto]) + .closestTo(missing) + .map((d, sym) => (d, Binding(sym.name, sym, site))), + proto, + prefix = site match + case site: NamedType => i"${site.name}." + case site => i"$site." 
+ )
+ if hint.isEmpty then prefixEnumClause("")
+ else hint ++ enumClause
i"$selected $name is not a member of ${site.widen}$finalAddendum"
}
@@ -493,7 +486,7 @@ extends SyntaxMsg(ObjectMayNotHaveSelfTypeID) {
}
}
-class RepeatedModifier(modifier: String)(implicit ctx:Context)
+class RepeatedModifier(modifier: String, source: SourceFile, span: Span)(implicit ctx: Context)
extends SyntaxMsg(RepeatedModifierID) {
def msg(using Context) = i"""Repeated modifier $modifier"""
@@ -512,6 +505,17 @@ extends SyntaxMsg(RepeatedModifierID) {
|
|"""
}
+
+ override def actions(using Context) =
+ import scala.language.unsafeNulls
+ List(
+ CodeAction(title = s"""Remove repeated modifier: "$modifier"""",
+ description = None,
+ patches = List(
+ ActionPatch(SourcePosition(source, span), "")
+ )
+ )
+ )
}
class InterpolatedStringError()(implicit ctx:Context)
@@ -824,10 +828,13 @@ extends Message(LossyWideningConstantConversionID):
|Write `.to$targetType` instead."""
def explain(using Context) = ""
-class PatternMatchExhaustivity(uncoveredFn: => String, hasMore: Boolean)(using Context)
+class PatternMatchExhaustivity(uncoveredCases: Seq[String], tree: untpd.Match)(using Context)
extends Message(PatternMatchExhaustivityID) {
def kind = MessageKind.PatternMatchExhaustivity
- lazy val uncovered = uncoveredFn
+
+ private val hasMore = uncoveredCases.lengthCompare(6) > 0
+ val uncovered = uncoveredCases.take(6).mkString(", ")
+
def msg(using Context) =
val addendum = if hasMore then "(More unmatched cases are elided)" else ""
i"""|${hl("match")} may not be exhaustive.
@@ -842,11 +849,39 @@ extends Message(PatternMatchExhaustivityID) {
| - If an extractor always return ${hl("Some(...)")}, write ${hl("Some[X]")} for its return type
| - Add a ${hl("case _ => ...")} at the end to match all remaining cases
|"""
+
+ override def actions(using Context) =
+ import scala.language.unsafeNulls
+ val endPos = tree.cases.lastOption.map(_.endPos)
+ .getOrElse(tree.selector.endPos)
+ val startColumn = tree.cases.lastOption
+ .map(_.startPos.startColumn)
+ .getOrElse(tree.selector.startPos.startColumn + 2)
+
+ val patches = List(
+ ActionPatch(
+ srcPos = endPos,
+ replacement = uncoveredCases.map(c => indent(s"case $c => ???", startColumn))
+ .mkString("\n", "\n", "")
+ ),
+ )
+ List(
+ CodeAction(title = s"Insert missing cases (${uncoveredCases.size})",
+ description = None,
+ patches = patches
+ )
+ )
+
+
+ private def indent(text: String, margin: Int): String = {
+ import scala.language.unsafeNulls
+ " " * margin + text
+ }
}
-class UncheckedTypePattern(msgFn: => String)(using Context)
+class UncheckedTypePattern(argType: Type, whyNot: String)(using Context)
extends PatternMatchMsg(UncheckedTypePatternID) {
- def msg(using Context) = msgFn
+ def msg(using Context) = i"the type test for $argType cannot be checked at runtime because $whyNot"
def explain(using Context) =
i"""|Type arguments and type refinements are erased during compile time, thus it's
|impossible to check them at run-time.
@@ -922,66 +957,63 @@ extends SyntaxMsg(IllegalStartOfSimplePatternID) {
def msg(using Context) = "pattern expected"
def explain(using Context) = {
val sipCode =
- """def f(x: Int, y: Int) = x match {
- | case `y` => ...
- |}
- """
+ """def f(x: Int, y: Int) = x match
+ | case `y` => ...""".stripMargin
val constructorPatternsCode =
"""case class Person(name: String, age: Int)
|
- |def test(p: Person) = p match {
- | case Person(name, age) => ...
- |} - """ - val tupplePatternsCode = - """def swap(tuple: (String, Int)): (Int, String) = tuple match { - | case (text, number) => (number, text) - |} - """ + | def test(p: Person) = p match + | case Person(name, age) => ...""".stripMargin + val tuplePatternsCode = + """def swap(tuple: (String, Int)): (Int, String) = tuple match + | case (text, number) => (number, text)""".stripMargin val patternSequencesCode = - """def getSecondValue(list: List[Int]): Int = list match { - | case List(_, second, x:_*) => second - | case _ => 0 - |}""" + """def getSecondValue(list: List[Int]): Int = list match + | case List(_, second, x*) => second + | case _ => 0""".stripMargin i"""|Simple patterns can be divided into several groups: - |- Variable Patterns: ${hl("case x => ...")}. + |- Variable Patterns: ${hl("case x => ...")} or ${hl("case _ => ...")} | It matches any value, and binds the variable name to that value. | A special case is the wild-card pattern _ which is treated as if it was a fresh | variable on each occurrence. | - |- Typed Patterns: ${hl("case x: Int => ...")} or ${hl("case _: Int => ...")}. + |- Typed Patterns: ${hl("case x: Int => ...")} or ${hl("case _: Int => ...")} | This pattern matches any value matched by the specified type; it binds the variable | name to that value. | - |- Literal Patterns: ${hl("case 123 => ...")} or ${hl("case 'A' => ...")}. + |- Given Patterns: ${hl("case given ExecutionContext => ...")} + | This pattern matches any value matched by the specified type; it binds a ${hl("given")} + | instance with the same type to that value. + | + |- Literal Patterns: ${hl("case 123 => ...")} or ${hl("case 'A' => ...")} | This type of pattern matches any value that is equal to the specified literal. | |- Stable Identifier Patterns: | - | $sipCode + | ${hl(sipCode)} | | the match succeeds only if the x argument and the y argument of f are equal. | |- Constructor Patterns: | - | $constructorPatternsCode + | ${hl(constructorPatternsCode)} | | The pattern binds all object's fields to the variable names (name and age, in this | case). | |- Tuple Patterns: | - | $tupplePatternsCode + | ${hl(tuplePatternsCode)} | | Calling: | - | ${hl("""swap(("Luftballons", 99)""")} + | ${hl("""swap(("Luftballons", 99))""")} | | would give ${hl("""(99, "Luftballons")""")} as a result. 
| |- Pattern Sequences: | - | $patternSequencesCode + | ${hl(patternSequencesCode)} | | Calling: | @@ -1139,7 +1171,7 @@ extends ReferenceMsg(ForwardReferenceExtendsOverDefinitionID) { |""" } -class ExpectedTokenButFound(expected: Token, found: Token)(using Context) +class ExpectedTokenButFound(expected: Token, found: Token, prefix: String = "")(using Context) extends SyntaxMsg(ExpectedTokenButFoundID) { private def foundText = Tokens.showToken(found) @@ -1148,7 +1180,7 @@ extends SyntaxMsg(ExpectedTokenButFoundID) { val expectedText = if (Tokens.isIdentifier(expected)) "an identifier" else Tokens.showToken(expected) - i"""${expectedText} expected, but ${foundText} found""" + i"""$prefix$expectedText expected, but $foundText found""" def explain(using Context) = if (Tokens.isIdentifier(expected) && Tokens.isKeyword(found)) @@ -1303,6 +1335,14 @@ extends SyntaxMsg(VarArgsParamMustComeLastID) { |""" } +class VarArgsParamCannotBeGiven(isGiven: Boolean)(using Context) +extends SyntaxMsg(VarArgsParamCannotBeGivenID) { + def msg(using Context) = i"repeated parameters are not allowed in a ${if isGiven then "using" else "implicit"} clause" + def explain(using Context) = + "It is not possible to define a given with a repeated parameter type. This hypothetical given parameter could always be satisfied by providing 0 arguments, which defeats the purpose of a given argument." +} + + import typer.Typer.BindingPrec class ConstrProxyShadows(proxy: TermRef, shadowed: Type, shadowedIsApply: Boolean)(using Context) @@ -1336,7 +1376,8 @@ class ConstrProxyShadows(proxy: TermRef, shadowed: Type, shadowedIsApply: Boolea |or use a full prefix for ${shadowed.termSymbol.name} if you mean the latter.""" end ConstrProxyShadows -class AmbiguousReference(name: Name, newPrec: BindingPrec, prevPrec: BindingPrec, prevCtx: Context)(using Context) +class AmbiguousReference( + name: Name, newPrec: BindingPrec, prevPrec: BindingPrec, prevCtx: Context, isExtension: => Boolean = false)(using Context) extends ReferenceMsg(AmbiguousReferenceID), NoDisambiguation { /** A string which explains how something was bound; Depending on `prec` this is either @@ -1358,10 +1399,17 @@ class AmbiguousReference(name: Name, newPrec: BindingPrec, prevPrec: BindingPrec i"""$howVisible$qualifier in ${whereFound.owner}""" } + def importHint = + if (newPrec == BindingPrec.NamedImport || newPrec == BindingPrec.WildImport) + && prevPrec == newPrec + && isExtension + then i"\n\n Hint: This error may arise if extension method `$name` is called as a normal method." + else "" + def msg(using Context) = i"""|Reference to $name is ambiguous. |It is both ${bindingString(newPrec, ctx)} - |and ${bindingString(prevPrec, prevCtx, " subsequently")}""" + |and ${bindingString(prevPrec, prevCtx, " subsequently")}$importHint""" def explain(using Context) = val precedent = @@ -1619,10 +1667,15 @@ class CannotExtendAnyVal(sym: Symbol)(using Context) extends SyntaxMsg(CannotExtendAnyValID) { def msg(using Context) = i"""$sym cannot extend ${hl("AnyVal")}""" def explain(using Context) = - i"""Only classes (not traits) are allowed to extend ${hl("AnyVal")}, but traits may extend - |${hl("Any")} to become ${Green("\"universal traits\"")} which may only have ${hl("def")} members. - |Universal traits can be mixed into classes that extend ${hl("AnyVal")}. 
- |""" + if sym.is(Trait) then + i"""Only classes (not traits) are allowed to extend ${hl("AnyVal")}, but traits may extend + |${hl("Any")} to become ${Green("\"universal traits\"")} which may only have ${hl("def")} members. + |Universal traits can be mixed into classes that extend ${hl("AnyVal")}. + |""" + else if sym.is(Module) then + i"""Only classes (not objects) are allowed to extend ${hl("AnyVal")}. + |""" + else "" } class CannotExtendJavaEnum(sym: Symbol)(using Context) @@ -1645,7 +1698,7 @@ class JavaEnumParentArgs(parent: Type)(using Context) class CannotHaveSameNameAs(sym: Symbol, cls: Symbol, reason: CannotHaveSameNameAs.Reason)(using Context) extends NamingMsg(CannotHaveSameNameAsID) { - import CannotHaveSameNameAs._ + import CannotHaveSameNameAs.* def reasonMessage(using Context): String = reason match { case CannotBeOverridden => "class definitions cannot be overridden" case DefinedInSelf(self) => @@ -1739,10 +1792,20 @@ class NotAPath(tp: Type, usage: String)(using Context) extends TypeMsg(NotAPathI | - a reference to `this`, or | - a selection of an immutable path with an immutable value.""" -class WrongNumberOfParameters(expected: Int)(using Context) +class WrongNumberOfParameters(tree: untpd.Tree, foundCount: Int, pt: Type, expectedCount: Int)(using Context) extends SyntaxMsg(WrongNumberOfParametersID) { - def msg(using Context) = s"Wrong number of parameters, expected: $expected" - def explain(using Context) = "" + def msg(using Context) = s"Wrong number of parameters, expected: $expectedCount" + def explain(using Context) = + val ending = if foundCount == 1 then "" else "s" + i"""The function literal + | + | $tree + | + |has $foundCount parameter$ending. But the expected type + | + | $pt + | + |requires a function with $expectedCount parameters.""" } class DuplicatePrivateProtectedQualifier()(using Context) @@ -1846,15 +1909,28 @@ class FailureToEliminateExistential(tp: Type, tp1: Type, tp2: Type, boundSyms: L |are only approximated in a best-effort way.""" } -class OnlyFunctionsCanBeFollowedByUnderscore(tp: Type)(using Context) +class OnlyFunctionsCanBeFollowedByUnderscore(tp: Type, tree: untpd.PostfixOp)(using Context) extends SyntaxMsg(OnlyFunctionsCanBeFollowedByUnderscoreID) { def msg(using Context) = i"Only function types can be followed by ${hl("_")} but the current expression has type $tp" def explain(using Context) = i"""The syntax ${hl("x _")} is no longer supported if ${hl("x")} is not a function. |To convert to a function value, you need to explicitly write ${hl("() => x")}""" + + override def actions(using Context) = + import scala.language.unsafeNulls + val untpd.PostfixOp(qual, Ident(nme.WILDCARD)) = tree: @unchecked + List( + CodeAction(title = "Rewrite to function value", + description = None, + patches = List( + ActionPatch(SourcePosition(tree.source, Span(tree.span.start)), "(() => "), + ActionPatch(SourcePosition(tree.source, Span(qual.span.end, tree.span.end)), ")") + ) + ) + ) } -class MissingEmptyArgumentList(method: String)(using Context) +class MissingEmptyArgumentList(method: String, tree: tpd.Tree)(using Context) extends SyntaxMsg(MissingEmptyArgumentListID) { def msg(using Context) = i"$method must be called with ${hl("()")} argument" def explain(using Context) = { @@ -1869,6 +1945,17 @@ class MissingEmptyArgumentList(method: String)(using Context) |In Dotty, this idiom is an error. The application syntax has to follow exactly the parameter syntax. 
|Excluded from this rule are methods that are defined in Java or that override methods defined in Java.""" } + + override def actions(using Context) = + import scala.language.unsafeNulls + List( + CodeAction(title = "Insert ()", + description = None, + patches = List( + ActionPatch(SourcePosition(tree.source, tree.span.endPos), "()"), + ) + ) + ) } class DuplicateNamedTypeParameter(name: Name)(using Context) @@ -2180,7 +2267,7 @@ extends NamingMsg(DoubleDefinitionID) { def erasedType = if ctx.erasedTypes then i" ${decl.info}" else "" def details(using Context): String = if (decl.isRealMethod && previousDecl.isRealMethod) { - import Signature.MatchDegree._ + import Signature.MatchDegree.* // compare the signatures when both symbols represent methods decl.signature.matchDegree(previousDecl.signature) match { @@ -2234,8 +2321,15 @@ extends NamingMsg(DoubleDefinitionID) { def explain(using Context) = "" } -class ImportRenamedTwice(ident: untpd.Ident)(using Context) extends SyntaxMsg(ImportRenamedTwiceID) { - def msg(using Context) = s"${ident.show} is renamed twice on the same import line." +class ImportedTwice(sel: Name)(using Context) extends SyntaxMsg(ImportedTwiceID) { + def msg(using Context) = s"${sel.show} is imported twice on the same import line." + def explain(using Context) = "" +} + +class UnimportedAndImported(sel: Name, isImport: Boolean)(using Context) extends SyntaxMsg(UnimportedAndImportedID) { + def msg(using Context) = + val otherStr = if isImport then "and imported" else "twice" + s"${sel.show} is unimported $otherStr on the same import line." def explain(using Context) = "" } @@ -2260,7 +2354,7 @@ class ClassCannotExtendEnum(cls: Symbol, parent: Symbol)(using Context) extends def explain(using Context) = "" } -class NotAnExtractor(tree: untpd.Tree)(using Context) extends SyntaxMsg(NotAnExtractorID) { +class NotAnExtractor(tree: untpd.Tree)(using Context) extends PatternMatchMsg(NotAnExtractorID) { def msg(using Context) = i"$tree cannot be used as an extractor in a pattern because it lacks an unapply or unapplySeq method" def explain(using Context) = i"""|An ${hl("unapply")} method should be defined in an ${hl("object")} as follow: @@ -2273,6 +2367,24 @@ class NotAnExtractor(tree: untpd.Tree)(using Context) extends SyntaxMsg(NotAnExt |This mechanism is used for instance in pattern ${hl("case List(x1, ..., xn)")}""" } +class ExtractorNotFound(val name: Name)(using Context) extends NotFoundMsg(ExtractorNotFoundID): + def msg(using Context) = i"no pattern match extractor named $name was found" + def explain(using Context) = + i"""An application $name(...) in a pattern can refer to an extractor + |which defines an unapply or unapplySeq method. Example: + | + | object split: + | def unapply(x: String) = + | val (leading, trailing) = x.splitAt(x.length / 2) + | Some((leading, trailing)) + | + | val split(fst, snd) = "HiHo" + | + |The extractor pattern `split(fst, snd)` defines `fst` as the first half "Hi" and + |`snd` as the second half "Ho" of the right hand side "HiHo". Case classes and + |enum cases implicitly define extractors with the name of the class or enum case. 
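+ |For example, `case class Pair(a: Int, b: Int)` implicitly defines the extractor
+ |`Pair`, so the pattern `case Pair(x, y) =>` can be used directly.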
+ |Here, no extractor named $name was found, so the pattern could not be typed.""" + class MemberWithSameNameAsStatic()(using Context) extends SyntaxMsg(MemberWithSameNameAsStaticID) { def msg(using Context) = i"Companion classes cannot define members with same name as a ${hl("@static")} member" @@ -2282,12 +2394,22 @@ class MemberWithSameNameAsStatic()(using Context) class PureExpressionInStatementPosition(stat: untpd.Tree, val exprOwner: Symbol)(using Context) extends Message(PureExpressionInStatementPositionID) { def kind = MessageKind.PotentialIssue - def msg(using Context) = "A pure expression does nothing in statement position; you may be omitting necessary parentheses" + def msg(using Context) = "A pure expression does nothing in statement position" def explain(using Context) = i"""The pure expression $stat doesn't have any side effect and its result is not assigned elsewhere. |It can be removed without changing the semantics of the program. This may indicate an error.""" } +class PureUnitExpression(stat: untpd.Tree, tpe: Type)(using Context) + extends Message(PureUnitExpressionID) { + def kind = MessageKind.PotentialIssue + def msg(using Context) = i"Discarded non-Unit value of type ${tpe.widen}. You may want to use `()`." + def explain(using Context) = + i"""As this expression is not of type Unit, it is desugared into `{ $stat; () }`. + |Here the `$stat` expression is a pure statement that can be discarded. + |Therefore the expression is effectively equivalent to `()`.""" +} + class UnqualifiedCallToAnyRefMethod(stat: untpd.Tree, method: Symbol)(using Context) extends Message(UnqualifiedCallToAnyRefMethodID) { def kind = MessageKind.PotentialIssue @@ -2298,6 +2420,15 @@ class UnqualifiedCallToAnyRefMethod(stat: untpd.Tree, method: Symbol)(using Cont |you intended.""" } +class SynchronizedCallOnBoxedClass(stat: tpd.Tree)(using Context) + extends Message(SynchronizedCallOnBoxedClassID) { + def kind = MessageKind.PotentialIssue + def msg(using Context) = i"Suspicious ${hl("synchronized")} call on boxed class" + def explain(using Context) = + i"""|You called the ${hl("synchronized")} method on a boxed primitive. This might not be what + |you intended.""" +} + class TraitCompanionWithMutableStatic()(using Context) extends SyntaxMsg(TraitCompanionWithMutableStaticID) { def msg(using Context) = i"Companion of traits cannot define mutable @static fields" @@ -2516,7 +2647,7 @@ class ExtensionCanOnlyHaveDefs(mdef: untpd.Tree)(using Context) |""" } -class UnexpectedPatternForSummonFrom(tree: Tree[_])(using Context) +class UnexpectedPatternForSummonFrom(tree: Tree[?])(using Context) extends SyntaxMsg(UnexpectedPatternForSummonFromID) { def msg(using Context) = i"Unexpected pattern for summonFrom. 
Expected ${hl("`x: T`")} or ${hl("`_`")}" def explain(using Context) = @@ -2544,10 +2675,10 @@ class AnonymousInstanceCannotBeEmpty(impl: untpd.Template)(using Context) |""" } -class ModifierNotAllowedForDefinition(flag: Flag)(using Context) +class ModifierNotAllowedForDefinition(flag: Flag, explanation: String = "")(using Context) extends SyntaxMsg(ModifierNotAllowedForDefinitionID) { def msg(using Context) = i"Modifier ${hl(flag.flagsString)} is not allowed for this definition" - def explain(using Context) = "" + def explain(using Context) = explanation } class RedundantModifier(flag: Flag)(using Context) @@ -2624,6 +2755,13 @@ extends TypeMsg(NotClassTypeID), ShowMatchTrace(tp): def msg(using Context) = i"$tp is not a class type" def explain(using Context) = "" +class NotConstant(suffix: String, tp: Type)(using Context) +extends TypeMsg(NotConstantID), ShowMatchTrace(tp): + def msg(using Context) = + i"$tp is not a constant type" + + (if suffix.isEmpty then "" else i"; $suffix") + def explain(using Context) = "" + class MissingImplicitArgument( arg: tpd.Tree, pt: Type, @@ -2841,10 +2979,17 @@ class MissingImplicitArgument( i"The following implicits in scope can be implicitly converted to ${pt.show}:" + ignoredConvertibleImplicits.map { imp => s"\n- ${imp.symbol.showDcl}"}.mkString ) + def importSuggestionAddendum: String = + arg.tpe match + // If the failure was caused by an underlying NoMatchingImplicits, compute the addendum for its expected type + case noMatching: NoMatchingImplicits => // FIXME also handle SynthesisFailure + ctx.typer.importSuggestionAddendum(noMatching.expectedType) + case _ => + ctx.typer.importSuggestionAddendum(pt) super.msgPostscript ++ ignoredInstanceNormalImport.map(hiddenImplicitNote) .orElse(noChainConversionsNote(ignoredConvertibleImplicits)) - .getOrElse(ctx.typer.importSuggestionAddendum(pt)) + .getOrElse(importSuggestionAddendum) def explain(using Context) = userDefinedImplicitNotFoundMessage(explain = true) .getOrElse("") @@ -2863,9 +3008,26 @@ extends ReferenceMsg(CannotBeAccessedID): i"${if (sym.owner == pre.typeSymbol) sym.show else sym.showLocated} cannot" case _ => i"none of the overloaded alternatives named $name can" - val where = if (ctx.owner.exists) s" from ${ctx.owner.enclosingClass}" else "" + val where = if (ctx.owner.exists) i" from ${ctx.owner.enclosingClass}" else "" val whyNot = new StringBuffer - alts.foreach(_.isAccessibleFrom(pre, superAccess, whyNot)) + for alt <- alts do + val cls = alt.owner.enclosingSubClass + val owner = if cls.exists then cls else alt.owner + val location: String = + if alt.is(Protected) then + if alt.privateWithin.exists && alt.privateWithin != owner then + if owner.is(Final) then alt.privateWithin.showLocated + else alt.privateWithin.showLocated + ", or " + owner.showLocated + " or one of its subclasses" + else + if owner.is(Final) then owner.showLocated + else owner.showLocated + " or one of its subclasses" + else + alt.privateWithin.orElse(owner).showLocated + val accessMod = if alt.is(Protected) then "protected" else "private" + val within = if alt.privateWithin.exists then i"[${alt.privateWithin.name}]" + else "" + whyNot.append(i""" + | $accessMod$within $alt can only be accessed from $location.""") i"$whatCanNot be accessed as a member of $pre$where.$whyNot" def explain(using Context) = "" @@ -2902,7 +3064,67 @@ class UnusedNonUnitValue(tp: Type)(using Context) def msg(using Context) = i"unused value of type $tp" def explain(using Context) = "" +class MatchTypeNoCases(casesText: String)(using Context) 
extends TypeMsg(MatchTypeNoCasesID): + def msg(using Context) = i"Match type reduction $casesText" + def explain(using Context) = "" + class MatchTypeScrutineeCannotBeHigherKinded(tp: Type)(using Context) extends TypeMsg(MatchTypeScrutineeCannotBeHigherKindedID) : def msg(using Context) = i"the scrutinee of a match type cannot be higher-kinded" def explain(using Context) = "" + +class MatchTypeLegacyPattern(errorText: String)(using Context) extends TypeMsg(MatchTypeLegacyPatternID): + def msg(using Context) = errorText + def explain(using Context) = "" + +class ClosureCannotHaveInternalParameterDependencies(mt: Type)(using Context) + extends TypeMsg(ClosureCannotHaveInternalParameterDependenciesID): + def msg(using Context) = + i"""cannot turn method type $mt into closure + |because it has internal parameter dependencies""" + def explain(using Context) = "" + +class ImplausiblePatternWarning(pat: tpd.Tree, selType: Type)(using Context) + extends TypeMsg(ImplausiblePatternWarningID): + def msg(using Context) = + i"""|Implausible pattern: + |$pat could match selector of type $selType + |only if there is an `equals` method identifying elements of the two types.""" + def explain(using Context) = "" + +class UnstableInlineAccessor(accessed: Symbol, accessorTree: tpd.Tree)(using Context) + extends Message(UnstableInlineAccessorID) { + def kind = MessageKind.Compatibility + + def msg(using Context) = + i"""Unstable inline accessor ${accessor.name} was generated in $where.""" + + def explain(using Context) = + i"""Access to non-public $accessed causes the automatic generation of an accessor. + |This accessor is not stable, its name may change or it may disappear + |if not needed in a future version. + | + |To make sure that the inlined code is binary compatible you must make sure that + |$accessed is public in the binary API. + | * Option 1: Annotate $accessed with @publicInBinary + | * Option 2: Make $accessed public + | + |This change may break binary compatibility if a previous version of this + |library was compiled with generated accessors. Binary compatibility should + |be checked using MiMa. If binary compatibility is broken, you should add the + |old accessor explicitly in the source code. 
The following code should be + |added to $where: + | @publicInBinary private[$within] ${accessorTree.show} + |""" + + private def accessor = accessorTree.symbol + + private def where = + if accessor.owner.name.isPackageObjectName then s"package ${within}" + else if accessor.owner.is(Module) then s"object $within" + else s"class $within" + + private def within = + if accessor.owner.name.isPackageObjectName then accessor.owner.owner.name.stripModuleClassSuffix + else accessor.owner.name.stripModuleClassSuffix +} diff --git a/compiler/src/dotty/tools/dotc/reporting/trace.scala b/compiler/src/dotty/tools/dotc/reporting/trace.scala index 8e8d3efb8b40..fbbc3d990969 100644 --- a/compiler/src/dotty/tools/dotc/reporting/trace.scala +++ b/compiler/src/dotty/tools/dotc/reporting/trace.scala @@ -106,7 +106,7 @@ trait TraceSyntax: finalize(trailing(res)) res catch - case ex: runtime.NonLocalReturnControl[T] => + case ex: runtime.NonLocalReturnControl[T @unchecked] => finalize(trailing(ex.value)) throw ex case ex: Throwable => diff --git a/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala b/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala index f2dfac88d464..2586ad8604c3 100644 --- a/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala +++ b/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala @@ -3,13 +3,15 @@ package rewrites import util.{SourceFile, Spans} import Spans.Span -import core.Contexts._ +import core.Contexts.* import collection.mutable import scala.annotation.tailrec import dotty.tools.dotc.reporting.Reporter +import dotty.tools.dotc.util.SourcePosition import java.io.OutputStreamWriter import java.nio.charset.StandardCharsets.UTF_8 +import dotty.tools.dotc.reporting.CodeAction /** Handles rewriting of Scala2 files to Dotty */ object Rewrites { @@ -19,6 +21,16 @@ object Rewrites { def delta = replacement.length - (span.end - span.start) } + /** A special type of Patch that contains a full SourcePosition instead of + * just a span. This is useful when it is used by + * [[dotty.tools.dotc.reporting.CodeAction]] or when the patch doesn't + * belong to the same file as the issue it addresses. + * + * @param srcPos The SourcePosition of the patch. + * @param replacement The replacement that should go in that position. + */ + case class ActionPatch(srcPos: SourcePosition, replacement: String) + private class Patches(source: SourceFile) { private[Rewrites] val pbuf = new mutable.ListBuffer[Patch]() @@ -88,12 +100,20 @@ object Rewrites { report.echo(s"[patched file ${source.file.path}]") rewrites.patched(source).writeBack() } + + /** Given a CodeAction, take its patches and apply them. + * + * @param action The CodeAction containing the patches + */ + def applyAction(action: CodeAction)(using Context): Unit = + action.patches.foreach: actionPatch => + patch(actionPatch.srcPos.span, actionPatch.replacement) } /** A completely encapsulated class representing rewrite state, used * as an optional setting.
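To make the new entry point concrete, here is a hedged sketch of how a tool might consume it. `applyFirstAction` and the `actions` list are hypothetical; `Rewrites.applyAction` and `ActionPatch` are the definitions added above, and writing back goes through the existing `Rewrites` machinery:

```scala
import dotty.tools.dotc.core.Contexts.Context
import dotty.tools.dotc.reporting.CodeAction
import dotty.tools.dotc.rewrites.Rewrites

// Hypothetical quick-fix driver: queue the patches of one suggested action.
// Each ActionPatch carries a full SourcePosition, so applyAction only needs
// to take its span when patching the current compilation unit.
def applyFirstAction(actions: List[CodeAction])(using Context): Unit =
  actions.headOption.foreach(Rewrites.applyAction)
```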
*/ class Rewrites { - import Rewrites._ + import Rewrites.* private val patched = new PatchedFiles } diff --git a/compiler/src/dotty/tools/dotc/sbt/APIUtils.scala b/compiler/src/dotty/tools/dotc/sbt/APIUtils.scala index aa98f79c8e3b..07fa2027fbe6 100644 --- a/compiler/src/dotty/tools/dotc/sbt/APIUtils.scala +++ b/compiler/src/dotty/tools/dotc/sbt/APIUtils.scala @@ -3,11 +3,11 @@ package sbt import scala.language.unsafeNulls -import core._ -import Contexts._ -import Flags._ -import Symbols._ -import NameOps._ +import core.* +import Contexts.* +import Flags.* +import Symbols.* +import NameOps.* import xsbti.api import xsbti.api.SafeLazy.strict @@ -24,7 +24,7 @@ object APIUtils { val EmptyType = api.EmptyType.of() } - import Constants._ + import Constants.* /** Registers a dummy class for sbt's incremental compilation. * @@ -35,9 +35,9 @@ object APIUtils { * a dummy empty class can be registered instead, using this method. */ def registerDummyClass(classSym: ClassSymbol)(using Context): Unit = { - if (ctx.sbtCallback != null) { + ctx.withIncCallback { cb => val classLike = emptyClassLike(classSym) - ctx.sbtCallback.api(ctx.compilationUnit.source.file.file, classLike) + cb.api(ctx.compilationUnit.source, classLike) } } diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index f54baeb7256c..dafb44d525e4 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -5,20 +5,19 @@ import scala.language.unsafeNulls import ExtractDependencies.internalError import ast.{Positioned, Trees, tpd} -import core._ -import core.Decorators._ -import Annotations._ -import Contexts._ -import Flags._ -import Phases._ -import Trees._ -import Types._ -import Symbols._ -import Names._ -import NameOps._ +import core.* +import core.Decorators.* +import Annotations.* +import Contexts.* +import Flags.* +import Phases.* +import Trees.* +import Types.* +import Symbols.* +import Names.* +import NameOps.* import inlines.Inlines import transform.ValueClasses -import transform.SymUtils._ import dotty.tools.io.File import java.io.PrintWriter @@ -49,13 +48,15 @@ class ExtractAPI extends Phase { override def description: String = ExtractAPI.description override def isRunnable(using Context): Boolean = { - def forceRun = ctx.settings.YdumpSbtInc.value || ctx.settings.YforceSbtPhases.value - super.isRunnable && (ctx.sbtCallback != null || forceRun) + super.isRunnable && ctx.runZincPhases } // Check no needed. 
Does not transform trees override def isCheckable: Boolean = false + // when `-Yjava-tasty` is set we actually want to run this phase on Java sources + override def skipIfJava(using Context): Boolean = false + // SuperAccessors need to be part of the API (see the scripted test // `trait-super` for an example where this matters), this is only the case // after `PostTyper` (unlike `ExtractDependencies`, the simplication to trees @@ -65,9 +66,9 @@ class ExtractAPI extends Phase { override def run(using Context): Unit = { val unit = ctx.compilationUnit - val sourceFile = unit.source.file - if (ctx.sbtCallback != null) - ctx.sbtCallback.startSource(sourceFile.file) + val sourceFile = unit.source + ctx.withIncCallback: cb => + cb.startSource(sourceFile) val apiTraverser = new ExtractAPICollector val classes = apiTraverser.apiSource(unit.tpdTree) @@ -75,18 +76,17 @@ class ExtractAPI extends Phase { if (ctx.settings.YdumpSbtInc.value) { // Append to existing file that should have been created by ExtractDependencies - val pw = new PrintWriter(File(sourceFile.jpath).changeExtension("inc").toFile + val pw = new PrintWriter(File(sourceFile.file.jpath).changeExtension("inc").toFile .bufferedWriter(append = true), true) try { classes.foreach(source => pw.println(DefaultShowAPI(source))) } finally pw.close() } - if ctx.sbtCallback != null && - !ctx.compilationUnit.suspendedAtInliningPhase // already registered before this unit was suspended - then - classes.foreach(ctx.sbtCallback.api(sourceFile.file, _)) - mainClasses.foreach(ctx.sbtCallback.mainClass(sourceFile.file, _)) + ctx.withIncCallback: cb => + if !ctx.compilationUnit.suspendedAtInliningPhase then // already registered before this unit was suspended + classes.foreach(cb.api(sourceFile, _)) + mainClasses.foreach(cb.mainClass(sourceFile, _)) } } @@ -137,7 +137,7 @@ object ExtractAPI: * http://www.scala-sbt.org/0.13/docs/Understanding-Recompilation.html#Hashing+an+API+representation */ private class ExtractAPICollector(using Context) extends ThunkHolder { - import tpd._ + import tpd.* import xsbti.api /** This cache is necessary for correctness, see the comment about inherited @@ -276,7 +276,7 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { report.error(ex, csym.sourcePos) defn.ObjectType :: Nil } - if (ValueClasses.isDerivedValueClass(csym)) { + if (csym.isDerivedValueClass) { val underlying = ValueClasses.valueClassUnbox(csym).info.finalResultType // The underlying type of a value class should be part of the name hash // of the value class (see the test `value-class-underlying`), this is accomplished @@ -568,7 +568,7 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { case ExprType(resultType) => withMarker(apiType(resultType), byNameMarker) case MatchType(bound, scrut, cases) => - val s = combineApiTypes(apiType(bound) :: apiType(scrut) :: cases.map(apiType): _*) + val s = combineApiTypes(apiType(bound) :: apiType(scrut) :: cases.map(apiType)*) withMarker(s, matchMarker) case ConstantType(constant) => api.Constant.of(apiType(constant.tpe), constant.stringValue) @@ -616,7 +616,7 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { apiType(lo), apiType(hi)) def apiVariance(v: Int): api.Variance = { - import api.Variance._ + import api.Variance.* if (v < 0) Contravariant else if (v > 0) Covariant else Invariant @@ -678,11 +678,16 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { // In the Scala2 ExtractAPI phase we only extract annotations that extend // 
StaticAnnotation, but in Dotty we currently pickle all annotations so we - // extract everything (except annotations missing from the classpath which - // we simply skip over, and inline body annotations which are handled above). + // extract everything, except: + // - annotations missing from the classpath which we simply skip over + // - inline body annotations which are handled above + // - the Child annotation since we already extract children via + // `api.ClassLike#childrenOfSealedClass` and adding this annotation would + // lead to overcompilation when using zinc's + // `IncOptions#useOptimizedSealed`. s.annotations.foreach { annot => val sym = annot.symbol - if sym.exists && sym != defn.BodyAnnot then + if sym.exists && sym != defn.BodyAnnot && sym != defn.ChildAnnot then annots += apiAnnotation(annot) } diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index fe5c8d061c78..a35628dc52e4 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -4,28 +4,32 @@ package sbt import scala.language.unsafeNulls import java.io.File +import java.nio.file.Path import java.util.{Arrays, EnumSet} import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.NameOps._ -import dotty.tools.dotc.core.Names._ -import dotty.tools.dotc.core.Phases._ -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.classpath.FileUtils.{isTasty, hasClassExtension, hasTastyExtension} +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.NameOps.* +import dotty.tools.dotc.core.Names.* +import dotty.tools.dotc.core.Phases.* +import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.core.Denotations.StaleSymbol -import dotty.tools.dotc.core.Types._ -import dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.core.Types.* + import dotty.tools.dotc.util.{SrcPos, NoSourcePosition} import dotty.tools.io -import dotty.tools.io.{AbstractFile, PlainFile, ZipArchive} +import dotty.tools.io.{AbstractFile, PlainFile, ZipArchive, NoAbstractFile} import xsbti.UseScope import xsbti.api.DependencyContext -import xsbti.api.DependencyContext._ +import xsbti.api.DependencyContext.* -import scala.collection.{Set, mutable} +import scala.jdk.CollectionConverters.* +import scala.collection.{Set, mutable} +import scala.compiletime.uninitialized /** This phase sends information on classes' dependencies to sbt via callbacks. * @@ -47,20 +51,22 @@ import scala.collection.{Set, mutable} * @see ExtractAPI */ class ExtractDependencies extends Phase { - import ExtractDependencies._ + import ExtractDependencies.* override def phaseName: String = ExtractDependencies.name override def description: String = ExtractDependencies.description override def isRunnable(using Context): Boolean = { - def forceRun = ctx.settings.YdumpSbtInc.value || ctx.settings.YforceSbtPhases.value - super.isRunnable && (ctx.sbtCallback != null || forceRun) + super.isRunnable && ctx.runZincPhases } // Check no needed. 
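To illustrate the exclusion of `defn.ChildAnnot`: for a sealed hierarchy like the sketch below, Zinc already learns the children through `api.ClassLike#childrenOfSealedClass`, so also extracting the synthesized `@Child` annotations would report them twice and cause overcompilation under `IncOptions#useOptimizedSealed` (illustrative example, not taken from the patch):

```scala
// The compiler attaches an internal @Child annotation to Fruit for each
// subclass below; those annotations are now skipped by the API extractor
// because the children are already part of Fruit's ClassLike representation.
sealed trait Fruit
case class Apple()  extends Fruit
case class Orange() extends Fruit
```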
Does not transform trees override def isCheckable: Boolean = false + // when `-Yjava-tasty` is set we actually want to run this phase on Java sources + override def skipIfJava(using Context): Boolean = false + // This phase should be run directly after `Frontend`, if it is run after // `PostTyper`, some dependencies will be lost because trees get simplified. // See the scripted test `constants` for an example where this matters. @@ -68,12 +74,13 @@ class ExtractDependencies extends Phase { override def run(using Context): Unit = { val unit = ctx.compilationUnit - val collector = new ExtractDependenciesCollector + val rec = unit.depRecorder + val collector = ExtractDependenciesCollector(rec) collector.traverse(unit.tpdTree) if (ctx.settings.YdumpSbtInc.value) { - val deps = collector.dependencies.map(_.toString).toArray[Object] - val names = collector.usedNames.map { case (clazz, names) => s"$clazz: $names" }.toArray[Object] + val deps = rec.foundDeps.iterator.map { case (clazz, found) => s"$clazz: ${found.classesString}" }.toArray[Object] + val names = rec.foundDeps.iterator.map { case (clazz, found) => s"$clazz: ${found.namesString}" }.toArray[Object] Arrays.sort(deps) Arrays.sort(names) @@ -90,67 +97,7 @@ class ExtractDependencies extends Phase { } finally pw.close() } - if (ctx.sbtCallback != null) { - collector.usedNames.foreach { - case (clazz, usedNames) => - val className = classNameAsString(clazz) - usedNames.names.foreach { - case (usedName, scopes) => - ctx.sbtCallback.usedName(className, usedName.toString, scopes) - } - } - - collector.dependencies.foreach(recordDependency) - } - } - - /* - * Handles dependency on given symbol by trying to figure out if represents a term - * that is coming from either source code (not necessarily compiled in this compilation - * run) or from class file and calls respective callback method. - */ - def recordDependency(dep: ClassDependency)(using Context): Unit = { - val fromClassName = classNameAsString(dep.from) - val sourceFile = ctx.compilationUnit.source.file.file - - def binaryDependency(file: File, binaryClassName: String) = - ctx.sbtCallback.binaryDependency(file, binaryClassName, fromClassName, sourceFile, dep.context) - - def processExternalDependency(depFile: AbstractFile, binaryClassName: String) = { - depFile match { - case ze: ZipArchive#Entry => // The dependency comes from a JAR - ze.underlyingSource match - case Some(zip) if zip.file != null => - binaryDependency(zip.file, binaryClassName) - case _ => - case pf: PlainFile => // The dependency comes from a class file - // FIXME: pf.file is null for classfiles coming from the modulepath - // (handled by JrtClassPath) because they cannot be represented as - // java.io.File, since the `binaryDependency` callback must take a - // java.io.File, this means that we cannot record dependencies coming - // from the modulepath. For now this isn't a big deal since we only - // support having the standard Java library on the modulepath. 
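The removed FIXME above is also why the reworked callbacks later in this diff traffic in `java.nio.file.Path` rather than `java.io.File`: module-path classes live in the `jrt:/` filesystem and have no `File` representation at all. A self-contained sketch (JDK 9+):

```scala
import java.net.URI
import java.nio.file.{FileSystems, Files}

@main def jrtDemo =
  // Classes on the module path are served from the jrt:/ virtual filesystem.
  val jrt = FileSystems.getFileSystem(URI.create("jrt:/"))
  val obj = jrt.getPath("/modules/java.base/java/lang/Object.class")
  println(Files.exists(obj)) // true
  // obj.toFile              // would throw UnsupportedOperationException:
                             // there is no java.io.File behind this Path
```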
- if pf.file != null then - binaryDependency(pf.file, binaryClassName) - case _ => - internalError(s"Ignoring dependency $depFile of unknown class ${depFile.getClass}}", dep.from.srcPos) - } - } - - val depFile = dep.to.associatedFile - if (depFile != null) { - // Cannot ignore inheritance relationship coming from the same source (see sbt/zinc#417) - def allowLocal = dep.context == DependencyByInheritance || dep.context == LocalDependencyByInheritance - if (depFile.extension == "class") { - // Dependency is external -- source is undefined - processExternalDependency(depFile, dep.to.binaryClassName) - } else if (allowLocal || depFile.file != sourceFile) { - // We cannot ignore dependencies coming from the same source file because - // the dependency info needs to propagate. See source-dependencies/trait-trait-211. - val toClassName = classNameAsString(dep.to) - ctx.sbtCallback.classDependency(toClassName, fromClassName, dep.context) - } - } + rec.sendToZinc() } } @@ -166,31 +113,6 @@ object ExtractDependencies { report.error(em"Internal error in the incremental compiler while compiling ${ctx.compilationUnit.source}: $msg", pos) } -private case class ClassDependency(from: Symbol, to: Symbol, context: DependencyContext) - -/** An object that maintain the set of used names from within a class */ -private final class UsedNamesInClass { - private val _names = new mutable.HashMap[Name, EnumSet[UseScope]] - def names: collection.Map[Name, EnumSet[UseScope]] = _names - - def update(name: Name, scope: UseScope): Unit = { - val scopes = _names.getOrElseUpdate(name, EnumSet.noneOf(classOf[UseScope])) - scopes.add(scope) - } - - override def toString(): String = { - val builder = new StringBuilder - names.foreach { case (name, scopes) => - builder.append(name.mangledString) - builder.append(" in [") - scopes.forEach(scope => builder.append(scope.toString)) - builder.append("]") - builder.append(", ") - } - builder.toString() - } -} - /** Extract the dependency information of a compilation unit. * * To understand why we track the used names see the section "Name hashing @@ -199,110 +121,18 @@ private final class UsedNamesInClass { * specially, see the subsection "Dependencies introduced by member reference and * inheritance" in the "Name hashing algorithm" section. */ -private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeTraverser => - import tpd._ - - private val _usedNames = new mutable.HashMap[Symbol, UsedNamesInClass] - private val _dependencies = new mutable.HashSet[ClassDependency] - - /** The names used in this class, this does not include names which are only - * defined and not referenced. - */ - def usedNames: collection.Map[Symbol, UsedNamesInClass] = _usedNames - - /** The set of class dependencies from this compilation unit. - */ - def dependencies: Set[ClassDependency] = _dependencies - - /** Top level import dependencies are registered as coming from a first top level - * class/trait/object declared in the compilation unit. If none exists, issue warning. 
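The warning described in this removed block (re-added further down as part of `DependencyRecorder`) fires for compilation units that define nothing at the top level; a minimal file that would trigger it:

```scala
// OnlyImports.scala: valid Scala, but there is no top-level class, trait or
// object to attribute the import dependency to, so the incremental compiler
// warns that its dependency information will be incomplete.
import scala.collection.mutable.ListBuffer
```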
- */ - private var _responsibleForImports: Symbol = _ - private def responsibleForImports(using Context) = { - def firstClassOrModule(tree: Tree) = { - val acc = new TreeAccumulator[Symbol] { - def apply(x: Symbol, t: Tree)(using Context) = - t match { - case typeDef: TypeDef => - typeDef.symbol - case other => - foldOver(x, other) - } - } - acc(NoSymbol, tree) - } - - if (_responsibleForImports == null) { - val tree = ctx.compilationUnit.tpdTree - _responsibleForImports = firstClassOrModule(tree) - if (!_responsibleForImports.exists) - report.warning("""|No class, trait or object is defined in the compilation unit. - |The incremental compiler cannot record the dependency information in such case. - |Some errors like unused import referring to a non-existent class might not be reported. - |""".stripMargin, tree.sourcePos) - } - _responsibleForImports - } - - private var lastOwner: Symbol = _ - private var lastDepSource: Symbol = _ - - /** - * Resolves dependency source (that is, the closest non-local enclosing - * class from a given `ctx.owner` - */ - private def resolveDependencySource(using Context): Symbol = { - def nonLocalEnclosingClass = { - var clazz = ctx.owner.enclosingClass - var owner = clazz - - while (!owner.is(PackageClass)) { - if (owner.isTerm) { - clazz = owner.enclosingClass - owner = clazz - } else { - owner = owner.owner - } - } - clazz - } - - if (lastOwner != ctx.owner) { - lastOwner = ctx.owner - val source = nonLocalEnclosingClass - lastDepSource = if (source.is(PackageClass)) responsibleForImports else source - } - - lastDepSource - } - - private def addUsedName(fromClass: Symbol, name: Name, scope: UseScope): Unit = { - val usedName = _usedNames.getOrElseUpdate(fromClass, new UsedNamesInClass) - usedName.update(name, scope) - } - - private def addUsedName(name: Name, scope: UseScope)(using Context): Unit = { - val fromClass = resolveDependencySource - if (fromClass.exists) { // can happen when visiting imports - assert(fromClass.isClass) - addUsedName(fromClass, name, scope) - } - } +private class ExtractDependenciesCollector(rec: DependencyRecorder) extends tpd.TreeTraverser { thisTreeTraverser => + import tpd.* private def addMemberRefDependency(sym: Symbol)(using Context): Unit = if (!ignoreDependency(sym)) { - val enclOrModuleClass = if (sym.is(ModuleVal)) sym.moduleClass else sym.enclosingClass - assert(enclOrModuleClass.isClass, s"$enclOrModuleClass, $sym") - - val fromClass = resolveDependencySource - if (fromClass.exists) { // can happen when visiting imports - assert(fromClass.isClass) + rec.addUsedName(sym) + // packages have class symbol. Only record them as used names but not dependency + if (!sym.is(Package)) { + val enclOrModuleClass = if (sym.is(ModuleVal)) sym.moduleClass else sym.enclosingClass + assert(enclOrModuleClass.isClass, s"$enclOrModuleClass, $sym") - addUsedName(fromClass, sym.zincMangledName, UseScope.Default) - // packages have class symbol. Only record them as used names but not dependency - if (!sym.is(Package)) { - _dependencies += ClassDependency(fromClass, enclOrModuleClass, DependencyByMemberRef) - } + rec.addClassDependency(enclOrModuleClass, DependencyByMemberRef) } } @@ -310,15 +140,13 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT // If the tpt is empty, this is a non-SAM lambda, so no need to register // an inheritance relationship. 
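An aside on the comment above: the SAM distinction it describes, as a behavioral sketch (illustrative source, not from the patch):

```scala
trait Callback { def run(x: Int): Int }  // a SAM type

val sam: Callback = x => x + 1  // closure with a SAM target type: its tpt is
                                // Callback, so a LocalDependencyByInheritance
                                // on Callback is recorded
val fun = (x: Int) => x + 1     // plain FunctionN lambda: the closure's tpt
                                // is empty, so no inheritance dependency
```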
if !tree.tpt.isEmpty then - val from = resolveDependencySource - _dependencies += ClassDependency(from, tree.tpt.tpe.classSymbol, LocalDependencyByInheritance) + rec.addClassDependency(tree.tpt.tpe.classSymbol, LocalDependencyByInheritance) private def addInheritanceDependencies(tree: Template)(using Context): Unit = if (tree.parents.nonEmpty) { val depContext = depContextOf(tree.symbol.owner) - val from = resolveDependencySource for parent <- tree.parents do - _dependencies += ClassDependency(from, parent.tpe.classSymbol, depContext) + rec.addClassDependency(parent.tpe.classSymbol, depContext) } private def depContextOf(cls: Symbol)(using Context): DependencyContext = @@ -339,7 +167,7 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT /** Traverse the tree of a source file and record the dependencies and used names which - * can be retrieved using `dependencies` and`usedNames`. + * can be retrieved using `foundDeps`. */ override def traverse(tree: Tree)(using Context): Unit = try { tree match { @@ -356,7 +184,7 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT for sel <- selectors if !sel.isWildcard do addImported(sel.name) if sel.rename != sel.name then - addUsedName(sel.rename, UseScope.Default) + rec.addUsedRawName(sel.rename) case exp @ Export(expr, selectors) => val dep = expr.tpe.classSymbol if dep.exists && selectors.exists(_.isWildcard) then @@ -369,8 +197,7 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT // inheritance dependency in the presence of wildcard exports // to ensure all new members of `dep` are forwarded to. val depContext = depContextOf(ctx.owner.lexicallyEnclosingClass) - val from = resolveDependencySource - _dependencies += ClassDependency(from, dep, depContext) + rec.addClassDependency(dep, depContext) case t: TypeTree => addTypeDependency(t.tpe) case ref: RefTree => @@ -384,10 +211,10 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT } tree match { - case Inlined(call, _, _) if !call.isEmpty => + case tree: Inlined if !tree.inlinedFromOuterScope => // The inlined call is normally ignored by TreeTraverser but we need to // record it as a dependency - traverse(call) + traverse(tree.call) case vd: ValDef if vd.symbol.is(ModuleVal) => // Don't visit module val case t: Template if t.symbol.owner.is(ModuleClass) => @@ -404,6 +231,13 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT throw ex } + /**Reused EqHashSet, safe to use as each TypeDependencyTraverser is used atomically + * Avoid cycles by remembering both the types (testcase: + * tests/run/enum-values.scala) and the symbols of named types (testcase: + * tests/pos-java-interop/i13575) we've seen before. + */ + private val scratchSeen = new util.EqHashSet[Symbol | Type](128) + /** Traverse a used type and record all the dependencies we need to keep track * of for incremental recompilation. * @@ -440,17 +274,13 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT private abstract class TypeDependencyTraverser(using Context) extends TypeTraverser() { protected def addDependency(symbol: Symbol): Unit - // Avoid cycles by remembering both the types (testcase: - // tests/run/enum-values.scala) and the symbols of named types (testcase: - // tests/pos-java-interop/i13575) we've seen before. 
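The cycles that the old and new seen-sets guard against arise from ordinary recursive types; an F-bounded sketch of the shape exercised by tests/run/enum-values.scala:

```scala
// Traversing Suit's info reaches Rec[Suit], whose type argument is Suit
// again; without remembering visited types and symbols the TypeTraverser
// would recurse forever.
trait Rec[E <: Rec[E]]
class Suit extends Rec[Suit]
```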
- val seen = new mutable.HashSet[Symbol | Type] - def traverse(tp: Type): Unit = if (!seen.contains(tp)) { - seen += tp + scratchSeen.clear(resetToInitial = false) + + def traverse(tp: Type): Unit = if scratchSeen.add(tp) then { tp match { case tp: NamedType => val sym = tp.symbol - if !seen.contains(sym) && !sym.is(Package) then - seen += sym + if !sym.is(Package) && scratchSeen.add(sym) then addDependency(sym) if !sym.isClass then traverse(tp.info) traverse(tp.prefix) @@ -477,10 +307,289 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT val traverser = new TypeDependencyTraverser { def addDependency(symbol: Symbol) = if (!ignoreDependency(symbol) && symbol.is(Sealed)) { - val usedName = symbol.zincMangledName - addUsedName(usedName, UseScope.PatMatTarget) + rec.addUsedName(symbol, includeSealedChildren = true) } } traverser.traverse(tpe) } } + +/** Record dependencies using `addUsedName`/`addClassDependency` and inform Zinc using `sendToZinc()`. + * + * Note: As an alternative design choice, we could directly call the appropriate + * callback as we record each dependency; that way we wouldn't need to record + * them locally and we could get rid of `sendToZinc()`, but this may be less + * efficient since it would mean calling `classNameAsString` on each call + * to `addUsedName` rather than once per class. + */ +class DependencyRecorder { + import ExtractDependencies.* + + /** A map from a non-local class to the names and classes it uses; this does not include + * names which are only defined and not referenced. + */ + def foundDeps: util.ReadOnlyMap[Symbol, FoundDepsInClass] = _foundDeps + + /** Record a reference to the name of `sym` from the current non-local + * enclosing class. + * + * @param includeSealedChildren See documentation of `addUsedRawName`. + */ + def addUsedName(sym: Symbol, includeSealedChildren: Boolean = false)(using Context): Unit = + addUsedRawName(sym.zincMangledName, includeSealedChildren) + + /** Record a reference to `name` from the current non-local enclosing class (aka, "from class"). + * + * Most of the time, prefer to use `addUsedName` which takes + * care of name mangling. + * + * Zinc will use this information to invalidate the current non-local + * enclosing class if something changes in the set of definitions named + * `name` among the possible dependencies of the from class. + * + * @param includeSealedChildren If true, the addition or removal of children + * to a sealed class called `name` will also + * invalidate the from class. + * Note that this only has an effect if zinc's + * `IncOptions.useOptimizedSealed` is enabled, + * otherwise the addition or removal of children + * always leads to invalidation. + * + * TODO: If the compiler reported to zinc all usages of + * `SymDenotation#{children,sealedDescendants}` (including from macro code), + * we should be able to turn `IncOptions.useOptimizedSealed` on by default + * safely. + */ + def addUsedRawName(name: Name, includeSealedChildren: Boolean = false)(using Context): Unit = { + val fromClass = resolveDependencyFromClass + if (fromClass.exists) { + lastFoundCache.recordName(name, includeSealedChildren) + } + } + + // The two possible values of `UseScope`. To avoid unnecessary allocations, + // we use vals here, but that means we must be careful to never mutate these sets.
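A behavioral sketch of what `includeSealedChildren` buys, assuming Zinc is run with `IncOptions.useOptimizedSealed` enabled (the source below is illustrative):

```scala
sealed trait Fruit
case class Apple() extends Fruit

def show(f: Fruit): String = f match // matching on Fruit records the name
  case Apple() => "apple"            // "Fruit" with UseScope.PatMatTarget
  case _       => "other"

// Adding `case class Pear() extends Fruit` in another file now invalidates
// `show`, because the set of children of the sealed type named "Fruit"
// changed; a mere reference to Fruit (UseScope.Default only) would not be
// invalidated by that change.
```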
+ private val DefaultScopes = EnumSet.of(UseScope.Default) + private val PatMatScopes = EnumSet.of(UseScope.Default, UseScope.PatMatTarget) + + /** An object that maintains the set of used names and class dependencies from within a class */ + final class FoundDepsInClass { + /** Each key corresponds to a name used in the class. To understand the meaning + * of the associated value, see the documentation of parameter `includeSealedChildren` + * of `addUsedRawName`. + */ + private val _names = new util.HashMap[Name, DefaultScopes.type | PatMatScopes.type] + + /** Each key corresponds to a class dependency used in the class. + */ + private val _classes = util.EqHashMap[Symbol, EnumSet[DependencyContext]]() + + def addDependency(fromClass: Symbol, context: DependencyContext): Unit = + val set = _classes.getOrElseUpdate(fromClass, EnumSet.noneOf(classOf[DependencyContext])) + set.add(context) + + def classes: Iterator[(Symbol, EnumSet[DependencyContext])] = _classes.iterator + + def names: Iterator[(Name, EnumSet[UseScope])] = _names.iterator + + private[DependencyRecorder] def recordName(name: Name, includeSealedChildren: Boolean): Unit = { + if (includeSealedChildren) + _names(name) = PatMatScopes + else + _names.getOrElseUpdate(name, DefaultScopes) + } + + def namesString: String = { + val builder = new StringBuilder + names.foreach { case (name, scopes) => + builder.append(name.mangledString) + builder.append(" in [") + scopes.forEach(scope => builder.append(scope.toString)) + builder.append("]") + builder.append(", ") + } + builder.toString() + } + + def classesString: String = { + val builder = new StringBuilder + classes.foreach { case (clazz, scopes) => + builder.append(clazz.toString) + builder.append(" in [") + scopes.forEach(scope => builder.append(scope.toString)) + builder.append("]") + builder.append(", ") + } + builder.toString() + } + } + + /** Record a dependency to the class `toClass` in a given `context` + * from the current non-local enclosing class. + */ + def addClassDependency(toClass: Symbol, context: DependencyContext)(using Context): Unit = + val fromClass = resolveDependencyFromClass + if (fromClass.exists) + lastFoundCache.addDependency(toClass, context) + + private val _foundDeps = new util.EqHashMap[Symbol, FoundDepsInClass] + + /** Send the collected dependency information to Zinc and clear the local caches. */ + def sendToZinc()(using Context): Unit = + ctx.withIncCallback: cb => + val siblingClassfiles = new mutable.HashMap[PlainFile, Path] + _foundDeps.iterator.foreach: + case (clazz, foundDeps) => + val className = classNameAsString(clazz) + foundDeps.names.foreach: (usedName, scopes) => + cb.usedName(className, usedName.toString, scopes) + for (toClass, deps) <- foundDeps.classes do + for dep <- deps.asScala do + recordClassDependency(cb, clazz, toClass, dep, siblingClassfiles) + clear() + + /** Clear all state. */ + def clear(): Unit = + _foundDeps.clear() + lastOwner = NoSymbol + lastDepSource = NoSymbol + lastFoundCache = null + _responsibleForImports = NoSymbol + + /** Handles a dependency on a given symbol by trying to figure out if it represents a term + * that is coming from either source code (not necessarily compiled in this compilation + * run) or from a class file, and calls the respective callback method.
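The two shared `EnumSet` values at the top of this block trade safety for allocation count; the sketch below shows the aliasing hazard the comment warns about (illustrative, not part of the patch):

```scala
import java.util.EnumSet
import xsbti.UseScope

val defaultScopes = EnumSet.of(UseScope.Default)
val forNameA = defaultScopes  // every recorded name aliases the same set
val forNameB = defaultScopes
// forNameA.add(UseScope.PatMatTarget) // would silently turn *every* name
//                                     // into a pattern-match target
val safeCopy = EnumSet.copyOf(defaultScopes) // defensive copy, if mutation
                                             // were ever needed
```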
+ */ + private def recordClassDependency(cb: interfaces.IncrementalCallback, fromClass: Symbol, toClass: Symbol, + depCtx: DependencyContext, siblingClassfiles: mutable.Map[PlainFile, Path])(using Context): Unit = { + val fromClassName = classNameAsString(fromClass) + val sourceFile = ctx.compilationUnit.source + + /**For a `.tasty` file, constructs a sibling class to the `jpath`. + * Does not validate if it exists as a real file. + * + * Because classpath scanning looks for tasty files first, `dep.fromClass` will be + * associated to a `.tasty` file. However Zinc records all dependencies either based on `.jar` or `.class` files, + * where classes are in directories on the filesystem. + * + * So if the dependency comes from an upstream `.tasty` file and it was not packaged in a jar, then + * we need to call this to resolve the classfile that will eventually exist at runtime. + * + * The way this works is that by the end of compilation analysis, + * we should have called `cb.generatedNonLocalClass` with the same class file name. + * + * FIXME: we still need a way to resolve the correct classfile when we split tasty and classes between + * different outputs (e.g. scala2-library-bootstrapped). + */ + def cachedSiblingClass(pf: PlainFile): Path = + siblingClassfiles.getOrElseUpdate(pf, { + val jpath = pf.jpath + jpath.getParent.resolve(jpath.getFileName.toString.stripSuffix(".tasty") + ".class") + }) + + def binaryDependency(path: Path, binaryClassName: String) = + cb.binaryDependency(path, binaryClassName, fromClassName, sourceFile, depCtx) + + val depClass = toClass + val depFile = depClass.associatedFile + if depFile != null then { + // Cannot ignore inheritance relationship coming from the same source (see sbt/zinc#417) + def allowLocal = depCtx == DependencyByInheritance || depCtx == LocalDependencyByInheritance + val isTasty = depFile.hasTastyExtension + + def processExternalDependency() = { + val binaryClassName = depClass.binaryClassName + depFile match { + case ze: ZipArchive#Entry => // The dependency comes from a JAR + ze.underlyingSource match + case Some(zip) if zip.jpath != null => + binaryDependency(zip.jpath, binaryClassName) + case _ => + case pf: PlainFile => // The dependency comes from a class file, Zinc handles JRT filesystem + binaryDependency(if isTasty then cachedSiblingClass(pf) else pf.jpath, binaryClassName) + case _ => + internalError(s"Ignoring dependency $depFile of unknown class ${depFile.getClass}}", fromClass.srcPos) + } + } + + if isTasty || depFile.hasClassExtension then + processExternalDependency() + else if allowLocal || depFile != sourceFile.file then + // We cannot ignore dependencies coming from the same source file because + // the dependency info needs to propagate. See source-dependencies/trait-trait-211. + val toClassName = classNameAsString(depClass) + cb.classDependency(toClassName, fromClassName, depCtx) + } + } + + private var lastOwner: Symbol = uninitialized + private var lastDepSource: Symbol = uninitialized + private var lastFoundCache: FoundDepsInClass | Null = uninitialized + + /** The source of the dependency according to `nonLocalEnclosingClass` + * if it exists, otherwise fall back to `responsibleForImports`. + * + * This is backed by a cache which is invalidated when `ctx.owner` changes. 
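The `.tasty`-to-`.class` sibling rule implemented by `cachedSiblingClass` can be stated in isolation; a runnable sketch:

```scala
import java.nio.file.{Path, Paths}

// Zinc only tracks .jar and .class binaries, so a dependency resolved to
// A.tasty is reported against the A.class expected to sit next to it.
def siblingClass(tasty: Path): Path =
  tasty.getParent.resolve(
    tasty.getFileName.toString.stripSuffix(".tasty") + ".class")

@main def siblingDemo =
  println(siblingClass(Paths.get("out/classes/pkg/A.tasty")))
  // prints: out/classes/pkg/A.class
```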
+ */ + private def resolveDependencyFromClass(using Context): Symbol = { + import dotty.tools.uncheckedNN + if (lastOwner != ctx.owner) { + lastOwner = ctx.owner + val source = nonLocalEnclosingClass + val fromClass = if (source.is(PackageClass)) responsibleForImports else source + if lastDepSource != fromClass then + lastDepSource = fromClass + lastFoundCache = _foundDeps.getOrElseUpdate(fromClass, new FoundDepsInClass) + } + + lastDepSource + } + + /** The closest non-local enclosing class from `ctx.owner`. */ + private def nonLocalEnclosingClass(using Context): Symbol = { + var clazz = ctx.owner.enclosingClass + var owner = clazz + + while (!owner.is(PackageClass)) { + if (owner.isTerm) { + clazz = owner.enclosingClass + owner = clazz + } else { + owner = owner.owner + } + } + clazz + } + + private var _responsibleForImports: Symbol = uninitialized + + /** Top level import dependencies are registered as coming from the first top level + * class/trait/object declared in the compilation unit. If none exists, issue a warning and return NoSymbol. + */ + private def responsibleForImports(using Context) = { + import tpd.* + def firstClassOrModule(tree: Tree) = { + val acc = new TreeAccumulator[Symbol] { + def apply(x: Symbol, t: Tree)(using Context) = + t match { + case typeDef: TypeDef => + typeDef.symbol + case other => + foldOver(x, other) + } + } + acc(NoSymbol, tree) + } + + if (_responsibleForImports == null) { + val tree = ctx.compilationUnit.tpdTree + _responsibleForImports = firstClassOrModule(tree) + if (!_responsibleForImports.exists) + report.warning("""|No class, trait or object is defined in the compilation unit. + |The incremental compiler cannot record the dependency information in such a case. + |Some errors, like an unused import referring to a non-existent class, might not be reported. + |""".stripMargin, tree.sourcePos) + } + _responsibleForImports + } +} diff --git a/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala index cacb10cf98bc..61baebbe9517 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala @@ -11,7 +11,7 @@ package sbt import scala.language.unsafeNulls -import xsbti.api._ +import xsbti.api.* import scala.util.Try diff --git a/compiler/src/dotty/tools/dotc/sbt/interfaces/IncrementalCallback.java b/compiler/src/dotty/tools/dotc/sbt/interfaces/IncrementalCallback.java new file mode 100644 index 000000000000..4c6afa113f4f --- /dev/null +++ b/compiler/src/dotty/tools/dotc/sbt/interfaces/IncrementalCallback.java @@ -0,0 +1,39 @@ +package dotty.tools.dotc.sbt.interfaces; + +import dotty.tools.dotc.util.SourceFile; + +import java.util.EnumSet; +import java.nio.file.Path; + +/* User code should not implement this interface; it is intended to be a wrapper around xsbti.AnalysisCallback.
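Before the new callback interfaces, one more illustration: how `nonLocalEnclosingClass` attributes dependencies that occur inside local classes (illustrative source, not from the patch):

```scala
class Outer:
  def run: Any =          // `run` is a term owner, so everything inside
    class Local:          // it counts as local
      def use = List(1)   // the dependency on List is attributed to Outer,
                          // the closest non-local enclosing class
    new Local
```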
*/ +public interface IncrementalCallback { + default void api(SourceFile sourceFile, xsbti.api.ClassLike classApi) { + } + + default void startSource(SourceFile sourceFile) { + } + + default void mainClass(SourceFile sourceFile, String className) { + } + + default boolean enabled() { + return false; + } + + default void usedName(String className, String name, EnumSet<xsbti.UseScope> useScopes) { + } + + default void binaryDependency(Path onBinaryEntry, String onBinaryClassName, String fromClassName, + SourceFile fromSourceFile, xsbti.api.DependencyContext context) { + } + + default void classDependency(String onClassName, String sourceClassName, xsbti.api.DependencyContext context) { + } + + default void generatedLocalClass(SourceFile source, Path classFile) { + } + + default void generatedNonLocalClass(SourceFile source, Path classFile, String binaryClassName, + String srcClassName) { + } +} diff --git a/compiler/src/dotty/tools/dotc/sbt/interfaces/ProgressCallback.java b/compiler/src/dotty/tools/dotc/sbt/interfaces/ProgressCallback.java new file mode 100644 index 000000000000..39f5ca39962b --- /dev/null +++ b/compiler/src/dotty/tools/dotc/sbt/interfaces/ProgressCallback.java @@ -0,0 +1,21 @@ +package dotty.tools.dotc.sbt.interfaces; + +import dotty.tools.dotc.CompilationUnit; + +public interface ProgressCallback { + /** Record that the cancellation signal has been received during the Zinc run. */ + default void cancel() {} + + /** Report whether a cancellation signal has been received during the current Zinc run. */ + default boolean isCancelled() { return false; } + + /** Record that a unit has started compiling in the given phase. */ + default void informUnitStarting(String phase, CompilationUnit unit) {} + + /** Record the current compilation progress. + * @param current `completedPhaseCount * totalUnits + completedUnitsInCurrPhase + completedLate` + * @param total `totalPhases * totalUnits + totalLate` + * @return true if the compilation should continue (callers are expected to cancel if this returns false) + */ + default boolean progress(int current, int total, String currPhase, String nextPhase) { return true; } +} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/ConstantOps.scala b/compiler/src/dotty/tools/dotc/semanticdb/ConstantOps.scala index 975d5480fe9b..784b23cfc78c 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/ConstantOps.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/ConstantOps.scala @@ -5,7 +5,7 @@ package semanticdb import dotty.tools.dotc.{semanticdb => s} import core.Contexts.Context -import core.Constants._ +import core.Constants.* object ConstantOps: extension (const: Constant) diff --git a/compiler/src/dotty/tools/dotc/semanticdb/DiagnosticOps.scala b/compiler/src/dotty/tools/dotc/semanticdb/DiagnosticOps.scala new file mode 100644 index 000000000000..4bc6e1ecb026 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/semanticdb/DiagnosticOps.scala @@ -0,0 +1,23 @@ +package dotty.tools.dotc.semanticdb + +import dotty.tools.dotc.reporting.Diagnostic +import dotty.tools.dotc.{semanticdb => s} +import dotty.tools.dotc.interfaces.Diagnostic.{ERROR, INFO, WARNING} +import dotty.tools.dotc.core.Contexts.Context +import scala.annotation.internal.sharable + +object DiagnosticOps: + @sharable private val asciiColorCodes = "\u001B\\[[;\\d]*m".r + extension (d: Diagnostic) + def toSemanticDiagnostic: s.Diagnostic = + val severity = d.level match + case ERROR => s.Diagnostic.Severity.ERROR + case WARNING => s.Diagnostic.Severity.WARNING + case INFO => s.Diagnostic.Severity.INFORMATION + case
_ => s.Diagnostic.Severity.INFORMATION + val msg = asciiColorCodes.replaceAllIn(d.msg.message, m => "") + s.Diagnostic( + range = Scala3.range(d.pos.span, d.pos.source), + severity = severity, + message = msg + ) diff --git a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala index 91614aaccad2..77eef4564bbf 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala @@ -4,37 +4,52 @@ package semanticdb import scala.language.unsafeNulls -import core._ -import Phases._ -import ast.tpd._ +import core.* +import Phases.* +import ast.tpd.* import ast.Trees.{mods, WithEndMarker} -import Contexts._ -import Symbols._ -import Flags._ +import Contexts.* +import Symbols.* +import Flags.* import Names.Name import StdNames.nme -import NameOps._ +import NameOps.* import Denotations.StaleSymbol import util.Spans.Span import util.SourceFile -import transform.SymUtils._ import scala.collection.mutable import scala.annotation.{ threadUnsafe => tu, tailrec } +import scala.jdk.CollectionConverters.* import scala.PartialFunction.condOpt +import typer.ImportInfo.withRootImports +import dotty.tools.dotc.reporting.Diagnostic.Warning import dotty.tools.dotc.{semanticdb => s} import dotty.tools.io.{AbstractFile, JarArchive} +import dotty.tools.dotc.semanticdb.DiagnosticOps.* +import scala.util.{Using, Failure, Success} +import java.nio.file.Path + /** Extract symbol references and uses to semanticdb files. * See https://scalameta.org/docs/semanticdb/specification.html#symbol-1 * for a description of the format. - * TODO: Also extract type information + * + * Here, we define two phases for "ExtractSemanticDB": "PostTyper" and "PostInlining". + * + * The "PostTyper" phase extracts SemanticDB information such as symbol + * definitions, symbol occurrences, type information, and synthetics, + * and writes a .semanticdb file. + * + * The "PostInlining" phase extracts diagnostics from "ctx.reporter" and + * attaches them to the SemanticDB information extracted in the "PostTyper" phase. + * We need to run this phase after the "CheckUnused.PostInlining" phase + * so that we can extract the warnings generated by "-Wunused".
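As a concrete example of the two-phase split (a sketch; flag names as in current Scala 3 releases): compiling the file below with `-Xsemanticdb -Wunused:imports` writes `Foo.scala.semanticdb` during the PostTyper run, and the AppendDiagnostics run later attaches the unused-import warning to it:

```scala
// Foo.scala
import scala.collection.mutable.ListBuffer // unused: -Wunused:imports warns
                                           // here, and the warning ends up
                                           // as a Diagnostic in the
                                           // .semanticdb payload
class Foo
```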
*/ -class ExtractSemanticDB extends Phase: - import Scala3.{_, given} +class ExtractSemanticDB private (phaseMode: ExtractSemanticDB.PhaseMode) extends Phase: - override val phaseName: String = ExtractSemanticDB.name + override val phaseName: String = ExtractSemanticDB.phaseNamePrefix + phaseMode.toString() override val description: String = ExtractSemanticDB.description @@ -46,14 +61,161 @@ class ExtractSemanticDB extends Phase: // Check not needed since it does not transform trees override def isCheckable: Boolean = false - override def run(using Context): Unit = + private def computeDiagnostics( + sourceRoot: String, + warnings: Map[SourceFile, List[Warning]], + append: ((Path, List[Diagnostic])) => Unit)(using Context): Boolean = monitor(phaseName) { val unit = ctx.compilationUnit - val extractor = Extractor() - extractor.extract(unit.tpdTree) - ExtractSemanticDB.write(unit.source, extractor.occurrences.toList, extractor.symbolInfos.toList, extractor.synthetics.toList) + warnings.get(unit.source).foreach { ws => + val outputDir = + ExtractSemanticDB.semanticdbPath( + unit.source, + ExtractSemanticDB.semanticdbOutDir, + sourceRoot + ) + append((outputDir, ws.map(_.toSemanticDiagnostic))) + } + } + + private def extractSemanticDB(sourceRoot: String, writeSemanticdbText: Boolean)(using Context): Boolean = + monitor(phaseName) { + val unit = ctx.compilationUnit + val outputDir = + ExtractSemanticDB.semanticdbPath( + unit.source, + ExtractSemanticDB.semanticdbOutDir, + sourceRoot + ) + val extractor = ExtractSemanticDB.Extractor() + extractor.extract(unit.tpdTree) + ExtractSemanticDB.write( + unit.source, + extractor.occurrences.toList, + extractor.symbolInfos.toList, + extractor.synthetics.toList, + outputDir, + sourceRoot, + writeSemanticdbText + ) + } + + override def runOn(units: List[CompilationUnit])(using ctx: Context): List[CompilationUnit] = { + val sourceRoot = ctx.settings.sourceroot.value + val appendDiagnostics = phaseMode == ExtractSemanticDB.PhaseMode.AppendDiagnostics + val unitContexts = units.map(ctx.fresh.setCompilationUnit(_).withRootImports) + if (appendDiagnostics) + val warnings = ctx.reporter.allWarnings.groupBy(w => w.pos.source) + val buf = mutable.ListBuffer.empty[(Path, Seq[Diagnostic])] + val units0 = + for unitCtx <- unitContexts if computeDiagnostics(sourceRoot, warnings, buf += _)(using unitCtx) + yield unitCtx.compilationUnit + cancellable { + buf.toList.asJava.parallelStream().forEach { case (out, warnings) => + ExtractSemanticDB.appendDiagnostics(warnings, out) + } + } + units0 + else + val writeSemanticdbText = ctx.settings.semanticdbText.value + for unitCtx <- unitContexts if extractSemanticDB(sourceRoot, writeSemanticdbText)(using unitCtx) + yield unitCtx.compilationUnit + } + + def run(using Context): Unit = unsupported("run") +end ExtractSemanticDB + +object ExtractSemanticDB: + import java.nio.file.Path + import java.nio.file.Files + import java.nio.file.Paths + + val phaseNamePrefix: String = "extractSemanticDB" + val description: String = "extract info into .semanticdb files" + + enum PhaseMode: + case ExtractSemanticInfo + case AppendDiagnostics + + class ExtractSemanticInfo extends ExtractSemanticDB(PhaseMode.ExtractSemanticInfo) + + class AppendDiagnostics extends ExtractSemanticDB(PhaseMode.AppendDiagnostics) + + private def semanticdbTarget(using Context): Option[Path] = + Option(ctx.settings.semanticdbTarget.value) + .filterNot(_.isEmpty) + .map(Paths.get(_)) + + /** Destination for generated classfiles */ + private def outputDirectory(using 
Context): AbstractFile = + ctx.settings.outputDir.value + + /** Output directory for SemanticDB files */ + private def semanticdbOutDir(using Context): Path = + semanticdbTarget.getOrElse(outputDirectory.jpath) + + private def absolutePath(path: Path): Path = path.toAbsolutePath.normalize + + private def write( + source: SourceFile, + occurrences: List[SymbolOccurrence], + symbolInfos: List[SymbolInformation], + synthetics: List[Synthetic], + outpath: Path, + sourceRoot: String, + semanticdbText: Boolean + ): Unit = + Files.createDirectories(outpath.getParent()) + val doc: TextDocument = TextDocument( + schema = Schema.SEMANTICDB4, + language = Language.SCALA, + uri = Tools.mkURIstring(Paths.get(relPath(source, sourceRoot))), + text = if semanticdbText then String(source.content) else "", + md5 = internal.MD5.compute(String(source.content)), + symbols = symbolInfos, + occurrences = occurrences, + synthetics = synthetics, + ) + val docs = TextDocuments(List(doc)) + val out = Files.newOutputStream(outpath) + try + val stream = internal.SemanticdbOutputStream.newInstance(out) + docs.writeTo(stream) + stream.flush() + finally + out.close() + end write + + private def appendDiagnostics( + diagnostics: Seq[Diagnostic], + outpath: Path + ): Unit = + Using.Manager { use => + val in = use(Files.newInputStream(outpath)) + val sin = internal.SemanticdbInputStream.newInstance(in) + val docs = TextDocuments.parseFrom(sin) + + val out = use(Files.newOutputStream(outpath)) + val sout = internal.SemanticdbOutputStream.newInstance(out) + TextDocuments(docs.documents.map(_.withDiagnostics(diagnostics))).writeTo(sout) + sout.flush() + } match + case Failure(ex) => // failed somehow, should we say something? + case Success(_) => // success to update semanticdb, say nothing + end appendDiagnostics + + private def relPath(source: SourceFile, sourceRoot: String) = + SourceFile.relativePath(source, sourceRoot) + + private def semanticdbPath(source: SourceFile, base: Path, sourceRoot: String): Path = + absolutePath(base) + .resolve("META-INF") + .resolve("semanticdb") + .resolve(relPath(source, sourceRoot)) + .resolveSibling(source.name + ".semanticdb") /** Extractor of symbol occurrences from trees */ class Extractor extends TreeTraverser: + import Scala3.{_, given} given s.SemanticSymbolBuilder = s.SemanticSymbolBuilder() val synth = SyntheticsExtractor() given converter: s.TypeOps = s.TypeOps() @@ -392,15 +554,12 @@ class ExtractSemanticDB extends Phase: }).toMap end findGetters - private def selectSpan(tree: Select) = + private def selectSpan(tree: Select)(using Context) = val end = tree.span.end val limit = tree.qualifier.span.end - val start = - if limit < end then - val len = tree.name.toString.length - if tree.source.content()(end - 1) == '`' then end - len - 2 else end - len - else limit - Span(start max limit, end) + if limit < end then + tree.nameSpan + else Span(limit, end) extension (span: Span) private def hasLength: Boolean = span.exists && !span.isZeroExtent @@ -468,52 +627,5 @@ class ExtractSemanticDB extends Phase: registerSymbol(vparam.symbol, symkinds) traverse(vparam.tpt) tparams.foreach(tp => traverse(tp.rhs)) - - -object ExtractSemanticDB: - import java.nio.file.Path - import java.nio.file.Files - import java.nio.file.Paths - - val name: String = "extractSemanticDB" - val description: String = "extract info into .semanticdb files" - - private def semanticdbTarget(using Context): Option[Path] = - Option(ctx.settings.semanticdbTarget.value) - .filterNot(_.isEmpty) - .map(Paths.get(_)) - - 
private def outputDirectory(using Context): AbstractFile = ctx.settings.outputDir.value - - def write( - source: SourceFile, - occurrences: List[SymbolOccurrence], - symbolInfos: List[SymbolInformation], - synthetics: List[Synthetic], - )(using Context): Unit = - def absolutePath(path: Path): Path = path.toAbsolutePath.normalize - val relPath = SourceFile.relativePath(source, ctx.settings.sourceroot.value) - val outpath = absolutePath(semanticdbTarget.getOrElse(outputDirectory.jpath)) - .resolve("META-INF") - .resolve("semanticdb") - .resolve(relPath) - .resolveSibling(source.name + ".semanticdb") - Files.createDirectories(outpath.getParent()) - val doc: TextDocument = TextDocument( - schema = Schema.SEMANTICDB4, - language = Language.SCALA, - uri = Tools.mkURIstring(Paths.get(relPath)), - text = "", - md5 = internal.MD5.compute(String(source.content)), - symbols = symbolInfos, - occurrences = occurrences, - synthetics = synthetics, - ) - val docs = TextDocuments(List(doc)) - val out = Files.newOutputStream(outpath) - try - val stream = internal.SemanticdbOutputStream.newInstance(out) - docs.writeTo(stream) - stream.flush() - finally - out.close() + end Extractor +end ExtractSemanticDB diff --git a/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala b/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala index b53ee787f501..fdf159836878 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala @@ -4,7 +4,7 @@ import dotty.tools.dotc.{semanticdb => s} import scala.collection.mutable import dotty.tools.dotc.semanticdb.Scala3.given -import SymbolInformation.Kind._ +import SymbolInformation.Kind.* import dotty.tools.dotc.util.SourceFile class SymbolInformationPrinter (symtab: PrinterSymtab): val notes = InfoNotes() diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala b/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala index e157b52fe260..f49b00089712 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala @@ -2,11 +2,11 @@ package dotty.tools.dotc.semanticdb import dotty.tools.dotc.core import core.Symbols.{ Symbol , defn, NoSymbol } -import core.Contexts._ +import core.Contexts.* import core.Names import core.Names.Name import core.Types.{Type, TypeBounds} -import core.Flags._ +import core.Flags.* import core.NameKinds import core.StdNames.nme import SymbolInformation.{Kind => k} @@ -20,8 +20,8 @@ import scala.annotation.internal.sharable import scala.annotation.switch object Scala3: - import Symbols._ - import core.NameOps._ + import Symbols.* + import core.NameOps.* @sharable private val unicodeEscape = raw"\$$u(\p{XDigit}{4})".r @sharable private val locals = raw"local(\d+)".r @@ -29,7 +29,7 @@ object Scala3: private val WILDCARDTypeName = nme.WILDCARD.toTypeName - def range(span: Span, treeSource: SourceFile)(using Context): Option[Range] = + def range(span: Span, treeSource: SourceFile): Option[Range] = def lineCol(offset: Int) = (treeSource.offsetToLine(offset), treeSource.column(offset)) val (startLine, startCol) = lineCol(span.start) val (endLine, endCol) = lineCol(span.end) @@ -47,7 +47,8 @@ object Scala3: // for secondary constructors `this` desig match case sym: Symbol => - if sym.isConstructor && nameInSource == nme.THISkw.toString then + if sym.isConstructor + && (sym.isPrimaryConstructor || nameInSource == nme.THISkw.toString) then true else val target = @@ -216,7 +217,8 @@ object Scala3: def isEmptyNumbered: Boolean = 
!name.is(NameKinds.WildcardParamName) - && !name.is(NameKinds.EvidenceParamName) + && !name.is(NameKinds.ContextBoundParamName) + && !name.is(NameKinds.ContextFunctionParamName) && { name match case NameKinds.AnyNumberedName(nme.EMPTY, _) => true case _ => false @@ -484,6 +486,8 @@ object Scala3: given Ordering[SymbolInformation] = Ordering.by[SymbolInformation, String](_.symbol)(IdentifierOrdering()) + given Ordering[Diagnostic] = (x, y) => compareRange(x.range, y.range) + given Ordering[Synthetic] = (x, y) => compareRange(x.range, y.range) /** diff --git a/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala b/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala index c7b0dfd437db..6376fb86d6c5 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala @@ -2,10 +2,10 @@ package dotty.tools package dotc package semanticdb -import core._ -import Contexts._ -import Symbols._ -import Flags._ +import core.* +import Contexts.* +import Symbols.* +import Flags.* import Names.Name import scala.annotation.tailrec @@ -84,7 +84,7 @@ class SemanticSymbolBuilder: else decls0 end decls - val alts = decls.filter(_.isOneOf(Method | Mutable)).toList.reverse + val alts = decls.filter(_.isOneOf(Method | Mutable)).toList.reverse.partition(!_.is(Synthetic)).toList.flatten def find(filter: Symbol => Boolean) = alts match case notSym :: rest if !filter(notSym) => val idx = rest.indexWhere(filter).ensuring(_ >= 0) diff --git a/compiler/src/dotty/tools/dotc/semanticdb/SyntheticsExtractor.scala b/compiler/src/dotty/tools/dotc/semanticdb/SyntheticsExtractor.scala index b2f26e3e992f..af38315a857e 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/SyntheticsExtractor.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/SyntheticsExtractor.scala @@ -1,8 +1,8 @@ package dotty.tools.dotc.semanticdb -import dotty.tools.dotc.ast.tpd._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.ast.tpd.* +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.StdNames.nme import dotty.tools.dotc.core.NameKinds import dotty.tools.dotc.{semanticdb => s} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Tools.scala b/compiler/src/dotty/tools/dotc/semanticdb/Tools.scala index d37973237a9f..ea95e34a57b9 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/Tools.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/Tools.scala @@ -1,8 +1,8 @@ package dotty.tools.dotc.semanticdb -import java.nio.file._ +import java.nio.file.* import java.nio.charset.StandardCharsets -import scala.jdk.CollectionConverters._ +import scala.jdk.CollectionConverters.* import dotty.tools.dotc.util.SourceFile import dotty.tools.dotc.semanticdb.Scala3.given @@ -69,6 +69,8 @@ object Tools: sb.append("Language => ").append(languageString(doc.language)).nl sb.append("Symbols => ").append(doc.symbols.length).append(" entries").nl sb.append("Occurrences => ").append(doc.occurrences.length).append(" entries").nl + if doc.diagnostics.nonEmpty then + sb.append("Diagnostics => ").append(doc.diagnostics.length).append(" entries").nl if doc.synthetics.nonEmpty then sb.append("Synthetics => ").append(doc.synthetics.length).append(" entries").nl sb.nl @@ -78,6 +80,10 @@ object Tools: sb.append("Occurrences:").nl doc.occurrences.sorted.foreach(processOccurrence) sb.nl + if doc.diagnostics.nonEmpty then + sb.append("Diagnostics:").nl + 
@@ -86,7 +92,7 @@
   end metac
 
   private def schemaString(schema: Schema) =
-    import Schema._
+    import Schema.*
     schema match
       case SEMANTICDB3 => "SemanticDB v3"
       case SEMANTICDB4 => "SemanticDB v4"
@@ -95,7 +101,7 @@
   end schemaString
 
   private def languageString(language: Language) =
-    import Language._
+    import Language.*
     language match
       case SCALA => "Scala"
       case JAVA => "Java"
@@ -108,6 +114,20 @@
   private def processSynth(synth: Synthetic, printer: SyntheticPrinter)(using sb: StringBuilder): Unit =
     sb.append(printer.pprint(synth)).nl
 
+  private def processDiag(d: Diagnostic)(using sb: StringBuilder): Unit =
+    d.range match
+      case Some(range) => processRange(sb, range)
+      case _ => sb.append("[):")
+    sb.append(" ")
+    d.severity match
+      case Diagnostic.Severity.ERROR => sb.append("[error]")
+      case Diagnostic.Severity.WARNING => sb.append("[warning]")
+      case Diagnostic.Severity.INFORMATION => sb.append("[info]")
+      case _ => sb.append("[unknown]")
+    sb.append(" ")
+    sb.append(d.message)
+    sb.nl
+
   private def processOccurrence(occ: SymbolOccurrence)(using sb: StringBuilder, sourceFile: SourceFile): Unit =
     occ.range match
       case Some(range) =>
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala b/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala
index b0d032c7d83b..4293ecd6ca43 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala
@@ -2,9 +2,9 @@ package dotty.tools
 package dotc
 package semanticdb
 
-import core.Symbols._
+import core.Symbols.*
 import core.Contexts.Context
-import core.Types._
+import core.Types.*
 import core.Annotations.Annotation
 import core.Flags
 import core.Names.Name
@@ -18,7 +18,7 @@ import Scala3.{FakeSymbol, SemanticSymbol, WildcardTypeSymbol, TypeParamRefSymbo
 import dotty.tools.dotc.core.Names.Designator
 
 class TypeOps:
-  import SymbolScopeOps._
+  import SymbolScopeOps.*
   import Scala3.given
   private val paramRefSymtab = mutable.Map[(LambdaType, Name), Symbol]()
   private val refinementSymtab = mutable.Map[(RefinedType, Name), Symbol]()
@@ -245,7 +245,7 @@ class TypeOps:
     loop(tpe)
 
   def toSemanticType(sym: Symbol)(using LinkMode, SemanticSymbolBuilder, Context): s.Type =
-    import ConstantOps._
+    import ConstantOps.*
     def loop(tpe: Type): s.Type = tpe match {
       case t if t.isFromJavaObject =>
         loop(defn.AnyType)
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Access.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Access.scala
index c646e67b69ad..69b8712878af 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Access.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Access.scala
@@ -5,7 +5,7 @@
 // Protofile syntax: PROTO3
 
 package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
 import scala.annotation.internal.sharable
 
 sealed trait Access  extends SemanticdbGeneratedSealedOneof  derives CanEqual {
@@ -18,10 +18,10 @@ sealed trait Access  extends SemanticdbGeneratedSealedOneof  derives CanEqual {
 
 object Access {
   case object Empty extends dotty.tools.dotc.semanticdb.Access
-  
+
   sealed trait NonEmpty extends dotty.tools.dotc.semanticdb.Access
   def defaultInstance: dotty.tools.dotc.semanticdb.Access = Empty
-  
+
   implicit val AccessTypeMapper:
SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.AccessMessage, dotty.tools.dotc.semanticdb.Access] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.AccessMessage, dotty.tools.dotc.semanticdb.Access] { override def toCustom(__base: dotty.tools.dotc.semanticdb.AccessMessage): dotty.tools.dotc.semanticdb.Access = __base.sealedValue match { case __v: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PrivateAccess => __v.value @@ -50,8 +50,8 @@ final case class AccessMessage( sealedValue: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.Empty ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (sealedValue.privateAccess.isDefined) { val __value = sealedValue.privateAccess.get @@ -90,7 +90,7 @@ final case class AccessMessage( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { sealedValue.privateAccess.foreach { __v => @@ -152,10 +152,10 @@ final case class AccessMessage( def withPublicAccess(__v: dotty.tools.dotc.semanticdb.PublicAccess): AccessMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PublicAccess(__v)) def clearSealedValue: AccessMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.Empty) def withSealedValue(__v: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue): AccessMessage = copy(sealedValue = __v) - - - - + + + + def toAccess: dotty.tools.dotc.semanticdb.Access = dotty.tools.dotc.semanticdb.Access.AccessTypeMapper.toCustom(this) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Access]) } @@ -190,12 +190,12 @@ object AccessMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.do sealedValue = __sealedValue ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.AccessMessage( sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.Empty ) @@ -226,7 +226,7 @@ object AccessMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.do override def number: _root_.scala.Int = 0 override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value") } - + @SerialVersionUID(0L) final case class PrivateAccess(value: dotty.tools.dotc.semanticdb.PrivateAccess) extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue derives CanEqual { type ValueType = dotty.tools.dotc.semanticdb.PrivateAccess @@ -298,10 +298,10 @@ final case class PrivateAccess( final override def serializedSize: _root_.scala.Int = 0 def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { } - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.PrivateAccess]) } @@ -319,12 +319,12 @@ object PrivateAccess extends SemanticdbGeneratedMessageCompanion[dotty.tools.do dotty.tools.dotc.semanticdb.PrivateAccess( ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.PrivateAccess( ) def of( @@ -339,10 +339,10 @@ final case class PrivateThisAccess( final override def serializedSize: _root_.scala.Int = 0 def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { } - - - - + + + + // 
@@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.PrivateThisAccess]) } @@ -360,12 +360,12 @@ object PrivateThisAccess extends SemanticdbGeneratedMessageCompanion[dotty.tool dotty.tools.dotc.semanticdb.PrivateThisAccess( ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.PrivateThisAccess( ) def of( @@ -379,10 +379,10 @@ final case class PrivateWithinAccess( symbol: _root_.scala.Predef.String = "" ) extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = symbol if (!__value.isEmpty) { @@ -398,7 +398,7 @@ final case class PrivateWithinAccess( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -409,10 +409,10 @@ final case class PrivateWithinAccess( }; } def withSymbol(__v: _root_.scala.Predef.String): PrivateWithinAccess = copy(symbol = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.PrivateWithinAccess]) } @@ -434,12 +434,12 @@ object PrivateWithinAccess extends SemanticdbGeneratedMessageCompanion[dotty.to symbol = __symbol ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.PrivateWithinAccess( symbol = "" ) @@ -458,10 +458,10 @@ final case class ProtectedAccess( final override def serializedSize: _root_.scala.Int = 0 def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { } - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ProtectedAccess]) } @@ -479,12 +479,12 @@ object ProtectedAccess extends SemanticdbGeneratedMessageCompanion[dotty.tools. 
dotty.tools.dotc.semanticdb.ProtectedAccess( ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.ProtectedAccess( ) def of( @@ -499,10 +499,10 @@ final case class ProtectedThisAccess( final override def serializedSize: _root_.scala.Int = 0 def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { } - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ProtectedThisAccess]) } @@ -520,12 +520,12 @@ object ProtectedThisAccess extends SemanticdbGeneratedMessageCompanion[dotty.to dotty.tools.dotc.semanticdb.ProtectedThisAccess( ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.ProtectedThisAccess( ) def of( @@ -539,10 +539,10 @@ final case class ProtectedWithinAccess( symbol: _root_.scala.Predef.String = "" ) extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = symbol if (!__value.isEmpty) { @@ -558,7 +558,7 @@ final case class ProtectedWithinAccess( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -569,10 +569,10 @@ final case class ProtectedWithinAccess( }; } def withSymbol(__v: _root_.scala.Predef.String): ProtectedWithinAccess = copy(symbol = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ProtectedWithinAccess]) } @@ -594,12 +594,12 @@ object ProtectedWithinAccess extends SemanticdbGeneratedMessageCompanion[dotty. 
symbol = __symbol ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.ProtectedWithinAccess( symbol = "" ) @@ -618,10 +618,10 @@ final case class PublicAccess( final override def serializedSize: _root_.scala.Int = 0 def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { } - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.PublicAccess]) } @@ -639,12 +639,12 @@ object PublicAccess extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot dotty.tools.dotc.semanticdb.PublicAccess( ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.PublicAccess( ) def of( diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Annotation.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Annotation.scala index 2cb478d89e2d..cf07e8c58747 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Annotation.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Annotation.scala @@ -5,7 +5,7 @@ // Protofile syntax: PROTO3 package dotty.tools.dotc.semanticdb -import dotty.tools.dotc.semanticdb.internal._ +import dotty.tools.dotc.semanticdb.internal.* import scala.annotation.internal.sharable @SerialVersionUID(0L) @@ -13,10 +13,10 @@ final case class Annotation( tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.Annotation._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = dotty.tools.dotc.semanticdb.Annotation._typemapper_tpe.toBase(tpe) if (__value.serializedSize != 0) { @@ -32,7 +32,7 @@ final case class Annotation( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -45,10 +45,10 @@ final case class Annotation( }; } def withTpe(__v: dotty.tools.dotc.semanticdb.Type): Annotation = copy(tpe = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Annotation]) } @@ -70,12 +70,12 @@ object Annotation extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc. 
tpe = dotty.tools.dotc.semanticdb.Annotation._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)) ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.Annotation( tpe = dotty.tools.dotc.semanticdb.Annotation._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Constant.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Constant.scala index 0ca96d9ae8c6..da8bf56455ef 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Constant.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Constant.scala @@ -5,7 +5,7 @@ // Protofile syntax: PROTO3 package dotty.tools.dotc.semanticdb -import dotty.tools.dotc.semanticdb.internal._ +import dotty.tools.dotc.semanticdb.internal.* import scala.annotation.internal.sharable sealed trait Constant extends SemanticdbGeneratedSealedOneof derives CanEqual { @@ -18,10 +18,10 @@ sealed trait Constant extends SemanticdbGeneratedSealedOneof derives CanEqual object Constant { case object Empty extends dotty.tools.dotc.semanticdb.Constant - + sealed trait NonEmpty extends dotty.tools.dotc.semanticdb.Constant def defaultInstance: dotty.tools.dotc.semanticdb.Constant = Empty - + implicit val ConstantTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.ConstantMessage, dotty.tools.dotc.semanticdb.Constant] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.ConstantMessage, dotty.tools.dotc.semanticdb.Constant] { override def toCustom(__base: dotty.tools.dotc.semanticdb.ConstantMessage): dotty.tools.dotc.semanticdb.Constant = __base.sealedValue match { case __v: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.UnitConstant => __v.value @@ -58,8 +58,8 @@ final case class ConstantMessage( sealedValue: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.Empty ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (sealedValue.unitConstant.isDefined) { val __value = sealedValue.unitConstant.get @@ -114,7 +114,7 @@ final case class ConstantMessage( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { sealedValue.unitConstant.foreach { __v => @@ -208,10 +208,10 @@ final case class ConstantMessage( def withNullConstant(__v: dotty.tools.dotc.semanticdb.NullConstant): ConstantMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.NullConstant(__v)) def clearSealedValue: ConstantMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.Empty) def withSealedValue(__v: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue): ConstantMessage = copy(sealedValue = __v) - - - - + + + + def toConstant: dotty.tools.dotc.semanticdb.Constant = dotty.tools.dotc.semanticdb.Constant.ConstantTypeMapper.toCustom(this) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Constant]) } @@ -254,12 +254,12 @@ object ConstantMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools. 
sealedValue = __sealedValue ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.ConstantMessage( sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.Empty ) @@ -298,7 +298,7 @@ object ConstantMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools. override def number: _root_.scala.Int = 0 override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value") } - + @SerialVersionUID(0L) final case class UnitConstant(value: dotty.tools.dotc.semanticdb.UnitConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue derives CanEqual { type ValueType = dotty.tools.dotc.semanticdb.UnitConstant @@ -402,10 +402,10 @@ final case class UnitConstant( final override def serializedSize: _root_.scala.Int = 0 def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { } - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.UnitConstant]) } @@ -423,12 +423,12 @@ object UnitConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot dotty.tools.dotc.semanticdb.UnitConstant( ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.UnitConstant( ) def of( @@ -442,10 +442,10 @@ final case class BooleanConstant( value: _root_.scala.Boolean = false ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = value if (__value != false) { @@ -461,7 +461,7 @@ final case class BooleanConstant( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -472,10 +472,10 @@ final case class BooleanConstant( }; } def withValue(__v: _root_.scala.Boolean): BooleanConstant = copy(value = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.BooleanConstant]) } @@ -497,12 +497,12 @@ object BooleanConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools. 
value = __value ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.BooleanConstant( value = false ) @@ -520,10 +520,10 @@ final case class ByteConstant( value: _root_.scala.Int = 0 ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = value if (__value != 0) { @@ -539,7 +539,7 @@ final case class ByteConstant( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -550,10 +550,10 @@ final case class ByteConstant( }; } def withValue(__v: _root_.scala.Int): ByteConstant = copy(value = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ByteConstant]) } @@ -575,12 +575,12 @@ object ByteConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot value = __value ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.ByteConstant( value = 0 ) @@ -598,10 +598,10 @@ final case class ShortConstant( value: _root_.scala.Int = 0 ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = value if (__value != 0) { @@ -617,7 +617,7 @@ final case class ShortConstant( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -628,10 +628,10 @@ final case class ShortConstant( }; } def withValue(__v: _root_.scala.Int): ShortConstant = copy(value = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ShortConstant]) } @@ -653,12 +653,12 @@ object ShortConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.do value = __value ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.ShortConstant( value = 0 ) @@ -676,10 +676,10 @@ final case class CharConstant( value: _root_.scala.Int = 0 ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = value if (__value != 0) { @@ -695,7 +695,7 @@ final case class CharConstant( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -706,10 +706,10 @@ final case class CharConstant( }; } def withValue(__v: _root_.scala.Int): CharConstant = copy(value = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.CharConstant]) } @@ -731,12 +731,12 @@ object CharConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot value = __value ) } - - - - - - + + + 
+ + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.CharConstant( value = 0 ) @@ -754,10 +754,10 @@ final case class IntConstant( value: _root_.scala.Int = 0 ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = value if (__value != 0) { @@ -773,7 +773,7 @@ final case class IntConstant( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -784,10 +784,10 @@ final case class IntConstant( }; } def withValue(__v: _root_.scala.Int): IntConstant = copy(value = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.IntConstant]) } @@ -809,12 +809,12 @@ object IntConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc value = __value ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.IntConstant( value = 0 ) @@ -832,10 +832,10 @@ final case class LongConstant( value: _root_.scala.Long = 0L ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = value if (__value != 0L) { @@ -851,7 +851,7 @@ final case class LongConstant( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -862,10 +862,10 @@ final case class LongConstant( }; } def withValue(__v: _root_.scala.Long): LongConstant = copy(value = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.LongConstant]) } @@ -887,12 +887,12 @@ object LongConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot value = __value ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.LongConstant( value = 0L ) @@ -910,10 +910,10 @@ final case class FloatConstant( value: _root_.scala.Float = 0.0f ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = value if (__value != 0.0f) { @@ -929,7 +929,7 @@ final case class FloatConstant( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -940,10 +940,10 @@ final case class FloatConstant( }; } def withValue(__v: _root_.scala.Float): FloatConstant = copy(value = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.FloatConstant]) } @@ -965,12 +965,12 @@ object FloatConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.do value = __value ) } - - - - - - + + + + + + lazy val defaultInstance = 
dotty.tools.dotc.semanticdb.FloatConstant( value = 0.0f ) @@ -988,10 +988,10 @@ final case class DoubleConstant( value: _root_.scala.Double = 0.0 ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = value if (__value != 0.0) { @@ -1007,7 +1007,7 @@ final case class DoubleConstant( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -1018,10 +1018,10 @@ final case class DoubleConstant( }; } def withValue(__v: _root_.scala.Double): DoubleConstant = copy(value = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.DoubleConstant]) } @@ -1043,12 +1043,12 @@ object DoubleConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.d value = __value ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.DoubleConstant( value = 0.0 ) @@ -1066,10 +1066,10 @@ final case class StringConstant( value: _root_.scala.Predef.String = "" ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = value if (!__value.isEmpty) { @@ -1085,7 +1085,7 @@ final case class StringConstant( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -1096,10 +1096,10 @@ final case class StringConstant( }; } def withValue(__v: _root_.scala.Predef.String): StringConstant = copy(value = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.StringConstant]) } @@ -1121,12 +1121,12 @@ object StringConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.d value = __value ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.StringConstant( value = "" ) @@ -1145,10 +1145,10 @@ final case class NullConstant( final override def serializedSize: _root_.scala.Int = 0 def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { } - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.NullConstant]) } @@ -1166,12 +1166,12 @@ object NullConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot dotty.tools.dotc.semanticdb.NullConstant( ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.NullConstant( ) def of( diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Diagnostic.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Diagnostic.scala index cc8aa82bf8ea..43f9dca4d49b 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Diagnostic.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Diagnostic.scala @@ -5,7 +5,7 @@ // Protofile syntax: PROTO3 package dotty.tools.dotc.semanticdb -import dotty.tools.dotc.semanticdb.internal._ +import dotty.tools.dotc.semanticdb.internal.* import 
scala.annotation.internal.sharable @SerialVersionUID(0L) @@ -15,21 +15,21 @@ final case class Diagnostic( message: _root_.scala.Predef.String = "" ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (range.isDefined) { val __value = range.get __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize }; - + { val __value = severity.value if (__value != 0) { __size += SemanticdbOutputStream.computeEnumSize(2, __value) } }; - + { val __value = message if (!__value.isEmpty) { @@ -45,7 +45,7 @@ final case class Diagnostic( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { range.foreach { __v => @@ -72,10 +72,10 @@ final case class Diagnostic( def withRange(__v: dotty.tools.dotc.semanticdb.Range): Diagnostic = copy(range = Option(__v)) def withSeverity(__v: dotty.tools.dotc.semanticdb.Diagnostic.Severity): Diagnostic = copy(severity = __v) def withMessage(__v: _root_.scala.Predef.String): Diagnostic = copy(message = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Diagnostic]) } @@ -105,12 +105,12 @@ object Diagnostic extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc. message = __message ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.Diagnostic( range = _root_.scala.None, severity = dotty.tools.dotc.semanticdb.Diagnostic.Severity.UNKNOWN_SEVERITY, @@ -123,49 +123,49 @@ object Diagnostic extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc. 
def isWarning: _root_.scala.Boolean = false def isInformation: _root_.scala.Boolean = false def isHint: _root_.scala.Boolean = false - + final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.Diagnostic.Severity.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Diagnostic.Severity.Recognized]) } - + object Severity { sealed trait Recognized extends Severity - - + + @SerialVersionUID(0L) case object UNKNOWN_SEVERITY extends Severity(0) with Severity.Recognized { val index = 0 val name = "UNKNOWN_SEVERITY" override def isUnknownSeverity: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object ERROR extends Severity(1) with Severity.Recognized { val index = 1 val name = "ERROR" override def isError: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object WARNING extends Severity(2) with Severity.Recognized { val index = 2 val name = "WARNING" override def isWarning: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object INFORMATION extends Severity(3) with Severity.Recognized { val index = 3 val name = "INFORMATION" override def isInformation: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object HINT extends Severity(4) with Severity.Recognized { val index = 4 val name = "HINT" override def isHint: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Severity(unrecognizedValue) with SemanticdbUnrecognizedEnum lazy val values = scala.collection.immutable.Seq(UNKNOWN_SEVERITY, ERROR, WARNING, INFORMATION, HINT) @@ -177,8 +177,8 @@ object Diagnostic extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc. case 4 => HINT case __other => Unrecognized(__other) } - - + + } final val RANGE_FIELD_NUMBER = 1 final val SEVERITY_FIELD_NUMBER = 2 diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Documentation.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Documentation.scala index 07fbda4991af..256e8ae15f37 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Documentation.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Documentation.scala @@ -5,7 +5,7 @@ // Protofile syntax: PROTO3 package dotty.tools.dotc.semanticdb -import dotty.tools.dotc.semanticdb.internal._ +import dotty.tools.dotc.semanticdb.internal.* import scala.annotation.internal.sharable @SerialVersionUID(0L) @@ -14,17 +14,17 @@ final case class Documentation( format: dotty.tools.dotc.semanticdb.Documentation.Format = dotty.tools.dotc.semanticdb.Documentation.Format.HTML ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = message if (!__value.isEmpty) { __size += SemanticdbOutputStream.computeStringSize(1, __value) } }; - + { val __value = format.value if (__value != 0) { @@ -40,7 +40,7 @@ final case class Documentation( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -58,10 +58,10 @@ final case class Documentation( } def withMessage(__v: _root_.scala.Predef.String): Documentation = copy(message = __v) def withFormat(__v: dotty.tools.dotc.semanticdb.Documentation.Format): 
Documentation = copy(format = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Documentation]) } @@ -87,12 +87,12 @@ object Documentation extends SemanticdbGeneratedMessageCompanion[dotty.tools.do format = __format ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.Documentation( message = "", format = dotty.tools.dotc.semanticdb.Documentation.Format.HTML @@ -104,49 +104,49 @@ object Documentation extends SemanticdbGeneratedMessageCompanion[dotty.tools.do def isJavadoc: _root_.scala.Boolean = false def isScaladoc: _root_.scala.Boolean = false def isKdoc: _root_.scala.Boolean = false - + final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.Documentation.Format.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Documentation.Format.Recognized]) } - + object Format { sealed trait Recognized extends Format - - + + @SerialVersionUID(0L) case object HTML extends Format(0) with Format.Recognized { val index = 0 val name = "HTML" override def isHtml: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object MARKDOWN extends Format(1) with Format.Recognized { val index = 1 val name = "MARKDOWN" override def isMarkdown: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object JAVADOC extends Format(2) with Format.Recognized { val index = 2 val name = "JAVADOC" override def isJavadoc: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object SCALADOC extends Format(3) with Format.Recognized { val index = 3 val name = "SCALADOC" override def isScaladoc: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object KDOC extends Format(4) with Format.Recognized { val index = 4 val name = "KDOC" override def isKdoc: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Format(unrecognizedValue) with SemanticdbUnrecognizedEnum lazy val values = scala.collection.immutable.Seq(HTML, MARKDOWN, JAVADOC, SCALADOC, KDOC) @@ -158,8 +158,8 @@ object Documentation extends SemanticdbGeneratedMessageCompanion[dotty.tools.do case 4 => KDOC case __other => Unrecognized(__other) } - - + + } final val MESSAGE_FIELD_NUMBER = 1 final val FORMAT_FIELD_NUMBER = 2 diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Language.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Language.scala index c57a3d3cddc3..ef47e9020361 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Language.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Language.scala @@ -5,7 +5,7 @@ // Protofile syntax: PROTO3 package dotty.tools.dotc.semanticdb -import dotty.tools.dotc.semanticdb.internal._ +import dotty.tools.dotc.semanticdb.internal.* import scala.annotation.internal.sharable sealed abstract class Language(val value: _root_.scala.Int) extends SemanticdbGeneratedEnum derives CanEqual { @@ -13,35 +13,35 @@ sealed abstract class Language(val value: _root_.scala.Int) extends SemanticdbG def isUnknownLanguage: _root_.scala.Boolean = false def isScala: _root_.scala.Boolean = false def isJava: _root_.scala.Boolean = false - + final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.Language.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Language.Recognized]) } object Language { sealed trait Recognized extends Language - - + + 
@SerialVersionUID(0L) case object UNKNOWN_LANGUAGE extends Language(0) with Language.Recognized { val index = 0 val name = "UNKNOWN_LANGUAGE" override def isUnknownLanguage: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object SCALA extends Language(1) with Language.Recognized { val index = 1 val name = "SCALA" override def isScala: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object JAVA extends Language(2) with Language.Recognized { val index = 2 val name = "JAVA" override def isJava: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Language(unrecognizedValue) with SemanticdbUnrecognizedEnum lazy val values = scala.collection.immutable.Seq(UNKNOWN_LANGUAGE, SCALA, JAVA) @@ -51,6 +51,6 @@ object Language { case 2 => JAVA case __other => Unrecognized(__other) } - - + + } \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Location.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Location.scala index a3667e944ae4..1072d25654f0 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Location.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Location.scala @@ -5,7 +5,7 @@ // Protofile syntax: PROTO3 package dotty.tools.dotc.semanticdb -import dotty.tools.dotc.semanticdb.internal._ +import dotty.tools.dotc.semanticdb.internal.* import scala.annotation.internal.sharable @SerialVersionUID(0L) @@ -14,10 +14,10 @@ final case class Location( range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = _root_.scala.None ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = uri if (!__value.isEmpty) { @@ -37,7 +37,7 @@ final case class Location( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -57,10 +57,10 @@ final case class Location( def getRange: dotty.tools.dotc.semanticdb.Range = range.getOrElse(dotty.tools.dotc.semanticdb.Range.defaultInstance) def clearRange: Location = copy(range = _root_.scala.None) def withRange(__v: dotty.tools.dotc.semanticdb.Range): Location = copy(range = Option(__v)) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Location]) } @@ -86,12 +86,12 @@ object Location extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.se range = __range ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.Location( uri = "", range = _root_.scala.None diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Range.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Range.scala index d273664bdf6a..5f1c0477e17d 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Range.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Range.scala @@ -5,7 +5,7 @@ // Protofile syntax: PROTO3 package dotty.tools.dotc.semanticdb -import dotty.tools.dotc.semanticdb.internal._ +import dotty.tools.dotc.semanticdb.internal.* import scala.annotation.internal.sharable @SerialVersionUID(0L) @@ -16,31 +16,31 @@ final case class Range( endCharacter: _root_.scala.Int = 0 ) extends SemanticdbGeneratedMessage derives CanEqual { 
@transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = startLine if (__value != 0) { __size += SemanticdbOutputStream.computeInt32Size(1, __value) } }; - + { val __value = startCharacter if (__value != 0) { __size += SemanticdbOutputStream.computeInt32Size(2, __value) } }; - + { val __value = endLine if (__value != 0) { __size += SemanticdbOutputStream.computeInt32Size(3, __value) } }; - + { val __value = endCharacter if (__value != 0) { @@ -56,7 +56,7 @@ final case class Range( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -88,10 +88,10 @@ final case class Range( def withStartCharacter(__v: _root_.scala.Int): Range = copy(startCharacter = __v) def withEndLine(__v: _root_.scala.Int): Range = copy(endLine = __v) def withEndCharacter(__v: _root_.scala.Int): Range = copy(endCharacter = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Range]) } @@ -125,12 +125,12 @@ object Range extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.seman endCharacter = __endCharacter ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.Range( startLine = 0, startCharacter = 0, diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Schema.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Schema.scala index 841e69166feb..c9239c85f409 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Schema.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Schema.scala @@ -5,7 +5,7 @@ // Protofile syntax: PROTO3 package dotty.tools.dotc.semanticdb -import dotty.tools.dotc.semanticdb.internal._ +import dotty.tools.dotc.semanticdb.internal.* import scala.annotation.internal.sharable sealed abstract class Schema(val value: _root_.scala.Int) extends SemanticdbGeneratedEnum derives CanEqual { @@ -13,35 +13,35 @@ sealed abstract class Schema(val value: _root_.scala.Int) extends SemanticdbGen def isLegacy: _root_.scala.Boolean = false def isSemanticdb3: _root_.scala.Boolean = false def isSemanticdb4: _root_.scala.Boolean = false - + final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.Schema.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Schema.Recognized]) } object Schema { sealed trait Recognized extends Schema - - + + @SerialVersionUID(0L) case object LEGACY extends Schema(0) with Schema.Recognized { val index = 0 val name = "LEGACY" override def isLegacy: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object SEMANTICDB3 extends Schema(3) with Schema.Recognized { val index = 1 val name = "SEMANTICDB3" override def isSemanticdb3: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object SEMANTICDB4 extends Schema(4) with Schema.Recognized { val index = 2 val name = "SEMANTICDB4" override def isSemanticdb4: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Schema(unrecognizedValue) with SemanticdbUnrecognizedEnum lazy val values = scala.collection.immutable.Seq(LEGACY, SEMANTICDB3, SEMANTICDB4) @@ -51,6 +51,6 @@ object Schema { case 4 => SEMANTICDB4 case __other => 
Unrecognized(__other) } - - + + } \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Scope.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Scope.scala index 655ebe75185e..44d273d25af4 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Scope.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Scope.scala @@ -5,7 +5,7 @@ // Protofile syntax: PROTO3 package dotty.tools.dotc.semanticdb -import dotty.tools.dotc.semanticdb.internal._ +import dotty.tools.dotc.semanticdb.internal.* import scala.annotation.internal.sharable @SerialVersionUID(0L) @@ -14,8 +14,8 @@ final case class Scope( hardlinks: _root_.scala.Seq[dotty.tools.dotc.semanticdb.SymbolInformation] = _root_.scala.Seq.empty ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 symlinks.foreach { __item => val __value = __item @@ -34,7 +34,7 @@ final case class Scope( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { symlinks.foreach { __v => @@ -56,10 +56,10 @@ final case class Scope( def addHardlinks(__vs: dotty.tools.dotc.semanticdb.SymbolInformation *): Scope = addAllHardlinks(__vs) def addAllHardlinks(__vs: Iterable[dotty.tools.dotc.semanticdb.SymbolInformation]): Scope = copy(hardlinks = hardlinks ++ __vs) def withHardlinks(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.SymbolInformation]): Scope = copy(hardlinks = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Scope]) } @@ -85,12 +85,12 @@ object Scope extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.seman hardlinks = __hardlinks.result() ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.Scope( symlinks = _root_.scala.Seq.empty, hardlinks = _root_.scala.Seq.empty diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Signature.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Signature.scala index 228e2f02349b..810ea9a792d4 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Signature.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Signature.scala @@ -5,7 +5,7 @@ // Protofile syntax: PROTO3 package dotty.tools.dotc.semanticdb -import dotty.tools.dotc.semanticdb.internal._ +import dotty.tools.dotc.semanticdb.internal.* import scala.annotation.internal.sharable sealed trait Signature extends SemanticdbGeneratedSealedOneof derives CanEqual { @@ -18,10 +18,10 @@ sealed trait Signature extends SemanticdbGeneratedSealedOneof derives CanEqual object Signature { case object Empty extends dotty.tools.dotc.semanticdb.Signature - + sealed trait NonEmpty extends dotty.tools.dotc.semanticdb.Signature def defaultInstance: dotty.tools.dotc.semanticdb.Signature = Empty - + implicit val SignatureTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.SignatureMessage, dotty.tools.dotc.semanticdb.Signature] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.SignatureMessage, dotty.tools.dotc.semanticdb.Signature] { override def toCustom(__base: dotty.tools.dotc.semanticdb.SignatureMessage): dotty.tools.dotc.semanticdb.Signature = __base.sealedValue match { case __v: 
dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.ClassSignature => __v.value @@ -44,8 +44,8 @@ final case class SignatureMessage( sealedValue: dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.Empty ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (sealedValue.classSignature.isDefined) { val __value = sealedValue.classSignature.get @@ -72,7 +72,7 @@ final case class SignatureMessage( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { sealedValue.classSignature.foreach { __v => @@ -110,10 +110,10 @@ final case class SignatureMessage( def withValueSignature(__v: dotty.tools.dotc.semanticdb.ValueSignature): SignatureMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.ValueSignature(__v)) def clearSealedValue: SignatureMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.Empty) def withSealedValue(__v: dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue): SignatureMessage = copy(sealedValue = __v) - - - - + + + + def toSignature: dotty.tools.dotc.semanticdb.Signature = dotty.tools.dotc.semanticdb.Signature.SignatureTypeMapper.toCustom(this) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Signature]) } @@ -142,12 +142,12 @@ object SignatureMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools sealedValue = __sealedValue ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.SignatureMessage( sealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.Empty ) @@ -172,7 +172,7 @@ object SignatureMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools override def number: _root_.scala.Int = 0 override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value") } - + @SerialVersionUID(0L) final case class ClassSignature(value: dotty.tools.dotc.semanticdb.ClassSignature) extends dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue derives CanEqual { type ValueType = dotty.tools.dotc.semanticdb.ClassSignature @@ -222,8 +222,8 @@ final case class ClassSignature( declarations: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None ) extends dotty.tools.dotc.semanticdb.Signature.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (typeParameters.isDefined) { val __value = typeParameters.get @@ -233,7 +233,7 @@ final case class ClassSignature( val __value = dotty.tools.dotc.semanticdb.ClassSignature._typemapper_parents.toBase(__item) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } - + { val __value = dotty.tools.dotc.semanticdb.ClassSignature._typemapper_self.toBase(self) if (__value.serializedSize != 0) { @@ -253,7 +253,7 @@ final case class ClassSignature( __serializedSizeMemoized = __size } 
__size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { typeParameters.foreach { __v => @@ -294,10 +294,10 @@ final case class ClassSignature( def getDeclarations: dotty.tools.dotc.semanticdb.Scope = declarations.getOrElse(dotty.tools.dotc.semanticdb.Scope.defaultInstance) def clearDeclarations: ClassSignature = copy(declarations = _root_.scala.None) def withDeclarations(__v: dotty.tools.dotc.semanticdb.Scope): ClassSignature = copy(declarations = Option(__v)) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ClassSignature]) } @@ -331,12 +331,12 @@ object ClassSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools.d declarations = __declarations ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.ClassSignature( typeParameters = _root_.scala.None, parents = _root_.scala.Seq.empty, @@ -372,8 +372,8 @@ final case class MethodSignature( returnType: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.MethodSignature._typemapper_returnType.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Signature.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (typeParameters.isDefined) { val __value = typeParameters.get @@ -383,7 +383,7 @@ final case class MethodSignature( val __value = __item __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } - + { val __value = dotty.tools.dotc.semanticdb.MethodSignature._typemapper_returnType.toBase(returnType) if (__value.serializedSize != 0) { @@ -399,7 +399,7 @@ final case class MethodSignature( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { typeParameters.foreach { __v => @@ -431,10 +431,10 @@ final case class MethodSignature( def addAllParameterLists(__vs: Iterable[dotty.tools.dotc.semanticdb.Scope]): MethodSignature = copy(parameterLists = parameterLists ++ __vs) def withParameterLists(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Scope]): MethodSignature = copy(parameterLists = __v) def withReturnType(__v: dotty.tools.dotc.semanticdb.Type): MethodSignature = copy(returnType = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.MethodSignature]) } @@ -464,12 +464,12 @@ object MethodSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools. 
       returnType = dotty.tools.dotc.semanticdb.MethodSignature._typemapper_returnType.toCustom(__returnType.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
     )
   }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
   lazy val defaultInstance = dotty.tools.dotc.semanticdb.MethodSignature(
     typeParameters = _root_.scala.None,
     parameterLists = _root_.scala.Seq.empty,
@@ -499,21 +499,21 @@ final case class TypeSignature(
     upperBound: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_upperBound.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
     ) extends dotty.tools.dotc.semanticdb.Signature.NonEmpty with SemanticdbGeneratedMessage derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
       var __size = 0
       if (typeParameters.isDefined) {
         val __value = typeParameters.get
         __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
       };
-      
+
       {
         val __value = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_lowerBound.toBase(lowerBound)
         if (__value.serializedSize != 0) {
           __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
         }
       };
-      
+
       {
         val __value = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_upperBound.toBase(upperBound)
         if (__value.serializedSize != 0) {
@@ -529,7 +529,7 @@ final case class TypeSignature(
       __serializedSizeMemoized = __size
     }
     __size - 1
-    
+
   }
   def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
     typeParameters.foreach { __v =>
@@ -560,10 +560,10 @@ final case class TypeSignature(
   def withTypeParameters(__v: dotty.tools.dotc.semanticdb.Scope): TypeSignature = copy(typeParameters = Option(__v))
   def withLowerBound(__v: dotty.tools.dotc.semanticdb.Type): TypeSignature = copy(lowerBound = __v)
   def withUpperBound(__v: dotty.tools.dotc.semanticdb.Type): TypeSignature = copy(upperBound = __v)
-    
-    
-    
-    
+
+
+
+
   // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.TypeSignature])
 }
@@ -593,12 +593,12 @@ object TypeSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools.do
       upperBound = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_upperBound.toCustom(__upperBound.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
     )
   }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
   lazy val defaultInstance = dotty.tools.dotc.semanticdb.TypeSignature(
     typeParameters = _root_.scala.None,
     lowerBound = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_lowerBound.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
@@ -628,10 +628,10 @@ final case class ValueSignature(
     tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.ValueSignature._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
     ) extends dotty.tools.dotc.semanticdb.Signature.NonEmpty with SemanticdbGeneratedMessage derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
       var __size = 0
-      
+
       {
         val __value = dotty.tools.dotc.semanticdb.ValueSignature._typemapper_tpe.toBase(tpe)
         if (__value.serializedSize != 0) {
@@ -647,7 +647,7 @@ final case class ValueSignature(
       __serializedSizeMemoized = __size
     }
     __size - 1
-    
+
   }
   def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
     {
@@ -660,10 +660,10 @@ final case class ValueSignature(
     };
   }
   def withTpe(__v: dotty.tools.dotc.semanticdb.Type): ValueSignature = copy(tpe = __v)
-    
-    
-    
-    
+
+
+
+
   // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ValueSignature])
 }
@@ -685,12 +685,12 @@ object ValueSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools.d
       tpe = dotty.tools.dotc.semanticdb.ValueSignature._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
     )
   }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
   lazy val defaultInstance = dotty.tools.dotc.semanticdb.ValueSignature(
     tpe = dotty.tools.dotc.semanticdb.ValueSignature._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
   )
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolInformation.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolInformation.scala
index 93fbb207c4f6..d22504a51731 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolInformation.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolInformation.scala
@@ -5,7 +5,7 @@
 // Protofile syntax: PROTO3
 
 package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
 import scala.annotation.internal.sharable
 
 @SerialVersionUID(0L)
@@ -22,45 +22,45 @@ final case class SymbolInformation(
     documentation: _root_.scala.Option[dotty.tools.dotc.semanticdb.Documentation] = _root_.scala.None
     ) extends SemanticdbGeneratedMessage derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
       var __size = 0
-      
+
       {
         val __value = symbol
        if (!__value.isEmpty) {
          __size += SemanticdbOutputStream.computeStringSize(1, __value)
        }
      };
-      
+
      {
        val __value = language.value
        if (__value != 0) {
          __size += SemanticdbOutputStream.computeEnumSize(16, __value)
        }
      };
-      
+
      {
        val __value = kind.value
        if (__value != 0) {
          __size += SemanticdbOutputStream.computeEnumSize(3, __value)
        }
      };
-      
+
      {
        val __value = properties
        if (__value != 0) {
          __size += SemanticdbOutputStream.computeInt32Size(4, __value)
        }
      };
-      
+
      {
        val __value = displayName
        if (!__value.isEmpty) {
          __size += SemanticdbOutputStream.computeStringSize(5, __value)
        }
      };
-      
+
      {
        val __value = dotty.tools.dotc.semanticdb.SymbolInformation._typemapper_signature.toBase(signature)
        if (__value.serializedSize != 0) {
@@ -71,7 +71,7 @@ final case class SymbolInformation(
        val __value = __item
        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
      }
-      
+
      {
        val __value = dotty.tools.dotc.semanticdb.SymbolInformation._typemapper_access.toBase(access)
        if (__value.serializedSize != 0) {
@@ -95,7 +95,7 @@ final case class SymbolInformation(
       __serializedSizeMemoized = __size
     }
     __size - 1
-    
+
   }
   def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
     {
@@ -179,10 +179,10 @@ final case class SymbolInformation(
   def getDocumentation: dotty.tools.dotc.semanticdb.Documentation = documentation.getOrElse(dotty.tools.dotc.semanticdb.Documentation.defaultInstance)
   def clearDocumentation: SymbolInformation = copy(documentation = _root_.scala.None)
   def withDocumentation(__v: dotty.tools.dotc.semanticdb.Documentation): SymbolInformation = copy(documentation = Option(__v))
-    
-    
-    
-    
+
+
+
+
   // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.SymbolInformation])
 }
@@ -240,12 +240,12 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool
       documentation = __documentation
     )
   }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
   lazy val defaultInstance = dotty.tools.dotc.semanticdb.SymbolInformation(
     symbol = "",
     language = dotty.tools.dotc.semanticdb.Language.UNKNOWN_LANGUAGE,
@@ -276,126 +276,126 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool
     def isClass: _root_.scala.Boolean = false
     def isTrait: _root_.scala.Boolean = false
     def isInterface: _root_.scala.Boolean = false
-    
+
     final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.SymbolInformation.Kind.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.SymbolInformation.Kind.Recognized])
   }
-  
+
   object Kind {
     sealed trait Recognized extends Kind
-    
-    
+
+
     @SerialVersionUID(0L)
     case object UNKNOWN_KIND extends Kind(0) with Kind.Recognized {
       val index = 0
       val name = "UNKNOWN_KIND"
       override def isUnknownKind: _root_.scala.Boolean = true
     }
-    
+
     @SerialVersionUID(0L)
     case object LOCAL extends Kind(19) with Kind.Recognized {
       val index = 1
      val name = "LOCAL"
      override def isLocal: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object FIELD extends Kind(20) with Kind.Recognized {
      val index = 2
      val name = "FIELD"
      override def isField: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object METHOD extends Kind(3) with Kind.Recognized {
      val index = 3
      val name = "METHOD"
      override def isMethod: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object CONSTRUCTOR extends Kind(21) with Kind.Recognized {
      val index = 4
      val name = "CONSTRUCTOR"
      override def isConstructor: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object MACRO extends Kind(6) with Kind.Recognized {
      val index = 5
      val name = "MACRO"
      override def isMacro: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object TYPE extends Kind(7) with Kind.Recognized {
      val index = 6
      val name = "TYPE"
      override def isType: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object PARAMETER extends Kind(8) with Kind.Recognized {
      val index = 7
      val name = "PARAMETER"
      override def isParameter: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object SELF_PARAMETER extends Kind(17) with Kind.Recognized {
      val index = 8
      val name = "SELF_PARAMETER"
      override def isSelfParameter: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object TYPE_PARAMETER extends Kind(9) with Kind.Recognized {
      val index = 9
      val name = "TYPE_PARAMETER"
      override def isTypeParameter: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object OBJECT extends Kind(10) with Kind.Recognized {
      val index = 10
      val name = "OBJECT"
      override def isObject: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object PACKAGE extends Kind(11) with Kind.Recognized {
      val index = 11
      val name = "PACKAGE"
      override def isPackage: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object PACKAGE_OBJECT extends Kind(12) with Kind.Recognized {
      val index = 12
      val name = "PACKAGE_OBJECT"
      override def isPackageObject: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object CLASS extends Kind(13) with Kind.Recognized {
      val index = 13
      val name = "CLASS"
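[Editor's sketch, not part of the patch: every enum in these generated files (`Kind` above, `Property`, `Role`, ...) follows the same scalapb-style shape, where each case object carries its protobuf wire value and unknown wire values survive round-trips through an `Unrecognized` case. A minimal illustration with a hypothetical two-case enum:]

```scala
// Hypothetical `Color` enum mirroring the generated shape above.
sealed abstract class Color(val value: Int)
object Color {
  sealed trait Recognized extends Color
  case object Unknown extends Color(0) with Recognized { val index = 0; val name = "UNKNOWN" }
  case object Red     extends Color(1) with Recognized { val index = 1; val name = "RED" }
  // Wire values this version of the schema does not know still round-trip.
  final case class Unrecognized(unrecognizedValue: Int) extends Color(unrecognizedValue)

  def fromValue(value: Int): Color = value match {
    case 0     => Unknown
    case 1     => Red
    case other => Unrecognized(other)
  }
}
```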
      override def isClass: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object TRAIT extends Kind(14) with Kind.Recognized {
      val index = 14
      val name = "TRAIT"
      override def isTrait: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object INTERFACE extends Kind(18) with Kind.Recognized {
      val index = 15
      val name = "INTERFACE"
      override def isInterface: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Kind(unrecognizedValue) with SemanticdbUnrecognizedEnum
    lazy val values = scala.collection.immutable.Seq(UNKNOWN_KIND, LOCAL, FIELD, METHOD, CONSTRUCTOR, MACRO, TYPE, PARAMETER, SELF_PARAMETER, TYPE_PARAMETER, OBJECT, PACKAGE, PACKAGE_OBJECT, CLASS, TRAIT, INTERFACE)
@@ -418,8 +418,8 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool
      case 21 => CONSTRUCTOR
      case __other => Unrecognized(__other)
    }
-    
-    
+
+
  }
  sealed abstract class Property(val value: _root_.scala.Int) extends SemanticdbGeneratedEnum derives CanEqual {
    type EnumType = Property
@@ -444,161 +444,161 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool
    def isTransparent: _root_.scala.Boolean = false
    def isInfix: _root_.scala.Boolean = false
    def isOpaque: _root_.scala.Boolean = false
-    
+
    final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.SymbolInformation.Property.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.SymbolInformation.Property.Recognized])
  }
-  
+
  object Property {
    sealed trait Recognized extends Property
-    
-    
+
+
    @SerialVersionUID(0L)
    case object UNKNOWN_PROPERTY extends Property(0) with Property.Recognized {
      val index = 0
      val name = "UNKNOWN_PROPERTY"
      override def isUnknownProperty: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object ABSTRACT extends Property(4) with Property.Recognized {
      val index = 1
      val name = "ABSTRACT"
      override def isAbstract: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object FINAL extends Property(8) with Property.Recognized {
      val index = 2
      val name = "FINAL"
      override def isFinal: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object SEALED extends Property(16) with Property.Recognized {
      val index = 3
      val name = "SEALED"
      override def isSealed: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object IMPLICIT extends Property(32) with Property.Recognized {
      val index = 4
      val name = "IMPLICIT"
      override def isImplicit: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object LAZY extends Property(64) with Property.Recognized {
      val index = 5
      val name = "LAZY"
      override def isLazy: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object CASE extends Property(128) with Property.Recognized {
      val index = 6
      val name = "CASE"
      override def isCase: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object COVARIANT extends Property(256) with Property.Recognized {
      val index = 7
      val name = "COVARIANT"
      override def isCovariant: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object CONTRAVARIANT extends Property(512) with Property.Recognized {
      val index = 8
      val name = "CONTRAVARIANT"
      override def isContravariant: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object VAL extends Property(1024) with Property.Recognized {
      val index = 9
      val name = "VAL"
      override def isVal: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object VAR extends Property(2048) with Property.Recognized {
      val index = 10
      val name = "VAR"
      override def isVar: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object STATIC extends Property(4096) with Property.Recognized {
      val index = 11
      val name = "STATIC"
      override def isStatic: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object PRIMARY extends Property(8192) with Property.Recognized {
      val index = 12
      val name = "PRIMARY"
      override def isPrimary: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object ENUM extends Property(16384) with Property.Recognized {
      val index = 13
      val name = "ENUM"
      override def isEnum: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object DEFAULT extends Property(32768) with Property.Recognized {
      val index = 14
      val name = "DEFAULT"
      override def isDefault: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object GIVEN extends Property(65536) with Property.Recognized {
      val index = 15
      val name = "GIVEN"
      override def isGiven: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object INLINE extends Property(131072) with Property.Recognized {
      val index = 16
      val name = "INLINE"
      override def isInline: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object OPEN extends Property(262144) with Property.Recognized {
      val index = 17
      val name = "OPEN"
      override def isOpen: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object TRANSPARENT extends Property(524288) with Property.Recognized {
      val index = 18
      val name = "TRANSPARENT"
      override def isTransparent: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object INFIX extends Property(1048576) with Property.Recognized {
      val index = 19
      val name = "INFIX"
      override def isInfix: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object OPAQUE extends Property(2097152) with Property.Recognized {
      val index = 20
      val name = "OPAQUE"
      override def isOpaque: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Property(unrecognizedValue) with SemanticdbUnrecognizedEnum
    lazy val values = scala.collection.immutable.Seq(UNKNOWN_PROPERTY, ABSTRACT, FINAL, SEALED, IMPLICIT, LAZY, CASE, COVARIANT, CONTRAVARIANT, VAL, VAR, STATIC, PRIMARY, ENUM, DEFAULT, GIVEN, INLINE, OPEN, TRANSPARENT, INFIX, OPAQUE)
@@ -626,8 +626,8 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool
      case 2097152 => OPAQUE
      case __other => Unrecognized(__other)
    }
-    
-    
+
+
  }
  final val SYMBOL_FIELD_NUMBER = 1
  final val LANGUAGE_FIELD_NUMBER = 16
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolOccurrence.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolOccurrence.scala
index 5d7670dfdd32..e68a0b6b9efe 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolOccurrence.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolOccurrence.scala
@@ -5,7 +5,7 @@
 // Protofile syntax: PROTO3
 
 package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
 import scala.annotation.internal.sharable
 
 @SerialVersionUID(0L)
@@ -15,21 +15,21 @@ final case class SymbolOccurrence(
     role: dotty.tools.dotc.semanticdb.SymbolOccurrence.Role = dotty.tools.dotc.semanticdb.SymbolOccurrence.Role.UNKNOWN_ROLE
     ) extends SemanticdbGeneratedMessage derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
      if (range.isDefined) {
        val __value = range.get
        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
      };
-      
+
      {
        val __value = symbol
        if (!__value.isEmpty) {
          __size += SemanticdbOutputStream.computeStringSize(2, __value)
        }
      };
-      
+
      {
        val __value = role.value
        if (__value != 0) {
@@ -45,7 +45,7 @@ final case class SymbolOccurrence(
      __serializedSizeMemoized = __size
    }
    __size - 1
-    
+
  }
  def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
    range.foreach { __v =>
@@ -72,10 +72,10 @@ final case class SymbolOccurrence(
  def withRange(__v: dotty.tools.dotc.semanticdb.Range): SymbolOccurrence = copy(range = Option(__v))
  def withSymbol(__v: _root_.scala.Predef.String): SymbolOccurrence = copy(symbol = __v)
  def withRole(__v: dotty.tools.dotc.semanticdb.SymbolOccurrence.Role): SymbolOccurrence = copy(role = __v)
-    
-    
-    
-    
+
+
+
+
  // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.SymbolOccurrence])
 }
@@ -105,12 +105,12 @@ object SymbolOccurrence extends SemanticdbGeneratedMessageCompanion[dotty.tools
      role = __role
    )
  }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.SymbolOccurrence(
    range = _root_.scala.None,
    symbol = "",
@@ -121,35 +121,35 @@ object SymbolOccurrence extends SemanticdbGeneratedMessageCompanion[dotty.tools
    def isUnknownRole: _root_.scala.Boolean = false
    def isReference: _root_.scala.Boolean = false
    def isDefinition: _root_.scala.Boolean = false
-    
+
    final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.SymbolOccurrence.Role.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.SymbolOccurrence.Role.Recognized])
  }
-  
+
  object Role {
    sealed trait Recognized extends Role
-    
-    
+
+
    @SerialVersionUID(0L)
    case object UNKNOWN_ROLE extends Role(0) with Role.Recognized {
      val index = 0
      val name = "UNKNOWN_ROLE"
      override def isUnknownRole: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object REFERENCE extends Role(1) with Role.Recognized {
      val index = 1
      val name = "REFERENCE"
      override def isReference: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    case object DEFINITION extends Role(2) with Role.Recognized {
      val index = 2
      val name = "DEFINITION"
      override def isDefinition: _root_.scala.Boolean = true
    }
-    
+
    @SerialVersionUID(0L)
    final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Role(unrecognizedValue) with SemanticdbUnrecognizedEnum
    lazy val values = scala.collection.immutable.Seq(UNKNOWN_ROLE, REFERENCE, DEFINITION)
@@ -159,8 +159,8 @@ object SymbolOccurrence extends SemanticdbGeneratedMessageCompanion[dotty.tools
      case 2 => DEFINITION
      case __other => Unrecognized(__other)
    }
-    
-    
+
+
  }
  final val RANGE_FIELD_NUMBER = 1
  final val SYMBOL_FIELD_NUMBER = 2
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Synthetic.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Synthetic.scala
index 3c6fcfbf4c6a..bb7bcacea092 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Synthetic.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Synthetic.scala
@@ -5,7 +5,7 @@
 // Protofile syntax: PROTO3
 
 package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
 import scala.annotation.internal.sharable
 
 @SerialVersionUID(0L)
@@ -14,14 +14,14 @@ final case class Synthetic(
     tree: dotty.tools.dotc.semanticdb.Tree = dotty.tools.dotc.semanticdb.Synthetic._typemapper_tree.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance)
     ) extends SemanticdbGeneratedMessage derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
      if (range.isDefined) {
        val __value = range.get
        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
      };
-      
+
      {
        val __value = dotty.tools.dotc.semanticdb.Synthetic._typemapper_tree.toBase(tree)
        if (__value.serializedSize != 0) {
@@ -37,7 +37,7 @@ final case class Synthetic(
      __serializedSizeMemoized = __size
    }
    __size - 1
-    
+
  }
  def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
    range.foreach { __v =>
@@ -59,10 +59,10 @@ final case class Synthetic(
  def clearRange: Synthetic = copy(range = _root_.scala.None)
  def withRange(__v: dotty.tools.dotc.semanticdb.Range): Synthetic = copy(range = Option(__v))
  def withTree(__v: dotty.tools.dotc.semanticdb.Tree): Synthetic = copy(tree = __v)
-    
-    
-    
-    
+
+
+
+
  // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Synthetic])
 }
@@ -88,12 +88,12 @@ object Synthetic extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s
      tree = dotty.tools.dotc.semanticdb.Synthetic._typemapper_tree.toCustom(__tree.getOrElse(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance))
    )
  }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.Synthetic(
    range = _root_.scala.None,
    tree = dotty.tools.dotc.semanticdb.Synthetic._typemapper_tree.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance)
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocument.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocument.scala
index f0347e86d9e3..723df545c4c5 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocument.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocument.scala
@@ -5,7 +5,7 @@
 // Protofile syntax: PROTO3
 
 package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
 import scala.annotation.internal.sharable
 
 @SerialVersionUID(0L)
@@ -21,38 +21,38 @@ final case class TextDocument(
     synthetics: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Synthetic] = _root_.scala.Seq.empty
     ) extends SemanticdbGeneratedMessage derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-      
+
      {
        val __value = schema.value
        if (__value != 0) {
          __size += SemanticdbOutputStream.computeEnumSize(1, __value)
        }
      };
-      
+
      {
        val __value = uri
        if (!__value.isEmpty) {
          __size += SemanticdbOutputStream.computeStringSize(2, __value)
        }
      };
-      
+
      {
        val __value = text
        if (!__value.isEmpty) {
          __size += SemanticdbOutputStream.computeStringSize(3, __value)
        }
      };
-      
+
      {
        val __value = md5
        if (!__value.isEmpty) {
          __size += SemanticdbOutputStream.computeStringSize(11, __value)
        }
      };
-      
+
      {
        val __value = language.value
        if (__value != 0) {
@@ -84,7 +84,7 @@ final case class TextDocument(
      __serializedSizeMemoized = __size
    }
    __size - 1
-    
+
  }
  def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
    {
@@ -163,10 +163,10 @@ final case class TextDocument(
  def addSynthetics(__vs: dotty.tools.dotc.semanticdb.Synthetic *): TextDocument = addAllSynthetics(__vs)
  def addAllSynthetics(__vs: Iterable[dotty.tools.dotc.semanticdb.Synthetic]): TextDocument = copy(synthetics = synthetics ++ __vs)
  def withSynthetics(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Synthetic]): TextDocument = copy(synthetics = __v)
-    
-    
-    
-    
+
+
+
+
  // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.TextDocument])
 }
@@ -220,12 +220,12 @@ object TextDocument extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot
      synthetics = __synthetics.result()
    )
  }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.TextDocument(
    schema = dotty.tools.dotc.semanticdb.Schema.LEGACY,
    uri = "",
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocuments.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocuments.scala
index 41b8e1b3f491..cab86417cfc9 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocuments.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocuments.scala
@@ -5,7 +5,7 @@
 // Protofile syntax: PROTO3
 
 package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
 import scala.annotation.internal.sharable
 
 @SerialVersionUID(0L)
@@ -13,8 +13,8 @@ final case class TextDocuments(
     documents: _root_.scala.Seq[dotty.tools.dotc.semanticdb.TextDocument] = _root_.scala.Seq.empty
     ) extends SemanticdbGeneratedMessage derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
      documents.foreach { __item =>
        val __value = __item
@@ -29,7 +29,7 @@ final case class TextDocuments(
      __serializedSizeMemoized = __size
    }
    __size - 1
-    
+
  }
  def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
    documents.foreach { __v =>
@@ -43,10 +43,10 @@ final case class TextDocuments(
  def addDocuments(__vs: dotty.tools.dotc.semanticdb.TextDocument *): TextDocuments = addAllDocuments(__vs)
  def addAllDocuments(__vs: Iterable[dotty.tools.dotc.semanticdb.TextDocument]): TextDocuments = copy(documents = documents ++ __vs)
  def withDocuments(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.TextDocument]): TextDocuments = copy(documents = __v)
-    
-    
-    
-    
+
+
+
+
  // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.TextDocuments])
 }
@@ -68,12 +68,12 @@ object TextDocuments extends SemanticdbGeneratedMessageCompanion[dotty.tools.do
      documents = __documents.result()
    )
  }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.TextDocuments(
    documents = _root_.scala.Seq.empty
  )
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Tree.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Tree.scala
index ed84d9b2f2d0..310e9c010826 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Tree.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Tree.scala
@@ -5,7 +5,7 @@
 // Protofile syntax: PROTO3
 
 package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
 import scala.annotation.internal.sharable
 
 sealed trait Tree  extends SemanticdbGeneratedSealedOneof  derives CanEqual {
@@ -18,10 +18,10 @@ sealed trait Tree  extends SemanticdbGeneratedSealedOneof  derives CanEqual {
 
 object Tree {
   case object Empty extends dotty.tools.dotc.semanticdb.Tree
-  
+
   sealed trait NonEmpty extends dotty.tools.dotc.semanticdb.Tree
   def defaultInstance: dotty.tools.dotc.semanticdb.Tree = Empty
-  
+
   implicit val TreeTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree] {
     override def toCustom(__base: dotty.tools.dotc.semanticdb.TreeMessage): dotty.tools.dotc.semanticdb.Tree = __base.sealedValue match {
       case __v: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.ApplyTree => __v.value
@@ -52,8 +52,8 @@ final case class TreeMessage(
     sealedValue: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.Empty
     ) extends SemanticdbGeneratedMessage  derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
      if (sealedValue.applyTree.isDefined) {
        val __value = sealedValue.applyTree.get
@@ -96,7 +96,7 @@ final case class TreeMessage(
      __serializedSizeMemoized = __size
    }
    __size - 1
-    
+
  }
  def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
    sealedValue.applyTree.foreach { __v =>
@@ -166,10 +166,10 @@ final case class TreeMessage(
  def withTypeApplyTree(__v: dotty.tools.dotc.semanticdb.TypeApplyTree): TreeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.TypeApplyTree(__v))
  def clearSealedValue: TreeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.Empty)
  def withSealedValue(__v: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue): TreeMessage = copy(sealedValue = __v)
-    
-    
-    
-    
+
+
+
+
  def toTree: dotty.tools.dotc.semanticdb.Tree = dotty.tools.dotc.semanticdb.Tree.TreeTypeMapper.toCustom(this)
  // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Tree])
 }
@@ -206,12 +206,12 @@ object TreeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc
      sealedValue = __sealedValue
    )
  }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.TreeMessage(
    sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.Empty
  )
@@ -244,7 +244,7 @@ object TreeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc
      override def number: _root_.scala.Int = 0
      override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value")
    }
-    
+
    @SerialVersionUID(0L)
    final case class ApplyTree(value: dotty.tools.dotc.semanticdb.ApplyTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue derives CanEqual {
      type ValueType = dotty.tools.dotc.semanticdb.ApplyTree
@@ -324,10 +324,10 @@ final case class ApplyTree(
     arguments: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Tree] = _root_.scala.Seq.empty
     ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-      
+
      {
        val __value = dotty.tools.dotc.semanticdb.ApplyTree._typemapper_function.toBase(function)
        if (__value.serializedSize != 0) {
@@ -347,7 +347,7 @@ final case class ApplyTree(
      __serializedSizeMemoized = __size
    }
    __size - 1
-    
+
  }
  def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
    {
@@ -370,10 +370,10 @@ final case class ApplyTree(
  def addArguments(__vs: dotty.tools.dotc.semanticdb.Tree *): ApplyTree = addAllArguments(__vs)
  def addAllArguments(__vs: Iterable[dotty.tools.dotc.semanticdb.Tree]): ApplyTree = copy(arguments = arguments ++ __vs)
  def withArguments(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Tree]): ApplyTree = copy(arguments = __v)
-    
-    
-    
-    
+
+
+
+
  // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ApplyTree])
 }
@@ -399,12 +399,12 @@ object ApplyTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s
      arguments = __arguments.result()
    )
  }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.ApplyTree(
    function = dotty.tools.dotc.semanticdb.ApplyTree._typemapper_function.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance),
    arguments = _root_.scala.Seq.empty
@@ -431,14 +431,14 @@ final case class FunctionTree(
     body: dotty.tools.dotc.semanticdb.Tree = dotty.tools.dotc.semanticdb.FunctionTree._typemapper_body.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance)
     ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
      parameters.foreach { __item =>
        val __value = __item
        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
      }
-      
+
      {
        val __value = dotty.tools.dotc.semanticdb.FunctionTree._typemapper_body.toBase(body)
        if (__value.serializedSize != 0) {
@@ -454,7 +454,7 @@ final case class FunctionTree(
      __serializedSizeMemoized = __size
    }
    __size - 1
-    
+
  }
  def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
    parameters.foreach { __v =>
@@ -477,10 +477,10 @@ final case class FunctionTree(
  def addAllParameters(__vs: Iterable[dotty.tools.dotc.semanticdb.IdTree]): FunctionTree = copy(parameters = parameters ++ __vs)
  def withParameters(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.IdTree]): FunctionTree = copy(parameters = __v)
  def withBody(__v: dotty.tools.dotc.semanticdb.Tree): FunctionTree = copy(body = __v)
-    
-    
-    
-    
+
+
+
+
  // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.FunctionTree])
 }
@@ -506,12 +506,12 @@ object FunctionTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot
      body = dotty.tools.dotc.semanticdb.FunctionTree._typemapper_body.toCustom(__body.getOrElse(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance))
    )
  }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.FunctionTree(
    parameters = _root_.scala.Seq.empty,
    body = dotty.tools.dotc.semanticdb.FunctionTree._typemapper_body.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance)
@@ -535,10 +535,10 @@ final case class IdTree(
     symbol: _root_.scala.Predef.String = ""
     ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-      
+
      {
        val __value = symbol
        if (!__value.isEmpty) {
@@ -554,7 +554,7 @@ final case class IdTree(
      __serializedSizeMemoized = __size
    }
    __size - 1
-    
+
  }
  def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
    {
@@ -565,10 +565,10 @@ final case class IdTree(
    };
  }
  def withSymbol(__v: _root_.scala.Predef.String): IdTree = copy(symbol = __v)
-    
-    
-    
-    
+
+
+
+
  // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.IdTree])
 }
@@ -590,12 +590,12 @@ object IdTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.sema
      symbol = __symbol
    )
  }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.IdTree(
    symbol = ""
  )
@@ -613,10 +613,10 @@ final case class LiteralTree(
     constant: dotty.tools.dotc.semanticdb.Constant = dotty.tools.dotc.semanticdb.LiteralTree._typemapper_constant.toCustom(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance)
     ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-      
+
      {
        val __value = dotty.tools.dotc.semanticdb.LiteralTree._typemapper_constant.toBase(constant)
        if (__value.serializedSize != 0) {
@@ -632,7 +632,7 @@ final case class LiteralTree(
      __serializedSizeMemoized = __size
    }
    __size - 1
-    
+
  }
  def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
    {
@@ -645,10 +645,10 @@ final case class LiteralTree(
    };
  }
  def withConstant(__v: dotty.tools.dotc.semanticdb.Constant): LiteralTree = copy(constant = __v)
-    
-    
-    
-    
+
+
+
+
  // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.LiteralTree])
 }
@@ -670,12 +670,12 @@ object LiteralTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc
      constant = dotty.tools.dotc.semanticdb.LiteralTree._typemapper_constant.toCustom(__constant.getOrElse(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance))
    )
  }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.LiteralTree(
    constant = dotty.tools.dotc.semanticdb.LiteralTree._typemapper_constant.toCustom(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance)
  )
@@ -696,17 +696,17 @@ final case class MacroExpansionTree(
     tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
     ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-      
+
      {
        val __value = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_beforeExpansion.toBase(beforeExpansion)
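[Editor's sketch, not part of the patch: every message touched by this diff caches its wire size via the same trick. `__serializedSizeMemoized` stores `__computeSerializedSize() + 1`, so the field's default value `0` unambiguously means "not yet computed" even when the real size is `0`, which is why `serializedSize` ends in `__size - 1`. A stripped-down illustration with hypothetical names:]

```scala
// Hypothetical `Msg` showing the size-memoization pattern used above.
final class Msg(payload: Array[Byte]) {
  @transient private var __serializedSizeMemoized: Int = 0
  private def __computeSerializedSize(): Int = payload.length
  final def serializedSize: Int = {
    var __size = __serializedSizeMemoized
    if (__size == 0) {                        // 0 means "not cached yet"
      __size = __computeSerializedSize() + 1  // shift by one so a real size of 0 still caches
      __serializedSizeMemoized = __size
    }
    __size - 1                                // undo the shift
  }
}
```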
        if (__value.serializedSize != 0) {
          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
        }
      };
-      
+
      {
        val __value = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_tpe.toBase(tpe)
        if (__value.serializedSize != 0) {
@@ -722,7 +722,7 @@ final case class MacroExpansionTree(
      __serializedSizeMemoized = __size
    }
    __size - 1
-    
+
  }
  def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
    {
@@ -744,10 +744,10 @@ final case class MacroExpansionTree(
  }
  def withBeforeExpansion(__v: dotty.tools.dotc.semanticdb.Tree): MacroExpansionTree = copy(beforeExpansion = __v)
  def withTpe(__v: dotty.tools.dotc.semanticdb.Type): MacroExpansionTree = copy(tpe = __v)
-    
-    
-    
-    
+
+
+
+
  // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.MacroExpansionTree])
 }
@@ -773,12 +773,12 @@ object MacroExpansionTree extends SemanticdbGeneratedMessageCompanion[dotty.too
      tpe = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
    )
  }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.MacroExpansionTree(
    beforeExpansion = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_beforeExpansion.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance),
    tpe = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
@@ -804,8 +804,8 @@ final case class OriginalTree(
     range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = _root_.scala.None
     ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
      if (range.isDefined) {
        val __value = range.get
@@ -820,7 +820,7 @@ final case class OriginalTree(
      __serializedSizeMemoized = __size
    }
    __size - 1
-    
+
  }
  def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
    range.foreach { __v =>
@@ -833,10 +833,10 @@ final case class OriginalTree(
  def getRange: dotty.tools.dotc.semanticdb.Range = range.getOrElse(dotty.tools.dotc.semanticdb.Range.defaultInstance)
  def clearRange: OriginalTree = copy(range = _root_.scala.None)
  def withRange(__v: dotty.tools.dotc.semanticdb.Range): OriginalTree = copy(range = Option(__v))
-    
-    
-    
-    
+
+
+
+
  // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.OriginalTree])
 }
@@ -858,12 +858,12 @@ object OriginalTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot
      range = __range
    )
  }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.OriginalTree(
    range = _root_.scala.None
  )
@@ -882,10 +882,10 @@ final case class SelectTree(
     id: _root_.scala.Option[dotty.tools.dotc.semanticdb.IdTree] = _root_.scala.None
     ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-      
+
      {
        val __value = dotty.tools.dotc.semanticdb.SelectTree._typemapper_qualifier.toBase(qualifier)
        if (__value.serializedSize != 0) {
@@ -905,7 +905,7 @@ final case class SelectTree(
      __serializedSizeMemoized = __size
    }
    __size - 1
-    
+
  }
  def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
    {
@@ -927,10 +927,10 @@ final case class SelectTree(
  def getId: dotty.tools.dotc.semanticdb.IdTree = id.getOrElse(dotty.tools.dotc.semanticdb.IdTree.defaultInstance)
  def clearId: SelectTree = copy(id = _root_.scala.None)
  def withId(__v: dotty.tools.dotc.semanticdb.IdTree): SelectTree = copy(id = Option(__v))
-    
-    
-    
-    
+
+
+
+
  // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.SelectTree])
 }
@@ -956,12 +956,12 @@ object SelectTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.
      id = __id
    )
  }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.SelectTree(
    qualifier = dotty.tools.dotc.semanticdb.SelectTree._typemapper_qualifier.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance),
    id = _root_.scala.None
@@ -986,10 +986,10 @@ final case class TypeApplyTree(
     typeArguments: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty
     ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-      
+
      {
        val __value = dotty.tools.dotc.semanticdb.TypeApplyTree._typemapper_function.toBase(function)
        if (__value.serializedSize != 0) {
@@ -1009,7 +1009,7 @@ final case class TypeApplyTree(
      __serializedSizeMemoized = __size
    }
    __size - 1
-    
+
  }
  def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
    {
@@ -1032,10 +1032,10 @@ final case class TypeApplyTree(
  def addTypeArguments(__vs: dotty.tools.dotc.semanticdb.Type *): TypeApplyTree = addAllTypeArguments(__vs)
  def addAllTypeArguments(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): TypeApplyTree = copy(typeArguments = typeArguments ++ __vs)
  def withTypeArguments(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): TypeApplyTree = copy(typeArguments = __v)
-    
-    
-    
-    
+
+
+
+
  // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.TypeApplyTree])
 }
@@ -1061,12 +1061,12 @@ object TypeApplyTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.do
      typeArguments = __typeArguments.result()
    )
  }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.TypeApplyTree(
    function = dotty.tools.dotc.semanticdb.TypeApplyTree._typemapper_function.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance),
    typeArguments = _root_.scala.Seq.empty
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala
index be9cc6034f2c..0b2a35a8e1cd 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala
@@ -5,7 +5,7 @@
 // Protofile syntax: PROTO3
 
 package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
 import scala.annotation.internal.sharable
 
 sealed trait Type  extends SemanticdbGeneratedSealedOneof  derives CanEqual {
@@ -18,10 +18,10 @@ sealed trait Type  extends SemanticdbGeneratedSealedOneof  derives CanEqual {
 
 object Type {
   case object Empty extends dotty.tools.dotc.semanticdb.Type
-  
+
   sealed trait NonEmpty extends dotty.tools.dotc.semanticdb.Type
   def defaultInstance: dotty.tools.dotc.semanticdb.Type = Empty
-  
+
   implicit val TypeTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] {
     override def toCustom(__base: dotty.tools.dotc.semanticdb.TypeMessage): dotty.tools.dotc.semanticdb.Type = __base.sealedValue match {
       case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.TypeRef => __v.value
@@ -68,8 +68,8 @@ final case class TypeMessage(
     sealedValue: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty
     ) extends SemanticdbGeneratedMessage  derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
      if (sealedValue.typeRef.isDefined) {
        val __value = sealedValue.typeRef.get
@@ -144,7 +144,7 @@ final case class TypeMessage(
      __serializedSizeMemoized = __size
    }
    __size - 1
-    
+
  }
  def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
    sealedValue.typeRef.foreach { __v =>
@@ -278,10 +278,10 @@ final case class TypeMessage(
  def withLambdaType(__v: dotty.tools.dotc.semanticdb.LambdaType): TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.LambdaType(__v))
  def clearSealedValue: TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty)
  def withSealedValue(__v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue): TypeMessage = copy(sealedValue = __v)
-    
-    
-    
-    
+
+
+
+
  def toType: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.Type.TypeTypeMapper.toCustom(this)
  // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Type])
 }
@@ -334,12 +334,12 @@ object TypeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc
      sealedValue = __sealedValue
    )
  }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.TypeMessage(
    sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty
  )
@@ -388,7 +388,7 @@ object TypeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc
      override def number: _root_.scala.Int = 0
      override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value")
    }
-    
+
    @SerialVersionUID(0L)
    final case class TypeRef(value: dotty.tools.dotc.semanticdb.TypeRef) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual {
      type ValueType = dotty.tools.dotc.semanticdb.TypeRef
@@ -533,17 +533,17 @@ final case class TypeRef(
     typeArguments: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty
     ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-      
+
      {
        val __value = dotty.tools.dotc.semanticdb.TypeRef._typemapper_prefix.toBase(prefix)
        if (__value.serializedSize != 0) {
          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
        }
      };
-      
+
      {
        val __value = symbol
        if (!__value.isEmpty) {
@@ -563,7 +563,7 @@ final case class TypeRef(
      __serializedSizeMemoized = __size
    }
    __size - 1
-    
+
  }
  def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
    {
@@ -593,10 +593,10 @@ final case class TypeRef(
  def addTypeArguments(__vs: dotty.tools.dotc.semanticdb.Type *): TypeRef = addAllTypeArguments(__vs)
  def addAllTypeArguments(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): TypeRef = copy(typeArguments = typeArguments ++ __vs)
  def withTypeArguments(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): TypeRef = copy(typeArguments = __v)
-    
-    
-    
-    
+
+
+
+
  // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.TypeRef])
 }
@@ -626,12 +626,12 @@ object TypeRef extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.sem
      typeArguments = __typeArguments.result()
    )
  }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.TypeRef(
    prefix = dotty.tools.dotc.semanticdb.TypeRef._typemapper_prefix.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
    symbol = "",
@@ -662,17 +662,17 @@ final case class SingleType(
     symbol: _root_.scala.Predef.String = ""
     ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-      
+
      {
        val __value = dotty.tools.dotc.semanticdb.SingleType._typemapper_prefix.toBase(prefix)
        if (__value.serializedSize != 0) {
          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
        }
      };
-      
+
      {
        val __value = symbol
        if (!__value.isEmpty) {
@@ -688,7 +688,7 @@ final case class SingleType(
      __serializedSizeMemoized = __size
    }
    __size - 1
-    
+
  }
  def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
    {
@@ -708,10 +708,10 @@ final case class SingleType(
  }
  def withPrefix(__v: dotty.tools.dotc.semanticdb.Type): SingleType = copy(prefix = __v)
  def withSymbol(__v: _root_.scala.Predef.String): SingleType = copy(symbol = __v)
-    
-    
-    
-    
+
+
+
+
  // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.SingleType])
 }
@@ -737,12 +737,12 @@ object SingleType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.
      symbol = __symbol
    )
  }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.SingleType(
    prefix = dotty.tools.dotc.semanticdb.SingleType._typemapper_prefix.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
    symbol = ""
@@ -766,10 +766,10 @@ final case class ThisType(
     symbol: _root_.scala.Predef.String = ""
     ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-      
+
      {
        val __value = symbol
        if (!__value.isEmpty) {
@@ -785,7 +785,7 @@ final case class ThisType(
      __serializedSizeMemoized = __size
    }
    __size - 1
-    
+
  }
  def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
    {
@@ -796,10 +796,10 @@ final case class ThisType(
    };
  }
  def withSymbol(__v: _root_.scala.Predef.String): ThisType = copy(symbol = __v)
-    
-    
-    
-    
+
+
+
+
  // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ThisType])
 }
@@ -821,12 +821,12 @@ object ThisType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.se
      symbol = __symbol
    )
  }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.ThisType(
    symbol = ""
  )
@@ -845,17 +845,17 @@ final case class SuperType(
     symbol: _root_.scala.Predef.String = ""
     ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-      
+
      {
        val __value = dotty.tools.dotc.semanticdb.SuperType._typemapper_prefix.toBase(prefix)
        if (__value.serializedSize != 0) {
          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
        }
      };
-      
+
      {
        val __value = symbol
        if (!__value.isEmpty) {
@@ -871,7 +871,7 @@ final case class SuperType(
      __serializedSizeMemoized = __size
    }
    __size - 1
-    
+
  }
  def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
    {
@@ -891,10 +891,10 @@ final case class SuperType(
  }
  def withPrefix(__v: dotty.tools.dotc.semanticdb.Type): SuperType = copy(prefix = __v)
  def withSymbol(__v: _root_.scala.Predef.String): SuperType = copy(symbol = __v)
-    
-    
-    
-    
+
+
+
+
  // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.SuperType])
 }
@@ -920,12 +920,12 @@ object SuperType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s
      symbol = __symbol
    )
  }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.SuperType(
    prefix = dotty.tools.dotc.semanticdb.SuperType._typemapper_prefix.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
    symbol = ""
@@ -949,10 +949,10 @@ final case class ConstantType(
     constant: dotty.tools.dotc.semanticdb.Constant = dotty.tools.dotc.semanticdb.ConstantType._typemapper_constant.toCustom(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance)
     ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-      
+
      {
        val __value = dotty.tools.dotc.semanticdb.ConstantType._typemapper_constant.toBase(constant)
        if (__value.serializedSize != 0) {
@@ -968,7 +968,7 @@ final case class ConstantType(
      __serializedSizeMemoized = __size
    }
    __size - 1
-    
+
  }
  def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
    {
@@ -981,10 +981,10 @@ final case class ConstantType(
    };
  }
  def withConstant(__v: dotty.tools.dotc.semanticdb.Constant): ConstantType = copy(constant = __v)
-    
-    
-    
-    
+
+
+
+
  // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ConstantType])
 }
@@ -1006,12 +1006,12 @@ object ConstantType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot
      constant = dotty.tools.dotc.semanticdb.ConstantType._typemapper_constant.toCustom(__constant.getOrElse(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance))
    )
  }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.ConstantType(
    constant = dotty.tools.dotc.semanticdb.ConstantType._typemapper_constant.toCustom(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance)
  )
@@ -1031,8 +1031,8 @@ final case class IntersectionType(
     types: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty
     ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
      types.foreach { __item =>
        val __value = dotty.tools.dotc.semanticdb.IntersectionType._typemapper_types.toBase(__item)
@@ -1047,7 +1047,7 @@ final case class IntersectionType(
      __serializedSizeMemoized = __size
    }
    __size - 1
-    
+
  }
  def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
    types.foreach { __v =>
@@ -1061,10 +1061,10 @@ final case class IntersectionType(
  def addTypes(__vs: dotty.tools.dotc.semanticdb.Type *): IntersectionType = addAllTypes(__vs)
  def addAllTypes(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): IntersectionType = copy(types = types ++ __vs)
  def withTypes(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): IntersectionType = copy(types = __v)
-    
-    
-    
-    
+
+
+
+
  // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.IntersectionType])
 }
@@ -1086,12 +1086,12 @@ object IntersectionType extends SemanticdbGeneratedMessageCompanion[dotty.tools
      types = __types.result()
    )
  }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.IntersectionType(
    types = _root_.scala.Seq.empty
  )
@@ -1111,8 +1111,8 @@ final case class UnionType(
     types: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty
     ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
      types.foreach { __item =>
        val __value = dotty.tools.dotc.semanticdb.UnionType._typemapper_types.toBase(__item)
@@ -1127,7 +1127,7 @@ final case class UnionType(
      __serializedSizeMemoized = __size
    }
    __size - 1
-    
+
  }
  def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
    types.foreach { __v =>
@@ -1141,10 +1141,10 @@ final case class UnionType(
  def addTypes(__vs: dotty.tools.dotc.semanticdb.Type *): UnionType = addAllTypes(__vs)
  def addAllTypes(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): UnionType = copy(types = types ++ __vs)
  def withTypes(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): UnionType = copy(types = __v)
-    
-    
-    
-    
+
+
+
+
  // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.UnionType])
 }
@@ -1166,12 +1166,12 @@ object UnionType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s
      types = __types.result()
    )
  }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.UnionType(
    types = _root_.scala.Seq.empty
  )
@@ -1191,8 +1191,8 @@ final case class WithType(
     types: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty
     ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
      types.foreach { __item =>
        val __value = dotty.tools.dotc.semanticdb.WithType._typemapper_types.toBase(__item)
@@ -1207,7 +1207,7 @@ final case class WithType(
      __serializedSizeMemoized = __size
    }
    __size - 1
-    
+
  }
  def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
    types.foreach { __v =>
@@ -1221,10 +1221,10 @@ final case class WithType(
  def addTypes(__vs: dotty.tools.dotc.semanticdb.Type *): WithType = addAllTypes(__vs)
  def addAllTypes(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): WithType = copy(types = types ++ __vs)
  def withTypes(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): WithType = copy(types = __v)
-    
-    
-    
-    
+
+
+
+
  // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.WithType])
 }
@@ -1246,12 +1246,12 @@ object WithType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.se
      types = __types.result()
    )
  }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.WithType(
    types = _root_.scala.Seq.empty
  )
@@ -1272,10 +1272,10 @@ final case class StructuralType(
     declarations: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None
     ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-      
+
      {
        val __value = dotty.tools.dotc.semanticdb.StructuralType._typemapper_tpe.toBase(tpe)
        if (__value.serializedSize != 0) {
@@ -1295,7 +1295,7 @@ final case class StructuralType(
      __serializedSizeMemoized = __size
    }
    __size - 1
-    
+
  }
  def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
    {
@@ -1317,10 +1317,10 @@ final case class StructuralType(
  def getDeclarations: dotty.tools.dotc.semanticdb.Scope = declarations.getOrElse(dotty.tools.dotc.semanticdb.Scope.defaultInstance)
  def clearDeclarations: StructuralType = copy(declarations = _root_.scala.None)
  def withDeclarations(__v: dotty.tools.dotc.semanticdb.Scope): StructuralType = copy(declarations = Option(__v))
-    
-    
-    
-    
+
+
+
+
  // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.StructuralType])
 }
@@ -1346,12 +1346,12 @@ object StructuralType extends SemanticdbGeneratedMessageCompanion[dotty.tools.d
      declarations = __declarations
    )
  }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.StructuralType(
    tpe = dotty.tools.dotc.semanticdb.StructuralType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
    declarations = _root_.scala.None
@@ -1376,14 +1376,14 @@ final case class AnnotatedType(
     tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.AnnotatedType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
     ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
      annotations.foreach { __item =>
        val __value = __item
        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
      }
-      
+
      {
        val __value = dotty.tools.dotc.semanticdb.AnnotatedType._typemapper_tpe.toBase(tpe)
        if (__value.serializedSize != 0) {
@@ -1399,7 +1399,7 @@ final case class AnnotatedType(
      __serializedSizeMemoized = __size
    }
    __size - 1
-    
+
  }
  def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
    {
@@ -1422,10 +1422,10 @@ final case class AnnotatedType(
  def addAllAnnotations(__vs: Iterable[dotty.tools.dotc.semanticdb.Annotation]): AnnotatedType = copy(annotations = annotations ++ __vs)
  def withAnnotations(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Annotation]): AnnotatedType = copy(annotations = __v)
  def withTpe(__v: dotty.tools.dotc.semanticdb.Type): AnnotatedType = copy(tpe = __v)
-    
-    
-    
-    
+
+
+
+
  // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.AnnotatedType])
 }
@@ -1451,12 +1451,12 @@ object AnnotatedType extends SemanticdbGeneratedMessageCompanion[dotty.tools.do
      tpe = dotty.tools.dotc.semanticdb.AnnotatedType._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
    )
  }
-    
-    
-    
-    
-    
-    
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.AnnotatedType(
    annotations = _root_.scala.Seq.empty,
    tpe = dotty.tools.dotc.semanticdb.AnnotatedType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
@@ -1481,10 +1481,10 @@ final case class ExistentialType(
     declarations: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None
     ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
     @transient @sharable
-    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
-    private[this] def __computeSerializedSize(): _root_.scala.Int = {
+    private var __serializedSizeMemoized: _root_.scala.Int = 0
+    private def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-      
+
      {
        val __value = dotty.tools.dotc.semanticdb.ExistentialType._typemapper_tpe.toBase(tpe)
        if (__value.serializedSize != 0) {
@@ -1504,7 +1504,7 @@ final case class ExistentialType(
      __serializedSizeMemoized = __size
    }
    __size - 1
-    
+
  }
  def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
    {
@@ -1526,10 +1526,10 @@ final case
class ExistentialType( def getDeclarations: dotty.tools.dotc.semanticdb.Scope = declarations.getOrElse(dotty.tools.dotc.semanticdb.Scope.defaultInstance) def clearDeclarations: ExistentialType = copy(declarations = _root_.scala.None) def withDeclarations(__v: dotty.tools.dotc.semanticdb.Scope): ExistentialType = copy(declarations = Option(__v)) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ExistentialType]) } @@ -1555,12 +1555,12 @@ object ExistentialType extends SemanticdbGeneratedMessageCompanion[dotty.tools. declarations = __declarations ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.ExistentialType( tpe = dotty.tools.dotc.semanticdb.ExistentialType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance), declarations = _root_.scala.None @@ -1585,14 +1585,14 @@ final case class UniversalType( tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.UniversalType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (typeParameters.isDefined) { val __value = typeParameters.get __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize }; - + { val __value = dotty.tools.dotc.semanticdb.UniversalType._typemapper_tpe.toBase(tpe) if (__value.serializedSize != 0) { @@ -1608,7 +1608,7 @@ final case class UniversalType( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -1630,10 +1630,10 @@ final case class UniversalType( def clearTypeParameters: UniversalType = copy(typeParameters = _root_.scala.None) def withTypeParameters(__v: dotty.tools.dotc.semanticdb.Scope): UniversalType = copy(typeParameters = Option(__v)) def withTpe(__v: dotty.tools.dotc.semanticdb.Type): UniversalType = copy(tpe = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.UniversalType]) } @@ -1659,12 +1659,12 @@ object UniversalType extends SemanticdbGeneratedMessageCompanion[dotty.tools.do tpe = dotty.tools.dotc.semanticdb.UniversalType._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)) ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.UniversalType( typeParameters = _root_.scala.None, tpe = dotty.tools.dotc.semanticdb.UniversalType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) @@ -1688,10 +1688,10 @@ final case class ByNameType( tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.ByNameType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = 
dotty.tools.dotc.semanticdb.ByNameType._typemapper_tpe.toBase(tpe) if (__value.serializedSize != 0) { @@ -1707,7 +1707,7 @@ final case class ByNameType( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -1720,10 +1720,10 @@ final case class ByNameType( }; } def withTpe(__v: dotty.tools.dotc.semanticdb.Type): ByNameType = copy(tpe = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ByNameType]) } @@ -1745,12 +1745,12 @@ object ByNameType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc. tpe = dotty.tools.dotc.semanticdb.ByNameType._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)) ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.ByNameType( tpe = dotty.tools.dotc.semanticdb.ByNameType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) @@ -1770,10 +1770,10 @@ final case class RepeatedType( tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.RepeatedType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = dotty.tools.dotc.semanticdb.RepeatedType._typemapper_tpe.toBase(tpe) if (__value.serializedSize != 0) { @@ -1789,7 +1789,7 @@ final case class RepeatedType( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -1802,10 +1802,10 @@ final case class RepeatedType( }; } def withTpe(__v: dotty.tools.dotc.semanticdb.Type): RepeatedType = copy(tpe = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.RepeatedType]) } @@ -1827,12 +1827,12 @@ object RepeatedType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot tpe = dotty.tools.dotc.semanticdb.RepeatedType._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)) ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.RepeatedType( tpe = dotty.tools.dotc.semanticdb.RepeatedType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) @@ -1853,10 +1853,10 @@ final case class MatchType( cases: _root_.scala.Seq[dotty.tools.dotc.semanticdb.MatchType.CaseType] = _root_.scala.Seq.empty ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = dotty.tools.dotc.semanticdb.MatchType._typemapper_scrutinee.toBase(scrutinee) if (__value.serializedSize != 0) { @@ -1876,7 +1876,7 @@ final case class MatchType( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -1899,10 +1899,10 @@ final case class MatchType( def 
addCases(__vs: dotty.tools.dotc.semanticdb.MatchType.CaseType *): MatchType = addAllCases(__vs) def addAllCases(__vs: Iterable[dotty.tools.dotc.semanticdb.MatchType.CaseType]): MatchType = copy(cases = cases ++ __vs) def withCases(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.MatchType.CaseType]): MatchType = copy(cases = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.MatchType]) } @@ -1928,12 +1928,12 @@ object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s cases = __cases.result() ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.MatchType( scrutinee = dotty.tools.dotc.semanticdb.MatchType._typemapper_scrutinee.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance), cases = _root_.scala.Seq.empty @@ -1944,17 +1944,17 @@ object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s body: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.MatchType.CaseType._typemapper_body.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = dotty.tools.dotc.semanticdb.MatchType.CaseType._typemapper_key.toBase(key) if (__value.serializedSize != 0) { __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; - + { val __value = dotty.tools.dotc.semanticdb.MatchType.CaseType._typemapper_body.toBase(body) if (__value.serializedSize != 0) { @@ -1970,7 +1970,7 @@ object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -1992,13 +1992,13 @@ object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s } def withKey(__v: dotty.tools.dotc.semanticdb.Type): CaseType = copy(key = __v) def withBody(__v: dotty.tools.dotc.semanticdb.Type): CaseType = copy(body = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.MatchType.CaseType]) } - + object CaseType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MatchType.CaseType] { implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MatchType.CaseType] = this def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.MatchType.CaseType = { @@ -2021,12 +2021,12 @@ object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s body = dotty.tools.dotc.semanticdb.MatchType.CaseType._typemapper_body.toCustom(__body.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)) ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.MatchType.CaseType( key = dotty.tools.dotc.semanticdb.MatchType.CaseType._typemapper_key.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance), body = dotty.tools.dotc.semanticdb.MatchType.CaseType._typemapper_body.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) @@ -2046,7 +2046,7 @@ object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s ) // 
@@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MatchType.CaseType]) } - + final val SCRUTINEE_FIELD_NUMBER = 1 final val CASES_FIELD_NUMBER = 2 @transient @sharable @@ -2067,14 +2067,14 @@ final case class LambdaType( returnType: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.LambdaType._typemapper_returnType.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (parameters.isDefined) { val __value = parameters.get __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize }; - + { val __value = dotty.tools.dotc.semanticdb.LambdaType._typemapper_returnType.toBase(returnType) if (__value.serializedSize != 0) { @@ -2090,7 +2090,7 @@ final case class LambdaType( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { parameters.foreach { __v => @@ -2112,10 +2112,10 @@ final case class LambdaType( def clearParameters: LambdaType = copy(parameters = _root_.scala.None) def withParameters(__v: dotty.tools.dotc.semanticdb.Scope): LambdaType = copy(parameters = Option(__v)) def withReturnType(__v: dotty.tools.dotc.semanticdb.Type): LambdaType = copy(returnType = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.LambdaType]) } @@ -2141,12 +2141,12 @@ object LambdaType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc. 
returnType = dotty.tools.dotc.semanticdb.LambdaType._typemapper_returnType.toCustom(__returnType.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)) ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.LambdaType( parameters = _root_.scala.None, returnType = dotty.tools.dotc.semanticdb.LambdaType._typemapper_returnType.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) diff --git a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbInputStream.scala b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbInputStream.scala index 8aed9e5b9771..699f85c0e303 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbInputStream.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbInputStream.scala @@ -7,7 +7,7 @@ import java.io.InputStream import java.util.Arrays import java.nio.charset.StandardCharsets -import SemanticdbInputStream._ +import SemanticdbInputStream.* import scala.collection.mutable diff --git a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbOutputStream.scala b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbOutputStream.scala index 37da7f868e25..359e861225b0 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbOutputStream.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbOutputStream.scala @@ -7,7 +7,7 @@ import java.io.OutputStream import java.nio.ByteBuffer import java.nio.charset.StandardCharsets -import SemanticdbOutputStream._ +import SemanticdbOutputStream.* object SemanticdbOutputStream { diff --git a/compiler/src/dotty/tools/dotc/staging/CrossStageSafety.scala b/compiler/src/dotty/tools/dotc/staging/CrossStageSafety.scala index 8360d8e08211..85f2e84429c3 100644 --- a/compiler/src/dotty/tools/dotc/staging/CrossStageSafety.scala +++ b/compiler/src/dotty/tools/dotc/staging/CrossStageSafety.scala @@ -2,18 +2,19 @@ package dotty.tools.dotc package staging import dotty.tools.dotc.ast.{tpd, untpd} -import dotty.tools.dotc.core.Annotations._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.NameKinds._ -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Types._ -import dotty.tools.dotc.staging.StagingLevel.* +import dotty.tools.dotc.core.Annotations.* +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.NameKinds.* +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Types.* +import dotty.tools.dotc.quoted.QuotePatterns import dotty.tools.dotc.staging.QuoteTypeTags.* +import dotty.tools.dotc.staging.StagingLevel.* import dotty.tools.dotc.util.Property -import dotty.tools.dotc.util.Spans._ +import dotty.tools.dotc.util.Spans.* import dotty.tools.dotc.util.SrcPos /** Checks that staging level consistency holds and heals staged types. 
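// From here on, many hunks are the mechanical Scala 3 migration of wildcard
// imports, `_` to `*`, applied file by file; e.g. in SemanticdbOutputStream:
//
//   import SemanticdbOutputStream._   // Scala 2 wildcard syntax
//   import SemanticdbOutputStream.*   // Scala 3 replacement used by the patch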
@@ -51,7 +52,7 @@ import dotty.tools.dotc.util.SrcPos * */ class CrossStageSafety extends TreeMapWithStages { - import tpd._ + import tpd.* private val InAnnotation = Property.Key[Unit]() @@ -104,6 +105,19 @@ class CrossStageSafety extends TreeMapWithStages { case _: DefDef if tree.symbol.isInlineMethod => tree + case tree: CaseDef if level == 0 => + val pat1 = new TreeMap { + // Encode all quote patterns to materialize the given `Type[ti]` bindings + // for each type binding `ti` of the quote pattern. These will be summoned + // by HealType in the right hand side of the case definition. + override def transform(tree: tpd.Tree)(using Context): tpd.Tree = tree match + case tree: QuotePattern if level == 0 => + super.transform(QuotePatterns.encode(tree)) + case tree => super.transform(tree) + }.transform(tree.pat) + val tree1 = cpy.CaseDef(tree)(pat1, tree.guard, tree.body) + super.transform(tree1) + case _ if !inQuoteOrSpliceScope => checkAnnotations(tree) // Check quotes in annotations super.transform(tree) @@ -214,6 +228,10 @@ class CrossStageSafety extends TreeMapWithStages { "\n\n" + "Hint: Staged references to inline definition in quotes are only inlined after the quote is spliced into level 0 code by a macro. " + "Try moving this inline definition in a statically accessible location such as an object (this definition can be private)." + else if level > 0 && sym.info.derivesFrom(defn.QuotesClass) then + s"""\n + |Hint: Nested quote needs a local context defined at level $level. + |One way to introduce this context is to give the outer quote the type `Expr[Quotes ?=> Expr[T]]`.""".stripMargin else "" report.error( em"""access to $symStr from wrong staging level: diff --git a/compiler/src/dotty/tools/dotc/staging/HealType.scala b/compiler/src/dotty/tools/dotc/staging/HealType.scala index 7d3ca0ad2f63..8b77f0774cdc 100644 --- a/compiler/src/dotty/tools/dotc/staging/HealType.scala +++ b/compiler/src/dotty/tools/dotc/staging/HealType.scala @@ -1,15 +1,15 @@ package dotty.tools.dotc package staging -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.staging.StagingLevel.* import dotty.tools.dotc.staging.QuoteTypeTags.* -import dotty.tools.dotc.transform.SymUtils._ + import dotty.tools.dotc.typer.Implicits.SearchFailureType import dotty.tools.dotc.util.SrcPos diff --git a/compiler/src/dotty/tools/dotc/staging/QuoteTypeTags.scala b/compiler/src/dotty/tools/dotc/staging/QuoteTypeTags.scala index 0b5032ea5a6d..467f1f440fd6 100644 --- a/compiler/src/dotty/tools/dotc/staging/QuoteTypeTags.scala +++ b/compiler/src/dotty/tools/dotc/staging/QuoteTypeTags.scala @@ -1,10 +1,10 @@ package dotty.tools.dotc.staging import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.staging.StagingLevel.* import dotty.tools.dotc.util.Property diff --git 
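// Two behavioural additions sit among the syntax hunks in CrossStageSafety.
// A sketch of both, assuming a macro/staging context (this code is not part
// of the patch):
import scala.quoted.*

// 1. Level-0 quote patterns in a CaseDef are now encoded so that each type
//    binding `t` materializes a given `Type[t]` for the right-hand side:
def refl(e: Expr[Any])(using Quotes): Expr[Any] =
  e match
    case '{ $x: t } => '{ $x: t } // `t` usable here via the materialized Type[t]
    case _          => e

// 2. The new hint: a nested quote needs a Quotes at level 1, which a
//    context-function-typed outer quote provides:
def nested(using Quotes): Expr[Quotes ?=> Expr[Int]] =
  '{ (q: Quotes) ?=> '{ 42 } }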
a/compiler/src/dotty/tools/dotc/staging/StagingLevel.scala b/compiler/src/dotty/tools/dotc/staging/StagingLevel.scala index 05b3efab408c..0a229881804e 100644 --- a/compiler/src/dotty/tools/dotc/staging/StagingLevel.scala +++ b/compiler/src/dotty/tools/dotc/staging/StagingLevel.scala @@ -1,11 +1,11 @@ package dotty.tools.dotc package staging -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.util.Property import dotty.tools.dotc.util.SrcPos diff --git a/compiler/src/dotty/tools/dotc/staging/TreeMapWithStages.scala b/compiler/src/dotty/tools/dotc/staging/TreeMapWithStages.scala index 674dfff2f642..c2607f3daa68 100644 --- a/compiler/src/dotty/tools/dotc/staging/TreeMapWithStages.scala +++ b/compiler/src/dotty/tools/dotc/staging/TreeMapWithStages.scala @@ -3,16 +3,16 @@ package staging import dotty.tools.dotc.ast.{TreeMapWithImplicits, tpd} import dotty.tools.dotc.config.Printers.staging -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.staging.StagingLevel.* import scala.collection.mutable /** TreeMap that keeps track of staging levels using StagingLevel. */ abstract class TreeMapWithStages extends TreeMapWithImplicits { - import tpd._ + import tpd.* override def transform(tree: Tree)(using Context): Tree = if (tree.source != ctx.source && tree.source.exists) diff --git a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala index 3175ffceae49..6d445887e1d9 100644 --- a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala +++ b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala @@ -2,15 +2,14 @@ package dotty.tools package dotc package transform -import core._ -import Contexts._ -import Symbols._ -import Flags._ -import Names._ -import NameOps._ -import Decorators._ -import TypeUtils._ -import Types._ +import core.* +import Contexts.* +import Symbols.* +import Flags.* +import Names.* +import NameOps.* +import Decorators.* +import Types.* import util.Spans.Span import config.Printers.transforms @@ -18,8 +17,8 @@ import config.Printers.transforms * inline accessors and protected accessors. 
*/ abstract class AccessProxies { - import ast.tpd._ - import AccessProxies._ + import ast.tpd.* + import AccessProxies.* /** accessor -> accessed */ private val accessedBy = MutableSymbolMap[Symbol]() @@ -32,7 +31,11 @@ abstract class AccessProxies { /** The accessor definitions that need to be added to class `cls` */ private def accessorDefs(cls: Symbol)(using Context): Iterator[DefDef] = for accessor <- cls.info.decls.iterator; accessed <- accessedBy.get(accessor) yield - DefDef(accessor.asTerm, prefss => { + accessorDef(accessor, accessed) + + protected def accessorDef(accessor: Symbol, accessed: Symbol)(using Context): DefDef = + DefDef(accessor.asTerm, + prefss => { def numTypeParams = accessed.info match { case info: PolyType => info.paramNames.length case _ => 0 @@ -42,7 +45,7 @@ abstract class AccessProxies { if (passReceiverAsArg(accessor.name)) (argss.head.head.select(accessed), targs.takeRight(numTypeParams), argss.tail) else - (if (accessed.isStatic) ref(accessed) else ref(TermRef(cls.thisType, accessed)), + (if (accessed.isStatic) ref(accessed) else ref(TermRef(accessor.owner.thisType, accessed)), targs, argss) val rhs = if (accessor.name.isSetterName && @@ -54,7 +57,8 @@ abstract class AccessProxies { .appliedToArgss(forwardedArgss) .etaExpandCFT(using ctx.withOwner(accessor)) rhs.withSpan(accessed.span) - }) + } + ) /** Add all needed accessors to the `body` of class `cls` */ def addAccessorDefs(cls: Symbol, body: List[Tree])(using Context): List[Tree] = { @@ -64,7 +68,7 @@ abstract class AccessProxies { } trait Insert { - import ast.tpd._ + import ast.tpd.* /** The name of the accessor for definition with given `name` in given `site` */ def accessorNameOf(name: TermName, site: Symbol)(using Context): TermName @@ -149,7 +153,7 @@ abstract class AccessProxies { def accessorIfNeeded(tree: Tree)(using Context): Tree = tree match { case tree: RefTree if needsAccessor(tree.symbol) => if (tree.symbol.isConstructor) { - report.error("Implementation restriction: cannot use private constructors in inlineable methods", tree.srcPos) + report.error("Cannot use private constructors in inline methods. You can use @publicInBinary to make constructor accessible in inline methods.", tree.srcPos) tree // TODO: create a proper accessor for the private constructor } else useAccessor(tree) diff --git a/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala b/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala index 872c7cc897de..98ca8f2e2b5b 100644 --- a/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala +++ b/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala @@ -1,48 +1,87 @@ -package dotty.tools.dotc +package dotty.tools +package dotc package transform -import core._ -import MegaPhase._ -import Contexts._ -import Symbols._ -import Flags._ -import StdNames._ -import dotty.tools.dotc.ast.tpd - - +import ast.tpd +import core.*, Contexts.*, Decorators.*, Symbols.*, Flags.*, StdNames.* +import reporting.trace +import util.Property +import MegaPhase.* /** This phase rewrites calls to `Array.apply` to a direct instantiation of the array in the bytecode. 
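// The reworded error above points users to `@publicInBinary` (experimental at
// the time of this patch) as the way to call a private constructor from
// inline code. An illustrative sketch, not taken from the patch; the
// language's experimental opt-in may additionally be required:
import scala.annotation.publicInBinary

class Box @publicInBinary private (val value: Int)
object Box:
  inline def make(value: Int): Box = new Box(value) // ok: binary-accessible ctor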
* * Transforms `scala.Array.apply([....])` and `scala.Array.apply(..., [....])` into `[...]` */ class ArrayApply extends MiniPhase { - import tpd._ + import tpd.* override def phaseName: String = ArrayApply.name override def description: String = ArrayApply.description - override def transformApply(tree: tpd.Apply)(using Context): tpd.Tree = + private val TransformListApplyBudgetKey = new Property.Key[Int] + private def transformListApplyBudget(using Context) = + ctx.property(TransformListApplyBudgetKey).getOrElse(8) // default is 8, as originally implemented in nsc + + override def prepareForApply(tree: Apply)(using Context): Context = tree match + case SeqApplyArgs(elems) => + ctx.fresh.setProperty(TransformListApplyBudgetKey, transformListApplyBudget - elems.length) + case _ => ctx + + override def transformApply(tree: Apply)(using Context): Tree = if isArrayModuleApply(tree.symbol) then - tree.args match { - case StripAscription(Apply(wrapRefArrayMeth, (seqLit: tpd.JavaSeqLiteral) :: Nil)) :: ct :: Nil + tree.args match + case StripAscription(Apply(wrapRefArrayMeth, (seqLit: JavaSeqLiteral) :: Nil)) :: ct :: Nil if defn.WrapArrayMethods().contains(wrapRefArrayMeth.symbol) && elideClassTag(ct) => seqLit - case elem0 :: StripAscription(Apply(wrapRefArrayMeth, (seqLit: tpd.JavaSeqLiteral) :: Nil)) :: Nil + case elem0 :: StripAscription(Apply(wrapRefArrayMeth, (seqLit: JavaSeqLiteral) :: Nil)) :: Nil if defn.WrapArrayMethods().contains(wrapRefArrayMeth.symbol) => - tpd.JavaSeqLiteral(elem0 :: seqLit.elems, seqLit.elemtpt) + JavaSeqLiteral(elem0 :: seqLit.elems, seqLit.elemtpt) case _ => tree - } - else tree + else tree match + case SeqApplyArgs(elems) if transformListApplyBudget > 0 || elems.isEmpty => + val consed = elems.foldRight(ref(defn.NilModule)): (elem, acc) => + New(defn.ConsType, List(elem.ensureConforms(defn.ObjectType), acc)) + consed.cast(tree.tpe) + case _ => tree private def isArrayModuleApply(sym: Symbol)(using Context): Boolean = sym.name == nme.apply && (sym.owner == defn.ArrayModuleClass || (sym.owner == defn.IArrayModuleClass && !sym.is(Extension))) + private def isListApply(tree: Tree)(using Context): Boolean = + (tree.symbol == defn.ListModule_apply || tree.symbol.name == nme.apply) && appliedCore(tree).match + case Select(qual, _) => + val sym = qual.symbol + sym == defn.ListModule + || sym == defn.ListModuleAlias + case _ => false + + private def isSeqApply(tree: Tree)(using Context): Boolean = + isListApply(tree) || tree.symbol == defn.SeqModule_apply && appliedCore(tree).match + case Select(qual, _) => + val sym = qual.symbol + sym == defn.SeqModule + || sym == defn.SeqModuleAlias + || sym == defn.CollectionSeqType.symbol.companionModule + case _ => false + + private object SeqApplyArgs: + def unapply(tree: Apply)(using Context): Option[List[Tree]] = + if isSeqApply(tree) then + tree.args match + // (a, b, c) ~> new ::(a, new ::(b, new ::(c, Nil))) but only for reference types + case StripAscription(Apply(wrapArrayMeth, List(StripAscription(rest: JavaSeqLiteral)))) :: Nil + if defn.WrapArrayMethods().contains(wrapArrayMeth.symbol) => + Some(rest.elems) + case _ => None + else None + + /** Only optimize when classtag if it is one of * - `ClassTag.apply(classOf[XYZ])` * - `ClassTag.apply(java.lang.XYZ.Type)` for boxed primitives `XYZ`` diff --git a/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala b/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala index e783961649dd..e94fa612e6cf 100644 --- 
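// What the new SeqApplyArgs rewrite does, expressed at the source level: a
// small literal `List.apply`/`Seq.apply` becomes a chain of cons cells,
// skipping the intermediate varargs array (the budget of 8 elements is the
// default carried over from nsc). Behaviourally equivalent code:
val viaApply: List[Int] = List(1, 2, 3)
val viaCons: List[Int]  = ::(1, ::(2, ::(3, Nil)))
// viaApply == viaCons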
a/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala +++ b/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala @@ -1,12 +1,12 @@ package dotty.tools.dotc package transform -import core._ -import MegaPhase._ -import Contexts._ -import Symbols._ -import Types._ -import StdNames._ +import core.* +import MegaPhase.* +import Contexts.* +import Symbols.* +import Types.* +import StdNames.* import dotty.tools.dotc.ast.tpd @@ -19,7 +19,7 @@ import scala.collection.immutable.:: * Additionally it optimizes calls to scala.Array.ofDim functions by replacing them with calls to newArray with specific dimensions */ class ArrayConstructors extends MiniPhase { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = ArrayConstructors.name @@ -38,10 +38,10 @@ class ArrayConstructors extends MiniPhase { val cs = tp.tpe.classSymbol tree.fun match { case Apply(TypeApply(t: Ident, targ), dims) - if !TypeErasure.isGeneric(targ.head.tpe) && !ValueClasses.isDerivedValueClass(cs) => + if !TypeErasure.isGeneric(targ.head.tpe) && !cs.isDerivedValueClass => expand(targ.head.tpe, dims) case Apply(TypeApply(t: Select, targ), dims) - if !TypeErasure.isGeneric(targ.head.tpe) && !ValueClasses.isDerivedValueClass(cs) => + if !TypeErasure.isGeneric(targ.head.tpe) && !cs.isDerivedValueClass => Block(t.qualifier :: Nil, expand(targ.head.tpe, dims)) case _ => tree } diff --git a/compiler/src/dotty/tools/dotc/transform/BeanProperties.scala b/compiler/src/dotty/tools/dotc/transform/BeanProperties.scala index 0c1f40d4f2bd..57aeb93a3b61 100644 --- a/compiler/src/dotty/tools/dotc/transform/BeanProperties.scala +++ b/compiler/src/dotty/tools/dotc/transform/BeanProperties.scala @@ -1,19 +1,19 @@ package dotty.tools.dotc package transform -import core._ -import ast.tpd._ -import Annotations._ -import Contexts._ +import core.* +import ast.tpd.* +import Annotations.* +import Contexts.* import Symbols.* -import SymUtils.* -import Decorators._ -import Flags._ -import Names._ -import Types._ -import util.Spans._ -import DenotTransformers._ +import Decorators.* +import Flags.* +import Names.* +import Types.* +import util.Spans.* + +import DenotTransformers.* class BeanProperties(thisPhase: DenotTransformer): def addBeanMethods(impl: Template)(using Context): Template = diff --git a/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala b/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala index b8cbb4367db4..653a5e17990f 100644 --- a/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala +++ b/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala @@ -2,10 +2,10 @@ package dotty.tools package dotc package transform -import core._ -import Flags._ -import MegaPhase._ -import Symbols._, Contexts._, Types._, Decorators._ +import core.* +import Flags.* +import MegaPhase.* +import Symbols.*, Contexts.*, Types.*, Decorators.* import StdNames.nme import ast.TreeTypeMap @@ -33,7 +33,7 @@ import scala.collection.mutable.ListBuffer * return context functions. See i6375.scala. 
*/ class BetaReduce extends MiniPhase: - import ast.tpd._ + import ast.tpd.* override def phaseName: String = BetaReduce.name @@ -45,7 +45,7 @@ class BetaReduce extends MiniPhase: app1 object BetaReduce: - import ast.tpd._ + import ast.tpd.* val name: String = "betaReduce" val description: String = "reduce closure applications" @@ -82,7 +82,7 @@ object BetaReduce: case _ => None case Block(stats, expr) if stats.forall(isPureBinding) => recur(expr, argss).map(cpy.Block(fn)(stats, _)) - case Inlined(call, bindings, expr) if bindings.forall(isPureBinding) => + case fn @ Inlined(call, bindings, expr) if bindings.forall(isPureBinding) => recur(expr, argss).map(cpy.Inlined(fn)(call, bindings, _)) case Typed(expr, tpt) => recur(expr, argss) @@ -90,7 +90,7 @@ object BetaReduce: recur(expr, argss) case _ => None tree match - case Apply(Select(fn, nme.apply), args) if defn.isFunctionType(fn.tpe) => + case Apply(Select(fn, nme.apply), args) if defn.isFunctionNType(fn.tpe) => recur(fn, List(args)) match case Some(reduced) => seq(bindingsBuf.result(), reduced).withSpan(tree.span) diff --git a/compiler/src/dotty/tools/dotc/transform/Bridges.scala b/compiler/src/dotty/tools/dotc/transform/Bridges.scala index 569b16681cde..2110ac1464c2 100644 --- a/compiler/src/dotty/tools/dotc/transform/Bridges.scala +++ b/compiler/src/dotty/tools/dotc/transform/Bridges.scala @@ -2,9 +2,9 @@ package dotty.tools package dotc package transform -import core._ -import Symbols._, Types._, Contexts._, Decorators._, Flags._, Scopes._, Phases._ -import DenotTransformers._ +import core.* +import Symbols.*, Types.*, Contexts.*, Decorators.*, Flags.*, Scopes.*, Phases.* +import DenotTransformers.* import ast.untpd import collection.{mutable, immutable} import util.SrcPos @@ -16,7 +16,7 @@ import Erasure.Boxing.adaptClosure /** A helper class for generating bridge methods in class `root`. 
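// The reductions BetaReduce performs, shown at the source level (the hunks
// above narrow the guard to `isFunctionNType` and bind the `Inlined` node
// explicitly):
val applied = ((x: Int) => x + 1)(2)
val reduced = { val x: Int = 2; x + 1 } // what the phase produces, conceptually
// applied == reduced == 3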
*/ class Bridges(root: ClassSymbol, thisPhase: DenotTransformer)(using Context) { - import ast.tpd._ + import ast.tpd.* assert(ctx.phase == erasurePhase.next) private val preErasureCtx = ctx.withPhase(erasurePhase) @@ -129,25 +129,24 @@ class Bridges(root: ClassSymbol, thisPhase: DenotTransformer)(using Context) { assert(ctx.typer.isInstanceOf[Erasure.Typer]) ctx.typer.typed(untpd.cpy.Apply(ref)(ref, args), member.info.finalResultType) else - val defn.ContextFunctionType(argTypes, resType, erasedParams) = tp: @unchecked - val anonFun = newAnonFun(ctx.owner, - MethodType( - argTypes.zip(erasedParams.padTo(argTypes.length, false)) - .flatMap((t, e) => if e then None else Some(t)), - resType), - coord = ctx.owner.coord) + val mtWithoutErasedParams = atPhase(erasurePhase) { + val defn.ContextFunctionType(argTypes, resType) = tp.dealias: @unchecked + val paramInfos = argTypes.filterNot(_.hasAnnotation(defn.ErasedParamAnnot)) + MethodType(paramInfos, resType) + } + val anonFun = newAnonFun(ctx.owner, mtWithoutErasedParams, coord = ctx.owner.coord) anonFun.info = transformInfo(anonFun, anonFun.info) def lambdaBody(refss: List[List[Tree]]) = val refs :: Nil = refss: @unchecked val expandedRefs = refs.map(_.withSpan(ctx.owner.span.endPos)) match case (bunchedParam @ Ident(nme.ALLARGS)) :: Nil => - argTypes.indices.toList.map(n => + mtWithoutErasedParams.paramInfos.indices.toList.map(n => bunchedParam .select(nme.primitive.arrayApply) .appliedTo(Literal(Constant(n)))) case refs1 => refs1 - expand(args ::: expandedRefs, resType, n - 1)(using ctx.withOwner(anonFun)) + expand(args ::: expandedRefs, mtWithoutErasedParams.resType, n - 1)(using ctx.withOwner(anonFun)) val unadapted = Closure(anonFun, lambdaBody) cpy.Block(unadapted)(unadapted.stats, diff --git a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala index 32bcc53184b1..c1725cbd0255 100644 --- a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala +++ b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala @@ -1,38 +1,32 @@ package dotty.tools.dotc package transform -import MegaPhase._ -import core.DenotTransformers._ -import core.Symbols._ -import core.Contexts._ -import core.Flags._ -import core.Decorators._ +import MegaPhase.* +import core.DenotTransformers.* +import core.Symbols.* +import core.Contexts.* +import core.Flags.* +import core.Decorators.* import core.StdNames.nme -import core.Names._ +import core.Names.* import core.NameKinds.TempResultName -import core.Constants._ +import core.Constants.* import util.Store import dotty.tools.uncheckedNN +import ast.tpd.* +import compiletime.uninitialized /** This phase translates variables that are captured in closures to * heap-allocated refs. */ class CapturedVars extends MiniPhase with IdentityDenotTransformer: thisPhase => - import ast.tpd._ override def phaseName: String = CapturedVars.name override def description: String = CapturedVars.description - override def runsAfterGroupsOf: Set[String] = Set(LiftTry.name) - // lifting tries changes what variables are considered to be captured - - private[this] var Captured: Store.Location[util.ReadOnlySet[Symbol]] = _ - private def captured(using Context) = ctx.store(Captured) - - override def initContext(ctx: FreshContext): Unit = - Captured = ctx.addLocation(util.ReadOnlySet.empty) + private val captured = util.HashSet[Symbol]() private class RefInfo(using Context) { /** The classes for which a Ref type exists. 
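// Sketch of what the Bridges hunk above handles: with experimental `erased`
// parameters, a member typed as a context function such as
//
//   def run: (Config, erased Proof) ?=> Int   // Config/Proof illustrative
//
// must bridge to a method taking only `Config`; the method type is now
// recomputed at the erasure phase, filtering out parameters annotated with
// ErasedParamAnnot, instead of being rebuilt from erasure-time data.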
*/ @@ -58,33 +52,10 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer: myRefInfo.uncheckedNN } - private class CollectCaptured extends TreeTraverser { - private val captured = util.HashSet[Symbol]() - def traverse(tree: Tree)(using Context) = tree match { - case id: Ident => - val sym = id.symbol - if (sym.is(Mutable, butNot = Method) && sym.owner.isTerm) { - val enclMeth = ctx.owner.enclosingMethod - if (sym.enclosingMethod != enclMeth) { - report.log(i"capturing $sym in ${sym.enclosingMethod}, referenced from $enclMeth") - captured += sym - } - } - case _ => - traverseChildren(tree) - } - def runOver(tree: Tree)(using Context): util.ReadOnlySet[Symbol] = { - traverse(tree) - captured - } - } - - override def prepareForUnit(tree: Tree)(using Context): Context = { - val captured = atPhase(thisPhase) { - CollectCaptured().runOver(ctx.compilationUnit.tpdTree) - } - ctx.fresh.updateStore(Captured, captured) - } + override def prepareForUnit(tree: Tree)(using Context): Context = + captured.clear() + atPhase(thisPhase)(CapturedVars.collect(captured)).traverse(tree) + ctx /** The {Volatile|}{Int|Double|...|Object}Ref class corresponding to the class `cls`, * depending on whether the reference should be @volatile @@ -131,43 +102,29 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer: * * intRef.elem = expr * - * rewrite using a temporary var to - * - * val ev$n = expr - * intRef.elem = ev$n - * - * That way, we avoid the problem that `expr` might contain a `try` that would - * run on a non-empty stack (which is illegal under JVM rules). Note that LiftTry - * has already run before, so such `try`s would not be eliminated. - * - * If the ref type lhs is followed by a cast (can be an artifact of nested translation), - * drop the cast. - * - * If the ref type is `ObjectRef` or `VolatileObjectRef`, immediately assign `null` - * to the temporary to make the underlying target of the reference available for - * garbage collection. Nullification is omitted if the `expr` is already `null`. - * - * var ev$n: RHS = expr - * objRef.elem = ev$n - * ev$n = null.asInstanceOf[RHS] + * the lhs can be followed by a cast as an artifact of nested translation. + * In that case, drop the cast. 
*/ override def transformAssign(tree: Assign)(using Context): Tree = - def absolved: Boolean = tree.rhs match - case Literal(Constant(null)) | Typed(Literal(Constant(null)), _) => true - case _ => false - def recur(lhs: Tree): Tree = lhs match + tree.lhs match case TypeApply(Select(qual@Select(_, nme.elem), nme.asInstanceOf_), _) => - recur(qual) - case Select(_, nme.elem) if refInfo.boxedRefClasses.contains(lhs.symbol.maybeOwner) => - val tempDef = transformFollowing(SyntheticValDef(TempResultName.fresh(), tree.rhs, flags = Mutable)) - val update = cpy.Assign(tree)(lhs, ref(tempDef.symbol)) - def reset = cpy.Assign(tree)(ref(tempDef.symbol), nullLiteral.cast(tempDef.symbol.info)) - val res = if refInfo.objectRefClasses(lhs.symbol.maybeOwner) && !absolved then reset else unitLiteral - transformFollowing(Block(tempDef :: update :: Nil, res)) + cpy.Assign(tree)(qual, tree.rhs) case _ => tree - recur(tree.lhs) object CapturedVars: val name: String = "capturedVars" val description: String = "represent vars captured by closures as heap objects" + + def collect(captured: util.HashSet[Symbol]): TreeTraverser = new: + def traverse(tree: Tree)(using Context) = tree match + case id: Ident => + val sym = id.symbol + if sym.is(Mutable, butNot = Method) && sym.owner.isTerm then + val enclMeth = ctx.owner.enclosingMethod + if sym.enclosingMethod != enclMeth then + report.log(i"capturing $sym in ${sym.enclosingMethod}, referenced from $enclMeth") + captured += sym + case _ => + traverseChildren(tree) +end CapturedVars diff --git a/compiler/src/dotty/tools/dotc/transform/CheckLoopingImplicits.scala b/compiler/src/dotty/tools/dotc/transform/CheckLoopingImplicits.scala index 7c8082265161..8625d2dbb289 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckLoopingImplicits.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckLoopingImplicits.scala @@ -30,7 +30,7 @@ object CheckLoopingImplicits: */ class CheckLoopingImplicits extends MiniPhase: thisPhase => - import tpd._ + import tpd.* override def phaseName: String = CheckLoopingImplicits.name diff --git a/compiler/src/dotty/tools/dotc/transform/CheckNoSuperThis.scala b/compiler/src/dotty/tools/dotc/transform/CheckNoSuperThis.scala index f43d000bbf44..127bd57b1bf2 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckNoSuperThis.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckNoSuperThis.scala @@ -15,7 +15,7 @@ object CheckNoSuperThis: /** Checks that super and this calls do not pass `this` as (part of) an argument. */ class CheckNoSuperThis extends MiniPhase: thisPhase => - import tpd._ + import tpd.* override def phaseName: String = CheckNoSuperThis.name diff --git a/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala index b63773687f74..073086ac5e2c 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala @@ -1,12 +1,12 @@ package dotty.tools.dotc package transform -import core._ -import dotty.tools.dotc.transform.MegaPhase._ -import Flags._ -import Contexts._ -import Symbols._ -import Decorators._ +import core.* +import dotty.tools.dotc.transform.MegaPhase.* +import Flags.* +import Contexts.* +import Symbols.* +import Decorators.* /** A no-op transform that checks whether the compiled sources are re-entrant. * If -Ycheck:reentrant is set, the phase makes sure that there are no variables @@ -27,7 +27,7 @@ import Decorators._ * for immutable array. 
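// CapturedVars' effect at the source level: a local `var` captured by a
// closure is boxed in a scala.runtime Ref so the closure and its enclosing
// method share one mutable cell. The hunks above also drop the temp-var
// rewrite of assignments that only existed to keep `try` off a non-empty
// stack, a workaround tied to the LiftTry ordering constraint removed here.
// A sketch (not part of the patch):
def counter(): () => Int =
  var n = 0 // captured by the closure below
  () => { n += 1; n }

// conceptually, after the phase:
def counterLowered(): () => Int =
  val n = new scala.runtime.IntRef(0)
  () => { n.elem += 1; n.elem }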
*/ class CheckReentrant extends MiniPhase { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = CheckReentrant.name diff --git a/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala b/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala new file mode 100644 index 000000000000..a85cabdd5460 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala @@ -0,0 +1,313 @@ +package dotty.tools.dotc.transform + +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.ast.Trees.EmptyTree +import dotty.tools.dotc.transform.MegaPhase +import dotty.tools.dotc.transform.MegaPhase.MiniPhase +import dotty.tools.dotc.report +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.util.{Property, SrcPos} +import dotty.tools.dotc.core.Symbols.ClassSymbol +import dotty.tools.dotc.core.Names.Name +import dotty.tools.dotc.core.Symbols.Symbol +import dotty.tools.dotc.core.Flags.EmptyFlags +import dotty.tools.dotc.ast.tpd.TreeTraverser +import dotty.tools.dotc.core.Types.watchList +import dotty.tools.dotc.core.Types.NoType +import dotty.tools.dotc.core.Types.Type +import dotty.tools.dotc.core.Types +import dotty.tools.dotc.semanticdb.TypeOps +import dotty.tools.dotc.cc.boxedCaptureSet +import dotty.tools.dotc.core.Symbols.{NoSymbol, isParamOrAccessor} +import scala.collection.mutable +import dotty.tools.dotc.core.Scopes.Scope +import scala.collection.immutable.HashMap +import dotty.tools.dotc.core.Symbols +import dotty.tools.dotc.typer.ImportInfo +import dotty.tools.dotc.ast.untpd.ImportSelector +import dotty.tools.dotc.core.StdNames.nme +import dotty.tools.dotc.ast.untpd +import dotty.tools.dotc.core.Denotations.SingleDenotation +import dotty.tools.dotc.ast.Trees.Ident +import dotty.tools.dotc.core.Names.TypeName +import dotty.tools.dotc.core.Names.TermName +import dotty.tools.dotc.core.Mode.Type +import dotty.tools.dotc.core.Names.SimpleName + +class CheckShadowing extends MiniPhase: + import CheckShadowing.* + import ShadowingData.* + + private val _key = Property.Key[ShadowingData] + + private def shadowingDataApply[U](f: ShadowingData => U)(using Context): Context = + ctx.property(_key).foreach(f) + ctx + + override def phaseName: String = CheckShadowing.name + + override def description: String = CheckShadowing.description + + override def isRunnable(using Context): Boolean = + super.isRunnable && + ctx.settings.Xlint.value.nonEmpty && + !ctx.isJava + + // Setup before the traversal + override def prepareForUnit(tree: tpd.Tree)(using Context): Context = + val data = ShadowingData() + val fresh = ctx.fresh.setProperty(_key, data) + shadowingDataApply(sd => sd.registerRootImports())(using fresh) + + // Reporting on traversal's end + override def transformUnit(tree: tpd.Tree)(using Context): tpd.Tree = + shadowingDataApply(sd => + reportShadowing(sd.getShadowingResult) + ) + tree + + // MiniPhase traversal : + + override def prepareForPackageDef(tree: tpd.PackageDef)(using Context): Context = + shadowingDataApply(sd => sd.inNewScope()) + ctx + + override def prepareForTemplate(tree: tpd.Template)(using Context): Context = + shadowingDataApply(sd => sd.inNewScope()) + ctx + + override def prepareForBlock(tree: tpd.Block)(using Context): Context = + shadowingDataApply(sd => sd.inNewScope()) + ctx + + override def prepareForOther(tree: tpd.Tree)(using Context): Context = + importTraverser.traverse(tree) + ctx + + override def prepareForValDef(tree: tpd.ValDef)(using Context): Context = + shadowingDataApply(sd 
=> + sd.registerPrivateShadows(tree) + ) + + override def prepareForTypeDef(tree: tpd.TypeDef)(using Context): Context = + if tree.symbol.isAliasType then // if alias, the parent is the current symbol + nestedTypeTraverser(tree.symbol).traverse(tree.rhs) + if tree.symbol.is(Param) then // if param, the parent is up + val owner = tree.symbol.owner + val parent = if (owner.isConstructor) then owner.owner else owner + nestedTypeTraverser(parent).traverse(tree.rhs)(using ctx.outer) + shadowingDataApply(sd => sd.registerCandidate(parent, tree)) + else + ctx + + + override def transformPackageDef(tree: tpd.PackageDef)(using Context): tpd.Tree = + shadowingDataApply(sd => sd.outOfScope()) + tree + + override def transformBlock(tree: tpd.Block)(using Context): tpd.Tree = + shadowingDataApply(sd => sd.outOfScope()) + tree + + override def transformTemplate(tree: tpd.Template)(using Context): tpd.Tree = + shadowingDataApply(sd => sd.outOfScope()) + tree + + override def transformTypeDef(tree: tpd.TypeDef)(using Context): tpd.Tree = + if tree.symbol.is(Param) && isValidTypeParamOwner(tree.symbol.owner) then // Do not register for constructors the work is done for the Class owned equivalent TypeDef + shadowingDataApply(sd => sd.computeTypeParamShadowsFor(tree.symbol.owner)(using ctx.outer)) + if tree.symbol.isAliasType then // No need to start outer here, because the TypeDef reached here it's already the parent + shadowingDataApply(sd => sd.computeTypeParamShadowsFor(tree.symbol)(using ctx)) + tree + + // Helpers : + private def isValidTypeParamOwner(owner: Symbol)(using Context): Boolean = + !owner.isConstructor && !owner.is(Synthetic) && !owner.is(Exported) + + private def reportShadowing(res: ShadowingData.ShadowResult)(using Context): Unit = + res.warnings.sortBy(w => (w.pos.line, w.pos.startPos.column))(using Ordering[(Int, Int)]).foreach { + case PrivateShadowWarning(pos, shadow, shadowed) => + report.warning(s"${shadow.showLocated} shadows field ${shadowed.name} inherited from ${shadowed.owner}", pos) + case TypeParamShadowWarning(pos, shadow, parent, shadowed) => + if shadowed.exists then + report.warning(s"Type parameter ${shadow.name} for $parent shadows the type defined by ${shadowed.showLocated}", pos) + else + report.warning(s"Type parameter ${shadow.name} for $parent shadows an explicitly renamed type : ${shadow.name}", pos) + } + + private def nestedTypeTraverser(parent: Symbol) = new TreeTraverser: + import tpd.* + + override def traverse(tree: tpd.Tree)(using Context): Unit = + tree match + case t:tpd.TypeDef => + val newCtx = shadowingDataApply(sd => + sd.registerCandidate(parent, t) + ) + traverseChildren(tree)(using newCtx) + case _ => + traverseChildren(tree) + end traverse + end nestedTypeTraverser + + // To reach the imports during a miniphase traversal + private def importTraverser = new TreeTraverser: + import tpd.* + + override def traverse(tree: tpd.Tree)(using Context): Unit = + tree match + case t:tpd.Import => + val newCtx = shadowingDataApply(sd => sd.registerImport(t)) + traverseChildren(tree)(using newCtx) + case _ => + traverseChildren(tree) + +end CheckShadowing + + +object CheckShadowing: + + val name = "checkShadowing" + val description = "check for elements shadowing other elements in scope" + + private class ShadowingData: + import dotty.tools.dotc.transform.CheckShadowing.ShadowingData.* + import collection.mutable.{Set => MutSet, Map => MutMap, Stack => MutStack} + + private val rootImports = MutSet[SingleDenotation]() + private val explicitsImports = 
MutStack[MutSet[tpd.Import]]() + private val renamedImports = MutStack[MutMap[SimpleName, Name]]() // original name -> renamed name + + private val typeParamCandidates = MutMap[Symbol, Seq[tpd.TypeDef]]().withDefaultValue(Seq()) + private val typeParamShadowWarnings = MutSet[TypeParamShadowWarning]() + + private val privateShadowWarnings = MutSet[PrivateShadowWarning]() + + def inNewScope()(using Context) = + explicitsImports.push(MutSet()) + renamedImports.push(MutMap()) + + def outOfScope()(using Context) = + explicitsImports.pop() + renamedImports.pop() + + /** Register the Root imports (at once per compilation unit)*/ + def registerRootImports()(using Context) = + val langPackageName = ctx.definitions.JavaLangPackageVal.name.toSimpleName // excludes lang package + rootImports.addAll(ctx.definitions.rootImportTypes.withFilter(_.name.toSimpleName != langPackageName).flatMap(_.typeMembers)) + + /* Register an import encountered in the current scope **/ + def registerImport(imp: tpd.Import)(using Context) = + val renamedImps = imp.selectors.collect(sel => { sel.renamed match + case Ident(rename) => + (sel.name.toSimpleName, rename) + }).toMap + explicitsImports.top += imp + renamedImports.top.addAll(renamedImps) + + /** Register a potential type definition which could shadows a Type already defined */ + def registerCandidate(parent: Symbol, typeDef: tpd.TypeDef) = + val actual = typeParamCandidates.getOrElseUpdate(parent, Seq()) + typeParamCandidates.update(parent, actual.+:(typeDef)) + + /** Compute if there is some TypeParam shadowing and register if it is the case */ + def computeTypeParamShadowsFor(parent: Symbol)(using Context): Unit = + typeParamCandidates(parent).foreach(typeDef => { + val sym = typeDef.symbol + val shadowedType = + lookForRootShadowedType(sym) + .orElse(lookForImportedShadowedType(sym)) + .orElse(lookForUnitShadowedType(sym)) + shadowedType.foreach(shadowed => + if !renamedImports.exists(_.contains(shadowed.name.toSimpleName)) then + typeParamShadowWarnings += TypeParamShadowWarning(typeDef.srcPos, typeDef.symbol, parent, shadowed) + ) + }) + + private def lookForRootShadowedType(symbol: Symbol)(using Context): Option[Symbol] = + rootImports.find(p => p.name.toSimpleName == symbol.name.toSimpleName).map(_.symbol) + + private def lookForImportedShadowedType(symbol: Symbol)(using Context): Option[Symbol] = + explicitsImports + .flatMap(_.flatMap(imp => symbol.isAnImportedType(imp))) + .headOption + + private def lookForUnitShadowedType(symbol: Symbol)(using Context): Option[Symbol] = + if !ctx.owner.exists then + None + else + val declarationScope = ctx.effectiveScope + val res = declarationScope.lookup(symbol.name) + res match + case s: Symbol if s.isType => Some(s) + case _ => lookForUnitShadowedType(symbol)(using ctx.outer) + + /** Register if the valDef is a private declaration that shadows an inherited field */ + def registerPrivateShadows(valDef: tpd.ValDef)(using Context): Unit = + lookForShadowedField(valDef.symbol).foreach(shadowedField => + privateShadowWarnings += PrivateShadowWarning(valDef.startPos, valDef.symbol, shadowedField) + ) + + private def lookForShadowedField(symDecl: Symbol)(using Context): Option[Symbol] = + if symDecl.isPrivate then + val symDeclType = symDecl.info + val bClasses = symDecl.owner.info.baseClasses + bClasses match + case _ :: inherited => + inherited + .map(classSymbol => symDecl.denot.matchingDecl(classSymbol, symDeclType)) + .find(sym => sym.name == symDecl.name) + case Nil => + None + else + None + + /** Get the shadowing 
analysis's result */ + def getShadowingResult(using Context): ShadowResult = + val privateWarnings: List[ShadowWarning] = + if ctx.settings.XlintHas.privateShadow then + privateShadowWarnings.toList + else + Nil + val typeParamWarnings: List[ShadowWarning] = + if ctx.settings.XlintHas.typeParameterShadow then + typeParamShadowWarnings.toList + else + Nil + ShadowResult(privateWarnings ++ typeParamWarnings) + + extension (sym: Symbol) + /** Looks after any type import symbol in the given import that matches this symbol */ + private def isAnImportedType(imp: tpd.Import)(using Context): Option[Symbol] = + val tpd.Import(qual, sels) = imp + val simpleSelections = qual.tpe.member(sym.name).alternatives + val typeSelections = sels.flatMap(n => qual.tpe.member(n.name.toTypeName).alternatives) + sels + .find(is => is.rename.toSimpleName == sym.name.toSimpleName).map(_.symbol) + .orElse(typeSelections.map(_.symbol).find(sd => sd.name == sym.name)) + .orElse(simpleSelections.map(_.symbol).find(sd => sd.name == sym.name)) + + end ShadowingData + + private object ShadowingData: + sealed abstract class ShadowWarning(val pos: SrcPos, val shadow: Symbol, val shadowed: Symbol) + + case class PrivateShadowWarning( + override val pos: SrcPos, + override val shadow: Symbol, + override val shadowed: Symbol + ) extends ShadowWarning(pos, shadow, shadowed) + + case class TypeParamShadowWarning( + override val pos: SrcPos, + override val shadow: Symbol, + val shadowParent: Symbol, + override val shadowed: Symbol, + ) extends ShadowWarning(pos, shadow, shadowed) + + /** A container for the results of the shadow elements analysis */ + case class ShadowResult(warnings: List[ShadowWarning]) + +end CheckShadowing + diff --git a/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala b/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala index 0d5154e212ee..26c94407f35b 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala @@ -1,15 +1,15 @@ package dotty.tools.dotc package transform -import core._ -import dotty.tools.dotc.transform.MegaPhase._ -import Flags._ -import Contexts._ -import Symbols._ +import core.* +import dotty.tools.dotc.transform.MegaPhase.* +import Flags.* +import Contexts.* +import Symbols.* import dotty.tools.dotc.ast.tpd -import reporting._ +import reporting.* + -import dotty.tools.dotc.transform.SymUtils._ /** A transformer that check that requirements of Static fields\methods are implemented: * 1. Only objects can have members annotated with `@static` @@ -24,7 +24,7 @@ import dotty.tools.dotc.transform.SymUtils._ * 6. `@static` Lazy vals are currently unsupported. 
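// The two warnings the new CheckShadowing phase emits, gated by
// -Xlint:private-shadow and -Xlint:type-parameter-shadow respectively;
// an illustrative sketch:
class Base(val data: Int)
class Sub(data: Int) extends Base(data):
  def doubled = data * 2 // private-shadow: parameter shadows Base's field `data`

class Outer[T]:
  def pick[T](x: T): T = x // type-parameter-shadow: T shadows Outer's T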
diff --git a/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala b/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala index 0d5154e212ee..26c94407f35b 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala @@ -1,15 +1,15 @@ package dotty.tools.dotc package transform -import core._ -import dotty.tools.dotc.transform.MegaPhase._ -import Flags._ -import Contexts._ -import Symbols._ +import core.* +import dotty.tools.dotc.transform.MegaPhase.* +import Flags.* +import Contexts.* +import Symbols.* import dotty.tools.dotc.ast.tpd -import reporting._ +import reporting.* + -import dotty.tools.dotc.transform.SymUtils._ /** A transformer that checks that the requirements of `@static` fields/methods are implemented: * 1. Only objects can have members annotated with `@static` @@ -24,7 +24,7 @@ import dotty.tools.dotc.transform.SymUtils._ * 6. `@static` Lazy vals are currently unsupported. */ class CheckStatic extends MiniPhase { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = CheckStatic.name diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index bd521c8679d0..7cff6fa5f1f0 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -1,6 +1,7 @@ package dotty.tools.dotc.transform import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.ast.tpd.{Inlined, TreeTraverser} import dotty.tools.dotc.ast.untpd import dotty.tools.dotc.ast.untpd.ImportSelector @@ -205,7 +206,7 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke * corresponding context property */ private def traverser = new TreeTraverser: - import tpd._ + import tpd.* import UnusedData.ScopeType /* Register every import, definition and usage */ @@ -229,15 +230,15 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke pushInBlockTemplatePackageDef(tree) traverseChildren(tree)(using newCtx) popOutBlockTemplatePackageDef() - case t:tpd.ValDef => + case t: tpd.ValDef => prepareForValDef(t) traverseChildren(tree)(using newCtx) transformValDef(t) - case t:tpd.DefDef => + case t: tpd.DefDef => prepareForDefDef(t) traverseChildren(tree)(using newCtx) transformDefDef(t) - case t:tpd.TypeDef => + case t: tpd.TypeDef => prepareForTypeDef(t) traverseChildren(tree)(using newCtx) transformTypeDef(t) @@ -248,6 +249,10 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke prepareForAssign(t) traverseChildren(tree) case _: tpd.InferredTypeTree => + case t@tpd.RefinedTypeTree(tpt, refinements) => + //! DIFFERS FROM MINIPHASE + typeTraverser(unusedDataApply).traverse(t.tpe) + traverse(tpt)(using newCtx) case t@tpd.TypeTree() => //! DIFFERS FROM MINIPHASE typeTraverser(unusedDataApply).traverse(t.tpe) @@ -291,9 +296,9 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke case UnusedSymbol(t, _, WarnTypes.PatVars) => report.warning(s"unused pattern variable", t) case UnusedSymbol(t, _, WarnTypes.UnsetLocals) => - report.warning(s"unset local variable", t) + report.warning(s"unset local variable, consider using an immutable val instead", t) case UnusedSymbol(t, _, WarnTypes.UnsetPrivates) => - report.warning(s"unset private variable", t) + report.warning(s"unset private variable, consider using an immutable val instead", t) } end CheckUnused @@ -333,7 +338,7 @@ object CheckUnused: * - usage */ private class UnusedData: - import collection.mutable.{Set => MutSet, Map => MutMap, Stack => MutStack} + import collection.mutable.{Set => MutSet, Map => MutMap, Stack => MutStack, ListBuffer => MutList} import UnusedData.* /** The current scope during the tree traversal */ @@ -342,7 +347,7 @@ object CheckUnused: var unusedAggregate: Option[UnusedResult] = None /* IMPORTS */ - private val impInScope = MutStack(MutSet[tpd.Import]()) + private val impInScope = MutStack(MutList[tpd.Import]()) /** * We store the symbols along with their accessibility without import.
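To see the reworded diagnostics in context, a sketch of code that would trigger them under the corresponding `-Wunused` settings (names invented):

```scala
def f(): Int =
  var count = 0   // unset local variable, consider using an immutable val instead
  count

class C:
  private var state = 0   // unset private variable, consider using an immutable val instead
  def read: Int = state
```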
* Accessibility to their definition in outer context/scope @@ -419,12 +424,12 @@ object CheckUnused: if !tpd.languageImport(imp.expr).nonEmpty && !imp.isGeneratedByEnum && !isTransparentAndInline(imp) then impInScope.top += imp unusedImport ++= imp.selectors.filter { s => - !shouldSelectorBeReported(imp, s) && !isImportExclusion(s) + !shouldSelectorBeReported(imp, s) && !isImportExclusion(s) && !isImportIgnored(imp, s) } /** Register (or not) some `val` or `def` according to the context, scope and flags */ def registerDef(memDef: tpd.MemberDef)(using Context): Unit = - if memDef.isValidMemberDef then + if memDef.isValidMemberDef && !isDefIgnored(memDef) then if memDef.isValidParam then if memDef.symbol.isOneOf(GivenOrImplicit) then if !paramsToSkip.contains(memDef.symbol) then @@ -445,7 +450,7 @@ object CheckUnused: def pushScope(newScopeType: ScopeType): Unit = // unused imports : currScopeType.push(newScopeType) - impInScope.push(MutSet()) + impInScope.push(MutList()) usedInScope.push(MutSet()) def registerSetVar(sym: Symbol): Unit = @@ -503,7 +508,6 @@ object CheckUnused: def getUnused(using Context): UnusedResult = popScope() - val sortedImp = if ctx.settings.WunusedHas.imports || ctx.settings.WunusedHas.strictNoImplicitWarn then unusedImport.map(d => UnusedSymbol(d.srcPos, d.name, WarnTypes.Imports)).toList @@ -557,11 +561,19 @@ object CheckUnused: else Nil val warnings = - List(sortedImp, sortedLocalDefs, sortedExplicitParams, sortedImplicitParams, - sortedPrivateDefs, sortedPatVars, unsetLocalDefs, unsetPrivateDefs).flatten.sortBy { s => - val pos = s.pos.sourcePos - (pos.line, pos.column) - } + val unsorted = + sortedImp ::: + sortedLocalDefs ::: + sortedExplicitParams ::: + sortedImplicitParams ::: + sortedPrivateDefs ::: + sortedPatVars ::: + unsetLocalDefs ::: + unsetPrivateDefs + unsorted.sortBy { s => + val pos = s.pos.sourcePos + (pos.line, pos.column) + } UnusedResult(warnings.toSet) end getUnused //============================ HELPERS ==================================== @@ -595,14 +607,14 @@ object CheckUnused: * package a: * val x: Int = 0 * package b: - * import a._ // no warning + * import a.* // no warning * }}} * --- WITH OBJECT : OK --- * {{{ * object a: * val x: Int = 0 * object b: - * import a._ // unused warning + * import a.* // unused warning * }}} */ private def isConstructorOfSynth(sym: Symbol)(using Context): Boolean = @@ -633,6 +645,20 @@ object CheckUnused: imp.expr.tpe.member(sel.name.toTypeName).alternatives.exists(_.symbol.isOneOf(GivenOrImplicit)) ) + /** + * Ignore CanEqual imports + */ + private def isImportIgnored(imp: tpd.Import, sel: ImportSelector)(using Context): Boolean = + (sel.isWildcard && sel.isGiven && imp.expr.tpe.allMembers.exists(p => p.symbol.typeRef.baseClasses.exists(_.derivesFrom(defn.CanEqualClass)) && p.symbol.isOneOf(GivenOrImplicit))) || + (imp.expr.tpe.member(sel.name.toTermName).alternatives + .exists(p => p.symbol.isOneOf(GivenOrImplicit) && p.symbol.typeRef.baseClasses.exists(_.derivesFrom(defn.CanEqualClass)))) + + /** + * Ignore definitions of CanEqual given + */ + private def isDefIgnored(memDef: tpd.MemberDef)(using Context): Boolean = + memDef.symbol.isOneOf(GivenOrImplicit) && memDef.symbol.typeRef.baseClasses.exists(_.derivesFrom(defn.CanEqualClass)) + extension (tree: ImportSelector) def boundTpe: Type = tree.bound match { case untpd.TypedSplice(tree1) => tree1.tpe @@ -657,8 +683,10 @@ object CheckUnused: val simpleSelections = qual.tpe.member(sym.name).alternatives val typeSelections = sels.flatMap(n => 
qual.tpe.member(n.name.toTypeName).alternatives) val termSelections = sels.flatMap(n => qual.tpe.member(n.name.toTermName).alternatives) + val sameTermPath = qual.isTerm && sym.exists && sym.owner.isType && qual.tpe.typeSymbol == sym.owner.asType val selectionsToDealias = typeSelections ::: termSelections - val qualHasSymbol = simpleSelections.map(_.symbol).contains(sym) || (simpleSelections ::: selectionsToDealias).map(_.symbol).map(dealias).contains(dealiasedSym) + val renamedSelection = if sameTermPath then sels.find(sel => sel.imported.name == sym.name) else None + val qualHasSymbol = simpleSelections.map(_.symbol).contains(sym) || (simpleSelections ::: selectionsToDealias).map(_.symbol).map(dealias).contains(dealiasedSym) || renamedSelection.isDefined def selector = sels.find(sel => (sel.name.toTermName == sym.name || sel.name.toTypeName == sym.name) && symName.map(n => n.toTermName == sel.rename).getOrElse(true)) def dealiasedSelector = if(isDerived) sels.flatMap(sel => selectionsToDealias.map(m => (sel, m.symbol))).collect { case (sel, sym) if dealias(sym) == dealiasedSym => sel @@ -668,7 +696,7 @@ object CheckUnused: else None def wildcard = sels.find(sel => sel.isWildcard && ((sym.is(Given) == sel.isGiven && sel.bound.isEmpty) || sym.is(Implicit))) if qualHasSymbol && (!isAccessible || sym.isRenamedSymbol(symName)) && sym.exists then - selector.orElse(dealiasedSelector).orElse(givenSelector).orElse(wildcard) // selector with name or wildcard (or given) + selector.orElse(dealiasedSelector).orElse(givenSelector).orElse(wildcard).orElse(renamedSelection) // selector with name or wildcard (or given) else None @@ -705,8 +733,7 @@ object CheckUnused: /** A function is overridden. Either it has the `override` flag or a parent has a matching member (type and name) */ private def isOverriden(using Context): Boolean = - sym.is(Flags.Override) || - (sym.exists && sym.owner.thisType.parents.exists(p => sym.matchingMember(p).exists)) + sym.is(Flags.Override) || (sym.exists && sym.owner.thisType.parents.exists(p => sym.matchingMember(p).exists)) end extension diff --git a/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala b/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala index 179625759b10..5534947c6799 100644 --- a/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala +++ b/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala @@ -1,12 +1,12 @@ package dotty.tools.dotc package transform -import core._ +import core.* import ast.tpd -import MegaPhase._ -import Contexts._ -import Symbols._ -import Phases._ +import MegaPhase.* +import Contexts.* +import Symbols.* +import Phases.* import dotty.tools.io.JarArchive import dotty.tools.backend.jvm.GenBCode diff --git a/compiler/src/dotty/tools/dotc/transform/CollectNullableFields.scala b/compiler/src/dotty/tools/dotc/transform/CollectNullableFields.scala index 7b89c8785e05..9433f7949163 100644 --- a/compiler/src/dotty/tools/dotc/transform/CollectNullableFields.scala +++ b/compiler/src/dotty/tools/dotc/transform/CollectNullableFields.scala @@ -1,11 +1,11 @@ package dotty.tools.dotc.transform import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.Symbols.Symbol import dotty.tools.dotc.transform.MegaPhase.MiniPhase -import dotty.tools.dotc.transform.SymUtils._ + import scala.collection.mutable @@ -40,7 +40,7 @@ object CollectNullableFields { * -
defined in the same class as the lazy val */ class CollectNullableFields extends MiniPhase { - import tpd._ + import tpd.* override def phaseName: String = CollectNullableFields.name diff --git a/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala b/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala index b7e8ccf4e7e1..5740f359cb77 100644 --- a/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala +++ b/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala @@ -2,18 +2,18 @@ package dotty.tools package dotc package transform -import core._ -import Names._ +import core.* +import Names.* import StdNames.nme -import Types._ -import dotty.tools.dotc.transform.MegaPhase._ -import Flags._ -import Contexts._ -import Symbols._ -import Constants._ -import Decorators._ -import DenotTransformers._ -import SymUtils._ +import Types.* +import dotty.tools.dotc.transform.MegaPhase.* +import Flags.* +import Contexts.* +import Symbols.* +import Constants.* +import Decorators.* +import DenotTransformers.* + object CompleteJavaEnums { @@ -29,8 +29,8 @@ object CompleteJavaEnums { * case to the java.lang.Enum class. */ class CompleteJavaEnums extends MiniPhase with InfoTransformer { thisPhase => - import CompleteJavaEnums._ - import ast.tpd._ + import CompleteJavaEnums.* + import ast.tpd.* override def phaseName: String = CompleteJavaEnums.name diff --git a/compiler/src/dotty/tools/dotc/transform/Constructors.scala b/compiler/src/dotty/tools/dotc/transform/Constructors.scala index 4dd7205e4ee0..9a0df830c6d7 100644 --- a/compiler/src/dotty/tools/dotc/transform/Constructors.scala +++ b/compiler/src/dotty/tools/dotc/transform/Constructors.scala @@ -1,20 +1,21 @@ package dotty.tools.dotc package transform -import core._ -import MegaPhase._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.StdNames._ -import ast._ -import Flags._ +import core.* +import MegaPhase.* +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.StdNames.* +import ast.* +import Flags.* import Names.Name -import NameOps._ +import NameOps.* import NameKinds.{FieldName, ExplicitFieldName} -import SymUtils._ -import Symbols._ -import Decorators._ -import DenotTransformers._ + +import Symbols.* +import Decorators.* +import DenotTransformers.* import collection.mutable +import Types.* object Constructors { val name: String = "constructors" @@ -28,7 +29,7 @@ object Constructors { * into the constructor if possible. 
*/ class Constructors extends MiniPhase with IdentityDenotTransformer { thisPhase => - import tpd._ + import tpd.* override def phaseName: String = Constructors.name @@ -197,6 +198,10 @@ class Constructors extends MiniPhase with IdentityDenotTransformer { thisPhase = ) && fn.symbol.info.resultType.classSymbol == outerParam.info.classSymbol => ref(outerParam) + case Assign(lhs, rhs) if lhs.symbol.name == nme.OUTER => // do not transform the LHS of an assignment to the $outer field + cpy.Assign(tree)(lhs, super.transform(rhs)) + case dd: DefDef if dd.name.endsWith(nme.OUTER.asSimpleName) => // do not transform the RHS of an outer accessor + dd case tree: RefTree if tree.symbol.is(ParamAccessor) && tree.symbol.name == nme.OUTER => ref(outerParam) case _ => @@ -352,7 +357,7 @@ class Constructors extends MiniPhase with IdentityDenotTransformer { thisPhase = val expandedConstr = if (cls.isAllOf(NoInitsTrait)) { assert(finalConstrStats.isEmpty || { - import dotty.tools.dotc.transform.sjs.JSSymUtils._ + import dotty.tools.dotc.transform.sjs.JSSymUtils.* ctx.settings.scalajs.value && cls.isJSType }) constr diff --git a/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala b/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala index b4eb71c541d3..80115ca651bb 100644 --- a/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala +++ b/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala @@ -2,11 +2,11 @@ package dotty.tools package dotc package transform -import core._ -import Contexts._, Symbols._, Types._, Annotations._, Constants._, Phases._ +import core.* +import Contexts.*, Symbols.*, Types.*, Annotations.*, Constants.*, Phases.* import StdNames.nme import ast.untpd -import ast.tpd._ +import ast.tpd.* import config.Config object ContextFunctionResults: @@ -20,7 +20,7 @@ object ContextFunctionResults: */ def annotateContextResults(mdef: DefDef)(using Context): Unit = def contextResultCount(rhs: Tree, tp: Type): Int = tp match - case defn.ContextFunctionType(_, resTpe, _) => + case defn.ContextFunctionType(_, resTpe) => rhs match case closureDef(meth) => 1 + contextResultCount(meth.rhs, resTpe) case _ => 0 @@ -58,7 +58,8 @@ object ContextFunctionResults: */ def contextResultsAreErased(sym: Symbol)(using Context): Boolean = def allErased(tp: Type): Boolean = tp.dealias match - case defn.ContextFunctionType(_, resTpe, erasedParams) => !erasedParams.contains(false) && allErased(resTpe) + case ft @ defn.FunctionTypeOfMethod(mt: MethodType) if mt.isContextualMethod => + mt.nonErasedParamCount == 0 && allErased(mt.resType) case _ => true contextResultCount(sym) > 0 && allErased(sym.info.finalResultType) @@ -67,13 +68,13 @@ */ def integrateContextResults(tp: Type, crCount: Int)(using Context): Type = if crCount == 0 then tp - else tp match + else tp.dealias match case ExprType(rt) => integrateContextResults(rt, crCount) case tp: MethodOrPoly => tp.derivedLambdaType(resType = integrateContextResults(tp.resType, crCount)) - case defn.ContextFunctionType(argTypes, resType, erasedParams) => - MethodType(argTypes, integrateContextResults(resType, crCount - 1)) + case defn.FunctionTypeOfMethod(mt) if mt.isContextualMethod => + mt.derivedLambdaType(resType = integrateContextResults(mt.resType, crCount - 1)) /** The total number of parameters of method `sym`, not counting * erased parameters, but including context result parameters.
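Since these helpers count context-function results, a short sketch of the source-level feature they operate on may help; the `Config` type and `render` method are invented for the example:

```scala
case class Config(verbose: Boolean)

// The result type of `render` is a context function; conceptually, after the
// transformations above, its Config parameter counts like a regular parameter.
def render(x: Int): Config ?=> String =
  if summon[Config].verbose then s"value: $x" else x.toString

@main def demo =
  given Config = Config(verbose = true)
  println(render(42))   // the given Config is supplied implicitly
```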
@@ -83,16 +84,13 @@ object ContextFunctionResults: def contextParamCount(tp: Type, crCount: Int): Int = if crCount == 0 then 0 else - val defn.ContextFunctionType(params, resTpe, erasedParams) = tp: @unchecked + val defn.ContextFunctionType(params, resTpe) = tp: @unchecked val rest = contextParamCount(resTpe, crCount - 1) - if erasedParams.contains(true) then erasedParams.count(_ == false) + rest else params.length + rest + val nonErasedParams = params.count(!_.hasAnnotation(defn.ErasedParamAnnot)) + nonErasedParams + rest def normalParamCount(tp: Type): Int = tp.widenExpr.stripPoly match - case mt @ MethodType(pnames) => - val rest = normalParamCount(mt.resType) - if mt.hasErasedParams then - mt.erasedParams.count(_ == false) + rest - else pnames.length + rest + case mt @ MethodType(pnames) => mt.nonErasedParamCount + normalParamCount(mt.resType) case _ => contextParamCount(tp, contextResultCount(sym)) normalParamCount(sym.info) @@ -103,7 +101,7 @@ object ContextFunctionResults: def recur(tp: Type, n: Int): Type = if n == 0 then tp else tp match - case defn.ContextFunctionType(_, resTpe, _) => recur(resTpe, n - 1) + case defn.FunctionTypeOfMethod(mt) => recur(mt.resType, n - 1) recur(meth.info.finalResultType, depth) /** Should selection `tree` be eliminated since it refers to an `apply` @@ -117,8 +115,8 @@ object ContextFunctionResults: else tree match case Select(qual, name) => if name == nme.apply then - qual.tpe match - case defn.ContextFunctionType(_, _, _) => + qual.tpe.nn.dealias match + case defn.FunctionTypeOfMethod(mt) if mt.isContextualMethod => integrateSelect(qual, n + 1) case _ if defn.isContextFunctionClass(tree.symbol.maybeOwner) => // for TermRefs integrateSelect(qual, n + 1) diff --git a/compiler/src/dotty/tools/dotc/transform/CookComments.scala b/compiler/src/dotty/tools/dotc/transform/CookComments.scala index 27f34891fc2c..d443e31fdc39 100644 --- a/compiler/src/dotty/tools/dotc/transform/CookComments.scala +++ b/compiler/src/dotty/tools/dotc/transform/CookComments.scala @@ -1,8 +1,8 @@ package dotty.tools.dotc.transform import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.ContextOps._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.ContextOps.* import dotty.tools.dotc.typer.Docstrings class CookComments extends MegaPhase.MiniPhase { diff --git a/compiler/src/dotty/tools/dotc/transform/CountOuterAccesses.scala b/compiler/src/dotty/tools/dotc/transform/CountOuterAccesses.scala index 91b5bc6a3de4..b5c02347d5d2 100644 --- a/compiler/src/dotty/tools/dotc/transform/CountOuterAccesses.scala +++ b/compiler/src/dotty/tools/dotc/transform/CountOuterAccesses.scala @@ -1,12 +1,12 @@ package dotty.tools.dotc package transform -import core._ +import core.* import MegaPhase.MiniPhase -import dotty.tools.dotc.core.Contexts._ -import ast._ -import Flags._ -import Symbols._ +import dotty.tools.dotc.core.Contexts.* +import ast.* +import Flags.* +import Symbols.* import ExplicitOuter.isOuterParamAccessor import collection.mutable @@ -33,7 +33,7 @@ object CountOuterAccesses: */ class CountOuterAccesses extends MiniPhase: thisPhase => - import tpd._ + import tpd.* override def phaseName: String = CountOuterAccesses.name diff --git a/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala b/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala index 808cf928ecc2..89161cc8c013 100644 --- a/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala +++ b/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala @@ -1,7 +1,9 @@ 
package dotty.tools.dotc package transform -import core.Contexts._ +import core.Contexts.* + +import scala.compiletime.uninitialized /** Utility class for lazy values whose evaluation depends on a context. * This should be used whenever the evaluation of a lazy expression * the expression initializing the lazy val depends only on the root context, but not any changes afterwards. */ class CtxLazy[T](expr: Context ?=> T) { - private var myValue: T = _ + private var myValue: T = uninitialized private var forced = false def apply()(using Context): T = { if (!forced) {
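A hypothetical usage sketch of `CtxLazy` (the surrounding class and the setting lookup are chosen for illustration): the initializer runs at most once, under whichever context first forces it, which is only sound if it depends on root-context information alone.

```scala
// minimal sketch, assuming access to dotty's Contexts API
class EncodingInfo:
  // computed once, under whichever Context first calls encodingName
  private val sourceEncoding = new CtxLazy(ctx.settings.encoding.value)

  def encodingName(using Context): String = sourceEncoding()
```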
diff --git a/compiler/src/dotty/tools/dotc/transform/Dependencies.scala b/compiler/src/dotty/tools/dotc/transform/Dependencies.scala index 0043c43073ed..523ea75be912 100644 --- a/compiler/src/dotty/tools/dotc/transform/Dependencies.scala +++ b/compiler/src/dotty/tools/dotc/transform/Dependencies.scala @@ -1,11 +1,13 @@ -package dotty.tools.dotc +package dotty.tools +package dotc package transform import core.* import Symbols.*, Contexts.*, Types.*, Flags.*, Decorators.* -import SymUtils.* -import collection.mutable.{LinkedHashMap, TreeSet} + +import collection.mutable.{LinkedHashMap, LinkedHashSet} import annotation.constructorOnly +import scala.compiletime.uninitialized import dotty.tools.backend.sjs.JSDefinitions.jsdefn @@ -13,7 +15,7 @@ import dotty.tools.backend.sjs.JSDefinitions.jsdefn * `freeVars`, `tracked`, and `logicalOwner`. */ abstract class Dependencies(root: ast.tpd.Tree, @constructorOnly rootContext: Context): - import ast.tpd._ + import ast.tpd.* /** The symbol is a method or a lazy val that will be mapped to a method */ protected def isExpr(sym: Symbol)(using Context): Boolean @@ -33,7 +35,7 @@ abstract class Dependencies(root: ast.tpd.Tree, @constructorOnly rootContext: Co */ def logicalOwner: collection.Map[Symbol, Symbol] = logicOwner - private type SymSet = TreeSet[Symbol] + private type SymSet = LinkedHashSet[Symbol] /** A map storing free variables of functions and classes */ private val free: LinkedHashMap[Symbol, SymSet] = new LinkedHashMap @@ -51,13 +53,12 @@ abstract class Dependencies(root: ast.tpd.Tree, @constructorOnly rootContext: Co private val logicOwner = new LinkedHashMap[Symbol, Symbol] /** A flag to indicate whether new free variables have been found */ - private var changedFreeVars: Boolean = _ + private var changedFreeVars: Boolean = uninitialized /** A flag to indicate whether lifted owners have changed */ - private var changedLogicOwner: Boolean = _ + private var changedLogicOwner: Boolean = uninitialized - private val ord: Ordering[Symbol] = Ordering.by(_.id) - private def newSymSet = TreeSet.empty[Symbol](ord) + private def newSymSet: LinkedHashSet[Symbol] = new LinkedHashSet[Symbol] private def symSet(f: LinkedHashMap[Symbol, SymSet], sym: Symbol): SymSet = f.getOrElseUpdate(sym, newSymSet) @@ -181,26 +182,47 @@ abstract class Dependencies(root: ast.tpd.Tree, @constructorOnly rootContext: Co if enclClass.isContainedIn(thisClass) then thisClass else enclClass) // unknown this reference, play it safe and assume the narrowest possible owner + /** Set the first owner of a local method or class that's nested inside a term. + * This is either the enclosing package or the enclosing class. If the former, + * the method will be translated to a static method of its toplevel class. + * In that case, we might later re-adjust the owner to a nested class via + * `narrowTo` when we see that the method refers to the this-type of that class. + We choose the enclosing package when there's potentially something to gain from this + and when it is safe to do so + */ def setLogicOwner(local: Symbol) = val encClass = local.owner.enclosingClass + // When to prefer the enclosing class over the enclosing package: val preferEncClass = - ( encClass.isStatic - // non-static classes can capture owners, so should be avoided + // If class is not static, we try to hoist the method out of + // the class to avoid the outer pointer. && (encClass.isProperlyContainedIn(local.topLevelClass) - // can be false for symbols which are defined in some weird combination of supercalls. + // If class is nested in an outer object, we prefer to leave the method in the class, + // since putting it in the outer object makes access more complicated || encClass.is(ModuleClass, butNot = Package) - // needed to not cause deadlocks in classloader. see t5375.scala + // If class is an outermost object we also want to avoid making the + // method static since that could cause deadlocks in interacting + // with class initialization. See deadlock.scala ) - ) - || ( + && (!sym.isAnonymousFunction || sym.owner.ownersIterator.exists(_.isConstructor)) + // The previous conditions mean methods in static objects and nested static classes + // don't get lifted out to be static. In general it is prudent to do that. However, + // for anonymous functions, we prefer them to be static because that means lambdas + // are memoized and can be serialized even if the enclosing object or class + // is not serializable. See run/lambda-serialization-gc.scala and run/i19224.scala. + // On the other hand, we don't want to lift anonymous functions from inside the + // object or class constructor to be static since that can again cause deadlocks + // through its interaction with class initialization. See run/deadlock.scala, which works + // in Scala 3 but deadlocks in Scala 2. + || /* Scala.js: Never move any member beyond the boundary of a DynamicImportThunk. * DynamicImportThunk subclasses are boundaries between the eventual ES modules * that can be dynamically loaded. Moving members across that boundary changes * the dynamic and static dependencies between ES modules, which is forbidden.
*/ ctx.settings.scalajs.value && encClass.isSubClass(jsdefn.DynamicImportThunkClass) - ) + logicOwner(sym) = if preferEncClass then encClass else local.enclosingPackageClass tree match diff --git a/compiler/src/dotty/tools/dotc/transform/DropBreaks.scala b/compiler/src/dotty/tools/dotc/transform/DropBreaks.scala index 3081bd5c2b20..5f26a6af6c3c 100644 --- a/compiler/src/dotty/tools/dotc/transform/DropBreaks.scala +++ b/compiler/src/dotty/tools/dotc/transform/DropBreaks.scala @@ -6,9 +6,9 @@ import ast.{Trees, tpd} import core.* import Decorators.* import NameKinds.BoundaryName -import MegaPhase._ -import Types._, Contexts._, Flags._, DenotTransformers._ -import Symbols._, StdNames._, Trees._ +import MegaPhase.* +import Types.*, Contexts.*, Flags.*, DenotTransformers.* +import Symbols.*, StdNames.*, Trees.* import util.Property import Constants.Constant import Flags.MethodOrLazy @@ -41,7 +41,7 @@ object DropBreaks: class DropBreaks extends MiniPhase: import DropBreaks.* - import tpd._ + import tpd.* override def phaseName: String = DropBreaks.name @@ -122,7 +122,7 @@ class DropBreaks extends MiniPhase: case id: Ident => val arg = (args: @unchecked) match case arg :: Nil => arg - case Nil => Literal(Constant(())).withSpan(tree.span) + case Nil => unitLiteral.withSpan(tree.span) Some((id.symbol, arg)) case _ => None case _ => None diff --git a/compiler/src/dotty/tools/dotc/transform/DropEmptyCompanions.scala.disabled b/compiler/src/dotty/tools/dotc/transform/DropEmptyCompanions.scala.disabled index 13adcf5c3f76..1a5cb0dfba47 100644 --- a/compiler/src/dotty/tools/dotc/transform/DropEmptyCompanions.scala.disabled +++ b/compiler/src/dotty/tools/dotc/transform/DropEmptyCompanions.scala.disabled @@ -1,17 +1,17 @@ package dotty.tools.dotc package transform -import core._ +import core.* import DenotTransformers.SymTransformer import Phases.Phase -import Contexts._ -import Flags._ -import Symbols._ +import Contexts.* +import Flags.* +import Symbols.* import SymDenotations.SymDenotation -import ast.Trees._ +import ast.Trees.* import collection.mutable -import Decorators._ -import NameOps._ +import Decorators.* +import NameOps.* import MegaPhase.MiniPhase import dotty.tools.dotc.transform.MegaPhase.TransformerInfo @@ -29,7 +29,7 @@ import dotty.tools.dotc.transform.MegaPhase.TransformerInfo * at their destination. 
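For orientation, the `boundary`/`break` pattern that `DropBreaks` optimizes looks like the sketch below (standard-library API; the function itself is invented). When a `break` is local to its enclosing `boundary`, the phase can drop the exception-based control flow:

```scala
import scala.util.boundary, boundary.break

def firstNegative(xs: List[Int]): Option[Int] =
  boundary:
    for x <- xs do
      if x < 0 then break(Some(x))   // local break: no Break exception needed
    None
```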
*/ class DropEmptyCompanions extends MiniPhase { thisTransform => - import ast.tpd._ + import ast.tpd.* override def phaseName = "dropEmptyCompanions" override def runsAfter = Set(Flatten.name) diff --git a/compiler/src/dotty/tools/dotc/transform/DropOuterAccessors.scala b/compiler/src/dotty/tools/dotc/transform/DropOuterAccessors.scala index a363ccaeb0d0..b3bd1ab8dd26 100644 --- a/compiler/src/dotty/tools/dotc/transform/DropOuterAccessors.scala +++ b/compiler/src/dotty/tools/dotc/transform/DropOuterAccessors.scala @@ -1,15 +1,15 @@ package dotty.tools.dotc package transform -import core._ +import core.* import MegaPhase.MiniPhase -import dotty.tools.dotc.core.Contexts._ -import ast._ -import Flags._ -import Symbols._ -import Contexts._ -import Decorators._ -import DenotTransformers._ +import dotty.tools.dotc.core.Contexts.* +import ast.* +import Flags.* +import Symbols.* +import Contexts.* +import Decorators.* +import DenotTransformers.* import ExplicitOuter.isOuterParamAccessor import CountOuterAccesses.mightBeDropped import collection.mutable @@ -24,7 +24,7 @@ object DropOuterAccessors: */ class DropOuterAccessors extends MiniPhase with IdentityDenotTransformer: thisPhase => - import tpd._ + import tpd.* override def phaseName: String = DropOuterAccessors.name diff --git a/compiler/src/dotty/tools/dotc/transform/ElimByName.scala b/compiler/src/dotty/tools/dotc/transform/ElimByName.scala index 151e841f0e48..eca3928569f1 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimByName.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimByName.scala @@ -2,11 +2,11 @@ package dotty.tools package dotc package transform -import core._ -import Contexts._ -import Symbols._ -import Types._ -import Flags._ +import core.* +import Contexts.* +import Symbols.* +import Types.* +import Flags.* import SymDenotations.* import DenotTransformers.InfoTransformer import NameKinds.SuperArgName @@ -53,7 +53,7 @@ import dotty.tools.dotc.core.Names.Name class ElimByName extends MiniPhase, InfoTransformer: thisPhase => - import ast.tpd._ + import ast.tpd.* override def phaseName: String = ElimByName.name diff --git a/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala b/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala index 503561915040..0b0906148ba1 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala @@ -3,12 +3,12 @@ package dotc package transform import ast.{Trees, tpd} -import core._, core.Decorators._ -import MegaPhase._ -import Types._, Contexts._, Flags._, DenotTransformers._, Phases._ -import Symbols._, StdNames._, Trees._ -import TypeErasure.ErasedValueType, ValueClasses._ -import reporting._ +import core.*, core.Decorators.* +import MegaPhase.* +import Types.*, Contexts.*, Flags.*, DenotTransformers.*, Phases.* +import Symbols.*, StdNames.*, Trees.* +import TypeErasure.ErasedValueType, ValueClasses.* +import reporting.* import NameKinds.SuperAccessorName object ElimErasedValueType { @@ -36,7 +36,7 @@ object ElimErasedValueType { */ class ElimErasedValueType extends MiniPhase with InfoTransformer { thisPhase => - import tpd._ + import tpd.* import ElimErasedValueType.elimEVT override def phaseName: String = ElimErasedValueType.name diff --git a/compiler/src/dotty/tools/dotc/transform/ElimOpaque.scala b/compiler/src/dotty/tools/dotc/transform/ElimOpaque.scala index 2f55826ec2a3..0ee8781b6b70 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimOpaque.scala +++ 
b/compiler/src/dotty/tools/dotc/transform/ElimOpaque.scala @@ -2,16 +2,16 @@ package dotty.tools package dotc package transform -import core._ -import dotty.tools.dotc.transform.MegaPhase._ -import Flags._ -import Types._ -import Contexts._ -import Symbols._ +import core.* +import dotty.tools.dotc.transform.MegaPhase.* +import Flags.* +import Types.* +import Contexts.* +import Symbols.* import Denotations.{SingleDenotation, NonSymSingleDenotation} import SymDenotations.SymDenotation -import DenotTransformers._ -import Names._ +import DenotTransformers.* +import Names.* object ElimOpaque { val name: String = "elimOpaque" @@ -21,7 +21,7 @@ /** Rewrites opaque type aliases to normal alias types */ class ElimOpaque extends MiniPhase with DenotTransformer { thisPhase => - import ast.tpd._ + import ast.tpd.* override def phaseName: String = ElimOpaque.name diff --git a/compiler/src/dotty/tools/dotc/transform/ElimOuterSelect.scala b/compiler/src/dotty/tools/dotc/transform/ElimOuterSelect.scala index 3ddc8b614bae..8527ad26e51b 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimOuterSelect.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimOuterSelect.scala @@ -1,17 +1,17 @@ package dotty.tools.dotc package transform -import core._ +import core.* import MegaPhase.MiniPhase -import Contexts._ -import Types._ +import Contexts.* +import Types.* import NameKinds.OuterSelectName /** This phase rewrites outer selects `E.n_` which were introduced by * inlining to outer paths. */ class ElimOuterSelect extends MiniPhase { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = ElimOuterSelect.name diff --git a/compiler/src/dotty/tools/dotc/transform/ElimPackagePrefixes.scala b/compiler/src/dotty/tools/dotc/transform/ElimPackagePrefixes.scala index 83349f1f6199..6c577a872c56 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimPackagePrefixes.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimPackagePrefixes.scala @@ -1,10 +1,10 @@ package dotty.tools.dotc package transform -import core._ -import Decorators._, Flags._, Types._, Contexts._, Symbols._ -import ast.tpd._ -import Flags._ +import core.* +import Decorators.*, Flags.*, Types.*, Contexts.*, Symbols.* +import ast.tpd.* +import Flags.* import MegaPhase.MiniPhase /** Eliminates syntactic references to package terms as prefixes of classes, so that there's no chance diff --git a/compiler/src/dotty/tools/dotc/transform/ElimPolyFunction.scala b/compiler/src/dotty/tools/dotc/transform/ElimPolyFunction.scala index 756ddd9bf0eb..3ed337ee3a4a 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimPolyFunction.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimPolyFunction.scala @@ -2,11 +2,11 @@ package dotty.tools.dotc package transform import ast.{Trees, tpd} -import core._, core.Decorators._ -import MegaPhase._, Phases.Phase -import Types._, Contexts._, Constants._, Names._, NameOps._, Flags._, DenotTransformers._ -import SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._, Scopes._, Denotations._ -import TypeErasure.ErasedValueType, ValueClasses._ +import core.*, core.Decorators.* +import MegaPhase.*, Phases.Phase +import Types.*, Contexts.*, Constants.*, Names.*, NameOps.*, Flags.*, DenotTransformers.* +import SymDenotations.*, Symbols.*, StdNames.*, Annotations.*, Trees.*, Scopes.*, Denotations.* +import TypeErasure.ErasedValueType, ValueClasses.* /** This phase rewrites PolyFunction subclasses to FunctionN subclasses * @@ -20,7 +20,7 @@ import TypeErasure.ErasedValueType,
ValueClasses._ */ class ElimPolyFunction extends MiniPhase with DenotTransformer { - import tpd._ + import tpd.* override def phaseName: String = ElimPolyFunction.name diff --git a/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala b/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala index 359b882ef26b..b98d7d525089 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala @@ -2,17 +2,17 @@ package dotty.tools package dotc package transform -import core._ +import core.* import StdNames.nme -import Types._ -import transform.MegaPhase._ -import Flags._ -import Contexts._ -import Symbols._ -import Decorators._ -import Denotations._, SymDenotations._ -import DenotTransformers._ -import NullOpsDecorator._ +import Types.* +import transform.MegaPhase.* +import Flags.* +import Contexts.* +import Symbols.* +import Decorators.* +import Denotations.*, SymDenotations.* +import DenotTransformers.* +import NullOpsDecorator.* object ElimRepeated { val name: String = "elimRepeated" @@ -24,7 +24,7 @@ object ElimRepeated { * the transformed type if needed. */ class ElimRepeated extends MiniPhase with InfoTransformer { thisPhase => - import ast.tpd._ + import ast.tpd.* override def phaseName: String = ElimRepeated.name diff --git a/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala b/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala index 02612253c735..e2940532d463 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala @@ -1,9 +1,9 @@ package dotty.tools.dotc package transform -import core._ -import Contexts._ -import Flags._ +import core.* +import Contexts.* +import Flags.* import dotty.tools.dotc.ast.tpd import MegaPhase.MiniPhase import dotty.tools.dotc.core.Types.{ThisType, TermRef} @@ -12,7 +12,7 @@ import dotty.tools.dotc.core.Types.{ThisType, TermRef} * corresponding modules. 
*/ class ElimStaticThis extends MiniPhase { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = ElimStaticThis.name diff --git a/compiler/src/dotty/tools/dotc/transform/Erasure.scala b/compiler/src/dotty/tools/dotc/transform/Erasure.scala index 981dd5f60aea..8582420d64ee 100644 --- a/compiler/src/dotty/tools/dotc/transform/Erasure.scala +++ b/compiler/src/dotty/tools/dotc/transform/Erasure.scala @@ -2,39 +2,38 @@ package dotty.tools package dotc package transform -import core.Phases._ -import core.DenotTransformers._ -import core.Denotations._ -import core.SymDenotations._ -import core.Symbols._ -import core.Contexts._ -import core.Types._ -import core.Names._ -import core.StdNames._ -import core.NameOps._ +import core.Phases.* +import core.DenotTransformers.* +import core.Denotations.* +import core.SymDenotations.* +import core.Symbols.* +import core.Contexts.* +import core.Types.* +import core.Names.* +import core.StdNames.* +import core.NameOps.* import core.NameKinds.{AdaptedClosureName, BodyRetainerName, DirectMethName} import core.Scopes.newScopeWith -import core.Decorators._ -import core.Constants._ -import core.Definitions._ +import core.Decorators.* +import core.Constants.* +import core.Definitions.* import core.Annotations.BodyAnnotation import typer.NoChecking import inlines.Inlines -import typer.ProtoTypes._ +import typer.ProtoTypes.* import typer.ErrorReporting.errorTree import typer.Checking.checkValue -import core.TypeErasure._ -import core.Decorators._ +import core.TypeErasure.* +import core.Decorators.* import dotty.tools.dotc.ast.{tpd, untpd} import ast.TreeTypeMap import dotty.tools.dotc.core.{Constants, Flags} -import ValueClasses._ -import TypeUtils._ -import ContextFunctionResults._ -import ExplicitOuter._ +import ValueClasses.* +import ContextFunctionResults.* +import ExplicitOuter.* import core.Mode import util.Property -import reporting._ +import reporting.* class Erasure extends Phase with DenotTransformer { @@ -190,18 +189,20 @@ class Erasure extends Phase with DenotTransformer { def assertErased(tp: Type, tree: tpd.Tree = tpd.EmptyTree)(using Context): Unit = { def isAllowed(cls: Symbol, sourceName: String) = tp.typeSymbol == cls && ctx.compilationUnit.source.file.name == sourceName - assert(isErasedType(tp) || - isAllowed(defn.ArrayClass, "Array.scala") || - isAllowed(defn.TupleClass, "Tuple.scala") || - isAllowed(defn.NonEmptyTupleClass, "Tuple.scala") || - isAllowed(defn.PairClass, "Tuple.scala"), - i"The type $tp - ${tp.toString} of class ${tp.getClass} of tree $tree : ${tree.tpe} / ${tree.getClass} is illegal after erasure, phase = ${ctx.phase.prev}") + assert( + isErasedType(tp) + || isAllowed(defn.ArrayClass, "Array.scala") + || isAllowed(defn.TupleClass, "Tuple.scala") + || isAllowed(defn.NonEmptyTupleClass, "Tuple.scala") + || isAllowed(defn.PairClass, "Tuple.scala") + || isAllowed(defn.PureClass, "Pure.scala"), + i"The type $tp - ${tp.toString} of class ${tp.getClass} of tree $tree : ${tree.tpe} / ${tree.getClass} is illegal after erasure, phase = ${ctx.phase.prev}") } } object Erasure { - import tpd._ - import TypeTestsCasts._ + import tpd.* + import TypeTestsCasts.* val name: String = "erasure" val description: String = "rewrite types to JVM model" @@ -317,7 +318,7 @@ object Erasure { cast(tree1, pt) case _ => val cls = pt.classSymbol - if (cls eq defn.UnitClass) constant(tree, Literal(Constant(()))) + if (cls eq defn.UnitClass) constant(tree, unitLiteral) else { assert(cls ne defn.ArrayClass) 
ref(unboxMethod(cls.asClass)).appliedTo(tree) @@ -539,7 +540,7 @@ object Erasure { end Boxing class Typer(erasurePhase: DenotTransformer) extends typer.ReTyper with NoChecking { - import Boxing._ + import Boxing.* def isErased(tree: Tree)(using Context): Boolean = tree match { case TypeApply(Select(qual, _), _) if tree.symbol == defn.Any_typeCast => @@ -678,9 +679,7 @@ object Erasure { inContext(preErasureCtx) { val qualTp = tree.qualifier.typeOpt.widen if qualTp.derivesFrom(defn.PolyFunctionClass) then - erasePolyFunctionApply(qualTp.select(nme.apply).widen).classSymbol - else if defn.isErasedFunctionType(qualTp) then - eraseErasedFunctionApply(qualTp.select(nme.apply).widen.asInstanceOf[MethodType]).classSymbol + eraseRefinedFunctionApply(qualTp.select(nme.apply).widen).classSymbol else NoSymbol } @@ -762,7 +761,9 @@ object Erasure { val symIsPrimitive = sym.owner.isPrimitiveValueClass def originalQual: Type = - erasure(tree.qualifier.typeOpt.widen.finalResultType) + erasure( + inContext(preErasureCtx): + tree.qualifier.typeOpt.widen.finalResultType) if (qualIsPrimitive && !symIsPrimitive || qual.tpe.widenDealias.isErasedValueType) recur(box(qual)) @@ -822,6 +823,11 @@ object Erasure { } } + override def typedBind(tree: untpd.Bind, pt: Type)(using Context): Bind = + atPhase(erasurePhase): + checkBind(promote(tree)) + super.typedBind(tree, pt) + /** Besides normal typing, this method does uncurrying and collects parameters * to anonymous functions of arity > 22. */ @@ -867,7 +873,7 @@ object Erasure { app(fun1) case t => - if ownArgs.isEmpty then fun1 + if ownArgs.isEmpty || t.isError then fun1 else throw new MatchError(i"tree $tree has unexpected type of function $fun/$fun1: $t, was $origFunType, args = $ownArgs") end typedApply diff --git a/compiler/src/dotty/tools/dotc/transform/EtaReduce.scala b/compiler/src/dotty/tools/dotc/transform/EtaReduce.scala index cf62cffd4cdb..a8565d008f46 100644 --- a/compiler/src/dotty/tools/dotc/transform/EtaReduce.scala +++ b/compiler/src/dotty/tools/dotc/transform/EtaReduce.scala @@ -6,7 +6,7 @@ import MegaPhase.MiniPhase import core.* import Symbols.*, Contexts.*, Types.*, Decorators.* import StdNames.nme -import SymUtils.* + import NameKinds.AdaptedClosureName /** Rewrite `(x1, ... xN) => f(x1, ... xN)` for N >= 0 to `f`, @@ -28,7 +28,7 @@ import NameKinds.AdaptedClosureName * to performance degradation, and in some cases, stack overflows. */ class EtaReduce extends MiniPhase: - import ast.tpd._ + import ast.tpd.* override def phaseName: String = EtaReduce.name diff --git a/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala b/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala index 41e5b76ca874..fa2492a261d5 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala @@ -1,18 +1,18 @@ package dotty.tools.dotc package transform -import core._ +import core.* import dotty.tools.dotc.core.DenotTransformers.IdentityDenotTransformer -import Contexts._ -import Symbols._ -import Flags._ -import SymDenotations._ +import Contexts.* +import Symbols.* +import Flags.* +import SymDenotations.* -import Decorators._ -import MegaPhase._ +import Decorators.* +import MegaPhase.* import java.io.File.separatorChar -import ValueClasses._ +import ValueClasses.* /** Make private term members that are accessed from another class * non-private by resetting the Private flag and expanding their name. 
@@ -29,7 +29,7 @@ import ValueClasses._ * and https://github.com/lampepfl/dotty/issues/783 */ class ExpandPrivate extends MiniPhase with IdentityDenotTransformer { thisPhase => - import ast.tpd._ + import ast.tpd.* override def phaseName: String = ExpandPrivate.name diff --git a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala index 0bfc444e0997..d0e90566f333 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala @@ -2,12 +2,13 @@ package dotty.tools package dotc package transform -import core._ +import core.* import Scopes.newScope -import Contexts._, Symbols._, Types._, Flags._, Decorators._, StdNames._, Constants._ -import MegaPhase._ -import SymUtils._ -import NullOpsDecorator._ +import Contexts.*, Symbols.*, Types.*, Flags.*, Decorators.*, StdNames.*, Constants.* +import MegaPhase.* +import Names.TypeName + +import NullOpsDecorator.* import ast.untpd /** Expand SAM closures that cannot be represented by the JVM as lambdas to anonymous classes. @@ -37,7 +38,7 @@ object ExpandSAMs: case _ => false class ExpandSAMs extends MiniPhase: - import ast.tpd._ + import ast.tpd.* override def phaseName: String = ExpandSAMs.name @@ -50,29 +51,34 @@ class ExpandSAMs extends MiniPhase: tree // it's a plain function case tpe if defn.isContextFunctionType(tpe) => tree - case tpe @ SAMType(_) if tpe.isRef(defn.PartialFunctionClass) => - val tpe1 = checkRefinements(tpe, fn) - toPartialFunction(tree, tpe1) - case tpe @ SAMType(_) if ExpandSAMs.isPlatformSam(tpe.classSymbol.asClass) => - checkRefinements(tpe, fn) + case SAMType(_, tpe) if tpe.isRef(defn.PartialFunctionClass) => + toPartialFunction(tree, tpe) + case SAMType(_, tpe) if ExpandSAMs.isPlatformSam(tpe.classSymbol.asClass) => tree case tpe => - val tpe1 = checkRefinements(tpe.stripNull, fn) + // A SAM type is allowed to have type alias refinements (see + // SAMType#samParent) which must be converted into type members if + // the closure is desugared into a class. + val refinements = collection.mutable.ListBuffer[(TypeName, TypeAlias)]() + def collectAndStripRefinements(tp: Type): Type = tp match + case RefinedType(parent, name, info: TypeAlias) => + val res = collectAndStripRefinements(parent) + refinements += ((name.asTypeName, info)) + res + case _ => tp + val tpe1 = collectAndStripRefinements(tpe) val Seq(samDenot) = tpe1.possibleSamMethods cpy.Block(tree)(stats, - AnonClass(tpe1 :: Nil, fn.symbol.asTerm :: Nil, samDenot.symbol.asTerm.name :: Nil)) + AnonClass(List(tpe1), + List(samDenot.symbol.asTerm.name -> fn.symbol.asTerm), + refinements.toList + ) + ) } case _ => tree } - private def checkNoContextFunction(tpt: Tree)(using Context): Unit = - if defn.isContextFunctionType(tpt.tpe) then - report.error( - em"""Implementation restriction: cannot convert this expression to - |partial function with context function result type $tpt""", - tpt.srcPos) - /** A partial function literal: * * ``` @@ -115,8 +121,6 @@ class ExpandSAMs: private def toPartialFunction(tree: Block, tpe: Type)(using Context): Tree = { val closureDef(anon @ DefDef(_, List(List(param)), _, _)) = tree: @unchecked - checkNoContextFunction(anon.tpt) - // The right hand side from which to construct the partial function. This is always a Match. // If the original rhs is already a Match (possibly in braces), return that. // Otherwise construct a match `x match case _ => rhs` where `x` is the parameter of the closure.
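The refinement handling above lets closures target SAM types refined with type aliases, as in this sketch (trait and value names invented):

```scala
trait Cmp:
  type T
  def compare(x: T, y: T): Int

// The lambda targets a SAM type with a type-alias refinement; when desugared
// into an anonymous class, the alias T = Int becomes a type member.
val intCmp: Cmp { type T = Int } = (x, y) => x - y
```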
@@ -180,13 +184,4 @@ class ExpandSAMs extends MiniPhase: List(isDefinedAtDef, applyOrElseDef) } } - - private def checkRefinements(tpe: Type, tree: Tree)(using Context): Type = tpe.dealias match { - case RefinedType(parent, name, _) => - if (name.isTermName && tpe.member(name).symbol.ownersIterator.isEmpty) // if member defined in the refinement - report.error(em"Lambda does not define $name", tree.srcPos) - checkRefinements(parent, tree) - case tpe => - tpe - } end ExpandSAMs diff --git a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala index deb1f665c022..f57595293ae1 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala @@ -2,19 +2,19 @@ package dotty.tools package dotc package transform -import MegaPhase._ -import core.DenotTransformers._ -import core.Symbols._ -import core.Contexts._ -import core.Phases._ -import core.Types._ -import core.Flags._ -import core.Decorators._ +import MegaPhase.* +import core.DenotTransformers.* +import core.Symbols.* +import core.Contexts.* +import core.Phases.* +import core.Types.* +import core.Flags.* +import core.Decorators.* import core.StdNames.nme -import core.Names._ -import core.NameOps._ +import core.Names.* +import core.NameOps.* import core.NameKinds.SuperArgName -import SymUtils._ + import dotty.tools.dotc.ast.tpd import collection.mutable @@ -35,8 +35,8 @@ import scala.annotation.tailrec * needs to run after pattern matcher as it can add outer checks and force creation of $outer */ class ExplicitOuter extends MiniPhase with InfoTransformer { thisPhase => - import ExplicitOuter._ - import ast.tpd._ + import ExplicitOuter.* + import ast.tpd.* override def phaseName: String = ExplicitOuter.name @@ -122,7 +122,7 @@ class ExplicitOuter extends MiniPhase with InfoTransformer { thisPhase => } object ExplicitOuter { - import ast.tpd._ + import ast.tpd.* val name: String = "explicitOuter" val description: String = "add accessors to outer classes from nested ones" diff --git a/compiler/src/dotty/tools/dotc/transform/ExplicitSelf.scala b/compiler/src/dotty/tools/dotc/transform/ExplicitSelf.scala index a6f7a29accd7..cc4f1e8f45b8 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExplicitSelf.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExplicitSelf.scala @@ -1,9 +1,9 @@ package dotty.tools.dotc package transform -import core._ -import Contexts._, Types._, MegaPhase._, ast.Trees._, Symbols._, Decorators._, Flags._ -import SymUtils.* +import core.* +import Contexts.*, Types.*, MegaPhase.*, ast.Trees.*, Symbols.*, Decorators.*, Flags.* + /** Transform references of the form * @@ -20,7 +20,7 @@ import SymUtils.* * Also replaces idents referring to the self type with ThisTypes. 
*/ class ExplicitSelf extends MiniPhase { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = ExplicitSelf.name diff --git a/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala index a430f7532066..f0d1c687df8e 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala @@ -5,17 +5,16 @@ package dotty.tools.dotc package transform -import dotty.tools.dotc.transform.MegaPhase._ -import ValueClasses._ +import dotty.tools.dotc.transform.MegaPhase.* +import ValueClasses.* import dotty.tools.dotc.ast.tpd import scala.collection.mutable -import core._ -import Types._, Contexts._, Names._, Flags._, DenotTransformers._, Phases._ -import SymDenotations._, Symbols._, StdNames._, Denotations._ +import core.* +import Types.*, Contexts.*, Names.*, Flags.*, DenotTransformers.*, Phases.* +import SymDenotations.*, Symbols.*, StdNames.*, Denotations.* import TypeErasure.{ valueErasure, ErasedValueType } import NameKinds.{ExtMethName, BodyRetainerName} -import Decorators._ -import TypeUtils._ +import Decorators.* /** * Perform Step 1 in the inline classes SIP: Creates extension methods for all @@ -38,8 +37,8 @@ import TypeUtils._ */ class ExtensionMethods extends MiniPhase with DenotTransformer with FullParameterization { thisPhase => - import tpd._ - import ExtensionMethods._ + import tpd.* + import ExtensionMethods.* override def phaseName: String = ExtensionMethods.name @@ -77,7 +76,7 @@ class ExtensionMethods extends MiniPhase with DenotTransformer with FullParamete // Create extension methods, except if the class comes from Scala 2 // because it adds extension methods before pickling. - if (!(valueClass.is(Scala2x))) + if !valueClass.is(Scala2x, butNot = Scala2Tasty) then for (decl <- valueClass.classInfo.decls) if isMethodWithExtension(decl) then enterInModuleClass(createExtensionMethod(decl, moduleClassSym.symbol)) @@ -215,8 +214,8 @@ object ExtensionMethods { | | ${candidates.map(c => s"${c.name}:${c.info.signature}:${FullParameterization.memberSignature(c.info)}").mkString("\n")}""") if matching.tail.nonEmpty then - // this case will report a "have the same erasure" error later at erasure pahse - report.log(i"mutiple extension methods match $imeth: ${candidates.map(c => i"${c.name}:${c.info}")}") + // this case will report a "have the same erasure" error later at erasure phase + report.log(i"multiple extension methods match $imeth: ${candidates.map(c => i"${c.name}:${c.info}")}") matching.head.symbol.asTerm } } diff --git a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala index 03639c8af689..b5bc43ee762c 100644 --- a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala +++ b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala @@ -1,22 +1,21 @@ package dotty.tools.dotc package transform -import core._ -import Names._ -import dotty.tools.dotc.transform.MegaPhase._ +import core.* +import Names.* +import dotty.tools.dotc.transform.MegaPhase.* import ast.untpd -import Flags._ -import Types._ +import Flags.* +import Types.* import Constants.Constant -import Contexts._ -import Symbols._ -import Decorators._ +import Contexts.* +import Symbols.* +import Decorators.* import scala.collection.mutable -import DenotTransformers._ -import NameOps._ +import DenotTransformers.* +import NameOps.* import NameKinds.OuterSelectName -import StdNames._ 
-import TypeUtils.isErasedValueType +import StdNames.* import config.Feature import inlines.Inlines.inInlineMethod @@ -37,7 +36,7 @@ object FirstTransform { * if (false) A else B ==> B */ class FirstTransform extends MiniPhase with InfoTransformer { thisPhase => - import ast.tpd._ + import ast.tpd.* override def phaseName: String = FirstTransform.name diff --git a/compiler/src/dotty/tools/dotc/transform/Flatten.scala b/compiler/src/dotty/tools/dotc/transform/Flatten.scala index 25df51d0916d..31c31a0f16ed 100644 --- a/compiler/src/dotty/tools/dotc/transform/Flatten.scala +++ b/compiler/src/dotty/tools/dotc/transform/Flatten.scala @@ -1,18 +1,20 @@ package dotty.tools.dotc package transform -import core._ +import core.* import DenotTransformers.SymTransformer -import Contexts._ -import Flags._ +import Contexts.* +import Flags.* import SymDenotations.SymDenotation import collection.mutable import MegaPhase.MiniPhase import util.Store +import scala.compiletime.uninitialized + /** Lift nested classes to toplevel */ class Flatten extends MiniPhase with SymTransformer { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = Flatten.name @@ -24,7 +26,7 @@ class Flatten extends MiniPhase with SymTransformer { override def changesMembers: Boolean = true // the phase removes inner classes - private var LiftedDefs: Store.Location[mutable.ListBuffer[Tree] | Null] = _ + private var LiftedDefs: Store.Location[mutable.ListBuffer[Tree] | Null] = uninitialized private def liftedDefs(using Context) = ctx.store(LiftedDefs) override def initContext(ctx: FreshContext): Unit = diff --git a/compiler/src/dotty/tools/dotc/transform/ForwardDepChecks.scala b/compiler/src/dotty/tools/dotc/transform/ForwardDepChecks.scala index bf8a6fa6c7bf..afe78ce1296d 100644 --- a/compiler/src/dotty/tools/dotc/transform/ForwardDepChecks.scala +++ b/compiler/src/dotty/tools/dotc/transform/ForwardDepChecks.scala @@ -10,6 +10,8 @@ import collection.immutable import ast.tpd import MegaPhase.MiniPhase +import scala.compiletime.uninitialized + object ForwardDepChecks: import tpd.* @@ -37,8 +39,8 @@ object ForwardDepChecks: (m1, idx + 1) }._1 var maxIndex: Int = Int.MinValue - var refSpan: Span = _ - var refSym: Symbol = _ + var refSpan: Span = uninitialized + var refSym: Symbol = uninitialized override def enterReference(sym: Symbol, span: Span): Unit = if (sym.exists && sym.owner.isTerm) @@ -63,7 +65,7 @@ class ForwardDepChecks extends MiniPhase: override def runsAfter: Set[String] = Set(ElimByName.name) - private var LevelInfo: Store.Location[OptLevelInfo] = _ + private var LevelInfo: Store.Location[OptLevelInfo] = uninitialized private def currentLevel(using Context): OptLevelInfo = ctx.store(LevelInfo) override def initContext(ctx: FreshContext): Unit = diff --git a/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala b/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala index 8ca600577244..dbb4c72ab311 100644 --- a/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala +++ b/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala @@ -1,14 +1,13 @@ package dotty.tools.dotc package transform -import core._ -import Types._ -import Contexts._ -import Symbols._ -import Decorators._ -import TypeUtils._ +import core.* +import Types.* +import Contexts.* +import Symbols.* +import Decorators.* import StdNames.nme -import ast._ +import ast.* /** Provides methods to produce fully parameterized versions of instance methods, * where the `this` of the enclosing class is abstracted 
out in an extra leading @@ -49,7 +48,7 @@ import ast._ */ trait FullParameterization { - import tpd._ + import tpd.* /** If references to original symbol `referenced` from within fully parameterized method * `derived` should be rewired to some fully parameterized method, the rewiring target symbol, @@ -207,7 +206,7 @@ trait FullParameterization { .subst(origLeadingTypeParamSyms ++ origOtherParamSyms, (trefs ++ argRefs).tpes) .substThisUnlessStatic(origClass, thisRef.tpe), treeMap = { - case tree: This if tree.symbol == origClass => thisRef + case tree: This if tree.symbol == origClass => thisRef.withSpan(tree.span) case tree => rewireTree(tree, Nil) orElse tree }, oldOwners = origMeth :: Nil, diff --git a/compiler/src/dotty/tools/dotc/transform/FunctionXXLForwarders.scala b/compiler/src/dotty/tools/dotc/transform/FunctionXXLForwarders.scala index cc1c0048b68f..4cf176cfda3a 100644 --- a/compiler/src/dotty/tools/dotc/transform/FunctionXXLForwarders.scala +++ b/compiler/src/dotty/tools/dotc/transform/FunctionXXLForwarders.scala @@ -1,16 +1,16 @@ package dotty.tools.dotc package transform -import core._ +import core.* import Constants.Constant -import Contexts._ -import Flags._ -import Definitions._ -import DenotTransformers._ -import StdNames._ -import Symbols._ -import MegaPhase._ -import Types._ +import Contexts.* +import Flags.* +import Definitions.* +import DenotTransformers.* +import StdNames.* +import Symbols.* +import MegaPhase.* +import Types.* /** This phase adds forwarder for XXL functions `apply` methods that are implemented with a method @@ -23,7 +23,7 @@ import Types._ * is generated. */ class FunctionXXLForwarders extends MiniPhase with IdentityDenotTransformer { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = FunctionXXLForwarders.name diff --git a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala index a1baeac272b9..c75ac9982317 100644 --- a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala +++ b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala @@ -2,20 +2,19 @@ package dotty.tools package dotc package transform -import core.Annotations._ -import core.Contexts._ -import core.Phases._ +import core.Annotations.* +import core.Contexts.* +import core.Phases.* import core.Decorators.* import core.Definitions -import core.Flags._ +import core.Flags.* import core.Names.Name -import core.Symbols._ +import core.Symbols.* import core.TypeApplications.{EtaExpansion, TypeParamInfo} -import core.TypeErasure.{erasedGlb, erasure, fullErasure, isGenericArrayElement} -import core.Types._ +import core.TypeErasure.{erasedGlb, erasure, fullErasure, isGenericArrayElement, tupleArity} +import core.Types.* import core.classfile.ClassfileConstants -import SymUtils._ -import TypeUtils._ + import config.Printers.transforms import reporting.trace import java.lang.StringBuilder @@ -255,10 +254,10 @@ object GenericSignatures { case _ => jsig(elemtp) case RefOrAppliedType(sym, pre, args) => - if (sym == defn.PairClass && tp.tupleArity > Definitions.MaxTupleArity) + if (sym == defn.PairClass && tupleArity(tp) > Definitions.MaxTupleArity) jsig(defn.TupleXXLClass.typeRef) else if (isTypeParameterInSig(sym, sym0)) { - assert(!sym.isAliasType, "Unexpected alias type: " + sym) + assert(!sym.isAliasType || sym.info.isLambdaSub, "Unexpected alias type: " + sym) typeParamSig(sym.name.lastPart) } else if (defn.specialErasure.contains(sym)) @@ -273,7 +272,7 @@ object GenericSignatures { 
if (!primitiveOK) jsig(defn.ObjectType) else if (sym == defn.UnitClass) jsig(defn.BoxedUnitClass.typeRef) else builder.append(defn.typeTag(sym.info)) - else if (ValueClasses.isDerivedValueClass(sym)) { + else if (sym.isDerivedValueClass) { val erasedUnderlying = fullErasure(tp) if (erasedUnderlying.isPrimitiveValueType && !primitiveOK) classSig(sym, pre, args) @@ -407,7 +406,6 @@ object GenericSignatures { // only refer to type params that will actually make it into the sig, this excludes: - // * higher-order type parameters // * type parameters appearing in method parameters // * type members not visible in an enclosing template private def isTypeParameterInSig(sym: Symbol, initialSymbol: Symbol)(using Context) = diff --git a/compiler/src/dotty/tools/dotc/transform/Getters.scala b/compiler/src/dotty/tools/dotc/transform/Getters.scala index ad06bfb0a504..43289209d146 100644 --- a/compiler/src/dotty/tools/dotc/transform/Getters.scala +++ b/compiler/src/dotty/tools/dotc/transform/Getters.scala @@ -1,17 +1,16 @@ package dotty.tools.dotc package transform -import core._ +import core.* import DenotTransformers.SymTransformer -import Contexts._ +import Contexts.* import SymDenotations.SymDenotation -import Types._ -import Symbols._ -import MegaPhase._ -import Flags._ -import ValueClasses._ -import SymUtils._ -import NameOps._ +import Types.* +import Symbols.* +import MegaPhase.* +import Flags.* + +import NameOps.* /** Performs the following rewritings for fields of a class: @@ -57,7 +56,7 @@ import NameOps._ * This allows subsequent code motions in Flatten. */ class Getters extends MiniPhase with SymTransformer { thisPhase => - import ast.tpd._ + import ast.tpd.* override def phaseName: String = Getters.name @@ -66,7 +65,7 @@ class Getters extends MiniPhase with SymTransformer { thisPhase => override def transformSym(d: SymDenotation)(using Context): SymDenotation = { def noGetterNeeded = d.isOneOf(NoGetterNeededFlags) || - d.isAllOf(PrivateLocal) && !d.owner.is(Trait) && !isDerivedValueClass(d.owner) && !d.is(Lazy) || + d.isAllOf(PrivateLocal) && !d.owner.is(Trait) && !d.owner.isDerivedValueClass && !d.is(Lazy) || d.is(Module) && d.isStatic || d.hasAnnotation(defn.ScalaStaticAnnot) || d.isSelfSym diff --git a/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala b/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala index 9a36d65babe8..96cffeb1097d 100644 --- a/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala +++ b/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala @@ -1,18 +1,18 @@ package dotty.tools.dotc package transform -import MegaPhase._ -import core.DenotTransformers._ -import core.Symbols._ -import core.Contexts._ +import MegaPhase.* +import core.DenotTransformers.* +import core.Symbols.* +import core.Contexts.* import ast.TreeTypeMap -import core.Types._ -import core.Flags._ -import core.Decorators._ +import core.Types.* +import core.Flags.* +import core.Decorators.* import collection.mutable -import ast.Trees._ +import ast.Trees.* import core.NameKinds.SuperArgName -import SymUtils._ + import core.Decorators.* object HoistSuperArgs { @@ -43,7 +43,7 @@ object HoistSuperArgs { * or, if that is a package, it is made a static method of the class itself. 
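For context on the HoistSuperArgs doc comment above, a rough source-level sketch of the hoisting; the names `Base`, `Child`, and `ChildHoisted.superArg$1` are hypothetical (the real phase works on trees and fresh `SuperArgName` symbols), only the shape of the rewrite is taken from the comment:

```scala
class Base(f: Int => Int)

// Before: the super-call argument closes over the constructor parameter `n`.
class Child(n: Int) extends Base(x => x + n)

// After (conceptually): the argument is computed by a static method of the
// companion, so nothing complex is evaluated inside the super call itself.
object ChildHoisted:
  def superArg$1(n: Int): Int => Int = x => x + n
class ChildHoisted(n: Int) extends Base(ChildHoisted.superArg$1(n))
```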
*/ class HoistSuperArgs extends MiniPhase with IdentityDenotTransformer { thisPhase => - import ast.tpd._ + import ast.tpd.* override def phaseName: String = HoistSuperArgs.name diff --git a/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala b/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala index 798f34757b35..18333ae506fd 100644 --- a/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala +++ b/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala @@ -2,11 +2,11 @@ package dotty.tools package dotc package transform -import core._ -import MegaPhase._ -import Symbols._, Contexts._, Types._, Decorators._ -import NameOps._ -import Names._ +import core.* +import MegaPhase.* +import Symbols.*, Contexts.*, Types.*, Decorators.* +import NameOps.* +import Names.* import scala.collection.mutable.ListBuffer @@ -26,7 +26,7 @@ import scala.collection.mutable.ListBuffer * This removes placeholders added by inline `unapply`/`unapplySeq` patterns. */ class InlinePatterns extends MiniPhase: - import ast.tpd._ + import ast.tpd.* override def phaseName: String = InlinePatterns.name diff --git a/compiler/src/dotty/tools/dotc/transform/InlineVals.scala b/compiler/src/dotty/tools/dotc/transform/InlineVals.scala index 047a187bad68..cff1632ffcd2 100644 --- a/compiler/src/dotty/tools/dotc/transform/InlineVals.scala +++ b/compiler/src/dotty/tools/dotc/transform/InlineVals.scala @@ -2,17 +2,17 @@ package dotty.tools package dotc package transform -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.transform.MegaPhase.MiniPhase import dotty.tools.dotc.inlines.Inlines /** Check that `tree.rhs` can be right hand-side of an `inline` value definition. 
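The doc comment above describes checking that an `inline val`'s right-hand side is a constant. A minimal illustration with hypothetical values; the rejected case is left commented out:

```scala
inline val bitWidth = 8          // OK: rhs is a literal constant, type is 8
inline val label = "id-" + "42"  // OK: constant-folds to the literal "id-42"

// inline val stamp = System.currentTimeMillis() // rejected: not a constant
```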
*/
class InlineVals extends MiniPhase:
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = InlineVals.name
diff --git a/compiler/src/dotty/tools/dotc/transform/Inlining.scala b/compiler/src/dotty/tools/dotc/transform/Inlining.scala
index 10f73fa94e08..907fe948ac30 100644
--- a/compiler/src/dotty/tools/dotc/transform/Inlining.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Inlining.scala
@@ -1,14 +1,14 @@
package dotty.tools.dotc
package transform
-import core._
-import Flags._
-import Contexts._
-import Symbols._
-import SymUtils._
+import core.*
+import Flags.*
+import Contexts.*
+import Symbols.*
+
import dotty.tools.dotc.ast.tpd
-import dotty.tools.dotc.ast.Trees._
-import dotty.tools.dotc.quoted._
+import dotty.tools.dotc.ast.Trees.*
+import dotty.tools.dotc.quoted.*
import dotty.tools.dotc.inlines.Inlines
import dotty.tools.dotc.ast.TreeMapWithImplicits
import dotty.tools.dotc.core.DenotTransformers.IdentityDenotTransformer
@@ -17,9 +17,10 @@ import dotty.tools.dotc.staging.StagingLevel
import scala.collection.mutable.ListBuffer
/** Inlines all calls to inline methods that are not in an inline method or a quote */
-class Inlining extends MacroTransform {
+class Inlining extends MacroTransform, IdentityDenotTransformer {
+ self =>
- import tpd._
+ import tpd.*
override def phaseName: String = Inlining.name
@@ -75,7 +76,7 @@ class Inlining extends MacroTransform {
&& StagingLevel.level == 0
&& MacroAnnotations.hasMacroAnnotation(tree.symbol)
then
- val trees = (new MacroAnnotations).expandAnnotations(tree)
+ val trees = (new MacroAnnotations(self)).expandAnnotations(tree)
val trees1 = trees.map(super.transform)
// Find classes added to the top level from a package object
diff --git a/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala b/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala
index 29572a4ae30d..2723f726f064 100644
--- a/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala
+++ b/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala
@@ -97,7 +97,9 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer:
id = id,
start = pos.start,
end = pos.end,
- line = pos.line,
+ // +1 to account for the line number starting at 1
+ // the internal line number is 0-based https://github.com/lampepfl/dotty/blob/18ada516a85532524a39a962b2ddecb243c65376/compiler/src/dotty/tools/dotc/util/SourceFile.scala#L173-L176
+ line = pos.line + 1,
desc = sourceFile.content.slice(pos.start, pos.end).mkString,
symbolName = tree.symbol.name.toSimpleName.toString,
treeName = tree.getClass.getSimpleName.nn,
@@ -188,12 +190,9 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer:
* If the tree is empty, return itself and don't instrument.
*/
private def transformBranch(tree: Tree)(using Context): Tree =
- import dotty.tools.dotc.core.Decorators.{show,i}
- if tree.isEmpty || tree.span.isSynthetic then
+ if tree.isEmpty then
// - If t.isEmpty then `transform(t) == t` always hold,
// so we can avoid calling transform in that case.
- // - If tree.span.isSynthetic then the branch has been generated
- // by the frontend phases, so we don't want to instrument it.
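A small sketch of the off-by-one conversion introduced above: compiler positions count lines from 0, while coverage output expects 1-based lines. The `Pos` class here is a hypothetical stand-in, not the compiler's position type:

```scala
final case class Pos(line: Int)                // 0-based, compiler-style
def coverageLine(pos: Pos): Int = pos.line + 1 // what the report should show

@main def lineDemo(): Unit =
  assert(coverageLine(Pos(0)) == 1)            // first line of a file is line 1
```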
tree else val transformed = transform(tree) @@ -237,8 +236,8 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: val InstrumentedParts(pre, coverageCall, expr) = tryInstrument(fun) if coverageCall.isEmpty then - // `fun` cannot be instrumented, and `args` is a type so we keep this tree as it is - tree + // `fun` cannot be instrumented and `args` is a type, but `expr` may have been transformed + cpy.TypeApply(tree)(expr, args) else // expr[T] shouldn't be transformed to: // {invoked(...), expr}[T] @@ -353,10 +352,8 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: // recursively transform the guard, but keep the pat val transformedGuard = transform(guard) - // ensure that the body is always instrumented by inserting a call to Invoker.invoked at its beginning - val coverageCall = createInvokeCall(tree.body, pos) - val transformedBody = transform(tree.body) - val instrumentedBody = InstrumentedParts.singleExprTree(coverageCall, transformedBody) + // ensure that the body is always instrumented as a branch + val instrumentedBody = transformBranch(tree.body) cpy.CaseDef(tree)(pat, transformedGuard, instrumentedBody) @@ -457,8 +454,13 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: * they shouldn't be lifted. */ val sym = fun.symbol - sym.exists && (isShortCircuitedOp(sym) || StringInterpolatorOpt.isCompilerIntrinsic(sym)) - end + sym.exists && ( + isShortCircuitedOp(sym) + || StringInterpolatorOpt.isCompilerIntrinsic(sym) + || sym == defn.Object_synchronized + || isContextFunctionApply(fun) + ) + end isUnliftableFun val fun = tree.fun val nestedApplyNeedsLift = fun match @@ -468,6 +470,12 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: nestedApplyNeedsLift || !isUnliftableFun(fun) && !tree.args.isEmpty && !tree.args.forall(LiftCoverage.noLift) + private def isContextFunctionApply(fun: Tree)(using Context): Boolean = + fun match + case Select(prefix, nme.apply) => + defn.isContextFunctionType(prefix.tpe.widen) + case _ => false + /** Check if an Apply can be instrumented. Prevents this phase from generating incorrect code. */ private def canInstrumentApply(tree: Apply)(using Context): Boolean = def isSecondaryCtorDelegateCall: Boolean = tree.fun match diff --git a/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala b/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala index 046147f20d82..9f99e7a6fbd3 100644 --- a/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala +++ b/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala @@ -2,23 +2,25 @@ package dotty.tools package dotc package transform -import core._ -import Contexts._ -import Symbols._ -import Flags._ - -import Decorators._ -import MegaPhase._ -import Names._ +import core.* +import Contexts.* +import Symbols.* +import Flags.* + +import Decorators.* +import MegaPhase.* +import Names.* import Constants.Constant +import scala.compiletime.uninitialized + /** The phase is enabled if the -Yinstrument option is set. * If enabled, it counts the number of closures or allocations for each source position. * It does this by generating a call to dotty.tools.dotc.util.Stats.doRecord. 
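The `isContextFunctionApply` helper added to InstrumentCoverage above recognizes `apply` selections on context function values. A minimal example of the shape involved, with hypothetical names; the `apply` is inserted by the compiler during elaboration, which is presumably why such calls are treated as unliftable:

```scala
type Greeter = String ?=> String

val greet: Greeter = "Hello, " + summon[String]

@main def greetDemo(): Unit =
  println(greet(using "world")) // elaborated to a compiler-inserted `apply`
```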
*/ class Instrumentation extends MiniPhase { thisPhase => - import ast.tpd._ + import ast.tpd.* override def phaseName: String = Instrumentation.name @@ -40,11 +42,11 @@ class Instrumentation extends MiniPhase { thisPhase => "::", "+=", "toString", "newArray", "box", "toCharArray", "termName", "typeName", "slice", "staticRef", "requiredClass") - private var namesToRecord: Set[Name] = _ - private var collectionNamesToRecord: Set[Name] = _ - private var Stats_doRecord: Symbol = _ - private var Stats_doRecordSize: Symbol = _ - private var CollectionIterableClass: ClassSymbol = _ + private var namesToRecord: Set[Name] = uninitialized + private var collectionNamesToRecord: Set[Name] = uninitialized + private var Stats_doRecord: Symbol = uninitialized + private var Stats_doRecordSize: Symbol = uninitialized + private var CollectionIterableClass: ClassSymbol = uninitialized override def prepareForUnit(tree: Tree)(using Context): Context = namesToRecord = namesOfInterest.map(_.toTermName).toSet diff --git a/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala b/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala index c95500d856be..c2fdccc2861e 100644 --- a/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala +++ b/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala @@ -3,11 +3,11 @@ package transform import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.core.Constants.Constant -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Names.TermName -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.transform.MegaPhase.MiniPhase object InterceptedMethods { @@ -23,7 +23,7 @@ object InterceptedMethods { * using the most precise overload available */ class InterceptedMethods extends MiniPhase { - import tpd._ + import tpd.* override def phaseName: String = InterceptedMethods.name diff --git a/compiler/src/dotty/tools/dotc/transform/IsInstanceOfEvaluator.scala.disabled b/compiler/src/dotty/tools/dotc/transform/IsInstanceOfEvaluator.scala.disabled index 68b493a0b9db..f6df2be19dc4 100644 --- a/compiler/src/dotty/tools/dotc/transform/IsInstanceOfEvaluator.scala.disabled +++ b/compiler/src/dotty/tools/dotc/transform/IsInstanceOfEvaluator.scala.disabled @@ -1,11 +1,11 @@ package dotty.tools.dotc package transform -import dotty.tools.dotc.util.Positions._ +import dotty.tools.dotc.util.Positions.* import MegaPhase.MiniPhase -import core._ -import Contexts._, Types._, Constants._, Decorators._, Symbols._ -import TypeUtils._, TypeErasure._, Flags._ +import core.* +import Contexts.*, Types.*, Constants.*, Decorators.*, Symbols.* +import TypeUtils.*, TypeErasure.*, Flags.* /** Implements partial evaluation of `sc.isInstanceOf[Sel]` according to: * @@ -31,7 +31,7 @@ import TypeUtils._, TypeErasure._, Flags._ */ class IsInstanceOfEvaluator extends MiniPhase { - import dotty.tools.dotc.ast.tpd._ + import dotty.tools.dotc.ast.tpd.* val phaseName = "isInstanceOfEvaluator" diff --git a/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala b/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala index 6ec0f330efff..47a280af6abc 100644 --- a/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala +++ b/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala @@ -1,25 +1,27 @@ package dotty.tools.dotc 
package transform -import MegaPhase._ +import MegaPhase.* import core.Denotations.NonSymSingleDenotation -import core.DenotTransformers._ -import core.Symbols._ -import core.Contexts._ -import core.Types._ -import core.Flags._ -import core.Decorators._ +import core.DenotTransformers.* +import core.Symbols.* +import core.Contexts.* +import core.Types.* +import core.Flags.* +import core.Decorators.* import core.StdNames.nme -import core.Names._ -import core.NameOps._ +import core.Names.* +import core.NameOps.* import core.NameKinds.ExpandPrefixName -import SymUtils._ + import ExplicitOuter.outer import util.Store import collection.mutable.{HashMap, LinkedHashMap, ListBuffer} +import scala.compiletime.uninitialized + object LambdaLift: - import ast.tpd._ + import ast.tpd.* val name: String = "lambdaLift" val description: String = "lifts out nested functions to class scope" @@ -249,8 +251,8 @@ end LambdaLift * } */ class LambdaLift extends MiniPhase with IdentityDenotTransformer { thisPhase => - import LambdaLift._ - import ast.tpd._ + import LambdaLift.* + import ast.tpd.* override def phaseName: String = LambdaLift.name @@ -266,7 +268,7 @@ class LambdaLift extends MiniPhase with IdentityDenotTransformer { thisPhase => // lambda lift for super calls right. Witness the implementation restrictions to // this effect in scalac. - private var Lifter: Store.Location[Lifter] = _ + private var Lifter: Store.Location[Lifter] = uninitialized private def lifter(using Context) = ctx.store(Lifter) override def initContext(ctx: FreshContext): Unit = diff --git a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala index b433e37e39c0..e2712a7d6302 100644 --- a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala +++ b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala @@ -16,13 +16,13 @@ import core.Types.* import core.{Names, StdNames} import dotty.tools.dotc.config.Feature import transform.MegaPhase.MiniPhase -import transform.SymUtils.* import scala.collection.mutable +import scala.compiletime.uninitialized class LazyVals extends MiniPhase with IdentityDenotTransformer { - import LazyVals._ - import tpd._ + import LazyVals.* + import tpd.* /** * The map contains the list of the offset trees. @@ -47,7 +47,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { val containerFlagsMask: FlagSet = Method | Lazy | Accessor | Module /** A map of lazy values to the fields they should null after initialization. 
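For context on the `lazyValNullables` map mentioned above: a field used only by a lazy val's initializer can be nulled once the lazy val is forced, releasing its memory. A rough illustration with hypothetical names:

```scala
class Digest(input: Array[Byte]):
  private val buffer = input.clone()  // referenced only by `checksum`
  lazy val checksum: Int =
    buffer.foldLeft(0)(_ + _)         // after initialization, `buffer` can be
                                      // nulled and garbage-collected
```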
*/ - private var lazyValNullables: IdentityHashMap[Symbol, mutable.ListBuffer[Symbol]] | Null = _ + private var lazyValNullables: IdentityHashMap[Symbol, mutable.ListBuffer[Symbol]] | Null = uninitialized private def nullableFor(sym: Symbol)(using Context) = { // optimisation: value only used once, we can remove the value from the map val nullables = lazyValNullables.nn.remove(sym) @@ -455,8 +455,8 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { } def transformMemberDefThreadSafeNew(x: ValOrDefDef)(using Context): Thicket = { - import dotty.tools.dotc.core.Types._ - import dotty.tools.dotc.core.Flags._ + import dotty.tools.dotc.core.Types.* + import dotty.tools.dotc.core.Flags.* val claz = x.symbol.owner.asClass val thizClass = Literal(Constant(claz.info)) diff --git a/compiler/src/dotty/tools/dotc/transform/LetOverApply.scala b/compiler/src/dotty/tools/dotc/transform/LetOverApply.scala index e7ff6d10c222..302001347d67 100644 --- a/compiler/src/dotty/tools/dotc/transform/LetOverApply.scala +++ b/compiler/src/dotty/tools/dotc/transform/LetOverApply.scala @@ -2,9 +2,9 @@ package dotty.tools package dotc package transform -import core._ -import Contexts._, Symbols._, Decorators._ -import MegaPhase._ +import core.* +import Contexts.*, Symbols.*, Decorators.* +import MegaPhase.* /** Rewrite `{ stats; expr}.f(args)` to `{ stats; expr.f(args) }` and * `{ stats; expr }(args)` to `{ stats; expr(args) }` before proceeding, @@ -12,7 +12,7 @@ import MegaPhase._ * collapse applies of IFTs (this is done in Erasure). */ class LetOverApply extends MiniPhase: - import ast.tpd._ + import ast.tpd.* override def phaseName: String = LetOverApply.name diff --git a/compiler/src/dotty/tools/dotc/transform/LiftTry.scala b/compiler/src/dotty/tools/dotc/transform/LiftTry.scala deleted file mode 100644 index 6acb1013d509..000000000000 --- a/compiler/src/dotty/tools/dotc/transform/LiftTry.scala +++ /dev/null @@ -1,88 +0,0 @@ -package dotty.tools.dotc -package transform - -import MegaPhase._ -import core.DenotTransformers._ -import core.Symbols._ -import core.Contexts._ -import core.Types._ -import core.Flags._ -import core.Decorators._ -import core.NameKinds.LiftedTreeName -import NonLocalReturns._ -import util.Store - -/** Lifts try's that might be executed on non-empty expression stacks - * to their own methods. I.e. - * - * try body catch handler - * - * is lifted to - * - * { def liftedTree$n() = try body catch handler; liftedTree$n() } - * - * However, don't lift try's without catch expressions (try-finally). - * Lifting is needed only for try-catch expressions that are evaluated in a context - * where the stack might not be empty. `finally` does not attempt to continue evaluation - * after an exception, so the fact that values on the stack are 'lost' does not matter - * (copied from https://github.com/scala/scala/pull/922). 
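For reference, a sketch of the rewrite the LiftTry phase (deleted in this diff) used to perform; `liftedTree$1` is a hypothetical name echoing `LiftedTreeName`:

```scala
// Before: the try-catch result is consumed while `a` is already on the stack.
def f(a: Int): Int =
  a + (try a / 0 catch { case _: ArithmeticException => 0 })

// After (conceptually): the try runs on an empty expression stack.
def fLifted(a: Int): Int =
  def liftedTree$1(): Int =
    try a / 0 catch { case _: ArithmeticException => 0 }
  a + liftedTree$1()
```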
- */ -class LiftTry extends MiniPhase with IdentityDenotTransformer { thisPhase => - import ast.tpd._ - - override def phaseName: String = LiftTry.name - - override def description: String = LiftTry.description - - private var NeedLift: Store.Location[Boolean] = _ - private def needLift(using Context): Boolean = ctx.store(NeedLift) - - override def initContext(ctx: FreshContext): Unit = - NeedLift = ctx.addLocation(false) - - private def liftingCtx(p: Boolean)(using Context) = - if (needLift == p) ctx else ctx.fresh.updateStore(NeedLift, p) - - override def prepareForApply(tree: Apply)(using Context): Context = - liftingCtx(true) - - override def prepareForDefDef(tree: DefDef)(using Context): Context = - liftingCtx(false) - - override def prepareForValDef(tree: ValDef)(using Context): Context = - if !tree.symbol.exists - || tree.symbol.isSelfSym - || tree.symbol.owner == ctx.owner.enclosingMethod - && !tree.symbol.is(Lazy) - // The current implementation wraps initializers of lazy vals in - // calls to an initialize method, which means that a `try` in the - // initializer needs to be lifted. Note that the new scheme proposed - // in #6979 would avoid this. - then ctx - else liftingCtx(true) - - override def prepareForAssign(tree: Assign)(using Context): Context = - if (tree.lhs.symbol.maybeOwner == ctx.owner.enclosingMethod) ctx - else liftingCtx(true) - - override def prepareForReturn(tree: Return)(using Context): Context = - if (!isNonLocalReturn(tree)) ctx - else liftingCtx(true) - - override def prepareForTemplate(tree: Template)(using Context): Context = - liftingCtx(false) - - override def transformTry(tree: Try)(using Context): Tree = - if (needLift && tree.cases.nonEmpty) { - report.debuglog(i"lifting tree at ${tree.span}, current owner = ${ctx.owner}") - val fn = newSymbol( - ctx.owner, LiftedTreeName.fresh(), Synthetic | Method, - MethodType(Nil, tree.tpe.widenIfUnstable), coord = tree.span) - tree.changeOwnerAfter(ctx.owner, fn, thisPhase) - Block(DefDef(fn, tree) :: Nil, ref(fn).appliedToNone) - } - else tree -} -object LiftTry: - val name = "liftTry" - val description: String = "lift any try that might be executed on a non-empty expression stack" diff --git a/compiler/src/dotty/tools/dotc/transform/Literalize.scala.disabled b/compiler/src/dotty/tools/dotc/transform/Literalize.scala.disabled index 626cb9687df4..b9e6efe1b06b 100644 --- a/compiler/src/dotty/tools/dotc/transform/Literalize.scala.disabled +++ b/compiler/src/dotty/tools/dotc/transform/Literalize.scala.disabled @@ -1,17 +1,17 @@ package dotty.tools.dotc package transform -import MegaPhase._ -import core.DenotTransformers._ -import core.Symbols._ -import core.Contexts._ -import core.Types._ -import core.Flags._ -import core.Decorators._ +import MegaPhase.* +import core.DenotTransformers.* +import core.Symbols.* +import core.Contexts.* +import core.Types.* +import core.Flags.* +import core.Decorators.* import core.StdNames.nme -import ast.Trees._ +import ast.Trees.* import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Constants._ +import dotty.tools.dotc.core.Constants.* /** This phase rewrites idempotent expressions with constant types to Literals. * The constant types are eliminated by erasure, so we need to keep @@ -21,7 +21,7 @@ import dotty.tools.dotc.core.Constants._ * in the type of the literal. 
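The (disabled) Literalize phase described above rewrites idempotent expressions whose types are constant to the corresponding literals. Roughly, with hypothetical names:

```scala
final val answer = 42   // `answer` has the constant type 42

// An idempotent reference with a constant type can be replaced by the
// literal itself: the body below could be rewritten to plain `42`.
def theAnswer: 42 = answer
```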
*/ class Literalize extends MiniPhase { thisTransform => - import ast.tpd._ + import ast.tpd.* override def phaseName: String = "literalize" diff --git a/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala b/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala index cc2e6118d1fa..dbc1639f4b55 100644 --- a/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala +++ b/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala @@ -9,11 +9,11 @@ import dotty.tools.dotc.config.Printers.{macroAnnot => debug} import dotty.tools.dotc.core.Annotations.* import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Decorators.* -import dotty.tools.dotc.core.DenotTransformers.DenotTransformer +import dotty.tools.dotc.core.DenotTransformers.IdentityDenotTransformer import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.MacroClassLoader import dotty.tools.dotc.core.Symbols.* -import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.quoted.* import dotty.tools.dotc.util.SrcPos import scala.quoted.runtime.impl.{QuotesImpl, SpliceScope} @@ -23,7 +23,8 @@ import scala.util.control.NonFatal import java.lang.reflect.InvocationTargetException -class MacroAnnotations: +class MacroAnnotations(phase: IdentityDenotTransformer): + import tpd.* import MacroAnnotations.* @@ -53,38 +54,16 @@ class MacroAnnotations: debug.println(i"Expanding macro annotation: ${annot}") // Interpret call to `new myAnnot(..).transform(using )()` - val transformedTrees = - try callMacro(macroInterpreter, tree, annot) - catch - // TODO: Replace this case when scala.annaotaion.MacroAnnotation is no longer experimental and reflectiveSelectable is not used - // Replace this case with the nested cases. - case ex0: InvocationTargetException => - ex0.getCause match - case ex: scala.quoted.runtime.StopMacroExpansion => - if !ctx.reporter.hasErrors then - report.error("Macro expansion was aborted by the macro without any errors reported. Macros should issue errors to end-users when aborting a macro expansion with StopMacroExpansion.", annot.tree) - List(tree) - case Interpreter.MissingClassDefinedInCurrentRun(sym) => - Interpreter.suspendOnMissing(sym, annot.tree) - case NonFatal(ex) => - val stack0 = ex.getStackTrace.takeWhile(_.getClassName != "dotty.tools.dotc.transform.MacroAnnotations") - val stack = stack0.take(1 + stack0.lastIndexWhere(_.getMethodName == "transform")) - val msg = - em"""Failed to evaluate macro. 
- | Caused by ${ex.getClass}: ${if (ex.getMessage == null) "" else ex.getMessage}
- | ${stack.mkString("\n ")}
- |"""
- report.error(msg, annot.tree)
- List(tree)
- case _ =>
- throw ex0
+ val transformedTrees = callMacro(macroInterpreter, tree, annot)
transformedTrees.span(_.symbol != tree.symbol) match
case (prefixed, newTree :: suffixed) =>
allTrees ++= prefixed
insertedAfter = suffixed :: insertedAfter
- prefixed.foreach(checkMacroDef(_, tree, annot))
- suffixed.foreach(checkMacroDef(_, tree, annot))
- transform.TreeChecker.checkMacroGeneratedTree(tree, newTree)
+ for prefixedTree <- prefixed do
+ checkMacroDef(prefixedTree, tree, annot)
+ for suffixedTree <- suffixed do
+ checkMacroDef(suffixedTree, tree, annot)
+ TreeChecker.checkMacroGeneratedTree(tree, newTree)
newTree
case (Nil, Nil) =>
report.error(i"Unexpected `Nil` returned by `(${annot.tree}).transform(..)` during macro expansion", annot.tree.srcPos)
@@ -100,6 +79,7 @@ class MacroAnnotations:
insertedAfter.foreach(allTrees.++=)
val result = allTrees.result()
+ for tree <- result do enterMissingSymbols(tree)
debug.println(result.map(_.show).mkString("expanded to:\n", "\n", ""))
result
@@ -117,11 +97,34 @@ class MacroAnnotations:
assert(annotInstance.getClass.getClassLoader.loadClass("scala.annotation.MacroAnnotation").isInstance(annotInstance))
val quotes = QuotesImpl()(using SpliceScope.contextWithNewSpliceScope(tree.symbol.sourcePos)(using MacroExpansion.context(tree)).withOwner(tree.symbol.owner))
- annotInstance.transform(using quotes)(tree.asInstanceOf[quotes.reflect.Definition])
+ try annotInstance.transform(using quotes)(tree.asInstanceOf[quotes.reflect.Definition])
+ catch
+ // TODO: Replace this case when scala.annotation.MacroAnnotation is no longer experimental and reflectiveSelectable is not used
+ // Replace this case with the nested cases.
+ case ex0: InvocationTargetException =>
+ ex0.getCause match
+ case ex: scala.quoted.runtime.StopMacroExpansion =>
+ if !ctx.reporter.hasErrors then
+ report.error("Macro expansion was aborted by the macro without any errors reported. Macros should issue errors to end-users when aborting a macro expansion with StopMacroExpansion.", annot.tree)
+ List(tree)
+ case Interpreter.MissingClassDefinedInCurrentRun(sym) =>
+ Interpreter.suspendOnMissing(sym, annot.tree)
+ case NonFatal(ex) =>
+ val stack0 = ex.getStackTrace.takeWhile(_.getClassName != "dotty.tools.dotc.transform.MacroAnnotations")
+ val stack = stack0.take(1 + stack0.lastIndexWhere(_.getMethodName == "transform"))
+ val msg =
+ em"""Failed to evaluate macro.
+ | Caused by ${ex.getClass}: ${if (ex.getMessage == null) "" else ex.getMessage}
+ | ${stack.mkString("\n ")}
+ |"""
+ report.error(msg, annot.tree)
+ List(tree)
+ case _ =>
+ throw ex0
/** Check that this tree can be added by the macro annotation */
private def checkMacroDef(newTree: DefTree, annotatedTree: Tree, annot: Annotation)(using Context) =
- transform.TreeChecker.checkMacroGeneratedTree(annotatedTree, newTree)
+ TreeChecker.checkMacroGeneratedTree(annotatedTree, newTree)
val sym = newTree.symbol
val annotated = annotatedTree.symbol
if sym.isType && !sym.isClass then
@@ -131,6 +134,22 @@ class MacroAnnotations:
else if annotated.isClass && annotated.owner.is(Package) /*&& !sym.isClass*/ then
report.error(i"macro annotation can not add top-level ${sym.showKind}.
$annot tried to add $sym.", annot.tree) + /** + * Enter the symbols generated by MacroAnnotations + */ + private def enterMissingSymbols(tree: DefTree)(using Context) = new TreeTraverser { + def traverse(tree: tpd.Tree)(using Context): Unit = tree match + case tdef @ TypeDef(_, template: Template) => + val isSymbolInDecls = tdef.symbol.asClass.info.decls.toList.toSet + for tree <- template.body do + if tree.symbol.owner != tdef.symbol then + report.error(em"Macro added a definition with the wrong owner - ${tree.symbol.owner} - ${tdef.symbol} in ${tree.source}", tree.srcPos) + else if !isSymbolInDecls(tree.symbol) then + tree.symbol.enteredAfter(phase) + traverseChildren(tree) + case _ => traverseChildren(tree) + }.traverse(tree) + object MacroAnnotations: /** Is this an annotation that implements `scala.annation.MacroAnnotation` */ diff --git a/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala b/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala index 7bb7ed365ebe..887a962f7a65 100644 --- a/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala +++ b/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala @@ -1,17 +1,17 @@ package dotty.tools.dotc package transform -import core._ -import Phases._ -import ast.Trees._ -import Contexts._ +import core.* +import Phases.* +import ast.Trees.* +import Contexts.* /** A base class for transforms. * A transform contains a compiler phase which applies a tree transformer. */ abstract class MacroTransform extends Phase { - import ast.tpd._ + import ast.tpd.* override def run(using Context): Unit = { val unit = ctx.compilationUnit diff --git a/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala b/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala index b4e8c3acbc5c..252babe7058f 100644 --- a/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala +++ b/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala @@ -2,8 +2,10 @@ package dotty.tools package dotc package transform -import core._ -import Contexts._, Phases._, Symbols._, Decorators._ +import scala.compiletime.uninitialized + +import core.* +import Contexts.*, Phases.*, Symbols.*, Decorators.* import Flags.PackageVal import staging.StagingLevel.* @@ -14,7 +16,7 @@ import staging.StagingLevel.* * is described in his thesis. */ object MegaPhase { - import ast.tpd._ + import ast.tpd.* /** The base class of tree transforms. For each kind of tree K, there are * two methods which can be overridden: @@ -26,13 +28,13 @@ object MegaPhase { * * - Stats: to prepare/transform a statement sequence in a block, template, or package def, * - Unit : to prepare/transform a whole compilation unit - * - Other: to prepape/transform a tree that does not have a specific prepare/transform + * - Other: to prepare/transform a tree that does not have a specific prepare/transform * method pair. 
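To make the prepare/transform pairing described above concrete, a minimal hypothetical mini phase; the hook signatures follow the `prepareForX`/`transformX` pattern visible elsewhere in this diff:

```scala
import dotty.tools.dotc.ast.tpd.*
import dotty.tools.dotc.core.Contexts.*
import dotty.tools.dotc.transform.MegaPhase.MiniPhase

class DemoPhase extends MiniPhase:
  override def phaseName: String = "demo"

  // Runs on the way down the tree; may return a refined context.
  override def prepareForApply(tree: Apply)(using Context): Context = ctx

  // Runs on the way back up; may return a rewritten tree.
  override def transformApply(tree: Apply)(using Context): Tree = tree
```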
*/
abstract class MiniPhase extends Phase {
- private[MegaPhase] var superPhase: MegaPhase = _
- private[MegaPhase] var idxInGroup: Int = _
+ private[MegaPhase] var superPhase: MegaPhase = uninitialized
+ private[MegaPhase] var idxInGroup: Int = uninitialized
/** List of names of phases that should have finished their processing of all compilation units
* before this phase starts
@@ -136,16 +138,22 @@ object MegaPhase {
singletonGroup.run
}
}
-import MegaPhase._
+import MegaPhase.*
class MegaPhase(val miniPhases: Array[MiniPhase]) extends Phase {
- import ast.tpd._
+ import ast.tpd.*
override val phaseName: String =
if (miniPhases.length == 1) miniPhases(0).phaseName
else miniPhases.map(_.phaseName).mkString("MegaPhase{", ", ", "}")
- private var relaxedTypingCache: Boolean = _
+ /** Used in progress reporting to avoid overly long phase names; precision is not important here */
+ lazy val shortPhaseName: String =
+ if (miniPhases.length == 1) miniPhases(0).phaseName
+ else
+ s"MegaPhase{${miniPhases.head.phaseName},...,${miniPhases.last.phaseName}}"
+
+ private var relaxedTypingCache: Boolean = uninitialized
private var relaxedTypingKnown = false
override final def relaxedTyping: Boolean = {
@@ -396,7 +404,7 @@ class MegaPhase(val miniPhases: Array[MiniPhase]) extends Phase {
case tree: Inlined =>
inContext(prepInlined(tree, start)(using outerCtx)) {
val bindings = transformSpecificTrees(tree.bindings, start)
- val expansion = transformTree(tree.expansion, start)(using inlineContext(tree.call))
+ val expansion = transformTree(tree.expansion, start)(using inlineContext(tree))
goInlined(cpy.Inlined(tree)(tree.call, bindings, expansion), start)
}
case tree: Quote =>
diff --git a/compiler/src/dotty/tools/dotc/transform/Memoize.scala b/compiler/src/dotty/tools/dotc/transform/Memoize.scala
index 03ac15b39ffe..0b4d4c7dbf59 100644
--- a/compiler/src/dotty/tools/dotc/transform/Memoize.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Memoize.scala
@@ -1,24 +1,25 @@
package dotty.tools.dotc
package transform
-import core._
-import DenotTransformers._
-import Contexts._
+import core.*
+import DenotTransformers.*
+import Contexts.*
import Phases.*
import SymDenotations.SymDenotation
-import Denotations._
-import Symbols._
-import SymUtils._
-import Constants._
-import MegaPhase._
-import NameOps._
-import Flags._
-import Decorators._
+import Denotations.*
+import Symbols.*
+
+import Constants.*
+import MegaPhase.*
+import NameOps.*
+import Flags.*
+import Decorators.*
import StdNames.nme
-import sjs.JSSymUtils._
+import sjs.JSSymUtils.*
import util.Store
+import scala.compiletime.uninitialized
object Memoize {
val name: String = "memoize"
@@ -46,13 +47,13 @@ object Memoize {
*/
class Memoize extends MiniPhase with IdentityDenotTransformer { thisPhase =>
import Memoize.MyState
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = Memoize.name
override def description: String = Memoize.description
- private var MyState: Store.Location[MyState] = _
+ private var MyState: Store.Location[MyState] = uninitialized
private def myState(using Context): MyState = ctx.store(MyState)
override def initContext(ctx: FreshContext): Unit =
@@ -173,7 +174,7 @@ class Memoize extends MiniPhase with IdentityDenotTransformer { thisPhase =>
myState.classesThatNeedReleaseFence += sym.owner
val initializer =
if isErasableBottomField(field, tree.termParamss.head.head.tpt.tpe.classSymbol)
- then Literal(Constant(()))
+ then unitLiteral
else Assign(ref(field), adaptToField(field,
ref(tree.termParamss.head.head.symbol))) val setterDef = cpy.DefDef(tree)(rhs = transformFollowingDeep(initializer)(using ctx.withOwner(sym))) sym.keepAnnotationsCarrying(thisPhase, Set(defn.SetterMetaAnnot)) diff --git a/compiler/src/dotty/tools/dotc/transform/Mixin.scala b/compiler/src/dotty/tools/dotc/transform/Mixin.scala index 5ca09dd6188f..6df4bebde132 100644 --- a/compiler/src/dotty/tools/dotc/transform/Mixin.scala +++ b/compiler/src/dotty/tools/dotc/transform/Mixin.scala @@ -2,21 +2,21 @@ package dotty.tools package dotc package transform -import core._ -import MegaPhase._ -import Contexts._ -import Flags._ -import SymUtils._ -import Symbols._ -import SymDenotations._ -import Types._ -import Decorators._ -import DenotTransformers._ -import StdNames._ -import Names._ -import NameKinds._ -import NameOps._ -import ast.Trees._ +import core.* +import MegaPhase.* +import Contexts.* +import Flags.* + +import Symbols.* +import SymDenotations.* +import Types.* +import Decorators.* +import DenotTransformers.* +import StdNames.* +import Names.* +import NameKinds.* +import NameOps.* +import ast.Trees.* object Mixin { val name: String = "mixin" @@ -111,7 +111,7 @@ object Mixin { * are symbolic. */ class Mixin extends MiniPhase with SymTransformer { thisPhase => - import ast.tpd._ + import ast.tpd.* override def phaseName: String = Mixin.name @@ -184,7 +184,7 @@ class Mixin extends MiniPhase with SymTransformer { thisPhase => override def transformTemplate(impl: Template)(using Context): Template = { val cls = impl.symbol.owner.asClass val ops = new MixinOps(cls, thisPhase) - import ops._ + import ops.* def traitDefs(stats: List[Tree]): List[Tree] = { stats.flatMap { diff --git a/compiler/src/dotty/tools/dotc/transform/MixinOps.scala b/compiler/src/dotty/tools/dotc/transform/MixinOps.scala index fa1c09806893..1b2d3e79c9a4 100644 --- a/compiler/src/dotty/tools/dotc/transform/MixinOps.scala +++ b/compiler/src/dotty/tools/dotc/transform/MixinOps.scala @@ -1,15 +1,15 @@ package dotty.tools.dotc package transform -import core._ -import Symbols._, Types._, Contexts._, DenotTransformers._, Flags._ -import util.Spans._ -import SymUtils._ -import StdNames._, NameOps._ +import core.* +import Symbols.*, Types.*, Contexts.*, DenotTransformers.*, Flags.* +import util.Spans.* + +import StdNames.*, NameOps.* import typer.Nullables class MixinOps(cls: ClassSymbol, thisPhase: DenotTransformer)(using Context) { - import ast.tpd._ + import ast.tpd.* val superCls: Symbol = cls.superClass val mixins: List[ClassSymbol] = cls.mixins diff --git a/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala b/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala index db96aeefe231..95975ad9e6b8 100644 --- a/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala +++ b/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala @@ -1,26 +1,26 @@ package dotty.tools.dotc package transform -import core._ -import Flags._ -import Contexts._ -import Symbols._ +import core.* +import Flags.* +import Contexts.* +import Symbols.* import DenotTransformers.SymTransformer import Types.MethodType import Annotations.Annotation import SymDenotations.SymDenotation import Names.Name import StdNames.nme -import NameOps._ +import NameOps.* -import ast._ +import ast.* -import SymUtils._ -import MegaPhase._ + +import MegaPhase.* /** Move static methods from companion to the class itself */ class MoveStatics extends MiniPhase with SymTransformer { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = MoveStatics.name diff 
--git a/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala b/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala index a75d6da9dd6a..6ff81ab13cf1 100644 --- a/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala +++ b/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala @@ -1,15 +1,16 @@ package dotty.tools.dotc package transform -import core._ -import Contexts._, Symbols._, Types._, Flags._, StdNames._ -import MegaPhase._ +import core.* +import Contexts.*, Symbols.*, Types.*, Flags.*, StdNames.* +import MegaPhase.* import NameKinds.NonLocalReturnKeyName import config.SourceVersion.* import Decorators.em +import dotty.tools.dotc.config.MigrationVersion object NonLocalReturns { - import ast.tpd._ + import ast.tpd.* val name: String = "nonLocalReturns" val description: String = "expand non-local returns" @@ -26,8 +27,8 @@ class NonLocalReturns extends MiniPhase { override def description: String = NonLocalReturns.description - import NonLocalReturns._ - import ast.tpd._ + import NonLocalReturns.* + import ast.tpd.* override def runsAfter: Set[String] = Set(ElimByName.name) @@ -96,11 +97,10 @@ class NonLocalReturns extends MiniPhase { override def transformReturn(tree: Return)(using Context): Tree = if isNonLocalReturn(tree) then - report.gradualErrorOrMigrationWarning( + report.errorOrMigrationWarning( em"Non local returns are no longer supported; use `boundary` and `boundary.break` in `scala.util` instead", tree.srcPos, - warnFrom = `3.2`, - errorFrom = future) + MigrationVersion.NonLocalReturns) nonLocalReturnThrow(tree.expr, tree.from.symbol).withSpan(tree.span) else tree } diff --git a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala index 48dc7c818360..4020291dded0 100644 --- a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala +++ b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala @@ -2,12 +2,15 @@ package dotty.tools package dotc package transform -import core._ -import Flags._, Symbols._, Contexts._, Scopes._, Decorators._, Types.Type +import core.* +import Flags.*, Symbols.*, Contexts.*, Scopes.*, Decorators.*, Types.Type import NameKinds.DefaultGetterName -import NullOpsDecorator._ +import NullOpsDecorator.* import collection.immutable.BitSet import scala.annotation.tailrec +import cc.isCaptureChecking + +import scala.compiletime.uninitialized /** A module that can produce a kind of iterator (`Cursor`), * which yields all pairs of overriding/overridden symbols @@ -31,7 +34,7 @@ object OverridingPairs: */ protected def exclude(sym: Symbol): Boolean = !sym.memberCanMatchInheritedSymbols - || ctx.phase == Phases.checkCapturesPhase && sym.is(Recheck.ResetPrivate) + || isCaptureChecking && sym.is(Recheck.ResetPrivate) /** The parents of base that are checked when deciding whether an overriding * pair has already been treated in a parent class. 
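The NonLocalReturns migration above points users to `scala.util.boundary`. A sketch of the replacement pattern for a non-local `return` from inside a loop or lambda:

```scala
import scala.util.boundary, boundary.break

def firstNegative(xs: List[Int]): Option[Int] =
  boundary:
    for x <- xs do
      if x < 0 then break(Some(x)) // replaces a non-local `return Some(x)`
    None
```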
@@ -117,10 +120,10 @@ object OverridingPairs: private var nextEntry = curEntry /** The current candidate symbol for overriding */ - var overriding: Symbol = _ + var overriding: Symbol = uninitialized /** If not null: The symbol overridden by overriding */ - var overridden: Symbol = _ + var overridden: Symbol = uninitialized //@M: note that next is called once during object initialization final def hasNext: Boolean = nextEntry != null diff --git a/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala b/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala index 8c93ffb90232..5c038cee2617 100644 --- a/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala +++ b/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala @@ -2,10 +2,10 @@ package dotty.tools package dotc package transform -import core._ -import Contexts._, Types._, Symbols._, Flags._, TypeUtils._, DenotTransformers._, StdNames._ -import Decorators._ -import MegaPhase._ +import core.* +import Contexts.*, Types.*, Symbols.*, Flags.*, DenotTransformers.*, StdNames.* +import Decorators.* +import MegaPhase.* import NameKinds.ParamAccessorName /** For all private parameter accessors diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala index ac1e1868f26e..7b196692a9c9 100644 --- a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala +++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala @@ -2,22 +2,21 @@ package dotty.tools package dotc package transform -import core._ -import MegaPhase._ -import Symbols._, Contexts._, Types._, StdNames._, NameOps._ +import core.* +import MegaPhase.* +import Symbols.*, Contexts.*, Types.*, StdNames.*, NameOps.* import patmat.SpaceEngine -import util.Spans._ +import util.Spans.* import typer.Applications.* -import SymUtils._ -import TypeUtils.* + import Annotations.* -import Flags._, Constants._ -import Decorators._ +import Flags.*, Constants.* +import Decorators.* import NameKinds.{PatMatStdBinderName, PatMatAltsName, PatMatResultName} import config.Printers.patmatch -import reporting._ -import ast._ -import util.Property._ +import reporting.* +import ast.* +import util.Property.* import scala.annotation.tailrec import scala.collection.mutable @@ -27,8 +26,8 @@ import scala.collection.mutable * where every pattern is an integer or string constant */ class PatternMatcher extends MiniPhase { - import ast.tpd._ - import PatternMatcher._ + import ast.tpd.* + import PatternMatcher.* override def phaseName: String = PatternMatcher.name @@ -36,6 +35,13 @@ class PatternMatcher extends MiniPhase { override def runsAfter: Set[String] = Set(ElimRepeated.name) + private val InInlinedCode = new util.Property.Key[Boolean] + private def inInlinedCode(using Context) = ctx.property(InInlinedCode).getOrElse(false) + + override def prepareForInlined(tree: Inlined)(using Context): Context = + if inInlinedCode then ctx + else ctx.fresh.setProperty(InInlinedCode, true) + override def transformMatch(tree: Match)(using Context): Tree = if (tree.isInstanceOf[InlineMatch]) tree else { @@ -47,16 +53,16 @@ class PatternMatcher extends MiniPhase { case rt => tree.tpe val translated = new Translator(matchType, this).translateMatch(tree) - // check exhaustivity and unreachability - SpaceEngine.checkExhaustivity(tree) - SpaceEngine.checkRedundancy(tree) + if !inInlinedCode then + // check exhaustivity and unreachability + SpaceEngine.checkMatch(tree) translated.ensureConforms(matchType) } } object 
PatternMatcher { - import ast.tpd._ + import ast.tpd.* val name: String = "patternMatcher" val description: String = "compile pattern matches" @@ -327,10 +333,10 @@ object PatternMatcher { /** Plan for matching the result of an unapply against argument patterns `args` */ def unapplyPlan(unapp: Tree, args: List[Tree]): Plan = { def caseClass = unapp.symbol.owner.linkedClass - lazy val caseAccessors = caseClass.caseAccessors.filter(_.is(Method)) + lazy val caseAccessors = caseClass.caseAccessors def isSyntheticScala2Unapply(sym: Symbol) = - sym.isAllOf(SyntheticCase) && sym.owner.is(Scala2x) + sym.is(Synthetic) && sym.owner.is(Scala2x) def tupleApp(i: Int, receiver: Tree) = // manually inlining the call to NonEmptyTuple#apply, because it's an inline method ref(defn.RuntimeTuplesModule) @@ -353,13 +359,13 @@ object PatternMatcher { .map(ref(unappResult).select(_)) matchArgsPlan(selectors, args, onSuccess) } + else if (isUnapplySeq && unapplySeqTypeElemTp(unapp.tpe.widen.finalResultType).exists) { + unapplySeqPlan(unappResult, args) + } else if (isUnapplySeq && isProductSeqMatch(unapp.tpe.widen, args.length, unapp.srcPos)) { val arity = productArity(unapp.tpe.widen, unapp.srcPos) unapplyProductSeqPlan(unappResult, args, arity) } - else if (isUnapplySeq && unapplySeqTypeElemTp(unapp.tpe.widen.finalResultType).exists) { - unapplySeqPlan(unappResult, args) - } else if unappResult.info <:< defn.NonEmptyTupleTypeRef then val components = (0 until foldApplyTupleType(unappResult.denot.info).length).toList.map(tupleApp(_, ref(unappResult))) matchArgsPlan(components, args, onSuccess) @@ -370,8 +376,9 @@ object PatternMatcher { val arity = productArity(get.tpe, unapp.srcPos) if (isUnapplySeq) letAbstract(get) { getResult => - if (arity > 0) unapplyProductSeqPlan(getResult, args, arity) - else unapplySeqPlan(getResult, args) + if unapplySeqTypeElemTp(get.tpe).exists + then unapplySeqPlan(getResult, args) + else unapplyProductSeqPlan(getResult, args, arity) } else letAbstract(get) { getResult => @@ -947,7 +954,7 @@ object PatternMatcher { case LabeledPlan(label, expr) => Labeled(label, emit(expr)) case ReturnPlan(label) => - Return(Literal(Constant(())), ref(label)) + Return(unitLiteral, ref(label)) case plan: SeqPlan => def default = seq(emit(plan.head) :: Nil, emit(plan.tail)) def maybeEmitSwitch(scrutinee: Tree): Tree = { @@ -1023,7 +1030,7 @@ object PatternMatcher { case Block((_: ValDef) :: Block(_, Match(_, cases)) :: Nil, _) => cases case _ => Nil val caseThreshold = - if ValueClasses.isDerivedValueClass(tpt.tpe.typeSymbol) then 1 + if tpt.tpe.typeSymbol.isDerivedValueClass then 1 else MinSwitchCases def typesInPattern(pat: Tree): List[Type] = pat match case Alternative(pats) => pats.flatMap(typesInPattern) diff --git a/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala b/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala index 15a1a823589c..624ec9628d87 100644 --- a/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala +++ b/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala @@ -1,29 +1,29 @@ package dotty.tools.dotc package transform -import core._ -import Decorators._ -import Flags._ -import Types._ -import Contexts._ -import Symbols._ -import Constants._ -import ast.Trees._ +import core.* +import Decorators.* +import Flags.* +import Types.* +import Contexts.* +import Symbols.* +import Constants.* +import ast.Trees.* import ast.untpd import ast.TreeTypeMap -import SymUtils._ -import NameKinds._ + +import NameKinds.* import dotty.tools.dotc.ast.tpd import 
dotty.tools.dotc.ast.untpd import dotty.tools.dotc.config.ScalaRelease.* -import scala.collection.mutable -import dotty.tools.dotc.core.Annotations._ -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.quoted._ +import dotty.tools.dotc.core.Annotations.* +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.quoted.* import dotty.tools.dotc.inlines.Inlines import scala.annotation.constructorOnly +import scala.collection.mutable /** Translates quoted terms and types to `unpickleExprV2` or `unpickleType` method calls. * @@ -69,8 +69,8 @@ import scala.annotation.constructorOnly * */ class PickleQuotes extends MacroTransform { - import PickleQuotes._ - import tpd._ + import PickleQuotes.* + import tpd.* override def phaseName: String = PickleQuotes.name @@ -106,16 +106,19 @@ class PickleQuotes extends MacroTransform { private def extractHolesContents(quote: tpd.Quote)(using Context): (List[Tree], tpd.Quote) = class HoleContentExtractor extends Transformer: private val holeContents = List.newBuilder[Tree] + private val stagedClasses = mutable.HashSet.empty[Symbol] override def transform(tree: tpd.Tree)(using Context): tpd.Tree = tree match case tree @ Hole(isTerm, _, _, content) => assert(isTerm) assert(!content.isEmpty) holeContents += content - val holeType = getTermHoleType(tree.tpe) + val holeType = getPicklableHoleType(tree.tpe, stagedClasses) val hole = untpd.cpy.Hole(tree)(content = EmptyTree).withType(holeType) - cpy.Inlined(tree)(EmptyTree, Nil, hole) + Inlined(EmptyTree, Nil, hole).withSpan(tree.span) case tree: DefTree => + if tree.symbol.isClass then + stagedClasses += tree.symbol val newAnnotations = tree.symbol.annotations.mapconserve { annot => annot.derivedAnnotation(transform(annot.tree)(using ctx.withOwner(tree.symbol))) } @@ -134,19 +137,6 @@ class PickleQuotes extends MacroTransform { } } - /** Remove references to local types that will not be defined in this quote */ - private def getTermHoleType(using Context) = new TypeMap() { - override def apply(tp: Type): Type = tp match - case tp @ TypeRef(NoPrefix, _) => - // reference to term with a type defined in outer quote - getTypeHoleType(tp) - case tp @ TermRef(NoPrefix, _) => - // widen term refs to terms defined in outer quote - apply(tp.widenTermRefExpr) - case tp => - mapOver(tp) - } - /** Get the holeContents of the transformed tree */ def getContents() = val res = holeContents.result @@ -196,11 +186,11 @@ class PickleQuotes extends MacroTransform { cpy.Quote(quote)(Block(tdefs, body1), quote.tags) private def mkTagSymbolAndAssignType(typeArg: Tree, idx: Int)(using Context): TypeDef = { - val holeType = getTypeHoleType(typeArg.tpe.select(tpnme.Underlying)) + val holeType = getPicklableHoleType(typeArg.tpe.select(tpnme.Underlying), _ => false) val hole = untpd.cpy.Hole(typeArg)(isTerm = false, idx, Nil, EmptyTree).withType(holeType) val local = newSymbol( owner = ctx.owner, - name = UniqueName.fresh(hole.tpe.dealias.typeSymbol.name.toTypeName), + name = UniqueName.fresh(typeArg.symbol.name.toTypeName), flags = Synthetic, info = TypeAlias(typeArg.tpe.select(tpnme.Underlying)), coord = typeArg.span @@ -209,29 +199,15 @@ class PickleQuotes extends MacroTransform { ctx.typeAssigner.assignType(untpd.TypeDef(local.name, hole), local).withSpan(typeArg.span) } - /** Remove references to local types that will not be defined in this quote */ - private def getTypeHoleType(using Context) = new TypeMap() { - override def apply(tp: Type): Type = tp match - case tp: TypeRef if tp.typeSymbol.isTypeSplice => - 
apply(tp.dealias) - case tp @ TypeRef(pre, _) if isLocalPath(pre) => - val hiBound = tp.typeSymbol.info match - case info: ClassInfo => info.parents.reduce(_ & _) - case info => info.hiBound - apply(hiBound) - case tp => - mapOver(tp) - - private def isLocalPath(tp: Type): Boolean = tp match - case NoPrefix => true - case tp: TermRef if !tp.symbol.is(Package) => isLocalPath(tp.prefix) - case tp => false - } - + /** Avoid all non-static types except those defined in the quote. */ + private def getPicklableHoleType(tpe: Type, isStagedClasses: Symbol => Boolean)(using Context) = + new TypeOps.AvoidMap { + def toAvoid(tp: NamedType) = !isStagedClasses(tp.typeSymbol) && !isStaticPrefix(tp) + }.apply(tpe) } object PickleQuotes { - import tpd._ + import tpd.* val name: String = "pickleQuotes" val description: String = "turn quoted trees into explicit run-time data structures" @@ -350,7 +326,7 @@ object PickleQuotes { defn.QuotedExprClass.typeRef.appliedTo(defn.AnyType)), args => val cases = holeContents.zipWithIndex.map { case (splice, idx) => - val defn.FunctionOf(argTypes, defn.FunctionOf(quotesType :: _, _, _), _) = splice.tpe: @unchecked + val defn.FunctionNOf(argTypes, defn.FunctionNOf(quotesType :: _, _, _), _) = splice.tpe: @unchecked val rhs = { val spliceArgs = argTypes.zipWithIndex.map { (argType, i) => args(1).select(nme.apply).appliedTo(Literal(Constant(i))).asInstance(argType) diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala index f5fe34bafc2f..0be66828d58c 100644 --- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala +++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala @@ -2,20 +2,25 @@ package dotty.tools package dotc package transform -import core._ -import Contexts._ -import Decorators._ -import tasty._ +import core.* +import Contexts.* +import Decorators.* +import tasty.* import config.Printers.{noPrinter, pickling} +import config.Feature import java.io.PrintStream -import Periods._ -import Phases._ -import Symbols._ +import io.ClassfileWriterOps +import StdNames.{str, nme} +import Periods.* +import Phases.* +import Symbols.* import Flags.Module import reporting.{ThrowingReporter, Profile, Message} import collection.mutable import util.concurrent.{Executor, Future} import compiletime.uninitialized +import dotty.tools.io.JarArchive +import dotty.tools.dotc.printing.OutlinePrinter object Pickler { val name: String = "pickler" @@ -26,11 +31,14 @@ object Pickler { * only in backend. */ inline val ParallelPickling = true + + class EarlyFileWriter(writer: ClassfileWriterOps): + export writer.{writeTasty, close} } /** This phase pickles trees */ class Pickler extends Phase { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = Pickler.name @@ -38,7 +46,10 @@ class Pickler extends Phase { // No need to repickle trees coming from TASTY override def isRunnable(using Context): Boolean = - super.isRunnable && !ctx.settings.fromTasty.value + super.isRunnable && (!ctx.settings.fromTasty.value || ctx.settings.YjavaTasty.value) + + // when `-Yjava-tasty` is set we actually want to run this phase on Java sources + override def skipIfJava(using Context): Boolean = false private def output(name: String, msg: String) = { val s = new PrintStream(name) @@ -48,7 +59,7 @@ class Pickler extends Phase { // Maps that keep a record if -Ytest-pickler is set. 
private val beforePickling = new mutable.HashMap[ClassSymbol, String] - private val pickledBytes = new mutable.HashMap[ClassSymbol, Array[Byte]] + private val pickledBytes = new mutable.HashMap[ClassSymbol, (CompilationUnit, Array[Byte])] /** Drop any elements of this list that are linked module classes of other elements in the list */ private def dropCompanionModuleClasses(clss: List[ClassSymbol])(using Context): List[ClassSymbol] = { @@ -73,7 +84,12 @@ class Pickler extends Phase { private val executor = Executor[Array[Byte]]() private def useExecutor(using Context) = - Pickler.ParallelPickling && !ctx.settings.YtestPickler.value + Pickler.ParallelPickling && !ctx.settings.YtestPickler.value && + !ctx.settings.YjavaTasty.value // disable parallel pickling when `-Yjava-tasty` is set (internal testing only) + + private def printerContext(isOutline: Boolean)(using Context): Context = + if isOutline then ctx.fresh.setPrinterFn(OutlinePrinter(_)) + else ctx override def run(using Context): Unit = { val unit = ctx.compilationUnit @@ -83,10 +99,29 @@ class Pickler extends Phase { cls <- dropCompanionModuleClasses(topLevelClasses(unit.tpdTree)) tree <- sliceTopLevel(unit.tpdTree, cls) do - if ctx.settings.YtestPickler.value then beforePickling(cls) = tree.show + if ctx.settings.YtestPickler.value then beforePickling(cls) = + tree.show(using printerContext(unit.typedAsJava)) + + val sourceRelativePath = + val reference = ctx.settings.sourceroot.value + util.SourceFile.relativePath(unit.source, reference) + val isJavaAttr = unit.isJava // we must always set JAVAattr when pickling Java sources + if isJavaAttr then + // assert that Java sources didn't reach Pickler without `-Yjava-tasty`. + assert(ctx.settings.YjavaTasty.value, "unexpected Java source file without -Yjava-tasty") + val isOutline = isJavaAttr // TODO: later we may want outline for Scala sources too + val attributes = Attributes( + sourceFile = sourceRelativePath, + scala2StandardLibrary = ctx.settings.YcompileScala2Library.value, + explicitNulls = ctx.settings.YexplicitNulls.value, + captureChecked = Feature.ccEnabled, + withPureFuns = Feature.pureFunsEnabled, + isJava = isJavaAttr, + isOutline = isOutline + ) val pickler = new TastyPickler(cls) - val treePkl = new TreePickler(pickler) + val treePkl = new TreePickler(pickler, attributes) treePkl.pickle(tree :: Nil) Profile.current.recordTasty(treePkl.buf.length) @@ -108,6 +143,8 @@ class Pickler extends Phase { pickler, treePkl.buf.addrOfTree, treePkl.docString, tree, scratch.commentBuffer) + AttributePickler.pickleAttributes(attributes, pickler, scratch.attributeBuffer) + val pickled = pickler.assembleParts() def rawBytes = // not needed right now, but useful to print raw format. 
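The hunk above derives the new TASTy attribute set (relative source path, `-Ycompile-scala2-library`, explicit nulls, capture checking, pure functions, and the Java/outline flags) from compiler settings before `TreePickler` runs and `AttributePickler.pickleAttributes` serializes it. Below is a standalone sketch of that flag-gathering step, not compiler code: the `Settings` case class and `attributesFor` helper are hypothetical stand-ins, and only the `Attributes` field names mirror the diff.

```scala
// Hypothetical, self-contained model of how Pickler assembles the attributes
// pickled above. Settings and attributesFor are illustrative inventions.
object AttributesSketch:
  case class Settings(
    sourceroot: String,            // -sourceroot
    javaTasty: Boolean,            // -Yjava-tasty
    compileScala2Library: Boolean, // -Ycompile-scala2-library
    explicitNulls: Boolean,        // -Yexplicit-nulls
    ccEnabled: Boolean,            // captureChecking language feature
    pureFunsEnabled: Boolean)      // pureFunctions language feature

  case class Attributes(
    sourceFile: String,
    scala2StandardLibrary: Boolean,
    explicitNulls: Boolean,
    captureChecked: Boolean,
    withPureFuns: Boolean,
    isJava: Boolean,
    isOutline: Boolean)

  def attributesFor(sourcePath: String, isJavaUnit: Boolean, s: Settings): Attributes =
    // Mirrors the assertion above: Java units may only reach Pickler with -Yjava-tasty.
    require(!isJavaUnit || s.javaTasty, "unexpected Java source file without -Yjava-tasty")
    Attributes(
      sourceFile            = sourcePath.stripPrefix(s.sourceroot).stripPrefix("/"),
      scala2StandardLibrary = s.compileScala2Library,
      explicitNulls         = s.explicitNulls,
      captureChecked        = s.ccEnabled,
      withPureFuns          = s.pureFunsEnabled,
      isJava                = isJavaUnit,
      isOutline             = isJavaUnit) // per the TODO above, outline is tied to Java for now

  @main def demoAttributes(): Unit =
    val s = Settings("/work/src", javaTasty = true, compileScala2Library = false,
      explicitNulls = false, ccEnabled = false, pureFunsEnabled = false)
    println(attributesFor("/work/src/p/A.java", isJavaUnit = true, s))
```

Deriving the whole record in one place keeps the Java/outline coupling visible, which is what the `isOutline = isJavaAttr` TODO in the hunk flags as provisional.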
@@ -116,9 +153,10 @@ class Pickler extends Phase { } // println(i"rawBytes = \n$rawBytes%\n%") // DEBUG - if pickling ne noPrinter then + if ctx.settings.YprintTasty.value || pickling != noPrinter then println(i"**** pickled info of $cls") println(TastyPrinter.showContents(pickled, ctx.settings.color.value == "never")) + println(i"**** end of pickled info of $cls") pickled } } @@ -136,7 +174,7 @@ class Pickler extends Phase { else val pickled = computePickled() reportPositionWarnings() - if ctx.settings.YtestPickler.value then pickledBytes(cls) = pickled + if ctx.settings.YtestPickler.value then pickledBytes(cls) = (unit, pickled) () => pickled unit.pickled += (cls -> demandPickled) @@ -144,39 +182,82 @@ class Pickler extends Phase { } override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = { - val result = - if useExecutor then - executor.start() - try super.runOn(units) - finally executor.close() + val sigWriter: Option[Pickler.EarlyFileWriter] = ctx.settings.YjavaTastyOutput.value match + case jar: JarArchive if jar.exists => + Some(Pickler.EarlyFileWriter(ClassfileWriterOps(jar))) + case _ => + None + val units0 = + if ctx.settings.fromTasty.value then + // we still run the phase for the side effect of writing the pipeline tasty files + units else - super.runOn(units) + if useExecutor then + executor.start() + try super.runOn(units) + finally executor.close() + else + super.runOn(units) if ctx.settings.YtestPickler.value then - val ctx2 = ctx.fresh.setSetting(ctx.settings.YreadComments, true) + val ctx2 = ctx.fresh + .setSetting(ctx.settings.YreadComments, true) + .setSetting(ctx.settings.YshowPrintErrors, true) testUnpickler( using ctx2 - .setPeriod(Period(ctx.runId + 1, ctx.base.typerPhase.id)) - .setReporter(new ThrowingReporter(ctx.reporter)) - .addMode(Mode.ReadPositions) - .addMode(Mode.PrintShowExceptions)) + .setPeriod(Period(ctx.runId + 1, ctx.base.typerPhase.id)) + .setReporter(new ThrowingReporter(ctx.reporter)) + .addMode(Mode.ReadPositions) + ) + val result = + if ctx.settings.YjavaTasty.value then + sigWriter.foreach(writeJavaSigFiles(units0, _)) + units0.filterNot(_.typedAsJava) // remove java sources, this is the terminal phase when `-Yjava-tasty` is set + else + units0 result } - private def testUnpickler(using Context): Unit = { + private def writeJavaSigFiles(units: List[CompilationUnit], writer: Pickler.EarlyFileWriter)(using Context): Unit = { + var count = 0 + try + for + unit <- units if unit.typedAsJava + (cls, pickled) <- unit.pickled + if cls.isDefinedInCurrentRun + do + val binaryName = cls.binaryClassName.replace('.', java.io.File.separatorChar).nn + val binaryClassName = if (cls.is(Module)) binaryName.stripSuffix(str.MODULE_SUFFIX).nn else binaryName + writer.writeTasty(binaryClassName, pickled()) + count += 1 + finally + writer.close() + if ctx.settings.verbose.value then + report.echo(s"[$count java sig files written]") + end try + } + + private def testUnpickler(using Context): Unit = pickling.println(i"testing unpickler at run ${ctx.runId}") ctx.initialize() val unpicklers = - for ((cls, bytes) <- pickledBytes) yield { - val unpickler = new DottyUnpickler(bytes) + for ((cls, (unit, bytes)) <- pickledBytes) yield { + val unpickler = new DottyUnpickler(unit.source.file, bytes) unpickler.enter(roots = Set.empty) - cls -> unpickler + cls -> (unit, unpickler) } pickling.println("************* entered toplevel ***********") - for ((cls, unpickler) <- unpicklers) { + val rootCtx = ctx + for ((cls, (unit, unpickler)) <- 
unpicklers) do + val testJava = unit.typedAsJava + if testJava then + if unpickler.unpickler.nameAtRef.contents.exists(_ == nme.FromJavaObject) then + report.error(em"Pickled reference to FromJavaObject in Java defined $cls in ${cls.source}") val unpickled = unpickler.rootTrees - testSame(i"$unpickled%\n%", beforePickling(cls), cls) - } - } + val freshUnit = CompilationUnit(rootCtx.compilationUnit.source) + freshUnit.needsCaptureChecking = unit.needsCaptureChecking + freshUnit.knowsPureFuns = unit.knowsPureFuns + inContext(printerContext(testJava)(using rootCtx.fresh.setCompilationUnit(freshUnit))): + testSame(i"$unpickled%\n%", beforePickling(cls), cls) private def testSame(unpickled: String, previous: String, cls: ClassSymbol)(using Context) = import java.nio.charset.StandardCharsets.UTF_8 diff --git a/compiler/src/dotty/tools/dotc/transform/PostInlining.scala b/compiler/src/dotty/tools/dotc/transform/PostInlining.scala index 0cfd3650ad0b..26c956d85d18 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostInlining.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostInlining.scala @@ -1,7 +1,7 @@ package dotty.tools.dotc package transform -import core._ +import core.* import Contexts.* import DenotTransformers.IdentityDenotTransformer import SyntheticMembers.* diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index ac3dc15092a0..63f6af2beb86 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -4,20 +4,20 @@ package transform import dotty.tools.dotc.ast.{Trees, tpd, untpd, desugar} import scala.collection.mutable -import core._ +import core.* import dotty.tools.dotc.typer.Checking import dotty.tools.dotc.inlines.Inlines import dotty.tools.dotc.typer.VarianceChecker import typer.ErrorReporting.errorTree -import Types._, Contexts._, Names._, Flags._, DenotTransformers._, Phases._ -import SymDenotations._, StdNames._, Annotations._, Trees._, Scopes._ -import Decorators._ -import Symbols._, SymUtils._, NameOps._ +import Types.*, Contexts.*, Names.*, Flags.*, DenotTransformers.*, Phases.* +import SymDenotations.*, StdNames.*, Annotations.*, Trees.*, Scopes.* +import Decorators.* +import Symbols.*, NameOps.* import ContextFunctionResults.annotateContextResults import config.Printers.typr import config.Feature import util.SrcPos -import reporting._ +import reporting.* import NameKinds.WildcardParamName object PostTyper { @@ -60,8 +60,8 @@ object PostTyper { * mini-phase or subfunction of a macro phase equally well. But taken by themselves * they do not warrant their own group of miniphases before pickling. 
*/ -class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase => - import tpd._ +class PostTyper extends MacroTransform with InfoTransformer { thisPhase => + import tpd.* override def phaseName: String = PostTyper.name @@ -80,6 +80,10 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase def newTransformer(using Context): Transformer = new PostTyperTransformer + private var compilingScala2StdLib = false + override def initContext(ctx: FreshContext): Unit = + compilingScala2StdLib = ctx.settings.YcompileScala2Library.value(using ctx) + val superAcc: SuperAccessors = new SuperAccessors(thisPhase) val synthMbr: SyntheticMembers = new SyntheticMembers(thisPhase) val beanProps: BeanProperties = new BeanProperties(thisPhase) @@ -168,7 +172,10 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase if sym.is(Param) then sym.keepAnnotationsCarrying(thisPhase, Set(defn.ParamMetaAnnot), orNoneOf = defn.NonBeanMetaAnnots) else if sym.is(ParamAccessor) then + // @publicInBinary is not a meta-annotation and therefore not kept by `keepAnnotationsCarrying` + val publicInBinaryAnnotOpt = sym.getAnnotation(defn.PublicInBinaryAnnot) sym.keepAnnotationsCarrying(thisPhase, Set(defn.GetterMetaAnnot, defn.FieldMetaAnnot)) + for publicInBinaryAnnot <- publicInBinaryAnnotOpt do sym.addAnnotation(publicInBinaryAnnot) else sym.keepAnnotationsCarrying(thisPhase, Set(defn.GetterMetaAnnot, defn.FieldMetaAnnot), orNoneOf = defn.NonBeanMetaAnnots) if sym.isScala2Macro && !ctx.settings.XignoreScala2Macros.value then @@ -246,7 +253,7 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase private object dropInlines extends TreeMap { override def transform(tree: Tree)(using Context): Tree = tree match { - case Inlined(call, _, expansion) => + case tree @ Inlined(call, _, expansion) => val newExpansion = PruneErasedDefs.trivialErasedTree(tree) cpy.Inlined(tree)(call, Nil, newExpansion) case _ => super.transform(tree) @@ -261,9 +268,11 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase def check(qual: Tree) = if !qual.tpe.isStable then report.error(em"Parameter untupling cannot be used for call-by-name parameters", tree.srcPos) - tree match - case Select(qual, _) => check(qual) // simple select _n - case Apply(TypeApply(Select(qual, _), _), _) => check(qual) // generic select .apply[T](n) + appliedCore(closureBody(tree)) match + case Select(qual, _) => check(qual) + // simple select _n Select(qual, _n) + // generic select .apply[T](n) Apply(TypeApply(Select(qual, _), _), _) + // context closure x ?=> f(using x) Block(List(DefDef($anonfun, _, _, Apply(Select(Select(qual, _n), _), _))) def checkNotPackage(tree: Tree)(using Context): Tree = if !tree.symbol.is(Package) then tree @@ -357,13 +366,14 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase case _ => super.transform(tree1) } - case Inlined(call, bindings, expansion) if !call.isEmpty => + case tree @ Inlined(call, bindings, expansion) if !tree.inlinedFromOuterScope => val pos = call.sourcePos CrossVersionChecks.checkExperimentalRef(call.symbol, pos) - withMode(Mode.InlinedCall)(transform(call)) + withMode(Mode.NoInline)(transform(call)) val callTrace = Inlines.inlineCallTrace(call.symbol, pos)(using ctx.withSource(pos.source)) - cpy.Inlined(tree)(callTrace, transformSub(bindings), transform(expansion)(using inlineContext(call))) + cpy.Inlined(tree)(callTrace, transformSub(bindings), 
transform(expansion)(using inlineContext(tree))) case templ: Template => + Checking.checkPolyFunctionExtension(templ) withNoCheckNews(templ.parents.flatMap(newPart)) { forwardParamAccessors(templ) synthMbr.addSyntheticMembers( @@ -373,15 +383,19 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase ) } case tree: ValDef => + annotateExperimental(tree.symbol) registerIfHasMacroAnnotations(tree) checkErasedDef(tree) + Checking.checkPolyFunctionType(tree.tpt) val tree1 = cpy.ValDef(tree)(rhs = normalizeErasedRhs(tree.rhs, tree.symbol)) if tree1.removeAttachment(desugar.UntupledParam).isDefined then checkStableSelection(tree.rhs) processValOrDefDef(super.transform(tree1)) case tree: DefDef => + annotateExperimental(tree.symbol) registerIfHasMacroAnnotations(tree) checkErasedDef(tree) + Checking.checkPolyFunctionType(tree.tpt) annotateContextResults(tree) val tree1 = cpy.DefDef(tree)(rhs = normalizeErasedRhs(tree.rhs, tree.symbol)) processValOrDefDef(superAcc.wrapDefDef(tree1)(super.transform(tree1).asInstanceOf[DefDef])) @@ -406,14 +420,13 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase if illegalRefs.nonEmpty then report.error( em"The type of a class parent cannot refer to constructor parameters, but ${parent.tpe} refers to ${illegalRefs.map(_.name.show).mkString(",")}", parent.srcPos) - // Add SourceFile annotation to top-level classes if sym.owner.is(Package) then + // Add SourceFile annotation to top-level classes + // TODO remove this annotation once the reference compiler uses the TASTy source file attribute. if ctx.compilationUnit.source.exists && sym != defn.SourceFileAnnot then val reference = ctx.settings.sourceroot.value val relativePath = util.SourceFile.relativePath(ctx.compilationUnit.source, reference) - sym.addAnnotation(Annotation.makeSourceFile(relativePath, tree.span)) - if Feature.pureFunsEnabled && sym != defn.WithPureFunsAnnot then - sym.addAnnotation(Annotation(defn.WithPureFunsAnnot, tree.span)) + sym.addAnnotation(Annotation(defn.SourceFileAnnot, Literal(Constants.Constant(relativePath)), tree.span)) else if !sym.is(Param) && !sym.owner.isOneOf(AbstractOrTrait) then Checking.checkGoodBounds(tree.symbol) @@ -423,7 +436,7 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase if sym.isOpaqueAlias then VarianceChecker.checkLambda(rhs, TypeBounds.upper(sym.opaqueAlias)) case _ => - processMemberDef(super.transform(tree)) + processMemberDef(super.transform(scala2LibPatch(tree))) case tree: Bind => if tree.symbol.isType && !tree.symbol.name.is(WildcardParamName) then Checking.checkGoodBounds(tree.symbol) @@ -483,7 +496,10 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase ) case Block(_, Closure(_, _, tpt)) if ExpandSAMs.needsWrapperClass(tpt.tpe) => superAcc.withInvalidCurrentClass(super.transform(tree)) - case _: Quote => + case tree: RefinedTypeTree => + Checking.checkPolyFunctionType(tree) + super.transform(tree) + case _: Quote | _: QuotePattern => ctx.compilationUnit.needsStaging = true super.transform(tree) case tree => @@ -506,7 +522,7 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase if (sym.isEffectivelyErased) dropInlines.transform(rhs) else rhs private def registerNeedsInlining(tree: Tree)(using Context): Unit = - if tree.symbol.is(Inline) && !Inlines.inInlineMethod && !ctx.mode.is(Mode.InlinedCall) then + if tree.symbol.is(Inline) && !Inlines.inInlineMethod && !ctx.mode.is(Mode.NoInline) then 
ctx.compilationUnit.needsInlining = true /** Check if the definition has a macro annotation and set `compilationUnit.hasMacroAnnotations` if needed. */ @@ -528,9 +544,65 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase report.error("`erased` definition cannot be implemented with an expression of type Null", tree.srcPos) private def annotateExperimental(sym: Symbol)(using Context): Unit = - if sym.is(Module) && sym.companionClass.hasAnnotation(defn.ExperimentalAnnot) then + def isTopLevelDefinitionInSource(sym: Symbol) = + !sym.is(Package) && !sym.name.isPackageObjectName && + (sym.owner.is(Package) || (sym.owner.isPackageObject && !sym.isConstructor)) + if !sym.hasAnnotation(defn.ExperimentalAnnot) + && (ctx.settings.experimental.value && isTopLevelDefinitionInSource(sym)) + || (sym.is(Module) && sym.companionClass.hasAnnotation(defn.ExperimentalAnnot)) + then sym.addAnnotation(Annotation(defn.ExperimentalAnnot, sym.span)) - sym.companionModule.addAnnotation(Annotation(defn.ExperimentalAnnot, sym.span)) + private def scala2LibPatch(tree: TypeDef)(using Context) = + val sym = tree.symbol + if compilingScala2StdLib && sym.is(ModuleClass) then + // Add Serializable to companion objects of serializable classes, + // and add AbstractFunction1 to companion objects of case classes with 1 parameter. + tree.rhs match + case impl: Template => + var parents1 = impl.parents + val companionClass = sym.companionClass + if !sym.derivesFrom(defn.SerializableClass) && companionClass.derivesFrom(defn.SerializableClass) then + parents1 = parents1 :+ TypeTree(defn.SerializableType) + argTypeOfCaseClassThatNeedsAbstractFunction1(sym) match + case Some(args) if parents1.head.symbol.owner == defn.ObjectClass => + parents1 = New(defn.AbstractFunctionClass(1).typeRef).select(nme.CONSTRUCTOR).appliedToTypes(args).ensureApplied :: parents1.tail + case _ => + val impl1 = cpy.Template(impl)(parents = parents1) + cpy.TypeDef(tree)(rhs = impl1) + else tree } + + protected override def infoMayChange(sym: Symbol)(using Context): Boolean = + compilingScala2StdLib && sym.isAllOf(ModuleClass, butNot = Package) + + def transformInfo(tp: Type, sym: Symbol)(using Context): Type = tp match + case info: ClassInfo => + var parents1 = info.parents + val companionClass = sym.companionClass + if !sym.derivesFrom(defn.SerializableClass) && companionClass.derivesFrom(defn.SerializableClass) then + parents1 = parents1 :+ defn.SerializableType + argTypeOfCaseClassThatNeedsAbstractFunction1(sym) match + case Some(args) if parents1.head.typeSymbol == defn.ObjectClass => + parents1 = defn.AbstractFunctionClass(1).typeRef.appliedTo(args) :: parents1.tail + case _ => + if parents1 ne info.parents then info.derivedClassInfo(declaredParents = parents1) + else tp + case _ => tp + + private def argTypeOfCaseClassThatNeedsAbstractFunction1(sym: Symbol)(using Context): Option[List[Type]] = + val companionClass = sym.companionClass + if companionClass.is(CaseClass) + && !companionClass.primaryConstructor.is(Private) + && !companionClass.primaryConstructor.info.isVarArgsMethod + then + sym.info.decl(nme.apply).info match + case info: MethodType => + info.paramInfos match + case arg :: Nil => + Some(arg :: info.resultType :: Nil) + case args => None + case _ => None + else + None } diff --git a/compiler/src/dotty/tools/dotc/transform/PreRecheck.scala b/compiler/src/dotty/tools/dotc/transform/PreRecheck.scala index db9e28d7aad7..ba60d3b97adc 100644 --- a/compiler/src/dotty/tools/dotc/transform/PreRecheck.scala +++ 
b/compiler/src/dotty/tools/dotc/transform/PreRecheck.scala @@ -14,6 +14,8 @@ abstract class PreRecheck extends Phase, DenotTransformer: override def changesBaseTypes: Boolean = true + var pastRecheck = false + def run(using Context): Unit = () override def isCheckable = false diff --git a/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala b/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala index 6d8f7bdb32cb..482da0edb82b 100644 --- a/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala +++ b/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala @@ -1,11 +1,11 @@ package dotty.tools.dotc package transform -import core.Contexts._ -import core.NameKinds._ -import core.Symbols._ -import core.Flags._ -import core.Decorators._ +import core.Contexts.* +import core.NameKinds.* +import core.Symbols.* +import core.Flags.* +import core.Decorators.* import core.Names.TermName import MegaPhase.MiniPhase import config.Printers.transforms @@ -37,7 +37,7 @@ object ProtectedAccessors { * is not in a subclass or subtrait of `sym`? */ def needsAccessorIfNotInSubclass(sym: Symbol)(using Context): Boolean = - sym.isTerm && sym.is(Protected) && + sym.isTerm && sym.is(Protected) && !sym.hasPublicInBinary && !sym.owner.is(Trait) && // trait methods need to be handled specially, are currently always public !insideBoundaryOf(sym) @@ -48,7 +48,7 @@ object ProtectedAccessors { } class ProtectedAccessors extends MiniPhase { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = ProtectedAccessors.name diff --git a/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala b/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala index 17f2d11ccfec..9bb30926d45a 100644 --- a/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala +++ b/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala @@ -1,16 +1,16 @@ package dotty.tools.dotc package transform -import core._ -import Contexts._ +import core.* +import Contexts.* import DenotTransformers.SymTransformer -import Flags._ -import SymDenotations._ -import Symbols._ +import Flags.* +import SymDenotations.* +import Symbols.* import typer.RefChecks import MegaPhase.MiniPhase import ast.tpd -import SymUtils._ + import config.Feature import Decorators.* import dotty.tools.dotc.core.Types.MethodType @@ -23,8 +23,8 @@ import dotty.tools.dotc.core.Types.MethodType * as IsInstanceOfChecker don't give false negatives. 
*/ class PruneErasedDefs extends MiniPhase with SymTransformer { thisTransform => - import tpd._ - import PruneErasedDefs._ + import tpd.* + import PruneErasedDefs.* override def phaseName: String = PruneErasedDefs.name @@ -66,7 +66,7 @@ class PruneErasedDefs extends MiniPhase with SymTransformer { thisTransform => } object PruneErasedDefs { - import tpd._ + import tpd.* val name: String = "pruneErasedDefs" val description: String = "drop erased definitions and simplify erased expressions" diff --git a/compiler/src/dotty/tools/dotc/transform/PureStats.scala b/compiler/src/dotty/tools/dotc/transform/PureStats.scala index b747d7d6b9e4..22fdef3aaab6 100644 --- a/compiler/src/dotty/tools/dotc/transform/PureStats.scala +++ b/compiler/src/dotty/tools/dotc/transform/PureStats.scala @@ -2,10 +2,10 @@ package dotty.tools.dotc package transform import ast.{Trees, tpd} -import core._, core.Decorators._ -import MegaPhase._ -import Types._, Contexts._, Flags._, DenotTransformers._ -import Symbols._, StdNames._, Trees._ +import core.*, core.Decorators.* +import MegaPhase.* +import Types.*, Contexts.*, Flags.*, DenotTransformers.* +import Symbols.*, StdNames.*, Trees.* object PureStats { val name: String = "pureStats" @@ -15,7 +15,7 @@ object PureStats { /** Remove pure statements in blocks */ class PureStats extends MiniPhase { - import tpd._ + import tpd.* override def phaseName: String = PureStats.name diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index 527c73d02250..3d7b81a606ab 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -4,7 +4,7 @@ package transform import core.* import Symbols.*, Contexts.*, Types.*, ContextOps.*, Decorators.*, SymDenotations.* -import Flags.*, SymUtils.*, NameKinds.*, Denotations.{Denotation, SingleDenotation} +import Flags.*, NameKinds.*, Denotations.{Denotation, SingleDenotation} import ast.* import Names.Name import Phases.Phase @@ -15,6 +15,7 @@ import typer.ErrorReporting.err import typer.ProtoTypes.* import typer.TypeAssigner.seqLitType import typer.ConstFold +import typer.ErrorReporting.{Addenda, NothingToAdd} import NamerOps.methodType import config.Printers.recheckr import util.Property @@ -45,30 +46,23 @@ object Recheck: case Some(tpe) => tree1.withType(tpe) case None => tree1 - extension (sym: Symbol) + extension (sym: Symbol)(using Context) - /** Update symbol's info to newInfo from prevPhase.next to lastPhase. - * Reset to previous info for phases after lastPhase. + /** Update symbol's info to newInfo after `prevPhase`. + * Also update owner to `newOwner` if it differs from the current owner. + * The update is valid until after Recheck. After that the symbol's denotation + * is reset to what it was before PreRecheck. 
*/ - def updateInfoBetween(prevPhase: DenotTransformer, lastPhase: DenotTransformer, newInfo: Type)(using Context): Unit = - if sym.info ne newInfo then - sym.copySymDenotation( - initFlags = - if sym.flags.isAllOf(ResetPrivateParamAccessor) - then sym.flags &~ ResetPrivate | Private - else sym.flags - ).installAfter(lastPhase) // reset - sym.copySymDenotation( - info = newInfo, - initFlags = - if newInfo.isInstanceOf[LazyType] then sym.flags &~ Touched - else sym.flags - ).installAfter(prevPhase) + def updateInfo(prevPhase: DenotTransformer, newInfo: Type, newFlags: FlagSet = sym.flags, newOwner: Symbol = sym.owner): Unit = + if (sym.info ne newInfo) || sym.flags != newFlags || (sym.maybeOwner ne newOwner) then + val flags = if newInfo.isInstanceOf[LazyType] then newFlags &~ Touched else newFlags + sym.copySymDenotation(owner = newOwner, info = newInfo, initFlags = flags) + .installAfter(prevPhase) /** Does symbol have a new denotation valid from phase.next that is different * from the denotation it had before? */ - def isUpdatedAfter(phase: Phase)(using Context) = + def isUpdatedAfter(phase: Phase) = val symd = sym.denot symd.validFor.firstPhaseId == phase.id + 1 && (sym.originDenotation ne symd) @@ -84,7 +78,7 @@ object Recheck: * type stored in the tree itself */ def rememberTypeAlways(tpe: Type)(using Context): Unit = - if tpe ne tree.tpe then tree.putAttachment(RecheckedType, tpe) + if tpe ne tree.knownType then tree.putAttachment(RecheckedType, tpe) /** The remembered type of the tree, or if none was installed, the original type */ def knownType: Type = @@ -96,17 +90,43 @@ object Recheck: case Some(tpe) => tree.withType(tpe).asInstanceOf[T] case None => tree - extension (tpe: Type) - - /** Map ExprType => T to () ?=> T (and analogously for pure versions). - * Even though this phase runs after ElimByName, ExprTypes can still occur - * as by-name arguments of applied types. See note in doc comment for - * ElimByName phase. Test case is bynamefun.scala. - */ - def mapExprType(using Context): Type = tpe match - case ExprType(rt) => defn.ByNameFunction(rt) - case _ => tpe - + /** Map ExprType => T to () ?=> T (and analogously for pure versions). + * Even though this phase runs after ElimByName, ExprTypes can still occur + * as by-name arguments of applied types. See note in doc comment for + * ElimByName phase. Test case is bynamefun.scala. 
+ */ + private def mapExprType(tp: Type)(using Context): Type = tp match + case ExprType(rt) => defn.ByNameFunction(rt) + case _ => tp + + /** Normalize `=> A` types to `() ?=> A` types + * - at the top level + * - in function and method parameter types + * - under annotations + */ + def normalizeByName(tp: Type)(using Context): Type = tp.dealias match + case tp: ExprType => + mapExprType(tp) + case tp: PolyType => + tp.derivedLambdaType(resType = normalizeByName(tp.resType)) + case tp: MethodType => + tp.derivedLambdaType( + paramInfos = tp.paramInfos.mapConserve(mapExprType), + resType = normalizeByName(tp.resType)) + case tp @ RefinedType(parent, nme.apply, rinfo) if defn.isFunctionType(tp) => + tp.derivedRefinedType(parent, nme.apply, normalizeByName(rinfo)) + case tp @ defn.FunctionOf(pformals, restpe, isContextual) => + val pformals1 = pformals.mapConserve(mapExprType) + val restpe1 = normalizeByName(restpe) + if (pformals1 ne pformals) || (restpe1 ne restpe) then + defn.FunctionOf(pformals1, restpe1, isContextual) + else + tp + case tp @ AnnotatedType(parent, ann) => + tp.derivedAnnotatedType(normalizeByName(parent), ann) + case _ => + tp +end Recheck /** A base class that runs a simplified typer pass over an already re-typed program. The pass * does not transform trees but returns instead the re-typed type of each tree as it is * @@ -118,25 +138,39 @@ abstract class Recheck extends Phase, SymTransformer: import ast.tpd.* import Recheck.* + /** The phase before rechecking, used to set up symbol infos. */ def preRecheckPhase = this.prev.asInstanceOf[PreRecheck] + /** The first phase that prepares for rechecking. This is usually preRecheckPhase + * but could also be before. Updated symbols will snap back to their + * denotations at firstPrepPhase after rechecking. + */ + def firstPrepPhase: Phase = preRecheckPhase + override def changesBaseTypes: Boolean = true override def isCheckable = false // TODO: investigate what goes wrong when we Ycheck directly after rechecking. // One failing test is pos/i583a.scala - /** Change any `ResetPrivate` flags back to `Private` */ - def transformSym(sym: SymDenotation)(using Context): SymDenotation = - if sym.isAllOf(Recheck.ResetPrivateParamAccessor) then - sym.copySymDenotation(initFlags = sym.flags &~ Recheck.ResetPrivate | Private) - else sym + /** Change denotation back to what it was before (pre-)rechecking */ + def transformSym(symd: SymDenotation)(using Context): SymDenotation = + val sym = symd.symbol + def updatedAfter(p: Phase): Boolean = + sym.isUpdatedAfter(p) || p != preRecheckPhase && updatedAfter(p.next) + if updatedAfter(firstPrepPhase) + then atPhase(firstPrepPhase)(sym.denot.copySymDenotation()) + else symd def run(using Context): Unit = val rechecker = newRechecker() rechecker.checkUnit(ctx.compilationUnit) rechecker.reset() + override def runOn(units: List[CompilationUnit])(using runCtx: Context): List[CompilationUnit] = + try super.runOn(units) + finally preRecheckPhase.pastRecheck = true + def newRechecker()(using Context): Rechecker /** The typechecker pass */ @@ -155,8 +189,14 @@ abstract class Recheck extends Phase, SymTransformer: */ def keepType(tree: Tree): Boolean = keepAllTypes + /** A map from NamedTypes to the denotations they had before this phase. + * Needed so that we can `reset` them after this phase. + */ private val prevSelDenots = util.HashMap[NamedType, Denotation]() + /** Reset all references in `prevSelDenots` to the denotations they had + * before this phase. 
+ */ def reset()(using Context): Unit = for (ref, mbr) <- prevSelDenots.iterator do ref.withDenot(mbr) @@ -167,7 +207,7 @@ abstract class Recheck extends Phase, SymTransformer: val tree2 = ConstFold(tree1) if tree2 ne tree1 then tree2.tpe else tp - def recheckIdent(tree: Ident)(using Context): Type = + def recheckIdent(tree: Ident, pt: Type)(using Context): Type = tree.tpe def recheckSelect(tree: Select, pt: Type)(using Context): Type = @@ -177,32 +217,22 @@ abstract class Recheck extends Phase, SymTransformer: else AnySelectionProto recheckSelection(tree, recheck(qual, proto).widenIfUnstable, name, pt) - /** When we select the `apply` of a function with type such as `(=> A) => B`, - * we need to convert the parameter type `=> A` to `() ?=> A`. See doc comment - * of `mapExprType`. - */ - def normalizeByName(mbr: SingleDenotation)(using Context): SingleDenotation = mbr.info match - case mt: MethodType if mt.paramInfos.exists(_.isInstanceOf[ExprType]) => - mbr.derivedSingleDenotation(mbr.symbol, - mt.derivedLambdaType(paramInfos = mt.paramInfos.map(_.mapExprType))) - case _ => - mbr - def recheckSelection(tree: Select, qualType: Type, name: Name, sharpen: Denotation => Denotation)(using Context): Type = if name.is(OuterSelectName) then tree.tpe else //val pre = ta.maybeSkolemizePrefix(qualType, name) - val mbr = normalizeByName( + val mbr = sharpen( qualType.findMember(name, qualType, excluded = if tree.symbol.is(Private) then EmptyFlags else Private - )).suchThat(tree.symbol == _)) + )).suchThat(tree.symbol == _) val newType = tree.tpe match case prevType: NamedType => val prevDenot = prevType.denot val newType = qualType.select(name, mbr) if (newType eq prevType) && (mbr.info ne prevDenot.info) && !prevSelDenots.contains(prevType) then + // remember previous denot of NamedType, so that it can be reset after this phase prevSelDenots(prevType) = prevDenot newType case _ => @@ -210,7 +240,6 @@ abstract class Recheck extends Phase, SymTransformer: constFold(tree, newType) //.showing(i"recheck select $qualType . 
$name : ${mbr.info} = $result") - /** Keep the symbol of the `select` but re-infer its type */ def recheckSelection(tree: Select, qualType: Type, name: Name, pt: Type)(using Context): Type = recheckSelection(tree, qualType, name, sharpen = identity[Denotation]) @@ -218,22 +247,25 @@ abstract class Recheck extends Phase, SymTransformer: def recheckBind(tree: Bind, pt: Type)(using Context): Type = tree match case Bind(name, body) => recheck(body, pt) - val sym = tree.symbol - if sym.isType then sym.typeRef else sym.info + tree.symbol.namedType def recheckLabeled(tree: Labeled, pt: Type)(using Context): Type = tree match case Labeled(bind, expr) => - val bindType = recheck(bind, pt) + val (bindType: NamedType) = recheck(bind, pt): @unchecked val exprType = recheck(expr, defn.UnitType) - bindType + bindType.symbol.info - def recheckValDef(tree: ValDef, sym: Symbol)(using Context): Unit = - if !tree.rhs.isEmpty then recheck(tree.rhs, sym.info) + def recheckValDef(tree: ValDef, sym: Symbol)(using Context): Type = + val resType = recheck(tree.tpt) + if tree.rhs.isEmpty then resType + else recheck(tree.rhs, resType) - def recheckDefDef(tree: DefDef, sym: Symbol)(using Context): Unit = - val rhsCtx = linkConstructorParams(sym).withOwner(sym) - if !tree.rhs.isEmpty && !sym.isInlineMethod && !sym.isEffectivelyErased then - inContext(rhsCtx) { recheck(tree.rhs, recheck(tree.tpt)) } + def recheckDefDef(tree: DefDef, sym: Symbol)(using Context): Type = + inContext(linkConstructorParams(sym).withOwner(sym)): + val resType = recheck(tree.tpt) + if tree.rhs.isEmpty || sym.isInlineMethod || sym.isEffectivelyErased + then resType + else recheck(tree.rhs, resType) def recheckTypeDef(tree: TypeDef, sym: Symbol)(using Context): Type = recheck(tree.rhs) @@ -247,32 +279,40 @@ abstract class Recheck extends Phase, SymTransformer: sym.typeRef /** Assuming `formals` are parameters of a Java-defined method, remap Object - * to FromJavaObject since it got lost in ElimRepeated + * to FromJavaObject since it got lost in ElimRepeated. + * NOTE: It seems this is no longer true, and `mapJavaArgs` is not needed. + * The invocation is currently disabled in recheckApply. 
*/ private def mapJavaArgs(formals: List[Type])(using Context): List[Type] = val tm = new TypeMap with IdempotentCaptRefMap: - def apply(t: Type) = t match - case t: TypeRef if t.symbol == defn.ObjectClass => defn.FromJavaObjectType - case _ => mapOver(t) + def apply(t: Type) = + t match + case t: TypeRef if t.symbol == defn.ObjectClass => defn.FromJavaObjectType + case _ => mapOver(t) formals.mapConserve(tm) /** Hook for method type instantiation */ protected def instantiate(mt: MethodType, argTypes: List[Type], sym: Symbol)(using Context): Type = mt.instantiate(argTypes) + /** A hook to massage the type of an applied method; currently not overridden */ + protected def prepareFunction(funtpe: MethodType, meth: Symbol)(using Context): MethodType = funtpe + def recheckApply(tree: Apply, pt: Type)(using Context): Type = - val funTp = recheck(tree.fun) + val funtpe0 = recheck(tree.fun) // reuse the tree's type on signature polymorphic methods, instead of using the (wrong) rechecked one - val funtpe = if tree.fun.symbol.originalSignaturePolymorphic.exists then tree.fun.tpe else funTp - funtpe.widen match - case fntpe: MethodType => + val funtpe1 = if tree.fun.symbol.originalSignaturePolymorphic.exists then tree.fun.tpe else funtpe0 + funtpe1.widen match + case fntpe1: MethodType => + val fntpe = prepareFunction(fntpe1, tree.fun.symbol) assert(fntpe.paramInfos.hasSameLengthAs(tree.args)) val formals = - if tree.symbol.is(JavaDefined) then mapJavaArgs(fntpe.paramInfos) + if false && tree.symbol.is(JavaDefined) // see NOTE in mapJavaArgs + then mapJavaArgs(fntpe.paramInfos) else fntpe.paramInfos def recheckArgs(args: List[Tree], formals: List[Type], prefs: List[ParamRef]): List[Type] = args match case arg :: args1 => - val argType = recheck(arg, formals.head.mapExprType) + val argType = recheck(arg, normalizeByName(formals.head)) val formals1 = if fntpe.isParamDependent then formals.tail.map(_.substParam(prefs.head, argType)) @@ -285,10 +325,12 @@ abstract class Recheck extends Phase, SymTransformer: constFold(tree, instantiate(fntpe, argTypes, tree.fun.symbol)) //.showing(i"typed app $tree : $fntpe with ${tree.args}%, % : $argTypes%, % = $result") case tp => - assert(false, i"unexpected type of ${tree.fun}: $funtpe") + assert(false, i"unexpected type of ${tree.fun}: $tp") def recheckTypeApply(tree: TypeApply, pt: Type)(using Context): Type = - recheck(tree.fun).widen match + val funtpe = recheck(tree.fun) + tree.fun.rememberType(funtpe) // remember type to support later bounds checks + funtpe.widen match case fntpe: PolyType => assert(fntpe.paramInfos.hasSameLengthAs(tree.args)) val argTypes = tree.args.map(recheck(_)) @@ -300,31 +342,37 @@ abstract class Recheck extends Phase, SymTransformer: tptType def recheckAssign(tree: Assign)(using Context): Type = - val lhsType = recheck(tree.lhs) + val lhsType = recheck(tree.lhs, LhsProto) recheck(tree.rhs, lhsType.widen) defn.UnitType - def recheckBlock(stats: List[Tree], expr: Tree, pt: Type)(using Context): Type = + private def recheckBlock(stats: List[Tree], expr: Tree)(using Context): Type = recheckStats(stats) val exprType = recheck(expr) + TypeOps.avoid(exprType, localSyms(stats).filterConserve(_.isTerm)) + + def recheckBlock(tree: Block, pt: Type)(using Context): Type = tree match + case Block(Nil, expr: Block) => recheckBlock(expr, pt) + case Block((mdef : DefDef) :: Nil, closure: Closure) => + recheckClosureBlock(mdef, closure.withSpan(tree.span), pt) + case Block(stats, expr) => recheckBlock(stats, expr) // The expected type `pt` is not 
propagated. Doing so would allow variables in the // expected type to contain references to local symbols of the block, so the // local symbols could escape that way. - TypeOps.avoid(exprType, localSyms(stats).filterConserve(_.isTerm)) - def recheckBlock(tree: Block, pt: Type)(using Context): Type = - recheckBlock(tree.stats, tree.expr, pt) + def recheckClosureBlock(mdef: DefDef, expr: Closure, pt: Type)(using Context): Type = + recheckBlock(mdef :: Nil, expr) def recheckInlined(tree: Inlined, pt: Type)(using Context): Type = - recheckBlock(tree.bindings, tree.expansion, pt)(using inlineContext(tree.call)) + recheckBlock(tree.bindings, tree.expansion)(using inlineContext(tree)) def recheckIf(tree: If, pt: Type)(using Context): Type = recheck(tree.cond, defn.BooleanType) recheck(tree.thenp, pt) | recheck(tree.elsep, pt) - def recheckClosure(tree: Closure, pt: Type)(using Context): Type = + def recheckClosure(tree: Closure, pt: Type, forceDependent: Boolean = false)(using Context): Type = if tree.tpt.isEmpty then - tree.meth.tpe.widen.toFunctionType(tree.meth.symbol.is(JavaDefined)) + tree.meth.tpe.widen.toFunctionType(tree.meth.symbol.is(JavaDefined), alwaysDependent = forceDependent) else recheck(tree.tpt) @@ -389,7 +437,7 @@ abstract class Recheck extends Phase, SymTransformer: seqLitType(tree, TypeComparer.lub(declaredElemType :: elemTypes)) def recheckTypeTree(tree: TypeTree)(using Context): Type = - knownType(tree) // allows to install new types at Setup + tree.knownType // allows to install new types at Setup def recheckAnnotated(tree: Annotated)(using Context): Type = tree.tpe match @@ -415,7 +463,7 @@ abstract class Recheck extends Phase, SymTransformer: case _ => traverse(stats) - def recheckDef(tree: ValOrDefDef, sym: Symbol)(using Context): Unit = + def recheckDef(tree: ValOrDefDef, sym: Symbol)(using Context): Type = inContext(ctx.localContext(tree, sym)) { tree match case tree: ValDef => recheckValDef(tree, sym) @@ -431,7 +479,7 @@ abstract class Recheck extends Phase, SymTransformer: def recheckNamed(tree: NameTree, pt: Type)(using Context): Type = val sym = tree.symbol tree match - case tree: Ident => recheckIdent(tree) + case tree: Ident => recheckIdent(tree, pt) case tree: Select => recheckSelect(tree, pt) case tree: Bind => recheckBind(tree, pt) case tree: ValOrDefDef => @@ -485,19 +533,17 @@ abstract class Recheck extends Phase, SymTransformer: * @param pt the expected type */ def recheckFinish(tpe: Type, tree: Tree, pt: Type)(using Context): Type = - checkConforms(tpe, pt, tree) - if keepType(tree) then tree.rememberType(tpe) - tpe + val tpe1 = checkConforms(tpe, pt, tree) + if keepType(tree) then tree.rememberType(tpe1) + tpe1 def recheck(tree: Tree, pt: Type = WildcardType)(using Context): Type = - trace(i"rechecking $tree with pt = $pt", recheckr, show = true) { - try recheckFinish(recheckStart(tree, pt), tree, pt) - catch case ex: Exception => - println(i"error while rechecking $tree") - throw ex - } + try recheckFinish(recheckStart(tree, pt), tree, pt) + catch case ex: Exception => + println(i"error while rechecking $tree") + throw ex - /** Typing and previous transforms sometiems leaves skolem types in prefixes of + /** Typing and previous transforms sometimes leaves skolem types in prefixes of * NamedTypes in `expected` that do not match the `actual` Type. -Ycheck does * not complain (need to find out why), but a full recheck does. We compensate * by de-skolemizing everywhere in `expected` except when variance is negative. 
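Earlier in this file's diff, `normalizeByName` rewrites by-name types `=> A` into nullary context functions `() ?=> A`, recursing into method and function signatures, and `recheckApply` now pushes each formal parameter through it before rechecking arguments. A toy model of that recursion follows, under the assumption that a miniature ADT stands in for dotc's internal types (nothing here is compiler code):

```scala
// Toy model of the by-name normalization added to Recheck above: ExprType
// (`=> A`) becomes a nullary context function, and the rewrite recurses into
// method parameter and result types. The Type ADT is a hypothetical stand-in.
object NormalizeByNameSketch:
  enum Type:
    case Ref(name: String)                            // plain reference A
    case ExprType(result: Type)                       // by-name type => A
    case MethodType(params: List[Type], result: Type) // (A, B): C
    case ContextFunction0(result: Type)               // () ?=> A
  import Type.*

  def mapExprType(tp: Type): Type = tp match
    case ExprType(rt) => ContextFunction0(rt) // => A  becomes  () ?=> A
    case _            => tp

  def normalizeByName(tp: Type): Type = tp match
    case tp: ExprType => mapExprType(tp) // at the top level
    case MethodType(params, res) =>      // in parameter and result positions
      MethodType(params.map(mapExprType), normalizeByName(res))
    case _ => tp

  @main def demoNormalize(): Unit =
    val mt = MethodType(List(ExprType(Ref("A")), Ref("B")), ExprType(Ref("C")))
    // Both by-name positions come back as ContextFunction0; Ref("B") is untouched.
    println(normalizeByName(mt))
```

In the real phase this runs after ElimByName, and the doc comment added above notes that the only surviving `ExprType`s are by-name arguments of applied types (test case `bynamefun.scala`).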
@@ -524,33 +570,26 @@ abstract class Recheck extends Phase, SymTransformer: private val debugSuccesses = false /** Check that widened types of `tpe` and `pt` are compatible. */ - def checkConforms(tpe: Type, pt: Type, tree: Tree)(using Context): Unit = tree match - case _: DefTree | EmptyTree | _: TypeTree | _: Closure => - // Don't report closure nodes, since their span is a point; wait instead - // for enclosing block to preduce an error - case _ => - checkConformsExpr(tpe.widenExpr, pt.widenExpr, tree) - - def checkConformsExpr(actual: Type, expected: Type, tree: Tree)(using Context): Unit = - //println(i"check conforms $actual <:< $expected") + def checkConforms(tpe: Type, pt: Type, tree: Tree)(using Context): Type = tree match + case _: DefTree | EmptyTree | _: TypeTree => tpe + case _ => checkConformsExpr(tpe.widenExpr, pt.widenExpr, tree) + + def isCompatible(actual: Type, expected: Type)(using Context): Boolean = + actual <:< expected + || expected.isRepeatedParam + && isCompatible(actual, + expected.translateFromRepeated(toArray = actual.isRef(defn.ArrayClass))) + || { + val widened = widenSkolems(expected) + (widened ne expected) && isCompatible(actual, widened) + } - def isCompatible(expected: Type): Boolean = - actual <:< expected - || expected.isRepeatedParam - && isCompatible(expected.translateFromRepeated(toArray = tree.tpe.isRef(defn.ArrayClass))) - || { - val widened = widenSkolems(expected) - (widened ne expected) && isCompatible(widened) - } - if !isCompatible(expected) then + def checkConformsExpr(actual: Type, expected: Type, tree: Tree, addenda: Addenda = NothingToAdd)(using Context): Type = + //println(i"check conforms $actual <:< $expected") + if !isCompatible(actual, expected) then recheckr.println(i"conforms failed for ${tree}: $actual vs $expected") - err.typeMismatch(tree.withType(actual), expected) - else if debugSuccesses then - tree match - case _: Ident => - println(i"SUCCESS $tree:\n${TypeComparer.explained(_.isSubType(actual, expected))}") - case _ => - end checkConformsExpr + err.typeMismatch(tree.withType(actual), expected, addenda) + actual def checkUnit(unit: CompilationUnit)(using Context): Unit = recheck(unit.tpdTree) @@ -559,9 +598,9 @@ abstract class Recheck extends Phase, SymTransformer: /** Show tree with rechecked types instead of the types stored in the `.tpe` field */ override def show(tree: untpd.Tree)(using Context): String = - atPhase(thisPhase) { - super.show(addRecheckedTypes.transform(tree.asInstanceOf[tpd.Tree])) - } + atPhase(thisPhase): + withMode(Mode.Printing): + super.show(addRecheckedTypes.transform(tree.asInstanceOf[tpd.Tree])) end Recheck /** A class that can be used to test basic rechecking without any customization */ diff --git a/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala b/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala index 6e73d683fa2c..90c5ac85167c 100644 --- a/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala +++ b/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala @@ -1,22 +1,22 @@ package dotty.tools.dotc package transform -import core._ -import Decorators._ -import Flags._ -import Types._ -import Contexts._ -import Symbols._ -import SymUtils._ -import NameKinds._ +import core.* +import Decorators.* +import Flags.* +import Types.* +import Contexts.* +import Symbols.* + +import NameKinds.* import dotty.tools.dotc.ast.tpd -import tpd._ +import tpd.* import scala.collection.mutable -import dotty.tools.dotc.core.Annotations._ -import dotty.tools.dotc.core.Names._ 
-import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.quoted._ +import dotty.tools.dotc.core.Annotations.* +import dotty.tools.dotc.core.Names.* +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.quoted.* import scala.annotation.constructorOnly diff --git a/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala b/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala index d6c11fe36748..8ed1edcd0308 100644 --- a/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala +++ b/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala @@ -1,16 +1,16 @@ package dotty.tools.dotc package transform -import core._ -import ast.tpd._ -import Contexts._ -import MegaPhase._ -import Annotations._ +import core.* +import ast.tpd.* +import Contexts.* +import MegaPhase.* +import Annotations.* import Symbols.defn -import Constants._ -import Types._ -import Decorators._ -import Flags._ +import Constants.* +import Types.* +import Decorators.* +import Flags.* import scala.collection.mutable diff --git a/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala b/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala index 99b6be1eea8a..dd3f41be5a8e 100644 --- a/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala +++ b/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala @@ -1,19 +1,19 @@ package dotty.tools.dotc package transform -import core._ -import MegaPhase._ -import Contexts._ -import Flags._ -import SymUtils._ -import Symbols._ -import Decorators._ -import DenotTransformers._ -import Names._ -import NameOps._ -import NameKinds._ -import NullOpsDecorator._ -import ResolveSuper._ +import core.* +import MegaPhase.* +import Contexts.* +import Flags.* + +import Symbols.* +import Decorators.* +import DenotTransformers.* +import Names.* +import NameOps.* +import NameKinds.* +import NullOpsDecorator.* +import ResolveSuper.* import reporting.IllegalSuperAccessor /** This phase implements super accessors in classes that need them. @@ -31,7 +31,7 @@ import reporting.IllegalSuperAccessor * Mixin, which runs after erasure. 
*/ class ResolveSuper extends MiniPhase with IdentityDenotTransformer { thisPhase => - import ast.tpd._ + import ast.tpd.* override def phaseName: String = ResolveSuper.name @@ -45,7 +45,7 @@ class ResolveSuper extends MiniPhase with IdentityDenotTransformer { thisPhase = override def transformTemplate(impl: Template)(using Context): Template = { val cls = impl.symbol.owner.asClass val ops = new MixinOps(cls, thisPhase) - import ops._ + import ops.* def superAccessors(mixin: ClassSymbol): List[Tree] = for superAcc <- mixin.info.decls.filter(_.isSuperAccessor) @@ -66,7 +66,7 @@ class ResolveSuper extends MiniPhase with IdentityDenotTransformer { thisPhase = assert(ddef.rhs.isEmpty, ddef.symbol) val cls = meth.owner.asClass val ops = new MixinOps(cls, thisPhase) - import ops._ + import ops.* DefDef(meth, forwarderRhsFn(rebindSuper(cls, meth))) } else ddef diff --git a/compiler/src/dotty/tools/dotc/transform/RestoreScopes.scala b/compiler/src/dotty/tools/dotc/transform/RestoreScopes.scala index d01be0419a4d..e66f5e4b37b8 100644 --- a/compiler/src/dotty/tools/dotc/transform/RestoreScopes.scala +++ b/compiler/src/dotty/tools/dotc/transform/RestoreScopes.scala @@ -1,11 +1,11 @@ package dotty.tools.dotc package transform -import core._ +import core.* import DenotTransformers.IdentityDenotTransformer -import Contexts._ -import Symbols._ -import Scopes._ +import Contexts.* +import Symbols.* +import Scopes.* import MegaPhase.MiniPhase /** The preceding lambda lift and flatten phases move symbols to different scopes @@ -13,7 +13,7 @@ import MegaPhase.MiniPhase * class scopes contain the symbols defined in them. */ class RestoreScopes extends MiniPhase with IdentityDenotTransformer { thisPhase => - import ast.tpd._ + import ast.tpd.* override def phaseName: String = RestoreScopes.name diff --git a/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala b/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala index 1df9809c2f62..6dc718ef526b 100644 --- a/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala +++ b/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala @@ -2,13 +2,13 @@ package dotty.tools.dotc package transform import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.DenotTransformers.IdentityDenotTransformer -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core._ -import dotty.tools.dotc.transform.MegaPhase._ -import dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.* +import dotty.tools.dotc.transform.MegaPhase.* + /** Removes `Select`s that would be compiled into `GetStatic`. 
* @@ -43,7 +43,7 @@ import dotty.tools.dotc.transform.SymUtils._ * @author Dmytro Petrashko */ class SelectStatic extends MiniPhase with IdentityDenotTransformer { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = SelectStatic.name diff --git a/compiler/src/dotty/tools/dotc/transform/SeqLiterals.scala b/compiler/src/dotty/tools/dotc/transform/SeqLiterals.scala index 2f586104c4e3..20f4e6d85daa 100644 --- a/compiler/src/dotty/tools/dotc/transform/SeqLiterals.scala +++ b/compiler/src/dotty/tools/dotc/transform/SeqLiterals.scala @@ -1,9 +1,9 @@ package dotty.tools.dotc package transform -import core._ -import dotty.tools.dotc.transform.MegaPhase._ -import Contexts._ +import core.* +import dotty.tools.dotc.transform.MegaPhase.* +import Contexts.* /** A transformer that eliminates SeqLiteral's, transforming `SeqLiteral(elems)` to an operation * equivalent to @@ -15,7 +15,7 @@ import Contexts._ * keep a precise type after erasure, whereas SeqLiterals only get the erased type `Seq`, */ class SeqLiterals extends MiniPhase { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = SeqLiterals.name diff --git a/compiler/src/dotty/tools/dotc/transform/SetRootTree.scala b/compiler/src/dotty/tools/dotc/transform/SetRootTree.scala index d17dbbecc555..f62b1f5f01f2 100644 --- a/compiler/src/dotty/tools/dotc/transform/SetRootTree.scala +++ b/compiler/src/dotty/tools/dotc/transform/SetRootTree.scala @@ -1,7 +1,7 @@ package dotty.tools.dotc.transform import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Phases.Phase /** Set the `rootTreeOrProvider` property of class symbols. */ diff --git a/compiler/src/dotty/tools/dotc/transform/SpecializeApplyMethods.scala b/compiler/src/dotty/tools/dotc/transform/SpecializeApplyMethods.scala index 6ffa05075201..fd314b94e50c 100644 --- a/compiler/src/dotty/tools/dotc/transform/SpecializeApplyMethods.scala +++ b/compiler/src/dotty/tools/dotc/transform/SpecializeApplyMethods.scala @@ -1,9 +1,9 @@ package dotty.tools.dotc package transform -import ast.Trees._, ast.tpd, core._ -import Contexts._, Types._, Decorators._, Symbols._, DenotTransformers._ -import SymDenotations._, Scopes._, StdNames._, NameOps._, Names._ +import ast.Trees.*, ast.tpd, core.* +import Contexts.*, Types.*, Decorators.*, Symbols.*, DenotTransformers.* +import SymDenotations.*, Scopes.*, StdNames.*, NameOps.*, Names.* import MegaPhase.MiniPhase import scala.collection.mutable @@ -18,14 +18,14 @@ import scala.collection.mutable * different standard library. 
*/ class SpecializeApplyMethods extends MiniPhase with InfoTransformer { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = SpecializeApplyMethods.name override def description: String = SpecializeApplyMethods.description override def isEnabled(using Context): Boolean = - !ctx.settings.scalajs.value + !ctx.settings.scalajs.value && !ctx.settings.YcompileScala2Library.value private def specApplySymbol(sym: Symbol, args: List[Type], ret: Type)(using Context): Symbol = { val name = nme.apply.specializedFunction(ret, args) @@ -90,7 +90,7 @@ class SpecializeApplyMethods extends MiniPhase with InfoTransformer { override def transformTemplate(tree: Template)(using Context) = { val cls = tree.symbol.owner.asClass - def synthesizeApply(names: collection.Set[TermName]): Tree = { + def synthesizeApply(names: List[TermName]): Tree = { val applyBuf = new mutable.ListBuffer[DefDef] names.foreach { name => val applySym = cls.info.decls.lookup(name) diff --git a/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala b/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala index 2248fbc8d570..094d6024eb4e 100644 --- a/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala +++ b/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala @@ -1,9 +1,9 @@ package dotty.tools.dotc package transform -import ast.Trees._, ast.tpd, core._ -import Contexts._, Types._, Decorators._, Symbols._, DenotTransformers._ -import SymDenotations._, Scopes._, StdNames._, NameOps._, Names._ +import ast.Trees.*, ast.tpd, core.* +import Contexts.*, Types.*, Decorators.*, Symbols.*, DenotTransformers.* +import SymDenotations.*, Scopes.*, StdNames.*, NameOps.*, Names.* import MegaPhase.MiniPhase @@ -11,7 +11,7 @@ import MegaPhase.MiniPhase * specialized form. */ class SpecializeFunctions extends MiniPhase { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = SpecializeFunctions.name @@ -85,10 +85,10 @@ class SpecializeFunctions extends MiniPhase { case Select(qual, _) => val qual1 = qual.tpe.widen match case defn.ByNameFunction(res) => - // Need to cast to regular function, since specialied apply methods + // Need to cast to regular function, since specialized apply methods // are not members of ContextFunction0. The cast will be eliminated in // erasure. 
- qual.cast(defn.FunctionOf(Nil, res)) + qual.cast(defn.FunctionNOf(Nil, res)) case _ => qual qual1.select(specializedApply) diff --git a/compiler/src/dotty/tools/dotc/transform/SpecializeTuples.scala b/compiler/src/dotty/tools/dotc/transform/SpecializeTuples.scala index 5237ecbcef8a..264659930505 100644 --- a/compiler/src/dotty/tools/dotc/transform/SpecializeTuples.scala +++ b/compiler/src/dotty/tools/dotc/transform/SpecializeTuples.scala @@ -37,15 +37,14 @@ class SpecializeTuples extends MiniPhase: end transformApply override def transformSelect(tree: Select)(using Context): Tree = tree match - case Select(qual, nme._1) if isAppliedSpecializableTuple(qual.tpe.widen) => - Select(qual, nme._1.specializedName(qual.tpe.widen.argInfos.slice(0, 1))) - case Select(qual, nme._2) if isAppliedSpecializableTuple(qual.tpe.widen) => - Select(qual, nme._2.specializedName(qual.tpe.widen.argInfos.slice(1, 2))) + case Select(qual, name @ (nme._1 | nme._2)) => + qual.tpe.widenDealias match + case AppliedType(tycon, args) if defn.isSpecializableTuple(tycon.classSymbol, args) => + val argIdx = if name == nme._1 then 0 else 1 + Select(qual, name.specializedName(args(argIdx) :: Nil)) + case _ => + tree case _ => tree - - private def isAppliedSpecializableTuple(tp: Type)(using Context) = tp match - case AppliedType(tycon, args) => defn.isSpecializableTuple(tycon.classSymbol, args) - case _ => false end SpecializeTuples object SpecializeTuples: diff --git a/compiler/src/dotty/tools/dotc/transform/Splicer.scala b/compiler/src/dotty/tools/dotc/transform/Splicer.scala index 741c770e2c77..93ba1845e484 100644 --- a/compiler/src/dotty/tools/dotc/transform/Splicer.scala +++ b/compiler/src/dotty/tools/dotc/transform/Splicer.scala @@ -7,14 +7,14 @@ import java.io.{PrintWriter, StringWriter} import java.lang.reflect.{InvocationTargetException, Method => JLRMethod} import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.NameKinds.FlatName import dotty.tools.dotc.core.Names.Name -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.Types._ -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.core.Types.* +import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.core.Denotations.staticRef import dotty.tools.dotc.core.TypeErasure import dotty.tools.dotc.core.Constants.Constant @@ -30,7 +30,8 @@ import scala.reflect.ClassTag import dotty.tools.dotc.quoted.{PickledQuotes, QuoteUtils} import scala.quoted.Quotes -import scala.quoted.runtime.impl._ +import scala.quoted.runtime.impl.* +import dotty.tools.dotc.core.NameKinds /** Utility class to splice quoted expressions */ object Splicer { @@ -89,7 +90,7 @@ object Splicer { /** Checks that no symbol that was generated within the macro expansion has an out of scope reference */ def checkEscapedVariables(tree: Tree, expansionOwner: Symbol)(using Context): tree.type = new TreeTraverser { - private[this] var locals = Set.empty[Symbol] + private var locals = Set.empty[Symbol] private def markSymbol(sym: Symbol)(using Context): Unit = locals = locals + sym private def markDef(tree: Tree)(using Context): Unit = tree match { @@ -214,6 +215,13 @@ object Splicer { report.error("Macro cannot be implemented with an `inline` method", fn.srcPos) 
args.flatten.foreach(checkIfValidArgument) + case Call(fn, args) if fn.symbol.name.is(NameKinds.InlineAccessorName) => + // TODO suggest use of @binaryAPI once we have the annotation + report.error( + i"""Macro implementation is not statically accessible. + | + |Non-static inline accessor was generated in ${fn.symbol.owner} + |""".stripMargin, tree.srcPos) case _ => report.error( """Malformed macro. @@ -245,7 +253,7 @@ object Splicer { case expr: Ident if expr.symbol.isAllOf(InlineByNameProxy) => // inline proxy for by-name parameter expr.symbol.defTree.asInstanceOf[DefDef].rhs - case Inlined(EmptyTree, _, body1) => body1 + case tree: Inlined if tree.inlinedFromOuterScope => tree.expansion case _ => body } new ExprImpl(Inlined(EmptyTree, Nil, QuoteUtils.changeOwnerOfTree(body1, ctx.owner)).withSpan(body1.span), SpliceScope.getCurrent) diff --git a/compiler/src/dotty/tools/dotc/transform/Splicing.scala b/compiler/src/dotty/tools/dotc/transform/Splicing.scala index ff5dc5042eaf..967c1cb6d19b 100644 --- a/compiler/src/dotty/tools/dotc/transform/Splicing.scala +++ b/compiler/src/dotty/tools/dotc/transform/Splicing.scala @@ -1,25 +1,25 @@ package dotty.tools.dotc package transform -import core._ -import Decorators._ -import Flags._ -import Types._ -import Contexts._ -import Symbols._ -import Constants._ -import ast.Trees._ +import core.* +import Decorators.* +import Flags.* +import Types.* +import Contexts.* +import Symbols.* +import Constants.* +import ast.Trees.* import ast.{TreeTypeMap, untpd} -import util.Spans._ -import SymUtils._ -import NameKinds._ +import util.Spans.* + +import NameKinds.* import dotty.tools.dotc.ast.tpd import scala.collection.mutable -import dotty.tools.dotc.core.Annotations._ -import dotty.tools.dotc.core.Names._ -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.quoted._ +import dotty.tools.dotc.core.Annotations.* +import dotty.tools.dotc.core.Names.* +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.quoted.* import dotty.tools.dotc.config.ScalaRelease.* import dotty.tools.dotc.staging.StagingLevel.* import dotty.tools.dotc.staging.QuoteTypeTags @@ -72,7 +72,7 @@ object Splicing: * */ class Splicing extends MacroTransform: - import tpd._ + import tpd.* override def phaseName: String = Splicing.name @@ -197,7 +197,7 @@ class Splicing extends MacroTransform: if tree.isTerm then if isCaptured(tree.symbol) then val tpe = tree.tpe.widenTermRefExpr match { - case tpw: MethodicType => tpw.toFunctionType(isJava = false) + case tpw: MethodicType => tpw.toFunctionType() case tpw => tpw } spliced(tpe)(capturedTerm(tree)) @@ -209,7 +209,7 @@ class Splicing extends MacroTransform: // Dealias references to captured types TypeTree(tree.tpe.dealias) else super.transform(tree) - case tree: TypeTree => + case _: TypeTree | _: SingletonTypeTree => if containsCapturedType(tree.tpe) && level >= 1 then getTagRefFor(tree) else tree case tree @ Assign(lhs: RefTree, rhs) => @@ -291,7 +291,7 @@ class Splicing extends MacroTransform: private def capturedTerm(tree: Tree)(using Context): Tree = val tpe = tree.tpe.widenTermRefExpr match - case tpw: MethodicType => tpw.toFunctionType(isJava = false) + case tpw: MethodicType => tpw.toFunctionType() case tpw => tpw capturedTerm(tree, tpe) @@ -314,10 +314,7 @@ class Splicing extends MacroTransform: ) private def capturedType(tree: Tree)(using Context): Symbol = - val tpe = tree.tpe.widenTermRefExpr - val bindingSym = refBindingMap - .getOrElseUpdate(tree.symbol, (TypeTree(tree.tpe), newQuotedTypeClassBinding(tpe)))._2 
- bindingSym + refBindingMap.getOrElseUpdate(tree.symbol, (TypeTree(tree.tpe), newQuotedTypeClassBinding(tree.tpe)))._2 private def capturedPartTypes(quote: Quote)(using Context): Tree = val (tags, body1) = inContextWithQuoteTypeTags { diff --git a/compiler/src/dotty/tools/dotc/transform/Staging.scala b/compiler/src/dotty/tools/dotc/transform/Staging.scala index 43cbe80ce8c4..a48718b2d60a 100644 --- a/compiler/src/dotty/tools/dotc/transform/Staging.scala +++ b/compiler/src/dotty/tools/dotc/transform/Staging.scala @@ -2,15 +2,15 @@ package dotty.tools.dotc package transform import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Phases._ -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Phases.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.inlines.Inlines import dotty.tools.dotc.util.SrcPos -import dotty.tools.dotc.transform.SymUtils._ + import dotty.tools.dotc.staging.StagingLevel.* import dotty.tools.dotc.staging.CrossStageSafety import dotty.tools.dotc.staging.HealType @@ -20,7 +20,7 @@ import dotty.tools.dotc.staging.HealType * See `CrossStageSafety` */ class Staging extends MacroTransform { - import tpd._ + import tpd.* override def phaseName: String = Staging.name diff --git a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala index b78c75d58340..ce2b8fa591d8 100644 --- a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala +++ b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala @@ -5,13 +5,13 @@ package transform import dotty.tools.dotc.ast.{Trees, tpd} import scala.collection.mutable import ValueClasses.isMethodWithExtension -import core._ -import Contexts._, Flags._, Symbols._, Names._, StdNames._, NameOps._, Trees._ -import TypeUtils._, SymUtils._ +import core.* +import Contexts.*, Flags.*, Symbols.*, Names.*, StdNames.*, NameOps.*, Trees.* + import DenotTransformers.DenotTransformer -import Symbols._ -import util.Spans._ -import Decorators._ +import Symbols.* +import util.Spans.* +import Decorators.* import NameKinds.{ SuperAccessorName, ExpandPrefixName } /** This class adds super accessors for all super calls that either @@ -32,7 +32,7 @@ import NameKinds.{ SuperAccessorName, ExpandPrefixName } */ class SuperAccessors(thisPhase: DenotTransformer) { - import tpd._ + import tpd.* /** Some parts of trees will get a new owner in subsequent phases. * These are value class methods, which will become extension methods. @@ -153,8 +153,11 @@ class SuperAccessors(thisPhase: DenotTransformer) { } } - val needAccessor = name.isTermName && ( - clazz != currentClass || !validCurrentClass || mix.name.isEmpty && clazz.is(Trait)) + val needAccessor = + name.isTermName // Types don't need super accessors + && !sym.isEffectivelyErased // Erased and concrete inline methods are not called at runtime + && !sym.isInlineMethod // so they don't need superaccessors. 
+ && (clazz != currentClass || !validCurrentClass || mix.name.isEmpty && clazz.is(Trait)) if (needAccessor) atPhase(thisPhase.next)(superAccessorCall(sel, mix.name)) else sel diff --git a/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala b/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala index 48bcbaab3511..6d2aedb9b47b 100644 --- a/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala +++ b/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala @@ -1,21 +1,20 @@ package dotty.tools.dotc package transform -import core._ -import Symbols._, Types._, Contexts._, Names._, StdNames._, Constants._, SymUtils._ -import Flags._ -import DenotTransformers._ -import Decorators._ -import NameOps._ +import core.* +import Symbols.*, Types.*, Contexts.*, Names.*, StdNames.*, Constants.* +import Flags.* +import DenotTransformers.* +import Decorators.* +import NameOps.* import Annotations.Annotation import typer.ProtoTypes.constrained import ast.untpd -import ValueClasses.isDerivedValueClass -import SymUtils._ + import util.Property import util.Spans.Span import config.Printers.derive -import NullOpsDecorator._ +import NullOpsDecorator.* object SyntheticMembers { @@ -53,8 +52,8 @@ object SyntheticMembers { * def hashCode(): Int */ class SyntheticMembers(thisPhase: DenotTransformer) { - import SyntheticMembers._ - import ast.tpd._ + import SyntheticMembers.* + import ast.tpd.* private var myValueSymbols: List[Symbol] = Nil private var myCaseSymbols: List[Symbol] = Nil @@ -90,7 +89,7 @@ class SyntheticMembers(thisPhase: DenotTransformer) { def caseAndValueMethods(clazz: ClassSymbol)(using Context): List[Tree] = { val clazzType = clazz.appliedRef lazy val accessors = - if (isDerivedValueClass(clazz)) clazz.paramAccessors.take(1) // Tail parameters can only be `erased` + if clazz.isDerivedValueClass then clazz.paramAccessors.take(1) // Tail parameters can only be `erased` else clazz.caseAccessors val isEnumValue = clazz.isAnonymousClass && clazz.info.parents.head.classSymbol.is(Enum) val isSimpleEnumValue = isEnumValue && !clazz.owner.isAllOf(EnumCase) @@ -98,12 +97,12 @@ class SyntheticMembers(thisPhase: DenotTransformer) { val isNonJavaEnumValue = isEnumValue && !isJavaEnumValue val symbolsToSynthesize: List[Symbol] = - if (clazz.is(Case)) - if (clazz.is(Module)) caseModuleSymbols + if clazz.is(Case) then + if clazz.is(Module) then caseModuleSymbols else caseSymbols - else if (isNonJavaEnumValue) nonJavaEnumValueSymbols - else if (isEnumValue) enumValueSymbols - else if (isDerivedValueClass(clazz)) valueSymbols + else if isNonJavaEnumValue then nonJavaEnumValueSymbols + else if isEnumValue then enumValueSymbols + else if clazz.isDerivedValueClass then valueSymbols else Nil def syntheticDefIfMissing(sym: Symbol): List[Tree] = @@ -161,7 +160,9 @@ class SyntheticMembers(thisPhase: DenotTransformer) { case nme.productArity => Literal(Constant(accessors.length)) case nme.productPrefix if isEnumValue => nameRef case nme.productPrefix => ownName - case nme.productElement => productElementBody(accessors.length, vrefss.head.head) + case nme.productElement => + if ctx.settings.YcompileScala2Library.value then productElementBodyForScala2Compat(accessors.length, vrefss.head.head) + else productElementBody(accessors.length, vrefss.head.head) case nme.productElementName => productElementNameBody(accessors.length, vrefss.head.head) } report.log(s"adding $synthetic to $clazz at ${ctx.phase}") @@ -185,7 +186,7 @@ class SyntheticMembers(thisPhase: DenotTransformer) { * ``` */ def 
productElementBody(arity: Int, index: Tree)(using Context): Tree = { - // case N => _${N + 1} + // case N => this._${N + 1} val cases = 0.until(arity).map { i => val sel = This(clazz).select(nme.selectorName(i), _.info.isParameterless) CaseDef(Literal(Constant(i)), EmptyTree, sel) @@ -194,6 +195,33 @@ class SyntheticMembers(thisPhase: DenotTransformer) { Match(index, (cases :+ generateIOBECase(index)).toList) } + /** The class + * + * ``` + * case class C(x: T, y: T) + * ``` + * + * gets the `productElement` method: + * + * ``` + * def productElement(index: Int): Any = index match { + * case 0 => this.x + * case 1 => this.y + * case _ => throw new IndexOutOfBoundsException(index.toString) + * } + * ``` + */ + def productElementBodyForScala2Compat(arity: Int, index: Tree)(using Context): Tree = { + val caseParams = ctx.owner.owner.caseAccessors + // case N => this.${paramNames(N)} + val cases = caseParams.zipWithIndex.map { (caseParam, i) => + val sel = This(clazz).select(caseParam) + CaseDef(Literal(Constant(i)), EmptyTree, sel) + } + + Match(index, (cases :+ generateIOBECase(index)).toList) + } + /** The class * * ``` @@ -501,12 +529,9 @@ class SyntheticMembers(thisPhase: DenotTransformer) { (rawRef, rawInfo) baseInfo match case tl: PolyType => - val (tl1, tpts) = constrained(tl, untpd.EmptyTree, alwaysAddTypeVars = true) - val targs = - for (tpt <- tpts) yield - tpt.tpe match { - case tvar: TypeVar => tvar.instantiate(fromBelow = false) - } + val tvars = constrained(tl) + val targs = for tvar <- tvars yield + tvar.instantiate(fromBelow = false) (baseRef.appliedTo(targs), extractParams(tl.instantiate(targs))) case methTpe => (baseRef, extractParams(methTpe)) @@ -552,7 +577,7 @@ class SyntheticMembers(thisPhase: DenotTransformer) { .map((pre, child) => rawRef(child).asSeenFrom(pre, child.owner)) case _ => cls.children.map(rawRef) - end computeChildTypes + val childTypes = computeChildTypes val cases = for (patType, idx) <- childTypes.zipWithIndex yield @@ -639,8 +664,9 @@ class SyntheticMembers(thisPhase: DenotTransformer) { val clazz = ctx.owner.asClass val syntheticMembers = serializableObjectMethod(clazz) ::: serializableEnumValueMethod(clazz) ::: caseAndValueMethods(clazz) checkInlining(syntheticMembers) - addMirrorSupport( - cpy.Template(impl)(body = syntheticMembers ::: impl.body)) + val impl1 = cpy.Template(impl)(body = syntheticMembers ::: impl.body) + if ctx.settings.YcompileScala2Library.value then impl1 + else addMirrorSupport(impl1) } private def checkInlining(syntheticMembers: List[Tree])(using Context): Unit = diff --git a/compiler/src/dotty/tools/dotc/transform/TailRec.scala b/compiler/src/dotty/tools/dotc/transform/TailRec.scala index 741b9d1627fe..43c740ce7d38 100644 --- a/compiler/src/dotty/tools/dotc/transform/TailRec.scala +++ b/compiler/src/dotty/tools/dotc/transform/TailRec.scala @@ -13,6 +13,8 @@ import transform.MegaPhase.MiniPhase import util.LinearSet import dotty.tools.uncheckedNN +import scala.compiletime.uninitialized + /** A Tail Rec Transformer. * * What it does: @@ -105,7 +107,7 @@ import dotty.tools.uncheckedNN * moved after erasure and adapted to emit `Labeled` blocks by Sébastien Doeraene */ class TailRec extends MiniPhase { - import tpd._ + import tpd.* override def phaseName: String = TailRec.name @@ -232,7 +234,7 @@ class TailRec extends MiniPhase { var failureReported: Boolean = false /** The `tailLabelN` label symbol, used to encode a `continue` from the infinite `while` loop. 
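   * For illustration (a rough sketch, not actual compiler output):
   *
   *   def sum(n: Int, acc: Int): Int =
   *     if n == 0 then acc else sum(n - 1, acc + n)
   *
   * is rewritten so that the recursive call assigns fresh copies of the
   * parameters and jumps back to this label instead of growing the stack.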
*/ - private var myContinueLabel: Symbol | Null = _ + private var myContinueLabel: Symbol | Null = uninitialized def continueLabel(using Context): Symbol = { if (myContinueLabel == null) myContinueLabel = newSymbol(method, TailLabelName.fresh(), Label, defn.UnitType) diff --git a/compiler/src/dotty/tools/dotc/transform/TransformWildcards.scala b/compiler/src/dotty/tools/dotc/transform/TransformWildcards.scala index ffed65f7676e..2be41ba208f1 100644 --- a/compiler/src/dotty/tools/dotc/transform/TransformWildcards.scala +++ b/compiler/src/dotty/tools/dotc/transform/TransformWildcards.scala @@ -1,9 +1,9 @@ package dotty.tools.dotc package transform -import MegaPhase._ -import core.DenotTransformers._ -import core.Contexts._ +import MegaPhase.* +import core.DenotTransformers.* +import core.Contexts.* import ast.tpd /** This phase transforms wildcards in valdefs with their default value. @@ -12,7 +12,7 @@ import ast.tpd * */ class TransformWildcards extends MiniPhase with IdentityDenotTransformer { - import tpd._ + import tpd.* override def phaseName: String = TransformWildcards.name diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index 34b3183a6b15..d847d14603a2 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -1,25 +1,26 @@ -package dotty.tools.dotc +package dotty.tools +package dotc package transform import core.Names.Name -import core.DenotTransformers._ -import core.SymDenotations._ -import core.Contexts._ -import core.Symbols._ -import core.Types._ -import core.Flags._ -import core.StdNames._ +import core.DenotTransformers.* +import core.SymDenotations.* +import core.Contexts.* +import core.Symbols.* +import core.Types.* +import core.Flags.* +import core.StdNames.* import core.NameKinds.{DocArtifactName, OuterSelectName} -import core.Decorators._ -import core.Phases._ +import core.Decorators.* +import core.Phases.* import core.Mode -import typer._ -import reporting._ -import ast.Trees._ +import typer.* +import reporting.* +import ast.Trees.* import ast.{tpd, untpd} -import util.Chars._ +import util.Chars.* import collection.mutable -import ProtoTypes._ +import ProtoTypes.* import staging.StagingLevel import inlines.Inlines.inInlineMethod @@ -38,8 +39,8 @@ import scala.util.control.NonFatal * represented as TypeTrees then). 
*/ class TreeChecker extends Phase with SymTransformer { - import ast.tpd._ - import TreeChecker._ + import ast.tpd.* + import TreeChecker.* private val seenClasses = collection.mutable.HashMap[String, Symbol]() private val seenModuleVals = collection.mutable.HashMap[String, Symbol]() @@ -89,7 +90,7 @@ class TreeChecker extends Phase with SymTransformer { if (ctx.phaseId <= erasurePhase.id) { val initial = symd.initial assert(symd == initial || symd.signature == initial.signature, - i"""Signature of ${sym} in ${sym.ownersIterator.toList}%, % changed at phase ${ctx.phase.prevMega} + i"""Signature of ${sym} in ${sym.ownersIterator.toList}%, % changed at phase ${ctx.phase.prev.megaPhase} |Initial info: ${initial.info} |Initial sig : ${initial.signature} |Current info: ${symd.info} @@ -108,7 +109,7 @@ class TreeChecker extends Phase with SymTransformer { check(ctx.base.allPhases.toIndexedSeq, ctx) def check(phasesToRun: Seq[Phase], ctx: Context): Tree = { - val fusedPhase = ctx.phase.prevMega(using ctx) + val fusedPhase = ctx.phase.prev.megaPhase(using ctx) report.echo(s"checking ${ctx.compilationUnit} after phase ${fusedPhase}")(using ctx) inContext(ctx) { @@ -129,7 +130,7 @@ class TreeChecker extends Phase with SymTransformer { catch { case NonFatal(ex) => //TODO CHECK. Check that we are bootstrapped inContext(checkingCtx) { - println(i"*** error while checking ${ctx.compilationUnit} after phase ${ctx.phase.prevMega(using ctx)} ***") + println(i"*** error while checking ${ctx.compilationUnit} after phase ${ctx.phase.prev.megaPhase(using ctx)} ***") } throw ex } @@ -185,7 +186,7 @@ object TreeChecker { * tpt, SeqLiteral elemtpt, ValDef tpt, DefDef tpt, and TypeDef rhs. */ object TreeNodeChecker extends untpd.TreeTraverser: - import untpd._ + import untpd.* def traverse(tree: Tree)(using Context) = tree match case t: TypeTree => assert(assertion = false, i"TypeTree not expected: $t") case t @ TypeApply(fun, _targs) => traverse(fun) @@ -206,7 +207,7 @@ object TreeChecker { class Checker(phasesToCheck: Seq[Phase]) extends ReTyper with Checking { - import ast.tpd._ + import ast.tpd.* protected val nowDefinedSyms = util.HashSet[Symbol]() private val patBoundSyms = util.HashSet[Symbol]() @@ -445,14 +446,13 @@ object TreeChecker { assert(tree.isTerm || !ctx.isAfterTyper, tree.show + " at " + ctx.phase) val tpe = tree.typeOpt - // Polymorphic apply methods stay structural until Erasure - val isPolyFunctionApply = (tree.name eq nme.apply) && tree.qualifier.typeOpt.derivesFrom(defn.PolyFunctionClass) - // Erased functions stay structural until Erasure - val isErasedFunctionApply = (tree.name eq nme.apply) && tree.qualifier.typeOpt.derivesFrom(defn.ErasedFunctionClass) + // PolyFunction apply methods stay structural until Erasure + val isRefinedFunctionApply = (tree.name eq nme.apply) && tree.qualifier.typeOpt.derivesFrom(defn.PolyFunctionClass) + // Outer selects are pickled specially so don't require a symbol val isOuterSelect = tree.name.is(OuterSelectName) val isPrimitiveArrayOp = ctx.erasedTypes && nme.isPrimitiveName(tree.name) - if !(tree.isType || isPolyFunctionApply || isErasedFunctionApply || isOuterSelect || isPrimitiveArrayOp) then + if !(tree.isType || isRefinedFunctionApply || isOuterSelect || isPrimitiveArrayOp) then val denot = tree.denot assert(denot.exists, i"Selection $tree with type $tpe does not have a denotation") assert(denot.symbol.exists, i"Denotation $denot of selection $tree with type $tpe does not have a symbol, qualifier type = ${tree.qualifier.typeOpt}") @@ -495,6 +495,18 @@
object TreeChecker { assert(tree.qual.typeOpt.isInstanceOf[ThisType], i"expect prefix of Super to be This, actual = ${tree.qual}") super.typedSuper(tree, pt) + override def typedNew(tree: untpd.New, pt: Type)(using Context): Tree = + val tree1 = super.typedNew(tree, pt).asInstanceOf[tpd.New] + val sym = tree1.tpe.typeSymbol + if postTyperPhase <= ctx.phase then // postTyper checks that `New` nodes can be instantiated + assert(!tree1.tpe.isInstanceOf[TermRef], s"New should not have a TermRef type: ${tree1.tpe}") + assert( + !sym.is(Module) + || ctx.erasedTypes // TODO add check for module initialization after erasure (LazyVals transformation) + || ctx.owner == sym.companionModule, + i"new of $sym module should only exist in ${sym.companionModule} but was in ${ctx.owner}") + tree1 + override def typedApply(tree: untpd.Apply, pt: Type)(using Context): Tree = tree match case Apply(Select(qual, nme.CONSTRUCTOR), _) if !ctx.phase.erasedTypes @@ -569,6 +581,14 @@ object TreeChecker { super.typedClassDef(cdef, cls) } + override def typedValDef(vdef: untpd.ValDef, sym: Symbol)(using Context): Tree = + val tpdTree = super.typedValDef(vdef, sym) + vdef.tpt.tpe match + case _: ValueType => () // ok + case _: ExprType if sym.isOneOf(TermParamOrAccessor) => () // ok + case _ => assert(false, i"wrong type, expected a value type for ${sym.fullName}, but found: ${sym.info}") + tpdTree + override def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(using Context): Tree = def defParamss = ddef.paramss.filter(!_.isEmpty).nestedMap(_.symbol) def layout(symss: List[List[Symbol]]): String = @@ -687,6 +707,25 @@ object TreeChecker { assert(!tree.expr.isInstanceOf[untpd.Quote] || inInlineMethod, i"missed quote cancellation in $tree") super.typedSplice(tree, pt) + override def typedQuotePattern(tree: untpd.QuotePattern, pt: Type)(using Context): Tree = + assert(ctx.mode.is(Mode.Pattern)) + for binding <- tree.bindings do + assert(binding.isInstanceOf[untpd.Bind], i"expected Bind in QuotePattern bindings but was: $binding") + super.typedQuotePattern(tree, pt) + + override def typedSplicePattern(tree: untpd.SplicePattern, pt: Type)(using Context): Tree = + assert(ctx.mode.isQuotedPattern) + def isAppliedIdent(rhs: untpd.Tree): Boolean = rhs match + case _: Ident => true + case rhs: GenericApply => isAppliedIdent(rhs.fun) + case _ => false + def isEtaExpandedIdent(arg: untpd.Tree): Boolean = arg match + case closureDef(ddef) => isAppliedIdent(ddef.rhs) || isEtaExpandedIdent(ddef.rhs) + case _ => false + for arg <- tree.args do + assert(arg.isInstanceOf[untpd.Ident] || isEtaExpandedIdent(arg), i"HOAS argument expected Ident or eta-expanded Ident but was: $arg") + super.typedSplicePattern(tree, pt) + override def typedHole(tree: untpd.Hole, pt: Type)(using Context): Tree = { val tree1 @ Hole(isTerm, idx, args, content) = super.typedHole(tree, pt): @unchecked @@ -696,7 +735,7 @@ object TreeChecker { // Check that we only add the captured type `T` instead of a more complex type like `List[T]`. // If we have `F[T]` with captured `F` and `T`, we should list `F` and `T` separately in the args. 
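      // For instance (illustrative): with captured `F` and `T`, the args are the
      // references `F` and `T` themselves; a single composed arg such as `F[T]`
      // would be malformed here.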
for arg <- args do - assert(arg.isTerm || arg.tpe.isInstanceOf[TypeRef], "Expected TypeRef in Hole type args but got: " + arg.tpe) + assert(arg.isTerm || arg.tpe.isInstanceOf[TypeRef | TermRef | ThisType], "Unexpected type arg in Hole: " + arg.tpe) // Check result type of the hole if isTerm then assert(tree1.typeOpt <:< pt) @@ -712,15 +751,15 @@ object TreeChecker { defn.AnyType case tpe => tpe defn.QuotedExprClass.typeRef.appliedTo(tpe) - else defn.QuotedTypeClass.typeRef.appliedTo(arg.typeOpt.widenTermRefExpr) + else defn.QuotedTypeClass.typeRef.appliedTo(arg.typeOpt) } val expectedResultType = if isTerm then defn.QuotedExprClass.typeRef.appliedTo(tree1.typeOpt) else defn.QuotedTypeClass.typeRef.appliedTo(tree1.typeOpt) val contextualResult = - defn.FunctionOf(List(defn.QuotesClass.typeRef), expectedResultType, isContextual = true) + defn.FunctionNOf(List(defn.QuotesClass.typeRef), expectedResultType, isContextual = true) val expectedContentType = - defn.FunctionOf(argQuotedTypes, contextualResult) + defn.FunctionNOf(argQuotedTypes, contextualResult) assert(content.typeOpt =:= expectedContentType, i"unexpected content of hole\nexpected: ${expectedContentType}\nwas: ${content.typeOpt}") tree1 @@ -732,23 +771,16 @@ object TreeChecker { override def adapt(tree: Tree, pt: Type, locked: TypeVars)(using Context): Tree = { def isPrimaryConstructorReturn = ctx.owner.isPrimaryConstructor && pt.isRef(ctx.owner.owner) && tree.tpe.isRef(defn.UnitClass) - def infoStr(tp: Type) = tp match { - case tp: TypeRef => - val sym = tp.symbol - i"${sym.showLocated} with ${tp.designator}, flags = ${sym.flagsString}, underlying = ${tp.underlyingIterator.toList}%, %" - case _ => - "??" - } - if (ctx.mode.isExpr && - !tree.isEmpty && - !isPrimaryConstructorReturn && - !pt.isInstanceOf[FunOrPolyProto]) + if ctx.mode.isExpr + && !tree.isEmpty + && !isPrimaryConstructorReturn + && !pt.isInstanceOf[FunOrPolyProto] + then assert(tree.tpe <:< pt, { val mismatch = TypeMismatch(tree.tpe, pt, Some(tree)) - i"""|${mismatch.msg} - |found: ${infoStr(tree.tpe)} - |expected: ${infoStr(pt)} - |tree = $tree""".stripMargin + i"""|Type Mismatch: + |${mismatch.message} + |tree = $tree ${tree.className}""".stripMargin }) tree } diff --git a/compiler/src/dotty/tools/dotc/transform/TreeExtractors.scala b/compiler/src/dotty/tools/dotc/transform/TreeExtractors.scala index aec44d5987bf..8d5b7c28bbbc 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeExtractors.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeExtractors.scala @@ -2,12 +2,12 @@ package dotty.tools.dotc package transform import ast.{Trees, tpd} -import core._ -import Contexts._, Trees._, Types._, StdNames._, Symbols._ -import ValueClasses._ +import core.* +import Contexts.*, Trees.*, Types.*, StdNames.*, Symbols.* +import ValueClasses.* object TreeExtractors { - import tpd._ + import tpd.* /** Match arg1.op(arg2) and extract (arg1, op.symbol, arg2) */ object BinaryOp { diff --git a/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala b/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala index 92d22b1cc57e..095c6af60766 100644 --- a/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala +++ b/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala @@ -1,12 +1,12 @@ package dotty.tools.dotc package transform -import core.Symbols._ -import core.StdNames._ -import core.Types._ +import core.Symbols.* +import core.StdNames.* +import core.Types.* import core.NameKinds.ExceptionBinderName import dotty.tools.dotc.core.Flags -import 
dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.transform.MegaPhase.MiniPhase import dotty.tools.dotc.util.Spans.Span @@ -39,7 +39,7 @@ import dotty.tools.dotc.util.Spans.Span * */ class TryCatchPatterns extends MiniPhase { - import dotty.tools.dotc.ast.tpd._ + import dotty.tools.dotc.ast.tpd.* override def phaseName: String = TryCatchPatterns.name diff --git a/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala b/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala index 6fba0bca4ce3..bdb7072a6530 100644 --- a/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala +++ b/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala @@ -1,21 +1,20 @@ package dotty.tools.dotc package transform -import core._ -import Contexts._ -import Decorators._ -import Definitions._ -import DenotTransformers._ -import StdNames._ -import Symbols._ -import MegaPhase._ -import Types._ +import core.* +import Contexts.* +import Decorators.* +import Definitions.* +import DenotTransformers.* +import StdNames.* +import Symbols.* +import MegaPhase.* +import Types.* import dotty.tools.dotc.ast.tpd - /** Optimize generic operations on tuples */ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { - import tpd._ + import tpd.* override def phaseName: String = TupleOptimizations.name @@ -33,7 +32,7 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { private def transformTupleCons(tree: tpd.Apply)(using Context): Tree = { val head :: tail :: Nil = tree.args: @unchecked - defn.tupleTypes(tree.tpe.widenTermRefExpr.dealias) match { + tree.tpe.widenTermRefExpr.tupleElementTypes match { case Some(tpes) => // Generate the tuple directly with TupleN+1.apply val size = tpes.size @@ -61,7 +60,7 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { private def transformTupleTail(tree: tpd.Apply)(using Context): Tree = { val Apply(_, tup :: Nil) = tree: @unchecked - defn.tupleTypes(tup.tpe.widenTermRefExpr.dealias, MaxTupleArity + 1) match { + tup.tpe.widenTermRefExpr.tupleElementTypesUpTo(MaxTupleArity + 1) match { case Some(tpes) => // Generate the tuple directly with TupleN-1.apply val size = tpes.size @@ -104,7 +103,7 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { private def transformTupleConcat(tree: tpd.Apply)(using Context): Tree = { val Apply(_, self :: that :: Nil) = tree: @unchecked - (defn.tupleTypes(self.tpe.widenTermRefExpr.dealias), defn.tupleTypes(that.tpe.widenTermRefExpr.dealias)) match { + (self.tpe.widenTermRefExpr.tupleElementTypes, that.tpe.widenTermRefExpr.tupleElementTypes) match { case (Some(tpes1), Some(tpes2)) => // Generate the tuple directly with TupleN+M.apply val n = tpes1.size @@ -139,7 +138,7 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { private def transformTupleApply(tree: tpd.Apply)(using Context): Tree = { val Apply(_, tup :: nTree :: Nil) = tree: @unchecked - (defn.tupleTypes(tup.tpe.widenTermRefExpr.dealias), nTree.tpe) match { + (tup.tpe.widenTermRefExpr.tupleElementTypes, nTree.tpe) match { case (Some(tpes), nTpe: ConstantType) => // Get the element directly with TupleM._n+1 or TupleXXL.productElement(n) val size = tpes.size @@ -166,7 +165,7 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { private def transformTupleToArray(tree: tpd.Apply)(using Context): Tree = { val Apply(_, tup :: Nil) = tree: @unchecked -
defn.tupleTypes(tup.tpe.widen, MaxTupleArity) match { + tup.tpe.widen.tupleElementTypesUpTo(MaxTupleArity) match { case Some(tpes) => val size = tpes.size if (size == 0) diff --git a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala index f5cb8eab73a4..74a4845424ea 100644 --- a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala +++ b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala @@ -4,19 +4,19 @@ package transform import scala.language.unsafeNulls as _ -import core._ -import Contexts._, Symbols._, Types._, Constants._, StdNames._, Decorators._ +import core.* +import Contexts.*, Symbols.*, Types.*, Constants.*, StdNames.*, Decorators.* import ast.untpd -import Erasure.Boxing._ -import TypeErasure._ -import ValueClasses._ -import SymUtils._ -import core.Flags._ -import util.Spans._ -import reporting._ +import Erasure.Boxing.* +import TypeErasure.* + +import core.Flags.* +import util.Spans.* +import reporting.* import config.Printers.{ transforms => debug } import patmat.Typ +import dotty.tools.dotc.util.SrcPos /** This transform normalizes type tests and type casts, * also replacing type tests with singleton argument type with reference equality check @@ -29,7 +29,7 @@ import patmat.Typ * cannot be rewritten before erasure. That's why TypeTestsCasts is called from Erasure. */ object TypeTestsCasts { - import ast.tpd._ + import ast.tpd.* import typer.Inferencing.maximizeType import typer.ProtoTypes.constrained @@ -74,7 +74,7 @@ object TypeTestsCasts { }.apply(tp) /** Returns true if the type arguments of `P` can be determined from `X` */ - def typeArgsTrivial(X: Type, P: AppliedType)(using Context) = inContext(ctx.fresh.setExploreTyperState().setFreshGADTBounds) { + def typeArgsDeterminable(X: Type, P: AppliedType)(using Context) = inContext(ctx.fresh.setExploreTyperState().setFreshGADTBounds) { val AppliedType(tycon, _) = P def underlyingLambda(tp: Type): TypeLambda = tp.ensureLambdaSub match { @@ -82,7 +82,7 @@ object TypeTestsCasts { case tp: TypeProxy => underlyingLambda(tp.superType) } val typeLambda = underlyingLambda(tycon) - val tvars = constrained(typeLambda, untpd.EmptyTree, alwaysAddTypeVars = true)._2.map(_.tpe) + val tvars = constrained(typeLambda) val P1 = tycon.appliedTo(tvars) debug.println("before " + ctx.typerState.constraint.show) @@ -154,8 +154,12 @@ object TypeTestsCasts { case x => // always false test warnings are emitted elsewhere - TypeComparer.provablyDisjoint(x, tpe.derivedAppliedType(tycon, targs.map(_ => WildcardType))) - || typeArgsTrivial(X, tpe) + // provablyDisjoint wants fully applied types as input; because we're in the middle of erasure, we sometimes get raw types here + val xApplied = + val tparams = x.typeParams + if tparams.isEmpty then x else x.appliedTo(tparams.map(_ => WildcardType)) + TypeComparer.provablyDisjoint(xApplied, tpe.derivedAppliedType(tycon, targs.map(_ => WildcardType))) + || typeArgsDeterminable(X, tpe) ||| i"its type arguments can't be determined from $X" } case AndType(tp1, tp2) => recur(X, tp1) && recur(X, tp2) @@ -218,7 +222,7 @@ object TypeTestsCasts { !(!testCls.isPrimitiveValueClass && foundCls.isPrimitiveValueClass) && // foundCls can be `Boolean`, while testCls is `Integer` // it can happen in `(3: Boolean | Int).isInstanceOf[Int]` - !isDerivedValueClass(foundCls) && !isDerivedValueClass(testCls) + !foundCls.isDerivedValueClass && !testCls.isDerivedValueClass // we don't have the logic to handle derived value classes /** Check 
whether a runtime test that a value of `foundCls` can be a `testCls` @@ -285,7 +289,7 @@ object TypeTestsCasts { Typed(expr, tree.args.head) // Replace cast by type ascription (which does not generate any bytecode) else if (testCls eq defn.BoxedUnitClass) // as a special case, casting to Unit always successfully returns Unit - Block(expr :: Nil, Literal(Constant(()))).withSpan(expr.span) + Block(expr :: Nil, unitLiteral).withSpan(expr.span) else if (foundClsSymPrimitive) if (testCls.isPrimitiveValueClass) primitiveConversion(expr, testCls) else derivedTree(box(expr), defn.Any_asInstanceOf, testType) @@ -359,11 +363,8 @@ object TypeTestsCasts { if (sym.isTypeTest) { val argType = tree.args.head.tpe val isTrusted = tree.hasAttachment(PatternMatcher.TrustedTypeTestKey) - val isUnchecked = expr.tpe.widenTermRefExpr.hasAnnotation(defn.UncheckedAnnot) - if !isTrusted && !isUnchecked then - val whyNot = whyUncheckable(expr.tpe, argType, tree.span) - if whyNot.nonEmpty then - report.uncheckedWarning(em"the type test for $argType cannot be checked at runtime because $whyNot", expr.srcPos) + if !isTrusted then + checkTypePattern(expr.tpe, argType, expr.srcPos) transformTypeTest(expr, argType, flagUnrelated = enclosingInlineds.isEmpty) // if the test comes from inlined code, don't flag it even if it is always false } @@ -382,6 +383,19 @@ object TypeTestsCasts { interceptWith(expr) } + /** After PatternMatcher, only Bind nodes are present in simple try-catch trees + * See i19013 + */ + def checkBind(tree: Bind)(using Context) = + checkTypePattern(defn.ThrowableType, tree.body.tpe, tree.srcPos) + + private def checkTypePattern(exprTpe: Type, castTpe: Type, pos: SrcPos)(using Context) = + val isUnchecked = exprTpe.widenTermRefExpr.hasAnnotation(defn.UncheckedAnnot) + if !isUnchecked then + val whyNot = whyUncheckable(exprTpe, castTpe, pos.span) + if whyNot.nonEmpty then + report.uncheckedWarning(UncheckedTypePattern(castTpe, whyNot), pos) + private def effectiveClass(tp: Type)(using Context): Symbol = if tp.isRef(defn.PairClass) then effectiveClass(erasure(tp)) else if tp.isRef(defn.AnyValClass) then defn.AnyClass diff --git a/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala b/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala deleted file mode 100644 index a897503ef275..000000000000 --- a/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala +++ /dev/null @@ -1,120 +0,0 @@ -package dotty.tools -package dotc -package transform - -import core._ -import TypeErasure.ErasedValueType -import Types._ -import Contexts._ -import Symbols._ -import Names.Name - -import dotty.tools.dotc.core.Decorators.* - -object TypeUtils { - /** A decorator that provides methods on types - * that are needed in the transformer pipeline. - */ - extension (self: Type) { - - def isErasedValueType(using Context): Boolean = - self.isInstanceOf[ErasedValueType] - - def isPrimitiveValueType(using Context): Boolean = - self.classSymbol.isPrimitiveValueClass - - def isErasedClass(using Context): Boolean = - self.underlyingClassRef(refinementOK = true).typeSymbol.is(Flags.Erased) - - /** Is this type a checked exception? This is the case if the type - * derives from Exception but not from RuntimeException. According to - * that definition Throwable is unchecked. That makes sense since you should - * neither throw nor catch `Throwable` anyway, so we should not define - * a capability to do so.
- */ - def isCheckedException(using Context): Boolean = - self.derivesFrom(defn.ExceptionClass) - && !self.derivesFrom(defn.RuntimeExceptionClass) - - def isByName: Boolean = - self.isInstanceOf[ExprType] - - def ensureMethodic(using Context): Type = self match { - case self: MethodicType => self - case _ => if (ctx.erasedTypes) MethodType(Nil, self) else ExprType(self) - } - - def widenToParents(using Context): Type = self.parents match { - case Nil => self - case ps => ps.reduceLeft(AndType(_, _)) - } - - /** The arity of this tuple type, which can be made up of EmptyTuple, TupleX and `*:` pairs, - * or -1 if this is not a tuple type. - */ - def tupleArity(using Context): Int = self/*.dealias*/ match { // TODO: why does dealias cause a failure in tests/run-deep-subtype/Tuple-toArray.scala - case AppliedType(tycon, _ :: tl :: Nil) if tycon.isRef(defn.PairClass) => - val arity = tl.tupleArity - if (arity < 0) arity else arity + 1 - case self: SingletonType => - if self.termSymbol == defn.EmptyTupleModule then 0 else -1 - case self: AndOrType => - val arity1 = self.tp1.tupleArity - val arity2 = self.tp2.tupleArity - if arity1 == arity2 then arity1 else -1 - case _ => - if defn.isTupleNType(self) then self.dealias.argInfos.length - else -1 - } - - /** The element types of this tuple type, which can be made up of EmptyTuple, TupleX and `*:` pairs */ - def tupleElementTypes(using Context): Option[List[Type]] = self.dealias match { - case AppliedType(tycon, hd :: tl :: Nil) if tycon.isRef(defn.PairClass) => - tl.tupleElementTypes.map(hd :: _) - case self: SingletonType => - if self.termSymbol == defn.EmptyTupleModule then Some(Nil) else None - case AndType(tp1, tp2) => - // We assume that we have the following property: - // (T1, T2, ..., Tn) & (U1, U2, ..., Un) = (T1 & U1, T2 & U2, ..., Tn & Un) - tp1.tupleElementTypes.zip(tp2.tupleElementTypes).map { case (t1, t2) => t1.intersect(t2) } - case OrType(tp1, tp2) => - None // We can't combine the type of two tuples - case _ => - if defn.isTupleClass(self.typeSymbol) then Some(self.dealias.argInfos) - else None - } - - /** The `*:` equivalent of an instance of a Tuple class */ - def toNestedPairs(using Context): Type = - tupleElementTypes match - case Some(types) => TypeOps.nestedPairs(types) - case None => throw new AssertionError("not a tuple") - - def refinedWith(name: Name, info: Type)(using Context) = RefinedType(self, name, info) - - /** The TermRef referring to the companion of the underlying class reference - * of this type, while keeping the same prefix. - */ - def mirrorCompanionRef(using Context): TermRef = self match { - case AndType(tp1, tp2) => - val c1 = tp1.classSymbol - val c2 = tp2.classSymbol - if c1.isSubClass(c2) then tp1.mirrorCompanionRef - else tp2.mirrorCompanionRef // precondition: the parts of the AndType have already been checked to be non-overlapping - case self @ TypeRef(prefix, _) if self.symbol.isClass => - prefix.select(self.symbol.companionModule).asInstanceOf[TermRef] - case self: TypeProxy => - self.superType.mirrorCompanionRef - } - - /** Is this type a methodic type that takes at least one parameter? */ - def takesParams(using Context): Boolean = self.stripPoly match - case mt: MethodType => mt.paramNames.nonEmpty || mt.resType.takesParams - case _ => false - - /** Is this type a methodic type that takes implicit parameters (both old and new) at some point? 
*/ - def takesImplicitParams(using Context): Boolean = self.stripPoly match - case mt: MethodType => mt.isImplicitMethod || mt.resType.takesImplicitParams - case _ => false - } -} diff --git a/compiler/src/dotty/tools/dotc/transform/UncacheGivenAliases.scala b/compiler/src/dotty/tools/dotc/transform/UncacheGivenAliases.scala index 95d40102c5a7..6e02ea7c227c 100644 --- a/compiler/src/dotty/tools/dotc/transform/UncacheGivenAliases.scala +++ b/compiler/src/dotty/tools/dotc/transform/UncacheGivenAliases.scala @@ -1,12 +1,12 @@ package dotty.tools.dotc package transform -import MegaPhase._ +import MegaPhase.* import core.DenotTransformers.{IdentityDenotTransformer} -import core.Symbols._ -import core.Contexts._ -import core.Types._ -import core.Flags._ +import core.Symbols.* +import core.Contexts.* +import core.Types.* +import core.Flags.* import ast.tpd object UncacheGivenAliases: @@ -23,7 +23,7 @@ object UncacheGivenAliases: */ class UncacheGivenAliases extends MiniPhase with IdentityDenotTransformer: thisPhase => - import tpd._ + import tpd.* override def phaseName: String = UncacheGivenAliases.name diff --git a/compiler/src/dotty/tools/dotc/transform/UninitializedDefs.scala b/compiler/src/dotty/tools/dotc/transform/UninitializedDefs.scala index a7ccaa19d90a..f22fc53e9b6e 100644 --- a/compiler/src/dotty/tools/dotc/transform/UninitializedDefs.scala +++ b/compiler/src/dotty/tools/dotc/transform/UninitializedDefs.scala @@ -1,10 +1,10 @@ package dotty.tools.dotc package transform -import core._ -import Contexts._ -import Flags._ -import Symbols._ +import core.* +import Contexts.* +import Flags.* +import Symbols.* import MegaPhase.MiniPhase import StdNames.nme import ast.tpd @@ -19,7 +19,7 @@ import ast.tpd * @syntax markdown */ class UninitializedDefs extends MiniPhase: - import tpd._ + import tpd.* override def phaseName: String = UninitializedDefs.name diff --git a/compiler/src/dotty/tools/dotc/transform/VCElideAllocations.scala b/compiler/src/dotty/tools/dotc/transform/VCElideAllocations.scala index 879a885d626e..6430dd7248b1 100644 --- a/compiler/src/dotty/tools/dotc/transform/VCElideAllocations.scala +++ b/compiler/src/dotty/tools/dotc/transform/VCElideAllocations.scala @@ -2,10 +2,10 @@ package dotty.tools.dotc package transform import ast.tpd -import core._ -import Contexts._, Symbols._, Types._, Flags._, Phases._ -import DenotTransformers._, MegaPhase._ -import TreeExtractors._, ValueClasses._ +import core.* +import Contexts.*, Symbols.*, Types.*, Flags.*, Phases.* +import DenotTransformers.*, MegaPhase.* +import TreeExtractors.*, ValueClasses.* /** This phase elides unnecessary value class allocations * @@ -16,7 +16,7 @@ import TreeExtractors._, ValueClasses._ * (new V(u)).underlying() => u */ class VCElideAllocations extends MiniPhase with IdentityDenotTransformer { - import tpd._ + import tpd.* override def phaseName: String = VCElideAllocations.name diff --git a/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala b/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala index 219945d4ebb1..fb1dd04bd6ad 100644 --- a/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala +++ b/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala @@ -3,10 +3,10 @@ package dotc package transform import ast.{Trees, tpd} -import core._ -import Contexts._, Trees._, Types._ -import DenotTransformers._, MegaPhase._ -import ExtensionMethods._, ValueClasses._ +import core.* +import Contexts.*, Trees.*, Types.* +import DenotTransformers.*, MegaPhase.* +import ExtensionMethods.*, 
ValueClasses.* /** This phase inlines calls to methods of value classes. @@ -40,7 +40,7 @@ import ExtensionMethods._, ValueClasses._ * need to have any knowledge of the name mangling done by other phases. */ class VCInlineMethods extends MiniPhase with IdentityDenotTransformer { - import tpd._ + import tpd.* override def phaseName: String = VCInlineMethods.name diff --git a/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala b/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala index 28d1255eaa72..5cdd5d8ded43 100644 --- a/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala +++ b/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala @@ -1,26 +1,17 @@ package dotty.tools.dotc package transform -import core._ -import Types._ -import Symbols._ -import Contexts._ -import Phases._ -import Flags._ -import StdNames._ -import SymUtils._ +import core.* +import Types.* +import Symbols.* +import Contexts.* +import Phases.* +import Flags.* +import StdNames.* /** Methods that apply to user-defined value classes */ object ValueClasses { - def isDerivedValueClass(sym: Symbol)(using Context): Boolean = sym.isClass && { - val d = sym.denot - !d.isRefinementClass && - d.isValueClass && - (d.initial.symbol ne defn.AnyValClass) && // Compare the initial symbol because AnyVal does not exist after erasure - !d.isPrimitiveValueClass - } - def isMethodWithExtension(sym: Symbol)(using Context): Boolean = val d = sym.denot.initial d.validFor.firstPhaseId <= extensionMethodsPhase.id diff --git a/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala b/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala index 8080a7c911b3..222f3fec24dc 100644 --- a/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala +++ b/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala @@ -2,16 +2,16 @@ package dotty.tools.dotc package transform import dotty.tools.dotc.ast.{tpd, untpd} -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.Phases.{Phase, postTyperPhase} -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.util.SourceFile /** Ycheck inlined positions */ class YCheckPositions extends Phase { - import tpd._ + import tpd.* override def phaseName: String = YCheckPositions.name @@ -35,20 +35,22 @@ class YCheckPositions extends Phase { val currentSource = sources.head assert(tree.source == currentSource, i"wrong source set for $tree # ${tree.uniqueId} of ${tree.getClass}, set to ${tree.source} but context had $currentSource\n ${tree.symbol.flagsString}") - // Recursivlely check children while keeping track of current source + // Recursively check children while keeping track of current source reporting.trace(i"check pos ${tree.getClass} ${tree.source} ${sources.head} $tree") { tree match { - case Inlined(EmptyTree, bindings, expansion) => + case tree @ Inlined(_, bindings, expansion) if tree.inlinedFromOuterScope => assert(bindings.isEmpty) val old = sources sources = old.tail - traverse(expansion)(using inlineContext(EmptyTree).withSource(sources.head)) + traverse(expansion)(using inlineContext(tree).withSource(sources.head)) sources = old - case Inlined(call, bindings, expansion) => + case tree @ Inlined(call, bindings, expansion) => // bindings.foreach(traverse(_)) // TODO check inline proxies (see 
tests/tun/lst) sources = call.symbol.topLevelClass.source :: sources - if (!isMacro(call)) // FIXME macro implementations can drop Inlined nodes. We should reinsert them after macro expansion based on the positions of the trees - traverse(expansion)(using inlineContext(call).withSource(sources.head)) + if !isMacro(call) // FIXME macro implementations can drop Inlined nodes. We should reinsert them after macro expansion based on the positions of the trees + && !isBootstrappedPredefWithPatchedMethods(call) // FIXME The patched symbol has a different source than the definition of Predef. Solution: define them directly in `Predef`'s TASTy and do not patch (see #19231). + then + traverse(expansion)(using inlineContext(tree).withSource(sources.head)) sources = sources.tail case _ => traverseChildren(tree) } @@ -59,6 +61,11 @@ class YCheckPositions extends Phase { case _ => } + private def isBootstrappedPredefWithPatchedMethods(call: Tree)(using Context) = + val sym = call.symbol + (sym.is(Inline) && sym.owner == defn.ScalaPredefModuleClass && sym.owner.is(Scala2Tasty)) + || (sym == defn.ScalaPredefModuleClass && sym.is(Scala2Tasty)) + private def isMacro(call: Tree)(using Context) = call.symbol.is(Macro) || (call.symbol.isClass && call.tpe.derivesFrom(defn.MacroAnnotationClass)) || diff --git a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala index 1efb3c88149e..7cf028c95064 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala @@ -2,22 +2,23 @@ package dotty.tools.dotc package transform package init -import dotty.tools.dotc._ +import dotty.tools.dotc.* import ast.tpd -import tpd._ +import tpd.* -import dotty.tools.dotc.core._ -import Contexts._ -import Types._ -import Symbols._ -import StdNames._ +import dotty.tools.dotc.core.* +import Contexts.* +import Types.* +import Symbols.* +import StdNames.* -import dotty.tools.dotc.transform._ -import Phases._ +import dotty.tools.dotc.transform.* +import Phases.* import scala.collection.mutable -import Semantic._ +import Semantic.* +import dotty.tools.unsupported class Checker extends Phase: @@ -28,21 +29,34 @@ class Checker extends Phase: override val runsAfter = Set(Pickler.name) override def isEnabled(using Context): Boolean = - super.isEnabled && ctx.settings.YcheckInit.value + super.isEnabled && (ctx.settings.YcheckInit.value || ctx.settings.YcheckInitGlobal.value) + + def traverse(traverser: InitTreeTraverser)(using Context): Boolean = monitor(phaseName): + val unit = ctx.compilationUnit + traverser.traverse(unit.tpdTree) override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = val checkCtx = ctx.fresh.setPhase(this.start) val traverser = new InitTreeTraverser() - units.foreach { unit => traverser.traverse(unit.tpdTree) } - val classes = traverser.getClasses() + val unitContexts = units.map(unit => checkCtx.fresh.setCompilationUnit(unit)) + + val units0 = + for unitContext <- unitContexts if traverse(traverser)(using unitContext) yield unitContext.compilationUnit + + cancellable { + val classes = traverser.getClasses() + + if ctx.settings.YcheckInit.value then + Semantic.checkClasses(classes)(using checkCtx) - Semantic.checkClasses(classes)(using checkCtx) + + if ctx.settings.YcheckInitGlobal.value then + Objects.checkClasses(classes)(using checkCtx) + } - units + units0 + end runOn - def run(using Context): Unit = - // ignore, we already called `Semantic.check()` in
`runOn` - () + def run(using Context): Unit = unsupported("run") class InitTreeTraverser extends TreeTraverser: private val classes: mutable.ArrayBuffer[ClassSymbol] = new mutable.ArrayBuffer diff --git a/compiler/src/dotty/tools/dotc/transform/init/Errors.scala b/compiler/src/dotty/tools/dotc/transform/init/Errors.scala index 366fd6be96a2..85feb609c90a 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Errors.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Errors.scala @@ -3,11 +3,11 @@ package dotc package transform package init -import ast.tpd._ -import core._ +import ast.tpd.* +import core.* import util.Property import util.SourcePosition -import Types._, Symbols._, Contexts._ +import Types.*, Symbols.*, Contexts.* import Trace.Trace diff --git a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala new file mode 100644 index 000000000000..763b71619de8 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala @@ -0,0 +1,1749 @@ +package dotty.tools.dotc +package transform +package init + +import core.* +import Contexts.* +import Symbols.* +import Types.* +import Denotations.Denotation +import StdNames.* +import Names.TermName +import NameKinds.OuterSelectName +import NameKinds.SuperAccessorName + +import ast.tpd.* +import util.{ SourcePosition, NoSourcePosition } +import config.Printers.init as printer +import reporting.StoreReporter +import reporting.trace as log +import typer.Applications.* + +import Errors.* +import Trace.* +import Util.* + +import scala.collection.immutable.ListSet +import scala.collection.mutable +import scala.annotation.tailrec +import scala.annotation.constructorOnly + +/** Check initialization safety of static objects + * + * The problem is illustrated by the example below: + * + * class Foo(val opposite: Foo) + * case object A extends Foo(B) // A -> B + * case object B extends Foo(A) // B -> A + * + * In the code above, the initialization of object `A` depends on `B` and vice versa. There is no + * correct way to initialize the code above. The current checker issues a warning for the code + * above. + * + * At a high level, the analysis has the following characteristics: + * + * 1. The check enforces the principle of "initialization-time irrelevance", which means that the + * time when an object is initialized should not change program semantics. For that purpose, it + * enforces the following rule: + * + * The initialization of a static object should not directly or indirectly read or write + * mutable state of another static object. + * + * This principle not only puts initialization of static objects on a solid foundation, but also + * avoids whole-program analysis. + * + * 2. The design is based on the concept of "cold aliasing" --- a cold alias may not be actively + * used during initialization, i.e., it's forbidden to call methods or access fields of a cold + * alias. Method arguments are cold aliases by default unless specified to be sensitive. Method + * parameters captured in lambdas or inner classes are always cold aliases. + * + * 3. It is inter-procedural and flow-sensitive. + * + * 4. It is object-sensitive by default and parameter-sensitive on-demand. + * + * 5. The check is modular in the sense that each object is checked separately and there is no + * whole-program analysis. However, the check is not modular in terms of project boundaries.
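+ *
+ * For illustration, a minimal sketch of a violation of rule (1):
+ *
+ *   object A:
+ *     var count = 0
+ *
+ *   object B:
+ *     val n = A.count // initializing B reads mutable state of A: reported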
+ * + */ +object Objects: + + // ----------------------------- abstract domain ----------------------------- + + /** Syntax for the data structure abstraction used in the abstract domain: + * + * ve ::= ObjectRef(class) // global object + * | OfClass(class, vs[outer], ctor, args, env) // instance of a class + * | OfArray(object[owner], regions) + * | Fun(..., env) // value elements that can be contained in ValueSet + * vs ::= ValueSet(ve) // set of abstract values + * Bottom ::= ValueSet(Empty) + * val ::= ve | Cold | vs // all possible abstract values in domain + * Ref ::= ObjectRef | OfClass // values that represent a reference to some (global or instance) object + * ThisValue ::= Ref | Cold // possible values for 'this' + * + * refMap = Ref -> ( valsMap, varsMap, outersMap ) // refMap stores field information of an object or instance + * valsMap = valsym -> val // maps immutable fields to their values + * varsMap = valsym -> addr // each mutable field has an abstract address + * outersMap = class -> val // maps outer objects to their values + * + * arrayMap = OfArray -> addr // an array has one address that stores the join value of every element + * + * heap = addr -> val // heap is mutable + * + * env = (valsMap, Option[env]) // stores local variables in the residing method, and possibly outer environments + * + * addr ::= localVarAddr(regions, valsym, owner) + * | fieldVarAddr(regions, valsym, owner) // independent of OfClass/ObjectRef + * | arrayAddr(regions, owner) // independent of array element type + * + * regions ::= List(sourcePosition) + */ + + sealed abstract class Value: + def show(using Context): String + + /** A ValueElement is an element that can be contained in a ValueSet */ + sealed abstract class ValueElement extends Value + + /** + * A reference caches the values for outers and immutable fields.
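As an informal illustration of the domain syntax above (annotation added for this review; hypothetical user code, not from the patch):

```scala
object O:                            // O        ~> ObjectRef(O)
  class C(val x: Int)
  val c  = new C(1)                  // new C(1) ~> OfClass(C, outer, ctor, args, env)
  val f  = (n: Int) => n + 1         // lambda   ~> Fun(code, thisV, klass, env)
  val xs = new Array[Int](4)         // array    ~> OfArray(owner = O, regions)
  val v  = if c.x > 0 then c else f  // branches join ~> ValueSet({OfClass(...), Fun(...)})
```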
+ */ + sealed abstract class Ref( + valsMap: mutable.Map[Symbol, Value], + varsMap: mutable.Map[Symbol, Heap.Addr], + outersMap: mutable.Map[ClassSymbol, Value]) + extends ValueElement: + protected val vals: mutable.Map[Symbol, Value] = valsMap + protected val vars: mutable.Map[Symbol, Heap.Addr] = varsMap + protected val outers: mutable.Map[ClassSymbol, Value] = outersMap + + def isObjectRef: Boolean = this.isInstanceOf[ObjectRef] + + def klass: ClassSymbol + + def valValue(sym: Symbol): Value = vals(sym) + + def varAddr(sym: Symbol): Heap.Addr = vars(sym) + + def outerValue(cls: ClassSymbol): Value = outers(cls) + + def hasVal(sym: Symbol): Boolean = vals.contains(sym) + + def hasVar(sym: Symbol): Boolean = vars.contains(sym) + + def hasOuter(cls: ClassSymbol): Boolean = outers.contains(cls) + + def initVal(field: Symbol, value: Value)(using Context) = log("Initialize " + field.show + " = " + value + " for " + this, printer) { + assert(!field.is(Flags.Mutable), "Field is mutable: " + field.show) + assert(!vals.contains(field), "Field already set: " + field.show) + vals(field) = value + } + + def initVar(field: Symbol, addr: Heap.Addr)(using Context) = log("Initialize " + field.show + " = " + addr + " for " + this, printer) { + assert(field.is(Flags.Mutable), "Field is not mutable: " + field.show) + assert(!vars.contains(field), "Field already set: " + field.show) + vars(field) = addr + } + + def initOuter(cls: ClassSymbol, value: Value)(using Context) = log("Initialize outer " + cls.show + " = " + value + " for " + this, printer) { + assert(!outers.contains(cls), "Outer already set: " + cls) + outers(cls) = value + } + + /** A reference to a static object */ + case class ObjectRef(klass: ClassSymbol) + extends Ref(valsMap = mutable.Map.empty, varsMap = mutable.Map.empty, outersMap = mutable.Map.empty): + val owner = klass + + def show(using Context) = "ObjectRef(" + klass.show + ")" + + /** + * Represents values that are instances of the specified class. + * + * Note that the 2nd parameter block does not take part in the definition of equality. + */ + case class OfClass private ( + klass: ClassSymbol, outer: Value, ctor: Symbol, args: List[Value], env: Env.Data)( + valsMap: mutable.Map[Symbol, Value], varsMap: mutable.Map[Symbol, Heap.Addr], outersMap: mutable.Map[ClassSymbol, Value]) + extends Ref(valsMap, varsMap, outersMap): + def widenedCopy(outer: Value, args: List[Value], env: Env.Data): OfClass = + new OfClass(klass, outer, ctor, args, env)(this.valsMap, this.varsMap, this.outersMap) + + def show(using Context) = + val valFields = vals.map(_.show + " -> " + _.show) + "OfClass(" + klass.show + ", outer = " + outer + ", args = " + args.map(_.show) + ", vals = " + valFields + ")" + + object OfClass: + def apply( + klass: ClassSymbol, outer: Value, ctor: Symbol, args: List[Value], env: Env.Data)( + using Context + ): OfClass = + val instance = new OfClass(klass, outer, ctor, args, env)( + valsMap = mutable.Map.empty, varsMap = mutable.Map.empty, outersMap = mutable.Map.empty + ) + instance.initOuter(klass, outer) + instance + + /** + * Represents arrays. + * + * Note that the 2nd parameter block does not take part in the definition of equality. + * + * Different arrays are distinguished by the context. Currently the default context is the static + * object whose initialization triggers the creation of the array. + * + * In the future, it is possible that we introduce a mechanism for end-users to mark the context. 
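For example (hypothetical code, added for illustration), arrays allocated while initializing different objects get distinct abstract addresses because their owners differ:

```scala
object P:
  val buffer = new Array[Int](8)   // abstracted as OfArray(owner = P, regions)

object Q:
  val buffer = new Array[Int](8)   // abstracted as OfArray(owner = Q, regions)
```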
+ * + * @param owner The static object whose initialization creates the array. + */ + case class OfArray(owner: ClassSymbol, regions: Regions.Data)(using @constructorOnly ctx: Context) extends ValueElement: + val klass: ClassSymbol = defn.ArrayClass + val addr: Heap.Addr = Heap.arrayAddr(regions, owner) + def show(using Context) = "OfArray(owner = " + owner.show + ")" + + /** + * Represents a lambda expression + */ + case class Fun(code: Tree, thisV: ThisValue, klass: ClassSymbol, env: Env.Data) extends ValueElement: + def show(using Context) = "Fun(" + code.show + ", " + thisV.show + ", " + klass.show + ")" + + /** + * Represents a set of values + * + * It comes from `if` expressions. + */ + case class ValueSet(values: ListSet[ValueElement]) extends Value: + def show(using Context) = values.map(_.show).mkString("[", ",", "]") + + /** A cold alias which should not be used during initialization. + * + * Cold is not a ValueElement since a ValueSet containing Cold is equivalent to Cold + */ + case object Cold extends Value: + def show(using Context) = "Cold" + + val Bottom = ValueSet(ListSet.empty) + + /** Possible types for 'this' */ + type ThisValue = Ref | Cold.type + + /** Checking state */ + object State: + class Data: + // objects under check + private[State] val checkingObjects = new mutable.ArrayBuffer[ObjectRef] + private[State] val checkedObjects = new mutable.ArrayBuffer[ObjectRef] + private[State] val pendingTraces = new mutable.ArrayBuffer[Trace] + end Data + + def currentObject(using data: Data): ClassSymbol = data.checkingObjects.last.klass + + private def doCheckObject(classSym: ClassSymbol)(using ctx: Context, data: Data) = + val tpl = classSym.defTree.asInstanceOf[TypeDef].rhs.asInstanceOf[Template] + + var count = 0 + given Cache.Data = new Cache.Data + + @tailrec + def iterate()(using Context): ObjectRef = + count += 1 + + given Trace = Trace.empty.add(classSym.defTree) + given Env.Data = Env.emptyEnv(tpl.constr.symbol) + given Heap.MutableData = Heap.empty() + given returns: Returns.Data = Returns.empty() + given regions: Regions.Data = Regions.empty // explicit name to avoid naming conflict + + val obj = ObjectRef(classSym) + log("Iteration " + count) { + data.checkingObjects += obj + init(tpl, obj, classSym) + assert(data.checkingObjects.last.klass == classSym, "Expect = " + classSym.show + ", found = " + data.checkingObjects.last.klass) + data.checkingObjects.remove(data.checkingObjects.size - 1) + } + + val hasError = ctx.reporter.pendingMessages.nonEmpty + if cache.hasChanged && !hasError then + cache.prepareForNextIteration() + iterate() + else + data.checkedObjects += obj + obj + end iterate + + val reporter = new StoreReporter(ctx.reporter) + val obj = iterate()(using ctx.fresh.setReporter(reporter)) + for warning <- reporter.pendingMessages do + ctx.reporter.report(warning) + + obj + end doCheckObject + + def checkObjectAccess(clazz: ClassSymbol)(using data: Data, ctx: Context, pendingTrace: Trace): ObjectRef = + val index = data.checkingObjects.indexOf(ObjectRef(clazz)) + + if index != -1 then + if data.checkingObjects.size - 1 > index then + // Only report errors for non-trivial cycles, ignore self cycles.
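To illustrate the distinction made in the comment above (hypothetical snippets, not from the patch): a self-cycle is tolerated, while a cycle through a second object is reported.

```scala
// Self-cycle, tolerated: A refers to itself during its own initialization.
object A:
  val self = A

// Non-trivial cycle, reported: initializing B forces C, which forces B again.
object B:
  val partner = C
object C:
  val partner = B
```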
+ val joinedTrace = data.pendingTraces.slice(index + 1, data.checkingObjects.size).foldLeft(pendingTrace) { (a, acc) => acc ++ a } + val callTrace = Trace.buildStacktrace(joinedTrace, "Calling trace:\n") + val cycle = data.checkingObjects.slice(index, data.checkingObjects.size) + val pos = clazz.defTree.sourcePos.focus + report.warning("Cyclic initialization: " + cycle.map(_.klass.show).mkString(" -> ") + " -> " + clazz.show + ". " + callTrace, pos) + end if + data.checkingObjects(index) + else + val objOpt = data.checkedObjects.find(_.klass == clazz) + objOpt match + case Some(obj) => obj + + case None => + data.pendingTraces += pendingTrace + val obj = doCheckObject(clazz) + data.pendingTraces.remove(data.pendingTraces.size - 1) + obj + end checkObjectAccess + end State + + /** Environment for parameters */ + object Env: + abstract class Data: + private[Env] def getVal(x: Symbol)(using Context): Option[Value] + private[Env] def getVar(x: Symbol)(using Context): Option[Heap.Addr] + + def widen(height: Int)(using Context): Data + + def level: Int + + def show(using Context): String + + /** Local environments can be deeply nested, therefore we need `outer`. + * + * For local variables in rhs of class field definitions, the `meth` is the primary constructor. + */ + private case class LocalEnv + (private[Env] val params: Map[Symbol, Value], meth: Symbol, outer: Data) + (valsMap: mutable.Map[Symbol, Value], varsMap: mutable.Map[Symbol, Heap.Addr]) + (using Context) + extends Data: + val level = outer.level + 1 + + if (level > 3) + report.warning("[Internal error] Deeply nested environment, level = " + level + ", " + meth.show + " in " + meth.enclosingClass.show, meth.defTree) + + private[Env] val vals: mutable.Map[Symbol, Value] = valsMap + private[Env] val vars: mutable.Map[Symbol, Heap.Addr] = varsMap + + private[Env] def getVal(x: Symbol)(using Context): Option[Value] = + if x.is(Flags.Param) then params.get(x) + else vals.get(x) + + private[Env] def getVar(x: Symbol)(using Context): Option[Heap.Addr] = + vars.get(x) + + def widen(height: Int)(using Context): Data = + new LocalEnv(params.map(_ -> _.widen(height)), meth, outer.widen(height))(this.vals, this.vars) + + def show(using Context) = + "owner: " + meth.show + "\n" + + "params: " + params.map(_.show + " ->" + _.show).mkString("{", ", ", "}") + "\n" + + "vals: " + vals.map(_.show + " ->" + _.show).mkString("{", ", ", "}") + "\n" + + "vars: " + vars.map(_.show + " ->" + _).mkString("{", ", ", "}") + "\n" + + "outer = {\n" + outer.show + "\n}" + + end LocalEnv + + object NoEnv extends Data: + val level = 0 + + private[Env] def getVal(x: Symbol)(using Context): Option[Value] = + throw new RuntimeException("Invalid usage of non-existent env") + + private[Env] def getVar(x: Symbol)(using Context): Option[Heap.Addr] = + throw new RuntimeException("Invalid usage of non-existent env") + + def widen(height: Int)(using Context): Data = this + + def show(using Context): String = "NoEnv" + end NoEnv + + /** An empty environment can be used for non-method environments, e.g., field initializers. + * + * The owner for the local environment for field initializers is the primary constructor of the + * enclosing class. 
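A sketch of why environments nest (hypothetical code, for illustration): each local method or lambda adds one `LocalEnv` level whose `outer` is the enclosing method's environment, which is how captured parameters are resolved.

```scala
object O:
  def outerMeth(x: Int): Int =   // evaluated under LocalEnv(level 1, meth = outerMeth)
    def inner(y: Int): Int =     // LocalEnv(level 2) whose `outer` is outerMeth's env
      x + y                      // `x` resolves through the outer environment chain
    inner(1)
  val n = outerMeth(41)
```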
+ */ + def emptyEnv(meth: Symbol)(using Context): Data = + new LocalEnv(Map.empty, meth, NoEnv)(valsMap = mutable.Map.empty, varsMap = mutable.Map.empty) + + def valValue(x: Symbol)(using data: Data, ctx: Context, trace: Trace): Value = + data.getVal(x) match + case Some(theValue) => + theValue + case _ => + report.warning("[Internal error] Value not found " + x.show + "\nenv = " + data.show + ". " + Trace.show, Trace.position) + Bottom + + def getVal(x: Symbol)(using data: Data, ctx: Context): Option[Value] = data.getVal(x) + + def getVar(x: Symbol)(using data: Data, ctx: Context): Option[Heap.Addr] = data.getVar(x) + + def of(ddef: DefDef, args: List[Value], outer: Data)(using Context): Data = + val params = ddef.termParamss.flatten.map(_.symbol) + assert(args.size == params.size, "arguments = " + args.size + ", params = " + params.size) + assert(ddef.symbol.owner.isClass ^ (outer != NoEnv), "ddef.owner = " + ddef.symbol.owner.show + ", outer = " + outer + ", " + ddef.source) + new LocalEnv(params.zip(args).toMap, ddef.symbol, outer)(valsMap = mutable.Map.empty, varsMap = mutable.Map.empty) + + def setLocalVal(x: Symbol, value: Value)(using data: Data, ctx: Context): Unit = + assert(!x.isOneOf(Flags.Param | Flags.Mutable), "Only local immutable variable allowed") + data match + case localEnv: LocalEnv => + assert(!localEnv.vals.contains(x), "Already initialized local " + x.show) + localEnv.vals(x) = value + case _ => + throw new RuntimeException("Incorrect local environment for initializing " + x.show) + + def setLocalVar(x: Symbol, addr: Heap.Addr)(using data: Data, ctx: Context): Unit = + assert(x.is(Flags.Mutable, butNot = Flags.Param), "Only local mutable variable allowed") + data match + case localEnv: LocalEnv => + assert(!localEnv.vars.contains(x), "Already initialized local " + x.show) + localEnv.vars(x) = addr + case _ => + throw new RuntimeException("Incorrect local environment for initializing " + x.show) + + /** + * Resolve the environment owned by the given method. + * + * The method could be located in outer scope with intermixed classes between its definition + * site and usage site. + * + * Due to widening, the corresponding environment might not exist. As a result reading the local + * variable will return `Cold` and it's forbidden to write to the local variable. + * + * @param meth The method which owns the environment + * @param thisV The value for `this` of the enclosing class where the local variable is referenced. + * @param env The local environment where the local variable is referenced. + * + * @return the environment and value for `this` owned by the given method. + */ + def resolveEnv(meth: Symbol, thisV: ThisValue, env: Data)(using Context): Option[(ThisValue, Data)] = log("Resolving env for " + meth.show + ", this = " + thisV.show + ", env = " + env.show, printer) { + env match + case localEnv: LocalEnv => + if localEnv.meth == meth then Some(thisV -> env) + else resolveEnv(meth, thisV, localEnv.outer) + case NoEnv => + thisV match + case ref: OfClass => + ref.outer match + case outer : ThisValue => + resolveEnv(meth, outer, ref.env) + case _ => + // TODO: properly handle the case where ref.outer is ValueSet + None + case _ => + None + } + + def withEnv[T](env: Data)(fn: Data ?=> T): T = fn(using env) + end Env + + /** Abstract heap for mutable fields + */ + object Heap: + abstract class Addr: + /** The static object which owns the mutable slot */ + def owner: ClassSymbol + + /** The address for mutable fields of objects. 
*/ + private case class FieldAddr(regions: Regions.Data, field: Symbol, owner: ClassSymbol) extends Addr + + /** The address for mutable local variables . */ + private case class LocalVarAddr(regions: Regions.Data, sym: Symbol, owner: ClassSymbol) extends Addr + + /** Immutable heap data used in the cache. + * + * We need to use structural equivalence so that in different iterations the cache can be effective. + * + * TODO: speed up equality check for heap. + */ + opaque type Data = Map[Addr, Value] + + /** Store the heap as a mutable field to avoid threading it through the program. */ + class MutableData(private[Heap] var heap: Data): + private[Heap] def writeJoin(addr: Addr, value: Value): Unit = + heap.get(addr) match + case None => + heap = heap.updated(addr, value) + + case Some(current) => + val value2 = value.join(current) + if value2 != current then + heap = heap.updated(addr, value2) + end MutableData + + def empty(): MutableData = new MutableData(Map.empty) + + def contains(addr: Addr)(using mutable: MutableData): Boolean = + mutable.heap.contains(addr) + + def read(addr: Addr)(using mutable: MutableData): Value = + mutable.heap(addr) + + def writeJoin(addr: Addr, value: Value)(using mutable: MutableData): Unit = + mutable.writeJoin(addr, value) + + def localVarAddr(regions: Regions.Data, sym: Symbol, owner: ClassSymbol): Addr = + LocalVarAddr(regions, sym, owner) + + def fieldVarAddr(regions: Regions.Data, sym: Symbol, owner: ClassSymbol): Addr = + FieldAddr(regions, sym, owner) + + def arrayAddr(regions: Regions.Data, owner: ClassSymbol)(using Context): Addr = + FieldAddr(regions, defn.ArrayClass, owner) + + def getHeapData()(using mutable: MutableData): Data = mutable.heap + + /** Cache used to terminate the check */ + object Cache: + case class Config(thisV: Value, env: Env.Data, heap: Heap.Data) + case class Res(value: Value, heap: Heap.Data) + + class Data extends Cache[Config, Res]: + def get(thisV: Value, expr: Tree)(using Heap.MutableData, Env.Data): Option[Value] = + val config = Config(thisV, summon[Env.Data], Heap.getHeapData()) + super.get(config, expr).map(_.value) + + def cachedEval(thisV: ThisValue, expr: Tree, cacheResult: Boolean)(fun: Tree => Value)(using Heap.MutableData, Env.Data): Value = + val config = Config(thisV, summon[Env.Data], Heap.getHeapData()) + val result = super.cachedEval(config, expr, cacheResult, default = Res(Bottom, Heap.getHeapData())) { expr => + Res(fun(expr), Heap.getHeapData()) + } + result.value + end Cache + + /** + * Region context for mutable states + * + * By default, the region context is empty. + */ + object Regions: + opaque type Data = List[SourcePosition] + val empty: Data = Nil + def extend(pos: SourcePosition)(using data: Data): Data = pos :: data + def exists(pos: SourcePosition)(using data: Data): Boolean = data.indexOf(pos) >= 0 + def show(using data: Data, ctx: Context): String = data.map(_.show).mkString("[", ", ", "]") + + inline def cache(using c: Cache.Data): Cache.Data = c + + + /** + * Handle return statements in methods and non-local returns in functions. 
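For reference, a non-local return looks like the following (hypothetical example; the feature is deprecated in current Scala 3 in favour of `scala.util.boundary`, but the checker still models it). The `return` inside the lambda exits the enclosing method, so the returned value must be routed to that method's handler rather than to the innermost one:

```scala
def firstPositive(xs: List[Int]): Int =
  xs.foreach { x =>
    if x > 0 then return x   // non-local return: exits firstPositive, not the lambda
  }
  -1
```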
+ */ + object Returns: + private class ReturnData(val method: Symbol, val values: mutable.ArrayBuffer[Value]) + opaque type Data = mutable.ArrayBuffer[ReturnData] + + def empty(): Data = mutable.ArrayBuffer() + + def installHandler(meth: Symbol)(using data: Data): Unit = + data.addOne(ReturnData(meth, mutable.ArrayBuffer())) + + def popHandler(meth: Symbol)(using data: Data): Value = + val returnData = data.remove(data.size - 1) + assert(returnData.method == meth, "Symbol mismatch in return handlers, expect = " + meth + ", found = " + returnData.method) + returnData.values.join + + def handle(meth: Symbol, value: Value)(using data: Data, trace: Trace, ctx: Context): Unit = + data.findLast(_.method == meth) match + case Some(returnData) => + returnData.values.addOne(value) + + case None => + report.warning("[Internal error] Unhandled return for method " + meth + " in " + meth.owner.show + ". Trace:\n" + Trace.show, Trace.position) + + type Contextual[T] = (Context, State.Data, Env.Data, Cache.Data, Heap.MutableData, Regions.Data, Returns.Data, Trace) ?=> T + + // --------------------------- domain operations ----------------------------- + + case class ArgInfo(value: Value, trace: Trace, tree: Tree) + + extension (a: Value) + def join(b: Value): Value = + (a, b) match + case (Cold, _) => Cold + case (_, Cold) => Cold + case (Bottom, b) => b + case (a, Bottom) => a + case (ValueSet(values1), ValueSet(values2)) => ValueSet(values1 ++ values2) + case (a : ValueElement, ValueSet(values)) => ValueSet(values + a) + case (ValueSet(values), b : ValueElement) => ValueSet(values + b) + case (a : ValueElement, b : ValueElement) => ValueSet(ListSet(a, b)) + + def widen(height: Int)(using Context): Value = + if height == 0 then Cold + else + a match + case Bottom => Bottom + + case ValueSet(values) => + values.map(ref => ref.widen(height)).join + + case Fun(code, thisV, klass, env) => + Fun(code, thisV.widenRefOrCold(height), klass, env.widen(height - 1)) + + case ref @ OfClass(klass, outer, _, args, env) => + val outer2 = outer.widen(height - 1) + val args2 = args.map(_.widen(height - 1)) + val env2 = env.widen(height - 1) + ref.widenedCopy(outer2, args2, env2) + + case _ => a + + extension (value: Ref | Cold.type) + def widenRefOrCold(height : Int)(using Context) : Ref | Cold.type = value.widen(height).asInstanceOf[ThisValue] + + extension (values: Iterable[Value]) + def join: Value = if values.isEmpty then Bottom else values.reduce { (v1, v2) => v1.join(v2) } + + def widen(height: Int): Contextual[List[Value]] = values.map(_.widen(height)).toList + + /** Handle method calls `e.m(args)`. + * + * @param value The value for the receiver. + * @param meth The symbol of the target method (could be virtual or abstract method). + * @param args Arguments of the method call (all parameter blocks flatten to a list). + * @param receiver The type of the receiver. + * @param superType The type of the super in a super call. NoType for non-super calls. + * @param needResolve Whether the target of the call needs resolution? + */ + def call(value: Value, meth: Symbol, args: List[ArgInfo], receiver: Type, superType: Type, needResolve: Boolean = true): Contextual[Value] = log("call " + meth.show + ", this = " + value.show + ", args = " + args.map(_.value.show), printer, (_: Value).show) { + value match + case Cold => + report.warning("Using cold alias. 
" + Trace.show, Trace.position) + Bottom + + case Bottom => + Bottom + + case arr: OfArray => + val target = resolve(defn.ArrayClass, meth) + + if target == defn.Array_apply || target == defn.Array_clone then + if arr.addr.owner == State.currentObject then + Heap.read(arr.addr) + else + errorReadOtherStaticObject(State.currentObject, arr.addr.owner) + Bottom + else if target == defn.Array_update then + assert(args.size == 2, "Incorrect number of arguments for Array update, found = " + args.size) + if arr.addr.owner != State.currentObject then + errorMutateOtherStaticObject(State.currentObject, arr.addr.owner) + else + Heap.writeJoin(arr.addr, args.tail.head.value) + Bottom + else + // Array.length is OK + Bottom + + case ref: Ref => + val isLocal = !meth.owner.isClass + val target = + if !needResolve then + meth + else if superType.exists then + meth + else if meth.name.is(SuperAccessorName) then + ResolveSuper.rebindSuper(ref.klass, meth) + else + resolve(ref.klass, meth) + + if target.isOneOf(Flags.Method) then + if target.owner == defn.ArrayModuleClass && target.name == nme.apply then + val arr = OfArray(State.currentObject, summon[Regions.Data]) + Heap.writeJoin(arr.addr, args.map(_.value).join) + arr + else if target.hasSource then + val cls = target.owner.enclosingClass.asClass + val ddef = target.defTree.asInstanceOf[DefDef] + val meth = ddef.symbol + + val (thisV : ThisValue, outerEnv) = + if meth.owner.isClass then + (ref, Env.NoEnv) + else + Env.resolveEnv(meth.owner.enclosingMethod, ref, summon[Env.Data]).getOrElse(Cold -> Env.NoEnv) + + val env2 = Env.of(ddef, args.map(_.value), outerEnv) + extendTrace(ddef) { + given Env.Data = env2 + cache.cachedEval(ref, ddef.rhs, cacheResult = true) { expr => + Returns.installHandler(meth) + val res = cases(expr, thisV, cls) + val returns = Returns.popHandler(meth) + res.join(returns) + } + } + else + Bottom + else if target.exists then + select(ref, target, receiver, needResolve = false) + else + if ref.klass.isSubClass(receiver.widenSingleton.classSymbol) then + report.warning("[Internal error] Unexpected resolution failure: ref.klass = " + ref.klass.show + ", meth = " + meth.show + Trace.show, Trace.position) + Bottom + else + // This is possible due to incorrect type cast. + // See tests/init/pos/Type.scala + Bottom + + case Fun(code, thisV, klass, env) => + // meth == NoSymbol for poly functions + if meth.name == nme.tupled then + value // a call like `fun.tupled` + else + code match + case ddef: DefDef => + if meth.name == nme.apply then + given Env.Data = Env.of(ddef, args.map(_.value), env) + extendTrace(code) { eval(ddef.rhs, thisV, klass, cacheResult = true) } + else + // The methods defined in `Any` and `AnyRef` are trivial and don't affect initialization. + if meth.owner == defn.AnyClass || meth.owner == defn.ObjectClass then + value + else + // In future, we will have Tasty for stdlib classes and can abstractly interpret that Tasty. + // For now, return `Cold` to ensure soundness and trigger a warning. + Cold + end if + end if + + case _ => + // by-name closure + given Env.Data = env + extendTrace(code) { eval(code, thisV, klass, cacheResult = true) } + + case ValueSet(vs) => + vs.map(v => call(v, meth, args, receiver, superType)).join + } + + /** Handle constructor calls `(args)`. + * + * @param value The value for the receiver. + * @param ctor The symbol of the target method. + * @param args Arguments of the constructor call (all parameter blocks flatten to a list). 
+ */ + def callConstructor(value: Value, ctor: Symbol, args: List[ArgInfo]): Contextual[Value] = log("call " + ctor.show + ", args = " + args.map(_.value.show), printer, (_: Value).show) { + + value match + case ref: Ref => + if ctor.hasSource then + val cls = ctor.owner.enclosingClass.asClass + val ddef = ctor.defTree.asInstanceOf[DefDef] + val argValues = args.map(_.value) + + given Env.Data = Env.of(ddef, argValues, Env.NoEnv) + if ctor.isPrimaryConstructor then + val tpl = cls.defTree.asInstanceOf[TypeDef].rhs.asInstanceOf[Template] + extendTrace(cls.defTree) { eval(tpl, ref, cls, cacheResult = true) } + else + extendTrace(ddef) { // The return values for secondary constructors can be ignored + Returns.installHandler(ctor) + eval(ddef.rhs, ref, cls, cacheResult = true) + Returns.popHandler(ctor) + } + else + // no source code available + Bottom + + case _ => + report.warning("[Internal error] unexpected constructor call, meth = " + ctor + ", this = " + value + Trace.show, Trace.position) + Bottom + } + + /** Handle selection `e.f`. + * + * @param value The value for the receiver. + * @param field The symbol of the target field (could be virtual or abstract). + * @param receiver The type of the receiver. + * @param needResolve Whether the target of the selection needs resolution? + */ + def select(value: Value, field: Symbol, receiver: Type, needResolve: Boolean = true): Contextual[Value] = log("select " + field.show + ", this = " + value.show, printer, (_: Value).show) { + value match + case Cold => + report.warning("Using cold alias", Trace.position) + Bottom + + case ref: Ref => + val target = if needResolve then resolve(ref.klass, field) else field + if target.is(Flags.Lazy) then + given Env.Data = Env.emptyEnv(target.owner.asInstanceOf[ClassSymbol].primaryConstructor) + if target.hasSource then + val rhs = target.defTree.asInstanceOf[ValDef].rhs + eval(rhs, ref, target.owner.asClass, cacheResult = true) + else + Bottom + else if target.exists then + if target.isOneOf(Flags.Mutable) then + if ref.hasVar(target) then + val addr = ref.varAddr(target) + if addr.owner == State.currentObject then + Heap.read(addr) + else + errorReadOtherStaticObject(State.currentObject, addr.owner) + Bottom + else if ref.isObjectRef && ref.klass.hasSource then + report.warning("Access uninitialized field " + field.show + ". " + Trace.show, Trace.position) + Bottom + else + // initialization error, reported by the initialization checker + Bottom + else if ref.hasVal(target) then + ref.valValue(target) + else if ref.isObjectRef && ref.klass.hasSource then + report.warning("Access uninitialized field " + field.show + ". " + Trace.show, Trace.position) + Bottom + else + // initialization error, reported by the initialization checker + Bottom + + else + if ref.klass.isSubClass(receiver.widenSingleton.classSymbol) then + report.warning("[Internal error] Unexpected resolution failure: ref.klass = " + ref.klass.show + ", field = " + field.show + Trace.show, Trace.position) + Bottom + else + // This is possible due to incorrect type cast. 
+ // See tests/init/pos/Type.scala + Bottom + + case fun: Fun => + report.warning("[Internal error] unexpected tree in selecting a function, fun = " + fun.code.show + Trace.show, fun.code) + Bottom + + case arr: OfArray => + report.warning("[Internal error] unexpected tree in selecting an array, array = " + arr.show + Trace.show, Trace.position) + Bottom + + case Bottom => + if field.isStaticObject then ObjectRef(field.moduleClass.asClass) + else Bottom + + case ValueSet(values) => + values.map(ref => select(ref, field, receiver)).join + } + + /** Handle assignment `lhs.f = rhs`. + * + * @param lhs The value of the object to be mutated. + * @param field The symbol of the target field. + * @param rhs The value to be assigned. + * @param rhsTyp The type of the right-hand side. + */ + def assign(lhs: Value, field: Symbol, rhs: Value, rhsTyp: Type): Contextual[Value] = log("Assign " + field.show + " of " + lhs.show + ", rhs = " + rhs.show, printer, (_: Value).show) { + lhs match + case fun: Fun => + report.warning("[Internal error] unexpected tree in assignment, fun = " + fun.code.show + Trace.show, Trace.position) + + case arr: OfArray => + report.warning("[Internal error] unexpected tree in assignment, array = " + arr.show + Trace.show, Trace.position) + + case Cold => + report.warning("Assigning to cold aliases is forbidden. " + Trace.show, Trace.position) + + case Bottom => + + case ValueSet(values) => + values.foreach(ref => assign(ref, field, rhs, rhsTyp)) + + case ref: Ref => + if ref.hasVar(field) then + val addr = ref.varAddr(field) + if addr.owner != State.currentObject then + errorMutateOtherStaticObject(State.currentObject, addr.owner) + else + Heap.writeJoin(addr, rhs) + else + report.warning("Mutating a field before its initialization: " + field.show + ". " + Trace.show, Trace.position) + end match + + Bottom + } + + /** Handle new expression `new p.C(args)`. + * + * @param outer The value for `p`. + * @param klass The symbol of the class `C`. + * @param ctor The symbol of the target constructor. + * @param args The arguments passed to the constructor. + */ + def instantiate(outer: Value, klass: ClassSymbol, ctor: Symbol, args: List[ArgInfo]): Contextual[Value] = log("instantiating " + klass.show + ", outer = " + outer + ", args = " + args.map(_.value.show), printer, (_: Value).show) { + outer match + + case _ : Fun | _: OfArray => + report.warning("[Internal error] unexpected outer in instantiating a class, outer = " + outer.show + ", class = " + klass.show + ", " + Trace.show, Trace.position) + Bottom + + case outer: (Ref | Cold.type | Bottom.type) => + if klass == defn.ArrayClass then + args.head.tree.tpe match + case ConstantType(Constants.Constant(0)) => + // new Array(0) + Bottom + case _ => + val arr = OfArray(State.currentObject, summon[Regions.Data]) + Heap.writeJoin(arr.addr, Bottom) + arr + else + // Widen the outer to finitize the domain. Arguments already widened in `evalArgs`.
+ val (outerWidened, envWidened) = + outer match + case _ : Bottom.type => // For top-level classes + (Bottom, Env.NoEnv) + case thisV : (Ref | Cold.type) => + if klass.owner.isClass then + if klass.owner.is(Flags.Package) then + report.warning("[Internal error] top-level class should have `Bottom` as outer, class = " + klass.show + ", outer = " + outer.show + ", " + Trace.show, Trace.position) + (Bottom, Env.NoEnv) + else + (thisV.widenRefOrCold(1), Env.NoEnv) + else + // klass.enclosingMethod returns its primary constructor + Env.resolveEnv(klass.owner.enclosingMethod, thisV, summon[Env.Data]).getOrElse(Cold -> Env.NoEnv) + + val instance = OfClass(klass, outerWidened, ctor, args.map(_.value), envWidened) + callConstructor(instance, ctor, args) + instance + + case ValueSet(values) => + values.map(ref => instantiate(ref, klass, ctor, args)).join + } + + /** Handle local variable definition, `val x = e` or `var x = e`. + * + * @param sym The symbol of the variable. + * @param value The value of the initializer. + */ + def initLocal(sym: Symbol, value: Value): Contextual[Unit] = log("initialize local " + sym.show + " with " + value.show, printer) { + if sym.is(Flags.Mutable) then + val addr = Heap.localVarAddr(summon[Regions.Data], sym, State.currentObject) + Env.setLocalVar(sym, addr) + Heap.writeJoin(addr, value) + else + Env.setLocalVal(sym, value) + } + + /** Read local variable `x`. + * + * @param thisV The value for `this` where the variable is used. + * @param sym The symbol of the variable. + */ + def readLocal(thisV: ThisValue, sym: Symbol): Contextual[Value] = log("reading local " + sym.show, printer, (_: Value).show) { + def isByNameParam(sym: Symbol) = sym.is(Flags.Param) && sym.info.isInstanceOf[ExprType] + Env.resolveEnv(sym.enclosingMethod, thisV, summon[Env.Data]) match + case Some(thisV -> env) => + if sym.is(Flags.Mutable) then + // Assume forward reference check is doing a good job + given Env.Data = env + Env.getVar(sym) match + case Some(addr) => + if addr.owner == State.currentObject then + Heap.read(addr) + else + errorReadOtherStaticObject(State.currentObject, addr.owner) + Bottom + end if + case _ => + // Only vals can be lazy + report.warning("[Internal error] Variable not found " + sym.show + "\nenv = " + env.show + ". " + Trace.show, Trace.position) + Bottom + else + given Env.Data = env + if sym.is(Flags.Lazy) then + val rhs = sym.defTree.asInstanceOf[ValDef].rhs + eval(rhs, thisV, sym.enclosingClass.asClass, cacheResult = true) + else + // Assume forward reference check is doing a good job + val value = Env.valValue(sym) + if isByNameParam(sym) then + value match + case fun: Fun => + given Env.Data = fun.env + eval(fun.code, fun.thisV, fun.klass) + case Cold => + report.warning("Calling cold by-name alias. " + Trace.show, Trace.position) + Bottom + case _: ValueSet | _: Ref | _: OfArray => + report.warning("[Internal error] Unexpected by-name value " + value.show + ". " + Trace.show, Trace.position) + Bottom + else + value + + case None => + if isByNameParam(sym) then + report.warning("Calling cold by-name alias. " + Trace.show, Trace.position) + Bottom + else + Cold + } + + /** Handle local variable assignment, `x = e`. + * + * @param thisV The value for `this` where the assignment is located. + * @param sym The symbol of the variable. + * @param value The value of the rhs of the assignment.
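A sketch of the situation `writeLocal` handles (hypothetical code, for illustration): a `var` owned by one method is written from a lambda, so the write must be resolved through `Env.resolveEnv` to the environment of the owning method.

```scala
object O:
  def count(xs: List[Int]): Int =
    var n = 0                  // `n` lives at a heap address owned by O
    xs.foreach(_ => n += 1)    // the lambda writes a var of the enclosing method
    n
  val size = count(List(1, 2, 3))
```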
+ */ + def writeLocal(thisV: ThisValue, sym: Symbol, value: Value): Contextual[Value] = log("write local " + sym.show + " with " + value.show, printer, (_: Value).show) { + assert(sym.is(Flags.Mutable), "Writing to immutable variable " + sym.show) + + Env.resolveEnv(sym.enclosingMethod, thisV, summon[Env.Data]) match + case Some(thisV -> env) => + given Env.Data = env + Env.getVar(sym) match + case Some(addr) => + if addr.owner != State.currentObject then + errorMutateOtherStaticObject(State.currentObject, addr.owner) + else + Heap.writeJoin(addr, value) + case _ => + report.warning("[Internal error] Variable not found " + sym.show + "\nenv = " + env.show + ". " + Trace.show, Trace.position) + + case _ => + report.warning("Assigning to variables in outer scope. " + Trace.show, Trace.position) + + Bottom + } + + // -------------------------------- algorithm -------------------------------- + + /** Check an individual object */ + private def accessObject(classSym: ClassSymbol)(using Context, State.Data, Trace): ObjectRef = log("accessing " + classSym.show, printer, (_: Value).show) { + if classSym.hasSource then + State.checkObjectAccess(classSym) + else + ObjectRef(classSym) + } + + + def checkClasses(classes: List[ClassSymbol])(using Context): Unit = + given State.Data = new State.Data + given Trace = Trace.empty + + for + classSym <- classes if classSym.isStaticObject + do + accessObject(classSym) + + /** Evaluate an expression with the given value for `this` in a given class `klass` + * + * Note that `klass` might be a super class of the object referred by `thisV`. + * The parameter `klass` is needed for `this` resolution. Consider the following code: + * + * class A { + * A.this + * class B extends A { A.this } + * } + * + * As can be seen above, the meaning of the expression `A.this` depends on where + * it is located. + * + * This method only handles cache logic and delegates the work to `cases`. + * + * @param expr The expression to be evaluated. + * @param thisV The value for `C.this` where `C` is represented by the parameter `klass`. + * @param klass The enclosing class where the expression is located. + * @param cacheResult It is used to reduce the size of the cache. + */ + def eval(expr: Tree, thisV: ThisValue, klass: ClassSymbol, cacheResult: Boolean = false): Contextual[Value] = log("evaluating " + expr.show + ", this = " + thisV.show + ", regions = " + Regions.show + " in " + klass.show, printer, (_: Value).show) { + cache.cachedEval(thisV, expr, cacheResult) { expr => cases(expr, thisV, klass) } + } + + + /** Evaluate a list of expressions */ + def evalExprs(exprs: List[Tree], thisV: ThisValue, klass: ClassSymbol): Contextual[List[Value]] = + exprs.map { expr => eval(expr, thisV, klass) } + + /** Handles the evaluation of different expressions + * + * Note: Recursive call should go to `eval` instead of `cases`. + * + * @param expr The expression to be evaluated. + * @param thisV The value for `C.this` where `C` is represented by the parameter `klass`. + * @param klass The enclosing class where the expression `expr` is located. 
+ */ + def cases(expr: Tree, thisV: ThisValue, klass: ClassSymbol): Contextual[Value] = log("evaluating " + expr.show + ", this = " + thisV.show + " in " + klass.show, printer, (_: Value).show) { + val trace2 = trace.add(expr) + + expr match + case Ident(nme.WILDCARD) => + // TODO: disallow `var x: T = _` + Bottom + + case id @ Ident(name) if !id.symbol.is(Flags.Method) => + assert(name.isTermName, "type trees should not reach here") + withTrace(trace2) { evalType(expr.tpe, thisV, klass) } + + case NewExpr(tref, New(tpt), ctor, argss) => + // check args + val args = evalArgs(argss.flatten, thisV, klass) + + val cls = tref.classSymbol.asClass + withTrace(trace2) { + val outer = outerValue(tref, thisV, klass) + instantiate(outer, cls, ctor, args) + } + + case Apply(ref, arg :: Nil) if ref.symbol == defn.InitRegionMethod => + val regions2 = Regions.extend(expr.sourcePos) + if Regions.exists(expr.sourcePos) then + report.warning("Cyclic region detected. Trace:\n" + Trace.show, expr) + Bottom + else + given Regions.Data = regions2 + eval(arg, thisV, klass) + + case Call(ref, argss) => + // check args + val args = evalArgs(argss.flatten, thisV, klass) + + ref match + case Select(supert: Super, _) => + val SuperType(thisTp, superTp) = supert.tpe: @unchecked + val thisValue2 = extendTrace(ref) { + thisTp match + case thisTp: ThisType => + evalType(thisTp, thisV, klass) + case AndType(thisTp: ThisType, _) => + evalType(thisTp, thisV, klass) + case _ => + report.warning("[Internal error] Unexpected type " + thisTp.show + ", trace:\n" + Trace.show, ref) + Bottom + } + withTrace(trace2) { call(thisValue2, ref.symbol, args, thisTp, superTp) } + + case Select(qual, _) => + val receiver = eval(qual, thisV, klass) + if ref.symbol.isConstructor then + withTrace(trace2) { callConstructor(receiver, ref.symbol, args) } + else + withTrace(trace2) { call(receiver, ref.symbol, args, receiver = qual.tpe, superType = NoType) } + + case id: Ident => + id.tpe match + case TermRef(NoPrefix, _) => + // resolve this for the local method + val enclosingClass = id.symbol.owner.enclosingClass.asClass + val thisValue2 = extendTrace(ref) { resolveThis(enclosingClass, thisV, klass) } + // local methods are not a member, but we can reuse the method `call` + withTrace(trace2) { call(thisValue2, id.symbol, args, receiver = NoType, superType = NoType, needResolve = false) } + case TermRef(prefix, _) => + val receiver = withTrace(trace2) { evalType(prefix, thisV, klass) } + if id.symbol.isConstructor then + withTrace(trace2) { callConstructor(receiver, id.symbol, args) } + else + withTrace(trace2) { call(receiver, id.symbol, args, receiver = prefix, superType = NoType) } + + case Select(qualifier, name) => + val qual = eval(qualifier, thisV, klass) + + name match + case OuterSelectName(_, _) => + val current = qualifier.tpe.classSymbol + val target = expr.tpe.widenSingleton.classSymbol.asClass + withTrace(trace2) { resolveThis(target, qual, current.asClass) } + case _ => + withTrace(trace2) { select(qual, expr.symbol, receiver = qualifier.tpe) } + + case _: This => + evalType(expr.tpe, thisV, klass) + + case Literal(_) => + Bottom + + case Typed(expr, tpt) => + if tpt.tpe.hasAnnotation(defn.UncheckedAnnot) then + Bottom + else + eval(expr, thisV, klass) + + case NamedArg(name, arg) => + eval(arg, thisV, klass) + + case Assign(lhs, rhs) => + var isLocal = false + val receiver = + lhs match + case Select(qual, _) => + eval(qual, thisV, klass) + case id: Ident => + id.tpe match + case TermRef(NoPrefix, _) => + isLocal = true + thisV 
+ case TermRef(prefix, _) => + extendTrace(id) { evalType(prefix, thisV, klass) } + + val value = eval(rhs, thisV, klass) + + if isLocal then + writeLocal(thisV, lhs.symbol, value) + else + withTrace(trace2) { assign(receiver, lhs.symbol, value, rhs.tpe) } + + case closureDef(ddef) => + Fun(ddef, thisV, klass, summon[Env.Data]) + + case PolyFun(ddef) => + Fun(ddef, thisV, klass, summon[Env.Data]) + + case Block(stats, expr) => + evalExprs(stats, thisV, klass) + eval(expr, thisV, klass) + + case If(cond, thenp, elsep) => + eval(cond, thisV, klass) + evalExprs(thenp :: elsep :: Nil, thisV, klass).join + + case Annotated(arg, annot) => + if expr.tpe.hasAnnotation(defn.UncheckedAnnot) then + Bottom + else + eval(arg, thisV, klass) + + case Match(scrutinee, cases) => + val scrutineeValue = eval(scrutinee, thisV, klass) + patternMatch(scrutineeValue, cases, thisV, klass) + + case Return(expr, from) => + Returns.handle(from.symbol, eval(expr, thisV, klass)) + Bottom + + case WhileDo(cond, body) => + evalExprs(cond :: body :: Nil, thisV, klass) + Bottom + + case Labeled(_, expr) => + eval(expr, thisV, klass) + + case Try(block, cases, finalizer) => + val res = evalExprs(block :: cases.map(_.body), thisV, klass).join + if !finalizer.isEmpty then + eval(finalizer, thisV, klass) + res + + case SeqLiteral(elems, elemtpt) => + evalExprs(elems, thisV, klass).join + + case Inlined(call, bindings, expansion) => + evalExprs(bindings, thisV, klass) + eval(expansion, thisV, klass) + + case Thicket(List()) => + // possible in try/catch/finally, see tests/crash/i6914.scala + Bottom + + case vdef : ValDef => + // local val definition + val sym = vdef.symbol + if !sym.is(Flags.Lazy) then + val rhs = eval(vdef.rhs, thisV, klass) + initLocal(sym, rhs) + Bottom + + case ddef : DefDef => + // local method + Bottom + + case tdef: TypeDef => + // local type definition + Bottom + + case _: Import | _: Export => + Bottom + + case tpl: Template => + init(tpl, thisV.asInstanceOf[Ref], klass) + + case _ => + report.warning("[Internal error] unexpected tree: " + expr + "\n" + Trace.show, expr) + Bottom + } + + /** Evaluate the cases against the scrutinee value. + * + * It returns the scrutinee in most cases; the function is mainly evaluated for its side effect of adding bindings + * to the environment. + * + * See https://docs.scala-lang.org/scala3/reference/changed-features/pattern-matching.html + * + * @param scrutinee The abstract value of the scrutinee. + * @param cases The cases to match. + * @param thisV The value for `C.this` where `C` is represented by `klass`. + * @param klass The enclosing class where the type `tp` is located. + */ + def patternMatch(scrutinee: Value, cases: List[CaseDef], thisV: ThisValue, klass: ClassSymbol): Contextual[Value] = + // expected member types for `unapplySeq` + def lengthType = ExprType(defn.IntType) + def lengthCompareType = MethodType(List(defn.IntType), defn.IntType) + def applyType(elemTp: Type) = MethodType(List(defn.IntType), elemTp) + def dropType(elemTp: Type) = MethodType(List(defn.IntType), defn.CollectionSeqType.appliedTo(elemTp)) + def toSeqType(elemTp: Type) = ExprType(defn.CollectionSeqType.appliedTo(elemTp)) + + def getMemberMethod(receiver: Type, name: TermName, tp: Type): Denotation = + receiver.member(name).suchThat(receiver.memberInfo(_) <:< tp) + + def evalCase(caseDef: CaseDef): Value = + evalPattern(scrutinee, caseDef.pat) + eval(caseDef.guard, thisV, klass) + eval(caseDef.body, thisV, klass) + + /** Abstract evaluation of patterns.
+ * + * It augments the local environment for bound pattern variables. As symbols are globally + * unique, we can put them in a single environment. + * + * Currently, we assume all cases are reachable, thus all patterns are assumed to match. + */ + def evalPattern(scrutinee: Value, pat: Tree): Value = log("match " + scrutinee.show + " against " + pat.show, printer, (_: Value).show): + val trace2 = Trace.trace.add(pat) + pat match + case Alternative(pats) => + for pat <- pats do evalPattern(scrutinee, pat) + scrutinee + + case bind @ Bind(_, pat) => + val value = evalPattern(scrutinee, pat) + initLocal(bind.symbol, value) + scrutinee + + case UnApply(fun, implicits, pats) => + given Trace = trace2 + + val fun1 = funPart(fun) + val funRef = fun1.tpe.asInstanceOf[TermRef] + val unapplyResTp = funRef.widen.finalResultType + + val receiver = fun1 match + case ident: Ident => + evalType(funRef.prefix, thisV, klass) + case select: Select => + eval(select.qualifier, thisV, klass) + + def implicitArgsBeforeScrutinee(fun: Tree): Contextual[List[ArgInfo]] = fun match + case Apply(f, implicitArgs) => + implicitArgsBeforeScrutinee(f) ++ evalArgs(implicitArgs.map(Arg.apply), thisV, klass) + case _ => List() + + val implicitArgsAfterScrutinee = evalArgs(implicits.map(Arg.apply), thisV, klass) + val args = implicitArgsBeforeScrutinee(fun) ++ (ArgInfo(scrutinee, summon[Trace], EmptyTree) :: implicitArgsAfterScrutinee) + val unapplyRes = call(receiver, funRef.symbol, args, funRef.prefix, superType = NoType, needResolve = true) + + if fun.symbol.name == nme.unapplySeq then + var resultTp = unapplyResTp + var elemTp = unapplySeqTypeElemTp(resultTp) + var arity = productArity(resultTp, NoSourcePosition) + var needsGet = false + if (!elemTp.exists && arity <= 0) { + needsGet = true + resultTp = resultTp.select(nme.get).finalResultType + elemTp = unapplySeqTypeElemTp(resultTp.widen) + arity = productSelectorTypes(resultTp, NoSourcePosition).size + } + + var resToMatch = unapplyRes + + if needsGet then + // Get match + val isEmptyDenot = unapplyResTp.member(nme.isEmpty).suchThat(_.info.isParameterless) + call(unapplyRes, isEmptyDenot.symbol, Nil, unapplyResTp, superType = NoType, needResolve = true) + + val getDenot = unapplyResTp.member(nme.get).suchThat(_.info.isParameterless) + resToMatch = call(unapplyRes, getDenot.symbol, Nil, unapplyResTp, superType = NoType, needResolve = true) + end if + + if elemTp.exists then + // sequence match + evalSeqPatterns(resToMatch, resultTp, elemTp, pats) + else + // product sequence match + val selectors = productSelectors(resultTp) + assert(selectors.length <= pats.length) + selectors.init.zip(pats).map { (sel, pat) => + val selectRes = call(resToMatch, sel, Nil, resultTp, superType = NoType, needResolve = true) + evalPattern(selectRes, pat) + } + val seqPats = pats.drop(selectors.length - 1) + val toSeqRes = call(resToMatch, selectors.last, Nil, resultTp, superType = NoType, needResolve = true) + val toSeqResTp = resultTp.memberInfo(selectors.last).finalResultType + evalSeqPatterns(toSeqRes, toSeqResTp, elemTp, seqPats) + end if + + else + // distribute unapply to patterns + if isProductMatch(unapplyResTp, pats.length) then + // product match + val selectors = productSelectors(unapplyResTp) + assert(selectors.length == pats.length) + selectors.zip(pats).map { (sel, pat) => + val selectRes = call(unapplyRes, sel, Nil, unapplyResTp, superType = NoType, needResolve = true) + evalPattern(selectRes, pat) + } + else if unapplyResTp <:< defn.BooleanType then + // Boolean 
extractor, do nothing + () + else + // Get match + val isEmptyDenot = unapplyResTp.member(nme.isEmpty).suchThat(_.info.isParameterless) + call(unapplyRes, isEmptyDenot.symbol, Nil, unapplyResTp, superType = NoType, needResolve = true) + + val getDenot = unapplyResTp.member(nme.get).suchThat(_.info.isParameterless) + val getRes = call(unapplyRes, getDenot.symbol, Nil, unapplyResTp, superType = NoType, needResolve = true) + if pats.length == 1 then + // single match + evalPattern(getRes, pats.head) + else + val getResTp = getDenot.info.finalResultType + val selectors = productSelectors(getResTp).take(pats.length) + selectors.zip(pats).map { (sel, pat) => + val selectRes = call(unapplyRes, sel, Nil, getResTp, superType = NoType, needResolve = true) + evalPattern(selectRes, pat) + } + end if + end if + end if + scrutinee + + case Ident(nme.WILDCARD) | Ident(nme.WILDCARD_STAR) => + scrutinee + + case Typed(pat, _) => + evalPattern(scrutinee, pat) + + case tree => + // For all other trees, the semantics is normal. + eval(tree, thisV, klass) + + end evalPattern + + /** + * Evaluate a sequence value against sequence patterns. + */ + def evalSeqPatterns(scrutinee: Value, scrutineeType: Type, elemType: Type, pats: List[Tree])(using Trace): Unit = + // call .lengthCompare or .length + val lengthCompareDenot = getMemberMethod(scrutineeType, nme.lengthCompare, lengthCompareType) + if lengthCompareDenot.exists then + call(scrutinee, lengthCompareDenot.symbol, ArgInfo(Bottom, summon[Trace], EmptyTree) :: Nil, scrutineeType, superType = NoType, needResolve = true) + else + val lengthDenot = getMemberMethod(scrutineeType, nme.length, lengthType) + call(scrutinee, lengthDenot.symbol, Nil, scrutineeType, superType = NoType, needResolve = true) + end if + + // call .apply + val applyDenot = getMemberMethod(scrutineeType, nme.apply, applyType(elemType)) + val applyRes = call(scrutinee, applyDenot.symbol, ArgInfo(Bottom, summon[Trace], EmptyTree) :: Nil, scrutineeType, superType = NoType, needResolve = true) + + if isWildcardStarArgList(pats) then + if pats.size == 1 then + // call .toSeq + val toSeqDenot = scrutineeType.member(nme.toSeq).suchThat(_.info.isParameterless) + val toSeqRes = call(scrutinee, toSeqDenot.symbol, Nil, scrutineeType, superType = NoType, needResolve = true) + evalPattern(toSeqRes, pats.head) + else + // call .drop + val dropDenot = getMemberMethod(scrutineeType, nme.drop, applyType(elemType)) + val dropRes = call(scrutinee, dropDenot.symbol, ArgInfo(Bottom, summon[Trace], EmptyTree) :: Nil, scrutineeType, superType = NoType, needResolve = true) + for pat <- pats.init do evalPattern(applyRes, pat) + evalPattern(dropRes, pats.last) + end if + else + // no patterns like `xs*` + for pat <- pats do evalPattern(applyRes, pat) + end if + end evalSeqPatterns + + + cases.map(evalCase).join + end patternMatch + + /** Handle semantics of leaf nodes + * + * For leaf nodes, their semantics is determined by their types. + * + * @param tp The type to be evaluated. + * @param thisV The value for `C.this` where `C` is represented by `klass`. + * @param klass The enclosing class where the type `tp` is located. + * @param elideObjectAccess Whether object access should be omitted. + * + * Object access elision happens when the object access is used as a prefix + * in `new o.C` and `C` does not need an outer.
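Concretely (hypothetical code, for illustration): in `new o.C` below, `C` is static and needs no outer pointer, so the prefix `o` is merely named rather than used, and no object-access check is triggered for `o`.

```scala
object o:
  class C                // C is static: it captures no outer reference

object User:
  val c = new o.C        // prefix `o` is elided: no object-access check for `o`
```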
+ */ + def evalType(tp: Type, thisV: ThisValue, klass: ClassSymbol, elideObjectAccess: Boolean = false): Contextual[Value] = log("evaluating " + tp.show, printer, (_: Value).show) { + tp match + case _: ConstantType => + Bottom + + case tmref: TermRef if tmref.prefix == NoPrefix => + val sym = tmref.symbol + if sym.is(Flags.Package) then + Bottom + else if sym.owner.isClass then + // The typer incorrectly assigns a TermRef with NoPrefix for `config`, + // while the actual denotation points to the symbol of the class member + // instead of the parameter symbol for the primary constructor. + // + // abstract class Base(implicit config: Int) + // case class A(x: Int)(implicit config: Int) extends Base + evalType(sym.termRef, thisV, klass, elideObjectAccess) + else + readLocal(thisV, sym) + + case tmref: TermRef => + val sym = tmref.symbol + if sym.isStaticObject then + if elideObjectAccess then + ObjectRef(sym.moduleClass.asClass) + else + accessObject(sym.moduleClass.asClass) + else + val value = evalType(tmref.prefix, thisV, klass) + select(value, tmref.symbol, tmref.prefix) + + case tp @ ThisType(tref) => + val sym = tref.symbol + if sym.is(Flags.Package) then + Bottom + else if sym.isStaticObject && sym != klass then + // The typer may use ThisType to refer to an object outside its definition. + if elideObjectAccess then + ObjectRef(sym.moduleClass.asClass) + else + accessObject(sym.moduleClass.asClass) + + else + resolveThis(tref.classSymbol.asClass, thisV, klass) + + case _ => + throw new Exception("unexpected type: " + tp + ", Trace:\n" + Trace.show) + } + + /** Evaluate arguments of methods and constructors */ + def evalArgs(args: List[Arg], thisV: ThisValue, klass: ClassSymbol): Contextual[List[ArgInfo]] = + val argInfos = new mutable.ArrayBuffer[ArgInfo] + args.foreach { arg => + val res = + if arg.isByName then + Fun(arg.tree, thisV, klass, summon[Env.Data]) + else + eval(arg.tree, thisV, klass) + + val widened = + arg.tree.tpe.getAnnotation(defn.InitWidenAnnot) match + case Some(annot) => + annot.argument(0).get match + case arg @ Literal(c: Constants.Constant) => + val height = c.intValue + if height < 0 then + report.warning("The argument should be positive", arg) + res.widen(1) + else + res.widen(c.intValue) + case arg => + report.warning("The argument should be a constant integer value", arg) + res.widen(1) + case _ => + res.widen(1) + + argInfos += ArgInfo(widened, trace.add(arg.tree), arg.tree) + } + argInfos.toList + + /** Initialize part of an abstract object in `klass` of the inheritance chain + * + * @param tpl The class body to be evaluated. + * @param thisV The value of the current object to be initialized. + * @param klass The class to which the template belongs. + */ + def init(tpl: Template, thisV: Ref, klass: ClassSymbol): Contextual[Ref] = log("init " + klass.show, printer, (_: Value).show) { + val paramsMap = tpl.constr.termParamss.flatten.map { vdef => + vdef.name -> Env.valValue(vdef.symbol) + }.toMap + + // init param fields + klass.paramGetters.foreach { acc => + val value = paramsMap(acc.name.toTermName) + if acc.is(Flags.Mutable) then + val addr = Heap.fieldVarAddr(summon[Regions.Data], acc, State.currentObject) + thisV.initVar(acc, addr) + Heap.writeJoin(addr, value) + else + thisV.initVal(acc, value) + printer.println(acc.show + " initialized with " + value) + } + + // Tasks is used to schedule super constructor calls. + // Super constructor calls are delayed until all outers are set. 
type Tasks = mutable.ArrayBuffer[() => Unit] + def superCall(tref: TypeRef, ctor: Symbol, args: List[ArgInfo], tasks: Tasks): Unit = + val cls = tref.classSymbol.asClass + // update outer for super class + val res = outerValue(tref, thisV, klass) + thisV.initOuter(cls, res) + + // follow constructor + if cls.hasSource then + tasks.append { () => + printer.println("init super class " + cls.show) + callConstructor(thisV, ctor, args) + () + } + + // parents + def initParent(parent: Tree, tasks: Tasks) = + parent match + case tree @ Block(stats, NewExpr(tref, New(tpt), ctor, argss)) => // can happen + evalExprs(stats, thisV, klass) + val args = evalArgs(argss.flatten, thisV, klass) + superCall(tref, ctor, args, tasks) + + case tree @ NewExpr(tref, New(tpt), ctor, argss) => // extends A(args) + val args = evalArgs(argss.flatten, thisV, klass) + superCall(tref, ctor, args, tasks) + + case _ => // extends A or extends A[T] + val tref = typeRefOf(parent.tpe) + superCall(tref, tref.classSymbol.primaryConstructor, Nil, tasks) + + // see spec 5.1 about "Template Evaluation". + // https://www.scala-lang.org/files/archive/spec/2.13/05-classes-and-objects.html + if !klass.is(Flags.Trait) then + // outers are set first + val tasks = new mutable.ArrayBuffer[() => Unit] + + // 1. first init parent class recursively + // 2. initialize traits according to linearization order + val superParent = tpl.parents.head + val superCls = superParent.tpe.classSymbol.asClass + extendTrace(superParent) { initParent(superParent, tasks) } + + val parents = tpl.parents.tail + val mixins = klass.baseClasses.tail.takeWhile(_ != superCls) + + // The interesting case is the outers for traits. The compiler + // synthesizes proxy accessors for the outers in the class that extends + // the trait. As those outers must be stable values, they are initialized + // immediately following class parameters and before super constructor + // calls and user code in the class body. + mixins.reverse.foreach { mixin => + parents.find(_.tpe.classSymbol == mixin) match + case Some(parent) => + extendTrace(parent) { initParent(parent, tasks) } + case None => + // According to the language spec, if the mixin trait requires + // arguments, then the class must provide arguments to it explicitly + // in the parent list. That means we will encounter it in the Some + // branch. + // + // When a trait A extends a parameterized trait B, it cannot provide + // term arguments to B. That can only be done in a concrete class. + val tref = typeRefOf(klass.typeRef.baseType(mixin).typeConstructor) + val ctor = tref.classSymbol.primaryConstructor + if ctor.exists then + // The parameter check of traits comes late in the mixin phase. + // To avoid a crash, we supply hot values for erroneous parent calls. + // See tests/neg/i16438.scala.
+          val args: List[ArgInfo] = ctor.info.paramInfoss.flatten.map(_ => new ArgInfo(Bottom, Trace.empty, EmptyTree))
+          extendTrace(superParent) {
+            superCall(tref, ctor, args, tasks)
+          }
+      }
+
+      // initialize super classes after outers are set
+      tasks.foreach(task => task())
+    end if
+
+    // class body
+    tpl.body.foreach {
+      case vdef: ValDef if !vdef.symbol.is(Flags.Lazy) && !vdef.rhs.isEmpty =>
+        val res = eval(vdef.rhs, thisV, klass)
+        val sym = vdef.symbol
+        if sym.is(Flags.Mutable) then
+          val addr = Heap.fieldVarAddr(summon[Regions.Data], sym, State.currentObject)
+          thisV.initVar(sym, addr)
+          Heap.writeJoin(addr, res)
+        else
+          thisV.initVal(sym, res)
+
+      case _: MemberDef =>
+
+      case tree =>
+        eval(tree, thisV, klass)
+    }
+
+    thisV
+  }
+
+
+  /** Resolve C.this that appears in `klass`
+   *
+   * @param target  The class symbol for `C` for which `C.this` is to be resolved.
+   * @param thisV   The value for `D.this` where `D` is represented by the parameter `klass`.
+   * @param klass   The enclosing class where the type `C.this` is located.
+   * @param elideObjectAccess Whether object access should be omitted.
+   *
+   * Object access elision happens when the object access is used as a prefix
+   * in `new o.C` and `C` does not need an outer.
+   */
+  def resolveThis(target: ClassSymbol, thisV: Value, klass: ClassSymbol, elideObjectAccess: Boolean = false): Contextual[Value] = log("resolveThis target = " + target.show + ", this = " + thisV.show, printer, (_: Value).show) {
+    if target == klass then
+      thisV
+    else if target.is(Flags.Package) then
+      Bottom
+    else if target.isStaticObject then
+      val res = ObjectRef(target.moduleClass.asClass)
+      if elideObjectAccess then res
+      else accessObject(target)
+    else
+      thisV match
+        case Bottom => Bottom
+        case Cold => Cold
+        case ref: Ref =>
+          val outerCls = klass.owner.lexicallyEnclosingClass.asClass
+          if !ref.hasOuter(klass) then
+            val error = "[Internal error] outer not yet initialized, target = " + target + ", klass = " + klass + Trace.show
+            report.warning(error, Trace.position)
+            Bottom
+          else
+            resolveThis(target, ref.outerValue(klass), outerCls)
+        case ValueSet(values) =>
+          values.map(ref => resolveThis(target, ref, klass)).join
+        case _: Fun | _: OfArray =>
+          report.warning("[Internal error] unexpected thisV = " + thisV + ", target = " + target.show + ", klass = " + klass.show + Trace.show, Trace.position)
+          Bottom
+  }
+
+  /** Compute the outer value that corresponds to `tref.prefix`
+   *
+   * @param tref    The type whose prefix is to be evaluated.
+   * @param thisV   The value for `C.this` where `C` is represented by the parameter `klass`.
+   * @param klass   The enclosing class where the type `tref` is located.
+   */
+  def outerValue(tref: TypeRef, thisV: ThisValue, klass: ClassSymbol): Contextual[Value] =
+    val cls = tref.classSymbol.asClass
+    if tref.prefix == NoPrefix then
+      val enclosing = cls.owner.lexicallyEnclosingClass.asClass
+      resolveThis(enclosing, thisV, klass, elideObjectAccess = cls.isStatic)
+    else
+      if cls.isAllOf(Flags.JavaInterface) then Bottom
+      else evalType(tref.prefix, thisV, klass, elideObjectAccess = cls.isStatic)
+
+  def errorMutateOtherStaticObject(currentObj: ClassSymbol, otherObj: ClassSymbol)(using Trace, Context) =
+    val msg =
+      s"Mutating ${otherObj.show} during initialization of ${currentObj.show}.\n" +
+      "Mutating other static objects during the initialization of one static object is forbidden. 
" + Trace.show + + report.warning(msg, Trace.position) + + def errorReadOtherStaticObject(currentObj: ClassSymbol, otherObj: ClassSymbol)(using Trace, Context) = + val msg = + "Reading mutable state of " + otherObj.show + " during initialization of " + currentObj.show + ".\n" + + "Reading mutable state of other static objects is forbidden as it breaks initialization-time irrelevance. " + Trace.show + + report.warning(msg, Trace.position) diff --git a/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala b/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala index 4548dccb598f..caf3435608d2 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala @@ -589,7 +589,7 @@ object Semantic: Hot else if ref.klass.isSubClass(receiver.widenSingleton.classSymbol) then - report.error("[Internal error] Unexpected resolution failure: ref.klass = " + ref.klass.show + ", field = " + field.show + Trace.show, Trace.position) + report.warning("[Internal error] Unexpected resolution failure: ref.klass = " + ref.klass.show + ", field = " + field.show + Trace.show, Trace.position) Hot else // This is possible due to incorrect type cast. @@ -597,7 +597,7 @@ object Semantic: Hot case fun: Fun => - report.error("[Internal error] unexpected tree in selecting a function, fun = " + fun.expr.show + Trace.show, fun.expr) + report.warning("[Internal error] unexpected tree in selecting a function, fun = " + fun.expr.show + Trace.show, fun.expr) Hot case RefSet(refs) => @@ -725,7 +725,7 @@ object Semantic: value.select(target, receiver, needResolve = false) else if ref.klass.isSubClass(receiver.widenSingleton.classSymbol) then - report.error("[Internal error] Unexpected resolution failure: ref.klass = " + ref.klass.show + ", meth = " + meth.show + Trace.show, Trace.position) + report.warning("[Internal error] Unexpected resolution failure: ref.klass = " + ref.klass.show + ", meth = " + meth.show + Trace.show, Trace.position) Hot else // This is possible due to incorrect type cast. 
@@ -755,7 +755,7 @@ object Semantic: value match { case Hot | Cold | _: RefSet | _: Fun => - report.error("[Internal error] unexpected constructor call, meth = " + ctor + ", value = " + value + Trace.show, Trace.position) + report.warning("[Internal error] unexpected constructor call, meth = " + ctor + ", value = " + value + Trace.show, Trace.position) Hot case ref: Warm if ref.isPopulatingParams => @@ -862,7 +862,7 @@ object Semantic: warm case Fun(body, thisV, klass) => - report.error("[Internal error] unexpected tree in instantiating a function, fun = " + body.show + Trace.show, Trace.position) + report.warning("[Internal error] unexpected tree in instantiating a function, fun = " + body.show + Trace.show, Trace.position) Hot case RefSet(refs) => @@ -882,7 +882,7 @@ object Semantic: case Hot => Hot case ref: Ref => ref.objekt.field(sym) case _ => - report.error("[Internal error] unexpected this value accessing local variable, sym = " + sym.show + ", thisValue = " + thisValue2.show + Trace.show, Trace.position) + report.warning("[Internal error] unexpected this value accessing local variable, sym = " + sym.show + ", thisValue = " + thisValue2.show + Trace.show, Trace.position) Hot else if sym.is(Flags.Param) then Hot @@ -900,7 +900,7 @@ object Semantic: case ref: Ref => eval(vdef.rhs, ref, enclosingClass, cacheResult = sym.is(Flags.Lazy)) case _ => - report.error("[Internal error] unexpected this value when accessing local variable, sym = " + sym.show + ", thisValue = " + thisValue2.show + Trace.show, Trace.position) + report.warning("[Internal error] unexpected this value when accessing local variable, sym = " + sym.show + ", thisValue = " + thisValue2.show + Trace.show, Trace.position) Hot end match @@ -1040,7 +1040,7 @@ object Semantic: // // This invariant holds because of the Scala/Java/JVM restriction that we cannot use `this` in super constructor calls. if subClassSegmentHot && !isHotSegment then - report.error("[Internal error] Expect current segment to be transitively initialized (Hot) in promotion, current klass = " + klass.show + + report.warning("[Internal error] Expect current segment to be transitively initialized (Hot) in promotion, current klass = " + klass.show + ", subclass = " + subClass.show + Trace.show, Trace.position) // If the outer and parameters of a class are all hot, then accessing fields and methods of the current @@ -1140,7 +1140,7 @@ object Semantic: */ def checkClasses(classes: List[ClassSymbol])(using Context): Unit = given Cache.Data() - for classSym <- classes if isConcreteClass(classSym) do + for classSym <- classes if isConcreteClass(classSym) && !classSym.isStaticObject do checkClass(classSym) // ----- Semantic definition -------------------------------- @@ -1229,7 +1229,20 @@ object Semantic: ref match case Select(supert: Super, _) => val SuperType(thisTp, superTp) = supert.tpe: @unchecked - val thisValue2 = extendTrace(ref) { resolveThis(thisTp.classSymbol.asClass, thisV, klass) } + val thisValue2 = extendTrace(ref) { + thisTp match + case thisTp: ThisType => + cases(thisTp, thisV, klass) + + case AndType(thisTp: ThisType, _) => + // Self-type annotation will generate an intersection type for `this`. 
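+              // Illustration (hypothetical example): in `class A { self: B => ... }`,
+              // the type of `this` inside `A` is the intersection `A & B`.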
+ // See examples/i17997.scala + cases(thisTp, thisV, klass) + + case _ => + report.warning("[Internal error] Unexpected type " + thisTp.show + ", trace:\n" + Trace.show, ref) + Hot + } withTrace(trace2) { thisValue2.call(ref.symbol, args, thisTp, superTp) } case Select(qual, _) => @@ -1300,8 +1313,8 @@ object Semantic: case closureDef(ddef) => Fun(ddef.rhs, thisV, klass) - case PolyFun(body) => - Fun(body, thisV, klass) + case PolyFun(ddef) => + Fun(ddef.rhs, thisV, klass) case Block(stats, expr) => eval(stats, thisV, klass) @@ -1370,11 +1383,11 @@ object Semantic: case tpl: Template => init(tpl, thisV, klass) - case _: Import | _: Export => + case _: Import | _: Export | _: Quote | _: Splice | _: QuotePattern | _: SplicePattern => Hot case _ => - report.error("[Internal error] unexpected tree" + Trace.show, expr) + report.warning("[Internal error] unexpected tree: " + expr.getClass + ", trace:\n" + Trace.show, expr) Hot /** Handle semantics of leaf nodes @@ -1418,7 +1431,7 @@ object Semantic: Hot case _ => - report.error("[Internal error] unexpected type " + tp + Trace.show, Trace.position) + report.warning("[Internal error] unexpected type " + tp + Trace.show, Trace.position) Hot } @@ -1439,14 +1452,14 @@ object Semantic: val outerCls = klass.owner.lexicallyEnclosingClass.asClass if !obj.hasOuter(klass) then val error = "[Internal error] outer not yet initialized, target = " + target + ", klass = " + klass + ", object = " + obj + Trace.show - report.error(error, Trace.position) + report.warning(error, Trace.position) Hot else resolveThis(target, obj.outer(klass), outerCls) case RefSet(refs) => refs.map(ref => resolveThis(target, ref, klass)).join case fun: Fun => - report.error("[Internal error] unexpected thisV = " + thisV + ", target = " + target.show + ", klass = " + klass.show + Trace.show, Trace.position) + report.warning("[Internal error] unexpected thisV = " + thisV + ", target = " + target.show + ", klass = " + klass.show + Trace.show, Trace.position) Cold case Cold => Cold diff --git a/compiler/src/dotty/tools/dotc/transform/init/Trace.scala b/compiler/src/dotty/tools/dotc/transform/init/Trace.scala index 7dfbc0b6cfa5..ffaccad963af 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Trace.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Trace.scala @@ -7,7 +7,7 @@ import Contexts.* import ast.tpd.* import util.SourcePosition -import Decorators._, printing.SyntaxHighlighting +import Decorators.*, printing.SyntaxHighlighting import scala.collection.mutable @@ -22,12 +22,17 @@ object Trace: val empty: Trace = Vector.empty + val EMPTY_PADDING = " " + val CONNECTING_INDENT = "\u2502 " // "| " + val CHILD = "\u251c\u2500\u2500 " // "|-- " + val LAST_CHILD = "\u2514\u2500\u2500 " // "\-- " + extension (trace: Trace) def add(node: Tree): Trace = trace :+ node def toVector: Vector[Tree] = trace def ++(trace2: Trace): Trace = trace ++ trace2 - def show(using trace: Trace, ctx: Context): String = buildStacktrace(trace, "\n") + def show(using trace: Trace, ctx: Context): String = buildStacktrace(trace, "Calling trace:" + System.lineSeparator()) def position(using trace: Trace): Tree = trace.last @@ -41,25 +46,33 @@ object Trace: var lastLineNum = -1 var lines: mutable.ArrayBuffer[String] = new mutable.ArrayBuffer trace.foreach { tree => + val isLastTraceItem = tree `eq` trace.last val pos = tree.sourcePos - val prefix = "-> " val line = if pos.source.exists then val loc = "[ " + pos.source.file.name + ":" + (pos.line + 1) + " ]" val code = 
SyntaxHighlighting.highlight(pos.lineContent.trim.nn)
           i"$code\t$loc"
         else
-          tree.show
+          tree match
+            case defDef: DefTree =>
+              // The definition can be huge, avoid printing the whole definition.
+              defDef.symbol.showFullName
+            case _ =>
+              tree.show.split(System.lineSeparator(), 2).nn.head.nn
+
       val positionMarkerLine =
         if pos.exists && pos.source.exists then
-          positionMarker(pos)
-        else ""
+          (if isLastTraceItem then EMPTY_PADDING else CONNECTING_INDENT) + positionMarker(pos)
+        else
+          ""
 
       // always use the more precise trace location
-      if lastLineNum == pos.line then
+      if lastLineNum >= 0 && lastLineNum == pos.line then
         lines.dropRightInPlace(1)
 
-      lines += (prefix + line + "\n" + positionMarkerLine)
+      val prefix = if isLastTraceItem then LAST_CHILD else CHILD
+      lines += (prefix + line + System.lineSeparator() + positionMarkerLine)
 
       lastLineNum = pos.line
     }
 
@@ -72,11 +85,11 @@ object Trace:
    *  pos.source must exist
    */
   private def positionMarker(pos: SourcePosition): String =
-    val trimmed = pos.lineContent.takeWhile(c => c.isWhitespace).length
-    val padding = pos.startColumnPadding.substring(trimmed).nn + " "
+    val trimmed = pos.source.lineContent(pos.start).takeWhile(c => c.isWhitespace).length
+    val padding = pos.startColumnPadding.substring(trimmed).nn
     val carets =
       if (pos.startLine == pos.endLine)
         "^" * math.max(1, pos.endColumn - pos.startColumn)
       else "^"
 
-    s"$padding$carets\n"
+    s"$padding$carets" + System.lineSeparator()
diff --git a/compiler/src/dotty/tools/dotc/transform/init/Util.scala b/compiler/src/dotty/tools/dotc/transform/init/Util.scala
index ba2216504aef..70390028e84f 100644
--- a/compiler/src/dotty/tools/dotc/transform/init/Util.scala
+++ b/compiler/src/dotty/tools/dotc/transform/init/Util.scala
@@ -26,6 +26,9 @@ object Util:
   opaque type Arg  = Tree | ByNameArg
   case class ByNameArg(tree: Tree)
 
+  object Arg:
+    def apply(tree: Tree): Arg = tree
+
   extension (arg: Arg)
     def isByName = arg.isInstanceOf[ByNameArg]
     def tree: Tree = arg match
@@ -64,14 +67,14 @@ object Util:
       case _ => None
 
   object PolyFun:
-    def unapply(tree: Tree)(using Context): Option[Tree] =
+    def unapply(tree: Tree)(using Context): Option[DefDef] =
       tree match
       case Block((cdef: TypeDef) :: Nil, Typed(NewExpr(tref, _, _, _), _))
       if tref.symbol.isAnonymousClass && tref <:< defn.PolyFunctionType
       =>
         val body = cdef.rhs.asInstanceOf[Template].body
         val apply = body.head.asInstanceOf[DefDef]
-        Some(apply.rhs)
+        Some(apply)
       case _ =>
         None
 
@@ -98,3 +101,8 @@ object Util:
     // A concrete class may not be instantiated if the self type is not satisfied
     instantiable && cls.enclosingPackageClass != defn.StdLibPatchesPackage.moduleClass
+
+  /** Whether the class or any of its super classes/traits contains mutable fields.
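+   *
+   *  For example (schematic): given `class A { var x = 0 }` and
+   *  `class B extends A`, both `isMutable(A)` and `isMutable(B)` hold.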
*/ + def isMutable(cls: ClassSymbol)(using Context): Boolean = + cls.classInfo.decls.exists(_.is(Flags.Mutable)) || + cls.parentSyms.exists(parentCls => isMutable(parentCls.asClass)) diff --git a/compiler/src/dotty/tools/dotc/transform/localopt/FormatChecker.scala b/compiler/src/dotty/tools/dotc/transform/localopt/FormatChecker.scala index ff8d89920791..9e40792895c0 100644 --- a/compiler/src/dotty/tools/dotc/transform/localopt/FormatChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/localopt/FormatChecker.scala @@ -10,9 +10,9 @@ import scala.util.matching.Regex.Match import PartialFunction.cond import dotty.tools.dotc.ast.tpd.{Match => _, *} -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.core.Phases.typerPhase import dotty.tools.dotc.util.Spans.Span diff --git a/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala b/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala index 5cad7ba72831..7743054f5487 100644 --- a/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala +++ b/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala @@ -4,12 +4,12 @@ package transform.localopt import scala.language.unsafeNulls import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Decorators.* import dotty.tools.dotc.core.Constants.Constant -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.transform.MegaPhase.MiniPhase import dotty.tools.dotc.typer.ConstFold diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index eab65890c227..0df81f756925 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -3,23 +3,21 @@ package dotc package transform package patmat -import core._ -import Types._ -import TypeUtils._ -import Contexts._ -import Flags._ -import ast._ +import core.* +import Types.* +import Contexts.* +import Flags.* +import ast.* import Decorators.{ show => _, * } -import Symbols._ -import StdNames._ -import NameOps._ -import Constants._ -import typer._ -import Applications._ -import Inferencing._ -import ProtoTypes._ -import transform.SymUtils._ -import reporting._ +import Symbols.* +import StdNames.* +import NameOps.* +import Constants.* +import typer.* +import Applications.* +import Inferencing.* +import ProtoTypes.* +import reporting.* import config.Printers.{exhaustivity => debug} import util.{SrcPos, NoSourcePosition} @@ -116,7 +114,7 @@ case class Prod(tp: Type, unappTp: TermRef, params: List[Space]) extends Space case class Or(spaces: Seq[Space]) extends Space object SpaceEngine { - import tpd._ + import tpd.* def simplify(space: Space)(using Context): Space = space.simplify def isSubspace(a: Space, b: Space)(using Context): Boolean = a.isSubspace(b) @@ -149,7 +147,7 @@ object SpaceEngine { if (spaces.lengthCompare(1) <= 0 || spaces.lengthCompare(10) >= 0) spaces else { val res = 
spaces.map(sp => (sp, spaces.filter(_ ne sp))).find { - case (sp, sps) => isSubspace(sp, Or(LazyList(sps: _*))) + case (sp, sps) => isSubspace(sp, Or(LazyList(sps*))) } if (res.isEmpty) spaces else res.get._2 @@ -158,7 +156,7 @@ object SpaceEngine { /** Flatten space to get rid of `Or` for pretty print */ def flatten(space: Space)(using Context): Seq[Space] = space match { case Prod(tp, fun, spaces) => - val ss = LazyList(spaces: _*).map(flatten) + val ss = LazyList(spaces*).map(flatten) ss.foldLeft(LazyList(Nil : List[Space])) { (acc, flat) => for { sps <- acc; s <- flat } @@ -168,7 +166,7 @@ object SpaceEngine { } case Or(spaces) => - LazyList(spaces: _*).flatMap(flatten) + LazyList(spaces*).flatMap(flatten) case _ => List(space) @@ -193,10 +191,11 @@ object SpaceEngine { || canDecompose(b) && isSubspace(a, Or(decompose(b))) case (Prod(tp1, _, _), Typ(tp2, _)) => isSubType(tp1, tp2) - case (Typ(tp1, _), Prod(tp2, fun, ss)) => + case (a @ Typ(tp1, _), Prod(tp2, fun, ss)) => isSubType(tp1, tp2) && covers(fun, tp1, ss.length) && isSubspace(Prod(tp2, fun, signature(fun, tp1, ss.length).map(Typ(_, false))), b) + || canDecompose(a) && isSubspace(Or(decompose(a)), b) case (Prod(_, fun1, ss1), Prod(_, fun2, ss2)) => isSameUnapply(fun1, fun2) && ss1.lazyZip(ss2).forall(isSubspace) } @@ -211,17 +210,13 @@ object SpaceEngine { case (a @ Typ(tp1, _), b @ Typ(tp2, _)) => if isSubType(tp1, tp2) then a else if isSubType(tp2, tp1) then b - else if canDecompose(a) then intersect(Or(decompose(a)), b) - else if canDecompose(b) then intersect(a, Or(decompose(b))) else intersectUnrelatedAtomicTypes(tp1, tp2)(a) case (a @ Typ(tp1, _), Prod(tp2, fun, ss)) => if isSubType(tp2, tp1) then b - else if canDecompose(a) then intersect(Or(decompose(a)), b) else if isSubType(tp1, tp2) then a // problematic corner case: inheriting a case class else intersectUnrelatedAtomicTypes(tp1, tp2)(b) case (Prod(tp1, fun, ss), b @ Typ(tp2, _)) => if isSubType(tp1, tp2) then a - else if canDecompose(b) then intersect(a, Or(decompose(b))) else if isSubType(tp2, tp1) then a // problematic corner case: inheriting a case class else intersectUnrelatedAtomicTypes(tp1, tp2)(a) case (a @ Prod(tp1, fun1, ss1), Prod(tp2, fun2, ss2)) => @@ -271,7 +266,7 @@ object SpaceEngine { else if cache.forall(sub => isSubspace(sub.nn, Empty)) then Empty else // `(_, _, _) - (Some, None, _)` becomes `(None, _, _) | (_, Some, _) | (_, _, Empty)` - val spaces = LazyList(range: _*).flatMap { i => + val spaces = LazyList(range*).flatMap { i => flatten(sub(i)).map(s => Prod(tp1, fun1, ss1.updated(i, s))) } Or(spaces) @@ -288,11 +283,9 @@ object SpaceEngine { || (unapp.symbol.is(Synthetic) && unapp.symbol.owner.linkedClass.is(Case)) // scala2 compatibility || unapplySeqTypeElemTp(unappResult).exists // only for unapplySeq || isProductMatch(unappResult, argLen) - || { - val isEmptyTp = extractorMemberType(unappResult, nme.isEmpty, NoSourcePosition) - isEmptyTp <:< ConstantType(Constant(false)) - } + || extractorMemberType(unappResult, nme.isEmpty, NoSourcePosition) <:< ConstantType(Constant(false)) || unappResult.derivesFrom(defn.NonEmptyTupleClass) + || unapp.symbol == defn.TupleXXL_unapplySeq // Fixes TupleXXL.unapplySeq which returns Some but declares Option } /** Is the unapply or unapplySeq irrefutable? @@ -305,23 +298,13 @@ object SpaceEngine { } /** Is this an `'{..}` or `'[..]` irrefutable quoted patterns? 
- * @param unapp The unapply function tree - * @param implicits The implicits of the unapply - * @param pt The scrutinee type + * @param body The body of the quoted pattern + * @param bodyPt The scrutinee body type */ - def isIrrefutableQuotedPattern(unapp: tpd.Tree, implicits: List[tpd.Tree], pt: Type)(using Context): Boolean = { - implicits.headOption match - // pattern '{ $x: T } - case Some(tpd.Apply(tpd.Select(tpd.Quote(tpd.TypeApply(fn, List(tpt)), _), nme.apply), _)) - if unapp.symbol.owner.eq(defn.QuoteMatching_ExprMatchModule) - && fn.symbol.eq(defn.QuotedRuntimePatterns_patternHole) => - pt <:< defn.QuotedExprClass.typeRef.appliedTo(tpt.tpe) - - // pattern '[T] - case Some(tpd.Apply(tpd.TypeApply(fn, List(tpt)), _)) - if unapp.symbol.owner.eq(defn.QuoteMatching_TypeMatchModule) => - pt =:= defn.QuotedTypeClass.typeRef.appliedTo(tpt.tpe) - + def isIrrefutableQuotePattern(pat: tpd.QuotePattern, pt: Type)(using Context): Boolean = { + if pat.body.isType then pat.bindings.isEmpty && pt =:= pat.tpe + else pat.body match + case _: SplicePattern | Typed(_: SplicePattern, _) => pat.bindings.isEmpty && pt <:< pat.tpe case _ => false } @@ -394,7 +377,7 @@ object SpaceEngine { project(pat) case Typed(_, tpt) => - Typ(erase(tpt.tpe.stripAnnots, isValue = true), decomposed = false) + Typ(erase(tpt.tpe.stripAnnots, isValue = true, isTyped = true), decomposed = false) case This(_) => Typ(pat.tpe.stripAnnots, decomposed = false) @@ -444,7 +427,7 @@ object SpaceEngine { * * We cannot use type erasure here, as it would lose the constraints * involving GADTs. For example, in the following code, type - * erasure would loose the constraint that `x` and `y` must be + * erasure would lose the constraint that `x` and `y` must be * the same type, resulting in false inexhaustive warnings: * * sealed trait Expr[T] @@ -458,38 +441,41 @@ object SpaceEngine { * * @param inArray whether `tp` is a type argument to `Array` * @param isValue whether `tp` is the type which match against values + * @param isTyped whether `tp` is the type from a `Typed` tree * * If `isValue` is true, then pattern-bound symbols are erased to its upper bound. * This is needed to avoid spurious unreachable warnings. See tests/patmat/i6197.scala. 
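+   *
+   *  For example (schematic): in `case xs: List[t] =>` the pattern-bound `t`
+   *  occurs as a type argument and erases to a wildcard, giving `List[?]`,
+   *  since `t` cannot be checked at runtime.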
*/ - private def erase(tp: Type, inArray: Boolean = false, isValue: Boolean = false)(using Context): Type = - trace(i"erase($tp${if inArray then " inArray" else ""}${if isValue then " isValue" else ""})", debug)(tp match { + private def erase(tp: Type, inArray: Boolean = false, isValue: Boolean = false, isTyped: Boolean = false)(using Context): Type = + trace(i"erase($tp${if inArray then " inArray" else ""}${if isValue then " isValue" else ""}${if isTyped then " isTyped" else ""})", debug)(tp match { case tp @ AppliedType(tycon, args) if tycon.typeSymbol.isPatternBound => WildcardType case tp @ AppliedType(tycon, args) => - val inArray = tycon.isRef(defn.ArrayClass) - val args2 = args.map(arg => erase(arg, inArray = inArray, isValue = false)) + val inArray = tycon.isRef(defn.ArrayClass) || tp.translucentSuperType.isRef(defn.ArrayClass) + val args2 = + if isTyped && !inArray then args.map(_ => WildcardType) + else args.map(arg => erase(arg, inArray = inArray, isValue = false)) tp.derivedAppliedType(erase(tycon, inArray, isValue = false), args2) case tp @ OrType(tp1, tp2) => - OrType(erase(tp1, inArray, isValue), erase(tp2, inArray, isValue), tp.isSoft) + OrType(erase(tp1, inArray, isValue, isTyped), erase(tp2, inArray, isValue, isTyped), tp.isSoft) case AndType(tp1, tp2) => - AndType(erase(tp1, inArray, isValue), erase(tp2, inArray, isValue)) + AndType(erase(tp1, inArray, isValue, isTyped), erase(tp2, inArray, isValue, isTyped)) case tp @ RefinedType(parent, _, _) => - erase(parent, inArray, isValue) + erase(parent, inArray, isValue, isTyped) case tref: TypeRef if tref.symbol.isPatternBound => - if inArray then tref.underlying - else if isValue then tref.superType + if inArray then erase(tref.underlying, inArray, isValue, isTyped) + else if isValue then erase(tref.superType, inArray, isValue, isTyped) else WildcardType case _ => tp }) - /** Space of the pattern: unapplySeq(a, b, c: _*) + /** Space of the pattern: unapplySeq(a, b, c*) */ def projectSeq(pats: List[Tree])(using Context): Space = { if (pats.isEmpty) return Typ(defn.NilType, false) @@ -513,6 +499,7 @@ object SpaceEngine { def isSubType(tp1: Type, tp2: Type)(using Context): Boolean = trace(i"$tp1 <:< $tp2", debug, show = true) { if tp1 == ConstantType(Constant(null)) && !ctx.mode.is(Mode.SafeNulls) then tp2 == ConstantType(Constant(null)) + else if tp1.isTupleXXLExtract(tp2) then true // See isTupleXXLExtract, fixes TupleXXL parameter type else tp1 <:< tp2 } @@ -522,10 +509,14 @@ object SpaceEngine { * We assume that unapply methods are pure, but the same method may * be called with different prefixes, thus behaving differently. */ - def isSameUnapply(tp1: TermRef, tp2: TermRef)(using Context): Boolean = + def isSameUnapply(tp1: TermRef, tp2: TermRef)(using Context): Boolean = trace(i"isSameUnapply($tp1, $tp2)") { + def isStable(tp: TermRef) = + !tp.symbol.is(ExtensionMethod) // The "prefix" of an extension method may be, but the receiver isn't, so exclude + && tp.prefix.isStable // always assume two TypeTest[S, T].unapply are the same if they are equal in types - (tp1.prefix.isStable && tp2.prefix.isStable || tp1.symbol == defn.TypeTest_unapply) + (isStable(tp1) && isStable(tp2) || tp1.symbol == defn.TypeTest_unapply) && tp1 =:= tp2 + } /** Return term parameter types of the extractor `unapp`. * Parameter types of the case class type `tp`. 
Adapted from `unapplyPlan` in patternMatcher */ @@ -537,7 +528,7 @@ object SpaceEngine { val mt: MethodType = unapp.widen match { case mt: MethodType => mt case pt: PolyType => - val tvars = pt.paramInfos.map(newTypeVar(_)) + val tvars = constrained(pt) val mt = pt.instantiate(tvars).asInstanceOf[MethodType] scrutineeTp <:< mt.paramInfos(0) // force type inference to infer a narrower type: could be singleton @@ -579,7 +570,13 @@ object SpaceEngine { if (arity > 0) productSelectorTypes(resTp, unappSym.srcPos) else { - val getTp = resTp.select(nme.get).finalResultType.widenTermRefExpr + val getTp = resTp.select(nme.get).finalResultType match + case tp: TermRef if !tp.isOverloaded => + // Like widenTermRefExpr, except not recursively. + // For example, in i17184 widen Option[foo.type]#get + // to Option[foo.type] instead of Option[Int]. + tp.underlying.widenExpr + case tp => tp if (argLen == 1) getTp :: Nil else productSelectorTypes(getTp, unappSym.srcPos) } @@ -592,7 +589,7 @@ object SpaceEngine { } /** Whether the extractor covers the given type */ - def covers(unapp: TermRef, scrutineeTp: Type, argLen: Int)(using Context): Boolean = + def covers(unapp: TermRef, scrutineeTp: Type, argLen: Int)(using Context): Boolean = trace(i"covers($unapp, $scrutineeTp, $argLen)") { SpaceEngine.isIrrefutable(unapp, argLen) || unapp.symbol == defn.TypeTest_unapply && { val AppliedType(_, _ :: tp :: Nil) = unapp.prefix.widen.dealias: @unchecked @@ -602,6 +599,7 @@ object SpaceEngine { val AppliedType(_, tp :: Nil) = unapp.prefix.widen.dealias: @unchecked scrutineeTp <:< tp } + } /** Decompose a type into subspaces -- assume the type can be decomposed */ def decompose(tp: Type)(using Context): List[Type] = trace(i"decompose($tp)", debug) { @@ -623,7 +621,7 @@ object SpaceEngine { case tp if tp.isRef(defn.UnitClass) => ConstantType(Constant(())) :: Nil case tp @ NamedType(Parts(parts), _) => parts.map(tp.derivedSelect) case _: SingletonType => ListOfNoType - case tp if tp.classSymbol.isAllOf(JavaEnumTrait) => tp.classSymbol.children.map(_.termRef) + case tp if tp.classSymbol.isAllOf(JavaEnum) => tp.classSymbol.children.map(_.termRef) // the class of a java enum value is the enum class, so this must follow SingletonType to not loop infinitely case tp @ AppliedType(Parts(parts), targs) if tp.classSymbol.children.isEmpty => @@ -771,7 +769,7 @@ object SpaceEngine { checkConstraint(genConstraint(sp))(using ctx.fresh.setNewTyperState()) } - def showSpaces(ss: Seq[Space])(using Context): String = ss.map(show).mkString(", ") + def showSpaces(ss: Seq[Space])(using Context): Seq[String] = ss.map(show) /** Display spaces */ def show(s: Space)(using Context): String = { @@ -786,7 +784,7 @@ object SpaceEngine { def doShow(s: Space, flattenList: Boolean = false): String = s match { case Empty => "empty" - case Typ(c: ConstantType, _) => "" + c.value.value + case Typ(c: ConstantType, _) => c.value.show case Typ(tp: TermRef, _) => if (flattenList && tp <:< defn.NilType) "" else tp.symbol.showName @@ -833,14 +831,15 @@ object SpaceEngine { def isCheckable(tp: Type): Boolean = val tpw = tp.widen.dealias val classSym = tpw.classSymbol - classSym.is(Sealed) || + classSym.is(Sealed) && !tpw.isLargeGenericTuple || // exclude large generic tuples from exhaustivity + // requires an unknown number of changes to make work tpw.isInstanceOf[OrType] || (tpw.isInstanceOf[AndType] && { val and = tpw.asInstanceOf[AndType] isCheckable(and.tp1) || isCheckable(and.tp2) }) || tpw.isRef(defn.BooleanClass) || - classSym.isAllOf(JavaEnumTrait) || 
+ classSym.isAllOf(JavaEnum) || classSym.is(Case) && { if seen.add(tpw) then productSelectorTypes(tpw, sel.srcPos).exists(isCheckable(_)) else true // recursive case class: return true and other members can still fail the check @@ -877,7 +876,7 @@ object SpaceEngine { case _ => tp }) - def checkExhaustivity(m: Match)(using Context): Unit = if exhaustivityCheckable(m.selector) then trace(i"checkExhaustivity($m)", debug) { + def checkExhaustivity(m: Match)(using Context): Unit = trace(i"checkExhaustivity($m)", debug) { val selTyp = toUnderlying(m.selector.tpe).dealias debug.println(i"selTyp = $selTyp") @@ -896,12 +895,11 @@ object SpaceEngine { if uncovered.nonEmpty then - val hasMore = uncovered.lengthCompare(6) > 0 - val deduped = dedup(uncovered.take(6)) - report.warning(PatternMatchExhaustivity(showSpaces(deduped), hasMore), m.selector) + val deduped = dedup(uncovered) + report.warning(PatternMatchExhaustivity(showSpaces(deduped), m), m.selector) } - private def redundancyCheckable(sel: Tree)(using Context): Boolean = + private def reachabilityCheckable(sel: Tree)(using Context): Boolean = // Ignore Expr[T] and Type[T] for unreachability as a special case. // Quote patterns produce repeated calls to the same unapply method, but with different implicit parameters. // Since we assume that repeated calls to the same unapply method overlap @@ -911,7 +909,7 @@ object SpaceEngine { && !sel.tpe.widen.isRef(defn.QuotedExprClass) && !sel.tpe.widen.isRef(defn.QuotedTypeClass) - def checkRedundancy(m: Match)(using Context): Unit = if redundancyCheckable(m.selector) then trace(i"checkRedundancy($m)", debug) { + def checkReachability(m: Match)(using Context): Unit = trace(i"checkReachability($m)", debug) { val cases = m.cases.toIndexedSeq val selTyp = toUnderlying(m.selector.tpe).dealias @@ -963,4 +961,8 @@ object SpaceEngine { i += 1 } } + + def checkMatch(m: Match)(using Context): Unit = + if exhaustivityCheckable(m.selector) then checkExhaustivity(m) + if reachabilityCheckable(m.selector) then checkReachability(m) } diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala b/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala index 6471e58d4ddc..951024f3d4db 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala @@ -3,12 +3,12 @@ package dotc package transform package sjs -import MegaPhase._ +import MegaPhase.* import core.Constants -import core.Contexts._ -import core.Decorators._ +import core.Contexts.* +import core.Decorators.* import core.StdNames.nme -import core.Symbols._ +import core.Symbols.* import dotty.tools.backend.sjs.JSDefinitions.jsdefn @@ -46,7 +46,7 @@ import dotty.tools.backend.sjs.JSDefinitions.jsdefn */ class AddLocalJSFakeNews extends MiniPhase { thisPhase => import ExplicitOuter.outer - import ast.tpd._ + import ast.tpd.* override def phaseName: String = AddLocalJSFakeNews.name diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala b/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala index 705b3cc404a8..853fead6f799 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala @@ -3,28 +3,29 @@ package dotc package transform package sjs - -import MegaPhase._ -import core.Annotations._ -import core.Constants._ -import core.Denotations._ -import core.DenotTransformers._ -import core.Symbols._ -import 
core.Contexts._ -import core.Types._ -import core.Flags._ -import core.Decorators._ +import scala.compiletime.uninitialized + +import MegaPhase.* +import core.Annotations.* +import core.Constants.* +import core.Denotations.* +import core.DenotTransformers.* +import core.Symbols.* +import core.Contexts.* +import core.Types.* +import core.Flags.* +import core.Decorators.* import core.StdNames.nme import core.SymDenotations.SymDenotation -import core.Names._ -import core.NameKinds._ -import SymUtils._ +import core.Names.* +import core.NameKinds.* + import util.Store import dotty.tools.backend.sjs.JSDefinitions.jsdefn -import JSSymUtils._ +import JSSymUtils.* /** This phase makes all JS classes explicit (their definitions and references to them). * @@ -229,14 +230,14 @@ import JSSymUtils._ * created by step (C). */ class ExplicitJSClasses extends MiniPhase with InfoTransformer { thisPhase => - import ExplicitJSClasses._ - import ast.tpd._ + import ExplicitJSClasses.* + import ast.tpd.* override def phaseName: String = ExplicitJSClasses.name override def description: String = ExplicitJSClasses.description - private var MyState: Store.Location[MyState] = _ + private var MyState: Store.Location[MyState] = uninitialized private def myState(using Context) = ctx.store(MyState) override def initContext(ctx: FreshContext): Unit = diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/JSExportUtils.scala b/compiler/src/dotty/tools/dotc/transform/sjs/JSExportUtils.scala index 9abf9a919d6d..2b0ed3c4880e 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/JSExportUtils.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/JSExportUtils.scala @@ -4,9 +4,9 @@ package sjs import scala.language.unsafeNulls -import core._ +import core.* import NameKinds.DefaultGetterName -import Names._ +import Names.* /** Utilities for JS exports handling. 
*/ diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala b/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala index 115d41dd3d46..936b6958fb33 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala @@ -2,18 +2,18 @@ package dotty.tools.dotc package transform package sjs -import core._ -import Constants._ -import Contexts._ -import Flags._ -import NameOps._ -import Names._ -import Phases._ -import StdNames._ -import Symbols._ -import SymUtils._ -import ast.Trees._ -import Types._ +import core.* +import Constants.* +import Contexts.* +import Flags.* +import NameOps.* +import Names.* +import Phases.* +import StdNames.* +import Symbols.* + +import ast.Trees.* +import Types.* import dotty.tools.backend.sjs.JSDefinitions.jsdefn @@ -185,7 +185,7 @@ object JSSymUtils { val list = for ((name, info) <- paramNamesAndTypes) yield { val v = - if (info.isRepeatedParam) Some(info.repeatedToSingle.widenDealias) + if (info.isRepeatedParam) Some(TypeErasure.erasure(info.repeatedToSingle)) else None name -> v } diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala b/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala index b911d7dfab96..b7a179ac7562 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala @@ -4,18 +4,18 @@ package sjs import scala.annotation.tailrec -import dotty.tools.dotc.core._ -import Constants._ -import Contexts._ -import Flags._ -import Names._ -import Scopes._ -import Symbols._ -import StdNames._ -import Types._ +import dotty.tools.dotc.core.* +import Constants.* +import Contexts.* +import Flags.* +import Names.* +import Scopes.* +import Symbols.* +import StdNames.* +import Types.* import Decorators.em -import dotty.tools.dotc.transform.MegaPhase._ +import dotty.tools.dotc.transform.MegaPhase.* import dotty.tools.backend.sjs.JSDefinitions.jsdefn @@ -106,8 +106,8 @@ import dotty.tools.backend.sjs.JSDefinitions.jsdefn * some point in the future. 
*/ class JUnitBootstrappers extends MiniPhase { - import JUnitBootstrappers._ - import ast.tpd._ + import JUnitBootstrappers.* + import ast.tpd.* override def phaseName: String = JUnitBootstrappers.name @@ -156,7 +156,7 @@ class JUnitBootstrappers extends MiniPhase { val moduleSym = newCompleteModuleSymbol(owner, bootstrapperName, Synthetic, Synthetic, List(defn.ObjectType, junitdefn.BootstrapperType), newScope, - coord = testClass.span, assocFile = testClass.assocFile).entered + coord = testClass.span, compUnitInfo = testClass.compUnitInfo).entered val classSym = moduleSym.moduleClass.asClass val constr = genConstructor(classSym) diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala index 25ab46712e70..dbd6e1a8f412 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala @@ -3,29 +3,29 @@ package transform package sjs import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core._ -import Contexts._ -import Decorators._ -import Denotations._ -import Flags._ +import dotty.tools.dotc.core.* +import Contexts.* +import Decorators.* +import Denotations.* +import Flags.* import NameKinds.DefaultGetterName -import StdNames._ -import Symbols._ -import SymUtils._ -import Types._ +import StdNames.* +import Symbols.* + +import Types.* import util.Spans.Span import util.SrcPos import dotty.tools.backend.sjs.JSDefinitions.jsdefn -import JSExportUtils._ -import JSSymUtils._ +import JSExportUtils.* +import JSSymUtils.* import org.scalajs.ir.Names.DefaultModuleID import org.scalajs.ir.Trees.TopLevelExportDef.isValidTopLevelExportName object PrepJSExports { - import tpd._ + import tpd.* import PrepJSInterop.{checkSetterSignature, isJSAny, isPrivateMaybeWithin} private sealed abstract class ExportDestination diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala index a2f9a0fb45a3..610fca869ad2 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala @@ -5,23 +5,23 @@ package sjs import scala.collection.mutable import ast.tpd -import core._ +import core.* import typer.Checking import util.SrcPos -import Annotations._ -import Constants._ -import Contexts._ -import Decorators._ -import DenotTransformers._ -import Flags._ +import Annotations.* +import Constants.* +import Contexts.* +import Decorators.* +import DenotTransformers.* +import Flags.* import NameKinds.{DefaultGetterName, ModuleClassName} -import NameOps._ -import StdNames._ -import Symbols._ -import SymUtils._ -import Types._ +import NameOps.* +import StdNames.* +import Symbols.* -import JSSymUtils._ +import Types.* + +import JSSymUtils.* import org.scalajs.ir.Trees.JSGlobalRef @@ -52,8 +52,8 @@ import dotty.tools.backend.sjs.JSDefinitions.jsdefn * pickling. */ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisPhase => - import PrepJSInterop._ - import tpd._ + import PrepJSInterop.* + import tpd.* override def phaseName: String = PrepJSInterop.name @@ -68,7 +68,7 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP new ScalaJSPrepJSInteropTransformer class ScalaJSPrepJSInteropTransformer extends Transformer with Checking { - import PrepJSExports._ + import PrepJSExports.* /** Kind of the directly enclosing (most nested) owner. 
   */
  private var enclosingOwner: OwnerKind = OwnerKind.None
diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala
index 9c23b7e2024f..004b21ce4fb5 100644
--- a/compiler/src/dotty/tools/dotc/typer/Applications.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala
@@ -2,33 +2,31 @@ package dotty.tools
 package dotc
 package typer
 
-import core._
+import core.*
 import ast.{Trees, tpd, untpd, desugar}
 import util.Stats.record
 import util.{SrcPos, NoSourcePosition}
-import Contexts._
-import Flags._
-import Symbols._
+import Contexts.*
+import Flags.*
+import Symbols.*
 import Denotations.Denotation
-import Types._
-import Decorators._
-import ErrorReporting._
-import Trees._
-import Names._
-import StdNames._
-import ContextOps._
+import Types.*
+import Decorators.*
+import ErrorReporting.*
+import Trees.*
+import Names.*
+import StdNames.*
+import ContextOps.*
 import NameKinds.DefaultGetterName
-import ProtoTypes._
-import Inferencing._
-import reporting._
-import transform.TypeUtils._
-import transform.SymUtils._
-import Nullables._, NullOpsDecorator.*
+import ProtoTypes.*
+import Inferencing.*
+import reporting.*
+import Nullables.*, NullOpsDecorator.*
 import config.Feature
 
 import collection.mutable
 import config.Printers.{overload, typr, unapp}
-import TypeApplications._
+import TypeApplications.*
 
 import Annotations.Annotation
 import Constants.{Constant, IntTag}
@@ -38,7 +36,7 @@ import annotation.threadUnsafe
 import scala.util.control.NonFatal
 
 object Applications {
-  import tpd._
+  import tpd.*
 
  def extractorMember(tp: Type, name: Name)(using Context): SingleDenotation =
    tp.member(name).suchThat(sym => sym.info.isParameterless && sym.info.widenExpr.isValueType)
@@ -333,7 +331,7 @@ object Applications {
      // it's crucial that the type tree is not copied directly as argument to
      // `cpy$default$1`. If it was, the variable `X'` would already be interpolated
      // when typing the default argument, which is too early.
-      spliceMeth(meth, fn).appliedToTypes(targs.tpes)
+      spliceMeth(meth, fn).appliedToTypeTrees(targs.map(targ => TypeTree(targ.tpe).withSpan(targ.span)))
    case _ => meth
  }
 
@@ -352,7 +350,7 @@ object Applications {
 
 trait Applications extends Compatibility {
  self: Typer & Dynamic =>
 
-  import Applications._
+  import Applications.*
  import tpd.{ cpy => _, _ }
  import untpd.cpy
 
@@ -503,7 +501,21 @@ trait Applications extends Compatibility {
      def infoStr = if methType.isErroneous then "" else i": $methType"
      i"${err.refStr(methRef)}$infoStr"
 
-    /** Re-order arguments to correctly align named arguments */
+    /** Re-order arguments to correctly align named arguments
+     *  Issue errors in the following situations:
+     *
+     *   - "positional after named argument" if a positional argument follows a named
+     *     argument and one of the following is true:
+     *
+     *       - There is a formal argument before the argument position
+     *         that has not yet been instantiated with a previous actual argument
+     *         (either named or positional), or
+     *       - The formal parameter at the argument position is also mentioned
+     *         in a subsequent named parameter.
+     *   - "parameter already instantiated" if two named arguments have the same name.
+     *   - "does not have parameter" if a named argument uses a name that does not
+     *     match a formal parameter name.
+     */
    def reorder[T <: Untyped](args: List[Trees.Tree[T]]): List[Trees.Tree[T]] = {
 
      /** @param pnames    The list of parameter names that are missing arguments
       *  @param args      The list of arguments that are not yet passed, or that are waiting to be dropped
       *  @param nameToArg Map from as yet unseen names to named arguments
       *  @param toDrop    Set of names that have already been passed as named arguments
       *
       *  For a well-typed application we have the invariants
       *
       *  1. `(args diff toDrop)` can be reordered to match `pnames`
       *  2. 
For every `(name -> arg)` in `nameToArg`, `arg` is an element of `args` */ def handleNamed(pnames: List[Name], args: List[Trees.Tree[T]], - nameToArg: Map[Name, Trees.NamedArg[T]], toDrop: Set[Name]): List[Trees.Tree[T]] = pnames match { + nameToArg: Map[Name, Trees.NamedArg[T]], toDrop: Set[Name], + missingArgs: Boolean): List[Trees.Tree[T]] = pnames match { case pname :: pnames1 if nameToArg contains pname => // there is a named argument for this parameter; pick it - nameToArg(pname) :: handleNamed(pnames1, args, nameToArg - pname, toDrop + pname) + nameToArg(pname) :: handleNamed(pnames1, args, nameToArg - pname, toDrop + pname, missingArgs) case _ => def pnamesRest = if (pnames.isEmpty) pnames else pnames.tail args match { case (arg @ NamedArg(aname, _)) :: args1 => if (toDrop contains aname) // argument is already passed - handleNamed(pnames, args1, nameToArg, toDrop - aname) + handleNamed(pnames, args1, nameToArg, toDrop - aname, missingArgs) else if ((nameToArg contains aname) && pnames.nonEmpty) // argument is missing, pass an empty tree - genericEmptyTree :: handleNamed(pnames.tail, args, nameToArg, toDrop) + genericEmptyTree :: handleNamed(pnames.tail, args, nameToArg, toDrop, missingArgs = true) else { // name not (or no longer) available for named arg def msg = if (methodType.paramNames contains aname) @@ -536,13 +549,15 @@ trait Applications extends Compatibility { else em"$methString does not have a parameter $aname" fail(msg, arg.asInstanceOf[Arg]) - arg :: handleNamed(pnamesRest, args1, nameToArg, toDrop) + arg :: handleNamed(pnamesRest, args1, nameToArg, toDrop, missingArgs) } case arg :: args1 => - arg :: handleNamed(pnamesRest, args1, nameToArg, toDrop) // unnamed argument; pick it + if toDrop.nonEmpty || missingArgs then + report.error(i"positional after named argument", arg.srcPos) + arg :: handleNamed(pnamesRest, args1, nameToArg, toDrop, missingArgs) // unnamed argument; pick it case Nil => // no more args, continue to pick up any preceding named args if (pnames.isEmpty) Nil - else handleNamed(pnamesRest, args, nameToArg, toDrop) + else handleNamed(pnamesRest, args, nameToArg, toDrop, missingArgs) } } @@ -550,7 +565,7 @@ trait Applications extends Compatibility { args match { case (arg: NamedArg @unchecked) :: _ => val nameAssocs = for (case arg @ NamedArg(name, _) <- args) yield (name, arg) - handleNamed(pnames, args, nameAssocs.toMap, Set()) + handleNamed(pnames, args, nameAssocs.toMap, toDrop = Set(), missingArgs = false) case arg :: args1 => arg :: handlePositional(if (pnames.isEmpty) Nil else pnames.tail, args1) case Nil => Nil @@ -695,8 +710,8 @@ trait Applications extends Compatibility { val argtpe1 = argtpe.widen def SAMargOK = - defn.isFunctionType(argtpe1) && formal.match - case SAMType(sam) => argtpe <:< sam.toFunctionType(isJava = formal.classSymbol.is(JavaDefined)) + defn.isFunctionNType(argtpe1) && formal.match + case SAMType(samMeth, samParent) => argtpe <:< samMeth.toFunctionType(isJava = samParent.classSymbol.is(JavaDefined)) case _ => false isCompatible(argtpe, formal) @@ -959,7 +974,7 @@ trait Applications extends Compatibility { val resultType = if !originalResultType.isRef(defn.ObjectClass) then originalResultType else AvoidWildcardsMap()(proto.resultType.deepenProtoTrans) match - case SelectionProto(nme.asInstanceOf_, PolyProto(_, resTp), _, _) => resTp + case SelectionProto(nme.asInstanceOf_, PolyProto(_, resTp), _, _, _) => resTp case resTp if isFullyDefined(resTp, ForceDegree.all) => resTp case _ => defn.ObjectType val methType = 
MethodType(proto.typedArgs().map(_.tpe.widen), resultType) @@ -1278,15 +1293,22 @@ trait Applications extends Compatibility { /** Report errors buffered in state. * @pre state has errors to report - * If there is a single error stating that "unapply" is not a member, print - * the more informative "notAnExtractor" message instead. + * If the last reported error states that "unapply" is not a member, report + * the more informative `NotAnExtractor` message instead. + * If the last reported error states that the qualifier was not found, report + * the more informative `ExtractorNotFound` message instead. */ def reportErrors(tree: Tree, state: TyperState): Tree = assert(state.reporter.hasErrors) - if saysNotFound(state, nme.unapply) then notAnExtractor(tree) - else - state.reporter.flush() - tree + if saysNotFound(state, nme.unapply) then + notAnExtractor(tree) + else qual match + case qual: Ident if saysNotFound(state, qual.name) => + report.error(ExtractorNotFound(qual.name), tree.srcPos) + tree + case _ => + state.reporter.flush() + tree /** If this is a term ref tree, try to typecheck with its type name. * If this refers to a type alias, follow the alias, and if @@ -1519,10 +1541,7 @@ trait Applications extends Compatibility { && isApplicableType( normalize(tp.select(xname, mbr), WildcardType), argType :: Nil, resultType) - tp.memberBasedOnFlags(xname, required = ExtensionMethod) match { - case mbr: SingleDenotation => qualifies(mbr) - case mbr => mbr.hasAltWith(qualifies(_)) - } + tp.memberBasedOnFlags(xname, required = ExtensionMethod).hasAltWithInline(qualifies) } /** Drop any leading type or implicit parameter sections */ @@ -1710,7 +1729,7 @@ trait Applications extends Compatibility { def apply(t: Type) = t match { case t @ AppliedType(tycon, args) => def mapArg(arg: Type, tparam: TypeParamInfo) = - if (variance > 0 && tparam.paramVarianceSign < 0) defn.FunctionOf(arg :: Nil, defn.UnitType) + if (variance > 0 && tparam.paramVarianceSign < 0) defn.FunctionNOf(arg :: Nil, defn.UnitType) else arg mapOver(t.derivedAppliedType(tycon, args.zipWithConserve(tycon.typeParams)(mapArg))) case _ => mapOver(t) @@ -1759,14 +1778,20 @@ trait Applications extends Compatibility { def winsType2 = isAsSpecific(alt2, tp2, alt1, tp1) overload.println(i"compare($alt1, $alt2)? $tp1 $tp2 $ownerScore $winsType1 $winsType2") - if (ownerScore == 1) - if (winsType1 || !winsType2) 1 else 0 - else if (ownerScore == -1) - if (winsType2 || !winsType1) -1 else 0 - else if (winsType1) - if (winsType2) 0 else 1 + if winsType1 && winsType2 + && alt1.widenExpr.isStable && (alt1.widenExpr frozen_=:= alt2.widenExpr) + then + // alternatives are the same after following ExprTypes, pick one of them + // (prefer the one that is not a method, but that's arbitrary). + if alt1.widenExpr =:= alt2 then -1 else 1 + else if ownerScore == 1 then + if winsType1 || !winsType2 then 1 else 0 + else if ownerScore == -1 then + if winsType2 || !winsType1 then -1 else 0 + else if winsType1 then + if winsType2 then 0 else 1 else - if (winsType2) -1 else 0 + if winsType2 then -1 else 0 } if alt1.symbol.is(ConstructorProxy) && !alt2.symbol.is(ConstructorProxy) then -1 @@ -1937,7 +1962,7 @@ trait Applications extends Compatibility { /** The shape of given tree as a type; cannot handle named arguments. 
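+   *
+   *  For example (schematic): the shape of `(x, y) => e` is
+   *  `(Any, Any) => S`, where `S` is the shape of `e`.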
*/ def typeShape(tree: untpd.Tree): Type = tree match { case untpd.Function(args, body) => - defn.FunctionOf( + defn.FunctionNOf( args.map(Function.const(defn.AnyType)), typeShape(body), isContextual = untpd.isContextualClosure(tree)) case Match(EmptyTree, _) => @@ -1977,8 +2002,8 @@ trait Applications extends Compatibility { def paramCount(ref: TermRef) = val formals = ref.widen.firstParamTypes if formals.length > idx then - formals(idx) match - case defn.FunctionOf(args, _, _) => args.length + formals(idx).dealias match + case defn.FunctionNOf(args, _, _) => args.length case _ => -1 else -1 @@ -2063,8 +2088,8 @@ trait Applications extends Compatibility { else resolveMapped(alts1, _.widen.appliedTo(targs1.tpes), pt1) case pt => - val compat0 = pt match - case defn.FunctionOf(args, resType, _) => + val compat0 = pt.dealias match + case defn.FunctionNOf(args, resType, _) => narrowByTypes(alts, args, resType) case _ => Nil @@ -2080,7 +2105,7 @@ trait Applications extends Compatibility { * new java.io.ObjectOutputStream(f) */ pt match { - case SAMType(mtp) => + case SAMType(mtp, _) => narrowByTypes(alts, mtp.paramInfos, mtp.resultType) case _ => // pick any alternatives that are not methods since these might be convertible @@ -2205,7 +2230,8 @@ trait Applications extends Compatibility { } val mapped = reverseMapping.map(_._1) overload.println(i"resolve mapped: ${mapped.map(_.widen)}%, % with $pt") - resolveOverloaded(mapped, pt).map(reverseMapping.toMap) + resolveOverloaded(mapped, pt)(using ctx.retractMode(Mode.SynthesizeExtMethodReceiver)) + .map(reverseMapping.toMap) /** Try to typecheck any arguments in `pt` that are function values missing a * parameter type. If the formal parameter types corresponding to a closure argument @@ -2252,7 +2278,7 @@ trait Applications extends Compatibility { false val commonFormal = if (isPartial) defn.PartialFunctionOf(commonParamTypes.head, WildcardType) - else defn.FunctionOf(commonParamTypes, WildcardType, isContextual = untpd.isContextualClosure(arg)) + else defn.FunctionNOf(commonParamTypes, WildcardType, isContextual = untpd.isContextualClosure(arg)) overload.println(i"pretype arg $arg with expected type $commonFormal") if (commonParamTypes.forall(isFullyDefined(_, ForceDegree.flipBottom))) withMode(Mode.ImplicitsEnabled) { diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index df5639b50302..81375fe73549 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -2,18 +2,18 @@ package dotty.tools package dotc package typer -import core._ -import ast._ -import Contexts._ -import Types._ -import Flags._ -import Names._ -import StdNames._ -import Symbols._ -import Trees._ -import ProtoTypes._ -import Scopes._ -import CheckRealizable._ +import core.* +import ast.* +import Contexts.* +import Types.* +import Flags.* +import Names.* +import StdNames.* +import Symbols.* +import Trees.* +import ProtoTypes.* +import Scopes.* +import CheckRealizable.* import ErrorReporting.errorTree import util.Spans.Span import Phases.refchecksPhase @@ -23,28 +23,28 @@ import util.SrcPos import util.Spans.Span import rewrites.Rewrites.patch import inlines.Inlines -import transform.SymUtils._ -import transform.ValueClasses._ -import Decorators._ +import Decorators.* import ErrorReporting.{err, errorType} import config.Printers.{typr, patmatch} import NameKinds.DefaultGetterName -import NameOps._ +import NameOps.* import SymDenotations.{NoCompleter, 
NoDenotation} import Applications.unapplyArgs import Inferencing.isFullyDefined -import transform.patmat.SpaceEngine.{isIrrefutable, isIrrefutableQuotedPattern} +import transform.patmat.SpaceEngine.{isIrrefutable, isIrrefutableQuotePattern} +import transform.ValueClasses.underlyingOfValueClass import config.Feature import config.Feature.sourceVersion -import config.SourceVersion._ +import config.SourceVersion.* +import config.MigrationVersion import printing.Formatting.hlAsKeyword -import transform.TypeUtils.* +import cc.isCaptureChecking import collection.mutable -import reporting._ +import reporting.* object Checking { - import tpd._ + import tpd.* /** Add further information for error messages involving applied types if the * type is inferred: @@ -67,7 +67,7 @@ object Checking { */ def checkBounds(args: List[tpd.Tree], boundss: List[TypeBounds], instantiate: (Type, List[Type]) => Type, app: Type = NoType, tpt: Tree = EmptyTree)(using Context): Unit = - if ctx.phase != Phases.checkCapturesPhase then + if !isCaptureChecking then args.lazyZip(boundss).foreach { (arg, bound) => if !bound.isLambdaSub && !arg.tpe.hasSimpleKind then errorTree(arg, @@ -132,7 +132,7 @@ object Checking { if tp.isUnreducibleWild then report.errorOrMigrationWarning( showInferred(UnreducibleApplication(tycon), tp, tpt), - tree.srcPos, from = `3.0`) + tree.srcPos, MigrationVersion.Scala2to3) case _ => } def checkValidIfApply(using Context): Unit = @@ -145,7 +145,18 @@ object Checking { val checker = new TypeTraverser: def traverse(tp: Type) = tp match - case AppliedType(tycon, argTypes) => + case AppliedType(tycon, argTypes) + if !(tycon.typeSymbol.is(JavaDefined) && ctx.compilationUnit.isJava) + // Don't check bounds in Java units that refer to Java type constructors. + // Scala is not obliged to do Java type checking and in fact i17763 goes wrong + // if we attempt to check bounds of F-bounded mutually recursive Java interfaces. + // Do check all bounds in Scala units and those bounds in Java units that + // occur in applications of Scala type constructors. + && !isCaptureChecking || tycon.typeSymbol.is(CaptureChecked) + // Don't check bounds when capture checking type constructors that were not + // themselves capture checked. Since the type constructor could not foresee + // possible capture sets, it's better to be lenient for backwards compatibility. + => checkAppliedType( untpd.AppliedTypeTree(TypeTree(tycon), argTypes.map(TypeTree(_))) .withType(tp).withSpan(tpt.span.toSynthetic), @@ -207,7 +218,7 @@ object Checking { val rstatus = realizability(tp) if (rstatus ne Realizable) report.errorOrMigrationWarning( - em"$tp is not a legal $what\nsince it${rstatus.msg}", pos, from = `3.0`) + em"$tp is not a legal $what\nsince it${rstatus.msg}", pos, MigrationVersion.Scala2to3) } /** Given a parent `parent` of a class `cls`, if `parent` is a trait check that @@ -371,7 +382,7 @@ object Checking { /** Check that `info` of symbol `sym` is not cyclic. * @pre sym is not yet initialized (i.e. its type is a Completer). - * @return `info` where every legal F-bounded reference is proctected + * @return `info` where every legal F-bounded reference is protected * by a `LazyRef`, or `ErrorType` if a cycle was detected and reported. 
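+   *
+   *  For example (schematic): in `type T <: Ordered[T]` the F-bounded
+   *  reference to `T` is legal and is wrapped in a `LazyRef`, whereas
+   *  `type U <: U` is reported as an illegal cycle.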
*/ def checkNonCyclic(sym: Symbol, info: Type, reportErrors: Boolean)(using Context): Type = { @@ -412,7 +423,7 @@ object Checking { case tree: RefTree => checkRef(tree, tree.symbol) foldOver(x, tree) - case tree: This => + case tree: This if tree.tpe.classSymbol == refineCls => selfRef(tree) case tree: TypeTree => val checkType = new TypeAccumulator[Unit] { @@ -507,11 +518,7 @@ object Checking { // but they can never be one of ClassOnlyFlags if !sym.isClass && sym.isOneOf(ClassOnlyFlags) then val illegal = sym.flags & ClassOnlyFlags - if sym.is(TypeParam) && illegal == Sealed && Feature.ccEnabled && cc.allowUniversalInBoxed then - if !sym.owner.is(Method) then - fail(em"only method type parameters can be sealed") - else - fail(em"only classes can be ${illegal.flagsString}") + fail(em"only classes can be ${illegal.flagsString}") if (sym.is(AbsOverride) && !sym.owner.is(Trait)) fail(AbstractOverrideOnlyInTraits(sym)) if sym.is(Trait) then @@ -531,6 +538,12 @@ object Checking { fail(em"Inline methods cannot be @tailrec") if sym.hasAnnotation(defn.TargetNameAnnot) && sym.isClass && sym.isTopLevelClass then fail(TargetNameOnTopLevelClass(sym)) + if sym.hasAnnotation(defn.PublicInBinaryAnnot) then + if sym.is(Enum) then fail(em"@publicInBinary cannot be used on enum definitions") + else if sym.isType && !sym.is(Module) && !(sym.is(Given) || sym.companionModule.is(Given)) then fail(em"@publicInBinary cannot be used on ${sym.showKind} definitions") + else if !sym.owner.isClass && !(sym.is(Param) && sym.owner.isConstructor) then fail(em"@publicInBinary cannot be used on local definitions") + else if sym.is(ParamAccessor) && sym.is(Private) then fail(em"@publicInBinary cannot be used on non `val` constructor parameters") + else if sym.is(Private) && !sym.privateWithin.exists && !sym.isConstructor then fail(em"@publicInBinary cannot be used on private definitions\n\nConsider using `private[${sym.owner.name}]` or `protected` instead") if (sym.hasAnnotation(defn.NativeAnnot)) { if (!sym.is(Deferred)) fail(NativeMembersMayNotHaveImplementation(sym)) @@ -555,6 +568,8 @@ object Checking { fail(CannotHaveSameNameAs(sym, cls, CannotHaveSameNameAs.CannotBeOverridden)) sym.setFlag(Private) // break the overriding relationship by making sym Private } + if sym.isWrappedToplevelDef && !sym.isType && sym.flags.is(Infix, butNot = Extension) then + fail(ModifierNotAllowedForDefinition(Flags.Infix, s"A top-level ${sym.showKind} cannot be infix.")) checkApplicable(Erased, !sym.isOneOf(MutableOrLazy, butNot = Given) && !sym.isType || sym.isClass) checkCombination(Final, Open) @@ -584,6 +599,8 @@ object Checking { report.error(ModifierNotAllowedForDefinition(Sealed), flagSourcePos(Sealed)) if mods.is(Final, butNot = Synthetic) then report.warning(RedundantModifier(Final), flagSourcePos(Final)) + if mods.is(Infix) then + report.error(ModifierNotAllowedForDefinition(Infix), flagSourcePos(Infix)) /** Check the type signature of the symbol `M` defined by `tree` does not refer * to a private type or value which is invisible at a point where `M` is still @@ -680,7 +697,7 @@ object Checking { } val notPrivate = new NotPrivate val info = notPrivate(sym.info) - notPrivate.errors.foreach(report.errorOrMigrationWarning(_, sym.srcPos, from = `3.0`)) + notPrivate.errors.foreach(report.errorOrMigrationWarning(_, sym.srcPos, MigrationVersion.Scala2to3)) info } @@ -698,9 +715,11 @@ object Checking { case _ => report.error(ValueClassesMayNotContainInitalization(clazz), stat.srcPos) } - if (isDerivedValueClass(clazz)) { + if 
(clazz.isDerivedValueClass) { if (clazz.is(Trait)) report.error(CannotExtendAnyVal(clazz), clazz.srcPos) + if clazz.is(Module) then + report.error(CannotExtendAnyVal(clazz), clazz.srcPos) if (clazz.is(Abstract)) report.error(ValueClassesMayNotBeAbstract(clazz), clazz.srcPos) if (!clazz.isStatic) @@ -787,7 +806,9 @@ object Checking { for case imp @ Import(qual, selectors) <- trees do def isAllowedImport(sel: untpd.ImportSelector) = val name = Feature.experimental(sel.name) - name == Feature.scala2macros || name == Feature.erasedDefinitions + name == Feature.scala2macros + || name == Feature.erasedDefinitions + || name == Feature.captureChecking languageImport(qual) match case Some(nme.experimental) @@ -804,11 +825,44 @@ object Checking { else Feature.checkExperimentalFeature("features", imp.srcPos) case _ => end checkExperimentalImports + + /** Checks that PolyFunction only has valid refinements. + * + * It only supports `apply` methods with one parameter list and optional type arguments. + */ + def checkPolyFunctionType(tree: Tree)(using Context): Unit = new TreeTraverser { + def traverse(tree: Tree)(using Context): Unit = tree match + case tree: RefinedTypeTree if tree.tpe.derivesFrom(defn.PolyFunctionClass) => + if tree.refinements.isEmpty then + reportNoRefinements(tree.srcPos) + tree.refinements.foreach { + case refinement: DefDef if refinement.name != nme.apply => + report.error("PolyFunction only supports apply method refinements", refinement.srcPos) + case refinement: DefDef if !defn.PolyFunctionOf.isValidPolyFunctionInfo(refinement.tpe.widen) => + report.error("Implementation restriction: PolyFunction apply must have exactly one parameter list and optionally type arguments. No by-name parameters or varargs are allowed.", refinement.srcPos) + case _ => + } + case _: RefTree if tree.symbol == defn.PolyFunctionClass => + reportNoRefinements(tree.srcPos) + case _ => + traverseChildren(tree) + + def reportNoRefinements(pos: SrcPos) = + report.error("PolyFunction subtypes must refine the apply method", pos) + }.traverse(tree) + + /** Check that users do not extend the `PolyFunction` trait. + * We only allow compiler generated `PolyFunction`s. 
+ */ + def checkPolyFunctionExtension(templ: Template)(using Context): Unit = + templ.parents.find(_.tpe.derivesFrom(defn.PolyFunctionClass)) match + case Some(parent) => report.error(s"`PolyFunction` marker trait is reserved for compiler generated refinements", parent.srcPos) + case None => } trait Checking { - import tpd._ + import tpd.* def checkNonCyclic(sym: Symbol, info: TypeBounds, reportErrors: Boolean)(using Context): Type = Checking.checkNonCyclic(sym, info, reportErrors) @@ -839,7 +893,7 @@ trait Checking { case NonConforming, RefutableExtractor def fail(pat: Tree, pt: Type, reason: Reason): Boolean = { - import Reason._ + import Reason.* val message = reason match case NonConforming => var reportedPt = pt.dropAnnot(defn.UncheckedAnnot) @@ -847,19 +901,15 @@ trait Checking { val problem = if pat.tpe <:< reportedPt then "is more specialized than" else "does not match" em"pattern's type ${pat.tpe} $problem the right hand side expression's type $reportedPt" case RefutableExtractor => - val extractor = - val UnApply(fn, _, _) = pat: @unchecked - tpd.funPart(fn) match - case Select(id, _) => id - case _ => EmptyTree - if extractor.isEmpty then - em"pattern binding uses refutable extractor" - else if extractor.symbol eq defn.QuoteMatching_ExprMatch then - em"pattern binding uses refutable extractor `'{...}`" - else if extractor.symbol eq defn.QuoteMatching_TypeMatch then - em"pattern binding uses refutable extractor `'[...]`" - else - em"pattern binding uses refutable extractor `$extractor`" + val extractor = pat match + case UnApply(fn, _, _) => + tpd.funPart(fn) match + case Select(id, _) if !id.isEmpty => id.show + case _ => "" + case QuotePattern(_, body, _) => + if body.isTerm then "'{...}" else "'[...]" + if extractor.isEmpty then em"pattern binding uses refutable extractor" + else em"pattern binding uses refutable extractor `$extractor`" val fix = if isPatDef then "adding `: @unchecked` after the expression" @@ -876,17 +926,26 @@ trait Checking { case RefutableExtractor => pat.source.atSpan(pat.span union sel.span) else pat.srcPos def rewriteMsg = Message.rewriteNotice("This patch", `3.2-migration`) - report.gradualErrorOrMigrationWarning( + report.errorOrMigrationWarning( message.append( i"""| | |If $usage is intentional, this can be communicated by $fix, |which $addendum.$rewriteMsg"""), - pos, warnFrom = `3.2`, errorFrom = `future`) + pos, + // we tighten for-comprehension without `case` to error in 3.4, + // but we keep pat-defs as warnings for now ("@unchecked"), + // until we propose an alternative way to assert exhaustivity to the typechecker. 
+ if isPatDef then MigrationVersion.ForComprehensionUncheckedPathDefs + else MigrationVersion.ForComprehensionPatternWithoutCase + ) false } - def check(pat: Tree, pt: Type): Boolean = (pt <:< pat.tpe) || fail(pat, pt, Reason.NonConforming) + def check(pat: Tree, pt: Type): Boolean = + pt.isTupleXXLExtract(pat.tpe) // See isTupleXXLExtract, fixes TupleXXL parameter type + || pt <:< pat.tpe + || fail(pat, pt, Reason.NonConforming) def recur(pat: Tree, pt: Type): Boolean = !sourceVersion.isAtLeast(`3.2`) @@ -898,7 +957,7 @@ trait Checking { recur(pat1, pt) case UnApply(fn, implicits, pats) => check(pat, pt) && - (isIrrefutable(fn, pats.length) || isIrrefutableQuotedPattern(fn, implicits, pt) || fail(pat, pt, Reason.RefutableExtractor)) && { + (isIrrefutable(fn, pats.length) || fail(pat, pt, Reason.RefutableExtractor)) && { val argPts = unapplyArgs(fn.tpe.widen.finalResultType, fn, pats, pat.srcPos) pats.corresponds(argPts)(recur) } @@ -908,6 +967,8 @@ trait Checking { check(pat, pt) && recur(arg, pt) case Ident(nme.WILDCARD) => true + case pat: QuotePattern => + isIrrefutableQuotePattern(pat, pt) || fail(pat, pt, Reason.RefutableExtractor) case _ => check(pat, pt) } @@ -953,6 +1014,11 @@ trait Checking { em"Implementation restriction: ${path.tpe.classSymbol} is not a valid prefix for a wildcard export, as it is a package", path.srcPos) + /** Check that the definition name isn't root. */ + def checkNonRootName(name: Name, nameSpan: Span)(using Context): Unit = + if name == nme.ROOTPKG then + report.error(em"Illegal use of root package name.", ctx.source.atSpan(nameSpan)) + /** Check that module `sym` does not clash with a class of the same name * that is concurrently compiled in another source file. */ @@ -1021,14 +1087,18 @@ trait Checking { def checkValidInfix(tree: untpd.InfixOp, meth: Symbol)(using Context): Unit = { tree.op match { case id @ Ident(name: Name) => + def methCompiledBeforeDeprecation = + meth.tastyInfo match + case Some(info) => info.version.major == 28 && info.version.minor < 4 // compiled before 3.4 + case _ => false // compiled with the current compiler name.toTermName match { case name: SimpleName if !untpd.isBackquoted(id) && !name.isOperatorName && !meth.isDeclaredInfix && !meth.maybeOwner.is(Scala2x) && - !infixOKSinceFollowedBy(tree.right) && - sourceVersion.isAtLeast(future) => + !methCompiledBeforeDeprecation && + !infixOKSinceFollowedBy(tree.right) => val (kind, alternative) = if (ctx.mode.is(Mode.Type)) ("type", (n: Name) => s"prefix syntax $n[...]") @@ -1036,15 +1106,15 @@ trait Checking { ("extractor", (n: Name) => s"prefix syntax $n(...)") else ("method", (n: Name) => s"method syntax .$n(...)") - def rewriteMsg = Message.rewriteNotice("The latter", options = "-deprecation") - report.deprecationWarning( + def rewriteMsg = Message.rewriteNotice("The latter", version = `3.4-migration`) + report.errorOrMigrationWarning( em"""Alphanumeric $kind $name is not declared ${hlAsKeyword("infix")}; it should not be used as infix operator. 
|Instead, use ${alternative(name)} or backticked identifier `$name`.$rewriteMsg""", - tree.op.srcPos) - if (ctx.settings.deprecation.value) { + tree.op.srcPos, + MigrationVersion.AlphanumericInfix) + if MigrationVersion.AlphanumericInfix.needsPatch then patch(Span(tree.op.span.start, tree.op.span.start), "`") patch(Span(tree.op.span.end, tree.op.span.end), "`") - } case _ => } } @@ -1077,7 +1147,7 @@ trait Checking { case tp @ AppliedType(tycon, args) => tp.derivedAppliedType(tycon, args.mapConserve(checkGoodBounds)) case tp: RefinedType => - tp.derivedRefinedType(tp.parent, tp.refinedName, checkGoodBounds(tp.refinedInfo)) + tp.derivedRefinedType(refinedInfo = checkGoodBounds(tp.refinedInfo)) case _ => tp } @@ -1506,17 +1576,23 @@ trait Checking { * (2) Check that no import selector is renamed more than once. */ def checkImportSelectors(qualType: Type, selectors: List[untpd.ImportSelector])(using Context): Unit = - val seen = mutable.Set.empty[Name] + val originals = mutable.Set.empty[Name] + val targets = mutable.Set.empty[Name] def checkIdent(sel: untpd.ImportSelector): Unit = if sel.name != nme.ERROR && !qualType.member(sel.name).exists && !qualType.member(sel.name.toTypeName).exists then - report.error(NotAMember(qualType, sel.name, "value"), sel.imported.srcPos) - if seen.contains(sel.name) then - report.error(ImportRenamedTwice(sel.imported), sel.imported.srcPos) - seen += sel.name + report.error(NotAMember(qualType, sel.name, "value", WildcardType), sel.imported.srcPos) + if sel.isUnimport then + if originals.contains(sel.name) then + report.error(UnimportedAndImported(sel.name, targets.contains(sel.name)), sel.imported.srcPos) + else + if targets.contains(sel.rename) then + report.error(ImportedTwice(sel.rename), sel.renamed.orElse(sel.imported).srcPos) + targets += sel.rename + originals += sel.name if !ctx.compilationUnit.isJava then for sel <- selectors do @@ -1525,7 +1601,7 @@ trait Checking { } trait ReChecking extends Checking { - import tpd._ + import tpd.* override def checkEnumParent(cls: Symbol, firstParent: Symbol)(using Context): Unit = () override def checkEnum(cdef: untpd.TypeDef, cls: Symbol, firstParent: Symbol)(using Context): Unit = () override def checkRefsLegal(tree: tpd.Tree, badOwner: Symbol, allowed: (Name, Symbol) => Boolean, where: String)(using Context): Unit = () @@ -1541,7 +1617,7 @@ trait ReChecking extends Checking { } trait NoChecking extends ReChecking { - import tpd._ + import tpd.* override def checkNonCyclic(sym: Symbol, info: TypeBounds, reportErrors: Boolean)(using Context): Type = info override def checkNonCyclicInherited(joint: Type, parents: List[Type], decls: Scope, pos: SrcPos)(using Context): Unit = () override def checkStable(tp: Type, pos: SrcPos, kind: String)(using Context): Unit = () diff --git a/compiler/src/dotty/tools/dotc/typer/ConstFold.scala b/compiler/src/dotty/tools/dotc/typer/ConstFold.scala index 81b1de67b707..bd726afe5bba 100644 --- a/compiler/src/dotty/tools/dotc/typer/ConstFold.scala +++ b/compiler/src/dotty/tools/dotc/typer/ConstFold.scala @@ -3,19 +3,18 @@ package typer import java.lang.ArithmeticException -import ast._ -import core._ -import Symbols._ -import Types._ -import Constants._ -import Names._ -import StdNames._ -import Contexts._ -import transform.TypeUtils._ +import ast.* +import core.* +import Symbols.* +import Types.* +import Constants.* +import Names.* +import StdNames.* +import Contexts.* object ConstFold: - import tpd._ + import tpd.* private val foldedBinops = Set[Name]( nme.ZOR, nme.OR, nme.XOR, 
nme.ZAND, nme.AND, nme.EQ, nme.NE, @@ -23,7 +22,11 @@ object ConstFold: nme.ADD, nme.SUB, nme.MUL, nme.DIV, nme.MOD) val foldedUnops = Set[Name]( - nme.UNARY_!, nme.UNARY_~, nme.UNARY_+, nme.UNARY_-) + nme.UNARY_!, nme.UNARY_~, nme.UNARY_+, nme.UNARY_-, + nme.toChar, nme.toInt, nme.toFloat, nme.toLong, nme.toDouble, + // toByte and toShort are NOT included because we cannot write + // the type of a constant byte or short + ) def Apply[T <: Apply](tree: T)(using Context): T = tree.fun match @@ -89,6 +92,12 @@ object ConstFold: case (nme.UNARY_- , FloatTag ) => Constant(-x.floatValue) case (nme.UNARY_- , DoubleTag ) => Constant(-x.doubleValue) + case (nme.toChar , _ ) if x.isNumeric => Constant(x.charValue) + case (nme.toInt , _ ) if x.isNumeric => Constant(x.intValue) + case (nme.toLong , _ ) if x.isNumeric => Constant(x.longValue) + case (nme.toFloat , _ ) if x.isNumeric => Constant(x.floatValue) + case (nme.toDouble, _ ) if x.isNumeric => Constant(x.doubleValue) + case _ => null } diff --git a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala index 4087c5faf404..91303b00618c 100644 --- a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala @@ -3,7 +3,7 @@ package dotc package transform import core.* -import Symbols.*, Types.*, Contexts.*, Flags.*, SymUtils.*, Decorators.*, reporting.* +import Symbols.*, Types.*, Contexts.*, Flags.*, Decorators.*, reporting.* import util.SrcPos import config.{ScalaVersion, NoScalaVersion, Feature, ScalaRelease} import MegaPhase.MiniPhase diff --git a/compiler/src/dotty/tools/dotc/typer/Deriving.scala b/compiler/src/dotty/tools/dotc/typer/Deriving.scala index 8fdc468780ba..6def1ecc30a8 100644 --- a/compiler/src/dotty/tools/dotc/typer/Deriving.scala +++ b/compiler/src/dotty/tools/dotc/typer/Deriving.scala @@ -2,13 +2,13 @@ package dotty.tools package dotc package typer -import core._ -import ast._ -import ast.Trees._ -import StdNames._ -import Contexts._, Symbols._, Types._, SymDenotations._, Names._, NameOps._, Flags._, Decorators._ -import ProtoTypes._, ContextOps._ -import util.Spans._ +import core.* +import ast.* +import ast.Trees.* +import StdNames.* +import Contexts.*, Symbols.*, Types.*, SymDenotations.*, Names.*, NameOps.*, Flags.*, Decorators.* +import ProtoTypes.*, ContextOps.* +import util.Spans.* import util.SrcPos import collection.mutable import ErrorReporting.errorTree @@ -165,7 +165,7 @@ trait Deriving { // case (a) ... 
see description above val derivedParams = clsParams.dropRight(instanceArity) val instanceType = - if (instanceArity == clsArity) clsType.EtaExpand(clsParams) + if (instanceArity == clsArity) clsType.etaExpand(clsParams) else { val derivedParamTypes = derivedParams.map(_.typeRef) @@ -266,7 +266,7 @@ trait Deriving { /** The synthesized type class instance definitions */ def syntheticDefs: List[tpd.Tree] = { - import tpd._ + import tpd.* /** The type class instance definition with symbol `sym` */ def typeclassInstance(sym: Symbol)(using Context): List[List[tpd.Tree]] => tpd.Tree = diff --git a/compiler/src/dotty/tools/dotc/typer/Docstrings.scala b/compiler/src/dotty/tools/dotc/typer/Docstrings.scala index d819528ff556..33ef3e85e14e 100644 --- a/compiler/src/dotty/tools/dotc/typer/Docstrings.scala +++ b/compiler/src/dotty/tools/dotc/typer/Docstrings.scala @@ -2,8 +2,8 @@ package dotty.tools package dotc package typer -import core._ -import Contexts._, Symbols._, Decorators._, Comments.{_, given} +import core.* +import Contexts.*, Symbols.*, Decorators.*, Comments.{_, given} import ast.tpd object Docstrings { diff --git a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala index 717966923708..14cc7bf963a6 100644 --- a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala +++ b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala @@ -7,6 +7,8 @@ import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.ast.untpd import dotty.tools.dotc.core.Constants.Constant import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.Mode import dotty.tools.dotc.core.Names.{Name, TermName} import dotty.tools.dotc.core.StdNames.* import dotty.tools.dotc.core.Types.* @@ -14,10 +16,10 @@ import dotty.tools.dotc.core.Decorators.* import dotty.tools.dotc.core.TypeErasure import util.Spans.* import core.Symbols.* +import transform.ValueClasses import ErrorReporting.* -import dotty.tools.dotc.transform.ValueClasses -import dotty.tools.dotc.transform.TypeUtils.isPrimitiveValueType import reporting.* +import inlines.Inlines object Dynamic { private def isDynamicMethod(name: Name): Boolean = @@ -67,8 +69,8 @@ object DynamicUnapply { trait Dynamic { self: Typer & Applications => - import Dynamic._ - import tpd._ + import Dynamic.* + import tpd.* /** Translate selection that does not typecheck according to the normal rules into an applyDynamic/applyDynamicNamed. * foo.bar(baz0, baz1, ...) ~~> foo.applyDynamic(bar)(baz0, baz1, ...) @@ -210,7 +212,12 @@ trait Dynamic { case _ => tree case other => tree case _ => tree - addClassOfs(typed(scall)) + + // We type the application of `applyDynamic` without inlining (arguments are already typed and inlined), + // to be able to add the Class arguments before we inline the method. 
+ val call = addClassOfs(withMode(Mode.NoInline)(typed(scall))) + if Inlines.needsInlining(call) then Inlines.inlineCall(call) + else call } def fail(reason: String): Tree = @@ -232,17 +239,17 @@ trait Dynamic { */ def maybeBoxingCast(tpe: Type) = val maybeBoxed = - if ValueClasses.isDerivedValueClass(tpe.classSymbol) && qual.tpe <:< defn.ReflectSelectableTypeRef then + if tpe.classSymbol.isDerivedValueClass && qual.tpe <:< defn.ReflectSelectableTypeRef then val genericUnderlying = ValueClasses.valueClassUnbox(tpe.classSymbol.asClass) val underlying = tpe.select(genericUnderlying).widen.resultType - New(tpe, tree.cast(underlying) :: Nil) + New(tpe.widen, tree.cast(underlying) :: Nil) else tree maybeBoxed.cast(tpe) fun.tpe.widen match { case tpe: ValueType => - structuralCall(nme.selectDynamic, Nil).maybeBoxingCast(tpe) + structuralCall(nme.selectDynamic, Nil).maybeBoxingCast(fun.tpe.widenExpr) case tpe: MethodType => def isDependentMethod(tpe: Type): Boolean = tpe match { diff --git a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala index 126d109889e1..68143dfd2ba0 100644 --- a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala +++ b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala @@ -2,21 +2,22 @@ package dotty.tools package dotc package typer -import ast._ -import core._ -import Types._, ProtoTypes._, Contexts._, Decorators._, Denotations._, Symbols._ -import Implicits._, Flags._, Constants.Constant -import Trees._ -import NameOps._ +import ast.* +import core.* +import Types.*, ProtoTypes.*, Contexts.*, Decorators.*, Denotations.*, Symbols.* +import Implicits.*, Flags.*, Constants.Constant +import Trees.* +import NameOps.* +import util.Spans.NoSpan import util.SrcPos import config.Feature -import reporting._ +import reporting.* import collection.mutable object ErrorReporting { - import tpd._ + import tpd.* def errorTree(tree: untpd.Tree, msg: Message, pos: SrcPos)(using Context): tpd.Tree = tree.withType(errorType(msg, pos)) @@ -55,7 +56,7 @@ object ErrorReporting { val meth = err.exprStr(methPart(tree)) val info = if tree.symbol.exists then tree.symbol.info else mt if isCallableWithSingleEmptyArgumentList(info) then - report.error(MissingEmptyArgumentList(meth), tree.srcPos) + report.error(MissingEmptyArgumentList(meth, tree), tree.srcPos) else report.error(MissingArgumentList(meth, tree.symbol), tree.srcPos) @@ -70,6 +71,15 @@ object ErrorReporting { case _ => foldOver(s, tp) tps.foldLeft("")(collectMatchTrace) + /** A mixin trait that can produce added elements for an error message */ + trait Addenda: + self => + def toAdd(using Context): List[String] = Nil + def ++ (follow: Addenda) = new Addenda: + override def toAdd(using Context) = self.toAdd ++ follow.toAdd + + object NothingToAdd extends Addenda + class Errors(using Context) { /** An explanatory note to be added to error messages @@ -162,12 +172,12 @@ object ErrorReporting { def patternConstrStr(tree: Tree): String = ??? 
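The `Addenda` mixin introduced above threads optional follow-up notes through error rendering by composing `toAdd` lists. Here is a minimal, context-free sketch of the same pattern (toy names, not compiler code; the real trait takes a `using Context`):

```scala
// Toy model of the Addenda pattern: composable providers of extra message lines.
trait Notes:
  self =>
  def toAdd: List[String] = Nil
  def ++ (follow: Notes): Notes = new Notes:
    override def toAdd = self.toAdd ++ follow.toAdd

object NoNotes extends Notes

@main def notesDemo(): Unit =
  val conversionNote = new Notes:
    override def toAdd = List("Note that implicit conversions were not tried.")
  // Composition concatenates the added lines of both providers.
  println((NoNotes ++ conversionNote).toAdd)
```

In the patch, `SearchFailureType` extends `Addenda`, and `typeMismatch` below consumes the notes via `(addenda.toAdd :+ missingElse)*` in place of the old `whyNoConversion` string.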
- def typeMismatch(tree: Tree, pt: Type, implicitFailure: SearchFailureType = NoMatchingImplicits): Tree = { + def typeMismatch(tree: Tree, pt: Type, addenda: Addenda = NothingToAdd): Tree = { val normTp = normalize(tree.tpe, pt) val normPt = normalize(pt, pt) def contextFunctionCount(tp: Type): Int = tp.stripped match - case defn.ContextFunctionType(_, restp, _) => 1 + contextFunctionCount(restp) + case defn.ContextFunctionType(_, restp) => 1 + contextFunctionCount(restp) case _ => 0 def strippedTpCount = contextFunctionCount(tree.tpe) - contextFunctionCount(normTp) def strippedPtCount = contextFunctionCount(pt) - contextFunctionCount(normPt) @@ -184,7 +194,7 @@ object ErrorReporting { "\nMaybe you are missing an else part for the conditional?" case _ => "" - errorTree(tree, TypeMismatch(treeTp, expectedTp, Some(tree), implicitFailure.whyNoConversion, missingElse)) + errorTree(tree, TypeMismatch(treeTp, expectedTp, Some(tree), (addenda.toAdd :+ missingElse)*)) } /** A subtype log explaining why `found` does not conform to `expected` */ @@ -194,7 +204,7 @@ object ErrorReporting { | $found |conforms to | $expected - |but the comparison trace ended with `false`: + |but none of the attempts shown below succeeded: |""" val c = ctx.typerState.constraint val constraintText = @@ -203,7 +213,7 @@ object ErrorReporting { else i"""a constraint with: |$c""" - i"""${TypeComparer.explained(_.isSubType(found, expected), header)} + i"""${TypeComparer.explained(_.isSubType(found, expected), header, short = !ctx.settings.Ydebug.value)} | |The tests were made under $constraintText""" @@ -266,7 +276,7 @@ object ErrorReporting { else val add = suggestImports( ViewProto(qualType.widen, - SelectionProto(tree.name, WildcardType, NoViewsAllowed, privateOK = false))) + SelectionProto(tree.name, WildcardType, NoViewsAllowed, privateOK = false, NoSpan))) if add.isEmpty then "" else ", but could be made available as an extension method." ++ add end selectErrorAddendum diff --git a/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala index b1513df777ec..2c441c2f915e 100644 --- a/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala +++ b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala @@ -2,18 +2,18 @@ package dotty.tools package dotc package typer -import core._ +import core.* import ast.{Trees, untpd, tpd} -import Contexts._ -import Types._ -import Flags._ -import Symbols._ -import Names._ +import Contexts.* +import Types.* +import Flags.* +import Symbols.* +import Names.* import NameKinds.UniqueName -import util.Spans._ +import util.Spans.* import util.Property import collection.mutable -import Trees._ +import Trees.* /** A class that handles argument lifting. Argument lifting is needed in the following * scenarios: @@ -25,7 +25,7 @@ import Trees._ * arguments can be duplicated as arguments to default argument methods. 
*/ abstract class Lifter { - import tpd._ + import tpd.* /** Test indicating `expr` does not need lifting */ def noLift(expr: Tree)(using Context): Boolean @@ -76,10 +76,11 @@ abstract class Lifter { tree } - /** Lift a function argument, stripping any NamedArg wrapper */ + /** Lift a function argument, stripping any NamedArg wrapper and repeated Typed trees */ private def liftArg(defs: mutable.ListBuffer[Tree], arg: Tree, prefix: TermName = EmptyTermName)(using Context): Tree = arg match { case arg @ NamedArg(name, arg1) => cpy.NamedArg(arg)(name, lift(defs, arg1, prefix)) + case arg @ Typed(arg1, tpt) if tpt.typeOpt.isRepeatedParam => cpy.Typed(arg)(lift(defs, arg1, prefix), tpt) case arg => lift(defs, arg, prefix) } @@ -207,7 +208,7 @@ object LiftToDefs extends LiftComplex { /** Lifter for eta expansion */ object EtaExpansion extends LiftImpure { - import tpd._ + import tpd.* /** Eta-expanding a tree means converting a method reference to a function value. * @param tree The tree to expand @@ -263,7 +264,7 @@ object EtaExpansion extends LiftImpure { * But see comment on the `ExprType` case in function `prune` in class `ConstraintHandling`. */ def etaExpand(tree: Tree, mt: MethodType, xarity: Int)(using Context): untpd.Tree = { - import untpd._ + import untpd.* assert(!ctx.isAfterTyper || (ctx.phase eq ctx.base.inliningPhase), ctx.phase) val defs = new mutable.ListBuffer[tpd.Tree] val lifted: Tree = TypedSplice(liftApp(defs, tree)) diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 4bbd6ee080b6..389669beff01 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -3,43 +3,44 @@ package dotc package typer import backend.sjs.JSDefinitions -import core._ +import core.* import ast.{TreeTypeMap, untpd, tpd} -import util.Spans._ +import util.Spans.* import util.Stats.{record, monitored} import printing.{Showable, Printer} -import printing.Texts._ -import Contexts._ -import Types._ -import Flags._ +import printing.Texts.* +import Contexts.* +import Types.* +import Flags.* import Mode.ImplicitsEnabled -import NameKinds.{LazyImplicitName, EvidenceParamName} -import Symbols._ -import Types._ -import Decorators._ -import Names._ -import StdNames._ -import ProtoTypes._ -import ErrorReporting._ +import NameKinds.{LazyImplicitName, ContextBoundParamName} +import Symbols.* +import Types.* +import Decorators.* +import Names.* +import StdNames.* +import ProtoTypes.* +import ErrorReporting.* import Inferencing.{fullyDefinedType, isFullyDefined} import Scopes.newScope -import transform.TypeUtils._ -import Hashable._ +import Typer.BindingPrec, BindingPrec.* +import Hashable.* import util.{EqHashMap, Stats} -import config.{Config, Feature} -import Feature.migrateTo3 +import config.{Config, Feature, SourceVersion} +import Feature.{migrateTo3, sourceVersion} import config.Printers.{implicits, implicitsDetailed} import collection.mutable -import reporting._ +import reporting.* import transform.Splicer import annotation.tailrec import scala.annotation.internal.sharable import scala.annotation.threadUnsafe +import scala.compiletime.uninitialized /** Implicit resolution */ object Implicits: - import tpd._ + import tpd.* /** An implicit definition `implicitRef` that is visible under a different name, `alias`. * Gets generated if an implicit ref is imported via a renaming import. @@ -75,7 +76,7 @@ object Implicits: * method with the selecting name? False otherwise. 
*/ def hasExtMethod(tp: Type, expected: Type)(using Context) = expected match - case selProto @ SelectionProto(selName: TermName, _, _, _) => + case selProto @ SelectionProto(selName: TermName, _, _, _, _) => tp.memberBasedOnFlags(selName, required = ExtensionMethod).exists case _ => false @@ -92,7 +93,7 @@ object Implicits: if (initctx eq NoContext) initctx else initctx.retractMode(Mode.ImplicitsEnabled) protected given Context = irefCtx - /** The nesting level of this context. Non-zero only in ContextialImplicits */ + /** The nesting level of this context. Non-zero only in ContextualImplicits */ def level: Int = 0 /** The implicit references */ @@ -222,7 +223,7 @@ object Implicits: case pt: ViewProto => viewCandidateKind(ref.widen, pt.argType, pt.resType) case _: ValueTypeOrProto => - if (defn.isFunctionType(pt)) Candidate.Value + if (defn.isFunctionNType(pt)) Candidate.Value else valueTypeCandidateKind(ref.widen) case _ => Candidate.Value @@ -301,7 +302,7 @@ object Implicits: class ContextualImplicits( val refs: List[ImplicitRef], val outerImplicits: ContextualImplicits | Null, - isImport: Boolean)(initctx: Context) extends ImplicitRefs(initctx) { + val isImport: Boolean)(initctx: Context) extends ImplicitRefs(initctx) { private val eligibleCache = EqHashMap[Type, List[Candidate]]() /** The level increases if current context has a different owner or scope than @@ -323,23 +324,39 @@ object Implicits: /** Is this the outermost implicits? This is the case if it either the implicits * of NoContext, or the last one before it. */ - private def isOuterMost = { + private def isOutermost = { val finalImplicits = NoContext.implicits (this eq finalImplicits) || (outerImplicits eqn finalImplicits) } + def bindingPrec: BindingPrec = + if isImport then if ctx.importInfo.uncheckedNN.isWildcardImport then WildImport else NamedImport else Definition + private def combineEligibles(ownEligible: List[Candidate], outerEligible: List[Candidate]): List[Candidate] = if ownEligible.isEmpty then outerEligible else if outerEligible.isEmpty then ownEligible else - val shadowed = ownEligible.map(_.ref.implicitName).toSet - ownEligible ::: outerEligible.filterConserve(cand => !shadowed.contains(cand.ref.implicitName)) + val ownNames = mutable.Set(ownEligible.map(_.ref.implicitName)*) + val outer = outerImplicits.uncheckedNN + if !migrateTo3(using irefCtx) && level == outer.level && outer.bindingPrec.beats(bindingPrec) then + val keptOuters = outerEligible.filterConserve: cand => + if ownNames.contains(cand.ref.implicitName) then + val keepOuter = cand.level == level + if keepOuter then ownNames -= cand.ref.implicitName + keepOuter + else true + val keptOwn = ownEligible.filterConserve: cand => + ownNames.contains(cand.ref.implicitName) + keptOwn ::: keptOuters + else + ownEligible ::: outerEligible.filterConserve: cand => + !ownNames.contains(cand.ref.implicitName) def uncachedEligible(tp: Type)(using Context): List[Candidate] = Stats.record("uncached eligible") if monitored then record(s"check uncached eligible refs in irefCtx", refs.length) val ownEligible = filterMatching(tp) - if isOuterMost then ownEligible + if isOutermost then ownEligible else combineEligibles(ownEligible, outerImplicits.nn.uncachedEligible(tp)) /** The implicit references that are eligible for type `tp`. 
*/ @@ -366,7 +383,7 @@ object Implicits: private def computeEligible(tp: Type): List[Candidate] = /*>|>*/ trace(i"computeEligible $tp in $refs%, %", implicitsDetailed) /*<|<*/ { if (monitored) record(s"check eligible refs in irefCtx", refs.length) val ownEligible = filterMatching(tp) - if isOuterMost then ownEligible + if isOutermost then ownEligible else combineEligibles(ownEligible, outerImplicits.nn.eligible(tp)) } @@ -375,7 +392,7 @@ object Implicits: override def toString: String = { val own = i"(implicits: $refs%, %)" - if (isOuterMost) own else own + "\n " + outerImplicits + if (isOutermost) own else own + "\n " + outerImplicits } /** This context, or a copy, ensuring root import from symbol `root` @@ -391,6 +408,13 @@ object Implicits: } } + /** Search mode to use for possibly avoiding looping givens */ + enum SearchMode: + case Old, // up to 3.3, old mode w/o protection + CompareWarn, // from 3.4, old mode, warn if new mode would change result + CompareErr, // from 3.5, old mode, error if new mode would change result + New // from future, new mode where looping givens are avoided + /** The result of an implicit search */ sealed abstract class SearchResult extends Showable { def tree: Tree @@ -414,6 +438,7 @@ object Implicits: /** A failed search */ case class SearchFailure(tree: Tree) extends SearchResult { + require(tree.tpe.isInstanceOf[SearchFailureType], s"unexpected type for ${tree}") final def isAmbiguous: Boolean = tree.tpe.isInstanceOf[AmbiguousImplicits | TooUnspecific] final def reason: SearchFailureType = tree.tpe.asInstanceOf[SearchFailureType] } @@ -429,7 +454,7 @@ object Implicits: } } - abstract class SearchFailureType extends ErrorType { + abstract class SearchFailureType extends ErrorType, Addenda { def expectedType: Type def argument: Tree @@ -437,7 +462,7 @@ object Implicits: def clarify(tp: Type)(using Context): Type = tp final protected def qualify(using Context): String = expectedType match { - case SelectionProto(name, mproto, _, _) if !argument.isEmpty => + case SelectionProto(name, mproto, _, _, _) if !argument.isEmpty => i"provide an extension method `$name` on ${argument.tpe}" case NoType => if (argument.isEmpty) i"match expected type" @@ -446,11 +471,6 @@ object Implicits: if (argument.isEmpty) i"match type ${clarify(expectedType)}" else i"convert from ${argument.tpe} to ${clarify(expectedType)}" } - - /** If search was for an implicit conversion, a note describing the failure - * in more detail - this is either empty or starts with a '\n' - */ - def whyNoConversion(using Context): String = "" } class NoMatchingImplicits(val expectedType: Type, val argument: Tree, constraint: Constraint = OrderingConstraint.empty) @@ -504,17 +524,21 @@ object Implicits: /** A failure value indicating that an implicit search for a conversion was not tried */ case class TooUnspecific(target: Type) extends NoMatchingImplicits(NoType, EmptyTree, OrderingConstraint.empty): - override def whyNoConversion(using Context): String = + + override def toAdd(using Context) = i""" |Note that implicit conversions were not tried because the result of an implicit conversion - |must be more specific than $target""" + |must be more specific than $target""" :: Nil override def msg(using Context) = super.msg.append("\nThe expected type $target is not specific enough, so no search was attempted") + override def toString = s"TooUnspecific" + end TooUnspecific /** An ambiguous implicits failure */ - class AmbiguousImplicits(val alt1: SearchSuccess, val alt2: SearchSuccess, val expectedType: 
Type, val argument: Tree) extends SearchFailureType { + class AmbiguousImplicits(val alt1: SearchSuccess, val alt2: SearchSuccess, val expectedType: Type, val argument: Tree) extends SearchFailureType: + def msg(using Context): Message = var str1 = err.refStr(alt1.ref) var str2 = err.refStr(alt2.ref) @@ -522,15 +546,16 @@ object Implicits: str1 = ctx.printer.toTextRef(alt1.ref).show str2 = ctx.printer.toTextRef(alt2.ref).show em"both $str1 and $str2 $qualify".withoutDisambiguation() - override def whyNoConversion(using Context): String = + + override def toAdd(using Context) = if !argument.isEmpty && argument.tpe.widen.isRef(defn.NothingClass) then - "" + Nil else val what = if (expectedType.isInstanceOf[SelectionProto]) "extension methods" else "conversions" i""" |Note that implicit $what cannot be applied because they are ambiguous; - |$explanation""" - } + |$explanation""" :: Nil + end AmbiguousImplicits class MismatchedImplicit(ref: TermRef, val expectedType: Type, @@ -578,7 +603,7 @@ object Implicits: } end Implicits -import Implicits._ +import Implicits.* /** Info relating to implicits that is kept for one run */ trait ImplicitRunInfo: @@ -603,10 +628,10 @@ trait ImplicitRunInfo: object collectParts extends TypeTraverser: - private var parts: mutable.LinkedHashSet[Type] = _ + private var parts: mutable.LinkedHashSet[Type] = uninitialized private val partSeen = util.HashSet[Type]() - def traverse(t: Type) = + def traverse(t: Type) = try if partSeen.contains(t) then () else if implicitScopeCache.contains(t) then parts += t else @@ -636,8 +661,16 @@ trait ImplicitRunInfo: case t: TypeLambda => for p <- t.paramRefs do partSeen += p traverseChildren(t) + case t: MatchType => + traverseChildren(t) + traverse(t.normalized) + case MatchType.InDisguise(mt) + if !t.isInstanceOf[LazyRef] // skip recursive applications (eg. 
Tuple.Map) + => + traverse(mt) case t => traverseChildren(t) + catch case ex: Throwable => handleRecursive("collectParts of", t.show, ex) def apply(tp: Type): collection.Set[Type] = parts = mutable.LinkedHashSet() @@ -818,7 +851,7 @@ end ImplicitRunInfo trait Implicits: self: Typer => - import tpd._ + import tpd.* override def viewExists(from: Type, to: Type)(using Context): Boolean = !from.isError @@ -841,8 +874,8 @@ trait Implicits: NoMatchingImplicitsFailure else { def adjust(to: Type) = to.stripTypeVar.widenExpr match { - case SelectionProto(name, memberProto, compat, true) => - SelectionProto(name, memberProto, compat, privateOK = false) + case SelectionProto(name, memberProto, compat, true, nameSpan) => + SelectionProto(name, memberProto, compat, privateOK = false, nameSpan) case tp => tp } @@ -957,7 +990,7 @@ trait Implicits: .filter { imp => !isImplicitDefConversion(imp.underlying) && imp.symbol != defn.Predef_conforms - && viewExists(imp, fail.expectedType) + && viewExists(imp.underlying.resultType, fail.expectedType) } else Nil @@ -968,13 +1001,13 @@ trait Implicits: /** A string indicating the formal parameter corresponding to a missing argument */ def implicitParamString(paramName: TermName, methodStr: String, tree: Tree)(using Context): String = tree match { - case Select(qual, nme.apply) if defn.isFunctionType(qual.tpe.widen) => + case Select(qual, nme.apply) if defn.isFunctionNType(qual.tpe.widen) => val qt = qual.tpe.widen val qt1 = qt.dealiasKeepAnnots def addendum = if (qt1 eq qt) "" else (i"\nWhere $qt is an alias of: $qt1") i"parameter of ${qual.tpe.widen}$addendum" case _ => - i"${ if paramName.is(EvidenceParamName) then "an implicit parameter" + i"${ if paramName.is(ContextBoundParamName) then "a context parameter" else s"parameter $paramName" } of $methodStr" } @@ -1136,10 +1169,10 @@ trait Implicits: pt, locked) } pt match - case selProto @ SelectionProto(selName: TermName, mbrType, _, _) => + case selProto @ SelectionProto(selName: TermName, mbrType, _, _, nameSpan) => def tryExtension(using Context) = - extMethodApply(untpd.Select(untpdGenerated, selName), argument, mbrType) + extMethodApply(untpd.Select(untpdGenerated, selName).withSpan(nameSpan), argument, mbrType) def tryConversionForSelection(using Context) = val converted = tryConversion @@ -1524,35 +1557,113 @@ trait Implicits: case _ => tp.isAny || tp.isAnyRef - private def searchImplicit(contextual: Boolean): SearchResult = + /** Search implicit in context `ctxImplicits` or else in implicit scope + * of expected type if `ctxImplicits == null`. 
+ */ + private def searchImplicit(ctxImplicits: ContextualImplicits | Null, mode: SearchMode): SearchResult = if isUnderspecified(wildProto) then SearchFailure(TooUnspecific(pt), span) else - val eligible = - if contextual then + val contextual = ctxImplicits != null + val preEligible = // the eligible candidates, ignoring positions + if ctxImplicits != null then if ctx.gadt.isNarrowing then withoutMode(Mode.ImplicitsEnabled) { - ctx.implicits.uncachedEligible(wildProto) + ctxImplicits.uncachedEligible(wildProto) } - else ctx.implicits.eligible(wildProto) + else ctxImplicits.eligible(wildProto) else implicitScope(wildProto).eligible - searchImplicit(eligible, contextual) match - case result: SearchSuccess => - result - case failure: SearchFailure => - failure.reason match - case _: AmbiguousImplicits => failure - case reason => - if contextual then - searchImplicit(contextual = false).recoverWith { - failure2 => failure2.reason match - case _: AmbiguousImplicits => failure2 - case _ => - reason match - case (_: DivergingImplicit) => failure - case _ => List(failure, failure2).maxBy(_.tree.treeSize) - } - else failure + + /** Does candidate `cand` come too late for it to be considered as an + * eligible candidate? This is the case if `cand` appears in the same + * scope as a given definition of the form `given ... = ...` that + * encloses the search point and `cand` comes later in the source or + * coincides with that given definition. + */ + def comesTooLate(cand: Candidate): Boolean = + val candSym = cand.ref.symbol + def candSucceedsGiven(sym: Symbol): Boolean = + val owner = sym.owner + if owner == candSym.owner then + sym.is(GivenVal) && sym.span.exists && sym.span.start <= candSym.span.start + else if owner.isClass then false + else candSucceedsGiven(owner) + + ctx.isTyper + && !candSym.isOneOf(TermParamOrAccessor | Synthetic) + && candSym.span.exists + && candSucceedsGiven(ctx.owner) + end comesTooLate + + val eligible = // the eligible candidates that come before the search point + if contextual && mode != SearchMode.Old + then preEligible.filterNot(comesTooLate) + else preEligible + + def checkResolutionChange(result: SearchResult) = + if (eligible ne preEligible) && mode != SearchMode.New then + searchImplicit(preEligible, contextual) match + case prevResult: SearchSuccess => + def remedy = pt match + case _: SelectionProto => + "conversion,\n - use an import to get extension method into scope" + case _: ViewProto => + "conversion" + case _ => + "argument" + + def showResult(r: SearchResult) = r match + case r: SearchSuccess => ctx.printer.toTextRef(r.ref).show + case r => r.show + + result match + case result: SearchSuccess if prevResult.ref frozen_=:= result.ref => + // OK + case _ => + val msg = + em"""Result of implicit search for $pt will change. + |Current result ${showResult(prevResult)} will be no longer eligible + | because it is not defined before the search position. + |Result with new rules: ${showResult(result)}. + |To opt into the new rules, compile with `-source future` or use + |the `scala.language.future` language import. + | + |To fix the problem without the language import, you could try one of the following: + | - use a `given ... 
with` clause as the enclosing given, + | - rearrange definitions so that ${showResult(prevResult)} comes earlier, + | - use an explicit $remedy.""" + if mode == SearchMode.CompareErr + then report.error(msg, srcPos) + else report.warning(msg.append("\nThis will be an error in Scala 3.5 and later."), srcPos) + prevResult + case prevResult: SearchFailure => result + else result + end checkResolutionChange + + checkResolutionChange: + searchImplicit(eligible, contextual).recoverWith: + case failure: SearchFailure => + failure.reason match + case _: AmbiguousImplicits => failure + case reason => + if contextual then + // If we filtered out some candidates for being too late, we should + // do another contextual search further out, since the dropped candidates + // might have shadowed an eligible candidate in an outer level. + // Otherwise, proceed with a search of the implicit scope. + val newCtxImplicits = + if eligible eq preEligible then null + else ctxImplicits.nn.outerImplicits: ContextualImplicits | Null + // !!! Dotty problem: without the ContextualImplicits | Null type ascription + // we get a Ycheck failure after arrayConstructors due to "Types differ" + searchImplicit(newCtxImplicits, SearchMode.New).recoverWith: + failure2 => failure2.reason match + case _: AmbiguousImplicits => failure2 + case _ => + reason match + case (_: DivergingImplicit) => failure + case _ => List(failure, failure2).maxBy(_.tree.treeSize) + else failure end searchImplicit /** Find a unique best implicit reference */ @@ -1569,7 +1680,11 @@ trait Implicits: case ref: TermRef => SearchSuccess(tpd.ref(ref).withSpan(span.startPos), ref, 0)(ctx.typerState, ctx.gadt) case _ => - searchImplicit(contextual = true) + searchImplicit(ctx.implicits, + if sourceVersion.isAtLeast(SourceVersion.future) then SearchMode.New + else if sourceVersion.isAtLeast(SourceVersion.`3.5`) then SearchMode.CompareErr + else if sourceVersion.isAtLeast(SourceVersion.`3.4`) then SearchMode.CompareWarn + else SearchMode.Old) end bestImplicit def implicitScope(tp: Type): OfTypeImplicits = ctx.run.nn.implicitScope(tp) @@ -1815,7 +1930,7 @@ final class SearchRoot extends SearchHistory: result match { case failure: SearchFailure => failure case success: SearchSuccess => - import tpd._ + import tpd.* // We might have accumulated dictionary entries for by name implicit arguments // which are not in fact used recursively either directly in the outermost result diff --git a/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala b/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala index b5be2daf873b..78cba674bfff 100644 --- a/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala +++ b/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala @@ -3,15 +3,17 @@ package dotc package typer import ast.{tpd, untpd} -import core._ +import core.* import printing.{Printer, Showable} import util.SimpleIdentityMap -import Symbols._, Names._, Types._, Contexts._, StdNames._, Flags._ +import Symbols.*, Names.*, Types.*, Contexts.*, StdNames.*, Flags.* import Implicits.RenamedImplicitRef import StdNames.nme import printing.Texts.Text import NameKinds.QualifiedName +import scala.compiletime.uninitialized + object ImportInfo { case class RootRef(refFn: () => TermRef, isPredef: Boolean = false) @@ -66,7 +68,7 @@ class ImportInfo(symf: Context ?=> Symbol, } mySym.uncheckedNN } - private var mySym: Symbol | Null = _ + private var mySym: Symbol | Null = uninitialized /** The (TermRef) type of the qualifier of the import clause */ def site(using Context): Type = importSym.info 
match { @@ -109,7 +111,7 @@ class ImportInfo(symf: Context ?=> Symbol, else if sel.rename != sel.name then myExcluded = myExcluded.nn + sel.name - if sel.rename != nme.WILDCARD then + if !sel.isUnimport then myForwardMapping = myForwardMapping.uncheckedNN.updated(sel.name, sel.rename) myReverseMapping = myReverseMapping.uncheckedNN.updated(sel.rename, sel.name) @@ -148,7 +150,7 @@ class ImportInfo(symf: Context ?=> Symbol, else for renamed <- reverseMapping.keys - denot <- pre.member(reverseMapping(renamed).nn).altsWith(_.isOneOf(GivenOrImplicitVal)) + denot <- pre.implicitMembersNamed(reverseMapping(renamed).nn) yield val original = reverseMapping(renamed).nn val ref = TermRef(pre, original, denot) @@ -180,7 +182,7 @@ class ImportInfo(symf: Context ?=> Symbol, private val isLanguageImport: Boolean = untpd.languageImport(qualifier).isDefined - private var myUnimported: Symbol | Null = _ + private var myUnimported: Symbol | Null = uninitialized private var featureCache: SimpleIdentityMap[TermName, java.lang.Boolean] = SimpleIdentityMap.empty diff --git a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala index a9b53f0783bd..7615fbda9f0a 100644 --- a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala +++ b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala @@ -3,17 +3,18 @@ package dotc package typer import backend.sjs.JSDefinitions -import core._ -import Contexts._, Types._, Symbols._, Names._, Decorators._, ProtoTypes._ -import Flags._, SymDenotations._ +import core.* +import Contexts.*, Types.*, Symbols.*, Names.*, Decorators.*, ProtoTypes.* +import Flags.*, SymDenotations.* import NameKinds.FlatName -import StdNames._ +import StdNames.* import config.Printers.{implicits, implicitsDetailed} import ast.{untpd, tpd} import Implicits.{hasExtMethod, Candidate} import java.util.{Timer, TimerTask} import collection.mutable import scala.util.control.NonFatal +import cc.isCaptureChecking /** This trait defines the method `importSuggestionAddendum` that adds an addendum * to error messages suggesting additional imports. @@ -24,7 +25,7 @@ trait ImportSuggestions: /** The maximal number of suggested imports to make */ inline val MaxSuggestions = 10 - import tpd._ + import tpd.* /** Timeout to test a single implicit value as a suggestion, in ms */ private inline val testOneImplicitTimeOut = 500 @@ -195,7 +196,7 @@ trait ImportSuggestions: && { val task = new TimerTask: def run() = - println(i"Cancelling test of $ref when making suggestions for error in ${ctx.source}") + implicits.println(i"Cancelling test of $ref when making suggestions for error in ${ctx.source}") ctx.run.nn.isCancelled = true val span = ctx.owner.srcPos.span val (expectedType, argument, kind) = pt match @@ -237,7 +238,7 @@ trait ImportSuggestions: // don't suggest things that are imported by default def extensionImports = pt match - case ViewProto(argType, SelectionProto(name: TermName, _, _, _)) => + case ViewProto(argType, SelectionProto(name: TermName, _, _, _, _)) => roots.flatMap(extensionMethod(_, name, argType)) case _ => Nil @@ -319,7 +320,7 @@ trait ImportSuggestions: * If there's nothing to suggest, an empty string is returned. 
*/ override def importSuggestionAddendum(pt: Type)(using Context): String = - if ctx.phase == Phases.checkCapturesPhase then + if isCaptureChecking then return "" // it's too late then to look for implicits val (fullMatches, headMatches) = importSuggestions(pt)(using ctx.fresh.setExploreTyperState()) @@ -330,7 +331,7 @@ trait ImportSuggestions: def importString(ref: TermRef): String = val imported = if ref.symbol.is(ExtensionMethod) then - s"${ctx.printer.toTextPrefix(ref.prefix).show}${ref.symbol.name}" + s"${ctx.printer.toTextPrefixOf(ref).show}${ref.symbol.name}" else ctx.printer.toTextRef(ref).show s" import $imported" diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala index 4d027b8750e0..7e35b8be8caa 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala @@ -2,23 +2,23 @@ package dotty.tools package dotc package typer -import core._ -import ast._ -import Contexts._, Types._, Flags._, Symbols._ -import ProtoTypes._ +import core.* +import ast.* +import Contexts.*, Types.*, Flags.*, Symbols.* +import ProtoTypes.* import NameKinds.UniqueName -import util.Spans._ +import util.Spans.* import util.{Stats, SimpleIdentityMap, SimpleIdentitySet, SrcPos} import Decorators._ import config.Printers.{gadts, typr} import annotation.tailrec -import reporting._ +import reporting.* import collection.mutable import scala.annotation.internal.sharable object Inferencing { - import tpd._ + import tpd.* /** Is type fully defined, meaning the type does not contain wildcard types * or uninstantiated type variables. As a side effect, this will minimize @@ -60,7 +60,9 @@ object Inferencing { def instantiateSelected(tp: Type, tvars: List[Type])(using Context): Unit = if (tvars.nonEmpty) IsFullyDefinedAccumulator( - ForceDegree.Value(tvars.contains, IfBottom.flip), minimizeSelected = true + new ForceDegree.Value(IfBottom.flip): + override def appliesTo(tvar: TypeVar) = tvars.contains(tvar), + minimizeSelected = true ).process(tp) /** Instantiate any type variables in `tp` whose bounds contain a reference to @@ -141,7 +143,7 @@ object Inferencing { * 3. T is minimized if it has a lower bound (different from Nothing) in the * current constraint (the bound might come from T's declaration). * 4. Otherwise, T is maximized if it has an upper bound (different from Any) - * in the currented constraint (the bound might come from T's declaration). + * in the current constraint (the bound might come from T's declaration). * 5. Otherwise, T is not instantiated at all. * If (1) and (2) do not apply, and minimizeSelected is not set: @@ -154,15 +156,66 @@ object Inferencing { * their lower bound. Record whether successful. * 2nd Phase: If first phase was successful, instantiate all remaining type variables * to their upper bound. + * + * Instance types can be improved by replacing covariant occurrences of Nothing + * with fresh type variables, if `force` allows this in its `canImprove` implementation. */ private class IsFullyDefinedAccumulator(force: ForceDegree.Value, minimizeSelected: Boolean = false) (using Context) extends TypeAccumulator[Boolean] { - private def instantiate(tvar: TypeVar, fromBelow: Boolean): Type = { + /** Replace toplevel-covariant occurrences (i.e. covariant without double flips) + * of Nothing by fresh type variables. 
Double-flips are not covered to be + * conservative and save a bit of time on traversals; we could probably + * generalize that if we see use cases. + * For singleton types and references to module classes: try to + * improve the widened type. For module classes, the widened type + * is the intersection of all its non-transparent parent types. + */ + private def improve(tvar: TypeVar) = new TypeMap: + def apply(t: Type) = trace(i"improve $t", show = true): + def tryWidened(widened: Type): Type = + val improved = apply(widened) + if improved ne widened then improved else mapOver(t) + if variance > 0 then + t match + case t: TypeRef => + if t.symbol == defn.NothingClass then + newTypeVar(TypeBounds.empty, nestingLevel = tvar.nestingLevel) + else if t.symbol.is(ModuleClass) then + tryWidened(t.parents.filter(!_.isTransparent()) + .foldLeft(defn.AnyType: Type)(TypeComparer.andType(_, _))) + else + mapOver(t) + case t: TermRef => + tryWidened(t.widen) + case _ => + mapOver(t) + else t + + // Don't map Nothing arguments for higher-kinded types; we'd get the wrong kind */ + override def mapArg(arg: Type, tparam: ParamInfo): Type = + if tparam.paramInfo.isLambdaSub then arg + else super.mapArg(arg, tparam) + end improve + + /** Instantiate type variable with possibly improved computed instance type. + * @return true if variable was instantiated with improved type, which + * in this case should not be instantiated further, false otherwise. + */ + private def instantiate(tvar: TypeVar, fromBelow: Boolean): Boolean = + if fromBelow && force.canImprove(tvar) then + val inst = tvar.typeToInstantiateWith(fromBelow = true) + if apply(true, inst) then + // need to recursively check before improving, since improving adds type vars + // which should not be instantiated at this point + val better = improve(tvar)(inst) + if better <:< TypeComparer.fullUpperBound(tvar.origin) then + typr.println(i"forced instantiation of invariant ${tvar.origin} = $inst, improved to $better") + tvar.instantiateWith(better) + return true val inst = tvar.instantiate(fromBelow) typr.println(i"forced instantiation of ${tvar.origin} = $inst") - inst - } + false private var toMaximize: List[TypeVar] = Nil @@ -178,31 +231,32 @@ object Inferencing { && ctx.typerState.constraint.contains(tvar) && { var fail = false + var skip = false val direction = instDirection(tvar.origin) if minimizeSelected then if direction <= 0 && tvar.hasLowerBound then - instantiate(tvar, fromBelow = true) + skip = instantiate(tvar, fromBelow = true) else if direction >= 0 && tvar.hasUpperBound then - instantiate(tvar, fromBelow = false) + skip = instantiate(tvar, fromBelow = false) // else hold off instantiating unbounded unconstrained variable else if direction != 0 then - instantiate(tvar, fromBelow = direction < 0) + skip = instantiate(tvar, fromBelow = direction < 0) else if variance >= 0 && tvar.hasLowerBound then - instantiate(tvar, fromBelow = true) + skip = instantiate(tvar, fromBelow = true) else if (variance > 0 || variance == 0 && !tvar.hasUpperBound) && force.ifBottom == IfBottom.ok then // if variance == 0, prefer upper bound if one is given - instantiate(tvar, fromBelow = true) + skip = instantiate(tvar, fromBelow = true) else if variance >= 0 && force.ifBottom == IfBottom.fail then fail = true else toMaximize = tvar :: toMaximize - !fail && foldOver(x, tvar) + !fail && (skip || foldOver(x, tvar)) } case tp => foldOver(x, tp) } catch case ex: Throwable => - handleRecursive("check fully defined", tp.show, ex) + handleRecursive("check fully 
defined", tp.showSummary(20), ex) } def process(tp: Type): Boolean = @@ -244,16 +298,16 @@ object Inferencing { * relationship _necessarily_ must hold. * * We accomplish that by: - * - replacing covariant occurences with upper GADT bound - * - replacing contravariant occurences with lower GADT bound - * - leaving invariant occurences alone + * - replacing covariant occurrences with upper GADT bound + * - replacing contravariant occurrences with lower GADT bound + * - leaving invariant occurrences alone * * Examples: * - If we have GADT cstr A <: Int, then for all A <: Int, Option[A] <: Option[Int]. * Therefore, we can approximate Option[A] ~~ Option[Int]. * - If we have A >: S <: T, then for all such A, A => A <: S => T. This * illustrates that it's fine to differently approximate different - * occurences of same type. + * occurrences of same type. * - If we have A <: Int and F <: [A] => Option[A] (note the invariance), * then we should approximate F[A] ~~ Option[A]. That is, we should * respect the invariance of the type constructor. @@ -317,7 +371,7 @@ object Inferencing { def inferTypeParams(tree: Tree, pt: Type)(using Context): Tree = tree.tpe match case tl: TypeLambda => val (tl1, tvars) = constrained(tl, tree) - var tree1 = AppliedTypeTree(tree.withType(tl1), tvars) + val tree1 = AppliedTypeTree(tree.withType(tl1), tvars.map(_.wrapInTypeTree(tree))) tree1.tpe <:< pt if isFullyDefined(tree1.tpe, force = ForceDegree.failBottom) then tree1 @@ -411,7 +465,7 @@ object Inferencing { val vs = variances(tp) val patternBindings = new mutable.ListBuffer[(Symbol, TypeParamRef)] val gadtBounds = ctx.gadt.symbols.map(ctx.gadt.bounds(_).nn) - vs foreachBinding { (tvar, v) => + vs.underlying foreachBinding { (tvar, v) => if !tvar.isInstantiated then // if the tvar is covariant/contravariant (v == 1/-1, respectively) in the input type tp // then it is safe to instantiate if it doesn't occur in any of the GADT bounds. @@ -444,8 +498,6 @@ object Inferencing { res } - type VarianceMap = SimpleIdentityMap[TypeVar, Integer] - /** All occurrences of type vars in `tp` that satisfy predicate * `include` mapped to their variances (-1/0/1) in both `tp` and * `pt.finalResultType`, where @@ -453,7 +505,7 @@ object Inferencing { * +1 means: only covariant occurrences * 0 means: mixed or non-variant occurrences * - * We need to take the occurences in `pt` into account because a type + * We need to take the occurrences in `pt` into account because a type * variable created when typing the current tree might only appear in the * bounds of a type variable in the expected type, for example when * `ConstraintHandling#legalBound` creates type variables when approximating @@ -469,23 +521,18 @@ object Inferencing { * * we want to instantiate U to x.type right away. No need to wait further. 
*/ - private def variances(tp: Type, pt: Type = WildcardType)(using Context): VarianceMap = { + def variances(tp: Type, pt: Type = WildcardType)(using Context): VarianceMap[TypeVar] = { Stats.record("variances") val constraint = ctx.typerState.constraint - object accu extends TypeAccumulator[VarianceMap] { + object accu extends TypeAccumulator[VarianceMap[TypeVar]]: def setVariance(v: Int) = variance = v - def apply(vmap: VarianceMap, t: Type): VarianceMap = t match { + def apply(vmap: VarianceMap[TypeVar], t: Type): VarianceMap[TypeVar] = t match case t: TypeVar if !t.isInstantiated && accCtx.typerState.constraint.contains(t) => - val v = vmap(t) - if (v == null) vmap.updated(t, variance) - else if (v == variance || v == 0) vmap - else vmap.updated(t, 0) + vmap.recordLocalVariance(t, variance) case _ => foldOver(vmap, t) - } - } /** Include in `vmap` type variables occurring in the constraints of type variables * already in `vmap`. Specifically: @@ -497,10 +544,10 @@ object Inferencing { * bounds as non-variant. * Do this in a fixpoint iteration until `vmap` stabilizes. */ - def propagate(vmap: VarianceMap): VarianceMap = { + def propagate(vmap: VarianceMap[TypeVar]): VarianceMap[TypeVar] = { var vmap1 = vmap def traverse(tp: Type) = { vmap1 = accu(vmap1, tp) } - vmap.foreachBinding { (tvar, v) => + vmap.underlying.foreachBinding { (tvar, v) => val param = tvar.origin constraint.entry(param) match case TypeBounds(lo, hi) => @@ -516,7 +563,7 @@ object Inferencing { if (vmap1 eq vmap) vmap else propagate(vmap1) } - propagate(accu(accu(SimpleIdentityMap.empty, tp), pt.finalResultType)) + propagate(accu(accu(VarianceMap.empty, tp), pt.finalResultType)) } /** Run the transformation after dealiasing but return the original type if it was a no-op. */ @@ -544,7 +591,7 @@ object Inferencing { } if tparams.isEmpty then tp else tp.derivedAppliedType(tycon, args1) case tp: AndOrType => tp.derivedAndOrType(captureWildcards(tp.tp1), captureWildcards(tp.tp2)) - case tp: RefinedType => tp.derivedRefinedType(captureWildcards(tp.parent), tp.refinedName, tp.refinedInfo) + case tp: RefinedType => tp.derivedRefinedType(parent = captureWildcards(tp.parent)) case tp: RecType => tp.derivedRecType(captureWildcards(tp.parent)) case tp: LazyRef => captureWildcards(tp.ref) case tp: AnnotatedType => tp.derivedAnnotatedType(captureWildcards(tp.parent), tp.annot) @@ -557,8 +604,8 @@ object Inferencing { } trait Inferencing { this: Typer => - import Inferencing._ - import tpd._ + import Inferencing.* + import tpd.* /** Interpolate undetermined type variables in the widened type of this tree. * @param tree the tree whose type is interpolated @@ -568,7 +615,7 @@ trait Inferencing { this: Typer => * Eligible for interpolation are all type variables owned by the current typerstate * that are not in `locked` and whose `nestingLevel` is `>= ctx.nestingLevel`. * Type variables occurring co- (respectively, contra-) variantly in the tree type - * or expected type are minimized (respectvely, maximized). Non occurring type variables are minimized if they + * or expected type are minimized (respectively, maximized). Non occurring type variables are minimized if they * have a lower bound different from Nothing, maximized otherwise. Type variables appearing * non-variantly in the type are left untouched. * @@ -642,7 +689,7 @@ trait Inferencing { this: Typer => if !tvar.isInstantiated then // isInstantiated needs to be checked again, since previous interpolations could already have // instantiated `tvar` through unification. 
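A user-level sketch of the interpolation rule just described (hypothetical example, not part of the patch):

```scala
// T occurs only covariantly in the result type, so interpolation
// minimizes it to the argument's type; a contravariant occurrence
// would be maximized instead.
def wrap[T](x: T): Option[T] = Some(x)
val o = wrap(1) // T is instantiated to Int
```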
- val v = vs(tvar) + val v = vs.computedVariance(tvar) if v == null then buf += ((tvar, 0)) else if v.intValue != 0 then buf += ((tvar, v.intValue)) else comparing(cmp => @@ -776,14 +823,30 @@ trait Inferencing { this: Typer => } /** An enumeration controlling the degree of forcing in "is-fully-defined" checks. */ -@sharable object ForceDegree { - class Value(val appliesTo: TypeVar => Boolean, val ifBottom: IfBottom): - override def toString = s"ForceDegree.Value(.., $ifBottom)" - val none: Value = new Value(_ => false, IfBottom.ok) { override def toString = "ForceDegree.none" } - val all: Value = new Value(_ => true, IfBottom.ok) { override def toString = "ForceDegree.all" } - val failBottom: Value = new Value(_ => true, IfBottom.fail) { override def toString = "ForceDegree.failBottom" } - val flipBottom: Value = new Value(_ => true, IfBottom.flip) { override def toString = "ForceDegree.flipBottom" } -} +@sharable object ForceDegree: + class Value(val ifBottom: IfBottom): + + /** Does `tv` need to be instantiated? */ + def appliesTo(tv: TypeVar): Boolean = true + + /** Should we try to improve the computed instance type by replacing bottom types + * with fresh type variables? + */ + def canImprove(tv: TypeVar): Boolean = false + + override def toString = s"ForceDegree.Value($ifBottom)" + end Value + + val none: Value = new Value(IfBottom.ok): + override def appliesTo(tv: TypeVar) = false + override def toString = "ForceDegree.none" + val all: Value = new Value(IfBottom.ok): + override def toString = "ForceDegree.all" + val failBottom: Value = new Value(IfBottom.fail): + override def toString = "ForceDegree.failBottom" + val flipBottom: Value = new Value(IfBottom.flip): + override def toString = "ForceDegree.flipBottom" +end ForceDegree enum IfBottom: case ok, fail, flip diff --git a/compiler/src/dotty/tools/dotc/typer/JavaChecks.scala b/compiler/src/dotty/tools/dotc/typer/JavaChecks.scala index 89caf5e1c474..bbc34bc692f9 100644 --- a/compiler/src/dotty/tools/dotc/typer/JavaChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/JavaChecks.scala @@ -1,8 +1,8 @@ package dotty.tools.dotc package typer -import core.Contexts._ -import ast.tpd._ +import core.Contexts.* +import ast.tpd.* /** PostTyper doesn't run on java sources, * but some checks still need to be applied. diff --git a/compiler/src/dotty/tools/dotc/typer/Linter.scala b/compiler/src/dotty/tools/dotc/typer/Linter.scala new file mode 100644 index 000000000000..c0ba581b3732 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/typer/Linter.scala @@ -0,0 +1,126 @@ +package dotty.tools +package dotc +package typer + +import core.* +import Types.*, Contexts.*, Symbols.*, Flags.*, Constants.* +import reporting.* +import Decorators.i + +/** A module for linter checks done at typer */ +object Linter: + import ast.tpd.* + + /** If -Wnonunit-statement is set, warn about statements in blocks that are non-unit expressions. 
+   *  @return true if a warning was issued, false otherwise
+   */
+  def warnOnInterestingResultInStatement(t: Tree)(using Context): Boolean =
+
+    def isUninterestingSymbol(sym: Symbol): Boolean =
+      sym == NoSymbol ||
+      sym.isConstructor ||
+      sym.is(Package) ||
+      sym.isPackageObject ||
+      sym == defn.BoxedUnitClass ||
+      sym == defn.AnyClass ||
+      sym == defn.AnyRefAlias ||
+      sym == defn.AnyValClass
+
+    def isUninterestingType(tpe: Type): Boolean =
+      tpe == NoType ||
+      tpe.typeSymbol == defn.UnitClass ||
+      defn.isBottomClass(tpe.typeSymbol) ||
+      tpe =:= defn.UnitType ||
+      tpe.typeSymbol == defn.BoxedUnitClass ||
+      tpe =:= defn.AnyValType ||
+      tpe =:= defn.AnyType ||
+      tpe =:= defn.AnyRefType
+
+    def isJavaApplication(t: Tree): Boolean = t match
+      case Apply(f, _) => f.symbol.is(JavaDefined) && !defn.ObjectClass.isSubClass(f.symbol.owner)
+      case _ => false
+
+    def checkInterestingShapes(t: Tree): Boolean = t match
+      case If(_, thenpart, elsepart) => checkInterestingShapes(thenpart) || checkInterestingShapes(elsepart)
+      case Block(_, res) => checkInterestingShapes(res)
+      case Match(_, cases) => cases.exists(k => checkInterestingShapes(k.body))
+      case _ => checksForInterestingResult(t)
+
+    def checksForInterestingResult(t: Tree): Boolean =
+      !t.isDef                            // ignore defs
+      && !isUninterestingSymbol(t.symbol) // ctors, package, Unit, Any
+      && !isUninterestingType(t.tpe)      // bottom types, Unit, Any
+      && !isThisTypeResult(t)             // buf += x
+      && !isSuperConstrCall(t)            // just a thing
+      && !isJavaApplication(t)            // Java methods are inherently side-effecting
+      // && !treeInfo.hasExplicitUnit(t)  // suppressed by explicit expr: Unit // TODO Should explicit `: Unit` be added as warning suppression?
+
+    if ctx.settings.WNonUnitStatement.value && !ctx.isAfterTyper && checkInterestingShapes(t) then
+      val where = t match
+        case Block(_, res) => res
+        case If(_, thenpart, Literal(Constant(()))) =>
+          thenpart match {
+            case Block(_, res) => res
+            case _ => thenpart
+          }
+        case _ => t
+      report.warning(UnusedNonUnitValue(where.tpe), t.srcPos)
+      true
+    else false
+  end warnOnInterestingResultInStatement
+
+  /** If -Wimplausible-patterns is set, warn about pattern values that can match the scrutinee
+   *  type only if there would be some user-defined equality method that equates values of the
+   *  two types.
+   */
+  def warnOnImplausiblePattern(pat: Tree, selType: Type)(using Context): Unit =
+    // approximate type params with bounds
+    def approx = new ApproximatingTypeMap {
+      var alreadyExpanding: List[TypeRef] = Nil
+      def apply(tp: Type) = tp.dealias match
+        case tp: TypeRef if !tp.symbol.isClass =>
+          if alreadyExpanding contains tp then tp else
+            val saved = alreadyExpanding
+            alreadyExpanding ::= tp
+            val res = expandBounds(tp.info.bounds)
+            alreadyExpanding = saved
+            res
+        case _ =>
+          mapOver(tp)
+    }
+
+    // Is it possible that a value of `clsA` is equal to a value of `clsB`?
+    // This ignores user-defined equals methods, but makes an exception
+    // for numeric classes.
+    def canOverlap(clsA: ClassSymbol, clsB: ClassSymbol): Boolean =
+      clsA.mayHaveCommonChild(clsB)
+      || clsA.isNumericValueClass // this is quite coarse, but matches what was done before
+      || clsB.isNumericValueClass
+
+    // Can type `a` possibly have a common instance with type `b`?
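A minimal input that `warnOnInterestingResultInStatement` above flags, assuming compilation with `-Wnonunit-statement` (hypothetical example, not part of the patch); the `canEqual` helper that follows implements the compatibility test just introduced:

```scala
// The mapped list is built and immediately discarded in statement
// position, so its non-Unit result triggers the warning.
def doubledSum(xs: List[Int]): Int =
  xs.map(_ * 2) // warning: unused non-Unit value of type List[Int]
  xs.sum
```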
+ def canEqual(a: Type, b: Type): Boolean = trace(i"canEqual $a $b"): + b match + case _: TypeRef | _: AppliedType if b.typeSymbol.isClass => + a match + case a: TermRef if a.symbol.isOneOf(Module | Enum) => + (a frozen_<:< b) // fast track + || (a frozen_<:< approx(b)) + case _: TypeRef | _: AppliedType if a.typeSymbol.isClass => + if a.isNullType then !b.isNotNull + else canOverlap(a.typeSymbol.asClass, b.typeSymbol.asClass) + case a: TypeProxy => + canEqual(a.superType, b) + case a: AndOrType => + canEqual(a.tp1, b) || canEqual(a.tp2, b) + case b: TypeProxy => + canEqual(a, b.superType) + case b: AndOrType => + // we lose precision with and/or types, but it's hard to do better and + // still compute `canEqual(A & B, B & A) = true`. + canEqual(a, b.tp1) || canEqual(a, b.tp2) + + if ctx.settings.WimplausiblePatterns.value && !canEqual(pat.tpe, selType) then + report.warning(ImplausiblePatternWarning(pat, selType), pat.srcPos) + end warnOnImplausiblePattern + +end Linter diff --git a/compiler/src/dotty/tools/dotc/typer/Migrations.scala b/compiler/src/dotty/tools/dotc/typer/Migrations.scala new file mode 100644 index 000000000000..8d468fd68bba --- /dev/null +++ b/compiler/src/dotty/tools/dotc/typer/Migrations.scala @@ -0,0 +1,120 @@ +package dotty.tools +package dotc +package typer + +import core.* +import ast.* +import Contexts.* +import Types.* +import Flags.* +import Names.* +import StdNames.* +import Symbols.* +import Trees.* +import ProtoTypes.* +import Decorators.* +import config.MigrationVersion as mv +import config.Feature.{sourceVersion, migrateTo3} +import config.SourceVersion.* +import reporting.* +import NameKinds.ContextBoundParamName +import rewrites.Rewrites.patch +import util.Spans.Span +import rewrites.Rewrites + +/** A utility trait containing source-dependent deprecation messages + * and migrations. + */ +trait Migrations: + this: Typer => + + import tpd.* + + /** Run `migration`, asserting we are in the proper Typer (not a ReTyper) */ + inline def migrate[T](inline migration: T): T = + assert(!this.isInstanceOf[ReTyper]) + migration + + /** Run `migration`, provided we are in the proper Typer (not a ReTyper) */ + inline def migrate(inline migration: Unit): Unit = + if !this.isInstanceOf[ReTyper] then migration + + /** Flag & migrate `?` used as a higher-kinded type parameter + * Warning in 3.0-migration, error from 3.0 + */ + def kindProjectorQMark(tree: untpd.TypeDef, sym: Symbol)(using Context): Unit = + if tree.name eq tpnme.? 
then + val addendum = if sym.owner.is(TypeParam) + then ", use `_` to denote a higher-kinded type parameter" + else "" + val namePos = tree.sourcePos.withSpan(tree.nameSpan) + report.errorOrMigrationWarning( + em"`?` is not a valid type name$addendum", namePos, mv.Scala2to3) + + def typedAsFunction(tree: untpd.PostfixOp, pt: Type)(using Context): Tree = { + val untpd.PostfixOp(qual, Ident(nme.WILDCARD)) = tree: @unchecked + val pt1 = if (defn.isFunctionNType(pt)) pt else AnyFunctionProto + val nestedCtx = ctx.fresh.setNewTyperState() + val res = typed(qual, pt1)(using nestedCtx) + res match { + case closure(_, _, _) => + case _ => + val recovered = typed(qual)(using ctx.fresh.setExploreTyperState()) + val msg = OnlyFunctionsCanBeFollowedByUnderscore(recovered.tpe.widen, tree) + report.errorOrMigrationWarning(msg, tree.srcPos, mv.Scala2to3) + if mv.Scala2to3.needsPatch then + // Under -rewrite, patch `x _` to `(() => x)` + msg.actions + .headOption + .foreach(Rewrites.applyAction) + return typed(untpd.Function(Nil, qual), pt) + } + nestedCtx.typerState.commit() + + lazy val (prefix, suffix) = res match { + case Block(mdef @ DefDef(_, vparams :: Nil, _, _) :: Nil, _: Closure) => + val arity = vparams.length + if (arity > 0) ("", "") else ("(() => ", "())") + case _ => + ("(() => ", ")") + } + val mversion = mv.FunctionUnderscore + def remedy = + if ((prefix ++ suffix).isEmpty) "simply leave out the trailing ` _`" + else s"use `$prefix$suffix` instead" + def rewrite = Message.rewriteNotice("This construct", mversion.patchFrom) + report.errorOrMigrationWarning( + em"""The syntax ` _` is no longer supported; + |you can $remedy$rewrite""", + tree.srcPos, mversion) + if mversion.needsPatch then + patch(Span(tree.span.start), prefix) + patch(Span(qual.span.end, tree.span.end), suffix) + + res + } + + /** Flag & migrate explicit normal arguments to parameters coming from context bounds + * Warning in 3.4, error in 3.5, rewrite in 3.5-migration. + */ + def contextBoundParams(tree: Tree, tp: Type, pt: FunProto)(using Context): Unit = + val mversion = mv.ExplicitContextBoundArgument + def isContextBoundParams = tp.stripPoly match + case MethodType(ContextBoundParamName(_) :: _) => true + case _ => false + if sourceVersion.isAtLeast(`3.4`) + && isContextBoundParams + && pt.applyKind != ApplyKind.Using + then + def rewriteMsg = + if pt.args.isEmpty then "" + else Message.rewriteNotice("This code", mversion.patchFrom) + report.errorOrMigrationWarning( + em"""Context bounds will map to context parameters. 
+ |A `using` clause is needed to pass explicit arguments to them.$rewriteMsg""", + tree.srcPos, mversion) + if mversion.needsPatch && pt.args.nonEmpty then + patch(Span(pt.args.head.span.start), "using ") + end contextBoundParams + +end Migrations diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index cc4433f75a68..f8ced1c6599a 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -2,15 +2,15 @@ package dotty.tools package dotc package typer -import core._ -import ast._ -import Trees._, StdNames._, Scopes._, Denotations._, NamerOps._, ContextOps._ -import Contexts._, Symbols._, Types._, SymDenotations._, Names._, NameOps._, Flags._ -import Decorators._, Comments.{_, given} +import core.* +import ast.* +import Trees.*, StdNames.*, Scopes.*, Denotations.*, NamerOps.*, ContextOps.* +import Contexts.*, Symbols.*, Types.*, SymDenotations.*, Names.*, NameOps.*, Flags.* +import Decorators.*, Comments.{_, given} import NameKinds.DefaultGetterName -import ast.desugar, ast.desugar._ -import ProtoTypes._ -import util.Spans._ +import ast.desugar, ast.desugar.* +import ProtoTypes.* +import util.Spans.* import util.Property import collection.mutable import tpd.tpes @@ -20,16 +20,15 @@ import config.Printers.typr import inlines.{Inlines, PrepareInlineable} import parsing.JavaParsers.JavaParser import parsing.Parsers.Parser -import Annotations._ -import Inferencing._ -import transform.ValueClasses._ -import transform.TypeUtils._ -import transform.SymUtils._ +import Annotations.* +import Inferencing.* +import transform.ValueClasses.* import TypeErasure.erasure -import reporting._ +import reporting.* import config.Feature.sourceVersion -import config.SourceVersion._ +import config.SourceVersion.* +import scala.compiletime.uninitialized /** This class creates symbols from definitions and imports and gives them * lazy types. @@ -53,7 +52,7 @@ import config.SourceVersion._ */ class Namer { typer: Typer => - import untpd._ + import untpd.* val TypedAhead : Property.Key[tpd.Tree] = new Property.Key val ExpandedTree : Property.Key[untpd.Tree] = new Property.Key @@ -221,6 +220,8 @@ class Namer { typer: Typer => else NoSymbol var flags1 = flags + if name.isTypeName && Feature.ccEnabled then + flags1 |= CaptureChecked var privateWithin = privateWithinClass(tree.mods) val effectiveOwner = owner.skipWeakOwner if (flags.is(Private) && effectiveOwner.is(Package)) { @@ -247,7 +248,7 @@ class Namer { typer: Typer => val cls = createOrRefine[ClassSymbol](tree, name, flags, ctx.owner, cls => adjustIfModule(new ClassCompleter(cls, tree)(ctx), tree), - newClassSymbol(ctx.owner, name, _, _, _, tree.nameSpan, ctx.source.file)) + newClassSymbol(ctx.owner, name, _, _, _, tree.nameSpan, ctx.compilationUnit.info)) cls.completer.asInstanceOf[ClassCompleter].init() cls case tree: MemberDef => @@ -722,20 +723,27 @@ class Namer { typer: Typer => * Will call the callback with an implementation of type checking * That will set the tpdTree and root tree for the compilation unit. 
*/ - def lateEnterUnit(typeCheckCB: (() => Unit) => Unit)(using Context) = + def lateEnterUnit(typeCheck: Boolean)(typeCheckCB: (() => Unit) => Unit)(using Context) = val unit = ctx.compilationUnit /** Index symbols in unit.untpdTree with lateCompile flag = true */ def lateEnter()(using Context): Context = val saved = lateCompile lateCompile = true - try index(unit.untpdTree :: Nil) finally lateCompile = saved + try + index(unit.untpdTree :: Nil) + finally + lateCompile = saved + if !typeCheck then ctx.run.advanceLate() /** Set the tpdTree and root tree of the compilation unit */ def lateTypeCheck()(using Context) = - unit.tpdTree = typer.typedExpr(unit.untpdTree) - val phase = new transform.SetRootTree() - phase.run + try + unit.tpdTree = typer.typedExpr(unit.untpdTree) + val phase = new transform.SetRootTree() + phase.run + finally + if typeCheck then ctx.run.advanceLate() unit.untpdTree = if (unit.isJava) new JavaParser(unit.source).parse() @@ -746,9 +754,10 @@ class Namer { typer: Typer => // inline body annotations are set in namer, capturing the current context // we need to prepare the context for inlining. lateEnter() - typeCheckCB { () => - lateTypeCheck() - } + if typeCheck then + typeCheckCB { () => + lateTypeCheck() + } } } end lateEnterUnit @@ -1068,7 +1077,7 @@ class Namer { typer: Typer => protected implicit val completerCtx: Context = localContext(cls) - private var localCtx: Context = _ + private var localCtx: Context = uninitialized /** info to be used temporarily while completing the class, to avoid cyclic references. */ private var tempInfo: TempClassInfo | Null = null @@ -1122,7 +1131,10 @@ class Namer { typer: Typer => No("is already an extension method, cannot be exported into another one") else if targets.contains(alias) then No(i"clashes with another export in the same export clause") - else if sym.is(Override) then + else if sym.is(Override) || sym.is(JavaDefined) then + // The tests above are used to avoid futile searches of `allOverriddenSymbols`. + // Scala defined symbols can override concrete symbols only if declared override. + // For Java defined symbols, this does not hold, so we have to search anyway. 
sym.allOverriddenSymbols.find( other => cls.derivesFrom(other.owner) && !other.is(Deferred) ) match @@ -1134,11 +1146,16 @@ class Namer { typer: Typer => def foreachDefaultGetterOf(sym: TermSymbol, op: TermSymbol => Unit): Unit = var n = 0 + val methodName = + if sym.name == nme.apply && sym.is(Synthetic) && sym.owner.companionClass.is(Case) then + // The synthesized `apply` methods of case classes use the constructor's default getters + nme.CONSTRUCTOR + else sym.name for params <- sym.paramSymss; param <- params do if param.isTerm then if param.is(HasDefault) then - val getterName = DefaultGetterName(sym.name, n) - val getter = pathType.member(DefaultGetterName(sym.name, n)).symbol + val getterName = DefaultGetterName(methodName, n) + val getter = pathType.member(getterName).symbol assert(getter.exists, i"$path does not have a default getter named $getterName") op(getter.asTerm) n += 1 @@ -1171,7 +1188,7 @@ class Namer { typer: Typer => val forwarderName = checkNoConflict(alias.toTypeName, isPrivate = false, span) var target = pathType.select(sym) if target.typeParams.nonEmpty then - target = target.EtaExpand(target.typeParams) + target = target.etaExpand(target.typeParams) newSymbol( cls, forwarderName, Exported | Final, @@ -1234,7 +1251,7 @@ class Namer { typer: Typer => if forwarder.isType then buf += tpd.TypeDef(forwarder.asType).withSpan(span) else - import tpd._ + import tpd.* def extensionParamsCount(pt: Type): Int = pt match case pt: MethodOrPoly => 1 + extensionParamsCount(pt.resType) case _ => 0 @@ -1279,7 +1296,7 @@ class Namer { typer: Typer => .foreach(addForwarder(name, _, span)) // ignore if any are not added def addWildcardForwarders(seen: List[TermName], span: Span): Unit = - val nonContextual = mutable.HashSet(seen: _*) + val nonContextual = mutable.HashSet(seen*) val fromCaseClass = pathType.widen.classSymbols.exists(_.is(Case)) def isCaseClassSynthesized(mbr: Symbol) = fromCaseClass && defn.caseClassSynthesized.contains(mbr) @@ -1306,7 +1323,7 @@ class Namer { typer: Typer => if sel.isWildcard then addWildcardForwarders(seen, sel.span) else - if sel.rename != nme.WILDCARD then + if !sel.isUnimport then addForwardersNamed(sel.name, sel.rename, sel.span) addForwarders(sels1, sel.name :: seen) case _ => @@ -1492,7 +1509,7 @@ class Namer { typer: Typer => def typedParentType(tree: untpd.Tree): tpd.Tree = val parentTpt = typer.typedType(parent, AnyTypeConstructorProto) - val ptpe = parentTpt.tpe + val ptpe = parentTpt.tpe.dealias.etaCollapse if ptpe.typeParams.nonEmpty && ptpe.underlyingClassRef(refinementOK = false).exists then @@ -1689,17 +1706,22 @@ class Namer { typer: Typer => def valOrDefDefSig(mdef: ValOrDefDef, sym: Symbol, paramss: List[List[Symbol]], paramFn: Type => Type)(using Context): Type = { def inferredType = inferredResultType(mdef, sym, paramss, paramFn, WildcardType) - lazy val termParamss = paramss.collect { case TermSymbols(vparams) => vparams } val tptProto = mdef.tpt match { case _: untpd.DerivedTypeTree => WildcardType case TypeTree() => checkMembersOK(inferredType, mdef.srcPos) - case DependentTypeTree(tpFun) => - val tpe = tpFun(termParamss.head) + + // We cannot rely on `typedInLambdaTypeTree` since the computed type might not be fully-defined. + case InLambdaTypeTree(/*isResult =*/ true, tpFun) => + // A lambda has at most one type parameter list followed by exactly one term parameter list. 
+ val tpe = (paramss: @unchecked) match + case TypeSymbols(tparams) :: TermSymbols(vparams) :: Nil => tpFun(tparams, vparams) + case TermSymbols(vparams) :: Nil => tpFun(Nil, vparams) if (isFullyDefined(tpe, ForceDegree.none)) tpe else typedAheadExpr(mdef.rhs, tpe).tpe + case TypedSplice(tpt: TypeTree) if !isFullyDefined(tpt.tpe, ForceDegree.none) => mdef match { case mdef: DefDef if mdef.name == nme.ANON_FUN => @@ -1721,7 +1743,8 @@ class Namer { typer: Typer => // So fixing levels at instantiation avoids the soundness problem but apparently leads // to type inference problems since it comes too late. if !Config.checkLevelsOnConstraints then - val hygienicType = TypeOps.avoid(rhsType, termParamss.flatten) + val termParams = paramss.collect { case TermSymbols(vparams) => vparams }.flatten + val hygienicType = TypeOps.avoid(rhsType, termParams) if (!hygienicType.isValueType || !(hygienicType <:< tpt.tpe)) report.error( em"""return type ${tpt.tpe} of lambda cannot be made hygienic @@ -1880,9 +1903,12 @@ class Namer { typer: Typer => */ def expectedDefaultArgType = val originalTp = defaultParamType - val approxTp = wildApprox(originalTp) + val approxTp = withMode(Mode.TypevarsMissContext): + // assert TypevarsMissContext so that TyperState does not leak into approximation + // We approximate precisely because we want to unlink the type variable. Test case is i18795.scala. + wildApprox(originalTp) approxTp.stripPoly match - case atp @ defn.ContextFunctionType(_, resType, _) + case atp @ defn.ContextFunctionType(_, resType) if !defn.isNonRefinedFunction(atp) // in this case `resType` is lying, gives us only the non-dependent upper bound || resType.existsPart(_.isInstanceOf[WildcardType], StopAt.Static, forceLazy = false) => originalTp diff --git a/compiler/src/dotty/tools/dotc/typer/Nullables.scala b/compiler/src/dotty/tools/dotc/typer/Nullables.scala index 9104418d406f..cc3fac3a6ffd 100644 --- a/compiler/src/dotty/tools/dotc/typer/Nullables.scala +++ b/compiler/src/dotty/tools/dotc/typer/Nullables.scala @@ -2,15 +2,15 @@ package dotty.tools package dotc package typer -import core._ -import Types._, Contexts._, Symbols._, Decorators._, Constants._ +import core.* +import Types.*, Contexts.*, Symbols.*, Decorators.*, Constants.* import annotation.tailrec import StdNames.nme import util.Property import Names.Name import util.Spans.Span -import Flags._ -import NullOpsDecorator._ +import Flags.* +import NullOpsDecorator.* import collection.mutable import config.Printers.nullables import ast.{tpd, untpd} @@ -18,7 +18,7 @@ import ast.Trees.mods /** Operations for implementing a flow analysis for nullability */ object Nullables: - import ast.tpd._ + import ast.tpd.* def importUnsafeNulls(using Context): Import = Import( ref(defn.LanguageModule), @@ -115,6 +115,10 @@ object Nullables: testSym(tree.symbol, l) case Apply(Select(Literal(Constant(null)), _), r :: Nil) => testSym(tree.symbol, r) + case Apply(Apply(op, l :: Nil), Literal(Constant(null)) :: Nil) => + testPredefSym(op.symbol, l) + case Apply(Apply(op, Literal(Constant(null)) :: Nil), r :: Nil) => + testPredefSym(op.symbol, r) case _ => None @@ -123,6 +127,13 @@ object Nullables: else if sym == defn.Any_!= || sym == defn.Object_ne then Some((operand, false)) else None + private def testPredefSym(opSym: Symbol, operand: Tree)(using Context) = + if opSym.owner == defn.ScalaPredefModuleClass then + if opSym.name == nme.eq then Some((operand, true)) + else if opSym.name == nme.ne then Some((operand, false)) + else None + else None + end CompareNull 
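A user-level sketch of the flow typing these extractors feed, assuming `-Yexplicit-nulls` (hypothetical example; the patch's addition is recognizing `eq`/`ne` comparisons that resolve to `Predef` alongside `==`/`!=`):

```scala
// After the null test, `s` is narrowed to String on the else branch;
// with the new testPredefSym cases, `s eq null` is handled the same way.
def len(s: String | Null): Int =
  if s == null then 0
  else s.length
```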
/** An extractor for null-trackable references */ @@ -190,6 +201,16 @@ object Nullables: // TODO: Add constant pattern if the constant type is not nullable case _ => false + def matchesNull(cdef: CaseDef)(using Context): Boolean = + cdef.guard.isEmpty && patMatchesNull(cdef.pat) + + private def patMatchesNull(pat: Tree)(using Context): Boolean = pat match + case Literal(Constant(null)) => true + case Bind(_, pat) => patMatchesNull(pat) + case Alternative(trees) => trees.exists(patMatchesNull) + case _ if isVarPattern(pat) => true + case _ => false + extension (infos: List[NotNullInfo]) /** Do the current not-null infos imply that `ref` is not null? @@ -401,7 +422,7 @@ object Nullables: * because of shadowing. */ def assignmentSpans(using Context): Map[Int, List[Span]] = - import ast.untpd._ + import ast.untpd.* object populate extends UntypedTreeTraverser: @@ -445,7 +466,7 @@ object Nullables: else candidates -= name case None => traverseChildren(tree) - case _: (If | WhileDo | Typed) => + case _: (If | WhileDo | Typed | Match | CaseDef | untpd.ParsedTry) => traverseChildren(tree) // assignments to candidate variables are OK here ... case _ => reachable = Set.empty // ... but not here @@ -507,7 +528,7 @@ object Nullables: def postProcessByNameArgs(fn: TermRef, app: Tree)(using Context): Tree = fn.widen match case mt: MethodType - if mt.paramInfos.exists(_.isInstanceOf[ExprType]) && !fn.symbol.is(Inline) => + if mt.isMethodWithByNameArgs && !fn.symbol.is(Inline) => app match case Apply(fn, args) => object dropNotNull extends TreeMap: diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index bde279c582e6..6b72f3a8b56e 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -2,27 +2,30 @@ package dotty.tools package dotc package typer -import core._ -import ast._ -import Contexts._, Types._, Denotations._, Names._, StdNames._, NameOps._, Symbols._ +import core.* +import ast.* +import Contexts.*, Types.*, Denotations.*, Names.*, StdNames.*, NameOps.*, Symbols.* import NameKinds.DepParamName -import Trees._ -import Constants._ +import Trees.* +import Constants.* import util.{Stats, SimpleIdentityMap, SimpleIdentitySet} -import Decorators._ -import Uniques._ +import Decorators.* +import Uniques.* import inlines.Inlines import config.Printers.typr import Inferencing.* import ErrorReporting.* import util.SourceFile import TypeComparer.necessarySubType +import dotty.tools.dotc.core.Flags.Transparent +import dotty.tools.dotc.config.{ Feature, SourceVersion } import scala.annotation.internal.sharable +import dotty.tools.dotc.util.Spans.{NoSpan, Span} object ProtoTypes { - import tpd._ + import tpd.* /** A trait defining an `isCompatible` method. */ trait Compatibility { @@ -105,7 +108,7 @@ object ProtoTypes { if !res then ctx.typerState.constraint = savedConstraint res - /** Constrain result with special case if `meth` is an inlineable method in an inlineable context. + /** Constrain result with special case if `meth` is a transparent inlineable method in an inlineable context. * In that case, we should always succeed and not constrain type parameters in the expected type, * because the actual return type can be a subtype of the currently known return type. * However, we should constrain parameters of the declared return type. 
This distinction is @@ -113,8 +116,21 @@ object ProtoTypes { */ def constrainResult(meth: Symbol, mt: Type, pt: Type)(using Context): Boolean = if (Inlines.isInlineable(meth)) { - constrainResult(mt, wildApprox(pt)) - true + // Stricter behaviour in 3.4+: do not apply `wildApprox` to non-transparent inlines + if (Feature.sourceVersion.isAtLeast(SourceVersion.`3.4`)) { + if (meth.is(Transparent)) { + constrainResult(mt, wildApprox(pt)) + // do not constrain the result type of transparent inline methods + true + } else { + constrainResult(mt, pt) + } + } else { + // Best-effort to fix https://github.com/lampepfl/dotty/issues/9685 in the 3.3.x series + // while preserving source compatibility as much as possible + val methodMatchedType = constrainResult(mt, wildApprox(pt)) + meth.is(Transparent) || methodMatchedType + } } else constrainResult(mt, pt) } @@ -165,7 +181,7 @@ object ProtoTypes { * * [ ].name: proto */ - abstract case class SelectionProto(name: Name, memberProto: Type, compat: Compatibility, privateOK: Boolean) + abstract case class SelectionProto(name: Name, memberProto: Type, compat: Compatibility, privateOK: Boolean, nameSpan: Span) extends CachedProxyType with ProtoType with ValueTypeOrProto { /** Is the set of members of this type unknown, in the sense that we @@ -211,9 +227,7 @@ object ProtoTypes { || tp1.isValueType && compat.normalizedCompatible(NamedType(tp1, name, m), memberProto, keepConstraint)) // Note: can't use `m.info` here because if `m` is a method, `m.info` // loses knowledge about `m`'s default arguments. - mbr match // hasAltWith inlined for performance - case mbr: SingleDenotation => mbr.exists && qualifies(mbr) - case _ => mbr hasAltWith qualifies + mbr.hasAltWithInline(qualifies) catch case ex: TypeError => // A scenario where this can happen is in pos/15673.scala: // We have a type `CC[A]#C` where `CC`'s upper bound is `[X] => Any`, but @@ -230,9 +244,9 @@ object ProtoTypes { def underlying(using Context): Type = WildcardType - def derivedSelectionProto(name: Name, memberProto: Type, compat: Compatibility)(using Context): SelectionProto = - if ((name eq this.name) && (memberProto eq this.memberProto) && (compat eq this.compat)) this - else SelectionProto(name, memberProto, compat, privateOK) + def derivedSelectionProto(name: Name, memberProto: Type, compat: Compatibility, nameSpan: Span)(using Context): SelectionProto = + if ((name eq this.name) && (memberProto eq this.memberProto) && (compat eq this.compat) && (nameSpan == this.nameSpan)) this + else SelectionProto(name, memberProto, compat, privateOK, nameSpan) override def isErroneous(using Context): Boolean = memberProto.isErroneous @@ -240,14 +254,14 @@ object ProtoTypes { override def unusableForInference(using Context): Boolean = memberProto.unusableForInference - def map(tm: TypeMap)(using Context): SelectionProto = derivedSelectionProto(name, tm(memberProto), compat) + def map(tm: TypeMap)(using Context): SelectionProto = derivedSelectionProto(name, tm(memberProto), compat, nameSpan) def fold[T](x: T, ta: TypeAccumulator[T])(using Context): T = ta(x, memberProto) override def deepenProto(using Context): SelectionProto = - derivedSelectionProto(name, memberProto.deepenProto, compat) + derivedSelectionProto(name, memberProto.deepenProto, compat, nameSpan) override def deepenProtoTrans(using Context): SelectionProto = - derivedSelectionProto(name, memberProto.deepenProtoTrans, compat) + derivedSelectionProto(name, memberProto.deepenProtoTrans, compat, nameSpan) override def computeHash(bs: 
Hashable.Binders): Int = { val delta = (if (compat eq NoViewsAllowed) 1 else 0) | (if (privateOK) 2 else 0) @@ -268,12 +282,12 @@ object ProtoTypes { } } - class CachedSelectionProto(name: Name, memberProto: Type, compat: Compatibility, privateOK: Boolean) - extends SelectionProto(name, memberProto, compat, privateOK) + class CachedSelectionProto(name: Name, memberProto: Type, compat: Compatibility, privateOK: Boolean, nameSpan: Span) + extends SelectionProto(name, memberProto, compat, privateOK, nameSpan) object SelectionProto { - def apply(name: Name, memberProto: Type, compat: Compatibility, privateOK: Boolean)(using Context): SelectionProto = { - val selproto = new CachedSelectionProto(name, memberProto, compat, privateOK) + def apply(name: Name, memberProto: Type, compat: Compatibility, privateOK: Boolean, nameSpan: Span)(using Context): SelectionProto = { + val selproto = new CachedSelectionProto(name, memberProto, compat, privateOK, nameSpan) if (compat eq NoViewsAllowed) unique(selproto) else selproto } } @@ -281,11 +295,11 @@ object ProtoTypes { /** Create a selection proto-type, but only one level deep; * treat constructors specially */ - def shallowSelectionProto(name: Name, tp: Type, typer: Typer)(using Context): TermType = + def shallowSelectionProto(name: Name, tp: Type, typer: Typer, nameSpan: Span)(using Context): TermType = if (name.isConstructorName) WildcardType else tp match - case tp: UnapplyFunProto => new UnapplySelectionProto(name) - case tp => SelectionProto(name, IgnoredProto(tp), typer, privateOK = true) + case tp: UnapplyFunProto => new UnapplySelectionProto(name, nameSpan) + case tp => SelectionProto(name, IgnoredProto(tp), typer, privateOK = true, nameSpan) /** A prototype for expressions [] that are in some unspecified selection operation * @@ -295,12 +309,12 @@ object ProtoTypes { * operation is further selection. In this case, the expression need not be a value. 
* @see checkValue */ - @sharable object AnySelectionProto extends SelectionProto(nme.WILDCARD, WildcardType, NoViewsAllowed, true) + @sharable object AnySelectionProto extends SelectionProto(nme.WILDCARD, WildcardType, NoViewsAllowed, true, NoSpan) - @sharable object SingletonTypeProto extends SelectionProto(nme.WILDCARD, WildcardType, NoViewsAllowed, true) + @sharable object SingletonTypeProto extends SelectionProto(nme.WILDCARD, WildcardType, NoViewsAllowed, true, NoSpan) /** A prototype for selections in pattern constructors */ - class UnapplySelectionProto(name: Name) extends SelectionProto(name, WildcardType, NoViewsAllowed, true) + class UnapplySelectionProto(name: Name, nameSpan: Span) extends SelectionProto(name, WildcardType, NoViewsAllowed, true, nameSpan) trait ApplyingProto extends ProtoType // common trait of ViewProto and FunProto trait FunOrPolyProto extends ProtoType: // common trait of PolyProto and FunProto @@ -370,9 +384,9 @@ object ProtoTypes { def allArgTypesAreCurrent()(using Context): Boolean = state.typedArg.size == args.length - private def isUndefined(tp: Type): Boolean = tp match { + private def isUndefined(tp: Type): Boolean = tp.dealias match { case _: WildcardType => true - case defn.FunctionOf(args, result, _) => args.exists(isUndefined) || isUndefined(result) + case defn.FunctionNOf(args, result, _) => args.exists(isUndefined) || isUndefined(result) case _ => false } @@ -411,7 +425,7 @@ object ProtoTypes { case ValDef(_, tpt, _) if !tpt.isEmpty => typer.typedType(tpt).typeOpt case _ => WildcardType } - targ = arg.withType(defn.FunctionOf(paramTypes, WildcardType)) + targ = arg.withType(defn.FunctionNOf(paramTypes, WildcardType)) case Some(_) if !force => targ = arg.withType(WildcardType) case _ => @@ -599,7 +613,7 @@ object ProtoTypes { def isMatchedBy(tp: Type, keepConstraint: Boolean)(using Context): Boolean = ctx.typer.isApplicableType(tp, argType :: Nil, resultType) || { resType match { - case selProto @ SelectionProto(selName: TermName, mbrType, _, _) => + case selProto @ SelectionProto(selName: TermName, mbrType, _, _, _) => ctx.typer.hasExtensionMethodNamed(tp, selName, argType, mbrType) //.reporting(i"has ext $tp $name $argType $mbrType: $result") case _ => @@ -712,7 +726,7 @@ object ProtoTypes { tl: TypeLambda, owningTree: untpd.Tree, alwaysAddTypeVars: Boolean, nestingLevel: Int = ctx.nestingLevel - ): (TypeLambda, List[TypeTree]) = { + ): (TypeLambda, List[TypeVar]) = { val state = ctx.typerState val addTypeVars = alwaysAddTypeVars || !owningTree.isEmpty if (tl.isInstanceOf[PolyType]) @@ -720,33 +734,31 @@ object ProtoTypes { s"inconsistent: no typevars were added to committable constraint ${state.constraint}") // hk type lambdas can be added to constraints without typevars during match reduction - def newTypeVars(tl: TypeLambda): List[TypeTree] = - for (paramRef <- tl.paramRefs) - yield { - val tt = InferredTypeTree().withSpan(owningTree.span) + def newTypeVars(tl: TypeLambda): List[TypeVar] = + for paramRef <- tl.paramRefs + yield val tvar = TypeVar(paramRef, state, nestingLevel) state.ownedVars += tvar - tt.withType(tvar) - } + tvar val added = state.constraint.ensureFresh(tl) - val tvars = if (addTypeVars) newTypeVars(added) else Nil - TypeComparer.addToConstraint(added, tvars.tpes.asInstanceOf[List[TypeVar]]) + val tvars = if addTypeVars then newTypeVars(added) else Nil + TypeComparer.addToConstraint(added, tvars) (added, tvars) } - def constrained(tl: TypeLambda, owningTree: untpd.Tree)(using Context): (TypeLambda, List[TypeTree]) = + def 
constrained(tl: TypeLambda, owningTree: untpd.Tree)(using Context): (TypeLambda, List[TypeVar]) = constrained(tl, owningTree, alwaysAddTypeVars = tl.isInstanceOf[PolyType] && ctx.typerState.isCommittable) - /** Same as `constrained(tl, EmptyTree)`, but returns just the created type lambda */ - def constrained(tl: TypeLambda)(using Context): TypeLambda = - constrained(tl, EmptyTree)._1 + /** Same as `constrained(tl, EmptyTree, alwaysAddTypeVars = true)`, but returns just the created type vars. */ + def constrained(tl: TypeLambda)(using Context): List[TypeVar] = + constrained(tl, EmptyTree, alwaysAddTypeVars = true)._2 /** Instantiate `tl` with fresh type variables added to the constraint. */ def instantiateWithTypeVars(tl: TypeLambda)(using Context): Type = - val targs = constrained(tl, ast.tpd.EmptyTree, alwaysAddTypeVars = true)._2 - tl.instantiate(targs.tpes) + val tvars = constrained(tl) + tl.instantiate(tvars) /** A fresh type variable added to the current constraint. * @param bounds The initial bounds of the variable @@ -765,7 +777,7 @@ object ProtoTypes { pt => bounds :: Nil, pt => represents.orElse(defn.AnyType)) constrained(poly, untpd.EmptyTree, alwaysAddTypeVars = true, nestingLevel) - ._2.head.tpe.asInstanceOf[TypeVar] + ._2.head /** If `param` was created using `newTypeVar(..., represents = X)`, returns X. * This is used in: @@ -921,7 +933,7 @@ object ProtoTypes { } approxOr case tp: SelectionProto => - tp.derivedSelectionProto(tp.name, wildApprox(tp.memberProto, theMap, seen, internal), NoViewsAllowed) + tp.derivedSelectionProto(tp.name, wildApprox(tp.memberProto, theMap, seen, internal), NoViewsAllowed, tp.nameSpan) case tp: ViewProto => tp.derivedViewProto( wildApprox(tp.argType, theMap, seen, internal), @@ -954,7 +966,7 @@ object ProtoTypes { final def wildApprox(tp: Type)(using Context): Type = wildApprox(tp, null, Set.empty, Set.empty) - @sharable object AssignProto extends UncachedGroundType with MatchAlways + @sharable object LhsProto extends UncachedGroundType with MatchAlways private[ProtoTypes] class WildApproxMap(val seen: Set[TypeParamRef], val internal: Set[TypeLambda])(using Context) extends TypeMap { def apply(tp: Type): Type = wildApprox(tp, this, seen, internal) diff --git a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala index 070449e3ee96..fb9176526e42 100644 --- a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala +++ b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala @@ -1,36 +1,39 @@ package dotty.tools.dotc package typer -import dotty.tools.dotc.ast._ -import dotty.tools.dotc.config.Feature._ -import dotty.tools.dotc.config.SourceVersion._ -import dotty.tools.dotc.core._ -import dotty.tools.dotc.core.Annotations._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.ast.* +import dotty.tools.dotc.config.Feature.* +import dotty.tools.dotc.config.SourceVersion.* +import dotty.tools.dotc.core.* +import dotty.tools.dotc.core.Annotations.* +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.NameKinds.PatMatGivenVarName -import dotty.tools.dotc.core.Names._ -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.Names.* +import dotty.tools.dotc.core.StdNames.* +import 
dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.inlines.PrepareInlineable +import dotty.tools.dotc.quoted.QuotePatterns import dotty.tools.dotc.staging.StagingLevel.* -import dotty.tools.dotc.transform.SymUtils._ + import dotty.tools.dotc.typer.ErrorReporting.errorTree -import dotty.tools.dotc.typer.Implicits._ -import dotty.tools.dotc.typer.Inferencing._ -import dotty.tools.dotc.util.Spans._ +import dotty.tools.dotc.typer.Implicits.* +import dotty.tools.dotc.typer.Inferencing.* +import dotty.tools.dotc.util.Property +import dotty.tools.dotc.util.Spans.* import dotty.tools.dotc.util.Stats.record import dotty.tools.dotc.reporting.IllegalVariableInPatternAlternative import scala.collection.mutable - +import scala.collection.SeqMap /** Type quotes `'{ ... }` and splices `${ ... }` */ trait QuotesAndSplices { self: Typer => - import tpd._ + import tpd.* + import QuotesAndSplices.* /** Translate `'{ e }` into `scala.quoted.Expr.apply(e)` and `'[T]` into `scala.quoted.Type.apply[T]` * while tracking the quotation level in the context. @@ -75,7 +78,7 @@ trait QuotesAndSplices { def typedSplice(tree: untpd.Splice, pt: Type)(using Context): Tree = { record("typedSplice") checkSpliceOutsideQuote(tree) - assert(!ctx.mode.is(Mode.QuotedPattern)) + assert(!ctx.mode.isQuotedPattern) tree.expr match { case untpd.Quote(innerExpr, Nil) if innerExpr.isTerm => report.warning("Canceled quote directly inside a splice. ${ '{ XYZ } } is equivalent to XYZ.", tree.srcPos) @@ -101,24 +104,26 @@ trait QuotesAndSplices { case tree => tree } + def typedQuotePattern(tree: untpd.QuotePattern, pt: Type)(using Context): Tree = + throw new UnsupportedOperationException("cannot type check a Hole node") + def typedSplicePattern(tree: untpd.SplicePattern, pt: Type)(using Context): Tree = { record("typedSplicePattern") if isFullyDefined(pt, ForceDegree.flipBottom) then - def patternOuterContext(ctx: Context): Context = - if (ctx.mode.is(Mode.QuotedPattern)) patternOuterContext(ctx.outer) else ctx - val typedArgs = tree.args.map { - case arg: untpd.Ident => - typedExpr(arg) - case arg => - report.error("Open pattern expected an identifier", arg.srcPos) - EmptyTree + val typedArgs = withMode(Mode.InQuotePatternHoasArgs) { + tree.args.map { + case arg: untpd.Ident => + typedExpr(arg) + case arg => + report.error("Open pattern expected an identifier", arg.srcPos) + EmptyTree + } } for arg <- typedArgs if arg.symbol.is(Mutable) do // TODO support these patterns. Possibly using scala.quoted.util.Var report.error("References to `var`s cannot be used in higher-order pattern", arg.srcPos) val argTypes = typedArgs.map(_.tpe.widenTermRefExpr) - val patType = if tree.args.isEmpty then pt else defn.FunctionOf(argTypes, pt) - val pat = typedPattern(tree.body, defn.QuotedExprClass.typeRef.appliedTo(patType))( - using spliceContext.retractMode(Mode.QuotedPattern).addMode(Mode.Pattern).withOwner(patternOuterContext(ctx).owner)) + val patType = if tree.args.isEmpty then pt else defn.FunctionNOf(argTypes, pt) + val pat = typedPattern(tree.body, defn.QuotedExprClass.typeRef.appliedTo(patType))(using quotePatternSpliceContext) val baseType = pat.tpe.baseType(defn.QuotedExprClass) val argType = if baseType.exists then baseType.argTypesHi.head else defn.NothingType untpd.cpy.SplicePattern(tree)(pat, typedArgs).withType(pt) @@ -137,13 +142,13 @@ trait QuotesAndSplices { * The prototype must be fully defined to be able to infer the type of `R`. 
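A user-level sketch of the higher-order patterns handled here (hypothetical example, not part of the patch):

```scala
import scala.quoted.*

// The higher-order pattern `$f(y)` matches any body that may use `y`,
// binding `f: Expr[Int => Int]`; its type is inferred from the fully
// defined expected type, as the doc comment above requires.
def shift(e: Expr[Int => Int])(using Quotes): Expr[Int => Int] =
  e match
    case '{ (y: Int) => $f(y): Int } => '{ (z: Int) => $f(z + 1) }
    case _ => e
```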
*/ def typedAppliedSplice(tree: untpd.Apply, pt: Type)(using Context): Tree = { - assert(ctx.mode.is(Mode.QuotedPattern)) + assert(ctx.mode.isQuotedPattern) val untpd.Apply(splice: untpd.SplicePattern, args) = tree: @unchecked def isInBraces: Boolean = splice.span.end != splice.body.span.end if isInBraces then // ${x}(...) match an application val typedArgs = args.map(arg => typedExpr(arg)) val argTypes = typedArgs.map(_.tpe.widenTermRefExpr) - val splice1 = typedSplicePattern(splice, defn.FunctionOf(argTypes, pt)) + val splice1 = typedSplicePattern(splice, defn.FunctionNOf(argTypes, pt)) untpd.cpy.Apply(tree)(splice1.select(nme.apply), typedArgs).withType(pt) else // $x(...) higher-order quasipattern if args.isEmpty then @@ -151,23 +156,32 @@ trait QuotesAndSplices { typedSplicePattern(untpd.cpy.SplicePattern(tree)(splice.body, args), pt) } - /** Type a pattern variable name `t` in quote pattern as `${given t$giveni: Type[t @ _]}`. - * The resulting pattern is the split in `splitQuotePattern`. + /** Type check a type binding reference in a quoted pattern. + * + * If no binding exists with that name, this becomes the definition of a new type binding. */ def typedQuotedTypeVar(tree: untpd.Ident, pt: Type)(using Context): Tree = - def spliceOwner(ctx: Context): Symbol = - if (ctx.mode.is(Mode.QuotedPattern)) spliceOwner(ctx.outer) else ctx.owner - val name = tree.name.toTypeName - val nameOfSyntheticGiven = PatMatGivenVarName.fresh(tree.name.toTermName) - val expr = untpd.cpy.Ident(tree)(nameOfSyntheticGiven) val typeSymInfo = pt match case pt: TypeBounds => pt case _ => TypeBounds.empty - val typeSym = newSymbol(spliceOwner(ctx), name, EmptyFlags, typeSymInfo, NoSymbol, tree.span) - typeSym.addAnnotation(Annotation(New(ref(defn.QuotedRuntimePatterns_patternTypeAnnot.typeRef)).withSpan(tree.span))) - val pat = typedPattern(expr, defn.QuotedTypeClass.typeRef.appliedTo(typeSym.typeRef))( - using spliceContext.retractMode(Mode.QuotedPattern).withOwner(spliceOwner(ctx))) - pat.select(tpnme.Underlying) + + def warnOnInferredBounds(typeSym: Symbol) = + if !(typeSymInfo =:= TypeBounds.empty) && !(typeSym.info <:< typeSymInfo) then + val (openQuote, closeQuote) = if ctx.mode.is(Mode.QuotedExprPattern) then ("'{", "}") else ("'[", "]") + report.warning(em"Ignored bound$typeSymInfo\n\nConsider defining bounds explicitly:\n $openQuote $typeSym${typeSym.info & typeSymInfo}; ... $closeQuote", tree.srcPos) + + getQuotedPatternTypeVariable(tree.name.asTypeName) match + case Some(typeSym) => + warnOnInferredBounds(typeSym) + ref(typeSym) + case None => + if ctx.mode.is(Mode.InPatternAlternative) then + report.error(IllegalVariableInPatternAlternative(tree.name), tree.srcPos) + val typeSym = inContext(quotePatternOuterContext(ctx)) { + newSymbol(ctx.owner, tree.name.toTypeName, Case, typeSymInfo, NoSymbol, tree.span) + } + addQuotedPatternTypeVariable(typeSym) + Bind(typeSym, untpd.Ident(nme.WILDCARD).withType(typeSymInfo)).withSpan(tree.span) private def checkSpliceOutsideQuote(tree: untpd.Tree)(using Context): Unit = if (level == 0 && !ctx.owner.ownersIterator.exists(_.isInlineMethod)) @@ -180,192 +194,11 @@ trait QuotesAndSplices { |""", tree.srcPos ) - /** Split a typed quoted pattern is split into its type bindings, pattern expression and inner patterns. - * Type definitions with `@patternType` will be inserted in the pattern expression for each type binding. - * - * A quote pattern - * ``` - * case '{ type ${given t$giveni: Type[t @ _]}; ${ls: Expr[List[t]]} } => ... 
- * ``` - * will return - * ``` - * ( - * Map(: Symbol -> : Bind), - * <'{ - * @scala.internal.Quoted.patternType type t - * scala.internal.Quoted.patternHole[List[t]] - * }>: Tree, - * List(: Tree) - * ) - * ``` - */ - private def splitQuotePattern(quoted: Tree)(using Context): (Map[Symbol, Bind], Tree, List[Tree]) = { - val ctx0 = ctx - - val typeBindings: collection.mutable.Map[Symbol, Bind] = collection.mutable.Map.empty - def getBinding(sym: Symbol): Bind = - typeBindings.getOrElseUpdate(sym, { - val bindingBounds = sym.info - val bsym = newPatternBoundSymbol(sym.name.toString.stripPrefix("$").toTypeName, bindingBounds, quoted.span) - Bind(bsym, untpd.Ident(nme.WILDCARD).withType(bindingBounds)).withSpan(quoted.span) - }) - - object splitter extends tpd.TreeMap { - private var variance: Int = 1 - - inline private def atVariance[T](v: Int)(op: => T): T = { - val saved = variance - variance = v - val res = op - variance = saved - res - } - - val patBuf = new mutable.ListBuffer[Tree] - val freshTypePatBuf = new mutable.ListBuffer[Tree] - val freshTypeBindingsBuff = new mutable.ListBuffer[Tree] - val typePatBuf = new mutable.ListBuffer[Tree] - override def transform(tree: Tree)(using Context) = tree match { - case Typed(splice @ SplicePattern(pat, Nil), tpt) if !tpt.tpe.derivesFrom(defn.RepeatedParamClass) => - transform(tpt) // Collect type bindings - transform(splice) - case SplicePattern(pat, args) => - val patType = pat.tpe.widen - val patType1 = patType.translateFromRepeated(toArray = false) - val pat1 = if (patType eq patType1) pat else pat.withType(patType1) - patBuf += pat1 - if args.isEmpty then ref(defn.QuotedRuntimePatterns_patternHole.termRef).appliedToType(tree.tpe).withSpan(tree.span) - else ref(defn.QuotedRuntimePatterns_higherOrderHole.termRef).appliedToType(tree.tpe).appliedTo(SeqLiteral(args, TypeTree(defn.AnyType))).withSpan(tree.span) - case Select(pat: Bind, _) if tree.symbol.isTypeSplice => - val sym = tree.tpe.dealias.typeSymbol - if sym.exists then - val tdef = TypeDef(sym.asType).withSpan(sym.span) - val nameOfSyntheticGiven = pat.symbol.name.toTermName - freshTypeBindingsBuff += transformTypeBindingTypeDef(nameOfSyntheticGiven, tdef, freshTypePatBuf) - TypeTree(tree.tpe.dealias).withSpan(tree.span) - else - tree - case tdef: TypeDef => - if tdef.symbol.hasAnnotation(defn.QuotedRuntimePatterns_patternTypeAnnot) then - transformTypeBindingTypeDef(PatMatGivenVarName.fresh(tdef.name.toTermName), tdef, typePatBuf) - else if tdef.symbol.isClass then - val kind = if tdef.symbol.is(Module) then "objects" else "classes" - report.error(em"Implementation restriction: cannot match $kind", tree.srcPos) - EmptyTree - else - super.transform(tree) - case tree @ AppliedTypeTree(tpt, args) => - val args1: List[Tree] = args.zipWithConserve(tpt.tpe.typeParams.map(_.paramVarianceSign)) { (arg, v) => - arg.tpe match { - case _: TypeBounds => transform(arg) - case _ => atVariance(variance * v)(transform(arg)) - } - } - cpy.AppliedTypeTree(tree)(transform(tpt), args1) - case tree: NamedDefTree => - if tree.name.is(NameKinds.WildcardParamName) then - report.warning( - "Use of `_` for lambda in quoted pattern. 
Use explicit lambda instead or use `$_` to match any term.",
-            tree.srcPos)
-          if tree.name.isTermName && !tree.nameSpan.isSynthetic && tree.name.startsWith("$") then
-            report.error("Names cannot start with $ quote pattern ", tree.namePos)
-          super.transform(tree)
-        case _: Match =>
-          report.error("Implementation restriction: cannot match `match` expressions", tree.srcPos)
-          EmptyTree
-        case _: Try =>
-          report.error("Implementation restriction: cannot match `try` expressions", tree.srcPos)
-          EmptyTree
-        case _: Return =>
-          report.error("Implementation restriction: cannot match `return` statements", tree.srcPos)
-          EmptyTree
-        case _ =>
-          super.transform(tree)
-      }
-
-      private def transformTypeBindingTypeDef(nameOfSyntheticGiven: TermName, tdef: TypeDef, buff: mutable.Builder[Tree, List[Tree]])(using Context): Tree = {
-        if ctx.mode.is(Mode.InPatternAlternative) then
-          report.error(IllegalVariableInPatternAlternative(tdef.symbol.name), tdef.srcPos)
-        if variance == -1 then
-          tdef.symbol.addAnnotation(Annotation(New(ref(defn.QuotedRuntimePatterns_fromAboveAnnot.typeRef)).withSpan(tdef.span)))
-        val bindingType = getBinding(tdef.symbol).symbol.typeRef
-        val bindingTypeTpe = AppliedType(defn.QuotedTypeClass.typeRef, bindingType :: Nil)
-        val sym = newPatternBoundSymbol(nameOfSyntheticGiven, bindingTypeTpe, tdef.span, flags = ImplicitVal)(using ctx0)
-        buff += Bind(sym, untpd.Ident(nme.WILDCARD).withType(bindingTypeTpe)).withSpan(tdef.span)
-        super.transform(tdef)
-      }
-    }
-    val shape0 = splitter.transform(quoted)
-    val patterns = (splitter.freshTypePatBuf.iterator ++ splitter.typePatBuf.iterator ++ splitter.patBuf.iterator).toList
-    val freshTypeBindings = splitter.freshTypeBindingsBuff.result()
-
-    val shape1 = seq(
-      freshTypeBindings,
-      shape0
-    )
-    val shape2 =
-      if (freshTypeBindings.isEmpty) shape1
-      else {
-        val isFreshTypeBindings = freshTypeBindings.map(_.symbol).toSet
-        val typeMap = new TypeMap() {
-          def apply(tp: Type): Type = tp match {
-            case tp: TypeRef if tp.symbol.isTypeSplice =>
-              val tp1 = tp.dealias
-              if (isFreshTypeBindings(tp1.typeSymbol)) tp1
-              else tp
-            case tp => mapOver(tp)
-          }
-        }
-        new TreeTypeMap(typeMap = typeMap).transform(shape1)
-      }
-
-    (typeBindings.toMap, shape2, patterns)
-  }
-
-  /** Type a quote pattern `case '{  } =>` qiven the a current prototype. Typing the pattern
-   * will also transform it into a call to `scala.internal.quoted.Expr.unapply`.
+  /** Type a quote pattern `case '{  } =>` given the current prototype. Typing the pattern
+   * will create a QuotePattern tree.
    *
    * Code directly inside the quote is typed as an expression using Mode.QuotedPattern. Splices
    * within the quotes become patterns again and typed accordingly.
-   *
-   * ```
-   * case '{ ($ls: List[t]) } =>
-   *   // `t$giveni` is of type `Type[t]` for some unknown `t`
-   *   // `t$giveni` is implicitly available
-   *   // `ls` is of type `Expr[List[t]]`
-   *   '{ val h: $t = $ls.head  }
-   * ```
-   *
-   * For each type splice we will create a new type binding in the pattern match (`t @ _` in this case)
-   * and a corresponding type in the quoted pattern as a hole (`@patternType type t` in this case).
-   * All these generated types are inserted at the start of the quoted code.
-   *
-   * After typing the tree will resemble
-   *
-   * ```
-   * case '{ type ${given t$giveni: Type[t @ _]}; ${ls: Expr[List[t]]} } => ...
-   * ```
-   *
-   * Then the pattern is _split_ into the expression contained in the pattern replacing the splices by holes,
-   * and the patterns in the splices.
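A user-level sketch of a quote pattern with a type variable, the shape `typedQuotePattern` now turns into a `QuotePattern` tree (hypothetical example, not part of the patch):

```scala
import scala.quoted.*

// `t` is a quoted pattern type variable, so `ls` is bound with type
// Expr[List[t]] for some unknown type t.
def headOf(x: Expr[Any])(using Quotes): Expr[Any] =
  x match
    case '{ ($ls: List[t]).head } => '{ $ls.head }
    case _ => x
```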
All these are recombined into a call to `Matcher.unapply`. - * - * ``` - * case scala.internal.quoted.Expr.unapply[ - * KList[t @ _, KNil], // Type binging definition - * Tuple2[Type[t], Expr[List[t]]] // Typing the result of the pattern match - * ]( - * Tuple2.unapply - * [Type[t], Expr[List[t]]] //Propagated from the tuple above - * (given t$giveni @ _, ls @ _: Expr[List[t]]) // from the spliced patterns - * )( - * '{ // Runtime quote Matcher.unapply uses to mach against. Expression directly inside the quoted pattern without the splices - * @scala.internal.Quoted.patternType type t - * scala.internal.Quoted.patternHole[List[t]] - * }, - * true, // If there is at least one type splice. Used to instantiate the context with or without GADT constraints - * x$2 // tasty.Reflection instance - * ) => ... - * ``` */ private def typedQuotePattern(tree: untpd.Quote, pt: Type, quotes: Tree)(using Context): Tree = { val quoted = tree.body @@ -379,64 +212,114 @@ trait QuotesAndSplices { case Some(argPt: ValueType) => argPt // excludes TypeBounds case _ => defn.AnyType } - val quoted0 = desugar.quotedPattern(quoted, untpd.TypedSplice(TypeTree(quotedPt))) - val quoteCtx = quoteContext.addMode(Mode.QuotedPattern).retractMode(Mode.Pattern) - val quoted1 = - if quoted.isType then typedType(quoted0, WildcardType)(using quoteCtx) - else typedExpr(quoted0, WildcardType)(using quoteCtx) - - val (typeBindings, shape, splices) = splitQuotePattern(quoted1) - - class ReplaceBindings extends TypeMap() { - override def apply(tp: Type): Type = tp match { - case tp: TypeRef => - val tp1 = if (tp.symbol.isTypeSplice) tp.dealias else tp - mapOver(typeBindings.get(tp1.typeSymbol).fold(tp)(_.symbol.typeRef)) - case tp => mapOver(tp) - } + val (untpdTypeVariables, quoted0) = desugar.quotedPatternTypeVariables(desugar.quotedPattern(quoted, untpd.TypedSplice(TypeTree(quotedPt)))) + + for tdef @ untpd.TypeDef(_, rhs) <- untpdTypeVariables do rhs match + case _: TypeBoundsTree => // ok + case LambdaTypeTree(_, body: TypeBoundsTree) => // ok + case _ => report.error("Quote type variable definition cannot be an alias", tdef.srcPos) + + if ctx.mode.is(Mode.InPatternAlternative) then + for tpVar <- untpdTypeVariables do + report.error(IllegalVariableInPatternAlternative(tpVar.name), tpVar.srcPos) + + val (typeTypeVariables, patternBlockCtx) = + val quoteCtx = quotePatternContext(quoted.isType) + if untpdTypeVariables.isEmpty then (Nil, quoteCtx) + else typedBlockStats(untpdTypeVariables)(using quoteCtx) + val patternCtx = patternBlockCtx.addMode(if quoted.isType then Mode.QuotedTypePattern else Mode.QuotedExprPattern) + + val allTypeBindings = List.newBuilder[Bind] + for tpVar <- typeTypeVariables do + val sym = tpVar.symbol + allTypeBindings += Bind(sym, untpd.Ident(nme.WILDCARD).withType(sym.info)).withSpan(tpVar.span) + + val body1 = inContext(patternCtx) { + for typeVariable <- typeTypeVariables do + addQuotedPatternTypeVariable(typeVariable.symbol) + + if quoted.isType then typedType(quoted0, WildcardType) + else typedExpr(quoted0, WildcardType) } - val replaceBindings = new ReplaceBindings - val patType = defn.tupleType(splices.tpes.map(tpe => replaceBindings(tpe.widen))) - - val typeBindingsTuple = tpd.hkNestedPairsTypeTree(typeBindings.values.toList) - - val replaceBindingsInTree = new TreeMap { - private var bindMap = Map.empty[Symbol, Symbol] - override def transform(tree: tpd.Tree)(using Context): tpd.Tree = - tree match { - case tree: Bind => - val sym = tree.symbol - val newInfo = replaceBindings(sym.info) - val 
newSym = newSymbol(sym.owner, sym.name, sym.flags, newInfo, sym.privateWithin, sym.coord)
-            bindMap += sym -> newSym
-            Bind(newSym, transform(tree.body)).withSpan(sym.span)
-          case _ =>
-            super.transform(tree).withType(replaceBindingsInType(tree.tpe))
-        }
-      private val replaceBindingsInType = new ReplaceBindings {
-        override def apply(tp: Type): Type = tp match {
-          case tp: TermRef => bindMap.get(tp.termSymbol).fold[Type](tp)(_.typeRef)
-          case tp => super.apply(tp)
-        }
-      }
+
+    val extractTypeBindings = new TreeMapWithVariance {
+      override def transform(tree: Tree)(using Context) = tree match
+        case pat: Bind if pat.isType =>
+          if inContravariantPosition then
+            pat.symbol.addAnnotation(Annotation(New(ref(defn.QuotedRuntimePatterns_fromAboveAnnot.typeRef)).withSpan(pat.span)))
+          allTypeBindings += pat
+          TypeTree(pat.symbol.typeRef).withSpan(pat.span)
+        case _: SplicePattern =>
+          tree
+        case _ =>
+          super.transform(tree)
     }
+    val body2 = extractTypeBindings.transform(body1)
+
+    val quoteClass = if quoted.isTerm then defn.QuotedExprClass else defn.QuotedTypeClass
+    val pt1 = quoteClass.typeRef.appliedTo(body2.tpe)

-    val splicePat =
-      if splices.isEmpty then ref(defn.EmptyTupleModule.termRef)
-      else typed(untpd.Tuple(splices.map(x => untpd.TypedSplice(replaceBindingsInTree.transform(x)))).withSpan(quoted.span), patType)
+    val quotePattern = QuotePattern(allTypeBindings.result(), body2, quotes, pt1)
+    QuotePatterns.checkPattern(quotePattern)
+    quotePattern
+  }
+}

-    val quoteClass = if (quoted.isTerm) defn.QuotedExprClass else defn.QuotedTypeClass
-    val quotedPattern =
-      if (quoted.isTerm) tpd.Quote(shape, Nil).select(nme.apply).appliedTo(quotes)
-      else ref(defn.QuotedTypeModule_of.termRef).appliedToTypeTree(shape).appliedTo(quotes)
+object QuotesAndSplices {
+  import tpd.*

-    val matchModule = if quoted.isTerm then defn.QuoteMatching_ExprMatch else defn.QuoteMatching_TypeMatch
-    val unapplyFun = quotes.asInstance(defn.QuoteMatchingClass.typeRef).select(matchModule).select(nme.unapply)
+  /** Key for mapping from quoted pattern type variable names into their symbol */
+  private val TypeVariableKey = new Property.Key[collection.mutable.Map[TypeName, Symbol]]

-    UnApply(
-      fun = unapplyFun.appliedToTypeTrees(typeBindingsTuple :: TypeTree(patType) :: Nil),
-      implicits = quotedPattern :: Nil,
-      patterns = splicePat :: Nil,
-      proto = quoteClass.typeRef.appliedTo(replaceBindings(quoted1.tpe) & quotedPt))
-  }
+  /** Get the symbol for the quoted pattern type variable if it exists */
+  def getQuotedPatternTypeVariable(name: TypeName)(using Context): Option[Symbol] =
+    ctx.property(TypeVariableKey).get.get(name)
+
+  /** Register the symbol of a quoted pattern type variable under its name */
+  def addQuotedPatternTypeVariable(sym: Symbol)(using Context): Unit =
+    ctx.property(TypeVariableKey).get.update(sym.name.asTypeName, sym)
+
+  /** Context used to type the contents of a quote pattern */
+  def quotePatternContext(isTypePattern: Boolean)(using Context): Context =
+    quoteContext.fresh.setNewScope
+      .retractMode(Mode.Pattern)
+      .setProperty(TypeVariableKey, collection.mutable.Map.empty)
+
+  /** Context used to type the contents of a quote pattern splice */
+  def quotePatternSpliceContext(using Context): Context =
+    spliceContext
+      .retractMode(Mode.QuotedPatternBits)
+      .addMode(Mode.Pattern)
+      .withOwner(quotePatternOuterContext(ctx).owner)
+
+  /** First outer context that is outside of a quoted pattern. 
*/ + def quotePatternOuterContext(ctx: Context): Context = + if ctx.mode.isQuotedPattern then quotePatternOuterContext(ctx.outer) else ctx + + private[QuotesAndSplices] class TreeMapWithVariance extends TreeMap: + private var variance: Int = 1 + + def inContravariantPosition: Boolean = variance == -1 + + inline private def atVariance[T](v: Int)(op: => T): T = { + val saved = variance + variance = v + val res = op + variance = saved + res + } + + override def transform(tree: Tree)(using Context) = tree match + // TODO: handle TypeBoundsTree, LambdaTypeTree as well as method parameters in DefTrees? + case tree @ AppliedTypeTree(tpt, args) => + val args1: List[Tree] = args.zipWithConserve(tpt.tpe.typeParams.map(_.paramVarianceSign)) { (arg, v) => + arg.tpe match { + case _: TypeBounds => transform(arg) + case _ => atVariance(v * variance)(transform(arg)) + } + } + cpy.AppliedTypeTree(tree)(transform(tpt), args1) + case _ => + super.transform(tree) + end TreeMapWithVariance } diff --git a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala index 1fa6e967fbe1..e152b5e6b9c7 100644 --- a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala +++ b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala @@ -1,17 +1,17 @@ package dotty.tools.dotc package typer -import core._ -import Contexts._ -import Types._ -import Symbols._ -import StdNames._ -import Decorators._ -import typer.ProtoTypes._ +import core.* +import Contexts.* +import Types.* +import Symbols.* +import StdNames.* +import Decorators.* +import typer.ProtoTypes.* import ast.{tpd, untpd} import scala.util.control.NonFatal import util.Spans.Span -import Nullables._ +import Nullables.* import staging.StagingLevel.* /** A version of Typer that keeps all symbols defined and referenced in a @@ -23,7 +23,7 @@ import staging.StagingLevel.* * Otherwise, everything is as in Typer. 
 */
 class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking {
-  import tpd._
+  import tpd.*

   private def assertTyped(tree: untpd.Tree)(using Context): Unit =
     assert(tree.hasType, i"$tree ${tree.getClass} ${tree.uniqueId}")
@@ -95,9 +95,16 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking
   override def typedUnApply(tree: untpd.Apply, selType: Type)(using Context): Tree =
     typedApply(tree, selType)

+  override def typedInlined(tree: untpd.Inlined, pt: Type)(using Context): Tree = {
+    val (bindings1, exprCtx) = typedBlockStats(tree.bindings)
+    val expansion1 = typed(tree.expansion, pt)(using inlineContext(promote(tree))(using exprCtx))
+    untpd.cpy.Inlined(tree)(tree.call, bindings1.asInstanceOf[List[MemberDef]], expansion1)
+      .withType(avoidingType(expansion1, bindings1))
+  }
+
   override def typedQuote(tree: untpd.Quote, pt: Type)(using Context): Tree =
     assertTyped(tree)
-    val body1 = typed(tree.body, tree.bodyType)(using quoteContext)
+    val body1 = typed(tree.body, promote(tree).bodyType)(using quoteContext)
     for tag <- tree.tags do assertTyped(tag)
     untpd.cpy.Quote(tree)(body1, tree.tags).withType(tree.typeOpt)

@@ -111,6 +118,26 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking
     val expr1 = typed(tree.expr, quoteType)(using spliceContext)
     untpd.cpy.Splice(tree)(expr1).withType(tree.typeOpt)

+  override def typedQuotePattern(tree: untpd.QuotePattern, pt: Type)(using Context): Tree =
+    assertTyped(tree)
+    val bindings1 = tree.bindings.map(typed(_))
+    val bodyCtx = quoteContext
+      .retractMode(Mode.Pattern)
+      .addMode(if tree.body.isType then Mode.QuotedTypePattern else Mode.QuotedExprPattern)
+    val body1 = typed(tree.body, promote(tree).bodyType)(using bodyCtx)
+    val quotes1 = typed(tree.quotes, defn.QuotesClass.typeRef)
+    untpd.cpy.QuotePattern(tree)(bindings1, body1, quotes1).withType(tree.typeOpt)
+
+  override def typedSplicePattern(tree: untpd.SplicePattern, pt: Type)(using Context): Tree =
+    assertTyped(tree)
+    val args1 = tree.args.mapconserve(typedExpr(_))
+    val patternTpe =
+      if args1.isEmpty then tree.typeOpt
+      else defn.FunctionType(args1.size).appliedTo(args1.map(_.tpe) :+ tree.typeOpt)
+    val bodyCtx = spliceContext.addMode(Mode.Pattern).retractMode(Mode.QuotedPatternBits)
+    val body1 = typed(tree.body, defn.QuotedExprClass.typeRef.appliedTo(patternTpe))(using bodyCtx)
+    untpd.cpy.SplicePattern(tree)(body1, args1).withType(tree.typeOpt)
+
   override def typedHole(tree: untpd.Hole, pt: Type)(using Context): Tree =
     promote(tree)

@@ -145,7 +173,7 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking
   override def typedUnadapted(tree: untpd.Tree, pt: Type, locked: TypeVars)(using Context): Tree =
     try super.typedUnadapted(tree, pt, locked)
     catch case NonFatal(ex) if ctx.phase != Phases.typerPhase && ctx.phase != Phases.inliningPhase && !ctx.run.enrichedErrorMessage =>
-      val treeStr = tree.show(using ctx.withPhase(ctx.phase.prevMega))
+      val treeStr = tree.show(using ctx.withPhase(ctx.phase.prev.megaPhase))
       println(ctx.run.enrichErrorMessage(s"exception while retyping $treeStr of class ${tree.className} # ${tree.uniqueId}"))
       throw ex
diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala
index 025eae3606af..fb692446f1df 100644
--- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala
+++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala
@@ -2,27 +2,28 @@ package 
dotty.tools
 package dotc
 package typer

-import transform._
-import core._
-import Symbols._, Types._, Contexts._, Flags._, Names._, NameOps._, NameKinds._
-import StdNames._, Denotations._, SymUtils._, Phases._, SymDenotations._
+import transform.*
+import core.*
+import Symbols.*, Types.*, Contexts.*, Flags.*, Names.*, NameOps.*, NameKinds.*
+import StdNames.*, Denotations.*, Phases.*, SymDenotations.*
 import NameKinds.DefaultGetterName
-import util.Spans._
+import util.Spans.*
 import scala.collection.mutable
-import ast._
-import MegaPhase._
-import config.Printers.{checks, noPrinter}
-import Decorators._
+import ast.*
+import MegaPhase.*
+import config.Printers.{checks, noPrinter, capt}
+import Decorators.*
 import OverridingPairs.isOverridingPair
-import typer.ErrorReporting._
+import typer.ErrorReporting.*
 import config.Feature.{warnOnMigration, migrateTo3, sourceVersion}
-import config.SourceVersion.{`3.0`, `future`}
+import config.SourceVersion.`3.0`
+import config.MigrationVersion
 import config.Printers.refcheck
-import reporting._
+import reporting.*
 import Constants.Constant

 object RefChecks {
-  import tpd._
+  import tpd.*

   val name: String = "refchecks"
   val description: String = "checks related to abstract members and overriding"
@@ -103,7 +104,7 @@ object RefChecks {
     val cinfo = cls.classInfo

     def checkSelfConforms(other: ClassSymbol) =
-      val otherSelf = other.declaredSelfTypeAsSeenFrom(cls.thisType)
+      var otherSelf = other.declaredSelfTypeAsSeenFrom(cls.thisType)
       if otherSelf.exists then
         if !(cinfo.selfType <:< otherSelf) then
           report.error(DoesNotConformToSelfType("illegal inheritance", cinfo.selfType, cls, otherSelf, "parent", other),
@@ -219,31 +220,40 @@ object RefChecks {
            false
          precedesIn(parent.asClass.baseClasses)

-      // We can exclude pairs safely from checking only under three additional conditions
-      //   - their signatures also match in the parent class.
-      //     See neg/i12828.scala for an example where this matters.
-      //   - They overriding/overridden appear in linearization order.
-      //     See neg/i5094.scala for an example where this matters.
-      //   - The overridden symbol is not `abstract override`. For such symbols
-      //     we need a more extensive test since the virtual super chain depends
-      //     on the precise linearization order, which might be different for the
-      //     subclass. See neg/i14415.scala.
+      /** We can exclude pairs safely from checking only under three additional conditions
+       *   - their signatures also match in the parent class.
+       *     See neg/i12828.scala for an example where this matters.
+       *   - The overriding/overridden members appear in linearization order,
+       *     or the parent is a Java class (because linearization does not apply to Java classes).
+       *     See neg/i5094.scala and pos/i18654.scala for examples where this matters.
+       *   - The overridden symbol is not `abstract override`. For such symbols
+       *     we need a more extensive test since the virtual super chain depends
+       *     on the precise linearization order, which might be different for the
+       *     subclass. See neg/i14415.scala. 
+ */ override def canBeHandledByParent(sym1: Symbol, sym2: Symbol, parent: Symbol): Boolean = isOverridingPair(sym1, sym2, parent.thisType) .showing(i"already handled ${sym1.showLocated}: ${sym1.asSeenFrom(parent.thisType).signature}, ${sym2.showLocated}: ${sym2.asSeenFrom(parent.thisType).signature} = $result", refcheck) - && inLinearizationOrder(sym1, sym2, parent) + && (inLinearizationOrder(sym1, sym2, parent) || parent.is(JavaDefined)) && !sym2.is(AbsOverride) - // Checks the subtype relationship tp1 <:< tp2. - // It is passed to the `checkOverride` operation in `checkAll`, to be used for - // compatibility checking. + /** Checks the subtype relationship tp1 <:< tp2. + * It is passed to the `checkOverride` operation in `checkAll`, to be used for + * compatibility checking. + */ def checkSubType(tp1: Type, tp2: Type)(using Context): Boolean = tp1 frozen_<:< tp2 + /** A hook that allows to omit override checks between `overriding` and `overridden`. + * Overridden in capture checking to handle non-capture checked classes leniently. + */ + def needsCheck(overriding: Symbol, overridden: Symbol)(using Context): Boolean = true + private val subtypeChecker: (Type, Type) => Context ?=> Boolean = this.checkSubType def checkAll(checkOverride: ((Type, Type) => Context ?=> Boolean, Symbol, Symbol) => Unit) = while hasNext do - checkOverride(subtypeChecker, overriding, overridden) + if needsCheck(overriding, overridden) then + checkOverride(subtypeChecker, overriding, overridden) next() // The OverridingPairs cursor does assume that concrete overrides abstract @@ -257,8 +269,11 @@ object RefChecks { if dcl.is(Deferred) then for other <- dcl.allOverriddenSymbols do if !other.is(Deferred) then - checkOverride(checkSubType, dcl, other) + checkOverride(subtypeChecker, dcl, other) end checkAll + + // Disabled for capture checking since traits can get different parameter refinements + def checkInheritedTraitParameters: Boolean = true end OverridingPairsChecker /** 1. Check all members of class `clazz` for overriding conditions. @@ -283,9 +298,10 @@ object RefChecks { * 1.9. If M is erased, O is erased. If O is erased, M is erased or inline. * 1.10. If O is inline (and deferred, otherwise O would be final), M must be inline * 1.11. If O is a Scala-2 macro, M must be a Scala-2 macro. - * 1.12. If O is non-experimental, M must be non-experimental. - * 1.13 Under -source future, if O is a val parameter, M must be a val parameter + * 1.12. Under -source future, if O is a val parameter, M must be a val parameter * that passes its value on to O. + * 1.13. If O is non-experimental, M must be non-experimental. + * 1.14. If O has @publicInBinary, M must have @publicInBinary. * 2. Check that only abstract classes have deferred members * 3. Check that concrete classes do not have deferred definitions * that are not implemented in a subclass. @@ -336,14 +352,35 @@ object RefChecks { next == other || isInheritedAccessor(next, other) } + /** Detect any param section where params in last position do not agree isRepeatedParam. 
+ */ + def incompatibleRepeatedParam(member: Symbol, other: Symbol): Boolean = + def loop(mParamInfoss: List[List[Type]], oParamInfoss: List[List[Type]]): Boolean = + mParamInfoss match + case Nil => false + case h :: t => + oParamInfoss match + case Nil => false + case h2 :: t2 => h.nonEmpty && h2.nonEmpty && h.last.isRepeatedParam != h2.last.isRepeatedParam + || loop(t, t2) + member.is(Method, butNot = JavaDefined) + && other.is(Method, butNot = JavaDefined) + && atPhase(typerPhase): + loop(member.info.paramInfoss, other.info.paramInfoss) + + val checker = + if makeOverridingPairsChecker == null then OverridingPairsChecker(clazz, self) + else makeOverridingPairsChecker(clazz, self) + /* Check that all conditions for overriding `other` by `member` * of class `clazz` are met. */ def checkOverride(checkSubType: (Type, Type) => Context ?=> Boolean, member: Symbol, other: Symbol): Unit = def memberTp(self: Type) = - if (member.isClass) TypeAlias(member.typeRef.EtaExpand(member.typeParams)) + if (member.isClass) TypeAlias(member.typeRef.etaExpand(member.typeParams)) else self.memberInfo(member) - def otherTp(self: Type) = self.memberInfo(other) + def otherTp(self: Type) = + self.memberInfo(other) refcheck.println(i"check override ${infoString(member)} overriding ${infoString(other)}") @@ -425,10 +462,8 @@ object RefChecks { //Console.println(infoString(member) + " overrides " + infoString(other) + " in " + clazz);//DEBUG /* Is the intersection between given two lists of overridden symbols empty? */ - def intersectionIsEmpty(syms1: Iterator[Symbol], syms2: Iterator[Symbol]) = { - val set2 = syms2.toSet - !(syms1 exists (set2 contains _)) - } + def intersectionIsEmpty(syms1: Iterator[Symbol], syms2: Iterator[Symbol]) = + !syms1.exists(syms2.toSet.contains) // o: public | protected | package-protected (aka java's default access) // ^-may be overridden by member with access privileges-v @@ -498,6 +533,8 @@ object RefChecks { + "\n(Note: this can be resolved by declaring an override in " + clazz + ".)") else if member.is(Exported) then overrideError("cannot override since it comes from an export") + else if incompatibleRepeatedParam(member, other) then + report.error(DoubleDefinition(member, other, clazz), member.srcPos) else overrideError("needs `override` modifier") else if (other.is(AbsOverride) && other.isIncompleteIn(clazz) && !member.is(AbsOverride)) @@ -535,20 +572,19 @@ object RefChecks { overrideError(i"needs to be declared with @targetName(${"\""}${other.targetName}${"\""}) so that external names match") else overrideError("cannot have a @targetName annotation since external names would be different") - else if other.is(ParamAccessor) && !isInheritedAccessor(member, other) then // (1.13) - if sourceVersion.isAtLeast(`future`) then - overrideError(i"cannot override val parameter ${other.showLocated}") - else - report.deprecationWarning( - em"overriding val parameter ${other.showLocated} is deprecated, will be illegal in a future version", - member.srcPos) - else if !other.isExperimental && member.hasAnnotation(defn.ExperimentalAnnot) then // (1.12) + else if other.is(ParamAccessor) && !isInheritedAccessor(member, other) then // (1.12) + report.errorOrMigrationWarning( + em"cannot override val parameter ${other.showLocated}", + member.srcPos, + MigrationVersion.OverrideValParameter) + else if !other.isExperimental && member.hasAnnotation(defn.ExperimentalAnnot) then // (1.13) overrideError("may not override non-experimental member") + else if !member.hasAnnotation(defn.PublicInBinaryAnnot) 
&& other.hasAnnotation(defn.PublicInBinaryAnnot) then // (1.14) + overrideError("also needs to be declared with @publicInBinary") else if other.hasAnnotation(defn.DeprecatedOverridingAnnot) then overrideDeprecation("", member, other, "removed or renamed") end checkOverride - val checker = if makeOverridingPairsChecker == null then OverridingPairsChecker(clazz, self) else makeOverridingPairsChecker(clazz, self) checker.checkAll(checkOverride) printMixinOverrideErrors() @@ -797,7 +833,7 @@ object RefChecks { em"""${mbr.showLocated} is not a legal implementation of `$name` in $clazz | its type $mbrType | does not conform to ${mbrd.info}""", - (if (mbr.owner == clazz) mbr else clazz).srcPos, from = `3.0`) + (if (mbr.owner == clazz) mbr else clazz).srcPos, MigrationVersion.Scala2to3) } } } @@ -811,7 +847,7 @@ object RefChecks { for (baseCls <- caseCls.info.baseClasses.tail) if (baseCls.typeParams.exists(_.paramVarianceSign != 0)) for (problem <- variantInheritanceProblems(baseCls, caseCls, "non-variant", "case ")) - report.errorOrMigrationWarning(problem, clazz.srcPos, from = `3.0`) + report.errorOrMigrationWarning(problem, clazz.srcPos, MigrationVersion.Scala2to3) checkNoAbstractMembers() if (abstractErrors.isEmpty) checkNoAbstractDecls(clazz) @@ -823,7 +859,7 @@ object RefChecks { checkCaseClassInheritanceInvariant() } - if (!clazz.is(Trait)) { + if (!clazz.is(Trait) && checker.checkInheritedTraitParameters) { // check that parameterized base classes and traits are typed in the same way as from the superclass // I.e. say we have // @@ -878,6 +914,7 @@ object RefChecks { def isSignatureMatch(sym: Symbol) = sym.isType || { val self = clazz.thisType sym.asSeenFrom(self).matches(member.asSeenFrom(self)) + && !incompatibleRepeatedParam(sym, member) } /* The rules for accessing members which have an access boundary are more @@ -910,8 +947,8 @@ object RefChecks { } // 4. Check that every defined member with an `override` modifier overrides some other member. 
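
A minimal sketch of the clash that the `incompatibleRepeatedParam` check introduced above is after (the class and method names here are hypothetical, not part of the patch): a repeated parameter and a `Seq` parameter erase to the same signature, so the pair has to be rejected as a double definition.

```scala
class Logger:
  def log(msgs: String*): Unit = msgs.foreach(println)

class FileLogger extends Logger:
  // `String*` erases to `Seq[String]`, so this member has the same erased
  // signature as the inherited vararg method. With the change above the
  // compiler reports a double definition here instead of the misleading
  // "needs `override` modifier" message.
  def log(msgs: Seq[String]): Unit = msgs.foreach(println)
```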
- for (member <- clazz.info.decls) - if (member.isAnyOverride && !(clazz.thisType.baseClasses exists (hasMatchingSym(_, member)))) { + for member <- clazz.info.decls do + if member.isAnyOverride && !clazz.thisType.baseClasses.exists(hasMatchingSym(_, member)) then if (checks != noPrinter) for (bc <- clazz.info.baseClasses.tail) { val sym = bc.info.decl(member.name).symbol @@ -935,7 +972,7 @@ object RefChecks { } member.resetFlag(Override) member.resetFlag(AbsOverride) - } + end if } /** Check that we do not "override" anything with a private method @@ -957,7 +994,9 @@ object RefChecks { then val cls = sym.owner.asClass for bc <- cls.baseClasses.tail do - val other = sym.matchingDecl(bc, cls.thisType) + var other = sym.matchingDecl(bc, cls.thisType) + if !other.exists && sym.targetName != sym.name then + other = sym.matchingDecl(bc, cls.thisType, sym.targetName) if other.exists then report.error(em"private $sym cannot override ${other.showLocated}", sym.srcPos) end checkNoPrivateOverrides @@ -1097,7 +1136,7 @@ object RefChecks { report.warning(UnqualifiedCallToAnyRefMethod(tree, tree.symbol), tree) } -import RefChecks._ +import RefChecks.* /** Post-attribution checking and transformation, which fulfills the following roles * @@ -1131,7 +1170,7 @@ import RefChecks._ */ class RefChecks extends MiniPhase { thisPhase => - import tpd._ + import tpd.* override def phaseName: String = RefChecks.name @@ -1179,6 +1218,10 @@ class RefChecks extends MiniPhase { thisPhase => checkAnyRefMethodCall(tree) tree + override def transformSelect(tree: tpd.Select)(using Context): tpd.Tree = + if defn.ScalaBoxedClasses().contains(tree.qualifier.tpe.typeSymbol) && tree.name == nme.synchronized_ then + report.warning(SynchronizedCallOnBoxedClass(tree), tree.srcPos) + tree } /* todo: rewrite and re-enable @@ -1543,7 +1586,7 @@ class RefChecks extends MiniPhase { thisPhase => private def transformIf(tree: If): Tree = { val If(cond, thenpart, elsepart) = tree def unitIfEmpty(t: Tree): Tree = - if (t == EmptyTree) Literal(Constant(())).setPos(tree.pos).setType(UnitTpe) else t + if (t == EmptyTree) unitLiteral.setPos(tree.pos).setType(UnitTpe) else t cond.tpe match { case ConstantType(value) => diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala index 103961b68c29..c94724faf4d4 100644 --- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala @@ -2,23 +2,23 @@ package dotty.tools package dotc package typer -import core._ +import core.* import util.Spans.Span -import Contexts._ -import Types._, Flags._, Symbols._, Names._, StdNames._, Constants._ +import Contexts.* +import Types.*, Flags.*, Symbols.*, Names.*, StdNames.*, Constants.* import TypeErasure.{erasure, hasStableErasure} -import Decorators._ -import ProtoTypes._ +import Decorators.* +import ProtoTypes.* import Inferencing.{fullyDefinedType, isFullyDefined} import ast.untpd -import transform.SymUtils._ -import transform.TypeUtils._ -import transform.SyntheticMembers._ +import transform.SyntheticMembers.* import util.Property import ast.Trees.genericEmptyTree import annotation.{tailrec, constructorOnly} -import ast.tpd._ -import Synthesizer._ +import ast.tpd.* +import Synthesizer.* +import sbt.ExtractDependencies.* +import xsbti.api.DependencyContext.* /** Synthesize terms for special classes */ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): @@ -102,11 +102,10 @@ class Synthesizer(typer: Typer)(using 
@constructorOnly c: Context): case AppliedType(_, funArgs @ fun :: tupled :: Nil) => def functionTypeEqual(baseFun: Type, actualArgs: List[Type], actualRet: Type, expected: Type) = - expected =:= defn.FunctionOf(actualArgs, actualRet, + expected =:= defn.FunctionNOf(actualArgs, actualRet, defn.isContextFunctionType(baseFun)) val arity: Int = - if defn.isErasedFunctionType(fun) then -1 // TODO support? - else if defn.isFunctionType(fun) then + if defn.isFunctionNType(fun) then // TupledFunction[(...) => R, ?] fun.functionArgInfos match case funArgs :+ funRet @@ -114,11 +113,11 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): // TupledFunction[(...funArgs...) => funRet, ?] funArgs.size case _ => -1 - else if defn.isFunctionType(tupled) then + else if defn.isFunctionNType(tupled) then // TupledFunction[?, (...) => R] tupled.functionArgInfos match case tupledArgs :: funRet :: Nil => - defn.tupleTypes(tupledArgs.dealias) match + tupledArgs.tupleElementTypes match case Some(funArgs) if functionTypeEqual(tupled, funArgs, funRet, fun) => // TupledFunction[?, ((...funArgs...)) => funRet] funArgs.size @@ -166,7 +165,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): def cmpWithBoxed(cls1: ClassSymbol, cls2: ClassSymbol) = cls2 == defn.NothingClass - || cls2 == defn.boxedType(cls1.typeRef).symbol + || cls2 == defn.boxedClass(cls1) || cls1.isNumericValueClass && cls2.derivesFrom(defn.BoxedNumberClass) if cls1.isPrimitiveValueClass then @@ -380,6 +379,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): // avoid type aliases for tuples Right(MirrorSource.GenericTuple(types)) case _ => reduce(tp.underlying) + case tp: MatchType => reduce(tp.normalized) case _ => reduce(tp.superType) case tp @ AndType(l, r) => for @@ -408,7 +408,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): New(defn.RuntimeTupleMirrorTypeRef, Literal(Constant(arity)) :: Nil) def makeProductMirror(pre: Type, cls: Symbol, tps: Option[List[Type]]): TreeWithErrors = - val accessors = cls.caseAccessors.filterNot(_.isAllOf(PrivateLocal)) + val accessors = cls.caseAccessors val elemLabels = accessors.map(acc => ConstantType(Constant(acc.name.toString))) val typeElems = tps.getOrElse(accessors.map(mirroredType.resultType.memberInfo(_).widenExpr)) val nestedPairs = TypeOps.nestedPairs(typeElems) @@ -429,7 +429,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): if cls.useCompanionAsProductMirror then companionPath(mirroredType, span) else if defn.isTupleClass(cls) then newTupleMirror(typeElems.size) // TODO: cls == defn.PairClass when > 22 else anonymousMirror(monoType, MirrorImpl.OfProduct(pre), span) - withNoErrors(mirrorRef.cast(mirrorType)) + withNoErrors(mirrorRef.cast(mirrorType).withSpan(span)) end makeProductMirror MirrorSource.reduce(mirroredType) match @@ -442,12 +442,12 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): mirrorCore(defn.Mirror_SingletonProxyClass, mirroredType, mirroredType, singleton.name) } val mirrorRef = New(defn.Mirror_SingletonProxyClass.typeRef, singletonPath :: Nil) - withNoErrors(mirrorRef.cast(mirrorType)) + withNoErrors(mirrorRef.cast(mirrorType).withSpan(span)) else val mirrorType = formal.constrained_& { mirrorCore(defn.Mirror_SingletonClass, mirroredType, mirroredType, singleton.name) } - withNoErrors(singletonPath.cast(mirrorType)) + withNoErrors(singletonPath.cast(mirrorType).withSpan(span)) case MirrorSource.GenericTuple(tps) => val maxArity = Definitions.MaxTupleArity val 
arity = tps.size @@ -458,7 +458,14 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): val reason = s"it reduces to a tuple with arity $arity, expected arity <= $maxArity" withErrors(i"${defn.PairClass} is not a generic product because $reason") case MirrorSource.ClassSymbol(pre, cls) => - if cls.isGenericProduct then makeProductMirror(pre, cls, None) + if cls.isGenericProduct then + if ctx.runZincPhases then + // The mirror should be resynthesized if the constructor of the + // case class `cls` changes. See `sbt-test/source-dependencies/mirror-product`. + val rec = ctx.compilationUnit.depRecorder + rec.addClassDependency(cls, DependencyByMemberRef) + rec.addUsedName(cls.primaryConstructor) + makeProductMirror(pre, cls, None) else withErrors(i"$cls is not a generic product because ${cls.whyNotGenericProduct}") case Left(msg) => withErrors(i"type `$mirroredType` is not a generic product because $msg") @@ -478,6 +485,13 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): val clsIsGenericSum = cls.isGenericSum(pre) if acceptableMsg.isEmpty && clsIsGenericSum then + if ctx.runZincPhases then + // The mirror should be resynthesized if any child of the sealed class + // `cls` changes. See `sbt-test/source-dependencies/mirror-sum`. + val rec = ctx.compilationUnit.depRecorder + rec.addClassDependency(cls, DependencyByMemberRef) + rec.addUsedName(cls, includeSealedChildren = true) + val elemLabels = cls.children.map(c => ConstantType(Constant(c.name.toString))) def internalError(msg: => String)(using Context): Unit = @@ -727,8 +741,8 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): def recur(handlers: SpecialHandlers): TreeWithErrors = handlers match case (cls, handler) :: rest => def baseWithRefinements(tp: Type): Type = tp.dealias match - case tp @ RefinedType(parent, rname, rinfo) => - tp.derivedRefinedType(baseWithRefinements(parent), rname, rinfo) + case tp: RefinedType => + tp.derivedRefinedType(parent = baseWithRefinements(tp.parent)) case _ => tp.baseType(cls) val base = baseWithRefinements(formal) diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala index be6121e13209..8bae3a2fb3a7 100644 --- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -2,14 +2,14 @@ package dotty.tools package dotc package typer -import core._ -import ast._ -import Contexts._, ContextOps._, Constants._, Types._, Symbols._, Names._, Flags._, Decorators._ -import ErrorReporting._, Annotations._, Denotations._, SymDenotations._, StdNames._ +import core.* +import ast.* +import Contexts.*, ContextOps.*, Constants.*, Types.*, Symbols.*, Names.*, Flags.*, Decorators.* +import ErrorReporting.*, Annotations.*, Denotations.*, SymDenotations.*, StdNames.* import util.SrcPos -import NameOps._ +import NameOps.* import collection.mutable -import reporting._ +import reporting.* import Checking.{checkNoPrivateLeaks, checkNoWildcard} import cc.CaptureSet @@ -22,7 +22,11 @@ trait TypeAssigner { */ def qualifyingClass(tree: untpd.Tree, qual: Name, packageOK: Boolean)(using Context): Symbol = { def qualifies(sym: Symbol) = - sym.isClass && ( + sym.isClass && + // `this` in a polymorphic function type never refers to the desugared refinement. + // In other refinements, `this` does refer to the refinement but is deprecated + // (see `Checking#checkRefinementNonCyclic`). 
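
Stepping back to the mirror-synthesis hunks above: a small illustration, with a hypothetical case class, of why the synthesized `Mirror.ProductOf` has to be tied to the primary constructor for incremental compilation to stay sound.

```scala
import scala.deriving.Mirror

case class User(name: String, age: Int)

val m = summon[Mirror.ProductOf[User]]

// fromProduct rebuilds a User from a tuple whose shape mirrors the primary
// constructor. If User gained or lost a field, this call site would need a
// freshly synthesized mirror, which is exactly what the recorded Zinc
// dependency on the primary constructor triggers.
val u: User = m.fromProduct(("Ada", 36))
```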
+ !(sym.isRefinementClass && sym.derivesFrom(defn.PolyFunctionClass)) && ( qual.isEmpty || sym.name == qual || sym.is(Module) && sym.name.stripModuleClassSuffix == qual) @@ -103,8 +107,10 @@ trait TypeAssigner { val tpe1 = accessibleType(tpe, superAccess) if tpe1.exists then tpe1 else tpe match - case tpe: NamedType => inaccessibleErrorType(tpe, superAccess, pos) - case NoType => tpe + case tpe: NamedType => + if tpe.termSymbol.hasPublicInBinary && tpd.enclosingInlineds.nonEmpty then tpe + else inaccessibleErrorType(tpe, superAccess, pos) + case _ => tpe /** Return a potentially skolemized version of `qualTpe` to be used * as a prefix when selecting `name`. @@ -122,7 +128,7 @@ trait TypeAssigner { val qualType0 = qual1.tpe.widenIfUnstable val qualType = if !qualType0.hasSimpleKind && tree.name != nme.CONSTRUCTOR then - // constructors are selected on typeconstructor, type arguments are passed afterwards + // constructors are selected on type constructor, type arguments are passed afterwards errorType(em"$qualType0 takes type parameters", qual1.srcPos) else if !qualType0.isInstanceOf[TermType] && !qualType0.isError then errorType(em"$qualType0 is illegal as a selection prefix", qual1.srcPos) @@ -161,7 +167,7 @@ trait TypeAssigner { def importSuggestionAddendum(pt: Type)(using Context): String = "" - def notAMemberErrorType(tree: untpd.Select, qual: Tree)(using Context): ErrorType = + def notAMemberErrorType(tree: untpd.Select, qual: Tree, proto: Type)(using Context): ErrorType = val qualType = qual.tpe.widenIfUnstable def kind = if tree.isType then "type" else "value" val foundWithoutNull = qualType match @@ -173,7 +179,7 @@ trait TypeAssigner { def addendum = err.selectErrorAddendum(tree, qual, qualType, importSuggestionAddendum, foundWithoutNull) val msg: Message = if tree.name == nme.CONSTRUCTOR then em"$qualType does not have a constructor" - else NotAMember(qualType, tree.name, kind, addendum) + else NotAMember(qualType, tree.name, kind, proto, addendum) errorType(msg, tree.srcPos) def inaccessibleErrorType(tpe: NamedType, superAccess: Boolean, pos: SrcPos)(using Context): Type = @@ -202,7 +208,7 @@ trait TypeAssigner { def assignType(tree: untpd.Select, qual: Tree)(using Context): Select = val rawType = selectionType(tree, qual) val checkedType = ensureAccessible(rawType, qual.isInstanceOf[Super], tree.srcPos) - val ownType = checkedType.orElse(notAMemberErrorType(tree, qual)) + val ownType = checkedType.orElse(notAMemberErrorType(tree, qual, WildcardType)) assignType(tree, ownType) /** Normalize type T appearing in a new T by following eta expansions to @@ -293,6 +299,8 @@ trait TypeAssigner { else fntpe.resultType // fast path optimization else errorType(em"wrong number of arguments at ${ctx.phase.prev} for $fntpe: ${fn.tpe}, expected: ${fntpe.paramInfos.length}, found: ${args.length}", tree.srcPos) + case err: ErrorType => + err case t => if (ctx.settings.Ydebug.value) new FatalError("").printStackTrace() errorType(err.takesNoParamsMsg(fn, ""), tree.srcPos) @@ -401,7 +409,17 @@ trait TypeAssigner { def assignType(tree: untpd.Closure, meth: Tree, target: Tree)(using Context): Closure = tree.withType( - if (target.isEmpty) meth.tpe.widen.toFunctionType(isJava = meth.symbol.is(JavaDefined), tree.env.length) + if target.isEmpty then + def methTypeWithoutEnv(info: Type): Type = info match + case mt: MethodType => + val dropLast = tree.env.length + val paramNames = mt.paramNames.dropRight(dropLast) + val paramInfos = mt.paramInfos.dropRight(dropLast) + mt.derivedLambdaType(paramNames, 
paramInfos) + case pt: PolyType => + pt.derivedLambdaType(resType = methTypeWithoutEnv(pt.resType)) + val methodicType = if tree.env.isEmpty then meth.tpe.widen else methTypeWithoutEnv(meth.tpe.widen) + methodicType.toFunctionType(isJava = meth.symbol.is(JavaDefined)) else target.tpe) def assignType(tree: untpd.CaseDef, pat: Tree, body: Tree)(using Context): CaseDef = { @@ -512,6 +530,9 @@ trait TypeAssigner { def assignType(tree: untpd.UnApply, proto: Type)(using Context): UnApply = tree.withType(proto) + def assignType(tree: untpd.QuotePattern, proto: Type)(using Context): QuotePattern = + tree.withType(proto) + def assignType(tree: untpd.ValDef, sym: Symbol)(using Context): ValDef = tree.withType(if (sym.exists) assertExists(sym.termRef) else NoType) @@ -546,5 +567,3 @@ object TypeAssigner extends TypeAssigner: def seqLitType(tree: untpd.SeqLiteral, elemType: Type)(using Context) = tree match case tree: untpd.JavaSeqLiteral => defn.ArrayOf(elemType) case _ => if ctx.erasedTypes then defn.SeqType else defn.SeqType.appliedTo(elemType) - - diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 74be1dee9a9b..fe2a6f92eb97 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -3,55 +3,54 @@ package dotc package typer import backend.sjs.JSDefinitions -import core._ -import ast._ -import Trees._ -import Constants._ -import StdNames._ -import Scopes._ -import Denotations._ -import ProtoTypes._ -import Contexts._ -import Symbols._ -import Types._ -import SymDenotations._ -import Annotations._ -import Names._ -import NameOps._ -import NameKinds._ -import NamerOps._ -import ContextOps._ -import Flags._ -import Decorators._ -import ErrorReporting._ -import Checking._ -import Inferencing._ +import core.* +import ast.* +import Trees.* +import Constants.* +import StdNames.* +import Scopes.* +import Denotations.* +import ProtoTypes.* +import Contexts.* +import Symbols.* +import Types.* +import SymDenotations.* +import Annotations.* +import Names.* +import NameOps.* +import NameKinds.* +import NamerOps.* +import ContextOps.* +import Flags.* +import Decorators.* +import ErrorReporting.* +import Checking.* +import Inferencing.* import Dynamic.isDynamicExpansion import EtaExpansion.etaExpand import TypeComparer.CompareResult import inlines.{Inlines, PrepareInlineable} -import util.Spans._ -import util.common._ +import util.Spans.* +import util.common.* import util.{Property, SimpleIdentityMap, SrcPos} import Applications.{tupleComponentTypes, wrapDefs, defaultArgument} import collection.mutable import annotation.tailrec -import Implicits._ +import Implicits.* import util.Stats.record import config.Printers.{gadts, typr} import config.Feature import config.Feature.{sourceVersion, migrateTo3} -import config.SourceVersion._ -import rewrites.Rewrites.patch +import config.SourceVersion.* +import rewrites.Rewrites, Rewrites.patch import staging.StagingLevel -import transform.SymUtils._ -import transform.TypeUtils._ -import reporting._ -import Nullables._ -import NullOpsDecorator._ +import reporting.* +import Nullables.* +import NullOpsDecorator.* import cc.CheckCaptures import config.Config +import config.MigrationVersion import scala.annotation.constructorOnly @@ -64,6 +63,11 @@ object Typer { case NothingBound, PackageClause, WildImport, NamedImport, Inheritance, Definition def isImportPrec = this == NamedImport || this == WildImport + + /** special cases: definitions beat imports, and 
named imports beat + * wildcard imports, provided both are in contexts with same scope */ + def beats(prevPrec: BindingPrec): Boolean = + this == Definition || this == NamedImport && prevPrec == WildImport } /** Assert tree has a position, unless it is empty or a typed splice */ @@ -122,9 +126,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer with Dynamic with Checking with QuotesAndSplices - with Deriving { + with Deriving + with Migrations { - import Typer._ + import Typer.* import tpd.{cpy => _, _} import untpd.cpy @@ -211,7 +216,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * or else `NoContext` if nothing was found yet. */ def findRefRecur(previous: Type, prevPrec: BindingPrec, prevCtx: Context)(using Context): Type = { - import BindingPrec._ + import BindingPrec.* /** Check that any previously found result from an inner context * does properly shadow the new one from an outer context. @@ -225,15 +230,14 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def checkNewOrShadowed(found: Type, newPrec: BindingPrec, scala2pkg: Boolean = false)(using Context): Type = if !previous.exists || TypeComparer.isSameRef(previous, found) then found - else if (prevCtx.scope eq ctx.scope) - && (newPrec == Definition || newPrec == NamedImport && prevPrec == WildImport) - then + else if (prevCtx.scope eq ctx.scope) && newPrec.beats(prevPrec) then // special cases: definitions beat imports, and named imports beat // wildcard imports, provided both are in contexts with same scope found else if !scala2pkg && !previous.isError && !found.isError then - fail(AmbiguousReference(name, newPrec, prevPrec, prevCtx)) + fail(AmbiguousReference(name, newPrec, prevPrec, prevCtx, + isExtension = previous.termSymbol.is(ExtensionMethod) && found.termSymbol.is(ExtensionMethod))) previous /** Assemble and check alternatives to an imported reference. 
This implies: @@ -260,7 +264,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer then altImports.uncheckedNN += altImp - if Feature.enabled(Feature.relaxedExtensionImports) && altImports != null && ctx.isImportContext then + if altImports != null && ctx.isImportContext then val curImport = ctx.importInfo.uncheckedNN namedImportRef(curImport) match case altImp: TermRef => @@ -288,16 +292,19 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case ImportType(expr) => val pre = expr.tpe val denot0 = pre.memberBasedOnFlags(name, required, excluded) - .accessibleFrom(pre)(using refctx) + var accessibleDenot = denot0.accessibleFrom(pre)(using refctx) + if !accessibleDenot.exists && denot0.hasAltWith(_.symbol.is(Private)) then + accessibleDenot = pre.memberBasedOnFlags(name, required, excluded | Private) + .accessibleFrom(pre)(using refctx) // Pass refctx so that any errors are reported in the context of the // reference instead of the context of the import scope - if denot0.exists then + if accessibleDenot.exists then val denot = if checkBounds then - denot0.filterWithPredicate { mbr => + accessibleDenot.filterWithPredicate { mbr => mbr.matchesImportBound(if mbr.symbol.is(Given) then imp.givenBound else imp.wildcardBound) } - else denot0 + else accessibleDenot def isScalaJsPseudoUnion = denot.name == tpnme.raw.BAR && ctx.settings.scalajs.value && denot.symbol == JSDefinitions.jsdefn.PseudoUnionClass // Just like Scala2Unpickler reinterprets Scala.js pseudo-unions @@ -325,7 +332,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer fail(em"reference to `$name` is ambiguous; it is imported twice") found - if selector.rename == termName && selector.rename != nme.WILDCARD then + if selector.rename == termName && !selector.isUnimport then val memberName = if selector.name == termName then name else if name.isTypeName then selector.name.toTypeName @@ -439,8 +446,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if !symsMatch && !suppressErrors then report.errorOrMigrationWarning( AmbiguousReference(name, Definition, Inheritance, prevCtx)(using outer), - pos, from = `3.0`) - if migrateTo3 then + pos, MigrationVersion.Scala2to3) + if MigrationVersion.Scala2to3.needsPatch then patch(Span(pos.span.start), if prevCtx.owner == refctx.owner.enclosingClass then "this." else s"${prevCtx.owner.name}.this.") @@ -526,7 +533,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer */ def toNotNullTermRef(tree: Tree, pt: Type)(using Context): Tree = tree.tpe match case ref: TermRef - if pt != AssignProto && // Ensure it is not the lhs of Assign + if pt != LhsProto && // Ensure it is not the lhs of Assign ctx.notNullInfos.impliesNotNull(ref) && // If a reference is in the context, it is already trackable at the point we add it. // Hence, we don't use isTracked in the next line, because checking use out of order is enough. @@ -556,7 +563,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer return tree.withType(defn.AnyType) if untpd.isVarPattern(tree) && name.isTermName then return typed(desugar.patternVar(tree), pt) - else if ctx.mode.is(Mode.QuotedPattern) then + else if ctx.mode.isQuotedPattern then if untpd.isVarPattern(tree) && name.isTypeName then return typedQuotedTypeVar(tree, pt) end if @@ -565,7 +572,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // optimization, it also avoids forcing imports thus potentially avoiding // cyclic references. 
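
A hypothetical illustration of the `isExtension` flag added to `AmbiguousReference` further up: two wildcard imports each contribute an applicable extension method, so the reference stays ambiguous even after every alternative import is tried, and the flag lets the error message explain that extension methods are the culprits.

```scala
object WordOpsA:
  extension (s: String) def firstWord: String = s.takeWhile(_ != ' ')

object WordOpsB:
  extension (s: String) def firstWord: String = s.split(' ').head

def demo: String =
  import WordOpsA.*, WordOpsB.*
  // Both imported extensions type-check, so neither alternative wins and the
  // reference is reported as ambiguous, now with an extension-specific note.
  "hello world".firstWord
```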
if (name == nme.ROOTPKG) - return tree.withType(defn.RootPackage.termRef) + val tree2 = tree.withType(defn.RootPackage.termRef) + checkLegalValue(tree2, pt) + return tree2 val rawType = val saved1 = unimported @@ -582,8 +591,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer foundUnderScala2 else found finally - unimported = saved1 - foundUnderScala2 = saved2 + unimported = saved1 + foundUnderScala2 = saved2 /** Normally, returns `ownType` except if `ownType` is a constructor proxy, * and there is another shadowed type accessible with the same name that is not: @@ -655,7 +664,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer then // we are in the arguments of a this(...) constructor call errorTree(tree, em"$tree is not accessible from constructor arguments") else - errorTree(tree, MissingIdent(tree, kind, name)) + errorTree(tree, MissingIdent(tree, kind, name, pt)) end typedIdent /** (1) If this reference is neither applied nor selected, check that it does @@ -667,7 +676,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if ctx.mode.is(Mode.Pattern) && !tree.isType && !pt.isInstanceOf[ApplyingProto] - && !tree.tpe.isStable + && !tree.tpe.match + case tp: NamedType => tp.denot.hasAltWith(_.symbol.isStableMember && tp.prefix.isStable || tp.info.isStable) + case tp => tp.isStable && !isWildcardArg(tree) then report.error(StableIdentPattern(tree, pt), tree.srcPos) @@ -695,8 +706,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // There's a second trial where we try to instantiate all type variables in `qual.tpe.widen`, // but that is done only after we search for extension methods or conversions. typedSelect(tree, pt, qual) - else if defn.isSmallGenericTuple(qual.tpe) then - val elems = defn.tupleTypes(qual.tpe.widenTermRefExpr).getOrElse(Nil) + else if qual.tpe.isSmallGenericTuple then + val elems = qual.tpe.widenTermRefExpr.tupleElementTypes.getOrElse(Nil) typedSelect(tree, pt, qual.cast(defn.tupleType(elems))) else val tree1 = tryExtensionOrConversion( @@ -717,7 +728,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if checkedType1.exists then gadts.println(i"Member selection healed by GADT approximation") finish(tree1, qual1, checkedType1) - else if defn.isSmallGenericTuple(qual1.tpe) then + else if qual1.tpe.isSmallGenericTuple then gadts.println(i"Tuple member selection healed by GADT approximation") typedSelect(tree, pt, qual1) else @@ -732,7 +743,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer && selName.isTermName && !isDynamicExpansion(tree) then val tree2 = cpy.Select(tree0)(untpd.TypedSplice(qual), selName) - if pt.isInstanceOf[FunOrPolyProto] || pt == AssignProto then + if pt.isInstanceOf[FunOrPolyProto] || pt == LhsProto then assignType(tree2, TryDynamicCallType) else typedDynamicSelect(tree2, Nil, pt) @@ -742,14 +753,14 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case rawType: NamedType => inaccessibleErrorType(rawType, superAccess, tree.srcPos) case _ => - notAMemberErrorType(tree, qual)) + notAMemberErrorType(tree, qual, pt)) end typedSelect def typedSelect(tree: untpd.Select, pt: Type)(using Context): Tree = { record("typedSelect") def typeSelectOnTerm(using Context): Tree = - val qual = typedExpr(tree.qualifier, shallowSelectionProto(tree.name, pt, this)) + val qual = typedExpr(tree.qualifier, shallowSelectionProto(tree.name, pt, this, tree.nameSpan)) typedSelect(tree, pt, qual).withSpan(tree.span).computeNullable() def 
javaSelectOnType(qual: Tree)(using Context) = @@ -779,7 +790,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer tryAlternatively(typeSelectOnTerm)(fallBack) if (tree.qualifier.isType) { - val qual1 = typedType(tree.qualifier, shallowSelectionProto(tree.name, pt, this)) + val qual1 = typedType(tree.qualifier, shallowSelectionProto(tree.name, pt, this, tree.nameSpan)) assignType(cpy.Select(tree)(qual1, tree.name), qual1) } else if (ctx.isJava && tree.name.isTypeName) @@ -809,8 +820,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } def typedNumber(tree: untpd.Number, pt: Type)(using Context): Tree = { - import scala.util.FromDigits._ - import untpd.NumberKind._ + import scala.util.FromDigits.* + import untpd.NumberKind.* record("typedNumber") val digits = tree.digits val target = pt.dealias @@ -892,7 +903,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def typedNew(tree: untpd.New, pt: Type)(using Context): Tree = tree.tpt match { case templ: untpd.Template => - import untpd._ + import untpd.* var templ1 = templ def isEligible(tp: Type) = tp.exists @@ -936,7 +947,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case id: untpd.Ident if (ctx.mode is Mode.Pattern) && untpd.isVarPattern(id) => if (id.name == nme.WILDCARD || id.name == nme.WILDCARD_STAR) ifPat else { - import untpd._ + import untpd.* typed(Bind(id.name, Typed(Ident(wildName), tree.tpt)).withSpan(tree.span), pt) } case _ => ifExpr @@ -960,7 +971,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // so the expected type is the union `Seq[T] | Array[_ <: T]`. val ptArg = // FIXME(#8680): Quoted patterns do not support Array repeated arguments - if ctx.mode.is(Mode.QuotedPattern) then + if ctx.mode.isQuotedPattern then pt.translateFromRepeated(toArray = false, translateWildcard = true) else pt.translateFromRepeated(toArray = false, translateWildcard = true) @@ -1043,7 +1054,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => tree } - def typedNamedArg(tree: untpd.NamedArg, pt: Type)(using Context): NamedArg = { /* Special case for resolving types for arguments of an annotation defined in Java. * It allows that value of any type T can appear in positions where Array[T] is expected. @@ -1079,8 +1089,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typed(appliedUpdate, pt) case lhs => val locked = ctx.typerState.ownedVars - val lhsCore = typedUnadapted(lhs, AssignProto, locked) - def lhs1 = adapt(lhsCore, AssignProto, locked) + val lhsCore = typedUnadapted(lhs, LhsProto, locked) + def lhs1 = adapt(lhsCore, LhsProto, locked) def reassignmentToVal = report.error(ReassignmentToVal(lhsCore.symbol.name), tree.srcPos) @@ -1092,11 +1102,35 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // allow assignments from the primary constructor to class fields ctx.owner.name.is(TraitSetterName) || ctx.owner.isStaticConstructor + /** Mark private variables that are assigned with a prefix other than + * the `this` type of their owner with a `annotation.internal.AssignedNonLocally` + * annotation. The annotation influences the variance check for these + * variables, which is done at PostTyper. It will be removed after the + * variance check. 
+     */
+    def rememberNonLocalAssignToPrivate(sym: Symbol) = lhs1 match
+      case Select(qual, _)
+      if sym.is(Private, butNot = Local) && !sym.isAccessPrivilegedThisType(qual.tpe) =>
+        sym.addAnnotation(Annotation(defn.AssignedNonLocallyAnnot, lhs1.span))
+      case _ =>
+
     lhsCore match
       case Apply(fn, _) if fn.symbol.is(ExtensionMethod) =>
         def toSetter(fn: Tree): untpd.Tree = fn match
           case fn @ Ident(name: TermName) =>
-            untpd.cpy.Ident(fn)(name.setterName)
+            // We need to make sure that the prefix of this extension getter is
+            // retained when we transform it into a setter. Otherwise, we could
+            // end up resolving an unrelated setter from another extension. We
+            // transform the `Ident` into a `Select` to ensure that the prefix
+            // is retained with a `TypedSplice` (see `case Select` below).
+            // See tests/pos/i18713.scala for an example.
+            fn.tpe match
+              case TermRef(qual: TermRef, _) =>
+                toSetter(ref(qual).select(fn.symbol).withSpan(fn.span))
+              case TermRef(qual: ThisType, _) =>
+                toSetter(This(qual.cls).select(fn.symbol).withSpan(fn.span))
+              case TermRef(NoPrefix, _) =>
+                untpd.cpy.Ident(fn)(name.setterName)
           case fn @ Select(qual, name: TermName) =>
             untpd.cpy.Select(fn)(untpd.TypedSplice(qual), name.setterName)
           case fn @ TypeApply(fn1, targs) =>
@@ -1125,15 +1159,16 @@
       case _ => lhsCore.tpe match {
         case ref: TermRef =>
           val lhsVal = lhsCore.denot.suchThat(!_.is(Method))
-          if (canAssign(lhsVal.symbol)) {
-            // lhsBounds: (T .. Any) as seen from lhs prefix, where T is the type of lhsVal.symbol
+          val lhsSym = lhsVal.symbol
+          if canAssign(lhsSym) then
+            rememberNonLocalAssignToPrivate(lhsSym)
+            // lhsBounds: (T .. Any) as seen from lhs prefix, where T is the type of lhsSym
            // This ensures we do the as-seen-from on T with variance -1. 
Test case neg/i2928.scala val lhsBounds = - TypeBounds.lower(lhsVal.symbol.info).asSeenFrom(ref.prefix, lhsVal.symbol.owner) + TypeBounds.lower(lhsSym.info).asSeenFrom(ref.prefix, lhsSym.owner) assignType(cpy.Assign(tree)(lhs1, typed(tree.rhs, lhsBounds.loBound))) .computeAssignNullable() - } - else { + else val pre = ref.prefix val setterName = ref.name.setterName val setter = pre.member(setterName) @@ -1146,7 +1181,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => reassignmentToVal } - } case TryDynamicCallType => typedDynamicAssign(tree, pt) case tpe => @@ -1322,12 +1356,17 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer (pt1.argInfos.init, typeTree(interpolateWildcards(pt1.argInfos.last.hiBound))) case RefinedType(parent, nme.apply, mt @ MethodTpe(_, formals, restpe)) - if (defn.isNonRefinedFunction(parent) || defn.isErasedFunctionType(parent)) && formals.length == defaultArity => - (formals, untpd.DependentTypeTree(syms => restpe.substParams(mt, syms.map(_.termRef)))) - case SAMType(mt @ MethodTpe(_, formals, restpe)) => + if defn.isNonRefinedFunction(parent) && formals.length == defaultArity => + (formals, untpd.InLambdaTypeTree(isResult = true, (_, syms) => restpe.substParams(mt, syms.map(_.termRef)))) + case defn.PolyFunctionOf(mt @ MethodTpe(_, formals, restpe)) if formals.length == defaultArity => + (formals, untpd.InLambdaTypeTree(isResult = true, (_, syms) => restpe.substParams(mt, syms.map(_.termRef)))) + case SAMType(mt @ MethodTpe(_, formals, _), samParent) => + val restpe = mt.resultType match + case mt: MethodType => mt.toFunctionType(isJava = samParent.classSymbol.is(JavaDefined)) + case tp => tp (formals, if (mt.isResultDependent) - untpd.DependentTypeTree(syms => restpe.substParams(mt, syms.map(_.termRef))) + untpd.InLambdaTypeTree(isResult = true, (_, syms) => restpe.substParams(mt, syms.map(_.termRef))) else typeTree(restpe)) case _ => @@ -1423,7 +1462,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val resTpt = TypeTree(mt.nonDependentResultApprox).withSpan(body.span) val typeArgs = appDef.termParamss.head.map(_.tpt) :+ resTpt val core = - if mt.hasErasedParams then TypeTree(defn.ErasedFunctionClass.typeRef) + if mt.hasErasedParams then TypeTree(defn.PolyFunctionClass.typeRef) else val funSym = defn.FunctionSymbol(numArgs, isContextual, isImpure) val tycon = TypeTree(funSym.typeRef) @@ -1558,7 +1597,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer /** Returns the type and whether the parameter is erased */ def protoFormal(i: Int): (Type, Boolean) = if (protoFormals.length == params.length) (protoFormals(i), isDefinedErased(i)) - else (errorType(WrongNumberOfParameters(protoFormals.length), tree.srcPos), false) + else (errorType(WrongNumberOfParameters(tree, params.length, pt, protoFormals.length), tree.srcPos), false) /** Is `formal` a product type which is elementwise compatible with `params`? 
*/ def ptIsCorrectProduct(formal: Type) = @@ -1594,6 +1633,17 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => if desugared.isEmpty then + val forceDegree = + if pt.isValueType then + // Allow variables that appear invariantly in `pt` to be improved by mapping + // bottom types in their instance types to fresh type variables + new ForceDegree.Value(IfBottom.fail): + val tvmap = variances(pt) + override def canImprove(tvar: TypeVar) = + tvmap.computedVariance(tvar) == (0: Integer) + else + ForceDegree.failBottom + val inferredParams: List[untpd.ValDef] = for ((param, i) <- params.zipWithIndex) yield if (!param.tpt.isEmpty) param @@ -1601,7 +1651,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val (formalBounds, isErased) = protoFormal(i) val formal = formalBounds.loBound val isBottomFromWildcard = (formalBounds ne formal) && formal.isExactlyNothing - val knownFormal = isFullyDefined(formal, ForceDegree.failBottom) + val knownFormal = isFullyDefined(formal, forceDegree) // If the expected formal is a TypeBounds wildcard argument with Nothing as lower bound, // try to prioritize inferring from target. See issue 16405 (tests/run/16405.scala) val paramType = @@ -1612,7 +1662,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else inferredFromTarget(param, formal, calleeType, isErased, paramIndex).orElse( if knownFormal then formal0 - else errorType(AnonymousFunctionMissingParamType(param, tree, formal), param.srcPos) + else errorType(AnonymousFunctionMissingParamType(param, tree, inferredType = formal, expectedType = pt), param.srcPos) ) val paramTpt = untpd.TypedSplice( (if knownFormal then InferredTypeTree() else untpd.TypeTree()) @@ -1621,12 +1671,51 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer ) val param0 = cpy.ValDef(param)(tpt = paramTpt) if isErased then param0.withAddedFlags(Flags.Erased) else param0 - desugared = desugar.makeClosure(inferredParams, fnBody, resultTpt, isContextual, tree.span) + desugared = desugar.makeClosure(Nil, inferredParams, fnBody, resultTpt, tree.span) typed(desugared, pt) .showing(i"desugared fun $tree --> $desugared with pt = $pt", typr) } + + def typedPolyFunction(tree: untpd.PolyFunction, pt: Type)(using Context): Tree = + val tree1 = desugar.normalizePolyFunction(tree) + if (ctx.mode is Mode.Type) typed(desugar.makePolyFunctionType(tree1), pt) + else typedPolyFunctionValue(tree1, pt) + + def typedPolyFunctionValue(tree: untpd.PolyFunction, pt: Type)(using Context): Tree = + val untpd.PolyFunction(tparams: List[untpd.TypeDef] @unchecked, fun) = tree: @unchecked + val untpd.Function(vparams: List[untpd.ValDef] @unchecked, body) = fun: @unchecked + val dpt = pt.dealias + + // If the expected type is a polymorphic function with the same number of + // type and value parameters, then infer the types of value parameters from the expected type. + val inferredVParams = dpt match + case defn.PolyFunctionOf(poly @ PolyType(_, mt: MethodType)) + if tparams.lengthCompare(poly.paramNames) == 0 && vparams.lengthCompare(mt.paramNames) == 0 => + vparams.zipWithConserve(mt.paramInfos): (vparam, formal) => + // Unlike in typedFunctionValue, `formal` cannot be a TypeBounds since + // it must be a valid method parameter type. 
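For orientation, the value-parameter inference described in the comment above lets the parameter of a polymorphic function literal stay unannotated when the expected type pins it down. An illustrative sketch (invented names, relying on the inference this change introduces):

```scala
// The expected type supplies both the type parameter and the value
// parameter type, so `x` needs no annotation:
val wrap: [T] => T => List[T] =
  [T] => x => List(x)

val ints: List[Int] = wrap(1)   // T is inferred as Int at the call site
```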
+ if vparam.tpt.isEmpty && isFullyDefined(formal, ForceDegree.failBottom) then + cpy.ValDef(vparam)(tpt = new untpd.InLambdaTypeTree(isResult = false, (tsyms, vsyms) => + // We don't need to substitute `mt` by `vsyms` because we currently disallow + // dependencies between value parameters of a closure. + formal.substParams(poly, tsyms.map(_.typeRef))) + ) + else vparam + case _ => + vparams + + val resultTpt = dpt match + case defn.PolyFunctionOf(poly @ PolyType(_, mt: MethodType)) => + untpd.InLambdaTypeTree(isResult = true, (tsyms, vsyms) => + mt.resultType.substParams(mt, vsyms.map(_.termRef)).substParams(poly, tsyms.map(_.typeRef))) + case _ => untpd.TypeTree() + + val desugared = desugar.makeClosure(tparams, inferredVParams, body, resultTpt, tree.span) + typed(desugared, pt) + end typedPolyFunctionValue + def typedClosure(tree: untpd.Closure, pt: Type)(using Context): Tree = { val env1 = tree.env mapconserve (typed(_)) val meth1 = typedUnadapted(tree.meth) @@ -1635,31 +1724,26 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer meth1.tpe.widen match { case mt: MethodType => pt.findFunctionType match { - case pt @ SAMType(sam) - if !defn.isFunctionType(pt) && mt <:< sam => - // SAMs of the form C[?] where C is a class cannot be conversion targets. - // The resulting class `class $anon extends C[?] {...}` would be illegal, - // since type arguments to `C`'s super constructor cannot be constructed. - def isWildcardClassSAM = - !pt.classSymbol.is(Trait) && pt.argInfos.exists(_.isInstanceOf[TypeBounds]) + case SAMType(samMeth, samParent) + if !defn.isFunctionNType(samParent) && mt <:< samMeth => + if defn.isContextFunctionType(mt.resultType) then + report.error( + em"""Implementation restriction: cannot convert this expression to `$samParent` + |because its result type `${mt.resultType}` is a contextual function type.""", + tree.srcPos) val targetTpe = - if isFullyDefined(pt, ForceDegree.all) && !isWildcardClassSAM then - pt - else if pt.isRef(defn.PartialFunctionClass) then + if isFullyDefined(samParent, ForceDegree.all) then + samParent + else if samParent.isRef(defn.PartialFunctionClass) then // Replace the underspecified expected type by one based on the closure method type defn.PartialFunctionOf(mt.firstParamTypes.head, mt.resultType) else - report.error(em"result type of lambda is an underspecified SAM type $pt", tree.srcPos) - pt + report.error(em"result type of lambda is an underspecified SAM type $samParent", tree.srcPos) + samParent TypeTree(targetTpe) case _ => if (mt.isParamDependent) - errorTree(tree, - em"""cannot turn method type $mt into closure - |because it has internal parameter dependencies""") - else if ((tree.tpt `eq` untpd.ContextualEmptyTree) && mt.paramNames.isEmpty) - // Note implicitness of function in target type since there are no method parameters that indicate it. - TypeTree(defn.FunctionOf(Nil, mt.resType, isContextual = true)) + errorTree(tree, ClosureCannotHaveInternalParameterDependencies(mt)) else if hasCaptureConversionArg(mt.resType) then errorTree(tree, em"""cannot turn method type $mt into closure @@ -1667,6 +1751,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else EmptyTree } + case poly @ PolyType(_, mt: MethodType) => + if (mt.isParamDependent) + errorTree(tree, ClosureCannotHaveInternalParameterDependencies(poly)) + else + // Polymorphic SAMs are not currently supported (#6904). 
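As a reference point for the restriction noted above (#6904), here is a hedged sketch of what does and does not convert; `Show` and `PolyShow` are invented SAM traits:

```scala
// A monomorphic SAM converts fine:
trait Show:
  def show(x: Int): String
val s: Show = x => x.toString

// A SAM whose abstract method is itself polymorphic cannot be the target of
// a closure conversion, so an explicit anonymous class is still required:
trait PolyShow:
  def show[T](x: T): String
val p: PolyShow = new PolyShow:
  def show[T](x: T): String = x.toString
```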
+ EmptyTree case tp => if !tp.isErroneous then throw new java.lang.Error(i"internal error: closing over non-method $tp, pos = ${tree.span}") @@ -1684,8 +1774,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer checkInInlineContext("summonFrom", tree.srcPos) val cases1 = tree.cases.mapconserve { case cdef @ CaseDef(pat @ Typed(Ident(nme.WILDCARD), _), _, _) => - // case _ : T --> case evidence$n : T - cpy.CaseDef(cdef)(pat = untpd.Bind(EvidenceParamName.fresh(), pat)) + // case _ : T --> case _$n : T + cpy.CaseDef(cdef)(pat = untpd.Bind(WildcardParamName.fresh(), pat)) case cdef => cdef } typedMatchFinish(tree, tpd.EmptyTree, defn.ImplicitScrutineeTypeRef, cases1, pt) @@ -1777,7 +1867,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // TODO: move the check above to patternMatcher phase val uncheckedTpe = AnnotatedType(sel.tpe.widen, Annotation(defn.UncheckedAnnot, tree.selector.span)) tpd.cpy.Match(result)( - selector = tpd.Typed(sel, tpd.TypeTree(uncheckedTpe)), + selector = tpd.Typed(sel, new tpd.InferredTypeTree().withType(uncheckedTpe)), cases = result.cases ) case _ => @@ -1791,12 +1881,17 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer /** Special typing of Match tree when the expected type is a MatchType, * and the patterns of the Match tree and the MatchType correspond. */ - def typedDependentMatchFinish(tree: untpd.Match, sel: Tree, wideSelType: Type, cases: List[untpd.CaseDef], pt: MatchType)(using Context): Tree = { + def typedDependentMatchFinish(tree: untpd.Match, sel: Tree, wideSelType0: Type, cases: List[untpd.CaseDef], pt: MatchType)(using Context): Tree = { var caseCtx = ctx + var wideSelType = wideSelType0 + var alreadyStripped = false val cases1 = tree.cases.zip(pt.cases) .map { case (cas, tpe) => val case1 = typedCase(cas, sel, wideSelType, tpe)(using caseCtx) caseCtx = Nullables.afterPatternContext(sel, case1.pat) + if !alreadyStripped && Nullables.matchesNull(case1) then + wideSelType = wideSelType.stripNull + alreadyStripped = true case1 } .asInstanceOf[List[CaseDef]] @@ -1810,11 +1905,16 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer assignType(cpy.Match(tree)(sel, cases1), sel, cases1) } - def typedCases(cases: List[untpd.CaseDef], sel: Tree, wideSelType: Type, pt: Type)(using Context): List[CaseDef] = + def typedCases(cases: List[untpd.CaseDef], sel: Tree, wideSelType0: Type, pt: Type)(using Context): List[CaseDef] = var caseCtx = ctx + var wideSelType = wideSelType0 + var alreadyStripped = false cases.mapconserve { cas => val case1 = typedCase(cas, sel, wideSelType, pt)(using caseCtx) caseCtx = Nullables.afterPatternContext(sel, case1.pat) + if !alreadyStripped && Nullables.matchesNull(case1) then + wideSelType = wideSelType.stripNull + alreadyStripped = true case1 } @@ -1960,7 +2060,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def addCanThrowCapabilities(expr: untpd.Tree, cases: List[CaseDef])(using Context): untpd.Tree = def makeCanThrow(tp: Type): untpd.Tree = untpd.ValDef( - EvidenceParamName.fresh(), + CanThrowEvidenceName.fresh(), untpd.TypeTree(defn.CanThrowClass.typeRef.appliedTo(tp)), untpd.ref(defn.Compiletime_erasedValue)) .withFlags(Given | Final | Erased) @@ -2042,12 +2142,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } } - def typedInlined(tree: untpd.Inlined, pt: Type)(using Context): Tree = { - val (bindings1, exprCtx) = typedBlockStats(tree.bindings) - val expansion1 = typed(tree.expansion, pt)(using 
inlineContext(tree.call)(using exprCtx)) - assignType(cpy.Inlined(tree)(tree.call, bindings1.asInstanceOf[List[MemberDef]], expansion1), - bindings1, expansion1) - } + def typedInlined(tree: untpd.Inlined, pt: Type)(using Context): Tree = + throw new UnsupportedOperationException("cannot type check an Inlined node") def completeTypeTree(tree: untpd.TypeTree, pt: Type, original: untpd.Tree)(using Context): TypeTree = tree.withSpan(original.span).withAttachmentsFrom(original) @@ -2073,6 +2169,18 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => completeTypeTree(InferredTypeTree(), pt, tree) + def typedInLambdaTypeTree(tree: untpd.InLambdaTypeTree, pt: Type)(using Context): Tree = + val tp = + if tree.isResult then pt // See InLambdaTypeTree logic in Namer#valOrDefDefSig. + else + val lambdaCtx = ctx.outersIterator.dropWhile(_.owner.name ne nme.ANON_FUN).next() + // A lambda has at most one type parameter list followed by exactly one term parameter list. + // Parameters are entered in order in the scope of the lambda. + val (tsyms: List[TypeSymbol @unchecked], vsyms: List[TermSymbol @unchecked]) = + lambdaCtx.scope.toList.partition(_.isType): @unchecked + tree.tpFun(tsyms, vsyms) + completeTypeTree(InferredTypeTree(), tp, tree) + def typedSingletonTypeTree(tree: untpd.SingletonTypeTree)(using Context): SingletonTypeTree = { val ref1 = typedExpr(tree.ref, SingletonTypeProto) checkStable(ref1.tpe, tree.srcPos, "singleton type") @@ -2137,7 +2245,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // any references to other parameter types of the underlying hk lambda // in order not to get orphan parameters. Test case in pos/i15564.scala. // Note 1: It would be better to substitute actual arguments for corresponding - // formal paramaters, but it looks very hard to do this at the point where + // formal parameters, but it looks very hard to do this at the point where // a bound type variable is created. // Note 2: If the type constructor is a class type, no sanitization is needed // since we can refer to the other parameters with dependent types C[...]#X. @@ -2163,7 +2271,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val (desugaredArg, argPt) = if ctx.mode.is(Mode.Pattern) then (if (untpd.isVarPattern(arg)) desugar.patternVar(arg) else arg, tparamBounds) - else if ctx.mode.is(Mode.QuotedPattern) then + else if ctx.mode.isQuotedPattern then (arg, tparamBounds) else (arg, WildcardType) @@ -2327,7 +2435,13 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // wrt to operand order for `&`, we include the explicit subtype test here. // See also #5649.
then body1.tpe - else pt & body1.tpe + else body1.tpe match + case btpe: TypeRef + if btpe.symbol == defn.TupleXXLClass && pt.tupleElementTypes.isDefined => + // leave the original tuple type; don't mix with & TupleXXL which would only obscure things + pt + case _ => + pt & body1.tpe val sym = newPatternBoundSymbol(name, symTp, tree.span) if (pt == defn.ImplicitScrutineeTypeRef || tree.mods.is(Given)) sym.setFlag(Given) if (ctx.mode.is(Mode.InPatternAlternative)) @@ -2415,6 +2529,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def typedValDef(vdef: untpd.ValDef, sym: Symbol)(using Context): Tree = { val ValDef(name, tpt, _) = vdef + checkNonRootName(vdef.name, vdef.nameSpan) completeAnnotations(vdef, sym) if (sym.isOneOf(GivenOrImplicit)) checkImplicitConversionDefOK(sym) if sym.is(Module) then checkNoModuleClash(sym) @@ -2424,7 +2539,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case rhs => typedExpr(rhs, tpt1.tpe.widenExpr) } val vdef1 = assignType(cpy.ValDef(vdef)(name, tpt1, rhs1), sym) - postProcessInfo(sym) + postProcessInfo(vdef1, sym) vdef1.setDefTree } @@ -2448,6 +2563,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // hence we special case it until `erased` is no longer experimental. sym.setFlag(Erased) val DefDef(name, paramss, tpt, _) = ddef + checkNonRootName(ddef.name, ddef.nameSpan) completeAnnotations(ddef, sym) val paramss1 = paramss.nestedMapConserve(typed(_)).asInstanceOf[List[ParamClause]] for case ValDefs(vparams) <- paramss1 do @@ -2533,15 +2649,16 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val ddef2 = assignType(cpy.DefDef(ddef)(name, paramss1, tpt1, rhs1), sym) - postProcessInfo(sym) + postProcessInfo(ddef2, sym) ddef2.setDefTree //todo: make sure dependent method types do not depend on implicits or by-name params } /** (1) Check that the signature of the class member does not return a repeated parameter type * (2) If info is an erased class, set erased flag of member + * (3) Check that erased classes are not parameters of polymorphic functions. */ - private def postProcessInfo(sym: Symbol)(using Context): Unit = + private def postProcessInfo(mdef: MemberDef, sym: Symbol)(using Context): Unit = if (!sym.isOneOf(Synthetic | InlineProxy | Param) && sym.info.finalResultType.isRepeatedParam) report.error(em"Cannot return repeated parameter type ${sym.info.finalResultType}", sym.srcPos) if !sym.is(Module) && !sym.isConstructor && sym.info.finalResultType.isErasedClass then @@ -2660,6 +2777,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer checkEnumParent(cls, firstParent) + if defn.ScalaValueClasses()(cls) && ctx.settings.YcompileScala2Library.value then + constr1.symbol.resetFlag(Private) + val self1 = typed(self)(using ctx.outer).asInstanceOf[ValDef] // outer context where class members are not visible if (self1.tpt.tpe.isError || classExistsOnSelf(cls.unforcedDecls, self1)) // fail fast to avoid typing the body with an error type @@ -2692,22 +2812,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // check value class constraints checkDerivedValueClass(cls, body1) - // check PolyFunction constraints (no erased functions!) 
- if parents1.exists(_.tpe.classSymbol eq defn.PolyFunctionClass) then - body1.foreach { - case ddef: DefDef => - ddef.paramss.foreach { params => - val erasedParam = params.collectFirst { case vdef: ValDef if vdef.symbol.is(Erased) => vdef } - erasedParam.foreach { p => - report.error(em"Implementation restriction: erased classes are not allowed in a poly function definition", p.srcPos) - } - } - case _ => - } - val effectiveOwner = cls.owner.skipWeakOwner - if !cls.isRefinementClass - && !cls.isAllOf(PrivateLocal) + if cls.is(ModuleClass) && effectiveOwner.is(Trait) && !effectiveOwner.derivesFrom(defn.ObjectClass) then @@ -2804,6 +2910,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val pkg = pid1.symbol pid1 match case pid1: RefTree if pkg.is(Package) => + if ctx.owner != defn.RootClass // valid top-level "package _root_" + && ctx.owner != defn.EmptyPackageClass // valid "package _root_" after parser's "package " wrapper + then + checkNonRootName(pid1.name, pid1.span) inContext(ctx.packageContext(tree, pkg)) { // If it exists, complete the class containing the top-level definitions // before typing any statement in the package to avoid cycles as in i13669.scala @@ -2832,7 +2942,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if Feature.ccEnabled && (cls == defn.RetainsAnnot || cls == defn.RetainsByNameAnnot) then - CheckCaptures.checkWellformed(annot1) + CheckCaptures.checkWellformed(arg1, annot1) if arg1.isType then assignType(cpy.Annotated(tree)(arg1, annot1), arg1, annot1) else @@ -2868,46 +2978,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else tree1 } - def typedAsFunction(tree: untpd.PostfixOp, pt: Type)(using Context): Tree = { - val untpd.PostfixOp(qual, Ident(nme.WILDCARD)) = tree: @unchecked - val pt1 = if (defn.isFunctionType(pt)) pt else AnyFunctionProto - val nestedCtx = ctx.fresh.setNewTyperState() - val res = typed(qual, pt1)(using nestedCtx) - res match { - case closure(_, _, _) => - case _ => - val recovered = typed(qual)(using ctx.fresh.setExploreTyperState()) - report.errorOrMigrationWarning(OnlyFunctionsCanBeFollowedByUnderscore(recovered.tpe.widen), tree.srcPos, from = `3.0`) - if (migrateTo3) { - // Under -rewrite, patch `x _` to `(() => x)` - patch(Span(tree.span.start), "(() => ") - patch(Span(qual.span.end, tree.span.end), ")") - return typed(untpd.Function(Nil, qual), pt) - } - } - nestedCtx.typerState.commit() - if sourceVersion.isAtLeast(future) then - lazy val (prefix, suffix) = res match { - case Block(mdef @ DefDef(_, vparams :: Nil, _, _) :: Nil, _: Closure) => - val arity = vparams.length - if (arity > 0) ("", "") else ("(() => ", "())") - case _ => - ("(() => ", ")") - } - def remedy = - if ((prefix ++ suffix).isEmpty) "simply leave out the trailing ` _`" - else s"use `$prefix$suffix` instead" - report.errorOrMigrationWarning( - em"""The syntax ` _` is no longer supported; - |you can $remedy""", - tree.srcPos, - from = future) - if sourceVersion.isMigrating then - patch(Span(tree.span.start), prefix) - patch(Span(qual.span.end, tree.span.end), suffix) - end if - res - } + override def typedAsFunction(tree: untpd.PostfixOp, pt: Type)(using Context): Tree = + migrate(super.typedAsFunction(tree, pt)) /** Translate infix operation expression `l op r` to * @@ -3025,13 +3097,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case tree: untpd.TypeDef => // separate method to keep dispatching method `typedNamed` short which might help the JIT def typedTypeOrClassDef: 
Tree = - if tree.name eq tpnme.? then - val addendum = if sym.owner.is(TypeParam) - then ", use `_` to denote a higher-kinded type parameter" - else "" - val namePos = tree.sourcePos.withSpan(tree.nameSpan) - report.errorOrMigrationWarning( - em"`?` is not a valid type name$addendum", namePos, from = `3.0`) + migrate(kindProjectorQMark(tree, sym)) if tree.isClassDef then typedClassDef(tree, sym.asClass)(using ctx.localContext(tree, sym)) else @@ -3056,6 +3122,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case tree: untpd.Block => typedBlock(desugar.block(tree), pt)(using ctx.fresh.setNewScope) case tree: untpd.If => typedIf(tree, pt) case tree: untpd.Function => typedFunction(tree, pt) + case tree: untpd.PolyFunction => typedPolyFunction(tree, pt) case tree: untpd.Closure => typedClosure(tree, pt) case tree: untpd.Import => typedImport(tree) case tree: untpd.Export => typedExport(tree) @@ -3083,26 +3150,45 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case tree: untpd.TypedSplice => typedTypedSplice(tree) case tree: untpd.UnApply => typedUnApply(tree, pt) case tree: untpd.Tuple => typedTuple(tree, pt) - case tree: untpd.DependentTypeTree => completeTypeTree(untpd.InferredTypeTree(), pt, tree) + case tree: untpd.InLambdaTypeTree => typedInLambdaTypeTree(tree, pt) case tree: untpd.InfixOp => typedInfixOp(tree, pt) case tree: untpd.ParsedTry => typedTry(tree, pt) case tree @ untpd.PostfixOp(qual, Ident(nme.WILDCARD)) => typedAsFunction(tree, pt) case untpd.EmptyTree => tpd.EmptyTree case tree: untpd.Quote => typedQuote(tree, pt) case tree: untpd.Splice => typedSplice(tree, pt) + case tree: untpd.QuotePattern => typedQuotePattern(tree, pt) case tree: untpd.SplicePattern => typedSplicePattern(tree, pt) case tree: untpd.MacroTree => report.error("Unexpected macro", tree.srcPos); tpd.nullLiteral // ill-formed code may reach here case tree: untpd.Hole => typedHole(tree, pt) case _ => typedUnadapted(desugar(tree, pt), pt, locked) } + def handleTypeError(ex: TypeError): Tree = ex match + case ex: CyclicReference + if ctx.reporter.errorsReported + && xtree.span.isZeroExtent + && ex.isVal => + // Don't report a "recursive val ... needs type" if errors were reported + // previously and the span of the offending tree is empty. In this case, + // it's most likely that this is desugared code, and the error message would + // be redundant and confusing. + xtree.withType(ErrorType(ex.toMessage)) + case _ => + // Use focussed sourcePos since tree might be a large definition + // and a large error span would hide all errors in interior. + // TODO: Not clear that hiding is what we want, actually + errorTree(xtree, ex, xtree.srcPos.focus) + try val ifpt = defn.asContextFunctionType(pt) val result = if ifpt.exists + && defn.functionArity(ifpt) > 0 // ContextFunction0 is only used after ElimByName && xtree.isTerm && !untpd.isContextualClosure(xtree) && !ctx.mode.is(Mode.Pattern) + && !xtree.isInstanceOf[SplicePattern] && !ctx.isAfterTyper && !ctx.isInlineContext then @@ -3111,12 +3197,14 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case xtree: untpd.NameTree => typedNamed(xtree, pt) case xtree => typedUnnamed(xtree) + val unsimplifiedType = result.tpe simplify(result, pt, locked) - catch case ex: TypeError => errorTree(xtree, ex, xtree.srcPos.focus) - // use focussed sourcePos since tree might be a large definition - // and a large error span would hide all errors in interior. 
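The `CyclicReference` case handled by `handleTypeError` above typically originates from definitions whose type can only be computed from themselves. An illustrative snippet (deliberately ill-typed; shown only to exercise that error path):

```scala
object CyclicDemo:
  def f =
    // Typing `xs` requires the type of `xs` itself:
    // error: Recursive value xs needs type
    val xs = xs.map(identity)
    xs
```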
- // TODO: Not clear that hiding is what we want, actually - } + result.tpe.stripTypeVar match + case e: ErrorType if !unsimplifiedType.isErroneous => errorTree(xtree, e.msg, xtree.srcPos) + case _ => result + catch case ex: TypeError => + handleTypeError(ex) + } } /** Interpolate and simplify the type of the given tree. */ @@ -3133,6 +3221,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer protected def makeContextualFunction(tree: untpd.Tree, pt: Type)(using Context): Tree = { val defn.FunctionOf(formals, _, true) = pt.dropDependentRefinement: @unchecked + val paramNamesOrNil = pt match + case RefinedType(_, _, rinfo: MethodType) => rinfo.paramNames + case _ => Nil // The getter of default parameters may reach here. // Given the code below @@ -3157,15 +3248,15 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val paramTypes = { val hasWildcard = formals.exists(_.existsPart(_.isInstanceOf[WildcardType], StopAt.Static)) if hasWildcard then formals.map(_ => untpd.TypeTree()) - else formals.map(untpd.TypeTree) + else formals.map(formal => untpd.TypeTree(formal.loBound)) // about loBound, see tests/pos/i18649.scala } - val erasedParams = pt.dealias match { - case RefinedType(parent, nme.apply, mt: MethodType) => mt.erasedParams + val erasedParams = pt match { + case defn.PolyFunctionOf(mt: MethodType) => mt.erasedParams case _ => paramTypes.map(_ => false) } - val ifun = desugar.makeContextualFunction(paramTypes, tree, erasedParams) + val ifun = desugar.makeContextualFunction(paramTypes, paramNamesOrNil, tree, erasedParams) typr.println(i"make contextual function $tree / $pt ---> $ifun") typedFunctionValue(ifun, pt) } @@ -3235,7 +3326,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer traverse(xtree :: rest) case stat :: rest => val stat1 = typed(stat)(using ctx.exprContext(stat, exprOwner)) - if !checkInterestingResultInStatement(stat1) then checkStatementPurity(stat1)(stat, exprOwner) + if !Linter.warnOnInterestingResultInStatement(stat1) then checkStatementPurity(stat1)(stat, exprOwner) buf += stat1 traverse(rest)(using stat1.nullableContext) case nil => @@ -3439,7 +3530,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer then Some(adapt(tree, pt, locked)) else - val selProto = SelectionProto(name, pt, NoViewsAllowed, privateOK = false) + val selProto = SelectionProto(name, pt, NoViewsAllowed, privateOK = false, tree.nameSpan) if selProto.isMatchedBy(qual.tpe) || tree.hasAttachment(InsertedImplicitOnQualifier) then None else @@ -3464,7 +3555,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer (tree: untpd.Select, pt: Type, mbrProto: Type, qual: Tree, locked: TypeVars, compat: Compatibility, inSelect: Boolean) (using Context): Tree = - def selectionProto = SelectionProto(tree.name, mbrProto, compat, privateOK = inSelect) + def selectionProto = SelectionProto(tree.name, mbrProto, compat, privateOK = inSelect, tree.nameSpan) def tryExtension(using Context): Tree = val altImports = new mutable.ListBuffer[TermRef]() @@ -3482,7 +3573,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val app = tryExtMethod(alt)(using nestedCtx) (if nestedCtx.reporter.hasErrors then failures else successes) += ((app, nestedCtx.typerState)) - typr.println(i"multiple extensioin methods, success: ${successes.toList}, failure: ${failures.toList}") + typr.println(i"multiple extension methods, success: ${successes.toList}, failure: ${failures.toList}") def pick(alt: (Tree, TyperState)): Tree = val (app, 
ts) = alt @@ -3516,8 +3607,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val remembered = // report AmbiguousReferences as priority, otherwise last error (errs.filter(_.msg.isInstanceOf[AmbiguousReference]) ++ errs).take(1) for err <- remembered do + val tree = if app.isEmpty then qual else app rememberSearchFailure(qual, - SearchFailure(app.withType(FailedExtension(app, selectionProto, err.msg)))) + SearchFailure(tree.withType(FailedExtension(tree, selectionProto, err.msg)))) catch case ex: TypeError => nestedFailure(ex) // try an implicit conversion or given extension @@ -3680,24 +3772,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def adaptToArgs(wtp: Type, pt: FunProto): Tree = wtp match { case wtp: MethodOrPoly => def methodStr = methPart(tree).symbol.showLocated - if (matchingApply(wtp, pt)) + if matchingApply(wtp, pt) then + migrate(contextBoundParams(tree, wtp, pt)) if needsTupledDual(wtp, pt) then adapt(tree, pt.tupledDual, locked) else tree else if wtp.isContextualMethod then - def isContextBoundParams = wtp.stripPoly match - case MethodType(EvidenceParamName(_) :: _) => true - case _ => false - if sourceVersion == `future-migration` && isContextBoundParams && pt.args.nonEmpty - then // Under future-migration, don't infer implicit arguments yet for parameters - // coming from context bounds. Issue a warning instead and offer a patch. - def rewriteMsg = Message.rewriteNotice("This code", `future-migration`) - report.migrationWarning( - em"""Context bounds will map to context parameters. - |A `using` clause is needed to pass explicit arguments to them.$rewriteMsg""", tree.srcPos) - patch(Span(pt.args.head.span.start), "using ") - tree - else - adaptNoArgs(wtp) // insert arguments implicitly + adaptNoArgs(wtp) // insert arguments implicitly else if (tree.symbol.isPrimaryConstructor && tree.symbol.info.firstParamTypes.isEmpty) readapt(tree.appliedToNone) // insert () to primary constructors else @@ -3826,7 +3906,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if (arg.tpe.isError) Nil else untpd.NamedArg(pname, untpd.TypedSplice(arg)) :: Nil } val app = cpy.Apply(tree)(untpd.TypedSplice(tree), namedArgs) - if (wtp.isContextualMethod) app.setApplyKind(ApplyKind.Using) + val needsUsing = wtp.isContextualMethod || wtp.match + case MethodType(ContextBoundParamName(_) :: _) => sourceVersion.isAtLeast(`3.4`) + case _ => false + if needsUsing then app.setApplyKind(ApplyKind.Using) typr.println(i"try with default implicit args $app") typed(app, pt, locked) else issueErrors() @@ -3878,16 +3961,23 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def adaptNoArgsUnappliedMethod(wtp: MethodType, functionExpected: Boolean, arity: Int): Tree = { /** Is reference to this symbol `f` automatically expanded to `f()`? */ def isAutoApplied(sym: Symbol): Boolean = + lazy val msg = MissingEmptyArgumentList(sym.show, tree) + sym.isConstructor || sym.matchNullaryLoosely - || Feature.warnOnMigration(MissingEmptyArgumentList(sym.show), tree.srcPos, version = `3.0`) - && { patch(tree.span.endPos, "()"); true } + || Feature.warnOnMigration(msg, tree.srcPos, version = `3.0`) + && { + msg.actions + .headOption + .foreach(Rewrites.applyAction) + true + } /** If this is a selection prototype of the form `.apply(...): R`, return the nested * function prototype `(...)R`. Otherwise `pt`. 
*/ def ptWithoutRedundantApply: Type = pt.revealIgnored match - case SelectionProto(nme.apply, mpt, _, _) => + case SelectionProto(nme.apply, mpt, _, _, _) => mpt.revealIgnored match case fpt: FunProto => fpt case _ => pt @@ -3895,12 +3985,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // Reasons NOT to eta expand: // - we reference a constructor - // - we reference a typelevel method // - we are in a pattern // - the current tree is a synthetic apply which is not expandable (eta-expansion would simply undo that) if arity >= 0 && !tree.symbol.isConstructor - && !tree.symbol.isAllOf(InlineMethod) && !ctx.mode.is(Mode.Pattern) && !(isSyntheticApply(tree) && !functionExpected) then @@ -3909,10 +3997,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // Ignore `.apply` in `m.apply(...)`; it will later be simplified in typedSelect to `m(...)` adapt1(tree, pt1, locked) else - if (!defn.isFunctionType(pt)) + if (!defn.isFunctionNType(pt)) pt match { - case SAMType(_) if !pt.classSymbol.hasAnnotation(defn.FunctionalInterfaceAnnot) => - report.warning(em"${tree.symbol} is eta-expanded even though $pt does not have the @FunctionalInterface annotation.", tree.srcPos) + case SAMType(_, samParent) if !pt1.classSymbol.hasAnnotation(defn.FunctionalInterfaceAnnot) => + report.warning(em"${tree.symbol} is eta-expanded even though $samParent does not have the @FunctionalInterface annotation.", tree.srcPos) case _ => } simplify(typed(etaExpand(tree, wtp, arity), pt), pt, locked) @@ -3938,8 +4026,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if (implicitFun || caseCompanion) && !isApplyProto(pt) && pt != SingletonTypeProto - && pt != AssignProto + && pt != LhsProto && !ctx.mode.is(Mode.Pattern) + && !tree.isInstanceOf[SplicePattern] && !ctx.isAfterTyper && !ctx.isInlineContext then @@ -3995,6 +4084,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer res } => insertGadtCast(tree, wtp, pt) + case CompareResult.OKwithOpaquesUsed if !tree.tpe.frozen_<:<(pt)(using ctx.withOwner(defn.RootClass)) => + // guard to avoid extra Typed trees, eg.
from testSubType(O.T, O.T) which returns OKwithOpaquesUsed + Typed(tree, TypeTree(pt)) case _ => //typr.println(i"OK ${tree.tpe}\n${TypeComparer.explained(_.isSubType(tree.tpe, pt))}") // uncomment for unexpected successes tree @@ -4024,7 +4116,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def adaptNoArgs(wtp: Type): Tree = { val ptNorm = underlyingApplied(pt) - def functionExpected = defn.isFunctionType(ptNorm) + def functionExpected = defn.isFunctionNType(ptNorm) def needsEta = pt.revealIgnored match case _: SingletonType | _: FunOrPolyProto => false case _ => true @@ -4032,8 +4124,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer wtp match { case wtp: ExprType => readaptSimplified(tree.withType(wtp.resultType)) - case wtp: MethodType if wtp.isImplicitMethod && - ({ resMatch = constrainResult(tree.symbol, wtp, sharpenedPt); resMatch } || !functionExpected) => + case wtp: MethodType + if wtp.isImplicitMethod + && ({ resMatch = constrainResult(tree.symbol, wtp, sharpenedPt); resMatch} || !functionExpected) + && !ctx.mode.is(Mode.InQuotePatternHoasArgs) => if (resMatch || ctx.mode.is(Mode.ImplicitsEnabled)) adaptNoArgsImplicitMethod(wtp) else @@ -4075,9 +4169,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } } - def toSAM(tree: Tree): Tree = tree match { - case tree: Block => tpd.cpy.Block(tree)(tree.stats, toSAM(tree.expr)) - case tree: Closure => cpy.Closure(tree)(tpt = TypeTree(pt)).withType(pt) + def toSAM(tree: Tree, samParent: Type): Tree = tree match { + case tree: Block => tpd.cpy.Block(tree)(tree.stats, toSAM(tree.expr, samParent)) + case tree: Closure => cpy.Closure(tree)(tpt = TypeTree(samParent)).withType(samParent) } def adaptToSubType(wtp: Type): Tree = @@ -4092,7 +4186,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer || x.tag == LongTag && cls == defn.DoubleClass && x.longValue.toDouble.toLong != x.longValue then report.warning(LossyWideningConstantConversion(x.tpe, pt), tree.srcPos) - return adaptConstant(tree, ConstantType(converted)) + return readapt(adaptConstant(tree, ConstantType(converted))) case _ => val captured = captureWildcardsCompat(wtp, pt) @@ -4104,25 +4198,25 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // local adaptation makes sure every adapted tree conforms to its pt // so will take the code path that decides on inlining val tree1 = adapt(tree, WildcardType, locked) - checkStatementPurity(tree1)(tree, ctx.owner) + checkStatementPurity(tree1)(tree, ctx.owner, isUnitExpr = true) if (!ctx.isAfterTyper && !tree.isInstanceOf[Inlined] && ctx.settings.WvalueDiscard.value && !isThisTypeResult(tree)) { report.warning(ValueDiscarding(tree.tpe), tree.srcPos) } - return tpd.Block(tree1 :: Nil, Literal(Constant(()))) + return tpd.Block(tree1 :: Nil, unitLiteral) } // convert function literal to SAM closure tree match { case closure(Nil, id @ Ident(nme.ANON_FUN), _) - if defn.isFunctionType(wtp) && !defn.isFunctionType(pt) => + if defn.isFunctionNType(wtp) && !defn.isFunctionNType(pt) => pt match { - case SAMType(sam) - if wtp <:< sam.toFunctionType(isJava = pt.classSymbol.is(JavaDefined)) => + case SAMType(samMeth, samParent) + if wtp <:< samMeth.toFunctionType(isJava = samParent.classSymbol.is(JavaDefined)) => // was ... && isFullyDefined(pt, ForceDegree.flipBottom) // but this prevents case blocks from implementing polymorphic partial functions, // since we do not know the result parameter a priori. Have to wait until the // body is typechecked. 
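The SAM adaptation above drives conversions of the following shape (`StringPred` is an invented functional interface; the `PartialFunction` case shows the underspecified expected type being rebuilt from the closure's method type):

```scala
trait StringPred:
  def test(s: String): Boolean

// The closure's method type is checked against the SAM's method type and the
// closure is then retyped with the SAM parent as its target type:
val p: StringPred = s => s.nonEmpty

// For PartialFunction, the expected type is reconstructed from the closure's
// first parameter and result types, so case blocks keep working:
val pf: PartialFunction[Int, String] =
  case 1 => "one"
```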
- return toSAM(tree) + return toSAM(tree, samParent) case _ => } case _ => } @@ -4185,10 +4279,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer AppliedType(tree.tpe, tp.typeParams.map(Function.const(TypeBounds.empty))) else // Eta-expand higher-kinded type - tree.tpe.EtaExpand(tp.typeParamSymbols) + tree.tpe.etaExpand(tp.typeParamSymbols) tree.withType(tp1) } - if (ctx.mode.is(Mode.Pattern) || ctx.mode.is(Mode.QuotedPattern) || tree1.tpe <:< pt) tree1 + if (ctx.mode.is(Mode.Pattern) || ctx.mode.isQuotedPattern || tree1.tpe <:< pt) tree1 else err.typeMismatch(tree1, pt) } @@ -4244,14 +4338,20 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => adaptOverloaded(ref) } - case poly: PolyType if !(ctx.mode is Mode.Type) => + case poly: PolyType + if !(ctx.mode is Mode.Type) && dummyTreeOfType.unapply(tree).isEmpty => + // If we are in a conversion from a TermRef with polymorphic underlying + // type, give up. In this case the typed `null` literal cannot be instantiated. + // Test case was i18695.scala, but it got fixed by a different tweak in #18719. + // We leave the test for this condition in as a defensive measure in case + // it arises somewhere else. if isApplyProxy(tree) then newExpr else if pt.isInstanceOf[PolyProto] then tree else var typeArgs = tree match case Select(qual, nme.CONSTRUCTOR) => qual.tpe.widenDealias.argTypesLo.map(TypeTree(_)) case _ => Nil - if typeArgs.isEmpty then typeArgs = constrained(poly, tree)._2 + if typeArgs.isEmpty then typeArgs = constrained(poly, tree)._2.map(_.wrapInTypeTree(tree)) convertNewGenericArray(readapt(tree.appliedToTypeTrees(typeArgs))) case wtp => val isStructuralCall = wtp.isValueType && isStructuralTermSelectOrApply(tree) @@ -4286,7 +4386,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer protected def matchingApply(methType: MethodOrPoly, pt: FunProto)(using Context): Boolean = val isUsingApply = pt.applyKind == ApplyKind.Using methType.isContextualMethod == isUsingApply || methType.isImplicitMethod && isUsingApply // for a transition allow `using` arguments for regular implicit parameters /** Check that `tree == x: pt` is typeable. Used when checking a pattern * against a selector of type `pt`. This implementation accounts for @@ -4298,110 +4398,19 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer tree match case _: RefTree | _: Literal if !isVarPattern(tree) && !(pt <:< tree.tpe) => - withMode(Mode.GadtConstraintInference) { + withMode(Mode.GadtConstraintInference): TypeComparer.constrainPatternType(tree.tpe, pt) - } - // approximate type params with bounds - def approx = new ApproximatingTypeMap { - var alreadyExpanding: List[TypeRef] = Nil - def apply(tp: Type) = tp.dealias match - case tp: TypeRef if !tp.symbol.isClass => - if alreadyExpanding contains tp then tp else - val saved = alreadyExpanding - alreadyExpanding ::= tp - val res = expandBounds(tp.info.bounds) - alreadyExpanding = saved - res - case _ => - mapOver(tp) - } + Linter.warnOnImplausiblePattern(tree, pt) - // Is it certain that a value of `tree.tpe` is never a subtype of `pt`?
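The check being delegated to `Linter.warnOnImplausiblePattern` targets constant or stable patterns that can never match the scrutinee. A hedged sketch of the rejected shape (the offending line is left commented out so the snippet compiles):

```scala
// Int and String share no common subclass, so such a pattern can never
// succeed and is reported instead of silently compiling to an `equals` test:
def answer(s: String): String = s match
  case "yes" => "ok"
  // case 1  => "one"   // rejected: pattern type Int is incompatible with String
  case _     => "other"
```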
- // It is true if either - // - the class of `tree.tpe` and class of `pt` cannot have common subclass, or - // - `tree` is an object or enum value, which cannot possibly be a subtype of `pt` - val isDefiniteNotSubtype = { - val clsA = tree.tpe.widenDealias.classSymbol - val clsB = pt.dealias.classSymbol - clsA.exists && clsB.exists - && clsA != defn.NullClass - && (!clsA.isNumericValueClass && !clsB.isNumericValueClass) // approximation for numeric conversion and boxing - && !clsA.asClass.mayHaveCommonChild(clsB.asClass) - || tree.symbol.isOneOf(Module | Enum) - && !(tree.tpe frozen_<:< pt) // fast track - && !(tree.tpe frozen_<:< approx(pt)) - } - - if isDefiniteNotSubtype then - // We could check whether `equals` is overridden. - // Reasons for not doing so: - // - it complicates the protocol - // - such code patterns usually implies hidden errors in the code - // - it's safe/sound to reject the code - report.error(TypeMismatch(tree.tpe, pt, Some(tree), "\npattern type is incompatible with expected type"), tree.srcPos) - else - val cmp = - untpd.Apply( - untpd.Select(untpd.TypedSplice(tree), nme.EQ), - untpd.TypedSplice(dummyTreeOfType(pt))) - typedExpr(cmp, defn.BooleanType) + val cmp = + untpd.Apply( + untpd.Select(untpd.TypedSplice(tree), nme.EQ), + untpd.TypedSplice(dummyTreeOfType(pt))) + typedExpr(cmp, defn.BooleanType) case _ => - private def checkInterestingResultInStatement(t: Tree)(using Context): Boolean = { - def isUninterestingSymbol(sym: Symbol): Boolean = - sym == NoSymbol || - sym.isConstructor || - sym.is(Package) || - sym.isPackageObject || - sym == defn.BoxedUnitClass || - sym == defn.AnyClass || - sym == defn.AnyRefAlias || - sym == defn.AnyValClass - def isUninterestingType(tpe: Type): Boolean = - tpe == NoType || - tpe.typeSymbol == defn.UnitClass || - defn.isBottomClass(tpe.typeSymbol) || - tpe =:= defn.UnitType || - tpe.typeSymbol == defn.BoxedUnitClass || - tpe =:= defn.AnyValType || - tpe =:= defn.AnyType || - tpe =:= defn.AnyRefType - def isJavaApplication(t: Tree): Boolean = t match { - case Apply(f, _) => f.symbol.is(JavaDefined) && !defn.ObjectClass.isSubClass(f.symbol.owner) - case _ => false - } - def checkInterestingShapes(t: Tree): Boolean = t match { - case If(_, thenpart, elsepart) => checkInterestingShapes(thenpart) || checkInterestingShapes(elsepart) - case Block(_, res) => checkInterestingShapes(res) - case Match(_, cases) => cases.exists(k => checkInterestingShapes(k.body)) - case _ => checksForInterestingResult(t) - } - def checksForInterestingResult(t: Tree): Boolean = ( - !t.isDef // ignore defs - && !isUninterestingSymbol(t.symbol) // ctors, package, Unit, Any - && !isUninterestingType(t.tpe) // bottom types, Unit, Any - && !isThisTypeResult(t) // buf += x - && !isSuperConstrCall(t) // just a thing - && !isJavaApplication(t) // Java methods are inherently side-effecting - // && !treeInfo.hasExplicitUnit(t) // suppressed by explicit expr: Unit // TODO Should explicit `: Unit` be added as warning suppression? 
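The block removed here (now `Linter.warnOnInterestingResultInStatement`) backs the `-Wnonunit-statement` diagnostic; roughly, it flags discarded non-Unit results in statement position, as in this sketch:

```scala
// With -Wnonunit-statement enabled, the discarded List below is reported:
def bump(xs: List[Int]): Unit =
  xs.map(_ + 1)   // warning: unused value of type List[Int]
  ()
```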
- ) - if ctx.settings.WNonUnitStatement.value && !ctx.isAfterTyper && checkInterestingShapes(t) then - val where = t match { - case Block(_, res) => res - case If(_, thenpart, Literal(Constant(()))) => - thenpart match { - case Block(_, res) => res - case _ => thenpart - } - case _ => t - } - report.warning(UnusedNonUnitValue(where.tpe), t.srcPos) - true - else false - } - - private def checkStatementPurity(tree: tpd.Tree)(original: untpd.Tree, exprOwner: Symbol, isUnitExpr: Boolean = false)(using Context): Unit = if !tree.tpe.isErroneous && !ctx.isAfterTyper && !tree.isInstanceOf[Inlined] @@ -4419,6 +4428,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // sometimes we do not have the original anymore and use the transformed tree instead. // But taken together, the two criteria are quite accurate. missingArgs(tree, tree.tpe.widen) + case _ if isUnitExpr => + report.warning(PureUnitExpression(original, tree.tpe), original.srcPos) + case _ => report.warning(PureExpressionInStatementPosition(original, exprOwner), original.srcPos) @@ -4498,7 +4509,16 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer AnnotatedType(conj, Annotation(defn.UncheckedStableAnnot, tree.symbol.span)) else conj else pt - gadts.println(i"insert GADT cast from $tree to $target") - tree.cast(target) + if target.existsPart(_.isInstanceOf[ProtoType]) then + // we want to avoid embedding a SelectionProto in a Conversion as the result type, + // as it might end up within a GADT cast type, e.g. tests/pos/i15867.scala + // so we just bail - in that example, a GADT cast will be inserted on application, so it compiles. + // but tests/pos/i18062.scala is an example with a polymorphic method, which requires type variables to + // be applied to the tree and then constrained before they match the prototype. + // so rather than try to handle all that before calling adapt, let's just bail on this side.
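For context, `insertGadtCast` serves matches where GADT constraints refine the result type. A standard GADT example (illustrative only, not taken from the patch):

```scala
enum Expr[T]:
  case IntLit(i: Int) extends Expr[Int]
  case BoolLit(b: Boolean) extends Expr[Boolean]

def eval[T](e: Expr[T]): T = e match
  case Expr.IntLit(i)  => i   // GADT constraint identifies T with Int; a cast to T is inserted
  case Expr.BoolLit(b) => b
```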
+ tree + else + gadts.println(i"insert GADT cast from $tree to $target") + tree.cast(target) end insertGadtCast } diff --git a/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala b/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala index 60f0c043b435..f7afc7a7e0a7 100644 --- a/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala +++ b/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala @@ -2,15 +2,18 @@ package dotty.tools package dotc package typer -import core._ -import Phases._ -import Contexts._ -import Symbols._ +import core.* +import Run.SubPhase +import Phases.* +import Contexts.* +import Symbols.* import ImportInfo.withRootImports import parsing.{Parser => ParserPhase} import config.Printers.typr import inlines.PrepareInlineable -import util.Stats._ +import util.Stats.* +import dotty.tools.dotc.config.Feature +import dotty.tools.dotc.config.SourceVersion /** * @@ -31,13 +34,13 @@ class TyperPhase(addRootImports: Boolean = true) extends Phase { // Run regardless of parsing errors override def isRunnable(implicit ctx: Context): Boolean = true - def enterSyms(using Context): Unit = monitor("indexing") { + def enterSyms(using Context)(using subphase: SubPhase): Boolean = monitor(subphase.name) { val unit = ctx.compilationUnit ctx.typer.index(unit.untpdTree) typr.println("entered: " + unit.source) } - def typeCheck(using Context): Unit = monitor("typechecking") { + def typeCheck(using Context)(using subphase: SubPhase): Boolean = monitor(subphase.name) { val unit = ctx.compilationUnit try if !unit.suspended then @@ -46,23 +49,23 @@ class TyperPhase(addRootImports: Boolean = true) extends Phase { record("retained untyped trees", unit.untpdTree.treeSize) record("retained typed trees after typer", unit.tpdTree.treeSize) ctx.run.nn.suppressions.reportSuspendedMessages(unit.source) - catch - case ex: CompilationUnit.SuspendException => - case ex: Throwable => - println(s"$ex while typechecking $unit") - throw ex + catch case _: CompilationUnit.SuspendException => () } - def javaCheck(using Context): Unit = monitor("checking java") { + def javaCheck(using Context)(using subphase: SubPhase): Boolean = monitor(subphase.name) { val unit = ctx.compilationUnit if unit.isJava then JavaChecks.check(unit.tpdTree) } protected def discardAfterTyper(unit: CompilationUnit)(using Context): Boolean = - unit.isJava || unit.suspended + (unit.isJava && !ctx.settings.YjavaTasty.value) || unit.suspended + + override val subPhases: List[SubPhase] = List( + SubPhase("indexing"), SubPhase("typechecking"), SubPhase("checkingJava")) override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = + val List(Indexing @ _, Typechecking @ _, CheckingJava @ _) = subPhases: @unchecked val unitContexts = for unit <- units yield val newCtx0 = ctx.fresh.setPhase(this.start).setCompilationUnit(unit) @@ -73,11 +76,16 @@ class TyperPhase(addRootImports: Boolean = true) extends Phase { else newCtx - unitContexts.foreach(enterSyms(using _)) + val unitContexts0 = runSubPhase(Indexing) { + for + unitContext <- unitContexts + if enterSyms(using unitContext) + yield unitContext + } ctx.base.parserPhase match { case p: ParserPhase => - if p.firstXmlPos.exists && !defn.ScalaXmlPackageClass.exists then + if p.firstXmlPos.exists && !defn.ScalaXmlPackageClass.exists && Feature.sourceVersion == SourceVersion.future then report.error( """To support XML literals, your project must depend on scala-xml. 
|See https://github.com/scala/scala-xml for more information.""".stripMargin, @@ -85,11 +93,22 @@ class TyperPhase(addRootImports: Boolean = true) extends Phase { case _ => } - unitContexts.foreach(typeCheck(using _)) + val unitContexts1 = runSubPhase(Typechecking) { + for + unitContext <- unitContexts0 + if typeCheck(using unitContext) + yield unitContext + } + record("total trees after typer", ast.Trees.ntrees) - unitContexts.foreach(javaCheck(using _)) // after typechecking to avoid cycles - val newUnits = unitContexts.map(_.compilationUnit).filterNot(discardAfterTyper) + val unitContexts2 = runSubPhase(CheckingJava) { + for + unitContext <- unitContexts1 + if javaCheck(using unitContext) // after typechecking to avoid cycles + yield unitContext + } + val newUnits = unitContexts2.map(_.compilationUnit).filterNot(discardAfterTyper) ctx.run.nn.checkSuspendedUnits(newUnits) newUnits diff --git a/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala index bcfc9288d862..3699ca80d011 100644 --- a/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala +++ b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala @@ -2,17 +2,19 @@ package dotty.tools.dotc package typer import dotty.tools.dotc.ast.{ Trees, tpd } -import core._ -import Types._, Contexts._, Flags._, Symbols._, Trees._ -import Decorators._ -import Variances._ -import NameKinds._ +import core.* +import Types.*, Contexts.*, Flags.*, Symbols.*, Trees.* +import Decorators.* +import Variances.* +import NameKinds.* import util.SrcPos import config.Printers.variances import config.Feature.migrateTo3 import reporting.trace import printing.Formatting.hl +import scala.compiletime.uninitialized + /** Provides `check` method to check that all top-level definitions * in tree are variance correct. Does not recurse inside methods. * The method should be invoked once for each Template. @@ -63,11 +65,11 @@ object VarianceChecker { } class VarianceChecker(using Context) { - import VarianceChecker._ - import tpd._ + import VarianceChecker.* + import tpd.* private object Validator extends TypeAccumulator[Option[VarianceError]] { - private var base: Symbol = _ + private var base: Symbol = uninitialized /** The variance of a symbol occurrence of `tvar` seen at the level of the definition of `base`. * The search proceeds from `base` to the owner of `tvar`. @@ -130,8 +132,6 @@ class VarianceChecker(using Context) { case TypeAlias(alias) => this(status, alias) case _ => foldOver(status, tp) } - case tp: MethodOrPoly => - this(status, tp.resultType) // params will be checked in their TypeDef or ValDef nodes. case AnnotatedType(_, annot) if annot.symbol == defn.UncheckedVarianceAnnot => status case tp: ClassInfo => @@ -144,12 +144,26 @@ class VarianceChecker(using Context) { } } - def validateDefinition(base: Symbol): Option[VarianceError] = { - val saved = this.base + def checkInfo(info: Type): Option[VarianceError] = info match + case info: MethodOrPoly => + checkInfo(info.resultType) // params will be checked in their TypeDef or ValDef nodes. 
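The `variance = 0` treatment of mutable, non-locally-assigned members in `validateDefinition` below closes a known covariance loophole for plain `private var`s; such a var is exempt from variance checking only while every assignment goes through `this`. A sketch of the code shape that is now rejected (hypothetical names; the commented lines show the runtime crash the old behavior permitted):

```scala
class Ref[+A](init: A):
  private var value: A = init   // exempt only while assigned via `this` alone
  def get: A = value

object Ref:
  def put[B](r: Ref[B], b: B): Unit =
    // A non-local assignment: Typer records AssignedNonLocallyAnnot on `value`,
    // and the VarianceChecker then checks it invariantly and rejects `+A`.
    r.value = b

// Without the check, covariance would make this crash at runtime:
//   val ri = Ref[Int](0)
//   Ref.put[Any](ri, "boom")   // Ref[Int] <: Ref[Any]
//   val n: Int = ri.get        // ClassCastException
```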
+ case _ => + apply(None, info) + + def validateDefinition(base: Symbol): Option[VarianceError] = + val savedBase = this.base this.base = base - try apply(None, base.info) - finally this.base = saved - } + val savedVariance = variance + def isLocal = + base.isAllOf(PrivateLocal) + || base.is(Private) && !base.hasAnnotation(defn.AssignedNonLocallyAnnot) + if base.is(Mutable, butNot = Method) && !isLocal then + base.removeAnnotation(defn.AssignedNonLocallyAnnot) + variance = 0 + try checkInfo(base.info) + finally + this.base = savedBase + this.variance = savedVariance } private object Traverser extends TreeTraverser { diff --git a/compiler/src/dotty/tools/dotc/util/Chars.scala b/compiler/src/dotty/tools/dotc/util/Chars.scala index cde1a63f5293..916bdfa9dca3 100644 --- a/compiler/src/dotty/tools/dotc/util/Chars.scala +++ b/compiler/src/dotty/tools/dotc/util/Chars.scala @@ -28,7 +28,7 @@ object Chars: if (0 <= num && num < base) num else -1 } /** Buffer for creating '\ u XXXX' strings. */ - private[this] val char2uescapeArray = Array[Char]('\\', 'u', 0, 0, 0, 0) + private val char2uescapeArray = Array[Char]('\\', 'u', 0, 0, 0, 0) /** Convert a character to a backslash-u escape */ def char2uescape(c: Char): String = { diff --git a/compiler/src/dotty/tools/dotc/util/CommentParsing.scala b/compiler/src/dotty/tools/dotc/util/CommentParsing.scala index b4af59c09310..7224e28fe477 100644 --- a/compiler/src/dotty/tools/dotc/util/CommentParsing.scala +++ b/compiler/src/dotty/tools/dotc/util/CommentParsing.scala @@ -17,7 +17,7 @@ import scala.collection.mutable * handled by scaladoc. */ object CommentParsing { - import Chars._ + import Chars.* /** Returns index of string `str` following `start` skipping longest * sequence of whitespace characters (but no newlines) diff --git a/compiler/src/dotty/tools/dotc/util/DiffUtil.scala b/compiler/src/dotty/tools/dotc/util/DiffUtil.scala index cec86fa84443..31acc91caa2e 100644 --- a/compiler/src/dotty/tools/dotc/util/DiffUtil.scala +++ b/compiler/src/dotty/tools/dotc/util/DiffUtil.scala @@ -70,7 +70,9 @@ object DiffUtil { * differences are highlighted. */ def mkColoredLineDiff(expected: Seq[String], actual: Seq[String]): String = { - val expectedSize = EOF.length max expected.maxBy(_.length).length + val longestExpected = expected.map(_.length).maxOption.getOrElse(0) + val longestActual = actual.map(_.length).maxOption.getOrElse(0) + val expectedSize = EOF.length max longestActual max longestExpected actual.padTo(expected.length, "").zip(expected.padTo(actual.length, "")).map { case (act, exp) => mkColoredLineDiff(exp, act, expectedSize) }.mkString(System.lineSeparator) @@ -101,11 +103,75 @@ object DiffUtil { case Deleted(str) => deleted(str) }.mkString + (expectedDiff, actualDiff) val pad = " " * 0.max(expectedSize - expected.length) expectedDiff + pad + " | " + actualDiff } + private def ensureLineSeparator(str: String): String = + if str.endsWith(System.lineSeparator) then + str + else + str + System.lineSeparator + + /** + * Returns a colored diff by comparison of lines instead of tokens.
+ * It will automatically group subsequent pairs of `Insert` and `Delete` + * in order to improve readability + * + * @param expected The expected lines + * @param actual The actual lines + * @return A string with colored diffs between `expected` and `actual` grouped whenever possible + */ + def mkColoredHorizontalLineDiff(expected: String, actual: String): String = { + val indent = 2 + val tab = " " * indent + val insertIndent = "+" ++ (" " * (indent - 1)) + val deleteIndent = "-" ++ (" " * (indent - 1)) + + if actual.isEmpty then + (expected.linesIterator.map(line => added(insertIndent + line)).toList :+ deleted("--- EMPTY OUTPUT ---")) + .map(ensureLineSeparator).mkString + else if expected.isEmpty then + (added("--- NO VALUE EXPECTED ---") +: actual.linesIterator.map(line => deleted(deleteIndent + line)).toList) + .map(ensureLineSeparator).mkString + else + lazy val diff = { + val expectedTokens = expected.linesWithSeparators.toArray + val actualTokens = actual.linesWithSeparators.toArray + hirschberg(actualTokens, expectedTokens) + }.toList + + val transformedDiff = diff.flatMap { + case Modified(original, str) => Seq( + Inserted(ensureLineSeparator(original)), Deleted(ensureLineSeparator(str)) + ) + case other => Seq(other) + } + + val zipped = transformedDiff zip transformedDiff.drop(1) + + val (acc, inserts, deletions) = zipped.foldLeft((Seq[Patch](), Seq[Inserted](), Seq[Deleted]())): (acc, patches) => + val (currAcc, inserts, deletions) = acc + patches match + case (currentPatch: Inserted, nextPatch: Deleted) => + (currAcc, inserts :+ currentPatch, deletions) + case (currentPatch: Deleted, nextPatch: Inserted) => + (currAcc, inserts, deletions :+ currentPatch) + case (currentPatch, nextPatch) => + (currAcc :++ inserts :++ deletions :+ currentPatch, Seq.empty, Seq.empty) + + val stackedDiff = acc :++ inserts :++ deletions :+ diff.last + + stackedDiff.collect { + case Unmodified(str) => tab + str + case Inserted(str) => added(insertIndent + str) + case Deleted(str) => deleted(deleteIndent + str) + }.map(ensureLineSeparator).mkString + + } + def mkColoredCodeDiff(code: String, lastCode: String, printDiffDel: Boolean): String = { val tokens = splitTokens(code, Nil).toArray val lastTokens = splitTokens(lastCode, Nil).toArray diff --git a/compiler/src/dotty/tools/dotc/util/EqHashMap.scala b/compiler/src/dotty/tools/dotc/util/EqHashMap.scala index ea049acba02b..25d9fb2907b8 100644 --- a/compiler/src/dotty/tools/dotc/util/EqHashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/EqHashMap.scala @@ -58,6 +58,22 @@ extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): used += 1 if used > limit then growTable() + override def getOrElseUpdate(key: Key, value: => Value): Value = + // created by blending lookup and update, to avoid having to recompute the hash and probe + Stats.record(statsItem("lookup-or-update")) + var idx = firstIndex(key) + var k = keyAt(idx) + while k != null do + if isEqual(k, key) then return valueAt(idx) + idx = nextIndex(idx) + k = keyAt(idx) + val v = value + setKey(idx, key) + setValue(idx, v) + used += 1 + if used > limit then growTable() + v + private def addOld(key: Key, value: Value): Unit = Stats.record(statsItem("re-enter")) var idx = firstIndex(key) diff --git a/compiler/src/dotty/tools/dotc/util/EqHashSet.scala b/compiler/src/dotty/tools/dotc/util/EqHashSet.scala new file mode 100644 index 000000000000..d584441fd00a --- /dev/null +++ b/compiler/src/dotty/tools/dotc/util/EqHashSet.scala @@ -0,0 +1,106 @@ +package dotty.tools.dotc.util +
diff --git a/compiler/src/dotty/tools/dotc/util/EqHashSet.scala b/compiler/src/dotty/tools/dotc/util/EqHashSet.scala
new file mode 100644
index 000000000000..d584441fd00a
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/EqHashSet.scala
@@ -0,0 +1,106 @@
+package dotty.tools.dotc.util
+
+import dotty.tools.uncheckedNN
+
+object EqHashSet:
+
+  def from[T](xs: IterableOnce[T]): EqHashSet[T] =
+    val set = new EqHashSet[T]()
+    set ++= xs
+    set
+
+/** A hash set that allows some privileged protected access to its internals
+ *  @param initialCapacity  Indicates the initial number of slots in the hash table.
+ *                          The actual number of slots is always a power of 2, so the
+ *                          initial size of the table will be the smallest power of two
+ *                          that is equal or greater than the given `initialCapacity`.
+ *                          Minimum value is 4.
+ *  @param capacityMultiple The minimum multiple of capacity relative to used elements.
+ *                          The hash table will be re-sized once the number of elements
+ *                          multiplied by capacityMultiple exceeds the current size of the hash table.
+ *                          However, a table of size up to DenseLimit will be re-sized only
+ *                          once the number of elements reaches the table's size.
+ */
+class EqHashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends GenericHashSet[T](initialCapacity, capacityMultiple) {
+  import GenericHashSet.DenseLimit
+
+  /** System's identity hashcode left shifted by 1 */
+  final def hash(key: T): Int =
+    System.identityHashCode(key) << 1
+
+  /** reference equality */
+  final def isEqual(x: T, y: T): Boolean = x.asInstanceOf[AnyRef] eq y.asInstanceOf[AnyRef]
+
+  /** Turn hashcode `x` into a table index */
+  private def index(x: Int): Int = x & (table.length - 1)
+
+  private def firstIndex(x: T) = if isDense then 0 else index(hash(x))
+  private def nextIndex(idx: Int) =
+    Stats.record(statsItem("miss"))
+    index(idx + 1)
+
+  private def entryAt(idx: Int): T | Null = table(idx).asInstanceOf[T | Null]
+  private def setEntry(idx: Int, x: T) = table(idx) = x.asInstanceOf[AnyRef | Null]
+
+  override def lookup(x: T): T | Null =
+    Stats.record(statsItem("lookup"))
+    var idx = firstIndex(x)
+    var e: T | Null = entryAt(idx)
+    while e != null do
+      if isEqual(e.uncheckedNN, x) then return e
+      idx = nextIndex(idx)
+      e = entryAt(idx)
+    null
+
+  /** Add entry at `x` at index `idx` */
+  private def addEntryAt(idx: Int, x: T): T =
+    Stats.record(statsItem("addEntryAt"))
+    setEntry(idx, x)
+    used += 1
+    if used > limit then growTable()
+    x
+
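Since `EqHashSet` hashes with `System.identityHashCode` and compares with `eq`, two objects that are `equals` but not the same reference occupy distinct slots. A standalone illustration of that distinction (plain collections, not the compiler's set):

```scala
@main def identityVsEquality(): Unit =
  val a = new String("scala")
  val b = new String("scala") // a == b, but not (a eq b)

  // Ordinary sets use equals/hashCode:
  println(Set(a).contains(b)) // true

  // Identity-based membership, as EqHashSet implements it:
  def identityContains(elems: List[AnyRef], x: AnyRef): Boolean =
    elems.exists(_ eq x)
  println(identityContains(List(a), b)) // false: different references
```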
+  /** Attempts to add `x` to the set: returns true if it was not present before, false otherwise.
+   */
+  override def add(x: T): Boolean =
+    Stats.record(statsItem("enter"))
+    var idx = firstIndex(x)
+    var e: T | Null = entryAt(idx)
+    while e != null do
+      if isEqual(e.uncheckedNN, x) then return false // already entered
+      idx = nextIndex(idx)
+      e = entryAt(idx)
+    addEntryAt(idx, x)
+    true // first entry
+
+  override def put(x: T): T =
+    Stats.record(statsItem("put"))
+    var idx = firstIndex(x)
+    var e: T | Null = entryAt(idx)
+    while e != null do
+      // TODO: remove uncheckedNN when explicit-nulls is enabled for regular compiling
+      if isEqual(e.uncheckedNN, x) then return e.uncheckedNN
+      idx = nextIndex(idx)
+      e = entryAt(idx)
+    addEntryAt(idx, x)
+
+  override def +=(x: T): Unit = put(x)
+
+  private def addOld(x: T) =
+    Stats.record(statsItem("re-enter"))
+    var idx = firstIndex(x)
+    var e = entryAt(idx)
+    while e != null do
+      idx = nextIndex(idx)
+      e = entryAt(idx)
+    setEntry(idx, x)
+
+  override def copyFrom(oldTable: Array[AnyRef | Null]): Unit =
+    if isDense then
+      Array.copy(oldTable, 0, table, 0, oldTable.length)
+    else
+      var idx = 0
+      while idx < oldTable.length do
+        val e: T | Null = oldTable(idx).asInstanceOf[T | Null]
+        if e != null then addOld(e.uncheckedNN)
+        idx += 1
+}
diff --git a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala
index a21a4af37038..8c0506573109 100644
--- a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala
+++ b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala
@@ -1,6 +1,8 @@
 package dotty.tools
 package dotc.util
 
+import scala.compiletime.uninitialized
+
 object GenericHashMap:
 
   /** The number of elements up to which dense packing is used.
@@ -27,9 +29,9 @@ abstract class GenericHashMap[Key, Value]
     (initialCapacity: Int, capacityMultiple: Int) extends MutableMap[Key, Value]:
   import GenericHashMap.DenseLimit
 
-  protected var used: Int = _
-  protected var limit: Int = _
-  protected var table: Array[AnyRef | Null] = _
+  protected var used: Int = uninitialized
+  protected var limit: Int = uninitialized
+  protected var table: Array[AnyRef | Null] = uninitialized
 
   clear()
 
   private def allocate(capacity: Int) =
@@ -129,12 +131,20 @@ abstract class GenericHashMap[Key, Value]
     null
 
   def getOrElseUpdate(key: Key, value: => Value): Value =
-    var v: Value | Null = lookup(key)
-    if v == null then
-      val v1 = value
-      v = v1
-      update(key, v1)
-    v.uncheckedNN
+    // created by blending lookup and update, avoid having to recompute hash and probe
+    Stats.record(statsItem("lookup-or-update"))
+    var idx = firstIndex(key)
+    var k = keyAt(idx)
+    while k != null do
+      if isEqual(k, key) then return valueAt(idx)
+      idx = nextIndex(idx)
+      k = keyAt(idx)
+    val v = value
+    setKey(idx, key)
+    setValue(idx, v)
+    used += 1
+    if used > limit then growTable()
+    v
 
   private def addOld(key: Key, value: Value): Unit =
     Stats.record(statsItem("re-enter"))
     var idx = firstIndex(key)
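On the `add`/`put` split above: `add` reports whether the element was newly entered, while `put` returns the canonical element already in the table (or the argument if it is new). A hedged standalone sketch of the two contracts (hypothetical wrapper over an ordinary mutable set, not the compiler's implementation):

```scala
// Hypothetical mirror of the two entry points' contracts.
final class ToySet[T]:
  private val elems = scala.collection.mutable.LinkedHashSet.empty[T]

  /** Like GenericHashSet.add: true iff x was not present before. */
  def add(x: T): Boolean = elems.add(x)

  /** Like GenericHashSet.put: the element already in the set, or x. */
  def put(x: T): T =
    elems.find(_ == x).getOrElse { elems += x; x }

@main def addVsPut(): Unit =
  val s = ToySet[String]()
  println(s.add("a")) // true: first entry
  println(s.add("a")) // false: already entered
  println(s.put("a")) // "a": the canonical element is returned
```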
diff --git a/compiler/src/dotty/tools/dotc/util/GenericHashSet.scala b/compiler/src/dotty/tools/dotc/util/GenericHashSet.scala
new file mode 100644
index 000000000000..3c30e8e73300
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/GenericHashSet.scala
@@ -0,0 +1,192 @@
+package dotty.tools.dotc.util
+
+import dotty.tools.uncheckedNN
+
+import scala.compiletime.uninitialized
+
+object GenericHashSet:
+
+  /** The number of elements up to which dense packing is used.
+   *  If the number of elements reaches `DenseLimit` a hash table is used instead
+   */
+  inline val DenseLimit = 8
+
+/** A hash set that allows some privileged protected access to its internals
+ *  @param initialCapacity  Indicates the initial number of slots in the hash table.
+ *                          The actual number of slots is always a power of 2, so the
+ *                          initial size of the table will be the smallest power of two
+ *                          that is equal or greater than the given `initialCapacity`.
+ *                          Minimum value is 4.
+ *  @param capacityMultiple The minimum multiple of capacity relative to used elements.
+ *                          The hash table will be re-sized once the number of elements
+ *                          multiplied by capacityMultiple exceeds the current size of the hash table.
+ *                          However, a table of size up to DenseLimit will be re-sized only
+ *                          once the number of elements reaches the table's size.
+ */
+abstract class GenericHashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends MutableSet[T] {
+  import GenericHashSet.DenseLimit
+
+  protected var used: Int = uninitialized
+  protected var limit: Int = uninitialized
+  protected var table: Array[AnyRef | Null] = uninitialized
+
+  clear()
+
+  private def allocate(capacity: Int) =
+    table = new Array[AnyRef | Null](capacity)
+    limit = if capacity <= DenseLimit then capacity - 1 else capacity / capacityMultiple
+
+  private def roundToPower(n: Int) =
+    if n < 4 then 4
+    else 1 << (32 - Integer.numberOfLeadingZeros(n - 1))
+
+  def clear(resetToInitial: Boolean): Unit =
+    used = 0
+    if resetToInitial then allocate(roundToPower(initialCapacity))
+    else java.util.Arrays.fill(table, null)
+
+  /** The number of elements in the set */
+  def size: Int = used
+
+  protected def isDense = limit < DenseLimit
+
+  /** Hashcode, to be implemented in subclass */
+  protected def hash(key: T): Int
+
+  /** Equality, to be implemented in subclass */
+  protected def isEqual(x: T, y: T): Boolean
+
+  /** Turn hashcode `x` into a table index */
+  private def index(x: Int): Int = x & (table.length - 1)
+
+  protected def currentTable: Array[AnyRef | Null] = table
+
+  private def firstIndex(x: T) = if isDense then 0 else index(hash(x))
+  private def nextIndex(idx: Int) =
+    Stats.record(statsItem("miss"))
+    index(idx + 1)
+
+  private def entryAt(idx: Int): T | Null = table(idx).asInstanceOf[T | Null]
+  private def setEntry(idx: Int, x: T) = table(idx) = x.asInstanceOf[AnyRef | Null]
+
+  def lookup(x: T): T | Null =
+    Stats.record(statsItem("lookup"))
+    var idx = firstIndex(x)
+    var e: T | Null = entryAt(idx)
+    while e != null do
+      if isEqual(e.uncheckedNN, x) then return e
+      idx = nextIndex(idx)
+      e = entryAt(idx)
+    null
+
+  /** Add entry at `x` at index `idx` */
+  private def addEntryAt(idx: Int, x: T): T =
+    Stats.record(statsItem("addEntryAt"))
+    setEntry(idx, x)
+    used += 1
+    if used > limit then growTable()
+    x
+
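`roundToPower` above computes the smallest power of two that is at least `n` (minimum 4) via `numberOfLeadingZeros`; the `n - 1` makes exact powers map to themselves. A standalone check of a few values:

```scala
@main def roundToPowerDemo(): Unit =
  // Same arithmetic as GenericHashSet.roundToPower above.
  def roundToPower(n: Int): Int =
    if n < 4 then 4
    else 1 << (32 - Integer.numberOfLeadingZeros(n - 1))

  for n <- List(1, 4, 5, 8, 9, 1000) do
    println(s"$n -> ${roundToPower(n)}")
  // 1 -> 4, 4 -> 4, 5 -> 8, 8 -> 8, 9 -> 16, 1000 -> 1024
```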
+  /** Attempts to add `x` to the set: returns true if it was not present before, false otherwise.
+   */
+  override def add(x: T): Boolean =
+    Stats.record(statsItem("enter"))
+    var idx = firstIndex(x)
+    var e: T | Null = entryAt(idx)
+    while e != null do
+      if isEqual(e.uncheckedNN, x) then return false // already entered
+      idx = nextIndex(idx)
+      e = entryAt(idx)
+    addEntryAt(idx, x)
+    true // first entry
+
+  def put(x: T): T =
+    Stats.record(statsItem("put"))
+    var idx = firstIndex(x)
+    var e: T | Null = entryAt(idx)
+    while e != null do
+      // TODO: remove uncheckedNN when explicit-nulls is enabled for regular compiling
+      if isEqual(e.uncheckedNN, x) then return e.uncheckedNN
+      idx = nextIndex(idx)
+      e = entryAt(idx)
+    addEntryAt(idx, x)
+
+  def +=(x: T): Unit = put(x)
+
+  def remove(x: T): Boolean =
+    Stats.record(statsItem("remove"))
+    var idx = firstIndex(x)
+    var e: T | Null = entryAt(idx)
+    while e != null do
+      if isEqual(e.uncheckedNN, x) then
+        var hole = idx
+        while
+          idx = nextIndex(idx)
+          e = entryAt(idx)
+          e != null
+        do
+          val eidx = index(hash(e.uncheckedNN))
+          if isDense
+            || index(eidx - (hole + 1)) > index(idx - (hole + 1))
+              // entry `e` at `idx` can move unless `index(hash(e))` is in
+              // the (ring-)interval [hole + 1 .. idx]
+          then
+            setEntry(hole, e.uncheckedNN)
+            hole = idx
+        table(hole) = null
+        used -= 1
+        return true
+      idx = nextIndex(idx)
+      e = entryAt(idx)
+    false
+
+  def -=(x: T): Unit =
+    remove(x)
+
+  private def addOld(x: T) =
+    Stats.record(statsItem("re-enter"))
+    var idx = firstIndex(x)
+    var e = entryAt(idx)
+    while e != null do
+      idx = nextIndex(idx)
+      e = entryAt(idx)
+    setEntry(idx, x)
+
+  def copyFrom(oldTable: Array[AnyRef | Null]): Unit =
+    if isDense then
+      Array.copy(oldTable, 0, table, 0, oldTable.length)
+    else
+      var idx = 0
+      while idx < oldTable.length do
+        val e: T | Null = oldTable(idx).asInstanceOf[T | Null]
+        if e != null then addOld(e.uncheckedNN)
+        idx += 1
+
+  protected def growTable(): Unit =
+    val oldTable = table
+    val newLength =
+      if oldTable.length == DenseLimit then DenseLimit * 2 * roundToPower(capacityMultiple)
+      else table.length * 2
+    allocate(newLength)
+    copyFrom(oldTable)
+
+  abstract class EntryIterator extends Iterator[T]:
+    def entry(idx: Int): T | Null
+    private var idx = 0
+    def hasNext =
+      while idx < table.length && table(idx) == null do idx += 1
+      idx < table.length
+    def next() =
+      require(hasNext)
+      try entry(idx).uncheckedNN finally idx += 1
+
+  def iterator: Iterator[T] = new EntryIterator():
+    def entry(idx: Int) = entryAt(idx)
+
+  override def toString: String =
+    iterator.mkString("HashSet(", ", ", ")")
+
+  protected def statsItem(op: String) =
+    val prefix = if isDense then "HashSet(dense)." else "HashSet."
+ val suffix = getClass.getSimpleName + s"$prefix$op $suffix" +} diff --git a/compiler/src/dotty/tools/dotc/util/HashMap.scala b/compiler/src/dotty/tools/dotc/util/HashMap.scala index aaae781c310a..eec3a604b5e2 100644 --- a/compiler/src/dotty/tools/dotc/util/HashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/HashMap.scala @@ -63,6 +63,22 @@ extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): used += 1 if used > limit then growTable() + override def getOrElseUpdate(key: Key, value: => Value): Value = + // created by blending lookup and update, avoid having to recompute hash and probe + Stats.record(statsItem("lookup-or-update")) + var idx = firstIndex(key) + var k = keyAt(idx) + while k != null do + if isEqual(k, key) then return valueAt(idx) + idx = nextIndex(idx) + k = keyAt(idx) + val v = value + setKey(idx, key) + setValue(idx, v) + used += 1 + if used > limit then growTable() + v + private def addOld(key: Key, value: Value): Unit = Stats.record(statsItem("re-enter")) var idx = firstIndex(key) diff --git a/compiler/src/dotty/tools/dotc/util/HashSet.scala b/compiler/src/dotty/tools/dotc/util/HashSet.scala index a6e1532c804f..3a973793d542 100644 --- a/compiler/src/dotty/tools/dotc/util/HashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/HashSet.scala @@ -4,11 +4,6 @@ import dotty.tools.uncheckedNN object HashSet: - /** The number of elements up to which dense packing is used. - * If the number of elements reaches `DenseLimit` a hash table is used instead - */ - inline val DenseLimit = 8 - def from[T](xs: IterableOnce[T]): HashSet[T] = val set = new HashSet[T]() set ++= xs @@ -26,33 +21,8 @@ object HashSet: * However, a table of size up to DenseLimit will be re-sized only * once the number of elements reaches the table's size. 
*/ -class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends MutableSet[T] { - import HashSet.DenseLimit - - private var used: Int = _ - private var limit: Int = _ - private var table: Array[AnyRef | Null] = _ - - clear() - - private def allocate(capacity: Int) = - table = new Array[AnyRef | Null](capacity) - limit = if capacity <= DenseLimit then capacity - 1 else capacity / capacityMultiple - - private def roundToPower(n: Int) = - if n < 4 then 4 - else if Integer.bitCount(n) == 1 then n - else 1 << (32 - Integer.numberOfLeadingZeros(n)) - - def clear(resetToInitial: Boolean): Unit = - used = 0 - if resetToInitial then allocate(roundToPower(initialCapacity)) - else java.util.Arrays.fill(table, null) - - /** The number of elements in the set */ - def size: Int = used - - protected def isDense = limit < DenseLimit +class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends GenericHashSet[T](initialCapacity, capacityMultiple) { + import GenericHashSet.DenseLimit /** Hashcode, by default a processed `x.hashCode`, can be overridden */ protected def hash(key: T): Int = @@ -68,8 +38,6 @@ class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends Mu /** Turn hashcode `x` into a table index */ protected def index(x: Int): Int = x & (table.length - 1) - protected def currentTable: Array[AnyRef | Null] = table - protected def firstIndex(x: T) = if isDense then 0 else index(hash(x)) protected def nextIndex(idx: Int) = Stats.record(statsItem("miss")) @@ -78,7 +46,7 @@ class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends Mu protected def entryAt(idx: Int): T | Null = table(idx).asInstanceOf[T | Null] protected def setEntry(idx: Int, x: T) = table(idx) = x.asInstanceOf[AnyRef | Null] - def lookup(x: T): T | Null = + override def lookup(x: T): T | Null = Stats.record(statsItem("lookup")) var idx = firstIndex(x) var e: T | Null = entryAt(idx) @@ -96,48 +64,29 @@ class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends Mu if used > limit then growTable() x - def put(x: T): T = - Stats.record(statsItem("put")) + override def add(x: T): Boolean = + Stats.record(statsItem("enter")) var idx = firstIndex(x) var e: T | Null = entryAt(idx) while e != null do - // TODO: remove uncheckedNN when explicit-nulls is enabled for regule compiling - if isEqual(e.uncheckedNN, x) then return e.uncheckedNN + if isEqual(e.uncheckedNN, x) then return false // already entered idx = nextIndex(idx) e = entryAt(idx) addEntryAt(idx, x) + true // first entry - def +=(x: T): Unit = put(x) - - def remove(x: T): Boolean = - Stats.record(statsItem("remove")) + override def put(x: T): T = + Stats.record(statsItem("put")) var idx = firstIndex(x) var e: T | Null = entryAt(idx) while e != null do - if isEqual(e.uncheckedNN, x) then - var hole = idx - while - idx = nextIndex(idx) - e = entryAt(idx) - e != null - do - val eidx = index(hash(e.uncheckedNN)) - if isDense - || index(eidx - (hole + 1)) > index(idx - (hole + 1)) - // entry `e` at `idx` can move unless `index(hash(e))` is in - // the (ring-)interval [hole + 1 .. 
idx]
-          then
-            setEntry(hole, e.uncheckedNN)
-            hole = idx
-        table(hole) = null
-        used -= 1
-        return true
+      // TODO: remove uncheckedNN when explicit-nulls is enabled for regular compiling
+      if isEqual(e.uncheckedNN, x) then return e.uncheckedNN
       idx = nextIndex(idx)
       e = entryAt(idx)
-      false
+    addEntryAt(idx, x)
 
-  def -=(x: T): Unit =
-    remove(x)
+  override def +=(x: T): Unit = put(x)
 
   private def addOld(x: T) =
     Stats.record(statsItem("re-enter"))
@@ -148,7 +97,7 @@ class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends Mu
       e = entryAt(idx)
     setEntry(idx, x)
 
-  def copyFrom(oldTable: Array[AnyRef | Null]): Unit =
+  override def copyFrom(oldTable: Array[AnyRef | Null]): Unit =
     if isDense then
       Array.copy(oldTable, 0, table, 0, oldTable.length)
     else
@@ -157,33 +106,4 @@ class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends Mu
       val e: T | Null = oldTable(idx).asInstanceOf[T | Null]
       if e != null then addOld(e.uncheckedNN)
       idx += 1
-
-  protected def growTable(): Unit =
-    val oldTable = table
-    val newLength =
-      if oldTable.length == DenseLimit then DenseLimit * 2 * roundToPower(capacityMultiple)
-      else table.length * 2
-    allocate(newLength)
-    copyFrom(oldTable)
-
-  abstract class EntryIterator extends Iterator[T]:
-    def entry(idx: Int): T | Null
-    private var idx = 0
-    def hasNext =
-      while idx < table.length && table(idx) == null do idx += 1
-      idx < table.length
-    def next() =
-      require(hasNext)
-      try entry(idx).uncheckedNN finally idx += 1
-
-  def iterator: Iterator[T] = new EntryIterator():
-    def entry(idx: Int) = entryAt(idx)
-
-  override def toString: String =
-    iterator.mkString("HashSet(", ", ", ")")
-
-  protected def statsItem(op: String) =
-    val prefix = if isDense then "HashSet(dense)." else "HashSet."
-    val suffix = getClass.getSimpleName
-    s"$prefix$op $suffix"
 }
diff --git a/compiler/src/dotty/tools/dotc/util/IntMap.scala b/compiler/src/dotty/tools/dotc/util/IntMap.scala
index 008ea866f70e..1d04567e99c7 100644
--- a/compiler/src/dotty/tools/dotc/util/IntMap.scala
+++ b/compiler/src/dotty/tools/dotc/util/IntMap.scala
@@ -1,5 +1,7 @@
 package dotty.tools.dotc.util
 
+import scala.compiletime.uninitialized
+
 /** A dense map from some `Key` type to `Int. Dense means: All keys and values
  *  are stored in arrays from 0 up to the size of the map. Keys and values
  *  can be obtained by index using `key(index)` and `value(index)`. Values
@@ -19,7 +21,7 @@ package dotty.tools.dotc.util
  */
 final class IntMap[Key](initialCapacity: Int = 8, capacityMultiple: Int = 2)
 extends PerfectHashing[Key](initialCapacity, capacityMultiple):
-  private var values: Array[Int] = _
+  private var values: Array[Int] = uninitialized
 
   def default: Int = -1
 
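The `remove` whose old `HashSet` body is deleted above (and which now lives in `GenericHashSet`) cannot simply null out a slot: later entries in the same probe run would become unreachable, so the loop re-seats such entries into the hole first. A small standalone demonstration of why naive nulling is wrong (toy table, hypothetical hash where all keys probe from slot 0):

```scala
@main def holeBackfillDemo(): Unit =
  // Three keys that all collide on slot 0, stored by linear probing:
  //   slot: 0  1  2
  //   key : a  b  c
  // Nulling b's slot strands c: a lookup for c starts at slot 0, hits the
  // null at slot 1 and stops before reaching slot 2. That is why
  // GenericHashSet.remove walks the rest of the probe run and re-seats
  // movable entries into the hole before clearing the final slot.
  val table = Array[String]("a", "b", "c", null)
  def lookup(x: String): Boolean =
    var i = 0 // all three keys probe from slot 0 in this toy example
    while table(i) != null do
      if table(i) == x then return true
      i += 1
    false
  table(1) = null      // naive delete of "b"
  println(lookup("c")) // false: "c" is still stored but now unreachable
```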
diff --git a/compiler/src/dotty/tools/dotc/util/LRUCache.scala b/compiler/src/dotty/tools/dotc/util/LRUCache.scala
index 99ee8a80227b..e124159575e7 100644
--- a/compiler/src/dotty/tools/dotc/util/LRUCache.scala
+++ b/compiler/src/dotty/tools/dotc/util/LRUCache.scala
@@ -18,7 +18,7 @@ import annotation.tailrec
  *  at the `last` position.
  */
 class LRUCache[Key >: Null <: AnyRef : ClassTag, Value >: Null: ClassTag] {
-  import LRUCache._
+  import LRUCache.*
   val keys: Array[Key] = new Array[Key](Retained)
   val values: Array[Value] = new Array(Retained)
   var next: SixteenNibbles = new SixteenNibbles(initialRing.bits)
diff --git a/compiler/src/dotty/tools/dotc/util/MutableSet.scala b/compiler/src/dotty/tools/dotc/util/MutableSet.scala
index 9529262fa5ec..05fd57a50e71 100644
--- a/compiler/src/dotty/tools/dotc/util/MutableSet.scala
+++ b/compiler/src/dotty/tools/dotc/util/MutableSet.scala
@@ -7,6 +7,13 @@ abstract class MutableSet[T] extends ReadOnlySet[T]:
   /** Add element `x` to the set */
   def +=(x: T): Unit
 
+  /** Attempts to add `x` to the set: returns true if it was not present before, false otherwise.
+   *  Overridden in GenericHashSet.
+   */
+  def add(x: T): Boolean =
+    if lookup(x) == null then { this += x; true }
+    else false
+
   /** Like `+=` but return existing element equal to `x` of it exists,
    *  `x` itself otherwise.
    */
diff --git a/compiler/src/dotty/tools/dotc/util/NameTransformer.scala b/compiler/src/dotty/tools/dotc/util/NameTransformer.scala
index 61cf238fbc7f..f641ea90dcdd 100644
--- a/compiler/src/dotty/tools/dotc/util/NameTransformer.scala
+++ b/compiler/src/dotty/tools/dotc/util/NameTransformer.scala
@@ -4,7 +4,7 @@ package util
 
 import scala.language.unsafeNulls
 
-import core.Names._
+import core.Names.*
 
 import scala.annotation.internal.sharable
 
diff --git a/compiler/src/dotty/tools/dotc/util/ParsedComment.scala b/compiler/src/dotty/tools/dotc/util/ParsedComment.scala
index ac724f7e336f..ee56a74d5eb5 100644
--- a/compiler/src/dotty/tools/dotc/util/ParsedComment.scala
+++ b/compiler/src/dotty/tools/dotc/util/ParsedComment.scala
@@ -2,10 +2,10 @@ package dotty.tools.dotc.util
 
 import scala.language.unsafeNulls
 
-import dotty.tools.dotc.core.Comments.{Comment, CommentsContext}
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Comments.{Comment, docCtx}
+import dotty.tools.dotc.core.Contexts.*
 import dotty.tools.dotc.core.Names.TermName
-import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Symbols.*
 import dotty.tools.dotc.printing.SyntaxHighlighting
 
 import scala.Console.{BOLD, RESET}
diff --git a/compiler/src/dotty/tools/dotc/util/PerfectHashing.scala b/compiler/src/dotty/tools/dotc/util/PerfectHashing.scala
index fca790837959..1351afb68f3d 100644
--- a/compiler/src/dotty/tools/dotc/util/PerfectHashing.scala
+++ b/compiler/src/dotty/tools/dotc/util/PerfectHashing.scala
@@ -1,5 +1,7 @@
 package dotty.tools.dotc.util
 
+import scala.compiletime.uninitialized
+
 object PerfectHashing:
 
   /** The number of elements up to which dense packing is used.
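The recurring `var x: T = _` to `uninitialized` rewrite in this patch (here in PerfectHashing, earlier in GenericHashMap and IntMap) is the Scala 3 replacement for the deprecated wildcard-initializer syntax; the marker must be imported from `scala.compiletime`. A minimal sketch:

```scala
import scala.compiletime.uninitialized

class LazySlot:
  // Scala 3 form of the old `private var cache: String = _`
  private var cache: String = uninitialized

  def get: String =
    if cache == null then cache = "computed"
    cache

@main def uninitializedDemo(): Unit =
  println(LazySlot().get) // computed
```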
@@ -22,9 +24,9 @@ object PerfectHashing: class PerfectHashing[Key](initialCapacity: Int = 8, capacityMultiple: Int = 2): import PerfectHashing.DenseLimit - private var used: Int = _ - private var table: Array[Int] = _ - private var keys: Array[AnyRef] = _ + private var used: Int = uninitialized + private var table: Array[Int] = uninitialized + private var keys: Array[AnyRef] = uninitialized clear() diff --git a/compiler/src/dotty/tools/dotc/util/ReusableInstance.scala b/compiler/src/dotty/tools/dotc/util/ReusableInstance.scala index 4dd897dd082a..d7837d9763fe 100644 --- a/compiler/src/dotty/tools/dotc/util/ReusableInstance.scala +++ b/compiler/src/dotty/tools/dotc/util/ReusableInstance.scala @@ -1,7 +1,7 @@ package dotty.tools.dotc.util import scala.collection.mutable.ArrayBuffer -import scala.util.chaining._ +import scala.util.chaining.* /** A wrapper for a list of cached instances of a type `T`. * The wrapper is recursion-reentrant: several instances are kept, so @@ -15,8 +15,8 @@ import scala.util.chaining._ * Ported from scala.reflect.internal.util.ReusableInstance */ final class ReusableInstance[T <: AnyRef] private (make: => T) { - private[this] val cache = new ArrayBuffer[T](ReusableInstance.InitialSize).tap(_.addOne(make)) - private[this] var taken = 0 + private val cache = new ArrayBuffer[T](ReusableInstance.InitialSize).tap(_.addOne(make)) + private var taken = 0 inline def withInstance[R](action: T => R): R ={ if (taken == cache.size) diff --git a/compiler/src/dotty/tools/dotc/util/ShowPickled.scala b/compiler/src/dotty/tools/dotc/util/ShowPickled.scala index f85a57a8f812..8d5d0c27ab0e 100644 --- a/compiler/src/dotty/tools/dotc/util/ShowPickled.scala +++ b/compiler/src/dotty/tools/dotc/util/ShowPickled.scala @@ -8,10 +8,10 @@ import java.lang.Float.intBitsToFloat import java.lang.Double.longBitsToDouble import core.unpickleScala2.PickleBuffer -import core.Names._ +import core.Names.* object ShowPickled { - import core.unpickleScala2.PickleFormat._ + import core.unpickleScala2.PickleFormat.* case class PickleBufferEntry(num: Int, startIndex: Int, tag: Int, bytes: Array[Byte]) { def isName: Boolean = tag == TERMname || tag == TYPEname diff --git a/compiler/src/dotty/tools/dotc/util/Signatures.scala b/compiler/src/dotty/tools/dotc/util/Signatures.scala index 5513a1f803c6..ce05cfb40294 100644 --- a/compiler/src/dotty/tools/dotc/util/Signatures.scala +++ b/compiler/src/dotty/tools/dotc/util/Signatures.scala @@ -1,20 +1,20 @@ package dotty.tools.dotc package util -import ast.Trees._ +import ast.Trees.* import ast.tpd import core.Constants.Constant -import core.Contexts._ +import core.Contexts.* import core.Denotations.{SingleDenotation, Denotation} import core.Flags import core.NameOps.isUnapplyName -import core.Names._ +import core.Names.* import core.NameKinds -import core.Types._ -import core.Symbols.NoSymbol +import core.Types.* +import core.Symbols.{NoSymbol, isLocalToBlock} import interactive.Interactive import util.Spans.Span -import reporting._ +import reporting.* object Signatures { @@ -97,6 +97,17 @@ object Signatures { case tp @ TypeApply(fun, types) => applyCallInfo(span, types, fun, true) case _ => (0, 0, Nil) + + def isEnclosingApply(tree: tpd.Tree, span: Span)(using Context): Boolean = + tree match + case apply @ Apply(fun, _) => !fun.span.contains(span) && isValid(apply) + case unapply @ UnApply(fun, _, _) => + !fun.span.contains(span) && !ctx.definitions.isFunctionNType(tree.tpe) // we want to show tuples in unapply + case typeTree @ AppliedTypeTree(fun, _) => 
!fun.span.contains(span) && isValid(typeTree) + case typeApply @ TypeApply(fun, _) => !fun.span.contains(span) && isValid(typeApply) + case _ => false + + /** * Finds enclosing application from given `path` for `span`. * @@ -108,17 +119,26 @@ object Signatures { * next subsequent application exists, it returns the latter */ private def findEnclosingApply(path: List[tpd.Tree], span: Span)(using Context): tpd.Tree = - path.filterNot { - case apply @ Apply(fun, _) => fun.span.contains(span) || isValid(apply) - case unapply @ UnApply(fun, _, _) => fun.span.contains(span) || isValid(unapply) - case typeTree @ AppliedTypeTree(fun, _) => fun.span.contains(span) || isValid(typeTree) - case typeApply @ TypeApply(fun, _) => fun.span.contains(span) || isValid(typeApply) - case _ => true - } match { + import tpd.TreeOps + + val filteredPath = path.filter: + case block @ Block(stats, expr) => + block.existsSubTree(tree => isEnclosingApply(tree, span) && tree.span.contains(span)) + case other => isEnclosingApply(other, span) + + filteredPath match case Nil => tpd.EmptyTree + case tpd.Block(stats, expr) :: _ => // potential block containing lifted args + + val enclosingFunction = stats.collectFirst: + case defdef: tpd.DefDef if defdef.rhs.span.contains(span) => defdef + + val enclosingTree = enclosingFunction.getOrElse(expr) + findEnclosingApply(Interactive.pathTo(enclosingTree, span), span) + case direct :: enclosing :: _ if isClosingSymbol(direct.source(span.end -1)) => enclosing case direct :: _ => direct - } + private def isClosingSymbol(ch: Char) = ch == ')' || ch == ']' @@ -178,7 +198,8 @@ object Signatures { (alternativeIndex, alternatives) case _ => val funSymbol = fun.symbol - val alternatives = funSymbol.owner.info.member(funSymbol.name).alternatives + val alternatives = if funSymbol.isLocalToBlock then List(funSymbol.denot) else + funSymbol.owner.info.member(funSymbol.name).alternatives val alternativeIndex = alternatives.map(_.symbol).indexOf(funSymbol) max 0 (alternativeIndex, alternatives) @@ -302,7 +323,7 @@ object Signatures { * @param tree tree to validate */ private def isValid(tree: tpd.Tree)(using Context): Boolean = - ctx.definitions.isTupleNType(tree.tpe) || ctx.definitions.isFunctionType(tree.tpe) + !ctx.definitions.isTupleNType(tree.tpe) && !ctx.definitions.isFunctionNType(tree.tpe) /** * Get unapply method result type omiting unknown types and another method calls. 
@@ -407,7 +428,7 @@ object Signatures { (params :: rest) def isSyntheticEvidence(name: String) = - if !name.startsWith(NameKinds.EvidenceParamName.separator) then false else + if !name.startsWith(NameKinds.ContextBoundParamName.separator) then false else symbol.paramSymss.flatten.find(_.name.show == name).exists(_.flags.is(Flags.Implicit)) denot.info.stripPoly match diff --git a/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala b/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala index dd766dc99c7e..b243145c9e5f 100644 --- a/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala +++ b/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala @@ -14,7 +14,10 @@ abstract class SimpleIdentitySet[+Elem <: AnyRef] { def contains[E >: Elem <: AnyRef](x: E): Boolean def foreach(f: Elem => Unit): Unit def exists[E >: Elem <: AnyRef](p: E => Boolean): Boolean - def map[B <: AnyRef](f: Elem => B): SimpleIdentitySet[B] + def map[B <: AnyRef](f: Elem => B): SimpleIdentitySet[B] = + var acc: SimpleIdentitySet[B] = SimpleIdentitySet.empty + foreach(x => acc += f(x)) + acc def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A def toList: List[Elem] def iterator: Iterator[Elem] @@ -63,7 +66,7 @@ object SimpleIdentitySet { def contains[E <: AnyRef](x: E): Boolean = false def foreach(f: Nothing => Unit): Unit = () def exists[E <: AnyRef](p: E => Boolean): Boolean = false - def map[B <: AnyRef](f: Nothing => B): SimpleIdentitySet[B] = empty + override def map[B <: AnyRef](f: Nothing => B): SimpleIdentitySet[B] = empty def /: [A, E <: AnyRef](z: A)(f: (A, E) => A): A = z def toList = Nil def iterator = Iterator.empty @@ -79,7 +82,7 @@ object SimpleIdentitySet { def foreach(f: Elem => Unit): Unit = f(x0.asInstanceOf[Elem]) def exists[E >: Elem <: AnyRef](p: E => Boolean): Boolean = p(x0.asInstanceOf[E]) - def map[B <: AnyRef](f: Elem => B): SimpleIdentitySet[B] = + override def map[B <: AnyRef](f: Elem => B): SimpleIdentitySet[B] = Set1(f(x0.asInstanceOf[Elem])) def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A = f(z, x0.asInstanceOf[E]) @@ -99,8 +102,10 @@ object SimpleIdentitySet { def foreach(f: Elem => Unit): Unit = { f(x0.asInstanceOf[Elem]); f(x1.asInstanceOf[Elem]) } def exists[E >: Elem <: AnyRef](p: E => Boolean): Boolean = p(x0.asInstanceOf[E]) || p(x1.asInstanceOf[E]) - def map[B <: AnyRef](f: Elem => B): SimpleIdentitySet[B] = - Set2(f(x0.asInstanceOf[Elem]), f(x1.asInstanceOf[Elem])) + override def map[B <: AnyRef](f: Elem => B): SimpleIdentitySet[B] = + val y0 = f(x0.asInstanceOf[Elem]) + val y1 = f(x1.asInstanceOf[Elem]) + if y0 eq y1 then Set1(y0) else Set2(y0, y1) def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A = f(f(z, x0.asInstanceOf[E]), x1.asInstanceOf[E]) def toList = x0.asInstanceOf[Elem] :: x1.asInstanceOf[Elem] :: Nil @@ -133,8 +138,14 @@ object SimpleIdentitySet { } def exists[E >: Elem <: AnyRef](p: E => Boolean): Boolean = p(x0.asInstanceOf[E]) || p(x1.asInstanceOf[E]) || p(x2.asInstanceOf[E]) - def map[B <: AnyRef](f: Elem => B): SimpleIdentitySet[B] = - Set3(f(x0.asInstanceOf[Elem]), f(x1.asInstanceOf[Elem]), f(x2.asInstanceOf[Elem])) + override def map[B <: AnyRef](f: Elem => B): SimpleIdentitySet[B] = + val y0 = f(x0.asInstanceOf[Elem]) + val y1 = f(x1.asInstanceOf[Elem]) + val y2 = f(x2.asInstanceOf[Elem]) + if y1 eq y0 then + if y2 eq y0 then Set1(y0) else Set2(y0, y2) + else if (y2 eq y0) || (y2 eq y1) then Set2(y0, y1) + else Set3(y0, y1, y2) def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A = f(f(f(z, x0.asInstanceOf[E]), 
x1.asInstanceOf[E]), x2.asInstanceOf[E]) def toList = x0.asInstanceOf[Elem] :: x1.asInstanceOf[Elem] :: x2.asInstanceOf[Elem] :: Nil @@ -182,8 +193,6 @@ object SimpleIdentitySet { } def exists[E >: Elem <: AnyRef](p: E => Boolean): Boolean = xs.asInstanceOf[Array[E]].exists(p) - def map[B <: AnyRef](f: Elem => B): SimpleIdentitySet[B] = - SetN(xs.map(x => f(x.asInstanceOf[Elem]).asInstanceOf[AnyRef])) def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A = xs.asInstanceOf[Array[E]].foldLeft(z)(f) def toList: List[Elem] = { diff --git a/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala b/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala index 42286aef5d31..b51f6bdcac61 100644 --- a/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala +++ b/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala @@ -5,7 +5,7 @@ package dotty.tools.dotc.util * */ class SixteenNibbles(val bits: Long) extends AnyVal { - import SixteenNibbles._ + import SixteenNibbles.* def apply(idx: Int): Int = (bits >>> (idx * Width)).toInt & Mask diff --git a/compiler/src/dotty/tools/dotc/util/SourceFile.scala b/compiler/src/dotty/tools/dotc/util/SourceFile.scala index 42d07869f74e..9da4f58f2deb 100644 --- a/compiler/src/dotty/tools/dotc/util/SourceFile.scala +++ b/compiler/src/dotty/tools/dotc/util/SourceFile.scala @@ -4,20 +4,22 @@ package util import scala.language.unsafeNulls -import dotty.tools.io._ -import Spans._ -import core.Contexts._ +import dotty.tools.io.* +import Spans.* +import core.Contexts.* import scala.io.Codec -import Chars._ +import Chars.* import scala.annotation.internal.sharable import scala.collection.mutable import scala.collection.mutable.ArrayBuffer +import scala.compiletime.uninitialized import scala.util.chaining.given import java.io.File.separator +import java.net.URI import java.nio.charset.StandardCharsets -import java.nio.file.{FileSystemException, NoSuchFileException} +import java.nio.file.{FileSystemException, NoSuchFileException, Paths} import java.util.Optional import java.util.concurrent.atomic.AtomicInteger import java.util.regex.Pattern @@ -60,7 +62,7 @@ object ScriptSourceFile { } class SourceFile(val file: AbstractFile, computeContent: => Array[Char]) extends interfaces.SourceFile { - import SourceFile._ + import SourceFile.* private var myContent: Array[Char] | Null = null @@ -136,7 +138,7 @@ class SourceFile(val file: AbstractFile, computeContent: => Array[Char]) extends buf.toArray } - private var lineIndicesCache: Array[Int] = _ + private var lineIndicesCache: Array[Int] = uninitialized private def lineIndices: Array[Int] = if lineIndicesCache eq null then lineIndicesCache = calculateLineIndicesFromContents() @@ -222,6 +224,13 @@ object SourceFile { SourceFile(new VirtualFile(name.replace(separator, "/"), content.getBytes(StandardCharsets.UTF_8)), content.toCharArray) .tap(_._maybeInComplete = maybeIncomplete) + /** A helper method to create a virtual source file for given URI. + * It relies on SourceFile#virtual implementation to create the virtual file. + */ + def virtual(uri: URI, content: String): SourceFile = + val path = Paths.get(uri).toString + SourceFile.virtual(path, content) + /** Returns the relative path of `source` within the `reference` path * * It returns the absolute path of `source` if it is not contained in `reference`. @@ -251,7 +260,7 @@ object SourceFile { // and use both slashes as separators, or on other OS and use forward slash // as separator, backslash as file name character. 
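The specialized `SimpleIdentitySet.map` overrides above compare mapped results with `eq`, so a function that sends two elements to the same reference yields a smaller set (`Set2` can collapse to `Set1`, `Set3` to `Set2` or `Set1`), and `SetN.map` now funnels through the generic fold for the same reason. A standalone sketch of the `Set2` logic:

```scala
@main def identityMapDemo(): Unit =
  // Mirrors the Set2.map override above: if f sends both elements to the
  // same reference, the mapped set has one element, not two.
  def mapPair[A <: AnyRef, B <: AnyRef](x0: A, x1: A)(f: A => B): List[B] =
    val y0 = f(x0)
    val y1 = f(x1)
    if y0 eq y1 then List(y0) else List(y0, y1)

  val shared = "shared"
  println(mapPair("a", "b")(_ => shared)) // List(shared): collapsed to one element
  println(mapPair("a", "b")(identity))    // List(a, b): kept apart
```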
- import scala.jdk.CollectionConverters._ + import scala.jdk.CollectionConverters.* val path = refPath.relativize(sourcePath) path.iterator.asScala.mkString("/") else diff --git a/compiler/src/dotty/tools/dotc/util/SourcePosition.scala b/compiler/src/dotty/tools/dotc/util/SourcePosition.scala index 29f9a34d2292..904704b2349c 100644 --- a/compiler/src/dotty/tools/dotc/util/SourcePosition.scala +++ b/compiler/src/dotty/tools/dotc/util/SourcePosition.scala @@ -5,7 +5,7 @@ package util import scala.language.unsafeNulls import printing.{Showable, Printer} -import printing.Texts._ +import printing.Texts.* import core.Contexts.Context import Spans.{Span, NoSpan} import scala.annotation.internal.sharable diff --git a/compiler/src/dotty/tools/dotc/util/Stats.scala b/compiler/src/dotty/tools/dotc/util/Stats.scala index e9b72015b202..750a799a9f0a 100644 --- a/compiler/src/dotty/tools/dotc/util/Stats.scala +++ b/compiler/src/dotty/tools/dotc/util/Stats.scala @@ -4,13 +4,17 @@ package util import scala.annotation.internal.sharable -import core.Contexts._ +import core.Contexts.* import collection.mutable @sharable object Stats { + // when false, Stats.record and Stats.trackTime are elided. inline val enabled = false + // set to true if only `trackTime` should be recorded by default + inline val timerOnly = false + var monitored: Boolean = false @volatile private var stack: List[String] = Nil @@ -19,8 +23,8 @@ import collection.mutable override def default(key: String): Int = 0 } - inline def record(inline fn: String, inline n: Int = 1): Unit = - if (enabled) doRecord(fn, n) + inline def record(inline fn: String, inline n: Int = 1, inline skip: Boolean = timerOnly): Unit = + if (enabled && !skip) doRecord(fn, n) def doRecord(fn: String, n: Int) = if (monitored) { @@ -28,7 +32,7 @@ import collection.mutable hits(name) += n } - def doRecordSize(fn: String, coll: scala.collection.Iterable[_]): coll.type = + def doRecordSize(fn: String, coll: scala.collection.Iterable[?]): coll.type = doRecord(fn, coll.size) coll @@ -38,7 +42,7 @@ import collection.mutable def doTrackTime[T](fn: String)(op: => T): T = { if (monitored) { val start = System.nanoTime - try op finally record(fn, ((System.nanoTime - start) / 1000).toInt) + try op finally record(fn, ((System.nanoTime - start) / 1000).toInt, skip = false) } else op } diff --git a/compiler/src/dotty/tools/dotc/util/Store.scala b/compiler/src/dotty/tools/dotc/util/Store.scala index d8c9230b9272..8605b9021980 100644 --- a/compiler/src/dotty/tools/dotc/util/Store.scala +++ b/compiler/src/dotty/tools/dotc/util/Store.scala @@ -8,7 +8,7 @@ object Store { } class Store(private val elems: Array[AnyRef | Null]) extends AnyVal { - import Store._ + import Store.* def newLocation[T](): (Location[T], Store) = { val elems1 = new Array[AnyRef | Null](elems.length + 1) diff --git a/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala b/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala index 975826a87a37..d93505f6f3c2 100644 --- a/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala @@ -6,7 +6,7 @@ import java.lang.ref.{ReferenceQueue, WeakReference} import scala.annotation.{ constructorOnly, tailrec } -import dotty.tools._ +import dotty.tools.* /** * A HashSet where the elements are stored weakly. 
Elements in this set are eligible for GC if no other @@ -20,7 +20,7 @@ import dotty.tools._ */ abstract class WeakHashSet[A <: AnyRef](initialCapacity: Int = 8, loadFactor: Double = 0.5) extends MutableSet[A] { - import WeakHashSet._ + import WeakHashSet.* type This = WeakHashSet[A] diff --git a/compiler/src/dotty/tools/dotc/util/kwords.sc b/compiler/src/dotty/tools/dotc/util/kwords.sc index 961be3b0aa23..377be9dbcb65 100644 --- a/compiler/src/dotty/tools/dotc/util/kwords.sc +++ b/compiler/src/dotty/tools/dotc/util/kwords.sc @@ -1,8 +1,8 @@ package dotty.tools.dotc.util -import dotty.tools.dotc.parsing._ -import Scanners._ -import Tokens._ +import dotty.tools.dotc.parsing.* +import Scanners.* +import Tokens.* object kwords { println("Welcome to the Scala worksheet") //> Welcome to the Scala worksheet diff --git a/compiler/src/dotty/tools/io/AbstractFile.scala b/compiler/src/dotty/tools/io/AbstractFile.scala index f34fe6f40b9c..a7dcb7d396e8 100644 --- a/compiler/src/dotty/tools/io/AbstractFile.scala +++ b/compiler/src/dotty/tools/io/AbstractFile.scala @@ -97,8 +97,10 @@ abstract class AbstractFile extends Iterable[AbstractFile] { /** Returns the path of this abstract file in a canonical form. */ def canonicalPath: String = if (jpath == null) path else jpath.normalize.toString - /** Checks extension case insensitively. */ + /** Checks extension case insensitively. TODO: change to enum */ def hasExtension(other: String): Boolean = extension == other.toLowerCase + + /** Returns the extension of this abstract file. TODO: store as an enum to avoid costly comparisons */ val extension: String = Path.extension(name) /** The absolute file, if this is a relative file. */ @@ -250,6 +252,12 @@ abstract class AbstractFile extends Iterable[AbstractFile] { file } + /** Returns the sibling abstract file in the parent of this abstract file or directory. + * If there is no such file, returns `null`. + */ + final def resolveSibling(name: String): AbstractFile | Null = + container.lookupName(name, directory = false) + private def fileOrSubdirectoryNamed(name: String, isDir: Boolean): AbstractFile = lookupName(name, isDir) match { case null => diff --git a/compiler/src/dotty/tools/io/ClassPath.scala b/compiler/src/dotty/tools/io/ClassPath.scala index b45de57f9850..8df4015a53c2 100644 --- a/compiler/src/dotty/tools/io/ClassPath.scala +++ b/compiler/src/dotty/tools/io/ClassPath.scala @@ -21,7 +21,7 @@ import dotc.classpath.{ PackageEntry, ClassPathEntries, PackageName } * A representation of the compiler's class- or sourcepath. */ trait ClassPath { - import dotty.tools.dotc.classpath._ + import dotty.tools.dotc.classpath.* def asURLs: Seq[URL] final def hasPackage(pkg: String): Boolean = hasPackage(PackageName(pkg)) @@ -94,7 +94,7 @@ trait ClassPath { /** The whole classpath in the form of one String. */ - def asClassPathString: String = ClassPath.join(asClassPathStrings: _*) + def asClassPathString: String = ClassPath.join(asClassPathStrings*) // for compatibility purposes @deprecated("use asClassPathString instead of this one", "2.11.5") def asClasspathString: String = asClassPathString @@ -152,7 +152,7 @@ object ClassPath { def join(paths: String*): String = paths.filterNot(_ == "").mkString(pathSeparator) /** Split the classpath, apply a transformation function, and reassemble it. 
 */
-  def map(cp: String, f: String => String): String = join(split(cp) map f: _*)
+  def map(cp: String, f: String => String): String = join(split(cp).map(f)*)
 
   /** Expand path and possibly expanding stars */
   def expandPath(path: String, expandStar: Boolean = true): List[String] =
diff --git a/compiler/src/dotty/tools/io/ClassfileWriterOps.scala b/compiler/src/dotty/tools/io/ClassfileWriterOps.scala
new file mode 100644
index 000000000000..c2107ded6f51
--- /dev/null
+++ b/compiler/src/dotty/tools/io/ClassfileWriterOps.scala
@@ -0,0 +1,50 @@
+package dotty.tools.io
+
+import dotty.tools.io.*
+import dotty.tools.dotc.core.Decorators.*
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.report
+import scala.language.unsafeNulls
+import scala.annotation.constructorOnly
+
+
+/** Experimental usage - writes bytes to JarArchives */
+class ClassfileWriterOps(outputDir: JarArchive)(using @constructorOnly ictx: Context) {
+
+  type InternalName = String
+
+  // if non-null, classfiles are written to a jar instead of the output directory
+  private val jarWriter: JarWriter | Null =
+    val localCtx = ictx
+    outputDir.underlyingSource.map { source =>
+      if outputDir.isEmpty then
+        new Jar(source.file).jarWriter()
+      else inContext(localCtx) {
+        // Writing to a non-empty JAR might be undefined behaviour, e.g. if other files were
+        // created using `AbstractFile.bufferedOutputStream` instead of JarWriter
+        report.warning(em"Tried to write to non-empty JAR: $source")
+        null
+      }
+    }.getOrElse(
+      inContext(localCtx) {
+        report.warning(em"tried to create a file writer for $outputDir, but it had no underlying source.")
+        null
+      }
+    )
+
+  def writeTasty(className: InternalName, bytes: Array[Byte]): Unit =
+    writeToJar(className, bytes, ".tasty")
+
+  private def writeToJar(className: InternalName, bytes: Array[Byte], suffix: String): Unit = {
+    if (jarWriter == null) return
+    val path = className + suffix
+    val out = jarWriter.newOutputStream(path)
+    try out.write(bytes, 0, bytes.length)
+    finally out.flush()
+  }
+
+  def close(): Unit = {
+    if (jarWriter != null) jarWriter.close()
+    outputDir.close()
+  }
+}
diff --git a/compiler/src/dotty/tools/io/File.scala b/compiler/src/dotty/tools/io/File.scala
index 27f2c077dd6a..22a0e04b2b48 100644
--- a/compiler/src/dotty/tools/io/File.scala
+++ b/compiler/src/dotty/tools/io/File.scala
@@ -12,7 +12,7 @@ import scala.language.unsafeNulls
 
 import java.io.{File => JavaIoFile, _}
 import java.nio.file.{Files, Paths}
-import java.nio.file.StandardOpenOption._
+import java.nio.file.StandardOpenOption.*
 import scala.io.Codec
 
 /**
@@ -35,7 +35,7 @@ object File {
  *  @author  Paul Phillips
  *  @since   2.8
  *
- *  ''Note: This is library is considered experimental and should not be used unless you know what you are doing.''
+ *  ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
 */
class File(jpath: JPath)(implicit constructorCodec: Codec) extends Path(jpath) with Streamable.Chars {
   override val creationCodec: io.Codec = constructorCodec
diff --git a/compiler/src/dotty/tools/io/Jar.scala b/compiler/src/dotty/tools/io/Jar.scala
index 9979a9ca9379..3e65d2f7635d 100644
--- a/compiler/src/dotty/tools/io/Jar.scala
+++ b/compiler/src/dotty/tools/io/Jar.scala
@@ -10,8 +10,8 @@ package io
 import scala.language.unsafeNulls
 
 import java.io.{ InputStream, OutputStream, DataOutputStream }
-import java.util.jar._
-import scala.jdk.CollectionConverters._
+import java.util.jar.*
+import scala.jdk.CollectionConverters.*
 import
scala.collection.mutable import Attributes.Name import scala.annotation.tailrec @@ -42,7 +42,7 @@ class Jar(file: File) { protected def errorFn(msg: String): Unit = Console println msg - import Jar._ + import Jar.* lazy val jarFile: JarFile = new JarFile(file.jpath.toFile) lazy val manifest: Option[Manifest] = withJarInput(s => Option(s.getManifest)) @@ -62,7 +62,7 @@ class Jar(file: File) { finally in.close() } def jarWriter(mainAttrs: (Attributes.Name, String)*): JarWriter = { - new JarWriter(file, Jar.WManifest.apply(mainAttrs: _*).underlying) + new JarWriter(file, Jar.WManifest.apply(mainAttrs*).underlying) } def toList: List[JarEntry] = withJarInput { in => @@ -142,7 +142,7 @@ object Jar { def underlying: JManifest = manifest def attrs: mutable.Map[Name, String] = manifest.getMainAttributes().asInstanceOf[AttributeMap].asScala withDefaultValue null def initialMainAttrs: Map[Attributes.Name, String] = { - import scala.util.Properties._ + import scala.util.Properties.* Map( Name.MANIFEST_VERSION -> "1.0", ScalaCompilerVersion -> versionNumberString diff --git a/compiler/src/dotty/tools/io/JarArchive.scala b/compiler/src/dotty/tools/io/JarArchive.scala index 8f3b5d8010e4..f42f68e745ed 100644 --- a/compiler/src/dotty/tools/io/JarArchive.scala +++ b/compiler/src/dotty/tools/io/JarArchive.scala @@ -4,7 +4,7 @@ import scala.language.unsafeNulls import java.nio.file.{FileSystemAlreadyExistsException, FileSystems} -import scala.jdk.CollectionConverters._ +import scala.jdk.CollectionConverters.* /** * This class implements an [[AbstractFile]] backed by a jar @@ -12,6 +12,7 @@ import scala.jdk.CollectionConverters._ */ class JarArchive private (root: Directory) extends PlainDirectory(root) { def close(): Unit = jpath.getFileSystem().close() + override def exists: Boolean = jpath.getFileSystem().isOpen() && super.exists def allFileNames(): Iterator[String] = java.nio.file.Files.walk(jpath).iterator().asScala.map(_.toString) } diff --git a/compiler/src/dotty/tools/io/Path.scala b/compiler/src/dotty/tools/io/Path.scala index dddb870afc65..c8420c5e381d 100644 --- a/compiler/src/dotty/tools/io/Path.scala +++ b/compiler/src/dotty/tools/io/Path.scala @@ -8,11 +8,11 @@ package dotty.tools.io import scala.language.unsafeNulls import java.io.RandomAccessFile -import java.nio.file._ +import java.nio.file.* import java.net.{URI, URL} import java.nio.file.attribute.{BasicFileAttributes, FileTime} import java.io.IOException -import scala.jdk.CollectionConverters._ +import scala.jdk.CollectionConverters.* import scala.util.Random.alphanumeric /** An abstraction for filesystem paths. The differences between @@ -63,7 +63,7 @@ object Path { private[io] def randomPrefix: String = alphanumeric take 6 mkString "" private[io] def fail(msg: String): Nothing = throw FileOperationException(msg) } -import Path._ +import Path.* /** The Path constructor is private so we can enforce some * semantics regarding how a Path might relate to the world. diff --git a/compiler/src/dotty/tools/io/PlainFile.scala b/compiler/src/dotty/tools/io/PlainFile.scala index 898e037488f7..acef191d3072 100644 --- a/compiler/src/dotty/tools/io/PlainFile.scala +++ b/compiler/src/dotty/tools/io/PlainFile.scala @@ -102,8 +102,15 @@ class PlainFile(val givenPath: Path) extends AbstractFile { */ def lookupName(name: String, directory: Boolean): AbstractFile = { val child = givenPath / name - if ((child.isDirectory && directory) || (child.isFile && !directory)) new PlainFile(child) - else null + if directory then + if child.isDirectory /* IO! 
*/ then + new PlainFile(child) + else + null + else if child.isFile /* IO! */ then + new PlainFile(child) + else + null } /** Does this abstract file denote an existing file? */ diff --git a/compiler/src/dotty/tools/io/ZipArchive.scala b/compiler/src/dotty/tools/io/ZipArchive.scala index 4383bc187979..3a4d32614c82 100644 --- a/compiler/src/dotty/tools/io/ZipArchive.scala +++ b/compiler/src/dotty/tools/io/ZipArchive.scala @@ -13,7 +13,7 @@ import java.nio.file.Files import java.util.zip.{ ZipEntry, ZipFile } import java.util.jar.Manifest import scala.collection.mutable -import scala.jdk.CollectionConverters._ +import scala.jdk.CollectionConverters.* /** An abstraction for zip files and streams. Everything is written the way * it is for performance: we come through here a lot on every run. Be careful @@ -52,7 +52,7 @@ object ZipArchive { else path.substring(idx + 1) } } -import ZipArchive._ +import ZipArchive.* /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ abstract class ZipArchive(override val jpath: JPath, release: Option[String]) extends AbstractFile with Equals { self => @@ -72,6 +72,7 @@ abstract class ZipArchive(override val jpath: JPath, release: Option[String]) ex // have to keep this name for compat with sbt's compiler-interface def getArchive: ZipFile = null override def underlyingSource: Option[ZipArchive] = Some(self) + override def container: Entry = parent override def toString: String = self.path + "(" + path + ")" } @@ -213,7 +214,7 @@ final class FileZipArchive(jpath: JPath, release: Option[String]) extends ZipArc case _ => false } - private[this] var closeables: List[java.io.Closeable] = Nil + private var closeables: List[java.io.Closeable] = Nil override def close(): Unit = { closeables.foreach(_.close) closeables = Nil @@ -280,7 +281,7 @@ final class ManifestResources(val url: URL) extends ZipArchive(null, None) { } } - private[this] var closeables: List[java.io.Closeable] = Nil + private var closeables: List[java.io.Closeable] = Nil override def close(): Unit = { closeables.foreach(_.close()) closeables = Nil diff --git a/compiler/src/dotty/tools/repl/CollectTopLevelImports.scala b/compiler/src/dotty/tools/repl/CollectTopLevelImports.scala index d539c1986f8d..860c4a9372f9 100644 --- a/compiler/src/dotty/tools/repl/CollectTopLevelImports.scala +++ b/compiler/src/dotty/tools/repl/CollectTopLevelImports.scala @@ -1,20 +1,22 @@ package dotty.tools.repl import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Phases.Phase +import scala.compiletime.uninitialized + /** A phase that collects user defined top level imports. * * These imports must be collected as typed trees and therefore * after Typer. 
*/ class CollectTopLevelImports extends Phase { - import tpd._ + import tpd.* def phaseName: String = "collectTopLevelImports" - private var myImports: List[Import] = _ + private var myImports: List[Import] = uninitialized def imports: List[Import] = myImports def run(using Context): Unit = { diff --git a/compiler/src/dotty/tools/repl/JLineTerminal.scala b/compiler/src/dotty/tools/repl/JLineTerminal.scala index 8e048d786ae1..294f0a331ec2 100644 --- a/compiler/src/dotty/tools/repl/JLineTerminal.scala +++ b/compiler/src/dotty/tools/repl/JLineTerminal.scala @@ -2,15 +2,15 @@ package dotty.tools.repl import scala.language.unsafeNulls -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.parsing.Scanners.Scanner -import dotty.tools.dotc.parsing.Tokens._ +import dotty.tools.dotc.parsing.Tokens.* import dotty.tools.dotc.printing.SyntaxHighlighting import dotty.tools.dotc.reporting.Reporter import dotty.tools.dotc.util.SourceFile import org.jline.reader import org.jline.reader.Parser.ParseContext -import org.jline.reader._ +import org.jline.reader.* import org.jline.reader.impl.LineReaderImpl import org.jline.reader.impl.history.DefaultHistory import org.jline.terminal.TerminalBuilder @@ -50,8 +50,8 @@ class JLineTerminal extends java.io.Closeable { def readLine( completer: Completer // provide auto-completions )(using Context): String = { - import LineReader.Option._ - import LineReader._ + import LineReader.Option.* + import LineReader.* val userHome = System.getProperty("user.home") val lineReader = LineReaderBuilder .builder() diff --git a/compiler/src/dotty/tools/repl/ParseResult.scala b/compiler/src/dotty/tools/repl/ParseResult.scala index a67b247066f7..b9139343bca1 100644 --- a/compiler/src/dotty/tools/repl/ParseResult.scala +++ b/compiler/src/dotty/tools/repl/ParseResult.scala @@ -3,7 +3,7 @@ package repl import dotc.CompilationUnit import dotc.ast.untpd -import dotc.core.Contexts._ +import dotc.core.Contexts.* import dotc.core.StdNames.str import dotc.parsing.Parsers.Parser import dotc.parsing.Tokens diff --git a/compiler/src/dotty/tools/repl/Rendering.scala b/compiler/src/dotty/tools/repl/Rendering.scala index c647ef302bb9..517815615f2a 100644 --- a/compiler/src/dotty/tools/repl/Rendering.scala +++ b/compiler/src/dotty/tools/repl/Rendering.scala @@ -7,9 +7,9 @@ import dotc.*, core.* import Contexts.*, Denotations.*, Flags.*, NameOps.*, StdNames.*, Symbols.* import printing.ReplPrinter import reporting.Diagnostic -import transform.ValueClasses import util.StackTraceOps.* +import scala.compiletime.uninitialized import scala.util.control.NonFatal /** This rendering object uses `ClassLoader`s to accomplish crossing the 4th @@ -22,12 +22,12 @@ import scala.util.control.NonFatal */ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): - import Rendering._ + import Rendering.* - var myClassLoader: AbstractFileClassLoader = _ + var myClassLoader: AbstractFileClassLoader = uninitialized /** (value, maxElements, maxCharacters) => String */ - var myReplStringOf: (Object, Int, Int) => String = _ + var myReplStringOf: (Object, Int, Int) => String = uninitialized /** Class loader used to load compiled code */ private[repl] def classLoader()(using Context) = @@ -126,7 +126,7 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): * @param value underlying value */ private def rewrapValueClass(sym: Symbol, value: Object)(using Context): Option[Object] = - if ValueClasses.isDerivedValueClass(sym) then + 
if sym.isDerivedValueClass then val valueClass = Class.forName(sym.binaryClassName, true, classLoader()) valueClass.getConstructors.headOption.map(_.newInstance(value)) else diff --git a/compiler/src/dotty/tools/repl/ReplCommand.scala b/compiler/src/dotty/tools/repl/ReplCommand.scala index 3e46106acc2c..0b40a7cec0b3 100644 --- a/compiler/src/dotty/tools/repl/ReplCommand.scala +++ b/compiler/src/dotty/tools/repl/ReplCommand.scala @@ -1,6 +1,6 @@ package dotty.tools.repl -import dotty.tools.dotc.config.Properties._ +import dotty.tools.dotc.config.Properties.* import dotty.tools.dotc.config.CompilerCommand object ReplCommand extends CompilerCommand: diff --git a/compiler/src/dotty/tools/repl/ReplCompiler.scala b/compiler/src/dotty/tools/repl/ReplCompiler.scala index 764695e8479b..d69173cb6d88 100644 --- a/compiler/src/dotty/tools/repl/ReplCompiler.scala +++ b/compiler/src/dotty/tools/repl/ReplCompiler.scala @@ -1,23 +1,24 @@ package dotty.tools.repl -import dotty.tools.dotc.ast.Trees._ +import dotty.tools.dotc.ast.Trees.* import dotty.tools.dotc.ast.{tpd, untpd} import dotty.tools.dotc.ast.tpd.TreeOps -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.Names._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.CompilationUnitInfo +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.Names.* import dotty.tools.dotc.core.Phases.Phase -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.reporting.Diagnostic import dotty.tools.dotc.transform.PostTyper import dotty.tools.dotc.typer.ImportInfo.{withRootImports, RootRef} import dotty.tools.dotc.typer.TyperPhase -import dotty.tools.dotc.util.Spans._ +import dotty.tools.dotc.util.Spans.* import dotty.tools.dotc.util.{ParsedComment, Property, SourceFile} import dotty.tools.dotc.{CompilationUnit, Compiler, Run} -import dotty.tools.repl.results._ +import dotty.tools.repl.results.* import scala.collection.mutable import scala.util.chaining.given @@ -93,9 +94,9 @@ class ReplCompiler extends Compiler: end compile final def typeOf(expr: String)(using state: State): Result[String] = - typeCheck(expr).map { tree => + typeCheck(expr).map { (_, tpdTree) => given Context = state.context - tree.rhs match { + tpdTree.rhs match { case Block(xs, _) => xs.last.tpe.widen.show case _ => """Couldn't compute the type of your expression, so sorry :( @@ -129,7 +130,7 @@ class ReplCompiler extends Compiler: Iterator(sym) ++ sym.allOverriddenSymbols } - typeCheck(expr).map { + typeCheck(expr).map { (_, tpdTree) => tpdTree match case ValDef(_, _, Block(stats, _)) if stats.nonEmpty => val stat = stats.last.asInstanceOf[tpd.Tree] if (stat.tpe.isError) stat.tpe.show @@ -152,11 +153,11 @@ class ReplCompiler extends Compiler: } } - final def typeCheck(expr: String, errorsAllowed: Boolean = false)(using state: State): Result[tpd.ValDef] = { + final def typeCheck(expr: String, errorsAllowed: Boolean = false)(using state: State): Result[(untpd.ValDef, tpd.ValDef)] = { def wrapped(expr: String, sourceFile: SourceFile, state: State)(using Context): Result[untpd.PackageDef] = { def wrap(trees: List[untpd.Tree]): untpd.PackageDef = { - import untpd._ + import untpd.* val valdef = ValDef("expr".toTermName, TypeTree(), Block(trees, unitLiteral).withSpan(Span(0, expr.length))) val tmpl = 
Template(emptyConstructor, Nil, Nil, EmptyValDef, List(valdef)) @@ -181,22 +182,32 @@ class ReplCompiler extends Compiler: } } - def unwrapped(tree: tpd.Tree, sourceFile: SourceFile)(using Context): Result[tpd.ValDef] = { - def error: Result[tpd.ValDef] = - List(new Diagnostic.Error(s"Invalid scala expression", - sourceFile.atSpan(Span(0, sourceFile.content.length)))).errors + def error[Tree <: untpd.Tree](sourceFile: SourceFile): Result[Tree] = + List(new Diagnostic.Error(s"Invalid scala expression", + sourceFile.atSpan(Span(0, sourceFile.content.length)))).errors - import tpd._ + def unwrappedTypeTree(tree: tpd.Tree, sourceFile0: SourceFile)(using Context): Result[tpd.ValDef] = { + import tpd.* tree match { case PackageDef(_, List(TypeDef(_, tmpl: Template))) => tmpl.body .collectFirst { case dd: ValDef if dd.name.show == "expr" => dd.result } - .getOrElse(error) + .getOrElse(error[tpd.ValDef](sourceFile0)) case _ => - error + error[tpd.ValDef](sourceFile0) } } + def unwrappedUntypedTree(tree: untpd.Tree, sourceFile0: SourceFile)(using Context): Result[untpd.ValDef] = + import untpd.* + tree match { + case PackageDef(_, List(TypeDef(_, tmpl: Template))) => + tmpl.body + .collectFirst { case dd: ValDef if dd.name.show == "expr" => dd.result } + .getOrElse(error[untpd.ValDef](sourceFile0)) + case _ => + error[untpd.ValDef](sourceFile0) + } val src = SourceFile.virtual("", expr) inContext(state.context.fresh @@ -209,7 +220,10 @@ class ReplCompiler extends Compiler: ctx.run.nn.compileUnits(unit :: Nil, ctx) if (errorsAllowed || !ctx.reporter.hasErrors) - unwrapped(unit.tpdTree, src) + for + tpdTree <- unwrappedTypeTree(unit.tpdTree, src) + untpdTree <- unwrappedUntypedTree(unit.untpdTree, src) + yield untpdTree -> tpdTree else ctx.reporter.removeBufferedMessages.errors } @@ -220,7 +234,7 @@ object ReplCompiler: val objectNames = mutable.Map.empty[Int, TermName] end ReplCompiler -class ReplCompilationUnit(source: SourceFile) extends CompilationUnit(source): +class ReplCompilationUnit(source: SourceFile) extends CompilationUnit(source, CompilationUnitInfo(source.file)): override def isSuspendable: Boolean = false /** A placeholder phase that receives parse trees.. 
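The `typeCheck` change above means the REPL now hands back both the untyped and the typed `ValDef`, so callers such as the completion path in `ReplDriver` (next file) can attach whichever tree they need to the compilation unit. For orientation, here is a minimal sketch of the wrapping scheme itself, rendered as a source-string transformation rather than with the real `untpd` tree API (`WrapDemo` and the object name are illustrative only):

```scala
// Sketch only: the real ReplCompiler builds untpd trees directly, but
// the wrapper has this shape. User input is spliced into a synthetic
// `val expr = { ...; () }` so the typer produces a single ValDef whose
// rhs Block carries the expression being type-checked.
object WrapDemo:
  def wrap(userInput: String): String =
    s"""object rs$$line {
       |  val expr = {
       |    $userInput
       |    ()
       |  }
       |}""".stripMargin

  @main def wrapDemo(): Unit =
    println(wrap("""println("hello, world")"""))
```

`unwrappedTypeTree` and `unwrappedUntypedTree` then walk this shape back down to the `ValDef` named `expr`, reporting "Invalid scala expression" if it is not found.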
diff --git a/compiler/src/dotty/tools/repl/ReplDriver.scala b/compiler/src/dotty/tools/repl/ReplDriver.scala index 905f4f06de08..5226ef0b4546 100644 --- a/compiler/src/dotty/tools/repl/ReplDriver.scala +++ b/compiler/src/dotty/tools/repl/ReplDriver.scala @@ -5,21 +5,21 @@ import scala.language.unsafeNulls import java.io.{File => JFile, PrintStream} import java.nio.charset.StandardCharsets -import dotty.tools.dotc.ast.Trees._ +import dotty.tools.dotc.ast.Trees.* import dotty.tools.dotc.ast.{tpd, untpd} import dotty.tools.dotc.config.CommandLineParser.tokenize import dotty.tools.dotc.config.Properties.{javaVersion, javaVmName, simpleVersionString} -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* import dotty.tools.dotc.core.Phases.{unfusedPhases, typerPhase} import dotty.tools.dotc.core.Denotations.Denotation -import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.Mode import dotty.tools.dotc.core.NameKinds.SimpleNameKind import dotty.tools.dotc.core.NameKinds.DefaultGetterName -import dotty.tools.dotc.core.NameOps._ +import dotty.tools.dotc.core.NameOps.* import dotty.tools.dotc.core.Names.Name -import dotty.tools.dotc.core.StdNames._ +import dotty.tools.dotc.core.StdNames.* import dotty.tools.dotc.core.Symbols.{Symbol, defn} import dotty.tools.dotc.interfaces import dotty.tools.dotc.interactive.Completion @@ -30,13 +30,14 @@ import dotty.tools.dotc.util.Spans.Span import dotty.tools.dotc.util.{SourceFile, SourcePosition} import dotty.tools.dotc.{CompilationUnit, Driver} import dotty.tools.dotc.config.CompilerCommand -import dotty.tools.io._ +import dotty.tools.io.* import dotty.tools.runner.ScalaClassLoader.* -import org.jline.reader._ +import org.jline.reader.* import scala.annotation.tailrec import scala.collection.mutable -import scala.jdk.CollectionConverters._ +import scala.compiletime.uninitialized +import scala.jdk.CollectionConverters.* import scala.util.control.NonFatal import scala.util.Using @@ -116,10 +117,10 @@ class ReplDriver(settings: Array[String], rendering = new Rendering(classLoader) } - private var rootCtx: Context = _ - private var shouldStart: Boolean = _ - private var compiler: ReplCompiler = _ - protected var rendering: Rendering = _ + private var rootCtx: Context = uninitialized + private var shouldStart: Boolean = uninitialized + private var compiler: ReplCompiler = uninitialized + protected var rendering: Rendering = uninitialized // initialize the REPL session as part of the constructor so that once `run` // is called, we're in business @@ -251,10 +252,11 @@ class ReplDriver(settings: Array[String], given state: State = newRun(state0) compiler .typeCheck(expr, errorsAllowed = true) - .map { tree => + .map { (untpdTree, tpdTree) => val file = SourceFile.virtual("", expr, maybeIncomplete = true) val unit = CompilationUnit(file)(using state.context) - unit.tpdTree = tree + unit.untpdTree = untpdTree + unit.tpdTree = tpdTree given Context = state.context.fresh.setCompilationUnit(unit) val srcPos = SourcePosition(file, Span(cursor)) val completions = try Completion.completions(srcPos)._2 catch case NonFatal(_) => Nil diff --git a/compiler/src/dotty/tools/runner/ObjectRunner.scala b/compiler/src/dotty/tools/runner/ObjectRunner.scala index cb8f9d791dfa..62dbcc32f30d 100644 --- a/compiler/src/dotty/tools/runner/ObjectRunner.scala +++ b/compiler/src/dotty/tools/runner/ObjectRunner.scala @@ -19,7 +19,7 @@ 
trait CommonRunner { * @throws java.lang.reflect.InvocationTargetException */ def run(urls: Seq[URL], objectName: String, arguments: Seq[String]): Unit = { - import RichClassLoader._ + import RichClassLoader.* ScalaClassLoader.fromURLsParallelCapable(urls).run(objectName, arguments) } diff --git a/compiler/src/dotty/tools/runner/ScalaClassLoader.scala b/compiler/src/dotty/tools/runner/ScalaClassLoader.scala index 9ec0199abcbb..a8cd36cba6bd 100644 --- a/compiler/src/dotty/tools/runner/ScalaClassLoader.scala +++ b/compiler/src/dotty/tools/runner/ScalaClassLoader.scala @@ -33,7 +33,7 @@ final class RichClassLoader(private val self: ClassLoader) extends AnyVal { val method = clsToRun.getMethod("main", classOf[Array[String]]) if !Modifier.isStatic(method.getModifiers) then throw new NoSuchMethodException(s"$objectName.main is not static") - try asContext(method.invoke(null, Array(arguments.toArray: AnyRef): _*)) + try asContext(method.invoke(null, Array(arguments.toArray: AnyRef)*)) catch unwrapHandler({ case ex => throw ex }) } @@ -64,7 +64,7 @@ object ScalaClassLoader { def fromURLsParallelCapable(urls: Seq[URL], parent: ClassLoader | Null = null): URLClassLoader = new URLClassLoader(urls.toArray, if parent == null then bootClassLoader else parent) - @sharable private[this] val bootClassLoader: ClassLoader = + @sharable private val bootClassLoader: ClassLoader = if scala.util.Properties.isJavaAtLeast("9") then try ClassLoader.getSystemClassLoader.getParent diff --git a/compiler/src/dotty/tools/scripting/Main.scala b/compiler/src/dotty/tools/scripting/Main.scala index 8db12f400c64..0b954cb79b26 100755 --- a/compiler/src/dotty/tools/scripting/Main.scala +++ b/compiler/src/dotty/tools/scripting/Main.scala @@ -73,7 +73,7 @@ object Main: ) import dotty.tools.io.{Jar, Directory} val jar = new Jar(jarPath) - val writer = jar.jarWriter(manifestAttributes:_*) + val writer = jar.jarWriter(manifestAttributes*) try writer.writeAllFrom(Directory(outDir)) finally diff --git a/compiler/src/scala/quoted/runtime/impl/ExprCastException.scala b/compiler/src/scala/quoted/runtime/impl/ExprCastException.scala new file mode 100644 index 000000000000..ba870808cee3 --- /dev/null +++ b/compiler/src/scala/quoted/runtime/impl/ExprCastException.scala @@ -0,0 +1,20 @@ +package scala.quoted.runtime.impl + +import dotty.tools.dotc.ast.tpd.Tree +import dotty.tools.dotc.core.Contexts.* + +class ExprCastException(msg: String) extends Exception(msg) + + +object ExprCastException: + def apply(expectedType: String, actualType: String, exprCode: String): ExprCastException = + new ExprCastException( + s"""| + | Expected type: ${formatLines(expectedType)} + | Actual type: ${formatLines(actualType)} + | Expression: ${formatLines(exprCode)} + |""".stripMargin) + + private def formatLines(str: String): String = + if !str.contains("\n") then str + else str.linesIterator.mkString("\n ", "\n ", "\n") diff --git a/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala b/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala index bfa4c1c6d1f2..44886d59ac12 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala @@ -3,8 +3,10 @@ package runtime.impl import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.Names.* +import dotty.tools.dotc.core.Mode.GadtConstraintInference import dotty.tools.dotc.core.Types.* import 
dotty.tools.dotc.core.StdNames.nme import dotty.tools.dotc.core.Symbols.* @@ -91,7 +93,11 @@ import dotty.tools.dotc.util.optional * '{ val x: T = e1; e2 } =?= '{ val y: P = p1; p2 } ===> withEnv(x -> y)('[T] =?= '[P] &&& '{e1} =?= '{p1} &&& '{e2} =?= '{p2}) * * /* Match def */ - * '{ def x0(x1: T1, ..., xn: Tn): T0 = e1; e2 } =?= '{ def y0(y1: P1, ..., yn: Pn): P0 = p1; p2 } ===> withEnv(x0 -> y0, ..., xn -> yn)('[T0] =?= '[P0] &&& ... &&& '[Tn] =?= '[Pn] &&& '{e1} =?= '{p1} &&& '{e2} =?= '{p2}) + * '{ def x0(x1: T1, ..., xn: Tn)...(y1: U1, ..., ym: Um): T0 = e1; e2 } =?= '{ def y0(z1: P1, ..., zn: Pn)...(w1: Q1, ..., wn: Qn): P0 = p1; p2 } ===> + * /* Note that types of parameters can depend on earlier parameters */ + * withEnv(x1 -> y1, ..., zn -> zn)(...withEnv(y1 -> w1, ..., ym -> wm)( + * ('[T1] =?= '[P1] &&& ... &&&'[T1] =?= '[P1]) &&& ... &&& ('[U1] =?= '[Q1] &&& ... &&&'[Um] =?= '[Qm]) + * &&& '[T0] =?= '[P0] &&& '{e1} =?= '{p1} && '{e2} =?= '{p2})...) * * // Types * @@ -100,12 +106,9 @@ import dotty.tools.dotc.util.optional * * ``` */ -object QuoteMatcher { +class QuoteMatcher(debug: Boolean) { import tpd.* - // TODO use flag from Context. Maybe -debug or add -debug-macros - private inline val debug = false - /** Sequence of matched expressions. * These expressions are part of the scrutinee and will be bound to the quote pattern term splices. */ @@ -122,10 +125,62 @@ object QuoteMatcher { private def withEnv[T](env: Env)(body: Env ?=> T): T = body(using env) - def treeMatch(scrutineeTree: Tree, patternTree: Tree)(using Context): Option[MatchingExprs] = - given Env = Map.empty - optional: - scrutineeTree =?= patternTree + /** Evaluate the result of pattern matching against a quote pattern. + * Implementation of the runtime of `QuoteMatching.{ExprMatch, TypeMatch}.unapply`. 
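As context for the doc comment above: every quoted pattern in user macro code lowers to exactly this entry point. A hedged, user-level example of what `ExprMatch.unapply` (and hence `treeMatch`) decides; `describe` is a made-up name and the wiring is the standard macro-implementation shape:

```scala
import scala.quoted.*

// Each `case '{ ... }` below compiles into a call to
// QuoteMatching.ExprMatch.unapply, whose runtime is treeMatch;
// the spliced $x/$y holes become the returned Tuple of Exprs.
def describe(e: Expr[Any])(using Quotes): Expr[String] =
  e match
    case '{ ($x: Int) + ($y: Int) } => '{ "sum: " + ($x + $y).toString }
    case '{ $s: String }            => s
    case _                          => Expr("something else")
```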
+ */ + def treeMatch(scrutinee: Tree, pattern: Tree)(using Context): Option[Tuple] = { + val (pat1, typeHoles, ctx1) = instrumentTypeHoles(pattern) + inContext(ctx1) { + optional { + given Env = Map.empty + scrutinee =?= pat1 + }.map { matchings => + lazy val spliceScope = SpliceScope.getCurrent + // After matching and doing all subtype checks, we have to approximate all the type bindings + // that we have found, seal them in a quoted.Type and add them to the result + val typeHoleApproximations = typeHoles.map(typeHoleApproximation) + val matchedTypes = typeHoleApproximations.map { tpe => + new TypeImpl(TypeTree(tpe).withSpan(scrutinee.span), spliceScope) + } + val matchedExprs = + val typeHoleMap: Type => Type = + if typeHoles.isEmpty then identity + else new TypeMap { + private val typeHoleMapping = Map(typeHoles.zip(typeHoleApproximations)*) + def apply(tp: Type): Type = tp match + case TypeRef(NoPrefix, _) => typeHoleMapping.getOrElse(tp.typeSymbol, tp) + case _ => mapOver(tp) + } + if matchings.isEmpty then Nil + else matchings.map(_.toExpr(typeHoleMap, spliceScope)) + val results = matchedTypes ++ matchedExprs + Tuple.fromIArray(IArray.unsafeFromArray(results.toArray)) + } + } + } + + def instrumentTypeHoles(pat: Tree)(using Context): (Tree, List[Symbol], Context) = + def isTypeHoleDef(tree: Tree): Boolean = tree match + case tree: TypeDef => tree.symbol.hasAnnotation(defn.QuotedRuntimePatterns_patternTypeAnnot) + case _ => false + pat match + case tpd.Inlined(_, Nil, pat2) => instrumentTypeHoles(pat2) + case tpd.Block(stats @ ((typeHole: TypeDef) :: _), expr) if isTypeHoleDef(typeHole) => + val (holeDefs, otherStats) = stats.span(isTypeHoleDef) + val holeSyms = holeDefs.map(_.symbol) + val ctx1 = ctx.fresh.setFreshGADTBounds.addMode(GadtConstraintInference) + ctx1.gadtState.addToConstraint(holeSyms) + (tpd.cpy.Block(pat)(otherStats, expr), holeSyms, ctx1) + case _ => + (pat, Nil, ctx) + + /** Type approximation of a quote pattern type variable. + * Should only be approximated after matching the tree. + */ + def typeHoleApproximation(sym: Symbol)(using Context): Type = + val fromAboveAnnot = sym.hasAnnotation(defn.QuotedRuntimePatterns_fromAboveAnnot) + val fullBounds = ctx.gadt.fullBounds(sym) + if fromAboveAnnot then fullBounds.nn.hi else fullBounds.nn.lo /** Check that all trees match with `mtch` and concatenate the results with &&& */ private def matchLists[T](l1: List[T], l2: List[T])(mtch: (T, T) => MatchingExprs): optional[MatchingExprs] = (l1, l2) match { @@ -181,7 +236,7 @@ object QuoteMatcher { case _ => None end TypeTreeTypeTest - val res = pattern match + def runMatch(): optional[MatchingExprs] = pattern match /* Term hole */ // Match a scala.internal.Quoted.patternHole typed as a repeated argument and return the scrutinee tree @@ -206,12 +261,34 @@ object QuoteMatcher { // Matches an open term and wraps it into a lambda that provides the free variables case Apply(TypeApply(Ident(_), List(TypeTree())), SeqLiteral(args, _) :: Nil) if pattern.symbol.eq(defn.QuotedRuntimePatterns_higherOrderHole) => + + /* Some of method symbols in arguments of higher-order term hole are eta-expanded. + * e.g. + * g: (Int) => Int + * => { + * def $anonfun(y: Int): Int = g(y) + * closure($anonfun) + * } + * + * f: (using Int) => Int + * => f(using x) + * This function restores the symbol of the original method from + * the eta-expanded function. 
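The eta-expansion that `getCapturedIdent` unwinds is the ordinary one the typer performs whenever a method is used where a function value is expected. A plain-Scala illustration of the two shapes named in the comment above (no compiler internals involved):

```scala
// Using the method `g` as a value eta-expands it: the compiler emits
// roughly `def $anonfun(y: Int): Int = g(y); closure($anonfun)`,
// which is the Block/Closure tree the matcher has to look through.
def g(y: Int): Int = y + 1

val asFunction: Int => Int = g // eta-expanded reference to `g`

@main def etaDemo(): Unit =
  assert(asFunction(41) == g(41))
  println(asFunction(41)) // prints 42
```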
+ */ + def getCapturedIdent(arg: Tree)(using Context): Ident = + arg match + case id: Ident => id + case Apply(fun, _) => getCapturedIdent(fun) + case Block((ddef: DefDef) :: _, _: Closure) => getCapturedIdent(ddef.rhs) + case Typed(expr, _) => getCapturedIdent(expr) + val env = summon[Env] - val capturedArgs = args.map(_.symbol) - val captureEnv = env.filter((k, v) => !capturedArgs.contains(v)) + val capturedIds = args.map(getCapturedIdent) + val capturedSymbols = capturedIds.map(_.symbol) + val captureEnv = env.filter((k, v) => !capturedSymbols.contains(v)) withEnv(captureEnv) { scrutinee match - case ClosedPatternTerm(scrutinee) => matchedOpen(scrutinee, pattern.tpe, args, env) + case ClosedPatternTerm(scrutinee) => matchedOpen(scrutinee, pattern.tpe, capturedIds, args.map(_.tpe), env) case _ => notMatched } @@ -341,19 +418,34 @@ object QuoteMatcher { case scrutinee @ DefDef(_, paramss1, tpt1, _) => pattern match case pattern @ DefDef(_, paramss2, tpt2, _) => - def rhsEnv: Env = - val paramSyms: List[(Symbol, Symbol)] = - for - (clause1, clause2) <- paramss1.zip(paramss2) - (param1, param2) <- clause1.zip(clause2) - yield - param1.symbol -> param2.symbol - val oldEnv: Env = summon[Env] - val newEnv: List[(Symbol, Symbol)] = (scrutinee.symbol -> pattern.symbol) :: paramSyms - oldEnv ++ newEnv - matchLists(paramss1, paramss2)(_ =?= _) - &&& tpt1 =?= tpt2 - &&& withEnv(rhsEnv)(scrutinee.rhs =?= pattern.rhs) + def matchErasedParams(sctype: Type, pttype: Type): optional[MatchingExprs] = + (sctype, pttype) match + case (sctpe: MethodType, pttpe: MethodType) => + if sctpe.erasedParams.sameElements(pttpe.erasedParams) then + matchErasedParams(sctpe.resType, pttpe.resType) + else + notMatched + case _ => matched + + def matchParamss(scparamss: List[ParamClause], ptparamss: List[ParamClause])(using Env): optional[(Env, MatchingExprs)] = + (scparamss, ptparamss) match { + case (scparams :: screst, ptparams :: ptrest) => + val mr1 = matchLists(scparams, ptparams)(_ =?= _) + val newEnv = summon[Env] ++ scparams.map(_.symbol).zip(ptparams.map(_.symbol)) + val (resEnv, mrrest) = withEnv(newEnv)(matchParamss(screst, ptrest)) + (resEnv, mr1 &&& mrrest) + case (Nil, Nil) => (summon[Env], matched) + case _ => notMatched + } + + val ematch = matchErasedParams(scrutinee.tpe.widenTermRefExpr, pattern.tpe.widenTermRefExpr) + val (pEnv, pmatch) = matchParamss(paramss1, paramss2) + val defEnv = pEnv + (scrutinee.symbol -> pattern.symbol) + + ematch + &&& pmatch + &&& withEnv(defEnv)(tpt1 =?= tpt2) + &&& withEnv(defEnv)(scrutinee.rhs =?= pattern.rhs) case _ => notMatched case Closure(_, _, tpt1) => @@ -373,24 +465,32 @@ object QuoteMatcher { // No Match case _ => notMatched + end runMatch + + if debug then + try { + runMatch() + } catch { + case e: util.boundary.Break[?] 
=> + val quotes = QuotesImpl() + println( + s""">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> + |Scrutinee + | ${scrutinee.show} + |did not match pattern + | ${pattern.show} + | + |with environment: ${summon[Env]} + | + |Scrutinee: ${quotes.reflect.Printer.TreeStructure.show(scrutinee.asInstanceOf)} + |Pattern: ${quotes.reflect.Printer.TreeStructure.show(pattern.asInstanceOf)} + | + |""".stripMargin) + throw e + } + else + runMatch() - if (debug && res == notMatched) - val quotes = QuotesImpl() - println( - s""">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - |Scrutinee - | ${scrutinee.show} - |did not match pattern - | ${pattern.show} - | - |with environment: ${summon[Env]} - | - |Scrutinee: ${quotes.reflect.Printer.TreeStructure.show(scrutinee.asInstanceOf)} - |Pattern: ${quotes.reflect.Printer.TreeStructure.show(pattern.asInstanceOf)} - | - |""".stripMargin) - - res end =?= end extension @@ -444,10 +544,11 @@ object QuoteMatcher { * * @param tree Scrutinee sub-tree that matched * @param patternTpe Type of the pattern hole (from the pattern) - * @param args HOAS arguments (from the pattern) + * @param argIds Identifiers of HOAS arguments (from the pattern) + * @param argTypes Eta-expanded types of HOAS arguments (from the pattern) * @param env Mapping between scrutinee and pattern variables */ - case OpenTree(tree: Tree, patternTpe: Type, args: List[Tree], env: Env) + case OpenTree(tree: Tree, patternTpe: Type, argIds: List[Tree], argTypes: List[Type], env: Env) /** Return the expression that was extracted from a hole. * @@ -457,22 +558,25 @@ object QuoteMatcher { * * This expression is assumed to be a valid expression in the given splice scope. */ - def toExpr(mapTypeHoles: TypeMap, spliceScope: Scope)(using Context): Expr[Any] = this match + def toExpr(mapTypeHoles: Type => Type, spliceScope: Scope)(using Context): Expr[Any] = this match case MatchResult.ClosedTree(tree) => new ExprImpl(tree, spliceScope) - case MatchResult.OpenTree(tree, patternTpe, args, env) => - val names: List[TermName] = args.map { - case Block(List(DefDef(nme.ANON_FUN, _, _, Apply(Ident(name), _))), _) => name.asTermName - case arg => arg.symbol.name.asTermName - } - val paramTypes = args.map(x => mapTypeHoles(x.tpe.widenTermRefExpr)) + case MatchResult.OpenTree(tree, patternTpe, argIds, argTypes, env) => + val names: List[TermName] = argIds.map(_.symbol.name.asTermName) + val paramTypes = argTypes.map(tpe => mapTypeHoles(tpe.widenTermRefExpr)) val methTpe = MethodType(names)(_ => paramTypes, _ => mapTypeHoles(patternTpe)) val meth = newAnonFun(ctx.owner, methTpe) def bodyFn(lambdaArgss: List[List[Tree]]): Tree = { - val argsMap = args.view.map(_.symbol).zip(lambdaArgss.head).toMap + val argsMap = argIds.view.map(_.symbol).zip(lambdaArgss.head).toMap val body = new TreeMap { override def transform(tree: Tree)(using Context): Tree = tree match + /* + * When matching a method call `f(0)` against a HOAS pattern `p(g)` where + * f has a method type `(x: Int): Int` and `f` maps to `g`, `p` should hold + * `g.apply(0)` because the type of `g` is `Int => Int` due to eta expansion. 
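A small standalone rendering of why the transformer below inserts `.apply`: once the method symbol is mapped to a function-typed binding, its call sites only type-check through `Function1#apply` (the names here are just the ones from the comment above):

```scala
// `f` has method type (x: Int): Int; `g` is its eta-expanded stand-in
// of type Int => Int, so the call f(0) must be rewritten to g.apply(0).
def f(x: Int): Int = x * 2
val g: Int => Int = f

@main def hoasDemo(): Unit =
  println(f(0))       // direct method application
  println(g.apply(0)) // the same call after the f -> g substitution
```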
+ */ + case Apply(fun, args) if env.contains(tree.symbol) => transform(fun).select(nme.apply).appliedToArgs(args.map(transform)) case tree: Ident => env.get(tree.symbol).flatMap(argsMap.get).getOrElse(tree) case tree => super.transform(tree) }.transform(tree) @@ -481,7 +585,7 @@ object QuoteMatcher { val hoasClosure = Closure(meth, bodyFn) new ExprImpl(hoasClosure, spliceScope) - private inline def notMatched: optional[MatchingExprs] = + private inline def notMatched[T]: optional[T] = optional.break() private inline def matched: MatchingExprs = @@ -490,8 +594,8 @@ object QuoteMatcher { private inline def matched(tree: Tree)(using Context): MatchingExprs = Seq(MatchResult.ClosedTree(tree)) - private def matchedOpen(tree: Tree, patternTpe: Type, args: List[Tree], env: Env)(using Context): MatchingExprs = - Seq(MatchResult.OpenTree(tree, patternTpe, args, env)) + private def matchedOpen(tree: Tree, patternTpe: Type, argIds: List[Tree], argTypes: List[Type], env: Env)(using Context): MatchingExprs = + Seq(MatchResult.OpenTree(tree, patternTpe, argIds, argTypes, env)) extension (self: MatchingExprs) /** Concatenates the contents of two successful matchings */ diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index db4e3e6c6a05..51f133c972b4 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -7,21 +7,23 @@ import dotty.tools.dotc import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.ast.untpd import dotty.tools.dotc.core.Annotations -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* import dotty.tools.dotc.core.NameKinds -import dotty.tools.dotc.core.NameOps._ -import dotty.tools.dotc.core.StdNames._ +import dotty.tools.dotc.core.NameOps.* +import dotty.tools.dotc.core.StdNames.* import dotty.tools.dotc.core.Types import dotty.tools.dotc.NoCompilationUnit import dotty.tools.dotc.quoted.MacroExpansion import dotty.tools.dotc.quoted.PickledQuotes -import dotty.tools.dotc.quoted.reflect._ +import dotty.tools.dotc.quoted.QuotePatterns +import dotty.tools.dotc.quoted.reflect.* import scala.quoted.runtime.{QuoteUnpickler, QuoteMatching} -import scala.quoted.runtime.impl.printers._ +import scala.quoted.runtime.impl.printers.* import scala.reflect.TypeTest +import dotty.tools.dotc.core.NameKinds.ExceptionBinderName object QuotesImpl { @@ -37,15 +39,17 @@ object QuotesImpl { } class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler, QuoteMatching: + import tpd.* private val xCheckMacro: Boolean = ctx.settings.XcheckMacros.value + private val yDebugMacro: Boolean = ctx.settings.YdebugMacros.value extension [T](self: scala.quoted.Expr[T]) def show: String = reflect.Printer.TreeCode.show(reflect.asTerm(self)) def matches(that: scala.quoted.Expr[Any]): Boolean = - treeMatch(reflect.asTerm(self), reflect.asTerm(that)).nonEmpty + QuoteMatcher(yDebugMacro).treeMatch(reflect.asTerm(self), reflect.asTerm(that)).nonEmpty def valueOrAbort(using fromExpr: FromExpr[T]): T = def reportError = @@ -67,11 +71,10 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler if self.isExprOf[X] then self.asInstanceOf[scala.quoted.Expr[X]] else - throw Exception( - s"""Expr cast exception: ${self.show} - |of type: ${reflect.Printer.TypeReprCode.show(reflect.asTerm(self).tpe)} - |did not conform to type: 
${reflect.Printer.TypeReprCode.show(reflect.TypeRepr.of[X])} - |""".stripMargin + throw ExprCastException( + expectedType = reflect.Printer.TypeReprCode.show(reflect.TypeRepr.of[X]), + actualType = reflect.Printer.TypeReprCode.show(reflect.asTerm(self).tpe), + exprCode = self.show ) } end extension @@ -93,7 +96,13 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler given TreeMethods: TreeMethods with extension (self: Tree) - def pos: Position = self.sourcePos + def pos: Position = + val treePos = self.sourcePos + if treePos.exists then treePos + else + if xCheckMacro then report.warning(s"Missing tree position (defaulting to position 0): ${Printer.TreeStructure.show(self)}\nThis is a compiler bug. Please report it.") + self.source.atSpan(dotc.util.Spans.Span(0)) + def symbol: Symbol = self.symbol def show(using printer: Printer[Tree]): String = printer.show(self) def isExpr: Boolean = @@ -265,6 +274,21 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler end extension end ClassDefMethods + type ValOrDefDef = tpd.ValOrDefDef + + object ValOrDefDefTypeTest extends TypeTest[Tree, ValOrDefDef]: + def unapply(x: Tree): Option[ValOrDefDef & x.type] = x match + case x: (tpd.ValOrDefDef & x.type) => Some(x) + case _ => None + end ValOrDefDefTypeTest + + given ValOrDefDefMethods: ValOrDefDefMethods with + extension (self: ValOrDefDef) + def tpt: TypeTree = self.tpt + def rhs: Option[Term] = optional(self.rhs) + end extension + end ValOrDefDefMethods + type DefDef = tpd.DefDef object DefDefTypeTest extends TypeTest[Tree, DefDef]: @@ -622,9 +646,11 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object Apply extends ApplyModule: def apply(fun: Term, args: List[Term]): Apply = + xCheckMacroAssert(fun.tpe.widen.isInstanceOf[dotc.core.Types.MethodType], "Expected `fun.tpe` to widen into a `MethodType`") xCheckMacroValidExprs(args) withDefaultPos(tpd.Apply(fun, args)) def copy(original: Tree)(fun: Term, args: List[Term]): Apply = + xCheckMacroAssert(fun.tpe.widen.isInstanceOf[dotc.core.Types.MethodType], "Expected `fun.tpe` to widen into a `MethodType`") xCheckMacroValidExprs(args) tpd.cpy.Apply(original)(fun, args) def unapply(x: Apply): (Term, List[Term]) = @@ -663,8 +689,10 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object TypeApply extends TypeApplyModule: def apply(fun: Term, args: List[TypeTree]): TypeApply = + xCheckMacroAssert(fun.tpe.widen.isInstanceOf[dotc.core.Types.PolyType], "Expected `fun.tpe` to widen into a `PolyType`") withDefaultPos(tpd.TypeApply(fun, args)) def copy(original: Tree)(fun: Term, args: List[TypeTree]): TypeApply = + xCheckMacroAssert(fun.tpe.widen.isInstanceOf[dotc.core.Types.PolyType], "Expected `fun.tpe` to widen into a `PolyType`") tpd.cpy.TypeApply(original)(fun, args) def unapply(x: TypeApply): (Term, List[TypeTree]) = (x.fun, x.args) @@ -788,7 +816,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object Block extends BlockModule: def apply(stats: List[Statement], expr: Term): Block = - xCheckMacroBlockOwners(withDefaultPos(tpd.Block(stats, expr))) + xCheckMacroBlockOwners(withDefaultPos(tpd.Block(stats, xCheckMacroValidExpr(expr)))) def copy(original: Tree)(stats: List[Statement], expr: Term): Block = xCheckMacroBlockOwners(tpd.cpy.Block(original)(stats, expr)) def unapply(x: Block): (List[Statement], Term) = @@ -1002,7 +1030,7 @@ class QuotesImpl private (using val ctx: Context) extends 
Quotes, QuoteUnpickler def apply(call: Option[Tree], bindings: List[Definition], expansion: Term): Inlined = withDefaultPos(tpd.Inlined(call.getOrElse(tpd.EmptyTree), bindings.map { case b: tpd.MemberDef => b }, xCheckMacroValidExpr(expansion))) def copy(original: Tree)(call: Option[Tree], bindings: List[Definition], expansion: Term): Inlined = - tpd.cpy.Inlined(original)(call.getOrElse(tpd.EmptyTree), bindings.asInstanceOf[List[tpd.MemberDef]], xCheckMacroValidExpr(expansion)) + tpd.Inlined(call.getOrElse(tpd.EmptyTree), bindings.asInstanceOf[List[tpd.MemberDef]], xCheckMacroValidExpr(expansion)).withSpan(original.span).withType(original.tpe) def unapply(x: Inlined): (Option[Tree /* Term | TypeTree */], List[Definition], Term) = (optional(x.call), x.bindings, x.body) end Inlined @@ -1515,11 +1543,12 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler end extension end BindMethods - type Unapply = tpd.UnApply + type Unapply = tpd.UnApply | tpd.QuotePattern // TODO expose QuotePattern AST in Quotes object UnapplyTypeTest extends TypeTest[Tree, Unapply]: def unapply(x: Tree): Option[Unapply & x.type] = x match case x: (tpd.UnApply & x.type) => Some(x) + case x: (tpd.QuotePattern & x.type) => Some(x) // TODO expose QuotePattern AST in Quotes case _ => None end UnapplyTypeTest @@ -1534,9 +1563,15 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler given UnapplyMethods: UnapplyMethods with extension (self: Unapply) - def fun: Term = self.fun - def implicits: List[Term] = self.implicits - def patterns: List[Tree] = effectivePatterns(self.patterns) + def fun: Term = self match + case self: tpd.UnApply => self.fun + case self: tpd.QuotePattern => QuotePatterns.encode(self).fun // TODO expose QuotePattern AST in Quotes + def implicits: List[Term] = self match + case self: tpd.UnApply => self.implicits + case self: tpd.QuotePattern => QuotePatterns.encode(self).implicits // TODO expose QuotePattern AST in Quotes + def patterns: List[Tree] = self match + case self: tpd.UnApply => effectivePatterns(self.patterns) + case self: tpd.QuotePattern => effectivePatterns(QuotePatterns.encode(self).patterns) // TODO expose QuotePattern AST in Quotes end extension private def effectivePatterns(patterns: List[Tree]): List[Tree] = patterns match @@ -1750,7 +1785,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def show(using printer: Printer[TypeRepr]): String = printer.show(self) - def seal: scala.quoted.Type[_] = self.asType + def seal: scala.quoted.Type[?] = self.asType def asType: scala.quoted.Type[?] 
= new TypeImpl(Inferred(self), SpliceScope.getCurrent) @@ -1761,6 +1796,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def widenTermRefByName: TypeRepr = self.widenTermRefExpr def widenByName: TypeRepr = self.widenExpr def dealias: TypeRepr = self.dealias + def dealiasKeepOpaques: TypeRepr = self.dealiasKeepOpaques def simplified: TypeRepr = self.simplified def classSymbol: Option[Symbol] = if self.classSymbol.exists then Some(self.classSymbol.asClass) @@ -1774,11 +1810,16 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def baseType(cls: Symbol): TypeRepr = self.baseType(cls) def derivesFrom(cls: Symbol): Boolean = self.derivesFrom(cls) def isFunctionType: Boolean = - dotc.core.Symbols.defn.isFunctionType(self) + dotc.core.Symbols.defn.isFunctionNType(self) def isContextFunctionType: Boolean = dotc.core.Symbols.defn.isContextFunctionType(self) def isErasedFunctionType: Boolean = - dotc.core.Symbols.defn.isErasedFunctionType(self) + self match + case dotc.core.Symbols.defn.PolyFunctionOf(mt) => + mt match + case mt: MethodType => mt.hasErasedParams + case PolyType(_, _, mt1) => mt1.hasErasedParams + case _ => false def isDependentFunctionType: Boolean = val tpNoRefinement = self.dropDependentRefinement tpNoRefinement != self @@ -1884,7 +1925,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler given SuperTypeMethods: SuperTypeMethods with extension (self: SuperType) def thistpe: TypeRepr = self.thistpe - def supertpe: TypeRepr = self.thistpe + def supertpe: TypeRepr = self.supertpe end extension end SuperTypeMethods @@ -2212,6 +2253,8 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler extension (self: TypeLambda) def param(idx: Int): TypeRepr = self.newParamRef(idx) def paramBounds: List[TypeBounds] = self.paramInfos + def paramVariances: List[Flags] = + self.typeParams.map(_.paramVariance) end extension end TypeLambdaMethods @@ -2588,10 +2631,16 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def info: TypeRepr = self.denot.info def pos: Option[Position] = - if self.exists then Some(self.sourcePos) else None + if self.exists then + val symPos = self.sourcePos + if symPos.exists then Some(symPos) + else + if xCheckMacro then report.warning(s"Missing symbol position (defaulting to position 0): $self\nThis is a compiler bug. Please report it.") + Some(self.source.atSpan(dotc.util.Spans.Span(0))) + else None def docstring: Option[String] = - import dotc.core.Comments.CommentsContext + import dotc.core.Comments.docCtx val docCtx = ctx.docCtx.getOrElse { throw new RuntimeException( "DocCtx could not be found and documentations are unavailable. This is a compiler-internal error." @@ -2720,6 +2769,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler } def isTypeParam: Boolean = self.isTypeParam + def paramVariance: Flags = self.paramVariance def signature: Signature = self.signature def moduleClass: Symbol = self.denot.moduleClass def companionClass: Symbol = self.denot.companionClass @@ -2809,9 +2859,13 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def FunctionClass(arity: Int, isImplicit: Boolean = false, isErased: Boolean = false): Symbol = if arity < 0 then throw IllegalArgumentException(s"arity: $arity") if isErased then - throw new Exception("Erased function classes are not supported. 
Use a refined `scala.runtime.ErasedFunction`") + throw new Exception("Erased function classes are not supported. Use a refined `scala.PolyFunction`") else dotc.core.Symbols.defn.FunctionSymbol(arity, isImplicit) - def ErasedFunctionClass = dotc.core.Symbols.defn.ErasedFunctionClass + def FunctionClass(arity: Int): Symbol = + FunctionClass(arity, false, false) + def FunctionClass(arity: Int, isContextual: Boolean): Symbol = + FunctionClass(arity, isContextual, false) + def PolyFunctionClass = dotc.core.Symbols.defn.PolyFunctionClass def TupleClass(arity: Int): Symbol = dotc.core.Symbols.defn.TupleType(arity).nn.classSymbol.asClass def isTupleClass(sym: Symbol): Boolean = @@ -2874,9 +2928,10 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def Transparent: Flags = dotc.core.Flags.Transparent // Keep: aligned with Quotes's `newMethod` doc - private[QuotesImpl] def validMethodFlags: Flags = Private | Protected | Override | Deferred | Final | Method | Implicit | Given | Local | AbsOverride | JavaStatic // Flags that could be allowed: Synthetic | ExtensionMethod | Exported | Erased | Infix | Invisible + private[QuotesImpl] def validMethodFlags: Flags = Private | Protected | Override | Deferred | Final | Method | Implicit | Given | Local | AbsOverride | JavaStatic | Synthetic | Artifact // Flags that could be allowed: Synthetic | ExtensionMethod | Exported | Erased | Infix | Invisible // Keep: aligned with Quotes's `newVal` doc - private[QuotesImpl] def validValFlags: Flags = Private | Protected | Override | Deferred | Final | Param | Implicit | Lazy | Mutable | Local | ParamAccessor | Module | Package | Case | CaseAccessor | Given | Enum | AbsOverride | JavaStatic // Flags that could be added: Synthetic | Erased | Invisible + private[QuotesImpl] def validValFlags: Flags = Private | Protected | Override | Deferred | Final | Param | Implicit | Lazy | Mutable | Local | ParamAccessor | Module | Package | Case | CaseAccessor | Given | Enum | AbsOverride | JavaStatic | Synthetic | Artifact // Flags that could be added: Synthetic | Erased | Invisible + // Keep: aligned with Quotes's `newBind` doc private[QuotesImpl] def validBindFlags: Flags = Case // Flags that could be allowed: Implicit | Given | Erased end Flags @@ -3081,13 +3136,22 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler if xCheckMacro then termOpt.foreach(xCheckMacroValidExpr) termOpt private def xCheckMacroValidExpr(term: Term): term.type = - if xCheckMacro then - assert(!term.tpe.widenDealias.isInstanceOf[dotc.core.Types.MethodicType], + xCheckMacroAssert(!term.tpe.widenDealias.isInstanceOf[dotc.core.Types.MethodicType], "Reference to a method must be eta-expanded before it is used as an expression: " + term.show) term private inline def xCheckMacroAssert(inline cond: Boolean, inline msg: String): Unit = - assert(!xCheckMacro || cond, msg) + if xCheckMacro && !cond then + xCheckMacroAssertFail(msg) + + private def xCheckMacroAssertFail(msg: String): Unit = + val error = new AssertionError(msg) + if !yDebugMacro then + // start stack trace at the place where the user called the reflection method + error.setStackTrace( + error.getStackTrace + .dropWhile(_.getClassName().startsWith("scala.quoted.runtime.impl"))) + throw error object Printer extends PrinterModule: @@ -3155,65 +3219,14 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def unapply[TypeBindings, Tup <: Tuple](scrutinee: scala.quoted.Expr[Any])(using pattern: 
scala.quoted.Expr[Any]): Option[Tup] = val scrutineeTree = reflect.asTerm(scrutinee) val patternTree = reflect.asTerm(pattern) - treeMatch(scrutineeTree, patternTree).asInstanceOf[Option[Tup]] + QuoteMatcher(yDebugMacro).treeMatch(scrutineeTree, patternTree).asInstanceOf[Option[Tup]] end ExprMatch object TypeMatch extends TypeMatchModule: def unapply[TypeBindings, Tup <: Tuple](scrutinee: scala.quoted.Type[?])(using pattern: scala.quoted.Type[?]): Option[Tup] = val scrutineeTree = reflect.TypeTree.of(using scrutinee) val patternTree = reflect.TypeTree.of(using pattern) - treeMatch(scrutineeTree, patternTree).asInstanceOf[Option[Tup]] + QuoteMatcher(yDebugMacro).treeMatch(scrutineeTree, patternTree).asInstanceOf[Option[Tup]] end TypeMatch - private def treeMatch(scrutinee: reflect.Tree, pattern: reflect.Tree): Option[Tuple] = { - import reflect._ - def isTypeHoleDef(tree: Tree): Boolean = - tree match - case tree: TypeDef => - tree.symbol.hasAnnotation(dotc.core.Symbols.defn.QuotedRuntimePatterns_patternTypeAnnot) - case _ => false - - def extractTypeHoles(pat: Term): (Term, List[Symbol]) = - pat match - case tpd.Inlined(_, Nil, pat2) => extractTypeHoles(pat2) - case tpd.Block(stats @ ((typeHole: TypeDef) :: _), expr) if isTypeHoleDef(typeHole) => - val holes = stats.takeWhile(isTypeHoleDef).map(_.symbol) - val otherStats = stats.dropWhile(isTypeHoleDef) - (tpd.cpy.Block(pat)(otherStats, expr), holes) - case _ => - (pat, Nil) - - val (pat1, typeHoles) = extractTypeHoles(pattern) - - val ctx1 = - if typeHoles.isEmpty then ctx - else - val ctx1 = ctx.fresh.setFreshGADTBounds.addMode(dotc.core.Mode.GadtConstraintInference) - ctx1.gadtState.addToConstraint(typeHoles) - ctx1 - - // After matching and doing all subtype checks, we have to approximate all the type bindings - // that we have found, seal them in a quoted.Type and add them to the result - def typeHoleApproximation(sym: Symbol) = - val fromAboveAnnot = sym.hasAnnotation(dotc.core.Symbols.defn.QuotedRuntimePatterns_fromAboveAnnot) - val fullBounds = ctx1.gadt.fullBounds(sym) - if fromAboveAnnot then fullBounds.hi else fullBounds.lo - - QuoteMatcher.treeMatch(scrutinee, pat1)(using ctx1).map { matchings => - import QuoteMatcher.MatchResult.* - lazy val spliceScope = SpliceScope.getCurrent - val typeHoleApproximations = typeHoles.map(typeHoleApproximation) - val typeHoleMapping = Map(typeHoles.zip(typeHoleApproximations)*) - val typeHoleMap = new Types.TypeMap { - def apply(tp: Types.Type): Types.Type = tp match - case Types.TypeRef(Types.NoPrefix, _) => typeHoleMapping.getOrElse(tp.typeSymbol, tp) - case _ => mapOver(tp) - } - val matchedExprs = matchings.map(_.toExpr(typeHoleMap, spliceScope)) - val matchedTypes = typeHoleApproximations.map(reflect.TypeReprMethods.asType) - val results = matchedTypes ++ matchedExprs - Tuple.fromIArray(IArray.unsafeFromArray(results.toArray)) - } - } - end QuotesImpl diff --git a/compiler/src/scala/quoted/runtime/impl/ScopeException.scala b/compiler/src/scala/quoted/runtime/impl/ScopeException.scala index d65328bb5405..705efc5ffab1 100644 --- a/compiler/src/scala/quoted/runtime/impl/ScopeException.scala +++ b/compiler/src/scala/quoted/runtime/impl/ScopeException.scala @@ -1,7 +1,7 @@ package scala.quoted.runtime.impl import dotty.tools.dotc.ast.tpd.Tree -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* class ScopeException(msg: String) extends Exception(msg) diff --git a/compiler/src/scala/quoted/runtime/impl/SpliceScope.scala 
b/compiler/src/scala/quoted/runtime/impl/SpliceScope.scala index 797b38be2743..397ad49a309b 100644 --- a/compiler/src/scala/quoted/runtime/impl/SpliceScope.scala +++ b/compiler/src/scala/quoted/runtime/impl/SpliceScope.scala @@ -1,7 +1,7 @@ package scala.quoted package runtime.impl -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.util.Property import dotty.tools.dotc.util.SourcePosition diff --git a/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala b/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala index c229338ad228..eac85244d97b 100644 --- a/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala +++ b/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala @@ -1,7 +1,7 @@ package scala.quoted package runtime.impl.printers -import scala.quoted._ +import scala.quoted.* object Extractors { @@ -18,7 +18,7 @@ object Extractors { new ExtractorsPrinter[quotes.type]().visitSymbol(symbol).result() def showFlags(using Quotes)(flags: quotes.reflect.Flags): String = { - import quotes.reflect._ + import quotes.reflect.* val flagList = List.newBuilder[String] if (flags.is(Flags.Abstract)) flagList += "Flags.Abstract" if (flags.is(Flags.Artifact)) flagList += "Flags.Artifact" @@ -64,7 +64,7 @@ object Extractors { } private class ExtractorsPrinter[Q <: Quotes & Singleton](using val quotes: Q) { self => - import quotes.reflect._ + import quotes.reflect.* private val sb: StringBuilder = new StringBuilder diff --git a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala index a6a773adc9ba..b27016045051 100644 --- a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala +++ b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala @@ -19,7 +19,7 @@ object SourceCode { symbol.fullName def showFlags(using Quotes)(flags: quotes.reflect.Flags)(syntaxHighlight: SyntaxHighlight): String = { - import quotes.reflect._ + import quotes.reflect.* val flagList = List.newBuilder[String] if (flags.is(Flags.Abstract)) flagList += "abstract" if (flags.is(Flags.Artifact)) flagList += "artifact" @@ -52,7 +52,7 @@ object SourceCode { if (flags.is(Flags.Param)) flagList += "param" if (flags.is(Flags.ParamAccessor)) flagList += "paramAccessor" if (flags.is(Flags.Private)) flagList += "private" - if (flags.is(Flags.PrivateLocal)) flagList += "private[this]" + if (flags.is(Flags.PrivateLocal)) flagList += "private" if (flags.is(Flags.Protected)) flagList += "protected" if (flags.is(Flags.Scala2x)) flagList += "scala2x" if (flags.is(Flags.Sealed)) flagList += "sealed" @@ -64,12 +64,12 @@ object SourceCode { } private class SourceCodePrinter[Q <: Quotes & Singleton](syntaxHighlight: SyntaxHighlight, fullNames: Boolean)(using val quotes: Q) { - import syntaxHighlight._ - import quotes.reflect._ + import syntaxHighlight.* + import quotes.reflect.* - private[this] val sb: StringBuilder = new StringBuilder + private val sb: StringBuilder = new StringBuilder - private[this] var indent: Int = 0 + private var indent: Int = 0 private def indented(printIndented: => Unit): Unit = { indent += 1 printIndented @@ -158,7 +158,7 @@ object SourceCode { for paramClause <- paramss do paramClause match case TermParamClause(params) => - printArgsDefs(params) + printMethdArgsDefs(params) case TypeParamClause(params) => printTargsDefs(stats.collect { case targ: TypeDef => targ }.filter(_.symbol.isTypeParam).zip(params)) } @@ -313,7 +313,7 @@ object 
SourceCode { this += highlightKeyword("def ") += highlightValDef(name1) for clause <- paramss do clause match - case TermParamClause(params) => printArgsDefs(params) + case TermParamClause(params) => printMethdArgsDefs(params) case TypeParamClause(params) => printTargsDefs(params.zip(params)) if (!isConstructor) { this += ": " @@ -460,7 +460,7 @@ object SourceCode { case tree @ Lambda(params, body) => // must come before `Block` inParens { - printArgsDefs(params) + printLambdaArgsDefs(params) this += (if tree.tpe.isContextFunctionType then " ?=> " else " => ") printTree(body) } @@ -804,29 +804,37 @@ object SourceCode { } } - private def printArgsDefs(args: List[ValDef])(using elideThis: Option[Symbol]): Unit = { + private def printSeparatedParamDefs(list: List[ValDef])(using elideThis: Option[Symbol]): Unit = list match { + case Nil => + case x :: Nil => printParamDef(x) + case x :: xs => + printParamDef(x) + this += ", " + printSeparatedParamDefs(xs) + } + + private def printMethdArgsDefs(args: List[ValDef])(using elideThis: Option[Symbol]): Unit = { val argFlags = args match { case Nil => Flags.EmptyFlags case arg :: _ => arg.symbol.flags } - if (argFlags.is(Flags.Erased | Flags.Given)) { - if (argFlags.is(Flags.Given)) this += " given" - if (argFlags.is(Flags.Erased)) this += " erased" - this += " " - } inParens { if (argFlags.is(Flags.Implicit) && !argFlags.is(Flags.Given)) this += "implicit " + if (argFlags.is(Flags.Given)) this += "using " - def printSeparated(list: List[ValDef]): Unit = list match { - case Nil => - case x :: Nil => printParamDef(x) - case x :: xs => - printParamDef(x) - this += ", " - printSeparated(xs) - } + printSeparatedParamDefs(args) + } + } - printSeparated(args) + private def printLambdaArgsDefs(args: List[ValDef])(using elideThis: Option[Symbol]): Unit = { + val argFlags = args match { + case Nil => Flags.EmptyFlags + case arg :: _ => arg.symbol.flags + } + inParens { + if (argFlags.is(Flags.Implicit) && !argFlags.is(Flags.Given)) this += "implicit " + + printSeparatedParamDefs(args) } } @@ -846,6 +854,9 @@ object SourceCode { private def printParamDef(arg: ValDef)(using elideThis: Option[Symbol]): Unit = { val name = splicedName(arg.symbol).getOrElse(arg.symbol.name) val sym = arg.symbol.owner + + if (arg.symbol.flags.is(Flags.Erased)) this += "erased " + if sym.isDefDef && sym.name == "" then val ClassDef(_, _, _, _, body) = sym.owner.tree: @unchecked body.collectFirst { @@ -1189,12 +1200,12 @@ object SourceCode { } case SuperType(thistpe, supertpe) => - printType(supertpe) + printType(thistpe) this += highlightTypeDef(".super") case TypeLambda(paramNames, tparams, body) => inSquare(printMethodicTypeParams(paramNames, tparams)) - this += highlightTypeDef(" => ") + this += highlightTypeDef(" =>> ") printType(body) case ParamRef(lambda, idx) => @@ -1372,28 +1383,24 @@ object SourceCode { private def printProtectedOrPrivate(definition: Definition): Boolean = { var prefixWasPrinted = false - def printWithin(within: TypeRepr) = within match { - case TypeRef(_, name) => this += name - case _ => printFullClassName(within) - } - if (definition.symbol.flags.is(Flags.Protected)) { + def printWithin(within: Option[TypeRepr]) = within match + case _ if definition.symbol.flags.is(Flags.Local) => inSquare(this += "this") + case Some(TypeRef(_, name)) => inSquare(this += name) + case Some(within) => inSquare(printFullClassName(within)) + case _ => + + if definition.symbol.flags.is(Flags.Protected) then this += highlightKeyword("protected") - 
definition.symbol.protectedWithin match { - case Some(within) => - inSquare(printWithin(within)) - case _ => - } + printWithin(definition.symbol.protectedWithin) prefixWasPrinted = true - } else { - definition.symbol.privateWithin match { - case Some(within) => - this += highlightKeyword("private") - inSquare(printWithin(within)) - prefixWasPrinted = true - case _ => - } - } - if (prefixWasPrinted) + else + val privateWithin = definition.symbol.privateWithin + if privateWithin.isDefined || definition.symbol.flags.is(Flags.Private) then + this += highlightKeyword("private") + printWithin(definition.symbol.privateWithin) + prefixWasPrinted = true + + if prefixWasPrinted then this += " " prefixWasPrinted } @@ -1434,8 +1441,8 @@ object SourceCode { private def escapedString(str: String): String = str flatMap escapedChar - private[this] val names = collection.mutable.Map.empty[Symbol, String] - private[this] val namesIndex = collection.mutable.Map.empty[String, Int] + private val names = collection.mutable.Map.empty[Symbol, String] + private val namesIndex = collection.mutable.Map.empty[String, Int] private def splicedName(sym: Symbol): Option[String] = { if sym.owner.isClassDef then None diff --git a/compiler/test-resources/repl-macros/i15104c b/compiler/test-resources/repl-macros/i15104c index 482b9487c9d9..9f2eb0a4442a 100644 --- a/compiler/test-resources/repl-macros/i15104c +++ b/compiler/test-resources/repl-macros/i15104c @@ -1,6 +1,6 @@ scala> import scala.quoted._ scala> def macroImpl(using Quotes) = Expr(1) -def macroImpl(using x$1: quoted.Quotes): quoted.Expr[Int] +def macroImpl(using x$1: scala.quoted.Quotes): scala.quoted.Expr[Int] scala> inline def foo = ${ macroImpl } def foo: Int scala> foo diff --git a/compiler/test-resources/repl-macros/i5551 b/compiler/test-resources/repl-macros/i5551 index 984551438b51..0a07e971440d 100644 --- a/compiler/test-resources/repl-macros/i5551 +++ b/compiler/test-resources/repl-macros/i5551 @@ -1,7 +1,8 @@ scala> import scala.quoted._ scala> def assertImpl(expr: Expr[Boolean])(using q: Quotes) = '{ if !($expr) then throw new AssertionError("failed assertion")} def assertImpl - (expr: quoted.Expr[Boolean])(using q: quoted.Quotes): scala.quoted.Expr[Unit] + (expr: scala.quoted.Expr[Boolean]) + (using q: scala.quoted.Quotes): scala.quoted.Expr[Unit] scala> inline def assert(expr: => Boolean): Unit = ${ assertImpl('{expr}) } def assert(expr: => Boolean): Unit diff --git a/compiler/test-resources/repl/i10355 b/compiler/test-resources/repl/i10355 index 294b9d7f1101..9fa17f9511a1 100644 --- a/compiler/test-resources/repl/i10355 +++ b/compiler/test-resources/repl/i10355 @@ -1,7 +1,9 @@ scala> import scala.quoted._ scala> def foo(expr: Expr[Any])(using Quotes) = expr match { case '{ $x: t } => '{ $x: Any } } def foo - (expr: quoted.Expr[Any])(using x$2: quoted.Quotes): scala.quoted.Expr[Any] + (expr: scala.quoted.Expr[Any]) + (using x$2: scala.quoted.Quotes): scala.quoted.Expr[Any] scala> def bar(expr: Expr[Any])(using Quotes) = expr match { case '{ $x: t } => '{ val a: t = ??? 
; ???} } def bar - (expr: quoted.Expr[Any])(using x$2: quoted.Quotes): scala.quoted.Expr[Nothing] + (expr: scala.quoted.Expr[Any]) + (using x$2: scala.quoted.Quotes): scala.quoted.Expr[Nothing] diff --git a/compiler/test-resources/repl/i13208.scala b/compiler/test-resources/repl/i13208.scala index ce4fcf0d9ed8..6871a962105c 100644 --- a/compiler/test-resources/repl/i13208.scala +++ b/compiler/test-resources/repl/i13208.scala @@ -1,8 +1,3 @@ -// scalac: -source:future -deprecation +//> using options -source:3.4-migration scala> type M[X] = X match { case Int => String case _ => Int } scala> type N[X] = X match { case List[_] => Int } -1 warning found --- Deprecation Warning: -------------------------------------------------------- -1 | type N[X] = X match { case List[_] => Int } - | ^ - | `_` is deprecated for wildcard arguments of types: use `?` instead diff --git a/compiler/test-resources/repl/i1370 b/compiler/test-resources/repl/i1370 index 4bd92b4d5f83..e10020bf4891 100644 --- a/compiler/test-resources/repl/i1370 +++ b/compiler/test-resources/repl/i1370 @@ -3,4 +3,5 @@ scala> object Lives { class Private { def foo1: Any = new Private.C1; def foo2: 1 | object Lives { class Private { def foo1: Any = new Private.C1; def foo2: Any = new Private.C2 }; object Private { class C1 private {}; private class C2 {} } } | ^^^^^^^^^^ |constructor C1 cannot be accessed as a member of Lives.Private.C1 from class Private. + | private constructor C1 can only be accessed from class C1 in object Private. 1 error found diff --git a/compiler/test-resources/repl/i1374 b/compiler/test-resources/repl/i1374 index 3d117fdb4ff9..2e0b5be900af 100644 --- a/compiler/test-resources/repl/i1374 +++ b/compiler/test-resources/repl/i1374 @@ -1,4 +1,4 @@ -scala> implicit class Padder(val sb: StringBuilder) extends AnyVal { def pad2(width: Int) = { 1 to width - sb.length foreach { sb append '*' }; sb } } +scala> implicit class Padder(val sb: StringBuilder) extends AnyVal { infix def pad2(width: Int) = { 1 to width - sb.length foreach { sb append '*' }; sb } } // defined class Padder def Padder(sb: StringBuilder): Padder scala> val greeting = new StringBuilder("Hello, kitteh!") diff --git a/compiler/test-resources/repl/i15493 b/compiler/test-resources/repl/i15493 index 670cf8ebcbd2..063f7edfaca4 100644 --- a/compiler/test-resources/repl/i15493 +++ b/compiler/test-resources/repl/i15493 @@ -146,4 +146,4 @@ scala> Vector.unapplySeq(Vector(2)) val res35: scala.collection.SeqFactory.UnapplySeqWrapper[Int] = scala.collection.SeqFactory$UnapplySeqWrapper@df507bfd scala> new scala.concurrent.duration.DurationInt(5) -val res36: scala.concurrent.duration.package.DurationInt = scala.concurrent.duration.package$DurationInt@5 +val res36: scala.concurrent.duration.DurationInt = scala.concurrent.duration.package$DurationInt@5 \ No newline at end of file diff --git a/compiler/test-resources/repl/i3966 b/compiler/test-resources/repl/i3966 index 204a7685f854..44f42e75aecb 100644 --- a/compiler/test-resources/repl/i3966 +++ b/compiler/test-resources/repl/i3966 @@ -1,2 +1,2 @@ -scala> val List(x: _*) = List(1, 2): @unchecked +scala> val List(x*) = List(1, 2): @unchecked val x: Seq[Int] = List(1, 2) diff --git a/compiler/test-resources/repl/i6643 b/compiler/test-resources/repl/i6643 index e139ae9f7f94..fc58435c5fff 100644 --- a/compiler/test-resources/repl/i6643 +++ b/compiler/test-resources/repl/i6643 @@ -1,7 +1,12 @@ scala> import scala.collection._ - scala>:type 1 Int - -scala> object IterableTest { def g[CC[_] <: Iterable[_] with 
IterableOps[_, _, _]](from: CC[Int]): IterableFactory[CC] = ??? } +scala> object IterableTest { def g[CC[_] <: Iterable[?] with IterableOps[?, ?, ?]](from: CC[Int]): IterableFactory[CC] = ??? } +1 warning found +-- [E003] Syntax Warning: ------------------------------------------------------ +1 | object IterableTest { def g[CC[_] <: Iterable[?] with IterableOps[?, ?, ?]](from: CC[Int]): IterableFactory[CC] = ??? } + | ^^^^ + | with as a type operator has been deprecated; use & instead + | + | longer explanation available when compiling with `-explain` // defined object IterableTest diff --git a/compiler/test-resources/repl/nowarn.scala b/compiler/test-resources/repl/nowarn.scala index 5038cda7094e..975f9c72a809 100644 --- a/compiler/test-resources/repl/nowarn.scala +++ b/compiler/test-resources/repl/nowarn.scala @@ -27,7 +27,7 @@ scala> def f = { 1; 2 } -- [E129] Potential Issue Warning: --------------------------------------------- 1 | def f = { 1; 2 } | ^ - |A pure expression does nothing in statement position; you may be omitting necessary parentheses + | A pure expression does nothing in statement position | | longer explanation available when compiling with `-explain` def f: Int diff --git a/compiler/test-resources/repl/rewrite-messages b/compiler/test-resources/repl/rewrite-messages index eee2fe034c43..a63a72195019 100644 --- a/compiler/test-resources/repl/rewrite-messages +++ b/compiler/test-resources/repl/rewrite-messages @@ -1,4 +1,4 @@ -// scalac: -source:future-migration -deprecation -Werror +//> using options -source:future-migration -deprecation -Werror scala> import scala.util._ -- Error: ---------------------------------------------------------------------- 1 | import scala.util._ diff --git a/compiler/test-resources/type-printer/infix b/compiler/test-resources/type-printer/infix index a7904ae9ec43..bedb7071e7f2 100644 --- a/compiler/test-resources/type-printer/infix +++ b/compiler/test-resources/type-printer/infix @@ -29,7 +29,7 @@ def foo: (Int &: String) & Boolean scala> def foo: Int &: (Boolean & String) = ??? def foo: Int &: (Boolean & String) scala> import scala.annotation.showAsInfix -scala> @scala.annotation.showAsInfix class Mappy[T,U] +scala> @scala.annotation.showAsInfix infix class Mappy[T,U] // defined class Mappy scala> def foo: (Int Mappy Boolean) && String = ??? def foo: (Int Mappy Boolean) && String @@ -48,10 +48,31 @@ def foo: Int && Boolean & String scala> def foo: Int && (Boolean & String) = ??? def foo: Int && (Boolean & String) scala> def foo: Int && (Boolean with String) = ??? +1 warning found +-- [E003] Syntax Warning: ------------------------------------------------------ +1 | def foo: Int && (Boolean with String) = ??? + | ^^^^ + | with as a type operator has been deprecated; use & instead + | + | longer explanation available when compiling with `-explain` def foo: Int && (Boolean & String) scala> def foo: (Int && Boolean) with String = ??? +1 warning found +-- [E003] Syntax Warning: ------------------------------------------------------ +1 | def foo: (Int && Boolean) with String = ??? + | ^^^^ + | with as a type operator has been deprecated; use & instead + | + | longer explanation available when compiling with `-explain` def foo: Int && Boolean & String scala> def foo: Int && Boolean with String = ??? +1 warning found +-- [E003] Syntax Warning: ------------------------------------------------------ +1 | def foo: Int && Boolean with String = ??? 
+ | ^^^^ + | with as a type operator has been deprecated; use & instead + | + | longer explanation available when compiling with `-explain` def foo: Int && (Boolean & String) scala> def foo: Int && Boolean | String = ??? def foo: Int && Boolean | String diff --git a/compiler/test/dotc/neg-init-global-scala2-library-tasty.blacklist b/compiler/test/dotc/neg-init-global-scala2-library-tasty.blacklist new file mode 100644 index 000000000000..f435867fcaab --- /dev/null +++ b/compiler/test/dotc/neg-init-global-scala2-library-tasty.blacklist @@ -0,0 +1,6 @@ +## See #18882 +patmat.scala +t9312.scala +unapplySeq-implicit-arg.scala +unapplySeq-implicit-arg2.scala +unapplySeq-implicit-arg3.scala diff --git a/compiler/test/dotc/neg-scala2-library-tasty.blacklist b/compiler/test/dotc/neg-scala2-library-tasty.blacklist new file mode 100644 index 000000000000..d46a021ddd50 --- /dev/null +++ b/compiler/test/dotc/neg-scala2-library-tasty.blacklist @@ -0,0 +1,2 @@ +i8752.scala +f-interpolator-neg.scala # Additional: A pure expression does nothing in statement position diff --git a/compiler/test/dotc/patmat-exhaustivity-scala2-library-tasty.blacklist b/compiler/test/dotc/patmat-exhaustivity-scala2-library-tasty.blacklist new file mode 100644 index 000000000000..6f1717d532fd --- /dev/null +++ b/compiler/test/dotc/patmat-exhaustivity-scala2-library-tasty.blacklist @@ -0,0 +1,4 @@ +t7746.scala # order of exhaustivity suggestions differs +t4408.scala # order of exhaustivity suggestions differs +patmat-ortype.scala # order of exhaustivity suggestions differs +i13003.scala # order of exhaustivity suggestions differs diff --git a/compiler/test/dotc/pos-init-global-scala2-library-tasty.blacklist b/compiler/test/dotc/pos-init-global-scala2-library-tasty.blacklist new file mode 100644 index 000000000000..eb60fc3b7c14 --- /dev/null +++ b/compiler/test/dotc/pos-init-global-scala2-library-tasty.blacklist @@ -0,0 +1,5 @@ +## See #18882 +patmat.scala +patmat-interpolator.scala +unapplySeq-implicit-arg-pos.scala +global-cycle11.scala diff --git a/compiler/test/dotc/pos-linting.allowlist b/compiler/test/dotc/pos-linting.allowlist new file mode 100644 index 000000000000..41b8123457e6 --- /dev/null +++ b/compiler/test/dotc/pos-linting.allowlist @@ -0,0 +1,995 @@ +10747-onnxmin.scala +10747-shapeless-min.scala +11463.scala +13455.scala +13491.scala +6322.scala +8313.scala +8905.scala +9623.scala +9757.scala +A.scala +AE-9a131723f09b9f77c99c52b709965e580a61706e.scala +Annotations.scala +B.scala +Dynamic.scala +IArrayToArraySeq.scala +Iter2.scala +MathSpec.scala +Monoid.scala +Orderings.scala +Result.scala +SI-4012-b.scala +Transactions.scala +aliasNew.scala +and-inf.scala +andtypes.scala +annotDepMethType.scala +anonClassSubtyping.scala +approximateUnion.scala +argDenot-alpakka.min.scala +array-clone.scala +array-interfaces.scala +arrays3.scala +basicFromTasty.scala +byname-implicits-1.scala +byname-implicits-19.scala +byname-implicits-22.scala +byname-implicits-25.scala +byname-implicits-27.scala +capturedVars.scala +case-signature.scala +caseClassInMethod.scala +caseclass-access.scala +classWithCompObj.scala +cls.scala +collectGenericCC.scala +combine.scala +comma-separated.scala +compare-singletons.scala +complex-self-call.scala +compound.scala +constants.scala +constrs.scala +conversion-function-prototype.scala +default-param-interface.scala +default-super.scala +delambdafy-lambdalift.scala +depmet_1_pos.scala +depmet_implicit_tpbetareduce.scala +derives-obj.scala +doWhile.scala +duplicate-parents.scala 
+elidable-tparams.scala +enum-companion-first.scala +enum-variance.scala +enums-capture.scala +erased-conforms.scala +erased-lub.scala +erased-typedef.scala +escapingRefs.scala +exhaust_2.scala +exhaust_alternatives.scala +existentials.scala +experimental-erased-2.scala +experimental-inheritance.scala +experimentalExperimental.scala +exponential-spec.scala +export-in-extension-rename.scala +export-proxies.scala +extmethods-2.scala +extmethods.scala +extractor-types.scala +f112.scala +fbounds.scala +fewer-braces.scala +flatten.scala +freezeBounds.scala +gadt-cast-if.scala +gadt-eval.scala +gadt-simpleEQ.scala +gadt-strip-refinement.scala +gadt-upcast.scala +help.scala +hk-subtyping.scala +hk-variance.scala +hk.scala +hkarray.scala +hklower.scala +ho-implicits.scala +i0268.scala +i0290-type-bind-2.scala +i0306.scala +i10080.scala +i10107c.scala +i10116.scala +i10123.scala +i10161.scala +i10242.scala +i10247.scala +i10259.scala +i10311.scala +i10389.scala +i1044.scala +i1047.scala +i10495.scala +i10542.scala +i10638.scala +i10667.scala +i10769.scala +i10951.scala +i10964.scala +i10980.scala +i11015.scala +i11020.scala +i11022.scala +i1103.scala +i11057.scala +i11078.scala +i11078b.scala +i11114.scala +i11163.scala +i11168.scala +i11170a.scala +i11184d.scala +i11223.scala +i11230.scala +i11255.scala +i11420.scala +i11481.scala +i11538a.scala +i11538b.scala +i11556.scala +i11605.scala +i11631.scala +i11631b.scala +i11682.scala +i11712.scala +i11713.scala +i11729.scala +i11731.scala +i11781.scala +i1181.scala +i1181c.scala +i11922.scala +i11955.scala +i11968.scala +i11977.scala +i11981.scala +i11997.scala +i1202c.scala +i1202d.scala +i12072-c.scala +i12072.scala +i12127.scala +i12141.scala +i12168.scala +i1216a.scala +i12180b.scala +i12216.scala +i12218.scala +i12226.scala +i12277.scala +i1235.scala +i12373.scala +i12395.scala +i12396.scala +i12474.scala +i12476.scala +i12478.scala +i12508.scala +i12508a.scala +i12508b.scala +i12508c.scala +i12645.scala +i12655.scala +i12722.scala +i12730.scala +i12754.scala +i12757.scala +i12896.scala +i12915.scala +i12953.scala +i12955.scala +i1307.scala +i13091.scala +i13331.scala +i13346.scala +i13433b.scala +i13460.scala +i13487.scala +i13541.scala +i13548.scala +i13558.scala +i13660.scala +i13668.scala +i13816.scala +i13820.scala +i13842.scala +i13900.scala +i13974a.scala +i13990.scala +i14010.scala +i14061.scala +i14096.scala +i14151b.scala +i14151c.scala +i14152.scala +i14218.http4s.scala +i14349.scala +i14367.scala +i1444.scala +i14451.scala +i1447.scala +i14477.scala +i14494.scala +i14544.scala +i14637.scala +i14642.scala +i14653b.scala +i14656.scala +i14699.scala +i14739.works.scala +i14914.scala +i14966.scala +i1500.scala +i15029.orig.scala +i15029.scala +i1515.scala +i15178.scala +i15216.scala +i15226.scala +i15274.orig.scala +i15448.scala +i15460.scala +i15523.avoid.scala +i15525.scala +i15664.scala +i15673.scala +i1570.scala +i15743.scala +i15813.scala +i15820.scala +i15893.scala +i1590.scala +i15926.contra.scala +i15926.extract.scala +i15926.scala +i15931.scala +i15967.scala +i15983b.scala +i15991.abstract.scala +i16035.scala +i16091.scala +i16105.scala +i16236.scala +i16328.scala +i16339.scala +i16342.scala +i16374a.scala +i1638.scala +i16408.min2.scala +i16408.scala +i16435.avoid.scala +i16451.default.scala +i16464.scala +i16469.scala +i16488.scala +i16544c.scala +i16562.scala +i1661.scala +i16641.scala +i1665.scala +i16777.scala +i16785.scala +i16808.scala +i16954.scala +i16997.scala +i17002.scala +i1704.scala +i17052.scala +i17115.scala 
+i17230.bootstrap.scala +i17230.orig.scala +i17245.scala +i17380.scala +i17395.scala +i17465.scala +i17525.scala +i17556.scala +i1756.scala +i17588.scala +i1777.scala +i1793.scala +i17948.all.scala +i17948.scala +i1795.scala +i18062.scala +i18083.scala +i18211.scala +i18253.orig.scala +i18253.scala +i18263.orig.scala +i18263.scala +i18275.scala +i18345.scala +i1857.scala +i18649.scala +i1866.scala +i1990a.scala +i2009.scala +i2056.scala +i2071.scala +i2071_1.scala +i2104.scala +i2112.scala +i2140.scala +i2188.scala +i2232.scala +i2234.scala +i2278.scala +i2292.scala +i2324.scala +i2340.scala +i2367.scala +i2397.scala +i2437b.scala +i2468.scala +i2576.scala +i2637.scala +i2672.scala +i2697.scala +i2723.scala +i2749.scala +i2774.scala +i2888.scala +i2941.scala +i2944.scala +i2973.scala +i2981.scala +i2997.scala +i2998.scala +i3050.scala +i3082.scala +i3130c.scala +i3130d.scala +i3139.scala +i3208.scala +i324.scala +i3246.scala +i3248.scala +i3264.scala +i3323.scala +i3488.scala +i3495.scala +i3500.scala +i3542-1.scala +i3564.scala +i3585.scala +i3588.scala +i3607.scala +i3658.scala +i3666-gadt.scala +i3703.scala +i3736.scala +i3955.scala +i3956.scala +i3971.scala +i3976.scala +i4006.scala +i4031.scala +i4166.scala +i4184.scala +i4203.scala +i4316.scala +i4318.scala +i4395b.scala +i4419.scala +i4430.scala +i4466b.scala +i4588.scala +i4623.scala +i4720.scala +i4725.scala +i4753b.scala +i4785.scala +i480a.scala +i4819.scala +i4837.scala +i4854.scala +i4863.scala +i4884.scala +i4906.scala +i4984.scala +i4996.scala +i4999.scala +i5039.scala +i5068-gadt.scala +i5107.scala +i5145.scala +i5188.scala +i5295.scala +i530-import-symbolic.scala +i5309.scala +i536.scala +i540.scala +i5411.scala +i5413-a.scala +i5413-b.scala +i5418.scala +i5481.scala +i5625.scala +i566.scala +i5666.scala +i5699.scala +i5711.scala +i5720.scala +i5765.scala +i5773.scala +i5773a.scala +i5773b.scala +i5794.scala +i5833.scala +i5966.scala +i5970.scala +i6014-gadt.scala +i6126.scala +i6127.scala +i6146.scala +i6199a.scala +i6199b.scala +i6213.scala +i6247.scala +i6286.scala +i6288.scala +i6312b.scala +i6375.scala +i6395.scala +i6450.scala +i6451.scala +i6507b.scala +i6561.scala +i6565.scala +i6655.scala +i6662.scala +i6682b.scala +i6705.scala +i6734.scala +i6745.scala +i6847.scala +i6854.scala +i6864.scala +i6900.scala +i6909.scala +i7034.scala +i7041.scala +i7044.scala +i7056.scala +i7070.scala +i7087.scala +i7103.scala +i7159.scala +i7219c.scala +i7304.scala +i7359.scala +i7383.scala +i7392.scala +i7403.scala +i7414.scala +i743.scala +i7452.scala +i7477.scala +i7516.scala +i7575.scala +i7655.scala +i7739.scala +i7740a.scala +i7744.scala +i7793.scala +i7819.scala +i7840.scala +i7944.scala +i8031.scala +i8083.scala +i8143.scala +i815.scala +i8151.scala +i8188a.scala +i8188b.scala +i8188c.scala +i8198.scala +i8241.scala +i8276.scala +i8319.scala +i8321.scala +i8338.scala +i8357.scala +i8368.scala +i8397.scala +i8404.scala +i8449.scala +i8491.scala +i8516.scala +i8530.scala +i8537.scala +i8750.scala +i8758.scala +i8786.scala +i880.scala +i8801.scala +i8802a.scala +i8825.scala +i8843.scala +i8874.scala +i8881.scala +i8900-cycle.scala +i8900a.scala +i8921.scala +i8963.scala +i8968.scala +i8972.scala +i9046.scala +i9051.scala +i9103.scala +i9150-a.scala +i9150-b.scala +i9213.scala +i9226.scala +i9307.scala +i9342a.scala +i9342c.scala +i939.scala +i9403.scala +i9482.scala +i9509.scala +i9531.scala +i9562.scala +i9626.scala +i9751b.scala +i9775.scala +i9793.scala +i9828.scala +i9833.scala +i9841b.scala +i9844.scala +i9977.scala 
+i9996.scala +i9999.scala +i9999a.scala +ift-assign.scala +imp2-pos.scala +implicit-dep.scala +implicit-divergent.scala +implicit-scope-loop.scala +implicit-unwrap-tc.scala +implicitDivergenc.scala +implicitFuns.scala +implicitonSelect.scala +indent3.scala +inf.scala +infer.scala +infer2-pos.scala +inferOverloaded.scala +inferred.scala +injectivity-gadt.scala +inline-i1773.scala +inline-named-typeargs.scala +inline-rewrite.scala +inline-t2425.scala +inline-val-constValue-2.scala +inline-val-constValue-3.scala +inlineAccesses.scala +inlined-the.scala +interfaceObject.scala +intersection.scala +isApplicableSafe.scala +isRef.scala +java-futures.scala +jdk-8-app.scala +kind-projector-underscores.scala +kind-projector.scala +kindPolySemiGroup.scala +kinds.scala +kinzer.scala +ksbug1.scala +lambdalift.scala +lambdalift1.scala +large2.scala +lazyvals.scala +leading-infix-op.scala +listpattern.scala +local-objects.scala +local-signature.scala +localmodules.scala +lookuprefined.scala +main-method-scheme.scala +manifest-summoning.scala +match-type-inference.scala +match.scala +matches.scala +matrixOps.scala +matthias3.scala +matthias5.scala +maxim1.scala +merge-constraint.scala +michel3.scala +mirror-implicit-scope.scala +misc-unapply_pos.scala +modules1.scala +mt-scrutinee-widen3.scala +multiple-additional-imports.scala +named-typeargs.scala +native-warning.scala +no-selftype.scala +objXfun.scala +opaque-immutable-array.scala +opaque-inline.scala +opaque-nullable.scala +opaque-simple.scala +opassign.scala +overloaddefault.scala +overloadedAccess.scala +overrideDataRace.scala +p11210-multiowner.scala +p11210-values.scala +packageobject.scala +packagings.scala +param-depmeth.scala +partialApplications.scala +patdef.scala +patmat-exhaustive.scala +patmat-extract-tparam.scala +patmat.scala +patmatSeq.scala +patmatch-in-catch.scala +patterns1.scala +patterns2.scala +philippe2.scala +pickleAlias.scala +pickleSkolem.scala +pmbug.scala +polyalias.scala +polytypes.scala +pos-bug1210.scala +postconditions.scala +preserve-union.scala +printTest.scala +printbounds.scala +private-types-after-typer.scala +projection.scala +projections.scala +range.scala +rangepos-anonapply.scala +repeatedArgs213.scala +sammy_single.scala +sams.scala +scala-singleton.scala +scala3mock.scala +scoping3.scala +sets.scala +sigs.scala +simple-repeated-args.scala +simpleClass-3.scala +simpleConstructor.scala +simpleRefinement.scala +simpleTry.scala +simpleTypeSelect.scala +simplelists.scala +single-unions.scala +singleton-ops-composition.scala +singleton-ops-test-issue-8287.scala +singletons.scala +sip23-aliasing.scala +source-import-3-0.scala +source-import-3-3-migration.scala +source-import-3-3.scala +spec-asseenfrom.scala +spec-fields-old.scala +spec-foo.scala +spec-funs.scala +spec-lists.scala +spec-multiplectors.scala +spec-params-old.scala +spec-partially.scala +spec-partialmap.scala +spec-sealed.scala +spec-super.scala +spec-tailcall.scala +spec-thistype.scala +spec-traits.scala +specialize10.scala +spurious-overload.scala +staleSymbol.scala +strings.scala +strip-tvars-for-lubbasetypes.scala +sudoku.scala +supercalls.scala +switchUnbox.scala +switches.scala +t0017.scala +t0029.scala +t0030.scala +t0031.scala +t0036.scala +t0049.scala +t0069.scala +t0076.scala +t0081.scala +t0085.scala +t0091.scala +t0123.scala +t0301.scala +t0304.scala +t0438.scala +t0654.scala +t0710.scala +t0770.scala +t0786.scala +t0872.scala +t0904.scala +t1006.scala +t1027.scala +t10387b.scala +t1049.scala +t1053.scala +t1075.scala +t112606A.scala 
+t1131.scala +t1146.scala +t115.scala +t116.scala +t1168.scala +t1208.scala +t121.scala +t1226.scala +t1236a.scala +t1260.scala +t1280.scala +t1391.scala +t1406.scala +t1422_pos.scala +t1560.scala +t159.scala +t1625.scala +t1625b.scala +t16827.scala +t1756.scala +t177.scala +t1786-cycle.scala +t1789.scala +t183.scala +t1832.scala +t1843.scala +t1987a.scala +t1996.scala +t2023.scala +t2066.scala +t2081.scala +t2082.scala +t2119.scala +t2127.scala +t2130-1.scala +t2179.scala +t2194.scala +t2260.scala +t229.scala +t2310.scala +t2331.scala +t2421.scala +t2444.scala +t2454.scala +t2486.scala +t2503.scala +t2619.scala +t2664.scala +t2667.scala +t267.scala +t2683.scala +t2691.scala +t2693.scala +t2708.scala +t2712-4.scala +t2712-8.scala +t2795-new.scala +t2797.scala +t2809.scala +t295.scala +t296.scala +t3020.scala +t304.scala +t3071.scala +t3136.scala +t3152.scala +t3160.scala +t319.scala +t3343.scala +t3371.scala +t3373.scala +t3374.scala +t3452f.scala +t348plus.scala +t3528.scala +t3570.scala +t3578.scala +t3631.scala +t3636.scala +t3792.scala +t3800.scala +t3837.scala +t3862.scala +t3866.scala +t3880.scala +t3898.scala +t397.scala +t3986.scala +t4052.scala +t4063.scala +t4070b.scala +t4114.scala +t4176b.scala +t4220.scala +t4275.scala +t443.scala +t4430.scala +t4547.scala +t4593.scala +t4692.scala +t4716.scala +t4760.scala +t4859.scala +t5033.scala +t5070.scala +t5082.scala +t5084.scala +t5099.scala +t5127.scala +t514.scala +t522.scala +t5245.scala +t5305.scala +t5330c.scala +t5359.scala +t5399a.scala +t5508-min-okay.scala +t5508-min.scala +t5542.scala +t5577.scala +t5606.scala +t5643.scala +t566.scala +t5720-ownerous.scala +t5779-numeq-warn.scala +t578.scala +t5796.scala +t5829.scala +t5856b.scala +t5862.scala +t5886.scala +t5892.scala +t5900a.scala +t5930.scala +t5932.scala +t5968.scala +t6022b.scala +t604.scala +t6089b.scala +t6091.scala +t6123-explaintypes-implicits.scala +t613.scala +t6146.scala +t6162-inheritance.scala +t6208.scala +t6231.scala +t6278-synth-def.scala +t6301.scala +t6311.scala +t640.scala +t651.scala +t6514.scala +t6537.scala +t6551.scala +t6575a.scala +t6664b.scala +t6675.scala +t6722.scala +t6745.scala +t675.scala +t6771.scala +t6780.scala +t694.scala +t6948.scala +t6966.scala +t697.scala +t698.scala +t7180.scala +t7183.scala +t7226.scala +t7315.scala +t7377b.scala +t7427.scala +t7433.scala +t7475a.scala +t7475d.scala +t757a.scala +t762.scala +t767.scala +t7690.scala +t7753.scala +t7815.scala +t7853-partial-function.scala +t7864.scala +t788.scala +t803.scala +t805.scala +t812.scala +t8170.scala +t8177b.scala +t8177d.scala +t8219.scala +t8300-conversions-b.scala +t8300-patmat-b.scala +t8315.scala +t8363.scala +t8364.scala +t8367.scala +t839.scala +t851.scala +t927.scala +t946.scala +tailrec.scala +targetName-refine.scala +tcpoly_boundedmonad.scala +tcpoly_checkkinds_mix.scala +tcpoly_infer_easy.scala +tcpoly_infer_implicit_tuple_wrapper.scala +tcpoly_overloaded.scala +tcpoly_poly.scala +tcpoly_ticket2096.scala +tcpoly_variance_pos.scala +tcpoly_wildcards.scala +test-implicits2.scala +test-pickler-private-this.scala +test-typers.scala +test4.scala +test4refine.scala +test5.scala +testCoercionThis.scala +testcast.scala +thistypes.scala +ticket0137.scala +tparam_inf.scala +trailing-comma-pattern.scala +trait-java-enum.scala +traits.scala +transparent-inline.scala +tryWithoutHandler.scala +tuple-exaustivity.scala +tuplePatDef.scala +type-projection.scala +type-test-syntesize.scala +typealias_dubious.scala +typeclass-encoding3.scala +typedapply.scala +typelevel0.scala 
+typetestcast.scala +unapplyContexts2.scala +unary-eq.scala +unchecked-a.scala +unify-wildcard-patterns.scala +unions.scala +vararg-pattern.scala +varargs-annot-currying.scala +variances_pos.scala +vcblock.scala +vcif.scala +virtpatmat_anonfun_for.scala +virtpatmat_exhaust.scala +virtpatmat_exist3.scala +virtpatmat_exist_uncurry.scala +virtpatmat_infer_single_1.scala +widen-existential.scala +wildcardDefs.scala +xfatalWarnings.scala +z1730.scala +zipper.scala +zoo.scala \ No newline at end of file diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index 9888916a86c9..b0da78f0a1eb 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -22,10 +22,20 @@ i15922.scala t5031_2.scala i16997.scala i7414.scala +i17588.scala +i9804.scala +i13433.scala +i16649-irrefutable.scala +strict-pattern-bindings-3.0-migration.scala +i17186b.scala +i11982a.scala # Tree is huge and blows stack for printing Text i7034.scala +# Causes cyclic reference by interacting with compiler stdlib types +stdlib + # Stale symbol: package object scala seqtype-cycle @@ -49,6 +59,10 @@ i6505.scala i15158.scala i15155.scala i15827.scala +i17149.scala +tuple-fold.scala +mt-redux-norm.perspective.scala +i18211.scala # Opaque type i5720.scala @@ -93,6 +107,7 @@ i4176-gadt.scala # GADT difference i13974a.scala +i15867.scala java-inherited-type1 diff --git a/compiler/test/dotc/run-macros-scala2-library-tasty.blacklist b/compiler/test/dotc/run-macros-scala2-library-tasty.blacklist new file mode 100644 index 000000000000..63a6e2cee345 --- /dev/null +++ b/compiler/test/dotc/run-macros-scala2-library-tasty.blacklist @@ -0,0 +1,4 @@ +# Checkfile differences for equivalent type +tasty-extractors-1 +tasty-extractors-2 +tasty-extractors-types diff --git a/compiler/test/dotty/Properties.scala b/compiler/test/dotty/Properties.scala index cc47303d5468..4bb3628722ad 100644 --- a/compiler/test/dotty/Properties.scala +++ b/compiler/test/dotty/Properties.scala @@ -82,6 +82,12 @@ object Properties { /** scala-library jar */ def scalaLibrary: String = sys.props("dotty.tests.classes.scalaLibrary") + /** scala-library TASTy jar */ + def scalaLibraryTasty: Option[String] = sys.props.get("dotty.tests.tasties.scalaLibrary") + + /** If we are using the scala-library TASTy jar */ + def usingScalaLibraryTasty: Boolean = scalaLibraryTasty.isDefined + /** scala-asm jar */ def scalaAsm: String = sys.props("dotty.tests.classes.scalaAsm") diff --git a/compiler/test/dotty/tools/CheckTypesTests.scala b/compiler/test/dotty/tools/CheckTypesTests.scala index df4bc1636d82..290d8523df49 100644 --- a/compiler/test/dotty/tools/CheckTypesTests.scala +++ b/compiler/test/dotty/tools/CheckTypesTests.scala @@ -24,7 +24,7 @@ class CheckTypeTest extends DottyTest { "List[B]" ) - checkTypes(source, types: _*) { + checkTypes(source, types*) { case (List(a, b, lu, li, lr, ls, la, lb), context) => given Context = context diff --git a/compiler/test/dotty/tools/DottyTest.scala b/compiler/test/dotty/tools/DottyTest.scala index 54cf0e0c177c..7ccbc09a4c92 100644 --- a/compiler/test/dotty/tools/DottyTest.scala +++ b/compiler/test/dotty/tools/DottyTest.scala @@ -44,9 +44,14 @@ trait DottyTest extends ContextEscapeDetection { fc.setProperty(ContextDoc, new ContextDocstrings) } + protected def defaultCompiler: Compiler = new Compiler() + private def compilerWithChecker(phase: String)(assertion: (tpd.Tree, Context) => Unit) = new Compiler { + + private val baseCompiler = 
defaultCompiler + override def phases = { - val allPhases = super.phases + val allPhases = baseCompiler.phases val targetPhase = allPhases.flatten.find(p => p.phaseName == phase).get val groupsBefore = allPhases.takeWhile(x => !x.contains(targetPhase)) val lastGroup = allPhases.find(x => x.contains(targetPhase)).get.takeWhile(x => !(x eq targetPhase)) @@ -67,6 +72,15 @@ trait DottyTest extends ContextEscapeDetection { run.runContext } + def checkAfterCompile(checkAfterPhase: String, sources: List[String])(assertion: Context => Unit): Context = { + val c = defaultCompiler + val run = c.newRun + run.compileFromStrings(sources) + val rctx = run.runContext + assertion(rctx) + rctx + } + def checkTypes(source: String, typeStrings: String*)(assertion: (List[Type], Context) => Unit): Unit = checkTypes(source, List(typeStrings.toList)) { (tpess, ctx) => (tpess: @unchecked) match { case List(tpes) => assertion(tpes, ctx) diff --git a/compiler/test/dotty/tools/SignatureTest.scala b/compiler/test/dotty/tools/SignatureTest.scala index 43d517417108..587d7098a0a7 100644 --- a/compiler/test/dotty/tools/SignatureTest.scala +++ b/compiler/test/dotty/tools/SignatureTest.scala @@ -2,14 +2,23 @@ package dotty.tools import vulpix.TestConfiguration +import org.junit.Assert._ import org.junit.Test -import dotc.ast.Trees._ +import dotc.ast.untpd import dotc.core.Decorators._ import dotc.core.Contexts._ +import dotc.core.Flags._ import dotc.core.Phases._ +import dotc.core.Names._ import dotc.core.Types._ import dotc.core.Symbols._ +import dotc.core.StdNames._ +import dotc.core.Signature +import dotc.typer.ProtoTypes.constrained +import dotc.typer.Inferencing.isFullyDefined +import dotc.typer.ForceDegree +import dotc.util.NoSourcePosition import java.io.File import java.nio.file._ @@ -38,3 +47,69 @@ class SignatureTest: |${ref.denot.signature}""".stripMargin) } } + + /** Ensure that signature computation returns an underdefined signature when + * the signature depends on uninstantiated type variables. + */ + @Test def underdefined: Unit = + inCompilerContext(TestConfiguration.basicClasspath, separateRun = false, + """trait Foo + |trait Bar + |class A[T <: Tuple]: + | def and(x: T & Foo): Unit = {} + | def andor(x: (T | Bar) & Foo): Unit = {} + | def array(x: Array[(T | Bar) & Foo]): Unit = {} + | def tuple(x: Foo *: T): Unit = {} + | def tuple2(x: Foo *: (T | Tuple) & Foo): Unit = {} + |""".stripMargin): + val cls = requiredClass("A") + val tvar = constrained(cls.requiredMethod(nme.CONSTRUCTOR).info.asInstanceOf[TypeLambda]).head + tvar <:< defn.TupleTypeRef + val prefix = cls.typeRef.appliedTo(tvar) + + def checkSignatures(expectedIsUnderDefined: Boolean)(using Context): Unit = + for decl <- cls.info.decls.toList if decl.is(Method) && !decl.isConstructor do + val meth = decl.asSeenFrom(prefix) + val sig = meth.info.signature + val what = if expectedIsUnderDefined then "underdefined" else "fully-defined" + assert(sig.isUnderDefined == expectedIsUnderDefined, i"Signature of `$meth` with prefix `$prefix` and type `${meth.info}` should be $what but is `$sig`") + + checkSignatures(expectedIsUnderDefined = true) + assert(isFullyDefined(tvar, force = ForceDegree.all), s"Could not instantiate $tvar") + checkSignatures(expectedIsUnderDefined = false) + + /** Check that signature caching behaves correctly with respect to retracted + * instantiations of type variables. 
+ */ + @Test def cachingWithRetraction: Unit = + inCompilerContext(TestConfiguration.basicClasspath, separateRun = false, + """trait Foo + |trait Bar + |class A[T]: + | def and(x: T & Foo): Unit = {} + |""".stripMargin): + val cls = requiredClass("A") + val tvar = constrained(cls.requiredMethod(nme.CONSTRUCTOR).info.asInstanceOf[TypeLambda]).head + val prefix = cls.typeRef.appliedTo(tvar) + val ref = prefix.select(cls.requiredMethod("and")).asInstanceOf[TermRef] + + /** Check that the signature of the first parameter of `ref` is equal to `expectedParamSig`. */ + def checkParamSig(ref: TermRef, expectedParamSig: TypeName)(using Context): Unit = + assertEquals(i"Check failed for param signature of $ref", + expectedParamSig, ref.signature.paramsSig.head) + // Both NamedType and MethodOrPoly cache signatures, so check both caches. + assertEquals(i"Check failed for param signature of ${ref.info} (but not for $ref itself)", + expectedParamSig, ref.info.signature.paramsSig.head) + + + // Initially, the param signature is Uninstantiated since it depends on an uninstantiated type variable + checkParamSig(ref, tpnme.Uninstantiated) + + // In this context, the signature is the erasure of `Bar & Foo`. + inContext(ctx.fresh.setNewTyperState()): + tvar =:= requiredClass("Bar").typeRef + assert(isFullyDefined(tvar, force = ForceDegree.all), s"Could not instantiate $tvar") + checkParamSig(ref, "Bar".toTypeName) + + // If our caching logic is working correctly, we should get the original signature here. + checkParamSig(ref, tpnme.Uninstantiated) diff --git a/compiler/test/dotty/tools/TestSources.scala b/compiler/test/dotty/tools/TestSources.scala index 6961a61b69b6..a288e49c5eb9 100644 --- a/compiler/test/dotty/tools/TestSources.scala +++ b/compiler/test/dotty/tools/TestSources.scala @@ -6,6 +6,7 @@ import java.io.File import java.nio.file._ import scala.jdk.CollectionConverters._ +import dotty.Properties object TestSources { @@ -15,11 +16,17 @@ object TestSources { def posTestPicklingBlacklistFile: String = "compiler/test/dotc/pos-test-pickling.blacklist" def posTestRecheckExcludesFile: String = "compiler/test/dotc/pos-test-recheck.excludes" def posLazyValsAllowlistFile: String = "compiler/test/dotc/pos-lazy-vals-tests.allowlist" + def posLintingAllowlistFile: String = "compiler/test/dotc/pos-linting.allowlist" + def posInitGlobalScala2LibraryTastyBlacklistFile: String = "compiler/test/dotc/pos-init-global-scala2-library-tasty.blacklist" def posFromTastyBlacklisted: List[String] = loadList(posFromTastyBlacklistFile) def posTestPicklingBlacklisted: List[String] = loadList(posTestPicklingBlacklistFile) def posTestRecheckExcluded: List[String] = loadList(posTestRecheckExcludesFile) def posLazyValsAllowlist: List[String] = loadList(posLazyValsAllowlistFile) + def posLintingAllowlist: List[String] = loadList(posLintingAllowlistFile) + def posInitGlobalScala2LibraryTastyBlacklisted: List[String] = + if Properties.usingScalaLibraryTasty then loadList(posInitGlobalScala2LibraryTastyBlacklistFile) + else Nil // run tests lists @@ -27,12 +34,35 @@ object TestSources { def runTestPicklingBlacklistFile: String = "compiler/test/dotc/run-test-pickling.blacklist" def runTestRecheckExcludesFile: String = "compiler/test/dotc/run-test-recheck.excludes" def runLazyValsAllowlistFile: String = "compiler/test/dotc/run-lazy-vals-tests.allowlist" - + def runMacrosScala2LibraryTastyBlacklistFile: String = "compiler/test/dotc/run-macros-scala2-library-tasty.blacklist" def runFromTastyBlacklisted: List[String] = 
loadList(runFromTastyBlacklistFile) def runTestPicklingBlacklisted: List[String] = loadList(runTestPicklingBlacklistFile) def runTestRecheckExcluded: List[String] = loadList(runTestRecheckExcludesFile) def runLazyValsAllowlist: List[String] = loadList(runLazyValsAllowlistFile) + def runMacrosScala2LibraryTastyBlacklisted: List[String] = + if Properties.usingScalaLibraryTasty then loadList(runMacrosScala2LibraryTastyBlacklistFile) + else Nil + + // neg tests lists + + def negScala2LibraryTastyBlacklistFile: String = "compiler/test/dotc/neg-scala2-library-tasty.blacklist" + def negInitGlobalScala2LibraryTastyBlacklistFile: String = "compiler/test/dotc/neg-init-global-scala2-library-tasty.blacklist" + + def negScala2LibraryTastyBlacklisted: List[String] = + if Properties.usingScalaLibraryTasty then loadList(negScala2LibraryTastyBlacklistFile) + else Nil + def negInitGlobalScala2LibraryTastyBlacklisted: List[String] = + if Properties.usingScalaLibraryTasty then loadList(negInitGlobalScala2LibraryTastyBlacklistFile) + else Nil + + // patmat tests lists + + def patmatExhaustivityScala2LibraryTastyBlacklistFile: String = "compiler/test/dotc/patmat-exhaustivity-scala2-library-tasty.blacklist" + + def patmatExhaustivityScala2LibraryTastyBlacklisted: List[String] = + if Properties.usingScalaLibraryTasty then loadList(patmatExhaustivityScala2LibraryTastyBlacklistFile) + else Nil // load lists diff --git a/compiler/test/dotty/tools/backend/jvm/ArrayApplyOptTest.scala b/compiler/test/dotty/tools/backend/jvm/ArrayApplyOptTest.scala index e7cd20ba98b2..c99de8fcf956 100644 --- a/compiler/test/dotty/tools/backend/jvm/ArrayApplyOptTest.scala +++ b/compiler/test/dotty/tools/backend/jvm/ArrayApplyOptTest.scala @@ -1,4 +1,5 @@ -package dotty.tools.backend.jvm +package dotty.tools +package backend.jvm import org.junit.Test import org.junit.Assert._ @@ -109,7 +110,7 @@ class ArrayApplyOptTest extends DottyBytecodeTest { @Test def testArrayInlined = test( """{ - | inline def array(inline xs: Int*): Array[Int] = Array(xs: _*) + | inline def array(inline xs: Int*): Array[Int] = Array(xs*) | array(1, 2) |}""".stripMargin, newArray2Opcodes(T_INT, List(Op(DUP), Op(ICONST_0), Op(ICONST_1), Op(IASTORE), Op(DUP), Op(ICONST_1), Op(ICONST_2), Op(IASTORE), TypeOp(CHECKCAST, "[I"))) @@ -117,7 +118,7 @@ class ArrayApplyOptTest extends DottyBytecodeTest { @Test def testArrayInlined2 = test( """{ - | inline def array(inline x: Int, inline xs: Int*): Array[Int] = Array(x, xs: _*) + | inline def array(inline x: Int, inline xs: Int*): Array[Int] = Array(x, xs*) | array(1, 2) |}""".stripMargin, newArray2Opcodes(T_INT, List(Op(DUP), Op(ICONST_0), Op(ICONST_1), Op(IASTORE), Op(DUP), Op(ICONST_1), Op(ICONST_2), Op(IASTORE))) @@ -125,7 +126,7 @@ class ArrayApplyOptTest extends DottyBytecodeTest { @Test def testArrayInlined3 = test( """{ - | inline def array[T](inline xs: T*)(using inline ct: scala.reflect.ClassTag[T]): Array[T] = Array(xs: _*) + | inline def array[T](inline xs: T*)(using inline ct: scala.reflect.ClassTag[T]): Array[T] = Array(xs*) | array(1, 2) |}""".stripMargin, newArray2Opcodes(T_INT, List(Op(DUP), Op(ICONST_0), Op(ICONST_1), Op(IASTORE), Op(DUP), Op(ICONST_1), Op(ICONST_2), Op(IASTORE), TypeOp(CHECKCAST, "[I"))) @@ -160,4 +161,153 @@ class ArrayApplyOptTest extends DottyBytecodeTest { } } + @Test def testListApplyAvoidsIntermediateArray = { + checkApplyAvoidsIntermediateArray("List"): + """import scala.collection.immutable.{ ::, Nil } + |class Foo { + | def meth1: List[String] = List("1", "2", "3") + | def meth2: 
List[String] = new ::("1", new ::("2", new ::("3", Nil))) + |} + """.stripMargin + } + + @Test def testSeqApplyAvoidsIntermediateArray = { + checkApplyAvoidsIntermediateArray("Seq"): + """import scala.collection.immutable.{ ::, Nil } + |class Foo { + | def meth1: Seq[String] = Seq("1", "2", "3") + | def meth2: Seq[String] = new ::("1", new ::("2", new ::("3", Nil))) + |} + """.stripMargin + } + + @Test def testSeqApplyAvoidsIntermediateArray2 = { + checkApplyAvoidsIntermediateArray("scala.collection.immutable.Seq"): + """import scala.collection.immutable.{ ::, Seq, Nil } + |class Foo { + | def meth1: Seq[String] = Seq("1", "2", "3") + | def meth2: Seq[String] = new ::("1", new ::("2", new ::("3", Nil))) + |} + """.stripMargin + } + + @Test def testSeqApplyAvoidsIntermediateArray3 = { + checkApplyAvoidsIntermediateArray("scala.collection.Seq"): + """import scala.collection.immutable.{ ::, Nil }, scala.collection.Seq + |class Foo { + | def meth1: Seq[String] = Seq("1", "2", "3") + | def meth2: Seq[String] = new ::("1", new ::("2", new ::("3", Nil))) + |} + """.stripMargin + } + + @Test def testListApplyAvoidsIntermediateArray_max1 = { + checkApplyAvoidsIntermediateArray_examples("max1"): + """ def meth1: List[Object] = List[Object]("1", "2", "3", "4", "5", "6", "7") + | def meth2: List[Object] = new ::("1", new ::("2", new ::("3", new ::("4", new ::("5", new ::("6", new ::("7", Nil))))))) + """.stripMargin + } + + @Test def testListApplyAvoidsIntermediateArray_max2 = { + checkApplyAvoidsIntermediateArray_examples("max2"): + """ def meth1: List[Object] = List[Object]("1", "2", "3", "4", "5", "6", List[Object]()) + | def meth2: List[Object] = new ::("1", new ::("2", new ::("3", new ::("4", new ::("5", new ::("6", new ::(Nil, Nil))))))) + """.stripMargin + } + + @Test def testListApplyAvoidsIntermediateArray_max3 = { + checkApplyAvoidsIntermediateArray_examples("max3"): + """ def meth1: List[Object] = List[Object]("1", "2", "3", "4", "5", List[Object]("6")) + | def meth2: List[Object] = new ::("1", new ::("2", new ::("3", new ::("4", new ::("5", new ::(new ::("6", Nil), Nil)))))) + """.stripMargin + } + + @Test def testListApplyAvoidsIntermediateArray_max4 = { + checkApplyAvoidsIntermediateArray_examples("max4"): + """ def meth1: List[Object] = List[Object]("1", "2", "3", "4", List[Object]("5", "6")) + | def meth2: List[Object] = new ::("1", new ::("2", new ::("3", new ::("4", new ::(new ::("5", new ::("6", Nil)), Nil))))) + """.stripMargin + } + + @Test def testListApplyAvoidsIntermediateArray_over1 = { + checkApplyAvoidsIntermediateArray_examples("over1"): + """ def meth1: List[Object] = List("1", "2", "3", "4", "5", "6", "7", "8") + | def meth2: List[Object] = List(wrapRefArray(Array("1", "2", "3", "4", "5", "6", "7", "8"))*) + """.stripMargin + } + + @Test def testListApplyAvoidsIntermediateArray_over2 = { + checkApplyAvoidsIntermediateArray_examples("over2"): + """ def meth1: List[Object] = List[Object]("1", "2", "3", "4", "5", "6", "7", List[Object]()) + | def meth2: List[Object] = List(wrapRefArray(Array[Object]("1", "2", "3", "4", "5", "6", "7", Nil))*) + """.stripMargin + } + + @Test def testListApplyAvoidsIntermediateArray_over3 = { + checkApplyAvoidsIntermediateArray_examples("over3"): + """ def meth1: List[Object] = List[Object]("1", "2", "3", "4", "5", "6", List[Object]("7")) + | def meth2: List[Object] = new ::("1", new ::("2", new ::("3", new ::("4", new ::("5", new ::("6", new ::(List(wrapRefArray(Array[Object]("7"))*), Nil))))))) + """.stripMargin + } + + @Test def 
testListApplyAvoidsIntermediateArray_over4 = { + checkApplyAvoidsIntermediateArray_examples("over4"): + """ def meth1: List[Object] = List[Object]("1", "2", "3", "4", "5", List[Object]("6", "7")) + | def meth2: List[Object] = new ::("1", new ::("2", new ::("3", new ::("4", new ::("5", new ::(List(wrapRefArray(Array[Object]("6", "7"))*), Nil)))))) + """.stripMargin + } + + @Test def testListApplyAvoidsIntermediateArray_max5 = { + checkApplyAvoidsIntermediateArray_examples("max5"): + """ def meth1: List[Object] = List[Object](List[Object](List[Object](List[Object](List[Object](List[Object](List[Object](List[Object]()))))))) + | def meth2: List[Object] = new ::(new ::(new ::(new ::(new ::(new ::(new ::(Nil, Nil), Nil), Nil), Nil), Nil), Nil), Nil) + """.stripMargin + } + + @Test def testListApplyAvoidsIntermediateArray_over5 = { + checkApplyAvoidsIntermediateArray_examples("over5"): + """ def meth1: List[Object] = List[Object](List[Object](List[Object](List[Object](List[Object](List[Object](List[Object](List[Object](List[Object]())))))))) + | def meth2: List[Object] = new ::(new ::(new ::(new ::(new ::(new ::(new ::(List[Object](wrapRefArray(Array[Object](Nil))*), Nil), Nil), Nil), Nil), Nil), Nil), Nil) + """.stripMargin + } + + @Test def testListApplyAvoidsIntermediateArray_max6 = { + checkApplyAvoidsIntermediateArray_examples("max6"): + """ def meth1: List[Object] = List[Object]("1", "2", List[Object]("3", "4", List[Object](List[Object]()))) + | def meth2: List[Object] = new ::("1", new ::("2", new ::(new ::("3", new ::("4", new ::(new ::(Nil, Nil), Nil))), Nil))) + """.stripMargin + } + + @Test def testListApplyAvoidsIntermediateArray_over6 = { + checkApplyAvoidsIntermediateArray_examples("over6"): + """ def meth1: List[Object] = List[Object]("1", "2", List[Object]("3", "4", List[Object]("5"))) + | def meth2: List[Object] = new ::("1", new ::("2", new ::(new ::("3", new ::("4", new ::(new ::("5", Nil), Nil))), Nil))) + """.stripMargin + } + + def checkApplyAvoidsIntermediateArray_examples(name: String)(body: String): Unit = { + checkApplyAvoidsIntermediateArray(s"List_$name"): + s"""import scala.collection.immutable.{ ::, Nil }, scala.runtime.ScalaRunTime.wrapRefArray + |class Foo { + |$body + |} + """.stripMargin + } + + def checkApplyAvoidsIntermediateArray(name: String)(source: String): Unit = { + checkBCode(source) { dir => + val clsIn = dir.lookupName("Foo.class", directory = false).input + val clsNode = loadClassNode(clsIn) + val meth1 = getMethod(clsNode, "meth1") + val meth2 = getMethod(clsNode, "meth2") + + val instructions1 = instructionsFromMethod(meth1).filter { case TypeOp(CHECKCAST, _) => false case _ => true } + val instructions2 = instructionsFromMethod(meth2).filter { case TypeOp(CHECKCAST, _) => false case _ => true } + + assert(instructions1 == instructions2, + s"the $name.apply method\n" + + diffInstructions(instructions1, instructions2)) + } + } + } diff --git a/compiler/test/dotty/tools/backend/jvm/AsmConverters.scala b/compiler/test/dotty/tools/backend/jvm/AsmConverters.scala index a7900994d402..c751937bd9f9 100644 --- a/compiler/test/dotty/tools/backend/jvm/AsmConverters.scala +++ b/compiler/test/dotty/tools/backend/jvm/AsmConverters.scala @@ -245,7 +245,7 @@ object ASMConverters { case Jump(op, label) => method.visitJumpInsn(op, asmLabel(label)) case Ldc(op, cst) => method.visitLdcInsn(cst) case LookupSwitch(op, dflt, keys, labels) => method.visitLookupSwitchInsn(asmLabel(dflt), keys.toArray, (labels map asmLabel).toArray) - case TableSwitch(op, min, max, dflt, 
labels) => method.visitTableSwitchInsn(min, max, asmLabel(dflt), (labels map asmLabel).toArray: _*) + case TableSwitch(op, min, max, dflt, labels) => method.visitTableSwitchInsn(min, max, asmLabel(dflt), (labels map asmLabel).toArray*) case Invoke(op, owner, name, desc, itf) => method.visitMethodInsn(op, owner, name, desc, itf) case InvokeDynamic(op, name, desc, bsm, bsmArgs) => method.visitInvokeDynamicInsn(name, desc, unconvertMethodHandle(bsm), unconvertBsmArgs(bsmArgs)) case NewArray(op, desc, dims) => method.visitMultiANewArrayInsn(desc, dims) diff --git a/compiler/test/dotty/tools/backend/jvm/AsmNode.scala b/compiler/test/dotty/tools/backend/jvm/AsmNode.scala index caaf250eeafa..046e93e0cfa2 100644 --- a/compiler/test/dotty/tools/backend/jvm/AsmNode.scala +++ b/compiler/test/dotty/tools/backend/jvm/AsmNode.scala @@ -28,7 +28,7 @@ sealed trait AsmNode[+T] { object AsmNode { type AsmMethod = AsmNode[MethodNode] type AsmField = AsmNode[FieldNode] - type AsmMember = AsmNode[_] + type AsmMember = AsmNode[?] implicit class ClassNodeOps(val node: ClassNode) { def fieldsAndMethods: List[AsmMember] = { diff --git a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTest.scala b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTest.scala index 6b7af09d4a2d..8a9611a9b165 100644 --- a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTest.scala +++ b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTest.scala @@ -202,7 +202,7 @@ trait DottyBytecodeTest { assert(succ, msg) } - private def sameCharacteristics(clazzA: ClassNode, clazzB: ClassNode)(f: AsmNode[_] => String): (Boolean, String) = { + private def sameCharacteristics(clazzA: ClassNode, clazzB: ClassNode)(f: AsmNode[?] => String): (Boolean, String) = { val ms1 = clazzA.fieldsAndMethods.toIndexedSeq val ms2 = clazzB.fieldsAndMethods.toIndexedSeq val name1 = clazzA.name @@ -254,7 +254,7 @@ trait DottyBytecodeTest { } .getOrElse(fail("Could not find constructor for object `Test`")) - private def boxingError(ins: List[_], source: String) = + private def boxingError(ins: List[?], source: String) = s"""|---------------------------------- |${ins.mkString("\n")} |---------------------------------- @@ -271,7 +271,7 @@ trait DottyBytecodeTest { } .getOrElse(fail("Could not find constructor for object `Test`")) - protected def boxingInstructions(method: MethodNode): (List[_], Boolean) = { + protected def boxingInstructions(method: MethodNode): (List[?], Boolean) = { val ins = instructionsFromMethod(method) val boxed = ins.exists { case Invoke(op, owner, name, desc, itf) => diff --git a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala index ac4ba3ee0e75..94d42952a6eb 100644 --- a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala +++ b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala @@ -308,7 +308,7 @@ class DottyBytecodeTests extends DottyBytecodeTest { |import java.nio.file._ |class Test { | def test(xs: Array[String]) = { - | val p4 = Paths.get("Hello", xs: _*) + | val p4 = Paths.get("Hello", xs*) | } |} """.stripMargin @@ -874,7 +874,7 @@ class DottyBytecodeTests extends DottyBytecodeTest { } } - @Test def freshNames = { + @Test def stableNames = { val sourceA = """|class A { | def a1[T: Ordering]: Unit = {} @@ -902,11 +902,11 @@ class DottyBytecodeTests extends DottyBytecodeTest { s"Method ${mn.name} has parameter $actualName but expected $expectedName") } - // The fresh name counter should be reset for every compilation unit + // Each 
definition should get the same names since there are no possible clashes. assertParamName(a1, "evidence$1") - assertParamName(a2, "evidence$2") + assertParamName(a2, "evidence$1") assertParamName(b1, "evidence$1") - assertParamName(b2, "evidence$2") + assertParamName(b2, "evidence$1") } } @@ -1058,7 +1058,7 @@ class DottyBytecodeTests extends DottyBytecodeTest { TypeOp(CHECKCAST, "scala/collection/immutable/$colon$colon"), VarOp(ASTORE, 3), VarOp(ALOAD, 3), - Invoke(INVOKEVIRTUAL, "scala/collection/immutable/$colon$colon", "next$access$1", "()Lscala/collection/immutable/List;", false), + Invoke(INVOKEVIRTUAL, "scala/collection/immutable/$colon$colon", "next", "()Lscala/collection/immutable/List;", false), VarOp(ASTORE, 4), VarOp(ALOAD, 3), Invoke(INVOKEVIRTUAL, "scala/collection/immutable/$colon$colon", "head", "()Ljava/lang/Object;", false), @@ -1112,7 +1112,7 @@ class DottyBytecodeTests extends DottyBytecodeTest { Invoke(INVOKESTATIC, "scala/runtime/BoxesRunTime", "unboxToInt", "(Ljava/lang/Object;)I", false), VarOp(ISTORE, 4), VarOp(ALOAD, 3), - Invoke(INVOKEVIRTUAL, "scala/collection/immutable/$colon$colon", "next$access$1", "()Lscala/collection/immutable/List;", false), + Invoke(INVOKEVIRTUAL, "scala/collection/immutable/$colon$colon", "next", "()Lscala/collection/immutable/List;", false), VarOp(ASTORE, 5), Op(ICONST_1), VarOp(ILOAD, 4), @@ -1682,6 +1682,57 @@ class DottyBytecodeTests extends DottyBytecodeTest { assertSameCode(instructions, expected) } } + + @Test def i18320(): Unit = { + val c1 = + """class C { + | def m: Unit = { + | val x = 1 + | } + |} + |""".stripMargin + checkBCode(c1) { dir => + val clsIn = dir.lookupName("C.class", directory = false).input + val clsNode = loadClassNode(clsIn, skipDebugInfo = false) + val method = getMethod(clsNode, "m") + val instructions = instructionsFromMethod(method).filter(_.isInstanceOf[LineNumber]) + val expected = List(LineNumber(3, Label(0))) + assertSameCode(instructions, expected) + + } + } + + @Test def i18816 = { + // The primary goal of this test is to check that `LineNumber` entries have correct numbers + val source = + """trait Context + | + |class A(x: Context) extends AnyVal: + | given [T]: Context = x + | + | def m1 = + | println(m3) + | def m2 = + | m3 // line 9 + | println(m2) + | + | def m3(using Context): String = "" + """.stripMargin + + checkBCode(source) { dir => + val clsIn = dir.lookupName("A$.class", directory = false).input + val clsNode = loadClassNode(clsIn, skipDebugInfo = false) + val method = getMethod(clsNode, "m2$1") + val instructions = instructionsFromMethod(method).filter(_.isInstanceOf[LineNumber]) + + // There used to be references to line 7 here + val expected = List( + LineNumber(9, Label(0)), + ) + + assertSameCode(instructions, expected) + } + } } object invocationReceiversTestCode { diff --git a/compiler/test/dotty/tools/backend/jvm/PublicInBinaryTests.scala b/compiler/test/dotty/tools/backend/jvm/PublicInBinaryTests.scala new file mode 100644 index 000000000000..25b46532e58b --- /dev/null +++ b/compiler/test/dotty/tools/backend/jvm/PublicInBinaryTests.scala @@ -0,0 +1,388 @@ +package dotty.tools.backend.jvm + +import scala.language.unsafeNulls + +import org.junit.Assert._ +import org.junit.Test + +import scala.tools.asm +import asm._ +import asm.tree._ + +import scala.tools.asm.Opcodes +import scala.jdk.CollectionConverters._ +import Opcodes._ + +class PublicInBinaryTests extends DottyBytecodeTest { + import ASMConverters._ + + private def privateOrProtectedOpcode = Opcodes.ACC_PRIVATE |
Opcodes.ACC_PROTECTED + + private def checkPublicMethod(classNode: ClassNode, methodName: String, desc: String): Unit = + val method = getMethod(classNode, methodName) + assert(method.desc == desc) + assert((method.access & privateOrProtectedOpcode) == 0) + + private def checkPrivateMethod(classNode: ClassNode, methodName: String, desc: String): Unit = + val method = getMethod(classNode, methodName) + assert(method.desc == desc) + assert((method.access & Opcodes.ACC_PRIVATE) == Opcodes.ACC_PRIVATE) + + private def checkPublicField(classNode: ClassNode, fieldName: String): Unit = + val field = getField(classNode, fieldName) + assert((field.access & privateOrProtectedOpcode) == 0) + + private def checkPrivateField(classNode: ClassNode, fieldName: String): Unit = + val field = getField(classNode, fieldName) + assert((field.access & Opcodes.ACC_PRIVATE) == Opcodes.ACC_PRIVATE) + + private def checkPublicClass(classNode: ClassNode): Unit = + assert((classNode.access & privateOrProtectedOpcode) == 0) + + @Test + def publicInBinaryDef(): Unit = { + val code = + """import scala.annotation.publicInBinary + |class C: + | @publicInBinary private[C] def packagePrivateMethod: Int = 1 + | @publicInBinary protected def protectedMethod: Int = 1 + | inline def inlined = packagePrivateMethod + protectedMethod + | def testInlined = inlined + """.stripMargin + checkBCode(code) { dir => + val cClass = loadClassNode(dir.lookupName("C.class", directory = false).input, skipDebugInfo = false) + + checkPublicMethod(cClass, "packagePrivateMethod", "()I") + checkPublicMethod(cClass, "protectedMethod", "()I") + + // Check that the @publicInBinary annotated method is called + val testInlined = getMethod(cClass, "testInlined") + val testInlinedInstructions = instructionsFromMethod(testInlined).filter(_.isInstanceOf[Invoke]) + assertSameCode(testInlinedInstructions, List( + Invoke(INVOKEVIRTUAL, "C", "packagePrivateMethod", "()I", false), + Invoke(INVOKEVIRTUAL, "C", "protectedMethod", "()I", false), + )) + } + } + + @Test + def publicInBinaryVal(): Unit = { + val code = + """import scala.annotation.publicInBinary + |class C: + | @publicInBinary private[C] val packagePrivateVal: Int = 1 + | @publicInBinary protected val protectedVal: Int = 1 + | @publicInBinary private[C] lazy val lazyPackagePrivateVal: Int = 1 + | @publicInBinary protected lazy val lazyProtectedVal: Int = 1 + | inline def inlined = packagePrivateVal + protectedVal + lazyPackagePrivateVal + lazyProtectedVal + | def testInlined = inlined + """.stripMargin + checkBCode(code) { dir => + val cClass = loadClassNode(dir.lookupName("C.class", directory = false).input, skipDebugInfo = false) + + checkPublicMethod(cClass, "packagePrivateVal", "()I") + checkPublicMethod(cClass, "protectedVal", "()I") + + checkPublicMethod(cClass, "lazyPackagePrivateVal", "()I") + checkPublicMethod(cClass, "lazyProtectedVal", "()I") + + // Check that the @publicInBinary annotated method is called + val testInlined = getMethod(cClass, "testInlined") + val testInlinedInstructions = instructionsFromMethod(testInlined).filter(_.isInstanceOf[Invoke]) + assertSameCode(testInlinedInstructions, List( + Invoke(INVOKEVIRTUAL, "C", "packagePrivateVal", "()I", false), + Invoke(INVOKEVIRTUAL, "C", "protectedVal", "()I", false), + Invoke(INVOKEVIRTUAL, "C", "lazyPackagePrivateVal", "()I", false), + Invoke(INVOKEVIRTUAL, "C", "lazyProtectedVal", "()I", false), + )) + } + } + + @Test + def publicInBinaryVar(): Unit = { + val code = + """import scala.annotation.publicInBinary + |class C: + |
@publicInBinary private[C] var packagePrivateVar: Int = 1 + | @publicInBinary protected var protectedVar: Int = 1 + | inline def inlined = + | packagePrivateVar = 1 + | protectedVar = 1 + | packagePrivateVar + protectedVar + | def testInlined = inlined + """.stripMargin + checkBCode(code) { dir => + val cClass = loadClassNode(dir.lookupName("C.class", directory = false).input, skipDebugInfo = false) + + checkPublicMethod(cClass, "packagePrivateVar", "()I") + checkPublicMethod(cClass, "packagePrivateVar_$eq", "(I)V") + checkPublicMethod(cClass, "protectedVar", "()I") + checkPublicMethod(cClass, "protectedVar_$eq", "(I)V") + + // Check that the @publicInBinary annotated method is called + val testInlined = getMethod(cClass, "testInlined") + val testInlinedInstructions = instructionsFromMethod(testInlined).filter(_.isInstanceOf[Invoke]) + assertSameCode(testInlinedInstructions, List( + Invoke(INVOKEVIRTUAL, "C", "packagePrivateVar_$eq", "(I)V", false), + Invoke(INVOKEVIRTUAL, "C", "protectedVar_$eq", "(I)V", false), + Invoke(INVOKEVIRTUAL, "C", "packagePrivateVar", "()I", false), + Invoke(INVOKEVIRTUAL, "C", "protectedVar", "()I", false), + )) + } + } + + @Test + def publicInBinaryGiven(): Unit = { + val code = + """import scala.annotation.publicInBinary + |class C: + | @publicInBinary private[C] given packagePrivateGiven1: Int = 1 + | @publicInBinary protected given protectedGiven1: Int = 1 + | @publicInBinary private[C] given packagePrivateGiven2(using Int): Int = 1 + | @publicInBinary protected given protectedGiven2(using Int): Int = 1 + | inline def inlined = + | packagePrivateGiven1 + protectedGiven1 + packagePrivateGiven2(using 1) + protectedGiven2(using 1) + | def testInlined = inlined + """.stripMargin + checkBCode(code) { dir => + val cClass = loadClassNode(dir.lookupName("C.class", directory = false).input, skipDebugInfo = false) + checkPublicMethod(cClass, "packagePrivateGiven1", "()I") + checkPublicMethod(cClass, "protectedGiven1", "()I") + + checkPublicMethod(cClass, "packagePrivateGiven2", "(I)I") + checkPublicMethod(cClass, "protectedGiven2", "(I)I") + + // Check that the @publicInBinary annotated method is called + val testInlined = getMethod(cClass, "testInlined") + val testInlinedInstructions = instructionsFromMethod(testInlined).filter(_.isInstanceOf[Invoke]) + assertSameCode(testInlinedInstructions, List( + Invoke(INVOKEVIRTUAL, "C", "packagePrivateGiven1", "()I", false), + Invoke(INVOKEVIRTUAL, "C", "protectedGiven1", "()I", false), + Invoke(INVOKEVIRTUAL, "C", "packagePrivateGiven2", "(I)I", false), + Invoke(INVOKEVIRTUAL, "C", "protectedGiven2", "(I)I", false), + )) + } + } + + @Test + def publicInBinaryClassParam(): Unit = { + val code = + """import scala.annotation.publicInBinary + |class C( + | @publicInBinary private[C] val packagePrivateVal: Int = 1, + | @publicInBinary protected val protectedVal: Int = 1, + |) { + | inline def inlined = + | packagePrivateVal + protectedVal + | def testInlined = inlined + |} + """.stripMargin + checkBCode(code) { dir => + val cClass = loadClassNode(dir.lookupName("C.class", directory = false).input, skipDebugInfo = false) + checkPublicMethod(cClass, "packagePrivateVal", "()I") + checkPublicMethod(cClass, "protectedVal", "()I") + + // Check that the @publicInBinary annotated method is called + val testInlined = getMethod(cClass, "testInlined") + val testInlinedInstructions = instructionsFromMethod(testInlined).filter(_.isInstanceOf[Invoke]) + assertSameCode(testInlinedInstructions, List( + Invoke(INVOKEVIRTUAL, "C", 
"packagePrivateVal", "()I", false), + Invoke(INVOKEVIRTUAL, "C", "protectedVal", "()I", false), + )) + } + } + + @Test + def publicInBinaryObject(): Unit = { + val code = + """package foo + |import scala.annotation.publicInBinary + |private object PrivateObject + |@publicInBinary private[foo] object PackagePrivateObject + |@publicInBinary protected object ProtectedObject + """.stripMargin + checkBCode(code) { dir => + val privateObject = loadClassNode(dir.subdirectoryNamed("foo").lookupName("PrivateObject$.class", directory = false).input, skipDebugInfo = false) + checkPublicClass(privateObject) + checkPublicField(privateObject, "MODULE$") + + val packagePrivateObject = loadClassNode(dir.subdirectoryNamed("foo").lookupName("PackagePrivateObject$.class", directory = false).input, skipDebugInfo = false) + checkPublicClass(packagePrivateObject) + checkPublicField(packagePrivateObject, "MODULE$") + + val protectedObject = loadClassNode(dir.subdirectoryNamed("foo").lookupName("ProtectedObject$.class", directory = false).input, skipDebugInfo = false) + checkPublicClass(protectedObject) + checkPublicField(protectedObject, "MODULE$") + } + } + + @Test + def publicInBinaryTraitDefs(): Unit = { + val code = + """import scala.annotation.publicInBinary + |trait C: + | @publicInBinary private[C] val packagePrivateVal: Int = 1 + | @publicInBinary protected val protectedVal: Int = 1 + | @publicInBinary private[C] lazy val packagePrivateLazyVal: Int = 1 + | @publicInBinary protected lazy val protectedLazyVal: Int = 1 + | @publicInBinary private[C] var packagePrivateVar: Int = 1 + | @publicInBinary protected var protectedVar: Int = 1 + | @publicInBinary private[C] def packagePrivateDef: Int = 1 + | @publicInBinary protected def protectedDef: Int = 1 + | inline def inlined = + | packagePrivateVar = 1 + | protectedVar = 1 + | packagePrivateVal + + | protectedVal + + | packagePrivateLazyVal + + | protectedLazyVal + + | packagePrivateVar + + | protectedVar + + | packagePrivateDef + + | protectedDef + | def testInlined = inlined + """.stripMargin + checkBCode(code) { dir => + val cTrait = loadClassNode(dir.lookupName("C.class", directory = false).input, skipDebugInfo = false) + + checkPublicMethod(cTrait, "packagePrivateVal", "()I") + checkPublicMethod(cTrait, "protectedVal", "()I") + checkPublicMethod(cTrait, "packagePrivateLazyVal", "()I") + checkPublicMethod(cTrait, "protectedLazyVal", "()I") + checkPublicMethod(cTrait, "packagePrivateVar", "()I") + checkPublicMethod(cTrait, "packagePrivateVar_$eq", "(I)V") + checkPublicMethod(cTrait, "protectedVar", "()I") + checkPublicMethod(cTrait, "protectedVar_$eq", "(I)V") + checkPublicMethod(cTrait, "packagePrivateDef", "()I") + checkPublicMethod(cTrait, "protectedDef", "()I") + + // Check that the @publicInBinary annotated method is called + val testInlined = getMethod(cTrait, "testInlined") + val testInlinedInstructions = instructionsFromMethod(testInlined).filter(_.isInstanceOf[Invoke]) + assertSameCode(testInlinedInstructions, List( + Invoke(INVOKEINTERFACE, "C", "packagePrivateVar_$eq", "(I)V", true), + Invoke(INVOKEINTERFACE, "C", "protectedVar_$eq", "(I)V", true), + Invoke(INVOKEINTERFACE, "C", "packagePrivateVal", "()I", true), + Invoke(INVOKEINTERFACE, "C", "protectedVal", "()I", true), + Invoke(INVOKEINTERFACE, "C", "packagePrivateLazyVal", "()I", true), + Invoke(INVOKEINTERFACE, "C", "protectedLazyVal", "()I", true), + Invoke(INVOKEINTERFACE, "C", "packagePrivateVar", "()I", true), + Invoke(INVOKEINTERFACE, "C", "protectedVar", "()I", true), + 
Invoke(INVOKEINTERFACE, "C", "packagePrivateDef", "()I", true), + Invoke(INVOKEINTERFACE, "C", "protectedDef", "()I", true) + )) + } + } + + @Test + def i13215(): Unit = { + val code = + """import scala.annotation.publicInBinary + |package foo: + | trait Bar: + | inline def baz = Baz + | def testInlined = baz + | @publicInBinary private[foo] object Baz + """.stripMargin + checkBCode(code) { dir => + val barClass = loadClassNode(dir.subdirectoryNamed("foo").lookupName("Bar.class", directory = false).input, skipDebugInfo = false) + checkPublicMethod(barClass, "testInlined", "()Lfoo/Baz$;") + } + } + + @Test + def i13215b(): Unit = { + val code = + """import scala.annotation.publicInBinary + |package foo: + | trait Bar: + | inline def baz = Baz + | def testInlined = baz + | @publicInBinary private object Baz + """.stripMargin + checkBCode(code) { dir => + val barClass = loadClassNode(dir.subdirectoryNamed("foo").lookupName("Bar.class", directory = false).input, skipDebugInfo = false) + checkPublicMethod(barClass, "testInlined", "()Lfoo/Baz$;") + } + } + + @Test + def i15413(): Unit = { + val code = + """import scala.quoted.* + |import scala.annotation.publicInBinary + |class Macro: + | inline def foo = Macro.fooImpl + | def test = foo + |object Macro: + | @publicInBinary private[Macro] def fooImpl = {} + """.stripMargin + checkBCode(code) { dir => + val macroClass = loadClassNode(dir.lookupName("Macro.class", directory = false).input, skipDebugInfo = false) + val testMethod = getMethod(macroClass, "test") + val testInstructions = instructionsFromMethod(testMethod).filter(_.isInstanceOf[Invoke]) + assertSameCode(testInstructions, List( + Invoke(INVOKEVIRTUAL, "Macro$", "fooImpl", "()V", false))) + } + } + + @Test + def i15413b(): Unit = { + val code = + """package foo + |import scala.annotation.publicInBinary + |class C: + | inline def baz = D.bazImpl + | def test = baz + |object D: + | @publicInBinary private[foo] def bazImpl = {} + """.stripMargin + checkBCode(code) { dir => + val barClass = loadClassNode(dir.subdirectoryNamed("foo").lookupName("C.class", directory = false).input, skipDebugInfo = false) + val testMethod = getMethod(barClass, "test") + val testInstructions = instructionsFromMethod(testMethod).filter(_.isInstanceOf[Invoke]) + assertSameCode(testInstructions, List( + Invoke(INVOKEVIRTUAL, "foo/D$", "bazImpl", "()V", false))) + } + } + + @Test + def noProtectedAccessorsForBinaryInPublic(): Unit = { + val code = + """import scala.annotation.publicInBinary + |package p { + | class A { + | protected def a(): Int = 1 + | @publicInBinary protected def b(): Int = 1 + | } + |} + |package q { + | class B extends p.A { + | trait BInner { + | def test1() = a() // protected accessor for `a` + | def test2() = b() // no protected accessor for `b` + | } + | } + |} + """.stripMargin + checkBCode(code) { dir => + val bClass = loadClassNode(dir.subdirectoryNamed("q").lookupName("B.class", directory = false).input, skipDebugInfo = false) + assert(bClass.methods.asScala.exists(_.name == "protected$a")) + assert(bClass.methods.asScala.forall(_.name != "protected$b")) + + val bInnerClass = loadClassNode(dir.subdirectoryNamed("q").lookupName("B$BInner.class", directory = false).input, skipDebugInfo = false) + + val test1Method = getMethod(bInnerClass, "test1") + val test1Instructions = instructionsFromMethod(test1Method).filter(_.isInstanceOf[Invoke]) + assertSameCode(test1Instructions, List( + Invoke(INVOKEINTERFACE, "q/B$BInner", "q$B$BInner$$$outer", "()Lq/B;", true), + Invoke(INVOKEVIRTUAL, 
"q/B", "protected$a", "()I", false))) + + val test2Method = getMethod(bInnerClass, "test2") + val test2Instructions = instructionsFromMethod(test2Method).filter(_.isInstanceOf[Invoke]) + assertSameCode(test2Instructions, List( + Invoke(INVOKEINTERFACE, "q/B$BInner", "q$B$BInner$$$outer", "()Lq/B;", true), + Invoke(INVOKEVIRTUAL, "q/B", "b", "()I", false) )) + } + } +} diff --git a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala index 2a665c478932..ab2adddbb16e 100644 --- a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala @@ -29,14 +29,11 @@ class BootstrappedOnlyCompilationTests { aggregateTests( compileFilesInDir("tests/bench", defaultOptions.without("-Yno-deep-subtypes")), compileFilesInDir("tests/pos-macros", defaultOptions.and("-Xcheck-macros")), - compileFilesInDir("tests/pos-custom-args/semanticdb", defaultOptions.and("-Xsemanticdb")), - compileDir("tests/pos-special/i7592", defaultOptions.and("-Yretain-trees")), - compileDir("tests/pos-special/i11331.1", defaultOptions), - compileDir("tests/pos-custom-args/i13405", defaultOptions.and("-Xfatal-warnings")), ).checkCompile() } - @Test def posWithCompilerCC: Unit = + // @Test + def posWithCompilerCC: Unit = implicit val testGroup: TestGroup = TestGroup("compilePosWithCompilerCC") aggregateTests( compileDir("tests/pos-with-compiler-cc/dotc", withCompilerOptions.and("-language:experimental.captureChecking")) @@ -107,12 +104,8 @@ class BootstrappedOnlyCompilationTests { @Test def negMacros: Unit = { implicit val testGroup: TestGroup = TestGroup("compileNegWithCompiler") - aggregateTests( - compileFilesInDir("tests/neg-macros", defaultOptions.and("-Xcheck-macros")), - compileFile("tests/pos-macros/i9570.scala", defaultOptions.and("-Xfatal-warnings")), - compileFile("tests/pos-macros/macro-deprecation.scala", defaultOptions.and("-Xfatal-warnings", "-deprecation")), - compileFile("tests/pos-macros/macro-experimental.scala", defaultOptions.and("-Yno-experimental")), - ).checkExpectedErrors() + compileFilesInDir("tests/neg-macros", defaultOptions.and("-Xcheck-macros")) + .checkExpectedErrors() } @Test def negWithCompiler: Unit = { @@ -127,28 +120,22 @@ class BootstrappedOnlyCompilationTests { @Test def runMacros: Unit = { implicit val testGroup: TestGroup = TestGroup("runMacros") - aggregateTests( - compileFilesInDir("tests/run-macros", defaultOptions.and("-Xcheck-macros")), - compileFilesInDir("tests/run-custom-args/Yretain-trees", defaultOptions and "-Yretain-trees"), - compileFilesInDir("tests/run-custom-args/Yread-comments", defaultOptions and "-Yread-docs"), - compileFilesInDir("tests/run-custom-args/run-macros-erased", defaultOptions.and("-language:experimental.erasedDefinitions").and("-Xcheck-macros")), - compileDir("tests/run-custom-args/Xmacro-settings/simple", defaultOptions.and("-Xmacro-settings:one,two,three")), - compileDir("tests/run-custom-args/Xmacro-settings/compileTimeEnv", defaultOptions.and("-Xmacro-settings:a,b=1,c.b.a=x.y.z=1,myLogger.level=INFO")), - ) - }.checkRuns() + compileFilesInDir("tests/run-macros", defaultOptions.and("-Xcheck-macros"), FileFilter.exclude(TestSources.runMacrosScala2LibraryTastyBlacklisted)) + .checkRuns() + } @Test def runWithCompiler: Unit = { implicit val testGroup: TestGroup = TestGroup("runWithCompiler") val basicTests = List( compileFilesInDir("tests/run-with-compiler", withCompilerOptions), 
compileFilesInDir("tests/run-staging", withStagingOptions), - compileFilesInDir("tests/run-custom-args/tasty-inspector", withTastyInspectorOptions) + compileFilesInDir("tests/run-tasty-inspector", withTastyInspectorOptions) ) val tests = if scala.util.Properties.isWin then basicTests - else compileDir("tests/run-custom-args/tasty-interpreter", withTastyInspectorOptions) :: basicTests + else compileDir("tests/old-tasty-interpreter-prototype", withTastyInspectorOptions) :: basicTests - aggregateTests(tests: _*).checkRuns() + aggregateTests(tests*).checkRuns() } @Test def runBootstrappedOnly: Unit = { diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index 4e86a3b83383..120b7373f7cc 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -4,7 +4,7 @@ package dotc import scala.language.unsafeNulls -import org.junit.{ Test, BeforeClass, AfterClass } +import org.junit.{ Test, BeforeClass, AfterClass, Ignore } import org.junit.Assert._ import org.junit.Assume._ import org.junit.experimental.categories.Category @@ -18,6 +18,7 @@ import scala.concurrent.duration._ import TestSources.sources import reporting.TestReporter import vulpix._ +import dotty.tools.dotc.config.ScalaSettings class CompilationTests { import ParallelTesting._ @@ -30,42 +31,23 @@ class CompilationTests { @Test def pos: Unit = { implicit val testGroup: TestGroup = TestGroup("compilePos") var tests = List( - compileFile("tests/pos/nullarify.scala", defaultOptions.and("-Ycheck:nullarify")), - compileFile("tests/pos-special/utf8encoded.scala", explicitUTF8), - compileFile("tests/pos-special/utf16encoded.scala", explicitUTF16), + compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init", "-Wunused:all", "-Xlint:private-shadow", "-Xlint:type-parameter-shadow"), FileFilter.include(TestSources.posLintingAllowlist)), + compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init"), FileFilter.exclude(TestSources.posLintingAllowlist)), + compileFilesInDir("tests/pos-deep-subtype", allowDeepSubtypes), compileFilesInDir("tests/pos-special/sourcepath/outer", defaultOptions.and("-sourcepath", "tests/pos-special/sourcepath")), compileFile("tests/pos-special/sourcepath/outer/nested/Test4.scala", defaultOptions.and("-sourcepath", "tests/pos-special/sourcepath")), - compileFilesInDir("tests/pos-special/fatal-warnings", defaultOptions.and("-Xfatal-warnings", "-deprecation", "-feature")), - compileFilesInDir("tests/pos-special/spec-t5545", defaultOptions), - compileFilesInDir("tests/pos-special/strawman-collections", allowDeepSubtypes), - compileFilesInDir("tests/pos-special/isInstanceOf", allowDeepSubtypes.and("-Xfatal-warnings")), - compileFilesInDir("tests/new", defaultOptions.and("-source", "3.2")), // just to see whether 3.2 works - compileFilesInDir("tests/pos-scala2", scala2CompatMode), + compileFilesInDir("tests/pos-scala2", defaultOptions.and("-source", "3.0-migration")), compileFilesInDir("tests/pos-custom-args/captures", defaultOptions.and("-language:experimental.captureChecking")), - compileFilesInDir("tests/pos-custom-args/erased", defaultOptions.and("-language:experimental.erasedDefinitions")), - compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init")), + compileFile("tests/pos-special/utf8encoded.scala", defaultOptions.and("-encoding", "UTF8")), + compileFile("tests/pos-special/utf16encoded.scala", defaultOptions.and("-encoding", "UTF16")), + 
compileDir("tests/pos-special/i18589", defaultOptions.and("-Ysafe-init").without("-Ycheck:all")), // Run tests for legacy lazy vals compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init", "-Ylegacy-lazy-vals", "-Ycheck-constraint-deps"), FileFilter.include(TestSources.posLazyValsAllowlist)), - compileFilesInDir("tests/pos-deep-subtype", allowDeepSubtypes), - compileFilesInDir("tests/pos-custom-args/no-experimental", defaultOptions.and("-Yno-experimental")), - compileFilesInDir("tests/pos-custom-args/strict", defaultOptions.and("-source", "future", "-deprecation", "-Xfatal-warnings")), compileDir("tests/pos-special/java-param-names", defaultOptions.withJavacOnlyOptions("-parameters")), - compileFile( - // succeeds despite -Xfatal-warnings because of -nowarn - "tests/neg-custom-args/fatal-warnings/xfatalWarnings.scala", - defaultOptions.and("-nowarn", "-Xfatal-warnings") - ), - compileFile("tests/pos-special/typeclass-scaling.scala", defaultOptions.and("-Xmax-inlines", "40")), - compileFile("tests/pos-special/i7575.scala", defaultOptions.andLanguageFeature("dynamics")), - compileFile("tests/pos-special/kind-projector.scala", defaultOptions.and("-Ykind-projector")), - compileFile("tests/pos-special/kind-projector-underscores.scala", defaultOptions.and("-Ykind-projector:underscores")), - compileFile("tests/run/i5606.scala", defaultOptions.and("-Yretain-trees")), - compileFile("tests/pos-custom-args/i8875.scala", defaultOptions.and("-Xprint:getters")), - compileFile("tests/pos-custom-args/i9267.scala", defaultOptions.and("-Ystop-after:erasure")), - compileFile("tests/pos-special/extend-java-enum.scala", defaultOptions.and("-source", "3.0-migration")), - compileFile("tests/pos-custom-args/help.scala", defaultOptions.and("-help", "-V", "-W", "-X", "-Y")), - compileFile("tests/pos-custom-args/i13044.scala", defaultOptions.and("-Xmax-inlines:33")), - compileFile("tests/pos-custom-args/jdk-8-app.scala", defaultOptions.and("-release:8")) + ) ::: ( + // FIXME: This fails due to a bug involving self types and capture checking + if Properties.usingScalaLibraryTasty then Nil + else List(compileDir("tests/pos-special/stdlib", allowDeepSubtypes)) ) if scala.util.Properties.isJavaAtLeast("16") then @@ -78,8 +60,13 @@ class CompilationTests { implicit val testGroup: TestGroup = TestGroup("rewrites") aggregateTests( - compileFile("tests/rewrites/rewrites.scala", scala2CompatMode.and("-rewrite", "-indent")), + compileFile("tests/rewrites/rewrites.scala", defaultOptions.and("-source", "3.0-migration").and("-rewrite", "-indent")), compileFile("tests/rewrites/rewrites3x.scala", defaultOptions.and("-rewrite", "-source", "future-migration")), + compileFile("tests/rewrites/rewrites3x-fatal-warnings.scala", defaultOptions.and("-rewrite", "-source", "future-migration", "-Xfatal-warnings")), + compileFile("tests/rewrites/uninitialized-var.scala", defaultOptions.and("-rewrite", "-source", "future-migration")), + compileFile("tests/rewrites/with-type-operator.scala", defaultOptions.and("-rewrite", "-source", "future-migration")), + compileFile("tests/rewrites/private-this.scala", defaultOptions.and("-rewrite", "-source", "future-migration")), + compileFile("tests/rewrites/alphanumeric-infix-operator.scala", defaultOptions.and("-rewrite", "-source", "future-migration")), compileFile("tests/rewrites/filtering-fors.scala", defaultOptions.and("-rewrite", "-source", "3.2-migration")), compileFile("tests/rewrites/refutable-pattern-bindings.scala", defaultOptions.and("-rewrite", "-source", "3.2-migration")), 
compileFile("tests/rewrites/i8982.scala", defaultOptions.and("-indent", "-rewrite")), @@ -132,65 +119,30 @@ class CompilationTests { ).times(2).checkCompile() } + // Warning tests ------------------------------------------------------------ + + @Test def warn: Unit = { + implicit val testGroup: TestGroup = TestGroup("compileWarn") + aggregateTests( + compileFilesInDir("tests/warn", defaultOptions), + ).checkWarnings() + } + // Negative tests ------------------------------------------------------------ @Test def negAll: Unit = { implicit val testGroup: TestGroup = TestGroup("compileNeg") aggregateTests( - compileFilesInDir("tests/neg", defaultOptions), - compileFilesInDir("tests/neg-tailcall", defaultOptions), - compileFilesInDir("tests/neg-strict", defaultOptions.and("-source", "future", "-deprecation", "-Xfatal-warnings")), - compileFilesInDir("tests/neg-no-kind-polymorphism", defaultOptions and "-Yno-kind-polymorphism"), - compileFilesInDir("tests/neg-custom-args/deprecation", defaultOptions.and("-Xfatal-warnings", "-deprecation")), - compileFilesInDir("tests/neg-custom-args/fatal-warnings", defaultOptions.and("-Xfatal-warnings")), - compileFilesInDir("tests/neg-custom-args/nowarn", defaultOptions.and("-deprecation", "-Wunused:nowarn", "-Wconf:msg=@nowarn annotation does not suppress any warnings:e")), - compileFilesInDir("tests/neg-custom-args/erased", defaultOptions.and("-language:experimental.erasedDefinitions")), - compileFilesInDir("tests/neg-custom-args/allow-double-bindings", allowDoubleBindings), - compileFilesInDir("tests/neg-custom-args/allow-deep-subtypes", allowDeepSubtypes), - compileFilesInDir("tests/neg-custom-args/feature", defaultOptions.and("-Xfatal-warnings", "-feature")), - compileFilesInDir("tests/neg-custom-args/no-experimental", defaultOptions.and("-Yno-experimental")), + compileFilesInDir("tests/neg", defaultOptions, FileFilter.exclude(TestSources.negScala2LibraryTastyBlacklisted)), + compileFilesInDir("tests/neg-deep-subtype", allowDeepSubtypes), compileFilesInDir("tests/neg-custom-args/captures", defaultOptions.and("-language:experimental.captureChecking")), - compileFilesInDir("tests/neg-custom-args/explain", defaultOptions.and("-explain")), - compileFile("tests/neg-custom-args/avoid-warn-deprecation.scala", defaultOptions.and("-Xfatal-warnings", "-feature")), - compileFile("tests/neg-custom-args/i3246.scala", scala2CompatMode), - compileFile("tests/neg-custom-args/overrideClass.scala", scala2CompatMode), - compileFile("tests/neg-custom-args/ovlazy.scala", scala2CompatMode.and("-Xfatal-warnings")), - compileFile("tests/neg-custom-args/newline-braces.scala", scala2CompatMode.and("-Xfatal-warnings")), - compileFile("tests/neg-custom-args/autoTuplingTest.scala", defaultOptions.andLanguageFeature("noAutoTupling")), - compileFile("tests/neg-custom-args/i1650.scala", allowDeepSubtypes), - compileFile("tests/neg-custom-args/i3882.scala", allowDeepSubtypes), - compileFile("tests/neg-custom-args/i4372.scala", allowDeepSubtypes), - compileFile("tests/neg-custom-args/i1754.scala", allowDeepSubtypes), - compileFile("tests/neg-custom-args/i12650.scala", allowDeepSubtypes), - compileFile("tests/neg-custom-args/i9517.scala", defaultOptions.and("-Xprint-types")), - compileFile("tests/neg-custom-args/interop-polytypes.scala", allowDeepSubtypes.and("-Yexplicit-nulls")), - compileFile("tests/neg-custom-args/conditionalWarnings.scala", allowDeepSubtypes.and("-deprecation").and("-Xfatal-warnings")), - compileFilesInDir("tests/neg-custom-args/isInstanceOf", allowDeepSubtypes and 
"-Xfatal-warnings"), - compileFile("tests/neg-custom-args/i3627.scala", allowDeepSubtypes), compileFile("tests/neg-custom-args/sourcepath/outer/nested/Test1.scala", defaultOptions.and("-sourcepath", "tests/neg-custom-args/sourcepath")), compileDir("tests/neg-custom-args/sourcepath2/hi", defaultOptions.and("-sourcepath", "tests/neg-custom-args/sourcepath2", "-Xfatal-warnings")), compileList("duplicate source", List( "tests/neg-custom-args/toplevel-samesource/S.scala", "tests/neg-custom-args/toplevel-samesource/nested/S.scala"), defaultOptions), - compileFile("tests/neg-custom-args/i6300.scala", allowDeepSubtypes), - compileFile("tests/neg-custom-args/infix.scala", defaultOptions.and("-source", "future", "-deprecation", "-Xfatal-warnings")), - compileFile("tests/neg-custom-args/missing-targetName.scala", defaultOptions.and("-Yrequire-targetName", "-Xfatal-warnings")), - compileFile("tests/neg-custom-args/wildcards.scala", defaultOptions.and("-source", "future", "-deprecation", "-Xfatal-warnings")), - compileFile("tests/neg-custom-args/indentRight.scala", defaultOptions.and("-no-indent", "-Xfatal-warnings")), - compileDir("tests/neg-custom-args/adhoc-extension", defaultOptions.and("-source", "future", "-feature", "-Xfatal-warnings")), compileFile("tests/neg/i7575.scala", defaultOptions.withoutLanguageFeatures.and("-language:_")), - compileFile("tests/neg-custom-args/kind-projector.scala", defaultOptions.and("-Ykind-projector")), - compileFile("tests/neg-custom-args/kind-projector-underscores.scala", defaultOptions.and("-Ykind-projector:underscores")), - compileFile("tests/neg-custom-args/typeclass-derivation2.scala", defaultOptions.and("-language:experimental.erasedDefinitions")), - compileFile("tests/neg-custom-args/deptypes.scala", defaultOptions.and("-language:experimental.dependent")), - compileFile("tests/neg-custom-args/matchable.scala", defaultOptions.and("-Xfatal-warnings", "-source", "future")), - compileFile("tests/neg-custom-args/i7314.scala", defaultOptions.and("-Xfatal-warnings", "-source", "future")), - compileFile("tests/neg-custom-args/capt-wf.scala", defaultOptions.and("-language:experimental.captureChecking", "-Xfatal-warnings")), - compileFile("tests/neg-custom-args/i13026.scala", defaultOptions.and("-print-lines")), - compileFile("tests/neg-custom-args/i13838.scala", defaultOptions.and("-Ximplicit-search-limit", "1000")), - compileFile("tests/neg-custom-args/jdk-9-app.scala", defaultOptions.and("-release:8")), - compileFile("tests/neg-custom-args/i10994.scala", defaultOptions.and("-source", "future")), ).checkExpectedErrors() } @@ -204,16 +156,9 @@ class CompilationTests { @Test def runAll: Unit = { implicit val testGroup: TestGroup = TestGroup("runAll") aggregateTests( - compileFile("tests/run-custom-args/typeclass-derivation1.scala", defaultOptions.without(yCheckOptions*)), - compileFile("tests/run-custom-args/tuple-cons.scala", allowDeepSubtypes), - compileFile("tests/run-custom-args/i5256.scala", allowDeepSubtypes), - compileFile("tests/run-custom-args/no-useless-forwarders.scala", defaultOptions and "-Xmixin-force-forwarders:false"), - compileFile("tests/run-custom-args/defaults-serizaliable-no-forwarders.scala", defaultOptions and "-Xmixin-force-forwarders:false"), - compileFilesInDir("tests/run-custom-args/erased", defaultOptions.and("-language:experimental.erasedDefinitions")), - compileFilesInDir("tests/run-custom-args/fatal-warnings", defaultOptions.and("-Xfatal-warnings")), - compileFilesInDir("tests/run-custom-args/captures", 
allowDeepSubtypes.and("-language:experimental.captureChecking")), - compileFilesInDir("tests/run-deep-subtype", allowDeepSubtypes), compileFilesInDir("tests/run", defaultOptions.and("-Ysafe-init")), + compileFilesInDir("tests/run-deep-subtype", allowDeepSubtypes), + compileFilesInDir("tests/run-custom-args/captures", allowDeepSubtypes.and("-language:experimental.captureChecking")), // Run tests for legacy lazy vals. compileFilesInDir("tests/run", defaultOptions.and("-Ysafe-init", "-Ylegacy-lazy-vals", "-Ycheck-constraint-deps"), FileFilter.include(TestSources.runLazyValsAllowlist)), ).checkRuns() @@ -231,7 +176,6 @@ class CompilationTests { @Test def pickling: Unit = { implicit val testGroup: TestGroup = TestGroup("testPickling") aggregateTests( - compileFilesInDir("tests/new", picklingOptions), compileFilesInDir("tests/pos", picklingOptions, FileFilter.exclude(TestSources.posTestPicklingBlacklisted)), compileFilesInDir("tests/run", picklingOptions, FileFilter.exclude(TestSources.runTestPicklingBlacklisted)) ).checkCompile() @@ -241,8 +185,7 @@ class CompilationTests { def recheck: Unit = given TestGroup = TestGroup("recheck") aggregateTests( - compileFilesInDir("tests/new", recheckOptions), - compileFilesInDir("tests/run", recheckOptions, FileFilter.exclude(TestSources.runTestRecheckExcluded)) + compileFilesInDir("tests/run", defaultOptions.and("-Yrecheck-test"), FileFilter.exclude(TestSources.runTestRecheckExcluded)) //Disabled to save some time. //compileFilesInDir("tests/pos", recheckOptions, FileFilter.exclude(TestSources.posTestRecheckExcluded)), ).checkCompile() @@ -251,29 +194,32 @@ class CompilationTests { @Test def explicitNullsNeg: Unit = { implicit val testGroup: TestGroup = TestGroup("explicitNullsNeg") aggregateTests( - compileFilesInDir("tests/explicit-nulls/neg", explicitNullsOptions), - compileFilesInDir("tests/explicit-nulls/neg-patmat", explicitNullsOptions and "-Xfatal-warnings"), - compileFilesInDir("tests/explicit-nulls/unsafe-common", explicitNullsOptions), + compileFilesInDir("tests/explicit-nulls/neg", defaultOptions and "-Yexplicit-nulls"), + compileFilesInDir("tests/explicit-nulls/unsafe-common", defaultOptions and "-Yexplicit-nulls"), ) }.checkExpectedErrors() @Test def explicitNullsPos: Unit = { implicit val testGroup: TestGroup = TestGroup("explicitNullsPos") aggregateTests( - compileFilesInDir("tests/explicit-nulls/pos", explicitNullsOptions), - compileFilesInDir("tests/explicit-nulls/pos-separate", explicitNullsOptions), - compileFilesInDir("tests/explicit-nulls/pos-patmat", explicitNullsOptions and "-Xfatal-warnings"), - compileFilesInDir("tests/explicit-nulls/unsafe-common", explicitNullsOptions and "-language:unsafeNulls"), - compileFile("tests/explicit-nulls/pos-special/i14682.scala", explicitNullsOptions and "-Ysafe-init"), - compileFile("tests/explicit-nulls/pos-special/i14947.scala", explicitNullsOptions and "-Ytest-pickler" and "-Xprint-types"), + compileFilesInDir("tests/explicit-nulls/pos", defaultOptions and "-Yexplicit-nulls"), + compileFilesInDir("tests/explicit-nulls/unsafe-common", defaultOptions and "-Yexplicit-nulls" and "-language:unsafeNulls"), ) }.checkCompile() @Test def explicitNullsRun: Unit = { implicit val testGroup: TestGroup = TestGroup("explicitNullsRun") - compileFilesInDir("tests/explicit-nulls/run", explicitNullsOptions) + compileFilesInDir("tests/explicit-nulls/run", defaultOptions and "-Yexplicit-nulls") }.checkRuns() + // initialization tests + @Test def checkInitGlobal: Unit = { + implicit val testGroup: TestGroup = 
TestGroup("checkInitGlobal") + val options = defaultOptions.and("-Ysafe-init-global", "-Xfatal-warnings") + compileFilesInDir("tests/init-global/neg", options, FileFilter.exclude(TestSources.negInitGlobalScala2LibraryTastyBlacklisted)).checkExpectedErrors() + compileFilesInDir("tests/init-global/pos", options, FileFilter.exclude(TestSources.posInitGlobalScala2LibraryTastyBlacklisted)).checkCompile() + } + // initialization tests @Test def checkInit: Unit = { implicit val testGroup: TestGroup = TestGroup("checkInit") @@ -281,7 +227,6 @@ class CompilationTests { compileFilesInDir("tests/init/neg", options).checkExpectedErrors() compileFilesInDir("tests/init/pos", options).checkCompile() compileFilesInDir("tests/init/crash", options.without("-Xfatal-warnings")).checkCompile() - // The regression test for i12128 has some atypical classpath requirements. // The test consists of three files: (a) Reflect_1 (b) Macro_2 (c) Test_3 // which must be compiled separately. In addition: @@ -303,6 +248,39 @@ class CompilationTests { tests.foreach(_.delete()) } } + + // parallel backend tests + @Ignore("Temporarily disabled due to frequent timeouts") + @Test def parallelBackend: Unit = { + given TestGroup = TestGroup("parallelBackend") + val parallelism = Runtime.getRuntime().availableProcessors().min(16) + assumeTrue("Not enough available processors to run parallel tests", parallelism > 1) + + val options = defaultOptions.and(s"-Ybackend-parallelism:${parallelism}") + def parCompileDir(directory: String) = compileDir(directory, options) + + // Compilation units containing more than 1 source file + aggregateTests( + parCompileDir("tests/pos/i10477"), + parCompileDir("tests/pos/i4758"), + parCompileDir("tests/pos/scala2traits"), + parCompileDir("tests/pos/class-gadt"), + parCompileDir("tests/pos/tailcall"), + parCompileDir("tests/pos/reference"), + parCompileDir("tests/pos/pos_valueclasses") + ).checkCompile() + + aggregateTests( + parCompileDir("tests/neg/package-implicit"), + parCompileDir("tests/neg/package-export") + ).checkExpectedErrors() + + aggregateTests( + parCompileDir("tests/run/decorators"), + parCompileDir("tests/run/generic") + ).checkRuns() + + } } object CompilationTests extends ParallelTesting { diff --git a/compiler/test/dotty/tools/dotc/IdempotencyTests.scala b/compiler/test/dotty/tools/dotc/IdempotencyTests.scala index b515ebb05f96..81dd7d7f4710 100644 --- a/compiler/test/dotty/tools/dotc/IdempotencyTests.scala +++ b/compiler/test/dotty/tools/dotc/IdempotencyTests.scala @@ -45,7 +45,7 @@ class IdempotencyTests { compileList(testDir.getName, sources.reverse, opt)(TestGroup("idempotency/orderIdempotency2")) ) } - aggregateTests(tests: _*) + aggregateTests(tests*) } def check(name: String) = { diff --git a/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala b/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala index 44cf83b521f4..a1014043724e 100644 --- a/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala +++ b/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala @@ -83,4 +83,18 @@ class ScalaSettingsTests: val nowr = new Diagnostic.Warning("This is a problem.".toMessage, util.NoSourcePosition) assertEquals(Action.Silent, sut.action(nowr)) + @Test def `i18367 rightmost WConf flags take precedence over flags to the left`: Unit = + import reporting.{Action, Diagnostic} + val sets = new ScalaSettings + val args = List("-Wconf:cat=deprecation:e", "-Wconf:cat=deprecation:s") + val sumy = ArgsSummary(sets.defaultState, args, errors = Nil, warnings = 
Nil) + val proc = sets.processArguments(sumy, processAll = true, skipped = Nil) + val conf = sets.Wconf.valueIn(proc.sstate) + val msg = "Don't use that!".toMessage + val depr = new Diagnostic.DeprecationWarning(msg, util.NoSourcePosition) + val sut = reporting.WConf.fromSettings(conf).getOrElse(???) + assertEquals(Action.Silent, sut.action(depr)) + end ScalaSettingsTests diff --git a/compiler/test/dotty/tools/dotc/core/ConstraintsTest.scala b/compiler/test/dotty/tools/dotc/core/ConstraintsTest.scala index 9ae3fda8c6b9..4ca8e243dc0c 100644 --- a/compiler/test/dotty/tools/dotc/core/ConstraintsTest.scala +++ b/compiler/test/dotty/tools/dotc/core/ConstraintsTest.scala @@ -19,8 +19,7 @@ class ConstraintsTest: @Test def mergeParamsTransitivity: Unit = inCompilerContext(TestConfiguration.basicClasspath, scalaSources = "trait A { def foo[S, T, R]: Any }") { - val tvars = constrained(requiredClass("A").typeRef.select("foo".toTermName).info.asInstanceOf[TypeLambda], EmptyTree, alwaysAddTypeVars = true)._2 - val List(s, t, r) = tvars.tpes + val List(s, t, r) = constrained(requiredClass("A").typeRef.select("foo".toTermName).info.asInstanceOf[TypeLambda]) val innerCtx = ctx.fresh.setExploreTyperState() inContext(innerCtx) { @@ -38,8 +37,7 @@ class ConstraintsTest: @Test def mergeBoundsTransitivity: Unit = inCompilerContext(TestConfiguration.basicClasspath, scalaSources = "trait A { def foo[S, T]: Any }") { - val tvars = constrained(requiredClass("A").typeRef.select("foo".toTermName).info.asInstanceOf[TypeLambda], EmptyTree, alwaysAddTypeVars = true)._2 - val List(s, t) = tvars.tpes + val List(s, t) = constrained(requiredClass("A").typeRef.select("foo".toTermName).info.asInstanceOf[TypeLambda]) val innerCtx = ctx.fresh.setExploreTyperState() inContext(innerCtx) { @@ -57,10 +55,9 @@ class ConstraintsTest: @Test def validBoundsInit: Unit = inCompilerContext( TestConfiguration.basicClasspath, scalaSources = "trait A { def foo[S >: T <: T | Int, T <: String]: Any }") { - val tvars = constrained(requiredClass("A").typeRef.select("foo".toTermName).info.asInstanceOf[TypeLambda], EmptyTree, alwaysAddTypeVars = true)._2 - val List(s, t) = tvars.tpes + val List(s, t) = constrained(requiredClass("A").typeRef.select("foo".toTermName).info.asInstanceOf[TypeLambda]) - val TypeBounds(lo, hi) = ctx.typerState.constraint.entry(t.asInstanceOf[TypeVar].origin): @unchecked + val TypeBounds(lo, hi) = ctx.typerState.constraint.entry(t.origin): @unchecked assert(lo =:= defn.NothingType, i"Unexpected lower bound $lo for $t: ${ctx.typerState.constraint}") assert(hi =:= defn.StringType, i"Unexpected upper bound $hi for $t: ${ctx.typerState.constraint}") // used to be Any } @@ -68,12 +65,11 @@ class ConstraintsTest: @Test def validBoundsUnify: Unit = inCompilerContext( TestConfiguration.basicClasspath, scalaSources = "trait A { def foo[S >: T <: T | Int, T <: String | Int]: Any }") { - val tvars = constrained(requiredClass("A").typeRef.select("foo".toTermName).info.asInstanceOf[TypeLambda], EmptyTree, alwaysAddTypeVars = true)._2 - val List(s, t) = tvars.tpes + val List(s, t) = constrained(requiredClass("A").typeRef.select("foo".toTermName).info.asInstanceOf[TypeLambda]) s <:< t - val TypeBounds(lo, hi) = ctx.typerState.constraint.entry(t.asInstanceOf[TypeVar].origin): @unchecked + val TypeBounds(lo, hi) = ctx.typerState.constraint.entry(t.origin): @unchecked assert(lo =:= defn.NothingType, i"Unexpected lower bound $lo for $t: ${ctx.typerState.constraint}") assert(hi =:= (defn.StringType | defn.IntType), i"Unexpected upper
bound $hi for $t: ${ctx.typerState.constraint}") } @@ -81,8 +77,7 @@ class ConstraintsTest: @Test def validBoundsReplace: Unit = inCompilerContext( TestConfiguration.basicClasspath, scalaSources = "trait X; trait A { def foo[S <: U | X, T, U]: Any }") { - val tvarTrees = constrained(requiredClass("A").typeRef.select("foo".toTermName).info.asInstanceOf[TypeLambda], EmptyTree, alwaysAddTypeVars = true)._2 - val tvars @ List(s, t, u) = tvarTrees.tpes.asInstanceOf[List[TypeVar]] + val tvars @ List(s, t, u) = constrained(requiredClass("A").typeRef.select("foo".toTermName).info.asInstanceOf[TypeLambda]) s =:= t t =:= u diff --git a/compiler/test/dotty/tools/dotc/core/SealedDescendantsTest.scala b/compiler/test/dotty/tools/dotc/core/SealedDescendantsTest.scala index 0ae9069c03d1..4726596c0428 100644 --- a/compiler/test/dotty/tools/dotc/core/SealedDescendantsTest.scala +++ b/compiler/test/dotty/tools/dotc/core/SealedDescendantsTest.scala @@ -47,6 +47,19 @@ class SealedDescendantsTest extends DottyTest { ) end enumOpt + @Test + def javaEnum: Unit = + expectedDescendents("java.util.concurrent.TimeUnit", + "TimeUnit" :: + "NANOSECONDS.type" :: + "MICROSECONDS.type" :: + "MILLISECONDS.type" :: + "SECONDS.type" :: + "MINUTES.type" :: + "HOURS.type" :: + "DAYS.type" :: Nil + ) + @Test def hierarchicalSharedChildren: Unit = // Q is a child of both Z and A and should appear once @@ -91,10 +104,22 @@ class SealedDescendantsTest extends DottyTest { ) end hierarchicalSharedChildrenB - def expectedDescendents(source: String, root: String, expected: List[String]) = - exploreRoot(source, root) { rootCls => - val descendents = rootCls.sealedDescendants.map(sym => s"${sym.name}${if (sym.isTerm) ".type" else ""}") - assertEquals(expected.toString, descendents.toString) + def assertMatchingDescendants(rootCls: Symbol, expected: List[String])(using Context): Unit = + val descendents = rootCls.sealedDescendants.map(sym => s"${sym.name}${if (sym.isTerm) ".type" else ""}") + assertEquals(expected.toString, descendents.toString) + + def expectedDescendents(root: String, expected: List[String]): Unit = + exploreRootNoSource(root)(assertMatchingDescendants(_, expected)) + + def expectedDescendents(source: String, root: String, expected: List[String]): Unit = + exploreRoot(source, root)(assertMatchingDescendants(_, expected)) + + def exploreRootNoSource(root: String)(op: Context ?=> ClassSymbol => Unit) = + val source1 = s"""package testsealeddescendants + |object Foo { def foo: $root = ???
}""".stripMargin + checkCompile("typer", source1) { (_, context) => + given Context = context + op(requiredClass(root)) } def exploreRoot(source: String, root: String)(op: Context ?=> ClassSymbol => Unit) = diff --git a/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala b/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala index 0af4434fdcde..4daaf86f2fb0 100644 --- a/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala +++ b/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala @@ -5,7 +5,7 @@ import scala.language.unsafeNulls import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.ast.tpd.TreeOps import dotty.tools.dotc.{Driver, Main} -import dotty.tools.dotc.core.Comments.CommentsContext +import dotty.tools.dotc.core.Comments.docCtx import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.core.Decorators.{toTermName, toTypeName} import dotty.tools.dotc.core.Mode @@ -18,6 +18,7 @@ import dotty.tools.vulpix.TestConfiguration import org.junit.Test import org.junit.Assert.{assertEquals, assertFalse, fail} +import dotty.tools.io.AbstractFile class CommentPicklingTest { @@ -92,7 +93,7 @@ class CommentPicklingTest { val out = tmp./("out") out.createDirectory() - val options = compileOptions.and("-d", out.toAbsolute.toString).and(sourceFiles: _*) + val options = compileOptions.and("-d", out.toAbsolute.toString).and(sourceFiles*) val reporter = TestReporter.reporter(System.out, logLevel = ERROR) Main.process(options.all, reporter) assertFalse("Compilation failed.", reporter.hasErrors) @@ -116,7 +117,7 @@ class CommentPicklingTest { implicit val ctx: Context = setup(args, initCtx).map(_._2).getOrElse(initCtx) ctx.initialize() val trees = files.flatMap { f => - val unpickler = new DottyUnpickler(f.toByteArray()) + val unpickler = new DottyUnpickler(AbstractFile.getFile(f.jpath), f.toByteArray()) unpickler.enter(roots = Set.empty) unpickler.rootTrees(using ctx) } diff --git a/compiler/test/dotty/tools/dotc/core/tasty/PathPicklingTest.scala b/compiler/test/dotty/tools/dotc/core/tasty/PathPicklingTest.scala index 438c6034f8b3..66463e3ff66c 100644 --- a/compiler/test/dotty/tools/dotc/core/tasty/PathPicklingTest.scala +++ b/compiler/test/dotty/tools/dotc/core/tasty/PathPicklingTest.scala @@ -14,7 +14,6 @@ import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.ast.tpd.TreeOps import dotty.tools.dotc.{Driver, Main} import dotty.tools.dotc.decompiler -import dotty.tools.dotc.core.Comments.CommentsContext import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.core.Decorators.{toTermName, toTypeName} import dotty.tools.dotc.core.Mode diff --git a/compiler/test/dotty/tools/dotc/core/tasty/TastyHeaderUnpicklerTest.scala b/compiler/test/dotty/tools/dotc/core/tasty/TastyHeaderUnpicklerTest.scala new file mode 100644 index 000000000000..bde4246ef0f0 --- /dev/null +++ b/compiler/test/dotty/tools/dotc/core/tasty/TastyHeaderUnpicklerTest.scala @@ -0,0 +1,311 @@ +package dotty.tools.dotc.core.tasty + +import org.junit.Assert._ +import org.junit.{Test, Ignore} + +import dotty.tools.tasty.TastyFormat._ +import dotty.tools.tasty.TastyBuffer._ +import dotty.tools.tasty.TastyBuffer +import dotty.tools.tasty.TastyReader +import dotty.tools.tasty.UnpickleException +import dotty.tools.tasty.TastyHeaderUnpickler +import dotty.tools.tasty.TastyVersion +import dotty.tools.tasty.UnpicklerConfig + +class TastyHeaderUnpicklerTest { + + import TastyHeaderUnpicklerTest._ + + @Test + def okThisCompilerReadsItself: Unit = { + val file = 
TastyVersion(MajorVersion, MinorVersion, ExperimentalVersion) + val read = TastyVersion(MajorVersion, MinorVersion, ExperimentalVersion) + runTest(file, read, "Scala (current)") + } + + @Test + def okExperimentalCompilerReadsItself: Unit = { + val file = TastyVersion(MajorVersion, MinorVersion, 1) + val read = TastyVersion(MajorVersion, MinorVersion, 1) + runTest(file, read, "Scala (current)") + } + + @Test + def okStableCompilerReadsItself: Unit = { + val file = TastyVersion(MajorVersion, MinorVersion, 0) + val read = TastyVersion(MajorVersion, MinorVersion, 0) + runTest(file, read, "Scala (current)") + } + + @Test + def okReadOldStableMinorFromStable: Unit = { + val file = TastyVersion(28, 2, 0) + val read = TastyVersion(28, 3, 0) + runTest(file, read, "Scala 3.2.2") + } + + @Test + def okReadOldStableMinorFromExperimental: Unit = { + val file = TastyVersion(28, 2, 0) + val read = TastyVersion(28, 3, 1) + runTest(file, read, "Scala 3.2.2") + } + + @Test + def failReadExperimentalFromStableSameMinor: Unit = { + val file = TastyVersion(28, 4, 1) + val read = TastyVersion(28, 4, 0) + expectUnpickleError(runTest(file, read, "Scala 3.4.0-RC1-bin-SNAPSHOT")) { + """Backward incompatible TASTy file has version 28.4-experimental-1, produced by Scala 3.4.0-RC1-bin-SNAPSHOT, + | expected stable TASTy from 28.0 to 28.4. + | The source of this file should be recompiled by a Scala 3.4.0 compiler or newer. + | Usually this means that the library dependency containing this file should be updated. + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin + } + } + + @Test + def failReadExperimentalFromOldMinor: Unit = { + val file = TastyVersion(28, 3, 1) + val read = TastyVersion(28, 4, 0) + expectUnpickleError(runTest(file, read, "Scala 3.2.1-RC1-bin-SNAPSHOT")) { + """Backward incompatible TASTy file has version 28.3-experimental-1, produced by Scala 3.2.1-RC1-bin-SNAPSHOT, + | expected stable TASTy from 28.0 to 28.4. + | The source of this file should be recompiled by a Scala 3.3.0 compiler or newer. + | Usually this means that the library dependency containing this file should be updated. + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin + } + } + + @Test + def failReadOldMajor: Unit = { + val file = TastyVersion(27, 3, 0) + val read = TastyVersion(28, 3, 0) + expectUnpickleError(runTest(file, read, "Scala 3.0.0-M1")) { + """Backward incompatible TASTy file has version 27.3, + | expected stable TASTy from 28.0 to 28.3. + | The source of this file should be recompiled by a Scala 3.0.0 compiler or newer. + | Usually this means that the library dependency containing this file should be updated. + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin + } + } + + @Test + def failReadOldMajor_generic: Unit = { + // We check the generic version here because it will produce a different message. + val file = TastyVersion(27, 3, 0) + val read = TastyVersion(28, 3, 0) + expectUnpickleError(runTest(file, read, "Scala 3.0.0-M1", generic = true)) { + """Backward incompatible TASTy file has version 27.3, + | expected stable TASTy from 28.0 to 28.3. + | The source of this file should be recompiled by a later version. 
+ | Usually this means that the classpath entry of this file should be updated. + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin + } + } + + @Test + def failReadOldExperimentalFromSameMinorWhileExperimental: Unit = { + val file = TastyVersion(28, 4, 1) + val read = TastyVersion(28, 4, 2) + expectUnpickleError(runTest(file, read, "Scala 3.3.3-RC1-NIGHTLY")) { + """Backward incompatible TASTy file has version 28.4-experimental-1, produced by Scala 3.3.3-RC1-NIGHTLY, + | expected stable TASTy from 28.0 to 28.3, or exactly 28.4-experimental-2. + | The source of this file should be recompiled by the same nightly or snapshot Scala 3.3 compiler. + | Usually this means that the library dependency containing this file should be updated. + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin + } + } + + @Test + def failReadOldExperimentalFromSameMinorWhileExperimental_generic: Unit = { + // We check the generic version here because it will produce a different message. + val file = TastyVersion(28, 4, 1) + val read = TastyVersion(28, 4, 2) + expectUnpickleError(runTest(file, read, "Scala 3.3.3-RC1-NIGHTLY", generic = true)) { + """Backward incompatible TASTy file has version 28.4-experimental-1, produced by Scala 3.3.3-RC1-NIGHTLY, + | expected stable TASTy from 28.0 to 28.3, or exactly 28.4-experimental-2. + | The source of this file should be recompiled by a later version. + | Usually this means that the classpath entry of this file should be updated. + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin + } + } + + @Test + def failReadNewerStableMinorFromStable: Unit = { + val file = TastyVersion(28, 3, 0) + val read = TastyVersion(28, 2, 0) + expectUnpickleError(runTest(file, read, "Scala 3.3.1")) { + """Forward incompatible TASTy file has version 28.3, produced by Scala 3.3.1, + | expected stable TASTy from 28.0 to 28.2. + | To read this TASTy file, use a Scala 3.3.0 compiler or newer. + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin + } + } + + @Test + def failReadNewerStableMinorFromStable_generic: Unit = { + // We check the generic version here because it will produce a different message. + val file = TastyVersion(28, 3, 0) + val read = TastyVersion(28, 2, 0) + expectUnpickleError(runTest(file, read, "Scala 3.3.1", generic = true)) { + """Forward incompatible TASTy file has version 28.3, produced by Scala 3.3.1, + | expected stable TASTy from 28.0 to 28.2. + | To read this TASTy file, use a newer version of this tool compatible with TASTy 28.3. 
+ | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin + } + } + + @Test + def failReadNewerExperimentalMinorFromStable: Unit = { + val file = TastyVersion(28, 3, 1) + val read = TastyVersion(28, 2, 0) + expectUnpickleError(runTest(file, read, "Scala 3.2.2-RC1-NIGHTLY")) { + """Forward incompatible TASTy file has version 28.3-experimental-1, produced by Scala 3.2.2-RC1-NIGHTLY, + | expected stable TASTy from 28.0 to 28.2. + | To read this experimental TASTy file, use the same nightly or snapshot Scala 3.2 compiler. + | Note that you are using a stable compiler, which can not read experimental TASTy. + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin + } + } + + @Test + def failReadNewerStableMajor: Unit = { + val file = TastyVersion(29, 0, 0) + val read = TastyVersion(28, 3, 0) + expectUnpickleError(runTest(file, read, "Scala 4.0.0")) { + """Forward incompatible TASTy file has version 29.0, produced by Scala 4.0.0, + | expected stable TASTy from 28.0 to 28.3. + | To read this TASTy file, use a more recent Scala compiler. + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin + } + } + + @Test + def failReadNewerExperimentalMajor: Unit = { + val file = TastyVersion(29, 0, 1) + val read = TastyVersion(28, 3, 0) + expectUnpickleError(runTest(file, read, "Scala 4.0.0-M1")) { + """Forward incompatible TASTy file has version 29.0-experimental-1, produced by Scala 4.0.0-M1, + | expected stable TASTy from 28.0 to 28.3. + | To read this experimental TASTy file, use the same Scala compiler. + | Note that you are using a stable compiler, which can not read experimental TASTy. + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin + } + } + + @Test + def failReadNewerExperimentalMajor_generic: Unit = { + // We check the generic version here because it will produce a different message. + val file = TastyVersion(29, 0, 1) + val read = TastyVersion(28, 3, 0) + expectUnpickleError(runTest(file, read, "Scala 4.0.0-M1", generic = true)) { + """Forward incompatible TASTy file has version 29.0-experimental-1, produced by Scala 4.0.0-M1, + | expected stable TASTy from 28.0 to 28.3. + | To read this experimental TASTy file, use the version of this tool compatible with TASTy 29.0-experimental-1. + | Note that this tool does not support reading experimental TASTy. + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin + } + } + + @Test + def failReadStableFromExperimentalSameMinor: Unit = { + val file = TastyVersion(28, 4, 0) + val read = TastyVersion(28, 4, 1) // 3.4.0-RC1-NIGHTLY + expectUnpickleError(runTest(file, read, "Scala 3.4.2")) { + """Forward incompatible TASTy file has version 28.4, produced by Scala 3.4.2, + | expected stable TASTy from 28.0 to 28.3, or exactly 28.4-experimental-1. + | To read this TASTy file, use a Scala 3.4.0 compiler or newer. 
+ | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin + } + } + + @Test + def failReadNewerExperimentalFromExperimentalSameMinor: Unit = { + val file = TastyVersion(28, 4, 2) + val read = TastyVersion(28, 4, 1) + expectUnpickleError(runTest(file, read, "Scala 3.3.3-RC2-NIGHTLY")) { + """Forward incompatible TASTy file has version 28.4-experimental-2, produced by Scala 3.3.3-RC2-NIGHTLY, + | expected stable TASTy from 28.0 to 28.3, or exactly 28.4-experimental-1. + | To read this experimental TASTy file, use the same nightly or snapshot Scala 3.3 compiler. + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin + } + } + + @Test + def failReadNewerExperimentalFromExperimentalSameMinor_generic: Unit = { + // We check the generic version here because it will produce a different message. + val file = TastyVersion(28, 4, 2) + val read = TastyVersion(28, 4, 1) + expectUnpickleError(runTest(file, read, "Scala 3.3.3-RC2-NIGHTLY", generic = true)) { + """Forward incompatible TASTy file has version 28.4-experimental-2, produced by Scala 3.3.3-RC2-NIGHTLY, + | expected stable TASTy from 28.0 to 28.3, or exactly 28.4-experimental-1. + | To read this experimental TASTy file, use the version of this tool compatible with TASTy 28.4-experimental-2. + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin + } + } + +} + +object TastyHeaderUnpicklerTest { + + def fillHeader(maj: Int, min: Int, exp: Int, compiler: String): TastyBuffer = { + val compilerBytes = compiler.getBytes(java.nio.charset.StandardCharsets.UTF_8).nn + val buf = new TastyBuffer(header.length + 32 + compilerBytes.length) + for (ch <- header) buf.writeByte(ch.toByte) + buf.writeNat(maj) + buf.writeNat(min) + buf.writeNat(exp) + buf.writeNat(compilerBytes.length) + buf.writeBytes(compilerBytes, compilerBytes.length) + buf.writeUncompressedLong(237478L) + buf.writeUncompressedLong(324789L) + buf + } + + case class CustomScalaConfig(compilerVersion: TastyVersion) extends TastyUnpickler.Scala3CompilerConfig { + override def majorVersion: Int = compilerVersion.major + override def minorVersion: Int = compilerVersion.minor + override def experimentalVersion: Int = compilerVersion.experimental + } + + case class CustomGenericConfig(compilerVersion: TastyVersion) extends UnpicklerConfig.Generic { + override def majorVersion: Int = compilerVersion.major + override def minorVersion: Int = compilerVersion.minor + override def experimentalVersion: Int = compilerVersion.experimental + } + + def runTest(file: TastyVersion, read: TastyVersion, compiler: String, generic: Boolean = false): Unit = { + val headerBuffer = fillHeader(file.major, file.minor, file.experimental, compiler) + val bs = headerBuffer.bytes.clone + val config = if (generic) CustomGenericConfig(read) else CustomScalaConfig(read) + val hr = new TastyHeaderUnpickler(config, new TastyReader(bs)) + hr.readFullHeader() + } + + def expectUnpickleError(op: => Unit)(message: String) = { + try { + op + fail() + } + catch { + case err: UnpickleException => assert(err.getMessage.nn.contains(message)) + } + } + +} diff --git a/compiler/test/dotty/tools/dotc/coverage/CoverageTests.scala 
b/compiler/test/dotty/tools/dotc/coverage/CoverageTests.scala index 77e172f61167..7efa1f6f564e 100644 --- a/compiler/test/dotty/tools/dotc/coverage/CoverageTests.scala +++ b/compiler/test/dotty/tools/dotc/coverage/CoverageTests.scala @@ -3,6 +3,7 @@ package dotty.tools.dotc.coverage import org.junit.Test import org.junit.AfterClass import org.junit.Assert.* +import org.junit.Assume.* import org.junit.experimental.categories.Category import dotty.{BootstrappedOnlyTests, Properties} import dotty.tools.vulpix.* @@ -17,6 +18,8 @@ import scala.language.unsafeNulls import scala.collection.mutable.Buffer import dotty.tools.dotc.util.DiffUtil +import java.util.stream.Collectors + @Category(Array(classOf[BootstrappedOnlyTests])) class CoverageTests: import CoverageTests.{*, given} @@ -26,10 +29,18 @@ class CoverageTests: @Test def checkCoverageStatements(): Unit = + assumeFalse( + "FIXME: test output differs when using Scala 2 library TASTy", + Properties.usingScalaLibraryTasty + ) checkCoverageIn(rootSrc.resolve("pos"), false) @Test def checkInstrumentedRuns(): Unit = + assumeFalse( + "FIXME: test output differs when using Scala 2 library TASTy", + Properties.usingScalaLibraryTasty + ) checkCoverageIn(rootSrc.resolve("run"), true) def checkCoverageIn(dir: Path, run: Boolean)(using TestGroup): Unit = @@ -61,6 +72,25 @@ class CoverageTests: val instructions = FileDiff.diffMessage(expectFile.toString, targetFile.toString) fail(s"Coverage report differs from expected data.\n$instructions") + // measurement files only exist in the "run" category, + // as they are generated at runtime by scala.runtime.coverage.Invoker + val expectMeasurementFile = path.resolveSibling(s"$fileName.measurement.check") + if run && Files.exists(expectMeasurementFile) then + + // Note: this assumes the test ran single-threaded; + // if that is ever not the case, this lookup will have to be adjusted + val targetMeasurementFile = findMeasurementFile(targetDir) + + if updateCheckFiles then + Files.copy(targetMeasurementFile, expectMeasurementFile, StandardCopyOption.REPLACE_EXISTING) + + else + val expectedMeasurements = fixWindowsPaths(Files.readAllLines(expectMeasurementFile).asScala) + val obtainedMeasurements = fixWindowsPaths(Files.readAllLines(targetMeasurementFile).asScala) + if expectedMeasurements != obtainedMeasurements then + val instructions = FileDiff.diffMessage(expectMeasurementFile.toString, targetMeasurementFile.toString) + fail(s"Measurement report differs from expected data.\n$instructions") + + () }) /** Generates the coverage report for the given input file, in a temporary directory.
*/ @@ -75,6 +106,14 @@ class CoverageTests: test.checkCompile() target + private def findMeasurementFile(targetDir: Path): Path = { + val allFilesInTarget = Files.list(targetDir).collect(Collectors.toList).asScala + allFilesInTarget.find(_.getFileName.toString.startsWith("scoverage.measurements.")).getOrElse( + throw new AssertionError(s"Expected to find a measurement file in targetDir [${targetDir}] but none was found.") + ) + } + object CoverageTests extends ParallelTesting: import scala.concurrent.duration.* diff --git a/compiler/test/dotty/tools/dotc/reporting/CodeActionTest.scala b/compiler/test/dotty/tools/dotc/reporting/CodeActionTest.scala new file mode 100644 index 000000000000..870da08dcfba --- /dev/null +++ b/compiler/test/dotty/tools/dotc/reporting/CodeActionTest.scala @@ -0,0 +1,172 @@ +package dotty.tools.dotc.reporting + +import dotty.tools.DottyTest +import dotty.tools.dotc.rewrites.Rewrites +import dotty.tools.dotc.rewrites.Rewrites.ActionPatch +import dotty.tools.dotc.util.SourceFile + +import scala.annotation.tailrec +import scala.jdk.CollectionConverters.* +import scala.runtime.Scala3RunTime.assertFailed + +import org.junit.Assert._ +import org.junit.Test + +/** Tests the quick-fix actions attached to the + * diagnostics emitted for a given code snippet. + */ +class CodeActionTest extends DottyTest: + + @Test def convertToFunctionValue = + checkCodeAction( + """|object Test: + | def x: Int = 3 + | val test = x _ + |""".stripMargin, + "Rewrite to function value", + """|object Test: + | def x: Int = 3 + | val test = (() => x) + |""".stripMargin + ) + + @Test def insertBracesForEmptyArgument = + checkCodeAction( + """|object Test: + | def foo(): Unit = () + | val x = foo + |""".stripMargin, + "Insert ()", + """|object Test: + | def foo(): Unit = () + | val x = foo() + |""".stripMargin + ) + + @Test def removeRepeatModifier = + checkCodeAction( + """|final final class Test + |""".stripMargin, + """Remove repeated modifier: "final"""", + // TODO look into trying to remove the extra space that is left behind + """|final class Test + |""".stripMargin + ) + + @Test def insertMissingCases = + checkCodeAction( + code = + """|enum Tree: + | case Node(l: Tree, r: Tree) + | case Leaf(v: String) + | + |object Test: + | def foo(tree: Tree) = tree match { + | case Tree.Node(_, _) => ??? + | } + |""".stripMargin, + title = "Insert missing cases (1)", + expected = + """|enum Tree: + | case Node(l: Tree, r: Tree) + | case Leaf(v: String) + | + |object Test: + | def foo(tree: Tree) = tree match { + | case Tree.Node(_, _) => ??? + | case Tree.Leaf(_) => ??? + | } + |""".stripMargin, + afterPhase = "patternMatcher" + ) + + @Test def insertMissingCasesForUnionStringType = + checkCodeAction( + code = + """object Test: + | def foo(text: "Alice" | "Bob") = text match { + | case "Alice" => ??? + | } + |""".stripMargin, + title = "Insert missing cases (1)", + expected = + """object Test: + | def foo(text: "Alice" | "Bob") = text match { + | case "Alice" => ??? + | case "Bob" => ??? + | } + |""".stripMargin, + afterPhase = "patternMatcher" + ) + + @Test def insertMissingCasesForUnionIntType = + checkCodeAction( + code = + """object Test: + | def foo(text: 1 | 2) = text match { + | case 2 => ??? + | } + |""".stripMargin, + title = "Insert missing cases (1)", + expected = + """object Test: + | def foo(text: 1 | 2) = text match { + | case 2 => ??? + | case 1 => ???
+ | } + |""".stripMargin, + afterPhase = "patternMatcher" + ) + + @Test def insertMissingCasesUsingBracelessSyntax = + checkCodeAction( + code = + """object Test: + | def foo(text: 1 | 2) = text match + | case 2 => ??? + |""".stripMargin, + title = "Insert missing cases (1)", + expected = + """object Test: + | def foo(text: 1 | 2) = text match + | case 2 => ??? + | case 1 => ??? + |""".stripMargin, + afterPhase = "patternMatcher" + ) + + // Use a StoreReporter rather than the default ConsoleReporter: the default would + // only print diagnostics to the test output, whereas we need to inspect them here. + private def newContext = + val rep = new StoreReporter(null) with UniqueMessagePositions with HideNonSensicalMessages + initialCtx.setReporter(rep).withoutColors + + private def checkCodeAction(code: String, title: String, expected: String, afterPhase: String = "typer") = + ctx = newContext + val source = SourceFile.virtual("test", code).content + val runCtx = checkCompile(afterPhase, code) { (_, _) => () } + val diagnostics = runCtx.reporter.removeBufferedMessages + assertEquals("Expected exactly one diagnostic", 1, diagnostics.size) + + val diagnostic = diagnostics.head + val actions = diagnostic.msg.actions.toList + assertEquals("Expected exactly one action", 1, actions.size) + + // TODO account for more than 1 action + val action = actions.head + assertEquals(action.title, title) + val patches = action.patches.toList + if patches.nonEmpty then + patches.reduceLeft: (p1, p2) => + assert(p1.srcPos.span.end <= p2.srcPos.span.start, s"overlapping patches $p1 and $p2") + p2 + else assertFailed("Expected a patch attached to this action, but it was empty") + + val result = patches.reverse.foldLeft(code): (newCode, patch) => + import scala.language.unsafeNulls + val start = newCode.substring(0, patch.srcPos.start) + val ending = newCode.substring(patch.srcPos.end, newCode.length) + start + patch.replacement + ending + + assertEquals(expected, result) diff --git a/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala b/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala index 940fc875a021..a96a2765d56a 100644 --- a/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala +++ b/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala @@ -17,12 +17,14 @@ import dotty.Properties import interfaces.Diagnostic.{ERROR, WARNING} import scala.io.Codec +import scala.compiletime.uninitialized -class TestReporter protected (outWriter: PrintWriter, filePrintln: String => Unit, logLevel: Int) +class TestReporter protected (outWriter: PrintWriter, logLevel: Int) extends Reporter with UniqueMessagePositions with HideNonSensicalMessages with MessageRendering { - protected final val _errorBuf = mutable.ArrayBuffer.empty[Diagnostic] - final def errors: Iterator[Diagnostic] = _errorBuf.iterator + protected final val _diagnosticBuf = mutable.ArrayBuffer.empty[Diagnostic] + final def diagnostics: Iterator[Diagnostic] = _diagnosticBuf.iterator + final def errors: Iterator[Diagnostic] = diagnostics.filter(_.level >= ERROR) protected final val _messageBuf = mutable.ArrayBuffer.empty[String] final def messages: Iterator[String] = _messageBuf.iterator @@ -31,9 +33,6 @@ extends Reporter with UniqueMessagePositions with HideNonSensicalMessages with M protected final val _consoleReporter = new ConsoleReporter(null, new PrintWriter(_consoleBuf)) final def consoleOutput: String = _consoleBuf.toString - private var _didCrash = false - final def compilerCrashed: Boolean = _didCrash - private var _skip: Boolean = false final
def setSkip(): Unit = _skip = true final def skipped: Boolean = _skip @@ -49,14 +48,6 @@ extends Reporter with UniqueMessagePositions with HideNonSensicalMessages with M def log(msg: String) = _messageBuf.append(msg) - def logStackTrace(thrown: Throwable): Unit = { - _didCrash = true - val sw = new java.io.StringWriter - val pw = new java.io.PrintWriter(sw) - thrown.printStackTrace(pw) - log(sw.toString) - } - /** Prints the message with the given position indication. */ def printMessageAndPos(dia: Diagnostic, extra: String)(using Context): Unit = { val msg = messageAndPos(dia) @@ -79,8 +70,9 @@ extends Reporter with UniqueMessagePositions with HideNonSensicalMessages with M case _ => "" } - if dia.level >= ERROR then _errorBuf.append(dia) - if dia.level >= WARNING then _consoleReporter.doReport(dia) + if dia.level >= WARNING then + _diagnosticBuf.append(dia) + _consoleReporter.doReport(dia) printMessageAndPos(dia, extra) } } @@ -90,9 +82,9 @@ object TestReporter { private val failedTestsFileName: String = "last-failed.log" private val failedTestsFile: JFile = new JFile(s"$testLogsDirName/$failedTestsFileName") - private var outFile: JFile = _ - private var logWriter: PrintWriter = _ - private var failedTestsWriter: PrintWriter = _ + private var outFile: JFile = uninitialized + private var logWriter: PrintWriter = uninitialized + private var failedTestsWriter: PrintWriter = uninitialized private def initLog() = if (logWriter eq null) { val date = new Date @@ -125,10 +117,10 @@ object TestReporter { } def reporter(ps: PrintStream, logLevel: Int): TestReporter = - new TestReporter(new PrintWriter(ps, true), logPrintln, logLevel) + new TestReporter(new PrintWriter(ps, true), logLevel) def simplifiedReporter(writer: PrintWriter): TestReporter = { - val rep = new TestReporter(writer, logPrintln, WARNING) { + val rep = new TestReporter(writer, WARNING) { /** Prints the message with the given position indication in a simplified manner */ override def printMessageAndPos(dia: Diagnostic, extra: String)(using Context): Unit = { def report() = { diff --git a/compiler/test/dotty/tools/dotc/sbt/ProgressCallbackTest.scala b/compiler/test/dotty/tools/dotc/sbt/ProgressCallbackTest.scala new file mode 100644 index 000000000000..489dc0f1759c --- /dev/null +++ b/compiler/test/dotty/tools/dotc/sbt/ProgressCallbackTest.scala @@ -0,0 +1,283 @@ +package dotty.tools.dotc.sbt + +import dotty.tools.DottyTest +import dotty.tools.dotc.core.Contexts.FreshContext +import dotty.tools.dotc.sbt.ProgressCallbackTest.* + +import org.junit.Assert.* +import org.junit.Test + +import dotty.tools.toOption +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.core.Contexts.ctx +import dotty.tools.dotc.CompilationUnit +import dotty.tools.dotc.Compiler +import dotty.tools.dotc.Run +import dotty.tools.dotc.core.Phases.Phase +import dotty.tools.io.VirtualDirectory +import dotty.tools.dotc.NoCompilationUnit +import dotty.tools.dotc.interactive.Interactive.Include.all + +final class ProgressCallbackTest extends DottyTest: + + @Test + def testCallback: Unit = + val source1 = """class Foo""" + val source2 = """class Bar""" + + inspectProgress(List(source1, source2), terminalPhase = None): progressCallback => + locally: + // (1) assert that the way we compute next phase in `Run.doAdvancePhase` is correct + assertNextPhaseIsNext() + + locally: + // (1) given correct computation, check that the recorded progression of phases is monotonic + assertMonotonicProgression(progressCallback) + + locally: + // (1) given monotonic 
progression, check that the recorded progression of phases is complete + val expectedCurr = allSubPhases + val expectedNext = expectedCurr.tail ++ syntheticNextPhases + assertProgressPhases(progressCallback, expectedCurr, expectedNext) + + locally: + // (2) next check that for each unit, we record all the "runnable" phases that could go through + assertExpectedPhasesForUnits(progressCallback, expectedPhases = runnableSubPhases) + + locally: + // (2) therefore we can now cross-reference the recorded progression with the recorded phases per unit + assertTotalUnits(progressCallback) + + locally: + // (3) finally, check that the callback was not cancelled + assertFalse(progressCallback.isCancelled) + end testCallback + + // TODO: test cancellation + + @Test + def cancelMidTyper: Unit = + inspectCancellationAtPhase("typer[typechecking]") + + @Test + def cancelErasure: Unit = + inspectCancellationAtPhase("erasure") + + @Test + def cancelPickler: Unit = + inspectCancellationAtPhase("pickler") + + def cancelOnEnter(targetPhase: String)(testCallback: TestProgressCallback): Boolean = + testCallback.latestProgress.exists(_.currPhase == targetPhase) + + def inspectCancellationAtPhase(targetPhase: String): Unit = + val source1 = """class Foo""" + + inspectProgress(List(source1), cancellation = Some(cancelOnEnter(targetPhase))): progressCallback => + locally: + // (1) assert that the compiler was cancelled + assertTrue("should have cancelled", progressCallback.isCancelled) + + locally: + // (2) assert that compiler visited all the subphases before cancellation, + // and does not visit any after. + // (2.2) first extract the surrounding phases of the target + val (befores, target +: next +: _) = allSubPhases.span(_ != targetPhase): @unchecked + // (2.3) we expect to see the subphases before&including target reported as a "current" phase, so extract here + val expectedCurr = befores :+ target + // (2.4) we expect to see next after target reported as a "next" phase, so extract here + val expectedNext = expectedCurr.tail :+ next + assertProgressPhases(progressCallback, expectedCurr, expectedNext) + + locally: + // (3) assert that the compilation units were only entered in the phases before cancellation + val (befores, target +: next +: _) = runnableSubPhases.span(_ != targetPhase): @unchecked + assertExpectedPhasesForUnits(progressCallback, expectedPhases = befores) + + locally: + // (4) assert that the final progress recorded is at the target phase, + // and progress is equal to the number of phases before the target. + val (befores, target +: next +: _) = runnableSubPhases.span(_ != targetPhase): @unchecked + // (4.1) we expect cancellation to occur *as we enter* the target phase, + // so no units should be visited in this phase. Therefore progress + // should be equal to the number of phases before the target. 
(as we have 1 unit) + val expectedProgress = befores.size + progressCallback.latestProgress match + case Some(ProgressEvent(`expectedProgress`, _, `target`, `next`)) => + case other => fail(s"did not match expected progress, found $other") + end inspectCancellationAtPhase + + /** Assert that the computed `next` phase matches the real next phase */ + def assertNextPhaseIsNext()(using Context): Unit = + val allPhases = ctx.base.allPhases + for case Array(p1, p2) <- allPhases.sliding(2) do + val p1Next = Run.SubPhases(p1).next.get.phase // used to compute the next phase in `Run.doAdvancePhase` + assertEquals(p1Next, p2) + + /** Assert that the recorded progression of phases are all in the real progression, and that order is preserved */ + def assertMonotonicProgression(progressCallback: TestProgressCallback)(using Context): Unit = + val allPhasePlan = ctx.base.allPhases.flatMap(asSubphases) ++ syntheticNextPhases + for case List( + PhaseTransition(curr1, next1), + PhaseTransition(curr2, next2) + ) <- progressCallback.progressPhasesFinal.sliding(2) do + val curr1Index = indexOrFail(allPhasePlan, curr1) + val curr2Index = indexOrFail(allPhasePlan, curr2) + val next1Index = indexOrFail(allPhasePlan, next1) + val next2Index = indexOrFail(allPhasePlan, next2) + assertTrue(s"Phase `$curr1` did not come before `$curr2`", curr1Index < curr2Index) + assertTrue(s"Phase `$next1` did not come before `$next2`", next1Index < next2Index) + assertTrue(s"Phase `$curr1` did not come before `$next1`", curr1Index < next1Index) + assertTrue(s"Phase `$curr2` did not come before `$next2`", curr2Index < next2Index) + assertTrue(s"Predicted next phase `$next1` didn't match the following current `$curr2`", next1Index == curr2Index) + + /** Assert that the recorded progression of phases contains every phase in the plan */ + def assertProgressPhases(progressCallback: TestProgressCallback, + currExpected: Seq[String], nextExpected: Seq[String])(using Context): Unit = + val (allPhasePlan, expectedCurrPhases, expectedNextPhases) = + val allPhases = currExpected + val firstPhase = allPhases.head + val expectedCurrPhases = allPhases.toSet + val expectedNextPhases = nextExpected.toSet //expectedCurrPhases - firstPhase ++ syntheticNextPhases + (allPhases.toList, expectedCurrPhases, expectedNextPhases) + + for (expectedCurr, recordedCurr) <- allPhasePlan.zip(progressCallback.progressPhasesFinal.map(_.curr)) do + assertEquals(s"Phase $recordedCurr was not expected", expectedCurr, recordedCurr) + + val (seenCurrPhases, seenNextPhases) = + val (currs0, nexts0) = progressCallback.progressPhasesFinal.unzip(Tuple.fromProductTyped) + (currs0.toSet, nexts0.toSet) + + val missingCurrPhases = expectedCurrPhases.diff(seenCurrPhases) + val extraCurrPhases = seenCurrPhases.diff(expectedCurrPhases) + assertTrue(s"these phases were not visited ${missingCurrPhases}", missingCurrPhases.isEmpty) + assertTrue(s"these phases were visited, but not in the real plan ${extraCurrPhases}", extraCurrPhases.isEmpty) + + val missingNextPhases = expectedNextPhases.diff(seenNextPhases) + val extraNextPhases = seenNextPhases.diff(expectedNextPhases) + assertTrue(s"these phases were not planned to visit, but were expected ${missingNextPhases}", missingNextPhases.isEmpty) + assertTrue(s"these phases were planned to visit, but were not in the real plan ${extraNextPhases}", extraNextPhases.isEmpty) + + + /** Assert that the phases recorded per unit match the actual phases ran on them */ + def assertExpectedPhasesForUnits(progressCallback: TestProgressCallback, 
expectedPhases: Seq[String])(using Context): Unit = + for (unit, visitedPhases) <- progressCallback.unitPhases do + val uniquePhases = visitedPhases.toSet + assert(unit != NoCompilationUnit, s"unexpected NoCompilationUnit for phases $uniquePhases") + val duplicatePhases = visitedPhases.view.groupBy(identity).values.filter(_.size > 1).map(_.head) + assertEquals(s"some phases were visited twice for $unit! ${duplicatePhases.toList}", visitedPhases.size, uniquePhases.size) + val unvisitedPhases = expectedPhases.filterNot(visitedPhases.contains) + val extraPhases = visitedPhases.filterNot(expectedPhases.contains) + assertTrue(s"these phases were not visited for $unit ${unvisitedPhases}", unvisitedPhases.isEmpty) + assertTrue(s"these phases were visited for $unit, but not expected ${extraPhases}", extraPhases.isEmpty) + + /** Assert that the number of total units of work matches the number of files * the runnable phases */ + def assertTotalUnits(progressCallback: TestProgressCallback)(using Context): Unit = + var fileTraversals = 0 // files * phases + for (_, phases) <- progressCallback.unitPhases do + fileTraversals += phases.size + val expectedTotal = fileTraversals // assume that no late enters occur + progressCallback.totalEvents match + case Nil => fail("No total events recorded") + case TotalEvent(total, _) :: _ => + assertEquals(expectedTotal, total) + + def inspectProgress( + sources: List[String], + terminalPhase: Option[String] = Some("typer"), + cancellation: Option[TestProgressCallback => Boolean] = None)( + op: Context ?=> TestProgressCallback => Unit)(using Context) = + for cancelNow <- cancellation do + testProgressCallback.withCancelNow(cancelNow) + val sources0 = sources.map(_.linesIterator.map(_.trim.nn).filterNot(_.isEmpty).mkString("\n|").stripMargin) + val terminalPhase0 = terminalPhase.getOrElse(defaultCompiler.phases.last.last.phaseName) + checkAfterCompile(terminalPhase0, sources0) { case given Context => + op(testProgressCallback) + } + + private def testProgressCallback(using Context): TestProgressCallback = + ctx.progressCallback match + case cb: TestProgressCallback => cb + case _ => + fail(s"Expected TestProgressCallback but got ${ctx.progressCallback}") + ??? 
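
For context (an editorial aside, not part of this changeset): the `interfaces.ProgressCallback` contract exercised by these tests is small. The driver calls `informUnitStarting` as each compilation unit enters a phase and `progress` on every step, and it stops as soon as `progress` returns `false`. Below is a minimal sketch of a standalone implementation, assuming only the member signatures visible in `TestProgressCallback` further down; the class name and the stop-at-a-phase policy are illustrative:

```scala
import dotty.tools.dotc.CompilationUnit
import dotty.tools.dotc.interfaces.ProgressCallback

/** Hypothetical callback that requests cancellation once `stopPhase` is entered. */
final class StopAtPhaseCallback(stopPhase: String) extends ProgressCallback:
  private var _cancelled = false

  override def cancel(): Unit = _cancelled = true
  override def isCancelled(): Boolean = _cancelled

  // Per-unit notifications are not needed for this policy.
  override def informUnitStarting(phase: String, unit: CompilationUnit): Unit = ()

  // Returning false asks the driver to stop, just as TestProgressCallback
  // does below through its `_shouldCancelNow` predicate.
  override def progress(current: Int, total: Int, currPhase: String, nextPhase: String): Boolean =
    if currPhase == stopPhase then cancel()
    !isCancelled()
```
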
+ + override protected def initializeCtx(fc: FreshContext): Unit = + super.initializeCtx( + fc.setProgressCallback(TestProgressCallback()) + .setSetting(fc.settings.outputDir, new VirtualDirectory("")) + ) + +object ProgressCallbackTest: + + case class TotalEvent(total: Int, atPhase: String) + case class ProgressEvent(curr: Int, total: Int, currPhase: String, nextPhase: String) + case class PhaseTransition(curr: String, next: String) + + def asSubphases(phase: Phase): IndexedSeq[String] = + val subPhases = Run.SubPhases(phase) + val indices = 0 until phase.traversals + indices.map(subPhases.subPhase) + + def runnableSubPhases(using Context): IndexedSeq[String] = + ctx.base.allPhases.filter(_.isRunnable).flatMap(asSubphases).toIndexedSeq + + def allSubPhases(using Context): IndexedSeq[String] = + ctx.base.allPhases.flatMap(asSubphases).toIndexedSeq + + private val syntheticNextPhases = List("") + + /** Asserts that the computed phase name exists in the real phase plan */ + def indexOrFail(allPhasePlan: Array[String], phaseName: String): Int = + val i = allPhasePlan.indexOf(phaseName) + if i < 0 then + fail(s"Phase $phaseName not found") + i + + final class TestProgressCallback extends interfaces.ProgressCallback: + import collection.immutable, immutable.SeqMap + + private var _cancelled: Boolean = false + private var _unitPhases: SeqMap[CompilationUnit, List[String]] = immutable.SeqMap.empty // preserve order + private var _totalEvents: List[TotalEvent] = List.empty + private var _latestProgress: Option[ProgressEvent] = None + private var _progressPhases: List[PhaseTransition] = List.empty + private var _shouldCancelNow: TestProgressCallback => Boolean = _ => false + + def totalEvents = _totalEvents + def latestProgress = _latestProgress + def unitPhases = _unitPhases + def progressPhasesFinal = _progressPhases.reverse + def currentPhase = _progressPhases.headOption.map(_.curr) + + def withCancelNow(f: TestProgressCallback => Boolean): this.type = + _shouldCancelNow = f + this + + override def cancel(): Unit = _cancelled = true + override def isCancelled(): Boolean = _cancelled + + override def informUnitStarting(phase: String, unit: CompilationUnit): Unit = + _unitPhases += (unit -> (unitPhases.getOrElse(unit, Nil) :+ phase)) + + override def progress(current: Int, total: Int, currPhase: String, nextPhase: String): Boolean = + // record the total and current phase whenever the total changes + _totalEvents = _totalEvents match + case Nil => TotalEvent(total, currPhase) :: Nil + case events @ (head :: _) if head.total != total => TotalEvent(total, currPhase) :: events + case events => events + + _latestProgress = Some(ProgressEvent(current, total, currPhase, nextPhase)) + + // record the current and next phase whenever the current phase changes + _progressPhases = _progressPhases match + case all @ PhaseTransition(head, _) :: rest => + if head != currPhase then + PhaseTransition(currPhase, nextPhase) :: all + else + all + case Nil => PhaseTransition(currPhase, nextPhase) :: Nil + + !_shouldCancelNow(this) + +end ProgressCallbackTest diff --git a/compiler/test/dotty/tools/dotc/semanticdb/SemanticdbTests.scala b/compiler/test/dotty/tools/dotc/semanticdb/SemanticdbTests.scala index a85cc9ad80f9..4db047d0951e 100644 --- a/compiler/test/dotty/tools/dotc/semanticdb/SemanticdbTests.scala +++ b/compiler/test/dotty/tools/dotc/semanticdb/SemanticdbTests.scala @@ -102,7 +102,7 @@ class SemanticdbTests: |inspect with: | diff $expect ${expect.resolveSibling("" + expect.getFileName + ".out")} |Or else 
update all expect files with - | sbt 'scala3-compiler-bootstrapped/test:runMain dotty.tools.dotc.semanticdb.updateExpect'""".stripMargin) + | sbt 'scala3-compiler-bootstrapped/Test/runMain dotty.tools.dotc.semanticdb.updateExpect'""".stripMargin) Files.walk(target).sorted(Comparator.reverseOrder).forEach(Files.delete) if errors.nonEmpty then fail(s"${errors.size} errors in expect test.") @@ -130,7 +130,7 @@ class SemanticdbTests: val target = Files.createTempDirectory("semanticdb") val javaArgs = Array("-d", target.toString) ++ javaFiles().map(_.toString) val javac = ToolProvider.getSystemJavaCompiler - val exitJava = javac.run(null, null, null, javaArgs:_*) + val exitJava = javac.run(null, null, null, javaArgs*) assert(exitJava == 0, "java compiler has errors") val args = Array( "-Xsemanticdb", @@ -142,7 +142,8 @@ class SemanticdbTests: "-sourceroot", expectSrc.toString, "-classpath", target.toString, "-Xignore-scala2-macros", - "-usejavacp" + "-usejavacp", + "-Wunused:all" ) ++ inputFiles().map(_.toString) val exit = Main.process(args) assertFalse(s"dotc errors: ${exit.errorCount}", exit.hasErrors) diff --git a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala index 1e7d7ef2c708..f538d9534cd9 100644 --- a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala +++ b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala @@ -63,12 +63,14 @@ class PatmatExhaustivityTest { @Test def patmatExhaustivity: Unit = { + val blacklisted = TestSources.patmatExhaustivityScala2LibraryTastyBlacklisted.toSet val res = Directory(testsDir).list.toList .filter(f => f.extension == "scala" || f.isDirectory) .filter { f => val path = if f.isDirectory then f.path + "/" else f.path Properties.testsFilter.isEmpty || Properties.testsFilter.exists(path.contains) } + .filterNot(f => blacklisted.contains(f.name)) .map(f => if f.isDirectory then compileDir(f.jpath) else compileFile(f.jpath)) val failed = res.filter(!_) diff --git a/compiler/test/dotty/tools/dotc/transform/SpecializeFunctionsTests.scala b/compiler/test/dotty/tools/dotc/transform/SpecializeFunctionsTests.scala index bf33dad915c4..2982238490e1 100644 --- a/compiler/test/dotty/tools/dotc/transform/SpecializeFunctionsTests.scala +++ b/compiler/test/dotty/tools/dotc/transform/SpecializeFunctionsTests.scala @@ -36,6 +36,10 @@ class SpecializeFunctionsTests extends DottyBytecodeTest { ) assert(applys.contains("apply"), "Foo did not contain `apply` forwarder method") assert(applys.contains("apply$mcII$sp"), "Foo did not contain specialized apply") + + // It's not essential they are in this particular order, + // but they should be in deterministic order + assert(applys == List("apply", "apply$mcII$sp", "apply")) } } @@ -48,20 +52,84 @@ class SpecializeFunctionsTests extends DottyBytecodeTest { checkBCode(source) { dir => val apps = findClass("Func2", dir).methods.asScala.collect { + case m if m.name == "apply" => m case m if m.name == "apply$mcIII$sp" => - assert(!hasInvokeStatic(m)) // should not call super specialized method + assert(!hasInvokeStatic(m), s"${m.name} should not call super specialized method") m - case m if m.name == "apply" => m + case m if m.name.startsWith("apply") => m } .map(_.name) .toList assert( - apps.length == 3, + apps.length == 56, s"Wrong number of specialized applys, actual length: ${apps.length} - $apps" ) assert(apps.contains("apply"), "Func2 did not contain `apply` forwarder method") 
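
An aside for readers (not part of the changeset): the method names asserted around here follow Scala's specialization mangling scheme `apply$mc<R><Params>$sp`, where the code right after `mc` is the result type and the remaining codes are the parameter types (`V` = Unit, `Z` = Boolean, `I` = Int, `J` = Long, `F` = Float, `D` = Double). So `apply$mcIII$sp` is the `(Int, Int) => Int` apply, and `apply$mcZJD$sp` the `(Long, Double) => Boolean` one. A rough decoder sketch, assuming only these six codes occur; the helper name is made up for illustration:

```scala
object SpecializedNames:
  private val typeCodes = Map(
    'V' -> "Unit", 'Z' -> "Boolean", 'I' -> "Int",
    'J' -> "Long", 'F' -> "Float", 'D' -> "Double")

  /** Decode e.g. "apply$mcZJD$sp" to Some("(Long, Double) => Boolean"). */
  def describe(name: String): Option[String] = name match
    case s"apply$$mc$codes$$sp" if codes.nonEmpty && codes.forall(typeCodes.contains) =>
      val types = codes.map(typeCodes) // first code is the result type
      Some(s"(${types.tail.mkString(", ")}) => ${types.head}")
    case _ => None // plain "apply" forwarders and other methods
```
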
assert(apps.contains("apply$mcIII$sp"), "Func2 did not contain specialized apply") + + // It's not essential they are in this particular order, + // but they should be in some deterministic order: + assert( + apps == List( + "apply$mcVII$sp", + "apply$mcVIJ$sp", + "apply$mcVID$sp", + "apply$mcVJI$sp", + "apply$mcVJJ$sp", + "apply$mcVJD$sp", + "apply$mcVDI$sp", + "apply$mcVDJ$sp", + "apply$mcVDD$sp", + "apply$mcZII$sp", + "apply$mcZIJ$sp", + "apply$mcZID$sp", + "apply$mcZJI$sp", + "apply$mcZJJ$sp", + "apply$mcZJD$sp", + "apply$mcZDI$sp", + "apply$mcZDJ$sp", + "apply$mcZDD$sp", + "apply$mcIIJ$sp", + "apply$mcIID$sp", + "apply$mcIJI$sp", + "apply$mcIJJ$sp", + "apply$mcIJD$sp", + "apply$mcIDI$sp", + "apply$mcIDJ$sp", + "apply$mcIDD$sp", + "apply$mcFII$sp", + "apply$mcFIJ$sp", + "apply$mcFID$sp", + "apply$mcFJI$sp", + "apply$mcFJJ$sp", + "apply$mcFJD$sp", + "apply$mcFDI$sp", + "apply$mcFDJ$sp", + "apply$mcFDD$sp", + "apply$mcJII$sp", + "apply$mcJIJ$sp", + "apply$mcJID$sp", + "apply$mcJJI$sp", + "apply$mcJJJ$sp", + "apply$mcJJD$sp", + "apply$mcJDI$sp", + "apply$mcJDJ$sp", + "apply$mcJDD$sp", + "apply$mcDII$sp", + "apply$mcDIJ$sp", + "apply$mcDID$sp", + "apply$mcDJI$sp", + "apply$mcDJJ$sp", + "apply$mcDJD$sp", + "apply$mcDDI$sp", + "apply$mcDDJ$sp", + "apply$mcDDD$sp", + "apply", + "apply$mcIII$sp", + "apply"), + s"Apply methods were not in the expected order: $apps" + ) } } diff --git a/compiler/test/dotty/tools/dotc/util/EqHashMapTest.scala b/compiler/test/dotty/tools/dotc/util/EqHashMapTest.scala new file mode 100644 index 000000000000..561dabb555a9 --- /dev/null +++ b/compiler/test/dotty/tools/dotc/util/EqHashMapTest.scala @@ -0,0 +1,115 @@ +package dotty.tools.dotc.util + +import org.junit.Test +import org.junit.Assert.* + +class EqHashMapTest: + + var counter = 0 + + // basic identity hash, and reference equality, but with a counter for ordering + class Id: + val count = { counter += 1; counter } + + val id1, id2, id3 = Id() + + given Ordering[Id] = Ordering.by(_.count) + + @Test + def invariant: Unit = + assert((id1 ne id2) && (id1 ne id3) && (id2 ne id3)) + + @Test + def newEmpty: Unit = + val m = EqHashMap[Id, Int]() + assert(m.size == 0) + assert(m.iterator.toList == Nil) + + @Test + def update: Unit = + val m = EqHashMap[Id, Int]() + assert(m.size == 0 && !m.contains(id1)) + m.update(id1, 1) + assert(m.size == 1 && m(id1) == 1) + m.update(id1, 2) // replace value + assert(m.size == 1 && m(id1) == 2) + m.update(id3, 3) // new key + assert(m.size == 2 && m(id1) == 2 && m(id3) == 3) + + @Test + def getOrElseUpdate: Unit = + val m = EqHashMap[Id, Int]() + // add id1 + assert(m.size == 0 && !m.contains(id1)) + val added = m.getOrElseUpdate(id1, 1) + assert(added == 1 && m.size == 1 && m(id1) == 1) + // try add id1 again + val addedAgain = m.getOrElseUpdate(id1, 23) + assert(addedAgain != 23 && m.size == 1 && m(id1) == 1) // no change + + private def fullMap() = + val m = EqHashMap[Id, Int]() + m.update(id1, 1) + m.update(id2, 2) + m + + @Test + def remove: Unit = + val m = fullMap() + // remove id2 + m.remove(id2) + assert(m.size == 1) + assert(m.contains(id1) && !m.contains(id2)) + // remove id1 + m -= id1 + assert(m.size == 0) + assert(!m.contains(id1) && !m.contains(id2)) + + @Test + def lookup: Unit = + val m = fullMap() + assert(m.lookup(id1) == 1) + assert(m.lookup(id2) == 2) + assert(m.lookup(id3) == null) + + @Test + def iterator: Unit = + val m = fullMap() + assert(m.iterator.toList.sorted == List(id1 -> 1,id2 -> 2)) + + @Test + def clear: Unit = + locally: + val s1 = fullMap() + 
s1.clear() + assert(s1.size == 0) + locally: + val s2 = fullMap() + s2.clear(resetToInitial = false) + assert(s2.size == 0) + + // basic structural equality and hash code + class I32(val x: Int): + override def hashCode(): Int = x + override def equals(that: Any): Boolean = that match + case that: I32 => this.x == that.x + case _ => false + + /** the hash map is based on reference equality, i.e. does not use universal equality */ + @Test + def referenceEquality: Unit = + val i1, i2 = I32(1) // different instances + + assert(i1.equals(i2)) // structural equality + assert(i1 ne i2) // reference inequality + + val m = locally: + val m = EqHashMap[I32, Int]() + m(i1) = 23 + m(i2) = 29 + m + + assert(m.size == 2 && m(i1) == 23 && m(i2) == 29) + assert(m.keysIterator.toSet == Set(i1)) // scala.Set delegates to universal equality + end referenceEquality + diff --git a/compiler/test/dotty/tools/dotc/util/EqHashSetTest.scala b/compiler/test/dotty/tools/dotc/util/EqHashSetTest.scala new file mode 100644 index 000000000000..1c1ffe0b7931 --- /dev/null +++ b/compiler/test/dotty/tools/dotc/util/EqHashSetTest.scala @@ -0,0 +1,119 @@ +package dotty.tools.dotc.util + +import org.junit.Test +import org.junit.Assert.* + +class EqHashSetTest: + + var counter = 0 + + // basic identity hash, and reference equality, but with a counter for ordering + class Id: + val count = { counter += 1; counter } + + val id1, id2, id3 = Id() + + given Ordering[Id] = Ordering.by(_.count) + + @Test + def invariant: Unit = + assert((id1 ne id2) && (id1 ne id3) && (id2 ne id3)) + + @Test + def newEmpty: Unit = + val s = EqHashSet[Id]() + assert(s.size == 0) + assert(s.iterator.toList == Nil) + + @Test + def put: Unit = + val s = EqHashSet[Id]() + // put id1 + assert(s.size == 0 && !s.contains(id1)) + s += id1 + assert(s.size == 1 && s.contains(id1)) + // put id2 + assert(!s.contains(id2)) + s.put(id2) + assert(s.size == 2 && s.contains(id1) && s.contains(id2)) + // put id3 + s ++= List(id3) + assert(s.size == 3 && s.contains(id1) && s.contains(id2) && s.contains(id3)) + + @Test + def add: Unit = + val s = EqHashSet[Id]() + // add id1 + assert(s.size == 0 && !s.contains(id1)) + val added = s.add(id1) + assert(added && s.size == 1 && s.contains(id1)) + // try add id1 again + val addedAgain = s.add(id1) + assert(!addedAgain && s.size == 1 && s.contains(id1)) // no change + + @Test + def construct: Unit = + val s = EqHashSet.from(List(id1,id2,id3)) + assert(s.size == 3) + assert(s.contains(id1) && s.contains(id2) && s.contains(id3)) + + @Test + def remove: Unit = + val s = EqHashSet.from(List(id1,id2,id3)) + // remove id2 + s.remove(id2) + assert(s.size == 2) + assert(s.contains(id1) && !s.contains(id2) && s.contains(id3)) + // remove id1 + s -= id1 + assert(s.size == 1) + assert(!s.contains(id1) && !s.contains(id2) && s.contains(id3)) + // remove id3 + s --= List(id3) + assert(s.size == 0) + assert(!s.contains(id1) && !s.contains(id2) && !s.contains(id3)) + + @Test + def lookup: Unit = + val s = EqHashSet.from(List(id1, id2)) + assert(s.lookup(id1) eq id1) + assert(s.lookup(id2) eq id2) + assert(s.lookup(id3) eq null) + + @Test + def iterator: Unit = + val s = EqHashSet.from(List(id1,id2,id3)) + assert(s.iterator.toList.sorted == List(id1,id2,id3)) + + @Test + def clear: Unit = + locally: + val s1 = EqHashSet.from(List(id1,id2,id3)) + s1.clear() + assert(s1.size == 0) + locally: + val s2 = EqHashSet.from(List(id1,id2,id3)) + s2.clear(resetToInitial = false) + assert(s2.size == 0) + + // basic structural equality and hash code + class
I32(val x: Int): + override def hashCode(): Int = x + override def equals(that: Any): Boolean = that match + case that: I32 => this.x == that.x + case _ => false + + /** the hash set is based on reference equality, i.e. does not use universal equality */ + @Test + def referenceEquality: Unit = + val i1, i2 = I32(1) // different instances + + assert(i1.equals(i2)) // structural equality + assert(i1 ne i2) // reference inequality + + val s = EqHashSet.from(List(i1,i2)) + + assert(s.size == 2 && s.contains(i1) && s.contains(i2)) + assert(s.iterator.toSet == Set(i1)) // scala.Set delegates to universal equality + end referenceEquality + diff --git a/compiler/test/dotty/tools/dotc/util/HashMapTest.scala b/compiler/test/dotty/tools/dotc/util/HashMapTest.scala new file mode 100644 index 000000000000..97bf8446756c --- /dev/null +++ b/compiler/test/dotty/tools/dotc/util/HashMapTest.scala @@ -0,0 +1,137 @@ +package dotty.tools.dotc.util + +import org.junit.Test +import org.junit.Assert.* + +class HashMapTest: + + var counter = 0 + + // structural hash and equality, but with a counter for ordering + class Id(val count: Int = { counter += 1; counter }): + override def hashCode(): Int = count + override def equals(that: Any): Boolean = that match + case that: Id => this.count == that.count + case _ => false + def makeCopy: Id = new Id(count) + + val id1, id2, id3 = Id() + + given Ordering[Id] = Ordering.by(_.count) + + @Test + def invariant: Unit = + assert((id1 ne id2) && (id1 ne id3) && (id2 ne id3)) + assert(id1 != id2 && id1 != id3 && id2 != id3) + + @Test + def newEmpty: Unit = + val m = HashMap[Id, Int]() + assert(m.size == 0) + assert(m.iterator.toList == Nil) + + @Test + def update: Unit = + val m = HashMap[Id, Int]() + assert(m.size == 0 && !m.contains(id1)) + m.update(id1, 1) + assert(m.size == 1 && m(id1) == 1) + m.update(id1, 2) // replace value + assert(m.size == 1 && m(id1) == 2) + m.update(id3, 3) // new key + assert(m.size == 2 && m(id1) == 2 && m(id3) == 3) + + @Test + def getOrElseUpdate: Unit = + val m = HashMap[Id, Int]() + // add id1 + assert(m.size == 0 && !m.contains(id1)) + val added = m.getOrElseUpdate(id1, 1) + assert(added == 1 && m.size == 1 && m(id1) == 1) + // try add id1 again + val addedAgain = m.getOrElseUpdate(id1, 23) + assert(addedAgain != 23 && m.size == 1 && m(id1) == 1) // no change + + class StatefulHash: + var hashCount = 0 + override def hashCode(): Int = { hashCount += 1; super.hashCode() } + + @Test + def getOrElseUpdate_hashesAtMostOnce: Unit = + locally: + val sh1 = StatefulHash() + val m = HashMap[StatefulHash, Int]() // will be a dense map with default size + val added = m.getOrElseUpdate(sh1, 1) + assert(sh1.hashCount == 0) // no hashing at all for dense maps + locally: + val sh1 = StatefulHash() + val m = HashMap[StatefulHash, Int](64) // not dense + val added = m.getOrElseUpdate(sh1, 1) + assert(sh1.hashCount == 1) // would be 2 if for example getOrElseUpdate was implemented as lookup + update + + private def fullMap() = + val m = HashMap[Id, Int]() + m.update(id1, 1) + m.update(id2, 2) + m + + @Test + def remove: Unit = + val m = fullMap() + // remove id2 + m.remove(id2) + assert(m.size == 1) + assert(m.contains(id1) && !m.contains(id2)) + // remove id1 + m -= id1 + assert(m.size == 0) + assert(!m.contains(id1) && !m.contains(id2)) + + @Test + def lookup: Unit = + val m = fullMap() + assert(m.lookup(id1) == 1) + assert(m.lookup(id2) == 2) + assert(m.lookup(id3) == null) + + @Test + def iterator: Unit = + val m = fullMap() +
assert(m.iterator.toList.sorted == List(id1 -> 1,id2 -> 2)) + + @Test + def clear: Unit = + locally: + val s1 = fullMap() + s1.clear() + assert(s1.size == 0) + locally: + val s2 = fullMap() + s2.clear(resetToInitial = false) + assert(s2.size == 0) + + // basic structural equality and hash code + class I32(val x: Int): + override def hashCode(): Int = x + override def equals(that: Any): Boolean = that match + case that: I32 => this.x == that.x + case _ => false + + /** the hash map is based on universal equality, i.e. does not use reference equality */ + @Test + def universalEquality: Unit = + val id2_2 = id2.makeCopy + + assert(id2.equals(id2_2)) // structural equality + assert(id2 ne id2_2) // reference inequality + + val m = locally: + val m = HashMap[Id, Int]() + m(id2) = 23 + m(id2_2) = 29 + m + + assert(m.size == 1 && m(id2) == 29 && m(id2_2) == 29) + assert(m.keysIterator.toList.head eq id2) // does not replace id2 with id2_2 + end universalEquality + diff --git a/compiler/test/dotty/tools/dotc/util/HashSetTest.scala b/compiler/test/dotty/tools/dotc/util/HashSetTest.scala new file mode 100644 index 000000000000..2089be508a4c --- /dev/null +++ b/compiler/test/dotty/tools/dotc/util/HashSetTest.scala @@ -0,0 +1,117 @@ +package dotty.tools.dotc.util + +import org.junit.Test +import org.junit.Assert.* + +class HashSetTest: + + var counter = 0 + + // structural hash and equality, with a counter for ordering + class Id(val count: Int = { counter += 1; counter }): + override def hashCode: Int = count + override def equals(that: Any): Boolean = that match + case that: Id => this.count == that.count + case _ => false + def makeCopy: Id = new Id(count) + + val id1, id2, id3 = Id() + + given Ordering[Id] = Ordering.by(_.count) + + @Test + def invariant: Unit = + assert((id1 ne id2) && (id1 ne id3) && (id2 ne id3)) + assert(id1 != id2 && id1 != id3 && id2 != id3) + + @Test + def newEmpty: Unit = + val s = HashSet[Id]() + assert(s.size == 0) + assert(s.iterator.toList == Nil) + + @Test + def put: Unit = + val s = HashSet[Id]() + // put id1 + assert(s.size == 0 && !s.contains(id1)) + s += id1 + assert(s.size == 1 && s.contains(id1)) + // put id2 + assert(!s.contains(id2)) + s.put(id2) + assert(s.size == 2 && s.contains(id1) && s.contains(id2)) + // put id3 + s ++= List(id3) + assert(s.size == 3 && s.contains(id1) && s.contains(id2) && s.contains(id3)) + + @Test + def add: Unit = + val s = HashSet[Id]() + // add id1 + assert(s.size == 0 && !s.contains(id1)) + val added = s.add(id1) + assert(added && s.size == 1 && s.contains(id1)) + // try add id1 again + val addedAgain = s.add(id1) + assert(!addedAgain && s.size == 1 && s.contains(id1)) // no change + + @Test + def construct: Unit = + val s = HashSet.from(List(id1,id2,id3)) + assert(s.size == 3) + assert(s.contains(id1) && s.contains(id2) && s.contains(id3)) + + @Test + def remove: Unit = + val s = HashSet.from(List(id1,id2,id3)) + // remove id2 + s.remove(id2) + assert(s.size == 2) + assert(s.contains(id1) && !s.contains(id2) && s.contains(id3)) + // remove id1 + s -= id1 + assert(s.size == 1) + assert(!s.contains(id1) && !s.contains(id2) && s.contains(id3)) + // remove id3 + s --= List(id3) + assert(s.size == 0) + assert(!s.contains(id1) && !s.contains(id2) && !s.contains(id3)) + + @Test + def lookup: Unit = + val s = HashSet.from(List(id1, id2)) + assert(s.lookup(id1) eq id1) + assert(s.lookup(id2) eq id2) + assert(s.lookup(id3) eq null) + + @Test + def iterator: Unit = + val s = HashSet.from(List(id1,id2,id3)) + assert(s.iterator.toList.sorted 
== List(id1,id2,id3)) + + @Test + def clear: Unit = + locally: + val s1 = HashSet.from(List(id1,id2,id3)) + s1.clear() + assert(s1.size == 0) + locally: + val s2 = HashSet.from(List(id1,id2,id3)) + s2.clear(resetToInitial = false) + assert(s2.size == 0) + + /** the hash set is based on universal equality, i.e. does not use reference equality */ + @Test + def universalEquality: Unit = + val id2_2 = id2.makeCopy + + assert(id2.equals(id2_2)) // structural equality + assert(id2 ne id2_2) // reference inequality + + val s = HashSet.from(List(id2,id2_2)) + + assert(s.size == 1 && s.contains(id2) && s.contains(id2_2)) + assert(s.iterator.toList == List(id2)) // single element + end universalEquality + diff --git a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala index ecdfeb512e1b..26092b73f107 100644 --- a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala +++ b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala @@ -169,6 +169,62 @@ class ReplCompilerTests extends ReplTest: ) } + @Test def i16596 = + initially { + run(""" + |import scala.compiletime.{error, ops, requireConst}, ops.int.* + |import scala.quoted.* + | + |sealed trait Nat + |object Nat: + | case object Zero extends Nat + | case class Succ[N <: Nat](prev: N) extends Nat + | + | given zero: Zero.type = Zero + | given buildSucc[N <: Nat](using n: N): Succ[N] = Succ(n) + | + | def value[N <: Nat](using n: N): N = n + | + | def prevImpl[I <: Int: Type](expr: Expr[I])(using Quotes): Expr[I - 1] = + | val prev = expr.valueOrAbort - 1 + | // this compiles, but fails at use time + | //Expr(prev).asExprOf[ops.int.-[I, 1]] + | Expr(prev).asInstanceOf[Expr[I - 1]] + | + | inline def prevOf[I <: Int](inline i: I): I - 1 = ${prevImpl('i)} + | + | type FromInt[I <: Int] <: Nat = I match + | case 0 => Zero.type + | case _ => Succ[FromInt[I - 1]] + | + | inline def fromInt[I <: Int & Singleton](i: I): FromInt[i.type] = + | requireConst(i) + | inline i match + | case _: 0 => Zero + | case _ => + | inline if i < 0 + | then error("cannot convert negative to Nat") + | else Succ(fromInt(prevOf[i.type](i))) + """.stripMargin) + }.andThen { + assertMultiLineEquals( + """// defined trait Nat + |// defined object Nat + |""".stripMargin, storedOutput()) + run("Nat.fromInt(2)") + }.andThen { + assertEquals("val res0: Nat.Succ[Nat.Succ[Nat.Zero.type]] = Succ(Succ(Zero))", storedOutput().trim) + run("summon[Nat.FromInt[2]]") + }.andThen { + assertEquals("val res1: Nat.Succ[Nat.Succ[Nat.Zero.type]] = Succ(Succ(Zero))", storedOutput().trim) + run("Nat.fromInt(3)") + }.andThen { + assertEquals("val res2: Nat.Succ[Nat.Succ[Nat.Succ[Nat.Zero.type]]] = Succ(Succ(Succ(Zero)))", storedOutput().trim) + run("summon[Nat.FromInt[3]]") + }.andThen { + assertEquals("val res3: Nat.Succ[Nat.Succ[Nat.Succ[Nat.Zero.type]]] = Succ(Succ(Succ(Zero)))", storedOutput().trim) + } + @Test def i6200 = initially { run(""" diff --git a/compiler/test/dotty/tools/repl/ReplTest.scala b/compiler/test/dotty/tools/repl/ReplTest.scala index 34cad747fde6..8fbf635c9a17 100644 --- a/compiler/test/dotty/tools/repl/ReplTest.scala +++ b/compiler/test/dotty/tools/repl/ReplTest.scala @@ -69,7 +69,7 @@ extends ReplDriver(options, new PrintStream(out, true, StandardCharsets.UTF_8.na val expectedOutput = lines.filter(nonBlank) val actualOutput = { - val opts = toolArgsFor(ToolName.Scalac)(lines.take(1)) + val opts = toolArgsFor(ToolName.Scalac, scriptFile.map(_.toString))(lines.take(1)) val (optsLine, inputLines) = if opts.isEmpty then 
("", lines) else (lines.head, lines.drop(1)) resetToInitial(opts) diff --git a/compiler/test/dotty/tools/repl/TabcompleteTests.scala b/compiler/test/dotty/tools/repl/TabcompleteTests.scala index 910584a9b5e7..0bce525e1469 100644 --- a/compiler/test/dotty/tools/repl/TabcompleteTests.scala +++ b/compiler/test/dotty/tools/repl/TabcompleteTests.scala @@ -32,6 +32,11 @@ class TabcompleteTests extends ReplTest { assertEquals(List("apply"), comp) } + @Test def tabCompleteInExtensionDefinition = initially { + val comp = tabComplete("extension (x: Lis") + assertEquals(List("List"), comp) + } + @Test def tabCompleteTwiceIn = { val src1 = "class Foo { def bar(xs: List[Int]) = xs.map" val src2 = "class Foo { def bar(xs: List[Int]) = xs.mapC" diff --git a/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala b/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala index f5a9559dffc9..cc53447cd64b 100644 --- a/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala +++ b/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala @@ -7,6 +7,7 @@ import scala.language.unsafeNulls import java.nio.file.Files, java.nio.charset.StandardCharsets.UTF_8 import org.junit.{ After, Test } import org.junit.Assert.assertEquals +import org.junit.Assume.assumeFalse import org.junit.experimental.categories.Category import ScriptTestEnv.* @@ -19,6 +20,7 @@ class BashExitCodeTests: /** Verify the exit code of running `cmd args*`. */ def verifyExit(cmd: String, args: String*)(expectedExitCode: Int): Unit = + assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) val (validTest, exitCode, stdout, stderr) = bashCommand((cmd +: args).mkString(" ")) if verifyValid(validTest) then assertEquals({ diff --git a/compiler/test/dotty/tools/scripting/BashScriptsTests.scala b/compiler/test/dotty/tools/scripting/BashScriptsTests.scala index 8a19d0420692..f3f364754e20 100644 --- a/compiler/test/dotty/tools/scripting/BashScriptsTests.scala +++ b/compiler/test/dotty/tools/scripting/BashScriptsTests.scala @@ -7,6 +7,7 @@ import scala.language.unsafeNulls import java.nio.file.Paths import org.junit.{Test, AfterClass} import org.junit.Assert.assertEquals +import org.junit.Assume.assumeFalse import org.junit.experimental.categories.Category import vulpix.TestConfiguration @@ -84,30 +85,35 @@ class BashScriptsTests: /* verify that `dist/bin/scala` correctly passes args to the jvm via -J-D for script envtest.sc */ @Test def verifyScJProperty = + assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) val tag = "World1" val stdout = callScript(tag, envtestSc, s"-J-Dkey") assertEquals( s"Hello $tag", stdout) /* verify that `dist/bin/scala` correctly passes args to the jvm via -J-D for script envtest.scala */ @Test def verifyScalaJProperty = + assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) val tag = "World2" val stdout = callScript(tag, envtestScala, s"-J-Dkey") assertEquals(s"Hello $tag", stdout) /* verify that `dist/bin/scala` can set system properties via -D for envtest.sc */ @Test def verifyScDProperty = + assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) val tag = "World3" val stdout = callScript(tag, envtestSc, s"-Dkey") assertEquals(s"Hello $tag", stdout) /* verify that `dist/bin/scala` can set system properties via -D for envtest.scala */ @Test def verifyScalaDProperty = + assumeFalse("Scripts do not yet support Scala 2 library TASTy", 
Properties.usingScalaLibraryTasty) val tag = "World4" val stdout = callScript(tag, envtestScala, s"-Dkey") assertEquals(s"Hello $tag", stdout) /* verify that `dist/bin/scala` can set system properties via -D when executing compiled script via -jar envtest.jar */ @Test def saveAndRunWithDProperty = + assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) val commandline = Seq("SCALA_OPTS= ", scalaPath.relpath, "-save", envtestScala.relpath).mkString(" ") val (_, _, _, _) = bashCommand(commandline) // compile jar, discard output val testJar = testFile("envtest.jar") // jar is created by the previous bashCommand() @@ -125,6 +131,7 @@ class BashScriptsTests: /* verify `dist/bin/scalac` non-interference with command line args following script name */ @Test def verifyScalacArgs = + assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) val commandline = (Seq("SCALA_OPTS= ", scalacPath, "-script", showArgsScript) ++ testScriptArgs).mkString(" ") val (validTest, exitCode, stdout, stderr) = bashCommand(commandline) if verifyValid(validTest) then @@ -140,6 +147,7 @@ class BashScriptsTests: /* verify `dist/bin/scala` non-interference with command line args following script name */ @Test def verifyScalaArgs = + assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) val commandline = (Seq("SCALA_OPTS= ", scalaPath, showArgsScript) ++ testScriptArgs).mkString(" ") val (validTest, exitCode, stdout, stderr) = bashCommand(commandline) if verifyValid(validTest) then @@ -159,6 +167,7 @@ class BashScriptsTests: */ @Category(Array(classOf[BootstrappedOnlyTests])) @Test def verifyScriptPathProperty = + assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) val scriptFile = testFiles.find(_.getName == "scriptPath.sc").get val expected = s"${scriptFile.getName}" printf("===> verify valid system property script.path is reported by script [%s]\n", scriptFile.getName) @@ -175,6 +184,7 @@ class BashScriptsTests: * verify SCALA_OPTS can specify an @argsfile when launching a scala script in `dist/bin/scala`. */ @Test def verifyScalaOpts = + assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) val scriptFile = testFiles.find(_.getName == "classpathReport.sc").get printf("===> verify SCALA_OPTS='@argsfile' is properly handled by `dist/bin/scala`\n") val envPairs = List(("SCALA_OPTS", s"@$argsfile")) @@ -197,6 +207,7 @@ class BashScriptsTests: * verify that individual scripts can override -save with -nosave (needed to address #13760). */ @Test def sqlDateTest = + assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) val scriptBase = "sqlDateError" val scriptFile = testFiles.find(_.getName == s"$scriptBase.sc").get val testJar = testFile(s"$scriptBase.jar") // jar should not be created when scriptFile runs @@ -221,6 +232,7 @@ class BashScriptsTests: * verify -e println("yo!") works. 
*/ @Test def verifyCommandLineExpression = + assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) printf("===> verify -e is properly handled by `dist/bin/scala`\n") val expected = "9" val expression = s"println(3*3)" diff --git a/compiler/test/dotty/tools/scripting/ClasspathTests.scala b/compiler/test/dotty/tools/scripting/ClasspathTests.scala index 0f3ada041538..4fd1211698f6 100755 --- a/compiler/test/dotty/tools/scripting/ClasspathTests.scala +++ b/compiler/test/dotty/tools/scripting/ClasspathTests.scala @@ -43,7 +43,7 @@ class ClasspathTests: // cwd: // classpath: - val scriptOutput: Seq[String] = exec(cmd:_*) + val scriptOutput: Seq[String] = exec(cmd*) val scriptCwd: String = findTaggedLine("cwd", scriptOutput) // the value tagged "cwd: " printf("script ran in directory [%s]\n", scriptCwd) val scriptCp = findTaggedLine("classpath", scriptOutput) // the value tagged "classpath: " @@ -94,7 +94,7 @@ class ClasspathTests: cmd.foreach { printf("[%s]\n", _) } // test script reports the classpath it sees - val scriptOutput = exec(cmd:_*) + val scriptOutput = exec(cmd*) val scriptCp = findTaggedLine("unglobbed classpath", scriptOutput) printf("%s\n", scriptCp) val classpathJars = scriptCp.split(psep).map { _.getName }.sorted.distinct diff --git a/compiler/test/dotty/tools/scripting/ExpressionTest.scala b/compiler/test/dotty/tools/scripting/ExpressionTest.scala index a464d15a8fab..6b5248e67f08 100755 --- a/compiler/test/dotty/tools/scripting/ExpressionTest.scala +++ b/compiler/test/dotty/tools/scripting/ExpressionTest.scala @@ -7,6 +7,7 @@ import scala.language.unsafeNulls import java.nio.file.Paths import org.junit.{Test, AfterClass} import org.junit.Assert.assertEquals +import org.junit.Assume.assumeFalse import org.junit.experimental.categories.Category import vulpix.TestConfiguration @@ -22,13 +23,15 @@ class ExpressionTest: * verify -e works. */ @Test def verifyCommandLineExpression = + assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) printf("===> verify -e is properly handled by `dist/bin/scala`\n") val expected = "9" val expression = s"println(3*3)" val result = getResult(expression) - assert(result.contains(expected), s"expression [$expression] did not send [$expected] to stdout") + assert(result.contains(expected), s"expression [$expression] did not send [$expected] to stdout. 
It sent [$result].") + + @Test def verifyImports: Unit = + assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) val expressionLines = List( "import java.nio.file.Paths", "import scala.util.Properties.userDir", diff --git a/compiler/test/dotty/tools/scripting/ScriptingTests.scala b/compiler/test/dotty/tools/scripting/ScriptingTests.scala index 16eeb48f0b2a..5ec417090504 100644 --- a/compiler/test/dotty/tools/scripting/ScriptingTests.scala +++ b/compiler/test/dotty/tools/scripting/ScriptingTests.scala @@ -12,6 +12,8 @@ import org.junit.Test import vulpix.TestConfiguration import ScriptTestEnv.* +import org.junit.Assume.assumeFalse + /** Runs all tests contained in `compiler/test-resources/scripting/` */ class ScriptingTests: // classpath tests managed by scripting.ClasspathTests.scala @@ -21,6 +23,7 @@ class ScriptingTests: * Call .scala scripts without -save option, verify no jar created */ @Test def scriptingDriverTests = + assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) for (scriptFile, scriptArgs) <- scalaFilesWithArgs(".scala") do showScriptUnderTest(scriptFile) val unexpectedJar = script2jar(scriptFile) @@ -43,6 +46,7 @@ class ScriptingTests: * Call .sc scripts without -save option, verify no jar created */ @Test def scriptingMainTests = + assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) for (scriptFile, scriptArgs) <- scalaFilesWithArgs(".sc") do showScriptUnderTest(scriptFile) val unexpectedJar = script2jar(scriptFile) @@ -61,6 +65,7 @@ class ScriptingTests: * Call .sc scripts with -save option, verify jar is created. */ @Test def scriptingJarTest = + assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) for (scriptFile, scriptArgs) <- scalaFilesWithArgs(".sc") do showScriptUnderTest(scriptFile) val expectedJar = script2jar(scriptFile) @@ -85,6 +90,7 @@ class ScriptingTests: * Verify that when ScriptingDriver callback returns false, main is not called. */ @Test def scriptCompileOnlyTests = + assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) val scriptFile = touchFileScript showScriptUnderTest(scriptFile) @@ -123,6 +129,7 @@ class ScriptingTests: * Compile touchFile.sc to create executable jar, verify jar execution succeeds.
*/ @Test def scriptingNoCompileJar: Unit = + assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) val scriptFile = touchFileScript showScriptUnderTest(scriptFile) val expectedJar = script2jar(scriptFile) diff --git a/compiler/test/dotty/tools/utils.scala b/compiler/test/dotty/tools/utils.scala index 75918674146c..8161631acb44 100644 --- a/compiler/test/dotty/tools/utils.scala +++ b/compiler/test/dotty/tools/utils.scala @@ -65,7 +65,7 @@ type ToolArgs = Map[ToolName, List[String]] */ def toolArgsFor(files: List[JPath], charset: Charset = UTF_8): ToolArgs = files.foldLeft(Map.empty[ToolName, List[String]]) { (res, path) => - val toolargs = toolArgsParse(resource(Files.lines(path, charset))(_.limit(10).toScala(List))) + val toolargs = toolArgsParse(resource(Files.lines(path, charset))(_.limit(10).toScala(List)), Some(path.toString)) toolargs.foldLeft(res) { case (acc, (tool, args)) => val name = ToolName.named(tool) @@ -74,29 +74,36 @@ def toolArgsFor(files: List[JPath], charset: Charset = UTF_8): ToolArgs = } } -def toolArgsFor(tool: ToolName)(lines: List[String]): List[String] = - toolArgsParse(lines).collectFirst { case (name, args) if tool eq ToolName.named(name) => CommandLineParser.tokenize(args) }.getOrElse(Nil) +def toolArgsFor(tool: ToolName, filename: Option[String])(lines: List[String]): List[String] = + toolArgsParse(lines, filename).collectFirst { case (name, args) if tool eq ToolName.named(name) => CommandLineParser.tokenize(args) }.getOrElse(Nil) -// scalac: arg1 arg2, with alternative opening, optional space, alt names, text that is not */ up to end. +// scalajs: arg1 arg2, with alternative opening, optional space, alt names, text that is not */ up to end. // groups are (name, args) +// note: ideally we would replace everything that requires this to use directive syntax, however scalajs: --skip has no directive equivalent yet. private val toolArg = raw"(?://|/\*| \*) ?(?i:(${ToolName.values.mkString("|")})):((?:[^*]|\*(?!/))*)".r.unanchored +private val directiveOptionsArg = raw"//> using options (.*)".r.unanchored // Inspect the lines for compiler options of the form -// `// scalac: args`, `/* scalac: args`, ` * scalac: args`. +// `//> using options args`, `// scalajs: args`, `/* scalajs: args`, ` * scalajs: args` etc. // If args string ends in close comment, stop at the `*` `/`. // Returns all the matches by the regex. -def toolArgsParse(lines: List[String]): List[(String,String)] = - lines.flatMap { case toolArg(name, args) => List((name, args)) case _ => Nil } +def toolArgsParse(lines: List[String], filename: Option[String]): List[(String,String)] = + lines.flatMap { + case toolArg("scalac", _) => sys.error(s"`// scalac: args` not supported. 
Please use `//> using options args`${filename.fold("")(f => s" in file $f")}") + case toolArg(name, args) => List((name, args)) + case _ => Nil + } ++ + lines.flatMap { case directiveOptionsArg(args) => List(("scalac", args)) case _ => Nil } import org.junit.Test import org.junit.Assert._ class ToolArgsTest: - @Test def `missing toolarg is absent`: Unit = assertEquals(Nil, toolArgsParse(List(""))) - @Test def `toolarg is present`: Unit = assertEquals(("test", " -hey") :: Nil, toolArgsParse("// test: -hey" :: Nil)) - @Test def `tool is present`: Unit = assertEquals("-hey" :: Nil, toolArgsFor(ToolName.Test)("// test: -hey" :: Nil)) - @Test def `missing tool is absent`: Unit = assertEquals(Nil, toolArgsFor(ToolName.Javac)("// test: -hey" :: Nil)) + @Test def `missing toolarg is absent`: Unit = assertEquals(Nil, toolArgsParse(List(""), None)) + @Test def `toolarg is present`: Unit = assertEquals(("test", " -hey") :: Nil, toolArgsParse("// test: -hey" :: Nil, None)) + @Test def `tool is present`: Unit = assertEquals("-hey" :: Nil, toolArgsFor(ToolName.Test, None)("// test: -hey" :: Nil)) + @Test def `missing tool is absent`: Unit = assertEquals(Nil, toolArgsFor(ToolName.Javac, None)("// test: -hey" :: Nil)) @Test def `multitool is present`: Unit = - assertEquals("-hey" :: Nil, toolArgsFor(ToolName.Test)("// test: -hey" :: "// javac: -d /tmp" :: Nil)) - assertEquals("-d" :: "/tmp" :: Nil, toolArgsFor(ToolName.Javac)("// test: -hey" :: "// javac: -d /tmp" :: Nil)) + assertEquals("-hey" :: Nil, toolArgsFor(ToolName.Test, None)("// test: -hey" :: "// javac: -d /tmp" :: Nil)) + assertEquals("-d" :: "/tmp" :: Nil, toolArgsFor(ToolName.Javac, None)("// test: -hey" :: "// javac: -d /tmp" :: Nil)) end ToolArgsTest diff --git a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala index bccbcbee29e1..e93a0435987b 100644 --- a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala +++ b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala @@ -85,18 +85,18 @@ trait ParallelTesting extends RunnerOrchestration { self => val newFlags = newFlags0.toArray if (!flags.options.containsSlice(newFlags)) self match { case self: JointCompilationSource => - self.copy(flags = flags.and(newFlags:_*)) + self.copy(flags = flags.and(newFlags*)) case self: SeparateCompilationSource => - self.copy(flags = flags.and(newFlags:_*)) + self.copy(flags = flags.and(newFlags*)) } else self } def withoutFlags(flags1: String*): TestSource = self match { case self: JointCompilationSource => - self.copy(flags = flags.without(flags1: _*)) + self.copy(flags = flags.without(flags1*)) case self: SeparateCompilationSource => - self.copy(flags = flags.without(flags1: _*)) + self.copy(flags = flags.without(flags1*)) } lazy val allToolArgs: ToolArgs = @@ -226,14 +226,14 @@ trait ParallelTesting extends RunnerOrchestration { self => Try(testSource match { case testSource @ JointCompilationSource(name, files, flags, outDir, fromTasty, decompilation) => val reporter = - if (fromTasty) compileFromTasty(flags, suppressErrors, outDir) - else compile(testSource.sourceFiles, flags, suppressErrors, outDir) + if (fromTasty) compileFromTasty(flags, outDir) + else compile(testSource.sourceFiles, flags, outDir) List(reporter) case testSource @ SeparateCompilationSource(_, dir, flags, outDir) => testSource.compilationGroups.map { (group, files) => if group.compiler.isEmpty then - compile(files, flags, suppressErrors, outDir) + compile(files, flags, outDir) else 
compileWithOtherCompiler(group.compiler, files, flags, outDir) } @@ -244,7 +244,7 @@ trait ParallelTesting extends RunnerOrchestration { self => final def countErrors (reporters: Seq[TestReporter]) = countErrorsAndWarnings(reporters)._1 final def countWarnings(reporters: Seq[TestReporter]) = countErrorsAndWarnings(reporters)._2 - final def reporterFailed(r: TestReporter) = r.compilerCrashed || r.errorCount > 0 + final def reporterFailed(r: TestReporter) = r.errorCount > 0 /** * For a given test source, returns a check file against which the result of the test run @@ -469,7 +469,7 @@ trait ParallelTesting extends RunnerOrchestration { self => registerCompletion() throw e - protected def compile(files0: Array[JFile], flags0: TestFlags, suppressErrors: Boolean, targetDir: JFile): TestReporter = { + protected def compile(files0: Array[JFile], flags0: TestFlags, targetDir: JFile): TestReporter = { import scala.util.Properties.* def flattenFiles(f: JFile): Array[JFile] = @@ -490,7 +490,7 @@ trait ParallelTesting extends RunnerOrchestration { self => def scalacOptions = toolArgs.getOrElse(ToolName.Scalac, Nil) val flags = flags0 - .and(scalacOptions: _*) + .and(scalacOptions*) .and("-d", targetDir.getPath) .withClasspath(targetDir.getPath) @@ -610,17 +610,38 @@ trait ParallelTesting extends RunnerOrchestration { self => .run() .mkString(JFile.pathSeparator) - val stdlibClasspath = artifactClasspath("org.scala-lang", "scala3-library_3") - val scalacClasspath = artifactClasspath("org.scala-lang", "scala3-compiler_3") - val pageWidth = TestConfiguration.pageWidth - 20 - val flags1 = flags.copy(defaultClassPath = stdlibClasspath) - .withClasspath(targetDir.getPath) - .and("-d", targetDir.getPath) - .and("-pagewidth", pageWidth.toString) - val scalacCommand = Array("java", "-cp", scalacClasspath, "dotty.tools.dotc.Main") - val command = scalacCommand ++ flags1.all ++ files.map(_.getAbsolutePath) + val fileArgs = files.map(_.getAbsolutePath) + + def scala2Command(): Array[String] = { + assert(!flags.options.contains("-scalajs"), + "Compilation tests with Scala.js on Scala 2 are not supported.\nThis test can be skipped using the `// scalajs: --skip` tag") + val stdlibClasspath = artifactClasspath("org.scala-lang", "scala-library") + val scalacClasspath = artifactClasspath("org.scala-lang", "scala-compiler") + val flagsArgs = flags + .copy(options = Array.empty, defaultClassPath = stdlibClasspath) + .withClasspath(targetDir.getPath) + .and("-d", targetDir.getPath) + .all + val scalacCommand = Array("java", "-cp", scalacClasspath, "scala.tools.nsc.Main") + scalacCommand ++ flagsArgs ++ fileArgs + } + + def scala3Command(): Array[String] = { + val stdlibClasspath = artifactClasspath("org.scala-lang", "scala3-library_3") + val scalacClasspath = artifactClasspath("org.scala-lang", "scala3-compiler_3") + val flagsArgs = flags + .copy(defaultClassPath = stdlibClasspath) + .withClasspath(targetDir.getPath) + .and("-d", targetDir.getPath) + .and("-pagewidth", pageWidth.toString) + .all + val scalacCommand = Array("java", "-cp", scalacClasspath, "dotty.tools.dotc.Main") + scalacCommand ++ flagsArgs ++ fileArgs + } + + val command = if compiler.startsWith("2") then scala2Command() else scala3Command() val process = Runtime.getRuntime.exec(command) val reporter = mkReporter @@ -634,7 +655,7 @@ trait ParallelTesting extends RunnerOrchestration { self => reporter - protected def compileFromTasty(flags0: TestFlags, suppressErrors: Boolean, targetDir: JFile): TestReporter = { + protected def compileFromTasty(flags0: 
TestFlags, targetDir: JFile): TestReporter = { val tastyOutput = new JFile(targetDir.getPath + "_from-tasty") tastyOutput.mkdir() val flags = flags0 and ("-d", tastyOutput.getPath) and "-from-tasty" @@ -653,6 +674,12 @@ trait ParallelTesting extends RunnerOrchestration { self => private def mkLogLevel = if suppressErrors || suppressAllOutput then ERROR + 1 else ERROR private def mkReporter = TestReporter.reporter(realStdout, logLevel = mkLogLevel) + protected def diffCheckfile(testSource: TestSource, reporters: Seq[TestReporter], logger: LoggedRunnable) = + checkFile(testSource).foreach(diffTest(testSource, _, reporterOutputLines(reporters), reporters, logger)) + + private def reporterOutputLines(reporters: Seq[TestReporter]): List[String] = + reporters.flatMap(_.consoleOutput.split("\n")).toList + private[ParallelTesting] def executeTestSuite(): this.type = { assert(testSourcesCompleted == 0, "not allowed to re-use a `CompileRun`") if filteredSources.nonEmpty then @@ -717,6 +744,78 @@ trait ParallelTesting extends RunnerOrchestration { self => private final class PosTest(testSources: List[TestSource], times: Int, threadLimit: Option[Int], suppressAllOutput: Boolean)(implicit summaryReport: SummaryReporting) extends Test(testSources, times, threadLimit, suppressAllOutput) + private final class WarnTest(testSources: List[TestSource], times: Int, threadLimit: Option[Int], suppressAllOutput: Boolean)(implicit summaryReport: SummaryReporting) + extends Test(testSources, times, threadLimit, suppressAllOutput): + override def suppressErrors = true + override def onSuccess(testSource: TestSource, reporters: Seq[TestReporter], logger: LoggedRunnable): Unit = + diffCheckfile(testSource, reporters, logger) + + override def maybeFailureMessage(testSource: TestSource, reporters: Seq[TestReporter]): Option[String] = + lazy val (map, expCount) = getWarnMapAndExpectedCount(testSource.sourceFiles.toIndexedSeq) + lazy val obtCount = reporters.foldLeft(0)(_ + _.warningCount) + lazy val (expected, unexpected) = getMissingExpectedWarnings(map, reporters.iterator.flatMap(_.diagnostics)) + lazy val diagnostics = reporters.flatMap(_.diagnostics.toSeq.sortBy(_.pos.line).map(e => s" at ${e.pos.line + 1}: ${e.message}")) + def showLines(title: String, lines: Seq[String]) = if lines.isEmpty then "" else title + lines.mkString("\n", "\n", "") + def hasMissingAnnotations = expected.nonEmpty || unexpected.nonEmpty + def showDiagnostics = showLines("-> following the diagnostics:", diagnostics) + Option: + if reporters.exists(_.errorCount > 0) then + s"""Compilation failed for: ${testSource.title} + |$showDiagnostics + |""".stripMargin.trim.linesIterator.mkString("\n", "\n", "") + else if expCount != obtCount then + s"""|Wrong number of warnings encountered when compiling $testSource + |expected: $expCount, actual: $obtCount + |${showLines("Unfulfilled expectations:", expected)} + |${showLines("Unexpected warnings:", unexpected)} + |$showDiagnostics + |""".stripMargin.trim.linesIterator.mkString("\n", "\n", "") + else if hasMissingAnnotations then s"\nWarnings found on incorrect row numbers when compiling $testSource\n$showDiagnostics" + else if !map.isEmpty then s"\nExpected warnings(s) have {=}: $map" + else null + end maybeFailureMessage + + def getWarnMapAndExpectedCount(files: Seq[JFile]): (HashMap[String, Integer], Int) = + val comment = raw"//( *)warn".r + val map = new HashMap[String, Integer]() + var count = 0 + def bump(key: String): Unit = + map.get(key) match + case null => map.put(key, 1) + case n => 
map.put(key, n+1) + count += 1 + files.filter(isSourceFile).foreach { file => + Using(Source.fromFile(file, StandardCharsets.UTF_8.name)) { source => + source.getLines.zipWithIndex.foreach { case (line, lineNbr) => + comment.findAllMatchIn(line).foreach { _ => + bump(s"${file.getPath}:${lineNbr+1}") + } + } + }.get + } + (map, count) + + def getMissingExpectedWarnings(map: HashMap[String, Integer], reporterWarnings: Iterator[Diagnostic]): (List[String], List[String]) = + val unexpected, unpositioned = ListBuffer.empty[String] + def relativize(path: String): String = path.split(JFile.separatorChar).dropWhile(_ != "tests").mkString(JFile.separator) + def seenAt(key: String): Boolean = + map.get(key) match + case null => false + case 1 => map.remove(key) ; true + case n => map.put(key, n - 1) ; true + def sawDiagnostic(d: Diagnostic): Unit = + val srcpos = d.pos.nonInlined + if srcpos.exists then + val key = s"${relativize(srcpos.source.file.toString())}:${srcpos.line + 1}" + if !seenAt(key) then unexpected += key + else + unpositioned += relativize(srcpos.source.file.toString()) + + reporterWarnings.foreach(sawDiagnostic) + + (map.asScala.keys.toList, (unexpected ++ unpositioned).toList) + end getMissingExpectedWarnings + private final class RewriteTest(testSources: List[TestSource], checkFiles: Map[JFile, JFile], times: Int, threadLimit: Option[Int], suppressAllOutput: Boolean)(implicit summaryReport: SummaryReporting) extends Test(testSources, times, threadLimit, suppressAllOutput) { private def verifyOutput(testSource: TestSource, reporters: Seq[TestReporter], logger: LoggedRunnable) = { @@ -782,7 +881,6 @@ trait ParallelTesting extends RunnerOrchestration { self => override def suppressErrors = true override def maybeFailureMessage(testSource: TestSource, reporters: Seq[TestReporter]): Option[String] = - def compilerCrashed = reporters.exists(_.compilerCrashed) lazy val (errorMap, expectedErrors) = getErrorMapAndExpectedCount(testSource.sourceFiles.toIndexedSeq) lazy val actualErrors = reporters.foldLeft(0)(_ + _.errorCount) lazy val (expected, unexpected) = getMissingExpectedErrors(errorMap, reporters.iterator.flatMap(_.errors)) @@ -791,8 +889,7 @@ trait ParallelTesting extends RunnerOrchestration { self => reporters.flatMap(_.allErrors.sortBy(_.pos.line).map(e => s"${e.pos.line + 1}: ${e.message}")).mkString(" at ", "\n at ", "") Option { - if compilerCrashed then s"Compiler crashed when compiling: ${testSource.title}" - else if actualErrors == 0 then s"\nNo errors found when compiling neg test $testSource" + if actualErrors == 0 then s"\nNo errors found when compiling neg test $testSource" else if expectedErrors == 0 then s"\nNo errors expected/defined in $testSource -- use // error or // nopos-error" else if expectedErrors != actualErrors then s"""|Wrong number of errors encountered when compiling $testSource @@ -808,10 +905,7 @@ trait ParallelTesting extends RunnerOrchestration { self => end maybeFailureMessage override def onSuccess(testSource: TestSource, reporters: Seq[TestReporter], logger: LoggedRunnable): Unit = - checkFile(testSource).foreach(diffTest(testSource, _, reporterOutputLines(reporters), reporters, logger)) - - def reporterOutputLines(reporters: Seq[TestReporter]): List[String] = - reporters.flatMap(_.consoleOutput.split("\n")).toList + diffCheckfile(testSource, reporters, logger) // In neg-tests we allow two or three types of error annotations. // Normally, `// error` must be annotated on the correct line number. 
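For context, the new `WarnTest` runner above applies the same line-annotation convention to warnings: each `// warn` comment marks a line on which exactly one warning is expected, and `getWarnMapAndExpectedCount` and `getMissingExpectedWarnings` reconcile those markers with the diagnostics the compiler actually reports. A minimal sketch of such a test file (illustrative only; the file name and contents are hypothetical, not part of this patch):

```scala
// tests/warn/nonexhaustive.scala (hypothetical): each `// warn` marks a line
// on which exactly one warning is expected, here a non-exhaustive match.
sealed trait Color
case object Red extends Color
case object Blue extends Color

def show(c: Color): String =
  c match // warn (match may not be exhaustive)
    case Red => "red"
```

If the compiler reports a different number of warnings, or reports them on other lines, `maybeFailureMessage` above renders the mismatch, listing both unfulfilled expectations and unexpected warnings.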
@@ -1014,20 +1108,11 @@ trait ParallelTesting extends RunnerOrchestration { self => * compilation without generating errors and that they do not crash the * compiler */ - def checkCompile()(implicit summaryReport: SummaryReporting): this.type = { - val test = new PosTest(targets, times, threadLimit, shouldFail || shouldSuppressOutput).executeTestSuite() - - cleanup() - - if (!shouldFail && test.didFail) { - fail(s"Expected no errors when compiling, failed for the following reason(s):\n${reasonsForFailure(test)}\n") - } - else if (shouldFail && !test.didFail && test.skipCount == 0) { - fail("Pos test should have failed, but didn't") - } + def checkCompile()(implicit summaryReport: SummaryReporting): this.type = + checkPass(new PosTest(targets, times, threadLimit, shouldFail || shouldSuppressOutput), "Pos") - this - } + def checkWarnings()(implicit summaryReport: SummaryReporting): this.type = + checkPass(new WarnTest(targets, times, threadLimit, shouldFail || shouldSuppressOutput), "Warn") /** Creates a "neg" test run, which makes sure that each test generates the * correct number of errors at the correct positions. It also makes sure @@ -1047,35 +1132,16 @@ trait ParallelTesting extends RunnerOrchestration { self => end checkExpectedErrors /** Creates a "fuzzy" test run, which makes sure that each test compiles (or not) without crashing */ - def checkNoCrash()(implicit summaryReport: SummaryReporting): this.type = { - val test = new NoCrashTest(targets, times, threadLimit, shouldSuppressOutput).executeTestSuite() - - cleanup() - - if (test.didFail) { - fail("Fuzzy test shouldn't have crashed, but did") - } - - this - } + def checkNoCrash()(implicit summaryReport: SummaryReporting): this.type = + checkFail(new NoCrashTest(targets, times, threadLimit, shouldSuppressOutput), "Fuzzy") /** Creates a "run" test run, which is a superset of "pos". In addition to * making sure that all tests pass compilation and that they do not crash * the compiler; it also makes sure that all tests can run with the * expected output */ - def checkRuns()(implicit summaryReport: SummaryReporting): this.type = { - val test = new RunTest(targets, times, threadLimit, shouldFail || shouldSuppressOutput).executeTestSuite() - - cleanup() - - if !shouldFail && test.didFail then - fail(s"Run test failed, but should not, reasons:\n${ reasonsForFailure(test) }") - else if shouldFail && !test.didFail && test.skipCount == 0 then - fail("Run test should have failed, but did not") - - this - } + def checkRuns()(implicit summaryReport: SummaryReporting): this.type = + checkPass(new RunTest(targets, times, threadLimit, shouldFail || shouldSuppressOutput), "Run") /** Tests `-rewrite`, which makes sure that the rewritten files still compile * and agree with the expected result (if specified). 
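To show how the new `checkWarnings()` entry point is meant to be driven, here is a hedged sketch of a JUnit suite method in the style of the existing vulpix suites; `compileFilesInDir`, `defaultOptions`, `TestGroup` and the `tests/warn` directory are assumptions mirroring the current `pos` tests, not code taken from this patch:

```scala
// Hypothetical entry in dotty.tools.dotc.CompilationTests: compile every
// file under tests/warn and verify its `// warn` annotations through the
// new WarnTest runner.
@Test def warn: Unit =
  implicit val testGroup: TestGroup = TestGroup("compileWarn")
  compileFilesInDir("tests/warn", defaultOptions).checkWarnings()
```

Like `checkCompile()`, such a suite goes through the shared `checkPass` helper defined further below, so a failing warn run reports its reasons the same way a failing pos run does.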
@@ -1100,15 +1166,34 @@ trait ParallelTesting extends RunnerOrchestration { self => target.copy(dir = copyToDir(outDir, dir)) } - val test = new RewriteTest(copiedTargets, checkFileMap, times, threadLimit, shouldFail || shouldSuppressOutput).executeTestSuite() + val test = new RewriteTest(copiedTargets, checkFileMap, times, threadLimit, shouldFail || shouldSuppressOutput) + + checkFail(test, "Rewrite") + } + + private def checkPass(test: Test, desc: String): this.type = + test.executeTestSuite() cleanup() - if test.didFail then - fail("Rewrite test failed") + if !shouldFail && test.didFail then + fail(s"$desc test failed, but should not, reasons:\n${reasonsForFailure(test)}") + else if shouldFail && !test.didFail && test.skipCount == 0 then + fail(s"$desc test should have failed, but didn't") + + this + + private def checkFail(test: Test, desc: String): this.type = + test.executeTestSuite() + + cleanup() + + if shouldFail && !test.didFail && test.skipCount == 0 then + fail(s"$desc test shouldn't have failed, but did. Reasons:\n${reasonsForFailure(test)}") + else if !shouldFail && test.didFail then + fail(s"$desc test failed") this - } /** Deletes output directories and files */ private def cleanup(): this.type = { diff --git a/compiler/test/dotty/tools/vulpix/RunnerOrchestration.scala b/compiler/test/dotty/tools/vulpix/RunnerOrchestration.scala index 8e9a27e766b4..9047bb6737dc 100644 --- a/compiler/test/dotty/tools/vulpix/RunnerOrchestration.scala +++ b/compiler/test/dotty/tools/vulpix/RunnerOrchestration.scala @@ -14,6 +14,7 @@ import scala.concurrent.duration.Duration import scala.concurrent.{ Await, Future } import scala.concurrent.ExecutionContext.Implicits.global import scala.collection.mutable +import scala.compiletime.uninitialized /** Vulpix spawns JVM subprocesses (`numberOfSlaves`) in order to run tests * without compromising the main JVM @@ -70,8 +71,8 @@ trait RunnerOrchestration { withRunner(_.runMain(classPath)) private class Runner(private var process: Process) { - private var childStdout: BufferedReader = _ - private var childStdin: PrintStream = _ + private var childStdout: BufferedReader = uninitialized + private var childStdin: PrintStream = uninitialized /** Checks if `process` is still alive * diff --git a/compiler/test/dotty/tools/vulpix/TestConfiguration.scala b/compiler/test/dotty/tools/vulpix/TestConfiguration.scala index 5d2992b50a09..04be00fe921e 100644 --- a/compiler/test/dotty/tools/vulpix/TestConfiguration.scala +++ b/compiler/test/dotty/tools/vulpix/TestConfiguration.scala @@ -25,12 +25,14 @@ object TestConfiguration { "-Xverify-signatures" ) - val basicClasspath = mkClasspath(List( + val basicClasspath = mkClasspath( + Properties.scalaLibraryTasty.toList ::: List( Properties.scalaLibrary, Properties.dottyLibrary )) - val withCompilerClasspath = mkClasspath(List( + val withCompilerClasspath = mkClasspath( + Properties.scalaLibraryTasty.toList ::: List( Properties.scalaLibrary, Properties.scalaAsm, Properties.jlineTerminal, @@ -84,13 +86,6 @@ object TestConfiguration { ) val picklingWithCompilerOptions = picklingOptions.withClasspath(withCompilerClasspath).withRunClasspath(withCompilerClasspath) - val recheckOptions = defaultOptions.and("-Yrecheck-test") - val scala2CompatMode = defaultOptions.and("-source", "3.0-migration") - val explicitUTF8 = defaultOptions and ("-encoding", "UTF8") - val explicitUTF16 = defaultOptions and ("-encoding", "UTF16") - - /** Enables explicit nulls */ - val explicitNullsOptions = defaultOptions and "-Yexplicit-nulls" /** Default 
target of the generated class files */ private def defaultTarget: String = { diff --git a/dist/bin/common.bat b/dist/bin/common.bat index 9f47198e757b..7aef606d5509 100644 --- a/dist/bin/common.bat +++ b/dist/bin/common.bat @@ -13,7 +13,7 @@ if defined JAVACMD ( set __JAVA_BIN_DIR= for /f "delims=" %%i in ('where /f java.exe') do ( set "__PATH=%%~dpi" - @rem we take first occurence and ignore Oracle path for java executable + @rem we take first occurrence and ignore Oracle path for java executable if not defined __JAVA_BIN_DIR if "!__PATH!"=="!__PATH:javapath=!" set "__JAVA_BIN_DIR=!__PATH!" ) if defined __JAVA_BIN_DIR set "_JAVACMD=!__JAVA_BIN_DIR!\java.exe" diff --git a/docs/_assets/images/contribution/debug-test-code-lens.jpg b/docs/_assets/images/contribution/debug-test-code-lens.jpg new file mode 100644 index 000000000000..3a9622b95f51 Binary files /dev/null and b/docs/_assets/images/contribution/debug-test-code-lens.jpg differ diff --git a/docs/_assets/images/contribution/debug-test-explorer.jpg b/docs/_assets/images/contribution/debug-test-explorer.jpg new file mode 100644 index 000000000000..b59c7ab64f44 Binary files /dev/null and b/docs/_assets/images/contribution/debug-test-explorer.jpg differ diff --git a/docs/_docs/contributing/architecture/types.md b/docs/_docs/contributing/architecture/types.md index 64543e555e69..2dfdc33101a0 100644 --- a/docs/_docs/contributing/architecture/types.md +++ b/docs/_docs/contributing/architecture/types.md @@ -11,7 +11,7 @@ format corresponding to the backing data structure, e.g. `ExprType(...)` corresponds to `class ExprType`, defined in [Types.scala]. > You can inspect the representation of any type using the [dotty.tools.printTypes][DottyTypeStealer] -> script, its usage and integration into your debugging workflow is [described here](../issues/inspection.md). +> script, its usage and integration into your debugging workflow is [described here](../debugging/inspection.md). ### Types of Definitions diff --git a/docs/_docs/contributing/cheatsheet.md b/docs/_docs/contributing/cheatsheet.md new file mode 100644 index 000000000000..33e85a2cf564 --- /dev/null +++ b/docs/_docs/contributing/cheatsheet.md @@ -0,0 +1,37 @@ +--- +layout: doc-page +title: Command Cheatsheet +--- + +## sbt commands + +Below is a cheat sheet of some frequently used commands to be used from the SBT +console – `sbt`. + + +| Command | Description | +|------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------| +| `scala3/scalac` | Run the compiler directly, with any current changes. | +| `scala3/scala` | Run the main method of a given class name. | +| `scalac ../issues/Playground.scala` | Compile the given file – path relative to the Dotty directory. Output the compiled class files to the Dotty directory itself. | +| `scala Playground` | Run the compiled class `Playground`. The Dotty directory is on the classpath by default. | +| `repl` | Start REPL | +| `scala3/scalac -print-tasty Foo.tasty` | Print the TASTy of top-level class `Foo` | +| `scala3-bootstrapped/test` | Run all tests for Scala 3. (Slow, recommended for CI only) | +| `scala3-bootstrapped/publishLocal` | Build Scala 3 locally. (Use to debug a specific project) | +| `testOnly dotty.tools.dotc.CompilationTests -- *pos` | Run test (method) `pos` from `CompilationTests` suite. | +| `testCompilation sample` | In all test suites, run test files containing the word `sample` in their title.
| +| `scala3-compiler/Test/runMain dotty.tools.printTypes`| Print types' underlying representation | +| `scaladoc/generateScalaDocumentation` | Build the documentation website (published to https://dotty.epfl.ch) | +| `scaladoc/generateReferenceDocumentation` | Build the reference documentation website (published to https://docs.scala-lang.org/scala3/reference) | + + +## Shell Commands + +Below is a cheat sheet of some frequently used commands to be used from your +shell. + +| Command | Description | +|--------------------------------------|------------------------------------------------------------------| +| `rm -rv *.tasty *.class out || true` | Clean all compiled artifacts, from the root dotty directory | +| `git clean -fdx` | A full clean of all files in the codebase not tracked by git | diff --git a/docs/_docs/contributing/checklist.sh b/docs/_docs/contributing/checklist.sh deleted file mode 100755 index d3cfe70b4e21..000000000000 --- a/docs/_docs/contributing/checklist.sh +++ /dev/null @@ -1,60 +0,0 @@ -# #!/usr/bin/env bash -stable=$1 - -rc="$(($stable+1))" -next="$(($rc+1))" - -stable_version="0.$stable.0" -rc_version="0.$rc.0-RC1" -next_version="0.$next.0" -stable_branch="0.$stable.x" -rc_branch="0.$rc.x" - -LIST='- [ ] Publish artifacts to Maven via CI - - [ ] On branch ``, set `baseVersion` to `` and `git tag` it as ``. This will publish artefacts to Sonatype and GitHub Release - - [ ] Merge branch `` into `master` to guarantee that all of the `` commits are propagated to `master` - - [ ] Look at the milestone of the RC version being released. Move all the open issues from it to the next milestone. - - [ ] Create branch `` from `master` - - [ ] On ``, set `baseVersion` to `` and `git tag` it as ``. This will publish artefacts to Sonatype and GitHub Release.
- - [ ] On `master`, set `baseVersion` to `` -- [ ] Update `scalaVersion` (and, if applicable, the `sbt-dotty` version) in the Dotty ecosystem projects - - [ ] https://github.com/scala/scala3-example-project [![Build Status](https://travis-ci.org/scala/scala3-example-project.svg?branch=master)](https://travis-ci.org/scala/scala3-example-project) - - [ ] Committed to `master` - - [ ] https://github.com/scala/scala3-example-project/tree/mill - - [ ] https://github.com/scala/scala3.g8 [![Build Status](https://travis-ci.org/scala/scala3.g8.svg?branch=master)](https://travis-ci.org/scala/scala3.g8/) - - [ ] Committed to `master` - - [ ] https://github.com/scala/scala3-cross.g8 [![Build Status](https://travis-ci.org/scala/scala3-cross.g8.svg?branch=master)](https://travis-ci.org/scala/scala3-cross.g8/) - - [ ] Committed to `master` - - [ ] https://github.com/lampepfl/homebrew-brew [![Build Status](https://travis-ci.org/lampepfl/homebrew-brew.svg?branch=master)](https://travis-ci.org/lampepfl/homebrew-brew) - - [ ] Committed to `master` - - SHA256 sum for the artifact: `wget -q -O- https://github.com/lampepfl/dotty/releases/download//sha256sum.txt | grep ".tar.gz"` - - [ ] https://github.com/lampepfl/packtest [![Build Status](https://travis-ci.org/lampepfl/packtest.svg?branch=master)](https://travis-ci.org/lampepfl/packtest) - - [ ] Committed to `master` - - [ ] https://github.com/lampepfl/xml-interpolator [![Build Status](https://travis-ci.org/lampepfl/xml-interpolator.svg?branch=master)](https://travis-ci.org/lampepfl/xml-interpolator) - - [ ] PR submitted - - [ ] PR merged - - [ ] https://github.com/scalacenter/scastie - - [ ] PR submitted - - [ ] PR merged - - [ ] https://scastie.scala-lang.org/ -> Build Settings -> Dotty mentions `` - - [ ] Dotty reference compiler [![Build Status](http://dotty-ci.epfl.ch/api/badges/lampepfl/dotty/status.svg)](http://dotty-ci.epfl.ch/lampepfl/dotty) - - [ ] PR submitted - - [ ] PR merged - - [ ] Scalac [![Build Status](https://travis-ci.org/scala/scala.svg?branch=2.13.x)](https://travis-ci.org/scala/scala) - - [ ] PR submitted - - [ ] PR merged -- [ ] Announce the release - - [ ] Publish releases for the RC and stable versions on GitHub Releases - - [ ] Publish Blog Post on dotty.epfl.ch - - [ ] Make an announcement thread on https://contributors.scala-lang.org - - [ ] Tweet the announcement blog post on https://twitter.com/scala_lang - - [ ] Run workflow releases CI to publish scala on SDKMAN - https://github.com/lampepfl/dotty/actions/workflows/releases.yml - -[Instructions on how to release](https://dotty.epfl.ch/docs/contributing/release.html)' - -echo "$LIST" |\ - sed "s//$stable_version/g" |\ - sed "s//$rc_version/g" |\ - sed "s//$next_version/g" |\ - sed "s//$stable_branch/g" |\ - sed "s//$rc_branch/g" diff --git a/docs/_docs/contributing/community-build.md b/docs/_docs/contributing/community-build.md new file mode 100644 index 000000000000..b382786c614e --- /dev/null +++ b/docs/_docs/contributing/community-build.md @@ -0,0 +1,77 @@ +--- +layout: doc-page +title: Community Build +--- + +The Community Build contains tests to build and test a corpus of open source +Scala projects against the latest version of Scala 3. + +## Running it locally + +To run the community build on a local machine, first fetch all the git +submodules with `git submodule update --init` and run `sbt community-build/test` +from the root of the dotty repo.
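Each project in the build is declared in Scala and exercised by an ordinary JUnit test, which is why the JUnit selection syntax shown below works. A rough sketch of what such an entry looks like (the names and parameters are illustrative, not the exact current API):

```scala
// Illustrative shape of a community-build project definition and its test;
// see projects.scala and CommunityBuildTest.scala for the real code.
import org.junit.Test

object projects:
  lazy val shapeless = SbtCommunityProject(
    project        = "shapeless",
    sbtTestCommand = "test"
  )

class CommunityBuildTestA:
  @Test def shapeless: Unit = projects.shapeless.run()
```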
+ +To run a single project, you can use the usual syntax for running a single JUnit +test, for example `community-build/testOnly -- *shapeless`. + +In CI the community build is split up into 3 separate groups: A, B, and C. To +run one specific build you can also use the same JUnit syntax as above targeting +the individual group. For example: + +``` +sbt "community-build/testOnly dotty.communitybuild.CommunityBuildTestA" +``` + +## Adding your project + +The community build is able to handle both Mill and sbt projects. To add your +project to the community build, you can follow these steps: + +1. Ensure your project is compiling with Scala 3. If you need help, make sure to + check out the [Scala 3 Migration + Guide](https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html). + You can see the submodules in + [community-projects](https://github.com/lampepfl/dotty/tree/main/community-build/community-projects/) + for examples of projects that compile with Scala 3. + +2. Open a PR against this repo that: + - Adds your project as a new git submodule + - `git submodule add https://github.com/dotty-staging/XYZ.git community-build/community-projects/XYZ` + - Adds the project to [projects.scala](https://github.com/lampepfl/dotty/blob/main/community-build/src/scala/dotty/communitybuild/projects.scala) + - Adds a test in [CommunityBuildTest.scala](https://github.com/lampepfl/dotty/blob/main/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala) + +3. Once the CI is green, someone from the Dotty team will fork your repo and add + it to [dotty-staging](https://github.com/dotty-staging). This enables us to + make changes to your fork if necessary to keep the community build running + smoothly. + +4. Once the fork is created, please update your PR to point to this new fork + instead of your repo. + +## Updating a project + +The projects included in the community build are all forked and located in +[dotty-staging](https://github.com/dotty-staging). When something needs to be +bumped, the process is as follows: + +1. Fork the dotty staging repo and sync it with the upstream project. + +2. Once you've verified that the tests are all passing, you can then either + request in your PR that the dotty-staging fork be synced, or ask in the + [scala-contributors](https://discord.com/channels/632150470000902164/632628489719382036) + discord channel. + +### Some helpful tips + +- If you're unfamiliar with Git Submodules, you can find a nice guide to get + familiar with them [here](https://git-scm.com/book/en/v2/Git-Tools-Submodules). +- Keep in mind that many projects are interrelated. So when you bump one, that + change may cascade through multiple different projects, causing you to have + to bump those as well. Plan accordingly, and at times it's best to pin to a + stable release version, especially if it's a root library that many others + in the community build rely on. + +## Looking for the "unmanaged" Scala 3 community build? + +You can find it [here](https://github.com/VirtusLab/community-build3). diff --git a/docs/_docs/contributing/debug-tests.md b/docs/_docs/contributing/debug-tests.md deleted file mode 100644 index a38338fcc745..000000000000 --- a/docs/_docs/contributing/debug-tests.md +++ /dev/null @@ -1,124 +0,0 @@ ---- -layout: doc-page -title: Tests for Debuggability ---- - -## Tools Requires - -- JDB -- expect - -Both are usually pre-installed on macOS and linux distributions.
- -## Debug Manually with JDB - -First, compile the file `tests/debug/while.scala`: - -```shell -$ scalac tests/debug/while.scala -``` - -Second, run the compiled class with debugging enabled (suppose the main class is `Test`): - -```shell -$ scala -d Test -``` - -Third, start JDB: - -```shell -$ jdb -attach 5005 -sourcepath tests/debug/ -``` - -You can run `help` for commands that supported by JDB. - -## Debug Automatically with Expect - -### 1. Annotate the source code with debug information. - -Following file (`tests/debug/while.scala`) is an example of annotated source code: - -```scala -object Test { - - def main(args: Array[String]): Unit = { - var a = 1 + 2 - a = a + 3 - a = 4 + 5 // [break] [step: while] - - while (a * 8 < 100) { // [step: a += 1] - a += 1 // [step: while] [cont: print] - } - - print(a) // [break] [cont] - } -} -``` - -The debugging information is annotated as comments to the code in brackets: - -```scala -val x = f(3) // [break] [next: line=5] -val y = 5 -``` - -1. A JDB command must be wrapped in brackets, like `[step]`. All JDB commands can be used. -2. To check output of JDB for a command, use `[cmd: expect]`. -3. If `expect` is wrapped in double quotes, regex is supported. -4. Break commands are collected and set globally. -5. Other commands will be send to jdb in the order they appear in the source file - -Note that JDB uses line number starts from 1. - -### 2. Generate Expect File - -Now we can run the following command to generate an expect file: - -```shell -compiler/test/debug/Gen tests/debug/while.scala > robot -``` - -### 3. Run the Test - -First, compile the file `tests/debug/while.scala`: - -```shell -$ scalac tests/debug/while.scala -``` - -Second, run the compiled class with debugging enabled: - -```shell -$ scala -d Test -``` - -Finally, run the expect script: - -```shell -expect robot -``` - -## Other Tips - -### Adding a New Test - -Just put the annotated source file under `tests/debug/`, it will be automatically -run by the test infrastructure. - -### Run All Debug Tests - -```shell -./compiler/test/debug/test -``` - -### Debug a Debug Test - -If there is any problem with a debug test, first check if the problematic -test work correctly with JDB without automation. - -Then, uncomment the following line in the generated expect file to check the -output of expect: - -``` -# exp_internal 1 -``` diff --git a/docs/_docs/contributing/debugging/debugging.md b/docs/_docs/contributing/debugging/debugging.md new file mode 100644 index 000000000000..a96e6bcdc7db --- /dev/null +++ b/docs/_docs/contributing/debugging/debugging.md @@ -0,0 +1,14 @@ +--- +layout: doc-page +title: Debugging the Compiler +redirectFrom: /docs/contributing/workflow/debugging.html +--- + +This section goes over the various ways to debug either the compiler or the code +that you're having issues with. This can be as simple as inspecting the trees of your +code, or stepping through the dotty codebase with a debugger.
+ +The following sections will help you with this: +- [Debugging with your IDE](./ide-debugging.md) +- [How to Inspect Values](./inspection.md) +- [Other Debugging Techniques](./other-debugging.md) diff --git a/docs/_docs/contributing/debugging/ide-debugging.md b/docs/_docs/contributing/debugging/ide-debugging.md new file mode 100644 index 000000000000..af817826565a --- /dev/null +++ b/docs/_docs/contributing/debugging/ide-debugging.md @@ -0,0 +1,202 @@ +--- +layout: doc-page +title: Debugging with your IDE +--- + +The debugger is a powerful tool to navigate the internals of the compiler and track bugs. + +You can start the Scala debugger in VSCode using [Metals](https://scalameta.org/metals/). +In this page you will learn how to configure it, and how to use it. + +## Importing the project in VSCode using Metals + +The first step is to import the build in Metals, if it has not yet been imported. + +To do so you can open the [lampepfl/dotty][lampepfl/dotty] repository in VSCode and click `Import build` in the Metals view. +It may take a few minutes to import, compile and index the full project. + +![Import build](/images/contribution/import-build.jpg) + +If you have any trouble with importing, you can try to switch the build server from Bloop to sbt, +by running the `Metals: Switch build server` command from the VSCode command palette. + +## Debugging the unit tests + +If the module you're working on contains unit tests, you can debug a specific one without any additional work. +It can be accomplished by running the `Debug Test` code lens on the exact test case. + +![Debug test via code lens](/images/contribution/debug-test-code-lens.jpg) + +The second way to run a unit test is to use the test explorer tab, which lists all available test cases. +The debugger is started either by pressing a debug button or by selecting the `Debug Test` option from the menu. + +![Debug test via test explorer](/images/contribution/debug-test-explorer.jpg) + +## Debugging the compilation + +Debugging the compilation requires additional configuration in order to work. +In VSCode, you can go to the `Run and Debug` view and click `create a launch.json file`. +It creates the `launch.json` file in the `.vscode` folder, in which we will define the debug configurations. + +![Create launch.json file](/images/contribution/launch-config-file.jpg) + +To create a debug configuration: +- Open the `.vscode/launch.json` file +- Click the `Add Configuration` button +- Go down the list of templates and select `Scala: Run main class` + +![Create configuration](/images/contribution/create-config.jpg) + +The added configuration should look like this: +```json +{ + "type": "scala", + "request": "launch", + "name": "Untitled", + "mainClass": "???", + "args": [], + "jvmOptions": [], + "env": {} +} +``` + +This is a template that you need to fill out. +First, you can give a `name` to your configuration, for instance `Debug Scala 3 Compiler`. + +The two most important parameters for debugging the compiler are `mainClass` and `args`. +The `mainClass` of the compiler is `dotty.tools.dotc.Main`. +In the `args` you need to specify the compiler arguments, which must contain at least a Scala file to compile and a `-classpath` option. + +To start with, we can compile the `../tests/pos/HelloWorld.scala` file. +In the classpath, we always need at least the `scala-library_2.13` and the bootstrapped `scala3-library_3`. +To locate them on your filesystem, you can run the `export scala3-library-bootstrapped/fullClasspath` command in sbt.
+ +``` +$ sbt +> export scala3-library-bootstrapped/fullClasspath +/home/user/lampepfl/dotty/out/bootstrap/scala3-library-bootstrapped/scala-3.3.1-RC1-bin-SNAPSHOT-nonbootstrapped/classes:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/scala-lang/scala-library/2.13.10/scala-library-2.13.10.jar +[success] Total time: 1 s, completed Mar 10, 2023, 4:37:43 PM +``` + +Note that it is important to use the bootstrapped version of the `scala3-library` to get the correct TASTy version. + +Additionally, you can add the `-color` and `never` arguments to prevent the compiler from printing ANSI codes as strings in the debug console. + +Here is the final configuration: +```json +{ + "type": "scala", + "request": "launch", + "name": "Debug Scala 3 Compiler", + "mainClass": "dotty.tools.dotc.Main", + "args": [ + "../tests/pos/HelloWorld.scala", + "-classpath", + // To replace with your own paths + "/home/user/lampepfl/dotty/out/bootstrap/scala3-library-bootstrapped/scala-3.3.1-RC1-bin-SNAPSHOT-nonbootstrapped/classes:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/scala-lang/scala-library/2.13.10/scala-library-2.13.10.jar", + "-color", + "never" + ], + "jvmOptions": [], + "env": {} +} +``` + +### Customizing the debug configurations + +#### Compiling several files at once + +You can compile more than one Scala file by adding them to the `args`: +```json +"args": [ + "file1.scala", + "file2.scala", + "-classpath", + "/home/user/lampepfl/dotty/out/bootstrap/scala3-library-bootstrapped/scala-3.3.1-RC1-bin-SNAPSHOT-nonbootstrapped/classes:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/scala-lang/scala-library/2.13.10/scala-library-2.13.10.jar" +] +``` + +#### Depending on a library + +To add a dependency on an external library, you need to download it and all its transitive dependencies, and add them to the classpath. +The Coursier CLI can help you do that. +For instance, to add a dependency on cats, you can run: +``` +$ cs fetch org.typelevel::cats-core:2.+ --classpath --scala-version 3 --exclude org.scala-lang:scala-library --exclude org.scala-lang:scala3-library +/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/typelevel/cats-core_3/2.9.0/cats-core_3-2.9.0.jar:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/typelevel/cats-kernel_3/2.9.0/cats-kernel_3-2.9.0.jar +``` + +Then concatenate the output into the classpath argument, which should already contain the scala-library_2.13 and the bootstrapped scala3-library: + +```json +"args": [ + "using-cats.scala", + "-classpath", + "/home/user/lampepfl/dotty/out/bootstrap/scala3-library-bootstrapped/scala-3.3.1-RC1-bin-SNAPSHOT-nonbootstrapped/classes:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/scala-lang/scala-library/2.13.10/scala-library-2.13.10.jar:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/typelevel/cats-core_3/2.9.0/cats-core_3-2.9.0.jar:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/typelevel/cats-kernel_3/2.9.0/cats-kernel_3-2.9.0.jar" +] +``` + +#### Add more compiler options + +In the `args` you can add any additional compiler option you want. + +For instance, you can add `-Xprint:all` to print all the generated trees after each mega phase. + +Run `scalac -help` to get an overview of the available compiler options. + +#### Defining more than one launch configuration + +You can create as many debug configurations as you need: to compile different files, with different compiler options or different classpaths.
+ +### Starting the debugger + +Before starting the debugger, you need to put a breakpoint in the part of the code that you want to debug. +If you don't know where to start, you can put a breakpoint in the `main` method of the `dotty.tools.dotc.Driver` trait. + +![First breakpoint](/images/contribution/breakpoint.jpg) + +Now to start the debugger, open the debug view, find the drop-down list of all the debug configurations and click on yours. +The debugger should start and pause on your breakpoint. + +![Start debugger](/images/contribution/start-debugger.jpg) + +## Using the debugger + +### Navigating the call stack + +When the debugger has paused, you can see the current call stack in the `Run and Debug` view. +Each frame of the call stack contains different variables, whose values you can see in the `Variables` section of the `Run and Debug` view. + +![Call stack](/images/contribution/call-stack.jpg) + +Analysing the call stack and the variables can help you understand the path taken by the compiler to reach that state. + +### The debugging steps + +The debug toolbar contains the `Continue / Pause`, `Step Over`, `Step Into`, `Step Out`, `Restart` and `Stop` buttons. + +![Debugging steps](/images/contribution/toolbar.jpg) + +You can use the step buttons to execute the code step by step and get a precise understanding of the program. + +### The debug console + +When the debugger has paused, you can evaluate any Scala 3 expression in the debug console. +This is useful to inspect some values or to execute some parts of the code. + +For instance, you can evaluate `tree.show` to pretty-print a tree. + +![Debug console](/images/contribution/debug-console.jpg) + +### Conditional breakpoints + +In a breakpoint you can define a condition, in the form of a Boolean expression written in Scala. +The program will stop on the breakpoint as soon as the condition is met. + +To add a condition, right-click on a breakpoint and pick `Edit breakpoint...`. + +For instance, if you know that a bug happens on typing a method `foo`, you can use the condition `tree.symbol.name.show == "foo"` in a breakpoint in the `Typer`. + +![Conditional breakpoint](/images/contribution/conditional-breakpoint.jpg) + +[lampepfl/dotty]: https://github.com/lampepfl/dotty diff --git a/docs/_docs/contributing/issues/inspection.md b/docs/_docs/contributing/debugging/inspection.md similarity index 95% rename from docs/_docs/contributing/issues/inspection.md rename to docs/_docs/contributing/debugging/inspection.md index abedc09ecd3b..a80c3d3462ae 100644 --- a/docs/_docs/contributing/issues/inspection.md +++ b/docs/_docs/contributing/debugging/inspection.md @@ -1,6 +1,7 @@ --- layout: doc-page title: How to Inspect Values +redirectFrom: /docs/contributing/workflow/inspection.html --- In this section, you will find out how to debug the contents of certain objects @@ -11,7 +12,10 @@ while the compiler is running, and inspect produced artifacts of the compiler. Frequently you will need to inspect the content of a particular variable. You can either use `println`s or the debugger, more info on how to setup the latter. -In the remeainder of this article we'll use `println()` inserted in the code, but the same effect can be accomplished by stopping at a breakpoint, and typing `` in the [debug console](./debugging.md#the-debug-console) of the debugger.
+In the remainder of this article we'll use `println()` +inserted in the code, but the same effect can be accomplished by stopping at a +breakpoint, and typing `` in the [debug +console](./ide-debugging.md#the-debug-console) of the debugger. When printing a variable, it's always a good idea to call `show` on that variable: `println(x.show)`. Many objects of the compiler define `show`, returning a human-readable string. @@ -102,6 +106,7 @@ If you are curious about the representation of a type, say `[T] =>> List[T]`, you can use a helper program [dotty.tools.printTypes][DottyTypeStealer], it prints the internal representation of types, along with their class. It can be invoked from the sbt shell with three arguments as follows: + ```bash sbt:scala3> scala3-compiler/Test/runMain dotty.tools.printTypes diff --git a/docs/_docs/contributing/issues/other-debugging.md b/docs/_docs/contributing/debugging/other-debugging.md similarity index 87% rename from docs/_docs/contributing/issues/other-debugging.md rename to docs/_docs/contributing/debugging/other-debugging.md index 1aa0fb85e5f8..50be43db51ab 100644 --- a/docs/_docs/contributing/issues/other-debugging.md +++ b/docs/_docs/contributing/debugging/other-debugging.md @@ -1,39 +1,99 @@ --- layout: doc-page title: Other Debugging Techniques +redirectFrom: /docs/contributing/issues/debugging.html --- -## Setting up the playground -Consider the `../issues/Playground.scala` (relative to the Dotty directory) file is: +## Debug Manually with JDB + +First, compile the file `tests/debug/while.scala`: + +```shell +$ scalac tests/debug/while.scala +``` + +Second, run the compiled class with debugging enabled (suppose the main class is `Test`): + +```shell +$ scala -d Test +``` + +Third, start JDB: + +```shell +$ jdb -attach 5005 -sourcepath tests/debug/ +``` + +You can run `help` for commands that are supported by JDB. + +## Debug Automatically with Expect + +### 1. Annotate the source code with debug information. + +The following file (`tests/debug/while.scala`) is an example of annotated source code: ```scala -object Playground { - def main(args: Array[String]) = { - println("Hello World") +object Test { + + def main(args: Array[String]): Unit = { + var a = 1 + 2 + a = a + 3 + a = 4 + 5 // [break] [step: while] + + while (a * 8 < 100) { // [step: a += 1] + a += 1 // [step: while] [cont: print] + } + + print(a) // [break] [cont] } } ``` -Then, you can debug Dotty by compiling this file via `scalac ../issues/Playground.scala` (from the SBT console) and collecting various debug output in process. This section documents techniques you can use to collect the debug info. - -[This](https://github.com/lampepfl/dotty/blob/10526a7d0aa8910729b6036ee51942e05b71abf6/compiler/src/dotty/tools/dotc/typer/Typer.scala#L2231) is the entry point to the Typer. The job of the Typer is to take an untyped tree, compute its type and turn it into a typed tree by attaching the type information to that tree. We will use this entry point to practice debugging techniques. E.g.: +The debugging information is annotated as comments to the code in brackets: ```scala - def typed(tree: untpd.Tree, pt: Type, locked: TypeVars)(implicit ctx: Context): Tree = - trace(i"typing $tree", typr, show = true) { - println("Hello Debug!") - /*...*/ +val x = f(3) // [break] [next: line=5] +val y = 5 +``` + +1. A JDB command must be wrapped in brackets, like `[step]`. All JDB commands can be used. +2. To check output of JDB for a command, use `[cmd: expect]`. +3.
If `expect` is wrapped in double quotes, regex is supported. +4. Break commands are collected and set globally. +5. Other commands will be sent to jdb in the order they appear in the source file. + +Note that JDB line numbers start from 1. + +### 2. Generate Expect File + +Now we can run the following command to generate an expect file: + +```shell +compiler/test/debug/Gen tests/debug/while.scala > robot ``` -Then: +### 3. Run the Test + +First, compile the file `tests/debug/while.scala`: + +```shell +$ scalac tests/debug/while.scala +``` + +Second, run the compiled class with debugging enabled: ```shell -scalac ../issues/Playground.scala +$ scala -d Test ``` -The techniques discussed below can be tried out in place of `println("Hello Debug")` in that location. They are of course applicable throughout the codebase. +Finally, run the expect script: + +```shell +expect robot +``` +## Other tips +### Show for human readable output -## Show for human readable output Many objects in the compiler have a `show` method available on them via implicit rich wrapper: ```scala @@ -42,7 +102,8 @@ println(tree.show) This will output every single tree passing through the typer (or wherever else you inject it) in a human readable form. Try calling `show` on anything you want to be human-readable, and chances are it will be possible to do so. -## How to disable color +### How to disable color + Note that the `show` command above outputs the code in color. This is achieved by injecting special characters into the strings which terminals interpret as commands to change color of the output. This however may not be what you want, e.g. if you want to zero-in on a particular tree: ```scala @@ -56,7 +117,7 @@ To disable color output from `show`, run `scalac` as follows: `scalac -color:never ../issues/Playground.scala` -## Reporting as a non-intrusive println +### Reporting as a non-intrusive println Consider you want to debug the `tree` that goes into `assertPositioned(tree)` in the `typed` method. You can do: ```scala @@ -72,7 +133,7 @@ assertPositioned(tree.reporting(s"Tree is: $result")) `extension (a: A) def reporting(f: WrappedResult[T] ?=> String, p: Printer = Printers.default): A` is defined on all types. The function `f` can be written without the argument since it is a context function. The `result` variable is a part of the `WrapperResult` – a tiny framework powering the `reporting` function. Basically, whenever you are using `reporting` on an object `A`, you can use the `result: A` variable from this function and it will be equal to the object you are calling `reporting` on. -## Printing out trees after phases +### Printing out trees after phases To print out the trees you are compiling after the FrontEnd (scanner, parser, namer, typer) phases: ```shell @@ -121,7 +182,7 @@ dotty.tools.dotc.typer.Namer.recur$3$$anonfun$2(Namer.scala:495) So, the error happened in the Namer's `checkNoConflict` method (after which all the stack frames represent the mechanics of issuing an error, not an intent that produced the error in the first place). -## Configuring the printer output +### Configuring the printer output Printing from the `show` and `-Xprint` is done from the Printers framework (discussed in more details below).
The following settings influence the output of the printers: ```scala @@ -181,8 +242,8 @@ package @ { @ ``` -## Figuring out an object creation site -### Via ID +### Figuring out an object creation site +#### Via ID Every [Positioned](https://github.com/lampepfl/dotty/blob/10526a7d0aa8910729b6036ee51942e05b71abf6/compiler/src/dotty/tools/dotc/ast/Positioned.scala) (a parent class of `Tree`) object has a `uniqueId` field. It is an integer that is unique for that tree and doesn't change from compile run to compile run. You can output these IDs from any printer (such as the ones used by `.show` and `-Xprint`) via `-Yshow-tree-ids` flag, e.g.: ```shell @@ -284,7 +345,7 @@ So that tree was created at: Since all the stack frames above it are technical frames executing the tree creation command, and the frame in question is the location where the intent of the tree creation was expressed. -### Via tracer +#### Via tracer Some objects may not be `Positioned` and hence their creation site is not debuggable via the technique in the section above. Say you target a tree at `Typer`'s `typed` method as follows: ```scala @@ -309,7 +370,7 @@ if (tree.show == """println("Hello World")""") { } ``` -## Built-in Logging Architecture +### Built-in Logging Architecture Dotty has a lot of debug calls scattered throughout the code, most of which are disabled by default. At least three (possibly intertwined) architectures for logging are used for that: - Printer @@ -318,7 +379,7 @@ Dotty has a lot of debug calls scattered throughout the code, most of which are These do not follow any particular system and so probably it will be easier to go with `println` most of the times instead. -### Printers +#### Printers Defined in [Printers.scala](https://github.com/lampepfl/dotty/blob/10526a7d0aa8910729b6036ee51942e05b71abf6/compiler/src/dotty/tools/dotc/config/Printers.scala) as a set of variables, each responsible for its own domain. To enable them, replace `noPrinter` with `default`. [Example](https://github.com/lampepfl/dotty/blob/10526a7d0aa8910729b6036ee51942e05b71abf6/compiler/src/dotty/tools/dotc/typer/Typer.scala#L2226) from the code: ```scala @@ -327,7 +388,7 @@ typr.println(i"make contextual function $tree / $pt ---> $ifun") `typr` is a printer. -### Tracing +#### Tracing Defined in [trace.scala](https://github.com/lampepfl/dotty/blob/10526a7d0aa8910729b6036ee51942e05b71abf6/compiler/src/dotty/tools/dotc/reporting/trace.scala). [Example](https://github.com/lampepfl/dotty/blob/10526a7d0aa8910729b6036ee51942e05b71abf6/compiler/src/dotty/tools/dotc/typer/Typer.scala#L2232) from the code: ```scala @@ -344,5 +405,5 @@ To enable for a single trace, do the following: trace.force(i"typing $tree", typr, show = true) { // ... ``` -### Reporter +#### Reporter Defined in [Reporter.scala](https://github.com/lampepfl/dotty/blob/10526a7d0aa8910729b6036ee51942e05b71abf6/compiler/src/dotty/tools/dotc/reporting/Reporter.scala). Enables calls such as `report.log`. To enable, run scalac with `-Ylog:typer` option. diff --git a/docs/_docs/contributing/getting-started.md b/docs/_docs/contributing/getting-started.md index af9f2f0783b8..938e7ff36d42 100644 --- a/docs/_docs/contributing/getting-started.md +++ b/docs/_docs/contributing/getting-started.md @@ -1,6 +1,7 @@ --- layout: doc-page title: Getting Started +redirectFrom: /docs/contributing/workflow.html --- ## Scala CLA @@ -25,16 +26,23 @@ never a bad idea to check. - [git] is essential for managing the Scala 3 code, and contributing to GitHub, where the code is hosted. 
- A Java Virtual Machine (JDK 8 or higher), required for running the build tool. - - download Java from [Oracle Java 8][java8], [Oracle Java 11][java11], - or [AdoptOpenJDK 8/11][adopt]. Refer to [JDK Compatibility][compat] for Scala/Java compatibility detail. + - There are multiple ways to get and manage different Java versions. Some + popular options are: + - [SDKMAN!](https://sdkman.io/) + - [Coursier](https://get-coursier.io/docs/cli-java) + - Or just download Java directly from [Oracle Java 8][java8], [Oracle + Java 11][java11], or [AdoptOpenJDK 8/11][adopt]. Refer to [JDK + Compatibility][compat] for Scala/Java compatibility detail. - Verify that the JVM is installed by running the following command in a terminal: `java -version`. - [sbt][sbt-download], the build tool required to build the Scala 3 compiler and libraries. ## Nice To Have -An IDE, such as [Metals] will help you develop in Scala 3 with features such as autocompletion or goto-definition, -and with the [VS Code][vs-code] text editor you can even use the Scala debugger, or create interactive worksheets for an -iterative workflow. +An IDE, such as [Metals], will help you develop in Scala 3 with features such as +autocompletion, code navigation, debugging, and interactive worksheets. + +Another popular option is [IntelliJ IDEA for +Scala](https://www.jetbrains.com/help/idea/discover-intellij-idea-for-scala.html). ## Compiling and Running Start by cloning the repository: ```bash $ git clone https://github.com/lampepfl/dotty.git $ cd dotty -$ sbt managedSources # Needed for IDE import to succeed ``` Dotty provides a standard sbt build: compiling, running and starting a repl can @@ -56,11 +63,12 @@ $ sbt hello world ``` -There are also bash scripts that can be used in the same way. Assuming that you have cloned the Dotty repo locally, append -the following line on your `.bash_profile`: +There are also bash scripts that can be used in the same way. Assuming that you +have cloned the Dotty repo locally, append the following line to your +`.bash_profile`, or the equivalent for your shell: ```shell -$ export PATH=$HOME/dotty/bin:$PATH +$ export PATH=/bin:$PATH ``` and you will be able to run the corresponding commands directly from your console: @@ -73,7 +81,6 @@ $ scalac tests/pos/HelloWorld.scala $ scala HelloWorld ``` - ## Starting a REPL ```bash @@ -109,15 +116,13 @@ where `dotty-version` can be found in the file `project/Build.scala`, like `3.0. ## Generating Documentation To generate this page and other static page docs, run + ```bash $ sbt > scaladoc/generateScalaDocumentation ``` -For more information, see `scaladoc/README.md`. - -Before contributing to Dotty, we invite you to consult the -[Dotty Developer Guidelines](https://github.com/lampepfl/dotty/blob/main/CONTRIBUTING.md). +For more information, see the [scaladoc section](./scaladoc.md). ## Community diff --git a/docs/_docs/contributing/index.md b/docs/_docs/contributing/index.md index 0cc87e4b3500..507149340941 100644 --- a/docs/_docs/contributing/index.md +++ b/docs/_docs/contributing/index.md @@ -12,16 +12,17 @@ also documents the inner workings of the Scala 3 compiler, `dotc`. Keep in mind that the code for `dotc` is continually changing, so the ideas discussed in this guide may fall out of date.
This is a living document, so please consider contributing to it on -[GitHub](https://github.com/scala/docs.scala-lang/tree/main/_overviews/scala3-contribution) -if you notice anything out of date, or report any issues -[here](https://github.com/scala/docs.scala-lang/issues). +[GitHub](https://github.com/lampepfl/dotty/tree/main/docs/_docs/contributing) if +you notice anything out of date, or report any issues +[here](https://github.com/lampepfl/dotty/issues). -### Get the Most from This Guide +### Get the Most from This Guide `dotc` is built with Scala 3, fully utilising its [new -features](https://docs.scala-lang.org/scala3/new-in-scala3.html). It is recommended that you first have -some familiarity with Scala 3 to get the most out of this guide. You can learn -more in the [language reference](../reference/overview.md). +features](https://docs.scala-lang.org/scala3/new-in-scala3.html). It is +recommended that you first have some familiarity with Scala 3 to get the most +out of this guide. You can learn more in the [language +reference](../reference/overview.md). Many code snippets in this guide make use of shell commands (a line beginning with `$`), and in this case a `bash` compatible shell is assumed. You may have @@ -39,11 +40,3 @@ another programming language. As an example, `dotc` takes text input, verifies that it is a valid Scala program and then produces as output the same program, but written in Java bytecode, and optionally in SJSIR when producing Scala.js output. - -### Contribute Internals-related Knowledge -If you know anything useful at all about Dotty, feel free to log this knowledge: - -- [📜Log the Knowledge](https://github.com/lampepfl/dotty-knowledge/issues/new) -- [🎓More about Logging the Knowledge](https://github.com/lampepfl/dotty-knowledge/blob/master/README.md) - -In short, no need to make it pretty, particularly human-readable or give it a particular structure. Just dump the knowledge you have and we'll take it from there. \ No newline at end of file diff --git a/docs/_docs/contributing/issues/areas.md b/docs/_docs/contributing/issues/areas.md index 4f9adf79ba77..ce27e9c0a5aa 100644 --- a/docs/_docs/contributing/issues/areas.md +++ b/docs/_docs/contributing/issues/areas.md @@ -1,6 +1,7 @@ --- layout: doc-page title: Common Issue Locations +redirectFrom: /docs/contributing/workflow/areas.html --- Many issues are localised to small domains of the compiler and are self-contained, diff --git a/docs/_docs/contributing/issues/cause.md b/docs/_docs/contributing/issues/cause.md index 5bb04e894f70..e23f6d1f747f 100644 --- a/docs/_docs/contributing/issues/cause.md +++ b/docs/_docs/contributing/issues/cause.md @@ -1,6 +1,7 @@ --- layout: doc-page title: Finding the Cause of an Issue +redirectFrom: /docs/contributing/workflow/cause.html --- In this section, you will be able to answer questions such as: @@ -9,12 +10,13 @@ In this section, you will be able to answer questions such as: - where is a particular object created? - where is a particular value assigned to a variable? -> You may be able to quickly find the source responsible for an issue by consulting [common issue locations](../issues/areas.md) +> You may be able to quickly find the source responsible for an issue by consulting [common issue locations](./areas.md) ## What phase generated a particular tree? -As described in the [compiler lifecycle](../architecture/lifecycle.md#phases), each phase transforms the trees -and types that represent your code in a certain way.
+As described in the [compiler lifecycle](../architecture/lifecycle.md#phases-2), +each phase transforms the trees and types that represent your code in a certain +way. To print the code as it is transformed through the compiler, use the compiler flag `-Xprint:all`. After each phase group is completed, you will see the resulting trees representing the code. @@ -36,6 +38,9 @@ the tree was generated by looking for its unique ID, and then generating a stack 3. Run the compiler with `-Ydebug-tree-with-id ` flag. The compiler will print a stack trace pointing to the creation site of the tree with the provided ID. +If you are using a debugger, you can add a breakpoint after `if ownId == debugId then` in the method `allocateId` of `Positioned.scala` (you will still need the flag); this will allow you to navigate the stack with more ease. +Do not use a conditional breakpoint; the time overhead is very significant for a method called as often as this one. + ### Enhanced Tree Printing As seen above `-Xprint:` can be enhanced with further configuration flags, found in @@ -61,9 +66,10 @@ To discover why such a *spurious* error is generated, you can trace the code that adding the `-Ydebug-error` compiler flag, e.g. `scala3/scalac -Ydebug-error Test.scala`. This flag forces a stack trace to be printed each time an error happens, from the site where it occurred. -Analysing the trace will give you a clue about the objects involved in producing the error. -For example, you can add some debug statements before the error is issued to discover -the state of the compiler. [See some useful ways to debug values.](./inspection.md) +Analysing the trace will give you a clue about the objects involved in producing +the error. For example, you can add some debug statements before the error is +issued to discover the state of the compiler. [See some useful ways to debug +values.](../debugging/inspection.md) ### Where was a particular object created? @@ -73,6 +79,7 @@ creation site of that object to understand the logic that created it. You can do this by injecting a *tracer* into the class of an instance in question. A tracer is the following variable: + ```scala val tracer = Thread.currentThread.getStackTrace.mkString("\n") ``` diff --git a/docs/_docs/contributing/issues/checklist.md b/docs/_docs/contributing/issues/checklist.md deleted file mode 100644 index e2fcf32531de..000000000000 --- a/docs/_docs/contributing/issues/checklist.md +++ /dev/null @@ -1,135 +0,0 @@ ---- -layout: doc-page -title: Pull Request Checklist ---- - -Once you solved the issue you were working on, you'll likely want to see your -changes added to the [Scala 3 repo][lampepfl/dotty]. To do that, you need to -prepare a [pull request][pull-request] with your changes. Assuming that the team -is aware of what you've been working, here are some final steps that you'll want -to keep in mind as you create your PR. - -### 1. Sign the CLA - -Make sure you have signed the [Scala CLA][cla]. If you have any questions about -what this is and why it's required you can read further about it [here][cla]. - -### 2. Make sure your work is on its own branch - -When submitting your pull request it's always best to ensure the branch name is -unique to the changes you're working on. It's important not to submit your PR on -your `main` branch as this blocks maintainers from making any changes to your PR -if necessary. - -### 3: Add Tests - -Add at least one test that replicates the problem in the issue, and that shows it is now resolved.
- -You may of course add variations of the test code to try and eliminate edge cases. -[Become familiar with testing in Scala 3](./testing.md). - -### 4: Add Documentation - -Please ensure that all code is documented to explain its use, even if only internal -changes are made. This refers to scaladocs and also any changes that might be -necessary in the reference docs. - -### 5: Double check everything - -Here are a couple tips to keep in mind. - -- [DRY (Don't Repeat Yourself)][dry] -- [Scouts Rule][scouts] -- When adding new code try use [optional braces]. If you're rewriting old code, - you should also use optional braces unless it introduces more code changes - that necessary. - -### 6: Commit Messages - -Here are some guidelines when writing commits for Dotty. - -1. If your work spans multiple local commits (for example; if you do safe point - commits while working in a feature branch or work in a branch for long time - doing merges/rebases etc.) then please do not commit it all but rewrite the - history by squashing the commits into one large commit which is accompanied - by a detailed commit message for (as discussed in the following sections). - For more info, see the article: [Git Workflow][git-workflow]. Additionally, - every commit should be able to be used in isolation—that is, each commit must - build and pass all tests. - -2. The first line should be a descriptive sentence about what the commit is - doing. It should be possible to fully understand what the commit does by just - reading this single line. It is **not ok** to only list the ticket number, - type "minor fix" or similar. If the commit has a corresponding ticket, - include a reference to the ticket number, prefixed with "Closes #", at the - beginning of the first line followed by the title of the ticket, assuming - that it aptly and concisely summarizes the commit in a single line. If the - commit is a small fix, then you are done. If not, go to 3. - -3. Following the single line description (ideally no more than 70 characters - long) should be a blank line followed by an enumerated list with the details - of the commit. - -4. Add keywords for your commit (depending on the degree of automation we reach, - the list may change over time): - * ``Review by @githubuser`` - will notify the reviewer via GitHub. Everyone - is encouraged to give feedback, however. (Remember that @-mentions will - result in notifications also when pushing to a WIP branch, so please only - include this in your commit message when you're ready for your pull - request to be reviewed. Alternatively, you may request a review in the - pull request's description.) - * ``Fix/Fixing/Fixes/Close/Closing/Refs #ticket`` - if you want to mark the - ticket as fixed in the issue tracker (Assembla understands this). - * ``backport to _branch name_`` - if the fix needs to be cherry-picked to - another branch (like 2.9.x, 2.10.x, etc) - -Example: - -``` -fix: here is your pr title briefly mentioning the topic - -Here is the body of your pr with some more information - - Details 1 - - Details 2 - - Details 3 - -Closes #2 -``` - -### 7: Create your PR! - -When the feature or fix is completed you should open a [Pull -Request](https://help.github.com/articles/using-pull-requests) on GitHub. - -If you're not actually finished yet and are just looking for some initial input -on your approach, feel free to open a [Draft PR][draft]. This lets reviewers -know that you're not finished yet. 
It's also a good idea to put a [wip] in front -of your pr title to make this extra clear. - -Shortly after creating your pull request a maintainer should assign someone to -review it. If this doesn't happen after a few days, feel free to ping someone on -the [Scala Contributors Discor][discord] or tag someone on the PR. Depending on -the type of pull request there might be multiple people that take a look at your -changes. There might also be community input as we try to keep the review -process as open as possible. - -### 8: Addressing feedback - -More than likely you'll get feedback from the reviewers, so you'll want to make -sure to address everything. When in doubt, don't hesitate to ask for -clarification or more information. - -Once you finally see the "LGTM" (Looks Good To Me or Let's Get This Merged) -you're PR will be merged in! - -[pull-request]: https://docs.github.com/en?query=pull+requests -[lampepfl/dotty]: https://github.com/lampepfl/dotty -[cla]: http://typesafe.com/contribute/cla/scala -[issues]: https://github.com/lampepfl/dotty/issues -[full-list]: https://github.com/lampepfl/dotty/blob/master/CONTRIBUTING.md -[discord]: https://discord.gg/TSmY9zkHar -[dry]: https://www.oreilly.com/library/view/97-things-every/9780596809515/ch30.html -[scouts]: https://www.oreilly.com/library/view/97-things-every/9780596809515/ch08.html -[optional-braces]: https://docs.scala-lang.org/scala3/reference/other-new-features/indentation.html -[draft]: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/about-pull-requests#draft-pull-requests -[git-workflow]: http://sandofsky.com/blog/git-workflow.html diff --git a/docs/_docs/contributing/issues/debugging.md b/docs/_docs/contributing/issues/debugging.md deleted file mode 100644 index 2d8a9e5941e4..000000000000 --- a/docs/_docs/contributing/issues/debugging.md +++ /dev/null @@ -1,189 +0,0 @@ ---- -layout: doc-page -title: Debugging the Compiler ---- - -The debugger is a powerful tool to navigate the internals of the compiler and track bugs. - -You can start the Scala debugger in VSCode using [Metals](https://scalameta.org/metals/). -In this page you will learn how to configure it, and how to use it. - -## Importing the project in VSCode using Metals - -The first step is to import the build in Metals, if it has not yet been imported. - -To do so you can open the [lampefl/dotty][lampepfl/dotty] repository in VSCode and click `Import build` in Metals view. -It may take a few minutes to import, compile and index the full project. - -![Import build](/images/contribution/import-build.jpg) - -If you have any trouble with importing, you can try to switch the build server from Bloop to sbt, -by running the `Metals: Switch build server` command from VSCode command palette. - -## Configuring the debugger - -To configure the debugger in VSCode, you can go to the `Run and Debug` view and click `create a launch.json file`. -It creates the `launch.json` file in the `.vscode` folder, in which we will define the debug configurations. 
- -![Create launch.json file](/images/contribution/launch-config-file.jpg) - -To create a debug configuration: -- Open the `.vscode/launch.json` file -- Click the `Add Configuration` button -- Go down the list of templates and select `Scala: Run main class` - -![Create configuration](/images/contribution/create-config.jpg) - -The added configuration should look like this: -```json -{ - "type": "scala", - "request": "launch", - "name": "Untitled", - "mainClass": "???", - "args": [], - "jvmOptions": [], - "env": {} -} -``` - -This is a template that you need to fill out. -First You can give a `name` to your configuration, for instance `Debug Scala 3 Compiler`. - -The two most important parameters, to debug the compiler, are `mainClass` and `args`. -The `mainClass` of the compiler is `dotty.tools.dotc.Main`. -In the `args` you need to specify the compiler arguments, which must contain at least a Scala file to compile and a `-classpath` option. - -To start with, we can compile the `../tests/pos/HelloWorld.scala` file. -In the classpath, we always need at least the `scala-library_2.13` and the bootstrapped `scala3-library_3`. -To locate them on your filesystem you can run the `export scala3-library-bootstrapped/fullClasspath` command in sbt. - -``` -$ sbt -> export scala3-library-bootstrapped/fullClasspath -/home/user/lampepfl/dotty/out/bootstrap/scala3-library-bootstrapped/scala-3.3.1-RC1-bin-SNAPSHOT-nonbootstrapped/classes:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/scala-lang/scala-library/2.13.10/scala-library-2.13.10.jar -[success] Total time: 1 s, completed Mar 10, 2023, 4:37:43 PM -``` - -Note that it is important to use the bootstrapped version of the `scala3-library` to get the correct TASTy version. - -Additionally you can add the `-color` and `never` arguments to prevent the compiler from printing ANSI codes as strings in the debug console. - -Here is the final configuration: -```json -{ - "type": "scala", - "request": "launch", - "name": "Debug Scala 3 Compiler", - "mainClass": "dotty.tools.dotc.Main", - "args": [ - "../tests/pos/HelloWorld.scala", - "-classpath", - // To replace with your own paths - "/home/user/lampepfl/dotty/out/bootstrap/scala3-library-bootstrapped/scala-3.3.1-RC1-bin-SNAPSHOT-nonbootstrapped/classes:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/scala-lang/scala-library/2.13.10/scala-library-2.13.10.jar", - "-color", - "never" - ], - "jvmOptions": [], - "env": {} -} -``` - -## Customizing the debug configurations - -### Compiling several files at once - -You can compile more than one Scala file, by adding them in the `args`: -```json -"args": [ - "file1.scala", - "file2.scala", - "-classpath", - "/home/user/lampepfl/dotty/out/bootstrap/scala3-library-bootstrapped/scala-3.3.1-RC1-bin-SNAPSHOT-nonbootstrapped/classes:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/scala-lang/scala-library/2.13.10/scala-library-2.13.10.jar" -] -``` - -### Depending on a library - -To add a dependency to an external library you need to download it and all its transitive dependencies, and to add them in the classpath. -The Coursier CLI can help you to do that. 
-For instance to add a dependency to cats you can run: -``` -$ cs fetch org.typelevel::cats-core:2.+ --classpath --scala-version 3 --exclude org.scala-lang:scala-library --exclude org.scala-lang:scala3-library -/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/typelevel/cats-core_3/2.9.0/cats-core_3-2.9.0.jar:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/typelevel/cats-kernel_3/2.9.0/cats-kernel_3-2.9.0.jar -``` - -And concatenate the output into the classpath argument, which should already contain the scala-library_2.13 and the bootstrapped scala3-library: - -```json -"args": [ - "using-cats.scala", - "-classpath", - "/home/user/lampepfl/dotty/out/bootstrap/scala3-library-bootstrapped/scala-3.3.1-RC1-bin-SNAPSHOT-nonbootstrapped/classes:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/scala-lang/scala-library/2.13.10/scala-library-2.13.10.jar:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/typelevel/cats-core_3/2.9.0/cats-core_3-2.9.0.jar:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/typelevel/cats-kernel_3/2.9.0/cats-kernel_3-2.9.0.jar" -] -``` - -### Add more compiler options - -In the `args` you can add any additional compiler option you want. - -For instance you can add `-Xprint:all` to print all the generated trees after each mega phase. - -Run `scalac -help` to get an overview of the available compiler options. - -### Defining more than one launch configuration - -You can create as many debug configurations as you need: to compile different files, with different compiler options or different classpaths. - -## Starting the debugger - -Before starting the debugger you need to put a breakpoint in the part of the code that you want to debug. -If you don't know where to start, you can put a breakpoint in the `main` method of the `dotty.tools.dotc.Driver` trait. - -![First breakpoint](/images/contribution/breakpoint.jpg) - -Now to start the debugger, open the debug view, find the drop-down list of all the debug configurations and click on yours. -The debugger should start and pause on your breakpoint. - -![Start debugger](/images/contribution/start-debugger.jpg) - -## Using the debugger - -### Navigating the call stack - -When the debugger has paused, you can see the current call stack in the `Debug and Run` view. -Each frame of the call stack contains different variables, whose values you can see in the `Variables` section of the `Debug and Run` view. - -![Call stack](/images/contribution/call-stack.jpg) - -Analysing the call stack and the variables can help you understand the path taken by the compiler to reach that state. - -### The debugging steps - -The debug toolbar contains the `Continue / Pause`, `Step Over`, `Step Into`, `Step Out`, `Restart` and `Stop` buttons. - -![Debugging steps](/images/contribution/toolbar.jpg) - -You can use the step buttons to execute the code step by step and get a precise understanding of the program. - -### The debug console - -When the debugger has paused, you can evaluate any Scala 3 expression in the debug console. -This is useful to inspect some values or to execute some parts of the code. - -For instance, you can evaluate `tree.show` to pretty-print a tree. - -![Import build](/images/contribution/debug-console.jpg) - -### Conditional breakpoints - -In a breakpoint you can define a condition, in the form of a Boolean expression written in Scala. -The program will stop on the breakpoint as soon as the condition is met. 
-
-To add a condition, right-click on a breakpoint and pick `Edit breakpoint...`.
-
-For instance, if you know that a bug happens on typing a method `foo`, you can use the condition `tree.symbol.name.show == "foo"` in a breakpoint in the `Typer`.
-
-![Import build](/images/contribution/conditional-breakpoint.jpg)
-
-[lampepfl/dotty]: https://github.com/lampepfl/dotty
diff --git a/docs/_docs/contributing/issues/diagnosing-your-issue.md b/docs/_docs/contributing/issues/diagnosing-your-issue.md
new file mode 100644
index 000000000000..d6f2e675c1a3
--- /dev/null
+++ b/docs/_docs/contributing/issues/diagnosing-your-issue.md
@@ -0,0 +1,17 @@
+---
+layout: doc-page
+title: Diagnosing your issue
+---
+
+This section is meant to help you diagnose and reproduce the actual
+issue you're trying to fix. First, you'll need to reproduce the issue,
+so that
+
+- you can understand its cause
+- you can verify that any changes made to the codebase have a positive impact on the issue.
+
+The following sections will help you out with this:
+
+- [Reproducing an Issue](./reproduce.md)
+- [Finding the Cause of an Issue](./cause.md)
+- [Common Issue Locations](./areas.md)
diff --git a/docs/_docs/contributing/issues/efficiency.md b/docs/_docs/contributing/issues/efficiency.md
deleted file mode 100644
index 07307646a4bb..000000000000
--- a/docs/_docs/contributing/issues/efficiency.md
+++ /dev/null
@@ -1,24 +0,0 @@
----
-layout: doc-page
-title: Improving Your Workflow
----
-
-In the previous sections of this chapter, you saw some techniques for
-working with the compiler. Some of these techniques can be used
-repetitively, e.g.:
-
-- Navigating stack frames
-- Printing variables in certain ways
-- Instrumenting variable definitions with tracers
-
-The above procedures often take a lot of time when done manually, reducing productivity:
-as the cost (in terms of time and effort) is high, you may avoid attempting to do so,
-and possibly miss valuable information.
-
-If you're doing those things really frequently, it is recommended to script your editor
-to reduce the number of steps. E.g. navigating to the definition of a stack frame
-part when you click it, or instrumenting variables for printing.
-
-An example of how it is done for Sublime Text 3 is [here](https://github.com/anatoliykmetyuk/scala-debug-sublime).
-
-True, it takes some time to script your editor, but if you spend a lot of time with issues, it pays off.
diff --git a/docs/_docs/contributing/issues/index.md b/docs/_docs/contributing/issues/index.md
deleted file mode 100644
index db348d7edd9d..000000000000
--- a/docs/_docs/contributing/issues/index.md
+++ /dev/null
@@ -1,17 +0,0 @@
----
-layout: index
-title: Finding the Cause of an Issue
----
-
-An issue found in the [GitHub repo][lampepfl/dotty] usually describes some code that
-manifests undesired behaviour.
-
-This chapter of the guide describes the different steps to contribute to Dotty:
-- [Reproducing an Issue](./reproduce.md)
-- [Finding the Cause of an Issue](./cause.md)
-- [Debugging the Compiler](./debugging.md)
-- [Other debugging techniques](./other-debugging.md)
-- [Inspect the values](./inspection.md)
-- [Improving your workflow](./efficiency.md)
-- [Testing a Fix](./testing.md)
-- [Checklist](./checklist.md)
diff --git a/docs/_docs/contributing/issues/reproduce.md b/docs/_docs/contributing/issues/reproduce.md
index 41d96327ef24..ca5da324a867 100644
--- a/docs/_docs/contributing/issues/reproduce.md
+++ b/docs/_docs/contributing/issues/reproduce.md
@@ -1,24 +1,61 @@
 ---
 layout: doc-page
 title: Reproducing an Issue
+redirectFrom: /docs/contributing/workflow/reproduce.html
 ---
 
-To try fixing it, you will first need to reproduce the issue, so that
-- you can understand its cause
-- you can verify that any changes made to the codebase have a positive impact on the issue.
+The first step is to reproduce the issue you're trying to fix. Say you want to
+reproduce issue [#7710] locally: you would first copy the code from the
+*"Minimised Code"* section of the issue to a file named e.g.
+`local/i7710.scala`, and then try to compile it from the sbt console opened in
+the dotty root directory:
 
-Say you want to reproduce locally issue [#7710], you would first copy the code from the *"Minimised Code"*
-section of the issue to a file named e.g. `local/i7710.scala`,
-and then try to compile it from the sbt console opened in the dotty root directory:
+
 ```bash
 $ sbt
-sbt:scala3> scala3/scalac -d local/out local/i7710.scala
+sbt:scala3> scalac -d local/out local/i7710.scala
 ```
 > Here, the `-d` flag specifies a directory `local/out` where generated code will be output.
 
-You can then verify that the local reproduction has the same behaviour as originally reported in the issue.
-If so, then you can start to try and fix it. Otherwise, perhaps the issue is out of date, or
-is missing information about how to accurately reproduce the issue.
+You can then verify that the local reproduction has the same behaviour as
+originally reported in the issue. If so, then you can start to try and fix it.
+Otherwise, perhaps the issue is out of date, or is missing information about how
+to accurately reproduce the issue.
+
+## Compiling files with scalac
+
+Let's take a deeper look at this `scalac` command we just used. As we have seen,
+you can compile a test file either from sbt:
+
+```bash
+$ sbt
+> scalac <OPTIONS> <FILE>
+```
+
+in the same way that you could from the terminal:
+
+```bash
+$ scalac <OPTIONS> <FILE>
+```
+
+Here are some useful debugging `<OPTIONS>`:
+
+* `-Xprint:PHASE1,PHASE2,...` or `-Xprint:all`: prints the `AST` after each
+  specified phase. Phase names can be found by examining the
+  `dotty.tools.dotc.transform.*` classes for their `phaseName` field e.g., `-Xprint:erasure`.
+  You can discover all phases in the `dotty.tools.dotc.Compiler` class
+* `-Ylog:PHASE1,PHASE2,...` or `-Ylog:all`: enables `ctx.log("")` logging for
+  the specified phase.
+* `-Ycheck:all` verifies the consistency of `AST` nodes between phases, in
+  particular checks that types do not change. Some phases currently can't be
+  `Ycheck`ed, therefore in the tests we run:
+  `-Ycheck:tailrec,resolveSuper,mixin,restoreScopes,labelDef`.
+* the last frontier of debugging (before actual debugging) is the range of logging capabilities that
+can be enabled through the `dotty.tools.dotc.config.Printers` object. Change any of the desired printers from `noPrinter` to
+`default` to get the full logging capability of the compiler.
+
+You may also want to further inspect the types of a piece of code to verify the
+AST. Check out the section on [How to Inspect
+Values](../debugging/inspection.md) for a detailed guide on this.
 
 ## Dotty Issue Workspace
 
@@ -85,24 +122,25 @@ the dollar notation: `$1` for the first argument, `$2` for the second and so on.
 
 ### Multiline Commands
 
-Inside a `launch.iss` file, one command can be spread accross multiple lines. For example,
+Inside a `launch.iss` file, one command can be spread across multiple lines. For example,
 if your command has multiple arguments, you can put each argument on a new line.
 
-Multiline commands can even have comments inbetween lines. This is useful
+Multiline commands can even have comments in-between lines. This is useful
 if you want to try variants of a command with optional arguments (such as configuration).
 You can put the optional arguments on separate lines, and then decide when they are passed to
 the command by placing `#` in front to convert it to a comment (i.e. the argument will not be passed).
 This saves typing the same arguments each time you want to use them.
 
-The following `launch.iss` file is an example of how you can use multiline commands as a
-template for solving issues that [run compiled code](../issues/testing.md#checking-program-output). It demonstrates configuring the
-`scala3/scalac` command using compiler flags, which are commented out.
-Put your favourite flags there for quick usage.
+The following `launch.iss` file is an example of how you can use multiline
+commands as a template for solving issues that [run compiled
+code](../testing.md#checking-program-output). It demonstrates configuring
+the `scalac` command using compiler flags, which are commented out. Put
+your favourite flags there for quick usage.
 
 ```bash
 $ (rm -rv out || true) && mkdir out # clean up compiler output, create `out` dir.
 
-scala3/scalac # Invoke the compiler task defined by the Dotty sbt project
+scalac # Invoke the compiler task defined by the Dotty sbt project
 -d $here/out # All the artefacts go to the `out` folder created earlier
 # -Xprint:typer # Useful debug flags, commented out and ready for quick usage. Should you need one, you can quickly access it by uncommenting it.
 # -Ydebug-error
 # -Ydetailed-stats
 # -Yshow-suppressed-errors
 # -Xprof
 # -Ycheck:all
 $here/$1.scala # Invoke the compiler on the file passed as the second argument to the `issue` command. E.g. `issue foo Hello` will compile `Hello.scala` assuming the issue folder name is `foo`.
 
-scala3/scala -classpath $here/out Test # Run main method of `Test` generated by the compiler run.
+scala -classpath $here/out Test # Run main method of `Test` generated by the compiler run.
 ```
 
 ## Conclusion
@@ -124,4 +162,4 @@ how to try and detect its root cause.
 [lampepfl/dotty]: https://github.com/lampepfl/dotty/issues
 [#7710]: https://github.com/lampepfl/dotty/issues/7710
 [dotty-issue-workspace]: https://github.com/anatoliykmetyuk/dotty-issue-workspace
-[workspace-readme]: https://github.com/anatoliykmetyuk/dotty-issue-workspace#getting-started
\ No newline at end of file
+[workspace-readme]: https://github.com/anatoliykmetyuk/dotty-issue-workspace#getting-started
diff --git a/docs/_docs/contributing/procedures/index.md b/docs/_docs/contributing/procedures/index.md
index db2b09dbe80f..c614a0a88fd4 100644
--- a/docs/_docs/contributing/procedures/index.md
+++ b/docs/_docs/contributing/procedures/index.md
@@ -4,5 +4,4 @@ title: Procedures
 ---
 
 This chapter of the guide describes:
-- [How to release a procedure](./release.md)
-- [How to test the vulpix framework](./vulpix.md)
\ No newline at end of file
+- [How to test the vulpix framework](./vulpix.md)
diff --git a/docs/_docs/contributing/procedures/release.md b/docs/_docs/contributing/procedures/release.md
index 8f66f8854d4c..c54bb637aff5 100644
--- a/docs/_docs/contributing/procedures/release.md
+++ b/docs/_docs/contributing/procedures/release.md
@@ -4,7 +4,7 @@ title: Release Procedure
 ---
 
 # Model
-The easiest way to produce a release of a GitHub-based open-source software is to tag the most recent commit on the `master` with the version number at regular intervals of time or once a previously agreed milestone is reached. However, this approach to releasing would rest on the assumption that each commit at the `master` branch can potentially be made into a release. We cannot provide the release-grade quality guarantees for each of the `master` commits, though.
+The easiest way to produce a release of GitHub-based open-source software is to tag the most recent commit on `main` with the version number at regular intervals or once a previously agreed milestone is reached. However, this approach to releasing would rest on the assumption that each commit on the `main` branch can potentially be made into a release. We cannot provide release-grade quality guarantees for every `main` commit, though.
 
 Consequently, in Dotty, we are using the method above – releasing-by-tagging – to mark release candidates (RC’s) and not the stable releases. The stable releases are also marked by a tag, but we have a procedure to assure their quality.
 
@@ -16,12 +16,12 @@ If such issues are found, their fixes end up on a separate branch dedicated to t
 Say we want to release the 0.14.0 version. In this section we describe the process to do so (at a glance).
 
 ## At the Dotty Repo
-1. Tag the latest `master` commit as `0.14.0-RC1`. This commit is the release candidate for the `0.14.0` version.
+1. Tag the latest `main` commit as `0.14.0-RC1`. This commit is the release candidate for the `0.14.0` version.
 2. Create a branch from that commit called `0.14.x`. This branch is intended to host the subsequent fixes to the RC for the issues that cannot be allowed in the `0.14.0` stable release.
 3. Up until the next release date, if we find some issues with `0.14.0-RC1` that cannot end up in the release, we push the fixes to the `0.14.x` branch.
 4. At the next release date, we release `0.14.0` from the branch `0.14.x`. We do so by tagging the latest commit at the `0.14.x` branch as `0.14.0`. Some things to note here:
-  1. At this point, `0.14.x` (the branch) and `0.14.0-RC1` (the tag at which `0.14.x` branched from `master`) may differ, and the `0.14.x` branch is a more mature version of the `0.14.0-RC1` tag.
-  2. `0.14.0` is not the same as the `master`. Only the commits critical for the `0.14.0` release end up at `0.14.x` branch. Not all of the commits made to the `master` during the release cycle are critical to `0.14.0`. However, all of the commits from `0.14.x` must end up on the `master` branch, so we merge `0.14.x` into `master`.
+  1. At this point, `0.14.x` (the branch) and `0.14.0-RC1` (the tag at which `0.14.x` branched from `main`) may differ, and the `0.14.x` branch is a more mature version of the `0.14.0-RC1` tag.
+  2. `0.14.0` is not the same as `main`. Only the commits critical for the `0.14.0` release end up on the `0.14.x` branch. Not all of the commits made to `main` during the release cycle are critical to `0.14.0`. However, all of the commits from `0.14.x` must end up on the `main` branch, so we merge `0.14.x` into `main`.
 5. After the `0.14.0` version is released, we start the process for releasing `0.15.0` – repeat this algorithm from the beginning with the version set to `0.15.0-RC1` at step (1).
 
 ## At the CI
@@ -37,24 +37,24 @@ The CI operation is entirely automatic provided you have tagged the release corr
 Note that after the first stage of the release cycle (see "Publishing artifacts to Maven via CI" section of the checklist below) only three test runs are required to be run at the CI:
 
-- `master` branch's latest *commit* with the updated `baseVersion`
+- `main` branch's latest *commit* with the updated `baseVersion`
 - `<stable-version>` *tag* of the stable version being released
 - `<rc-version>` *tag* of the RC version being released
 
 However, you may end up with as many as 6 tasks being run. The auxiliary tasks may include:
 
 - *commit* tests of the *tags* specified above. You may have two of these, corresponding to the two tags. You should see them appearing to have the same commit hash in the CI, but one of them will have the tag next to it and the other one will not. The *tag* one must remain, as the CI tasks on tags publish to maven. CI tasks on commits do not. So it is safe to cancel the task running on the commit, if the commit hash is the same as that of the tag's task commit.
-- Older commit from the `master` branch. Look for all the tasks run on the `master` branch in the CI and see if there are more than one of these. Then, find the one testing the most recent commit of the branch. The others can safely be canceled.
+- Older commit from the `main` branch. Look for all the tasks run on the `main` branch in the CI and see if there are more than one of these. Then, find the one testing the most recent commit of the branch. The others can safely be canceled.
 
 ## Documentation
 
 ### Release Procedure Checklist
 Before we start the release procedure, we create an issue with a release checklist. As we go through the release, we update the checklist. To generate the checklist, run the following command:
 
-`bash <(curl -sL https://raw.githubusercontent.com/lampepfl/dotty/master/docs/docs/contributing/checklist.sh) <stable-version>`
+`bash <(curl -sL https://raw.githubusercontent.com/lampepfl/dotty/main/docs/docs/contributing/checklist.sh) <stable-version>`
 
 Above, `<stable-version>` is the stable version being released. For example, if you are releasing `0.14.0` and `0.15.0-RC1`, this variable is `14` and the command is as follows:
 
-`bash <(curl -sL https://raw.githubusercontent.com/lampepfl/dotty/master/docs/docs/contributing/checklist.sh) 14`
+`bash <(curl -sL https://raw.githubusercontent.com/lampepfl/dotty/main/docs/docs/contributing/checklist.sh) 14`
 
 Copy and paste the output into the release issue.
@@ -65,49 +65,15 @@ When no criteria is specified, common sense is to be used. ### GitHub Releases and Blog Post After the release is done, we document it as follows: -- On the GitHub release page, modify the release drafts created by CI. The RC draft should include notable changes introduced since the previous RC. E.g. for `0.14.0-RC1` these are generated by `gren changelog -G --override -D prs --tags=0.13.0-RC1..0.14.0-RC1`. `gren` is available [here](https://github.com/github-tools/github-release-notes), and before running the above command, please make sure that (1) the `origin` branch points to the `lampepfl/dotty` repository and (2) the two tags mentioned in the command are pushed to the `master` branch of that repo. Otherwise, the command won't pick up the tags. +- On the GitHub release page, modify the release drafts created by CI. The RC draft should include notable changes introduced since the previous RC. E.g. for `0.14.0-RC1` these are generated by `gren changelog -G --override -D prs --tags=0.13.0-RC1..0.14.0-RC1`. `gren` is available [here](https://github.com/github-tools/github-release-notes), and before running the above command, please make sure that (1) the `origin` branch points to the `lampepfl/dotty` repository and (2) the two tags mentioned in the command are pushed to the `main` branch of that repo. Otherwise, the command won't pick up the tags. - Create a blog article documenting the most important changes done by the release. ## Ecosystem -After releasing a new version of Dotty, we need to make sure to update the following related projects: - -- [Example Project](https://github.com/scala/scala3-example-project) - - To deploy locally: `git clone https://github.com/scala/scala3-example-project.git && cd dotty-example-project/` - - To test locally: `sbt run` - - [Commit](https://github.com/scala/scala3-example-project/commit/76bf0b4d708206b1901fa7f291f07cd470506e79) updating the Dotty version (only `README` and `build.sbt` files) -- [Example Project with Mill](https://github.com/scala/scala3-example-project/tree/mill) - - Deploy: `git clone https://github.com/scala/scala3-example-project && cd dotty-example-project && git checkout mill` - - Test: `mill root.run` - - [Commit 1](https://github.com/scala/scala3-example-project/commit/e1ad1905ef38d07943e0c176333ba24e306a2078) – `build.sc` only; [Commit 2](https://github.com/scala/scala3-example-project/commit/23bc5df89e72b782ab8e19157d6bbcb67eef30cd) -- [Dotty G8 template](https://github.com/scala/scala3.g8) - - Deploy: `git clone https://github.com/scala/scala3.g8.git` - - Test (note the relative path as argument to `sbt new`, hence this command should be run after Deploy in the same directory as Deploy): `sbt new file://./dotty.g8 --name=foo --description=bar && cd foo && sbt run` - - [Commit](https://github.com/scala/scala3.g8/commit/0cde8fa843e15e916f07f22a196f35a5988b26af) -- [Dotty G8 template with cross build support](https://github.com/scala/scala3-cross.g8) - - Deploy: `git clone https://github.com/scala/scala3-cross.g8.git` - - Test: `sbt new file://./dotty-cross.g8 --name=foo --description=bar && cd foo && sbt run` - - [Commit](https://github.com/scala/scala3-cross.g8/commit/0e3ea2ae8ba8d001e63e5098ff60d728036d358f) -- [Dotty Homebrew Formula](https://github.com/lampepfl/homebrew-brew) - - Deploy: `git clone https://github.com/lampepfl/homebrew-brew.git && cd homebrew-brew` - - [Commit](https://github.com/lampepfl/homebrew-brew/commit/04f7284564387754a360a354159f2f8d6156a6c7). 
SHA256 sum comes from the issue checklist computed for the release as specified above. The file with checksums is available at [GitHub Releases](https://github.com/lampepfl/dotty/releases) -> release in question -> assets -> `sha256sum.txt` -> `*.tar.gz` file sum.
-- [Dotty test under various OSs](https://github.com/lampepfl/packtest)
-  - Deploy: `git clone https://github.com/lampepfl/packtest.git && cd packtest`
-  - [Commit](https://github.com/lampepfl/packtest/commit/6d3edf7333e0e788af7c4f4ab976b56905ddf9ed)
-- [Scastie](https://github.com/scalacenter/scastie/)
-  - Deploy (copy-paste the command and append the release id, e.g. `15` for `0.15.0-RC1`): `git clone https://github.com/scalacenter/scastie.git && cd scastie && git remote add staging https://github.com/dotty-staging/scastie && git checkout -b dotty-release-`
-  - [PR](https://github.com/scalacenter/scastie/pull/433) – push your changes to `staging` repo (as defined in "Deploy" above) with `git push -u staging`, then submit the PR from there.
-- [Dotty](https://github.com/lampepfl/dotty/)
-  - If you are reading this, Dotty should already be deployed on your machine :)
-  - [PR](https://github.com/lampepfl/dotty/pull/6557)
-- [Scalac](https://github.com/scala/scala)
-  - Deploy: `git clone https://github.com/scala/scala.git && cd scala && git remote add staging https://github.com/dotty-staging/scala && git checkout -b dotty-release-`
-  - [PR](https://github.com/scala/scala/pull/7993)
-
-For each need to do the following:
-
-- Update Dotty version to the latest RC
-- Update the sbt-dotty SBT plugin version to the latest published one
-- Update the projects' source code to follow the Dotty developments if necessary
+
+During the release process we ensure that various parts of the community are
+also prepared for the new version of Scala so that users can hit the ground
+running when the new release is announced. You can see an example of this
+[here](https://github.com/lampepfl/dotty/issues/17559).
 
 # Procedure in Bash Scripts
 
 The below procedure is compiled from [this](https://github.com/lampepfl/dotty/issues/5907#issue-409313505) and [this](https://github.com/lampepfl/dotty/issues/6235#issue-429265748) checklists. It assumes we want to publish the `0.14.0` given the `0.14.0-RC1` release candidate.
 
@@ -128,11 +94,11 @@ git commit -am 'Release Dotty 0.14.0'
 git tag 0.14.0
 git push origin 0.14.0
 
-git checkout master
+git checkout main
 git merge 0.14.x # Make sure the merge doesn't break anything. If in doubt, create a PR to run the CI
 
-git push origin master
+git push origin main
 
 ######## Publish the 0.15.0-RC1 unstable version – begin the release cycle for 0.15.0 ########
 # Move all the unfinished tasks from Milestone 15 to Milestone 16 on GitHub – see https://github.com/lampepfl/dotty/milestones
 
@@ -146,10 +112,10 @@ git tag 0.15.0-RC1
 git push origin 0.15.x
 git push origin 0.15.0-RC1
 
-git checkout master
+git checkout main
 # Change val baseVersion = "0.15.0" to val baseVersion = "0.16.0" - this will be the next version after `0.15.0-RC1` is promoted to `0.15.0`.
git commit -am 'Set baseVersion to 0.16.0'
 
-git push origin master
-```
\ No newline at end of file
+git push origin main
+```
diff --git a/docs/_docs/contributing/scala2-vs-scala3.md b/docs/_docs/contributing/scala2-vs-scala3.md
deleted file mode 100644
index b92cbe2381ea..000000000000
--- a/docs/_docs/contributing/scala2-vs-scala3.md
+++ /dev/null
@@ -1,43 +0,0 @@
----
-layout: doc-page
-title: Divergences between Scala 2 and Dotty
----
-
-# Divergences between Scala 2 and Dotty
-The following issues encountered when compiling Scala 2 code as-is under Dotty:
-
-## Scalafix candidates
-- If a method is defined `toSet()`, it cannot be called `toSet`.
-- “result type of implicit definition needs to be given explicitly”
-- There are no `'Symbol`s in Scala 3, you must construct symbols via `new Symbol("foo")` instead of old `'foo`
-
-## Trivial
-- Scala 2.13 libraries cannot be used from Dotty because the dotty-library is compiled against the 2.12 standard library which is not binary-compatible with the 2.13 one. We can't be compatible with both at the same time.
-- To use Scala 2.12 dependencies from SBT with Dotty, use `withDottyCompat` as documented [here](https://github.com/scala/scala3-example-project#getting-your-project-to-compile-with-dotty).
-- Feature warnings about implicits `scala.language.implicitConversions` are output by default, unlike in Scala 2. This creates noise. Unclear how to turn off.
-
-Implicit conversions must be applied explicitly:
-
-```scala
-implicit def IterablePath[T](s: Iterable[T])(implicit conv: T => RelPath): RelPath = {
-  s.foldLeft(rel){_ / conv(_)}
-}
-```
-
-Stronger compile time guarantees on variance. Scala 2 does not assert variance on default parameters to parameters of the function value type. E.g. in geny:
-
-```scala
-# Dotty
-def count(f: A => Boolean = (a: A) => true): Int =
-|                          ^^^^^^^^^^^^^^
-|covariant type A occurs in contravariant position in type => A => Boolean of method count$default$1
-```
-
-Fix:
-```scala
-# Dotty
-def count[B >: A](f: B => Boolean = (_: B) => true): Int =
-```
-
-## Tricky
-- Scala 3 macros are completely different from Scala 2 ones, requires a migration strategy of its own
diff --git a/docs/_docs/contributing/scaladoc.md b/docs/_docs/contributing/scaladoc.md
new file mode 100644
index 000000000000..05bcc0166fb6
--- /dev/null
+++ b/docs/_docs/contributing/scaladoc.md
@@ -0,0 +1,143 @@
+---
+layout: doc-page
+title: Scaladoc
+---
+
+Scaladoc is the documentation tool for [Scala
+3](https://github.com/lampepfl/dotty). It uses the TastyInspector to access
+definitions, which is an officially supported way to access Scala 3's
+perspective of a codebase.
+
+We're aiming to support all the features the old Scaladoc did, plus new and exciting ones such as:
+
+- Markdown syntax!
+- displaying project and API documentation together on one site!
+- and more!
+
+**Yes, this page was generated using scaladoc**
+
+## Running the project
+
+Use the following commands to generate documentation for this project and for Dotty, respectively:
+
+```
+sbt scaladoc/generateSelfDocumentation
+sbt scaladoc/generateScalaDocumentation
+```
+
+To actually view the documentation, the easiest way is to run the following in the project root:
+
+```
+cd scaladoc/output
+python3 -m http.server 8080
+```
+
+And afterwards point your browser to the corresponding pages under
+`http://localhost:8080` for this project and for Dotty documentation
+respectively.
+
+It's not strictly necessary to go through an HTTP server, but because of CORS
+the documentation won't work completely if you don't.
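+
+If Python isn't available, any other static file server should work just as
+well. For example, with Node.js installed you could serve the same directory
+like this (a hypothetical alternative, not a project requirement):
+
+```
+npx http-server scaladoc/output -p 8080
+```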
+
+## CLI and sbt Documentation
+
+The preferred way to use scaladoc is to call it from the sbt `Compile/doc` task or to use the CLI interface provided by the `dotty/bin/scaladoc` bash script.
+You can find more information about specific scaladoc flags in the [Usage docs](https://docs.scala-lang.org/scala3/guides/scaladoc/settings.html)
+
+## Developing
+
+For every PR named with prefix `scaladoc/`, we build documentation for scaladoc and Dotty. For example, for
+PR `scaladoc/update-docs` you can find them at:
+
+- 
+- 
+- 
+
+Note that these correspond to the contents of `output` directory - that's
+precisely what they are.
+
+You can also find the result of building the same sites for latest `master` at:
+
+- 
+- 
+- 
+
+You can run the Scaladoc tool from the sbt prompt as a user would run it
+with `scaladoc/run`, passing `-d <output-directory>` and some `.tasty` files as
+input on the command line.
+
+### Testing
+
+From the sbt prompt, `scaladoc/test` runs the tests.
+
+You can also run specific signature tests with `testOnly`,
+for example `scaladoc/testOnly *scaladoc.signatures.MarkdownCode`.
+
+Most tests rely on comparing signatures (of classes, methods, objects etc.) extracted from the generated documentation
+to signatures found in source files (extracted using Scalameta). Such tests are defined using the [SignatureTest](test/dotty/tools/scaladoc/signatures/SignatureTest.scala) class
+and its subtypes (such as [TranslatableSignaturesTestCases](test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala)). In this style of test, it's not necessary for expected output to be included, because the test is its own specification.
+
+WARNING: As the classes mentioned above are likely to evolve, the description below might easily get out of date.
+In case of any discrepancies rely on the source files instead.
+
+`SignatureTest` requires that you specify the names of the files used to extract signatures,
+the names of directories containing corresponding TASTY files
+and the kinds of signatures from source files (corresponding to keywords used to declare them like `def`, `class`, `object` etc.)
+whose presence in the generated documentation will be checked (other signatures, when missing, will be ignored).
+The mentioned source files should be located directly inside the [`scaladoc-testcases/src/tests`](../scaladoc-testcases/src/tests) directory
+but the file names passed as parameters should contain neither this path prefix nor the `.scala` suffix.
+
+By default it's expected that all signatures from the source files will be present in the documentation
+but not vice versa (because the documentation can also contain inherited signatures).
+To validate that a signature present in the source does not exist in the documentation
+(because it should be hidden from users) add an `//unexpected` comment after the signature in the same line.
+This will cause an error if a signature with the same name appears in the documentation
+(even if some elements of the signature are slightly different - to avoid accidentally passing tests).
+If the signature in the documentation is expected to slightly differ from how it's defined in the source code
+you can add a `//expected: ` comment (also in the same line and followed by a space) followed by the expected signature.
+Alternatively you can use `/*<-*/` and `/*->*/` as opening and closing delimiters for parts of a signature present in the source but undesired in the documentation (at least at the current stage of development), e.g.
+
+```scala
+def foo/*<-*/()/*->*/: Int
+```
+
+will make the expected signature be
+
+```scala
+def foo: Int
+```
+
+instead of
+
+```scala
+def foo(): Int
+```
+
+Because of the way signatures in source are parsed, they're expected to span
+until the end of a line (including comments except those special ones mentioned
+above, which change the behaviour of tests) so if a definition contains an
+implementation, it should be placed in a separate line, e.g.
+
+```scala
+def foo: Int
+  = 1
+
+class Bar
+{
+  //...
+}
+```
+
+Otherwise the implementation would be treated as a part of the signature.
+
+## FAQ
+
+### Why use TASTy?
+
+A documentation tool needs to access compiler information about the project - it
+needs to list all definitions, resolve them by name, and query their members.
+Tasty Reflect is the dedicated way in Scala 3 of accessing this information.
+
+## Credits
+
+- [Flatart](https://www.iconfinder.com/Flatart) - Gitter icon
diff --git a/docs/_docs/contributing/sending-in-a-pr.md b/docs/_docs/contributing/sending-in-a-pr.md
new file mode 100644
index 000000000000..c99e6a28172b
--- /dev/null
+++ b/docs/_docs/contributing/sending-in-a-pr.md
@@ -0,0 +1,170 @@
+---
+layout: doc-page
+title: Sending in a pull request
+redirectFrom: /docs/contributing/workflow/checklist.html
+---
+
+Once you've solved the issue you were working on, you'll likely want to see your
+changes added to the [Scala 3 repo][lampepfl/dotty]. To do that, you need to
+prepare a [pull request][pull-request] with your changes. Assuming that the team
+is aware of what you've been working on, here are some final steps that you'll want
+to keep in mind as you create your PR.
+
+### 1: Sign the CLA
+
+Make sure you have signed the [Scala CLA][cla]. If you have any questions about
+what this is and why it's required you can read further about it [here][cla].
+
+### 2: Make sure your work is on its own branch
+
+When submitting your pull request it's always best to ensure the branch name is
+unique to the changes you're working on. It's important not to submit your PR on
+your `main` branch as this blocks maintainers from making any changes to your PR
+if necessary.
+
+### 3: Add Tests
+
+Add at least one test that replicates the problem in the issue, and that shows it is now resolved.
+
+You may of course add variations of the test code to try and eliminate edge cases.
+[Become familiar with testing in Scala 3](./testing.md).
+
+### 4: Add Documentation
+
+Please ensure that all code is documented to explain its use, even if only internal
+changes are made. This refers to scaladocs and also any changes that might be
+necessary in the reference docs.
+
+### 5: Double check everything
+
+Here are a couple of tips to keep in mind.
+
+- [DRY (Don't Repeat Yourself)][dry]
+- [Scouts Rule][scouts]
+- When adding new code try to use [optional braces][optional-braces]. If you're rewriting old code,
+  you should also use optional braces unless it introduces more code changes
+  than necessary.
+
+### 6: Commit Messages
+
+Here are some guidelines when writing commits for Dotty.
+
+1. If your work spans multiple local commits (for example, if you do safe-point
+   commits while working in a feature branch or work in a branch for a long time
+   doing merges/rebases etc.) then please do not commit it all but rewrite the
+   history by squashing the commits into one large commit which is accompanied
+   by a detailed commit message (as discussed in the following sections).
+   For more info, see the article: [Git Workflow][git-workflow]. Additionally,
+   every commit should be able to be used in isolation—that is, each commit must
+   build and pass all tests.
+
+2. The first line should be a descriptive sentence about what the commit is
+   doing. It should be possible to fully understand what the commit does by just
+   reading this single line. It is **not ok** to only list the ticket number,
+   type "minor fix" or similar. If the commit has a corresponding ticket,
+   include a reference to the ticket number, prefixed with "Closes #", at the
+   beginning of the first line followed by the title of the ticket, assuming
+   that it aptly and concisely summarizes the commit in a single line. If the
+   commit is a small fix, then you are done. If not, go to 3.
+
+3. Following the single line description (ideally no more than 70 characters
+   long) should be a blank line followed by an enumerated list with the details
+   of the commit.
+
+4. Add keywords for your commit (depending on the degree of automation we reach,
+   the list may change over time):
+   * ``Review by @githubuser`` - will notify the reviewer via GitHub. Everyone
+     is encouraged to give feedback, however. (Remember that @-mentions will
+     result in notifications also when pushing to a WIP branch, so please only
+     include this in your commit message when you're ready for your pull
+     request to be reviewed. Alternatively, you may request a review in the
+     pull request's description.)
+   * ``Fix/Fixing/Fixes/Close/Closing/Refs #ticket`` - if you want to mark the
+     ticket as fixed in the issue tracker (Assembla understands this).
+   * ``backport to _branch name_`` - if the fix needs to be cherry-picked to
+     another branch (like 2.9.x, 2.10.x, etc)
+
+Example:
+
+```
+fix: here is your pr title briefly mentioning the topic
+
+Here is the body of your pr with some more information
+ - Details 1
+ - Details 2
+ - Details 3
+
+Closes #2
+```
+
+#### Skipping parts of CI
+
+Depending on what your PR is addressing, sometimes it doesn't make sense to run
+every part of CI. For example, maybe you're just updating some documentation and
+there is no need to run the community build for this. We skip parts of the CI by
+utilizing keywords inside brackets. The most up-to-date way to see these is
+by looking at the `if` statements of the jobs. For example you can see some
+[here](https://github.com/lampepfl/dotty/blob/5d2812a5937389f8a46f9e97ab9cbfbb3f298d87/.github/workflows/ci.yaml#L54-L64).
+Below are commonly used ones:
+
+| Keyword                   | Effect                                 |
+|---------------------------|----------------------------------------|
+| `[skip ci]`               | Skip the entire CI                     |
+| `[skip community_build]`  | Skip the entire community build        |
+| `[skip community_build_a]`| Skip the "a" community build           |
+| `[skip community_build_b]`| Skip the "b" community build           |
+| `[skip community_build_c]`| Skip the "c" community build           |
+| `[skip docs]`             | Skip the scaladoc tests                |
+| `[skip test]`             | Skip the unit tests                    |
+| `[skip test_windows_fast]`| Skip the unit tests subset on Windows  |
+| `[skip mima]`             | Skip the MiMa checks                   |
+| `[skip test_sbt]`         | Skip the SBT scripted tests            |
+
+
+#### Executing parts of the CI that are skipped on PRs
+Depending on what your PR is addressing, sometimes it doesn't make sense to run
+parts of the CI that usually only run on nightly builds.
+
+| Keyword                       | Effect                                                                     |
+|-------------------------------|----------------------------------------------------------------------------|
+| `[test_java8]`                | Execute unit tests on Java 8                                               |
+| `[test_windows_full]`         | Execute unit tests on Windows                                              |
+| `[test_non_bootstrapped]`     | Execute unit tests using non-bootstrapped compiler                         |
+| `[test_scala2_library_tasty]` | Execute unit tests using bootstrapped-compiler with Scala 2 library TASTy  |
+
+### 7: Create your PR!
+
+When the feature or fix is completed you should open a [Pull
+Request](https://help.github.com/articles/using-pull-requests) on GitHub.
+
+If you're not actually finished yet and are just looking for some initial input
+on your approach, feel free to open a [Draft PR][draft]. This lets reviewers
+know that you're not finished yet. It's also a good idea to put a [wip] in front
+of your PR title to make this extra clear.
+
+Shortly after creating your pull request a maintainer should assign someone to
+review it. If this doesn't happen after a few days, feel free to ping someone on
+the [Scala Contributors Discord][discord] or tag someone on the PR. Depending on
+the type of pull request there might be multiple people that take a look at your
+changes. There might also be community input as we try to keep the review
+process as open as possible.
+
+### 8: Addressing feedback
+
+More than likely you'll get feedback from the reviewers, so you'll want to make
+sure to address everything. When in doubt, don't hesitate to ask for
+clarification or more information.
+
+Once you finally see the "LGTM" (Looks Good To Me or Let's Get This Merged)
+your PR will be merged in!
+
+[pull-request]: https://docs.github.com/en?query=pull+requests
+[lampepfl/dotty]: https://github.com/lampepfl/dotty
+[cla]: http://typesafe.com/contribute/cla/scala
+[issues]: https://github.com/lampepfl/dotty/issues
+[full-list]: https://github.com/lampepfl/dotty/blob/master/CONTRIBUTING.md
+[discord]: https://discord.gg/TSmY9zkHar
+[dry]: https://www.oreilly.com/library/view/97-things-every/9780596809515/ch30.html
+[scouts]: https://www.oreilly.com/library/view/97-things-every/9780596809515/ch08.html
+[optional-braces]: https://docs.scala-lang.org/scala3/reference/other-new-features/indentation.html
+[draft]: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/about-pull-requests#draft-pull-requests
+[git-workflow]: http://sandofsky.com/blog/git-workflow.html
diff --git a/docs/_docs/contributing/setting-up-your-ide.md b/docs/_docs/contributing/setting-up-your-ide.md
new file mode 100644
index 000000000000..3bb7d329d50c
--- /dev/null
+++ b/docs/_docs/contributing/setting-up-your-ide.md
@@ -0,0 +1,56 @@
+---
+layout: doc-page
+title: Setting up your IDE
+---
+
+You can use either Metals with your favorite editor (VS Code, Neovim, Sublime)
+or [IntelliJ IDEA for
+Scala](https://www.jetbrains.com/help/idea/discover-intellij-idea-for-scala.html)
+to work on the Scala 3 codebase. There are however a few additional
+considerations to take into account.
+
+## Bootstrapping Projects
+
+The sbt build for dotty implements bootstrapping within the same build, so each component has
+two projects:
+
+```
+sbt:scala3> projects
+...
+[info] scala3-compiler
+[info] scala3-compiler-bootstrapped
+...
+```
+
+These duplicated projects can be confusing and cause issues in IDEs, so it's
+important to import the project in a specific way depending on your editor.
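+
+For instance, from the sbt shell you select a variant explicitly by project
+name; a minimal illustration (see [Testing Your Changes](./testing.md) for
+cases where the `-bootstrapped` variant is required):
+
+```
+sbt:scala3> scala3-compiler/compile
+sbt:scala3> scala3-compiler-bootstrapped/test
+```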
+ +### Metals + +When using Metals, the `-bootstrapped` projects are not exported by default. +Normally this is fine, but if you're working on certain modules like `scaladoc` +you'll actually want these modules exported. In order to achieve this you'll +want to make sure you do two things: + +1. You'll want to find and change the following under + `commonBootstrappedSettings` which is found in the + [`Build.scala`](https://github.com/lampepfl/dotty/blob/main/project/Build.scala) + file. + +```diff + +- bspEnabled := false, ++ bspEnabled := true, +``` + +2. Set `sbt` as your build server instead of the default, Bloop. You can achieve + this with the `Metals: Switch Build Server` command and then choosing sbt. In + VSCode, this looks like this: + +![bsp-switch](https://user-images.githubusercontent.com/777748/241986423-0724ae74-0ebd-42ef-a1b7-4d17678992b4.png) + + +### IntelliJ + +In IntelliJ IDEA, we recommend importing the dotty codebase through BSP, then +the `-bootstrapped` projects are not exported. diff --git a/docs/_docs/contributing/issues/testing.md b/docs/_docs/contributing/testing.md similarity index 84% rename from docs/_docs/contributing/issues/testing.md rename to docs/_docs/contributing/testing.md index 1f7c35c6d58a..f755bcafcd66 100644 --- a/docs/_docs/contributing/issues/testing.md +++ b/docs/_docs/contributing/testing.md @@ -1,6 +1,7 @@ --- layout: doc-page title: Testing Your Changes +redirectFrom: /docs/contributing/workflow/testing.html --- It is important to add tests before a pull request, to verify that everything is working as expected, @@ -39,7 +40,7 @@ of the `tests/` directory. A small selection of test categories include: - `tests/pos` – tests that should compile: pass if compiles successfully. - `tests/neg` – should not compile: pass if fails compilation. Useful, e.g., to test an expected compiler error. -- `tests/run` – these tests not only compile but are also run. +- `tests/run` – these tests not only compile but are also run. Must include at least a `@main def Test = ...`. ### Naming and Running a Test Case @@ -50,7 +51,7 @@ or be within a directory called `i101/` for a multi-file test. To run the test, invoke the sbt command `testCompilation i101` (this will match all tests with `"i101"` in the name, so it is useful to use a unique name) -The test groups – `pos`, `neg`, etc. – are defined in [CompilationTests]. If you want to run a group +The test groups – `pos`, `negAll`, etc. – are defined in [CompilationTests]. If you want to run a group of tests, e.g. `pos`, you can do so via `testOnly *CompilationTests -- *pos` command. ### Testing a Single Input File @@ -79,7 +80,7 @@ in a single run, this is called *joint compilation*. In this case use file names ### Checking Program Output `tests/run` tests verify the run-time behaviour of a test case. The output is checked by invoking a main method -on a class `Test`, this can be done with either +on a class `Test` (which is required to exist even if there is no checkfile), this can be done with either ```scala @main def Test: Unit = assert(1 > 0) ``` @@ -154,6 +155,34 @@ To run `testCompilation` on a bootstrapped Dotty compiler, use Some tests can only be run in bootstrapped compilers; that includes all tests with `with-compiler` in their name. +### Scala 2 library TASTy tests + +Usually we use the Scala 2 library JAR (with classfiles) generated by Scala 2. We can +also use a special version of the library that we generate with only TASTy files. This +JAR is added to the classpath before the Scala 2 library JAR. 
This allows the compiler
+to load the TASTy and the runtime to load the original classfiles.
+
+The library is compiled in `scala2-library-bootstrapped` with TASTy and classfiles.
+These classfiles should not be used. The `scala2-library-tasty` project repackages the
+JAR `scala2-library-bootstrapped` to only keep TASTy files. The `scala2-library-tasty-tests` project provides some basic tests using the library TASTy JAR.
+
+```
+$ sbt
+> scala2-library-tasty/compile
+> scala2-library-tasty-tests/run
+> scala2-library-tasty-tests/test
+```
+
+We can enable this library in the build using the SBT setting `useScala2LibraryTasty`. This setting can only be used by bootstrapped compiler tests and is currently only supported for `test` (or `testCompilation`) and `scalac` (or `run`).
+
+```
+$ sbt
+> set ThisBuild/Build.useScala2LibraryTasty := true
+> scala3-compiler-bootstrapped/scalac MyFile.scala
+> scala3-compiler-bootstrapped/test
+> scala3-compiler-bootstrapped/testCompilation
+```
+
 ### From TASTy tests
 
 `testCompilation` has an additional mode to run tests that compile code from a `.tasty` file.
@@ -199,6 +228,10 @@ sbt:scala3> scala3-compiler/Test/runMain
 
 then compare the changes via version control.
 
+### Scaladoc tests
+
+See the [Scaladoc contributors guide](./scaladoc.md).
+
 ## Troubleshooting
 
 Some of the tests depend on temporary state stored in the `out` directory. In rare cases, that directory
diff --git a/docs/_docs/contributing/tools/ide.md b/docs/_docs/contributing/tools/ide.md
deleted file mode 100644
index 5db333bee9d9..000000000000
--- a/docs/_docs/contributing/tools/ide.md
+++ /dev/null
@@ -1,37 +0,0 @@
----
-layout: doc-page
-title: Using an IDE
----
-
-You can use either Metals (VS Code, vim) or IntelliJ IDEA as described on the
-IDE sections of the [getting-started page](https://docs.scala-lang.org/scala3/getting-started.html) to work
-on the Scala 3 codebase. There are however a few additional considerations to take into account.
-
-
-## Bootstrapping Projects
-
-The sbt build for dotty implements bootstrapping within the same build, so each component has
-two projects:
-
-```
-sbt:scala3> projects
-...
-[info] scala3-compiler
-[info] scala3-compiler-bootstrapped
-...
-```
-
-These duplicated projects can be confusing and cause issues in IDEs.
-
-When using Metals, the `-bootstrapped` projects are not exported.
-
-In IntelliJ IDEA, we recommend importing the dotty codebase through BSP, then the `-bootstrapped`
-projects are not exported.
-
-
-## Scala Version warning in Metals
-
-When using VS Code, Metals might show a warning that the Scala version (`3.0.0-[...]-NIGHTLY`)
-is not supported. The reason is that the dotty repository sometimes uses a nightly build as
-reference compiler. The IDE experience is going to be limited in this case (semantic features will
-only within single files).
diff --git a/docs/_docs/contributing/tools/index.md b/docs/_docs/contributing/tools/index.md
deleted file mode 100644
index e784e3e15d61..000000000000
--- a/docs/_docs/contributing/tools/index.md
+++ /dev/null
@@ -1,9 +0,0 @@
----
-layout: index
-title: IDEs and Tools
----
-
-This chapter of the guide describes how to use Dotty with IDEs and other tools:
-- [IDEs](./ide.md)
-- [Use Mill](./mill.md)
-- [Use Scalafix](./scalafix.md)
diff --git a/docs/_docs/contributing/tools/mill.md b/docs/_docs/contributing/tools/mill.md
deleted file mode 100644
index 7d372835395f..000000000000
--- a/docs/_docs/contributing/tools/mill.md
+++ /dev/null
@@ -1,21 +0,0 @@
----
-layout: doc-page
-title: Basic Operations with Mill
----
-
-Here's an example of how to test a project that uses mill:
-
-```bash
-mill utest.jvm[2.12.8].test
-```
-
-- `utest.jvm` - the name of the compiled module (obtain from `build.sc`)
-- `2.12.8` – Scala cross-compile version
-- `test` – task to run on the module specified with the specified Scala version
-
-To get mill of the most recent version, first, find it in https://github.com/lihaoyi/mill/releases (e.g. `0.4.2-1-020e28`). Copy the download link and substitute it in the following command instead of `https://github.com/lihaoyi/mill/releases/download/0.4.1/0.4.1`:
-
-```bash
-# From http://www.lihaoyi.com/mill/
-sudo sh -c '(echo "#!/usr/bin/env sh" && curl -L https://github.com/lihaoyi/mill/releases/download/0.4.1/0.4.1) > /usr/local/bin/mill && chmod +x /usr/local/bin/mill'
-```
diff --git a/docs/_docs/contributing/tools/scalafix.md b/docs/_docs/contributing/tools/scalafix.md
deleted file mode 100644
index 30c7050f8b3e..000000000000
--- a/docs/_docs/contributing/tools/scalafix.md
+++ /dev/null
@@ -1,18 +0,0 @@
----
-layout: doc-page
-title: Working with Scalafix
----
-
-First, create a new rule as follows (command from https://scalacenter.github.io/scalafix/docs/developers/setup.html):
-
-```bash
-sbt new scalacenter/scalafix.g8 --repo="Repository Name"
-```
-
-To run the rule against some codebase:
-
-```bash
-scalafix -r file:scalafix/rules/src/main/scala/fix/YourRule.scala your/code/base/
-```
-
-Where `YourRule.scala` is the rule you developed and `your/code/base` is the code base you are running the rule against.
diff --git a/docs/_docs/contributing/workflow.md b/docs/_docs/contributing/workflow.md
deleted file mode 100644
index 1d11dc61a6bf..000000000000
--- a/docs/_docs/contributing/workflow.md
+++ /dev/null
@@ -1,131 +0,0 @@
----
-layout: doc-page
-title: Workflow
----
-
-Check [Getting Started](getting-started.md) for instructions on how to obtain the source code of dotty.
-This document details common workflow patterns when working with Dotty.
-
-## Compiling files with scalac ##
-
-As we have seen you can compile a test file either from sbt:
-
-```bash
-$ sbt
-> scalac <OPTIONS> <FILE>
-```
-
-or from terminal:
-
-```bash
-$ scalac <OPTIONS> <FILE>
-```
-
-Here are some useful debugging `<OPTIONS>`:
-
-* `-Xprint:PHASE1,PHASE2,...` or `-Xprint:all`: prints the `AST` after each
-  specified phase. Phase names can be found by examining the
-  `dotty.tools.dotc.transform.*` classes for their `phaseName` field e.g., `-Xprint:erasure`.
-  You can discover all phases in the `dotty.tools.dotc.Compiler` class
-* `-Ylog:PHASE1,PHASE2,...` or `-Ylog:all`: enables `ctx.log("")` logging for
-  the specified phase.
-* `-Ycheck:all` verifies the consistency of `AST` nodes between phases, in
-  particular checks that types do not change.
Some phases currently can't be
-  `Ycheck`ed, therefore in the tests we run:
-  `-Ycheck:tailrec,resolveSuper,mixin,restoreScopes,labelDef`.
-* the last frontier of debugging (before actual debugging) is the range of logging capabilities that
-can be enabled through the `dotty.tools.dotc.config.Printers` object. Change any of the desired printer from `noPrinter` to
-`default` and this will give you the full logging capability of the compiler.
-
-## Inspecting Types with Type Stealer ##
-
-You can inspect types with the main method `dotty.tools.printTypes` from the sbt shell,
-passing at least three arguments:
-- The first argument is a string that introduces some
-Scala definitions
-- The second argument introduces how the the remaining arguments should be interpreted,
-comprising of
-  - `rhs` - the return type of a definition
-  - `class` - the signature of a class, after its name
-  - `method` - the signature of a method, after its name
-  - `type` - the signature of a type, after its name
-- The remaining arguments are type signatures, these may reference definitions introduced by the first argument.
-
-Each type signature is then be printed, displaying their internal structure, alongside their class, using
-the same representation that can later be used in pattern matching to decompose the type.
-
-Here, we inspect a refinement of a class `Box`:
-```bash
-$ sbt
-> scala3-compiler-bootstrapped/Test/runMain dotty.tools.printTypes "class Box { def x: Any }" "rhs" "Box { def x: Int }"
-RefinedType(TypeRef(ThisType(TypeRef(NoPrefix, module class <empty>)),class Box), x, ExprType(TypeRef(TermRef(ThisType(TypeRef(NoPrefix, module class <empty>)), object scala), class Int))) [class dotty.tools.dotc.core.Types$CachedRefinedType]
-```
-
-You can also pass the empty string as the second
-argument, e.g. to inspect a standard library type:
-```bash
-$ sbt
-> scala3-compiler-bootstrapped/Test/runMain dotty.tools.printTypes "" "rhs" "1 *: EmptyTuple"
-AppliedType(TypeRef(TermRef(ThisType(TypeRef(NoPrefix, module class <empty>)), object scala), class *:), List(ConstantType(Constant(1)), TypeRef(TermRef(ThisType(TypeRef(NoPrefix, module class scala)), object Tuple$package), type EmptyTuple)))
-```
-
-Here are some other examples you can follow:
-- `...printTypes "" class "[T] extends Foo[T] {}"`
-- `...printTypes "" method "(x: Int): x.type"`
-- `...printTypes "" type "<: Int" "= [T] =>> List[T]"`
-
-If you want to further inspect the types, and not just print them, the object `dotty.tools.DottyTypeStealer` has a
-method `stealType`. It takes the same arguments as `printTypes`, but returns both a `Context` containing the
-definitions passed, along with the list of types:
-```scala
-// compiler/test/dotty/tools/DottyTypeStealer.scala
-object DottyTypeStealer extends DottyTest {
-
-  enum Kind:
-    case `rhs`, `method`, `class`, `type`
-    ...
-
-  def stealType(kind: Kind, source: String, typeStrings: String*): (Context, List[Type]) = {
-    ...
-  }
-}
-```
-Any test source within `compiler/test` can then call `stealType` for custom purposes.
-
-## Pretty-printing ##
-Many objects in the scalac compiler implement a `Showable` trait (e.g. `Tree`,
-`Symbol`, `Type`). These objects may be prettyprinted using the `.show`
-method
-
-## SBT Commands Cheat Sheet ##
-The basics of working with Dotty codebase are documented [here](https://dotty.epfl.ch/docs/contributing/getting-started.html) and [here](https://dotty.epfl.ch/docs/contributing/workflow.html).
Below is a cheat sheet of some frequently used commands (to be used from SBT console – `sbt`).
-
-
-| Command                                               | Description |
-|-------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------|
-| `scala3/scalac`                                       | Run the compiler directly, with any current changes. |
-| `scala3/scala`                                        | Run the main method of a given class name. |
-| `scalac ../issues/Playground.scala`                   | Compile the given file – path relative to the Dotty directory. Output the compiled class files to the Dotty directory itself. |
-| `scala Playground`                                    | Run the compiled class `Playground`. Dotty directory is on classpath by default. |
-| `repl`                                                | Start REPL |
-| `scala3/scalac -print-tasty Foo.tasty`                | Print the TASTy of top-level class `Foo` |
-| `scala3-bootstrapped/test`                            | Run all tests for Scala 3. (Slow, recommended for CI only) |
-| `scala3-bootstrapped/publishLocal`                    | Build Scala 3 locally. (Use to debug a specific project) |
-| `scalac ../issues/Playground.scala`                   | Compile the given file – path relative to the Dotty directory. Output the compiled class files to the Dotty directory itself.|
-| `testOnly dotty.tools.dotc.CompilationTests -- *pos`  | Run test (method) `pos` from `CompilationTests` suite. |
-| `testCompilation sample`                              | In all test suites, run test files containing the word `sample` in their title. |
-| `scala3-compiler/Test/runMain dotty.tools.printTypes` | Print types underlying representation |
-| `scaladoc/generateScalaDocumentation`                 | Build the documentation website (published to https://dotty.epfl.ch) |
-| `scaladoc/generateReferenceDocumentation`             | Build the reference documentation website (published to https://docs.scala-lang.org/scala3/reference) |
-
-
-## Shell Commands
-
-| Command                              | Description |
-|--------------------------------------|------------------------------------------------------------------|
-| `rm -rv *.tasty *.class out || true` | clean all compiled artifacts, from root dotty directory |
-
-
-
-
-
diff --git a/docs/_docs/internals/cc/alternatives-to-sealed.md b/docs/_docs/internals/cc/alternatives-to-sealed.md
new file mode 100644
index 000000000000..cca3b2b65e0a
--- /dev/null
+++ b/docs/_docs/internals/cc/alternatives-to-sealed.md
@@ -0,0 +1,178 @@
+A capture checking variant
+==========================
+
+ - Our starting point is the currently implemented system, where encapsulation is achieved by disallowing root capabilities in
+   the types of sealed type variables. By contrast, no restrictions apply
+   to boxing or unboxing.
+
+ - We now treat all type variables as sealed (so no special `sealed` modifier is necessary anymore). A type variable cannot be instantiated to a type that contains a covariant occurrence of `cap`. The same restriction applies to the types of mutable variables and try expressions.
+
+ - For any immutable variable `x`, introduce a _reach_ capability `x*` which stands for
+   "all capabilities reachable through `x`". We have `{x} <: {x*} <: dcs(x)` where the deep capture set `dcs(x)` of `x`
+   is the union of all capture sets that appear in covariant position in the type of `x`. If `x` and `y` are different
+   variables then `{x*}` and `{y*}` are unrelated.
+
+ - We modify the VAR rule as follows:
+
+       x: T in E
+       -----------
+       E |- x: T'
+
+   where T' is T with (1) the toplevel capture set replaced by `{x}` and
+   (2) all covariant occurrences of cap replaced by `x*`, provided there
+   are no occurrences in `T` at other variances.
(1) is standard,
+   whereas (2) is new.
+
+- Why is this sound? Covariant occurrences of cap must represent capabilities that are reachable from `x`, so they are included in the meaning of `{x*}`. At the same time, encapsulation is still maintained since no covariant occurrences of cap are allowed in instance types of
+type variables.
+
+## Examples:
+
+Assume
+```scala
+type Proc = () => Unit
+
+class Ref[T](init: T):
+  private var x: T = init
+  def get: T = x
+  def set(y: T) = { x = y }
+```
+Note that type parameters no longer need (or can) be annotated with `sealed`.
+
+The following example does not work.
+```scala
+def runAll(xs: List[Proc]): Unit =
+  var cur: List[Proc] = xs // error: Illegal type for var
+  while cur.nonEmpty do
+    val next: () => Unit = cur.head
+    next()
+    cur = cur.tail
+
+  usingFile: f =>
+    cur = ((() => f.write()): (() ->{f*} Unit)) :: Nil
+```
+Same with refs:
+```scala
+def runAll(xs: List[Proc]): Unit =
+  val cur = Ref[List[Proc]](xs) // error, illegal type for type argument to Ref
+  while cur.get.nonEmpty do
+    val next: () => Unit = cur.get.head
+    next()
+    cur.set(cur.get.tail: List[Proc])
+
+  usingFile: f =>
+    cur.set:
+      (() => f.write(): () ->{f*} Unit) :: Nil
+```
+
+The following variant makes the loop typecheck, but
+still rejects the incorrect leakage in `usingFile`.
+```scala
+def runAll(xs: List[Proc]): Unit =
+  var cur: List[() ->{xs*} Unit] = xs // OK, by revised VAR
+  while cur.nonEmpty do
+    val next: () ->{xs*} Unit = cur.head
+    next()
+    cur = cur.tail: List[() ->{xs*} Unit]
+
+  usingFile: f =>
+    cur = (() => f.write(): () ->{f*} Unit) :: Nil // error since {f*} !<: {xs*}
+```
+
+Same with refs:
+```scala
+def runAll(xs: List[Proc]): Unit =
+  val cur = Ref[List[() ->{xs*} Unit]](xs) // OK, by revised VAR
+  while cur.get.nonEmpty do
+    val next: () ->{xs*} Unit = cur.get.head
+    next()
+    cur.set(cur.get.tail: List[() ->{xs*} Unit])
+
+  usingFile: f =>
+    cur.set:
+      (() => f.write(): () ->{f*} Unit) :: Nil // error since {f*} !<: {xs*}
+```
+
+More examples. This works:
+```scala
+def cons(x: Proc, xs: List[Proc]): List[() ->{x, xs*} Unit] =
+  List.cons[() ->{x, xs*} Unit](x, xs)
+```
+And this works as well:
+```scala
+def addOneProc(xs: List[Proc]): List[Proc] =
+  def x: Proc = () => write("hello")
+  val result: List[() ->{x, xs*} Unit] = x :: xs
+  result // OK, we can widen () ->{x, xs*} Unit to cap here.
+```
+This doesn't work:
+```scala
+def cons(x: Proc, xs: Set[Proc]) =
+  Set.include[Proc](x, xs) // error: can't instantiate type parameter to Proc
+```
+But this works:
+```scala
+def cons(x: Proc, xs: Set[Proc]): Set[() ->{x,xs*} Unit] =
+  Set.include[() ->{x,xs*} Unit](x, xs) // ok
+```
+Say we have `a: () ->{io} Unit` and `as: List[() ->{io} Unit]`. Then `cons(a, as)`
+is of type `List[() ->{a, as*} Unit]`, which is a subtype of `List[() ->{io} Unit]`. This follows from
+`{a} <: {io}` by rule (Var) and `{as*} <: dcs(as) = {io}` by the subcapturing rules for
+reach capabilities.
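+
+Spelled out as a code sketch (using this note's notation; `io` is assumed to be some
+capability in scope, with `a`, `as` and `cons` as above):
+```scala
+val cs: List[() ->{a, as*} Unit] = cons(a, as)
+val widened: List[() ->{io} Unit] = cs // ok, since {a} <: {io} and {as*} <: dcs(as) = {io}
+```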
+
+This also works:
+```scala
+def compose1[A, B, C](f: A => B, g: B => C): A ->{f, g} C =
+  z => g(f(z))
+```
+And this works as well:
+```scala
+def compose2[A, B, C](f: A => B, g: B => C): A => C =
+  z => g(f(z))
+```
+Even this should work:
+```scala
+def mapCompose[A](ps: List[(A => A, A => A)]): List[A ->{ps*} A] =
+  ps.map(compose1)
+    // ps: List[(A ->{ps*} A, A ->{ps*} A)]
+    // Hence compose1's parameters are both of type A ->{ps*} A
+    // Hence its result type is A ->{ps*} A
+    // So map's type parameter is A ->{ps*} A
+    // Expanded typing:
+    //   (ps: List[(A ->{ps*} A, A ->{ps*} A)])
+    //     .map[A ->{ps*} A]: (f: A ->{ps*} A, g: A ->{ps*} A) =>
+    //       compose1[A ->{ps*} A, A ->{ps*} A, A ->{ps*} A](f, g)
+    //        : A -> {f, g} A
+    // The closure is widened to the non-dependent function type
+    //   (f: A ->{ps*} A, g: A ->{ps*} A) -> A ->{ps*} A
+```
+But it does not work with `compose2`, since the type variable of `map` cannot be instantiated to `A => A`.
+
+Syntax Considerations:
+
+ - `x*` is short and has the right connotations. For the spread operator, `xs*` means
+   _everything contained in xs_. Likewise `x*` in capture sets would mean all capabilities
+   reachable through `x`.
+ - But then we have capabilities that are not values, undermining the OCap model a bit.
+   On the other hand, even if we made `x*` values, they would have to be erased in any case.
+
+Work items:
+===========
+
+ - Implement `x*` references.
+   - internal representation: maybe have a synthetic private member `*` of
+     `Any` to which `x*` maps, i.e. `x*` is `x.*`. Advantage: maps like substitutions
+     and asSeenFrom work out of the box.
+   - subcapturing: `x <:< x* <: dcs(x)`.
+   - Narrowing code: in `adaptBoxed` where `x.type` gets widened to `T^{x}`, also
+     do the covariant `cap` to `x*` replacement. Similarly in `fourthTry` of `TypeComparer`.
+ - Drop local roots
+ - Make all type parameters sealed
+
diff --git a/docs/_docs/internals/overall-structure.md b/docs/_docs/internals/overall-structure.md
index 5bb43eb946a8..ab936ddd8512 100644
--- a/docs/_docs/internals/overall-structure.md
+++ b/docs/_docs/internals/overall-structure.md
@@ -151,7 +151,6 @@ phases.
The current list of phases is specified in class [Compiler] as follows: new InterceptedMethods, // Special handling of `==`, `|=`, `getClass` methods new Getters, // Replace non-private vals and vars with getter defs (fields are added later) new SpecializeFunctions, // Specialized Function{0,1,2} by replacing super with specialized super - new LiftTry, // Put try expressions that might execute on non-empty stacks into their own methods new CollectNullableFields, // Collect fields that can be nulled out after use in lazy initialization new ElimOuterSelect, // Expand outer selections new ResolveSuper, // Implement super accessors diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index 2817a7477b10..aa8cd15f00a0 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -201,7 +201,6 @@ SimpleType1 ::= id Singleton ::= SimpleRef | SimpleLiteral | Singleton ‘.’ id -Singletons ::= Singleton { ‘,’ Singleton } FunArgType ::= [`erased`] Type | [`erased`] ‘=>’ Type PrefixOp(=>, t) FunArgTypes ::= FunArgType { ‘,’ FunArgType } @@ -209,7 +208,7 @@ ParamType ::= [‘=>’] ParamValueType ParamValueType ::= [‘into’] ExactParamType Into(t) ExactParamType ::= ParamValueType [‘*’] PostfixOp(t, "*") TypeArgs ::= ‘[’ Types ‘]’ ts -Refinement ::= :<<< [RefineDcl] {semi [RefineDcl]} >>> ds +Refinement ::= :<<< [RefineDef] {semi [RefineDef]} >>> ds TypeBounds ::= [‘>:’ Type] [‘<:’ Type] TypeBoundsTree(lo, hi) TypeParamBounds ::= TypeBounds {‘:’ Type} ContextBounds(typeBounds, tps) Types ::= Type {‘,’ Type} @@ -318,7 +317,7 @@ Pattern1 ::= PatVar ‘:’ RefinedType | [‘-’] integerLiteral ‘:’ RefinedType Typed(pat, tpe) | [‘-’] floatingPointLiteral ‘:’ RefinedType Typed(pat, tpe) | Pattern2 -Pattern2 ::= [id ‘@’] InfixPattern [‘*’] Bind(name, pat) +Pattern2 ::= [id ‘@’] InfixPattern Bind(name, pat) InfixPattern ::= SimplePattern { id [nl] SimplePattern } InfixOp(pat, op, pat) SimplePattern ::= PatVar Ident(wildcard) | Literal Bind(name, Ident(wildcard)) @@ -357,8 +356,8 @@ ClsParam ::= {Annotation} [{Modifier} (‘val’ | ‘var’) | ‘inline’] Param DefParamClauses ::= DefParamClause { DefParamClause } -- and two DefTypeParamClause cannot be adjacent -DefParamClause ::= DefTypeParamClause - | DefTermParamClause +DefParamClause ::= DefTypeParamClause + | DefTermParamClause | UsingParamClause TypelessClauses ::= TypelessClause {TypelessClause} TypelessClause ::= DefTermParamClause @@ -401,10 +400,10 @@ Export ::= ‘export’ ImportExpr {‘,’ ImportExpr} ImportExpr ::= SimpleRef {‘.’ id} ‘.’ ImportSpec Import(expr, sels) | SimpleRef ‘as’ id Import(EmptyTree, ImportSelector(ref, id)) ImportSpec ::= NamedSelector - | WildcardSelector + | WildCardSelector | ‘{’ ImportSelectors) ‘}’ NamedSelector ::= id [‘as’ (id | ‘_’)] -WildCardSelector ::= ‘*' | ‘given’ [InfixType] +WildCardSelector ::= ‘*’ | ‘given’ [InfixType] ImportSelectors ::= NamedSelector [‘,’ ImportSelectors] | WildCardSelector {‘,’ WildCardSelector} @@ -413,29 +412,24 @@ EndMarkerTag ::= id | ‘if’ | ‘while’ | ‘for’ | ‘match’ | | ‘new’ | ‘this’ | ‘given’ | ‘extension’ | ‘val’ ``` -### Declarations and Definitions +### Definitions ```ebnf -RefineDcl ::= ‘val’ ValDcl - | ‘def’ DefDcl - | ‘type’ {nl} TypeDcl -Dcl ::= RefineDcl - | ‘var’ VarDcl -ValDcl ::= ids ‘:’ Type PatDef(_, ids, tpe, EmptyTree) -VarDcl ::= ids ‘:’ Type PatDef(_, ids, tpe, EmptyTree) -DefDcl ::= DefSig ‘:’ Type DefDef(_, name, paramss, tpe, EmptyTree) -DefSig ::= id [DefParamClauses] [DefImplicitClause] -TypeDcl ::= id [TypeParamClause] {FunParamClause} 
TypeBounds          TypeDefTree(_, name, tparams, bound
-                       [‘=’ Type]
+RefineDef         ::=  ‘val’ ValDef
+                    |  ‘def’ DefDef
+                    |  ‘type’ {nl} TypeDef
 Def               ::=  ‘val’ PatDef
                     |  ‘var’ PatDef
                     |  ‘def’ DefDef
-                    |  ‘type’ {nl} TypeDcl
+                    |  ‘type’ {nl} TypeDef
                     |  TmplDef
-PatDef            ::=  ids [‘:’ Type] ‘=’ Expr
-                    |  Pattern2 [‘:’ Type] ‘=’ Expr                             PatDef(_, pats, tpe?, expr)
-DefDef            ::=  DefSig [‘:’ Type] ‘=’ Expr                               DefDef(_, name, paramss, tpe, expr)
+PatDef            ::=  ids [‘:’ Type] [‘=’ Expr]
+                    |  Pattern2 [‘:’ Type] [‘=’ Expr]                           PatDef(_, pats, tpe?, expr)
+DefDef            ::=  DefSig [‘:’ Type] [‘=’ Expr]                             DefDef(_, name, paramss, tpe, expr)
                     |  ‘this’ TypelessClauses [DefImplicitClause] ‘=’ ConstrExpr  DefDef(_, <init>, vparamss, EmptyTree, expr | Block)
+DefSig            ::=  id [DefParamClauses] [DefImplicitClause]
+TypeDef           ::=  id [TypeParamClause] {FunParamClause} TypeBounds         TypeDefTree(_, name, tparams, bound
+                       [‘=’ Type]
 
 TmplDef           ::=  ([‘case’] ‘class’ | ‘trait’) ClassDef
                     |  [‘case’] ‘object’ ObjectDef
diff --git a/docs/_docs/reference/changed-features/compiler-plugins.md b/docs/_docs/reference/changed-features/compiler-plugins.md
index 82d38bd44d96..6be8a62c7ac4 100644
--- a/docs/_docs/reference/changed-features/compiler-plugins.md
+++ b/docs/_docs/reference/changed-features/compiler-plugins.md
@@ -18,7 +18,7 @@ For experimentation and research, Scala 3 introduces _research plugin_. Research
 plugins are more powerful than Scala 2 analyzer plugins as they let plugin authors customize
 the whole compiler pipeline. One can easily replace the standard typer by a custom one or create a parser for a domain-specific language. However, research plugins are only
-enabled for nightly or snaphot releases of Scala 3.
+enabled with the `-experimental` compiler flag or in nightly/snapshot releases of Scala 3.
 
 Common plugins that add new phases to the compiler pipeline are called
 _standard plugins_ in Scala 3. In terms of features, they are similar to
diff --git a/docs/_docs/reference/changed-features/implicit-resolution.md b/docs/_docs/reference/changed-features/implicit-resolution.md
index 6a898690b565..1396ed04b6d3 100644
--- a/docs/_docs/reference/changed-features/implicit-resolution.md
+++ b/docs/_docs/reference/changed-features/implicit-resolution.md
@@ -163,8 +163,36 @@ The new rules are as follows: An implicit `a` defined in `A` is more specific th
 
 Condition (*) is new. It is necessary to ensure that the defined relation is transitive.
 
+[//]: # todo: expand with precise rules
+**9.** The following change is currently enabled in `-source future`:
+Implicit resolution now avoids generating recursive givens that can lead to an infinite loop at runtime. Here is an example:
+```scala
+object Prices {
+  opaque type Price = BigDecimal
+
+  object Price{
+    given Ordering[Price] = summon[Ordering[BigDecimal]] // was error, now avoided
+  }
+}
+```
+
+Previously, implicit resolution would resolve the `summon` to the given in `Price`, leading to an infinite loop (a warning was issued in that case). We now use the underlying given in `BigDecimal` instead. We achieve that by adding the following rule for implicit search:
+
+ - When doing an implicit search while checking the implementation of a `given` definition `G` of the form
+   ```
+   given ... = ....
+   ```
+   discard all search results that lead back to `G` or to a given with the same owner as `G` that comes later in the source than `G`.
+
+The new behavior is currently enabled in `source.future` and will be enabled at the earliest in Scala 3.6.
For earlier source versions, the behavior is as
+follows:
+
+ - Scala 3.3: no change
+ - Scala 3.4: A warning is issued where the behavior will change in 3.future.
+ - Scala 3.5: An error is issued where the behavior will change in 3.future.
+
+Old-style implicit definitions are unaffected by this change.
 
-[//]: # todo: expand with precise rules
diff --git a/docs/_docs/reference/contextual/extension-methods.md b/docs/_docs/reference/contextual/extension-methods.md
index d98d80caafc5..8b9a3df5b84c 100644
--- a/docs/_docs/reference/contextual/extension-methods.md
+++ b/docs/_docs/reference/contextual/extension-methods.md
@@ -254,7 +254,8 @@ The following two rewritings are tried in order:
       not a wildcard import, pick the expansion from that import.
       Otherwise, report an ambiguous reference error.
 
-   **Note**: This relaxation is currently enabled only under the `experimental.relaxedExtensionImports` language import.
+   **Note**: This relaxation of the import rules applies only if the method `m` is used as an extension method. If it is used as a normal method in prefix form, the usual import rules apply, which means that importing `m` from
+   multiple places can lead to an ambiguity error.
 
 2. If the first rewriting does not typecheck with expected type `T`,
    and there is an extension method `m` in some eligible object `o`, the selection is rewritten to `o.m[Ts](e)`. An object `o` is _eligible_ if
diff --git a/docs/_docs/reference/contextual/givens.md b/docs/_docs/reference/contextual/givens.md
index f1333bf8811f..bf018278c9fc 100644
--- a/docs/_docs/reference/contextual/givens.md
+++ b/docs/_docs/reference/contextual/givens.md
@@ -181,7 +181,7 @@ GivenDef          ::=  [GivenSig] StructuralInstance
                     |  [GivenSig] AnnotType ‘=’ Expr
                     |  [GivenSig] AnnotType
 GivenSig          ::=  [id] [DefTypeParamClause] {UsingParamClause} ‘:’
-StructuralInstance ::=  ConstrApp {‘with’ ConstrApp} ‘with’ TemplateBody
+StructuralInstance ::=  ConstrApp {‘with’ ConstrApp} [‘with’ TemplateBody]
 ```
 
 A given instance starts with the reserved word `given` and an optional _signature_. The signature
diff --git a/docs/_docs/reference/contextual/right-associative-extension-methods.md b/docs/_docs/reference/contextual/right-associative-extension-methods.md
index 61f0beece6ed..0388d173cd2c 100644
--- a/docs/_docs/reference/contextual/right-associative-extension-methods.md
+++ b/docs/_docs/reference/contextual/right-associative-extension-methods.md
@@ -61,4 +61,36 @@ For instance, the `+::` method above would become
 
 This expansion has to be kept in mind when writing right-associative extension
 methods with inter-parameter dependencies.
 
-An overall simpler design could be obtained if right-associative operators could _only_ be defined as extension methods, and would be disallowed as normal methods. In that case neither arguments nor parameters would have to be swapped. Future versions of Scala should strive to achieve this simplification.
+This expansion also introduces some inconsistencies when calling the extension methods in non-infix form. The user needs to invert the order of the arguments at the call site manually. For instance:
+
+```scala
+  extension [T](x: T)
+    def *:(xs: List[T]): List[T] = ...
+
+  y.*:(ys)   // error when following the parameter definition order
+  ys.*:(y)
+
+  *:(y)(ys)  // error when following the parameter definition order
+  *:(ys)(y)
+```
+
+Another limitation of this representation is that it is impossible to pass the
+type parameters of the `def` explicitly (unless called in prefix form).
For instance:
+
+```scala
+  extension (x: Int)
+    def *:[T](xs: List[T]): List[T] = ...
+
+  xs.*:[Int](1) // error when trying to set T explicitly
+```
+
+The expansion of right-associative extension methods also affects the order in which contextual parameters can be passed explicitly.
+
+Grouped extensions can also behave unintuitively: in general, all extensions in a
+group are extensions on the receiver, except if one of them is a
+right-associative extension method, in which case it is an extension on the type of its argument. For instance:
+```scala
+  extension (a: Int)
+    def :+(b: Long): Long = ... // extension on Int
+    def +:(b: Long): Long = ... // extension on Long
+```
diff --git a/docs/_docs/reference/dropped-features/existential-types.md b/docs/_docs/reference/dropped-features/existential-types.md
index 6ef815152cd0..a7c491dfb3b3 100644
--- a/docs/_docs/reference/dropped-features/existential-types.md
+++ b/docs/_docs/reference/dropped-features/existential-types.md
@@ -10,7 +10,7 @@ have been dropped. The reasons for dropping them are:
 
  - Existential types violate a type soundness principle on which DOT
    and Scala 3 are constructed. That principle says that every
-   prefix (`p`, respectvely `S`) of a type selection `p.T` or `S#T`
+   prefix (`p`, respectively `S`) of a type selection `p.T` or `S#T`
   must either come from a value constructed at runtime or refer to a
   type that is known to have only good bounds.
 
diff --git a/docs/_docs/reference/dropped-features/nonlocal-returns.md b/docs/_docs/reference/dropped-features/nonlocal-returns.md
index e6490b7ca5bc..b7dae17f5a77 100644
--- a/docs/_docs/reference/dropped-features/nonlocal-returns.md
+++ b/docs/_docs/reference/dropped-features/nonlocal-returns.md
@@ -5,7 +5,7 @@ title: "Deprecated: Nonlocal Returns"
 nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/nonlocal-returns.html
 ---
 
-Returning from nested anonymous functions has been deprecated, and will produce a warning from version `3.2`.
+Returning from nested anonymous functions is deprecated since Scala 3.2.0.
 
 Nonlocal returns are implemented by throwing and catching `scala.runtime.NonLocalReturnException`-s. This is rarely what is intended by the programmer. It can be problematic because of the hidden performance cost of throwing and catching exceptions. Furthermore, it is a leaky implementation: a catch-all exception handler can intercept a `NonLocalReturnException`.
 
@@ -21,5 +21,3 @@ def firstIndex[T](xs: List[T], elem: T): Int =
     if x == elem then break(i)
   -1
 ```
-
-Note: compiler produces deprecation error on nonlocal returns only with `-source:future` option.
diff --git a/docs/_docs/reference/experimental/canthrow.md b/docs/_docs/reference/experimental/canthrow.md
index 064d928fe26c..cd0da72f3d4b 100644
--- a/docs/_docs/reference/experimental/canthrow.md
+++ b/docs/_docs/reference/experimental/canthrow.md
@@ -68,9 +68,6 @@ How can the capability be produced?
 There are several possibilities:
 
 Most often, the capability is produced by having a using clause `(using CanThrow[Exc])` in some enclosing scope. This roughly corresponds to a [`throws`](https://docs.oracle.com/javase/specs/jls/se7/html/jls-8.html#jls-8.4.6) clause in Java.
The analogy is even stronger since alongside [`CanThrow`](https://scala-lang.org/api/3.x/scala/CanThrow.html) there is also the following type alias defined in the [`scala`](https://scala-lang.org/api/3.x/scala.html) package:
 ```scala
-infix type A = Int
-```
-```scala
 infix type $throws[R, +E <: Exception] = CanThrow[E] ?=> R
 ```
 That is, [`R $throws E`](https://scala-lang.org/api/3.x/scala/runtime.html#$throws-0) is a context function type that takes an implicit `CanThrow[E]` parameter and that returns a value of type `R`. What's more, the compiler will translate infix types with `throws` as the operator to `$throws` applications according to the rules
diff --git a/docs/_docs/reference/experimental/given-loop-prevention.md b/docs/_docs/reference/experimental/given-loop-prevention.md
new file mode 100644
index 000000000000..e306ba977d45
--- /dev/null
+++ b/docs/_docs/reference/experimental/given-loop-prevention.md
@@ -0,0 +1,31 @@
+---
+layout: doc-page
+title: Given Loop Prevention
+redirectFrom: /docs/reference/experimental/given-loop-prevention.html
+nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/given-loop-prevention.html
+---
+
+Implicit resolution now avoids generating recursive givens that can lead to an infinite loop at runtime. Here is an example:
+
+```scala
+object Prices {
+  opaque type Price = BigDecimal
+
+  object Price{
+    given Ordering[Price] = summon[Ordering[BigDecimal]] // was error, now avoided
+  }
+}
+```
+
+Previously, implicit resolution would resolve the `summon` to the given in `Price`, leading to an infinite loop (a warning was issued in that case). We now use the underlying given in `BigDecimal` instead. We achieve that by adding the following rule for implicit search:
+
+ - When doing an implicit search while checking the implementation of a `given` definition `G` of the form
+   ```
+   given ... = ....
+   ```
+   discard all search results that lead back to `G` or to a given with the same owner as `G` that comes later in the source than `G`.
+
+The new behavior is enabled with the `experimental.givenLoopPrevention` language import. If no such import or setting is given, a warning is issued where the behavior would change under that import (for source version 3.4 and later).
+
+Old-style implicit definitions are unaffected by this change.
+
diff --git a/docs/_docs/reference/experimental/overview.md b/docs/_docs/reference/experimental/overview.md
index 254f103896e4..f70cf32b9c24 100644
--- a/docs/_docs/reference/experimental/overview.md
+++ b/docs/_docs/reference/experimental/overview.md
@@ -21,6 +21,12 @@ They are enabled by importing the feature or using the `-language` compiler fla
 In general, experimental language features can be imported in an experimental scope (see [experimental definitions](../other-new-features/experimental-defs.md)).
 They can be imported at the top-level if all top-level definitions are `@experimental`.
 
+### `-experimental` compiler flag
+
+This flag enables the use of any experimental language feature in the project.
+It does this by adding an `@experimental` annotation to all top-level definitions.
+Hence, dependent projects also have to be experimental.
+
 ## Experimental language features supported by special compiler options
 
 Some experimental language features that are still in research and development can be enabled with special compiler options.
These include
diff --git a/docs/_docs/reference/metaprogramming/compiletime-ops.md b/docs/_docs/reference/metaprogramming/compiletime-ops.md
index 038935badc0b..048c6b6165bb 100644
--- a/docs/_docs/reference/metaprogramming/compiletime-ops.md
+++ b/docs/_docs/reference/metaprogramming/compiletime-ops.md
@@ -30,7 +30,7 @@ enabling us to handle situations where a value is not present. Note that `S` is
 the type of the successor of some singleton type. For example the type `S[1]`
 is the singleton type `2`.
 
-Since tuples are not constant types, even if their constituants are, there is `constValueTuple`, which given a tuple type `(X1, ..., Xn)`, returns a tuple value `(constValue[X1], ..., constValue[Xn])`.
+Since tuples are not constant types, even if their constituents are, there is `constValueTuple`, which given a tuple type `(X1, ..., Xn)`, returns a tuple value `(constValue[X1], ..., constValue[Xn])`.
 
 ### `erasedValue`
 
diff --git a/docs/_docs/reference/metaprogramming/inline.md b/docs/_docs/reference/metaprogramming/inline.md
index 0c4800069bad..f4988d02e0ba 100644
--- a/docs/_docs/reference/metaprogramming/inline.md
+++ b/docs/_docs/reference/metaprogramming/inline.md
@@ -105,7 +105,8 @@ would typecheck.
 
 Inline methods can be recursive. For instance, when called with a constant
 exponent `n`, the following method for `power` will be implemented by
-straight inline code without any loop or recursion.
+straight inline code without any loop or recursion. It is worth noting that the number of successive
+inlines is limited to 32 and can be modified by the compiler setting `-Xmax-inlines`.
 
 ```scala
 inline def power(x: Double, n: Int): Double =
diff --git a/docs/_docs/reference/metaprogramming/macros.md b/docs/_docs/reference/metaprogramming/macros.md
index a91e69d985f0..5bfaa167a12f 100644
--- a/docs/_docs/reference/metaprogramming/macros.md
+++ b/docs/_docs/reference/metaprogramming/macros.md
@@ -255,7 +255,7 @@ Therefore, while evaluating the quote, it is not possible to accidentally rebind
 If a quote is well typed, then the generated code is well typed.
 This is a simple consequence of tracking the type of each expression.
 An `Expr[T]` can only be created from a quote that contains an expression of type `T`.
-Conversely, an `Expr[T]` can only be spliced in a location that expects a type `T.
+Conversely, an `Expr[T]` can only be spliced in a location that expects a type `T`.
 As mentioned before, `Expr` is covariant in its type parameter.
 This means that an `Expr[T]` can contain an expression of a subtype of `T`.
 When spliced in a location that expects a type `T`, these expressions also have a valid type.
 
@@ -504,18 +504,22 @@ def let(x: Expr[Any])(using Quotes): Expr[Any] =
 
 let('{1}) // will return a `Expr[Any]` that contains an `Expr[Int]]`
 ```
 
+It is also possible to refer to the same type variable multiple times in a pattern.
+
+```scala
+  case '{ $x: (t, t) } =>
+```
+
 While we can define type variables in the middle of the pattern, their normal form is to define them as a `type` with a lower case name at the start of the pattern.
-We use the Scala backquote `` `t` `` naming convention which interprets the string within the backquote as a literal name identifier.
-This is typically used when we have names that contain special characters that are not allowed for normal Scala identifiers.
-But we use it to explicitly state that this is a reference to that name and not the introduction of a new variable.
+
 ```scala
-  case '{ type t; $x: `t` } =>
+  case '{ type t; $x: t } =>
 ```
-This is a bit more verbose but has some expressivity advantages such as allowing to define bounds on the variables and be able to refer to them several times in any scope of the pattern.
+
+This is a bit more verbose but has some expressivity advantages, such as allowing bounds to be defined on the variables.
 
 ```scala
-  case '{ type t >: List[Int] <: Seq[Int]; $x: `t` } =>
-  case '{ type t; $x: (`t`, `t`) } =>
+  case '{ type t >: List[Int] <: Seq[Int]; $x: t } =>
 ```
 
 
@@ -526,15 +530,24 @@ It works the same way as a quoted pattern but is restricted to contain a type.
 
 Type variables can be used in quoted type patterns to extract a type.
 
 ```scala
-def empty[T: Type]: Expr[T] =
+def empty[T: Type](using Quotes): Expr[T] =
   Type.of[T] match
     case '[String] => '{ "" }
     case '[List[t]] => '{ List.empty[t] }
+    case '[type t <: Option[Int]; List[t]] => '{ List.empty[t] }
     ...
 ```
 
 - `Type.of[T]` is used to summon the given instance of `Type[T]` in scope, it is equivalent to `summon[Type[T]]`.
 
+It is possible to match against a higher-kinded type using appropriate type bounds on type variables.
+```scala
+def empty[K <: AnyKind : Type](using Quotes): Type[?] =
+  Type.of[K] match
+    case '[type f[X]; f] => Type.of[f]
+    case '[type f[X <: Int, Y]; f] => Type.of[f]
+    case '[type k <: AnyKind; k ] => Type.of[k]
+```
+
 #### Type testing and casting
 
 It is important to note that instance checks and casts on `Expr`, such as `isInstanceOf[Expr[T]]` and `asInstanceOf[Expr[T]]`, will only check if the instance is of the class `Expr` but will not be able to check the `T` argument. These cases will issue a warning at compile-time, but if they are ignored, they can result in unexpected behavior.
diff --git a/docs/_docs/reference/new-types/intersection-types-spec.md b/docs/_docs/reference/new-types/intersection-types-spec.md
index 8d332fc6ed29..4e26626c0b36 100644
--- a/docs/_docs/reference/new-types/intersection-types-spec.md
+++ b/docs/_docs/reference/new-types/intersection-types-spec.md
@@ -46,11 +46,10 @@ A & B <: B           A & B <: A
 
 In other words, `A & B` is the same type as `B & A`, in the sense that the two types
 have the same values and are subtypes of each other.
 
-If `C` is a type constructor, then `C[A] & C[B]` can be simplified using the following three rules:
+If `C` is a co- or contravariant type constructor, then `C[A] & C[B]` can be simplified using the following rules:
 
 - If `C` is covariant, `C[A] & C[B] ~> C[A & B]`
 - If `C` is contravariant, `C[A] & C[B] ~> C[A | B]`
-- If `C` is non-variant, emit a compile error
 
 When `C` is covariant, `C[A & B] <: C[A] & C[B]` can be derived:
diff --git a/docs/_docs/reference/new-types/match-types.md b/docs/_docs/reference/new-types/match-types.md
index d646dd11880b..41f3af11214f 100644
--- a/docs/_docs/reference/new-types/match-types.md
+++ b/docs/_docs/reference/new-types/match-types.md
@@ -140,6 +140,8 @@ An instantiation `Is` is _minimal_ for `Xs` if all type variables in `Xs` that
 appear covariantly and nonvariantly in `Is` are as small as possible and all
 type variables in `Xs` that appear contravariantly in `Is` are as large as
 possible. Here, "small" and "large" are understood with respect to `<:`.
+However, a type parameter will not be made "large" if a pattern containing it is matched
+against a lambda case in co- or contravariant position.
 
 For simplicity, we have omitted constraint handling so far.
The full formulation of subtyping tests describes them as a function from a constraint and a pair of diff --git a/docs/_docs/reference/other-new-features/experimental-defs.md b/docs/_docs/reference/other-new-features/experimental-defs.md index 88815ad1e136..b71b20ecc036 100644 --- a/docs/_docs/reference/other-new-features/experimental-defs.md +++ b/docs/_docs/reference/other-new-features/experimental-defs.md @@ -309,3 +309,9 @@ class MyExperimentalTests { ```
+ +## `-experimental` compiler flag + +This flag enables the use of any experimental language feature in the project. +It does this by adding an `@experimental` annotation to all top-level definitions. +Hence, dependent projects also have to be experimental. diff --git a/docs/_docs/reference/other-new-features/export.md b/docs/_docs/reference/other-new-features/export.md index e8482cb343d9..98e9a7d3d711 100644 --- a/docs/_docs/reference/other-new-features/export.md +++ b/docs/_docs/reference/other-new-features/export.md @@ -78,8 +78,6 @@ A member is _eligible_ if all of the following holds: It is a compile-time error if a simple or renaming selector does not identify any eligible members. -It is a compile-time error if a simple or renaming selector does not identify any eligible members. - Type members are aliased by type definitions, and term members are aliased by method definitions. For instance: ```scala object O: diff --git a/docs/_docs/reference/syntax.md b/docs/_docs/reference/syntax.md index a705c5a3fd79..bf2c27d57863 100644 --- a/docs/_docs/reference/syntax.md +++ b/docs/_docs/reference/syntax.md @@ -274,7 +274,7 @@ ColonArgument ::= colon [LambdaStart] LambdaStart ::= FunParams (‘=>’ | ‘?=>’) | HkTypeParamClause ‘=>’ Quoted ::= ‘'’ ‘{’ Block ‘}’ - | ‘'’ ‘[’ Type ‘]’ + | ‘'’ ‘[’ TypeBlock ‘]’ ExprSplice ::= spliceId -- if inside quoted block | ‘$’ ‘{’ Block ‘}’ -- unless inside quoted pattern | ‘$’ ‘{’ Pattern ‘}’ -- when inside quoted pattern @@ -293,6 +293,8 @@ BlockStat ::= Import | Extension | Expr1 | EndMarker +TypeBlock ::= {TypeBlockStat semi} Type +TypeBlockStat ::= ‘type’ {nl} TypeDcl ForExpr ::= ‘for’ ‘(’ Enumerators0 ‘)’ {nl} [‘do‘ | ‘yield’] Expr | ‘for’ ‘{’ Enumerators0 ‘}’ {nl} [‘do‘ | ‘yield’] Expr @@ -316,7 +318,7 @@ Pattern1 ::= PatVar ‘:’ RefinedType | [‘-’] integerLiteral ‘:’ RefinedType | [‘-’] floatingPointLiteral ‘:’ RefinedType | Pattern2 -Pattern2 ::= [id ‘@’] InfixPattern [‘*’] +Pattern2 ::= [id ‘@’] InfixPattern InfixPattern ::= SimplePattern { id [nl] SimplePattern } SimplePattern ::= PatVar | Literal @@ -391,10 +393,10 @@ Export ::= ‘export’ ImportExpr {‘,’ ImportExpr} ImportExpr ::= SimpleRef {‘.’ id} ‘.’ ImportSpec | SimpleRef ‘as’ id ImportSpec ::= NamedSelector - | WildcardSelector + | WildCardSelector | ‘{’ ImportSelectors) ‘}’ NamedSelector ::= id [‘as’ (id | ‘_’)] -WildCardSelector ::= ‘*' | ‘given’ [InfixType] +WildCardSelector ::= ‘*’ | ‘given’ [InfixType] ImportSelectors ::= NamedSelector [‘,’ ImportSelectors] | WildCardSelector {‘,’ WildCardSelector} diff --git a/docs/_spec/01-lexical-syntax.md b/docs/_spec/01-lexical-syntax.md index de11de10402f..7dfcea87bd2d 100644 --- a/docs/_spec/01-lexical-syntax.md +++ b/docs/_spec/01-lexical-syntax.md @@ -27,8 +27,6 @@ The principle of optional braces is that any keyword that can be followed by `{` The lexical analyzer inserts `indent` and `outdent` tokens that represent regions of indented code [at certain points](./other-new-features/indentation.md). -´\color{red}{\text{TODO SCALA3: Port soft-modifier.md and link it here.}}´ - In the context-free productions below we use the notation `<<< ts >>>` to indicate a token sequence `ts` that is either enclosed in a pair of braces `{ ts }` or that constitutes an indented region `indent ts outdent`. Analogously, the notation `:<<< ts >>>` indicates a token sequence `ts` that is either enclosed in a pair of braces `{ ts }` or that constitutes an indented region `indent ts outdent` that follows a `colon` token. 
@@ -45,17 +43,17 @@ colon ::= ':' -- with side conditions explained above ## Identifiers ```ebnf -op ::= opchar {opchar} -varid ::= lower idrest -boundvarid ::= varid - | ‘`’ varid ‘`’ -alphaid ::= upper idrest - | varid -plainid ::= alphaid - | op -id ::= plainid - | ‘`’ { charNoBackQuoteOrNewline | escapeSeq } ‘`’ -idrest ::= {letter | digit} [‘_’ op] +op ::= opchar {opchar} +varid ::= lower idrest +boundvarid ::= varid + | ‘`’ varid ‘`’ +alphaid ::= upper idrest + | varid +plainid ::= alphaid + | op +id ::= plainid + | ‘`’ { charNoBackQuoteOrNewline | escapeSeq } ‘`’ +idrest ::= {letter | digit} [‘_’ op] escapeSeq ::= UnicodeEscape | charEscapeSeq UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit hexDigit ::= ‘0’ | ... | ‘9’ | ‘A’ | ... | ‘F’ | ‘a’ | ... | ‘f’ @@ -85,7 +83,7 @@ For this purpose, lower case letters include not only a-z, but also all characte The following are examples of variable identifiers: -> ```scala +> ``` > x maxIndex p2p empty_? > `yield` αρετη _y dot_product_* > __system _MAX_LEN_ @@ -94,7 +92,7 @@ The following are examples of variable identifiers: Some examples of constant identifiers are -> ```scala +> ``` > + Object $reserved Džul ǂnûm > ⅰ_ⅲ Ⅰ_Ⅲ ↁelerious ǃqhàà ʹthatsaletter > ``` @@ -106,7 +104,7 @@ User programs should not define identifiers that contain ‘$’ characters. The following names are reserved words instead of being members of the syntactic class `id` of lexical identifiers. -```scala +``` abstract case catch class def do else enum export extends false final finally for given if implicit import lazy match new @@ -121,12 +119,35 @@ type val var while with yield Additionally, the following soft keywords are reserved only in some situations. -´\color{red}{\text{TODO SCALA3: Port soft-modifier.md and link it here.}}´ - ``` -as derives end extension infix inline opaque open transparent using | * + - +as derives end extension infix inline opaque +open transparent using +| * + - ``` +A soft modifier is one of the identifiers `infix`, `inline`, `opaque`, `open` and `transparent`. + +A soft keyword is a soft modifier, or one of `as`, `derives`, `end`, `extension`, `using`, `|`, `+`, `-`, `*`. + +A soft modifier is treated as an actual modifier of a definition if it is followed by a hard modifier or a keyword combination starting a definition (`def`, `val`, `var`, `type`, `given`, `class`, `trait`, `object`, `enum`, `case class`, `case object`). +Between the two words, there may be a sequence of newline tokens and/or other soft modifiers. + +Otherwise, soft keywords are treated as actual keywords in the following situations: + + - `as`, if it appears in a renaming import clause. + - `derives`, if it appears after an extension clause or after the name and possibly parameters of a class, trait, object, or enum definition. + - `end`, if it appears at the start of a line following a statement (i.e. definition or toplevel expression) and is followed on the same line by a single non-comment token that is: + - one of the keywords `for`, `given`, `if`, `match`, `new`, `this`, `throw`, `try`, `val`, `while`, or + - an identifier. + - `extension`, if it appears at the start of a statement and is followed by `(` or `[`. + - `inline`, if it is followed by any token that can start an expression. + - `using`, if it appears at the start of a parameter or argument list. + - `|`, if it separates two patterns in an alternative. + - `+`, `-`, if they appear in front of a type parameter. 
+ - `*`, if it appears in a wildcard import, or if it follows the type of a parameter, or if it appears in a vararg splice `x*`. + +Everywhere else, a soft keyword is treated as a normal identifier. + > When one needs to access Java identifiers that are reserved words in Scala, use backquote-enclosed strings. @@ -143,26 +164,32 @@ Scala is a line-oriented language where statements may be terminated by semi-col A newline in a Scala source text is treated as the special token “nl” if the three following criteria are satisfied: 1. The token immediately preceding the newline can terminate a statement. -1. The token immediately following the newline can begin a statement. +1. The token immediately following the newline can begin a statement and is not a _leading infix operator_. 1. The token appears in a region where newlines are enabled. The tokens that can terminate a statement are: literals, identifiers and the following delimiters and reserved words: -```scala -this null true false return type -_ ) ] } +``` +this null true false return type given +_ ) ] } outdent ``` The tokens that can begin a statement are all Scala tokens _except_ the following delimiters and reserved words: -```scala -catch else extends finally forSome match -with yield , . ; : = => <- <: <% ->: # [ ) ] } +``` +catch do else extends finally forSome macro +match then with yield +, . ; : = => <- <: <% >: # =>> ?=> +) ] } outdent ``` -A `case` token can begin a statement only if followed by a -`class` or `object` token. +A _leading infix operator_ is a symbolic identifier such as `+`, or `approx_==`, or an identifier in backticks that: + +- starts a new line, and +- is not following a blank line, and +- is followed by at least one whitespace character (including new lines) and a token that can start an expression. + +Furthermore, if the operator appears on its own line, the next line must have at least the same indentation width as the operator. Newlines are enabled in: @@ -189,13 +216,13 @@ Multiple newline tokens are accepted in the following places (note that a semico - between the condition of a [conditional expression](06-expressions.html#conditional-expressions) or [while loop](06-expressions.html#while-loop-expressions) and the next following expression, - between the enumerators of a [for-comprehension](06-expressions.html#for-comprehensions-and-for-loops) and the next following expression, and -- after the initial `type` keyword in a [type definition or declaration](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases). +- after the initial `type` keyword in a [type definition](04-basic-definitions.html#type-member-definitions). A single new line token is accepted - in front of an opening brace ‘{’, if that brace is a legal continuation of the current statement or expression, - after an [infix operator](06-expressions.html#prefix,-infix,-and-postfix-operations), if the first token on the next line can start an expression, -- in front of a [parameter clause](04-basic-declarations-and-definitions.html#function-declarations-and-definitions), and +- in front of a [parameter clause](04-basic-definitions.html#method-definitions), and - after an [annotation](11-annotations.html#user-defined-annotations). > The newline tokens between the two lines are not treated as statement separators. 
@@ -305,8 +332,7 @@ Literal ::= [‘-’] integerLiteral ### Integer Literals ```ebnf -integerLiteral ::= (decimalNumeral | hexNumeral) - [‘L’ | ‘l’] +integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’] decimalNumeral ::= ‘0’ | digit [{digit | ‘_’} digit] hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit [{hexDigit | ‘_’} hexDigit] ``` @@ -337,11 +363,10 @@ The digits of a numeric literal may be separated by arbitrarily many underscores ### Floating Point Literals ```ebnf -floatingPointLiteral - ::= [decimalNumeral] ‘.’ digit [{digit | ‘_’} digit] [exponentPart] [floatType] - | decimalNumeral exponentPart [floatType] - | decimalNumeral floatType -exponentPart ::= (‘E’ | ‘e’) [‘+’ | ‘-’] digit [{digit | ‘_’} digit] +floatingPointLiteral ::= [decimalNumeral] ‘.’ digit [{digit | ‘_’} digit] [exponentPart] [floatType] + | decimalNumeral exponentPart [floatType] + | decimalNumeral floatType +exponentPart ::= (‘E’ | ‘e’) [‘+’ | ‘-’] digit [{digit | ‘_’} digit] ``` Floating point literals are of type `Float` when followed by a floating point type suffix `F` or `f`, and are of type `Double` otherwise. @@ -423,7 +448,7 @@ Characters must not necessarily be printable; newlines or other control characte > > This would produce the string: > -> ```scala +> ``` > the present string > spans three > lines. @@ -440,7 +465,7 @@ Characters must not necessarily be printable; newlines or other control characte > > evaluates to > -> ```scala +> ``` > the present string > spans three > lines. diff --git a/docs/_spec/02-identifiers-names-and-scopes.md b/docs/_spec/02-identifiers-names-and-scopes.md index 2b34ae8844cf..551781e911d0 100644 --- a/docs/_spec/02-identifiers-names-and-scopes.md +++ b/docs/_spec/02-identifiers-names-and-scopes.md @@ -8,15 +8,15 @@ chapter: 2 Names in Scala identify types, values, methods, and classes which are collectively called _entities_. Names are introduced by local -[definitions and declarations](04-basic-declarations-and-definitions.html#basic-declarations-and-definitions), +[definitions](04-basic-definitions.html#basic-definitions), [inheritance](05-classes-and-objects.html#class-members), -[import clauses](04-basic-declarations-and-definitions.html#import-clauses), or +[import clauses](04-basic-definitions.html#import-clauses), or [package clauses](09-top-level-definitions.html#packagings) which are collectively called _bindings_. Bindings of different kinds have precedence defined on them: -1. Definitions and declarations that are local, inherited, or made available by a package clause and also defined in the same compilation unit as the reference to them, have the highest precedence. +1. Definitions that are local, inherited, or made available by a package clause and also defined in the same compilation unit as the reference to them, have the highest precedence. 1. Explicit imports have the next highest precedence. 1. Wildcard imports have the next highest precedence. 1. Definitions made available by a package clause, but not also defined in the same compilation unit as the reference to them, as well as imports which are supplied by the compiler but not explicitly written in source code, have the lowest precedence. @@ -48,12 +48,12 @@ A reference to an unqualified (type- or term-) identifier ´x´ is bound by the It is an error if no such binding exists. If ´x´ is bound by an import clause, then the simple name ´x´ is taken to be equivalent to the qualified name to which ´x´ is mapped by the import clause. 
-If ´x´ is bound by a definition or declaration, then ´x´ refers to the entity introduced by that binding. +If ´x´ is bound by a definition, then ´x´ refers to the entity introduced by that binding. In that case, the type of ´x´ is the type of the referenced entity. A reference to a qualified (type- or term-) identifier ´e.x´ refers to the member of the type ´T´ of ´e´ which has the name ´x´ in the same namespace as the identifier. It is an error if ´T´ is not a [value type](03-types.html#value-types). -The type of ´e.x´ is the member type of the referenced entity in ´T´. +The type of ´e.x´ is specified as a [type designator](03-types.html#type-designators). Binding precedence implies that the way source is bundled in files affects name resolution. In particular, imported names have higher precedence than names, defined in other files, that might otherwise be visible because they are defined in either the current package or an enclosing package. @@ -74,11 +74,11 @@ The compiler supplies imports in a preamble to every source file. This preamble conceptually has the following form, where braces indicate nested scopes: ```scala -import java.lang._ +import java.lang.* { - import scala._ + import scala.* { - import Predef._ + import Predef.* { /* source */ } } } @@ -95,8 +95,8 @@ This allows redundant type aliases to be imported without introducing an ambigui object X { type T = annotation.tailrec } object Y { type T = annotation.tailrec } object Z { - import X._, Y._, annotation.{tailrec => T} // OK, all T mean tailrec - @T def f: Int = { f ; 42 } // error, f is not tail recursive + import X.*, Y.*, annotation.tailrec as T // OK, all T mean tailrec + @T def f: Int = { f ; 42 } // error, f is not tail recursive } ``` @@ -107,7 +107,7 @@ Similarly, imported aliases of names introduced by package statements are allowe package p { class C } // xy.scala -import p._ +import p.* package p { class X extends C } package q { class Y extends C } ``` @@ -132,27 +132,32 @@ package q { The following program illustrates different kinds of bindings and precedences between them. 
```scala -package p { // `X' bound by package clause -import Console._ // `println' bound by wildcard import -object Y { - println(s"L4: $X") // `X' refers to `p.X' here - locally { - import q._ // `X' bound by wildcard import - println(s"L7: $X") // `X' refers to `q.X' here - import X._ // `x' and `y' bound by wildcard import - println(s"L9: $x") // `x' refers to `q.X.x' here +package p { // `X' bound by package clause + import Console.* // `println' bound by wildcard import + object Y { + println(s"L4: $X") // `X' refers to `p.X' here locally { - val x = 3 // `x' bound by local definition - println(s"L12: $x") // `x' refers to constant `3' here + import q.* // `X' bound by wildcard import + println(s"L7: $X") // `X' refers to `q.X' here + import X.* // `x' and `y' bound by wildcard import + println(s"L9: $x") // `x' refers to `q.X.x' here locally { - import q.X._ // `x' and `y' bound by wildcard import -// println(s"L15: $x") // reference to `x' is ambiguous here - import X.y // `y' bound by explicit import - println(s"L17: $y") // `y' refers to `q.X.y' here + val x = 3 // `x' bound by local definition + println(s"L12: $x") // `x' refers to constant `3' here locally { - val x = "abc" // `x' bound by local definition - import p.X._ // `x' and `y' bound by wildcard import -// println(s"L21: $y") // reference to `y' is ambiguous here - println(s"L22: $x") // `x' refers to string "abc" here -}}}}}} + import q.X.* // `x' and `y' bound by wildcard import +// println(s"L15: $x") // reference to `x' is ambiguous here + import X.y // `y' bound by explicit import + println(s"L17: $y") // `y' refers to `q.X.y' here + locally { + val x = "abc" // `x' bound by local definition + import p.X.* // `x' and `y' bound by wildcard import +// println(s"L21: $y") // reference to `y' is ambiguous here + println(s"L22: $x") // `x' refers to string "abc" here + } + } + } + } + } +} ``` diff --git a/docs/_spec/03-types.md b/docs/_spec/03-types.md index bbaac5de03a0..407a69b8c8c5 100644 --- a/docs/_spec/03-types.md +++ b/docs/_spec/03-types.md @@ -7,172 +7,507 @@ chapter: 3 # Types ```ebnf - Type ::= FunctionArgTypes ‘=>’ Type - | TypeLambdaParams ‘=>>’ Type - | InfixType - FunctionArgTypes ::= InfixType - | ‘(’ [ ParamType {‘,’ ParamType } ] ‘)’ - TypeLambdaParams ::= ‘[’ TypeLambdaParam {‘,’ TypeLambdaParam} ‘]’ - TypeLambdaParam ::= {Annotation} (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type] - InfixType ::= CompoundType {id [nl] CompoundType} - CompoundType ::= AnnotType {‘with’ AnnotType} [Refinement] - | Refinement - AnnotType ::= SimpleType {Annotation} - SimpleType ::= SimpleType TypeArgs - | SimpleType ‘#’ id - | StableId - | Path ‘.’ ‘type’ - | Literal - | ‘(’ Types ‘)’ - TypeArgs ::= ‘[’ Types ‘]’ - Types ::= Type {‘,’ Type} -``` - -We distinguish between proper types and type constructors, which take type parameters and yield types. -All types have a _kind_, either the kind of proper types or a _higher kind_. -A subset of proper types called _value types_ represents sets of (first-class) values. -Types are either _concrete_ or _abstract_. - -Every concrete value type can be represented as a _class type_, i.e. a [type designator](#type-designators) that refers to a [class or a trait](05-classes-and-objects.html#class-definitions) [^1], or as a [compound type](#compound-types) representing an intersection of types, possibly with a [refinement](#compound-types) that further constrains the types of its members. 
-
-
-Abstract types are introduced by [type parameters](04-basic-declarations-and-definitions.html#type-parameters) and [abstract type bindings](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases).
-Parentheses in types can be used for grouping.
-
-[^1]: We assume that objects and packages also implicitly
-  define a class (of the same name as the object or package, but
-  inaccessible to user programs).
-
-Non-value types capture properties of identifiers that [are not values](#non-value-types).
-For example, a [type constructor](#type-constructors) does not directly specify a type of values.
-However, when a type constructor is applied to the correct type arguments, it yields a proper type, which may be a value type.
-
-Non-value types are expressed indirectly in Scala.
-E.g., a method type is described by writing down a method signature, which in itself is not a real type, although it gives rise to a corresponding [method type](#method-types).
-Type constructors are another example, as one can write `type Swap[m[_, _], a,b] = m[b, a]`, but there is no syntax to write the corresponding anonymous type function directly.
-
-`AnyKind` is the super type of all types in the Scala type system.
-It has all possible kinds to encode [kind polymorphism](#kind-polymorphism).
-As such, it is neither a value type nor a type constructor.
-
-## Paths
+Type                  ::=  FunType
+                        |  TypeLambda
+                        |  InfixType
+FunType               ::=  FunTypeArgs ‘=>’ Type
+                        |  TypeLambdaParams ‘=>’ Type
+TypeLambda            ::=  TypeLambdaParams ‘=>>’ Type
+InfixType             ::=  RefinedType
+                        |  RefinedTypeOrWildcard id [nl] RefinedTypeOrWildcard {id [nl] RefinedTypeOrWildcard}
+RefinedType           ::=  AnnotType {[nl] Refinement}
+AnnotType             ::=  SimpleType {Annotation}
+SimpleType            ::=  SimpleLiteral
+                        |  SimpleType1
+SimpleType1           ::=  id
+                        |  Singleton ‘.’ id
+                        |  Singleton ‘.’ ‘type’
+                        |  ‘(’ TypesOrWildcards ‘)’
+                        |  Refinement
+                        |  SimpleType1 TypeArgs
+                        |  SimpleType1 ‘#’ id
+Singleton             ::=  SimpleRef
+                        |  SimpleLiteral
+                        |  Singleton ‘.’ id
+SimpleRef             ::=  id
+                        |  [id ‘.’] ‘this’
+                        |  [id ‘.’] ‘super’ [‘[’ id ‘]’] ‘.’ id
+ParamType             ::=  [‘=>’] ParamValueType
+ParamValueType        ::=  Type [‘*’]
+TypeArgs              ::=  ‘[’ TypesOrWildcards ‘]’
+Refinement            ::=  :<<< [RefineDef] {semi [RefineDef]} >>>
+
+FunTypeArgs           ::=  InfixType
+                        |  ‘(’ [ FunArgTypes ] ‘)’
+                        |  FunParamClause
+FunArgTypes           ::=  FunArgType { ‘,’ FunArgType }
+FunArgType            ::=  Type
+                        |  ‘=>’ Type
+FunParamClause        ::=  ‘(’ TypedFunParam {‘,’ TypedFunParam } ‘)’
+TypedFunParam         ::=  id ‘:’ Type
+
+TypeLambdaParams      ::=  ‘[’ TypeLambdaParam {‘,’ TypeLambdaParam} ‘]’
+TypeLambdaParam       ::=  {Annotation} (id | ‘_’) [TypeParamClause] TypeBounds
+TypeParamClause       ::=  ‘[’ VariantTypeParam {‘,’ VariantTypeParam} ‘]’
+VariantTypeParam      ::=  {Annotation} [‘+’ | ‘-’] (id | ‘_’) [TypeParamClause] TypeBounds
+
+RefineDef             ::=  ‘val’ ValDef
+                        |  ‘def’ DefDef
+                        |  ‘type’ {nl} TypeDef
+
+TypeBounds            ::=  [‘>:’ Type] [‘<:’ Type]
+
+TypesOrWildcards      ::=  TypeOrWildcard {‘,’ TypeOrWildcard}
+TypeOrWildcard        ::=  Type
+                        |  WildcardType
+RefinedTypeOrWildcard ::=  RefinedType
+                        |  WildcardType
+WildcardType          ::=  (‘?‘ | ‘_‘) TypeBounds
+```
+
+The above grammar describes the concrete syntax of types that can be written in user code.
+Semantic operations on types in the Scala type system are better defined in terms of _internal types_, which are desugared from the concrete type syntax.
+
+## Internal Types
+
+The following _abstract grammar_ defines the shape of _internal types_.
+In this specification, unless otherwise noted, "types" refer to internal types.
+Internal types abstract away irrelevant details such as precedence and grouping, and contain shapes of types that cannot be directly expressed using the concrete syntax.
+They also contain simplified, decomposed shapes for complex concrete syntax types, such as refined types.
+
+```ebnf
+Type              ::=  ‘AnyKind‘
+                    |  ‘Nothing‘
+                    |  TypeLambda
+                    |  DesignatorType
+                    |  ParameterizedType
+                    |  ThisType
+                    |  SuperType
+                    |  LiteralType
+                    |  ByNameType
+                    |  AnnotatedType
+                    |  RefinedType
+                    |  RecursiveType
+                    |  RecursiveThis
+                    |  UnionType
+                    |  IntersectionType
+                    |  SkolemType
+
+TypeLambda        ::=  ‘[‘ TypeParams ‘]‘ ‘=>>‘ Type
+TypeParams        ::=  TypeParam {‘,‘ TypeParam}
+TypeParam         ::=  ParamVariance id TypeBounds
+ParamVariance     ::=  ε | ‘+‘ | ‘-‘
+
+DesignatorType    ::=  Prefix ‘.‘ id
+Prefix            ::=  Type
+                    |  PackageRef
+                    |  ε
+PackageRef        ::=  id {‘.‘ id}
+
+ParameterizedType ::=  Type ‘[‘ TypeArgs ‘]‘
+TypeArgs          ::=  TypeArg {‘,‘ TypeArg}
+TypeArg           ::=  Type
+                    |  WildcardTypeArg
+WildcardTypeArg   ::=  ‘?‘ TypeBounds
+
+ThisType          ::=  classid ‘.‘ ‘this‘
+SuperType         ::=  classid ‘.‘ ‘super‘ ‘[‘ classid ‘]‘
+LiteralType       ::=  SimpleLiteral
+ByNameType        ::=  ‘=>‘ Type
+AnnotatedType     ::=  Type Annotation
+
+RefinedType       ::=  Type ‘{‘ Refinement ‘}‘
+Refinement        ::=  ‘type‘ id TypeAliasOrBounds
+                    |  ‘def‘ id ‘:‘ TypeOrMethodic
+                    |  ‘val‘ id ‘:‘ Type
+
+RecursiveType     ::=  ‘{‘ recid ‘=>‘ Type ‘}‘
+RecursiveThis     ::=  recid ‘.‘ ‘this‘
+
+UnionType         ::=  Type ‘|‘ Type
+IntersectionType  ::=  Type ‘&‘ Type
+
+SkolemType        ::=  ‘∃‘ skolemid ‘:‘ Type
+
+TypeOrMethodic    ::=  Type
+                    |  MethodicType
+MethodicType      ::=  MethodType
+                    |  PolyType
+
+MethodType        ::=  ‘(‘ MethodTypeParams ‘)‘ TypeOrMethodic
+MethodTypeParams  ::=  ε
+                    |  MethodTypeParam {‘,‘ MethodTypeParam}
+MethodTypeParam   ::=  id ‘:‘ Type
+
+PolyType          ::=  ‘[‘ PolyTypeParams ‘]‘ TypeOrMethodic
+PolyTypeParams    ::=  PolyTypeParam {‘,‘ PolyTypeParam}
+PolyTypeParam     ::=  id TypeBounds
+
+TypeAliasOrBounds ::=  TypeAlias
+                    |  TypeBounds
+TypeAlias         ::=  ‘=‘ Type
+TypeBounds        ::=  ‘>:‘ Type ‘<:‘ Type
+```
+
+### Translation of Concrete Types into Internal Types
+
+Concrete types are recursively translated, or desugared, into internal types.
+Most shapes of concrete types have a one-to-one translation to shapes of internal types.
+We elaborate hereafter on the translation of the other ones.
+
+### Infix Types

```ebnf
-Path ::= StableId
-       | [id ‘.’] this
-StableId ::= id
-           | Path ‘.’ id
-           | [id ‘.’] ‘super’ [ClassQualifier] ‘.’ id
-ClassQualifier ::= ‘[’ id ‘]’
+InfixType ::= RefinedTypeOrWildcard id [nl] RefinedTypeOrWildcard {id [nl] RefinedTypeOrWildcard}
```
-
-Paths are not types themselves, but they can be a part of named types and in that function form a central role in Scala's type system.
-A path is one of the following.
+A concrete _infix type_ ´T_1´ `op` ´T_2´ consists of an infix operator `op` which gets applied to two type operands ´T_1´ and ´T_2´.
+The type is translated to the internal type application `op`´[T_1, T_2]´.
+The infix operator `op` may be an arbitrary identifier.

-- The empty path ε (which cannot be written explicitly in user programs).
-- ´C.´`this`, where ´C´ references a class.
-  The path `this` is taken as a shorthand for ´C.´`this` where ´C´ is the name of the class directly enclosing the reference.
-- ´p.x´ where ´p´ is a path and ´x´ is a stable member of ´p´.
-  _Stable members_ are packages or members introduced by object definitions or by value definitions of [non-volatile types](#volatile-types).
-- ´C.´`super`´.x´ or ´C.´`super`´[M].x´
-  where ´C´ references a class and ´x´ references a stable member of the super class or designated parent class ´M´ of ´C´.
-  The prefix `super` is taken as a shorthand for ´C.´`super` where ´C´ is the name of the class directly enclosing the reference.
+Type operators follow the same [precedence and associativity as term operators](06-expressions.html#prefix-infix-and-postfix-operations).
+For example, `A + B * C` parses as `A + (B * C)` and `A | B & C` parses as `A | (B & C)`.
+Type operators ending in a colon ‘:’ are right-associative; all other operators are left-associative.

-A _stable identifier_ is a path which ends in an identifier.
+In a sequence of consecutive type infix operations ´t_0 \, \mathit{op_1} \, t_1 \, \mathit{op_2} \, ... \, \mathit{op_n} \, t_n´, all operators ´\mathit{op}\_1, ..., \mathit{op}\_n´ must have the same associativity.
+If they are all left-associative, the sequence is interpreted as ´(... (t_0 \mathit{op_1} t_1) \mathit{op_2} ...) \mathit{op_n} t_n´, otherwise it is interpreted as ´t_0 \mathit{op_1} (t_1 \mathit{op_2} ( ... \mathit{op_n} t_n) ...)´.

-## Value Types
+Under `-source:future`, if the type name is alphanumeric and the target type is not marked [`infix`](./05-classes-and-objects.html#infix), a deprecation warning is emitted.

-Every value in Scala has a type which is of one of the following forms.
+The type operators `|` and `&` are not really special.
+Nevertheless, unless shadowed, they resolve to [the fundamental type aliases `scala.|` and `scala.&`](./12-the-scala-standard-library.html#fundamental-type-aliases), which represent [union and intersection types](#union-and-intersection-types), respectively.

-### Singleton Types
+### Function Types

```ebnf
-SimpleType ::= Path ‘.’ ‘type’
+Type           ::=  FunTypeArgs ‘=>’ Type
+FunTypeArgs    ::=  InfixType
+                 |  ‘(’ [ FunArgTypes ] ‘)’
+                 |  FunParamClause
+FunArgTypes    ::=  FunArgType { ‘,’ FunArgType }
+FunArgType     ::=  Type
+                 |  ‘=>’ Type
+FunParamClause ::=  ‘(’ TypedFunParam {‘,’ TypedFunParam } ‘)’
+TypedFunParam  ::=  id ‘:’ Type
```

-A _singleton type_ is of the form ´p.´`type`.
-Where ´p´ is a path pointing to a value which [conforms](06-expressions.html#expression-typing) to `scala.AnyRef`, the type denotes the set of values consisting of `null` and the value denoted by ´p´ (i.e., the value ´v´ for which `v eq p`).
-Where the path does not conform to `scala.AnyRef` the type denotes the set consisting of only the value denoted by ´p´.
+The concrete function type ´(T_1, ..., T_n) \Rightarrow R´ represents the set of function values that take arguments of types ´T_1, ..., T_n´ and yield results of type ´R´.
+The case of exactly one argument type ´T \Rightarrow R´ is a shorthand for ´(T) \Rightarrow R´.
+An argument type of the form ´\Rightarrow T´ represents a [call-by-name parameter](04-basic-definitions.html#by-name-parameters) of type ´T´.
+
+Function types associate to the right, e.g. ´S \Rightarrow T \Rightarrow R´ is the same as ´S \Rightarrow (T \Rightarrow R)´.
+
+Function types are [covariant](04-basic-definitions.md#variance-annotations) in their result type and [contravariant](04-basic-definitions.md#variance-annotations) in their argument types.

-
+Function types translate into internal class types that define an `apply` method.
+Specifically, the ´n´-ary function type ´(T_1, ..., T_n) \Rightarrow R´ translates to the internal class type `scala.Function´_n´[´T_1´, ..., ´T_n´, ´R´]`.
+In particular ´() \Rightarrow R´ is a shorthand for class type `scala.Function´_0´[´R´]`.

-### Literal Types
+Such class types behave as if they were instances of the following trait:
+
+```scala
+trait Function´_n´[-´T_1´, ..., -´T_n´, +´R´]:
+  def apply(´x_1´: ´T_1´, ..., ´x_n´: ´T_n´): ´R´
+```
+
+Their exact supertype and implementation can be consulted in the [function classes section](./12-the-scala-standard-library.md#the-function-classes) of the standard library page in this document.
+
+_Dependent function types_ are function types whose parameters are named and can be referred to in result types.
+In the concrete type ´(x_1: T_1, ..., x_n: T_n) \Rightarrow R´, ´R´ can refer to the parameters ´x_i´, notably to form path-dependent types.
+It translates to the internal [refined type](#refined-types)
+```scala
+scala.Function´_n´[´T_1´, ..., ´T_n´, ´S´] {
+  def apply(´x_1´: ´T_1´, ..., ´x_n´: ´T_n´): ´R´
+}
+```
+where ´S´ is the least supertype of ´R´ that does not mention any of the ´x_i´.
+
+_Polymorphic function types_ are function types that take type arguments.
+Their result type must be a function type.
+In the concrete type ´[a_1 >: L_1 <: H_1, ..., a_n >: L_n <: H_n] => (T_1, ..., T_m) => R´, the types ´T_j´ and ´R´ can refer to the type parameters ´a_i´.
+It translates to the internal refined type
+```scala
+scala.PolyFunction {
+  def apply[´a_1 >: L_1 <: H_1, ..., a_n >: L_n <: H_n´](´x_1´: ´T_1´, ..., ´x_m´: ´T_m´): ´R´
+}
+```
+
+### Tuple Types

```ebnf
-SimpleType ::= Literal
+SimpleType1 ::=  ...
+              |  ‘(’ TypesOrWildcards ‘)’
```

-A literal type `lit` is a special kind of singleton type which denotes the single literal value `lit`.
-Thus, the type ascription `1: 1` gives the most precise type to the literal value `1`: the literal type `1`.
+A _tuple type_ ´(T_1, ..., T_n)´ where ´n \geq 2´ is sugar for the type `´T_1´ *: ... *: ´T_n´ *: scala.EmptyTuple`, which is itself a series of nested infix types which are sugar for `*:[´T_1´, *:[´T_2´, ... *:[´T_n´, scala.EmptyTuple]]]`.
+The ´T_i´ can be wildcard type arguments.

-At run time, an expression `e` is considered to have literal type `lit` if `e == lit`.
-Concretely, the result of `e.isInstanceOf[lit]` and `e match { case _ : lit => }` is determined by evaluating `e == lit`.
+Notes:
+
+- `(´T_1´)` is the type ´T_1´, and not `´T_1´ *: scala.EmptyTuple` (´T_1´ cannot be a wildcard type argument in that case).
+- `()` is not a valid type (i.e. it is not desugared to `scala.EmptyTuple`).
+
+### Concrete Refined Types
+
+```ebnf
+RefinedType ::=  AnnotType {[nl] Refinement}
+SimpleType1 ::=  ...
+              |  Refinement
+Refinement  ::=  :<<< [RefineDef] {semi [RefineDef]} >>>
+
+RefineDef   ::=  ‘val’ ValDef
+              |  ‘def’ DefDef
+              |  ‘type’ {nl} TypeDef
+```
+
+In the concrete syntax of types, refinements can contain several refined definitions.
+They must all be abstract.
+Moreover, the refined definitions can refer to each other as well as to members of the parent type, i.e., they have access to `this`.
+
+In the internal types, each refinement defines exactly one refined definition, and references to `this` must be made explicit in a recursive type.
+
+The conversion from the concrete syntax to the abstract syntax works as follows:

-Literal types are available for all types for which there is dedicated syntax except `Unit`.
-This includes the numeric types (other than `Byte` and `Short` which don't currently have syntax), `Boolean`, `Char` and `String`.
+1. Create a fresh recursive this name ´\alpha´.
+2.
Replace every implicit or explicit reference to `this` in the refinement definitions by ´\alpha´.
+3. Create nested [refined types](#refined-types), one for every refined definition.
+4. If ´\alpha´ was actually used, wrap the result in a [recursive type](#recursive-types) `{ ´\alpha´ => ´...´ }`.

-
-### Stable Types
-A _stable type_ is a singleton type, a literal type, or a type that is declared to be a subtype of trait `scala.Singleton`.
+### Concrete Type Lambdas

-### Type Projection
+```ebnf
+TypeLambda       ::=  TypeLambdaParams ‘=>>’ Type
+TypeLambdaParams ::=  ‘[’ TypeLambdaParam {‘,’ TypeLambdaParam} ‘]’
+TypeLambdaParam  ::=  {Annotation} (id | ‘_’) [TypeParamClause] TypeBounds
+TypeParamClause  ::=  ‘[’ VariantTypeParam {‘,’ VariantTypeParam} ‘]’
+VariantTypeParam ::=  {Annotation} [‘+’ | ‘-’] (id | ‘_’) [TypeParamClause] TypeBounds
+```
+
+At the top level of concrete type lambda parameters, variance annotations are not allowed.
+However, in internal types, all type lambda parameters have explicit variance annotations.
+
+When translating a concrete type lambda into an internal one, the variance of each type parameter is _inferred_ from its usages in the body of the type lambda.
+
+## Definitions
+
+From here onwards, we refer to internal types by default.
+
+### Kinds
+
+The Scala type system is fundamentally higher-kinded.
+_Types_ are either _proper types_, _type constructors_ or _poly-kinded types_.
+
+- Proper types are the types of _terms_.
+- Type constructors are type-level functions from types to types.
+- Poly-kinded types can take various kinds.
+
+All types live in a single lattice with respect to a [_conformance_](#conformance) relationship ´<:´.
+The _top type_ is `AnyKind` and the _bottom type_ is `Nothing`: all types conform to `AnyKind`, and `Nothing` conforms to all types.
+They can be referred to with [the fundamental type aliases `scala.AnyKind` and `scala.Nothing`](./12-the-scala-standard-library.html#fundamental-type-aliases), respectively.
+
+Types can be _concrete_ or _abstract_.
+An abstract type ´T´ always has lower and upper bounds ´L´ and ´H´ such that ´L <: T´ and ´T <: H´.
+A concrete type ´T´ is considered to have itself as both lower and upper bound.
+
+The kind of a type is indicated by its (transitive) upper bound:
+
+- A type `´T <:´ scala.Any` is a proper type.
+- A type `´T <: K´` where ´K´ is a [_type lambda_](#type-lambdas) (of the form `[´\pm a_1 >: L_1 <: H_1´, ..., ´\pm a_n >: L_n <: H_n´] =>> ´U´`) is a type constructor.
+- Other types are poly-kinded; they are neither proper types nor type constructors.
+
+As a consequence, `AnyKind` itself is poly-kinded.
+`Nothing` is _universally-kinded_: it has all kinds at the same time, since it conforms to all types.
+
+With this representation, it is rarely necessary to explicitly talk about the kinds of types.
+Usually, the kinds of types are implicit through their bounds.
+
+Another way to look at it is that type bounds _are_ kinds.
+They represent sets of types: ´>: L <: H´ denotes the set of types ´T´ such that ´L <: T´ and ´T <: H´.
+A set of types can be seen as a _type of types_, i.e., as a _kind_.
+
+#### Conventions
+
+Type bounds are formally always of the form `´>: L <: H´`.
+By convention, we can omit either or both of the bounds in writing.
+
+- When omitted, the lower bound ´L´ is `Nothing`.
+- When omitted, the higher bound ´H´ is `Any` (_not_ `AnyKind`).
+
+These conventions correspond to the defaults in the concrete syntax.
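+
+As a non-normative illustration of these conventions (the trait and member names below are ours, not part of the specification), the upper bound of each abstract type member determines its kind:
+
+```scala
+trait Kinds:
+  type P                         // proper type: the upper bound defaults to scala.Any
+  type F <: [X] =>> Iterable[X]  // type constructor: its upper bound is a type lambda
+  type K <: AnyKind              // poly-kinded: K conforms only to AnyKind
+```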
+
+### Proper Types
+
+Proper types are also called _value types_, as they represent sets of _values_.
+
+_Stable types_ are value types that contain exactly one non-`null` value.
+Stable types can be used as prefixes in named [designator types](#designator-types).
+The stable types are
+
+- designator types referencing a stable term,
+- this types,
+- super types,
+- literal types,
+- recursive this types, and
+- skolem types.
+
+Every stable type ´T´ is concrete and has an _underlying_ type ´U´ such that ´T <: U´.
+
+### Type Constructors
+
+To each type constructor corresponds an _inferred type parameter clause_ which is computed as follows:
+
+- For a [type lambda](#type-lambdas), its type parameter clause (including variance annotations).
+- For a [polymorphic class type](#type-designators), the type parameter clause of the referenced class definition.
+- For a non-class [type designator](#type-designators), the inferred clause of its upper bound.
+
+### Type Definitions
+
+A _type definition_ ´D´ represents the right-hand-side of a `type` member definition or the bounds of a type parameter.
+It is either:
+
+- a type alias of the form ´= U´, or
+- an abstract type definition with bounds ´>: L <: H´.
+
+All type definitions have a lower bound ´L´ and an upper bound ´H´, which are types.
+For type aliases, ´L = H = U´.
+
+The type definition of a type parameter is never a type alias.
+
+## Types
+
+### Type Lambdas

```ebnf
-SimpleType ::= SimpleType ‘#’ id
+TypeLambda    ::=  ‘[‘ TypeParams ‘]‘ ‘=>>‘ Type
+TypeParams    ::=  TypeParam {‘,‘ TypeParam}
+TypeParam     ::=  ParamVariance id TypeBounds
+ParamVariance ::=  ε | ‘+‘ | ‘-‘
```

-A _type projection_ ´T´#´x´ references the type member named ´x´ of type ´T´.
+A _type lambda_ of the form `[´\pm a_1 >: L_1 <: H_1´, ..., ´\pm a_n >: L_n <: H_n´] =>> ´U´` is a direct representation of a type constructor with ´n´ type parameters.
+When applied to ´n´ type arguments that conform to the specified bounds, it produces another type ´U´.
+Type lambdas are always concrete types.
+
+The scope of a type parameter extends over the result type ´U´ as well as the bounds of the type parameters themselves.
+
+All type constructors conform to some type lambda.
+
+The type bounds of the parameters of a type lambda are in contravariant position, while its result type is in covariant position.
+If some type constructor `´T <:´ [´\pm a_1 >: L_1 <: H_1´, ..., ´\pm a_n >: L_n <: H_n´] =>> ´U´`, then ´T´'s ´i´th type parameter bounds contain the bounds ´>: L_i <: H_i´, and its result type conforms to ´U´.
+
+Note: the concrete syntax of type lambdas does not allow specifying variances for type parameters.
+Instead, variances are inferred from the body of the lambda to be as general as possible.
+
+##### Example
+
+```scala
+type Lst = [T] =>> List[T] // T is inferred to be covariant with bounds >: Nothing <: Any
+type Fn = [A <: Seq[?], B] =>> (A => B) // A is inferred to be contravariant, B covariant
+
+val x: Lst[Int] = List(1) // ok, Lst[Int] expands to List[Int]
+val f: Fn[List[Int], Int] = (x: List[Int]) => x.head // ok
-
+val g: Fn[Int, Int] = (x: Int) => x // error: Int does not conform to the bound Seq[?]
+
+def liftPair[F <: [T] =>> Any](f: F[Int]): Any = f
+liftPair[Lst](List(1)) // ok, Lst <: ([T] =>> Any)
+```

-### Type Designators
+### Designator Types

```ebnf
-SimpleType ::= StableId
+DesignatorType ::=  Prefix ‘.‘ id
+Prefix         ::=  Type
+                 |  PackageRef
+                 |  ε
+PackageRef     ::=  id {‘.‘ id}
```

-A _type designator_ refers to a named value type.
-It can be simple or qualified.
-All such type designators are shorthands for type projections.
+A designator type (or designator for short) is a reference to a definition.
+Term designators refer to term definitions, while type designators refer to type definitions.

-Specifically, the unqualified type name ´t´ where ´t´ is bound in some class, object, or package ´C´ is taken as a shorthand for
-´C.´`this.type#`´t´.
-If ´t´ is not bound in a class, object, or package, then ´t´ is taken as a shorthand for ε`.type#`´t´.
+In the abstract syntax, the `id` retains whether it is a term or type.
+In the concrete syntax, an `id` refers to a *type* designator, while `id.type` refers to a *term* designator.
+In that context, term designators are often called _singleton types_.

-A qualified type designator has the form `p.t` where `p` is a [path](#paths) and _t_ is a type name.
-Such a type designator is equivalent to the type projection `p.type#t`.
+Designators with an empty prefix ´\epsilon´ are called direct designators.
+They refer to local definitions available in the scope:

-###### Example
+- Local `type`, `object`, `val`, `lazy val`, `var` or `def` definitions
+- Term or type parameters
+
+The `id`s of direct designators are protected from accidental shadowing in the abstract syntax.
+They retain the identity of the exact definition they refer to, rather than relying on scope-based name resolution. [^debruijnoralpha]
+
+[^debruijnoralpha]: In the literature, this is often achieved through De Bruijn indices or through alpha-renaming when needed. In a concrete implementation, this is often achieved through retaining *symbolic* references in a symbol table.
+
+The ´\epsilon´ prefix cannot be written in the concrete syntax.
+A bare `id` is used instead and resolved based on scopes.
+
+Named designators refer to *member* definitions of a non-empty prefix:
+
+- Top-level definitions, including top-level classes, have a package ref prefix
+- Class member definitions and refinements have a type prefix
+
+#### Term Designators
+
+A term designator ´p.x´ referring to a term definition `t` has an _underlying type_ ´U´.
+If ´p = \epsilon´ or ´p´ is a package ref, the underlying type ´U´ is the _declared type_ of `t` and ´p.x´ is a stable type if and only if `t` is a `val` or `object` definition.
+Otherwise, the underlying type ´U´ and whether ´p.x´ is a stable type are determined by [`memberType`](#member-type)`(´p´, ´x´)`.
+
+All term designators are concrete types.
+If `scala.Null ´<: U´`, the term designator denotes the set of values consisting of `null` and the value denoted by ´t´, i.e., the value ´v´ for which `t eq v`.
+Otherwise, the designator denotes the singleton set only containing ´v´.
+
+#### Type Designators

-Some type designators and their expansions are listed below.
-We assume a local type parameter ´t´, a value `maintable` with a type member `Node` and the standard class `scala.Int`,
+A type designator ´p.C´ referring to a _class_ definition (including traits and hidden object classes) is a _class type_.
+If the class is monomorphic, the type designator is a value type denoting the set of instances of ´C´ or any of its subclasses.
+Otherwise it is a type constructor with the same type parameters as the class definition.
+All class types are concrete, non-stable types.
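+
+As a non-normative illustration (the definitions below are ours):
+
+```scala
+class C
+object a:
+  val x: Int = 1
+
+val c: C = C()        // `C` is a type designator referring to a class: a concrete, non-stable class type
+val y: a.x.type = a.x // `a.x.type` is a term designator (a singleton type): stable, with underlying type Int
+```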
-| Designator          | Expansion                 |
-|-------------------- | --------------------------|
-|t                    | ε.type#t                  |
-|Int                  | scala.type#Int            |
-|scala.Int            | scala.type#Int            |
-|data.maintable.Node  | data.maintable.type#Node  |
+If a type designator ´p.T´ is not a class type, it refers to a type definition `T` (a type parameter or a `type` member definition) and has an _underlying [type definition](#type-definitions)_.
+If ´p = \epsilon´ or ´p´ is a package ref, the underlying type definition is the _declared type definition_ of `T`.
+Otherwise, it is determined by [`memberType`](#member-type)`(´p´, ´T´)`.
+A non-class type designator is concrete (resp. stable) if and only if its underlying type definition is an alias ´U´ and ´U´ is itself concrete (resp. stable).

### Parameterized Types

```ebnf
-SimpleType ::= SimpleType TypeArgs
-TypeArgs ::= ‘[’ Types ‘]’
+ParameterizedType ::=  Type ‘[‘ TypeArgs ‘]‘
+TypeArgs          ::=  TypeArg {‘,‘ TypeArg}
+TypeArg           ::=  Type
+                    |  WildcardTypeArg
+WildcardTypeArg   ::=  ‘?‘ TypeBounds
```

-A _parameterized type_ ´T[ T_1, ..., T_n ]´ consists of a type designator ´T´ and type arguments ´T_1, ..., T_n´ where ´n \geq 1´.
-´T´ must refer to a type constructor which takes ´n´ type parameters ´a_1, ..., a_n´.
+A _parameterized type_ ´T[T_1, ..., T_n]´ consists of a type constructor ´T´ and type arguments ´T_1, ..., T_n´ where ´n \geq 1´.
+The parameterized type is well-formed if

-
-Say the type parameters have lower bounds ´L_1, ..., L_n´ and upper bounds ´U_1, ..., U_n´.
-The parameterized type is well-formed if each type argument _conforms to its bounds_, i.e. ´\sigma L_i <: T_i <: \sigma U_i´ where ´\sigma´ is the substitution ´[ a_1 := T_1, ..., a_n := T_n ]´.
+- ´T´ is a type constructor which takes ´n´ type parameters ´a_1, ..., a_n´, i.e., it must conform to a type lambda of the form ´[\pm a_1 >: L_1 <: H_1, ..., \pm a_n >: L_n <: H_n] =>> U´, and
+- if ´T´ is an abstract type constructor, none of the type arguments is a wildcard type argument, and
+- each type argument _conforms to its bounds_, i.e., given ´\sigma´ the substitution ´[a_1 := T_1, ..., a_n := T_n]´, for each ´i´, either:
+  - ´T_i´ is a type and ´\sigma L_i <: T_i <: \sigma H_i´, or
+  - ´T_i´ is a wildcard type argument ´? >: L_{Ti} <: H_{Ti}´ and ´\sigma L_i <: L_{Ti}´ and ´H_{Ti} <: \sigma H_i´.
+
+´T[T_1, ..., T_n]´ is a _parameterized class type_ if and only if ´T´ is a [class type](#type-designators).
+All parameterized class types are value types.
+
+In the concrete syntax of wildcard type arguments, if both bounds are omitted, the real bounds are inferred from the bounds of the corresponding type parameter in the target type constructor (which must be concrete).
+If only one bound is omitted, `Nothing` or `Any` is used, as usual.
+
+Also in the concrete syntax, `_` can be used instead of `?` for compatibility reasons, with the same meaning.
+This alternative will be deprecated in the future, and is already deprecated under `-source:future`.
+
+#### Simplification Rules
+
+Wildcard type arguments used in covariant or contravariant positions can always be simplified to regular types.
+
+Let ´T[T_1, ..., T_n]´ be a parameterized type for a concrete type constructor.
+Then, applying a wildcard type argument ´? >: L <: H´ at the ´i´'th position obeys the following equivalences:
+
+- If the ´i´'th type parameter of ´T´ is declared covariant, then ´T[..., ? >: L <: H, ...] =:= T[..., H, ...]´.
+- If the ´i´'th type parameter of ´T´ is declared contravariant, then ´T[..., ? >: L <: H, ...] =:= T[..., L, ...]´.
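+
+As a non-normative sketch of these equivalences (the classes are ours; we believe current Scala 3 compilers accept both evidence values):
+
+```scala
+class Cov[+A]
+class Con[-A]
+
+// covariant parameter: the wildcard simplifies to its upper bound
+val ev1 = summon[Cov[? <: String] =:= Cov[String]]
+// contravariant parameter: the wildcard simplifies to its lower bound
+val ev2 = summon[Con[? >: String] =:= Con[String]]
+```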
#### Example Parameterized Types @@ -180,12 +515,12 @@ Given the partial type definitions: ```scala class TreeMap[A <: Comparable[A], B] { ... } -class List[A] { ... } +class List[+A] { ... } class I extends Comparable[I] { ... } -class F[M[A], X] { ... } +class F[M[A], X] { ... } // M[A] desugars to M <: [A] =>> Any class S[K <: String] { ... } -class G[M[Z <: I], I] { ... } +class G[M[Z <: I], I] { ... } // M[Z <: I] desugars to M <: [Z <: I] =>> Any ``` the following parameterized types are well-formed: @@ -196,7 +531,13 @@ List[I] List[List[Boolean]] F[List, Int] +F[[X] =>> List[X], Int] G[S, String] + +List[?] // ? inferred as List[_ >: Nothing <: Any], equivalent to List[Any] +List[? <: String] // equivalent to List[String] +S[? <: String] +F[?, Boolean] // ? inferred as ? >: Nothing <: [A] =>> Any ``` and the following types are ill-formed: @@ -204,96 +545,100 @@ and the following types are ill-formed: ```scala TreeMap[I] // illegal: wrong number of parameters TreeMap[List[I], Int] // illegal: type parameter not within bound +List[[X] => List[X]] F[Int, Boolean] // illegal: Int is not a type constructor F[TreeMap, Int] // illegal: TreeMap takes two parameters, // F expects a constructor taking one +F[[X, Y] => (X, Y)] G[S, Int] // illegal: S constrains its parameter to // conform to String, // G expects type constructor with a parameter // that conforms to Int ``` -#### Wildcard Type Argument +The following code also contains an ill-formed type: - -```ebnf -WildcardType ::= ‘_’ TypeBounds +```scala +trait H[F[A]]: // F[A] desugars to F <: [A] =>> Any, which is abstract + def f: F[_] // illegal : an abstract type constructor + // cannot be applied to wildcard arguments. ``` -A _wildcard type argument_ is of the form `_´\;´>:´\,L\,´<:´\,U´`. -A wildcard type must appear as a type argument of a parameterized type. -The parameterized type to which the wildcard type is applied cannot be an abstract type constructor. - -Both bound clauses may be omitted. -If both bounds are omitted, the real bounds are inferred from the bounds of the corresponding type parameter in the target type constructor. -Otherwise, if a lower bound clause `>:´\,L´` is missing, `>:´\,´scala.Nothing` is assumed. -Otherwise, if an upper bound clause `<:´\,U´` is missing, `<:´\,´scala.Any` is assumed. +### This Types -Given the [above type definitions](#example-parameterized-types), the following types are well-formed: - -```scala -List[_] // inferred as List[_ >: Nothing <: Any] -List[_ <: java.lang.Number] -S[_ <: String] -F[_, Boolean] +```ebnf +ThisType ::= classid ‘.‘ ‘this‘ ``` -and the following code contains an ill-formed type: +A _this type_ `´C´.this` denotes the `this` value of class ´C´ within ´C´. -```scala -trait H[F[A]]: - def f: F[_] // illegal : an abstract type constructor - // cannot be applied to wildcard arguments. -``` +This types often appear implicitly as the prefix of [designator types](#designator-types) referring to members of ´C´. +They play a particular role in the type system, since they are affected by the [as seen from](#as-seen-from) operation on types. -Wildcard types may also appear as parts of [infix types](#infix-types), [function types](#function-types), or [tuple types](#tuple-types). -Their expansion is then the expansion in the equivalent parameterized type. +This types are stable types. +The underlying type of `´C´.this` is the [self type](05-classes-and-objects.html#templates) of ´C´. -##### Simplification Rules +### Super Types -Let ´T[T_1, ..., T_n]´ be a parameterized type. 
-Then, applying a wildcard type argument ´t´ of the form ´\\_ >: L <: U´ at the ´i´'th position obeys the following equivalences: +```ebnf +SuperType ::= classid ‘.‘ ‘super‘ ‘[‘ classid ‘]‘ +``` -- If the type parameter ´T_i´ is declared covariant, then ´t \equiv U´ -- If the type parameter ´T_i´ is declared contravariant, then ´t \equiv L´ +A _super type_ `´C´.super[´D´]` denotes the `this` value of class `C` within `C`, but "widened" to only see members coming from a parent class or trait ´D´. -### Tuple Types +Super types exist for compatibility with Scala 2, which allows shadowing of inner classes. +In a Scala 3-only context, a super type can always be replaced by the corresponding [this type](#this-types). +Therefore, we omit further discussion of super types in this specification. + +### Literal Types ```ebnf -SimpleType ::= ‘(’ Types ‘)’ +LiteralType ::= SimpleLiteral ``` -A _tuple type_ ´(T_1, ..., T_n)´ where ´n \geq 2´ is an alias for the type `´T_1´ *: ... *: ´T_n´ *: scala.EmptyTuple`. +A literal type `lit` denotes the single literal value `lit`. +Thus, the type ascription `1: 1` gives the most precise type to the literal value `1`: the literal type `1`. -Notes: -- `(´T´)` is just the type ´T´, and not `´T´ *: scala.EmptyTuple`. -- `()` is not a valid type, and not `scala.EmptyTuple`. +At run time, an expression `e` is considered to have literal type `lit` if `e == lit`. +Concretely, the result of `e.isInstanceOf[lit]` and `e match { case _ : lit => }` is determined by evaluating `e == lit`. -If ´n \leq 22´, the type `´T_1´ *: ... *: ´T_n´ *: scala.EmptyTuple` is both a subtype and a supertype of tuple class `scala.Tuple´_n´[´T_1´, ..., ´T_n´]`. +Literal types are available for all primitive types, as well as for `String`. +However, only literal types for `Int`, `Long`, `Float`, `Double`, `Boolean`, `Char` and `String` can be expressed in the concrete syntax. -Tuple classes are case classes whose fields can be accessed using selectors `_1`, ..., `_n`. -Their functionality is abstracted in the corresponding `scala.Product_´n´` trait. -The _n_-ary tuple class and product trait are defined at least as follows in the standard Scala library (they might also add other methods and implement other traits). +Literal types are stable types. +Their underlying type is the primitive type containing their value. + +##### Example ```scala -case class Tuple´_n´[+´T_1´, ..., +´T_n´](_1: ´T_1´, ..., _n: ´T_n´) -extends Product´_n´[´T_1´, ..., ´T_n´] +val x: 1 = 1 +val y: false = false +val z: false = y +val int: Int = x + +val badX: 1 = int // error: Int is not a subtype of 1 +val badY: false = true // error: true is not a subtype of false +``` -trait Product´_n´[+´T_1´, ..., +´T_n´] extends Product: - override def productArity = ´n´ - def _1: ´T_1´ - ... - def _n: ´T_n´ +### By-Name Types + +```ebnf +ByNameType ::= ‘=>‘ Type ``` +A by-name type ´=> T´ denotes the declared type of a by-name term parameter. +By-name types can only appear as the types of parameters in method types, and as type arguments in [parameterized types](#parameterized-types). + + + ### Annotated Types ```ebnf -AnnotType ::= SimpleType {Annotation} +AnnotatedType ::= Type Annotation ``` -An _annotated type_ ´T´ ´a_1, ..., a_n´ attaches [annotations](11-annotations.html#user-defined-annotations) ´a_1, ..., a_n´ to the type ´T´. +An _annotated type_ ´T a´ attaches the [annotation](11-annotations.html#user-defined-annotations) ´a´ to the type ´T´. 
###### Example

@@ -303,83 +648,121 @@ The following type adds the `@suspendable` annotation to the type `String`:
 String @suspendable
 ```

-### Compound Types
+### Refined Types

```ebnf
-CompoundType ::= AnnotType {‘with’ AnnotType} [Refinement]
-               | Refinement
-Refinement ::= [nl] ‘{’ RefineStat {semi RefineStat} ‘}’
-RefineStat ::= Dcl
-             | ‘type’ TypeDef
-             |
+RefinedType ::=  Type ‘{‘ Refinement ‘}‘
+Refinement  ::=  ‘type‘ id TypeAliasOrBounds
+              |  ‘def‘ id ‘:‘ TypeOrMethodic
+              |  ‘val‘ id ‘:‘ Type
```

-A _compound type_ ´T_1´ `with` ... `with` ´T_n \\{ R \\}´ represents objects with members as given in the component types ´T_1, ..., T_n´ and the refinement ´\\{ R \\}´.
-A refinement ´\\{ R \\}´ contains declarations and type definitions.
-If a declaration or definition overrides a declaration or definition in one of the component types ´T_1, ..., T_n´, the usual rules for [overriding](05-classes-and-objects.html#overriding) apply; otherwise the declaration or definition is said to be “structural” [^2].
+A _refined type_ ´T { R }´ denotes the set of values that belong to ´T´ and also have a _member_ conforming to the refinement ´R´.

-[^2]: A reference to a structurally defined member (method call or access to a value or variable) may generate binary code that is significantly slower than an equivalent code to a non-structural member.
+The refined type ´T { R }´ is well-formed if:
+
+- ´T´ is a proper type, and
+- if ´R´ is a term (`def` or `val`) refinement, the refined type is a proper type, and
+- if ´R´ overrides a member of ´T´, the usual rules for [overriding](05-classes-and-objects.html#overriding) apply, and
+- if ´R´ is a `def` refinement with a [polymorphic method type](#polymorphic-method-types), then ´R´ overrides a member definition of ´T´.
+
+As an exception to the last rule, a polymorphic method type refinement is allowed if `´T <:´ scala.PolyFunction` and ´id´ is the name `apply`.

-Within a method declaration in a structural refinement, the type of any value parameter may only refer to type parameters or abstract types that are contained inside the refinement.
-That is, it must refer either to a type parameter of the method itself, or to a type definition within the refinement.
-This restriction does not apply to the method's result type.
+If the refinement ´R´ overrides no member of ´T´ and is not an occurrence of the `scala.PolyFunction` exception, the refinement is said to be “structural” [^2].

-If no refinement is given, the empty refinement is implicitly added, i.e. ´T_1´ `with` ... `with` ´T_n´ is a shorthand for ´T_1´ `with` ... `with` ´T_n \\{\\}´.
+[^2]: A reference to a structurally defined member (method call or access to a value or variable) may generate binary code that is significantly slower than equivalent code for a non-structural member.

-A compound type may also consist of just a refinement ´\\{ R \\}´ with no preceding component types.
-Such a type is equivalent to `AnyRef` ´\\{ R \\}´.
+Note: since a refinement does not define a _class_, it is not possible to use a [this type](#this-types) to reference term and type members of the parent type ´T´ within the refinement.
+When the surface syntax of refined types makes such references, a [recursive type](#recursive-types) wraps the refined type, giving access to members of self through a recursive-this type.

###### Example

-The following example shows how to declare and use a method which has a parameter type that contains a refinement with structural declarations.
+Given the following class definitions:

```scala
-case class Bird (val name: String) extends Object {
-        def fly(height: Int) = ...
-...
-}
-case class Plane (val callsign: String) extends Object {
-  def fly(height: Int) = ...
-...
-}
-def takeoff(
-      runway: Int,
-    r: { val callsign: String; def fly(height: Int) }) = {
-  tower.print(r.callsign + " requests take-off on runway " + runway)
-  tower.read(r.callsign + " is clear for take-off")
-  r.fly(1000)
-}
-val bird = new Bird("Polly the parrot"){ val callsign = name }
-val a380 = new Plane("TZ-987")
-takeoff(42, bird)
-takeoff(89, a380)
+trait T:
+  type X <: Option[Any]
+  def foo: Any
+  def fooPoly[A](x: A): Any
+
+trait U extends T:
+  override def foo: Int
+  override def fooPoly[A](x: A): A
+
+trait V extends T:
+  type X = Some[Int]
+  def bar: Int
+  def barPoly[A](x: A): A
```

-Although `Bird` and `Plane` do not share any parent class other than `Object`, the parameter _r_ of method `takeoff` is defined using a refinement with structural declarations to accept any object that declares a value `callsign` and a `fly` method.
+We get the following conformance relationships:

-### Infix Types
+- `U <: T { def foo: Int }`
+- `U <: T { def fooPoly[A](x: A): A }`
+- `U <: (T { def foo: Int }) { def fooPoly[A](x: A): A }` (we can chain refined types to refine multiple members)
+- `V <: T { type X <: Some[Any] }`
+- `V <: T { type X >: Some[Nothing] }`
+- `V <: T { type X = Some[Int] }`
+- `V <: T { def bar: Any }` (a structural refinement)
+
+The following refined types are not well-formed:
+
+- `T { def barPoly[A](x: A): A }` (structural refinement for a polymorphic method type)
+- `T { type X <: List[Any] }` (does not satisfy overriding rules)
+- `List { def head: Int }` (the parent type `List` is not a proper type)
+- `T { def foo: List }` (the refined type `List` is not a proper type)
+- `T { def foo: T.this.X }` (`T.this` is not allowed outside the body of `T`)
+
+### Recursive Types

```ebnf
-InfixType ::= CompoundType {id [nl] CompoundType}
+RecursiveType ::=  ‘{‘ recid ‘=>‘ Type ‘}‘
+RecursiveThis ::=  recid ‘.‘ ‘this‘
```

-An _infix type_ ´T_1´ `op` ´T_2´ consists of an infix operator `op` which gets applied to two type operands ´T_1´ and ´T_2´.
-The type is equivalent to the type application `op`´[T_1, T_2]´.
-The infix operator `op` may be an arbitrary identifier.
+A _recursive type_ of the form `{ ´\alpha´ => ´T´ }` represents the same values as ´T´, while offering ´T´ access to its _recursive this_ type `´\alpha´`.

-Type operators follow the same [precedence and associativity as term operators](06-expressions.html#prefix-infix-and-postfix-operations).
-For example, `A + B * C` parses as `A + (B * C)` and `A | B & C` parses as `A | (B & C)`.
-Type operators ending in a colon ‘:’ are right-associative; all other operators are left-associative.
+Recursive types cannot directly be expressed in the concrete syntax.
+They are created as needed when a refined type in the concrete syntax contains a refinement that needs access to the `this` value.
+Each recursive type defines a unique self-reference `´\alpha´`, distinct from any other recursive type in the system.

-In a sequence of consecutive type infix operations ´t_0 \, \mathit{op} \, t_1 \, \mathit{op_2} \, ... \, \mathit{op_n} \, t_n´, all operators ´\mathit{op}\_1, ..., \mathit{op}\_n´ must have the same associativity.
-If they are all left-associative, the sequence is interpreted as ´(... (t_0 \mathit{op_1} t_1) \mathit{op_2} ...)
\mathit{op_n} t_n´, otherwise it is interpreted as ´t_0 \mathit{op_1} (t_1 \mathit{op_2} ( ... \mathit{op_n} t_n) ...)´.
+A recursive type can be unfolded during subtyping as needed, replacing references to its `´\alpha´` by a stable reference to the other side of the conformance relationship.

-The type operators `|` and `&` are not really special.
-Nevertheless, unless shadowed, they resolve to `scala.|` and `scala.&`, which represent [union and intersection types](#union-and-intersection-types), respectively.
+##### Example
+
+Given the class definitions in the [refined types](#refined-types) section, we can write the following refined type in the source syntax:
+
+```scala
+T { def foo: X }
+// equivalent to
+T { def foo: this.X }
+```
+
+This type is not directly expressible as a refined type alone, as the refinement cannot access the `this` value.
+Instead, in the abstract syntax of types, it is translated to `{ ´\alpha´ => ´T´ { def foo: ´\alpha´.X } }`.
+
+Given the following definitions:
+
+```scala
+trait Z extends T:
+  type X = Option[Int]
+  def foo: Option[Int] = Some(5)
+
+val z: Z
+```
+
+we can check that `z ´<:´ { ´\alpha´ => ´T´ { def foo: ´\alpha´.X } }`.
+We first unfold the recursive type, substituting ´z´ for ´\alpha´, resulting in `z ´<:´ T { def foo: z.X }`.
+Since the underlying type of ´z´ is ´Z´, we can resolve `z.X` to mean `Option[Int]`, and then validate that `z ´<:´ T` and that `z` has a member `def foo: Option[Int]`.

### Union and Intersection Types

-Syntactically, the types `S | T` and `S & T` are infix types, where the infix operators are `|` and `&`, respectively (see above).
+```ebnf
+UnionType        ::=  Type ‘|‘ Type
+IntersectionType ::=  Type ‘&‘ Type
+```
+
+Syntactically, the types `S | T` and `S & T` are infix types, where the infix operators are `|` and `&`, respectively (see [infix types](#infix-types)).

However, in this specification, ´S | T´ and ´S & T´ refer to the underlying core concepts of *union and intersection types*, respectively.

@@ -390,22 +773,21 @@ From the [conformance rules](#conformance) rules on union and intersection types
 Moreover, `&` is distributive over `|`.
 For any type ´A´, ´B´ and ´C´, all of the following relationships hold:

-- ´A & B \equiv B & A´,
-- ´A | B \equiv B | A´,
-- ´(A & B) & C \equiv A & (B & C)´,
-- ´(A | B) | C \equiv A | (B | C)´, and
-- ´A & (B | C) \equiv (A & B) | (A & C)´.
+- ´A & B =:= B & A´,
+- ´A | B =:= B | A´,
+- ´(A & B) & C =:= A & (B & C)´,
+- ´(A | B) | C =:= A | (B | C)´, and
+- ´A & (B | C) =:= (A & B) | (A & C)´.

-If ´C´ is a type constructor, then ´C[A] & C[B]´ can be simplified using the following three rules:
+If ´C´ is a co- or contravariant type constructor, ´C[A] & C[B]´ can be simplified using the following rules:

-- If ´C´ is covariant, ´C[A] & C[B] \equiv C[A & B]´
-- If ´C´ is contravariant, ´C[A] & C[B] \equiv C[A | B]´
-- If ´C´ is invariant, emit a compile error
+- If ´C´ is covariant, ´C[A] & C[B] =:= C[A & B]´
+- If ´C´ is contravariant, ´C[A] & C[B] =:= C[A | B]´

-From the above rules, we can derive the following conformance relationships:
+The right-to-left validity of the above two rules can be derived from the definition of covariance and contravariance and the conformance rules of union and intersection types:

-- When ´C´ is covariant, ´C[A & B] <: C[A] & C[B]´.
-- When ´C´ is contravariant, ´C[A | B] <: C[A] & C[B]´.
+- When ´C´ is covariant, we can derive ´C[A & B] <: C[A] & C[B]´.
+- When ´C´ is contravariant, we can derive ´C[A | B] <: C[A] & C[B]´.
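+
+As a non-normative sketch (our definitions), both simplification rules can be exercised directly in user code:
+
+```scala
+trait A; trait B
+class Cov[+X]
+class Con[-X]
+
+def covariant(x: Cov[A] & Cov[B]): Cov[A & B] = x     // Cov[A] & Cov[B] =:= Cov[A & B]
+def contravariant(x: Con[A] & Con[B]): Con[A | B] = x // Con[A] & Con[B] =:= Con[A | B]
+```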
#### Join of a union type

@@ -425,56 +807,61 @@ class B extends C[B] with D with E

 The join of ´A | B´ is ´C[A | B] & D´

-### Function Types
+### Skolem Types

```ebnf
-Type ::= FunctionArgs ‘=>’ Type
-FunctionArgs ::= InfixType
-               | ‘(’ [ ParamType {‘,’ ParamType } ] ‘)’
+SkolemType ::=  ‘∃‘ skolemid ‘:‘ Type
```

-The type ´(T_1, ..., T_n) \Rightarrow R´ represents the set of function values that take arguments of types ´T_1, ..., Tn´ and yield results of type ´R´.
-The case of exactly one argument type ´T \Rightarrow R´ is a shorthand for ´(T) \Rightarrow R´.
-An argument type of the form ´\Rightarrow T´ represents a [call-by-name parameter](04-basic-declarations-and-definitions.md#by-name-parameters) of type ´T´.
-
-Function types associate to the right, e.g. ´S \Rightarrow T \Rightarrow R´ is the same as ´S \Rightarrow (T \Rightarrow R)´.
-
-Function types are [covariant](04-basic-declarations-and-definitions.md#variance-annotations) in their result type and [contravariant](04-basic-declarations-and-definitions.md#variance-annotations) in their argument types.
+Skolem types cannot directly be written in the concrete syntax.
+Moreover, although they are proper types, they can never be inferred to be part of the types of term definitions (`val`s, `var`s and `def`s).
+They are exclusively used temporarily during subtyping derivations.

-Function types are shorthands for class types that define an `apply` method.
-Specifically, the ´n´-ary function type ´(T_1, ..., T_n) \Rightarrow R´ is a shorthand for the class type `Function´_n´[´T_1´, ..., ´T_n´, ´R´]`.
-In particular ´() \Rightarrow R´ is a shorthand for class type `Function´_0´[´R´]`.
+Skolem types are stable types.
+A skolem type of the form ´∃ \alpha : T´ represents a stable reference to an unknown value of type ´T´.
+The identifier ´\alpha´ is chosen uniquely every time a skolem type is created.
+However, as a skolem type is stable, it can be substituted in several occurrences in other types.
+When "copied" through substitution, all the copies retain the same ´\alpha´, and are therefore equivalent.

-Such class types behave as if they were instances of the following trait:
+## Methodic Types

-```scala
-trait Function´_n´[-´T_1´, ..., -´T_n´, +´R´]:
-  def apply(´x_1´: ´T_1´, ..., ´x_n´: ´T_n´): ´R´
+```ebnf
+TypeOrMethodic ::=  Type
+                 |  MethodicType
+MethodicType   ::=  MethodType
+                 |  PolyType
```

-Their exact supertype and implementation can be consulted in the [function classes section](./12-the-scala-standard-library.md#the-function-classes) of the standard library page in this document.
+Methodic types are not real types.
+They are not part of the type lattice.

-## Non-Value Types
+However, they share some meta-properties with types.
+In particular, when contained within other types that undergo some substitution, the substitution carries to the types within methodic types.
+It is therefore often convenient to think about them as types themselves.

-The types explained in the following do not denote sets of values.
+Methodic types are used as the "declared type" of `def` definitions that have at least one term or type parameter list.

### Method Types

+```ebnf
+MethodType       ::=  ‘(‘ MethodTypeParams ‘)‘ TypeOrMethodic
+MethodTypeParams ::=  ε
+                   |  MethodTypeParam {‘,‘ MethodTypeParam}
+MethodTypeParam  ::=  id ‘:‘ Type
+```
+
A _method type_ is denoted internally as ´(\mathit{Ps})U´, where ´(\mathit{Ps})´ is a sequence of parameter names and types ´(p_1:T_1, ..., p_n:T_n)´ for some ´n \geq 0´ and ´U´ is a (value or method) type.
This type represents named methods that take arguments named ´p_1, ..., p_n´ of types ´T_1, ..., T_n´ and that return a result of type ´U´. Method types associate to the right: ´(\mathit{Ps}\_1)(\mathit{Ps}\_2)U´ is treated as ´(\mathit{Ps}\_1)((\mathit{Ps}\_2)U)´. -A special case are types of methods without any parameters. -They are written here `=> T`. Parameterless methods name expressions that are re-evaluated each time the parameterless method name is referenced. - Method types do not exist as types of values. If a method name is used as a value, its type is [implicitly converted](06-expressions.html#implicit-conversions) to a corresponding function type. ###### Example -The declarations +The definitions ```scala def a: Int @@ -485,19 +872,25 @@ def c (x: Int) (y: String, z: String): String produce the typings ```scala -a: => Int +a: Int b: (Int) Boolean c: (Int) (String, String) String ``` ### Polymorphic Method Types -A polymorphic method type is denoted internally as `[´\mathit{tps}\,´]´T´` where `[´\mathit{tps}\,´]` is a type parameter section `[´a_1´ >: ´L_1´ <: ´U_1, ..., a_n´ >: ´L_n´ <: ´U_n´]` for some ´n \geq 0´ and ´T´ is a (value or method) type. +```ebnf +PolyType ::= ‘[‘ PolyTypeParams ‘]‘ TypeOrMethodic +PolyTypeParams ::= PolyTypeParam {‘,‘ PolyTypeParam} +PolyTypeParam ::= ‘id‘ TypeBounds +``` + +A polymorphic method type, or _poly type_ for short, is denoted internally as `[´\mathit{tps}\,´]´T´` where `[´\mathit{tps}\,´]` is a type parameter section `[´a_1´ >: ´L_1´ <: ´U_1, ..., a_n´ >: ´L_n´ <: ´U_n´]` for some ´n \geq 0´ and ´T´ is a (value or method) type. This type represents named methods that take type arguments `´S_1, ..., S_n´` which [conform](#parameterized-types) to the lower bounds `´L_1, ..., L_n´` and the upper bounds `´U_1, ..., U_n´` and that yield results of type ´T´. ###### Example -The declarations +The definitions ```scala def empty[A]: List[A] @@ -511,153 +904,176 @@ empty : [A >: Nothing <: Any] List[A] union : [A >: Nothing <: Comparable[A]] (x: Set[A], xs: Set[A]) Set[A] ``` -### Type Constructors +## Operations on Types -``` -Type ::= ... | TypeLambdaParams ‘=>>’ Type -TypeParamClause ::= ‘[’ TypeParam {‘,’ TypeParam} ‘]’ -TypeLambdaParams ::= ‘[’ TypeLambdaParam {‘,’ TypeLambdaParam} ‘]’ -TypeLambdaParam ::= {Annotation} (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type] -``` +This section defines a few meta-functions on types and methodic types. - +- [`baseType(´T´, ´C´)`](#base-type): computes the smallest type ´U´ of the form `´p´.´C´[´T_1, ..., T_n´]` such that ´T <: U´. +- [`asSeenFrom(´T´, ´C´, ´p´)`](#as-seen-from): rebases the type ´T´ visible inside the class ´C´ "as seen from" the prefix ´p´. +- [`memberType(´T´, ´id´)`](#member-type): finds a member of a type (`T.id`) and computes its underlying type or bounds. -A _type constructor_ is either: -- a _type lambda_, of the form `[´\mathit{tps}\,´] =>> ´T´` where `[´\mathit{tps}\,´]` is a type parameter clause `[´a_1´ >: ´L_1´ <: ´U_1, ..., a_n´ >: ´L_n´ <: ´U_n´]` for some ´n \gt 0´ and ´T´ is either a value type -or another type lambda. -- a reference to a [desugared type declaration](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases) upper-bounded by a type lambda. -- a reference to a [polymorphic class](05-classes-and-objects.html##class-definitions). +These meta-functions are mutually recursive. 
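+
+As a non-normative intuition for how these meta-functions cooperate (the classes below are ours, not part of the specification):
+
+```scala
+class C[A](val a: A)
+class D extends C[Int](42)
+
+// baseType(D, C) = C[Int]; memberType then resolves the member `a`
+// as seen from the prefix, so the selection below has type Int:
+val n: Int = D().a
+```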
-Each type parameter ´a_i´ of a type lambda has a variance ´v_i´ which cannot be written down by the user but is inferred from the body of the type lambda to maximize the number of types that conform to the type lambda. - +### Base Type -#### Inferred type parameter clause +The meta-function `baseType(´T´, ´C´)`, where ´T´ is a proper type and ´C´ is a class identifier, computes the smallest type ´U´ of the form `´p.C´` or `´p.C´[´U_1, ..., U_n´]` such that ´T <: U´. +If no such type exists, the function is not defined. +The main purpose of `baseType` is to substitute prefixes and class type parameters along the inheritance chain. -To each type constructor corresponds an _inferred type parameter clause_ which is computed as follow: -- For a type lambda, its type parameter clause (including variance annotations). -- For a type declaration upper-bounded by a type lambda ´T´, the inferred clause of ´T´. -- For a polymorphic class, its type parameter clause. +We define `baseType(´T´, ´C´)` as follows. +For brevity, we write `´p.X´[´U_1, ..., U_n´]` instead of `´p.X´` with ´n = 0´. - +Note that the cases of `superType` do not overlap with each other nor with any `baseType` case other than the `superType`-based one. +The cases of `baseType` therefore do not overlap with each other either. +That makes `baseType` an algorithmic partial function. -## Kind Polymorphism +`meet(´p.C[T_1, ..., T_n]´, ´q.C[U_1, ..., U_n]´)` computes an intersection of two (parameterized) class types for the same class, and `join` computes a union: -Type parameters are normally partitioned into _kinds_, indicated by the top type of which it is a subtype. -Proper types are the types of values and are subtypes of `Any`. -Higher-kinded types are type constructors such as `List` or `Map`. -Covariant single argument type constructors such as `List` are subtypes of `[+X] =>> Any`. -The `Map` type constructor is a subtype of `[X, +Y] =>> Any`. +- if `´p =:= q´` is false, then it is not defined +- otherwise, let ´W_i´ for ´i \in 1, ..., n´ be: + - ´T_i & U_i´ for `meet` (resp. ´T_i | U_i´ for `join`) if the ´i´th type parameter of ´C´ is covariant + - ´T_i | U_i´ for `meet` (resp. ´T_i & U_i´ for `join`) if the ´i´th type parameter of ´C´ is contravariant + - ´T_i´ if ´T_i =:= U_i´ and the ´i´th type parameter of ´C´ is invariant + - not defined otherwise +- if any of the ´W_i´ are not defined, the result is not defined +- otherwise, the result is `´p.C[W_1, ..., W_n]´` -A type can be used only as prescribed by its kind. -Subtypes of `Any` cannot be applied to type arguments whereas subtypes of `[X] =>> Any` _must_ be applied to a type argument, unless they are passed to type parameters of the same kind. +We generalize `meet(´T_1, ..., T_n´)` for a sequence as: -A type parameter whose upper bound is [`scala.AnyKind`](https://scala-lang.org/api/3.x/scala/AnyKind.html) can have any kind and is called an _any-kinded type_. +- not defined for ´n = 0´ +- ´T_1´ if ´n = 1´ +- `meet(meet(´T_1, ..., T_{n-1}´), ´T_n´)` if `meet(´T_1, ..., T_{n-1}´)` is defined +- not defined otherwise -```scala -def f[T <: AnyKind] = ... -``` +##### Examples -The actual type arguments of `f` can then be types of arbitrary kinds. -So the following are all legal: +Given the following definitions: ```scala -f[Int] -f[List] -f[Map] -f[[X] =>> String] -``` - -Since the actual kind of an any-kinded type is unknown, its usage is heavily restricted. -An any-kinded type can neither be the type of a value, nor be instantiated with type parameters. 
-The only thing one can do with an any-kinded type is to pass it to another any-kinded type argument. - -`AnyKind` plays a special role in Scala's subtype system. -It is a supertype of all other types, no matter what their kind is. -It is also assumed to be kind-compatible with all other types. -Furthermore, `AnyKind` is itself an any-kinded type, so it cannot be the type of values and it cannot be instantiated. - -## Base Types and Member Definitions - -Types of class members depend on the way the members are referenced. -Central here are three notions, namely: -1. the notion of the set of base types of a type ´T´, -1. the notion of a type ´T´ in some class ´C´ seen from some - prefix type ´S´, -1. the notion of the set of member bindings of some type ´T´. - -These notions are defined mutually recursively as follows. - -1. The set of _base types_ of a type is a set of class types, - given as follows. - - The base types of a class type ´C´ with parents ´T_1, ..., T_n´ are ´C´ itself, as well as the base types of the compound type `´T_1´ with ... with ´T_n´ { ´R´ }`. - - The base types of an aliased type are the base types of its alias. - - The base types of an abstract type are the base types of its upper bound. - - The base types of a parameterized type `´C´[´T_1, ..., T_n´]` are the base types of type ´C´, where every occurrence of a type parameter ´a_i´ of ´C´ has been replaced by the corresponding parameter type ´T_i´. - - The base types of a singleton type `´p´.type` are the base types of the type of ´p´. - - The base types of a compound type `´T_1´ with ... with ´T_n´ { ´R´ }` are the _reduced union_ of the base classes of all ´T_i´'s. - This means: Let the multi-set ´\mathscr{S}´ be the multi-set-union of the base types of all ´T_i´'s. - If ´\mathscr{S}´ contains several type instances of the same class, say `´S^i´#´C´[´T^i_1, ..., T^i_n´]` ´(i \in I)´, then all those instances are replaced by one of them which conforms to all others. - It is an error if no such instance exists. - It follows that the reduced union, if it exists, produces a set of class types, where different types are instances of different classes. - - The base types of a type selection `´S´#´T´` are determined as follows. - If ´T´ is an alias or abstract type, the previous clauses apply. - Otherwise, ´T´ must be a (possibly parameterized) class type, which is defined in some class ´B´. - Then the base types of `´S´#´T´` are the base types of ´T´ in ´B´ seen from the prefix type ´S´. - -1. The notion of a type ´T´ _in class ´C´ seen from some prefix type ´S´_ makes sense only if the prefix type ´S´ has a type instance of class ´C´ as a base type, say `´S'´#´C´[´T_1, ..., T_n´]`. -Then we define as follows. - - If `´S´ = ´\epsilon´.type`, then ´T´ in ´C´ seen from ´S´ is ´T´ itself. - - Otherwise, if ´T´ is the ´i´'th type parameter of some class ´D´, then - - If ´S´ has a base type `´D´[´U_1, ..., U_n´]`, for some type parameters `[´U_1, ..., U_n´]`, then ´T´ in ´C´ seen from ´S´ is ´U_i´. - - Otherwise, if ´C´ is defined in a class ´C'´, then ´T´ in ´C´ seen from ´S´ is the same as ´T´ in ´C'´ seen from ´S'´. - - Otherwise, if ´C´ is not defined in another class, then ´T´ in ´C´ seen from ´S´ is ´T´ itself. - - Otherwise, if ´T´ is the singleton type `´D´.this.type` for some class ´D´ then - - If ´D´ is a subclass of ´C´ and ´S´ has a type instance of class ´D´ among its base types, then ´T´ in ´C´ seen from ´S´ is ´S´. 
- - Otherwise, if ´C´ is defined in a class ´C'´, then ´T´ in ´C´ seen from ´S´ is the same as ´T´ in ´C'´ seen from ´S'´. - - Otherwise, if ´C´ is not defined in another class, then ´T´ in ´C´ seen from ´S´ is ´T´ itself. - - If ´T´ is some other type, then the described mapping is performed to all its type components. - -If ´T´ is a possibly parameterized class type, where ´T´'s class is defined in some other class ´D´, and ´S´ is some prefix type, then we use "´T´ seen from ´S´" as a shorthand for "´T´ in ´D´ seen from ´S´". - -1. The _member bindings_ of a type ´T´ are - 1. all bindings ´d´ such that there exists a type instance of some class ´C´ among the base types of ´T´ and there exists a definition or declaration ´d'´ in ´C´ such that ´d´ results from ´d'´ by replacing every type ´T'´ in ´d'´ by ´T'´ in ´C´ seen from ´T´, and - 2. all bindings of the type's [refinement](#compound-types), if it has one. -2. The member bindinds of ´S & T´ are all the binds of ´S´ *and* all the bindins of ´T´. -3. The member bindings of ´S | T´ are the member bindings of its [join](#join-of-a-union-type). - -The _definition_ of a type projection `S#T` is the member binding ´d_T´ of the type `T` in `S`. -In that case, we also say that `S#T` _is defined by_ ´d_T´. +trait Iterable[+A] +trait List[+A] extends Iterable[A] +trait Map[K, +V] extends Iterable[(K, V)] +trait Foo +``` + +we have the following `baseType` results: + +- `baseType(List[Int], List) = List[Int]` +- `baseType(List[Int], Iterable) = Iterable[Int]` +- `baseType(List[A] & Iterable[B], Iterable) = meet(Iterable[A], Iterable[B]) = Iterable[A & B]` +- `baseType(List[A] & Foo, Iterable) = Iterable[A]` (because `baseType(Foo, Iterable)` is not defined) +- `baseType(Int, Iterable)` is not defined +- `baseType(Map[Int, String], Iterable) = Iterable[(Int, String)]` +- `baseType(Map[Int, String] & Map[String, String], Map)` is not defined (because `K` is invariant) + +### As Seen From + +The meta-function `asSeenFrom(´T´, ´C´, ´p´)`, where ´T´ is a type or methodic type visible inside the class ´C´ and ´p´ is a stable type, rebases the type ´T´ "as seen from" the prefix ´p´. +Essentially, it replaces this-types and class type parameters in ´T´ with appropriate types visible from outside. +Since ´T´ is visible inside ´C´, it can contain this-types and class type parameters of ´C´ itself as well as of all its enclosing classes. +This-types of enclosing classes must be mapped to appropriate subprefixes of ´p´, while class type parameters must be mapped to appropriate concrete type arguments. + +`asSeenFrom(´T´, ´C´, ´p´)` only makes sense if ´p´ has a base type for ´C´, i.e., if `baseType(´p´, ´C´)` is defined.
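+
+As a non-normative sketch of this intuition (the classes are hypothetical), members declared in terms of this-types and class type parameters are rebased when selected through an outside prefix:
+
+```scala
+class Outer {
+  class Inner {
+    def self: Inner = this                  // declared type: Outer.this.Inner
+    def outer: Outer.this.type = Outer.this
+  }
+}
+
+val o = new Outer
+val i = new o.Inner
+val s: o.Inner = i.self   // asSeenFrom maps Outer.this to the prefix o
+val x: o.type = i.outer   // Outer.this.type is rebased to o.type
+```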
+ +We define `asSeenFrom(´T´, ´C´, ´p´)` where `baseType(´p´, ´C´) = ´q.C[U_1, ..., U_n]´` as follows: + +- If ´T´ is a reference to the ´i´th class type parameter of some class ´D´: + - If `baseType(´p´, ´D´) ´= r.D[W_1, ..., W_m]´` is defined, then ´W_i´ + - Otherwise, if ´q = \epsilon´ or ´q´ is a package ref, then ´T´ + - Otherwise, ´q´ is a type, ´C´ must be defined in another class ´B´ and `baseType(´q´, ´B´)` must be defined, then `asSeenFrom(´T´, ´B´, ´q´)` +- Otherwise, if ´T´ is a this-type `´D´.this`: + - If ´D´ is a subclass of ´C´ and `baseType(´p´, ´D´)` is defined, then ´p´ (this is always the case when ´D = C´) + - Otherwise, if ´q = \epsilon´ or ´q´ is a package ref, then ´T´ + - Otherwise, ´q´ is a type, ´C´ must be defined in another class ´B´ and `baseType(´q´, ´B´)` must be defined, then `asSeenFrom(´T´, ´B´, ´q´)` +- Otherwise, ´T´ where each of its type components ´T_i´ is mapped to `asSeenFrom(´T_i´, ´C´, ´p´)`. + +For convenience, we generalize `asSeenFrom` to _type definitions_ ´D´. + +- If ´D´ is an alias ´= U´, then `asSeenFrom(´D´, ´C´, ´p´) = asSeenFrom(´U´, ´C´, ´p´)`. +- If ´D´ is an abstract type definition with bounds ´>: L <: H´, then `asSeenFrom(´D´, ´C´, ´p´) = ´>:´ asSeenFrom(´L´, ´C´, ´p´) ´<:´ asSeenFrom(´H´, ´C´, ´p´)`. + +### Member Type + +The meta-function `memberType(´T´, ´id´, ´p´)`, where ´T´ is a proper type, ´id´ is a term or type identifier, and ´p´ is a stable type, finds a member of a type (`T.id`) and computes its underlying type (for a term) or type definition (for a type) as seen from the prefix ´p´. +For a term, it also computes whether the term is _stable_. +`memberType` is the fundamental operation that computes the _underlying type_ or _underlying type definition_ of a [named designator type](#designator-types). + +The result ´M´ of a `memberType` is one of: + +- undefined, +- a term result with underlying type or methodic type ´U´ and a _stable_ flag, +- a class result with class ´C´, or +- a type result with underlying type definition ´D´. + +As short-hand, we define `memberType(´T´, ´id´)` to be the same as `memberType(´T´, ´id´, ´T´)` when ´T´ is a stable type. + +We define `memberType(´T´, ´id´, ´p´)` as follows: + +- If ´T´ is a possibly parameterized class type of the form ´q.C[T_1, ..., T_n]´ (with ´n \geq 0´): + - Let ´m´ be the [class member](05-classes-and-objects.html#class-members) of ´C´ with name ´id´. + - If ´m´ is not defined, the result is undefined. + - If ´m´ is a class definition, the result is a class result with class ´m´. + - If ´m´ is a term definition in class ´D´ with declared type ´U´, the result is a term result with underlying type [`asSeenFrom`](#as-seen-from)`(´U´, ´D´, ´p´)` and stable flag true if and only if ´m´ is stable. + - If ´m´ is a type member definition in class ´D´, the result is a type result with underlying type definition [`asSeenFrom`](#as-seen-from)`(´U´, ´D´, ´p´)` where ´U´ is defined as follows: + - If ´m´ is an opaque type alias member definition with declared definition ´>: L <: H = V´, then + - ´U´ is ´= V´ if `´p = D.´this` or if we are computing `memberType` in a [_transparent mode_](#type-erasure), + - ´U´ is ´>: L <: H´ otherwise. + - ´U´ is the declared type definition of ´m´ otherwise.
+- If ´T´ is another monomorphic type designator of the form ´q.X´: + - Let ´U´ be `memberType(´q´, ´X´)` + - Let ´H´ be the upper bound of ´U´ + - The result is `memberType(´H´, ´id´, ´p´)` +- If ´T´ is another parameterized type designator of the form ´q.X[T_1, ..., T_n]´ (with ´n \geq 0´): + - Let ´U´ be `memberType(´q´, ´X´)` + - Let ´H´ be the upper bound of ´U´ + - The result is `memberType(´H[T_1, ..., T_n]´, ´id´, ´p´)` +- If ´T´ is a parameterized type lambda of the form `´([\pm a_1 >: L_1 <: H_1, ..., \pm a_n >: L_n <: H_n]´ =>> ´U)[T_1, ..., T_n]´`: + - The result is `memberType(´[a_1 := T_1, ..., a_n := T_n] U´, ´id´, ´p´)`, i.e., we beta-reduce the type redex. +- If ´T´ is a refined type of the form `´T_1´ { ´R´ }`: + - Let ´M_1´ be the result of `memberType(´T_1´, ´id´, ´p´)`. + - If the name of the refinement ´R´ is not ´id´, let ´M_2´ be undefined. + - Otherwise, let ´M_2´ be the type or type definition of the refinement ´R´, as well as whether it is stable. + - The result is `mergeMemberType(´M_1´, ´M_2´)`. +- If ´T´ is a union type of the form ´T_1 | T_2´: + - Let ´J´ be the [join](#join-of-a-union-type) of ´T´. + - The result is `memberType(´J´, ´id´, ´p´)`. +- If ´T´ is an intersection type of the form ´T_1 & T_2´: + - Let ´M_1´ be the result of `memberType(´T_1´, ´id´, ´p´)`. + - Let ´M_2´ be the result of `memberType(´T_2´, ´id´, ´p´)`. + - The result is `mergeMemberType(´M_1´, ´M_2´)`. +- If ´T´ is a recursive type of the form `{ ´\alpha´ => ´T_1´ }`: + - The result is `memberType(´T_1´, ´id´, ´p´)`. +- If ´T´ is a stable type: + - Let ´U´ be the underlying type of ´T´. + - The result is `memberType(´U´, ´id´, ´p´)`. +- Otherwise, the result is undefined. + +We define the helper function `mergeMemberType(´M_1´, ´M_2´)` as: + +- If either ´M_1´ or ´M_2´ is undefined, the result is the other one. +- Otherwise, if either ´M_1´ or ´M_2´ is a class result, the result is that one. +- Otherwise, ´M_1´ and ´M_2´ must either both be term results or both be type results. + - If they are term results with underlying types ´U_1´ and ´U_2´ and stable flags ´s_1´ and ´s_2´, the result is a term result whose underlying type is `meet(´U_1´, ´U_2´)` and whose stable flag is ´s_1 \lor s_2´. + - If they are type results with underlying type definitions ´D_1´ and ´D_2´, the result is a type result whose underlying type definition is `intersect(´D_1´, ´D_2´)`. ## Relations between types @@ -665,80 +1081,93 @@ We define the following relations between types. | Name | Symbolically | Interpretation | |------------------|----------------|----------------------------------------------------| -| Equivalence | ´T \equiv U´ | ´T´ and ´U´ are interchangeable in all contexts. | | Conformance | ´T <: U´ | Type ´T´ conforms to ("is a subtype of") type ´U´. | +| Equivalence | ´T =:= U´ | ´T´ and ´U´ conform to each other. | | Weak Conformance | ´T <:_w U´ | Augments conformance for primitive numeric types. | | Compatibility | | Type ´T´ conforms to type ´U´ after conversions. | -### Equivalence - -´\color{red}{\text{TODO SCALA3: Redefine equivalence as mutual conformance?}}´ - -Equivalence ´(\equiv)´ between types is the smallest congruence [^congruence] such that the following holds: - -- If ´t´ is defined by a type alias `type ´t´ = ´T´`, then ´t´ is equivalent to ´T´. -- If a path ´p´ has a singleton type `´q´.type`, then `´p´.type ´\equiv q´.type`.
-- If ´O´ is defined by an object definition, and ´p´ is a path consisting only of package or object selectors and ending in ´O´, then `´O´.this.type ´\equiv p´.type`. -- Two [compound types](#compound-types) are equivalent if the sequences of their component are pairwise equivalent, and occur in the same order, and their refinements are equivalent. Two refinements are equivalent if they bind the same names and the modifiers, types and bounds of every declared entity are equivalent in both refinements. -- Two [method types](#method-types) are equivalent if: - - neither are implicit, or they both are [^implicit]; - - they have equivalent result types; - - they have the same number of parameters; and - - corresponding parameters have equivalent types. - Note that the names of parameters do not matter for method type equivalence. -- Two [polymorphic method types](#polymorphic-method-types) are equivalent if they have the same number of type parameters, and, after renaming one set of type parameters by another, the result types as well as lower and upper bounds of corresponding type parameters are equivalent. -- Two [type constructors](#type-constructors) are equivalent if they have the same number of type parameters, and, after renaming one list of type parameters by another, the result types as well as variances, lower and upper bounds of corresponding type parameters are equivalent. - -[^congruence]: A congruence is an equivalence relation which is closed under formation of contexts. -[^implicit]: A method type is implicit if the parameter section that defines it starts with the `implicit` keyword. - ### Conformance -The conformance relation ´(<:)´ is the smallest transitive relation that satisfies the following conditions. - -- Conformance includes equivalence. If ´T \equiv U´ then ´T <: U´. -- For every type `´T´` (of any kind), `scala.Nothing <: ´T´ <: scala.AnyKind`. -- For every value type `´T´`, `´T´ <: scala.Any`. -- For every type constructor `´T´` with type parameters `[´U_1´, ..., ´U_n´]`, `[´U_1´, ..., ´U_n´] =>> scala.Nothing <: ´T´ <: [´U_1´, ..., ´U_n´] =>> scala.Any`. -- For every value type ´T´, `scala.Null <: ´T´` unless `´T´ <: scala.AnyVal`. -- A type variable or abstract type ´t´ conforms to its upper bound and its lower bound conforms to ´t´. -- A class type or parameterized type conforms to any of its base-types. -- A singleton type `´p´.type` conforms to the type of the path ´p´. -- A singleton type `´p´.type` conforms to the type `scala.Singleton`. -- A type projection `´T´#´t´` conforms to `´U´#´t´` if ´T´ conforms to ´U´. -- A parameterized type `´T´[´T_1´, ..., ´T_n´]` conforms to `´T´[´U_1´, ..., ´U_n´]` if the following conditions hold for ´i \in \{ 1, ..., n \}´: - 1. If the ´i´'th type parameter of ´T´ is declared covariant, then ´T_i <: U_i´. [^argisnotwildcard] - 1. If the ´i´'th type parameter of ´T´ is declared contravariant, then ´U_i <: T_i´. [^argisnotwildcard] - 1. If the ´i´'th type parameter of ´T´ is declared neither covariant nor contravariant: - 1. If neither ´T_i´ nor ´U_i´ are wildcard type arguments, then ´U_i \equiv T_i´. - 1. If ´T_i´ is a wildcard type argument of the form ´\\_ >: L_1 <: U_1´ and ´U_i´ is a wildcard argument of the form ´\\_ >: L_2 <: U_2´, then ´L_2 <: L_1´ and ´H_1 <: H_2´ (i.e., the ´T_i´ "interval" is contained in the ´U_i´ "interval"). - 1. If ´U_i´ is a wildcard type argument of the form ´\\_ >: L_2 <: U_2´, then ´L_2 <: T_i´ and ´T_i <: U_2´. -- A compound type `´T_1´ with ... 
with ´T_n´ {´R\,´}` conforms to each of its component types ´T_i´. -- If ´T <: U_i´ for ´i \in \{ 1, ..., n \}´ and for every binding ´d´ of a type or value ´x´ in ´R´ there exists a member binding of ´x´ in ´T´ which subsumes ´d´, then ´T´ conforms to the compound type `´U_1´ with ... with ´U_n´ {´R\,´}`. -- If ´T <: U´, then ´T <: U | W´ and ´T <: W | U´. -- If ´T <: W´ and ´U <: W´, then ´T | U <: W´. -- If ´T <: U´ and ´T <: W´, then ´T <: U & W´. -- If ´T <: W´, then ´T & U <: W´ and ´U & T <: W´. -- If ´T_i \equiv T_i'´ for ´i \in \{ 1, ..., n\}´ and ´U´ conforms to ´U'´ then the method type ´(p_1:T_1, ..., p_n:T_n) U´ conforms to ´(p_1':T_1', ..., p_n':T_n') U'´. -- The polymorphic type ´[a_1 >: L_1 <: U_1, ..., a_n >: L_n <: U_n] T´ conforms to the polymorphic type ´[a_1 >: L_1' <: U_1', ..., a_n >: L_n' <: U_n'] T'´ if, assuming ´L_1' <: a_1 <: U_1', ..., L_n' <: a_n <: U_n'´ one has ´T <: T'´ and ´L_i <: L_i'´ and ´U_i' <: U_i´ for ´i \in \{ 1, ..., n \}´. -- Type constructors ´T´ and ´T'´ follow a similar discipline. -We characterize ´T´ and ´T'´ by their [inferred type parameter clauses](#inferred-type-parameter-clause) ´[a_1, ..., a_n]´ and ´[a_1', ..., a_n']´. -Then, ´T´ conforms to ´T'´ if any list ´[t_1, ..., t_n]´ -- with declared variances, bounds and higher-order type parameter clauses -- of valid type arguments for ´T'´ is also a valid list of type arguments for ´T´ and ´T[t_1, ..., t_n] <: T'[t_1, ..., t_n]´. -Note that this entails that: - - The bounds on ´a_i´ must be weaker than the corresponding bounds declared for ´a'_i´. - - The variance of ´a_i´ must match the variance of ´a'_i´, where covariance matches covariance, contravariance matches contravariance and any variance matches invariance. - - Recursively, these restrictions apply to the corresponding higher-order type parameter clauses of ´a_i´ and ´a'_i´. +The conformance relation ´(<:)´ is the smallest relation such that ´S <: T´ is true if any of the following conditions hold. +Note that the conditions are not all mutually exclusive. + +- ´S = T´ (i.e., conformance is reflexive by definition). +- ´S´ is `Nothing`. +- ´T´ is `AnyKind`. +- ´S´ is a stable type with underlying type ´S_1´ and ´S_1 <: T´. +- ´S = p.x´ and ´T = q.x´ are term designators and + - `isSubPrefix(´p´, ´q´)`. +- ´S = p.X[S_1, ..., S_n]´ and ´T = q.X[T_1, ..., T_n]´ are possibly parameterized type designators with ´n \geq 0´ and: + - `isSubPrefix(´p´, ´q´)`, and + - it is not the case that ´p.X´ and ´q.X´ are class type designators for different classes, and + - for each ´i \in \{ 1, ..., n \}´: + - the ´i´th type parameter of ´q.X´ is covariant and ´S_i <: T_i´ [^argisnotwildcard], or + - the ´i´th type parameter of ´q.X´ is contravariant and ´T_i <: S_i´ [^argisnotwildcard], or + - the ´i´th type parameter of ´q.X´ is invariant and: + - ´S_i´ and ´T_i´ are types and ´S_i =:= T_i´, or + - ´S_i´ is a type and ´T_i´ is a wildcard type argument of the form ´? >: L_2 <: H_2´ and ´L_2 <: S_i´ and ´S_i <: H_2´, or + - ´S_i´ is a wildcard type argument of the form ´? >: L_1 <: H_1´ and ´T_i´ is a wildcard type argument of the form ´? >: L_2 <: H_2´ and ´L_2 <: L_1´ and ´H_1 <: H_2´ (i.e., the ´S_i´ "interval" is contained in the ´T_i´ "interval"). +- ´T = q.C[T_1, ..., T_n]´ with ´n \geq 0´ and `baseType(´S´, ´C´)` is defined and `baseType(´S´, ´C´) ´<: T´`. +- ´S = p.X[S_1, ..., S_n]´ and ´p.X´ is a non-class type designator and ´H[S_1, ..., S_n] <: T´ where ´H´ is the upper bound of the underlying type definition of ´p.X´.
+- ´S = p.C´ and `´T = C´.this` and ´C´ is the hidden class of an `object` and: + - ´p = \epsilon´ or ´p´ is a package ref, or + - `isSubPrefix(´p´, ´D´.this)` where ´D´ is the enclosing class of ´C´. +- `´S = C´.this` and ´T = q.C´ and ´C´ is the hidden class of an `object` and: + - either ´q = \epsilon´ or ´q´ is a package ref, or + - `isSubPrefix(´D´.this, ´q´)` where ´D´ is the enclosing class of ´C´. +- ´S = S_1 | S_2´ and ´S_1 <: T´ and ´S_2 <: T´. +- ´T = T_1 | T_2´ and either ´S <: T_1´ or ´S <: T_2´. +- ´T = T_1 & T_2´ and ´S <: T_1´ and ´S <: T_2´. +- ´S = S_1 & S_2´ and either ´S_1 <: T´ or ´S_2 <: T´. +- `´S = S_1´ @a` and ´S_1 <: T´. +- `´T = T_1´ @a` and ´S <: T_1´ (i.e., annotations can be dropped). +- ´T = q.X´ and ´q.X´ is a non-class type designator and ´S <: L´ where ´L´ is the lower bound of the underlying type definition of ´q.X´. +- ´S = p.X´ and ´p.X´ is a non-class type designator and ´H <: T´ where ´H´ is the upper bound of the underlying type definition of ´p.X´. +- `´S = [\pm a_1 >: L_1 <: H_1, ..., \pm a_n >: L_n <: H_n]´ =>> ´S_1´` and `´T = [\pm b_1 >: M_1 <: G_1, ..., \pm b_n >: M_n <: G_n]´ =>> ´T_1´`, and given ´\sigma = [b_1 := a_1, ..., b_n := a_n]´: + - ´S_1 <: \sigma T_1´, and + - for each ´i \in \{ 1, ..., n \}´: + - the variance of ´a_i´ conforms to the variance of ´b_i´ (´+´ conforms to ´+´ and ´\epsilon´, ´-´ conforms to ´-´ and ´\epsilon´, and ´\epsilon´ conforms to ´\epsilon´), and + - ´\sigma (>: M_i <: G_i)´ is contained in ´>: L_i <: H_i´ (i.e., ´L_i <: \sigma M_i´ and ´\sigma G_i <: H_i´). +- ´S = p.X´ and `´T = [\pm b_1 >: M_1 <: G_1, ..., \pm b_n >: M_n <: G_n]´ =>> ´T_1´` and ´S´ is a type constructor with ´n´ type parameters and: + - `´([\pm a_1 >: L_1 <: H_1, ..., \pm a_n >: L_n <: H_n]´ =>> ´S[a_1, ..., a_n]) <: T´` where the ´a_i´ are copies of the type parameters of ´S´ (i.e., we can eta-expand ´S´ to compare it to a type lambda). +- `´T = T_1´ { ´R´ }` and ´S <: T_1´ and, given ´p = S´ if ´S´ is a stable type and ´p = ∃ \alpha : S´ otherwise: + - `´R =´ type ´X >: L <: H´` and `memberType(´p´, ´X´)` is a class result for ´C´ and ´L <: p.C´ and ´p.C <: H´, or + - `´R =´ type ´X >: L_2 <: H_2´` and `memberType(´p´, ´X´)` is a type result with bounds ´>: L_1 <: H_1´ and ´L_2 <: L_1´ and ´H_1 <: H_2´, or + - `´R =´ val ´X: T_2´` and `memberType(´p´, ´X´)` is a stable term result with type ´S_2´ and ´S_2 <: T_2´, or + - `´R =´ def ´X: T_2´` and `memberType(´p´, ´X´)` is a term result with type ´S_2´ and ´T_2´ is a type and ´S_2 <: T_2´, or + - `´R =´ def ´X: T_2´` and `memberType(´p´, ´X´)` is a term result with methodic type ´S_2´ and ´T_2´ is a methodic type and `matches(´S_2´, ´T_2´)`. +- `´S = S_1´ { ´R´ }` and ´S_1 <: T´. +- `´S =´ { ´\alpha´ => ´S_1´ }` and `´T =´ { ´\beta´ => ´T_1´ }` and ´S_1 <: [\beta := \alpha]T_1´. +- `´T =´ { ´\beta´ => ´T_1´ }` and ´S´ is a proper type but not a recursive type and ´p' <: [\beta := p]T_1´ where: + - ´p´ is ´S´ if ´S´ is a stable type and ´∃ \alpha : S´ otherwise, and + - ´p'´ is the result of replacing any top-level recursive type `{ ´\gamma´ => ´Z´ }` in ´p´ with ´[\gamma := p]Z´ (TODO specify this better). +- `´S = (´=> ´S_1)´` and `´T = (´=> ´T_1)´` and ´S_1 <: T_1´. 
+- `´S =´ scala.Null` and: + - ´T = q.C[T_1, ..., T_n]´ with ´n \geq 0´ and ´C´ does not derive from `scala.AnyVal` and ´C´ is not the hidden class of an `object`, or + - ´T = q.x´ is a term designator with underlying type ´U´ and `scala.Null ´<: U´`, or + - `´T = T_1´ { ´R´ }` and `scala.Null ´<: T_1´`, or + - `´T =´ { ´\beta´ => ´T_1´ }` and `scala.Null ´<: T_1´`. +- ´S´ is a stable type and ´T = q.x´ is a term designator with underlying type ´T_1´ and ´T_1´ is a stable type and ´S <: T_1´. +- `´S =´ { ´\alpha´ => ´S_1´ }` and ´S_1 <: T´. +- `´T =´ scala.Tuple´_n[T_1, ..., T_n]´` with ´1 \leq n \leq 22´, and `´S <: T_1´ *: ... *: ´T_n´ *: scala.EmptyTuple`. + +We define `isSubPrefix(´p´, ´q´)` where ´p´ and ´q´ are prefixes as: + +- If both ´p´ and ´q´ are types, then ´p <: q´. +- Otherwise, ´p = q´ (for empty prefixes and package refs). + +We define `matches(´S´, ´T´)` where ´S´ and ´T´ are types or methodic types as: + +- If ´S´ and ´T´ are types, then ´S <: T´. +- If ´S´ and ´T´ are method types ´(a_1: S_1, ..., a_n: S_n)S'´ and ´(b_1: T_1, ..., b_n: T_n)T'´, then ´\sigma S_i =:= T_i´ for each ´i´ and `matches(´\sigma S'´, ´T'´)`, where ´\sigma = [a_1 := b_1, ..., a_n := b_n]´. +- If ´S´ and ´T´ are poly types ´[a_1 >: L_{s1} <: H_{s1}, ..., a_n >: L_{sn} <: H_{sn}]S'´ and ´[b_1 >: L_{t1} <: H_{t1}, ..., b_n >: L_{tn} <: H_{tn}]T'´, then ´\sigma L_{si} =:= L_{ti}´ and ´\sigma H_{si} =:= H_{ti}´ for each ´i´ and `matches(´\sigma S'´, ´T'´)`, where ´\sigma = [a_1 := b_1, ..., a_n := b_n]´. + +Note that conformance in Scala is _not_ transitive. +Given two abstract types ´A´ and ´B´, and one abstract `type ´C >: A <: B´` available on prefix ´p´, we have ´A <: p.C´ and ´p.C <: B´ but not necessarily ´A <: B´. [^argisnotwildcard]: In these cases, if `S_i` and/or `T_i` are wildcard type arguments, the [simplification rules](#simplification-rules) for parameterized types allow them to be reduced to real types. -A declaration or definition in some compound type of class type ´C´ _subsumes_ another declaration of the same name in some compound type or class type ´C'´, if one of the following holds. - -- A value declaration or definition that defines a name ´x´ with type ´T´ subsumes a value or method declaration that defines ´x´ with type ´T'´, provided ´T <: T'´. -- A method declaration or definition that defines a name ´x´ with type ´T´ subsumes a method declaration that defines ´x´ with type ´T'´, provided ´T <: T'´. -- A type alias `type ´t´[´T_1´, ..., ´T_n´] = ´T´` subsumes a type alias `type ´t´[´T_1´, ..., ´T_n´] = ´T'´` if ´T \equiv T'´. -- A type declaration `type ´t´[´T_1´, ..., ´T_n´] >: ´L´ <: ´U´` subsumes a type declaration `type ´t´[´T_1´, ..., ´T_n´] >: ´L'´ <: ´U'´` if ´L' <: L´ and ´U <: U'´. -- A type or class definition that binds a type name ´t´ subsumes an abstract type declaration `type t[´T_1´, ..., ´T_n´] >: L <: U` if ´L <: t <: U´. - #### Least upper bounds and greatest lower bounds The ´(<:)´ relation forms a pre-order between types, i.e. it is transitive and reflexive. @@ -749,6 +1178,12 @@ This allows us to define _least upper bounds_ and _greatest lower bounds_ of a s By construction, for all types `A` and `B`, the least upper bound of `A` and `B` is `A | B`, and their greatest lower bound is `A & B`. +### Equivalence + +Equivalence is defined as mutual conformance. + +´S =:= T´ if and only if both ´S <: T´ and ´T <: S´. + ### Weak Conformance In some situations Scala uses a more general conformance relation.
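+
+For instance (a non-normative example), weak conformance is what gives the branches of a conditional a common primitive numeric type:
+
+```scala
+val d = if (math.random() < 0.5) 1 else 2.0
+// d: Double, because Int weakly conforms to Double, so the weak
+// least upper bound of the two branches is Double
+```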
@@ -790,25 +1225,28 @@ The application `foo((x: Int) => x.toString)` [resolves](06-expressions.html#ove - `Int => String` is compatible to `ToString` -- when expecting a value of type `ToString`, you may pass a function literal from `Int` to `String`, as it will be SAM-converted to said function; - `ToString` is not compatible to `Int => String` -- when expecting a function from `Int` to `String`, you may not pass a `ToString`. -## Volatile Types - -Type volatility approximates the possibility that a type parameter or abstract type instance of a type does not have any non-null values. -A value member of a volatile type cannot appear in a [path](#paths). - -A type is _volatile_ if it falls into one of four categories: +## Realizability -A compound type `´T_1´ with ... with ´T_n´ {´R\,´}` is volatile if one of the following three conditions hold. +A type ´T´ is _realizable_ if and only if it is inhabited by non-null values. +It is defined as: -1. One of ´T_2, ..., T_n´ is a type parameter or abstract type, or -1. ´T_1´ is an abstract type and either the refinement ´R´ or a type ´T_j´ for ´j > 1´ contributes an abstract member to the compound type, or -1. one of ´T_1, ..., T_n´ is a singleton type. +- A term designator ´p.x´ with underlying type ´U´ is realizable if ´p´ is ´\epsilon´ or a package ref or a realizable type and + - `memberType(´p´, ´x´)` has the stable flag, or + - the type returned by `memberType(´p´, ´x´)` is realizable. +- A stable type that is not a term designator is realizable. +- Another type ´T´ is realizable if + - ´T´ is concrete, and + - ´T´ has good bounds. -Here, a type ´S´ _contributes an abstract member_ to a type ´T´ if ´S´ contains an abstract member that is also a member of ´T´. -A refinement ´R´ contributes an abstract member to a type ´T´ if ´R´ contains an abstract declaration which is also a member of ´T´. +A concrete type ´T´ has good bounds if all of the following apply: -A type designator is volatile if it is an alias of a volatile type, or if it designates a type parameter or abstract type that has a volatile type as its upper bound. +- all its non-class type members have good bounds, i.e., their bounds ´L´ and ´H´ are such that ´L <: H´, +- all its type refinements have good bounds, and +- for all base classes ´C´ of ´T´: + - `baseType(´T´, ´C´)` is defined with some result ´p.C[T_1, ..., T_n]´, and + - for all ´i \in \{ 1, ..., n \}´, ´T_i´ is a real type or (when it is a wildcard type argument) it has good bounds. -A singleton type `´p´.type` is volatile, if the underlying type of path ´p´ is volatile. +Note: it is possible for `baseType(´T´, ´C´)` not to be defined because of the `meet` computation, which may fail to merge prefixes and/or invariant type arguments. ## Type Erasure @@ -816,15 +1254,18 @@ A type is called _generic_ if it contains type arguments or type variables. _Type erasure_ is a mapping from (possibly generic) types to non-generic types. We write ´|T|´ for the erasure of type ´T´. The erasure mapping is defined as follows. +Internal computations are performed in a _transparent mode_, which has an effect on how [`memberType`](#member-type) behaves for opaque type aliases. -- The erasure of `scala.AnyKind` is `Object`. -- The erasure of an alias type is the erasure of its right-hand side. -- The erasure of an abstract type is the erasure of its upper bound. +- The erasure of `AnyKind` is `Object`. +- The erasure of a non-class type designator is the erasure of its underlying upper bound. 
+- The erasure of a term designator is the erasure of its underlying type. - The erasure of the parameterized type `scala.Array´[T_1]´` is `scala.Array´[|T_1|]´`. - The erasure of every other parameterized type ´T[T_1, ..., T_n]´ is ´|T|´. -- The erasure of a singleton type `´p´.type` is the erasure of the type of ´p´. -- The erasure of a type projection `´T´#´x´` is `|´T´|#´x´`. -- The erasure of a compound type `´T_1´ with ... with ´T_n´ {´R\,´}` is the erasure of the intersection dominator of ´T_1, ..., T_n´. +- The erasure of a stable type `´p´` is the erasure of the underlying type of ´p´. +- The erasure of a by-name type `=> ´T_1´` is `scala.Function0`. +- The erasure of an annotated type ´T_1 a´ is ´|T_1|´. +- The erasure of a refined type `´T_1´ { ´R´ }` is ´|T_1|´. +- The erasure of a recursive type `{ ´\alpha´ => ´T_1´ }` and the associated recursive this type ´\alpha´ is ´|T_1|´. - The erasure of a union type ´S | T´ is the _erased least upper bound_ (_elub_) of the erasures of ´S´ and ´T´. - The erasure of an intersection type ´S & T´ is the _eglb_ (erased greatest lower bound) of the erasures of ´S´ and ´T´. diff --git a/docs/_spec/04-basic-declarations-and-definitions.md b/docs/_spec/04-basic-declarations-and-definitions.md deleted file mode 100644 index 5c45cc5c7819..000000000000 --- a/docs/_spec/04-basic-declarations-and-definitions.md +++ /dev/null @@ -1,758 +0,0 @@ ---- -title: Basic Declarations & Definitions -layout: default -chapter: 4 ---- - -# Basic Declarations and Definitions - -```ebnf -Dcl ::= ‘val’ ValDcl - | ‘var’ VarDcl - | ‘def’ FunDcl - | ‘type’ {nl} TypeDcl -PatVarDef ::= ‘val’ PatDef - | ‘var’ VarDef -Def ::= PatVarDef - | ‘def’ FunDef - | ‘type’ {nl} TypeDef - | TmplDef -``` - -A _declaration_ introduces names and assigns them types. -It can form part of a [class definition](05-classes-and-objects.html#templates) or of a refinement in a [compound type](03-types.html#compound-types). - -A _definition_ introduces names that denote terms or types. -It can form part of an object or class definition or it can be local to a block. -Both declarations and definitions produce _bindings_ that associate type names with type definitions or bounds, and that associate term names with types. - -The scope of a name introduced by a declaration or definition is the whole statement sequence containing the binding. -However, there is a restriction on forward references in blocks: -In a statement sequence ´s_1 ... s_n´ making up a block, if a simple name in ´s_i´ refers to an entity defined by ´s_j´ where ´j \geq i´, then for all ´s_k´ between and including ´s_i´ and ´s_j´, - -- ´s_k´ cannot be a variable definition. -- If ´s_k´ is a value definition, it must be lazy. - - - -## Value Declarations and Definitions - -```ebnf -Dcl ::= ‘val’ ValDcl -ValDcl ::= ids ‘:’ Type -PatVarDef ::= ‘val’ PatDef -PatDef ::= Pattern2 {‘,’ Pattern2} [‘:’ Type] ‘=’ Expr -ids ::= id {‘,’ id} -``` - -A value declaration `val ´x´: ´T´` introduces ´x´ as a name of a value of type ´T´. - -A value definition `val ´x´: ´T´ = ´e´` defines ´x´ as a name of the value that results from the evaluation of ´e´. -If the value definition is not recursive, the type ´T´ may be omitted, in which case the [packed type](06-expressions.html#expression-typing) of expression ´e´ is assumed. -If a type ´T´ is given, then ´e´ is expected to conform to it. - -Evaluation of the value definition implies evaluation of its right-hand side ´e´, unless it has the modifier `lazy`. 
-The effect of the value definition is to bind ´x´ to the value of ´e´ -converted to type ´T´. -A `lazy` value definition evaluates its right hand side ´e´ the first time the value is accessed. - -A _constant value definition_ is of the form - -```scala -final val x = e -``` - -where `e` is a [constant expression](06-expressions.html#constant-expressions). -The `final` modifier must be present and no type annotation may be given. -References to the constant value `x` are themselves treated as constant expressions; in the generated code they are replaced by the definition's right-hand side `e`. - -Value definitions can alternatively have a [pattern](08-pattern-matching.html#patterns) as left-hand side. -If ´p´ is some pattern other than a simple name or a name followed by a colon and a type, then the value definition `val ´p´ = ´e´` is expanded as follows: - -1. If the pattern ´p´ has bound variables ´x_1, ..., x_n´, where ´n > 1´: - -```scala -val ´\$x´ = ´e´ match {case ´p´ => (´x_1, ..., x_n´)} -val ´x_1´ = ´\$x´._1 -... -val ´x_n´ = ´\$x´._n -``` - -Here, ´\$x´ is a fresh name. - -2. If ´p´ has a unique bound variable ´x´: - -```scala -val ´x´ = ´e´ match { case ´p´ => ´x´ } -``` - -3. If ´p´ has no bound variables: - -```scala -´e´ match { case ´p´ => ()} -``` - -###### Example - -The following are examples of value definitions - -```scala -val pi = 3.1415 -val pi: Double = 3.1415 // equivalent to first definition -val Some(x) = f() // a pattern definition -val x :: xs = mylist // an infix pattern definition -``` - -The last two definitions have the following expansions. - -```scala -val x = f() match { case Some(x) => x } - -val x´\$´ = mylist match { case x :: xs => (x, xs) } -val x = x´\$´._1 -val xs = x´\$´._2 -``` - -The name of any declared or defined value may not end in `_=`. - -A value declaration `val ´x_1, ..., x_n´: ´T´` is a shorthand for the sequence of value declarations `val ´x_1´: ´T´; ...; val ´x_n´: ´T´`. -A value definition `val ´p_1, ..., p_n´ = ´e´` is a shorthand for the sequence of value definitions `val ´p_1´ = ´e´; ...; val ´p_n´ = ´e´`. -A value definition `val ´p_1, ..., p_n: T´ = ´e´` is a shorthand for the sequence of value definitions `val ´p_1: T´ = ´e´; ...; val ´p_n: T´ = ´e´`. - -## Variable Declarations and Definitions - -```ebnf -Dcl ::= ‘var’ VarDcl -PatVarDef ::= ‘var’ VarDef -VarDcl ::= ids ‘:’ Type -VarDef ::= PatDef - | ids ‘:’ Type ‘=’ ‘_’ -``` - -A variable declaration `var ´x´: ´T´` is equivalent to the declarations of both a _getter method_ ´x´ *and* a _setter method_ `´x´_=`: - -```scala -def ´x´: ´T´ -def ´x´_= (´y´: ´T´): Unit -``` - -An implementation of a class may _define_ a declared variable using a variable definition, or by defining the corresponding setter and getter methods. - -A variable definition `var ´x´: ´T´ = ´e´` introduces a mutable variable with type ´T´ and initial value as given by the expression ´e´. -The type ´T´ can be omitted, in which case the type of ´e´ is assumed. -If ´T´ is given, then ´e´ is expected to [conform to it](06-expressions.html#expression-typing). - -Variable definitions can alternatively have a [pattern](08-pattern-matching.html#patterns) as left-hand side. -A variable definition `var ´p´ = ´e´` where ´p´ is a pattern other than a simple name or a name followed by a colon and a type is expanded in the same way as a [value definition](#value-declarations-and-definitions) `val ´p´ = ´e´`, except that the free names in ´p´ are introduced as mutable variables, not values. 
- -The name of any declared or defined variable may not end in `_=`. - -A variable definition `var ´x´: ´T´ = _` can appear only as a member of a template. -It introduces a mutable field with type ´T´ and a default initial value. -The default value depends on the type ´T´ as follows: - -| default | type ´T´ | -|----------|------------------------------------| -|`0` | `Int` or one of its subrange types | -|`0L` | `Long` | -|`0.0f` | `Float` | -|`0.0d` | `Double` | -|`false` | `Boolean` | -|`()` | `Unit` | -|`null` | all other types | - -When they occur as members of a template, both forms of variable definition also introduce a getter method ´x´ which returns the value currently assigned to the variable, as well as a setter method `´x´_=` which changes the value currently assigned to the variable. -The methods have the same signatures as for a variable declaration. -The template then has these getter and setter methods as members, whereas the original variable cannot be accessed directly as a template member. - -###### Example - -The following example shows how _properties_ can be simulated in Scala. -It defines a class `TimeOfDayVar` of time values with updatable integer fields representing hours, minutes, and seconds. -Its implementation contains tests that allow only legal values to be assigned to these fields. -The user code, on the other hand, accesses these fields just like normal variables. - -```scala -class TimeOfDayVar { - private var h: Int = 0 - private var m: Int = 0 - private var s: Int = 0 - - def hours = h - def hours_= (h: Int) = if (0 <= h && h < 24) this.h = h - else throw new DateError() - - def minutes = m - def minutes_= (m: Int) = if (0 <= m && m < 60) this.m = m - else throw new DateError() - - def seconds = s - def seconds_= (s: Int) = if (0 <= s && s < 60) this.s = s - else throw new DateError() -} -val d = new TimeOfDayVar -d.hours = 8; d.minutes = 30; d.seconds = 0 -d.hours = 25 // throws a DateError exception -``` - -A variable declaration `var ´x_1, ..., x_n´: ´T´` is a shorthand for the sequence of variable declarations `var ´x_1´: ´T´; ...; var ´x_n´: ´T´`. -A variable definition `var ´x_1, ..., x_n´ = ´e´` is a shorthand for the sequence of variable definitions `var ´x_1´ = ´e´; ...; var ´x_n´ = ´e´`. -A variable definition `var ´x_1, ..., x_n: T´ = ´e´` is a shorthand for the sequence of variable definitions `var ´x_1: T´ = ´e´; ...; var ´x_n: T´ = ´e´`. - -## Type Declarations and Type Aliases - - - -```ebnf -Dcl ::= ‘type’ {nl} TypeDcl -TypeDcl ::= id [TypeParamClause] [‘>:’ Type] [‘<:’ Type] -Def ::= ‘type’ {nl} TypeDef -TypeDef ::= id [TypeParamClause] ‘=’ Type -``` - -### Desugaring of parameterized type declarations -A parameterized type declaration is desugared into an unparameterized type declaration -whose bounds are type lambdas with explicit variance annotations. - -#### Abstract Type -An abstract type -```scala -type ´t´[´\mathit{tps}\,´] >: ´L´ <: ´U´ -``` -is desugared into an unparameterized abstract type as follow: -- If `L` conforms to `Nothing`, then, - - ```scala -type ´t´ >: Nothing - <: [´\mathit{tps'}\,´] =>> ´U´ - ``` -- otherwise, - - ```scala -type ´t´ >: [´\mathit{tps'}\,´] =>> ´L´ - <: [´\mathit{tps'}\,´] =>> ´U´ - ``` - -If at least one of the ´\mathit{tps}´ contains an explicit variance annotation, then ´\mathit{tps'} = \mathit{tps}´, otherwise we infer the variance of each type parameter as with the user-written type lambda `[´\mathit{tps}\,´] =>> ´U´`. - -The same desugaring applies to type parameters. 
For instance, -```scala -[F[X] <: Coll[X]] -``` -is treated as a shorthand for -```scala -[F >: Nothing <: [X] =>> Coll[X]] -``` - -#### Type Alias -A parameterized type alias -```scala -type ´t´[´\mathit{tps}\,´] = ´T´ -``` -is desugared into an unparameterized type alias -```scala -type ´t´ = [´\mathit{tps'}\,´] =>> ´T´ -``` -where ´\mathit{tps'}´ is computed as in the previous case. - -´\color{red}{\text{TODO SCALA3: Everything else in this section (and the next one -on type parameters) needs to be rewritten to take into account the desugaring described above.}}´ - -A _type declaration_ `type ´t´[´\mathit{tps}\,´] >: ´L´ <: ´U´` declares ´t´ to be an abstract type with lower bound type ´L´ and upper bound type ´U´. -If the type parameter clause `[´\mathit{tps}\,´]` is omitted, ´t´ abstracts over a proper type, otherwise ´t´ stands for a type constructor that accepts type arguments as described by the type parameter clause. - -If a type declaration appears as a member declaration of a type, implementations of the type may implement ´t´ with any type ´T´ for which ´L <: T <: U´. -It is a compile-time error if ´L´ does not conform to ´U´. -Either or both bounds may be omitted. -If the lower bound ´L´ is absent, the bottom type `scala.Nothing` is assumed. -If the upper bound ´U´ is absent, the top type `scala.Any` is assumed. - -A type constructor declaration imposes additional restrictions on the concrete types for which ´t´ may stand. -Besides the bounds ´L´ and ´U´, the type parameter clause may impose higher-order bounds and variances, as governed by the [conformance of type constructors](03-types.html#conformance). - -The scope of a type parameter extends over the bounds `>: ´L´ <: ´U´` and the type parameter clause ´\mathit{tps}´ itself. -A higher-order type parameter clause (of an abstract type constructor ´tc´) has the same kind of scope, restricted to the declaration of the type parameter ´tc´. - -To illustrate nested scoping, these declarations are all equivalent: `type t[m[x] <: Bound[x], Bound[x]]`, `type t[m[x] <: Bound[x], Bound[y]]` and `type t[m[x] <: Bound[x], Bound[_]]`, as the scope of, e.g., the type parameter of ´m´ is limited to the declaration of ´m´. -In all of them, ´t´ is an abstract type member that abstracts over two type constructors: ´m´ stands for a type constructor that takes one type parameter and that must be a subtype of ´Bound´, ´t´'s second type constructor parameter. -`t[MutableList, Iterable]` is a valid use of ´t´. - -A _type alias_ `type ´t´ = ´T´` defines ´t´ to be an alias name for the type ´T´. -The left hand side of a type alias may have a type parameter clause, e.g. `type ´t´[´\mathit{tps}\,´] = ´T´`. -The scope of a type parameter extends over the right hand side ´T´ and the type parameter clause ´\mathit{tps}´ itself. - -The scope rules for [definitions](#basic-declarations-and-definitions) and [type parameters](#method-declarations-and-definitions) make it possible that a type name appears in its own bound or in its right-hand side. -However, it is a static error if a type alias refers recursively to the defined type constructor itself. -That is, the type ´T´ in a type alias `type ´t´[´\mathit{tps}\,´] = ´T´` may not refer directly or indirectly to the name ´t´. -It is also an error if an abstract type is directly or indirectly its own upper or lower bound. 
- -###### Example - -The following are legal type declarations and definitions: - -```scala -type IntList = List[Integer] -type T <: Comparable[T] -type Two[A] = Tuple2[A, A] -type MyCollection[+X] <: Iterable[X] -``` - -The following are illegal: - -```scala -type Abs = Comparable[Abs] // recursive type alias - -type S <: T // S, T are bounded by themselves. -type T <: S - -type T >: Comparable[T.That] // Cannot select from T. - // T is a type, not a value -type MyCollection <: Iterable // Type constructor members must explicitly - // state their type parameters. -``` - -If a type alias `type ´t´[´\mathit{tps}\,´] = ´S´` refers to a class type ´S´, the name ´t´ can also be used as a constructor for objects of type ´S´. - -###### Example - -Suppose we make `Pair` an alias of the parameterized class `Tuple2`, as follows: - -```scala -type Pair[+A, +B] = Tuple2[A, B] -object Pair { - def apply[A, B](x: A, y: B) = Tuple2(x, y) - def unapply[A, B](x: Tuple2[A, B]): Option[Tuple2[A, B]] = Some(x) -} -``` - -As a consequence, for any two types ´S´ and ´T´, the type `Pair[´S´, ´T\,´]` is equivalent to the type `Tuple2[´S´, ´T\,´]`. -`Pair` can also be used as a constructor instead of `Tuple2`, as in: - -```scala -val x: Pair[Int, String] = new Pair(1, "abc") -``` - -## Type Parameters - -```ebnf -TypeParamClause ::= ‘[’ VariantTypeParam {‘,’ VariantTypeParam} ‘]’ -VariantTypeParam ::= {Annotation} [‘+’ | ‘-’] TypeParam -TypeParam ::= (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type] [‘:’ Type] -``` - -Type parameters appear in type definitions, class definitions, and method definitions. -In this section we consider only type parameter definitions with lower bounds `>: ´L´` and upper bounds `<: ´U´` whereas a discussion of context bounds `: ´U´` and view bounds `<% ´U´` is deferred to [here](07-implicits.html#context-bounds-and-view-bounds). - -The most general form of a proper type parameter is -`´@a_1 ... @a_n´ ´\pm´ ´t´ >: ´L´ <: ´U´`. -Here, ´L´, and ´U´ are lower and upper bounds that constrain possible type arguments for the parameter. -It is a compile-time error if ´L´ does not conform to ´U´. -´\pm´ is a _variance_, i.e. an optional prefix of either `+`, or `-`. One or more annotations may precede the type parameter. - - - - - -The names of all type parameters must be pairwise different in their enclosing type parameter clause. -The scope of a type parameter includes in each case the whole type parameter clause. -Therefore it is possible that a type parameter appears as part of its own bounds or the bounds of other type parameters in the same clause. -However, a type parameter may not be bounded directly or indirectly by itself. - -A type constructor parameter adds a nested type parameter clause to the type parameter. -The most general form of a type constructor parameter is `´@a_1 ... @a_n \pm t[\mathit{tps}\,]´ >: ´L´ <: ´U´`. - -The above scoping restrictions are generalized to the case of nested type parameter clauses, which declare higher-order type parameters. -Higher-order type parameters (the type parameters of a type parameter ´t´) are only visible in their immediately surrounding parameter clause (possibly including clauses at a deeper nesting level) and in the bounds of ´t´. -Therefore, their names must only be pairwise different from the names of other visible parameters. -Since the names of higher-order type parameters are thus often irrelevant, they may be denoted with a `‘_’`, which is nowhere visible. 
- -###### Example -Here are some well-formed type parameter clauses: - -```scala -[S, T] -[@specialized T, U] -[Ex <: Throwable] -[A <: Comparable[B], B <: A] -[A, B >: A, C >: A <: B] -[M[X], N[X]] -[M[_], N[_]] // equivalent to previous clause -[M[X <: Bound[X]], Bound[_]] -[M[+X] <: Iterable[X]] -``` - -The following type parameter clauses are illegal: - -```scala -[A >: A] // illegal, `A' has itself as bound -[A <: B, B <: C, C <: A] // illegal, `A' has itself as bound -[A, B, C >: A <: B] // illegal lower bound `A' of `C' does - // not conform to upper bound `B'. -``` - -## Variance Annotations - -Variance annotations indicate how instances of parameterized types vary with respect to [subtyping](03-types.html#conformance). -A ‘+’ variance indicates a covariant dependency, a ‘-’ variance indicates a contravariant dependency, and a missing variance indication indicates an invariant dependency. - -A variance annotation constrains the way the annotated type variable may appear in the type or class which binds the type parameter. -In a type definition `type ´T´[´\mathit{tps}\,´] = ´S´`, or a type declaration `type ´T´[´\mathit{tps}\,´] >: ´L´ <: ´U´` type parameters labeled ‘+’ must only appear in covariant position whereas type parameters labeled ‘-’ must only appear in contravariant position. -Analogously, for a class definition `class ´C´[´\mathit{tps}\,´](´\mathit{ps}\,´) extends ´T´ { ´x´: ´S´ => ...}`, type parameters labeled ‘+’ must only appear in covariant position in the self type ´S´ and the template ´T´, whereas type parameters labeled ‘-’ must only appear in contravariant position. - -The variance position of a type parameter in a type or template is defined as follows. -Let the opposite of covariance be contravariance, and the opposite of invariance be itself. -The top-level of the type or template is always in covariant position. -The variance position changes at the following constructs. - -- The variance position of a method parameter is the opposite of the variance position of the enclosing parameter clause. -- The variance position of a type parameter is the opposite of the variance position of the enclosing type parameter clause. -- The variance position of the lower bound of a type declaration or type parameter is the opposite of the variance position of the type declaration or parameter. -- The type of a mutable variable is always in invariant position. -- The right-hand side of a type alias is always in invariant position. -- The prefix ´S´ of a type selection `´S´#´T´` is always in invariant position. -- For a type argument ´T´ of a type `´S´[´... T ...´ ]`: -If the corresponding type parameter is invariant, then ´T´ is in invariant position. -If the corresponding type parameter is contravariant, the variance position of ´T´ is the opposite of the variance position of the enclosing type `´S´[´... T ...´ ]`. - - - -References to the type parameters in [object-private or object-protected values, types, variables, or methods](05-classes-and-objects.html#modifiers) of the class are not checked for their variance position. -In these members the type parameter may appear anywhere without restricting its legal variance annotations. - -###### Example -The following variance annotation is legal. - -```scala -abstract class P[+A, +B] { - def fst: A; def snd: B -} -``` - -With this variance annotation, type instances of ´P´ subtype covariantly with respect to their arguments. 
-For instance, - -```scala -P[IOException, String] <: P[Throwable, AnyRef] -``` - -If the members of ´P´ are mutable variables, the same variance annotation becomes illegal. - -```scala -abstract class Q[+A, +B](x: A, y: B) { - var fst: A = x // **** error: illegal variance: - var snd: B = y // `A', `B' occur in invariant position. -} -``` - -If the mutable variables are object-private, the class definition becomes legal again: - -```scala -abstract class R[+A, +B](x: A, y: B) { - private[this] var fst: A = x // OK - private[this] var snd: B = y // OK -} -``` - -###### Example - -The following variance annotation is illegal, since ´a´ appears in contravariant position in the parameter of `append`: - -```scala -abstract class Sequence[+A] { - def append(x: Sequence[A]): Sequence[A] - // **** error: illegal variance: - // `A' occurs in contravariant position. -} -``` - -The problem can be avoided by generalizing the type of `append` by means of a lower bound: - -```scala -abstract class Sequence[+A] { - def append[B >: A](x: Sequence[B]): Sequence[B] -} -``` - -###### Example - -```scala -abstract class OutputChannel[-A] { - def write(x: A): Unit -} -``` - -With that annotation, we have that `OutputChannel[AnyRef]` conforms to `OutputChannel[String]`. -That is, a channel on which one can write any object can substitute for a channel on which one can write only strings. - -## Method Declarations and Definitions - -```ebnf -Dcl ::= ‘def’ FunDcl -FunDcl ::= FunSig ‘:’ Type -Def ::= ‘def’ FunDef -FunDef ::= FunSig [‘:’ Type] ‘=’ Expr -FunSig ::= id [FunTypeParamClause] ParamClauses -FunTypeParamClause ::= ‘[’ TypeParam {‘,’ TypeParam} ‘]’ -ParamClauses ::= {ParamClause} [[nl] ‘(’ ‘implicit’ Params ‘)’] -ParamClause ::= [nl] ‘(’ [Params] ‘)’ -Params ::= Param {‘,’ Param} -Param ::= {Annotation} id [‘:’ ParamType] [‘=’ Expr] -ParamType ::= Type - | ‘=>’ Type - | Type ‘*’ -``` - -A _method declaration_ has the form `def ´f\,\mathit{psig}´: ´T´`, where ´f´ is the method's name, ´\mathit{psig}´ is its parameter signature and ´T´ is its result type. -A _method definition_ `def ´f\,\mathit{psig}´: ´T´ = ´e´` also includes a _method body_ ´e´, i.e. an expression which defines the method's result. -A parameter signature consists of an optional type parameter clause `[´\mathit{tps}\,´]`, followed by zero or more value parameter clauses `(´\mathit{ps}_1´)...(´\mathit{ps}_n´)`. -Such a declaration or definition introduces a value with a (possibly polymorphic) method type whose parameter types and result type are as given. - -The type of the method body is expected to [conform](06-expressions.html#expression-typing) to the method's declared result type, if one is given. -If the method definition is not recursive, the result type may be omitted, in which case it is determined from the packed type of the method body. - -A _type parameter clause_ ´\mathit{tps}´ consists of one or more [type declarations](#type-declarations-and-type-aliases), which introduce type parameters, possibly with bounds. -The scope of a type parameter includes the whole signature, including any of the type parameter bounds as well as the method body, if it is present. - -A _value parameter clause_ ´\mathit{ps}´ consists of zero or more formal parameter bindings such as `´x´: ´T´` or `´x: T = e´`, which bind value parameters and associate them with their types. - -### Default Arguments - -Each value parameter declaration may optionally define a default argument. 
-The default argument expression ´e´ is type-checked with an expected type ´T'´ obtained by replacing all occurrences of the method's type parameters in ´T´ by the undefined type. - -For every parameter ´p_{i,j}´ with a default argument a method named `´f\$´default´\$´n` is generated which computes the default argument expression. -Here, ´n´ denotes the parameter's position in the method declaration. -These methods are parametrized by the type parameter clause `[´\mathit{tps}\,´]` and all value parameter clauses `(´\mathit{ps}_1´)...(´\mathit{ps}_{i-1}´)` preceding ´p_{i,j}´. -The `´f\$´default´\$´n` methods are inaccessible for user programs. - -###### Example -In the method - -```scala -def compare[T](a: T = 0)(b: T = a) = (a == b) -``` - -the default expression `0` is type-checked with an undefined expected -type. -When applying `compare()`, the default value `0` is inserted and `T` is instantiated to `Int`. -The methods computing the default arguments have the form: - -```scala -def compare´\$´default´\$´1[T]: Int = 0 -def compare´\$´default´\$´2[T](a: T): T = a -``` - -The scope of a formal value parameter name ´x´ comprises all subsequent parameter clauses, as well as the method return type and the method body, if they are given. -Both type parameter names and value parameter names must be pairwise distinct. - -A default value which depends on earlier parameters uses the actual arguments if they are provided, not the default arguments. - -```scala -def f(a: Int = 0)(b: Int = a + 1) = b // OK -// def f(a: Int = 0, b: Int = a + 1) // "error: not found: value a" -f(10)() // returns 11 (not 1) -``` - -If an [implicit argument](07-implicits.html#implicit-parameters) is not found by implicit search, it may be supplied using a default argument. - -```scala -implicit val i: Int = 2 -def f(implicit x: Int, s: String = "hi") = s * x -f // "hihi" -``` - -### By-Name Parameters - -```ebnf -ParamType ::= ‘=>’ Type -``` - -The type of a value parameter may be prefixed by `=>`, e.g. `´x´: => ´T´`. -The type of such a parameter is then the parameterless method type `=> ´T´`. -This indicates that the corresponding argument is not evaluated at the point of method application, but instead is evaluated at each use within the method. -That is, the argument is evaluated using _call-by-name_. - -The by-name modifier is disallowed for parameters of classes that carry a `val` or `var` prefix, including parameters of case classes for which a `val` prefix is implicitly generated. - -###### Example -The declaration - -```scala -def whileLoop (cond: => Boolean) (stat: => Unit): Unit -``` - -indicates that both parameters of `whileLoop` are evaluated using call-by-name. - -### Repeated Parameters - -```ebnf -ParamType ::= Type ‘*’ -``` - -The last value parameter of a parameter section may be suffixed by `'*'`, e.g. `(..., ´x´:´T´*)`. -The type of such a _repeated_ parameter inside the method is then the sequence type `scala.Seq[´T´]`. -Methods with repeated parameters `´T´*` take a variable number of arguments of type ´T´. -That is, if a method ´m´ with type `(´p_1:T_1, ..., p_n:T_n, p_s:S´*)´U´` is applied to arguments ´(e_1, ..., e_k)´ where ´k \geq n´, then ´m´ is taken in that application to have type ´(p_1:T_1, ..., p_n:T_n, p_s:S, ..., p_{s'}:S)U´, with ´k - n´ occurrences of type ´S´ where any parameter names beyond ´p_s´ are fresh. The only exception to this rule is if the last argument is marked to be a _sequence argument_ via a `_*` type annotation. 
-If ´m´ above is applied to arguments `(´e_1, ..., e_n, e'´: _*)`, then the type of ´m´ in that application is taken to be `(´p_1:T_1, ... , p_n:T_n,p_{s}:´scala.Seq[´S´])`. - -It is not allowed to define any default arguments in a parameter section with a repeated parameter. - -###### Example -The following method definition computes the sum of the squares of a variable number of integer arguments. - -```scala -def sum(args: Int*) = { - var result = 0 - for (arg <- args) result += arg - result -} -``` - -The following applications of this method yield `0`, `1`, `6`, in that order. - -```scala -sum() -sum(1) -sum(1, 2, 3) -``` - -Furthermore, assume the definition: - -```scala -val xs = List(1, 2, 3) -``` - -The following application of method `sum` is ill-formed: - -```scala -sum(xs) // ***** error: expected: Int, found: List[Int] -``` - -By contrast, the following application is well formed and yields again the result `6`: - -```scala -sum(xs: _*) -``` - -### Method Return Type Inference - -A class member definition ´m´ that overrides some other method ´m'´ in a base class of ´C´ may leave out the return type, even if it is recursive. -In this case, whether or not `m` is recursive, its return type will be the return type of ´m'´. - -###### Example -Assume the following definitions: - -```scala -trait I { - def factorial(x: Int): Int -} -class C extends I { - def factorial(x: Int) = if (x == 0) 1 else x * factorial(x - 1) -} -``` - -Here, it is OK to leave out the result type of `factorial` in `C`, even though the method is recursive. - - - -## Import Clauses - -```ebnf -Import ::= ‘import’ ImportExpr {‘,’ ImportExpr} -ImportExpr ::= StableId ‘.’ (id | ‘_’ | ImportSelectors) -ImportSelectors ::= ‘{’ {ImportSelector ‘,’} - (ImportSelector | ‘_’) ‘}’ -ImportSelector ::= id [‘=>’ id | ‘=>’ ‘_’] -``` - -An import clause has the form `import ´p´.´I´` where ´p´ is a [stable identifier](03-types.html#paths) and ´I´ is an import expression. -The import expression determines a set of names of importable members of ´p´ which are made available without qualification. -A member ´m´ of ´p´ is _importable_ if it is [accessible](05-classes-and-objects.html#modifiers). -The most general form of an import expression is a list of _import selectors_ - -```scala -{ ´x_1´ => ´y_1, ..., x_n´ => ´y_n´, _ } -``` - -for ´n \geq 0´, where the final wildcard `‘_’` may be absent. -It makes available each importable member `´p´.´x_i´` under the unqualified name ´y_i´. I.e. every import selector `´x_i´ => ´y_i´` renames `´p´.´x_i´` to -´y_i´. -If a final wildcard is present, all importable members ´z´ of ´p´ other than `´x_1, ..., x_n,y_1, ..., y_n´` are also made available under their own unqualified names. - -Import selectors work in the same way for type and term members. -For instance, an import clause `import ´p´.{´x´ => ´y\,´}` renames the term -name `´p´.´x´` to the term name ´y´ and the type name `´p´.´x´` to the type name ´y´. -At least one of these two names must reference an importable member of ´p´. - -If the target in an import selector is a wildcard, the import selector hides access to the source member. -For instance, the import selector `´x´ => _` “renames” ´x´ to the wildcard symbol (which is unaccessible as a name in user programs), and thereby effectively prevents unqualified access to ´x´. -This is useful if there is a final wildcard in the same import selector list, which imports all members not mentioned in previous import selectors. 
- -The scope of a binding introduced by an import-clause starts immediately after the import clause and extends to the end of the enclosing block, template, package clause, or compilation unit, whichever comes first. - -Several shorthands exist. An import selector may be just a simple name ´x´. -In this case, ´x´ is imported without renaming, so the import selector is equivalent to `´x´ => ´x´`. -Furthermore, it is possible to replace the whole import selector list by a single identifier or wildcard. -The import clause `import ´p´.´x´` is equivalent to `import ´p´.{´x\,´}`, i.e. it makes available without qualification the member ´x´ of ´p´. The import clause `import ´p´._` is equivalent to `import ´p´.{_}`, i.e. it makes available without qualification all members of ´p´ (this is analogous to `import ´p´.*` in Java). - -An import clause with multiple import expressions `import ´p_1´.´I_1, ..., p_n´.´I_n´` is interpreted as a sequence of import clauses `import ´p_1´.´I_1´; ...; import ´p_n´.´I_n´`. - -###### Example -Consider the object definition: - -```scala -object M { - def z = 0, one = 1 - def add(x: Int, y: Int): Int = x + y -} -``` - -Then the block - -```scala -{ import M.{one, z => zero, _}; add(zero, one) } -``` - -is equivalent to the block - -```scala -{ M.add(M.z, M.one) } -``` diff --git a/docs/_spec/04-basic-definitions.md b/docs/_spec/04-basic-definitions.md new file mode 100644 index 000000000000..369709b52bff --- /dev/null +++ b/docs/_spec/04-basic-definitions.md @@ -0,0 +1,814 @@ +--- +title: Basic Definitions +layout: default +chapter: 4 +--- + +# Basic Definitions + +```ebnf +PatVarDef ::= ‘val’ PatDef + | ‘var’ VarDef +Def ::= PatVarDef + | ‘def’ FunDef + | ‘type’ {nl} TypeDef + | ‘opaque‘ ‘type‘ {nl} OpaqueTypeDef + | TmplDef +``` + +A _definition_ introduces names that denote terms and assigns them types, or that denote types and assigns them [type definitions](./03-types.html#type-definitions). +It can form part of an object or [class definition](05-classes-and-objects.html#templates) or it can be local to a block. + +The scope of a name introduced by a definition is the whole statement sequence containing the definition. +However, there is a restriction on forward references in blocks: +In a statement sequence ´s_1 ... s_n´ making up a block, if a simple name in ´s_i´ refers to an entity defined by ´s_j´ where ´j \geq i´, then for all ´s_k´ between and including ´s_i´ and ´s_j´, + +- ´s_k´ cannot be a variable definition. +- If ´s_k´ is a value definition, it must be lazy. + +Moreover, in a block, all term definitions must be concrete, and opaque type alias definitions are not allowed. + + + +## Value Definitions + +```ebnf +PatVarDef ::= ‘val’ PatDef +PatDef ::= Pattern2 {‘,’ Pattern2} [‘:’ Type] [‘=’ Expr] +ids ::= id {‘,’ id} +``` + +An abstract value definition `val ´x´: ´T´` introduces ´x´ as a name of a value of _declared type_ ´T´. +´T´ must be explicitly specified and must be a [proper type](03-types.html#proper-types). + +A concrete value definition `val ´x´: ´T´ = ´e´` defines ´x´ as a name of the value that results from the evaluation of ´e´. +If the value definition is not recursive, the declared type ´T´ may be omitted, in which case the [packed type](06-expressions.html#expression-typing) of the expression ´e´ is assumed. +If a type ´T´ is given, then it must be a [proper type](03-types.html#proper-types) and ´e´ is expected to [conform to it](06-expressions.html#expression-typing). 
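+
+For illustration only (a non-normative sketch; the names `Config` and `DefaultConfig` are invented for this example), the two forms look as follows:
+
+```scala
+trait Config {
+  val port: Int                   // abstract value definition: the declared type is mandatory
+}
+object DefaultConfig extends Config {
+  val port = 8080                 // concrete value definition: the type Int is inferred
+  val host: String = "localhost"  // concrete, with declared type; "localhost" must conform to String
+}
+```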
+ +Evaluation of the value definition implies evaluation of its right-hand side ´e´, unless it has the modifier `lazy`. +The effect of the value definition is to bind ´x´ to the value of ´e´ converted to type ´T´. +A `lazy` value definition evaluates its right hand side ´e´ the first time the value is accessed. + +A _constant value definition_ is of the form + +```scala +final val x = e +``` + +where `e` is a [constant expression](06-expressions.html#constant-expressions). +The `final` modifier must be present and no type annotation may be given. +References to the constant value `x` are themselves treated as constant expressions; in the generated code they are replaced by the definition's right-hand side `e`. + +Concrete value definitions can alternatively have a [pattern](08-pattern-matching.html#patterns) as left-hand side. +If ´p´ is some pattern other than a simple name or a name followed by a colon and a type, then the value definition `val ´p´ = ´e´` is expanded as follows: + +1. If the pattern ´p´ has bound variables ´x_1, ..., x_n´, where ´n > 1´: + +```scala +val ´\$x´ = ´e´ match {case ´p´ => (´x_1, ..., x_n´)} +val ´x_1´ = ´\$x´._1 +... +val ´x_n´ = ´\$x´._n +``` + +Here, ´\$x´ is a fresh name. + +2. If ´p´ has a unique bound variable ´x´: + +```scala +val ´x´ = ´e´ match { case ´p´ => ´x´ } +``` + +3. If ´p´ has no bound variables: + +```scala +´e´ match { case ´p´ => () } +``` + +###### Example + +The following are examples of value definitions + +```scala +val foo: Int // abstract value definition +val pi = 3.1415 +val pi: Double = 3.1415 // equivalent to first definition +val Some(x) = f() // a pattern definition +val x :: xs = mylist // an infix pattern definition +``` + +The last two definitions have the following expansions. + +```scala +val x = f() match { case Some(x) => x } + +val x´\$´ = mylist match { case x :: xs => (x, xs) } +val x = x´\$´._1 +val xs = x´\$´._2 +``` + +The name of any defined value may not end in `_=`. + +A value definition `val ´x_1, ..., x_n´: ´T´` is a shorthand for the sequence of value definitions `val ´x_1´: ´T´; ...; val ´x_n´: ´T´`. +A value definition `val ´p_1, ..., p_n´ = ´e´` is a shorthand for the sequence of value definitions `val ´p_1´ = ´e´; ...; val ´p_n´ = ´e´`. +A value definition `val ´p_1, ..., p_n: T´ = ´e´` is a shorthand for the sequence of value definitions `val ´p_1: T´ = ´e´; ...; val ´p_n: T´ = ´e´`. + +## Variable Definitions + +```ebnf +Dcl ::= ‘var’ VarDcl +PatVarDef ::= ‘var’ VarDef +VarDcl ::= ids ‘:’ Type +VarDef ::= PatDef + | ids ‘:’ Type ‘=’ ‘_’ +``` + +An abstract variable definition `var ´x´: ´T´` is equivalent to the definition of both a _getter method_ ´x´ *and* a _setter method_ `´x´_=`: + +```scala +def ´x´: ´T´ +def ´x´_= (´y´: ´T´): Unit +``` + +An implementation of a class may implement a defined abstract variable using a concrete variable definition, or by defining the corresponding setter and getter methods. + +A concrete variable definition `var ´x´: ´T´ = ´e´` introduces a mutable variable with type ´T´ and initial value as given by the expression ´e´. +The type ´T´ can be omitted, in which case the type of ´e´ is assumed. +If ´T´ is given, then it must be a [proper type](03-types.html#proper-types) and ´e´ is expected to [conform to it](06-expressions.html#expression-typing). + +Variable definitions can alternatively have a [pattern](08-pattern-matching.html#patterns) as left-hand side. 
+A variable definition `var ´p´ = ´e´` where ´p´ is a pattern other than a simple name or a name followed by a colon and a type is expanded in the same way as a [value definition](#value-definitions) `val ´p´ = ´e´`, except that the free names in ´p´ are introduced as mutable variables, not values.
+
+The name of any defined variable may not end in `_=`.
+
+The right-hand side of a mutable variable definition that is a member of a template can be the special reference `scala.compiletime.uninitialized`: `var ´x´: ´T´ = scala.compiletime.uninitialized`.
+It introduces a mutable field with type ´T´ and a default initial value.
+The default value depends on the type ´T´ as follows:
+
+| default  | type ´T´                           |
+|----------|------------------------------------|
+|`0`       | `Int` or one of its subrange types |
+|`0L`      | `Long`                             |
+|`0.0f`    | `Float`                            |
+|`0.0d`    | `Double`                           |
+|`false`   | `Boolean`                          |
+|`()`      | `Unit`                             |
+|`null`    | all other types                    |
+
+`scala.compiletime.uninitialized` can never appear anywhere else.
+For compatibility with Scala 2, the syntax `var ´x´: ´T´ = _` is accepted as equivalent to using `uninitialized`.
+
+When they occur as members of a template, both forms of concrete variable definition also introduce a setter method `´x´_=` which changes the value currently assigned to the variable.
+The setter has the same signature as for an abstract variable definition.
+It is then not possible to directly modify the value assigned to the variable; mutations always go through the corresponding setter.
+
+###### Example
+
+The following example shows how _properties_ can be simulated in Scala.
+It defines a class `TimeOfDayVar` of time values with updatable integer fields representing hours, minutes, and seconds.
+Its implementation contains tests that allow only legal values to be assigned to these fields.
+The user code, on the other hand, accesses these fields just like normal variables.
+
+```scala
+class TimeOfDayVar {
+  private var h: Int = 0
+  private var m: Int = 0
+  private var s: Int = 0
+
+  def hours = h
+  def hours_= (h: Int) = if (0 <= h && h < 24) this.h = h
+                         else throw new DateError()
+
+  def minutes = m
+  def minutes_= (m: Int) = if (0 <= m && m < 60) this.m = m
+                           else throw new DateError()
+
+  def seconds = s
+  def seconds_= (s: Int) = if (0 <= s && s < 60) this.s = s
+                           else throw new DateError()
+}
+val d = new TimeOfDayVar
+d.hours = 8; d.minutes = 30; d.seconds = 0
+d.hours = 25 // throws a DateError exception
+```
+
+A variable definition `var ´x_1, ..., x_n´: ´T´` is a shorthand for the sequence of variable definitions `var ´x_1´: ´T´; ...; var ´x_n´: ´T´`.
+A variable definition `var ´x_1, ..., x_n´ = ´e´` is a shorthand for the sequence of variable definitions `var ´x_1´ = ´e´; ...; var ´x_n´ = ´e´`.
+A variable definition `var ´x_1, ..., x_n: T´ = ´e´` is a shorthand for the sequence of variable definitions `var ´x_1: T´ = ´e´; ...; var ´x_n: T´ = ´e´`.
+
+## Type Member Definitions
+
+```ebnf
+Dcl           ::= ‘type’ {nl} TypeDcl
+TypeDcl       ::= id [TypeParamClause] [‘>:’ Type] [‘<:’ Type]
+Def           ::= ‘type’ {nl} TypeDef
+                | ‘opaque‘ ‘type‘ {nl} OpaqueTypeDef
+TypeDef       ::= id [TypeParamClause] ‘=’ Type
+OpaqueTypeDef ::= id [TypeParamClause] [‘>:’ Type] [‘<:’ Type] ‘=’ Type
+```
+
+_Type members_ can be abstract type members, type aliases, or opaque type aliases.
+
+A possibly parameterized _abstract type member_ definition `type ´t´[´\mathit{tps}\,´] >: ´L´ <: ´H´` declares ´t´ to be an abstract type.
+If omitted, ´L´ and ´H´ are implied to be `Nothing` and `scala.Any`, respectively.
+
+A possibly parameterized _type alias_ definition `type ´t´[´\mathit{tps}\,´] = ´T´` defines ´t´ to be a concrete type member.
+
+A possibly parameterized _opaque type alias_ definition `opaque type ´t´[´\mathit{tps}\,´] >: ´L´ <: ´H´ = ´T´` defines ´t´ to be an opaque type alias with public bounds `>: ´L´ <: ´H´` and a private alias `= ´T´`.
+
+If a type parameter clause `[´\mathit{tps}\,´]` is present, it is desugared away according to the rules in the following section.
+
+### Desugaring of parameterized type definitions
+
+A parameterized type definition is desugared into an unparameterized type definition whose bounds are [type lambdas](03-types.html#type-lambdas) with explicit variance annotations.
+
+The scope of a type parameter extends over the bounds `>: ´L´ <: ´U´` or the alias `= ´T´` and the type parameter clause ´\mathit{tps}´ itself.
+A higher-order type parameter clause (of an abstract type constructor ´tc´) has the same kind of scope, restricted to the definition of the type parameter ´tc´.
+
+To illustrate nested scoping, these definitions are all equivalent: `type t[m[x] <: Bound[x], Bound[x]]`, `type t[m[x] <: Bound[x], Bound[y]]` and `type t[m[x] <: Bound[x], Bound[_]]`, as the scope of, e.g., the type parameter of ´m´ is limited to the definition of ´m´.
+In all of them, ´t´ is an abstract type member that abstracts over two type constructors: ´m´ stands for a type constructor that takes one type parameter and that must be a subtype of `Bound`, ´t´'s second type constructor parameter.
+`t[MutableList, Iterable]` is a valid use of ´t´.
+
+#### Abstract Type
+
+A parameterized abstract type
+```scala
+type ´t´[´\mathit{tps}\,´] >: ´L´ <: ´H´
+```
+is desugared into an unparameterized abstract type as follows:
+- If `L` conforms to `Nothing`, then,
+
+  ```scala
+type ´t´ >: Nothing
+       <: [´\mathit{tps'}\,´] =>> ´H´
+  ```
+- otherwise,
+
+  ```scala
+type ´t´ >: [´\mathit{tps'}\,´] =>> ´L´
+       <: [´\mathit{tps'}\,´] =>> ´H´
+  ```
+
+If at least one of the ´\mathit{tps}´ contains an explicit variance annotation, then ´\mathit{tps'} = \mathit{tps}´, otherwise we infer the variance of each type parameter as with the user-written type lambda `[´\mathit{tps}\,´] =>> ´H´`.
+
+The same desugaring applies to type parameters.
+For instance,
+```scala
+[F[X] <: Coll[X]]
+```
+is treated as a shorthand for
+```scala
+[F >: Nothing <: [X] =>> Coll[X]]
+```
+
+#### Type Alias
+
+A parameterized type alias
+```scala
+type ´t´[´\mathit{tps}\,´] = ´T´
+```
+is desugared into an unparameterized type alias
+```scala
+type ´t´ = [´\mathit{tps'}\,´] =>> ´T´
+```
+where ´\mathit{tps'}´ is computed as in the previous case.
+
+#### Opaque Type Alias
+
+A parameterized opaque type alias
+```scala
+opaque type ´t´[´\mathit{tps}\,´] >: ´L´ <: ´H´ = ´T´
+```
+is desugared into an unparameterized opaque type alias as follows:
+- If `L` conforms to `Nothing`, then,
+
+  ```scala
+type ´t´ >: Nothing <: [´\mathit{tps'}\,´] =>> ´H´ = [´\mathit{tps'}\,´] =>> ´T´
+  ```
+- otherwise,
+
+  ```scala
+type ´t´ >: [´\mathit{tps'}\,´] =>> ´L´ <: [´\mathit{tps'}\,´] =>> ´H´ = [´\mathit{tps'}\,´] =>> ´T´
+  ```
+where ´\mathit{tps'}´ is computed as in the previous cases.
+
+### Non-Parameterized Type Member Definitions
+
+An _abstract type member_ definition `type ´t´ >: ´L´ <: ´H´` declares ´t´ to be an abstract type whose [type definition](03-types.html#type-definitions) has the lower bound type ´L´ and upper bound type ´H´.
+ +If a type definition appears as a member definition of a type, implementations of the type may implement ´t´ with any type ´T´ for which ´L <: T <: H´. +It is a compile-time error if ´L´ does not conform to ´H´. + +A _type alias_ definition `type ´t´ = ´T´` defines ´t´ to be an alias name for the type ´T´. + +An _opaque type alias_ definition `opaque type ´t´ >: ´L´ <: ´H´ = ´T´` defines ´t´ to be an opaque type alias with public bounds `>: ´L´ <: ´H´` and a private alias `= ´T´`. +An opaque type alias can only be declared within a [template](./05-classes-and-objects.html#templates). +It cannot be `private` and cannot be overridden in subclasses. +In order for the definition to be valid, ´T´ must satisfy some constraints: + +- ´L <: T´ and ´T <: H´ must be true, +- ´T´ must not be a context function type, and +- If ´T´ is a type lambda, its result must be a proper type (i.e., it cannot be a curried type lambda). + +When viewed from within its enclosing template, an opaque type alias behaves as a type alias with type definition `= ´T´`. +When viewed from anywhere else, it behaves as an abstract type member with type definition `>: ´L´ <: ´H´`. +See [`memberType`](./03-types.html#member-type) for the precise mechanism that governs this dual view. + +The scope rules for [definitions](#basic-definitions) and [type parameters](#method-definitions) make it possible that a type name appears in its own bounds or in its right-hand side. +However, it is a static error if a type alias refers recursively to the defined type itself. +That is, the type ´T´ in a type alias `type ´t´[´\mathit{tps}\,´] = ´T´` may not refer directly or indirectly to the name ´t´. +It is also an error if an abstract type is directly or indirectly its own upper or lower bound. + +###### Example + +The following are legal type definitions: + +```scala +type IntList = List[Integer] +type T <: Comparable[T] +type Two[A] = Tuple2[A, A] // desugars to Two = [A] =>> Tuple2[A, A] +type MyCollection[+X] <: Iterable[X] // desugars to MyCollection <: [+X] =>> Iterable[X] +``` + +The following are illegal: + +```scala +type Abs = Comparable[Abs] // recursive type alias + +type S <: T // S, T are bounded by themselves. +type T <: S + +type T >: Comparable[T.That] // Cannot select from T. + // T is a type, not a value +type MyCollection <: Iterable // The reference to the type constructor + // Iterable must explicitly state its type arguments. +``` + +If a type alias `type ´t´ = ´S´` refers to a class type ´S´ (or to a type lambda that is the eta-expansion of class type ´S´), the name ´t´ can also be used as a constructor for objects of type ´S´. + +###### Example + +Suppose we make `Pair` an alias of the parameterized class `Tuple2`, as follows: + +```scala +type Pair[+A, +B] = Tuple2[A, B] +object Pair { + def apply[A, B](x: A, y: B) = Tuple2(x, y) + def unapply[A, B](x: Tuple2[A, B]): Option[Tuple2[A, B]] = Some(x) +} +``` + +As a consequence, for any two types ´S´ and ´T´, the type `Pair[´S´, ´T\,´]` is equivalent to the type `Tuple2[´S´, ´T\,´]`. +`Pair` can also be used as a constructor instead of `Tuple2`, as in: + +```scala +val x: Pair[Int, String] = new Pair(1, "abc") +``` + +## Type Parameters + +```ebnf +TypeParamClause ::= ‘[’ VariantTypeParam {‘,’ VariantTypeParam} ‘]’ +VariantTypeParam ::= {Annotation} [‘+’ | ‘-’] TypeParam +TypeParam ::= (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type] [‘:’ Type] +``` + +Type parameters appear in type definitions, class definitions, and method definitions. 
+In this section we consider only type parameter definitions with lower bounds `>: ´L´` and upper bounds `<: ´U´` whereas a discussion of context bounds `: ´U´` and view bounds `<% ´U´` is deferred to [here](07-implicits.html#context-bounds-and-view-bounds). + +The most general form of a proper type parameter is +`´@a_1 ... @a_n´ ´\pm´ ´t´ >: ´L´ <: ´U´`. +Here, ´L´, and ´U´ are lower and upper bounds that constrain possible type arguments for the parameter. +It is a compile-time error if ´L´ does not conform to ´U´. +´\pm´ is a _variance_, i.e. an optional prefix of either `+`, or `-`. One or more annotations may precede the type parameter. + + + + + +The names of all type parameters must be pairwise different in their enclosing type parameter clause. +The scope of a type parameter includes in each case the whole type parameter clause. +Therefore it is possible that a type parameter appears as part of its own bounds or the bounds of other type parameters in the same clause. +However, a type parameter may not be bounded directly or indirectly by itself. + +A type constructor parameter adds a nested type parameter clause to the type parameter. +The most general form of a type constructor parameter is `´@a_1 ... @a_n \pm t[\mathit{tps}\,]´ >: ´L´ <: ´U´`. + +The above scoping restrictions are generalized to the case of nested type parameter clauses, which declare higher-order type parameters. +Higher-order type parameters (the type parameters of a type parameter ´t´) are only visible in their immediately surrounding parameter clause (possibly including clauses at a deeper nesting level) and in the bounds of ´t´. +Therefore, their names must only be pairwise different from the names of other visible parameters. +Since the names of higher-order type parameters are thus often irrelevant, they may be denoted with a `‘_’`, which is nowhere visible. + +###### Example +Here are some well-formed type parameter clauses: + +```scala +[S, T] +[@specialized T, U] +[Ex <: Throwable] +[A <: Comparable[B], B <: A] +[A, B >: A, C >: A <: B] +[M[X], N[X]] +[M[_], N[_]] // equivalent to previous clause +[M[X <: Bound[X]], Bound[_]] +[M[+X] <: Iterable[X]] +``` + +The following type parameter clauses are illegal: + +```scala +[A >: A] // illegal, `A' has itself as bound +[A <: B, B <: C, C <: A] // illegal, `A' has itself as bound +[A, B, C >: A <: B] // illegal lower bound `A' of `C' does + // not conform to upper bound `B'. +``` + +## Variance Annotations + +Variance annotations indicate how instances of parameterized types vary with respect to [subtyping](03-types.html#conformance). +A ‘+’ variance indicates a covariant dependency, a ‘-’ variance indicates a contravariant dependency, and a missing variance indication indicates an invariant dependency. + +A variance annotation constrains the way the annotated type variable may appear in the type or class which binds the type parameter. +In a type definition `type ´T´[´\mathit{tps}\,´] = ´S´`, `type ´T´[´\mathit{tps}\,´] >: ´L´ <: ´U´` or `opaque type ´T´[´\mathit{tps}\,´] >: ´L´ <: ´U´ = ´S´`, type parameters labeled ‘+’ must only appear in covariant position whereas type parameters labeled ‘-’ must only appear in contravariant position. +Analogously, for a class definition `class ´C´[´\mathit{tps}\,´](´\mathit{ps}\,´) extends ´T´ { ´x´: ´S´ => ...}`, type parameters labeled ‘+’ must only appear in covariant position in the self type ´S´ and the template ´T´, whereas type parameters labeled ‘-’ must only appear in contravariant position. 
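+
+As a quick non-normative preview of the rules below (the class names are invented for this example):
+
+```scala
+class Producer[+A](val produce: A)                  // OK: A appears only in covariant position
+class Consumer[-A] { def accept(x: A): Unit = () }  // OK: A appears only in contravariant position
+// class Holder[+A](var item: A)                    // error: covariant A in invariant position (mutable variable)
+```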
+ +The variance position of a type parameter in a type or template is defined as follows. +Let the opposite of covariance be contravariance, and the opposite of invariance be itself. +The top-level of the type or template is always in covariant position. +The variance position changes at the following constructs. + +- The variance position of a method parameter is the opposite of the variance position of the enclosing parameter clause. +- The variance position of a type parameter is the opposite of the variance position of the enclosing type parameter clause. +- The variance position of the lower bound of a type definition or type parameter is the opposite of the variance position of the type definition or parameter. +- The type of a mutable variable is always in invariant position. +- The right-hand side of a type alias is always in invariant position. +- The prefix ´p´ of a type selection `´p.T´` is always in invariant position. +- For a type argument ´T´ of a type `´S´[´..., T, ...´]`: + - If the corresponding type parameter of ´S´ is invariant, then ´T´ is in invariant position. + - If the corresponding type parameter of ´S´ is contravariant, the variance position of ´T´ is the opposite of the variance position of the enclosing type `´S´[´..., T, ...´]`. + +References to the type parameters in [object-private values, types, variables, or methods](05-classes-and-objects.html#modifiers) of the class are not checked for their variance position. +In these members the type parameter may appear anywhere without restricting its legal variance annotations. + +###### Example +The following variance annotation is legal. + +```scala +abstract class P[+A, +B] { + def fst: A + def snd: B +} +``` + +With this variance annotation, type instances of ´P´ subtype covariantly with respect to their arguments. +For instance, + +```scala +P[IOException, String] <: P[Throwable, AnyRef] +``` + +If the members of ´P´ are mutable variables, the same variance annotation becomes illegal. + +```scala +abstract class Q[+A, +B](x: A, y: B) { + var fst: A = x // **** error: illegal variance: + var snd: B = y // `A', `B' occur in invariant position. +} +``` + +If the mutable variables are object-private, the class definition becomes legal again: + +```scala +abstract class R[+A, +B](x: A, y: B) { + private var fst: A = x // OK + private var snd: B = y // OK +} +``` + +###### Example + +The following variance annotation is illegal, since ´A´ appears in contravariant position in the parameter of `append`: + +```scala +abstract class Sequence[+A] { + def append(x: Sequence[A]): Sequence[A] + // **** error: illegal variance: + // `A' occurs in contravariant position. +} +``` + +The problem can be avoided by generalizing the type of `append` by means of a lower bound: + +```scala +abstract class Sequence[+A] { + def append[B >: A](x: Sequence[B]): Sequence[B] +} +``` + +###### Example + +```scala +abstract class OutputChannel[-A] { + def write(x: A): Unit +} +``` + +With that annotation, we have that `OutputChannel[AnyRef]` conforms to `OutputChannel[String]`. +That is, a channel on which one can write any object can substitute for a channel on which one can write only strings. 
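+
+The following non-normative sketch illustrates this substitution (the names `sendGreeting` and `allChannel` are invented for this example):
+
+```scala
+def sendGreeting(ch: OutputChannel[String]): Unit =
+  ch.write("hello")
+
+val allChannel: OutputChannel[AnyRef] = new OutputChannel[AnyRef] {
+  def write(x: AnyRef): Unit = println(x)
+}
+
+sendGreeting(allChannel)  // OK: OutputChannel[AnyRef] <: OutputChannel[String]
+```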
+ +## Method Definitions + +```ebnf +Def ::= ‘def’ FunDef +FunDef ::= FunSig [‘:’ Type] [‘=’ Expr] +FunSig ::= id [FunTypeParamClause] ParamClauses +FunTypeParamClause ::= ‘[’ TypeParam {‘,’ TypeParam} ‘]’ +ParamClauses ::= {ParamClause} [[nl] ‘(’ ‘implicit’ Params ‘)’] +ParamClause ::= [nl] ‘(’ [Params] ‘)’ +Params ::= Param {‘,’ Param} +Param ::= {Annotation} id [‘:’ ParamType] [‘=’ Expr] +ParamType ::= Type + | ‘=>’ Type + | Type ‘*’ +``` + +An _abstract method definition_ has the form `def ´f\,\mathit{psig}´: ´T´`, where ´f´ is the method's name, ´\mathit{psig}´ is its parameter signature and ´T´ is its result type. +A _concrete method definition_ `def ´f\,\mathit{psig}´: ´T´ = ´e´` also includes a _method body_ ´e´, i.e. an expression which defines the method's result. +A parameter signature consists of an optional type parameter clause `[´\mathit{tps}\,´]`, followed by zero or more value parameter clauses `(´\mathit{ps}_1´)...(´\mathit{ps}_n´)`. + +If there is no type or term parameter clause, a method definition introduces a method with a proper type, which is also its result type. +Otherwise, it introduces a method with a methodic type whose parameter types and result type are as given. + +The type of the method body is expected to [conform](06-expressions.html#expression-typing) to the method's declared result type, if one is given. +If the method definition is not recursive, the result type may be omitted, in which case it is determined from the packed type of the method body. + +A _type parameter clause_ ´\mathit{tps}´ consists of one or more [type definitions](#type-definitions), which introduce type parameters, possibly with bounds. +The scope of a type parameter includes the whole signature, including any of the type parameter bounds as well as the method body, if it is present. + +A _value parameter clause_ ´\mathit{ps}´ consists of zero or more formal parameter bindings such as `´x´: ´T´` or `´x: T = e´`, which bind value parameters and associate them with their types. + +A unary operator must not have explicit parameter lists even if they are empty. +A unary operator is a method named `"unary_´op´"` where ´op´ is one of `+`, `-`, `!`, or `~`. + +### Default Arguments + +Each value parameter may optionally define a default argument. +The default argument expression ´e´ is type-checked with an expected type ´T'´ obtained by replacing all occurrences of the method's type parameters in ´T´ by the undefined type. + +For every parameter ´p_{i,j}´ with a default argument, a method named `´f\$´default´\$´n` is generated which computes the default argument expression. +Here, ´n´ denotes the parameter's position in the method definition. +These methods are parametrized by the type parameter clause `[´\mathit{tps}\,´]` and all value parameter clauses `(´\mathit{ps}_1´)...(´\mathit{ps}_{i-1}´)` preceding ´p_{i,j}´. +The `´f\$´default´\$´n` methods are inaccessible for user programs. + +###### Example +In the method + +```scala +def compare[T](a: T = 0)(b: T = a) = (a == b) +``` + +the default expression `0` is type-checked with an undefined expected type. +When applying `compare()`, the default value `0` is inserted and `T` is instantiated to `Int`. +The methods computing the default arguments have the form: + +```scala +def compare´\$´default´\$´1[T]: Int = 0 +def compare´\$´default´\$´2[T](a: T): T = a +``` + +The scope of a formal value parameter name ´x´ comprises all subsequent parameter clauses, as well as the method return type and the method body, if they are given. 
+Both type parameter names and value parameter names must be pairwise distinct.
+
+A default value which depends on earlier parameters uses the actual arguments if they are provided, not the default arguments.
+
+```scala
+def f(a: Int = 0)(b: Int = a + 1) = b // OK
+// def f(a: Int = 0, b: Int = a + 1) // "error: not found: value a"
+f(10)() // returns 11 (not 1)
+```
+
+If an [implicit argument](07-implicits.html#implicit-parameters) is not found by implicit search, it may be supplied using a default argument.
+
+```scala
+implicit val i: Int = 2
+def f(implicit x: Int, s: String = "hi") = s * x
+f // "hihi"
+```
+
+### By-Name Parameters
+
+```ebnf
+ParamType ::= ‘=>’ Type
+```
+
+The type of a value parameter may be prefixed by `=>`, e.g. `´x´: => ´T´`.
+The type of such a parameter is then the [by-name type](./03-types.html#by-name-types) `=> ´T´`.
+This indicates that the corresponding argument is not evaluated at the point of method application, but instead is evaluated at each use within the method.
+That is, the argument is evaluated using _call-by-name_.
+
+The by-name modifier is disallowed for parameters of classes that carry a `val` or `var` prefix, including parameters of case classes for which a `val` prefix is implicitly generated.
+
+###### Example
+The definition
+
+```scala
+def whileLoop (cond: => Boolean) (stat: => Unit): Unit
+```
+
+indicates that both parameters of `whileLoop` are evaluated using call-by-name.
+
+### Repeated Parameters
+
+```ebnf
+ParamType ::= Type ‘*’
+```
+
+The last value parameter of a parameter section may be suffixed by `'*'`, e.g. `(..., ´x´:´T´*)`.
+The type of such a _repeated_ parameter inside the method is then the sequence type `scala.Seq[´T´]`.
+Methods with repeated parameters `´T´*` take a variable number of arguments of type ´T´.
+That is, if a method ´m´ with type `(´p_1:T_1, ..., p_n:T_n, p_s:S´*)´U´` is applied to arguments ´(e_1, ..., e_k)´ where ´k \geq n´, then ´m´ is taken in that application to have type ´(p_1:T_1, ..., p_n:T_n, p_s:S, ..., p_{s'}:S)U´, with ´k - n´ occurrences of type ´S´ where any parameter names beyond ´p_s´ are fresh.
+The only exception to this rule is if the last argument is marked to be a _sequence argument_ via a `_*` type annotation.
+If ´m´ above is applied to arguments `(´e_1, ..., e_n, e'´: _*)`, then the type of ´m´ in that application is taken to be `(´p_1:T_1, ... , p_n:T_n,p_{s}:´scala.Seq[´S´])`.
+
+It is not allowed to define any default arguments in a parameter section with a repeated parameter.
+
+###### Example
+The following method definition computes the sum of a variable number of integer arguments.
+
+```scala
+def sum(args: Int*) = {
+  var result = 0
+  for (arg <- args) result += arg
+  result
+}
+```
+
+The following applications of this method yield `0`, `1`, `6`, in that order.
+
+```scala
+sum()
+sum(1)
+sum(1, 2, 3)
+```
+
+Furthermore, assume the definition:
+
+```scala
+val xs = List(1, 2, 3)
+```
+
+The following application of method `sum` is ill-formed:
+
+```scala
+sum(xs) // ***** error: expected: Int, found: List[Int]
+```
+
+By contrast, the following application is well formed and yields again the result `6`:
+
+```scala
+sum(xs: _*)
+```
+
+### Method Return Type Inference
+
+A class member definition ´m´ that overrides some other method ´m'´ in a base class of ´C´ may leave out the return type, even if it is recursive.
+In this case, whether or not ´m´ is recursive, its return type will be the return type of ´m'´.
+
+###### Example
+Assume the following definitions:
+
+```scala
+trait I {
+  def factorial(x: Int): Int
+}
+class C extends I {
+  def factorial(x: Int) = if (x == 0) 1 else x * factorial(x - 1)
+}
+```
+
+Here, it is OK to leave out the result type of `factorial` in `C`, even though the method is recursive.
+
+
+
+## Import Clauses
+
+```ebnf
+Import            ::= ‘import’ ImportExpr {‘,’ ImportExpr}
+ImportExpr        ::= SimpleRef {‘.’ id} ‘.’ ImportSpecifier
+                    | SimpleRef ‘as’ id
+ImportSpecifier   ::= NamedSelector
+                    | WildcardSelector
+                    | ‘{’ ImportSelectors ‘}’
+NamedSelector     ::= id [(‘as’ | ‘=>’) (id | ‘_’)]
+WildcardSelector  ::= ‘*’ | ‘_’ | ‘given’ [InfixType]
+ImportSelectors   ::= NamedSelector [‘,’ ImportSelectors]
+                    | WildcardSelector {‘,’ WildcardSelector}
+```
+
+- In a `NamedSelector`, `=>` can only be used when inside an `ImportSelectors` and is then equivalent to `as`, to be deprecated in the future.
+- In a `WildcardSelector`, `_` is equivalent to `*`, to be deprecated in the future.
+
+An `ImportSpecifier` that is a single `NamedSelector` or `WildcardSelector` is equivalent to an `‘{’ ImportSelectors ‘}’` list with that single selector.
+
+An import clause with multiple import expressions `import ´p_1´.´I_1, ..., p_n´.´I_n´` is interpreted as a sequence of import clauses `import ´p_1´.´I_1´; ...; import ´p_n´.´I_n´`.
+
+An import clause with a single import expression has the form `import ´p´.´I´` where ´p´ is a [prefix](03-types.html#designator-types) and ´I´ is an import specifier.
+The import specifier determines a set of names of importable members of ´p´ which are made available without qualification as well as a set of importable `given` members which are made available in the implicit scope.
+A member ´m´ of ´p´ is _importable_ if it is [accessible](05-classes-and-objects.html#modifiers).
+The most general form of an import specifier is a list of _import selectors_
+
+```scala
+{ ´x_1´ as ´y_1, ..., x_n´ as ´y_n´, *, given ´T_1´, ..., given ´T_m´, given }
+```
+
+for ´n \geq 0´ and ´m \geq 0´, where the wildcards `‘*’` and `‘given’` may be absent.
+They are decomposed into non-given selectors and given selectors.
+
+### Non-given Imports
+
+Non-given selectors make available each importable member `´p´.´x_i´` under the unqualified name ´y_i´.
+In other words, every import selector `´x_i´ as ´y_i´` renames `´p´.´x_i´` to ´y_i´.
+When `as ´y_i´` is omitted, ´y_i´ is assumed to be ´x_i´.
+If a final wildcard `‘*’` is present, all non-`given` importable members ´z´ of ´p´ other than `´x_1, ..., x_n, y_1, ..., y_n´` are also made available under their own unqualified names.
+
+Non-given import selectors work in the same way for type and term members.
+For instance, an import clause `import ´p´.´x´ as ´y´` renames the term name `´p´.´x´` to the term name ´y´ and the type name `´p´.´x´` to the type name ´y´.
+At least one of these two names must reference an importable member of ´p´.
+
+If the target in an import selector is an underscore `as _`, the import selector hides access to the source member instead of importing it.
+For instance, the import selector `´x´ as _` “renames” ´x´ to the underscore symbol (which is not accessible as a name in user programs), and thereby effectively prevents unqualified access to ´x´.
+This is useful if there is a final wildcard in the same import selector list, which imports all members not mentioned in previous import selectors.
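+
+For instance (a non-normative sketch; the object `A` and its members are invented for this example), the following hides `f` while importing every other member of `A`:
+
+```scala
+object A {
+  def f = 1
+  def g = 2
+}
+
+object Test {
+  import A.{f as _, *}
+  def sum = g     // OK: g is imported by the final wildcard
+  // def bad = f  // error: f is hidden by the selector `f as _`
+}
+```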
+ +The scope of a binding introduced by a non-given import clause starts immediately after the import clause and extends to the end of the enclosing block, template, package clause, or compilation unit, whichever comes first. + +### Given Imports + +Given selectors make available in the implicit scope all the importable `given` and `implicit` members `´p´.´x´` such that `´p.x´` is a subtype of ´T_i´. +A bare `given` selector without type is equivalent to `given scala.Any`. + +The names of the given members are irrelevant for the selection, and are not made available in the normal scope of unqualified names. + +###### Example +Consider the object definition: + +```scala +object M { + def z = 0 + def one = 1 + def add(x: Int, y: Int): Int = x + y +} +``` + +Then the block + +```scala +{ + import M.{one, z as zero, *} + add(zero, one) +} +``` + +is equivalent to the block + +```scala +{ + M.add(M.z, M.one) +} +``` diff --git a/docs/_spec/05-classes-and-objects.md b/docs/_spec/05-classes-and-objects.md index 6feda780417a..e1d4ace3d81f 100644 --- a/docs/_spec/05-classes-and-objects.md +++ b/docs/_spec/05-classes-and-objects.md @@ -46,8 +46,8 @@ It is forbidden for a template's superclass constructor ´sc´ to be an [enum cl The _least proper supertype_ of a template is the class type or [compound type](03-types.html#compound-types) consisting of all its parent class types. The statement sequence ´\mathit{stats}´ contains member definitions that define new members or overwrite members in the parent classes. -If the template forms part of an abstract class or trait definition, the statement part ´\mathit{stats}´ may also contain declarations of abstract members. -If the template forms part of a concrete class definition, ´\mathit{stats}´ may still contain declarations of abstract type members, but not of abstract term members. +If the template forms part of an abstract class or trait definition, the statement part ´\mathit{stats}´ may also contain definitions of abstract members. +If the template forms part of a concrete class definition, ´\mathit{stats}´ may still contain definitions of abstract type members, but not of abstract term members. Furthermore, ´\mathit{stats}´ may in any case also contain expressions; these are executed in the order they are given as part of the initialization of a template. The sequence of template statements may be prefixed with a formal parameter definition and an arrow, e.g. `´x´ =>`, or `´x´:´T´ =>`. @@ -310,6 +310,7 @@ LocalModifier ::= ‘abstract’ | ‘sealed’ | ‘implicit’ | ‘lazy’ + | ‘infix’ AccessModifier ::= (‘private’ | ‘protected’) [AccessQualifier] AccessQualifier ::= ‘[’ (id | ‘this’) ‘]’ ``` @@ -320,7 +321,7 @@ Modifiers preceding a repeated definition apply to all constituent definitions. The rules governing the validity and meaning of a modifier are as follows. ### `private` -The `private` modifier can be used with any definition or declaration in a template. +The `private` modifier can be used with any definition in a template. Private members of a template can be accessed only from within the directly enclosing template and its companion module or [companion class](#object-definitions). The `private` modifier is also valid for [top-level](09-top-level-definitions.html#packagings) templates. @@ -358,18 +359,17 @@ A different form of qualification is `protected[this]`. A member ´M´ marked with this modifier is called _object-protected_; it can be accessed only from within the object in which it is defined. 
That is, a selection ´p.M´ is only legal if the prefix is `this` or `´O´.this`, for some class ´O´ enclosing the reference. In addition, the restrictions for unqualified `protected` apply. ### `override` -The `override` modifier applies to class member definitions or declarations. -It is mandatory for member definitions or declarations that override some other concrete member definition in a parent class. -If an `override` modifier is given, there must be at least one overridden member definition or declaration (either concrete or abstract). +The `override` modifier applies to class member definitions. +It is mandatory for member definitions that override some other concrete member definition in a parent class. +If an `override` modifier is given, there must be at least one overridden member definition (either concrete or abstract). ### `abstract override` The `override` modifier has an additional significance when combined with the `abstract` modifier. That modifier combination is only allowed for value members of traits. -We call a member ´M´ of a template _incomplete_ if it is either abstract (i.e. defined by a declaration), or it is labeled `abstract` and `override` and every member overridden by ´M´ is again incomplete. +We call a member ´M´ of a template _incomplete_ if it is either abstract, or it is labeled `abstract` and `override` and every member overridden by ´M´ is again incomplete. Note that the `abstract override` modifier combination does not influence the concept whether a member is concrete or abstract. -A member is _abstract_ if only a declaration is given for it; it is _concrete_ if a full definition is given. ### `abstract` The `abstract` modifier is used in class definitions. @@ -386,7 +386,7 @@ A `final` class member definition may not be overridden in subclasses. A `final` class may not be inherited by a template. `final` is redundant for object definitions. Members of final classes or objects are implicitly also final, so the `final` modifier is generally redundant for them, too. -Note, however, that [constant value definitions](04-basic-declarations-and-definitions.html#value-declarations-and-definitions) do require an explicit `final` modifier, even if they are defined in a final class or object. +Note, however, that [constant value definitions](04-basic-definitions.html#value-definitions) do require an explicit `final` modifier, even if they are defined in a final class or object. `final` is permitted for abstract classes but it may not be applied to traits or incomplete members, and it may not be combined in one modifier list with `sealed`. ### `sealed` @@ -401,6 +401,31 @@ happen at all). Attempting to access a lazy value during its initialization might lead to looping behavior. If an exception is thrown during initialization, the value is considered uninitialized, and a later access will retry to evaluate its right hand side. +### `infix` +The `infix` modifier applies to method definitions and type definitions. +It signals that the method or type is intended for use in infix position, even if it has an alphanumeric name. + +If a method overrides another, their `infix` annotations must agree. Either both are annotated with `infix`, or none of them are. + +The first non-receiver parameter list of an `infix` method must define exactly one parameter. 
Examples: + +```scala +infix def op1(x: S): R // ok +infix def op2[T](x: T)(y: S): R // ok +infix def op3[T](x: T, y: S): R // error: two parameters +extension (x: A) + infix def op4(y: B): R // ok + infix def op5(y1: B, y2: B): R // error: two parameters +``` + +`infix` modifiers can also be given to type, trait or class definitions that have exactly two type parameters. An infix type like + +```scala +infix type op[X, Y] +``` + +can be applied using infix syntax, i.e., `A op B`. + ###### Example The following code illustrates the use of qualified private: @@ -481,15 +506,15 @@ Here, If a class has no formal parameter section that is not implicit, an empty parameter section `()` is assumed. - If a formal parameter declaration ´x: T´ is preceded by a `val` or `var` keyword, an accessor (getter) [definition](04-basic-declarations-and-definitions.html#variable-declarations-and-definitions) for this parameter is implicitly added to the class. + If a formal parameter definition ´x: T´ is preceded by a `val` or `var` keyword, an accessor [definition](04-basic-definitions.html#value-definitions) for this parameter is implicitly added to the class. - The getter introduces a value member ´x´ of class ´c´ that is defined as an alias of the parameter. - If the introducing keyword is `var`, a setter accessor [`´x´_=`](04-basic-declarations-and-definitions.html#variable-declarations-and-definitions) is also implicitly added to the class. - In invocation of that setter `´x´_=(´e´)` changes the value of the parameter to the result of evaluating ´e´. + The accessor introduces a value member ´x´ of class ´c´ that is defined as an alias of the parameter. + If the introducing keyword is `var`, a setter accessor [`´x´_=`](04-basic-definitions.html#variable-definitions) is also implicitly added to the class. + An invocation of that setter `´x´_=(´e´)` changes the value of the parameter to the result of evaluating ´e´. - The formal parameter declaration may contain modifiers, which then carry over to the accessor definition(s). + The formal parameter definition may contain modifiers, which then carry over to the accessor definition(s). When access modifiers are given for a parameter, but no `val` or `var` keyword, `val` is assumed. - A formal parameter prefixed by `val` or `var` may not at the same time be a [call-by-name parameter](04-basic-declarations-and-definitions.html#by-name-parameters). + A formal parameter prefixed by `val` or `var` may not at the same time be a [call-by-name parameter](04-basic-definitions.html#by-name-parameters). - ´t´ is a [template](#templates) of the form @@ -607,7 +632,7 @@ If the case class definition contains an empty value parameter list, the `unappl def unapply[´\mathit{tps}\,´](´x´: ´c´[´\mathit{tps}\,´]) = x ne null ``` -The name of the `unapply` method is changed to `unapplySeq` if the first parameter section ´\mathit{ps}_1´ of ´c´ ends in a [repeated parameter](04-basic-declarations-and-definitions.html#repeated-parameters). +The name of the `unapply` method is changed to `unapplySeq` if the first parameter section ´\mathit{ps}_1´ of ´c´ ends in a [repeated parameter](04-basic-definitions.html#repeated-parameters). A method named `copy` is implicitly added to every case class unless the class already has a member (directly defined or inherited) with that name, or the class has a repeated parameter. 
The method is defined as follows:

@@ -872,14 +897,14 @@ Such a class ´C´ is conceptually seen as a pair of a Scala class that contains
Generally, a _companion module_ of a class is an object which has the same name as the class and is defined in the same scope and compilation unit.
Conversely, the class is called the _companion class_ of the module.

-Very much like a concrete class definition, an object definition may still contain declarations of abstract type members, but not of abstract term members.
+Very much like a concrete class definition, an object definition may still contain definitions of abstract type members, but not of abstract term members.

## Enum Definitions

```ebnf
TmplDef   ::=  ‘enum’ EnumDef
-EnumDef   ::=  id ClassConstr [‘extends’ [ConstrApps]] EnumBody
+EnumDef   ::=  id ClassConstr [‘extends’ ConstrApps] EnumBody
EnumBody  ::=  [nl] ‘{’ [SelfType] EnumStat {semi EnumStat} ‘}’
EnumStat  ::=  TemplateStat
           |   {Annotation [nl]} {Modifier} EnumCase
@@ -900,18 +925,15 @@ First, some terminology and notational conventions:

- We use `<...>` for syntactic constructs that in some circumstances might be empty.
  For instance, `<params>` represents one or more parameter lists `(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)` or nothing at all.
- Enum classes fall into two categories:
-  - _parameterized_ enum classes have at least one of the following:
-    - a type parameter section, denoted as `[´\mathit{tps}\,´]`;
-    - one or more (possibly empty) parameter sections, denoted as `(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)`.
-  - _unparameterized_ enum classes have no type parameter sections and no parameter sections.
+  - _parameterized_ enum classes have one or more (possibly empty) term parameter clauses, denoted as `(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)`.
+  - _unparameterized_ enum classes have no term parameter clauses, but may optionally have a type parameter clause, denoted as `[´\mathit{tps}\,´]`.
- Enum cases fall into three categories:
-
-  - _Class cases_ are those cases that are parameterized, either with a type parameter section `[´\mathit{tps}\,´]` or with one or more (possibly empty) parameter sections `(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)`.
-  - _Simple cases_ are cases of an unparameterized enum that have neither parameters nor an extends clause or body.
+  - _Class enum cases_ are those cases that possibly have a type parameter clause `[´\mathit{tps}\,´]`, and necessarily have one or more (possibly empty) parameter clauses `(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)`.
+  - _Simple enum cases_ are those cases that have no parameter clauses and no extends clause.
    That is, they consist of a name only.
-  - _Value cases_ are all cases that do not have a parameter section but that do have a (possibly generated) `extends` clause and/or a body.
+  - _Value enum cases_ are those cases that have no parameter clauses but that do have a (possibly generated) `extends` clause.

-- Simple cases and value cases are collectively called _singleton cases_.
+- Simple enum cases and value enum cases are collectively called _singleton enum cases_.

###### Example

@@ -945,13 +967,11 @@ enum Option[+T]:

### Lowering of Enum Definitions

###### Summary
-An enum class is represented as a `sealed` class that extends the `scala.reflect.Enum` trait.
+An enum class is represented as a `sealed abstract` class that extends the `scala.reflect.Enum` trait.
Enum cases are represented as follows:

-- a class case is mapped to a `case class`,
-- a singleton case is mapped to a `val` definition, where
-  - Simple cases all share a single implementation class.
-  - Value cases will each be implemented by a unique class.
+- a class enum case is mapped to a `case class` member of the enum class' companion object,
+- a singleton enum case is mapped to a `val` member of the enum class' companion object, implemented by a local class definition. Whether that local class is shared with other singleton cases, and which ones, is left as an implementation detail.

###### Precise rules
The `scala.reflect.Enum` trait defines a single public method, `ordinal`:
```scala
transparent trait Enum extends Any, Product, Serializable:

  def ordinal: Int
```
There are nine desugaring rules.
Rule (1) desugars enum definitions.
-Rules (2) and (3) desugar simple cases.
-Rules (4) to (6) define `extends` clauses for cases that are missing them.
-Rules (7) to (9) define how such cases with `extends` clauses map into `case class`es or `val`s.
+Rule (2) desugars cases of comma-separated names to simple enum cases.
+Rules (3) to (7) desugar inferrable details of enum cases.
+Rules (8) and (9) define how fully-desugared enum cases map into `case class`es or `val`s.
+Explicit `extends` clauses must be provided in the following cases, where rules (2) to (6) do not apply:
+- any enum case of a parameterized enum,
+- any singleton enum case of an unparameterized enum with non-variant type parameters,
+- any class enum case of an enum with type parameters, where the case also has type parameters.

1. An `enum` definition
   ```scala
-   enum ´E´ ... { <defs> <cases> }
+   enum ´E´ <params> extends <parents> { <cases> }
   ```
   expands to a `sealed abstract` class that extends the `scala.reflect.Enum` trait and an associated companion object that contains the defined cases, expanded according to rules (2 - 8).
   The enum class starts with a compiler-generated import that imports the names `<caseIds>` of all cases so that they can be used without prefix in the class.
   ```scala
-   sealed abstract class ´E´ ... extends <parents> with scala.reflect.Enum {
-     import ´E´.{ <caseIds> }
-     <defs>
+   sealed abstract class ´E´ <params>
+       extends <parents> with scala.reflect.Enum {
+     import ´E´.{ <caseIds> }
+     <defs>
+   }
   object ´E´ { <cases> }
   ```

-2. A singleton case consisting of a comma-separated list of enum names
+2. A simple enum case consisting of a comma-separated list of names
   ```scala
   case ´C_1´, ..., ´C_n´
   ```
-   expands to
+   expands to the following simple enum cases
   ```scala
   case ´C_1´; ...; case ´C_n´
   ```
   Any modifiers or annotations on the original case extend to all expanded cases.
+

This result is then further rewritten by either (3) or (4).

-3. A singleton case without an extends clause +3. A simple enum case `´C´` of an unparameterized enum `´E´` without type parameters ```scala case ´C´ ``` - of an unparameterized enum `´E´` expands to the following simple enum case in `´E´`'s companion object: + expands to the following value enum case: ```scala - val ´C´ = $new(n, "C") + case ´C´ extends ´E´ ``` - Here, `$new` is a private method that creates an instance of ´E´ (see below). + This result is then further rewritten with rule (8). -4. A singleton case without an extends clause +4. A simple enum case `´C´` of an unparameterized enum `´E´[´\mathit{tps}´]` with type parameters ```scala case ´C´ ``` - of an enum `´E´` with type parameters + where `´\mathit{tps}´` are of the following form ```scala ´\mathit{v}_1´ ´T_1´ >: ´L_1´ <: ´U_1´ , ... , ´\mathit{v}_n´ ´T_n´ >: ´L_n´ <: ´U_n´ (n > 0) ``` - where each of the variances `´\mathit{v}_i´` is either `'+'` or `'-'`, expands to the following value enum case: + and where each of the variances `´\mathit{v}_i´` is either `'+'` or `'-'`, expands to the following value enum case: ```scala case ´C´ extends ´E´[´B_1´, ..., ´B_n´] ``` where `´B_i´` is `´L_i´` if `´\mathit{v}_i´ = '+'` and `´U_i´` if `´\mathit{v}_i´ = '-'`. - This result is then further rewritten with rule (8). - **NOTE:** It is not permitted for enums with non-variant type parameters to have singleton cases without an extends clause. +

This result is then further rewritten with rule (8).



-5. A class case without an extends clause
+5. A class enum case with type parameters, but without an extends clause
   ```scala
-   case ´C´ <params>
+   case ´C´[´\mathit{tps}´](´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)
   ```
-   of an enum `´E´` that does not take type parameters expands to
+   of an unparameterized enum `´E´` without type parameters expands to
   ```scala
-   case ´C´ <params> extends ´E´
+   case ´C´[´\mathit{tps}´](´\mathit{ps}_1\,´)...(´\mathit{ps}_n´) extends ´E´
   ```
   This result is then further rewritten with rule (9).

-6. If `´E´` is an enum with type parameters `´\mathit{tps}´`, a class case with neither type parameters nor an extends clause
+6. A class enum case without type parameters or an extends clause
   ```scala
-   case ´C´ <params>
+   case ´C´(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)
   ```
-   expands to
+   of an unparameterized enum `´E´[´\mathit{tps}´]` with type parameters expands to
   ```scala
-   case ´C´[´\mathit{tps}´] <params> extends ´E´[´\mathit{tps}´]
+   case ´C´(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´) extends ´E´[´\mathit{tps}´]
   ```
-   This result is then further rewritten with rule (9).
-   For class cases that have type parameters themselves, an extends clause needs to be given explicitly.
-
+   This result is then further rewritten with rule (7).

-7. If `´E´` is an enum with type parameters `´\mathit{tps}´`, a class case without type parameters but with an extends clause
+7. A class enum case without type parameters, but with an extends clause
   ```scala
-   case ´C´ <params> extends <parents>
+   case ´C´(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´) extends <parents>
   ```
-   expands to
+   of an enum `´E´[´\mathit{tps}´]` with type parameters expands to
   ```scala
-   case ´C´[´\mathit{tps}´] <params> extends <parents>
+   case ´C´[´\mathit{tps}´](´\mathit{ps}_1\,´)...(´\mathit{ps}_n´) extends <parents>
   ```
-   provided at least one of the parameters `´\mathit{tps}´` is mentioned in a parameter type in `<params>` or in a type argument in `<parents>`.
+   provided at least one of the parameters `´\mathit{tps}´` is mentioned in a parameter type in `(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)` or in a type argument in `<parents>`.
+

+   This result is then further rewritten with rule (9).
-8. A value case
+8. A singleton enum case
    ```scala
    case ´C´ extends <parents>
    ```
    expands to the following `val` definition in `´E´`'s companion object:
    ```scala
-   val ´C´ = new <parents> { <body>; def ordinal = ´\mathit{n}´ }
+   val ´C´ = $factory(_$ordinal = ´\mathit{n}´, $name = "C")
    ```
    where `´\mathit{n}´` is the ordinal number of the case in the companion object, starting from 0.
+   `$factory` is a placeholder that expands its arguments into an expression that produces something equivalent to
+   a new instance of the following (possibly shared) anonymous class:
+   ```scala
+   new <parents> {
+     def ordinal: Int = _$ordinal
+     override def toString: String = $name
+   }
+   ```
    The anonymous class also implements the abstract `Product` methods that it inherits from `Enum`.
+
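   As an illustration of rule (8) (a sketch only: `Color` is a hypothetical enum, and `$factory` is the compiler-internal placeholder described above, not user-writable syntax), consider three singleton cases:
   ```scala
   enum Color:
     case Red, Green, Blue   // rule (2) splits this into three simple cases

   // After rules (3) and (8), Color's companion object would contain
   // value definitions along the lines of:
   //
   //   val Red   = $factory(_$ordinal = 0, $name = "Red")
   //   val Green = $factory(_$ordinal = 1, $name = "Green")
   //   val Blue  = $factory(_$ordinal = 2, $name = "Blue")
   ```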

   **NOTE:** It is an error if a value case refers to a type parameter of `´E´` in a type argument within `<parents>`.

-9. A class case
+9. A class enum case
    ```scala
    case ´C´ <tparams> <params> extends <parents>
    ```
@@ -1074,6 +1107,7 @@ Rules (7) to (9) define how such cases with `extends` clauses map into `case cla
    }
    ```
    where `´\mathit{n}´` is the ordinal number of the case in the companion object, starting from 0.
+

   **NOTE:** It is an error if a class case refers to a type parameter of `´E´` in a parameter type in `<tparams>` or `<params>` or in a type argument of `<parents>`, unless that parameter is already a type parameter of the case, i.e. the parameter name is defined in `<tparams>`.

###### Superclass of an enum case

@@ -1106,34 +1140,6 @@ private def $new(_$ordinal: Int, $name: String) =
   override def toString = $name
 ```

-
-###### Example
-
-Consider the more complex enumeration `Color`, consisting of value enum cases:
-```scala
-enum Color(val rgb: Int):
-  case Red   extends Color(0xFF0000)
-  case Green extends Color(0x00FF00)
-  case Blue  extends Color(0x0000FF)
-```
-
-The three value cases will expand as follows in the companion of `Color`:
-
-```scala
-val Red = new Color(0xFF0000):
-  def ordinal: Int = 0
-  override def productPrefix: String = "Red"
-  override def toString: String = "Red"
-val Green = new Color(0x00FF00):
-  def ordinal: Int = 1
-  override def productPrefix: String = "Green"
-  override def toString: String = "Green"
-val Blue = new Color(0x0000FF):
-  def ordinal: Int = 2
-  override def productPrefix: String = "Blue"
-  override def toString: String = "Blue"
-```
-
### Widening of enum cases post-construction

The compiler-generated `apply` and `copy` methods of a class enum case

```scala
case ´C´[´\mathit{tps}\,´](´\mathit{ps}_1\,´)...(´\mathit{ps}_n´) extends ´P_1´, ..., ´P_m´
```

@@ -1151,20 +1157,6 @@ An enum `´E´` (possibly generic) that defines one or more singleton cases, and
   It returns the singleton case value whose identifier is `name`.
 - A method `values` which returns an `Array[´E'´]` of all singleton case values defined by `E`, in the order of their definitions.

-### Factory method for simple enum cases
-
-If an enum `´E´` contains at least one simple case, its companion object will define in addition:
-
-  - A private method `$new` which defines a new simple case value with given ordinal number and name.
-    This method can be thought as being defined as follows.
-
-    ```scala
-    private def $new(_$ordinal: Int, $name: String): ´E´ with runtime.EnumValue
-    ```
-  - `$new` returns a new instance of an anonymous class which implements the abstract `Product` methods that it inherits from `Enum`.
-  - if `´E´` inherits from `java.lang.Enum` the anonymous class does not override the `ordinal` or `toString` methods, as these are final in `java.lang.Enum`.
-    Additionally `productPrefix` will delegate to `this.name`.
-
### Translation of Java-compatible enums

A Java-compatible enum is an enum that extends `java.lang.Enum`.
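For example, the following declaration (illustrative, following the pattern this section describes) yields a Java-compatible enum:

```scala
enum Color extends java.lang.Enum[Color]:
  case Red, Green, Blue

// The methods of java.lang.Enum are then available on the cases, e.g.
//   Color.Red.compareTo(Color.Blue)
// and Color can be consumed from Java code as an ordinary Java enum.
```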
@@ -1211,4 +1203,4 @@ A correctly typed version would use an _explicit_, _invariant_ type parameter ` ```scala enum View[-´T´]: case Refl[´R´](f: ´R´ => ´R´) extends View[´R´] -``` \ No newline at end of file +``` diff --git a/docs/_spec/06-expressions.md b/docs/_spec/06-expressions.md index fa21b4330728..5043e752ebe6 100644 --- a/docs/_spec/06-expressions.md +++ b/docs/_spec/06-expressions.md @@ -10,22 +10,26 @@ chapter: 6 Expr ::= (Bindings | id | ‘_’) ‘=>’ Expr | Expr1 Expr1 ::= ‘if’ ‘(’ Expr ‘)’ {nl} Expr [[semi] ‘else’ Expr] + | ‘if‘ Expr ‘then‘ Expr [[semi] ‘else‘ Expr] | ‘while’ ‘(’ Expr ‘)’ {nl} Expr - | ‘try’ Expr [‘catch’ Expr] [‘finally’ Expr] - | ‘for’ (‘(’ Enumerators ‘)’ | ‘{’ Enumerators ‘}’) {nl} [‘yield’] Expr + | ‘while’ Expr ‘do’ Expr + | ‘try’ Expr [Catches] [‘finally’ Expr] + | ‘for’ (‘(’ Enumerators ‘)’ | ‘{’ Enumerators ‘}’) {nl} [‘do‘ | ‘yield’] Expr + | ‘for’ Enumerators (‘do‘ | ‘yield’) Expr | ‘throw’ Expr | ‘return’ [Expr] | [SimpleExpr ‘.’] id ‘=’ Expr | SimpleExpr1 ArgumentExprs ‘=’ Expr | PostfixExpr | PostfixExpr Ascription - | PostfixExpr ‘match’ ‘{’ CaseClauses ‘}’ PostfixExpr ::= InfixExpr [id [nl]] InfixExpr ::= PrefixExpr | InfixExpr id [nl] InfixExpr + | InfixExpr MatchClause PrefixExpr ::= [‘-’ | ‘+’ | ‘~’ | ‘!’] SimpleExpr SimpleExpr ::= ‘new’ (ClassTemplate | TemplateBody) | BlockExpr + | SimpleExpr ‘.’ MatchClause | SimpleExpr1 [‘_’] SimpleExpr1 ::= Literal | Path @@ -36,6 +40,7 @@ SimpleExpr1 ::= Literal | SimpleExpr1 ArgumentExprs | XmlExpr Exprs ::= Expr {‘,’ Expr} +MatchClause ::= ‘match’ ‘{’ CaseClauses ‘}’ BlockExpr ::= ‘{’ CaseClauses ‘}’ | ‘{’ Block ‘}’ Block ::= BlockStat {semi BlockStat} [ResultExpr] @@ -44,6 +49,7 @@ ResultExpr ::= Expr1 Ascription ::= ‘:’ InfixType | ‘:’ Annotation {Annotation} | ‘:’ ‘_’ ‘*’ +Catches ::= ‘catch‘ (Expr | ExprCaseClause) ``` Expressions are composed of operators and operands. @@ -85,7 +91,7 @@ This object implements methods in class `scala.AnyRef` as follows: - `eq(´x\,´)` and `==(´x\,´)` return `true` iff the argument ´x´ is also the "null" object. - `ne(´x\,´)` and `!=(´x\,´)` return true iff the argument x is not also the "null" object. - `isInstanceOf[´T\,´]` always returns `false`. -- `asInstanceOf[´T\,´]` returns the [default value](04-basic-declarations-and-definitions.html#value-declarations-and-definitions) of type ´T´. +- `asInstanceOf[´T\,´]` returns the [default value](04-basic-definitions.html#value-definitions) of type ´T´. - `##` returns ``0``. A reference to any other member of the "null" object causes a `NullPointerException` to be thrown. @@ -100,7 +106,7 @@ SimpleExpr ::= Path A designator refers to a named term. It can be a _simple name_ or a _selection_. A simple name ´x´ refers to a value as specified [here](02-identifiers-names-and-scopes.html#identifiers,-names-and-scopes). -If ´x´ is bound by a definition or declaration in an enclosing class or object ´C´, it is taken to be equivalent to the selection `´C´.this.´x´` where ´C´ is taken to refer to the class containing ´x´ even if the type name ´C´ is [shadowed](02-identifiers-names-and-scopes.html#identifiers,-names-and-scopes) at the occurrence of ´x´. +If ´x´ is bound by a definition in an enclosing class or object ´C´, it is taken to be equivalent to the selection `´C´.this.´x´` where ´C´ is taken to refer to the class containing ´x´ even if the type name ´C´ is [shadowed](02-identifiers-names-and-scopes.html#identifiers,-names-and-scopes) at the occurrence of ´x´. 
If ´r´ is a [stable identifier](03-types.html#paths) of type ´T´, the selection ´r.x´ refers statically to a term member ´m´ of ´r´ that is identified in ´T´ by the name ´x´. @@ -205,8 +211,9 @@ An application `´f(e_1, ..., e_m)´` applies the method `´f´` to the argument For this expression to be well-typed, the method must be *applicable* to its arguments: If ´f´ has a method type `(´p_1´:´T_1, ..., p_n´:´T_n´)´U´`, each argument expression ´e_i´ is typed with the corresponding parameter type ´T_i´ as expected type. -Let ´S_i´ be the type of argument ´e_i´ ´(i = 1, ..., m)´. +Let ´S_i´ be the type of argument ´e_i´ ´(i = 1, ..., n)´. The method ´f´ must be _applicable_ to its arguments ´e_1, ..., e_n´ of types ´S_1, ..., S_n´. +If the last parameter type of ´f´ is [repeated](04-basic-definitions.html#repeated-parameters), [harmonization](#harmonization) is attempted on the suffix ´e_m, ..., e_n´ of the expression list that match the repeated parameter. We say that an argument expression ´e_i´ is a _named_ argument if it has the form `´x_i=e'_i´` and `´x_i´` is one of the parameter names `´p_1, ..., p_n´`. Once the types ´S_i´ have been determined, the method ´f´ of the above method type is said to be applicable if all of the following conditions hold: @@ -235,7 +242,7 @@ The behavior of by-name parameters is preserved if the application is transforme In this case, the local value for that parameter has the form `val ´y_i´ = () => ´e´` and the argument passed to the method is `´y_i´()`. The last argument in an application may be marked as a sequence argument, e.g. `´e´: _*`. -Such an argument must correspond to a [repeated parameter](04-basic-declarations-and-definitions.html#repeated-parameters) of type `´S´*` and it must be the only argument matching this parameter (i.e. the number of formal parameters and actual arguments must be the same). +Such an argument must correspond to a [repeated parameter](04-basic-definitions.html#repeated-parameters) of type `´S´*` and it must be the only argument matching this parameter (i.e. the number of formal parameters and actual arguments must be the same). Furthermore, the type of ´e´ must conform to `scala.Seq[´T´]`, for some type ´T´ which conforms to ´S´. In this case, the argument list is transformed by replacing the sequence ´e´ with its elements. When the application uses named arguments, the vararg parameter has to be specified exactly once. @@ -304,7 +311,7 @@ The result of transforming ´f´ is a block of the form where every argument in ´(\mathit{args}\_1), ..., (\mathit{args}\_l)´ is a reference to one of the values ´x_1, ..., x_k´. To integrate the current application into the block, first a value definition using a fresh name ´y_i´ is created for every argument in ´e_1, ..., e_m´, which is initialised to ´e_i´ for positional arguments and to ´e'_i´ for named arguments of the form `´x_i=e'_i´`. -Then, for every parameter which is not specified by the argument list, a value definition using a fresh name ´z_i´ is created, which is initialized using the method computing the [default argument](04-basic-declarations-and-definitions.html#method-declarations-and-definitions) of this parameter. +Then, for every parameter which is not specified by the argument list, a value definition using a fresh name ´z_i´ is created, which is initialized using the method computing the [default argument](04-basic-definitions.html#method-definitions) of this parameter. 
Let ´\mathit{args}´ be a permutation of the generated names ´y_i´ and ´z_i´ such that the position of each name matches the position of its corresponding parameter in the method type `(´p_1:T_1, ..., p_n:T_n´)´U´`.
The final result of the transformation is a block of the form
@@ -453,7 +460,7 @@ Block ::= BlockStat {semi BlockStat} [ResultExpr]
 ```

 A _block expression_ `{´s_1´; ...; ´s_n´; ´e\,´}` is constructed from a sequence of block statements ´s_1, ..., s_n´ and a final expression ´e´.
-The statement sequence may not contain two definitions or declarations that bind the same name in the same namespace.
+The statement sequence may not contain two definitions that bind the same name in the same namespace.
 The final expression can be omitted, in which case the unit value `()` is assumed.

 The expected type of the final expression ´e´ is the expected type of the block.
@@ -544,6 +551,8 @@ This expression is then interpreted as ´e.\mathit{op}(e_1,...,e_n)´.
 A left-associative binary operation ´e_1;\mathit{op};e_2´ is interpreted as ´e_1.\mathit{op}(e_2)´.
 If ´\mathit{op}´ is right-associative and its parameter is passed by name, the same operation is interpreted as ´e_2.\mathit{op}(e_1)´.
 If ´\mathit{op}´ is right-associative and its parameter is passed by value, it is interpreted as `{ val ´x´=´e_1´; ´e_2´.´\mathit{op}´(´x\,´) }`, where ´x´ is a fresh name.
+Under `-source:future`, if the method name is alphanumeric and the target method is not marked [`infix`](./05-classes-and-objects.html#infix), a deprecation warning is emitted.
+
 ### Assignment Operators

 An _assignment operator_ is an operator symbol (syntax category `op` in [Identifiers](01-lexical-syntax.html#identifiers)) that ends in an equals character “`=`”, with the following exceptions:
@@ -676,12 +685,14 @@ def matmul(xss: Array[Array[Double]], yss: Array[Array[Double]]) = {

 ```ebnf
 Expr1 ::= ‘if’ ‘(’ Expr ‘)’ {nl} Expr [[semi] ‘else’ Expr]
+ | ‘if’ Expr ‘then’ Expr [[semi] ‘else’ Expr]
 ```

 The _conditional expression_ `if (´e_1´) ´e_2´ else ´e_3´` chooses one of the values of ´e_2´ and ´e_3´, depending on the value of ´e_1´.
 The condition ´e_1´ is expected to conform to type `Boolean`.
 The then-part ´e_2´ and the else-part ´e_3´ are both expected to conform to the expected type of the conditional expression.
-The type of the conditional expression is the [weak least upper bound](03-types.html#weak-conformance) of the types of ´e_2´ and ´e_3´.
+If there is no expected type, [harmonization](#harmonization) is attempted on ´e_2´ and ´e_3´.
+The type of the conditional expression is the [least upper bound](03-types.html#least-upper-bounds-and-greatest-lower-bounds) of the types of ´e_2´ and ´e_3´ after harmonization.

 A semicolon preceding the `else` symbol of a conditional expression is ignored.

 The conditional expression is evaluated by evaluating first ´e_1´.
@@ -694,6 +705,7 @@ The conditional expression `if (´e_1´) ´e_2´` is evaluated as if it was `if

 ```ebnf
 Expr1 ::= ‘while’ ‘(’ Expr ‘)’ {nl} Expr
+ | ‘while’ Expr ‘do’ Expr
 ```

 The _while loop expression_ `while (´e_1´) ´e_2´` is typed and evaluated as if it was an application of `whileLoop (´e_1´) (´e_2´)` where the hypothetical method `whileLoop` is defined as follows.
@@ -841,7 +853,11 @@ The type of a return expression is `scala.Nothing`.

 The expression ´e´ may be omitted.
 The return expression `return` is type-checked and evaluated as if it were `return ()`.
-Returning from the method from within a nested function may be implemented by throwing and catching a `scala.runtime.NonLocalReturnControl`.
+### Non-Local Returns (deprecated)
+
+Returning from a method from within a nested function is deprecated.
+
+It is implemented by throwing and catching a `scala.runtime.NonLocalReturnControl`.
 Any exception catches between the point of return and the enclosing methods might see and catch that exception.
 A key comparison makes sure that this exception is only caught by the method instance which is terminated by the return.

@@ -864,15 +880,19 @@ The type of a throw expression is `scala.Nothing`.

 ## Try Expressions

 ```ebnf
-Expr1 ::= ‘try’ Expr [‘catch’ Expr] [‘finally’ Expr]
+Expr1 ::= ‘try’ Expr [Catches] [‘finally’ Expr]
+
+Catches ::= ‘catch’ (Expr | ExprCaseClause)
 ```

-A _try expression_ is of the form `try { ´b´ } catch ´h´` where the handler ´h´ is usually a [pattern matching anonymous function](08-pattern-matching.html#pattern-matching-anonymous-functions)
+A _try expression_ is of the form `try ´b´ catch ´h´` where the handler ´h´ is usually a [pattern matching anonymous function](08-pattern-matching.html#pattern-matching-anonymous-functions)

 ```scala
 { case ´p_1´ => ´b_1´ ... case ´p_n´ => ´b_n´ }
 ```

+If the handler is a single `ExprCaseClause`, it is a shorthand for that `ExprCaseClause` wrapped in a pattern matching anonymous function.
+
 This expression is evaluated by evaluating the block ´b´.
 If evaluation of ´b´ does not cause an exception to be thrown, the result of ´b´ is returned.
 Otherwise the handler ´h´ is applied to the thrown exception.
@@ -881,11 +901,11 @@ If the handler contains no case matching the thrown exception, the exception is
 More generally, if the handler is a `PartialFunction`, it is applied only if it is defined at the given exception.

 Let ´\mathit{pt}´ be the expected type of the try expression.
-The block ´b´ is expected to conform to ´\mathit{pt}´.
+The expression ´b´ is expected to conform to ´\mathit{pt}´.
 The handler ´h´ is expected to conform to type `scala.Function[scala.Throwable, ´\mathit{pt}\,´]`.
-The type of the try expression is the [weak least upper bound](03-types.html#weak-conformance) of the type of ´b´ and the result type of ´h´.
+The type of the try expression is the [least upper bound](03-types.html#least-upper-bounds-and-greatest-lower-bounds) of the type of ´b´ and the result type of ´h´.

-A try expression `try { ´b´ } finally ´e´` evaluates the block ´b´.
+A try expression `try ´b´ finally ´e´` evaluates the expression ´b´.
 If evaluation of ´b´ does not cause an exception to be thrown, the expression ´e´ is evaluated.
 If an exception is thrown during evaluation of ´e´, the evaluation of the try expression is aborted with the thrown exception.
 If no exception is thrown during evaluation of ´e´, the result of ´b´ is returned as the result of the try expression.
@@ -893,10 +913,10 @@ If no exception is thrown during evaluation of ´e´, the result of ´b´ is ret

 If an exception is thrown during evaluation of ´b´, the finally block ´e´ is also evaluated.
 If another exception ´e'´ is thrown during evaluation of ´e´, evaluation of the try expression is aborted with the thrown exception.
 If no exception is thrown during evaluation of ´e´, the original exception thrown in ´b´ is re-thrown once evaluation of ´e´ has completed.
-The block ´b´ is expected to conform to the expected type of the try expression.
+The expression ´b´ is expected to conform to the expected type of the try expression.
The finally expression ´e´ is expected to conform to type `Unit`. -A try expression `try { ´b´ } catch ´e_1´ finally ´e_2´` is a shorthand for `try { try { ´b´ } catch ´e_1´ } finally ´e_2´`. +A try expression `try ´b´ catch ´e_1´ finally ´e_2´` is a shorthand for `try { try ´b´ catch ´e_1´ } finally ´e_2´`. ## Anonymous Functions @@ -1011,7 +1031,7 @@ The definition of "constant expression" depends on the platform, but they includ - A class constructed with [`Predef.classOf`](12-the-scala-standard-library.html#the-predef-object) - An element of an enumeration from the underlying platform - A literal array, of the form `Array´(c_1, ..., c_n)´`, where all of the ´c_i´'s are themselves constant expressions -- An identifier defined by a [constant value definition](04-basic-declarations-and-definitions.html#value-declarations-and-definitions). +- An identifier defined by a [constant value definition](04-basic-definitions.html#value-definitions). ## Statements @@ -1030,7 +1050,6 @@ TemplateStat ::= Import Statements occur as parts of blocks and templates. A _statement_ can be an import, a definition or an expression, or it can be empty. -Statements used in the template of a class definition can also be declarations. An expression that is used as a statement can have an arbitrary value type. An expression statement ´e´ is evaluated by evaluating ´e´ and discarding the result of the evaluation. @@ -1042,6 +1061,29 @@ When prefixing a class or object definition, modifiers `abstract`, `final`, and Evaluation of a statement sequence entails evaluation of the statements in the order they are written. +## Harmonization + +_Harmonization_ of a list of expressions tries to adapt `Int` literals to match the types of sibling trees. +For example, when writing + +```scala +scala.collection.mutable.ArrayBuffer(5.4, 6, 6.4) +``` + +the inferred element type would be `AnyVal` without harmonization. +Harmonization turns the integer literal `6` into the double literal `6.0` so that the element type becomes `Double`. + +Formally, given a list of expressions ´e_1, ..., e_n´ with types ´T_1, ..., T_n´, harmonization behaves as follows: + +1. If there is an expected type, return the original list. +2. Otherwise, if there exists ´T_i´ that is not a primitive numeric type (`Char`, `Byte`, `Short`, `Int`, `Long`, `Float`, `Double`), return the original list. +3. Otherwise, + 1. Partition the ´e_i´ into the integer literals ´f_j´ and the other expressions ´g_k´. + 2. If all the ´g_k´ have the same numeric type ´T´, possibly after widening, and if all the integer literals ´f_j´ can be converted without loss of precision to ´T´, return the list of ´e_i´ where every int literal is converted to ´T´. + 3. Otherwise, return the original list. + +Harmonization is used in [conditional expressions](#conditional-expressions) and [pattern matches](./08-pattern-matching.html), as well as in [local type inference](#local-type-inference). + ## Implicit Conversions Implicit conversions can be applied to expressions whose type does not match their expected type, to qualifiers in selections, and to unapplied methods. @@ -1063,14 +1105,10 @@ An expression ´e´ of polymorphic type which does not appear as the function part of a type application is converted to a type instance of ´T´ by determining with [local type inference](#local-type-inference) instance types `´T_1, ..., T_n´` for the type variables `´a_1, ..., a_n´` and implicitly embedding ´e´ in the [type application](#type-applications) `´e´[´T_1, ..., T_n´]`. 
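A small sketch of the Harmonization rules above, as they apply to conditionals and pattern matches (the types in the comments are what the rules predict):

```scala
def f(cond: Boolean) =
  if cond then 1 else 2.5   // inferred Double: the Int literal 1 becomes 1.0

def g(cond: Boolean): AnyVal =
  if cond then 1 else 2.5   // expected type present: rule 1 leaves both branches as written

def h(n: Int) = n match
  case 0 => 1L
  case _ => 2               // inferred Long: the Int literal 2 becomes 2L
```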
-###### Numeric Widening -If ´e´ has a primitive number type which [weakly conforms](03-types.html#weak-conformance) to the expected type, it is widened to the expected type using one of the numeric conversion methods `toShort`, `toChar`, `toInt`, `toLong`, `toFloat`, `toDouble` defined [in the standard library](12-the-scala-standard-library.html#numeric-value-types). - -Since conversions from `Int` to `Float` and from `Long` to `Float` or `Double` may incur a loss of precision, those implicit conversions are deprecated. -The conversion is permitted for literals if the original value can be recovered, that is, if conversion back to the original type produces the original value. +###### Numeric Literal Conversion +If the expected type is `Byte`, `Short`, `Long` or `Char`, and the expression ´e´ is an `Int` literal fitting in the range of that type, it is converted to the same literal in that type. -###### Numeric Literal Narrowing -If the expected type is `Byte`, `Short` or `Char`, and the expression ´e´ is an integer literal fitting in the range of that type, it is converted to the same literal in that type. +Likewise, if the expected type is `Float` or `Double`, and the expression ´e´ is a numeric literal (of any type) fitting in the range of that type, it is converted to the same literal in that type. ###### Value Discarding If ´e´ has some value type and the expected type is `Unit`, ´e´ is converted to the expected type by embedding it in the term `{ ´e´; () }`. @@ -1255,7 +1293,7 @@ Solving means finding a substitution ´\sigma´ of types ´T_i´ for the type pa It is a compile time error if no such substitution exists. If several substitutions exist, local-type inference will choose for each type variable ´a_i´ a minimal or maximal type ´T_i´ of the solution space. -A _maximal_ type ´T_i´ will be chosen if the type parameter ´a_i´ appears [contravariantly](04-basic-declarations-and-definitions.html#variance-annotations) in the type ´T´ of the expression. +A _maximal_ type ´T_i´ will be chosen if the type parameter ´a_i´ appears [contravariantly](04-basic-definitions.html#variance-annotations) in the type ´T´ of the expression. A _minimal_ type ´T_i´ will be chosen in all other situations, i.e. if the variable appears covariantly, non-variantly or not at all in the type ´T´. We call such a substitution an _optimal solution_ of the given constraint system for the type ´T´. diff --git a/docs/_spec/07-implicits.md b/docs/_spec/07-implicits.md index 2cd80f227cd4..dacc0c0c277e 100644 --- a/docs/_spec/07-implicits.md +++ b/docs/_spec/07-implicits.md @@ -49,7 +49,7 @@ However, if such a method misses arguments for its implicit parameters, such arg The actual arguments that are eligible to be passed to an implicit parameter of type ´T´ fall into two categories. First, eligible are all identifiers ´x´ that can be accessed at the point of the method call without a prefix and that denote an [implicit definition](#the-implicit-modifier) or an implicit parameter. -To be accessible without a prefix, an identifier must be a local name, a member of an enclosing template or a name introduced by an [import clause](04-basic-declarations-and-definitions.html#import-clauses). +To be accessible without a prefix, an identifier must be a local name, a member of an enclosing template or a name introduced by an [import clause](04-basic-definitions.html#import-clauses). 
If there are no eligible identifiers under this rule, then, second, eligible are also all `implicit` members of some object that belongs to the implicit scope of the implicit parameter's type, ´T´. The _implicit scope_ of a type ´T´ consists of all [companion modules](05-classes-and-objects.html#object-definitions) of classes that are associated with the implicit parameter's type. diff --git a/docs/_spec/08-pattern-matching.md b/docs/_spec/08-pattern-matching.md index 1d50b814ee24..4a34ae8631c4 100644 --- a/docs/_spec/08-pattern-matching.md +++ b/docs/_spec/08-pattern-matching.md @@ -276,7 +276,7 @@ SimplePattern ::= StableId ‘(’ [Patterns ‘,’] [varid ‘@’] ‘_’ ``` A _pattern sequence_ ´p_1, ..., p_n´ appears in two contexts. -First, in a constructor pattern ´c(q_1, ..., q_m, p_1, ..., p_n)´, where ´c´ is a case class which has ´m+1´ primary constructor parameters, ending in a [repeated parameter](04-basic-declarations-and-definitions.html#repeated-parameters) of type `S*`. +First, in a constructor pattern ´c(q_1, ..., q_m, p_1, ..., p_n)´, where ´c´ is a case class which has ´m+1´ primary constructor parameters, ending in a [repeated parameter](04-basic-definitions.html#repeated-parameters) of type `S*`. Second, in an extractor pattern ´x(q_1, ..., q_m, p_1, ..., p_n)´ if the extractor object ´x´ does not have an `unapply` method, but it does define an `unapplySeq` method with a result type that is an extractor type for type `(T_1, ... , T_m, Seq[S])` (if `m = 0`, an extractor type for the type `Seq[S]` is also accepted). The expected type for the patterns ´p_i´ is ´S´. The last pattern in a pattern sequence may be a _sequence wildcard_ `_*`. @@ -484,9 +484,12 @@ Therefore, the right hand side of the case clause, `y.n`, of type `Int`, is foun ## Pattern Matching Expressions ```ebnf + InfixExpr ::= InfixExpr MatchClause + SimpleExpr ::= SimpleExpr ‘.’ MatchClause Expr ::= PostfixExpr ‘match’ ‘{’ CaseClauses ‘}’ CaseClauses ::= CaseClause {CaseClause} CaseClause ::= ‘case’ Pattern [Guard] ‘=>’ Block + ExprCaseClause ::= ‘case’ Pattern [Guard] ‘=>’ Expr ``` A _pattern matching expression_ @@ -518,7 +521,8 @@ If no such bounds can be found, a compile time error results. If such bounds are found, the pattern matching clause starting with ´p´ is then typed under the assumption that each ´a_i´ has lower bound ´L_i'´ instead of ´L_i´ and has upper bound ´U_i'´ instead of ´U_i´. The expected type of every block ´b_i´ is the expected type of the whole pattern matching expression. -The type of the pattern matching expression is then the [weak least upper bound](03-types.html#weak-conformance) of the types of all blocks ´b_i´. +If there is no expected type, [harmonization](./03-types.html#harmonization) is attempted on the list of all blocks ´b_i´. +The type of the pattern matching expression is then the [least upper bound](03-types.html#least-upper-bounds-and-greatest-lower-bounds) of the types of all blocks ´b_i´ after harmonization. When applying a pattern matching expression to a selector value, patterns are tried in sequence until one is found which matches the [selector value](#patterns). Say this case is `case ´p_i \Rightarrow b_i´`. @@ -595,7 +599,7 @@ If the expected type is [SAM-convertible](06-expressions.html#sam-conversion) to ``` Here, each ´x_i´ is a fresh name. -As was shown [here](06-expressions.html#anonymous-functions), this anonymous function is in turn equivalent to the following instance creation expression, where ´T´ is the weak least upper bound of the types of all ´b_i´. 
+As was shown [here](06-expressions.html#anonymous-functions), this anonymous function is in turn equivalent to the following instance creation expression, where ´T´ is the least upper bound of the types of all ´b_i´.

 ```scala
 new scala.Function´k´[´S_1, ..., S_k´, ´T´] {
@@ -619,7 +623,7 @@ new scala.PartialFunction[´S´, ´T´] {
 }
 ```

-Here, ´x´ is a fresh name and ´T´ is the weak least upper bound of the types of all ´b_i´.
+Here, ´x´ is a fresh name and ´T´ is the least upper bound of the types of all ´b_i´.
 The final default case in the `isDefinedAt` method is omitted if one of the patterns ´p_1, ..., p_n´ is already a variable or wildcard pattern.

 ###### Example
diff --git a/docs/_spec/11-annotations.md b/docs/_spec/11-annotations.md
index 3388d55318ea..3381856d78fb 100644
--- a/docs/_spec/11-annotations.md
+++ b/docs/_spec/11-annotations.md
@@ -17,8 +17,8 @@ Annotations associate meta-information with definitions.

 A simple annotation has the form `@´c´` or `@´c(a_1, ..., a_n)´`.
 Here, ´c´ is a constructor of a class ´C´, which must conform to the class `scala.Annotation`.
-Annotations may apply to definitions or declarations, types, or expressions.
-An annotation of a definition or declaration appears in front of that definition.
+Annotations may apply to definitions, types, or expressions.
+An annotation of a definition appears in front of that definition.
 An annotation of a type appears after that type.
 An annotation of an expression ´e´ appears after the expression ´e´, separated by a colon.
 More than one annotation clause may apply to an entity.
@@ -86,7 +86,7 @@ def f(x: Option[Int]) = (x: @unchecked) match {
 ```

 Without the `@unchecked` annotation, a Scala compiler could infer that the pattern match is non-exhaustive, and could produce a warning because `Option` is a `sealed` class.

-* `@uncheckedStable` When applied a value declaration or definition, it allows the defined value to appear in a path, even if its type is [volatile](03-types.html#volatile-types).
+* `@uncheckedStable` When applied to a value definition, it allows the defined value to appear in a path, even if its type is [volatile](03-types.html#volatile-types).
 For instance, the following member definitions are legal:
 ```scala
 type A { type T }
@@ -97,7 +97,7 @@ val y: x.T // OK since `x' is still a path

 Without the `@uncheckedStable` annotation, the designator `x` would not be a path since its type `A with B` is volatile.
 Hence, the reference `x.T` would be malformed.

-When applied to value declarations or definitions that have non-volatile types, the annotation has no effect.
+When applied to value definitions that have non-volatile types, the annotation has no effect.

 * `@specialized` When applied to the definition of a type parameter, this annotation causes the compiler to generate specialized definitions for primitive types.
 An optional list of primitive types may be given, in which case specialization takes into account only those types.
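A minimal sketch of `@specialized` usage (the class and type names here are illustrative):

```scala
// Requests additional compiled variants of Pair where T is instantiated
// to Int or Double, so those instantiations can avoid boxing.
class Pair[@specialized(Int, Double) T](val fst: T, val snd: T)
```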
![Class hierarchy of Scala](public/images/classhierarchy.png) + +## Fundamental Type Aliases + +The `scala` package provides the following fundamental type aliases, which expose to user code some forms of [types](./03-types.html) that cannot otherwise be written: + +```scala +type AnyKind = ´x´ // where ´x´ is the internal AnyKind type +type Nothing = ´x´ // where ´x´ is the internal Nothing type +type | = [A, B] =>> A ´|´ B // where | is the internal union type operator +type & = [A, B] =>> A ´&´ B // where & is the internal intersection type operator +``` + ## Root Classes -The root of this hierarchy is formed by class `Any`. +The root of this hierarchy is formed by class `scala.Any`. Every class in a Scala execution environment inherits directly or indirectly from this class. +By definition, `Any` is also the top [proper type](./03-types.html#proper-types). Class `Any` has two direct subclasses: `AnyRef` and `AnyVal`. The subclass `AnyRef` represents all values which are represented as objects in the underlying host system. @@ -304,42 +317,42 @@ case class Tuple´n´[+T_1, ..., +T_n](_1: T_1, ..., _´n´: T_´n´) { --> ### The `Function` Classes -For each class type `Function´n´` where ´n = 0, ..., 22´, Scala defines the following function class: +For each natural ´n \geq 0´, the `scala` package defines the following function class: ```scala package scala trait Function´_n´[-´T_1´, ..., -´T_n´, +´R´]: def apply(´x_1´: ´T_1´, ..., ´x_n´: ´T_n´): ´R´ - override def toString = "" - def curried: ´T_1´ => ... => ´T_n´ => R = ... - def tupled: ((´T_1´, ..., ´T_n´)) => R = ... ``` -For function types `Function´n´` where ´n > 22´, Scala defines a unique function class: +These classes participate in the desugaring of [concrete function types](./03-types.html#function-types). +For values of ´n \leq 22´, the `Function´_n´` classes define additional methods: ```scala package scala -trait FunctionXXL: - def apply(xs: IArray[Object]): Object - override def toString = "" +trait Function´_n´[-´T_1´, ..., -´T_n´, +´R´]: + ... + override def toString = "" + def curried: ´T_1´ => ... => ´T_n´ => R = ... + def tupled: ((´T_1´, ..., ´T_n´)) => R = ... ``` -There is no loss of type safety, as the internal representation is still `Function´n´` for all ´n´. -However this means methods `curried` and `tupled` are not available on functions with more than 22 parameters. - The implicitly imported [`Predef`](#the-predef-object) object defines the name `Function` as an alias of `Function1`. - The `PartialFunction` subclass of `Function1` represents functions that (indirectly) specify their domain. Use the `isDefined` method to query whether the partial function is defined for a given input (i.e., whether the input is part of the function's domain). ```scala class PartialFunction[-A, +B] extends Function1[A, B] { def isDefinedAt(x: A): Boolean + + ... // various derived methods } ``` +`PartialFunction` participates in the desugaring of [pattern matching anonymous functions](08-pattern-matching.html#pattern-matching-anonymous-functions). + ### Trait `Product` @@ -349,6 +362,58 @@ All case classes automatically extend the `Product` trait (and generate syntheti All enum definitions automatically extend the `reflect.Enum` trait (and generate synthetic methods to conform to it). +### Tuple Classes + +Tuples are a form of _HLists_ defined by the following classes: + +```scala +/** Superclass of all tuples. */ +sealed trait Tuple extends Product: + /** Return a new tuple by prepending the element to `this` tuple. 
 */
+  inline def *: [H, This >: this.type <: Tuple] (x: H): H *: This = ...
+  ...
+
+object Tuple:
+  /** Type of the element at position N in the tuple X. */
+  type Elem[X <: Tuple, N <: Int] = ...
+  ...
+
+/** A tuple of 0 elements. */
+type EmptyTuple = EmptyTuple.type
+
+/** A tuple of 0 elements. */
+case object EmptyTuple extends Tuple:
+  override def toString(): String = "()"
+
+/** Tuple of arbitrary non-zero arity */
+sealed trait NonEmptyTuple extends Tuple:
+  /** Get the i-th element of this tuple. */
+  inline def apply[This >: this.type <: NonEmptyTuple](n: Int): Elem[This, n.type] = ...
+  ...
+
+sealed abstract class *:[+H, +T <: Tuple] extends NonEmptyTuple
+
+object `*:` :
+  def unapply[H, T <: Tuple](x: H *: T): (H, T) = (x.head, x.tail)
+```
+
+For ´1 \leq n \leq 22´, the concrete implementations of `*:` are instances of `Tuple´_n´` classes, which also implement corresponding `Product´_n´` traits.
+They are defined at least as follows in the standard Scala library (they might also add other methods and implement other traits).
+
+```scala
+trait Product´_n´[+´T_1´, ..., +´T_n´] extends Product:
+  override def productArity: Int = ´n´
+  def _1: ´T_1´
+  ...
+  def _n: ´T_n´
+
+final case class Tuple´_n´[+´T_1´, ..., +´T_n´](_1: ´T_1´, ..., _n: ´T_n´)
+  extends *:[´T_1´, ´T_2´ *: ... *: ´T_n´ *: EmptyTuple]
+  with Product´_n´[´T_1´, ..., ´T_n´]
+```
+
+For ´n > 22´, the concrete implementations of ´*:´ are instances of implementation-specific private classes.
+
 ### Class `Array`

 All operations on arrays desugar to the corresponding operations of the underlying platform.
diff --git a/docs/_spec/TODOreference/changed-features/imports.md b/docs/_spec/APPLIEDreference/changed-features/imports.md
similarity index 100%
rename from docs/_spec/TODOreference/changed-features/imports.md
rename to docs/_spec/APPLIEDreference/changed-features/imports.md
diff --git a/docs/_spec/TODOreference/changed-features/interpolation-escapes.md b/docs/_spec/APPLIEDreference/changed-features/interpolation-escapes.md
similarity index 100%
rename from docs/_spec/TODOreference/changed-features/interpolation-escapes.md
rename to docs/_spec/APPLIEDreference/changed-features/interpolation-escapes.md
diff --git a/docs/_spec/TODOreference/changed-features/match-syntax.md b/docs/_spec/APPLIEDreference/changed-features/match-syntax.md
similarity index 100%
rename from docs/_spec/TODOreference/changed-features/match-syntax.md
rename to docs/_spec/APPLIEDreference/changed-features/match-syntax.md
diff --git a/docs/_spec/TODOreference/changed-features/operators.md b/docs/_spec/APPLIEDreference/changed-features/operators.md
similarity index 100%
rename from docs/_spec/TODOreference/changed-features/operators.md
rename to docs/_spec/APPLIEDreference/changed-features/operators.md
diff --git a/docs/_spec/TODOreference/changed-features/wildcards.md b/docs/_spec/APPLIEDreference/changed-features/wildcards.md
similarity index 100%
rename from docs/_spec/TODOreference/changed-features/wildcards.md
rename to docs/_spec/APPLIEDreference/changed-features/wildcards.md
diff --git a/docs/_spec/TODOreference/contextual/given-imports.md b/docs/_spec/APPLIEDreference/contextual/given-imports.md
similarity index 100%
rename from docs/_spec/TODOreference/contextual/given-imports.md
rename to docs/_spec/APPLIEDreference/contextual/given-imports.md
diff --git a/docs/_spec/APPLIEDreference/dropped-features/existential-types.md b/docs/_spec/APPLIEDreference/dropped-features/existential-types.md
index 6ef815152cd0..a7c491dfb3b3 100644
---
a/docs/_spec/APPLIEDreference/dropped-features/existential-types.md
+++ b/docs/_spec/APPLIEDreference/dropped-features/existential-types.md
@@ -10,7 +10,7 @@ have been dropped. The reasons for dropping them are:

 - Existential types violate a type soundness principle on which DOT
   and Scala 3 are constructed. That principle says that every
-  prefix (`p`, respectvely `S`) of a type selection `p.T` or `S#T`
+  prefix (`p`, respectively `S`) of a type selection `p.T` or `S#T`
   must either come from a value constructed at runtime or refer to a
   type that is known to have only good bounds.

diff --git a/docs/_spec/APPLIEDreference/dropped-features/nonlocal-returns.md b/docs/_spec/APPLIEDreference/dropped-features/nonlocal-returns.md
new file mode 100644
index 000000000000..b7dae17f5a77
--- /dev/null
+++ b/docs/_spec/APPLIEDreference/dropped-features/nonlocal-returns.md
@@ -0,0 +1,23 @@
+---
+layout: doc-page
+title: "Deprecated: Nonlocal Returns"
+
+nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/nonlocal-returns.html
+---
+
+Returning from nested anonymous functions is deprecated since Scala 3.2.0.
+
+Nonlocal returns are implemented by throwing and catching `scala.runtime.NonLocalReturnControl`-s. This is rarely what is intended by the programmer. It can be problematic because of the hidden performance cost of throwing and catching exceptions. Furthermore, it is a leaky implementation: a catch-all exception handler can intercept a `NonLocalReturnControl`.
+
+A better alternative to nonlocal returns, and also to the `scala.util.control.Breaks` API, is provided by [`scala.util.boundary` and `boundary.break`](http://dotty.epfl.ch/api/scala/util/boundary$.html).
+
+Example:
+
+```scala
+import scala.util.boundary, boundary.break
+def firstIndex[T](xs: List[T], elem: T): Int =
+  boundary:
+    for (x, i) <- xs.zipWithIndex do
+      if x == elem then break(i)
+    -1
+```
diff --git a/docs/_spec/TODOreference/dropped-features/weak-conformance.md b/docs/_spec/APPLIEDreference/dropped-features/weak-conformance.md
similarity index 91%
rename from docs/_spec/TODOreference/dropped-features/weak-conformance.md
rename to docs/_spec/APPLIEDreference/dropped-features/weak-conformance.md
index b1478326b2c9..03760642293d 100644
--- a/docs/_spec/TODOreference/dropped-features/weak-conformance.md
+++ b/docs/_spec/APPLIEDreference/dropped-features/weak-conformance.md
@@ -44,4 +44,5 @@ Therefore, Scala 3 drops the general notion of weak conformance, and
 instead keeps one rule: `Int` literals are adapted to other numeric
 types if necessary.

-[More details](weak-conformance-spec.md)
+For more details, see Sections "Types > Weak Conformance" and "Expressions > Harmonization" in the specification.
+TODO Link to the spec when it is published.
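A brief sketch of the remaining adaptation rule (the types in the comments are what the rule predicts):

```scala
val d: Double = 42        // the Int literal 42 is adapted to 42.0
val xs = List(1.0, 2)     // List[Double]: the Int literal 2 is adapted to 2.0

val n = 2
val ys = List(1.0, n)     // List[AnyVal]: n is not a literal, so no adaptation
```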
diff --git a/docs/_spec/TODOreference/dropped-features/wildcard-init.md b/docs/_spec/APPLIEDreference/dropped-features/wildcard-init.md
similarity index 100%
rename from docs/_spec/TODOreference/dropped-features/wildcard-init.md
rename to docs/_spec/APPLIEDreference/dropped-features/wildcard-init.md
diff --git a/docs/_spec/TODOreference/other-new-features/control-syntax.md b/docs/_spec/APPLIEDreference/other-new-features/control-syntax.md
similarity index 100%
rename from docs/_spec/TODOreference/other-new-features/control-syntax.md
rename to docs/_spec/APPLIEDreference/other-new-features/control-syntax.md
diff --git a/docs/_spec/TODOreference/other-new-features/opaques.md b/docs/_spec/APPLIEDreference/other-new-features/opaques.md
similarity index 87%
rename from docs/_spec/TODOreference/other-new-features/opaques.md
rename to docs/_spec/APPLIEDreference/other-new-features/opaques.md
index d8c4d37bcb3b..e6d614b3931d 100644
--- a/docs/_spec/TODOreference/other-new-features/opaques.md
+++ b/docs/_spec/APPLIEDreference/other-new-features/opaques.md
@@ -176,4 +176,28 @@ l1.mul(x, z) // error: found l2.Logarithm, required l1.Logarithm
 ```
 In general, one can think of an opaque type as being only transparent in the scope of `private[this]`.

-[More details](opaques-details.md)
+## Top-level Opaque Types
+
+An opaque type alias at the top level is transparent in all other top-level definitions in the source file where it appears, but is opaque in nested
+objects and classes and in all other source files. Example:
+```scala
+// in test1.scala
+opaque type A = String
+val x: A = "abc"
+
+object obj:
+  val y: A = "abc"  // error: found: "abc", required: A
+
+// in test2.scala
+def z: String = x  // error: found: A, required: String
+```
+This behavior becomes clear if one recalls that top-level definitions are placed in their own synthetic object. For instance, the code in `test1.scala` would expand to
+```scala
+object test1$package:
+  opaque type A = String
+  val x: A = "abc"
+
+object obj:
+  val y: A = "abc"  // error: cannot assign "abc" to opaque type alias A
+```
+The opaque type alias `A` is transparent in its scope, which includes the definition of `x`, but not the definitions of `obj` and `y`.
diff --git a/docs/_spec/Dockerfile b/docs/_spec/Dockerfile
index 1fc28081c59f..6f0c349da396 100644
--- a/docs/_spec/Dockerfile
+++ b/docs/_spec/Dockerfile
@@ -1,3 +1,5 @@
+# Keep in sync with relevant parts of .github/workflows/spec.yml
+
 FROM ruby:2.7

 RUN apt-get install -y curl \
diff --git a/docs/_spec/README.md b/docs/_spec/README.md
index b9eba413f8a2..f8a59e86896c 100644
--- a/docs/_spec/README.md
+++ b/docs/_spec/README.md
@@ -1,6 +1,6 @@
 # WIP Scala 3 Language Specification

-**This is still a work in progress, and should *not* be regarded as a source of truth.**
+**This is still a work in progress. There are still Scala 3 features missing, as well as some areas that have not yet been updated since 2.13.**

 First of all, the language specification is meant to be correct, precise and clear.

@@ -25,8 +25,8 @@ To preview locally, run the following commands in the docs/_spec subfolder:
 env UID="$(id -u)" GID="$(id -g)" docker-compose up
 ```

-and open http://0.0.0.0:4000/files/archive/spec/2.13/ to view the spec. Jekyll will rebuild as you edit the markdown, but make sure to restart it when you change `_config.yml`.
-
+    cve{"Is this
+        a fix for a CVE?"}
+        -- yes --> ocb
+
+    subgraph "CVE"
+        ocb{"Does it cause
+            any new failures
+            in the full CB?"}
+
+        -- yes -->
+        regFix[\"Try to provide
+            a followup fix for
+            the regressions"/]
+
+        -- failure -->
+        debate[\"Possible workarounds
+            for new regressions are discussed
+            by the compiler team"/]
+
+        regFix -- success --> ocb
+    end
+    ocb -- no --> acc
+    debate -->|"decision on
+        the recommended
+        workarounds"| acc
+
+    cve -- no -->
+    incompat{"Does the fix
+        break forward
+        compatibility?"}
+        -- yes --> reject
+
+    incompat -- no -->
+
+    regression{"Is this a fix for
+        a regression present
+        also in LTS?"}
+        -- yes -->
+
+    regIsLTS{"Was the last version
+        affected by
+        the regression released
+        before 3.3.0?"}
+        -- yes --> ocbReg
+
+    subgraph "LTS Regression"
+        ocbReg{"Does it cause
+            any new failures
+            in the full CB?"}
+
+        -- yes -->
+        regFixReg[\"Try to provide
+            a followup fix for
+            the regressions"/]
+
+        -- failure -->
+        debateReg[\"Impact of both new and old regression
+            and possible workarounds
+            are discussed by the compiler team."/]
+
+        regFixReg -- success --> ocbReg
+    end
+    ocbReg -- no --> acc
+    debateReg -->|"decision on
+        the recommended
+        workarounds for
+        the new regression"| acc
+    debateReg -->|"decision on
+        the recommended
+        workarounds for
+        the old regression"| reject
+
+    regression -- no --> types
+    regIsLTS -- no --> types
+    types{"Can the fix
+        change types
+        in any correct
+        Scala 3 code?"}
+        -- yes --> request
+    types -- no --> ocbOther
+
+    request{"Is backport
+        of the fix
+        heavily requested?"}
+        -- yes --> debateReq
+    request -- no --> reject
+
+    debateReq[\"Possibility of
+        the backport is discussed
+        by the compiler team"/]
+        --> |"backport is rejected"| reject
+    debateReq --> |"backport is accepted"| ocbOther
+
+    subgraph "Other Fixes"
+        ocbOther{"Does it cause
+            any new failures
+            in the full CB?"}
+
+        -- yes -->
+        regFixOther[\"Try to provide
+            a followup fix for
+            the regressions"/]
+        -- success --> ocbOther
+
+        ocbOther -- no -->
+        lint{"Does it introduce
+            any new warnings
+            behind flags?"}
+        -- yes -->
+        lintOcb{"Does it cause any
+            new failures in the full CB
+            after forcing a new flag?"}
+        -- yes --> regFixOther
+    end
+
+    lint -- no --> acc
+    lintOcb -- no --> acc
+    regFixOther -- failure --> reject
+
+    acc(["The PR is backported"])
+    reject(["The PR is not backported"])
+```
+
+CVE stands for Common Vulnerabilities and Exposures and in the chart above it means an issue with a CVE-ID assigned to it.
+
+CB stands for Community Build, and by full CB we mean the full run of [the Scala 3 Open Community Build](https://github.com/VirtusLab/community-build3).
+
+## How should things be backported?
+
+The backporting process is tracked by [a GitHub Project](https://github.com/orgs/lampepfl/projects/6) in the lampepfl organization. Every PR merged to the `main` branch is automatically added to the `Needs Assessment` column. Those PRs are reviewed by the release officer or other appointed person. They can decide to remove them from the project (backport rejected) or to move them to the `Backporting in progress` column. If the PR with the backport has any differences from the original PR, the person doing the backport will ask the author and reviewers of the original change to review the backport. After merging the backport, the PR will be moved to the `Backport done` column and, after the release, to the `Released` column.
+
+Maintainers can request backporting the entirety or a part of a previously rejected PR by adding it to the `Backport requested` column.
+
+`backport:*` labels are used only for backports that target versions with already released RCs. They can be used to mark changes on the `main` branch that fix a critical bug present in the Scala Next RC release, or changes that were backported to future Scala LTS versions that should also be backported to the current RCs.
+
+## The release cycles
+
+Two separate lines of the compiler require two intertwined release cycles.
+
+Scala Next strictly follows a six-week release train model. Every six weeks, a release candidate for the next version is published. During the next six weeks, we may release subsequent RCs containing fixes to critical bugs found in the previous RCs. A bug may be considered critical only if it is a regression; that is, some code that was correctly passing compilation in an earlier version of Scala 3 is now either failing compilation, crashing the compiler, or generating incorrect output (bytecode or TASTy). The compiler team decides which regression is considered a critical bug that requires a new RC and which can be fixed in the next release. After six weeks, the clock resets, the last released RC is promoted to a stable release, and the RC for the next version is published.
+
+If there is less than a week left before the release, and the last RC still contains critical bugs, the compiler team may decide to postpone publishing the stable version. There will always be at least one whole week between publishing the last RC and promoting it to the status of a stable release. This delay doesn't affect the RC1 date for the next version. It will be released six weeks after the previous version's RC1. The goal is to ensure that a delay in releasing one version doesn't cause future releases to be larger in terms of the number of merged PRs, as that can make regressions inside of them more complex to pinpoint and fix, leading to the accumulation of delays for future versions.
+
+Scala LTS has a more relaxed release model. RC1 for the next version is published after the stable release of the previous version. Similar to Scala Next, we may release more RCs, fixing bugs. Unlike Scala Next, the bug doesn't need to be considered critical to warrant a new RC. For Scala LTS, our primary goal is stability, so delays are acceptable. We guarantee that a stable release is at least six weeks after the first RC and at least one week after the last RC.
+
+The two release cycles are not synchronized in any way, as any synchronization would be broken by any delay in the Scala LTS cycle.
+
+The compiler team may pause the release cycles for a week or two on occasions such as New Year or a conference that most of the team is attending.
+
+### What is being released?
+
+For Scala LTS, what is released as an RC is always the current head of the release branch for the next release.
+
+For Scala Next minor release RCs, by default, it is the head of the `main` branch. Based on the Open Community Build results, the compiler team may decide to base the release on some earlier state of the branch.
diff --git a/project/ScaladocGeneration.scala b/project/ScaladocGeneration.scala
index fd972311da1d..ade9b65d5445 100644
--- a/project/ScaladocGeneration.scala
+++ b/project/ScaladocGeneration.scala
@@ -97,6 +97,10 @@ object ScaladocGeneration {
     def key: String = "-no-link-warnings"
   }

+  case class NoLinkAssetWarnings(value: Boolean) extends Arg[Boolean] {
+    def key: String = "-no-link-asset-warnings"
+  }
+
   case class VersionsDictionaryUrl(value: String) extends Arg[String] {
     def key: String = "-versions-dictionary-url"
   }
diff --git a/project/TastyMiMaFilters.scala b/project/TastyMiMaFilters.scala
new file mode 100644
index 000000000000..0d2ed387da33
--- /dev/null
+++ b/project/TastyMiMaFilters.scala
@@ -0,0 +1,93 @@
+import java.util.Arrays.asList
+import tastymima.intf._
+
+object TastyMiMaFilters {
+  val StdlibBootstrapped: java.util.List[ProblemMatcher] = asList(
+    // Probably OK
+    ProblemMatcher.make(ProblemKind.IncompatibleSelfTypeChange, "scala.*"),
+
+    // Probably OK: Case class with varargs
+    ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.StringContext.parts"), // before: scala.<repeated>[Predef.String]; after: scala.collection.immutable.Seq[Predef.String] @scala.annotation.internal.Repeated
+
+    // Probably OK: ConstantType for `null` versus `scala.Null`
+    // Calls to the default getter seem to link correctly.
+    // Tested in scala2-library-bootstrapped/test/scala/collection/UnrolledBufferTest.scala
+    ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.mutable.UnrolledBuffer.Unrolled.$default$4"),
+
+    // Probably OK: Overriding java method (`public abstract Object underlying();` with `def underlying: Object`)
+    // Calls to the underlying seem to link correctly.
+    // Tested in scala2-library-bootstrapped/test/Main.scala
+    ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.math.Big*.underlying"),
+    ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.math.ScalaNumericConversions.underlying"),
+
+    // Problem: super accessors
+    // In Scala 3 these accessors are added in the `posttyper` phase.
+    // In Scala 2 these accessors are added in the `superaccessors` phase after typer.
+    // Are these accessors in the Scala 2 pickles? If so, it implies that TASTy Query/MiMa is ignoring them in Scala 2 but not Scala 3.
+ // Otherwise, if these are not in the Scala 2 pickles, we might need to remove them when compiling with -Ycompile-scala2-library + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.immutable.IndexedSeqOps.superscala$collection$immutable$IndexedSeqOps$$slice"), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.immutable.StrictOptimizedSeqOps.superscala$collection$immutable$StrictOptimizedSeqOps$$sorted"), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.immutable.IndexedSeq.superscala$collection$immutable$IndexedSeq$$*"/* sameElements, canEqual */), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.SortedSetOps.superscala$collection$SortedSetOps$$*"/* min, max */), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.SortedSet.superscala$collection$SortedSet$$equals"), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.LinearSeqOps.superscala$collection$LinearSeqOps$$sameElements"), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.SortedMap.superscala$collection$SortedMap$$equals"), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.SeqOps.superscala$collection$SeqOps$$*"/* concat, sizeCompare */), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.BitSetOps.superscala$collection$BitSetOps$$*"/* min, intersect, concat, diff, max */), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.mutable.Cloneable.superscala$collection$mutable$Cloneable$$clone"), // The member scala.collection.mutable.Cloneable.superscala$collection$mutable$Cloneable$$clone was concrete or did not exist but is abstract in current version + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.util.control.NoStackTrace.superscala$util$control$NoStackTrace$$fillInStackTrace"), + + // TASTy-MiMa bug (probably OK): `private[scala] var` in case class + // This is probably because we can only access the next field from the scala library. 
+ ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.collection.immutable.::.next$access$1"), + + // Probably OK: Problem Missing setter for `protected var` + // All the classes that contain these `protected var`s are private in `collection` or `convert` + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.BinaryTreeStepperBase.index_="), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.BinaryTreeStepperBase.myCurrent_="), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.BinaryTreeStepperBase.maxLength_="), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.BinaryTreeStepperBase.stack_="), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.ChampStepperBase.maxSize_="), // The member scala.collection.convert.impl.ChampStepperBase.maxSize_= with signature (scala.Int):scala.Unit was concrete or did not exist but is abstract in current version + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.IndexedStepperBase.iN_="), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.IndexedStepperBase.i0_="), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.InOrderStepperBase.iN_="), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.InOrderStepperBase.i0_="), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.TableStepperBase.i0_="), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.TableStepperBase.maxLength_="), + + // Problem: ??? + ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.Predef.nn"), // The member scala.Predef.nn with signature (1,java.lang.Object):java.lang.Object does not have a correspondant in current version + ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.Predef.ne"), // The member scala.Predef.ne with signature (java.lang.Object,java.lang.Object):scala.Boolean does not have a correspondant in current version + ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.Predef.eq"), // The member scala.Predef.eq with signature (java.lang.Object,java.lang.Object):scala.Boolean does not have a correspondant in current version + + // Problem: protected lazy val (processThread, (futureThread, futureValue), destroyer) = { ... } + // https://github.com/scala/scala/blob/cff8a9af4da67658d8e1e32f929e1aff03ffa384/src/library/scala/sys/process/ProcessImpl.scala#L99C5-L99C83 + ProblemMatcher.make(ProblemKind.IncompatibleKindChange, "scala.sys.process.ProcessImpl.CompoundProcess.destroyer"), // before: lazy val; after: def + ProblemMatcher.make(ProblemKind.IncompatibleKindChange, "scala.sys.process.ProcessImpl.CompoundProcess.futureThread"), // before: lazy val; after: def + ProblemMatcher.make(ProblemKind.IncompatibleKindChange, "scala.sys.process.ProcessImpl.CompoundProcess.processThread"), // before: lazy val; after: def + ProblemMatcher.make(ProblemKind.IncompatibleKindChange, "scala.sys.process.ProcessImpl.CompoundProcess.futureValue"), // before: lazy val; after: def + + // Problem? 
+ // https://github.com/scala/scala/blob/2.13.x/src/library/scala/collection/convert/JavaCollectionWrappers.scala#L66-L71 + ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.collection.convert.JavaCollectionWrappers.IterableWrapperTrait.iterator"), // The member scala.collection.convert.JavaCollectionWrappers.IterableWrapperTrait.iterator with signature ():scala.collection.convert.JavaCollectionWrappers.IteratorWrapper does not have a correspondant in current version + + // Problem? + // https://github.com/scala/scala/blob/2.13.x/src/library/scala/collection/mutable/ArrayBuilder.scala#L504C1-L504C87 + ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.collection.mutable.ArrayBuilder.ofUnit.addAll"), // The member scala.collection.mutable.ArrayBuilder.ofUnit.addAll with signature (java.lang.Object,scala.Int,scala.Int):scala.collection.mutable.ArrayBuilder$.ofUnit does not have a correspondant in current version + + // Probably OK (TASTy MiMa bug): Patched Predef members + ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.Predef.valueOf"), // The member scala.Predef.valueOf with signature (1):java.lang.Object does not have a correspondant in current version + ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.Predef.summon"), // The member scala.Predef.summon with signature (1,java.lang.Object):java.lang.Object does not have a correspondant in current version + + // TASTy-MiMa bugs + ProblemMatcher.make(ProblemKind.InternalError, "scala.collection.SeqView.appendedAll"), + ProblemMatcher.make(ProblemKind.InternalError, "scala.collection.SeqView.concat"), + ProblemMatcher.make(ProblemKind.InternalError, "scala.collection.SeqView.prependedAll"), + ProblemMatcher.make(ProblemKind.InternalError, "scala.concurrent.duration.package.*"), + + // Problems introduced in 2.13.11: Implicit classes with complex signatures + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.BuildFromLowPriority1.buildFromSortedSetOps"), // The symbol scala.collection.BuildFromLowPriority1.buildFromSortedSetOps has an incompatible type in current version: before: [CC <: ([X] =>> (scala.collection.SortedSet[X] & scala.collection.SortedSetOps[X, CC, ?])), A0, A](evidence$3: scala.package.Ordering[A])scala.collection.BuildFrom[(CC[A0] & scala.collection.SortedSet[A0]), A, (CC[A] & scala.collection.SortedSet[A])]; after: [CC >: ([X] =>> scala.Nothing) <: ([X] =>> scala.&[scala.collection.SortedSet[X], scala.collection.SortedSetOps[X, CC, ?]]), A0, A](evidence$3: scala.package.Ordering[A])scala.collection.BuildFrom[scala.&[CC[A0], scala.collection.SortedSet[A0]], A, scala.&[CC[A], scala.collection.SortedSet[A]]] + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.BuildFrom.buildFromMapOps"), // The symbol scala.collection.BuildFrom.buildFromMapOps has an incompatible type in current version: before: [CC <: ([X, Y] =>> (scala.collection.Map[X, Y] & scala.collection.MapOps[X, Y, CC, ?])), K0, V0, K, V]scala.collection.BuildFrom[(CC[K0, V0] & scala.collection.Map[K0, V0]), scala.Tuple2[K, V], (CC[K, V] & scala.collection.Map[K, V])]; after: [CC >: ([X, Y] =>> scala.Nothing) <: ([X, Y] =>> scala.&[scala.collection.Map[X, Y], scala.collection.MapOps[X, Y, CC, ?]]), K0, V0, K, V]scala.collection.BuildFrom[scala.&[CC[K0, V0], scala.collection.Map[K0, V0]], scala.Tuple2[K, V], scala.&[CC[K, V], scala.collection.Map[K, V]]] + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.BuildFrom.buildFromSortedMapOps"), // The symbol 
scala.collection.BuildFrom.buildFromSortedMapOps has an incompatible type in current version: before: [CC <: ([X, Y] =>> (scala.collection.SortedMap[X, Y] & scala.collection.SortedMapOps[X, Y, CC, ?])), K0, V0, K, V](evidence$1: scala.package.Ordering[K])scala.collection.BuildFrom[(CC[K0, V0] & scala.collection.SortedMap[K0, V0]), scala.Tuple2[K, V], (CC[K, V] & scala.collection.SortedMap[K, V])]; after: [CC >: ([X, Y] =>> scala.Nothing) <: ([X, Y] =>> scala.&[scala.collection.SortedMap[X, Y], scala.collection.SortedMapOps[X, Y, CC, ?]]), K0, V0, K, V](evidence$1: scala.package.Ordering[K])scala.collection.BuildFrom[scala.&[CC[K0, V0], scala.collection.SortedMap[K0, V0]], scala.Tuple2[K, V], scala.&[CC[K, V], scala.collection.SortedMap[K, V]]] + ) +} diff --git a/project/build.properties b/project/build.properties index 46e43a97ed86..52413ab79a18 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.8.2 +sbt.version=1.9.3 diff --git a/project/plugins.sbt b/project/plugins.sbt index ccbcdeed22fc..c94d4d5afe8d 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -8,14 +8,16 @@ libraryDependencySchemes += addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.12.0") -addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.20") +addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.21") addSbtPlugin("com.github.sbt" % "sbt-pgp" % "2.2.1") addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.17") -addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") +addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.5") addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.2") + +addSbtPlugin("ch.epfl.scala" % "sbt-tasty-mima" % "1.0.0") diff --git a/project/scripts/addToBackportingProject.scala b/project/scripts/addToBackportingProject.scala new file mode 100644 index 000000000000..2c1929972791 --- /dev/null +++ b/project/scripts/addToBackportingProject.scala @@ -0,0 +1,84 @@ +//> using scala 3.3.1 +//> using toolkit 0.2.1 +//> using lib pro.kordyjan::pytanie:0.1.7 + +import pytanie.* +import sttp.client4.* + +lazy val apiToken = + System.getenv("GRAPHQL_API_TOKEN") + +case class ID(value: String) derives WrapperVariable + +val PROJECT_ID = ID("PVT_kwDOACj3ec4AWSoi") +val FIELD_ID = ID("PVTF_lADOACj3ec4AWSoizgO7uJ4") + +@main def run(commitSha: String) = + val (id, date) = getPrData(commitSha) + val newId = addItem(id) + timestampItem(newId, date) + +def getPrData(commitSha: String): (ID, String) = + val res = query""" + |query prForCommit { + | repository(owner:"lampepfl", name:"dotty") { + | object(expression: $commitSha){ + | __typename + | ... 
on Commit { + | associatedPullRequests(first: 1) { + | nodes { + | number + | id + | mergedAt + | } + | } + | } + | } + | } + |} + """.send( + uri"https://api.github.com/graphql", + "DummyUser", + apiToken + ) + val pr = res.repository.`object`.asCommit.get.associatedPullRequests.nodes.head + (ID(pr.id), pr.mergedAt) + +def timestampItem(id: ID, date: String) = + query""" + |mutation editField { + | updateProjectV2ItemFieldValue(input: { + | projectId: $PROJECT_ID, + | itemId: $id, + | fieldId: $FIELD_ID, + | value: { text: $date } + | }) { + | projectV2Item { + | updatedAt + | } + | } + |} + """.send( + uri"https://api.github.com/graphql", + "DummyUser", + apiToken + ) + +def addItem(id: ID) = + val res = query""" + |mutation addItem { + | addProjectV2ItemById(input: { + | projectId: $PROJECT_ID, + | contentId: $id + | }) { + | item { + | id + | } + | } + |} + """.send( + uri"https://api.github.com/graphql", + "DummyUser", + apiToken + ) + ID(res.addProjectV2ItemById.item.id) diff --git a/project/scripts/bisect.scala b/project/scripts/bisect.scala index 2e554a885c79..dbb14f2c4587 100755 --- a/project/scripts/bisect.scala +++ b/project/scripts/bisect.scala @@ -235,10 +235,10 @@ class CommitBisect(validationScript: File, shouldFail: Boolean, bootstrapped: Bo val scala3CompilerProject = if bootstrapped then "scala3-compiler-bootstrapped" else "scala3-compiler" val scala3Project = if bootstrapped then "scala3-bootstrapped" else "scala3" val validationCommandStatusModifier = if shouldFail then "! " else "" // invert the process status if failure was expected - val bisectRunScript = s""" + val bisectRunScript = raw""" |scalaVersion=$$(sbt "print ${scala3CompilerProject}/version" | tail -n1) |rm -r out - |sbt "clean; ${scala3Project}/publishLocal" + |sbt "clean; set every doc := new File(\"unused\"); set scaladoc/Compile/resourceGenerators := (\`${scala3Project}\`/Compile/resourceGenerators).value; ${scala3Project}/publishLocal" |${validationCommandStatusModifier}${validationScript.getAbsolutePath} "$$scalaVersion" """.stripMargin "git bisect start".! 
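
Note on the bisect.scala hunk above: switching the interpolator from `s"""` to `raw"""` is load-bearing, because the `s` interpolator would process the `\"` sequences around `unused` into plain quotes before the shell ever sees them, while `raw` keeps the backslashes that keep the sbt `set` argument quoted. A minimal sketch of the difference (illustrative only; the `interpolatorDemo` name is not part of the patch):

```scala
@main def interpolatorDemo(): Unit =
  // `s` processes escape sequences; `raw` passes them through untouched.
  val escaped = s"set every doc := new File(\"unused\")"
  val kept    = raw"set every doc := new File(\"unused\")"
  println(escaped) // set every doc := new File("unused")
  println(kept)    // set every doc := new File(\"unused\")
```
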
diff --git a/project/scripts/check-cla.sh b/project/scripts/check-cla.sh index 1a91363f5079..e4e489830f11 100755 --- a/project/scripts/check-cla.sh +++ b/project/scripts/check-cla.sh @@ -2,7 +2,7 @@ set -eux echo "Pull request submitted by $AUTHOR"; -if [ "$AUTHOR" = "github-actions[bot]" ] ; then +if [[ "$AUTHOR" == "github-actions[bot]" || "$AUTHOR" == "dependabot[bot]" ]] ; then echo "CLA check for $AUTHOR successful"; else signed=$(curl -s "https://www.lightbend.com/contribute/cla/scala/check/$AUTHOR" | jq -r ".signed"); diff --git a/project/scripts/cmdScaladocTests b/project/scripts/cmdScaladocTests index 2168e3e8e334..e9403d988b98 100755 --- a/project/scripts/cmdScaladocTests +++ b/project/scripts/cmdScaladocTests @@ -23,8 +23,8 @@ SOURCE_LINKS_VERSION="${GITHUB_SHA:-$DOTTY_BOOTSTRAPPED_VERSION}" dist/target/pack/bin/scaladoc \ -d "$OUT1" \ -project "scaladoc testcases" \ - -source-links:out/bootstrap/stdlib-bootstrapped/scala-"${DOTTY_NONBOOTSTRAPPED_VERSION}"/src_managed/main/scala-library-src=github://scala/scala/v"${STDLIB_VERSION}"#src/library \ - -source-links:out/bootstrap/stdlib-bootstrapped/scala-"${DOTTY_NONBOOTSTRAPPED_VERSION}"/src_managed/main/dotty-library-src=github://"${SOURCE_LINKS_REPOSITORY}"/"${SOURCE_LINKS_VERSION}"\#library/src \ + -source-links:out/bootstrap/scala2-library-bootstrapped/scala-"${DOTTY_NONBOOTSTRAPPED_VERSION}"/src_managed/main/scala-library-src=github://scala/scala/v"${STDLIB_VERSION}"#src/library \ + -source-links:out/bootstrap/scala2-library-bootstrapped/scala-"${DOTTY_NONBOOTSTRAPPED_VERSION}"/src_managed/main/dotty-library-src=github://"${SOURCE_LINKS_REPOSITORY}"/"${SOURCE_LINKS_VERSION}"\#library/src \ -source-links:github://"${SOURCE_LINKS_REPOSITORY}"/"${SOURCE_LINKS_VERSION}" \ "-external-mappings:.*scala/.*::scaladoc3::https://dotty.epfl.ch/api/,.*java/.*::javadoc::https://docs.oracle.com/javase/8/docs/api/" \ "-skip-by-regex:.+\.internal($|\..+)" \ @@ -37,7 +37,7 @@ dist/target/pack/bin/scaladoc \ "-snippet-compiler:scaladoc-testcases/docs=compile" \ "-comment-syntax:scaladoc-testcases/src/example/comment-md=markdown,scaladoc-testcases/src/example/comment-wiki=wiki" \ -siteroot scaladoc-testcases/docs \ - -project-footer "Copyright (c) 2002-2023, LAMP/EPFL" \ + -project-footer "Copyright (c) 2002-$(date +%Y), LAMP/EPFL" \ -default-template static-site-main \ -author -groups -revision main -project-version "${DOTTY_BOOTSTRAPPED_VERSION}" \ "-quick-links:Learn::https://docs.scala-lang.org/,Install::https://www.scala-lang.org/download/,Playground::https://scastie.scala-lang.org,Find A Library::https://index.scala-lang.org,Community::https://www.scala-lang.org/community/,Blog::https://www.scala-lang.org/blog/," \ diff --git a/project/scripts/cmdTests b/project/scripts/cmdTests index 3405c06b056f..453590084b00 100755 --- a/project/scripts/cmdTests +++ b/project/scripts/cmdTests @@ -25,9 +25,9 @@ grep -qe "$EXPECTED_OUTPUT" "$tmp" echo "testing sbt scalac -print-tasty" clear_out "$OUT" -"$SBT" ";scalac $SOURCE -d $OUT ;scalac -print-tasty -color:never $TASTY" > "$tmp" +"$SBT" ";scalac $SOURCE -d $OUT ;scalac -print-tasty -color:never $OUT/$TASTY" > "$tmp" grep -qe "0: ASTs" "$tmp" -grep -qe "0: tests/pos/HelloWorld.scala" "$tmp" +grep -qe "0: 41 \[tests/pos/HelloWorld.scala\]" "$tmp" echo "testing that paths SourceFile annotations are relativized" clear_out "$OUT" diff --git a/project/scripts/scala2-library-tasty-mima.sh b/project/scripts/scala2-library-tasty-mima.sh new file mode 100755 index 000000000000..7118ee28c2f3 --- /dev/null +++ 
b/project/scripts/scala2-library-tasty-mima.sh
@@ -0,0 +1,23 @@
+#!/usr/bin/env bash
+set -eux
+
+source $(dirname $0)/cmdTestsCommon.inc.sh
+
+TASTY_FORMAT_FILE="tasty/src/dotty/tools/tasty/TastyFormat.scala"
+MINOR_TASTY_VERSION_SUPPORTED_BY_TASTY_MIMA=3
+MINOR_TASTY_VERSION=$(grep -oE 'val MinorVersion: Int = ([0-9]+)' $TASTY_FORMAT_FILE | grep -oE '[0-9]+')
+EXPERIMENTAL_TASTY_VERSION=$(grep -oE 'val ExperimentalVersion: Int = ([0-9]+)' $TASTY_FORMAT_FILE | grep -oE '[0-9]+')
+
+setTastyVersion() {
+ sed -i -E -e "s/val MinorVersion: Int = [0-9]+/val MinorVersion: Int = $1/" -e "s/val ExperimentalVersion: Int = [0-9]+/val ExperimentalVersion: Int = $2/" $TASTY_FORMAT_FILE
+}
+
+setTastyVersion $MINOR_TASTY_VERSION_SUPPORTED_BY_TASTY_MIMA 0
+
+# Run scala2-library-bootstrapped/tastyMiMaReportIssues using a custom TASTy version.
+# We clean before to make sure all sources are recompiled using the new TASTy version.
+# We clean after to make sure no other test will use the TASTy generated with this version.
+# We set -Ycheck:all to check that -Ycompile-scala2-library does not generate inconsistent trees.
+"$SBT" 'clean; scala2-library-bootstrapped/clean; reload; set `scala2-library-bootstrapped`/scalacOptions += "-Ycheck:all"; scala2-library-bootstrapped/tastyMiMaReportIssues; clean; scala2-library-bootstrapped/clean'
+
+setTastyVersion $MINOR_TASTY_VERSION $EXPERIMENTAL_TASTY_VERSION
diff --git a/sbt-bridge/src/dotty/tools/xsbt/Action.java b/sbt-bridge/src/dotty/tools/xsbt/Action.java
new file mode 100644
index 000000000000..2a1818fef78c
--- /dev/null
+++ b/sbt-bridge/src/dotty/tools/xsbt/Action.java
@@ -0,0 +1,28 @@
+package dotty.tools.xsbt;
+
+import java.util.Optional;
+
+final public class Action implements xsbti.Action {
+ private final String _title;
+ private final Optional<String> _description;
+ private final WorkspaceEdit _edit;
+
+ public Action(String title, Optional<String> description, WorkspaceEdit edit) {
+ super();
+ this._title = title;
+ this._description = description;
+ this._edit = edit;
+ }
+
+ public String title() {
+ return _title;
+ }
+
+ public Optional<String> description() {
+ return _description;
+ }
+
+ public WorkspaceEdit edit() {
+ return _edit;
+ }
+}
diff --git a/sbt-bridge/src/dotty/tools/xsbt/CompilerBridge.java b/sbt-bridge/src/dotty/tools/xsbt/CompilerBridge.java
index 92b8062700c4..6e2095a9df1e 100644
--- a/sbt-bridge/src/dotty/tools/xsbt/CompilerBridge.java
+++ b/sbt-bridge/src/dotty/tools/xsbt/CompilerBridge.java
@@ -19,6 +19,6 @@ public final class CompilerBridge implements CompilerInterface2 {
public void run(VirtualFile[] sources, DependencyChanges changes, String[] options, Output output,
AnalysisCallback callback, Reporter delegate, CompileProgress progress, Logger log) {
CompilerBridgeDriver driver = new CompilerBridgeDriver(options, output);
- driver.run(sources, callback, log, delegate);
+ driver.run(sources, callback, log, delegate, progress);
}
}
diff --git a/sbt-bridge/src/dotty/tools/xsbt/CompilerBridgeDriver.java b/sbt-bridge/src/dotty/tools/xsbt/CompilerBridgeDriver.java
index 12291120b157..2d54d4e83404 100644
--- a/sbt-bridge/src/dotty/tools/xsbt/CompilerBridgeDriver.java
+++ b/sbt-bridge/src/dotty/tools/xsbt/CompilerBridgeDriver.java
@@ -10,15 +10,26 @@
import dotty.tools.dotc.ScalacCommand;
import dotty.tools.dotc.config.Properties;
import dotty.tools.dotc.core.Contexts;
+import dotty.tools.dotc.util.SourceFile;
import dotty.tools.io.AbstractFile;
+import dotty.tools.io.PlainFile;
+import dotty.tools.io.Path;
+import dotty.tools.io.Streamable;
import scala.collection.mutable.ListBuffer;
+import scala.jdk.javaapi.CollectionConverters;
import scala.io.Codec;
import xsbti.Problem;
import xsbti.*;
import xsbti.compile.Output;
+import xsbti.compile.CompileProgress;
import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
import java.util.Comparator;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
import java.util.Arrays;
public class CompilerBridgeDriver extends Driver {
@@ -50,15 +61,72 @@ public boolean sourcesRequired() {
return false;
}
- synchronized public void run(VirtualFile[] sources, AnalysisCallback callback, Logger log, Reporter delegate) {
- DelegatingReporter reporter = new DelegatingReporter(delegate);
+ private static VirtualFile asVirtualFile(SourceFile sourceFile, DelegatingReporter reporter,
+ HashMap<AbstractFile, VirtualFile> lookup) {
+ return lookup.computeIfAbsent(sourceFile.file(), path -> {
+ reportMissingFile(reporter, sourceFile);
+ if (sourceFile.file().jpath() != null)
+ return new FallbackPathBasedFile(sourceFile);
+ else
+ return new FallbackVirtualFile(sourceFile);
+ });
+ }
+
+ private static void reportMissingFile(DelegatingReporter reporter, SourceFile sourceFile) {
+ String underline = String.join("", Collections.nCopies(sourceFile.path().length(), "^"));
+ String message =
+ sourceFile.path() + ": Missing Zinc virtual file\n" +
+ underline + "\n" +
+ " Falling back to placeholder for the given source file (of class " + sourceFile.getClass().getName() + ")\n" +
+ " This is likely a bug in incremental compilation for the Scala 3 compiler.\n" +
+ " Please report it to the Scala 3 maintainers at https://github.com/lampepfl/dotty/issues.";
+ reporter.reportBasicWarning(message);
+ }
+
+ synchronized public void run(
+ VirtualFile[] sources, AnalysisCallback callback, Logger log, Reporter delegate, CompileProgress progress) {
+ VirtualFile[] sortedSources = new VirtualFile[sources.length];
+ System.arraycopy(sources, 0, sortedSources, 0, sources.length);
+ Arrays.sort(sortedSources, (x0, x1) -> x0.id().compareTo(x1.id()));
+
+ ListBuffer<AbstractFile> sourcesBuffer = new ListBuffer<>();
+ HashMap<AbstractFile, VirtualFile> lookup = new HashMap<>(sources.length, 0.25f);
+
+ for (int i = 0; i < sources.length; i++) {
+ VirtualFile source = sortedSources[i];
+ AbstractFile abstractFile = asDottyFile(source);
+ sourcesBuffer.append(abstractFile);
+ lookup.put(abstractFile, source);
+ }
+
+ DelegatingReporter reporter = new DelegatingReporter(delegate, sourceFile -> {
+ // TODO: possible situation here where we use -from-tasty and TASTy source files but
+ // the reporter log is associated with a Scala source file?
+
+ // Zinc will use the output of this function to possibly look up a mapped virtual file,
+ // e.g. convert `${ROOT}/Foo.scala` to `/path/to/Foo.scala` if it exists in the lookup map.
+ VirtualFile vf = lookup.get(sourceFile.file());
+ if (vf != null)
+ return vf.id();
+ else
+ // follow Zinc, which uses the path of the source file as a fallback.
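+ // (A miss here is expected for sources the compiler picked up on its own, e.g. via
+ // -sourcepath, since only the files Zinc handed to run() are registered in `lookup`.)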
+ return sourceFile.path();
+ });
+
+ ProgressCallbackImpl progressCallback = new ProgressCallbackImpl(progress);
+
+ IncrementalCallback incCallback = new IncrementalCallback(callback, sourceFile ->
+ asVirtualFile(sourceFile, reporter, lookup)
+ );
+
try {
log.debug(this::infoOnCachedCompiler);
Contexts.Context initialCtx = initCtx()
.fresh()
.setReporter(reporter)
- .setSbtCallback(callback);
+ .setIncCallback(incCallback)
+ .setProgressCallback(progressCallback);
Contexts.Context context = setup(args, initialCtx).map(t -> t._2).getOrElse(() -> initialCtx);
@@ -70,28 +138,28 @@ synchronized public void run(VirtualFile[] sources, AnalysisCallback callback, L
log.debug(this::prettyPrintCompilationArguments);
Compiler compiler = newCompiler(context);
- VirtualFile[] sortedSources = new VirtualFile[sources.length];
- System.arraycopy(sources, 0, sortedSources, 0, sources.length);
- Arrays.sort(
- sortedSources,
- new Comparator<VirtualFile>() {
- @Override
- public int compare(VirtualFile x0, VirtualFile x1) {
- return x0.id().compareTo(x1.id());
- }
- }
- );
-
- ListBuffer<AbstractFile> sourcesBuffer = new ListBuffer<>();
- for (VirtualFile file: sortedSources)
- sourcesBuffer.append(asDottyFile(file));
doCompile(compiler, sourcesBuffer.toList(), context);
for (xsbti.Problem problem: delegate.problems()) {
- callback.problem(problem.category(), problem.position(), problem.message(), problem.severity(),
- true);
+ try {
+ AnalysisCallback2 callback2 = (AnalysisCallback2)callback;
+ callback2.problem2(
+ problem.category(),
+ problem.position(),
+ problem.message(),
+ problem.severity(),
+ true, // reported
+ problem.rendered(),
+ problem.diagnosticCode(),
+ problem.diagnosticRelatedInformation(),
+ problem.actions()
+ );
+ } catch (NoClassDefFoundError e) {
+ callback.problem(problem.category(), problem.position(), problem.message(), problem.severity(),
+ true);
+ }
}
- } else {
+ } else {
delegate.printSummary();
}
@@ -105,11 +173,28 @@ public int compare(VirtualFile x0, VirtualFile x1) {
}
private static AbstractFile asDottyFile(VirtualFile virtualFile) {
- if (virtualFile instanceof PathBasedFile)
- return new ZincPlainFile((PathBasedFile) virtualFile);
+ if (virtualFile instanceof PathBasedFile) {
+ java.nio.file.Path path = ((PathBasedFile) virtualFile).toPath();
+ return new PlainFile(new Path(path));
+ }
try {
- return new ZincVirtualFile(virtualFile);
+ return new dotty.tools.io.VirtualFile(virtualFile.name(), virtualFile.id()) {
+ {
+ // fill in the content
+ try (OutputStream output = output()) {
+ try (InputStream input = virtualFile.input()) {
+ Streamable.Bytes bytes = new Streamable.Bytes() {
+ @Override
+ public InputStream inputStream() {
+ return input;
+ }
+ };
+ output.write(bytes.toByteArray());
+ }
+ }
+ }
+ };
} catch (IOException e) {
throw new IllegalArgumentException("invalid file " + virtualFile.name(), e);
}
diff --git a/sbt-bridge/src/dotty/tools/xsbt/DelegatingReporter.java b/sbt-bridge/src/dotty/tools/xsbt/DelegatingReporter.java
index 25b934000144..3bcff72601a7 100644
--- a/sbt-bridge/src/dotty/tools/xsbt/DelegatingReporter.java
+++ b/sbt-bridge/src/dotty/tools/xsbt/DelegatingReporter.java
@@ -3,11 +3,15 @@
*/
package dotty.tools.xsbt;
+import java.util.List;
+
import scala.Tuple2;
import scala.collection.mutable.HashMap;
+import scala.jdk.javaapi.CollectionConverters;
import dotty.tools.dotc.core.Contexts.Context;
import dotty.tools.dotc.reporting.AbstractReporter;
+import dotty.tools.dotc.reporting.CodeAction;
import dotty.tools.dotc.reporting.Diagnostic;
import dotty.tools.dotc.reporting.Message;
import dotty.tools.dotc.util.SourceFile;
@@ -15,12 +19,21 @@
import xsbti.Position;
import xsbti.Severity;
+import java.util.Collections;
+import java.util.function.*;
+
final public class DelegatingReporter extends AbstractReporter {
private xsbti.Reporter delegate;
- public DelegatingReporter(xsbti.Reporter delegate) {
+ // A function that can look up the `id` of the VirtualFile
+ // associated with a SourceFile. If there is not an associated virtual file,
+ // then it is the path of the SourceFile as a String.
+ private final Function<SourceFile, String> lookupVirtualFileId;
+
+ public DelegatingReporter(xsbti.Reporter delegate, Function<SourceFile, String> lookupVirtualFileId) {
super();
this.delegate = delegate;
+ this.lookupVirtualFileId = lookupVirtualFileId;
}
public void dropDelegate() {
@@ -35,20 +48,25 @@ public void printSummary(Context ctx) {
public void doReport(Diagnostic dia, Context ctx) {
Severity severity = severityOf(dia.level());
Position position = positionOf(dia.pos().nonInlined());
-
- StringBuilder rendered = new StringBuilder();
- rendered.append(messageAndPos(dia, ctx));
Message message = dia.msg();
- StringBuilder messageBuilder = new StringBuilder();
- messageBuilder.append(message.message());
+ String text;
+ if (Diagnostic.shouldExplain(dia, ctx) && !message.explanation().isEmpty())
+ text = message.message() + System.lineSeparator() + explanation(message, ctx);
+ else
+ text = message.message();
+ String rendered = messageAndPos(dia, ctx);
String diagnosticCode = String.valueOf(message.errorId().errorNumber());
- boolean shouldExplain = Diagnostic.shouldExplain(dia, ctx);
- if (shouldExplain && !message.explanation().isEmpty()) {
- rendered.append(explanation(message, ctx));
- messageBuilder.append(System.lineSeparator()).append(explanation(message, ctx));
- }
+ List<CodeAction> actions = CollectionConverters.asJava(message.actions(ctx));
+ Problem problem = new Problem(position, text, severity, rendered, diagnosticCode, actions, lookupVirtualFileId);
+ delegate.log(problem);
+ }
- delegate.log(new Problem(position, messageBuilder.toString(), severity, rendered.toString(), diagnosticCode));
+ public void reportBasicWarning(String message) {
+ Position position = PositionBridge.noPosition;
+ Severity severity = Severity.Warn;
+ String diagnosticCode = "-1"; // no error code
+ List<CodeAction> actions = Collections.emptyList();
+ delegate.log(new Problem(position, message, severity, message, diagnosticCode, actions, lookupVirtualFileId));
}
private static Severity severityOf(int level) {
@@ -63,9 +81,9 @@ private static Severity severityOf(int level) {
return severity;
}
- private static Position positionOf(SourcePosition pos) {
- if (pos.exists()){
- return new PositionBridge(pos, pos.source());
+ private Position positionOf(SourcePosition pos) {
+ if (pos.exists()) {
+ return new PositionBridge(pos, lookupVirtualFileId.apply(pos.source()));
} else {
return PositionBridge.noPosition;
}
diff --git a/sbt-bridge/src/dotty/tools/xsbt/FallbackPathBasedFile.java b/sbt-bridge/src/dotty/tools/xsbt/FallbackPathBasedFile.java
new file mode 100644
index 000000000000..28c2170d2b50
--- /dev/null
+++ b/sbt-bridge/src/dotty/tools/xsbt/FallbackPathBasedFile.java
@@ -0,0 +1,20 @@
+package dotty.tools.xsbt;
+
+import dotty.tools.dotc.util.SourceFile;
+
+/**A basic implementation of PathBasedFile that is only used when
+ * the real virtual file cannot be found.
+ *
+ * See FallbackVirtualFile for more details.
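+ * (CompilerBridgeDriver.asVirtualFile only selects this subclass when
+ * `sourceFile.file().jpath()` is non-null, so `toPath()` has a real path to return.)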
+ */
+public class FallbackPathBasedFile extends FallbackVirtualFile implements xsbti.PathBasedFile {
+
+ public FallbackPathBasedFile(SourceFile sourceFile) {
+ super(sourceFile);
+ }
+
+ public java.nio.file.Path toPath() {
+ return sourceFile.file().jpath();
+ }
+
+}
diff --git a/sbt-bridge/src/dotty/tools/xsbt/FallbackVirtualFile.java b/sbt-bridge/src/dotty/tools/xsbt/FallbackVirtualFile.java
new file mode 100644
index 000000000000..6fcb6ef73e1f
--- /dev/null
+++ b/sbt-bridge/src/dotty/tools/xsbt/FallbackVirtualFile.java
@@ -0,0 +1,36 @@
+package dotty.tools.xsbt;
+
+import dotty.tools.dotc.util.SourceFile;
+import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
+
+/**A basic implementation of VirtualFile that is only used when
+ * the real virtual file cannot be found.
+ *
+ * This has a very basic implementation of contentHash that is almost certainly colliding more than the implementation
+ * in Zinc. It does not matter anyway as Zinc will recompile the associated source file, because it did not receive the
+ * same virtual file back.
+ */
+public class FallbackVirtualFile extends xsbti.BasicVirtualFileRef implements xsbti.VirtualFile {
+
+ protected final SourceFile sourceFile;
+
+ public FallbackVirtualFile(SourceFile sourceFile) {
+ super(sourceFile.path());
+ this.sourceFile = sourceFile;
+ }
+
+ private static byte[] toBytes(char[] chars) {
+ return new String(chars).getBytes(StandardCharsets.UTF_8);
+ }
+
+ public InputStream input() {
+ return new java.io.ByteArrayInputStream(toBytes(sourceFile.content()));
+ }
+
+ public long contentHash() {
+ int murmurHash3 = scala.util.hashing.MurmurHash3$.MODULE$.bytesHash(toBytes(sourceFile.content()));
+ return (long) murmurHash3;
+ }
+
+}
diff --git a/sbt-bridge/src/dotty/tools/xsbt/IncrementalCallback.java b/sbt-bridge/src/dotty/tools/xsbt/IncrementalCallback.java
new file mode 100644
index 000000000000..3c3d33c1c1fe
--- /dev/null
+++ b/sbt-bridge/src/dotty/tools/xsbt/IncrementalCallback.java
@@ -0,0 +1,60 @@
+package dotty.tools.xsbt;
+
+import dotty.tools.dotc.util.SourceFile;
+import java.util.function.Function;
+
+public final class IncrementalCallback implements dotty.tools.dotc.sbt.interfaces.IncrementalCallback {
+
+ private final xsbti.AnalysisCallback delegate;
+ private final Function<SourceFile, xsbti.VirtualFile> asVirtualFile;
+
+ public IncrementalCallback(xsbti.AnalysisCallback delegate, Function<SourceFile, xsbti.VirtualFile> asVirtualFile) {
+ this.delegate = delegate;
+ this.asVirtualFile = asVirtualFile;
+ }
+
+ @Override
+ public void api(SourceFile sourceFile, xsbti.api.ClassLike classApi) {
+ delegate.api(asVirtualFile.apply(sourceFile), classApi);
+ }
+
+ @Override
+ public void startSource(SourceFile sourceFile) {
+ delegate.startSource(asVirtualFile.apply(sourceFile));
+ }
+
+ @Override
+ public void mainClass(SourceFile sourceFile, String className) {
+ delegate.mainClass(asVirtualFile.apply(sourceFile), className);
+ }
+
+ @Override
+ public boolean enabled() {
+ return delegate.enabled();
+ }
+
+ @Override
+ public void usedName(String className, String name, java.util.EnumSet<xsbti.UseScope> useScopes) {
+ delegate.usedName(className, name, useScopes);
+ }
+
+ @Override
+ public void binaryDependency(java.nio.file.Path onBinaryEntry, String onBinaryClassName, String fromClassName, SourceFile fromSourceFile, xsbti.api.DependencyContext context) {
+ delegate.binaryDependency(onBinaryEntry, onBinaryClassName, fromClassName, asVirtualFile.apply(fromSourceFile), context);
+ }
+
+ @Override
+ public void classDependency(String onClassName, String sourceClassName, xsbti.api.DependencyContext context) {
+ delegate.classDependency(onClassName, sourceClassName, context);
+ }
+
+ @Override
+ public void generatedLocalClass(SourceFile source, java.nio.file.Path classFile) {
+ delegate.generatedLocalClass(asVirtualFile.apply(source), classFile);
+ }
+
+ @Override
+ public void generatedNonLocalClass(SourceFile source, java.nio.file.Path classFile, String binaryClassName, String srcClassName) {
+ delegate.generatedNonLocalClass(asVirtualFile.apply(source), classFile, binaryClassName, srcClassName);
+ }
+}
diff --git a/sbt-bridge/src/dotty/tools/xsbt/OldIncrementalCallback.java b/sbt-bridge/src/dotty/tools/xsbt/OldIncrementalCallback.java
new file mode 100644
index 000000000000..597a964eb944
--- /dev/null
+++ b/sbt-bridge/src/dotty/tools/xsbt/OldIncrementalCallback.java
@@ -0,0 +1,74 @@
+package dotty.tools.xsbt;
+
+import dotty.tools.dotc.util.SourceFile;
+import java.util.function.Function;
+import java.util.Optional;
+
+import java.io.File;
+
+/** To be compatible with the Zinc 1.3 API */
+public final class OldIncrementalCallback implements dotty.tools.dotc.sbt.interfaces.IncrementalCallback {
+
+ private final xsbti.AnalysisCallback delegate;
+
+ public OldIncrementalCallback(xsbti.AnalysisCallback delegate) {
+ this.delegate = delegate;
+ }
+
+ private static File asJavaFile(SourceFile sourceFile) {
+ File jfileOrNull = sourceFile.file().file();
+ if (jfileOrNull != null) return jfileOrNull;
+ throw new IllegalArgumentException("SourceFile " + sourceFile + " is not backed by a java.io.File");
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public void api(SourceFile sourceFile, xsbti.api.ClassLike classApi) {
+ delegate.api(asJavaFile(sourceFile), classApi);
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public void startSource(SourceFile sourceFile) {
+ delegate.startSource(asJavaFile(sourceFile));
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public void mainClass(SourceFile sourceFile, String className) {
+ delegate.mainClass(asJavaFile(sourceFile), className);
+ }
+
+ @Override
+ public boolean enabled() {
+ return delegate.enabled();
+ }
+
+ @Override
+ public void usedName(String className, String name, java.util.EnumSet<xsbti.UseScope> useScopes) {
+ delegate.usedName(className, name, useScopes);
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public void binaryDependency(java.nio.file.Path onBinaryEntry, String onBinaryClassName, String fromClassName, SourceFile fromSourceFile, xsbti.api.DependencyContext context) {
+ delegate.binaryDependency(onBinaryEntry.toFile(), onBinaryClassName, fromClassName, asJavaFile(fromSourceFile), context);
+ }
+
+ @Override
+ public void classDependency(String onClassName, String sourceClassName, xsbti.api.DependencyContext context) {
+ delegate.classDependency(onClassName, sourceClassName, context);
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public void generatedLocalClass(SourceFile source, java.nio.file.Path classFile) {
+ delegate.generatedLocalClass(asJavaFile(source), classFile.toFile());
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public void generatedNonLocalClass(SourceFile source, java.nio.file.Path classFile, String binaryClassName, String srcClassName) {
+ delegate.generatedNonLocalClass(asJavaFile(source), classFile.toFile(), binaryClassName, srcClassName);
+ }
+}
diff --git a/sbt-bridge/src/dotty/tools/xsbt/PositionBridge.java b/sbt-bridge/src/dotty/tools/xsbt/PositionBridge.java
index 6b3c25e2e27c..eb01da25ba1c 100644
--- a/sbt-bridge/src/dotty/tools/xsbt/PositionBridge.java
+++ b/sbt-bridge/src/dotty/tools/xsbt/PositionBridge.java
@@ -12,10 +12,12 @@
import java.io.File;
import java.util.Optional;
+import java.util.function.Function;
public class PositionBridge implements Position {
private final SourcePosition pos;
private final SourceFile src;
+ private final String pathId;
public static final Position noPosition = new Position() {
public Optional<File> sourceFile() {
@@ -45,9 +47,10 @@ public String toString() {
}
};
- public PositionBridge(SourcePosition pos, SourceFile src) {
+ public PositionBridge(SourcePosition pos, String path) {
this.pos = pos;
- this.src = src;
+ this.src = pos.source();
+ this.pathId = path;
}
@Override
@@ -82,17 +85,7 @@ public Optional<Integer> offset() {
@Override
public Optional<String> sourcePath() {
- if (!src.exists())
- return Optional.empty();
-
- AbstractFile sourceFile = pos.source().file();
- if (sourceFile instanceof ZincPlainFile) {
- return Optional.of(((ZincPlainFile) sourceFile).underlying().id());
- } else if (sourceFile instanceof ZincVirtualFile) {
- return Optional.of(((ZincVirtualFile) sourceFile).underlying().id());
- } else {
- return Optional.of(sourceFile.path());
- }
+ return Optional.of(pathId);
}
@Override
@@ -131,7 +124,7 @@ public String toString() {
else
return path;
}
-
+
@Override
public Optional<Integer> startOffset() {
if (src.content().length == 0)
diff --git a/sbt-bridge/src/dotty/tools/xsbt/Problem.java b/sbt-bridge/src/dotty/tools/xsbt/Problem.java
index 29d64cc26c4a..532bb35786c4 100644
--- a/sbt-bridge/src/dotty/tools/xsbt/Problem.java
+++ b/sbt-bridge/src/dotty/tools/xsbt/Problem.java
@@ -1,8 +1,23 @@
package dotty.tools.xsbt;
+import java.util.List;
import java.util.Optional;
+import java.util.function.Function;
+
+import static java.util.stream.Collectors.toList;
+
+import dotty.tools.dotc.reporting.CodeAction;
+import dotty.tools.dotc.rewrites.Rewrites.ActionPatch;
+import dotty.tools.dotc.util.SourcePosition;
+import dotty.tools.dotc.util.SourceFile;
+
+import scala.jdk.javaapi.CollectionConverters;
+import scala.jdk.javaapi.OptionConverters;
+
import xsbti.Position;
import xsbti.Severity;
+import xsbti.VirtualFile;
+
final public class Problem implements xsbti.Problem {
private final Position _position;
@@ -10,14 +25,23 @@ final public class Problem implements xsbti.Problem {
private final Severity _severity;
private final Optional<String> _rendered;
private final String _diagnosticCode;
+ private final List<CodeAction> _actions;
+
+ // A function that can look up the `id` of the VirtualFile
+ // associated with a SourceFile. If there is not an associated virtual file,
+ // then it is the path of the SourceFile as a String.
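+ // Keeping the function rather than an eagerly computed String matters: it is only
+ // applied when actions() is invoked, matching the lazy treatment of diagnosticCode.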
+ private final Function<SourceFile, String> _lookupVirtualFileId;
- public Problem(Position position, String message, Severity severity, String rendered, String diagnosticCode) {
+ public Problem(Position position, String message, Severity severity, String rendered, String diagnosticCode, List<CodeAction> actions,
+ Function<SourceFile, String> lookupVirtualFileId) {
super();
this._position = position;
this._message = message;
this._severity = severity;
this._rendered = Optional.of(rendered);
this._diagnosticCode = diagnosticCode;
+ this._actions = actions;
+ this._lookupVirtualFileId = lookupVirtualFileId;
}
public String category() {
@@ -56,6 +80,38 @@ public Optional<String> diagnosticCode() {
}
}
+ public List<xsbti.Action> actions() {
+ if (_actions.isEmpty()) {
+ return java.util.Collections.emptyList();
+ } else {
+ // Same as with diagnosticCode, we need to ensure we don't create the actual
+ // Action until we are here to ensure that when using an older version of sbt/zinc
+ // with the new versions of the compiler, this doesn't blow up because this is
+ // never getting called.
+ return _actions
+ .stream()
+ .map(action -> new Action(action.title(), OptionConverters.toJava(action.description()), toWorkspaceEdit(CollectionConverters.asJava(action.patches()), _lookupVirtualFileId)))
+ .collect(toList());
+ }
+ }
+
+ private static WorkspaceEdit toWorkspaceEdit(List<ActionPatch> patches, Function<SourceFile, String> lookupVirtualFileId) {
+ return new WorkspaceEdit(
+ patches
+ .stream()
+ .map(patch -> new TextEdit(positionOf(patch.srcPos(), lookupVirtualFileId), patch.replacement()))
+ .collect(toList())
+ );
+ }
+
+ private static Position positionOf(SourcePosition pos, Function<SourceFile, String> lookupVirtualFileId) {
+ if (pos.exists()){
+ return new PositionBridge(pos, lookupVirtualFileId.apply(pos.source()));
+ } else {
+ return PositionBridge.noPosition;
+ }
+ }
+
@Override
public String toString() {
return "Problem(" + _position + ", " + _message + ", " + _severity + ", " + _rendered + ", " + _diagnosticCode + ")";
diff --git a/sbt-bridge/src/dotty/tools/xsbt/ProgressCallbackImpl.java b/sbt-bridge/src/dotty/tools/xsbt/ProgressCallbackImpl.java
new file mode 100644
index 000000000000..f5fb78f12bb1
--- /dev/null
+++ b/sbt-bridge/src/dotty/tools/xsbt/ProgressCallbackImpl.java
@@ -0,0 +1,35 @@
+package dotty.tools.xsbt;
+
+import dotty.tools.dotc.sbt.interfaces.ProgressCallback;
+import dotty.tools.dotc.CompilationUnit;
+
+import xsbti.compile.CompileProgress;
+
+public final class ProgressCallbackImpl implements ProgressCallback {
+ private boolean _cancelled = false; // TODO: atomic boolean?
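+ // cancel() is expected to be called when progress() below returns false (i.e. when
+ // Zinc's CompileProgress.advance requests a stop); the compiler then polls isCancelled().
+ // A plain boolean is likely fine as long as both happen on the compiler thread.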
+ private final CompileProgress _progress;
+
+ public ProgressCallbackImpl(CompileProgress progress) {
+ _progress = progress;
+ }
+
+ @Override
+ public void cancel() {
+ _cancelled = true;
+ }
+
+ @Override
+ public boolean isCancelled() {
+ return _cancelled;
+ }
+
+ @Override
+ public void informUnitStarting(String phase, CompilationUnit unit) {
+ _progress.startUnit(phase, unit.source().file().path());
+ }
+
+ @Override
+ public boolean progress(int current, int total, String currPhase, String nextPhase) {
+ return _progress.advance(current, total, currPhase, nextPhase);
+ }
+}
diff --git a/sbt-bridge/src/dotty/tools/xsbt/TextEdit.java b/sbt-bridge/src/dotty/tools/xsbt/TextEdit.java
new file mode 100644
index 000000000000..df717446b2f2
--- /dev/null
+++ b/sbt-bridge/src/dotty/tools/xsbt/TextEdit.java
@@ -0,0 +1,23 @@
+package dotty.tools.xsbt;
+
+import xsbti.Position;
+
+final public class TextEdit implements xsbti.TextEdit {
+ private final Position _position;
+ private final String _newText;
+
+ public TextEdit(Position position, String newText) {
+ super();
+ this._position = position;
+ this._newText = newText;
+ }
+
+ public Position position() {
+ return _position;
+ }
+
+ public String newText() {
+ return _newText;
+ }
+
+}
diff --git a/sbt-bridge/src/dotty/tools/xsbt/WorkspaceEdit.java b/sbt-bridge/src/dotty/tools/xsbt/WorkspaceEdit.java
new file mode 100644
index 000000000000..153de63e3765
--- /dev/null
+++ b/sbt-bridge/src/dotty/tools/xsbt/WorkspaceEdit.java
@@ -0,0 +1,20 @@
+package dotty.tools.xsbt;
+
+import java.util.List;
+
+import xsbti.TextEdit;
+
+final public class WorkspaceEdit implements xsbti.WorkspaceEdit {
+
+ private final List<TextEdit> _changes;
+
+ public WorkspaceEdit(List<TextEdit> changes) {
+ super();
+ this._changes = changes;
+ }
+
+ public List<TextEdit> changes() {
+ return _changes;
+ }
+
+}
diff --git a/sbt-bridge/src/dotty/tools/xsbt/ZincPlainFile.java b/sbt-bridge/src/dotty/tools/xsbt/ZincPlainFile.java
deleted file mode 100644
index 68b3494cb84b..000000000000
--- a/sbt-bridge/src/dotty/tools/xsbt/ZincPlainFile.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Zinc - The incremental compiler for Scala.
- * Copyright Lightbend, Inc. and Mark Harrah
- */
-
-package dotty.tools.xsbt;
-
-import xsbti.PathBasedFile;
-
-public class ZincPlainFile extends dotty.tools.io.PlainFile {
- private final PathBasedFile _underlying;
-
- public ZincPlainFile(PathBasedFile underlying) {
- super(new dotty.tools.io.Path(underlying.toPath()));
- this._underlying = underlying;
- }
-
- public PathBasedFile underlying() {
- return _underlying;
- }
-}
\ No newline at end of file
diff --git a/sbt-bridge/src/dotty/tools/xsbt/ZincVirtualFile.java b/sbt-bridge/src/dotty/tools/xsbt/ZincVirtualFile.java
deleted file mode 100644
index a79686270f34..000000000000
--- a/sbt-bridge/src/dotty/tools/xsbt/ZincVirtualFile.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Zinc - The incremental compiler for Scala.
- * Copyright Lightbend, Inc.
and Mark Harrah - */ - -package dotty.tools.xsbt; - -import dotty.tools.io.Streamable; -import xsbti.VirtualFile; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; - -public class ZincVirtualFile extends dotty.tools.io.VirtualFile { - private final VirtualFile _underlying; - - public ZincVirtualFile(VirtualFile underlying) throws IOException { - super(underlying.name(), underlying.id()); - this._underlying = underlying; - - // fill in the content - OutputStream output = output(); - try { - Streamable.Bytes bytes = new Streamable.Bytes() { - @Override - public InputStream inputStream() { - return underlying.input(); - } - }; - output.write(bytes.toByteArray()); - } finally { - output.close(); - } - } - - public VirtualFile underlying() { - return _underlying; - } -} diff --git a/sbt-bridge/src/xsbt/CachedCompilerImpl.java b/sbt-bridge/src/xsbt/CachedCompilerImpl.java index 0b876475e51e..8b7779f9c9cb 100644 --- a/sbt-bridge/src/xsbt/CachedCompilerImpl.java +++ b/sbt-bridge/src/xsbt/CachedCompilerImpl.java @@ -13,6 +13,9 @@ import dotty.tools.dotc.Main; import dotty.tools.xsbt.InterfaceCompileFailed; import dotty.tools.xsbt.DelegatingReporter; +import dotty.tools.xsbt.OldIncrementalCallback; + +import dotty.tools.dotc.sbt.interfaces.IncrementalCallback; // deprecation warnings are suppressed because scala3-sbt-bridge must stay compatible with Zinc 1.3 // see https://github.com/lampepfl/dotty/issues/10816 @@ -60,9 +63,11 @@ synchronized public void run(File[] sources, DependencyChanges changes, Analysis return msg; }); + IncrementalCallback incCallback = new OldIncrementalCallback(callback); + Context ctx = new ContextBase().initialCtx().fresh() - .setSbtCallback(callback) - .setReporter(new DelegatingReporter(delegate)); + .setIncCallback(incCallback) + .setReporter(new DelegatingReporter(delegate, source -> source.file().absolutePath())); dotty.tools.dotc.reporting.Reporter reporter = Main.process(commandArguments(sources), ctx); if (reporter.hasErrors()) { diff --git a/sbt-bridge/src/xsbt/CompilerInterface.java b/sbt-bridge/src/xsbt/CompilerInterface.java index 3f26036eee6d..c48ee4c9d909 100644 --- a/sbt-bridge/src/xsbt/CompilerInterface.java +++ b/sbt-bridge/src/xsbt/CompilerInterface.java @@ -54,6 +54,7 @@ private boolean isClassLoaderValid() { } } + @SuppressWarnings("deprecation") public void run(File[] sources, DependencyChanges changes, AnalysisCallback callback, Logger log, Reporter delegate, CompileProgress progress, CachedCompiler cached) { cached.run(sources, changes, callback, log, delegate, progress); diff --git a/sbt-bridge/src/xsbt/DottydocRunner.java b/sbt-bridge/src/xsbt/DottydocRunner.java index e4c35a317e71..a91ff087cea9 100644 --- a/sbt-bridge/src/xsbt/DottydocRunner.java +++ b/sbt-bridge/src/xsbt/DottydocRunner.java @@ -53,7 +53,7 @@ public void run() { args = retained.toArray(new String[retained.size()]); Context ctx = new ContextBase().initialCtx().fresh() - .setReporter(new DelegatingReporter(delegate)); + .setReporter(new DelegatingReporter(delegate, source -> source.file().absolutePath())); try { Class dottydocMainClass = Class.forName("dotty.tools.dottydoc.Main"); diff --git a/sbt-bridge/test/xsbt/CompileProgressSpecification.scala b/sbt-bridge/test/xsbt/CompileProgressSpecification.scala new file mode 100644 index 000000000000..bcdac0547e75 --- /dev/null +++ b/sbt-bridge/test/xsbt/CompileProgressSpecification.scala @@ -0,0 +1,79 @@ +package xsbt + +import org.junit.{ Test, Ignore } +import org.junit.Assert._ + +/**Only does 
some rudimentary checks to assert compat with sbt. + * More thorough tests are found in compiler/test/dotty/tools/dotc/sbt/ProgressCallbackTest.scala + */ +class CompileProgressSpecification { + + @Test + def totalIsMoreWhenSourcePath = { + val srcA = """class A""" + val srcB = """class B""" + val extraC = """trait C""" // will only exist in the `-sourcepath`, causing a late compile + val extraD = """trait D""" // will only exist in the `-sourcepath`, causing a late compile + val srcE = """class E extends C""" // depends on class in the sourcepath + val srcF = """class F extends C, D""" // depends on classes in the sourcepath + + val compilerForTesting = new ScalaCompilerForUnitTesting + + val totalA = compilerForTesting.extractTotal(srcA)() + assertTrue("expected more than 1 unit of work for a single file", totalA > 1) + + val totalB = compilerForTesting.extractTotal(srcA, srcB)() + assertEquals("expected twice the work for two sources", totalA * 2, totalB) + + val totalC = compilerForTesting.extractTotal(srcA, srcE)(extraC) + assertEquals("expected 2x+1 the work for two sources, and 1 late compile", totalA * 2 + 1, totalC) + + val totalD = compilerForTesting.extractTotal(srcA, srcF)(extraC, extraD) + assertEquals("expected 2x+2 the work for two sources, and 2 late compiles", totalA * 2 + 2, totalD) + } + + @Test + def multipleFilesVisitSamePhases = { + val srcA = """class A""" + val srcB = """class B""" + val compilerForTesting = new ScalaCompilerForUnitTesting + val Seq(phasesA, phasesB) = compilerForTesting.extractEnteredPhases(srcA, srcB) + assertTrue("expected some phases, was empty", phasesA.nonEmpty) + assertEquals(phasesA, phasesB) + } + + @Test + def multipleFiles = { + val srcA = """class A""" + val srcB = """class B""" + val compilerForTesting = new ScalaCompilerForUnitTesting + val allPhases = compilerForTesting.extractProgressPhases(srcA, srcB) + assertTrue("expected some phases, was empty", allPhases.nonEmpty) + val someExpectedPhases = // just check some "fundamental" phases, don't put all phases to avoid brittleness + Set( + "parser", + "typer[indexing]", "typer[typechecking]", "typer[checkingJava]", + "sbt-deps", + "posttyper", + "sbt-api", + "SetRootTree", + "pickler", + "inlining", + "postInlining", + "staging", + "splicing", + "pickleQuotes", + "MegaPhase{pruneErasedDefs,...,arrayConstructors}", + "erasure", + "constructors", + "genSJSIR", + "genBCode" + ) + val missingExpectedPhases = someExpectedPhases -- allPhases.toSet + val msgIfMissing = + s"missing expected phases: $missingExpectedPhases. 
" + + s"Either the compiler phases changed, or the encoding of Run.SubPhases.subPhase" + assertTrue(msgIfMissing, missingExpectedPhases.isEmpty) + } + +} diff --git a/sbt-bridge/test/xsbt/DependencySpecification.scala b/sbt-bridge/test/xsbt/DependencySpecification.scala index a3fec950e120..54d37048dd09 100644 --- a/sbt-bridge/test/xsbt/DependencySpecification.scala +++ b/sbt-bridge/test/xsbt/DependencySpecification.scala @@ -209,4 +209,4 @@ class DependencySpecification { compilerForTesting.extractDependenciesFromSrcs(srcA, srcB, srcC, srcD) classDependencies } -} \ No newline at end of file +} diff --git a/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala b/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala index 819bedec3cbc..2b2b7d26c716 100644 --- a/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala +++ b/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala @@ -1,6 +1,7 @@ package xsbt import xsbti.UseScope +import ScalaCompilerForUnitTesting.Callbacks import org.junit.{ Test, Ignore } import org.junit.Assert._ @@ -226,7 +227,7 @@ class ExtractUsedNamesSpecification { def findPatMatUsages(in: String): Set[String] = { val compilerForTesting = new ScalaCompilerForUnitTesting - val (_, callback) = + val (_, Callbacks(callback, _)) = compilerForTesting.compileSrcs(List(List(sealedClass, in))) val clientNames = callback.usedNamesAndScopes.view.filterKeys(!_.startsWith("base.")) diff --git a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala index e58f9fefd92d..f17be692ee50 100644 --- a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala +++ b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala @@ -13,20 +13,40 @@ import dotty.tools.io.PlainFile.toPlainFile import dotty.tools.xsbt.CompilerBridge import TestCallback.ExtractedClassDependencies +import ScalaCompilerForUnitTesting.Callbacks + +object ScalaCompilerForUnitTesting: + case class Callbacks(analysis: TestCallback, progress: TestCompileProgress) /** * Provides common functionality needed for unit tests that require compiling * source code using Scala compiler. */ class ScalaCompilerForUnitTesting { - import scala.language.reflectiveCalls + + def extractEnteredPhases(srcs: String*): Seq[List[String]] = { + val (tempSrcFiles, Callbacks(_, testProgress)) = compileSrcs(srcs*) + val run = testProgress.runs.head + tempSrcFiles.map(src => run.unitPhases(src.id)) + } + + def extractTotal(srcs: String*)(extraSourcePath: String*): Int = { + val (tempSrcFiles, Callbacks(_, testProgress)) = compileSrcs(List(srcs.toList), extraSourcePath.toList) + val run = testProgress.runs.head + run.total + } + + def extractProgressPhases(srcs: String*): List[String] = { + val (_, Callbacks(_, testProgress)) = compileSrcs(srcs*) + testProgress.runs.head.phases + } /** * Compiles given source code using Scala compiler and returns API representation * extracted by ExtractAPI class. */ def extractApiFromSrc(src: String): Seq[ClassLike] = { - val (Seq(tempSrcFile), analysisCallback) = compileSrcs(src) + val (Seq(tempSrcFile), Callbacks(analysisCallback, _)) = compileSrcs(src) analysisCallback.apis(tempSrcFile) } @@ -35,7 +55,7 @@ class ScalaCompilerForUnitTesting { * extracted by ExtractAPI class. 
*/ def extractApisFromSrcs(srcs: List[String]*): Seq[Seq[ClassLike]] = { - val (tempSrcFiles, analysisCallback) = compileSrcs(srcs.toList) + val (tempSrcFiles, Callbacks(analysisCallback, _)) = compileSrcs(srcs.toList) tempSrcFiles.map(analysisCallback.apis) } @@ -53,7 +73,7 @@ class ScalaCompilerForUnitTesting { assertDefaultScope: Boolean = true ): Map[String, Set[String]] = { // we drop temp src file corresponding to the definition src file - val (Seq(_, tempSrcFile), analysisCallback) = compileSrcs(definitionSrc, actualSrc) + val (Seq(_, tempSrcFile), Callbacks(analysisCallback, _)) = compileSrcs(definitionSrc, actualSrc) if (assertDefaultScope) for { (className, used) <- analysisCallback.usedNamesAndScopes @@ -71,7 +91,7 @@ class ScalaCompilerForUnitTesting { * Only the names used in the last src file are returned. */ def extractUsedNamesFromSrc(sources: String*): Map[String, Set[String]] = { - val (srcFiles, analysisCallback) = compileSrcs(sources: _*) + val (srcFiles, Callbacks(analysisCallback, _)) = compileSrcs(sources*) srcFiles .map { srcFile => val classesInSrc = analysisCallback.classNames(srcFile).map(_._1) @@ -93,7 +113,7 @@ class ScalaCompilerForUnitTesting { * file system-independent way of testing dependencies between source code "files". */ def extractDependenciesFromSrcs(srcs: List[List[String]]): ExtractedClassDependencies = { - val (_, testCallback) = compileSrcs(srcs) + val (_, Callbacks(testCallback, _)) = compileSrcs(srcs) val memberRefDeps = testCallback.classDependencies collect { case (target, src, DependencyByMemberRef) => (src, target) @@ -122,50 +142,60 @@ class ScalaCompilerForUnitTesting { * The sequence of temporary files corresponding to passed snippets and analysis * callback is returned as a result. */ - def compileSrcs(groupedSrcs: List[List[String]]): (Seq[File], TestCallback) = { + def compileSrcs(groupedSrcs: List[List[String]], sourcePath: List[String] = Nil): (Seq[VirtualFile], Callbacks) = { val temp = IO.createTemporaryDirectory val analysisCallback = new TestCallback + val testProgress = new TestCompileProgress val classesDir = new File(temp, "classes") classesDir.mkdir() val bridge = new CompilerBridge - val files = for ((compilationUnit, unitId) <- groupedSrcs.zipWithIndex) yield { - val srcFiles = compilationUnit.toSeq.zipWithIndex.map { + val files = for ((compilationUnits, unitId) <- groupedSrcs.zipWithIndex) yield { + val extraFiles = sourcePath.toSeq.zipWithIndex.map { + case (src, i) => + val fileName = s"Extra-$unitId-$i.scala" + prepareSrcFile(temp, fileName, src) + } + val srcFiles = compilationUnits.toSeq.zipWithIndex.map { (src, i) => val fileName = s"Test-$unitId-$i.scala" prepareSrcFile(temp, fileName, src) } - val virtualSrcFiles = srcFiles.map(file => TestVirtualFile(file.toPath)).toArray + val virtualSrcFiles = srcFiles.toArray val classesDirPath = classesDir.getAbsolutePath.toString val output = new SingleOutput: def getOutputDirectory() = classesDir + val maybeSourcePath = if extraFiles.isEmpty then Nil else List("-sourcepath", temp.getAbsolutePath.toString) + bridge.run( - virtualSrcFiles.toArray, + virtualSrcFiles, new TestDependencyChanges, - Array("-Yforce-sbt-phases", "-classpath", classesDirPath, "-usejavacp", "-d", classesDirPath), + Array("-Yforce-sbt-phases", "-classpath", classesDirPath, "-usejavacp", "-d", classesDirPath) ++ maybeSourcePath, output, analysisCallback, new TestReporter, - new CompileProgress {}, + testProgress, new TestLogger ) + testProgress.completeRun() + srcFiles } - (files.flatten.toSeq, 
analysisCallback)
+ (files.flatten.toSeq, Callbacks(analysisCallback, testProgress))
}
- def compileSrcs(srcs: String*): (Seq[File], TestCallback) = {
+ def compileSrcs(srcs: String*): (Seq[VirtualFile], Callbacks) = {
compileSrcs(List(srcs.toList))
}
- private def prepareSrcFile(baseDir: File, fileName: String, src: String): File = {
+ private def prepareSrcFile(baseDir: File, fileName: String, src: String): VirtualFile = {
val srcFile = new File(baseDir, fileName)
IO.write(srcFile, src)
- srcFile
+ new TestVirtualFile(srcFile.toPath)
}
}
diff --git a/sbt-bridge/test/xsbt/TestVirtualFile.scala b/sbt-bridge/test/xsbt/TestVirtualFile.scala
index db00038272a8..2c7729d0a6cd 100644
--- a/sbt-bridge/test/xsbt/TestVirtualFile.scala
+++ b/sbt-bridge/test/xsbt/TestVirtualFile.scala
@@ -1,6 +1,6 @@
package xsbt
-import xsbti.PathBasedFile
+import xsbti.{PathBasedFile, VirtualFileRef}
import java.nio.file.{Files, Path}
import scala.io.Source
import scala.io.Codec
@@ -8,7 +8,17 @@ import scala.io.Codec
class TestVirtualFile(path: Path) extends PathBasedFile:
override def contentHash(): Long = ???
override def input(): java.io.InputStream = Files.newInputStream(path)
- override def id(): String = name()
+ lazy val absolutePath: String = path.toAbsolutePath.toString()
+ override def id(): String = absolutePath
override def name(): String = path.toFile.getName
override def names(): Array[String] = ???
override def toPath(): Path = path
+
+
+ override def hashCode(): Int = absolutePath.hashCode()
+
+ override def equals(x: Any): Boolean = this.eq(x.asInstanceOf[AnyRef]) || x.match {
+ case vf: VirtualFileRef => vf.id() == id()
+ case _ => false // avoid a MatchError when compared against unrelated objects
+ }
+
+
diff --git a/sbt-bridge/test/xsbti/TestCallback.scala b/sbt-bridge/test/xsbti/TestCallback.scala
index a0919dc69bc4..3398590b169a 100644
--- a/sbt-bridge/test/xsbti/TestCallback.scala
+++ b/sbt-bridge/test/xsbti/TestCallback.scala
@@ -1,4 +1,4 @@
-/** Copied from https://github.com/sbt/sbt/blob/0.13/interface/src/test/scala/xsbti/TestCallback.scala */
+// Taken from https://github.com/sbt/zinc/blob/aa1c04f445092e87f76aaceee4da61ea0724419e/internal/zinc-testing/src/main/scala/xsbti/TestCallback.scala
package xsbti
import java.io.File
@@ -8,61 +8,121 @@ import xsbti.VirtualFileRef
import xsbti.api.ClassLike
import xsbti.api.DependencyContext
import DependencyContext._
-import java.util.EnumSet
+import java.{util => ju}
+import ju.Optional
+
+class TestCallback extends AnalysisCallback2 {
+ case class TestUsedName(name: String, scopes: ju.EnumSet[UseScope])
-class TestCallback extends AnalysisCallback
-{
- case class TestUsedName(name: String, scopes: EnumSet[UseScope])
val classDependencies = new ArrayBuffer[(String, String, DependencyContext)]
- val binaryDependencies = new ArrayBuffer[(File, String, String, File, DependencyContext)]
- val products = new ArrayBuffer[(File, File)]
- val usedNamesAndScopes = scala.collection.mutable.Map.empty[String, Set[TestUsedName]].withDefaultValue(Set.empty)
- val classNames = scala.collection.mutable.Map.empty[File, Set[(String, String)]].withDefaultValue(Set.empty)
- val apis: scala.collection.mutable.Map[File, Seq[ClassLike]] = scala.collection.mutable.Map.empty
+ val binaryDependencies =
+ new ArrayBuffer[(Path, String, String, VirtualFileRef, DependencyContext)]
+ val productClassesToSources =
+ scala.collection.mutable.Map.empty[Path, VirtualFileRef]
+ val usedNamesAndScopes = scala.collection.mutable.Map
+ .empty[String, Set[TestUsedName]]
+ .withDefaultValue(Set.empty)
+ val classNames = scala.collection.mutable.Map
.empty[VirtualFileRef, Set[(String, String)]] + .withDefaultValue(Set.empty) + val apis: scala.collection.mutable.Map[VirtualFileRef, Seq[ClassLike]] = + scala.collection.mutable.Map.empty def usedNames = usedNamesAndScopes.view.mapValues(_.map(_.name)).toMap - override def startSource(source: File): Unit = { - assert(!apis.contains(source), s"startSource can be called only once per source file: $source") + override def startSource(source: File): Unit = ??? + override def startSource(source: VirtualFile): Unit = { + assert( + !apis.contains(source), + s"startSource can be called only once per source file: $source" + ) apis(source) = Seq.empty } - override def startSource(source: VirtualFile): Unit = ??? - override def binaryDependency(binary: File, name: String, fromClassName: String, source: File, context: DependencyContext): Unit = { + override def binaryDependency( + binary: File, + name: String, + fromClassName: String, + source: File, + context: DependencyContext + ): Unit = ??? + override def binaryDependency( + binary: Path, + name: String, + fromClassName: String, + source: VirtualFileRef, + context: DependencyContext + ): Unit = { binaryDependencies += ((binary, name, fromClassName, source, context)) } - override def binaryDependency(binary: Path, name: String, fromClassName: String, source: VirtualFileRef, context: DependencyContext): Unit = ??? - - override def generatedNonLocalClass(source: File, - module: File, - binaryClassName: String, - srcClassName: String): Unit = { - products += ((source, module)) - classNames(source) += ((srcClassName, binaryClassName)) + + override def generatedNonLocalClass( + source: File, + module: File, + binaryClassName: String, + srcClassName: String + ): Unit = ??? + + override def generatedNonLocalClass( + sourceFile: VirtualFileRef, + classFile: Path, + binaryClassName: String, + srcClassName: String + ): Unit = { + productClassesToSources += ((classFile, sourceFile)) + classNames(sourceFile) += ((srcClassName, binaryClassName)) () } - override def generatedNonLocalClass(source: VirtualFileRef, module: Path, binaryClassName: String, srcClassName: String): Unit = ??? - override def generatedLocalClass(source: File, module: File): Unit = { - products += ((source, module)) + override def generatedLocalClass(source: File, module: File): Unit = ??? + override def generatedLocalClass( + sourceFile: VirtualFileRef, + classFile: Path + ): Unit = { + productClassesToSources += ((classFile, sourceFile)) () } - override def generatedLocalClass(source: VirtualFileRef, module: Path): Unit = ??? - override def classDependency(onClassName: String, sourceClassName: String, context: DependencyContext): Unit = { - if (onClassName != sourceClassName) classDependencies += ((onClassName, sourceClassName, context)) + override def classDependency( + onClassName: String, + sourceClassName: String, + context: DependencyContext + ): Unit = { + if (onClassName != sourceClassName) + classDependencies += ((onClassName, sourceClassName, context)) } - override def usedName(className: String, name: String, scopes: EnumSet[UseScope]): Unit = { + override def usedName( + className: String, + name: String, + scopes: ju.EnumSet[UseScope] + ): Unit = { usedNamesAndScopes(className) += TestUsedName(name, scopes) } - override def api(source: File, classApi: ClassLike): Unit = { + override def api(source: File, classApi: ClassLike): Unit = ??? 
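+  // Like the other File-based overloads, the variant above is stubbed with ???:
+  // these tests only exercise the VirtualFileRef-based callbacks.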
+ override def api(source: VirtualFileRef, classApi: ClassLike): Unit = { apis(source) = classApi +: apis(source) } - override def api(source: VirtualFileRef, classApi: ClassLike): Unit = ??? - override def problem(category: String, pos: xsbti.Position, message: String, severity: xsbti.Severity, reported: Boolean): Unit = () + override def problem( + category: String, + pos: xsbti.Position, + message: String, + severity: xsbti.Severity, + reported: Boolean + ): Unit = () + override def problem2( + category: String, + pos: Position, + msg: String, + severity: Severity, + reported: Boolean, + rendered: Optional[String], + diagnosticCode: Optional[xsbti.DiagnosticCode], + diagnosticRelatedInformation: ju.List[xsbti.DiagnosticRelatedInformation], + actions: ju.List[xsbti.Action] + ): Unit = () override def dependencyPhaseCompleted(): Unit = () override def apiPhaseCompleted(): Unit = () override def enabled(): Boolean = true @@ -71,29 +131,39 @@ class TestCallback extends AnalysisCallback override def mainClass(source: VirtualFileRef, className: String): Unit = ??? override def classesInOutputJar(): java.util.Set[String] = ??? - override def getPickleJarPair(): java.util.Optional[xsbti.T2[Path, Path]] = ??? + override def getPickleJarPair(): java.util.Optional[xsbti.T2[Path, Path]] = + ??? override def isPickleJava(): Boolean = ??? } object TestCallback { - case class ExtractedClassDependencies(memberRef: Map[String, Set[String]], - inheritance: Map[String, Set[String]], - localInheritance: Map[String, Set[String]]) + case class ExtractedClassDependencies( + memberRef: Map[String, Set[String]], + inheritance: Map[String, Set[String]], + localInheritance: Map[String, Set[String]] + ) object ExtractedClassDependencies { def fromPairs( - memberRefPairs: collection.Seq[(String, String)], - inheritancePairs: collection.Seq[(String, String)], - localInheritancePairs: collection.Seq[(String, String)] - ): ExtractedClassDependencies = { - ExtractedClassDependencies(pairsToMultiMap(memberRefPairs), + memberRefPairs: collection.Seq[(String, String)], + inheritancePairs: collection.Seq[(String, String)], + localInheritancePairs: collection.Seq[(String, String)] + ): ExtractedClassDependencies = { + ExtractedClassDependencies( + pairsToMultiMap(memberRefPairs), pairsToMultiMap(inheritancePairs), - pairsToMultiMap(localInheritancePairs)) + pairsToMultiMap(localInheritancePairs) + ) } - private def pairsToMultiMap[A, B](pairs: collection.Seq[(A, B)]): Map[A, Set[B]] = { - pairs.groupBy(_._1).view.mapValues(values => values.map(_._2).toSet) - .toMap.withDefaultValue(Set.empty) + private def pairsToMultiMap[A, B]( + pairs: collection.Seq[(A, B)] + ): Map[A, Set[B]] = { + pairs + .groupBy(_._1) + .view + .mapValues(values => values.map(_._2).toSet) + .toMap + .withDefaultValue(Set.empty) } } } - diff --git a/sbt-bridge/test/xsbti/TestCompileProgress.scala b/sbt-bridge/test/xsbti/TestCompileProgress.scala new file mode 100644 index 000000000000..d5dc81dfda24 --- /dev/null +++ b/sbt-bridge/test/xsbti/TestCompileProgress.scala @@ -0,0 +1,33 @@ +package xsbti + +import xsbti.compile.CompileProgress + +import scala.collection.mutable + +class TestCompileProgress extends CompileProgress: + class Run: + private[TestCompileProgress] val _phases: mutable.Set[String] = mutable.LinkedHashSet.empty + private[TestCompileProgress] val _unitPhases: mutable.Map[String, mutable.Set[String]] = mutable.LinkedHashMap.empty + private[TestCompileProgress] var _latestTotal: Int = 0 + + def phases: List[String] = _phases.toList + def 
unitPhases: collection.MapView[String, List[String]] = _unitPhases.view.mapValues(_.toList) + def total: Int = _latestTotal + + private val _runs: mutable.ListBuffer[Run] = mutable.ListBuffer.empty + private var _currentRun: Run = new Run + + def runs: List[Run] = _runs.toList + + def completeRun(): Unit = + _runs += _currentRun + _currentRun = new Run + + override def startUnit(phase: String, unitPath: String): Unit = + _currentRun._unitPhases.getOrElseUpdate(unitPath, mutable.LinkedHashSet.empty) += phase + + override def advance(current: Int, total: Int, prevPhase: String, nextPhase: String): Boolean = + _currentRun._phases += prevPhase + _currentRun._phases += nextPhase + _currentRun._latestTotal = total + true diff --git a/sbt-test/compilerReporter/i14576/Test.scala b/sbt-test/compilerReporter/i14576/Test.scala index d94a49145f81..4f65c2267134 100644 --- a/sbt-test/compilerReporter/i14576/Test.scala +++ b/sbt-test/compilerReporter/i14576/Test.scala @@ -10,8 +10,5 @@ object Test: def f(x: Text) = println(x.str) f("abc") - // private[this] and = _ are deprecated under -source:future - private[this] var x: AnyRef = _ - - // under -source:future, `_` is deprecated for wildcard arguments of types: use `?` instead - val xs: List[_] = Nil + @deprecated("", "") def deprecatedFun(): Unit = () + deprecatedFun() diff --git a/sbt-test/compilerReporter/i14576/build.sbt b/sbt-test/compilerReporter/i14576/build.sbt index 9831c23c103e..cc0402a7ba5e 100644 --- a/sbt-test/compilerReporter/i14576/build.sbt +++ b/sbt-test/compilerReporter/i14576/build.sbt @@ -10,7 +10,7 @@ lazy val resetMessages = taskKey[Unit]("empties the messages list") lazy val root = (project in file(".")) .settings( - scalacOptions += "-source:future", + scalacOptions += "-source:future-migration", extraAppenders := { s => Seq(ConsoleAppender(FakePrintWriter)) }, assertFeatureSummary := { assert { @@ -24,7 +24,7 @@ lazy val root = (project in file(".")) }, assertDeprecationSummary := { assert { - FakePrintWriter.messages.exists(_.contains("there were 3 deprecation warnings; re-run with -deprecation for details")) + FakePrintWriter.messages.exists(_.contains("there was 1 deprecation warning; re-run with -deprecation for details")) } }, assertNoDeprecationSummary := { diff --git a/sbt-test/compilerReporter/simple/Source.scala b/sbt-test/compilerReporter/simple/Source.scala index 6f06785990c3..fcfd8672475b 100644 --- a/sbt-test/compilerReporter/simple/Source.scala +++ b/sbt-test/compilerReporter/simple/Source.scala @@ -7,4 +7,7 @@ trait Wr { object Er { val a = er1 -} \ No newline at end of file + + def f: Int = 1 + val x = f _ +} diff --git a/sbt-test/compilerReporter/simple/project/Reporter.scala b/sbt-test/compilerReporter/simple/project/Reporter.scala index 6c3b60cebb3a..a22b5cfb904d 100644 --- a/sbt-test/compilerReporter/simple/project/Reporter.scala +++ b/sbt-test/compilerReporter/simple/project/Reporter.scala @@ -2,6 +2,8 @@ import sbt._ import Keys._ import KeyRanks.DTask +import scala.jdk.CollectionConverters.* + object Reporter { import xsbti.{Reporter, Problem, Position, Severity} @@ -27,27 +29,62 @@ object Reporter { check := (Compile / compile).failure.map(_ => { val problems = reporter.problems println(problems.toList) - assert(problems.size == 1) - // make sure position reported by zinc are proper - val mainProblem = problems.head + problems match { + case Array(err, warning) => + // Checking the error reported + val eline = err.position().line() + assert(eline.isPresent() == true) + assert(eline.get() == 9) + + val 
ediagnosticCode = err.diagnosticCode() + assert(ediagnosticCode.isPresent() == true) + val ecode = ediagnosticCode.get().code() + assert(ecode == "6") + + val epointer = err.position().pointer() + assert(epointer.isPresent() == true) + assert(epointer.get() == 10) + + assert(err.position.offset.isPresent) + + assert(err.severity == Severity.Error) // not found: er1, + + // Checking the warning reported + + val wline = warning.position().line() + assert(wline.isPresent() == true) + assert(wline.get() == 12) + + val wdiagnosticCode = warning.diagnosticCode() + assert(wdiagnosticCode.isPresent() == true) + val wcode = wdiagnosticCode.get().code() + assert(wcode == "99") + + val wpointer = warning.position().pointer() + assert(wpointer.isPresent() == true) + assert(wpointer.get() == 12) + + assert(warning.position.offset.isPresent) + + assert(warning.severity == Severity.Warn) // Only function types can be followed by _ but the current expression has type Int + + val actions = warning.actions().asScala.toList + + assert(actions.size == 1) + + val action = actions.head - val line = mainProblem.position().line() - assert(line.isPresent() == true) - assert(line.get() == 9) + assert(action.title() == "Rewrite to function value") - val diagnosticCode = mainProblem.diagnosticCode() - assert(diagnosticCode.isPresent() == true) - val code = diagnosticCode.get() - assert(diagnosticCode.get().code() == "6") + val edits = action.edit().changes().asScala.toList - val pointer = mainProblem.position().pointer() - assert(pointer.isPresent() == true) - assert(pointer.get() == 10) + assert(edits.size == 2) - assert(problems.forall(_.position.offset.isPresent)) + case somethingElse => + assert(false, s"Only expected to have a single error and a single warning, but instead got: ${somethingElse.toString}") - assert(problems.count(_.severity == Severity.Error) == 1) // not found: er1, + } }).value ) } diff --git a/sbt-test/java-compat/i18764/Test.scala b/sbt-test/java-compat/i18764/Test.scala new file mode 100644 index 000000000000..030afb46b953 --- /dev/null +++ b/sbt-test/java-compat/i18764/Test.scala @@ -0,0 +1,4 @@ + +import org.jooq.impl.TableRecordImpl + +class TRecord extends TableRecordImpl[TRecord](null) {} diff --git a/sbt-test/java-compat/i18764/build.sbt b/sbt-test/java-compat/i18764/build.sbt new file mode 100644 index 000000000000..2ad74478d52b --- /dev/null +++ b/sbt-test/java-compat/i18764/build.sbt @@ -0,0 +1,9 @@ + +scalaVersion := sys.props("plugin.scalaVersion") + +lazy val dependencies = Seq( + "org.jooq" % "jooq-codegen" % "3.18.7", +) + +lazy val jooqtest = (project in file(".")) + .settings(libraryDependencies ++= dependencies) diff --git a/sbt-test/java-compat/i18764/test b/sbt-test/java-compat/i18764/test new file mode 100644 index 000000000000..5df2af1f3956 --- /dev/null +++ b/sbt-test/java-compat/i18764/test @@ -0,0 +1 @@ +> compile diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/a/src/main/scala/a/A.java b/sbt-test/pipelining/Yjava-tasty-annotation/a/src/main/scala/a/A.java new file mode 100644 index 000000000000..49c55a7c4d9c --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-annotation/a/src/main/scala/a/A.java @@ -0,0 +1,11 @@ +// this test ensures that it is possible to read a java annotation from TASTy. 
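+// (`b/B.scala` applies `@A` while compiling against only the pickled jar produced via -Yjava-tasty-output)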
+package a; + +import java.lang.annotation.*; + + +@Documented +@Retention(RetentionPolicy.CLASS) +@Target({ ElementType.METHOD, ElementType.CONSTRUCTOR, ElementType.FIELD, ElementType.TYPE, ElementType.PACKAGE }) +public @interface A { +} diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/a/src/main/scala/a/package.scala b/sbt-test/pipelining/Yjava-tasty-annotation/a/src/main/scala/a/package.scala new file mode 100644 index 000000000000..93f99e9892fe --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-annotation/a/src/main/scala/a/package.scala @@ -0,0 +1,2 @@ +// THIS FILE EXISTS SO THAT `A.java` WILL BE COMPILED BY SCALAC +package a diff --git a/scaladoc/scripts/tocheck.txt b/sbt-test/pipelining/Yjava-tasty-annotation/b-alt/.keep similarity index 100% rename from scaladoc/scripts/tocheck.txt rename to sbt-test/pipelining/Yjava-tasty-annotation/b-alt/.keep diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Yjava-tasty-annotation/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..51c7322bf264 --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-annotation/b/src/main/scala/b/B.scala @@ -0,0 +1,9 @@ +package b + +import a.A + +object B { + @A + val foo = 23 +} + diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/build.sbt b/sbt-test/pipelining/Yjava-tasty-annotation/build.sbt new file mode 100644 index 000000000000..18f6b8224968 --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-annotation/build.sbt @@ -0,0 +1,21 @@ +lazy val a = project.in(file("a")) + .settings( + scalacOptions += "-Yjava-tasty", // enable pickling of java signatures + scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-annotation-java-tasty.jar").toString), + scalacOptions += "-Ycheck:all", + Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-annotation-classes"), // send classfiles to a different directory + ) + +lazy val b = project.in(file("b")) + .settings( + Compile / unmanagedClasspath := Seq(Attributed.blank((ThisBuild / baseDirectory).value / "a-annotation-java-tasty.jar")), + scalacOptions += "-Ycheck:all", + ) + +// same as b, but adds the real classes to the classpath instead of the tasty jar +lazy val bAlt = project.in(file("b-alt")) + .settings( + Compile / sources := (b / Compile / sources).value, + Compile / unmanagedClasspath := Seq(Attributed.blank((ThisBuild / baseDirectory).value / "a-annotation-classes")), + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Yjava-tasty-annotation/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-annotation/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/test b/sbt-test/pipelining/Yjava-tasty-annotation/test new file mode 100644 index 000000000000..6f7f57e91ab1 --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-annotation/test @@ -0,0 +1,5 @@ +> a/compile +# Test depending on a java compiled annotation through TASTy +> b/compile +# double check against the real java classes +> bAlt/compile diff 
--git a/sbt-test/pipelining/Yjava-tasty-enum/a/src/main/scala/a/A.java b/sbt-test/pipelining/Yjava-tasty-enum/a/src/main/scala/a/A.java new file mode 100644 index 000000000000..26bf8a246774 --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-enum/a/src/main/scala/a/A.java @@ -0,0 +1,7 @@ +// this test ensures that ExtractAPI does not cause a crash +// when looking at sealedDescendants of a Java enum. +package a; + +public enum A { + X, Y, Z; +} diff --git a/sbt-test/pipelining/Yjava-tasty-enum/a/src/main/scala/a/package.scala b/sbt-test/pipelining/Yjava-tasty-enum/a/src/main/scala/a/package.scala new file mode 100644 index 000000000000..93f99e9892fe --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-enum/a/src/main/scala/a/package.scala @@ -0,0 +1,2 @@ +// THIS FILE EXISTS SO THAT `A.java` WILL BE COMPILED BY SCALAC +package a diff --git a/tests/neg-custom-args/fatal-warnings/i6190a.check b/sbt-test/pipelining/Yjava-tasty-enum/b-alt/.keep similarity index 100% rename from tests/neg-custom-args/fatal-warnings/i6190a.check rename to sbt-test/pipelining/Yjava-tasty-enum/b-alt/.keep diff --git a/sbt-test/pipelining/Yjava-tasty-enum/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Yjava-tasty-enum/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..a648bb4e83d6 --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-enum/b/src/main/scala/b/B.scala @@ -0,0 +1,17 @@ +package b + +import a.A + +object B { + + def formattedEnum(e: A): String = e match { + case A.X => "X" + case A.Y => "Y" + case A.Z => "Z" + } + + @main def test = + assert(A.values.toList == List(A.X, A.Y, A.Z)) + assert(A.values.toList.map(formattedEnum) == List("X", "Y", "Z")) +} + diff --git a/sbt-test/pipelining/Yjava-tasty-enum/build.sbt b/sbt-test/pipelining/Yjava-tasty-enum/build.sbt new file mode 100644 index 000000000000..aca2391987e9 --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-enum/build.sbt @@ -0,0 +1,31 @@ +lazy val a = project.in(file("a")) + .settings( + compileOrder := CompileOrder.Mixed, // ensure we send java sources to Scala compiler + scalacOptions += "-Yjava-tasty", // enable pickling of java signatures + scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty.jar").toString), + scalacOptions += "-Ycheck:all", + Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-enum-classes"), // send classfiles to a different directory + ) + + +lazy val b = project.in(file("b")) + .settings( + Compile / unmanagedClasspath := Seq(Attributed.blank((ThisBuild / baseDirectory).value / "a-enum-java-tasty.jar")), + scalacOptions += "-Ycheck:all", + ) + .settings( + fork := true, // we have to fork the JVM if we actually want to run the code with correct failure semantics + Runtime / unmanagedClasspath += Attributed.blank((ThisBuild / baseDirectory).value / "a-enum-classes"), // make sure the java classes are visible at runtime + ) + +// same as b, but adds the real classes to the classpath instead of the tasty jar +lazy val bAlt = project.in(file("b-alt")) + .settings( + Compile / sources := (b / Compile / sources).value, + Compile / unmanagedClasspath := Seq(Attributed.blank((ThisBuild / baseDirectory).value / "a-enum-classes")), + scalacOptions += "-Ycheck:all", + ) + .settings( + fork := true, // we have to fork the JVM if we actually want to run the code with correct failure semantics + Runtime / unmanagedClasspath += Attributed.blank((ThisBuild / baseDirectory).value / "a-enum-classes"), // make sure the java classes are visible at runtime + ) 
diff --git a/sbt-test/pipelining/Yjava-tasty-enum/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Yjava-tasty-enum/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-enum/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/Yjava-tasty-enum/test b/sbt-test/pipelining/Yjava-tasty-enum/test new file mode 100644 index 000000000000..fa53c47aea59 --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-enum/test @@ -0,0 +1,5 @@ +> a/compile +# test depending on a java compiled enum through TASTy +> b/run +# double check against the real java classes +> bAlt/run diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/a/src/main/scala/a/A.java b/sbt-test/pipelining/Yjava-tasty-from-tasty/a/src/main/scala/a/A.java new file mode 100644 index 000000000000..381da612df90 --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-from-tasty/a/src/main/scala/a/A.java @@ -0,0 +1,5 @@ +package a; + +public class A { + public String VALUE = "A"; +} diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/a/src/main/scala/a/package.scala b/sbt-test/pipelining/Yjava-tasty-from-tasty/a/src/main/scala/a/package.scala new file mode 100644 index 000000000000..8cfc7fa44d87 --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-from-tasty/a/src/main/scala/a/package.scala @@ -0,0 +1,2 @@ +// THE PURPOSE OF THIS FILE IS TO MAKE SBT SEND A.java TO THE SCALA COMPILER +package a diff --git a/tests/run-custom-args/erased/erased-select-prefix.check b/sbt-test/pipelining/Yjava-tasty-from-tasty/a_from_tasty/.keep similarity index 100% rename from tests/run-custom-args/erased/erased-select-prefix.check rename to sbt-test/pipelining/Yjava-tasty-from-tasty/a_from_tasty/.keep diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/b-alt/.keep b/sbt-test/pipelining/Yjava-tasty-from-tasty/b-alt/.keep new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Yjava-tasty-from-tasty/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..43a45ae53ce2 --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-from-tasty/b/src/main/scala/b/B.scala @@ -0,0 +1,9 @@ +package b + +object B { + val A_VALUE = (new a.A).VALUE + + @main def test = { + assert(A_VALUE == "A", s"actually was $A_VALUE") + } +} diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/build.sbt b/sbt-test/pipelining/Yjava-tasty-from-tasty/build.sbt new file mode 100644 index 000000000000..e4b15d3d9c7e --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-from-tasty/build.sbt @@ -0,0 +1,49 @@ +// `a` contains mixed java/scala sources so sbt will send java sources to Scala compiler. 
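+// The chain under test: `a` pickles A.java into a-pre-java-tasty.jar, `a_from_tasty`
+// re-pickles that jar with -from-tasty, and `b` runs against the re-pickled jar
+// (`bAlt` double-checks against the real classfiles).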
+lazy val a = project.in(file("a")) + .settings( + compileOrder := CompileOrder.Mixed, // ensure we send java sources to Scala compiler + scalacOptions += "-Yjava-tasty", // enable pickling of java signatures + scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-pre-java-tasty.jar").toString), + scalacOptions += "-Ycheck:all", + Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-pre-classes"), // send classfiles to a different directory + ) + +// recompile `a` with `-from-tasty` flag to test idempotent read/write java signatures. +// Requires -Yjava-tasty to be set in order to read them. +lazy val a_from_tasty = project.in(file("a_from_tasty")) + .settings( + Compile / sources := Seq((ThisBuild / baseDirectory).value / "a-pre-java-tasty.jar"), + Compile / unmanagedClasspath := Seq(Attributed.blank((ThisBuild / baseDirectory).value / "a-pre-java-tasty.jar")), + scalacOptions += "-from-tasty", // read the jar file tasties as the source files + scalacOptions += "-Yjava-tasty", + scalacOptions += "-Yallow-outline-from-tasty", // allow outline signatures to be read with -from-tasty + scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a_from_tasty-java-tasty.jar").toString), + scalacOptions += "-Ycheck:all", + Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a_from_tasty-classes"), // send classfiles to a different directory + ) + +lazy val b = project.in(file("b")) + .settings( + scalacOptions += "-Ycheck:all", + Compile / unmanagedClasspath := Seq(Attributed.blank((ThisBuild / baseDirectory).value / "a_from_tasty-java-tasty.jar")), + ) + .settings( + // we have to fork the JVM if we actually want to run the code with correct failure semantics + fork := true, + // make sure the java classes are visible at runtime + Runtime / unmanagedClasspath += Attributed.blank((ThisBuild / baseDirectory).value / "a-pre-classes"), + ) + +// same as b, but adds the real classes to the classpath instead of the tasty jar +lazy val bAlt = project.in(file("b-alt")) + .settings( + scalacOptions += "-Ycheck:all", + Compile / sources := (b / Compile / sources).value, + Compile / unmanagedClasspath := Seq(Attributed.blank((ThisBuild / baseDirectory).value / "a-pre-classes")), + ) + .settings( + // we have to fork the JVM if we actually want to run the code with correct failure semantics + fork := true, + // make sure the java classes are visible at runtime + Runtime / unmanagedClasspath += Attributed.blank((ThisBuild / baseDirectory).value / "a-pre-classes"), + ) diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Yjava-tasty-from-tasty/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-from-tasty/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/test b/sbt-test/pipelining/Yjava-tasty-from-tasty/test new file mode 100644 index 000000000000..b4ce2965b995 --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-from-tasty/test @@ -0,0 +1,7 @@ +> a/compile +# test reading java tasty with -from-tasty +> a_from_tasty/compile +# 
test java tasty is still written even with -from-tasty
+> b/run
+# double check against the real java classes
+> bAlt/run
diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/a-check/.keep b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/a-check/.keep
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/a/src/main/scala/a/A.java b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/a/src/main/scala/a/A.java
new file mode 100644
index 000000000000..b798a9dedce9
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/a/src/main/scala/a/A.java
@@ -0,0 +1,56 @@
+// this test ensures that Object can accept Any from Scala
+// see Definitions.FromJavaObjectSymbol
+package a;
+
+public class A {
+
+  public static class Inner<T> extends Object {
+    public T field1;
+    public T getter1() { return field1; }
+    public Object field2;
+    public Object getter2() { return field2; }
+
+    public Inner(T param1, Object param2) {
+      this.field1 = param1;
+      this.field2 = param2;
+    }
+
+    public void meth1(T arg) {}
+    public <U> void meth2(U arg) {}
+  }
+
+  public static class Inner_sel<T> extends java.lang.Object {
+    public T field1;
+    public T getter1() { return field1; }
+    public java.lang.Object field2;
+    public java.lang.Object getter2() { return field2; }
+
+    public Inner_sel(T param1, java.lang.Object param2) {
+      this.field1 = param1;
+      this.field2 = param2;
+    }
+
+    public void meth1(T arg) {}
+    public <U> void meth2(U arg) {}
+  }
+
+  // 1. At the top level:
+  public void meth1(Object arg) {}
+  public void meth1_sel(java.lang.Object arg) {}
+  public <T> void meth2(T arg) {} // T implicitly extends Object
+
+  // 2. In a class type parameter:
+  public void meth3(scala.collection.immutable.List<Object> arg) {}
+  public void meth3_sel(scala.collection.immutable.List<java.lang.Object> arg) {}
+  public <T> void meth4(scala.collection.immutable.List<T> arg) {}
+
+  // 3. As the type parameter of an array:
+  public void meth5(Object[] arg) {}
+  public void meth5_sel(java.lang.Object[] arg) {}
+  public <T> void meth6(T[] arg) {}
+
+  // 4. As the repeated argument of a varargs method:
+  public void meth7(Object... args) {}
+  public void meth7_sel(java.lang.Object... args) {}
+  public <T> void meth8(T... args) {}
+}
diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/a/src/main/scala/a/AImport.java b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/a/src/main/scala/a/AImport.java
new file mode 100644
index 000000000000..6dd2608883d8
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/a/src/main/scala/a/AImport.java
@@ -0,0 +1,35 @@
+// this test ensures that Object can accept Any from Scala
+// see Definitions.FromJavaObjectSymbol
+package a;
+
+import java.lang.Object;
+
+// same signatures that reference Object explicitly from A.java, but with the java.lang.Object import
+public class AImport {
+
+  public static class Inner<T> extends Object {
+    public T field1;
+    public T getter1() { return field1; }
+    public Object field2;
+    public Object getter2() { return field2; }
+
+    public Inner(T param1, Object param2) {
+      this.field1 = param1;
+      this.field2 = param2;
+    }
+
+    public void meth1(T arg) {}
+  }
+
+  // 1. At the top level:
+  public void meth1(Object arg) {}
+
+  // 2. In a class type parameter:
+  public void meth3(scala.collection.immutable.List<Object> arg) {}
+
+  // 3. As the type parameter of an array:
+  public void meth5(Object[] arg) {}
+
+  // 4. As the repeated argument of a varargs method:
+  public void meth7(Object... 
args) {} +} diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/a/src/main/scala/a/package.scala b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/a/src/main/scala/a/package.scala new file mode 100644 index 000000000000..93f99e9892fe --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/a/src/main/scala/a/package.scala @@ -0,0 +1,2 @@ +// THIS FILE EXISTS SO THAT `A.java` WILL BE COMPILED BY SCALAC +package a diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/b-alt/.keep b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/b-alt/.keep new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..b2a1c300bfd0 --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/b/src/main/scala/b/B.scala @@ -0,0 +1,56 @@ +package b + +import a.A + +// keep in sync with Bexplicit.scala +object B { + + val newA = new A + + val newAInner = new A.Inner[Int](23, true) + val newAInner_sel = new A.Inner_sel[Int](23, true) + + @main + def test = { + newA.meth1(1) // OK + newA.meth1_sel(1) // OK + newA.meth2(1) // OK + newA.meth3(List[Int](1)) // OK + newA.meth3_sel(List[Int](1)) // OK + newA.meth4(List[Int](1)) // OK + newA.meth5(Array[Object]("abc")) // OK + newA.meth5_sel(Array[Object]("abc")) // OK + newA.meth6(Array[String]("abc")) // Ok + // newA.meth5(Array[Int](1)) // error: Array[Int] is not a subtype of Array[Object] + // newA.meth6(Array[Int](1)) // error: Array[Int] is not a subtype of Array[T & Object] + newA.meth7(1) // OK (creates a reference array) + newA.meth7_sel(1) // OK (creates a reference array) + newA.meth8(1) // OK (creates a primitive array and copies it into a reference array at Erasure) + val ai = Array[Int](1) + newA.meth7(ai: _*) // OK (will copy the array at Erasure) + newA.meth7_sel(ai: _*) // OK (will copy the array at Erasure) + newA.meth8(ai: _*) // OK (will copy the array at Erasure) + + newAInner.meth1(1) // OK + newAInner.meth2(1) // OK + newAInner_sel.meth1(1) // OK + newAInner_sel.meth2(1) // OK + + assert((newAInner.field1: Int) == 23) // OK + newAInner.field1 = 31 // OK + assert((newAInner.getter1: Int) == 31) // OK + assert(newAInner.field2 == true) // OK + newAInner.field2 = false // OK + assert(newAInner.getter2 == false) // OK + + assert((newAInner_sel.field1: Int) == 23) // OK + newAInner_sel.field1 = 31 // OK + assert((newAInner_sel.getter1: Int) == 31) // OK + assert(newAInner_sel.field2 == true) // OK + newAInner_sel.field2 = false // OK + assert(newAInner_sel.getter2 == false) // OK + + BImport.testImport() // OK + } +} + diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/b/src/main/scala/b/BImport.scala b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/b/src/main/scala/b/BImport.scala new file mode 100644 index 000000000000..17d3fbca1591 --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/b/src/main/scala/b/BImport.scala @@ -0,0 +1,31 @@ +package b + +import a.AImport + +object BImport { + + val newA = new AImport + + val newAInner = new AImport.Inner[Int](23, true) + + def testImport() = { + newA.meth1(1) // OK + newA.meth3(List[Int](1)) // OK + newA.meth5(Array[Object]("abc")) // OK + // newA.meth5(Array[Int](1)) // error: Array[Int] is not a subtype of Array[Object] + newA.meth7(1) // OK (creates a reference array) + val ai = Array[Int](1) + newA.meth7(ai: _*) // OK (will copy the array at Erasure) + + 
newAInner.meth1(1) // OK + + assert((newAInner.field1: Int) == 23) // OK + newAInner.field1 = 31 // OK + assert((newAInner.getter1: Int) == 31) // OK + + assert(newAInner.field2 == true) // OK + newAInner.field2 = false // OK + assert(newAInner.getter2 == false) // OK + } +} + diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/build.sbt b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/build.sbt new file mode 100644 index 000000000000..6738db3016fa --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/build.sbt @@ -0,0 +1,57 @@ +lazy val a = project.in(file("a")) + .settings( + compileOrder := CompileOrder.Mixed, // ensure we send java sources to Scala compiler + scalacOptions += "-Yjava-tasty", // enable pickling of java signatures + scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty.jar").toString), + scalacOptions += "-Ycheck:all", + Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-enum-classes"), // send classfiles to a different directory + ) + +// compiles the same sources as a, but with -Ytest-pickler +lazy val aCheck = project.in(file("a-check")) + .settings( + scalacOptions += "-Ytest-pickler", // check that the pickler is correct + Compile / sources := (a / Compile / sources).value, // use the same sources as a + compileOrder := CompileOrder.Mixed, // ensure we send java sources to Scala compiler + scalacOptions += "-Yjava-tasty", // enable pickling of java signatures + scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty-2.jar").toString), + Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-enum-classes-2"), // send classfiles to a different directory + ) + + +lazy val b = project.in(file("b")) + .settings( + Compile / unmanagedClasspath := Seq(Attributed.blank((ThisBuild / baseDirectory).value / "a-enum-java-tasty.jar")), + scalacOptions += "-Ycheck:all", + ) + .settings( + fork := true, // we have to fork the JVM if we actually want to run the code with correct failure semantics + Runtime / unmanagedClasspath += Attributed.blank((ThisBuild / baseDirectory).value / "a-enum-classes"), // make sure the java classes are visible at runtime + ) + +// same as b, but adds the real classes to the classpath instead of the tasty jar +lazy val bAlt = project.in(file("b-alt")) + .settings( + Compile / sources := (b / Compile / sources).value, + Compile / unmanagedClasspath := Seq(Attributed.blank((ThisBuild / baseDirectory).value / "a-enum-classes")), + scalacOptions += "-Ycheck:all", + ) + .settings( + fork := true, // we have to fork the JVM if we actually want to run the code with correct failure semantics + Runtime / unmanagedClasspath += Attributed.blank((ThisBuild / baseDirectory).value / "a-enum-classes"), // make sure the java classes are visible at runtime + ) + +// negative compilation tests +lazy val c = project.in(file("c")) + .settings( + Compile / unmanagedClasspath := Seq(Attributed.blank((ThisBuild / baseDirectory).value / "a-enum-java-tasty.jar")), + scalacOptions += "-Ycheck:all", + ) + +// same as c, but adds the real classes to the classpath instead of the tasty jar +lazy val cAlt = project.in(file("c-alt")) + .settings( + Compile / sources := (c / Compile / sources).value, + Compile / unmanagedClasspath := Seq(Attributed.blank((ThisBuild / baseDirectory).value / "a-enum-classes")), + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/c-alt/.keep 
b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/c-alt/.keep new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/c/src/main/scala/c/C.scala b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/c/src/main/scala/c/C.scala new file mode 100644 index 000000000000..05dff30bd63e --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/c/src/main/scala/c/C.scala @@ -0,0 +1,16 @@ +package c + +import a.A + +object C { + + val newA = new A + + @main + def test = { + newA.meth5(Array[Int](1)) // error: Array[Int] is not a subtype of Array[Object] + newA.meth5_sel(Array[Int](1)) // error: Array[Int] is not a subtype of Array[Object] + newA.meth6(Array[Int](1)) // error: Array[Int] is not a subtype of Array[T & Object] + } +} + diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/c/src/main/scala/c/CImport.scala b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/c/src/main/scala/c/CImport.scala new file mode 100644 index 000000000000..dc27126060b8 --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/c/src/main/scala/c/CImport.scala @@ -0,0 +1,14 @@ +package c + +import a.AImport + +object CImport { + + val newA = new AImport + + @main + def test = { + newA.meth5(Array[Int](1)) // error: Array[Int] is not a subtype of Array[Object] + } +} + diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/test b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/test new file mode 100644 index 000000000000..7a34a0cb5ec1 --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/test @@ -0,0 +1,12 @@ +# compile Java sources, and send them to TASTy +> a/compile +# compile Java sources, and check that they are pickled correctly +> aCheck/compile +# test depending on a java compiled enum through TASTy +> b/run +# double check against the real java classes +> bAlt/run +# check that java Array T is Array T & Object +-> c/compile +# double check against the real java classes +-> cAlt/compile diff --git a/sbt-test/pipelining/Yjava-tasty-generic/a/src/main/scala/a/A.java b/sbt-test/pipelining/Yjava-tasty-generic/a/src/main/scala/a/A.java new file mode 100644 index 000000000000..1fcb7e78ae3d --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-generic/a/src/main/scala/a/A.java @@ -0,0 +1,14 @@ +// this test ensures that it is possible to read a generic java class from TASTy. 
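+// (`b/B.scala` instantiates `new A[T](23) {}` and reads `value()` through the pickled signatures alone)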
+package a;
+
+public abstract class A<T> {
+  private final int _value;
+
+  protected A(final int value) {
+    this._value = value;
+  }
+
+  public int value() {
+    return _value;
+  }
+}
diff --git a/sbt-test/pipelining/Yjava-tasty-generic/a/src/main/scala/a/package.scala b/sbt-test/pipelining/Yjava-tasty-generic/a/src/main/scala/a/package.scala
new file mode 100644
index 000000000000..93f99e9892fe
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-generic/a/src/main/scala/a/package.scala
@@ -0,0 +1,2 @@
+// THIS FILE EXISTS SO THAT `A.java` WILL BE COMPILED BY SCALAC
+package a
diff --git a/sbt-test/pipelining/Yjava-tasty-generic/b-alt/.keep b/sbt-test/pipelining/Yjava-tasty-generic/b-alt/.keep
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/sbt-test/pipelining/Yjava-tasty-generic/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Yjava-tasty-generic/b/src/main/scala/b/B.scala
new file mode 100644
index 000000000000..f132e012a5fc
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-generic/b/src/main/scala/b/B.scala
@@ -0,0 +1,15 @@
+package b
+
+import a.A
+
+class B[T] {
+  val inner = new A[T](23) {}
+}
+
+object B {
+  @main def test = {
+    val derived: Int = (new B[Int]).inner.value
+    assert(derived == 23, s"actually was $derived")
+  }
+}
+
diff --git a/sbt-test/pipelining/Yjava-tasty-generic/build.sbt b/sbt-test/pipelining/Yjava-tasty-generic/build.sbt
new file mode 100644
index 000000000000..07e2ea56fbaa
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-generic/build.sbt
@@ -0,0 +1,29 @@
+lazy val a = project.in(file("a"))
+  .settings(
+    scalacOptions += "-Yjava-tasty", // enable pickling of java signatures
+    scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-generic-java-tasty.jar").toString),
+    scalacOptions += "-Ycheck:all",
+    Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-generic-classes"), // send classfiles to a different directory
+  )
+
+lazy val b = project.in(file("b"))
+  .settings(
+    Compile / unmanagedClasspath := Seq(Attributed.blank((ThisBuild / baseDirectory).value / "a-generic-java-tasty.jar")),
+    scalacOptions += "-Ycheck:all",
+  )
+  .settings(
+    fork := true, // we have to fork the JVM if we actually want to run the code with correct failure semantics
+    Runtime / unmanagedClasspath += Attributed.blank((ThisBuild / baseDirectory).value / "a-generic-classes"), // make sure the java classes are visible at runtime
+  )
+
+// same as b, but adds the real classes to the classpath instead of the tasty jar
+lazy val bAlt = project.in(file("b-alt"))
+  .settings(
+    Compile / sources := (b / Compile / sources).value,
+    Compile / unmanagedClasspath := Seq(Attributed.blank((ThisBuild / baseDirectory).value / "a-generic-classes")),
+    scalacOptions += "-Ycheck:all",
+  )
+  .settings(
+    fork := true, // we have to fork the JVM if we actually want to run the code with correct failure semantics
+    Runtime / unmanagedClasspath += Attributed.blank((ThisBuild / baseDirectory).value / "a-generic-classes"), // make sure the java classes are visible at runtime
+  )
diff --git a/sbt-test/pipelining/Yjava-tasty-generic/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Yjava-tasty-generic/project/DottyInjectedPlugin.scala
new file mode 100644
index 000000000000..69f15d168bfc
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-generic/project/DottyInjectedPlugin.scala
@@ -0,0 +1,12 @@
+import sbt._
+import Keys._
+
+object DottyInjectedPlugin extends AutoPlugin {
+  override def requires = plugins.JvmPlugin
+  override def trigger = allRequirements
+
+  override val projectSettings = Seq(
+    scalaVersion := sys.props("plugin.scalaVersion"),
+    scalacOptions += "-source:3.0-migration"
+  )
+}
diff --git a/sbt-test/pipelining/Yjava-tasty-generic/test b/sbt-test/pipelining/Yjava-tasty-generic/test
new file mode 100644
index 000000000000..2265d58a8262
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-generic/test
@@ -0,0 +1,5 @@
+> a/compile
+# Test depending on a java generic class through TASTy
+> b/run
+# double check against the real java classes
+> bAlt/run
diff --git a/sbt-test/pipelining/Yjava-tasty-result-types/a/src/main/scala/a/A.java b/sbt-test/pipelining/Yjava-tasty-result-types/a/src/main/scala/a/A.java
new file mode 100644
index 000000000000..c48f149849a6
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-result-types/a/src/main/scala/a/A.java
@@ -0,0 +1,9 @@
+package a;
+
+public class A {
+  public static final String VALUE = "A";
+
+  public <T> String add(T t) {
+    return VALUE + t.toString();
+  }
+}
diff --git a/sbt-test/pipelining/Yjava-tasty-result-types/a/src/main/scala/a/package.scala b/sbt-test/pipelining/Yjava-tasty-result-types/a/src/main/scala/a/package.scala
new file mode 100644
index 000000000000..93f99e9892fe
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-result-types/a/src/main/scala/a/package.scala
@@ -0,0 +1,2 @@
+// THIS FILE EXISTS SO THAT `A.java` WILL BE COMPILED BY SCALAC
+package a
diff --git a/sbt-test/pipelining/Yjava-tasty-result-types/b-alt/.keep b/sbt-test/pipelining/Yjava-tasty-result-types/b-alt/.keep
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/sbt-test/pipelining/Yjava-tasty-result-types/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Yjava-tasty-result-types/b/src/main/scala/b/B.scala
new file mode 100644
index 000000000000..32d001075d40
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-result-types/b/src/main/scala/b/B.scala
@@ -0,0 +1,17 @@
+package b
+
+import a.A
+
+object B {
+  val finalResult: "A" = A.VALUE
+
+  val a_B: String = (new A()).add("B")
+  val a_true: String = (new A()).add(true)
+
+  @main def test = {
+    assert(finalResult == "A", s"actually was $finalResult")
+    assert(a_B == "AB", s"actually was $a_B")
+    assert(a_true == "Atrue", s"actually was $a_true")
+  }
+}
+
diff --git a/sbt-test/pipelining/Yjava-tasty-result-types/build.sbt b/sbt-test/pipelining/Yjava-tasty-result-types/build.sbt
new file mode 100644
index 000000000000..512344f0635b
--- /dev/null
+++ b/sbt-test/pipelining/Yjava-tasty-result-types/build.sbt
@@ -0,0 +1,29 @@
+lazy val a = project.in(file("a"))
+  .settings(
+    scalacOptions += "-Yjava-tasty", // enable pickling of java signatures
+    scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-result-types-java-tasty.jar").toString),
+    scalacOptions += "-Ycheck:all",
+    Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-result-types-classes"), // send classfiles to a different directory
+  )
+
+lazy val b = project.in(file("b"))
+  .settings(
+    Compile / unmanagedClasspath := Seq(Attributed.blank((ThisBuild / baseDirectory).value / "a-result-types-java-tasty.jar")),
+    scalacOptions += "-Ycheck:all",
+  )
+  .settings(
+    fork := true, // we have to fork the JVM if we actually want to run the code with correct failure semantics
+    Runtime / unmanagedClasspath += Attributed.blank((ThisBuild / baseDirectory).value / "a-result-types-classes"), // make sure the java classes are visible at runtime
+  )
+
+// same as b, but adds the real classes to the classpath instead of the tasty 
jar +lazy val bAlt = project.in(file("b-alt")) + .settings( + Compile / sources := (b / Compile / sources).value, + Compile / unmanagedClasspath := Seq(Attributed.blank((ThisBuild / baseDirectory).value / "a-result-types-classes")), + scalacOptions += "-Ycheck:all", + ) + .settings( + fork := true, // we have to fork the JVM if we actually want to run the code with correct failure semantics + Runtime / unmanagedClasspath += Attributed.blank((ThisBuild / baseDirectory).value / "a-result-types-classes"), // make sure the java classes are visible at runtime + ) diff --git a/sbt-test/pipelining/Yjava-tasty-result-types/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Yjava-tasty-result-types/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-result-types/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/Yjava-tasty-result-types/test b/sbt-test/pipelining/Yjava-tasty-result-types/test new file mode 100644 index 000000000000..4a758ea9991d --- /dev/null +++ b/sbt-test/pipelining/Yjava-tasty-result-types/test @@ -0,0 +1,5 @@ +> a/compile +# Test depending on a java static final result, and method result through TASTy +> b/run +# double check against the real java classes +> bAlt/run diff --git a/sbt-test/sbt-dotty/compiler-plugin/plugin/DivideZero.scala b/sbt-test/sbt-dotty/compiler-plugin/plugin/DivideZero.scala index b7f1b1007420..c6fac6b796c0 100644 --- a/sbt-test/sbt-dotty/compiler-plugin/plugin/DivideZero.scala +++ b/sbt-test/sbt-dotty/compiler-plugin/plugin/DivideZero.scala @@ -19,8 +19,8 @@ class DivideZero extends PluginPhase with StandardPlugin { val phaseName = name - override val runsAfter = Set(Staging.name) - override val runsBefore = Set(Pickler.name) + override val runsAfter = Set(Pickler.name) + override val runsBefore = Set(Staging.name) def init(options: List[String]): List[PluginPhase] = this :: Nil diff --git a/sbt-test/sbt-dotty/dotty-knowledge.i17/project/build.properties b/sbt-test/sbt-dotty/dotty-knowledge.i17/project/build.properties deleted file mode 100644 index 46e43a97ed86..000000000000 --- a/sbt-test/sbt-dotty/dotty-knowledge.i17/project/build.properties +++ /dev/null @@ -1 +0,0 @@ -sbt.version=1.8.2 diff --git a/sbt-test/source-dependencies/compactify/src/main/scala/Nested.scala b/sbt-test/source-dependencies/compactify/src/main/scala/Nested.scala index 798868d72640..4b1597d287d4 100644 --- a/sbt-test/source-dependencies/compactify/src/main/scala/Nested.scala +++ b/sbt-test/source-dependencies/compactify/src/main/scala/Nested.scala @@ -38,4 +38,6 @@ class TopLevel2 object TopLevel3 -class TopLevel4 \ No newline at end of file +class TopLevel4 + +object TopLevelModuleSuffix$ diff --git a/sbt-test/source-dependencies/compactify/test b/sbt-test/source-dependencies/compactify/test index b56be3e5d4aa..64be9af369b5 100644 --- a/sbt-test/source-dependencies/compactify/test +++ b/sbt-test/source-dependencies/compactify/test @@ -5,4 +5,4 @@ -> outputEmpty $ delete src/main/scala/For.scala src/main/scala/Nested.scala > compile -> outputEmpty \ No newline at end of file +> outputEmpty diff --git 
a/sbt-test/source-dependencies/mirror-product/MyProduct.scala b/sbt-test/source-dependencies/mirror-product/MyProduct.scala new file mode 100644 index 000000000000..acad1358f62b --- /dev/null +++ b/sbt-test/source-dependencies/mirror-product/MyProduct.scala @@ -0,0 +1 @@ +case class MyProduct(x: Int) diff --git a/sbt-test/source-dependencies/mirror-product/Test.scala b/sbt-test/source-dependencies/mirror-product/Test.scala new file mode 100644 index 000000000000..e53d7b999517 --- /dev/null +++ b/sbt-test/source-dependencies/mirror-product/Test.scala @@ -0,0 +1,10 @@ +import scala.deriving.Mirror +import scala.compiletime.erasedValue + +transparent inline def foo[T](using m: Mirror.Of[T]): Int = + inline erasedValue[m.MirroredElemTypes] match + case _: (Int *: EmptyTuple) => 1 + case _: (Int *: String *: EmptyTuple) => 2 + +@main def Test = + assert(foo[MyProduct] == 2) diff --git a/sbt-test/source-dependencies/mirror-product/build.sbt b/sbt-test/source-dependencies/mirror-product/build.sbt new file mode 100644 index 000000000000..63e314982c41 --- /dev/null +++ b/sbt-test/source-dependencies/mirror-product/build.sbt @@ -0,0 +1 @@ +scalaVersion := sys.props("plugin.scalaVersion") diff --git a/sbt-test/source-dependencies/mirror-product/changes/MyProduct.scala b/sbt-test/source-dependencies/mirror-product/changes/MyProduct.scala new file mode 100644 index 000000000000..87e5af62bd7e --- /dev/null +++ b/sbt-test/source-dependencies/mirror-product/changes/MyProduct.scala @@ -0,0 +1 @@ +case class MyProduct(x: Int, y: String) diff --git a/sbt-test/source-dependencies/mirror-product/test b/sbt-test/source-dependencies/mirror-product/test new file mode 100644 index 000000000000..fbcd15fa4153 --- /dev/null +++ b/sbt-test/source-dependencies/mirror-product/test @@ -0,0 +1,7 @@ +> compile + +# change the case class constructor +$ copy-file changes/MyProduct.scala MyProduct.scala + +# Both MyProduct.scala and Test.scala should be recompiled, otherwise the assertion will fail +> run diff --git a/sbt-test/source-dependencies/mirror-sum/Sum.scala b/sbt-test/source-dependencies/mirror-sum/Sum.scala new file mode 100644 index 000000000000..782f5a89d913 --- /dev/null +++ b/sbt-test/source-dependencies/mirror-sum/Sum.scala @@ -0,0 +1,2 @@ +sealed trait Sum +case class Child1() extends Sum diff --git a/sbt-test/source-dependencies/mirror-sum/Test.scala b/sbt-test/source-dependencies/mirror-sum/Test.scala new file mode 100644 index 000000000000..9cb6e2c78d64 --- /dev/null +++ b/sbt-test/source-dependencies/mirror-sum/Test.scala @@ -0,0 +1,12 @@ +import scala.deriving.Mirror +import scala.compiletime.erasedValue + +object Test: + transparent inline def foo[T](using m: Mirror.Of[T]): Int = + inline erasedValue[m.MirroredElemLabels] match + case _: ("Child1" *: EmptyTuple) => 1 + case _: ("Child1" *: "Child2" *: EmptyTuple) => 2 + + def main(args: Array[String]): Unit = + assert(foo[Sum] == 2) + diff --git a/sbt-test/source-dependencies/mirror-sum/build.sbt b/sbt-test/source-dependencies/mirror-sum/build.sbt new file mode 100644 index 000000000000..de89f34feb3b --- /dev/null +++ b/sbt-test/source-dependencies/mirror-sum/build.sbt @@ -0,0 +1,4 @@ +scalaVersion := sys.props("plugin.scalaVersion") +// Use more precise invalidation, otherwise the reference to `Sum` in +// Test.scala is enough to invalidate it when a child is added. 
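+// With the optimized scheme, only code that inspects the set of children (here the
+// Mirror match in Test.scala) is invalidated when that set changes.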
+ThisBuild / incOptions ~= { _.withUseOptimizedSealed(true) } diff --git a/sbt-test/source-dependencies/mirror-sum/changes/Sum.scala b/sbt-test/source-dependencies/mirror-sum/changes/Sum.scala new file mode 100644 index 000000000000..13ec68223ed4 --- /dev/null +++ b/sbt-test/source-dependencies/mirror-sum/changes/Sum.scala @@ -0,0 +1,3 @@ +sealed trait Sum +case class Child1() extends Sum +case class Child2() extends Sum diff --git a/sbt-test/source-dependencies/mirror-sum/test b/sbt-test/source-dependencies/mirror-sum/test new file mode 100644 index 000000000000..baf5f17f9905 --- /dev/null +++ b/sbt-test/source-dependencies/mirror-sum/test @@ -0,0 +1,7 @@ +> compile + +# Add a child +$ copy-file changes/Sum.scala Sum.scala + +# Both Sum.scala and Test.scala should be recompiled, otherwise the assertion will fail +> run diff --git a/sbt-test/source-dependencies/stable-ctx-bounds/A.scala b/sbt-test/source-dependencies/stable-ctx-bounds/A.scala new file mode 100644 index 000000000000..67dd2ff18205 --- /dev/null +++ b/sbt-test/source-dependencies/stable-ctx-bounds/A.scala @@ -0,0 +1,5 @@ +package database + +object A { + def wrapper: B.Wrapper = ??? +} diff --git a/sbt-test/source-dependencies/stable-ctx-bounds/B.scala b/sbt-test/source-dependencies/stable-ctx-bounds/B.scala new file mode 100644 index 000000000000..ab203de92ce5 --- /dev/null +++ b/sbt-test/source-dependencies/stable-ctx-bounds/B.scala @@ -0,0 +1,29 @@ +package database + +object B { + trait GetValue[T] + + object GetValue { + implicit def inst[T]: GetValue[T] = ??? + } + + class ResultSet { + def getV[A: GetValue]: A = ??? + } + + trait DBParse[T] { + def apply(rs: ResultSet): T + } + + class AVG() { + def call: String = "AVG" + } + + object ClientOwnerId { + class CompanyId + + def parseClientOwnerId[T: DBParse]: Unit = {} + } + + class Wrapper(companyId: ClientOwnerId.CompanyId) +} diff --git a/sbt-test/source-dependencies/stable-ctx-bounds/C.scala b/sbt-test/source-dependencies/stable-ctx-bounds/C.scala new file mode 100644 index 000000000000..1379e9e87b4a --- /dev/null +++ b/sbt-test/source-dependencies/stable-ctx-bounds/C.scala @@ -0,0 +1,8 @@ +package database + +object C { + def foo: Unit = { + val rs: B.ResultSet = ??? 
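+    // getV resolves its GetValue[String] context bound via the implicit GetValue.inst in B.scala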
+ rs.getV[String] + } +} diff --git a/sbt-test/source-dependencies/stable-ctx-bounds/build.sbt b/sbt-test/source-dependencies/stable-ctx-bounds/build.sbt new file mode 100644 index 000000000000..bc65e91e91d7 --- /dev/null +++ b/sbt-test/source-dependencies/stable-ctx-bounds/build.sbt @@ -0,0 +1,27 @@ +scalaVersion := sys.props("plugin.scalaVersion") + +import sbt.internal.inc.Analysis +import complete.DefaultParsers._ + +// Reset compiler iterations, necessary because tests run in batch mode +val recordPreviousIterations = taskKey[Unit]("Record previous iterations.") +recordPreviousIterations := { + val log = streams.value.log + CompileState.previousIterations = { + val previousAnalysis = (previousCompile in Compile).value.analysis.asScala + previousAnalysis match { + case None => + log.info("No previous analysis detected") + 0 + case Some(a: Analysis) => a.compilations.allCompilations.size + } + } +} + +val checkIterations = inputKey[Unit]("Verifies the accumulated number of iterations of incremental compilation.") + +checkIterations := { + val expected: Int = (Space ~> NatBasic).parsed + val actual: Int = ((compile in Compile).value match { case a: Analysis => a.compilations.allCompilations.size }) - CompileState.previousIterations + assert(expected == actual, s"Expected $expected compilations, got $actual") +} diff --git a/sbt-test/source-dependencies/stable-ctx-bounds/changes/B.scala b/sbt-test/source-dependencies/stable-ctx-bounds/changes/B.scala new file mode 100644 index 000000000000..3b3cd69ea17d --- /dev/null +++ b/sbt-test/source-dependencies/stable-ctx-bounds/changes/B.scala @@ -0,0 +1,27 @@ +package database + +object B { + trait GetValue[T] + + object GetValue { + implicit def inst[T]: GetValue[T] = ??? + } + + class ResultSet { + def getV[A: GetValue]: A = ??? + } + + trait DBParse[T] + + class AVG() { + def call: String = "AVG2" + } + + object ClientOwnerId { + class CompanyId + + def parseClientOwnerId[T: DBParse]: Unit = {} + } + + class Wrapper(companyId: ClientOwnerId.CompanyId) +} diff --git a/sbt-test/source-dependencies/stable-ctx-bounds/project/CompileState.scala b/sbt-test/source-dependencies/stable-ctx-bounds/project/CompileState.scala new file mode 100644 index 000000000000..078db9c7bf56 --- /dev/null +++ b/sbt-test/source-dependencies/stable-ctx-bounds/project/CompileState.scala @@ -0,0 +1,4 @@ +// This is necessary because tests are run in batch mode +object CompileState { + @volatile var previousIterations: Int = -1 +} diff --git a/sbt-test/source-dependencies/stable-ctx-bounds/test b/sbt-test/source-dependencies/stable-ctx-bounds/test new file mode 100644 index 000000000000..b42e64920bd5 --- /dev/null +++ b/sbt-test/source-dependencies/stable-ctx-bounds/test @@ -0,0 +1,15 @@ +> compile +> recordPreviousIterations + +# change only the body of a method +$ copy-file changes/B.scala B.scala + +# Only B.scala should be recompiled. Previously, this led to a subsequent +# compilation round because context bounds were desugared into names unique to +# the whole compilation unit, and in the first `compile` the two context bounds +# of B.scala were desugared into `evidence$2` and `evidence$1` in this order +# (because the definitions were visited out of order), but in the second call +# to `compile` we traversed them in order as we typechecked B.scala and ended up +# with `evidence$1` and `evidence$2` instead.
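For context, a minimal sketch of the desugaring the comment above refers to (`getV` is taken from B.scala in this test; the `evidence$N` names are compiler-synthesized, so the second definition is only a conceptual equivalent):

```scala
trait GetValue[T]

// A context bound on a type parameter...
def getV[A: GetValue]: A = ???

// ...is desugared into an implicit evidence parameter whose synthetic name
// has the form `evidence$N`, conceptually:
def getVDesugared[A](implicit ev: GetValue[A]): A = ??? // ev ~ evidence$N
```

Because B.scala declares two such context bounds, unstable `evidence$N` numbering across runs made an unchanged source look like an API change to the incremental compiler; the `checkIterations 1` below verifies that only a single compilation round remains.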
+> compile +> checkIterations 1 diff --git a/sbt-test/source-dependencies/useOptimizedSealed/Sealed.scala b/sbt-test/source-dependencies/useOptimizedSealed/Sealed.scala new file mode 100644 index 000000000000..06c191cba9ed --- /dev/null +++ b/sbt-test/source-dependencies/useOptimizedSealed/Sealed.scala @@ -0,0 +1,2 @@ +sealed trait Sealed +class Child1 extends Sealed diff --git a/sbt-test/source-dependencies/useOptimizedSealed/Test.scala b/sbt-test/source-dependencies/useOptimizedSealed/Test.scala new file mode 100644 index 000000000000..086e359babc4 --- /dev/null +++ b/sbt-test/source-dependencies/useOptimizedSealed/Test.scala @@ -0,0 +1,3 @@ +class Test { + val s: Sealed = new Child1 +} diff --git a/sbt-test/source-dependencies/useOptimizedSealed/build.sbt b/sbt-test/source-dependencies/useOptimizedSealed/build.sbt new file mode 100644 index 000000000000..1c4c78828a55 --- /dev/null +++ b/sbt-test/source-dependencies/useOptimizedSealed/build.sbt @@ -0,0 +1,29 @@ +scalaVersion := sys.props("plugin.scalaVersion") + +ThisBuild / incOptions ~= { _.withUseOptimizedSealed(true) } + +import sbt.internal.inc.Analysis +import complete.DefaultParsers._ + +// Reset compiler iterations, necessary because tests run in batch mode +val recordPreviousIterations = taskKey[Unit]("Record previous iterations.") +recordPreviousIterations := { + val log = streams.value.log + CompileState.previousIterations = { + val previousAnalysis = (previousCompile in Compile).value.analysis.asScala + previousAnalysis match { + case None => + log.info("No previous analysis detected") + 0 + case Some(a: Analysis) => a.compilations.allCompilations.size + } + } +} + +val checkIterations = inputKey[Unit]("Verifies the accumulated number of iterations of incremental compilation.") + +checkIterations := { + val expected: Int = (Space ~> NatBasic).parsed + val actual: Int = ((compile in Compile).value match { case a: Analysis => a.compilations.allCompilations.size }) - CompileState.previousIterations + assert(expected == actual, s"Expected $expected compilations, got $actual") +} diff --git a/sbt-test/source-dependencies/useOptimizedSealed/changes/Sealed1.scala b/sbt-test/source-dependencies/useOptimizedSealed/changes/Sealed1.scala new file mode 100644 index 000000000000..7ce9b1119871 --- /dev/null +++ b/sbt-test/source-dependencies/useOptimizedSealed/changes/Sealed1.scala @@ -0,0 +1,3 @@ +sealed trait Sealed +class Child1 extends Sealed +class Child2 extends Sealed diff --git a/sbt-test/source-dependencies/useOptimizedSealed/changes/Sealed2.scala b/sbt-test/source-dependencies/useOptimizedSealed/changes/Sealed2.scala new file mode 100644 index 000000000000..ccf27d31d219 --- /dev/null +++ b/sbt-test/source-dependencies/useOptimizedSealed/changes/Sealed2.scala @@ -0,0 +1,4 @@ +sealed trait Sealed +class Child1 extends Sealed +class Child2 extends Sealed +class Child3 extends Sealed diff --git a/sbt-test/source-dependencies/useOptimizedSealed/changes/Test1.scala b/sbt-test/source-dependencies/useOptimizedSealed/changes/Test1.scala new file mode 100644 index 000000000000..559fef4d481a --- /dev/null +++ b/sbt-test/source-dependencies/useOptimizedSealed/changes/Test1.scala @@ -0,0 +1,7 @@ +class Test { + def foo(x: Sealed): Int = x match + case _: Child1 => 1 + case _: Child2 => 1 + + val s: Sealed = new Child1 +} diff --git a/sbt-test/source-dependencies/useOptimizedSealed/project/CompileState.scala b/sbt-test/source-dependencies/useOptimizedSealed/project/CompileState.scala new file mode 100644 index 000000000000..078db9c7bf56 --- 
/dev/null +++ b/sbt-test/source-dependencies/useOptimizedSealed/project/CompileState.scala @@ -0,0 +1,4 @@ +// This is necessary because tests are run in batch mode +object CompileState { + @volatile var previousIterations: Int = -1 +} diff --git a/sbt-test/source-dependencies/useOptimizedSealed/test b/sbt-test/source-dependencies/useOptimizedSealed/test new file mode 100644 index 000000000000..6680e9aab923 --- /dev/null +++ b/sbt-test/source-dependencies/useOptimizedSealed/test @@ -0,0 +1,24 @@ +# Compile Sealed.scala and Test.scala +> compile +> recordPreviousIterations + +# Add an extra child to Sealed +$ copy-file changes/Sealed1.scala Sealed.scala + +# Only Sealed.scala needs to be recompiled because Test.scala does not +# match on a value of type `Sealed`. +> compile +> checkIterations 1 + +> clean +$ copy-file changes/Test1.scala Test.scala +> compile +> recordPreviousIterations + +# Add an extra child to Sealed again +$ copy-file changes/Sealed2.scala Sealed.scala + +# Test.scala will be recompiled in a second round because it matches +# on a value of type `Sealed`. +> compile +> checkIterations 2 diff --git a/sbt-test/tasty-compat/add-overload/build.sbt b/sbt-test/tasty-compat/add-overload/build.sbt index 82dc596134c8..52a04f07148e 100644 --- a/sbt-test/tasty-compat/add-overload/build.sbt +++ b/sbt-test/tasty-compat/add-overload/build.sbt @@ -16,7 +16,7 @@ lazy val `a-changes` = project.in(file("a-changes")) lazy val c = project.in(file(".")) .settings( - scalacOptions ++= Seq("-from-tasty", "-Ycheck:readTasty"), + scalacOptions ++= Seq("-from-tasty", "-Ycheck:readTasty", "-Xfatal-warnings"), Compile / sources := Seq(new java.io.File("c-input/B.tasty")), Compile / unmanagedClasspath += (ThisBuild / baseDirectory).value / "c-input", Compile / classDirectory := (ThisBuild / baseDirectory).value / "c-output" diff --git a/sbt-test/tasty-compat/only-tasty/a/A.scala b/sbt-test/tasty-compat/only-tasty/a/A.scala new file mode 100644 index 000000000000..79a242ade5cd --- /dev/null +++ b/sbt-test/tasty-compat/only-tasty/a/A.scala @@ -0,0 +1,5 @@ +package a + +object A: + def f(x: Int): Int = x + 1 + inline def g(x: Int): Int = x + 1 diff --git a/sbt-test/tasty-compat/only-tasty/b/B.scala b/sbt-test/tasty-compat/only-tasty/b/B.scala new file mode 100644 index 000000000000..3241f5060d5c --- /dev/null +++ b/sbt-test/tasty-compat/only-tasty/b/B.scala @@ -0,0 +1,5 @@ +package b + +object B: + def f(x: Int): Int = x + 2 + inline def g(x: Int): Int = x + 2 diff --git a/sbt-test/tasty-compat/only-tasty/build.sbt b/sbt-test/tasty-compat/only-tasty/build.sbt new file mode 100644 index 000000000000..ae52f2b53768 --- /dev/null +++ b/sbt-test/tasty-compat/only-tasty/build.sbt @@ -0,0 +1,16 @@ +lazy val a = project.in(file("a")) + .settings( + scalacOptions += "-Youtput-only-tasty", + ) + +lazy val b = project.in(file("b")) + .settings( + scalacOptions += "-Youtput-only-tasty", + Compile / exportJars := true, + ) + +lazy val c = project.in(file("c")) + .dependsOn(a, b) + .settings( + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/tasty-compat/only-tasty/c/C.scala b/sbt-test/tasty-compat/only-tasty/c/C.scala new file mode 100644 index 000000000000..4d14c9134813 --- /dev/null +++ b/sbt-test/tasty-compat/only-tasty/c/C.scala @@ -0,0 +1,9 @@ +import a.A +import b.B + +object C extends App { + assert(A.f(0) == 1) + assert(A.g(0) == 1) + assert(B.f(0) == 2) + assert(B.g(0) == 2) +} diff --git a/sbt-test/tasty-compat/only-tasty/project/DottyInjectedPlugin.scala
b/sbt-test/tasty-compat/only-tasty/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..fb946c4b8c61 --- /dev/null +++ b/sbt-test/tasty-compat/only-tasty/project/DottyInjectedPlugin.scala @@ -0,0 +1,11 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion") + ) +} diff --git a/sbt-test/tasty-compat/only-tasty/test b/sbt-test/tasty-compat/only-tasty/test new file mode 100644 index 000000000000..31b609222ed6 --- /dev/null +++ b/sbt-test/tasty-compat/only-tasty/test @@ -0,0 +1,6 @@ +# compile library A +> a/compile +# compile library B +> b/compile +# compile library C, from source, against TASTy of A and B (where B is loaded from a jar) +> c/compile diff --git a/scala2-library-tasty-tests/src/Main.scala b/scala2-library-tasty-tests/src/Main.scala new file mode 100644 index 000000000000..b33219271201 --- /dev/null +++ b/scala2-library-tasty-tests/src/Main.scala @@ -0,0 +1,77 @@ +package hello + +import scala.util.* +import scala.compiletime.testing.typeChecks + +enum Color: + case Red, Green, Blue + +object HelloWorld: + def main(args: Array[String]): Unit = { + println("hello 2.13 library bootstrapped") + println(Color.Red) + println(Color.Green) + println(Color.Blue) + + testScala2UnapplySignatures() + testScala2ObjectParents() + testScala2CaseClassUnderscoreMembers() + testScalaNumberUnderlying() + testArrayOps() + scala.collection.mutable.UnrolledBufferTest.test() + } + + def testScala2UnapplySignatures() = { + val _: Option[Int] = Some.unapply(Some(1)) + val _: Option[Int] = Right.unapply(Right(1)) + val _: Option[(Int, List[Int])] = ::.unapply(::(1, Nil)) + + val _: Option[Int] = Tuple1.unapply(Tuple1(1)) + val _: Option[(Int, Int)] = Tuple2.unapply((1, 2)) + val _: Option[(Int, Int, Int)] = Tuple3.unapply((1, 2, 3)) + } + + def testScala2ObjectParents() = { + assert(!typeChecks("Either: scala.deriving.Mirror.Sum")) + assert(!typeChecks("Either: scala.deriving.Mirror")) + } + + def testScala2CaseClassUnderscoreMembers() = { + val some: Some[Int] = Some(1) + assert(!typeChecks("some._1")) + } + + def testScalaNumberUnderlying() = { + import scala.math.{ScalaNumericConversions, ScalaNumber} + + val _: java.math.BigInteger = BigInt(1).underlying + val _: Object = (BigInt(1): ScalaNumericConversions).underlying + val _: Object = (BigInt(1): ScalaNumber).underlying + + // val _: java.math.BigDecimal = BigDecimal(1).underlying // FIXME: inferred result type of non-private method + val _: Object = (BigDecimal(1): ScalaNumericConversions).underlying + val _: Object = (BigDecimal(1): ScalaNumber).underlying + + class MyNumber1(override val underlying: BigInt) extends ScalaNumericConversions { + def doubleValue: Double = ???; def floatValue: Float = ???; + def intValue: Int = ???; def longValue: Long = ??? + def isWhole: Boolean = ??? + } + val _: BigInt = MyNumber1(1).underlying + val _: Object = (MyNumber1(1): ScalaNumericConversions).underlying + val _: Object = (MyNumber1(1): ScalaNumber).underlying + + class MyNumber2(override val underlying: Object) extends ScalaNumber { + def doubleValue: Double = ???; def floatValue: Float = ???; + def intValue: Int = ???; def longValue: Long = ??? + def isWhole: Boolean = ???
+ } + val _: Object = MyNumber2(BigInt(1)).underlying + val _: Object = (MyNumber2(BigInt(1)): ScalaNumber).underlying + } + + def testArrayOps() = { + new collection.ArrayOps[String](Array[String]("foo")).exists(x => true) + } + +end HelloWorld diff --git a/scala2-library-tasty-tests/src/UnrolledBufferTest.scala b/scala2-library-tasty-tests/src/UnrolledBufferTest.scala new file mode 100644 index 000000000000..b5bec5c08227 --- /dev/null +++ b/scala2-library-tasty-tests/src/UnrolledBufferTest.scala @@ -0,0 +1,7 @@ +package scala.collection +package mutable + +object UnrolledBufferTest: + def test(): Unit = + new UnrolledBuffer.Unrolled[Int](0, Array.empty[Int], null, null) + new UnrolledBuffer.Unrolled[Int](0, Array.empty[Int], null) // using default null argument diff --git a/stdlib-bootstrapped-tasty-tests/test/BootstrappedStdLibTASYyTest.scala b/scala2-library-tasty-tests/test/BootstrappedStdLibTASYyTest.scala similarity index 97% rename from stdlib-bootstrapped-tasty-tests/test/BootstrappedStdLibTASYyTest.scala rename to scala2-library-tasty-tests/test/BootstrappedStdLibTASYyTest.scala index 100292b1f5a7..db810cd87f26 100644 --- a/stdlib-bootstrapped-tasty-tests/test/BootstrappedStdLibTASYyTest.scala +++ b/scala2-library-tasty-tests/test/BootstrappedStdLibTASYyTest.scala @@ -58,7 +58,7 @@ class BootstrappedStdLibTASYyTest: val size = loadBlacklisted.size for (notBlacklisted, i) <- loadBlacklist.zipWithIndex do val blacklist = loadBlacklisted - notBlacklisted - println(s"Trying withouth $notBlacklisted in the blacklist (${i+1}/$size)") + println(s"Trying without $notBlacklisted in the blacklist (${i+1}/$size)") try { loadWithTastyInspector(blacklist) shouldBeWhitelisted = notBlacklisted :: shouldBeWhitelisted @@ -76,7 +76,7 @@ class BootstrappedStdLibTASYyTest: val blacklist0 = loadBlacklisted.union(compileBlacklisted) for (notBlacklisted, i) <- compileBlacklist.zipWithIndex do val blacklist = blacklist0 - notBlacklisted - println(s"Trying withouth $notBlacklisted in the blacklist (${i+1}/$size)") + println(s"Trying without $notBlacklisted in the blacklist (${i+1}/$size)") try { compileFromTastyInJar(blacklist) shouldBeWhitelisted = notBlacklisted :: shouldBeWhitelisted diff --git a/scaladoc-js/common/src/utils/html.scala b/scaladoc-js/common/src/utils/html.scala index 1a7f108a3555..23562e04679e 100644 --- a/scaladoc-js/common/src/utils/html.scala +++ b/scaladoc-js/common/src/utils/html.scala @@ -12,8 +12,8 @@ object HTML { case class Tag[T <: domhtml.Element](private val elemFactory: () => T): private def textNode(s: String): Text = document.createTextNode(s) - def apply(tags: TagArg*): T = apply()(tags:_*) - def apply(first: AttrArg, rest: AttrArg*): T = apply((first +: rest):_*)() + def apply(tags: TagArg*): T = apply()(tags*) + def apply(first: AttrArg, rest: AttrArg*): T = apply((first +: rest)*)() def apply(attrs: AttrArg*)(tags: TagArg*): T = val elem: T = elemFactory() def unpackTags(tags: TagArg*): Unit = tags.foreach { @@ -26,11 +26,11 @@ object HTML { case ("id", id) => elem.id = id case ("class", value) => value.split("\\s+").foreach(cls => elem.classList.add(cls)) case (attr, value) => elem.setAttribute(attr, value) - case s: Seq[AppliedAttr] => unpackAttributes(s*) + case s: Seq[AppliedAttr @unchecked] => unpackAttributes(s*) } - unpackTags(tags:_*) - unpackAttributes(attrs:_*) + unpackTags(tags*) + unpackAttributes(attrs*) elem object Tag: @@ -118,4 +118,4 @@ object HTML { val titleAttr =Attr("title") val onkeyup = Attr("onkeyup") -} \ No newline at end of file +} diff 
--git a/scaladoc-testcases/src/example/level2/Documentation.scala b/scaladoc-testcases/src/example/level2/Documentation.scala index bbfd31669f3c..f118b2ad150e 100644 --- a/scaladoc-testcases/src/example/level2/Documentation.scala +++ b/scaladoc-testcases/src/example/level2/Documentation.scala @@ -163,7 +163,7 @@ abstract class Documentation[T, A <: Int, B >: String, -X, +Y](c1: String, val c def table(foo: String) = ??? protected[example] val valWithScopeModifier = ??? - protected[this] val valWithScopeModifierThis = ??? + protected val valWithScopeModifierThis = ??? var iAmAVar = ??? } diff --git a/scaladoc-testcases/src/tests/exports1.scala b/scaladoc-testcases/src/tests/exports1.scala index 8c47ddc94b79..f719bca35eb1 100644 --- a/scaladoc-testcases/src/tests/exports1.scala +++ b/scaladoc-testcases/src/tests/exports1.scala @@ -14,18 +14,18 @@ class A: //unexpected = 1 var aVar1: 1 = 1 - type HKT[T[_], X] //expected: final type HKT = [T[_], X] =>> HKT[T, X] + type HKT[T[_], X] //expected: final type HKT = [T[_], X] =>> a.HKT[T, X] = T[X] - type SomeRandomType = (List[_] | Seq[_]) & String //expected: final type SomeRandomType = SomeRandomType - def x[T[_], X](x: X): HKT[T, X] + type SomeRandomType = (List[?] | Seq[?]) & String //expected: final type SomeRandomType = a.SomeRandomType + def x[T[_], X](x: X): HKT[T, X] //expected: def x[T[_], X](x: X): A.this.HKT[T, X] = ??? def fn[T, U]: T => U = ??? object Object //expected: val Obj: Object.type - val x: HKT[List, Int] + val x: HKT[List, Int] //expected: val x: A.this.HKT[List, Int] = ??? - class Class(val a: Int, val b: Int) extends Serializable //expected: final type Class = Class - enum Enum: //expected: final type Enum = Enum + class Class(val a: Int, val b: Int) extends Serializable //expected: final type Class = a.Class + enum Enum: //expected: final type Enum = a.Enum case A case B(i: Int) case C[T]() extends Enum diff --git a/scaladoc-testcases/src/tests/functionTypeSignatures.scala b/scaladoc-testcases/src/tests/functionTypeSignatures.scala index 0c456b9526c6..7b06c645fe31 100644 --- a/scaladoc-testcases/src/tests/functionTypeSignatures.scala +++ b/scaladoc-testcases/src/tests/functionTypeSignatures.scala @@ -4,7 +4,13 @@ type A = ((Int, Int)) => Int type B = (Int | String) => Int +type B1 = Int | String => Int //expected: type B1 = (Int | String) => Int + type C = (Int & String) => Int +type C1 = Int & String => Int //expected: type C1 = (Int & String) => Int + +type D = Int | (String => Int) + type E = (A => B) => B diff --git a/scaladoc-testcases/src/tests/hkts.scala b/scaladoc-testcases/src/tests/hkts.scala index d8e29d993821..c04fcf1e0cd1 100644 --- a/scaladoc-testcases/src/tests/hkts.scala +++ b/scaladoc-testcases/src/tests/hkts.scala @@ -46,7 +46,7 @@ trait Case14[C[_]] class SomeClass extends Case14[List] -def method1[E, T](value: List[_ >: E]): Int = 0 +def method1[E, T](value: List[? >: E]): Int = 0 def method2[F[+X] <: Option[X], A](fa: F[A]): A = fa.get import scala.collection.immutable.ArraySeq diff --git a/scaladoc-testcases/src/tests/infixTypes.scala b/scaladoc-testcases/src/tests/infixTypes.scala new file mode 100644 index 000000000000..30fc982f2bca --- /dev/null +++ b/scaladoc-testcases/src/tests/infixTypes.scala @@ -0,0 +1,141 @@ +package tests +package infixTypes + +import annotation.showAsInfix + +@showAsInfix +infix trait SomeTrait[A, B] + +infix trait SomeTrait2[A, B] + +def someTrait1[C, D]: C SomeTrait D + = ??? + +def someTrait2[E, F]: SomeTrait[E, F] //expected: def someTrait2[E, F]: E SomeTrait F + = ??? 
+ +def someTrait3[G, H]: G SomeTrait2 H //expected: def someTrait3[G, H]: SomeTrait2[G, H] + = ??? + +trait +++[A, B] + +trait ++*[A, B] + +trait ++:[A, B] + +trait +*:[A, B] + +trait ***[A, B] + +trait **:[A, B] + +def foo[A, B, C, D]: (A SomeTrait B) +++ (C SomeTrait2 D) //expected: def foo[A, B, C, D]: (A SomeTrait B) +++ SomeTrait2[C, D] + = ??? + +// left-associative, same precedence + +def a0[X, Y, Z]: X +++ Y +++ Z + = a1 + +def a1[X, Y, Z]: (X +++ Y) +++ Z //expected: def a1[X, Y, Z]: X +++ Y +++ Z + = a0 + +def a2[X, Y, Z]: X +++ (Y +++ Z) + = ??? + +def a0x[X, Y, Z]: X +++ Y ++* Z //expected: def a0x[X, Y, Z]: (X +++ Y) ++* Z + = a1x + +def a1x[X, Y, Z]: (X +++ Y) ++* Z + = a0x + +def a2x[X, Y, Z]: X +++ (Y ++* Z) + = ??? + +// right-associative, same precedence + +def a3[X, Y, Z]: X ++: Y ++: Z + = a5 + +def a4[X, Y, Z]: (X ++: Y) ++: Z + = ??? + +def a5[X, Y, Z]: X ++: (Y ++: Z) //expected: def a5[X, Y, Z]: X ++: Y ++: Z + = a3 + +def a3x[X, Y, Z]: X ++: Y +*: Z //expected: def a3x[X, Y, Z]: X ++: (Y +*: Z) + = a5x + +def a4x[X, Y, Z]: (X ++: Y) +*: Z + = ??? + +def a5x[X, Y, Z]: X ++: (Y +*: Z) + = a3x + +// left and right associative, same precedence + +def a6[X, Y, Z]: (X +++ Y) ++: Z + = ??? + +def a7[X, Y, Z]: X +++ (Y ++: Z) + = ??? + +// left-associative, mixed precedence + +def b0[X, Y, Z]: X +++ Y *** Z //expected: def b0[X, Y, Z]: X +++ (Y *** Z) + = ??? + +def b1[X, Y, Z]: (X +++ Y) *** Z + = ??? + +def b2[X, Y, Z]: X +++ (Y *** Z) + = ??? + +def b3[X, Y, Z]: X *** Y +++ Z //expected: def b3[X, Y, Z]: (X *** Y) +++ Z + = ??? + +def b4[X, Y, Z]: (X *** Y) +++ Z + = ??? + +def b5[X, Y, Z]: X *** (Y +++ Z) + = ??? + +// right-associative, mixed precedence + +def c0[X, Y, Z]: X ++: Y **: Z //expected: def c0[X, Y, Z]: X ++: (Y **: Z) + = ??? + +def c1[X, Y, Z]: (X ++: Y) **: Z + = ??? + +def c2[X, Y, Z]: X ++: (Y **: Z) + = ??? + +def c3[X, Y, Z]: X **: Y ++: Z //expected: def c3[X, Y, Z]: (X **: Y) ++: Z + = ??? + +def c4[X, Y, Z]: (X **: Y) ++: Z + = ??? + +def c5[X, Y, Z]: X **: (Y ++: Z) + = ??? + +// left and right associative, mixed precedence + +def d0[X, Y, Z]: X +++ Y **: Z //expected: def d0[X, Y, Z]: X +++ (Y **: Z) + = ??? + +def d1[X, Y, Z]: (X +++ Y) **: Z + = ??? + +def d2[X, Y, Z]: X +++ (Y **: Z) + = ??? + +def d3[X, Y, Z]: X *** Y ++: Z //expected: def d3[X, Y, Z]: (X *** Y) ++: Z + = ??? + +def d4[X, Y, Z]: (X *** Y) ++: Z + = ??? + +def d5[X, Y, Z]: X *** (Y ++: Z) + = ??? diff --git a/scaladoc-testcases/src/tests/matchTypeTuple.scala b/scaladoc-testcases/src/tests/matchTypeTuple.scala new file mode 100644 index 000000000000..59dd630cb7a6 --- /dev/null +++ b/scaladoc-testcases/src/tests/matchTypeTuple.scala @@ -0,0 +1,10 @@ +package tests +package matchTypeTuple + +// issue 16084 + +sealed trait TupleTest[Take[_, _], Drop[_, _]]: + type Split[T <: Tuple, N <: Int] = (Take[T, N], Drop[T, N]) + + inline def splitAt[This <: Tuple](n: Int): Split[This, n.type] + = ??? diff --git a/scaladoc-testcases/src/tests/pathDependentTypes.scala b/scaladoc-testcases/src/tests/pathDependentTypes.scala new file mode 100644 index 000000000000..aa4708738195 --- /dev/null +++ b/scaladoc-testcases/src/tests/pathDependentTypes.scala @@ -0,0 +1,20 @@ +package tests +package pathDependentTypes + +import deriving.Mirror.ProductOf + +// issue 16143 + +trait Foo[A]: + type Out + +trait Bar[A]: + type Out + +def foo[A](using f: Foo[A])(using b: Bar[f.Out]): b.Out + = ??? 
+ +// issue 16057 + +def fromProductTyped[P <: Product](p: P)(using m: ProductOf[P]): m.MirroredElemTypes + = ??? diff --git a/scaladoc-testcases/src/tests/snippetTestcase2.scala b/scaladoc-testcases/src/tests/snippetTestcase2.scala index c85207b46f59..6a26767efa86 100644 --- a/scaladoc-testcases/src/tests/snippetTestcase2.scala +++ b/scaladoc-testcases/src/tests/snippetTestcase2.scala @@ -2,7 +2,7 @@ package tests package snippetTestcase2 trait Quotes2[A] { - val r1: r1Module[_] = ??? + val r1: r1Module[?] = ??? trait r1Module[A] { type X object Y { diff --git a/scaladoc-testcases/src/tests/supertypeParamsSubstitution.scala b/scaladoc-testcases/src/tests/supertypeParamsSubstitution.scala new file mode 100644 index 000000000000..f80f92afafa2 --- /dev/null +++ b/scaladoc-testcases/src/tests/supertypeParamsSubstitution.scala @@ -0,0 +1,20 @@ +package tests +package supertypeParamsSubstitution + +class MyIter[A, CC[_], C]: + def foo: A + = ??? + def bar: CC[CC[A]] + = ??? + def baz: C + = ??? + +class MyList[T] extends MyIter[T, MyList, MyList[T]] +//expected: def foo: T +//expected: def bar: MyList[MyList[T]] +//expected: def baz: MyList[T] + +class MyListInt extends MyList[Int] +//expected: def foo: Int +//expected: def bar: MyList[MyList[Int]] +//expected: def baz: MyList[Int] diff --git a/scaladoc-testcases/src/tests/thisType.scala b/scaladoc-testcases/src/tests/thisType.scala new file mode 100644 index 000000000000..942e50af86ec --- /dev/null +++ b/scaladoc-testcases/src/tests/thisType.scala @@ -0,0 +1,8 @@ +package tests +package thisType + +// issue 16024 + +class X[Map[_, _[_]]]: + inline def map[F[_]](f: [t] => t => F[t]): Map[this.type, F] = //expected: inline def map[F[_]](f: [t] => (x$1: t) => F[t]): Map[this.type, F] + ??? diff --git a/scaladoc-testcases/src/tests/visibility.scala b/scaladoc-testcases/src/tests/visibility.scala index b474eb15eebd..b0ca8d041c74 100644 --- a/scaladoc-testcases/src/tests/visibility.scala +++ b/scaladoc-testcases/src/tests/visibility.scala @@ -1,6 +1,8 @@ package tests package visibility +import scala.language.`3.3` // to avoid migration warnings/errors of private[this] + private object PrivateTopLevelObject //unexpected private[tests] object PrivateInOuterPackageTopLevelObject //unexpected diff --git a/scaladoc/.gitignore b/scaladoc/.gitignore deleted file mode 100644 index 893c4fbd144f..000000000000 --- a/scaladoc/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -# root where we generate documentation -output \ No newline at end of file diff --git a/scaladoc/.scalafmt.conf b/scaladoc/.scalafmt.conf deleted file mode 100644 index 9df0d88b69ce..000000000000 --- a/scaladoc/.scalafmt.conf +++ /dev/null @@ -1 +0,0 @@ -version = "2.6.3" diff --git a/scaladoc/README.md b/scaladoc/README.md index 774543996c7a..ee401161b364 100644 --- a/scaladoc/README.md +++ b/scaladoc/README.md @@ -1,151 +1,4 @@ # Scaladoc -Scaladoc is the documentation tool for -[Scala 3](https://github.com/lampepfl/dotty). -It uses the TastyInspector to access definitions, -which is an officially supported way to access Scala 3's perspective of a -codebase. - -We're aiming to support all the features Scaladoc did, plus new and exciting ones such as: - -- Markdown syntax! -- displaying project and API documentation together on one site! -- and more! 
- -**Yes, this page was generated using scaladoc** - -## Running the project - -Use the following commands to generate documentation for this project and for Dotty, respectively: - -``` -sbt scaladoc/generateSelfDocumentation -sbt scaladoc/generateScalaDocumentation -``` - -To actually view the documentation, the easiest way is to run the following in the project root: - -``` -cd scaladoc/output -python3 -m http.server 8080 -``` - -And afterwards point your browser to or - for this project and for Dotty documentation -respectively. - -It's not strictly necessary to go through an HTTP server, but because of CORS -the documentation won't work completely if you don't. - -## CLI and SBT Documentation - -The preferred way to use scaladoc is calling it from sbt `Compile/doc` task or to use CLI interface provided inside `dotty/bin/scaladoc` bash script. -More information about specific scaladoc flags you can find inside [Usage docs](https://docs.scala-lang.org/scala3/guides/scaladoc/settings.html) - -## Developing - -At least two of our contributors use [Metals](https://scalameta.org/metals/) to -work on the project. - -For every PR named with prefix `scaladoc/`, we build documentation for scaladoc and Dotty. For example, for -PR `scaladoc/update-docs` you can find them at: - -- -- -- - -Note that these correspond to the contents of `output` directory - that's -precisely what they are. - -You can also find the result of building the same sites for latest `master` at: - -- -- -- - -### Testing - -Most tests rely on comparing signatures (of classes, methods, objects etc.) extracted from the generated documentation -to signatures found in source files. Such tests are defined using [SignatureTest](test/dotty/tools/scaladoc/signatures/SignatureTest.scala) class -and its subtypes (such as [TranslatableSignaturesTestCases](test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala)) - -WARNING: As the classes mentioned above are likely to evolve, the description below might easily get out of date. -In case of any discrepancies rely on the source files instead. - -`SignatureTest` requires that you specify the names of the files used to extract signatures, -the names of directories containing corresponding TASTY files -and the kinds of signatures from source files (corresponding to keywords used to declare them like `def`, `class`, `object` etc.) -whose presence in the generated documentation will be checked (other signatures, when missing, will be ignored). -The mentioned source files should be located directly inside [](../scaladoc-testcases/src/tests) directory -but the file names passed as parameters should contain neither this path prefix nor `.scala` suffix. - -By default it's expected that all signatures from the source files will be present in the documentation -but not vice versa (because the documentation can contain also inherited signatures). -To validate that a signature present in the source does not exist in the documentation -(because they should be hidden from users) add `//unexpected` comment after the signature in the same line. -This will cause an error if a signature with the same name appears in the documentation -(even if some elements of the signature are slightly different - to avoid accidentally passing tests). -If the signature in the documentation is expected to slightly differ from how it's defined in the source code -you can add a `//expected: ` comment (also in the same line and followed by a space) followed by the expected signature. 
-Alternatively you can use `/*<-*/` and `/*->*/` as opening and closing parentheses for parts of a signature present in the source but undesired in the documentation (at least at the current stage of development), e.g. - -``` -def foo/*<-*/()/*->*/: Int -``` - -will make the expected signature be - -``` -def foo: Int -``` - -instead of - -``` -def foo(): Int -``` - -Because of the way how signatures in source are parsed, they're expected to span until the end of a line (including comments except those special ones mentioned above, which change the behaviour of tests) so if a definition contains an implementation, it should be placed in a separate line, e.g. - -``` -def foo: Int - = 1 - -class Bar -{ - //... -} -``` - -Otherwise the implementation would be treated as a part of the signature. - -## Contributing - -We're happy that you'd like to help us! - -We have two issue labels you should take a look at: `good first issue` and -`self-contained`. First is easy pickings: you'll be able to contribute without -needing to dive too deep into the project. Second is reverse: it's an issue -that's you may find interesting, complex and self-contained enough that you can -continue chipping away at it without needing to worry too much about merge -conflicts. - -To contribute to the project with your code, fork this repo and create a pull request from a fresh branch from there. -To keep the history of commits clean, make sure your commits are squashed into one -and all your changes are applied on top of the latest master branch (if not - rebase on it instead of merging it). -Make sure all the tests pass (simply run `sbt test` to verify that). - -## FAQ - - -### Why use TASTy? - -A documentation tool needs to access compiler information about the project - it -needs to list all definitions, resolve them by name, and query their members. -Tasty Reflect is the dedicated way in Scala 3 of accessing this information. - -## Credits - -- [Flatart](https://www.iconfinder.com/Flatart) - Gitter icon - - +You can find the documentation to help better understand and contribute to +scaladoc [here in the contributing guide](https://dotty.epfl.ch/docs/contributing/scaladoc.html). 
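For readers of the scaladoc testcases added earlier in this patch (exports1.scala, infixTypes.scala, visibility.scala, and friends), a brief illustration of the signature-test markers they use, reusing lines that appear verbatim above; the conventions themselves are documented in the contributing guide that the new README links to:

```scala
// Compared verbatim against the signature in the generated documentation:
def someTrait1[C, D]: C SomeTrait D
  = ???

// The rendered signature is expected to differ from the source, so the
// test compares against the text following `//expected: ` instead:
def someTrait2[E, F]: SomeTrait[E, F] //expected: def someTrait2[E, F]: E SomeTrait F
  = ???

// Must not appear in the generated documentation at all:
private object PrivateTopLevelObject //unexpected
```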
diff --git a/scaladoc/e2e/.prettierrc b/scaladoc/e2e/.prettierrc deleted file mode 100644 index 2fd31d7ed46b..000000000000 --- a/scaladoc/e2e/.prettierrc +++ /dev/null @@ -1,9 +0,0 @@ -{ - "printWidth": 100, - "tabWidth": 2, - "semi": true, - "singleQuote": true, - "trailingComma": "all", - "bracketSpacing": true, - "arrowParens": "avoid" -} diff --git a/scaladoc/e2e/README.md b/scaladoc/e2e/README.md deleted file mode 100644 index b9f088cd7198..000000000000 --- a/scaladoc/e2e/README.md +++ /dev/null @@ -1,12 +0,0 @@ -## Scaladoc e2e test suite - -### Prerequisites - -- install Node.js -- run `npm i` - -### Running tests - -- generate the test docs: `sbt scaladoc/generateTestcasesDocumentation` -- run the web server in the `output` directory: `python3 -m http.server 8080` -- run `npm run cypress:open` to see the cypress UI or `npm run cypress:run` to run tests heedlessly diff --git a/scaladoc/e2e/cypress.json b/scaladoc/e2e/cypress.json deleted file mode 100644 index 0967ef424bce..000000000000 --- a/scaladoc/e2e/cypress.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/scaladoc/e2e/cypress/integration/filter-bar.spec.ts b/scaladoc/e2e/cypress/integration/filter-bar.spec.ts deleted file mode 100644 index fb2a1fbefe66..000000000000 --- a/scaladoc/e2e/cypress/integration/filter-bar.spec.ts +++ /dev/null @@ -1,303 +0,0 @@ -describe('filter-bar', () => { - beforeEach(openPage); - - it('properly renders filters extracted from the document', () => { - const filterBar = new FilterBarFixture().toggle(); - - const testTable: TestTable = [ - ['Visibility', ['public', 'protected']], - ['Keywords', ['no keywords', 'abstract', 'case', 'final', 'sealed']], - ['Extension', ['Standard member', 'from tests']], - ]; - - testFilterBarOptions(filterBar, testTable); - }); - - it('properly filters the definition list through search box', () => { - const tabs = new TabsFixture(); - const filterBar = new FilterBarFixture().toggle(); - - // type - tabs.definition('publicType').should('be.visible'); - // protected type - tabs.definition('protectedType').should('be.visible'); - - filterBar.input.type('protectedType'); - - // protected type - tabs.definition('protectedType').should('be.visible'); - // type - tabs.definition('publicType').should('not.be.visible'); - - const testTable: TestTable = [ - ['Visibility', ['public', 'protected']], - ['Keywords', ['no keywords']], - ['Extension', ['Standard member']], - ]; - - testFilterBarOptions(filterBar, testTable); - }); - - it('works with select all / deselect all', () => { - const filterBar = new FilterBarFixture().toggle(); - const group = filterBar.group(0); - const batchSelection = filterBar.group(0).batchSelection; - - const public = () => group.filterOption('public').then(x => x.isSelected); - const protected = () => group.filterOption('protected').then(x => x.isSelected); - - public().should('be.equal', true); - protected().should('be.equal', true); - - batchSelection.deselectAll(); - - public().should('be.equal', false); - protected().should('be.equal', false); - - batchSelection.selectAll(); - - public().should('be.equal', true); - protected().should('be.equal', true); - }); - - describe('filter configurations', () => { - describe('returns empty list after deselecting', () => { - it(`'public' and 'no keywords'`, () => { - const filterBar = new FilterBarFixture().toggle(); - filterBar.group(0).toggleFilter('public'); - filterBar.group(1).toggleFilter('no keywords'); - - new TabsFixture().definitionTypes.should('not.be.visible'); - }); - - it(`'Standard member'`, 
() => { - new FilterBarFixture().toggle().group(2).toggleFilter('Standard member'); - - new TabsFixture().definitionTypes.should('not.be.visible'); - }); - - it('all visibility options', () => { - new FilterBarFixture().toggle().group(0).toggleFilter('public', 'protected'); - - new TabsFixture().definitionTypes.should('not.be.visible'); - }); - - it('all keywords options', () => { - new FilterBarFixture() - .toggle() - .group(1) - .toggleFilter('no keywords', 'abstract', 'case', 'final', 'sealed'); - - new TabsFixture().definitionTypes.should('not.be.visible'); - }); - - it('all extension options', () => { - new FilterBarFixture().toggle().group(2).toggleFilter('Standard member', 'from tests'); - - new TabsFixture().definitionTypes.should('not.be.visible'); - }); - }); - - describe('returns filtered list after deselecting', () => { - it(`'protected'`, () => { - const tabs = new TabsFixture(); - - tabs.definition('protected').should('be.visible'); - new FilterBarFixture().toggle().group(0).toggleFilter('protected'); - tabs.definition('protected').should('not.be.visible'); - }); - - it(`'no keywords', 'case', 'final' and 'sealed'`, () => { - const tabs = new TabsFixture(); - - // protected object - tabs.definition('ProtectedObject').should('be.visible'); - - // sealed case class - tabs.definition('D').should('be.visible'); - - // final case class - tabs.definition('E').should('be.visible'); - - new FilterBarFixture() - .toggle() - .group(1) - .toggleFilter('no keywords', 'case', 'final', 'sealed'); - - // protected object - tabs.definition('ProtectedObject').should('not.be.visible'); - - // sealed abstract class - tabs.definition('B').should('be.visible'); - - // abstract case class - tabs.definition('C').should('be.visible'); - - // sealed case class - tabs.definition('D').should('not.be.visible'); - - // final case class - tabs.definition('E').should('not.be.visible'); - }); - - it(`'no keywords', 'final' and 'sealed'`, () => { - const tabs = new TabsFixture(); - - // protected object - tabs.definition('ProtectedObject').should('be.visible'); - - new FilterBarFixture().toggle().group(1).toggleFilter('no keywords', 'final', 'sealed'); - - // protected object - tabs.definition('ProtectedObject').should('not.be.visible'); - - // sealed abstract class - tabs.definition('B').should('be.visible'); - - // abstract case class - tabs.definition('C').should('be.visible'); - - // sealed case class - tabs.definition('D').should('be.visible'); - - // final case class - tabs.definition('E').should('be.visible'); - }); - }); - }); -}); - -class FilterBarFixture { - private get toggleButton() { - return cy.findByTestId('filterToggleButton'); - } - - group(at: number) { - return new FilterBarGroupFixture(at); - } - - get input() { - return new FilterInputFixture(); - } - - toggle() { - this.toggleButton.click(); - - return this; - } -} - -class FilterBarGroupFixture { - constructor(private readonly index: number) {} - - private get group() { - return cy.findAllByTestId('filterGroup').eq(this.index); - } - - private get filterButtons() { - return this.group - .findByTestId('filterGroupList') - .findAllByTestId('filterGroupButton') - .filter(':visible'); - } - - get title() { - return this.group.findByTestId('filterGroupTitle'); - } - - get batchSelection() { - return new BatchSelectionFixture(() => this.group); - } - - get filterOptionsValues() { - return this.filterOptions.then(options => { - const acc: string[] = []; - options.forEach(o => o.name.then(v => acc.push(v))); - return cy.wrap(acc); - }); - } - 
- filterOption(name: string) { - return this.filterButtons - .contains(name) - .then($el => new FilterOptionFixture(() => cy.wrap($el))); - } - - get filterOptions() { - return ( - this.filterButtons - // .filter(':visible') - .then($buttons => - cy.wrap($buttons.toArray().map(el => new FilterOptionFixture(() => cy.wrap(el)))), - ) - ); - } - - toggleFilter(...names: string[]) { - names.forEach(name => this.filterButtons.contains(name).click()); - return this; - } -} - -class FilterOptionFixture { - constructor(private readonly root: () => Cypress.Chainable>) {} - - get name() { - return this.root().then($el => $el.text()); - } - - get isSelected() { - return this.root().then($el => cy.wrap($el.data('selected'))); - } -} - -class TabsFixture { - get definitionTypes() { - return cy.findAllByTestId('definitionList'); - } - - definition(name: string) { - return this.definitionTypes.contains(name); - } -} - -class FilterInputFixture { - private get input() { - return cy.findByTestId('filterBarInput'); - } - - type(phrase: string) { - this.input.type(phrase); - } -} - -class BatchSelectionFixture { - constructor(private readonly root: () => Cypress.Chainable>) {} - - private get container() { - return this.root().findByTestId('filterGroupBatchToggle'); - } - - selectAll() { - this.container.findByText('Select All').click(); - } - - deselectAll() { - this.container.findByText('Deselect All').click(); - } -} - -function openPage() { - cy.visit('http://localhost:8080/testcases/api/tests/-filter-test/index.html'); -} - -type TestTable = [string, string[]][]; - -function testFilterBarOptions(filterBar: FilterBarFixture, testTable: TestTable) { - testTable.forEach(([title, filterOptions], index) => { - const group = filterBar.group(index); - - group.title.should('have.text', title); - group.filterOptionsValues.should('deep.equal', filterOptions); - }); -} diff --git a/scaladoc/e2e/cypress/support/commands.ts b/scaladoc/e2e/cypress/support/commands.ts deleted file mode 100644 index 44cbd485bc2e..000000000000 --- a/scaladoc/e2e/cypress/support/commands.ts +++ /dev/null @@ -1 +0,0 @@ -import "@testing-library/cypress/add-commands"; diff --git a/scaladoc/e2e/cypress/support/index.ts b/scaladoc/e2e/cypress/support/index.ts deleted file mode 100644 index 8dc2f81f81f8..000000000000 --- a/scaladoc/e2e/cypress/support/index.ts +++ /dev/null @@ -1,4 +0,0 @@ -import './commands'; -import { configure } from '@testing-library/cypress'; - -configure({ testIdAttribute: 'data-test-id' }); diff --git a/scaladoc/e2e/package-lock.json b/scaladoc/e2e/package-lock.json deleted file mode 100644 index bcebfd932f1d..000000000000 --- a/scaladoc/e2e/package-lock.json +++ /dev/null @@ -1,2057 +0,0 @@ -{ - "name": "scaladoc-e2e", - "version": "1.0.0", - "lockfileVersion": 1, - "requires": true, - "dependencies": { - "@babel/code-frame": { - "version": "7.10.4", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.10.4.tgz", - "integrity": "sha512-vG6SvB6oYEhvgisZNFRmRCUkLz11c7rp+tbNTynGqc6mS1d5ATd/sGyV6W0KZZnXRKMTzZDRgQT3Ou9jhpAfUg==", - "dev": true, - "requires": { - "@babel/highlight": "^7.10.4" - } - }, - "@babel/helper-validator-identifier": { - "version": "7.10.4", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.10.4.tgz", - "integrity": "sha512-3U9y+43hz7ZM+rzG24Qe2mufW5KhvFg/NhnNph+i9mgCtdTCtMJuI1TMkrIUiK7Ix4PYlRF9I5dhqaLYA/ADXw==", - "dev": true - }, - "@babel/highlight": { - "version": "7.10.4", - "resolved": 
"https://registry.npmjs.org/@babel/highlight/-/highlight-7.10.4.tgz", - "integrity": "sha512-i6rgnR/YgPEQzZZnbTHHuZdlE8qyoBNalD6F+q4vAFlcMEcqmkoG+mPqJYJCo63qPf74+Y1UZsl3l6f7/RIkmA==", - "dev": true, - "requires": { - "@babel/helper-validator-identifier": "^7.10.4", - "chalk": "^2.0.0", - "js-tokens": "^4.0.0" - }, - "dependencies": { - "ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, - "requires": { - "color-convert": "^1.9.0" - } - }, - "chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dev": true, - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - }, - "color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dev": true, - "requires": { - "color-name": "1.1.3" - } - }, - "color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", - "dev": true - }, - "has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", - "dev": true - }, - "supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, - "requires": { - "has-flag": "^3.0.0" - } - } - } - }, - "@babel/runtime": { - "version": "7.12.5", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.12.5.tgz", - "integrity": "sha512-plcc+hbExy3McchJCEQG3knOsuh3HH+Prx1P6cLIkET/0dLuQDEnrT+s27Axgc9bqfsmNUNHfscgMUdBpC9xfg==", - "dev": true, - "requires": { - "regenerator-runtime": "^0.13.4" - } - }, - "@babel/runtime-corejs3": { - "version": "7.12.5", - "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.12.5.tgz", - "integrity": "sha512-roGr54CsTmNPPzZoCP1AmDXuBoNao7tnSA83TXTwt+UK5QVyh1DIJnrgYRPWKCF2flqZQXwa7Yr8v7VmLzF0YQ==", - "dev": true, - "requires": { - "core-js-pure": "^3.0.0", - "regenerator-runtime": "^0.13.4" - } - }, - "@cypress/listr-verbose-renderer": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/@cypress/listr-verbose-renderer/-/listr-verbose-renderer-0.4.1.tgz", - "integrity": "sha1-p3SS9LEdzHxEajSz4ochr9M8ZCo=", - "dev": true, - "requires": { - "chalk": "^1.1.3", - "cli-cursor": "^1.0.2", - "date-fns": "^1.27.2", - "figures": "^1.7.0" - }, - "dependencies": { - "chalk": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", - "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", - "dev": true, - "requires": { - "ansi-styles": "^2.2.1", - "escape-string-regexp": "^1.0.2", - "has-ansi": "^2.0.0", - "strip-ansi": "^3.0.0", - "supports-color": "^2.0.0" - } - }, - "supports-color": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", - "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=", - 
"dev": true - } - } - }, - "@cypress/request": { - "version": "2.88.5", - "resolved": "https://registry.npmjs.org/@cypress/request/-/request-2.88.5.tgz", - "integrity": "sha512-TzEC1XMi1hJkywWpRfD2clreTa/Z+lOrXDCxxBTBPEcY5azdPi56A6Xw+O4tWJnaJH3iIE7G5aDXZC6JgRZLcA==", - "dev": true, - "requires": { - "aws-sign2": "~0.7.0", - "aws4": "^1.8.0", - "caseless": "~0.12.0", - "combined-stream": "~1.0.6", - "extend": "~3.0.2", - "forever-agent": "~0.6.1", - "form-data": "~2.3.2", - "har-validator": "~5.1.3", - "http-signature": "~1.2.0", - "is-typedarray": "~1.0.0", - "isstream": "~0.1.2", - "json-stringify-safe": "~5.0.1", - "mime-types": "~2.1.19", - "oauth-sign": "~0.9.0", - "performance-now": "^2.1.0", - "qs": "~6.5.2", - "safe-buffer": "^5.1.2", - "tough-cookie": "~2.5.0", - "tunnel-agent": "^0.6.0", - "uuid": "^3.3.2" - } - }, - "@cypress/xvfb": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/@cypress/xvfb/-/xvfb-1.2.4.tgz", - "integrity": "sha512-skbBzPggOVYCbnGgV+0dmBdW/s77ZkAOXIC1knS8NagwDjBrNC1LuXtQJeiN6l+m7lzmHtaoUw/ctJKdqkG57Q==", - "dev": true, - "requires": { - "debug": "^3.1.0", - "lodash.once": "^4.1.1" - }, - "dependencies": { - "debug": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", - "dev": true, - "requires": { - "ms": "^2.1.1" - } - } - } - }, - "@jest/types": { - "version": "26.6.2", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-26.6.2.tgz", - "integrity": "sha512-fC6QCp7Sc5sX6g8Tvbmj4XUTbyrik0akgRy03yjXbQaBWWNWGE7SGtJk98m0N8nzegD/7SggrUlivxo5ax4KWQ==", - "dev": true, - "requires": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^3.0.0", - "@types/node": "*", - "@types/yargs": "^15.0.0", - "chalk": "^4.0.0" - } - }, - "@samverschueren/stream-to-observable": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/@samverschueren/stream-to-observable/-/stream-to-observable-0.3.1.tgz", - "integrity": "sha512-c/qwwcHyafOQuVQJj0IlBjf5yYgBI7YPJ77k4fOJYesb41jio65eaJODRUmfYKhTOFBrIZ66kgvGPlNbjuoRdQ==", - "dev": true, - "requires": { - "any-observable": "^0.3.0" - } - }, - "@testing-library/cypress": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/@testing-library/cypress/-/cypress-7.0.1.tgz", - "integrity": "sha512-LtggqG/7Hdc1EiKdmqXQwxWOO3ET1dkZtq0S8mIe8o+xaOtaVLrdCn0dE8Bi4Aj7z3w51w6wN9STdYymnUPlnQ==", - "dev": true, - "requires": { - "@babel/runtime": "^7.11.2", - "@testing-library/dom": "^7.22.2" - } - }, - "@testing-library/dom": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-7.26.5.tgz", - "integrity": "sha512-2v/fv0s4keQjJIcD4bjfJMFtvxz5icartxUWdIZVNJR539WD9oxVrvIAPw+3Ydg4RLgxt0rvQx3L9cAjCci0Kg==", - "dev": true, - "requires": { - "@babel/code-frame": "^7.10.4", - "@babel/runtime": "^7.10.3", - "@types/aria-query": "^4.2.0", - "aria-query": "^4.2.2", - "chalk": "^4.1.0", - "dom-accessibility-api": "^0.5.1", - "lz-string": "^1.4.4", - "pretty-format": "^26.4.2" - } - }, - "@types/aria-query": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/@types/aria-query/-/aria-query-4.2.0.tgz", - "integrity": "sha512-iIgQNzCm0v7QMhhe4Jjn9uRh+I6GoPmt03CbEtwx3ao8/EfoQcmgtqH4vQ5Db/lxiIGaWDv6nwvunuh0RyX0+A==", - "dev": true - }, - "@types/istanbul-lib-coverage": { - "version": "2.0.3", - "resolved": 
"https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.3.tgz", - "integrity": "sha512-sz7iLqvVUg1gIedBOvlkxPlc8/uVzyS5OwGz1cKjXzkl3FpL3al0crU8YGU1WoHkxn0Wxbw5tyi6hvzJKNzFsw==", - "dev": true - }, - "@types/istanbul-lib-report": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", - "integrity": "sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg==", - "dev": true, - "requires": { - "@types/istanbul-lib-coverage": "*" - } - }, - "@types/istanbul-reports": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.0.tgz", - "integrity": "sha512-nwKNbvnwJ2/mndE9ItP/zc2TCzw6uuodnF4EHYWD+gCQDVBuRQL5UzbZD0/ezy1iKsFU2ZQiDqg4M9dN4+wZgA==", - "dev": true, - "requires": { - "@types/istanbul-lib-report": "*" - } - }, - "@types/node": { - "version": "14.14.6", - "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.6.tgz", - "integrity": "sha512-6QlRuqsQ/Ox/aJEQWBEJG7A9+u7oSYl3mem/K8IzxXG/kAGbV1YPD9Bg9Zw3vyxC/YP+zONKwy8hGkSt1jxFMw==", - "dev": true - }, - "@types/sinonjs__fake-timers": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-6.0.2.tgz", - "integrity": "sha512-dIPoZ3g5gcx9zZEszaxLSVTvMReD3xxyyDnQUjA6IYDG9Ba2AV0otMPs+77sG9ojB4Qr2N2Vk5RnKeuA0X/0bg==", - "dev": true - }, - "@types/sizzle": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/@types/sizzle/-/sizzle-2.3.2.tgz", - "integrity": "sha512-7EJYyKTL7tFR8+gDbB6Wwz/arpGa0Mywk1TJbNzKzHtzbwVmY4HR9WqS5VV7dsBUKQmPNr192jHr/VpBluj/hg==", - "dev": true - }, - "@types/yargs": { - "version": "15.0.9", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.9.tgz", - "integrity": "sha512-HmU8SeIRhZCWcnRskCs36Q1Q00KBV6Cqh/ora8WN1+22dY07AZdn6Gel8QZ3t26XYPImtcL8WV/eqjhVmMEw4g==", - "dev": true, - "requires": { - "@types/yargs-parser": "*" - } - }, - "@types/yargs-parser": { - "version": "15.0.0", - "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-15.0.0.tgz", - "integrity": "sha512-FA/BWv8t8ZWJ+gEOnLLd8ygxH/2UFbAvgEonyfN6yWGLKc7zVjbpl2Y4CTjid9h2RfgPP6SEt6uHwEOply00yw==", - "dev": true - }, - "ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "dev": true, - "requires": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - } - }, - "ansi-escapes": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", - "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", - "dev": true - }, - "ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", - "dev": true - }, - "ansi-styles": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", - "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=", - "dev": true - }, - "any-observable": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/any-observable/-/any-observable-0.3.0.tgz", - "integrity": 
"sha512-/FQM1EDkTsf63Ub2C6O7GuYFDsSXUwsaZDurV0np41ocwq0jthUAYCmhBX9f+KwlaCgIuWyr/4WlUQUBfKfZog==", - "dev": true - }, - "arch": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/arch/-/arch-2.2.0.tgz", - "integrity": "sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ==", - "dev": true - }, - "aria-query": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-4.2.2.tgz", - "integrity": "sha512-o/HelwhuKpTj/frsOsbNLNgnNGVIFsVP/SW2BSF14gVl7kAfMOJ6/8wUAUvG1R1NHKrfG+2sHZTu0yauT1qBrA==", - "dev": true, - "requires": { - "@babel/runtime": "^7.10.2", - "@babel/runtime-corejs3": "^7.10.2" - } - }, - "asn1": { - "version": "0.2.4", - "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", - "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", - "dev": true, - "requires": { - "safer-buffer": "~2.1.0" - } - }, - "assert-plus": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", - "dev": true - }, - "async": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/async/-/async-3.2.0.tgz", - "integrity": "sha512-TR2mEZFVOj2pLStYxLht7TyfuRzaydfpxr3k9RpHIzMgw7A64dzsdqCxH1WJyQdoe8T10nDXd9wnEigmiuHIZw==", - "dev": true - }, - "asynckit": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", - "dev": true - }, - "at-least-node": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz", - "integrity": "sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==", - "dev": true - }, - "aws-sign2": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", - "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=", - "dev": true - }, - "aws4": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz", - "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==", - "dev": true - }, - "balanced-match": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", - "dev": true - }, - "bcrypt-pbkdf": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", - "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", - "dev": true, - "requires": { - "tweetnacl": "^0.14.3" - } - }, - "blob-util": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/blob-util/-/blob-util-2.0.2.tgz", - "integrity": "sha512-T7JQa+zsXXEa6/8ZhHcQEW1UFfVM49Ts65uBkFL6fz2QmrElqmbajIDJvuA0tEhRe5eIjpV9ZF+0RfZR9voJFQ==", - "dev": true - }, - "bluebird": { - "version": "3.7.2", - "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", - "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", - "dev": true - }, - "brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "requires": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - 
"buffer-crc32": { - "version": "0.2.13", - "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", - "integrity": "sha1-DTM+PwDqxQqhRUq9MO+MKl2ackI=", - "dev": true - }, - "buffer-from": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz", - "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==", - "dev": true - }, - "cachedir": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/cachedir/-/cachedir-2.3.0.tgz", - "integrity": "sha512-A+Fezp4zxnit6FanDmv9EqXNAi3vt9DWp51/71UEhXukb7QUuvtv9344h91dyAxuTLoSYJFU299qzR3tzwPAhw==", - "dev": true - }, - "caseless": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", - "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=", - "dev": true - }, - "chalk": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", - "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", - "dev": true, - "requires": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "dependencies": { - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "requires": { - "color-convert": "^2.0.1" - } - } - } - }, - "check-more-types": { - "version": "2.24.0", - "resolved": "https://registry.npmjs.org/check-more-types/-/check-more-types-2.24.0.tgz", - "integrity": "sha1-FCD/sQ/URNz8ebQ4kbv//TKoRgA=", - "dev": true - }, - "ci-info": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz", - "integrity": "sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==", - "dev": true - }, - "cli-cursor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-1.0.2.tgz", - "integrity": "sha1-ZNo/fValRBLll5S9Ytw1KV6PKYc=", - "dev": true, - "requires": { - "restore-cursor": "^1.0.1" - } - }, - "cli-table3": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.0.tgz", - "integrity": "sha512-gnB85c3MGC7Nm9I/FkiasNBOKjOiO1RNuXXarQms37q4QMpWdlbBgD/VnOStA2faG1dpXMv31RFApjX1/QdgWQ==", - "dev": true, - "requires": { - "colors": "^1.1.2", - "object-assign": "^4.1.0", - "string-width": "^4.2.0" - } - }, - "cli-truncate": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-0.2.1.tgz", - "integrity": "sha1-nxXPuwcFAFNpIWxiasfQWrkN1XQ=", - "dev": true, - "requires": { - "slice-ansi": "0.0.4", - "string-width": "^1.0.1" - }, - "dependencies": { - "is-fullwidth-code-point": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", - "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", - "dev": true, - "requires": { - "number-is-nan": "^1.0.0" - } - }, - "string-width": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", - "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", - "dev": true, - "requires": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" - } - } - } - }, - "code-point-at": { - "version": "1.1.0", - "resolved": 
"https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", - "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=", - "dev": true - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - "colors": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/colors/-/colors-1.4.0.tgz", - "integrity": "sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA==", - "dev": true, - "optional": true - }, - "combined-stream": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", - "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "dev": true, - "requires": { - "delayed-stream": "~1.0.0" - } - }, - "commander": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", - "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", - "dev": true - }, - "common-tags": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.0.tgz", - "integrity": "sha512-6P6g0uetGpW/sdyUy/iQQCbFF0kWVMSIVSyYz7Zgjcgh8mgw8PQzDNZeyZ5DQ2gM7LBoZPHmnjz8rUthkBG5tw==", - "dev": true - }, - "concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", - "dev": true - }, - "concat-stream": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", - "integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", - "dev": true, - "requires": { - "buffer-from": "^1.0.0", - "inherits": "^2.0.3", - "readable-stream": "^2.2.2", - "typedarray": "^0.0.6" - } - }, - "core-js-pure": { - "version": "3.6.5", - "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.6.5.tgz", - "integrity": "sha512-lacdXOimsiD0QyNf9BC/mxivNJ/ybBGJXQFKzRekp1WTHoVUWsUHEn+2T8GJAzzIhyOuXA+gOxCVN3l+5PLPUA==", - "dev": true - }, - "core-util-is": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=", - "dev": true - }, - "cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dev": true, - "requires": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - } - }, - "cypress": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/cypress/-/cypress-5.5.0.tgz", - "integrity": "sha512-UHEiTca8AUTevbT2pWkHQlxoHtXmbq+h6Eiu/Mz8DqpNkF98zjTBLv/HFiKJUU5rQzp9EwSWtms33p5TWCJ8tQ==", - "dev": true, - "requires": { - "@cypress/listr-verbose-renderer": "^0.4.1", - "@cypress/request": "^2.88.5", - "@cypress/xvfb": "^1.2.4", - "@types/sinonjs__fake-timers": 
"^6.0.1", - "@types/sizzle": "^2.3.2", - "arch": "^2.1.2", - "blob-util": "2.0.2", - "bluebird": "^3.7.2", - "cachedir": "^2.3.0", - "chalk": "^4.1.0", - "check-more-types": "^2.24.0", - "cli-table3": "~0.6.0", - "commander": "^4.1.1", - "common-tags": "^1.8.0", - "debug": "^4.1.1", - "eventemitter2": "^6.4.2", - "execa": "^4.0.2", - "executable": "^4.1.1", - "extract-zip": "^1.7.0", - "fs-extra": "^9.0.1", - "getos": "^3.2.1", - "is-ci": "^2.0.0", - "is-installed-globally": "^0.3.2", - "lazy-ass": "^1.6.0", - "listr": "^0.14.3", - "lodash": "^4.17.19", - "log-symbols": "^4.0.0", - "minimist": "^1.2.5", - "moment": "^2.27.0", - "ospath": "^1.2.2", - "pretty-bytes": "^5.4.1", - "ramda": "~0.26.1", - "request-progress": "^3.0.0", - "supports-color": "^7.2.0", - "tmp": "~0.2.1", - "untildify": "^4.0.0", - "url": "^0.11.0", - "yauzl": "^2.10.0" - } - }, - "dashdash": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", - "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", - "dev": true, - "requires": { - "assert-plus": "^1.0.0" - } - }, - "date-fns": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-1.30.1.tgz", - "integrity": "sha512-hBSVCvSmWC+QypYObzwGOd9wqdDpOt+0wl0KbU+R+uuZBS1jN8VsD1ss3irQDknRj5NvxiTF6oj/nDRnN/UQNw==", - "dev": true - }, - "debug": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz", - "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==", - "dev": true, - "requires": { - "ms": "2.1.2" - } - }, - "delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", - "dev": true - }, - "dom-accessibility-api": { - "version": "0.5.4", - "resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.5.4.tgz", - "integrity": "sha512-TvrjBckDy2c6v6RLxPv5QXOnU+SmF9nBII5621Ve5fu6Z/BDrENurBEvlC1f44lKEUVqOpK4w9E5Idc5/EgkLQ==", - "dev": true - }, - "ecc-jsbn": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", - "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=", - "dev": true, - "requires": { - "jsbn": "~0.1.0", - "safer-buffer": "^2.1.0" - } - }, - "elegant-spinner": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/elegant-spinner/-/elegant-spinner-1.0.1.tgz", - "integrity": "sha1-2wQ1IcldfjA/2PNFvtwzSc+wcp4=", - "dev": true - }, - "emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true - }, - "end-of-stream": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", - "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", - "dev": true, - "requires": { - "once": "^1.4.0" - } - }, - "escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", - "dev": true - }, - "eventemitter2": { - "version": "6.4.3", - "resolved": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-6.4.3.tgz", - "integrity": 
"sha512-t0A2msp6BzOf+QAcI6z9XMktLj52OjGQg+8SJH6v5+3uxNpWYRR3wQmfA+6xtMU9kOC59qk9licus5dYcrYkMQ==", - "dev": true - }, - "execa": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-4.1.0.tgz", - "integrity": "sha512-j5W0//W7f8UxAn8hXVnwG8tLwdiUy4FJLcSupCg6maBYZDpyBvTApK7KyuI4bKj8KOh1r2YH+6ucuYtJv1bTZA==", - "dev": true, - "requires": { - "cross-spawn": "^7.0.0", - "get-stream": "^5.0.0", - "human-signals": "^1.1.1", - "is-stream": "^2.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^4.0.0", - "onetime": "^5.1.0", - "signal-exit": "^3.0.2", - "strip-final-newline": "^2.0.0" - }, - "dependencies": { - "onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", - "dev": true, - "requires": { - "mimic-fn": "^2.1.0" - } - } - } - }, - "executable": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/executable/-/executable-4.1.1.tgz", - "integrity": "sha512-8iA79xD3uAch729dUG8xaaBBFGaEa0wdD2VkYLFHwlqosEj/jT66AzcreRDSgV7ehnNLBW2WR5jIXwGKjVdTLg==", - "dev": true, - "requires": { - "pify": "^2.2.0" - } - }, - "exit-hook": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/exit-hook/-/exit-hook-1.1.1.tgz", - "integrity": "sha1-8FyiM7SMBdVP/wd2XfhQfpXAL/g=", - "dev": true - }, - "extend": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", - "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", - "dev": true - }, - "extract-zip": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-1.7.0.tgz", - "integrity": "sha512-xoh5G1W/PB0/27lXgMQyIhP5DSY/LhoCsOyZgb+6iMmRtCwVBo55uKaMoEYrDCKQhWvqEip5ZPKAc6eFNyf/MA==", - "dev": true, - "requires": { - "concat-stream": "^1.6.2", - "debug": "^2.6.9", - "mkdirp": "^0.5.4", - "yauzl": "^2.10.0" - }, - "dependencies": { - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "requires": { - "ms": "2.0.0" - } - }, - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", - "dev": true - } - } - }, - "extsprintf": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", - "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=", - "dev": true - }, - "fast-deep-equal": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true - }, - "fast-json-stable-stringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true - }, - "fd-slicer": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", - "integrity": "sha1-JcfInLH5B3+IkbvmHY85Dq4lbx4=", - "dev": true, - "requires": { - "pend": "~1.2.0" - } - }, - "figures": { - "version": "1.7.0", - "resolved": 
"https://registry.npmjs.org/figures/-/figures-1.7.0.tgz", - "integrity": "sha1-y+Hjr/zxzUS4DK3+0o3Hk6lwHS4=", - "dev": true, - "requires": { - "escape-string-regexp": "^1.0.5", - "object-assign": "^4.1.0" - } - }, - "forever-agent": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=", - "dev": true - }, - "form-data": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", - "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", - "dev": true, - "requires": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.6", - "mime-types": "^2.1.12" - } - }, - "fs-extra": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.0.1.tgz", - "integrity": "sha512-h2iAoN838FqAFJY2/qVpzFXy+EBxfVE220PalAqQLDVsFOHLJrZvut5puAbCdNv6WJk+B8ihI+k0c7JK5erwqQ==", - "dev": true, - "requires": { - "at-least-node": "^1.0.0", - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^1.0.0" - } - }, - "fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", - "dev": true - }, - "get-stream": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", - "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", - "dev": true, - "requires": { - "pump": "^3.0.0" - } - }, - "getos": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/getos/-/getos-3.2.1.tgz", - "integrity": "sha512-U56CfOK17OKgTVqozZjUKNdkfEv6jk5WISBJ8SHoagjE6L69zOwl3Z+O8myjY9MEW3i2HPWQBt/LTbCgcC973Q==", - "dev": true, - "requires": { - "async": "^3.2.0" - } - }, - "getpass": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", - "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", - "dev": true, - "requires": { - "assert-plus": "^1.0.0" - } - }, - "glob": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", - "dev": true, - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, - "global-dirs": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-2.0.1.tgz", - "integrity": "sha512-5HqUqdhkEovj2Of/ms3IeS/EekcO54ytHRLV4PEY2rhRwrHXLQjeVEES0Lhka0xwNDtGYn58wyC4s5+MHsOO6A==", - "dev": true, - "requires": { - "ini": "^1.3.5" - } - }, - "graceful-fs": { - "version": "4.2.4", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.4.tgz", - "integrity": "sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw==", - "dev": true - }, - "har-schema": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", - "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=", - "dev": true - }, - "har-validator": { - "version": "5.1.5", - "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.5.tgz", - "integrity": "sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==", - "dev": true, - "requires": { - "ajv": "^6.12.3", - 
"har-schema": "^2.0.0" - } - }, - "has-ansi": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", - "integrity": "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=", - "dev": true, - "requires": { - "ansi-regex": "^2.0.0" - } - }, - "has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true - }, - "http-signature": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", - "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", - "dev": true, - "requires": { - "assert-plus": "^1.0.0", - "jsprim": "^1.2.2", - "sshpk": "^1.7.0" - } - }, - "human-signals": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-1.1.1.tgz", - "integrity": "sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw==", - "dev": true - }, - "indent-string": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-3.2.0.tgz", - "integrity": "sha1-Sl/W0nzDMvN+VBmlBNu4NxBckok=", - "dev": true - }, - "inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", - "dev": true, - "requires": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true - }, - "ini": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz", - "integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==", - "dev": true - }, - "is-ci": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-2.0.0.tgz", - "integrity": "sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==", - "dev": true, - "requires": { - "ci-info": "^2.0.0" - } - }, - "is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true - }, - "is-installed-globally": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.3.2.tgz", - "integrity": "sha512-wZ8x1js7Ia0kecP/CHM/3ABkAmujX7WPvQk6uu3Fly/Mk44pySulQpnHG46OMjHGXApINnV4QhY3SWnECO2z5g==", - "dev": true, - "requires": { - "global-dirs": "^2.0.1", - "is-path-inside": "^3.0.1" - } - }, - "is-observable": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-observable/-/is-observable-1.1.0.tgz", - "integrity": "sha512-NqCa4Sa2d+u7BWc6CukaObG3Fh+CU9bvixbpcXYhy2VvYS7vVGIdAgnIS5Ks3A/cqk4rebLJ9s8zBstT2aKnIA==", - "dev": true, - "requires": { - "symbol-observable": "^1.1.0" - } - }, - "is-path-inside": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.2.tgz", - "integrity": "sha512-/2UGPSgmtqwo1ktx8NDHjuPwZWmHhO+gj0f93EkhLB5RgW9RZevWYYlIkS6zePc6U2WpOdQYIwHe9YC4DWEBVg==", - "dev": true - }, - "is-promise": { - "version": "2.2.2", - "resolved": 
"https://registry.npmjs.org/is-promise/-/is-promise-2.2.2.tgz", - "integrity": "sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ==", - "dev": true - }, - "is-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz", - "integrity": "sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==", - "dev": true - }, - "is-typedarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=", - "dev": true - }, - "isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=", - "dev": true - }, - "isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", - "dev": true - }, - "isstream": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", - "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=", - "dev": true - }, - "js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", - "dev": true - }, - "jsbn": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", - "dev": true - }, - "json-schema": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", - "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=", - "dev": true - }, - "json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true - }, - "json-stringify-safe": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=", - "dev": true - }, - "jsonfile": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", - "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", - "dev": true, - "requires": { - "graceful-fs": "^4.1.6", - "universalify": "^2.0.0" - }, - "dependencies": { - "universalify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", - "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==", - "dev": true - } - } - }, - "jsprim": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", - "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", - "dev": true, - "requires": { - "assert-plus": "1.0.0", - "extsprintf": "1.3.0", - "json-schema": "0.2.3", - "verror": "1.10.0" - } - }, - "lazy-ass": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/lazy-ass/-/lazy-ass-1.6.0.tgz", - "integrity": "sha1-eZllXoZGwX8In90YfRUNMyTVRRM=", - "dev": true - }, - "listr": { - "version": "0.14.3", - "resolved": "https://registry.npmjs.org/listr/-/listr-0.14.3.tgz", - "integrity": 
"sha512-RmAl7su35BFd/xoMamRjpIE4j3v+L28o8CT5YhAXQJm1fD+1l9ngXY8JAQRJ+tFK2i5njvi0iRUKV09vPwA0iA==", - "dev": true, - "requires": { - "@samverschueren/stream-to-observable": "^0.3.0", - "is-observable": "^1.1.0", - "is-promise": "^2.1.0", - "is-stream": "^1.1.0", - "listr-silent-renderer": "^1.1.1", - "listr-update-renderer": "^0.5.0", - "listr-verbose-renderer": "^0.5.0", - "p-map": "^2.0.0", - "rxjs": "^6.3.3" - }, - "dependencies": { - "is-stream": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", - "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=", - "dev": true - } - } - }, - "listr-silent-renderer": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/listr-silent-renderer/-/listr-silent-renderer-1.1.1.tgz", - "integrity": "sha1-kktaN1cVN3C/Go4/v3S4u/P5JC4=", - "dev": true - }, - "listr-update-renderer": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/listr-update-renderer/-/listr-update-renderer-0.5.0.tgz", - "integrity": "sha512-tKRsZpKz8GSGqoI/+caPmfrypiaq+OQCbd+CovEC24uk1h952lVj5sC7SqyFUm+OaJ5HN/a1YLt5cit2FMNsFA==", - "dev": true, - "requires": { - "chalk": "^1.1.3", - "cli-truncate": "^0.2.1", - "elegant-spinner": "^1.0.1", - "figures": "^1.7.0", - "indent-string": "^3.0.0", - "log-symbols": "^1.0.2", - "log-update": "^2.3.0", - "strip-ansi": "^3.0.1" - }, - "dependencies": { - "chalk": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", - "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", - "dev": true, - "requires": { - "ansi-styles": "^2.2.1", - "escape-string-regexp": "^1.0.2", - "has-ansi": "^2.0.0", - "strip-ansi": "^3.0.0", - "supports-color": "^2.0.0" - } - }, - "log-symbols": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-1.0.2.tgz", - "integrity": "sha1-N2/3tY6jCGoPCfrMdGF+ylAeGhg=", - "dev": true, - "requires": { - "chalk": "^1.0.0" - } - }, - "supports-color": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", - "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=", - "dev": true - } - } - }, - "listr-verbose-renderer": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/listr-verbose-renderer/-/listr-verbose-renderer-0.5.0.tgz", - "integrity": "sha512-04PDPqSlsqIOaaaGZ+41vq5FejI9auqTInicFRndCBgE3bXG8D6W1I+mWhk+1nqbHmyhla/6BUrd5OSiHwKRXw==", - "dev": true, - "requires": { - "chalk": "^2.4.1", - "cli-cursor": "^2.1.0", - "date-fns": "^1.27.2", - "figures": "^2.0.0" - }, - "dependencies": { - "ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, - "requires": { - "color-convert": "^1.9.0" - } - }, - "chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dev": true, - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - }, - "cli-cursor": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", - "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", - "dev": true, - "requires": { - "restore-cursor": "^2.0.0" - } - }, - "color-convert": { - "version": "1.9.3", - "resolved": 
"https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dev": true, - "requires": { - "color-name": "1.1.3" - } - }, - "color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", - "dev": true - }, - "figures": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", - "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=", - "dev": true, - "requires": { - "escape-string-regexp": "^1.0.5" - } - }, - "has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", - "dev": true - }, - "mimic-fn": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", - "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", - "dev": true - }, - "onetime": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", - "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", - "dev": true, - "requires": { - "mimic-fn": "^1.0.0" - } - }, - "restore-cursor": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", - "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", - "dev": true, - "requires": { - "onetime": "^2.0.0", - "signal-exit": "^3.0.2" - } - }, - "supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, - "requires": { - "has-flag": "^3.0.0" - } - } - } - }, - "lodash": { - "version": "4.17.20", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.20.tgz", - "integrity": "sha512-PlhdFcillOINfeV7Ni6oF1TAEayyZBoZ8bcshTHqOYJYlrqzRK5hagpagky5o4HfCzzd1TRkXPMFq6cKk9rGmA==", - "dev": true - }, - "lodash.once": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", - "integrity": "sha1-DdOXEhPHxW34gJd9UEyI+0cal6w=", - "dev": true - }, - "log-symbols": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.0.0.tgz", - "integrity": "sha512-FN8JBzLx6CzeMrB0tg6pqlGU1wCrXW+ZXGH481kfsBqer0hToTIiHdjH4Mq8xJUbvATujKCvaREGWpGUionraA==", - "dev": true, - "requires": { - "chalk": "^4.0.0" - } - }, - "log-update": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/log-update/-/log-update-2.3.0.tgz", - "integrity": "sha1-iDKP19HOeTiykoN0bwsbwSayRwg=", - "dev": true, - "requires": { - "ansi-escapes": "^3.0.0", - "cli-cursor": "^2.0.0", - "wrap-ansi": "^3.0.1" - }, - "dependencies": { - "cli-cursor": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", - "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", - "dev": true, - "requires": { - "restore-cursor": "^2.0.0" - } - }, - "mimic-fn": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", - "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", - "dev": true - }, - "onetime": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", - "integrity": 
"sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", - "dev": true, - "requires": { - "mimic-fn": "^1.0.0" - } - }, - "restore-cursor": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", - "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", - "dev": true, - "requires": { - "onetime": "^2.0.0", - "signal-exit": "^3.0.2" - } - } - } - }, - "lz-string": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/lz-string/-/lz-string-1.4.4.tgz", - "integrity": "sha1-wNjq82BZ9wV5bh40SBHPTEmNOiY=", - "dev": true - }, - "merge-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", - "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", - "dev": true - }, - "mime-db": { - "version": "1.44.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.44.0.tgz", - "integrity": "sha512-/NOTfLrsPBVeH7YtFPgsVWveuL+4SjjYxaQ1xtM1KMFj7HdxlBlxeyNLzhyJVx7r4rZGJAZ/6lkKCitSc/Nmpg==", - "dev": true - }, - "mime-types": { - "version": "2.1.27", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.27.tgz", - "integrity": "sha512-JIhqnCasI9yD+SsmkquHBxTSEuZdQX5BuQnS2Vc7puQQQ+8yiP5AY5uWhpdv4YL4VM5c6iliiYWPgJ/nJQLp7w==", - "dev": true, - "requires": { - "mime-db": "1.44.0" - } - }, - "mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "dev": true - }, - "minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", - "dev": true, - "requires": { - "brace-expansion": "^1.1.7" - } - }, - "minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", - "dev": true - }, - "mkdirp": { - "version": "0.5.5", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", - "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", - "dev": true, - "requires": { - "minimist": "^1.2.5" - } - }, - "moment": { - "version": "2.29.1", - "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.1.tgz", - "integrity": "sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ==", - "dev": true - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - }, - "npm-run-path": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", - "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", - "dev": true, - "requires": { - "path-key": "^3.0.0" - } - }, - "number-is-nan": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", - "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=", - "dev": true - }, - "oauth-sign": { - "version": "0.9.0", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", - "integrity": 
"sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==", - "dev": true - }, - "object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", - "dev": true - }, - "once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", - "dev": true, - "requires": { - "wrappy": "1" - } - }, - "onetime": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-1.1.0.tgz", - "integrity": "sha1-ofeDj4MUxRbwXs78vEzP4EtO14k=", - "dev": true - }, - "ospath": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/ospath/-/ospath-1.2.2.tgz", - "integrity": "sha1-EnZjl3Sj+O8lcvf+QoDg6kVQwHs=", - "dev": true - }, - "p-map": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-2.1.0.tgz", - "integrity": "sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw==", - "dev": true - }, - "path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", - "dev": true - }, - "path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true - }, - "pend": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", - "integrity": "sha1-elfrVQpng/kRUzH89GY9XI4AelA=", - "dev": true - }, - "performance-now": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", - "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=", - "dev": true - }, - "pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", - "dev": true - }, - "pretty-bytes": { - "version": "5.4.1", - "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-5.4.1.tgz", - "integrity": "sha512-s1Iam6Gwz3JI5Hweaz4GoCD1WUNUIyzePFy5+Js2hjwGVt2Z79wNN+ZKOZ2vB6C+Xs6njyB84Z1IthQg8d9LxA==", - "dev": true - }, - "pretty-format": { - "version": "26.6.2", - "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-26.6.2.tgz", - "integrity": "sha512-7AeGuCYNGmycyQbCqd/3PWH4eOoX/OiCa0uphp57NVTeAGdJGaAliecxwBDHYQCIvrW7aDBZCYeNTP/WX69mkg==", - "dev": true, - "requires": { - "@jest/types": "^26.6.2", - "ansi-regex": "^5.0.0", - "ansi-styles": "^4.0.0", - "react-is": "^17.0.1" - }, - "dependencies": { - "ansi-regex": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", - "dev": true - }, - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "requires": { - "color-convert": "^2.0.1" - } - } - } - }, - "process-nextick-args": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", - "integrity": 
"sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", - "dev": true - }, - "psl": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", - "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==", - "dev": true - }, - "pump": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", - "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", - "dev": true, - "requires": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" - } - }, - "punycode": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", - "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", - "dev": true - }, - "qs": { - "version": "6.5.2", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", - "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==", - "dev": true - }, - "querystring": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", - "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=", - "dev": true - }, - "ramda": { - "version": "0.26.1", - "resolved": "https://registry.npmjs.org/ramda/-/ramda-0.26.1.tgz", - "integrity": "sha512-hLWjpy7EnsDBb0p+Z3B7rPi3GDeRG5ZtiI33kJhTt+ORCd38AbAIjB/9zRIUoeTbE/AVX5ZkU7m6bznsvrf8eQ==", - "dev": true - }, - "react-is": { - "version": "17.0.1", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.1.tgz", - "integrity": "sha512-NAnt2iGDXohE5LI7uBnLnqvLQMtzhkiAOLXTmv+qnF9Ky7xAPcX8Up/xWIhxvLVGJvuLiNc4xQLtuqDRzb4fSA==", - "dev": true - }, - "readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", - "dev": true, - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - }, - "dependencies": { - "safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true - } - } - }, - "regenerator-runtime": { - "version": "0.13.7", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.7.tgz", - "integrity": "sha512-a54FxoJDIr27pgf7IgeQGxmqUNYrcV338lf/6gH456HZ/PhX+5BcwHXG9ajESmwe6WRO0tAzRUrRmNONWgkrew==", - "dev": true - }, - "request-progress": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/request-progress/-/request-progress-3.0.0.tgz", - "integrity": "sha1-TKdUCBx/7GP1BeT6qCWqBs1mnb4=", - "dev": true, - "requires": { - "throttleit": "^1.0.0" - } - }, - "restore-cursor": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-1.0.1.tgz", - "integrity": "sha1-NGYfRohjJ/7SmRR5FSJS35LapUE=", - "dev": true, - "requires": { - "exit-hook": "^1.0.0", - "onetime": "^1.0.0" - } - }, - "rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": 
"sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "dev": true, - "requires": { - "glob": "^7.1.3" - } - }, - "rxjs": { - "version": "6.6.3", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.6.3.tgz", - "integrity": "sha512-trsQc+xYYXZ3urjOiJOuCOa5N3jAZ3eiSpQB5hIT8zGlL2QfnHLJ2r7GMkBGuIausdJN1OneaI6gQlsqNHHmZQ==", - "dev": true, - "requires": { - "tslib": "^1.9.0" - } - }, - "safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "dev": true - }, - "safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "dev": true - }, - "shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "requires": { - "shebang-regex": "^3.0.0" - } - }, - "shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true - }, - "signal-exit": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", - "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==", - "dev": true - }, - "slice-ansi": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-0.0.4.tgz", - "integrity": "sha1-7b+JA/ZvfOL46v1s7tZeJkyDGzU=", - "dev": true - }, - "sshpk": { - "version": "1.16.1", - "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", - "integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==", - "dev": true, - "requires": { - "asn1": "~0.2.3", - "assert-plus": "^1.0.0", - "bcrypt-pbkdf": "^1.0.0", - "dashdash": "^1.12.0", - "ecc-jsbn": "~0.1.1", - "getpass": "^0.1.1", - "jsbn": "~0.1.0", - "safer-buffer": "^2.0.2", - "tweetnacl": "~0.14.0" - } - }, - "string-width": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", - "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", - "dev": true, - "requires": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", - "dev": true - }, - "strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.0" - } - } - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": 
"sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "requires": { - "safe-buffer": "~5.1.0" - }, - "dependencies": { - "safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true - } - } - }, - "strip-ansi": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", - "dev": true, - "requires": { - "ansi-regex": "^2.0.0" - } - }, - "strip-final-newline": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", - "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", - "dev": true - }, - "supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dev": true, - "requires": { - "has-flag": "^4.0.0" - } - }, - "symbol-observable": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/symbol-observable/-/symbol-observable-1.2.0.tgz", - "integrity": "sha512-e900nM8RRtGhlV36KGEU9k65K3mPb1WV70OdjfxlG2EAuM1noi/E/BaW/uMhL7bPEssK8QV57vN3esixjUvcXQ==", - "dev": true - }, - "throttleit": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/throttleit/-/throttleit-1.0.0.tgz", - "integrity": "sha1-nnhYNtr0Z0MUWlmEtiaNgoUorGw=", - "dev": true - }, - "tmp": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", - "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==", - "dev": true, - "requires": { - "rimraf": "^3.0.0" - } - }, - "tough-cookie": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", - "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", - "dev": true, - "requires": { - "psl": "^1.1.28", - "punycode": "^2.1.1" - } - }, - "tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", - "dev": true - }, - "tunnel-agent": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", - "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", - "dev": true, - "requires": { - "safe-buffer": "^5.0.1" - } - }, - "tweetnacl": { - "version": "0.14.5", - "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=", - "dev": true - }, - "typedarray": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", - "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=", - "dev": true - }, - "typescript": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.0.5.tgz", - "integrity": "sha512-ywmr/VrTVCmNTJ6iV2LwIrfG1P+lv6luD8sUJs+2eI9NLGigaN+nUQc13iHqisq7bra9lnmUSYqbJvegraBOPQ==", - "dev": true - }, - "universalify": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-1.0.0.tgz", - 
"integrity": "sha512-rb6X1W158d7pRQBg5gkR8uPaSfiids68LTJQYOtEUhoJUWBdaQHsuT/EUduxXYxcrt4r5PJ4fuHW1MHT6p0qug==", - "dev": true - }, - "untildify": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", - "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==", - "dev": true - }, - "uri-js": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.0.tgz", - "integrity": "sha512-B0yRTzYdUCCn9n+F4+Gh4yIDtMQcaJsmYBDsTSG8g/OejKBodLQ2IHfN3bM7jUsRXndopT7OIXWdYqc1fjmV6g==", - "dev": true, - "requires": { - "punycode": "^2.1.0" - } - }, - "url": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/url/-/url-0.11.0.tgz", - "integrity": "sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE=", - "dev": true, - "requires": { - "punycode": "1.3.2", - "querystring": "0.2.0" - }, - "dependencies": { - "punycode": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", - "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=", - "dev": true - } - } - }, - "util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=", - "dev": true - }, - "uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", - "dev": true - }, - "verror": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", - "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", - "dev": true, - "requires": { - "assert-plus": "^1.0.0", - "core-util-is": "1.0.2", - "extsprintf": "^1.2.0" - } - }, - "which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } - }, - "wrap-ansi": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-3.0.1.tgz", - "integrity": "sha1-KIoE2H7aXChuBg3+jxNc6NAH+Lo=", - "dev": true, - "requires": { - "string-width": "^2.1.1", - "strip-ansi": "^4.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", - "dev": true - }, - "is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", - "dev": true - }, - "string-width": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", - "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", - "dev": true, - "requires": { - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^4.0.0" - } - }, - "strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", - "dev": true, - "requires": { - "ansi-regex": "^3.0.0" - } - } - } - }, - "wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", - "dev": true - }, - 
"yauzl": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", - "integrity": "sha1-x+sXyT4RLLEIb6bY5R+wZnt5pfk=", - "dev": true, - "requires": { - "buffer-crc32": "~0.2.3", - "fd-slicer": "~1.1.0" - } - } - } -} diff --git a/scaladoc/e2e/package.json b/scaladoc/e2e/package.json deleted file mode 100644 index 8cd476a5e74c..000000000000 --- a/scaladoc/e2e/package.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "name": "scaladoc-e2e", - "version": "1.0.0", - "description": "", - "main": "index.js", - "scripts": { - "cypress:open": "cypress open", - "cypress:run": "cypress run" - }, - "author": "", - "license": "ISC", - "devDependencies": { - "@testing-library/cypress": "^7.0.1", - "@types/node": "^14.14.6", - "cypress": "^5.5.0", - "typescript": "^4.0.5" - } -} diff --git a/scaladoc/e2e/tsconfig.json b/scaladoc/e2e/tsconfig.json deleted file mode 100644 index cd4b429240d0..000000000000 --- a/scaladoc/e2e/tsconfig.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "compilerOptions": { - "target": "es5", - "lib": ["es5", "dom", ], - "types": ["cypress", "@testing-library/cypress", "node"] - }, - "include": [ - "**/*.ts" - ] - } \ No newline at end of file diff --git a/scaladoc/noResultStructure.html b/scaladoc/noResultStructure.html deleted file mode 100644 index 93ef4bbf1396..000000000000 --- a/scaladoc/noResultStructure.html +++ /dev/null @@ -1,6 +0,0 @@ -
<div>
-<img alt="Sick face"/>
-<h1>No results match your filter criteria</h1>
-<p>Try adjusting or clearing your filters
-to display better result</p>
-</div>
\ No newline at end of file diff --git a/scaladoc/resources/dotty_res/scripts/ux.js b/scaladoc/resources/dotty_res/scripts/ux.js index 0ead006af84d..dd6e798f17a5 100644 --- a/scaladoc/resources/dotty_res/scripts/ux.js +++ b/scaladoc/resources/dotty_res/scripts/ux.js @@ -285,7 +285,7 @@ document getTocListElement(id).parentElement.classList.toggle("active"); } if (lastEntry.isIntersecting) { - window.location.hash = ""; + history.replaceState(history.state, "", window.location.pathname + window.location.search); removeAllHighlights(); const id = getIdOfElement(lastEntry); diff --git a/scaladoc/resources/dotty_res/styles/theme/components/button/icon-button.css b/scaladoc/resources/dotty_res/styles/theme/components/button/icon-button.css index a6450984131e..d0957691fb1e 100644 --- a/scaladoc/resources/dotty_res/styles/theme/components/button/icon-button.css +++ b/scaladoc/resources/dotty_res/styles/theme/components/button/icon-button.css @@ -533,6 +533,50 @@ content: url("../../../../images/icon-buttons/gitter/dark/selected.svg"); } +/* custom button */ + +.icon-button.custom-dark{ + display: none; +} + +.icon-button.custom::after { + content: ""; + background-image: var(--bgimage); + background-repeat: no-repeat; + background-position: center; + background-size: contain; + display: block; + max-width: 100%; + max-height: 100%; +} + +.theme-dark .icon-button.custom-dark{ + display: unset; +} + +.theme-dark .icon-button.custom-dark::after{ + content: ""; + background-image: var(--bgimage-dark); + background-repeat: no-repeat; + background-position: center; + background-size: contain; + display: block; + max-width: 100%; + max-height: 100%; +} + +.theme-dark .icon-button.custom{ + display: none; +} + +.icon-button.custom:hover{ + opacity: 0.8; +} + +.icon-button.custom-dark:hover{ + opacity: 0.8; +} + /* copy button */ .icon-button.copy-button::after { @@ -830,4 +874,4 @@ .theme-dark .documentableElement .ar.icon-button.expanded.selected::after { content: url("../../../../images/icon-buttons/arrow-down/dark/selected.svg"); -} \ No newline at end of file +} diff --git a/scaladoc/resources/dotty_res/styles/theme/layout/footer.css b/scaladoc/resources/dotty_res/styles/theme/layout/footer.css index 7c169af00591..9178e4a01acc 100644 --- a/scaladoc/resources/dotty_res/styles/theme/layout/footer.css +++ b/scaladoc/resources/dotty_res/styles/theme/layout/footer.css @@ -64,7 +64,7 @@ display: none; } - #footer.mobile-footer .text-mobile { + #footer.mobile-footer .text-mobile { display: flex; width: 100%; justify-content: center; @@ -78,5 +78,5 @@ #footer.mobile-footer > .text-mobile { display: flex; } - + } \ No newline at end of file diff --git a/scaladoc/scripts/linkstovisit.txt b/scaladoc/scripts/linkstovisit.txt deleted file mode 100644 index c615ece17d35..000000000000 --- a/scaladoc/scripts/linkstovisit.txt +++ /dev/null @@ -1,13 +0,0 @@ -./self/api/dotty.tools.scaladoc/tasty/comments/wiki/-converter/index.html,on any page try minimizing window to see if relative styles are applied correctly -./self/api/dotty.tools.scaladoc/-base-key/index.html,is searchbar correct width -./self/api/dotty.tools.scaladoc/as-map.html,are logos in good propotions -./self/api/dotty.tools.scaladoc/model/api/as-signature.html,is sidebar not collapsing/overlaying -./self/api/dotty.tools.scaladoc/-documentable-element/index.html,are overrides pointing to correct function? is filtering working -./self/api/dotty.tools.scaladoc/model/api/-member/index.html,is source link pointing to correct element (object Member)? 
-./self/api/dotty.tools.scaladoc/-scala-content-node/index.html,you can see is graph properly rendered; can you navigate by nodes up and down the hierarchy -./self/api/index/index.html,package overview -./self/api/dotty.tools.scaladoc/index.html,example package -./self/api/dotty.tools.scaladoc/-documentable-element/index.html,example classlike -./self/api/dotty.tools.scaladoc/get-from-extra.html,example package level method -./self/api/dotty.tools.scaladoc/put.html,example extension -./self/api/dotty.tools.scaladoc/-dot-diagram-builder/build.html,example method diff --git a/scaladoc/scripts/mk-index.sh b/scaladoc/scripts/mk-index.sh deleted file mode 100755 index e2df5a2e551c..000000000000 --- a/scaladoc/scripts/mk-index.sh +++ /dev/null @@ -1,75 +0,0 @@ -#!/usr/bin/env bash - -MYDIR="$(dirname "$(readlink -f "$0")")" - - -function html-beginning { - cat << EOF -<html> -<body> -EOF -} - -function html-ending { -cat << EOF -</body> -</html> -EOF -} - -function print-beginning { - cat << EOF -<div> -<h1>$1</h1> -<ul> -EOF -} - -function print-list-element { - cat << EOF -<li><a href="$1">$2</a></li> -EOF -} - -function print-list-element-text { - cat << EOF -<li>$1</li> -EOF -} - -function print-ending { - cat << 'EOF' -</ul></div>
-EOF -} - -cd "$1" || exit - - -html-beginning -print-beginning "Available project:" -for f in * -do - ! [[ -d $f ]] && continue - # assuming that there's only one "root" index file - # if there's a static site, it's at depth 1 - # otherwise at depth 2 - INDEX=$(find "$f" -maxdepth 1 -name 'index.html') - if [[ -z $INDEX ]] - then - INDEX=$(find "$f" -maxdepth 2 -name 'index.html') - fi - print-list-element "$INDEX" "$f" -done -print-ending - -print-beginning "Links for manual testing:" -while read line; do - print-list-element-text "$line" -done < "$MYDIR/tocheck.txt" - -while read line; do - IFS=',' read INDEX f <<< "${line}" - print-list-element "$INDEX" "$f" -done < "$MYDIR/linkstovisit.txt" -print-ending -html-ending diff --git a/scaladoc/src/dotty/tools/scaladoc/QuickLink.scala b/scaladoc/src/dotty/tools/scaladoc/QuickLink.scala index 77ee9916885a..04bc9d59cde3 100644 --- a/scaladoc/src/dotty/tools/scaladoc/QuickLink.scala +++ b/scaladoc/src/dotty/tools/scaladoc/QuickLink.scala @@ -7,7 +7,7 @@ object QuickLink: def usage: String = """List of quick links that is displayed in the header of documentation. |The setting accepts list of quick links in format: text::url - |The first `::` occurence is taken as the delimiter.""".stripMargin + |The first `::` occurrence is taken as the delimiter.""".stripMargin def parse(s: String): Either[String, QuickLink] = s.split(delimiter, 2).toList match case text :: url :: Nil => Right(QuickLink(text, url)) diff --git a/scaladoc/src/dotty/tools/scaladoc/ScalaModuleProvider.scala b/scaladoc/src/dotty/tools/scaladoc/ScalaModuleProvider.scala index c4776f2840c2..edd7b41b6182 100644 --- a/scaladoc/src/dotty/tools/scaladoc/ScalaModuleProvider.scala +++ b/scaladoc/src/dotty/tools/scaladoc/ScalaModuleProvider.scala @@ -8,7 +8,7 @@ case class Module(rootPackage: Member, members: Map[DRI, Member]) object ScalaModuleProvider: def mkModule()(using ctx: DocContext): Module = - val (result, rootDoc) = ScaladocTastyInspector().result() + val (result, rootDoc) = ScaladocTastyInspector.loadDocs() val (rootPck, rest) = result.partition(_.name == "API") val (emptyPackages, nonemptyPackages) = (rest ++ rootPck.flatMap(_.members)) .filter(p => p.members.nonEmpty || p.docs.nonEmpty).sortBy(_.name) diff --git a/scaladoc/src/dotty/tools/scaladoc/Scaladoc.scala b/scaladoc/src/dotty/tools/scaladoc/Scaladoc.scala index fa02e87548e6..260529b024db 100644 --- a/scaladoc/src/dotty/tools/scaladoc/Scaladoc.scala +++ b/scaladoc/src/dotty/tools/scaladoc/Scaladoc.scala @@ -39,6 +39,7 @@ object Scaladoc: documentSyntheticTypes: Boolean = false, snippetCompiler: List[String] = Nil, noLinkWarnings: Boolean = false, + noLinkAssetWarnings: Boolean = false, versionsDictionaryUrl: Option[String] = None, generateInkuire : Boolean = false, apiSubdirectory : Boolean = false, @@ -141,7 +142,7 @@ object Scaladoc: ) if other.nonEmpty then report.warning( - s"scaladoc suports only .tasty and .jar files, following files will be ignored: ${other.mkString(", ")}" + s"scaladoc supports only .tasty and .jar files, following files will be ignored: ${other.mkString(", ")}" ) def defaultDest(): File = @@ -202,7 +203,7 @@ object Scaladoc: classpath.get, bootclasspath.get, destFile, - siteRoot.nonDefault, + Option(siteRoot.withDefault(siteRoot.default)), projectVersion.nonDefault, projectLogo.nonDefault, projectFooter.nonDefault, @@ -221,6 +222,7 @@ object Scaladoc: YdocumentSyntheticTypes.get, snippetCompiler.get, noLinkWarnings.get, + noLinkAssetWarnings.get, versionsDictionaryUrl.nonDefault, 
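As a side note on the `-quick-links` format documented in `QuickLink.usage` above: the parser splits only on the first `::`, so URLs that themselves contain `::` survive intact. A minimal, self-contained sketch of that rule (the `QuickLink` case class and the hard-coded `::` here are illustrative stand-ins for the real `QuickLink.parse` and its `delimiter` field):

```scala
// Standalone model of the text::url rule; not the actual scaladoc sources.
final case class QuickLink(text: String, url: String)

def parseQuickLink(s: String): Either[String, QuickLink] =
  // Limit 2 => split at the first `::` only; everything after it stays in the URL.
  s.split("::", 2).toList match
    case text :: url :: Nil => Right(QuickLink(text, url))
    case _                  => Left(s"Invalid quick link format: $s")

@main def quickLinkDemo(): Unit =
  println(parseQuickLink("Docs::https://docs.scala-lang.org")) // Right(QuickLink(Docs,https://docs.scala-lang.org))
  println(parseQuickLink("NoDelimiter"))                       // Left(Invalid quick link format: NoDelimiter)
```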
generateInkuire.get, apiSubdirectory.get, diff --git a/scaladoc/src/dotty/tools/scaladoc/ScaladocSettings.scala b/scaladoc/src/dotty/tools/scaladoc/ScaladocSettings.scala index 96e7854b45cf..403acb19305d 100644 --- a/scaladoc/src/dotty/tools/scaladoc/ScaladocSettings.scala +++ b/scaladoc/src/dotty/tools/scaladoc/ScaladocSettings.scala @@ -43,7 +43,7 @@ class ScaladocSettings extends SettingGroup with AllScalaSettings: val socialLinks: Setting[List[String]] = MultiStringSetting("-social-links", "social-links", - "Links to social sites. '[github|twitter|gitter|discord]::link' syntax is used.") + "Links to social sites. '[github|twitter|gitter|discord]::link' or 'custom::link::light_icon_file_name[::dark_icon_file_name]' syntax is used. For custom links, the icons must be present in '_assets/images/'") val deprecatedSkipPackages: Setting[List[String]] = MultiStringSetting("-skip-packages", "packages", "Deprecated, please use `-skip-by-id` or `-skip-by-regex`") @@ -87,6 +87,12 @@ class ScaladocSettings extends SettingGroup with AllScalaSettings: false ) + val noLinkAssetWarnings: Setting[Boolean] = BooleanSetting( + "-no-link-asset-warnings", + "Avoid warnings for incorrect links of assets like images, static pages, etc.", + false + ) + val versionsDictionaryUrl: Setting[String] = StringSetting( "-versions-dictionary-url", "versions dictionary url", @@ -127,5 +133,5 @@ class ScaladocSettings extends SettingGroup with AllScalaSettings: "List of quick links that is displayed in the header of documentation." ) - def scaladocSpecificSettings: Set[Setting[_]] = + def scaladocSpecificSettings: Set[Setting[?]] = Set(sourceLinks, legacySourceLink, syntax, revision, externalDocumentationMappings, socialLinks, skipById, skipByRegex, deprecatedSkipPackages, docRootContent, snippetCompiler, generateInkuire, defaultTemplate, scastieConfiguration, quickLinks) diff --git a/scaladoc/src/dotty/tools/scaladoc/SocialLinks.scala b/scaladoc/src/dotty/tools/scaladoc/SocialLinks.scala index a07029d06c50..545d9176675a 100644 --- a/scaladoc/src/dotty/tools/scaladoc/SocialLinks.scala +++ b/scaladoc/src/dotty/tools/scaladoc/SocialLinks.scala @@ -5,12 +5,16 @@ enum SocialLinks(val url: String, val className: String): case Twitter(tUrl: String) extends SocialLinks(tUrl, "twitter") case Gitter(gUrl: String) extends SocialLinks(gUrl, "gitter") case Discord(dUrl: String) extends SocialLinks(dUrl, "discord") + case Custom(cUrl: String, lightIcon: String, darkIcon: String) extends SocialLinks(cUrl, "custom") object SocialLinks: + val LowercaseNamePattern = "^[a-z]+$".r + def parse(s: String): Either[String, SocialLinks] = val errorPrefix = s"Social links arg $s is invalid: " val splitted = s.split("::") - splitted.head match { + + splitted.head.toLowerCase match { case "github" if splitted.size == 2 => Right(Github(splitted(1))) case "github" => Left(errorPrefix + "For 'github' arg expected one argument: url") case "twitter" if splitted.size == 2 => Right(Twitter(splitted(1))) @@ -19,5 +23,8 @@ object SocialLinks: case "gitter" => Left(errorPrefix + "For 'gitter' arg expected one argument: url") case "discord" if splitted.size == 2 => Right(Discord(splitted(1))) case "discord" => Left(errorPrefix + "For 'discord' arg expected one argument: url") + case LowercaseNamePattern() if splitted.size == 4 => Right(Custom(splitted(1), splitted(2), splitted(3))) + case LowercaseNamePattern() if splitted.size == 3 => Right(Custom(splitted(1), splitted(2), splitted(2))) + case LowercaseNamePattern() => Left(errorPrefix + "For the 'custom' 
link, a minimum of two arguments is expected: URL, light icon file name, [dark icon file name]") case _ => Left(errorPrefix) } diff --git a/scaladoc/src/dotty/tools/scaladoc/api.scala b/scaladoc/src/dotty/tools/scaladoc/api.scala index 5af55f76a211..159db2ecaf7b 100644 --- a/scaladoc/src/dotty/tools/scaladoc/api.scala +++ b/scaladoc/src/dotty/tools/scaladoc/api.scala @@ -145,10 +145,10 @@ case class LinkToType(signature: Signature, dri: DRI, kind: Kind) case class HierarchyGraph(edges: Seq[(LinkToType, LinkToType)], sealedNodes: Set[LinkToType] = Set.empty): def vertecies: Seq[LinkToType] = edges.flatten((a, b) => Seq(a, b)).distinct def verteciesWithId: Map[LinkToType, Int] = vertecies.zipWithIndex.toMap - def +(edge: (LinkToType, LinkToType)): HierarchyGraph = HierarchyGraph((edges :+ edge).distinct) - def ++(edges: Seq[(LinkToType, LinkToType)]): HierarchyGraph = edges.foldLeft(this) { - case (acc, edge) => acc + edge - } + def +(edge: (LinkToType, LinkToType)): HierarchyGraph = this ++ Seq(edge) + def ++(edges: Seq[(LinkToType, LinkToType)]): HierarchyGraph = + this.copy(edges = this.edges.view.concat(edges).distinct.toSeq) + object HierarchyGraph: def empty = HierarchyGraph(Seq.empty) def withEdges(edges: Seq[(LinkToType, LinkToType)]) = HierarchyGraph.empty ++ edges diff --git a/scaladoc/src/dotty/tools/scaladoc/parsers/WikiCodeBlockParser.scala b/scaladoc/src/dotty/tools/scaladoc/parsers/WikiCodeBlockParser.scala index 4201cae4e2e6..e6ebe0d2cc7a 100644 --- a/scaladoc/src/dotty/tools/scaladoc/parsers/WikiCodeBlockParser.scala +++ b/scaladoc/src/dotty/tools/scaladoc/parsers/WikiCodeBlockParser.scala @@ -142,7 +142,7 @@ class WikiCodeBlockParser( codeBlock.setCharsFromContent block.appendChild(codeBlock) } else { - val codeBlock = new Text(SegmentedSequence.create(segments.asScala.toSeq:_*)) + val codeBlock = new Text(SegmentedSequence.create(segments.asScala.toSeq*)) block.appendChild(codeBlock) } } diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/DocRenderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/DocRenderer.scala index 58898339db5d..1d634ab75dba 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/DocRenderer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/DocRenderer.scala @@ -79,9 +79,9 @@ class DocRender(signatureRenderer: SignatureRenderer)(using DocContext): case UnorderedList(items) => ul(listItems(items)) case OrderedList(items, style) => ol(listItems(items)) // TODO use style case Chain(items: Seq[Inline]) => span(items.map(renderElement)) - case Italic(text) => span(cls:="italic")(renderElement(text)) + case Italic(text) => em(renderElement(text)) case Underline(text) => span(cls:="underline")(renderElement(text)) - case Bold(text) => span(cls:="bold")(renderElement(text)) + case Bold(text) => strong(renderElement(text)) case Monospace(text) => code(renderElement(text)) case Superscript(text) => span(cls:="superscript")(renderElement(text)) // TODO implement style case Subscript(text) => span(cls:="subscript")(renderElement(text)) // TODO implement style diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/HtmlRenderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/HtmlRenderer.scala index 93b86ce0bc51..20f3335a44ef 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/HtmlRenderer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/HtmlRenderer.scala @@ -32,8 +32,8 @@ class HtmlRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: Do case _ => Nil) :+ (Attr("data-pathToRoot") := pathToRoot(page.link.dri)) - val 
htmlTag = html(attrs: _*)( - head((mkHead(page) :+ docHead):_*), + val htmlTag = html(attrs*)( + head((mkHead(page) :+ docHead)*), body( if !page.hasFrame then docBody else mkFrame(page.link, parents, docBody, toc) @@ -166,7 +166,14 @@ class HtmlRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: Do def icon(link: SocialLinks) = link.className args.socialLinks.map { link => a(href := link.url) ( - button(cls := s"icon-button ${icon(link)}") + link match + case SocialLinks.Custom(_, lightIcon, darkIcon) => + Seq( + button(cls := s"icon-button ${icon(link)}", style := s"--bgimage:url(../../../../images/$lightIcon)"), + button(cls := s"icon-button ${icon(link)}-dark", style := s"--bgimage-dark:url(../../../../images/$darkIcon)") + ) + case _ => + button(cls := s"icon-button ${icon(link)}") ) } @@ -209,7 +216,7 @@ class HtmlRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: Do a(href := pathToPage(link.dri, b.dri))(b.name), "/" )).dropRight(1) - div(cls := "breadcrumbs container")(innerTags:_*) + div(cls := "breadcrumbs container")(innerTags*) val (apiNavOpt, docsNavOpt): (Option[(Boolean, Seq[AppliedTag])], Option[(Boolean, Seq[AppliedTag])]) = buildNavigation(link) @@ -308,18 +315,7 @@ class HtmlRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: Do "Generated with" ), div(cls := "right-container")( - a(href := "https://github.com/lampepfl/dotty") ( - button(cls := "icon-button gh") - ), - a(href := "https://twitter.com/scala_lang") ( - button(cls := "icon-button twitter") - ), - a(href := "https://discord.com/invite/scala") ( - button(cls := "icon-button discord"), - ), - a(href := "https://gitter.im/scala/scala") ( - button(cls := "icon-button gitter"), - ), + socialLinks, div(cls := "text")(textFooter) ), div(cls := "text-mobile")(textFooter) diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/MemberRenderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/MemberRenderer.scala index 996b422b44fd..612444fd4ffd 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/MemberRenderer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/MemberRenderer.scala @@ -26,7 +26,7 @@ class MemberRenderer(signatureRenderer: SignatureRenderer)(using DocContext) ext val headNode = m.inheritedFrom.map(form => signatureRenderer.renderLink(form.name, form.dri)) val tailNodes = defs.flatMap(renderDef) val nodes = headNode.fold(tailNodes.drop(1))(_ +: tailNodes) - tableRow("Definition Classes", div(nodes:_*)) + tableRow("Definition Classes", div(nodes*)) case _ => Nil @@ -250,7 +250,7 @@ class MemberRenderer(signatureRenderer: SignatureRenderer)(using DocContext) ext val memberInf = memberInfo(member, withBrief = true) val annots = annotations(member) - div(topLevelAttr:_*)( + div(topLevelAttr*)( div(cls := "documentableElement-expander")( Option.when(annots.nonEmpty || originInf.nonEmpty || memberInf.nonEmpty)(button(cls := "icon-button ar show-content")).toList, annots.map(div(_)).toList, diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/Resources.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/Resources.scala index b84c07b4bade..3e49af2e0576 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/Resources.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/Resources.scala @@ -20,7 +20,7 @@ enum Resource(val path: String): trait Resources(using ctx: DocContext) extends Locations, Writer: private def dynamicJsData = val str = jsonObject("filterDefaults" -> jsonObject( - FilterAttributes.defaultValues.toSeq.map { case (n, v) => n -> 
jsonString(v) }:_* + FilterAttributes.defaultValues.toSeq.map { case (n, v) => n -> jsonString(v) }* )) Resource.Text("scripts/data.js", s"var scaladocData = $str") diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/SignatureRenderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/SignatureRenderer.scala index 5c6235b14e7d..65c67d3457a5 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/SignatureRenderer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/SignatureRenderer.scala @@ -18,7 +18,7 @@ trait SignatureRenderer: def renderElement(e: SignaturePart, modifiers: AppliedAttr*): AppliedTag = renderElementWith(e, modifiers*) def renderLink(name: String, dri: DRI, modifiers: AppliedAttr*) = - renderLinkContent(name, dri, modifiers:_*) + renderLinkContent(name, dri, modifiers*) def unresolvedLink(content: TagArg, modifiers: AppliedAttr*) = span(Attr("data-unresolved-link") := "", modifiers)(content) @@ -26,7 +26,7 @@ trait SignatureRenderer: def renderLinkContent(content: TagArg, dri: DRI, modifiers: AppliedAttr*) = link(dri) match case Some(link) => a(href := link, modifiers)(content) - case _ => unresolvedLink(content, modifiers:_*) + case _ => unresolvedLink(content, modifiers*) def renderElementWith(e: SignaturePart, modifiers: AppliedAttr*) = e match case Name(name, dri) => @@ -34,7 +34,7 @@ trait SignatureRenderer: renderLink(name, dri, attrs*) case Type(name, Some(dri)) => val attrs = Seq(Attr("t") := "t") ++ modifiers - renderLink(name, dri, attrs:_*) + renderLink(name, dri, attrs*) case Type(name, None) => span(Attr("t") := "t")(name) case Keyword(name) => span(Attr("t") := "k")(name) case Plain(name) => raw(name) diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/SiteRenderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/SiteRenderer.scala index ef7c06416e27..7f64ce92ffc8 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/SiteRenderer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/SiteRenderer.scala @@ -29,10 +29,24 @@ trait SiteRenderer(using DocContext) extends Locations: def siteContent(pageDri: DRI, content: ResolvedTemplate): PageContent = import content.ctx - def tryAsDri(str: String): Option[String] = + + def tryAsDriPlain(str: String): Option[String] = val (path, prefix) = str match case HashRegex(path, prefix) => (path, prefix) case _ => (str, "") + val res = ctx.driForLink(content.template.file, path).filter(driExists) + res.headOption.map(pathToPage(pageDri, _) + prefix) + + def tryAsDri(str: String): Option[String] = + val newStr = + str.dropWhile(c => c == '.' || c == '/').replaceAll("/", ".") match + case str if str.endsWith("$.html") => str.stripSuffix("$.html") + case str if str.endsWith(".html") => str.stripSuffix(".html") + case _ => str + + val (path, prefix) = newStr match + case HashRegex(path, prefix) => (path, prefix) + case _ => (newStr, "") val res = ctx.driForLink(content.template.file, path).filter(driExists) res.headOption.map(pathToPage(pageDri, _) + prefix) @@ -45,11 +59,11 @@ trait SiteRenderer(using DocContext) extends Locations: )( resolveLink(pageDri, str.stripPrefix("/")) ) - def asStaticSite: Option[String] = tryAsDri(str) + def asStaticSite: Option[String] = tryAsDriPlain(str).orElse(tryAsDri(str)) /* Link resolving checks performs multiple strategies with following priority: 1. We check if the link is a valid URL e.g. http://dotty.epfl.ch - 2. We check if the link leads to other static site + 2. 
We check if the link leads to another static site or API page, for example: [[example.scala.Foo]] || [Foo](../example/scala/Foo.html) 3. We check if the link leads to existing asset e.g. images/logo.svg -> /_assets/images/logo.svg */ @@ -57,7 +71,10 @@ trait SiteRenderer(using DocContext) extends Locations: .orElse(asStaticSite) .orElse(asAsset) .getOrElse { - report.warn(s"Unable to resolve link '$str'", content.template.templateFile.file) + if (!summon[DocContext].args.noLinkAssetWarnings){ + val msg = s"Unable to resolve link '$str'" + report.warn(msg, content.template.templateFile.file) + } str } diff --git a/scaladoc/src/dotty/tools/scaladoc/site/SidebarParser.scala b/scaladoc/src/dotty/tools/scaladoc/site/SidebarParser.scala index 1aefeaa21032..d9dc9983a270 100644 --- a/scaladoc/src/dotty/tools/scaladoc/site/SidebarParser.scala +++ b/scaladoc/src/dotty/tools/scaladoc/site/SidebarParser.scala @@ -7,6 +7,8 @@ import com.fasterxml.jackson.core.`type`.TypeReference; import scala.jdk.CollectionConverters._ import java.util.Optional import scala.beans._ +import java.nio.file.{Files, Paths} +import scala.io.Source enum Sidebar: case Category( @@ -30,16 +32,32 @@ object Sidebar: private object RawInputTypeRef extends TypeReference[RawInput] - private def toSidebar(r: RawInput)(using CompilerContext): Sidebar = r match + private def toSidebar(r: RawInput, content: String | java.io.File)(using CompilerContext): Sidebar = r match case RawInput(title, page, index, subsection, dir, hidden) if page.nonEmpty && index.isEmpty && subsection.isEmpty() => + val pagePath = content match + case f: java.io.File => + val pagePath = f.toPath() + .getParent() + .resolve(s"_docs/$page") + if !Files.exists(pagePath) then + report.error(s"Page $page does not exist.") + case s: String => None Sidebar.Page(Option.when(title.nonEmpty)(title), page, hidden) case RawInput(title, page, index, subsection, dir, hidden) if page.isEmpty && (!subsection.isEmpty() || !index.isEmpty()) => - Sidebar.Category(Option.when(title.nonEmpty)(title), Option.when(index.nonEmpty)(index), subsection.asScala.map(toSidebar).toList, Option.when(dir.nonEmpty)(dir)) + Sidebar.Category(Option.when(title.nonEmpty)(title), Option.when(index.nonEmpty)(index), subsection.asScala.map(toSidebar(_, content)).toList, Option.when(dir.nonEmpty)(dir)) case RawInput(title, page, index, subsection, dir, hidden) => - report.error(s"Error parsing YAML configuration file.\n$schemaMessage") + if title.isEmpty() && index.isEmpty() then + val msg = "`title` property is missing for some page." + report.error(s"$msg\n$schemaMessage") + else if title.nonEmpty && (page.isEmpty() || index.isEmpty()) then + val msg = s"Error parsing YAML configuration file: 'index' or 'page' path is missing for title '$title'." + report.error(s"$msg\n$schemaMessage") + else + val msg = "Problem when parsing YAML configuration file." + report.warning(s"$msg\n$schemaMessage") Sidebar.Page(None, page, hidden) - private def schemaMessage: String = + def schemaMessage: String = s"""Static site YAML configuration file should comply with the following description: |The root element of static site needs to be <subsection> |`title` and `directory` properties are ignored in root subsection. | | hidden: # optional - Default value is false. 
| |For more information visit: - |https://docs.scala-lang.org/scala3/guides/scaladoc/static-site.html - |""".stripMargin + |https://docs.scala-lang.org/scala3/guides/scaladoc/static-site.html""".stripMargin def load(content: String | java.io.File)(using CompilerContext): Sidebar.Category = import scala.util.Try @@ -75,7 +92,7 @@ object Sidebar: }, identity ) - toSidebar(root) match + toSidebar(root, content) match case c: Sidebar.Category => c case _ => report.error(s"Root element is not a subsection.\n$schemaMessage") diff --git a/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteLoader.scala b/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteLoader.scala index 489720cc5936..e25639c36183 100644 --- a/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteLoader.scala +++ b/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteLoader.scala @@ -144,7 +144,7 @@ class StaticSiteLoader(val root: File, val args: Scaladoc.Args)(using StaticSite (("1900","01","01"), name) def dateFrom(tf: TemplateFile, default: String = "1900-01-01"): String = - val pageSettings = tf.settings.get("page").collect{ case m: Map[String @unchecked, _] => m } + val pageSettings = tf.settings.get("page").collect{ case m: Map[String @unchecked, ?] => m } pageSettings.flatMap(_.get("date").collect{ case s: String => s}).getOrElse(default) // blogs without date are last val posts = List(rootPath.resolve("_posts")) diff --git a/scaladoc/src/dotty/tools/scaladoc/site/common.scala b/scaladoc/src/dotty/tools/scaladoc/site/common.scala index 0811d217537f..9e58dbe3cd28 100644 --- a/scaladoc/src/dotty/tools/scaladoc/site/common.scala +++ b/scaladoc/src/dotty/tools/scaladoc/site/common.scala @@ -94,7 +94,7 @@ def loadTemplateFile(file: File, defaultTitle: Option[TemplateName] = None)(usin }.map(_.stripPrefix("\"").stripSuffix("\"")) def listSetting(settings: Map[String, Object], name: String): Option[List[String]] = settings.get(name).map { - case elems: List[_] => elems.zipWithIndex.map { + case elems: List[?] => elems.zipWithIndex.map { case (s: String, _) => s case (other, index) => throw new RuntimeException(s"Expected a string at index $index for $name in $file but got $other") diff --git a/scaladoc/src/dotty/tools/scaladoc/site/templates.scala b/scaladoc/src/dotty/tools/scaladoc/site/templates.scala index 92e0096e5af1..c37ff8fe0200 100644 --- a/scaladoc/src/dotty/tools/scaladoc/site/templates.scala +++ b/scaladoc/src/dotty/tools/scaladoc/site/templates.scala @@ -103,10 +103,10 @@ case class TemplateFile( ) def asJavaElement(o: Object): Object = o match - case m: Map[_, _] => m.transform { + case m: Map[?, ?] => m.transform { case (k: String, v: Object) => asJavaElement(v) }.asJava - case l: List[_] => l.map(x => asJavaElement(x.asInstanceOf[Object])).asJava + case l: List[?] => l.map(x => asJavaElement(x.asInstanceOf[Object])).asJava case other => other // Library requires mutable maps.. 
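The `tryAsDri` branch added to `SiteRenderer` above lets static-site Markdown link to API pages by their rendered `.html` paths as well as by fully qualified names. A minimal sketch of just the path-normalization step, assuming a hypothetical `normalizeApiLink` helper (the real logic is inlined in `tryAsDri` and its result is then fed to `driForLink`):

```scala
// Sketch: turn a rendered scaladoc path back into a dotted lookup path,
// mirroring the normalization in the new tryAsDri branch above.
def normalizeApiLink(str: String): String =
  // Strip leading ./ and ../ segments, then treat path separators as package dots.
  val dotted = str.dropWhile(c => c == '.' || c == '/').replaceAll("/", ".")
  dotted match
    case s if s.endsWith("$.html") => s.stripSuffix("$.html") // object page
    case s if s.endsWith(".html")  => s.stripSuffix(".html")
    case s                         => s

@main def linkDemo(): Unit =
  println(normalizeApiLink("../example/scala/Foo.html")) // example.scala.Foo
  println(normalizeApiLink("api/Bar$.html"))             // api.Bar
```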
diff --git a/scaladoc/src/dotty/tools/scaladoc/snippets/SnippetCompiler.scala b/scaladoc/src/dotty/tools/scaladoc/snippets/SnippetCompiler.scala index 03fdd4e849ff..0ab5086e09d0 100644 --- a/scaladoc/src/dotty/tools/scaladoc/snippets/SnippetCompiler.scala +++ b/scaladoc/src/dotty/tools/scaladoc/snippets/SnippetCompiler.scala @@ -21,7 +21,7 @@ import dotty.tools.dotc.util.{ SourcePosition, NoSourcePosition, SourceFile, NoS import scala.util.{ Try, Success, Failure } class SnippetCompiler( - val snippetCompilerSettings: Seq[SnippetCompilerSetting[_]], + val snippetCompilerSettings: Seq[SnippetCompilerSetting[?]], target: AbstractFile = new VirtualDirectory("(memory)") ): object SnippetDriver extends Driver: diff --git a/scaladoc/src/dotty/tools/scaladoc/snippets/SnippetCompilerDataCollector.scala b/scaladoc/src/dotty/tools/scaladoc/snippets/SnippetCompilerDataCollector.scala index 91c49feaa560..37c598a04df5 100644 --- a/scaladoc/src/dotty/tools/scaladoc/snippets/SnippetCompilerDataCollector.scala +++ b/scaladoc/src/dotty/tools/scaladoc/snippets/SnippetCompilerDataCollector.scala @@ -26,7 +26,7 @@ class SnippetCompilerDataCollector[Q <: Quotes](val qctx: Q): p.fold(SnippetCompilerData.Position(0, 0))(p => SnippetCompilerData.Position(p.startLine - 1, p.startColumn)) private def hackGetPositionOfDocstring(using Quotes)(s: qctx.reflect.Symbol): Option[qctx.reflect.Position] = - import dotty.tools.dotc.core.Comments.CommentsContext + import dotty.tools.dotc.core.Comments.docCtx import dotty.tools.dotc given ctx: Contexts.Context = qctx.asInstanceOf[scala.quoted.runtime.impl.QuotesImpl].ctx val docCtx = ctx.docCtx.getOrElse { diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala index 2c7017f76636..1598accf4f40 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala @@ -30,7 +30,7 @@ trait ClassLikeSupport: else Kind.Class(Nil, Nil) private def kindForClasslike(classDef: ClassDef): Kind = - def typeArgs = classDef.getTypeParams.map(mkTypeArgument(_)) + def typeArgs = classDef.getTypeParams.map(mkTypeArgument(_, classDef)) def parameterModifier(parameter: Symbol): String = val fieldSymbol = classDef.symbol.declaredField(parameter.normalizedName) @@ -47,9 +47,13 @@ trait ClassLikeSupport: Some(classDef.constructor.symbol) .filter(s => s.exists && !s.isHiddenByVisibility) .map( _.tree.asInstanceOf[DefDef]) - constr.fold(Nil)( - _.termParamss.map(pList => api.TermParameterList(pList.params.map(p => mkParameter(p, parameterModifier)), paramListModifier(pList.params))) + + constr.fold(Nil)(_.termParamss.map(pList => + api.TermParameterList( + pList.params.map(p => mkParameter(p, classDef, parameterModifier)), + paramListModifier(pList.params), ) + )) if classDef.symbol.flags.is(Flags.Module) then Kind.Object else if classDef.symbol.flags.is(Flags.Trait) then @@ -85,7 +89,7 @@ trait ClassLikeSupport: def getSupertypesGraph(link: LinkToType, to: Seq[Tree]): Seq[(LinkToType, LinkToType)] = to.flatMap { case tree => val symbol = if tree.symbol.isClassConstructor then tree.symbol.owner else tree.symbol - val signature = signatureWithName(tree.asSignature) + val signature = signatureWithName(tree.asSignature(classDef)) val superLink = LinkToType(signature, symbol.dri, bareClasslikeKind(symbol)) val nextTo = unpackTreeToClassDef(tree).parents if symbol.isHiddenByVisibility then getSupertypesGraph(link, nextTo) @@ -96,16 +100,16 @@ trait 
ClassLikeSupport: .filterNot((s, t) => s.isHiddenByVisibility) .map { case (symbol, tpe) => - val signature = signatureWithName(tpe.asSignature) + val signature = signatureWithName(tpe.asSignature(classDef)) LinkToType(signature, symbol.dri, bareClasslikeKind(symbol)) } val selfType = classDef.self.map { (valdef: ValDef) => val symbol = valdef.symbol val tpe = valdef.tpt.tpe - val signature = signatureWithName(tpe.asSignature) + val signature = signatureWithName(tpe.asSignature(classDef)) LinkToType(signature, symbol.dri, Kind.Type(false, false, Seq.empty)) } - val selfSignature: DSignature = signatureWithName(typeForClass(classDef).asSignature) + val selfSignature: DSignature = signatureWithName(typeForClass(classDef).asSignature(classDef)) val graph = HierarchyGraph.withEdges( getSupertypesGraph(LinkToType(selfSignature, classDef.symbol.dri, bareClasslikeKind(classDef.symbol)), unpackTreeToClassDef(classDef).parents) @@ -148,19 +152,19 @@ trait ClassLikeSupport: case dd: DefDef if isDocumentableExtension(dd.symbol) => dd.symbol.extendedSymbol.map { extSym => val memberInfo = unwrapMemberInfo(c, dd.symbol) - val typeParams = dd.symbol.extendedTypeParams.map(mkTypeArgument(_, memberInfo.genericTypes)) + val typeParams = dd.symbol.extendedTypeParams.map(mkTypeArgument(_, c, memberInfo.genericTypes)) val termParams = dd.symbol.extendedTermParamLists.zipWithIndex.flatMap { case (termParamList, index) => memberInfo.termParamLists(index) match case MemberInfo.EvidenceOnlyParameterList => None case MemberInfo.RegularParameterList(info) => - Some(api.TermParameterList(termParamList.params.map(mkParameter(_, memberInfo = info)), paramListModifier(termParamList.params))) + Some(api.TermParameterList(termParamList.params.map(mkParameter(_, c, memberInfo = info)), paramListModifier(termParamList.params))) case _ => assert(false, "memberInfo.termParamLists contains a type parameter list !") } val target = ExtensionTarget( extSym.symbol.normalizedName, typeParams, termParams, - extSym.tpt.asSignature, + extSym.tpt.asSignature(c), extSym.tpt.symbol.dri, extSym.symbol.pos.get.start ) @@ -190,7 +194,7 @@ trait ClassLikeSupport: Some(parseMethod(c, dd.symbol)) case td: TypeDef if !td.symbol.flags.is(Flags.Synthetic) && (!td.symbol.flags.is(Flags.Case) || !td.symbol.flags.is(Flags.Enum)) => - Some(parseTypeDef(td)) + Some(parseTypeDef(td, c)) case vd: ValDef if !isSyntheticField(vd.symbol) && (!vd.symbol.flags.is(Flags.Case) || !vd.symbol.flags.is(Flags.Enum)) => Some(parseValDef(c, vd)) @@ -268,7 +272,7 @@ trait ClassLikeSupport: def getParentsAsLinkToTypes: List[LinkToType] = c.getParentsAsTreeSymbolTuples.map { - (tree, symbol) => LinkToType(tree.asSignature, symbol.dri, bareClasslikeKind(symbol)) + (tree, symbol) => LinkToType(tree.asSignature(c), symbol.dri, bareClasslikeKind(symbol)) } def getParentsAsTreeSymbolTuples: List[(Tree, Symbol)] = @@ -324,7 +328,7 @@ trait ClassLikeSupport: val enumTypes = companion.membersToDocument.collect { case td: TypeDef if !td.symbol.flags.is(Flags.Synthetic) && td.symbol.flags.is(Flags.Enum) && td.symbol.flags.is(Flags.Case) => td - }.toList.map(parseTypeDef) + }.toList.map(parseTypeDef(_, classDef)) val enumNested = companion.membersToDocument.collect { case c: ClassDef if c.symbol.flags.is(Flags.Case) && c.symbol.flags.is(Flags.Enum) => processTree(c)(parseClasslike(c)) @@ -351,7 +355,7 @@ trait ClassLikeSupport: val memberInfo = unwrapMemberInfo(c, methodSymbol) - val unshuffledMemberInfoParamLists = + val unshuffledMemberInfoParamLists = if 
methodSymbol.isExtensionMethod && methodSymbol.isRightAssoc then // Taken from RefinedPrinter.scala // If you change the names of the clauses below, also change them in right-associative-extension-methods.md @@ -367,7 +371,7 @@ trait ClassLikeSupport: rightTyParams ::: rightParam ::: rest6 else memberInfo.paramLists // it wasn't a binary operator, after all. - else + else memberInfo.paramLists val croppedUnshuffledMemberInfoParamLists = unshuffledMemberInfoParamLists.takeRight(paramLists.length) @@ -377,10 +381,10 @@ trait ClassLikeSupport: case (_: TermParamClause, MemberInfo.EvidenceOnlyParameterList) => Nil case (pList: TermParamClause, MemberInfo.RegularParameterList(info)) => Some(Left(api.TermParameterList(pList.params.map( - mkParameter(_, paramPrefix, memberInfo = info)), paramListModifier(pList.params) + mkParameter(_, c, paramPrefix, memberInfo = info)), paramListModifier(pList.params) ))) case (TypeParamClause(genericTypeList), MemberInfo.TypeParameterList(memInfoTypes)) => - Some(Right(genericTypeList.map(mkTypeArgument(_, memInfoTypes, memberInfo.contextBounds)))) + Some(Right(genericTypeList.map(mkTypeArgument(_, c, memInfoTypes, memberInfo.contextBounds)))) case (_,_) => assert(false, s"croppedUnshuffledMemberInfoParamLists and SymOps.nonExtensionParamLists disagree on whether this clause is a type or term one") } @@ -388,7 +392,7 @@ trait ClassLikeSupport: val methodKind = if methodSymbol.isClassConstructor then Kind.Constructor(basicDefKind) - else if methodSymbol.flags.is(Flags.Implicit) then + else if methodSymbol.flags.is(Flags.Implicit) then val termParamLists: List[TermParamClause] = methodSymbol.nonExtensionTermParamLists extractImplicitConversion(method.returnTpt.tpe) match case Some(conversion) if termParamLists.size == 0 || (termParamLists.size == 1 && termParamLists.head.params.size == 0) => @@ -402,7 +406,7 @@ trait ClassLikeSupport: )) case _ => Kind.Implicit(basicDefKind, None) - else if methodSymbol.flags.is(Flags.Given) then Kind.Given(basicDefKind, Some(method.returnTpt.tpe.asSignature), extractImplicitConversion(method.returnTpt.tpe)) + else if methodSymbol.flags.is(Flags.Given) then Kind.Given(basicDefKind, Some(method.returnTpt.tpe.asSignature(c)), extractImplicitConversion(method.returnTpt.tpe)) else specificKind(basicDefKind) val origin = if !methodSymbol.isOverridden then Origin.RegularlyDefined else @@ -418,7 +422,7 @@ trait ClassLikeSupport: mkMember( methodSymbol, methodKind, - method.returnTpt.tpe.asSignature + method.returnTpt.tpe.asSignature(c), )( modifiers = modifiers, origin = origin, @@ -428,28 +432,31 @@ trait ClassLikeSupport: def mkParameter( argument: ValDef, + classDef: ClassDef, prefix: Symbol => String = _ => "", isExtendedSymbol: Boolean = false, isGrouped: Boolean = false, - memberInfo: Map[String, TypeRepr] = Map.empty) = - val inlinePrefix = if argument.symbol.flags.is(Flags.Inline) then "inline " else "" - val nameIfNotSynthetic = Option.when(!argument.symbol.flags.is(Flags.Synthetic))(argument.symbol.normalizedName) - val name = argument.symbol.normalizedName - api.TermParameter( - argument.symbol.getAnnotations(), - inlinePrefix + prefix(argument.symbol), - nameIfNotSynthetic, - argument.symbol.dri, - memberInfo.get(name).fold(argument.tpt.asSignature)(_.asSignature), - isExtendedSymbol, - isGrouped - ) + memberInfo: Map[String, TypeRepr] = Map.empty, + ) = + val inlinePrefix = if argument.symbol.flags.is(Flags.Inline) then "inline " else "" + val nameIfNotSynthetic = 
Option.when(!argument.symbol.flags.is(Flags.Synthetic))(argument.symbol.normalizedName) + val name = argument.symbol.normalizedName + api.TermParameter( + argument.symbol.getAnnotations(), + inlinePrefix + prefix(argument.symbol), + nameIfNotSynthetic, + argument.symbol.dri, + memberInfo.get(name).fold(argument.tpt.asSignature(classDef))(_.asSignature(classDef)), + isExtendedSymbol, + isGrouped + ) def mkTypeArgument( argument: TypeDef, + classDef: ClassDef, memberInfo: Map[String, TypeBounds] = Map.empty, - contextBounds: Map[String, DSignature] = Map.empty - ): TypeParameter = + contextBounds: Map[String, DSignature] = Map.empty, + ): TypeParameter = val variancePrefix: "+" | "-" | "" = if argument.symbol.flags.is(Flags.Covariant) then "+" else if argument.symbol.flags.is(Flags.Contravariant) then "-" @@ -457,7 +464,7 @@ trait ClassLikeSupport: val name = argument.symbol.normalizedName val normalizedName = if name.matches("_\\$\\d*") then "_" else name - val boundsSignature = memberInfo.get(name).fold(argument.rhs.asSignature)(_.asSignature) + val boundsSignature = memberInfo.get(name).fold(argument.rhs.asSignature(classDef))(_.asSignature(classDef)) val signature = contextBounds.get(name) match case None => boundsSignature case Some(contextBoundsSignature) => @@ -471,14 +478,14 @@ trait ClassLikeSupport: signature ) - def parseTypeDef(typeDef: TypeDef): Member = + def parseTypeDef(typeDef: TypeDef, classDef: ClassDef): Member = def isTreeAbstract(typ: Tree): Boolean = typ match { case TypeBoundsTree(_, _) => true case LambdaTypeTree(params, body) => isTreeAbstract(body) case _ => false } val (generics, tpeTree) = typeDef.rhs match - case LambdaTypeTree(params, body) => (params.map(mkTypeArgument(_)), body) + case LambdaTypeTree(params, body) => (params.map(mkTypeArgument(_, classDef)), body) case tpe => (Nil, tpe) val defaultKind = Kind.Type(!isTreeAbstract(typeDef.rhs), typeDef.symbol.isOpaque, generics).asInstanceOf[Kind.Type] @@ -492,19 +499,19 @@ trait ClassLikeSupport: Some(Link(l.tpe.typeSymbol.owner.name, l.tpe.typeSymbol.owner.dri)) case _ => None } - mkMember(typeDef.symbol, Kind.Exported(kind), tpeTree.asSignature)( + mkMember(typeDef.symbol, Kind.Exported(kind), tpeTree.asSignature(classDef))( deprecated = typeDef.symbol.isDeprecated(), origin = Origin.ExportedFrom(origin), experimental = typeDef.symbol.isExperimental() ) } - else mkMember(typeDef.symbol, kind, tpeTree.asSignature)(deprecated = typeDef.symbol.isDeprecated()) + else mkMember(typeDef.symbol, kind, tpeTree.asSignature(classDef))(deprecated = typeDef.symbol.isDeprecated()) def parseValDef(c: ClassDef, valDef: ValDef): Member = def defaultKind = if valDef.symbol.flags.is(Flags.Mutable) then Kind.Var else Kind.Val val memberInfo = unwrapMemberInfo(c, valDef.symbol) val kind = if valDef.symbol.flags.is(Flags.Implicit) then Kind.Implicit(Kind.Val, extractImplicitConversion(valDef.tpt.tpe)) - else if valDef.symbol.flags.is(Flags.Given) then Kind.Given(Kind.Val, Some(memberInfo.res.asSignature), extractImplicitConversion(valDef.tpt.tpe)) + else if valDef.symbol.flags.is(Flags.Given) then Kind.Given(Kind.Val, Some(memberInfo.res.asSignature(c)), extractImplicitConversion(valDef.tpt.tpe)) else if valDef.symbol.flags.is(Flags.Enum) then Kind.EnumCase(Kind.Val) else defaultKind @@ -514,7 +521,7 @@ trait ClassLikeSupport: .filterNot(m => m == Modifier.Lazy || m == Modifier.Final) case _ => valDef.symbol.getExtraModifiers() - mkMember(valDef.symbol, kind, memberInfo.res.asSignature)( + mkMember(valDef.symbol, kind, 
memberInfo.res.asSignature(c))( modifiers = modifiers, deprecated = valDef.symbol.isDeprecated(), experimental = valDef.symbol.isExperimental() @@ -552,7 +559,7 @@ trait ClassLikeSupport: contextBounds: Map[String, DSignature] = Map.empty, ){ val genericTypes: Map[String, TypeBounds] = paramLists.collect{ case MemberInfo.TypeParameterList(types) => types }.headOption.getOrElse(Map()) - + val termParamLists: List[MemberInfo.ParameterList] = paramLists.filter(_.isTerm) } @@ -562,7 +569,7 @@ trait ClassLikeSupport: case EvidenceOnlyParameterList extends ParameterList(isTerm = true, isUsing = false) case RegularParameterList(m: Map[String, TypeRepr])(isUsing: Boolean) extends ParameterList(isTerm = true, isUsing) case TypeParameterList(m: Map[String, TypeBounds]) extends ParameterList(isTerm = false, isUsing = false) - + export ParameterList.{RegularParameterList, EvidenceOnlyParameterList, TypeParameterList} @@ -571,7 +578,7 @@ trait ClassLikeSupport: val baseTypeRepr = typeForClass(c).memberType(symbol) def isSyntheticEvidence(name: String) = - if !name.startsWith(NameKinds.EvidenceParamName.separator) then false else + if !name.startsWith(NameKinds.ContextBoundParamName.separator) then false else // This assumes that every parameter that starts with `evidence$` and is implicit is generated by compiler to desugar context bound. // Howrever, this is just a heuristic, so // `def foo[A](evidence$1: ClassTag[A]) = 1` @@ -604,13 +611,18 @@ trait ClassLikeSupport: val (paramsThatLookLikeContextBounds, contextBounds) = evidences.partitionMap { case (_, AppliedType(tpe, List(typeParam: ParamRef))) => - Right(nameForRef(typeParam) -> tpe.asSignature) + Right(nameForRef(typeParam) -> tpe.asSignature(c)) case (name, original) => findParamRefs(original) match case Nil => Left((name, original)) case typeParam :: _ => val name = nameForRef(typeParam) - val signature = Seq(Plain("(["), dotty.tools.scaladoc.Type(name, None), Plain("]"), Keyword(" =>> ")) ++ original.asSignature ++ Seq(Plain(")")) + val signature = Seq( + Plain("(["), + dotty.tools.scaladoc.Type(name, None), + Plain("]"), + Keyword(" =>> "), + ) ++ original.asSignature(c) ++ Seq(Plain(")")) Right(name -> signature.toList) } @@ -619,7 +631,7 @@ trait ClassLikeSupport: val termParamList = if newParams.isEmpty && contextBounds.nonEmpty then MemberInfo.EvidenceOnlyParameterList else MemberInfo.RegularParameterList(newParams)(isUsing) - + MemberInfo(memberInfo.paramLists :+ termParamList, methodType.resType, contextBounds.toMap) diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/ScalaDocSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/ScalaDocSupport.scala index 51db17fb087a..69666e07f4bf 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/ScalaDocSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/ScalaDocSupport.scala @@ -45,7 +45,7 @@ object ScaladocSupport: val commentString: String = if tree.symbol.isClassDef || tree.symbol.owner.isClassDef then import dotty.tools.dotc - import dotty.tools.dotc.core.Comments.CommentsContext + import dotty.tools.dotc.core.Comments.docCtx given ctx: dotc.core.Contexts.Context = quotes.asInstanceOf[scala.quoted.runtime.impl.QuotesImpl].ctx val docCtx = ctx.docCtx.get diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/TastyParser.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/TastyParser.scala index cd1bed42f485..f55451fdc636 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/TastyParser.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/TastyParser.scala @@ -5,7 +5,7 @@ package tasty 
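The `TastyParser.scala` hunks below drop the internal `DocTastyInspector` hack (which reached into compiler internals via `postProcess`) in favour of the public `scala.tasty.inspector` entry point. A hedged sketch of that public API, with `PrintTreesInspector` as an illustrative stand-in for `ScaladocTastyInspector` (requires the `scala3-tasty-inspector` module on the classpath):

```scala
import scala.quoted.*
import scala.tasty.inspector.*

// Each Tasty value pairs a .tasty file path with the root Tree of its unit,
// so no postProcess hook into compiler internals is needed anymore.
class PrintTreesInspector extends Inspector:
  def inspect(using Quotes)(tastys: List[Tasty[quotes.type]]): Unit =
    import quotes.reflect.*
    for tasty <- tastys do
      println(s"${tasty.path}: ${tasty.ast.show}")

@main def inspectDemo(): Unit =
  // Arguments: .tasty files to inspect, jars to scan, extra classpath entries.
  TastyInspector.inspectAllTastyFiles(List("Foo.tasty"), Nil, Nil)(PrintTreesInspector())
```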
import java.util.regex.Pattern import scala.util.{Try, Success, Failure} -import scala.tasty.inspector.DocTastyInspector +import scala.tasty.inspector.{TastyInspector, Inspector, Tasty} import scala.quoted._ import dotty.tools.dotc @@ -24,24 +24,12 @@ import ScaladocSupport._ * * Delegates most of the work to [[TastyParser]] [[dotty.tools.scaladoc.tasty.TastyParser]]. */ -case class ScaladocTastyInspector()(using ctx: DocContext) extends DocTastyInspector: +case class ScaladocTastyInspector()(using ctx: DocContext) extends Inspector: private val topLevels = Seq.newBuilder[(String, Member)] private var rootDoc: Option[Comment] = None - def processCompilationUnit(using Quotes)(root: reflect.Tree): Unit = () - - override def postProcess(using Quotes): Unit = - // hack into the compiler to get a list of all top-level trees - // in principle, to do this, one would collect trees in processCompilationUnit - // however, path-dependent types disallow doing so w/o using casts - inline def hackForeachTree(thunk: reflect.Tree => Unit): Unit = - given dctx: dotc.core.Contexts.Context = quotes.asInstanceOf[scala.quoted.runtime.impl.QuotesImpl].ctx - dctx.run.nn.units.foreach { compilationUnit => - // mirrors code from TastyInspector - thunk(compilationUnit.tpdTree.asInstanceOf[reflect.Tree]) - } - + def inspect(using Quotes)(tastys: List[scala.tasty.inspector.Tasty[quotes.type]]): Unit = val symbolsToSkip: Set[reflect.Symbol] = ctx.args.identifiersToSkip.flatMap { ref => val qrSymbol = reflect.Symbol @@ -116,7 +104,8 @@ case class ScaladocTastyInspector()(using ctx: DocContext) extends DocTastyInspe rootDoc = Some(parseCommentString(using parser.qctx, summon[DocContext])(content, topLevelPck, None)) } - hackForeachTree { root => + for tasty <- tastys do { + val root = tasty.ast if !isSkipped(root.symbol) then val treeRoot = root.asInstanceOf[parser.qctx.reflect.Tree] processRootDocIfNeeded(treeRoot) @@ -126,7 +115,10 @@ case class ScaladocTastyInspector()(using ctx: DocContext) extends DocTastyInspe if ctx.args.documentSyntheticTypes then import parser.qctx.reflect._ val intrinsicTypeDefs = parser.intrinsicTypeDefs.toSeq.map { s => - "scala" -> parser.parseTypeDef(s.tree.asInstanceOf[TypeDef]) + "scala" -> parser.parseTypeDef( + s.tree.asInstanceOf[TypeDef], + defn.AnyClass.tree.asInstanceOf[ClassDef], + ) } val intrinsicClassDefs = parser.intrinsicClassDefs.toSeq.map { s => "scala" -> parser.parseClasslike(s.tree.asInstanceOf[ClassDef]) @@ -138,15 +130,39 @@ case class ScaladocTastyInspector()(using ctx: DocContext) extends DocTastyInspe topLevels += "scala" -> Member(scalaPckg.fullName, "", scalaPckg.dri, Kind.Package) topLevels += mergeAnyRefAliasAndObject(parser) - def result(): (List[Member], Option[Comment]) = - topLevels.clear() - rootDoc = None + + + def mergeAnyRefAliasAndObject(parser: TastyParser) = + import parser.qctx.reflect._ + val javaLangObjectDef = defn.ObjectClass.tree.asInstanceOf[ClassDef] + val objectMembers = parser.extractPatchedMembers(javaLangObjectDef) + val aM = parser.parseTypeDef( + defn.AnyRefClass.tree.asInstanceOf[TypeDef], + defn.AnyClass.tree.asInstanceOf[ClassDef], + ) + "scala" -> aM.copy( + kind = Kind.Class(Nil, Nil), + members = objectMembers + ) + +object ScaladocTastyInspector: + + def loadDocs()(using ctx: DocContext): (List[Member], Option[Comment]) = val filePaths = ctx.args.tastyFiles.map(_.getAbsolutePath).toList val classpath = ctx.args.classpath.split(java.io.File.pathSeparator).toList - if filePaths.nonEmpty then inspectFilesInContext(classpath, 
filePaths) + val inspector = new ScaladocTastyInspector + + val (tastyPaths, nonTastyPaths) = filePaths.partition(_.endsWith(".tasty")) + val (jarPaths, invalidPaths) = nonTastyPaths.partition(_.endsWith(".jar")) + + for invalidPath <- invalidPaths do + report.error("File extension is not `tasty` or `jar`: " + invalidPath) + + if tastyPaths.nonEmpty then + TastyInspector.inspectAllTastyFiles(tastyPaths, jarPaths, classpath)(inspector) - val all = topLevels.result() + val all = inspector.topLevels.result() all.groupBy(_._1).map { case (pckName, members) => val (pcks, rest) = members.map(_._2).partition(_.kind == Kind.Package) val basePck = pcks.reduce( (p1, p2) => @@ -154,17 +170,10 @@ case class ScaladocTastyInspector()(using ctx: DocContext) extends DocTastyInspe if withNewMembers.docs.isEmpty then withNewMembers.withDocs(p2.docs) else withNewMembers ) basePck.withMembers((basePck.members ++ rest).sortBy(_.name)) - }.toList -> rootDoc + }.toList -> inspector.rootDoc + +end ScaladocTastyInspector - def mergeAnyRefAliasAndObject(parser: TastyParser) = - import parser.qctx.reflect._ - val javaLangObjectDef = defn.ObjectClass.tree.asInstanceOf[ClassDef] - val objectMembers = parser.extractPatchedMembers(javaLangObjectDef) - val aM = parser.parseTypeDef(defn.AnyRefClass.tree.asInstanceOf[TypeDef]) - "scala" -> aM.copy( - kind = Kind.Class(Nil, Nil), - members = objectMembers - ) /** Parses a single Tasty compilation unit. */ case class TastyParser( qctx: Quotes, diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/TypesSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/TypesSupport.scala index c94eda9409b2..35cf1cb6eec3 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/TypesSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/TypesSupport.scala @@ -13,41 +13,19 @@ trait TypesSupport: type SSignature = List[SignaturePart] - def getGivenInstance(method: qctx.reflect.DefDef): Option[SSignature] = - import qctx.reflect._ - given qctx.type = qctx - - def extractTypeSymbol(t: Tree): Option[Symbol] = t match - case tpeTree: TypeTree => - inner(tpeTree.tpe) - case other => None - - def inner(tpe: TypeRepr): Option[Symbol] = tpe match - case ThisType(tpe) => inner(tpe) - case AnnotatedType(tpe, _) => inner(tpe) - case AppliedType(tpe, _) => inner(tpe) - case tp @ TermRef(qual, typeName) => Some(tp.termSymbol) - case tp @ TypeRef(qual, typeName) => Some(tp.typeSymbol) - - val typeSymbol = extractTypeSymbol(method.returnTpt) - - typeSymbol.map(_.tree).collect { - case c: ClassDef => c.getTreeOfFirstParent - case _ => Some(method.returnTpt) - }.flatten.map(_.asSignature) - given TreeSyntax: AnyRef with extension (using Quotes)(tpeTree: reflect.Tree) - def asSignature: SSignature = + def asSignature(elideThis: reflect.ClassDef): SSignature = import reflect._ tpeTree match - case TypeBoundsTree(low, high) => typeBoundsTreeOfHigherKindedType(low.tpe, high.tpe) - case tpeTree: TypeTree => topLevelProcess(tpeTree.tpe) - case term: Term => topLevelProcess(term.tpe) + case TypeBoundsTree(low, high) => typeBoundsTreeOfHigherKindedType(low.tpe, high.tpe)(using elideThis) + case tpeTree: TypeTree => topLevelProcess(tpeTree.tpe)(using elideThis) + case term: Term => topLevelProcess(term.tpe)(using elideThis) given TypeSyntax: AnyRef with extension (using Quotes)(tpe: reflect.TypeRepr) - def asSignature: SSignature = topLevelProcess(tpe) + def asSignature(elideThis: reflect.ClassDef): SSignature = + topLevelProcess(tpe)(using elideThis) private def plain(str: String): SignaturePart = Plain(str) @@ 
-58,13 +36,15 @@ trait TypesSupport: private def tpe(str: String): SignaturePart = dotty.tools.scaladoc.Type(str, None) + private def inParens(s: SSignature, wrap: Boolean = true) = + if wrap then plain("(").l ++ s ++ plain(")").l else s + extension (on: SignaturePart) def l: List[SignaturePart] = List(on) private def tpe(using Quotes)(symbol: reflect.Symbol): SSignature = import SymOps._ - val suffix = if symbol.isValDef || symbol.flags.is(reflect.Flags.Module) then plain(".type").l else Nil val dri: Option[DRI] = Option(symbol).filterNot(_.isHiddenByVisibility).map(_.dri) - dotty.tools.scaladoc.Type(symbol.normalizedName, dri) :: suffix + dotty.tools.scaladoc.Type(symbol.normalizedName, dri).l private def commas(lists: List[SSignature]) = lists match case List(single) => single @@ -86,25 +66,47 @@ trait TypesSupport: case _ => false case _ => false - private def topLevelProcess(using Quotes)(tp: reflect.TypeRepr): SSignature = + private def topLevelProcess(using Quotes)(tp: reflect.TypeRepr)(using elideThis: reflect.ClassDef): SSignature = import reflect._ tp match - case ThisType(tpe) => inner(tpe) :+ plain(".this.type") + case ThisType(tpe) => + val suffix = List(keyword("this"), plain("."), keyword("type")) + if skipPrefix(tp, elideThis) then suffix + else inner(tpe) ++ plain(".").l ++ suffix case tpe => inner(tpe) // TODO #23 add support for all types signatures that makes sense - private def inner(using Quotes)(tp: reflect.TypeRepr)(using indent: Int = 0): SSignature = + private def inner( + using Quotes, + )( + tp: reflect.TypeRepr, + )(using + elideThis: reflect.ClassDef, + indent: Int = 0, + skipTypeSuffix: Boolean = false, + ): SSignature = import reflect._ def noSupported(name: String): SSignature = println(s"WARN: Unsupported type: $name: ${tp.show}") plain(s"Unsupported[$name]").l tp match - case OrType(left, right) => inner(left) ++ keyword(" | ").l ++ inner(right) - case AndType(left, right) => inner(left) ++ keyword(" & ").l ++ inner(right) + case OrType(left, right) => + inParens(inner(left), shouldWrapInParens(left, tp, true)) + ++ keyword(" | ").l + ++ inParens(inner(right), shouldWrapInParens(right, tp, false)) + case AndType(left, right) => + inParens(inner(left), shouldWrapInParens(left, tp, true)) + ++ keyword(" & ").l + ++ inParens(inner(right), shouldWrapInParens(right, tp, false)) case ByNameType(tpe) => keyword("=> ") :: inner(tpe) case ConstantType(constant) => plain(constant.show).l - case ThisType(tpe) => inner(tpe) + case ThisType(tpe) => + val prefix = findSupertype(elideThis, tpe.typeSymbol) match + case Some(_) => Nil + case None => inner(tpe) ++ plain(".").l + val suffix = if skipTypeSuffix then Nil else List(plain("."), keyword("type")) + prefix ++ keyword("this").l ++ suffix case AnnotatedType(AppliedType(_, Seq(tpe)), annotation) if isRepeatedAnnotation(annotation) => inner(tpe) :+ plain("*") case AppliedType(repeatedClass, Seq(tpe)) if isRepeated(repeatedClass) => @@ -191,12 +193,18 @@ trait TypesSupport: } case t @ AppliedType(tpe, typeList) => import dotty.tools.dotc.util.Chars._ - if !t.typeSymbol.name.forall(isIdentifierPart) && typeList.size == 2 then - inner(typeList.head) + if defn.isTupleClass(tpe.typeSymbol) && typeList.length != 1 then + typeList match + case Nil => Nil + case args => inParens(commas(args.map(inner(_)))) + else if isInfix(t) then + val lhs = typeList.head + val rhs = typeList.last + inParens(inner(lhs), shouldWrapInParens(lhs, t, true)) ++ plain(" ").l ++ inner(tpe) ++ plain(" ").l - ++ inner(typeList.last) + ++ 
inParens(inner(rhs), shouldWrapInParens(rhs, t, false)) else if t.isFunctionType then val arrow = if t.isContextFunctionType then " ?=> " else " => " typeList match @@ -205,74 +213,56 @@ trait TypesSupport: case Seq(rtpe) => plain("()").l ++ keyword(arrow).l ++ inner(rtpe) case Seq(arg, rtpe) => - def withParentheses(tpe: TypeRepr) = plain("(").l ++ inner(tpe) ++ plain(")").l val partOfSignature = arg match - case tpe @ (_:TermRef | _:TypeRef | _:ConstantType | _: ParamRef) => inner(arg) - case tpe: AppliedType if !tpe.isFunctionType && !tpe.isTupleN => inner(arg) - case _ => withParentheses(arg) + case _: TermRef | _: TypeRef | _: ConstantType | _: ParamRef => inner(arg) + case _ => inParens(inner(arg)) partOfSignature ++ keyword(arrow).l ++ inner(rtpe) case args => - plain("(").l ++ commas(args.init.map(inner)) ++ plain(")").l ++ keyword(arrow).l ++ inner(args.last) - else if t.isTupleN then - typeList match - case Nil => - Nil - case args => - plain("(").l ++ commas(args.map(inner)) ++ plain(")").l + plain("(").l ++ commas(args.init.map(inner(_))) ++ plain(")").l ++ keyword(arrow).l ++ inner(args.last) else inner(tpe) ++ plain("[").l ++ commas(typeList.map { t => t match case _: TypeBounds => keyword("_").l ++ inner(t) - case _ => inner(t) + case _ => topLevelProcess(t) }) ++ plain("]").l case tp @ TypeRef(qual, typeName) => qual match { case r: RecursiveThis => tpe(s"this.$typeName").l - case _: TypeRepr => tpe(tp.typeSymbol) + case t if skipPrefix(t, elideThis) => + tpe(tp.typeSymbol) + case _: TermRef | _: ParamRef => + val suffix = if tp.typeSymbol == Symbol.noSymbol then tpe(typeName).l else tpe(tp.typeSymbol) + inner(qual)(using skipTypeSuffix = true) ++ plain(".").l ++ suffix + case ThisType(tr) => + findSupertype(elideThis, tr.typeSymbol) match + case Some((sym, AppliedType(tr2, args))) => + sym.tree.asInstanceOf[ClassDef].constructor.paramss.headOption match + case Some(TypeParamClause(tpc)) => + tpc.zip(args).collectFirst { + case (TypeDef(name, _), arg) if name == typeName => arg + } match + case Some(tr) => inner(tr) + case None => tpe(tp.typeSymbol) + case _ => tpe(tp.typeSymbol) + case Some(_) => tpe(tp.typeSymbol) + case None => + val sig = inParens(inner(qual)(using skipTypeSuffix = true), shouldWrapInParens(qual, tp, true)) + sig ++ plain(".").l ++ tpe(tp.typeSymbol) + case _ => + val sig = inParens(inner(qual), shouldWrapInParens(qual, tp, true)) + sig ++ keyword("#").l ++ tpe(tp.typeSymbol) } - // convertTypeOrBoundsToReference(reflect)(qual) match { - // case TypeReference(label, link, xs, _) => TypeReference(typeName, link + "/" + label, xs, true) - // case EmptyReference => TypeReference(typeName, "", Nil, true) - // case _ if tp.typeSymbol.exists => - // tp.typeSymbol match { - // // NOTE: Only TypeRefs can reference ClassDefSymbols - // case sym if sym.isClassDef => //Need to be split because these types have their own file - // convertTypeOrBoundsToReference(reflect)(qual) match { - // case TypeReference(label, link, xs, _) => TypeReference(sym.name, link + "/" + label, xs, true) - // case EmptyReference if sym.name == "" | sym.name == "_root_" => EmptyReference - // case EmptyReference => TypeReference(sym.name, "", Nil, true) - // case _ => throw Exception("Match error in SymRef/TypeOrBounds/ClassDef. This should not happen, please open an issue. 
" + convertTypeOrBoundsToReference(reflect)(qual)) - // } - - // // NOTE: This branch handles packages, which are now TypeRefs - // case sym if sym.isTerm || sym.isTypeDef => - // convertTypeOrBoundsToReference(reflect)(qual) match { - // case TypeReference(label, link, xs, _) => TypeReference(sym.name, link + "/" + label, xs) - // case EmptyReference if sym.name == "" | sym.name == "_root_" => EmptyReference - // case EmptyReference => TypeReference(sym.name, "", Nil) - // case _ => throw Exception("Match error in SymRef/TypeOrBounds/Other. This should not happen, please open an issue. " + convertTypeOrBoundsToReference(reflect)(qual)) - // } - // case sym => throw Exception("Match error in SymRef. This should not happen, please open an issue. " + sym) - // } - // case _ => - // throw Exception("Match error in TypeRef. This should not happen, please open an issue. " + convertTypeOrBoundsToReference(reflect)(qual)) - // } + case tr @ TermRef(qual, typeName) => - tr.termSymbol.tree match - case vd: ValDef => inner(vd.tpt.tpe) - case _ => tpe(tr.termSymbol) - - - // convertTypeOrBoundsToReference(reflect)(qual) match { - // case TypeReference(label, link, xs, _) => TypeReference(typeName + "$", link + "/" + label, xs) - // case EmptyReference => TypeReference(typeName, "", Nil) - // case _ => throw Exception("Match error in TermRef. This should not happen, please open an issue. " + convertTypeOrBoundsToReference(reflect)(qual)) - // } - - // NOTE: old SymRefs are now either TypeRefs or TermRefs - the logic here needs to be moved into above branches - // NOTE: _.symbol on *Ref returns its symbol - // case SymRef(symbol, typeOrBounds) => symbol match { - // } - // case _ => throw Exception("No match for type in conversion to Reference. This should not happen, please open an issue. 
" + tp) + val prefix = qual match + case t if skipPrefix(t, elideThis) => Nil + case tp => inner(tp)(using skipTypeSuffix = true) ++ plain(".").l + val suffix = if skipTypeSuffix then Nil else List(plain("."), keyword("type")) + val typeSig = tr.termSymbol.tree match + case vd: ValDef if tr.termSymbol.flags.is(Flags.Module) => + inner(vd.tpt.tpe) + case _ => plain(typeName).l + prefix ++ typeSig ++ suffix + case TypeBounds(low, hi) => if(low == hi) keyword(" = ").l ++ inner(low) else typeBoundsTreeOfHigherKindedType(low, hi) @@ -290,7 +280,9 @@ trait TypesSupport: } inner(sc) ++ keyword(" match ").l ++ plain("{\n").l ++ casesTexts ++ plain(spaces + "}").l - case ParamRef(m: MethodType, i) => tpe(m.paramNames(i)).l ++ plain(".type").l + case ParamRef(m: MethodType, i) => + val suffix = if skipTypeSuffix then Nil else List(plain("."), keyword("type")) + tpe(m.paramNames(i)).l ++ suffix case ParamRef(binder: LambdaType, i) => tpe(binder.paramNames(i)).l @@ -310,29 +302,86 @@ trait TypesSupport: s"${tpe.show(using Printer.TypeReprStructure)}" throw MatchError(msg) - private def typeBound(using Quotes)(t: reflect.TypeRepr, low: Boolean) = + private def typeBound(using Quotes)(t: reflect.TypeRepr, low: Boolean)(using elideThis: reflect.ClassDef) = import reflect._ val ignore = if (low) t.typeSymbol == defn.NothingClass else t.typeSymbol == defn.AnyClass val prefix = keyword(if low then " >: " else " <: ") t match { - case l: TypeLambda => prefix :: plain("(").l ++ inner(l) ++ plain(")").l - case p: ParamRef => prefix :: inner(p) - case other if !ignore => prefix :: inner(other) + case l: TypeLambda => prefix :: inParens(inner(l)(using elideThis)) + case p: ParamRef => prefix :: inner(p)(using elideThis) + case other if !ignore => prefix :: topLevelProcess(other)(using elideThis) case _ => Nil } - private def typeBoundsTreeOfHigherKindedType(using Quotes)(low: reflect.TypeRepr, high: reflect.TypeRepr) = + private def typeBoundsTreeOfHigherKindedType(using Quotes)(low: reflect.TypeRepr, high: reflect.TypeRepr)(using elideThis: reflect.ClassDef) = import reflect._ def regularTypeBounds(low: TypeRepr, high: TypeRepr) = - if low == high then keyword(" = ").l ++ inner(low) - else typeBound(low, low = true) ++ typeBound(high, low = false) + if low == high then keyword(" = ").l ++ inner(low)(using elideThis) + else typeBound(low, low = true)(using elideThis) ++ typeBound(high, low = false)(using elideThis) high.match case TypeLambda(params, paramBounds, resType) => if resType.typeSymbol == defn.AnyClass then plain("[").l ++ commas(params.zip(paramBounds).map { (name, typ) => val normalizedName = if name.matches("_\\$\\d*") then "_" else name - tpe(normalizedName).l ++ inner(typ) + tpe(normalizedName).l ++ inner(typ)(using elideThis) }) ++ plain("]").l else regularTypeBounds(low, high) case _ => regularTypeBounds(low, high) + + private def findSupertype(using Quotes)(c: reflect.ClassDef, sym: reflect.Symbol) = + getSupertypes(c).find((s, t) => s == sym) + + private def skipPrefix(using Quotes)(tr: reflect.TypeRepr, elideThis: reflect.ClassDef) = + import reflect._ + + def collectOwners(owners: Set[Symbol], sym: Symbol): Set[Symbol] = + if sym.flags.is(Flags.Package) then owners + else collectOwners(owners + sym, sym.owner) + val owners = collectOwners(Set.empty, elideThis.symbol) + + tr match + case NoPrefix() => true + case ThisType(tp) if owners(tp.typeSymbol) => true + case tp if owners(tp.typeSymbol) => true + case _ => + val flags = tr.typeSymbol.flags + flags.is(Flags.Module) || 
flags.is(Flags.Package) + + private def shouldWrapInParens(using Quotes)(inner: reflect.TypeRepr, outer: reflect.TypeRepr, isLeft: Boolean) = + import reflect._ + + (inner, outer) match + case (_: AndType, _: TypeRef) => true + case (_: OrType, _: TypeRef) => true + case (t: AppliedType, _: TypeRef) => isInfix(t) + + case (_: AndType, _: AndType) => false + case (_: AndType, _: OrType) => false + case (_: OrType, _: AndType) => true + case (_: OrType, _: OrType) => false + + case (at: AppliedType, _: AndType) => at.isFunctionType || isInfix(at) + case (at: AppliedType, _: OrType) => at.isFunctionType || isInfix(at) + case (_: AndType, at: AppliedType) => isInfix(at) + case (_: OrType, at: AppliedType) => isInfix(at) + case (at1: AppliedType, at2: AppliedType) => + val leftAssoc = !at1.tycon.typeSymbol.name.endsWith(":") + isInfix(at2) && (at1.isFunctionType || isInfix(at1) && ( + at1.tycon.typeSymbol != at2.tycon.typeSymbol || leftAssoc != isLeft + )) + case _ => false + + private def isInfix(using Quotes)(at: reflect.AppliedType) = + import dotty.tools.dotc.util.Chars.isIdentifierPart + import reflect._ + + def infixAnnot = + at.tycon.typeSymbol.getAnnotation(Symbol.requiredClass("scala.annotation.showAsInfix")) match + case Some(Apply(_, args)) => + args.collectFirst { + case Literal(BooleanConstant(false)) => false + }.getOrElse(true) + case _ => false + + at.args.size == 2 && (!at.typeSymbol.name.forall(isIdentifierPart) || infixAnnot) diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/MarkdownParser.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/MarkdownParser.scala index edf9051c0ed7..77d57748a2e8 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/MarkdownParser.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/MarkdownParser.scala @@ -38,7 +38,7 @@ object MarkdownParser { new MutableDataSet() .setFrom(ParserEmulationProfile.COMMONMARK.getOptions) - .set(Parser.EXTENSIONS, Arrays.asList(extArray:_*)) + .set(Parser.EXTENSIONS, Arrays.asList(extArray*)) .set(EmojiExtension.ROOT_IMAGE_PATH, "https://github.global.ssl.fastly.net/images/icons/emoji/") .set(WikiLinkExtension.LINK_ESCAPE_CHARS, "") diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/MemberLookup.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/MemberLookup.scala index b2c4e1bdcac4..26c4fb06dfdf 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/MemberLookup.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/MemberLookup.scala @@ -236,9 +236,9 @@ object MemberLookup extends MemberLookup { // Scaladoc overloading support allows terminal * (and they're meaningless) val cleanStr = str.stripSuffix("*") - if cleanStr endsWith "$" then + if cleanStr.endsWith("$") then Selector(cleanStr.init, SelectorKind.ForceTerm) - else if cleanStr endsWith "!" 
then + else if cleanStr.endsWith("!") then Selector(cleanStr.init, SelectorKind.ForceType) else Selector(cleanStr, SelectorKind.NoForce) diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Preparser.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Preparser.scala index 95db8983626a..9fad9e22eeb9 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Preparser.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Preparser.scala @@ -130,7 +130,7 @@ object Preparser { val tagsWithoutDiagram = tags.filterNot(pair => stripTags.contains(pair._1)) val bodyTags: mutable.Map[TagKey, List[String]] = - mutable.Map((tagsWithoutDiagram).toSeq: _*) + mutable.Map((tagsWithoutDiagram).toSeq*) def allTags(key: SimpleTagKey): List[String] = (bodyTags remove key).getOrElse(Nil).reverse diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/DocFlexmarkExtension.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/DocFlexmarkExtension.scala index d797eaed7fbf..d87a6692b99c 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/DocFlexmarkExtension.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/DocFlexmarkExtension.scala @@ -72,7 +72,7 @@ case class DocFlexmarkRenderer(renderLink: (DocLink, String) => String) html.raw(renderLink(node.target, node.body)) object Render extends NodeRenderer: - override def getNodeRenderingHandlers: JSet[NodeRenderingHandler[_]] = + override def getNodeRenderingHandlers: JSet[NodeRenderingHandler[?]] = JSet( new NodeRenderingHandler(classOf[DocLinkNode], Handler), ) diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SectionRenderingExtension.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SectionRenderingExtension.scala index 421c7eaab76f..0acb1c02a69e 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SectionRenderingExtension.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SectionRenderingExtension.scala @@ -27,14 +27,15 @@ object SectionRenderingExtension extends HtmlRenderer.HtmlRendererExtension: case class AnchorLink(link: String) extends BlankLine(BasedSequence.EmptyBasedSequence()) object SectionHandler extends CustomNodeRenderer[Section]: - val repeatedIds: mutable.Map[(NodeRendererContext, BasedSequence), Int] = mutable.Map() + val repeatedIds: mutable.Map[(NodeRendererContext, String), Int] = mutable.Map() val idGenerator = new HeaderIdGenerator.Factory().create() override def render(node: Section, c: NodeRendererContext, html: HtmlWriter): Unit = val Section(header, body) = node - val idSuffix = repeatedIds.getOrElseUpdate((c, header.getText), 0) + val headerText = header.getText.toString + val idSuffix = repeatedIds.getOrElseUpdate((c, headerText), 0) val ifSuffixStr = if(idSuffix == 0) then "" else idSuffix.toString - repeatedIds.update((c, header.getText), repeatedIds((c, header.getText)) + 1) - val id = idGenerator.getId(header.getText.append(ifSuffixStr)) + repeatedIds.update((c, headerText), idSuffix + 1) + val id = idGenerator.getId(headerText + ifSuffixStr) val anchor = AnchorLink(s"#$id") val headerClass: String = header.getLevel match case 1 => "h500" @@ -58,7 +59,7 @@ object SectionRenderingExtension extends HtmlRenderer.HtmlRendererExtension: object Render extends NodeRenderer: - override def getNodeRenderingHandlers: JSet[NodeRenderingHandler[_]] = + override def getNodeRenderingHandlers: JSet[NodeRenderingHandler[?]] = JSet( new NodeRenderingHandler(classOf[Section], 
SectionHandler), new NodeRenderingHandler(classOf[AnchorLink], AnchorLinkHandler) diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SnippetRenderingExtension.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SnippetRenderingExtension.scala index e980c5fc44ef..d4a439042073 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SnippetRenderingExtension.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SnippetRenderingExtension.scala @@ -35,7 +35,7 @@ object SnippetRenderingExtension extends HtmlRenderer.HtmlRendererExtension: html.raw(SnippetRenderer.renderSnippet(node.getContentChars.toString, node.getInfo.toString.split(" ").headOption)) object Render extends NodeRenderer: - override def getNodeRenderingHandlers: JSet[NodeRenderingHandler[_]] = + override def getNodeRenderingHandlers: JSet[NodeRenderingHandler[?]] = JSet( new NodeRenderingHandler(classOf[ExtendedFencedCodeBlock], ExtendedFencedCodeBlockHandler), new NodeRenderingHandler(classOf[FencedCodeBlock], FencedCodeBlockHandler) diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/wiki/Parser.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/wiki/Parser.scala index dd3187fb5346..125bca102fba 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/wiki/Parser.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/wiki/Parser.scala @@ -675,7 +675,7 @@ sealed class CharReader(buffer: String) { reader => var offset: Int = 0 def char: Char = - if (offset >= buffer.length) endOfText else buffer charAt offset + if (offset >= buffer.length) endOfText else buffer.charAt(offset) final def nextChar() = offset += 1 @@ -712,7 +712,7 @@ sealed class CharReader(buffer: String) { reader => jumpWhitespace() val (ok0, chars0) = if (chars.charAt(0) == ' ') - (offset > poff, chars substring 1) + (offset > poff, chars.substring(1)) else (true, chars) val ok = ok0 && jump(chars0) diff --git a/scaladoc/src/dotty/tools/scaladoc/transformers/InheritanceInformationTransformer.scala b/scaladoc/src/dotty/tools/scaladoc/transformers/InheritanceInformationTransformer.scala index 02e224f10cf0..b027aff83f6d 100644 --- a/scaladoc/src/dotty/tools/scaladoc/transformers/InheritanceInformationTransformer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/transformers/InheritanceInformationTransformer.scala @@ -3,16 +3,16 @@ package transformers class InheritanceInformationTransformer(using DocContext) extends (Module => Module): override def apply(original: Module): Module = - val subtypes = getSupertypes(original.rootPackage).groupMap(_(0))(_(1)) + val subtypes = getSupertypes(original.rootPackage).groupMap(_(0))(_(1)).view.mapValues(_.distinct).toMap original.updateMembers { m => val edges = getEdges(m.asLink.copy(kind = bareClasslikeKind(m.kind)), subtypes) val st: Seq[LinkToType] = edges.map(_._1).distinct - m.withKnownChildren(st).withNewGraphEdges(edges) + m.withKnownChildren(st).withNewGraphEdges(edges.toSeq) } private def getEdges(ltt: LinkToType, subtypes: Map[DRI, Seq[LinkToType]]): Seq[(LinkToType, LinkToType)] = - val st: Seq[LinkToType] = subtypes.getOrElse(ltt.dri, Nil) - st.flatMap(s => Seq(s -> ltt) ++ getEdges(s, subtypes)) + val st: Seq[LinkToType] = subtypes.getOrElse(ltt.dri, Vector.empty) + st.flatMap(s => Vector(s -> ltt) ++ getEdges(s, subtypes)) private def bareClasslikeKind(kind: Kind): Kind = kind match case _: Kind.Trait => Kind.Trait(Nil, Nil) diff --git a/scaladoc/src/dotty/tools/scaladoc/util/html.scala 
b/scaladoc/src/dotty/tools/scaladoc/util/html.scala
index 72776a7413aa..f7d99eaf4927 100644
--- a/scaladoc/src/dotty/tools/scaladoc/util/html.scala
+++ b/scaladoc/src/dotty/tools/scaladoc/util/html.scala
@@ -9,8 +9,8 @@ object HTML:
   type TagArg = AppliedTag | Seq[AppliedTag] | String | Seq[String]

   case class Tag(name: String):
-    def apply(tags: TagArg*): AppliedTag = apply()(tags:_*)
-    def apply(first: AttrArg, rest: AttrArg*): AppliedTag = apply((first +: rest):_*)()
+    def apply(tags: TagArg*): AppliedTag = apply()(tags*)
+    def apply(first: AttrArg, rest: AttrArg*): AppliedTag = apply((first +: rest)*)()
     def apply(attrs: AttrArg*)(tags: TagArg*): AppliedTag =
       def unpackTags(tags: TagArg*)(using sb: StringBuilder): StringBuilder =
         tags.foreach {
@@ -19,7 +19,7 @@ object HTML:
           case s: String =>
             sb.append(s.escapeReservedTokens)
           case s: Seq[AppliedTag | String] =>
-            unpackTags(s:_*)
+            unpackTags(s*)
         }
         sb
       val sb = StringBuilder()
@@ -31,7 +31,7 @@ object HTML:
           sb.append(" ").append(e)
       }
       sb.append(">")
-      unpackTags(tags:_*)(using sb)
+      unpackTags(tags*)(using sb)
      sb.append(s"</$name>")
      sb
@@ -51,6 +51,8 @@ object HTML:

   val div = Tag("div")
   val span = Tag("span")
+  val em = Tag("em")
+  val strong = Tag("strong")
   val a = Tag("a")
   val p = Tag("p")
   val h1 = Tag("h1")
diff --git a/scaladoc/src/scala/tasty/inspector/DocTastyInspector.scala b/scaladoc/src/scala/tasty/inspector/DocTastyInspector.scala
deleted file mode 100644
index d908c2646f59..000000000000
--- a/scaladoc/src/scala/tasty/inspector/DocTastyInspector.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-package scala.tasty.inspector
-
-import dotty.tools.dotc.core.Contexts.Context
-
-abstract class DocTastyInspector extends OldTastyInspector:
-  def inspectFilesInDocContext(
-    classpath: List[String],
-    filePaths: List[String])(
-    using Context): Unit = inspectFilesInContext(classpath, filePaths)
diff --git a/scaladoc/src/scala/tasty/inspector/Inspector.scala b/scaladoc/src/scala/tasty/inspector/Inspector.scala
new file mode 100644
index 000000000000..061c7dff0c44
--- /dev/null
+++ b/scaladoc/src/scala/tasty/inspector/Inspector.scala
@@ -0,0 +1,33 @@
+// Copy of tasty-inspector/src/scala/tasty/inspector/Inspector.scala
+// FIXME remove this copy of the file
+
+package scala.tasty.inspector
+
+import scala.quoted._
+import scala.quoted.runtime.impl.QuotesImpl
+
+import dotty.tools.dotc.Compiler
+import dotty.tools.dotc.Driver
+import dotty.tools.dotc.Run
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.core.Mode
+import dotty.tools.dotc.core.Phases.Phase
+import dotty.tools.dotc.fromtasty._
+import dotty.tools.dotc.util.ClasspathFromClassloader
+import dotty.tools.dotc.CompilationUnit
+import dotty.tools.unsupported
+import dotty.tools.dotc.report
+
+import java.io.File.pathSeparator
+
+trait Inspector:
+
+  /** Inspect all TASTy files using `Quotes` reflect API.
+   *
+   *  Note: Within this method `quotes.reflect.SourceFile.current` will not work, hence the explicit source paths.
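+   *
+   *  A minimal implementation (an illustrative sketch, not part of the original file) could
+   *  print the path of every inspected file:
+   *  `new Inspector { def inspect(using Quotes)(tastys: List[Tasty[quotes.type]]): Unit = tastys.foreach(t => println(t.path)) }`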
+   *
+   *  @param tastys List of `Tasty` containing `.tasty` file path and AST
+   */
+  def inspect(using Quotes)(tastys: List[Tasty[quotes.type]]): Unit
+
+end Inspector
diff --git a/scaladoc/src/scala/tasty/inspector/OldTastyInspector.scala b/scaladoc/src/scala/tasty/inspector/OldTastyInspector.scala
deleted file mode 100644
index 16f9b0fdca1d..000000000000
--- a/scaladoc/src/scala/tasty/inspector/OldTastyInspector.scala
+++ /dev/null
@@ -1,136 +0,0 @@
-package scala.tasty.inspector
-
-import scala.quoted._
-import scala.quoted.runtime.impl.QuotesImpl
-
-import dotty.tools.dotc.Compiler
-import dotty.tools.dotc.Driver
-import dotty.tools.dotc.Run
-import dotty.tools.dotc.core.Contexts.Context
-import dotty.tools.dotc.core.Mode
-import dotty.tools.dotc.core.Phases.Phase
-import dotty.tools.dotc.fromtasty._
-import dotty.tools.dotc.util.ClasspathFromClassloader
-import dotty.tools.dotc.CompilationUnit
-import dotty.tools.unsupported
-import dotty.tools.dotc.report
-
-import java.io.File.pathSeparator
-
-// COPY OF OLD IMPLEMENTATION
-// TODO: update to new implementation
-trait OldTastyInspector:
-  self =>
-
-  /** Process a TASTy file using TASTy reflect */
-  protected def processCompilationUnit(using Quotes)(root: quotes.reflect.Tree): Unit
-
-  /** Called after all compilation units are processed */
-  protected def postProcess(using Quotes): Unit = ()
-
-  /** Load and process TASTy files using TASTy reflect
-   *
-   *  @param tastyFiles List of paths of `.tasty` files
-   */
-  def inspectTastyFiles(tastyFiles: List[String]): Boolean =
-    inspectAllTastyFiles(tastyFiles, Nil, Nil)
-
-  /** Load and process TASTy files in a `jar` file using TASTy reflect
-   *
-   *  @param jars Path of `.jar` file
-   */
-  def inspectTastyFilesInJar(jar: String): Boolean =
-    inspectAllTastyFiles(Nil, List(jar), Nil)
-
-  /** Load and process TASTy files using TASTy reflect
-   *
-   *  @param tastyFiles List of paths of `.tasty` files
-   *  @param jars List of path of `.jar` files
-   *  @param dependenciesClasspath Classpath with extra dependencies needed to load class in the `.tasty` files
-   */
-  def inspectAllTastyFiles(tastyFiles: List[String], jars: List[String], dependenciesClasspath: List[String]): Boolean =
-    def checkFile(fileName: String, ext: String): Unit =
-      val file = dotty.tools.io.Path(fileName)
-      if file.extension != ext then
-        throw new IllegalArgumentException(s"File extension is not `.$ext`: $file")
-      else if !file.exists then
-        throw new IllegalArgumentException(s"File not found: ${file.toAbsolute}")
-    tastyFiles.foreach(checkFile(_, "tasty"))
-    jars.foreach(checkFile(_, "jar"))
-    val files = tastyFiles ::: jars
-    files.nonEmpty && inspectFiles(dependenciesClasspath, files)
-
-  /** Load and process TASTy files using TASTy reflect and provided context
-   *
-   *  Used in doctool to reuse reporter and setup provided by sbt
-   *
-   *  @param classes List of paths of `.tasty` and `.jar` files (no validation is performed)
-   *  @param classpath Classpath with extra dependencies needed to load class in the `.tasty` files
-   */
-  protected[inspector] def inspectFilesInContext(classpath: List[String], classes: List[String])(using Context): Unit =
-    if (classes.isEmpty) report.error("Parameter classes should no be empty")
-    inspectorDriver().process(inspectorArgs(classpath, classes), summon[Context])
-
-
-  private def inspectorDriver() =
-    class InspectorDriver extends Driver:
-      override protected def newCompiler(implicit ctx: Context): Compiler = new TastyFromClass
-
-    class TastyInspectorPhase extends Phase:
-      override def phaseName:
String = "tastyInspector" - - override def run(implicit ctx: Context): Unit = - val qctx = QuotesImpl() - self.processCompilationUnit(using qctx)(ctx.compilationUnit.tpdTree.asInstanceOf[qctx.reflect.Tree]) - - class TastyInspectorFinishPhase extends Phase: - override def phaseName: String = "tastyInspectorFinish" - - override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = - val qctx = QuotesImpl() - self.postProcess(using qctx) - units - - override def run(implicit ctx: Context): Unit = unsupported("run") - - class TastyFromClass extends TASTYCompiler: - - override protected def frontendPhases: List[List[Phase]] = - List(new ReadTasty) :: // Load classes from tasty - Nil - - override protected def picklerPhases: List[List[Phase]] = Nil - - override protected def transformPhases: List[List[Phase]] = Nil - - override protected def backendPhases: List[List[Phase]] = - List(new TastyInspectorPhase) :: // Perform a callback for each compilation unit - List(new TastyInspectorFinishPhase) :: // Perform a final callback - Nil - - override def newRun(implicit ctx: Context): Run = - reset() - val ctx2 = ctx.fresh - .addMode(Mode.ReadPositions) - .setSetting(ctx.settings.YreadComments, true) - new TASTYRun(this, ctx2) - - new InspectorDriver - - private def inspectorArgs(classpath: List[String], classes: List[String]): Array[String] = - val currentClasspath = ClasspathFromClassloader(getClass.getClassLoader) - val fullClasspath = (classpath :+ currentClasspath).mkString(pathSeparator) - ("-from-tasty" :: "-Yretain-trees" :: "-classpath" :: fullClasspath :: classes).toArray - - - private def inspectFiles(classpath: List[String], classes: List[String]): Boolean = - if (classes.isEmpty) - throw new IllegalArgumentException("Parameter classes should no be empty") - - val reporter = inspectorDriver().process(inspectorArgs(classpath, classes)) - reporter.hasErrors - - end inspectFiles - - -end OldTastyInspector diff --git a/scaladoc/src/scala/tasty/inspector/Tasty.scala b/scaladoc/src/scala/tasty/inspector/Tasty.scala new file mode 100644 index 000000000000..b3e65bb5479e --- /dev/null +++ b/scaladoc/src/scala/tasty/inspector/Tasty.scala @@ -0,0 +1,20 @@ +// Copy of tasty-inspector/src/scala/tasty/inspector/Tasty.scala +// FIXME remove this copy of the file + +package scala.tasty.inspector + +import scala.quoted._ + +/** `.tasty` file representation containing file path and the AST */ +trait Tasty[Q <: Quotes & Singleton]: + + /** Instance of `Quotes` used to load the AST */ + val quotes: Q + + /** Path to the `.tasty` file */ + def path: String + + /** Abstract Syntax Tree contained in the `.tasty` file */ + def ast: quotes.reflect.Tree + +end Tasty diff --git a/scaladoc/src/scala/tasty/inspector/TastyInspector.scala b/scaladoc/src/scala/tasty/inspector/TastyInspector.scala new file mode 100644 index 000000000000..14e5f019b433 --- /dev/null +++ b/scaladoc/src/scala/tasty/inspector/TastyInspector.scala @@ -0,0 +1,127 @@ +// Copy of tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala +// FIXME remove this copy of the file + +package scala.tasty.inspector + +import scala.quoted._ +import scala.quoted.runtime.impl.QuotesImpl + +import dotty.tools.dotc.Compiler +import dotty.tools.dotc.Driver +import dotty.tools.dotc.Run +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.core.Mode +import dotty.tools.dotc.core.Phases.Phase +import dotty.tools.dotc.fromtasty._ +import dotty.tools.dotc.quoted.QuotesCache +import 
dotty.tools.dotc.util.ClasspathFromClassloader
+import dotty.tools.dotc.CompilationUnit
+import dotty.tools.unsupported
+import dotty.tools.dotc.report
+
+import java.io.File.pathSeparator
+
+object TastyInspector:
+
+  /** Load and process TASTy files using TASTy reflect
+   *
+   *  @param tastyFiles List of paths of `.tasty` files
+   *
+   *  @return boolean value indicating whether the process succeeded
+   */
+  def inspectTastyFiles(tastyFiles: List[String])(inspector: Inspector): Boolean =
+    inspectAllTastyFiles(tastyFiles, Nil, Nil)(inspector)
+
+  /** Load and process TASTy files in a `jar` file using TASTy reflect
+   *
+   *  @param jars Path of `.jar` file
+   *
+   *  @return boolean value indicating whether the process succeeded
+   */
+  def inspectTastyFilesInJar(jar: String)(inspector: Inspector): Boolean =
+    inspectAllTastyFiles(Nil, List(jar), Nil)(inspector)
+
+  /** Load and process TASTy files using TASTy reflect
+   *
+   *  @param tastyFiles List of paths of `.tasty` files
+   *  @param jars List of paths of `.jar` files
+   *  @param dependenciesClasspath Classpath with extra dependencies needed to load class in the `.tasty` files
+   *
+   *  @return boolean value indicating whether the process succeeded
+   */
+  def inspectAllTastyFiles(tastyFiles: List[String], jars: List[String], dependenciesClasspath: List[String])(inspector: Inspector): Boolean =
+    def checkFile(fileName: String, ext: String): Unit =
+      val file = dotty.tools.io.Path(fileName)
+      if file.extension != ext then
+        throw new IllegalArgumentException(s"File extension is not `.$ext`: $file")
+      else if !file.exists then
+        throw new IllegalArgumentException(s"File not found: ${file.toAbsolute}")
+    tastyFiles.foreach(checkFile(_, "tasty"))
+    jars.foreach(checkFile(_, "jar"))
+    val files = tastyFiles ::: jars
+    inspectFiles(dependenciesClasspath, files)(inspector)
+
+  private def inspectorDriver(inspector: Inspector) =
+    class InspectorDriver extends Driver:
+      override protected def newCompiler(implicit ctx: Context): Compiler = new TastyFromClass
+
+    class TastyInspectorPhase extends Phase:
+      override def phaseName: String = "tastyInspector"
+
+      override def runOn(units: List[CompilationUnit])(using ctx0: Context): List[CompilationUnit] =
+        // NOTE: although this is a phase, do not expect this to be run with an xsbti.CompileProgress
+        val ctx = QuotesCache.init(ctx0.fresh)
+        runOnImpl(units)(using ctx)
+
+      private def runOnImpl(units: List[CompilationUnit])(using Context): List[CompilationUnit] =
+        val quotesImpl = QuotesImpl()
+        class TastyImpl(val path: String, val ast: quotesImpl.reflect.Tree) extends Tasty[quotesImpl.type] {
+          val quotes = quotesImpl
+        }
+        val tastys = units.map(unit => new TastyImpl(unit.source.path, unit.tpdTree.asInstanceOf[quotesImpl.reflect.Tree]))
+        inspector.inspect(using quotesImpl)(tastys)
+        units
+
+      override def run(implicit ctx: Context): Unit = unsupported("run")
+    end TastyInspectorPhase
+
+    class TastyFromClass extends TASTYCompiler:
+
+      override protected def frontendPhases: List[List[Phase]] =
+        List(new ReadTasty) :: // Load classes from tasty
+        Nil
+
+      override protected def picklerPhases: List[List[Phase]] = Nil
+
+      override protected def transformPhases: List[List[Phase]] = Nil
+
+      override protected def backendPhases: List[List[Phase]] =
+        List(new TastyInspectorPhase) :: // Perform a callback for each compilation unit
+        Nil
+
+      override def newRun(implicit ctx: Context): Run =
+        reset()
+        val ctx2 = ctx.fresh
+          .addMode(Mode.ReadPositions)
+          .setSetting(ctx.settings.YreadComments, true)
+        new
TASTYRun(this, ctx2) + + new InspectorDriver + + private def inspectorArgs(classpath: List[String], classes: List[String]): Array[String] = + val currentClasspath = ClasspathFromClassloader(getClass.getClassLoader) + val fullClasspath = (classpath :+ currentClasspath).mkString(pathSeparator) + ("-from-tasty" :: "-Yretain-trees" :: "-classpath" :: fullClasspath :: classes).toArray + + + private def inspectFiles(classpath: List[String], classes: List[String])(inspector: Inspector): Boolean = + classes match + case Nil => true + case _ => + val reporter = inspectorDriver(inspector).process(inspectorArgs(classpath, classes)) + !reporter.hasErrors + + end inspectFiles + + +end TastyInspector diff --git a/scaladoc/test/dotty/tools/scaladoc/ExternalLocationProviderIntegrationTest.scala b/scaladoc/test/dotty/tools/scaladoc/ExternalLocationProviderIntegrationTest.scala index 59e274749f26..a63f699c4c2f 100644 --- a/scaladoc/test/dotty/tools/scaladoc/ExternalLocationProviderIntegrationTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/ExternalLocationProviderIntegrationTest.scala @@ -57,7 +57,8 @@ class Scaladoc3ExternalLocationProviderIntegrationTest extends ExternalLocationP def getScalaLibraryPath: String = { val classpath: List[String] = System.getProperty("java.class.path").split(java.io.File.pathSeparatorChar).toList - classpath.find(_.contains("scala-library-2")).getOrElse("foobarbazz") // If we don't find the scala 2 library, the test will fail + val stdlib = classpath.find(_.contains("scala-library-2")).getOrElse("foobarbazz") // If we don't find the scala 2 library, the test will fail + new java.io.File(stdlib).getCanonicalPath() // canonicalize for case-insensitive file systems } class Scaladoc2LegacyExternalLocationProviderIntegrationTest extends LegacyExternalLocationProviderIntegrationTest( diff --git a/scaladoc/test/dotty/tools/scaladoc/SocialLinksTest.scala b/scaladoc/test/dotty/tools/scaladoc/SocialLinksTest.scala new file mode 100644 index 000000000000..ede928ff2a08 --- /dev/null +++ b/scaladoc/test/dotty/tools/scaladoc/SocialLinksTest.scala @@ -0,0 +1,52 @@ +package dotty.tools.scaladoc + +import org.junit.Test +import org.junit.Assert._ +import dotty.tools.scaladoc.SocialLinks + +class SocialLinksTest: + + @Test def githubLink(): Unit = + val githubLink = "github::https://github.com/test" + val expected = SocialLinks.Github("https://github.com/test") + assertEquals(expected, SocialLinks.parse(githubLink).getOrElse(null)) + + @Test def twitterLink(): Unit = + val twitterLink = "twitter::https://twitter.com/test" + val expected = SocialLinks.Twitter("https://twitter.com/test") + assertEquals(expected, SocialLinks.parse(twitterLink).getOrElse(null)) + + @Test def gitterLink(): Unit = + val gitterLink = "gitter::https://gitter.im/test" + val expected = SocialLinks.Gitter("https://gitter.im/test") + assertEquals(expected, SocialLinks.parse(gitterLink).getOrElse(null)) + + @Test def discordLink(): Unit = + val discordLink = "discord::https://discord.gg/test" + val expected = SocialLinks.Discord("https://discord.gg/test") + assertEquals(expected, SocialLinks.parse(discordLink).getOrElse(null)) + + @Test def customLinkLight(): Unit = + val customLink = "namecustom::https://custom.com/test::custom" + val expected = SocialLinks.Custom("https://custom.com/test", "custom", "custom") + assertEquals(expected, SocialLinks.parse(customLink).getOrElse(null)) + + @Test def customLinkLightAndDark(): Unit = + val customLink = "namecustom::https://custom.com/test::custom::custom-dark" + val expected 
= SocialLinks.Custom("https://custom.com/test", "custom", "custom-dark") + assertEquals(expected, SocialLinks.parse(customLink).getOrElse(null)) + + @Test def customLinkUpper(): Unit = + val customLink = "Namecustom::https://custom.com/test::custom" + val expected = SocialLinks.Custom("https://custom.com/test", "custom", "custom") + assertEquals(expected, SocialLinks.parse(customLink).getOrElse(null)) + + @Test def parseRegexError(): Unit = + val regexErrorLink = "nameCustom3::https://custom.com/test::custom::custom-dark::custom" + val expected = s"Social links arg $regexErrorLink is invalid: " + assertEquals(expected, SocialLinks.parse(regexErrorLink).left.getOrElse(null)) + + @Test def parseLinkWithError(): Unit = + val errorLink = "namecustom::https://custom.com/test::custom::custom-dark::custom" + val expected = s"Social links arg $errorLink is invalid: For the 'custom' link, a minimum of two arguments is expected: URL, light icon file name, [dark icon file name]" + assertEquals(expected, SocialLinks.parse(errorLink).left.getOrElse(null)) diff --git a/scaladoc/test/dotty/tools/scaladoc/signatures/AbstractMemberSignaturesTest.scala b/scaladoc/test/dotty/tools/scaladoc/signatures/AbstractMemberSignaturesTest.scala index eb27987f3f6c..824aec6daa16 100644 --- a/scaladoc/test/dotty/tools/scaladoc/signatures/AbstractMemberSignaturesTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/signatures/AbstractMemberSignaturesTest.scala @@ -1,11 +1,14 @@ package dotty.tools.scaladoc package signatures +import java.nio.file.Path; + import scala.io.Source import scala.jdk.CollectionConverters._ import scala.util.matching.Regex +import scala.language.unsafeNulls + import dotty.tools.scaladoc.test.BuildInfo -import java.nio.file.Path; import org.jsoup.Jsoup import util.IO import org.junit.Assert.assertTrue diff --git a/scaladoc/test/dotty/tools/scaladoc/signatures/SignatureTest.scala b/scaladoc/test/dotty/tools/scaladoc/signatures/SignatureTest.scala index d5b7a0b9b6f8..bdedc3f14ce0 100644 --- a/scaladoc/test/dotty/tools/scaladoc/signatures/SignatureTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/signatures/SignatureTest.scala @@ -71,7 +71,7 @@ abstract class SignatureTest( // e.g. 
to remove '(0)' from object IAmACaseObject extends CaseImplementThis/*<-*/(0)/*->*/ private val commentRegex = raw"\/\*<-\*\/[^\/]+\/\*->\*\/".r private val whitespaceRegex = raw"\s+".r - private val expectedRegex = raw".+//expected: (.+)".r + private val expectedRegex = raw".*//expected: (.+)".r private val unexpectedRegex = raw"(.+)//unexpected".r private val identifierRegex = raw"^\s*(`.*`|(?:\w+)(?:_[^\[\(\s]+)|\w+|[^\[\(\s]+)".r @@ -94,7 +94,7 @@ abstract class SignatureTest( private def signaturesFromSources(source: Source, kinds: Seq[String]): Seq[SignatureRes] = source.getLines.map(_.trim) .filterNot(_.isEmpty) - .filterNot(_.startWithAnyOfThese("=",":","{","}", "//")) + .filterNot(l => l.startWithAnyOfThese("=",":","{","}", "//") && !l.startsWith("//expected:")) .toSeq .flatMap { case unexpectedRegex(signature) => findName(signature, kinds).map(Unexpected(_)) diff --git a/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala b/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala index a09234be5512..5067bf5974ca 100644 --- a/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala +++ b/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala @@ -1,6 +1,6 @@ package dotty.tools.scaladoc.signatures -class GenericSignaftures extends SignatureTest("genericSignatures", Seq("class")) +class GenericSignatures extends SignatureTest("genericSignatures", Seq("class")) class ObjectSignatures extends SignatureTest("objectSignatures", Seq("object")) @@ -109,4 +109,14 @@ class ImplicitMembers extends SignatureTest( class NonScala3Parent extends SignatureTest("nonScala3Parent", SignatureTest.all) +class SupertypeParamsSubstitution extends SignatureTest("supertypeParamsSubstitution", SignatureTest.all) + +class ThisType extends SignatureTest("thisType", SignatureTest.all) + +class PathDependentTypes extends SignatureTest("pathDependentTypes", SignatureTest.all) + +class MatchTypeTuple extends SignatureTest("matchTypeTuple", SignatureTest.all) + +class InfixTypes extends SignatureTest("infixTypes", SignatureTest.all) + class ExtendsCall extends SignatureTest("extendsCall", SignatureTest.all) diff --git a/scaladoc/test/dotty/tools/scaladoc/site/NavigationTest.scala b/scaladoc/test/dotty/tools/scaladoc/site/NavigationTest.scala index f137e9e6b13e..fe822df5f8a0 100644 --- a/scaladoc/test/dotty/tools/scaladoc/site/NavigationTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/site/NavigationTest.scala @@ -11,8 +11,8 @@ class NavigationTest extends BaseHtmlTest: withHtmlFile(page){ content => def test(query: String, el: Seq[NavMenuTestEntry]) = - content.assertTextsIn(query, el.map(_.name):_*) - content.assertAttr(query,"href", el.map(_.link):_*) + content.assertTextsIn(query, el.map(_.name)*) + content.assertAttr(query,"href", el.map(_.link)*) test(".side-menu>div>span>a", topLevel) test(".side-menu>div>div>span>a", topLevel.flatMap(_.nested)) diff --git a/scaladoc/test/dotty/tools/scaladoc/site/SidebarParserTest.scala b/scaladoc/test/dotty/tools/scaladoc/site/SidebarParserTest.scala index 72fc6515fdee..4cbb04cce154 100644 --- a/scaladoc/test/dotty/tools/scaladoc/site/SidebarParserTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/site/SidebarParserTest.scala @@ -3,8 +3,12 @@ package site import org.junit.Test import org.junit.Assert._ +import dotty.tools.scaladoc.site.Sidebar +import dotty.tools.scaladoc.site.Sidebar.RawInput +import java.io.ByteArrayOutputStream +import 
java.io.PrintStream -// TODO add negaitve and more details tests +// TODO add negative and more details tests class SidebarParserTest: private val sidebar = @@ -34,6 +38,63 @@ class SidebarParserTest: | - page: my-page6/my-page6/my-page6.md """.stripMargin + private val sidebarNoTitle = + """index: index.md + |subsection: + | page: my-page1.md + | - page: my-page2.md + | - page: my-page3/subsection + | - title: Reference + | subsection: + | - page: my-page3.md + | hidden: true + | - index: my-page4/index.md + | subsection: + | - page: my-page4/my-page4.md + | - title: My subsection + | index: my-page5/index.md + | subsection: + | - page: my-page5/my-page5.md + | - subsection: + | - page: my-page7/my-page7.md + | - index: my-page6/index.md + | subsection: + | - index: my-page6/my-page6/index.md + | subsection: + | - page: my-page6/my-page6/my-page6.md + """.stripMargin + + private val sidebarErrorNoPage = + """index: index.md + |subsection: + | - title: My title + | - page: my-page2.md + | - page: my-page3/subsection + | - title: Reference + | subsection: + | - page: my-page3.md + | hidden: true + | - index: my-page4/index.md + | subsection: + | - page: my-page4/my-page4.md + | - title: My subsection + | index: my-page5/index.md + | subsection: + | - page: my-page5/my-page5.md + | - subsection: + | - page: my-page7/my-page7.md + | - index: my-page6/index.md + | subsection: + | - index: my-page6/my-page6/index.md + | subsection: + | - page: my-page6/my-page6/my-page6.md + """.stripMargin + + private val msgNoTitle = "`title` property is missing for some page." + private val msgNoPage = "Error parsing YAML configuration file: 'index' or 'page' path is missing for title 'My title'." + + private def schemaMessage: String = Sidebar.schemaMessage + @Test def loadSidebar(): Unit = assertEquals( Sidebar.Category( @@ -53,3 +114,24 @@ class SidebarParserTest: ), Sidebar.load(sidebar)(using testContext) ) + + @Test + def loadSidebarNoPageError: Unit = + val out = new ByteArrayOutputStream() + Console.withErr(new PrintStream(out)) { + Sidebar.load(sidebarErrorNoPage)(using testContext) + } + val errorPage = out.toString().trim() + + assert(errorPage.contains(msgNoPage) && errorPage.contains(schemaMessage)) + + + @Test + def loadSidebarNoTitleError(): Unit = + val out = new ByteArrayOutputStream() + Console.withErr(new PrintStream(out)) { + Sidebar.load(sidebarNoTitle)(using testContext) + } + val errorTitle = out.toString().trim() + + assert(errorTitle.contains(msgNoTitle) && errorTitle.contains(schemaMessage)) diff --git a/scaladoc/test/dotty/tools/scaladoc/site/SiteGeneratationTest.scala b/scaladoc/test/dotty/tools/scaladoc/site/SiteGeneratationTest.scala index e012044156cc..4d558fe492c9 100644 --- a/scaladoc/test/dotty/tools/scaladoc/site/SiteGeneratationTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/site/SiteGeneratationTest.scala @@ -27,7 +27,7 @@ class SiteGeneratationTest extends BaseHtmlTest: content.assertTextsIn(".projectVersion", projectVersion) content.assertTextsIn("h1", header) content.assertTextsIn("title", title) - content.assertTextsIn(".breadcrumbs a", (parents :+ title):_*) + content.assertTextsIn(".breadcrumbs a", (parents :+ title)*) checks(content) } diff --git a/scaladoc/test/dotty/tools/scaladoc/tasty/comments/IntegrationTest.scala b/scaladoc/test/dotty/tools/scaladoc/tasty/comments/IntegrationTest.scala index 71f4b42f3320..690c4ba166f5 100644 --- a/scaladoc/test/dotty/tools/scaladoc/tasty/comments/IntegrationTest.scala +++ 
b/scaladoc/test/dotty/tools/scaladoc/tasty/comments/IntegrationTest.scala @@ -8,7 +8,7 @@ abstract class BaseIntegrationTest(pck: String) extends BaseHtmlTest: @Test def testLinks: Unit = withGeneratedDoc(pcks = Seq(pck, "commonlinks")) { def checkDocLinks(links: String*)(ctx: DocumentContext): Unit = - ctx.assertAttr(".documentableBrief a, .cover a", "href", links:_*) + ctx.assertAttr(".documentableBrief a, .cover a", "href", links*) ctx.assertNotExists("unresolvedLinkSelector") def checkUnresolved(ctx: DocumentContext): Unit = diff --git a/scaladoc/test/dotty/tools/scaladoc/tasty/comments/MemberLookupTests.scala b/scaladoc/test/dotty/tools/scaladoc/tasty/comments/MemberLookupTests.scala index c246401c75fc..5f4b85251407 100644 --- a/scaladoc/test/dotty/tools/scaladoc/tasty/comments/MemberLookupTests.scala +++ b/scaladoc/test/dotty/tools/scaladoc/tasty/comments/MemberLookupTests.scala @@ -1,7 +1,7 @@ package dotty.tools.scaladoc package tasty.comments -import scala.quoted.Quotes +import scala.quoted.* import org.junit.{Test, Rule} import org.junit.Assert.{assertSame, assertTrue} @@ -198,14 +198,11 @@ class MemberLookupTests { @Test def test(): Unit = { - import scala.tasty.inspector.OldTastyInspector - class Inspector extends OldTastyInspector: - var alreadyRan: Boolean = false + import scala.tasty.inspector.* + class MyInspector extends Inspector: - override def processCompilationUnit(using ctx: quoted.Quotes)(root: ctx.reflect.Tree): Unit = - if !alreadyRan then - this.test() - alreadyRan = true + def inspect(using Quotes)(tastys: List[Tasty[quotes.type]]): Unit = + this.test() def test()(using q: Quotes): Unit = { import dotty.tools.scaladoc.tasty.comments.MemberLookup @@ -215,6 +212,6 @@ class MemberLookupTests { cases.testAll() } - Inspector().inspectTastyFiles(TestUtils.listOurClasses()) + TastyInspector.inspectTastyFiles(TestUtils.listOurClasses())(new MyInspector) } } diff --git a/scaladoc/test/dotty/tools/scaladoc/tasty/comments/QueryParserTests.scala b/scaladoc/test/dotty/tools/scaladoc/tasty/comments/QueryParserTests.scala index 0d33a9363bac..103e95359714 100644 --- a/scaladoc/test/dotty/tools/scaladoc/tasty/comments/QueryParserTests.scala +++ b/scaladoc/test/dotty/tools/scaladoc/tasty/comments/QueryParserTests.scala @@ -13,8 +13,8 @@ class QueryParserTests { val head = shorthand.head val tail = shorthand.tail head match { - case ((id: String), ch) => Query.QualifiedId(Query.Qual.Id(id), ch, l2q(tail : _*)(last)) - case ((qual: Qual), ch) => Query.QualifiedId(qual, ch, l2q(tail : _*)(last)) + case ((id: String), ch) => Query.QualifiedId(Query.Qual.Id(id), ch, l2q(tail*)(last)) + case ((qual: Qual), ch) => Query.QualifiedId(qual, ch, l2q(tail*)(last)) } } } diff --git a/scaladoc/test/dotty/tools/scaladoc/testUtils.scala b/scaladoc/test/dotty/tools/scaladoc/testUtils.scala index 2ba78c321eab..d8b0e179c5f1 100644 --- a/scaladoc/test/dotty/tools/scaladoc/testUtils.scala +++ b/scaladoc/test/dotty/tools/scaladoc/testUtils.scala @@ -71,7 +71,7 @@ def tastyFiles(name: String, allowEmpty: Boolean = false, rootPck: String = "tes } def collectFiles(dir: File): List[File] = listFilesSafe(dir).toList.flatMap { case f if f.isDirectory => collectFiles(f) - case f if f.getName endsWith ".tasty" => f :: Nil + case f if f.getName.endsWith(".tasty") => f :: Nil case _ => Nil } val outputDir = BuildInfo.test_testcasesOutputDir diff --git a/semanticdb/project/build.properties b/semanticdb/project/build.properties deleted file mode 100644 index 46e43a97ed86..000000000000 --- 
a/semanticdb/project/build.properties +++ /dev/null @@ -1 +0,0 @@ -sbt.version=1.8.2 diff --git a/sjs-compiler-tests/test/scala/dotty/tools/dotc/ScalaJSLink.scala b/sjs-compiler-tests/test/scala/dotty/tools/dotc/ScalaJSLink.scala index 54e92b1559d6..2560021aec99 100644 --- a/sjs-compiler-tests/test/scala/dotty/tools/dotc/ScalaJSLink.scala +++ b/sjs-compiler-tests/test/scala/dotty/tools/dotc/ScalaJSLink.scala @@ -45,7 +45,7 @@ object ScalaJSLink: val result = PathIRContainer .fromClasspath(cpEntries.toSeq.map(entry => new File(entry).toPath())) .map(_._1) - .flatMap(cache.cached _) + .flatMap(cache.cached) .flatMap(linker.link(_, moduleInitializers, PathOutputDirectory(dir), logger)) val report = Await.result(result, Duration.Inf) diff --git a/staging/src/scala/quoted/staging/Compiler.scala b/staging/src/scala/quoted/staging/Compiler.scala index fbe6a3915a08..7f380dabd4e2 100644 --- a/staging/src/scala/quoted/staging/Compiler.scala +++ b/staging/src/scala/quoted/staging/Compiler.scala @@ -26,9 +26,9 @@ object Compiler: def make(appClassloader: ClassLoader)(implicit settings: Settings): Compiler = new Compiler: - private[this] val driver: QuoteDriver = new QuoteDriver(appClassloader) + private val driver: QuoteDriver = new QuoteDriver(appClassloader) - private[this] var running = false + private var running = false def run[T](exprBuilder: Quotes => Expr[T]): T = synchronized { try diff --git a/staging/src/scala/quoted/staging/ExprCompilationUnit.scala b/staging/src/scala/quoted/staging/ExprCompilationUnit.scala index 6f95c252fd9d..2f91f4bc4ef1 100644 --- a/staging/src/scala/quoted/staging/ExprCompilationUnit.scala +++ b/staging/src/scala/quoted/staging/ExprCompilationUnit.scala @@ -5,4 +5,4 @@ import dotty.tools.dotc.CompilationUnit import dotty.tools.dotc.util.NoSource /** Compilation unit containing the contents of a quoted expression */ -private class ExprCompilationUnit(val exprBuilder: Quotes => Expr[_]) extends CompilationUnit(NoSource) +private class ExprCompilationUnit(val exprBuilder: Quotes => Expr[?]) extends CompilationUnit(NoSource, null) diff --git a/staging/src/scala/quoted/staging/QuoteCompiler.scala b/staging/src/scala/quoted/staging/QuoteCompiler.scala index eee2dacdc5f5..cf24b1de369a 100644 --- a/staging/src/scala/quoted/staging/QuoteCompiler.scala +++ b/staging/src/scala/quoted/staging/QuoteCompiler.scala @@ -5,6 +5,7 @@ import dotty.tools.unsupported import dotty.tools.dotc._ import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.core.CompilationUnitInfo import dotty.tools.dotc.core.Decorators._ import dotty.tools.dotc.core.Flags._ import dotty.tools.dotc.core.Mode @@ -33,7 +34,7 @@ import scala.quoted.{Expr, Quotes, Type} private class QuoteCompiler extends Compiler: /** Either `Left` with name of the classfile generated or `Right` with the value contained in the expression */ - private[this] var result: Either[String, Any] = null + private var result: Either[String, Any] = null override protected def frontendPhases: List[List[Phase]] = List(List(new QuotedFrontend)) @@ -62,18 +63,19 @@ private class QuoteCompiler extends Compiler: def phaseName: String = "quotedFrontend" override def runOn(units: List[CompilationUnit])(implicit ctx: Context): List[CompilationUnit] = + // NOTE: although this is a phase, there is no need to track xsbti.CompileProgress here. 
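+        // (Illustrative note, not in the original patch: each ExprCompilationUnit carries
+        // an exprBuilder whose result is wrapped below in a synthetic `class '` with an
+        // `apply` method, so the quoted expression can be compiled and executed.)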
        units.flatMap {
          case exprUnit: ExprCompilationUnit =>
            val ctx1 = ctx.fresh.setPhase(this.start).setCompilationUnit(exprUnit)
            implicit val unitCtx: Context = SpliceScope.setSpliceScope(new RunScope)(using ctx1)

            val pos = Span(0)
-           val assocFile = new VirtualFile("")
+           val compUnitInfo = CompilationUnitInfo(new VirtualFile(""))

            // Places the contents of expr in a compilable tree for a class with the following format.
            // `package __root__ { class ' { def apply: Any = <expr> } }`
            val cls = newCompleteClassSymbol(defn.RootClass, outputClassName, EmptyFlags,
-             defn.ObjectType :: Nil, newScope, coord = pos, assocFile = assocFile).entered.asClass
+             defn.ObjectType :: Nil, newScope, coord = pos, compUnitInfo = compUnitInfo).entered.asClass
            cls.enter(newDefaultConstructor(cls), EmptyScope)
            val meth = newSymbol(cls, nme.apply, Method, ExprType(defn.AnyType), coord = pos).entered
@@ -93,8 +95,9 @@ private class QuoteCompiler extends Compiler:
            val classTree = ClassDef(cls, DefDef(cls.primaryConstructor.asTerm), run :: Nil)
            val tree = PackageDef(ref(defn.RootPackage).asInstanceOf[Ident], classTree :: Nil).withSpan(pos)
            val source = SourceFile.virtual("", "")
+           val unitInfo = CompilationUnitInfo(source.file, tastyInfo = None)
            result = Left(outputClassName.toString)
-           Some(CompilationUnit(source, tree, forceTrees = true))
+           Some(CompilationUnit(source, tree, forceTrees = true, unitInfo))
        }

      /** Get the literal value if this tree only contains a literal tree */
@@ -113,7 +116,7 @@ private class QuoteCompiler extends Compiler:
      /** Unpickle and optionally compile the expression.
       *  Returns either `Left` with name of the classfile generated or `Right` with the value contained in the expression.
       */
-    def compileExpr(exprBuilder: Quotes => Expr[_]): Either[String, Any] =
+    def compileExpr(exprBuilder: Quotes => Expr[?]): Either[String, Any] =
        val units = new ExprCompilationUnit(exprBuilder) :: Nil
        compileUnits(units)
        result
diff --git a/staging/src/scala/quoted/staging/QuoteDriver.scala b/staging/src/scala/quoted/staging/QuoteDriver.scala
index 8de0cd218b23..93e19f195e00 100644
--- a/staging/src/scala/quoted/staging/QuoteDriver.scala
+++ b/staging/src/scala/quoted/staging/QuoteDriver.scala
@@ -21,7 +21,7 @@ import scala.annotation.tailrec
 private class QuoteDriver(appClassloader: ClassLoader) extends Driver:
   import tpd._

-  private[this] val contextBase: ContextBase = new ContextBase
+  private val contextBase: ContextBase = new ContextBase

   def run[T](exprBuilder: Quotes => Expr[T], settings: Compiler.Settings): T =
     val outDir: AbstractFile =
diff --git a/staging/test-resources/repl-staging/i6007 b/staging/test-resources/repl-staging/i6007
index 0d6fbd0cffb1..dcb99cc47c67 100644
--- a/staging/test-resources/repl-staging/i6007
+++ b/staging/test-resources/repl-staging/i6007
@@ -1,9 +1,9 @@
 scala> import scala.quoted._
 scala> import quoted.staging.{Compiler => StagingCompiler, _}
 scala> implicit def compiler: StagingCompiler = StagingCompiler.make(getClass.getClassLoader)
-def compiler: quoted.staging.Compiler
+def compiler: scala.quoted.staging.Compiler
 scala> def v(using Quotes) = '{ (if true then Some(1) else None).map(v => v+1) }
-def v(using x$1: quoted.Quotes): scala.quoted.Expr[Option[Int]]
+def v(using x$1: scala.quoted.Quotes): scala.quoted.Expr[Option[Int]]
 scala> scala.quoted.staging.withQuotes(v.show)
 val res0: String = (if (true) scala.Some.apply[scala.Int](1) else scala.None).map[scala.Int](((v: scala.Int) => v.+(1)))
 scala> scala.quoted.staging.run(v)
diff --git a/staging/test-resources/repl-staging/i6263
b/staging/test-resources/repl-staging/i6263
index d765bf416785..8d967c1c58ac 100644
--- a/staging/test-resources/repl-staging/i6263
+++ b/staging/test-resources/repl-staging/i6263
@@ -1,9 +1,9 @@
 scala> import quoted._
 scala> import quoted.staging.{Compiler => StagingCompiler, _}
 scala> implicit def compiler: StagingCompiler = StagingCompiler.make(getClass.getClassLoader)
-def compiler: quoted.staging.Compiler
+def compiler: scala.quoted.staging.Compiler
 scala> def fn[T : Type](v : T) = println("ok")
-def fn[T](v: T)(implicit evidence$1: quoted.Type[T]): Unit
+def fn[T](v: T)(implicit evidence$1: scala.quoted.Type[T]): Unit
 scala> withQuotes { fn("foo") }
 ok
 scala> withQuotes { fn((1,2)) }
diff --git a/stdlib-bootstrapped/test/Main.scala b/stdlib-bootstrapped/test/Main.scala
deleted file mode 100644
index 1dad89eceffc..000000000000
--- a/stdlib-bootstrapped/test/Main.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-package hello
-
-enum Color:
-  case Red, Green, Blue
-
-object HelloWorld:
-  def main(args: Array[String]): Unit = {
-    println("hello dotty.superbootstrapped!")
-    println(Color.Red)
-    println(Color.Green)
-    println(Color.Blue)
-  }
diff --git a/tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala b/tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala
index 4c6440530ba2..e70d2d4f6dc5 100644
--- a/tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala
+++ b/tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala
@@ -66,6 +66,7 @@ object TastyInspector:
       override def phaseName: String = "tastyInspector"

       override def runOn(units: List[CompilationUnit])(using ctx0: Context): List[CompilationUnit] =
+        // NOTE: although this is a phase, do not expect this to be run with an xsbti.CompileProgress
         val ctx = QuotesCache.init(ctx0.fresh)
         runOnImpl(units)(using ctx)

diff --git a/tasty/src/dotty/tools/tasty/TastyBuffer.scala b/tasty/src/dotty/tools/tasty/TastyBuffer.scala
index f9266cf23617..b27a5b8878ab 100644
--- a/tasty/src/dotty/tools/tasty/TastyBuffer.scala
+++ b/tasty/src/dotty/tools/tasty/TastyBuffer.scala
@@ -1,6 +1,7 @@
 package dotty.tools.tasty

 import util.Util.dble
+import java.nio.charset.StandardCharsets

 object TastyBuffer {

@@ -115,6 +116,16 @@ class TastyBuffer(initialSize: Int) {
     writeBytes(bytes, 8)
   }

+  /** Write a UTF8 string encoded as `Nat UTF8-CodePoint*`,
+   *  where the `Nat` is the length of the code-point bytes.
+   */
+  def writeUtf8(x: String): Unit = {
+    val bytes = x.getBytes(StandardCharsets.UTF_8)
+    val length = bytes.length
+    writeNat(length)
+    writeBytes(bytes, length)
+  }
+
   // -- Address handling --------------------------------------------

   /** Write natural number `x` right-adjusted in a field of `width` bytes
diff --git a/tasty/src/dotty/tools/tasty/TastyFormat.scala b/tasty/src/dotty/tools/tasty/TastyFormat.scala
index 39d559234868..a7150dc726c7 100644
--- a/tasty/src/dotty/tools/tasty/TastyFormat.scala
+++ b/tasty/src/dotty/tools/tasty/TastyFormat.scala
@@ -16,6 +16,7 @@ Micro-syntax:
   Nat           = LongInt                 -- non-negative value, fits in an Int without overflow
   Digit         = 0 | ... | 127
   StopDigit     = 128 | ... | 255         -- value = digit - 128
+  Utf8          = Nat UTF8-CodePoint*
 ```

 Macro-format:
@@ -24,12 +25,12 @@ Macro-format:
                   nameTable_Length Name* Section*
   Header          = 0x5CA1AB1F
   UUID            = Byte*16                -- random UUID
-  VersionString   = Length UTF8-CodePoint* -- string that represents the compiler that produced the TASTy
+  VersionString   = Utf8                   -- string that represents the compiler that produced the TASTy

   Section         = NameRef Length Bytes
   Length          = Nat                    -- length of rest of entry in bytes

-  Name            = UTF8              Length UTF8-CodePoint*
+  Name            = UTF8              Utf8
                     QUALIFIED         Length qualified_NameRef selector_NameRef               -- A.B
                     EXPANDED          Length qualified_NameRef selector_NameRef               -- A$$B, semantically a NameKinds.ExpandedName
                     EXPANDPREFIX      Length qualified_NameRef selector_NameRef               -- A$B, prefix of expanded name, see NamedKinds.ExpandPrefixName
@@ -49,6 +50,7 @@ Macro-format:
                                       // If positive, this is a NameRef for the fully qualified name of a term parameter.

   NameRef         = Nat                    // ordinal number of name in name table, starting from 1.
+  Utf8Ref         = Nat                    // ordinal number of UTF8 in name table, starting from 1.
 ```

 Note: Unqualified names in the name table are strings. The context decides whether a name is
@@ -79,8 +81,9 @@ Standard-Section: "ASTs" TopLevelStat*
   Param           = TypeParam
                     TermParam
   Template        = TEMPLATE Length TypeParam* TermParam* parent_Term* Self?
-                    Stat*                                 -- [typeparams] paramss extends parents { self => stats }, where Stat* always starts with the primary constructor.
+                    EndParents? Stat*                     -- [typeparams] paramss extends parents { self => stats }, where Stat* always starts with the primary constructor.
   Self            = SELFDEF selfName_NameRef selfType_Term -- selfName : selfType
+  EndParents      = SPLITCLAUSE                           -- explicitly end the template header, e.g. if there is no primary constructor

   Term            = Path                                  -- Paths represent both types and terms
                     IDENT NameRef Type                    -- Used when term ident’s type is not a TermRef
@@ -88,6 +91,7 @@ Standard-Section: "ASTs" TopLevelStat*
                     SELECTin Length possiblySigned_NameRef qual_Term owner_Type -- qual.name, referring to a symbol declared in owner that has the given signature (see note below)
                     QUALTHIS typeIdent_Tree               -- id.this, different from THIS in that it contains a qualifier ident with position.
                     NEW clsType_Term                      -- new cls
+                    ELIDED exprType_Type                  -- elided expression of the given type
                     THROW throwableExpr_Term              -- throw throwableExpr
                     NAMEDARG paramName_NameRef arg_Term   -- paramName = arg
                     APPLY Length fn_Term arg_Term*        -- fn(args)
@@ -122,6 +126,8 @@ Standard-Section: "ASTs" TopLevelStat*
                     MATCHtpt Length bound_Term? sel_Term CaseDef* -- sel match { CaseDef } where `bound` is optional upper bound of all rhs
                     BYNAMEtpt underlying_Term             -- => underlying
                     SHAREDterm term_ASTRef                -- Link to previously serialized term
+                    -- pickled quote trees:               -- These trees can only appear in pickled quotes. They will never be in a TASTy file.
+                    EXPLICITtpt tpt_Term                  -- Tag for a type tree in a context where it is not explicitly known that this tree is a type.
                     HOLE Length idx_Nat tpe_Type arg_Tree* -- Splice hole with index `idx`, the type of the hole `tpe`, type and term arguments of the hole `arg`s

@@ -232,11 +238,11 @@ Note: The signature of a SELECTin or TERMREFin node is the signature of the sele
 Note: Tree tags are grouped into 5 categories that determine what follows, and thus allow to compute the size of the tagged tree in a generic way.
 ```none
-  Category 1 (tags 1-59)   :  tag
-  Category 2 (tags 60-89)  :  tag Nat
-  Category 3 (tags 90-109) :  tag AST
-  Category 4 (tags 110-127):  tag Nat AST
-  Category 5 (tags 128-255):  tag Length
+  Tree Category 1 (tags 1-59)   :  tag
+  Tree Category 2 (tags 60-89)  :  tag Nat
+  Tree Category 3 (tags 90-109) :  tag AST
+  Tree Category 4 (tags 110-127):  tag Nat AST
+  Tree Category 5 (tags 128-255):  tag Length
 ```

 Standard-Section: "Positions" LinesSizes Assoc*
@@ -263,8 +269,30 @@ All elements of a position section are serialized as Ints

 Standard Section: "Comments" Comment*
 ```none
-  Comment = Length Bytes LongInt    // Raw comment's bytes encoded as UTF-8, followed by the comment's coordinates.
+  Comment = Utf8 LongInt            // Raw comment's bytes encoded as UTF-8, followed by the comment's coordinates.
 ```
+
+Standard Section: "Attributes" Attribute*
+```none
+  Attribute = SCALA2STANDARDLIBRARYattr
+              EXPLICITNULLSattr
+              CAPTURECHECKEDattr
+              WITHPUREFUNSattr
+              JAVAattr
+              OUTLINEattr
+              SOURCEFILEattr Utf8Ref
+```
+Attribute tags cannot be repeated in an attribute section. Attributes are ordered by the tag ordinal.
+
+Note: Attribute tags are grouped into categories that determine what follows, and thus allow to compute the size of the tagged attribute in a generic way.
+      Unassigned categories can be used to extend an existing category or to add new kinds of attributes.
+```none
+  Attribute Category 1 (tags 1-32)   : tag
+  Attribute Category 2 (tags 33-128) : // not assigned yet
+  Attribute Category 3 (tags 129-160): tag Utf8Ref
+  Attribute Category 4 (tags 161-255): // not assigned yet
+```
+
 **************************************************************************************/

 object TastyFormat {
@@ -359,6 +387,7 @@ object TastyFormat {
   final val ASTsSection = "ASTs"
   final val PositionsSection = "Positions"
   final val CommentsSection = "Comments"
+  final val AttributesSection = "Attributes"

   /** Tags used to serialize names, should update [[TastyFormat$.nameTagToString]] if a new constant is added */
   class NameTags {
@@ -425,9 +454,9 @@ object TastyFormat {

   final val SOURCE = 4

-  // AST tags
-  // Cat. 1: tag
+  // AST tags
+  // Tree Cat. 1: tag

   final val firstSimpleTreeTag = UNITconst
   // final val ??? = 1
   final val UNITconst = 2
@@ -476,8 +505,8 @@ object TastyFormat {
   final val EMPTYCLAUSE = 45
   final val SPLITCLAUSE = 46

-  // Cat. 2: tag Nat
-
+  // Tree Cat. 2: tag Nat
+  final val firstNatTreeTag = SHAREDterm
   final val SHAREDterm = 60
   final val SHAREDtype = 61
   final val TERMREFdirect = 62
@@ -496,8 +525,8 @@ object TastyFormat {
   final val IMPORTED = 75
   final val RENAMED = 76

-  // Cat. 3: tag AST
-
+  // Tree Cat. 3: tag AST
+  final val firstASTTreeTag = THIS
   final val THIS = 90
   final val QUALTHIS = 91
   final val CLASSconst = 92
@@ -511,9 +540,12 @@ object TastyFormat {
   final val RECtype = 100
   final val SINGLETONtpt = 101
   final val BOUNDED = 102
+  final val EXPLICITtpt = 103
+  final val ELIDED = 104

-  // Cat. 4: tag Nat AST
+  // Tree Cat. 4: tag Nat AST
+  final val firstNatASTTreeTag = IDENT
   final val IDENT = 110
   final val IDENTtpt = 111
   final val SELECT = 112
@@ -525,8 +557,8 @@ object TastyFormat {
   final val SELFDEF = 118
   final val NAMEDARG = 119

-  // Cat. 5: tag Length ...
-
+  // Tree Cat. 5: tag Length ...
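+  // (Illustrative note, not in the original patch: PACKAGE (tag 128) below falls in this
+  // category, so its tag is followed by a Length and then that many bytes of payload,
+  // which lets a reader skip over an entry without knowing the specific tag.)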
+ final val firstLengthTreeTag = PACKAGE final val PACKAGE = 128 final val VALDEF = 129 final val DEFDEF = 130 @@ -588,10 +620,27 @@ object TastyFormat { final val HOLE = 255 - final val firstNatTreeTag = SHAREDterm - final val firstASTTreeTag = THIS - final val firstNatASTTreeTag = IDENT - final val firstLengthTreeTag = PACKAGE + // Attributes tags + + // Attribute Category 1 (tags 1-32) : tag + def isBooleanAttrTag(tag: Int): Boolean = 1 <= tag && tag <= 32 + final val SCALA2STANDARDLIBRARYattr = 1 + final val EXPLICITNULLSattr = 2 + final val CAPTURECHECKEDattr = 3 + final val WITHPUREFUNSattr = 4 + final val JAVAattr = 5 + final val OUTLINEattr = 6 + + // Attribute Category 2 (tags 33-128): unassigned + + // Attribute Category 3 (tags 129-160): tag Utf8Ref + def isStringAttrTag(tag: Int): Boolean = 129 <= tag && tag <= 160 + final val SOURCEFILEattr = 129 + + // Attribute Category 4 (tags 161-255): unassigned + + // end of Attributes tags + /** Useful for debugging */ def isLegalTag(tag: Int): Boolean = @@ -599,7 +648,7 @@ object TastyFormat { firstNatTreeTag <= tag && tag <= RENAMED || firstASTTreeTag <= tag && tag <= BOUNDED || firstNatASTTreeTag <= tag && tag <= NAMEDARG || - firstLengthTreeTag <= tag && tag <= MATCHtpt || + firstLengthTreeTag <= tag && tag <= MATCHCASEtype || tag == HOLE def isParamTag(tag: Int): Boolean = tag == PARAM || tag == TYPEPARAM @@ -659,6 +708,7 @@ object TastyFormat { | ANNOTATEDtpt | BYNAMEtpt | MATCHtpt + | EXPLICITtpt | BIND => true case _ => false } @@ -803,9 +853,21 @@ object TastyFormat { case ANNOTATION => "ANNOTATION" case PRIVATEqualified => "PRIVATEqualified" case PROTECTEDqualified => "PROTECTEDqualified" + case EXPLICITtpt => "EXPLICITtpt" + case ELIDED => "ELIDED" case HOLE => "HOLE" } + def attributeTagToString(tag: Int): String = tag match { + case SCALA2STANDARDLIBRARYattr => "SCALA2STANDARDLIBRARYattr" + case EXPLICITNULLSattr => "EXPLICITNULLSattr" + case CAPTURECHECKEDattr => "CAPTURECHECKEDattr" + case WITHPUREFUNSattr => "WITHPUREFUNSattr" + case JAVAattr => "JAVAattr" + case OUTLINEattr => "OUTLINEattr" + case SOURCEFILEattr => "SOURCEFILEattr" + } + /** @return If non-negative, the number of leading references (represented as nats) of a length/trees entry. * If negative, minus the number of leading non-reference trees. 
*/ diff --git a/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala b/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala index c0ed5dbd58fa..a51541192321 100644 --- a/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala +++ b/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala @@ -27,12 +27,67 @@ sealed abstract case class TastyHeader( toolingVersion: String ) -class TastyHeaderUnpickler(reader: TastyReader) { +trait UnpicklerConfig { + /** The TASTy major version that this reader supports */ + def majorVersion: Int + /** The TASTy minor version that this reader supports */ + def minorVersion: Int + /** The TASTy experimental version that this reader supports */ + def experimentalVersion: Int + /** The description of the upgraded tool that can read the given TASTy version */ + def upgradedReaderTool(version: TastyVersion): String + /** The description of the upgraded tool that can produce the given TASTy version */ + def upgradedProducerTool(version: TastyVersion): String + /** Additional information to help a user fix the outdated TASTy problem */ + def recompileAdditionalInfo: String + /** Additional information to help a user fix the more recent TASTy problem */ + def upgradeAdditionalInfo(fileVersion: TastyVersion): String +} + +object UnpicklerConfig { + + /** A config where its major, minor and experimental versions are fixed to those in TastyFormat */ + trait DefaultTastyVersion extends UnpicklerConfig { + override final def majorVersion: Int = MajorVersion + override final def minorVersion: Int = MinorVersion + override final def experimentalVersion: Int = ExperimentalVersion + } + + trait Generic extends UnpicklerConfig { + final def upgradedProducerTool(version: TastyVersion): String = + "a later version" + + final def upgradedReaderTool(version: TastyVersion): String = + if (version.isExperimental) s"the version of this tool compatible with TASTy ${version.show}" + else s"a newer version of this tool compatible with TASTy ${version.show}" + + final def recompileAdditionalInfo: String = """ + | Usually this means that the classpath entry of this file should be updated.""".stripMargin + + final def upgradeAdditionalInfo(fileVersion: TastyVersion): String = + if (fileVersion.isExperimental && experimentalVersion == 0) { + """ + | Note that this tool does not support reading experimental TASTy.""".stripMargin + } + else "" + } + + /** A config for the TASTy reader of a generic tool */ + val generic: UnpicklerConfig = new UnpicklerConfig with Generic with DefaultTastyVersion {} +} + +class TastyHeaderUnpickler(config: UnpicklerConfig, reader: TastyReader) { import TastyHeaderUnpickler._ import reader._ + def this(config: UnpicklerConfig, bytes: Array[Byte]) = this(config, new TastyReader(bytes)) + def this(reader: TastyReader) = this(UnpicklerConfig.generic, reader) def this(bytes: Array[Byte]) = this(new TastyReader(bytes)) + private val toolMajor: Int = config.majorVersion + private val toolMinor: Int = config.minorVersion + private val toolExperimental: Int = config.experimentalVersion + /** reads and verifies the TASTy version, extracting the UUID */ def readHeader(): UUID = readFullHeader().uuid @@ -45,8 +100,11 @@ class TastyHeaderUnpickler(reader: TastyReader) { val fileMajor = readNat() if (fileMajor <= 27) { // old behavior before `tasty-core` 3.0.0-M4 val fileMinor = readNat() - val signature = signatureString(fileMajor, fileMinor, 0) - throw new UnpickleException(signature + backIncompatAddendum + toolingAddendum) + val fileVersion = TastyVersion(fileMajor, 
fileMinor, 0) + val toolVersion = TastyVersion(toolMajor, toolMinor, toolExperimental) + val signature = signatureString(fileVersion, toolVersion, what = "Backward", tool = None) + val fix = recompileFix(toolVersion.minStable) + throw new UnpickleException(signature + fix + tastyAddendum) } else { val fileMinor = readNat() @@ -63,20 +121,38 @@ class TastyHeaderUnpickler(reader: TastyReader) { fileMajor = fileMajor, fileMinor = fileMinor, fileExperimental = fileExperimental, - compilerMajor = MajorVersion, - compilerMinor = MinorVersion, - compilerExperimental = ExperimentalVersion + compilerMajor = toolMajor, + compilerMinor = toolMinor, + compilerExperimental = toolExperimental ) check(validVersion, { - val signature = signatureString(fileMajor, fileMinor, fileExperimental) - val producedByAddendum = s"\nThe TASTy file was produced by $toolingVersion.$toolingAddendum" - val msg = ( - if (fileExperimental != 0) unstableAddendum - else if (fileMajor < MajorVersion) backIncompatAddendum - else forwardIncompatAddendum + // failure means that the TASTy file cannot be read, therefore it is either: + // - backwards incompatible major, in which case the library should be recompiled by the minimum stable minor + // version supported by this compiler + // - any experimental in an older minor, in which case the library should be recompiled by the stable + // compiler in the same minor. + // - older experimental in the same minor, in which case the compiler is also experimental, and the library + // should be recompiled by the current compiler + // - forward incompatible, in which case the compiler must be upgraded to the same version as the file. + val fileVersion = TastyVersion(fileMajor, fileMinor, fileExperimental) + val toolVersion = TastyVersion(toolMajor, toolMinor, toolExperimental) + + val compat = Compatibility.failReason(file = fileVersion, read = toolVersion) + + val what = if (compat < 0) "Backward" else "Forward" + val signature = signatureString(fileVersion, toolVersion, what, tool = Some(toolingVersion)) + val fix = ( + if (compat < 0) { + val newCompiler = + if (compat == Compatibility.BackwardIncompatibleMajor) toolVersion.minStable + else if (compat == Compatibility.BackwardIncompatibleExperimental) fileVersion.nextStable + else toolVersion // recompile the experimental library with the current experimental compiler + recompileFix(newCompiler) + } + else upgradeFix(fileVersion) ) - signature + msg + producedByAddendum + signature + fix + tastyAddendum }) val uuid = new UUID(readUncompressedLong(), readUncompressedLong()) @@ -89,40 +165,52 @@ class TastyHeaderUnpickler(reader: TastyReader) { private def check(cond: Boolean, msg: => String): Unit = { if (!cond) throw new UnpickleException(msg) } -} - -object TastyHeaderUnpickler { - private def toolingAddendum = ( - if (ExperimentalVersion > 0) - "\nNote that your tooling is currently using an unstable TASTy version." - else - "" - ) - - private def signatureString(fileMajor: Int, fileMinor: Int, fileExperimental: Int) = { - def showMinorVersion(min: Int, exp: Int) = { - val expStr = if (exp == 0) "" else s" [unstable release: $exp]" - s"$min$expStr" - } - val minorVersion = showMinorVersion(MinorVersion, ExperimentalVersion) - val fileMinorVersion = showMinorVersion(fileMinor, fileExperimental) - s"""TASTy signature has wrong version. 
- | expected: {majorVersion: $MajorVersion, minorVersion: $minorVersion} - | found : {majorVersion: $fileMajor, minorVersion: $fileMinorVersion} - | + private def signatureString( + fileVersion: TastyVersion, toolVersion: TastyVersion, what: String, tool: Option[String]) = { + val optProducedBy = tool.fold("")(t => s", produced by $t") + s"""$what incompatible TASTy file has version ${fileVersion.show}$optProducedBy, + | expected ${toolVersion.validRange}. |""".stripMargin } - private def unstableAddendum = - """This TASTy file was produced by an unstable release. - |To read this TASTy file, your tooling must be at the same version.""".stripMargin + private def recompileFix(producerVersion: TastyVersion) = { + val addendum = config.recompileAdditionalInfo + val newTool = config.upgradedProducerTool(producerVersion) + s""" The source of this file should be recompiled by $newTool.$addendum""".stripMargin + } - private def backIncompatAddendum = - """This TASTy file was produced by an earlier release that is not supported anymore. - |Please recompile this TASTy with a later version.""".stripMargin + private def upgradeFix(fileVersion: TastyVersion) = { + val addendum = config.upgradeAdditionalInfo(fileVersion) + val newTool = config.upgradedReaderTool(fileVersion) + s""" To read this ${fileVersion.kind} file, use $newTool.$addendum""".stripMargin + } - private def forwardIncompatAddendum = - """This TASTy file was produced by a more recent, forwards incompatible release. - |To read this TASTy file, please upgrade your tooling.""".stripMargin + private def tastyAddendum: String = """ + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin +} + +object TastyHeaderUnpickler { + + private object Compatibility { + final val BackwardIncompatibleMajor = -3 + final val BackwardIncompatibleExperimental = -2 + final val ExperimentalRecompile = -1 + final val ExperimentalUpgrade = 1 + final val ForwardIncompatible = 2 + + /** Given that file can't be read, extract the reason */ + def failReason(file: TastyVersion, read: TastyVersion): Int = + if (file.major == read.major && file.minor == read.minor && file.isExperimental && read.isExperimental) { + if (file.experimental < read.experimental) ExperimentalRecompile // recompile library as compiler is too new + else ExperimentalUpgrade // they should upgrade compiler as library is too new + } + else if (file.major < read.major) + BackwardIncompatibleMajor // pre 3.0.0 + else if (file.isExperimental && file.major == read.major && file.minor <= read.minor) + // e.g. 3.4.0 reading 3.4.0-RC1-NIGHTLY, or 3.3.0 reading 3.0.2-RC1-NIGHTLY + BackwardIncompatibleExperimental + else ForwardIncompatible + } } diff --git a/tasty/src/dotty/tools/tasty/TastyReader.scala b/tasty/src/dotty/tools/tasty/TastyReader.scala index 31407f7a4ab8..b5aa29f16954 100644 --- a/tasty/src/dotty/tools/tasty/TastyReader.scala +++ b/tasty/src/dotty/tools/tasty/TastyReader.scala @@ -3,6 +3,7 @@ package dotty.tools.tasty import collection.mutable import TastyBuffer._ +import java.nio.charset.StandardCharsets /** A byte array buffer that can be filled with bytes or natural numbers in TASTY format, * and that supports reading and patching addresses represented as natural numbers. 
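Before the `TastyReader` hunks below, a note on how the `UnpicklerConfig` introduced in `TastyHeaderUnpickler.scala` above is meant to be consumed. A hypothetical sketch — the object name and message strings are invented; the trait members and the two-argument constructor are the ones added in this patch:

```scala
// Sketch: a third-party tool wiring its own wording into header errors.
import dotty.tools.tasty.{TastyHeaderUnpickler, TastyVersion, UnpicklerConfig}

object MyToolConfig extends UnpicklerConfig.DefaultTastyVersion {
  def upgradedReaderTool(version: TastyVersion): String =
    s"a newer mytool release that supports TASTy ${version.show}"
  def upgradedProducerTool(version: TastyVersion): String =
    s"a compiler that produces TASTy ${version.show}"
  def recompileAdditionalInfo: String =
    "\n  Usually this means a stale artifact on mytool's classpath."
  def upgradeAdditionalInfo(fileVersion: TastyVersion): String =
    if (fileVersion.isExperimental) "\n  mytool does not read experimental TASTy." else ""
}

// Reading a header then picks these strings up in any failure message:
def readUuid(bytes: Array[Byte]) =
  new TastyHeaderUnpickler(MyToolConfig, bytes).readHeader()
```

On a version mismatch, `recompileFix`/`upgradeFix` above splice these strings into the "Backward/Forward incompatible TASTy file" message, so each tool can point users at its own remedy instead of the generic wording.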
@@ -104,6 +105,15 @@ class TastyReader(val bytes: Array[Byte], start: Int, end: Int, val base: Int = x } + /** Read a UTF8 string encoded as `Nat UTF8-CodePoint*`, + * where the `Nat` is the length of the code-points bytes. + */ + def readUtf8(): String = { + val length = readNat() + if (length == 0) "" + else new String(readBytes(length), StandardCharsets.UTF_8) + } + /** Read a natural number and return as a NameRef */ def readNameRef(): NameRef = NameRef(readNat()) diff --git a/tasty/src/dotty/tools/tasty/TastyVersion.scala b/tasty/src/dotty/tools/tasty/TastyVersion.scala new file mode 100644 index 000000000000..b6474f7c7934 --- /dev/null +++ b/tasty/src/dotty/tools/tasty/TastyVersion.scala @@ -0,0 +1,39 @@ +package dotty.tools.tasty + +import scala.annotation.internal.sharable + +case class TastyVersion private(major: Int, minor: Int, experimental: Int) { + def isExperimental: Boolean = experimental > 0 + + def nextStable: TastyVersion = copy(experimental = 0) + + def minStable: TastyVersion = copy(minor = 0, experimental = 0) + + def show: String = { + val suffix = if (isExperimental) s"-experimental-$experimental" else "" + s"$major.$minor$suffix" + } + + def kind: String = + if (isExperimental) "experimental TASTy" else "TASTy" + + def validRange: String = { + val min = TastyVersion(major, 0, 0) + val max = if (experimental == 0) this else TastyVersion(major, minor - 1, 0) + val extra = Option.when(experimental > 0)(this) + s"stable TASTy from ${min.show} to ${max.show}${extra.fold("")(e => s", or exactly ${e.show}")}" + } +} + +object TastyVersion { + + @sharable + private val cache: java.util.concurrent.ConcurrentHashMap[TastyVersion, TastyVersion] = + new java.util.concurrent.ConcurrentHashMap() + + def apply(major: Int, minor: Int, experimental: Int): TastyVersion = { + val version = new TastyVersion(major, minor, experimental) + val cachedVersion = cache.putIfAbsent(version, version) + if (cachedVersion == null) version else cachedVersion + } +} diff --git a/tasty/test/dotty/tools/tasty/TastyHeaderUnpicklerTest.scala b/tasty/test/dotty/tools/tasty/TastyHeaderUnpicklerTest.scala deleted file mode 100644 index 9f54c4b3061b..000000000000 --- a/tasty/test/dotty/tools/tasty/TastyHeaderUnpicklerTest.scala +++ /dev/null @@ -1,84 +0,0 @@ -package dotty.tools.tasty - -import org.junit.Assert._ -import org.junit.{Test, Ignore} - -import TastyFormat._ -import TastyBuffer._ - -@Ignore // comment if you want to experiment with error messages -class TastyHeaderUnpicklerTest { - - import TastyHeaderUnpicklerTest._ - - @Test def vanilla: Unit = { - runTest(MajorVersion, MinorVersion, ExperimentalVersion, "Scala 3.0.0-M4-bin-SNAPSHOT-git-12345") - } - - @Test def failBumpExperimental: Unit = { - (runTest(MajorVersion, MinorVersion, ExperimentalVersion + 1, "Scala 3.0.0-M4-bin-SNAPSHOT-git-12345")) - } - - @Test def failBumpMinor: Unit = { - (runTest(MajorVersion, MinorVersion + 1, ExperimentalVersion, "Scala 3.1.0-RC1")) - } - - @Test def failBumpMajor: Unit = { - (runTest(MajorVersion + 1, MinorVersion, ExperimentalVersion, "Scala 4.0.0-M1")) - } - - @Test def failBumpMajorFinal: Unit = { - (runTest(MajorVersion + 1, MinorVersion, 0, "Scala 4.0.0")) - } - - @Test def okSubtractExperimental: Unit = { - (runTest(MajorVersion, MinorVersion, ExperimentalVersion - 1, "Scala 3.0.0")) - } - - @Test def okSubtractMinor: Unit = { - (runTest(MajorVersion, MinorVersion - 1, ExperimentalVersion, "Scala 3.0.0-M4-bin-SNAPSHOT-git-12345")) - } - - @Test def failSubtractMajor: Unit = { - (runTest(MajorVersion 
- 1, MinorVersion, ExperimentalVersion, "Scala 3.0.0-M4-bin-SNAPSHOT-git-12345")) - } - -} - -object TastyHeaderUnpicklerTest { - - - def fillHeader(maj: Int, min: Int, exp: Int, compiler: String): TastyBuffer = { - val compilerBytes = compiler.getBytes(java.nio.charset.StandardCharsets.UTF_8) - val buf = new TastyBuffer(header.length + 32 + compilerBytes.length) - for (ch <- header) buf.writeByte(ch.toByte) - buf.writeNat(maj) - buf.writeNat(min) - buf.writeNat(exp) - buf.writeNat(compilerBytes.length) - buf.writeBytes(compilerBytes, compilerBytes.length) - buf.writeUncompressedLong(237478L) - buf.writeUncompressedLong(324789L) - buf - } - - def runTest(maj: Int, min: Int, exp: Int, compiler: String): Unit = { - val headerBuffer = fillHeader(maj, min, exp, compiler) - val bs = headerBuffer.bytes.clone - - val hr = new TastyHeaderUnpickler(bs) - - hr.readFullHeader() - } - - def expectUnpickleError(op: => Unit) = { - try { - op - fail() - } - catch { - case err: UnpickleException => () - } - } - -} diff --git a/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/project/build.properties b/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/project/build.properties index 46e43a97ed86..52413ab79a18 100644 --- a/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/project/build.properties +++ b/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/project/build.properties @@ -1 +1 @@ -sbt.version=1.8.2 +sbt.version=1.9.3 diff --git a/tests/cmdTest-sbt-tests/sourcepath-with-inline/project/build.properties b/tests/cmdTest-sbt-tests/sourcepath-with-inline/project/build.properties index 46e43a97ed86..52413ab79a18 100644 --- a/tests/cmdTest-sbt-tests/sourcepath-with-inline/project/build.properties +++ b/tests/cmdTest-sbt-tests/sourcepath-with-inline/project/build.properties @@ -1 +1 @@ -sbt.version=1.8.2 +sbt.version=1.9.3 diff --git a/tests/coverage/pos/Constructor.scoverage.check b/tests/coverage/pos/Constructor.scoverage.check index 6a6742c9118d..cb676686eac5 100644 --- a/tests/coverage/pos/Constructor.scoverage.check +++ b/tests/coverage/pos/Constructor.scoverage.check @@ -27,7 +27,7 @@ covtest.C 28 36 -3 +4 DefDef false @@ -44,7 +44,7 @@ covtest.C 69 72 -5 +6 g Apply false @@ -61,7 +61,7 @@ covtest.C 80 88 -8 +9 DefDef false @@ -78,7 +78,7 @@ covtest.C 108 128 -9 +10 + Apply false @@ -95,7 +95,7 @@ covtest.C f 133 138 -11 +12 f DefDef false @@ -112,7 +112,7 @@ covtest.C x 153 158 -12 +13 x DefDef false @@ -129,7 +129,7 @@ covtest.C 165 169 -13 +14 f Apply false @@ -146,7 +146,7 @@ covtest.C 167 168 -13 +14 x Select false @@ -163,7 +163,7 @@ covtest.C g 173 178 -15 +16 g DefDef false @@ -180,7 +180,7 @@ covtest.O$ g 203 208 -18 +19 g DefDef false @@ -197,7 +197,7 @@ covtest.O$ y 223 228 -19 +20 y DefDef false @@ -214,7 +214,7 @@ covtest.O$ 235 239 -20 +21 g Apply false @@ -231,7 +231,7 @@ covtest.O$ 237 238 -20 +21 y Ident false diff --git a/tests/coverage/pos/ContextFunctions.scoverage.check b/tests/coverage/pos/ContextFunctions.scoverage.check index 5f2a5f8e14f3..0d616d811ffe 100644 --- a/tests/coverage/pos/ContextFunctions.scoverage.check +++ b/tests/coverage/pos/ContextFunctions.scoverage.check @@ -27,7 +27,7 @@ covtest.OnError onError 56 67 -3 +4 onError DefDef false @@ -44,7 +44,7 @@ covtest.Imperative readName2 121 134 -7 +8 readName2 DefDef false @@ -61,7 +61,7 @@ covtest.Imperative readPerson 252 309 -13 +14 onError Apply false @@ -78,7 +78,7 @@ covtest.Imperative readPerson 252 295 -13 +14 Apply false @@ -95,7 +95,7 @@ covtest.Imperative $anonfun 267 294 -13 +14 apply 
Apply false @@ -112,7 +112,7 @@ covtest.Imperative readPerson 192 206 -11 +12 readPerson DefDef false diff --git a/tests/coverage/pos/Enum.scoverage.check b/tests/coverage/pos/Enum.scoverage.check index 7e9b69be2d31..3baa74fc2e5e 100644 --- a/tests/coverage/pos/Enum.scoverage.check +++ b/tests/coverage/pos/Enum.scoverage.check @@ -27,7 +27,7 @@ covtest.Planet surfaceGravity 338 356 -14 +15 surfaceGravity DefDef false @@ -44,7 +44,7 @@ covtest.Planet surfaceWeight 444 458 -15 +16 surfaceGravity Select false @@ -61,7 +61,7 @@ covtest.Planet surfaceWeight 392 409 -15 +16 surfaceWeight DefDef false @@ -78,7 +78,7 @@ covtest.EnumTypes$ test 1043 1077 -30 +31 println Apply false @@ -95,7 +95,7 @@ covtest.EnumTypes$ test 1051 1076 -30 +31 + Apply false @@ -112,7 +112,7 @@ covtest.EnumTypes$ test 1082 1103 -31 +32 println Apply false @@ -129,7 +129,7 @@ covtest.EnumTypes$ test 1090 1102 -31 +32 s Apply false @@ -146,7 +146,7 @@ covtest.EnumTypes$ calculateEarthWeightOnPlanets 1195 1222 -34 +35 surfaceGravity Select false @@ -163,7 +163,7 @@ covtest.EnumTypes$ calculateEarthWeightOnPlanets 1229 1320 -35 +36 foreach Apply false @@ -180,7 +180,7 @@ covtest.EnumTypes$ calculateEarthWeightOnPlanets 1238 1251 -35 +36 refArrayOps Apply false @@ -197,7 +197,7 @@ covtest.EnumTypes$ $anonfun 1263 1320 -36 +37 println Apply false @@ -214,7 +214,7 @@ covtest.EnumTypes$ $anonfun 1271 1319 -36 +37 s Apply false @@ -231,7 +231,7 @@ covtest.EnumTypes$ $anonfun 1296 1317 -36 +37 surfaceWeight Apply false @@ -248,7 +248,7 @@ covtest.EnumTypes$ calculateEarthWeightOnPlanets 1109 1142 -33 +34 calculateEarthWeightOnPlanets DefDef false @@ -265,7 +265,7 @@ covtest.EnumTypes$ test 1326 1347 -38 +39 println Apply false @@ -282,7 +282,7 @@ covtest.EnumTypes$ test 1352 1385 -39 +40 calculateEarthWeightOnPlanets Apply false @@ -299,7 +299,7 @@ covtest.EnumTypes$ test 901 909 -27 +28 test DefDef false diff --git a/tests/coverage/pos/Escaping.scoverage.check b/tests/coverage/pos/Escaping.scoverage.check index ecb907a9d222..8ab815c8b6f1 100644 --- a/tests/coverage/pos/Escaping.scoverage.check +++ b/tests/coverage/pos/Escaping.scoverage.check @@ -27,7 +27,7 @@ covtest.\n.\r\n\f \r\n\f 69 80 -3 +4 length Apply false @@ -44,7 +44,7 @@ covtest.\n.\r\n\f \r\n\f 40 48 -3 +4 \r\n\f DefDef false diff --git a/tests/coverage/pos/For.scoverage.check b/tests/coverage/pos/For.scoverage.check index 9fdc9a7e9d80..901d34701ced 100644 --- a/tests/coverage/pos/For.scoverage.check +++ b/tests/coverage/pos/For.scoverage.check @@ -27,7 +27,7 @@ covtest.For$package$ testForLoop 43 77 -3 +4 foreach Apply false @@ -44,7 +44,7 @@ covtest.For$package$ testForLoop 52 59 -3 +4 to Apply false @@ -61,7 +61,7 @@ covtest.For$package$ testForLoop 52 53 -3 +4 intWrapper Apply false @@ -78,7 +78,7 @@ covtest.For$package$ $anonfun 67 77 -4 +5 println Apply false @@ -95,7 +95,7 @@ covtest.For$package$ testForLoop 17 32 -2 +3 testForLoop DefDef false @@ -112,7 +112,7 @@ covtest.For$package$ f 109 114 -7 +8 f DefDef false @@ -129,7 +129,7 @@ covtest.For$package$ testForAdvanced 141 183 -8 +9 foreach Apply false @@ -146,7 +146,7 @@ covtest.For$package$ testForAdvanced 145 165 -8 +9 withFilter Apply false @@ -163,7 +163,7 @@ covtest.For$package$ testForAdvanced 150 157 -8 +9 to Apply false @@ -180,7 +180,7 @@ covtest.For$package$ testForAdvanced 150 151 -8 +9 intWrapper Apply false @@ -197,7 +197,7 @@ covtest.For$package$ $anonfun 161 165 -8 +9 f Apply false @@ -214,7 +214,7 @@ covtest.For$package$ $anonfun 173 183 -9 +10 println Apply false @@ -231,7 +231,7 @@ 
covtest.For$package$ testForAdvanced 79 98 -6 +7 testForAdvanced DefDef false @@ -248,7 +248,7 @@ covtest.For$package$ testForeach 301 344 -13 +14 foreach Apply false @@ -262,27 +262,10 @@ covtest For$package$ Object covtest.For$package$ -testForeach -301 -304 -13 -Nil -Ident -false -0 -false -Nil - -15 -For.scala -covtest -For$package$ -Object -covtest.For$package$ $anonfun 318 343 -13 +14 println Apply false @@ -290,7 +273,7 @@ false false println("user code here") -16 +15 For.scala covtest For$package$ @@ -299,7 +282,7 @@ covtest.For$package$ testForeach 185 200 -11 +12 testForeach DefDef false diff --git a/tests/coverage/pos/Givens.scoverage.check b/tests/coverage/pos/Givens.scoverage.check index 15f8f02c378f..4442f329c6b2 100644 --- a/tests/coverage/pos/Givens.scoverage.check +++ b/tests/coverage/pos/Givens.scoverage.check @@ -27,7 +27,7 @@ covtest.Givens test 174 191 -10 +11 println Apply false @@ -44,7 +44,7 @@ covtest.Givens test 196 213 -11 +12 println Apply false @@ -61,7 +61,7 @@ covtest.Givens test 100 108 -8 +9 test DefDef false @@ -78,7 +78,7 @@ covtest.Givens printContext 279 291 -14 +15 println Apply false @@ -95,7 +95,7 @@ covtest.Givens printContext 296 311 -15 +16 println Apply false @@ -112,7 +112,7 @@ covtest.Givens printContext 217 233 -13 +14 printContext DefDef false @@ -129,7 +129,7 @@ covtest.Givens getMessage 315 329 -17 +18 getMessage DefDef false @@ -146,7 +146,7 @@ covtest.Givens test2 399 409 -20 +21 Apply false @@ -163,7 +163,7 @@ covtest.Givens test2 414 443 -21 +22 printContext Apply false @@ -180,7 +180,7 @@ covtest.Givens test2 448 477 -22 +23 printContext Apply false @@ -197,7 +197,7 @@ covtest.Givens test2 461 476 -22 +23 getMessage Apply false @@ -214,7 +214,7 @@ covtest.Givens test2 362 371 -19 +20 test2 DefDef false diff --git a/tests/coverage/pos/Inlined.scoverage.check b/tests/coverage/pos/Inlined.scoverage.check index 85393db11a8f..bc0ab9d99892 100644 --- a/tests/coverage/pos/Inlined.scoverage.check +++ b/tests/coverage/pos/Inlined.scoverage.check @@ -27,7 +27,7 @@ covtest.Inlined$package$ testInlined 288 330 -10 +11 assertFailed Apply false @@ -44,7 +44,7 @@ covtest.Inlined$package$ testInlined 288 330 -10 +11 assertFailed Apply true @@ -59,15 +59,15 @@ Inlined$package$ Object covtest.Inlined$package$ testInlined -155 -162 -6 -apply -Apply -false +330 +330 +11 + +Literal +true 0 false -List(l) + 3 Inlined.scala @@ -77,14 +77,14 @@ Object covtest.Inlined$package$ testInlined 155 -159 -6 -List -Ident +162 +7 +apply +Apply false 0 false -List +List(l) 4 Inlined.scala @@ -95,7 +95,7 @@ covtest.Inlined$package$ testInlined 155 169 -6 +7 length Select false @@ -112,7 +112,7 @@ covtest.Inlined$package$ testInlined 288 330 -10 +11 assertFailed Apply false @@ -129,7 +129,7 @@ covtest.Inlined$package$ testInlined 288 330 -10 +11 assertFailed Apply true @@ -144,15 +144,15 @@ Inlined$package$ Object covtest.Inlined$package$ testInlined -180 -187 -7 -apply -Apply -false +330 +330 +11 + +Literal +true 0 false -List(l) + 8 Inlined.scala @@ -162,14 +162,14 @@ Object covtest.Inlined$package$ testInlined 180 -184 -7 -List -Ident +187 +8 +apply +Apply false 0 false -List +List(l) 9 Inlined.scala @@ -180,7 +180,7 @@ covtest.Inlined$package$ testInlined 180 194 -7 +8 length Select false @@ -197,7 +197,7 @@ covtest.Inlined$package$ testInlined 288 330 -10 +11 assertFailed Apply false @@ -214,7 +214,7 @@ covtest.Inlined$package$ testInlined 288 330 -10 +11 assertFailed Apply true @@ -229,9 +229,26 @@ Inlined$package$ Object covtest.Inlined$package$ testInlined +330 
+330 +11 + +Literal +true +0 +false + + +13 +Inlined.scala +covtest +Inlined$package$ +Object +covtest.Inlined$package$ +testInlined 86 101 -3 +4 testInlined DefDef false diff --git a/tests/coverage/pos/InlinedFromLib.scoverage.check b/tests/coverage/pos/InlinedFromLib.scoverage.check index d7b2a42cd3b3..263281cfad39 100644 --- a/tests/coverage/pos/InlinedFromLib.scoverage.check +++ b/tests/coverage/pos/InlinedFromLib.scoverage.check @@ -27,7 +27,7 @@ covtest.InlinedFromLib$package$ testInlined 169 183 -6 +7 assertFailed Apply false @@ -44,7 +44,7 @@ covtest.InlinedFromLib$package$ testInlined 169 183 -6 +7 assertFailed Apply true @@ -61,7 +61,7 @@ covtest.InlinedFromLib$package$ testInlined 169 183 -6 +7 Literal true @@ -78,7 +78,7 @@ covtest.InlinedFromLib$package$ testInlined 198 205 -7 +8 apply Apply false @@ -94,25 +94,8 @@ Object covtest.InlinedFromLib$package$ testInlined 198 -202 -7 -List -Ident -false -0 -false -List - -5 -InlinedFromLib.scala -covtest -InlinedFromLib$package$ -Object -covtest.InlinedFromLib$package$ -testInlined -198 212 -7 +8 length Select false @@ -120,7 +103,7 @@ false false List(l).length -6 +5 InlinedFromLib.scala covtest InlinedFromLib$package$ @@ -129,7 +112,7 @@ covtest.InlinedFromLib$package$ testInlined 186 213 -7 +8 assertFailed Apply false @@ -137,7 +120,7 @@ false false assert(l == List(l).length) -7 +6 InlinedFromLib.scala covtest InlinedFromLib$package$ @@ -146,7 +129,7 @@ covtest.InlinedFromLib$package$ testInlined 186 213 -7 +8 assertFailed Apply true @@ -154,7 +137,7 @@ true false assert(l == List(l).length) -8 +7 InlinedFromLib.scala covtest InlinedFromLib$package$ @@ -163,7 +146,7 @@ covtest.InlinedFromLib$package$ testInlined 186 213 -7 +8 Literal true @@ -171,7 +154,7 @@ true false assert(l == List(l).length) -9 +8 InlinedFromLib.scala covtest InlinedFromLib$package$ @@ -180,7 +163,7 @@ covtest.InlinedFromLib$package$ testInlined 223 230 -8 +9 apply Apply false @@ -188,24 +171,7 @@ false false List(l) -10 -InlinedFromLib.scala -covtest -InlinedFromLib$package$ -Object -covtest.InlinedFromLib$package$ -testInlined -223 -227 -8 -List -Ident -false -0 -false -List - -11 +9 InlinedFromLib.scala covtest InlinedFromLib$package$ @@ -214,7 +180,7 @@ covtest.InlinedFromLib$package$ testInlined 223 237 -8 +9 length Select false @@ -222,7 +188,7 @@ false false List(l).length -12 +10 InlinedFromLib.scala covtest InlinedFromLib$package$ @@ -231,7 +197,7 @@ covtest.InlinedFromLib$package$ testInlined 216 243 -8 +9 assertFailed Apply false @@ -239,7 +205,7 @@ false false assert(List(l).length == 1) -13 +11 InlinedFromLib.scala covtest InlinedFromLib$package$ @@ -248,7 +214,7 @@ covtest.InlinedFromLib$package$ testInlined 216 243 -8 +9 assertFailed Apply true @@ -256,7 +222,7 @@ true false assert(List(l).length == 1) -14 +12 InlinedFromLib.scala covtest InlinedFromLib$package$ @@ -265,7 +231,7 @@ covtest.InlinedFromLib$package$ testInlined 216 243 -8 +9 Literal true @@ -273,7 +239,7 @@ true false assert(List(l).length == 1) -15 +13 InlinedFromLib.scala covtest InlinedFromLib$package$ @@ -282,7 +248,7 @@ covtest.InlinedFromLib$package$ testInlined 129 144 -4 +5 testInlined DefDef false diff --git a/tests/coverage/pos/Lift.scoverage.check b/tests/coverage/pos/Lift.scoverage.check index a1c656cbdb67..cce8c18c6254 100644 --- a/tests/coverage/pos/Lift.scoverage.check +++ b/tests/coverage/pos/Lift.scoverage.check @@ -27,7 +27,7 @@ covtest.SomeFunctions f 40 45 -3 +4 f DefDef false @@ -44,7 +44,7 @@ covtest.SomeFunctions g 61 66 -4 +5 g DefDef false @@ -61,7 +61,7 
@@ covtest.SomeFunctions c 83 98 -5 +6 Apply false @@ -78,7 +78,7 @@ covtest.SomeFunctions c 75 80 -5 +6 c DefDef false @@ -95,7 +95,7 @@ covtest.SomeFunctions test 113 121 -7 +8 f Apply false @@ -112,7 +112,7 @@ covtest.SomeFunctions test 113 114 -7 +8 c Select false @@ -129,7 +129,7 @@ covtest.SomeFunctions test 117 120 -7 +8 g Apply false @@ -146,7 +146,7 @@ covtest.SomeFunctions test 102 110 -7 +8 test DefDef false diff --git a/tests/coverage/pos/Literals.scoverage.check b/tests/coverage/pos/Literals.scoverage.check index e890f0978074..f23c481c1145 100644 --- a/tests/coverage/pos/Literals.scoverage.check +++ b/tests/coverage/pos/Literals.scoverage.check @@ -27,7 +27,7 @@ covtest.Literals$package$ block 31 50 -3 +4 println Apply false @@ -44,7 +44,7 @@ covtest.Literals$package$ block 17 26 -2 +3 block DefDef false @@ -61,7 +61,7 @@ covtest.Literals$package$ f 177 180 -8 +9 ??? Ident false @@ -78,7 +78,7 @@ covtest.Literals$package$ f 137 142 -8 +9 f DefDef false @@ -95,7 +95,7 @@ covtest.Literals$package$ main 201 212 -11 +12 f Apply false @@ -112,7 +112,7 @@ covtest.Literals$package$ main 182 190 -10 +11 main DefDef false diff --git a/tests/coverage/pos/MatchCaseClasses.scoverage.check b/tests/coverage/pos/MatchCaseClasses.scoverage.check index 5440c2e3098d..0911532b1977 100644 --- a/tests/coverage/pos/MatchCaseClasses.scoverage.check +++ b/tests/coverage/pos/MatchCaseClasses.scoverage.check @@ -25,26 +25,9 @@ MatchCaseClasses$ Object covtest.MatchCaseClasses$ f -135 -147 -7 - -Block -false -0 -false -case Pat1(0) - -1 -MatchCaseClasses.scala -covtest -MatchCaseClasses$ -Object -covtest.MatchCaseClasses$ -f 151 163 -7 +8 println Apply false @@ -52,24 +35,24 @@ false false println("a") -2 +1 MatchCaseClasses.scala covtest MatchCaseClasses$ Object covtest.MatchCaseClasses$ f -168 -180 +148 +163 8 Block -false +true 0 false -case Pat1(_) +=> println("a") -3 +2 MatchCaseClasses.scala covtest MatchCaseClasses$ @@ -78,7 +61,7 @@ covtest.MatchCaseClasses$ f 184 196 -8 +9 println Apply false @@ -86,24 +69,24 @@ false false println("b") -4 +3 MatchCaseClasses.scala covtest MatchCaseClasses$ Object covtest.MatchCaseClasses$ f -201 -221 +181 +196 9 Block -false +true 0 false -case p @ Pat2(1, -1) +=> println("b") -5 +4 MatchCaseClasses.scala covtest MatchCaseClasses$ @@ -112,7 +95,7 @@ covtest.MatchCaseClasses$ f 225 237 -9 +10 println Apply false @@ -120,24 +103,24 @@ false false println("c") -6 +5 MatchCaseClasses.scala covtest MatchCaseClasses$ Object covtest.MatchCaseClasses$ f -242 -265 +222 +237 10 Block -false +true 0 false -case Pat2(_, y: String) +=> println("c") -7 +6 MatchCaseClasses.scala covtest MatchCaseClasses$ @@ -146,7 +129,7 @@ covtest.MatchCaseClasses$ f 275 285 -11 +12 println Apply false @@ -154,7 +137,7 @@ false false println(y) -8 +7 MatchCaseClasses.scala covtest MatchCaseClasses$ @@ -163,7 +146,7 @@ covtest.MatchCaseClasses$ f 292 304 -12 +13 println Apply false @@ -171,24 +154,24 @@ false false println("d") -9 +8 MatchCaseClasses.scala covtest MatchCaseClasses$ Object covtest.MatchCaseClasses$ f -309 -321 -13 +275 +304 +12 Block -false +true 0 false -case p: Pat2 +println(y)\n println("d") -10 +9 MatchCaseClasses.scala covtest MatchCaseClasses$ @@ -197,7 +180,7 @@ covtest.MatchCaseClasses$ f 325 337 -13 +14 println Apply false @@ -205,24 +188,24 @@ false false println("e") -11 +10 MatchCaseClasses.scala covtest MatchCaseClasses$ Object covtest.MatchCaseClasses$ f -342 -348 +322 +337 14 Block -false +true 0 false -case _ +=> println("e") -12 +11 MatchCaseClasses.scala 
covtest MatchCaseClasses$ @@ -231,7 +214,7 @@ covtest.MatchCaseClasses$ f 352 368 -14 +15 println Apply false @@ -239,6 +222,23 @@ false false println("other") +12 +MatchCaseClasses.scala +covtest +MatchCaseClasses$ +Object +covtest.MatchCaseClasses$ +f +349 +368 +15 + +Block +true +0 +false +=> println("other") + 13 MatchCaseClasses.scala covtest @@ -248,7 +248,7 @@ covtest.MatchCaseClasses$ f 101 106 -6 +7 f DefDef false diff --git a/tests/coverage/pos/MatchNumbers.scoverage.check b/tests/coverage/pos/MatchNumbers.scoverage.check index 0421cf77002a..ccb8a627f46e 100644 --- a/tests/coverage/pos/MatchNumbers.scoverage.check +++ b/tests/coverage/pos/MatchNumbers.scoverage.check @@ -25,15 +25,15 @@ MatchNumbers$ Object covtest.MatchNumbers$ f -106 -126 -6 +127 +132 +7 Block -false +true 0 false -case x: Int if x < 0 +=> -1 1 MatchNumbers.scala @@ -42,15 +42,15 @@ MatchNumbers$ Object covtest.MatchNumbers$ f -137 -148 -7 +149 +153 +8 Block -false +true 0 false -case x: Int +=> x 2 MatchNumbers.scala @@ -59,15 +59,15 @@ MatchNumbers$ Object covtest.MatchNumbers$ f -158 -170 -8 +171 +181 +9 Block -false +true 0 false -case y: Long +=> y.toInt 3 MatchNumbers.scala @@ -78,7 +78,7 @@ covtest.MatchNumbers$ f 69 74 -5 +6 f DefDef false @@ -95,7 +95,7 @@ covtest.MatchNumbers$ 185 189 -10 +11 f Apply false @@ -112,7 +112,7 @@ covtest.MatchNumbers$ 192 197 -11 +12 f Apply false diff --git a/tests/coverage/pos/PolymorphicExtensions.scoverage.check b/tests/coverage/pos/PolymorphicExtensions.scoverage.check index 495ddf037d3e..33be52244cdf 100644 --- a/tests/coverage/pos/PolymorphicExtensions.scoverage.check +++ b/tests/coverage/pos/PolymorphicExtensions.scoverage.check @@ -27,7 +27,7 @@ covtest.PolyExt$ foo 61 68 -4 +5 foo DefDef false @@ -44,7 +44,7 @@ covtest.PolyExt$ get 114 121 -7 +8 get DefDef false @@ -61,7 +61,7 @@ covtest.PolyExt$ tap 170 173 -8 +9 ??? 
Ident false @@ -78,7 +78,7 @@ covtest.PolyExt$ tap 139 146 -8 +9 tap DefDef false @@ -95,7 +95,7 @@ covtest.PolyExt$ 177 189 -10 +11 foo Apply false @@ -112,7 +112,7 @@ covtest.PolyExt$ 177 186 -10 +11 foo Apply false @@ -129,7 +129,7 @@ covtest.PolyExt$ 277 287 -11 +12 get Apply false @@ -146,7 +146,7 @@ covtest.PolyExt$ foo 370 377 -13 +14 foo DefDef false @@ -163,7 +163,7 @@ covtest.PolyExt$ bar 405 421 -14 +15 tap Apply false @@ -180,7 +180,7 @@ covtest.PolyExt$ bar 405 412 -14 +15 tap Apply false @@ -197,7 +197,7 @@ covtest.PolyExt$ bar 405 408 -14 +15 foo Ident false @@ -214,7 +214,7 @@ covtest.PolyExt$ $anonfun 413 420 -14 +15 println Apply false @@ -231,7 +231,7 @@ covtest.PolyExt$ bar 390 397 -14 +15 bar DefDef false diff --git a/tests/coverage/pos/PolymorphicMethods.scoverage.check b/tests/coverage/pos/PolymorphicMethods.scoverage.check index 5bcfe254ffe2..ae6413869f06 100644 --- a/tests/coverage/pos/PolymorphicMethods.scoverage.check +++ b/tests/coverage/pos/PolymorphicMethods.scoverage.check @@ -27,7 +27,7 @@ covtest.PolyMeth$ f 36 41 -3 +4 f DefDef false @@ -44,7 +44,7 @@ covtest.PolyMeth$ 60 69 -4 +5 f Apply false @@ -61,7 +61,7 @@ covtest.PolyMeth$ 147 170 -6 +7 f Apply false @@ -78,7 +78,7 @@ covtest.PolyMeth$ 147 158 -6 +7 Apply false @@ -95,7 +95,7 @@ covtest.C f 187 192 -9 +10 f DefDef false diff --git a/tests/coverage/pos/Select.scoverage.check b/tests/coverage/pos/Select.scoverage.check index 183ba7395de0..5300716cae90 100644 --- a/tests/coverage/pos/Select.scoverage.check +++ b/tests/coverage/pos/Select.scoverage.check @@ -27,7 +27,7 @@ covtest.A print 82 94 -6 +7 println Apply false @@ -44,7 +44,7 @@ covtest.A print 68 77 -6 +7 print DefDef false @@ -61,7 +61,7 @@ covtest.A instance 97 109 -7 +8 instance DefDef false @@ -78,7 +78,7 @@ covtest.B print 166 179 -11 +12 print Apply false @@ -95,7 +95,7 @@ covtest.B print 184 206 -12 +13 println Apply false @@ -112,7 +112,7 @@ covtest.B print 192 205 -12 +13 instance Select false @@ -129,7 +129,7 @@ covtest.B print 139 157 -10 +11 print DefDef false @@ -146,7 +146,7 @@ covtest.Select$package$ test 237 240 -15 +16 Apply false @@ -163,7 +163,7 @@ covtest.Select$package$ test 254 259 -16 +17 Apply false @@ -180,7 +180,7 @@ covtest.Select$package$ test 263 281 -18 +19 print Apply false @@ -197,7 +197,7 @@ covtest.Select$package$ test 263 273 -18 +19 instance Select false @@ -214,7 +214,7 @@ covtest.Select$package$ test 345 354 -19 +20 print Apply false @@ -231,7 +231,7 @@ covtest.Select$package$ test 208 216 -14 +15 test DefDef false diff --git a/tests/coverage/pos/SimpleMethods.scoverage.check b/tests/coverage/pos/SimpleMethods.scoverage.check index dc68258f9a66..067bd744177b 100644 --- a/tests/coverage/pos/SimpleMethods.scoverage.check +++ b/tests/coverage/pos/SimpleMethods.scoverage.check @@ -27,7 +27,7 @@ covtest.C a 28 33 -3 +4 a DefDef false @@ -44,7 +44,7 @@ covtest.C b 46 51 -4 +5 b DefDef false @@ -61,7 +61,7 @@ covtest.C c 69 74 -5 +6 c DefDef false @@ -78,7 +78,7 @@ covtest.C d 88 93 -6 +7 d DefDef false @@ -95,7 +95,7 @@ covtest.C e 106 111 -7 +8 e DefDef false @@ -112,7 +112,7 @@ covtest.C block 128 137 -9 +10 block DefDef false @@ -129,7 +129,7 @@ covtest.C cond 206 210 -14 +15 Literal true @@ -146,7 +146,7 @@ covtest.C cond 220 225 -15 +16 Literal true @@ -163,7 +163,7 @@ covtest.C cond 168 176 -13 +14 cond DefDef false @@ -180,7 +180,7 @@ covtest.C partialCond 271 273 -18 +19 Literal true @@ -195,9 +195,26 @@ C Class covtest.C partialCond +273 +273 +19 + +Literal +true +0 +false + + +11 +SimpleMethods.scala 
+covtest +C +Class +covtest.C +partialCond 229 244 -17 +18 partialCond DefDef false @@ -205,7 +222,7 @@ false false def partialCond -11 +12 SimpleMethods.scala covtest C @@ -214,7 +231,7 @@ covtest.C new1 277 285 -20 +21 new1 DefDef false @@ -222,7 +239,7 @@ false false def new1 -12 +13 SimpleMethods.scala covtest C @@ -231,7 +248,7 @@ covtest.C tryCatch 330 332 -23 +24 Literal true @@ -239,24 +256,24 @@ true false () -13 +14 SimpleMethods.scala covtest C Class covtest.C tryCatch -349 -366 -25 +367 +371 +26 Block -false +true 0 false -case e: Exception +=> 1 -14 +15 SimpleMethods.scala covtest C @@ -265,7 +282,7 @@ covtest.C tryCatch 301 313 -22 +23 tryCatch DefDef false diff --git a/tests/coverage/pos/StructuralTypes.scoverage.check b/tests/coverage/pos/StructuralTypes.scoverage.check index 6108c83b08d6..2e897f6e8016 100644 --- a/tests/coverage/pos/StructuralTypes.scoverage.check +++ b/tests/coverage/pos/StructuralTypes.scoverage.check @@ -27,7 +27,7 @@ covtest.Record selectDynamic 148 172 -5 +6 find Apply false @@ -41,27 +41,10 @@ covtest Record Class covtest.Record -$anonfun -159 -163 -5 -_1 -Select -false -0 -false -_._1 - -2 -StructuralTypes.scala -covtest -Record -Class -covtest.Record selectDynamic 148 176 -5 +6 get Select false @@ -69,24 +52,7 @@ false false elems.find(_._1 == name).get -3 -StructuralTypes.scala -covtest -Record -Class -covtest.Record -selectDynamic -148 -179 -5 -_2 -Select -false -0 -false -elems.find(_._1 == name).get._2 - -4 +2 StructuralTypes.scala covtest Record @@ -95,7 +61,7 @@ covtest.Record selectDynamic 109 126 -5 +6 selectDynamic DefDef false @@ -103,7 +69,7 @@ false false def selectDynamic -5 +3 StructuralTypes.scala covtest StructuralTypes$ @@ -112,7 +78,7 @@ covtest.StructuralTypes$ test 277 293 -12 +13 -> Apply false @@ -120,7 +86,7 @@ false false "name" -> "Emma" -6 +4 StructuralTypes.scala covtest StructuralTypes$ @@ -129,7 +95,7 @@ covtest.StructuralTypes$ test 295 306 -12 +13 -> Apply false @@ -137,7 +103,7 @@ false false "age" -> 42 -7 +5 StructuralTypes.scala covtest StructuralTypes$ @@ -146,7 +112,7 @@ covtest.StructuralTypes$ test 333 344 -13 +14 selectDynamic Apply false @@ -154,7 +120,7 @@ false false person.name -8 +6 StructuralTypes.scala covtest StructuralTypes$ @@ -163,7 +129,7 @@ covtest.StructuralTypes$ test 234 242 -11 +12 test DefDef false diff --git a/tests/coverage/pos/TypeLambdas.scoverage.check b/tests/coverage/pos/TypeLambdas.scoverage.check index 4085c3e41f18..091a5cf4da57 100644 --- a/tests/coverage/pos/TypeLambdas.scoverage.check +++ b/tests/coverage/pos/TypeLambdas.scoverage.check @@ -27,7 +27,7 @@ covtest.TypeLambdas$ test 306 319 -13 +14 apply Apply false @@ -42,26 +42,9 @@ TypeLambdas$ Object covtest.TypeLambdas$ test -306 -309 -13 -Map -Ident -false -0 -false -Map - -2 -TypeLambdas.scala -covtest -TypeLambdas$ -Object -covtest.TypeLambdas$ -test 310 318 -13 +14 -> Apply false @@ -69,7 +52,7 @@ false false 1 -> "1" -3 +2 TypeLambdas.scala covtest TypeLambdas$ @@ -78,7 +61,7 @@ covtest.TypeLambdas$ test 324 334 -14 +15 println Apply false @@ -86,7 +69,7 @@ false false println(m) -4 +3 TypeLambdas.scala covtest TypeLambdas$ @@ -95,7 +78,7 @@ covtest.TypeLambdas$ test 382 396 -17 +18 println Apply false @@ -103,7 +86,7 @@ false false println(tuple) -5 +4 TypeLambdas.scala covtest TypeLambdas$ @@ -112,7 +95,7 @@ covtest.TypeLambdas$ test 259 267 -12 +13 test DefDef false diff --git a/tests/coverage/pos/i16502.scala b/tests/coverage/pos/i16502.scala new file mode 100644 index 000000000000..bd7847138c3f --- /dev/null +++ 
b/tests/coverage/pos/i16502.scala @@ -0,0 +1,8 @@ +import scala.concurrent.* + +def asyncSum: ExecutionContext ?=> Future[Int] = Future(1) + +@main +def Test(): Unit = + import scala.concurrent.ExecutionContext.Implicits.global + asyncSum diff --git a/tests/coverage/pos/i16502.scoverage.check b/tests/coverage/pos/i16502.scoverage.check new file mode 100644 index 000000000000..4bc50c0f91c3 --- /dev/null +++ b/tests/coverage/pos/i16502.scoverage.check @@ -0,0 +1,88 @@ +# Coverage data, format version: 3.0 +# Statement data: +# - id +# - source path +# - package name +# - class name +# - class type (Class, Object or Trait) +# - full class name +# - method name +# - start offset +# - end offset +# - line number +# - symbol name +# - tree name +# - is branch +# - invocations count +# - is ignored +# - description (can be multi-line) +# ' ' sign +# ------------------------------------------ +0 +i16502.scala + +i16502$package$ +Object +.i16502$package$ +$anonfun +76 +85 +3 +apply +Apply +false +0 +false +Future(1) + +1 +i16502.scala + +i16502$package$ +Object +.i16502$package$ +asyncSum +27 +39 +3 +asyncSum +DefDef +false +0 +false +def asyncSum + +2 +i16502.scala + +i16502$package$ +Object +.i16502$package$ +Test +174 +182 +8 +apply +Apply +false +0 +false +asyncSum + +3 +i16502.scala + +i16502$package$ +Object +.i16502$package$ +Test +87 +101 +6 +Test +DefDef +false +0 +false +@main\ndef Test + diff --git a/tests/coverage/pos/scoverage-samples-case.scala b/tests/coverage/pos/scoverage-samples-case.scala new file mode 100644 index 000000000000..78f13da70401 --- /dev/null +++ b/tests/coverage/pos/scoverage-samples-case.scala @@ -0,0 +1,28 @@ +// minimized example from sbt-scoverage-samples +package org.scoverage.samples + +import scala.concurrent.duration._ +import scala.concurrent.ExecutionContext.Implicits.global + +case object StartService +case object StopService + +class PriceEngine() { + + var cancellable: String = _ + + cancellable = "abc" + + def receive: Any => Unit = { + case StartService => + stop() + + case StopService => + stop() + } + + def stop(): Unit = { + if (cancellable != null) + println("stop") + } +} diff --git a/tests/coverage/pos/scoverage-samples-case.scoverage.check b/tests/coverage/pos/scoverage-samples-case.scoverage.check new file mode 100644 index 000000000000..4b67fa77541c --- /dev/null +++ b/tests/coverage/pos/scoverage-samples-case.scoverage.check @@ -0,0 +1,173 @@ +# Coverage data, format version: 3.0 +# Statement data: +# - id +# - source path +# - package name +# - class name +# - class type (Class, Object or Trait) +# - full class name +# - method name +# - start offset +# - end offset +# - line number +# - symbol name +# - tree name +# - is branch +# - invocations count +# - is ignored +# - description (can be multi-line) +# ' ' sign +# ------------------------------------------ +0 +scoverage-samples-case.scala +org.scoverage.samples +PriceEngine +Class +org.scoverage.samples.PriceEngine +$anonfun +362 +368 +18 +stop +Apply +false +0 +false +stop() + +1 +scoverage-samples-case.scala +org.scoverage.samples +PriceEngine +Class +org.scoverage.samples.PriceEngine +$anonfun +353 +368 +17 + +Block +true +0 +false +=>\n stop() + +2 +scoverage-samples-case.scala +org.scoverage.samples +PriceEngine +Class +org.scoverage.samples.PriceEngine +$anonfun +400 +406 +21 +stop +Apply +false +0 +false +stop() + +3 +scoverage-samples-case.scala +org.scoverage.samples +PriceEngine +Class +org.scoverage.samples.PriceEngine +$anonfun +391 +406 +20 + +Block +true +0 +false +=>\n 
stop() + +4 +scoverage-samples-case.scala +org.scoverage.samples +PriceEngine +Class +org.scoverage.samples.PriceEngine +receive +302 +313 +16 +receive +DefDef +false +0 +false +def receive + +5 +scoverage-samples-case.scala +org.scoverage.samples +PriceEngine +Class +org.scoverage.samples.PriceEngine +stop +470 +485 +26 +println +Apply +false +0 +false +println("stop") + +6 +scoverage-samples-case.scala +org.scoverage.samples +PriceEngine +Class +org.scoverage.samples.PriceEngine +stop +470 +485 +26 +println +Apply +true +0 +false +println("stop") + +7 +scoverage-samples-case.scala +org.scoverage.samples +PriceEngine +Class +org.scoverage.samples.PriceEngine +stop +485 +485 +26 + +Literal +true +0 +false + + +8 +scoverage-samples-case.scala +org.scoverage.samples +PriceEngine +Class +org.scoverage.samples.PriceEngine +stop +414 +422 +24 +stop +DefDef +false +0 +false +def stop + diff --git a/tests/coverage/pos/scoverage-samples-implicit-class.scala b/tests/coverage/pos/scoverage-samples-implicit-class.scala new file mode 100644 index 000000000000..98a0e0976b89 --- /dev/null +++ b/tests/coverage/pos/scoverage-samples-implicit-class.scala @@ -0,0 +1,14 @@ +// minimized example from sbt-scoverage-samples +package org.scoverage.samples + +implicit class StringOpssssss(s: String) { + def ! (str: String): Unit = println(s + "!" + str) +} + +class CreditEngine { + def receive: Int => Unit = { req => + if (req < 2000) + "if 1" ! "xd" + else println("else 1") + } +} diff --git a/tests/coverage/pos/scoverage-samples-implicit-class.scoverage.check b/tests/coverage/pos/scoverage-samples-implicit-class.scoverage.check new file mode 100644 index 000000000000..d44faf848942 --- /dev/null +++ b/tests/coverage/pos/scoverage-samples-implicit-class.scoverage.check @@ -0,0 +1,207 @@ +# Coverage data, format version: 3.0 +# Statement data: +# - id +# - source path +# - package name +# - class name +# - class type (Class, Object or Trait) +# - full class name +# - method name +# - start offset +# - end offset +# - line number +# - symbol name +# - tree name +# - is branch +# - invocations count +# - is ignored +# - description (can be multi-line) +# ' ' sign +# ------------------------------------------ +0 +scoverage-samples-implicit-class.scala +org.scoverage.samples +CreditEngine +Class +org.scoverage.samples.CreditEngine +$anonfun +263 +276 +11 +! +Apply +false +0 +false +"if 1" ! "xd" + +1 +scoverage-samples-implicit-class.scala +org.scoverage.samples +CreditEngine +Class +org.scoverage.samples.CreditEngine +$anonfun +263 +269 +11 +StringOpssssss +Apply +false +0 +false +"if 1" + +2 +scoverage-samples-implicit-class.scala +org.scoverage.samples +CreditEngine +Class +org.scoverage.samples.CreditEngine +$anonfun +263 +276 +11 +! +Apply +true +0 +false +"if 1" ! "xd" + +3 +scoverage-samples-implicit-class.scala +org.scoverage.samples +CreditEngine +Class +org.scoverage.samples.CreditEngine +$anonfun +286 +303 +12 +println +Apply +false +0 +false +println("else 1") + +4 +scoverage-samples-implicit-class.scala +org.scoverage.samples +CreditEngine +Class +org.scoverage.samples.CreditEngine +$anonfun +286 +303 +12 +println +Apply +true +0 +false +println("else 1") + +5 +scoverage-samples-implicit-class.scala +org.scoverage.samples +CreditEngine +Class +org.scoverage.samples.CreditEngine +receive +201 +212 +9 +receive +DefDef +false +0 +false +def receive + +6 +scoverage-samples-implicit-class.scala +org.scoverage.samples +StringOpssssss +Class +org.scoverage.samples.StringOpssssss +! 
+152 +174 +5 +println +Apply +false +0 +false +println(s + "!" + str) + +7 +scoverage-samples-implicit-class.scala +org.scoverage.samples +StringOpssssss +Class +org.scoverage.samples.StringOpssssss +! +160 +173 +5 ++ +Apply +false +0 +false +s + "!" + str + +8 +scoverage-samples-implicit-class.scala +org.scoverage.samples +StringOpssssss +Class +org.scoverage.samples.StringOpssssss +! +160 +167 +5 ++ +Apply +false +0 +false +s + "!" + +9 +scoverage-samples-implicit-class.scala +org.scoverage.samples +StringOpssssss +Class +org.scoverage.samples.StringOpssssss +! +124 +129 +5 +! +DefDef +false +0 +false +def ! + +10 +scoverage-samples-implicit-class.scala +org.scoverage.samples +scoverage-samples-implicit-class$package$ +Object +org.scoverage.samples.scoverage-samples-implicit-class$package$ +StringOpssssss +79 +108 +4 +StringOpssssss +DefDef +false +0 +false +implicit class StringOpssssss + diff --git a/tests/coverage/run/currying/test.scoverage.check b/tests/coverage/run/currying/test.scoverage.check index 591bf44c17fd..183e42fba2d5 100644 --- a/tests/coverage/run/currying/test.scoverage.check +++ b/tests/coverage/run/currying/test.scoverage.check @@ -27,7 +27,7 @@ Object f1 15 21 -1 +2 f1 DefDef false @@ -44,7 +44,7 @@ Object f2 56 62 -2 +3 f2 DefDef false @@ -61,7 +61,7 @@ Object g1 114 120 -5 +6 g1 DefDef false @@ -78,7 +78,7 @@ Object g2 175 181 -8 +9 g2 DefDef false @@ -95,7 +95,7 @@ Object main 277 297 -11 +12 println Apply false @@ -112,7 +112,7 @@ Object main 285 296 -11 +12 f1 Apply false @@ -129,7 +129,7 @@ Object main 302 322 -12 +13 println Apply false @@ -146,7 +146,7 @@ Object main 310 321 -12 +13 apply Apply false @@ -163,7 +163,7 @@ Object main 310 318 -12 +13 apply Apply false @@ -180,7 +180,7 @@ Object main 310 315 -12 +13 apply Apply false @@ -197,7 +197,7 @@ Object main 310 312 -12 +13 f2 Ident false @@ -214,7 +214,7 @@ Object main 327 365 -13 +14 println Apply false @@ -231,7 +231,7 @@ Object main 335 364 -13 +14 apply Apply false @@ -248,7 +248,7 @@ Object main 370 408 -14 +15 println Apply false @@ -265,7 +265,7 @@ Object main 378 407 -14 +15 g2 Apply false @@ -282,7 +282,7 @@ Object main 235 243 -10 +11 main DefDef false diff --git a/tests/coverage/run/erased/test.scoverage.check b/tests/coverage/run/erased/test.scoverage.check index f31c1a2418a9..979d88ecd78f 100644 --- a/tests/coverage/run/erased/test.scoverage.check +++ b/tests/coverage/run/erased/test.scoverage.check @@ -27,7 +27,7 @@ Object foo 181 203 -6 +7 println Apply false @@ -44,7 +44,7 @@ Object foo 189 202 -6 +7 s Apply false @@ -61,7 +61,7 @@ Object foo 132 139 -5 +6 foo DefDef false @@ -78,7 +78,7 @@ Object identity 245 269 -10 +11 println Apply false @@ -95,7 +95,7 @@ Object identity 253 268 -10 +11 s Apply false @@ -112,7 +112,7 @@ Object identity 209 221 -9 +10 identity DefDef false @@ -129,7 +129,7 @@ Object Test 300 323 -15 +16 foo Apply false @@ -146,7 +146,7 @@ Object Test 326 342 -16 +17 foo Apply false @@ -163,7 +163,7 @@ Object Test 345 374 -17 +18 foo Apply false @@ -180,7 +180,7 @@ Object Test 357 373 -17 +18 identity Apply false @@ -197,7 +197,7 @@ Object Test 275 289 -14 +15 Test DefDef false diff --git a/tests/coverage/run/extend-case-class/test.scoverage.check b/tests/coverage/run/extend-case-class/test.scoverage.check index 69da960e4f6a..891f4ac198ca 100644 --- a/tests/coverage/run/extend-case-class/test.scoverage.check +++ b/tests/coverage/run/extend-case-class/test.scoverage.check @@ -27,7 +27,7 @@ Object Test 282 303 -8 +9 println Apply false @@ -44,7 +44,7 @@ Object Test 306 337 
-9 +10 println Apply false @@ -61,7 +61,7 @@ Object Test 206 220 -6 +7 Test DefDef false diff --git a/tests/coverage/run/i16940/i16940.scala b/tests/coverage/run/i16940/i16940.scala new file mode 100644 index 000000000000..9d1c80b1a5cd --- /dev/null +++ b/tests/coverage/run/i16940/i16940.scala @@ -0,0 +1,27 @@ +import concurrent.ExecutionContext.Implicits.global +import scala.concurrent.* +import scala.concurrent.duration.* + +var test = 0 + +def brokenSynchronizedBlock(option: Boolean): Future[Unit] = Future { + if (option) { + Thread.sleep(500) + } + synchronized { + val tmp = test + Thread.sleep(1000) + test = tmp + 1 + } +} + +object Test extends App { + Await.result( + Future.sequence(Seq(brokenSynchronizedBlock(false), brokenSynchronizedBlock(true))) + .map { result => + println(test) + assert(test == 2) + }, + 3.seconds + ) +} diff --git a/tests/coverage/run/i16940/i16940.scoverage.check b/tests/coverage/run/i16940/i16940.scoverage.check new file mode 100644 index 000000000000..f8ff25537b0f --- /dev/null +++ b/tests/coverage/run/i16940/i16940.scoverage.check @@ -0,0 +1,326 @@ +# Coverage data, format version: 3.0 +# Statement data: +# - id +# - source path +# - package name +# - class name +# - class type (Class, Object or Trait) +# - full class name +# - method name +# - start offset +# - end offset +# - line number +# - symbol name +# - tree name +# - is branch +# - invocations count +# - is ignored +# - description (can be multi-line) +# ' ' sign +# ------------------------------------------ +0 +i16940/i16940.scala + +Test$ +Object +.Test$ + +353 +552 +19 +result +Apply +false +0 +false +Await.result(\n Future.sequence(Seq(brokenSynchronizedBlock(false), brokenSynchronizedBlock(true)))\n .map { result =>\n println(test)\n assert(test == 2)\n },\n 3.seconds\n ) + +1 +i16940/i16940.scala + +Test$ +Object +.Test$ + +371 +533 +20 +map +Apply +false +0 +false +Future.sequence(Seq(brokenSynchronizedBlock(false), brokenSynchronizedBlock(true)))\n .map { result =>\n println(test)\n assert(test == 2)\n } + +2 +i16940/i16940.scala + +Test$ +Object +.Test$ + +371 +454 +20 +sequence +Apply +false +0 +false +Future.sequence(Seq(brokenSynchronizedBlock(false), brokenSynchronizedBlock(true))) + +3 +i16940/i16940.scala + +Test$ +Object +.Test$ + +387 +453 +20 +apply +Apply +false +0 +false +Seq(brokenSynchronizedBlock(false), brokenSynchronizedBlock(true)) + +4 +i16940/i16940.scala + +Test$ +Object +.Test$ + +391 +421 +20 +brokenSynchronizedBlock +Apply +false +0 +false +brokenSynchronizedBlock(false) + +5 +i16940/i16940.scala + +Test$ +Object +.Test$ + +423 +452 +20 +brokenSynchronizedBlock +Apply +false +0 +false +brokenSynchronizedBlock(true) + +6 +i16940/i16940.scala + +Test$ +Object +.Test$ +$anonfun +486 +499 +22 +println +Apply +false +0 +false +println(test) + +7 +i16940/i16940.scala + +Test$ +Object +.Test$ +$anonfun +508 +525 +23 +assertFailed +Apply +false +0 +false +assert(test == 2) + +8 +i16940/i16940.scala + +Test$ +Object +.Test$ +$anonfun +508 +525 +23 +assertFailed +Apply +true +0 +false +assert(test == 2) + +9 +i16940/i16940.scala + +Test$ +Object +.Test$ +$anonfun +508 +525 +23 + +Literal +true +0 +false +assert(test == 2) + +10 +i16940/i16940.scala + +Test$ +Object +.Test$ + +539 +548 +25 +seconds +Select +false +0 +false +3.seconds + +11 +i16940/i16940.scala + +i16940$package$ +Object +.i16940$package$ +brokenSynchronizedBlock +189 +323 +7 +apply +Apply +false +0 +false +Future {\n if (option) {\n Thread.sleep(500)\n }\n synchronized {\n val tmp = test\n 
Thread.sleep(1000)\n test = tmp + 1\n }\n} + +12 +i16940/i16940.scala + +i16940$package$ +Object +.i16940$package$ +brokenSynchronizedBlock +218 +235 +9 +sleep +Apply +false +0 +false +Thread.sleep(500) + +13 +i16940/i16940.scala + +i16940$package$ +Object +.i16940$package$ +brokenSynchronizedBlock +212 +239 +8 + +Block +true +0 +false +{\n Thread.sleep(500)\n } + +14 +i16940/i16940.scala + +i16940$package$ +Object +.i16940$package$ +brokenSynchronizedBlock +239 +239 +10 + +Literal +true +0 +false + + +15 +i16940/i16940.scala + +i16940$package$ +Object +.i16940$package$ +brokenSynchronizedBlock +242 +321 +11 +synchronized +Apply +false +0 +false +synchronized {\n val tmp = test\n Thread.sleep(1000)\n test = tmp + 1\n } + +16 +i16940/i16940.scala + +i16940$package$ +Object +.i16940$package$ +brokenSynchronizedBlock +280 +298 +13 +sleep +Apply +false +0 +false +Thread.sleep(1000) + +17 +i16940/i16940.scala + +i16940$package$ +Object +.i16940$package$ +brokenSynchronizedBlock +128 +155 +7 +brokenSynchronizedBlock +DefDef +false +0 +false +def brokenSynchronizedBlock + diff --git a/tests/coverage/run/i18233-min/i18233-min.scala b/tests/coverage/run/i18233-min/i18233-min.scala new file mode 100644 index 000000000000..67ec5824f57e --- /dev/null +++ b/tests/coverage/run/i18233-min/i18233-min.scala @@ -0,0 +1,13 @@ +def aList = + List(Array[String]()*) + +def arr = + Array("abc", "def") + +def anotherList = + List(arr*) + +object Test extends App { + println(aList) + println(anotherList) +} diff --git a/tests/coverage/run/i18233-min/i18233-min.scoverage.check b/tests/coverage/run/i18233-min/i18233-min.scoverage.check new file mode 100644 index 000000000000..7c9144c289bc --- /dev/null +++ b/tests/coverage/run/i18233-min/i18233-min.scoverage.check @@ -0,0 +1,224 @@ +# Coverage data, format version: 3.0 +# Statement data: +# - id +# - source path +# - package name +# - class name +# - class type (Class, Object or Trait) +# - full class name +# - method name +# - start offset +# - end offset +# - line number +# - symbol name +# - tree name +# - is branch +# - invocations count +# - is ignored +# - description (can be multi-line) +# ' ' sign +# ------------------------------------------ +0 +i18233-min/i18233-min.scala + +Test$ +Object +.Test$ + +131 +145 +11 +println +Apply +false +0 +false +println(aList) + +1 +i18233-min/i18233-min.scala + +Test$ +Object +.Test$ + +139 +144 +11 +aList +Ident +false +0 +false +aList + +2 +i18233-min/i18233-min.scala + +Test$ +Object +.Test$ + +148 +168 +12 +println +Apply +false +0 +false +println(anotherList) + +3 +i18233-min/i18233-min.scala + +Test$ +Object +.Test$ + +156 +167 +12 +anotherList +Ident +false +0 +false +anotherList + +4 +i18233-min/i18233-min.scala + +i18233-min$package$ +Object +.i18233-min$package$ +aList +14 +36 +2 +apply +Apply +false +0 +false +List(Array[String]()*) + +5 +i18233-min/i18233-min.scala + +i18233-min$package$ +Object +.i18233-min$package$ +aList +19 +34 +2 +apply +Apply +false +0 +false +Array[String]() + +6 +i18233-min/i18233-min.scala + +i18233-min$package$ +Object +.i18233-min$package$ +aList +0 +9 +1 +aList +DefDef +false +0 +false +def aList + +7 +i18233-min/i18233-min.scala + +i18233-min$package$ +Object +.i18233-min$package$ +arr +50 +69 +5 +apply +Apply +false +0 +false +Array("abc", "def") + +8 +i18233-min/i18233-min.scala + +i18233-min$package$ +Object +.i18233-min$package$ +arr +38 +45 +4 +arr +DefDef +false +0 +false +def arr + +9 +i18233-min/i18233-min.scala + +i18233-min$package$ +Object +.i18233-min$package$ 
+anotherList +91 +101 +8 +apply +Apply +false +0 +false +List(arr*) + +10 +i18233-min/i18233-min.scala + +i18233-min$package$ +Object +.i18233-min$package$ +anotherList +96 +99 +8 +arr +Ident +false +0 +false +arr + +11 +i18233-min/i18233-min.scala + +i18233-min$package$ +Object +.i18233-min$package$ +anotherList +71 +86 +7 +anotherList +DefDef +false +0 +false +def anotherList + diff --git a/tests/coverage/run/i18233/i18233.scala b/tests/coverage/run/i18233/i18233.scala new file mode 100644 index 000000000000..e3978d90ca38 --- /dev/null +++ b/tests/coverage/run/i18233/i18233.scala @@ -0,0 +1,9 @@ +enum Foo: + case Bar, Baz + +object Foo: + def render = List(values.tail*).mkString + +object Test extends App { + println(Foo.render) +} diff --git a/tests/coverage/run/i18233/i18233.scoverage.check b/tests/coverage/run/i18233/i18233.scoverage.check new file mode 100644 index 000000000000..e632292e782e --- /dev/null +++ b/tests/coverage/run/i18233/i18233.scoverage.check @@ -0,0 +1,139 @@ +# Coverage data, format version: 3.0 +# Statement data: +# - id +# - source path +# - package name +# - class name +# - class type (Class, Object or Trait) +# - full class name +# - method name +# - start offset +# - end offset +# - line number +# - symbol name +# - tree name +# - is branch +# - invocations count +# - is ignored +# - description (can be multi-line) +# ' ' sign +# ------------------------------------------ +0 +i18233/i18233.scala + +Foo$ +Object +.Foo$ +render +54 +72 +5 +apply +Apply +false +0 +false +List(values.tail*) + +1 +i18233/i18233.scala + +Foo$ +Object +.Foo$ +render +59 +65 +5 +refArrayOps +Apply +false +0 +false +values + +2 +i18233/i18233.scala + +Foo$ +Object +.Foo$ +render +59 +70 +5 +tail +Select +false +0 +false +values.tail + +3 +i18233/i18233.scala + +Foo$ +Object +.Foo$ +render +54 +81 +5 +mkString +Select +false +0 +false +List(values.tail*).mkString + +4 +i18233/i18233.scala + +Foo$ +Object +.Foo$ +render +41 +51 +5 +render +DefDef +false +0 +false +def render + +5 +i18233/i18233.scala + +Test$ +Object +.Test$ + +111 +130 +8 +println +Apply +false +0 +false +println(Foo.render) + +6 +i18233/i18233.scala + +Test$ +Object +.Test$ + +119 +129 +8 +render +Select +false +0 +false +Foo.render + diff --git a/tests/coverage/run/inheritance/test.scoverage.check b/tests/coverage/run/inheritance/test.scoverage.check index 4b75764fcef2..d4ca12879ea3 100644 --- a/tests/coverage/run/inheritance/test.scoverage.check +++ b/tests/coverage/run/inheritance/test.scoverage.check @@ -27,7 +27,7 @@ Class 84 100 -2 +3 println Apply false @@ -44,7 +44,7 @@ Class 125 131 -3 +4 Apply false @@ -61,7 +61,7 @@ Object Test 161 176 -7 +8 println Apply false @@ -78,7 +78,7 @@ Object Test 169 173 -7 +8 Apply false @@ -95,7 +95,7 @@ Object Test 211 226 -9 +10 println Apply false @@ -112,7 +112,7 @@ Object Test 219 223 -9 +10 Apply false @@ -129,7 +129,7 @@ Object Test 136 150 -6 +7 Test DefDef false diff --git a/tests/coverage/run/inline-def/test.scoverage.check b/tests/coverage/run/inline-def/test.scoverage.check index 784c0a00b62b..f8a852707146 100644 --- a/tests/coverage/run/inline-def/test.scoverage.check +++ b/tests/coverage/run/inline-def/test.scoverage.check @@ -27,7 +27,7 @@ Object Test 225 228 -12 +13 Apply false @@ -44,7 +44,7 @@ Object Test 231 243 -13 +14 println Apply false @@ -61,7 +61,7 @@ Object Test 246 260 -14 +15 println Apply false @@ -78,7 +78,7 @@ Object Test 134 148 -7 +8 toString Apply false @@ -95,7 +95,7 @@ Object Test 263 277 -15 +16 println Apply false @@ -112,7 +112,7 @@ Object 
Test 176 190 -8 +9 toString Apply false @@ -129,7 +129,7 @@ Object Test 295 309 -17 +18 println Apply false @@ -146,7 +146,7 @@ Object Test 303 308 -17 +18 foo Select false @@ -163,7 +163,7 @@ Object Test 192 206 -11 +12 Test DefDef false diff --git a/tests/coverage/run/interpolation/test.scoverage.check b/tests/coverage/run/interpolation/test.scoverage.check index 6b38152cdcc1..b2373483737e 100644 --- a/tests/coverage/run/interpolation/test.scoverage.check +++ b/tests/coverage/run/interpolation/test.scoverage.check @@ -27,7 +27,7 @@ Object simple 60 78 -3 +4 s Apply false @@ -44,7 +44,7 @@ Object simple 68 76 -3 +4 length Apply false @@ -61,7 +61,7 @@ Object simple 16 26 -2 +3 simple DefDef false @@ -78,7 +78,7 @@ Object hexa 113 126 -6 +7 f Apply false @@ -95,7 +95,7 @@ Object hexa 82 90 -5 +6 hexa DefDef false @@ -112,7 +112,7 @@ Object main 195 224 -9 +10 apply Apply false @@ -127,26 +127,9 @@ Test$ Object .Test$ main -195 -199 -9 -List -Ident -false -0 -false -List - -7 -interpolation/test.scala - -Test$ -Object -.Test$ -main 229 278 -10 +11 map Apply false @@ -154,7 +137,7 @@ false false xs.zipWithIndex.map((s, i) => println(s"$i: $s")) -8 +7 interpolation/test.scala Test$ @@ -163,7 +146,7 @@ Object main 229 244 -10 +11 zipWithIndex Select false @@ -171,7 +154,7 @@ false false xs.zipWithIndex -9 +8 interpolation/test.scala Test$ @@ -180,7 +163,7 @@ Object $anonfun 259 277 -10 +11 println Apply false @@ -188,7 +171,7 @@ false false println(s"$i: $s") -10 +9 interpolation/test.scala Test$ @@ -197,7 +180,7 @@ Object $anonfun 267 276 -10 +11 s Apply false @@ -205,7 +188,7 @@ false false s"$i: $s" -11 +10 interpolation/test.scala Test$ @@ -214,7 +197,7 @@ Object main 284 309 -12 +13 println Apply false @@ -222,7 +205,7 @@ false false println(simple(1, "abc")) -12 +11 interpolation/test.scala Test$ @@ -231,7 +214,7 @@ Object main 292 308 -12 +13 simple Apply false @@ -239,7 +222,7 @@ false false simple(1, "abc") -13 +12 interpolation/test.scala Test$ @@ -248,7 +231,7 @@ Object main 314 332 -13 +14 println Apply false @@ -256,7 +239,7 @@ false false println(hexa(127)) -14 +13 interpolation/test.scala Test$ @@ -265,7 +248,7 @@ Object main 322 331 -13 +14 hexa Apply false @@ -273,7 +256,7 @@ false false hexa(127) -15 +14 interpolation/test.scala Test$ @@ -282,7 +265,7 @@ Object main 337 355 -14 +15 println Apply false @@ -290,7 +273,7 @@ false false println(raw"a\\nb") -16 +15 interpolation/test.scala Test$ @@ -299,7 +282,7 @@ Object main 345 354 -14 +15 raw Apply false @@ -307,7 +290,7 @@ false false raw"a\\nb" -17 +16 interpolation/test.scala Test$ @@ -316,7 +299,7 @@ Object main 130 138 -8 +9 main DefDef false diff --git a/tests/coverage/run/java-methods/test.scoverage.check b/tests/coverage/run/java-methods/test.scoverage.check index 7d3752c8db20..e39efe8f3aa8 100644 --- a/tests/coverage/run/java-methods/test.scoverage.check +++ b/tests/coverage/run/java-methods/test.scoverage.check @@ -27,7 +27,7 @@ Object Test 61 83 -4 +5 simple Apply false @@ -44,7 +44,7 @@ Object Test 86 127 -5 +6 withTypeParam Apply false @@ -61,7 +61,7 @@ Object Test 140 152 -6 +7 Apply false @@ -78,7 +78,7 @@ Object Test 155 162 -7 +8 f Apply false @@ -95,7 +95,7 @@ Object Test 165 194 -8 +9 println Apply false @@ -112,7 +112,7 @@ Object Test 173 193 -8 +9 identity Apply false @@ -129,7 +129,7 @@ Object Test 197 211 -9 +10 println Apply false @@ -146,7 +146,7 @@ Object Test 36 50 -3 +4 Test DefDef false diff --git a/tests/coverage/run/lifting-bool/test.scoverage.check 
b/tests/coverage/run/lifting-bool/test.scoverage.check index 9d2a3d0f0162..80d0bfad9c59 100644 --- a/tests/coverage/run/lifting-bool/test.scoverage.check +++ b/tests/coverage/run/lifting-bool/test.scoverage.check @@ -27,7 +27,7 @@ Object notCalled 19 22 -1 +2 ??? Ident false @@ -44,7 +44,7 @@ Object notCalled 1 14 -1 +2 notCalled DefDef false @@ -61,7 +61,7 @@ Object f 24 29 -3 +4 f DefDef false @@ -78,7 +78,7 @@ Object Test 109 120 -7 +8 notCalled Apply false @@ -95,7 +95,7 @@ Object Test 159 170 -8 +9 notCalled Apply false @@ -112,7 +112,7 @@ Object Test 219 230 -9 +10 notCalled Apply false @@ -129,7 +129,7 @@ Object Test 267 278 -10 +11 notCalled Apply false @@ -146,7 +146,7 @@ Object Test 318 329 -11 +12 notCalled Apply false @@ -163,7 +163,7 @@ Object Test 341 367 -12 +13 println Apply false @@ -180,7 +180,7 @@ Object Test 349 366 -12 +13 s Apply false @@ -197,7 +197,7 @@ Object Test 379 393 -14 +15 f Apply false @@ -214,7 +214,7 @@ Object Test 396 406 -15 +16 println Apply false @@ -231,7 +231,7 @@ Object Test 422 466 -17 +18 f Apply false @@ -248,7 +248,7 @@ Object Test 432 443 -17 +18 notCalled Apply false @@ -265,7 +265,7 @@ Object Test 454 465 -17 +18 notCalled Apply false @@ -282,7 +282,7 @@ Object Test 469 479 -18 +19 println Apply false @@ -299,7 +299,7 @@ Object Test 68 82 -6 +7 Test DefDef false diff --git a/tests/coverage/run/lifting/test.scoverage.check b/tests/coverage/run/lifting/test.scoverage.check index 536c5ab0cf1b..0c2ee042e912 100644 --- a/tests/coverage/run/lifting/test.scoverage.check +++ b/tests/coverage/run/lifting/test.scoverage.check @@ -27,7 +27,7 @@ Class 22 29 -1 +2 apply Apply false @@ -42,26 +42,9 @@ Vals Class .Vals -22 -26 -1 -List -Ident -false -0 -false -List - -2 -lifting/test.scala - -Vals -Class -.Vals - 41 57 -2 +3 :: Apply false @@ -69,7 +52,7 @@ false false l :: List(1,2,3) -3 +2 lifting/test.scala Vals @@ -78,7 +61,7 @@ Class 46 57 -2 +3 apply Apply false @@ -86,24 +69,7 @@ false false List(1,2,3) -4 -lifting/test.scala - -Vals -Class -.Vals - -46 -50 -2 -List -Ident -false -0 -false -List - -5 +3 lifting/test.scala A @@ -112,7 +78,7 @@ Class msg 104 136 -5 +6 + Apply false @@ -120,7 +86,7 @@ false false "string" + a + "." + b + "." + c -6 +4 lifting/test.scala A @@ -129,7 +95,7 @@ Class msg 104 132 -5 +6 + Apply false @@ -137,7 +103,7 @@ false false "string" + a + "." + b + "." -7 +5 lifting/test.scala A @@ -146,7 +112,7 @@ Class msg 104 126 -5 +6 + Apply false @@ -154,7 +120,7 @@ false false "string" + a + "." + b -8 +6 lifting/test.scala A @@ -163,7 +129,7 @@ Class msg 104 122 -5 +6 + Apply false @@ -171,7 +137,7 @@ false false "string" + a + "." 
-9 +7 lifting/test.scala A @@ -180,7 +146,7 @@ Class msg 104 116 -5 +6 + Apply false @@ -188,7 +154,7 @@ false false "string" + a -10 +8 lifting/test.scala A @@ -197,7 +163,7 @@ Class msg 70 77 -5 +6 msg DefDef false @@ -205,7 +171,7 @@ false false def msg -11 +9 lifting/test.scala A @@ -214,7 +180,7 @@ Class integer 139 150 -6 +7 integer DefDef false @@ -222,7 +188,7 @@ false false def integer -12 +10 lifting/test.scala A @@ -231,7 +197,7 @@ Class ex 162 168 -7 +8 ex DefDef false @@ -239,7 +205,7 @@ false false def ex -13 +11 lifting/test.scala test$package$ @@ -248,7 +214,7 @@ Object Test 221 224 -11 +12 Apply false @@ -256,7 +222,7 @@ false false A() -14 +12 lifting/test.scala test$package$ @@ -265,7 +231,7 @@ Object f 241 246 -13 +14 f DefDef false @@ -273,7 +239,7 @@ false false def f -15 +13 lifting/test.scala test$package$ @@ -282,7 +248,7 @@ Object Test 264 286 -14 +15 msg Apply false @@ -290,7 +256,7 @@ false false a.msg(i, 0, a.integer) -16 +14 lifting/test.scala test$package$ @@ -299,7 +265,7 @@ Object Test 276 285 -14 +15 integer Select false @@ -307,7 +273,7 @@ false false a.integer -17 +15 lifting/test.scala test$package$ @@ -316,7 +282,7 @@ Object Test 289 299 -15 +16 println Apply false @@ -324,7 +290,7 @@ false false println(x) -18 +16 lifting/test.scala test$package$ @@ -333,7 +299,7 @@ Object Test 306 334 -16 +17 msg Apply false @@ -341,7 +307,7 @@ false false a.ex.msg(i, 0, a.ex.integer) -19 +17 lifting/test.scala test$package$ @@ -350,7 +316,7 @@ Object Test 306 310 -16 +17 ex Select false @@ -358,7 +324,7 @@ false false a.ex -20 +18 lifting/test.scala test$package$ @@ -367,7 +333,7 @@ Object Test 321 325 -16 +17 ex Select false @@ -375,7 +341,7 @@ false false a.ex -21 +19 lifting/test.scala test$package$ @@ -384,7 +350,7 @@ Object Test 321 333 -16 +17 integer Select false @@ -392,7 +358,7 @@ false false a.ex.integer -22 +20 lifting/test.scala test$package$ @@ -401,7 +367,7 @@ Object Test 337 347 -17 +18 println Apply false @@ -409,7 +375,7 @@ false false println(x) -23 +21 lifting/test.scala test$package$ @@ -418,7 +384,7 @@ Object Test 354 370 -18 +19 msg Apply false @@ -426,7 +392,7 @@ false false a.msg(f(), 0, i) -24 +22 lifting/test.scala test$package$ @@ -435,7 +401,7 @@ Object Test 360 363 -18 +19 f Apply false @@ -443,7 +409,7 @@ false false f() -25 +23 lifting/test.scala test$package$ @@ -452,7 +418,7 @@ Object Test 373 383 -19 +20 println Apply false @@ -460,7 +426,7 @@ false false println(x) -26 +24 lifting/test.scala test$package$ @@ -469,7 +435,7 @@ Object Test 188 202 -10 +11 Test DefDef false diff --git a/tests/coverage/run/parameterless/test.scoverage.check b/tests/coverage/run/parameterless/test.scoverage.check index 91a9b1d6597f..6f15f3a5e93f 100644 --- a/tests/coverage/run/parameterless/test.scoverage.check +++ b/tests/coverage/run/parameterless/test.scoverage.check @@ -27,7 +27,7 @@ Object f 32 46 -2 +3 println Apply false @@ -44,7 +44,7 @@ Object f 12 17 -1 +2 f DefDef false @@ -61,7 +61,7 @@ Object g 87 101 -6 +7 println Apply false @@ -78,7 +78,7 @@ Object g 64 69 -5 +6 g DefDef false @@ -95,7 +95,7 @@ Object f 162 174 -12 +13 println Apply false @@ -112,7 +112,7 @@ Object f 142 147 -11 +12 f DefDef false @@ -129,7 +129,7 @@ Object g 213 225 -16 +17 println Apply false @@ -146,7 +146,7 @@ Object g 190 195 -15 +16 g DefDef false @@ -163,7 +163,7 @@ Object Test 249 250 -19 +20 f Ident false @@ -180,7 +180,7 @@ Object Test 261 262 -20 +21 g Ident false @@ -197,7 +197,7 @@ Object Test 265 275 -21 +22 println Apply false @@ -214,7 +214,7 @@ Object 
Test 273 274 -21 +22 f Ident false @@ -231,7 +231,7 @@ Object Test 278 288 -22 +23 println Apply false @@ -248,7 +248,7 @@ Object Test 286 287 -22 +23 g Ident false @@ -265,7 +265,7 @@ Object Test 291 303 -23 +24 println Apply false @@ -282,7 +282,7 @@ Object Test 299 302 -23 +24 f Select false @@ -299,7 +299,7 @@ Object Test 306 318 -24 +25 println Apply false @@ -316,7 +316,7 @@ Object Test 314 317 -24 +25 g Select false @@ -333,7 +333,7 @@ Object Test 117 131 -10 +11 Test DefDef false diff --git a/tests/coverage/run/trait/test.scoverage.check b/tests/coverage/run/trait/test.scoverage.check index 8dbf64238cfa..c5b6123e7e2b 100644 --- a/tests/coverage/run/trait/test.scoverage.check +++ b/tests/coverage/run/trait/test.scoverage.check @@ -27,7 +27,7 @@ Trait x 12 17 -1 +2 x DefDef false @@ -44,7 +44,7 @@ Class 133 140 -7 +8 Apply false @@ -61,7 +61,7 @@ Object Test 170 188 -11 +12 println Apply false @@ -78,7 +78,7 @@ Object Test 178 185 -11 +12 Apply false @@ -95,7 +95,7 @@ Object Test 178 187 -11 +12 x Select false @@ -112,7 +112,7 @@ Object Test 196 214 -12 +13 println Apply false @@ -129,7 +129,7 @@ Object Test 204 211 -12 +13 Apply false @@ -146,7 +146,7 @@ Object Test 225 243 -13 +14 println Apply false @@ -163,7 +163,7 @@ Object Test 233 240 -13 +14 Apply false @@ -180,7 +180,7 @@ Object Test 145 159 -10 +11 Test DefDef false diff --git a/tests/coverage/run/type-apply/test.check b/tests/coverage/run/type-apply/test.check new file mode 100644 index 000000000000..d4686cafe5a4 --- /dev/null +++ b/tests/coverage/run/type-apply/test.check @@ -0,0 +1 @@ +List(List(1), List(2), List(3)) diff --git a/tests/coverage/run/type-apply/test.measurement.check b/tests/coverage/run/type-apply/test.measurement.check new file mode 100644 index 000000000000..f1d6ee365359 --- /dev/null +++ b/tests/coverage/run/type-apply/test.measurement.check @@ -0,0 +1,5 @@ +4 +1 +2 +3 +0 diff --git a/tests/coverage/run/type-apply/test.scala b/tests/coverage/run/type-apply/test.scala new file mode 100644 index 000000000000..704a5953fae2 --- /dev/null +++ b/tests/coverage/run/type-apply/test.scala @@ -0,0 +1,5 @@ +@main +def Test: Unit = + // verifies a problematic case where the TypeApply instrumentation was added to the coverage file, + // but was never marked as invoked + println(List(1,2,3).map(a => List(a))) diff --git a/tests/coverage/run/type-apply/test.scoverage.check b/tests/coverage/run/type-apply/test.scoverage.check new file mode 100644 index 000000000000..6907a0aa516f --- /dev/null +++ b/tests/coverage/run/type-apply/test.scoverage.check @@ -0,0 +1,105 @@ +# Coverage data, format version: 3.0 +# Statement data: +# - id +# - source path +# - package name +# - class name +# - class type (Class, Object or Trait) +# - full class name +# - method name +# - start offset +# - end offset +# - line number +# - symbol name +# - tree name +# - is branch +# - invocations count +# - is ignored +# - description (can be multi-line) +# ' ' sign +# ------------------------------------------ +0 +type-apply/test.scala + +test$package$ +Object +.test$package$ +Test +163 +201 +5 +println +Apply +false +0 +false +println(List(1,2,3).map(a => List(a))) + +1 +type-apply/test.scala + +test$package$ +Object +.test$package$ +Test +171 +200 +5 +map +Apply +false +0 +false +List(1,2,3).map(a => List(a)) + +2 +type-apply/test.scala + +test$package$ +Object +.test$package$ +Test +171 +182 +5 +apply +Apply +false +0 +false +List(1,2,3) + +3 +type-apply/test.scala + +test$package$ +Object +.test$package$ +$anonfun +192 +199 +5 +apply 
+Apply +false +0 +false +List(a) + +4 +type-apply/test.scala + +test$package$ +Object +.test$package$ +Test +0 +14 +2 +Test +DefDef +false +0 +false +@main\ndef Test + diff --git a/tests/coverage/run/varargs/test_1.scoverage.check b/tests/coverage/run/varargs/test_1.scoverage.check index 2c4edea68fcc..5b287586a540 100644 --- a/tests/coverage/run/varargs/test_1.scoverage.check +++ b/tests/coverage/run/varargs/test_1.scoverage.check @@ -27,7 +27,7 @@ Object repeated 48 60 -3 +4 repeated DefDef false @@ -44,7 +44,7 @@ Object f 79 84 -5 +6 f DefDef false @@ -61,7 +61,7 @@ Object Test 120 130 -9 +10 repeated Apply false @@ -78,7 +78,7 @@ Object Test 133 153 -10 +11 repeated Apply false @@ -95,7 +95,7 @@ Object Test 142 147 -10 +11 f Apply false @@ -112,7 +112,7 @@ Object Test 156 178 -11 +12 method Apply false @@ -129,7 +129,7 @@ Object Test 181 205 -12 +13 method Apply false @@ -146,7 +146,7 @@ Object Test 217 248 -14 +15 multiple Apply false @@ -163,7 +163,7 @@ Object Test 251 261 -15 +16 println Apply false @@ -180,7 +180,7 @@ Object Test 268 302 -16 +17 multiple Apply false @@ -197,7 +197,7 @@ Object Test 291 301 -16 +17 f Apply false @@ -214,7 +214,7 @@ Object Test 305 315 -17 +18 println Apply false @@ -231,7 +231,7 @@ Object Test 322 371 -18 +19 multiple Apply false @@ -248,7 +248,7 @@ Object Test 345 355 -18 +19 f Apply false @@ -265,7 +265,7 @@ Object Test 374 384 -19 +20 println Apply false @@ -282,7 +282,7 @@ Object Test 101 115 -8 +9 Test DefDef false diff --git a/tests/disabled/macro/pos/t8013/inpervolated_2.scala b/tests/disabled/macro/pos/t8013/inpervolated_2.scala index 90e571b42c8c..cbe5139cef5a 100644 --- a/tests/disabled/macro/pos/t8013/inpervolated_2.scala +++ b/tests/disabled/macro/pos/t8013/inpervolated_2.scala @@ -1,6 +1,4 @@ -/* - * scalac: -Xfatal-warnings -Xlint - */ +//> using options -Xfatal-warnings -Xlint package t8013 // unsuspecting user of perverse macro diff --git a/tests/explicit-nulls/neg-patmat/match-pat.scala b/tests/explicit-nulls/neg-patmat/match-pat.scala index fb7180d82e34..1b93267d8fe3 100644 --- a/tests/explicit-nulls/neg-patmat/match-pat.scala +++ b/tests/explicit-nulls/neg-patmat/match-pat.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + class Foo { val s: String = ??? 
diff --git a/tests/explicit-nulls/neg/flow-match.scala b/tests/explicit-nulls/neg/flow-match.scala new file mode 100644 index 000000000000..e385758261cd --- /dev/null +++ b/tests/explicit-nulls/neg/flow-match.scala @@ -0,0 +1,15 @@ +// Test flow-typing when NotNullInfos are from cases + +object MatchTest { + def f6(s: String | Null): String = s match { + case s2 => s2 // error + case null => "other" // error + case s3 => s3 + } + + def f7(s: String | Null): String = s match { + case null => "other" + case null => "other" // error + case s3 => s3 + } +} diff --git a/tests/explicit-nulls/neg/i17467.check b/tests/explicit-nulls/neg/i17467.check new file mode 100644 index 000000000000..2dcf20952e7c --- /dev/null +++ b/tests/explicit-nulls/neg/i17467.check @@ -0,0 +1,40 @@ +-- [E007] Type Mismatch Error: tests/explicit-nulls/neg/i17467.scala:4:22 ---------------------------------------------- +4 | val a2: a1.type = null // error + | ^^^^ + | Found: Null + | Required: (a1 : String) + | Note that implicit conversions were not tried because the result of an implicit conversion + | must be more specific than (a1 : String) + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/explicit-nulls/neg/i17467.scala:7:22 ---------------------------------------------- +7 | val b2: b1.type = null // error + | ^^^^ + | Found: Null + | Required: (b1 : String | Null) + | Note that implicit conversions were not tried because the result of an implicit conversion + | must be more specific than (b1 : String | Null) + | + | longer explanation available when compiling with `-explain` +-- [E172] Type Error: tests/explicit-nulls/neg/i17467.scala:8:28 ------------------------------------------------------- +8 | summon[Null <:< b1.type] // error + | ^ + | Cannot prove that Null <:< (b1 : String | Null). +-- [E007] Type Mismatch Error: tests/explicit-nulls/neg/i17467.scala:14:22 --------------------------------------------- +14 | val c2: c1.type = null // error + | ^^^^ + | Found: Null + | Required: (c1 : Null) + | Note that implicit conversions were not tried because the result of an implicit conversion + | must be more specific than (c1 : Null) + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/explicit-nulls/neg/i17467.scala:18:24 --------------------------------------------- +18 | def me: this.type = null // error + | ^^^^ + | Found: Null + | Required: (Bar.this : Test.Bar) + | Note that implicit conversions were not tried because the result of an implicit conversion + | must be more specific than (Bar.this : Test.Bar) + | + | longer explanation available when compiling with `-explain` diff --git a/tests/explicit-nulls/neg/i17467.scala b/tests/explicit-nulls/neg/i17467.scala new file mode 100644 index 000000000000..3b66afcc3606 --- /dev/null +++ b/tests/explicit-nulls/neg/i17467.scala @@ -0,0 +1,19 @@ +object Test: + def test(): Unit = + val a1: String = "foo" + val a2: a1.type = null // error + + val b1: String | Null = "foo" + val b2: b1.type = null // error + summon[Null <:< b1.type] // error + + /* The following would be sound, but it would require a specific subtyping + * rule (and implementation code) for debatable value. So it is an error. 
+ */ + val c1: Null = null + val c2: c1.type = null // error + end test + + class Bar: + def me: this.type = null // error +end Test diff --git a/tests/explicit-nulls/pos/flow-match.scala b/tests/explicit-nulls/pos/flow-match.scala index 260068b3ac3f..57e2c12b3c68 100644 --- a/tests/explicit-nulls/pos/flow-match.scala +++ b/tests/explicit-nulls/pos/flow-match.scala @@ -12,4 +12,47 @@ object MatchTest { // after the null case, s becomes non-nullable case _ => s } + + def f(s: String | Null): String = s match { + case null => "other" + case s2 => s2 + case s3 => s3 + } + + class Foo + + def f2(s: String | Null): String = s match { + case n @ null => "other" + case s2 => s2 + case s3 => s3 + } + + def f3(s: String | Null): String = s match { + case null | "foo" => "other" + case s2 => s2 + case s3 => s3 + } + + def f4(s: String | Null): String = s match { + case _ => "other" + case s2 => s2 + case s3 => s3 + } + + def f5(s: String | Null): String = s match { + case x => "other" + case s2 => s2 + case s3 => s3 + } + + def f6(s: String | Null): String = s match { + case s3: String => s3 + case null => "other" + case s4 => s4 + } + + def f7(s: String | Null): String = s match { + case s2 => s2.nn + case s3 => s3 + } } diff --git a/tests/explicit-nulls/pos/flow-predef-eq.scala b/tests/explicit-nulls/pos/flow-predef-eq.scala new file mode 100644 index 000000000000..25bbf32fef15 --- /dev/null +++ b/tests/explicit-nulls/pos/flow-predef-eq.scala @@ -0,0 +1,7 @@ +def f(s: String|Null): String = { + if(s eq null) "foo" else s +} + +def f2(s: String|Null): String = { + if(s ne null) s else "foo" +} diff --git a/tests/explicit-nulls/pos-special/i14682.scala b/tests/explicit-nulls/pos/i14682.scala similarity index 94% rename from tests/explicit-nulls/pos-special/i14682.scala rename to tests/explicit-nulls/pos/i14682.scala index 0c0619d8105b..318de6094a88 100644 --- a/tests/explicit-nulls/pos-special/i14682.scala +++ b/tests/explicit-nulls/pos/i14682.scala @@ -1,3 +1,5 @@ +//> using options -Ysafe-init + class C1: sealed abstract class Name { type ThisName <: Name diff --git a/tests/explicit-nulls/pos-special/i14947.scala b/tests/explicit-nulls/pos/i14947.scala similarity index 82% rename from tests/explicit-nulls/pos-special/i14947.scala rename to tests/explicit-nulls/pos/i14947.scala index b8f013f325b1..f7a8ff0ff28f 100644 --- a/tests/explicit-nulls/pos-special/i14947.scala +++ b/tests/explicit-nulls/pos/i14947.scala @@ -1,3 +1,5 @@ +//> using options -Ytest-pickler -Xprint-types + class B: def g: String | Null = ??? 
diff --git a/tests/explicit-nulls/pos-separate/interop-enum-src/Day_1.java b/tests/explicit-nulls/pos/interop-enum-src-separate/Day_1.java similarity index 100% rename from tests/explicit-nulls/pos-separate/interop-enum-src/Day_1.java rename to tests/explicit-nulls/pos/interop-enum-src-separate/Day_1.java diff --git a/tests/explicit-nulls/pos-separate/interop-enum-src/Planet_2.java b/tests/explicit-nulls/pos/interop-enum-src-separate/Planet_2.java similarity index 100% rename from tests/explicit-nulls/pos-separate/interop-enum-src/Planet_2.java rename to tests/explicit-nulls/pos/interop-enum-src-separate/Planet_2.java diff --git a/tests/explicit-nulls/pos-separate/interop-enum-src/S_3.scala b/tests/explicit-nulls/pos/interop-enum-src-separate/S_3.scala similarity index 100% rename from tests/explicit-nulls/pos-separate/interop-enum-src/S_3.scala rename to tests/explicit-nulls/pos/interop-enum-src-separate/S_3.scala diff --git a/tests/explicit-nulls/pos/match-flow-typing.scala b/tests/explicit-nulls/pos/match-flow-typing.scala new file mode 100644 index 000000000000..200af36a73e0 --- /dev/null +++ b/tests/explicit-nulls/pos/match-flow-typing.scala @@ -0,0 +1,21 @@ +def m(): String = { + var x: String|Null = "foo" + 1 match { + case 1 => x = x + } + if(x == null) "foo" + else x +} + +def m2(): String = { + var x: String|Null = "foo" + try { + x = x + } catch { + case e => x = x + } finally { + x = x + } + if(x == null) "foo" + else x +} diff --git a/tests/explicit-nulls/pos-patmat/match-pat.scala b/tests/explicit-nulls/pos/match-pat.scala similarity index 93% rename from tests/explicit-nulls/pos-patmat/match-pat.scala rename to tests/explicit-nulls/pos/match-pat.scala index c9a408bde6d4..33013d1865fd 100644 --- a/tests/explicit-nulls/pos-patmat/match-pat.scala +++ b/tests/explicit-nulls/pos/match-pat.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + // Ensure we don't get "the type test for argType cannot be checked at runtime" warning class Symbol { diff --git a/tests/explicit-nulls/pos-separate/notnull/J_2.java b/tests/explicit-nulls/pos/notnull-separate/J_2.java similarity index 100% rename from tests/explicit-nulls/pos-separate/notnull/J_2.java rename to tests/explicit-nulls/pos/notnull-separate/J_2.java diff --git a/tests/explicit-nulls/pos-separate/notnull/Nonnull_1.java b/tests/explicit-nulls/pos/notnull-separate/Nonnull_1.java similarity index 100% rename from tests/explicit-nulls/pos-separate/notnull/Nonnull_1.java rename to tests/explicit-nulls/pos/notnull-separate/Nonnull_1.java diff --git a/tests/explicit-nulls/pos-separate/notnull/S_3.scala b/tests/explicit-nulls/pos/notnull-separate/S_3.scala similarity index 100% rename from tests/explicit-nulls/pos-separate/notnull/S_3.scala rename to tests/explicit-nulls/pos/notnull-separate/S_3.scala diff --git a/tests/explicit-nulls/pos-patmat/unsafe-match-null-pat.scala b/tests/explicit-nulls/pos/unsafe-match-null-pat.scala similarity index 88% rename from tests/explicit-nulls/pos-patmat/unsafe-match-null-pat.scala rename to tests/explicit-nulls/pos/unsafe-match-null-pat.scala index 143a79d1e2f0..ff31642a4027 100644 --- a/tests/explicit-nulls/pos-patmat/unsafe-match-null-pat.scala +++ b/tests/explicit-nulls/pos/unsafe-match-null-pat.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + import scala.language.unsafeNulls def test1 = diff --git a/tests/generic-java-signatures/17069.scala b/tests/generic-java-signatures/17069.scala new file mode 100644 index 000000000000..279a8c88e2db --- /dev/null +++ 
b/tests/generic-java-signatures/17069.scala @@ -0,0 +1,10 @@ + +class Foo: + val generic: List[String] = ??? + +@main def Test = + val tpe = classOf[Foo].getDeclaredField("generic").getGenericType() + assert(tpe.getTypeName == "scala.collection.immutable.List") + + val tpe2 = classOf[Foo].getDeclaredMethod("generic").getGenericReturnType() + assert(tpe2.getTypeName == "scala.collection.immutable.List") diff --git a/tests/init-global/neg/context-sensitivity.scala b/tests/init-global/neg/context-sensitivity.scala new file mode 100755 index 000000000000..626fd41bb43f --- /dev/null +++ b/tests/init-global/neg/context-sensitivity.scala @@ -0,0 +1,17 @@ +trait Foo: + def foo(): Int + +class C(var x: Int) extends Foo { + def foo(): Int = 20 +} + +class D(var y: Int) extends Foo { + def foo(): Int = A.m // error +} + +class Box(var value: Foo) + +object A: + val box1: Box = new Box(new C(5)) + val box2: Box = new Box(new D(10)) + val m: Int = box1.value.foo() diff --git a/tests/init-global/neg/global-cycle1.check b/tests/init-global/neg/global-cycle1.check new file mode 100644 index 000000000000..c68b8b51a419 --- /dev/null +++ b/tests/init-global/neg/global-cycle1.check @@ -0,0 +1,20 @@ +-- Error: tests/init-global/neg/global-cycle1.scala:1:7 ---------------------------------------------------------------- +1 |object A { // error + | ^ + | Cyclic initialization: object A -> object B -> object A. Calling trace: + | ├── object A { // error [ global-cycle1.scala:1 ] + | │ ^ + | ├── val a: Int = B.b [ global-cycle1.scala:2 ] + | │ ^ + | ├── object B { [ global-cycle1.scala:5 ] + | │ ^ + | └── val b: Int = A.a // error [ global-cycle1.scala:6 ] + | ^ +-- Error: tests/init-global/neg/global-cycle1.scala:6:17 --------------------------------------------------------------- +6 | val b: Int = A.a // error + | ^^^ + | Access uninitialized field value a. 
Calling trace: + | ├── object B { [ global-cycle1.scala:5 ] + | │ ^ + | └── val b: Int = A.a // error [ global-cycle1.scala:6 ] + | ^^^ diff --git a/tests/init-global/neg/global-cycle1.scala b/tests/init-global/neg/global-cycle1.scala new file mode 100644 index 000000000000..592f5a652dc8 --- /dev/null +++ b/tests/init-global/neg/global-cycle1.scala @@ -0,0 +1,10 @@ +object A { // error + val a: Int = B.b +} + +object B { + val b: Int = A.a // error +} + +@main +def Test = print(A.a) diff --git a/tests/init-global/neg/global-cycle14.scala b/tests/init-global/neg/global-cycle14.scala new file mode 100644 index 000000000000..bcacbebb74fa --- /dev/null +++ b/tests/init-global/neg/global-cycle14.scala @@ -0,0 +1,14 @@ +object O { + case class Data(x: Int) extends (Int => Int) { + def apply(x: Int) = x * x + } + val d = Data(3) +} + +object A { // error + val n: Int = B.m +} + +object B { + val m: Int = A.n // error +} diff --git a/tests/init-global/neg/global-cycle2.scala b/tests/init-global/neg/global-cycle2.scala new file mode 100644 index 000000000000..d86fe09fb0fa --- /dev/null +++ b/tests/init-global/neg/global-cycle2.scala @@ -0,0 +1,7 @@ +object A { + val a: Int = B.foo() +} + +object B { + def foo(): Int = A.a * 2 // error +} diff --git a/tests/init-global/neg/global-cycle3.scala b/tests/init-global/neg/global-cycle3.scala new file mode 100644 index 000000000000..405e007b044a --- /dev/null +++ b/tests/init-global/neg/global-cycle3.scala @@ -0,0 +1,7 @@ +class A(x: Int) { + def foo(): Int = B.a + 10 // error +} + +object B { + val a: Int = A(4).foo() +} diff --git a/tests/init-global/neg/global-cycle4.scala b/tests/init-global/neg/global-cycle4.scala new file mode 100644 index 000000000000..8c1627afeae4 --- /dev/null +++ b/tests/init-global/neg/global-cycle4.scala @@ -0,0 +1,19 @@ +trait A { + def foo(): Int +} + +class B extends A { + def foo(): Int = 10 +} + +class C extends A { + def foo(): Int = O.a + 10 // error +} + +class D(x: Int) { + def bar(): A = if x > 0 then new B else new C +} + +object O { + val a: Int = D(5).bar().foo() +} diff --git a/tests/init-global/neg/global-cycle5.scala b/tests/init-global/neg/global-cycle5.scala new file mode 100755 index 000000000000..1ba3435d3830 --- /dev/null +++ b/tests/init-global/neg/global-cycle5.scala @@ -0,0 +1,23 @@ +class X { + def foo(): Int = 10 +} + +object A { + var a: X = new X() +} + +object B { + val b: Int = A.a.foo() // error +} + +class Y extends X { + override def foo() = C.c +} + +object C { + val c: Int = B.b +} + +def main = { + A.a = new Y(); C +} \ No newline at end of file diff --git a/tests/init-global/neg/global-cycle6.scala b/tests/init-global/neg/global-cycle6.scala new file mode 100644 index 000000000000..36e3ab0b6a94 --- /dev/null +++ b/tests/init-global/neg/global-cycle6.scala @@ -0,0 +1,25 @@ +object A { // error + val n: Int = B.m + class Inner { + println(n) // error + } +} + +object B { + val a = new A.Inner + val m: Int = 10 +} + +object O { + object A { + val n: Int = B.m + class Inner { + val x: Int = 4 + } + } + + object B { + val a = new A.Inner + val m: Int = 10 + } +} \ No newline at end of file diff --git a/tests/init-global/neg/global-cycle7.scala b/tests/init-global/neg/global-cycle7.scala new file mode 100644 index 000000000000..aea75726fbf7 --- /dev/null +++ b/tests/init-global/neg/global-cycle7.scala @@ -0,0 +1,18 @@ +object A { // error + val n: Int = B.m +} + +object B { + val m: Int = A.n // error +} + +abstract class TokensCommon { + def maxToken: Int + + val tokenString, debugString: 
Array[String] = new Array[String](maxToken + 1) +} + +object JavaTokens extends TokensCommon { + final def maxToken: Int = DOUBLE + final val DOUBLE = 188 +} diff --git a/tests/init-global/neg/global-cycle8.scala b/tests/init-global/neg/global-cycle8.scala new file mode 100644 index 000000000000..344dc3241395 --- /dev/null +++ b/tests/init-global/neg/global-cycle8.scala @@ -0,0 +1,20 @@ +class A { + def foo() = println(O.n) +} + +class B { + val a = new A +} + +object O { // error + val n: Int = 10 + println(P.m) +} + +object P { + val m = Q.bar(new B) +} + +object Q { + def bar(b: B) = b.a.foo() +} diff --git a/tests/init-global/neg/global-irrelevance1.scala b/tests/init-global/neg/global-irrelevance1.scala new file mode 100644 index 000000000000..903d3b14ae18 --- /dev/null +++ b/tests/init-global/neg/global-irrelevance1.scala @@ -0,0 +1,5 @@ +object A: + var x = 6 + +object B: + var y = A.x * 2 // error \ No newline at end of file diff --git a/tests/init-global/neg/global-irrelevance2.scala b/tests/init-global/neg/global-irrelevance2.scala new file mode 100644 index 000000000000..66b06677b689 --- /dev/null +++ b/tests/init-global/neg/global-irrelevance2.scala @@ -0,0 +1,8 @@ +object A: + var x = 6 + +class B(b: Int): + A.x = b * 2 // error + +object B: + new B(10) diff --git a/tests/init-global/neg/global-irrelevance3.scala b/tests/init-global/neg/global-irrelevance3.scala new file mode 100644 index 000000000000..2f36d65d917e --- /dev/null +++ b/tests/init-global/neg/global-irrelevance3.scala @@ -0,0 +1,14 @@ +object A: + class Pair(val f: Int => Unit, val g: () => Int) + val p: Pair = foo() + + def foo(): Pair = + var x = 6 + new Pair( + y => x = y, + (() => x) // error + ) + + +object B: + var y = A.p.g() diff --git a/tests/init-global/neg/global-irrelevance4.scala b/tests/init-global/neg/global-irrelevance4.scala new file mode 100644 index 000000000000..7a2a778814b2 --- /dev/null +++ b/tests/init-global/neg/global-irrelevance4.scala @@ -0,0 +1,13 @@ +object A: + class Pair(val f: Int => Unit, val g: () => Int) + val p: Pair = foo() + + def foo(): Pair = + var x = 6 + new Pair( + (y => x = y), // error + () => x + ) + +object B: + A.p.f(10) diff --git a/tests/init-global/neg/global-irrelevance5.scala b/tests/init-global/neg/global-irrelevance5.scala new file mode 100644 index 000000000000..fd5bde3032aa --- /dev/null +++ b/tests/init-global/neg/global-irrelevance5.scala @@ -0,0 +1,6 @@ +object A: + val array: Array[Int] = new Array(1) + array(0) = 10 + +object B: + var y = A.array(0) * 2 // error diff --git a/tests/init-global/neg/global-irrelevance6.scala b/tests/init-global/neg/global-irrelevance6.scala new file mode 100644 index 000000000000..78699b6988b6 --- /dev/null +++ b/tests/init-global/neg/global-irrelevance6.scala @@ -0,0 +1,9 @@ +class Box(x: Int): + def foo(): Int = 100 + +object A: + val array: Array[Box] = new Array(1) + val n = array(0).foo() // ok, no crash + +object B: + var y = A.array(0).foo() * 2 // error diff --git a/tests/init-global/neg/global-irrelevance7.scala b/tests/init-global/neg/global-irrelevance7.scala new file mode 100644 index 000000000000..2c860cbc4259 --- /dev/null +++ b/tests/init-global/neg/global-irrelevance7.scala @@ -0,0 +1,10 @@ +class Box(x: Int): + def foo(): Int = 100 + +object A: + val array: Array[Box] = new Array(1) + array(0) = new Box(10) + val n = array(0).foo() // ok + +object B: + var y = A.array(0).foo() * 2 // error diff --git a/tests/init-global/neg/global-list.scala b/tests/init-global/neg/global-list.scala new file mode 100755 
index 000000000000..cdef6dbf3bbe --- /dev/null +++ b/tests/init-global/neg/global-list.scala @@ -0,0 +1,9 @@ +case class Foo(name: String) + +object O: // error + val a = Foo("Apple") + val b = Foo("Banana") + val c = Foo("Cherry") + +object Foo: + val all: List[Foo] = List(O.a, O.b, O.c) // error // error // error \ No newline at end of file diff --git a/tests/init-global/neg/global-local-var.scala b/tests/init-global/neg/global-local-var.scala new file mode 100644 index 000000000000..6965a42bd37f --- /dev/null +++ b/tests/init-global/neg/global-local-var.scala @@ -0,0 +1,16 @@ +class A(x: Int) { + def foo(): Int = { + val to = x + var sum = 0 + var i = 0 + while i < to do + sum += i + i += 1 + + B.a + 10 + sum // error + } +} + +object B { + val a: Int = A(4).foo() +} diff --git a/tests/init-global/neg/global-region1.scala b/tests/init-global/neg/global-region1.scala new file mode 100644 index 000000000000..48473717b5b5 --- /dev/null +++ b/tests/init-global/neg/global-region1.scala @@ -0,0 +1,9 @@ +trait B { def foo(): Int } +class C(var x: Int) extends B { def foo(): Int = 20 } +class D(var y: Int) extends B { def foo(): Int = A.m } // error +class Box(var value: B) + +object A: + val box1: Box = new Box(new C(5)) + val box2: Box = new Box(new D(10)) + val m: Int = box1.value.foo() diff --git a/tests/init-global/neg/i11262.scala b/tests/init-global/neg/i11262.scala new file mode 100644 index 000000000000..c1c01f6aad8c --- /dev/null +++ b/tests/init-global/neg/i11262.scala @@ -0,0 +1,2 @@ +object A { val x: String = B.y } // error +object B { val y: String = A.x } // error diff --git a/tests/init-global/neg/i12544b.scala b/tests/init-global/neg/i12544b.scala new file mode 100644 index 000000000000..586b88df04bd --- /dev/null +++ b/tests/init-global/neg/i12544b.scala @@ -0,0 +1,12 @@ +enum Enum: + case Case + +object Enum: + object nested: // error + val a: Enum = Case + + val b: Enum = f(nested.a) // error + + def f(e: Enum): Enum = e + +@main def main(): Unit = println(Enum.b) diff --git a/tests/init-global/neg/i15883.scala b/tests/init-global/neg/i15883.scala new file mode 100644 index 000000000000..80051c13b92b --- /dev/null +++ b/tests/init-global/neg/i15883.scala @@ -0,0 +1,2 @@ +val a = b // error +val b = 1 diff --git a/tests/init-global/neg/i18628.scala b/tests/init-global/neg/i18628.scala new file mode 100644 index 000000000000..798b3204338c --- /dev/null +++ b/tests/init-global/neg/i18628.scala @@ -0,0 +1,7 @@ +object Test: + class Box(val x: Int) + + def recur(a: => Box, b: => Box): Int = + a.x + recur(a, b) + b.x // error // error + + recur(Box(1), Box(2)) \ No newline at end of file diff --git a/tests/init-global/neg/i18628_2.scala b/tests/init-global/neg/i18628_2.scala new file mode 100644 index 000000000000..4ad428035441 --- /dev/null +++ b/tests/init-global/neg/i18628_2.scala @@ -0,0 +1,7 @@ +object Test: + class Box(val x: Int) + + def recur(a: => Box, b: Box): Int = + a.x + recur(a, b) + b.x // error + + recur(Box(1), Box(2)) diff --git a/tests/init-global/neg/i18628_3.scala b/tests/init-global/neg/i18628_3.scala new file mode 100644 index 000000000000..101674cffb6f --- /dev/null +++ b/tests/init-global/neg/i18628_3.scala @@ -0,0 +1,9 @@ +import scala.annotation.init.widen + +object Test: + class Box(val x: Int) + + def recur(a: => Box, b: => Box): Int = + a.x + recur(a: @widen(5), b: @widen(5)) + b.x // error // error + + recur(Box(1), Box(2)) \ No newline at end of file diff --git a/tests/init/full/neg/i9176.scala b/tests/init-global/neg/i9176.scala similarity index 
81% rename from tests/init/full/neg/i9176.scala rename to tests/init-global/neg/i9176.scala index abb8a6394dd2..c93a16f2f8b1 100644 --- a/tests/init/full/neg/i9176.scala +++ b/tests/init-global/neg/i9176.scala @@ -1,9 +1,9 @@ class Foo(val opposite: Foo) case object A extends Foo(B) // error -case object B extends Foo(A) // error +case object B extends Foo(A) object Test { def main(args: Array[String]): Unit = { println(A.opposite) println(B.opposite) } -} \ No newline at end of file +} diff --git a/tests/init-global/neg/lazy-local-val.scala b/tests/init-global/neg/lazy-local-val.scala new file mode 100644 index 000000000000..2a645ae78db1 --- /dev/null +++ b/tests/init-global/neg/lazy-local-val.scala @@ -0,0 +1,19 @@ +object A: + class Box(value: => Int) + + def f(a: => Int): Box = + val b = a + Box(b) + + val box = f(n) // error + val n = 10 + +object B: + class Box(value: Int) + + def f(a: => Int): Box = + lazy val b = a + Box(b) + + val box = f(n) // error + val n = 10 diff --git a/tests/init-global/neg/line-spacing.check b/tests/init-global/neg/line-spacing.check new file mode 100644 index 000000000000..6fa8801bb07b --- /dev/null +++ b/tests/init-global/neg/line-spacing.check @@ -0,0 +1,13 @@ +-- Error: tests/init-global/neg/line-spacing.scala:4:7 ----------------------------------------------------------------- +3 | B +4 | .s.length // error + | ^ + | Access uninitialized field value s. Calling trace: + | ├── object B { [ line-spacing.scala:7 ] + | │ ^ + | ├── val s: String = s"${A.a}a" [ line-spacing.scala:8 ] + | │ ^^^ + | ├── def a: Int = [ line-spacing.scala:2 ] + | │ ^ + | └── .s.length // error [ line-spacing.scala:4 ] + | ^ diff --git a/tests/init-global/neg/line-spacing.scala b/tests/init-global/neg/line-spacing.scala new file mode 100644 index 000000000000..42474decb53c --- /dev/null +++ b/tests/init-global/neg/line-spacing.scala @@ -0,0 +1,9 @@ +object A { + def a: Int = + B + .s.length // error +} + +object B { + val s: String = s"${A.a}a" +} diff --git a/tests/init-global/neg/mutable-array.scala b/tests/init-global/neg/mutable-array.scala new file mode 100644 index 000000000000..3cbc23a5e127 --- /dev/null +++ b/tests/init-global/neg/mutable-array.scala @@ -0,0 +1,8 @@ +object A: + class Box(var value: Int) + val box: Box = new Box(0) + +object B: + val boxes: Array[A.Box] = Array(A.box) + val box: A.Box = boxes(0) + val x: Int = box.value // error diff --git a/tests/init-global/neg/mutable-read1.scala b/tests/init-global/neg/mutable-read1.scala new file mode 100755 index 000000000000..507a8b7d74ad --- /dev/null +++ b/tests/init-global/neg/mutable-read1.scala @@ -0,0 +1,10 @@ +class Box(var value: Int) + +object A: + val box: Box = new Box(4) + +object B: + val boxB: Box = new Box(5) + val boxA: Box = A.box + val m: Int = boxB.value + val n: Int = boxA.value // error \ No newline at end of file diff --git a/tests/init-global/neg/mutable-read2.scala b/tests/init-global/neg/mutable-read2.scala new file mode 100755 index 000000000000..e7653c63d8bb --- /dev/null +++ b/tests/init-global/neg/mutable-read2.scala @@ -0,0 +1,10 @@ +object A: + class Box(var value: Int) { + val initial: Int = value + } + val box: Box = new Box(0) + +object B: + val box: A.Box = A.box + val a: Int = box.initial + val b: Int = box.value // error \ No newline at end of file diff --git a/tests/init-global/neg/mutable-read3.scala b/tests/init-global/neg/mutable-read3.scala new file mode 100755 index 000000000000..d103e112f372 --- /dev/null +++ b/tests/init-global/neg/mutable-read3.scala @@ -0,0 +1,9 @@ 
+object A: + class Box(var value: Int) + val box: Box = new Box(0) + +object B: + val boxes: Array[A.Box] = new Array(1) + boxes(0) = A.box + val box: A.Box = boxes(0) + val x: Int = box.value // error \ No newline at end of file diff --git a/tests/init-global/neg/mutable-read4.scala b/tests/init-global/neg/mutable-read4.scala new file mode 100755 index 000000000000..507a8b7d74ad --- /dev/null +++ b/tests/init-global/neg/mutable-read4.scala @@ -0,0 +1,10 @@ +class Box(var value: Int) + +object A: + val box: Box = new Box(4) + +object B: + val boxB: Box = new Box(5) + val boxA: Box = A.box + val m: Int = boxB.value + val n: Int = boxA.value // error \ No newline at end of file diff --git a/tests/init-global/neg/mutable-read5.scala b/tests/init-global/neg/mutable-read5.scala new file mode 100755 index 000000000000..c166295bf9fa --- /dev/null +++ b/tests/init-global/neg/mutable-read5.scala @@ -0,0 +1,9 @@ +object Names: + class Name(val start: Int, val length: Int) + var chrs: Array[Char] = new Array[Char](0x20000) + def name(s: String): Name = Name(0, chrs.length) // error + +object StdNames: + val AnyRef: Names.Name = Names.name("AnyRef") + val Array: Names.Name = Names.name("Array") + val List: Names.Name = Names.name("List") \ No newline at end of file diff --git a/tests/init-global/neg/mutable-read6.scala b/tests/init-global/neg/mutable-read6.scala new file mode 100755 index 000000000000..8b00eeaf4216 --- /dev/null +++ b/tests/init-global/neg/mutable-read6.scala @@ -0,0 +1,15 @@ +class SourceFile + +object Contexts: + val NoContext: Context = new Context + class Context: + private var _source: SourceFile = null + final def source: SourceFile = _source // error + def setSource(source: SourceFile) = { + this._source = source + } + +object Implicits: + import Contexts.* + case class SearchFailure(tag: Int, source: SourceFile) + val NoMatchingFailure: SearchFailure = SearchFailure(1, NoContext.source) \ No newline at end of file diff --git a/tests/init-global/neg/mutable-read7.scala b/tests/init-global/neg/mutable-read7.scala new file mode 100755 index 000000000000..ad9d154d74f5 --- /dev/null +++ b/tests/init-global/neg/mutable-read7.scala @@ -0,0 +1,13 @@ +object Positioned: + var debug: Boolean = false + var debugId = Int.MinValue + var nextId: Int = 0 + +abstract class Positioned: + if (Positioned.debug) { // error + println("do debugging") + } + +object Trees: + class Tree extends Positioned + val emptyTree = new Tree \ No newline at end of file diff --git a/tests/init-global/neg/mutable-read8.scala b/tests/init-global/neg/mutable-read8.scala new file mode 100755 index 000000000000..e830fa65be73 --- /dev/null +++ b/tests/init-global/neg/mutable-read8.scala @@ -0,0 +1,11 @@ +object Stats { + var monitored: Boolean = false +} + +class UncachedGroundType { + if (Stats.monitored) println("record stats") // error +} + +class LazyType extends UncachedGroundType + +object NoCompleter extends LazyType \ No newline at end of file diff --git a/tests/init-global/neg/partial-ordering.scala b/tests/init-global/neg/partial-ordering.scala new file mode 100755 index 000000000000..1bc1b251fb72 --- /dev/null +++ b/tests/init-global/neg/partial-ordering.scala @@ -0,0 +1,8 @@ +object Names: // error + val ctorString = "" + val ctorName: MethodName = MethodName.apply(ctorString) + +class MethodName(encoded: String) +object MethodName: + val ctor: MethodName = new MethodName(Names.ctorString) + def apply(name: String): MethodName = new MethodName(name) \ No newline at end of file diff --git 
a/tests/init-global/neg/patmat-unapplySeq.check b/tests/init-global/neg/patmat-unapplySeq.check new file mode 100644 index 000000000000..8f7a1f64631b --- /dev/null +++ b/tests/init-global/neg/patmat-unapplySeq.check @@ -0,0 +1,11 @@ +-- Error: tests/init-global/neg/patmat-unapplySeq.scala:8:32 ----------------------------------------------------------- +8 | def apply(i: Int): Box = array(i) // error + | ^^^^^^^^ + |Reading mutable state of object A during initialization of object B. + |Reading mutable state of other static objects is forbidden as it breaks initialization-time irrelevance. Calling trace: + |├── object B: [ patmat-unapplySeq.scala:15 ] + |│ ^ + |├── case A(b) => [ patmat-unapplySeq.scala:17 ] + |│ ^^^^ + |└── def apply(i: Int): Box = array(i) // error [ patmat-unapplySeq.scala:8 ] + | ^^^^^^^^ diff --git a/tests/init-global/neg/patmat-unapplySeq.scala b/tests/init-global/neg/patmat-unapplySeq.scala new file mode 100644 index 000000000000..81c853a6e19f --- /dev/null +++ b/tests/init-global/neg/patmat-unapplySeq.scala @@ -0,0 +1,17 @@ +object A: + class Box(var x: Int) + + val array: Array[Box] = new Array(1) + array(0) = new Box(10) + + def length: Int = array.length + def apply(i: Int): Box = array(i) // error + def drop(n: Int): Seq[Box] = array.toSeq + def toSeq: Seq[Box] = array.toSeq + + def unapplySeq(array: Array[Box]): A.type = this + + +object B: + A.array match + case A(b) => diff --git a/tests/init-global/neg/patmat-unapplySeq2.scala b/tests/init-global/neg/patmat-unapplySeq2.scala new file mode 100644 index 000000000000..adab9495db49 --- /dev/null +++ b/tests/init-global/neg/patmat-unapplySeq2.scala @@ -0,0 +1,17 @@ +object A: + class Box(var x: Int) + + val array: Array[Box] = new Array(1) + array(0) = new Box(10) + + def length: Int = array.length + def apply(i: Int): Box = array(i) // error + def drop(n: Int): Seq[Box] = array.toSeq + def toSeq: Seq[Box] = array.toSeq + + def unapplySeq(array: Array[Box]): A.type = this + + +object B: + A.array match + case A(b*) => diff --git a/tests/init-global/neg/patmat.scala b/tests/init-global/neg/patmat.scala new file mode 100644 index 000000000000..126e66e7cf7b --- /dev/null +++ b/tests/init-global/neg/patmat.scala @@ -0,0 +1,36 @@ +object A: // error + val a: Option[Int] = Some(3) + a match + case Some(x) => println(x * 2 + B.a.size) + case None => println(0) + +object B: + val a = 3 :: 4 :: Nil + a match + case x :: xs => + println(x * 2) + if A.a.isEmpty then println(xs.size) + case Nil => + println(0) + +case class Box[T](value: T) +case class Holder[T](value: T) +object C: + (Box(5): Box[Int] | Holder[Int]) match + case Box(x) => x + case Holder(x) => x + + (Box(5): Box[Int] | Holder[Int]) match + case box: Box[Int] => box.value + case holder: Holder[Int] => holder.value + + val a: Int = Inner.b + + object Inner: // error + val b: Int = 10 + + val foo: () => Int = () => C.a + + (Box(foo): Box[() => Int] | Holder[Int]) match + case Box(f) => f() + case Holder(x) => x diff --git a/tests/init-global/neg/return.scala b/tests/init-global/neg/return.scala new file mode 100755 index 000000000000..5cbf6915fc0e --- /dev/null +++ b/tests/init-global/neg/return.scala @@ -0,0 +1,10 @@ +object A: + def foo(x: Int): Int => Int = + if x <= 0 then + return (a: Int) => a + B.n // error + + (a: Int) => a * a + x + +object B: + val n = A.foo(-10)(20) + diff --git a/tests/init-global/neg/return2.scala b/tests/init-global/neg/return2.scala new file mode 100755 index 000000000000..6a4dec50c2dd --- /dev/null +++ 
b/tests/init-global/neg/return2.scala @@ -0,0 +1,13 @@ +object A: + def foo(x: Int): Int => Int = + val f = (a: Int) => a + B.n // error + var i = 0 + + val g = () => return f + + if x <= 0 then g() + + (a: Int) => a * a + x + +object B: + val n = A.foo(-10)(20) diff --git a/tests/init-global/neg/t5366.scala b/tests/init-global/neg/t5366.scala new file mode 100644 index 000000000000..854bdfe0544b --- /dev/null +++ b/tests/init-global/neg/t5366.scala @@ -0,0 +1,15 @@ +class IdAndMsg(val id: Int, val msg: String = "") + +case object ObjA extends IdAndMsg(1) // error +case object ObjB extends IdAndMsg(2) + +object IdAndMsg { // error + val values = List(ObjA , ObjB) +} + +object Test { + def main(args: Array[String]): Unit = { + ObjA + println(IdAndMsg.values) + } +} \ No newline at end of file diff --git a/tests/init-global/neg/t9115.scala b/tests/init-global/neg/t9115.scala new file mode 100644 index 000000000000..a3020c6939a8 --- /dev/null +++ b/tests/init-global/neg/t9115.scala @@ -0,0 +1,8 @@ +object D { // error + def aaa = 1 //that’s the reason + class Z (depends: Any) + case object D1 extends Z(aaa) // 'null' when calling D.D1 first time // error + case object D2 extends Z(aaa) // 'null' when calling D.D2 first time + println(D1) + println(D2) +} diff --git a/tests/init-global/neg/t9261.scala b/tests/init-global/neg/t9261.scala new file mode 100644 index 000000000000..1e23bedb9b6a --- /dev/null +++ b/tests/init-global/neg/t9261.scala @@ -0,0 +1,3 @@ +sealed abstract class OrderType(val reverse: OrderType) +case object Buy extends OrderType(Sell) // error +case object Sell extends OrderType(Buy) diff --git a/tests/init-global/neg/t9312.scala b/tests/init-global/neg/t9312.scala new file mode 100644 index 000000000000..d88093a2f67a --- /dev/null +++ b/tests/init-global/neg/t9312.scala @@ -0,0 +1,23 @@ +object DeadLockTest { + def main(args: Array[String]): Unit = { + def run(block: => Unit): Unit = + new Thread(new Runnable {def run(): Unit = block}).start() + + run {println(Parent.Child1)} + run {println(Parent.Child2)} + + } + + object Parent { // error + trait Child { + Thread.sleep(2000) // ensure concurrent behavior + val parent = Parent + def siblings = parent.children - this + } + + object Child1 extends Child // error + object Child2 extends Child + + final val children = Set(Child1, Child2) + } +} diff --git a/tests/init-global/neg/t9360.scala b/tests/init-global/neg/t9360.scala new file mode 100644 index 000000000000..2ec0c740d739 --- /dev/null +++ b/tests/init-global/neg/t9360.scala @@ -0,0 +1,25 @@ +class BaseClass(s: String) { + def print: Unit = () +} + +object Obj { // error + val s: String = "hello" + + object AObj extends BaseClass(s) // error + + object BObj extends BaseClass(s) + + val list = List(AObj, BObj) + + def print = { + println(list) + } +} + +object ObjectInit { + def main(args: Array[String]) = { + Obj.AObj.print + Obj.BObj.print + Obj.print + } +} diff --git a/tests/init-global/neg/unapply-implicit-arg.scala b/tests/init-global/neg/unapply-implicit-arg.scala new file mode 100644 index 000000000000..bf41fbbf9412 --- /dev/null +++ b/tests/init-global/neg/unapply-implicit-arg.scala @@ -0,0 +1,14 @@ +object Bar { + class Foo { + def m1(i: Int) = i+1 + def m2(i: Int) = i+2 + } + def unapply(using f1: Foo)(i: Int): Option[Int] = + if i == 0 then Some(f1.m1(i)) else Some(f1.m2(i)) + + given Foo = new Foo + val i1: Int = 0 + val i2: Int = i2 match // error + case Bar(i) => i + case _ => 0 +} \ No newline at end of file diff --git 
a/tests/init-global/neg/unapply-implicit-arg2.scala b/tests/init-global/neg/unapply-implicit-arg2.scala new file mode 100644 index 000000000000..c0a16faac377 --- /dev/null +++ b/tests/init-global/neg/unapply-implicit-arg2.scala @@ -0,0 +1,14 @@ +object Bar { + class Foo { + def m1(i: Int) = i+1 + def m2(i: Int) = i+2 + } + def unapply(using f1: Foo)(i: Int): Option[Int] = + if i == 0 then Some(f1.m1(i1)) else Some(f1.m2(i2)) // error + + given Foo = new Foo + val i1: Int = 0 + val i2: Int = i1 match + case Bar(i) => i + case _ => 0 +} diff --git a/tests/init-global/neg/unapply-implicit-arg3.scala b/tests/init-global/neg/unapply-implicit-arg3.scala new file mode 100644 index 000000000000..efa348f6cfdb --- /dev/null +++ b/tests/init-global/neg/unapply-implicit-arg3.scala @@ -0,0 +1,14 @@ +object Bar { + class Foo { + def m1(i: Int) = i + i1 + def m2(i: Int) = i + i2 // error + } + def unapply(using f1: Foo)(i: Int): Option[Int] = + if i == 0 then Some(f1.m1(i)) else Some(f1.m2(i)) + + given Foo = new Foo + val i1: Int = 0 + val i2: Int = i1 match + case Bar(i) => i + case _ => 0 +} diff --git a/tests/init-global/neg/unapplySeq-implicit-arg.scala b/tests/init-global/neg/unapplySeq-implicit-arg.scala new file mode 100644 index 000000000000..e58635a3090f --- /dev/null +++ b/tests/init-global/neg/unapplySeq-implicit-arg.scala @@ -0,0 +1,14 @@ +object Bar { + class Foo { + def m1(seq: Seq[Int]) = 1 +: seq + def m2(seq: Seq[Int]) = 2 +: seq + } + def unapplySeq(using f1: Foo)(seqi: Seq[Int]): Option[Seq[Int]] = + if seqi(0) == 0 then Some(f1.m1(seqi)) else Some(f1.m2(seqi)) + + given Foo = new Foo + val i1: Int = 0 + val i2: Int = Seq(i2) match // error + case Bar(i) => i + case _ => 0 +} diff --git a/tests/init-global/neg/unapplySeq-implicit-arg2.scala b/tests/init-global/neg/unapplySeq-implicit-arg2.scala new file mode 100644 index 000000000000..35f5105b84d2 --- /dev/null +++ b/tests/init-global/neg/unapplySeq-implicit-arg2.scala @@ -0,0 +1,10 @@ +object Bar { + class Foo + def unapplySeq(using f1: Foo)(using f2: Foo)(seqi: Seq[Int])(using Foo): Option[Seq[Int]] = + Some(i1 +: seqi) // error + given Foo = new Foo + val i1: Int = Seq(0) match { + case Bar(i) => i + case _ => 0 + } +} diff --git a/tests/init-global/neg/unapplySeq-implicit-arg3.scala b/tests/init-global/neg/unapplySeq-implicit-arg3.scala new file mode 100644 index 000000000000..2b5cdd327e57 --- /dev/null +++ b/tests/init-global/neg/unapplySeq-implicit-arg3.scala @@ -0,0 +1,12 @@ +object Bar { + class Foo { + def m(seq: Seq[Int]) = i1 +: seq // error + } + def unapplySeq(using f1: Foo)(seqi: Seq[Int])(using Foo): Option[Seq[Int]] = + Some(f1.m(seqi)) + given Foo = new Foo + val i1: Int = Seq(0) match { + case Bar(i, _) => i + case _ => 0 + } +} diff --git a/tests/init-global/pos/array-size-zero.scala b/tests/init-global/pos/array-size-zero.scala new file mode 100644 index 000000000000..a1a2fc578ad7 --- /dev/null +++ b/tests/init-global/pos/array-size-zero.scala @@ -0,0 +1,10 @@ +object A: + val emptyArray = new Array(0) + +object B: + def build(data: Int*) = + if data.size == 0 then A.emptyArray else Array(data) + + val arr = build(5, 6) + val first = arr(0) + diff --git a/tests/init-global/pos/global-by-name.scala b/tests/init-global/pos/global-by-name.scala new file mode 100644 index 000000000000..623d7af8335c --- /dev/null +++ b/tests/init-global/pos/global-by-name.scala @@ -0,0 +1,11 @@ +def time[A](f: => A): A = + val start = System.nanoTime + val res = f + val elapsed = (System.nanoTime - start) + res + +case class Foo(data: 
Int) + +object o: + val foo = time(Foo(3)) + println(foo.data) diff --git a/tests/init-global/pos/global-cycle10.scala b/tests/init-global/pos/global-cycle10.scala new file mode 100644 index 000000000000..9d6200cd884d --- /dev/null +++ b/tests/init-global/pos/global-cycle10.scala @@ -0,0 +1,17 @@ +abstract class Base { + val msg: String = "hello" + def foo(): Unit + foo() +} + +object O extends Base { // error + + class Inner { + println(msg) + } + + def foo() = new Inner +} + +@main +def Test = O diff --git a/tests/init-global/pos/global-cycle11.scala b/tests/init-global/pos/global-cycle11.scala new file mode 100644 index 000000000000..bbd33bd9b105 --- /dev/null +++ b/tests/init-global/pos/global-cycle11.scala @@ -0,0 +1,14 @@ +import scala.collection.mutable + +object NameKinds { // error + private val qualifiedNameKinds = mutable.HashMap[Int, QualifiedNameKind]() + + val QualifiedName: QualifiedNameKind = new QualifiedNameKind(2, ".") + + abstract class NameKind(val tag: Int) + + class QualifiedNameKind(tag: Int, val separator: String) + extends NameKind(tag) { + qualifiedNameKinds(tag) = this + } +} diff --git a/tests/init-global/pos/global-cycle12.scala b/tests/init-global/pos/global-cycle12.scala new file mode 100644 index 000000000000..300ef02ee8a9 --- /dev/null +++ b/tests/init-global/pos/global-cycle12.scala @@ -0,0 +1,16 @@ +// from Scala.js +object Names { // error + private final val ConstructorSimpleEncodedName: String = + "<init>" + + final class SimpleMethodName(encoded: String) + + object SimpleMethodName { + def apply(name: String): SimpleMethodName = + val res = name == ConstructorSimpleEncodedName + new SimpleMethodName(name) + } + + val ConstructorSimpleName: SimpleMethodName = + SimpleMethodName(ConstructorSimpleEncodedName) +} diff --git a/tests/init-global/pos/global-cycle9.scala b/tests/init-global/pos/global-cycle9.scala new file mode 100644 index 000000000000..4c2421f190d8 --- /dev/null +++ b/tests/init-global/pos/global-cycle9.scala @@ -0,0 +1,20 @@ +object Names { // error + abstract class Name + + abstract class TermName extends Name: + def toTypeName: TypeName = ???
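+  // toTypeName is deliberately stubbed with ???: this pos test is only analyzed by the global initialization checker and is never executed, so the stub is never evaluated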
+ + final class SimpleName(val start: Int, val length: Int) extends TermName + final class TypeName(val toTermName: TermName) extends Name + + class NameTable: + def add(index: Int, name: Name): Unit = () + add(0, EmptyTermName) + + var chrs: Array[Char] = new Array[Char](1024) + + val EmptyTermName: SimpleName = SimpleName(-1, 0) + val EmptyTypeName: TypeName = EmptyTermName.toTypeName + + val nameTable = NameTable() +} diff --git a/tests/init-global/pos/global-fun.scala b/tests/init-global/pos/global-fun.scala new file mode 100644 index 000000000000..97c3cb394e81 --- /dev/null +++ b/tests/init-global/pos/global-fun.scala @@ -0,0 +1,13 @@ +class C: + def double(x: Int): Int = x * 2 + +object A: + val n: Int = 10 + val f: C => Int = foo(n) + + def foo(x: Int): C => Int = + c => c.double(x) + + +object B: + var y = A.f(new C) diff --git a/tests/init-global/pos/global-instantiation.scala b/tests/init-global/pos/global-instantiation.scala new file mode 100755 index 000000000000..6964901e964e --- /dev/null +++ b/tests/init-global/pos/global-instantiation.scala @@ -0,0 +1,7 @@ +class A(x: Int) { + def foo(): Int = B.m +} + +object B: + val m: Int = 20 + val n: Int = new A(10).foo() \ No newline at end of file diff --git a/tests/init-global/pos/global-read.scala b/tests/init-global/pos/global-read.scala new file mode 100755 index 000000000000..5f2386d12c2c --- /dev/null +++ b/tests/init-global/pos/global-read.scala @@ -0,0 +1,8 @@ +object A: + val a: Int = 10 + val b: Int = 20 + +object B: + var n: Int = A.a * A.b + +@main def entry() = println(B.n) \ No newline at end of file diff --git a/tests/init-global/pos/global-recursion.scala b/tests/init-global/pos/global-recursion.scala new file mode 100644 index 000000000000..42c80c46fde3 --- /dev/null +++ b/tests/init-global/pos/global-recursion.scala @@ -0,0 +1,6 @@ +object Recursion: + def foo(): Int = + def fact(x: Int): Int = if x == 0 then 1 else x * fact(x - 1) + fact(5) + + val n = foo() \ No newline at end of file diff --git a/tests/init-global/pos/global-recursion2.scala b/tests/init-global/pos/global-recursion2.scala new file mode 100755 index 000000000000..5a9e3edfad14 --- /dev/null +++ b/tests/init-global/pos/global-recursion2.scala @@ -0,0 +1,6 @@ +class A(val a: A) + +object B: + val a: A = loop(ofA()) + def ofA(): A = ofA().a + def loop(a: A): A = loop(new A(a)) \ No newline at end of file diff --git a/tests/init-global/pos/global-region1.scala b/tests/init-global/pos/global-region1.scala new file mode 100644 index 000000000000..db56fe45e1a4 --- /dev/null +++ b/tests/init-global/pos/global-region1.scala @@ -0,0 +1,11 @@ +import scala.annotation.init.region + +trait B { def foo(): Int } +class C(var x: Int) extends B { def foo(): Int = 20 } +class D(var y: Int) extends B { def foo(): Int = A.m } +class Box(var value: B) + +object A: + val box1: Box = region { new Box(new C(5)) } + val box2: Box = region { new Box(new D(10)) } + val m: Int = box1.value.foo() // ok diff --git a/tests/init-global/pos/global-this.scala b/tests/init-global/pos/global-this.scala new file mode 100755 index 000000000000..b6807416bbd1 --- /dev/null +++ b/tests/init-global/pos/global-this.scala @@ -0,0 +1,11 @@ +object NameKinds: + abstract class NameKind(val tag: Int): + class Info + class QualifiedNameKind(tag: Int, val separator: String) extends NameKind(tag): + qualifiedNameKinds(tag) = this // error + + val MAX_TAG = 8 + val qualifiedNameKinds = new Array[QualifiedNameKind](MAX_TAG) + + val QualifiedName: QualifiedNameKind = new QualifiedNameKind(0, ".") + 
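// each QualifiedNameKind registers itself in qualifiedNameKinds as a side effect of construction; the array is already initialized by the time any instance is created +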
val FlatName: QualifiedNameKind = new QualifiedNameKind(1, "$") diff --git a/tests/init-global/pos/global-trivial-cycle.scala b/tests/init-global/pos/global-trivial-cycle.scala new file mode 100644 index 000000000000..b9371e8600db --- /dev/null +++ b/tests/init-global/pos/global-trivial-cycle.scala @@ -0,0 +1,9 @@ +object C: + val n: Int = A.n + +object A: + val x: Int = 10 + val n: Int = B.foo() + +object B: + def foo(): Int = A.x diff --git a/tests/init-global/pos/global-val-owner.scala b/tests/init-global/pos/global-val-owner.scala new file mode 100644 index 000000000000..164e56d9e776 --- /dev/null +++ b/tests/init-global/pos/global-val-owner.scala @@ -0,0 +1,15 @@ +object Test: + val n = { + def fact(x: Int): Int = if x == 0 then 1 else x * fact(x - 1) + fact(5) + } + + def foo() = + val n = { + def fact(x: Int): Int = if x == 0 then 1 else x * fact(x - 1) + fact(5) + } + n + + val x = foo() + diff --git a/tests/init-global/pos/global-val-owner2.scala b/tests/init-global/pos/global-val-owner2.scala new file mode 100644 index 000000000000..8d84b792cc81 --- /dev/null +++ b/tests/init-global/pos/global-val-owner2.scala @@ -0,0 +1,5 @@ +object Test: + abstract class Base(implicit config: Int) + case class A(x: Int)(implicit config: Int) extends Base + + val a: A = A(3)(using 5) diff --git a/tests/init-global/pos/i17997-2.scala b/tests/init-global/pos/i17997-2.scala new file mode 100644 index 000000000000..370e7887c06b --- /dev/null +++ b/tests/init-global/pos/i17997-2.scala @@ -0,0 +1,23 @@ +abstract class FunSuite: + def foo(): Unit = println("FunSuite") + + foo() + +trait MySelfType + +trait MyTrait extends FunSuite { this: MySelfType => +} + +abstract class MyAbstractClass extends FunSuite { this: MySelfType & MyTrait => + + override def foo() = { + println("MyAbstractClass") + super.foo() + } +} + +final class MyFinalClass extends MyAbstractClass with MyTrait with MySelfType: + val n: Int = 100 + +object Main: + (new MyFinalClass).foo() diff --git a/tests/init-global/pos/i17997.scala b/tests/init-global/pos/i17997.scala new file mode 100644 index 000000000000..73372bf239eb --- /dev/null +++ b/tests/init-global/pos/i17997.scala @@ -0,0 +1,23 @@ +abstract class FunSuite: + def foo(): Unit = println("FunSuite") + + foo() + +trait MySelfType + +trait MyTrait extends FunSuite { this: MySelfType => +} + +abstract class MyAbstractClass extends FunSuite { this: MySelfType => + + override def foo() = { + println("MyAbstractClass") + super.foo() + } +} + +final class MyFinalClass extends MyAbstractClass with MyTrait with MySelfType: + val n: Int = 100 + +object Main: + (new MyFinalClass).foo() diff --git a/tests/init-global/pos/i18624.scala b/tests/init-global/pos/i18624.scala new file mode 100644 index 000000000000..1d59a39e9b74 --- /dev/null +++ b/tests/init-global/pos/i18624.scala @@ -0,0 +1,8 @@ +def h(a: Int): Unit = { + +} + +object X { + h.notify() + println(h.getClass()) +} diff --git a/tests/init-global/pos/i18628-lazy.scala b/tests/init-global/pos/i18628-lazy.scala new file mode 100644 index 000000000000..16e194e9fc37 --- /dev/null +++ b/tests/init-global/pos/i18628-lazy.scala @@ -0,0 +1,91 @@ +abstract class Reader[+T] { + def first: T + + def rest: Reader[T] + + def atEnd: Boolean +} + +trait Parsers { + type Elem + type Input = Reader[Elem] + + sealed abstract class ParseResult[+T] { + val successful: Boolean + + def map[U](f: T => U): ParseResult[U] + + def flatMapWithNext[U](f: T => Input => ParseResult[U]): ParseResult[U] + } + + sealed abstract class NoSuccess(val msg: String) 
extends ParseResult[Nothing] { // when we don't care about the difference between Failure and Error + val successful = false + + def map[U](f: Nothing => U) = this + + def flatMapWithNext[U](f: Nothing => Input => ParseResult[U]): ParseResult[U] + = this + } + + case class Failure(override val msg: String) extends NoSuccess(msg) + + case class Error(override val msg: String) extends NoSuccess(msg) + + case class Success[+T](result: T, val next: Input) extends ParseResult[T] { + val successful = true + + def map[U](f: T => U) = Success(f(result), next) + + def flatMapWithNext[U](f: T => Input => ParseResult[U]): ParseResult[U] = f(result)(next) match { + case s @ Success(result, rest) => Success(result, rest) + case f: Failure => f + case e: Error => e + } + } + + case class ~[+a, +b](_1: a, _2: b) { + override def toString = s"(${_1}~${_2})" + } + + abstract class Parser[+T] extends (Input => ParseResult[T]) { + def apply(in: Input): ParseResult[T] + + def ~ [U](q: => Parser[U]): Parser[~[T, U]] = { lazy val p = q + (for(a <- this; b <- p) yield new ~(a,b)) + } + + def flatMap[U](f: T => Parser[U]): Parser[U] + = Parser{ in => this(in).flatMapWithNext(f)} + + def map[U](f: T => U): Parser[U] //= flatMap{x => success(f(x))} + = Parser{ in => this(in).map(f)} + + def ^^ [U](f: T => U): Parser[U] = map(f) + } + + def Parser[T](f: Input => ParseResult[T]): Parser[T] + = new Parser[T]{ def apply(in: Input) = f(in) } + + def accept(e: Elem): Parser[Elem] = acceptIf(_ == e)("'"+e+"' expected but " + _ + " found") + + def acceptIf(p: Elem => Boolean)(err: Elem => String): Parser[Elem] = Parser { in => + if (in.atEnd) Failure("end of input") + else if (p(in.first)) Success(in.first, in.rest) + else Failure(err(in.first)) + } +} + + +object grammars3 extends Parsers { + type Elem = String + + val a: Parser[String] = accept("a") + val b: Parser[String] = accept("b") + + val AnBnCn: Parser[List[String]] = { + repMany(a,b) + } + + def repMany[T](p: => Parser[T], q: => Parser[T]): Parser[List[T]] = + p~repMany(p,q)~q ^^ {case x~xs~y => x::xs:::(y::Nil)} +} \ No newline at end of file diff --git a/tests/init-global/pos/i18628.scala b/tests/init-global/pos/i18628.scala new file mode 100644 index 000000000000..e9fd8f359356 --- /dev/null +++ b/tests/init-global/pos/i18628.scala @@ -0,0 +1,91 @@ +abstract class Reader[+T] { + def first: T + + def rest: Reader[T] + + def atEnd: Boolean +} + +trait Parsers { + type Elem + type Input = Reader[Elem] + + sealed abstract class ParseResult[+T] { + val successful: Boolean + + def map[U](f: T => U): ParseResult[U] + + def flatMapWithNext[U](f: T => Input => ParseResult[U]): ParseResult[U] + } + + sealed abstract class NoSuccess(val msg: String) extends ParseResult[Nothing] { // when we don't care about the difference between Failure and Error + val successful = false + + def map[U](f: Nothing => U) = this + + def flatMapWithNext[U](f: Nothing => Input => ParseResult[U]): ParseResult[U] + = this + } + + case class Failure(override val msg: String) extends NoSuccess(msg) + + case class Error(override val msg: String) extends NoSuccess(msg) + + case class Success[+T](result: T, val next: Input) extends ParseResult[T] { + val successful = true + + def map[U](f: T => U) = Success(f(result), next) + + def flatMapWithNext[U](f: T => Input => ParseResult[U]): ParseResult[U] = f(result)(next) match { + case s @ Success(result, rest) => Success(result, rest) + case f: Failure => f + case e: Error => e + } + } + + case class ~[+a, +b](_1: a, _2: b) { + override def toString = 
s"(${_1}~${_2})" + } + + abstract class Parser[+T] extends (Input => ParseResult[T]) { + def apply(in: Input): ParseResult[T] + + def ~ [U](q: => Parser[U]): Parser[~[T, U]] = { + (for(a <- this; b <- q) yield new ~(a,b)) + } + + def flatMap[U](f: T => Parser[U]): Parser[U] + = Parser{ in => this(in).flatMapWithNext(f)} + + def map[U](f: T => U): Parser[U] //= flatMap{x => success(f(x))} + = Parser{ in => this(in).map(f)} + + def ^^ [U](f: T => U): Parser[U] = map(f) + } + + def Parser[T](f: Input => ParseResult[T]): Parser[T] + = new Parser[T]{ def apply(in: Input) = f(in) } + + def accept(e: Elem): Parser[Elem] = acceptIf(_ == e)("'"+e+"' expected but " + _ + " found") + + def acceptIf(p: Elem => Boolean)(err: Elem => String): Parser[Elem] = Parser { in => + if (in.atEnd) Failure("end of input") + else if (p(in.first)) Success(in.first, in.rest) + else Failure(err(in.first)) + } +} + + +object grammars3 extends Parsers { + type Elem = String + + val a: Parser[String] = accept("a") + val b: Parser[String] = accept("b") + + val AnBnCn: Parser[List[String]] = { + repMany(a,b) + } + + def repMany[T](p: => Parser[T], q: => Parser[T]): Parser[List[T]] = + p~repMany(p,q)~q ^^ {case x~xs~y => x::xs:::(y::Nil)} +} \ No newline at end of file diff --git a/tests/init-global/pos/i18629.scala b/tests/init-global/pos/i18629.scala new file mode 100644 index 000000000000..f97c21ee918d --- /dev/null +++ b/tests/init-global/pos/i18629.scala @@ -0,0 +1,6 @@ +object Foo { + val bar = List() match { + case List() => ??? + case _ => ??? + } +} diff --git a/tests/init-global/pos/lazy-local-val.scala b/tests/init-global/pos/lazy-local-val.scala new file mode 100644 index 000000000000..792160c58dfb --- /dev/null +++ b/tests/init-global/pos/lazy-local-val.scala @@ -0,0 +1,9 @@ +object Test: + class Box(value: => Int) + + def f(a: => Int): Box = + lazy val b = a + Box(b) + + val box = f(n) + val n = 10 diff --git a/tests/init-global/pos/lazy-local-val2.scala b/tests/init-global/pos/lazy-local-val2.scala new file mode 100644 index 000000000000..d98c56d62d74 --- /dev/null +++ b/tests/init-global/pos/lazy-local-val2.scala @@ -0,0 +1,7 @@ +object C: + def f(a: => Int): Int = + lazy val a: Int = 10 + b + lazy val b: Int = 20 + a + b + + val n = f(10) diff --git a/tests/init-global/pos/patmat-interpolator.scala b/tests/init-global/pos/patmat-interpolator.scala new file mode 100644 index 000000000000..2df74326b77a --- /dev/null +++ b/tests/init-global/pos/patmat-interpolator.scala @@ -0,0 +1,3 @@ +object Test: + val RootPackage = "_root_/" + val s"${RootPackageName @ _}/" = RootPackage: @unchecked diff --git a/tests/init-global/pos/patmat.scala b/tests/init-global/pos/patmat.scala new file mode 100644 index 000000000000..72a00f373e75 --- /dev/null +++ b/tests/init-global/pos/patmat.scala @@ -0,0 +1,14 @@ +object A: + val a: Option[Int] = Some(3) + a match + case Some(x) => println(x * 2) + case None => println(0) + +object B: + val a = 3 :: 4 :: Nil + a match + case x :: xs => + println(x * 2) + println(xs.size) + case Nil => + println(0) diff --git a/tests/init-global/pos/secondary-constructor-return.scala b/tests/init-global/pos/secondary-constructor-return.scala new file mode 100644 index 000000000000..c4a0c1f95001 --- /dev/null +++ b/tests/init-global/pos/secondary-constructor-return.scala @@ -0,0 +1,12 @@ +class Foo (var x: Int) { + def this(a : Int, b : Int) = { + this(a + b) + return + } + val y = x +} + +object A { + val a = new Foo(2, 3) + val b = a.y +} \ No newline at end of file diff --git 
a/tests/init-global/pos/tree-counter.scala b/tests/init-global/pos/tree-counter.scala new file mode 100755 index 000000000000..2201911af608 --- /dev/null +++ b/tests/init-global/pos/tree-counter.scala @@ -0,0 +1,8 @@ +object Trees: + private var counter = 0 + class Tree { + counter += 1 + } + + class EmptyTree extends Tree + val theEmptyTree = new EmptyTree \ No newline at end of file diff --git a/tests/init-global/pos/unapply-implicit-arg-pos.scala b/tests/init-global/pos/unapply-implicit-arg-pos.scala new file mode 100644 index 000000000000..5573a2210160 --- /dev/null +++ b/tests/init-global/pos/unapply-implicit-arg-pos.scala @@ -0,0 +1,14 @@ +object Bar { + class Foo { + def m1(i: Int) = i + i1 + def m2(i: Int) = i + 2 + } + def unapply(using f1: Foo)(using f2: Foo)(i: Int)(using f3: Foo): Option[Int] = + if i == 0 then Some(f1.m1(i1) + f3.m1(i1)) else Some(f2.m2(i) + f3.m2(i)) + + given Foo = new Foo + val i1: Int = 0 + val i2: Int = i1 match + case Bar(i) => i + case _ => 0 +} \ No newline at end of file diff --git a/tests/init-global/pos/unapplySeq-implicit-arg-pos.scala b/tests/init-global/pos/unapplySeq-implicit-arg-pos.scala new file mode 100644 index 000000000000..08e69d4ff3bc --- /dev/null +++ b/tests/init-global/pos/unapplySeq-implicit-arg-pos.scala @@ -0,0 +1,14 @@ +object Bar { + class Foo { + def m1(seq: Seq[Int]) = 0 +: seq + def m2(seq: Seq[Int]) = i1 +: seq + } + def unapplySeq(using f1: Foo)(using f2: Foo)(seqi: Seq[Int])(using f3: Foo): Option[Seq[Int]] = + if seqi(0) == 0 then Some(f1.m1(seqi)) else Some(f2.m2(seqi)) + + given Foo = new Foo + val i1: Int = 0 + val i2: Int = Seq(i1) match + case Bar(i) => i + case _ => 0 +} diff --git a/tests/init/crash/by-name.scala b/tests/init/crash/by-name.scala new file mode 100644 index 000000000000..623d7af8335c --- /dev/null +++ b/tests/init/crash/by-name.scala @@ -0,0 +1,11 @@ +def time[A](f: => A): A = + val start = System.nanoTime + val res = f + val elapsed = (System.nanoTime - start) + res + +case class Foo(data: Int) + +object o: + val foo = time(Foo(3)) + println(foo.data) diff --git a/tests/init/crash/t6888.scala b/tests/init/crash/t6888.scala new file mode 100644 index 000000000000..d339f840c86f --- /dev/null +++ b/tests/init/crash/t6888.scala @@ -0,0 +1,19 @@ +class C { + val x = 1 + object `$` { + val y = x + x + class abc$ { + def xy = x + y + } + object abc$ { + def xy = x + y + } + } +} + +object Test extends App { + val c = new C() + println(c.`$`.y) + println(c.`$`.abc$.xy) + println(new c.`$`.abc$().xy) +} diff --git a/tests/init/full/neg/global-cycle1.scala b/tests/init/full/neg/global-cycle1.scala deleted file mode 100644 index ebd667c51ba0..000000000000 --- a/tests/init/full/neg/global-cycle1.scala +++ /dev/null @@ -1,10 +0,0 @@ -object A { - val a: Int = B.b // error -} - -object B { - val b: Int = A.a // error -} - -@main -def Test = print(A.a) \ No newline at end of file diff --git a/tests/init/full/neg/global-cycle2.scala b/tests/init/full/neg/global-cycle2.scala deleted file mode 100644 index 30792e58af6b..000000000000 --- a/tests/init/full/neg/global-cycle2.scala +++ /dev/null @@ -1,7 +0,0 @@ -object A { - val a: Int = B.foo() // error -} - -object B { - def foo(): Int = A.a * 2 -} diff --git a/tests/init/full/neg/global-cycle3.scala b/tests/init/full/neg/global-cycle3.scala deleted file mode 100644 index 7fae20dbe894..000000000000 --- a/tests/init/full/neg/global-cycle3.scala +++ /dev/null @@ -1,7 +0,0 @@ -class A(x: Int) { - def foo(): Int = B.a + 10 -} - -object B { - val a: Int = A(4).foo() // error 
-} diff --git a/tests/init/full/neg/global-cycle4.scala b/tests/init/full/neg/global-cycle4.scala deleted file mode 100644 index 3de0533cb521..000000000000 --- a/tests/init/full/neg/global-cycle4.scala +++ /dev/null @@ -1,19 +0,0 @@ -trait A { - def foo(): Int -} - -class B extends A { - def foo(): Int = 10 -} - -class C extends A { - def foo(): Int = O.a + 10 -} - -class D(x: Int) { - def bar(): A = if x > 0 then new B else new C -} - -object O { - val a: Int = D(5).bar().foo() // error -} diff --git a/tests/init/neg/apply2.scala b/tests/init/neg/apply2.scala index c6c7fe5fedd2..ea312ac6e8c7 100755 --- a/tests/init/neg/apply2.scala +++ b/tests/init/neg/apply2.scala @@ -1,4 +1,4 @@ -object O: +class O: case class A(b: B): println(n) diff --git a/tests/init/neg/closureLeak.check b/tests/init/neg/closureLeak.check index a90acaa8ed00..2631072eaa11 100644 --- a/tests/init/neg/closureLeak.check +++ b/tests/init/neg/closureLeak.check @@ -2,13 +2,13 @@ 11 | l.foreach(a => a.addX(this)) // error | ^^^^^^^^^^^^^^^^^ |Could not verify that the method argument is transitively initialized (Hot). It was found to be a function where "this" is (the original object of type (class Outer) where initialization checking started). Only transitively initialized arguments may be passed to methods (except constructors). Calling trace: - |-> class Outer { [ closureLeak.scala:1 ] - | ^ - |-> l.foreach(a => a.addX(this)) // error [ closureLeak.scala:11 ] - | ^^^^^^^^^^^^^^^^^ + |├── class Outer { [ closureLeak.scala:1 ] + |│ ^ + |└── l.foreach(a => a.addX(this)) // error [ closureLeak.scala:11 ] + | ^^^^^^^^^^^^^^^^^ | |Promoting the value to transitively initialized (Hot) failed due to the following problem: |Could not verify that the method argument is transitively initialized (Hot). It was found to be the original object of type (class Outer) where initialization checking started. Only transitively initialized arguments may be passed to methods (except constructors). |Non initialized field(s): value p. Promotion trace: - |-> l.foreach(a => a.addX(this)) // error [ closureLeak.scala:11 ] - | ^^^^ + |└── l.foreach(a => a.addX(this)) // error [ closureLeak.scala:11 ] + | ^^^^ diff --git a/tests/init/neg/cycle-structure.check b/tests/init/neg/cycle-structure.check index dfe7c9b85e2f..b07316cfb2a8 100644 --- a/tests/init/neg/cycle-structure.check +++ b/tests/init/neg/cycle-structure.check @@ -2,29 +2,29 @@ 3 | val x = B(this) // error | ^^^^^^^ | Problematic object instantiation: arg 1 is not transitively initialized (Hot). Calling trace: - | -> case class A(b: B) { [ cycle-structure.scala:1 ] - | ^ - | -> val x = B(this) // error [ cycle-structure.scala:3 ] - | ^^^^^^^ + | ├── case class A(b: B) { [ cycle-structure.scala:1 ] + | │ ^ + | └── val x = B(this) // error [ cycle-structure.scala:3 ] + | ^^^^^^^ | | It leads to the following error during object initialization: | Access field value x on an uninitialized (Cold) object. Calling trace: - | -> case class B(a: A) { [ cycle-structure.scala:7 ] - | ^ - | -> val x1 = a.x [ cycle-structure.scala:8 ] - | ^^^ + | ├── case class B(a: A) { [ cycle-structure.scala:7 ] + | │ ^ + | └── val x1 = a.x [ cycle-structure.scala:8 ] + | ^^^ -- Error: tests/init/neg/cycle-structure.scala:9:13 -------------------------------------------------------------------- 9 | val x = A(this) // error | ^^^^^^^ | Problematic object instantiation: arg 1 is not transitively initialized (Hot). 
Calling trace: - | -> case class B(a: A) { [ cycle-structure.scala:7 ] - | ^ - | -> val x = A(this) // error [ cycle-structure.scala:9 ] - | ^^^^^^^ + | ├── case class B(a: A) { [ cycle-structure.scala:7 ] + | │ ^ + | └── val x = A(this) // error [ cycle-structure.scala:9 ] + | ^^^^^^^ | | It leads to the following error during object initialization: | Access field value x on an uninitialized (Cold) object. Calling trace: - | -> case class A(b: B) { [ cycle-structure.scala:1 ] - | ^ - | -> val x1 = b.x [ cycle-structure.scala:2 ] - | ^^^ + | ├── case class A(b: B) { [ cycle-structure.scala:1 ] + | │ ^ + | └── val x1 = b.x [ cycle-structure.scala:2 ] + | ^^^ diff --git a/tests/init/neg/default-this.check b/tests/init/neg/default-this.check index f64f36304e9b..88c5d3dcbba0 100644 --- a/tests/init/neg/default-this.check +++ b/tests/init/neg/default-this.check @@ -3,11 +3,11 @@ | ^^^^^^^ |Could not verify that the method argument is transitively initialized (Hot). It was found to be the original object of type (class B) where initialization checking started. Only transitively initialized arguments may be passed to methods (except constructors). |Non initialized field(s): value result. Calling trace: - |-> class B extends A { [ default-this.scala:6 ] - | ^ - |-> val result = updateThenCompare(5) [ default-this.scala:11 ] - | ^^^^^^^^^^^^^^^^^^^^ - |-> def updateThenCompare(c: Int): Boolean = { [ default-this.scala:7 ] - | ^ - |-> compare() // error [ default-this.scala:9 ] - | ^^^^^^^ + |├── class B extends A { [ default-this.scala:6 ] + |│ ^ + |├── val result = updateThenCompare(5) [ default-this.scala:11 ] + |│ ^^^^^^^^^^^^^^^^^^^^ + |├── def updateThenCompare(c: Int): Boolean = { [ default-this.scala:7 ] + |│ ^ + |└── compare() // error [ default-this.scala:9 ] + | ^^^^^^^ diff --git a/tests/init/neg/final-fields.scala b/tests/init/neg/final-fields.scala index 174ee9eeb79d..7154673058b7 100644 --- a/tests/init/neg/final-fields.scala +++ b/tests/init/neg/final-fields.scala @@ -12,14 +12,14 @@ trait U { val f2: Int } -object Test0 extends U { +class Test0 extends U { final val f1 = 1 final val f2 = 2 final val f3 = f1 + f2 val f4: 3 = f3 } -object Test1 extends U { +class Test1 extends U { final val f1 = 1 final val f3 = f1 + f2 final val f2 = 2 @@ -28,7 +28,7 @@ object Test1 extends U { } -object Test extends T { +class Test extends T { override final val f1 = /*super.f1*/ 1 + f2 // error override final val f2 = 2 // error override final val f3 = {println(3); 3} // error @@ -37,7 +37,7 @@ object Test extends T { def g: 3 = { println("g"); 3 } final val x = g + 1 def main(args: Array[String]): Unit = { - Test0 - Test1 + new Test0 + new Test1 } } diff --git a/tests/init/neg/function11.scala b/tests/init/neg/function11.scala index cb6626291214..278192d003aa 100644 --- a/tests/init/neg/function11.scala +++ b/tests/init/neg/function11.scala @@ -1,5 +1,5 @@ final class Capture { - private[this] var m: Boolean = false + private var m: Boolean = false (0 to 10).foreach { i => // error f() @@ -14,7 +14,7 @@ final class Capture { } final class Capture2 { - private[this] var m: Boolean = false + private var m: Boolean = false (0 to 10).foreach { i => f() diff --git a/tests/init/neg/i12544.scala b/tests/init/neg/i12544.scala index 2692c27134e0..695e57e1d42a 100644 --- a/tests/init/neg/i12544.scala +++ b/tests/init/neg/i12544.scala @@ -2,18 +2,23 @@ enum Enum: case Case case Case2(x: Int) -def g(b: Enum.B): Int = b.foo() +class Outer: + val e = new Enum2 -object Enum: - object nested: - val a: Enum = Case 
+ class Enum2: + class nested: + val a: Enum = Enum.Case - val b: Enum = f(nested.a) + val b: Enum = f((new nested).a) - def f(e: Enum): Enum = e + def f(e: Enum): Enum = e - class B() { def foo() = n + 1 } - g(new B()) // error - val n: Int = 10 + class B() { def foo() = n + 1 } + def g(b: B): Int = b.foo() + g(new B()) // error + val n: Int = 10 -@main def main(): Unit = println(Enum.b) +@main def main(): Unit = { + val o = new Outer + print(o.e.b) +} diff --git a/tests/init/neg/i15363.check b/tests/init/neg/i15363.check index 9912aa186a5b..d78fc1dcf41b 100644 --- a/tests/init/neg/i15363.check +++ b/tests/init/neg/i15363.check @@ -2,14 +2,14 @@ 3 | val b = new B(this) // error | ^^^^^^^^^^^ | Problematic object instantiation: arg 1 is not transitively initialized (Hot). Calling trace: - | -> class A: [ i15363.scala:1 ] - | ^ - | -> val b = new B(this) // error [ i15363.scala:3 ] - | ^^^^^^^^^^^ + | ├── class A: [ i15363.scala:1 ] + | │ ^ + | └── val b = new B(this) // error [ i15363.scala:3 ] + | ^^^^^^^^^^^ | | It leads to the following error during object initialization: | Access field value m on an uninitialized (Cold) object. Calling trace: - | -> class B(a: A): [ i15363.scala:7 ] - | ^ - | -> val x = a.m [ i15363.scala:8 ] - | ^^^ + | ├── class B(a: A): [ i15363.scala:7 ] + | │ ^ + | └── val x = a.m [ i15363.scala:8 ] + | ^^^ diff --git a/tests/init/neg/i15459.check b/tests/init/neg/i15459.check index a8c9972276db..5a8b79c2d82d 100644 --- a/tests/init/neg/i15459.check +++ b/tests/init/neg/i15459.check @@ -3,9 +3,9 @@ | ^^^^ |Could not verify that the method argument is transitively initialized (Hot). It was found to be the original object of type (class Sub) where initialization checking started. Only transitively initialized arguments may be passed to methods (except constructors). |Non initialized field(s): value b. Calling trace: - |-> class Sub extends Sup: [ i15459.scala:5 ] - | ^ - |-> class Sup: [ i15459.scala:1 ] - | ^ - |-> println(this) // error [ i15459.scala:3 ] - | ^^^^ + |├── class Sub extends Sup: [ i15459.scala:5 ] + |│ ^ + |├── class Sup: [ i15459.scala:1 ] + |│ ^ + |└── println(this) // error [ i15459.scala:3 ] + | ^^^^ diff --git a/tests/init/neg/i15883.scala b/tests/init/neg/i15883.scala deleted file mode 100644 index 6f6e3066a878..000000000000 --- a/tests/init/neg/i15883.scala +++ /dev/null @@ -1,2 +0,0 @@ -val a = b -val b = 1 // error diff --git a/tests/init/neg/i4031.scala b/tests/init/neg/i4031.scala index 8340296340e7..ed3f18ac931a 100644 --- a/tests/init/neg/i4031.scala +++ b/tests/init/neg/i4031.scala @@ -1,4 +1,4 @@ -object App { +class App { trait A { type L >: Any} def upcast(a: A, x: Any): a.L = x val p: A { type L <: Nothing } = p // error diff --git a/tests/init/neg/inherit-non-hot.check b/tests/init/neg/inherit-non-hot.check index 068ba9662fd1..5e3adbe93fd9 100644 --- a/tests/init/neg/inherit-non-hot.check +++ b/tests/init/neg/inherit-non-hot.check @@ -2,16 +2,16 @@ 6 | if b == null then b = new B(this) // error | ^^^^^^^^^^^^^^^ |The RHS of reassignment must be transitively initialized (Hot). It was found to be a non-transitively initialized (Warm) object of type (class B) { outer = a transitively initialized (Hot) object, args = (an uninitialized (Cold) object) }. 
Calling trace: - |-> class C extends A { [ inherit-non-hot.scala:15 ] - | ^ - |-> val bAgain = toB.getBAgain [ inherit-non-hot.scala:16 ] - | ^^^ - |-> def toB: B = [ inherit-non-hot.scala:5 ] - | ^ - |-> if b == null then b = new B(this) // error [ inherit-non-hot.scala:6 ] - | ^^^^^^^^^^^^^^^ + |├── class C extends A { [ inherit-non-hot.scala:15 ] + |│ ^ + |├── val bAgain = toB.getBAgain [ inherit-non-hot.scala:16 ] + |│ ^^^ + |├── def toB: B = [ inherit-non-hot.scala:5 ] + |│ ^ + |└── if b == null then b = new B(this) // error [ inherit-non-hot.scala:6 ] + | ^^^^^^^^^^^^^^^ | |Promoting the value to transitively initialized (Hot) failed due to the following problem: |Could not verify that the field value a is transitively initialized (Hot). It was found to be an uninitialized (Cold) object. Promotion trace: - |-> class B(a: A) { [ inherit-non-hot.scala:10 ] - | ^^^^ + |└── class B(a: A) { [ inherit-non-hot.scala:10 ] + | ^^^^ diff --git a/tests/init/neg/inlined-method.check b/tests/init/neg/inlined-method.check index f3061bcb63ed..9f084c05cc1b 100644 --- a/tests/init/neg/inlined-method.check +++ b/tests/init/neg/inlined-method.check @@ -3,9 +3,9 @@ | ^^^^^^^ |Could not verify that the method argument is transitively initialized (Hot). It was found to be the original object of type (class InlineError) where initialization checking started. Only transitively initialized arguments may be passed to methods (except constructors). |Non initialized field(s): value v. Calling trace: - |-> class InlineError { [ inlined-method.scala:1 ] - | ^ - |-> Assertion.failAssert(this) [ inlined-method.scala:2 ] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ - |-> scala.runtime.Scala3RunTime.assertFailed(message) // error [ inlined-method.scala:8 ] - | ^^^^^^^ + |├── class InlineError { [ inlined-method.scala:1 ] + |│ ^ + |├── Assertion.failAssert(this) [ inlined-method.scala:2 ] + |│ ^^^^^^^^^^^^^^^^^^^^^^^^^^ + |└── scala.runtime.Scala3RunTime.assertFailed(message) // error [ inlined-method.scala:8 ] + | ^^^^^^^ diff --git a/tests/init/neg/inner-first.check b/tests/init/neg/inner-first.check index fe90423c828f..6f24780ecb5d 100644 --- a/tests/init/neg/inner-first.check +++ b/tests/init/neg/inner-first.check @@ -3,7 +3,7 @@ | ^^^^ |Could not verify that the method argument is transitively initialized (Hot). It was found to be the original object of type (class B) where initialization checking started. Only transitively initialized arguments may be passed to methods (except constructors). |Non initialized field(s): value n. 
Calling trace: - |-> class B: [ inner-first.scala:2 ] - | ^ - |-> println(this) // error [ inner-first.scala:3 ] - | ^^^^ + |├── class B: [ inner-first.scala:2 ] + |│ ^ + |└── println(this) // error [ inner-first.scala:3 ] + | ^^^^ diff --git a/tests/init/neg/inner30.scala b/tests/init/neg/inner30.scala index 01bb5754d485..87d8888437da 100644 --- a/tests/init/neg/inner30.scala +++ b/tests/init/neg/inner30.scala @@ -1,4 +1,4 @@ -object Scanners { +class Scanners { enum IndentWidth { case Run(ch: Char, n: Int) case Conc(l: IndentWidth, r: Run) diff --git a/tests/init/neg/inner9.scala b/tests/init/neg/inner9.scala index db5198ea0138..b572c8cb49d3 100644 --- a/tests/init/neg/inner9.scala +++ b/tests/init/neg/inner9.scala @@ -1,20 +1,23 @@ -object Flags { - class Inner { - println(b) - } +class Outer: + val flags = new Flags // error - new Flags.Inner + class Flags { + class Inner { + println(b) + } - val a = this.b + 3 - val b = 5 // error -} + new flags.Inner -object Flags2 { - class Inner { - println(b) + val a = this.b + 3 + val b = 5 // error } + class Flags2 { + class Inner { + println(b) + } + - lazy val a = 3 - val b = 5 -} + lazy val a = 3 + val b = 5 + } diff --git a/tests/init/neg/leak-warm.check b/tests/init/neg/leak-warm.check index c2fc561a3668..4bc4e3f31378 100644 --- a/tests/init/neg/leak-warm.check +++ b/tests/init/neg/leak-warm.check @@ -2,7 +2,7 @@ 19 | val l2 = l.map(_.m()) // error | ^^^^^^^^^^^^ | Call method method map on an uninitialized (Cold) object. Calling trace: - | -> object leakWarm { [ leak-warm.scala:1 ] - | ^ - | -> val l2 = l.map(_.m()) // error [ leak-warm.scala:19 ] - | ^^^^^^^^^^^^ + | ├── class leakWarm { [ leak-warm.scala:1 ] + | │ ^ + | └── val l2 = l.map(_.m()) // error [ leak-warm.scala:19 ] + | ^^^^^^^^^^^^ diff --git a/tests/init/neg/leak-warm.scala b/tests/init/neg/leak-warm.scala index bc5539ce9c0b..9bfbe2e4f285 100644 --- a/tests/init/neg/leak-warm.scala +++ b/tests/init/neg/leak-warm.scala @@ -1,4 +1,4 @@ -object leakWarm { +class leakWarm { abstract class A(tag: Int) { class B(x: Int) { val y = x diff --git a/tests/init/neg/promotion-loop.check b/tests/init/neg/promotion-loop.check index bc05640d10d2..e4343d7ec412 100644 --- a/tests/init/neg/promotion-loop.check +++ b/tests/init/neg/promotion-loop.check @@ -2,13 +2,13 @@ 16 | println(b) // error | ^ |Could not verify that the method argument is transitively initialized (Hot). It was found to be a non-transitively initialized (Warm) object of type (class B) { outer = the original object of type (class Test) where initialization checking started }. Only transitively initialized arguments may be passed to methods (except constructors). Calling trace: - |-> class Test { test => [ promotion-loop.scala:1 ] - | ^ - |-> println(b) // error [ promotion-loop.scala:16 ] - | ^ + |├── class Test { test => [ promotion-loop.scala:1 ] + |│ ^ + |└── println(b) // error [ promotion-loop.scala:16 ] + | ^ | |Promoting the value to transitively initialized (Hot) failed due to the following problem: |Could not verify that the field value outer is transitively initialized (Hot). It was found to be the original object of type (class Test) where initialization checking started. |Non initialized field(s): value n. 
Promotion trace: - |-> val outer = test [ promotion-loop.scala:12 ] - | ^^^^^^^^^^^^^^^^ + |└── val outer = test [ promotion-loop.scala:12 ] + | ^^^^^^^^^^^^^^^^ diff --git a/tests/init/neg/promotion-segment3.check b/tests/init/neg/promotion-segment3.check index a7320b5c3ed3..fc152664fb08 100644 --- a/tests/init/neg/promotion-segment3.check +++ b/tests/init/neg/promotion-segment3.check @@ -2,10 +2,10 @@ 9 | bar(new B) // error | ^^^^^ |Could not verify that the method argument is transitively initialized (Hot). It was found to be a non-transitively initialized (Warm) object of type (class B) { outer = the original object of type (class A) where initialization checking started }. Only transitively initialized arguments may be passed to methods (except constructors). Calling trace: - |-> class A: [ promotion-segment3.scala:2 ] - | ^ - |-> bar(new B) // error [ promotion-segment3.scala:9 ] - | ^^^^^ + |├── class A: [ promotion-segment3.scala:2 ] + |│ ^ + |└── bar(new B) // error [ promotion-segment3.scala:9 ] + | ^^^^^ | |Promoting the value to transitively initialized (Hot) failed due to the following problem: |Promotion cancelled as the value contains inner class C. diff --git a/tests/init/neg/secondary-ctor4.check b/tests/init/neg/secondary-ctor4.check index e867ba65ded5..2c89cfe9f289 100644 --- a/tests/init/neg/secondary-ctor4.check +++ b/tests/init/neg/secondary-ctor4.check @@ -2,43 +2,43 @@ 54 | val c = new C(b, 5) // error | ^^^^^^^^^^^ | Problematic object instantiation: arg 1 is not transitively initialized (Hot). Calling trace: - | -> class D { [ secondary-ctor4.scala:52 ] - | ^ - | -> val c = new C(b, 5) // error [ secondary-ctor4.scala:54 ] - | ^^^^^^^^^^^ + | ├── class D { [ secondary-ctor4.scala:52 ] + | │ ^ + | └── val c = new C(b, 5) // error [ secondary-ctor4.scala:54 ] + | ^^^^^^^^^^^ | | It leads to the following error during object initialization: | Access field value n on an uninitialized (Cold) object. Calling trace: - | -> def this(b: B, x: Int) = this(b) [ secondary-ctor4.scala:49 ] - | ^^^^^^^ - | -> class C(b: B) extends A(b) with T { [ secondary-ctor4.scala:48 ] - | ^ - | -> def this(b: B) = { [ secondary-ctor4.scala:17 ] - | ^ - | -> Inner().foo() [ secondary-ctor4.scala:26 ] - | ^^^^^^^ - | -> class Inner() { [ secondary-ctor4.scala:21 ] - | ^ - | -> println(b.n) [ secondary-ctor4.scala:23 ] - | ^^^ + | ├── def this(b: B, x: Int) = this(b) [ secondary-ctor4.scala:49 ] + | │ ^^^^^^^ + | ├── class C(b: B) extends A(b) with T { [ secondary-ctor4.scala:48 ] + | │ ^ + | ├── def this(b: B) = { [ secondary-ctor4.scala:17 ] + | │ ^ + | ├── Inner().foo() [ secondary-ctor4.scala:26 ] + | │ ^^^^^^^ + | ├── class Inner() { [ secondary-ctor4.scala:21 ] + | │ ^ + | └── println(b.n) [ secondary-ctor4.scala:23 ] + | ^^^ -- Error: tests/init/neg/secondary-ctor4.scala:42:4 -------------------------------------------------------------------- 42 | new A(new B(new D)) // error | ^^^^^^^^^^^^^^^^^^^ |Problematic object instantiation: the outer M.this and arg 1 are not transitively initialized (Hot). 
Calling trace: - |-> class N(d: D) extends M(d) { [ secondary-ctor4.scala:59 ] - | ^ - |-> def this(d: D) = { [ secondary-ctor4.scala:7 ] - | ^ - |-> new A(new B(new D)) // error [ secondary-ctor4.scala:42 ] - | ^^^^^^^^^^^^^^^^^^^ + |├── class N(d: D) extends M(d) { [ secondary-ctor4.scala:59 ] + |│ ^ + |├── def this(d: D) = { [ secondary-ctor4.scala:7 ] + |│ ^ + |└── new A(new B(new D)) // error [ secondary-ctor4.scala:42 ] + | ^^^^^^^^^^^^^^^^^^^ | |It leads to the following error during object initialization: |Access field value n on an uninitialized (Cold) object. Calling trace: - |-> def this(b: B) = { [ secondary-ctor4.scala:17 ] - | ^ - |-> Inner().foo() [ secondary-ctor4.scala:26 ] - | ^^^^^^^ - |-> class Inner() { [ secondary-ctor4.scala:21 ] - | ^ - |-> println(b.n) [ secondary-ctor4.scala:23 ] - | ^^^ + |├── def this(b: B) = { [ secondary-ctor4.scala:17 ] + |│ ^ + |├── Inner().foo() [ secondary-ctor4.scala:26 ] + |│ ^^^^^^^ + |├── class Inner() { [ secondary-ctor4.scala:21 ] + |│ ^ + |└── println(b.n) [ secondary-ctor4.scala:23 ] + | ^^^ diff --git a/tests/init/neg/super-resolution.check b/tests/init/neg/super-resolution.check index d5fdd2a3e59e..47f86d63cf5c 100644 --- a/tests/init/neg/super-resolution.check +++ b/tests/init/neg/super-resolution.check @@ -2,35 +2,35 @@ 21 | val m = 30 // error | ^ | Access non-initialized value m. Calling trace: - | -> class C extends A with M with N: [ super-resolution.scala:17 ] - | ^ - | -> foo() [ super-resolution.scala:18 ] - | ^^^^^ - | -> override def foo(): Int = b * super.foo() [ super-resolution.scala:15 ] - | ^^^^^^^^^^^ - | -> override def foo(): Int = a + super.foo() [ super-resolution.scala:11 ] - | ^^^^^^^^^^^ - | -> def foo(): Int = m [ super-resolution.scala:7 ] - | ^ + | ├── class C extends A with M with N: [ super-resolution.scala:17 ] + | │ ^ + | ├── foo() [ super-resolution.scala:18 ] + | │ ^^^^^ + | ├── override def foo(): Int = b * super.foo() [ super-resolution.scala:15 ] + | │ ^^^^^^^^^^^ + | ├── override def foo(): Int = a + super.foo() [ super-resolution.scala:11 ] + | │ ^^^^^^^^^^^ + | └── def foo(): Int = m [ super-resolution.scala:7 ] + | ^ -- Error: tests/init/neg/super-resolution.scala:19:6 ------------------------------------------------------------------- 19 | val a = 10 // error | ^ | Access non-initialized value a. Calling trace: - | -> class C extends A with M with N: [ super-resolution.scala:17 ] - | ^ - | -> foo() [ super-resolution.scala:18 ] - | ^^^^^ - | -> override def foo(): Int = b * super.foo() [ super-resolution.scala:15 ] - | ^^^^^^^^^^^ - | -> override def foo(): Int = a + super.foo() [ super-resolution.scala:11 ] - | ^ + | ├── class C extends A with M with N: [ super-resolution.scala:17 ] + | │ ^ + | ├── foo() [ super-resolution.scala:18 ] + | │ ^^^^^ + | ├── override def foo(): Int = b * super.foo() [ super-resolution.scala:15 ] + | │ ^^^^^^^^^^^ + | └── override def foo(): Int = a + super.foo() [ super-resolution.scala:11 ] + | ^ -- Error: tests/init/neg/super-resolution.scala:20:6 ------------------------------------------------------------------- 20 | val b = 20 // error | ^ | Access non-initialized value b. 
Calling trace: - | -> class C extends A with M with N: [ super-resolution.scala:17 ] - | ^ - | -> foo() [ super-resolution.scala:18 ] - | ^^^^^ - | -> override def foo(): Int = b * super.foo() [ super-resolution.scala:15 ] - | ^ + | ├── class C extends A with M with N: [ super-resolution.scala:17 ] + | │ ^ + | ├── foo() [ super-resolution.scala:18 ] + | │ ^^^^^ + | └── override def foo(): Int = b * super.foo() [ super-resolution.scala:15 ] + | ^ diff --git a/tests/init/neg/super-resolution2.check b/tests/init/neg/super-resolution2.check index f852c519e922..7e8cca7690b1 100644 --- a/tests/init/neg/super-resolution2.check +++ b/tests/init/neg/super-resolution2.check @@ -2,27 +2,27 @@ 19 | val n = 40 // error | ^ | Access non-initialized value n. Calling trace: - | -> class N extends A with B: [ super-resolution2.scala:9 ] - | ^ - | -> new Inner [ super-resolution2.scala:16 ] - | ^^^^^^^^^ - | -> class Inner: [ super-resolution2.scala:12 ] - | ^ - | -> N.super[A].foo() [ super-resolution2.scala:13 ] - | ^^^^^^^^^^^^^^^^ - | -> def foo(): Int = n [ super-resolution2.scala:3 ] - | ^ + | ├── class N extends A with B: [ super-resolution2.scala:9 ] + | │ ^ + | ├── new Inner [ super-resolution2.scala:16 ] + | │ ^^^^^^^^^ + | ├── class Inner: [ super-resolution2.scala:12 ] + | │ ^ + | ├── N.super[A].foo() [ super-resolution2.scala:13 ] + | │ ^^^^^^^^^^^^^^^^ + | └── def foo(): Int = n [ super-resolution2.scala:3 ] + | ^ -- Error: tests/init/neg/super-resolution2.scala:18:6 ------------------------------------------------------------------ 18 | val m = 30 // error | ^ | Access non-initialized value m. Calling trace: - | -> class N extends A with B: [ super-resolution2.scala:9 ] - | ^ - | -> new Inner [ super-resolution2.scala:16 ] - | ^^^^^^^^^ - | -> class Inner: [ super-resolution2.scala:12 ] - | ^ - | -> N.super.foo() [ super-resolution2.scala:14 ] - | ^^^^^^^^^^^^^ - | -> def foo(): Int = m [ super-resolution2.scala:7 ] - | ^ + | ├── class N extends A with B: [ super-resolution2.scala:9 ] + | │ ^ + | ├── new Inner [ super-resolution2.scala:16 ] + | │ ^^^^^^^^^ + | ├── class Inner: [ super-resolution2.scala:12 ] + | │ ^ + | ├── N.super.foo() [ super-resolution2.scala:14 ] + | │ ^^^^^^^^^^^^^ + | └── def foo(): Int = m [ super-resolution2.scala:7 ] + | ^ diff --git a/tests/init/neg/super-resolution3.check b/tests/init/neg/super-resolution3.check index 704d64d6d0cd..d23e944c8ee3 100644 --- a/tests/init/neg/super-resolution3.check +++ b/tests/init/neg/super-resolution3.check @@ -2,57 +2,57 @@ 27 | val n = 40 // error | ^ | Access non-initialized value n. Calling trace: - | -> class C extends A with M with N: [ super-resolution3.scala:22 ] - | ^ - | -> new Inner() [ super-resolution3.scala:23 ] - | ^^^^^^^^^^^ - | -> class Inner: [ super-resolution3.scala:17 ] - | ^ - | -> N.super[A].foo() [ super-resolution3.scala:18 ] - | ^^^^^^^^^^^^^^^^ - | -> def foo(): Int = n [ super-resolution3.scala:3 ] - | ^ + | ├── class C extends A with M with N: [ super-resolution3.scala:22 ] + | │ ^ + | ├── new Inner() [ super-resolution3.scala:23 ] + | │ ^^^^^^^^^^^ + | ├── class Inner: [ super-resolution3.scala:17 ] + | │ ^ + | ├── N.super[A].foo() [ super-resolution3.scala:18 ] + | │ ^^^^^^^^^^^^^^^^ + | └── def foo(): Int = n [ super-resolution3.scala:3 ] + | ^ -- Error: tests/init/neg/super-resolution3.scala:26:6 ------------------------------------------------------------------ 26 | val m = 30 // error | ^ | Access non-initialized value m. 
Calling trace: - | -> class C extends A with M with N: [ super-resolution3.scala:22 ] - | ^ - | -> new Inner() [ super-resolution3.scala:23 ] - | ^^^^^^^^^^^ - | -> class Inner: [ super-resolution3.scala:17 ] - | ^ - | -> N.super.foo() [ super-resolution3.scala:19 ] - | ^^^^^^^^^^^^^ - | -> override def foo(): Int = a + super.foo() [ super-resolution3.scala:11 ] - | ^^^^^^^^^^^ - | -> def foo(): Int = m [ super-resolution3.scala:7 ] - | ^ + | ├── class C extends A with M with N: [ super-resolution3.scala:22 ] + | │ ^ + | ├── new Inner() [ super-resolution3.scala:23 ] + | │ ^^^^^^^^^^^ + | ├── class Inner: [ super-resolution3.scala:17 ] + | │ ^ + | ├── N.super.foo() [ super-resolution3.scala:19 ] + | │ ^^^^^^^^^^^^^ + | ├── override def foo(): Int = a + super.foo() [ super-resolution3.scala:11 ] + | │ ^^^^^^^^^^^ + | └── def foo(): Int = m [ super-resolution3.scala:7 ] + | ^ -- Error: tests/init/neg/super-resolution3.scala:24:6 ------------------------------------------------------------------ 24 | val a = 10 // error | ^ | Access non-initialized value a. Calling trace: - | -> class C extends A with M with N: [ super-resolution3.scala:22 ] - | ^ - | -> new Inner() [ super-resolution3.scala:23 ] - | ^^^^^^^^^^^ - | -> class Inner: [ super-resolution3.scala:17 ] - | ^ - | -> N.super.foo() [ super-resolution3.scala:19 ] - | ^^^^^^^^^^^^^ - | -> override def foo(): Int = a + super.foo() [ super-resolution3.scala:11 ] - | ^ + | ├── class C extends A with M with N: [ super-resolution3.scala:22 ] + | │ ^ + | ├── new Inner() [ super-resolution3.scala:23 ] + | │ ^^^^^^^^^^^ + | ├── class Inner: [ super-resolution3.scala:17 ] + | │ ^ + | ├── N.super.foo() [ super-resolution3.scala:19 ] + | │ ^^^^^^^^^^^^^ + | └── override def foo(): Int = a + super.foo() [ super-resolution3.scala:11 ] + | ^ -- Error: tests/init/neg/super-resolution3.scala:25:6 ------------------------------------------------------------------ 25 | val b = 20 // error | ^ | Access non-initialized value b. Calling trace: - | -> class C extends A with M with N: [ super-resolution3.scala:22 ] - | ^ - | -> new Inner() [ super-resolution3.scala:23 ] - | ^^^^^^^^^^^ - | -> class Inner: [ super-resolution3.scala:17 ] - | ^ - | -> foo() [ super-resolution3.scala:20 ] - | ^^^^^ - | -> override def foo(): Int = b * super.foo() [ super-resolution3.scala:15 ] - | ^ + | ├── class C extends A with M with N: [ super-resolution3.scala:22 ] + | │ ^ + | ├── new Inner() [ super-resolution3.scala:23 ] + | │ ^^^^^^^^^^^ + | ├── class Inner: [ super-resolution3.scala:17 ] + | │ ^ + | ├── foo() [ super-resolution3.scala:20 ] + | │ ^^^^^ + | └── override def foo(): Int = b * super.foo() [ super-resolution3.scala:15 ] + | ^ diff --git a/tests/init/neg/t3273.check b/tests/init/neg/t3273.check index 0fe7ea78871c..8865393835b8 100644 --- a/tests/init/neg/t3273.check +++ b/tests/init/neg/t3273.check @@ -1,26 +1,26 @@ -- Error: tests/init/neg/t3273.scala:4:42 ------------------------------------------------------------------------------ 4 | val num1: LazyList[Int] = 1 #:: num1.map(_ + 1) // error | ^^^^^^^^^^^^^^^ - |Could not verify that the method argument is transitively initialized (Hot). It was found to be a function where "this" is (the original object of type (object Test) where initialization checking started). Only transitively initialized arguments may be passed to methods (except constructors). 
Calling trace: - |-> object Test { [ t3273.scala:3 ] - | ^ - |-> val num1: LazyList[Int] = 1 #:: num1.map(_ + 1) // error [ t3273.scala:4 ] - | ^^^^^^^^^^^^^^^ + |Could not verify that the method argument is transitively initialized (Hot). It was found to be a function where "this" is (the original object of type (class Test) where initialization checking started). Only transitively initialized arguments may be passed to methods (except constructors). Calling trace: + |├── class Test { [ t3273.scala:3 ] + |│ ^ + |└── val num1: LazyList[Int] = 1 #:: num1.map(_ + 1) // error [ t3273.scala:4 ] + | ^^^^^^^^^^^^^^^ | |Promoting the value to transitively initialized (Hot) failed due to the following problem: |Access non-initialized value num1. Promotion trace: - |-> val num1: LazyList[Int] = 1 #:: num1.map(_ + 1) // error [ t3273.scala:4 ] - | ^^^^ + |└── val num1: LazyList[Int] = 1 #:: num1.map(_ + 1) // error [ t3273.scala:4 ] + | ^^^^ -- Error: tests/init/neg/t3273.scala:5:61 ------------------------------------------------------------------------------ 5 | val num2: LazyList[Int] = 1 #:: num2.iterator.map(_ + 1).to(LazyList) // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - |Could not verify that the method argument is transitively initialized (Hot). It was found to be a function where "this" is (the original object of type (object Test) where initialization checking started). Only transitively initialized arguments may be passed to methods (except constructors). Calling trace: - |-> object Test { [ t3273.scala:3 ] - | ^ - |-> val num2: LazyList[Int] = 1 #:: num2.iterator.map(_ + 1).to(LazyList) // error [ t3273.scala:5 ] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + |Could not verify that the method argument is transitively initialized (Hot). It was found to be a function where "this" is (the original object of type (class Test) where initialization checking started). Only transitively initialized arguments may be passed to methods (except constructors). Calling trace: + |├── class Test { [ t3273.scala:3 ] + |│ ^ + |└── val num2: LazyList[Int] = 1 #:: num2.iterator.map(_ + 1).to(LazyList) // error [ t3273.scala:5 ] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | |Promoting the value to transitively initialized (Hot) failed due to the following problem: |Access non-initialized value num2. Promotion trace: - |-> val num2: LazyList[Int] = 1 #:: num2.iterator.map(_ + 1).to(LazyList) // error [ t3273.scala:5 ] - | ^^^^ + |└── val num2: LazyList[Int] = 1 #:: num2.iterator.map(_ + 1).to(LazyList) // error [ t3273.scala:5 ] + | ^^^^ \ No newline at end of file diff --git a/tests/init/neg/t3273.scala b/tests/init/neg/t3273.scala index 141e544bdfeb..af1df70c471e 100644 --- a/tests/init/neg/t3273.scala +++ b/tests/init/neg/t3273.scala @@ -1,6 +1,6 @@ import scala.language.implicitConversions -object Test { +class Test { val num1: LazyList[Int] = 1 #:: num1.map(_ + 1) // error val num2: LazyList[Int] = 1 #:: num2.iterator.map(_ + 1).to(LazyList) // error diff --git a/tests/init/neg/unsound1.check b/tests/init/neg/unsound1.check index d114ba072db6..ef3ad1de7728 100644 --- a/tests/init/neg/unsound1.check +++ b/tests/init/neg/unsound1.check @@ -2,7 +2,7 @@ 2 | if (m > 0) println(foo(m - 1).a2.n) // error | ^^^^^^^^^^^^^^^ | Access field variable n on an uninitialized (Cold) object. 
Calling trace: - | -> class A(m: Int) { [ unsound1.scala:1 ] - | ^ - | -> if (m > 0) println(foo(m - 1).a2.n) // error [ unsound1.scala:2 ] - | ^^^^^^^^^^^^^^^ + | ├── class A(m: Int) { [ unsound1.scala:1 ] + | │ ^ + | └── if (m > 0) println(foo(m - 1).a2.n) // error [ unsound1.scala:2 ] + | ^^^^^^^^^^^^^^^ diff --git a/tests/init/neg/unsound2.check b/tests/init/neg/unsound2.check index 69d1278e94df..d1525ca9d18c 100644 --- a/tests/init/neg/unsound2.check +++ b/tests/init/neg/unsound2.check @@ -1,12 +1,12 @@ -- Error: tests/init/neg/unsound2.scala:5:26 --------------------------------------------------------------------------- 5 | def getN: Int = a.n // error | ^^^ - | Access field value n on an uninitialized (Cold) object. Calling trace: - | -> case class A(x: Int) { [ unsound2.scala:1 ] - | ^ - | -> println(foo(x).getB) [ unsound2.scala:8 ] - | ^^^^^^ - | -> def foo(y: Int): B = if (y > 10) then B(bar(y - 1), foo(y - 1).getN) else B(bar(y), 10) [ unsound2.scala:2 ] - | ^^^^^^^^^^^^^^^ - | -> def getN: Int = a.n // error [ unsound2.scala:5 ] + | Access field value n on an uninitialized (Cold) object. Calling trace: + | ├── case class A(x: Int) { [ unsound2.scala:1 ] + | │ ^ + | ├── println(foo(x).getB) [ unsound2.scala:8 ] + | │ ^^^^^^ + | ├── def foo(y: Int): B = if (y > 10) then B(bar(y - 1), foo(y - 1).getN) else B(bar(y), 10) [ unsound2.scala:2 ] + | │ ^^^^^^^^^^^^^^^ + | └── def getN: Int = a.n // error [ unsound2.scala:5 ] | ^^^ diff --git a/tests/init/neg/unsound3.check b/tests/init/neg/unsound3.check index c32e66272d1a..1cc6e270dbd9 100644 --- a/tests/init/neg/unsound3.check +++ b/tests/init/neg/unsound3.check @@ -2,11 +2,11 @@ 10 | if (x < 12) then foo().getC().b else newB // error | ^^^^^^^^^^^^^^ | Access field value b on an uninitialized (Cold) object. Calling trace: - | -> class C { [ unsound3.scala:5 ] - | ^ - | -> val b = foo() [ unsound3.scala:12 ] - | ^^^^^ - | -> def foo(): B = { [ unsound3.scala:7 ] - | ^ - | -> if (x < 12) then foo().getC().b else newB // error [ unsound3.scala:10 ] - | ^^^^^^^^^^^^^^ + | ├── class C { [ unsound3.scala:5 ] + | │ ^ + | ├── val b = foo() [ unsound3.scala:12 ] + | │ ^^^^^ + | ├── def foo(): B = { [ unsound3.scala:7 ] + | │ ^ + | └── if (x < 12) then foo().getC().b else newB // error [ unsound3.scala:10 ] + | ^^^^^^^^^^^^^^ diff --git a/tests/init/neg/unsound4.check b/tests/init/neg/unsound4.check index 9b356b35a3c2..fb9a99cf9bea 100644 --- a/tests/init/neg/unsound4.check +++ b/tests/init/neg/unsound4.check @@ -2,9 +2,9 @@ 3 | val aAgain = foo(5) // error | ^ | Access non-initialized value aAgain. 
Calling trace: - | -> class A { [ unsound4.scala:1 ] - | ^ - | -> val aAgain = foo(5) // error [ unsound4.scala:3 ] - | ^^^^^^ - | -> def foo(x: Int): A = if (x < 5) then this else foo(x - 1).aAgain [ unsound4.scala:2 ] - | ^^^^^^^^^^^^^^^^^ + | ├── class A { [ unsound4.scala:1 ] + | │ ^ + | ├── val aAgain = foo(5) // error [ unsound4.scala:3 ] + | │ ^^^^^^ + | └── def foo(x: Int): A = if (x < 5) then this else foo(x - 1).aAgain [ unsound4.scala:2 ] + | ^^^^^^^^^^^^^^^^^ diff --git a/tests/init/pos/Properties.scala b/tests/init/pos/Properties.scala index 84d1e09a24f9..8dd2fa052c0b 100644 --- a/tests/init/pos/Properties.scala +++ b/tests/init/pos/Properties.scala @@ -15,7 +15,7 @@ import java.util.jar.Attributes.Name as AttributeName private[scala] trait PropertiesTrait { protected def propCategory: String // specializes the remainder of the values - protected def pickJarBasedOn: Class[_] // props file comes from jar containing this + protected def pickJarBasedOn: Class[?] // props file comes from jar containing this /** The name of the properties file */ protected val propFilename = "/" + propCategory + ".properties" @@ -23,9 +23,9 @@ private[scala] trait PropertiesTrait { /** The loaded properties */ protected lazy val scalaProps: java.util.Properties = { val props = new java.util.Properties - val stream = pickJarBasedOn getResourceAsStream propFilename + val stream = pickJarBasedOn.getResourceAsStream(propFilename) if (stream ne null) - quietlyDispose(props load stream, stream.close) + quietlyDispose(props.load(stream), stream.close) props } @@ -47,8 +47,8 @@ private[scala] trait PropertiesTrait { final def setProp(name: String, value: String) = System.setProperty(name, value) final def clearProp(name: String) = System.clearProperty(name) - final def envOrElse(name: String, alt: String) = Option(System getenv name) getOrElse alt - final def envOrNone(name: String) = Option(System getenv name) + final def envOrElse(name: String, alt: String) = Option(System.getenv(name)) getOrElse alt + final def envOrNone(name: String) = Option(System.getenv(name)) final def envOrSome(name: String, alt: Option[String]) = envOrNone(name) orElse alt @@ -68,7 +68,7 @@ private[scala] trait PropertiesTrait { val releaseVersion = for { v <- scalaPropOrNone("maven.version.number") - if !(v endsWith "-SNAPSHOT") + if !(v.endsWith("-SNAPSHOT")) } yield v /** The development Scala version, if this is not a final release. 
@@ -82,7 +82,7 @@ private[scala] trait PropertiesTrait { val developmentVersion = for { v <- scalaPropOrNone("maven.version.number") - if v endsWith "-SNAPSHOT" + if v.endsWith("-SNAPSHOT") ov <- scalaPropOrNone("version.number") } yield ov diff --git a/tests/init/pos/i15465.scala b/tests/init/pos/i15465.scala index 5b99670e9027..1b0c9e1fead3 100644 --- a/tests/init/pos/i15465.scala +++ b/tests/init/pos/i15465.scala @@ -2,7 +2,7 @@ class TestSuite: protected val it = new ItWord protected final class ItWord: - def should(string: String) = new ItVerbString("should", string) + infix def should(string: String) = new ItVerbString("should", string) private def registerTestToRun(fun: => Any): Unit = () diff --git a/tests/init/pos/i17997.scala b/tests/init/pos/i17997.scala new file mode 100644 index 000000000000..ba311696e9ef --- /dev/null +++ b/tests/init/pos/i17997.scala @@ -0,0 +1,20 @@ +abstract class FunSuite: + def foo(): Unit = println("FunSuite") + + foo() + +trait MySelfType + +trait MyTrait extends FunSuite { this: MySelfType => +} + +abstract class MyAbstractClass extends FunSuite { this: MySelfType => + + override def foo() = { + println("MyAbstractClass") + super.foo() + } +} + +final class MyFinalClass extends MyAbstractClass with MyTrait with MySelfType: + val n: Int = 100 diff --git a/tests/init/pos/i18407/base_0.scala b/tests/init/pos/i18407/base_0.scala new file mode 100644 index 000000000000..892ff56e2ab1 --- /dev/null +++ b/tests/init/pos/i18407/base_0.scala @@ -0,0 +1,4 @@ +// base_0.scala +trait BaseTest extends AnyFreeSpecLike { + "empty-test" - {} // ok if we comment out this line +} diff --git a/tests/init/pos/i18407/macros_0.scala b/tests/init/pos/i18407/macros_0.scala new file mode 100644 index 000000000000..83a5cb7a81c2 --- /dev/null +++ b/tests/init/pos/i18407/macros_0.scala @@ -0,0 +1,37 @@ +// macros_0.scala +object source { + import scala.quoted._ + + class Position() + + object Position { + def withPosition[T]( + fun: Expr[Position => T] + )(using quotes: Quotes, typeOfT: Type[T]): Expr[T] = { + '{ + ${ fun }.apply(new source.Position()) + } + } + } +} + +trait AnyFreeSpecLike { + import scala.language.implicitConversions + + protected final class FreeSpecStringWrapper( + string: String, + pos: source.Position + ) { + def -(fun: => Unit): Unit = fun + } + + inline implicit def convertToFreeSpecStringWrapper( + s: String + ): FreeSpecStringWrapper = { + ${ + source.Position.withPosition[FreeSpecStringWrapper]('{ + (pos: source.Position) => new FreeSpecStringWrapper(s, pos) + }) + } + } +} diff --git a/tests/init/pos/i18407/test_1.scala b/tests/init/pos/i18407/test_1.scala new file mode 100644 index 000000000000..d3050da180b1 --- /dev/null +++ b/tests/init/pos/i18407/test_1.scala @@ -0,0 +1,4 @@ +class MyTest extends BaseTest { + "empty-test" - {} + private val myObject = new {} +} diff --git a/tests/neg-custom-args/adhoc-extension/B.scala b/tests/neg-custom-args/adhoc-extension/B.scala deleted file mode 100644 index dd1971e1835f..000000000000 --- a/tests/neg-custom-args/adhoc-extension/B.scala +++ /dev/null @@ -1,8 +0,0 @@ -package adhoc -class B extends A // error: adhoc-extension (under -strict -feature -Xfatal-warnings) -class C extends A // error - -object O { - val a = new A {} // error - object E extends A // error -} \ No newline at end of file diff --git a/tests/neg-custom-args/autoTuplingTest.scala b/tests/neg-custom-args/autoTuplingTest.scala deleted file mode 100644 index 7321a83827bb..000000000000 --- a/tests/neg-custom-args/autoTuplingTest.scala +++ 
/dev/null @@ -1,9 +0,0 @@ -object autoTupling { - - val x = Some(1, 2) // error when running with -language:noAutoTupling - - x match { - case Some(a, b) => a + b // error // error when running with -language:noAutoTupling - case None => - } -} diff --git a/tests/neg-custom-args/captures/box-adapt-boxing.scala b/tests/neg-custom-args/captures/box-adapt-boxing.scala index ea133051a21a..0052828dbabb 100644 --- a/tests/neg-custom-args/captures/box-adapt-boxing.scala +++ b/tests/neg-custom-args/captures/box-adapt-boxing.scala @@ -1,11 +1,11 @@ trait Cap def main(io: Cap^, fs: Cap^): Unit = { - val test1: Unit -> Unit = _ => { // error + val test1: Unit -> Unit = _ => { type Op = [T] -> (T ->{io} Unit) -> Unit val f: (Cap^{io}) -> Unit = ??? val op: Op = ??? - op[Cap^{io}](f) + op[Cap^{io}](f) // error // expected type of f: {io} (box {io} Cap) -> Unit // actual type: ({io} Cap) -> Unit // adapting f to the expected type will also @@ -32,7 +32,7 @@ def main(io: Cap^, fs: Cap^): Unit = { type Id[X] = Box[X] -> Unit type Op[X] = Unit -> Box[X] val f: Unit -> (Cap^{io}) -> Unit = ??? - val g: Op[Id[Cap^{io}]^{fs}] = f // error + val g: Op[Id[Cap^{io}]^{fs}] = f val h: Op[Id[Cap^{io}]^{io}] = f } } diff --git a/tests/neg-custom-args/captures/box-adapt-cases.scala b/tests/neg-custom-args/captures/box-adapt-cases.scala index 7010444eecb5..3dac26a98318 100644 --- a/tests/neg-custom-args/captures/box-adapt-cases.scala +++ b/tests/neg-custom-args/captures/box-adapt-cases.scala @@ -7,21 +7,21 @@ def test1(): Unit = { x(cap => cap.use()) // was error, now OK } -def test2(io: Cap^{cap}): Unit = { +def test2(io: Cap^): Unit = { type Id[X] = [T] -> (op: X -> T) -> T val x: Id[Cap^{io}] = ??? x(cap => cap.use()) // error } -def test3(io: Cap^{cap}): Unit = { +def test3(io: Cap^): Unit = { type Id[X] = [T] -> (op: X ->{io} T) -> T val x: Id[Cap^{io}] = ??? x(cap => cap.use()) // ok } -def test4(io: Cap^{cap}, fs: Cap^{cap}): Unit = { +def test4(io: Cap^, fs: Cap^): Unit = { type Id[X] = [T] -> (op: X ->{io} T) -> T val x: Id[Cap^{io, fs}] = ??? diff --git a/tests/neg-custom-args/captures/box-adapt-contra.scala b/tests/neg-custom-args/captures/box-adapt-contra.scala new file mode 100644 index 000000000000..2dc79a66d932 --- /dev/null +++ b/tests/neg-custom-args/captures/box-adapt-contra.scala @@ -0,0 +1,18 @@ +import language.experimental.captureChecking + +trait Cap + +def useCap[X](x: X): (X -> Unit) -> Unit = ??? + +def test1(c: Cap^): Unit = + val f: (Cap^{c} -> Unit) -> Unit = useCap[Cap^{c}](c) // error + +def test2(c: Cap^, d: Cap^): Unit = + def useCap1[X](x: X): (X => Unit) -> Unit = ??? + val f1: (Cap^{c} => Unit) ->{c} Unit = useCap1[Cap^{c}](c) // ok + + def useCap2[X](x: X): (X ->{c} Unit) -> Unit = ??? + val f2: (Cap^{c} -> Unit) ->{c} Unit = useCap2[Cap^{c}](c) // ok + + def useCap3[X](x: X): (X ->{d} Unit) -> Unit = ??? + val f3: (Cap^{c} -> Unit) => Unit = useCap3[Cap^{c}](c) // error diff --git a/tests/neg-custom-args/captures/box-adapt-cov.scala b/tests/neg-custom-args/captures/box-adapt-cov.scala index 96901e81458d..2c1f15a5c77f 100644 --- a/tests/neg-custom-args/captures/box-adapt-cov.scala +++ b/tests/neg-custom-args/captures/box-adapt-cov.scala @@ -1,12 +1,12 @@ trait Cap -def test1(io: Cap^{cap}) = { +def test1(io: Cap^) = { type Op[X] = [T] -> Unit -> X val f: Op[Cap^{io}] = ??? val x: [T] -> Unit -> Cap^{io} = f // error } -def test2(io: Cap^{cap}) = { +def test2(io: Cap^) = { type Op[X] = [T] -> Unit -> X^{io} val f: Op[Cap^{io}] = ??? 
val x: Unit -> Cap^{io} = f[Unit] // error diff --git a/tests/neg-custom-args/captures/box-adapt-cs.scala b/tests/neg-custom-args/captures/box-adapt-cs.scala index a39ed0200151..a2a9232fb264 100644 --- a/tests/neg-custom-args/captures/box-adapt-cs.scala +++ b/tests/neg-custom-args/captures/box-adapt-cs.scala @@ -1,14 +1,14 @@ trait Cap { def use(): Int } -def test1(io: Cap^{cap}): Unit = { +def test1(io: Cap^): Unit = { type Id[X] = [T] -> (op: X ->{io} T) -> T val x: Id[Cap^{io}] = ??? - val f: (Cap^{cap}) -> Unit = ??? + val f: (Cap^) -> Unit = ??? x(f) // ok } -def test2(io: Cap^{cap}): Unit = { +def test2(io: Cap^): Unit = { type Id[X] = [T] -> (op: X => T) -> T val x: Id[Cap^] = ??? diff --git a/tests/neg-custom-args/captures/box-adapt-depfun.scala b/tests/neg-custom-args/captures/box-adapt-depfun.scala index 9416ffa040ab..d1c1c73f8207 100644 --- a/tests/neg-custom-args/captures/box-adapt-depfun.scala +++ b/tests/neg-custom-args/captures/box-adapt-depfun.scala @@ -15,7 +15,7 @@ def test2(io: Cap^): Unit = { // should work when the expected type is a dependent function } -def test3(io: Cap^{cap}): Unit = { +def test3(io: Cap^): Unit = { type Id[X] = [T] -> (op: (x: X) ->{} T) -> T val x: Id[Cap^{io}] = ??? diff --git a/tests/neg-custom-args/captures/box-adapt-typefun.scala b/tests/neg-custom-args/captures/box-adapt-typefun.scala index 65a06cd68ed9..175acdda1c8f 100644 --- a/tests/neg-custom-args/captures/box-adapt-typefun.scala +++ b/tests/neg-custom-args/captures/box-adapt-typefun.scala @@ -1,12 +1,12 @@ trait Cap { def use(): Int } -def test1(io: Cap^{cap}): Unit = { +def test1(io: Cap^): Unit = { type Op[X] = [T] -> X -> Unit val f: [T] -> (Cap^{io}) -> Unit = ??? val op: Op[Cap^{io}] = f // error } -def test2(io: Cap^{cap}): Unit = { +def test2(io: Cap^): Unit = { type Lazy[X] = [T] -> Unit -> X val f: Lazy[Cap^{io}] = ??? val test: [T] -> Unit -> (Cap^{io}) = f // error diff --git a/tests/neg-custom-args/captures/box-unsoundness.scala b/tests/neg-custom-args/captures/box-unsoundness.scala new file mode 100644 index 000000000000..d1331f16df1f --- /dev/null +++ b/tests/neg-custom-args/captures/box-unsoundness.scala @@ -0,0 +1,13 @@ +//@annotation.capability +class CanIO { def use(): Unit = () } +def use[X](x: X): (op: X -> Unit) -> Unit = op => op(x) +def test(io: CanIO^): Unit = + val f: (CanIO^ => Unit) -> Unit = use[CanIO^](io) // error + val _: (CanIO^ => Unit) -> Unit = f + + val g1 = () => f(x => x.use()) + + val a1 = f(x => x.use()) + val a2 = () => f(x => x.use()) + val g2: () -> Unit = a2 + // was UNSOUND: g uses the capability io but has an empty capture set diff --git a/tests/neg-custom-args/captures/buffers.check b/tests/neg-custom-args/captures/buffers.check new file mode 100644 index 000000000000..07acea3c48e3 --- /dev/null +++ b/tests/neg-custom-args/captures/buffers.check @@ -0,0 +1,26 @@ +-- Error: tests/neg-custom-args/captures/buffers.scala:11:6 ------------------------------------------------------------ +11 | var elems: Array[A] = new Array[A](10) // error // error + | ^ + | Mutable variable elems cannot have type Array[A] since + | that type refers to the type variable A, which is not sealed. +-- Error: tests/neg-custom-args/captures/buffers.scala:16:38 ----------------------------------------------------------- +16 | def make[A: ClassTag](xs: A*) = new ArrayBuffer: // error + | ^^^^^^^^^^^ + | Sealed type variable A cannot be instantiated to box A^? since + | that type refers to the type variable A, which is not sealed. 
+ | This is often caused by a local capability in an argument of constructor ArrayBuffer + | leaking as part of its result. +-- Error: tests/neg-custom-args/captures/buffers.scala:11:13 ----------------------------------------------------------- +11 | var elems: Array[A] = new Array[A](10) // error // error + | ^^^^^^^^ + | Array cannot have element type A since + | that type variable is not sealed. + | Since arrays are mutable, they have to be treated like variables, + | so their element type must be sealed. +-- Error: tests/neg-custom-args/captures/buffers.scala:22:9 ------------------------------------------------------------ +22 | val x: Array[A] = new Array[A](10) // error + | ^^^^^^^^ + | Array cannot have element type A since + | that type variable is not sealed. + | Since arrays are mutable, they have to be treated like variables, + | so their element type must be sealed. diff --git a/tests/neg-custom-args/captures/byname.check b/tests/neg-custom-args/captures/byname.check index b1d8fb3b5404..61b83fc24688 100644 --- a/tests/neg-custom-args/captures/byname.check +++ b/tests/neg-custom-args/captures/byname.check @@ -5,10 +5,8 @@ | Required: (x$0: Int) ->{cap2} Int | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/byname.scala:19:5 ---------------------------------------- +-- Error: tests/neg-custom-args/captures/byname.scala:19:5 ------------------------------------------------------------- 19 | h(g()) // error | ^^^ - | Found: () ?->{cap2} I - | Required: () ?->{cap1} I - | - | longer explanation available when compiling with `-explain` + | reference (cap2 : Cap^) is not included in the allowed capture set {cap1} + | of an enclosing function literal with expected type () ?->{cap1} I diff --git a/tests/neg-custom-args/captures/capt-test.scala b/tests/neg-custom-args/captures/capt-test.scala index f14951f410c4..80ee1aba84e1 100644 --- a/tests/neg-custom-args/captures/capt-test.scala +++ b/tests/neg-custom-args/captures/capt-test.scala @@ -14,8 +14,8 @@ def raise[E <: Exception](e: E): Nothing throws E = throw e def foo(x: Boolean): Int throws Fail = if x then 1 else raise(Fail()) -def handle[E <: Exception, sealed R <: Top](op: (CanThrow[E]) => R)(handler: E => R): R = - val x: CanThrow[E] = ??? +def handle[E <: Exception, R <: Top](op: (CT[E] @retains(caps.cap)) => R)(handler: E => R): R = + val x: CT[E] = ??? try op(x) catch case ex: E => handler(ex) diff --git a/tests/neg-custom-args/captures/capt1.check b/tests/neg-custom-args/captures/capt1.check index 85d3b2a7ddcb..74b9db728983 100644 --- a/tests/neg-custom-args/captures/capt1.check +++ b/tests/neg-custom-args/captures/capt1.check @@ -1,21 +1,17 @@ --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:4:2 ------------------------------------------ +-- Error: tests/neg-custom-args/captures/capt1.scala:4:11 -------------------------------------------------------------- 4 | () => if x == null then y else y // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Found: () ->{x} C^? 
- | Required: () -> C - | - | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:7:2 ------------------------------------------ + | ^ + | (x : C^) cannot be referenced here; it is not included in the allowed capture set {} + | of an enclosing function literal with expected type () -> C +-- Error: tests/neg-custom-args/captures/capt1.scala:7:11 -------------------------------------------------------------- 7 | () => if x == null then y else y // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Found: () ->{x} C^? - | Required: Matchable - | - | longer explanation available when compiling with `-explain` + | ^ + | (x : C^) cannot be referenced here; it is not included in the allowed capture set {} + | of an enclosing function literal with expected type Matchable -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:14:2 ----------------------------------------- 14 | def f(y: Int) = if x == null then y else y // error | ^ - | Found: Int ->{x} Int + | Found: (y: Int) ->{x} Int | Required: Matchable 15 | f | @@ -37,10 +33,22 @@ 27 | def m() = if x == null then y else y | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:32:24 ---------------------------------------- -32 | val z2 = h[() -> Cap](() => x) // error - | ^^^^^^^ - | Found: () ->{x} box C^ - | Required: () -> box C^ - | - | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/capt1.scala:32:12 ------------------------------------------------------------- +32 | val z2 = h[() -> Cap](() => x) // error // error + | ^^^^^^^^^^^^ + | Sealed type variable X cannot be instantiated to () -> box C^ since + | the part box C^ of that type captures the root capability `cap`. + | This is often caused by a local capability in an argument of method h + | leaking as part of its result. +-- Error: tests/neg-custom-args/captures/capt1.scala:32:30 ------------------------------------------------------------- +32 | val z2 = h[() -> Cap](() => x) // error // error + | ^ + | (x : C^) cannot be referenced here; it is not included in the allowed capture set {} + | of an enclosing function literal with expected type () -> box C^ +-- Error: tests/neg-custom-args/captures/capt1.scala:34:12 ------------------------------------------------------------- +34 | val z3 = h[(() -> Cap) @retains(x)](() => x)(() => C()) // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Sealed type variable X cannot be instantiated to box () ->{x} Cap since + | the part C^ of that type captures the root capability `cap`. + | This is often caused by a local capability in an argument of method h + | leaking as part of its result. diff --git a/tests/neg-custom-args/captures/capt1.scala b/tests/neg-custom-args/captures/capt1.scala index 651184e8d2c9..48c4d889bf8d 100644 --- a/tests/neg-custom-args/captures/capt1.scala +++ b/tests/neg-custom-args/captures/capt1.scala @@ -29,8 +29,7 @@ def h4(x: Cap, y: Int): A = def foo() = val x: C @retains(caps.cap) = ??? 
def h[X](a: X)(b: X) = a - val z2 = h[() -> Cap](() => x) // error + val z2 = h[() -> Cap](() => x) // error // error (() => C()) - val z3 = h[(() -> Cap) @retains(x)](() => x)(() => C()) // ok - val z4 = h[(() -> Cap) @retains(x)](() => x)(() => C()) // what was inferred for z3 + val z3 = h[(() -> Cap) @retains(x)](() => x)(() => C()) // error diff --git a/tests/neg-custom-args/captures/cc-glb.check b/tests/neg-custom-args/captures/cc-glb.check new file mode 100644 index 000000000000..669cf81a082b --- /dev/null +++ b/tests/neg-custom-args/captures/cc-glb.check @@ -0,0 +1,7 @@ +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/cc-glb.scala:7:19 ---------------------------------------- +7 | val x2: Foo[T] = x1 // error + | ^^ + | Found: (x1 : (Foo[T] & Foo[Any])^{io}) + | Required: Foo[T] + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/cc-glb.scala b/tests/neg-custom-args/captures/cc-glb.scala new file mode 100644 index 000000000000..ec54611915b4 --- /dev/null +++ b/tests/neg-custom-args/captures/cc-glb.scala @@ -0,0 +1,8 @@ +import language.experimental.captureChecking +trait Cap +trait Foo[+T] + +def magic[T](io: Cap^, x: Foo[T]^{io}): Foo[T]^{} = + val x1: (Foo[T]^) & Foo[Any]^{io} = x + val x2: Foo[T] = x1 // error + x2 // boom, an impure value becomes pure diff --git a/tests/neg-custom-args/captures/cc-this.check b/tests/neg-custom-args/captures/cc-this.check index 47207f913f1d..070e815d6d45 100644 --- a/tests/neg-custom-args/captures/cc-this.check +++ b/tests/neg-custom-args/captures/cc-this.check @@ -8,8 +8,8 @@ -- Error: tests/neg-custom-args/captures/cc-this.scala:10:15 ----------------------------------------------------------- 10 | class C2(val x: () => Int): // error | ^ - | reference (C2.this.x : () => Int) is not included in allowed capture set {} of the self type of class C2 + | reference (C2.this.x : () => Int) is not included in the allowed capture set {} of the self type of class C2 -- Error: tests/neg-custom-args/captures/cc-this.scala:17:8 ------------------------------------------------------------ 17 | class C4(val f: () => Int) extends C3 // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | reference (C4.this.f : () => Int) is not included in allowed capture set {} of pure base class class C3 + |reference (C4.this.f : () => Int) captured by this self type is not included in the allowed capture set {} of pure base class class C3 diff --git a/tests/neg-custom-args/captures/cc-this2.check b/tests/neg-custom-args/captures/cc-this2.check index e0df7c857c85..bd9a1085d262 100644 --- a/tests/neg-custom-args/captures/cc-this2.check +++ b/tests/neg-custom-args/captures/cc-this2.check @@ -1,6 +1,12 @@ --- Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:2:6 -------------------------------------------------------- +-- Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:3:8 -------------------------------------------------------- +3 | this: D^ => // error + | ^^ + |reference (caps.cap : caps.Cap) captured by this self type is not included in the allowed capture set {} of pure base class class C +-- [E058] Type Mismatch Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:2:6 ----------------------------------- 2 |class D extends C: // error - |^ - |reference (scala.caps.cap : Any) is not included in allowed capture set {} of pure base class class C -3 | this: D^ => + | ^ + | illegal inheritance: self type D^ of class D does not conform to self type C + | of parent class C + | + | longer 
explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/cc-this2/D_2.scala b/tests/neg-custom-args/captures/cc-this2/D_2.scala index b22e5e456092..de1a722f73a9 100644 --- a/tests/neg-custom-args/captures/cc-this2/D_2.scala +++ b/tests/neg-custom-args/captures/cc-this2/D_2.scala @@ -1,3 +1,3 @@ class D extends C: // error - this: D^ => + this: D^ => // error diff --git a/tests/neg-custom-args/captures/cc-this5.check b/tests/neg-custom-args/captures/cc-this5.check index 84ac97474b80..8affe7005e2e 100644 --- a/tests/neg-custom-args/captures/cc-this5.check +++ b/tests/neg-custom-args/captures/cc-this5.check @@ -1,7 +1,8 @@ -- Error: tests/neg-custom-args/captures/cc-this5.scala:16:20 ---------------------------------------------------------- 16 | def f = println(c) // error | ^ - | (c : Cap) cannot be referenced here; it is not included in the allowed capture set {} + | (c : Cap^) cannot be referenced here; it is not included in the allowed capture set {} + | of the enclosing class A -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/cc-this5.scala:21:15 ------------------------------------- 21 | val x: A = this // error | ^^^^ diff --git a/tests/neg-custom-args/captures/ctest.scala b/tests/neg-custom-args/captures/ctest.scala deleted file mode 100644 index ad10b43a7773..000000000000 --- a/tests/neg-custom-args/captures/ctest.scala +++ /dev/null @@ -1,6 +0,0 @@ -class CC -type Cap = CC^ - -def test(cap1: Cap, cap2: Cap) = - var b: List[String => String] = Nil // error - val bc = b.head // was error, now OK diff --git a/tests/neg-custom-args/captures/curried-simplified.check b/tests/neg-custom-args/captures/curried-simplified.check deleted file mode 100644 index 6a792314e4e3..000000000000 --- a/tests/neg-custom-args/captures/curried-simplified.check +++ /dev/null @@ -1,42 +0,0 @@ --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:7:28 ---------------------------- -7 | def y1: () -> () -> Int = x1 // error - | ^^ - | Found: () ->? () ->{x} Int - | Required: () -> () -> Int - | - | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:9:28 ---------------------------- -9 | def y2: () -> () => Int = x2 // error - | ^^ - | Found: () ->{x} () => Int - | Required: () -> () => Int - | - | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:11:39 --------------------------- -11 | def y3: Cap -> Protect[Int -> Int] = x3 // error - | ^^ - | Found: (x$0: Cap) ->? Int ->{x$0} Int - | Required: Cap -> Protect[Int -> Int] - | - | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:15:32 --------------------------- -15 | def y5: Cap -> Int ->{} Int = x5 // error - | ^^ - | Found: Cap ->? Int ->{x} Int - | Required: Cap -> Int ->{} Int - | - | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:17:48 --------------------------- -17 | def y6: Cap -> Cap ->{} Protect[Int -> Int] = x6 // error - | ^^ - | Found: (x$0: Cap) ->? 
(x$0: Cap) ->{x$0} Int ->{x$0, x$0} Int - | Required: Cap -> Cap ->{} Protect[Int -> Int] - | - | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:19:48 --------------------------- -19 | def y7: Cap -> Protect[Cap -> Int ->{} Int] = x7 // error - | ^^ - | Found: (x$0: Cap) ->? (x: Cap) ->{x$0} Int ->{x$0, x} Int - | Required: Cap -> Protect[Cap -> Int ->{} Int] - | - | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/curried-simplified.scala b/tests/neg-custom-args/captures/curried-simplified.scala deleted file mode 100644 index 988cf7c11c45..000000000000 --- a/tests/neg-custom-args/captures/curried-simplified.scala +++ /dev/null @@ -1,21 +0,0 @@ -@annotation.capability class Cap - -type Protect[T] = T - -def test(x: Cap, y: Cap) = - def x1: () -> () ->{x} Int = ??? - def y1: () -> () -> Int = x1 // error - def x2: () ->{x} () => Int = ??? - def y2: () -> () => Int = x2 // error - def x3: Cap -> Int -> Int = ??? - def y3: Cap -> Protect[Int -> Int] = x3 // error - def x4: Cap -> Protect[Int -> Int] = ??? - def y4: Cap -> Int ->{} Int = x4 // ok - def x5: Cap -> Int ->{x} Int = ??? - def y5: Cap -> Int ->{} Int = x5 // error - def x6: Cap -> Cap -> Int -> Int = ??? - def y6: Cap -> Cap ->{} Protect[Int -> Int] = x6 // error - def x7: Cap -> (x: Cap) -> Int -> Int = ??? - def y7: Cap -> Protect[Cap -> Int ->{} Int] = x7 // error - - diff --git a/tests/neg-custom-args/captures/eta.check b/tests/neg-custom-args/captures/eta.check index a77d66382095..91dfdf06d3cd 100644 --- a/tests/neg-custom-args/captures/eta.check +++ b/tests/neg-custom-args/captures/eta.check @@ -5,10 +5,8 @@ | Required: () -> Proc^{f} | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/eta.scala:6:14 ------------------------------------------- +-- Error: tests/neg-custom-args/captures/eta.scala:6:20 ---------------------------------------------------------------- 6 | bar( () => f ) // error - | ^^^^^^^ - | Found: () ->{f} box () ->{f} Unit - | Required: () -> box () ->? Unit - | - | longer explanation available when compiling with `-explain` + | ^ + | (f : Proc^) cannot be referenced here; it is not included in the allowed capture set {} + | of an enclosing function literal with expected type () -> box () ->? 
Unit diff --git a/tests/neg-custom-args/captures/exception-definitions.check b/tests/neg-custom-args/captures/exception-definitions.check index 8dca91bc8e43..72b88f252e59 100644 --- a/tests/neg-custom-args/captures/exception-definitions.check +++ b/tests/neg-custom-args/captures/exception-definitions.check @@ -1,17 +1,12 @@ --- Error: tests/neg-custom-args/captures/exception-definitions.scala:2:6 ----------------------------------------------- -2 |class Err extends Exception: // error - |^ - |reference (scala.caps.cap : Any) is not included in allowed capture set {} of pure base class class Throwable -3 | self: Err^ => --- Error: tests/neg-custom-args/captures/exception-definitions.scala:10:6 ---------------------------------------------- -10 |class Err4(c: Any^) extends AnyVal // error - |^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - |reference (Err4.this.c : Any^) is not included in allowed capture set {} of pure base class class AnyVal +-- Error: tests/neg-custom-args/captures/exception-definitions.scala:3:8 ----------------------------------------------- +3 | self: Err^ => // error + | ^^^^ + |reference (caps.cap : caps.Cap) captured by this self type is not included in the allowed capture set {} of pure base class class Throwable -- Error: tests/neg-custom-args/captures/exception-definitions.scala:7:12 ---------------------------------------------- 7 | val x = c // error | ^ - |(c : Any^) cannot be referenced here; it is not included in the allowed capture set {} of pure base class class Throwable --- Error: tests/neg-custom-args/captures/exception-definitions.scala:8:8 ----------------------------------------------- + |(c : Any^) cannot be referenced here; it is not included in the allowed capture set {} of the self type of class Err2 +-- Error: tests/neg-custom-args/captures/exception-definitions.scala:8:13 ---------------------------------------------- 8 | class Err3(c: Any^) extends Exception // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | reference (Err3.this.c : Any^) is not included in allowed capture set {} of pure base class class Throwable + | ^ + | reference (Err3.this.c : Any^) is not included in the allowed capture set {} of the self type of class Err3 diff --git a/tests/neg-custom-args/captures/exception-definitions.scala b/tests/neg-custom-args/captures/exception-definitions.scala index 996f64ae4bd1..fbc9f3fd1d33 100644 --- a/tests/neg-custom-args/captures/exception-definitions.scala +++ b/tests/neg-custom-args/captures/exception-definitions.scala @@ -1,12 +1,12 @@ -class Err extends Exception: // error - self: Err^ => +class Err extends Exception: + self: Err^ => // error def test(c: Any^) = class Err2 extends Exception: val x = c // error class Err3(c: Any^) extends Exception // error -class Err4(c: Any^) extends AnyVal // error +class Err4(c: Any^) extends AnyVal // was error, now ok diff --git a/tests/neg-custom-args/captures/filevar.scala b/tests/neg-custom-args/captures/filevar.scala index 830563f51de3..59b8415d6e0f 100644 --- a/tests/neg-custom-args/captures/filevar.scala +++ b/tests/neg-custom-args/captures/filevar.scala @@ -8,7 +8,7 @@ class Service: var file: File^ = uninitialized // error def log = file.write("log") -def withFile[T](op: (f: File^) => T): T = +def withFile[T](op: (l: caps.Cap) ?-> (f: File^{l}) => T): T = op(new File) def test = diff --git a/tests/neg-custom-args/captures/heal-tparam-cs.scala b/tests/neg-custom-args/captures/heal-tparam-cs.scala index 58d12f8b6ce5..498292166297 100644 --- a/tests/neg-custom-args/captures/heal-tparam-cs.scala +++ 
b/tests/neg-custom-args/captures/heal-tparam-cs.scala @@ -1,33 +1,34 @@ import language.experimental.captureChecking -trait Cap { def use(): Unit } +trait Capp { def use(): Unit } -def localCap[sealed T](op: (cap: Cap^{cap}) => T): T = ??? +def localCap[T](op: (c: Capp^) => T): T = ??? -def main(io: Cap^{cap}, net: Cap^{cap}): Unit = { - val test1 = localCap { cap => // error - () => { cap.use() } +def main(io: Capp^, net: Capp^): Unit = { + + val test1 = localCap { c => // error + () => { c.use() } } - val test2: (cap: Cap^{cap}) -> () ->{cap} Unit = - localCap { cap => // should work - (cap1: Cap^{cap}) => () => { cap1.use() } + val test2: (c: Capp^) -> () => Unit = + localCap { c => // should work + (c1: Capp^) => () => { c1.use() } } - val test3: (cap: Cap^{io}) -> () ->{io} Unit = - localCap { cap => // should work - (cap1: Cap^{io}) => () => { cap1.use() } + val test3: (c: Capp^{io}) -> () ->{io} Unit = + localCap { c => // should work + (c1: Capp^{io}) => () => { c1.use() } } - val test4: (cap: Cap^{io}) -> () ->{net} Unit = - localCap { cap => // error - (cap1: Cap^{io}) => () => { cap1.use() } + val test4: (c: Capp^{io}) -> () ->{net} Unit = + localCap { c => // error + (c1: Capp^{io}) => () => { c1.use() } } - def localCap2[sealed T](op: (cap: Cap^{io}) => T): T = ??? + def localCap2[T](op: (c: Capp^{io}) => T): T = ??? val test5: () ->{io} Unit = - localCap2 { cap => // ok - () => { cap.use() } + localCap2 { c => // ok + () => { c.use() } } } diff --git a/tests/neg-custom-args/captures/i15049.scala b/tests/neg-custom-args/captures/i15049.scala index d978e0e1ad0f..ff6b17c360de 100644 --- a/tests/neg-custom-args/captures/i15049.scala +++ b/tests/neg-custom-args/captures/i15049.scala @@ -1,8 +1,8 @@ class Session: def request = "Response" class Foo: - private val session: Session^{cap} = new Session - def withSession[sealed T](f: (Session^{cap}) => T): T = f(session) + private val session: Session^ = new Session + def withSession[T](f: Session^ => T): T = f(session) def Test: Unit = val f = new Foo diff --git a/tests/neg-custom-args/captures/i15116.check b/tests/neg-custom-args/captures/i15116.check index 4b637a7c2e40..df05324866e1 100644 --- a/tests/neg-custom-args/captures/i15116.check +++ b/tests/neg-custom-args/captures/i15116.check @@ -1,28 +1,60 @@ --- Error: tests/neg-custom-args/captures/i15116.scala:3:6 -------------------------------------------------------------- +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15116.scala:3:13 ---------------------------------------- 3 | val x = Foo(m) // error - | ^^^^^^^^^^^^^^ - | Non-local value x cannot have an inferred type - | Foo{val m: String^{Bar.this.m}}^{Bar.this.m} - | with non-empty capture set {Bar.this.m}. - | The type needs to be declared explicitly. --- Error: tests/neg-custom-args/captures/i15116.scala:5:6 -------------------------------------------------------------- + | ^^^^^^ + | Found: Foo{val m²: (Bar.this.m : String^)}^{Bar.this.m} + | Required: Foo + | + | where: m is a value in class Bar + | m² is a value in class Foo + | + | + | Note that the expected type Foo + | is the previously inferred type of value x + | which is also the type seen in separately compiled sources. + | The new inferred type Foo{val m: (Bar.this.m : String^)}^{Bar.this.m} + | must conform to this type. 
+ | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15116.scala:5:13 ---------------------------------------- 5 | val x = Foo(m) // error - | ^^^^^^^^^^^^^^ - | Non-local value x cannot have an inferred type - | Foo{val m: String^}^{Baz.this} - | with non-empty capture set {Baz.this}. - | The type needs to be declared explicitly. --- Error: tests/neg-custom-args/captures/i15116.scala:7:6 -------------------------------------------------------------- + | ^^^^^^ + | Found: Foo{val m: String^{Baz.this}}^{Baz.this} + | Required: Foo + | + | Note that the expected type Foo + | is the previously inferred type of value x + | which is also the type seen in separately compiled sources. + | The new inferred type Foo{val m: String^{Baz.this}}^{Baz.this} + | must conform to this type. + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15116.scala:7:13 ---------------------------------------- 7 | val x = Foo(m) // error - | ^^^^^^^^^^^^^^ - | Non-local value x cannot have an inferred type - | Foo{val m: String^{Bar1.this.m}}^{Bar1.this.m} - | with non-empty capture set {Bar1.this.m}. - | The type needs to be declared explicitly. --- Error: tests/neg-custom-args/captures/i15116.scala:9:6 -------------------------------------------------------------- + | ^^^^^^ + | Found: Foo{val m²: (Bar1.this.m : String^)}^{Bar1.this.m} + | Required: Foo + | + | where: m is a value in class Bar1 + | m² is a value in class Foo + | + | + | Note that the expected type Foo + | is the previously inferred type of value x + | which is also the type seen in separately compiled sources. + | The new inferred type Foo{val m: (Bar1.this.m : String^)}^{Bar1.this.m} + | must conform to this type. + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15116.scala:9:13 ---------------------------------------- 9 | val x = Foo(m) // error - | ^^^^^^^^^^^^^^ - | Non-local value x cannot have an inferred type - | Foo{val m: String^}^{Baz2.this} - | with non-empty capture set {Baz2.this}. - | The type needs to be declared explicitly. + | ^^^^^^ + | Found: Foo{val m: String^{Baz2.this}}^{Baz2.this} + | Required: Foo + | + | Note that the expected type Foo + | is the previously inferred type of value x + | which is also the type seen in separately compiled sources. + | The new inferred type Foo{val m: String^{Baz2.this}}^{Baz2.this} + | must conform to this type. 
+ | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/i15749a.scala b/tests/neg-custom-args/captures/i15749a.scala new file mode 100644 index 000000000000..0158928f4e39 --- /dev/null +++ b/tests/neg-custom-args/captures/i15749a.scala @@ -0,0 +1,22 @@ +import caps.cap +class Unit +object u extends Unit + +type Top = Any^ + +type Wrapper[+T] = [X] -> (op: T ->{cap} X) -> X + +def test = + + def wrapper[T](x: T): Wrapper[T] = + [X] => (op: T ->{cap} X) => op(x) + + def strictMap[A <: Top, B <: Top](mx: Wrapper[A])(f: A ->{cap} B): Wrapper[B] = + mx((x: A) => wrapper(f(x))) + + def force[A](thunk: Unit ->{cap} A): A = thunk(u) + + def forceWrapper[A](mx: Wrapper[Unit ->{cap} A]): Wrapper[A] = + // Γ ⊢ mx: Wrapper[□ {cap} Unit => A] + // `force` should be typed as ∀(□ {cap} Unit -> A) A, but it can not + strictMap[Unit ->{mx*} A, A](mx)(t => force[A](t)) // error // should work diff --git a/tests/neg-custom-args/captures/i15772.check b/tests/neg-custom-args/captures/i15772.check index 949f7ca48588..cce58da1b93b 100644 --- a/tests/neg-custom-args/captures/i15772.check +++ b/tests/neg-custom-args/captures/i15772.check @@ -1,3 +1,8 @@ +-- Error: tests/neg-custom-args/captures/i15772.scala:19:26 ------------------------------------------------------------ +19 | val c : C^{x} = new C(x) // error + | ^ + | (x : C^) cannot be referenced here; it is not included in the allowed capture set {} + | of an enclosing function literal with expected type () -> Int -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:20:46 --------------------------------------- 20 | val boxed1 : ((C^) => Unit) -> Unit = box1(c) // error | ^^^^^^^ @@ -5,18 +10,26 @@ | Required: (C^ => Unit) -> Unit | | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/i15772.scala:26:26 ------------------------------------------------------------ +26 | val c : C^{x} = new C(x) // error + | ^ + | (x : C^) cannot be referenced here; it is not included in the allowed capture set {} + | of an enclosing function literal with expected type () -> Int -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:27:35 --------------------------------------- 27 | val boxed2 : Observe[C^] = box2(c) // error | ^^^^^^^ | Found: (C{val arg: C^}^{c} => Unit) ->{c} Unit - | Required: Observe[C^] + | Required: (C^ => Unit) -> Unit | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:33:33 --------------------------------------- +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:33:34 --------------------------------------- 33 | val boxed2 : Observe[C]^ = box2(c) // error - | ^^^^^^^ - | Found: (C{val arg: C^}^ => Unit) ->? Unit - | Required: (C => Unit) => Unit + | ^ + | Found: box C^ + | Required: box C{val arg: C^?}^? + | + | Note that the universal capability `cap` + | cannot be included in capture set ? 
| | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:44:2 ---------------------------------------- diff --git a/tests/neg-custom-args/captures/i15772.scala b/tests/neg-custom-args/captures/i15772.scala index e4efb6b9ccab..a054eac835c1 100644 --- a/tests/neg-custom-args/captures/i15772.scala +++ b/tests/neg-custom-args/captures/i15772.scala @@ -16,14 +16,14 @@ class C(val arg: C^) { def main1(x: C^) : () -> Int = () => - val c : C^{x} = new C(x) + val c : C^{x} = new C(x) // error val boxed1 : ((C^) => Unit) -> Unit = box1(c) // error boxed1((cap: C^) => unsafe(c)) 0 def main2(x: C^) : () -> Int = () => - val c : C^{x} = new C(x) + val c : C^{x} = new C(x) // error val boxed2 : Observe[C^] = box2(c) // error boxed2((cap: C^) => unsafe(c)) 0 diff --git a/tests/neg-custom-args/captures/i15922.scala b/tests/neg-custom-args/captures/i15922.scala new file mode 100644 index 000000000000..974870cd769c --- /dev/null +++ b/tests/neg-custom-args/captures/i15922.scala @@ -0,0 +1,16 @@ +trait Cap { def use(): Int } +type Id[X] = [T] -> (op: X => T) -> T +def mkId[X](x: X): Id[X] = [T] => (op: X => T) => op(x) + +def withCap[X](op: (Cap^) => X): X = { + val cap: Cap^ = new Cap { def use() = { println("cap is used"); 0 } } + val result = op(cap) + result +} + +def leaking(c: Cap^): Id[Cap^{c}] = mkId(c) + +def test = + val ll = (c: Cap^) => leaking(c) + val bad1 = withCap(ll) // error + val bad2 = withCap(leaking) // error diff --git a/tests/neg-custom-args/captures/i15923.scala b/tests/neg-custom-args/captures/i15923.scala index 3994b34f5928..754fd0687037 100644 --- a/tests/neg-custom-args/captures/i15923.scala +++ b/tests/neg-custom-args/captures/i15923.scala @@ -3,12 +3,12 @@ type Id[X] = [T] -> (op: X => T) -> T def mkId[X](x: X): Id[X] = [T] => (op: X => T) => op(x) def bar() = { - def withCap[sealed X](op: (Cap^) => X): X = { - val cap: Cap^ = new Cap { def use() = { println("cap is used"); 0 } } + def withCap[X](op: (lcap: caps.Cap) ?-> Cap^{lcap} => X): X = { + val cap: Cap = new Cap { def use() = { println("cap is used"); 0 } } val result = op(cap) result } - val leak = withCap(cap => mkId(cap)) // error + val leak = withCap(cap => mkId(cap)) // error // error leak { cap => cap.use() } } \ No newline at end of file diff --git a/tests/neg-custom-args/captures/i16114.scala b/tests/neg-custom-args/captures/i16114.scala index d22c7f02d5fb..d363bb665dc3 100644 --- a/tests/neg-custom-args/captures/i16114.scala +++ b/tests/neg-custom-args/captures/i16114.scala @@ -11,32 +11,32 @@ def withCap[T](op: Cap^ => T): T = { } def main(fs: Cap^): Unit = { - def badOp(io: Cap^{cap}): Unit ->{} Unit = { - val op1: Unit ->{io} Unit = (x: Unit) => // error // limitation - expect[Cap^] { + def badOp(io: Cap^): Unit ->{} Unit = { + val op1: Unit ->{io} Unit = (x: Unit) => + expect[Cap^] { // error io.use() - fs + fs // error (limitation) } - val op2: Unit ->{fs} Unit = (x: Unit) => // error // limitation - expect[Cap^] { + val op2: Unit ->{fs} Unit = (x: Unit) => + expect[Cap^] { // error fs.use() - io + io // error (limitation) } - val op3: Unit ->{io} Unit = (x: Unit) => // ok - expect[Cap^] { + val op3: Unit ->{io} Unit = (x: Unit) => + expect[Cap^] { // error io.use() io } - val op4: Unit ->{} Unit = (x: Unit) => // ok - expect[Cap^](io) + val op4: Unit ->{} Unit = (x: Unit) => // o k + expect[Cap^](io) // error - val op: Unit -> Unit = (x: Unit) => // error - expect[Cap^] { - io.use() - io + val op: Unit -> Unit = (x: Unit) => 
+ expect[Cap^] { // error + io.use() // error + io // error } op } diff --git a/tests/neg-custom-args/captures/i18518.scala b/tests/neg-custom-args/captures/i18518.scala new file mode 100644 index 000000000000..61e63a54141c --- /dev/null +++ b/tests/neg-custom-args/captures/i18518.scala @@ -0,0 +1,5 @@ +import language.experimental.captureChecking +type Foo1 = [R] -> (x: Unit) ->{} Unit // error +type Foo2 = [R] -> (x: Unit) ->{cap} Unit // error +type Foo3 = (c: Int^) -> [R] -> (x: Unit) ->{c} Unit // error +type Foo4 = (c: Int^) -> [R] -> (x0: Unit) -> (x: Unit) ->{c} Unit diff --git a/tests/neg-custom-args/captures/lazylist.check b/tests/neg-custom-args/captures/lazylist.check index 4b7611fc3fb7..09352ec648ce 100644 --- a/tests/neg-custom-args/captures/lazylist.check +++ b/tests/neg-custom-args/captures/lazylist.check @@ -8,8 +8,8 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylist.scala:35:29 ------------------------------------- 35 | val ref1c: LazyList[Int] = ref1 // error | ^^^^ - | Found: (ref1 : lazylists.LazyCons[Int]{val xs: () ->{cap1} lazylists.LazyList[Int]^}^{cap1}) - | Required: lazylists.LazyList[Int] + | Found: (ref1 : lazylists.LazyCons[Int]{val xs: () ->{cap1} lazylists.LazyList[Int]^?}^{cap1}) + | Required: lazylists.LazyList[Int] | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylist.scala:37:36 ------------------------------------- diff --git a/tests/neg-custom-args/captures/lazylists-exceptions.check b/tests/neg-custom-args/captures/lazylists-exceptions.check index f58ed265d3be..3095c1f2f4f9 100644 --- a/tests/neg-custom-args/captures/lazylists-exceptions.check +++ b/tests/neg-custom-args/captures/lazylists-exceptions.check @@ -1,7 +1,7 @@ -- Error: tests/neg-custom-args/captures/lazylists-exceptions.scala:36:2 ----------------------------------------------- -36 | try // error +36 | try // error | ^ - | Result of `try` cannot have type LazyList[Int]^ since + | result of `try` cannot have type LazyList[Int]^ since | that type captures the root capability `cap`. | This is often caused by a locally generated exception capability leaking as part of its result. 37 | tabulate(10) { i => diff --git a/tests/neg-custom-args/captures/lazylists-exceptions.scala b/tests/neg-custom-args/captures/lazylists-exceptions.scala index 6a72facf7285..295147f7f3c5 100644 --- a/tests/neg-custom-args/captures/lazylists-exceptions.scala +++ b/tests/neg-custom-args/captures/lazylists-exceptions.scala @@ -33,7 +33,7 @@ def tabulate[A](n: Int)(gen: Int => A): LazyList[A]^{gen} = class Ex1 extends Exception def problem = - try // error + try // error tabulate(10) { i => if i > 9 then throw Ex1() i * i diff --git a/tests/neg-custom-args/captures/lazylists1.scala b/tests/neg-custom-args/captures/lazylists1.scala index 99472c13ebec..dfa556e38144 100644 --- a/tests/neg-custom-args/captures/lazylists1.scala +++ b/tests/neg-custom-args/captures/lazylists1.scala @@ -1,8 +1,8 @@ class CC -type Cap = CC^{cap} +type Cap = CC^ trait LazyList[+A]: - this: LazyList[A]^{cap} => + this: LazyList[A]^ => def isEmpty: Boolean def head: A @@ -13,7 +13,7 @@ object LazyNil extends LazyList[Nothing]: def head = ??? def tail = ??? 
-extension [A](xs: LazyList[A]^{cap}) +extension [A](xs: LazyList[A]^) def map[B](f: A => B): LazyList[B]^{xs, f} = final class Mapped extends LazyList[B]: this: (Mapped^{xs, f}) => diff --git a/tests/neg-custom-args/captures/lazylists2.check b/tests/neg-custom-args/captures/lazylists2.check index 72efbc08f8e2..13b1da6eaf1c 100644 --- a/tests/neg-custom-args/captures/lazylists2.check +++ b/tests/neg-custom-args/captures/lazylists2.check @@ -1,7 +1,7 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylists2.scala:18:4 ------------------------------------ 18 | final class Mapped extends LazyList[B]: // error | ^ - | Found: LazyList[B^?]^{f, xs} + | Found: LazyList[box B^?]^{f, xs} | Required: LazyList[B]^{f} 19 | this: (Mapped^{xs, f}) => 20 | def isEmpty = false @@ -13,7 +13,7 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylists2.scala:27:4 ------------------------------------ 27 | final class Mapped extends LazyList[B]: // error | ^ - | Found: LazyList[B^?]^{f, xs} + | Found: LazyList[box B^?]^{f, xs} | Required: LazyList[B]^{xs} 28 | this: Mapped^{xs, f} => 29 | def isEmpty = false @@ -33,7 +33,7 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylists2.scala:45:4 ------------------------------------ 45 | final class Mapped extends LazyList[B]: // error | ^ - | Found: LazyList[B^?]^{f, xs} + | Found: LazyList[box B^?]^{f, xs} | Required: LazyList[B]^{xs} 46 | this: (Mapped^{xs, f}) => 47 | def isEmpty = false @@ -45,5 +45,5 @@ -- Error: tests/neg-custom-args/captures/lazylists2.scala:60:10 -------------------------------------------------------- 60 | class Mapped2 extends Mapped: // error | ^ - | references {f, xs} are not all included in allowed capture set {} of the self type of class Mapped2 + | references {f, xs} are not all included in the allowed capture set {} of the self type of class Mapped2 61 | this: Mapped => diff --git a/tests/neg-custom-args/captures/lazyref.check b/tests/neg-custom-args/captures/lazyref.check index 8c91ec13b5d8..8683615c07d8 100644 --- a/tests/neg-custom-args/captures/lazyref.check +++ b/tests/neg-custom-args/captures/lazyref.check @@ -8,21 +8,21 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazyref.scala:21:35 -------------------------------------- 21 | val ref2c: LazyRef[Int]^{cap2} = ref2 // error | ^^^^ - | Found: (ref2 : LazyRef[Int]{val elem: () => Int}^{cap2, ref1}) + | Found: LazyRef[Int]{val elem: () ->{ref2*} Int}^{ref2} | Required: LazyRef[Int]^{cap2} | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazyref.scala:23:35 -------------------------------------- 23 | val ref3c: LazyRef[Int]^{ref1} = ref3 // error | ^^^^ - | Found: (ref3 : LazyRef[Int]{val elem: () => Int}^{cap2, ref1}) + | Found: LazyRef[Int]{val elem: () ->{ref3*} Int}^{ref3} | Required: LazyRef[Int]^{ref1} | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazyref.scala:25:35 -------------------------------------- 25 | val ref4c: LazyRef[Int]^{cap1} = ref4 // error | ^^^^ - | Found: (ref4 : LazyRef[Int]{val elem: () => Int}^{cap2, cap1}) + | Found: LazyRef[Int]{val elem: () ->{ref4*} Int}^{ref4} | Required: LazyRef[Int]^{cap1} | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/leaked-curried.check b/tests/neg-custom-args/captures/leaked-curried.check new file mode 100644 index 
000000000000..3f0a9800a4ec --- /dev/null +++ b/tests/neg-custom-args/captures/leaked-curried.check @@ -0,0 +1,8 @@ +-- Error: tests/neg-custom-args/captures/leaked-curried.scala:14:20 ---------------------------------------------------- +14 | () => () => io // error + | ^^ + |(io : Cap^) cannot be referenced here; it is not included in the allowed capture set {} of the self type of class Fuzz +-- Error: tests/neg-custom-args/captures/leaked-curried.scala:17:20 ---------------------------------------------------- +17 | () => () => io // error + | ^^ + |(io : Cap^) cannot be referenced here; it is not included in the allowed capture set {} of the self type of class Foo diff --git a/tests/neg-custom-args/captures/leaked-curried.scala b/tests/neg-custom-args/captures/leaked-curried.scala new file mode 100644 index 000000000000..d765955ee6ce --- /dev/null +++ b/tests/neg-custom-args/captures/leaked-curried.scala @@ -0,0 +1,19 @@ +trait Cap: + def use(): Unit + +def withCap[T](op: (x: Cap^) => T): T = ??? + +trait Box: + val get: () ->{} () => Cap^ + +def main(): Unit = + val leaked = withCap: (io: Cap^) => + class Fuzz extends Box, Pure: + self => + val get: () ->{} () ->{io} Cap^ = + () => () => io // error + class Foo extends Box, Pure: + val get: () ->{} () ->{io} Cap^ = + () => () => io // error + new Foo + val bad = leaked.get()().use() // using a leaked capability diff --git a/tests/neg-custom-args/captures/leaking-iterators.check b/tests/neg-custom-args/captures/leaking-iterators.check new file mode 100644 index 000000000000..0481a9a4d9e2 --- /dev/null +++ b/tests/neg-custom-args/captures/leaking-iterators.check @@ -0,0 +1,4 @@ +-- Error: tests/neg-custom-args/captures/leaking-iterators.scala:56:2 -------------------------------------------------- +56 | usingLogFile: log => // error + | ^^^^^^^^^^^^ + | local reference log leaks into outer capture set of type parameter R of method usingLogFile diff --git a/tests/neg-custom-args/captures/leaking-iterators.scala b/tests/neg-custom-args/captures/leaking-iterators.scala new file mode 100644 index 000000000000..ad5958e91e5b --- /dev/null +++ b/tests/neg-custom-args/captures/leaking-iterators.scala @@ -0,0 +1,60 @@ +package cctest +import java.io.* + +trait IterableOnce[+A]: + //this: IterableOnce[A]^ => + def iterator: Iterator[A]^{this} + +trait Iterable[+A] extends IterableOnce[A]: + //this: IterableOnce[A]^ => + def iterator: Iterator[A]^{this} + +trait List[+A] extends Iterable[A]: + def head: A + def tail: List[A] + def length: Int + def foldLeft[B](z: B)(op: (B, A) => B): B + def foldRight[B](z: B)(op: (A, B) => B): B + def foreach(f: A => Unit): Unit + def iterator: Iterator[A] + def map[B](f: A => B): List[B] + def flatMap[B](f: A => IterableOnce[B]^): List[B] + def ++[B >: A](xs: IterableOnce[B]^): List[B] +object List: + def apply[A](xs: A*): List[A] = ??? 
+ +trait Iterator[+A] extends IterableOnce[A]: + this: Iterator[A]^ => + def hasNext: Boolean + def next(): A + def foldLeft[B](z: B)(op: (B, A) => B): B + def foldRight[B](z: B)(op: (A, B) => B): B + def foreach(f: A => Unit): Unit + + def map[B](f: A => B): Iterator[B]^{this, f} + def flatMap[B](f: A => IterableOnce[B]^): Iterator[B]^{this, f} + def ++[B >: A](xs: IterableOnce[B]^): Iterator[B]^{this, xs} +end Iterator + +private final class ConcatIteratorCell[A](head: => IterableOnce[A]^): + def headIterator: Iterator[A]^{this} = head.iterator + +def usingLogFile[R](op: FileOutputStream^ => R): R = + val logFile = FileOutputStream("log") + val result = op(logFile) + logFile.close() + result + +def test = + val xs = List(1, 2, 3) + + usingLogFile: log => + xs.map: x => + log.write(x) + x * x + + usingLogFile: log => // error + xs.iterator.map: x => + log.write(x) + x * x + diff --git a/tests/neg-custom-args/captures/levels.check b/tests/neg-custom-args/captures/levels.check new file mode 100644 index 000000000000..a5f8d73ccf7a --- /dev/null +++ b/tests/neg-custom-args/captures/levels.check @@ -0,0 +1,17 @@ +-- Error: tests/neg-custom-args/captures/levels.scala:17:13 ------------------------------------------------------------ +17 | val _ = Ref[String => String]((x: String) => x) // error + | ^^^^^^^^^^^^^^^^^^^^^ + | Sealed type variable T cannot be instantiated to box String => String since + | that type captures the root capability `cap`. + | This is often caused by a local capability in an argument of constructor Ref + | leaking as part of its result. +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/levels.scala:22:11 --------------------------------------- +22 | r.setV(g) // error + | ^ + | Found: box (x: String) ->{cap3} String + | Required: box (x$0: String) ->? String + | + | Note that reference (cap3 : CC^), defined in method scope + | cannot be included in outer capture set ? 
of value r which is associated with method test2 + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/levels.scala b/tests/neg-custom-args/captures/levels.scala new file mode 100644 index 000000000000..b28e87f03ef7 --- /dev/null +++ b/tests/neg-custom-args/captures/levels.scala @@ -0,0 +1,23 @@ +class CC + +def test1(cap1: CC^) = + + class Ref[T](init: T): + private var v: T = init + def setV(x: T): Unit = v = x + def getV: T = v + +def test2(cap1: CC^) = + + class Ref[T](init: T): + private var v: T = init + def setV(x: T): Unit = v = x + def getV: T = v + + val _ = Ref[String => String]((x: String) => x) // error + val r = Ref((x: String) => x) + + def scope(cap3: CC^) = + def g(x: String): String = if cap3 == cap3 then "" else "a" + r.setV(g) // error + () diff --git a/tests/neg-custom-args/captures/outer-var.check b/tests/neg-custom-args/captures/outer-var.check new file mode 100644 index 000000000000..c250280961d9 --- /dev/null +++ b/tests/neg-custom-args/captures/outer-var.check @@ -0,0 +1,44 @@ +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/outer-var.scala:11:8 ------------------------------------- +11 | x = q // error + | ^ + | Found: () ->{q} Unit + | Required: () ->{p, q²} Unit + | + | where: q is a parameter in method inner + | q² is a parameter in method test + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/outer-var.scala:12:9 ------------------------------------- +12 | x = (q: Proc) // error + | ^^^^^^^ + | Found: Proc + | Required: () ->{p, q} Unit + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/outer-var.scala:13:9 ------------------------------------- +13 | y = (q: Proc) // error + | ^^^^^^^ + | Found: Proc + | Required: () ->{p} Unit + | + | Note that the universal capability `cap` + | cannot be included in capture set {p} of variable y + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/outer-var.scala:14:8 ------------------------------------- +14 | y = q // error + | ^ + | Found: () ->{q} Unit + | Required: () ->{p} Unit + | + | Note that reference (q : Proc), defined in method inner + | cannot be included in outer capture set {p} of variable y which is associated with method test + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/outer-var.scala:16:53 --------------------------------------------------------- +16 | var finalizeActions = collection.mutable.ListBuffer[() => Unit]() // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Sealed type variable A cannot be instantiated to box () => Unit since + | that type captures the root capability `cap`. + | This is often caused by a local capability in an argument of method apply + | leaking as part of its result. 
diff --git a/tests/neg-custom-args/captures/outer-var.scala b/tests/neg-custom-args/captures/outer-var.scala new file mode 100644 index 000000000000..39c3a6da4ca3 --- /dev/null +++ b/tests/neg-custom-args/captures/outer-var.scala @@ -0,0 +1,18 @@ +class CC +type Cap = CC^ + +type Proc = () => Unit + +def test(p: Proc, q: () => Unit) = + var x: () ->{p, q} Unit = p + var y = p // OK, y has type () ->{p} Proc + + def inner(q: Proc) = + x = q // error + x = (q: Proc) // error + y = (q: Proc) // error + y = q // error + + var finalizeActions = collection.mutable.ListBuffer[() => Unit]() // error + + diff --git a/tests/neg-custom-args/captures/override-adapt-box-selftype.scala b/tests/neg-custom-args/captures/override-adapt-box-selftype.scala index f44add78e246..bf7e88d84213 100644 --- a/tests/neg-custom-args/captures/override-adapt-box-selftype.scala +++ b/tests/neg-custom-args/captures/override-adapt-box-selftype.scala @@ -15,7 +15,7 @@ object Test1 { } } -def Test2(io: IO^{cap}, fs: IO^{io}, ct: IO^) = { +def Test2(io: IO^, fs: IO^{io}, ct: IO^) = { abstract class A[X] { this: A[X]^{io} => def foo(x: X): X } diff --git a/tests/neg-custom-args/captures/override-adapt-box.scala b/tests/neg-custom-args/captures/override-adapt-box.scala index 70023dfbc941..9592c30a5025 100644 --- a/tests/neg-custom-args/captures/override-adapt-box.scala +++ b/tests/neg-custom-args/captures/override-adapt-box.scala @@ -7,7 +7,7 @@ abstract class A[X] { this: A[X]^{} => class IO class C -def test(io: IO^{cap}) = { +def test(io: IO^) = { class B extends A[C^{io}] { // X =:= {io} C // error override def foo(x: C^{io}): C^{io} = ??? } diff --git a/tests/neg-custom-args/captures/override-boxed.scala b/tests/neg-custom-args/captures/override-boxed.scala index d66d28d15aaa..cf1c49cef5a6 100644 --- a/tests/neg-custom-args/captures/override-boxed.scala +++ b/tests/neg-custom-args/captures/override-boxed.scala @@ -1,7 +1,7 @@ class A -def test(x: Any^{cap}) = +def test(x: Any^) = abstract class Getter: def get(): A^{x} class PolyGetter[T <: A^{x}] extends Getter: diff --git a/tests/neg-custom-args/captures/reaches.check b/tests/neg-custom-args/captures/reaches.check new file mode 100644 index 000000000000..ee628a212ce7 --- /dev/null +++ b/tests/neg-custom-args/captures/reaches.check @@ -0,0 +1,43 @@ +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:21:11 -------------------------------------- +21 | cur = (() => f.write()) :: Nil // error since {f*} !<: {xs*} + | ^^^^^^^^^^^^^^^^^^^^^^^ + | Found: List[box () ->{f} Unit] + | Required: List[box () ->{xs*} Unit] + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:32:7 --------------------------------------- +32 | (() => f.write()) :: Nil // error since {f*} !<: {xs*} + | ^^^^^^^^^^^^^^^^^^^^^^^ + | Found: List[box () ->{f} Unit] + | Required: box List[box () ->{xs*} Unit]^? + | + | Note that reference (f : File^), defined in method $anonfun + | cannot be included in outer capture set {xs*} of value cur which is associated with method runAll1 + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/reaches.scala:35:6 ------------------------------------------------------------ +35 | var cur: List[Proc] = xs // error: Illegal type for var + | ^ + | Mutable variable cur cannot have type List[box () => Unit] since + | the part box () => Unit of that type captures the root capability `cap`. 
+-- Error: tests/neg-custom-args/captures/reaches.scala:42:15 ----------------------------------------------------------- +42 | val cur = Ref[List[Proc]](xs) // error: illegal type for type argument to Ref + | ^^^^^^^^^^^^^^^ + | Sealed type variable T cannot be instantiated to List[box () => Unit] since + | the part box () => Unit of that type captures the root capability `cap`. + | This is often caused by a local capability in an argument of constructor Ref + | leaking as part of its result. +-- Error: tests/neg-custom-args/captures/reaches.scala:52:31 ----------------------------------------------------------- +52 | val id: Id[Proc, Proc] = new Id[Proc, () -> Unit] // error + | ^^^^^^^^^^^^^^^^^^^^ + | Sealed type variable A cannot be instantiated to box () => Unit since + | that type captures the root capability `cap`. + | This is often caused by a local capability in an argument of constructor Id + | leaking as part of its result. +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:60:27 -------------------------------------- +60 | val f1: File^{id*} = id(f) // error + | ^^^^^ + | Found: File^{id, f} + | Required: File^{id*} + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/reaches.scala b/tests/neg-custom-args/captures/reaches.scala new file mode 100644 index 000000000000..8ba7f60d6335 --- /dev/null +++ b/tests/neg-custom-args/captures/reaches.scala @@ -0,0 +1,61 @@ +class File: + def write(): Unit = ??? + +def usingFile[T](f: File^ => T): T = ??? + +type Proc = () => Unit + +class Ref[T](init: T): + private var x: T = init + def get: T = x + def set(y: T) = { x = y } + +def runAll0(xs: List[Proc]): Unit = + var cur: List[() ->{xs*} Unit] = xs // OK, by revised VAR + while cur.nonEmpty do + val next: () ->{xs*} Unit = cur.head + next() + cur = cur.tail: List[() ->{xs*} Unit] + + usingFile: f => + cur = (() => f.write()) :: Nil // error since {f*} !<: {xs*} + +def runAll1(xs: List[Proc]): Unit = + val cur = Ref[List[() ->{xs*} Unit]](xs) // OK, by revised VAR + while cur.get.nonEmpty do + val next: () ->{xs*} Unit = cur.get.head + next() + cur.set(cur.get.tail: List[() ->{xs*} Unit]) + + usingFile: f => + cur.set: + (() => f.write()) :: Nil // error since {f*} !<: {xs*} + +def runAll2(xs: List[Proc]): Unit = + var cur: List[Proc] = xs // error: Illegal type for var + while cur.nonEmpty do + val next: () => Unit = cur.head + next() + cur = cur.tail + +def runAll3(xs: List[Proc]): Unit = + val cur = Ref[List[Proc]](xs) // error: illegal type for type argument to Ref + while cur.get.nonEmpty do + val next: () => Unit = cur.get.head + next() + cur.set(cur.get.tail: List[Proc]) + +class Id[-A, +B >: A](): + def apply(a: A): B = a + +def test = + val id: Id[Proc, Proc] = new Id[Proc, () -> Unit] // error + usingFile: f => + id(() => f.write()) // escape, if it was not for the error above + +def attack2 = + val id: File^ -> File^ = x => x + + val leaked = usingFile[File^{id*}]: f => + val f1: File^{id*} = id(f) // error + f1 diff --git a/tests/neg-custom-args/captures/real-try.check b/tests/neg-custom-args/captures/real-try.check index c8df3777bcfa..50dcc16f5f54 100644 --- a/tests/neg-custom-args/captures/real-try.check +++ b/tests/neg-custom-args/captures/real-try.check @@ -1,36 +1,46 @@ --- [E129] Potential Issue Warning: tests/neg-custom-args/captures/real-try.scala:30:4 ---------------------------------- -30 | b.x +-- [E190] Potential Issue Warning: tests/neg-custom-args/captures/real-try.scala:36:4 
---------------------------------- +36 | b.x | ^^^ - | A pure expression does nothing in statement position; you may be omitting necessary parentheses + | Discarded non-Unit value of type () -> Unit. You may want to use `()`. | | longer explanation available when compiling with `-explain` -- Error: tests/neg-custom-args/captures/real-try.scala:12:2 ----------------------------------------------------------- 12 | try // error | ^ - | Result of `try` cannot have type () => Unit since + | result of `try` cannot have type () => Unit since | that type captures the root capability `cap`. | This is often caused by a locally generated exception capability leaking as part of its result. 13 | () => foo(1) 14 | catch 15 | case _: Ex1 => ??? 16 | case _: Ex2 => ??? --- Error: tests/neg-custom-args/captures/real-try.scala:18:2 ----------------------------------------------------------- -18 | try // error - | ^ - | Result of `try` cannot have type () => Cell[Unit]^? since - | that type captures the root capability `cap`. - | This is often caused by a locally generated exception capability leaking as part of its result. -19 | () => Cell(foo(1)) +-- Error: tests/neg-custom-args/captures/real-try.scala:18:10 ---------------------------------------------------------- +18 | val x = try // error + | ^ + | result of `try` cannot have type () => Unit since + | that type captures the root capability `cap`. + | This is often caused by a locally generated exception capability leaking as part of its result. +19 | () => foo(1) 20 | catch 21 | case _: Ex1 => ??? 22 | case _: Ex2 => ??? -- Error: tests/neg-custom-args/captures/real-try.scala:24:10 ---------------------------------------------------------- -24 | val b = try // error +24 | val y = try // error | ^ - | Result of `try` cannot have type Cell[box () => Unit]^? since - | the part box () => Unit of that type captures the root capability `cap`. + | result of `try` cannot have type () => Cell[Unit]^? since + | that type captures the root capability `cap`. | This is often caused by a locally generated exception capability leaking as part of its result. -25 | Cell(() => foo(1))//: Cell[box {ev} () => Unit] <: Cell[box {cap} () => Unit] +25 | () => Cell(foo(1)) 26 | catch 27 | case _: Ex1 => ??? 28 | case _: Ex2 => ??? +-- Error: tests/neg-custom-args/captures/real-try.scala:30:10 ---------------------------------------------------------- +30 | val b = try // error + | ^ + | result of `try` cannot have type Cell[box () => Unit]^? since + | the part box () => Unit of that type captures the root capability `cap`. + | This is often caused by a locally generated exception capability leaking as part of its result. +31 | Cell(() => foo(1)) +32 | catch +33 | case _: Ex1 => ??? +34 | case _: Ex2 => ??? diff --git a/tests/neg-custom-args/captures/real-try.scala b/tests/neg-custom-args/captures/real-try.scala index a826fdaa4af7..23961e884ea3 100644 --- a/tests/neg-custom-args/captures/real-try.scala +++ b/tests/neg-custom-args/captures/real-try.scala @@ -15,14 +15,20 @@ def test(): Unit = case _: Ex1 => ??? case _: Ex2 => ??? - try // error + val x = try // error + () => foo(1) + catch + case _: Ex1 => ??? + case _: Ex2 => ??? + + val y = try // error () => Cell(foo(1)) catch case _: Ex1 => ??? case _: Ex2 => ??? - val b = try // error - Cell(() => foo(1))//: Cell[box {ev} () => Unit] <: Cell[box {cap} () => Unit] + val b = try // error + Cell(() => foo(1)) catch case _: Ex1 => ??? case _: Ex2 => ??? 
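The reworked real-try expectations above all pin down one rule: the result of a `try` must not retain the `CanThrow` capability that the `try` installs, since delaying the throwing call past the handler would defeat the handler. A minimal sketch of both the accepted and the rejected shape, assuming the experimental `captureChecking` and `saferExceptions` imports; `Ex1` and `foo` are illustrative stand-ins for the test's definitions:

```scala
import language.experimental.captureChecking
import language.experimental.saferExceptions

class Ex1 extends Exception

def foo(x: Int): Int throws Ex1 =
  if x < 0 then throw Ex1() else x

def test(): Unit =
  // OK: the capability installed by the try is consumed before it ends.
  val ok = try foo(1) catch case _: Ex1 => -1

  // Rejected: the thunk retains the try's local CanThrow[Ex1] capability,
  // so the try's result type () => Int would capture the root capability `cap`.
  val bad =
    try () => foo(1) // error
    catch case _: Ex1 => ???
```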
diff --git a/tests/neg-custom-args/captures/recursive-leaking-local-cap.scala b/tests/neg-custom-args/captures/recursive-leaking-local-cap.scala new file mode 100644 index 000000000000..5cf3f8b29c2b --- /dev/null +++ b/tests/neg-custom-args/captures/recursive-leaking-local-cap.scala @@ -0,0 +1,19 @@ +import language.experimental.captureChecking +trait Cap: + def use: Int = 42 + +def usingCap[T](op: Cap^ => T): T = ??? + +def badTest(): Unit = + def bad(b: Boolean)(c: Cap^): Cap^{c} = + if b then c + else + val leaked = usingCap(bad(true)) // error + leaked.use // boom + c + + usingCap[Unit]: c0 => + bad(false)(c0) + + + diff --git a/tests/neg-custom-args/captures/refine-reach-shallow.scala b/tests/neg-custom-args/captures/refine-reach-shallow.scala new file mode 100644 index 000000000000..9f4b28ce52e3 --- /dev/null +++ b/tests/neg-custom-args/captures/refine-reach-shallow.scala @@ -0,0 +1,18 @@ +import language.experimental.captureChecking +trait IO +def test1(): Unit = + val f: IO^ => IO^ = x => x + val g: IO^ => IO^{f*} = f // error +def test2(): Unit = + val f: [R] -> (IO^ => R) -> R = ??? + val g: [R] -> (IO^{f*} => R) -> R = f // error +def test3(): Unit = + val f: [R] -> (IO^ -> R) -> R = ??? + val g: [R] -> (IO^{f*} -> R) -> R = f // error +def test4(): Unit = + val xs: List[IO^] = ??? + val ys: List[IO^{xs*}] = xs // ok +def test5(): Unit = + val f: [R] -> (IO^ -> R) -> IO^ = ??? + val g: [R] -> (IO^ -> R) -> IO^{f*} = f // ok + val h: [R] -> (IO^{f*} -> R) -> IO^ = f // error diff --git a/tests/neg-custom-args/captures/refine-withFile.scala b/tests/neg-custom-args/captures/refine-withFile.scala new file mode 100644 index 000000000000..823b62711d05 --- /dev/null +++ b/tests/neg-custom-args/captures/refine-withFile.scala @@ -0,0 +1,8 @@ +import language.experimental.captureChecking + +trait File +val useFile: [R] -> (path: String) -> (op: File^ -> R) -> R = ??? 
+def main(): Unit = + val f: [R] -> (path: String) -> (op: File^ -> R) -> R = useFile + val g: [R] -> (path: String) -> (op: File^{f*} -> R) -> R = f // error + val leaked = g[File^{f*}]("test")(f => f) // boom diff --git a/tests/neg-custom-args/captures/refs.scala.disabled b/tests/neg-custom-args/captures/refs.scala.disabled new file mode 100644 index 000000000000..b20e8029b65e --- /dev/null +++ b/tests/neg-custom-args/captures/refs.scala.disabled @@ -0,0 +1,55 @@ +import java.io.* + +type Proc = () => Unit + +class Ref[T](init: T): + var x: T = init + def setX(x: T): Unit = this.x = x + +class MonoRef(init: Proc): + type MonoProc = Proc + var x: MonoProc = init + def setX(x: MonoProc): Unit = this.x = x + +def usingLogFile[T](op: (local: caps.Cap) ?-> FileOutputStream^{local} => T): T = + val logFile = FileOutputStream("log") + val result = op(logFile) + logFile.close() + result + +def test1 = + usingLogFile[Proc]: (local: caps.Cap) ?=> + (f: FileOutputStream^{local}) => + () => f.write(1) // error (but with a hard to parse error message) + // this line has type () ->{local} Unit, but usingLogFile + // requires Proc, which expands to () -> 'cap[..test1](from instantiating usingLogFile) + +def test2 = + val r = new Ref[Proc](() => ()) + usingLogFile: f => + r.setX(() => f.write(10)) // error + r.x() // crash: f is closed at that point + val mr = new MonoRef(() => ()) + usingLogFile[Unit]: f => + mr.setX(() => f.write(10)) // error + +def test3 = + val r = new Ref[Proc](() => ()) + usingLogFile[Unit]: f => + r.x = () => f.write(10) // error + r.x() // crash: f is closed at that point + val mr = MonoRef(() => ()) + usingLogFile: f => + mr.x = () => f.write(10) // error + +def test4 = + var r: Proc = () => () + usingLogFile[Unit]: f => + r = () => f.write(10) // error + r() // crash: f is closed at that point + + + + + + diff --git a/tests/neg-custom-args/captures/sealed-leaks.scala b/tests/neg-custom-args/captures/sealed-leaks.scala deleted file mode 100644 index bf46b52194c1..000000000000 --- a/tests/neg-custom-args/captures/sealed-leaks.scala +++ /dev/null @@ -1,20 +0,0 @@ - -import java.io.* -def Test2 = - - def usingLogFile[sealed T](op: FileOutputStream^ => T): T = - val logFile = FileOutputStream("log") - val result = op(logFile) - logFile.close() - result - - val later = usingLogFile { f => () => f.write(0) } // error - val later2 = usingLogFile[(() => Unit) | Null] { f => () => f.write(0) } // error - - var x: (FileOutputStream^) | Null = null // error - def foo(f: FileOutputStream^, g: FileOutputStream^) = - var y = if ??? 
then f else g // error - - usingLogFile { f => x = f } - - later() \ No newline at end of file diff --git a/tests/neg-custom-args/captures/selftype.scala b/tests/neg-custom-args/captures/selftype.scala index 21148f625a7a..f5d24fd2381f 100644 --- a/tests/neg-custom-args/captures/selftype.scala +++ b/tests/neg-custom-args/captures/selftype.scala @@ -1,4 +1,4 @@ -@annotation.experimental class C(x: () => Unit) extends caps.Pure // error +@annotation.experimental class C(x: () => Unit) extends Pure // error -@annotation.experimental class D(@annotation.constructorOnly x: () => Unit) extends caps.Pure // ok +@annotation.experimental class D(@annotation.constructorOnly x: () => Unit) extends Pure // ok diff --git a/tests/neg-custom-args/captures/simple-using.check b/tests/neg-custom-args/captures/simple-using.check new file mode 100644 index 000000000000..2df7c70e0540 --- /dev/null +++ b/tests/neg-custom-args/captures/simple-using.check @@ -0,0 +1,4 @@ +-- Error: tests/neg-custom-args/captures/simple-using.scala:8:2 -------------------------------------------------------- +8 | usingLogFile { f => () => f.write(2) } // error + | ^^^^^^^^^^^^ + | local reference f leaks into outer capture set of type parameter T of method usingLogFile diff --git a/tests/neg-custom-args/captures/simple-using.scala b/tests/neg-custom-args/captures/simple-using.scala new file mode 100644 index 000000000000..16350eecc8c5 --- /dev/null +++ b/tests/neg-custom-args/captures/simple-using.scala @@ -0,0 +1,8 @@ +import java.io.* +def usingLogFile[T](op: FileOutputStream^ => T): T = + val logFile = FileOutputStream("log") + val result = op(logFile) + logFile.close() + result +def test() = + usingLogFile { f => () => f.write(2) } // error diff --git a/tests/neg-custom-args/captures/singletons.scala b/tests/neg-custom-args/captures/singletons.scala new file mode 100644 index 000000000000..194e6e850dcd --- /dev/null +++ b/tests/neg-custom-args/captures/singletons.scala @@ -0,0 +1,6 @@ +val x = () => () + +val y1: x.type = x // ok +val y2: x.type^{} = x // error: singleton type cannot have capture set +val y3: x.type^{x} = x // error: singleton type cannot have capture set // error +val y4: x.type^ = x // error: singleton type cannot have capture set diff --git a/tests/neg-custom-args/captures/stack-alloc.scala b/tests/neg-custom-args/captures/stack-alloc.scala index 71b544dbe88d..befafbf13003 100644 --- a/tests/neg-custom-args/captures/stack-alloc.scala +++ b/tests/neg-custom-args/captures/stack-alloc.scala @@ -5,7 +5,7 @@ class Pooled val stack = mutable.ArrayBuffer[Pooled]() var nextFree = 0 -def withFreshPooled[sealed T](op: Pooled^ => T): T = +def withFreshPooled[T](op: (lcap: caps.Cap) ?-> Pooled^{lcap} => T): T = if nextFree >= stack.size then stack.append(new Pooled) val pooled = stack(nextFree) nextFree = nextFree + 1 diff --git a/tests/neg-custom-args/captures/try.check b/tests/neg-custom-args/captures/try.check index 4af370bfba1a..3b96927de738 100644 --- a/tests/neg-custom-args/captures/try.check +++ b/tests/neg-custom-args/captures/try.check @@ -1,29 +1,28 @@ -- Error: tests/neg-custom-args/captures/try.scala:23:16 --------------------------------------------------------------- -23 | val a = handle[Exception, CanThrow[Exception]] { // error +23 | val a = handle[Exception, CanThrow[Exception]] { // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Sealed type variable R cannot be instantiated to box CT[Exception]^ since + | Sealed type variable R cannot be instantiated to box CT[Exception]^ since | that type captures 
the root capability `cap`. - | This is often caused by a local capability in the body of method handle + | This is often caused by a local capability in an argument of method handle | leaking as part of its result. --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/try.scala:29:43 ------------------------------------------ -29 | val b = handle[Exception, () -> Nothing] { // error - | ^ - | Found: (x: CT[Exception]^) ->? () ->{x} Nothing - | Required: (x$0: CanThrow[Exception]) => () -> Nothing -30 | (x: CanThrow[Exception]) => () => raise(new Exception)(using x) -31 | } { - | - | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/try.scala:30:65 --------------------------------------------------------------- +30 | (x: CanThrow[Exception]) => () => raise(new Exception)(using x) // error + | ^ + | (x : CanThrow[Exception]) cannot be referenced here; it is not included in the allowed capture set {} + | of an enclosing function literal with expected type () ->? Nothing -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/try.scala:52:2 ------------------------------------------- 47 |val global: () -> Int = handle { 48 | (x: CanThrow[Exception]) => 49 | () => 50 | raise(new Exception)(using x) 51 | 22 -52 |} { // error +52 |} { // error | ^ - | Found: () ->{x$0} Int + | Found: () ->{x, x²} Int | Required: () -> Int + | + | where: x is a reference to a value parameter + | x² is a reference to a value parameter 53 | (ex: Exception) => () => 22 54 |} | @@ -31,7 +30,4 @@ -- Error: tests/neg-custom-args/captures/try.scala:35:11 --------------------------------------------------------------- 35 | val xx = handle { // error | ^^^^^^ - | Sealed type variable R cannot be instantiated to box () => Int since - | that type captures the root capability `cap`. - | This is often caused by a local capability in the body of method handle - | leaking as part of its result. + | local reference x leaks into outer capture set of type parameter R of method handle diff --git a/tests/neg-custom-args/captures/try.scala b/tests/neg-custom-args/captures/try.scala index 3c6f0605d8b9..3d25dff4cd2c 100644 --- a/tests/neg-custom-args/captures/try.scala +++ b/tests/neg-custom-args/captures/try.scala @@ -14,20 +14,20 @@ def raise[E <: Exception](e: E): Nothing throws E = throw e def foo(x: Boolean): Int throws Fail = if x then 1 else raise(Fail()) -def handle[E <: Exception, sealed R <: Top](op: CanThrow[E] => R)(handler: E => R): R = - val x: CanThrow[E] = ??? +def handle[E <: Exception, R <: Top](op: CT[E]^ => R)(handler: E => R): R = + val x: CT[E] = ??? try op(x) catch case ex: E => handler(ex) def test = - val a = handle[Exception, CanThrow[Exception]] { // error + val a = handle[Exception, CanThrow[Exception]] { // error (x: CanThrow[Exception]) => x }{ (ex: Exception) => ??? } - val b = handle[Exception, () -> Nothing] { // error - (x: CanThrow[Exception]) => () => raise(new Exception)(using x) + val b = handle[Exception, () -> Nothing] { + (x: CanThrow[Exception]) => () => raise(new Exception)(using x) // error } { (ex: Exception) => ??? 
} @@ -49,6 +49,6 @@ val global: () -> Int = handle { () => raise(new Exception)(using x) 22 -} { // error +} { // error (ex: Exception) => () => 22 } diff --git a/tests/neg-custom-args/captures/try3.scala b/tests/neg-custom-args/captures/try3.scala index 4c6835353c3f..004cda6a399c 100644 --- a/tests/neg-custom-args/captures/try3.scala +++ b/tests/neg-custom-args/captures/try3.scala @@ -4,9 +4,9 @@ class CT[E] type CanThrow[E] = CT[E]^ type Top = Any^ -def handle[E <: Exception, sealed T <: Top](op: CanThrow[E] ?=> T)(handler: E => T): T = - val x: CanThrow[E] = ??? - try op(using x) +def handle[E <: Exception, T <: Top](op: (lcap: caps.Cap) ?-> CT[E]^{lcap} ?=> T)(handler: E => T): T = + val x: CT[E] = ??? + try op(using caps.cap)(using x) catch case ex: E => handler(ex) def raise[E <: Exception](ex: E)(using CanThrow[E]): Nothing = @@ -14,7 +14,8 @@ def raise[E <: Exception](ex: E)(using CanThrow[E]): Nothing = @main def Test: Int = def f(a: Boolean) = - handle { // error + handle { // error: implementation restriction: curried dependent CFT not supported + // should work but give capture error if !a then raise(IOException()) (b: Boolean) => if !b then raise(IOException()) diff --git a/tests/neg-custom-args/captures/usingFile.scala b/tests/neg-custom-args/captures/usingFile.scala new file mode 100644 index 000000000000..3927be5ff506 --- /dev/null +++ b/tests/neg-custom-args/captures/usingFile.scala @@ -0,0 +1,23 @@ +// Reported in issue #17517 + +import language.experimental.captureChecking +import java.io.* + +object Test: + class Logger(f: OutputStream^): + def log(msg: String): Unit = ??? + + def usingFile[T](name: String, op: OutputStream^ => T): T = + val f = new FileOutputStream(name) + val result = op(f) + f.close() + result + + def usingLogger[T](f: OutputStream^)(op: Logger^{f} => T): T = ??? + + usingFile( // error + "foo", + file => { + usingLogger(file)(l => () => l.log("test")) + } + ) diff --git a/tests/neg-custom-args/captures/usingLogFile-alt.check b/tests/neg-custom-args/captures/usingLogFile-alt.check index 31e97b7dfda1..35276914f7b2 100644 --- a/tests/neg-custom-args/captures/usingLogFile-alt.check +++ b/tests/neg-custom-args/captures/usingLogFile-alt.check @@ -1,7 +1,10 @@ -- Error: tests/neg-custom-args/captures/usingLogFile-alt.scala:18:2 --------------------------------------------------- -18 | usingFile( // error +18 | usingFile( // error | ^^^^^^^^^ - | Sealed type variable T cannot be instantiated to box () => Unit since - | that type captures the root capability `cap`. - | This is often caused by a local capability in the body of method usingFile - | leaking as part of its result. + | reference (file : java.io.OutputStream^{lcap}) is not included in the allowed capture set {x$0, x$0²} + | + | Note that reference (file : java.io.OutputStream^{lcap}), defined in method $anonfun + | cannot be included in outer capture set {x$0, x$0} which is associated with package + | + | where: x$0 is a reference to a value parameter + | x$0² is a reference to a value parameter diff --git a/tests/neg-custom-args/captures/usingLogFile-alt.scala b/tests/neg-custom-args/captures/usingLogFile-alt.scala deleted file mode 100644 index 6b529ee6f892..000000000000 --- a/tests/neg-custom-args/captures/usingLogFile-alt.scala +++ /dev/null @@ -1,23 +0,0 @@ -// Reported in issue #17517 - -import language.experimental.captureChecking -import java.io.* - -object Test: - class Logger(f: OutputStream^): - def log(msg: String): Unit = ??? 
- - def usingFile[sealed T](name: String, op: OutputStream^ => T): T = - val f = new FileOutputStream(name) - val result = op(f) - f.close() - result - - def usingLogger[sealed T](f: OutputStream^)(op: Logger^{f} => T): T = ??? - - usingFile( // error - "foo", - file => { - usingLogger(file)(l => () => l.log("test")) - } - ) diff --git a/tests/neg-custom-args/captures/usingLogFile.check b/tests/neg-custom-args/captures/usingLogFile.check index d3bc9082202c..476bf99ef0ef 100644 --- a/tests/neg-custom-args/captures/usingLogFile.check +++ b/tests/neg-custom-args/captures/usingLogFile.check @@ -1,47 +1,16 @@ --- Error: tests/neg-custom-args/captures/usingLogFile.scala:31:6 ------------------------------------------------------- -31 | var later3: () => Unit = () => () // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Mutable variable later3 cannot have type box () => Unit since - | that type captures the root capability `cap`. - | This restriction serves to prevent local capabilities from escaping the scope where they are defined. --- Error: tests/neg-custom-args/captures/usingLogFile.scala:35:6 ------------------------------------------------------- -35 | var later4: Cell[() => Unit] = Cell(() => ()) // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Mutable variable later4 cannot have type Test2.Cell[() => Unit] since - | the part () => Unit of that type captures the root capability `cap`. - | This restriction serves to prevent local capabilities from escaping the scope where they are defined. -- Error: tests/neg-custom-args/captures/usingLogFile.scala:23:14 ------------------------------------------------------ 23 | val later = usingLogFile { f => () => f.write(0) } // error | ^^^^^^^^^^^^ - | Sealed type variable T cannot be instantiated to box () => Unit since - | that type captures the root capability `cap`. - | This is often caused by a local capability in the body of method usingLogFile - | leaking as part of its result. + | local reference f leaks into outer capture set of type parameter T of method usingLogFile -- Error: tests/neg-custom-args/captures/usingLogFile.scala:28:23 ------------------------------------------------------ 28 | private val later2 = usingLogFile { f => Cell(() => f.write(0)) } // error | ^^^^^^^^^^^^ - | Sealed type variable T cannot be instantiated to box Test2.Cell[() => Unit]^? since - | the part () => Unit of that type captures the root capability `cap`. - | This is often caused by a local capability in the body of method usingLogFile - | leaking as part of its result. --- Error: tests/neg-custom-args/captures/usingLogFile.scala:47:6 ------------------------------------------------------- -47 | val later = usingLogFile { f => () => f.write(0) } // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Non-local value later cannot have an inferred type - | () => Unit - | with non-empty capture set {x$0, cap}. - | The type needs to be declared explicitly. 
--- Error: tests/neg-custom-args/captures/usingLogFile.scala:62:16 ------------------------------------------------------ -62 | val later = usingFile("out", f => (y: Int) => xs.foreach(x => f.write(x + y))) // error + | local reference f leaks into outer capture set of type parameter T of method usingLogFile +-- Error: tests/neg-custom-args/captures/usingLogFile.scala:44:16 ------------------------------------------------------ +44 | val later = usingFile("out", f => (y: Int) => xs.foreach(x => f.write(x + y))) // error | ^^^^^^^^^ - | Sealed type variable T cannot be instantiated to box (x$0: Int) => Unit since - | that type captures the root capability `cap`. - | This is often caused by a local capability in the body of method usingFile - | leaking as part of its result. --- Error: tests/neg-custom-args/captures/usingLogFile.scala:71:16 ------------------------------------------------------ -71 | val later = usingFile("logfile", // error + | local reference f leaks into outer capture set of type parameter T of method usingFile +-- Error: tests/neg-custom-args/captures/usingLogFile.scala:52:16 ------------------------------------------------------ +52 | val later = usingFile("logfile", // error !!! but should be ok, since we can widen `l` to `file` instead of to `cap` | ^^^^^^^^^ - | Sealed type variable T cannot be instantiated to box () => Unit since - | that type captures the root capability `cap`. - | This is often caused by a local capability in the body of method usingFile - | leaking as part of its result. + | local reference l leaks into outer capture set of type parameter T of method usingFile diff --git a/tests/neg-custom-args/captures/usingLogFile.scala b/tests/neg-custom-args/captures/usingLogFile.scala index e7c23573ca6e..e5dc271975b0 100644 --- a/tests/neg-custom-args/captures/usingLogFile.scala +++ b/tests/neg-custom-args/captures/usingLogFile.scala @@ -3,18 +3,18 @@ import annotation.capability object Test1: - def usingLogFile[sealed T](op: FileOutputStream => T): T = + def usingLogFile[T](op: (local: caps.Cap) ?-> FileOutputStream => T): T = val logFile = FileOutputStream("log") val result = op(logFile) logFile.close() result - val later = usingLogFile { f => () => f.write(0) } + private val later = usingLogFile { f => () => f.write(0) } // OK, `f` has global lifetime later() object Test2: - def usingLogFile[sealed T](op: FileOutputStream^ => T): T = + def usingLogFile[T](op: FileOutputStream^ => T): T = val logFile = FileOutputStream("log") val result = op(logFile) logFile.close() @@ -28,29 +28,11 @@ object Test2: private val later2 = usingLogFile { f => Cell(() => f.write(0)) } // error later2.x() - var later3: () => Unit = () => () // error - usingLogFile { f => later3 = () => f.write(0) } - later3() - - var later4: Cell[() => Unit] = Cell(() => ()) // error - usingLogFile { f => later4 = Cell(() => f.write(0)) } - later4.x() - object Test3: - - def usingLogFile[sealed T](op: FileOutputStream^ => T) = - val logFile = FileOutputStream("log") - val result = op(logFile) - logFile.close() - result - - val later = usingLogFile { f => () => f.write(0) } // error - -object Test4: class Logger(f: OutputStream^): def log(msg: String): Unit = ??? 
- def usingFile[sealed T](name: String, op: OutputStream^ => T): T = + def usingFile[T](name: String, op: OutputStream^ => T): T = val f = new FileOutputStream(name) val result = op(f) f.close() @@ -62,12 +44,11 @@ object Test4: val later = usingFile("out", f => (y: Int) => xs.foreach(x => f.write(x + y))) // error later(1) - - def usingLogger[sealed T](f: OutputStream^, op: Logger^{f} => T): T = + def usingLogger[T](f: OutputStream^, op: Logger^{f} => T): T = val logger = Logger(f) op(logger) def test = - val later = usingFile("logfile", // error - usingLogger(_, l => () => l.log("test"))) // ok, since we can widen `l` to `file` instead of to `cap` + val later = usingFile("logfile", // error !!! but should be ok, since we can widen `l` to `file` instead of to `cap` + usingLogger(_, l => () => l.log("test"))) later() diff --git a/tests/neg-custom-args/captures/vars-simple.check b/tests/neg-custom-args/captures/vars-simple.check new file mode 100644 index 000000000000..2bc014e9a4e7 --- /dev/null +++ b/tests/neg-custom-args/captures/vars-simple.check @@ -0,0 +1,21 @@ +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars-simple.scala:15:9 ----------------------------------- +15 | a = (g: String => String) // error + | ^^^^^^^^^^^^^^^^^^^ + | Found: String => String + | Required: String ->{cap1, cap2} String + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars-simple.scala:16:8 ----------------------------------- +16 | a = g // error + | ^ + | Found: (x: String) ->{cap3} String + | Required: (x: String) ->{cap1, cap2} String + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars-simple.scala:17:12 ---------------------------------- +17 | b = List(g) // error + | ^^^^^^^ + | Found: List[box (x$0: String) ->{cap3} String] + | Required: List[box String ->{cap1, cap2} String] + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/vars-simple.scala b/tests/neg-custom-args/captures/vars-simple.scala new file mode 100644 index 000000000000..161d74591e7b --- /dev/null +++ b/tests/neg-custom-args/captures/vars-simple.scala @@ -0,0 +1,18 @@ +class CC +type Cap = CC^ + +def test(cap1: Cap, cap2: Cap) = + var a: String ->{cap1, cap2} String = ??? + var b: List[String ->{cap1, cap2} String] = Nil + def f(x: String): String = if cap1 == cap1 then "" else "a" + a = f // ok + val x = List(f) + b = x // ok + b = List(f) // ok + + def scope(cap3: Cap) = + def g(x: String): String = if cap3 == cap3 then "" else "a" + a = (g: String => String) // error + a = g // error + b = List(g) // error + diff --git a/tests/neg-custom-args/captures/vars.check b/tests/neg-custom-args/captures/vars.check index e7055c810bb0..22d13d8e26e7 100644 --- a/tests/neg-custom-args/captures/vars.check +++ b/tests/neg-custom-args/captures/vars.check @@ -1,26 +1,28 @@ --- Error: tests/neg-custom-args/captures/vars.scala:13:6 --------------------------------------------------------------- -13 | var a: String => String = f // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Mutable variable a cannot have type box String => String since - | that type captures the root capability `cap`. - | This restriction serves to prevent local capabilities from escaping the scope where they are defined. 
--- Error: tests/neg-custom-args/captures/vars.scala:14:6 --------------------------------------------------------------- -14 | var b: List[String => String] = Nil // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Mutable variable b cannot have type List[String => String] since - | the part String => String of that type captures the root capability `cap`. - | This restriction serves to prevent local capabilities from escaping the scope where they are defined. --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars.scala:11:24 ----------------------------------------- -11 | val z2c: () -> Unit = z2 // error - | ^^ - | Found: () ->{z2} Unit - | Required: () -> Unit +-- Error: tests/neg-custom-args/captures/vars.scala:22:14 -------------------------------------------------------------- +22 | a = x => g(x) // error + | ^^^^ + | reference (cap3 : Cap) is not included in the allowed capture set {cap1} of variable a + | + | Note that reference (cap3 : Cap), defined in method scope + | cannot be included in outer capture set {cap1} of variable a which is associated with method test +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars.scala:23:8 ------------------------------------------ +23 | a = g // error + | ^ + | Found: (x: String) ->{cap3} String + | Required: (x$0: String) ->{cap1} String + | + | Note that reference (cap3 : Cap), defined in method scope + | cannot be included in outer capture set {cap1} of variable a which is associated with method test + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars.scala:25:12 ----------------------------------------- +25 | b = List(g) // error + | ^^^^^^^ + | Found: List[box (x$0: String) ->{cap3} String] + | Required: List[box String ->{cap1, cap2} String] | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/vars.scala:32:2 --------------------------------------------------------------- -32 | local { cap3 => // error +-- Error: tests/neg-custom-args/captures/vars.scala:34:2 --------------------------------------------------------------- +34 | local { cap3 => // error | ^^^^^ - | Sealed type variable T cannot be instantiated to box (x$0: String) => String since - | that type captures the root capability `cap`. - | This is often caused by a local capability in the body of method local - | leaking as part of its result. 
+ | local reference cap3 leaks into outer capture set of type parameter T of method local diff --git a/tests/neg-custom-args/captures/vars.scala b/tests/neg-custom-args/captures/vars.scala index b7761952167e..ab5a2f43acc7 100644 --- a/tests/neg-custom-args/captures/vars.scala +++ b/tests/neg-custom-args/captures/vars.scala @@ -8,26 +8,28 @@ def test(cap1: Cap, cap2: Cap) = val z = () => if x("") == "" then "a" else "b" val zc: () ->{cap1} String = z val z2 = () => { x = identity } - val z2c: () -> Unit = z2 // error + val z2c: () -> Unit = z2 + var a = f - var a: String => String = f // error - var b: List[String => String] = Nil // error - val u = a // was error, now ok - a("") // was error, now ok - b.head // was error, now ok + var b: List[String ->{cap1, cap2} String] = Nil + val u = a + a("") + b.head - def scope = - val cap3: Cap = CC() + def scope(cap3: Cap) = def g(x: String): String = if cap3 == cap3 then "" else "a" - a = g - b = List(g) + def h(): String = "" + a = x => g(x) // error + a = g // error + + b = List(g) // error val gc = g g - val s = scope - val sc: String => String = scope + val s = scope(new CC) + val sc: String => String = scope(new CC) - def local[sealed T](op: Cap -> T): T = op(CC()) + def local[T](op: CC^ -> T): T = op(CC()) local { cap3 => // error def g(x: String): String = if cap3 == cap3 then "" else "a" @@ -39,4 +41,4 @@ def test(cap1: Cap, cap2: Cap) = val r = Ref() r.elem = f - val fc = r.elem + val fc = r.elem \ No newline at end of file diff --git a/tests/neg-custom-args/captures/withFile.scala b/tests/neg-custom-args/captures/withFile.scala new file mode 100644 index 000000000000..b8cdf96f9143 --- /dev/null +++ b/tests/neg-custom-args/captures/withFile.scala @@ -0,0 +1,10 @@ +import java.io.* +object Test2: + + def usingLogFile[T](op: FileOutputStream^ => T): T = + val logFile = FileOutputStream("log") + val result = op(logFile) + logFile.close() + result + + private val later = usingLogFile { f => () => f.write(0) } // error diff --git a/tests/neg-custom-args/deprecation/i10247.scala b/tests/neg-custom-args/deprecation/i10247.scala deleted file mode 100644 index 22601cad8e79..000000000000 --- a/tests/neg-custom-args/deprecation/i10247.scala +++ /dev/null @@ -1,26 +0,0 @@ -def usered = Color.Red // error: value Red is deprecated - -object DeprecatedContainer { - @deprecated("no foo", "0.1") val foo = 23 -} - -enum Day { - - @deprecated("no more Mondays!", "0.1") case Monday - -} - -enum Color { - - @deprecated("no Red", "0.1") case Red - - @deprecated("no Generic", "0.1") case Generic(rgb: Int) - - def useFoo1 = DeprecatedContainer.foo // error // check that only enum cases are avoided - def useMonday = Day.Monday // error // check that enum cases are declared in this enum - -} - -object Color { - def useFoo2 = DeprecatedContainer.foo // error // check that only enum cases are avoided -} diff --git a/tests/neg-custom-args/deprecation/i11022.check b/tests/neg-custom-args/deprecation/i11022.check deleted file mode 100644 index 464f2827c49e..000000000000 --- a/tests/neg-custom-args/deprecation/i11022.check +++ /dev/null @@ -1,20 +0,0 @@ --- Error: tests/neg-custom-args/deprecation/i11022.scala:8:7 ----------------------------------------------------------- -8 |val a: CaseClass = CaseClass(42) // error: deprecated type // error: deprecated apply method - | ^^^^^^^^^ - | class CaseClass is deprecated: no CaseClass --- Error: tests/neg-custom-args/deprecation/i11022.scala:8:19 ---------------------------------------------------------- -8 |val a: CaseClass 
= CaseClass(42) // error: deprecated type // error: deprecated apply method - | ^^^^^^^^^ - | class CaseClass is deprecated: no CaseClass --- Error: tests/neg-custom-args/deprecation/i11022.scala:9:7 ----------------------------------------------------------- -9 |val b: CaseClass = new CaseClass(42) // error: deprecated type // error: deprecated class - | ^^^^^^^^^ - | class CaseClass is deprecated: no CaseClass --- Error: tests/neg-custom-args/deprecation/i11022.scala:9:23 ---------------------------------------------------------- -9 |val b: CaseClass = new CaseClass(42) // error: deprecated type // error: deprecated class - | ^^^^^^^^^ - | class CaseClass is deprecated: no CaseClass --- Error: tests/neg-custom-args/deprecation/i11022.scala:10:14 --------------------------------------------------------- -10 |val c: Unit = CaseClass(42).magic() // error: deprecated apply method - | ^^^^^^^^^ - | class CaseClass is deprecated: no CaseClass diff --git a/tests/neg-custom-args/deprecation/i12597.scala b/tests/neg-custom-args/deprecation/i12597.scala deleted file mode 100644 index 7927dded0cbc..000000000000 --- a/tests/neg-custom-args/deprecation/i12597.scala +++ /dev/null @@ -1,5 +0,0 @@ -@main def Test = - val a: IArray[Int] = IArray(2) - val b: IArray[Any] = a - val c = b.toArray // error: deprecated - c(0) = "" diff --git a/tests/neg-custom-args/deprecation/manifest-summoning.check b/tests/neg-custom-args/deprecation/manifest-summoning.check deleted file mode 100644 index aa1462f8baba..000000000000 --- a/tests/neg-custom-args/deprecation/manifest-summoning.check +++ /dev/null @@ -1,14 +0,0 @@ --- Error: tests/neg-custom-args/deprecation/manifest-summoning.scala:1:34 ---------------------------------------------- -1 |val foo = manifest[List[? <: Int]] // error - | ^ - | Compiler synthesis of Manifest and OptManifest is deprecated, instead - | replace with the type `scala.reflect.ClassTag[List[? <: Int]]`. - | Alternatively, consider using the new metaprogramming features of Scala 3, - | see https://docs.scala-lang.org/scala3/reference/metaprogramming.html --- Error: tests/neg-custom-args/deprecation/manifest-summoning.scala:2:41 ---------------------------------------------- -2 |val bar = optManifest[Array[? <: String]] // error - | ^ - | Compiler synthesis of Manifest and OptManifest is deprecated, instead - | replace with the type `scala.reflect.ClassTag[Array[? <: String]]`. - | Alternatively, consider using the new metaprogramming features of Scala 3, - | see https://docs.scala-lang.org/scala3/reference/metaprogramming.html diff --git a/tests/neg-custom-args/deprecation/manifest-summoning.scala b/tests/neg-custom-args/deprecation/manifest-summoning.scala deleted file mode 100644 index 7e9d9ee2cc9d..000000000000 --- a/tests/neg-custom-args/deprecation/manifest-summoning.scala +++ /dev/null @@ -1,2 +0,0 @@ -val foo = manifest[List[? <: Int]] // error -val bar = optManifest[Array[? 
<: String]] // error diff --git a/tests/neg-custom-args/deprecation/old-syntax.scala b/tests/neg-custom-args/deprecation/old-syntax.scala deleted file mode 100644 index 0ba7bbee7db0..000000000000 --- a/tests/neg-custom-args/deprecation/old-syntax.scala +++ /dev/null @@ -1,4 +0,0 @@ - -val f = (x: Int) ⇒ x + 1 // error - -val list = for (n ← List(42)) yield n + 1 // error diff --git a/tests/neg-custom-args/deprecation/t3235-minimal.check b/tests/neg-custom-args/deprecation/t3235-minimal.check deleted file mode 100644 index 665caab69d8d..000000000000 --- a/tests/neg-custom-args/deprecation/t3235-minimal.check +++ /dev/null @@ -1,16 +0,0 @@ --- Error: tests/neg-custom-args/deprecation/t3235-minimal.scala:3:21 --------------------------------------------------- -3 | assert(123456789.round == 123456789) // error - | ^^^^^^^^^^^^^^^ - |method round in class RichInt is deprecated since 2.11.0: this is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value? --- Error: tests/neg-custom-args/deprecation/t3235-minimal.scala:4:16 --------------------------------------------------- -4 | assert(math.round(123456789) == 123456789) // error - | ^^^^^^^^^^ - |method round in package scala.math is deprecated since 2.11.0: This is an integer type; there is no reason to round it. Perhaps you meant to call this with a floating-point value? --- Error: tests/neg-custom-args/deprecation/t3235-minimal.scala:5:32 --------------------------------------------------- -5 | assert(1234567890123456789L.round == 1234567890123456789L) // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ - |method round in class RichLong is deprecated since 2.11.0: this is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value? --- Error: tests/neg-custom-args/deprecation/t3235-minimal.scala:6:16 --------------------------------------------------- -6 | assert(math.round(1234567890123456789L) == 1234567890123456789L) // error - | ^^^^^^^^^^ - |method round in package scala.math is deprecated since 2.11.0: This is an integer type; there is no reason to round it. Perhaps you meant to call this with a floating-point value? 
diff --git a/tests/neg-custom-args/erased/by-name.scala b/tests/neg-custom-args/erased/by-name.scala deleted file mode 100644 index 707cfd96734b..000000000000 --- a/tests/neg-custom-args/erased/by-name.scala +++ /dev/null @@ -1,4 +0,0 @@ -def f(x: => Int, erased y: => Int) = x // error -def g(erased x: => Int, y: => Int) = y // error - -val h: (erased => Int, Int) => Int = (erased x, y) => y // error diff --git a/tests/neg-custom-args/erased/erased-1.scala b/tests/neg-custom-args/erased/erased-1.scala deleted file mode 100644 index 552d2cc7e086..000000000000 --- a/tests/neg-custom-args/erased/erased-1.scala +++ /dev/null @@ -1,34 +0,0 @@ -object Test { - def foo0(a: Int): Int = a - def foo1(erased a: Int): Int = { - foo0( - a // error - ) - foo0({ - println() - a // error - }) - foo1(a) // OK - foo2( // error - a // error - ) - foo3( // error - a - ) - a // error - } - erased def foo2(a: Int): Int = { - foo0(a) // OK - foo1(a) // OK - foo2(a) // OK - foo3(a) // OK - a // OK - } - erased def foo3(erased a: Int): Int = { - foo0(a) // OK - foo1(a) // OK - foo2(a) // OK - foo3(a) // OK - a // OK - } -} \ No newline at end of file diff --git a/tests/neg-custom-args/erased/erased-3.scala b/tests/neg-custom-args/erased/erased-3.scala deleted file mode 100644 index 16e18b320dc5..000000000000 --- a/tests/neg-custom-args/erased/erased-3.scala +++ /dev/null @@ -1,45 +0,0 @@ -object Test { - def foo0(a: Int): Int = a - def foo1(erased a: Int): Int = { - foo0( - u() // error - ) - foo1(u()) // OK - foo2( // error - u() // error - ) - foo3( // error - u() - ) - u() // error - u() // error - } - erased def foo2(a: Int): Int = { - foo0(u()) // OK - foo1(u()) // OK - foo2(u()) // OK - foo3(u()) // OK - u() // warn - u() // OK - } - erased def foo3(erased a: Int): Int = { - foo0(u()) // OK - foo1(u()) // OK - foo2(u()) // OK - foo3(u()) // OK - u() // warn - u() // OK - } - - erased val foo4: Int = { - foo0(u()) // OK - foo1(u()) // OK - foo2(u()) // OK - foo3(u()) // OK - println() - u() // warn - u() // OK - } - - erased def u(): Int = 42 -} \ No newline at end of file diff --git a/tests/neg-custom-args/erased/erased-4.scala b/tests/neg-custom-args/erased/erased-4.scala deleted file mode 100644 index 1583c5297cd1..000000000000 --- a/tests/neg-custom-args/erased/erased-4.scala +++ /dev/null @@ -1,17 +0,0 @@ -object Test { - - def main(args: Array[String]): Unit = { - def foo (erased i: Int) = 0 - - val f: (erased Int) => Int = - (erased x: Int) => { - x // error - } - - val f2: (erased Int) => Int = - (erased x: Int) => { - foo(x) - } - } - -} diff --git a/tests/neg-custom-args/erased/erased-5.scala b/tests/neg-custom-args/erased/erased-5.scala deleted file mode 100644 index 3a4ea33629bf..000000000000 --- a/tests/neg-custom-args/erased/erased-5.scala +++ /dev/null @@ -1,18 +0,0 @@ -object Test { - - type UU[T] = (erased T) => Int - - def main(args: Array[String]): Unit = { - fun { x => // error: `Int => Int` not compatible with `(erased Int) => Int` - x - } - - fun { - (x: Int) => x // error: `Int => Int` not compatible with `(erased Int) => Int` - } - } - - def fun(f: UU[Int]): Int = { - f(35) - } -} diff --git a/tests/neg-custom-args/erased/erased-6.scala b/tests/neg-custom-args/erased/erased-6.scala deleted file mode 100644 index 21d77d03afc2..000000000000 --- a/tests/neg-custom-args/erased/erased-6.scala +++ /dev/null @@ -1,12 +0,0 @@ -object Test { - erased def foo: Foo = new Foo - foo.x() // error - foo.y // error - foo.z // error -} - -class Foo { - def x(): String = "abc" - def y: String = "abc" - 
val z: String = "abc" -} \ No newline at end of file diff --git a/tests/neg-custom-args/erased/erased-args-lifted.scala b/tests/neg-custom-args/erased/erased-args-lifted.scala deleted file mode 100644 index 0f4b9e11ca1c..000000000000 --- a/tests/neg-custom-args/erased/erased-args-lifted.scala +++ /dev/null @@ -1,16 +0,0 @@ -object Test { - def foo(a: Int)(b: Int, c: Int) = 42 - erased def bar(i: Int): Int = { - println(1) - 42 - } - def baz: Int = { - println(1) - 2 - } - foo( - bar(baz) // error - )( - c = baz, b = baz // force all args to be lifted in vals befor the call - ) -} diff --git a/tests/neg-custom-args/erased/erased-assign.scala b/tests/neg-custom-args/erased/erased-assign.scala deleted file mode 100644 index 5f1bd9250e84..000000000000 --- a/tests/neg-custom-args/erased/erased-assign.scala +++ /dev/null @@ -1,11 +0,0 @@ -object Test { - var i: Int = 1 - def foo(erased a: Int): Int = { - i = a // error - erased def r = { - i = a - () - } - 42 - } -} diff --git a/tests/neg-custom-args/erased/erased-case-class.scala b/tests/neg-custom-args/erased/erased-case-class.scala deleted file mode 100644 index 692534d772b6..000000000000 --- a/tests/neg-custom-args/erased/erased-case-class.scala +++ /dev/null @@ -1 +0,0 @@ -case class Foo1(erased x: Int) // error // error diff --git a/tests/neg-custom-args/erased/erased-def-rhs.scala b/tests/neg-custom-args/erased/erased-def-rhs.scala deleted file mode 100644 index 23417583f860..000000000000 --- a/tests/neg-custom-args/erased/erased-def-rhs.scala +++ /dev/null @@ -1,6 +0,0 @@ -object Test { - def f(erased i: Int) = { - def j: Int = i // error - j - } -} diff --git a/tests/neg-custom-args/erased/erased-implicit.scala b/tests/neg-custom-args/erased/erased-implicit.scala deleted file mode 100644 index 2df2ad88db4d..000000000000 --- a/tests/neg-custom-args/erased/erased-implicit.scala +++ /dev/null @@ -1,9 +0,0 @@ -object Test { - - fun // error - - def fun(implicit a: Double): Int = 42 - - erased implicit def doubleImplicit: Double = 42.0 - -} diff --git a/tests/neg-custom-args/erased/erased-lazy-val.scala b/tests/neg-custom-args/erased/erased-lazy-val.scala deleted file mode 100644 index 7b89809eddff..000000000000 --- a/tests/neg-custom-args/erased/erased-lazy-val.scala +++ /dev/null @@ -1,3 +0,0 @@ -object Test { - erased lazy val i: Int = 1 // error -} diff --git a/tests/neg-custom-args/erased/erased-object.scala b/tests/neg-custom-args/erased/erased-object.scala deleted file mode 100644 index 99720f244ef5..000000000000 --- a/tests/neg-custom-args/erased/erased-object.scala +++ /dev/null @@ -1 +0,0 @@ -erased object Test // error diff --git a/tests/neg-custom-args/erased/erased-singleton.scala b/tests/neg-custom-args/erased/erased-singleton.scala deleted file mode 100644 index d1aad7093e4c..000000000000 --- a/tests/neg-custom-args/erased/erased-singleton.scala +++ /dev/null @@ -1,7 +0,0 @@ -trait Sys - -trait Obj { - erased val s: Sys - - type S = s.type // error: non final -} diff --git a/tests/neg-custom-args/erased/erased-type.scala b/tests/neg-custom-args/erased/erased-type.scala deleted file mode 100644 index 62acb815442a..000000000000 --- a/tests/neg-custom-args/erased/erased-type.scala +++ /dev/null @@ -1,3 +0,0 @@ -class Test { - erased type T // error -} diff --git a/tests/neg-custom-args/erased/erased-val-rhs.scala b/tests/neg-custom-args/erased/erased-val-rhs.scala deleted file mode 100644 index 1b904c671478..000000000000 --- a/tests/neg-custom-args/erased/erased-val-rhs.scala +++ /dev/null @@ -1,6 +0,0 @@ -object Test { - def 
f(erased i: Int) = { - val j: Int = i // error - () - } -} diff --git a/tests/neg-custom-args/erased/erased-value-class.scala b/tests/neg-custom-args/erased/erased-value-class.scala deleted file mode 100644 index 65cd3e2f961b..000000000000 --- a/tests/neg-custom-args/erased/erased-value-class.scala +++ /dev/null @@ -1,4 +0,0 @@ - -class Foo(erased x: Int) extends AnyVal // error - -class Bar(x: Int)(y: Int) extends AnyVal // error diff --git a/tests/neg-custom-args/erased/erased-var.scala b/tests/neg-custom-args/erased/erased-var.scala deleted file mode 100644 index 465b3362e529..000000000000 --- a/tests/neg-custom-args/erased/erased-var.scala +++ /dev/null @@ -1,3 +0,0 @@ -object Test { - erased var i: Int = 1 // error -} diff --git a/tests/neg-custom-args/erased/erasedValue.scala b/tests/neg-custom-args/erased/erasedValue.scala deleted file mode 100644 index 5519a04b5b15..000000000000 --- a/tests/neg-custom-args/erased/erasedValue.scala +++ /dev/null @@ -1,7 +0,0 @@ -import scala.compiletime.erasedValue -object Test { - def foo0(a: Int): Int = 3 - def foo1(erased a: Int): Int = 3 - foo0(erasedValue[Int]) // error - foo1(erasedValue[Int]) -} diff --git a/tests/neg-custom-args/erased/i5525.scala b/tests/neg-custom-args/erased/i5525.scala deleted file mode 100644 index abf8488bd38b..000000000000 --- a/tests/neg-custom-args/erased/i5525.scala +++ /dev/null @@ -1,9 +0,0 @@ -erased enum Foo6 {} // error: only access modifiers allowed - -enum Foo10 { // error: Enumerations must contain at least one case - erased case C6() // error // error -} - -enum Foo11 { // error: Enumerations must contain at least one case - erased case C6 // error // error -} diff --git a/tests/neg-custom-args/erased/i6795.check b/tests/neg-custom-args/erased/i6795.check deleted file mode 100644 index 662012864244..000000000000 --- a/tests/neg-custom-args/erased/i6795.check +++ /dev/null @@ -1,4 +0,0 @@ --- Error: tests/neg-custom-args/erased/i6795.scala:1:13 ---------------------------------------------------------------- -1 |erased class Foo // error - |^^^^^^^^^^^^^^^^ - |modifier(s) `erased` incompatible with type definition diff --git a/tests/neg-custom-args/erased/poly-functions.scala b/tests/neg-custom-args/erased/poly-functions.scala deleted file mode 100644 index 000a2ca49cc9..000000000000 --- a/tests/neg-custom-args/erased/poly-functions.scala +++ /dev/null @@ -1,16 +0,0 @@ -object Test: - // Poly functions with erased parameters are disallowed as an implementation restriction - - type T1 = [X] => (erased x: X, y: Int) => Int // error - type T2 = [X] => (x: X, erased y: Int) => X // error - - val t1 = [X] => (erased x: X, y: Int) => y // error - val t2 = [X] => (x: X, erased y: Int) => x // error - - // Erased classes should be detected too - erased class A - - type T3 = [X] => (x: A, y: X) => X // error - - val t3 = [X] => (x: A, y: X) => y // error - diff --git a/tests/neg-custom-args/erased/tupled-function-instances.scala b/tests/neg-custom-args/erased/tupled-function-instances.scala deleted file mode 100644 index 3574125285e4..000000000000 --- a/tests/neg-custom-args/erased/tupled-function-instances.scala +++ /dev/null @@ -1,59 +0,0 @@ -import scala.util.TupledFunction -object Test { - def main(args: Array[String]): Unit = { - type T - type R - - summon[TupledFunction[(erased T) => R, erased Tuple1[T] => R]] // error // error - summon[TupledFunction[(erased T, T) => R, (erased (T, T)) => R]] // error - summon[TupledFunction[(erased T, T, T) => R,(erased (T, T, T)) => R]] // error - 
summon[TupledFunction[(erased T, T, T, T) => R,(erased (T, T, T, T)) => R]] // error - summon[TupledFunction[(erased T, T, T, T, T) => R,(erased (T, T, T, T, T)) => R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T)) => R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T)) => R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T)) => R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T)) => R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T)) => R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T)) => R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error - - summon[TupledFunction[(erased T) ?=> R, (erased Tuple1[T]) ?=> R]] // error - summon[TupledFunction[(erased T, T) ?=> R, (erased T, T) ?=> R]] // error - summon[TupledFunction[(erased T, T, T) ?=> R, (erased T, T, T) ?=> R]] // error - 
summon[TupledFunction[(erased T, T, T, T) ?=> R, (erased T, T, T, T) ?=> R]] // error - summon[TupledFunction[(erased T, T, T, T, T) ?=> R, (erased T, T, T, T, T) ?=> R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T) ?=> R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T) ?=> R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T) ?=> R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T) ?=> R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T) ?=> R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error - summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error - } -} \ No newline at end of file diff --git a/tests/neg-custom-args/explain/hidden-type-errors.check b/tests/neg-custom-args/explain/hidden-type-errors.check deleted file mode 100644 index 551d1d7b16ba..000000000000 --- 
a/tests/neg-custom-args/explain/hidden-type-errors.check +++ /dev/null @@ -1,23 +0,0 @@ --- [E007] Type Mismatch Error: tests/neg-custom-args/explain/hidden-type-errors/Test.scala:6:24 ------------------------ -6 | val x = X.doSomething("XXX") // error - | ^^^^^^^^^^^^^^^^^^^^ - | Found: String - | Required: Int - |--------------------------------------------------------------------------------------------------------------------- - | Explanation (enabled by `-explain`) - |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - | - | Tree: t12717.A.bar("XXX") - | I tried to show that - | String - | conforms to - | Int - | but the comparison trace ended with `false`: - | - | ==> String <: Int - | ==> String <: Int - | <== String <: Int = false - | <== String <: Int = false - | - | The tests were made under the empty constraint - --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-custom-args/explain/hidden-type-errors/Test.scala b/tests/neg-custom-args/explain/hidden-type-errors/Test.scala deleted file mode 100644 index 180aa07cfb50..000000000000 --- a/tests/neg-custom-args/explain/hidden-type-errors/Test.scala +++ /dev/null @@ -1,6 +0,0 @@ -package t12717 - - -object Test: - - val x = X.doSomething("XXX") // error diff --git a/tests/neg-custom-args/explain/i11637.check b/tests/neg-custom-args/explain/i11637.check deleted file mode 100644 index 82424396a43b..000000000000 --- a/tests/neg-custom-args/explain/i11637.check +++ /dev/null @@ -1,56 +0,0 @@ --- [E057] Type Mismatch Error: tests/neg-custom-args/explain/i11637.scala:11:33 ---------------------------------------- -11 | var h = new HKT3_1[FunctorImpl](); // error // error - | ^ - | Type argument test2.FunctorImpl does not conform to upper bound [Generic2[T <: String] <: Set[T]] =>> Any - |-------------------------------------------------------------------------------------------------------------------- - | Explanation (enabled by `-explain`) - |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - | I tried to show that - | test2.FunctorImpl - | conforms to - | [Generic2[T <: String] <: Set[T]] =>> Any - | but the comparison trace ended with `false`: - | - | ==> test2.FunctorImpl <: [Generic2[T <: String] <: Set[T]] =>> Any - | ==> type bounds [[T <: String] <: Set[T]] <: type bounds [[T] <: Iterable[T]] - | ==> [T <: String] =>> Set[T] <: Iterable - | ==> type bounds [] <: type bounds [ <: String] - | ==> Any <: String - | ==> Any <: String - | <== Any <: String = false - | <== Any <: String = false - | <== type bounds [] <: type bounds [ <: String] = false - | <== [T <: String] =>> Set[T] <: Iterable = false - | <== type bounds [[T <: String] <: Set[T]] <: type bounds [[T] <: Iterable[T]] = false - | <== test2.FunctorImpl <: [Generic2[T <: String] <: Set[T]] =>> Any = false - | - | The tests were made under the empty constraint - -------------------------------------------------------------------------------------------------------------------- --- [E057] Type Mismatch Error: tests/neg-custom-args/explain/i11637.scala:11:21 ---------------------------------------- -11 | var h = new HKT3_1[FunctorImpl](); // error // error - | ^ - | Type argument test2.FunctorImpl does not conform to upper bound [Generic2[T <: String] <: Set[T]] =>> Any - 
|-------------------------------------------------------------------------------------------------------------------- - | Explanation (enabled by `-explain`) - |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - | I tried to show that - | test2.FunctorImpl - | conforms to - | [Generic2[T <: String] <: Set[T]] =>> Any - | but the comparison trace ended with `false`: - | - | ==> test2.FunctorImpl <: [Generic2[T <: String] <: Set[T]] =>> Any - | ==> type bounds [[T <: String] <: Set[T]] <: type bounds [[T] <: Iterable[T]] - | ==> [T <: String] =>> Set[T] <: Iterable - | ==> type bounds [] <: type bounds [ <: String] - | ==> Any <: String - | ==> Any <: String - | <== Any <: String = false - | <== Any <: String = false - | <== type bounds [] <: type bounds [ <: String] = false - | <== [T <: String] =>> Set[T] <: Iterable = false - | <== type bounds [[T <: String] <: Set[T]] <: type bounds [[T] <: Iterable[T]] = false - | <== test2.FunctorImpl <: [Generic2[T <: String] <: Set[T]] =>> Any = false - | - | The tests were made under the empty constraint - -------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-custom-args/explain/i15575.check b/tests/neg-custom-args/explain/i15575.check deleted file mode 100644 index e254e0a5e22e..000000000000 --- a/tests/neg-custom-args/explain/i15575.check +++ /dev/null @@ -1,40 +0,0 @@ --- [E057] Type Mismatch Error: tests/neg-custom-args/explain/i15575.scala:3:27 ----------------------------------------- -3 | def bar[T]: Unit = foo[T & Any] // error - | ^ - | Type argument T & Any does not conform to lower bound Any - |--------------------------------------------------------------------------------------------------------------------- - | Explanation (enabled by `-explain`) - |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - | I tried to show that - | Any - | conforms to - | T & Any - | but the comparison trace ended with `false`: - | - | ==> Any <: T & Any - | ==> Any <: T - | <== Any <: T = false - | <== Any <: T & Any = false - | - | The tests were made under the empty constraint - --------------------------------------------------------------------------------------------------------------------- --- [E057] Type Mismatch Error: tests/neg-custom-args/explain/i15575.scala:7:14 ----------------------------------------- -7 | val _ = foo[String] // error - | ^ - | Type argument String does not conform to lower bound CharSequence - |--------------------------------------------------------------------------------------------------------------------- - | Explanation (enabled by `-explain`) - |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - | I tried to show that - | CharSequence - | conforms to - | String - | but the comparison trace ended with `false`: - | - | ==> CharSequence <: String - | ==> CharSequence <: String - | <== CharSequence <: String = false - | <== CharSequence <: String = false - | - | The tests were made under the empty constraint - --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-custom-args/explain/i16601a.check b/tests/neg-custom-args/explain/i16601a.check deleted file mode 100644 index 63be0d2cd2b2..000000000000 --- a/tests/neg-custom-args/explain/i16601a.check +++ 
/dev/null @@ -1,18 +0,0 @@ --- [E042] Type Error: tests/neg-custom-args/explain/i16601a.scala:1:27 ------------------------------------------------- -1 |@main def Test: Unit = new concurrent.ExecutionContext // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | ExecutionContext is a trait; it cannot be instantiated - |--------------------------------------------------------------------------------------------------------------------- - | Explanation (enabled by `-explain`) - |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - | Abstract classes and traits need to be extended by a concrete class or object - | to make their functionality accessible. - | - | You may want to create an anonymous class extending ExecutionContext with - | class ExecutionContext { } - | - | or add a companion object with - | object ExecutionContext extends ExecutionContext - | - | You need to implement any abstract members in both cases. - --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-custom-args/explain/i16601a.scala b/tests/neg-custom-args/explain/i16601a.scala deleted file mode 100644 index 2e058db0093c..000000000000 --- a/tests/neg-custom-args/explain/i16601a.scala +++ /dev/null @@ -1 +0,0 @@ -@main def Test: Unit = new concurrent.ExecutionContext // error \ No newline at end of file diff --git a/tests/neg-custom-args/explain/i16888.scala b/tests/neg-custom-args/explain/i16888.scala deleted file mode 100644 index 9d3fd0f2f57e..000000000000 --- a/tests/neg-custom-args/explain/i16888.scala +++ /dev/null @@ -1 +0,0 @@ -def test = summon[scala.quoted.Quotes] // error diff --git a/tests/neg-custom-args/explain/labelNotFound.scala b/tests/neg-custom-args/explain/labelNotFound.scala deleted file mode 100644 index 2618600702da..000000000000 --- a/tests/neg-custom-args/explain/labelNotFound.scala +++ /dev/null @@ -1,2 +0,0 @@ -object Test: - scala.util.boundary.break(1) // error diff --git a/tests/neg-custom-args/fatal-warnings/enum-variance.check b/tests/neg-custom-args/fatal-warnings/enum-variance.check deleted file mode 100644 index b525d4d94ba6..000000000000 --- a/tests/neg-custom-args/fatal-warnings/enum-variance.check +++ /dev/null @@ -1,10 +0,0 @@ --- Error: tests/neg-custom-args/fatal-warnings/enum-variance.scala:2:12 ------------------------------------------------ -2 | case Refl(f: T => T) // error: enum case Refl requires explicit declaration of type T - | ^^^^^^^^^ - | contravariant type T occurs in covariant position in type T => T of value f - | enum case Refl requires explicit declaration of type T to resolve this issue. 
- | See an example at https://docs.scala-lang.org/scala3/reference/enums/adts.html#parameter-variance-of-enums --- Error: tests/neg-custom-args/fatal-warnings/enum-variance.scala:5:16 ------------------------------------------------ -5 | case Refl[-T](f: T => T) extends ExplicitView[T] // error: contravariant type T occurs in covariant position - | ^^^^^^^^^ - | contravariant type T occurs in covariant position in type T => T of value f diff --git a/tests/neg-custom-args/fatal-warnings/i10137.check b/tests/neg-custom-args/fatal-warnings/i10137.check deleted file mode 100644 index 15361f8dbed2..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i10137.check +++ /dev/null @@ -1,10 +0,0 @@ --- Error: tests/neg-custom-args/fatal-warnings/i10137.scala:2:12 ------------------------------------------------------- -2 | @main def main(): Unit = println("Hello, World!") // error - | ^ - | The class `foo.main` generated from `@main` will shadow the existing class main in package scala. - | The existing definition might no longer be found on recompile. --- Error: tests/neg-custom-args/fatal-warnings/i10137.scala:4:10 ------------------------------------------------------- -4 |@main def List(): Unit = println("List") // error - | ^ - | The class `List` generated from `@main` will shadow the existing type List in package scala. - | The existing definition might no longer be found on recompile. diff --git a/tests/neg-custom-args/fatal-warnings/i10994.scala b/tests/neg-custom-args/fatal-warnings/i10994.scala deleted file mode 100644 index ce5cb2cf3df9..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i10994.scala +++ /dev/null @@ -1,2 +0,0 @@ -def foo = true match - case (b: Boolean): Boolean => () // error diff --git a/tests/neg-custom-args/fatal-warnings/i11333.check b/tests/neg-custom-args/fatal-warnings/i11333.check deleted file mode 100644 index beef37c6460a..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i11333.check +++ /dev/null @@ -1,30 +0,0 @@ --- [E167] Lossy Conversion Error: tests/neg-custom-args/fatal-warnings/i11333.scala:2:19 ------------------------------- -2 | val f1: Float = 123456789 // error - | ^^^^^^^^^ - | Widening conversion from Int to Float loses precision. - | Write `.toFloat` instead. --- [E167] Lossy Conversion Error: tests/neg-custom-args/fatal-warnings/i11333.scala:3:19 ------------------------------- -3 | val d1: Double = 1234567890123456789L // error - | ^^^^^^^^^^^^^^^^^^^^ - | Widening conversion from Long to Double loses precision. - | Write `.toDouble` instead. --- [E167] Lossy Conversion Error: tests/neg-custom-args/fatal-warnings/i11333.scala:4:19 ------------------------------- -4 | val f2: Float = 123456789L // error - | ^^^^^^^^^^ - | Widening conversion from Long to Float loses precision. - | Write `.toFloat` instead. --- [E167] Lossy Conversion Error: tests/neg-custom-args/fatal-warnings/i11333.scala:10:21 ------------------------------ -10 | val f1_b: Float = i1 // error - | ^^ - | Widening conversion from Int to Float loses precision. - | Write `.toFloat` instead. --- [E167] Lossy Conversion Error: tests/neg-custom-args/fatal-warnings/i11333.scala:11:21 ------------------------------ -11 | val d1_b: Double = l1 // error - | ^^ - | Widening conversion from Long to Double loses precision. - | Write `.toDouble` instead. --- [E167] Lossy Conversion Error: tests/neg-custom-args/fatal-warnings/i11333.scala:12:21 ------------------------------ -12 | val f2_b: Float = l2 // error - | ^^ - | Widening conversion from Long to Float loses precision. 
- | Write `.toFloat` instead. diff --git a/tests/neg-custom-args/fatal-warnings/i11963a.scala b/tests/neg-custom-args/fatal-warnings/i11963a.scala deleted file mode 100644 index 58d64d061162..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i11963a.scala +++ /dev/null @@ -1 +0,0 @@ -open trait Foo // error diff --git a/tests/neg-custom-args/fatal-warnings/i11963b.scala b/tests/neg-custom-args/fatal-warnings/i11963b.scala deleted file mode 100644 index 9fae92747d53..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i11963b.scala +++ /dev/null @@ -1 +0,0 @@ -open abstract class Foo // error diff --git a/tests/neg-custom-args/fatal-warnings/i11963c.scala b/tests/neg-custom-args/fatal-warnings/i11963c.scala deleted file mode 100644 index ebd56e1127c8..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i11963c.scala +++ /dev/null @@ -1,6 +0,0 @@ -object Test { - def foo: Any = { - open class Bar // error - new Bar - } -} diff --git a/tests/neg-custom-args/fatal-warnings/i12188/Test.scala b/tests/neg-custom-args/fatal-warnings/i12188/Test.scala deleted file mode 100644 index 3bea42ac3032..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i12188/Test.scala +++ /dev/null @@ -1,9 +0,0 @@ -sealed trait P -case class PC1(a: String) extends P -case class PC2(b: Int) extends P - -def Test = MatchTest.test(PC2(10): P) - -def foo(x: P): Unit = - x match // error - case _: PC1 => \ No newline at end of file diff --git a/tests/neg-custom-args/fatal-warnings/i12253.check b/tests/neg-custom-args/fatal-warnings/i12253.check deleted file mode 100644 index 654ea9fc8247..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i12253.check +++ /dev/null @@ -1,9 +0,0 @@ --- Error: tests/neg-custom-args/fatal-warnings/i12253.scala:11:10 ------------------------------------------------------ -11 | case extractors.InlinedLambda(_, Select(_, name)) => Expr(name) // error // error - | ^ - |the type test for extractors.q2.reflect.Term cannot be checked at runtime because it refers to an abstract type member or type parameter --- Error: tests/neg-custom-args/fatal-warnings/i12253.scala:11:38 ------------------------------------------------------ -11 | case extractors.InlinedLambda(_, Select(_, name)) => Expr(name) // error // error - | ^ - |the type test for q1.reflect.Select cannot be checked at runtime because it refers to an abstract type member or type parameter -there was 1 deprecation warning; re-run with -deprecation for details diff --git a/tests/neg-custom-args/fatal-warnings/i13440.check b/tests/neg-custom-args/fatal-warnings/i13440.check deleted file mode 100644 index fde8133419b6..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i13440.check +++ /dev/null @@ -1,12 +0,0 @@ --- Error: tests/neg-custom-args/fatal-warnings/i13440.scala:3:4 -------------------------------------------------------- -3 |def given = 42 // error - | ^ - | given is now a keyword, write `given` instead of given to keep it as an identifier --- Error: tests/neg-custom-args/fatal-warnings/i13440.scala:5:13 ------------------------------------------------------- -5 |case class C(enum: List[Int] = Nil) { // error - | ^ - | enum is now a keyword, write `enum` instead of enum to keep it as an identifier --- Error: tests/neg-custom-args/fatal-warnings/i13440.scala:6:11 ------------------------------------------------------- -6 | val s = s"$enum" // error - | ^ - | enum is now a keyword, write `enum` instead of enum to keep it as an identifier diff --git a/tests/neg-custom-args/fatal-warnings/i14705.scala 
b/tests/neg-custom-args/fatal-warnings/i14705.scala deleted file mode 100644 index f5c17baae609..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i14705.scala +++ /dev/null @@ -1,3 +0,0 @@ -val n = Nil -val b = n.head.isInstanceOf[String] // error - diff --git a/tests/neg-custom-args/fatal-warnings/i15474.scala b/tests/neg-custom-args/fatal-warnings/i15474.scala deleted file mode 100644 index 86b01eb28ce6..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i15474.scala +++ /dev/null @@ -1,16 +0,0 @@ -import scala.language.implicitConversions - -object Test1: - given c: Conversion[ String, Int ] with - def apply(from: String): Int = from.toInt // error - -object Test2: - given c: Conversion[ String, Int ] = _.toInt // loop not detected, could be used as a fallback to avoid the warning. - -object Prices { - opaque type Price = BigDecimal - - object Price{ - given Ordering[Price] = summon[Ordering[BigDecimal]] // error - } -} \ No newline at end of file diff --git a/tests/neg-custom-args/fatal-warnings/i15662.scala b/tests/neg-custom-args/fatal-warnings/i15662.scala deleted file mode 100644 index afe505922603..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i15662.scala +++ /dev/null @@ -1,14 +0,0 @@ -case class Composite[T](v: T) - -def m(composite: Composite[_]): Unit = - composite match { - case Composite[Int](v) => println(v) // error: cannot be checked at runtime - } - -def m2(composite: Composite[_]): Unit = - composite match { - case Composite(v) => println(v) // ok - } - -@main def Test = - m(Composite("This is String")) diff --git a/tests/neg-custom-args/fatal-warnings/i16649-refutable.check b/tests/neg-custom-args/fatal-warnings/i16649-refutable.check deleted file mode 100644 index 5b3d460c7f09..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i16649-refutable.check +++ /dev/null @@ -1,8 +0,0 @@ --- Error: tests/neg-custom-args/fatal-warnings/i16649-refutable.scala:4:6 ---------------------------------------------- -4 | val '{ ($y: Int) + ($z: Int) } = x // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | pattern binding uses refutable extractor `'{...}` - | - | If this usage is intentional, this can be communicated by adding `: @unchecked` after the expression, - | which may result in a MatchError at runtime. - | This patch can be rewritten automatically under -rewrite -source 3.2-migration. 
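The `i16649-refutable.check` expectation above documents the warning for pattern bindings that use a refutable extractor, together with the suggested `: @unchecked` escape hatch. A minimal sketch of the same mechanism on an ordinary (non-quoted) binding — illustrative only, not part of this patch:

```scala
object RefutableBinding:
  val xs: List[Int] = List(1, 2, 3)

  // `::` is a refutable extractor: this binding would fail with a
  // MatchError if xs were Nil, so the compiler warns about it.
  // val head :: tail = xs

  // Ascribing `: @unchecked` acknowledges the risk and silences the
  // warning, exactly as the message above suggests.
  val head :: tail = (xs: @unchecked)
```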
diff --git a/tests/neg-custom-args/fatal-warnings/i16649-refutable.scala b/tests/neg-custom-args/fatal-warnings/i16649-refutable.scala deleted file mode 100644 index 2a42f652e093..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i16649-refutable.scala +++ /dev/null @@ -1,4 +0,0 @@ -import quoted.* - -def foo(using Quotes)(x: Expr[Int]) = - val '{ ($y: Int) + ($z: Int) } = x // error diff --git a/tests/neg-custom-args/fatal-warnings/i16728.check b/tests/neg-custom-args/fatal-warnings/i16728.check deleted file mode 100644 index a797baf19be0..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i16728.check +++ /dev/null @@ -1,4 +0,0 @@ --- Error: tests/neg-custom-args/fatal-warnings/i16728.scala:16:11 ------------------------------------------------------ -16 | case tx : C[Int]#X => // error - | ^ - | the type test for C[Int] cannot be checked at runtime because its type arguments can't be determined from A diff --git a/tests/neg-custom-args/fatal-warnings/i16876/Test.scala b/tests/neg-custom-args/fatal-warnings/i16876/Test.scala deleted file mode 100644 index d9229d31cd6d..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i16876/Test.scala +++ /dev/null @@ -1,11 +0,0 @@ -// scalac: -Wunused:all - -object Foo { - private def myMethod(a: Int, b: Int, c: Int) = adder // ok - myMethod(1, 2, 3) - - private def myMethodFailing(a: Int, b: Int, c: Int) = a + 0 // error // error - myMethodFailing(1, 2, 3) -} - - diff --git a/tests/neg-custom-args/fatal-warnings/i17335.scala b/tests/neg-custom-args/fatal-warnings/i17335.scala deleted file mode 100644 index 6629e2f151c9..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i17335.scala +++ /dev/null @@ -1,4 +0,0 @@ -// scalac: -Wunused:all - -def aMethod() = - doStuff { (x) => x } // error diff --git a/tests/neg-custom-args/fatal-warnings/i2673.scala b/tests/neg-custom-args/fatal-warnings/i2673.scala deleted file mode 100644 index 2cb4fb03874e..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i2673.scala +++ /dev/null @@ -1,4 +0,0 @@ -package Foos - -class Foo // error -class foo diff --git a/tests/neg-custom-args/fatal-warnings/i2673b.scala b/tests/neg-custom-args/fatal-warnings/i2673b.scala deleted file mode 100644 index e3c29d6bc8d0..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i2673b.scala +++ /dev/null @@ -1,4 +0,0 @@ -package Foos - -class Bar // error -object bar diff --git a/tests/neg-custom-args/fatal-warnings/i2673c.scala b/tests/neg-custom-args/fatal-warnings/i2673c.scala deleted file mode 100644 index a677fab3081d..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i2673c.scala +++ /dev/null @@ -1,6 +0,0 @@ -package Foos - -object Outer { - case class X() // error - object x -} diff --git a/tests/neg-custom-args/fatal-warnings/i4008.check b/tests/neg-custom-args/fatal-warnings/i4008.check deleted file mode 100644 index a6e206f623d7..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i4008.check +++ /dev/null @@ -1,40 +0,0 @@ --- [E158] Reference Error: tests/neg-custom-args/fatal-warnings/i4008.scala:5:56 --------------------------------------- -5 |@annotation.implicitNotFound("An implicit ShouldWarn1[${B}] is not in scope") // error - | ^ - | Invalid reference to a type variable `B` found in the annotation argument. - | The variable does not occur as a parameter in the scope of type `ShouldWarn1`. 
--- [E158] Reference Error: tests/neg-custom-args/fatal-warnings/i4008.scala:9:56 --------------------------------------- -9 |@annotation.implicitNotFound("An implicit ShouldWarn2[${A}] is not in scope") // error - | ^ - | Invalid reference to a type variable `A` found in the annotation argument. - | The variable does not occur as a parameter in the scope of type `ShouldWarn2`. --- [E158] Reference Error: tests/neg-custom-args/fatal-warnings/i4008.scala:13:56 -------------------------------------- -13 |@annotation.implicitNotFound("An implicit ShouldWarn3[${A},${B}] is not in scope") // error - | ^ - | Invalid reference to a type variable `A` found in the annotation argument. - | The variable does not occur as a parameter in the scope of type `ShouldWarn3`. --- [E158] Reference Error: tests/neg-custom-args/fatal-warnings/i4008.scala:17:56 -------------------------------------- -17 |@annotation.implicitNotFound("An implicit ShouldWarn4[${A},${B}] is not in scope") // error // error - | ^ - | Invalid reference to a type variable `A` found in the annotation argument. - | The variable does not occur as a parameter in the scope of type `ShouldWarn4`. --- [E158] Reference Error: tests/neg-custom-args/fatal-warnings/i4008.scala:17:61 -------------------------------------- -17 |@annotation.implicitNotFound("An implicit ShouldWarn4[${A},${B}] is not in scope") // error // error - | ^ - | Invalid reference to a type variable `B` found in the annotation argument. - | The variable does not occur as a parameter in the scope of type `ShouldWarn4`. --- [E158] Reference Error: tests/neg-custom-args/fatal-warnings/i4008.scala:21:61 -------------------------------------- -21 |@annotation.implicitNotFound("An implicit ShouldWarn5[${C},${Abc}] is not in scope") // error - | ^ - | Invalid reference to a type variable `Abc` found in the annotation argument. - | The variable does not occur as a parameter in the scope of type `ShouldWarn5`. --- [E158] Reference Error: tests/neg-custom-args/fatal-warnings/i4008.scala:44:54 -------------------------------------- -44 |class C[A](using @annotation.implicitNotFound("No C[${B}] found") c: Class[A]) // error - | ^ - | Invalid reference to a type variable `B` found in the annotation argument. - | The variable does not occur as a parameter in the scope of the constructor of `C`. --- [E158] Reference Error: tests/neg-custom-args/fatal-warnings/i4008.scala:46:62 -------------------------------------- -46 |def someMethod1[A](using @annotation.implicitNotFound("No C[${B}] found") sc: C[A]) = 0 // error - | ^ - | Invalid reference to a type variable `B` found in the annotation argument. - | The variable does not occur as a parameter in the scope of method `someMethod1`. 
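The `i4008.check` expectations above exercise error E158, which rejects `${...}` interpolations in `@implicitNotFound` messages that do not name a type parameter in scope. For contrast, a sketch of a well-formed use (illustrative only; `Show` is a made-up type class):

```scala
import scala.annotation.implicitNotFound

// `A` is a type parameter of Show, so `${A}` is a legal reference;
// interpolating an unknown name such as `${B}` would trigger E158.
@implicitNotFound("No Show instance found for ${A}")
trait Show[A]:
  def show(a: A): String

object Show:
  given Show[Int] with
    def show(a: Int): String = a.toString

@main def showDemo(): Unit =
  // summon[Show[String]] would report the custom message for String
  println(summon[Show[Int]].show(42))
```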
diff --git a/tests/neg-custom-args/fatal-warnings/i4936.scala b/tests/neg-custom-args/fatal-warnings/i4936.scala deleted file mode 100644 index 65c026899e7e..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i4936.scala +++ /dev/null @@ -1 +0,0 @@ -final object Foo // error diff --git a/tests/neg-custom-args/fatal-warnings/i4986b.check b/tests/neg-custom-args/fatal-warnings/i4986b.check deleted file mode 100644 index 2e74c29b077c..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i4986b.check +++ /dev/null @@ -1,40 +0,0 @@ --- [E158] Reference Error: tests/neg-custom-args/fatal-warnings/i4986b.scala:3:65 -------------------------------------- -3 |@implicitNotFound(msg = "Cannot construct a collection of type ${Too} with elements of type ${Elem} based on a collection of type ${From}.") // error // error - | ^ - | Invalid reference to a type variable `Too` found in the annotation argument. - | The variable does not occur as a parameter in the scope of type `Meh`. --- [E158] Reference Error: tests/neg-custom-args/fatal-warnings/i4986b.scala:3:94 -------------------------------------- -3 |@implicitNotFound(msg = "Cannot construct a collection of type ${Too} with elements of type ${Elem} based on a collection of type ${From}.") // error // error - | ^ - | Invalid reference to a type variable `Elem` found in the annotation argument. - | The variable does not occur as a parameter in the scope of type `Meh`. --- [E158] Reference Error: tests/neg-custom-args/fatal-warnings/i4986b.scala:6:71 -------------------------------------- -6 |@implicitNotFound(msg = "Cannot construct a collection of type ${To} ${Elem}.") // error - | ^ - | Invalid reference to a type variable `Elem` found in the annotation argument. - | The variable does not occur as a parameter in the scope of type `Meh2`. --- [E158] Reference Error: tests/neg-custom-args/fatal-warnings/i4986b.scala:9:46 -------------------------------------- -9 |class C[T](implicit @implicitNotFound("No C[${t}] available") t: T) // error - | ^ - | Invalid reference to a type variable `t` found in the annotation argument. - | The variable does not occur as a parameter in the scope of the constructor of `C`. --- [E158] Reference Error: tests/neg-custom-args/fatal-warnings/i4986b.scala:12:54 ------------------------------------- -12 | def m[Aaa](implicit @implicitNotFound("I see no C[${Uuh}]") theC: C[Aaa]) = ??? // error - | ^ - | Invalid reference to a type variable `Uuh` found in the annotation argument. - | The variable does not occur as a parameter in the scope of method `m`. --- [E158] Reference Error: tests/neg-custom-args/fatal-warnings/i4986b.scala:18:73 ------------------------------------- -18 | def m[S](implicit @implicitNotFound("${X} ${Y} ${ Z } ${R} ${S} -- ${XX} ${ZZ} ${ Nix }") i: Int) = ??? // error // error // error - | ^ - | Invalid reference to a type variable `XX` found in the annotation argument. - | The variable does not occur as a parameter in the scope of method `m`. --- [E158] Reference Error: tests/neg-custom-args/fatal-warnings/i4986b.scala:18:79 ------------------------------------- -18 | def m[S](implicit @implicitNotFound("${X} ${Y} ${ Z } ${R} ${S} -- ${XX} ${ZZ} ${ Nix }") i: Int) = ??? // error // error // error - | ^ - | Invalid reference to a type variable `ZZ` found in the annotation argument. - | The variable does not occur as a parameter in the scope of method `m`. 
--- [E158] Reference Error: tests/neg-custom-args/fatal-warnings/i4986b.scala:18:86 ------------------------------------- -18 | def m[S](implicit @implicitNotFound("${X} ${Y} ${ Z } ${R} ${S} -- ${XX} ${ZZ} ${ Nix }") i: Int) = ??? // error // error // error - | ^ - | Invalid reference to a type variable `Nix` found in the annotation argument. - | The variable does not occur as a parameter in the scope of method `m`. diff --git a/tests/neg-custom-args/fatal-warnings/i6190b.check b/tests/neg-custom-args/fatal-warnings/i6190b.check deleted file mode 100644 index 8fc4e09896a9..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i6190b.check +++ /dev/null @@ -1,4 +0,0 @@ --- Error: tests/neg-custom-args/fatal-warnings/i6190b.scala:3:29 ------------------------------------------------------- -3 |def foo = List("1", "2").map(Rule) // error - | ^^^^ - | The method `apply` is inserted. The auto insertion will be deprecated, please write `Rule.apply` explicitly. diff --git a/tests/neg-custom-args/fatal-warnings/i6190b.scala b/tests/neg-custom-args/fatal-warnings/i6190b.scala deleted file mode 100644 index 470757791078..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i6190b.scala +++ /dev/null @@ -1,3 +0,0 @@ -case class Rule(name: String) - -def foo = List("1", "2").map(Rule) // error diff --git a/tests/neg-custom-args/fatal-warnings/i8711.check b/tests/neg-custom-args/fatal-warnings/i8711.check deleted file mode 100644 index 491d1678b5ac..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i8711.check +++ /dev/null @@ -1,12 +0,0 @@ --- [E030] Match case Unreachable Error: tests/neg-custom-args/fatal-warnings/i8711.scala:7:9 --------------------------- -7 | case x: B => x // error: this case is unreachable since class A is not a subclass of class B - | ^^^^ - | Unreachable case --- [E030] Match case Unreachable Error: tests/neg-custom-args/fatal-warnings/i8711.scala:12:9 -------------------------- -12 | case x: C => x // error - | ^^^^ - | Unreachable case --- [E030] Match case Unreachable Error: tests/neg-custom-args/fatal-warnings/i8711.scala:17:9 -------------------------- -17 | case x: (B | C) => x // error - | ^^^^^^^^^^ - | Unreachable case diff --git a/tests/neg-custom-args/fatal-warnings/i9168.scala b/tests/neg-custom-args/fatal-warnings/i9168.scala deleted file mode 100644 index 59263239f378..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i9168.scala +++ /dev/null @@ -1 +0,0 @@ -def g: Int = try 42 finally ; // error diff --git a/tests/neg-custom-args/fatal-warnings/i9266.check b/tests/neg-custom-args/fatal-warnings/i9266.check deleted file mode 100644 index 849aed5fa2e0..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i9266.check +++ /dev/null @@ -1,5 +0,0 @@ --- Error: tests/neg-custom-args/fatal-warnings/i9266.scala:3:22 -------------------------------------------------------- -3 |def test = { implicit x: Int => x + x } // error - | ^ - | This syntax is no longer supported; parameter needs to be enclosed in (...) - | This construct can be rewritten automatically under -rewrite -source future-migration. 
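The `i9266.check` expectation above covers the removal of Scala 2's un-parenthesized implicit closure syntax under `-source future-migration`; the message's literal fix is to parenthesize the parameter. A sketch of the idiomatic Scala 3 alternative, a context function literal — illustrative only, not the rewrite the patch tests:

```scala
// A context function of type Int ?=> Int: the Int parameter is passed
// implicitly at the call site instead of being written as `implicit x`.
val double: Int ?=> Int = (x: Int) ?=> x + x

def callsDouble: Int =
  given Int = 21
  double // the given Int is supplied automatically, yielding 42
```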
diff --git a/tests/neg-custom-args/fatal-warnings/i9266.scala b/tests/neg-custom-args/fatal-warnings/i9266.scala deleted file mode 100644 index 84268c078b49..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i9266.scala +++ /dev/null @@ -1,3 +0,0 @@ -import language.`future-migration` - -def test = { implicit x: Int => x + x } // error diff --git a/tests/neg-custom-args/fatal-warnings/i9408a.check b/tests/neg-custom-args/fatal-warnings/i9408a.check deleted file mode 100644 index ce2f8c4edd15..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i9408a.check +++ /dev/null @@ -1,24 +0,0 @@ --- Error: tests/neg-custom-args/fatal-warnings/i9408a.scala:16:20 ------------------------------------------------------ -16 | val length: Int = "qwerty" // error - | ^^^^^^^^ - |The conversion (Test3.implicitLength : String => Int) will not be applied implicitly here in Scala 3 because only implicit methods and instances of Conversion class will continue to work as implicit views. --- Error: tests/neg-custom-args/fatal-warnings/i9408a.scala:21:20 ------------------------------------------------------ -21 | val length: Int = "qwerty" // error - | ^^^^^^^^ - |The conversion (Test4.implicitLength : => String => Int) will not be applied implicitly here in Scala 3 because only implicit methods and instances of Conversion class will continue to work as implicit views. --- Error: tests/neg-custom-args/fatal-warnings/i9408a.scala:26:20 ------------------------------------------------------ -26 | val length: Int = "qwerty" // error - | ^^^^^^^^ - |The conversion (Test5.implicitLength : [A]: String => Int) will not be applied implicitly here in Scala 3 because only implicit methods and instances of Conversion class will continue to work as implicit views. --- Error: tests/neg-custom-args/fatal-warnings/i9408a.scala:31:20 ------------------------------------------------------ -31 | val length: Int = "qwerty" // error - | ^^^^^^^^ - |The conversion (Test6.implicitLength : Map[String, Int]) will not be applied implicitly here in Scala 3 because only implicit methods and instances of Conversion class will continue to work as implicit views. --- Error: tests/neg-custom-args/fatal-warnings/i9408a.scala:35:60 ------------------------------------------------------ -35 | implicit def a2int[A](a: A)(implicit ev: A => Int): Int = a // error - | ^ - |The conversion (ev : A => Int) will not be applied implicitly here in Scala 3 because only implicit methods and instances of Conversion class will continue to work as implicit views. --- Error: tests/neg-custom-args/fatal-warnings/i9408a.scala:59:2 ------------------------------------------------------- -59 | 123.foo // error - | ^^^ - |The conversion (Test11.a2foo : [A]: A => Test11.Foo) will not be applied implicitly here in Scala 3 because only implicit methods and instances of Conversion class will continue to work as implicit views. 
diff --git a/tests/neg-custom-args/fatal-warnings/i9408b.check b/tests/neg-custom-args/fatal-warnings/i9408b.check deleted file mode 100644 index 55fed99ae3bb..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i9408b.check +++ /dev/null @@ -1,5 +0,0 @@ - --- Error: tests/neg-custom-args/fatal-warnings/i9408b/Test_2.scala:6:20 ------------------------------------------------ -6 | val length: Int = "abc" // error - | ^^^^^ - |The conversion (test.conversions.Conv.implicitLength : String => Int) will not be applied implicitly here in Scala 3 because only implicit methods and instances of Conversion class will continue to work as implicit views. diff --git a/tests/neg-custom-args/fatal-warnings/i9408b/Test_2.scala b/tests/neg-custom-args/fatal-warnings/i9408b/Test_2.scala deleted file mode 100644 index 1c45f8ba66fe..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i9408b/Test_2.scala +++ /dev/null @@ -1,7 +0,0 @@ -import language.`3.0-migration` -import scala.language.implicitConversions - -object Test { - import test.conversions.Conv.* - val length: Int = "abc" // error -} diff --git a/tests/neg-custom-args/fatal-warnings/i9751.scala b/tests/neg-custom-args/fatal-warnings/i9751.scala deleted file mode 100644 index bb7d58957579..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i9751.scala +++ /dev/null @@ -1,9 +0,0 @@ -def f(): Unit = { - () // error - () -} - -inline def g(): Unit = { - () // error - () -} diff --git a/tests/neg-custom-args/fatal-warnings/i9776.scala b/tests/neg-custom-args/fatal-warnings/i9776.scala deleted file mode 100644 index 0de811e2adb3..000000000000 --- a/tests/neg-custom-args/fatal-warnings/i9776.scala +++ /dev/null @@ -1,59 +0,0 @@ -import scala.annotation.switch - -sealed trait Fruit - -object Fruit { - case object Apple extends Fruit - case object Banana extends Fruit - case object Lemon extends Fruit - case object Lime extends Fruit - case object Orange extends Fruit - - def isCitrus(fruit: Fruit): Boolean = - (fruit: @switch) match { // error Could not emit switch for @switch annotated match - case Orange => true - case Lemon => true - case Lime => true - case _ => false - } -} - - -sealed trait TaggedFruit { - def tag: Int -} - -object TaggedFruit { - case object Apple extends TaggedFruit { - val tag = 1 - } - case object Banana extends TaggedFruit { - val tag = 2 - } - case object Orange extends TaggedFruit { - val tag = 3 - } - - def isCitrus(fruit: TaggedFruit): Boolean = - (fruit.tag: @switch) match { // error Could not emit switch for @switch annotated match - case Apple.tag => true - case 2 => true - case 3 => true - case _ => false - } - - // fewer than four cases, so no warning - def succ1(fruit: TaggedFruit): Boolean = - (fruit.tag: @switch) match { - case 3 => false - case 2 | Apple.tag => true - } - - // fewer than four cases, so no warning - def succ2(fruit: TaggedFruit): Boolean = - (fruit.tag: @switch) match { - case 3 => false - case 2 => true - case Apple.tag => true - } -} diff --git a/tests/neg-custom-args/fatal-warnings/structural.scala b/tests/neg-custom-args/fatal-warnings/structural.scala deleted file mode 100644 index e24eeeae105b..000000000000 --- a/tests/neg-custom-args/fatal-warnings/structural.scala +++ /dev/null @@ -1,73 +0,0 @@ -import scala.reflect.Selectable.reflectiveSelectable - -package p1 { - -object test123 { - type A = { def a: Int } - def f(a: A): A = a -} - -object structural2 { - type A = { def a: Int } - - type B = { - def b: Int - } - - type AB = A & B - - def f(ab: AB): AB = ab - - f(new { - def a = 
43 - def b = 42 - }) -} -} - -package p2 { -object RClose { - type ReflectCloseable = { def close(): Unit } - def withReflectCloseable[T <: ReflectCloseable, R](s: T)(action: T => R): R = - try { - action(s) - } finally { - s.close() - } -} -} - -package p3 { - -object Test { - def idMap[C[_],T](m: { def map[U](f: T => U): C[U] }): C[T] = m.map(t => t) // error: polymorphic refinement method map without matching type in parent Object is no longer allowed // error: Structural access not allowed - - def main(args: Array[String]): Unit = { - idMap(Some(5)) // error: type mismatch: found Some[Int], required Object{map: [U](f: Any => U): Any} - idMap(Responder.constant(5)) // error: type mismatch: found Responder[Int], required Object{map: [U](f: Any => U): Any} - } -} -} -package p4 { - -trait A { self: Any { def p: Any } => - def f(b: => Unit): Unit = {} - f { p } // OK -} -} - -package p5 { -// t2810 -object Test { - val closeable1: { def close(): Unit } = new scala.io.Source { val iter: Iterator[Char] = "".iterator } - val closeable2: { def close(): Unit } = new java.io.Closeable { def close() = {} } -} -} - -package p6 { - - class Refinements { - val y: { val x: T; type T } // error: deprecated warning: illegal forward reference in refinement; now illegal - } - -} diff --git a/tests/neg-custom-args/fatal-warnings/supertraits.scala b/tests/neg-custom-args/fatal-warnings/supertraits.scala deleted file mode 100644 index 9337e2f925a3..000000000000 --- a/tests/neg-custom-args/fatal-warnings/supertraits.scala +++ /dev/null @@ -1,33 +0,0 @@ -transparent sealed trait TA -transparent sealed trait TB -trait S -case object a extends S, TA, TB -case object b extends S, TA, TB - -object Test: - - def choose0[X](x: X, y: X): X = x - def choose1[X <: TA](x: X, y: X): X = x - def choose2[X <: TB](x: X, y: X): X = x - def choose3[X <: Product](x: X, y: X): X = x - def choose4[X <: TA & TB](x: X, y: X): X = x - - choose0(a, b) match - case _: TA => ??? - case _: TB => ??? // error: unreachable - - choose1(a, b) match - case _: TA => ??? - case _: TB => ??? // error: unreachable - - choose2(a, b) match - case _: TB => ??? - case _: TA => ??? // error: unreachable - - choose3(a, b) match - case _: Product => ??? - case _: TA => ??? // error: unreachable - - choose4(a, b) match - case _: (TA & TB) => ??? - case _: Product => ??? // error: unreachable \ No newline at end of file diff --git a/tests/neg-custom-args/fatal-warnings/switches.scala b/tests/neg-custom-args/fatal-warnings/switches.scala deleted file mode 100644 index a327ab88758a..000000000000 --- a/tests/neg-custom-args/fatal-warnings/switches.scala +++ /dev/null @@ -1,100 +0,0 @@ -import scala.annotation.switch - -// this is testing not so much how things ought to be but how they are; -// the test is supposed to start failing if the behavior changes at all. 
-object Other { - val C1 = 'P' // fails: not final - final val C2 = 'Q' // succeeds: singleton type Char('Q') inferred - final val C3: Char = 'R' // fails: type Char specified - final val C4 = '\u000A' // succeeds like C2 but more unicodey -} - -object Main { - def succ1(c: Char) = (c: @switch) match { - case 'A' | 'B' | 'C' => true - case 'd' => true - case 'f' | 'g' => true - case _ => false - } - - def succ2(c: Char) = (c: @switch) match { - case 'A' | 'B' | 'C' => true - case Other.C2 => true - case Other.C4 => true - case _ => false - } - - // has a guard, but since SI-5830 that's ok - // PENDING: #5070 - // def succ_guard(c: Char) = (c: @switch) match { - // case 'A' | 'B' | 'C' => true - // case x if x == 'A' => true - // case _ => false - // } - - // throwing in @unchecked on the next two to make sure - // multiple annotations are processed correctly - - // thinks a val in an object is constant... so naive - def fail1(c: Char) = (c: @switch @unchecked) match { // error: Could not emit switch for @switch annotated match - case 'A' => true - case 'B' => true - case Other.C1 => true - case _ => false - } - - // more naivete - def fail2(c: Char) = (c: @unchecked @switch) match { // error: Could not emit switch for @switch annotated match - case 'A' => true - case 'B' => true - case Other.C3 => true - case _ => false - } - - // guard case done correctly - def succ3(c: Char) = (c: @switch) match { - case 'A' | 'B' | 'C' => true - case x => x == 'A' - } - - // some ints just to mix it up a bit - def succ4(x: Int, y: Int) = ((x + y): @switch) match { - case 1 => 5 - case 2 => 10 - case 3 => 20 - case 4 => 50 - case 5|6|7|8 => 100 - case _ => -1 - } - - def fail3(x: Any) = (x: @switch) match { // error: Could not emit switch for @switch annotated match - case 1 | 2 | 3 => true - case _ => false - } - - def fail4(x: AnyVal) = (x: @switch) match { // error: Could not emit switch for @switch annotated match - case 1 | 2 | 3 => true - case _ => false - } - - case class IntAnyVal(x: Int) extends AnyVal - - val Ten = IntAnyVal(10) - def fail5(x: IntAnyVal) = (x: @switch) match { // error: Could not emit switch for @switch annotated match - case IntAnyVal(1) => 0 - case Ten => 1 - case IntAnyVal(100) => 2 - case IntAnyVal(1000) => 3 - case IntAnyVal(10000) => 4 - } - - // the generated lookupswitch covers only a subset of the cases - final val One = IntAnyVal(1) - def fail6(x: IntAnyVal) = (x: @switch) match { // error: Could not emit switch for @switch annotated match - case One => 0 - case IntAnyVal(10) => 1 - case IntAnyVal(100) => 2 - case IntAnyVal(1000) => 3 - case IntAnyVal(10000) => 4 - } -} diff --git a/tests/neg-custom-args/fatal-warnings/symbolic-packages.check b/tests/neg-custom-args/fatal-warnings/symbolic-packages.check deleted file mode 100644 index f5abe6ed0a36..000000000000 --- a/tests/neg-custom-args/fatal-warnings/symbolic-packages.check +++ /dev/null @@ -1,16 +0,0 @@ --- Error: tests/neg-custom-args/fatal-warnings/symbolic-packages.scala:1:8 --------------------------------------------- -1 |package `with spaces` { // error - | ^^^^^^^^^^^^^ - | The package name `with spaces` will be encoded on the classpath, and can lead to undefined behaviour. --- Error: tests/neg-custom-args/fatal-warnings/symbolic-packages.scala:5:10 -------------------------------------------- -5 |package +.* { // error // error - | ^ - | The package name `*` will be encoded on the classpath, and can lead to undefined behaviour. 
--- Error: tests/neg-custom-args/fatal-warnings/symbolic-packages.scala:5:8 --------------------------------------------- -5 |package +.* { // error // error - | ^ - | The package name `+` will be encoded on the classpath, and can lead to undefined behaviour. --- Error: tests/neg-custom-args/fatal-warnings/symbolic-packages.scala:9:16 -------------------------------------------- -9 |package object `mixed_*` { // error - | ^^^^^^^ - | The package name `mixed_*` will be encoded on the classpath, and can lead to undefined behaviour. diff --git a/tests/neg-custom-args/fatal-warnings/type-test-syntesize.scala b/tests/neg-custom-args/fatal-warnings/type-test-syntesize.scala deleted file mode 100644 index 45ef924ce55a..000000000000 --- a/tests/neg-custom-args/fatal-warnings/type-test-syntesize.scala +++ /dev/null @@ -1,29 +0,0 @@ -import scala.reflect.TypeTest - -object Test { - def test[S, T](using TypeTest[S, T]): Unit = () - val a: A = ??? - - test[Any, Any] - test[Int, Int] - - test[Int, Any] - test[String, Any] - test[String, AnyRef] - - test[Any, Int] - test[Any, String] - test[Any, Some[_]] - test[Any, Array[Int]] - test[Seq[Int], List[Int]] - - test[Any, Some[Int]] // error - test[Any, a.X] // error - test[a.X, a.Y] // error - -} - -class A { - type X - type Y <: X -} diff --git a/tests/neg-custom-args/fatal-warnings/xfatalWarnings.scala b/tests/neg-custom-args/fatal-warnings/xfatalWarnings.scala deleted file mode 100644 index 862b94039e2a..000000000000 --- a/tests/neg-custom-args/fatal-warnings/xfatalWarnings.scala +++ /dev/null @@ -1,11 +0,0 @@ -object xfatalWarnings { - val opt:Option[String] = Some("test") - - opt match { // error when running with -Xfatal-warnings - case None => - } - - object Test { - while (true) {} // should be ok. no "pure expression does nothing in statement position" issued. 
- } -} diff --git a/tests/neg-custom-args/feature/convertible.scala b/tests/neg-custom-args/feature/convertible.scala deleted file mode 100644 index 1b9e1c79f011..000000000000 --- a/tests/neg-custom-args/feature/convertible.scala +++ /dev/null @@ -1,29 +0,0 @@ -import language.experimental.into - -class Text(val str: String) - -object Test: - - given Conversion[String, Text] = Text(_) - - def f(x: Text, y: => Text, zs: Text*) = - println(s"${x.str} ${y.str} ${zs.map(_.str).mkString(" ")}") - - f("abc", "def") // error // error - f("abc", "def", "xyz", "uvw") // error // error // error // error - f("abc", "def", "xyz", Text("uvw")) // error // error // error - - def g(x: into Text) = - println(x.str) - - - g("abc") // OK - val gg = g - gg("abc") // straight eta expansion is also OK - - def h1[X](x: X)(y: X): Unit = () - - def h(x: into Text) = - val y = h1(x) - y("abc") // error, inference through type variable does not propagate - diff --git a/tests/neg-custom-args/feature/i13946/BadPrinter.scala b/tests/neg-custom-args/feature/i13946/BadPrinter.scala deleted file mode 100644 index 3ab935de9711..000000000000 --- a/tests/neg-custom-args/feature/i13946/BadPrinter.scala +++ /dev/null @@ -1,4 +0,0 @@ -// in BadPrinter.scala -import language.future -class BadPrinter extends Printer: // error - override def print(s: String): Unit = println("Bad!!!") \ No newline at end of file diff --git a/tests/neg-custom-args/feature/impl-conv/B.scala b/tests/neg-custom-args/feature/impl-conv/B.scala deleted file mode 100644 index 45a51f28daf8..000000000000 --- a/tests/neg-custom-args/feature/impl-conv/B.scala +++ /dev/null @@ -1,10 +0,0 @@ -package implConv - -object B { - import A.{_, given} - - "".foo - - val x: Int = "" // ok - val y: String = 1 // error: feature -} diff --git a/tests/neg-custom-args/i10994.scala b/tests/neg-custom-args/i10994.scala deleted file mode 100644 index 65695ccf4352..000000000000 --- a/tests/neg-custom-args/i10994.scala +++ /dev/null @@ -1,2 +0,0 @@ -def foo = true match - case (b: Boolean): Boolean => () // error diff --git a/tests/neg-custom-args/i3246.scala b/tests/neg-custom-args/i3246.scala deleted file mode 100644 index dc22c33ac2f8..000000000000 --- a/tests/neg-custom-args/i3246.scala +++ /dev/null @@ -1,4 +0,0 @@ -class Test { - def foo(x: Int) = 1 - val bar: () => Int = foo _ // error: type mismatch -} diff --git a/tests/neg-custom-args/i9517.scala b/tests/neg-custom-args/i9517.scala deleted file mode 100644 index d1201d8ca39e..000000000000 --- a/tests/neg-custom-args/i9517.scala +++ /dev/null @@ -1,3 +0,0 @@ - -def test():Unit = foo({ case 1 => 10 }) // error -def foo(x: Any): Boolean = true diff --git a/tests/neg-custom-args/isInstanceOf/enum-approx2.scala b/tests/neg-custom-args/isInstanceOf/enum-approx2.scala deleted file mode 100644 index c7c8a6c4e1fb..000000000000 --- a/tests/neg-custom-args/isInstanceOf/enum-approx2.scala +++ /dev/null @@ -1,10 +0,0 @@ -sealed trait Exp[T] -case class Fun[A, B](f: Exp[A => B]) extends Exp[A => B] - -class Test { - def eval(e: Fun[Int, Int]) = e match { - case Fun(x: Fun[Int, Double]) => ??? // error - case Fun(x: Exp[Int => String]) => ??? 
// error - case _ => - } -} diff --git a/tests/neg-custom-args/isInstanceOf/gadt.scala b/tests/neg-custom-args/isInstanceOf/gadt.scala deleted file mode 100644 index 940555e160e7..000000000000 --- a/tests/neg-custom-args/isInstanceOf/gadt.scala +++ /dev/null @@ -1,13 +0,0 @@ -class Test { - trait A[+T] - class B[T] extends A[T] - - class C - class D extends C - - def quux(a: A[C]): Unit = a match { - case _: B[C] => // error!! - } - - quux(new B[D]) -} \ No newline at end of file diff --git a/tests/neg-custom-args/isInstanceOf/i11178.scala b/tests/neg-custom-args/isInstanceOf/i11178.scala deleted file mode 100644 index 47e8b4c3acab..000000000000 --- a/tests/neg-custom-args/isInstanceOf/i11178.scala +++ /dev/null @@ -1,39 +0,0 @@ -trait Box[+T] -case class Foo[+S](s: S) extends Box[S] - -def unwrap2[A](b: Box[A]): A = - b match - case _: Foo[Int] => 0 // error - -object Test1 { - // Invariant case, OK - sealed trait Bar[A] - - def test[A](bar: Bar[A]) = - bar match { - case _: Bar[Boolean] => ??? // error - case _ => ??? - } -} - -object Test2 { - // Covariant case - sealed trait Bar[+A] - - def test[A](bar: Bar[A]) = - bar match { - case _: Bar[Boolean] => ??? // error - case _ => ??? - } -} - -object Test3 { - // Contravariant case - sealed trait Bar[-A] - - def test[A](bar: Bar[A]) = - bar match { - case _: Bar[Boolean] => ??? // error - case _ => ??? - } -} diff --git a/tests/neg-custom-args/isInstanceOf/i3324.scala b/tests/neg-custom-args/isInstanceOf/i3324.scala deleted file mode 100644 index 9fac958e8f3c..000000000000 --- a/tests/neg-custom-args/isInstanceOf/i3324.scala +++ /dev/null @@ -1,4 +0,0 @@ -class Foo { - def foo(x: Any): Boolean = - x.isInstanceOf[List[String]] // error -} diff --git a/tests/neg-custom-args/isInstanceOf/i4297.scala b/tests/neg-custom-args/isInstanceOf/i4297.scala deleted file mode 100644 index 139123d15973..000000000000 --- a/tests/neg-custom-args/isInstanceOf/i4297.scala +++ /dev/null @@ -1,11 +0,0 @@ -class Test { - def test[X <: Option[Int]](x: X) = x.isInstanceOf[Some[Int]] - def test1[Y <: Int, X <: Option[Y]](x: X) = x.isInstanceOf[Some[Int]] - def test2(x: Any) = x.isInstanceOf[Function1[Nothing, _]] - def test3a(x: Any) = x.isInstanceOf[Function1[Any, _]] // error - def test3b(x: Any) = x.isInstanceOf[Function1[Int, _]] // error - def test4[Y <: Int, X <: Function1[Y, Unit]](x: X) = x.isInstanceOf[Function1[Int, _]] // error - def test5[Y <: Int, X <: Function1[Y, Unit]](x: X) = x.isInstanceOf[Function1[Int, Unit]] // error - def test6[Y <: Int, X <: Function1[Y, Unit]](x: X) = x.isInstanceOf[Function1[Int, Any]] // error - def test7[Y <: Int, X <: Function1[Y, Unit]](x: X) = x.isInstanceOf[Function1[_, Unit]] -} diff --git a/tests/neg-custom-args/isInstanceOf/i5826.scala b/tests/neg-custom-args/isInstanceOf/i5826.scala deleted file mode 100644 index c63bf3ab4aef..000000000000 --- a/tests/neg-custom-args/isInstanceOf/i5826.scala +++ /dev/null @@ -1,41 +0,0 @@ -class Foo { - def test[A]: (List[Int] | A) => Int = { - case ls: List[Int] => ls.head // error, A = List[String] - case _ => 0 - } - - def test2: List[Int] | List[String] => Int = { - case ls: List[Int] => ls.head // error - case _ => 0 - } - - trait A[T] - trait B[T] - - // suppose: class C extends A[Int] with B[String] - def test3[X]: A[X] | B[X] => Int = { - case ls: A[X] => 4 // error - case _ => 0 - } - - def test4[A](x: List[Int] | (A => Int)) = x match { - case ls: List[Int] => ls.head // error, List extends Int => T - case _ => 0 - } - - final class C[T] extends A[T] - - def 
test5[T](x: A[T] | B[T] | Option[T]): Boolean = x.isInstanceOf[C[String]] // error - - def test6[T](x: A[T] | B[T] | Option[T]): Boolean = x.isInstanceOf[C[T]] - - def test7[A](x: Option[Int] | (A => Int)) = x match { - case ls: Option[Int] => ls.head // OK, Option decomposes to Some and None - case _ => 0 - } - - def test8(x: List[Int] | A[String]) = x match { - case ls: List[Int] => ls.head // OK, List decomposes to :: and Nil - case _ => 0 - } -} diff --git a/tests/neg-custom-args/isInstanceOf/i5826b.scala b/tests/neg-custom-args/isInstanceOf/i5826b.scala deleted file mode 100644 index 025eb39c4f88..000000000000 --- a/tests/neg-custom-args/isInstanceOf/i5826b.scala +++ /dev/null @@ -1,11 +0,0 @@ -class Foo { - def test1[A]: List[Int] | A => Int = { - case ls: List[_] => ls.head // error - case _ => 0 - } - - def test2[A]: List[Int] | A => Int = { - case ls: List[_] => ls.size - case _ => 0 - } -} diff --git a/tests/neg-custom-args/isInstanceOf/i8932.scala b/tests/neg-custom-args/isInstanceOf/i8932.scala deleted file mode 100644 index 84d2f7d4990a..000000000000 --- a/tests/neg-custom-args/isInstanceOf/i8932.scala +++ /dev/null @@ -1,12 +0,0 @@ -sealed trait Foo[+A] -case class Bar[A]() extends Foo[A] - -class Dummy extends Bar[Nothing] with Foo[String] - -def bugReport[A](foo: Foo[A]): Foo[A] = - foo match { - case bar: Bar[A] => bar // error - case dummy: Dummy => ??? - } - -def test = bugReport(new Dummy: Foo[String]) diff --git a/tests/neg-custom-args/kind-projector-underscores.scala b/tests/neg-custom-args/kind-projector-underscores.scala deleted file mode 100644 index 64d46b16a7c6..000000000000 --- a/tests/neg-custom-args/kind-projector-underscores.scala +++ /dev/null @@ -1,14 +0,0 @@ -package kind_projector_neg - -trait Foo[F[_]] - -class Bar1 extends Foo[Either[_, _]] // error -class Bar2 extends Foo[_] // error -class Bar3 extends Foo[λ[List[x] => Int]] // error - -object Test { - type -_ = Int // error -_ not allowed as a type def name without backticks - type +_ = Int // error +_ not allowed as a type def name without backticks -} - -class BacktickUnderscoreIsNotFine extends Foo[List[`_`]] // error wildcard invalid as backquoted identifier diff --git a/tests/neg-custom-args/kind-projector.scala b/tests/neg-custom-args/kind-projector.scala deleted file mode 100644 index 56f7894de078..000000000000 --- a/tests/neg-custom-args/kind-projector.scala +++ /dev/null @@ -1,7 +0,0 @@ -package kind_projector_neg - -trait Foo[F[_]] - -class Bar1 extends Foo[Either[*, *]] // error -class Bar2 extends Foo[*] // error -class Bar3 extends Foo[λ[List[x] => Int]] // error diff --git a/tests/neg-custom-args/newline-braces.scala b/tests/neg-custom-args/newline-braces.scala deleted file mode 100644 index 0a5e2a469bf3..000000000000 --- a/tests/neg-custom-args/newline-braces.scala +++ /dev/null @@ -1,6 +0,0 @@ -def f: List[Int] = { - List(1, 2, 3).map // no newline inserted here in Scala-2 compat mode - { x => // error (migration) - x + 1 - } -} diff --git a/tests/neg-custom-args/no-experimental/experimentalInline.scala b/tests/neg-custom-args/no-experimental/experimentalInline.scala deleted file mode 100644 index eb49bf15d11a..000000000000 --- a/tests/neg-custom-args/no-experimental/experimentalInline.scala +++ /dev/null @@ -1,8 +0,0 @@ -import scala.annotation.experimental - -@experimental -inline def g() = () - -def test: Unit = - g() // error - () diff --git a/tests/neg-custom-args/no-experimental/experimentalTypeRHS.scala b/tests/neg-custom-args/no-experimental/experimentalTypeRHS.scala deleted 
file mode 100644 index 3aaeb960bae9..000000000000 --- a/tests/neg-custom-args/no-experimental/experimentalTypeRHS.scala +++ /dev/null @@ -1,6 +0,0 @@ -import scala.annotation.experimental - -@experimental type E - -type A = E // error -@experimental type B = E diff --git a/tests/neg-custom-args/no-experimental/i13848.scala b/tests/neg-custom-args/no-experimental/i13848.scala deleted file mode 100644 index 886ab1e85d67..000000000000 --- a/tests/neg-custom-args/no-experimental/i13848.scala +++ /dev/null @@ -1,7 +0,0 @@ -import annotation.experimental - -@main -def run(): Unit = f // error - -@experimental -def f = 2 diff --git a/tests/neg-custom-args/nowarn/nowarn-parser-error.check b/tests/neg-custom-args/nowarn/nowarn-parser-error.check deleted file mode 100644 index 8be545a288a6..000000000000 --- a/tests/neg-custom-args/nowarn/nowarn-parser-error.check +++ /dev/null @@ -1,13 +0,0 @@ --- [E040] Syntax Error: tests/neg-custom-args/nowarn/nowarn-parser-error.scala:3:6 ------------------------------------- -3 | def def // error - | ^^^ - | an identifier expected, but 'def' found - | - | longer explanation available when compiling with `-explain` --- [E002] Syntax Warning: tests/neg-custom-args/nowarn/nowarn-parser-error.scala:2:10 ---------------------------------- -2 | def a = try 1 // warn - | ^^^^^ - | A try without catch or finally is equivalent to putting - | its body in a block; no exceptions are handled. - | - | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/nowarn/nowarn-parser-error.scala b/tests/neg-custom-args/nowarn/nowarn-parser-error.scala deleted file mode 100644 index 2c65d3cf1838..000000000000 --- a/tests/neg-custom-args/nowarn/nowarn-parser-error.scala +++ /dev/null @@ -1,4 +0,0 @@ -class C { - def a = try 1 // warn - def def // error -} diff --git a/tests/neg-custom-args/nowarn/nowarn-typer-error.check b/tests/neg-custom-args/nowarn/nowarn-typer-error.check deleted file mode 100644 index a6c915807dca..000000000000 --- a/tests/neg-custom-args/nowarn/nowarn-typer-error.check +++ /dev/null @@ -1,6 +0,0 @@ --- [E006] Not Found Error: tests/neg-custom-args/nowarn/nowarn-typer-error.scala:4:11 ---------------------------------- -4 | def t1 = / // error - | ^ - | Not found: / - | - | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/nowarn/nowarn.check b/tests/neg-custom-args/nowarn/nowarn.check deleted file mode 100644 index 855f741a15bf..000000000000 --- a/tests/neg-custom-args/nowarn/nowarn.check +++ /dev/null @@ -1,108 +0,0 @@ --- [E002] Syntax Warning: tests/neg-custom-args/nowarn/nowarn.scala:9:10 ----------------------------------------------- -9 |def t1a = try 1 // warning (parser) - | ^^^^^ - | A try without catch or finally is equivalent to putting - | its body in a block; no exceptions are handled. - | - | longer explanation available when compiling with `-explain` --- [E002] Syntax Warning: tests/neg-custom-args/nowarn/nowarn.scala:23:25 ---------------------------------------------- -23 |@nowarn(o.inl) def t2d = try 1 // two warnings (`inl` is not a compile-time constant) - | ^^^^^ - | A try without catch or finally is equivalent to putting - | its body in a block; no exceptions are handled. 
- | - | longer explanation available when compiling with `-explain` --- [E002] Syntax Warning: tests/neg-custom-args/nowarn/nowarn.scala:31:26 ---------------------------------------------- -31 |@nowarn("id=1") def t4d = try 1 // error and warning (unused nowarn, wrong id) - | ^^^^^ - | A try without catch or finally is equivalent to putting - | its body in a block; no exceptions are handled. - | - | longer explanation available when compiling with `-explain` --- [E002] Syntax Warning: tests/neg-custom-args/nowarn/nowarn.scala:33:28 ---------------------------------------------- -33 |@nowarn("verbose") def t5 = try 1 // warning with details - | ^^^^^ - | A try without catch or finally is equivalent to putting - | its body in a block; no exceptions are handled. -Matching filters for @nowarn or -Wconf: - - id=E2 - - name=EmptyCatchAndFinallyBlock - | - | longer explanation available when compiling with `-explain` --- [E129] Potential Issue Warning: tests/neg-custom-args/nowarn/nowarn.scala:13:11 ------------------------------------- -13 |def t2 = { 1; 2 } // warning (the invalid nowarn doesn't silence anything) - | ^ - | A pure expression does nothing in statement position; you may be omitting necessary parentheses - | - | longer explanation available when compiling with `-explain` --- Warning: tests/neg-custom-args/nowarn/nowarn.scala:12:8 ------------------------------------------------------------- -12 |@nowarn("wat?") // warning (typer, invalid filter) - | ^^^^^^ - | Invalid message filter - | unknown filter: wat? --- [E129] Potential Issue Warning: tests/neg-custom-args/nowarn/nowarn.scala:16:12 ------------------------------------- -16 |def t2a = { 1; 2 } // warning (invalid nowarn doesn't silence) - | ^ - | A pure expression does nothing in statement position; you may be omitting necessary parentheses - | - | longer explanation available when compiling with `-explain` --- Warning: tests/neg-custom-args/nowarn/nowarn.scala:15:8 ------------------------------------------------------------- -15 |@nowarn(t1a.toString) // warning (typer, argument not a compile-time constant) - | ^^^^^^^^^^^^ - | filter needs to be a compile-time constant string --- Warning: tests/neg-custom-args/nowarn/nowarn.scala:23:10 ------------------------------------------------------------ -23 |@nowarn(o.inl) def t2d = try 1 // two warnings (`inl` is not a compile-time constant) - | ^^^^^ - | filter needs to be a compile-time constant string --- Deprecation Warning: tests/neg-custom-args/nowarn/nowarn.scala:37:10 ------------------------------------------------ -37 |def t6a = f // warning (refchecks, deprecation) - | ^ - | method f is deprecated --- Deprecation Warning: tests/neg-custom-args/nowarn/nowarn.scala:40:30 ------------------------------------------------ -40 |@nowarn("msg=fish") def t6d = f // error (unused nowarn), warning (deprecation) - | ^ - | method f is deprecated --- Deprecation Warning: tests/neg-custom-args/nowarn/nowarn.scala:47:10 ------------------------------------------------ -47 |def t7c = f // warning (deprecation) - | ^ - | method f is deprecated --- Unchecked Warning: tests/neg-custom-args/nowarn/nowarn.scala:53:7 --------------------------------------------------- -53 | case _: List[Int] => 0 // warning (patmat, unchecked) - | ^ - |the type test for List[Int] cannot be checked at runtime because its type arguments can't be determined from Any --- Error: tests/neg-custom-args/nowarn/nowarn.scala:31:1 --------------------------------------------------------------- -31 |@nowarn("id=1") 
def t4d = try 1 // error and warning (unused nowarn, wrong id) - |^^^^^^^^^^^^^^^ - |@nowarn annotation does not suppress any warnings --- Error: tests/neg-custom-args/nowarn/nowarn.scala:40:1 --------------------------------------------------------------- -40 |@nowarn("msg=fish") def t6d = f // error (unused nowarn), warning (deprecation) - |^^^^^^^^^^^^^^^^^^^ - |@nowarn annotation does not suppress any warnings --- Error: tests/neg-custom-args/nowarn/nowarn.scala:48:5 --------------------------------------------------------------- -48 | : @nowarn("msg=fish") // error (unused nowarn) - | ^^^^^^^^^^^^^^^^^^^ - | @nowarn annotation does not suppress any warnings --- Error: tests/neg-custom-args/nowarn/nowarn.scala:60:0 --------------------------------------------------------------- -60 |@nowarn def t9a = { 1: @nowarn; 2 } // error (outer @nowarn is unused) - |^^^^^^^ - |@nowarn annotation does not suppress any warnings --- Error: tests/neg-custom-args/nowarn/nowarn.scala:61:27 -------------------------------------------------------------- -61 |@nowarn def t9b = { 1: Int @nowarn; 2 } // error (inner @nowarn is unused, it covers the type, not the expression) - | ^^^^^^^ - | @nowarn annotation does not suppress any warnings --- Error: tests/neg-custom-args/nowarn/nowarn.scala:66:0 --------------------------------------------------------------- -66 |@nowarn @ann(f) def t10b = 0 // error (unused nowarn) - |^^^^^^^ - |@nowarn annotation does not suppress any warnings --- Error: tests/neg-custom-args/nowarn/nowarn.scala:67:8 --------------------------------------------------------------- -67 |@ann(f: @nowarn) def t10c = 0 // error (unused nowarn), should be silent - | ^^^^^^^ - | @nowarn annotation does not suppress any warnings --- Error: tests/neg-custom-args/nowarn/nowarn.scala:70:0 --------------------------------------------------------------- -70 |@nowarn class I1a { // error (unused nowarn) - |^^^^^^^ - |@nowarn annotation does not suppress any warnings --- Error: tests/neg-custom-args/nowarn/nowarn.scala:75:0 --------------------------------------------------------------- -75 |@nowarn class I1b { // error (unused nowarn) - |^^^^^^^ - |@nowarn annotation does not suppress any warnings diff --git a/tests/neg-custom-args/ovlazy.scala b/tests/neg-custom-args/ovlazy.scala deleted file mode 100644 index 3a263e5ed4bf..000000000000 --- a/tests/neg-custom-args/ovlazy.scala +++ /dev/null @@ -1,6 +0,0 @@ -class A { - val x: Int = 1 -} -class B extends A { - override lazy val x: Int = 2 // error -} diff --git a/tests/neg-custom-args/wildcards.scala b/tests/neg-custom-args/wildcards.scala index e8ac77c116be..c734e2d046b5 100644 --- a/tests/neg-custom-args/wildcards.scala +++ b/tests/neg-custom-args/wildcards.scala @@ -1,3 +1,5 @@ +//> using options -source future -deprecation -Xfatal-warnings + object Test { val xs: List[_] = List(1, 2, 3) // error val ys: Map[_ <: AnyRef, _ >: Null] = Map() // error // error diff --git a/tests/neg-custom-args/isInstanceOf/1828.scala b/tests/neg-deep-subtype/1828.scala similarity index 85% rename from tests/neg-custom-args/isInstanceOf/1828.scala rename to tests/neg-deep-subtype/1828.scala index aeb83f1a1070..ae228a83e898 100644 --- a/tests/neg-custom-args/isInstanceOf/1828.scala +++ b/tests/neg-deep-subtype/1828.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + class Test { def remove[S](a: S | Int, f: Int => S):S = a match { case a: S => a // error diff --git a/tests/neg-custom-args/isInstanceOf/3324b.scala b/tests/neg-deep-subtype/3324b.scala similarity 
index 88% rename from tests/neg-custom-args/isInstanceOf/3324b.scala rename to tests/neg-deep-subtype/3324b.scala index 8b60bff4d9da..df0cc5432eff 100644 --- a/tests/neg-custom-args/isInstanceOf/3324b.scala +++ b/tests/neg-deep-subtype/3324b.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + class C[T] { val x: Any = ??? if (x.isInstanceOf[List[String]]) // error: unchecked diff --git a/tests/neg-custom-args/isInstanceOf/3324f.scala b/tests/neg-deep-subtype/3324f.scala similarity index 79% rename from tests/neg-custom-args/isInstanceOf/3324f.scala rename to tests/neg-deep-subtype/3324f.scala index 431c2710e47b..445da5cb25a0 100644 --- a/tests/neg-custom-args/isInstanceOf/3324f.scala +++ b/tests/neg-deep-subtype/3324f.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + trait C[T] class D[T] diff --git a/tests/neg-custom-args/isInstanceOf/3324g.scala b/tests/neg-deep-subtype/3324g.scala similarity index 90% rename from tests/neg-custom-args/isInstanceOf/3324g.scala rename to tests/neg-deep-subtype/3324g.scala index 5c37929eb464..a5b842e4e450 100644 --- a/tests/neg-custom-args/isInstanceOf/3324g.scala +++ b/tests/neg-deep-subtype/3324g.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + class Test { trait A[+T] class B[T] extends A[T] diff --git a/tests/neg-custom-args/isInstanceOf/4075.scala.ignore b/tests/neg-deep-subtype/4075.scala.ignore similarity index 92% rename from tests/neg-custom-args/isInstanceOf/4075.scala.ignore rename to tests/neg-deep-subtype/4075.scala.ignore index 4fcbaf0331fe..4d87fdce1c81 100644 --- a/tests/neg-custom-args/isInstanceOf/4075.scala.ignore +++ b/tests/neg-deep-subtype/4075.scala.ignore @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + object Test { trait Foo case class One[+T](fst: T) diff --git a/tests/neg-custom-args/isInstanceOf/JavaSeqLiteral.scala b/tests/neg-deep-subtype/JavaSeqLiteral.scala similarity index 93% rename from tests/neg-custom-args/isInstanceOf/JavaSeqLiteral.scala rename to tests/neg-deep-subtype/JavaSeqLiteral.scala index 19f15dc0957a..6003731ae657 100644 --- a/tests/neg-custom-args/isInstanceOf/JavaSeqLiteral.scala +++ b/tests/neg-deep-subtype/JavaSeqLiteral.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + object Test1 { trait Tree[-T] diff --git a/tests/neg-custom-args/conditionalWarnings.scala b/tests/neg-deep-subtype/conditionalWarnings.scala similarity index 86% rename from tests/neg-custom-args/conditionalWarnings.scala rename to tests/neg-deep-subtype/conditionalWarnings.scala index a85e3c8af427..c4757cbb7546 100644 --- a/tests/neg-custom-args/conditionalWarnings.scala +++ b/tests/neg-deep-subtype/conditionalWarnings.scala @@ -1,5 +1,5 @@ +//> using options -deprecation -Xfatal-warnings -// run with -deprecation -Xfatal-warnings object Test { @deprecated def foo = ??? diff --git a/tests/neg-deep-subtype/gadt.scala b/tests/neg-deep-subtype/gadt.scala new file mode 100644 index 000000000000..661c04fef373 --- /dev/null +++ b/tests/neg-deep-subtype/gadt.scala @@ -0,0 +1,15 @@ +//> using options -Xfatal-warnings + +class Test { + trait A[+T] + class B[T] extends A[T] + + class C + class D extends C + + def quux(a: A[C]): Unit = a match { + case _: B[C] => // error!! 
+ } + + quux(new B[D]) +} \ No newline at end of file diff --git a/tests/neg-custom-args/isInstanceOf/html.scala b/tests/neg-deep-subtype/html.scala similarity index 93% rename from tests/neg-custom-args/isInstanceOf/html.scala rename to tests/neg-deep-subtype/html.scala index 52afde2a289b..f17cfb661505 100644 --- a/tests/neg-custom-args/isInstanceOf/html.scala +++ b/tests/neg-deep-subtype/html.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + object HTML: type AttrArg = AppliedAttr | Seq[AppliedAttr] opaque type AppliedAttr = String diff --git a/tests/neg-custom-args/allow-deep-subtypes/i11064.scala b/tests/neg-deep-subtype/i11064.scala similarity index 100% rename from tests/neg-custom-args/allow-deep-subtypes/i11064.scala rename to tests/neg-deep-subtype/i11064.scala diff --git a/tests/neg-custom-args/allow-double-bindings/i1240.scala b/tests/neg-deep-subtype/i1240.scala similarity index 100% rename from tests/neg-custom-args/allow-double-bindings/i1240.scala rename to tests/neg-deep-subtype/i1240.scala diff --git a/tests/neg-custom-args/i12650.scala b/tests/neg-deep-subtype/i12650.scala similarity index 100% rename from tests/neg-custom-args/i12650.scala rename to tests/neg-deep-subtype/i12650.scala diff --git a/tests/neg-custom-args/i1650.scala b/tests/neg-deep-subtype/i1650.scala similarity index 100% rename from tests/neg-custom-args/i1650.scala rename to tests/neg-deep-subtype/i1650.scala diff --git a/tests/neg-custom-args/isInstanceOf/i17435.scala b/tests/neg-deep-subtype/i17435.scala similarity index 75% rename from tests/neg-custom-args/isInstanceOf/i17435.scala rename to tests/neg-deep-subtype/i17435.scala index e32149db3137..aec165a18f56 100644 --- a/tests/neg-custom-args/isInstanceOf/i17435.scala +++ b/tests/neg-deep-subtype/i17435.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + import scala.collection.mutable object Test: @@ -14,10 +16,10 @@ object Test: type JsonArray = mutable.Buffer[Json] def encode(x: Json): Int = x match - case str: String => 1 - case b: Boolean => 2 - case i: Int => 3 - case d: Double => 4 + case str: String => 1 // error + case b: Boolean => 2 // error + case i: Int => 3 // error + case d: Double => 4 // error case arr: JsonArray => 5 // error case obj: JsonObject => 6 // error case _ => 7 diff --git a/tests/neg-custom-args/i1754.scala b/tests/neg-deep-subtype/i1754.scala similarity index 100% rename from tests/neg-custom-args/i1754.scala rename to tests/neg-deep-subtype/i1754.scala diff --git a/tests/neg-custom-args/allow-double-bindings/i2002.scala b/tests/neg-deep-subtype/i2002.scala similarity index 100% rename from tests/neg-custom-args/allow-double-bindings/i2002.scala rename to tests/neg-deep-subtype/i2002.scala diff --git a/tests/neg-deep-subtype/i3324.scala b/tests/neg-deep-subtype/i3324.scala new file mode 100644 index 000000000000..9b1060836430 --- /dev/null +++ b/tests/neg-deep-subtype/i3324.scala @@ -0,0 +1,6 @@ +//> using options -Xfatal-warnings + +class Foo { + def foo(x: Any): Boolean = + x.isInstanceOf[List[String]] // error +} diff --git a/tests/neg-custom-args/i3627.scala b/tests/neg-deep-subtype/i3627.scala similarity index 100% rename from tests/neg-custom-args/i3627.scala rename to tests/neg-deep-subtype/i3627.scala diff --git a/tests/neg-custom-args/i3882.scala b/tests/neg-deep-subtype/i3882.scala similarity index 100% rename from tests/neg-custom-args/i3882.scala rename to tests/neg-deep-subtype/i3882.scala diff --git a/tests/neg-deep-subtype/i4297.scala b/tests/neg-deep-subtype/i4297.scala new file mode 
100644 index 000000000000..88e66c59d1b4 --- /dev/null +++ b/tests/neg-deep-subtype/i4297.scala @@ -0,0 +1,13 @@ +//> using options -Xfatal-warnings + +class Test { + def test[X <: Option[Int]](x: X) = x.isInstanceOf[Some[Int]] + def test1[Y <: Int, X <: Option[Y]](x: X) = x.isInstanceOf[Some[Int]] + def test2(x: Any) = x.isInstanceOf[Function1[Nothing, ?]] + def test3a(x: Any) = x.isInstanceOf[Function1[Any, ?]] // error + def test3b(x: Any) = x.isInstanceOf[Function1[Int, ?]] // error + def test4[Y <: Int, X <: Function1[Y, Unit]](x: X) = x.isInstanceOf[Function1[Int, ?]] // error + def test5[Y <: Int, X <: Function1[Y, Unit]](x: X) = x.isInstanceOf[Function1[Int, Unit]] // error + def test6[Y <: Int, X <: Function1[Y, Unit]](x: X) = x.isInstanceOf[Function1[Int, Any]] // error + def test7[Y <: Int, X <: Function1[Y, Unit]](x: X) = x.isInstanceOf[Function1[?, Unit]] +} diff --git a/tests/neg-custom-args/i4372.scala b/tests/neg-deep-subtype/i4372.scala similarity index 100% rename from tests/neg-custom-args/i4372.scala rename to tests/neg-deep-subtype/i4372.scala diff --git a/tests/neg-custom-args/isInstanceOf/i5495.scala b/tests/neg-deep-subtype/i5495.scala similarity index 82% rename from tests/neg-custom-args/isInstanceOf/i5495.scala rename to tests/neg-deep-subtype/i5495.scala index ad396f1ff6c8..7d50c9761c30 100644 --- a/tests/neg-custom-args/isInstanceOf/i5495.scala +++ b/tests/neg-deep-subtype/i5495.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + class A class B diff --git a/tests/neg-deep-subtype/i5826b.scala b/tests/neg-deep-subtype/i5826b.scala new file mode 100644 index 000000000000..a3c3382bbc51 --- /dev/null +++ b/tests/neg-deep-subtype/i5826b.scala @@ -0,0 +1,13 @@ +//> using options -Xfatal-warnings + +class Foo { + def test1[A]: List[Int] | A => Int = { + case ls: List[?] => ls.head // error + case _ => 0 + } + + def test2[A]: List[Int] | A => Int = { + case ls: List[?] 
=> ls.size + case _ => 0 + } +} diff --git a/tests/neg-custom-args/allow-deep-subtypes/i5877.scala b/tests/neg-deep-subtype/i5877.scala similarity index 93% rename from tests/neg-custom-args/allow-deep-subtypes/i5877.scala rename to tests/neg-deep-subtype/i5877.scala index dcb89e1b565e..c1d5ea4a31c1 100644 --- a/tests/neg-custom-args/allow-deep-subtypes/i5877.scala +++ b/tests/neg-deep-subtype/i5877.scala @@ -25,7 +25,7 @@ object Main { assert(implicitly[thatSelf.type <:< that.This] != null) } val that: HasThisType[_] = Foo() // null.asInstanceOf - testSelf(that) // error + testSelf(that) // error: recursion limit exceeded } @@ -36,7 +36,7 @@ object Main { } val that: HasThisType[_] = Foo() // null.asInstanceOf // this line of code makes Dotty compiler infinite recursion (stopped only by overflow) - comment it to make it compilable again - testSelf(that) // error + testSelf(that) // error: recursion limit exceeded } // ---- ---- ---- ---- diff --git a/tests/neg-custom-args/i6300.scala b/tests/neg-deep-subtype/i6300.scala similarity index 100% rename from tests/neg-custom-args/i6300.scala rename to tests/neg-deep-subtype/i6300.scala diff --git a/tests/neg-custom-args/allow-deep-subtypes/i8464a.scala b/tests/neg-deep-subtype/i8464a.scala similarity index 100% rename from tests/neg-custom-args/allow-deep-subtypes/i8464a.scala rename to tests/neg-deep-subtype/i8464a.scala diff --git a/tests/neg-custom-args/allow-deep-subtypes/i9325.scala b/tests/neg-deep-subtype/i9325.scala similarity index 100% rename from tests/neg-custom-args/allow-deep-subtypes/i9325.scala rename to tests/neg-deep-subtype/i9325.scala diff --git a/tests/neg-custom-args/interop-polytypes.scala b/tests/neg-deep-subtype/interop-polytypes.scala similarity index 89% rename from tests/neg-custom-args/interop-polytypes.scala rename to tests/neg-deep-subtype/interop-polytypes.scala index 5718e0fc564d..90922b63f7d0 100644 --- a/tests/neg-custom-args/interop-polytypes.scala +++ b/tests/neg-deep-subtype/interop-polytypes.scala @@ -1,3 +1,5 @@ +//> using options -Yexplicit-nulls + class Foo { import java.util.ArrayList // Test that return values in PolyTypes are marked as nullable. diff --git a/tests/neg-custom-args/allow-deep-subtypes/matchtype-loop2.scala b/tests/neg-deep-subtype/matchtype-loop2.scala similarity index 100% rename from tests/neg-custom-args/allow-deep-subtypes/matchtype-loop2.scala rename to tests/neg-deep-subtype/matchtype-loop2.scala diff --git a/tests/neg-custom-args/isInstanceOf/or-type-trees.scala b/tests/neg-deep-subtype/or-type-trees.scala similarity index 87% rename from tests/neg-custom-args/isInstanceOf/or-type-trees.scala rename to tests/neg-deep-subtype/or-type-trees.scala index b393578acdc7..d0338ffe6066 100644 --- a/tests/neg-custom-args/isInstanceOf/or-type-trees.scala +++ b/tests/neg-deep-subtype/or-type-trees.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + object Test1 { trait Tree trait Context @@ -12,7 +14,7 @@ object Test1 { def foo3(myTree: Tree | (Context => Tree)) = myTree match - case treeFn: (_ => _) => // ok + case treeFn: (? => ?) => // ok case _ => } @@ -33,6 +35,6 @@ object Test2 { def foo3(myTree: Tree[Type] | (Context => Tree[Type])) = myTree match - case treeFn: (_ => _) => // ok + case treeFn: (? => ?) 
=> // ok case _ => } \ No newline at end of file diff --git a/tests/neg-custom-args/allow-double-bindings/overloadsOnAbstractTypes.scala b/tests/neg-deep-subtype/overloadsOnAbstractTypes.scala similarity index 100% rename from tests/neg-custom-args/allow-double-bindings/overloadsOnAbstractTypes.scala rename to tests/neg-deep-subtype/overloadsOnAbstractTypes.scala diff --git a/tests/neg-custom-args/isInstanceOf/patmat-applied.scala b/tests/neg-deep-subtype/patmat-applied.scala similarity index 84% rename from tests/neg-custom-args/isInstanceOf/patmat-applied.scala rename to tests/neg-deep-subtype/patmat-applied.scala index eaf3c240aaa1..9123f3ab2ed6 100644 --- a/tests/neg-custom-args/isInstanceOf/patmat-applied.scala +++ b/tests/neg-deep-subtype/patmat-applied.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + class A[-T] class B[T] extends A[T] diff --git a/tests/neg-custom-args/isInstanceOf/refined-types.scala b/tests/neg-deep-subtype/refined-types.scala similarity index 97% rename from tests/neg-custom-args/isInstanceOf/refined-types.scala rename to tests/neg-deep-subtype/refined-types.scala index dfcc3e343099..5f5cc5a45f04 100644 --- a/tests/neg-custom-args/isInstanceOf/refined-types.scala +++ b/tests/neg-deep-subtype/refined-types.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + class A class B extends A type AA = A { type T = Int } diff --git a/tests/neg-custom-args/isInstanceOf/t2755.scala b/tests/neg-deep-subtype/t2755.scala similarity index 91% rename from tests/neg-custom-args/isInstanceOf/t2755.scala rename to tests/neg-deep-subtype/t2755.scala index 9073e9253098..ec3cb6aadefc 100644 --- a/tests/neg-custom-args/isInstanceOf/t2755.scala +++ b/tests/neg-deep-subtype/t2755.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + // Test cases: the only place we can cut and paste without crying // ourself to sleep. object Test { @@ -7,16 +9,16 @@ object Test { case x: Array[Float] => x.sum.toInt case x: Array[String] => x.size case x: Array[AnyRef] => 5 - case x: Array[_] => 6 + case x: Array[?] => 6 case _ => 7 } - def f2(a: Array[_]) = a match { + def f2(a: Array[?]) = a match { case x: Array[Int] => x(0) case x: Array[Double] => 2 case x: Array[Float] => x.sum.toInt case x: Array[String] => x.size case x: Array[AnyRef] => 5 - case x: Array[_] => 6 + case x: Array[?] => 6 case _ => 7 // error: only null is matched } def f3[T](a: Array[T]) = a match { @@ -25,7 +27,7 @@ object Test { case x: Array[Float] => x.sum.toInt case x: Array[String] => x.size case x: Array[AnyRef] => 5 - case x: Array[_] => 6 + case x: Array[?] 
=> 6 case _ => 7 // error: only null is matched } diff --git a/tests/neg-custom-args/allow-double-bindings/test-typers.scala b/tests/neg-deep-subtype/test-typers.scala similarity index 100% rename from tests/neg-custom-args/allow-double-bindings/test-typers.scala rename to tests/neg-deep-subtype/test-typers.scala diff --git a/tests/neg-custom-args/isInstanceOf/type-lambda.scala b/tests/neg-deep-subtype/type-lambda.scala similarity index 88% rename from tests/neg-custom-args/isInstanceOf/type-lambda.scala rename to tests/neg-deep-subtype/type-lambda.scala index d17b3a026f4c..4c4627fe1cf3 100644 --- a/tests/neg-custom-args/isInstanceOf/type-lambda.scala +++ b/tests/neg-deep-subtype/type-lambda.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + trait A[T] trait B[T] extends A[T] diff --git a/tests/neg-macros/annot-result-owner.check b/tests/neg-macros/annot-result-owner.check index 5d67be058fdf..e2209998579c 100644 --- a/tests/neg-macros/annot-result-owner.check +++ b/tests/neg-macros/annot-result-owner.check @@ -2,7 +2,7 @@ -- Error: tests/neg-macros/annot-result-owner/Test_2.scala:1:0 --------------------------------------------------------- 1 |@insertVal // error |^^^^^^^^^^ - |macro annotation @insertVal added value definitionWithWrongOwner$macro$1 with an inconsistent owner. Expected it to be owned by package object Test_2$package but was owned by method foo. + |macro annotation @insertVal added value definitionWithWrongOwner$macro$1 with an inconsistent owner. Expected it to be owned by the top-level definitions in package but was owned by method foo. -- Error: tests/neg-macros/annot-result-owner/Test_2.scala:5:2 --------------------------------------------------------- 5 | @insertVal // error | ^^^^^^^^^^ diff --git a/tests/neg-macros/annot-suspend-cycle.check b/tests/neg-macros/annot-suspend-cycle.check index 237cbe4188b2..e3ecea7345fd 100644 --- a/tests/neg-macros/annot-suspend-cycle.check +++ b/tests/neg-macros/annot-suspend-cycle.check @@ -1,7 +1,7 @@ -- [E129] Potential Issue Warning: tests/neg-macros/annot-suspend-cycle/Macro.scala:7:4 -------------------------------- 7 | new Foo | ^^^^^^^ - | A pure expression does nothing in statement position; you may be omitting necessary parentheses + | A pure expression does nothing in statement position | | longer explanation available when compiling with `-explain` Cyclic macro dependencies in tests/neg-macros/annot-suspend-cycle/Test.scala. 
diff --git a/tests/neg/higher-order-quoted-expressions.scala b/tests/neg-macros/higher-order-quoted-expressions.scala similarity index 100% rename from tests/neg/higher-order-quoted-expressions.scala rename to tests/neg-macros/higher-order-quoted-expressions.scala diff --git a/tests/neg/i12019.scala b/tests/neg-macros/i12019.scala similarity index 100% rename from tests/neg/i12019.scala rename to tests/neg-macros/i12019.scala diff --git a/tests/neg/i12196.scala b/tests/neg-macros/i12196.scala similarity index 100% rename from tests/neg/i12196.scala rename to tests/neg-macros/i12196.scala diff --git a/tests/neg/i12196b.scala b/tests/neg-macros/i12196b.scala similarity index 100% rename from tests/neg/i12196b.scala rename to tests/neg-macros/i12196b.scala diff --git a/tests/neg/i12344.scala b/tests/neg-macros/i12344.scala similarity index 100% rename from tests/neg/i12344.scala rename to tests/neg-macros/i12344.scala diff --git a/tests/neg/i12606.scala b/tests/neg-macros/i12606.scala similarity index 100% rename from tests/neg/i12606.scala rename to tests/neg-macros/i12606.scala diff --git a/tests/neg/i13407.scala b/tests/neg-macros/i13407.scala similarity index 100% rename from tests/neg/i13407.scala rename to tests/neg-macros/i13407.scala diff --git a/tests/neg/i14772.check b/tests/neg-macros/i14772.check similarity index 78% rename from tests/neg/i14772.check rename to tests/neg-macros/i14772.check index 4fb5b7f7cd5f..42f78b76754c 100644 --- a/tests/neg/i14772.check +++ b/tests/neg-macros/i14772.check @@ -1,4 +1,4 @@ --- [E044] Cyclic Error: tests/neg/i14772.scala:7:7 --------------------------------------------------------------------- +-- [E044] Cyclic Error: tests/neg-macros/i14772.scala:7:7 -------------------------------------------------------------- 7 | foo(a) // error | ^ | Overloaded or recursive method impl needs return type diff --git a/tests/neg/i14772.scala b/tests/neg-macros/i14772.scala similarity index 100% rename from tests/neg/i14772.scala rename to tests/neg-macros/i14772.scala diff --git a/tests/neg/i15009a.check b/tests/neg-macros/i15009a.check similarity index 77% rename from tests/neg/i15009a.check rename to tests/neg-macros/i15009a.check index 4bdca5318d38..7e154c2be1c9 100644 --- a/tests/neg/i15009a.check +++ b/tests/neg-macros/i15009a.check @@ -1,36 +1,36 @@ --- Error: tests/neg/i15009a.scala:4:9 ---------------------------------------------------------------------------------- +-- Error: tests/neg-macros/i15009a.scala:4:9 --------------------------------------------------------------------------- 4 | '[List[${Type.of[Int]}]] // error | ^^^^^^^^^^^^^^^ | Type splicing with `$` in quotes not supported anymore | | Hint: To use a given Type[T] in a quote just write T directly --- Error: tests/neg/i15009a.scala:7:16 --------------------------------------------------------------------------------- +-- Error: tests/neg-macros/i15009a.scala:7:16 -------------------------------------------------------------------------- 7 | case '[List[$a]] => // error | ^^ | Type splicing with `$` in quotes not supported anymore | | Hint: Use lower cased variable name without the `$` instead --- Error: tests/neg/i15009a.scala:10:16 -------------------------------------------------------------------------------- +-- Error: tests/neg-macros/i15009a.scala:10:16 ------------------------------------------------------------------------- 10 | '{ List.empty[$int] } // error | ^^^^ | Type splicing with `$` in quotes not supported anymore | | Hint: To use a given Type[T] in a quote just write T 
directly --- Error: tests/neg/i15009a.scala:11:9 --------------------------------------------------------------------------------- +-- Error: tests/neg-macros/i15009a.scala:11:9 -------------------------------------------------------------------------- 11 | val t: ${int} = ??? // error | ^^^^^^ | Type splicing with `$` in quotes not supported anymore | | Hint: To use a given Type[T] in a quote just write T directly --- Error: tests/neg/i15009a.scala:3:2 ---------------------------------------------------------------------------------- +-- Error: tests/neg-macros/i15009a.scala:3:2 --------------------------------------------------------------------------- 3 | '[Int] // error | ^^^^^^ | Quoted types `'[..]` can only be used in patterns. | | Hint: To get a scala.quoted.Type[T] use scala.quoted.Type.of[T] instead. --- [E006] Not Found Error: tests/neg/i15009a.scala:12:2 ---------------------------------------------------------------- +-- [E006] Not Found Error: tests/neg-macros/i15009a.scala:12:2 --------------------------------------------------------- 12 | $int // error: Not found: $int | ^^^^ - | Not found: $int + | Not found: $int - did you mean int? | | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i15009a.scala b/tests/neg-macros/i15009a.scala similarity index 100% rename from tests/neg/i15009a.scala rename to tests/neg-macros/i15009a.scala diff --git a/tests/neg-macros/i15413.check b/tests/neg-macros/i15413.check new file mode 100644 index 000000000000..56f587eb2fb4 --- /dev/null +++ b/tests/neg-macros/i15413.check @@ -0,0 +1,6 @@ +-- Error: tests/neg-macros/i15413.scala:4:22 --------------------------------------------------------------------------- +4 | inline def foo = ${ Macro.fooImpl } // error + | ^^^^^^^^^^^^^ + | Macro implementation is not statically accessible. 
+ | + | Non-static inline accessor was generated in class Macro diff --git a/tests/neg-macros/i15413.scala b/tests/neg-macros/i15413.scala new file mode 100644 index 000000000000..186ba60f3d25 --- /dev/null +++ b/tests/neg-macros/i15413.scala @@ -0,0 +1,7 @@ +import scala.quoted.* + +class Macro: + inline def foo = ${ Macro.fooImpl } // error + +object Macro: + private def fooImpl(using Quotes) = '{} diff --git a/tests/neg-macros/i16355a.scala b/tests/neg-macros/i16355a.scala index 8870b7777263..80a23aa7d7ee 100644 --- a/tests/neg-macros/i16355a.scala +++ b/tests/neg-macros/i16355a.scala @@ -1,4 +1,3 @@ -//> using scala "3.2.1" import scala.quoted.Expr import scala.quoted.Type import scala.quoted.quotes diff --git a/tests/neg-macros/i16522.check b/tests/neg-macros/i16522.check new file mode 100644 index 000000000000..75b678ac2f8e --- /dev/null +++ b/tests/neg-macros/i16522.check @@ -0,0 +1,19 @@ +-- [E007] Type Mismatch Error: tests/neg-macros/i16522.scala:10:45 ----------------------------------------------------- +10 | case '{HCons($h1: hd1, HCons($h2: hd2, $_ : tl))} => '{$h1.toString ++ $h2.toString} // error // error // error + | ^^^^^^^ + | Found: tl + | Required: HList + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg-macros/i16522.scala:10:62 --------------------------------------------------------- +10 | case '{HCons($h1: hd1, HCons($h2: hd2, $_ : tl))} => '{$h1.toString ++ $h2.toString} // error // error // error + | ^^ + | Not found: h1 + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg-macros/i16522.scala:10:78 --------------------------------------------------------- +10 | case '{HCons($h1: hd1, HCons($h2: hd2, $_ : tl))} => '{$h1.toString ++ $h2.toString} // error // error // error + | ^^ + | Not found: h2 + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-macros/i16522.scala b/tests/neg-macros/i16522.scala new file mode 100644 index 000000000000..4f57f029f85f --- /dev/null +++ b/tests/neg-macros/i16522.scala @@ -0,0 +1,16 @@ + import scala.quoted.* + + sealed trait HList + case class HCons[+HD, TL <: HList](hd: HD, tl: TL) extends HList + case object HNil extends HList + + def showFirstTwoImpl(e: Expr[HList])(using Quotes): Expr[String] = { + e match { + case '{HCons($h1, HCons($h2, $_))} => '{$h1.toString ++ $h2.toString} + case '{HCons($h1: hd1, HCons($h2: hd2, $_ : tl))} => '{$h1.toString ++ $h2.toString} // error // error // error + case '{HCons[hd, HCons[sd, tl]]($h1, HCons($h2, $_))} => '{$h1.toString ++ $h2.toString} + case _ => '{""} + } + } + + transparent inline def showFirstTwo(inline xs: HList) = ${ showFirstTwoImpl('xs) } diff --git a/tests/neg/i16861a.scala b/tests/neg-macros/i16861a.scala similarity index 100% rename from tests/neg/i16861a.scala rename to tests/neg-macros/i16861a.scala diff --git a/tests/neg-macros/i17152/DFBits.scala b/tests/neg-macros/i17152/DFBits.scala new file mode 100644 index 000000000000..dd0e8b88a962 --- /dev/null +++ b/tests/neg-macros/i17152/DFBits.scala @@ -0,0 +1,82 @@ +// nopos-error +package crash + +import scala.quoted.* + +class IRDFType +class IRDFBoolOrBit extends IRDFType +class IRDFDecimal extends IRDFType +class IRDFBits extends IRDFType + +final class DFType[+T <: IRDFType, +A] +type DFTypeAny = DFType[IRDFType, Any] + +trait Baz + +trait Width[T]: + type Out <: Int +object Width: + given fromDFBoolOrBit[T <: DFBoolOrBit]: Width[T] with + type Out = 1 + transparent inline 
given [T]: Width[T] = ${ getWidthMacro[T] } + def getWidthMacro[T](using Quotes, Type[T]): Expr[Width[T]] = + '{ + new Width[T]: + type Out = 1 + } +end Width + +extension [T](t: T)(using baz: Baz) def width: 1 = ??? + +trait Check[T1 <: Int, T2 <: Int] + +type DFBits[W <: Int] = DFType[IRDFBits, Tuple1[W]] + +private object CompanionsDFBits: + object Val: + trait Candidate[R]: + type OutW <: Int + def apply(value: R): DFValOf[DFBits[OutW]] + object Candidate: + given fromDFUInt[W <: Int, R <: DFValOf[DFDecimal]]: Candidate[R] with + type OutW = W + def apply(value: R): DFValOf[DFBits[W]] = + import DFVal.Ops.bits + value.bits + ??? + end Candidate + + object TC: + import DFVal.TC + given DFBitsFromCandidate[ + LW <: Int, + V + ](using candidate: Candidate[V])(using + check: Check[LW, candidate.OutW] + ): TC[DFBits[LW], V] with + def conv(dfType: DFBits[LW], value: V): DFValOf[DFBits[LW]] = + val dfVal = candidate(value) + ??? + end TC + end Val + +end CompanionsDFBits + +type DFBoolOrBit = DFType[IRDFBoolOrBit, Any] +type DFDecimal = DFType[IRDFDecimal, Any] +object DFDecimal: + def foo(arg1: Int, arg2: Int): Unit = ??? + + object Val: + object TC: + import DFVal.TC + given [R]: TC[DFDecimal, R] = ??? + def apply( + dfType: DFDecimal, + dfVal: DFValOf[DFDecimal] + ): DFValOf[DFDecimal] = + foo(dfType.width, dfVal.width) + dfVal + end TC + end Val +end DFDecimal \ No newline at end of file diff --git a/tests/neg-macros/i17152/DFVal.scala b/tests/neg-macros/i17152/DFVal.scala new file mode 100644 index 000000000000..08da551c1072 --- /dev/null +++ b/tests/neg-macros/i17152/DFVal.scala @@ -0,0 +1,25 @@ +package crash + +trait TCConv[T <: DFTypeAny, V, O]: + type Out <: O + def conv(dfType: T, value: V): Out + +class DFVal[+T <: DFTypeAny] +type DFValAny = DFVal[DFTypeAny] +type DFValOf[+T <: DFTypeAny] = DFVal[T] + +object DFVal: + trait TC[T <: DFTypeAny, R] extends TCConv[T, R, DFValAny]: + type Out = DFValOf[T] + final def apply(dfType: T, value: R): Out = ??? + + object TC: + export CompanionsDFBits.Val.TC.given + end TC + + object Ops: + extension [T <: DFTypeAny, A, C, I](dfVal: DFVal[T]) + def bits(using w: Width[T]): DFValOf[DFBits[w.Out]] = ??? + end extension + end Ops +end DFVal \ No newline at end of file diff --git a/tests/neg-macros/i17294/DFVal.scala b/tests/neg-macros/i17294/DFVal.scala new file mode 100644 index 000000000000..268ad4e188b3 --- /dev/null +++ b/tests/neg-macros/i17294/DFVal.scala @@ -0,0 +1,3 @@ +package crash + +def bits[T](t: T)(using w: Width[T]): w.Out = ??? diff --git a/tests/neg-macros/i17294/Width.scala b/tests/neg-macros/i17294/Width.scala new file mode 100644 index 000000000000..255f4799c32c --- /dev/null +++ b/tests/neg-macros/i17294/Width.scala @@ -0,0 +1,12 @@ +// nopos-error +package crash +import scala.quoted.* + +trait Width[T]: + type Out +object Width: + transparent inline given [T]: Width[T] = ${ getWidthMacro[T] } + def getWidthMacro[T](using Quotes, Type[T]): Expr[Width[T]] = '{ new Width[T] {} } +end Width + +val x = bits(1) \ No newline at end of file diff --git a/tests/neg-macros/i17338.check b/tests/neg-macros/i17338.check new file mode 100644 index 000000000000..e47312130b1e --- /dev/null +++ b/tests/neg-macros/i17338.check @@ -0,0 +1,9 @@ +-- Error: tests/neg-macros/i17338.scala:4:5 ---------------------------------------------------------------------------- +4 | '{ '{ 1 } } // error + | ^ + | access to parameter quotes from wrong staging level: + | - the definition is at level 0, + | - but the access is at level 1. 
+ | + | Hint: Nested quote needs a local context defined at level 1. + | One way to introduce this context is to give the outer quote the type `Expr[Quotes ?=> Expr[T]]`. diff --git a/tests/neg-macros/i17338.scala b/tests/neg-macros/i17338.scala new file mode 100644 index 000000000000..941cbdd6216d --- /dev/null +++ b/tests/neg-macros/i17338.scala @@ -0,0 +1,4 @@ +import scala.quoted.* + +def test(using quotes: Quotes): Expr[Expr[Int]] = + '{ '{ 1 } } // error diff --git a/tests/neg-macros/i17545a/Macro_1.scala b/tests/neg-macros/i17545a/Macro_1.scala new file mode 100644 index 000000000000..29127f7aa97e --- /dev/null +++ b/tests/neg-macros/i17545a/Macro_1.scala @@ -0,0 +1,11 @@ +import scala.quoted.* + +object InvokeConstructor { + inline def apply[A] = ${ constructorMacro[A] } + + def constructorMacro[A: Type](using Quotes) = { + import quotes.reflect.* + val tpe = TypeRepr.of[A] + New(Inferred(tpe)).select(tpe.typeSymbol.primaryConstructor).appliedToArgs(Nil).asExprOf[A] + } +} diff --git a/tests/neg-macros/i17545a/Test_2.scala b/tests/neg-macros/i17545a/Test_2.scala new file mode 100644 index 000000000000..82d4ab4dcfdd --- /dev/null +++ b/tests/neg-macros/i17545a/Test_2.scala @@ -0,0 +1,4 @@ +case object WhateverA + +def testA = + val whateverA: WhateverA.type = InvokeConstructor[WhateverA.type] // error diff --git a/tests/neg-macros/i17545b/Macro_1.scala b/tests/neg-macros/i17545b/Macro_1.scala new file mode 100644 index 000000000000..29127f7aa97e --- /dev/null +++ b/tests/neg-macros/i17545b/Macro_1.scala @@ -0,0 +1,11 @@ +import scala.quoted.* + +object InvokeConstructor { + inline def apply[A] = ${ constructorMacro[A] } + + def constructorMacro[A: Type](using Quotes) = { + import quotes.reflect.* + val tpe = TypeRepr.of[A] + New(Inferred(tpe)).select(tpe.typeSymbol.primaryConstructor).appliedToArgs(Nil).asExprOf[A] + } +} diff --git a/tests/neg-macros/i17545b/Test_2.scala b/tests/neg-macros/i17545b/Test_2.scala new file mode 100644 index 000000000000..6cad2cf16e5c --- /dev/null +++ b/tests/neg-macros/i17545b/Test_2.scala @@ -0,0 +1,3 @@ +def testB = + case object WhateverB + val whateverB: WhateverB.type = InvokeConstructor[WhateverB.type] // error diff --git a/tests/neg-macros/i17545c/Macro_1.scala b/tests/neg-macros/i17545c/Macro_1.scala new file mode 100644 index 000000000000..17e297497524 --- /dev/null +++ b/tests/neg-macros/i17545c/Macro_1.scala @@ -0,0 +1,11 @@ +import scala.quoted.* + +object InvokeConstructor { + inline def apply[A] = ${ constructorMacro[A] } + + def constructorMacro[A: Type](using Quotes) = { + import quotes.reflect.* + val tpe = TypeRepr.of[A].termSymbol.moduleClass.typeRef + New(Inferred(tpe)).select(tpe.typeSymbol.primaryConstructor).appliedToArgs(Nil).asExprOf[A] + } +} diff --git a/tests/neg-macros/i17545c/Test_2.scala b/tests/neg-macros/i17545c/Test_2.scala new file mode 100644 index 000000000000..82d4ab4dcfdd --- /dev/null +++ b/tests/neg-macros/i17545c/Test_2.scala @@ -0,0 +1,4 @@ +case object WhateverA + +def testA = + val whateverA: WhateverA.type = InvokeConstructor[WhateverA.type] // error diff --git a/tests/neg-macros/i17545d/Macro_1.scala b/tests/neg-macros/i17545d/Macro_1.scala new file mode 100644 index 000000000000..17e297497524 --- /dev/null +++ b/tests/neg-macros/i17545d/Macro_1.scala @@ -0,0 +1,11 @@ +import scala.quoted.* + +object InvokeConstructor { + inline def apply[A] = ${ constructorMacro[A] } + + def constructorMacro[A: Type](using Quotes) = { + import quotes.reflect.* + val tpe = TypeRepr.of[A].termSymbol.moduleClass.typeRef + 
New(Inferred(tpe)).select(tpe.typeSymbol.primaryConstructor).appliedToArgs(Nil).asExprOf[A] + } +} diff --git a/tests/neg-macros/i17545d/Test_2.scala b/tests/neg-macros/i17545d/Test_2.scala new file mode 100644 index 000000000000..6cad2cf16e5c --- /dev/null +++ b/tests/neg-macros/i17545d/Test_2.scala @@ -0,0 +1,3 @@ +def testB = + case object WhateverB + val whateverB: WhateverB.type = InvokeConstructor[WhateverB.type] // error diff --git a/tests/neg-macros/i18113.check b/tests/neg-macros/i18113.check new file mode 100644 index 000000000000..7a7a7069bd92 --- /dev/null +++ b/tests/neg-macros/i18113.check @@ -0,0 +1,15 @@ + +-- Error: tests/neg-macros/i18113/Test_2.scala:7:8 --------------------------------------------------------------------- + 7 | X.test(ref) // error + | ^^^^^^^^^^^ + |Exception occurred while executing macro expansion. + |java.lang.AssertionError: Reference to a method must be eta-expanded before it is used as an expression: x.Main.ref.plus + | at x.X$.testImpl(Macro_1.scala:16) + | + |-------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from Macro_1.scala:11 +11 | inline def test(ref:IntRef):Int = ${ testImpl('ref) } + | ^^^^^^^^^^^^^^^^^^^ + -------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-macros/i18113/Macro_1.scala b/tests/neg-macros/i18113/Macro_1.scala new file mode 100644 index 000000000000..0dd95c690b6a --- /dev/null +++ b/tests/neg-macros/i18113/Macro_1.scala @@ -0,0 +1,21 @@ +package x + +import scala.quoted.* + +class IntRef(var x: Int) { + def plus(y:Int): Int = ??? +} + +object X { + + inline def test(ref:IntRef):Int = ${ testImpl('ref) } + + def testImpl(ref:Expr[IntRef])(using Quotes):Expr[Int] = { + import quotes.reflect.* + val fun0 = Select.unique(ref.asTerm,"plus") + val fun1 = Block(List(Assign(Select.unique(ref.asTerm,"x"),Literal(IntConstant(1)))),fun0) + val r = Apply(fun1,List(Literal(IntConstant(2)))) + r.asExprOf[Int] + } + +} diff --git a/tests/neg-macros/i18113/Test_2.scala b/tests/neg-macros/i18113/Test_2.scala new file mode 100644 index 000000000000..d09536008046 --- /dev/null +++ b/tests/neg-macros/i18113/Test_2.scala @@ -0,0 +1,9 @@ +package x + +object Main { + + val ref = IntRef(0) + + X.test(ref) // error + +} diff --git a/tests/neg-macros/i18113b.check b/tests/neg-macros/i18113b.check new file mode 100644 index 000000000000..dd2091eba09f --- /dev/null +++ b/tests/neg-macros/i18113b.check @@ -0,0 +1,15 @@ + +-- Error: tests/neg-macros/i18113b/Test_2.scala:7:8 -------------------------------------------------------------------- + 7 | X.test(ref) // error + | ^^^^^^^^^^^ + | Exception occurred while executing macro expansion. 
+ | java.lang.AssertionError: Expected `fun.tpe` to widen into a `MethodType` + | at x.X$.testImpl(Macro_1.scala:27) + | + |-------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from Macro_1.scala:12 +12 | inline def test(ref:IntRef):Int = ${ testImpl('ref) } + | ^^^^^^^^^^^^^^^^^^^ + -------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-macros/i18113b/Macro_1.scala b/tests/neg-macros/i18113b/Macro_1.scala new file mode 100644 index 000000000000..2f404f0368cb --- /dev/null +++ b/tests/neg-macros/i18113b/Macro_1.scala @@ -0,0 +1,31 @@ +package x + +import scala.quoted.* + +class IntRef(var x: Int) { + def plus(y:Int): Int = ??? +} + + +object X { + + inline def test(ref:IntRef):Int = ${ testImpl('ref) } + + def testImpl(ref:Expr[IntRef])(using Quotes):Expr[Int] = { + import quotes.reflect.* + val fun0 = Select.unique(ref.asTerm,"plus") + val mt = MethodType(List("p"))( + _ => List(TypeRepr.of[Int]), + _ => TypeRepr.of[Int] + ) + val etaExpanded = Lambda(Symbol.spliceOwner, mt, (owner, params) => { + Block( + List(Assign(Select.unique(ref.asTerm,"x"),Literal(IntConstant(1)))), + Apply(fun0,params.map(_.asInstanceOf[Term])) + ) + }) + val r = Apply(etaExpanded,List(Literal(IntConstant(2)))) + r.asExprOf[Int] + } + +} diff --git a/tests/neg-macros/i18113b/Test_2.scala b/tests/neg-macros/i18113b/Test_2.scala new file mode 100644 index 000000000000..d09536008046 --- /dev/null +++ b/tests/neg-macros/i18113b/Test_2.scala @@ -0,0 +1,9 @@ +package x + +object Main { + + val ref = IntRef(0) + + X.test(ref) // error + +} diff --git a/tests/neg-macros/i18174.check b/tests/neg-macros/i18174.check new file mode 100644 index 000000000000..1f4396d6b5ca --- /dev/null +++ b/tests/neg-macros/i18174.check @@ -0,0 +1,12 @@ +-- [E172] Type Error: tests/neg-macros/i18174.scala:27:33 -------------------------------------------------------------- +27 | (charClassIntersection.rep() | classItem.rep()) // error + | ^^^^^^^^^^^^^^^ + |No given instance of type pkg.Implicits.Repeater[pkg.RegexTree, V] was found. + |I found: + | + | pkg.Implicits.Repeater.GenericRepeaterImplicit[T] + | + |But method GenericRepeaterImplicit in object Repeater does not match type pkg.Implicits.Repeater[pkg.RegexTree, V] + | + |where: V is a type variable with constraint <: Seq[pkg.CharClassIntersection] + |. diff --git a/tests/neg-macros/i18174.scala b/tests/neg-macros/i18174.scala new file mode 100644 index 000000000000..8bb5ffd51a3f --- /dev/null +++ b/tests/neg-macros/i18174.scala @@ -0,0 +1,27 @@ +// does not compile anymore in Scala 3.4+ +package pkg + +import scala.language.`3.4` + +trait P[+T] + +extension [T](inline parse0: P[T]) + inline def | [V >: T](inline other: P[V]): P[V] = ??? + +extension [T](inline parse0: => P[T]) + inline def rep[V](inline min: Int = 0)(using repeater: Implicits.Repeater[T, V]): P[V] = ??? + +object Implicits: + trait Repeater[-T, R] + object Repeater: + implicit def GenericRepeaterImplicit[T]: Repeater[T, Seq[T]] = ??? + +sealed trait RegexTree +abstract class Node extends RegexTree +class CharClassIntersection() extends Node + +def classItem: P[RegexTree] = ??? +def charClassIntersection: P[CharClassIntersection] = ??? 
+ +def x = + (charClassIntersection.rep() | classItem.rep()) // error diff --git a/tests/neg-macros/i18228.scala b/tests/neg-macros/i18228.scala new file mode 100644 index 000000000000..8fcd8988abe8 --- /dev/null +++ b/tests/neg-macros/i18228.scala @@ -0,0 +1,13 @@ +import scala.quoted.* + +case class QueryMeta[T](map: Map[String, String]) + +object QueryMeta: + given [T]: FromExpr[QueryMeta[T]] = new FromExpr[QueryMeta[T]]: + def unapply(expr: Expr[QueryMeta[T]])(using q: Quotes): Option[QueryMeta[T]] = + import q.reflect.* + expr match + case '{ QueryMeta/*[T]*/(${ map }: Map[String, String]) } => // error: Reference to T within quotes requires a given scala.quoted.Type[T] in scope. + map.value.map(QueryMeta[T].apply) + case _ => + None diff --git a/tests/neg-macros/i18695.scala b/tests/neg-macros/i18695.scala new file mode 100644 index 000000000000..7427839cdbe3 --- /dev/null +++ b/tests/neg-macros/i18695.scala @@ -0,0 +1,75 @@ +import scala.annotation.{tailrec, unused} +import scala.deriving.Mirror +import scala.quoted.* + +trait TypeLength[A] { + type Length <: Int + def length: Length +} +object TypeLength extends TypeLengthLowPriority: + type Aux[A, Length0 <: Int] = TypeLength[A] { + type Length = Length0 + } + + transparent inline given fromMirror[A](using m: Mirror.Of[A]): TypeLength[A] = + ${ macroImpl[A, m.MirroredElemTypes] } + + @tailrec + private def typesOfTuple( + using q: Quotes + )(tpe: q.reflect.TypeRepr, acc: List[q.reflect.TypeRepr]): List[q.reflect.TypeRepr] = + import q.reflect.* + val cons = Symbol.classSymbol("scala.*:") + tpe.widenTermRefByName.dealias match + case AppliedType(fn, tpes) if defn.isTupleClass(fn.typeSymbol) => + tpes.reverse_:::(acc) + case AppliedType(tp, List(headType, tailType)) if tp.derivesFrom(cons) => + typesOfTuple(tailType, headType :: acc) + case tpe => + if tpe.derivesFrom(Symbol.classSymbol("scala.EmptyTuple")) then acc.reverse + else report.errorAndAbort(s"Unknown type encountered in tuple ${tpe.show}") + + def macroImpl[A: Type, T <: Tuple: scala.quoted.Type]( + using q: scala.quoted.Quotes + ): scala.quoted.Expr[TypeLength[A]] = + import q.reflect.* + val l = typesOfTuple(TypeRepr.of[T], Nil).length + ConstantType(IntConstant(l)).asType match + case '[lt] => + val le = Expr[Int](l).asExprOf[lt & Int] + '{ + val r: TypeLength.Aux[A, lt & Int] = new TypeLength[A] { + type Length = lt & Int + val length: Length = ${ le } + } + r + } + + transparent inline given fromTuple[T <: Tuple]: TypeLength[T] = + ${ macroImpl[T, T] } + +trait TypeLengthLowPriority: + self: TypeLength.type => + given tupleFromMirrorAndLength[A, T <: Tuple]( + using @unused m: Mirror.Of[A] { type MirroredElemTypes = T }, + length: TypeLength[A] + ): TypeLength.Aux[T, length.Length] = length.asInstanceOf[TypeLength.Aux[T, length.Length]] + +trait HKDSumGeneric[A] +object HKDSumGeneric: + type NotZero[N <: Int] = N match + case 0 => false + case _ => true + + transparent inline given derived[A](using m: Mirror.SumOf[A], typeLength: TypeLength[A])( + using NotZero[typeLength.Length] =:= true + ): HKDSumGeneric[A] = + derivedImpl[A, m.MirroredElemTypes, m.MirroredLabel] // error + + def derivedImpl[A, ElemTypes <: Tuple, Label <: String]( + using m: Mirror.SumOf[A] { + type MirroredElemTypes = ElemTypes; type MirroredLabel = Label; + }, + typeLength: TypeLength[ElemTypes], + nz: NotZero[typeLength.Length] =:= true + ): HKDSumGeneric[A] = ??? 
\ No newline at end of file diff --git a/tests/neg-macros/i18825.check b/tests/neg-macros/i18825.check new file mode 100644 index 000000000000..0269f9880828 --- /dev/null +++ b/tests/neg-macros/i18825.check @@ -0,0 +1,3 @@ + +error overriding method toString in class Foo of type (): String; + method toString of type (): String cannot override final member method toString in class Foo diff --git a/tests/neg-macros/i18825/Macro_1.scala b/tests/neg-macros/i18825/Macro_1.scala new file mode 100644 index 000000000000..c099954f3858 --- /dev/null +++ b/tests/neg-macros/i18825/Macro_1.scala @@ -0,0 +1,19 @@ +import scala.annotation.experimental +import scala.annotation.MacroAnnotation +import scala.quoted.* + +@experimental +class toString extends MacroAnnotation : + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + import quotes.reflect.* + tree match + case ClassDef(name, ctr, parents, self, body) => + val cls = tree.symbol + val toStringSym = Symbol.requiredMethod("java.lang.Object.toString") + val toStringOverrideSym = Symbol.newMethod(cls, "toString", toStringSym.info, Flags.Override, Symbol.noSymbol) + val toStringDef = DefDef(toStringOverrideSym, _ => Some(Literal(StringConstant("Hello from macro")))) + val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, toStringDef :: body) + List(newClassDef) + case _ => + report.error("@toString can only be annotated on class definitions") + tree :: Nil diff --git a/tests/neg-macros/i18825/Test_2.scala b/tests/neg-macros/i18825/Test_2.scala new file mode 100644 index 000000000000..83ae9c778704 --- /dev/null +++ b/tests/neg-macros/i18825/Test_2.scala @@ -0,0 +1,15 @@ +// nopos-error + +import annotation.experimental + +class Foo : + final override def toString(): String = "Hello" + +@experimental +@toString +class AFoo extends Foo //: + //override def toString(): String = "Hello from macro" + +@experimental +@main def run = + println(new AFoo().toString) diff --git a/tests/pos-macros/i9570.scala b/tests/neg-macros/i9570.scala similarity index 94% rename from tests/pos-macros/i9570.scala rename to tests/neg-macros/i9570.scala index 295969813df6..9242fd2e9bbd 100644 --- a/tests/pos-macros/i9570.scala +++ b/tests/neg-macros/i9570.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + import scala.quoted.* object Macros { diff --git a/tests/neg-macros/i9685bis.check b/tests/neg-macros/i9685bis.check new file mode 100644 index 000000000000..45e7f85aa30d --- /dev/null +++ b/tests/neg-macros/i9685bis.check @@ -0,0 +1,9 @@ +-- [E008] Not Found Error: tests/neg-macros/i9685bis.scala:23:4 -------------------------------------------------------- +23 | 1.asdf // error + | ^^^^^^ + | value asdf is not a member of Int, but could be made available as an extension method. 
+ | + | The following import might make progress towards fixing the problem: + | + | import foo.Baz.toBaz + | diff --git a/tests/neg-macros/i9685bis.scala b/tests/neg-macros/i9685bis.scala new file mode 100644 index 000000000000..0023d4d719b4 --- /dev/null +++ b/tests/neg-macros/i9685bis.scala @@ -0,0 +1,23 @@ +package foo + +import scala.language.implicitConversions + +class Foo + +object Foo: + + inline implicit def toFoo(x: Int): Foo = Foo() + +class Bar + +object Bar: + inline given Conversion[Int, Bar] with + def apply(x: Int): Bar = Bar() + +class Baz + +object Baz: + transparent inline implicit def toBaz(x: Int): Baz = Baz() + +object Usage: + 1.asdf // error diff --git a/tests/neg-macros/macro-deprecation.scala b/tests/neg-macros/macro-deprecation.scala new file mode 100644 index 000000000000..ad1cdda001bb --- /dev/null +++ b/tests/neg-macros/macro-deprecation.scala @@ -0,0 +1,6 @@ +//> using options -Xfatal-warnings -deprecation + +import scala.quoted.* + +inline def f = ${ impl } // error +@deprecated def impl(using Quotes) = '{1} diff --git a/tests/pos-macros/macro-experimental.scala b/tests/neg-macros/macro-experimental.scala similarity index 78% rename from tests/pos-macros/macro-experimental.scala rename to tests/neg-macros/macro-experimental.scala index dc011f4e45b9..eaa822d4b541 100644 --- a/tests/pos-macros/macro-experimental.scala +++ b/tests/neg-macros/macro-experimental.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import scala.quoted.* import scala.annotation.experimental diff --git a/tests/neg-macros/quote-pattern-type-var-bounds.scala b/tests/neg-macros/quote-pattern-type-var-bounds.scala new file mode 100644 index 000000000000..b97b21552a1e --- /dev/null +++ b/tests/neg-macros/quote-pattern-type-var-bounds.scala @@ -0,0 +1,22 @@ +import scala.quoted.* +def types(t: Type[?])(using Quotes) = t match { + case '[ type t; Int ] => + case '[ type t <: Int; Int ] => + case '[ type t >: 1 <: Int; Int ] => + case '[ type t = Int; Int ] => // error + case '[ type t = scala.Int; Int ] => // error + case '[ type f[t] <: List[Any]; Int ] => + case '[ type f[t <: Int] <: List[Any]; Int ] => + case '[ type f[t] = List[Any]; Int ] => // error +} + +def expressions(x: Expr[Any])(using Quotes) = x match { + case '{ type t; () } => + case '{ type t <: Int; () } => + case '{ type t >: 1 <: Int; () } => + case '{ type t = Int; () } => // error + case '{ type t = scala.Int; () } => // error + case '{ type f[t] <: List[Any]; () } => + case '{ type f[t <: Int] <: List[Any]; () } => + case '{ type f[t] = List[Any]; () } => // error +} diff --git a/tests/neg-macros/quote-type-variable-no-inference-2.check b/tests/neg-macros/quote-type-variable-no-inference-2.check new file mode 100644 index 000000000000..f479c34bf8e8 --- /dev/null +++ b/tests/neg-macros/quote-type-variable-no-inference-2.check @@ -0,0 +1,13 @@ +-- Warning: tests/neg-macros/quote-type-variable-no-inference-2.scala:5:22 --------------------------------------------- +5 | case '{ $_ : F[t, t]; () } => // warn // error + | ^ + | Ignored bound <: Double + | + | Consider defining bounds explicitly: + | '{ type t <: Int & Double; ... 
} +-- [E057] Type Mismatch Error: tests/neg-macros/quote-type-variable-no-inference-2.scala:5:20 -------------------------- +5 | case '{ $_ : F[t, t]; () } => // warn // error + | ^ + | Type argument t does not conform to upper bound Double + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-macros/quote-type-variable-no-inference-2.scala b/tests/neg-macros/quote-type-variable-no-inference-2.scala new file mode 100644 index 000000000000..1cb0d3dab7b3 --- /dev/null +++ b/tests/neg-macros/quote-type-variable-no-inference-2.scala @@ -0,0 +1,8 @@ +import scala.quoted.* + +def test2(x: Expr[Any])(using Quotes) = + x match + case '{ $_ : F[t, t]; () } => // warn // error + case '{ type u <: Int & Double; $_ : F[u, u] } => + +type F[X <: Int, Y <: Double] diff --git a/tests/neg-macros/quote-type-variable-no-inference-3.check b/tests/neg-macros/quote-type-variable-no-inference-3.check new file mode 100644 index 000000000000..91476728654d --- /dev/null +++ b/tests/neg-macros/quote-type-variable-no-inference-3.check @@ -0,0 +1,20 @@ +-- Warning: tests/neg-macros/quote-type-variable-no-inference-3.scala:5:22 --------------------------------------------- +5 | case '{ $_ : F[t, t]; () } => // warn // error + | ^ + | Ignored bound <: Comparable[U] + | + | Consider defining bounds explicitly: + | '{ type t <: Comparable[U]; ... } +-- Warning: tests/neg-macros/quote-type-variable-no-inference-3.scala:6:49 --------------------------------------------- +6 | case '{ type u <: Comparable[`u`]; $_ : F[u, u] } => + | ^ + | Ignored bound <: Comparable[Any] + | + | Consider defining bounds explicitly: + | '{ type u <: Comparable[u] & Comparable[Any]; ... } +-- [E057] Type Mismatch Error: tests/neg-macros/quote-type-variable-no-inference-3.scala:5:20 -------------------------- +5 | case '{ $_ : F[t, t]; () } => // warn // error + | ^ + | Type argument t does not conform to upper bound Comparable[t] + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-macros/quote-type-variable-no-inference-3.scala b/tests/neg-macros/quote-type-variable-no-inference-3.scala new file mode 100644 index 000000000000..04e3c4ef931e --- /dev/null +++ b/tests/neg-macros/quote-type-variable-no-inference-3.scala @@ -0,0 +1,8 @@ +import scala.quoted.* + +def test2(x: Expr[Any])(using Quotes) = + x match + case '{ $_ : F[t, t]; () } => // warn // error + case '{ type u <: Comparable[`u`]; $_ : F[u, u] } => + +type F[T, U <: Comparable[U]] diff --git a/tests/neg-macros/quote-type-variable-no-inference.check b/tests/neg-macros/quote-type-variable-no-inference.check new file mode 100644 index 000000000000..9de69c51f79b --- /dev/null +++ b/tests/neg-macros/quote-type-variable-no-inference.check @@ -0,0 +1,13 @@ +-- Warning: tests/neg-macros/quote-type-variable-no-inference.scala:5:17 ----------------------------------------------- +5 | case '[ F[t, t] ] => // warn // error + | ^ + | Ignored bound <: Double + | + | Consider defining bounds explicitly: + | '[ type t <: Int & Double; ... 
] +-- [E057] Type Mismatch Error: tests/neg-macros/quote-type-variable-no-inference.scala:5:15 ---------------------------- +5 | case '[ F[t, t] ] => // warn // error + | ^ + | Type argument t does not conform to upper bound Double + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-macros/quote-type-variable-no-inference.scala b/tests/neg-macros/quote-type-variable-no-inference.scala new file mode 100644 index 000000000000..de03f4445302 --- /dev/null +++ b/tests/neg-macros/quote-type-variable-no-inference.scala @@ -0,0 +1,8 @@ +import scala.quoted.* + +def test(x: Type[?])(using Quotes) = + x match + case '[ F[t, t] ] => // warn // error + case '[ type u <: Int & Double; F[u, u] ] => + +type F[x <: Int, y <: Double] diff --git a/tests/neg-macros/quotedPatterns-5.scala b/tests/neg-macros/quotedPatterns-5.scala index 4030604d19f5..9c47fd31dab4 100644 --- a/tests/neg-macros/quotedPatterns-5.scala +++ b/tests/neg-macros/quotedPatterns-5.scala @@ -2,7 +2,7 @@ import scala.quoted.* object Test { def test(x: quoted.Expr[Int])(using Quotes): Unit = x match { case '{ type t; 4 } => Type.of[t] - case '{ type t; poly[t]($x); 4 } => // error: duplicate pattern variable: t + case '{ type t; poly[t]($x); 4 } => case '{ type `t`; poly[`t`]($x); 4 } => Type.of[t] // error case _ => diff --git a/tests/neg-macros/splice-pat.check b/tests/neg-macros/splice-pat.check new file mode 100644 index 000000000000..e81834adef92 --- /dev/null +++ b/tests/neg-macros/splice-pat.check @@ -0,0 +1,10 @@ +-- [E032] Syntax Error: tests/neg-macros/splice-pat.scala:12:16 -------------------------------------------------------- +12 | case '{ foo(${ // error: pattern expected + | ^ + | pattern expected + | + | longer explanation available when compiling with `-explain` +-- [E040] Syntax Error: tests/neg-macros/splice-pat.scala:15:5 --------------------------------------------------------- +15 | })} => ??? 
// error + | ^ + | '=>' expected, but ')' found diff --git a/tests/neg/splice-pat.scala b/tests/neg-macros/splice-pat.scala similarity index 100% rename from tests/neg/splice-pat.scala rename to tests/neg-macros/splice-pat.scala diff --git a/tests/neg-macros/toexproftuple.scala b/tests/neg-macros/toexproftuple.scala index 20ae2f08ff8d..7b69c578be70 100644 --- a/tests/neg-macros/toexproftuple.scala +++ b/tests/neg-macros/toexproftuple.scala @@ -1,33 +1,8 @@ -import scala.quoted._, scala.deriving.* // error -// ^ -// Match type reduction failed since selector ((2 : Int), quoted.Expr[(3 : Int)]) -// matches none of the cases -// -// case quoted.Expr[x] *: t => x *: scala.Tuple.InverseMap[t, quoted.Expr] -// case EmptyTuple => EmptyTuple +import scala.quoted._, scala.deriving.* -inline def mcr: Any = ${mcrImpl} // error -// ^ -// Match type reduction failed since selector ((2 : Int), quoted.Expr[(3 : Int)]) -// matches none of the cases -// -// case quoted.Expr[x] *: t => x *: scala.Tuple.InverseMap[t, quoted.Expr] -// case EmptyTuple => EmptyTuple +inline def mcr: Any = ${mcrImpl} -def mcrImpl(using ctx: Quotes): Expr[Any] = { // error // error - //^ - // Match type reduction failed since selector ((2 : Int), quoted.Expr[(3 : Int)]) - // matches none of the cases - // - // case quoted.Expr[x] *: t => x *: scala.Tuple.InverseMap[t, quoted.Expr] - // case EmptyTuple => EmptyTuple - - // ^ - // Match type reduction failed since selector ((2 : Int), quoted.Expr[(3 : Int)]) - // matches none of the cases - // - // case quoted.Expr[x] *: t => x *: scala.Tuple.InverseMap[t, quoted.Expr] - // case EmptyTuple => EmptyTuple +def mcrImpl(using ctx: Quotes): Expr[Any] = { val tpl: (Expr[1], Expr[2], Expr[3]) = ('{1}, '{2}, '{3}) '{val res: (1, 3, 3) = ${Expr.ofTuple(tpl)}; res} // error @@ -36,7 +11,7 @@ def mcrImpl(using ctx: Quotes): Expr[Any] = { // error // error // Required: quoted.Expr[((1 : Int), (3 : Int), (3 : Int))] val tpl2: (Expr[1], 2, Expr[3]) = ('{1}, 2, '{3}) - '{val res = ${Expr.ofTuple(tpl2)}; res} // error // error // error // error + '{val res = ${Expr.ofTuple(tpl2)}; res} // error // ^ // Cannot prove that (quoted.Expr[(1 : Int)], (2 : Int), quoted.Expr[(3 : Int)]) =:= scala.Tuple.Map[ // scala.Tuple.InverseMap[ @@ -44,20 +19,4 @@ def mcrImpl(using ctx: Quotes): Expr[Any] = { // error // error // , quoted.Expr] // , quoted.Expr]. - // ^ - // Match type reduction failed since selector ((2 : Int), quoted.Expr[(3 : Int)]) - // matches none of the cases - // - // case quoted.Expr[x] *: t => x *: scala.Tuple.InverseMap[t, quoted.Expr] - // case EmptyTuple => EmptyTuple - - // ^ - // Cyclic reference involving val res - - // ^ - // Match type reduction failed since selector ((2 : Int), quoted.Expr[(3 : Int)]) - // matches none of the cases - // - // case quoted.Expr[x] *: t => x *: scala.Tuple.InverseMap[t, quoted.Expr] - // case EmptyTuple => EmptyTuple } diff --git a/tests/neg-macros/wrong-owner.check b/tests/neg-macros/wrong-owner.check new file mode 100644 index 000000000000..ccaca98e3948 --- /dev/null +++ b/tests/neg-macros/wrong-owner.check @@ -0,0 +1,23 @@ + +-- Error: tests/neg-macros/wrong-owner/Test_2.scala:5:6 ---------------------------------------------------------------- +3 |@experimental +4 |@wrongOwner +5 |class Foo // error + |^ + |Malformed tree was found while expanding macro with -Xcheck-macros. + | |The tree does not conform to the compiler's tree invariants. 
+ | | + | |Macro was: + | |@scala.annotation.internal.SourceFile("tests/neg-macros/wrong-owner/Test_2.scala") @wrongOwner @scala.annotation.experimental class Foo() + | | + | |The macro returned: + | |@scala.annotation.internal.SourceFile("tests/neg-macros/wrong-owner/Test_2.scala") @wrongOwner @scala.annotation.experimental class Foo() { + | override def toString(): java.lang.String = "Hello from macro" + |} + | | + | |Error: + | |assertion failed: bad owner; method toString has owner class String, expected was class Foo + |owner chain = method toString, class String, package java.lang, package java, package , ctxOwners = class Foo, class Foo, package , package , package , package , package , package , package , package , package , , , , , + | | + |stacktrace available when compiling with `-Ydebug` + | | diff --git a/tests/neg-macros/wrong-owner/Macro_1.scala b/tests/neg-macros/wrong-owner/Macro_1.scala new file mode 100644 index 000000000000..85127b701f81 --- /dev/null +++ b/tests/neg-macros/wrong-owner/Macro_1.scala @@ -0,0 +1,19 @@ +import scala.annotation.experimental +import scala.annotation.MacroAnnotation +import scala.quoted.* + +@experimental +class wrongOwner extends MacroAnnotation : + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + import quotes.reflect.* + tree match + case ClassDef(name, ctr, parents, self, body) => + val cls = tree.symbol + val toStringSym = Symbol.requiredMethod("java.lang.Object.toString") + val toStringOverrideSym = Symbol.newMethod(Symbol.classSymbol("java.lang.String"), "toString", toStringSym.info, Flags.Override, Symbol.noSymbol) + val toStringDef = DefDef(toStringOverrideSym, _ => Some(Literal(StringConstant("Hello from macro")))) + val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, toStringDef :: body) + List(newClassDef) + case _ => + report.error("@wrongOwner can only be annotated on class definitions") + tree :: Nil diff --git a/tests/neg-macros/wrong-owner/Test_2.scala b/tests/neg-macros/wrong-owner/Test_2.scala new file mode 100644 index 000000000000..ccba69beeb86 --- /dev/null +++ b/tests/neg-macros/wrong-owner/Test_2.scala @@ -0,0 +1,5 @@ +import scala.annotation.experimental + +@experimental +@wrongOwner +class Foo // error diff --git a/tests/neg-no-kind-polymorphism/anykind.scala b/tests/neg-no-kind-polymorphism/anykind.scala deleted file mode 100644 index 07c98c4f9576..000000000000 --- a/tests/neg-no-kind-polymorphism/anykind.scala +++ /dev/null @@ -1,2 +0,0 @@ - -trait Foo[T <: AnyKind] // error: Not found: type AnyKind diff --git a/tests/neg-scalajs/enumeration-warnings.scala b/tests/neg-scalajs/enumeration-warnings.scala index 8f1e17475d0b..b48ffc9500e3 100644 --- a/tests/neg-scalajs/enumeration-warnings.scala +++ b/tests/neg-scalajs/enumeration-warnings.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings class UnableToTransformValue extends Enumeration { val a = { diff --git a/tests/neg-scalajs/js-native-members.check b/tests/neg-scalajs/js-native-members.check index 466dbc9d2063..11acee62af90 100644 --- a/tests/neg-scalajs/js-native-members.check +++ b/tests/neg-scalajs/js-native-members.check @@ -1,3 +1,40 @@ +-- Warning: tests/neg-scalajs/js-native-members.scala:24:16 ------------------------------------------------------------ +24 | private[this] def this(x: Int) = this() // ok + | ^ + | The [this] qualifier will be deprecated in the future; it should be dropped.
+ | See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. +-- Warning: tests/neg-scalajs/js-native-members.scala:28:16 ------------------------------------------------------------ +28 | private[this] val a: Int = js.native // error + | ^ + | The [this] qualifier will be deprecated in the future; it should be dropped. + | See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. +-- Warning: tests/neg-scalajs/js-native-members.scala:32:16 ------------------------------------------------------------ +32 | private[this] var d: Int = js.native // error + | ^ + | The [this] qualifier will be deprecated in the future; it should be dropped. + | See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. +-- Warning: tests/neg-scalajs/js-native-members.scala:36:16 ------------------------------------------------------------ +36 | private[this] def g(): Int = js.native // error + | ^ + | The [this] qualifier will be deprecated in the future; it should be dropped. + | See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. +-- Warning: tests/neg-scalajs/js-native-members.scala:49:25 ------------------------------------------------------------ +49 | class X3 private[this] () extends js.Object { // ok + | ^ + | The [this] qualifier will be deprecated in the future; it should be dropped. + | See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. +-- [E003] Syntax Warning: tests/neg-scalajs/js-native-members.scala:58:44 ---------------------------------------------- +58 | def assign[T, U](target: T, source: U): T with U = js.native // ok + | ^^^^ + | with as a type operator has been deprecated; use & instead + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. + | + | longer explanation available when compiling with `-explain` -- Error: tests/neg-scalajs/js-native-members.scala:9:24 --------------------------------------------------------------- 9 | def this(z: String) = this(z.length, z) // error | ^^^^^^^^^^^^^^^^^ diff --git a/tests/neg-scalajs/js-non-native-members-qualified-private.check b/tests/neg-scalajs/js-non-native-members-qualified-private.check index 915a1bbe89eb..fb06b91f98f8 100644 --- a/tests/neg-scalajs/js-non-native-members-qualified-private.check +++ b/tests/neg-scalajs/js-non-native-members-qualified-private.check @@ -1,3 +1,9 @@ +-- Warning: tests/neg-scalajs/js-non-native-members-qualified-private.scala:52:28 -------------------------------------- +52 | class B private[this] () extends js.Object // ok + | ^ + | The [this] qualifier will be deprecated in the future; it should be dropped. + | See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. 
-- Error: tests/neg-scalajs/js-non-native-members-qualified-private.scala:6:32 ----------------------------------------- 6 | private[Enclosing1] def foo(i: Int): Int = i // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.check b/tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.check index 7687543ea75f..fe55c0caee52 100644 --- a/tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.check +++ b/tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.check @@ -1,3 +1,17 @@ +-- [E003] Syntax Warning: tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.scala:16:41 ------------------------- +16 | val c = js.constructorOf[NativeJSClass with NativeJSTrait] // error + | ^^^^ + | with as a type operator has been deprecated; use & instead + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. + | + | longer explanation available when compiling with `-explain` +-- [E003] Syntax Warning: tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.scala:22:35 ------------------------- +22 | val g = js.constructorOf[JSClass with JSTrait] // error + | ^^^^ + | with as a type operator has been deprecated; use & instead + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. + | + | longer explanation available when compiling with `-explain` -- Error: tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.scala:13:27 ----------------------------------------- 13 | val a = js.constructorOf[NativeJSTrait] // error | ^^^^^^^^^^^^^ diff --git a/tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.check b/tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.check index 142de318efd3..df09d5b1953d 100644 --- a/tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.check +++ b/tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.check @@ -1,3 +1,17 @@ +-- [E003] Syntax Warning: tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.scala:16:42 ------------------------ +16 | val c = js.constructorTag[NativeJSClass with NativeJSTrait] // error + | ^^^^ + | with as a type operator has been deprecated; use & instead + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. + | + | longer explanation available when compiling with `-explain` +-- [E003] Syntax Warning: tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.scala:22:36 ------------------------ +22 | val g = js.constructorTag[JSClass with JSTrait] // error + | ^^^^ + | with as a type operator has been deprecated; use & instead + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. 
+ | + | longer explanation available when compiling with `-explain` -- Error: tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.scala:13:42 ---------------------------------------- 13 | val a = js.constructorTag[NativeJSTrait] // error | ^ diff --git a/tests/neg-strict/i11225.scala b/tests/neg-strict/i11225.scala deleted file mode 100644 index 2a7e3ec57561..000000000000 --- a/tests/neg-strict/i11225.scala +++ /dev/null @@ -1,10 +0,0 @@ -import compiletime.uninitialized - -class Memo[A](x: => A): - private var cached: A = _ // error - private var known: Boolean = false - def force = - if !known then - known = true - cached = x - cached diff --git a/tests/neg/11982.scala b/tests/neg/11982.scala index 1f50ab2dfe4f..dd7a2b9b055e 100644 --- a/tests/neg/11982.scala +++ b/tests/neg/11982.scala @@ -4,8 +4,8 @@ type Head[X] = X match { } object Unpair { - def unpair[X <: Tuple2[Any, Any]]: Head[X] = 1 - unpair[Tuple2["msg", 42]]: "msg" // error + def unpair[X <: Tuple2[Any, Any]]: Head[X] = 1 // error + unpair[Tuple2["msg", 42]]: "msg" } @@ -14,8 +14,8 @@ type Head2[X] = X match { } object Unpair2 { - def unpair[X <: Tuple2[Tuple2[Any, Any], Tuple2[Any, Any]]]: Head2[X] = 1 - unpair[Tuple2[Tuple2["msg", 42], Tuple2[41, 40]]]: "msg" // error + def unpair[X <: Tuple2[Tuple2[Any, Any], Tuple2[Any, Any]]]: Head2[X] = 1 // error + unpair[Tuple2[Tuple2["msg", 42], Tuple2[41, 40]]]: "msg" } @@ -35,6 +35,6 @@ type Head4[X] = X match { } object Unpair4 { - def unpair[X <: Foo[Any, Any]]: Head4[Foo[X, X]] = 1 - unpair[Foo["msg", 42]]: "msg" // error + def unpair[X <: Foo[Any, Any]]: Head4[Foo[X, X]] = 1 // error + unpair[Foo["msg", 42]]: "msg" } diff --git a/tests/neg/12800.scala b/tests/neg/12800.scala deleted file mode 100644 index 164276396bec..000000000000 --- a/tests/neg/12800.scala +++ /dev/null @@ -1,21 +0,0 @@ -object Test { - type FieldType2[K, +V] = V with KeyTag2[K, V] - trait KeyTag2[K, +V] extends Any - - type WrapUpper = Tuple - type Wrap[A] = Tuple1[A] - - type Extract[A <: WrapUpper] = A match { - case Wrap[h] => h - } - - summon[Extract[Wrap[FieldType2["foo", Int]]] =:= FieldType2["foo", Int]] // error - // ^ - // Cannot prove that Main.Extract[Tuple1[Main.FieldType2[("foo" : String), Int]]] =:= Main.FieldType2[("foo" : String), Int]. - // - // Note: a match type could not be fully reduced: - // - // trying to reduce Main.Extract[Tuple1[Main.FieldType2[("foo" : String), Int]]] - // failed since selector Tuple1[Main.FieldType2[("foo" : String), Int]] - // is uninhabited. 
-} diff --git a/tests/neg-custom-args/no-experimental/14034.scala b/tests/neg/14034.scala similarity index 88% rename from tests/neg-custom-args/no-experimental/14034.scala rename to tests/neg/14034.scala index ab824c43395e..bdb09a011777 100644 --- a/tests/neg-custom-args/no-experimental/14034.scala +++ b/tests/neg/14034.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import annotation.experimental @experimental trait Exp diff --git a/tests/neg-custom-args/deprecation/14034b.scala b/tests/neg/14034b.scala similarity index 86% rename from tests/neg-custom-args/deprecation/14034b.scala rename to tests/neg/14034b.scala index 07960bba9574..84f9ab3579c8 100644 --- a/tests/neg-custom-args/deprecation/14034b.scala +++ b/tests/neg/14034b.scala @@ -1,3 +1,4 @@ +//> using options -Xfatal-warnings -deprecation @deprecated trait Exp @deprecated val exp = 1 diff --git a/tests/neg/15981.check b/tests/neg/15981.check index c4d677b486e9..10745839c566 100644 --- a/tests/neg/15981.check +++ b/tests/neg/15981.check @@ -1,4 +1,6 @@ --- Error: tests/neg/15981.scala:4:45 ----------------------------------------------------------------------------------- +-- [E092] Pattern Match Error: tests/neg/15981.scala:4:45 -------------------------------------------------------------- 4 | override def equals(any: Any): Boolean = any.isInstanceOf[PosInt] // error | ^^^ | the type test for PosInt cannot be checked at runtime because it's a local class + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/15981.scala b/tests/neg/15981.scala index efbad2570e7b..5aba3555c010 100644 --- a/tests/neg/15981.scala +++ b/tests/neg/15981.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror val _ = locally{ sealed abstract class PosInt(val value: Int) { override def equals(any: Any): Boolean = any.isInstanceOf[PosInt] // error diff --git a/tests/neg/17211.check b/tests/neg/17211.check new file mode 100644 index 000000000000..be7086e3b3eb --- /dev/null +++ b/tests/neg/17211.check @@ -0,0 +1,14 @@ +-- [E182] Type Error: tests/neg/17211.scala:14:13 ---------------------------------------------------------------------- +14 | constValue[IsInt[Foo.Foo]] // error + | ^^^^^^^^^^^^^^ + | IsInt[Foo.Foo] is not a constant type; cannot take constValue + | + | Note: a match type could not be fully reduced: + | + | trying to reduce IsInt[Foo.Foo] + | failed since selector Foo.Foo + | does not match case Int => (true : Boolean) + | and cannot be shown to be disjoint from it either. 
+ | Therefore, reduction cannot advance to the remaining case + | + | case _ => (false : Boolean) diff --git a/tests/neg/17211.scala b/tests/neg/17211.scala new file mode 100644 index 000000000000..3e59e657c4a7 --- /dev/null +++ b/tests/neg/17211.scala @@ -0,0 +1,14 @@ +import scala.compiletime.constValue + +type IsInt[A] = A match + case Int => true + case _ => false + +def test = + val x = constValue[IsInt[Int]] // val res2: Boolean = true; works + val y = constValue[IsInt[String]] // val res3: Boolean = false; works + + object Foo: + opaque type Foo = Int + + constValue[IsInt[Foo.Foo]] // error diff --git a/tests/neg/17284.check b/tests/neg/17284.check new file mode 100644 index 000000000000..fa248c598311 --- /dev/null +++ b/tests/neg/17284.check @@ -0,0 +1,30 @@ +-- [E187] Potential Issue Error: tests/neg/17284.scala:4:6 ------------------------------------------------------------- +4 | 451.synchronized {} // error + | ^^^^^^^^^^^^^^^^ + | Suspicious synchronized call on boxed class + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | You called the synchronized method on a boxed primitive. This might not be what + | you intended. + --------------------------------------------------------------------------------------------------------------------- +-- [E187] Potential Issue Error: tests/neg/17284.scala:8:4 ------------------------------------------------------------- +8 | x.synchronized {} // error + | ^^^^^^^^^^^^^^ + | Suspicious synchronized call on boxed class + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | You called the synchronized method on a boxed primitive. This might not be what + | you intended. + --------------------------------------------------------------------------------------------------------------------- +-- [E187] Potential Issue Error: tests/neg/17284.scala:11:7 ------------------------------------------------------------ +11 | true.synchronized {} // error + | ^^^^^^^^^^^^^^^^^ + | Suspicious synchronized call on boxed class + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | You called the synchronized method on a boxed primitive. This might not be what + | you intended. 
+ -------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/17284.scala b/tests/neg/17284.scala new file mode 100644 index 000000000000..8f588233245a --- /dev/null +++ b/tests/neg/17284.scala @@ -0,0 +1,14 @@ +//> using options -Werror -explain + +def test = + 451.synchronized {} // error + +def test2 = + val x: Integer = 451 + x.synchronized {} // error + +def test3 = + true.synchronized {} // error + +def test4 = + true.hashCode() // success diff --git a/tests/neg/18031.check b/tests/neg/18031.check new file mode 100644 index 000000000000..47d3e784ade1 --- /dev/null +++ b/tests/neg/18031.check @@ -0,0 +1,28 @@ +-- Error: tests/neg/18031.scala:8:15 ----------------------------------------------------------------------------------- +8 | export A.{*, x as _} // error + | ^ + | named exports cannot follow wildcard exports +-- Error: tests/neg/18031.scala:11:15 ---------------------------------------------------------------------------------- +11 | import A.{*, x as _} // error + | ^ + | named imports cannot follow wildcard imports +-- Error: tests/neg/18031.scala:15:14 ---------------------------------------------------------------------------------- +15 | export A.{x => blah} // error + | ^ + | The export renaming `a => b` is no longer supported ; use `a as b` instead + | This construct can be rewritten automatically under -rewrite -source future-migration. +-- Error: tests/neg/18031.scala:18:14 ---------------------------------------------------------------------------------- +18 | import A.{x => blah} // error + | ^ + | The import renaming `a => b` is no longer supported ; use `a as b` instead + | This construct can be rewritten automatically under -rewrite -source future-migration. +-- Error: tests/neg/18031.scala:22:11 ---------------------------------------------------------------------------------- +22 | export A._ // error + | ^ + | `_` is no longer supported for a wildcard export; use `*` instead + | This construct can be rewritten automatically under -rewrite -source future-migration. +-- Error: tests/neg/18031.scala:25:11 ---------------------------------------------------------------------------------- +25 | import A._ // error + | ^ + | `_` is no longer supported for a wildcard import; use `*` instead + | This construct can be rewritten automatically under -rewrite -source future-migration. 
diff --git a/tests/neg/18031.scala b/tests/neg/18031.scala new file mode 100644 index 000000000000..a91ad8bfc66d --- /dev/null +++ b/tests/neg/18031.scala @@ -0,0 +1,25 @@ +//> using options -source:future + +object A: + val x, y, z = 0 + + +object B: + export A.{*, x as _} // error + +object C: + import A.{*, x as _} // error + + +object D: + export A.{x => blah} // error + +object E: + import A.{x => blah} // error + + +object F: + export A._ // error + +object G: + import A._ // error diff --git a/tests/neg/18493.check b/tests/neg/18493.check new file mode 100644 index 000000000000..79a2872e71e8 --- /dev/null +++ b/tests/neg/18493.check @@ -0,0 +1,8 @@ +-- [E030] Match case Unreachable Error: tests/neg/18493.scala:6:9 ------------------------------------------------------ +6 | case "abc" => // error + | ^^^^^ + | Unreachable case +-- [E030] Match case Unreachable Error: tests/neg/18493.scala:12:9 ----------------------------------------------------- +12 | case "abc" => // error + | ^^^^^ + | Unreachable case diff --git a/tests/neg/18493.scala b/tests/neg/18493.scala new file mode 100644 index 000000000000..8dfb3bf923cc --- /dev/null +++ b/tests/neg/18493.scala @@ -0,0 +1,14 @@ +//> using options -Werror +object PartialFunctionNoWarning { + // nice warning + "abc" match { + case "abc" => + case "abc" => // error + } + + // no warnings + val pf: PartialFunction[String, Unit] = { + case "abc" => + case "abc" => // error + } +} \ No newline at end of file diff --git a/tests/neg/6314-1.scala b/tests/neg/6314-1.scala index 5d5662c338e3..8585b707004d 100644 --- a/tests/neg/6314-1.scala +++ b/tests/neg/6314-1.scala @@ -1,6 +1,7 @@ object G { - final class X - final class Y + trait X + class Y + class Z trait FooSig { type Type @@ -13,14 +14,14 @@ object G { type Foo = Foo.Type type Bar[A] = A match { - case X & Y => String + case X & Z => String case Y => Int } def main(args: Array[String]): Unit = { val a: Bar[X & Y] = "hello" // error val i: Bar[Y & Foo] = Foo.apply[Bar](a) - val b: Int = i // error + val b: Int = i println(b + 1) } } diff --git a/tests/neg/6314-6.check b/tests/neg/6314-6.check new file mode 100644 index 000000000000..7d6bd182173d --- /dev/null +++ b/tests/neg/6314-6.check @@ -0,0 +1,16 @@ +-- Error: tests/neg/6314-6.scala:26:3 ---------------------------------------------------------------------------------- +26 | (new YY {}).boom // error: object creation impossible + | ^ + |object creation impossible, since def apply(fa: String): Int in trait XX in object Test3 is not defined + |(Note that + | parameter String in def apply(fa: String): Int in trait XX in object Test3 does not match + | parameter Test3.Bar[X & Object with Test3.YY {...}#Foo] in def apply(fa: Test3.Bar[X & YY.this.Foo]): Test3.Bar[Y & YY.this.Foo] in trait YY in object Test3 + | ) +-- Error: tests/neg/6314-6.scala:52:3 ---------------------------------------------------------------------------------- +52 | (new YY {}).boom // error: object creation impossible + | ^ + |object creation impossible, since def apply(fa: String): Int in trait XX in object Test4 is not defined + |(Note that + | parameter String in def apply(fa: String): Int in trait XX in object Test4 does not match + | parameter Test4.Bar[X & Object with Test4.YY {...}#FooAlias] in def apply(fa: Test4.Bar[X & YY.this.FooAlias]): Test4.Bar[Y & YY.this.FooAlias] in trait YY in object Test4 + | ) diff --git a/tests/neg/6314-6.scala b/tests/neg/6314-6.scala index 6c400ab46d97..23853e20434d 100644 --- a/tests/neg/6314-6.scala +++ 
b/tests/neg/6314-6.scala @@ -21,11 +21,9 @@ object Test3 { trait YY extends XX { type Foo = X & Y - def apply(fa: Bar[X & Foo]): Bar[Y & Foo] = fa // error - // overriding method apply in trait XX of type (fa: String): Int; - // method apply of type (fa: String): String has incompatible type + def apply(fa: Bar[X & Foo]): Bar[Y & Foo] = fa } - (new YY {}).boom + (new YY {}).boom // error: object creation impossible } object Test4 { @@ -49,9 +47,7 @@ object Test4 { trait YY extends XX { type Foo = X & Y - def apply(fa: Bar[X & FooAlias]): Bar[Y & FooAlias] = fa // error - // overriding method apply in trait XX of type (fa: String): Int; - // method apply of type (fa: String): String has incompatible type + def apply(fa: Bar[X & FooAlias]): Bar[Y & FooAlias] = fa } - (new YY {}).boom + (new YY {}).boom // error: object creation impossible } diff --git a/tests/neg/6314.check b/tests/neg/6314.check new file mode 100644 index 000000000000..2a5e8b68a999 --- /dev/null +++ b/tests/neg/6314.check @@ -0,0 +1,44 @@ +-- [E007] Type Mismatch Error: tests/neg/6314.scala:28:27 -------------------------------------------------------------- +28 | val i: Bar[Y | Type] = 1 // error + | ^ + | Found: (1 : Int) + | Required: Test1Bis.Bar[Test1Bis.Y | Test.this.Type] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test1Bis.Bar[Test1Bis.Y | Test.this.Type] + | failed since selector Test1Bis.Y | Test.this.Type + | does not match case Test1Bis.X & Test1Bis.Y => String + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case Any => Int + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/6314.scala:45:33 -------------------------------------------------------------- +45 | def right(fa: Bar[L]): Int = fa // error + | ^^ + | Found: (fa : Wizzle.this.Bar[L]) + | Required: Int + | + | where: L is a type in trait Wizzle with bounds <: Int & Singleton + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/6314.scala:55:33 -------------------------------------------------------------- +55 | def right(fa: Bar[L]): Int = fa // error + | ^^ + | Found: (fa : Wazzlo.this.Bar[L]) + | Required: Int + | + | where: L is a type in trait Wazzlo with bounds <: Int & AnyVal + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/6314.scala:65:33 -------------------------------------------------------------- +65 | def right(fa: Bar[L]): Int = fa // error + | ^^ + | Found: (fa : Wuzzlu.this.Bar[L]) + | Required: Int + | + | where: L is a type in trait Wuzzlu with bounds <: String & AnyRef + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/6314.scala b/tests/neg/6314.scala index beee41c48e9a..17adbceef1fd 100644 --- a/tests/neg/6314.scala +++ b/tests/neg/6314.scala @@ -1,22 +1,41 @@ -final class X -final class Y - object Test1 { + // X, Y and Z are unrelated, Y is provably disjoint from Z, but X is not provably disjoint with either + trait X + class Y + class Z + trait Test { type Type // This is testing that both permutations of the types in a & - // are taken into account by the intersection test - val i: Bar[Y & Type] = 1 // error + // are taken into account by the provablyDisjoint test + val i: Bar[Y & Type] = 1 // ok, disjoint from X & Z because Y and Z are disjoint } type Bar[A] = A match { - case X & Y => String + case X & Z 
=> String case Y => Int } } +object Test1Bis { + final class X + final class Y + + trait Test { + type Type + // This is testing that both permutations of the types in a | + // are taken into account by the provablyDisjoint test + val i: Bar[Y | Type] = 1 // error + } + + type Bar[A] = A match { + case X & Y => String + case Any => Int + } +} + object Test2 { - trait Wizzle[L <: Int with Singleton] { + trait Wizzle[L <: Int & Singleton] { type Bar[A] = A match { case 0 => String case L => Int @@ -26,7 +45,7 @@ object Test2 { def right(fa: Bar[L]): Int = fa // error } - trait Wazzlo[L <: Int with AnyVal] { + trait Wazzlo[L <: Int & AnyVal] { type Bar[A] = A match { case 0 => String case L => Int @@ -36,7 +55,7 @@ object Test2 { def right(fa: Bar[L]): Int = fa // error } - trait Wuzzlu[L <: String with AnyRef] { + trait Wuzzlu[L <: String & AnyRef] { type Bar[A] = A match { case "" => String case L => Int diff --git a/tests/neg/6570-1.check b/tests/neg/6570-1.check index bdbadd0f752a..0abf96e2d350 100644 --- a/tests/neg/6570-1.check +++ b/tests/neg/6570-1.check @@ -27,6 +27,6 @@ | does not uniquely determine parameter x in | case Cov[x] => N[x] | The computed bounds for the parameter are: - | x >: Box[Int] + | x <: Box[Int] | | longer explanation available when compiling with `-explain` diff --git a/tests/neg/6570.check b/tests/neg/6570.check new file mode 100644 index 000000000000..e849814449eb --- /dev/null +++ b/tests/neg/6570.check @@ -0,0 +1,116 @@ +-- [E007] Type Mismatch Error: tests/neg/6570.scala:26:50 -------------------------------------------------------------- +26 | def foo[T <: Cov[Int]](c: Child[T]): Trait2 = c.thing // error + | ^^^^^^^ + | Found: UpperBoundParametricVariant.M[T] + | Required: Base.Trait2 + | + | where: T is a type in method foo with bounds <: UpperBoundParametricVariant.Cov[Int] + | + | + | Note: a match type could not be fully reduced: + | + | trying to reduce UpperBoundParametricVariant.M[T] + | failed since selector T + | does not uniquely determine parameter x in + | case UpperBoundParametricVariant.Cov[x] => Base.N[x] + | The computed bounds for the parameter are: + | x >: Int + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/6570.scala:29:29 -------------------------------------------------------------- +29 | def thing = new Trait1 {} // error + | ^ + | Found: Object with Base.Trait1 {...} + | Required: Base.N[String & Int] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Base.N[String & Int] + | failed since selector String & Int + | is uninhabited (there are no values of that type). 
+ | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/6570.scala:47:32 -------------------------------------------------------------- +47 | def foo(c: Child): Trait2 = c.thing // error + | ^^^^^^^ + | Found: InheritanceVariant.M[c.B] + | Required: Base.Trait2 + | + | Note: a match type could not be fully reduced: + | + | trying to reduce InheritanceVariant.M[c.B] + | failed since selector c.B + | does not uniquely determine parameter a in + | case InheritanceVariant.Trick[a] => Base.N[a] + | The computed bounds for the parameter are: + | a >: Int + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/6570.scala:51:29 -------------------------------------------------------------- +51 | def thing = new Trait1 {} // error + | ^ + | Found: Object with Base.Trait1 {...} + | Required: Base.N[String & Int] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Base.N[String & Int] + | failed since selector String & Int + | is uninhabited (there are no values of that type). + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/6570.scala:69:29 -------------------------------------------------------------- +69 | def thing = new Trait1 {} // error + | ^ + | Found: Object with Base.Trait1 {...} + | Required: Base.N[String & Int] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Base.N[String & Int] + | failed since selector String & Int + | is uninhabited (there are no values of that type). + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/6570.scala:86:29 -------------------------------------------------------------- +86 | def thing = new Trait1 {} // error + | ^ + | Found: Object with Base.Trait1 {...} + | Required: Base.N[String & Int] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Base.N[String & Int] + | failed since selector String & Int + | is uninhabited (there are no values of that type). + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/6570.scala:103:32 ------------------------------------------------------------- +103 | def foo(c: Child): Trait2 = c.thing // error + | ^^^^^^^ + | Found: UpperBoundVariant.M[c.A] + | Required: Base.Trait2 + | + | Note: a match type could not be fully reduced: + | + | trying to reduce UpperBoundVariant.M[c.A] + | failed since selector c.A + | does not uniquely determine parameter t in + | case UpperBoundVariant.Cov[t] => Base.N[t] + | The computed bounds for the parameter are: + | t >: Int + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/6570.scala:107:29 ------------------------------------------------------------- +107 | def thing = new Trait1 {} // error + | ^ + | Found: Object with Base.Trait1 {...} + | Required: Base.N[String & Int] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Base.N[String & Int] + | failed since selector String & Int + | is uninhabited (there are no values of that type). 
+ | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/6570.scala b/tests/neg/6570.scala index f36471868d9b..cd6016a164c8 100644 --- a/tests/neg/6570.scala +++ b/tests/neg/6570.scala @@ -1,3 +1,5 @@ +//> using options -source:3.3 + object Base { trait Trait1 trait Trait2 diff --git a/tests/neg/6571.check b/tests/neg/6571.check index 4172abb2919b..cb2fc50b86d2 100644 --- a/tests/neg/6571.check +++ b/tests/neg/6571.check @@ -8,7 +8,8 @@ | | trying to reduce Test.M[Test.Inv[Int] & Test.Inv[String]] | failed since selector Test.Inv[Int] & Test.Inv[String] - | is uninhabited (there are no values of that type). + | does not match case Test.Inv[u] => u + | and cannot be shown to be disjoint from it either. | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/6571.scala:7:39 --------------------------------------------------------------- @@ -21,6 +22,7 @@ | | trying to reduce Test.M[Test.Inv[String] & Test.Inv[Int]] | failed since selector Test.Inv[String] & Test.Inv[Int] - | is uninhabited (there are no values of that type). + | does not match case Test.Inv[u] => u + | and cannot be shown to be disjoint from it either. | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/fatal-warnings/IsInstanceOfClassTag.scala b/tests/neg/IsInstanceOfClassTag.scala similarity index 85% rename from tests/neg-custom-args/fatal-warnings/IsInstanceOfClassTag.scala rename to tests/neg/IsInstanceOfClassTag.scala index a398d7bdaf24..139ca7eae5a6 100644 --- a/tests/neg-custom-args/fatal-warnings/IsInstanceOfClassTag.scala +++ b/tests/neg/IsInstanceOfClassTag.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + import scala.reflect.ClassTag object IsInstanceOfClassTag { @@ -15,7 +17,7 @@ object IsInstanceOfClassTag { xs.head.substring(0) } - safeCast[List[_]](List[Int](1)) match { + safeCast[List[?]](List[Int](1)) match { case None => case Some(xs) => xs.head.substring(0) // error diff --git a/tests/neg-custom-args/fatal-warnings/IsInstanceOfClassTag2.scala b/tests/neg/IsInstanceOfClassTag2.scala similarity index 83% rename from tests/neg-custom-args/fatal-warnings/IsInstanceOfClassTag2.scala rename to tests/neg/IsInstanceOfClassTag2.scala index d9782bc14f34..9d32ee401092 100644 --- a/tests/neg-custom-args/fatal-warnings/IsInstanceOfClassTag2.scala +++ b/tests/neg/IsInstanceOfClassTag2.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + import scala.reflect.TypeTest object IsInstanceOfClassTag { @@ -14,7 +16,7 @@ object IsInstanceOfClassTag { case Some(xs) => } - safeCast[List[_]](List[Int](1)) match { + safeCast[List[?]](List[Int](1)) match { case None => case Some(xs) => } diff --git a/tests/neg-custom-args/adhoc-extension/A.scala b/tests/neg/adhoc-extension/A.scala similarity index 100% rename from tests/neg-custom-args/adhoc-extension/A.scala rename to tests/neg/adhoc-extension/A.scala diff --git a/tests/neg/adhoc-extension/B.scala b/tests/neg/adhoc-extension/B.scala new file mode 100644 index 000000000000..2343a9bc0060 --- /dev/null +++ b/tests/neg/adhoc-extension/B.scala @@ -0,0 +1,10 @@ +//> using options -source future -feature -Xfatal-warnings + +package adhoc +class B extends A // error: adhoc-extension (under -source future -feature -Xfatal-warnings) +class C extends A // error + +object O { + val a = new A {} // error + object E extends A // error +} \ No newline at end of file diff --git a/tests/neg/alphanumeric-infix-operator-3.4.scala
b/tests/neg/alphanumeric-infix-operator-3.4.scala new file mode 100644 index 000000000000..14d3358127ca --- /dev/null +++ b/tests/neg/alphanumeric-infix-operator-3.4.scala @@ -0,0 +1,11 @@ +//> using options -Werror + +import language.`3.4` + +class Foo: + def x(i: Int) = i + infix def y(i: Int) = i + +def test(foo: Foo): Unit = + foo x 1 // error (because it was compiled with 3.4+) + foo y 2 // ok: is marked as infix diff --git a/tests/neg/alphanumeric-infix-operator.check b/tests/neg/alphanumeric-infix-operator.check new file mode 100644 index 000000000000..52b08f16b88c --- /dev/null +++ b/tests/neg/alphanumeric-infix-operator.check @@ -0,0 +1,6 @@ +-- Error: tests/neg/alphanumeric-infix-operator.scala:8:6 -------------------------------------------------------------- +8 | foo x 1 // error (because it was compiled with 3.4+) + | ^ + | Alphanumeric method x is not declared infix; it should not be used as infix operator. + | Instead, use method syntax .x(...) or backticked identifier `x`. + | The latter can be rewritten automatically under -rewrite -source 3.4-migration. diff --git a/tests/neg/alphanumeric-infix-operator.scala b/tests/neg/alphanumeric-infix-operator.scala new file mode 100644 index 000000000000..1f2233dda6ce --- /dev/null +++ b/tests/neg/alphanumeric-infix-operator.scala @@ -0,0 +1,9 @@ +//> using options -Werror + +class Foo: + def x(i: Int) = i + infix def y(i: Int) = i + +def test(foo: Foo): Unit = + foo x 1 // error (because it was compiled with 3.4+) + foo y 2 // ok: is marked as infix diff --git a/tests/neg/autoTuplingTestb.scala b/tests/neg/autoTuplingTestb.scala new file mode 100644 index 000000000000..be890508fad9 --- /dev/null +++ b/tests/neg/autoTuplingTestb.scala @@ -0,0 +1,11 @@ +//> using options -language:noAutoTupling + +object autoTupling { + + val x = Some(1, 2) // error when running with -language:noAutoTupling + + x match { + case Some(a, b) => a + b // error // error when running with -language:noAutoTupling + case None => + } +} diff --git a/tests/neg-custom-args/avoid-warn-deprecation.scala b/tests/neg/avoid-warn-deprecation.scala similarity index 81% rename from tests/neg-custom-args/avoid-warn-deprecation.scala rename to tests/neg/avoid-warn-deprecation.scala index fc8d71dd7f60..45baf7addb86 100644 --- a/tests/neg-custom-args/avoid-warn-deprecation.scala +++ b/tests/neg/avoid-warn-deprecation.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -feature + object A { @deprecated("use bar instead of this one", "0.2.3") def foo: Int = 3 diff --git a/tests/neg/bad-unapplies.check b/tests/neg/bad-unapplies.check index 44633ca6950a..51e71d8e8949 100644 --- a/tests/neg/bad-unapplies.check +++ b/tests/neg/bad-unapplies.check @@ -7,13 +7,13 @@ | both match arguments (C) | | longer explanation available when compiling with `-explain` --- [E127] Syntax Error: tests/neg/bad-unapplies.scala:23:9 ------------------------------------------------------------- +-- [E127] Pattern Match Error: tests/neg/bad-unapplies.scala:23:9 ------------------------------------------------------ 23 | case B("2") => // error (cannot be used as an extractor) | ^ | B cannot be used as an extractor in a pattern because it lacks an unapply or unapplySeq method | | longer explanation available when compiling with `-explain` --- [E127] Syntax Error: tests/neg/bad-unapplies.scala:24:9 ------------------------------------------------------------- +-- [E127] Pattern Match Error: tests/neg/bad-unapplies.scala:24:9 ------------------------------------------------------ 24 | case D("2") 
=> // error (cannot be used as an extractor) | ^ | D cannot be used as an extractor in a pattern because it lacks an unapply or unapplySeq method @@ -31,9 +31,9 @@ | Wrong number of argument patterns for F; expected: () | | longer explanation available when compiling with `-explain` --- [E006] Not Found Error: tests/neg/bad-unapplies.scala:27:9 ---------------------------------------------------------- +-- [E189] Not Found Error: tests/neg/bad-unapplies.scala:27:9 ---------------------------------------------------------- 27 | case G("2") => // error (Not found: G) | ^ - | Not found: G + | no pattern match extractor named G was found | | longer explanation available when compiling with `-explain` diff --git a/tests/neg/by-name.scala b/tests/neg/by-name.scala new file mode 100644 index 000000000000..74b003f56044 --- /dev/null +++ b/tests/neg/by-name.scala @@ -0,0 +1,6 @@ +//> using options -language:experimental.erasedDefinitions + +def f(x: => Int, erased y: => Int) = x // error +def g(erased x: => Int, y: => Int) = y // error + +val h: (erased => Int, Int) => Int = (erased x, y) => y // error diff --git a/tests/neg-custom-args/capt-wf.scala b/tests/neg/capt-wf.scala similarity index 87% rename from tests/neg-custom-args/capt-wf.scala rename to tests/neg/capt-wf.scala index 67e1bc9906fe..fbd334726e55 100644 --- a/tests/neg-custom-args/capt-wf.scala +++ b/tests/neg/capt-wf.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.captureChecking -Xfatal-warnings +import caps.cap class C type Cap = C^ @@ -11,7 +13,7 @@ def test(c: Cap, other: String): Unit = val x3a: () -> String = s1 val s2 = () => if x1 == null then "" else "abc" val x4: C^{s2} = ??? // OK - val x5: C^{c, c} = ??? // error: redundant + val x5: C^{c, c} = ??? // error: redundant // error: redundant // val x6: C^{c}^{c} = ??? // would be syntax error val x7: Cap^{c} = ??? // error: redundant // val x8: C^{c}^{cap} = ??? 
// would be syntax error diff --git a/tests/neg/cc-only-defs.scala b/tests/neg/cc-only-defs.scala index 43ac025f203a..a87978b41881 100644 --- a/tests/neg/cc-only-defs.scala +++ b/tests/neg/cc-only-defs.scala @@ -7,5 +7,5 @@ trait Test { val b: ImpureFunction1[Int, Int] // now OK - val a: {z} String // error -} // error + val a: {z} String // error // error +} diff --git a/tests/neg/class-mods.scala b/tests/neg/class-mods.scala index 60e9fb279364..cf4348ad42d7 100644 --- a/tests/neg/class-mods.scala +++ b/tests/neg/class-mods.scala @@ -2,7 +2,7 @@ open final class Foo1 // error sealed open class Foo2 // error open type T1 // error -sealed type T2 // error +type T2 // ok abstract type T3 // error abstract open type T4 // error diff --git a/tests/neg-custom-args/fatal-warnings/classtag-typetest/3_0-migration.scala b/tests/neg/classtag-typetest/3_0-migration.scala similarity index 100% rename from tests/neg-custom-args/fatal-warnings/classtag-typetest/3_0-migration.scala rename to tests/neg/classtag-typetest/3_0-migration.scala diff --git a/tests/neg-custom-args/fatal-warnings/classtag-typetest/3_0.scala b/tests/neg/classtag-typetest/3_0.scala similarity index 100% rename from tests/neg-custom-args/fatal-warnings/classtag-typetest/3_0.scala rename to tests/neg/classtag-typetest/3_0.scala diff --git a/tests/neg-custom-args/fatal-warnings/classtag-typetest/3_1-migration.scala b/tests/neg/classtag-typetest/3_1-migration.scala similarity index 80% rename from tests/neg-custom-args/fatal-warnings/classtag-typetest/3_1-migration.scala rename to tests/neg/classtag-typetest/3_1-migration.scala index da26fd3c5569..41e0537a6dc1 100644 --- a/tests/neg-custom-args/fatal-warnings/classtag-typetest/3_1-migration.scala +++ b/tests/neg/classtag-typetest/3_1-migration.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + import scala.language.`future-migration` import scala.reflect.ClassTag diff --git a/tests/neg-custom-args/fatal-warnings/classtag-typetest/3_1.scala b/tests/neg/classtag-typetest/3_1.scala similarity index 78% rename from tests/neg-custom-args/fatal-warnings/classtag-typetest/3_1.scala rename to tests/neg/classtag-typetest/3_1.scala index b21fb0606b8e..d9101ff2ae57 100644 --- a/tests/neg-custom-args/fatal-warnings/classtag-typetest/3_1.scala +++ b/tests/neg/classtag-typetest/3_1.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + import scala.language.future import scala.reflect.ClassTag diff --git a/tests/neg-custom-args/explain/constructor-proxy-shadowing.check b/tests/neg/constructor-proxy-shadowing.check similarity index 82% rename from tests/neg-custom-args/explain/constructor-proxy-shadowing.check rename to tests/neg/constructor-proxy-shadowing.check index db223ba33640..091d1ed14c1e 100644 --- a/tests/neg-custom-args/explain/constructor-proxy-shadowing.check +++ b/tests/neg/constructor-proxy-shadowing.check @@ -1,5 +1,5 @@ --- [E177] Reference Error: tests/neg-custom-args/explain/constructor-proxy-shadowing.scala:10:12 ----------------------- -10 | val x = A22("") // error: shadowing +-- [E177] Reference Error: tests/neg/constructor-proxy-shadowing.scala:11:12 ------------------------------------------- +11 | val x = A22("") // error: shadowing | ^^^ | Reference to constructor proxy for class A22 in class A | shadows outer reference to method A22 in object Test @@ -23,8 +23,8 @@ | To disambiguate, use an explicit `new` if you mean the former, | or use a full prefix for A22 if you mean the latter. 
-------------------------------------------------------------------------------------------------------------------- --- [E177] Reference Error: tests/neg-custom-args/explain/constructor-proxy-shadowing.scala:11:12 ----------------------- -11 | val y = A33("") // error: shadowing +-- [E177] Reference Error: tests/neg/constructor-proxy-shadowing.scala:12:12 ------------------------------------------- +12 | val y = A33("") // error: shadowing | ^^^ | Reference to constructor proxy for class A33 in class A | shadows outer reference to object A33 in object Test @@ -48,11 +48,11 @@ | To disambiguate, use an explicit `new` if you mean the former, | or use a full prefix for A33 if you mean the latter. -------------------------------------------------------------------------------------------------------------------- --- [E177] Reference Error: tests/neg-custom-args/explain/constructor-proxy-shadowing.scala:16:8 ------------------------ -16 |val x = Seq(3) // error: shadowing +-- [E177] Reference Error: tests/neg/constructor-proxy-shadowing.scala:17:8 -------------------------------------------- +17 |val x = Seq(3) // error: shadowing | ^^^ | Reference to constructor proxy for class Seq - | shadows outer reference to getter Seq in package scala + | shadows outer reference to value Seq in package scala | | The instance needs to be created with an explicit `new`. |-------------------------------------------------------------------------------------------------------------------- @@ -66,7 +66,7 @@ | | new Seq(...) | - | Or it could mean calling the apply method of getter Seq in package scala as in + | Or it could mean calling the apply method of value Seq in package scala as in | | Seq.apply(...) | diff --git a/tests/neg-custom-args/explain/constructor-proxy-shadowing.scala b/tests/neg/constructor-proxy-shadowing.scala similarity index 92% rename from tests/neg-custom-args/explain/constructor-proxy-shadowing.scala rename to tests/neg/constructor-proxy-shadowing.scala index c47fc2f4859b..fafc9c112c49 100644 --- a/tests/neg-custom-args/explain/constructor-proxy-shadowing.scala +++ b/tests/neg/constructor-proxy-shadowing.scala @@ -1,3 +1,4 @@ +//> using options -explain object Test extends App { def A22(s: String): String = s diff --git a/tests/neg/context-bounds-migration-3.5.check b/tests/neg/context-bounds-migration-3.5.check new file mode 100644 index 000000000000..dd8a2aeefbf3 --- /dev/null +++ b/tests/neg/context-bounds-migration-3.5.check @@ -0,0 +1,6 @@ +-- Error: tests/neg/context-bounds-migration-3.5.scala:9:2 ------------------------------------------------------------- +9 | foo(C[Int]()) // error + | ^^^ + | Context bounds will map to context parameters. + | A `using` clause is needed to pass explicit arguments to them. + | This code can be rewritten automatically under -rewrite -source 3.4-migration. 
diff --git a/tests/neg/context-bounds-migration-3.5.scala b/tests/neg/context-bounds-migration-3.5.scala new file mode 100644 index 000000000000..e5c571d0e22e --- /dev/null +++ b/tests/neg/context-bounds-migration-3.5.scala @@ -0,0 +1,10 @@ +//> using options -source 3.5 + +class C[T] +def foo[X: C] = () + +given [T]: C[T] = C[T]() + +def Test = + foo(C[Int]()) // error + foo(using C[Int]()) // ok diff --git a/tests/neg/context-bounds-migration-future.check b/tests/neg/context-bounds-migration-future.check new file mode 100644 index 000000000000..f56da5d6b28d --- /dev/null +++ b/tests/neg/context-bounds-migration-future.check @@ -0,0 +1,6 @@ +-- [E050] Type Error: tests/neg/context-bounds-migration-future.scala:9:2 ---------------------------------------------- +9 | foo(C[Int]()) // error + | ^^^ + | method foo does not take more parameters + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/context-bounds-migration-future.scala b/tests/neg/context-bounds-migration-future.scala new file mode 100644 index 000000000000..6d0e94c0b434 --- /dev/null +++ b/tests/neg/context-bounds-migration-future.scala @@ -0,0 +1,10 @@ +//> using options -source future + +class C[T] +def foo[X: C] = () + +given [T]: C[T] = C[T]() + +def Test = + foo(C[Int]()) // error + foo(using C[Int]()) // ok diff --git a/tests/neg/convertible.scala b/tests/neg/convertible.scala new file mode 100644 index 000000000000..e72de452f41d --- /dev/null +++ b/tests/neg/convertible.scala @@ -0,0 +1,31 @@ +//> using options -Xfatal-warnings -feature + +import language.experimental.into + +class Text(val str: String) + +object Test: + + given Conversion[String, Text] = Text(_) + + def f(x: Text, y: => Text, zs: Text*) = + println(s"${x.str} ${y.str} ${zs.map(_.str).mkString(" ")}") + + f("abc", "def") // error // error + f("abc", "def", "xyz", "uvw") // error // error // error // error + f("abc", "def", "xyz", Text("uvw")) // error // error // error + + def g(x: into Text) = + println(x.str) + + + g("abc") // OK + val gg = g + gg("abc") // straight eta expansion is also OK + + def h1[X](x: X)(y: X): Unit = () + + def h(x: into Text) = + val y = h1(x) + y("abc") // error, inference through type variable does not propagate + diff --git a/tests/neg-strict/deprecated-override.scala b/tests/neg/deprecated-override.scala similarity index 75% rename from tests/neg-strict/deprecated-override.scala rename to tests/neg/deprecated-override.scala index 998fa244ca8c..b532416c7126 100644 --- a/tests/neg-strict/deprecated-override.scala +++ b/tests/neg/deprecated-override.scala @@ -1,3 +1,5 @@ +//> using options -source future -deprecation -Xfatal-warnings + trait A: def f: Int diff --git a/tests/neg-custom-args/deptypes.scala b/tests/neg/deptypes.scala similarity index 87% rename from tests/neg-custom-args/deptypes.scala rename to tests/neg/deptypes.scala index f01be10f2217..39b1e42ccbca 100644 --- a/tests/neg-custom-args/deptypes.scala +++ b/tests/neg/deptypes.scala @@ -1,3 +1,4 @@ +//> using options -language:experimental.dependent type Vec[T] = (n: Int) =>> Array[T] // error: not yet implemented diff --git a/tests/neg/enum-variance.check b/tests/neg/enum-variance.check new file mode 100644 index 000000000000..f250df84cae6 --- /dev/null +++ b/tests/neg/enum-variance.check @@ -0,0 +1,10 @@ +-- Error: tests/neg/enum-variance.scala:4:12 --------------------------------------------------------------------------- +4 | case Refl(f: T => T) // error: enum case Refl requires explicit declaration of type T + | 
^^^^^^^^^ + | contravariant type T occurs in covariant position in type T => T of value f + | enum case Refl requires explicit declaration of type T to resolve this issue. + | See an example at https://docs.scala-lang.org/scala3/reference/enums/adts.html#parameter-variance-of-enums +-- Error: tests/neg/enum-variance.scala:7:16 --------------------------------------------------------------------------- +7 | case Refl[-T](f: T => T) extends ExplicitView[T] // error: contravariant type T occurs in covariant position + | ^^^^^^^^^ + | contravariant type T occurs in covariant position in type T => T of value f diff --git a/tests/neg-custom-args/fatal-warnings/enum-variance.scala b/tests/neg/enum-variance.scala similarity index 92% rename from tests/neg-custom-args/fatal-warnings/enum-variance.scala rename to tests/neg/enum-variance.scala index efe0dbbc6cdd..ae6693ad94cc 100644 --- a/tests/neg-custom-args/fatal-warnings/enum-variance.scala +++ b/tests/neg/enum-variance.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + enum View[-T]: case Refl(f: T => T) // error: enum case Refl requires explicit declaration of type T diff --git a/tests/neg/erased-1.scala b/tests/neg/erased-1.scala new file mode 100644 index 000000000000..62a1024e80f5 --- /dev/null +++ b/tests/neg/erased-1.scala @@ -0,0 +1,36 @@ +//> using options -language:experimental.erasedDefinitions + +object Test { + def foo0(a: Int): Int = a + def foo1(erased a: Int): Int = { + foo0( + a // error + ) + foo0({ + println() + a // error + }) + foo1(a) // OK + foo2( // error + a // error + ) + foo3( // error + a + ) + a // error + } + erased def foo2(a: Int): Int = { + foo0(a) // OK + foo1(a) // OK + foo2(a) // OK + foo3(a) // OK + a // OK + } + erased def foo3(erased a: Int): Int = { + foo0(a) // OK + foo1(a) // OK + foo2(a) // OK + foo3(a) // OK + a // OK + } +} \ No newline at end of file diff --git a/tests/neg/erased-15.scala b/tests/neg/erased-15.scala new file mode 100644 index 000000000000..47073d2517bd --- /dev/null +++ b/tests/neg/erased-15.scala @@ -0,0 +1,20 @@ +//> using options -language:experimental.erasedDefinitions + +object Test { + + def main(args: Array[String]): Unit = { + new Foo().apply(foo) + } + + def foo = { + println("foo") + 42 + } +} + +class Foo extends PolyFunction { // error + def apply(erased x: Int): Int = { + println("Foo.apply") + 42 + } +} diff --git a/tests/neg-custom-args/erased/erased-2.scala b/tests/neg/erased-2.scala similarity index 91% rename from tests/neg-custom-args/erased/erased-2.scala rename to tests/neg/erased-2.scala index bab269061899..02e4b56e11ac 100644 --- a/tests/neg-custom-args/erased/erased-2.scala +++ b/tests/neg/erased-2.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + object Test { def foo0(a: Int): Int = a def foo1(erased a: Int): Int = { diff --git a/tests/neg-custom-args/erased/erased-24.scala b/tests/neg/erased-24.scala similarity index 88% rename from tests/neg-custom-args/erased/erased-24.scala rename to tests/neg/erased-24.scala index dc39d4f9b653..bf2f1d21435e 100644 --- a/tests/neg-custom-args/erased/erased-24.scala +++ b/tests/neg/erased-24.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + // Could become a run test if we had totality checking for erased arguments object Test { diff --git a/tests/neg/erased-3.scala b/tests/neg/erased-3.scala new file mode 100644 index 000000000000..5c6a31860b11 --- /dev/null +++ b/tests/neg/erased-3.scala @@ -0,0 +1,47 @@ +//> using options 
-language:experimental.erasedDefinitions + +object Test { + def foo0(a: Int): Int = a + def foo1(erased a: Int): Int = { + foo0( + u() // error + ) + foo1(u()) // OK + foo2( // error + u() // error + ) + foo3( // error + u() + ) + u() // error + u() // error + } + erased def foo2(a: Int): Int = { + foo0(u()) // OK + foo1(u()) // OK + foo2(u()) // OK + foo3(u()) // OK + u() // warn + u() // OK + } + erased def foo3(erased a: Int): Int = { + foo0(u()) // OK + foo1(u()) // OK + foo2(u()) // OK + foo3(u()) // OK + u() // warn + u() // OK + } + + erased val foo4: Int = { + foo0(u()) // OK + foo1(u()) // OK + foo2(u()) // OK + foo3(u()) // OK + println() + u() // warn + u() // OK + } + + erased def u(): Int = 42 +} \ No newline at end of file diff --git a/tests/neg/erased-4.scala b/tests/neg/erased-4.scala new file mode 100644 index 000000000000..46b101acafd4 --- /dev/null +++ b/tests/neg/erased-4.scala @@ -0,0 +1,19 @@ +//> using options -language:experimental.erasedDefinitions + +object Test { + + def main(args: Array[String]): Unit = { + def foo (erased i: Int) = 0 + + val f: (erased Int) => Int = + (erased x: Int) => { + x // error + } + + val f2: (erased Int) => Int = + (erased x: Int) => { + foo(x) + } + } + +} diff --git a/tests/neg/erased-5.scala b/tests/neg/erased-5.scala new file mode 100644 index 000000000000..2d88b9f90d9f --- /dev/null +++ b/tests/neg/erased-5.scala @@ -0,0 +1,20 @@ +//> using options -language:experimental.erasedDefinitions + +object Test { + + type UU[T] = (erased T) => Int + + def main(args: Array[String]): Unit = { + fun { x => // error: `Int => Int` not compatible with `(erased Int) => Int` + x + } + + fun { + (x: Int) => x // error: `Int => Int` not compatible with `(erased Int) => Int` + } + } + + def fun(f: UU[Int]): Int = { + f(35) + } +} diff --git a/tests/neg/erased-6.scala b/tests/neg/erased-6.scala new file mode 100644 index 000000000000..4585ab876b3d --- /dev/null +++ b/tests/neg/erased-6.scala @@ -0,0 +1,14 @@ +//> using options -language:experimental.erasedDefinitions + +object Test { + erased def foo: Foo = new Foo + foo.x() // error + foo.y // error + foo.z // error +} + +class Foo { + def x(): String = "abc" + def y: String = "abc" + val z: String = "abc" +} \ No newline at end of file diff --git a/tests/neg/erased-args-lifted.scala b/tests/neg/erased-args-lifted.scala new file mode 100644 index 000000000000..2deee749ed3d --- /dev/null +++ b/tests/neg/erased-args-lifted.scala @@ -0,0 +1,18 @@ +//> using options -language:experimental.erasedDefinitions + +object Test { + def foo(a: Int)(b: Int, c: Int) = 42 + erased def bar(i: Int): Int = { + println(1) + 42 + } + def baz: Int = { + println(1) + 2 + } + foo( + bar(baz) // error + )( + c = baz, b = baz // force all args to be lifted in vals befor the call + ) +} diff --git a/tests/neg/erased-assign.scala b/tests/neg/erased-assign.scala new file mode 100644 index 000000000000..5026ca3f1856 --- /dev/null +++ b/tests/neg/erased-assign.scala @@ -0,0 +1,13 @@ +//> using options -language:experimental.erasedDefinitions + +object Test { + var i: Int = 1 + def foo(erased a: Int): Int = { + i = a // error + erased def r = { + i = a + () + } + 42 + } +} diff --git a/tests/neg/erased-case-class.scala b/tests/neg/erased-case-class.scala new file mode 100644 index 000000000000..5fdda47d5257 --- /dev/null +++ b/tests/neg/erased-case-class.scala @@ -0,0 +1,3 @@ +//> using options -language:experimental.erasedDefinitions + +case class Foo1(erased x: Int) // error // error diff --git a/tests/neg/erased-def-rhs.scala 
b/tests/neg/erased-def-rhs.scala new file mode 100644 index 000000000000..a57b9b1c149e --- /dev/null +++ b/tests/neg/erased-def-rhs.scala @@ -0,0 +1,8 @@ +//> using options -language:experimental.erasedDefinitions + +object Test { + def f(erased i: Int) = { + def j: Int = i // error + j + } +} diff --git a/tests/neg-custom-args/erased/erased-if-else.scala b/tests/neg/erased-if-else.scala similarity index 81% rename from tests/neg-custom-args/erased/erased-if-else.scala rename to tests/neg/erased-if-else.scala index 503dfc0bd1e3..d879f0ebf2e7 100644 --- a/tests/neg-custom-args/erased/erased-if-else.scala +++ b/tests/neg/erased-if-else.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + object Test { var b = true def foo(erased a: Boolean): Boolean = { diff --git a/tests/neg/erased-implicit.scala b/tests/neg/erased-implicit.scala new file mode 100644 index 000000000000..28fca7427942 --- /dev/null +++ b/tests/neg/erased-implicit.scala @@ -0,0 +1,11 @@ +//> using options -language:experimental.erasedDefinitions + +object Test { + + fun // error + + def fun(implicit a: Double): Int = 42 + + erased implicit def doubleImplicit: Double = 42.0 + +} diff --git a/tests/neg-custom-args/erased/erased-in-tuples.scala b/tests/neg/erased-in-tuples.scala similarity index 85% rename from tests/neg-custom-args/erased/erased-in-tuples.scala rename to tests/neg/erased-in-tuples.scala index 11a251c3bd4d..d1567c8e3a47 100644 --- a/tests/neg-custom-args/erased/erased-in-tuples.scala +++ b/tests/neg/erased-in-tuples.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + @main def Test() = val x = 5 val y = 7 diff --git a/tests/neg/erased-lazy-val.scala b/tests/neg/erased-lazy-val.scala new file mode 100644 index 000000000000..271f87cc2cf0 --- /dev/null +++ b/tests/neg/erased-lazy-val.scala @@ -0,0 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + +object Test { + erased lazy val i: Int = 1 // error +} diff --git a/tests/neg-custom-args/erased/erased-machine-state-encoding-with-inline.scala b/tests/neg/erased-machine-state-encoding-with-inline.scala similarity index 92% rename from tests/neg-custom-args/erased/erased-machine-state-encoding-with-inline.scala rename to tests/neg/erased-machine-state-encoding-with-inline.scala index f2d1e8300b45..f6f2acd48766 100644 --- a/tests/neg-custom-args/erased/erased-machine-state-encoding-with-inline.scala +++ b/tests/neg/erased-machine-state-encoding-with-inline.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + import scala.compiletime.* sealed trait State diff --git a/tests/neg-custom-args/erased/erased-match.scala b/tests/neg/erased-match.scala similarity index 82% rename from tests/neg-custom-args/erased/erased-match.scala rename to tests/neg/erased-match.scala index 2d8057519bfc..d05a0c7e0f03 100644 --- a/tests/neg-custom-args/erased/erased-match.scala +++ b/tests/neg/erased-match.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + object Test { var b = true def foo(erased a: Int): Int = { diff --git a/tests/neg/erased-object.scala b/tests/neg/erased-object.scala new file mode 100644 index 000000000000..6f616d02010f --- /dev/null +++ b/tests/neg/erased-object.scala @@ -0,0 +1,3 @@ +//> using options -language:experimental.erasedDefinitions + +erased object Test // error diff --git a/tests/neg-custom-args/erased/erased-path.scala b/tests/neg/erased-path.scala similarity index 76% rename from 
tests/neg-custom-args/erased/erased-path.scala rename to tests/neg/erased-path.scala index f7fbda262e8e..ece90e563483 100644 --- a/tests/neg-custom-args/erased/erased-path.scala +++ b/tests/neg/erased-path.scala @@ -1,3 +1,4 @@ +//> using options -language:experimental.erasedDefinitions trait Sys { type X } diff --git a/tests/neg-custom-args/erased/erased-pathdep-1.scala b/tests/neg/erased-pathdep-1.scala similarity index 90% rename from tests/neg-custom-args/erased/erased-pathdep-1.scala rename to tests/neg/erased-pathdep-1.scala index 55e8b89013b2..422ceb5e37fe 100644 --- a/tests/neg-custom-args/erased/erased-pathdep-1.scala +++ b/tests/neg/erased-pathdep-1.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + // Could become a neg test if we had totality checking for erased arguments object Test { diff --git a/tests/neg-custom-args/erased/erased-pathdep-2.scala b/tests/neg/erased-pathdep-2.scala similarity index 83% rename from tests/neg-custom-args/erased/erased-pathdep-2.scala rename to tests/neg/erased-pathdep-2.scala index 29dcf216b32e..0b50acbf3b30 100644 --- a/tests/neg-custom-args/erased/erased-pathdep-2.scala +++ b/tests/neg/erased-pathdep-2.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + // Could become a neg test if we had totality checking for erased arguments object Test { diff --git a/tests/neg-custom-args/erased/erased-return.scala b/tests/neg/erased-return.scala similarity index 75% rename from tests/neg-custom-args/erased/erased-return.scala rename to tests/neg/erased-return.scala index f7cf15a079be..f1abdef1df22 100644 --- a/tests/neg-custom-args/erased/erased-return.scala +++ b/tests/neg/erased-return.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + object Test { var b = true def foo(erased a: Int): Int = { diff --git a/tests/neg/erased-singleton.scala b/tests/neg/erased-singleton.scala new file mode 100644 index 000000000000..5ffa78e24b07 --- /dev/null +++ b/tests/neg/erased-singleton.scala @@ -0,0 +1,9 @@ +//> using options -language:experimental.erasedDefinitions + +trait Sys + +trait Obj { + erased val s: Sys + + type S = s.type // error: non final +} diff --git a/tests/neg-custom-args/erased/erased-try.scala b/tests/neg/erased-try.scala similarity index 78% rename from tests/neg-custom-args/erased/erased-try.scala rename to tests/neg/erased-try.scala index 3e9aae8ab9a4..0de3732a4e6e 100644 --- a/tests/neg-custom-args/erased/erased-try.scala +++ b/tests/neg/erased-try.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + object Test { def foo(erased a: Int): Int = { try { diff --git a/tests/neg/erased-type.scala b/tests/neg/erased-type.scala new file mode 100644 index 000000000000..4df6780ff2f4 --- /dev/null +++ b/tests/neg/erased-type.scala @@ -0,0 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + +class Test { + erased type T // error +} diff --git a/tests/neg/erased-val-rhs.scala b/tests/neg/erased-val-rhs.scala new file mode 100644 index 000000000000..f8bb838e4298 --- /dev/null +++ b/tests/neg/erased-val-rhs.scala @@ -0,0 +1,8 @@ +//> using options -language:experimental.erasedDefinitions + +object Test { + def f(erased i: Int) = { + val j: Int = i // error + () + } +} diff --git a/tests/neg/erased-value-class.scala b/tests/neg/erased-value-class.scala new file mode 100644 index 000000000000..81dfeeee3e1c --- /dev/null +++ b/tests/neg/erased-value-class.scala @@ -0,0 +1,5 @@ +//> using options 
-language:experimental.erasedDefinitions + +class Foo(erased x: Int) extends AnyVal // error + +class Bar(x: Int)(y: Int) extends AnyVal // error diff --git a/tests/neg/erased-var.scala b/tests/neg/erased-var.scala new file mode 100644 index 000000000000..51c9221e3bd9 --- /dev/null +++ b/tests/neg/erased-var.scala @@ -0,0 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + +object Test { + erased var i: Int = 1 // error +} diff --git a/tests/pos-custom-args/erasedInline.scala b/tests/neg/erasedInline.scala similarity index 100% rename from tests/pos-custom-args/erasedInline.scala rename to tests/neg/erasedInline.scala diff --git a/tests/neg/erasedValueb.scala b/tests/neg/erasedValueb.scala new file mode 100644 index 000000000000..5c1f1d359e93 --- /dev/null +++ b/tests/neg/erasedValueb.scala @@ -0,0 +1,9 @@ +//> using options -language:experimental.erasedDefinitions + +import scala.compiletime.erasedValue +object Test { + def foo0(a: Int): Int = 3 + def foo1(erased a: Int): Int = 3 + foo0(erasedValue[Int]) // error + foo1(erasedValue[Int]) +} diff --git a/tests/neg/expeimental-flag-with-lang-feature-1.scala b/tests/neg/expeimental-flag-with-lang-feature-1.scala new file mode 100644 index 000000000000..a5ece729fa3d --- /dev/null +++ b/tests/neg/expeimental-flag-with-lang-feature-1.scala @@ -0,0 +1,5 @@ +//> using options -Yno-experimental + +import scala.language.experimental.erasedDefinitions + +erased def erasedFun(erased x: Int): Int = x // error // error diff --git a/tests/neg/expeimental-flag-with-lang-feature-2.scala b/tests/neg/expeimental-flag-with-lang-feature-2.scala new file mode 100644 index 000000000000..3e0b9359711a --- /dev/null +++ b/tests/neg/expeimental-flag-with-lang-feature-2.scala @@ -0,0 +1,7 @@ +//> using options -Yno-experimental + +import scala.language.experimental.namedTypeArguments // error + +def namedTypeArgumentsFun[T, U]: Int = + namedTypeArgumentsFun[T = Int, U = Int] + namedTypeArgumentsFun[U = Int, T = Int] diff --git a/tests/neg/expeimental-flag.scala b/tests/neg/expeimental-flag.scala new file mode 100644 index 000000000000..8b2e729ea8da --- /dev/null +++ b/tests/neg/expeimental-flag.scala @@ -0,0 +1,18 @@ +//> using options -Yno-experimental + +import scala.annotation.experimental + +class Foo: + def foo: Int = experimentalDef // error + +class Bar: + def bar: Int = experimentalDef // error +object Bar: + def bar: Int = experimentalDef // error + +object Baz: + def bar: Int = experimentalDef // error + +def toplevelMethod: Int = experimentalDef // error + +@experimental def experimentalDef: Int = 1 diff --git a/tests/neg-custom-args/no-experimental/experimental-2.scala b/tests/neg/experimental-2.scala similarity index 85% rename from tests/neg-custom-args/no-experimental/experimental-2.scala rename to tests/neg/experimental-2.scala index e2a8dcef58b8..f2d0262d83f5 100644 --- a/tests/neg-custom-args/no-experimental/experimental-2.scala +++ b/tests/neg/experimental-2.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + class Test7 { import scala.language.experimental import experimental.genericNumberLiterals // error: no aliases can be used to refer to a language import diff --git a/tests/neg-custom-args/no-experimental/experimental-erased.scala b/tests/neg/experimental-erased.scala similarity index 84% rename from tests/neg-custom-args/no-experimental/experimental-erased.scala rename to tests/neg/experimental-erased.scala index c80c3e0d4b49..3619d0059a95 100644 --- a/tests/neg-custom-args/no-experimental/experimental-erased.scala 
+++ b/tests/neg/experimental-erased.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import language.experimental.erasedDefinitions import annotation.experimental diff --git a/tests/neg-custom-args/no-experimental/experimental-imports.scala b/tests/neg/experimental-imports.scala similarity index 97% rename from tests/neg-custom-args/no-experimental/experimental-imports.scala rename to tests/neg/experimental-imports.scala index 63a150978b1c..3a672ac65a22 100644 --- a/tests/neg-custom-args/no-experimental/experimental-imports.scala +++ b/tests/neg/experimental-imports.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import annotation.experimental @experimental diff --git a/tests/neg-custom-args/no-experimental/experimental-nested-imports-2.scala b/tests/neg/experimental-nested-imports-2.scala similarity index 96% rename from tests/neg-custom-args/no-experimental/experimental-nested-imports-2.scala rename to tests/neg/experimental-nested-imports-2.scala index a4962c6153a0..4aac719a81d6 100644 --- a/tests/neg-custom-args/no-experimental/experimental-nested-imports-2.scala +++ b/tests/neg/experimental-nested-imports-2.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import annotation.experimental class Class1: diff --git a/tests/neg-custom-args/no-experimental/experimental-nested-imports-3.scala b/tests/neg/experimental-nested-imports-3.scala similarity index 96% rename from tests/neg-custom-args/no-experimental/experimental-nested-imports-3.scala rename to tests/neg/experimental-nested-imports-3.scala index 77fbe41479d2..39b548b2586b 100644 --- a/tests/neg-custom-args/no-experimental/experimental-nested-imports-3.scala +++ b/tests/neg/experimental-nested-imports-3.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import annotation.experimental class Class1: diff --git a/tests/neg-custom-args/no-experimental/experimental-nested-imports.scala b/tests/neg/experimental-nested-imports.scala similarity index 96% rename from tests/neg-custom-args/no-experimental/experimental-nested-imports.scala rename to tests/neg/experimental-nested-imports.scala index 180c43b9f671..91fe3bfeb27b 100644 --- a/tests/neg-custom-args/no-experimental/experimental-nested-imports.scala +++ b/tests/neg/experimental-nested-imports.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import annotation.experimental class Class1: diff --git a/tests/neg-custom-args/no-experimental/experimental-package-imports.scala b/tests/neg/experimental-package-imports.scala similarity index 93% rename from tests/neg-custom-args/no-experimental/experimental-package-imports.scala rename to tests/neg/experimental-package-imports.scala index 047b3eb61e82..7a4b04606b9d 100644 --- a/tests/neg-custom-args/no-experimental/experimental-package-imports.scala +++ b/tests/neg/experimental-package-imports.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import annotation.experimental package foo { diff --git a/tests/neg-custom-args/no-experimental/experimental.scala b/tests/neg/experimental.scala similarity index 95% rename from tests/neg-custom-args/no-experimental/experimental.scala rename to tests/neg/experimental.scala index 42d0d8066c35..efca9a26ec14 100644 --- a/tests/neg-custom-args/no-experimental/experimental.scala +++ b/tests/neg/experimental.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + class Test0 { import language.experimental.namedTypeArguments // error object Foo { diff --git a/tests/neg-custom-args/no-experimental/experimentalAnnot.scala 
b/tests/neg/experimentalAnnot.scala similarity index 91% rename from tests/neg-custom-args/no-experimental/experimentalAnnot.scala rename to tests/neg/experimentalAnnot.scala index e6dfbf28f8bb..e50d9165b5a4 100644 --- a/tests/neg-custom-args/no-experimental/experimentalAnnot.scala +++ b/tests/neg/experimentalAnnot.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import scala.annotation.experimental @experimental class myExperimentalAnnot extends scala.annotation.Annotation diff --git a/tests/neg-custom-args/no-experimental/experimentalCaseClass.scala b/tests/neg/experimentalCaseClass.scala similarity index 90% rename from tests/neg-custom-args/no-experimental/experimentalCaseClass.scala rename to tests/neg/experimentalCaseClass.scala index b112c8a1213a..383824954041 100644 --- a/tests/neg-custom-args/no-experimental/experimentalCaseClass.scala +++ b/tests/neg/experimentalCaseClass.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import scala.annotation.experimental @experimental diff --git a/tests/neg-custom-args/no-experimental/experimentalDefaultParams.scala b/tests/neg/experimentalDefaultParams.scala similarity index 91% rename from tests/neg-custom-args/no-experimental/experimentalDefaultParams.scala rename to tests/neg/experimentalDefaultParams.scala index 4dedb3afa11d..fb9ffa282e60 100644 --- a/tests/neg-custom-args/no-experimental/experimentalDefaultParams.scala +++ b/tests/neg/experimentalDefaultParams.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import scala.annotation.experimental @experimental def x = 2 diff --git a/tests/neg-custom-args/no-experimental/experimentalEnum.scala b/tests/neg/experimentalEnum.scala similarity index 80% rename from tests/neg-custom-args/no-experimental/experimentalEnum.scala rename to tests/neg/experimentalEnum.scala index 1cbe78ca5427..14ced7ca22bb 100644 --- a/tests/neg-custom-args/no-experimental/experimentalEnum.scala +++ b/tests/neg/experimentalEnum.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import scala.annotation.experimental @experimental diff --git a/tests/neg-custom-args/no-experimental/experimentalErased.scala b/tests/neg/experimentalErased.scala similarity index 90% rename from tests/neg-custom-args/no-experimental/experimentalErased.scala rename to tests/neg/experimentalErased.scala index 6fcb11a3cc2f..99de8048c261 100644 --- a/tests/neg-custom-args/no-experimental/experimentalErased.scala +++ b/tests/neg/experimentalErased.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import language.experimental.erasedDefinitions import annotation.experimental diff --git a/tests/neg/experimentalInline.scala b/tests/neg/experimentalInline.scala new file mode 100644 index 000000000000..b837ad498492 --- /dev/null +++ b/tests/neg/experimentalInline.scala @@ -0,0 +1,10 @@ +//> using options -Yno-experimental + +import scala.annotation.experimental + +@experimental +inline def g() = () + +def test: Unit = + g() // error + () diff --git a/tests/neg-custom-args/no-experimental/experimentalInline2.scala b/tests/neg/experimentalInline2.scala similarity index 76% rename from tests/neg-custom-args/no-experimental/experimentalInline2.scala rename to tests/neg/experimentalInline2.scala index c40eb050a832..8bf6a82fba2e 100644 --- a/tests/neg-custom-args/no-experimental/experimentalInline2.scala +++ b/tests/neg/experimentalInline2.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import scala.annotation.experimental @experimental diff --git 
a/tests/neg-custom-args/no-experimental/experimentalMembers.scala b/tests/neg/experimentalMembers.scala similarity index 93% rename from tests/neg-custom-args/no-experimental/experimentalMembers.scala rename to tests/neg/experimentalMembers.scala index e30f27b069a8..fe4adbfcf44d 100644 --- a/tests/neg-custom-args/no-experimental/experimentalMembers.scala +++ b/tests/neg/experimentalMembers.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import scala.annotation.experimental @experimental def x = 2 diff --git a/tests/neg-custom-args/no-experimental/experimentalOverride.scala b/tests/neg/experimentalOverride.scala similarity index 94% rename from tests/neg-custom-args/no-experimental/experimentalOverride.scala rename to tests/neg/experimentalOverride.scala index 653bd3b23da4..adc8b919dc6b 100644 --- a/tests/neg-custom-args/no-experimental/experimentalOverride.scala +++ b/tests/neg/experimentalOverride.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import scala.annotation.experimental @experimental diff --git a/tests/neg-custom-args/no-experimental/experimentalRHS.scala b/tests/neg/experimentalRHS.scala similarity index 92% rename from tests/neg-custom-args/no-experimental/experimentalRHS.scala rename to tests/neg/experimentalRHS.scala index 27143c120b96..cffa35ed4ba9 100644 --- a/tests/neg-custom-args/no-experimental/experimentalRHS.scala +++ b/tests/neg/experimentalRHS.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import scala.annotation.experimental @experimental diff --git a/tests/neg-custom-args/no-experimental/experimentalSam.scala b/tests/neg/experimentalSam.scala similarity index 82% rename from tests/neg-custom-args/no-experimental/experimentalSam.scala rename to tests/neg/experimentalSam.scala index cdc9e61858d9..ab86cabc6816 100644 --- a/tests/neg-custom-args/no-experimental/experimentalSam.scala +++ b/tests/neg/experimentalSam.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import scala.annotation.experimental @experimental diff --git a/tests/neg-custom-args/no-experimental/experimentalSignature.scala b/tests/neg/experimentalSignature.scala similarity index 96% rename from tests/neg-custom-args/no-experimental/experimentalSignature.scala rename to tests/neg/experimentalSignature.scala index 9b1d3c5e999f..479f9140d0a0 100644 --- a/tests/neg-custom-args/no-experimental/experimentalSignature.scala +++ b/tests/neg/experimentalSignature.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import scala.annotation.experimental @experimental class A diff --git a/tests/neg-custom-args/no-experimental/experimentalTerms.scala b/tests/neg/experimentalTerms.scala similarity index 94% rename from tests/neg-custom-args/no-experimental/experimentalTerms.scala rename to tests/neg/experimentalTerms.scala index ada5e5b74d2c..10776e78bae7 100644 --- a/tests/neg-custom-args/no-experimental/experimentalTerms.scala +++ b/tests/neg/experimentalTerms.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import scala.annotation.experimental @experimental diff --git a/tests/neg-custom-args/no-experimental/experimentalTests.scala b/tests/neg/experimentalTests.scala similarity index 87% rename from tests/neg-custom-args/no-experimental/experimentalTests.scala rename to tests/neg/experimentalTests.scala index f3fbcf8c587c..a45809c099c4 100644 --- a/tests/neg-custom-args/no-experimental/experimentalTests.scala +++ b/tests/neg/experimentalTests.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import scala.annotation.experimental 
@experimental def x = 2 diff --git a/tests/neg-custom-args/no-experimental/experimentalType.scala b/tests/neg/experimentalType.scala similarity index 88% rename from tests/neg-custom-args/no-experimental/experimentalType.scala rename to tests/neg/experimentalType.scala index f4013788796a..22bdecf415e3 100644 --- a/tests/neg-custom-args/no-experimental/experimentalType.scala +++ b/tests/neg/experimentalType.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import scala.annotation.experimental @experimental diff --git a/tests/neg/experimentalTypeRHS.scala b/tests/neg/experimentalTypeRHS.scala new file mode 100644 index 000000000000..f5801ea2f11d --- /dev/null +++ b/tests/neg/experimentalTypeRHS.scala @@ -0,0 +1,8 @@ +//> using options -Yno-experimental + +import scala.annotation.experimental + +@experimental type E + +type A = E // error +@experimental type B = E diff --git a/tests/neg-custom-args/no-experimental/experimentalTypes2.scala b/tests/neg/experimentalTypes2.scala similarity index 93% rename from tests/neg-custom-args/no-experimental/experimentalTypes2.scala rename to tests/neg/experimentalTypes2.scala index 706fd39fd15c..3d042792c4de 100644 --- a/tests/neg-custom-args/no-experimental/experimentalTypes2.scala +++ b/tests/neg/experimentalTypes2.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import scala.annotation.experimental @experimental class A diff --git a/tests/neg-custom-args/no-experimental/experimentalUnapply.scala b/tests/neg/experimentalUnapply.scala similarity index 90% rename from tests/neg-custom-args/no-experimental/experimentalUnapply.scala rename to tests/neg/experimentalUnapply.scala index 0ba338a15a96..049577bf63fd 100644 --- a/tests/neg-custom-args/no-experimental/experimentalUnapply.scala +++ b/tests/neg/experimentalUnapply.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import scala.annotation.experimental @experimental diff --git a/tests/neg-custom-args/feature/feature-shadowing.scala b/tests/neg/feature-shadowing.scala similarity index 87% rename from tests/neg-custom-args/feature/feature-shadowing.scala rename to tests/neg/feature-shadowing.scala index 270f7c1e12d9..16286d5eea87 100644 --- a/tests/neg-custom-args/feature/feature-shadowing.scala +++ b/tests/neg/feature-shadowing.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -feature + import language.implicitConversions given Conversion[Int, String] = _.toString diff --git a/tests/neg-custom-args/fatal-warnings/filtering-fors.scala b/tests/neg/filtering-fors.scala similarity index 96% rename from tests/neg-custom-args/fatal-warnings/filtering-fors.scala rename to tests/neg/filtering-fors.scala index df0224a5cea3..7d998a37f057 100644 --- a/tests/neg-custom-args/fatal-warnings/filtering-fors.scala +++ b/tests/neg/filtering-fors.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + object Test { val xs: List[AnyRef] = ??? 
diff --git a/tests/neg/foldinf-ill-kinded.check b/tests/neg/foldinf-ill-kinded.check new file mode 100644 index 000000000000..c19c70c00a0c --- /dev/null +++ b/tests/neg/foldinf-ill-kinded.check @@ -0,0 +1,7 @@ +-- [E007] Type Mismatch Error: tests/neg/foldinf-ill-kinded.scala:9:16 ------------------------------------------------- +9 | ys.combine(x) // error + | ^^^^^^^^^^^^^ + | Found: Foo[List] + | Required: Foo[Nothing] + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/foldinf-ill-kinded.scala b/tests/neg/foldinf-ill-kinded.scala new file mode 100644 index 000000000000..d4824561b0fc --- /dev/null +++ b/tests/neg/foldinf-ill-kinded.scala @@ -0,0 +1,10 @@ +class Foo[+T[_]]: + def combine[T1[x] >: T[x]](x: T1[Int]): Foo[T1] = new Foo +object Foo: + def empty: Foo[Nothing] = new Foo + +object X: + def test(xs: List[List[Int]]): Unit = + xs.foldLeft(Foo.empty)((ys, x) => + ys.combine(x) // error + ) diff --git a/tests/neg/gadt-contradictory-pattern.scala b/tests/neg/gadt-contradictory-pattern.scala index 561c0c23d518..6fbd06120a48 100644 --- a/tests/neg/gadt-contradictory-pattern.scala +++ b/tests/neg/gadt-contradictory-pattern.scala @@ -1,3 +1,4 @@ +//> using options -Xfatal-warnings -Wimplausible-patterns object Test { sealed abstract class Foo[T] case object Bar1 extends Foo[Int] diff --git a/tests/neg/hidden-type-errors.check b/tests/neg/hidden-type-errors.check new file mode 100644 index 000000000000..2f4a1748dc67 --- /dev/null +++ b/tests/neg/hidden-type-errors.check @@ -0,0 +1,20 @@ +-- [E007] Type Mismatch Error: tests/neg/hidden-type-errors/Test.scala:8:24 -------------------------------------------- +8 | val x = X.doSomething("XXX") // error + | ^^^^^^^^^^^^^^^^^^^^ + | Found: String + | Required: Int + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | + | Tree: t12717.A.bar("XXX") + | I tried to show that + | String + | conforms to + | Int + | but none of the attempts shown below succeeded: + | + | ==> String <: Int = false + | + | The tests were made under the empty constraint + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-custom-args/explain/hidden-type-errors/Macro.scala b/tests/neg/hidden-type-errors/Macro.scala similarity index 100% rename from tests/neg-custom-args/explain/hidden-type-errors/Macro.scala rename to tests/neg/hidden-type-errors/Macro.scala diff --git a/tests/neg/hidden-type-errors/Test.scala b/tests/neg/hidden-type-errors/Test.scala new file mode 100644 index 000000000000..69516252e486 --- /dev/null +++ b/tests/neg/hidden-type-errors/Test.scala @@ -0,0 +1,8 @@ +//> using options -explain + +package t12717 + + +object Test: + + val x = X.doSomething("XXX") // error diff --git a/tests/neg/i10075.check b/tests/neg/i10075.check new file mode 100644 index 000000000000..6f3e9ab4334a --- /dev/null +++ b/tests/neg/i10075.check @@ -0,0 +1,32 @@ +-- Error: tests/neg/i10075.scala:8:24 ---------------------------------------------------------------------------------- +8 |trait PolyTrait extends PolyFunction // error + | ^^^^^^^^^^^^ + | `PolyFunction` marker trait is reserved for compiler generated refinements +-- Error: tests/neg/i10075.scala:10:24 
--------------------------------------------------------------------------------- +10 |class PolyClass extends PolyTrait { // error + | ^^^^^^^^^ + | `PolyFunction` marker trait is reserved for compiler generated refinements +-- Error: tests/neg/i10075.scala:14:26 --------------------------------------------------------------------------------- +14 |object PolyObject extends PolyFunction // error + | ^^^^^^^^^^^^ + | `PolyFunction` marker trait is reserved for compiler generated refinements +-- Error: tests/neg/i10075.scala:2:14 ---------------------------------------------------------------------------------- +2 |val foo = new PolyFunction { } // error + | ^^^^^^^^^^^^ + | `PolyFunction` marker trait is reserved for compiler generated refinements +-- Error: tests/neg/i10075.scala:3:14 ---------------------------------------------------------------------------------- +3 |val bar = new PolyFunction { def bar = 23 } // error + | ^^^^^^^^^^^^ + | `PolyFunction` marker trait is reserved for compiler generated refinements +-- Error: tests/neg/i10075.scala:4:14 ---------------------------------------------------------------------------------- +4 |val baz = new PolyFunction { def apply = 23 } // error + | ^^^^^^^^^^^^ + | `PolyFunction` marker trait is reserved for compiler generated refinements +-- Error: tests/neg/i10075.scala:5:14 ---------------------------------------------------------------------------------- +5 |val qux = new PolyFunction { def apply[T] = 47 } // error + | ^^^^^^^^^^^^ + | `PolyFunction` marker trait is reserved for compiler generated refinements +-- Error: tests/neg/i10075.scala:6:15 ---------------------------------------------------------------------------------- +6 |val quxx = new PolyFunction { def apply[T](x: T): T = x } // error + | ^^^^^^^^^^^^ + | `PolyFunction` marker trait is reserved for compiler generated refinements diff --git a/tests/neg/i10075.scala b/tests/neg/i10075.scala new file mode 100644 index 000000000000..e1a255ec8b54 --- /dev/null +++ b/tests/neg/i10075.scala @@ -0,0 +1,14 @@ +val poly = [T] => (x: T) => x +val foo = new PolyFunction { } // error +val bar = new PolyFunction { def bar = 23 } // error +val baz = new PolyFunction { def apply = 23 } // error +val qux = new PolyFunction { def apply[T] = 47 } // error +val quxx = new PolyFunction { def apply[T](x: T): T = x } // error + +trait PolyTrait extends PolyFunction // error + +class PolyClass extends PolyTrait { // error + def apply[T](x: T): T = x +} + +object PolyObject extends PolyFunction // error diff --git a/tests/neg/i10137.check b/tests/neg/i10137.check new file mode 100644 index 000000000000..38fed4a50581 --- /dev/null +++ b/tests/neg/i10137.check @@ -0,0 +1,10 @@ +-- Error: tests/neg/i10137.scala:2:12 ------------------------------------------------------- +2 | @main def main(): Unit = println("Hello, World!") // error + | ^ + | The class `foo.main` generated from `@main` will shadow the existing class main in package scala. + | The existing definition might no longer be found on recompile. +-- Error: tests/neg----------------------------------------------- +4 |@main def List(): Unit = println("List") // error + | ^ + | The class `List` generated from `@main` will shadow the existing type List in package scala. + | The existing definition might no longer be found on recompile. 
diff --git a/tests/neg/i10247.scala b/tests/neg/i10247.scala new file mode 100644 index 000000000000..fc6268720a6d --- /dev/null +++ b/tests/neg/i10247.scala @@ -0,0 +1,28 @@ +//> using options -Xfatal-warnings -deprecation + +def usered = Color.Red // error: value Red is deprecated + +object DeprecatedContainer { + @deprecated("no foo", "0.1") val foo = 23 +} + +enum Day { + + @deprecated("no more Mondays!", "0.1") case Monday + +} + +enum Color { + + @deprecated("no Red", "0.1") case Red + + @deprecated("no Generic", "0.1") case Generic(rgb: Int) + + def useFoo1 = DeprecatedContainer.foo // error // check that only enum cases are avoided + def useMonday = Day.Monday // error // check that enum cases are declared in this enum + +} + +object Color { + def useFoo2 = DeprecatedContainer.foo // error // check that only enum cases are avoided +} diff --git a/tests/neg/i10369.scala b/tests/neg/i10369.scala new file mode 100644 index 000000000000..703dea249d7a --- /dev/null +++ b/tests/neg/i10369.scala @@ -0,0 +1,10 @@ +type Upgrade[T] = T match + case Int => Double + case Char => String + case Boolean => Boolean + +val upgrade: [t] => t => Upgrade[t] = new PolyFunction: // error + def apply[T](x: T): Upgrade[T] = x match + case x: Int => x.toDouble + case x: Char => x.toString + case x: Boolean => !x diff --git a/tests/neg-strict/i1050.scala b/tests/neg/i1050.scala similarity index 98% rename from tests/neg-strict/i1050.scala rename to tests/neg/i1050.scala index 6962dd44f826..2489cbde49c9 100644 --- a/tests/neg-strict/i1050.scala +++ b/tests/neg/i1050.scala @@ -1,3 +1,5 @@ +//> using options -source future -deprecation -Xfatal-warnings + // i1050 checks failing at posttyper trait A { type L <: Nothing } trait B { type L >: Any} diff --git a/tests/neg/i10901.check b/tests/neg/i10901.check index e055bed7dd3a..4a8fa5db28bf 100644 --- a/tests/neg/i10901.check +++ b/tests/neg/i10901.check @@ -12,11 +12,11 @@ | [T1, T2] | (x: BugExp4Point2D.ColumnType[T1]) | (y: BugExp4Point2D.ColumnType[T2]) - | (implicit evidence$7: Numeric[T1], evidence$8: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] + | (implicit evidence$1: Numeric[T1], evidence$2: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] | [T1, T2] | (x: T1) | (y: BugExp4Point2D.ColumnType[T2]) - | (implicit evidence$5: Numeric[T1], evidence$6: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] + | (implicit evidence$1: Numeric[T1], evidence$2: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] | both match arguments ((x : BugExp4Point2D.IntT.type))((y : BugExp4Point2D.DoubleT.type)) -- [E008] Not Found Error: tests/neg/i10901.scala:48:38 ---------------------------------------------------------------- 48 | val pos4: Point2D[Int,Double] = x º 201.1 // error @@ -31,8 +31,8 @@ | Ambiguous overload. 
The overloaded alternatives of method º in object dsl with types | [T1, T2] | (x: BugExp4Point2D.ColumnType[T1]) - | (y: T2)(implicit evidence$9: Numeric[T1], evidence$10: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] - | [T1, T2](x: T1)(y: T2)(implicit evidence$3: Numeric[T1], evidence$4: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] + | (y: T2)(implicit evidence$1: Numeric[T1], evidence$2: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] + | [T1, T2](x: T1)(y: T2)(implicit evidence$1: Numeric[T1], evidence$2: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] | both match arguments ((x : BugExp4Point2D.IntT.type))((201.1d : Double)) -- [E008] Not Found Error: tests/neg/i10901.scala:62:16 ---------------------------------------------------------------- 62 | val y = "abc".foo // error diff --git a/tests/neg/i10901.scala b/tests/neg/i10901.scala index 19a2e3023c85..dc1ea6e6eef6 100644 --- a/tests/neg/i10901.scala +++ b/tests/neg/i10901.scala @@ -13,23 +13,23 @@ object BugExp4Point2D { // N - N @targetName("point2DConstant") - def º(y: T2): Point2D[T1,T2] = ??? + infix def º(y: T2): Point2D[T1,T2] = ??? // N - C @targetName("point2DConstantData") - def º(y: ColumnType[T2]): Point2D[T1,T2] = ??? + infix def º(y: ColumnType[T2]): Point2D[T1,T2] = ??? extension [T1:Numeric, T2:Numeric](x: ColumnType[T1]) // C - C @targetName("point2DData") - def º(y: ColumnType[T2]): Point2D[T1,T2] = ??? + infix def º(y: ColumnType[T2]): Point2D[T1,T2] = ??? // C - N @targetName("point2DDataConstant") - def º(y: T2): Point2D[T1,T2] = ??? + infix def º(y: T2): Point2D[T1,T2] = ??? } diff --git a/tests/neg-custom-args/fatal-warnings/i10930.scala b/tests/neg/i10930.scala similarity index 92% rename from tests/neg-custom-args/fatal-warnings/i10930.scala rename to tests/neg/i10930.scala index d1fbdde10574..5f8a0ca1ba76 100644 --- a/tests/neg-custom-args/fatal-warnings/i10930.scala +++ b/tests/neg/i10930.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + import language.future @main def Test = type LeafElem[X] = X match diff --git a/tests/neg/i10994.scala b/tests/neg/i10994.scala new file mode 100644 index 000000000000..f5f237f22dd6 --- /dev/null +++ b/tests/neg/i10994.scala @@ -0,0 +1,4 @@ +//> using options -Xfatal-warnings + +def foo = true match + case (b: Boolean): Boolean => () // error diff --git a/tests/neg/i11022.check b/tests/neg/i11022.check new file mode 100644 index 000000000000..55bdb0526264 --- /dev/null +++ b/tests/neg/i11022.check @@ -0,0 +1,20 @@ +-- Error: tests/neg/i11022.scala:10:7 ---------------------------------------------------------------------------------- +10 |val a: CaseClass = CaseClass(42) // error: deprecated type // error: deprecated apply method + | ^^^^^^^^^ + | class CaseClass is deprecated: no CaseClass +-- Error: tests/neg/i11022.scala:10:19 --------------------------------------------------------------------------------- +10 |val a: CaseClass = CaseClass(42) // error: deprecated type // error: deprecated apply method + | ^^^^^^^^^ + | class CaseClass is deprecated: no CaseClass +-- Error: tests/neg/i11022.scala:11:7 ---------------------------------------------------------------------------------- +11 |val b: CaseClass = new CaseClass(42) // error: deprecated type // error: deprecated class + | ^^^^^^^^^ + | class CaseClass is deprecated: no CaseClass +-- Error: tests/neg/i11022.scala:11:23 --------------------------------------------------------------------------------- +11 |val b: CaseClass = new CaseClass(42) // error: deprecated type // error: deprecated class + | ^^^^^^^^^ + 
| class CaseClass is deprecated: no CaseClass +-- Error: tests/neg/i11022.scala:12:14 --------------------------------------------------------------------------------- +12 |val c: Unit = CaseClass(42).magic() // error: deprecated apply method + | ^^^^^^^^^ + | class CaseClass is deprecated: no CaseClass diff --git a/tests/neg-custom-args/deprecation/i11022.scala b/tests/neg/i11022.scala similarity index 90% rename from tests/neg-custom-args/deprecation/i11022.scala rename to tests/neg/i11022.scala index 4608017eeed9..14bc600666f9 100644 --- a/tests/neg-custom-args/deprecation/i11022.scala +++ b/tests/neg/i11022.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -deprecation + @deprecated("no CaseClass") case class CaseClass(rgb: Int): def magic(): Unit = () diff --git a/tests/neg-custom-args/fatal-warnings/i11097.scala b/tests/neg/i11097.scala similarity index 92% rename from tests/neg-custom-args/fatal-warnings/i11097.scala rename to tests/neg/i11097.scala index 763babff81e2..149fe89249c8 100644 --- a/tests/neg-custom-args/fatal-warnings/i11097.scala +++ b/tests/neg/i11097.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + @main def test: Unit = { class C { type T1; type T2 } diff --git a/tests/neg/i11225b.scala b/tests/neg/i11225b.scala new file mode 100644 index 000000000000..a9dd1d3d2227 --- /dev/null +++ b/tests/neg/i11225b.scala @@ -0,0 +1,12 @@ +//> using options -source future -deprecation -Xfatal-warnings + +import compiletime.uninitialized + +class Memo[A](x: => A): + private var cached: A = _ // error + private var known: Boolean = false + def force = + if !known then + known = true + cached = x + cached diff --git a/tests/neg/i11333.check b/tests/neg/i11333.check new file mode 100644 index 000000000000..ba5723488899 --- /dev/null +++ b/tests/neg/i11333.check @@ -0,0 +1,30 @@ +-- [E167] Lossy Conversion Error: tests/neg/i11333.scala:4:19 ---------------------------------------------------------- +4 | val f1: Float = 123456789 // error + | ^^^^^^^^^ + | Widening conversion from Int to Float loses precision. + | Write `.toFloat` instead. +-- [E167] Lossy Conversion Error: tests/neg/i11333.scala:5:19 ---------------------------------------------------------- +5 | val d1: Double = 1234567890123456789L // error + | ^^^^^^^^^^^^^^^^^^^^ + | Widening conversion from Long to Double loses precision. + | Write `.toDouble` instead. +-- [E167] Lossy Conversion Error: tests/neg/i11333.scala:6:19 ---------------------------------------------------------- +6 | val f2: Float = 123456789L // error + | ^^^^^^^^^^ + | Widening conversion from Long to Float loses precision. + | Write `.toFloat` instead. +-- [E167] Lossy Conversion Error: tests/neg/i11333.scala:12:21 --------------------------------------------------------- +12 | val f1_b: Float = i1 // error + | ^^ + | Widening conversion from Int to Float loses precision. + | Write `.toFloat` instead. +-- [E167] Lossy Conversion Error: tests/neg/i11333.scala:13:21 --------------------------------------------------------- +13 | val d1_b: Double = l1 // error + | ^^ + | Widening conversion from Long to Double loses precision. + | Write `.toDouble` instead. +-- [E167] Lossy Conversion Error: tests/neg/i11333.scala:14:21 --------------------------------------------------------- +14 | val f2_b: Float = l2 // error + | ^^ + | Widening conversion from Long to Float loses precision. + | Write `.toFloat` instead. 
diff --git a/tests/neg-custom-args/fatal-warnings/i11333.scala b/tests/neg/i11333.scala similarity index 90% rename from tests/neg-custom-args/fatal-warnings/i11333.scala rename to tests/neg/i11333.scala index 3ba39efeb29e..bbdcceaf7e1e 100644 --- a/tests/neg-custom-args/fatal-warnings/i11333.scala +++ b/tests/neg/i11333.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + class C: val f1: Float = 123456789 // error val d1: Double = 1234567890123456789L // error diff --git a/tests/neg-custom-args/deprecation/i11344.scala b/tests/neg/i11344.scala similarity index 76% rename from tests/neg-custom-args/deprecation/i11344.scala rename to tests/neg/i11344.scala index 4829b9fcef6b..0602fe14a995 100644 --- a/tests/neg-custom-args/deprecation/i11344.scala +++ b/tests/neg/i11344.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -deprecation + trait Pet(val name: String, rest: Int): def f(suffix: String) = s"$name$suffix$rest" diff --git a/tests/neg/i11350.check b/tests/neg/i11350.check index 3a43ac6a12b8..1aec236cbbd5 100644 --- a/tests/neg/i11350.check +++ b/tests/neg/i11350.check @@ -3,12 +3,20 @@ | ^ | Missing parameter type | - | I could not infer the type of the parameter evidence$1. + | I could not infer the type of the parameter contextual$1 + | in expanded function: + | contextual$1 ?=> "" | What I could infer was: A1[] + | Expected type for the whole anonymous function: + | (A1[]) ?=> String -- [E081] Type Error: tests/neg/i11350.scala:2:39 ---------------------------------------------------------------------- 2 |class A2[T](action: A1[T] ?=> String = summon[A1[T]]) // error | ^ | Missing parameter type | - | I could not infer the type of the parameter evidence$2. + | I could not infer the type of the parameter contextual$2 + | in expanded function: + | contextual$2 ?=> summon[A1[T]] | What I could infer was: A1[] + | Expected type for the whole anonymous function: + | (A1[]) ?=> String diff --git a/tests/neg/i11561.check b/tests/neg/i11561.check index 96bf1ec6accf..28d7e355c499 100644 --- a/tests/neg/i11561.check +++ b/tests/neg/i11561.check @@ -3,8 +3,11 @@ | ^ | Missing parameter type | - | I could not infer the type of the parameter _$1 of expanded function: - | _$1 => State.this.text = _$1. 
+ | I could not infer the type of the parameter _$1 + | in expanded function: + | _$1 => State.this.text = _$1 + | Expected type for the whole anonymous function: + | String -- [E052] Type Error: tests/neg/i11561.scala:3:30 ---------------------------------------------------------------------- 3 | val updateText2 = copy(text = (_: String)) // error | ^^^^^^^^^^^^^^^^^^ diff --git a/tests/neg/i11567.scala b/tests/neg/i11567.scala index a6eed7cf0271..89fc1092c04b 100644 --- a/tests/neg/i11567.scala +++ b/tests/neg/i11567.scala @@ -1,4 +1,4 @@ -import language.`future-migration` +import language.future class Test object Test { def foo[A <% Test](x: A) = x // error diff --git a/tests/neg/i11637.check b/tests/neg/i11637.check new file mode 100644 index 000000000000..5ea5c2d5a2cf --- /dev/null +++ b/tests/neg/i11637.check @@ -0,0 +1,42 @@ +-- [E057] Type Mismatch Error: tests/neg/i11637.scala:13:33 ------------------------------------------------------------ +13 | var h = new HKT3_1[FunctorImpl](); // error // error + | ^ + | Type argument test2.FunctorImpl does not conform to upper bound [Generic2[T <: String] <: Set[T]] =>> Any + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | I tried to show that + | test2.FunctorImpl + | conforms to + | [Generic2[T <: String] <: Set[T]] =>> Any + | but none of the attempts shown below succeeded: + | + | ==> test2.FunctorImpl <: [Generic2[T <: String] <: Set[T]] =>> Any + | ==> type bounds [[T <: String] <: Set[T]] <: type bounds [[T] <: Iterable[T]] + | ==> [T <: String] =>> Set[T] <: Iterable + | ==> type bounds [] <: type bounds [ <: String] + | ==> Any <: String = false + | + | The tests were made under the empty constraint + -------------------------------------------------------------------------------------------------------------------- +-- [E057] Type Mismatch Error: tests/neg/i11637.scala:13:21 ------------------------------------------------------------ +13 | var h = new HKT3_1[FunctorImpl](); // error // error + | ^ + | Type argument test2.FunctorImpl does not conform to upper bound [Generic2[T <: String] <: Set[T]] =>> Any + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | I tried to show that + | test2.FunctorImpl + | conforms to + | [Generic2[T <: String] <: Set[T]] =>> Any + | but none of the attempts shown below succeeded: + | + | ==> test2.FunctorImpl <: [Generic2[T <: String] <: Set[T]] =>> Any + | ==> type bounds [[T <: String] <: Set[T]] <: type bounds [[T] <: Iterable[T]] + | ==> [T <: String] =>> Set[T] <: Iterable + | ==> type bounds [] <: type bounds [ <: String] + | ==> Any <: String = false + | + | The tests were made under the empty constraint + -------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-custom-args/explain/i11637.scala b/tests/neg/i11637.scala similarity index 92% rename from tests/neg-custom-args/explain/i11637.scala rename to tests/neg/i11637.scala index 0b566ecd4328..4721e373f236 100644 --- a/tests/neg-custom-args/explain/i11637.scala +++ b/tests/neg/i11637.scala @@ -1,3 +1,5 
@@ +//> using options -explain + // This is OK object test1: class FunctorImpl[Generic1[T] <: Iterable[T]]{} diff --git a/tests/neg/i11963a.scala b/tests/neg/i11963a.scala new file mode 100644 index 000000000000..0eb2f557624f --- /dev/null +++ b/tests/neg/i11963a.scala @@ -0,0 +1,3 @@ +//> using options -Xfatal-warnings + +open trait Foo // error diff --git a/tests/neg/i11963b.scala b/tests/neg/i11963b.scala new file mode 100644 index 000000000000..76097b225896 --- /dev/null +++ b/tests/neg/i11963b.scala @@ -0,0 +1,3 @@ +//> using options -Xfatal-warnings + +open abstract class Foo // error diff --git a/tests/neg/i11963c.scala b/tests/neg/i11963c.scala new file mode 100644 index 000000000000..f58f68c72368 --- /dev/null +++ b/tests/neg/i11963c.scala @@ -0,0 +1,8 @@ +//> using options -Xfatal-warnings + +object Test { + def foo: Any = { + open class Bar // error + new Bar + } +} diff --git a/tests/neg/i11982.check b/tests/neg/i11982.check deleted file mode 100644 index 304accbf0269..000000000000 --- a/tests/neg/i11982.check +++ /dev/null @@ -1,4 +0,0 @@ --- [E172] Type Error: tests/neg/i11982.scala:22:38 --------------------------------------------------------------------- -22 | val p1: ("msg", 42) = unpair[Tshape] // error: no singleton value for Any - | ^ - |No singleton value available for Any; eligible singleton types for `ValueOf` synthesis include literals and stable paths. diff --git a/tests/neg/i11982.scala b/tests/neg/i11982.scala deleted file mode 100644 index e8ef12ef34e0..000000000000 --- a/tests/neg/i11982.scala +++ /dev/null @@ -1,27 +0,0 @@ -package tuplefun -object Unpair { - - def pair[A, B](using a: ValueOf[A], b: ValueOf[B]): Tuple2[A, B] = - (a.value, b.value) - - def unpair[X <: Tuple2[?, ?]]( - using a: ValueOf[Tuple.Head[X]], - b: ValueOf[Tuple.Head[Tuple.Tail[X]]] - ): Tuple2[Tuple.Head[X], Tuple.Head[Tuple.Tail[X]]] = - type AA = Tuple.Head[X] - type BB = Tuple.Head[Tuple.Tail[X]] - pair[AA, BB](using a, b) -} - -object UnpairApp { - import Unpair._ - - type Tshape = ("msg", 42) - - // the following won't compile when in the same file as Unpair - val p1: ("msg", 42) = unpair[Tshape] // error: no singleton value for Any - - @main def pairHello: Unit = - assert(p1 == ("msg", 42)) - println(p1) -} \ No newline at end of file diff --git a/tests/neg/i11982a.check b/tests/neg/i11982a.check index 1977aa30e8b5..5433688447f7 100644 --- a/tests/neg/i11982a.check +++ b/tests/neg/i11982a.check @@ -10,7 +10,7 @@ | does not uniquely determine parameter xs in | case _ *: xs => xs | The computed bounds for the parameter are: - | xs >: Any *: EmptyTuple.type <: Tuple + | xs <: Any *: EmptyTuple.type | | longer explanation available when compiling with `-explain` -- [E057] Type Mismatch Error: tests/neg/i11982a.scala:10:38 ----------------------------------------------------------- @@ -25,7 +25,7 @@ | does not uniquely determine parameter xs in | case _ *: xs => xs | The computed bounds for the parameter are: - | xs >: Any *: EmptyTuple.type <: Tuple + | xs <: Any *: EmptyTuple.type | | longer explanation available when compiling with `-explain` -- [E057] Type Mismatch Error: tests/neg/i11982a.scala:12:25 ----------------------------------------------------------- @@ -40,6 +40,6 @@ | does not uniquely determine parameter xs in | case _ *: xs => xs | The computed bounds for the parameter are: - | xs >: Any *: EmptyTuple.type <: Tuple + | xs <: Any *: EmptyTuple.type | | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i11982a.scala 
b/tests/neg/i11982a.scala deleted file mode 100644 index 53126eaf3986..000000000000 --- a/tests/neg/i11982a.scala +++ /dev/null @@ -1,14 +0,0 @@ -package tuplefun -object Unpair { - - def pair[A, B](using a: ValueOf[A], b: ValueOf[B]): Tuple2[A, B] = - (a.value, b.value) - - def unpair[X <: Tuple2[?, ?]]( - using a: ValueOf[Tuple.Head[X]], - b: ValueOf[Tuple.Head[Tuple.Tail[X]]] // error - ): Tuple2[Tuple.Head[X], Tuple.Head[Tuple.Tail[X]]] = // error - type AA = Tuple.Head[X] - type BB = Tuple.Head[Tuple.Tail[X]] // error - pair[AA, BB](using a, b) -} \ No newline at end of file diff --git a/tests/neg/i12049.check b/tests/neg/i12049.check index 11c648e35a57..b44eb612f627 100644 --- a/tests/neg/i12049.check +++ b/tests/neg/i12049.check @@ -15,17 +15,17 @@ | case B => String | | longer explanation available when compiling with `-explain` --- Error: tests/neg/i12049.scala:14:23 --------------------------------------------------------------------------------- +-- [E184] Type Error: tests/neg/i12049.scala:14:23 --------------------------------------------------------------------- 14 |val y3: String = ??? : Last[Int *: Int *: Boolean *: String *: EmptyTuple] // error - | ^ - | Match type reduction failed since selector EmptyTuple.type + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Match type reduction failed since selector EmptyTuple | matches none of the cases | | case _ *: _ *: t => Last[t] | case t *: EmptyTuple => t --- Error: tests/neg/i12049.scala:22:26 --------------------------------------------------------------------------------- +-- [E184] Type Error: tests/neg/i12049.scala:22:26 --------------------------------------------------------------------- 22 |val z3: (A, B, A) = ??? : Reverse[(A, B, A)] // error - | ^ + | ^^^^^^^^^^^^^^^^^^ | Match type reduction failed since selector A *: EmptyTuple.type | matches none of the cases | @@ -45,17 +45,17 @@ | Therefore, reduction cannot advance to the remaining case | | case B => String --- Error: tests/neg/i12049.scala:25:26 --------------------------------------------------------------------------------- +-- [E184] Type Error: tests/neg/i12049.scala:25:26 --------------------------------------------------------------------- 25 |val _ = summon[String =:= Last[Int *: Int *: Boolean *: String *: EmptyTuple]] // error - | ^ - | Match type reduction failed since selector EmptyTuple.type + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Match type reduction failed since selector EmptyTuple | matches none of the cases | | case _ *: _ *: t => Last[t] | case t *: EmptyTuple => t --- Error: tests/neg/i12049.scala:26:29 --------------------------------------------------------------------------------- +-- [E184] Type Error: tests/neg/i12049.scala:26:29 --------------------------------------------------------------------- 26 |val _ = summon[(A, B, A) =:= Reverse[(A, B, A)]] // error - | ^ + | ^^^^^^^^^^^^^^^^^^ | Match type reduction failed since selector A *: EmptyTuple.type | matches none of the cases | diff --git a/tests/neg-custom-args/fatal-warnings/i12188/Macro.scala b/tests/neg/i12188/Macro.scala similarity index 100% rename from tests/neg-custom-args/fatal-warnings/i12188/Macro.scala rename to tests/neg/i12188/Macro.scala diff --git a/tests/neg/i12188/Test.scala b/tests/neg/i12188/Test.scala new file mode 100644 index 000000000000..66864438694e --- /dev/null +++ b/tests/neg/i12188/Test.scala @@ -0,0 +1,11 @@ +//> using options -Xfatal-warnings + +sealed trait P +case class PC1(a: String) extends P +case class PC2(b: Int) 
extends P + +def Test = MatchTest.test(PC2(10): P) + +def foo(x: P): Unit = + x match // error + case _: PC1 => \ No newline at end of file diff --git a/tests/neg/i12207.scala b/tests/neg/i12207.scala index 08c1e76f5ecd..8ed8c479d7e6 100644 --- a/tests/neg/i12207.scala +++ b/tests/neg/i12207.scala @@ -5,4 +5,4 @@ extension [T](t: T) inline def pi[P <: Tuple](using P): T = ??? inline def env[P <: Tuple, T](op: P ?=> T): P ?=> T = op @main def Test = - env { pi[String] } // error // error + env { pi[String] } // error diff --git a/tests/neg/i12253.check b/tests/neg/i12253.check new file mode 100644 index 000000000000..75a698249dee --- /dev/null +++ b/tests/neg/i12253.check @@ -0,0 +1,13 @@ +-- [E092] Pattern Match Error: tests/neg/i12253.scala:13:10 ------------------------------------------------------------ +13 | case extractors.InlinedLambda(_, Select(_, name)) => Expr(name) // error // error + | ^ + |the type test for extractors.q2.reflect.Term cannot be checked at runtime because it refers to an abstract type member or type parameter + | + | longer explanation available when compiling with `-explain` +-- [E092] Pattern Match Error: tests/neg/i12253.scala:13:38 ------------------------------------------------------------ +13 | case extractors.InlinedLambda(_, Select(_, name)) => Expr(name) // error // error + | ^ + |the type test for q1.reflect.Select cannot be checked at runtime because it refers to an abstract type member or type parameter + | + | longer explanation available when compiling with `-explain` +there was 1 deprecation warning; re-run with -deprecation for details diff --git a/tests/neg-custom-args/fatal-warnings/i12253.scala b/tests/neg/i12253.scala similarity index 96% rename from tests/neg-custom-args/fatal-warnings/i12253.scala rename to tests/neg/i12253.scala index 88cae0c9f3ec..540ff8ef6d5b 100644 --- a/tests/neg-custom-args/fatal-warnings/i12253.scala +++ b/tests/neg/i12253.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + import scala.quoted.{given, *} import deriving.*, compiletime.* diff --git a/tests/neg/i12284.scala b/tests/neg/i12284.scala index 11635a1638cb..c1e35a3eac41 100644 --- a/tests/neg/i12284.scala +++ b/tests/neg/i12284.scala @@ -1,7 +1,7 @@ trait I[F[_], A] def magic[F[_], A](in: I[F, A]): F[A] = - val deps: Vector[I[F, _]] = ??? + val deps: Vector[I[F, ?]] = ??? val xx = deps.map(i => magic(i)) val y: Vector[F[Any]] = xx // error ??? diff --git a/tests/neg/i12448.scala b/tests/neg/i12448.scala index e495cfd19f1d..dab781729ae1 100644 --- a/tests/neg/i12448.scala +++ b/tests/neg/i12448.scala @@ -1,5 +1,5 @@ object Main { def mkArray[T <: A]: T#AType // error // error - mkArray[Array] // was: "assertion failed: invalid prefix HKTypeLambda..." - val x = mkArray[Array] + mkArray[Array] // was: "assertion failed: invalid prefix HKTypeLambda..." // error + val x = mkArray[Array] // error } diff --git a/tests/neg/i12573.check b/tests/neg/i12573.check index d250f4beabbe..50fe36aa2aa9 100644 --- a/tests/neg/i12573.check +++ b/tests/neg/i12573.check @@ -1,9 +1,8 @@ -- [E008] Not Found Error: tests/neg/i12573.scala:23:38 ---------------------------------------------------------------- 23 |val w: Value[8] = DFBits(Value[8](8)).getDFType.width // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | value getDFType is not a member of DFBits[(8 : Int)]. - | Extension methods were tried, but the search failed with: + |value getDFType is not a member of DFBits[(8 : Int)]. 
+ |Extension methods were tried, but the search failed with: | - | method getDFType cannot be accessed as a member of DFType.type from module class i12573$package$. - | Access to protected method getDFType not permitted because enclosing package object i12573$package - | is not a subclass of object DFType where target is defined \ No newline at end of file + | method getDFType cannot be accessed as a member of DFType.type from the top-level definitions in package . + | protected method getDFType can only be accessed from object DFType. diff --git a/tests/neg/i12597.scala b/tests/neg/i12597.scala new file mode 100644 index 000000000000..b8859a0ae991 --- /dev/null +++ b/tests/neg/i12597.scala @@ -0,0 +1,7 @@ +//> using options -Xfatal-warnings -deprecation + +@main def Test = + val a: IArray[Int] = IArray(2) + val b: IArray[Any] = a + val c = b.toArray // error: deprecated + c(0) = "" diff --git a/tests/neg/i12682.scala b/tests/neg/i12682.scala index 0b37816ef0df..a1850534b4c3 100644 --- a/tests/neg/i12682.scala +++ b/tests/neg/i12682.scala @@ -1,4 +1,4 @@ -// scalac: -explain +//> using options -explain object C: def m(x: Int) = 1 diff --git a/tests/neg-custom-args/fatal-warnings/i13011.scala b/tests/neg/i13011.scala similarity index 94% rename from tests/neg-custom-args/fatal-warnings/i13011.scala rename to tests/neg/i13011.scala index ae534394bf96..c0541aeb7f8e 100644 --- a/tests/neg-custom-args/fatal-warnings/i13011.scala +++ b/tests/neg/i13011.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + class i13011 { lazy implicit val simple1: String = simple1 // error def f: Unit = { @@ -11,7 +13,7 @@ class i13011 { lazy val simple4: String = if firstDigitIsEven(22) then this.simple4 else "a" // ok lazy val simple5: String = identity(this.simple5) // error - + lazy val simple6: String = { // error this.simple6 "aa" diff --git a/tests/neg-custom-args/i13026.scala b/tests/neg/i13026.scala similarity index 76% rename from tests/neg-custom-args/i13026.scala rename to tests/neg/i13026.scala index 9ecf909f7122..847f5e7d8897 100644 --- a/tests/neg-custom-args/i13026.scala +++ b/tests/neg/i13026.scala @@ -1,3 +1,5 @@ +//> using options -print-lines + val x: Int = "not an int" // error val y: Int = "not an int" // error def foo(x: Any) = x.foo // error diff --git a/tests/neg/i13044.check b/tests/neg/i13044.check index c5584aadf767..1fbe978a49b8 100644 --- a/tests/neg/i13044.check +++ b/tests/neg/i13044.check @@ -1,5 +1,5 @@ --- Error: tests/neg/i13044.scala:50:40 --------------------------------------------------------------------------------- -50 | implicit def typeSchema: Schema[A] = Schema.gen // error // error +-- Error: tests/neg/i13044.scala:65:40 --------------------------------------------------------------------------------- +65 | implicit def typeSchema: Schema[A] = Schema.gen // error // error | ^^^^^^^^^^ | given instance gen is declared as `inline`, but was not inlined | @@ -71,8 +71,8 @@ 37 | inline given gen[A]: Schema[A] = derived | ^^^^^^^ -------------------------------------------------------------------------------------------------------------------- --- Error: tests/neg/i13044.scala:50:40 --------------------------------------------------------------------------------- -50 | implicit def typeSchema: Schema[A] = Schema.gen // error // error +-- Error: tests/neg/i13044.scala:65:40 --------------------------------------------------------------------------------- +65 | implicit def typeSchema: Schema[A] = Schema.gen // error // error | ^^^^^^^^^^ | method recurse is declared 
as `inline`, but was not inlined | diff --git a/tests/neg/i13044.scala b/tests/neg/i13044.scala index 081b642c604c..42417a9096f9 100644 --- a/tests/neg/i13044.scala +++ b/tests/neg/i13044.scala @@ -37,7 +37,22 @@ trait SchemaDerivation { inline given gen[A]: Schema[A] = derived } -case class H(i: Int) +case class X15(i: Int) +case class X14(i: X15) +case class X13(i: X14) +case class X12(i: X13) +case class X11(i: X12) +case class X10(i: X11) +case class X9(i: X10) +case class X8(i: X9) +case class X7(i: X8) +case class X6(i: X7) +case class X5(i: X6) +case class X4(i: X5) +case class X3(i: X4) +case class X2(i: X3) +case class X1(i: X2) +case class H(i: X1) case class G(h: H) case class F(g: G) case class E(f: Option[F]) diff --git a/tests/neg-custom-args/no-experimental/i13091.scala b/tests/neg/i13091.scala similarity index 77% rename from tests/neg-custom-args/no-experimental/i13091.scala rename to tests/neg/i13091.scala index 2b08788ebbc1..208d05d6886d 100644 --- a/tests/neg-custom-args/no-experimental/i13091.scala +++ b/tests/neg/i13091.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import annotation.experimental @experimental class Foo diff --git a/tests/neg/i13190.check b/tests/neg/i13190.check new file mode 100644 index 000000000000..d6096eae30e0 --- /dev/null +++ b/tests/neg/i13190.check @@ -0,0 +1,15 @@ + +-- [E172] Type Error: tests/neg/i13190/B_2.scala:14:38 ----------------------------------------------------------------- +14 | summon[FindField[R, "B"] =:= Double] // error + | ^ + | Cannot prove that Test.FindField[Test.R, ("B" : String)] =:= Double. + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.FindField[Test.R, ("B" : String)] + | failed since selector Test.R + | does not match case Opaque.FieldType[("B" : String), f] *: t => f + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case _ *: t => Test.FindField[t, ("B" : String)] diff --git a/tests/pos/i13190/A_1.scala b/tests/neg/i13190/A_1.scala similarity index 96% rename from tests/pos/i13190/A_1.scala rename to tests/neg/i13190/A_1.scala index 9bb9b20f2976..7ab97942eaf9 100644 --- a/tests/pos/i13190/A_1.scala +++ b/tests/neg/i13190/A_1.scala @@ -1,3 +1,3 @@ object Opaque { opaque type FieldType[K, +V] <: V = V -} \ No newline at end of file +} diff --git a/tests/neg/i13190/B_2.scala b/tests/neg/i13190/B_2.scala new file mode 100644 index 000000000000..71b6cac970d3 --- /dev/null +++ b/tests/neg/i13190/B_2.scala @@ -0,0 +1,15 @@ +import Opaque.* + +object Test { + type FindField[R <: scala.Tuple, K] = R match { + case FieldType[K, f] *: t => f + case _ *: t => FindField[t, K] + } + + val f: FieldType["A", Int] = ??? + val f1: Int = f + //val f2: Int = f + + type R = FieldType["A", Int] *: FieldType["B", Double] *: FieldType["C", String] *: FieldType["D", Boolean] *: EmptyTuple + summon[FindField[R, "B"] =:= Double] // error +} diff --git a/tests/neg/i13190b.check b/tests/neg/i13190b.check new file mode 100644 index 000000000000..7708de3769a8 --- /dev/null +++ b/tests/neg/i13190b.check @@ -0,0 +1,14 @@ +-- [E172] Type Error: tests/neg/i13190b.scala:18:38 -------------------------------------------------------------------- +18 | summon[FindField[R, "B"] =:= Double] // error + | ^ + | Cannot prove that Test.FindField[Test.R, ("B" : String)] =:= Double. 
+ | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.FindField[Test.R, ("B" : String)] + | failed since selector Test.R + | does not match case Opaque.FieldType[("B" : String), f] *: t => f + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case _ *: t => Test.FindField[t, ("B" : String)] diff --git a/tests/neg/i13190b.scala b/tests/neg/i13190b.scala new file mode 100644 index 000000000000..0791a171c629 --- /dev/null +++ b/tests/neg/i13190b.scala @@ -0,0 +1,19 @@ +object Opaque { + opaque type FieldType[K, +V] <: V = V +} + +import Opaque.* + +object Test { + type FindField[R <: scala.Tuple, K] = R match { + case FieldType[K, f] *: t => f + case _ *: t => FindField[t, K] + } + + val f: FieldType["A", Int] = ??? + val f1: Int = f + //val f2: Int = f + + type R = FieldType["A", Int] *: FieldType["B", Double] *: FieldType["C", String] *: FieldType["D", Boolean] *: EmptyTuple + summon[FindField[R, "B"] =:= Double] // error +} diff --git a/tests/neg/i13320.check b/tests/neg/i13320.check index 1e336d8fa7bf..557846cc7d7e 100644 --- a/tests/neg/i13320.check +++ b/tests/neg/i13320.check @@ -9,4 +9,4 @@ -- [E008] Not Found Error: tests/neg/i13320.scala:4:22 ----------------------------------------------------------------- 4 |var x: Foo.Booo = Foo.Booo // error // error | ^^^^^^^^ - | value Booo is not a member of object Foo - did you mean Foo.Boo? \ No newline at end of file + | value Booo is not a member of object Foo - did you mean Foo.Boo? diff --git a/tests/neg/i13440.check b/tests/neg/i13440.check new file mode 100644 index 000000000000..277d3dce0bf0 --- /dev/null +++ b/tests/neg/i13440.check @@ -0,0 +1,15 @@ +-- Error: tests/neg/i13440.scala:5:4 ----------------------------------------------------------------------------------- +5 |def given = 42 // error + | ^ + | given is now a keyword, write `given` instead of given to keep it as an identifier + | This can be rewritten automatically under -rewrite -source 3.0-migration. +-- Error: tests/neg/i13440.scala:7:13 ---------------------------------------------------------------------------------- +7 |case class C(enum: List[Int] = Nil) { // error + | ^ + | enum is now a keyword, write `enum` instead of enum to keep it as an identifier + | This can be rewritten automatically under -rewrite -source 3.0-migration. +-- Error: tests/neg/i13440.scala:8:11 ---------------------------------------------------------------------------------- +8 | val s = s"$enum" // error + | ^ + | enum is now a keyword, write `enum` instead of enum to keep it as an identifier + | This can be rewritten automatically under -rewrite -source 3.0-migration. 
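The i13440 messages above state the fix directly: identifiers that became keywords in Scala 3 must be backtick-quoted to stay usable as identifiers. A minimal sketch of the rewritten source, reusing the identifiers from the test (the interpolation on the last line is the backticked form of `s"$enum"`):

```scala
// Backtick quoting keeps `given` and `enum` usable as ordinary identifiers;
// this is the rewrite the messages above say -rewrite -source 3.0-migration applies.
def `given` = 42

case class C(`enum`: List[Int] = Nil) {
  val s = s"${`enum`}" // interpolated keyword-identifiers also need backticks
}
```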
diff --git a/tests/neg-custom-args/fatal-warnings/i13440.scala b/tests/neg/i13440.scala similarity index 78% rename from tests/neg-custom-args/fatal-warnings/i13440.scala rename to tests/neg/i13440.scala index 6cb4956e7434..443215c621ef 100644 --- a/tests/neg-custom-args/fatal-warnings/i13440.scala +++ b/tests/neg/i13440.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + import language.`3.0-migration` def given = 42 // error diff --git a/tests/neg-custom-args/fatal-warnings/i13542.scala b/tests/neg/i13542.scala similarity index 96% rename from tests/neg-custom-args/fatal-warnings/i13542.scala rename to tests/neg/i13542.scala index 0ce46313dcad..c0feb88b556d 100644 --- a/tests/neg-custom-args/fatal-warnings/i13542.scala +++ b/tests/neg/i13542.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + import scala.language.implicitConversions case class Foo(i: Int) extends AnyVal: diff --git a/tests/neg/i13558.scala b/tests/neg/i13558.scala deleted file mode 100644 index 1d4e1c506e43..000000000000 --- a/tests/neg/i13558.scala +++ /dev/null @@ -1,31 +0,0 @@ -package testcode - -class A - -class B - -object ExtensionA { - extension (self: A) { - def id = "A" - } -} -object ExtensionB { - extension (self: B) { - def id = "B" - } -} - -object Main { - def main1(args: Array[String]): Unit = { - import ExtensionB._ - import ExtensionA._ - val a = A() - println(a.id) // error - } - def main2(args: Array[String]): Unit = { - import ExtensionA._ - import ExtensionB._ - val a = A() - println(a.id) // error - } -} \ No newline at end of file diff --git a/tests/neg/i13703.check b/tests/neg/i13703.check index 34f37cc1502f..eb782c982295 100644 --- a/tests/neg/i13703.check +++ b/tests/neg/i13703.check @@ -3,3 +3,10 @@ | ^^^^^^^^^^ | refinement cannot be a mutable var. 
| You can use an explicit getter i and setter i_= instead +-- [E007] Type Mismatch Error: tests/neg/i13703.scala:5:78 ------------------------------------------------------------- +5 |val f2: Foo { val i: Int; def i_=(x: Int): Unit } = new Foo { var i: Int = 0 } // error + | ^ + | Found: Object with Foo {...} + | Required: Foo{val i: Int; def i_=(x: Int): Unit} + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i13703.scala b/tests/neg/i13703.scala index c81d1b6e3e0b..e8e54db8807d 100644 --- a/tests/neg/i13703.scala +++ b/tests/neg/i13703.scala @@ -2,4 +2,6 @@ trait Foo extends reflect.Selectable val f: Foo { var i: Int } = new Foo { var i: Int = 0 } // error -val f2: Foo { val i: Int; def i_=(x: Int): Unit } = new Foo { var i: Int = 0 } // OK +val f2: Foo { val i: Int; def i_=(x: Int): Unit } = new Foo { var i: Int = 0 } // error + +val f3: Foo { def i: Int; def i_=(x: Int): Unit } = new Foo { var i: Int = 0 } // OK diff --git a/tests/neg/i13757-match-type-anykind.scala b/tests/neg/i13757-match-type-anykind.scala index d8273e546dab..a80e8b2b289b 100644 --- a/tests/neg/i13757-match-type-anykind.scala +++ b/tests/neg/i13757-match-type-anykind.scala @@ -1,16 +1,16 @@ object Test: - type AnyKindMatchType1[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded // error + type AnyKindMatchType1[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded case Option[a] => Int type AnyKindMatchType2[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded case Option => Int // error: Missing type parameter for Option - type AnyKindMatchType3[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded // error + type AnyKindMatchType3[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded case _ => Int - type AnyKindMatchType4[X <: Option] = X match // error // error: the scrutinee of a match type cannot be higher-kinded // error + type AnyKindMatchType4[X <: Option] = X match // error // error: the scrutinee of a match type cannot be higher-kinded case _ => Int - type AnyKindMatchType5[X[_]] = X match // error: the scrutinee of a match type cannot be higher-kinded // error + type AnyKindMatchType5[X[_]] = X match // error: the scrutinee of a match type cannot be higher-kinded case _ => Int end Test diff --git a/tests/neg/i13780-1.check b/tests/neg/i13780-1.check new file mode 100644 index 000000000000..029ef3f3ac4b --- /dev/null +++ b/tests/neg/i13780-1.check @@ -0,0 +1,39 @@ +-- [E007] Type Mismatch Error: tests/neg/i13780-1.scala:38:24 ---------------------------------------------------------- +38 | case x: (h *: t) => x.head // error + | ^^^^^^ + | Found: Tuple.Head[VS & h *: t] + | Required: h + | + | where: VS is a type in method foo with bounds <: Tuple + | h is a type in method foo with bounds + | t is a type in method foo with bounds <: Tuple + | + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Tuple.Head[VS & h *: t] + | failed since selector VS & h *: t + | does not uniquely determine parameter x in + | case x *: _ => x + | The computed bounds for the parameter are: + | x <: h + | Note that implicit conversions were not tried because the result of an implicit conversion + | must be more specific than h + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/i13780-1.scala:52:31 
---------------------------------------------------------- +52 | def unpair: SelectH[Y & W] = "" // error + | ^^ + | Found: ("" : String) + | Required: SelectH[A.this.Y & A.this.W] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce SelectH[A.this.Y & A.this.W] + | failed since selector A.this.Y & A.this.W + | does not uniquely determine parameter h in + | case h *: _ => h + | The computed bounds for the parameter are: + | h + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i13780-1.scala b/tests/neg/i13780-1.scala new file mode 100644 index 000000000000..39f06e520797 --- /dev/null +++ b/tests/neg/i13780-1.scala @@ -0,0 +1,69 @@ +/* It is tempting to relax the `isConcrete` test of match types for `AndType` + * in the following way: + * + * (isConcrete(tp.tp1) || !tp.tp1.derivesFrom(targetClass)) && (... same for tp.tp2) + * + * but the test in this file shows that this would be unsound. + * + * If we did relax the rule, it would help usages of match types applied to the + * singleton type of term pattern match scrutinees. For example: + * + * def foo[VS <: Tuple](x: VS): SelectH[VS] = x match + * case x: (h *: t) => x.head + * + * The type of `x` in the branch body is `(VS & (h *: t))`. The result of + * `x.head` is therefore a `Tuple.Head[VS & (h *: t)]`, which does not reduce + * according to the current rules, but would reduce to `h` with the relaxed + * rule. + * + * Note that the code can be fixed with an explicit type argument to `.head`: + * + * def foo[VS <: Tuple](x: VS): SelectH[VS] = x match + * case x: (h *: t) => x.head[h *: t] + * + * So it *seems* like it would be fine to relax the rule, based on the insight + * that `VS` in `Tuple.Head[VS & (h *: t)]` does not contribute anything to the + * computed type capture in `Tuple.Head`. + * + * The test in this file demonstrates that relaxing the rule can cause + * unsoundness. So don't do it. + */ + +type SelectH[VS <: Tuple] = VS match + case h *: ?
=> h + +// The original example found in the fingo/spata library +object ExampleFromSpata: + def foo[VS <: Tuple](x: VS): SelectH[VS] = x match + case x: (h *: t) => x.head // error + + def bar[VS <: Tuple](x: VS): SelectH[VS] = x match + case x: (h *: t) => x.head[h *: t] // ok +end ExampleFromSpata + +trait Z { + type Y <: Tuple + type W <: Tuple + def unpair: SelectH[Y & W] +} + +class A extends Z { + type Y = Tuple2[Any, Any] + def unpair: SelectH[Y & W] = "" // error + def any: Any = unpair +} + +class B extends A { this: Z => + type W = Tuple2[Int, Int] + def int: Int = unpair +} + +class C extends A { this: Z => + type W = Tuple2[String, String] + def string: String = unpair +} + +object Main { + def main(args: Array[String]): Unit = + println((new B).int + 1) // would give ClassCastException +} diff --git a/tests/neg/i13780.check b/tests/neg/i13780.check index aa0a47db5737..69537d8a3f3b 100644 --- a/tests/neg/i13780.check +++ b/tests/neg/i13780.check @@ -1,3 +1,23 @@ +-- [E007] Type Mismatch Error: tests/neg/i13780.scala:12:32 ------------------------------------------------------------ +12 | def unpair[X <: Y]: Head[X] = "" // error + | ^^ + | Found: ("" : String) + | Required: Head[X] + | + | where: X is a type in method unpair with bounds <: A.this.Y + | + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Head[X] + | failed since selector X + | does not uniquely determine parameters a, b in + | case (a, b) => a + | The computed bounds for the parameters are: + | a + | b + | + | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/i13780.scala:18:31 ------------------------------------------------------------ 18 | def int[X <: Y]: Int = unpair[X] // error | ^^^^^^^^^ @@ -14,8 +34,8 @@ | does not uniquely determine parameters a, b in | case (a, b) => a | The computed bounds for the parameters are: - | a >: Int - | b >: Int + | a <: Int + | b <: Int | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/i13780.scala:23:37 ------------------------------------------------------------ @@ -34,7 +54,7 @@ | does not uniquely determine parameters a, b in | case (a, b) => a | The computed bounds for the parameters are: - | a >: String - | b >: String + | a <: String + | b <: String | | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i13780.scala b/tests/neg/i13780.scala index 7e7e2e3ecb74..56badbd356fc 100644 --- a/tests/neg/i13780.scala +++ b/tests/neg/i13780.scala @@ -9,7 +9,7 @@ trait Z { class A extends Z { type Y <: Tuple2[Any, Any] - def unpair[X <: Y]: Head[X] = "" + def unpair[X <: Y]: Head[X] = "" // error def any[X <: Y]: Any = unpair[X] } diff --git a/tests/neg-custom-args/i13838.scala b/tests/neg/i13838.scala similarity index 99% rename from tests/neg-custom-args/i13838.scala rename to tests/neg/i13838.scala index c99d3fa1f82d..2b43ce1c254b 100644 --- a/tests/neg-custom-args/i13838.scala +++ b/tests/neg/i13838.scala @@ -1,3 +1,5 @@ +//> using options -Ximplicit-search-limit 1000 + implicit def catsSyntaxEq[A: Eq](a: A): Foo[A] = ??? 
class Foo[A] diff --git a/tests/neg/i13848.scala b/tests/neg/i13848.scala new file mode 100644 index 000000000000..fcc519c47592 --- /dev/null +++ b/tests/neg/i13848.scala @@ -0,0 +1,9 @@ +//> using options -Yno-experimental + +import annotation.experimental + +@main +def run(): Unit = f // error + +@experimental +def f = 2 diff --git a/tests/neg/i13946/BadPrinter.scala b/tests/neg/i13946/BadPrinter.scala new file mode 100644 index 000000000000..46913d2c2805 --- /dev/null +++ b/tests/neg/i13946/BadPrinter.scala @@ -0,0 +1,6 @@ +//> using options -Xfatal-warnings -feature + +// in BadPrinter.scala +import language.future +class BadPrinter extends Printer: // error + override def print(s: String): Unit = println("Bad!!!") diff --git a/tests/neg-custom-args/feature/i13946/Printer.scala b/tests/neg/i13946/Printer.scala similarity index 100% rename from tests/neg-custom-args/feature/i13946/Printer.scala rename to tests/neg/i13946/Printer.scala diff --git a/tests/neg/i14025.check b/tests/neg/i14025.check index a44cdc67c1f8..803d01485ce3 100644 --- a/tests/neg/i14025.check +++ b/tests/neg/i14025.check @@ -1,8 +1,8 @@ -- [E172] Type Error: tests/neg/i14025.scala:1:88 ---------------------------------------------------------------------- 1 |val foo = summon[deriving.Mirror.Product { type MirroredType = [X] =>> [Y] =>> (X, Y) }] // error | ^ - |No given instance of type deriving.Mirror.Product{type MirroredType[X] = [Y] =>> (X, Y)} was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Product{type MirroredType[X] = [Y] =>> (X, Y)}: type `[X] =>> [Y] =>> (X, Y)` is not a generic product because its subpart `[X] =>> [Y] =>> (X, Y)` is not a supported kind (either `*` or `* -> *`) + |No given instance of type scala.deriving.Mirror.Product{type MirroredType[X] = [Y] =>> (X, Y)} was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type scala.deriving.Mirror.Product{type MirroredType[X] = [Y] =>> (X, Y)}: type `[X] =>> [Y] =>> (X, Y)` is not a generic product because its subpart `[X] =>> [Y] =>> (X, Y)` is not a supported kind (either `*` or `* -> *`) -- [E172] Type Error: tests/neg/i14025.scala:2:90 ---------------------------------------------------------------------- 2 |val bar = summon[deriving.Mirror.Sum { type MirroredType = [X] =>> [Y] =>> List[(X, Y)] }] // error | ^ - |No given instance of type deriving.Mirror.Sum{type MirroredType[X] = [Y] =>> List[(X, Y)]} was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Sum{type MirroredType[X] = [Y] =>> List[(X, Y)]}: type `[X] =>> [Y] =>> List[(X, Y)]` is not a generic sum because its subpart `[X] =>> [Y] =>> List[(X, Y)]` is not a supported kind (either `*` or `* -> *`) + |No given instance of type scala.deriving.Mirror.Sum{type MirroredType[X] = [Y] =>> List[(X, Y)]} was found for parameter x of method summon in object Predef. 
Failed to synthesize an instance of type scala.deriving.Mirror.Sum{type MirroredType[X] = [Y] =>> List[(X, Y)]}: type `[X] =>> [Y] =>> List[(X, Y)]` is not a generic sum because its subpart `[X] =>> [Y] =>> List[(X, Y)]` is not a supported kind (either `*` or `* -> *`) diff --git a/tests/neg/i14127.check b/tests/neg/i14127.check index 15babe8b2775..4319f76df3be 100644 --- a/tests/neg/i14127.check +++ b/tests/neg/i14127.check @@ -1,8 +1,8 @@ -- [E172] Type Error: tests/neg/i14127.scala:6:55 ---------------------------------------------------------------------- 6 | *: Int *: Int *: Int *: Int *: Int *: EmptyTuple)]] // error | ^ - |No given instance of type deriving.Mirror.Of[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, - | Int, Int, Int)] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, - | Int, Int, Int)]: + |No given instance of type scala.deriving.Mirror.Of[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, + | Int, Int, Int, Int, Int)] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type scala.deriving.Mirror.Of[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, + | Int, Int, Int, Int, Int)]: | * class *: is not a generic product because it reduces to a tuple with arity 23, expected arity <= 22 | * class *: is not a generic sum because it does not have subclasses diff --git a/tests/neg/i14177a.scala b/tests/neg/i14177a.scala index 3031271c369b..237eaacb3b66 100644 --- a/tests/neg/i14177a.scala +++ b/tests/neg/i14177a.scala @@ -3,4 +3,4 @@ import scala.compiletime.* trait C[A] inline given [Tup <: Tuple]: C[Tup] with - val cs = summonAll[Tuple.Map[Tup, C]] // error cannot reduce inline match with + val cs = summonAll[Tuple.Map[Tup, C]] // error: Tuple element types must be known at compile time diff --git a/tests/neg/i14363.check b/tests/neg/i14363.check index 4da37612f28b..0c356ceec407 100644 --- a/tests/neg/i14363.check +++ b/tests/neg/i14363.check @@ -9,8 +9,8 @@ | | One of the following imports might fix the problem: | - | import math.BigDecimal.int2bigDecimal - | import math.BigInt.int2bigInt + | import scala.math.BigDecimal.int2bigDecimal + | import scala.math.BigInt.int2bigInt | | | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i14386.scala b/tests/neg/i14386.scala index c25b6ba9f3c1..be0b2497ed5f 100644 --- a/tests/neg/i14386.scala +++ b/tests/neg/i14386.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror def logLevelDetail(level: Int): String = s"""$level diff --git a/tests/neg/i14432.check b/tests/neg/i14432.check index d19d952b0153..d649987555fd 100644 --- a/tests/neg/i14432.check +++ b/tests/neg/i14432.check @@ -1,6 +1,6 @@ -- [E172] Type Error: tests/neg/i14432.scala:13:33 --------------------------------------------------------------------- 13 |val mFoo = summon[Mirror.Of[Foo]] // error: no mirror found | ^ - |No given instance of type deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[example.Foo]: - | * class Foo is not a generic product because the constructor of class Foo is innaccessible from the calling scope. 
+ |No given instance of type scala.deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type scala.deriving.Mirror.Of[example.Foo]: + | * class Foo is not a generic product because the constructor of class Foo is inaccessible from the calling scope. | * class Foo is not a generic sum because it is not a sealed class diff --git a/tests/neg/i14432a.check b/tests/neg/i14432a.check index 705a7ed0e88b..c417dec20bb9 100644 --- a/tests/neg/i14432a.check +++ b/tests/neg/i14432a.check @@ -1,6 +1,6 @@ -- [E172] Type Error: tests/neg/i14432a.scala:14:43 -------------------------------------------------------------------- 14 | val mFoo = summon[Mirror.Of[example.Foo]] // error: no mirror found | ^ - |No given instance of type deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[example.Foo]: - | * class Foo is not a generic product because the constructor of class Foo is innaccessible from the calling scope. + |No given instance of type scala.deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type scala.deriving.Mirror.Of[example.Foo]: + | * class Foo is not a generic product because the constructor of class Foo is inaccessible from the calling scope. | * class Foo is not a generic sum because it is not a sealed class diff --git a/tests/neg/i14432b.check b/tests/neg/i14432b.check index 5b0dac3e6ad0..2d5ac3f997eb 100644 --- a/tests/neg/i14432b.check +++ b/tests/neg/i14432b.check @@ -1,6 +1,6 @@ -- [E172] Type Error: tests/neg/i14432b.scala:15:43 -------------------------------------------------------------------- 15 | val mFoo = summon[Mirror.Of[example.Foo]] // error: no mirror found | ^ - |No given instance of type deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[example.Foo]: - | * class Foo is not a generic product because the constructor of class Foo is innaccessible from the calling scope. + |No given instance of type scala.deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type scala.deriving.Mirror.Of[example.Foo]: + | * class Foo is not a generic product because the constructor of class Foo is inaccessible from the calling scope. | * class Foo is not a generic sum because it is not a sealed class diff --git a/tests/neg/i14432c.check b/tests/neg/i14432c.check index a61e100ceb98..2710f0dfb3ed 100644 --- a/tests/neg/i14432c.check +++ b/tests/neg/i14432c.check @@ -2,9 +2,10 @@ 12 |class Bar extends example.Foo(23) { // error: cant access private[example] ctor | ^^^^^^^^^^^ | constructor Foo cannot be accessed as a member of example.Foo from class Bar. + | private[example] constructor Foo can only be accessed from package example. -- [E172] Type Error: tests/neg/i14432c.scala:16:43 -------------------------------------------------------------------- 16 | val mFoo = summon[Mirror.Of[example.Foo]] // error: no mirror | ^ - |No given instance of type deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[example.Foo]: - | * class Foo is not a generic product because the constructor of class Foo is innaccessible from the calling scope. 
+ |No given instance of type scala.deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type scala.deriving.Mirror.Of[example.Foo]: + | * class Foo is not a generic product because the constructor of class Foo is inaccessible from the calling scope. | * class Foo is not a generic sum because it is not a sealed class diff --git a/tests/neg/i14432d.check b/tests/neg/i14432d.check index aff070d90192..e0837bb584e2 100644 --- a/tests/neg/i14432d.check +++ b/tests/neg/i14432d.check @@ -1,6 +1,6 @@ -- [E172] Type Error: tests/neg/i14432d.scala:17:45 -------------------------------------------------------------------- 17 | val mFoo = summon[Mirror.Of[example.Foo]] // error | ^ - |No given instance of type deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[example.Foo]: - | * class Foo is not a generic product because the constructor of class Foo is innaccessible from the calling scope. + |No given instance of type scala.deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type scala.deriving.Mirror.Of[example.Foo]: + | * class Foo is not a generic product because the constructor of class Foo is inaccessible from the calling scope. | * class Foo is not a generic sum because it is not a sealed class diff --git a/tests/neg/i14564.check b/tests/neg/i14564.check index c93e1317a95e..01b222361653 100644 --- a/tests/neg/i14564.check +++ b/tests/neg/i14564.check @@ -1,17 +1,4 @@ --- [E018] Syntax Error: tests/neg/i14564.scala:5:28 -------------------------------------------------------------------- -5 |def test = sum"${ List(42)* }" // error // error - | ^ - | expression expected but '}' found - | - | longer explanation available when compiling with `-explain` --- [E008] Not Found Error: tests/neg/i14564.scala:5:26 ----------------------------------------------------------------- -5 |def test = sum"${ List(42)* }" // error // error - | ^^^^^^^^^ - | value * is not a member of List[Int], but could be made available as an extension method. 
- | - | One of the following imports might make progress towards fixing the problem: - | - | import math.Fractional.Implicits.infixFractionalOps - | import math.Integral.Implicits.infixIntegralOps - | import math.Numeric.Implicits.infixNumericOps - | +-- Error: tests/neg/i14564.scala:5:26 ---------------------------------------------------------------------------------- +5 |def test = sum"${ List(42)* }" // error + | ^ + | spread operator `*` not allowed here; must come last in a parameter list diff --git a/tests/neg/i14564.scala b/tests/neg/i14564.scala index c8d8c369575e..cdc989554254 100644 --- a/tests/neg/i14564.scala +++ b/tests/neg/i14564.scala @@ -2,5 +2,5 @@ import language.postfixOps as _ extension (sc: StringContext) def sum(xs: Int*): String = xs.sum.toString -def test = sum"${ List(42)* }" // error // error +def test = sum"${ List(42)* }" // error diff --git a/tests/neg/i14705.scala b/tests/neg/i14705.scala new file mode 100644 index 000000000000..9ffb4443416e --- /dev/null +++ b/tests/neg/i14705.scala @@ -0,0 +1,5 @@ +//> using options -Xfatal-warnings + +val n = Nil +val b = n.head.isInstanceOf[String] // error + diff --git a/tests/neg-custom-args/fatal-warnings/i14721.scala b/tests/neg/i14721.scala similarity index 83% rename from tests/neg-custom-args/fatal-warnings/i14721.scala rename to tests/neg/i14721.scala index 6e884cad22d7..46786f04558b 100644 --- a/tests/neg-custom-args/fatal-warnings/i14721.scala +++ b/tests/neg/i14721.scala @@ -1,3 +1,4 @@ +//> using options -Xfatal-warnings class C: def op: Unit = println("op") diff --git a/tests/neg/i14823.check b/tests/neg/i14823.check index 47b15f04e2da..2fdad79bd52e 100644 --- a/tests/neg/i14823.check +++ b/tests/neg/i14823.check @@ -1,6 +1,6 @@ -- [E172] Type Error: tests/neg/i14823.scala:8:50 ---------------------------------------------------------------------- 8 |val baz = summon[Mirror.Of[SubA[Int] | SubB[Int]]] // error | ^ - |No given instance of type deriving.Mirror.Of[SubA[Int] | SubB[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[SubA[Int] | SubB[Int]]: + |No given instance of type scala.deriving.Mirror.Of[SubA[Int] | SubB[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type scala.deriving.Mirror.Of[SubA[Int] | SubB[Int]]: | * type `SubA[Int] | SubB[Int]` is not a generic product because its subpart `SubA[Int] | SubB[Int]` is a top-level union type. | * type `SubA[Int] | SubB[Int]` is not a generic sum because its subpart `SubA[Int] | SubB[Int]` is a top-level union type. diff --git a/tests/neg/i14823a.check b/tests/neg/i14823a.check index 3c9b749780e0..081276421eec 100644 --- a/tests/neg/i14823a.check +++ b/tests/neg/i14823a.check @@ -1,7 +1,7 @@ -- [E172] Type Error: tests/neg/i14823a.scala:16:48 -------------------------------------------------------------------- 16 |val foo = summon[Mirror.Of[Box[Int] | Box[Int]]] // error | ^ - |No given instance of type deriving.Mirror.Of[Box[Int] | Box[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Box[Int] | Box[Int]]: + |No given instance of type scala.deriving.Mirror.Of[Box[Int] | Box[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type scala.deriving.Mirror.Of[Box[Int] | Box[Int]]: | * type `Box[Int] | Box[Int]` is not a generic product because its subpart `Box[Int] | Box[Int]` is a top-level union type. 
| * type `Box[Int] | Box[Int]` is not a generic sum because its subpart `Box[Int] | Box[Int]` is a top-level union type. -- [E172] Type Error: tests/neg/i14823a.scala:17:58 -------------------------------------------------------------------- @@ -13,12 +13,12 @@ -- [E172] Type Error: tests/neg/i14823a.scala:18:63 -------------------------------------------------------------------- 18 |def baz = summon[deriving.Mirror.Of[Foo[String] | Foo[String]]] // error | ^ - |No given instance of type deriving.Mirror.Of[Foo[String] | Foo[String]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Foo[String] | Foo[String]]: + |No given instance of type scala.deriving.Mirror.Of[Foo[String] | Foo[String]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type scala.deriving.Mirror.Of[Foo[String] | Foo[String]]: | * type `Foo[String] | Foo[String]` is not a generic product because its subpart `Foo[String] | Foo[String]` is a top-level union type. | * type `Foo[String] | Foo[String]` is not a generic sum because its subpart `Foo[String] | Foo[String]` is a top-level union type. -- [E172] Type Error: tests/neg/i14823a.scala:20:66 -------------------------------------------------------------------- 20 |def qux = summon[deriving.Mirror.Of[Option[Int] | Option[String]]] // error | ^ - |No given instance of type deriving.Mirror.Of[Option[Int] | Option[String]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Option[Int] | Option[String]]: + |No given instance of type scala.deriving.Mirror.Of[Option[Int] | Option[String]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type scala.deriving.Mirror.Of[Option[Int] | Option[String]]: | * type `Option[Int] | Option[String]` is not a generic product because its subpart `Option[Int] | Option[String]` is a top-level union type. | * type `Option[Int] | Option[String]` is not a generic sum because its subpart `Option[Int] | Option[String]` is a top-level union type. 
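For contrast with the i14823 failures above, here is a hedged sketch (illustrative names, not taken from the tests) of a shape for which mirror synthesis does succeed, a sealed sum with case-class children:

```scala
import scala.deriving.Mirror

sealed trait Shape
case class Circle(r: Double) extends Shape
case class Square(side: Double) extends Shape

// A sealed trait with case-class children is a generic sum,
// so this summon compiles:
val ok = summon[Mirror.SumOf[Shape]]

// Whereas, per the messages above, a top-level union has no mirror:
// summon[Mirror.Of[Circle | Square]] // error: top-level union type
```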
diff --git a/tests/pos/i15158.scala b/tests/neg/i15158.scala similarity index 82% rename from tests/pos/i15158.scala rename to tests/neg/i15158.scala index 431430680954..65f785c1f975 100644 --- a/tests/pos/i15158.scala +++ b/tests/neg/i15158.scala @@ -13,9 +13,9 @@ val x = foo { type Rec[A] = A match case String => Opt[Rec[String]] - val arr = new Buf[Rec[String]](8) - val arr2 = Buf[Rec[String]](8) - val arr3 = Buf.apply[Rec[String]](8) + val arr = new Buf[Rec[String]](8) // error + val arr2 = Buf[Rec[String]](8) // error + val arr3 = Buf.apply[Rec[String]](8) // error } import scala.collection.mutable @@ -38,6 +38,6 @@ class Spec { JsonPrimitive ] - val arr = new mutable.ArrayBuffer[Json](8) + val arr = new mutable.ArrayBuffer[Json](8) // error } } diff --git a/tests/neg/i15312.check b/tests/neg/i15312.check new file mode 100644 index 000000000000..188b03518b43 --- /dev/null +++ b/tests/neg/i15312.check @@ -0,0 +1,17 @@ +-- [E007] Type Mismatch Error: tests/neg/i15312.scala:7:27 ------------------------------------------------------------- +7 |val b: F[{type A = Int}] = "asd" // error + | ^^^^^ + | Found: ("asd" : String) + | Required: F[Object{type A = Int}] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce F[Object{type A = Int}] + | failed since selector Object{type A = Int} + | does not match case Object{type A = Float} => Int + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case Object{type A = Int} => String + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i15312.scala b/tests/neg/i15312.scala new file mode 100644 index 000000000000..c3f225aabc74 --- /dev/null +++ b/tests/neg/i15312.scala @@ -0,0 +1,7 @@ +type F[t] = + t match + case {type A = Float} => Int + case {type A = Int} => String + +val a: F[{type A = Float}] = 10 +val b: F[{type A = Int}] = "asd" // error diff --git a/tests/neg/i15381.scala b/tests/neg/i15381.scala index c3e1cc771521..a0328931b36e 100644 --- a/tests/neg/i15381.scala +++ b/tests/neg/i15381.scala @@ -1,4 +1,4 @@ -// scalac: -Vprint:parser +//> using options -Vprint:parser case class $[A](value: A) diff --git a/tests/neg/i15474.check b/tests/neg/i15474.check new file mode 100644 index 000000000000..3205f703cd50 --- /dev/null +++ b/tests/neg/i15474.check @@ -0,0 +1,31 @@ +-- Error: tests/neg/i15474.scala:6:39 ---------------------------------------------------------------------------------- +6 | given c: Conversion[ String, Int ] = _.toInt // error + | ^ + | Result of implicit search for ?{ toInt: ? } will change. + | Current result Test2.c will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: augmentString. + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that Test2.c comes earlier, + | - use an explicit conversion, + | - use an import to get extension method into scope. + | This will be an error in Scala 3.5 and later. +-- Error: tests/neg/i15474.scala:12:56 --------------------------------------------------------------------------------- +12 | given Ordering[Price] = summon[Ordering[BigDecimal]] // error + | ^ + | Result of implicit search for Ordering[BigDecimal] will change. 
+ | Current result Prices.Price.given_Ordering_Price will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: scala.math.Ordering.BigDecimal. + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that Prices.Price.given_Ordering_Price comes earlier, + | - use an explicit argument. + | This will be an error in Scala 3.5 and later. diff --git a/tests/neg/i15474.scala b/tests/neg/i15474.scala new file mode 100644 index 000000000000..b196d1b400ef --- /dev/null +++ b/tests/neg/i15474.scala @@ -0,0 +1,16 @@ +//> using options -Xfatal-warnings + +import scala.language.implicitConversions + +object Test2: + given c: Conversion[ String, Int ] = _.toInt // error + +object Prices { + opaque type Price = BigDecimal + + object Price{ + given Ordering[Price] = summon[Ordering[BigDecimal]] // error + } +} + + diff --git a/tests/neg/i15474b.check b/tests/neg/i15474b.check new file mode 100644 index 000000000000..73ef720af7e3 --- /dev/null +++ b/tests/neg/i15474b.check @@ -0,0 +1,5 @@ +-- Error: tests/neg/i15474b.scala:7:40 --------------------------------------------------------------------------------- +7 | def apply(from: String): Int = from.toInt // error: infinite loop in function body + | ^^^^^^^^^^ + | Infinite loop in function body + | Test1.c.apply(from).toInt diff --git a/tests/neg/i15474b.scala b/tests/neg/i15474b.scala new file mode 100644 index 000000000000..9d496c37ef00 --- /dev/null +++ b/tests/neg/i15474b.scala @@ -0,0 +1,8 @@ +//> using options -Xfatal-warnings + +import scala.language.implicitConversions + +object Test1: + given c: Conversion[ String, Int ] with + def apply(from: String): Int = from.toInt // error: infinite loop in function body + diff --git a/tests/neg-strict/i15479.scala b/tests/neg/i15479.scala similarity index 81% rename from tests/neg-strict/i15479.scala rename to tests/neg/i15479.scala index 3f8bdcba9a1f..d4f306a12368 100644 --- a/tests/neg-strict/i15479.scala +++ b/tests/neg/i15479.scala @@ -1,3 +1,5 @@ +//> using options -source future -deprecation -Xfatal-warnings + package deptest { @deprecated("Not used any more", since="7") object DeprecatedThing { diff --git a/tests/neg-custom-args/fatal-warnings/i15503-scala2/scala2-t11681.scala b/tests/neg/i15503-scala2/scala2-t11681.scala similarity index 97% rename from tests/neg-custom-args/fatal-warnings/i15503-scala2/scala2-t11681.scala rename to tests/neg/i15503-scala2/scala2-t11681.scala index 13d540dc2a5d..2436668e0c9c 100644 --- a/tests/neg-custom-args/fatal-warnings/i15503-scala2/scala2-t11681.scala +++ b/tests/neg/i15503-scala2/scala2-t11681.scala @@ -1,4 +1,4 @@ -// scalac: -Wunused:params +//> using options -Xfatal-warnings -Wunused:params // import Answers._ diff --git a/tests/neg-custom-args/fatal-warnings/i15503a.scala b/tests/neg/i15503a.scala similarity index 99% rename from tests/neg-custom-args/fatal-warnings/i15503a.scala rename to tests/neg/i15503a.scala index cd7282490fc9..b5f17a1e2f52 100644 --- a/tests/neg-custom-args/fatal-warnings/i15503a.scala +++ b/tests/neg/i15503a.scala @@ -1,4 +1,4 @@ -// scalac: -Wunused:imports +//> using options -Xfatal-warnings -Wunused:imports object FooUnused: diff --git a/tests/neg-custom-args/fatal-warnings/i15503b.scala b/tests/neg/i15503b.scala similarity index 97% 
rename from tests/neg-custom-args/fatal-warnings/i15503b.scala rename to tests/neg/i15503b.scala index c8a2d6bc2074..4dc10e12606d 100644 --- a/tests/neg-custom-args/fatal-warnings/i15503b.scala +++ b/tests/neg/i15503b.scala @@ -1,4 +1,4 @@ -// scalac: -Wunused:locals +//> using options -Xfatal-warnings -Wunused:locals val a = 1 // OK diff --git a/tests/neg-custom-args/fatal-warnings/i15503c.scala b/tests/neg/i15503c.scala similarity index 95% rename from tests/neg-custom-args/fatal-warnings/i15503c.scala rename to tests/neg/i15503c.scala index e4e15116bf0d..040dae43a2c9 100644 --- a/tests/neg-custom-args/fatal-warnings/i15503c.scala +++ b/tests/neg/i15503c.scala @@ -1,4 +1,4 @@ -// scalac: -Wunused:privates +//> using options -Xfatal-warnings -Wunused:privates -source:3.3 trait C class A: self: C => // OK diff --git a/tests/neg-custom-args/fatal-warnings/i15503d.scala b/tests/neg/i15503d.scala similarity index 92% rename from tests/neg-custom-args/fatal-warnings/i15503d.scala rename to tests/neg/i15503d.scala index 6c5973c66a3a..c43dc07c638e 100644 --- a/tests/neg-custom-args/fatal-warnings/i15503d.scala +++ b/tests/neg/i15503d.scala @@ -1,4 +1,4 @@ -// scalac: -Wunused:unsafe-warn-patvars +//> using options -Xfatal-warnings -Wunused:unsafe-warn-patvars // todo : change to :patvars sealed trait Calc diff --git a/tests/neg-custom-args/fatal-warnings/i15503e.scala b/tests/neg/i15503e.scala similarity index 97% rename from tests/neg-custom-args/fatal-warnings/i15503e.scala rename to tests/neg/i15503e.scala index 57664cd08dcd..f3214e7b24e6 100644 --- a/tests/neg-custom-args/fatal-warnings/i15503e.scala +++ b/tests/neg/i15503e.scala @@ -1,4 +1,4 @@ -// scalac: -Wunused:explicits +//> using options -Xfatal-warnings -Wunused:explicits object Foo { /* This goes around the "trivial method" detection */ diff --git a/tests/neg-custom-args/fatal-warnings/i15503f.scala b/tests/neg/i15503f.scala similarity index 88% rename from tests/neg-custom-args/fatal-warnings/i15503f.scala rename to tests/neg/i15503f.scala index f909272af732..08520d149b95 100644 --- a/tests/neg-custom-args/fatal-warnings/i15503f.scala +++ b/tests/neg/i15503f.scala @@ -1,4 +1,4 @@ -// scalac: -Wunused:implicits +//> using options -Xfatal-warnings -Wunused:implicits /* This goes around the "trivial method" detection */ val default_int = 1 diff --git a/tests/neg-custom-args/fatal-warnings/i15503g.scala b/tests/neg/i15503g.scala similarity index 92% rename from tests/neg-custom-args/fatal-warnings/i15503g.scala rename to tests/neg/i15503g.scala index 2185bfed711d..4da98a8d43f2 100644 --- a/tests/neg-custom-args/fatal-warnings/i15503g.scala +++ b/tests/neg/i15503g.scala @@ -1,4 +1,4 @@ -// scalac: -Wunused:params +//> using options -Xfatal-warnings -Wunused:params /* This goes around the "trivial method" detection */ object Foo { diff --git a/tests/neg-custom-args/fatal-warnings/i15503h.scala b/tests/neg/i15503h.scala similarity index 86% rename from tests/neg-custom-args/fatal-warnings/i15503h.scala rename to tests/neg/i15503h.scala index 3bab6cdbd098..cef1f6cf566f 100644 --- a/tests/neg-custom-args/fatal-warnings/i15503h.scala +++ b/tests/neg/i15503h.scala @@ -1,4 +1,4 @@ -// scalac: -Wunused:linted +//> using options -Xfatal-warnings -Wunused:linted import collection.mutable.Set // error diff --git a/tests/neg-custom-args/fatal-warnings/i15503i.scala b/tests/neg/i15503i.scala similarity index 99% rename from tests/neg-custom-args/fatal-warnings/i15503i.scala rename to tests/neg/i15503i.scala index 768e4d5c3ce0..d415c95a701a 100644 
--- a/tests/neg-custom-args/fatal-warnings/i15503i.scala +++ b/tests/neg/i15503i.scala @@ -1,4 +1,4 @@ -// scalac: -Wunused:all +//> using options -Xfatal-warnings -Wunused:all import collection.mutable.{Map => MutMap} // error import collection.mutable.Set // error diff --git a/tests/neg-custom-args/fatal-warnings/i15503j.scala b/tests/neg/i15503j.scala similarity index 94% rename from tests/neg-custom-args/fatal-warnings/i15503j.scala rename to tests/neg/i15503j.scala index 51c1fa6fda0c..66140934abc1 100644 --- a/tests/neg-custom-args/fatal-warnings/i15503j.scala +++ b/tests/neg/i15503j.scala @@ -1,4 +1,4 @@ -// scalac: -Wunused:strict-no-implicit-warn +//> using options -Xfatal-warnings -Wunused:strict-no-implicit-warn package foo.unused.strict.test: package a: diff --git a/tests/neg/i15575.check b/tests/neg/i15575.check new file mode 100644 index 000000000000..25642f8f7259 --- /dev/null +++ b/tests/neg/i15575.check @@ -0,0 +1,35 @@ +-- [E057] Type Mismatch Error: tests/neg/i15575.scala:5:27 ------------------------------------------------------------- +5 | def bar[T]: Unit = foo[T & Any] // error + | ^ + | Type argument T & Any does not conform to lower bound Any + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | I tried to show that + | Any + | conforms to + | T & Any + | but none of the attempts shown below succeeded: + | + | ==> Any <: T & Any + | ==> Any <: T = false + | + | The tests were made under the empty constraint + --------------------------------------------------------------------------------------------------------------------- +-- [E057] Type Mismatch Error: tests/neg/i15575.scala:9:14 ------------------------------------------------------------- +9 | val _ = foo[String] // error + | ^ + | Type argument String does not conform to lower bound CharSequence + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | I tried to show that + | CharSequence + | conforms to + | String + | but none of the attempts shown below succeeded: + | + | ==> CharSequence <: String = false + | + | The tests were made under the empty constraint + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-custom-args/explain/i15575.scala b/tests/neg/i15575.scala similarity index 86% rename from tests/neg-custom-args/explain/i15575.scala rename to tests/neg/i15575.scala index 367d0f36f1ed..8d32258352b5 100644 --- a/tests/neg-custom-args/explain/i15575.scala +++ b/tests/neg/i15575.scala @@ -1,3 +1,5 @@ +//> using options -explain + object Test1: def foo[T >: Any]: Unit = () def bar[T]: Unit = foo[T & Any] // error diff --git a/tests/neg/i15662.scala b/tests/neg/i15662.scala new file mode 100644 index 000000000000..c84ed6e5e341 --- /dev/null +++ b/tests/neg/i15662.scala @@ -0,0 +1,16 @@ +//> using options -Xfatal-warnings + +case class Composite[T](v: T) + +def m(composite: Composite[?]): Unit = + composite match { + case Composite[Int](v) => println(v) // error: cannot be checked at runtime + } + +def m2(composite: Composite[?]): Unit = + composite match { + 
case Composite(v) => println(v) // ok + } + +@main def Test = + m(Composite("This is String")) diff --git a/tests/neg/i15741.scala b/tests/neg/i15741.scala index b5304f83d8b3..2d536c515f76 100644 --- a/tests/neg/i15741.scala +++ b/tests/neg/i15741.scala @@ -1,15 +1,15 @@ def get(using Int): String = summon[Int].toString def pf2: PartialFunction[String, Int ?=> String] = { - case "hoge" => get // error + case "hoge" => get case "huga" => get - } + } // error type IS = Int ?=> String def pf3: PartialFunction[String, IS] = { - case "hoge" => get // error + case "hoge" => get case "huga" => get - } + } // error diff --git a/tests/neg-custom-args/fatal-warnings/i15893.scala b/tests/neg/i15893.scala similarity index 91% rename from tests/neg-custom-args/fatal-warnings/i15893.scala rename to tests/neg/i15893.scala index f23e6150106a..a62ddc3ae653 100644 --- a/tests/neg-custom-args/fatal-warnings/i15893.scala +++ b/tests/neg/i15893.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + sealed trait NatT case class Zero() extends NatT case class Succ[+N <: NatT](n: N) extends NatT @@ -25,17 +27,17 @@ transparent inline def transparentInlineMod2(inline n: NatT): NatT = inline n m def dependentlyTypedMod2[N <: NatT](n: N): Mod2[N] = n match case Zero(): Zero => Zero() // error case Succ(Zero()): Succ[Zero] => Succ(Zero()) // error - case Succ(Succ(predPredN)): Succ[Succ[_]] => dependentlyTypedMod2(predPredN) // error + case Succ(Succ(predPredN)): Succ[Succ[?]] => dependentlyTypedMod2(predPredN) // error inline def inlineDependentlyTypedMod2[N <: NatT](inline n: N): Mod2[N] = inline n match case Zero(): Zero => Zero() // error case Succ(Zero()): Succ[Zero] => Succ(Zero()) // error - case Succ(Succ(predPredN)): Succ[Succ[_]] => inlineDependentlyTypedMod2(predPredN) // error + case Succ(Succ(predPredN)): Succ[Succ[?]] => inlineDependentlyTypedMod2(predPredN) // error transparent inline def transparentInlineDependentlyTypedMod2[N <: NatT](inline n: N): Mod2[N] = inline n match case Zero(): Zero => Zero() // error case Succ(Zero()): Succ[Zero] => Succ(Zero()) // error - case Succ(Succ(predPredN)): Succ[Succ[_]] => transparentInlineDependentlyTypedMod2(predPredN) // error + case Succ(Succ(predPredN)): Succ[Succ[?]] => transparentInlineDependentlyTypedMod2(predPredN) // error def foo(n: NatT): NatT = mod2(n) match case Succ(Zero()) => Zero() diff --git a/tests/neg-strict/i16092.scala b/tests/neg/i16092.scala similarity index 90% rename from tests/neg-strict/i16092.scala rename to tests/neg/i16092.scala index b86c034c815b..c1327d3f6f70 100644 --- a/tests/neg-strict/i16092.scala +++ b/tests/neg/i16092.scala @@ -1,3 +1,5 @@ +//> using options -source future -deprecation -Xfatal-warnings + trait X { type T def process(t: T): Unit diff --git a/tests/neg/i16438.scala b/tests/neg/i16438.scala index 33873b13384b..a2b88080c2cd 100644 --- a/tests/neg/i16438.scala +++ b/tests/neg/i16438.scala @@ -1,4 +1,4 @@ -// scalac: -Ysafe-init +//> using options -Ysafe-init trait ATrait(val string: String, val int: Int) trait AnotherTrait( override val string: String, override val int: Int) extends ATrait case class ACaseClass(override val string: String) extends AnotherTrait(string, 3) // error diff --git a/tests/neg/i16453b1.check b/tests/neg/i16453b1.check new file mode 100644 index 000000000000..07f23c95d9ad --- /dev/null +++ b/tests/neg/i16453b1.check @@ -0,0 +1,8 @@ +-- [E172] Type Error: tests/neg/i16453b1.scala:11:19 ------------------------------------------------------------------- +11 | val ko = get[Int] // error 
+ | ^ + |No given instance of type Ctx => Option[Int] was found for parameter fn of method get + | + |Note: implicit conversions are not automatically applied to arguments of using clauses. You will have to pass the argument explicitly. + |The following implicits in scope can be implicitly converted to Ctx => Option[Int]: + |- final lazy given val foo: Ctx => Int diff --git a/tests/neg/i16453b1.scala b/tests/neg/i16453b1.scala new file mode 100644 index 000000000000..4a99321a9421 --- /dev/null +++ b/tests/neg/i16453b1.scala @@ -0,0 +1,12 @@ +import scala.language.implicitConversions + +sealed trait Ctx +given ct[T]: Conversion[Ctx => T, Ctx => Option[T]] = fn => fn.andThen(Option.apply) + +def get[T](using fn: Ctx => Option[T]): Option[T] = ??? + +def Test = { + given foo: (Ctx => Int) = _ => 42 + val ok = get[Int](using summon[Ctx => Int]) + val ko = get[Int] // error +} diff --git a/tests/neg/i16453b2.check b/tests/neg/i16453b2.check new file mode 100644 index 000000000000..fc0a1fa37b82 --- /dev/null +++ b/tests/neg/i16453b2.check @@ -0,0 +1,8 @@ +-- [E172] Type Error: tests/neg/i16453b2.scala:11:19 ------------------------------------------------------------------- +11 | val ko = get[Int] // error + | ^ + |No given instance of type Ctx => Option[Int] was found for parameter fn of method get + | + |Note: implicit conversions are not automatically applied to arguments of using clauses. You will have to pass the argument explicitly. + |The following implicits in scope can be implicitly converted to Ctx => Option[Int]: + |- final given def foo2[A]: Ctx => Int diff --git a/tests/neg/i16453b2.scala b/tests/neg/i16453b2.scala new file mode 100644 index 000000000000..6cafeb54b540 --- /dev/null +++ b/tests/neg/i16453b2.scala @@ -0,0 +1,12 @@ +import scala.language.implicitConversions + +sealed trait Ctx +given ct[T]: Conversion[Ctx => T, Ctx => Option[T]] = fn => fn.andThen(Option.apply) + +def get[T](using fn: Ctx => Option[T]): Option[T] = ??? 
+ +def Test = { + given foo2[A]: (Ctx => Int) = _ => 42 + val ok = get[Int](using summon[Ctx => Int]) + val ko = get[Int] // error +} diff --git a/tests/neg/i16601.check b/tests/neg/i16601.check index 25baef04e479..c2059506cb09 100644 --- a/tests/neg/i16601.check +++ b/tests/neg/i16601.check @@ -1,6 +1,6 @@ --- [E042] Type Error: tests/neg/i16601.scala:1:27 ---------------------------------------------------------------------- -1 |@main def Test: Unit = new concurrent.ExecutionContext // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | ExecutionContext is a trait; it cannot be instantiated +-- [E042] Type Error: tests/neg/i16601.scala:1:26 ---------------------------------------------------------------------- +1 |@main def Test: Any = new concurrent.ExecutionContext // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | ExecutionContext is a trait; it cannot be instantiated | | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i16601.scala b/tests/neg/i16601.scala index 2e058db0093c..00d0c7c9b15e 100644 --- a/tests/neg/i16601.scala +++ b/tests/neg/i16601.scala @@ -1 +1 @@ -@main def Test: Unit = new concurrent.ExecutionContext // error \ No newline at end of file +@main def Test: Any = new concurrent.ExecutionContext // error \ No newline at end of file diff --git a/tests/neg/i16601a.check b/tests/neg/i16601a.check new file mode 100644 index 000000000000..6640f5b41749 --- /dev/null +++ b/tests/neg/i16601a.check @@ -0,0 +1,18 @@ +-- [E042] Type Error: tests/neg/i16601a.scala:3:26 --------------------------------------------------------------------- +3 |@main def Test: Any = new concurrent.ExecutionContext // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | ExecutionContext is a trait; it cannot be instantiated + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Abstract classes and traits need to be extended by a concrete class or object + | to make their functionality accessible. + | + | You may want to create an anonymous class extending ExecutionContext with + | class ExecutionContext { } + | + | or add a companion object with + | object ExecutionContext extends ExecutionContext + | + | You need to implement any abstract members in both cases. 
+ --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i16601a.scala b/tests/neg/i16601a.scala new file mode 100644 index 000000000000..b85828122e8a --- /dev/null +++ b/tests/neg/i16601a.scala @@ -0,0 +1,3 @@ +//> using options -explain + +@main def Test: Any = new concurrent.ExecutionContext // error \ No newline at end of file diff --git a/tests/neg-custom-args/fatal-warnings/i16639a.scala b/tests/neg/i16639a.scala similarity index 96% rename from tests/neg-custom-args/fatal-warnings/i16639a.scala rename to tests/neg/i16639a.scala index c62910b7f566..5b348d594f5b 100644 --- a/tests/neg-custom-args/fatal-warnings/i16639a.scala +++ b/tests/neg/i16639a.scala @@ -1,4 +1,4 @@ -// scalac: -Wunused:all +//> using options -Xfatal-warnings -Wunused:all -source:3.3 // class Bippy(a: Int, b: Int) { private def this(c: Int) = this(c, c) // warn /Dotty:NoWarn @@ -26,11 +26,11 @@ trait Bing trait Accessors { private var v1: Int = 0 // error warn private var v2: Int = 0 // error warn, never set - private var v3: Int = 0 // warn, never got /Dotty: no warn even if not used + private var v3: Int = 0 private var v4: Int = 0 // no warn private[this] var v5 = 0 // error warn, never set - private[this] var v6 = 0 // warn, never got /Dotty: no warn even if not used + private[this] var v6 = 0 private[this] var v7 = 0 // no warn def bippy(): Int = { diff --git a/tests/neg/i16649-refutable.check b/tests/neg/i16649-refutable.check new file mode 100644 index 000000000000..a1325d7cfa91 --- /dev/null +++ b/tests/neg/i16649-refutable.check @@ -0,0 +1,8 @@ +-- Error: tests/neg/i16649-refutable.scala:6:6 ------------------------------------------------------------------------- +6 | val '{ ($y: Int) + ($z: Int) } = x // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | pattern binding uses refutable extractor `'{...}` + | + | If this usage is intentional, this can be communicated by adding `: @unchecked` after the expression, + | which may result in a MatchError at runtime. + | This patch can be rewritten automatically under -rewrite -source 3.2-migration. diff --git a/tests/neg/i16649-refutable.scala b/tests/neg/i16649-refutable.scala new file mode 100644 index 000000000000..8caac5b2576a --- /dev/null +++ b/tests/neg/i16649-refutable.scala @@ -0,0 +1,9 @@ +//> using options -Xfatal-warnings + +import quoted.* + +def foo(using Quotes)(x: Expr[Int]) = + val '{ ($y: Int) + ($z: Int) } = x // error + val '{ $a: Int } = x + val '{ $b: Any } = x + val '{ $c } = x diff --git a/tests/neg/i16653.check b/tests/neg/i16653.check index dd5c756f6f79..1ed7a1dbbc8e 100644 --- a/tests/neg/i16653.check +++ b/tests/neg/i16653.check @@ -1,6 +1,6 @@ -- [E006] Not Found Error: tests/neg/i16653.scala:1:7 ------------------------------------------------------------------ 1 |import demo.implicits._ // error | ^^^^ - | Not found: demo + | Not found: demo - did you mean Demo? 
| | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i16728.check b/tests/neg/i16728.check new file mode 100644 index 000000000000..93cc215696c2 --- /dev/null +++ b/tests/neg/i16728.check @@ -0,0 +1,6 @@ +-- [E092] Pattern Match Error: tests/neg/i16728.scala:18:11 ------------------------------------------------------------ +18 | case tx : C[Int]#X => // error + | ^ + | the type test for C[Int] cannot be checked at runtime because its type arguments can't be determined from A + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/fatal-warnings/i16728.scala b/tests/neg/i16728.scala similarity index 93% rename from tests/neg-custom-args/fatal-warnings/i16728.scala rename to tests/neg/i16728.scala index 42c860cc40b2..b76bb03fa14a 100644 --- a/tests/neg-custom-args/fatal-warnings/i16728.scala +++ b/tests/neg/i16728.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + class A[T] { class X { def outer : A.this.type = A.this diff --git a/tests/neg/i16842.check b/tests/neg/i16842.check new file mode 100644 index 000000000000..936b08f95dbb --- /dev/null +++ b/tests/neg/i16842.check @@ -0,0 +1,4 @@ +-- Error: tests/neg/i16842.scala:24:7 ---------------------------------------------------------------------------------- +24 | Liter(SemanticArray[SemanticInt.type], x) // error + | ^ + | invalid new prefix (dim: Int): SemanticArray[SemanticInt.type] cannot replace ty.type in type ty.T diff --git a/tests/neg/i16842.scala b/tests/neg/i16842.scala index 1e7e5cc14339..e9935b46c01d 100644 --- a/tests/neg/i16842.scala +++ b/tests/neg/i16842.scala @@ -21,5 +21,5 @@ def typecheckArrayLiter( a: ArrayLiter ): Liter[SemanticArray[SemanticType]] = { val x: List[Expr2[SemanticInt.type]] = List() - Liter(SemanticArray[SemanticInt.type], x) // error // error + Liter(SemanticArray[SemanticInt.type], x) // error } diff --git a/tests/neg-custom-args/fatal-warnings/i16876/Macro.scala b/tests/neg/i16876/Macro.scala similarity index 100% rename from tests/neg-custom-args/fatal-warnings/i16876/Macro.scala rename to tests/neg/i16876/Macro.scala diff --git a/tests/neg/i16876/Test.scala b/tests/neg/i16876/Test.scala new file mode 100644 index 000000000000..2dd6bfb34421 --- /dev/null +++ b/tests/neg/i16876/Test.scala @@ -0,0 +1,11 @@ +//> using options -Xfatal-warnings -Wunused:all + +object Foo { + private def myMethod(a: Int, b: Int, c: Int) = adder // ok + myMethod(1, 2, 3) + + private def myMethodFailing(a: Int, b: Int, c: Int) = a + 0 // error // error + myMethodFailing(1, 2, 3) +} + + diff --git a/tests/neg-custom-args/explain/i16888.check b/tests/neg/i16888.check similarity index 78% rename from tests/neg-custom-args/explain/i16888.check rename to tests/neg/i16888.check index 53103576d158..3f050fe5cc2d 100644 --- a/tests/neg-custom-args/explain/i16888.check +++ b/tests/neg/i16888.check @@ -1,7 +1,7 @@ --- [E172] Type Error: tests/neg-custom-args/explain/i16888.scala:1:38 -------------------------------------------------- -1 |def test = summon[scala.quoted.Quotes] // error +-- [E172] Type Error: tests/neg/i16888.scala:3:38 ---------------------------------------------------------------------- +3 |def test = summon[scala.quoted.Quotes] // error | ^ - | No given instance of type quoted.Quotes was found for parameter x of method summon in object Predef + | No given instance of type scala.quoted.Quotes was found for parameter x of method summon in object Predef 
|--------------------------------------------------------------------------------------------------------------------- | Explanation (enabled by `-explain`) |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tests/neg/i16888.scala b/tests/neg/i16888.scala new file mode 100644 index 000000000000..b6e89275cf9c --- /dev/null +++ b/tests/neg/i16888.scala @@ -0,0 +1,3 @@ +//> using options -explain + +def test = summon[scala.quoted.Quotes] // error diff --git a/tests/neg/i16920.check b/tests/neg/i16920.check index 131ba4c6265e..8f8172b5538e 100644 --- a/tests/neg/i16920.check +++ b/tests/neg/i16920.check @@ -1,5 +1,5 @@ --- [E008] Not Found Error: tests/neg/i16920.scala:20:11 ---------------------------------------------------------------- -20 | "five".wow // error +-- [E008] Not Found Error: tests/neg/i16920.scala:19:11 ---------------------------------------------------------------- +19 | "five".wow // error | ^^^^^^^^^^ | value wow is not a member of String. | An extension method was tried, but could not be fully constructed: @@ -10,8 +10,8 @@ | | Found: ("five" : String) | Required: Int --- [E008] Not Found Error: tests/neg/i16920.scala:28:6 ----------------------------------------------------------------- -28 | 5.wow // error +-- [E008] Not Found Error: tests/neg/i16920.scala:27:6 ----------------------------------------------------------------- +27 | 5.wow // error | ^^^^^ | value wow is not a member of Int. | An extension method was tried, but could not be fully constructed: @@ -22,8 +22,8 @@ | | Found: (5 : Int) | Required: Boolean --- [E008] Not Found Error: tests/neg/i16920.scala:29:11 ---------------------------------------------------------------- -29 | "five".wow // error +-- [E008] Not Found Error: tests/neg/i16920.scala:28:11 ---------------------------------------------------------------- +28 | "five".wow // error | ^^^^^^^^^^ | value wow is not a member of String. | An extension method was tried, but could not be fully constructed: @@ -34,8 +34,8 @@ | | Found: ("five" : String) | Required: Boolean --- [E008] Not Found Error: tests/neg/i16920.scala:36:6 ----------------------------------------------------------------- -36 | 5.wow // error +-- [E008] Not Found Error: tests/neg/i16920.scala:35:6 ----------------------------------------------------------------- +35 | 5.wow // error | ^^^^^ | value wow is not a member of Int. | An extension method was tried, but could not be fully constructed: @@ -48,8 +48,8 @@ | both Three.wow(5) | and Two.wow(5) | are possible expansions of 5.wow --- [E008] Not Found Error: tests/neg/i16920.scala:44:11 ---------------------------------------------------------------- -44 | "five".wow // error +-- [E008] Not Found Error: tests/neg/i16920.scala:43:11 ---------------------------------------------------------------- +43 | "five".wow // error | ^^^^^^^^^^ | value wow is not a member of String. | An extension method was tried, but could not be fully constructed: @@ -60,8 +60,8 @@ | | Found: ("five" : String) | Required: Int --- [E008] Not Found Error: tests/neg/i16920.scala:51:11 ---------------------------------------------------------------- -51 | "five".wow // error +-- [E008] Not Found Error: tests/neg/i16920.scala:50:11 ---------------------------------------------------------------- +50 | "five".wow // error | ^^^^^^^^^^ | value wow is not a member of String. 
| An extension method was tried, but could not be fully constructed: @@ -72,8 +72,8 @@ | | Found: ("five" : String) | Required: Int --- [E008] Not Found Error: tests/neg/i16920.scala:58:6 ----------------------------------------------------------------- -58 | 5.wow // error +-- [E008] Not Found Error: tests/neg/i16920.scala:57:6 ----------------------------------------------------------------- +57 | 5.wow // error | ^^^^^ | value wow is not a member of Int. | An extension method was tried, but could not be fully constructed: diff --git a/tests/neg/i16920.scala b/tests/neg/i16920.scala index 38345e811c1f..c4a54046e027 100644 --- a/tests/neg/i16920.scala +++ b/tests/neg/i16920.scala @@ -1,4 +1,3 @@ -import language.experimental.relaxedExtensionImports object One: extension (s: String) diff --git a/tests/neg-custom-args/fatal-warnings/i16930.scala b/tests/neg/i16930.scala similarity index 89% rename from tests/neg-custom-args/fatal-warnings/i16930.scala rename to tests/neg/i16930.scala index 1f6c5bf1a09f..c7af60161424 100644 --- a/tests/neg-custom-args/fatal-warnings/i16930.scala +++ b/tests/neg/i16930.scala @@ -1,4 +1,4 @@ -// scalac: -Wunused:imports +//> using options -Xfatal-warnings -Wunused:imports trait Outer: trait Used diff --git a/tests/neg/i17121.check b/tests/neg/i17121.check new file mode 100644 index 000000000000..59895dd2474a --- /dev/null +++ b/tests/neg/i17121.check @@ -0,0 +1,24 @@ +-- [E191] Type Error: tests/neg/i17121.scala:13:17 --------------------------------------------------------------------- +13 | type G1[X] = X match { case Consumer[List[t]] => t } // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | The match type contains an illegal case: + | case Consumer[List[t]] => t + | (this error can be ignored for now with `-source:3.3`) +-- [E191] Type Error: tests/neg/i17121.scala:15:17 --------------------------------------------------------------------- +15 | type G2[X] = X match { case Consumer[Consumer[t]] => t } // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | The match type contains an illegal case: + | case Consumer[Consumer[t]] => t + | (this error can be ignored for now with `-source:3.3`) +-- [E191] Type Error: tests/neg/i17121.scala:17:17 --------------------------------------------------------------------- +17 | type G3[X] = X match { case Consumer[Consumer[Consumer[t]]] => t } // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | The match type contains an illegal case: + | case Consumer[Consumer[Consumer[t]]] => t + | (this error can be ignored for now with `-source:3.3`) +-- [E191] Type Error: tests/neg/i17121.scala:19:17 --------------------------------------------------------------------- +19 | type G4[X] = X match { case Consumer[List[Consumer[t]]] => t } // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | The match type contains an illegal case: + | case Consumer[List[Consumer[t]]] => t + | (this error can be ignored for now with `-source:3.3`) diff --git a/tests/neg/i17121.scala b/tests/neg/i17121.scala new file mode 100644 index 000000000000..0e845cf3266d --- /dev/null +++ b/tests/neg/i17121.scala @@ -0,0 +1,20 @@ +class Consumer[-T] + +object Test: + type F1[X] = X match { case List[t] => t } // OK + summon[F1[List[?]] =:= Any] + + type F2[X] = X match { case Consumer[t] => t } // OK + summon[F2[Consumer[?]] =:= Nothing] + + type F3[X] = X match { case List[Consumer[t]] => t } // OK + summon[F3[List[Consumer[?]]] =:= Nothing] + + type G1[X] = X match { case Consumer[List[t]] => t } // error + + type 
G2[X] = X match { case Consumer[Consumer[t]] => t } // error + + type G3[X] = X match { case Consumer[Consumer[Consumer[t]]] => t } // error + + type G4[X] = X match { case Consumer[List[Consumer[t]]] => t } // error +end Test diff --git a/tests/neg/i17149.scala b/tests/neg/i17149.scala new file mode 100644 index 000000000000..17f3f901e457 --- /dev/null +++ b/tests/neg/i17149.scala @@ -0,0 +1,17 @@ +type Ext1[S] = S match { + case Seq[t] => t +} +type Ext2[S] = S match { + case Seq[_] => Int +} +type Ext3[S] = S match { + case Array[t] => t +} +type Ext4[S] = S match { + case Array[_] => Int +} +def foo[T <: Seq[Any], A <: Array[B], B] = + summon[Ext1[T] =:= T] // error + summon[Ext2[T] =:= Int] // ok + summon[Ext3[A] =:= B] // ok + summon[Ext4[A] =:= Int] // ok \ No newline at end of file diff --git a/tests/neg/i17183.check b/tests/neg/i17183.check new file mode 100644 index 000000000000..d299976997f5 --- /dev/null +++ b/tests/neg/i17183.check @@ -0,0 +1,20 @@ +-- [E081] Type Error: tests/neg/i17183.scala:11:24 --------------------------------------------------------------------- +11 |def test = Context(f = (_, _) => ???) // error // error + | ^ + | Missing parameter type + | + | I could not infer the type of the parameter _$1 + | in expanded function: + | (_$1, _$2) => ??? + | Expected type for the whole anonymous function: + | MyFunc +-- [E081] Type Error: tests/neg/i17183.scala:11:27 --------------------------------------------------------------------- +11 |def test = Context(f = (_, _) => ???) // error // error + | ^ + | Missing parameter type + | + | I could not infer the type of the parameter _$2 + | in expanded function: + | (_$1, _$2) => ??? + | Expected type for the whole anonymous function: + | MyFunc diff --git a/tests/neg/i17183.scala b/tests/neg/i17183.scala new file mode 100644 index 000000000000..a7d2d51c5935 --- /dev/null +++ b/tests/neg/i17183.scala @@ -0,0 +1,11 @@ +trait Dependency { + trait More +} + +trait MyFunc { + def apply(a: Int, b: String)(using dep: Dependency, more: dep.More): String +} + +case class Context(f: MyFunc) + +def test = Context(f = (_, _) => ???) // error // error diff --git a/tests/neg/i17183b.scala b/tests/neg/i17183b.scala new file mode 100644 index 000000000000..476c216c8e18 --- /dev/null +++ b/tests/neg/i17183b.scala @@ -0,0 +1,9 @@ +trait Dependency + +trait MyFunc { + def apply(a: Int, b: String)(using Dependency): String +} + +case class Context(f: MyFunc) + +def test = Context(f = (_, _) => ???) // error // error diff --git a/tests/neg/i17192.5.scala b/tests/neg/i17192.5.scala new file mode 100644 index 000000000000..a23c8b589a1a --- /dev/null +++ b/tests/neg/i17192.5.scala @@ -0,0 +1,13 @@ +class Ifce[BT <: Boolean]: + type RT = BT match + case true => this.type { val v1: Int } + case false => this.type + def cast: RT = this.asInstanceOf[RT] + +class Test: + def t1: Unit = + val full1 = new Ifce[true]().cast + val v1 = full1.v1 // error +// ^^^^^ +// Found: (full1 : Ifce[(true : Boolean)]#RT) +// Required: Selectable | Dynamic diff --git a/tests/neg/i17266.check b/tests/neg/i17266.check index 7e07e3d43de4..1a84d1bf5e89 100644 --- a/tests/neg/i17266.check +++ b/tests/neg/i17266.check @@ -20,6 +20,16 @@ | resolved to calls on Predef or on imported methods. This might not be what | you intended. 
-------------------------------------------------------------------------------------------------------------------- +-- [E187] Potential Issue Error: tests/neg/i17266.scala:22:4 ----------------------------------------------------------- +22 | 1.synchronized { // error + | ^^^^^^^^^^^^^^ + | Suspicious synchronized call on boxed class + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | You called the synchronized method on a boxed primitive. This might not be what + | you intended. + -------------------------------------------------------------------------------------------------------------------- -- [E181] Potential Issue Error: tests/neg/i17266.scala:108:2 ---------------------------------------------------------- 108 | wait() // error | ^^^^ diff --git a/tests/neg/i17266.scala b/tests/neg/i17266.scala index 5b74ea76810b..439abafc44a1 100644 --- a/tests/neg/i17266.scala +++ b/tests/neg/i17266.scala @@ -1,4 +1,4 @@ -// scalac: -Werror -explain +//> using options -Werror -explain def test1 = synchronized { // error @@ -19,7 +19,7 @@ def test3 = } def test4 = - 1.synchronized { // not an error (should be?) + 1.synchronized { // error println("hello") } diff --git a/tests/neg-custom-args/no-experimental/i17292.scala b/tests/neg/i17292.scala similarity index 80% rename from tests/neg-custom-args/no-experimental/i17292.scala rename to tests/neg/i17292.scala index 381d252dbea8..9cf7fc7b1c30 100644 --- a/tests/neg-custom-args/no-experimental/i17292.scala +++ b/tests/neg/i17292.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import annotation.experimental class Foo { @experimental type Bar = (Int, String) } diff --git a/tests/neg-custom-args/no-experimental/i17292b.scala b/tests/neg/i17292b.scala similarity index 92% rename from tests/neg-custom-args/no-experimental/i17292b.scala rename to tests/neg/i17292b.scala index f644dd60ecd5..b89a64439699 100644 --- a/tests/neg-custom-args/no-experimental/i17292b.scala +++ b/tests/neg/i17292b.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import annotation.experimental type A[T] = Int class Foo { diff --git a/tests/neg-custom-args/fatal-warnings/i17314b.scala b/tests/neg/i17314b.scala similarity index 80% rename from tests/neg-custom-args/fatal-warnings/i17314b.scala rename to tests/neg/i17314b.scala index 384767765cf4..9e89fecde935 100644 --- a/tests/neg-custom-args/fatal-warnings/i17314b.scala +++ b/tests/neg/i17314b.scala @@ -1,4 +1,4 @@ -// scalac: -Wunused:all +//> using options -Xfatal-warnings -Wunused:all package foo: class Foo[T] diff --git a/tests/neg/i17335.scala b/tests/neg/i17335.scala new file mode 100644 index 000000000000..b22ece6e42bf --- /dev/null +++ b/tests/neg/i17335.scala @@ -0,0 +1,4 @@ +//> using options -Xfatal-warnings -Wunused:all + +def aMethod() = + doStuff { (x) => x } // error diff --git a/tests/neg/i17443.scala b/tests/neg/i17443.scala new file mode 100644 index 000000000000..95e963b6a1d9 --- /dev/null +++ b/tests/neg/i17443.scala @@ -0,0 +1,2 @@ +def run() = + val x = List(1) match { case (xs*) => xs } // error diff --git a/tests/neg/i17467.check b/tests/neg/i17467.check new file mode 100644 index 000000000000..a274a519f69a --- /dev/null +++ b/tests/neg/i17467.check @@ -0,0 +1,64 @@ +-- [E007] Type Mismatch Error: tests/neg/i17467.scala:6:20 
------------------------------------------------------------- +6 | val b1: "foo" = null // error + | ^^^^ + | Found: Null + | Required: ("foo" : String) + | Note that implicit conversions were not tried because the result of an implicit conversion + | must be more specific than ("foo" : String) + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/i17467.scala:9:22 ------------------------------------------------------------- +9 | val c2: c1.type = null // error + | ^^^^ + | Found: Null + | Required: (c1 : ("foo" : String)) + | Note that implicit conversions were not tried because the result of an implicit conversion + | must be more specific than (c1 : ("foo" : String)) + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/i17467.scala:17:22 ------------------------------------------------------------ +17 | val e2: e1.type = null // error + | ^^^^ + | Found: Null + | Required: (e1 : MyNonNullable) + | Note that implicit conversions were not tried because the result of an implicit conversion + | must be more specific than (e1 : MyNonNullable) + | + | longer explanation available when compiling with `-explain` +-- [E172] Type Error: tests/neg/i17467.scala:19:26 --------------------------------------------------------------------- +19 | summon[Null <:< "foo"] // error + | ^ + | Cannot prove that Null <:< ("foo" : String). +-- [E007] Type Mismatch Error: tests/neg/i17467.scala:21:23 ------------------------------------------------------------ +21 | val f1: Mod.type = null // error + | ^^^^ + | Found: Null + | Required: Test.Mod.type + | Note that implicit conversions were not tried because the result of an implicit conversion + | must be more specific than Test.Mod.type + | + | longer explanation available when compiling with `-explain` +-- [E083] Type Error: tests/neg/i17467.scala:24:12 --------------------------------------------------------------------- +24 | val g2: g1.type = null // error // error + | ^^^^^^^ + | (g1 : AnyRef) is not a valid singleton type, since it is not an immutable path + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/i17467.scala:24:22 ------------------------------------------------------------ +24 | val g2: g1.type = null // error // error + | ^^^^ + | Found: Null + | Required: (g1 : AnyRef) + | Note that implicit conversions were not tried because the result of an implicit conversion + | must be more specific than (g1 : AnyRef) + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/i17467.scala:36:24 ------------------------------------------------------------ +36 | def me: this.type = null // error + | ^^^^ + | Found: Null + | Required: (Baz.this : Test.Baz) + | Note that implicit conversions were not tried because the result of an implicit conversion + | must be more specific than (Baz.this : Test.Baz) + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i17467.scala b/tests/neg/i17467.scala new file mode 100644 index 000000000000..f8023e74742f --- /dev/null +++ b/tests/neg/i17467.scala @@ -0,0 +1,37 @@ +object Test: + def test(): Unit = + val a1: String = "foo" + val a2: a1.type = null // OK + + val b1: "foo" = null // error + + val c1: "foo" = "foo" + val c2: c1.type = null // error + + type MyNullable = String + val d1: MyNullable = "foo" + val d2: d1.type = null // OK + + type MyNonNullable = Int 
+    val e1: MyNonNullable = 5
+    val e2: e1.type = null // error
+
+    summon[Null <:< "foo"] // error
+
+    val f1: Mod.type = null // error
+
+    var g1: AnyRef = "foo"
+    val g2: g1.type = null // error // error
+
+    val h1: Null = null
+    val h2: h1.type = null
+  end test
+
+  object Mod
+
+  class Bar:
+    def me: this.type = null
+
+  class Baz(val x: String) extends AnyVal:
+    def me: this.type = null // error
+end Test
diff --git a/tests/neg/i17581.check b/tests/neg/i17581.check new file mode 100644 index 000000000000..e21e9fd32019 --- /dev/null +++ b/tests/neg/i17581.check @@ -0,0 +1,7 @@
+-- [E007] Type Mismatch Error: tests/neg/i17581.scala:9:6 --------------------------------------------------------------
+9 | foo(test) // error // was NoSuchMethodException
+  | ^^^^
+  | Found: (test : Test)
+  | Required: Object{def bar: Any}
+  |
+  | longer explanation available when compiling with `-explain`
diff --git a/tests/neg/i17581.scala b/tests/neg/i17581.scala new file mode 100644 index 000000000000..1cd86a9d39cd --- /dev/null +++ b/tests/neg/i17581.scala @@ -0,0 +1,9 @@
+import scala.reflect.Selectable.reflectiveSelectable
+
+class Test
+
+def foo[A <: { def bar: Any }](ob: A) = ob.bar
+
+@main def main =
+  val test = new Test
+  foo(test) // error // was NoSuchMethodException
diff --git a/tests/neg/i17612a.check b/tests/neg/i17612a.check new file mode 100644 index 000000000000..04a5b07b6e33 --- /dev/null +++ b/tests/neg/i17612a.check @@ -0,0 +1,32 @@
+-- Error: tests/neg/i17612a.scala:18:15 --------------------------------------------------------------------------------
+18 | class Derived(x : Int, y: Int, z2: Int) extends Base(x, y + 1, z2): // error // error / for x, y translated to private[this] x field & shadowing var Base.x, Base.y
+   | ^
+   | value x in class Derived shadows field x inherited from class Base
+-- Error: tests/neg/i17612a.scala:18:24 --------------------------------------------------------------------------------
+18 | class Derived(x : Int, y: Int, z2: Int) extends Base(x, y + 1, z2): // error // error / for x, y translated to private[this] x field & shadowing var Base.x, Base.y
+   | ^
+   | value y in class Derived shadows field y inherited from class Base
+-- Error: tests/neg/i17612a.scala:20:2 ---------------------------------------------------------------------------------
+20 | private val shadowed2 = 2 + 2 // error (in Scala 2 this was only a warning, not an error)
+   | ^
+   | value shadowed2 in class Derived shadows field shadowed2 inherited from class Base
+-- Error: tests/neg/i17612a.scala:21:2 ---------------------------------------------------------------------------------
+21 | private[this] val shadowed3 = 3 + 3 // error
+   | ^
+   | value shadowed3 in class Derived shadows field shadowed3 inherited from class Base
+-- Error: tests/neg/i17612a.scala:23:2 ---------------------------------------------------------------------------------
+23 | private val shadowed5 = 5 + 5 // error
+   | ^
+   | value shadowed5 in class Derived shadows field shadowed5 inherited from class Base
+-- Error: tests/neg/i17612a.scala:34:20 --------------------------------------------------------------------------------
+34 | class UnderDerived(x: Int, y: Int, z: Int) extends Derived(x, y, z) // error // error // error
+   | ^
+   | value x in class UnderDerived shadows field x inherited from class Base
+-- Error: tests/neg/i17612a.scala:34:28 --------------------------------------------------------------------------------
+34 | class UnderDerived(x: Int, y: Int, z: Int) extends Derived(x, y, z) // error // error // error
+   | ^
+   | value y in class UnderDerived shadows field y inherited from class Base
+-- Error: tests/neg/i17612a.scala:34:36 --------------------------------------------------------------------------------
+34 | class UnderDerived(x: Int, y: Int, z: Int) extends Derived(x, y, z) // error // error // error
+   | ^
+   | value z in class UnderDerived shadows field z inherited from class Base
diff --git a/tests/neg/i17612a.scala b/tests/neg/i17612a.scala new file mode 100644 index 000000000000..099b528965e1 --- /dev/null +++ b/tests/neg/i17612a.scala @@ -0,0 +1,42 @@
+//> using options -Xfatal-warnings -Xlint:private-shadow -source:3.3
+
+object i17612a:
+  class Base(var x: Int, val y: Int, var z: Int):
+    var shadowed2 = 2
+    val shadowed3 = 3
+    val shadowed4 = 4
+    protected var shadowed5 = 5
+    //var shadowed6 = 6
+
+    val notShadowed = -1
+    private val notShadowed2 = -2
+    //val fatalOverride = 0
+
+    def increment(): Unit =
+      x = x + 1
+
+  class Derived(x : Int, y: Int, z2: Int) extends Base(x, y + 1, z2): // error // error / for x, y translated to private[this] x field & shadowing var Base.x, Base.y
+    private def hello() = 4
+    private val shadowed2 = 2 + 2 // error (in Scala 2 this was only a warning, not an error)
+    private[this] val shadowed3 = 3 + 3 // error
+    //private[Derived] val fatalOverride = 0 // value fatalOverride of type Int has weaker access privileges; it should be public
+    private val shadowed5 = 5 + 5 // error
+    private val notShadowed2 = -4
+    //protected var shadowed6 = 6 + 6 // variable shadowed6 of type Int has weaker access privileges; it should be public
+
+    def inFunctionScope() =
+      val notShadowed = -2 // OK
+      -2
+
+    override def toString =
+      s"x : ${x.toString}, y : ${y.toString}"
+
+  class UnderDerived(x: Int, y: Int, z: Int) extends Derived(x, y, z) // error // error // error
+
+  def main(args: Array[String]) =
+    val derived = new Derived(1, 1, 1)
+    println(derived.toString) // yields x: '1', as expected
+    derived.increment()
+    println(derived.toString) // still x: '1', probably unexpected; for y it never prints the super value, which is less surprising
+    println(derived.shadowed2)
+    println(derived.shadowed3)
\ No newline at end of file
diff --git a/tests/neg/i17612b.check b/tests/neg/i17612b.check new file mode 100644 index 000000000000..75e8b7312833 --- /dev/null +++ b/tests/neg/i17612b.check @@ -0,0 +1,32 @@
+-- Error: tests/neg/i17612b/i17612b.scala:21:15 ------------------------------------------------------------------------
+21 | class Derived(x : Int, x3: Int, y: Int, z2: Int) extends BaseB, BaseC(x3), Base(x, y + 1, z2): // error // error / for x, y translated to private[this] x field & shadowing var Base.x, Base.y
+   | ^
+   | value x in class Derived shadows field x inherited from trait Base
+-- Error: tests/neg/i17612b/i17612b.scala:21:33 ------------------------------------------------------------------------
+21 | class Derived(x : Int, x3: Int, y: Int, z2: Int) extends BaseB, BaseC(x3), Base(x, y + 1, z2): // error // error / for x, y translated to private[this] x field & shadowing var Base.x, Base.y
+   | ^
+   | value y in class Derived shadows field y inherited from trait Base
+-- Error: tests/neg/i17612b/i17612b.scala:23:2 -------------------------------------------------------------------------
+23 | private val shadowed2 = 2 + 2 // error (in Scala 2 this was only a warning, not an error)
+   | ^
+   | value shadowed2 in class Derived shadows field shadowed2 inherited from trait Base
+-- Error: tests/neg/i17612b/i17612b.scala:24:2 -------------------------------------------------------------------------
+24 | private[this] val shadowed3 = 3 + 3 // error
+   | ^
+   | value shadowed3 in class Derived shadows field shadowed3 inherited from trait Base
+-- Error: tests/neg/i17612b/i17612b.scala:26:2 -------------------------------------------------------------------------
+26 | private val shadowed5 = 5 + 5 // error
+   | ^
+   | value shadowed5 in class Derived shadows field shadowed5 inherited from trait Base
+-- Error: tests/neg/i17612b/i17612b.scala:41:20 ------------------------------------------------------------------------
+41 | class UnderDerived(x: Int, y: Int, z: Int) extends Derived(x, 1, y, z) // error // error // error
+   | ^
+   | value x in class UnderDerived shadows field x inherited from trait Base
+-- Error: tests/neg/i17612b/i17612b.scala:41:28 ------------------------------------------------------------------------
+41 | class UnderDerived(x: Int, y: Int, z: Int) extends Derived(x, 1, y, z) // error // error // error
+   | ^
+   | value y in class UnderDerived shadows field y inherited from trait Base
+-- Error: tests/neg/i17612b/i17612b.scala:41:36 ------------------------------------------------------------------------
+41 | class UnderDerived(x: Int, y: Int, z: Int) extends Derived(x, 1, y, z) // error // error // error
+   | ^
+   | value z in class UnderDerived shadows field z inherited from trait Base
diff --git a/tests/neg/i17612b/i17612b.scala b/tests/neg/i17612b/i17612b.scala new file mode 100644 index 000000000000..d16feb240c2a --- /dev/null +++ b/tests/neg/i17612b/i17612b.scala @@ -0,0 +1,44 @@
+//> using options -Xfatal-warnings -Xlint:private-shadow -source:3.3
+
+object i17612b:
+
+  trait Base(var x: Int, val y: Int, var z: Int):
+    var shadowed2 = 2
+    val shadowed3 = 3
+    val shadowed4 = 4
+    protected var shadowed5 = 5
+
+    val notShadowed = -1
+    private val notShadowed2 = -2
+    val notShadowedbyLambda = -2
+
+    def increment(): Unit =
+      x = x + 1
+
+  trait BaseB
+  trait BaseC(var x2: Int)
+
+  class Derived(x : Int, x3: Int, y: Int, z2: Int) extends BaseB, BaseC(x3), Base(x, y + 1, z2): // error // error / for x, y translated to private[this] x field & shadowing var Base.x, Base.y
+    private def hello() = 4
+    private val shadowed2 = 2 + 2 // error (in Scala 2 this was only a warning, not an error)
+    private[this] val shadowed3 = 3 + 3 // error
+
+    private val shadowed5 = 5 + 5 // error
+    private val notShadowed2 = -4
+
+    val lambda: Int => Int => Int =
+      notShadowedbyLambda =>
+        notShadowedbyLambda =>
+          notShadowedbyLambda * 2
+
+    def inFunctionScope() =
+      val notShadowed = -2 // OK
+      -2
+
+    override def toString =
+      s"x : ${x.toString}, y : ${y.toString}"
+
+  class UnderDerived(x: Int, y: Int, z: Int) extends Derived(x, 1, y, z) // error // error // error
+
+  def main(args: Array[String]) =
+    val derived = new Derived(1, 1, 1, 1)
diff --git a/tests/neg/i17612b/importTry.scala b/tests/neg/i17612b/importTry.scala new file mode 100644 index 000000000000..879f40ace356 --- /dev/null +++ b/tests/neg/i17612b/importTry.scala @@ -0,0 +1,5 @@
+object importTry:
+
+  trait ImTrait
+
+  class ImClass
\ No newline at end of file
diff --git a/tests/neg/i17613a.check b/tests/neg/i17613a.check new file mode 100644 index 000000000000..4721b786c82d --- /dev/null +++ b/tests/neg/i17613a.check @@ -0,0 +1,28 @@
+-- Error: tests/neg/i17613a.scala:8:13 ---------------------------------------------------------------------------------
+8 | def foobar[D](in: D) = in.toString // error method parameter shadows some other type
+  | ^
+  | Type
parameter D for method foobar shadows the type defined by trait D in class B +-- Error: tests/neg/i17613a.scala:9:13 --------------------------------------------------------------------------------- +9 | type MySeq[D] = Seq[D] // error type member's parameter shadows some other type + | ^ + | Type parameter D for type MySeq shadows the type defined by trait D in class B +-- Error: tests/neg/i17613a.scala:11:12 -------------------------------------------------------------------------------- +11 | class Foo[T](t: T): // error class parameter shadows some other type + | ^ + | Type parameter T for class Foo shadows the type defined by type T in class B +-- Error: tests/neg/i17613a.scala:12:11 -------------------------------------------------------------------------------- +12 | def bar[T](w: T) = w.toString // error a type parameter shadows another type parameter + | ^ + | Type parameter T for method bar shadows the type defined by type T in class Foo +-- Error: tests/neg/i17613a.scala:15:12 -------------------------------------------------------------------------------- +15 | class C[M[List[_]]] // error + | ^^^^^^^ + | Type parameter List for class C shadows the type defined by type List in package scala +-- Error: tests/neg/i17613a.scala:16:11 -------------------------------------------------------------------------------- +16 | type E[M[List[_]]] = Int // error + | ^^^^^^^ + | Type parameter List for type E shadows the type defined by type List in package scala +-- Error: tests/neg/i17613a.scala:17:14 -------------------------------------------------------------------------------- +17 | def foo[N[M[List[_]]]] = ??? // error + | ^^^^^^^ + | Type parameter List for method foo shadows the type defined by type List in package scala diff --git a/tests/neg/i17613a.scala b/tests/neg/i17613a.scala new file mode 100644 index 000000000000..d0e413098cec --- /dev/null +++ b/tests/neg/i17613a.scala @@ -0,0 +1,23 @@ +//> using options -Xfatal-warnings -Xlint:type-parameter-shadow + +object i17613a: + class B: + type T = Int + trait D + + def foobar[D](in: D) = in.toString // error method parameter shadows some other type + type MySeq[D] = Seq[D] // error type member's parameter shadows some other type + + class Foo[T](t: T): // error class parameter shadows some other type + def bar[T](w: T) = w.toString // error a type parameter shadows another type parameter + + // even deeply nested... + class C[M[List[_]]] // error + type E[M[List[_]]] = Int // error + def foo[N[M[List[_]]]] = ??? // error + + // ...but not between type parameters in the same list + class F[A, M[N[A]]] + type G[A, M[L[A]]] = Int + def bar[A, N[M[L[A]]]] = ??? 
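+  // unlike C, E and foo above, nothing here shadows a type from an enclosing scope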
+ def main(args: Array[String]) = println("Test for type parameter shadow") diff --git a/tests/neg/i17613b.check b/tests/neg/i17613b.check new file mode 100644 index 000000000000..d8cf8618fb27 --- /dev/null +++ b/tests/neg/i17613b.check @@ -0,0 +1,56 @@ +-- Error: tests/neg/i17613b/i17613b.scala:9:13 ------------------------------------------------------------------------- +9 | def foobar[ImTrait](in: D) = in.toString // error + | ^^^^^^^ + | Type parameter ImTrait for method foobar shadows the type defined by trait ImTrait in object importTry +-- Error: tests/neg/i17613b/i17613b.scala:10:13 ------------------------------------------------------------------------ +10 | type MySeq[ImTrait] = Seq[D] // error + | ^^^^^^^ + | Type parameter ImTrait for type MySeq shadows the type defined by trait ImTrait in object importTry +-- Error: tests/neg/i17613b/i17613b.scala:12:14 ------------------------------------------------------------------------ +12 | def foobar2[ImClass](in: D) = in.toString // error + | ^^^^^^^ + | Type parameter ImClass for method foobar2 shadows the type defined by class ImClass in object importTry +-- Error: tests/neg/i17613b/i17613b.scala:13:14 ------------------------------------------------------------------------ +13 | type MySeq2[ImClass] = Seq[D] // error + | ^^^^^^^ + | Type parameter ImClass for type MySeq2 shadows the type defined by class ImClass in object importTry +-- Error: tests/neg/i17613b/i17613b.scala:16:24 ------------------------------------------------------------------------ +16 | type TypeLambda[A] = [ImTrait] =>> Map[ImTrait, B] // error + | ^^^^^^^ + | Type parameter ImTrait for type TypeLambda shadows the type defined by trait ImTrait in object importTry +-- Error: tests/neg/i17613b/i17613b.scala:17:21 ------------------------------------------------------------------------ +17 | type PolyFun[A] = [ImTrait] => ImTrait => B // error + | ^^^^^^^ + | Type parameter ImTrait for type PolyFun shadows the type defined by trait ImTrait in object importTry +-- Error: tests/neg/i17613b/i17613b.scala:23:12 ------------------------------------------------------------------------ +23 | class Foo[T](t: T): // error class parameter shadows some other type + | ^ + | Type parameter T for class Foo shadows the type defined by type T in class B +-- Error: tests/neg/i17613b/i17613b.scala:27:15 ------------------------------------------------------------------------ +27 | def intType[List1](x: T) = x.toString() // error + | ^^^^^ + | Type parameter List1 for method intType shadows an explicitly renamed type : List1 +-- Error: tests/neg/i17613b/i17613b.scala:32:10 ------------------------------------------------------------------------ +32 | given [Int]: Ordering[Int]() // error + | ^^^ + | Type parameter Int for method given_Ordering_Int shadows the type defined by class Int in package scala +-- Error: tests/neg/i17613b/i17613b.scala:34:12 ------------------------------------------------------------------------ +34 | class C[M[List[_]]] // error List not renamed here + | ^^^^^^^ + | Type parameter List for class C shadows the type defined by type List in package scala +-- Error: tests/neg/i17613b/i17613b.scala:35:11 ------------------------------------------------------------------------ +35 | type E[M[Int[_]]] = Int // error + | ^^^^^^ + | Type parameter Int for type E shadows the type defined by class Int in package scala +-- Error: tests/neg/i17613b/i17613b.scala:37:14 ------------------------------------------------------------------------ +37 | def 
foo[N[M[List[_]]]] = // error + | ^^^^^^^ + | Type parameter List for method foo shadows the type defined by type List in package scala +-- Error: tests/neg/i17613b/i17613b.scala:40:11 ------------------------------------------------------------------------ +40 | type Z[ImClassR] = Int // error + | ^^^^^^^^ + | Type parameter ImClassR for type Z shadows an explicitly renamed type : ImClassR +-- Error: tests/neg/i17613b/i17613b.scala:41:18 ------------------------------------------------------------------------ +41 | class InnerCl[ImClassR] // error + | ^^^^^^^^ + | Type parameter ImClassR for class InnerCl shadows an explicitly renamed type : ImClassR diff --git a/tests/neg/i17613b/i17613b.scala b/tests/neg/i17613b/i17613b.scala new file mode 100644 index 000000000000..b0c4f11b949c --- /dev/null +++ b/tests/neg/i17613b/i17613b.scala @@ -0,0 +1,44 @@ +//> using options -Xfatal-warnings -Xlint:type-parameter-shadow + +object i17613b: + import importTry._ + class B: + type T = Int + trait D + + def foobar[ImTrait](in: D) = in.toString // error + type MySeq[ImTrait] = Seq[D] // error + + def foobar2[ImClass](in: D) = in.toString // error + type MySeq2[ImClass] = Seq[D] // error + + given [A]: Ordering[Int]() + type TypeLambda[A] = [ImTrait] =>> Map[ImTrait, B] // error + type PolyFun[A] = [ImTrait] => ImTrait => B // error + type MatchType[A] = A match { + case String => Int + case ImTrait => Boolean + } + + class Foo[T](t: T): // error class parameter shadows some other type + import scala.collection.immutable.{List => List1} + def bar[List](w: T) = w.toString // no warning due to the explicit import renaming + + def intType[List1](x: T) = x.toString() // error + + type Y[List] = Int // no warning + + given [A]: Ordering[A]() + given [Int]: Ordering[Int]() // error + + class C[M[List[_]]] // error List not renamed here + type E[M[Int[_]]] = Int // error + + def foo[N[M[List[_]]]] = // error + import importTry.{ImClass => ImClassR} + def inner[ImClass] = // no warning + type Z[ImClassR] = Int // error + class InnerCl[ImClassR] // error + 5 + + def main(args: Array[String]) = println("Test for type parameter shadow") \ No newline at end of file diff --git a/tests/neg/i17613b/importTry.scala b/tests/neg/i17613b/importTry.scala new file mode 100644 index 000000000000..879f40ace356 --- /dev/null +++ b/tests/neg/i17613b/importTry.scala @@ -0,0 +1,5 @@ +object importTry: + + trait ImTrait + + class ImClass \ No newline at end of file diff --git a/tests/neg/i17738-infix-object/NestedInfixObject.scala b/tests/neg/i17738-infix-object/NestedInfixObject.scala new file mode 100644 index 000000000000..703cbef9e2ba --- /dev/null +++ b/tests/neg/i17738-infix-object/NestedInfixObject.scala @@ -0,0 +1,2 @@ +object ToplevelObject: + infix object NestedInfixObject // error diff --git a/tests/neg/i17738-infix-object/ToplevelInfixObject.scala b/tests/neg/i17738-infix-object/ToplevelInfixObject.scala new file mode 100644 index 000000000000..1ee13265b06c --- /dev/null +++ b/tests/neg/i17738-infix-object/ToplevelInfixObject.scala @@ -0,0 +1 @@ +infix object ToplevelInfixObject // error diff --git a/tests/neg/i17738-toplevel-infix.check b/tests/neg/i17738-toplevel-infix.check new file mode 100644 index 000000000000..a4946e7afb97 --- /dev/null +++ b/tests/neg/i17738-toplevel-infix.check @@ -0,0 +1,45 @@ +-- [E156] Syntax Error: tests/neg/i17738-toplevel-infix.scala:14:10 ---------------------------------------------------- +14 |infix val toplevelVal = ??? 
// error + | ^ + | Modifier infix is not allowed for this definition + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | A top-level value cannot be infix. + -------------------------------------------------------------------------------------------------------------------- +-- [E156] Syntax Error: tests/neg/i17738-toplevel-infix.scala:15:10 ---------------------------------------------------- +15 |infix var toplevelVar = ??? // error + | ^ + | Modifier infix is not allowed for this definition + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | A top-level variable cannot be infix. + -------------------------------------------------------------------------------------------------------------------- +-- [E156] Syntax Error: tests/neg/i17738-toplevel-infix.scala:16:10 ---------------------------------------------------- +16 |infix def toplevelDef = ??? // error + | ^ + | Modifier infix is not allowed for this definition + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | A top-level method cannot be infix. + -------------------------------------------------------------------------------------------------------------------- +-- [E156] Syntax Error: tests/neg/i17738-toplevel-infix.scala:17:12 ---------------------------------------------------- +17 |infix given toplevelGiven: Int = ??? // error + | ^ + | Modifier infix is not allowed for this definition + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | A top-level given instance cannot be infix. + -------------------------------------------------------------------------------------------------------------------- +-- [E156] Syntax Error: tests/neg/i17738-toplevel-infix.scala:18:19 ---------------------------------------------------- +18 |infix implicit val toplevelImplicit: Int = ??? // error + | ^ + | Modifier infix is not allowed for this definition + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | A top-level value cannot be infix. 
+ -------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i17738-toplevel-infix.scala b/tests/neg/i17738-toplevel-infix.scala new file mode 100644 index 000000000000..b57ac0bb150c --- /dev/null +++ b/tests/neg/i17738-toplevel-infix.scala @@ -0,0 +1,18 @@ +//> using options -explain +infix type A[b, a] = Nothing + +infix type B[b, a] = b match { + case Int => a +} + +infix class C[A, B] +infix trait D[A, B] + +extension (x: Boolean) + infix def or (y: => Boolean) = x || y + +infix val toplevelVal = ??? // error +infix var toplevelVar = ??? // error +infix def toplevelDef = ??? // error +infix given toplevelGiven: Int = ??? // error +infix implicit val toplevelImplicit: Int = ??? // error diff --git a/tests/neg/i17944.check b/tests/neg/i17944.check new file mode 100644 index 000000000000..80dfaac8c4c8 --- /dev/null +++ b/tests/neg/i17944.check @@ -0,0 +1,46 @@ +-- [E172] Type Error: tests/neg/i17944.scala:40:87 --------------------------------------------------------------------- +40 | val s = Selector.selectorInst[("s" ->> String) *: ("i" ->> Int) *: EmptyTuple, "i"] // error + | ^ + |No singleton value available for Tuple.Elem[test.FindField[(("s" : String) ->> String, ("i" : String) ->> Int), ("i" : String)], (1 : Int)]; eligible singleton types for `ValueOf` synthesis include literals and stable paths. + | + |Note: a match type could not be fully reduced: + | + | trying to reduce Tuple.Elem[test.FindField[(("s" : String) ->> String, ("i" : String) ->> Int), ("i" : String)], (1 : Int)] + | trying to reduce test.FindField[(("s" : String) ->> String, ("i" : String) ->> Int), ("i" : String)] + | trying to reduce test.FindField0[(("s" : String) ->> String, ("i" : String) ->> Int), ("i" : String), (0 : Int)] + | failed since selector (("s" : String) ->> String, ("i" : String) ->> Int) + | does not match case (("i" : String) ->> f) *: _ => (f, (0 : Int)) + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case _ *: t => test.FindField0[t, ("i" : String), scala.compiletime.ops.int.S[(0 : Int)]] + | trying to reduce test.FindField[(("s" : String) ->> String, ("i" : String) ->> Int), ("i" : String)] + | trying to reduce test.FindField0[(("s" : String) ->> String, ("i" : String) ->> Int), ("i" : String), (0 : Int)] + | failed since selector (("s" : String) ->> String, ("i" : String) ->> Int) + | does not match case (("i" : String) ->> f) *: _ => (f, (0 : Int)) + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case _ *: t => test.FindField0[t, ("i" : String), scala.compiletime.ops.int.S[(0 : Int)]] + | trying to reduce test.FindField0[(("s" : String) ->> String, ("i" : String) ->> Int), ("i" : String), (0 : Int)] + | failed since selector (("s" : String) ->> String, ("i" : String) ->> Int) + | does not match case (("i" : String) ->> f) *: _ => (f, (0 : Int)) + | and cannot be shown to be disjoint from it either. 
+ | Therefore, reduction cannot advance to the remaining case + | + | case _ *: t => test.FindField0[t, ("i" : String), scala.compiletime.ops.int.S[(0 : Int)]] + | trying to reduce test.FindField[(("s" : String) ->> String, ("i" : String) ->> Int), ("i" : String)] + | trying to reduce test.FindField0[(("s" : String) ->> String, ("i" : String) ->> Int), ("i" : String), (0 : Int)] + | failed since selector (("s" : String) ->> String, ("i" : String) ->> Int) + | does not match case (("i" : String) ->> f) *: _ => (f, (0 : Int)) + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case _ *: t => test.FindField0[t, ("i" : String), scala.compiletime.ops.int.S[(0 : Int)]] + | trying to reduce test.FindField0[(("s" : String) ->> String, ("i" : String) ->> Int), ("i" : String), (0 : Int)] + | failed since selector (("s" : String) ->> String, ("i" : String) ->> Int) + | does not match case (("i" : String) ->> f) *: _ => (f, (0 : Int)) + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case _ *: t => test.FindField0[t, ("i" : String), scala.compiletime.ops.int.S[(0 : Int)]] diff --git a/tests/neg/i17944.scala b/tests/neg/i17944.scala new file mode 100644 index 000000000000..214dfaebbfcf --- /dev/null +++ b/tests/neg/i17944.scala @@ -0,0 +1,44 @@ +package test { + + import types._ + + object types { + opaque type ->>[K, V] = V + extension [K <: Singleton](k: K) def ->>[V](v: V): K ->> V = v.asInstanceOf[K ->> V] + } + + type FindField[T <: Tuple, K] = FindField0[T, K, 0] + + type FindField0[T <: Tuple, K, I <: Int] <: (Any, Int) = T match { + case (K ->> f) *: _ => (f, I) + case _ *: t => FindField0[t, K, compiletime.ops.int.S[I]] + } + + trait Selector[T, Key, Out] { + def apply(t: T): Out + } + + object Selector { + inline def selectorInst[T <: Tuple, K]( + using idx: ValueOf[Tuple.Elem[FindField[T, K], 1]], + ): Selector[T, K, Tuple.Head[FindField[T, K]]] = + new Selector[T, K, Tuple.Head[FindField[T, K]]] { + def apply(t: T): Tuple.Head[FindField[T, K]] = + val i: Int = idx.value.asInstanceOf[Int] + t.productElement(i).asInstanceOf[Tuple.Head[FindField[T, K]]] + } + } + +} + +object Test { + def main(args: Array[String]): Unit = { + import test._ + import test.types._ + + val t = ("s" ->> "foo") *: ("i" ->> 3) *: EmptyTuple + val s = Selector.selectorInst[("s" ->> String) *: ("i" ->> Int) *: EmptyTuple, "i"] // error + val r = s(t) + println(r) + } +} diff --git a/tests/neg/i17981.check b/tests/neg/i17981.check new file mode 100644 index 000000000000..0ef7225d857e --- /dev/null +++ b/tests/neg/i17981.check @@ -0,0 +1,6 @@ +-- [E015] Syntax Error: tests/neg/i17981.scala:1:6 --------------------------------------------------------------------- +1 |final final case class Foo() // error + | ^^^^^ + | Repeated modifier final + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i17981.scala b/tests/neg/i17981.scala new file mode 100644 index 000000000000..d2eaa0cfa7f2 --- /dev/null +++ b/tests/neg/i17981.scala @@ -0,0 +1 @@ +final final case class Foo() // error diff --git a/tests/neg/i18020.scala b/tests/neg/i18020.scala new file mode 100644 index 000000000000..ffeae9b66128 --- /dev/null +++ b/tests/neg/i18020.scala @@ -0,0 +1,62 @@ +import _root_.scala.StringContext // ok + +class Test : + val Foo = 1 + def foo0: Unit = + val x = new _root_.scala.StringContext() // ok + val y: Option[_root_.scala.Serializable] = None // 
ok + val z: _root_.scala.None.type = None + val w = _root_.scala.None + val (_root_, other) = (1, 2) // error + val (Test.this.Foo, 1) = ??? + ??? match + case (Test.this.Foo, 1) => () + +def foo3 = + val _root_ = "abc" // error + +def foo1: Unit = + val _root_: String = "abc" // error + // _root_: is, technically, a legal name + // so then it tries to construct the infix op pattern + // "_root_ String .." and then throws in a null when it fails + // to find an argument + // then Typer rejects "String" as an infix extractor (like ::) + // which is the second error + +def foo2: Unit = // was: error, recursive value _root_ needs type + val _root_ : String = "abc" // error + +// i17757 +def fooVal: Unit = + val _root_ = "abc" // error + println(_root_.length) // error + println(_root_) // error + +def barVal: Unit = + _root_ // error + _root_.scala // error + println(_root_) // error + println(_root_.scala) // error + +// i18050 +package p { + package _root_ { // error + object X // error + } +} + +// scala/bug#12508 +package _root_ { // ok + class C { + val _root_ = 42 // error + } +} +package _root_.p { // ok + class C +} + +// from ScalaPB +def fromScalaPb(x: Option[String]) = x match + case _root_.scala.Some(s) => s + case _ => "" diff --git a/tests/neg/i18035.scala b/tests/neg/i18035.scala new file mode 100644 index 000000000000..ba2373481c85 --- /dev/null +++ b/tests/neg/i18035.scala @@ -0,0 +1,15 @@ +import reflect.Selectable.reflectiveSelectable + +class A[+Cov](f: Cov => Unit) { + def foo: { def apply(c: Cov): Unit } = // error + f +} + +val aForString = new A[String](_.length) +// => val aForString: A[String] + +val aForStringIsAForAny: A[Any] = aForString +// => val aForStringIsAForAny: A[Any] + +val _ = aForStringIsAForAny.foo(123) +// => java.lang.ClassCastException: class java.lang.Integer cannot be cast to class java.lang.String (java.lang.Integer and java.lang.String are in module java.base of loader 'bootstrap') diff --git a/tests/neg/i18047.scala b/tests/neg/i18047.scala new file mode 100644 index 000000000000..561aabb3342f --- /dev/null +++ b/tests/neg/i18047.scala @@ -0,0 +1,15 @@ +def foo(x: Any { def foo: Int }): Any { val foo: Int } = x // error +def foo1(x: Any { val foo: Int }): Any { def foo: Int } = x // ok +def foo2(x: Any { val foo: Int }): Any { val foo: Int } = x // ok +def foo3(x: Any { def foo: Int }): Any { def foo: Int } = x // ok + +class Foo: + val foo: Int = 1 +class Foo1: + def foo: Int = 1 +class Foo2: + var foo: Int = 1 + +def foo4(x: Foo): Any { val foo: Int } = x // ok +def foo4(x: Foo1): Any { val foo: Int } = x // error +def foo4(x: Foo2): Any { val foo: Int } = x // error diff --git a/tests/neg/i18058.check b/tests/neg/i18058.check new file mode 100644 index 000000000000..f610d9e7abb1 --- /dev/null +++ b/tests/neg/i18058.check @@ -0,0 +1,4 @@ +-- Error: tests/neg/i18058.scala:4:21 ---------------------------------------------------------------------------------- +4 |type G = (f: ? <: F) => f.A // error + | ^ + | invalid new prefix <: F cannot replace f.type in type f.A diff --git a/tests/neg/i18058.scala b/tests/neg/i18058.scala new file mode 100644 index 000000000000..b032f810d2fd --- /dev/null +++ b/tests/neg/i18058.scala @@ -0,0 +1,4 @@ +trait F: + type A + +type G = (f: ? 
<: F) => f.A // error diff --git a/tests/neg/i18082.check b/tests/neg/i18082.check new file mode 100644 index 000000000000..46fc1909c4e1 --- /dev/null +++ b/tests/neg/i18082.check @@ -0,0 +1,20 @@ +-- [E122] Syntax Error: tests/neg/i18082.scala:5:23 -------------------------------------------------------------------- +5 |import O.{a as b, b as b} // error + | ^ + | b is imported twice on the same import line. +-- [E122] Syntax Error: tests/neg/i18082.scala:6:18 -------------------------------------------------------------------- +6 |import O.{a as b, b} // error + | ^ + | b is imported twice on the same import line. +-- [E122] Syntax Error: tests/neg/i18082.scala:7:13 -------------------------------------------------------------------- +7 |import O.{a, a} // error + | ^ + | a is imported twice on the same import line. +-- [E185] Syntax Error: tests/neg/i18082.scala:10:18 ------------------------------------------------------------------- +10 |import O.{a as _, a as _} // error + | ^ + | a is unimported twice on the same import line. +-- [E185] Syntax Error: tests/neg/i18082.scala:11:13 ------------------------------------------------------------------- +11 |import O.{a, a as _} // error + | ^ + | a is unimported and imported on the same import line. diff --git a/tests/neg/i18082.scala b/tests/neg/i18082.scala new file mode 100644 index 000000000000..4d97c389181d --- /dev/null +++ b/tests/neg/i18082.scala @@ -0,0 +1,12 @@ +object O { val a = 1; val b = 2 } + +import O.{a as b, a} // OK +import O.{a as b, b as a} // OK +import O.{a as b, b as b} // error +import O.{a as b, b} // error +import O.{a, a} // error + +import O.{a as _, b as _} // ok +import O.{a as _, a as _} // error +import O.{a, a as _} // error + diff --git a/tests/neg/i18090.check b/tests/neg/i18090.check new file mode 100644 index 000000000000..fe61a3337acf --- /dev/null +++ b/tests/neg/i18090.check @@ -0,0 +1,42 @@ +-- [E188] Syntax Error: tests/neg/i18090.scala:2:18 -------------------------------------------------------------------- +2 |def foo(using xs: Int*) = xs // error + | ^^^^ + | repeated parameters are not allowed in a using clause + | + | longer explanation available when compiling with `-explain` +-- [E188] Syntax Error: tests/neg/i18090.scala:3:27 -------------------------------------------------------------------- +3 |def foo5(using d: Int, xs: Int*) = xs // error + | ^^^^ + | repeated parameters are not allowed in a using clause + | + | longer explanation available when compiling with `-explain` +-- [E188] Syntax Error: tests/neg/i18090.scala:4:22 -------------------------------------------------------------------- +4 |def foo2(implicit xs: Int*) = xs // error + | ^^^^ + | repeated parameters are not allowed in a implicit clause + | + | longer explanation available when compiling with `-explain` +-- [E188] Syntax Error: tests/neg/i18090.scala:5:35 -------------------------------------------------------------------- +5 |def foo3(u: Int)(using d: Int, xs: Int*) = xs // error + | ^^^^ + | repeated parameters are not allowed in a using clause + | + | longer explanation available when compiling with `-explain` +-- [E188] Syntax Error: tests/neg/i18090.scala:6:38 -------------------------------------------------------------------- +6 |def foo4(u: Int)(implicit d: Int, xs: Int*) = xs // error + | ^^^^ + | repeated parameters are not allowed in a implicit clause + | + | longer explanation available when compiling with `-explain` +-- [E188] Syntax Error: tests/neg/i18090.scala:9:20 
-------------------------------------------------------------------- +9 | def bar(using xs: Float*) = ??? // error + | ^^^^^^ + | repeated parameters are not allowed in a using clause + | + | longer explanation available when compiling with `-explain` +-- [E188] Syntax Error: tests/neg/i18090.scala:10:33 ------------------------------------------------------------------- +10 | def bar2(using d: Boolean, xs: Float*) = ??? // error + | ^^^^^^ + | repeated parameters are not allowed in a using clause + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i18090.scala b/tests/neg/i18090.scala new file mode 100644 index 000000000000..05c0df65c3b1 --- /dev/null +++ b/tests/neg/i18090.scala @@ -0,0 +1,10 @@ + +def foo(using xs: Int*) = xs // error +def foo5(using d: Int, xs: Int*) = xs // error +def foo2(implicit xs: Int*) = xs // error +def foo3(u: Int)(using d: Int, xs: Int*) = xs // error +def foo4(u: Int)(implicit d: Int, xs: Int*) = xs // error + +extension (i: Int) + def bar(using xs: Float*) = ??? // error + def bar2(using d: Boolean, xs: Float*) = ??? // error diff --git a/tests/neg/i18122.check b/tests/neg/i18122.check new file mode 100644 index 000000000000..0d08dc33c52a --- /dev/null +++ b/tests/neg/i18122.check @@ -0,0 +1,52 @@ +-- Error: tests/neg/i18122.scala:10:16 --------------------------------------------------------------------------------- +10 | foo1(y = 1, 2, x = 3) // error: positional after named + | ^ + | positional after named argument +-- Error: tests/neg/i18122.scala:11:16 --------------------------------------------------------------------------------- +11 | foo2(y = 1, 2, x = 3) // error: positional after named + | ^ + | positional after named argument +-- Error: tests/neg/i18122.scala:12:16 --------------------------------------------------------------------------------- +12 | foo1(y = 1, 2, z = 3) // error: positional after named + | ^ + | positional after named argument +-- Error: tests/neg/i18122.scala:13:16 --------------------------------------------------------------------------------- +13 | foo2(y = 1, 2, z = 3) // error: positional after named + | ^ + | positional after named argument +-- Error: tests/neg/i18122.scala:14:16 --------------------------------------------------------------------------------- +14 | foo1(y = 1, 2) // error: positional after named + | ^ + | positional after named argument +-- Error: tests/neg/i18122.scala:15:16 --------------------------------------------------------------------------------- +15 | foo2(y = 1, 2) // error: positional after named + | ^ + | positional after named argument +-- [E171] Type Error: tests/neg/i18122.scala:17:8 ---------------------------------------------------------------------- +17 | bar1() // error: missing arg + | ^^^^^^ + | missing argument for parameter x of method bar1 in object Test: (x: Int, ys: Int*): Unit +-- [E171] Type Error: tests/neg/i18122.scala:23:8 ---------------------------------------------------------------------- +23 | bar1(ys = 1) // error: missing arg + | ^^^^^^^^^^^^ + | missing argument for parameter x of method bar1 in object Test: (x: Int, ys: Int*): Unit +-- Error: tests/neg/i18122.scala:43:16 --------------------------------------------------------------------------------- +43 | bar1(x = 1, 2, ys = 3) // error: positional after named + | ^ + | positional after named argument +-- Error: tests/neg/i18122.scala:44:18 --------------------------------------------------------------------------------- +44 | bar1(1, 2, ys = 3) // error: parameter ys 
is already instantiated + | ^^^^^^ + | parameter ys of method bar1 in object Test: (x: Int, ys: Int*): Unit is already instantiated +-- Error: tests/neg/i18122.scala:45:16 --------------------------------------------------------------------------------- +45 | bar2(x = 1, 2, ys = 3) // error: positional after named + | ^ + | positional after named argument +-- Error: tests/neg/i18122.scala:46:17 --------------------------------------------------------------------------------- +46 | bar1(ys = 1, 2, x = 3) // error: positional after named + | ^ + | positional after named argument +-- Error: tests/neg/i18122.scala:47:17 --------------------------------------------------------------------------------- +47 | bar2(ys = 1, 2, x = 3) // error: positional after named + | ^ + | positional after named argument diff --git a/tests/neg/i18122.scala b/tests/neg/i18122.scala new file mode 100644 index 000000000000..ceb6275af333 --- /dev/null +++ b/tests/neg/i18122.scala @@ -0,0 +1,49 @@ +object Test { + def foo1(x: Int, y: Int, z: Int) = println((x, y, z)) + def foo2(x: Int = 0, y: Int, z: Int) = println((x, y, z)) + def bar1(x: Int, ys: Int*) = println((x, ys)) + def bar2(x: Int = 0, ys: Int*) = println((x, ys)) + + def main(args: Array[String]) = { + foo1(1, y = 2, 3) + foo2(1, y = 2, 3) + foo1(y = 1, 2, x = 3) // error: positional after named + foo2(y = 1, 2, x = 3) // error: positional after named + foo1(y = 1, 2, z = 3) // error: positional after named + foo2(y = 1, 2, z = 3) // error: positional after named + foo1(y = 1, 2) // error: positional after named + foo2(y = 1, 2) // error: positional after named + + bar1() // error: missing arg + bar2() + bar1(1) + bar2(1) + bar1(x = 1) + bar2(x = 1) + bar1(ys = 1) // error: missing arg + bar2(ys = 1) + bar1(1, 2) + bar2(1, 2) + bar1(1, ys = 2) + bar2(1, ys = 2) + bar1(x = 1, 2) + bar2(x = 1, 2) + bar1(x = 1, ys = 2) + bar2(x = 1, ys = 2) + bar1(ys = 1, x = 2) + bar2(ys = 1, x = 2) + bar1(1, 2, 3) + bar2(1, 2, 3) + bar1(1, ys = 2, 3) + bar2(1, ys = 2, 3) + bar1(x = 1, 2, 3) + bar2(x = 1, 2, 3) + bar1(x = 1, ys = 2, 3) + bar2(x = 1, ys = 2, 3) + bar1(x = 1, 2, ys = 3) // error: positional after named + bar1(1, 2, ys = 3) // error: parameter ys is already instantiated + bar2(x = 1, 2, ys = 3) // error: positional after named + bar1(ys = 1, 2, x = 3) // error: positional after named + bar2(ys = 1, 2, x = 3) // error: positional after named + } +} \ No newline at end of file diff --git a/tests/neg/i18171.scala b/tests/neg/i18171.scala new file mode 100644 index 000000000000..d269f70beb30 --- /dev/null +++ b/tests/neg/i18171.scala @@ -0,0 +1,10 @@ +type BAZ[T] = T match + case Any => DFVal[BAZREC[T]] + +type BAZREC[T] = T match + case NonEmptyTuple => Tuple.Map[T, BAZ] + +trait DFVal[T] + +def foo(relIdx: BAZ[Any]): Unit = + relIdx.bar // error diff --git a/tests/neg/i18188.check b/tests/neg/i18188.check new file mode 100644 index 000000000000..46fe03fbe15c --- /dev/null +++ b/tests/neg/i18188.check @@ -0,0 +1,11 @@ +-- [E081] Type Error: tests/neg/i18188.scala:3:39 ---------------------------------------------------------------------- +3 |class A1[T](action: A1[T] ?=> String = "") // error + | ^ + | Missing parameter type + | + | I could not infer the type of the parameter contextual$1 + | in expanded function: + | contextual$1 ?=> "" + | What I could infer was: dotty.tools.dotc.typer.A1[] + | Expected type for the whole anonymous function: + | (dotty.tools.dotc.typer.A1[]) ?=> String diff --git a/tests/neg/i18188.scala b/tests/neg/i18188.scala new file mode 
100644 index 000000000000..5e7b34a73c37 --- /dev/null +++ b/tests/neg/i18188.scala @@ -0,0 +1,3 @@ +package dotty.tools.dotc.typer + +class A1[T](action: A1[T] ?=> String = "") // error \ No newline at end of file diff --git a/tests/neg/i18244.scala b/tests/neg/i18244.scala new file mode 100644 index 000000000000..c7059f1db401 --- /dev/null +++ b/tests/neg/i18244.scala @@ -0,0 +1,12 @@ +import scala.annotation.* + +class A: + def foo: Int = 1 +class B extends A: + @targetName("foo") private[this] def bla: Int = 2 // error +class C extends A: + @targetName("foo") private def bla: Int = 2 // error + +@main def Test = + val b = new B + println(b.foo) diff --git a/tests/neg/i18274.check b/tests/neg/i18274.check new file mode 100644 index 000000000000..2535d641451c --- /dev/null +++ b/tests/neg/i18274.check @@ -0,0 +1,9 @@ +-- [E068] Syntax Error: tests/neg/i18274.scala:3:7 --------------------------------------------------------------------- +3 |object Foo extends AnyVal // error + | ^ + | object Foo cannot extend AnyVal + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Only classes (not objects) are allowed to extend AnyVal. + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i18274.scala b/tests/neg/i18274.scala new file mode 100644 index 000000000000..19c27b969d98 --- /dev/null +++ b/tests/neg/i18274.scala @@ -0,0 +1,3 @@ +//> using options -explain + +object Foo extends AnyVal // error diff --git a/tests/neg/i18302a.scala b/tests/neg/i18302a.scala new file mode 100644 index 000000000000..dc4bc703c404 --- /dev/null +++ b/tests/neg/i18302a.scala @@ -0,0 +1,4 @@ +def test = polyFun(1) + +def polyFun: PolyFunction { def apply(x: Int): Int } = + new PolyFunction { def apply(x: Int): Int = x + 1 } // error diff --git a/tests/neg/i18302b.check b/tests/neg/i18302b.check new file mode 100644 index 000000000000..624c0cc0e415 --- /dev/null +++ b/tests/neg/i18302b.check @@ -0,0 +1,8 @@ +-- Error: tests/neg/i18302b.scala:3:32 --------------------------------------------------------------------------------- +3 |def polyFun: PolyFunction { def apply(x: Int)(y: Int): Int } = // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + |Implementation restriction: PolyFunction apply must have exactly one parameter list and optionally type arguments. No by-name nor varags are allowed. 
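For contrast with the restriction above: the one surface form that does produce these refinements is a polymorphic function literal, whose `PolyFunction` refinement the compiler generates itself. A minimal sketch (identifier names here are illustrative):

```scala
// `headOf` gets the polymorphic function type [T] => (xs: List[T]) => T,
// which the compiler encodes as a PolyFunction refinement with exactly
// one parameter list -- the shape the checks above insist on.
val headOf = [T] => (xs: List[T]) => xs.head

@main def polyDemo(): Unit =
  println(headOf[Int](List(1, 2, 3))) // prints 1
  println(headOf(List("a", "b")))     // type argument inferred; prints a
```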
+-- Error: tests/neg/i18302b.scala:4:6 ---------------------------------------------------------------------------------- +4 | new PolyFunction: // error + | ^^^^^^^^^^^^ + | `PolyFunction` marker trait is reserved for compiler generated refinements diff --git a/tests/neg/i18302b.scala b/tests/neg/i18302b.scala new file mode 100644 index 000000000000..4e6379d7e8e0 --- /dev/null +++ b/tests/neg/i18302b.scala @@ -0,0 +1,5 @@ +def test = polyFun(1)(2) + +def polyFun: PolyFunction { def apply(x: Int)(y: Int): Int } = // error + new PolyFunction: // error + def apply(x: Int)(y: Int): Int = x + y diff --git a/tests/neg/i18302c.check b/tests/neg/i18302c.check new file mode 100644 index 000000000000..67dffcfae98c --- /dev/null +++ b/tests/neg/i18302c.check @@ -0,0 +1,8 @@ +-- Error: tests/neg/i18302c.scala:4:32 --------------------------------------------------------------------------------- +4 |def polyFun: PolyFunction { def foo(x: Int): Int } = // error + | ^^^^^^^^^^^^^^^^^^^^ + | PolyFunction only supports apply method refinements +-- Error: tests/neg/i18302c.scala:5:6 ---------------------------------------------------------------------------------- +5 | new PolyFunction { def foo(x: Int): Int = x + 1 } // error + | ^^^^^^^^^^^^ + | `PolyFunction` marker trait is reserved for compiler generated refinements diff --git a/tests/neg/i18302c.scala b/tests/neg/i18302c.scala new file mode 100644 index 000000000000..ddf7e28e47d5 --- /dev/null +++ b/tests/neg/i18302c.scala @@ -0,0 +1,5 @@ +import scala.reflect.Selectable.reflectiveSelectable + +def test = polyFun.foo(1) +def polyFun: PolyFunction { def foo(x: Int): Int } = // error + new PolyFunction { def foo(x: Int): Int = x + 1 } // error diff --git a/tests/neg/i18302d.check b/tests/neg/i18302d.check new file mode 100644 index 000000000000..f052735a4db2 --- /dev/null +++ b/tests/neg/i18302d.check @@ -0,0 +1,8 @@ +-- Error: tests/neg/i18302d.scala:1:32 --------------------------------------------------------------------------------- +1 |def polyFun: PolyFunction { def apply: Int } = // error + | ^^^^^^^^^^^^^^ + |Implementation restriction: PolyFunction apply must have exactly one parameter list and optionally type arguments. No by-name nor varags are allowed. 
+-- Error: tests/neg/i18302d.scala:2:6 ---------------------------------------------------------------------------------- +2 | new PolyFunction { def apply: Int = 1 } // error + | ^^^^^^^^^^^^ + | `PolyFunction` marker trait is reserved for compiler generated refinements diff --git a/tests/neg/i18302d.scala b/tests/neg/i18302d.scala new file mode 100644 index 000000000000..2555bffe81aa --- /dev/null +++ b/tests/neg/i18302d.scala @@ -0,0 +1,2 @@ +def polyFun: PolyFunction { def apply: Int } = // error + new PolyFunction { def apply: Int = 1 } // error diff --git a/tests/neg/i18302e.check b/tests/neg/i18302e.check new file mode 100644 index 000000000000..7fbe19e8213d --- /dev/null +++ b/tests/neg/i18302e.check @@ -0,0 +1,12 @@ +-- Error: tests/neg/i18302e.scala:1:13 --------------------------------------------------------------------------------- +1 |def polyFun: PolyFunction { } = // error + | ^^^^^^^^^^^^^^^^^ + | PolyFunction subtypes must refine the apply method +-- Error: tests/neg/i18302e.scala:2:6 ---------------------------------------------------------------------------------- +2 | new PolyFunction { } // error + | ^^^^^^^^^^^^ + | `PolyFunction` marker trait is reserved for compiler generated refinements +-- Error: tests/neg/i18302e.scala:4:15 --------------------------------------------------------------------------------- +4 |def polyFun(f: PolyFunction { }) = () // error + | ^^^^^^^^^^^^^^^^^ + | PolyFunction subtypes must refine the apply method diff --git a/tests/neg/i18302e.scala b/tests/neg/i18302e.scala new file mode 100644 index 000000000000..80f13053e8c6 --- /dev/null +++ b/tests/neg/i18302e.scala @@ -0,0 +1,4 @@ +def polyFun: PolyFunction { } = // error + new PolyFunction { } // error + +def polyFun(f: PolyFunction { }) = () // error diff --git a/tests/neg/i18302f.check b/tests/neg/i18302f.check new file mode 100644 index 000000000000..5231e894fabb --- /dev/null +++ b/tests/neg/i18302f.check @@ -0,0 +1,20 @@ +-- Error: tests/neg/i18302f.scala:1:13 --------------------------------------------------------------------------------- +1 |def polyFun: PolyFunction = // error + | ^^^^^^^^^^^^ + | PolyFunction subtypes must refine the apply method +-- Error: tests/neg/i18302f.scala:2:6 ---------------------------------------------------------------------------------- +2 | new PolyFunction { } // error + | ^^^^^^^^^^^^ + | `PolyFunction` marker trait is reserved for compiler generated refinements +-- Error: tests/neg/i18302f.scala:4:16 --------------------------------------------------------------------------------- +4 |def polyFun2(a: PolyFunction) = () // error + | ^^^^^^^^^^^^ + | PolyFunction subtypes must refine the apply method +-- Error: tests/neg/i18302f.scala:6:14 --------------------------------------------------------------------------------- +6 |val polyFun3: PolyFunction = // error + | ^^^^^^^^^^^^ + | PolyFunction subtypes must refine the apply method +-- Error: tests/neg/i18302f.scala:7:6 ---------------------------------------------------------------------------------- +7 | new PolyFunction { } // error + | ^^^^^^^^^^^^ + | `PolyFunction` marker trait is reserved for compiler generated refinements diff --git a/tests/neg/i18302f.scala b/tests/neg/i18302f.scala new file mode 100644 index 000000000000..c62d4fdc1189 --- /dev/null +++ b/tests/neg/i18302f.scala @@ -0,0 +1,7 @@ +def polyFun: PolyFunction = // error + new PolyFunction { } // error + +def polyFun2(a: PolyFunction) = () // error + +val polyFun3: PolyFunction = // error + new PolyFunction { } // error diff 
--git a/tests/neg/i18302i.scala b/tests/neg/i18302i.scala new file mode 100644 index 000000000000..e64330879e55 --- /dev/null +++ b/tests/neg/i18302i.scala @@ -0,0 +1,6 @@ +def polyFun1: Option[PolyFunction] = ??? // error +def polyFun2: PolyFunction & Any = ??? // error +def polyFun3: Any & PolyFunction = ??? // error +def polyFun4: PolyFunction | Any = ??? // error +def polyFun5: Any | PolyFunction = ??? // error +def polyFun6(a: Any | PolyFunction) = ??? // error diff --git a/tests/neg/i18302j.scala b/tests/neg/i18302j.scala new file mode 100644 index 000000000000..1f674f953fa9 --- /dev/null +++ b/tests/neg/i18302j.scala @@ -0,0 +1,5 @@ +def polyFunByName: PolyFunction { def apply(thunk: => Int): Int } = // error + new PolyFunction { def apply(thunk: => Int): Int = 1 } // error + +def polyFunVarArgs: PolyFunction { def apply(args: Int*): Int } = // error + new PolyFunction { def apply(thunk: Int*): Int = 1 } // error diff --git a/tests/neg/i18340.scala b/tests/neg/i18340.scala new file mode 100644 index 000000000000..a16d506ee196 --- /dev/null +++ b/tests/neg/i18340.scala @@ -0,0 +1,36 @@ +@main def main: Unit = + type T = 3f + val value0: T = -3.5f // error + val value1: T = -100500 // error + val value2: T = -100500L // error + val value3: T = -100500D // error + val value4: T = true // error + val value5: 3f = -100500 // error + val value6: 3f = -100500L // error + + type Ti = 3 + val value1i: Ti = -100500 // error + val value2i: Ti = -100500L // error + val value0i: Ti = -100500F // error + val value3i: Ti = -100500D // error + val value4i: Ti = true // error + val value5i: 3 = -100500 // error + val value6i: 3 = -100500L // error + + type Tl = 3L + val value1l: Tl = -100500 // error + val value2l: Tl = -100500L // error + val value0l: Tl = -100500F // error + val value3l: Tl = -100500D // error + val value4l: Tl = true // error + val value5l: 3L = -100500 // error + val value6l: 3L = -100500L // error + + type Td = 3D + val value1d: Td = -100500 // error + val value2d: Td = -100500L // error + val value0d: Td = -100500F // error + val value3d: Td = -100500D // error + val value4d: Td = true // error + val value5d: 3D = -100500 // error + val value6d: 3D = -100500L // error diff --git a/tests/neg/i18408a.check b/tests/neg/i18408a.check new file mode 100644 index 000000000000..ff278e6fe5cb --- /dev/null +++ b/tests/neg/i18408a.check @@ -0,0 +1,18 @@ +-- [E103] Syntax Error: tests/neg/i18408a.scala:2:0 -------------------------------------------------------------------- +2 |fa(42) // error + |^^ + |Illegal start of toplevel definition + | + | longer explanation available when compiling with `-explain` +-- [E190] Potential Issue Warning: tests/neg/i18408a.scala:3:15 -------------------------------------------------------- +3 |def test1 = fa(42) + | ^^ + | Discarded non-Unit value of type Int. You may want to use `()`. + | + | longer explanation available when compiling with `-explain` +-- [E129] Potential Issue Warning: tests/neg/i18408a.scala:4:16 -------------------------------------------------------- +4 |def test2 = fa({42; ()}) + | ^^ + | A pure expression does nothing in statement position + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i18408a.scala b/tests/neg/i18408a.scala new file mode 100644 index 000000000000..2ad64d94e2ce --- /dev/null +++ b/tests/neg/i18408a.scala @@ -0,0 +1,4 @@ +def fa(f: String ?=> Unit): Unit = ??? 
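+// NB: a bare call like `fa(42)` is not a definition, so at the top level the
+// parser rejects it outright (E103 below); the same argument inside `test1`
+// and `test2` only draws warnings (discarded non-Unit value / pure expression
+// in statement position), as the check file records.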
+fa(42) // error +def test1 = fa(42) +def test2 = fa({42; ()}) diff --git a/tests/neg/i18408b.check b/tests/neg/i18408b.check new file mode 100644 index 000000000000..7c72833fe5ad --- /dev/null +++ b/tests/neg/i18408b.check @@ -0,0 +1,18 @@ +-- [E103] Syntax Error: tests/neg/i18408b.scala:2:0 -------------------------------------------------------------------- +2 |fa(42) // error + |^^ + |Illegal start of toplevel definition + | + | longer explanation available when compiling with `-explain` +-- [E190] Potential Issue Warning: tests/neg/i18408b.scala:3:15 -------------------------------------------------------- +3 |def test1 = fa(42) + | ^^ + | Discarded non-Unit value of type Int. You may want to use `()`. + | + | longer explanation available when compiling with `-explain` +-- [E129] Potential Issue Warning: tests/neg/i18408b.scala:4:16 -------------------------------------------------------- +4 |def test2 = fa({42; ()}) + | ^^ + | A pure expression does nothing in statement position + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i18408b.scala b/tests/neg/i18408b.scala new file mode 100644 index 000000000000..1cbe5474eb92 --- /dev/null +++ b/tests/neg/i18408b.scala @@ -0,0 +1,4 @@ +def fa(f: => Unit): Unit = ??? +fa(42) // error +def test1 = fa(42) +def test2 = fa({42; ()}) diff --git a/tests/neg/i18408c.check b/tests/neg/i18408c.check new file mode 100644 index 000000000000..078f42bb0006 --- /dev/null +++ b/tests/neg/i18408c.check @@ -0,0 +1,18 @@ +-- [E103] Syntax Error: tests/neg/i18408c.scala:2:0 -------------------------------------------------------------------- +2 |fa(42) // error + |^^ + |Illegal start of toplevel definition + | + | longer explanation available when compiling with `-explain` +-- [E190] Potential Issue Warning: tests/neg/i18408c.scala:3:15 -------------------------------------------------------- +3 |def test1 = fa(42) + | ^^ + | Discarded non-Unit value of type Int. You may want to use `()`. + | + | longer explanation available when compiling with `-explain` +-- [E129] Potential Issue Warning: tests/neg/i18408c.scala:4:16 -------------------------------------------------------- +4 |def test2 = fa({42; ()}) + | ^^ + | A pure expression does nothing in statement position + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i18408c.scala b/tests/neg/i18408c.scala new file mode 100644 index 000000000000..0ca80601c0c6 --- /dev/null +++ b/tests/neg/i18408c.scala @@ -0,0 +1,4 @@ +def fa(f: Unit): Unit = ??? +fa(42) // error +def test1 = fa(42) +def test2 = fa({42; ()}) diff --git a/tests/neg/i18453.min.scala b/tests/neg/i18453.min.scala new file mode 100644 index 000000000000..e63a818e8f71 --- /dev/null +++ b/tests/neg/i18453.min.scala @@ -0,0 +1,11 @@ +// Slightly nicer version of i18453 +// which uses a non-abstract type Foo instead +trait Box[T] + +trait Foo + +class Test: + def meth[A](func: A => A & Foo)(using boxA: Box[A]): Unit = ??? + def test[B] (using boxB: Box[B]): Unit = + def nest(p: B): B & Foo = ??? + meth(nest) // error diff --git a/tests/neg/i18453.scala b/tests/neg/i18453.scala new file mode 100644 index 000000000000..9a865b420f65 --- /dev/null +++ b/tests/neg/i18453.scala @@ -0,0 +1,12 @@ +// Would be nice if this compiled +// but it doesn't +// because of how we constrain `A` +// and then try to "minimise" its instantiation +trait Box[T] + +class Test: + def f[A, B](c: A => A & B)(using ba: Box[A]): Unit = ??? 
+ + def g[X, Y](using bx: Box[X]): Unit = + def d(t: X): X & Y = t.asInstanceOf[X & Y] + f(d) // error diff --git a/tests/neg/i18458.check b/tests/neg/i18458.check new file mode 100644 index 000000000000..5e8bb4e33121 --- /dev/null +++ b/tests/neg/i18458.check @@ -0,0 +1,4 @@ +-- Error: tests/neg/i18458.scala:1:4 ----------------------------------------------------------------------------------- +1 |def foo( // error + | ^ + | Platform restriction: a parameter list's length cannot exceed 254 (Long and Double count as 2). diff --git a/tests/neg/i18458.scala b/tests/neg/i18458.scala new file mode 100644 index 000000000000..a14286addc28 --- /dev/null +++ b/tests/neg/i18458.scala @@ -0,0 +1,206 @@ +def foo( // error + foo1: Long, + foo2: Long, + foo3: Long, + foo4: Long, + foo5: Long, + foo6: Long, + foo7: Long, + foo8: Long, + foo9: Long, + foo10: Long, + foo11: Long, + foo12: Long, + foo13: Long, + foo14: Long, + foo15: Long, + foo16: Long, + foo17: Long, + foo18: Long, + foo19: Long, + foo20: Long, + foo21: Long, + foo22: Long, + foo23: Long, + foo24: Long, + foo25: Long, + foo26: Long, + foo27: Long, + foo28: Long, + foo29: Long, + foo30: Long, + foo31: Long, + foo32: Long, + foo33: Long, + foo34: Long, + foo35: Long, + foo36: Long, + foo37: Long, + foo38: Long, + foo39: Long, + foo40: Long, + foo41: Long, + foo42: Long, + foo43: Long, + foo44: Long, + foo45: Long, + foo46: Long, + foo47: Long, + foo48: Long, + foo49: Long, + foo50: Long, + foo51: Long, + foo52: Long, + foo53: Long, + foo54: Long, + foo55: Long, + foo56: Long, + foo57: Long, + foo58: Long, + foo59: Long, + foo60: Long, + foo61: Long, + foo62: Long, + foo63: Long, + foo64: Long, + foo65: Long, + foo66: Long, + foo67: Long, + foo68: Long, + foo69: Long, + foo70: Long, + foo71: Long, + foo72: Long, + foo73: Long, + foo74: Long, + foo75: Long, + foo76: Long, + foo77: Long, + foo78: Long, + foo79: Long, + foo80: Long, + foo81: Long, + foo82: Long, + foo83: Long, + foo84: Long, + foo85: Long, + foo86: Long, + foo87: Long, + foo88: Long, + foo89: Long, + foo90: Long, + foo91: Long, + foo92: Long, + foo93: Long, + foo94: Long, + foo95: Long, + foo96: Long, + foo97: Long, + foo98: Long, + foo99: Long, + foo100: Long, + foo101: Long, + foo102: Long, + foo103: Long, + foo104: Long, + foo105: Long, + foo106: Long, + foo107: Long, + foo108: Long, + foo109: Long, + foo110: Long, + foo111: Long, + foo112: Long, + foo113: Long, + foo114: Long, + foo115: Long, + foo116: Long, + foo117: Long, + foo118: Long, + foo119: Long, + foo120: Long, + foo121: Long, + foo122: Long, + foo123: Long, + foo124: Long, + foo125: Long, + foo126: Long, + foo127: Long, + foo128: Long, + foo129: Long, + foo130: Long, + foo131: Long, + foo132: Long, + foo133: Long, + foo134: Long, + foo135: Long, + foo136: Long, + foo137: Long, + foo138: Long, + foo139: Long, + foo140: Long, + foo141: Long, + foo142: Long, + foo143: Long, + foo144: Long, + foo145: Long, + foo146: Long, + foo147: Long, + foo148: Long, + foo149: Long, + foo150: Long, + foo151: Long, + foo152: Long, + foo153: Long, + foo154: Long, + foo155: Long, + foo156: Long, + foo157: Long, + foo158: Long, + foo159: Long, + foo160: Long, + foo161: Long, + foo162: Long, + foo163: Long, + foo164: Long, + foo165: Long, + foo166: Long, + foo167: Long, + foo168: Long, + foo169: Long, + foo170: Long, + foo171: Long, + foo172: Long, + foo173: Long, + foo174: Long, + foo175: Long, + foo176: Long, + foo177: Long, + foo178: Long, + foo179: Long, + foo180: Long, + foo181: Long, + foo182: Long, + foo183: Long, + foo184: 
Long, + foo185: Long, + foo186: Long, + foo187: Long, + foo188: Long, + foo189: Long, + foo190: Long, + foo191: Long, + foo192: Long, + foo193: Long, + foo194: Long, + foo195: Long, + foo196: Long, + foo197: Long, + foo198: Long, + foo199: Long, + foo200: Long +) = foo1 + foo2 + +@main +def run = + println(foo(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200)) diff --git a/tests/neg/i18545.check b/tests/neg/i18545.check new file mode 100644 index 000000000000..95edeacc0c95 --- /dev/null +++ b/tests/neg/i18545.check @@ -0,0 +1,16 @@ +-- [E173] Reference Error: tests/neg/i18545.scala:13:20 ---------------------------------------------------------------- +13 | def test: IOLocal.IOLocalImpl[Int] = // error + | ^^^^^^^^^^^^^^^^^^^ + |class IOLocalImpl cannot be accessed as a member of iolib.IOLocal.type from the top-level definitions in package tests. + | private[IOLocal] class IOLocalImpl can only be accessed from object IOLocal in package iolib. +-- [E173] Reference Error: tests/neg/i18545.scala:14:24 ---------------------------------------------------------------- +14 | IOLocal.IOLocalImpl.apply(42) // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^ + |method apply cannot be accessed as a member of iolib.IOLocal.IOLocalImpl.type from the top-level definitions in package tests. + | private[IOLocal] method apply can only be accessed from object IOLocal in package iolib. 
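The pattern the two messages above protect — public trait, `private[IOLocal]` implementation, public factory — only works while clients stay on the public surface. A minimal sketch of the same shape, with illustrative names:

```scala
object Outer:
  sealed trait Box { def x: Int }
  // visible anywhere inside Outer, and nowhere else
  private[Outer] final class Impl(val x: Int) extends Box
  def make(x: Int): Box = Impl(x) // ok: Impl is in scope here

@main def boxDemo(): Unit =
  println(Outer.make(42).x) // ok: only the public Box surface is touched
  // Outer.Impl(7)          // rejected, like IOLocal.IOLocalImpl above
```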
+-- [E050] Type Error: tests/neg/i18545.scala:15:22 --------------------------------------------------------------------- +15 | def test2 = IOLocal.IOLocalImpl(42) // error + | ^^^^^^^^^^^^^^^^^^^ + | object IOLocalImpl in object IOLocal does not take parameters + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i18545.scala b/tests/neg/i18545.scala new file mode 100644 index 000000000000..330482df11ae --- /dev/null +++ b/tests/neg/i18545.scala @@ -0,0 +1,15 @@ +package iolib: + case class IO[A](value: A) + + sealed trait IOLocal[A] + + object IOLocal: + def apply[A](default: A): IO[IOLocal[A]] = IO(new IOLocalImpl(default)) + + private[IOLocal] final class IOLocalImpl[A](default: A) extends IOLocal[A] + +package tests: + import iolib.IOLocal + def test: IOLocal.IOLocalImpl[Int] = // error + IOLocal.IOLocalImpl.apply(42) // error + def test2 = IOLocal.IOLocalImpl(42) // error diff --git a/tests/neg/i18588.check b/tests/neg/i18588.check new file mode 100644 index 000000000000..5f7d6181a93c --- /dev/null +++ b/tests/neg/i18588.check @@ -0,0 +1,8 @@ +-- Error: tests/neg/i18588.scala:7:14 ---------------------------------------------------------------------------------- +7 | private var cached: A = value // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | covariant type A occurs in invariant position in type A of variable cached +-- Error: tests/neg/i18588.scala:17:14 --------------------------------------------------------------------------------- +17 | private var cached: A = value // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | covariant type A occurs in invariant position in type A of variable cached diff --git a/tests/neg/i18588.scala b/tests/neg/i18588.scala new file mode 100644 index 000000000000..e8c89c1b06f1 --- /dev/null +++ b/tests/neg/i18588.scala @@ -0,0 +1,42 @@ +class ROBox[+A](value: A) { + private var cached: A = value + def get: A = ROBox[A](value).cached +} + +class Box[+A](value: A) { + private var cached: A = value // error + def get: A = cached + + def put[AA >: A](value: AA): Unit = { + val box: Box[AA] = this + box.cached = value + } +} + +class BoxWithCompanion[+A](value: A) { + private var cached: A = value // error + def get: A = cached +} + +class BoxValid[+A](value: A, orig: A) { + private var cached: A = value // ok + def get: A = cached + + def reset(): Unit = + cached = orig // ok: mutated through this prefix +} + +trait Animal +object Dog extends Animal +object Cat extends Animal + +val dogBox: Box[Dog.type] = new Box(Dog) +val _ = dogBox.put(Cat) +val dog: Dog.type = dogBox.get + + +object BoxWithCompanion { + def put[A](box: BoxWithCompanion[A], value: A): Unit = { + box.cached = value + } +} \ No newline at end of file diff --git a/tests/neg/i18650.min.scala b/tests/neg/i18650.min.scala new file mode 100644 index 000000000000..0756c05bfc25 --- /dev/null +++ b/tests/neg/i18650.min.scala @@ -0,0 +1,8 @@ +class Church[B]: + type Nat = Tuple1[B] + +class Test: + given makeChurch[C]: Church[C] = ??? // necessary to cause crash + + def churchTest(c: Church[Int]): Unit = + val res1 = summon[c.Nat =:= Int] // error (not a compiler crash) diff --git a/tests/neg/i18650.min2.scala b/tests/neg/i18650.min2.scala new file mode 100644 index 000000000000..43ea86492d54 --- /dev/null +++ b/tests/neg/i18650.min2.scala @@ -0,0 +1,8 @@ +class Church[B]: + type Nat = Tuple1[B] + +class Test2: + given makeChurch2[C](using DummyImplicit): Church[C] = ??? 
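+  // as with makeChurch in i18650.min.scala, this given is what used to send
+  // implicit search into a crash; the summon below must now fail with an
+  // ordinary type error instead.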
+ + def churchTest2(c: Church[Int]): Unit = + val res2 = summon[c.Nat =:= Int] // error (not a compiler crash) diff --git a/tests/neg/i18650.scala b/tests/neg/i18650.scala new file mode 100644 index 000000000000..d627c6ea329b --- /dev/null +++ b/tests/neg/i18650.scala @@ -0,0 +1,26 @@ +trait Lam: + type F[_] + extension [A, B](f: F[A => B]) def apply(arg: F[A]): F[B] + def lam[A, B](f: F[A] => F[B]): F[A => B] + final def id[A]: F[A => A] = lam(identity[F[A]]) + +object LamInterpreter extends Lam: + type F[t] = t + def lam[A, B](f: F[A] => F[B]): F[A => B] = f + extension [A, B](f: F[A => B]) def apply(arg: F[A]): F[B] = f(arg) + + +class Church[A](using val l: Lam): + import l.* + type Nat = F[(A => A) => (A => A)] + def zero: Nat = id + extension (n: Nat) def suc: Nat = lam(f => lam(x => f(n(f)(x)))) + +given [A](using l: Lam): Church[A] = Church() + + +@main +def churchTest = + given Lam = LamInterpreter + val c: Church[Int] = summon + summon[c.Nat =:= ((Int => Int) => (Int => Int))] // error (not a compiler crash) diff --git a/tests/neg/i18657.check b/tests/neg/i18657.check new file mode 100644 index 000000000000..a873239bfe9d --- /dev/null +++ b/tests/neg/i18657.check @@ -0,0 +1,17 @@ +-- [E086] Syntax Error: tests/neg/i18657.scala:2:27 -------------------------------------------------------------------- +2 |val f: (Int, Int) => Int = Integer.compare(_ + 1, _) // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^ + | Wrong number of parameters, expected: 2 + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | The function literal + | + | _$2 => Integer.compare(_$1 => _$1 + 1, _$2) + | + | has 1 parameter. But the expected type + | + | (Int, Int) => Int + | + | requires a function with 2 parameters. 
+ --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i18657.scala b/tests/neg/i18657.scala new file mode 100644 index 000000000000..393982a5ea14 --- /dev/null +++ b/tests/neg/i18657.scala @@ -0,0 +1,2 @@ +//> using options -explain +val f: (Int, Int) => Int = Integer.compare(_ + 1, _) // error diff --git a/tests/neg/i18678.check b/tests/neg/i18678.check new file mode 100644 index 000000000000..45629693f078 --- /dev/null +++ b/tests/neg/i18678.check @@ -0,0 +1,87 @@ +-- [E007] Type Mismatch Error: tests/neg/i18678.scala:11:14 ------------------------------------------------------------ +11 |def u: Unit = () // error + | ^^ + | Found: Unit + | Required: Unit² + | + | where: Unit is a class in package scala + | Unit² is a class in the empty package + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/i18678.scala:12:20 ------------------------------------------------------------ +12 |def bool: Boolean = true // error + | ^^^^ + | Found: (true : Boolean) + | Required: Boolean² + | + | where: Boolean is a class in package scala + | Boolean² is a class in the empty package + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/i18678.scala:13:14 ------------------------------------------------------------ +13 |def b: Byte = 1: scala.Byte // error + | ^^^^^^^^^^^^^ + | Found: Byte + | Required: Byte² + | + | where: Byte is a class in package scala + | Byte² is a class in the empty package + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/i18678.scala:14:15 ------------------------------------------------------------ +14 |def s: Short = 2: scala.Short // error + | ^^^^^^^^^^^^^^ + | Found: Short + | Required: Short² + | + | where: Short is a class in package scala + | Short² is a class in the empty package + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/i18678.scala:15:13 ------------------------------------------------------------ +15 |def i: Int = 3 // error + | ^ + | Found: (3 : Int) + | Required: Int² + | + | where: Int is a class in package scala + | Int² is a class in the empty package + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/i18678.scala:16:14 ------------------------------------------------------------ +16 |def l: Long = 4L // error + | ^^ + | Found: (4L : Long) + | Required: Long² + | + | where: Long is a class in package scala + | Long² is a class in the empty package + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/i18678.scala:17:15 ------------------------------------------------------------ +17 |def f: Float = 5.6 // error + | ^^^ + | Found: (5.6d : Double) + | Required: Float + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/i18678.scala:18:16 ------------------------------------------------------------ +18 |def d: Double = 6.7d // error + | ^^^^ + | Found: (6.7d : Double) + | Required: Double² + | + | where: Double is a class in package scala + | Double² is a class in the empty package + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/i18678.scala:19:14 ------------------------------------------------------------ +19 
|def c: Char = 'a' // error + | ^^^ + | Found: ('a' : Char) + | Required: Char² + | + | where: Char is a class in package scala + | Char² is a class in the empty package + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i18678.scala b/tests/neg/i18678.scala new file mode 100644 index 000000000000..642a033f79f7 --- /dev/null +++ b/tests/neg/i18678.scala @@ -0,0 +1,19 @@ +class Unit +class Boolean +class Byte +class Short +class Int +class Long +class Double +class Float +class Char + +def u: Unit = () // error +def bool: Boolean = true // error +def b: Byte = 1: scala.Byte // error +def s: Short = 2: scala.Short // error +def i: Int = 3 // error +def l: Long = 4L // error +def f: Float = 5.6 // error +def d: Double = 6.7d // error +def c: Char = 'a' // error diff --git a/tests/neg/i18678b.check b/tests/neg/i18678b.check new file mode 100644 index 000000000000..ea4a4288b36b --- /dev/null +++ b/tests/neg/i18678b.check @@ -0,0 +1,10 @@ +-- [E007] Type Mismatch Error: tests/neg/i18678b.scala:3:16 ------------------------------------------------------------ +3 |def s: String = "" // error + | ^^ + | Found: ("" : String) + | Required: String² + | + | where: String is a class in package java.lang + | String² is a class in the empty package + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i18678b.scala b/tests/neg/i18678b.scala new file mode 100644 index 000000000000..9f8b40eb1b8d --- /dev/null +++ b/tests/neg/i18678b.scala @@ -0,0 +1,3 @@ +class String + +def s: String = "" // error \ No newline at end of file diff --git a/tests/neg/i18682.check b/tests/neg/i18682.check new file mode 100644 index 000000000000..650204ebfbdb --- /dev/null +++ b/tests/neg/i18682.check @@ -0,0 +1,50 @@ +-- [E006] Not Found Error: tests/neg/i18682.scala:3:8 ------------------------------------------------------------------ +3 |val _ = Fop(1) // error + | ^^^ + | Not found: Fop - did you mean Foo? + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/i18682.scala:4:12 ----------------------------------------------------------------- +4 |val _ = new Fooo(2) // error + | ^^^^ + | Not found: type Fooo - did you mean Foo? + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/i18682.scala:6:8 ------------------------------------------------------------------ +6 |val _ = hellx // error + | ^^^^^ + | Not found: hellx - did you mean hello? + | + | longer explanation available when compiling with `-explain` +-- [E008] Not Found Error: tests/neg/i18682.scala:16:12 ---------------------------------------------------------------- +16 |val _ = bar.Bap // error, App does not show as hint, too far away + | ^^^^^^^ + | value Bap is not a member of object Bar +-- [E008] Not Found Error: tests/neg/i18682.scala:17:12 ---------------------------------------------------------------- +17 |val _ = bar.Bap() // error + | ^^^^^^^ + | value Bap is not a member of object Bar - did you mean bar.Baz? 
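For reference, the spellings those hints steer toward, using i18682.scala's own definitions:

```scala
object Bar:
  class Baz()
  def pool = 3

val bar = Bar
val ok1 = bar.Baz() // the near-miss behind `bar.Bap()`
val ok2 = bar.pool  // the near-miss behind `bar.poodle`
```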
+-- [E006] Not Found Error: tests/neg/i18682.scala:19:8 ----------------------------------------------------------------- +19 |val _ = error // error, java.lang.Error does not show as hint, since it is not a value + | ^^^^^ + | Not found: error + | + | longer explanation available when compiling with `-explain` +-- [E008] Not Found Error: tests/neg/i18682.scala:22:50 ---------------------------------------------------------------- +22 |val _ = "123".view.reverse.padTo(5, '0').iterator.reverse // error, no hint since `reversed` is not accessible + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | value reverse is not a member of Iterator[Char] +-- [E006] Not Found Error: tests/neg/i18682.scala:27:8 ----------------------------------------------------------------- +27 |val _ = pool // error + | ^^^^ + | Not found: pool - did you mean cool? or perhaps wool? + | + | longer explanation available when compiling with `-explain` +-- [E008] Not Found Error: tests/neg/i18682.scala:29:12 ---------------------------------------------------------------- +29 |val _ = bar.poodle // error + | ^^^^^^^^^^ + | value poodle is not a member of object Bar - did you mean bar.pool? +-- [E008] Not Found Error: tests/neg/i18682.scala:31:12 ---------------------------------------------------------------- +31 |val _ = bar.ool // error + | ^^^^^^^ + | value ool is not a member of object Bar - did you mean bar.cool? or perhaps bar.pool or bar.wool? diff --git a/tests/neg/i18682.scala b/tests/neg/i18682.scala new file mode 100644 index 000000000000..d1478ebf6e84 --- /dev/null +++ b/tests/neg/i18682.scala @@ -0,0 +1,31 @@ +class Foo(x: Int) + +val _ = Fop(1) // error +val _ = new Fooo(2) // error +val hello = "hi" +val _ = hellx // error + +object Bar: + class Baz() + object App + def cool = 1 + def wool = 2 + def pool = 3 + +val bar = Bar +val _ = bar.Bap // error, App does not show as hint, too far away +val _ = bar.Bap() // error + +val _ = error // error, java.lang.Error does not show as hint, since it is not a value + +// #17067 +val _ = "123".view.reverse.padTo(5, '0').iterator.reverse // error, no hint since `reversed` is not accessible + +val cool = "cool" +val wool = "wool" + +val _ = pool // error + +val _ = bar.poodle // error + +val _ = bar.ool // error diff --git a/tests/neg/i18684.check b/tests/neg/i18684.check new file mode 100644 index 000000000000..5dc4a2fdd736 --- /dev/null +++ b/tests/neg/i18684.check @@ -0,0 +1,82 @@ +-- [E189] Not Found Error: tests/neg/i18684.scala:3:6 ------------------------------------------------------------------ +3 | val s(): String = "hello, world" // error + | ^ + | no pattern match extractor named s was found + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | An application s(...) in a pattern can refer to an extractor + | which defines an unapply or unapplySeq method. Example: + | + | object split: + | def unapply(x: String) = + | val (leading, trailing) = x.splitAt(x.length / 2) + | Some((leading, trailing)) + | + | val split(fst, snd) = "HiHo" + | + | The extractor pattern `split(fst, snd)` defines `fst` as the first half "Hi" and + | `snd` as the second half "Ho" of the right hand side "HiHo". Case classes and + | enum cases implicitly define extractors with the name of the class or enum case. 
+ | Here, no extractor named s was found, so the pattern could not be typed. + --------------------------------------------------------------------------------------------------------------------- +-- [E189] Not Found Error: tests/neg/i18684.scala:5:6 ------------------------------------------------------------------ +5 | val i() = 22 // error + | ^ + | no pattern match extractor named i was found + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | An application i(...) in a pattern can refer to an extractor + | which defines an unapply or unapplySeq method. Example: + | + | object split: + | def unapply(x: String) = + | val (leading, trailing) = x.splitAt(x.length / 2) + | Some((leading, trailing)) + | + | val split(fst, snd) = "HiHo" + | + | The extractor pattern `split(fst, snd)` defines `fst` as the first half "Hi" and + | `snd` as the second half "Ho" of the right hand side "HiHo". Case classes and + | enum cases implicitly define extractors with the name of the class or enum case. + | Here, no extractor named i was found, so the pattern could not be typed. + --------------------------------------------------------------------------------------------------------------------- +-- [E189] Not Found Error: tests/neg/i18684.scala:10:8 ----------------------------------------------------------------- +10 | val foo() = "33" // error + | ^^^ + | no pattern match extractor named foo was found + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | An application foo(...) in a pattern can refer to an extractor + | which defines an unapply or unapplySeq method. Example: + | + | object split: + | def unapply(x: String) = + | val (leading, trailing) = x.splitAt(x.length / 2) + | Some((leading, trailing)) + | + | val split(fst, snd) = "HiHo" + | + | The extractor pattern `split(fst, snd)` defines `fst` as the first half "Hi" and + | `snd` as the second half "Ho" of the right hand side "HiHo". Case classes and + | enum cases implicitly define extractors with the name of the class or enum case. + | Here, no extractor named foo was found, so the pattern could not be typed. + -------------------------------------------------------------------------------------------------------------------- +-- [E127] Pattern Match Error: tests/neg/i18684.scala:12:6 ------------------------------------------------------------- +12 | val inner(x) = 3 // error + | ^^^^^ + | Test.inner cannot be used as an extractor in a pattern because it lacks an unapply or unapplySeq method + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | An unapply method should be defined in an object as follow: + | - If it is just a test, return a Boolean. 
For example case even() + | - If it returns a single sub-value of type T, return an Option[T] + | - If it returns several sub-values T1,...,Tn, group them in an optional tuple Option[(T1,...,Tn)] + | + | Sometimes, the number of sub-values isn't fixed and we would like to return a sequence. + | For this reason, you can also define patterns through unapplySeq which returns Option[Seq[T]]. + | This mechanism is used for instance in pattern case List(x1, ..., xn) + -------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i18684.scala b/tests/neg/i18684.scala new file mode 100644 index 000000000000..a6d27a1b3d01 --- /dev/null +++ b/tests/neg/i18684.scala @@ -0,0 +1,12 @@ +//> using options -explain +object Test: + val s(): String = "hello, world" // error + + val i() = 22 // error + + def foo(): String = "22" + + object inner: + val foo() = "33" // error + + val inner(x) = 3 // error \ No newline at end of file diff --git a/tests/neg/i18686.check b/tests/neg/i18686.check new file mode 100644 index 000000000000..6ed69c515051 --- /dev/null +++ b/tests/neg/i18686.check @@ -0,0 +1,30 @@ +-- [E173] Reference Error: tests/neg/i18686.scala:13:16 ---------------------------------------------------------------- +13 | println(Foo.Bar1) // error + | ^^^^^^^^ + | value Bar1 cannot be accessed as a member of Foo.type from object Main. + | private value Bar1 can only be accessed from object Foo. +-- [E173] Reference Error: tests/neg/i18686.scala:14:16 ---------------------------------------------------------------- +14 | println(Foo.Bar2) // error + | ^^^^^^^^ + | value Bar2 cannot be accessed as a member of Foo.type from object Main. + | private[Foo] value Bar2 can only be accessed from object Foo. +-- [E173] Reference Error: tests/neg/i18686.scala:15:16 ---------------------------------------------------------------- +15 | println(Foo.Bar3) // error + | ^^^^^^^^ + | value Bar3 cannot be accessed as a member of Foo.type from object Main. + | protected value Bar3 can only be accessed from object Foo. +-- [E173] Reference Error: tests/neg/i18686.scala:16:16 ---------------------------------------------------------------- +16 | println(Foo.Bar4) // error + | ^^^^^^^^ + | value Bar4 cannot be accessed as a member of Foo.type from object Main. + | protected[Foo] value Bar4 can only be accessed from object Foo. +-- [E173] Reference Error: tests/neg/i18686.scala:17:20 ---------------------------------------------------------------- +17 | println(Foo.Baz.Bar5) // error + | ^^^^^^^^^^^^ + | value Bar5 cannot be accessed as a member of Foo.Baz.type from object Main. + | private[Foo] value Bar5 can only be accessed from object Foo. +-- [E173] Reference Error: tests/neg/i18686.scala:18:20 ---------------------------------------------------------------- +18 | println(Foo.Baz.Bar6) // error + | ^^^^^^^^^^^^ + | value Bar6 cannot be accessed as a member of Foo.Baz.type from object Main. + | protected[Foo] value Bar6 can only be accessed from object Foo. 
diff --git a/tests/neg/i18686.scala b/tests/neg/i18686.scala new file mode 100644 index 000000000000..88da7b9d802a --- /dev/null +++ b/tests/neg/i18686.scala @@ -0,0 +1,20 @@ +object Foo: + private val Bar1: Int = 1 + private[Foo] val Bar2: Int = 2 + protected val Bar3: Int = 3 + protected[Foo] val Bar4: Int = 5 + object Baz: + private[Foo] val Bar5: Int = 5 + protected[Foo] val Bar6: Int = 6 +end Foo + +object Main: + def main(args: Array[String]): Unit = + println(Foo.Bar1) // error + println(Foo.Bar2) // error + println(Foo.Bar3) // error + println(Foo.Bar4) // error + println(Foo.Baz.Bar5) // error + println(Foo.Baz.Bar6) // error + end main +end Main diff --git a/tests/neg/i18686b.check b/tests/neg/i18686b.check new file mode 100644 index 000000000000..6394aeedf35a --- /dev/null +++ b/tests/neg/i18686b.check @@ -0,0 +1,28 @@ +-- [E173] Reference Error: tests/neg/i18686b.scala:15:16 --------------------------------------------------------------- +15 | println(foo.Bar1) // error + | ^^^^^^^^ + | value Bar1 cannot be accessed as a member of Foo from object Main. + | private value Bar1 can only be accessed from class Foo. +-- [E173] Reference Error: tests/neg/i18686b.scala:16:16 --------------------------------------------------------------- +16 | println(foo.Bar2) // error + | ^^^^^^^^ + | value Bar2 cannot be accessed as a member of Foo from object Main. + | private[Foo] value Bar2 can only be accessed from class Foo. +-- [E173] Reference Error: tests/neg/i18686b.scala:17:16 --------------------------------------------------------------- +17 | println(foo.Bar3) // error + | ^^^^^^^^ + | value Bar3 cannot be accessed as a member of Foo from object Main. + | protected value Bar3 can only be accessed from class Foo or one of its subclasses. +-- [E173] Reference Error: tests/neg/i18686b.scala:18:16 --------------------------------------------------------------- +18 | println(foo.Bar4) // error + | ^^^^^^^^ + | value Bar4 cannot be accessed as a member of Foo from object Main. + | protected[Foo] value Bar4 can only be accessed from class Foo or one of its subclasses. 
+-- [E008] Not Found Error: tests/neg/i18686b.scala:19:20 --------------------------------------------------------------- +19 | println(foo.Baz.Bar5) // error + | ^^^^^^^^^^^^ + | value Bar5 is not a member of object Foo#Baz +-- [E008] Not Found Error: tests/neg/i18686b.scala:20:20 --------------------------------------------------------------- +20 | println(foo.Baz.Bar6) // error + | ^^^^^^^^^^^^ + | value Bar6 is not a member of object Foo#Baz diff --git a/tests/neg/i18686b.scala b/tests/neg/i18686b.scala new file mode 100644 index 000000000000..86f8066073c0 --- /dev/null +++ b/tests/neg/i18686b.scala @@ -0,0 +1,22 @@ +class Foo: + private val Bar1: Int = 1 + private[Foo] val Bar2: Int = 2 + protected val Bar3: Int = 3 + protected[Foo] val Bar4: Int = 5 + class Baz: + private[Foo] val Bar5: Int = 5 + protected[Foo] val Bar6: Int = 6 +end Foo + +def foo = new Foo + +object Main: + def main(args: Array[String]): Unit = + println(foo.Bar1) // error + println(foo.Bar2) // error + println(foo.Bar3) // error + println(foo.Bar4) // error + println(foo.Baz.Bar5) // error + println(foo.Baz.Bar6) // error + end main +end Main diff --git a/tests/neg/i18686c.check b/tests/neg/i18686c.check new file mode 100644 index 000000000000..328d9cfd42a6 --- /dev/null +++ b/tests/neg/i18686c.check @@ -0,0 +1,8 @@ +-- [E173] Reference Error: tests/neg/i18686c.scala:8:6 ----------------------------------------------------------------- +8 | foo.foo // error + | ^^^^^^^ + |method foo cannot be accessed as a member of (foo² : Bar.Foo) from the top-level definitions in package . + | protected[Bar] method foo can only be accessed from object Bar, or class Foo in object Bar or one of its subclasses. + | + |where: foo is a method in class Foo + | foo² is a parameter in method test diff --git a/tests/neg/i18686c.scala b/tests/neg/i18686c.scala new file mode 100644 index 000000000000..d120e416ed9f --- /dev/null +++ b/tests/neg/i18686c.scala @@ -0,0 +1,8 @@ +object Bar: + class Foo: + protected[Bar] def foo = 23 + class Qux extends Foo: + val qux = foo + +def test(foo: Bar.Foo) = + foo.foo // error diff --git a/tests/neg/i18695.scala b/tests/neg/i18695.scala new file mode 100644 index 000000000000..e10f30b26779 --- /dev/null +++ b/tests/neg/i18695.scala @@ -0,0 +1,3 @@ +trait Foo { type Num <: Int } +given derived[A](using foo: Foo): Any = derivedImpl(foo) // error +def derivedImpl(foo: Foo)(using bar: foo.Num =:= Int): Any = ??? diff --git a/tests/neg/i18722.check b/tests/neg/i18722.check new file mode 100644 index 000000000000..539e23787752 --- /dev/null +++ b/tests/neg/i18722.check @@ -0,0 +1,44 @@ +-- [E190] Potential Issue Error: tests/neg/i18722.scala:3:15 ----------------------------------------------------------- +3 |def f1: Unit = null // error + | ^^^^ + | Discarded non-Unit value of type Null. You may want to use `()`. + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | As this expression is not of type Unit, it is desugared into `{ null; () }`. + | Here the `null` expression is a pure statement that can be discarded. + | Therefore the expression is effectively equivalent to `()`. 
+ --------------------------------------------------------------------------------------------------------------------- +-- [E190] Potential Issue Error: tests/neg/i18722.scala:4:15 ----------------------------------------------------------- +4 |def f2: Unit = 1 // error + | ^ + | Discarded non-Unit value of type Int. You may want to use `()`. + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | As this expression is not of type Unit, it is desugared into `{ 1; () }`. + | Here the `1` expression is a pure statement that can be discarded. + | Therefore the expression is effectively equivalent to `()`. + --------------------------------------------------------------------------------------------------------------------- +-- [E190] Potential Issue Error: tests/neg/i18722.scala:5:15 ----------------------------------------------------------- +5 |def f3: Unit = "a" // error + | ^^^ + | Discarded non-Unit value of type String. You may want to use `()`. + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | As this expression is not of type Unit, it is desugared into `{ "a"; () }`. + | Here the `"a"` expression is a pure statement that can be discarded. + | Therefore the expression is effectively equivalent to `()`. + --------------------------------------------------------------------------------------------------------------------- +-- [E190] Potential Issue Error: tests/neg/i18722.scala:7:15 ----------------------------------------------------------- +7 |def f4: Unit = i // error + | ^ + | Discarded non-Unit value of type Int. You may want to use `()`. + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | As this expression is not of type Unit, it is desugared into `{ i; () }`. + | Here the `i` expression is a pure statement that can be discarded. + | Therefore the expression is effectively equivalent to `()`. 
+ --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i18722.scala b/tests/neg/i18722.scala new file mode 100644 index 000000000000..c68390b76201 --- /dev/null +++ b/tests/neg/i18722.scala @@ -0,0 +1,9 @@ +//> using options -Werror -explain + +def f1: Unit = null // error +def f2: Unit = 1 // error +def f3: Unit = "a" // error +val i: Int = 1 +def f4: Unit = i // error +val u: Unit = () +def f5: Unit = u diff --git a/tests/neg/i18734.check b/tests/neg/i18734.check new file mode 100644 index 000000000000..bb348025cf73 --- /dev/null +++ b/tests/neg/i18734.check @@ -0,0 +1,28 @@ +-- [E040] Syntax Error: tests/neg/i18734.scala:7:8 --------------------------------------------------------------------- +7 | Foo(1 2) // error + | ^ + | ',' or ')' expected, but integer literal found +-- [E040] Syntax Error: tests/neg/i18734.scala:9:8 --------------------------------------------------------------------- +9 | Foo(x y) // error + | ^ + | ',' or ')' expected, but identifier found +-- [E040] Syntax Error: tests/neg/i18734.scala:11:8 -------------------------------------------------------------------- +11 | Foo(1 b = 2) // error + | ^ + | ',' or ')' expected, but identifier found +-- [E040] Syntax Error: tests/neg/i18734.scala:16:4 -------------------------------------------------------------------- +16 | b = 2 // error + | ^ + | ',' or ')' expected, but identifier found +-- [E040] Syntax Error: tests/neg/i18734.scala:19:32 ------------------------------------------------------------------- +19 | val f: (Int, Int) => Int = (x y) => x + y // error + | ^ + | ',' or ')' expected, but identifier found +-- [E040] Syntax Error: tests/neg/i18734.scala:23:10 ------------------------------------------------------------------- +23 | bar[Int String](1 2) // error // error + | ^^^^^^ + | ',' or ']' expected, but identifier found +-- [E040] Syntax Error: tests/neg/i18734.scala:23:20 ------------------------------------------------------------------- +23 | bar[Int String](1 2) // error // error + | ^ + | ',' or ')' expected, but integer literal found diff --git a/tests/neg/i18734.scala b/tests/neg/i18734.scala new file mode 100644 index 000000000000..b658ef9f94d3 --- /dev/null +++ b/tests/neg/i18734.scala @@ -0,0 +1,25 @@ +case class Foo(a: Int, b: Int) + +object Bar: + val x = 1 + val y = 2 + + Foo(1 2) // error + + Foo(x y) // error + + Foo(1 b = 2) // error + + // Or + Foo( + a = 1 + b = 2 // error + ) + + val f: (Int, Int) => Int = (x y) => x + y // error + + def bar[X, Y](x: X, y: Y) = ??? 
+ + bar[Int String](1 2) // error // error + + diff --git a/tests/neg/i18737.check b/tests/neg/i18737.check new file mode 100644 index 000000000000..f2067ad017cd --- /dev/null +++ b/tests/neg/i18737.check @@ -0,0 +1,75 @@ +-- [E007] Type Mismatch Error: tests/neg/i18737.scala:3:36 ------------------------------------------------------------- +3 |def test2(v: String & Long) = test1(v) // error + | ^ + | Found: (v : String & Long) + | Required: String & Integer & List[String] + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | + | Tree: v + | I tried to show that + | (v : String & Long) + | conforms to + | String & Integer & List[String] + | but none of the attempts shown below succeeded: + | + | ==> (v : String & Long) <: String & Integer & List[String] + | ==> (v : String & Long) <: String & Integer + | ==> (v : String & Long) <: Integer + | ==> String & Long <: Integer + | ==> String <: Integer = false + | ==> Long <: Integer = false + | + | The tests were made under the empty constraint + --------------------------------------------------------------------------------------------------------------------- +-- [E007] Type Mismatch Error: tests/neg/i18737.scala:6:36 ------------------------------------------------------------- +6 |def test4(v: String | Long) = test3(v) // error + | ^ + | Found: (v : String | Long) + | Required: String | Integer | List[String] + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | + | Tree: v + | I tried to show that + | (v : String | Long) + | conforms to + | String | Integer | List[String] + | but none of the attempts shown below succeeded: + | + | ==> (v : String | Long) <: String | Integer | List[String] + | ==> String | Long <: String | Integer | List[String] + | ==> Long <: String | Integer | List[String] + | ==> Long <: String | Integer + | ==> Long <: String = false + | ==> Long <: Integer = false + | ==> Long <: List[String] = false + | ==> (v : String | Long) <: String | Integer + | ==> String | Long <: String | Integer + | ==> Long <: String | Integer + | ==> Long <: String = false + | ==> Long <: Integer = false + | ==> (v : String | Long) <: String + | ==> String | Long <: String + | ==> Long <: String = false + | ==> (v : String | Long) <: Integer + | ==> String | Long <: Integer + | ==> String <: Integer = false + | ==> String | Long <: String | Integer + | ==> Long <: String | Integer + | ==> Long <: String = false + | ==> Long <: Integer = false + | ==> (v : String | Long) <: List[String] + | ==> String | Long <: List[String] + | ==> String <: List[String] = false + | ==> String | Long <: String | Integer | List[String] + | ==> Long <: String | Integer | List[String] + | ==> Long <: String | Integer + | ==> Long <: String = false + | ==> Long <: Integer = false + | ==> Long <: List[String] = false + | + | The tests were made under the empty constraint + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i18737.scala b/tests/neg/i18737.scala new file mode 100644 index 000000000000..ccba19627fbf --- 
/dev/null +++ b/tests/neg/i18737.scala @@ -0,0 +1,6 @@ +//> using options -explain +def test1(v: String & Integer & List[String]) = () +def test2(v: String & Long) = test1(v) // error + +def test3(v: String | Integer | List[String]) = () +def test4(v: String | Long) = test3(v) // error diff --git a/tests/neg/i18750-format.check b/tests/neg/i18750-format.check new file mode 100644 index 000000000000..a20cb8eaadf1 --- /dev/null +++ b/tests/neg/i18750-format.check @@ -0,0 +1,68 @@ +-- [E032] Syntax Error: tests/neg/i18750-format.scala:4:7 -------------------------------------------------------------- +4 | case # => () // error + | ^ + | pattern expected + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Simple patterns can be divided into several groups: + | - Variable Patterns: case x => ... or case _ => ... + | It matches any value, and binds the variable name to that value. + | A special case is the wild-card pattern _ which is treated as if it was a fresh + | variable on each occurrence. + | + | - Typed Patterns: case x: Int => ... or case _: Int => ... + | This pattern matches any value matched by the specified type; it binds the variable + | name to that value. + | + | - Given Patterns: case given ExecutionContext => ... + | This pattern matches any value matched by the specified type; it binds a given + | instance with the same type to that value. + | + | - Literal Patterns: case 123 => ... or case 'A' => ... + | This type of pattern matches any value that is equal to the specified literal. + | + | - Stable Identifier Patterns: + | + | def f(x: Int, y: Int) = x match + | case `y` => ... + | + | the match succeeds only if the x argument and the y argument of f are equal. + | + | - Constructor Patterns: + | + | case class Person(name: String, age: Int) + | + | def test(p: Person) = p match + | case Person(name, age) => ... + | + | The pattern binds all object's fields to the variable names (name and age, in this + | case). + | + | - Tuple Patterns: + | + | def swap(tuple: (String, Int)): (Int, String) = tuple match + | case (text, number) => (number, text) + | + | Calling: + | + | swap(("Luftballons", 99)) + | + | would give (99, "Luftballons") as a result. + | + | - Pattern Sequences: + | + | def getSecondValue(list: List[Int]): Int = list match + | case List(_, second, x*) => second + | case _ => 0 + | + | Calling: + | + | getSecondValue(List(1, 10, 2)) + | + | would give 10 as a result. 
+ | This pattern is possible because a companion object for the List class has a method + | with the following signature: + | + | def unapplySeq[A](x: List[A]): Some[List[A]] + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i18750-format.scala b/tests/neg/i18750-format.scala new file mode 100644 index 000000000000..5d007fcbcb04 --- /dev/null +++ b/tests/neg/i18750-format.scala @@ -0,0 +1,4 @@ +//> using options -explain + +def test = 23 match + case # => () // error diff --git a/tests/neg/i18750.check b/tests/neg/i18750.check new file mode 100644 index 000000000000..e0ef5c00988c --- /dev/null +++ b/tests/neg/i18750.check @@ -0,0 +1,104 @@ +-- [E040] Syntax Error: tests/neg/i18750.scala:3:4 --------------------------------------------------------------------- +3 |val do = 23 // error + | ^ + | an identifier expected, but 'do' found + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | + | If you want to use 'do' as identifier, you may put it in backticks: `do`. + --------------------------------------------------------------------------------------------------------------------- +-- [E040] Syntax Error: tests/neg/i18750.scala:4:4 --------------------------------------------------------------------- +4 |val if = 23 // error + | ^ + | an identifier expected, but 'if' found + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | + | If you want to use 'if' as identifier, you may put it in backticks: `if`. + --------------------------------------------------------------------------------------------------------------------- +-- [E040] Syntax Error: tests/neg/i18750.scala:5:4 --------------------------------------------------------------------- +5 |val val = 23 // error + | ^ + | an identifier expected, but 'val' found + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | + | If you want to use 'val' as identifier, you may put it in backticks: `val`. + --------------------------------------------------------------------------------------------------------------------- +-- [E040] Syntax Error: tests/neg/i18750.scala:6:7 --------------------------------------------------------------------- +6 |val a, if = 23 // error + | ^^ + | an identifier expected, but 'if' found + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | + | If you want to use 'if' as identifier, you may put it in backticks: `if`. 
+ --------------------------------------------------------------------------------------------------------------------- +-- [E040] Syntax Error: tests/neg/i18750.scala:8:4 --------------------------------------------------------------------- +8 |def do; // error + | ^^ + | an identifier expected, but 'do' found + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | + | If you want to use 'do' as identifier, you may put it in backticks: `do`. + --------------------------------------------------------------------------------------------------------------------- +-- [E040] Syntax Error: tests/neg/i18750.scala:9:4 --------------------------------------------------------------------- +9 |var do; // error + | ^ + | an identifier expected, but 'do' found + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | + | If you want to use 'do' as identifier, you may put it in backticks: `do`. + --------------------------------------------------------------------------------------------------------------------- +-- [E040] Syntax Error: tests/neg/i18750.scala:10:6 -------------------------------------------------------------------- +10 |class do; // error + | ^^ + | an identifier expected, but 'do' found + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | + | If you want to use 'do' as identifier, you may put it in backticks: `do`. + -------------------------------------------------------------------------------------------------------------------- +-- [E040] Syntax Error: tests/neg/i18750.scala:11:7 -------------------------------------------------------------------- +11 |object if; // error // error + | ^^ + | an identifier expected, but 'if' found + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | + | If you want to use 'if' as identifier, you may put it in backticks: `if`. + -------------------------------------------------------------------------------------------------------------------- +-- [E040] Syntax Error: tests/neg/i18750.scala:12:6 -------------------------------------------------------------------- +12 |trait else; // error + | ^^^^ + | an identifier expected, but 'else' found + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | + | If you want to use 'else' as identifier, you may put it in backticks: `else`. 
+ -------------------------------------------------------------------------------------------------------------------- +-- [E040] Syntax Error: tests/neg/i18750.scala:13:5 -------------------------------------------------------------------- +13 |type for; // error + | ^^^ + | an identifier expected, but 'for' found + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | + | If you want to use 'for' as identifier, you may put it in backticks: `for`. + -------------------------------------------------------------------------------------------------------------------- +-- [E161] Naming Error: tests/neg/i18750.scala:11:0 -------------------------------------------------------------------- +11 |object if; // error // error + |^ + | clashes with class in tests/neg/i18750.scala; the two must be defined together diff --git a/tests/neg/i18750.scala b/tests/neg/i18750.scala new file mode 100644 index 000000000000..6c083205cd52 --- /dev/null +++ b/tests/neg/i18750.scala @@ -0,0 +1,13 @@ +//> using options -explain + +val do = 23 // error +val if = 23 // error +val val = 23 // error +val a, if = 23 // error + +def do; // error +var do; // error +class do; // error +object if; // error // error +trait else; // error +type for; // error diff --git a/tests/neg/i18862-3.4.check b/tests/neg/i18862-3.4.check new file mode 100644 index 000000000000..b56454feeeaa --- /dev/null +++ b/tests/neg/i18862-3.4.check @@ -0,0 +1,5 @@ +-- Error: tests/neg/i18862-3.4.scala:6:38 ------------------------------------------------------------------------------ +6 |def test(xs: List[Int]): Unit = f(xs: _*) // error: migration warning + | ^ + | The syntax `x: _*` is no longer supported for vararg splices; use `x*` instead + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. 
diff --git a/tests/neg/i18862-3.4.scala b/tests/neg/i18862-3.4.scala new file mode 100644 index 000000000000..a30c8c8f1a59 --- /dev/null +++ b/tests/neg/i18862-3.4.scala @@ -0,0 +1,6 @@ +//> using options -Werror + +import scala.language.`3.4` + +def f(x: Int*): Unit = () +def test(xs: List[Int]): Unit = f(xs: _*) // error: migration warning diff --git a/tests/neg/i18862-future-migration.scala b/tests/neg/i18862-future-migration.scala new file mode 100644 index 000000000000..ff8ba1c377c3 --- /dev/null +++ b/tests/neg/i18862-future-migration.scala @@ -0,0 +1,6 @@ +//> using options -Werror + +import scala.language.`future-migration` + +def f(x: Int*): Unit = () +def test(xs: List[Int]): Unit = f(xs: _*) // error: migration warning diff --git a/tests/neg/i18862-future.scala b/tests/neg/i18862-future.scala new file mode 100644 index 000000000000..07fc72aef34a --- /dev/null +++ b/tests/neg/i18862-future.scala @@ -0,0 +1,4 @@ +import scala.language.future + +def f(x: Int*): Unit = () +def test(xs: List[Int]): Unit = f(xs: _*) // error: migration error diff --git a/tests/neg/i18867-3.4.scala b/tests/neg/i18867-3.4.scala new file mode 100644 index 000000000000..c5fd2976c8b0 --- /dev/null +++ b/tests/neg/i18867-3.4.scala @@ -0,0 +1,7 @@ +//> using options -Werror + +import language.`3.4` + +def foo(x: Int) = x + +def test = foo _ // error diff --git a/tests/neg/i18867.check b/tests/neg/i18867.check new file mode 100644 index 000000000000..014e9e7bd92e --- /dev/null +++ b/tests/neg/i18867.check @@ -0,0 +1,6 @@ +-- Error: tests/neg/i18867.scala:5:15 ---------------------------------------------------------------------------------- +5 |def test = foo _ // error + | ^^^^^ + | The syntax ` _` is no longer supported; + | you can simply leave out the trailing ` _` + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. diff --git a/tests/neg/i18867.scala b/tests/neg/i18867.scala new file mode 100644 index 000000000000..722347a613f5 --- /dev/null +++ b/tests/neg/i18867.scala @@ -0,0 +1,5 @@ +//> using options -Werror + +def foo(x: Int) = x + +def test = foo _ // error diff --git a/tests/neg/i18884.scala b/tests/neg/i18884.scala new file mode 100644 index 000000000000..6ba17fde7f70 --- /dev/null +++ b/tests/neg/i18884.scala @@ -0,0 +1,2 @@ +def test(xs: ::[Int]): List[Int] = + xs.next$access$1 // error diff --git a/tests/neg/i18922.check b/tests/neg/i18922.check new file mode 100644 index 000000000000..f686d8e2f619 --- /dev/null +++ b/tests/neg/i18922.check @@ -0,0 +1,7 @@ +-- [E007] Type Mismatch Error: tests/neg/i18922.scala:11:27 ------------------------------------------------------------ +11 |def test = doClose(Resource()) // error + | ^^^^^^^^^^ + | Found: Resource + | Required: Object{def close(): Unit} + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i18922.scala b/tests/neg/i18922.scala new file mode 100644 index 000000000000..9c76177a0c7a --- /dev/null +++ b/tests/neg/i18922.scala @@ -0,0 +1,11 @@ +import scala.annotation.targetName + +def doClose(closable: { def close(): Unit }): Unit = + import reflect.Selectable.reflectiveSelectable + closable.close() + +class Resource: + @targetName("foo") + def close(): Unit = ??? 
+ +def test = doClose(Resource()) // error \ No newline at end of file diff --git a/tests/neg/i18933.check b/tests/neg/i18933.check new file mode 100644 index 000000000000..dc4db455c2de --- /dev/null +++ b/tests/neg/i18933.check @@ -0,0 +1,10 @@ +-- Error: tests/neg/i18933.scala:3:8 ----------------------------------------------------------------------------------- +3 | infix case B(b: B) // error // error + | ^^^^ + | end of statement expected but 'case' found +-- [E006] Not Found Error: tests/neg/i18933.scala:3:2 ------------------------------------------------------------------ +3 | infix case B(b: B) // error // error + | ^^^^^ + | Not found: infix + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i18933.scala b/tests/neg/i18933.scala new file mode 100644 index 000000000000..7802c3961a25 --- /dev/null +++ b/tests/neg/i18933.scala @@ -0,0 +1,3 @@ +enum Extends[A, B]: + case A(a: A) + infix case B(b: B) // error // error diff --git a/tests/neg/i19100.check b/tests/neg/i19100.check new file mode 100644 index 000000000000..e6ff80e450ab --- /dev/null +++ b/tests/neg/i19100.check @@ -0,0 +1,15 @@ +-- Error: tests/neg/i19100.scala:4:3 ----------------------------------------------------------------------------------- +4 | match // error + | ^^^^^ + | XML literals are no longer supported. + | See https://docs.scala-lang.org/scala3/reference/dropped-features/xml.html +-- Error: tests/neg/i19100.scala:5:10 ---------------------------------------------------------------------------------- +5 | case => 1 // error + | ^^^^^ + | XML literals are no longer supported. + | See https://docs.scala-lang.org/scala3/reference/dropped-features/xml.html +-- Error: tests/neg/i19100.scala:6:3 ----------------------------------------------------------------------------------- +6 | // error + | ^^^^^^^^^^ + | XML literals are no longer supported. + | See https://docs.scala-lang.org/scala3/reference/dropped-features/xml.html diff --git a/tests/neg/i19100.scala b/tests/neg/i19100.scala new file mode 100644 index 000000000000..2f15046b8561 --- /dev/null +++ b/tests/neg/i19100.scala @@ -0,0 +1,6 @@ +import scala.language.future + +def test = + match // error + case => 1 // error + // error diff --git a/tests/neg/i19506.scala b/tests/neg/i19506.scala new file mode 100644 index 000000000000..4e139fed07d0 --- /dev/null +++ b/tests/neg/i19506.scala @@ -0,0 +1,8 @@ +//> using options "-source 3.4-migration", + +trait Reader[T] +def read[T: Reader](s: String, trace: Boolean = false): T = ??? + +def Test = + read[Object]("") // error + read[Object]("")() // error diff --git a/tests/neg/i2033.check b/tests/neg/i2033.check index 5751d91f4f3a..7737bba96a5e 100644 --- a/tests/neg/i2033.check +++ b/tests/neg/i2033.check @@ -13,3 +13,9 @@ 6 | val out = new ObjectOutputStream(println) | ^^^^^^^ |method println is eta-expanded even though java.io.OutputStream does not have the @FunctionalInterface annotation. +-- Warning: tests/neg/i2033.scala:7:18 --------------------------------------------------------------------------------- +7 | val arr = bos toByteArray () // error + | ^^^^^^^^^^^ + | Alphanumeric method toByteArray is not declared infix; it should not be used as infix operator. + | Instead, use method syntax .toByteArray(...) or backticked identifier `toByteArray`. + | The latter can be rewritten automatically under -rewrite -source 3.4-migration. 
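Editor's note: the i2033 warning above offers plain selection or a backticked identifier; for user-defined methods there is also the `infix` modifier, which declares a method as intended for operator position. A sketch (hypothetical `Vec`; assumptions, not the test's code):

```scala
// Editor sketch (hypothetical), not part of this patch.
import java.io.ByteArrayOutputStream

def bytes(bos: ByteArrayOutputStream): Array[Byte] =
  bos.toByteArray()   // plain selection instead of `bos toByteArray ()`

class Vec(val xs: List[Int]):
  infix def dot(that: Vec): Int =   // declared infix, so operator position is accepted
    xs.zip(that.xs).map((a, b) => a * b).sum

def example: Int = Vec(List(1, 2)) dot Vec(List(3, 4))   // 1*3 + 2*4 = 11
```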
diff --git a/tests/neg-custom-args/deprecation/i2333.scala b/tests/neg/i2333.scala similarity index 77% rename from tests/neg-custom-args/deprecation/i2333.scala rename to tests/neg/i2333.scala index a22433394346..67cddbc73f48 100644 --- a/tests/neg-custom-args/deprecation/i2333.scala +++ b/tests/neg/i2333.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -deprecation + @deprecated("bla", "2.11.0") class Foo { println("") def this(x: Int) = this() diff --git a/tests/neg-custom-args/erased/i2642.scala b/tests/neg/i2642.scala similarity index 86% rename from tests/neg-custom-args/erased/i2642.scala rename to tests/neg/i2642.scala index fdb73e215d52..b9cde8b1b450 100644 --- a/tests/neg-custom-args/erased/i2642.scala +++ b/tests/neg/i2642.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + object Foo { type X = (using ) => Int // error: an identifier expected, but ')' found def ff: X = () // error: found: Unit, expected: Int diff --git a/tests/neg/i2673.scala b/tests/neg/i2673.scala new file mode 100644 index 000000000000..62c787fa00fb --- /dev/null +++ b/tests/neg/i2673.scala @@ -0,0 +1,6 @@ +//> using options -Xfatal-warnings + +package Foos + +class Foo // error +class foo diff --git a/tests/neg/i2673b.scala b/tests/neg/i2673b.scala new file mode 100644 index 000000000000..26e09ec3297e --- /dev/null +++ b/tests/neg/i2673b.scala @@ -0,0 +1,6 @@ +//> using options -Xfatal-warnings + +package Foos + +class Bar // error +object bar diff --git a/tests/neg/i2673c.scala b/tests/neg/i2673c.scala new file mode 100644 index 000000000000..575d8c25d9e0 --- /dev/null +++ b/tests/neg/i2673c.scala @@ -0,0 +1,8 @@ +//> using options -Xfatal-warnings + +package Foos + +object Outer { + case class X() // error + object x +} diff --git a/tests/neg/i2887b.scala b/tests/neg/i2887b.scala index 649d869678cb..b41a392271e2 100644 --- a/tests/neg/i2887b.scala +++ b/tests/neg/i2887b.scala @@ -4,7 +4,7 @@ trait C { type M <: B } trait D { type M >: A } object Test { - def test(x: C with D): Unit = { + def test(x: C & D): Unit = { def foo(a: A, b: B)(z: a.S[b.I,a.I][b.S[a.I,a.I]]) = z def bar(a: A, y: x.M) = foo(a,y) def baz(a: A) = bar(a, a) diff --git a/tests/neg/i3246.scala b/tests/neg/i3246.scala new file mode 100644 index 000000000000..07f1480a14ab --- /dev/null +++ b/tests/neg/i3246.scala @@ -0,0 +1,6 @@ +//> using options -source 3.0-migration + +class Test { + def foo(x: Int) = 1 + val bar: () => Int = foo _ // error: type mismatch +} diff --git a/tests/neg-custom-args/fatal-warnings/i3561.scala b/tests/neg/i3561.scala similarity index 91% rename from tests/neg-custom-args/fatal-warnings/i3561.scala rename to tests/neg/i3561.scala index f6e754e9ec8c..862e65f3c2f7 100644 --- a/tests/neg-custom-args/fatal-warnings/i3561.scala +++ b/tests/neg/i3561.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + class Test { val Constant = 'Q' // OK if final def tokenMe(ch: Char) = (ch: @annotation.switch) match { // error: could not emit switch diff --git a/tests/neg/i3745c.scala b/tests/neg/i3745c.scala deleted file mode 100644 index 40daf0072e05..000000000000 --- a/tests/neg/i3745c.scala +++ /dev/null @@ -1 +0,0 @@ -import scala.collection.{ Seq as A, Seq as B } // error: Seq is renamed twice diff --git a/tests/neg/i4008.check b/tests/neg/i4008.check new file mode 100644 index 000000000000..c1f27866d1c1 --- /dev/null +++ b/tests/neg/i4008.check @@ -0,0 +1,40 @@ +-- [E158] Reference Error: tests/neg/i4008.scala:7:56 
------------------------------------------------------------------ +7 |@annotation.implicitNotFound("An implicit ShouldWarn1[${B}] is not in scope") // error + | ^ + | Invalid reference to a type variable `B` found in the annotation argument. + | The variable does not occur as a parameter in the scope of type `ShouldWarn1`. +-- [E158] Reference Error: tests/neg/i4008.scala:11:56 ----------------------------------------------------------------- +11 |@annotation.implicitNotFound("An implicit ShouldWarn2[${A}] is not in scope") // error + | ^ + | Invalid reference to a type variable `A` found in the annotation argument. + | The variable does not occur as a parameter in the scope of type `ShouldWarn2`. +-- [E158] Reference Error: tests/neg/i4008.scala:15:56 ----------------------------------------------------------------- +15 |@annotation.implicitNotFound("An implicit ShouldWarn3[${A},${B}] is not in scope") // error + | ^ + | Invalid reference to a type variable `A` found in the annotation argument. + | The variable does not occur as a parameter in the scope of type `ShouldWarn3`. +-- [E158] Reference Error: tests/neg/i4008.scala:19:56 ----------------------------------------------------------------- +19 |@annotation.implicitNotFound("An implicit ShouldWarn4[${A},${B}] is not in scope") // error // error + | ^ + | Invalid reference to a type variable `A` found in the annotation argument. + | The variable does not occur as a parameter in the scope of type `ShouldWarn4`. +-- [E158] Reference Error: tests/neg/i4008.scala:19:61 ----------------------------------------------------------------- +19 |@annotation.implicitNotFound("An implicit ShouldWarn4[${A},${B}] is not in scope") // error // error + | ^ + | Invalid reference to a type variable `B` found in the annotation argument. + | The variable does not occur as a parameter in the scope of type `ShouldWarn4`. +-- [E158] Reference Error: tests/neg/i4008.scala:23:61 ----------------------------------------------------------------- +23 |@annotation.implicitNotFound("An implicit ShouldWarn5[${C},${Abc}] is not in scope") // error + | ^ + | Invalid reference to a type variable `Abc` found in the annotation argument. + | The variable does not occur as a parameter in the scope of type `ShouldWarn5`. +-- [E158] Reference Error: tests/neg/i4008.scala:46:54 ----------------------------------------------------------------- +46 |class C[A](using @annotation.implicitNotFound("No C[${B}] found") c: Class[A]) // error + | ^ + | Invalid reference to a type variable `B` found in the annotation argument. + | The variable does not occur as a parameter in the scope of the constructor of `C`. +-- [E158] Reference Error: tests/neg/i4008.scala:48:62 ----------------------------------------------------------------- +48 |def someMethod1[A](using @annotation.implicitNotFound("No C[${B}] found") sc: C[A]) = 0 // error + | ^ + | Invalid reference to a type variable `B` found in the annotation argument. + | The variable does not occur as a parameter in the scope of method `someMethod1`. 
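Editor's note: for contrast with the E158 errors above, here is a version where every `${...}` reference is a type parameter actually in scope (hypothetical names; a sketch, not part of the patch):

```scala
// Editor sketch (hypothetical), not part of this patch.
import scala.annotation.implicitNotFound

// `A` and `B` are parameters of the annotated trait, so both references are valid
@implicitNotFound("An implicit WouldWork[${A}, ${B}] is not in scope")
trait WouldWork[A, B]

// on a method, only that method's own type parameters may be referenced
def someMethod[A](using
  @implicitNotFound("No WouldWork[${A}, ${A}] found") w: WouldWork[A, A]
): Int = 0
```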
diff --git a/tests/neg-custom-args/fatal-warnings/i4008.scala b/tests/neg/i4008.scala similarity index 97% rename from tests/neg-custom-args/fatal-warnings/i4008.scala rename to tests/neg/i4008.scala index e6e46dc83a78..5b851c987e37 100644 --- a/tests/neg-custom-args/fatal-warnings/i4008.scala +++ b/tests/neg/i4008.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + // ===== Template annotations ===== diff --git a/tests/neg-custom-args/erased/i4060.scala b/tests/neg/i4060.scala similarity index 92% rename from tests/neg-custom-args/erased/i4060.scala rename to tests/neg/i4060.scala index a1a2eee68dc0..ba641d633d3c 100644 --- a/tests/neg-custom-args/erased/i4060.scala +++ b/tests/neg/i4060.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + // See https://github.com/lampepfl/dotty/issues/4060#issuecomment-445808377 object App { diff --git a/tests/neg-custom-args/fatal-warnings/i4364.scala b/tests/neg/i4364.scala similarity index 88% rename from tests/neg-custom-args/fatal-warnings/i4364.scala rename to tests/neg/i4364.scala index 5ec3f9cd169d..85dca443f6d4 100644 --- a/tests/neg-custom-args/fatal-warnings/i4364.scala +++ b/tests/neg/i4364.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + object Test { def foo(c: java.util.function.Consumer[Integer]) = c.accept(0) def f(x: Int): Unit = () diff --git a/tests/neg/i4382.check b/tests/neg/i4382.check index 4905638ab62a..bf4dc4a192d8 100644 --- a/tests/neg/i4382.check +++ b/tests/neg/i4382.check @@ -1,23 +1,23 @@ -- [E043] Type Error: tests/neg/i4382.scala:3:10 ----------------------------------------------------------------------- -3 | def v1: Id[_] = ??? // error +3 | def v1: Id[?] = ??? // error | ^^^^^ | unreducible application of higher-kinded type App.Id to wildcard arguments | | longer explanation available when compiling with `-explain` -- [E043] Type Error: tests/neg/i4382.scala:6:10 ----------------------------------------------------------------------- -6 | def v2: HkL[_] = ??? // error +6 | def v2: HkL[?] = ??? // error | ^^^^^^ | unreducible application of higher-kinded type App.HkL to wildcard arguments | | longer explanation available when compiling with `-explain` -- [E043] Type Error: tests/neg/i4382.scala:9:10 ----------------------------------------------------------------------- -9 | def v3: HkU[_] = ??? // error +9 | def v3: HkU[?] = ??? // error | ^^^^^^ | unreducible application of higher-kinded type App.HkU to wildcard arguments | | longer explanation available when compiling with `-explain` -- [E043] Type Error: tests/neg/i4382.scala:12:10 ---------------------------------------------------------------------- -12 | def v4: HkAbs[_] = ??? // error +12 | def v4: HkAbs[?] = ??? // error | ^^^^^^^^ | unreducible application of higher-kinded type App.HkAbs to wildcard arguments | diff --git a/tests/neg/i4382.scala b/tests/neg/i4382.scala index aefe23094ce9..80f0948fa82c 100644 --- a/tests/neg/i4382.scala +++ b/tests/neg/i4382.scala @@ -1,13 +1,13 @@ object App { type Id[A] >: A <: A - def v1: Id[_] = ??? // error + def v1: Id[?] = ??? // error type HkL[A] >: A - def v2: HkL[_] = ??? // error + def v2: HkL[?] = ??? // error type HkU[A] <: A - def v3: HkU[_] = ??? // error + def v3: HkU[?] = ??? // error type HkAbs[A] - def v4: HkAbs[_] = ??? // error + def v4: HkAbs[?] = ??? 
// error } diff --git a/tests/neg/i4453.scala b/tests/neg/i4453.scala index cc339f83b35b..41f7dab62e1a 100644 --- a/tests/neg/i4453.scala +++ b/tests/neg/i4453.scala @@ -1,2 +1,2 @@ -class x0 { var x0 == _ * // error: _* can be used only for last argument // error: == cannot be used as an extractor in a pattern because it lacks an unapply or unapplySeq method -// error '=' expected, but eof found \ No newline at end of file +class x0 { var x0 == _ * // error spread operator `*` not allowed here // error '=' expected + // error: == cannot be used as an extractor in a pattern because it lacks an unapply or unapplySeq method \ No newline at end of file diff --git a/tests/neg/i4496b.scala b/tests/neg/i4496b.scala index ee7a0f444774..e84c29fd9347 100644 --- a/tests/neg/i4496b.scala +++ b/tests/neg/i4496b.scala @@ -10,7 +10,7 @@ object TestStructuralVar { type T = {val a: Int; def a_=(x: Int): Unit} def upcast1(v: Foo1): T = v // error def upcast2(v: Foo2): T = v // error - def upcast3(v: Foo3): T = v + def upcast3(v: Foo3): T = v // error def verify(v: T) = () def test(): Unit = { verify(upcast1(new Foo1 { val a = 10 })) diff --git a/tests/neg/i4557.scala b/tests/neg/i4557.scala index ffdf3b5be97e..74f9099c6d08 100644 --- a/tests/neg/i4557.scala +++ b/tests/neg/i4557.scala @@ -9,11 +9,11 @@ object O { type S0[X, Y] = C1[X, Y] type S1 = C1[Int] // error - class D0 extends T0 // error + class D0 extends T0 // was error, now ok class D1 extends T0[Int] class D2 extends T0[String, Int] // error - class E0 extends S0 // error + class E0 extends S0 // was error, now ok class E1 extends S0[Int] // error class E2 extends S0[String, Int] } diff --git a/tests/neg/i4812.check b/tests/neg/i4812.check index 275cda56defe..f4aee0e35dde 100644 --- a/tests/neg/i4812.check +++ b/tests/neg/i4812.check @@ -1,28 +1,42 @@ --- Error: tests/neg/i4812.scala:8:11 ----------------------------------------------------------------------------------- +-- [E092] Pattern Match Error: tests/neg/i4812.scala:8:11 -------------------------------------------------------------- 8 | case prev: A => // error: the type test for A cannot be checked at runtime | ^ | the type test for A cannot be checked at runtime because it's a local class --- Error: tests/neg/i4812.scala:18:11 ---------------------------------------------------------------------------------- + | + | longer explanation available when compiling with `-explain` +-- [E092] Pattern Match Error: tests/neg/i4812.scala:18:11 ------------------------------------------------------------- 18 | case prev: A => // error: the type test for A cannot be checked at runtime | ^ | the type test for A cannot be checked at runtime because it's a local class --- Error: tests/neg/i4812.scala:28:11 ---------------------------------------------------------------------------------- + | + | longer explanation available when compiling with `-explain` +-- [E092] Pattern Match Error: tests/neg/i4812.scala:28:11 ------------------------------------------------------------- 28 | case prev: A => // error: the type test for A cannot be checked at runtime | ^ | the type test for A cannot be checked at runtime because it's a local class --- Error: tests/neg/i4812.scala:38:11 ---------------------------------------------------------------------------------- + | + | longer explanation available when compiling with `-explain` +-- [E092] Pattern Match Error: tests/neg/i4812.scala:38:11 ------------------------------------------------------------- 38 | case prev: A => // error: the type test for A cannot be 
checked at runtime | ^ | the type test for A cannot be checked at runtime because it's a local class --- Error: tests/neg/i4812.scala:50:13 ---------------------------------------------------------------------------------- + | + | longer explanation available when compiling with `-explain` +-- [E092] Pattern Match Error: tests/neg/i4812.scala:50:13 ------------------------------------------------------------- 50 | case prev: A => // error: the type test for A cannot be checked at runtime | ^ | the type test for A cannot be checked at runtime because it's a local class --- Error: tests/neg/i4812.scala:60:11 ---------------------------------------------------------------------------------- + | + | longer explanation available when compiling with `-explain` +-- [E092] Pattern Match Error: tests/neg/i4812.scala:60:11 ------------------------------------------------------------- 60 | case prev: A => // error: the type test for A cannot be checked at runtime | ^ | the type test for A cannot be checked at runtime because it's a local class --- Error: tests/neg/i4812.scala:96:11 ---------------------------------------------------------------------------------- + | + | longer explanation available when compiling with `-explain` +-- [E092] Pattern Match Error: tests/neg/i4812.scala:96:11 ------------------------------------------------------------- 96 | case x: B => // error: the type test for B cannot be checked at runtime | ^ | the type test for B cannot be checked at runtime because it's a local class + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i4812.scala b/tests/neg/i4812.scala index fa38980f78d1..c6f6dafc656c 100644 --- a/tests/neg/i4812.scala +++ b/tests/neg/i4812.scala @@ -1,6 +1,6 @@ -// scalac: -Werror +//> using options -Werror object Test: - var prev: Any = _ + var prev: Any = scala.compiletime.uninitialized def test[T](x: T): T = class A(val elem: (T, Boolean)) @@ -55,7 +55,7 @@ object Test: def test6[T](x: T): T = class A { var b: B = null } - class B { var a: A = null; var elem: T = _ } + class B { var a: A = null; var elem: T = scala.compiletime.uninitialized } prev match case prev: A => // error: the type test for A cannot be checked at runtime prev.b.elem @@ -88,7 +88,7 @@ object Test: case x: B => x sealed class A - var prevA: A = _ + var prevA: A = scala.compiletime.uninitialized def test10: A = val methodCallId = System.nanoTime() class B(val id: Long) extends A diff --git a/tests/neg/i4936b.scala b/tests/neg/i4936b.scala new file mode 100644 index 000000000000..58c240e92309 --- /dev/null +++ b/tests/neg/i4936b.scala @@ -0,0 +1,3 @@ +//> using options -Xfatal-warnings + +final object Foo // error diff --git a/tests/neg/i4986a.check b/tests/neg/i4986a.check index 141f3fa8aacb..c6e7299ee94f 100644 --- a/tests/neg/i4986a.check +++ b/tests/neg/i4986a.check @@ -4,6 +4,6 @@ |Cannot construct a collection of type List[String] with elements of type Int based on a collection of type List[Int].. |I found: | - | collection.BuildFrom.buildFromIterableOps[CC, A0, A] + | scala.collection.BuildFrom.buildFromIterableOps[CC, A0, A] | - |But method buildFromIterableOps in trait BuildFromLowPriority2 does not match type collection.BuildFrom[List[Int], Int, List[String]]. + |But method buildFromIterableOps in trait BuildFromLowPriority2 does not match type scala.collection.BuildFrom[List[Int], Int, List[String]]. 
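Editor's note: alongside the message-format changes, i4812.scala above swaps the dropped `var x: T = _` default initializer for `scala.compiletime.uninitialized`. The pattern in isolation (hypothetical class, a sketch):

```scala
// Editor sketch (hypothetical), not part of this patch.
import scala.compiletime.uninitialized

class Holder:
  var data: Array[Byte] = uninitialized   // was: var data: Array[Byte] = _
  def init(size: Int): Unit =
    data = new Array[Byte](size)
```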
diff --git a/tests/neg/i4986b.check b/tests/neg/i4986b.check new file mode 100644 index 000000000000..db30dcba8003 --- /dev/null +++ b/tests/neg/i4986b.check @@ -0,0 +1,40 @@ +-- [E158] Reference Error: tests/neg/i4986b.scala:5:65 ----------------------------------------------------------------- +5 |@implicitNotFound(msg = "Cannot construct a collection of type ${Too} with elements of type ${Elem} based on a collection of type ${From}.") // error // error + | ^ + | Invalid reference to a type variable `Too` found in the annotation argument. + | The variable does not occur as a parameter in the scope of type `Meh`. +-- [E158] Reference Error: tests/neg/i4986b.scala:5:94 ----------------------------------------------------------------- +5 |@implicitNotFound(msg = "Cannot construct a collection of type ${Too} with elements of type ${Elem} based on a collection of type ${From}.") // error // error + | ^ + | Invalid reference to a type variable `Elem` found in the annotation argument. + | The variable does not occur as a parameter in the scope of type `Meh`. +-- [E158] Reference Error: tests/neg/i4986b.scala:8:71 ----------------------------------------------------------------- +8 |@implicitNotFound(msg = "Cannot construct a collection of type ${To} ${Elem}.") // error + | ^ + | Invalid reference to a type variable `Elem` found in the annotation argument. + | The variable does not occur as a parameter in the scope of type `Meh2`. +-- [E158] Reference Error: tests/neg/i4986b.scala:11:46 ---------------------------------------------------------------- +11 |class C[T](implicit @implicitNotFound("No C[${t}] available") t: T) // error + | ^ + | Invalid reference to a type variable `t` found in the annotation argument. + | The variable does not occur as a parameter in the scope of the constructor of `C`. +-- [E158] Reference Error: tests/neg/i4986b.scala:14:54 ---------------------------------------------------------------- +14 | def m[Aaa](implicit @implicitNotFound("I see no C[${Uuh}]") theC: C[Aaa]) = ??? // error + | ^ + | Invalid reference to a type variable `Uuh` found in the annotation argument. + | The variable does not occur as a parameter in the scope of method `m`. +-- [E158] Reference Error: tests/neg/i4986b.scala:20:73 ---------------------------------------------------------------- +20 | def m[S](implicit @implicitNotFound("${X} ${Y} ${ Z } ${R} ${S} -- ${XX} ${ZZ} ${ Nix }") i: Int) = ??? // error // error // error + | ^ + | Invalid reference to a type variable `XX` found in the annotation argument. + | The variable does not occur as a parameter in the scope of method `m`. +-- [E158] Reference Error: tests/neg/i4986b.scala:20:79 ---------------------------------------------------------------- +20 | def m[S](implicit @implicitNotFound("${X} ${Y} ${ Z } ${R} ${S} -- ${XX} ${ZZ} ${ Nix }") i: Int) = ??? // error // error // error + | ^ + | Invalid reference to a type variable `ZZ` found in the annotation argument. + | The variable does not occur as a parameter in the scope of method `m`. +-- [E158] Reference Error: tests/neg/i4986b.scala:20:86 ---------------------------------------------------------------- +20 | def m[S](implicit @implicitNotFound("${X} ${Y} ${ Z } ${R} ${S} -- ${XX} ${ZZ} ${ Nix }") i: Int) = ??? // error // error // error + | ^ + | Invalid reference to a type variable `Nix` found in the annotation argument. + | The variable does not occur as a parameter in the scope of method `m`. 
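Editor's note: many of the moves in this patch (e.g. from `tests/neg-custom-args/fatal-warnings/...` into `tests/neg/...`) follow one pattern: the flags implied by the old directory become an inline directive on the file's first line. A sketch of the shape, mirroring the i2333 header above (hypothetical test, not one added here):

```scala
// Editor sketch (hypothetical), not part of this patch.
//> using options -Xfatal-warnings -deprecation

@deprecated("gone", "3.0")
def legacy: Int = 1

def use: Int = legacy // would be flagged: deprecated, fatal under -Xfatal-warnings
```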
diff --git a/tests/neg-custom-args/fatal-warnings/i4986b.scala b/tests/neg/i4986b.scala similarity index 95% rename from tests/neg-custom-args/fatal-warnings/i4986b.scala rename to tests/neg/i4986b.scala index e4ee1e64e8fe..c726e40fd4d8 100644 --- a/tests/neg-custom-args/fatal-warnings/i4986b.scala +++ b/tests/neg/i4986b.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + import scala.annotation.implicitNotFound @implicitNotFound(msg = "Cannot construct a collection of type ${Too} with elements of type ${Elem} based on a collection of type ${From}.") // error // error diff --git a/tests/neg/i4986c.scala b/tests/neg/i4986c.scala index 31458a7e9cbb..13b608a69707 100644 --- a/tests/neg/i4986c.scala +++ b/tests/neg/i4986c.scala @@ -57,7 +57,7 @@ object Test { implicitly[U[Int, Option, Map]] // error - val u = new U[String, List, [A, _] =>> List[Option[_]]] { } + val u = new U[String, List, [A, _] =>> List[Option[?]]] { } val i = new u.I[Int] i.m[Option[Long]] // error } diff --git a/tests/neg-custom-args/fatal-warnings/i4986d.scala b/tests/neg/i4986d.scala similarity index 93% rename from tests/neg-custom-args/fatal-warnings/i4986d.scala rename to tests/neg/i4986d.scala index 74a3c01aaa71..909221bea5f0 100644 --- a/tests/neg-custom-args/fatal-warnings/i4986d.scala +++ b/tests/neg/i4986d.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + trait Foo[A] type Fooable[A] = { diff --git a/tests/neg/i5004.scala b/tests/neg/i5004.scala index a8acfa231bca..02105104efd1 100644 --- a/tests/neg/i5004.scala +++ b/tests/neg/i5004.scala @@ -1,6 +1,6 @@ object i0 { 1 match { def this(): Int // error - def this() // error -} + def this() +} // error } diff --git a/tests/neg-custom-args/fatal-warnings/i5013.scala b/tests/neg/i5013.scala similarity index 87% rename from tests/neg-custom-args/fatal-warnings/i5013.scala rename to tests/neg/i5013.scala index 3581810259e9..adf09e2cf3be 100644 --- a/tests/neg-custom-args/fatal-warnings/i5013.scala +++ b/tests/neg/i5013.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + class Foo { def foo1: Unit = 2 // error: A pure expression does nothing in statement position diff --git a/tests/neg-custom-args/fatal-warnings/i5013b.scala b/tests/neg/i5013b.scala similarity index 88% rename from tests/neg-custom-args/fatal-warnings/i5013b.scala rename to tests/neg/i5013b.scala index 309c153e2ea3..7ccaf47c12f3 100644 --- a/tests/neg-custom-args/fatal-warnings/i5013b.scala +++ b/tests/neg/i5013b.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + class Foo { val a: Int = 3 diff --git a/tests/neg/i5077.scala b/tests/neg/i5077.scala index bcae42b6a5c5..d705ffe52dd9 100644 --- a/tests/neg/i5077.scala +++ b/tests/neg/i5077.scala @@ -1,3 +1,4 @@ +//> using options -Xfatal-warnings -Wimplausible-patterns trait Is[A] case object IsInt extends Is[Int] case object IsString extends Is[String] @@ -6,7 +7,7 @@ case class C[A](is: Is[A], value: A) @main def Test = { val c_string: C[String] = C(IsString, "name") - val c_any: C[_] = c_string + val c_any: C[?] 
= c_string val any: Any = c_string // Case 1: error diff --git a/tests/neg/i5101.check b/tests/neg/i5101.check index 4f4bac89aa44..c86976398b0e 100644 --- a/tests/neg/i5101.check +++ b/tests/neg/i5101.check @@ -1,6 +1,6 @@ --- [E006] Not Found Error: tests/neg/i5101.scala:11:11 ----------------------------------------------------------------- +-- [E189] Not Found Error: tests/neg/i5101.scala:11:11 ----------------------------------------------------------------- 11 | case A0(_) => // error | ^^ - | Not found: A0 + | no pattern match extractor named A0 was found | | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i5498-postfixOps.check b/tests/neg/i5498-postfixOps.check index d41862364270..c3d0453aff94 100644 --- a/tests/neg/i5498-postfixOps.check +++ b/tests/neg/i5498-postfixOps.check @@ -4,13 +4,25 @@ | expression expected but end of statement found | | longer explanation available when compiling with `-explain` --- [E018] Syntax Error: tests/neg/i5498-postfixOps.scala:6:37 ---------------------------------------------------------- -6 | Seq(1, 2).filter(List(1,2) contains) // error: usage of postfix operator // error - | ^ - | expression expected but ')' found - | - | longer explanation available when compiling with `-explain` +-- [E040] Syntax Error: tests/neg/i5498-postfixOps.scala:6:29 ---------------------------------------------------------- +6 | Seq(1, 2).filter(List(1,2) contains) // error: usage of postfix operator // error // error (type error) // error (type error) + | ^^^^^^^^ + | ',' or ')' expected, but identifier found -- [E172] Type Error: tests/neg/i5498-postfixOps.scala:6:0 ------------------------------------------------------------- -6 | Seq(1, 2).filter(List(1,2) contains) // error: usage of postfix operator // error +6 | Seq(1, 2).filter(List(1,2) contains) // error: usage of postfix operator // error // error (type error) // error (type error) |^ - |No given instance of type scala.concurrent.duration.DurationConversions.Classifier[Null] was found for parameter ev of method second in trait DurationConversions + |Ambiguous given instances: both object spanConvert in object DurationConversions and object fromNowConvert in object DurationConversions match type scala.concurrent.duration.DurationConversions.Classifier[C] of parameter ev of method second in trait DurationConversions +-- [E007] Type Mismatch Error: tests/neg/i5498-postfixOps.scala:6:24 --------------------------------------------------- +6 | Seq(1, 2).filter(List(1,2) contains) // error: usage of postfix operator // error // error (type error) // error (type error) + | ^ + | Found: (1 : Int) + | Required: Boolean + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/i5498-postfixOps.scala:6:26 --------------------------------------------------- +6 | Seq(1, 2).filter(List(1,2) contains) // error: usage of postfix operator // error // error (type error) // error (type error) + | ^ + | Found: (2 : Int) + | Required: Boolean + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i5498-postfixOps.scala b/tests/neg/i5498-postfixOps.scala index 6dd89517bf52..3b95603f7961 100644 --- a/tests/neg/i5498-postfixOps.scala +++ b/tests/neg/i5498-postfixOps.scala @@ -3,5 +3,5 @@ import scala.concurrent.duration.* def test() = { 1 second // error: usage of postfix operator - Seq(1, 2).filter(List(1,2) contains) // error: usage of postfix operator // error + Seq(1, 2).filter(List(1,2) contains) // 
error: usage of postfix operator // error // error (type error) // error (type error) } diff --git a/tests/neg/i5525b.scala b/tests/neg/i5525b.scala new file mode 100644 index 000000000000..d51564ad52c1 --- /dev/null +++ b/tests/neg/i5525b.scala @@ -0,0 +1,11 @@ +//> using options -language:experimental.erasedDefinitions + +erased enum Foo6 {} // error: only access modifiers allowed + +enum Foo10 { // error: Enumerations must contain at least one case + erased case C6() // error // error +} + +enum Foo11 { // error: Enumerations must contain at least one case + erased case C6 // error // error +} diff --git a/tests/neg-strict/i5854.scala b/tests/neg/i5854.scala similarity index 89% rename from tests/neg-strict/i5854.scala rename to tests/neg/i5854.scala index e8c68105bbae..61c7c2d833e7 100644 --- a/tests/neg-strict/i5854.scala +++ b/tests/neg/i5854.scala @@ -1,3 +1,5 @@ +//> using options -source future -deprecation -Xfatal-warnings + object bar { trait Sub { type M diff --git a/tests/neg/i5976.scala b/tests/neg/i5976.scala index 8a9c29b85ae1..0b037f50a4ea 100644 --- a/tests/neg/i5976.scala +++ b/tests/neg/i5976.scala @@ -4,4 +4,4 @@ object Test { val g: (=> Int) => Int = f val h: Int => Int = g // error -} \ No newline at end of file +} diff --git a/tests/neg/i6059.scala b/tests/neg/i6059.scala index 375c99fb6ba0..d6f28b0c2574 100644 --- a/tests/neg/i6059.scala +++ b/tests/neg/i6059.scala @@ -1,3 +1,3 @@ def I0(I1: Int ) = I1 -val I1 = I0(I0 i2) => // error +val I1 = I0(I0 i2) => // error // error true diff --git a/tests/neg/i6190a.check b/tests/neg/i6190a.check new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/tests/neg/i6190b.check b/tests/neg/i6190b.check new file mode 100644 index 000000000000..5d4dbd7c24cb --- /dev/null +++ b/tests/neg/i6190b.check @@ -0,0 +1,4 @@ +-- Error: tests/neg/i6190b.scala:5:29 ---------------------------------------------------------------------------------- +5 |def foo = List("1", "2").map(Rule) // error + | ^^^^ + | The method `apply` is inserted. The auto insertion will be deprecated, please write `Rule.apply` explicitly. diff --git a/tests/neg/i6190b.scala b/tests/neg/i6190b.scala new file mode 100644 index 000000000000..42270c1a984c --- /dev/null +++ b/tests/neg/i6190b.scala @@ -0,0 +1,5 @@ +//> using options -Xfatal-warnings + +case class Rule(name: String) + +def foo = List("1", "2").map(Rule) // error diff --git a/tests/neg/i6716.check b/tests/neg/i6716.check new file mode 100644 index 000000000000..cdf655710452 --- /dev/null +++ b/tests/neg/i6716.check @@ -0,0 +1,15 @@ +-- Error: tests/neg/i6716.scala:12:39 ---------------------------------------------------------------------------------- +12 | given Monad[Bar] = summon[Monad[Foo]] // error + | ^ + | Result of implicit search for Monad[Foo] will change. + | Current result Bar.given_Monad_Bar will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: Foo.given_Monad_Foo. + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that Bar.given_Monad_Bar comes earlier, + | - use an explicit argument. + | This will be an error in Scala 3.5 and later. 
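The i6716 message above lists three ways to keep the code working without the language import. A minimal sketch of the "explicit argument" option, reusing the definitions from the test file that follows; `Foo.given_Monad_Foo` is the compiler-generated given name that the message itself prints:

```scala
trait Monad[T]:
  def id: String

class Foo
object Foo:
  given Monad[Foo] with
    def id = "Foo"

opaque type Bar = Foo
object Bar:
  // Reference the intended instance explicitly instead of summoning it,
  // so there is no implicit search whose result could change.
  given Monad[Bar] = Foo.given_Monad_Foo
```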
diff --git a/tests/neg/i6716.scala b/tests/neg/i6716.scala new file mode 100644 index 000000000000..bbbd9d6d6cd0 --- /dev/null +++ b/tests/neg/i6716.scala @@ -0,0 +1,18 @@ +//> using options -Xfatal-warnings + +trait Monad[T]: + def id: String +class Foo +object Foo { + given Monad[Foo] with { def id = "Foo" } +} + +opaque type Bar = Foo +object Bar { + given Monad[Bar] = summon[Monad[Foo]] // error +} + +object Test extends App { + println(summon[Monad[Foo]].id) + println(summon[Monad[Bar]].id) +} \ No newline at end of file diff --git a/tests/neg/i6724.check b/tests/neg/i6724.check index 4dd50236bbcb..0d2481ddaa2b 100644 --- a/tests/neg/i6724.check +++ b/tests/neg/i6724.check @@ -1,4 +1,4 @@ -- [E008] Not Found Error: tests/neg/i6724.scala:7:17 ------------------------------------------------------------------ -7 | def f(foo: Foo.Baz): Foo[_] = foo // error +7 | def f(foo: Foo.Baz): Foo[?] = foo // error | ^^^^^^^ | type Baz is not a member of object Foo - did you mean Foo.Bar? diff --git a/tests/neg/i6724.scala b/tests/neg/i6724.scala index 7cb4d8eddbfb..18b660e7bc0c 100644 --- a/tests/neg/i6724.scala +++ b/tests/neg/i6724.scala @@ -4,5 +4,5 @@ enum Foo[T] { } object Main { - def f(foo: Foo.Baz): Foo[_] = foo // error + def f(foo: Foo.Baz): Foo[?] = foo // error } diff --git a/tests/neg/i6795.check b/tests/neg/i6795.check new file mode 100644 index 000000000000..3dac666927a6 --- /dev/null +++ b/tests/neg/i6795.check @@ -0,0 +1,4 @@ +-- Error: tests/neg/erased/i6795.scala:1:13 ---------------------------------------------------------------- +1 |erased class Foo // error + |^^^^^^^^^^^^^^^^ + |modifier(s) `erased` incompatible with type definition diff --git a/tests/neg/i7294-a.check b/tests/neg/i7294-a.check new file mode 100644 index 000000000000..2fe260fcf99a --- /dev/null +++ b/tests/neg/i7294-a.check @@ -0,0 +1,26 @@ +-- [E007] Type Mismatch Error: tests/neg/i7294-a.scala:10:20 ----------------------------------------------------------- +10 | case x: T => x.g(10) // error // error + | ^^^^^^^ + | Found: Any + | Required: T + | + | where: T is a type in given instance f with bounds <: foo.Foo + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg/i7294-a.scala:10:12 -------------------------------------------------------------------------------- +10 | case x: T => x.g(10) // error // error + | ^ + | Result of implicit search for scala.reflect.TypeTest[Nothing, T] will change. + | Current result foo.Test.f will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: No Matching Implicit. + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that foo.Test.f comes earlier, + | - use an explicit argument. + | This will be an error in Scala 3.5 and later. + | + | where: T is a type in given instance f with bounds <: foo.Foo diff --git a/tests/neg/i7294-a.scala b/tests/neg/i7294-a.scala index 13981fa4d375..3453e88cf741 100644 --- a/tests/neg/i7294-a.scala +++ b/tests/neg/i7294-a.scala @@ -1,9 +1,13 @@ +//> using options -Xfatal-warnings + package foo trait Foo { def g(x: Int): Any } -inline given f[T <: Foo]: T = ??? match { - case x: T => x.g(10) // error -} +object Test: + + inline given f[T <: Foo]: T = ??? 
match { + case x: T => x.g(10) // error // error + } -@main def Test = f + @main def Test = f diff --git a/tests/neg/i7294-b.scala b/tests/neg/i7294-b.scala index 423d5037db96..8c6f9328cc20 100644 --- a/tests/neg/i7294-b.scala +++ b/tests/neg/i7294-b.scala @@ -1,9 +1,11 @@ +//> using options -Xfatal-warnings + package foo trait Foo { def g(x: Any): Any } inline given f[T <: Foo]: T = ??? match { - case x: T => x.g(10) // error + case x: T => x.g(10) // error // error } @main def Test = f diff --git a/tests/neg-custom-args/i7314.scala b/tests/neg/i7314.scala similarity index 83% rename from tests/neg-custom-args/i7314.scala rename to tests/neg/i7314.scala index fbdf3dfc477f..b865f2ad881b 100644 --- a/tests/neg-custom-args/i7314.scala +++ b/tests/neg/i7314.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -source future + @main def Test = // conversion out of the opaque type: val imm1 = IArray(1,2,3) // supposedly immutable diff --git a/tests/neg/i7459.scala b/tests/neg/i7459.scala index b5b20d7bc22a..a17f32b15afa 100644 --- a/tests/neg/i7459.scala +++ b/tests/neg/i7459.scala @@ -2,7 +2,7 @@ object Foo { inline def summon[T](x: T): T = x match { case t: T => t } - println(summon) // error + println(summon) } import scala.deriving.* diff --git a/tests/neg/i7709.check b/tests/neg/i7709.check index 180cf1939d16..14d2dbaf4cde 100644 --- a/tests/neg/i7709.check +++ b/tests/neg/i7709.check @@ -2,47 +2,39 @@ 5 | class B extends X.Y // error | ^^^ | class Y cannot be accessed as a member of X.type from class B. - | Access to protected class Y not permitted because enclosing object A - | is not a subclass of object X where target is defined + | protected class Y can only be accessed from object X. -- [E173] Reference Error: tests/neg/i7709.scala:6:21 ------------------------------------------------------------------ 6 | class B2 extends X.Y: // error | ^^^ | class Y cannot be accessed as a member of X.type from class B2. - | Access to protected class Y not permitted because enclosing object A - | is not a subclass of object X where target is defined + | protected class Y can only be accessed from object X. -- [E173] Reference Error: tests/neg/i7709.scala:9:28 ------------------------------------------------------------------ 9 | class B4 extends B3(new X.Y) // error | ^^^ | class Y cannot be accessed as a member of X.type from class B4. - | Access to protected class Y not permitted because enclosing object A - | is not a subclass of object X where target is defined + | protected class Y can only be accessed from object X. -- [E173] Reference Error: tests/neg/i7709.scala:11:34 ----------------------------------------------------------------- 11 | def this(n: Int) = this(new X.Y().toString) // error | ^^^ | class Y cannot be accessed as a member of X.type from class B5. - | Access to protected class Y not permitted because enclosing object A - | is not a subclass of object X where target is defined + | protected class Y can only be accessed from object X. -- [E173] Reference Error: tests/neg/i7709.scala:13:20 ----------------------------------------------------------------- 13 | class B extends X.Y // error | ^^^ | class Y cannot be accessed as a member of X.type from class B. - | Access to protected class Y not permitted because enclosing trait T - | is not a subclass of object X where target is defined + | protected class Y can only be accessed from object X. 
-- [E173] Reference Error: tests/neg/i7709.scala:18:18 ----------------------------------------------------------------- 18 | def y = new xx.Y // error | ^^^^ | class Y cannot be accessed as a member of XX from class C. - | Access to protected class Y not permitted because enclosing class C - | is not a subclass of class XX where target is defined + | protected class Y can only be accessed from class XX or one of its subclasses. -- [E173] Reference Error: tests/neg/i7709.scala:23:20 ----------------------------------------------------------------- 23 | def y = new xx.Y // error | ^^^^ | class Y cannot be accessed as a member of XX from class D. - | Access to protected class Y not permitted because enclosing class D - | is not a subclass of class XX where target is defined + | protected class Y can only be accessed from class XX or one of its subclasses. -- [E173] Reference Error: tests/neg/i7709.scala:31:20 ----------------------------------------------------------------- 31 | class Q extends X.Y // error | ^^^ | class Y cannot be accessed as a member of p.X.type from class Q. - | Access to protected class Y not permitted because enclosing package p - | is not a subclass of object X in package p where target is defined + | protected class Y can only be accessed from object X in package p. diff --git a/tests/neg/i7812.scala b/tests/neg/i7812.scala index 264258fa4db3..9cbd58071bc8 100644 --- a/tests/neg/i7812.scala +++ b/tests/neg/i7812.scala @@ -1,3 +1,3 @@ def f(): Any = ??? var f: (UndefinedA & UndefinedB) { val x: Int } = ??? // error // error -val a = f // error \ No newline at end of file +val a = f diff --git a/tests/neg-custom-args/fatal-warnings/i7821.scala b/tests/neg/i7821.scala similarity index 93% rename from tests/neg-custom-args/fatal-warnings/i7821.scala rename to tests/neg/i7821.scala index 1574801826bc..5275c06e487f 100644 --- a/tests/neg-custom-args/fatal-warnings/i7821.scala +++ b/tests/neg/i7821.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + object XObject { opaque type X = Int diff --git a/tests/neg-custom-args/fatal-warnings/i7821b.scala b/tests/neg/i7821b.scala similarity index 91% rename from tests/neg-custom-args/fatal-warnings/i7821b.scala rename to tests/neg/i7821b.scala index 9e38b33b0cb3..f82a38d259e1 100644 --- a/tests/neg-custom-args/fatal-warnings/i7821b.scala +++ b/tests/neg/i7821b.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + object Test { { def f(x: Int, y: Int): Int = f(x, y) } // error diff --git a/tests/neg/i8012.scala b/tests/neg/i8012.scala index 01171fd3f80c..c5f3df050e2c 100644 --- a/tests/neg/i8012.scala +++ b/tests/neg/i8012.scala @@ -9,5 +9,5 @@ class C extends Q[?] // error: Type argument must be fully defined object O { def m(i: Int): Int = i - val x: Q[_] = m // error: result type of lambda is an underspecified SAM type Q[?] 
-} \ No newline at end of file + val x: Q[_] = m +} diff --git a/tests/neg/i8299.scala b/tests/neg/i8299.scala new file mode 100644 index 000000000000..e3e41515ff29 --- /dev/null +++ b/tests/neg/i8299.scala @@ -0,0 +1,8 @@ +package example + +object Main { + def main(a: Array[String]): Unit = { + val p: PolyFunction = // error: PolyFunction subtypes must refine the apply method + [A] => (xs: List[A]) => xs.headOption + } +} diff --git a/tests/neg-custom-args/fatal-warnings/i8427.scala b/tests/neg/i8427.scala similarity index 79% rename from tests/neg-custom-args/fatal-warnings/i8427.scala rename to tests/neg/i8427.scala index a5bdd68567c1..3db449bed41c 100644 --- a/tests/neg-custom-args/fatal-warnings/i8427.scala +++ b/tests/neg/i8427.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + @SerialVersionUID(1L) // error trait T diff --git a/tests/neg-custom-args/fatal-warnings/i8681.scala b/tests/neg/i8681.scala similarity index 89% rename from tests/neg-custom-args/fatal-warnings/i8681.scala rename to tests/neg/i8681.scala index c45b15ac3bf8..4d91509eb0d3 100644 --- a/tests/neg-custom-args/fatal-warnings/i8681.scala +++ b/tests/neg/i8681.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + case class A(a: Int) case class B(b: Int) case class C(c: Int) diff --git a/tests/neg/i8711.check b/tests/neg/i8711.check new file mode 100644 index 000000000000..5dbeeb22460c --- /dev/null +++ b/tests/neg/i8711.check @@ -0,0 +1,12 @@ +-- [E030] Match case Unreachable Error: tests/neg/i8711.scala:9:9 ------------------------------------------------------ +9 | case x: B => x // error: this case is unreachable since class A is not a subclass of class B + | ^^^^ + | Unreachable case +-- [E030] Match case Unreachable Error: tests/neg/i8711.scala:14:9 ----------------------------------------------------- +14 | case x: C => x // error + | ^^^^ + | Unreachable case +-- [E030] Match case Unreachable Error: tests/neg/i8711.scala:19:9 ----------------------------------------------------- +19 | case x: (B | C) => x // error + | ^^^^^^^^^^ + | Unreachable case diff --git a/tests/neg-custom-args/fatal-warnings/i8711.scala b/tests/neg/i8711.scala similarity index 90% rename from tests/neg-custom-args/fatal-warnings/i8711.scala rename to tests/neg/i8711.scala index 46fc5a85c90a..2647e20fe03b 100644 --- a/tests/neg-custom-args/fatal-warnings/i8711.scala +++ b/tests/neg/i8711.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + class A class B class C diff --git a/tests/neg/i8715.check b/tests/neg/i8715.check new file mode 100644 index 000000000000..c00e5e150193 --- /dev/null +++ b/tests/neg/i8715.check @@ -0,0 +1,4 @@ +-- Error: tests/neg/i8715.scala:2:46 ----------------------------------------------------------------------------------- +2 |def Test = List(42) match { case List(xs @ (ys*)) => xs } // error + | ^ + | bad use of `*` - sequence pattern not allowed here diff --git a/tests/neg/i8715.scala b/tests/neg/i8715.scala new file mode 100644 index 000000000000..90610fd788f8 --- /dev/null +++ b/tests/neg/i8715.scala @@ -0,0 +1,2 @@ +@main +def Test = List(42) match { case List(xs @ (ys*)) => xs } // error diff --git a/tests/neg/i8736.scala b/tests/neg/i8736.scala index dc2fa1821791..b2e7a0a66cb6 100644 --- a/tests/neg/i8736.scala +++ b/tests/neg/i8736.scala @@ -13,14 +13,14 @@ object App extends App { type Rec[K <: String, V0] = Rec0[K] { def get(k: K): V0 } def field[V](s: String)(v: V): Rec[s.type, V] = Rec0(Map(s -> v)).asInstanceOf[Rec[s.type, V]] - implicit class RecOps[R <: Rec0[_]](has: R) { 
- def +[K1 <: String, V1](that: Rec[K1, V1]): R with Rec[K1, V1] = Rec0(has.map ++ that.map).asInstanceOf[R with Rec[K1, V1]] + implicit class RecOps[R <: Rec0[?]](has: R) { + def +[K1 <: String, V1](that: Rec[K1, V1]): R & Rec[K1, V1] = Rec0(has.map ++ that.map).asInstanceOf[R & Rec[K1, V1]] } def rec: Rec["k", String] - with Rec["v", Int] - with Rec["z", Boolean] + & Rec["v", Int] + & Rec["z", Boolean] = { field("k")("Str") + field("v")(0) + diff --git a/tests/neg-custom-args/fatal-warnings/i8781b.scala b/tests/neg/i8781b.scala similarity index 75% rename from tests/neg-custom-args/fatal-warnings/i8781b.scala rename to tests/neg/i8781b.scala index a30b0e5249c9..7c9b074e1be7 100644 --- a/tests/neg-custom-args/fatal-warnings/i8781b.scala +++ b/tests/neg/i8781b.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + object Test: println((3: Boolean | Int).isInstanceOf[Boolean]) diff --git a/tests/neg/i8827a.check b/tests/neg/i8827a.check new file mode 100644 index 000000000000..3d6c2bfa500b --- /dev/null +++ b/tests/neg/i8827a.check @@ -0,0 +1,14 @@ +-- [E172] Type Error: tests/neg/i8827a.scala:16:26 --------------------------------------------------------------------- +16 | summon[Order[List[Foo]]] // error + | ^ + | No given instance of type pkg.Order[List[pkg.Foo]] was found for parameter x of method summon in object Predef. + | I found: + | + | pkg.Order.orderList[pkg.Foo](/* missing */summon[pkg.Order[pkg.Foo]]) + | + | But no implicit values were found that match type pkg.Order[pkg.Foo]. + | + | The following import might fix the problem: + | + | import pkg.Implicits.orderFoo + | diff --git a/tests/neg/i8827a.scala b/tests/neg/i8827a.scala new file mode 100644 index 000000000000..428028aa9448 --- /dev/null +++ b/tests/neg/i8827a.scala @@ -0,0 +1,16 @@ +package pkg + +trait Order[A] + +object Order { + implicit def orderList[A](implicit orderA: Order[A]): Order[List[A]] = ??? +} + +class Foo + +object Implicits { + implicit def orderFoo: Order[Foo] = ??? +} + +@main def main: Unit = + summon[Order[List[Foo]]] // error diff --git a/tests/neg/i8827b.check b/tests/neg/i8827b.check new file mode 100644 index 000000000000..6848c53aee28 --- /dev/null +++ b/tests/neg/i8827b.check @@ -0,0 +1,14 @@ +-- [E172] Type Error: tests/neg/i8827b.scala:16:28 --------------------------------------------------------------------- +16 | summon[Order[Option[Foo]]] // error + | ^ + |No given instance of type pkg.Order[Option[pkg.Foo]] was found for parameter x of method summon in object Predef. + |I found: + | + | pkg.Order.given_Order_Option[pkg.Foo](/* missing */summon[pkg.Order[pkg.Foo]]) + | + |But no implicit values were found that match type pkg.Order[pkg.Foo]. + | + |The following import might fix the problem: + | + | import pkg.Givens.orderFoo + | diff --git a/tests/neg/i8827b.scala b/tests/neg/i8827b.scala new file mode 100644 index 000000000000..283dd6b6d481 --- /dev/null +++ b/tests/neg/i8827b.scala @@ -0,0 +1,16 @@ +package pkg + +trait Order[A] + +object Order { + given [A](using orderA: Order[A]): Order[Option[A]] = ??? +} + +class Foo + +object Givens { + given orderFoo: Order[Foo] = ??? 
+} + +@main def main: Unit = + summon[Order[Option[Foo]]] // error diff --git a/tests/neg-custom-args/fatal-warnings/i8922b.scala b/tests/neg/i8922b.scala similarity index 99% rename from tests/neg-custom-args/fatal-warnings/i8922b.scala rename to tests/neg/i8922b.scala index 39b0bbedc9e3..9e615a8d5b58 100644 --- a/tests/neg-custom-args/fatal-warnings/i8922b.scala +++ b/tests/neg/i8922b.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + case class Token(tokenType: TokenType, lexeme: StringV, line: IntV) sealed trait TokenType diff --git a/tests/neg/i9166.scala b/tests/neg/i9166.scala index 7bbf2871eed3..b9644bf56f71 100644 --- a/tests/neg/i9166.scala +++ b/tests/neg/i9166.scala @@ -1,3 +1,4 @@ +//> using options -Xfatal-warnings -Wimplausible-patterns object UnitTest extends App { def foo(m: Unit) = m match { case runtime.BoxedUnit.UNIT => println("ok") // error diff --git a/tests/neg/i9168.scala b/tests/neg/i9168.scala new file mode 100644 index 000000000000..ed8bd4750972 --- /dev/null +++ b/tests/neg/i9168.scala @@ -0,0 +1,3 @@ +//> using options -Xfatal-warnings + +def g: Int = try 42 finally ; // error diff --git a/tests/neg-custom-args/fatal-warnings/i9241.scala b/tests/neg/i9241.scala similarity index 96% rename from tests/neg-custom-args/fatal-warnings/i9241.scala rename to tests/neg/i9241.scala index d3be9bc9278d..e7c959fc9ee2 100644 --- a/tests/neg-custom-args/fatal-warnings/i9241.scala +++ b/tests/neg/i9241.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + class Foo { def unary_~() : Foo = this // error def unary_-(using Int)(): Foo = this // error diff --git a/tests/neg/i9266.check b/tests/neg/i9266.check new file mode 100644 index 000000000000..bd80a017da02 --- /dev/null +++ b/tests/neg/i9266.check @@ -0,0 +1,5 @@ +-- Error: tests/neg/i9266.scala:5:22 ----------------------------------------------------------------------------------- +5 |def test = { implicit x: Int => x + x } // error + | ^ + | This syntax is no longer supported; parameter needs to be enclosed in (...) + | This construct can be rewritten automatically under -rewrite -source future-migration. diff --git a/tests/neg/i9266.scala b/tests/neg/i9266.scala new file mode 100644 index 000000000000..e6f8db7417c7 --- /dev/null +++ b/tests/neg/i9266.scala @@ -0,0 +1,5 @@ +//> using options -Xfatal-warnings + +import language.`future-migration` + +def test = { implicit x: Int => x + x } // error diff --git a/tests/neg/i9408a.check b/tests/neg/i9408a.check new file mode 100644 index 000000000000..d9deb9cddaf7 --- /dev/null +++ b/tests/neg/i9408a.check @@ -0,0 +1,24 @@ +-- Error: tests/neg/i9408a.scala:18:20 --------------------------------------------------------------------------------- +18 | val length: Int = "qwerty" // error + | ^^^^^^^^ + |The conversion (Test3.implicitLength : String => Int) will not be applied implicitly here in Scala 3 because only implicit methods and instances of Conversion class will continue to work as implicit views. +-- Error: tests/neg/i9408a.scala:23:20 --------------------------------------------------------------------------------- +23 | val length: Int = "qwerty" // error + | ^^^^^^^^ + |The conversion (Test4.implicitLength : => String => Int) will not be applied implicitly here in Scala 3 because only implicit methods and instances of Conversion class will continue to work as implicit views. 
+-- Error: tests/neg/i9408a.scala:28:20 --------------------------------------------------------------------------------- +28 | val length: Int = "qwerty" // error + | ^^^^^^^^ + |The conversion (Test5.implicitLength : [A]: String => Int) will not be applied implicitly here in Scala 3 because only implicit methods and instances of Conversion class will continue to work as implicit views. +-- Error: tests/neg/i9408a.scala:33:20 --------------------------------------------------------------------------------- +33 | val length: Int = "qwerty" // error + | ^^^^^^^^ + |The conversion (Test6.implicitLength : Map[String, Int]) will not be applied implicitly here in Scala 3 because only implicit methods and instances of Conversion class will continue to work as implicit views. +-- Error: tests/neg/i9408a.scala:37:60 --------------------------------------------------------------------------------- +37 | implicit def a2int[A](a: A)(implicit ev: A => Int): Int = a // error + | ^ + |The conversion (ev : A => Int) will not be applied implicitly here in Scala 3 because only implicit methods and instances of Conversion class will continue to work as implicit views. +-- Error: tests/neg/i9408a.scala:61:2 ---------------------------------------------------------------------------------- +61 | 123.foo // error + | ^^^ + |The conversion (Test11.a2foo : [A]: A => Test11.Foo) will not be applied implicitly here in Scala 3 because only implicit methods and instances of Conversion class will continue to work as implicit views. diff --git a/tests/neg-custom-args/fatal-warnings/i9408a.scala b/tests/neg/i9408a.scala similarity index 98% rename from tests/neg-custom-args/fatal-warnings/i9408a.scala rename to tests/neg/i9408a.scala index 754ca51b701a..594417ec3215 100644 --- a/tests/neg-custom-args/fatal-warnings/i9408a.scala +++ b/tests/neg/i9408a.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + import language.`3.0-migration` import scala.language.implicitConversions diff --git a/tests/neg/i9408b.check b/tests/neg/i9408b.check new file mode 100644 index 000000000000..5f8a854a9282 --- /dev/null +++ b/tests/neg/i9408b.check @@ -0,0 +1,5 @@ + +-- Error: tests/neg/i9408b/Test_2.scala:8:20 --------------------------------------------------------------------------- +8 | val length: Int = "abc" // error + | ^^^^^ + |The conversion (test.conversions.Conv.implicitLength : String => Int) will not be applied implicitly here in Scala 3 because only implicit methods and instances of Conversion class will continue to work as implicit views. 
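The i9408 messages above all describe the same migration: in Scala 3 only implicit methods and instances of `scala.Conversion` act as implicit views, so an implicit value of a function type such as `implicitLength` silently stops applying. A hedged sketch of the rewrite (the names are illustrative, not taken from the test sources):

```scala
import scala.language.implicitConversions

// Scala 2 style: an implicit *value* of a function type used to act as a view.
// In Scala 3 it no longer does, which is what the -source 3.0-migration warning reports.
// implicit val implicitLength: String => Int = _.length

// Scala 3 style: a Conversion given keeps working as an implicit view.
given Conversion[String, Int] = _.length

@main def lengthDemo =
  val length: Int = "qwerty" // goes through the Conversion
  println(length)            // prints 6
```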
diff --git a/tests/neg-custom-args/fatal-warnings/i9408b/Conv_1.scala b/tests/neg/i9408b/Conv_1.scala similarity index 100% rename from tests/neg-custom-args/fatal-warnings/i9408b/Conv_1.scala rename to tests/neg/i9408b/Conv_1.scala diff --git a/tests/neg/i9408b/Test_2.scala b/tests/neg/i9408b/Test_2.scala new file mode 100644 index 000000000000..6b3cbbafcb0e --- /dev/null +++ b/tests/neg/i9408b/Test_2.scala @@ -0,0 +1,9 @@ +//> using options -Xfatal-warnings + +import language.`3.0-migration` +import scala.language.implicitConversions + +object Test { + import test.conversions.Conv.* + val length: Int = "abc" // error +} diff --git a/tests/neg/i9517.scala b/tests/neg/i9517.scala new file mode 100644 index 000000000000..2e63ec5fe5f8 --- /dev/null +++ b/tests/neg/i9517.scala @@ -0,0 +1,4 @@ +//> using options -Xprint-types + +def test():Unit = foo({ case 1 => 10 }) // error +def foo(x: Any): Boolean = true diff --git a/tests/neg/i9685bis.check b/tests/neg/i9685bis.check new file mode 100644 index 000000000000..1cc81df9987c --- /dev/null +++ b/tests/neg/i9685bis.check @@ -0,0 +1,9 @@ +-- [E008] Not Found Error: tests/neg/i9685bis.scala:25:4 --------------------------------------------------------------- +25 | 1.asdf // error + | ^^^^^^ + | value asdf is not a member of Int, but could be made available as an extension method. + | + | The following import might make progress towards fixing the problem: + | + | import foo.Baz.toBaz + | diff --git a/tests/neg/i9685bis.scala b/tests/neg/i9685bis.scala new file mode 100644 index 000000000000..0a3e245fe738 --- /dev/null +++ b/tests/neg/i9685bis.scala @@ -0,0 +1,25 @@ +//> using options -source future + +package foo + +import scala.language.implicitConversions + +class Foo + +object Foo: + + inline implicit def toFoo(x: Int): Foo = Foo() + +class Bar + +object Bar: + inline given Conversion[Int, Bar] with + def apply(x: Int): Bar = Bar() + +class Baz + +object Baz: + transparent inline implicit def toBaz(x: Int): Baz = Baz() + +object Usage: + 1.asdf // error diff --git a/tests/neg/i9740.check b/tests/neg/i9740.check new file mode 100644 index 000000000000..359603a2863a --- /dev/null +++ b/tests/neg/i9740.check @@ -0,0 +1,12 @@ +-- [E186] Type Error: tests/neg/i9740.scala:10:9 ----------------------------------------------------------------------- +10 | case RecoveryCompleted => println("Recovery completed") // error + | ^^^^^^^^^^^^^^^^^ + | Implausible pattern: + | RecoveryCompleted could match selector of type object TypedRecoveryCompleted + | only if there is an `equals` method identifying elements of the two types. +-- [E186] Type Error: tests/neg/i9740.scala:15:9 ----------------------------------------------------------------------- +15 | case RecoveryCompleted => // error + | ^^^^^^^^^^^^^^^^^ + | Implausible pattern: + | RecoveryCompleted could match selector of type TypedRecoveryCompleted + | only if there is an `equals` method identifying elements of the two types. 
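For context on E186 above (enabled in these tests via `-Wimplausible-patterns`): a stable-identifier pattern whose type is unrelated to the selector can only ever match through a custom `equals`, which is almost always an accident. A minimal sketch shaped after the i9740 hierarchy, with the class shapes inferred from the messages:

```scala
abstract class RecoveryCompleted
object RecoveryCompleted extends RecoveryCompleted

abstract class TypedRecoveryCompleted
object TypedRecoveryCompleted extends TypedRecoveryCompleted

def test(t: TypedRecoveryCompleted): String =
  t match
    // Flagged as E186: RecoveryCompleted is unrelated to the selector type,
    // so this case could only match via a custom `equals`.
    case RecoveryCompleted      => "implausible"
    case TypedRecoveryCompleted => "recovery completed"
    case _                      => "other"
```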
diff --git a/tests/neg/i9740.scala b/tests/neg/i9740.scala index 2f342977ef5d..6222298df48b 100644 --- a/tests/neg/i9740.scala +++ b/tests/neg/i9740.scala @@ -1,3 +1,4 @@ +//> using options -Xfatal-warnings -Wimplausible-patterns abstract class RecoveryCompleted object RecoveryCompleted extends RecoveryCompleted diff --git a/tests/neg/i9740b.scala b/tests/neg/i9740b.scala index 8006056684c7..dcd9a1d6a474 100644 --- a/tests/neg/i9740b.scala +++ b/tests/neg/i9740b.scala @@ -1,3 +1,4 @@ +//> using options -Xfatal-warnings -Wimplausible-patterns enum Recovery: case RecoveryCompleted diff --git a/tests/neg/i9740c.scala b/tests/neg/i9740c.scala index 87881c9b20d7..c6a3a1380f1d 100644 --- a/tests/neg/i9740c.scala +++ b/tests/neg/i9740c.scala @@ -1,3 +1,4 @@ +//> using options -Xfatal-warnings -Wimplausible-patterns sealed trait Exp[T] case class IntExp(x: Int) extends Exp[Int] case class StrExp(x: String) extends Exp[String] diff --git a/tests/neg/i9740d.scala b/tests/neg/i9740d.scala index 9f2490b697b6..6f3cc3be02e2 100644 --- a/tests/neg/i9740d.scala +++ b/tests/neg/i9740d.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -Wimplausible-patterns + sealed trait Exp[T] case class IntExp(x: Int) extends Exp[Int] case class StrExp(x: String) extends Exp[String] diff --git a/tests/neg/i9751.scala b/tests/neg/i9751.scala new file mode 100644 index 000000000000..618d237e0cd4 --- /dev/null +++ b/tests/neg/i9751.scala @@ -0,0 +1,11 @@ +//> using options -Xfatal-warnings + +def f(): Unit = { + () // error + () +} + +inline def g(): Unit = { + () // error + () +} diff --git a/tests/neg/i9776.scala b/tests/neg/i9776.scala new file mode 100644 index 000000000000..b05488810416 --- /dev/null +++ b/tests/neg/i9776.scala @@ -0,0 +1,61 @@ +//> using options -Xfatal-warnings + +import scala.annotation.switch + +sealed trait Fruit + +object Fruit { + case object Apple extends Fruit + case object Banana extends Fruit + case object Lemon extends Fruit + case object Lime extends Fruit + case object Orange extends Fruit + + def isCitrus(fruit: Fruit): Boolean = + (fruit: @switch) match { // error Could not emit switch for @switch annotated match + case Orange => true + case Lemon => true + case Lime => true + case _ => false + } +} + + +sealed trait TaggedFruit { + def tag: Int +} + +object TaggedFruit { + case object Apple extends TaggedFruit { + val tag = 1 + } + case object Banana extends TaggedFruit { + val tag = 2 + } + case object Orange extends TaggedFruit { + val tag = 3 + } + + def isCitrus(fruit: TaggedFruit): Boolean = + (fruit.tag: @switch) match { // error Could not emit switch for @switch annotated match + case Apple.tag => true + case 2 => true + case 3 => true + case _ => false + } + + // fewer than four cases, so no warning + def succ1(fruit: TaggedFruit): Boolean = + (fruit.tag: @switch) match { + case 3 => false + case 2 | Apple.tag => true + } + + // fewer than four cases, so no warning + def succ2(fruit: TaggedFruit): Boolean = + (fruit.tag: @switch) match { + case 3 => false + case 2 => true + case Apple.tag => true + } +} diff --git a/tests/neg-custom-args/fatal-warnings/i9880.scala b/tests/neg/i9880.scala similarity index 95% rename from tests/neg-custom-args/fatal-warnings/i9880.scala rename to tests/neg/i9880.scala index d9d857110543..f99fae25e5dc 100644 --- a/tests/neg-custom-args/fatal-warnings/i9880.scala +++ b/tests/neg/i9880.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + opaque type Bytes = Array[Byte] object Bytes: extension (self: Bytes) diff --git 
a/tests/neg/illegal-match-types.check b/tests/neg/illegal-match-types.check new file mode 100644 index 000000000000..f5f0f2d07c51 --- /dev/null +++ b/tests/neg/illegal-match-types.check @@ -0,0 +1,42 @@ +-- [E191] Type Error: tests/neg/illegal-match-types.scala:7:23 --------------------------------------------------------- +7 |type InvNesting[X] = X match // error + | ^ + | The match type contains an illegal case: + | case Inv[Cov[t]] => t + | (this error can be ignored for now with `-source:3.3`) +8 | case Inv[Cov[t]] => t +-- [E191] Type Error: tests/neg/illegal-match-types.scala:10:26 -------------------------------------------------------- +10 |type ContraNesting[X] = X match // error + | ^ + | The match type contains an illegal case: + | case Contra[Cov[t]] => t + | (this error can be ignored for now with `-source:3.3`) +11 | case Contra[Cov[t]] => t +-- [E191] Type Error: tests/neg/illegal-match-types.scala:15:22 -------------------------------------------------------- +15 |type AndTypeMT[X] = X match // error + | ^ + | The match type contains an illegal case: + | case t & Seq[Any] => t + | (this error can be ignored for now with `-source:3.3`) +16 | case t & Seq[Any] => t +-- [E191] Type Error: tests/neg/illegal-match-types.scala:22:33 -------------------------------------------------------- +22 |type TypeAliasWithBoundMT[X] = X match // error + | ^ + | The match type contains an illegal case: + | case IsSeq[t] => t + | (this error can be ignored for now with `-source:3.3`) +23 | case IsSeq[t] => t +-- [E191] Type Error: tests/neg/illegal-match-types.scala:29:34 -------------------------------------------------------- +29 |type TypeMemberExtractorMT[X] = X match // error + | ^ + | The match type contains an illegal case: + | case TypeMemberAux[t] => t + | (this error can be ignored for now with `-source:3.3`) +30 | case TypeMemberAux[t] => t +-- [E191] Type Error: tests/neg/illegal-match-types.scala:40:35 -------------------------------------------------------- +40 |type TypeMemberExtractorMT2[X] = X match // error + | ^ + | The match type contains an illegal case: + | case TypeMemberAux2[t] => t + | (this error can be ignored for now with `-source:3.3`) +41 | case TypeMemberAux2[t] => t diff --git a/tests/neg/illegal-match-types.scala b/tests/neg/illegal-match-types.scala new file mode 100644 index 000000000000..51b0aab6301a --- /dev/null +++ b/tests/neg/illegal-match-types.scala @@ -0,0 +1,41 @@ +class Inv[T] +class Cov[+T] +class Contra[-T] + +// Nesting captures in non-covariant position + +type InvNesting[X] = X match // error + case Inv[Cov[t]] => t + +type ContraNesting[X] = X match // error + case Contra[Cov[t]] => t + +// Intersection type to type-test and capture at the same time + +type AndTypeMT[X] = X match // error + case t & Seq[Any] => t + +// Poly type alias with a bound to type-test and capture at the same time + +type IsSeq[X <: Seq[Any]] = X + +type TypeAliasWithBoundMT[X] = X match // error + case IsSeq[t] => t + +// Poly type alias with an unknown type member refinement + +type TypeMemberAux[X] = { type TypeMember = X } + +type TypeMemberExtractorMT[X] = X match // error + case TypeMemberAux[t] => t + +// Poly type alias with a refined member of stronger bounds than in the parent + +class Base { + type TypeMember +} + +type TypeMemberAux2[X <: Seq[Any]] = Base { type TypeMember = X } + +type TypeMemberExtractorMT2[X] = X match // error + case TypeMemberAux2[t] => t diff --git a/tests/neg-custom-args/feature/impl-conv/A.scala b/tests/neg/impl-conv/A.scala 
similarity index 100% rename from tests/neg-custom-args/feature/impl-conv/A.scala rename to tests/neg/impl-conv/A.scala diff --git a/tests/neg/impl-conv/B.scala b/tests/neg/impl-conv/B.scala new file mode 100644 index 000000000000..618868b8bbc0 --- /dev/null +++ b/tests/neg/impl-conv/B.scala @@ -0,0 +1,12 @@ +//> using options -Xfatal-warnings -feature + +package implConv + +object B { + import A.{_, given} + + "".foo + + val x: Int = "" // ok + val y: String = 1 // error: feature +} diff --git a/tests/neg-custom-args/feature/implicit-conversions-old.scala b/tests/neg/implicit-conversions-old.scala similarity index 91% rename from tests/neg-custom-args/feature/implicit-conversions-old.scala rename to tests/neg/implicit-conversions-old.scala index 1050094dcaf1..0a7b75766bbf 100644 --- a/tests/neg-custom-args/feature/implicit-conversions-old.scala +++ b/tests/neg/implicit-conversions-old.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -feature + class A class B diff --git a/tests/neg-custom-args/feature/implicit-conversions.scala b/tests/neg/implicit-conversions.scala similarity index 91% rename from tests/neg-custom-args/feature/implicit-conversions.scala rename to tests/neg/implicit-conversions.scala index 2cf1a85d7540..ead8c5ac7646 100644 --- a/tests/neg-custom-args/feature/implicit-conversions.scala +++ b/tests/neg/implicit-conversions.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -feature + class A class B diff --git a/tests/neg-custom-args/fatal-warnings/indentLeft.scala b/tests/neg/indentLeft.scala similarity index 77% rename from tests/neg-custom-args/fatal-warnings/indentLeft.scala rename to tests/neg/indentLeft.scala index 36734e354fed..c4a260583de2 100644 --- a/tests/neg-custom-args/fatal-warnings/indentLeft.scala +++ b/tests/neg/indentLeft.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + object Test { if (true) { diff --git a/tests/neg-custom-args/indentRight.scala b/tests/neg/indentRight.scala similarity index 93% rename from tests/neg-custom-args/indentRight.scala rename to tests/neg/indentRight.scala index f00bec12fbff..8eb9deb23389 100644 --- a/tests/neg-custom-args/indentRight.scala +++ b/tests/neg/indentRight.scala @@ -1,3 +1,5 @@ +//> using options -no-indent -Xfatal-warnings + trait A case class B() extends A // error: Line is indented too far to the right case object C extends A // error: Line is indented too far to the right diff --git a/tests/neg-custom-args/infix.scala b/tests/neg/infix.scala similarity index 96% rename from tests/neg-custom-args/infix.scala rename to tests/neg/infix.scala index f6f3053087dd..aefdd5c40d47 100644 --- a/tests/neg-custom-args/infix.scala +++ b/tests/neg/infix.scala @@ -1,3 +1,5 @@ +//> using options -source future -deprecation -Xfatal-warnings + // Compile with -strict -Xfatal-warnings -deprecation class C: infix def op(x: Int): Int = ??? 
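The `infix.scala` move above keeps the test's point intact: under `-source future` with `-deprecation -Xfatal-warnings`, using an alphanumeric method in infix position is flagged unless the method is declared `infix`. A small sketch of the rule (method names are illustrative):

```scala
class C:
  infix def op(x: Int): Int = x + 1
  def plain(x: Int): Int = x + 1

@main def infixDemo =
  val c = C()
  println(c op 2)      // ok: `op` is declared infix
  println(c.plain(2))  // ok: ordinary selection
  // `c plain 2` would be flagged under -source future -deprecation -Xfatal-warnings
```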
diff --git a/tests/neg-custom-args/fatal-warnings/inline-givens.scala b/tests/neg/inline-givens.scala similarity index 90% rename from tests/neg-custom-args/fatal-warnings/inline-givens.scala rename to tests/neg/inline-givens.scala index eae50bca45cf..3e388de2ce8b 100644 --- a/tests/neg-custom-args/fatal-warnings/inline-givens.scala +++ b/tests/neg/inline-givens.scala @@ -1,3 +1,4 @@ +//> using options -Xfatal-warnings class Item(x: String) diff --git a/tests/neg/inline-unstable-accessors.check b/tests/neg/inline-unstable-accessors.check new file mode 100644 index 000000000000..eb226afc376b --- /dev/null +++ b/tests/neg/inline-unstable-accessors.check @@ -0,0 +1,345 @@ +-- [E192] Compatibility Error: tests/neg/inline-unstable-accessors.scala:10:6 ------------------------------------------ +10 | valBinaryAPI1 + // error + | ^^^^^^^^^^^^^ + | Unstable inline accessor foo$A$$inline$valBinaryAPI1 was generated in class A. + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Access to non-public value valBinaryAPI1 causes the automatic generation of an accessor. + | This accessor is not stable, its name may change or it may disappear + | if not needed in a future version. + | + | To make sure that the inlined code is binary compatible you must make sure that + | value valBinaryAPI1 is public in the binary API. + | * Option 1: Annotate value valBinaryAPI1 with @publicInBinary + | * Option 2: Make value valBinaryAPI1 public + | + | This change may break binary compatibility if a previous version of this + | library was compiled with generated accessors. Binary compatibility should + | be checked using MiMa. If binary compatibility is broken, you should add the + | old accessor explicitly in the source code. The following code should be + | added to class A: + | @publicInBinary private[A] final def foo$A$$inline$valBinaryAPI1: Int = this.valBinaryAPI1 + -------------------------------------------------------------------------------------------------------------------- +-- [E192] Compatibility Error: tests/neg/inline-unstable-accessors.scala:11:6 ------------------------------------------ +11 | valBinaryAPI2 + // error + | ^^^^^^^^^^^^^ + | Unstable inline accessor foo$A$$inline$valBinaryAPI2 was generated in class A. + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Access to non-public value valBinaryAPI2 causes the automatic generation of an accessor. + | This accessor is not stable, its name may change or it may disappear + | if not needed in a future version. + | + | To make sure that the inlined code is binary compatible you must make sure that + | value valBinaryAPI2 is public in the binary API. + | * Option 1: Annotate value valBinaryAPI2 with @publicInBinary + | * Option 2: Make value valBinaryAPI2 public + | + | This change may break binary compatibility if a previous version of this + | library was compiled with generated accessors. Binary compatibility should + | be checked using MiMa. If binary compatibility is broken, you should add the + | old accessor explicitly in the source code. 
The following code should be + | added to class A: + | @publicInBinary private[A] def foo$A$$inline$valBinaryAPI2: Int = this.valBinaryAPI2 + -------------------------------------------------------------------------------------------------------------------- +-- [E192] Compatibility Error: tests/neg/inline-unstable-accessors.scala:15:6 ------------------------------------------ +15 | a.valBinaryAPI2 + // error + | ^^^^^^^^^^^^^^^ + | Unstable inline accessor inline$valBinaryAPI2$i1 was generated in class B. + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Access to non-public value valBinaryAPI2 causes the automatic generation of an accessor. + | This accessor is not stable, its name may change or it may disappear + | if not needed in a future version. + | + | To make sure that the inlined code is binary compatible you must make sure that + | value valBinaryAPI2 is public in the binary API. + | * Option 1: Annotate value valBinaryAPI2 with @publicInBinary + | * Option 2: Make value valBinaryAPI2 public + | + | This change may break binary compatibility if a previous version of this + | library was compiled with generated accessors. Binary compatibility should + | be checked using MiMa. If binary compatibility is broken, you should add the + | old accessor explicitly in the source code. The following code should be + | added to class B: + | @publicInBinary private[B] def inline$valBinaryAPI2$i1(x$0: foo.A): Int = x$0.valBinaryAPI2 + -------------------------------------------------------------------------------------------------------------------- +-- [E192] Compatibility Error: tests/neg/inline-unstable-accessors.scala:23:6 ------------------------------------------ +23 | valBinaryAPI1 + // error + | ^^^^^^^^^^^^^ + | Unstable inline accessor inline$valBinaryAPI1 was generated in class C. + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Access to non-public value valBinaryAPI1 causes the automatic generation of an accessor. + | This accessor is not stable, its name may change or it may disappear + | if not needed in a future version. + | + | To make sure that the inlined code is binary compatible you must make sure that + | value valBinaryAPI1 is public in the binary API. + | * Option 1: Annotate value valBinaryAPI1 with @publicInBinary + | * Option 2: Make value valBinaryAPI1 public + | + | This change may break binary compatibility if a previous version of this + | library was compiled with generated accessors. Binary compatibility should + | be checked using MiMa. If binary compatibility is broken, you should add the + | old accessor explicitly in the source code. 
The following code should be + | added to class C: + | @publicInBinary private[C] final def inline$valBinaryAPI1: Int = this.valBinaryAPI1 + -------------------------------------------------------------------------------------------------------------------- +-- [E192] Compatibility Error: tests/neg/inline-unstable-accessors.scala:24:6 ------------------------------------------ +24 | valBinaryAPI2 + // error + | ^^^^^^^^^^^^^ + | Unstable inline accessor inline$valBinaryAPI2 was generated in class C. + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Access to non-public value valBinaryAPI2 causes the automatic generation of an accessor. + | This accessor is not stable, its name may change or it may disappear + | if not needed in a future version. + | + | To make sure that the inlined code is binary compatible you must make sure that + | value valBinaryAPI2 is public in the binary API. + | * Option 1: Annotate value valBinaryAPI2 with @publicInBinary + | * Option 2: Make value valBinaryAPI2 public + | + | This change may break binary compatibility if a previous version of this + | library was compiled with generated accessors. Binary compatibility should + | be checked using MiMa. If binary compatibility is broken, you should add the + | old accessor explicitly in the source code. The following code should be + | added to class C: + | @publicInBinary private[C] def inline$valBinaryAPI2: Int = this.valBinaryAPI2 + -------------------------------------------------------------------------------------------------------------------- +-- [E192] Compatibility Error: tests/neg/inline-unstable-accessors.scala:28:6 ------------------------------------------ +28 | c.valBinaryAPI2 + // error + | ^^^^^^^^^^^^^^^ + | Unstable inline accessor inline$valBinaryAPI2$i2 was generated in class D. + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Access to non-public value valBinaryAPI2 causes the automatic generation of an accessor. + | This accessor is not stable, its name may change or it may disappear + | if not needed in a future version. + | + | To make sure that the inlined code is binary compatible you must make sure that + | value valBinaryAPI2 is public in the binary API. + | * Option 1: Annotate value valBinaryAPI2 with @publicInBinary + | * Option 2: Make value valBinaryAPI2 public + | + | This change may break binary compatibility if a previous version of this + | library was compiled with generated accessors. Binary compatibility should + | be checked using MiMa. If binary compatibility is broken, you should add the + | old accessor explicitly in the source code. 
The following code should be + | added to class D: + | @publicInBinary private[D] def inline$valBinaryAPI2$i2(x$0: foo.C): Int = x$0.valBinaryAPI2 + -------------------------------------------------------------------------------------------------------------------- +-- [E192] Compatibility Error: tests/neg/inline-unstable-accessors.scala:36:6 ------------------------------------------ +36 | valBinaryAPI1 + // error + | ^^^^^^^^^^^^^ + | Unstable inline accessor inline$valBinaryAPI1 was generated in object E. + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Access to non-public value valBinaryAPI1 causes the automatic generation of an accessor. + | This accessor is not stable, its name may change or it may disappear + | if not needed in a future version. + | + | To make sure that the inlined code is binary compatible you must make sure that + | value valBinaryAPI1 is public in the binary API. + | * Option 1: Annotate value valBinaryAPI1 with @publicInBinary + | * Option 2: Make value valBinaryAPI1 public + | + | This change may break binary compatibility if a previous version of this + | library was compiled with generated accessors. Binary compatibility should + | be checked using MiMa. If binary compatibility is broken, you should add the + | old accessor explicitly in the source code. The following code should be + | added to object E: + | @publicInBinary private[E] final def inline$valBinaryAPI1: Int = foo.E.valBinaryAPI1 + -------------------------------------------------------------------------------------------------------------------- +-- [E192] Compatibility Error: tests/neg/inline-unstable-accessors.scala:37:6 ------------------------------------------ +37 | valBinaryAPI2 + // error + | ^^^^^^^^^^^^^ + | Unstable inline accessor inline$valBinaryAPI2 was generated in object E. + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Access to non-public value valBinaryAPI2 causes the automatic generation of an accessor. + | This accessor is not stable, its name may change or it may disappear + | if not needed in a future version. + | + | To make sure that the inlined code is binary compatible you must make sure that + | value valBinaryAPI2 is public in the binary API. + | * Option 1: Annotate value valBinaryAPI2 with @publicInBinary + | * Option 2: Make value valBinaryAPI2 public + | + | This change may break binary compatibility if a previous version of this + | library was compiled with generated accessors. Binary compatibility should + | be checked using MiMa. If binary compatibility is broken, you should add the + | old accessor explicitly in the source code. 
The following code should be + | added to object E: + | @publicInBinary private[E] def inline$valBinaryAPI2: Int = foo.E.valBinaryAPI2 + -------------------------------------------------------------------------------------------------------------------- +-- [E192] Compatibility Error: tests/neg/inline-unstable-accessors.scala:41:6 ------------------------------------------ +41 | E.valBinaryAPI2 + // error + | ^^^^^^^^^^^^^^^ + | Unstable inline accessor inline$valBinaryAPI2$i3 was generated in object F. + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Access to non-public value valBinaryAPI2 causes the automatic generation of an accessor. + | This accessor is not stable, its name may change or it may disappear + | if not needed in a future version. + | + | To make sure that the inlined code is binary compatible you must make sure that + | value valBinaryAPI2 is public in the binary API. + | * Option 1: Annotate value valBinaryAPI2 with @publicInBinary + | * Option 2: Make value valBinaryAPI2 public + | + | This change may break binary compatibility if a previous version of this + | library was compiled with generated accessors. Binary compatibility should + | be checked using MiMa. If binary compatibility is broken, you should add the + | old accessor explicitly in the source code. The following code should be + | added to object F: + | @publicInBinary private[F] def inline$valBinaryAPI2$i3(x$0: object foo.E): Int = x$0.valBinaryAPI2 + -------------------------------------------------------------------------------------------------------------------- +-- [E192] Compatibility Error: tests/neg/inline-unstable-accessors.scala:49:6 ------------------------------------------ +49 | valBinaryAPI1 + // error + | ^^^^^^^^^^^^^ + | Unstable inline accessor inline$valBinaryAPI1 was generated in package G. + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Access to non-public value valBinaryAPI1 causes the automatic generation of an accessor. + | This accessor is not stable, its name may change or it may disappear + | if not needed in a future version. + | + | To make sure that the inlined code is binary compatible you must make sure that + | value valBinaryAPI1 is public in the binary API. + | * Option 1: Annotate value valBinaryAPI1 with @publicInBinary + | * Option 2: Make value valBinaryAPI1 public + | + | This change may break binary compatibility if a previous version of this + | library was compiled with generated accessors. Binary compatibility should + | be checked using MiMa. If binary compatibility is broken, you should add the + | old accessor explicitly in the source code. 
The following code should be + | added to package G: + | @publicInBinary private[G] def inline$valBinaryAPI1: Int = foo.G.valBinaryAPI1 + -------------------------------------------------------------------------------------------------------------------- +-- [E192] Compatibility Error: tests/neg/inline-unstable-accessors.scala:50:6 ------------------------------------------ +50 | valBinaryAPI2 + // error + | ^^^^^^^^^^^^^ + | Unstable inline accessor inline$valBinaryAPI2 was generated in package G. + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Access to non-public value valBinaryAPI2 causes the automatic generation of an accessor. + | This accessor is not stable, its name may change or it may disappear + | if not needed in a future version. + | + | To make sure that the inlined code is binary compatible you must make sure that + | value valBinaryAPI2 is public in the binary API. + | * Option 1: Annotate value valBinaryAPI2 with @publicInBinary + | * Option 2: Make value valBinaryAPI2 public + | + | This change may break binary compatibility if a previous version of this + | library was compiled with generated accessors. Binary compatibility should + | be checked using MiMa. If binary compatibility is broken, you should add the + | old accessor explicitly in the source code. The following code should be + | added to package G: + | @publicInBinary private[G] def inline$valBinaryAPI2: Int = foo.G.valBinaryAPI2 + -------------------------------------------------------------------------------------------------------------------- +-- [E192] Compatibility Error: tests/neg/inline-unstable-accessors.scala:54:6 ------------------------------------------ +54 | G.valBinaryAPI2 + // error + | ^^^^^^^^^^^^^^^ + | Unstable inline accessor inline$valBinaryAPI2$i4 was generated in package H. + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Access to non-public value valBinaryAPI2 causes the automatic generation of an accessor. + | This accessor is not stable, its name may change or it may disappear + | if not needed in a future version. + | + | To make sure that the inlined code is binary compatible you must make sure that + | value valBinaryAPI2 is public in the binary API. + | * Option 1: Annotate value valBinaryAPI2 with @publicInBinary + | * Option 2: Make value valBinaryAPI2 public + | + | This change may break binary compatibility if a previous version of this + | library was compiled with generated accessors. Binary compatibility should + | be checked using MiMa. If binary compatibility is broken, you should add the + | old accessor explicitly in the source code. 
The following code should be + | added to package H: + | @publicInBinary private[H] def inline$valBinaryAPI2$i4(x$0: foo.G): Int = x$0.valBinaryAPI2 + -------------------------------------------------------------------------------------------------------------------- +-- [E192] Compatibility Error: tests/neg/inline-unstable-accessors.scala:62:6 ------------------------------------------ +62 | valBinaryAPI1 + // error + | ^^^^^^^^^^^^^ + | Unstable inline accessor inline$valBinaryAPI1 was generated in package I. + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Access to non-public value valBinaryAPI1 causes the automatic generation of an accessor. + | This accessor is not stable, its name may change or it may disappear + | if not needed in a future version. + | + | To make sure that the inlined code is binary compatible you must make sure that + | value valBinaryAPI1 is public in the binary API. + | * Option 1: Annotate value valBinaryAPI1 with @publicInBinary + | * Option 2: Make value valBinaryAPI1 public + | + | This change may break binary compatibility if a previous version of this + | library was compiled with generated accessors. Binary compatibility should + | be checked using MiMa. If binary compatibility is broken, you should add the + | old accessor explicitly in the source code. The following code should be + | added to package I: + | @publicInBinary private[I] def inline$valBinaryAPI1: Int = foo.I.valBinaryAPI1 + -------------------------------------------------------------------------------------------------------------------- +-- [E192] Compatibility Error: tests/neg/inline-unstable-accessors.scala:63:6 ------------------------------------------ +63 | valBinaryAPI2 + // error + | ^^^^^^^^^^^^^ + | Unstable inline accessor inline$valBinaryAPI2 was generated in package I. + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Access to non-public value valBinaryAPI2 causes the automatic generation of an accessor. + | This accessor is not stable, its name may change or it may disappear + | if not needed in a future version. + | + | To make sure that the inlined code is binary compatible you must make sure that + | value valBinaryAPI2 is public in the binary API. + | * Option 1: Annotate value valBinaryAPI2 with @publicInBinary + | * Option 2: Make value valBinaryAPI2 public + | + | This change may break binary compatibility if a previous version of this + | library was compiled with generated accessors. Binary compatibility should + | be checked using MiMa. If binary compatibility is broken, you should add the + | old accessor explicitly in the source code. 
The following code should be + | added to package I: + | @publicInBinary private[I] def inline$valBinaryAPI2: Int = foo.I.valBinaryAPI2 + -------------------------------------------------------------------------------------------------------------------- +-- [E192] Compatibility Error: tests/neg/inline-unstable-accessors.scala:67:6 ------------------------------------------ +67 | I.valBinaryAPI2 + // error + | ^^^^^^^^^^^^^^^ + | Unstable inline accessor inline$valBinaryAPI2$i5 was generated in package J. + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Access to non-public value valBinaryAPI2 causes the automatic generation of an accessor. + | This accessor is not stable, its name may change or it may disappear + | if not needed in a future version. + | + | To make sure that the inlined code is binary compatible you must make sure that + | value valBinaryAPI2 is public in the binary API. + | * Option 1: Annotate value valBinaryAPI2 with @publicInBinary + | * Option 2: Make value valBinaryAPI2 public + | + | This change may break binary compatibility if a previous version of this + | library was compiled with generated accessors. Binary compatibility should + | be checked using MiMa. If binary compatibility is broken, you should add the + | old accessor explicitly in the source code. The following code should be + | added to package J: + | @publicInBinary private[J] def inline$valBinaryAPI2$i5(x$0: foo.I): Int = x$0.valBinaryAPI2 + -------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/inline-unstable-accessors.scala b/tests/neg/inline-unstable-accessors.scala new file mode 100644 index 000000000000..cf65006daaf8 --- /dev/null +++ b/tests/neg/inline-unstable-accessors.scala @@ -0,0 +1,68 @@ +//> using options -Werror -WunstableInlineAccessors -explain + +package foo +import scala.annotation.publicInBinary +class A: + private val valBinaryAPI1: Int = 1 + private[foo] val valBinaryAPI2: Int = 1 + @publicInBinary private[foo] val valBinaryAPI3: Int = 1 + inline def inlined = + valBinaryAPI1 + // error + valBinaryAPI2 + // error + valBinaryAPI3 +class B(val a: A): + inline def inlined = + a.valBinaryAPI2 + // error + a.valBinaryAPI3 + +final class C: + private val valBinaryAPI1: Int = 1 + private[foo] val valBinaryAPI2: Int = 1 + @publicInBinary private[foo] val valBinaryAPI3: Int = 1 + inline def inlined = + valBinaryAPI1 + // error + valBinaryAPI2 + // error + valBinaryAPI3 +final class D(val c: C): + inline def inlined = + c.valBinaryAPI2 + // error + c.valBinaryAPI3 + +object E: + private val valBinaryAPI1: Int = 1 + private[foo] val valBinaryAPI2: Int = 1 + @publicInBinary private[foo] val valBinaryAPI3: Int = 1 + inline def inlined = + valBinaryAPI1 + // error + valBinaryAPI2 + // error + valBinaryAPI3 +object F: + inline def inlined = + E.valBinaryAPI2 + // error + E.valBinaryAPI3 + +package object G: + private val valBinaryAPI1: Int = 1 + private[foo] val valBinaryAPI2: Int = 1 + @publicInBinary private[foo] val valBinaryAPI3: Int = 1 + inline def inlined = + valBinaryAPI1 + // error + valBinaryAPI2 + // error + valBinaryAPI3 +package object H: + inline def inlined = + G.valBinaryAPI2 + // error + G.valBinaryAPI3 + +package I: + private val valBinaryAPI1: Int = 1 + 
private[foo] val valBinaryAPI2: Int = 1 + @publicInBinary private[foo] val valBinaryAPI3: Int = 1 + inline def inlined = + valBinaryAPI1 + // error + valBinaryAPI2 + // error + valBinaryAPI3 +package J: + inline def inlined = + I.valBinaryAPI2 + // error + I.valBinaryAPI3 diff --git a/tests/neg/inner-classes-of-universal-traits.scala b/tests/neg/inner-classes-of-universal-traits.scala new file mode 100644 index 000000000000..4988f7b8740d --- /dev/null +++ b/tests/neg/inner-classes-of-universal-traits.scala @@ -0,0 +1,8 @@ +trait Outer extends Any { + trait Inner1 + trait Inner2 extends Any + class Inner3 + class Inner4(a: Int) extends AnyVal // error + case class Inner5(a: Int) // error + object Inner6 // error +} diff --git a/tests/neg/irrefutable.check b/tests/neg/irrefutable.check new file mode 100644 index 000000000000..01baff685cbc --- /dev/null +++ b/tests/neg/irrefutable.check @@ -0,0 +1,12 @@ +-- [E008] Not Found Error: tests/neg/irrefutable.scala:27:29 ----------------------------------------------------------- +27 | for (case Foo(x: Int) <- xs) yield x // error + | ^^ + | value withFilter is not a member of Lst[Foo[Any]] +-- Error: tests/neg/irrefutable.scala:30:16 ---------------------------------------------------------------------------- +30 | for (Foo(x: Int) <- xs) yield x // error + | ^^^ + | pattern's type Int is more specialized than the right hand side expression's type Any + | + | If the narrowing is intentional, this can be communicated by adding the `case` keyword before the full pattern, + | which will result in a filtering for expression (using `withFilter`). + | This patch can be rewritten automatically under -rewrite -source 3.2-migration. diff --git a/tests/neg/irrefutable.scala b/tests/neg/irrefutable.scala new file mode 100644 index 000000000000..b4f2736998e6 --- /dev/null +++ b/tests/neg/irrefutable.scala @@ -0,0 +1,42 @@ +// This tests that A.f1 is recognized as an irrefutable pattern and A.f2_nocase is not, and therefore A.f2 solves this +// by adding a case to the pattern, which results in withFilter being inserted. +// see also: tests/run/irrefutable.scala for an example that exercises the insertion of withFilter. 
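For readers unfamiliar with the desugaring this fixture exercises, here is a minimal, self-contained sketch of how a `case` pattern in a `for` comprehension introduces a `withFilter` call — exactly the member the test hides. It is independent of the test's `Lst` class, and `desugarDemo` is an invented name; the explicit form is only a rough approximation of the compiler's actual desugaring, which reuses the pattern itself in both steps.

```scala
case class Foo[T](x: T)

@main def desugarDemo(): Unit =
  val xs: List[Foo[Any]] = List(Foo(1), Foo("two"), Foo(3))

  // With `case`, non-matching elements are filtered out instead of
  // triggering a MatchError, which requires a withFilter member.
  val ints = for case Foo(x: Int) <- xs yield x

  // Roughly the desugared shape: a withFilter step followed by map.
  val intsExplicit =
    xs.withFilter(f => f.x.isInstanceOf[Int])
      .map(f => f.x.asInstanceOf[Int])

  assert(ints == List(1, 3) && intsExplicit == List(1, 3))
```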
+ +class Lst[+T](val id: String, val underlying: List[T]) { + def map[U](f: T => U): Lst[U] = new Lst(id, underlying.map(f)) + + // hide the withFilter so that there is a compile error + // def withFilter(f: T => Boolean): Lst.WithFilter[T] = new Lst.WithFilter(this, f) +} + +// object Lst: +// class WithFilter[+T](lst: Lst[T], filter: T => Boolean): +// def forwardingFilter[T1](filter: T1 => Boolean): T1 => Boolean = t => +// println(s"filtering $t in ${lst.id}") +// filter(t) + +// def map[U](f: T => U): Lst[U] = Lst(lst.id, lst.underlying.withFilter(forwardingFilter(filter)).map(f)) + +case class Foo[T](x: T) + +object A { + def f1(xs: Lst[Foo[Int]]): Lst[Int] = { + for (Foo(x: Int) <- xs) yield x + } + def f2(xs: Lst[Foo[Any]]): Lst[Int] = { + for (case Foo(x: Int) <- xs) yield x // error + } + def f2_nocase(xs: Lst[Foo[Any]]): Lst[Int] = { + for (Foo(x: Int) <- xs) yield x // error + } +} + +@main def Test = + val xs = new Lst("xs", List(Foo(1), Foo(2), Foo(3))) + println("=== mapping xs with A.f1 ===") + val xs1 = A.f1(xs) + assert(xs1.underlying == List(1, 2, 3)) + val ys = new Lst("ys", List(Foo(1: Any), Foo(2: Any), Foo(3: Any))) + println("=== mapping ys with A.f2 ===") + val ys1 = A.f2(ys) + assert(ys1.underlying == List(1, 2, 3)) diff --git a/tests/neg-custom-args/jdk-9-app.scala b/tests/neg/jdk-9-app.scala similarity index 83% rename from tests/neg-custom-args/jdk-9-app.scala rename to tests/neg/jdk-9-app.scala index 5709da34c466..4f78eb278876 100644 --- a/tests/neg-custom-args/jdk-9-app.scala +++ b/tests/neg/jdk-9-app.scala @@ -1,3 +1,5 @@ +//> using options -release:8 + import java.lang.ProcessHandle // error: not a member object Jdk9App extends App { diff --git a/tests/neg/kind-projector-underscores.scala b/tests/neg/kind-projector-underscores.scala new file mode 100644 index 000000000000..76aada871fae --- /dev/null +++ b/tests/neg/kind-projector-underscores.scala @@ -0,0 +1,16 @@ +//> using options -Ykind-projector:underscores + +package kind_projector_neg + +trait Foo[F[_]] + +class Bar1 extends Foo[Either[_, _]] // error +class Bar2 extends Foo[_] // error +class Bar3 extends Foo[λ[List[x] => Int]] // error + +object Test { + type -_ = Int // error -_ not allowed as a type def name without backticks + type +_ = Int // error +_ not allowed as a type def name without backticks +} + +class BacktickUnderscoreIsNotFine extends Foo[List[`_`]] // error wildcard invalid as backquoted identifier diff --git a/tests/neg/kind-projector.scala b/tests/neg/kind-projector.scala new file mode 100644 index 000000000000..a7fc24c70b93 --- /dev/null +++ b/tests/neg/kind-projector.scala @@ -0,0 +1,9 @@ +//> using options -Ykind-projector + +package kind_projector_neg + +trait Foo[F[_]] + +class Bar1 extends Foo[Either[*, *]] // error +class Bar2 extends Foo[*] // error +class Bar3 extends Foo[λ[List[x] => Int]] // error diff --git a/tests/neg-custom-args/explain/labelNotFound.check b/tests/neg/labelNotFound.check similarity index 80% rename from tests/neg-custom-args/explain/labelNotFound.check rename to tests/neg/labelNotFound.check index 594a838aeeed..94198a284014 100644 --- a/tests/neg-custom-args/explain/labelNotFound.check +++ b/tests/neg/labelNotFound.check @@ -1,5 +1,5 @@ --- [E172] Type Error: tests/neg-custom-args/explain/labelNotFound.scala:2:30 ------------------------------------------- -2 | scala.util.boundary.break(1) // error +-- [E172] Type Error: tests/neg/labelNotFound.scala:4:30 --------------------------------------------------------------- +4 | 
scala.util.boundary.break(1) // error | ^ |No given instance of type scala.util.boundary.Label[Int] was found for parameter label of method break in object boundary |--------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/labelNotFound.scala b/tests/neg/labelNotFound.scala new file mode 100644 index 000000000000..74ad6ef148ba --- /dev/null +++ b/tests/neg/labelNotFound.scala @@ -0,0 +1,4 @@ +//> using options -explain + +object Test: + scala.util.boundary.break(1) // error diff --git a/tests/neg-custom-args/erased/lambda-infer.scala b/tests/neg/lambda-infer.scala similarity index 91% rename from tests/neg-custom-args/erased/lambda-infer.scala rename to tests/neg/lambda-infer.scala index 2eebf8186b0d..90f40aa05e86 100644 --- a/tests/neg-custom-args/erased/lambda-infer.scala +++ b/tests/neg/lambda-infer.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + type F = (Int, erased Int) => Int erased class A diff --git a/tests/neg/lambda-rename.check b/tests/neg/lambda-rename.check new file mode 100644 index 000000000000..39969d4a7cfa --- /dev/null +++ b/tests/neg/lambda-rename.check @@ -0,0 +1,34 @@ +-- [E007] Type Mismatch Error: tests/neg/lambda-rename.scala:4:33 ------------------------------------------------------ +4 |val a: (x: Int) => Bar[x.type] = ??? : ((x: Int) => Foo[x.type]) // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Found: (x: Int) => Foo[x.type] + | Required: (x: Int) => Bar[x.type] + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/lambda-rename.scala:7:33 ------------------------------------------------------ +7 |val b: HK[[X] =>> Foo[(X, X)]] = ??? : HK[[X] =>> Bar[(X, X)]] // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Found: HK[[X] =>> Bar[(X, X)]] + | Required: HK[[X] =>> Foo[(X, X)]] + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/lambda-rename.scala:10:33 ----------------------------------------------------- +10 |val c: HK[[X] =>> Foo[(X, X)]] = ??? : HK[[Y] =>> Foo[(X, X)]] // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Found: HK[[Y] =>> Foo[(X, X)]] + | Required: HK[[X²] =>> Foo[(X², X²)]] + | + | where: X is a class in the empty package + | X² is a type variable + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/lambda-rename.scala:12:33 ----------------------------------------------------- +12 |val d: HK[[Y] =>> Foo[(X, X)]] = ??? : HK[[X] =>> Foo[(X, X)]] // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Found: HK[[X] =>> Foo[(X, X)]] + | Required: HK[[Y] =>> Foo[(X², X²)]] + | + | where: X is a type variable + | X² is a class in the empty package + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/lambda-rename.scala b/tests/neg/lambda-rename.scala new file mode 100644 index 000000000000..586d8ae28bdf --- /dev/null +++ b/tests/neg/lambda-rename.scala @@ -0,0 +1,12 @@ +class Foo[T] +class Bar[T] + +val a: (x: Int) => Bar[x.type] = ??? : ((x: Int) => Foo[x.type]) // error + +trait HK[F <: AnyKind] +val b: HK[[X] =>> Foo[(X, X)]] = ??? : HK[[X] =>> Bar[(X, X)]] // error + +class X +val c: HK[[X] =>> Foo[(X, X)]] = ??? : HK[[Y] =>> Foo[(X, X)]] // error + +val d: HK[[Y] =>> Foo[(X, X)]] = ??? 
: HK[[X] =>> Foo[(X, X)]] // error diff --git a/tests/neg/looping-givens.scala b/tests/neg/looping-givens.scala new file mode 100644 index 000000000000..357a417f0ed9 --- /dev/null +++ b/tests/neg/looping-givens.scala @@ -0,0 +1,11 @@ +//> using options -Xfatal-warnings + +class A +class B + +given joint(using a: A, b: B): (A & B) = ??? + +def foo(using a: A, b: B) = + given aa: A = summon // error + given bb: B = summon // error + given ab: (A & B) = summon // error diff --git a/tests/neg-custom-args/fatal-warnings/main-functions-nameclash.scala b/tests/neg/main-functions-nameclash.scala similarity index 77% rename from tests/neg-custom-args/fatal-warnings/main-functions-nameclash.scala rename to tests/neg/main-functions-nameclash.scala index 407323591c87..1f9352e1592d 100644 --- a/tests/neg-custom-args/fatal-warnings/main-functions-nameclash.scala +++ b/tests/neg/main-functions-nameclash.scala @@ -1,3 +1,4 @@ +//> using options -Xfatal-warnings object foo { @main def foo(x: Int) = () // error: class foo and object foo produce classes that overwrite one another diff --git a/tests/neg/manifest-summoning-b.check b/tests/neg/manifest-summoning-b.check new file mode 100644 index 000000000000..bb63eebb555f --- /dev/null +++ b/tests/neg/manifest-summoning-b.check @@ -0,0 +1,14 @@ +-- Error: tests/neg/manifest-summoning-b.scala:3:34 -------------------------------------------------------------------- +3 |val foo = manifest[List[? <: Int]] // error + | ^ + | Compiler synthesis of Manifest and OptManifest is deprecated, instead + | replace with the type `scala.reflect.ClassTag[List[? <: Int]]`. + | Alternatively, consider using the new metaprogramming features of Scala 3, + | see https://docs.scala-lang.org/scala3/reference/metaprogramming.html +-- Error: tests/neg/manifest-summoning-b.scala:4:41 -------------------------------------------------------------------- +4 |val bar = optManifest[Array[? <: String]] // error + | ^ + | Compiler synthesis of Manifest and OptManifest is deprecated, instead + | replace with the type `scala.reflect.ClassTag[Array[? <: String]]`. + | Alternatively, consider using the new metaprogramming features of Scala 3, + | see https://docs.scala-lang.org/scala3/reference/metaprogramming.html diff --git a/tests/neg/manifest-summoning-b.scala b/tests/neg/manifest-summoning-b.scala new file mode 100644 index 000000000000..6d1b8baff007 --- /dev/null +++ b/tests/neg/manifest-summoning-b.scala @@ -0,0 +1,4 @@ +//> using options -Xfatal-warnings -deprecation + +val foo = manifest[List[? <: Int]] // error +val bar = optManifest[Array[? <: String]] // error diff --git a/tests/neg/match-type-enumeration-value-hack.check b/tests/neg/match-type-enumeration-value-hack.check new file mode 100644 index 000000000000..13e425b80dbf --- /dev/null +++ b/tests/neg/match-type-enumeration-value-hack.check @@ -0,0 +1,13 @@ +-- [E172] Type Error: tests/neg/match-type-enumeration-value-hack.scala:11:40 ------------------------------------------ +11 | summon[Suit#Value =:= EnumValue[Suit]] // error + | ^ + | Cannot prove that Suit#Value =:= EnumValue[Suit]. 
+ | + | Note: a match type could not be fully reduced: + | + | trying to reduce EnumValue[Suit] + | failed since selector Suit + | does not uniquely determine parameter t in + | case EnumValueAux[t] => t + | The computed bounds for the parameter are: + | t >: ?1.Value <: ?1.Value diff --git a/tests/neg/match-type-enumeration-value-hack.scala b/tests/neg/match-type-enumeration-value-hack.scala new file mode 100644 index 000000000000..4c6176b9b637 --- /dev/null +++ b/tests/neg/match-type-enumeration-value-hack.scala @@ -0,0 +1,12 @@ +type EnumValueAux[A] = ({ type Value }) { type Value = A } + +type EnumValue[E <: Enumeration] = E match + case EnumValueAux[t] => t + +// A class extending Enumeration does not yet define a concrete enumeration +class Suit extends Enumeration: + val Hearts, Diamonds, Clubs, Spades = Val() + +object Test: + summon[Suit#Value =:= EnumValue[Suit]] // error +end Test diff --git a/tests/neg-custom-args/matchable.scala b/tests/neg/matchable.scala similarity index 93% rename from tests/neg-custom-args/matchable.scala rename to tests/neg/matchable.scala index 388af29d25cf..aaf8234f6d1e 100644 --- a/tests/neg-custom-args/matchable.scala +++ b/tests/neg/matchable.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -source future + def foo[T](x: T): Matchable = println(x.getClass()) // ok println(x.isInstanceOf[Int]) // ok diff --git a/tests/neg/matchtype-seq.check b/tests/neg/matchtype-seq.check index 9c37fc08c4df..980329d585dc 100644 --- a/tests/neg/matchtype-seq.check +++ b/tests/neg/matchtype-seq.check @@ -1,14 +1,14 @@ --- Error: tests/neg/matchtype-seq.scala:9:11 --------------------------------------------------------------------------- +-- [E184] Type Error: tests/neg/matchtype-seq.scala:9:11 --------------------------------------------------------------- 9 | identity[T1[3]]("") // error - | ^ + | ^^^^^ | Match type reduction failed since selector (3 : Int) | matches none of the cases | | case (1 : Int) => Int | case (2 : Int) => String --- Error: tests/neg/matchtype-seq.scala:10:11 -------------------------------------------------------------------------- +-- [E184] Type Error: tests/neg/matchtype-seq.scala:10:11 -------------------------------------------------------------- 10 | identity[T1[3]](1) // error - | ^ + | ^^^^^ | Match type reduction failed since selector (3 : Int) | matches none of the cases | @@ -285,7 +285,7 @@ | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:109:29 ---------------------------------------------------- -109 | identity[T9[Tuple2[_, _]]]("") // error +109 | identity[T9[Tuple2[?, ?]]]("") // error | ^^ | Found: ("" : String) | Required: Test.T9[(?, ?)] @@ -302,7 +302,7 @@ | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:110:29 ---------------------------------------------------- -110 | identity[T9[Tuple2[_, _]]](1) // error +110 | identity[T9[Tuple2[?, ?]]](1) // error | ^ | Found: (1 : Int) | Required: Test.T9[(?, ?)] diff --git a/tests/neg/matchtype-seq.scala b/tests/neg/matchtype-seq.scala index 79a38fb2f5e4..46f9c02e5262 100644 --- a/tests/neg/matchtype-seq.scala +++ b/tests/neg/matchtype-seq.scala @@ -106,8 +106,8 @@ object Test { identity[T9[Tuple2[String, Nothing]]]("1") // error identity[T9[Tuple2[Int, Nothing]]](1) // error identity[T9[Tuple2[Nothing, Int]]]("1") // error - identity[T9[Tuple2[_, _]]]("") // error - identity[T9[Tuple2[_, _]]](1) // error + 
identity[T9[Tuple2[?, ?]]]("") // error + identity[T9[Tuple2[?, ?]]](1) // error identity[T9[Tuple2[Any, Any]]]("") // error identity[T9[Tuple2[Any, Any]]](1) // error diff --git a/tests/neg/mirror-synthesis-errors-b.check b/tests/neg/mirror-synthesis-errors-b.check index d9e394617c9d..a2042d865170 100644 --- a/tests/neg/mirror-synthesis-errors-b.check +++ b/tests/neg/mirror-synthesis-errors-b.check @@ -1,40 +1,40 @@ -- [E172] Type Error: tests/neg/mirror-synthesis-errors-b.scala:21:56 -------------------------------------------------- 21 |val testA = summon[Mirror.ProductOf[Cns[Int] & Sm[Int]]] // error: unreleated | ^ - |No given instance of type deriving.Mirror.ProductOf[Cns[Int] & Sm[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.ProductOf[Cns[Int] & Sm[Int]]: type `Cns[Int] & Sm[Int]` is not a generic product because its subpart `Cns[Int] & Sm[Int]` is an intersection of unrelated definitions class Cns and class Sm. + |No given instance of type scala.deriving.Mirror.ProductOf[Cns[Int] & Sm[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type scala.deriving.Mirror.ProductOf[Cns[Int] & Sm[Int]]: type `Cns[Int] & Sm[Int]` is not a generic product because its subpart `Cns[Int] & Sm[Int]` is an intersection of unrelated definitions class Cns and class Sm. -- [E172] Type Error: tests/neg/mirror-synthesis-errors-b.scala:22:56 -------------------------------------------------- 22 |val testB = summon[Mirror.ProductOf[Sm[Int] & Cns[Int]]] // error: unreleated | ^ - |No given instance of type deriving.Mirror.ProductOf[Sm[Int] & Cns[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.ProductOf[Sm[Int] & Cns[Int]]: type `Sm[Int] & Cns[Int]` is not a generic product because its subpart `Sm[Int] & Cns[Int]` is an intersection of unrelated definitions class Sm and class Cns. + |No given instance of type scala.deriving.Mirror.ProductOf[Sm[Int] & Cns[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type scala.deriving.Mirror.ProductOf[Sm[Int] & Cns[Int]]: type `Sm[Int] & Cns[Int]` is not a generic product because its subpart `Sm[Int] & Cns[Int]` is an intersection of unrelated definitions class Sm and class Cns. -- [E172] Type Error: tests/neg/mirror-synthesis-errors-b.scala:23:49 -------------------------------------------------- 23 |val testC = summon[Mirror.Of[Cns[Int] & Sm[Int]]] // error: unreleated | ^ - |No given instance of type deriving.Mirror.Of[Cns[Int] & Sm[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Cns[Int] & Sm[Int]]: + |No given instance of type scala.deriving.Mirror.Of[Cns[Int] & Sm[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type scala.deriving.Mirror.Of[Cns[Int] & Sm[Int]]: | * type `Cns[Int] & Sm[Int]` is not a generic product because its subpart `Cns[Int] & Sm[Int]` is an intersection of unrelated definitions class Cns and class Sm. | * type `Cns[Int] & Sm[Int]` is not a generic sum because its subpart `Cns[Int] & Sm[Int]` is an intersection of unrelated definitions class Cns and class Sm. 
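As background for the synthesis failures listed in this check file: a mirror can only be synthesized for a genuine product or sum. A minimal sketch of the successful case, for contrast with the unrelated intersections above (`Point` and `mirrorDemo` are invented names, not part of the fixtures):

```scala
import scala.deriving.Mirror

case class Point(x: Int, y: Int)

@main def mirrorDemo(): Unit =
  // For a plain case class the compiler synthesizes the instance that
  // intersections like Cns[Int] & Sm[Int] cannot get.
  val m = summon[Mirror.ProductOf[Point]]
  // fromProduct rebuilds a value from its element tuple (Int, Int).
  val p = m.fromProduct((1, 2))
  assert(p == Point(1, 2))
```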
-- [E172] Type Error: tests/neg/mirror-synthesis-errors-b.scala:24:49 -------------------------------------------------- 24 |val testD = summon[Mirror.Of[Sm[Int] & Cns[Int]]] // error: unreleated | ^ - |No given instance of type deriving.Mirror.Of[Sm[Int] & Cns[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Sm[Int] & Cns[Int]]: + |No given instance of type scala.deriving.Mirror.Of[Sm[Int] & Cns[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type scala.deriving.Mirror.Of[Sm[Int] & Cns[Int]]: | * type `Sm[Int] & Cns[Int]` is not a generic product because its subpart `Sm[Int] & Cns[Int]` is an intersection of unrelated definitions class Sm and class Cns. | * type `Sm[Int] & Cns[Int]` is not a generic sum because its subpart `Sm[Int] & Cns[Int]` is an intersection of unrelated definitions class Sm and class Cns. -- [E172] Type Error: tests/neg/mirror-synthesis-errors-b.scala:25:55 -------------------------------------------------- 25 |val testE = summon[Mirror.ProductOf[Sm[Int] & Nn.type]] // error: unreleated | ^ - |No given instance of type deriving.Mirror.ProductOf[Sm[Int] & Nn.type] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.ProductOf[Sm[Int] & Nn.type]: type `Sm[Int] & Nn.type` is not a generic product because its subpart `Sm[Int] & Nn.type` is an intersection of unrelated definitions class Sm and object Nn. + |No given instance of type scala.deriving.Mirror.ProductOf[Sm[Int] & Nn.type] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type scala.deriving.Mirror.ProductOf[Sm[Int] & Nn.type]: type `Sm[Int] & Nn.type` is not a generic product because its subpart `Sm[Int] & Nn.type` is an intersection of unrelated definitions class Sm and object Nn. -- [E172] Type Error: tests/neg/mirror-synthesis-errors-b.scala:26:55 -------------------------------------------------- 26 |val testF = summon[Mirror.ProductOf[Nn.type & Sm[Int]]] // error: unreleated | ^ - |No given instance of type deriving.Mirror.ProductOf[Nn.type & Sm[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.ProductOf[Nn.type & Sm[Int]]: type `Nn.type & Sm[Int]` is not a generic product because its subpart `Nn.type & Sm[Int]` is an intersection of unrelated definitions object Nn and class Sm. + |No given instance of type scala.deriving.Mirror.ProductOf[Nn.type & Sm[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type scala.deriving.Mirror.ProductOf[Nn.type & Sm[Int]]: type `Nn.type & Sm[Int]` is not a generic product because its subpart `Nn.type & Sm[Int]` is an intersection of unrelated definitions object Nn and class Sm. -- [E172] Type Error: tests/neg/mirror-synthesis-errors-b.scala:27:54 -------------------------------------------------- 27 |val testG = summon[Mirror.Of[Foo.A.type & Foo.B.type]] // error: unreleated | ^ - |No given instance of type deriving.Mirror.Of[(Foo.A : Foo) & (Foo.B : Foo)] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[(Foo.A : Foo) & (Foo.B : Foo)]: + |No given instance of type scala.deriving.Mirror.Of[(Foo.A : Foo) & (Foo.B : Foo)] was found for parameter x of method summon in object Predef. 
Failed to synthesize an instance of type scala.deriving.Mirror.Of[(Foo.A : Foo) & (Foo.B : Foo)]: | * type `(Foo.A : Foo) & (Foo.B : Foo)` is not a generic product because its subpart `(Foo.A : Foo) & (Foo.B : Foo)` is an intersection of unrelated definitions value A and value B. | * type `(Foo.A : Foo) & (Foo.B : Foo)` is not a generic sum because its subpart `(Foo.A : Foo) & (Foo.B : Foo)` is an intersection of unrelated definitions value A and value B. -- [E172] Type Error: tests/neg/mirror-synthesis-errors-b.scala:28:54 -------------------------------------------------- 28 |val testH = summon[Mirror.Of[Foo.B.type & Foo.A.type]] // error: unreleated | ^ - |No given instance of type deriving.Mirror.Of[(Foo.B : Foo) & (Foo.A : Foo)] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[(Foo.B : Foo) & (Foo.A : Foo)]: + |No given instance of type scala.deriving.Mirror.Of[(Foo.B : Foo) & (Foo.A : Foo)] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type scala.deriving.Mirror.Of[(Foo.B : Foo) & (Foo.A : Foo)]: | * type `(Foo.B : Foo) & (Foo.A : Foo)` is not a generic product because its subpart `(Foo.B : Foo) & (Foo.A : Foo)` is an intersection of unrelated definitions value B and value A. | * type `(Foo.B : Foo) & (Foo.A : Foo)` is not a generic sum because its subpart `(Foo.B : Foo) & (Foo.A : Foo)` is an intersection of unrelated definitions value B and value A. diff --git a/tests/neg/mirror-synthesis-errors.check b/tests/neg/mirror-synthesis-errors.check index da795e80bf51..92ce2118e66a 100644 --- a/tests/neg/mirror-synthesis-errors.check +++ b/tests/neg/mirror-synthesis-errors.check @@ -1,42 +1,42 @@ -- [E172] Type Error: tests/neg/mirror-synthesis-errors.scala:21:32 ---------------------------------------------------- 21 |val testA = summon[Mirror.Of[A]] // error: Not a sealed trait | ^ - |No given instance of type deriving.Mirror.Of[A] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[A]: + |No given instance of type scala.deriving.Mirror.Of[A] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type scala.deriving.Mirror.Of[A]: | * trait A is not a generic product because it is not a case class | * trait A is not a generic sum because it is not a sealed trait -- [E172] Type Error: tests/neg/mirror-synthesis-errors.scala:22:32 ---------------------------------------------------- 22 |val testC = summon[Mirror.Of[C]] // error: Does not have subclasses | ^ - |No given instance of type deriving.Mirror.Of[C] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[C]: + |No given instance of type scala.deriving.Mirror.Of[C] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type scala.deriving.Mirror.Of[C]: | * trait C is not a generic product because it is not a case class | * trait C is not a generic sum because it does not have subclasses -- [E172] Type Error: tests/neg/mirror-synthesis-errors.scala:23:32 ---------------------------------------------------- 23 |val testD = summon[Mirror.Of[D]] // error: child SubD takes more than one parameter list | ^ - |No given instance of type deriving.Mirror.Of[D] was found for parameter x of method summon in object Predef. 
Failed to synthesize an instance of type deriving.Mirror.Of[D]: + |No given instance of type scala.deriving.Mirror.Of[D] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type scala.deriving.Mirror.Of[D]: | * class D is not a generic product because it is not a case class | * class D is not a generic sum because its child class SubD is not a generic product because it takes more than one parameter list -- [E172] Type Error: tests/neg/mirror-synthesis-errors.scala:24:38 ---------------------------------------------------- 24 |val testSubD = summon[Mirror.Of[SubD]] // error: takes more than one parameter list | ^ - |No given instance of type deriving.Mirror.Of[SubD] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[SubD]: + |No given instance of type scala.deriving.Mirror.Of[SubD] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type scala.deriving.Mirror.Of[SubD]: | * class SubD is not a generic product because it takes more than one parameter list | * class SubD is not a generic sum because it is not a sealed class -- [E172] Type Error: tests/neg/mirror-synthesis-errors.scala:25:32 ---------------------------------------------------- 25 |val testE = summon[Mirror.Of[E]] // error: Not an abstract class | ^ - |No given instance of type deriving.Mirror.Of[E] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[E]: + |No given instance of type scala.deriving.Mirror.Of[E] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type scala.deriving.Mirror.Of[E]: | * class E is not a generic product because it is not a case class | * class E is not a generic sum because it is not an abstract class -- [E172] Type Error: tests/neg/mirror-synthesis-errors.scala:26:32 ---------------------------------------------------- 26 |val testF = summon[Mirror.Of[F]] // error: No children | ^ - |No given instance of type deriving.Mirror.Of[F] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[F]: + |No given instance of type scala.deriving.Mirror.Of[F] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type scala.deriving.Mirror.Of[F]: | * trait F is not a generic product because it is not a case class | * trait F is not a generic sum because it does not have subclasses -- [E172] Type Error: tests/neg/mirror-synthesis-errors.scala:27:36 ---------------------------------------------------- 27 |val testG = summon[Mirror.Of[Foo.G]] // error: Has anonymous subclasses | ^ - |No given instance of type deriving.Mirror.Of[Foo.G] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Foo.G]: + |No given instance of type scala.deriving.Mirror.Of[Foo.G] was found for parameter x of method summon in object Predef. 
Failed to synthesize an instance of type scala.deriving.Mirror.Of[Foo.G]: | * trait G is not a generic product because it is not a case class | * trait G is not a generic sum because it has anonymous or inaccessible subclasses diff --git a/tests/neg/missing-implicit-2.check b/tests/neg/missing-implicit-2.check index 10f0192d1459..7f13903ca166 100644 --- a/tests/neg/missing-implicit-2.check +++ b/tests/neg/missing-implicit-2.check @@ -16,7 +16,7 @@ | | The following import might fix the problem: | - | import concurrent.ExecutionContext.Implicits.global + | import scala.concurrent.ExecutionContext.Implicits.global | -- [E007] Type Mismatch Error: tests/neg/missing-implicit-2.scala:6:25 ------------------------------------------------- 6 |val b: java.lang.Byte = (1: Byte) // error, but no hint diff --git a/tests/neg/missing-implicit-3.check b/tests/neg/missing-implicit-3.check index 45837ce11576..ecedd10d029c 100644 --- a/tests/neg/missing-implicit-3.check +++ b/tests/neg/missing-implicit-3.check @@ -2,7 +2,7 @@ 6 |val d: scala.concurrent.duration.Duration = (10, DAYS) // error | ^^^^^^^^^^ | Found: (Int, java.util.concurrent.TimeUnit) - | Required: concurrent².duration.Duration + | Required: scala.concurrent².duration.Duration | | where: concurrent is a package in package java.util | concurrent² is a package in package scala @@ -10,7 +10,7 @@ | | The following import might fix the problem: | - | import concurrent.duration.pairIntToDuration + | import scala.concurrent.duration.pairIntToDuration | | | longer explanation available when compiling with `-explain` @@ -21,5 +21,5 @@ | | The following import might fix the problem: | - | import concurrent.duration.DurationInt + | import scala.concurrent.duration.DurationInt | diff --git a/tests/neg/missing-implicit1.check b/tests/neg/missing-implicit1.check index c94225aaf0a6..6006afda3ece 100644 --- a/tests/neg/missing-implicit1.check +++ b/tests/neg/missing-implicit1.check @@ -19,7 +19,7 @@ -- [E172] Type Error: tests/neg/missing-implicit1.scala:23:42 ---------------------------------------------------------- 23 | List(1, 2, 3).traverse(x => Option(x)) // error | ^ - |No given instance of type testObjectInstance.Zip[Option] was found for an implicit parameter of method traverse in trait Traverse + |No given instance of type testObjectInstance.Zip[Option] was found for a context parameter of method traverse in trait Traverse | |The following import might fix the problem: | diff --git a/tests/neg/missing-implicit3.check b/tests/neg/missing-implicit3.check index ab87bf99a32a..c58b4430f3fe 100644 --- a/tests/neg/missing-implicit3.check +++ b/tests/neg/missing-implicit3.check @@ -1,9 +1,14 @@ -- [E172] Type Error: tests/neg/missing-implicit3.scala:13:36 ---------------------------------------------------------- 13 |val sortedFoos = sort(List(new Foo)) // error | ^ - | No given instance of type ord.Ord[ord.Foo] was found for an implicit parameter of method sort in package ord. - | I found: + | No given instance of type ord.Ord[ord.Foo] was found for a context parameter of method sort in package ord. + | I found: | - | ord.Ord.ordered[ord.Foo](/* missing */summon[ord.Foo => Comparable[? >: ord.Foo]]) + | ord.Ord.ordered[ord.Foo](/* missing */summon[ord.Foo => Comparable[? >: ord.Foo]]) + | + | But no implicit values were found that match type ord.Foo => Comparable[? >: ord.Foo]. 
+ | + | The following import might make progress towards fixing the problem: + | + | import scala.math.Ordered.orderingToOrdered | - | But no implicit values were found that match type ord.Foo => Comparable[? >: ord.Foo]. diff --git a/tests/neg/missing-implicit4.check b/tests/neg/missing-implicit4.check index e243c208ecdf..be262fc2081f 100644 --- a/tests/neg/missing-implicit4.check +++ b/tests/neg/missing-implicit4.check @@ -19,9 +19,9 @@ -- [E172] Type Error: tests/neg/missing-implicit4.scala:20:42 ---------------------------------------------------------- 20 | List(1, 2, 3).traverse(x => Option(x)) // error | ^ - | No given instance of type Zip[Option] was found for an implicit parameter of method traverse in trait Traverse + | No given instance of type Zip[Option] was found for a context parameter of method traverse in trait Traverse | - | The following import might fix the problem: + | The following import might fix the problem: | - | import instances.zipOption + | import instances.zipOption | diff --git a/tests/neg-custom-args/missing-targetName.scala b/tests/neg/missing-targetName.scala similarity index 82% rename from tests/neg-custom-args/missing-targetName.scala rename to tests/neg/missing-targetName.scala index c32ef461aee0..b5403ac7cb19 100644 --- a/tests/neg-custom-args/missing-targetName.scala +++ b/tests/neg/missing-targetName.scala @@ -1,3 +1,5 @@ +//> using options -Yrequire-targetName -Xfatal-warnings + // Compile with -strict -Xfatal-warnings -deprecation import scala.annotation.targetName class & { // error diff --git a/tests/neg/mt-recur.cov.scala b/tests/neg/mt-recur.cov.scala new file mode 100644 index 000000000000..9ef55dabad96 --- /dev/null +++ b/tests/neg/mt-recur.cov.scala @@ -0,0 +1,7 @@ +// like mt-recur.scala, but covariant +class Cov[+T] + +type Recur[X] = X match + case Int => Cov[Recur[X]] + +def x = ??? : Recur[Int] // error diff --git a/tests/neg/mt-recur.scala b/tests/neg/mt-recur.scala new file mode 100644 index 000000000000..f7252030acc7 --- /dev/null +++ b/tests/neg/mt-recur.scala @@ -0,0 +1,10 @@ +// an example of an infinite recursion match type +// using an _invariant_ type constructor +// see mt-recur.cov.scala for covariant +// used to track the behaviour of match type reduction +class Inv[T] + +type Recur[X] = X match + case Int => Inv[Recur[X]] + +def x = ??? : Recur[Int] // error diff --git a/tests/neg/mt-scrutinee-widen.scala b/tests/neg/mt-scrutinee-widen.scala new file mode 100644 index 000000000000..b9cb5c8769f4 --- /dev/null +++ b/tests/neg/mt-scrutinee-widen.scala @@ -0,0 +1,24 @@ +// We widen scrutinee's that are inline proxies +// But make sure that term refs in scrutinees are not widened in general + +val x: Int = 42 +val y: Int = 43 +val z: Int = 44 + +type IsX[T] = + T match + case x.type => true + case _ => false +def test = summon[IsX[y.type] =:= IsX[z.type]] // error + +def test2 = summon[ + ( + y.type match + case x.type => true + case _ => false + ) =:= ( + z.type match + case x.type => true + case _ => false + ) +] // error diff --git a/tests/neg/mt-scrutinee-widen2.scala b/tests/neg/mt-scrutinee-widen2.scala new file mode 100644 index 000000000000..8e89f6ab7122 --- /dev/null +++ b/tests/neg/mt-scrutinee-widen2.scala @@ -0,0 +1,13 @@ +// A test case showing how we shouldn't widen +// both IsX scrutinees and make "def test" typecheck +import scala.util.Random +val x = 42 + +type IsX[T] = + T match + case x.type => true + case _ => false + +def bothXOrNot(a: Int, b: Int)(using IsX[a.type] =:= IsX[b.type]) = ??? 
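The widening tests here hinge on when a match type over a singleton scrutinee reduces. A minimal, self-contained sketch of the two situations, with invented names (`WidenDemo`, `IsXDemo`, `probe`) and kept separate from the fixtures:

```scala
object WidenDemo:
  val x: Int = 42

  type IsXDemo[T] = T match
    case x.type => true
    case _      => false

  // Reduces to `true`: the scrutinee is exactly x.type.
  val a: IsXDemo[x.type] = true

  // For an arbitrary Int singleton the compiler can prove neither that
  // y.type is x.type nor that it is disjoint from it, so the match type
  // stays unreduced; no boolean literal would typecheck here.
  def probe(y: Int): IsXDemo[y.type] = ???
```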
+ +def test = bothXOrNot(Random.nextInt(), Random.nextInt()) // error diff --git a/tests/neg/mt-subtyping-transitivity.scala b/tests/neg/mt-subtyping-transitivity.scala new file mode 100644 index 000000000000..d654f2a45258 --- /dev/null +++ b/tests/neg/mt-subtyping-transitivity.scala @@ -0,0 +1,14 @@ +final class A +final class B + +type MT[X] = X match + case A => String + case B => Int + +def test: MT[A | B] = ??? : MT[A] // error +// testing that +// MT[A] !<: MT[A | B] +// otherwise +// String <: MT[A] <: MT[A | B] +// but +// String !<: MT[A | B] diff --git a/tests/neg-custom-args/erased/multiple-args-consume.scala b/tests/neg/multiple-args-consume.scala similarity index 85% rename from tests/neg-custom-args/erased/multiple-args-consume.scala rename to tests/neg/multiple-args-consume.scala index e4aaacca8969..6966eb0a3280 100644 --- a/tests/neg-custom-args/erased/multiple-args-consume.scala +++ b/tests/neg/multiple-args-consume.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + def foo(erased x: Int, y: Int) = y def bar(x: Int, erased y: Int) = x diff --git a/tests/neg-custom-args/erased/multiple-args.scala b/tests/neg/multiple-args.scala similarity index 82% rename from tests/neg-custom-args/erased/multiple-args.scala rename to tests/neg/multiple-args.scala index fb9bce8e4573..0349e956a6a8 100644 --- a/tests/neg-custom-args/erased/multiple-args.scala +++ b/tests/neg/multiple-args.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + def foo(x: Int, erased y: Int): Int = x def bar(erased x: Int, y: Int): Int = y diff --git a/tests/neg/name-hints.check b/tests/neg/name-hints.check index 324416d08c96..bac56c0c0b76 100644 --- a/tests/neg/name-hints.check +++ b/tests/neg/name-hints.check @@ -31,9 +31,9 @@ | ^^^^^^^ | value AbCde is not a member of object O - did you mean O.abcde? -- [E008] Not Found Error: tests/neg/name-hints.scala:15:13 ------------------------------------------------------------ -15 | val s3 = O.AbCdE // error +15 | val s3 = O.AbcdE // error | ^^^^^^^ - | value AbCdE is not a member of object O - did you mean O.abcde? + | value AbcdE is not a member of object O - did you mean O.abcde? 
-- [E008] Not Found Error: tests/neg/name-hints.scala:16:13 ------------------------------------------------------------ 16 | val s3 = O.AbCDE // error, no hint | ^^^^^^^ diff --git a/tests/neg/name-hints.scala b/tests/neg/name-hints.scala index cb4cb8884087..114053a0b673 100644 --- a/tests/neg/name-hints.scala +++ b/tests/neg/name-hints.scala @@ -12,7 +12,7 @@ object Test: val d3 = O.ab // error, no hint since distance = 3 > 2 = length val s1 = O.Abcde // error val s3 = O.AbCde // error - val s3 = O.AbCdE // error + val s3 = O.AbcdE // error val s3 = O.AbCDE // error, no hint val a1 = O.abcde0 // error val a2 = O.abcde00 // error diff --git a/tests/neg/newline-braces.scala b/tests/neg/newline-braces.scala new file mode 100644 index 000000000000..afe7731f3095 --- /dev/null +++ b/tests/neg/newline-braces.scala @@ -0,0 +1,8 @@ +//> using options -source 3.0-migration -Xfatal-warnings + +def f: List[Int] = { + List(1, 2, 3).map // no newline inserted here in Scala-2 compat mode + { x => // error (migration) + x + 1 + } +} diff --git a/tests/neg/no-kind-polymorphism-anykind.scala b/tests/neg/no-kind-polymorphism-anykind.scala new file mode 100644 index 000000000000..b14491468d00 --- /dev/null +++ b/tests/neg/no-kind-polymorphism-anykind.scala @@ -0,0 +1,3 @@ +//> using options -Yno-kind-polymorphism + +trait Foo[T <: AnyKind] // error: Not found: type AnyKind diff --git a/tests/neg/noimports-additional.scala b/tests/neg/noimports-additional.scala index e726db5b9b0a..e176135bd644 100644 --- a/tests/neg/noimports-additional.scala +++ b/tests/neg/noimports-additional.scala @@ -1,4 +1,4 @@ -// scalac: -Yno-imports -Yimports:scala.annotation,scala.util.matching +//> using options -Yno-imports -Yimports:scala.annotation,scala.util.matching class annotation extends Annotation val s: String = "str" // error val regex: Regex = new Regex("str") diff --git a/tests/neg/noimports.scala b/tests/neg/noimports.scala index 720d111757cd..468778eb0633 100644 --- a/tests/neg/noimports.scala +++ b/tests/neg/noimports.scala @@ -1,4 +1,4 @@ -// scalac: -Yno-imports +//> using options -Yno-imports object Test { val t: Int = 1 // error: not found Int } diff --git a/tests/neg/noimports2.scala b/tests/neg/noimports2.scala index deee773c35c6..33dc1537dd2d 100644 --- a/tests/neg/noimports2.scala +++ b/tests/neg/noimports2.scala @@ -1,4 +1,4 @@ -// scalac: -Yno-imports +//> using options -Yno-imports object Test { assert("asdf" == "asdf") // error: not found assert } diff --git a/tests/neg-custom-args/fatal-warnings/nonunit-statement.scala b/tests/neg/nonunit-statement.scala similarity index 98% rename from tests/neg-custom-args/fatal-warnings/nonunit-statement.scala rename to tests/neg/nonunit-statement.scala index 399d132edfae..94346031077c 100644 --- a/tests/neg-custom-args/fatal-warnings/nonunit-statement.scala +++ b/tests/neg/nonunit-statement.scala @@ -1,4 +1,4 @@ -// scalac: -Wnonunit-statement -Wvalue-discard +//> using options -Xfatal-warnings -Wnonunit-statement -Wvalue-discard -source:3.3 import collection.ArrayOps import collection.mutable.{ArrayBuilder, LinkedHashSet, ListBuffer} import concurrent._ diff --git a/tests/neg/nopredef-additional.scala b/tests/neg/nopredef-additional.scala index 0b6a71ca7c53..a91fcf340e3b 100644 --- a/tests/neg/nopredef-additional.scala +++ b/tests/neg/nopredef-additional.scala @@ -1,4 +1,4 @@ -// scalac: -Yno-predef -Yimports:java.lang,scala.annotation,scala.util.matching +//> using options -Yno-predef -Yimports:java.lang,scala.annotation,scala.util.matching class annotation 
extends Annotation val s: String = "str" val regex: Regex = s.r // error diff --git a/tests/neg/nopredef.scala b/tests/neg/nopredef.scala index fa9a344772a6..5b577180a4b9 100644 --- a/tests/neg/nopredef.scala +++ b/tests/neg/nopredef.scala @@ -1,4 +1,4 @@ -// scalac: -Yno-predef +//> using options -Yno-predef object Test { assert("asdf" == "asdf") // error: not found assert } diff --git a/tests/neg/not-accessible.check b/tests/neg/not-accessible.check new file mode 100644 index 000000000000..54585460a1d8 --- /dev/null +++ b/tests/neg/not-accessible.check @@ -0,0 +1,25 @@ +-- [E173] Reference Error: tests/neg/not-accessible.scala:8:23 --------------------------------------------------------- +8 | def test(a: A) = a.x // error + | ^^^ + | value x cannot be accessed as a member of (a : foo.A) from class B. + | private[A] value x can only be accessed from class A in package foo. +-- [E173] Reference Error: tests/neg/not-accessible.scala:10:23 -------------------------------------------------------- +10 | def test(a: A) = a.x // error + | ^^^ + | value x cannot be accessed as a member of (a : foo.A) from object B. + | private[A] value x can only be accessed from class A in package foo. +-- [E173] Reference Error: tests/neg/not-accessible.scala:13:23 -------------------------------------------------------- +13 | def test(a: A) = a.x // error + | ^^^ + | value x cannot be accessed as a member of (a : foo.A) from the top-level definitions in package bar. + | private[A] value x can only be accessed from class A in package foo. +-- [E173] Reference Error: tests/neg/not-accessible.scala:5:21 --------------------------------------------------------- +5 | def test(a: A) = a.x // error + | ^^^ + | value x cannot be accessed as a member of (a : foo.A) from the top-level definitions in package foo. + | private[A] value x can only be accessed from class A in package foo. +-- [E173] Reference Error: tests/neg/not-accessible.scala:15:23 -------------------------------------------------------- +15 |def test(a: foo.A) = a.x // error + | ^^^ + | value x cannot be accessed as a member of (a : foo.A) from the top-level definitions in package . + | private[A] value x can only be accessed from class A in package foo. diff --git a/tests/neg/not-accessible.scala b/tests/neg/not-accessible.scala new file mode 100644 index 000000000000..a0ff791e966f --- /dev/null +++ b/tests/neg/not-accessible.scala @@ -0,0 +1,15 @@ +package foo: + + class A(private[A] val x: Int) + + def test(a: A) = a.x // error + + class B: + def test(a: A) = a.x // error + object B: + def test(a: A) = a.x // error + + package bar: + def test(a: A) = a.x // error + +def test(a: foo.A) = a.x // error diff --git a/tests/neg/nowarn-parser-error.check b/tests/neg/nowarn-parser-error.check new file mode 100644 index 000000000000..27c44dc1df3b --- /dev/null +++ b/tests/neg/nowarn-parser-error.check @@ -0,0 +1,13 @@ +-- [E040] Syntax Error: tests/neg/nowarn-parser-error.scala:5:6 -------------------------------------------------------- +5 | def def // error + | ^^^ + | an identifier expected, but 'def' found + | + | longer explanation available when compiling with `-explain` +-- [E002] Syntax Warning: tests/neg/nowarn-parser-error.scala:4:10 ----------------------------------------------------- +4 | def a = try 1 // warn + | ^^^^^ + | A try without catch or finally is equivalent to putting + | its body in a block; no exceptions are handled. 
+ | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/nowarn-parser-error.scala b/tests/neg/nowarn-parser-error.scala new file mode 100644 index 000000000000..1050a036f431 --- /dev/null +++ b/tests/neg/nowarn-parser-error.scala @@ -0,0 +1,6 @@ +//> using options -deprecation -Wunused:nowarn + +class C { + def a = try 1 // warn + def def // error +} diff --git a/tests/neg/nowarn-typer-error.check b/tests/neg/nowarn-typer-error.check new file mode 100644 index 000000000000..5da16881cb39 --- /dev/null +++ b/tests/neg/nowarn-typer-error.check @@ -0,0 +1,6 @@ +-- [E006] Not Found Error: tests/neg/nowarn-typer-error.scala:6:11 ----------------------------------------------------- +6 | def t1 = / // error + | ^ + | Not found: / + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/nowarn/nowarn-typer-error.scala b/tests/neg/nowarn-typer-error.scala similarity index 82% rename from tests/neg-custom-args/nowarn/nowarn-typer-error.scala rename to tests/neg/nowarn-typer-error.scala index 8ab871b108f6..801d4e55977a 100644 --- a/tests/neg-custom-args/nowarn/nowarn-typer-error.scala +++ b/tests/neg/nowarn-typer-error.scala @@ -1,3 +1,5 @@ +//> using options -deprecation -Wunused:nowarn + import annotation.nowarn object T { @deprecated def f = 1 diff --git a/tests/neg/nowarn.check b/tests/neg/nowarn.check new file mode 100644 index 000000000000..636cabd44d07 --- /dev/null +++ b/tests/neg/nowarn.check @@ -0,0 +1,110 @@ +-- [E002] Syntax Warning: tests/neg/nowarn.scala:11:10 ----------------------------------------------------------------- +11 |def t1a = try 1 // warning (parser) + | ^^^^^ + | A try without catch or finally is equivalent to putting + | its body in a block; no exceptions are handled. + | + | longer explanation available when compiling with `-explain` +-- [E002] Syntax Warning: tests/neg/nowarn.scala:25:25 ----------------------------------------------------------------- +25 |@nowarn(o.inl) def t2d = try 1 // two warnings (`inl` is not a compile-time constant) + | ^^^^^ + | A try without catch or finally is equivalent to putting + | its body in a block; no exceptions are handled. + | + | longer explanation available when compiling with `-explain` +-- [E002] Syntax Warning: tests/neg/nowarn.scala:33:26 ----------------------------------------------------------------- +33 |@nowarn("id=1") def t4d = try 1 // error and warning (unused nowarn, wrong id) + | ^^^^^ + | A try without catch or finally is equivalent to putting + | its body in a block; no exceptions are handled. + | + | longer explanation available when compiling with `-explain` +-- [E002] Syntax Warning: tests/neg/nowarn.scala:35:28 ----------------------------------------------------------------- +35 |@nowarn("verbose") def t5 = try 1 // warning with details + | ^^^^^ + | A try without catch or finally is equivalent to putting + | its body in a block; no exceptions are handled. 
+Matching filters for @nowarn or -Wconf: + - id=E2 + - name=EmptyCatchAndFinallyBlock + | + | longer explanation available when compiling with `-explain` +-- [E129] Potential Issue Warning: tests/neg/nowarn.scala:15:11 -------------------------------------------------------- +15 |def t2 = { 1; 2 } // warning (the invalid nowarn doesn't silence anything) + | ^ + | A pure expression does nothing in statement position + | + | longer explanation available when compiling with `-explain` +-- Warning: tests/neg/nowarn.scala:14:8 -------------------------------------------------------------------------------- +14 |@nowarn("wat?") // warning (typer, invalid filter) + | ^^^^^^ + | Invalid message filter + | unknown filter: wat? +-- [E129] Potential Issue Warning: tests/neg/nowarn.scala:18:12 -------------------------------------------------------- +18 |def t2a = { 1; 2 } // warning (invalid nowarn doesn't silence) + | ^ + | A pure expression does nothing in statement position + | + | longer explanation available when compiling with `-explain` +-- Warning: tests/neg/nowarn.scala:17:8 -------------------------------------------------------------------------------- +17 |@nowarn(t1a.toString) // warning (typer, argument not a compile-time constant) + | ^^^^^^^^^^^^ + | filter needs to be a compile-time constant string +-- Warning: tests/neg/nowarn.scala:25:10 ------------------------------------------------------------------------------- +25 |@nowarn(o.inl) def t2d = try 1 // two warnings (`inl` is not a compile-time constant) + | ^^^^^ + | filter needs to be a compile-time constant string +-- Deprecation Warning: tests/neg/nowarn.scala:39:10 ------------------------------------------------------------------- +39 |def t6a = f // warning (refchecks, deprecation) + | ^ + | method f is deprecated +-- Deprecation Warning: tests/neg/nowarn.scala:42:30 ------------------------------------------------------------------- +42 |@nowarn("msg=fish") def t6d = f // error (unused nowarn), warning (deprecation) + | ^ + | method f is deprecated +-- Deprecation Warning: tests/neg/nowarn.scala:49:10 ------------------------------------------------------------------- +49 |def t7c = f // warning (deprecation) + | ^ + | method f is deprecated +-- [E092] Pattern Match Unchecked Warning: tests/neg/nowarn.scala:55:7 ------------------------------------------------- +55 | case _: List[Int] => 0 // warning (patmat, unchecked) + | ^ + |the type test for List[Int] cannot be checked at runtime because its type arguments can't be determined from Any + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg/nowarn.scala:33:1 ---------------------------------------------------------------------------------- +33 |@nowarn("id=1") def t4d = try 1 // error and warning (unused nowarn, wrong id) + |^^^^^^^^^^^^^^^ + |@nowarn annotation does not suppress any warnings +-- Error: tests/neg/nowarn.scala:42:1 ---------------------------------------------------------------------------------- +42 |@nowarn("msg=fish") def t6d = f // error (unused nowarn), warning (deprecation) + |^^^^^^^^^^^^^^^^^^^ + |@nowarn annotation does not suppress any warnings +-- Error: tests/neg/nowarn.scala:50:5 ---------------------------------------------------------------------------------- +50 | : @nowarn("msg=fish") // error (unused nowarn) + | ^^^^^^^^^^^^^^^^^^^ + | @nowarn annotation does not suppress any warnings +-- Error: tests/neg/nowarn.scala:62:0 
---------------------------------------------------------------------------------- +62 |@nowarn def t9a = { 1: @nowarn; 2 } // error (outer @nowarn is unused) + |^^^^^^^ + |@nowarn annotation does not suppress any warnings +-- Error: tests/neg/nowarn.scala:63:27 --------------------------------------------------------------------------------- +63 |@nowarn def t9b = { 1: Int @nowarn; 2 } // error (inner @nowarn is unused, it covers the type, not the expression) + | ^^^^^^^ + | @nowarn annotation does not suppress any warnings +-- Error: tests/neg/nowarn.scala:68:0 ---------------------------------------------------------------------------------- +68 |@nowarn @ann(f) def t10b = 0 // error (unused nowarn) + |^^^^^^^ + |@nowarn annotation does not suppress any warnings +-- Error: tests/neg/nowarn.scala:69:8 ---------------------------------------------------------------------------------- +69 |@ann(f: @nowarn) def t10c = 0 // error (unused nowarn), should be silent + | ^^^^^^^ + | @nowarn annotation does not suppress any warnings +-- Error: tests/neg/nowarn.scala:72:0 ---------------------------------------------------------------------------------- +72 |@nowarn class I1a { // error (unused nowarn) + |^^^^^^^ + |@nowarn annotation does not suppress any warnings +-- Error: tests/neg/nowarn.scala:77:0 ---------------------------------------------------------------------------------- +77 |@nowarn class I1b { // error (unused nowarn) + |^^^^^^^ + |@nowarn annotation does not suppress any warnings diff --git a/tests/neg-custom-args/nowarn/nowarn.scala b/tests/neg/nowarn.scala similarity index 96% rename from tests/neg-custom-args/nowarn/nowarn.scala rename to tests/neg/nowarn.scala index f5d10a5f262a..5b18ab5ccc51 100644 --- a/tests/neg-custom-args/nowarn/nowarn.scala +++ b/tests/neg/nowarn.scala @@ -1,3 +1,5 @@ +//> using options -deprecation -Wunused:nowarn "-Wconf:msg=@nowarn annotation does not suppress any warnings:e" + import scala.annotation.{ nowarn, Annotation } // This test doesn't run with `-Werror`, because once there's an error, later phases are skipped and we would not see diff --git a/tests/neg-strict/nullless.scala b/tests/neg/nullless.scala similarity index 93% rename from tests/neg-strict/nullless.scala rename to tests/neg/nullless.scala index 7545d840db83..20f9af841e14 100644 --- a/tests/neg-strict/nullless.scala +++ b/tests/neg/nullless.scala @@ -1,3 +1,5 @@ +//> using options -source future -deprecation -Xfatal-warnings + object nullless { trait LowerBound[T] { type M >: T; diff --git a/tests/neg/old-syntax.scala b/tests/neg/old-syntax.scala new file mode 100644 index 000000000000..124781d13db2 --- /dev/null +++ b/tests/neg/old-syntax.scala @@ -0,0 +1,5 @@ +//> using options -Xfatal-warnings -deprecation + +val f = (x: Int) ⇒ x + 1 // error + +val list = for (n ← List(42)) yield n + 1 // error diff --git a/tests/neg-custom-args/fatal-warnings/opaque-match.scala b/tests/neg/opaque-match.scala similarity index 82% rename from tests/neg-custom-args/fatal-warnings/opaque-match.scala rename to tests/neg/opaque-match.scala index f48a11168274..59d0836fb2a3 100644 --- a/tests/neg-custom-args/fatal-warnings/opaque-match.scala +++ b/tests/neg/opaque-match.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + case class C() object O: @@ -17,7 +19,7 @@ def Test[T] = (??? : Any) match case _: List[O.T] => ??? // error (??? : Any) match - case _: List[O.T @unchecked] => ??? // OK + case _: List[O.T @unchecked] => ??? // OK (??? : Any) match case _: List[T] => ??? 
// error diff --git a/tests/neg-custom-args/overrideClass.scala b/tests/neg/overrideClass.scala similarity index 91% rename from tests/neg-custom-args/overrideClass.scala rename to tests/neg/overrideClass.scala index 431b846d90ce..182cf945ec00 100644 --- a/tests/neg-custom-args/overrideClass.scala +++ b/tests/neg/overrideClass.scala @@ -1,3 +1,5 @@ +//> using options -source 3.0-migration + abstract class FooA { type A <: Ax; abstract class Ax; diff --git a/tests/neg/overrides.scala b/tests/neg/overrides.scala index c8fc8de97f7c..ff83b91d26be 100644 --- a/tests/neg/overrides.scala +++ b/tests/neg/overrides.scala @@ -120,3 +120,24 @@ class C extends A { override def m: Int = 42 // error: has incompatible type } } + +package p6 { + class A { def apply(xs: Int*) = 42 } + class B extends A { override def apply(xs: Seq[Int]) = 42 } // error +} +package p7 { + class A { def apply(xs: Int*) = 42 } + class B extends A { def apply(xs: Seq[Int]) = 42 } // error +} +package p8 { + class A { def apply(xs: Seq[Int]) = 42 } + class B extends A { override def apply(xs: Int*) = 42 } // error +} +package p9 { + class A { def apply(xs: Seq[Int]) = 42 } + class B extends A { def apply(xs: Int*) = 42 } // error +} +package p10 { + class A { def apply(s: String)(xs: Int*) = 42 } + class B extends A { def apply(s: String)(xs: Seq[Int]) = 42 } // error +} diff --git a/tests/neg/ovlazy.scala b/tests/neg/ovlazy.scala new file mode 100644 index 000000000000..69078fbd2745 --- /dev/null +++ b/tests/neg/ovlazy.scala @@ -0,0 +1,8 @@ +//> using options -source 3.0-migration -Xfatal-warnings + +class A { + val x: Int = 1 +} +class B extends A { + override lazy val x: Int = 2 // error +} diff --git a/tests/neg/parser-stability-1.scala b/tests/neg/parser-stability-1.scala index 560b9cf116e3..661ab87e31e5 100644 --- a/tests/neg/parser-stability-1.scala +++ b/tests/neg/parser-stability-1.scala @@ -1,3 +1,4 @@ object x0 { x1 match // error def this // error +// error \ No newline at end of file diff --git a/tests/neg/polymorphic-erased-functions-types.check b/tests/neg/polymorphic-erased-functions-types.check new file mode 100644 index 000000000000..39d2720023cf --- /dev/null +++ b/tests/neg/polymorphic-erased-functions-types.check @@ -0,0 +1,28 @@ +-- [E007] Type Mismatch Error: tests/neg/polymorphic-erased-functions-types.scala:3:28 --------------------------------- +3 |def t1a: [T] => T => Unit = [T] => (erased t: T) => () // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Found: [T] => (erased t: T) => Unit + | Required: [T] => (x$1: T) => Unit + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/polymorphic-erased-functions-types.scala:4:37 --------------------------------- +4 |def t1b: [T] => (erased T) => Unit = [T] => (t: T) => () // error + | ^^^^^^^^^^^^^^^^^^^ + | Found: [T] => (t: T) => Unit + | Required: [T] => (erased x$1: T) => Unit + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/polymorphic-erased-functions-types.scala:6:36 --------------------------------- +6 |def t2a: [T, U] => (T, U) => Unit = [T, U] => (t: T, erased u: U) => () // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Found: [T, U] => (t: T, erased u: U) => Unit + | Required: [T, U] => (x$1: T, x$2: U) => Unit + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/polymorphic-erased-functions-types.scala:7:43 --------------------------------- +7 |def t2b: [T, U] => (T, erased U) 
=> Unit = [T, U] => (t: T, u: U) => () // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Found: [T, U] => (t: T, u: U) => Unit + | Required: [T, U] => (x$1: T, erased x$2: U) => Unit + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/polymorphic-erased-functions-types.scala b/tests/neg/polymorphic-erased-functions-types.scala new file mode 100644 index 000000000000..d453c4602bad --- /dev/null +++ b/tests/neg/polymorphic-erased-functions-types.scala @@ -0,0 +1,7 @@ +import language.experimental.erasedDefinitions + +def t1a: [T] => T => Unit = [T] => (erased t: T) => () // error +def t1b: [T] => (erased T) => Unit = [T] => (t: T) => () // error + +def t2a: [T, U] => (T, U) => Unit = [T, U] => (t: T, erased u: U) => () // error +def t2b: [T, U] => (T, erased U) => Unit = [T, U] => (t: T, u: U) => () // error diff --git a/tests/neg/polymorphic-erased-functions-used.check b/tests/neg/polymorphic-erased-functions-used.check new file mode 100644 index 000000000000..6eb5abb0e235 --- /dev/null +++ b/tests/neg/polymorphic-erased-functions-used.check @@ -0,0 +1,8 @@ +-- Error: tests/neg/polymorphic-erased-functions-used.scala:3:33 ------------------------------------------------------- +3 |def t1 = [T] => (erased t: T) => t // error + | ^ + | parameter t is declared as `erased`, but is in fact used +-- Error: tests/neg/polymorphic-erased-functions-used.scala:4:42 ------------------------------------------------------- +4 |def t2 = [T, U] => (t: T, erased u: U) => u // error + | ^ + | parameter u is declared as `erased`, but is in fact used diff --git a/tests/neg/polymorphic-erased-functions-used.scala b/tests/neg/polymorphic-erased-functions-used.scala new file mode 100644 index 000000000000..73ca48b133ee --- /dev/null +++ b/tests/neg/polymorphic-erased-functions-used.scala @@ -0,0 +1,4 @@ +import language.experimental.erasedDefinitions + +def t1 = [T] => (erased t: T) => t // error +def t2 = [T, U] => (t: T, erased u: U) => u // error diff --git a/tests/neg/polymorphic-functions.scala b/tests/neg/polymorphic-functions.scala index d9783baee967..b949cf04194c 100644 --- a/tests/neg/polymorphic-functions.scala +++ b/tests/neg/polymorphic-functions.scala @@ -2,4 +2,6 @@ object Test { val pv0: [T] => List[T] = ??? 
// error val pv1: Any = [T] => Nil // error val pv2: [T] => List[T] = [T] => Nil // error // error + + val intraDep = [T] => (x: T, y: List[x.type]) => List(y) // error } diff --git a/tests/neg/polymorphic-functions1.check b/tests/neg/polymorphic-functions1.check index 7374075de072..eef268c298cf 100644 --- a/tests/neg/polymorphic-functions1.check +++ b/tests/neg/polymorphic-functions1.check @@ -1,7 +1,7 @@ --- [E007] Type Mismatch Error: tests/neg/polymorphic-functions1.scala:1:53 --------------------------------------------- +-- [E007] Type Mismatch Error: tests/neg/polymorphic-functions1.scala:1:33 --------------------------------------------- 1 |val f: [T] => (x: T) => x.type = [T] => (x: Int) => x // error - | ^ - | Found: [T] => (x: Int) => Int - | Required: [T] => (x: T) => x.type + | ^^^^^^^^^^^^^^^^^^^^ + | Found: [T] => (x: Int) => x.type + | Required: [T] => (x: T) => x.type | | longer explanation available when compiling with `-explain` diff --git a/tests/neg/polymorphic-functions2.scala b/tests/neg/polymorphic-functions2.scala new file mode 100644 index 000000000000..0fa618298cb3 --- /dev/null +++ b/tests/neg/polymorphic-functions2.scala @@ -0,0 +1,6 @@ +val wrongLength1: [T, S] => (T, S) => T = [T] => (x, y) => x // error +val wrongLength2: [T] => T => T = [T] => (x, x) => x // error + +val notSubType: [T] => T => T = [T <: Int] => x => x // error + +val notInScope: [T] => T => T = [S] => x => (x: T) // error diff --git a/tests/neg/print-tuple-union.check b/tests/neg/print-tuple-union.check new file mode 100644 index 000000000000..f3754aa5b17e --- /dev/null +++ b/tests/neg/print-tuple-union.check @@ -0,0 +1,18 @@ +-- [E007] Type Mismatch Error: tests/neg/print-tuple-union.scala:3:23 -------------------------------------------------- +3 | def bar[B]: Int = foo[B] // error + | ^^^^^^ + | Found: Tuple.Union[B] + | Required: Int + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Tuple.Union[B] + | trying to reduce Tuple.Fold[B, Nothing, [x, y] =>> x | y] + | failed since selector B + | does not match case EmptyTuple => Nothing + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case h *: t => h | Tuple.Fold[t, Nothing, [x, y] =>> x | y] + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/print-tuple-union.scala b/tests/neg/print-tuple-union.scala new file mode 100644 index 000000000000..9d958fd5e1ca --- /dev/null +++ b/tests/neg/print-tuple-union.scala @@ -0,0 +1,3 @@ +trait Test: + def foo[A]: Tuple.Union[A] + def bar[B]: Int = foo[B] // error diff --git a/tests/neg/private-this-3.4.check b/tests/neg/private-this-3.4.check new file mode 100644 index 000000000000..29c2fe909ede --- /dev/null +++ b/tests/neg/private-this-3.4.check @@ -0,0 +1,12 @@ +-- Error: tests/neg/private-this-3.4.scala:6:16 ------------------------------------------------------------------------ +6 | private[this] def foo: Int = ??? // error: migration warning + | ^ + | The [this] qualifier will be deprecated in the future; it should be dropped. + | See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. +-- Error: tests/neg/private-this-3.4.scala:7:18 ------------------------------------------------------------------------ +7 | protected[this] def bar: Int = ??? 
// error: migration warning + | ^ + | The [this] qualifier will be deprecated in the future; it should be dropped. + | See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. diff --git a/tests/neg/private-this-3.4.scala b/tests/neg/private-this-3.4.scala new file mode 100644 index 000000000000..b198e954e41b --- /dev/null +++ b/tests/neg/private-this-3.4.scala @@ -0,0 +1,7 @@ +//> using options -Werror + +import scala.language.`3.4` + +class Foo: + private[this] def foo: Int = ??? // error: migration warning + protected[this] def bar: Int = ??? // error: migration warning diff --git a/tests/neg/private-this-future-migration.scala b/tests/neg/private-this-future-migration.scala new file mode 100644 index 000000000000..7e3da2be72e2 --- /dev/null +++ b/tests/neg/private-this-future-migration.scala @@ -0,0 +1,7 @@ +//> using options -Werror + +import scala.language.`future-migration` + +class Foo: + private[this] def foo: Int = ??? // error: migration warning + protected[this] def bar: Int = ??? // error: migration warning diff --git a/tests/neg/private-this-future.scala b/tests/neg/private-this-future.scala new file mode 100644 index 000000000000..d94cbe16abad --- /dev/null +++ b/tests/neg/private-this-future.scala @@ -0,0 +1,5 @@ +import scala.language.future + +class Foo: + private[this] def foo: Int = ??? // error + protected[this] def bar: Int = ??? // error diff --git a/tests/neg/publicInBinary-not-accessible.check b/tests/neg/publicInBinary-not-accessible.check new file mode 100644 index 000000000000..5f02d66351c1 --- /dev/null +++ b/tests/neg/publicInBinary-not-accessible.check @@ -0,0 +1,38 @@ +-- [E173] Reference Error: tests/neg/publicInBinary-not-accessible.scala:16:4 ------------------------------------------ +16 | a.p // error + | ^^^ + | value p cannot be accessed as a member of (a : foo.A) from the top-level definitions in package foo. + | private[A] value p can only be accessed from class A in package foo. +-- [E173] Reference Error: tests/neg/publicInBinary-not-accessible.scala:17:4 ------------------------------------------ +17 | a.a // error + | ^^^ + | value a cannot be accessed as a member of (a² : foo.A) from the top-level definitions in package foo. + | private[A] value a can only be accessed from class A in package foo. + | + | where: a is a value in class A + | a² is a parameter in method test +-- [E173] Reference Error: tests/neg/publicInBinary-not-accessible.scala:18:4 ------------------------------------------ +18 | a.b // error + | ^^^ + | lazy value b cannot be accessed as a member of (a : foo.A) from the top-level definitions in package foo. + | private[A] lazy value b can only be accessed from class A in package foo. +-- [E173] Reference Error: tests/neg/publicInBinary-not-accessible.scala:19:4 ------------------------------------------ +19 | a.c // error + | ^^^ + | variable c cannot be accessed as a member of (a : foo.A) from the top-level definitions in package foo. + | private[A] variable c can only be accessed from class A in package foo. +-- [E173] Reference Error: tests/neg/publicInBinary-not-accessible.scala:20:4 ------------------------------------------ +20 | a.d // error + | ^^^ + | method d cannot be accessed as a member of (a : foo.A) from the top-level definitions in package foo. + | private[A] method d can only be accessed from class A in package foo. 
+-- [E173] Reference Error: tests/neg/publicInBinary-not-accessible.scala:21:4 ------------------------------------------ +21 | a.e // error + | ^^^ + | given instance e cannot be accessed as a member of (a : foo.A) from the top-level definitions in package foo. + | private[A] given instance e can only be accessed from class A in package foo. +-- [E173] Reference Error: tests/neg/publicInBinary-not-accessible.scala:22:4 ------------------------------------------ +22 | a.f(using 1.0) // error + | ^^^ + | given instance f cannot be accessed as a member of (a : foo.A) from the top-level definitions in package foo. + | private[A] given instance f can only be accessed from class A in package foo. diff --git a/tests/neg/publicInBinary-not-accessible.scala b/tests/neg/publicInBinary-not-accessible.scala new file mode 100644 index 000000000000..23ddbf2db7d0 --- /dev/null +++ b/tests/neg/publicInBinary-not-accessible.scala @@ -0,0 +1,22 @@ +//> using options -Werror -WunstableInlineAccessors + +package foo + +import scala.annotation.publicInBinary + +class A(@publicInBinary private[A] val p: Int): + @publicInBinary private[A] val a: Int = 1 + @publicInBinary private[A] lazy val b: Int = 1 + @publicInBinary private[A] var c: Int = 1 + @publicInBinary private[A] def d: Int = 1 + @publicInBinary private[A] given e: Int = 1 + @publicInBinary private[A] given f(using Double): Int = 1 + +def test(a: A) = + a.p // error + a.a // error + a.b // error + a.c // error + a.d // error + a.e // error + a.f(using 1.0) // error diff --git a/tests/neg/publicInBinary.check b/tests/neg/publicInBinary.check new file mode 100644 index 000000000000..8cf8690017f1 --- /dev/null +++ b/tests/neg/publicInBinary.check @@ -0,0 +1,40 @@ +-- Error: tests/neg/publicInBinary.scala:8:22 -------------------------------------------------------------------------- +8 |@publicInBinary class C: // error + | ^ + | @publicInBinary cannot be used on class definitions +-- Error: tests/neg/publicInBinary.scala:10:24 ------------------------------------------------------------------------- +10 | @publicInBinary def g = () // error + | ^ + | @publicInBinary cannot be used on local definitions +-- Error: tests/neg/publicInBinary.scala:12:24 ------------------------------------------------------------------------- +12 |class D[@publicInBinary T] // error + | ^ + | @publicInBinary cannot be used on type definitions +-- Error: tests/neg/publicInBinary.scala:16:21 ------------------------------------------------------------------------- +16 |@publicInBinary enum Enum1: // error + | ^ + | @publicInBinary cannot be used on enum definitions +-- Error: tests/neg/publicInBinary.scala:20:23 ------------------------------------------------------------------------- +20 | @publicInBinary case A // error + | ^ + | @publicInBinary cannot be used on enum definitions +-- Error: tests/neg/publicInBinary.scala:21:23 ------------------------------------------------------------------------- +21 | @publicInBinary case B(a: Int) // error + | ^ + | @publicInBinary cannot be used on enum definitions +-- Error: tests/neg/publicInBinary.scala:25:18 ------------------------------------------------------------------------- +25 | @publicInBinary x: Int, // error + | ^ + | @publicInBinary cannot be non `val` constructor parameters +-- Error: tests/neg/publicInBinary.scala:26:31 ------------------------------------------------------------------------- +26 | @publicInBinary private[Bar] y: Int, // error + | ^ + | @publicInBinary cannot be non `val` constructor parameters +-- 
Error: tests/neg/publicInBinary.scala:7:21 -------------------------------------------------------------------------- +7 |@publicInBinary type A // error + | ^ + | @publicInBinary cannot be used on type definitions +-- Error: tests/neg/publicInBinary.scala:14:22 ------------------------------------------------------------------------- +14 |def f(@publicInBinary x: Int) = 3 // error + | ^ + | @publicInBinary cannot be used on local definitions diff --git a/tests/neg/publicInBinary.scala b/tests/neg/publicInBinary.scala new file mode 100644 index 000000000000..3034eb65975e --- /dev/null +++ b/tests/neg/publicInBinary.scala @@ -0,0 +1,28 @@ +//> using options -Werror -WunstableInlineAccessors + +package foo + +import scala.annotation.publicInBinary + +@publicInBinary type A // error +@publicInBinary class C: // error + def f: Unit = + @publicInBinary def g = () // error + () +class D[@publicInBinary T] // error + +def f(@publicInBinary x: Int) = 3 // error + +@publicInBinary enum Enum1: // error + case A + +enum Enum2: + @publicInBinary case A // error + @publicInBinary case B(a: Int) // error + + +class Bar ( + @publicInBinary x: Int, // error + @publicInBinary private[Bar] y: Int, // error + @publicInBinary private[Bar] val z: Int, +) diff --git a/tests/neg/publicInBinaryOverride.check b/tests/neg/publicInBinaryOverride.check new file mode 100644 index 000000000000..e44692c78525 --- /dev/null +++ b/tests/neg/publicInBinaryOverride.check @@ -0,0 +1,5 @@ +-- [E164] Declaration Error: tests/neg/publicInBinaryOverride.scala:8:15 ----------------------------------------------- +8 | override def f(): Unit = () // error + | ^ + | error overriding method f in class A of type (): Unit; + | method f of type (): Unit also needs to be declared with @publicInBinary diff --git a/tests/neg/publicInBinaryOverride.scala b/tests/neg/publicInBinaryOverride.scala new file mode 100644 index 000000000000..4b9144d27540 --- /dev/null +++ b/tests/neg/publicInBinaryOverride.scala @@ -0,0 +1,9 @@ +import scala.annotation.publicInBinary + +class A: + @publicInBinary def f(): Unit = () + @publicInBinary def g(): Unit = () + +class B extends A: + override def f(): Unit = () // error + @publicInBinary override def g(): Unit = () diff --git a/tests/neg-custom-args/fatal-warnings/pureStatement.scala b/tests/neg/pureStatement.scala similarity index 95% rename from tests/neg-custom-args/fatal-warnings/pureStatement.scala rename to tests/neg/pureStatement.scala index 80ee7c589dc7..4d2ea1d49b08 100644 --- a/tests/neg-custom-args/fatal-warnings/pureStatement.scala +++ b/tests/neg/pureStatement.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + class IOCapability object Test { diff --git a/tests/neg-custom-args/fatal-warnings/quote-simple-hole.scala b/tests/neg/quote-simple-hole.scala similarity index 90% rename from tests/neg-custom-args/fatal-warnings/quote-simple-hole.scala rename to tests/neg/quote-simple-hole.scala index 35308cc36161..64e2bcad4862 100644 --- a/tests/neg-custom-args/fatal-warnings/quote-simple-hole.scala +++ b/tests/neg/quote-simple-hole.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + import scala.quoted.Quotes def test(using Quotes) = { diff --git a/tests/neg/refinements-this.scala b/tests/neg/refinements-this.scala new file mode 100644 index 000000000000..f8d41bd85360 --- /dev/null +++ b/tests/neg/refinements-this.scala @@ -0,0 +1,5 @@ +//> using options -Xfatal-warnings + +class Outer: + type X = { type O = Outer.this.type } // ok + type Y = { type O = this.type } // error diff --git 
a/tests/neg/refutable-pattern-binding-messages.scala b/tests/neg/refutable-pattern-binding-messages.scala index 97ce61503a2b..c6ae043652c2 100644 --- a/tests/neg/refutable-pattern-binding-messages.scala +++ b/tests/neg/refutable-pattern-binding-messages.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror object Test { // refutable extractor object Positive { def unapply(i: Int): Option[Int] = Some(i).filter(_ > 0) } diff --git a/tests/neg/rewrite-messages.check b/tests/neg/rewrite-messages.check index 3ee081edbed2..b062ab2bc732 100644 --- a/tests/neg/rewrite-messages.check +++ b/tests/neg/rewrite-messages.check @@ -8,4 +8,4 @@ | ^^^ | Alphanumeric method foo is not declared infix; it should not be used as infix operator. | Instead, use method syntax .foo(...) or backticked identifier `foo`. - | The latter can be rewritten automatically under -rewrite -deprecation. + | The latter can be rewritten automatically under -rewrite -source 3.4-migration. diff --git a/tests/neg/rewrite-messages.scala b/tests/neg/rewrite-messages.scala index b4f2bf75bfe3..7509682c4baa 100644 --- a/tests/neg/rewrite-messages.scala +++ b/tests/neg/rewrite-messages.scala @@ -1,4 +1,4 @@ -// scalac: -source:future-migration -deprecation -Werror +//> using options -source:future-migration -deprecation -Werror import scala.util._ // error diff --git a/tests/neg/serialversionuid-not-const.scala b/tests/neg/serialversionuid-not-const.scala index 1b24112dbf70..88e87221d21f 100644 --- a/tests/neg/serialversionuid-not-const.scala +++ b/tests/neg/serialversionuid-not-const.scala @@ -1,4 +1,4 @@ -@SerialVersionUID(13l.toLong) class C1 extends Serializable // error +@SerialVersionUID(13l.toLong) class C1 extends Serializable // OK because toLong is constant-folded @SerialVersionUID(13l) class C2 extends Serializable // OK @SerialVersionUID(13.asInstanceOf[Long]) class C3 extends Serializable // error @SerialVersionUID(Test.bippy) class C4 extends Serializable // error diff --git a/tests/neg/singleton-ops-any.check b/tests/neg/singleton-ops-any.check index caab2b24e037..d26d65806188 100644 --- a/tests/neg/singleton-ops-any.check +++ b/tests/neg/singleton-ops-any.check @@ -30,7 +30,7 @@ 18 | val t04: ToString[Int] = "Int" // error | ^^^^^ | Found: ("Int" : String) - | Required: compiletime.ops.any.ToString[Int] + | Required: scala.compiletime.ops.any.ToString[Int] | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/singleton-ops-any.scala:32:26 ------------------------------------------------- diff --git a/tests/neg/sip54.check b/tests/neg/sip54.check new file mode 100644 index 000000000000..d53687f8ba79 --- /dev/null +++ b/tests/neg/sip54.check @@ -0,0 +1,17 @@ +-- [E049] Reference Error: tests/neg/sip54.scala:12:8 ------------------------------------------------------------------ +12 |val _ = meth(foo)() // error // error + | ^^^^ + | Reference to meth is ambiguous. + | It is both imported by import A._ + | and imported subsequently by import B._ + | + | Hint: This error may arise if extension method `meth` is called as a normal method. 
+ | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/sip54.scala:12:13 ------------------------------------------------------------- +12 |val _ = meth(foo)() // error // error + | ^^^ + | Found: (foo : Foo) + | Required: Bar + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/sip54.scala b/tests/neg/sip54.scala new file mode 100644 index 000000000000..3e092af65d32 --- /dev/null +++ b/tests/neg/sip54.scala @@ -0,0 +1,12 @@ +class Foo +class Bar +object A: + extension (foo: Foo) def meth(): Foo = foo +object B: + extension (bar: Bar) def meth(): Bar = bar + +import A.* +import B.* + +val foo = new Foo +val _ = meth(foo)() // error // error diff --git a/tests/neg/spaces-vs-tabs.check b/tests/neg/spaces-vs-tabs.check index 109503c2d557..f8374618f0fd 100644 --- a/tests/neg/spaces-vs-tabs.check +++ b/tests/neg/spaces-vs-tabs.check @@ -28,9 +28,9 @@ | The start of this line does not match any of the previous indentation widths. | Indentation width of current line : 1 tab, 2 spaces | This falls between previous widths: 1 tab and 1 tab, 4 spaces --- [E129] Potential Issue Warning: tests/neg/spaces-vs-tabs.scala:13:7 ------------------------------------------------- +-- [E190] Potential Issue Warning: tests/neg/spaces-vs-tabs.scala:13:7 ------------------------------------------------- 13 | 1 | ^ - | A pure expression does nothing in statement position; you may be omitting necessary parentheses + | Discarded non-Unit value of type Int. You may want to use `()`. | | longer explanation available when compiling with `-explain` diff --git a/tests/neg/splice-pat.check b/tests/neg/splice-pat.check deleted file mode 100644 index 32af3ad6308e..000000000000 --- a/tests/neg/splice-pat.check +++ /dev/null @@ -1,10 +0,0 @@ --- [E032] Syntax Error: tests/neg/splice-pat.scala:12:16 --------------------------------------------------------------- -12 | case '{ foo(${ // error: pattern expected - | ^ - | pattern expected - | - | longer explanation available when compiling with `-explain` --- [E040] Syntax Error: tests/neg/splice-pat.scala:15:5 ---------------------------------------------------------------- -15 | })} => ??? 
// error - | ^ - | '=>' expected, but ')' found diff --git a/tests/neg-custom-args/fatal-warnings/strict-pattern-bindings-3.2.scala b/tests/neg/strict-pattern-bindings-3.2.scala similarity index 97% rename from tests/neg-custom-args/fatal-warnings/strict-pattern-bindings-3.2.scala rename to tests/neg/strict-pattern-bindings-3.2.scala index 23c8af3f0f19..d7db6cd165e4 100644 --- a/tests/neg-custom-args/fatal-warnings/strict-pattern-bindings-3.2.scala +++ b/tests/neg/strict-pattern-bindings-3.2.scala @@ -1,3 +1,4 @@ +//> using options -Xfatal-warnings // These tests should fail under -Xfatal-warnings with source version source version 3.2 or later import language.`3.2` diff --git a/tests/neg/structural-2.scala b/tests/neg/structural-2.scala new file mode 100644 index 000000000000..3613d6073e34 --- /dev/null +++ b/tests/neg/structural-2.scala @@ -0,0 +1,75 @@ +//> using options -Xfatal-warnings + +import scala.reflect.Selectable.reflectiveSelectable + +package p1 { + +object test123 { + type A = { def a: Int } + def f(a: A): A = a +} + +object structural2 { + type A = { def a: Int } + + type B = { + def b: Int + } + + type AB = A & B + + def f(ab: AB): AB = ab + + f(new { + def a = 43 + def b = 42 + }) +} +} + +package p2 { +object RClose { + type ReflectCloseable = { def close(): Unit } + def withReflectCloseable[T <: ReflectCloseable, R](s: T)(action: T => R): R = + try { + action(s) + } finally { + s.close() + } +} +} + +package p3 { + +object Test { + def idMap[C[_],T](m: { def map[U](f: T => U): C[U] }): C[T] = m.map(t => t) // error: polymorphic refinement method map without matching type in parent Object is no longer allowed // error: Structural access not allowed + + def main(args: Array[String]): Unit = { + idMap(Some(5)) // error: type mismatch: found Some[Int], required Object{map: [U](f: Any => U): Any} + idMap(Responder.constant(5)) // error: type mismatch: found Responder[Int], required Object{map: [U](f: Any => U): Any} + } +} +} +package p4 { + +trait A { self: Any { def p: Any } => + def f(b: => Unit): Unit = {} + f { p } // OK +} +} + +package p5 { +// t2810 +object Test { + val closeable1: { def close(): Unit } = new scala.io.Source { val iter: Iterator[Char] = "".iterator } + val closeable2: { def close(): Unit } = new java.io.Closeable { def close() = {} } +} +} + +package p6 { + + class Refinements { + val y: { val x: T; type T } // error: deprecated warning: illegal forward reference in refinement; now illegal + } + +} diff --git a/tests/neg/supertraits-b.scala b/tests/neg/supertraits-b.scala new file mode 100644 index 000000000000..78854537974e --- /dev/null +++ b/tests/neg/supertraits-b.scala @@ -0,0 +1,35 @@ +//> using options -Xfatal-warnings + +transparent sealed trait TA +transparent sealed trait TB +trait S +case object a extends S, TA, TB +case object b extends S, TA, TB + +object Test: + + def choose0[X](x: X, y: X): X = x + def choose1[X <: TA](x: X, y: X): X = x + def choose2[X <: TB](x: X, y: X): X = x + def choose3[X <: Product](x: X, y: X): X = x + def choose4[X <: TA & TB](x: X, y: X): X = x + + choose0(a, b) match + case _: TA => ??? + case _: TB => ??? // error: unreachable + + choose1(a, b) match + case _: TA => ??? + case _: TB => ??? // error: unreachable + + choose2(a, b) match + case _: TB => ??? + case _: TA => ??? // error: unreachable + + choose3(a, b) match + case _: Product => ??? + case _: TA => ??? // error: unreachable + + choose4(a, b) match + case _: (TA & TB) => ??? + case _: Product => ??? 
// error: unreachable \ No newline at end of file diff --git a/tests/neg/switches.scala b/tests/neg/switches.scala new file mode 100644 index 000000000000..d405f8185706 --- /dev/null +++ b/tests/neg/switches.scala @@ -0,0 +1,102 @@ +//> using options -Xfatal-warnings + +import scala.annotation.switch + +// this is testing not so much how things ought to be but how they are; +// the test is supposed to start failing if the behavior changes at all. +object Other { + val C1 = 'P' // fails: not final + final val C2 = 'Q' // succeeds: singleton type Char('Q') inferred + final val C3: Char = 'R' // fails: type Char specified + final val C4 = '\u000A' // succeeds like C2 but more unicodey +} + +object Main { + def succ1(c: Char) = (c: @switch) match { + case 'A' | 'B' | 'C' => true + case 'd' => true + case 'f' | 'g' => true + case _ => false + } + + def succ2(c: Char) = (c: @switch) match { + case 'A' | 'B' | 'C' => true + case Other.C2 => true + case Other.C4 => true + case _ => false + } + + // has a guard, but since SI-5830 that's ok + // PENDING: #5070 + // def succ_guard(c: Char) = (c: @switch) match { + // case 'A' | 'B' | 'C' => true + // case x if x == 'A' => true + // case _ => false + // } + + // throwing in @unchecked on the next two to make sure + // multiple annotations are processed correctly + + // thinks a val in an object is constant... so naive + def fail1(c: Char) = (c: @switch @unchecked) match { // error: Could not emit switch for @switch annotated match + case 'A' => true + case 'B' => true + case Other.C1 => true + case _ => false + } + + // more naivete + def fail2(c: Char) = (c: @unchecked @switch) match { // error: Could not emit switch for @switch annotated match + case 'A' => true + case 'B' => true + case Other.C3 => true + case _ => false + } + + // guard case done correctly + def succ3(c: Char) = (c: @switch) match { + case 'A' | 'B' | 'C' => true + case x => x == 'A' + } + + // some ints just to mix it up a bit + def succ4(x: Int, y: Int) = ((x + y): @switch) match { + case 1 => 5 + case 2 => 10 + case 3 => 20 + case 4 => 50 + case 5|6|7|8 => 100 + case _ => -1 + } + + def fail3(x: Any) = (x: @switch) match { // error: Could not emit switch for @switch annotated match + case 1 | 2 | 3 => true + case _ => false + } + + def fail4(x: AnyVal) = (x: @switch) match { // error: Could not emit switch for @switch annotated match + case 1 | 2 | 3 => true + case _ => false + } + + case class IntAnyVal(x: Int) extends AnyVal + + val Ten = IntAnyVal(10) + def fail5(x: IntAnyVal) = (x: @switch) match { // error: Could not emit switch for @switch annotated match + case IntAnyVal(1) => 0 + case Ten => 1 + case IntAnyVal(100) => 2 + case IntAnyVal(1000) => 3 + case IntAnyVal(10000) => 4 + } + + // the generated lookupswitch covers only a subset of the cases + final val One = IntAnyVal(1) + def fail6(x: IntAnyVal) = (x: @switch) match { // error: Could not emit switch for @switch annotated match + case One => 0 + case IntAnyVal(10) => 1 + case IntAnyVal(100) => 2 + case IntAnyVal(1000) => 3 + case IntAnyVal(10000) => 4 + } +} diff --git a/tests/neg/symbolic-packages.check b/tests/neg/symbolic-packages.check new file mode 100644 index 000000000000..8e9b7e114829 --- /dev/null +++ b/tests/neg/symbolic-packages.check @@ -0,0 +1,16 @@ +-- Error: tests/neg/symbolic-packages.scala:3:8 ------------------------------------------------------------------------ +3 |package `with spaces` { // error + | ^^^^^^^^^^^^^ + | The package name `with spaces` will be encoded on the classpath, and can 
lead to undefined behaviour. +-- Error: tests/neg/symbolic-packages.scala:7:10 ----------------------------------------------------------------------- +7 |package +.* { // error // error + | ^ + | The package name `*` will be encoded on the classpath, and can lead to undefined behaviour. +-- Error: tests/neg/symbolic-packages.scala:7:8 ------------------------------------------------------------------------ +7 |package +.* { // error // error + | ^ + | The package name `+` will be encoded on the classpath, and can lead to undefined behaviour. +-- Error: tests/neg/symbolic-packages.scala:11:16 ---------------------------------------------------------------------- +11 |package object `mixed_*` { // error + | ^^^^^^^ + | The package name `mixed_*` will be encoded on the classpath, and can lead to undefined behaviour. diff --git a/tests/neg-custom-args/fatal-warnings/symbolic-packages.scala b/tests/neg/symbolic-packages.scala similarity index 80% rename from tests/neg-custom-args/fatal-warnings/symbolic-packages.scala rename to tests/neg/symbolic-packages.scala index 4e8ec2b15a0e..12719d027b4c 100644 --- a/tests/neg-custom-args/fatal-warnings/symbolic-packages.scala +++ b/tests/neg/symbolic-packages.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + package `with spaces` { // error class Foo } diff --git a/tests/neg/syntax-error-recovery.check b/tests/neg/syntax-error-recovery.check index 18d877833d79..83b764c3062c 100644 --- a/tests/neg/syntax-error-recovery.check +++ b/tests/neg/syntax-error-recovery.check @@ -9,7 +9,7 @@ -- [E040] Syntax Error: tests/neg/syntax-error-recovery.scala:19:4 ----------------------------------------------------- 19 | if x == 0 then println(bar) // error | ^^ - | ')' expected, but 'if' found + | ',' or ')' expected, but 'if' found -- [E040] Syntax Error: tests/neg/syntax-error-recovery.scala:23:12 ---------------------------------------------------- 23 | if x < 0) then // error | ^ @@ -25,7 +25,7 @@ -- [E040] Syntax Error: tests/neg/syntax-error-recovery.scala:48:4 ----------------------------------------------------- 48 | if x == 0 then println(bar) // error | ^^ - | ')' expected, but 'if' found + | ',' or ')' expected, but 'if' found -- [E040] Syntax Error: tests/neg/syntax-error-recovery.scala:52:12 ---------------------------------------------------- 52 | if x < 0) then // error | ^ @@ -94,45 +94,9 @@ | Not found: bam | | longer explanation available when compiling with `-explain` --- [E129] Potential Issue Warning: tests/neg/syntax-error-recovery.scala:7:2 ------------------------------------------- -6 | 2 -7 | } - | ^ - | A pure expression does nothing in statement position; you may be omitting necessary parentheses - | - | longer explanation available when compiling with `-explain` --- [E129] Potential Issue Warning: tests/neg/syntax-error-recovery.scala:15:2 ------------------------------------------ -14 | 2 -15 | println(baz) // error - | ^ - | A pure expression does nothing in statement position; you may be omitting necessary parentheses - | - | longer explanation available when compiling with `-explain` --- [E129] Potential Issue Warning: tests/neg/syntax-error-recovery.scala:27:2 ------------------------------------------ -26 | 2 -27 | println(bam) // error - | ^ - | A pure expression does nothing in statement position; you may be omitting necessary parentheses - | - | longer explanation available when compiling with `-explain` --- [E129] Potential Issue Warning: tests/neg/syntax-error-recovery.scala:36:2 
------------------------------------------ -35 | 2 -36 | } - | ^ - | A pure expression does nothing in statement position; you may be omitting necessary parentheses - | - | longer explanation available when compiling with `-explain` --- [E129] Potential Issue Warning: tests/neg/syntax-error-recovery.scala:44:2 ------------------------------------------ -43 | 2 -44 | println(baz) // error - | ^ - | A pure expression does nothing in statement position; you may be omitting necessary parentheses - | - | longer explanation available when compiling with `-explain` --- [E129] Potential Issue Warning: tests/neg/syntax-error-recovery.scala:56:2 ------------------------------------------ -55 | 2 -56 | println(bam) // error - | ^ - | A pure expression does nothing in statement position; you may be omitting necessary parentheses - | - | longer explanation available when compiling with `-explain` +-- Warning: tests/neg/syntax-error-recovery.scala:61:2 ----------------------------------------------------------------- +61 | println(bam) + | ^^^^^^^ + | Alphanumeric method println is not declared infix; it should not be used as infix operator. + | Instead, use method syntax .println(...) or backticked identifier `println`. + | The latter can be rewritten automatically under -rewrite -source 3.4-migration. diff --git a/tests/neg/t11900.check b/tests/neg/t11900.check index 531a1b8417fd..a18cb16c4ddb 100644 --- a/tests/neg/t11900.check +++ b/tests/neg/t11900.check @@ -10,9 +10,7 @@ 52 | println("b"), // error: weird comma | ^ | end of statement expected but ',' found --- [E032] Syntax Error: tests/neg/t11900.scala:64:8 -------------------------------------------------------------------- +-- Error: tests/neg/t11900.scala:64:7 ---------------------------------------------------------------------------------- 64 | _*, // error - | ^ - | pattern expected - | - | longer explanation available when compiling with `-explain` \ No newline at end of file + | ^ + | spread operator `*` not allowed here; must come last in a parameter list diff --git a/tests/neg/t1625.check b/tests/neg/t1625.check index 7e31f49f3729..05ef10b8780d 100644 --- a/tests/neg/t1625.check +++ b/tests/neg/t1625.check @@ -1,8 +1,8 @@ --- [E040] Syntax Error: tests/neg/t1625.scala:2:20 --------------------------------------------------------------------- +-- Error: tests/neg/t1625.scala:2:19 ----------------------------------------------------------------------------------- 2 | def foo(x: String*, y: String*, c: String*): Int // error: an identifier expected, but ',' found // error: an identifier expected, but ',' found - | ^ - | an identifier expected, but ',' found --- [E040] Syntax Error: tests/neg/t1625.scala:2:32 --------------------------------------------------------------------- + | ^ + | spread operator `*` not allowed here; must come last in a parameter list +-- Error: tests/neg/t1625.scala:2:31 ----------------------------------------------------------------------------------- 2 | def foo(x: String*, y: String*, c: String*): Int // error: an identifier expected, but ',' found // error: an identifier expected, but ',' found - | ^ - | an identifier expected, but ',' found + | ^ + | spread operator `*` not allowed here; must come last in a parameter list diff --git a/tests/neg/t1625b.scala b/tests/neg/t1625b.scala index 4c4633c62d23..5382156f812f 100644 --- a/tests/neg/t1625b.scala +++ b/tests/neg/t1625b.scala @@ -1,3 +1,3 @@ object T5 { - case class Abc(x: String*, c: String*) // error: identifier expected but `,` found + case class Abc(x: 
String*, c: String*) // error: varargs parameter must come last } diff --git a/tests/neg-tailcall/t1672b.scala b/tests/neg/t1672b.scala similarity index 93% rename from tests/neg-tailcall/t1672b.scala rename to tests/neg/t1672b.scala index 84ebb6155e6d..b67664615048 100644 --- a/tests/neg-tailcall/t1672b.scala +++ b/tests/neg/t1672b.scala @@ -41,7 +41,6 @@ object Test1772B { } } - // the `liftedTree` local method will prevent a tail call here. @tailrec def bar(i : Int) : Int = { if (i == 0) 0 diff --git a/tests/neg/t3235-minimal.check b/tests/neg/t3235-minimal.check new file mode 100644 index 000000000000..83c287f85bc0 --- /dev/null +++ b/tests/neg/t3235-minimal.check @@ -0,0 +1,16 @@ +-- Error: tests/neg/t3235-minimal.scala:5:21 --------------------------------------------------------------------------- +5 | assert(123456789.round == 123456789) // error + | ^^^^^^^^^^^^^^^ + |method round in class RichInt is deprecated since 2.11.0: this is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value? +-- Error: tests/neg/t3235-minimal.scala:6:16 --------------------------------------------------------------------------- +6 | assert(math.round(123456789) == 123456789) // error + | ^^^^^^^^^^ + |method round in package scala.math is deprecated since 2.11.0: This is an integer type; there is no reason to round it. Perhaps you meant to call this with a floating-point value? +-- Error: tests/neg/t3235-minimal.scala:7:32 --------------------------------------------------------------------------- +7 | assert(1234567890123456789L.round == 1234567890123456789L) // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + |method round in class RichLong is deprecated since 2.11.0: this is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value? +-- Error: tests/neg/t3235-minimal.scala:8:16 --------------------------------------------------------------------------- +8 | assert(math.round(1234567890123456789L) == 1234567890123456789L) // error + | ^^^^^^^^^^ + |method round in package scala.math is deprecated since 2.11.0: This is an integer type; there is no reason to round it. Perhaps you meant to call this with a floating-point value? 
diff --git a/tests/neg-custom-args/deprecation/t3235-minimal.scala b/tests/neg/t3235-minimal.scala similarity index 86% rename from tests/neg-custom-args/deprecation/t3235-minimal.scala rename to tests/neg/t3235-minimal.scala index 3aef0eea2dae..f65ee13f87f5 100644 --- a/tests/neg-custom-args/deprecation/t3235-minimal.scala +++ b/tests/neg/t3235-minimal.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -deprecation + object Test { def main(args: Array[String]): Unit = { assert(123456789.round == 123456789) // error diff --git a/tests/neg-tailcall/t3275.scala b/tests/neg/t3275.scala similarity index 100% rename from tests/neg-tailcall/t3275.scala rename to tests/neg/t3275.scala diff --git a/tests/neg/t5702-neg-bad-and-wild.check b/tests/neg/t5702-neg-bad-and-wild.check index c461b76ea70b..be77cd255a66 100644 --- a/tests/neg/t5702-neg-bad-and-wild.check +++ b/tests/neg/t5702-neg-bad-and-wild.check @@ -14,46 +14,44 @@ 13 | case List(1, _*3:) => // error // error | ^ | an identifier expected, but ')' found --- [E032] Syntax Error: tests/neg/t5702-neg-bad-and-wild.scala:15:18 --------------------------------------------------- +-- Error: tests/neg/t5702-neg-bad-and-wild.scala:15:17 ----------------------------------------------------------------- 15 | case List(x*, 1) => // error: pattern expected - | ^ - | pattern expected - | - | longer explanation available when compiling with `-explain` + | ^ + | spread operator `*` not allowed here; must come last in a parameter list +-- Error: tests/neg/t5702-neg-bad-and-wild.scala:16:16 ----------------------------------------------------------------- +16 | case (1, x*) => // error: bad use of * + | ^ + | bad use of `*` - sequence pattern not allowed here -- [E031] Syntax Error: tests/neg/t5702-neg-bad-and-wild.scala:17:18 --------------------------------------------------- 17 | case (1, x: _*) => // error: bad use of _* (sequence pattern not allowed) | ^ | * can be used only for last argument | | longer explanation available when compiling with `-explain` --- [E032] Syntax Error: tests/neg/t5702-neg-bad-and-wild.scala:23:17 --------------------------------------------------- -23 | val K(ns @ _*, xx) = k // error: pattern expected // error - | ^ - | pattern expected - | - | longer explanation available when compiling with `-explain` +-- Error: tests/neg/t5702-neg-bad-and-wild.scala:23:16 ----------------------------------------------------------------- +23 | val K(ns @ _*, xx) = k // error: pattern expected + | ^ + | spread operator `*` not allowed here; must come last in a parameter list +-- Error: tests/neg/t5702-neg-bad-and-wild.scala:25:14 ----------------------------------------------------------------- +25 | val (b, _ * ) = (5,6) // error: bad use of `*` + | ^ + | bad use of `*` - sequence pattern not allowed here -- [E161] Naming Error: tests/neg/t5702-neg-bad-and-wild.scala:24:10 --------------------------------------------------- 24 | val K(x) = k // error: x is already defined as value x | ^^^^^^^^^^^^ | x is already defined as value x | | Note that overloaded methods must all be defined in the same group of toplevel definitions --- [E006] Not Found Error: tests/neg/t5702-neg-bad-and-wild.scala:12:20 ------------------------------------------------ +-- [E189] Not Found Error: tests/neg/t5702-neg-bad-and-wild.scala:12:20 ------------------------------------------------ 12 | case List(1, _*3,) => // error: pattern expected // error | ^ - | Not found: * + | no pattern match extractor named * was found | | longer explanation available when 
compiling with `-explain` --- [E006] Not Found Error: tests/neg/t5702-neg-bad-and-wild.scala:13:20 ------------------------------------------------ +-- [E189] Not Found Error: tests/neg/t5702-neg-bad-and-wild.scala:13:20 ------------------------------------------------ 13 | case List(1, _*3:) => // error // error | ^ - | Not found: * - | - | longer explanation available when compiling with `-explain` --- [E045] Cyclic Error: tests/neg/t5702-neg-bad-and-wild.scala:23:19 --------------------------------------------------- -23 | val K(ns @ _*, xx) = k // error: pattern expected // error - | ^ - | Recursive value $1$ needs type + | no pattern match extractor named * was found | | longer explanation available when compiling with `-explain` -- Warning: tests/neg/t5702-neg-bad-and-wild.scala:13:22 --------------------------------------------------------------- @@ -71,11 +69,3 @@ | If the narrowing is intentional, this can be communicated by adding `: @unchecked` after the expression, | which may result in a MatchError at runtime. | This patch can be rewritten automatically under -rewrite -source 3.2-migration. --- Warning: tests/neg/t5702-neg-bad-and-wild.scala:25:20 --------------------------------------------------------------- -25 | val (b, _ * ) = (5,6) // ok - | ^^^^^ - | pattern's type Int* does not match the right hand side expression's type Int - | - | If the narrowing is intentional, this can be communicated by adding `: @unchecked` after the expression, - | which may result in a MatchError at runtime. - | This patch can be rewritten automatically under -rewrite -source 3.2-migration. diff --git a/tests/neg/t5702-neg-bad-and-wild.scala b/tests/neg/t5702-neg-bad-and-wild.scala index 95d00c270e89..6031fee93bfc 100644 --- a/tests/neg/t5702-neg-bad-and-wild.scala +++ b/tests/neg/t5702-neg-bad-and-wild.scala @@ -13,16 +13,16 @@ object Test { case List(1, _*3:) => // error // error case List(1, x*) => // ok case List(x*, 1) => // error: pattern expected - case (1, x*) => //ok + case (1, x*) => // error: bad use of * case (1, x: _*) => // error: bad use of _* (sequence pattern not allowed) } // good syntax, bad semantics, detected by typer //gowild.scala:14: error: star patterns must correspond with varargs parameters val K(x @ _*) = k - val K(ns @ _*, xx) = k // error: pattern expected // error + val K(ns @ _*, xx) = k // error: pattern expected val K(x) = k // error: x is already defined as value x - val (b, _ * ) = (5,6) // ok + val (b, _ * ) = (5,6) // error: bad use of `*` // no longer complains //bad-and-wild.scala:15: error: ')' expected but '}' found. 
} diff --git a/tests/neg/t5702-neg-bad-brace.check b/tests/neg/t5702-neg-bad-brace.check index 92e9fe912a92..d305e9272a6e 100644 --- a/tests/neg/t5702-neg-bad-brace.check +++ b/tests/neg/t5702-neg-bad-brace.check @@ -1,9 +1,7 @@ --- [E032] Syntax Error: tests/neg/t5702-neg-bad-brace.scala:8:21 ------------------------------------------------------- +-- Error: tests/neg/t5702-neg-bad-brace.scala:8:20 --------------------------------------------------------------------- 8 | case List(1, _*} => // error: pattern expected - | ^ - | pattern expected - | - | longer explanation available when compiling with `-explain` + | ^ + | spread operator `*` not allowed here; must come last in a parameter list -- [E040] Syntax Error: tests/neg/t5702-neg-bad-brace.scala:11:0 ------------------------------------------------------- 11 |} // error: eof expected, but '}' found |^ diff --git a/tests/neg-custom-args/fatal-warnings/t5830.scala b/tests/neg/t5830.scala similarity index 85% rename from tests/neg-custom-args/fatal-warnings/t5830.scala rename to tests/neg/t5830.scala index 629b345d3737..947b7bac4a22 100644 --- a/tests/neg-custom-args/fatal-warnings/t5830.scala +++ b/tests/neg/t5830.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + import scala.annotation.switch class Test { diff --git a/tests/neg-tailcall/t6574.scala b/tests/neg/t6574.scala similarity index 100% rename from tests/neg-tailcall/t6574.scala rename to tests/neg/t6574.scala diff --git a/tests/neg-tailcall/tailrec-2.scala b/tests/neg/tailrec-2.scala similarity index 100% rename from tests/neg-tailcall/tailrec-2.scala rename to tests/neg/tailrec-2.scala diff --git a/tests/neg-tailcall/tailrec-3.scala b/tests/neg/tailrec-3.scala similarity index 100% rename from tests/neg-tailcall/tailrec-3.scala rename to tests/neg/tailrec-3.scala diff --git a/tests/neg-tailcall/tailrec-and-or.scala b/tests/neg/tailrec-and-or.scala similarity index 100% rename from tests/neg-tailcall/tailrec-and-or.scala rename to tests/neg/tailrec-and-or.scala diff --git a/tests/neg-tailcall/tailrec.scala b/tests/neg/tailrec.scala similarity index 100% rename from tests/neg-tailcall/tailrec.scala rename to tests/neg/tailrec.scala diff --git a/tests/neg/targetName-refine.check b/tests/neg/targetName-refine.check new file mode 100644 index 000000000000..fe0dd71dfb0a --- /dev/null +++ b/tests/neg/targetName-refine.check @@ -0,0 +1,7 @@ +-- [E007] Type Mismatch Error: tests/neg/targetName-refine.scala:7:27 -------------------------------------------------- +7 |val x: T { def f: Int } = C() // error + | ^^^ + | Found: C + | Required: T{def f: Int} + | + | longer explanation available when compiling with `-explain` diff --git a/tests/pos/targetName-refine.scala b/tests/neg/targetName-refine.scala similarity index 76% rename from tests/pos/targetName-refine.scala rename to tests/neg/targetName-refine.scala index eaa02b8b7976..f9b8f0d72740 100644 --- a/tests/pos/targetName-refine.scala +++ b/tests/neg/targetName-refine.scala @@ -4,5 +4,5 @@ trait T: class C extends T: @targetName("f2") def f: Int = 1 -val x: T { def f: Int } = C() +val x: T { def f: Int } = C() // error diff --git a/tests/neg/tupled-function-instances-2.scala b/tests/neg/tupled-function-instances-2.scala new file mode 100644 index 000000000000..1abd3f3a3f54 --- /dev/null +++ b/tests/neg/tupled-function-instances-2.scala @@ -0,0 +1,61 @@ +//> using options -language:experimental.erasedDefinitions + +import scala.util.TupledFunction +object Test { + def main(args: Array[String]): Unit = { + type T + type R + 
+ summon[TupledFunction[(erased T) => R, erased Tuple1[T] => R]] // error // error + summon[TupledFunction[(erased T, T) => R, (erased (T, T)) => R]] // error + summon[TupledFunction[(erased T, T, T) => R,(erased (T, T, T)) => R]] // error + summon[TupledFunction[(erased T, T, T, T) => R,(erased (T, T, T, T)) => R]] // error + summon[TupledFunction[(erased T, T, T, T, T) => R,(erased (T, T, T, T, T)) => R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T)) => R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T)) => R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T)) => R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T)) => R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T)) => R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T)) => R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) => R,(erased (T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T)) => R]] // error + + 
summon[TupledFunction[(erased T) ?=> R, (erased Tuple1[T]) ?=> R]] // error + summon[TupledFunction[(erased T, T) ?=> R, (erased T, T) ?=> R]] // error + summon[TupledFunction[(erased T, T, T) ?=> R, (erased T, T, T) ?=> R]] // error + summon[TupledFunction[(erased T, T, T, T) ?=> R, (erased T, T, T, T) ?=> R]] // error + summon[TupledFunction[(erased T, T, T, T, T) ?=> R, (erased T, T, T, T, T) ?=> R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T) ?=> R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T) ?=> R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T) ?=> R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T) ?=> R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T) ?=> R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // error + summon[TupledFunction[(erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R, (erased T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T) ?=> R]] // 
error + } +} \ No newline at end of file diff --git a/tests/neg-custom-args/fatal-warnings/type-test-paths-2.scala b/tests/neg/type-test-paths-2.scala similarity index 96% rename from tests/neg-custom-args/fatal-warnings/type-test-paths-2.scala rename to tests/neg/type-test-paths-2.scala index 488f704a20bc..4bba2f87416e 100644 --- a/tests/neg-custom-args/fatal-warnings/type-test-paths-2.scala +++ b/tests/neg/type-test-paths-2.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + import scala.reflect.TypeTest trait R { diff --git a/tests/neg-custom-args/fatal-warnings/type-test-paths.scala b/tests/neg/type-test-paths.scala similarity index 94% rename from tests/neg-custom-args/fatal-warnings/type-test-paths.scala rename to tests/neg/type-test-paths.scala index a8cd6334e769..324ed43ba7fa 100644 --- a/tests/neg-custom-args/fatal-warnings/type-test-paths.scala +++ b/tests/neg/type-test-paths.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + import scala.reflect.TypeTest object Test { diff --git a/tests/neg/type-test-syntesize-b.scala b/tests/neg/type-test-syntesize-b.scala new file mode 100644 index 000000000000..b5402eb92c66 --- /dev/null +++ b/tests/neg/type-test-syntesize-b.scala @@ -0,0 +1,31 @@ +//> using options -Xfatal-warnings + +import scala.reflect.TypeTest + +object Test { + def test[S, T](using TypeTest[S, T]): Unit = () + val a: A = ??? + + test[Any, Any] + test[Int, Int] + + test[Int, Any] + test[String, Any] + test[String, AnyRef] + + test[Any, Int] + test[Any, String] + test[Any, Some[?]] + test[Any, Array[Int]] + test[Seq[Int], List[Int]] + + test[Any, Some[Int]] // error + test[Any, a.X] // error + test[a.X, a.Y] // error + +} + +class A { + type X + type Y <: X +} diff --git a/tests/neg-custom-args/typeclass-derivation2.scala b/tests/neg/typeclass-derivation2.scala similarity index 99% rename from tests/neg-custom-args/typeclass-derivation2.scala rename to tests/neg/typeclass-derivation2.scala index be54d7697994..eca11fb326ed 100644 --- a/tests/neg-custom-args/typeclass-derivation2.scala +++ b/tests/neg/typeclass-derivation2.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + import scala.collection.mutable import scala.annotation.tailrec diff --git a/tests/neg-custom-args/fatal-warnings/unchecked-patterns.scala b/tests/neg/unchecked-patterns.scala similarity index 96% rename from tests/neg-custom-args/fatal-warnings/unchecked-patterns.scala rename to tests/neg/unchecked-patterns.scala index 649972d29c64..db304a2f1875 100644 --- a/tests/neg-custom-args/fatal-warnings/unchecked-patterns.scala +++ b/tests/neg/unchecked-patterns.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + object Test { val (y1: Some[Int]) = Some(1): Option[Int] @unchecked // OK diff --git a/tests/neg/uninitialized-3.4.check b/tests/neg/uninitialized-3.4.check new file mode 100644 index 000000000000..1c7b985072d0 --- /dev/null +++ b/tests/neg/uninitialized-3.4.check @@ -0,0 +1,6 @@ +-- Error: tests/neg/uninitialized-3.4.scala:7:15 ----------------------------------------------------------------------- +7 | var a: Int = _ // error: migration warning + | ^ + | `= _` has been deprecated; use `= uninitialized` instead. + | `uninitialized` can be imported with `scala.compiletime.uninitialized`. + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. 
diff --git a/tests/neg/uninitialized-3.4.scala b/tests/neg/uninitialized-3.4.scala new file mode 100644 index 000000000000..174a95ae6c54 --- /dev/null +++ b/tests/neg/uninitialized-3.4.scala @@ -0,0 +1,8 @@ +//> using options -Werror + +import scala.language.`3.4` +import scala.compiletime.uninitialized + +class Foo: + var a: Int = _ // error: migration warning + var b: Int = uninitialized diff --git a/tests/neg/uninitialized-future-migration.scala b/tests/neg/uninitialized-future-migration.scala new file mode 100644 index 000000000000..05f7c6b67f38 --- /dev/null +++ b/tests/neg/uninitialized-future-migration.scala @@ -0,0 +1,8 @@ +//> using options -Werror + +import scala.language.`future-migration` +import scala.compiletime.uninitialized + +class Foo: + var a: Int = _ // error: migration warning + var b: Int = uninitialized diff --git a/tests/neg/uninitialized-future.scala b/tests/neg/uninitialized-future.scala new file mode 100644 index 000000000000..8882b70ed48b --- /dev/null +++ b/tests/neg/uninitialized-future.scala @@ -0,0 +1,6 @@ +import scala.language.future +import scala.compiletime.uninitialized + +class Foo: + var a: Int = _ // error + var b: Int = uninitialized diff --git a/tests/neg/warn-value-discard.check b/tests/neg/warn-value-discard.check index ab6539dd5cd8..ba43c743709f 100644 --- a/tests/neg/warn-value-discard.check +++ b/tests/neg/warn-value-discard.check @@ -1,11 +1,3 @@ --- [E175] Potential Issue Error: tests/neg/warn-value-discard.scala:15:35 ---------------------------------------------- -15 | firstThing().map(_ => secondThing()) // error - | ^^^^^^^^^^^^^ - | discarded non-Unit value of type Either[Failed, Unit] --- [E175] Potential Issue Error: tests/neg/warn-value-discard.scala:18:35 ---------------------------------------------- -18 | firstThing().map(_ => secondThing()) // error - | ^^^^^^^^^^^^^ - | discarded non-Unit value of type Either[Failed, Unit] -- [E175] Potential Issue Error: tests/neg/warn-value-discard.scala:27:36 ---------------------------------------------- 27 | mutable.Set.empty[String].remove("") // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -18,3 +10,11 @@ 59 | mutable.Set.empty[String] += "" // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | discarded non-Unit value of type scala.collection.mutable.Set[String] +-- [E175] Potential Issue Error: tests/neg/warn-value-discard.scala:15:35 ---------------------------------------------- +15 | firstThing().map(_ => secondThing()) // error + | ^^^^^^^^^^^^^ + | discarded non-Unit value of type Either[Failed, Unit] +-- [E175] Potential Issue Error: tests/neg/warn-value-discard.scala:18:35 ---------------------------------------------- +18 | firstThing().map(_ => secondThing()) // error + | ^^^^^^^^^^^^^ + | discarded non-Unit value of type Either[Failed, Unit] diff --git a/tests/neg/warn-value-discard.scala b/tests/neg/warn-value-discard.scala index 149433395cc5..fb01fdeda384 100644 --- a/tests/neg/warn-value-discard.scala +++ b/tests/neg/warn-value-discard.scala @@ -1,4 +1,4 @@ -// scalac: -Wvalue-discard -Werror +//> using options -Wvalue-discard -Werror import scala.util.{Either, Right, Left} import scala.collection.mutable diff --git a/tests/neg-tailcall/while-loops.scala b/tests/neg/while-loops.scala similarity index 100% rename from tests/neg-tailcall/while-loops.scala rename to tests/neg/while-loops.scala diff --git a/tests/neg/wildcard-match.check b/tests/neg/wildcard-match.check new file mode 100644 index 000000000000..d405326c3d2b --- /dev/null +++ b/tests/neg/wildcard-match.check @@ -0,0 
+1,94 @@ +-- [E007] Type Mismatch Error: tests/neg/wildcard-match.scala:31:13 ---------------------------------------------------- +31 | val _: C = a1 // error + | ^^ + | Found: CovElem[Y] + | Required: C + | + | where: Y is a type in method f with bounds <: Cov[C] + | + | + | Note: a match type could not be fully reduced: + | + | trying to reduce CovElem[Y] + | failed since selector Y + | does not uniquely determine parameter a in + | case Cov[a] => a + | The computed bounds for the parameter are: + | a <: C + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/wildcard-match.scala:34:13 ---------------------------------------------------- +34 | val _: C = a2 // error + | ^^ + | Found: ContravElem[Z] + | Required: C + | + | where: Z is a type in method f with bounds <: Contrav[C] + | + | + | Note: a match type could not be fully reduced: + | + | trying to reduce ContravElem[Z] + | failed since selector Z + | does not uniquely determine parameter a in + | case Contrav[a] => a + | The computed bounds for the parameter are: + | a >: C + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/wildcard-match.scala:40:19 ---------------------------------------------------- +40 | val _: List[C] = b1 // error + | ^^ + | Found: CovToList[Y] + | Required: List[C] + | + | where: Y is a type in method f with bounds <: Cov[C] + | + | + | Note: a match type could not be fully reduced: + | + | trying to reduce CovToList[Y] + | failed since selector Y + | does not uniquely determine parameter a in + | case Cov[a] => List[a] + | The computed bounds for the parameter are: + | a <: C + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/wildcard-match.scala:43:19 ---------------------------------------------------- +43 | val _: List[C] = b2 // error + | ^^ + | Found: ContravElem[Z] + | Required: List[C] + | + | where: Z is a type in method f with bounds <: Contrav[C] + | + | + | Note: a match type could not be fully reduced: + | + | trying to reduce ContravElem[Z] + | failed since selector Z + | does not uniquely determine parameter a in + | case Contrav[a] => a + | The computed bounds for the parameter are: + | a >: C + | + | longer explanation available when compiling with `-explain` +-- [E172] Type Error: tests/neg/wildcard-match.scala:61:33 ------------------------------------------------------------- +61 | summon[tuples.length[T2] =:= 3] // error + | ^ + | Cannot prove that shapeless.tuples.length[T2] =:= (3 : Int). + | + | where: T2 is a type in method testShapeless with bounds <: (Int, Int, Int) + | + | + | Note: a match type could not be fully reduced: + | + | trying to reduce shapeless.tuples.length[T2] + | trying to reduce Tuple.Size[shapeless.tuples.to[T2]] + | failed since selector shapeless.tuples.to[T2] + | does not uniquely determine parameters x, xs in + | case x *: xs => scala.compiletime.ops.int.S[Tuple.Size[xs]] + | The computed bounds for the parameters are: + | x <: Int + | xs <: (Int, Int) diff --git a/tests/neg/wildcard-match.scala b/tests/neg/wildcard-match.scala index 326a97485bd2..c220428f4f85 100644 --- a/tests/neg/wildcard-match.scala +++ b/tests/neg/wildcard-match.scala @@ -42,5 +42,20 @@ def f[X <: Box[C], Y <: Cov[C], Z <: Contrav[C]] = def b2: ContravElem[Z] = ??? 
val _: List[C] = b2 // error +// found in shapeless +object shapeless: + trait Monoidal: + type to[_] <: Tuple + type length[m] = Tuple.Size[to[m]] + object tuples extends Monoidal: + type to[t] = t & Tuple +end shapeless + +def testShapeless[T2 <: (Int, Int, Int)](): Unit = + import shapeless.* + + type T1 = (Int, Int, Int) + summon[tuples.length[T1] =:= 3] // OK + summon[tuples.length[T2] =:= 3] // error diff --git a/tests/neg/with-type-operator-future-migration.check b/tests/neg/with-type-operator-future-migration.check new file mode 100644 index 000000000000..e56049880431 --- /dev/null +++ b/tests/neg/with-type-operator-future-migration.check @@ -0,0 +1,7 @@ +-- [E003] Syntax Error: tests/neg/with-type-operator-future-migration.scala:5:13 --------------------------------------- +5 |def foo: Int with String = ??? // error + | ^^^^ + | with as a type operator has been deprecated; use & instead + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/with-type-operator-future-migration.scala b/tests/neg/with-type-operator-future-migration.scala new file mode 100644 index 000000000000..3ed2e3a8f067 --- /dev/null +++ b/tests/neg/with-type-operator-future-migration.scala @@ -0,0 +1,5 @@ +//> using options -Werror + +import scala.language.`future-migration` + +def foo: Int with String = ??? // error diff --git a/tests/neg/with-type-operator-future.scala b/tests/neg/with-type-operator-future.scala new file mode 100644 index 000000000000..3f33ebde8708 --- /dev/null +++ b/tests/neg/with-type-operator-future.scala @@ -0,0 +1,3 @@ +import scala.language.`future` + +def foo: Int with String = ??? // error
diff --git a/tests/neg/xfatalWarnings.scala b/tests/neg/xfatalWarnings.scala new file mode 100644 index 000000000000..3f49e159cbc4 --- /dev/null +++ b/tests/neg/xfatalWarnings.scala @@ -0,0 +1,13 @@ +//> using options -Xfatal-warnings + +object xfatalWarnings { + val opt:Option[String] = Some("test") + + opt match { // error when running with -Xfatal-warnings + case None => + } + + object Test { + while (true) {} // should be ok. no "pure expression does nothing in statement position" issued.
+ } +} diff --git a/tests/neg/yimports-custom/C_2.scala b/tests/neg/yimports-custom/C_2.scala index 6ba25ad2963c..03bb24433962 100644 --- a/tests/neg/yimports-custom/C_2.scala +++ b/tests/neg/yimports-custom/C_2.scala @@ -1,4 +1,4 @@ -// scalac: -Yimports:hello.world.minidef +//> using options -Yimports:hello.world.minidef class C { val v: Numb = Magic diff --git a/tests/neg/yimports-nojava.scala b/tests/neg/yimports-nojava.scala index 35233e37a775..7e5349635207 100644 --- a/tests/neg/yimports-nojava.scala +++ b/tests/neg/yimports-nojava.scala @@ -1,4 +1,4 @@ -// scalac: -Yimports:scala,scala.Predef +//> using options -Yimports:scala,scala.Predef trait T { def f() = println("hello, world!") diff --git a/tests/neg/yimports-nosuch.scala b/tests/neg/yimports-nosuch.scala index 431daf39a180..b02c1c6f662e 100644 --- a/tests/neg/yimports-nosuch.scala +++ b/tests/neg/yimports-nosuch.scala @@ -1,4 +1,4 @@ -// scalac: -Yimports:skala,scala.Predeff +//> using options -Yimports:skala,scala.Predeff // class C // nopos-error diff --git a/tests/neg/yimports-predef.scala b/tests/neg/yimports-predef.scala index 8bfe89b08cd8..ecbe017a9c75 100644 --- a/tests/neg/yimports-predef.scala +++ b/tests/neg/yimports-predef.scala @@ -1,4 +1,4 @@ -// scalac: -Yimports:scala,scala.Predef +//> using options -Yimports:scala,scala.Predef // import Predef.{any2stringadd => _, _} diff --git a/tests/neg/yimports-stable.check b/tests/neg/yimports-stable.check index c5bfd914ae07..6a0b059de908 100644 --- a/tests/neg/yimports-stable.check +++ b/tests/neg/yimports-stable.check @@ -3,12 +3,12 @@ error: bad preamble import hello.world.potions -- [E006] Not Found Error: tests/neg/yimports-stable/C_2.scala:4:9 ----------------------------------------------------- 4 | val v: Numb = magic // error // error | ^^^^ - | Not found: type Numb + | Not found: type Numb - did you mean Null? | | longer explanation available when compiling with `-explain` -- [E006] Not Found Error: tests/neg/yimports-stable/C_2.scala:4:16 ---------------------------------------------------- 4 | val v: Numb = magic // error // error | ^^^^^ - | Not found: magic + | Not found: magic - did you mean main? | | longer explanation available when compiling with `-explain` diff --git a/tests/neg/yimports-stable/C_2.scala b/tests/neg/yimports-stable/C_2.scala index 0b97775f1a01..6cd8c17f6620 100644 --- a/tests/neg/yimports-stable/C_2.scala +++ b/tests/neg/yimports-stable/C_2.scala @@ -1,4 +1,4 @@ -// scalac: -Yimports:scala,scala.Predef,hello.world.potions +//> using options -Yimports:scala,scala.Predef,hello.world.potions // class C { val v: Numb = magic // error // error diff --git a/tests/new/moduletrans.scala b/tests/new/moduletrans.scala deleted file mode 100644 index 51538417ed5d..000000000000 --- a/tests/new/moduletrans.scala +++ /dev/null @@ -1,8 +0,0 @@ -object m1 { - - class m() { - def f() = 5 - } - final val m: m = new m() - -} diff --git a/tests/new/projection.scala b/tests/new/projection.scala deleted file mode 100644 index 766c6f41cdeb..000000000000 --- a/tests/new/projection.scala +++ /dev/null @@ -1,4 +0,0 @@ -class C { type T } -object test { - def x: C#T = ??? 
-} diff --git a/tests/new/test.scala b/tests/new/test.scala index 8aa8f42ac787..e6bfc29fd808 100644 --- a/tests/new/test.scala +++ b/tests/new/test.scala @@ -1,2 +1,2 @@ object Test: - def test = 0 + def f: Any = 1 diff --git a/tests/run-custom-args/tasty-interpreter/InterpretedMain.scala b/tests/old-tasty-interpreter-prototype/InterpretedMain.scala similarity index 100% rename from tests/run-custom-args/tasty-interpreter/InterpretedMain.scala rename to tests/old-tasty-interpreter-prototype/InterpretedMain.scala diff --git a/tests/run-custom-args/tasty-interpreter/Precompiled.scala b/tests/old-tasty-interpreter-prototype/Precompiled.scala similarity index 100% rename from tests/run-custom-args/tasty-interpreter/Precompiled.scala rename to tests/old-tasty-interpreter-prototype/Precompiled.scala diff --git a/tests/run-custom-args/tasty-interpreter/Test.scala b/tests/old-tasty-interpreter-prototype/Test.scala similarity index 100% rename from tests/run-custom-args/tasty-interpreter/Test.scala rename to tests/old-tasty-interpreter-prototype/Test.scala diff --git a/tests/run-custom-args/tasty-interpreter/interpreter/TastyInterpreter.scala b/tests/old-tasty-interpreter-prototype/interpreter/TastyInterpreter.scala similarity index 100% rename from tests/run-custom-args/tasty-interpreter/interpreter/TastyInterpreter.scala rename to tests/old-tasty-interpreter-prototype/interpreter/TastyInterpreter.scala diff --git a/tests/run-custom-args/tasty-interpreter/interpreter/TreeInterpreter.scala b/tests/old-tasty-interpreter-prototype/interpreter/TreeInterpreter.scala similarity index 100% rename from tests/run-custom-args/tasty-interpreter/interpreter/TreeInterpreter.scala rename to tests/old-tasty-interpreter-prototype/interpreter/TreeInterpreter.scala diff --git a/tests/run-custom-args/tasty-interpreter/interpreter/jvm/Interpreter.scala b/tests/old-tasty-interpreter-prototype/interpreter/jvm/Interpreter.scala similarity index 100% rename from tests/run-custom-args/tasty-interpreter/interpreter/jvm/Interpreter.scala rename to tests/old-tasty-interpreter-prototype/interpreter/jvm/Interpreter.scala diff --git a/tests/run-custom-args/tasty-interpreter/interpreter/jvm/JVMReflection.scala b/tests/old-tasty-interpreter-prototype/interpreter/jvm/JVMReflection.scala similarity index 100% rename from tests/run-custom-args/tasty-interpreter/interpreter/jvm/JVMReflection.scala rename to tests/old-tasty-interpreter-prototype/interpreter/jvm/JVMReflection.scala diff --git a/tests/run-custom-args/tasty-interpreter/notes.md b/tests/old-tasty-interpreter-prototype/notes.md similarity index 100% rename from tests/run-custom-args/tasty-interpreter/notes.md rename to tests/old-tasty-interpreter-prototype/notes.md diff --git a/tests/patmat/dotty.scala b/tests/patmat/dotty.scala index 509c0a68b4ba..24495fe86e32 100644 --- a/tests/patmat/dotty.scala +++ b/tests/patmat/dotty.scala @@ -5,7 +5,7 @@ object IntEqualityTestTreeMaker { class Test { def isBelow(n: Int, s: String): Boolean = false - def foo(xs: List[(Int, String)]): Unit = (xs filter (isBelow _).tupled) match { + def foo(xs: List[(Int, String)]): Unit = xs.filter(isBelow.tupled) match { case Nil => case matches => } diff --git a/tests/patmat/exhausting.scala b/tests/patmat/exhausting.scala index 9f17fae9def5..f65efb18cf9c 100644 --- a/tests/patmat/exhausting.scala +++ b/tests/patmat/exhausting.scala @@ -5,7 +5,7 @@ object Test { case object Bar3 extends Foo[AnyRef] def ex1[T](xs: List[T]) = xs match { - case ys: List[_] => "ok" + case ys: List[?] 
=> "ok" } def ex2[T](xx: (Foo[T], Foo[T])) = xx match { case (Bar1, Bar1) => () @@ -14,7 +14,7 @@ object Test { case (_, Bar2) => () } def ex3[T](xx: (Foo[T], Foo[T])) = xx match { - case (_: Foo[_], _: Foo[_]) => () + case (_: Foo[?], _: Foo[?]) => () } // fails for: ::(_, Nil), ::(_, ::(_, ::(_, _))), ... diff --git a/tests/patmat/exhaustive_heuristics.scala b/tests/patmat/exhaustive_heuristics.scala index 7d682f6aa457..297900510b2a 100644 --- a/tests/patmat/exhaustive_heuristics.scala +++ b/tests/patmat/exhaustive_heuristics.scala @@ -18,8 +18,8 @@ object Test { // well, in truth, we do rewrite List() to Nil, but otherwise we do nothing // the full rewrite List(a, b) to a :: b :: Nil, for example is planned (but not sure it's a good idea) List(true, false) match { - case List(_, _, _:_*) => - case List(node, _:_*) => + case List(_, _, _*) => + case List(node, _*) => case Nil => } diff --git a/tests/patmat/gadt.scala b/tests/patmat/gadt.scala index 0541ed61f95b..484dac39a973 100644 --- a/tests/patmat/gadt.scala +++ b/tests/patmat/gadt.scala @@ -35,14 +35,14 @@ object Test { // case _: Or => true } - def foo4a(x: Expr[_]) = x match { + def foo4a(x: Expr[?]) = x match { case _: IntLit => true case _: Sum => true case _: BooleanLit => true case _: Or => true } - def foo4b(x: Expr[_]) = x match { + def foo4b(x: Expr[?]) = x match { case _: Sum => true case _: Or => true } diff --git a/tests/patmat/gadt2.scala b/tests/patmat/gadt2.scala index f6a3978c773c..feb096b1d209 100644 --- a/tests/patmat/gadt2.scala +++ b/tests/patmat/gadt2.scala @@ -4,12 +4,12 @@ case class Succ[T]() extends Nat[T] // +N is incorrect, as in `foo` we can have `N = Zero | Succ[Zero]`, // then it's correct for exhaustivity check to produce two warnings. -sealed trait Vect[N <: Nat[_], +T] +sealed trait Vect[N <: Nat[?], +T] case class VN[T]() extends Vect[Zero, T] -case class VC[T, N <: Nat[_]](x: T, xs: Vect[N, T]) extends Vect[Succ[N], T] +case class VC[T, N <: Nat[?]](x: T, xs: Vect[N, T]) extends Vect[Succ[N], T] object Test { - def foo[N <: Nat[_], A, B](v1: Vect[N, A], v2: Vect[N, B]) = (v1, v2) match { + def foo[N <: Nat[?], A, B](v1: Vect[N, A], v2: Vect[N, B]) = (v1, v2) match { case (VN(), VN()) => 1 case (VC(x, xs), VC(y, ys)) => 2 } diff --git a/tests/patmat/gadt4.scala b/tests/patmat/gadt4.scala index d5212be2d406..069d0f8be7c3 100644 --- a/tests/patmat/gadt4.scala +++ b/tests/patmat/gadt4.scala @@ -4,12 +4,12 @@ case class Succ[T]() extends Nat[T] // +N is incorrect, as in `foo` we can have `N = Zero | Succ[Zero]`, // then it's correct for exhaustivity check to produce two warnings. -sealed trait Vect[+N <: Nat[_], +T] +sealed trait Vect[+N <: Nat[?], +T] case class VN[T]() extends Vect[Zero, T] -case class VC[T, N <: Nat[_]](x: T, xs: Vect[N, T]) extends Vect[Succ[N], T] +case class VC[T, N <: Nat[?]](x: T, xs: Vect[N, T]) extends Vect[Succ[N], T] object Test { - def foo[N <: Nat[_], A, B](v1: Vect[N, A], v2: Vect[N, B]) = (v1, v2) match { + def foo[N <: Nat[?], A, B](v1: Vect[N, A], v2: Vect[N, B]) = (v1, v2) match { case (VN(), VN()) => 1 case (VC(x, xs), VC(y, ys)) => 2 } diff --git a/tests/patmat/gadt5.scala b/tests/patmat/gadt5.scala index 8cc9e77f1cdd..d057ef5230f0 100644 --- a/tests/patmat/gadt5.scala +++ b/tests/patmat/gadt5.scala @@ -15,12 +15,12 @@ object Try1 { case class Succ[T](n: Nat[T]) extends Nat[TSucc[T]] //We can index Vect with the types of value-level Nat, but this is a bit overkill. Still, no warnings. 
- sealed trait Vect[N <: Nat[_], +T] + sealed trait Vect[N <: Nat[?], +T] case class VN[T]() extends Vect[Zero, T] - case class VC[T, N <: Nat[_]](x: T, xs: Vect[N, T]) extends Vect[Succ[N], T] + case class VC[T, N <: Nat[?]](x: T, xs: Vect[N, T]) extends Vect[Succ[N], T] object Test { - def foo[N <: Nat[_], A, B](v1: Vect[N, A], v2: Vect[N, B]) = + def foo[N <: Nat[?], A, B](v1: Vect[N, A], v2: Vect[N, B]) = (v1, v2) match { case (VN(), VN()) => 1 case (VC(x, xs), VC(y, ys)) => 2 @@ -65,7 +65,7 @@ object Try3 { case (VC(x, xs), VC(y, ys)) => 2 } //a call-site which would cause a MatchError (maybe that error should be tested) - def bar = foo[TZero | TSucc[_], Int, String](VN(), VC("", VN())) + def bar = foo[TZero | TSucc[?], Int, String](VN(), VC("", VN())) } } diff --git a/tests/patmat/i10174b.scala b/tests/patmat/i10174b.scala index 875e19e8f037..96e0a1a216c4 100644 --- a/tests/patmat/i10174b.scala +++ b/tests/patmat/i10174b.scala @@ -1,4 +1,4 @@ -// scalac: -Ycheck-all-patmat +//> using options -Ycheck-all-patmat def foo(x: Int): Unit = x match { case 1 => diff --git a/tests/patmat/i12241.scala b/tests/patmat/i12241.scala index 4f61027e2f65..fe21eab4f3c3 100644 --- a/tests/patmat/i12241.scala +++ b/tests/patmat/i12241.scala @@ -26,7 +26,7 @@ object EndpointInput { object Test extends App { import EndpointInput._ - def compare(left: EndpointInput[_], right: EndpointInput[_]): Boolean = + def compare(left: EndpointInput[?], right: EndpointInput[?]): Boolean = (left, right) match { case (Pair(), Pair()) => true case (MappedPair(), MappedPair()) => true @@ -50,7 +50,7 @@ object Test extends App { case (_, _) => false } - def compare2(left: EndpointInput[_], right: EndpointInput[_]): Boolean = + def compare2(left: EndpointInput[?], right: EndpointInput[?]): Boolean = (left, right) match { case (Pair(), Pair()) => true case (MappedPair(), MappedPair()) => true diff --git a/tests/patmat/i12805-fallout.scala b/tests/patmat/i12805-fallout.scala index b598b36159ea..f460c91f080c 100644 --- a/tests/patmat/i12805-fallout.scala +++ b/tests/patmat/i12805-fallout.scala @@ -1,4 +1,5 @@ import scala.annotation.unchecked.uncheckedVariance +import scala.compiletime.uninitialized type Untyped = Null @@ -7,7 +8,7 @@ class Type abstract class Tree[-T >: Untyped] { type ThisTree[T >: Untyped] <: Tree[T] - protected var myTpe: T @uncheckedVariance = _ + protected var myTpe: T @uncheckedVariance = uninitialized def withType(tpe: Type): ThisTree[Type] = { val tree = this.asInstanceOf[ThisTree[Type]] @@ -24,7 +25,7 @@ case class CaseDef[-T >: Untyped]() extends Tree[T] def test[T >: Untyped](tree: Tree[T], tp: Type) = tree.withType(tp) match { case Ident() => 1 case DefDef() => 2 - case _: Inlined[_] => 3 + case _: Inlined[?] 
=> 3 case CaseDef() => 4 case _ => 5 } diff --git a/tests/patmat/i14407.dupe.scala b/tests/patmat/i14407.dupe.scala index acc019e41e8a..1d742315fdd4 100644 --- a/tests/patmat/i14407.dupe.scala +++ b/tests/patmat/i14407.dupe.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror @main def Test = val it: Int = 42 42L match diff --git a/tests/patmat/i14407.scala b/tests/patmat/i14407.scala index a1f710bd6acb..ec95c5c014de 100644 --- a/tests/patmat/i14407.scala +++ b/tests/patmat/i14407.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror @main def Test = val it: Int = 42 42L match diff --git a/tests/patmat/i17184.scala b/tests/patmat/i17184.scala new file mode 100644 index 000000000000..9c04ade63510 --- /dev/null +++ b/tests/patmat/i17184.scala @@ -0,0 +1,9 @@ +class Foo +trait Bar: + val foo : Int + val f : Option[foo.type] = Some(foo) + + def g : Boolean = + f match + case None => false + case Some(_) => true diff --git a/tests/patmat/i18118.check b/tests/patmat/i18118.check new file mode 100644 index 000000000000..8861eb273fb9 --- /dev/null +++ b/tests/patmat/i18118.check @@ -0,0 +1,4 @@ +12: Pattern Match +21: Pattern Match +32: Pattern Match +41: Pattern Match diff --git a/tests/patmat/i18118.scala b/tests/patmat/i18118.scala new file mode 100644 index 000000000000..fc889d4d49b3 --- /dev/null +++ b/tests/patmat/i18118.scala @@ -0,0 +1,41 @@ +//> using options -Werror + +object O1: + sealed trait A + case class B() extends A + case class C() extends A + + + def bigMatch(x: A) = x match + case B() => + case C() => + case _ => // error + +object O2: + sealed trait A + case class B() extends A + + + def bigMatch(x: A) = x match + case B() => + case _ => // error // was: no "unreachable but for null" warning + +object O3: + sealed trait A + case class B() extends A + case class C() extends A + + + def bigMatch(x: A) = x match + case _: B => + case _: C => + case _ => // error + +object O4: + sealed trait A + case class B() extends A + + + def bigMatch(x: A) = x match + case _: B => + case _ => // error diff --git a/tests/patmat/i18507.scala b/tests/patmat/i18507.scala new file mode 100644 index 000000000000..45a9eaa3572b --- /dev/null +++ b/tests/patmat/i18507.scala @@ -0,0 +1,6 @@ +import scala.quoted.* + +trait DFValOf[T] + +def calcWidth(x: List[Type[?]])(using q: Quotes): Unit = + x.collect { case '[DFValOf[t]] => ???} diff --git a/tests/patmat/i4030.scala b/tests/patmat/i4030.scala index c0c7a76eb813..75845bb5e945 100644 --- a/tests/patmat/i4030.scala +++ b/tests/patmat/i4030.scala @@ -6,9 +6,9 @@ case class C4[X, Y]() extends Root[(X => X)|(Y => Y)|(X => Y)] object TestGADT { - def f[A <: Seq[_], B, Foo >: A => B](v: Root[Foo], u: Root[Foo]) = (v, u) match { + def f[A <: Seq[?], B, Foo >: A => B](v: Root[Foo], u: Root[Foo]) = (v, u) match { case (C3(), C3()) => } // The following line no longer type checks - // f(C3[Seq[_], Long](), C4[Seq[_], Long]()) + // f(C3[Seq[?], Long](), C4[Seq[?], Long]()) } diff --git a/tests/patmat/i6197c.scala b/tests/patmat/i6197c.scala index 81142ebaac3c..4c8ec65f10f9 100644 --- a/tests/patmat/i6197c.scala +++ b/tests/patmat/i6197c.scala @@ -1,5 +1,5 @@ def foo(x: Option[Any]) = x match { - case _: Some[Some[_]] => - case _: Some[_] => // unreachable + case _: Some[Some[?]] => + case _: Some[?] 
=> // unreachable case None => } \ No newline at end of file diff --git a/tests/patmat/i6197d.scala b/tests/patmat/i6197d.scala index 419c6e7ec2fa..c09315d4a073 100644 --- a/tests/patmat/i6197d.scala +++ b/tests/patmat/i6197d.scala @@ -1,8 +1,8 @@ -// scalac: -Ycheck-all-patmat +//> using options -Ycheck-all-patmat def foo(x: Array[String]) = x match { - case _: Array[_] => + case _: Array[?] => } def bar(x: Array[String]) = x match { - case _: Array[_ <: Int] => + case _: Array[? <: Int] => } diff --git a/tests/patmat/i6255b.scala b/tests/patmat/i6255b.scala index 02fba4ad9100..3982c90a91f5 100644 --- a/tests/patmat/i6255b.scala +++ b/tests/patmat/i6255b.scala @@ -1,4 +1,4 @@ -// scalac: -Ycheck-all-patmat +//> using options -Ycheck-all-patmat class Foo { def foo(x: quoted.Expr[Int])(using scala.quoted.Quotes): Unit = x match { case '{ 1 } => diff --git a/tests/patmat/patmat-indent.scala b/tests/patmat/patmat-indent.scala index f62d704f8ca0..4bf36d2e4bb7 100644 --- a/tests/patmat/patmat-indent.scala +++ b/tests/patmat/patmat-indent.scala @@ -1,4 +1,4 @@ -// scalac: -Ycheck-all-patmat +//> using options -Ycheck-all-patmat object Test { val Nil: scala.collection.immutable.Nil.type = scala.collection.immutable.Nil val X = 5 diff --git a/tests/patmat/t10502.scala b/tests/patmat/t10502.scala index 71066fdb7ae8..8efa5ab5fb26 100644 --- a/tests/patmat/t10502.scala +++ b/tests/patmat/t10502.scala @@ -1,4 +1,4 @@ -// scalac: -Ycheck-all-patmat +//> using options -Ycheck-all-patmat object Perhaps { def unapply[A](oa: Option[A]): Some[Option[A]] = Some(oa) diff --git a/tests/patmat/t11620b.scala b/tests/patmat/t11620b.scala index a71f9ee4ade5..e6084619c0e0 100644 --- a/tests/patmat/t11620b.scala +++ b/tests/patmat/t11620b.scala @@ -9,7 +9,7 @@ import Length.* case class Indent[T <: Length](length: T) -def withIndent[T <: Length](indent: => Indent[_]): Unit = +def withIndent[T <: Length](indent: => Indent[?]): Unit = indent match { case Indent(Num(0)) => println("this") case x => println(x) // "unreachable" diff --git a/tests/patmat/t2425.scala b/tests/patmat/t2425.scala index 477d5467aab3..8d925fb34918 100644 --- a/tests/patmat/t2425.scala +++ b/tests/patmat/t2425.scala @@ -3,8 +3,8 @@ class D extends B object Test extends App { def foo[T](bar: T) = { bar match { - case _: Array[Array[_]] => println("array 2d") - case _: Array[_] => println("array 1d") + case _: Array[Array[?]] => println("array 2d") + case _: Array[?] => println("array 1d") case _ => println("something else") } } diff --git a/tests/patmat/t3097.scala b/tests/patmat/t3097.scala index 3ff61b3c7b87..77f65d7e87bd 100644 --- a/tests/patmat/t3097.scala +++ b/tests/patmat/t3097.scala @@ -1,7 +1,7 @@ sealed trait ISimpleValue sealed trait IListValue extends ISimpleValue { - def items: List[IAtomicValue[_]] + def items: List[IAtomicValue[?]] } sealed trait IAtomicValue[O] extends ISimpleValue { @@ -13,7 +13,7 @@ sealed trait IAbstractDoubleValue[O] extends IAtomicValue[O] { sealed trait IDoubleValue extends IAbstractDoubleValue[Double] -case class ListValue(val items: List[IAtomicValue[_]]) extends IListValue +case class ListValue(val items: List[IAtomicValue[?]]) extends IListValue class DoubleValue(val data: Double) extends IDoubleValue { def asDouble = data @@ -28,7 +28,7 @@ object Test { val v: ISimpleValue = new DoubleValue(1) v match { case m: IListValue => println("list") - case a: IAtomicValue[_] => println("atomic") + case a: IAtomicValue[?] 
=> println("atomic") } } diff --git a/tests/patmat/t3163.scala b/tests/patmat/t3163.scala index 19aa6fe93ca8..6b7fe8e3ca4a 100644 --- a/tests/patmat/t3163.scala +++ b/tests/patmat/t3163.scala @@ -1,4 +1,4 @@ -// scalac: -Ycheck-all-patmat +//> using options -Ycheck-all-patmat object Test { def foo(x : AnyVal) = x match {case b : Boolean => "It's a bool"} } diff --git a/tests/patmat/t4526.scala b/tests/patmat/t4526.scala index 016f3782b65b..d29ae4ce97d7 100644 --- a/tests/patmat/t4526.scala +++ b/tests/patmat/t4526.scala @@ -1,4 +1,4 @@ -// scalac: -Ycheck-all-patmat +//> using options -Ycheck-all-patmat object Test{ def foo(a: Int) = a match { case 5 => "Five!" diff --git a/tests/patmat/t4661b.scala b/tests/patmat/t4661b.scala index f2329020cb66..c442fbe5b2a2 100644 --- a/tests/patmat/t4661b.scala +++ b/tests/patmat/t4661b.scala @@ -1,4 +1,4 @@ -// scalac: -Ycheck-all-patmat +//> using options -Ycheck-all-patmat class C { trait Foo class One extends Foo diff --git a/tests/patmat/t6450.scala b/tests/patmat/t6450.scala index 92194c2e48ab..559fc438d368 100644 --- a/tests/patmat/t6450.scala +++ b/tests/patmat/t6450.scala @@ -2,7 +2,7 @@ sealed abstract class FoundNode[T] case class A[T](x: T) extends FoundNode[T] object Foo { - val v: (Some[_], FoundNode[_]) = (???, ???) + val v: (Some[?], FoundNode[?]) = (???, ???) v match { case (x: Some[t], _) => } diff --git a/tests/patmat/t9351.scala b/tests/patmat/t9351.scala index b43fa948ce87..8d5a939d1c2c 100644 --- a/tests/patmat/t9351.scala +++ b/tests/patmat/t9351.scala @@ -1,4 +1,4 @@ -// scalac: -Ycheck-all-patmat +//> using options -Ycheck-all-patmat trait A {} case object B extends A {} case object C extends A {} diff --git a/tests/patmat/t9779.scala b/tests/patmat/t9779.scala index 9c418b0b1dc5..0f272b586507 100644 --- a/tests/patmat/t9779.scala +++ b/tests/patmat/t9779.scala @@ -7,7 +7,7 @@ trait Elems { } class BadMatch[A <: Elems](a: A) { - private def toLuaValue(eX: a.Elem[_]): String = eX match { + private def toLuaValue(eX: a.Elem[?]): String = eX match { case a.UnitElement => "" // type mismatch } } \ No newline at end of file diff --git a/tests/patmat/t9809.scala b/tests/patmat/t9809.scala index 72ff23b11e51..180bfe648dc0 100644 --- a/tests/patmat/t9809.scala +++ b/tests/patmat/t9809.scala @@ -1,4 +1,4 @@ -// scalac: -Ycheck-all-patmat +//> using options -Ycheck-all-patmat object Example { val op1: (Any, Any) => Unit = { case (_, b: Int) => diff --git a/tests/pending/neg/i16451.check b/tests/pending/neg/i16451.check deleted file mode 100644 index e53085e8eafa..000000000000 --- a/tests/pending/neg/i16451.check +++ /dev/null @@ -1,24 +0,0 @@ --- Error: tests/neg/i16451.scala:13:9 ---------------------------------------------------------------------------------- -13 | case x: Wrapper[Color.Red.type] => Some(x) // error - | ^ - |the type test for Wrapper[(Color.Red : Color)] cannot be checked at runtime because its type arguments can't be determined from Wrapper[Color] --- Error: tests/neg/i16451.scala:21:9 ---------------------------------------------------------------------------------- -21 | case x: Wrapper[Color.Red.type] => Some(x) // error - | ^ - |the type test for Wrapper[(Color.Red : Color)] cannot be checked at runtime because its type arguments can't be determined from Any --- Error: tests/neg/i16451.scala:25:9 ---------------------------------------------------------------------------------- -25 | case x: Wrapper[Color.Red.type] => Some(x) // error - | ^ - |the type test for Wrapper[(Color.Red : Color)] cannot be checked at 
runtime because its type arguments can't be determined from Wrapper[Color] --- Error: tests/neg/i16451.scala:29:9 ---------------------------------------------------------------------------------- -29 | case x: Wrapper[Color.Red.type] => Some(x) // error - | ^ - |the type test for Wrapper[(Color.Red : Color)] cannot be checked at runtime because its type arguments can't be determined from A1 --- Error: tests/neg/i16451.scala:34:11 --------------------------------------------------------------------------------- -34 | case x: Wrapper[Color.Red.type] => x // error - | ^ - |the type test for Wrapper[(Color.Red : Color)] cannot be checked at runtime because its type arguments can't be determined from Wrapper[Color] --- Error: tests/neg/i16451.scala:39:11 --------------------------------------------------------------------------------- -39 | case x: Wrapper[Color.Red.type] => x // error - | ^ - |the type test for Wrapper[(Color.Red : Color)] cannot be checked at runtime because its type arguments can't be determined from Wrapper[Color] diff --git a/tests/pending/neg/i16451.scala b/tests/pending/neg/i16451.scala index 49997d2bcf92..e69de29bb2d1 100644 --- a/tests/pending/neg/i16451.scala +++ b/tests/pending/neg/i16451.scala @@ -1,40 +0,0 @@ -// scalac: -Werror -enum Color: - case Red, Green - -case class Wrapper[A](value: A) - -object Test: - def test_correct(x: Wrapper[Color]): Option[Wrapper[Color.Red.type]] = x match - case x: Wrapper[Color.Red.type] => Some(x) // error - case null => None - - def test_different(x: Wrapper[Color]): Option[Wrapper[Color]] = x match - case x @ Wrapper(_: Color.Red.type) => Some(x) - case x @ Wrapper(_: Color.Green.type) => None - - def test_any(x: Any): Option[Wrapper[Color.Red.type]] = x match - case x: Wrapper[Color.Red.type] => Some(x) // error - case _ => None - - def test_wrong(x: Wrapper[Color]): Option[Wrapper[Color.Red.type]] = x match - case x: Wrapper[Color.Red.type] => Some(x) // error - case null => None - - def t2[A1 <: Wrapper[Color]](x: A1): Option[Wrapper[Color.Red.type]] = x match - case x: Wrapper[Color.Red.type] => Some(x) // error - case null => None - - def test_wrong_seq(xs: Seq[Wrapper[Color]]): Seq[Wrapper[Color.Red.type]] = - xs.collect { - case x: Wrapper[Color.Red.type] => x // error - } - - def test_wrong_seq2(xs: Seq[Wrapper[Color]]): Seq[Wrapper[Color.Red.type]] = - xs.collect { x => x match - case x: Wrapper[Color.Red.type] => x // error - } - - def main(args: Array[String]): Unit = - println(test_wrong_seq(Seq(Wrapper(Color.Red), Wrapper(Color.Green)))) - // outputs: List(Wrapper(Red), Wrapper(Green)) diff --git a/tests/pending/neg/yimports-custom-b/C_2.scala b/tests/pending/neg/yimports-custom-b/C_2.scala index 8da798e80b0d..8a7771d192c9 100644 --- a/tests/pending/neg/yimports-custom-b/C_2.scala +++ b/tests/pending/neg/yimports-custom-b/C_2.scala @@ -1,4 +1,4 @@ -// scalac: -Yimports:hello.world.minidef +//> using options -Yimports:hello.world.minidef import hello.{world => hw} import hw.minidef.{Magic => Answer} diff --git a/tests/pending/neg/yimports-custom-b/minidef_1.scala b/tests/pending/neg/yimports-custom-b/minidef_1.scala index befc137b6ab6..78d2f3c03bfc 100644 --- a/tests/pending/neg/yimports-custom-b/minidef_1.scala +++ b/tests/pending/neg/yimports-custom-b/minidef_1.scala @@ -1,4 +1,4 @@ -// scalac: -Yimports:scala +//> using options -Yimports:scala package hello.world diff --git a/tests/pending/neg/yimports-masked/C_2.scala b/tests/pending/neg/yimports-masked/C_2.scala index 1b6c736bad7b..acd9d953a125 100644 --- 
a/tests/pending/neg/yimports-masked/C_2.scala +++ b/tests/pending/neg/yimports-masked/C_2.scala @@ -1,4 +1,4 @@ -// scalac: -Yimports:scala,hello.world.minidef +//> using options -Yimports:scala,hello.world.minidef // import at top level or top of package disables implicit import. // the import can appear at any statement position, here, end of package. diff --git a/tests/pos-custom-args/captures/buffers.scala b/tests/pos-custom-args/captures/buffers.scala new file mode 100644 index 000000000000..7fcf96291ccb --- /dev/null +++ b/tests/pos-custom-args/captures/buffers.scala @@ -0,0 +1,30 @@ +import reflect.ClassTag + +class Buffer[A] + +class ArrayBuffer[A: ClassTag] extends Buffer[A]: + var elems: Array[A] = new Array[A](10) + def add(x: A): this.type = ??? + def at(i: Int): A = ??? + +class ArrayBufferBAD[A: ClassTag] extends Buffer[A]: + var elems: Array[A] = new Array[A](10) + def add(x: A): this.type = ??? + def at(i: Int): A = ??? + +object ArrayBuffer: + def make[A: ClassTag](xs: A*) = new ArrayBuffer: + elems = xs.toArray + def apply[A: ClassTag](xs: A*) = new ArrayBuffer: + elems = xs.toArray // ok + +class EncapsArray[A: ClassTag]: + val x: Array[A] = new Array[A](10) + + + + + + + + diff --git a/tests/pos-custom-args/captures/byname.scala b/tests/pos-custom-args/captures/byname.scala index efd76618469d..761496185a0d 100644 --- a/tests/pos-custom-args/captures/byname.scala +++ b/tests/pos-custom-args/captures/byname.scala @@ -1,4 +1,5 @@ import annotation.retainsByName +import caps.cap class CC type Cap = CC^ diff --git a/tests/pos-custom-args/captures/bynamefun.scala b/tests/pos-custom-args/captures/bynamefun.scala index 86bad201ffc3..414f0c46c42f 100644 --- a/tests/pos-custom-args/captures/bynamefun.scala +++ b/tests/pos-custom-args/captures/bynamefun.scala @@ -1,11 +1,14 @@ object test: class Plan(elem: Plan) object SomePlan extends Plan(???) + type PP = (-> Plan) -> Plan def f1(expr: (-> Plan) -> Plan): Plan = expr(SomePlan) f1 { onf => Plan(onf) } def f2(expr: (=> Plan) -> Plan): Plan = ??? f2 { onf => Plan(onf) } def f3(expr: (-> Plan) => Plan): Plan = ??? - f1 { onf => Plan(onf) } + f3 { onf => Plan(onf) } def f4(expr: (=> Plan) => Plan): Plan = ??? - f2 { onf => Plan(onf) } + f4 { onf => Plan(onf) } + def f5(expr: PP): Plan = expr(SomePlan) + f5 { onf => Plan(onf) } \ No newline at end of file diff --git a/tests/pos-custom-args/captures/capt1.scala b/tests/pos-custom-args/captures/capt1.scala index 8d2285f1fa50..e3f5c20e724e 100644 --- a/tests/pos-custom-args/captures/capt1.scala +++ b/tests/pos-custom-args/captures/capt1.scala @@ -3,7 +3,7 @@ type Cap = C^ def f1(c: Cap): () ->{c} c.type = () => c // ok def f2: Int = - val g: Boolean ->{cap} Int = ??? + val g: Boolean => Int = ??? 
val x = g(true) x diff --git a/tests/pos-custom-args/captures/cc-setup-impure-classes.scala b/tests/pos-custom-args/captures/cc-setup-impure-classes.scala new file mode 100644 index 000000000000..04dfb665b6d4 --- /dev/null +++ b/tests/pos-custom-args/captures/cc-setup-impure-classes.scala @@ -0,0 +1,6 @@ +import language.experimental.captureChecking + +trait Resource +def id[X](x: X): x.type = x +def foo[M <: Resource](r: M^): Unit = id(r) // was error, should be ok +def bar[M](r: M^): Unit = id(r) // ok diff --git a/tests/pos-custom-args/captures/cc-this.scala b/tests/pos-custom-args/captures/cc-this.scala index 2124ee494041..12c62e99d186 100644 --- a/tests/pos-custom-args/captures/cc-this.scala +++ b/tests/pos-custom-args/captures/cc-this.scala @@ -14,4 +14,5 @@ def test(using Cap) = def c1 = new C(f) def c2 = c1 def c3 = c2.y + val c4: C^ = c3 val _ = c3: C^ diff --git a/tests/pos-custom-args/captures/colltest.scala b/tests/pos-custom-args/captures/colltest.scala new file mode 100644 index 000000000000..ab3ac437db9f --- /dev/null +++ b/tests/pos-custom-args/captures/colltest.scala @@ -0,0 +1,34 @@ +// Showing a problem with recursive references +object CollectionStrawMan5 { + + /** Base trait for generic collections */ + trait Iterable[+A] extends IterableLike[A] { + this: Iterable[A]^ => + def iterator: Iterator[A]^{this} + def coll: Iterable[A]^{this} = this + } + + trait IterableLike[+A]: + this: IterableLike[A]^ => + def coll: Iterable[A]^{this} + def partition(p: A => Boolean): Unit = + val pn = Partition(coll, p) + () + + /** Concrete collection type: View */ + trait View[+A] extends Iterable[A] with IterableLike[A] { + this: View[A]^ => + } + + case class Partition[A](val underlying: Iterable[A]^, p: A => Boolean) { + self: Partition[A]^{underlying, p} => + + class Partitioned(expected: Boolean) extends View[A]: + this: Partitioned^{self} => + def iterator: Iterator[A]^{this} = + underlying.iterator.filter((x: A) => p(x) == expected) + + val left: Partitioned^{self} = Partitioned(true) + val right: Partitioned^{self} = Partitioned(false) + } +} \ No newline at end of file diff --git a/tests/pos-custom-args/captures/concat-iterator.scala b/tests/pos-custom-args/captures/concat-iterator.scala new file mode 100644 index 000000000000..828b8b3b2657 --- /dev/null +++ b/tests/pos-custom-args/captures/concat-iterator.scala @@ -0,0 +1,17 @@ +package test + +trait IOnce[A]: + self: I[A]^ => + +trait I[+A]: + self: I[A]^ => + + def concat[B >: A](xs: => IOnce[B]^): I[B]^{this, xs} = new I.ConcatI[B](self).concat(xs) + +object I: + private final class ConcatI[+A](val from: I[A]^) extends I[A]: + override def concat[B >: A](that: => IOnce[B]^): I[B]^{this, that} = ??? 
+ + + + diff --git a/tests/pos-custom-args/captures/ctest.scala b/tests/pos-custom-args/captures/ctest.scala new file mode 100644 index 000000000000..62aa77fec0a5 --- /dev/null +++ b/tests/pos-custom-args/captures/ctest.scala @@ -0,0 +1,7 @@ +class C +type Cap = C^ + +class S + +def f(y: Cap) = + val a: ((x: Cap) -> S^) = (x: Cap) => S() \ No newline at end of file diff --git a/tests/pos-custom-args/captures/curried-closures.scala b/tests/pos-custom-args/captures/curried-closures.scala new file mode 100644 index 000000000000..baea8b15075c --- /dev/null +++ b/tests/pos-custom-args/captures/curried-closures.scala @@ -0,0 +1,31 @@ +object Test: + def map2(xs: List[Int])(f: Int => Int): List[Int] = xs.map(f) + val f1 = map2 + val fc1: List[Int] -> (Int => Int) -> List[Int] = f1 + + def map3(f: Int => Int)(xs: List[Int]): List[Int] = xs.map(f) + private val f2 = map3 + val fc2: (f: Int => Int) -> List[Int] ->{f} List[Int] = f2 + + val f3 = (f: Int => Int) => + println(f(3)) + (xs: List[Int]) => xs.map(_ + 1) + val f3c: (Int => Int) -> List[Int] -> List[Int] = f3 + + class LL[A]: + def drop(n: Int): LL[A]^{this} = ??? + + def test(ct: CanThrow[Exception]) = + def xs: LL[Int]^{ct} = ??? + val ys = xs.drop(_) + val ysc: Int -> LL[Int]^{ct} = ys + +import java.io.* +def Test4(g: OutputStream^) = + val xs: List[Int] = ??? + val later = (f: OutputStream^) => (y: Int) => xs.foreach(x => f.write(x + y)) + val _: (f: OutputStream^) ->{} Int ->{f} Unit = later + + val later2 = () => (y: Int) => xs.foreach(x => g.write(x + y)) + val _: () ->{} Int ->{g} Unit = later2 + diff --git a/tests/pos-custom-args/captures/curried-shorthands.scala b/tests/pos-custom-args/captures/curried-shorthands.scala deleted file mode 100644 index c68dc4b5cdbf..000000000000 --- a/tests/pos-custom-args/captures/curried-shorthands.scala +++ /dev/null @@ -1,24 +0,0 @@ -object Test: - def map2(xs: List[Int])(f: Int => Int): List[Int] = xs.map(f) - val f1 = map2 - val fc1: List[Int] -> (Int => Int) -> List[Int] = f1 - - def map3(f: Int => Int)(xs: List[Int]): List[Int] = xs.map(f) - private val f2 = map3 - val fc2: (Int => Int) -> List[Int] -> List[Int] = f2 - - val f3 = (f: Int => Int) => - println(f(3)) - (xs: List[Int]) => xs.map(_ + 1) - val f3c: (Int => Int) -> List[Int] ->{} List[Int] = f3 - - class LL[A]: - def drop(n: Int): LL[A]^{this} = ??? - - def test(ct: CanThrow[Exception]) = - def xs: LL[Int]^{ct} = ??? 
- val ys = xs.drop(_) - val ysc: Int -> LL[Int]^{ct} = ys - - - diff --git a/tests/pos-custom-args/captures/dependent-pure.scala b/tests/pos-custom-args/captures/dependent-pure.scala new file mode 100644 index 000000000000..ad10d9590f25 --- /dev/null +++ b/tests/pos-custom-args/captures/dependent-pure.scala @@ -0,0 +1,5 @@ +import language.experimental.captureChecking +class ContextCls +type Context = ContextCls^ + +class Filtered(p: (c: Context) ?-> () ->{c} Boolean) extends Pure diff --git a/tests/pos-custom-args/captures/eta-expansions.scala b/tests/pos-custom-args/captures/eta-expansions.scala new file mode 100644 index 000000000000..1aac7ded1b50 --- /dev/null +++ b/tests/pos-custom-args/captures/eta-expansions.scala @@ -0,0 +1,9 @@ +@annotation.capability class Cap + +def test(d: Cap) = + def map2(xs: List[Int])(f: Int => Int): List[Int] = xs.map(f) + val f1 = map2 // capture polymorphic implicit eta expansion + def f2c: List[Int] => (Int => Int) => List[Int] = f1 + val a0 = identity[Cap ->{d} Unit] // capture monomorphic implicit eta expansion + val a0c: (Cap ->{d} Unit) ->{d} Cap ->{d} Unit = a0 + val b0 = (x: Cap ->{d} Unit) => identity[Cap ->{d} Unit](x) // not an implicit eta expansion, hence capture polymorphic diff --git a/tests/pos-custom-args/captures/foreach.scala b/tests/pos-custom-args/captures/foreach.scala index b7dfc49272a9..28225c3dee7b 100644 --- a/tests/pos-custom-args/captures/foreach.scala +++ b/tests/pos-custom-args/captures/foreach.scala @@ -1,4 +1,4 @@ -import caps.unsafe.* +import annotation.unchecked.uncheckedCaptures def test = - val tasks = new collection.mutable.ArrayBuffer[() => Unit] - val _: Unit = tasks.foreach(((task: () => Unit) => task()).unsafeBoxFunArg) + val tasks = new collection.mutable.ArrayBuffer[(() => Unit) @uncheckedCaptures] + val _: Unit = tasks.foreach(((task: () => Unit) => task())) diff --git a/tests/pos-custom-args/captures/fromJavaObject.scala b/tests/pos-custom-args/captures/fromJavaObject.scala new file mode 100644 index 000000000000..1f640af799f5 --- /dev/null +++ b/tests/pos-custom-args/captures/fromJavaObject.scala @@ -0,0 +1,5 @@ +// Test that CC handles FromJavaObject correctly +object Test: + val x: Any = 12 + String.valueOf(x) + diff --git a/tests/pos-custom-args/captures/function-combinators.scala b/tests/pos-custom-args/captures/function-combinators.scala new file mode 100644 index 000000000000..4354af4c7636 --- /dev/null +++ b/tests/pos-custom-args/captures/function-combinators.scala @@ -0,0 +1,28 @@ +class ContextClass +type Context = ContextClass^ + +def Test(using ctx1: Context, ctx2: Context) = + val f: Int => Int = identity + val g1: Int ->{ctx1} Int = identity + val g2: Int ->{ctx2} Int = identity + val h: Int -> Int = identity + val a1 = f.andThen(f); val _: Int ->{f} Int = a1 + val a2 = f.andThen(g1); val _: Int ->{f, g1} Int = a2 + val a3 = f.andThen(g2); val _: Int ->{f, g2} Int = a3 + val a4 = f.andThen(h); val _: Int ->{f} Int = a4 + val b1 = g1.andThen(f); val _: Int ->{f, g1} Int = b1 + val b2 = g1.andThen(g1); val _: Int ->{g1} Int = b2 + val b3 = g1.andThen(g2); val _: Int ->{g1, g2} Int = b3 + val b4 = g1.andThen(h); val _: Int ->{g1} Int = b4 + val c1 = h.andThen(f); val _: Int ->{f} Int = c1 + val c2 = h.andThen(g1); val _: Int ->{g1} Int = c2 + val c3 = h.andThen(g2); val _: Int ->{g2} Int = c3 + val c4 = h.andThen(h); val _: Int -> Int = c4 + + val f2: (Int, Int) => Int = _ + _ + val f2c = f2.curried; val _: Int -> Int ->{f2} Int = f2c + val f2t = f2.tupled; val _: ((Int, Int)) ->{f2} Int = f2t + + 
val f3: (Int, Int, Int) => Int = ??? + val f3c = f3.curried; val _: Int -> Int -> Int ->{f3} Int = f3c + val f3t = f3.tupled; val _: ((Int, Int, Int)) ->{f3} Int = f3t diff --git a/tests/pos-custom-args/captures/future-traverse.scala b/tests/pos-custom-args/captures/future-traverse.scala new file mode 100644 index 000000000000..743984660af0 --- /dev/null +++ b/tests/pos-custom-args/captures/future-traverse.scala @@ -0,0 +1,17 @@ +import language.experimental.captureChecking + +trait Builder[-A, +C] +trait BuildFrom[-From, -A, +C] { + def newBuilder(from: From): Builder[A, C] +} + +trait Future[+T] { this: Future[T]^ => + import Future.* + def foldLeft[R](r: R): R = r + def traverse[A, B, M[X] <: IterableOnce[X]](in: M[A]^, bf: BuildFrom[M[A]^, B, M[B]^]): Unit = + val builder = bf.newBuilder(in) + foldLeft(successful(builder)) +} +object Future { + def successful[T](result: T): Future[T] = ??? +} diff --git a/tests/pos-custom-args/captures/i13816.scala b/tests/pos-custom-args/captures/i13816.scala index 235afef35f1c..0ba84ef84c64 100644 --- a/tests/pos-custom-args/captures/i13816.scala +++ b/tests/pos-custom-args/captures/i13816.scala @@ -2,12 +2,16 @@ import language.experimental.saferExceptions class Ex1 extends Exception("Ex1") class Ex2 extends Exception("Ex2") +class Ex3 extends Exception("Ex3") def foo0(i: Int): (CanThrow[Ex1], CanThrow[Ex2]) ?-> Unit = if i > 0 then throw new Ex1 else throw new Ex2 -def foo01(i: Int): CanThrow[Ex1] ?-> CanThrow[Ex2] ?-> Unit = +/* Does not work yet: curried dependent CFTs are not yet handled in typer + +def foo01(i: Int): (ct: CanThrow[Ex1]) ?-> CanThrow[Ex2] ?->{ct} Unit = if i > 0 then throw new Ex1 else throw new Ex2 +*/ def foo1(i: Int): Unit throws Ex1 throws Ex2 = if i > 0 then throw new Ex1 else throw new Ex1 @@ -33,6 +37,11 @@ def foo7(i: Int)(using CanThrow[Ex1]): Unit throws Ex1 | Ex2 = def foo8(i: Int)(using CanThrow[Ex2]): Unit throws Ex2 | Ex1 = if i > 0 then throw new Ex1 else throw new Ex2 +def foo9(i: Int): Unit throws Ex1 | Ex2 | Ex3 = + if i > 0 then throw new Ex1 + else if i < 0 then throw new Ex2 + else throw new Ex3 + def test(): Unit = try foo1(1) diff --git a/tests/pos-custom-args/captures/i15749.scala b/tests/pos-custom-args/captures/i15749.scala index 4959c003a918..0a552ae1a3c5 100644 --- a/tests/pos-custom-args/captures/i15749.scala +++ b/tests/pos-custom-args/captures/i15749.scala @@ -1,9 +1,9 @@ class Unit object unit extends Unit -type Top = Any^{cap} +type Top = Any^ -type LazyVal[T] = Unit ->{cap} T +type LazyVal[T] = Unit => T class Foo[T](val x: T) diff --git a/tests/pos-custom-args/captures/i15749a.scala b/tests/pos-custom-args/captures/i15749a.scala deleted file mode 100644 index fe5f4d75dae1..000000000000 --- a/tests/pos-custom-args/captures/i15749a.scala +++ /dev/null @@ -1,21 +0,0 @@ -class Unit -object u extends Unit - -type Top = Any^ - -type Wrapper[T] = [X] -> (op: T ->{cap} X) -> X - -def test = - - def wrapper[T](x: T): Wrapper[T] = - [X] => (op: T ->{cap} X) => op(x) - - def strictMap[A <: Top, sealed B <: Top](mx: Wrapper[A])(f: A ->{cap} B): Wrapper[B] = - mx((x: A) => wrapper(f(x))) - - def force[A](thunk: Unit ->{cap} A): A = thunk(u) - - def forceWrapper[sealed A](mx: Wrapper[Unit ->{cap} A]): Wrapper[A] = - // Γ ⊢ mx: Wrapper[□ {cap} Unit => A] - // `force` should be typed as ∀(□ {cap} Unit -> A) A, but it can not - strictMap[Unit ->{cap} A, A](mx)(t => force[A](t)) // error diff --git a/tests/pos-custom-args/captures/i15922.scala b/tests/pos-custom-args/captures/i15922.scala deleted file mode 100644
index 23109a3ba8f4..000000000000 --- a/tests/pos-custom-args/captures/i15922.scala +++ /dev/null @@ -1,14 +0,0 @@ -trait Cap { def use(): Int } -type Id[X] = [T] -> (op: X => T) -> T -def mkId[X](x: X): Id[X] = [T] => (op: X => T) => op(x) - -def withCap[X](op: (Cap^) => X): X = { - val cap: Cap^ = new Cap { def use() = { println("cap is used"); 0 } } - val result = op(cap) - result -} - -def leaking(c: Cap^): Id[Cap^{c}] = mkId(c) - -def test = - val bad = withCap(leaking) diff --git a/tests/pos-custom-args/captures/i15923-cases.scala b/tests/pos-custom-args/captures/i15923-cases.scala index 136b8950eb26..7c5635f7b3dd 100644 --- a/tests/pos-custom-args/captures/i15923-cases.scala +++ b/tests/pos-custom-args/captures/i15923-cases.scala @@ -2,14 +2,14 @@ trait Cap { def use(): Int } type Id[X] = [T] -> (op: X => T) -> T def mkId[X](x: X): Id[X] = [T] => (op: X => T) => op(x) -def foo(x: Id[Cap^{cap}]) = { +def foo(x: Id[Cap^]) = { x(_.use()) // was error, now OK } -def bar(io: Cap^{cap}, x: Id[Cap^{io}]) = { +def bar(io: Cap^, x: Id[Cap^{io}]) = { x(_.use()) } -def barAlt(a: Cap^{cap}, b: Cap^{cap}, x: Id[Cap]^{a, b}) = { +def barAlt(a: Cap^, b: Cap^, x: Id[Cap]^{a, b}) = { x(_.use()) } diff --git a/tests/pos-custom-args/captures/i16224.scala b/tests/pos-custom-args/captures/i16224.scala new file mode 100644 index 000000000000..098b2a2a2903 --- /dev/null +++ b/tests/pos-custom-args/captures/i16224.scala @@ -0,0 +1,14 @@ +import language.experimental.captureChecking +class Delta: + val value = 1 + + def f(v: Int)(using delta: Delta): Int = + v + delta.value + + def run(): Unit = + val delta = Delta() + val x: Map[Char, Int] = Map( + 'a' -> 0, + 'b' -> 1 + ) + val y: Map[Char, Int] = x.map((k, v) => (k, f(v)(using delta))) \ No newline at end of file diff --git a/tests/pos-custom-args/captures/i16415.scala b/tests/pos-custom-args/captures/i16415.scala new file mode 100644 index 000000000000..aede36a15be8 --- /dev/null +++ b/tests/pos-custom-args/captures/i16415.scala @@ -0,0 +1,8 @@ +abstract class A[X]: + def foo(x: X): X + +class IO +class C +def test(io: IO^) = + class B extends A[C^{io}]: // error, but should work + override def foo(x: C^{io}): C^{io} = ??? diff --git a/tests/pos-custom-args/captures/inlined-closure.scala b/tests/pos-custom-args/captures/inlined-closure.scala new file mode 100644 index 000000000000..74f93131b940 --- /dev/null +++ b/tests/pos-custom-args/captures/inlined-closure.scala @@ -0,0 +1,14 @@ +class ContextClass +type Context = ContextClass^ +class ParamRef: + def isTracked(using Context): Boolean = ??? +trait Lam[PR <: ParamRef]: + val paramRefs: List[PR] = ??? +inline def atPhase[T]()(inline op: Context ?=> T)(using ctx: Context): T = + op(using ctx) + +def Test(using ctx: Context) = + val info: Lam[ParamRef] = ??? 
+ info.paramRefs.filter(_.isTracked) + val p = atPhase()((_: ParamRef).isTracked) + val _: ParamRef ->{ctx} Boolean = p diff --git a/tests/pos-custom-args/captures/iterators.scala b/tests/pos-custom-args/captures/iterators.scala index 10a7f57cd68f..9a3ce7569b09 100644 --- a/tests/pos-custom-args/captures/iterators.scala +++ b/tests/pos-custom-args/captures/iterators.scala @@ -1,5 +1,9 @@ package cctest +trait IterableOnce[A]: + this: IterableOnce[A]^ => + def iterator: Iterator[A]^{this} + abstract class Iterator[T]: thisIterator: Iterator[T]^ => @@ -10,6 +14,9 @@ abstract class Iterator[T]: def next = f(thisIterator.next) end Iterator +private[this] final class ConcatIteratorCell[A](head: => IterableOnce[A]^): + def headIterator: Iterator[A]^{this} = head.iterator + class C type Cap = C^ diff --git a/tests/pos-custom-args/captures/lazylists-exceptions.scala b/tests/pos-custom-args/captures/lazylists-exceptions.scala index 8f1fba2bf2dc..afc6616108bc 100644 --- a/tests/pos-custom-args/captures/lazylists-exceptions.scala +++ b/tests/pos-custom-args/captures/lazylists-exceptions.scala @@ -1,5 +1,6 @@ import language.experimental.saferExceptions import scala.compiletime.uninitialized +import scala.annotation.unchecked.uncheckedCaptures trait LzyList[+A]: def isEmpty: Boolean @@ -13,7 +14,7 @@ object LzyNil extends LzyList[Nothing]: final class LzyCons[+A](hd: A, tl: () => LzyList[A]^) extends LzyList[A]: private var forced = false - private var cache: LzyList[A]^{this} = uninitialized + private var cache: LzyList[A @uncheckedCaptures]^{this} = uninitialized private def force = if !forced then { cache = tl(); forced = true } cache diff --git a/tests/pos-custom-args/captures/lists.scala b/tests/pos-custom-args/captures/lists.scala index 56473e68d49f..99505f0bb7a2 100644 --- a/tests/pos-custom-args/captures/lists.scala +++ b/tests/pos-custom-args/captures/lists.scala @@ -30,7 +30,7 @@ def test(c: Cap, d: Cap, e: Cap) = CONS(z, ys) val zsc: LIST[Cap ->{d, y} Unit] = zs val z1 = zs.head - val z1c: Cap ->{y, d} Unit = z1 + val z1c: Cap^ ->{y, d} Unit = z1 val ys1 = zs.tail val y1 = ys1.head diff --git a/tests/pos-custom-args/captures/logger.scala b/tests/pos-custom-args/captures/logger.scala index 3f417da8c1be..d95eeaae74cf 100644 --- a/tests/pos-custom-args/captures/logger.scala +++ b/tests/pos-custom-args/captures/logger.scala @@ -52,7 +52,8 @@ class Pair[+A, +B](x: A, y: B): def test2(ct: CanThrow[Exception], fs: FileSystem) = def x: Int ->{ct} String = ??? def y: Logger^{fs} = ??? 
- def p = Pair(x, y) + def p = Pair[Int ->{ct} String, Logger^{fs}](x, y) + def p3 = Pair(x, y) def f = () => p.fst diff --git a/tests/pos-custom-args/captures/nested-classes-2.scala b/tests/pos-custom-args/captures/nested-classes-2.scala new file mode 100644 index 000000000000..744635ee949b --- /dev/null +++ b/tests/pos-custom-args/captures/nested-classes-2.scala @@ -0,0 +1,24 @@ + +def f(x: (() => Unit)): (() => Unit) => (() => Unit) = + def g(y: (() => Unit)): (() => Unit) = x + g + +def test1(x: (() => Unit)): Unit = + def test2(y: (() => Unit)) = + val a: (() => Unit) => (() => Unit) = f(y) + a(x) // OK, but should be error + test2(() => ()) + +def test2(x1: (() => Unit), x2: (() => Unit) => Unit) = + class C1(x1: (() => Unit), xx2: (() => Unit) => Unit): + def c2(y1: (() => Unit), y2: (() => Unit) => Unit): C2^ = C2(y1, y2) + class C2(y1: (() => Unit), y2: (() => Unit) => Unit): + val a: (() => Unit) => (() => Unit) = f(y1) + a(x1) // OK, but should be error + C2(() => (), x => ()) + + def test3(y1: (() => Unit), y2: (() => Unit) => Unit) = + val cc1: C1^{y1, y2} = C1(y1, y2) + val cc2 = cc1.c2(x1, x2) + val cc3: cc1.C2^{cc1, x1, x2} = cc2 + diff --git a/tests/pos-custom-args/captures/pairs.scala b/tests/pos-custom-args/captures/pairs.scala index bc20d20ffd92..e15a76970c29 100644 --- a/tests/pos-custom-args/captures/pairs.scala +++ b/tests/pos-custom-args/captures/pairs.scala @@ -1,4 +1,5 @@ - +//class CC +//type Cap = CC^ @annotation.capability class Cap object Generic: @@ -12,21 +13,6 @@ object Generic: def g(x: Cap): Unit = if d == x then () val p = Pair(f, g) val x1 = p.fst - val x1c: Cap ->{c} Unit = x1 - val y1 = p.snd - val y1c: Cap ->{d} Unit = y1 - -object Monomorphic: - - class Pair(x: Cap => Unit, y: Cap => Unit): - def fst: Cap ->{x} Unit = x - def snd: Cap ->{y} Unit = y - - def test(c: Cap, d: Cap) = - def f(x: Cap): Unit = if c == x then () - def g(x: Cap): Unit = if d == x then () - val p = Pair(f, g) - val x1 = p.fst - val x1c: Cap ->{c} Unit = x1 + val x1c: Cap^ ->{c} Unit = x1 val y1 = p.snd - val y1c: Cap ->{d} Unit = y1 + val y1c: Cap^ ->{d} Unit = y1 diff --git a/tests/pos-custom-args/captures/reaches.scala b/tests/pos-custom-args/captures/reaches.scala new file mode 100644 index 000000000000..0f7df02e13b1 --- /dev/null +++ b/tests/pos-custom-args/captures/reaches.scala @@ -0,0 +1,49 @@ +class C +def f(xs: List[C^]) = + val y = xs + val z: List[C^{xs*}] = y + +type Proc = () => Unit + +class Ref[T](init: T): + private var x: T = init + def get: T = x + def set(y: T) = { x = y } + +def runAll(xs: List[Proc]): Unit = + var cur: List[() ->{xs*} Unit] = xs // OK, by revised VAR + while cur.nonEmpty do + val next: () ->{xs*} Unit = cur.head + next() + cur = cur.tail: List[() ->{xs*} Unit] + +def id1(x: Proc): () ->{x} Unit = x +def id2(xs: List[Proc]): List[() ->{xs*} Unit] = xs + +def cons(x: Proc, xs: List[Proc]): List[() ->{x, xs*} Unit] = + val y = x :: xs + y + +def addOneProc(xs: List[Proc]): List[Proc] = + val x: Proc = () => println("hello") + val result: List[() ->{x, xs*} Unit] = x :: xs + result // OK, we can widen () ->{x, xs*} Unit to cap here.
+ +def compose1[A, B, C](f: A => B, g: B => C): A ->{f, g} C = + z => g(f(z)) + +def compose2[A, B, C](f: A => B, g: B => C): A => C = + z => g(f(z)) + +def mapCompose[A](ps: List[(A => A, A => A)]): List[A ->{ps*} A] = + ps.map((x, y) => compose1(x, y)) + +@annotation.capability class IO + +def test(io: IO) = + val a: () ->{io} Unit = () => () + val as: List[() ->{io} Unit] = Nil + val bs: List[() ->{io} Unit] = cons(a, as) + val cs = cons(a, as) + val ds: List[() ->{io} Unit] = cs + diff --git a/tests/pos-custom-args/captures/sealed-lowerbound.scala b/tests/pos-custom-args/captures/sealed-lowerbound.scala new file mode 100644 index 000000000000..0894d7aad50d --- /dev/null +++ b/tests/pos-custom-args/captures/sealed-lowerbound.scala @@ -0,0 +1,12 @@ +def foo[B](x: B): B = x + +def bar[B, A >: B](x: A): A = foo[A](x) + +class C[A] + +class CV[A](x: Int): + def this() = this: + val x = new C[A]: + println("foo") + 0 + diff --git a/tests/pos-custom-args/captures/sealed-value-class.scala b/tests/pos-custom-args/captures/sealed-value-class.scala new file mode 100644 index 000000000000..7d2a223be91f --- /dev/null +++ b/tests/pos-custom-args/captures/sealed-value-class.scala @@ -0,0 +1,3 @@ +class Ops[A](xs: Array[A]) extends AnyVal: + + def f(p: A => Boolean): Array[A] = xs diff --git a/tests/pos-custom-args/captures/selftypes.scala b/tests/pos-custom-args/captures/selftypes.scala index c1b8eefce506..fff7445c419a 100644 --- a/tests/pos-custom-args/captures/selftypes.scala +++ b/tests/pos-custom-args/captures/selftypes.scala @@ -13,3 +13,12 @@ class D(@constructorOnly op: Int => Int) extends C: val x = 1//op(1) +// Demonstrates root mapping for self types +class IM: + this: IM^ => + + def coll: IM^{this} = ??? + foo(coll) + +def foo(im: IM^): Unit = ??? + diff --git a/tests/pos-custom-args/captures/steppers.scala b/tests/pos-custom-args/captures/steppers.scala new file mode 100644 index 000000000000..6169abab21dc --- /dev/null +++ b/tests/pos-custom-args/captures/steppers.scala @@ -0,0 +1,27 @@ + +trait Stepper[+A]: + this: Stepper[A]^ => + +object Stepper: + trait EfficientSplit + +sealed trait StepperShape[-T, S <: Stepper[_]^] extends Pure + +trait IterableOnce[+A] extends Any: + this: IterableOnce[A]^ => + def stepper[S <: Stepper[_]^{this}](implicit shape: StepperShape[A, S]): S = ??? + +sealed abstract class ArraySeq[T] extends IterableOnce[T], Pure: + def array: Array[_] + + def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq[T] = + val arr = array.asInstanceOf[Array[T]].sorted(ord.asInstanceOf[Ordering[Any]]).asInstanceOf[Array[T]] + ArraySeq.make(arr).asInstanceOf[ArraySeq[T]] + +object ArraySeq: + + def make[T](x: Array[T]): ArraySeq[T] = ??? + + final class ofRef[T <: AnyRef](val array: Array[T]) extends ArraySeq[T], Pure: + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S & Stepper.EfficientSplit = ??? + diff --git a/tests/pos-custom-args/captures/test.scala b/tests/pos-custom-args/captures/test.scala new file mode 100644 index 000000000000..979f3d34ec17 --- /dev/null +++ b/tests/pos-custom-args/captures/test.scala @@ -0,0 +1,16 @@ +class C +type Cap = C^ + +type Proc = () => Unit + +class Ref[T](p: T): + private var x: T = p + def set(x: T): Unit = this.x = x + def get: T = x + +def test(c: () => Unit) = + val p: () => Unit = ??? 
+ val r = Ref(p) + val x = r.get + r.set(x) + diff --git a/tests/pos-custom-args/captures/try3.scala b/tests/pos-custom-args/captures/try3.scala index b8937bec00f3..b44ea57ccae4 100644 --- a/tests/pos-custom-args/captures/try3.scala +++ b/tests/pos-custom-args/captures/try3.scala @@ -13,7 +13,7 @@ def raise[E <: Exception](ex: E)(using CanThrow[E]): Nothing = throw ex def test1: Int = - def f(a: Boolean): Boolean -> CanThrow[IOException] ?-> Int = + def f(a: Boolean) = handle { if !a then raise(IOException()) (b: Boolean) => (_: CanThrow[IOException]) ?=> @@ -22,6 +22,7 @@ def test1: Int = } { ex => (b: Boolean) => (_: CanThrow[IOException]) ?=> -1 } + def fc(a: Boolean): Boolean -> CanThrow[IOException] ?-> Int = f(a) handle { val g = f(true) g(false) // can raise an exception diff --git a/tests/pos-custom-args/captures/unsafe-unbox.scala b/tests/pos-custom-args/captures/unsafe-unbox.scala index 63a32d58f640..1c523490e19d 100644 --- a/tests/pos-custom-args/captures/unsafe-unbox.scala +++ b/tests/pos-custom-args/captures/unsafe-unbox.scala @@ -1,7 +1,7 @@ import annotation.unchecked.uncheckedCaptures def test = - @uncheckedCaptures - var finalizeActions = collection.mutable.ListBuffer[() => Unit]() + val finalizeActionsInit = collection.mutable.ListBuffer[(() => Unit) @uncheckedCaptures]() + var finalizeActions = finalizeActionsInit val action = finalizeActions.remove(0) diff --git a/tests/pos-custom-args/captures/unsafeAssumePure.scala b/tests/pos-custom-args/captures/unsafeAssumePure.scala new file mode 100644 index 000000000000..ac81be57aa76 --- /dev/null +++ b/tests/pos-custom-args/captures/unsafeAssumePure.scala @@ -0,0 +1,4 @@ +class C +import caps.unsafe.* + +def foo(x: C^): C = x.unsafeAssumePure diff --git a/tests/pos-custom-args/captures/unzip.scala b/tests/pos-custom-args/captures/unzip.scala new file mode 100644 index 000000000000..228142e93b01 --- /dev/null +++ b/tests/pos-custom-args/captures/unzip.scala @@ -0,0 +1,7 @@ +class Seqq[A]: + def unzip[A1, A2](using asPair: A -> (A1, A2)): (Seq[A1], Seq[A2]) = ??? + +def Test = + val s: Seqq[(String, Int)] = ??? + s.unzip(using Predef.$conforms[(String, Int)]) + diff --git a/tests/pos-custom-args/captures/vars.scala b/tests/pos-custom-args/captures/vars.scala index ccf2cd587eb1..a335be96fed1 100644 --- a/tests/pos-custom-args/captures/vars.scala +++ b/tests/pos-custom-args/captures/vars.scala @@ -14,4 +14,4 @@ def test(cap1: Cap, cap2: Cap) = val r = Ref() r.elem = f - val fc: String ->{cap1} String = r.elem + val fc: String ->{cap1} String = r.elem \ No newline at end of file diff --git a/tests/pos-custom-args/captures/vars1.scala b/tests/pos-custom-args/captures/vars1.scala index 56548e5a9c30..451b8988364f 100644 --- a/tests/pos-custom-args/captures/vars1.scala +++ b/tests/pos-custom-args/captures/vars1.scala @@ -8,11 +8,11 @@ object Test: var defaultIncompleteHandler: ErrorHandler = ??? @uncheckedCaptures var incompleteHandler: ErrorHandler = defaultIncompleteHandler - val x = incompleteHandler.unsafeUnbox + private val x = incompleteHandler.unsafeUnbox val _ : ErrorHandler = x val _ = x(1, "a") - def defaultIncompleteHandler1(): ErrorHandler = ??? + def defaultIncompleteHandler1(): (Int, String) => Unit = ??? val defaultIncompleteHandler2: ErrorHandler = ??? 
@uncheckedCaptures var incompleteHandler1: ErrorHandler = defaultIncompleteHandler1() @@ -25,6 +25,6 @@ object Test: incompleteHandler1 = defaultIncompleteHandler2 incompleteHandler1 = defaultIncompleteHandler2 - val saved = incompleteHandler1 + private val saved = incompleteHandler1 diff --git a/tests/pos-custom-args/erased/erased-args-lifted.scala b/tests/pos-custom-args/erased/erased-args-lifted.scala deleted file mode 100644 index faa07cdcb0b5..000000000000 --- a/tests/pos-custom-args/erased/erased-args-lifted.scala +++ /dev/null @@ -1,12 +0,0 @@ -object Test { - def foo(erased a: Int)(b: Int, c: Int) = 42 - def bar(i: Int): Int = { - println(1) - 42 - } - def baz: Int = { - println(1) - 2 - } - foo(bar(baz))(c = baz, b = baz) // force all args to be lifted in vals befor the call -} diff --git a/tests/pos-custom-args/erased/erased-extension-method.scala b/tests/pos-custom-args/erased/erased-extension-method.scala deleted file mode 100644 index 7ef14d65e4a4..000000000000 --- a/tests/pos-custom-args/erased/erased-extension-method.scala +++ /dev/null @@ -1,3 +0,0 @@ -class IntDeco(x: Int) extends AnyVal { - def foo(erased y: Int) = x -} diff --git a/tests/pos-custom-args/erased/erased-typedef.scala b/tests/pos-custom-args/erased/erased-typedef.scala deleted file mode 100644 index f4b3e0937664..000000000000 --- a/tests/pos-custom-args/erased/erased-typedef.scala +++ /dev/null @@ -1,8 +0,0 @@ -trait Monadless[Monad[_]] { - - type M[T] = Monad[T] - - def lift[T](body: T): Monad[T] = ??? - - def unlift[T](m: M[T]): T = ??? -} \ No newline at end of file diff --git a/tests/pos-custom-args/erased/i10848a.scala b/tests/pos-custom-args/erased/i10848a.scala deleted file mode 100644 index 296937e5e3c2..000000000000 --- a/tests/pos-custom-args/erased/i10848a.scala +++ /dev/null @@ -1,5 +0,0 @@ -class IsOn[T] -type On -object IsOn { - erased given IsOn[On] = new IsOn[On] -} diff --git a/tests/pos-custom-args/erased/i10848b.scala b/tests/pos-custom-args/erased/i10848b.scala deleted file mode 100644 index 71292b1b859c..000000000000 --- a/tests/pos-custom-args/erased/i10848b.scala +++ /dev/null @@ -1,4 +0,0 @@ -class Foo: - erased given Int = 1 - def foo(using erased x: Int): Unit = () - foo diff --git a/tests/pos-custom-args/help.scala b/tests/pos-custom-args/help.scala deleted file mode 100644 index 257186338e40..000000000000 --- a/tests/pos-custom-args/help.scala +++ /dev/null @@ -1,4 +0,0 @@ - -// dummy source for exercising information flags -// -class Help diff --git a/tests/pos-custom-args/i13405/Test.scala b/tests/pos-custom-args/i13405/Test.scala deleted file mode 100644 index 385f4453adeb..000000000000 --- a/tests/pos-custom-args/i13405/Test.scala +++ /dev/null @@ -1 +0,0 @@ -@main def main: Unit = hh() diff --git a/tests/pos-custom-args/i8875.scala b/tests/pos-custom-args/i8875.scala deleted file mode 100644 index ac5e0bd5cf42..000000000000 --- a/tests/pos-custom-args/i8875.scala +++ /dev/null @@ -1,5 +0,0 @@ -class A { - extension (a: Int) { - def foo: Int = 1 - } -} \ No newline at end of file diff --git a/tests/pos-custom-args/i9267.scala b/tests/pos-custom-args/i9267.scala deleted file mode 100644 index 83d15dc739b5..000000000000 --- a/tests/pos-custom-args/i9267.scala +++ /dev/null @@ -1 +0,0 @@ -class A diff --git a/tests/pos-custom-args/jdk-8-app.scala b/tests/pos-custom-args/jdk-8-app.scala deleted file mode 100644 index 6a9d07155958..000000000000 --- a/tests/pos-custom-args/jdk-8-app.scala +++ /dev/null @@ -1,5 +0,0 @@ -import java.time.LocalDate - -object Jdk8App extends App { - 
println(LocalDate.now()) -} diff --git a/tests/pos-custom-args/no-experimental/dotty-experimental.scala b/tests/pos-custom-args/no-experimental/dotty-experimental.scala deleted file mode 100644 index 72d16ddd9b15..000000000000 --- a/tests/pos-custom-args/no-experimental/dotty-experimental.scala +++ /dev/null @@ -1,6 +0,0 @@ -package dotty.tools -object test { - - val x = caps.cap - -} diff --git a/tests/pos-custom-args/no-experimental/experimental-imports-top.scala b/tests/pos-custom-args/no-experimental/experimental-imports-top.scala deleted file mode 100644 index bee89d6ab6c8..000000000000 --- a/tests/pos-custom-args/no-experimental/experimental-imports-top.scala +++ /dev/null @@ -1,5 +0,0 @@ -import language.experimental.erasedDefinitions -import annotation.experimental - -@experimental -erased def f = 1 diff --git a/tests/pos-custom-args/no-experimental/i13848.scala b/tests/pos-custom-args/no-experimental/i13848.scala deleted file mode 100644 index 8b65ccb078e1..000000000000 --- a/tests/pos-custom-args/no-experimental/i13848.scala +++ /dev/null @@ -1,8 +0,0 @@ -import annotation.experimental - -@main -@experimental -def run(): Unit = f - -@experimental -def f = 2 diff --git a/tests/pos-custom-args/semanticdb/inline-unapply/Macro_1.scala b/tests/pos-custom-args/semanticdb/inline-unapply/Macro_1.scala deleted file mode 100644 index 0c15284141cb..000000000000 --- a/tests/pos-custom-args/semanticdb/inline-unapply/Macro_1.scala +++ /dev/null @@ -1,8 +0,0 @@ -import scala.quoted.* - -object Succ: - - inline def unapply(n: Int): Option[Int] = ${ impl('n) } - - private def impl(n: Expr[Int])(using Quotes): Expr[Option[Int]] = - '{ if $n == 0 then None else Some($n - 1)} diff --git a/tests/pos-custom-args/semanticdb/macro-pos/example_1.scala b/tests/pos-custom-args/semanticdb/macro-pos/example_1.scala deleted file mode 100644 index 08e57680c74d..000000000000 --- a/tests/pos-custom-args/semanticdb/macro-pos/example_1.scala +++ /dev/null @@ -1,5 +0,0 @@ -import quoted.* - -object CodeImpl { - def codeExpr(using Quotes): Expr[String] = '{""} -} diff --git a/tests/pos-custom-args/semanticdb/macro-pos/example_3.scala b/tests/pos-custom-args/semanticdb/macro-pos/example_3.scala deleted file mode 100644 index 2c2223b59860..000000000000 --- a/tests/pos-custom-args/semanticdb/macro-pos/example_3.scala +++ /dev/null @@ -1,5 +0,0 @@ -object Test { - - def test = TestImpl.fun("") - -} diff --git a/tests/pos-custom-args/strict/adhoc-extension/A.scala b/tests/pos-custom-args/strict/adhoc-extension/A.scala deleted file mode 100644 index ff2e6bf29883..000000000000 --- a/tests/pos-custom-args/strict/adhoc-extension/A.scala +++ /dev/null @@ -1,3 +0,0 @@ -package adhoc -class A -abstract class Abs \ No newline at end of file diff --git a/tests/pos-custom-args/strict/i10383.scala b/tests/pos-custom-args/strict/i10383.scala deleted file mode 100644 index ca54464eb514..000000000000 --- a/tests/pos-custom-args/strict/i10383.scala +++ /dev/null @@ -1 +0,0 @@ -def r = BigInt(1) to BigInt(3) // error diff --git a/tests/pos-custom-args/strict/i16808.scala b/tests/pos-custom-args/strict/i16808.scala deleted file mode 100644 index 602ceed94161..000000000000 --- a/tests/pos-custom-args/strict/i16808.scala +++ /dev/null @@ -1,2 +0,0 @@ -def collectKeys[A, B, C](xs: Map[A, B])(f: PartialFunction[A, C]): Map[C, B] = - xs.collect{ case (f(c) , b) => (c, b) } \ No newline at end of file diff --git a/tests/pos-custom-args/strict/i7296.scala b/tests/pos-custom-args/strict/i7296.scala deleted file mode 100644 index 
3b582ad284b8..000000000000 --- a/tests/pos-custom-args/strict/i7296.scala +++ /dev/null @@ -1,2 +0,0 @@ -class Foo: - private var blah: Double = 0L \ No newline at end of file diff --git a/tests/pos-special/isInstanceOf/3324c.scala b/tests/pos-deep-subtype/3324c.scala similarity index 82% rename from tests/pos-special/isInstanceOf/3324c.scala rename to tests/pos-deep-subtype/3324c.scala index ad38af34e265..aefa25eaafc2 100644 --- a/tests/pos-special/isInstanceOf/3324c.scala +++ b/tests/pos-deep-subtype/3324c.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + sealed trait A[T] class B[T] extends A[T] diff --git a/tests/pos-special/isInstanceOf/3324d.scala b/tests/pos-deep-subtype/3324d.scala similarity index 78% rename from tests/pos-special/isInstanceOf/3324d.scala rename to tests/pos-deep-subtype/3324d.scala index d39ebe80c465..9372f33c3d10 100644 --- a/tests/pos-special/isInstanceOf/3324d.scala +++ b/tests/pos-deep-subtype/3324d.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + class Test { val x: Any = ??? diff --git a/tests/pos-special/isInstanceOf/3324e.scala b/tests/pos-deep-subtype/3324e.scala similarity index 83% rename from tests/pos-special/isInstanceOf/3324e.scala rename to tests/pos-deep-subtype/3324e.scala index b31c809eaaf7..58ba0152ed21 100644 --- a/tests/pos-special/isInstanceOf/3324e.scala +++ b/tests/pos-deep-subtype/3324e.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + class C[T] { val x: T = ??? x.isInstanceOf[T] diff --git a/tests/pos-deep-subtype/3324h.scala b/tests/pos-deep-subtype/3324h.scala new file mode 100644 index 000000000000..6bae5f6a01d9 --- /dev/null +++ b/tests/pos-deep-subtype/3324h.scala @@ -0,0 +1,16 @@ +//> using options -Xfatal-warnings + +import scala.language.`3.3` + +object Test { + trait Marker + def foo[T](x: T) = x match { + case _: (T & Marker) => // no warning + case _ => + } + + def foo2[T](x: T) = x match { + case _: T with Marker => // scalac or 3.4 emits a warning + case _ => + } +} diff --git a/tests/pos-special/strawman-collections/CollectionStrawMan1.scala b/tests/pos-deep-subtype/CollectionStrawMan1.scala similarity index 100% rename from tests/pos-special/strawman-collections/CollectionStrawMan1.scala rename to tests/pos-deep-subtype/CollectionStrawMan1.scala diff --git a/tests/pos-special/strawman-collections/CollectionStrawMan4.scala b/tests/pos-deep-subtype/CollectionStrawMan4.scala similarity index 100% rename from tests/pos-special/strawman-collections/CollectionStrawMan4.scala rename to tests/pos-deep-subtype/CollectionStrawMan4.scala diff --git a/tests/pos-special/strawman-collections/CollectionStrawMan5.scala b/tests/pos-deep-subtype/CollectionStrawMan5.scala similarity index 100% rename from tests/pos-special/strawman-collections/CollectionStrawMan5.scala rename to tests/pos-deep-subtype/CollectionStrawMan5.scala diff --git a/tests/pos-special/strawman-collections/CollectionStrawMan6.scala b/tests/pos-deep-subtype/CollectionStrawMan6.scala similarity index 100% rename from tests/pos-special/strawman-collections/CollectionStrawMan6.scala rename to tests/pos-deep-subtype/CollectionStrawMan6.scala diff --git a/tests/pos-special/isInstanceOf/Result.scala b/tests/pos-deep-subtype/Result.scala similarity index 85% rename from tests/pos-special/isInstanceOf/Result.scala rename to tests/pos-deep-subtype/Result.scala index c8a403735219..df42e54f3138 100644 --- a/tests/pos-special/isInstanceOf/Result.scala +++ b/tests/pos-deep-subtype/Result.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + 
object p { // test parametric case classes, which synthesis `canEqual` and `equals` diff --git a/tests/pos-special/isInstanceOf/classTag.scala b/tests/pos-deep-subtype/classTag.scala similarity index 83% rename from tests/pos-special/isInstanceOf/classTag.scala rename to tests/pos-deep-subtype/classTag.scala index fc8be4526958..b084f7f7e59c 100644 --- a/tests/pos-special/isInstanceOf/classTag.scala +++ b/tests/pos-deep-subtype/classTag.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + import scala.reflect.ClassTag object IsInstanceOfClassTag { diff --git a/tests/pos-deep-subtype/gadt.scala b/tests/pos-deep-subtype/gadt.scala new file mode 100644 index 000000000000..18fbe6e10986 --- /dev/null +++ b/tests/pos-deep-subtype/gadt.scala @@ -0,0 +1,39 @@ +//> using options -Xfatal-warnings + +sealed trait Exp[T] +case class Num(n: Int) extends Exp[Int] +case class Plus(e1: Exp[Int], e2: Exp[Int]) extends Exp[Int] +case class Var[T](name: String) extends Exp[T] +case class Lambda[T, U](x: Var[T], e: Exp[U]) extends Exp[T => U] +case class App[T, U](f: Exp[T => U], e: Exp[T]) extends Exp[U] + +abstract class Env { outer => + def apply[T](x: Var[T]): T + + def + [T](xe: (Var[T], T)) = new Env { + def apply[T](x: Var[T]): T = + if (x == xe._1) xe._2.asInstanceOf[T] + else outer(x) + } +} + +object Env { + val empty = new Env { + def apply[T](x: Var[T]): T = ??? + } +} + +object Test { + + val exp = App(Lambda(Var[Int]("x"), Plus(Var[Int]("x"), Num(1))), Var[Int]("2")) + + def eval[T](e: Exp[T])(env: Env): T = e match { + case Num(n) => n + case Plus(e1, e2) => eval(e1)(env) + eval(e2)(env) + case v: Var[T] => env(v) + case Lambda(x: Var[s], e) => ((y: s) => eval(e)(env + (x -> y))) + case App(f, e) => eval(f)(env)(eval(e)(env)) + } + + eval(exp)(Env.empty) +} diff --git a/tests/pos/i15677.scala b/tests/pos-deep-subtype/i15677.scala similarity index 100% rename from tests/pos/i15677.scala rename to tests/pos-deep-subtype/i15677.scala diff --git a/tests/pos-special/isInstanceOf/i16899.scala b/tests/pos-deep-subtype/i16899.scala similarity index 76% rename from tests/pos-special/isInstanceOf/i16899.scala rename to tests/pos-deep-subtype/i16899.scala index 650e1e5c7b23..e5acac1f927e 100644 --- a/tests/pos-special/isInstanceOf/i16899.scala +++ b/tests/pos-deep-subtype/i16899.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + sealed trait Unset def foo(v: Unset|Option[Int]): Unit = v match diff --git a/tests/pos-special/isInstanceOf/i9736.scala b/tests/pos-deep-subtype/i9736.scala similarity index 76% rename from tests/pos-special/isInstanceOf/i9736.scala rename to tests/pos-deep-subtype/i9736.scala index 7266764c723d..6ef1c42c981e 100644 --- a/tests/pos-special/isInstanceOf/i9736.scala +++ b/tests/pos-deep-subtype/i9736.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + class Test { type MyCombo = Int | Unit val z: MyCombo = 10 diff --git a/tests/pos-special/isInstanceOf/i9782.scala b/tests/pos-deep-subtype/i9782.scala similarity index 88% rename from tests/pos-special/isInstanceOf/i9782.scala rename to tests/pos-deep-subtype/i9782.scala index e277951769bf..02c69a35b752 100644 --- a/tests/pos-special/isInstanceOf/i9782.scala +++ b/tests/pos-deep-subtype/i9782.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + trait Txn[T <: Txn[T]] trait Elem[T <: Txn[T]] diff --git a/tests/pos-macros/captured-type/Macro_1.scala b/tests/pos-macros/captured-type/Macro_1.scala new file mode 100644 index 000000000000..3f094487ee4f --- /dev/null +++ 
b/tests/pos-macros/captured-type/Macro_1.scala @@ -0,0 +1,8 @@ +import scala.quoted.* + +inline def foo[U](u: U): U = ${ fooImpl[U]('u) } + +def fooImpl[U: Type](u: Expr[U])(using Quotes): Expr[U] = '{ + def f[T](x: T): T = ${ identity('{ x: T }) } + f[U]($u) +} diff --git a/tests/pos-macros/captured-type/Test_2.scala b/tests/pos-macros/captured-type/Test_2.scala new file mode 100644 index 000000000000..ed1baab565e0 --- /dev/null +++ b/tests/pos-macros/captured-type/Test_2.scala @@ -0,0 +1,3 @@ +def test = + foo(1) + foo("abc") diff --git a/tests/pos-macros/erasedArgs/Macro_1.scala b/tests/pos-macros/erasedArgs/Macro_1.scala new file mode 100644 index 000000000000..08706d6110b9 --- /dev/null +++ b/tests/pos-macros/erasedArgs/Macro_1.scala @@ -0,0 +1,7 @@ +import scala.quoted._ +import scala.language.experimental.erasedDefinitions + +transparent inline def mcr: Any = ${ mcrImpl(1, 2d, "abc") } + +def mcrImpl(x: Int, erased y: Double, z: String)(using Quotes): Expr[String] = + Expr(x.toString() + z) diff --git a/tests/pos-macros/erasedArgs/Test_2.scala b/tests/pos-macros/erasedArgs/Test_2.scala new file mode 100644 index 000000000000..19f0364d3f71 --- /dev/null +++ b/tests/pos-macros/erasedArgs/Test_2.scala @@ -0,0 +1 @@ +def test: "1abc" = mcr diff --git a/tests/pos-macros/hk-quoted-type-patterns/Macro_1.scala b/tests/pos-macros/hk-quoted-type-patterns/Macro_1.scala index 0d2df1504918..c79f565d840a 100644 --- a/tests/pos-macros/hk-quoted-type-patterns/Macro_1.scala +++ b/tests/pos-macros/hk-quoted-type-patterns/Macro_1.scala @@ -5,9 +5,9 @@ private def impl(x: Expr[Any])(using Quotes): Expr[Unit] = { case '{ foo[x] } => assert(Type.show[x] == "scala.Int", Type.show[x]) case '{ type f[X]; foo[`f`] } => - assert(Type.show[f] == "[A >: scala.Nothing <: scala.Any] => scala.collection.immutable.List[A]", Type.show[f]) + assert(Type.show[f] == "[A >: scala.Nothing <: scala.Any] =>> scala.collection.immutable.List[A]", Type.show[f]) case '{ type f <: AnyKind; foo[`f`] } => - assert(Type.show[f] == "[K >: scala.Nothing <: scala.Any, V >: scala.Nothing <: scala.Any] => scala.collection.immutable.Map[K, V]", Type.show[f]) + assert(Type.show[f] == "[K >: scala.Nothing <: scala.Any, V >: scala.Nothing <: scala.Any] =>> scala.collection.immutable.Map[K, V]", Type.show[f]) case x => throw MatchError(x.show) '{} } diff --git a/tests/pos-macros/i10864/Macro_1.scala b/tests/pos-macros/i10864/Macro_1.scala new file mode 100644 index 000000000000..7cf1e1850a76 --- /dev/null +++ b/tests/pos-macros/i10864/Macro_1.scala @@ -0,0 +1,15 @@ +import scala.quoted._ + +case class T(t: Type[_]) + +object T { + def impl[T <: AnyKind](using tt: Type[T])(using Quotes): Expr[Unit] = { + val t = T(tt) + t.t match + case '[type x <: AnyKind; x] => // ok + case _ => quotes.reflect.report.error("not ok :(") + '{} + } + + inline def run[T <: AnyKind] = ${ impl[T] } +} diff --git a/tests/pos-macros/i10864/Test_2.scala b/tests/pos-macros/i10864/Test_2.scala new file mode 100644 index 000000000000..e93fa1302221 --- /dev/null +++ b/tests/pos-macros/i10864/Test_2.scala @@ -0,0 +1,4 @@ +def test = + T.run[List] + T.run[Map] + T.run[Tuple22] diff --git a/tests/pos-macros/i10864a/Macro_1.scala b/tests/pos-macros/i10864a/Macro_1.scala new file mode 100644 index 000000000000..d1d5302544b4 --- /dev/null +++ b/tests/pos-macros/i10864a/Macro_1.scala @@ -0,0 +1,21 @@ +import scala.quoted._ + +case class T(t: Type[_]) + +object T { + def impl[T <: AnyKind](using tt: Type[T])(using Quotes): Expr[Unit] = { + val t = T(tt) + t.t match + case 
'[type x; x] => + assert(Type.show[x] == "scala.Int", Type.show[x]) + case '[type f[X]; f] => + assert(Type.show[f] == "[A >: scala.Nothing <: scala.Any] =>> scala.collection.immutable.List[A]", Type.show[f]) + case '[type f[X <: Int]; f] => + assert(Type.show[f] == "[T >: scala.Nothing <: scala.Int] =>> C[T]", Type.show[f]) + case '[type f <: AnyKind; f] => + assert(Type.show[f] == "[K >: scala.Nothing <: scala.Any, V >: scala.Nothing <: scala.Any] =>> scala.collection.immutable.Map[K, V]", Type.show[f]) + '{} + } + + inline def run[T <: AnyKind] = ${ impl[T] } +} diff --git a/tests/pos-macros/i10864a/Test_2.scala b/tests/pos-macros/i10864a/Test_2.scala new file mode 100644 index 000000000000..7a1596d0fa41 --- /dev/null +++ b/tests/pos-macros/i10864a/Test_2.scala @@ -0,0 +1,8 @@ +@main +def run = + T.run[Int] + T.run[C] + T.run[List] + T.run[Map] + +class C[T <: Int] diff --git a/tests/pos-special/i11331.1/Macro_1.scala b/tests/pos-macros/i11331a/Macro_1.scala similarity index 100% rename from tests/pos-special/i11331.1/Macro_1.scala rename to tests/pos-macros/i11331a/Macro_1.scala diff --git a/tests/pos-special/i11331.1/Test_2.scala b/tests/pos-macros/i11331a/Test_2.scala similarity index 100% rename from tests/pos-special/i11331.1/Test_2.scala rename to tests/pos-macros/i11331a/Test_2.scala diff --git a/tests/pos/i11415.scala b/tests/pos-macros/i11415.scala similarity index 100% rename from tests/pos/i11415.scala rename to tests/pos-macros/i11415.scala diff --git a/tests/pos-macros/i11738.scala b/tests/pos-macros/i11738.scala new file mode 100644 index 000000000000..e1213a5dee6d --- /dev/null +++ b/tests/pos-macros/i11738.scala @@ -0,0 +1,8 @@ +import scala.quoted.* + +def blah[A](using Quotes, Type[A]): Expr[Unit] = + Type.of[A] match + case '[h *: t] => println(s"h = ${Type.show[h]}, t = ${Type.show[t]}") // ok + case '[type f[X]; f[a]] => println(s"f = ${Type.show[f]}, a = ${Type.show[a]}") // error + case _ => + '{()} diff --git a/tests/pos/i12019.scala b/tests/pos-macros/i12019.scala similarity index 100% rename from tests/pos/i12019.scala rename to tests/pos-macros/i12019.scala diff --git a/tests/pos/i12802.scala b/tests/pos-macros/i12802.scala similarity index 100% rename from tests/pos/i12802.scala rename to tests/pos-macros/i12802.scala diff --git a/tests/pos-custom-args/i13405/Macro.scala b/tests/pos-macros/i13405/Macro.scala similarity index 100% rename from tests/pos-custom-args/i13405/Macro.scala rename to tests/pos-macros/i13405/Macro.scala diff --git a/tests/pos-macros/i13405/Test.scala b/tests/pos-macros/i13405/Test.scala new file mode 100644 index 000000000000..b698bd875642 --- /dev/null +++ b/tests/pos-macros/i13405/Test.scala @@ -0,0 +1,3 @@ +//> using options -Xfatal-warnings + +@main def main: Unit = hh() diff --git a/tests/pos-macros/i14708.scala b/tests/pos-macros/i14708.scala new file mode 100644 index 000000000000..bf8330d34746 --- /dev/null +++ b/tests/pos-macros/i14708.scala @@ -0,0 +1,10 @@ +import scala.quoted.* + +object Main { + def foo(a: Expr[Any])(using Quotes) = { + a match { + case '{ ($x: Set[t]).toSet } => + case _ => + } + } +} diff --git a/tests/pos-macros/i15413/Macro_1.scala b/tests/pos-macros/i15413/Macro_1.scala new file mode 100644 index 000000000000..56fd4f0f0887 --- /dev/null +++ b/tests/pos-macros/i15413/Macro_1.scala @@ -0,0 +1,10 @@ +//> using options -Werror -WunstableInlineAccessors + +import scala.quoted.* +import scala.annotation.publicInBinary + +class Macro: + inline def foo = ${ Macro.fooImpl } + +object Macro: + @publicInBinary 
private[Macro] def fooImpl(using Quotes) = '{} diff --git a/tests/pos-macros/i15413/Test_2.scala b/tests/pos-macros/i15413/Test_2.scala new file mode 100644 index 000000000000..a8310a8970fd --- /dev/null +++ b/tests/pos-macros/i15413/Test_2.scala @@ -0,0 +1,2 @@ +def test = + new Macro().foo diff --git a/tests/pos-macros/i15413b/Macro_1.scala b/tests/pos-macros/i15413b/Macro_1.scala new file mode 100644 index 000000000000..c1e9bab422f8 --- /dev/null +++ b/tests/pos-macros/i15413b/Macro_1.scala @@ -0,0 +1,10 @@ +//> using options -Werror -WunstableInlineAccessors + +package bar + +import scala.quoted.* +import scala.annotation.publicInBinary + +inline def foo = ${ fooImpl } + +@publicInBinary private[bar] def fooImpl(using Quotes) = '{} diff --git a/tests/pos-macros/i15413b/Test_2.scala b/tests/pos-macros/i15413b/Test_2.scala new file mode 100644 index 000000000000..5fc688c79b68 --- /dev/null +++ b/tests/pos-macros/i15413b/Test_2.scala @@ -0,0 +1 @@ +def test = bar.foo diff --git a/tests/pos/i15650.scala b/tests/pos-macros/i15650.scala similarity index 100% rename from tests/pos/i15650.scala rename to tests/pos-macros/i15650.scala diff --git a/tests/pos-macros/i16522.scala b/tests/pos-macros/i16522.scala new file mode 100644 index 000000000000..0a94931ec1a7 --- /dev/null +++ b/tests/pos-macros/i16522.scala @@ -0,0 +1,16 @@ + import scala.quoted.* + + sealed trait HList + case class HCons[+HD, TL <: HList](hd: HD, tl: TL) extends HList + case object HNil extends HList + + def showFirstTwoImpl(e: Expr[HList])(using Quotes): Expr[String] = { + e match { + case '{HCons($h1, HCons($h2, $_))} => '{$h1.toString ++ $h2.toString} + case '{type tl <: HList; HCons($h1: hd1, HCons($h2: hd2, $_ : tl))} => '{$h1.toString ++ $h2.toString} // error + case '{HCons[hd, HCons[sd, tl]]($h1, HCons($h2, $_))} => '{$h1.toString ++ $h2.toString} + case _ => '{""} + } + } + + transparent inline def showFirstTwo(inline xs: HList) = ${ showFirstTwoImpl('xs) } diff --git a/tests/pos-macros/i16630.scala b/tests/pos-macros/i16630.scala new file mode 100644 index 000000000000..50e80ed996c1 --- /dev/null +++ b/tests/pos-macros/i16630.scala @@ -0,0 +1,14 @@ +import scala.language.experimental.macros +import scala.quoted.{Quotes, Expr, Type} + +trait TraitWithTypeParam[A]: + inline def foo: Option[A] = ${ MacrosImpl.fooImpl[A] } + def foo: Option[A] = macro MacrosImpl.compatFooImpl[A] + +object MacrosImpl: + def fooImpl[A: Type](using quotes: Quotes): Expr[Option[A]] = ??? + def compatFooImpl[A: c.WeakTypeTag](c: Context): c.Tree = ??? 
+ +trait Context: + type WeakTypeTag[A] + type Tree \ No newline at end of file diff --git a/tests/pos-macros/i16960/Macro_1.scala b/tests/pos-macros/i16960/Macro_1.scala new file mode 100644 index 000000000000..664623a82c41 --- /dev/null +++ b/tests/pos-macros/i16960/Macro_1.scala @@ -0,0 +1,29 @@ +import scala.quoted.* + +inline def myMacro = ${ myMacroImpl } +def myMacroImpl(using Quotes) = + import quotes.reflect.* + + val valSym = Symbol.newVal(Symbol.spliceOwner, "foo", TypeRepr.of[Int], Flags.EmptyFlags, Symbol.noSymbol) + val vdef = ValDef(valSym, None) + vdef match + case _: ValDef => + assert(vdef.tpt.tpe =:= TypeRepr.of[Int]) + assert(vdef.rhs == None) + vdef match + case vdef: ValOrDefDef => + assert(vdef.tpt.tpe =:= TypeRepr.of[Int]) + assert(vdef.rhs == None) + + val methSym = Symbol.newMethod(Symbol.spliceOwner, "bar", ByNameType(TypeRepr.of[Int])) + val ddef = DefDef(methSym, _ => None) + ddef match + case _: DefDef => + assert(ddef.tpt.tpe =:= TypeRepr.of[Int]) + assert(ddef.rhs == None) + ddef match + case ddef: ValOrDefDef => + assert(ddef.tpt.tpe =:= TypeRepr.of[Int]) + assert(ddef.rhs == None) + + '{} diff --git a/tests/pos-macros/i16960/Test_2.scala b/tests/pos-macros/i16960/Test_2.scala new file mode 100644 index 000000000000..76a9e17659db --- /dev/null +++ b/tests/pos-macros/i16960/Test_2.scala @@ -0,0 +1 @@ +def test = myMacro diff --git a/tests/pos-macros/i17103c/Macro_1.scala b/tests/pos-macros/i17103c/Macro_1.scala new file mode 100644 index 000000000000..cb8cf43d44a8 --- /dev/null +++ b/tests/pos-macros/i17103c/Macro_1.scala @@ -0,0 +1,16 @@ +import scala.quoted.* + +inline def test = ${ testExpr } + +def testExpr(using Quotes): Expr[Unit] = + '{ + trait C + val c: C = ??? + ${ + val expr = '{ + val cRef: c.type = ??? + () + } + expr + } + } diff --git a/tests/pos-macros/i17103c/Test_2.scala b/tests/pos-macros/i17103c/Test_2.scala new file mode 100644 index 000000000000..ce2b73e5adf2 --- /dev/null +++ b/tests/pos-macros/i17103c/Test_2.scala @@ -0,0 +1 @@ +def Test = test diff --git a/tests/pos-macros/i17294/Bar.scala b/tests/pos-macros/i17294/Bar.scala new file mode 100644 index 000000000000..8c99b69b50c7 --- /dev/null +++ b/tests/pos-macros/i17294/Bar.scala @@ -0,0 +1,6 @@ +import scala.quoted.* + +class Bar[T] +object Bar: + transparent inline def bar[T](a: Foo, b: a.Out): Bar[T] = ${ getBarMacro[T] } + def getBarMacro[T](using Quotes, Type[T]): Expr[Bar[T]] = '{ new Bar[T] } \ No newline at end of file diff --git a/tests/pos-macros/i17294/Foo.scala b/tests/pos-macros/i17294/Foo.scala new file mode 100644 index 000000000000..b25d6416727e --- /dev/null +++ b/tests/pos-macros/i17294/Foo.scala @@ -0,0 +1,3 @@ +class Foo: + type Out = Int +val a = Bar.bar(new Foo(), 0) \ No newline at end of file diff --git a/tests/pos-macros/i17610/Macros.scala b/tests/pos-macros/i17610/Macros.scala new file mode 100644 index 000000000000..974bf3a53a59 --- /dev/null +++ b/tests/pos-macros/i17610/Macros.scala @@ -0,0 +1,34 @@ +// Macros.scala +import Main._ +import scala.quoted.* + +object Macros { + inline def apply(): ProviderProcessor = + ${ Macros.processorExpr } + + def processorExpr[I: Type](using q: Quotes): Expr[ProviderProcessor] = '{ + new ProviderProcessor { + override def apply(simple: Simple): MyF[Int] = + ${ Macros.methodProcessorImpl('simple) } + } + } + + def methodProcessorImpl[I: Type](using q: Quotes)(service: Expr[Simple]): Expr[MyF[Int]] = { + import q.reflect._ + + val returnType = TypeRepr.of[Int] + returnType.asType match { + case '[rt] => + '{ + ${ + 
import quotes.reflect._ + TypeApply( + Select.unique('{ ???.asInstanceOf[Codec] }.asTerm, "apply"), + List(TypeTree.of[rt]) // generates the error, directly using Int instead of rt makes it disappear + ).asExpr + } + ??? + } + } + } +} diff --git a/tests/pos-macros/i17610/Test.scala b/tests/pos-macros/i17610/Test.scala new file mode 100644 index 000000000000..7c745537e0c9 --- /dev/null +++ b/tests/pos-macros/i17610/Test.scala @@ -0,0 +1,20 @@ +// Main.scala +object Main { + type MyF[A] + + trait ProviderProcessor { + def apply(simple: Simple): MyF[Int] + } + + trait Codec { + def apply[A]: MyF[A] + } + + trait Simple { + def a0: Int + } + + def test(): Unit = { + val p= Macros() + } +} diff --git a/tests/pos-macros/i18059/Macro_1.scala b/tests/pos-macros/i18059/Macro_1.scala new file mode 100644 index 000000000000..4c3f89281883 --- /dev/null +++ b/tests/pos-macros/i18059/Macro_1.scala @@ -0,0 +1,29 @@ +import scala.annotation.StaticAnnotation + +class SqlName(val sqlName: String) extends StaticAnnotation + +import scala.compiletime.* +import scala.quoted.* + +inline def sqlFieldNamesFor[T]: Vector[(String, String)] = ${ + sqlFieldNamesForImpl[T] +} + +private def sqlFieldNamesForImpl[T: Type](using + Quotes // must be named!! like `q: Quotes` +): Expr[Vector[(String, String)]] = + import quotes.reflect.* + val annot = TypeRepr.of[SqlName].typeSymbol + val tuples: Seq[Expr[(String, String)]] = TypeRepr + .of[T] + .typeSymbol + .primaryConstructor + .paramSymss + .head + .collect: + case sym if sym.hasAnnotation(annot) => + val fieldNameExpr = Expr(sym.name.asInstanceOf[String]) + val annotExpr = sym.getAnnotation(annot).get.asExprOf[SqlName] + '{ ($fieldNameExpr, $annotExpr.sqlName) } + val seq: Expr[Seq[(String, String)]] = Expr.ofSeq(tuples) + '{ $seq.toVector } diff --git a/tests/pos-macros/i18059/Test_2.scala b/tests/pos-macros/i18059/Test_2.scala new file mode 100644 index 000000000000..223cf626be87 --- /dev/null +++ b/tests/pos-macros/i18059/Test_2.scala @@ -0,0 +1,8 @@ +case class AppUser( + id: Long, + firstName: Option[String], + @SqlName("last_name") lastName: String +) + +def hello: Unit = + println(sqlFieldNamesFor[AppUser]) // Vector((lastName, last_name)) diff --git a/tests/pos-macros/i18123.scala b/tests/pos-macros/i18123.scala new file mode 100644 index 000000000000..d9127a918ee8 --- /dev/null +++ b/tests/pos-macros/i18123.scala @@ -0,0 +1,27 @@ +// may not compile anymore in Scala 3.4+ +package pkg + +import scala.language.`3.3` + +trait P[+T] + +extension [T](inline parse0: P[T]) + inline def | [V >: T](inline other: P[V]): P[V] = ??? + +extension [T](inline parse0: => P[T]) + inline def rep[V](inline min: Int = 0)(using repeater: Implicits.Repeater[T, V]): P[V] = ??? + +object Implicits: + trait Repeater[-T, R] + object Repeater: + implicit def GenericRepeaterImplicit[T]: Repeater[T, Seq[T]] = ??? + +sealed trait RegexTree +abstract class Node extends RegexTree +class CharClassIntersection() extends Node + +def classItem: P[RegexTree] = ??? +def charClassIntersection: P[CharClassIntersection] = ??? 
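The `sqlFieldNamesFor` macro in i18059 above walks the primary constructor's parameter symbols and filters them by annotation. The sketch below isolates those reflection steps (`primaryConstructor`, `paramSymss`, `hasAnnotation`); the `Label` annotation and the `labelledFields` name are hypothetical, not part of the test:

```scala
import scala.annotation.StaticAnnotation
import scala.quoted.*

class Label(val text: String) extends StaticAnnotation

def labelledFields[T: Type](using Quotes): List[String] =
  import quotes.reflect.*
  val labelSym = TypeRepr.of[Label].typeSymbol
  TypeRepr.of[T].typeSymbol
    .primaryConstructor.paramSymss.flatten // constructor parameter symbols
    .collect { case p if p.hasAnnotation(labelSym) => p.name }
```

As the test itself shows, `p.getAnnotation(labelSym).get.asExprOf[Label]` would additionally recover the annotation tree as a typed expression.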
+ +def x = + (charClassIntersection.rep() | classItem.rep()) diff --git a/tests/pos-macros/i18125.scala b/tests/pos-macros/i18125.scala new file mode 100644 index 000000000000..8c5504cafacc --- /dev/null +++ b/tests/pos-macros/i18125.scala @@ -0,0 +1,10 @@ +import scala.quoted.* + +final class Foo[T](ns: T) + +def foo(using Quotes)(x: Expr[Any]): Unit = + x match + case '{ new Foo($y: b) } => + case '{ new Foo($y: List[b]) } => + case '{ type b; new Foo($y: b) } => + diff --git a/tests/pos-macros/i18125b.scala b/tests/pos-macros/i18125b.scala new file mode 100644 index 000000000000..47e687d0a4a6 --- /dev/null +++ b/tests/pos-macros/i18125b.scala @@ -0,0 +1,23 @@ +package oolong.phobos + +import scala.quoted.* +import scala.compiletime.* +import scala.annotation.StaticAnnotation + +final class xmlns[T](ns: T) extends StaticAnnotation +trait Namespace[T]{ + val getNamespace: String +} + +object common{ + private def extractFeildNamespace(using Quotes)( + fieldAnnotations: List[Expr[Any]], + ): Expr[Option[String]] = { + import quotes.reflect.* + + fieldAnnotations.collect { case '{ xmlns($namespace: b) } => + '{ Some(summonInline[Namespace[b]].getNamespace) } + } + ??? + } +} diff --git a/tests/pos-macros/i18155/Macro_1.scala b/tests/pos-macros/i18155/Macro_1.scala new file mode 100644 index 000000000000..598d739bc02d --- /dev/null +++ b/tests/pos-macros/i18155/Macro_1.scala @@ -0,0 +1,15 @@ +import scala.quoted.* + +object Macro: + transparent inline def foo: Any = ${ fooImpl } + + def fooImpl(using Quotes): Expr[Any] = + import quotes.reflect.* + '{ + val xxx = ${ + Type.of[Int] match + case '[tpe] => + Typed(Expr(1).asTerm, TypeTree.of[tpe]).asExpr + } + xxx + } diff --git a/tests/pos-macros/i18155/Test_2.scala b/tests/pos-macros/i18155/Test_2.scala new file mode 100644 index 000000000000..743643da5aba --- /dev/null +++ b/tests/pos-macros/i18155/Test_2.scala @@ -0,0 +1,2 @@ +@main def run() = + println(Macro.foo) diff --git a/tests/pos-macros/i18197a.scala b/tests/pos-macros/i18197a.scala new file mode 100644 index 000000000000..05c997158cf8 --- /dev/null +++ b/tests/pos-macros/i18197a.scala @@ -0,0 +1,7 @@ +import scala.quoted.* + +def readPathMacro[A: Type, B: Type](expr: Expr[Any])(using Quotes) = + expr match + case '{ foo($y) } => y: Expr[Int ?=> Int] + +def foo(x: Int ?=> Int): Any = ??? diff --git a/tests/pos-macros/i18197b/Macro.scala b/tests/pos-macros/i18197b/Macro.scala new file mode 100644 index 000000000000..9afe96a6d74c --- /dev/null +++ b/tests/pos-macros/i18197b/Macro.scala @@ -0,0 +1,18 @@ +import scala.quoted.* + +object ReproMacro { + inline def readPath[A, B](inline config: Config[A, B]) = ${ readPathMacro[A, B]('config) } + + def readPathMacro[A: Type, B: Type](expr: Expr[Config[A, B]])(using Quotes) = { + import quotes.reflect.report + + expr match { + case '{ Field.const[a, b, tpe]($selector) } => + val selector2: Expr[Selector ?=> a => tpe] = selector + report.info(s"Matched!") + '{} + case other => + report.errorAndAbort("woops, I did not match") + } + } +} diff --git a/tests/pos-macros/i18197b/Test.scala b/tests/pos-macros/i18197b/Test.scala new file mode 100644 index 000000000000..908e58cf3e94 --- /dev/null +++ b/tests/pos-macros/i18197b/Test.scala @@ -0,0 +1,19 @@ +trait Selector { + extension [A](self: A) def at[B <: A]: B +} + +trait Config[A, B] + +object Field { + def const[A, B, FieldTpe](selector: Selector ?=> A => FieldTpe): Config[A, B] = ??? 
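The i18125b extractor above quotes a `summonInline` call so that the `Namespace[b]` instance is resolved where the macro expands rather than where it is compiled. A standalone sketch of that deferral technique, with a hypothetical `Show` typeclass:

```scala
import scala.compiletime.summonInline
import scala.quoted.*

trait Show[A]:
  def show(a: A): String

def showExpr[A: Type](value: Expr[A])(using Quotes): Expr[String] =
  // Resolution of Show[A] happens at the expansion site, so callers
  // only need an instance in scope where the macro is used.
  '{ summonInline[Show[A]].show($value) }
```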
+} + +final case class Example(int: Int) + +@main def main = { + // compiles just fine + ReproMacro.readPath[Example, Example](Field.const(_.int)) + + // doesn't compile + ReproMacro.readPath[Example, Example](Field.const(_.int.at[Int])) +} diff --git a/tests/pos-macros/i18228.scala b/tests/pos-macros/i18228.scala new file mode 100644 index 000000000000..f0b8226fc135 --- /dev/null +++ b/tests/pos-macros/i18228.scala @@ -0,0 +1,13 @@ +import scala.quoted.* + +case class QueryMeta[T](map: Map[String, String]) + +object QueryMeta: + given [T: Type]: FromExpr[QueryMeta[T]] = new FromExpr[QueryMeta[T]]: + def unapply(expr: Expr[QueryMeta[T]])(using q: Quotes): Option[QueryMeta[T]] = + import q.reflect.* + expr match + case '{ QueryMeta[t](${ map }: Map[String, String]) } => + map.value.map(QueryMeta[T].apply) + case _ => + None diff --git a/tests/pos-macros/i18228b.scala b/tests/pos-macros/i18228b.scala new file mode 100644 index 000000000000..977fc93e9846 --- /dev/null +++ b/tests/pos-macros/i18228b.scala @@ -0,0 +1,13 @@ +import scala.quoted.* + +case class QueryMeta[T](map: Map[String, String]) + +object QueryMeta: + given [T]: FromExpr[QueryMeta[T]] = new FromExpr[QueryMeta[T]]: + def unapply(expr: Expr[QueryMeta[T]])(using q: Quotes): Option[QueryMeta[T]] = + import q.reflect.* + expr match + case '{ QueryMeta[t](${ map }: Map[String, String]) } => + map.value.map(QueryMeta[T].apply) + case _ => + None diff --git a/tests/pos-macros/i18250.scala b/tests/pos-macros/i18250.scala new file mode 100644 index 000000000000..64d34cda85d8 --- /dev/null +++ b/tests/pos-macros/i18250.scala @@ -0,0 +1,6 @@ +import scala.quoted.* + +def test(x: Expr[Any])(using Quotes): Unit = + x match + case '{ type t; type u <: t; () } => + case '{ type t <: Comparable[t]; () } => diff --git a/tests/pos-macros/i18353/Macro_1.scala b/tests/pos-macros/i18353/Macro_1.scala new file mode 100644 index 000000000000..d0f5dd84ea66 --- /dev/null +++ b/tests/pos-macros/i18353/Macro_1.scala @@ -0,0 +1,64 @@ +import scala.compiletime.* +import scala.deriving.* +import scala.quoted.* + +trait Getter[S, A]: + def view: S => A + +trait Lens[S, A] extends Getter[S, A]: + def set: S => A => S + +object Lens { + inline def apply[S, A](_view: S => A)(_set: S => A => S): Lens[S, A] = + new Lens[S, A]: + def view: S => A = _view + def set: S => A => S = _set + + inline given derived[T <: Product, A]: Lens[T, A] = ${ + ProductMacros.genLens[T, A] + } +} + +object ProductMacros { + private def indexOf[T: Type, A: Type](using Quotes): Int = + indexOf0[T, A](0) + + private def indexOf0[T: Type, A: Type](acc: Int)(using Quotes): Int = + Type.of[T] match + case '[EmptyTuple] => -1 + case '[A *: tpes] => acc + case '[tpe *: tpes] => indexOf0[tpes, A](acc + 1) + + def genLens[T <: Product: Type, A: Type](using + q: Quotes + ): Expr[Lens[T, A]] = { + import quotes.reflect.* + + Expr + .summon[Mirror.ProductOf[T]] + .map { + case '{ + $m: Mirror.ProductOf[T] { type MirroredElemTypes = elementTypes } + } => + val i = indexOf[elementTypes, A] + if i < 0 then + report.errorAndAbort(s"has no the field of ${Type.show[A]}") + else + val ii: Expr[Int] = Expr(i) + val view: Expr[T => A] = '{ t => + t.productElement($ii).asInstanceOf[A] + } + val set: Expr[T => A => T] = '{ t => a => + val arr = Tuple.fromProduct(t).toArray + arr($ii) = a.asInstanceOf[Object] + // Check-macros fails here probably + $m.fromTuple(Tuple.fromArray(arr).asInstanceOf[elementTypes]) + } + '{ Lens[T, A]($view)($set) } + } + .getOrElse( + report.errorAndAbort(s"${Type.show[T]} is 
not a product type") + ) + + } +} diff --git a/tests/pos-macros/i18353/Test_2.scala b/tests/pos-macros/i18353/Test_2.scala new file mode 100644 index 000000000000..08a79953097e --- /dev/null +++ b/tests/pos-macros/i18353/Test_2.scala @@ -0,0 +1,7 @@ +def Test = { + + type TupleConfig = (Int, String) + + val tConfig = (1, "string") + val fails = summon[Lens[TupleConfig, Int]].view(tConfig) +} diff --git a/tests/pos-macros/i18358/Macro_1.scala b/tests/pos-macros/i18358/Macro_1.scala new file mode 100644 index 000000000000..7471bca97449 --- /dev/null +++ b/tests/pos-macros/i18358/Macro_1.scala @@ -0,0 +1,4 @@ +import scala.quoted.* +inline def generateCode: Unit = ${ testLocalDummyOwner } +def testLocalDummyOwner(using Quotes): Expr[Unit] = '{ trait E { $valInBlock } } +def valInBlock(using Quotes): Expr[Unit] = '{ val x: Int = 2; () } diff --git a/tests/pos-macros/i18358/Test_2.scala b/tests/pos-macros/i18358/Test_2.scala new file mode 100644 index 000000000000..7110731760a6 --- /dev/null +++ b/tests/pos-macros/i18358/Test_2.scala @@ -0,0 +1 @@ +def test = generateCode diff --git a/tests/pos-macros/i18393/Inline_2.scala b/tests/pos-macros/i18393/Inline_2.scala new file mode 100644 index 000000000000..66fc9b9a135a --- /dev/null +++ b/tests/pos-macros/i18393/Inline_2.scala @@ -0,0 +1,8 @@ +package user + +import defn.Macro + +object Inline extends Macro { + inline def callMacro(): Int = + ${ this.impl() } +} diff --git a/tests/pos-macros/i18393/Macro_1.scala b/tests/pos-macros/i18393/Macro_1.scala new file mode 100644 index 000000000000..fe99162bdf84 --- /dev/null +++ b/tests/pos-macros/i18393/Macro_1.scala @@ -0,0 +1,7 @@ +package defn + +import scala.quoted.* + +abstract class Macro { + def impl()(using Quotes): Expr[Int] = '{1} +} diff --git a/tests/pos-macros/i18393/Test_2.scala b/tests/pos-macros/i18393/Test_2.scala new file mode 100644 index 000000000000..666bf5fd379f --- /dev/null +++ b/tests/pos-macros/i18393/Test_2.scala @@ -0,0 +1,5 @@ +package user + +object Test { + Inline.callMacro() +} diff --git a/tests/pos-macros/i18409.scala b/tests/pos-macros/i18409.scala new file mode 100644 index 000000000000..800e192b81bb --- /dev/null +++ b/tests/pos-macros/i18409.scala @@ -0,0 +1,26 @@ +//> using options -Werror -Wunused:all + +import scala.quoted.* + +object model { + trait Transformer[Source, Dest] { + def transform(from: Source): Dest + } + object Transformer { + trait ForProduct[A, B] extends Transformer[A, B] + } +} + +object Ops { + import model.Transformer // unused import false-positive + + def unapply(using Quotes)(term: quotes.reflect.Term): Option[String] = { + term.asExpr match { + case '{ + ($transformer: Transformer.ForProduct[a, b]).transform($appliedTo) + } => + Some("") + case other => None + } + } +} diff --git a/tests/pos-macros/i18855/invoc.scala b/tests/pos-macros/i18855/invoc.scala new file mode 100644 index 000000000000..2923abd4836a --- /dev/null +++ b/tests/pos-macros/i18855/invoc.scala @@ -0,0 +1,3 @@ +import scala.language.experimental.captureChecking +val x = run() + diff --git a/tests/pos-macros/i18855/macro.scala b/tests/pos-macros/i18855/macro.scala new file mode 100644 index 000000000000..1c0ee28effbe --- /dev/null +++ b/tests/pos-macros/i18855/macro.scala @@ -0,0 +1,7 @@ +import scala.quoted.* +import scala.language.experimental.captureChecking + +def impl()(using Quotes): Expr[Unit] = '{()} +inline def run(): Unit = ${impl()} + + diff --git a/tests/pos-macros/i18911/Macros_1.scala b/tests/pos-macros/i18911/Macros_1.scala new file mode 100644 index 
000000000000..677610fd9536 --- /dev/null +++ b/tests/pos-macros/i18911/Macros_1.scala @@ -0,0 +1,91 @@ +import scala.quoted._ +import scala.compiletime.testing.{typeChecks, typeCheckErrors} + +trait Assertion +trait Bool { + def value: Boolean +} +class SimpleMacroBool(expression: Boolean) extends Bool { + override def value: Boolean = expression +} +class BinaryMacroBool(left: Any, operator: String, right: Any, expression: Boolean) extends Bool { + override def value: Boolean = expression +} +object Bool { + def simpleMacroBool(expression: Boolean): Bool = new SimpleMacroBool(expression) + def binaryMacroBool(left: Any, operator: String, right: Any, expression: Boolean): Bool = + new BinaryMacroBool(left, operator, right, expression) + def binaryMacroBool(left: Any, operator: String, right: Any, bool: Bool): Bool = + new BinaryMacroBool(left, operator, right, bool.value) +} + +object Assertions { + inline def assert(inline condition: Boolean): Assertion = + ${ AssertionsMacro.assert('{ condition }) } +} + +object AssertionsMacro { + def assert(condition: Expr[Boolean])(using Quotes): Expr[Assertion] = + transform(condition) + + def transform( + condition: Expr[Boolean] + )(using Quotes): Expr[Assertion] = { + val bool = BooleanMacro.parse(condition) + '{ + new Assertion { + val condition = $bool + } + } + } +} + +object BooleanMacro { + private val supportedBinaryOperations = + Set("!=", "==") + + def parse(condition: Expr[Boolean])(using Quotes): Expr[Bool] = { + import quotes.reflect._ + import quotes.reflect.ValDef.let + import util._ + + def exprStr: String = condition.show + def defaultCase = '{ Bool.simpleMacroBool($condition) } + + def isByNameMethodType(tp: TypeRepr): Boolean = tp.widen match { + case MethodType(_, ByNameType(_) :: Nil, _) => true + case _ => false + } + + condition.asTerm.underlyingArgument match { // WARNING: unsound use of `underlyingArgument` + case Apply(sel @ Select(lhs, op), rhs :: Nil) => + def binaryDefault = + if (isByNameMethodType(sel.tpe)) defaultCase + else if (supportedBinaryOperations.contains(op)) { + let(Symbol.spliceOwner, lhs) { left => + let(Symbol.spliceOwner, rhs) { right => + val app = left.select(sel.symbol).appliedTo(right) + let(Symbol.spliceOwner, app) { result => + val l = left.asExpr + val r = right.asExpr + val b = result.asExprOf[Boolean] + val code = '{ Bool.binaryMacroBool($l, ${ Expr(op) }, $r, $b) } + code.asTerm + } + } + }.asExprOf[Bool] + } else defaultCase + + op match { + case "==" => binaryDefault + case _ => binaryDefault + } + + case Literal(_) => + '{ Bool.simpleMacroBool($condition) } + + case _ => + defaultCase + } + } +} diff --git a/tests/pos-macros/i18911/Test_2.scala b/tests/pos-macros/i18911/Test_2.scala new file mode 100644 index 000000000000..5c253d5e8e1b --- /dev/null +++ b/tests/pos-macros/i18911/Test_2.scala @@ -0,0 +1,5 @@ +@main def Test = { + case class Document() + val expected: Document = ??? 
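The `BooleanMacro.parse` transformation above relies on `ValDef.let` to bind each operand once before re-applying the comparison, so operand side effects are not duplicated in the generated assertion. A smaller, hypothetical use of the same helper:

```scala
import scala.quoted.*

inline def twice(inline n: Int): Int = ${ twiceImpl('n) }

def twiceImpl(e: Expr[Int])(using Quotes): Expr[Int] =
  import quotes.reflect.*
  // let(owner, rhs)(body) emits `{ val fresh = rhs; body(fresh) }`,
  // so `e` is evaluated exactly once even though the result uses it twice.
  ValDef.let(Symbol.spliceOwner, e.asTerm) { ref =>
    '{ ${ ref.asExprOf[Int] } + ${ ref.asExprOf[Int] } }.asTerm
  }.asExprOf[Int]
```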
+ Assertions.assert( expected == Document()) // error +} diff --git a/tests/pos/i6693.scala b/tests/pos-macros/i6693.scala similarity index 100% rename from tests/pos/i6693.scala rename to tests/pos-macros/i6693.scala diff --git a/tests/pos-macros/i7264.scala b/tests/pos-macros/i7264.scala index c87409561bee..82264402c768 100644 --- a/tests/pos-macros/i7264.scala +++ b/tests/pos-macros/i7264.scala @@ -3,5 +3,6 @@ class Foo { def f[T2](t: Type[T2])(using Quotes) = t match { case '[ *:[Int, t2] ] => Type.of[ *:[Int, t2] ] + case '[ type t <: Tuple; *:[t, t] ] => } } diff --git a/tests/pos-special/i7592/Macros_1.scala b/tests/pos-macros/i7592/Macros_1.scala similarity index 100% rename from tests/pos-special/i7592/Macros_1.scala rename to tests/pos-macros/i7592/Macros_1.scala diff --git a/tests/pos-macros/i7592/Test_2.scala b/tests/pos-macros/i7592/Test_2.scala new file mode 100644 index 000000000000..84b5e52d8da4 --- /dev/null +++ b/tests/pos-macros/i7592/Test_2.scala @@ -0,0 +1,7 @@ +//> using options -Yretain-trees + +def return1 = 1 + +def testReturn1 = { + assert(1 == compile(return1)) +} diff --git a/tests/pos/i9361.scala b/tests/pos-macros/i9361.scala similarity index 100% rename from tests/pos/i9361.scala rename to tests/pos-macros/i9361.scala diff --git a/tests/pos-macros/macro-deprecation.scala b/tests/pos-macros/macro-deprecation.scala deleted file mode 100644 index ff14f96ac7fa..000000000000 --- a/tests/pos-macros/macro-deprecation.scala +++ /dev/null @@ -1,4 +0,0 @@ -import scala.quoted.* - -inline def f = ${ impl } // error -@deprecated def impl(using Quotes) = '{1} diff --git a/tests/pos-macros/mirrorQuotePattern.scala b/tests/pos-macros/mirrorQuotePattern.scala new file mode 100644 index 000000000000..1b3b77591339 --- /dev/null +++ b/tests/pos-macros/mirrorQuotePattern.scala @@ -0,0 +1,13 @@ +import scala.deriving._ +import scala.quoted._ + +private def derivedExpr[T](mirrorExpr: Expr[Mirror.Of[T]])(using Quotes, Type[T]): Expr[Any] = { + mirrorExpr match { + case '{ $mirrorExpr : Mirror.Sum { type MirroredElemTypes = mirroredElemTypes } } => + '{ liftableSum[mirroredElemTypes]($mirrorExpr) } + case '{ type mirroredElemTypes; $mirrorExpr : Mirror.Sum { type MirroredElemTypes = mirroredElemTypes } } => + '{ liftableSum[mirroredElemTypes]($mirrorExpr) } + } +} + +def liftableSum[MElemTypes](mirror: Mirror.Sum { type MirroredElemTypes = MElemTypes }): Any = ??? diff --git a/tests/pos-macros/mirrorQuotePattern2.scala b/tests/pos-macros/mirrorQuotePattern2.scala new file mode 100644 index 000000000000..a123cc28907f --- /dev/null +++ b/tests/pos-macros/mirrorQuotePattern2.scala @@ -0,0 +1,10 @@ +import scala.deriving._ +import scala.quoted._ + +private def derivedExpr(x: Expr[Any])(using Quotes): Unit = + x match + case '{ type mtp1; ($m1 : Mirror.Sum { type MirroredElemTypes = mtp1 } & Mirror.Of[Any], $m2 : Mirror.Sum { type MirroredElemTypes = mtp2 } & Mirror.Of[Any]); ??? 
} => + val _: Expr[Mirror.Sum { type MirroredElemTypes = mtp1 } & Mirror.Of[Any]] = m1 + val _: Expr[Mirror.Sum { type MirroredElemTypes = mtp2 } & Mirror.Of[Any]] = m2 + '{ $m1: Mirror.Sum { type MirroredElemTypes = mtp1 } } + '{ $m2: Mirror.Sum { type MirroredElemTypes = mtp2 } } diff --git a/tests/pos-macros/multiline-quote-patterns.scala b/tests/pos-macros/multiline-quote-patterns.scala new file mode 100644 index 000000000000..a1f1649b6059 --- /dev/null +++ b/tests/pos-macros/multiline-quote-patterns.scala @@ -0,0 +1,62 @@ +import scala.quoted.* +def types(t: Type[?])(using Quotes) = t match { + case '[ + type t; + t + ] => + + case '[ + type t + t + ] => + + case '[ + type t + List[t] + ] => + + case '[ + type t; + type u; + Map[t, u] + ] => + + case '[ + type t + type u + Map[t, u] + ] => + + case '[ + type t; type u + t => u + ] => +} + +def expressions(x: Expr[Any])(using Quotes) = x match { + case '{ + type t; + $x: t + } => + + case '{ + type t + $x: t + } => + + case '{ + type t; + List() + } => + + case '{ + type t + List() + } => + + case '{ + type t + type u + Map.empty[t, u] + } => +} diff --git a/tests/pos-macros/quote-pattern-type-variable-bounds.scala b/tests/pos-macros/quote-pattern-type-variable-bounds.scala new file mode 100644 index 000000000000..c342783664d5 --- /dev/null +++ b/tests/pos-macros/quote-pattern-type-variable-bounds.scala @@ -0,0 +1,12 @@ +import quoted.* + +def foo(using Quotes)(x: Expr[Int]) = + x match + case '{ type t; type u <: `t`; f[`t`, `u`] } => + case '{ type u <: `t`; type t; f[`t`, `u`] } => + case '{ type t; type u <: `t`; g[F[`t`, `u`]] } => + case '{ type u <: `t`; type t; g[F[`t`, `u`]] } => + +def f[T, U <: T] = ??? +def g[T] = ??? +type F[T, U <: T] diff --git a/tests/pos-macros/quote-pattern-type-variable-no-escape.scala b/tests/pos-macros/quote-pattern-type-variable-no-escape.scala new file mode 100644 index 000000000000..06a53b68e793 --- /dev/null +++ b/tests/pos-macros/quote-pattern-type-variable-no-escape.scala @@ -0,0 +1,12 @@ +import scala.quoted.* + +def foo[T: Type](expr: Expr[Any])(using Quotes): Any = + expr match + case '{ $x: Map[t, t] } => + case '{ type t; $x: Any } => + case '{ type t; $x: Map[t, t] } => + case '{ ($x: Set[t]).toSet[t] } => + + Type.of[T] match + case '[Map[t, t]] => + case '[(t, t, t, t, t, t, t)] => diff --git a/tests/pos-macros/quoted-with-precise-types.scala b/tests/pos-macros/quoted-with-precise-types.scala new file mode 100644 index 000000000000..e9620810377e --- /dev/null +++ b/tests/pos-macros/quoted-with-precise-types.scala @@ -0,0 +1,41 @@ +import scala.quoted.* + +def withType[T <: AnyKind, U](tpe: Type[T])(body: [X <: T] => Type[X] ?=> U)(using Quotes): U = + type X <: T + val tpeX: Type[X] = tpe.asInstanceOf[Type[X]] + body[X](using tpeX) + +def test1(t1: Type[?], t2: Type[? <: Any])(using Quotes) = + withType(t1) { [T <: AnyKind] => _ ?=> // TODO remove _ ?=> // Implementation restriction: polymorphic function literals must have a value parameter + Type.of[T] + Type.show[T] + } + withType(t2) { [T] => _ ?=> // TODO remove _ ?=> + '{ val a: T = ??? } + Type.of[T] + Type.show[T] + } + withType(t2): + [T] => _ ?=> '{ val a: T = ??? 
} // TODO remove _ ?=> + +def exprWithPreciseType[T, U](expr: Expr[T])(body: [X <: T] => Type[X] ?=> Expr[X] => U)(using Quotes): U = + import quotes.reflect.* + type X <: T + val exprX = expr.asInstanceOf[Expr[X]] + val tpeX = expr.asTerm.tpe.asType.asInstanceOf[Type[X]] + body[X](using tpeX)(exprX) + +def test2(x: Expr[Any])(using Quotes) = + // exprWithPreciseType(x) { [T] => x => // Inference limitation: x is assumed to be the Type[T] instead of the Expr[T] + exprWithPreciseType(x) { [T] => _ ?=> x => // TODO remove _ ?=> + Type.of[T] + '{ val a: T = $x } + } + exprWithPreciseType('{1}) { [T <: Int] => _ ?=> x => // TODO remove _ ?=> + Type.of[T] + '{ val a: Int = $x } + '{ val a: T = $x } + '{ val a: T = i($x) } + } + +def i[T <: Int](x: T): T = x diff --git a/tests/pos-custom-args/semanticdb/inline-unapply/App_2.scala b/tests/pos-macros/semanticdb-inline-unapply/App_2.scala similarity index 84% rename from tests/pos-custom-args/semanticdb/inline-unapply/App_2.scala rename to tests/pos-macros/semanticdb-inline-unapply/App_2.scala index 3d0d6b665128..2d5383505219 100644 --- a/tests/pos-custom-args/semanticdb/inline-unapply/App_2.scala +++ b/tests/pos-macros/semanticdb-inline-unapply/App_2.scala @@ -1,3 +1,4 @@ +//> using options -Xsemanticdb object Test { def main(args: Array[String]): Unit = { diff --git a/tests/pos-macros/semanticdb-inline-unapply/Macro_1.scala b/tests/pos-macros/semanticdb-inline-unapply/Macro_1.scala new file mode 100644 index 000000000000..eb6fe1109fbc --- /dev/null +++ b/tests/pos-macros/semanticdb-inline-unapply/Macro_1.scala @@ -0,0 +1,10 @@ +//> using options -Xsemanticdb + +import scala.quoted.* + +object Succ: + + inline def unapply(n: Int): Option[Int] = ${ impl('n) } + + private def impl(n: Expr[Int])(using Quotes): Expr[Option[Int]] = + '{ if $n == 0 then None else Some($n - 1)} diff --git a/tests/pos-macros/semanticdb-macro-pos/example_1.scala b/tests/pos-macros/semanticdb-macro-pos/example_1.scala new file mode 100644 index 000000000000..87cd556db1f2 --- /dev/null +++ b/tests/pos-macros/semanticdb-macro-pos/example_1.scala @@ -0,0 +1,7 @@ +//> using options -Xsemanticdb + +import quoted.* + +object CodeImpl { + def codeExpr(using Quotes): Expr[String] = '{""} +} diff --git a/tests/pos-custom-args/semanticdb/macro-pos/example_2.scala b/tests/pos-macros/semanticdb-macro-pos/example_2.scala similarity index 94% rename from tests/pos-custom-args/semanticdb/macro-pos/example_2.scala rename to tests/pos-macros/semanticdb-macro-pos/example_2.scala index 0e11dae6718d..0a2ceb596a2d 100644 --- a/tests/pos-custom-args/semanticdb/macro-pos/example_2.scala +++ b/tests/pos-macros/semanticdb-macro-pos/example_2.scala @@ -1,3 +1,5 @@ +//> using options -Xsemanticdb + import quoted.* object TestImpl { diff --git a/tests/pos-macros/semanticdb-macro-pos/example_3.scala b/tests/pos-macros/semanticdb-macro-pos/example_3.scala new file mode 100644 index 000000000000..0db4dbdc79e2 --- /dev/null +++ b/tests/pos-macros/semanticdb-macro-pos/example_3.scala @@ -0,0 +1,7 @@ +//> using options -Xsemanticdb + +object Test { + + def test = TestImpl.fun("") + +} diff --git a/tests/pos/splice-pat.scala b/tests/pos-macros/splice-pat.scala similarity index 100% rename from tests/pos/splice-pat.scala rename to tests/pos-macros/splice-pat.scala diff --git a/tests/pos-special/fatal-warnings/Dynamic.scala b/tests/pos-special/fatal-warnings/Dynamic.scala deleted file mode 100644 index 3a404ea07e13..000000000000 --- a/tests/pos-special/fatal-warnings/Dynamic.scala +++ /dev/null @@ -1,3 +0,0 @@ 
-package scala - -trait Dynamic extends Any diff --git a/tests/pos-special/fatal-warnings/annot-constant/Test_2.scala b/tests/pos-special/fatal-warnings/annot-constant/Test_2.scala deleted file mode 100644 index 13f603514ab4..000000000000 --- a/tests/pos-special/fatal-warnings/annot-constant/Test_2.scala +++ /dev/null @@ -1,6 +0,0 @@ -package pkg - -object U { - println(Constants_1.foo()) // The same constant in the constant pool is first unpickled here as a boolean - println(Constants_1.BYTE) // ... and here as a byte -} diff --git a/tests/pos-special/fatal-warnings/i10247.scala b/tests/pos-special/fatal-warnings/i10247.scala deleted file mode 100644 index 4ac5d719dbdc..000000000000 --- a/tests/pos-special/fatal-warnings/i10247.scala +++ /dev/null @@ -1,20 +0,0 @@ -// check that deprecation warnings of Red are not caught in its enclosing scope -enum Color(rgb: Int) { - - @deprecated("stop using Red", "0.1") - case Red extends Color(0xff0000) - - case Green extends Color(0x00ff00) - - case Blue extends Color(0x0000ff) - - final def colorCode: Option[Int] = this match { - case Red => None - case _ => Some(rgb) - } - -} - -object Color { - val deprecatedMembers = Set(Red) -} diff --git a/tests/pos-special/fatal-warnings/i10259.scala b/tests/pos-special/fatal-warnings/i10259.scala deleted file mode 100644 index 5b4c628cd126..000000000000 --- a/tests/pos-special/fatal-warnings/i10259.scala +++ /dev/null @@ -1,8 +0,0 @@ -trait S[T] extends (T => T): - def apply(x: T) = ??? - extension (x: T) def show: String - -given S[Int] with - extension (x: Int) def show = x.toString - -val x = 10.show diff --git a/tests/pos-special/fatal-warnings/i14637.scala b/tests/pos-special/fatal-warnings/i14637.scala deleted file mode 100644 index 9499c2fca2f9..000000000000 --- a/tests/pos-special/fatal-warnings/i14637.scala +++ /dev/null @@ -1,6 +0,0 @@ -class C - -object Givens: - given cOrdering: Ordering[C] with - override def compare(c0: C, c1: C) = 0 - val greeting = "we love Givens" \ No newline at end of file diff --git a/tests/pos-special/fatal-warnings/i16649-irrefutable.scala b/tests/pos-special/fatal-warnings/i16649-irrefutable.scala deleted file mode 100644 index b9aa6d2acf52..000000000000 --- a/tests/pos-special/fatal-warnings/i16649-irrefutable.scala +++ /dev/null @@ -1,7 +0,0 @@ -import quoted.* - -def foo(using Quotes)(x: Expr[Int]) = - val '{ $y } = x - val '{ $a: Any } = x - val '{ $b: Int } = x - val '[List[Int]] = Type.of[List[Int]] diff --git a/tests/pos-special/fatal-warnings/i17314a.scala b/tests/pos-special/fatal-warnings/i17314a.scala deleted file mode 100644 index 468b956fb04c..000000000000 --- a/tests/pos-special/fatal-warnings/i17314a.scala +++ /dev/null @@ -1,12 +0,0 @@ -// scalac: -Wunused:all - -package foo: - class Foo[T] - given Foo[Int] = new Foo[Int] - - -package bar: - import foo.{given foo.Foo[Int]} - import foo.Foo - - val repro: Foo[Int] = summon[Foo[Int]] diff --git a/tests/pos-special/fatal-warnings/i17735.scala b/tests/pos-special/fatal-warnings/i17735.scala new file mode 100644 index 000000000000..f171d4a028f7 --- /dev/null +++ b/tests/pos-special/fatal-warnings/i17735.scala @@ -0,0 +1,24 @@ +//> using options -Wvalue-discard + +import scala.collection.mutable +import scala.annotation.nowarn + +object Foo: + + def f(b: Boolean): String = + val messageBuilder = mutable.StringBuilder() + if b then + // Here @nowarn is effective with or without -Wfatal-warnings + // i.e. 
no warning without -Wfatal-warnings and no error with -Wfatal-warnings + messageBuilder.append("helloworld").append("\n"): @nowarn("msg=discarded non-Unit value*") + + messageBuilder.result() + + def g(x: String => Unit) = ??? + def h: String = + val messageBuilder = mutable.StringBuilder() + g: s => + // here @nowarn is effective without -Wfatal-warnings (i.e. no warning) + // But with -Wfatal-warnings we get an error + messageBuilder.append("\n").append(s): @nowarn("msg=discarded non-Unit value*") + messageBuilder.result() \ No newline at end of file diff --git a/tests/pos-special/fatal-warnings/i17735a.scala b/tests/pos-special/fatal-warnings/i17735a.scala new file mode 100644 index 000000000000..fe0ea7e6bc45 --- /dev/null +++ b/tests/pos-special/fatal-warnings/i17735a.scala @@ -0,0 +1,24 @@ +//> using options -Wvalue-discard -Wconf:msg=non-Unit:s + +import scala.collection.mutable +import scala.annotation.nowarn + +object Test: + + def f(b: Boolean): String = + val messageBuilder = mutable.StringBuilder() + if b then + // Here @nowarn is effective with or without -Wfatal-warnings + // i.e. no warning without -Wfatal-warnings and no error with -Wfatal-warnings + messageBuilder.append("helloworld").append("\n") + + messageBuilder.result() + + def g(x: String => Unit) = ??? + def h: String = + val messageBuilder = mutable.StringBuilder() + g: s => + // here @nowarn is effective without -Wfatal-warnings (i.e. no warning) + // But with -Wfatal-warnings we get an error + messageBuilder.append("\n").append(s) + messageBuilder.result() diff --git a/tests/pos-special/fatal-warnings/i17741.scala b/tests/pos-special/fatal-warnings/i17741.scala new file mode 100644 index 000000000000..7171aab83e4b --- /dev/null +++ b/tests/pos-special/fatal-warnings/i17741.scala @@ -0,0 +1,32 @@ +//> using options -Wnonunit-statement + +class Node() +class Elem( + prefix: String, + label: String, + minimizeEmpty: Boolean, + child: Node* +) extends Node +class Text(text: String) extends Node +class NodeBuffer() { + def &+(node: Node): NodeBuffer = + this +} +class NodeSeq() +object NodeSeq { + def seqToNodeSeq(seq: NodeBuffer): Seq[Node] = ??? 
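The i17735 pair above pins down how `-Wvalue-discard` interacts with `@nowarn` and `-Wconf` suppression. For reference, a hypothetical method showing the two usual in-source ways to acknowledge a discarded fluent-API result:

```scala
import scala.annotation.nowarn
import scala.collection.mutable

def greet(sb: mutable.StringBuilder): String =
  // StringBuilder.append returns the builder, which -Wvalue-discard flags.
  sb.append("hello"): @nowarn("msg=discarded non-Unit value*")
  val _ = sb.append(", world") // explicit discard needs no annotation
  sb.result()
```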
+} + +object Main { + def example() = { + { + new Elem(null, "foo", false, + { + val $buf: NodeBuffer = new NodeBuffer() + $buf.&+(new Text("bar")) + NodeSeq.seqToNodeSeq($buf) + }* + ) + } + }: @annotation.nowarn() +} \ No newline at end of file diff --git a/tests/pos-special/fatal-warnings/i2673.scala b/tests/pos-special/fatal-warnings/i2673.scala deleted file mode 100644 index 9721f81da217..000000000000 --- a/tests/pos-special/fatal-warnings/i2673.scala +++ /dev/null @@ -1,6 +0,0 @@ -package Foos - -object Outer { - class X - object x -} diff --git a/tests/pos-special/fatal-warnings/i3323.scala b/tests/pos-special/fatal-warnings/i3323.scala deleted file mode 100644 index 220cbd68041b..000000000000 --- a/tests/pos-special/fatal-warnings/i3323.scala +++ /dev/null @@ -1,7 +0,0 @@ -class Foo { - def foo[A](lss: List[List[A]]): Unit = { - lss match { - case xss: List[List[A]] => - } - } -} diff --git a/tests/pos-special/fatal-warnings/i3323b.scala b/tests/pos-special/fatal-warnings/i3323b.scala deleted file mode 100644 index df4b9bb2dc76..000000000000 --- a/tests/pos-special/fatal-warnings/i3323b.scala +++ /dev/null @@ -1,7 +0,0 @@ -class Foo { - def foo(lss: List[Int]): Unit = { - lss match { - case xss: List[Int] => - } - } -} diff --git a/tests/pos-special/fatal-warnings/i3589b.scala b/tests/pos-special/fatal-warnings/i3589b.scala deleted file mode 100644 index 115e74b8cee2..000000000000 --- a/tests/pos-special/fatal-warnings/i3589b.scala +++ /dev/null @@ -1,7 +0,0 @@ -class Test { - def test(x: 1 | 2 | 3) = (x: @annotation.switch) match { - case 1 => 1 - case 2 => 2 - case 3 => 3 - } -} diff --git a/tests/pos-special/fatal-warnings/i4166.scala b/tests/pos-special/fatal-warnings/i4166.scala deleted file mode 100644 index 44684bc68855..000000000000 --- a/tests/pos-special/fatal-warnings/i4166.scala +++ /dev/null @@ -1,7 +0,0 @@ -package foo { - class Hello -} - -package bar { - class Hello -} diff --git a/tests/pos-special/fatal-warnings/i4185.scala b/tests/pos-special/fatal-warnings/i4185.scala deleted file mode 100644 index 10db2d672eea..000000000000 --- a/tests/pos-special/fatal-warnings/i4185.scala +++ /dev/null @@ -1,4 +0,0 @@ -object ord { - class Ord - object Ord -} diff --git a/tests/pos-special/fatal-warnings/i4674.scala b/tests/pos-special/fatal-warnings/i4674.scala deleted file mode 100644 index 53108d7981ca..000000000000 --- a/tests/pos-special/fatal-warnings/i4674.scala +++ /dev/null @@ -1,8 +0,0 @@ -class Test { - def test(x: String) = { - x.foreach { - case 's' => println("s") - case c: Char => println(c) // should compile without warning - } - } -} diff --git a/tests/pos-special/fatal-warnings/i6190a.scala b/tests/pos-special/fatal-warnings/i6190a.scala deleted file mode 100644 index e57238edf824..000000000000 --- a/tests/pos-special/fatal-warnings/i6190a.scala +++ /dev/null @@ -1,6 +0,0 @@ -case class Rule(name: String) -object Rule extends (String => Rule) { - def apply(name: String): Rule = new Rule(name) -} - -def foo = List("1", "2").map(Rule) diff --git a/tests/pos-special/fatal-warnings/i6190c.scala b/tests/pos-special/fatal-warnings/i6190c.scala deleted file mode 100644 index b7ab530dddd9..000000000000 --- a/tests/pos-special/fatal-warnings/i6190c.scala +++ /dev/null @@ -1,3 +0,0 @@ -case class Rule(name: String) - -def foo = List("1", "2").map(Rule.apply) diff --git a/tests/pos-special/fatal-warnings/i6621.scala b/tests/pos-special/fatal-warnings/i6621.scala deleted file mode 100644 index 92362a004530..000000000000 --- a/tests/pos-special/fatal-warnings/i6621.scala 
+++ /dev/null @@ -1,8 +0,0 @@ -object Unapply { - def unapply(a: Any): Option[(Int, Int)] = - Some((1, 2)) -} - -object Test { - val Unapply(x, y) = "": @unchecked -} diff --git a/tests/pos-special/fatal-warnings/i7219.scala b/tests/pos-special/fatal-warnings/i7219.scala deleted file mode 100644 index fe50549e2710..000000000000 --- a/tests/pos-special/fatal-warnings/i7219.scala +++ /dev/null @@ -1,18 +0,0 @@ -object Foo { - enum MyEnum { - case Red - case Blue(msg: String) - } - export MyEnum._ -} - -object Bar { - type Blue = Foo.Blue -} - -import Foo.* - -def foo(a: MyEnum): Seq[Bar.Blue] = a match { - case Red => Seq.empty - case m: Foo.Blue => Seq(m) -} diff --git a/tests/pos-special/fatal-warnings/i8758.scala b/tests/pos-special/fatal-warnings/i8758.scala deleted file mode 100644 index d64cb4e0624e..000000000000 --- a/tests/pos-special/fatal-warnings/i8758.scala +++ /dev/null @@ -1,5 +0,0 @@ -def test = "?johndoe" match { - case s":$name" => println(s":name $name") - case s"{$name}" => println(s"{name} $name") - case s"?$pos" => println(s"pos $pos") -} diff --git a/tests/pos-special/fatal-warnings/i8781.scala b/tests/pos-special/fatal-warnings/i8781.scala deleted file mode 100644 index fe0d5d1ce61c..000000000000 --- a/tests/pos-special/fatal-warnings/i8781.scala +++ /dev/null @@ -1,10 +0,0 @@ -@main -def Test = - - val x: Int | String = 1 - - println(x.isInstanceOf[Int]) - - x match - case _: Int => println("Int") - case _: String => println("String") diff --git a/tests/pos-special/fatal-warnings/i9751.scala b/tests/pos-special/fatal-warnings/i9751.scala deleted file mode 100644 index 4536011b31c8..000000000000 --- a/tests/pos-special/fatal-warnings/i9751.scala +++ /dev/null @@ -1,11 +0,0 @@ -object Test { - extension (x: Int) - inline def times(inline op: Unit): Unit = { - var count = 0 - while count < x do - op - count += 1 - } - - 10.times { println("hello") } -} diff --git a/tests/pos-special/fatal-warnings/i9776.scala b/tests/pos-special/fatal-warnings/i9776.scala deleted file mode 100644 index e1c65dff268f..000000000000 --- a/tests/pos-special/fatal-warnings/i9776.scala +++ /dev/null @@ -1,38 +0,0 @@ -import scala.annotation.switch - -sealed trait Fruit - -object Fruit { - case object Apple extends Fruit - case object Banana extends Fruit - case object Orange extends Fruit - - def isCitrus(fruit: Fruit): Boolean = - (fruit: @switch) match { - case Orange => true - case _ => false - } -} - - -sealed trait TaggedFruit { - def tag: Int -} - -object TaggedFruit { - case object Apple extends TaggedFruit { - val tag = 1 - } - case object Banana extends TaggedFruit { - val tag = 2 - } - case object Orange extends TaggedFruit { - val tag = 3 - } - - def isCitrus(fruit: TaggedFruit): Boolean = - (fruit.tag: @switch) match { - case 3 => true - case _ => false - } -} diff --git a/tests/pos-special/fatal-warnings/i9804.scala b/tests/pos-special/fatal-warnings/i9804.scala deleted file mode 100644 index e42c13f3ebdc..000000000000 --- a/tests/pos-special/fatal-warnings/i9804.scala +++ /dev/null @@ -1,5 +0,0 @@ -import scala.quoted.* - -def f[A: Type](e: Expr[A])(using Quotes): Expr[A] = e match { - case '{ $e2 } => e2 -} diff --git a/tests/pos-special/fatal-warnings/matchable-same-type.scala b/tests/pos-special/fatal-warnings/matchable-same-type.scala deleted file mode 100644 index afcef9619855..000000000000 --- a/tests/pos-special/fatal-warnings/matchable-same-type.scala +++ /dev/null @@ -1,7 +0,0 @@ -import scala.language.`future-migration` - -type X -def x: X = ??? 
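Several of the files deleted here (i3589b earlier, i9776 above, switches.scala below) exercised `@switch`, which asks the compiler to warn when a pattern match cannot be compiled down to a JVM `tableswitch`/`lookupswitch`. A compact reminder of that contract, with invented cases:

```scala
import scala.annotation.switch

def digitName(d: Int): String = (d: @switch) match
  case 0 => "zero"
  case 1 => "one"
  case 2 => "two"
  case _ => "other" // a default case keeps the match compilable as a switch
```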
-def test: Unit = - x match - case y: X => diff --git a/tests/pos-special/fatal-warnings/nowarnannot.scala b/tests/pos-special/fatal-warnings/nowarnannot.scala new file mode 100644 index 000000000000..26e9713d0543 --- /dev/null +++ b/tests/pos-special/fatal-warnings/nowarnannot.scala @@ -0,0 +1,6 @@ +case class F(i: Int) + +object Main { + def example() = + List(1, 2, 3).map(F): @annotation.nowarn +} diff --git a/tests/pos-special/fatal-warnings/patmat-exhaustive.scala b/tests/pos-special/fatal-warnings/patmat-exhaustive.scala deleted file mode 100644 index c5c95c455b8c..000000000000 --- a/tests/pos-special/fatal-warnings/patmat-exhaustive.scala +++ /dev/null @@ -1,10 +0,0 @@ -def foo: Unit = - object O: - sealed abstract class A - class B extends O.A - class C extends O.A - - val x: O.A = ??? - x match - case x: B => ??? - case x: C => ??? diff --git a/tests/pos-special/fatal-warnings/stats-in-empty-pkg.scala b/tests/pos-special/fatal-warnings/stats-in-empty-pkg.scala deleted file mode 100644 index e5ea0b7566f8..000000000000 --- a/tests/pos-special/fatal-warnings/stats-in-empty-pkg.scala +++ /dev/null @@ -1,4 +0,0 @@ -def foo = 23 -val bar = foo -var baz = bar -type Qux = Int diff --git a/tests/pos-special/fatal-warnings/switches.scala b/tests/pos-special/fatal-warnings/switches.scala deleted file mode 100644 index a06621acde21..000000000000 --- a/tests/pos-special/fatal-warnings/switches.scala +++ /dev/null @@ -1,53 +0,0 @@ -import scala.annotation.switch - -class Test { - import Test.* - - def test1(x: Int): Int = (x: @switch) match { - case 1 => 1 - case 2 | 3 | 4 => 2 - case 65 => 3 - case 72 => 4 - } - - def test2(c: Char): Boolean = (c: @switch) match { - case LF | CR | FF | SU => true - case _ => false - } - - // #1313 - def test3(x: Int, y: Int): Int = (x: @switch) match { - case 6 if y > 5 => 1 - case 6 => 2 - case 12 => 3 - case 14 => 4 - case _ => 5 - } - - def test4(x: Byte): Boolean = (x: @switch) match { - case 1 | 2 | 3 => true - case _ => false - } - - def test5(x: Short): Boolean = (x: @switch) match { - case 1 | 2 | 3 => true - case _ => false - } - - def test6(x: IntAnyVal) = (x: @switch) match { - case IntAnyVal(1) => 0 - case IntAnyVal(10) => 1 - case IntAnyVal(100) => 2 - case IntAnyVal(1000) => 3 - case IntAnyVal(10000) => 4 - } -} - -case class IntAnyVal(x: Int) extends AnyVal - -object Test { - final val LF = '\u000A' - final val CR = '\u000D' - final val FF = '\u000C' - final val SU = '\u001A' -} diff --git a/tests/pos-special/fatal-warnings/tuple-exaustivity.scala b/tests/pos-special/fatal-warnings/tuple-exaustivity.scala deleted file mode 100644 index dd5aec2436f1..000000000000 --- a/tests/pos-special/fatal-warnings/tuple-exaustivity.scala +++ /dev/null @@ -1,4 +0,0 @@ -def test(t: Tuple) = - t match - case Tuple() => - case head *: tail => diff --git a/tests/pos-special/fatal-warnings/unchecked-scrutinee.scala b/tests/pos-special/fatal-warnings/unchecked-scrutinee.scala deleted file mode 100644 index cde3d2ff5f91..000000000000 --- a/tests/pos-special/fatal-warnings/unchecked-scrutinee.scala +++ /dev/null @@ -1,5 +0,0 @@ -object Test { - (List(1: @unchecked, 2, 3): @unchecked) match { - case a :: as => - } -} \ No newline at end of file diff --git a/tests/pos-special/i18589/core_0.scala b/tests/pos-special/i18589/core_0.scala new file mode 100644 index 000000000000..d381fd0dea29 --- /dev/null +++ b/tests/pos-special/i18589/core_0.scala @@ -0,0 +1,17 @@ +import scala.deriving.Mirror + +trait NamedCodec[A, R] + +object NamedCodecPlatform { + + final class 
Builder[R]() { + inline def of[T](using m: Mirror.Of[T]): NamedCodec[T, R] = + inline m match { + case s: Mirror.SumOf[T] => sumInst(s) + case _: Mirror.ProductOf[T] => productInst + } + + private inline def productInst[T]: NamedCodec[T, R] = ??? + private inline def sumInst[T](m: Mirror.SumOf[T]): NamedCodec[T, R] = ??? + } +} diff --git a/tests/pos-special/i18589/test_1.scala b/tests/pos-special/i18589/test_1.scala new file mode 100644 index 000000000000..6de191970791 --- /dev/null +++ b/tests/pos-special/i18589/test_1.scala @@ -0,0 +1,8 @@ +enum Data { + case A, B, C +} + +@main def Test = { + val builder: NamedCodecPlatform.Builder[Any] = ??? + builder.of[Data] +} diff --git a/tests/pos-special/i7575.scala b/tests/pos-special/i7575.scala deleted file mode 100644 index a2193cf8b4ea..000000000000 --- a/tests/pos-special/i7575.scala +++ /dev/null @@ -1 +0,0 @@ -class Foo() extends Dynamic // tested with -language:dynamics diff --git a/tests/pos-special/i7592/Test_2.scala b/tests/pos-special/i7592/Test_2.scala deleted file mode 100644 index 37ee3a1de243..000000000000 --- a/tests/pos-special/i7592/Test_2.scala +++ /dev/null @@ -1,5 +0,0 @@ -def return1 = 1 - -def testReturn1 = { - assert(1 == compile(return1)) -} diff --git a/tests/pos-special/isInstanceOf/3324h.scala b/tests/pos-special/isInstanceOf/3324h.scala deleted file mode 100644 index 0e718218a6ef..000000000000 --- a/tests/pos-special/isInstanceOf/3324h.scala +++ /dev/null @@ -1,12 +0,0 @@ -object Test { - trait Marker - def foo[T](x: T) = x match { - case _: (T & Marker) => // no warning - case _ => - } - - def foo2[T](x: T) = x match { - case _: T with Marker => // scalac emits a warning - case _ => - } -} diff --git a/tests/pos-special/isInstanceOf/gadt.scala b/tests/pos-special/isInstanceOf/gadt.scala deleted file mode 100644 index 6f661ce152c3..000000000000 --- a/tests/pos-special/isInstanceOf/gadt.scala +++ /dev/null @@ -1,37 +0,0 @@ -sealed trait Exp[T] -case class Num(n: Int) extends Exp[Int] -case class Plus(e1: Exp[Int], e2: Exp[Int]) extends Exp[Int] -case class Var[T](name: String) extends Exp[T] -case class Lambda[T, U](x: Var[T], e: Exp[U]) extends Exp[T => U] -case class App[T, U](f: Exp[T => U], e: Exp[T]) extends Exp[U] - -abstract class Env { outer => - def apply[T](x: Var[T]): T - - def + [T](xe: (Var[T], T)) = new Env { - def apply[T](x: Var[T]): T = - if (x == xe._1) xe._2.asInstanceOf[T] - else outer(x) - } -} - -object Env { - val empty = new Env { - def apply[T](x: Var[T]): T = ??? 
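`Builder.of` in i18589 above dispatches through an `inline match` on the summoned `Mirror`, which reduces at inline-expansion time because the mirror's static type reveals whether `T` is a sum or a product. The same dispatch shape in isolation (`kindOf` is invented for illustration):

```scala
import scala.deriving.Mirror

inline def kindOf[T](using m: Mirror.Of[T]): String =
  inline m match
    case _: Mirror.SumOf[T]     => "sum"
    case _: Mirror.ProductOf[T] => "product"

def demo: (String, String) =
  (kindOf[Either[Int, String]], kindOf[(Int, String)]) // ("sum", "product")
```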
- } -} - -object Test { - - val exp = App(Lambda(Var[Int]("x"), Plus(Var[Int]("x"), Num(1))), Var[Int]("2")) - - def eval[T](e: Exp[T])(env: Env): T = e match { - case Num(n) => n - case Plus(e1, e2) => eval(e1)(env) + eval(e2)(env) - case v: Var[T] => env(v) - case Lambda(x: Var[s], e) => ((y: s) => eval(e)(env + (x -> y))) - case App(f, e) => eval(f)(env)(eval(e)(env)) - } - - eval(exp)(Env.empty) -} diff --git a/tests/pos-special/kind-projector-underscores.scala b/tests/pos-special/kind-projector-underscores.scala deleted file mode 100644 index 06face862e53..000000000000 --- a/tests/pos-special/kind-projector-underscores.scala +++ /dev/null @@ -1,59 +0,0 @@ -package kind_projector - -trait Foo[F[_]] -trait Qux[F[_, _]] -trait Baz[F[_], A, B] - -trait FooPlus[+F[+_]] -trait QuxPlus[+F[+_, +_]] -trait BazPlus[+F[+_], +A, +B] - -trait FooMinus[-F[-_]] -trait QuxMinus[-F[-_, -_]] -trait BazMinus[-F[-_], -A, -B] - -class Bar1 extends Foo[Either[Int, _]] -class Bar2 extends Foo[Either[_, Int]] -class Bar3 extends Foo[_ => Int] -class Bar4 extends Foo[Int => _] -class Bar5 extends Foo[(Int, _, Int)] -class Bar6 extends Foo[λ[x => Either[Int, x]]] -class Bar7 extends Qux[λ[(x, y) => Either[y, x]]] -class Bar8 extends Foo[Baz[Int => _, _, Int]] -class Bar9 extends Foo[λ[x => Baz[x => _, Int, x]]] - -class BarPlus1 extends FooPlus[Either[Int, +_]] -class BarPlus2 extends FooPlus[Either[+_, Int]] -class BarPlus3 extends FooPlus[Int => +_] -class BarPlus4 extends FooPlus[(Int, +_, Int)] -class BarPlus5 extends FooPlus[λ[`+x` => Either[Int, x]]] -class BarPlus6 extends QuxPlus[λ[(`+x`, `+y`) => Either[y, x]]] -class BarPlus7 extends FooPlus[BazPlus[Int => +_, +_, Int]] - -class BarMinus1 extends FooMinus[-_ => Int] - -class VarianceAnnotationIsActuallyIgnored1 extends FooPlus[Either[Int, -_]] -class VarianceAnnotationIsActuallyIgnored2 extends FooPlus[Either[-_, Int]] -class VarianceAnnotationIsActuallyIgnored3 extends FooMinus[+_ => Int] -class VarianceAnnotationIsActuallyIgnored4 extends FooPlus[Int => -_] -class VarianceAnnotationIsActuallyIgnored5 extends FooPlus[(Int, -_, Int)] -class VarianceAnnotationIsActuallyIgnored6 extends FooPlus[λ[`-x` => Either[Int, x]]] -class VarianceAnnotationIsActuallyIgnored7 extends QuxPlus[λ[(`-x`, `-y`) => Either[y, x]]] -class VarianceAnnotationIsActuallyIgnored8 extends FooPlus[BazPlus[Int => -_, -_, Int]] -class VarianceAnnotationIsActuallyIgnored9 extends Foo[λ[`-x` => BazPlus[x => -_, Int, x]]] - -class BackticksAreFine1 extends FooPlus[Either[Int, `-_`]] -class BackticksAreFine2 extends FooPlus[Either[`-_`, Int]] -class BackticksAreFine3 extends FooMinus[`+_` => Int] -class BackticksAreFine4 extends FooPlus[Int => `-_`] -class BackticksAreFine5 extends FooPlus[(Int, `-_`, Int)] -class BackticksAreFine6 extends FooPlus[BazPlus[Int => `-_`, `-_`, Int]] -class BackticksAreFine7 extends Foo[λ[`-x` => BazPlus[x => `-_`, Int, x]]] - -class SpacesAreFine1 extends FooPlus[Either[Int, - _ ]] -class SpacesAreFine2 extends FooPlus[Either[ - _ , Int]] -class SpacesAreFine3 extends FooMinus[ + _ => Int] -class SpacesAreFine4 extends FooPlus[Int => - _] -class SpacesAreFine5 extends FooPlus[(Int, - _, Int)] -class SpacesAreFine6 extends FooPlus[BazPlus[Int => - _ , - _, Int]] -class SpacesAreFine7 extends Foo[λ[`-x` => BazPlus[x => - _ , Int, x]]] diff --git a/tests/pos-special/kind-projector.scala b/tests/pos-special/kind-projector.scala deleted file mode 100644 index 9048ae90f41c..000000000000 --- a/tests/pos-special/kind-projector.scala +++ /dev/null @@ -1,60 +0,0 
@@ -package kind_projector - -trait Foo[F[_]] -trait Qux[F[_, _]] -trait Baz[F[_], A, B] - -trait FooPlus[+F[+_]] -trait QuxPlus[+F[+_, +_]] -trait BazPlus[+F[+_], +A, +B] - -trait FooMinus[-F[-_]] -trait QuxMinus[-F[-_, -_]] -trait BazMinus[-F[-_], -A, -B] - -class Bar1 extends Foo[Either[Int, *]] -class Bar2 extends Foo[Either[*, Int]] -class Bar3 extends Foo[* => Int] -class Bar4 extends Foo[Int => *] -class Bar5 extends Foo[(Int, *, Int)] -class Bar6 extends Foo[λ[x => Either[Int, x]]] -class Bar7 extends Qux[λ[(x, y) => Either[y, x]]] -class Bar8 extends Foo[Baz[Int => *, *, Int]] -class Bar9 extends Foo[λ[x => Baz[x => *, Int, x]]] - -class BarPlus1 extends FooPlus[Either[Int, +*]] -class BarPlus2 extends FooPlus[Either[+*, Int]] -class BarPlus3 extends FooPlus[Int => +*] -class BarPlus4 extends FooPlus[(Int, +*, Int)] -class BarPlus5 extends FooPlus[λ[`+x` => Either[Int, x]]] -class BarPlus6 extends QuxPlus[λ[(`+x`, `+y`) => Either[y, x]]] -class BarPlus7 extends FooPlus[BazPlus[Int => +*, +*, Int]] - -class BarMinus1 extends FooMinus[-* => Int] - -class VarianceAnnotationIsActuallyIgnored1 extends FooPlus[Either[Int, -*]] -class VarianceAnnotationIsActuallyIgnored2 extends FooPlus[Either[-*, Int]] -class VarianceAnnotationIsActuallyIgnored3 extends FooMinus[+* => Int] -class VarianceAnnotationIsActuallyIgnored4 extends FooPlus[Int => -*] -class VarianceAnnotationIsActuallyIgnored5 extends FooPlus[(Int, -*, Int)] -class VarianceAnnotationIsActuallyIgnored6 extends FooPlus[λ[`-x` => Either[Int, x]]] -class VarianceAnnotationIsActuallyIgnored7 extends QuxPlus[λ[(`-x`, `-y`) => Either[y, x]]] -class VarianceAnnotationIsActuallyIgnored8 extends FooPlus[BazPlus[Int => -*, -*, Int]] -class VarianceAnnotationIsActuallyIgnored9 extends Foo[λ[`-x` => BazPlus[x => -*, Int, x]]] - -class BackticksAreFine1 extends FooPlus[Either[Int, `-*`]] -class BackticksAreFine2 extends FooPlus[Either[`-*`, Int]] -class BackticksAreFine3 extends FooMinus[`+*` => Int] -class BackticksAreFine4 extends FooPlus[Int => `-*`] -class BackticksAreFine5 extends FooPlus[(Int, `-*`, Int)] -class BackticksAreFine6 extends FooPlus[BazPlus[Int => `-*`, `-*`, Int]] -class BackticksAreFine7 extends Foo[λ[`-x` => BazPlus[x => `-*`, Int, x]]] -class BackticksAreFine8 extends Foo[λ[`x` => BazPlus[x => `*`, Int, x]]] - -// https://github.com/lampepfl/dotty/issues/13141 -// i13141 -object A { - class X { type Blah = Int } - val * = new X - val a: *.Blah = 2 -} diff --git a/tests/pos-special/stdlib/Test1.scala b/tests/pos-special/stdlib/Test1.scala new file mode 100644 index 000000000000..312756ad4ef5 --- /dev/null +++ b/tests/pos-special/stdlib/Test1.scala @@ -0,0 +1,34 @@ +import language.experimental.captureChecking +import collection.{View, Seq} +import collection.mutable.{ArrayBuffer, ListBuffer} + +import java.io.* + +object Test0: + + def usingLogFile[T](op: FileOutputStream^ => T): T = + val logFile = FileOutputStream("log") + val result = op(logFile) + logFile.close() + result + + def test(xs: List[Int]) = + usingLogFile: f => + xs.map: x => + f.write(x) + x * x + +object Test1: + def test(it: Iterator[Int]^, v: View[Int]^) = + val isEven: Int => Boolean = _ % 2 == 0 + val it2 = it.filter(isEven) + val _: Iterator[Int]^{it, isEven} = it2 + val it2c: Iterator[Int]^{it2} = it2 + val v2 = v.filter(isEven) + val _: View[Int]^{v, isEven} = v2 + val v2c: View[Int]^{v2} = v2 + val v3 = v.drop(2) + val _: View[Int]^{v} = v3 + val v3c: View[Int]^{v3} = v3 + val (xs6, xs7) = v.partition(isEven) + val (xs6a, xs7a) = v.partition(_ % 
2 == 0) diff --git a/tests/pos-special/stdlib/Test2.scala b/tests/pos-special/stdlib/Test2.scala new file mode 100644 index 000000000000..cab9440c17db --- /dev/null +++ b/tests/pos-special/stdlib/Test2.scala @@ -0,0 +1,232 @@ +import scala.reflect.ClassTag +import language.experimental.captureChecking +import collection.{View, Seq} +import collection.mutable.{ArrayBuffer, ListBuffer} + +object Test { + + def seqOps(xs: Seq[Int]) = { // try with Seq[Int]^{cap} + val strPlusInt: (String, Int) => String = _ + _ + val intPlusStr: (Int, String) => String = _ + _ + val isEven: Int => Boolean = _ % 2 == 0 + val isNonNeg: Int => Boolean = _ > 0 + val flips: Int => List[Int] = x => x :: -x :: Nil + val x1 = xs.foldLeft("")(strPlusInt) + val y1: String = x1 + val x2 = xs.foldRight("")(intPlusStr) + val y2: String = x2 + val x3 = xs.indexWhere(isEven) + val y3: Int = x3 + val x4 = xs.head + val y4: Int = x4 + val x5 = xs.to(List) + val y5: List[Int] = x5 + val (xs6, xs7) = xs.partition(isEven) + val ys6: Seq[Int] = xs6 + val ys7: Seq[Int] = xs7 + val xs8 = xs.drop(2) + val ys8: Seq[Int] = xs8 + val xs9 = xs.map(isNonNeg) + val ys9: Seq[Boolean] = xs9 + val xs10 = xs.flatMap(flips) + val ys10: Seq[Int] = xs10 + val xs11 = xs ++ xs + val ys11: Seq[Int] = xs11 + val xs12 = xs ++ Nil + val ys12: Seq[Int] = xs12 + val xs13 = Nil ++ xs + val ys13: Seq[Int] = xs13 + val xs14 = xs ++ ("a" :: Nil) + val ys14: Seq[Any] = xs14 + val xs15 = xs.zip(xs9) + val ys15: Seq[(Int, Boolean)] = xs15 + val xs16 = xs.reverse + val ys16: Seq[Int] = xs16 + println("-------") + println(x1) + println(x2) + println(x3) + println(x4) + println(x5) + println(xs6) + println(xs7) + println(xs8) + println(xs9) + println(xs10) + println(xs11) + println(xs12) + println(xs13) + println(xs14) + println(xs15) + println(xs16) + } + + def iterOps(xs: => Iterator[Int]^) = + val strPlusInt: (String, Int) => String = _ + _ + val intPlusStr: (Int, String) => String = _ + _ + val isEven: Int => Boolean = _ % 2 == 0 + val isNonNeg: Int => Boolean = _ > 0 + val flips: Int => List[Int] = x => x :: -x :: Nil + val x1 = xs.foldLeft("")(strPlusInt) + val y1: String = x1 + val x2 = xs.foldRight("")(intPlusStr) + val y2: String = x2 + val x4 = xs.next() + val y4: Int = x4 + val x5 = xs.to(List) + val y5: List[Int] = x5 + val (xs6, xs7) = xs.partition(isEven) + val ys6: Iterator[Int]^{xs6, isEven} = xs6 + val ys7: Iterator[Int]^{xs7, isEven} = xs7 + val (xs6a, xs7a) = xs.partition(_ % 2 == 0) + val ys6a: Iterator[Int]^{xs6} = xs6 + val ys7a: Iterator[Int]^{xs7} = xs7 + val xs8 = xs.drop(2) + val ys8: Iterator[Int]^{xs8} = xs8 + val xs9 = xs.map(isNonNeg) + val ys9: Iterator[Boolean]^{xs9} = xs9 + val xs10 = xs.flatMap(flips) + val ys10: Iterator[Int]^{xs10} = xs10 + val xs11 = xs ++ xs + val ys11: Iterator[Int]^{xs11} = xs11 + val xs12 = xs ++ Nil + val ys12: Iterator[Int]^{xs12} = xs12 + val xs13 = Nil ++ xs + val ys13: List[Int] = xs13 + val xs14 = xs ++ ("a" :: Nil) + val ys14: Iterator[Any]^{xs14} = xs14 + val xs15 = xs.zip(xs9) + val ys15: Iterator[(Int, Boolean)]^{xs15} = xs15 + println("-------") + println(x1) + println(x2) + println(x4) + println(x5) + println(xs6.to(List)) + println(xs7.to(List)) + println(xs8.to(List)) + println(xs9.to(List)) + println(xs10.to(List)) + println(xs11.to(List)) + println(xs12.to(List)) + println(xs13.to(List)) + println(xs14.to(List)) + println(xs15.to(List)) + + def viewOps(xs: View[Int]^) = { + val strPlusInt: (String, Int) => String = _ + _ + val intPlusStr: (Int, String) => String = _ + _ + val isEven: Int => 
Boolean = _ % 2 == 0 + val isNonNeg: Int => Boolean = _ > 0 + val flips: Int => List[Int] = x => x :: -x :: Nil + val x1 = xs.foldLeft("")(strPlusInt) + val y1: String = x1 + val x2 = xs.foldRight("")(intPlusStr) + val y2: String = x2 + //val x3 = xs.indexWhere(_ % 2 == 0) // indexWhere does not exist on View + //val y3: Int = x3 + val x4 = xs.head + val y4: Int = x4 + val x5 = xs.to(List) + val y5: List[Int] = x5 + val (xs6, xs7) = xs.partition(isEven) + val ys6: View[Int]^{xs6, isEven} = xs6 + val ys7: View[Int]^{xs7, isEven} = xs7 + val (xs6a, xs7a) = xs.partition(_ % 2 == 0) + val ys6a: View[Int]^{xs6} = xs6 + val ys7a: View[Int]^{xs7} = xs7 + val xs8 = xs.drop(2) + val ys8: View[Int]^{xs8} = xs8 + val xs9 = xs.map(isNonNeg) + val ys9: View[Boolean]^{xs9} = xs9 + val xs10 = xs.flatMap(flips) + val ys10: View[Int]^{xs10} = xs10 + val xs11 = xs ++ xs + val ys11: View[Int]^{xs11} = xs11 + val xs12 = xs ++ Nil + val ys12: View[Int]^{xs12} = xs12 + val xs13 = Nil ++ xs + val ys13: List[Int] = xs13 + val xs14 = xs ++ ("a" :: Nil) + val ys14: View[Any]^{xs14} = xs14 + val xs15 = xs.zip(xs9) + val ys15: View[(Int, Boolean)]^{xs15} = xs15 + println("-------") + println(x1) + println(x2) + println(x4) + println(x5) + println(xs6.to(List)) + println(xs7.to(List)) + println(xs8.to(List)) + println(xs9.to(List)) + println(xs10.to(List)) + println(xs11.to(List)) + println(xs12.to(List)) + println(xs13.to(List)) + println(xs14.to(List)) + println(xs15.to(List)) + } + + def stringOps(xs: String) = { + val x1 = xs.foldLeft("")(_ + _) + val y1: String = x1 + val x2 = xs.foldRight("")(_ + _) + val y2: String = x2 + val x3 = xs.indexWhere(_ % 2 == 0) + val y3: Int = x3 + val x4 = xs.head + val y4: Int = x4 + val x5 = xs.to(List) + val y5: List[Char] = x5 + val (xs6, xs7) = xs.partition(_ % 2 == 0) + val ys6: String = xs6 + val ys7: String = xs7 + val xs8 = xs.drop(2) + val ys8: String = xs8 + val xs9 = xs.map(_ + 1) + val ys9: Seq[Int] = xs9 + val xs9a = xs.map(_.toUpper) + val ys9a: String = xs9a + val xs10 = xs.flatMap((x: Char) => s"$x,$x") + val ys10: String = xs10 + val xs11 = xs ++ xs + val ys11: String = xs11 + val ops = collection.StringOps(xs) // !!! otherwise we can a "cannot establish reference" + val xs13 = Nil ++ ops.iterator + val ys13: List[Char] = xs13 + val xs14 = xs ++ ("xyz" :: Nil) + val ys14: Seq[Any] = xs14 + val xs15 = xs.zip(xs9) + val ys15: Seq[(Char, Int)] = xs15 + println("-------") + println(x1) + println(x2) + println(x3) + println(x4) + println(x5) + println(xs6) + println(xs7) + println(xs8) + println(xs9) + println(xs9a) + println(xs10) + println(xs11) + println(xs13) + println(xs14) + println(xs15) + } + + def main(args: Array[String]) = { + val ints = List(1, 2, 3) + val intsBuf = ints.to(ArrayBuffer) + val intsListBuf = ints.to(ListBuffer) + val intsView = ints.view + seqOps(ints) + seqOps(intsBuf) + seqOps(intsListBuf) + viewOps(intsView) + iterOps(ints.iterator) + stringOps("abc") + } +} diff --git a/tests/pos-special/stdlib/collection/ArrayOps.scala b/tests/pos-special/stdlib/collection/ArrayOps.scala new file mode 100644 index 000000000000..bb6174f59598 --- /dev/null +++ b/tests/pos-special/stdlib/collection/ArrayOps.scala @@ -0,0 +1,1664 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection + +import java.lang.Math.{max, min} +import java.util.Arrays +import language.experimental.captureChecking + +import scala.Predef.{ // unimport all array-related implicit conversions to avoid triggering them accidentally + genericArrayOps => _, + booleanArrayOps => _, + byteArrayOps => _, + charArrayOps => _, + doubleArrayOps => _, + floatArrayOps => _, + intArrayOps => _, + longArrayOps => _, + refArrayOps => _, + shortArrayOps => _, + unitArrayOps => _, + genericWrapArray => _, + wrapRefArray => _, + wrapIntArray => _, + wrapDoubleArray => _, + wrapLongArray => _, + wrapFloatArray => _, + wrapCharArray => _, + wrapByteArray => _, + wrapShortArray => _, + wrapBooleanArray => _, + wrapUnitArray => _, + wrapString => _, + copyArrayToImmutableIndexedSeq => _, + _ +} +import scala.collection.Stepper.EfficientSplit +import scala.collection.immutable.Range +import scala.collection.mutable.ArrayBuilder +import scala.math.Ordering +import scala.reflect.ClassTag +import scala.util.Sorting + +object ArrayOps { + + @SerialVersionUID(3L) + private class ArrayView[A](xs: Array[A]) extends AbstractIndexedSeqView[A] { + def length = xs.length + def apply(n: Int) = xs(n) + override def toString: String = immutable.ArraySeq.unsafeWrapArray(xs).mkString("ArrayView(", ", ", ")") + } + + /** A lazy filtered array. No filtering is applied until one of `foreach`, `map` or `flatMap` is called. */ + class WithFilter[A](p: A => Boolean, xs: Array[A]) { + + /** Apply `f` to each element for its side effects. + * Note: [U] parameter needed to help scalac's type inference. + */ + def foreach[U](f: A => U): Unit = { + val len = xs.length + var i = 0 + while(i < len) { + val x = xs(i) + if(p(x)) f(x) + i += 1 + } + } + + /** Builds a new array by applying a function to all elements of this array. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given function + * `f` to each element of this array and collecting the results. + */ + def map[B: ClassTag](f: A => B): Array[B] = { + val b = ArrayBuilder.make[B] + var i = 0 + while (i < xs.length) { + val x = xs(i) + if(p(x)) b += f(x) + i = i + 1 + } + b.result() + } + + /** Builds a new array by applying a function to all elements of this array + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given collection-valued function + * `f` to each element of this array and concatenating the results. + */ + def flatMap[B: ClassTag](f: A => IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + var i = 0 + while(i < xs.length) { + val x = xs(i) + if(p(x)) b ++= f(xs(i)) + i += 1 + } + b.result() + } + + def flatMap[BS, B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = + flatMap[B](x => asIterable(f(x))) + + /** Creates a new non-strict filter which combines this filter with the given predicate. 
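+ * + * For instance, combining two filters and then mapping (an illustrative example): + * {{{ + * Array(1, 2, 3, 4).withFilter(_ % 2 == 0).withFilter(_ > 2).map(_ * 10) // Array(40) + * }}}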
*/ + def withFilter(q: A => Boolean): WithFilter[A]^{this, q} = new WithFilter[A](a => p(a) && q(a), xs) + } + + @SerialVersionUID(3L) + private[collection] final class ArrayIterator[@specialized(Specializable.Everything) A](xs: Array[A]) extends AbstractIterator[A] with Serializable { + private[this] var pos = 0 + private[this] val len = xs.length + override def knownSize: Int = len - pos + def hasNext: Boolean = pos < len + def next(): A = { + if (pos >= xs.length) Iterator.empty.next() + val r = xs(pos) + pos += 1 + r + } + override def drop(n: Int): Iterator[A] = { + if (n > 0) { + val newPos = pos + n + pos = + if (newPos < 0 /* overflow */) len + else Math.min(len, newPos) + } + this + } + } + + @SerialVersionUID(3L) + private final class ReverseIterator[@specialized(Specializable.Everything) A](xs: Array[A]) extends AbstractIterator[A] with Serializable { + private[this] var pos = xs.length-1 + def hasNext: Boolean = pos >= 0 + def next(): A = { + if (pos < 0) Iterator.empty.next() + val r = xs(pos) + pos -= 1 + r + } + + override def drop(n: Int): Iterator[A] = { + if (n > 0) pos = Math.max( -1, pos - n) + this + } + } + + @SerialVersionUID(3L) + private final class GroupedIterator[A](xs: Array[A], groupSize: Int) extends AbstractIterator[Array[A]] with Serializable { + private[this] var pos = 0 + def hasNext: Boolean = pos < xs.length + def next(): Array[A] = { + if(pos >= xs.length) throw new NoSuchElementException + val r = new ArrayOps(xs).slice(pos, pos+groupSize) + pos += groupSize + r + } + } + + /** The cut-off point for the array size after which we switch from `Sorting.stableSort` to + * an implementation that copies the data to a boxed representation for use with `Arrays.sort`. + */ + private final val MaxStableSortLength = 300 + + /** Avoid an allocation in [[collect]]. */ + private val fallback: Any => Any = _ => fallback +} + +/** This class serves as a wrapper for `Array`s with many of the operations found in + * indexed sequences. Where needed, instances of arrays are implicitly converted + * into this class. There is generally no reason to create an instance explicitly or use + * an `ArrayOps` type. It is better to work with plain `Array` types instead and rely on + * the implicit conversion to `ArrayOps` when calling a method (which does not actually + * allocate an instance of `ArrayOps` because it is a value class). + * + * Neither `Array` nor `ArrayOps` are proper collection types + * (i.e. they do not extend `Iterable` or even `IterableOnce`). `mutable.ArraySeq` and + * `immutable.ArraySeq` serve this purpose. + * + * The difference between this class and `ArraySeq`s is that calling transformer methods such as + * `filter` and `map` will yield an array, whereas an `ArraySeq` will remain an `ArraySeq`. + * + * @tparam A type of the elements contained in this array. + */ +final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { + + @`inline` private[this] implicit def elemTag: ClassTag[A] = ClassTag(xs.getClass.getComponentType) + + /** The size of this array. + * + * @return the number of elements in this array. + */ + @`inline` def size: Int = xs.length + + /** The size of this array. + * + * @return the number of elements in this array. + */ + @`inline` def knownSize: Int = xs.length + + /** Tests whether the array is empty. + * + * @return `true` if the array contains no elements, `false` otherwise. + */ + @`inline` def isEmpty: Boolean = xs.length == 0 + + /** Tests whether the array is not empty. 
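+ * For example, `Array(1).nonEmpty` is `true`, while `Array.empty[Int].nonEmpty` is `false`.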
+ * + * @return `true` if the array contains at least one element, `false` otherwise. + */ + @`inline` def nonEmpty: Boolean = xs.length != 0 + + /** Selects the first element of this array. + * + * @return the first element of this array. + * @throws NoSuchElementException if the array is empty. + */ + def head: A = if (nonEmpty) xs.apply(0) else throw new NoSuchElementException("head of empty array") + + /** Selects the last element. + * + * @return The last element of this array. + * @throws NoSuchElementException If the array is empty. + */ + def last: A = if (nonEmpty) xs.apply(xs.length-1) else throw new NoSuchElementException("last of empty array") + + /** Optionally selects the first element. + * + * @return the first element of this array if it is nonempty, + * `None` if it is empty. + */ + def headOption: Option[A] = if(isEmpty) None else Some(head) + + /** Optionally selects the last element. + * + * @return the last element of this array if it is nonempty, + * `None` if it is empty. + */ + def lastOption: Option[A] = if(isEmpty) None else Some(last) + + /** Compares the size of this array to a test value. + * + * @param otherSize the test value that gets compared with the size. + * @return A value `x` where + * {{{ + * x < 0 if this.size < otherSize + * x == 0 if this.size == otherSize + * x > 0 if this.size > otherSize + * }}} + */ + def sizeCompare(otherSize: Int): Int = Integer.compare(xs.length, otherSize) + + /** Compares the length of this array to a test value. + * + * @param len the test value that gets compared with the length. + * @return A value `x` where + * {{{ + * x < 0 if this.length < len + * x == 0 if this.length == len + * x > 0 if this.length > len + * }}} + */ + def lengthCompare(len: Int): Int = Integer.compare(xs.length, len) + + /** Method mirroring [[SeqOps.sizeIs]] for consistency, except it returns an `Int` + * because `size` is known and comparison is constant-time. + * + * These operations are equivalent to [[sizeCompare(Int) `sizeCompare(Int)`]], and + * allow the following more readable usages: + * + * {{{ + * this.sizeIs < size // this.sizeCompare(size) < 0 + * this.sizeIs <= size // this.sizeCompare(size) <= 0 + * this.sizeIs == size // this.sizeCompare(size) == 0 + * this.sizeIs != size // this.sizeCompare(size) != 0 + * this.sizeIs >= size // this.sizeCompare(size) >= 0 + * this.sizeIs > size // this.sizeCompare(size) > 0 + * }}} + */ + def sizeIs: Int = xs.length + + /** Method mirroring [[SeqOps.lengthIs]] for consistency, except it returns an `Int` + * because `length` is known and comparison is constant-time. + * + * These operations are equivalent to [[lengthCompare(Int) `lengthCompare(Int)`]], and + * allow the following more readable usages: + * + * {{{ + * this.lengthIs < len // this.lengthCompare(len) < 0 + * this.lengthIs <= len // this.lengthCompare(len) <= 0 + * this.lengthIs == len // this.lengthCompare(len) == 0 + * this.lengthIs != len // this.lengthCompare(len) != 0 + * this.lengthIs >= len // this.lengthCompare(len) >= 0 + * this.lengthIs > len // this.lengthCompare(len) > 0 + * }}} + */ + def lengthIs: Int = xs.length + + /** Selects an interval of elements. The returned array is made up + * of all elements `x` which satisfy the invariant: + * {{{ + * from <= indexOf(x) < until + * }}} + * + * @param from the lowest index to include from this array. + * @param until the lowest index to EXCLUDE from this array.
+ * @return an array containing the elements greater than or equal to + * index `from` extending up to (but not including) index `until` + * of this array. + */ + def slice(from: Int, until: Int): Array[A] = { + import java.util.Arrays.copyOfRange + val lo = max(from, 0) + val hi = min(until, xs.length) + if (hi > lo) { + (((xs: Array[_]): @unchecked) match { + case x: Array[AnyRef] => copyOfRange(x, lo, hi) + case x: Array[Int] => copyOfRange(x, lo, hi) + case x: Array[Double] => copyOfRange(x, lo, hi) + case x: Array[Long] => copyOfRange(x, lo, hi) + case x: Array[Float] => copyOfRange(x, lo, hi) + case x: Array[Char] => copyOfRange(x, lo, hi) + case x: Array[Byte] => copyOfRange(x, lo, hi) + case x: Array[Short] => copyOfRange(x, lo, hi) + case x: Array[Boolean] => copyOfRange(x, lo, hi) + }).asInstanceOf[Array[A]] + } else new Array[A](0) + } + + /** The rest of the array without its first element. */ + def tail: Array[A] = + if(xs.length == 0) throw new UnsupportedOperationException("tail of empty array") else slice(1, xs.length) + + /** The initial part of the array without its last element. */ + def init: Array[A] = + if(xs.length == 0) throw new UnsupportedOperationException("init of empty array") else slice(0, xs.length-1) + + /** Iterates over the tails of this array. The first value will be this + * array and the final one will be an empty array, with the intervening + * values the results of successive applications of `tail`. + * + * @return an iterator over all the tails of this array + */ + def tails: Iterator[Array[A]] = iterateUntilEmpty(xs => new ArrayOps(xs).tail) + + /** Iterates over the inits of this array. The first value will be this + * array and the final one will be an empty array, with the intervening + * values the results of successive applications of `init`. + * + * @return an iterator over all the inits of this array + */ + def inits: Iterator[Array[A]] = iterateUntilEmpty(xs => new ArrayOps(xs).init) + + // A helper for tails and inits. + private[this] def iterateUntilEmpty(f: Array[A] => Array[A]): Iterator[Array[A]]^{f} = + Iterator.iterate(xs)(f).takeWhile(x => x.length != 0) ++ Iterator.single(Array.empty[A]) + + /** An array containing the first `n` elements of this array. */ + def take(n: Int): Array[A] = slice(0, n) + + /** The rest of the array without its `n` first elements. */ + def drop(n: Int): Array[A] = slice(n, xs.length) + + /** An array containing the last `n` elements of this array. */ + def takeRight(n: Int): Array[A] = drop(xs.length - max(n, 0)) + + /** The rest of the array without its `n` last elements. */ + def dropRight(n: Int): Array[A] = take(xs.length - max(n, 0)) + + /** Takes longest prefix of elements that satisfy a predicate. + * + * @param p The predicate used to test elements. + * @return the longest prefix of this array whose elements all satisfy + * the predicate `p`. + */ + def takeWhile(p: A => Boolean): Array[A] = { + val i = indexWhere(x => !p(x)) + val hi = if(i < 0) xs.length else i + slice(0, hi) + } + + /** Drops longest prefix of elements that satisfy a predicate. + * + * @param p The predicate used to test elements. + * @return the longest suffix of this array whose first element + * does not satisfy the predicate `p`. 
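+ * + * For example: + * {{{ + * Array(2, 4, 5, 6).dropWhile(_ % 2 == 0) // Array(5, 6) + * }}}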
+ */ + def dropWhile(p: A => Boolean): Array[A] = { + val i = indexWhere(x => !p(x)) + val lo = if(i < 0) xs.length else i + slice(lo, xs.length) + } + + def iterator: Iterator[A] = + ((xs: Any @unchecked) match { + case xs: Array[AnyRef] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Int] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Double] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Long] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Float] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Char] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Byte] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Short] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Boolean] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Unit] => new ArrayOps.ArrayIterator(xs) + case null => throw new NullPointerException + }).asInstanceOf[Iterator[A]] + + def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import convert.impl._ + val s = (shape.shape: @unchecked) match { + case StepperShape.ReferenceShape => (xs: Any) match { + case bs: Array[Boolean] => new BoxedBooleanArrayStepper(bs, 0, xs.length) + case _ => new ObjectArrayStepper[AnyRef](xs.asInstanceOf[Array[AnyRef ]], 0, xs.length) + } + case StepperShape.IntShape => new IntArrayStepper (xs.asInstanceOf[Array[Int ]], 0, xs.length) + case StepperShape.LongShape => new LongArrayStepper (xs.asInstanceOf[Array[Long ]], 0, xs.length) + case StepperShape.DoubleShape => new DoubleArrayStepper (xs.asInstanceOf[Array[Double ]], 0, xs.length) + case StepperShape.ByteShape => new WidenedByteArrayStepper (xs.asInstanceOf[Array[Byte ]], 0, xs.length) + case StepperShape.ShortShape => new WidenedShortArrayStepper (xs.asInstanceOf[Array[Short ]], 0, xs.length) + case StepperShape.CharShape => new WidenedCharArrayStepper (xs.asInstanceOf[Array[Char ]], 0, xs.length) + case StepperShape.FloatShape => new WidenedFloatArrayStepper (xs.asInstanceOf[Array[Float ]], 0, xs.length) + } + s.asInstanceOf[S with EfficientSplit] + } + + /** Partitions elements in fixed size arrays. + * @see [[scala.collection.Iterator]], method `grouped` + * + * @param size the number of elements per group + * @return An iterator producing arrays of size `size`, except the + * last will be less than size `size` if the elements don't divide evenly. + */ + def grouped(size: Int): Iterator[Array[A]] = new ArrayOps.GroupedIterator[A](xs, size) + + /** Splits this array into a prefix/suffix pair according to a predicate. + * + * Note: `c span p` is equivalent to (but more efficient than) + * `(c takeWhile p, c dropWhile p)`, provided the evaluation of the + * predicate `p` does not cause any side-effects. + * + * @param p the test predicate + * @return a pair consisting of the longest prefix of this array whose + * elements all satisfy `p`, and the rest of this array. + */ + def span(p: A => Boolean): (Array[A], Array[A]) = { + val i = indexWhere(x => !p(x)) + val idx = if(i < 0) xs.length else i + (slice(0, idx), slice(idx, xs.length)) + } + + /** Splits this array into two at a given position. + * Note: `c splitAt n` is equivalent to `(c take n, c drop n)`. + * + * @param n the position at which to split. + * @return a pair of arrays consisting of the first `n` + * elements of this array, and the other elements. + */ + def splitAt(n: Int): (Array[A], Array[A]) = (take(n), drop(n)) + + /** A pair of, first, all elements that satisfy predicate `p` and, second, all elements that do not. 
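+ * + * For example: + * {{{ + * Array(1, 2, 3, 4).partition(_ % 2 == 0) // (Array(2, 4), Array(1, 3)) + * }}}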
*/ + def partition(p: A => Boolean): (Array[A], Array[A]) = { + val res1, res2 = ArrayBuilder.make[A] + var i = 0 + while(i < xs.length) { + val x = xs(i) + (if(p(x)) res1 else res2) += x + i += 1 + } + (res1.result(), res2.result()) + } + + /** Applies a function `f` to each element of the array and returns a pair of arrays: the first one + * made of those values returned by `f` that were wrapped in [[scala.util.Left]], and the second + * one made of those wrapped in [[scala.util.Right]]. + * + * Example: + * {{{ + * val xs = Array(1, "one", 2, "two", 3, "three") partitionMap { + * case i: Int => Left(i) + * case s: String => Right(s) + * } + * // xs == (Array(1, 2, 3), + * // Array(one, two, three)) + * }}} + * + * @tparam A1 the element type of the first resulting collection + * @tparam A2 the element type of the second resulting collection + * @param f the 'split function' mapping the elements of this array to an [[scala.util.Either]] + * + * @return a pair of arrays: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]], + * and the second one made of those wrapped in [[scala.util.Right]]. */ + def partitionMap[A1: ClassTag, A2: ClassTag](f: A => Either[A1, A2]): (Array[A1], Array[A2]) = { + val res1 = ArrayBuilder.make[A1] + val res2 = ArrayBuilder.make[A2] + var i = 0 + while(i < xs.length) { + f(xs(i)) match { + case Left(x) => res1 += x + case Right(x) => res2 += x + } + i += 1 + } + (res1.result(), res2.result()) + } + + /** Returns a new array with the elements in reversed order. */ + @inline def reverse: Array[A] = { + val len = xs.length + val res = new Array[A](len) + var i = 0 + while(i < len) { + res(len-i-1) = xs(i) + i += 1 + } + res + } + + /** An iterator yielding elements in reversed order. + * + * Note: `xs.reverseIterator` is the same as `xs.reverse.iterator` but implemented more efficiently. + * + * @return an iterator yielding the elements of this array in reversed order + */ + def reverseIterator: Iterator[A] = + ((xs: Any @unchecked) match { + case xs: Array[AnyRef] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Int] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Double] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Long] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Float] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Char] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Byte] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Short] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Boolean] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Unit] => new ArrayOps.ReverseIterator(xs) + case null => throw new NullPointerException + }).asInstanceOf[Iterator[A]] + + /** Selects all elements of this array which satisfy a predicate. + * + * @param p the predicate used to test elements. + * @return a new array consisting of all elements of this array that satisfy the given predicate `p`. + */ + def filter(p: A => Boolean): Array[A] = { + val res = ArrayBuilder.make[A] + var i = 0 + while(i < xs.length) { + val x = xs(i) + if(p(x)) res += x + i += 1 + } + res.result() + } + + /** Selects all elements of this array which do not satisfy a predicate. + * + * @param p the predicate used to test elements. + * @return a new array consisting of all elements of this array that do not satisfy the given predicate `p`. + */ + def filterNot(p: A => Boolean): Array[A] = filter(x => !p(x)) + + /** Sorts this array according to an Ordering. + * + * The sort is stable. 
That is, elements that are equal (as determined by + `ord.compare`) appear in the same order in the sorted sequence as in the original. + * + * @see [[scala.math.Ordering]] + * + * @param ord the ordering to be used to compare elements. + * @return an array consisting of the elements of this array + * sorted according to the ordering `ord`. + */ + def sorted[B >: A](implicit ord: Ordering[B]): Array[A] = { + val len = xs.length + def boxed = if(len < ArrayOps.MaxStableSortLength) { + val a = xs.clone() + Sorting.stableSort(a)(ord.asInstanceOf[Ordering[A]]) + a + } else { + val a = Array.copyAs[AnyRef](xs, len)(ClassTag.AnyRef) + Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]) + Array.copyAs[A](a, len) + } + if(len <= 1) xs.clone() + else ((xs: Array[_]) match { + case xs: Array[AnyRef] => + val a = Arrays.copyOf(xs, len); Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]); a + case xs: Array[Int] => + if(ord eq Ordering.Int) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } + else boxed + case xs: Array[Long] => + if(ord eq Ordering.Long) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } + else boxed + case xs: Array[Char] => + if(ord eq Ordering.Char) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } + else boxed + case xs: Array[Byte] => + if(ord eq Ordering.Byte) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } + else boxed + case xs: Array[Short] => + if(ord eq Ordering.Short) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } + else boxed + case xs: Array[Boolean] => + if(ord eq Ordering.Boolean) { val a = Arrays.copyOf(xs, len); Sorting.stableSort(a); a } + else boxed + case xs => boxed + }).asInstanceOf[Array[A]] + } + + /** Sorts this array according to a comparison function. + * + * The sort is stable. That is, elements that are equal (as determined by + * `lt`) appear in the same order in the sorted sequence as in the original. + * + * @param lt the comparison function which tests whether + * its first argument precedes its second argument in + * the desired ordering. + * @return an array consisting of the elements of this array + * sorted according to the comparison function `lt`. + */ + def sortWith(lt: (A, A) => Boolean): Array[A] = sorted(Ordering.fromLessThan(lt)) + + /** Sorts this array according to the Ordering which results from transforming + * an implicitly given Ordering with a transformation function. + * + * @see [[scala.math.Ordering]] + * @param f the transformation function mapping elements + * to some other domain `B`. + * @param ord the ordering assumed on domain `B`. + * @tparam B the target type of the transformation `f`, and the type where + * the ordering `ord` is defined. + * @return an array consisting of the elements of this array + * sorted according to the ordering where `x < y` if + * `ord.lt(f(x), f(y))`. + */ + def sortBy[B](f: A => B)(implicit ord: Ordering[B]): Array[A] = sorted(ord on f) + + /** Creates a non-strict filter of this array. + * + * Note: the difference between `c filter p` and `c withFilter p` is that + * the former creates a new array, whereas the latter only + * restricts the domain of subsequent `map`, `flatMap`, `foreach`, + * and `withFilter` operations. + * + * @param p the predicate used to test elements. + * @return an object of class `ArrayOps.WithFilter`, which supports + * `map`, `flatMap`, `foreach`, and `withFilter` operations. + * All these operations apply to those elements of this array + * which satisfy the predicate `p`.
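+ * + * For example, no intermediate array is built here: + * {{{ + * Array(1, 2, 3).withFilter(_ > 1).foreach(println) // prints 2, then 3 + * }}}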
+ */ + def withFilter(p: A => Boolean): ArrayOps.WithFilter[A]^{p} = new ArrayOps.WithFilter[A](p, xs) + + /** Finds index of first occurrence of some value in this array after or at some start index. + * + * @param elem the element value to search for. + * @param from the start index + * @return the index `>= from` of the first element of this array that is equal (as determined by `==`) + * to `elem`, or `-1`, if none exists. + */ + def indexOf(elem: A, from: Int = 0): Int = { + var i = from + while(i < xs.length) { + if(elem == xs(i)) return i + i += 1 + } + -1 + } + + /** Finds index of the first element satisfying some predicate after or at some start index. + * + * @param p the predicate used to test elements. + * @param from the start index + * @return the index `>= from` of the first element of this array that satisfies the predicate `p`, + * or `-1`, if none exists. + */ + def indexWhere(@deprecatedName("f", "2.13.3") p: A => Boolean, from: Int = 0): Int = { + var i = from + while(i < xs.length) { + if(p(xs(i))) return i + i += 1 + } + -1 + } + + /** Finds index of last occurrence of some value in this array before or at a given end index. + * + * @param elem the element value to search for. + * @param end the end index. + * @return the index `<= end` of the last element of this array that is equal (as determined by `==`) + * to `elem`, or `-1`, if none exists. + */ + def lastIndexOf(elem: A, end: Int = xs.length - 1): Int = { + var i = min(end, xs.length-1) + while(i >= 0) { + if(elem == xs(i)) return i + i -= 1 + } + -1 + } + + /** Finds index of last element satisfying some predicate before or at given end index. + * + * @param p the predicate used to test elements. + * @return the index `<= end` of the last element of this array that satisfies the predicate `p`, + * or `-1`, if none exists. + */ + def lastIndexWhere(p: A => Boolean, end: Int = xs.length - 1): Int = { + var i = min(end, xs.length-1) + while(i >= 0) { + if(p(xs(i))) return i + i -= 1 + } + -1 + } + + /** Finds the first element of the array satisfying a predicate, if any. + * + * @param p the predicate used to test elements. + * @return an option value containing the first element in the array + * that satisfies `p`, or `None` if none exists. + */ + def find(@deprecatedName("f", "2.13.3") p: A => Boolean): Option[A] = { + val idx = indexWhere(p) + if(idx == -1) None else Some(xs(idx)) + } + + /** Tests whether a predicate holds for at least one element of this array. + * + * @param p the predicate used to test elements. + * @return `true` if the given predicate `p` is satisfied by at least one element of this array, otherwise `false` + */ + def exists(@deprecatedName("f", "2.13.3") p: A => Boolean): Boolean = indexWhere(p) >= 0 + + /** Tests whether a predicate holds for all elements of this array. + * + * @param p the predicate used to test elements. + * @return `true` if this array is empty or the given predicate `p` + * holds for all elements of this array, otherwise `false`. + */ + def forall(@deprecatedName("f", "2.13.3") p: A => Boolean): Boolean = { + var i = 0 + while(i < xs.length) { + if(!p(xs(i))) return false + i += 1 + } + true + } + + /** Applies a binary operator to a start value and all elements of this array, + * going left to right. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. 
+ * @return the result of inserting `op` between consecutive elements of this array, + * going left to right with the start value `z` on the left: + * {{{ + * op(...op(z, x_1), x_2, ..., x_n) + * }}} + * where `x,,1,,, ..., x,,n,,` are the elements of this array. + * Returns `z` if this array is empty. + */ + def foldLeft[B](z: B)(op: (B, A) => B): B = { + def f[@specialized(Specializable.Everything) T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { + val length = xs.length + var v: Any = z + var i = 0 + while(i < length) { + v = op(v, xs(i)) + i += 1 + } + v + } + ((xs: Any @unchecked) match { + case null => throw new NullPointerException // null-check first helps static analysis of instanceOf + case xs: Array[AnyRef] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Int] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Double] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Long] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Float] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Char] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Byte] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Short] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Boolean] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Unit] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + }).asInstanceOf[B] + } + + /** Produces an array containing cumulative results of applying the binary + * operator going left to right. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return array with intermediate values. + * + * Example: + * {{{ + * Array(1, 2, 3, 4).scanLeft(0)(_ + _) == Array(0, 1, 3, 6, 10) + * }}} + * + */ + def scanLeft[B : ClassTag](z: B)(op: (B, A) => B): Array[B] = { + var v = z + var i = 0 + val res = new Array[B](xs.length + 1) + while(i < xs.length) { + res(i) = v + v = op(v, xs(i)) + i += 1 + } + res(i) = v + res + } + + /** Computes a prefix scan of the elements of the array. + * + * Note: The neutral element `z` may be applied more than once. + * + * @tparam B element type of the resulting array + * @param z neutral element for the operator `op` + * @param op the associative operator for the scan + * + * @return a new array containing the prefix scan of the elements in this array + */ + def scan[B >: A : ClassTag](z: B)(op: (B, B) => B): Array[B] = scanLeft(z)(op) + + /** Produces an array containing cumulative results of applying the binary + * operator going right to left. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return array with intermediate values. + * + * Example: + * {{{ + * Array(4, 3, 2, 1).scanRight(0)(_ + _) == Array(10, 6, 3, 1, 0) + * }}} + * + */ + def scanRight[B : ClassTag](z: B)(op: (A, B) => B): Array[B] = { + var v = z + var i = xs.length - 1 + val res = new Array[B](xs.length + 1) + res(xs.length) = z + while(i >= 0) { + v = op(xs(i), v) + res(i) = v + i -= 1 + } + res + } + + /** Applies a binary operator to all elements of this array and a start value, + * going right to left. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. 
+ * @return the result of inserting `op` between consecutive elements of this array, + * going right to left with the start value `z` on the right: + * {{{ + * op(x_1, op(x_2, ... op(x_n, z)...)) + * }}} + * where `x,,1,,, ..., x,,n,,` are the elements of this array. + * Returns `z` if this array is empty. + */ + def foldRight[B](z: B)(op: (A, B) => B): B = { + def f[@specialized(Specializable.Everything) T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { + var v = z + var i = xs.length - 1 + while(i >= 0) { + v = op(xs(i), v) + i -= 1 + } + v + } + ((xs: Any @unchecked) match { + case null => throw new NullPointerException + case xs: Array[AnyRef] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Int] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Double] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Long] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Float] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Char] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Byte] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Short] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Boolean] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Unit] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + }).asInstanceOf[B] + + } + + /** Folds the elements of this array using the specified associative binary operator. + * + * @tparam A1 a type parameter for the binary operator, a supertype of `A`. + * @param z a neutral element for the fold operation; may be added to the result + * an arbitrary number of times, and must not change the result (e.g., `Nil` for list concatenation, + * 0 for addition, or 1 for multiplication). + * @param op a binary operator that must be associative. + * @return the result of applying the fold operator `op` between all the elements, or `z` if this array is empty. + */ + def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = foldLeft(z)(op) + + /** Builds a new array by applying a function to all elements of this array. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given function + * `f` to each element of this array and collecting the results. 
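+ * + * For example: + * {{{ + * Array(1, 2, 3).map(_ * 2) // Array(2, 4, 6) + * }}}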
+ */ + def map[B](f: A => B)(implicit ct: ClassTag[B]): Array[B] = { + val len = xs.length + val ys = new Array[B](len) + if(len > 0) { + var i = 0 + (xs: Any @unchecked) match { + case xs: Array[AnyRef] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Int] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Double] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Long] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Float] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Char] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Byte] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Short] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Boolean] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + } + } + ys + } + + def mapInPlace(f: A => A): Array[A] = { + var i = 0 + while (i < xs.length) { + xs.update(i, f(xs(i))) + i = i + 1 + } + xs + } + + /** Builds a new array by applying a function to all elements of this array + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given collection-valued function + * `f` to each element of this array and concatenating the results. + */ + def flatMap[B : ClassTag](f: A => IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + var i = 0 + while(i < xs.length) { + b ++= f(xs(i)) + i += 1 + } + b.result() + } + + def flatMap[BS, B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = + flatMap[B](x => asIterable(f(x))) + + /** Flattens a two-dimensional array by concatenating all its rows + * into a single array. + * + * @tparam B Type of row elements. + * @param asIterable A function that converts elements of this array to rows - Iterables of type `B`. + * @return An array obtained by concatenating rows of this array. + */ + def flatten[B](implicit asIterable: A => IterableOnce[B], m: ClassTag[B]): Array[B] = { + val b = ArrayBuilder.make[B] + val len = xs.length + var size = 0 + var i = 0 + while(i < len) { + xs(i) match { + case it: IterableOnce[_] => + val k = it.knownSize + if(k > 0) size += k + case a: Array[_] => size += a.length + case _ => + } + i += 1 + } + if(size > 0) b.sizeHint(size) + i = 0 + while(i < len) { + b ++= asIterable(xs(i)) + i += 1 + } + b.result() + } + + /** Builds a new array by applying a partial function to all elements of this array + * on which the function is defined. + * + * @param pf the partial function which filters and maps the array. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. + */ + def collect[B: ClassTag](pf: PartialFunction[A, B]): Array[B] = { + val fallback: Any => Any = ArrayOps.fallback + val b = ArrayBuilder.make[B] + var i = 0 + while (i < xs.length) { + val v = pf.applyOrElse(xs(i), fallback) + if (v.asInstanceOf[AnyRef] ne fallback) b.addOne(v.asInstanceOf[B]) + i += 1 + } + b.result() + } + + /** Finds the first element of the array for which the given partial function is defined, and applies the + * partial function to it. 
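+ * + * For example: + * {{{ + * Array(1, 2, 3).collectFirst { case x if x > 1 => x * 10 } // Some(20) + * }}}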
*/ + def collectFirst[B](@deprecatedName("f","2.13.9") pf: PartialFunction[A, B]): Option[B] = { + val fallback: Any => Any = ArrayOps.fallback + var i = 0 + while (i < xs.length) { + val v = pf.applyOrElse(xs(i), fallback) + if (v.asInstanceOf[AnyRef] ne fallback) return Some(v.asInstanceOf[B]) + i += 1 + } + None + } + + /** Returns an array formed from this array and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is longer than the other, its remaining elements are ignored. + * + * @param that The iterable providing the second half of each result pair + * @tparam B the type of the second half of the returned pairs + * @return a new array containing pairs consisting of corresponding elements of this array and `that`. + * The length of the returned array is the minimum of the lengths of this array and `that`. + */ + def zip[B](that: IterableOnce[B]): Array[(A, B)] = { + val b = new ArrayBuilder.ofRef[(A, B)]() + val k = that.knownSize + b.sizeHint(if(k >= 0) min(k, xs.length) else xs.length) + var i = 0 + val it = that.iterator + while(i < xs.length && it.hasNext) { + b += ((xs(i), it.next())) + i += 1 + } + b.result() + } + + /** Analogous to `zip` except that the elements in each collection are not consumed until a strict operation is + * invoked on the returned `LazyZip2` decorator. + * + * Calls to `lazyZip` can be chained to support higher arities (up to 4) without incurring the expense of + * constructing and deconstructing intermediary tuples. + * + * {{{ + * val xs = List(1, 2, 3) + * val res = (xs lazyZip xs lazyZip xs lazyZip xs).map((a, b, c, d) => a + b + c + d) + * // res == List(4, 8, 12) + * }}} + * + * @param that the iterable providing the second element of each eventual pair + * @tparam B the type of the second element in each eventual pair + * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs + * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. + */ + def lazyZip[B](that: Iterable[B]): LazyZip2[A, B, Array[A]] = new LazyZip2(xs, immutable.ArraySeq.unsafeWrapArray(xs), that) + + /** Returns an array formed from this array and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is shorter than the other, + * placeholder elements are used to extend the shorter collection to the length of the longer. + * + * @param that the iterable providing the second half of each result pair + * @param thisElem the element to be used to fill up the result if this array is shorter than `that`. + * @param thatElem the element to be used to fill up the result if `that` is shorter than this array. + * @return a new array containing pairs consisting of corresponding elements of this array and `that`. + * The length of the returned array is the maximum of the lengths of this array and `that`. + * If this array is shorter than `that`, `thisElem` values are used to pad the result. + * If `that` is shorter than this array, `thatElem` values are used to pad the result. 
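+ * + * For example: + * {{{ + * Array(1, 2, 3).zipAll(List("a"), 0, "z") // Array((1,a), (2,z), (3,z)) + * }}}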
+ */ + def zipAll[A1 >: A, B](that: Iterable[B], thisElem: A1, thatElem: B): Array[(A1, B)] = { + val b = new ArrayBuilder.ofRef[(A1, B)]() + val k = that.knownSize + b.sizeHint(max(k, xs.length)) + var i = 0 + val it = that.iterator + while(i < xs.length && it.hasNext) { + b += ((xs(i), it.next())) + i += 1 + } + while(it.hasNext) { + b += ((thisElem, it.next())) + i += 1 + } + while(i < xs.length) { + b += ((xs(i), thatElem)) + i += 1 + } + b.result() + } + + /** Zips this array with its indices. + * + * @return A new array containing pairs consisting of all elements of this array paired with their index. + * Indices start at `0`. + */ + def zipWithIndex: Array[(A, Int)] = { + val b = new Array[(A, Int)](xs.length) + var i = 0 + while(i < xs.length) { + b(i) = ((xs(i), i)) + i += 1 + } + b + } + + /** A copy of this array with an element appended. */ + def appended[B >: A : ClassTag](x: B): Array[B] = { + val dest = Array.copyAs[B](xs, xs.length+1) + dest(xs.length) = x + dest + } + + @`inline` final def :+ [B >: A : ClassTag](x: B): Array[B] = appended(x) + + /** A copy of this array with an element prepended. */ + def prepended[B >: A : ClassTag](x: B): Array[B] = { + val dest = new Array[B](xs.length + 1) + dest(0) = x + Array.copy(xs, 0, dest, 1, xs.length) + dest + } + + @`inline` final def +: [B >: A : ClassTag](x: B): Array[B] = prepended(x) + + /** A copy of this array with all elements of a collection prepended. */ + def prependedAll[B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + val k = prefix.knownSize + if(k >= 0) b.sizeHint(k + xs.length) + b.addAll(prefix) + if(k < 0) b.sizeHint(b.length + xs.length) + b.addAll(xs) + b.result() + } + + /** A copy of this array with all elements of an array prepended. */ + def prependedAll[B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = { + val dest = Array.copyAs[B](prefix, prefix.length+xs.length) + Array.copy(xs, 0, dest, prefix.length, xs.length) + dest + } + + @`inline` final def ++: [B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = prependedAll(prefix) + + @`inline` final def ++: [B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = prependedAll(prefix) + + /** A copy of this array with all elements of a collection appended. */ + def appendedAll[B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + val k = suffix.knownSize + if(k >= 0) b.sizeHint(k + xs.length) + b.addAll(xs) + b.addAll(suffix) + b.result() + } + + /** A copy of this array with all elements of an array appended. */ + def appendedAll[B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = { + val dest = Array.copyAs[B](xs, xs.length+suffix.length) + Array.copy(suffix, 0, dest, xs.length, suffix.length) + dest + } + + @`inline` final def :++ [B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) + + @`inline` final def :++ [B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) + + @`inline` final def concat[B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) + + @`inline` final def concat[B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) + + @`inline` final def ++[B >: A : ClassTag](xs: IterableOnce[B]): Array[B] = appendedAll(xs) + + @`inline` final def ++[B >: A : ClassTag](xs: Array[_ <: B]): Array[B] = appendedAll(xs) + + /** Tests whether this array contains a given value as an element. + * + * @param elem the element to test. 
+ * @return `true` if this array has an element that is equal (as + * determined by `==`) to `elem`, `false` otherwise. + */ + def contains(elem: A): Boolean = exists (_ == elem) + + /** Returns a copy of this array with patched values. + * Patching at negative indices is the same as patching starting at 0. + * Patching at indices at or larger than the length of the original array appends the patch to the end. + * If more values are replaced than actually exist, the excess is ignored. + * + * @param from The start index from which to patch + * @param other The patch values + * @param replaced The number of values in the original array that are replaced by the patch. + */ + def patch[B >: A : ClassTag](from: Int, other: IterableOnce[B], replaced: Int): Array[B] = { + val b = ArrayBuilder.make[B] + val k = other.knownSize + val r = if(replaced < 0) 0 else replaced + if(k >= 0) b.sizeHint(xs.length + k - r) + val chunk1 = if(from > 0) min(from, xs.length) else 0 + if(chunk1 > 0) b.addAll(xs, 0, chunk1) + b ++= other + val remaining = xs.length - chunk1 - r + if(remaining > 0) b.addAll(xs, xs.length - remaining, remaining) + b.result() + } + + /** Converts an array of pairs into an array of first elements and an array of second elements. + * + * @tparam A1 the type of the first half of the element pairs + * @tparam A2 the type of the second half of the element pairs + * @param asPair an implicit conversion which asserts that the element type + * of this Array is a pair. + * @param ct1 a class tag for `A1` type parameter that is required to create an instance + * of `Array[A1]` + * @param ct2 a class tag for `A2` type parameter that is required to create an instance + * of `Array[A2]` + * @return a pair of Arrays, containing, respectively, the first and second half + * of each element pair of this Array. + */ + def unzip[A1, A2](implicit asPair: A => (A1, A2), ct1: ClassTag[A1], ct2: ClassTag[A2]): (Array[A1], Array[A2]) = { + val a1 = new Array[A1](xs.length) + val a2 = new Array[A2](xs.length) + var i = 0 + while (i < xs.length) { + val e = asPair(xs(i)) + a1(i) = e._1 + a2(i) = e._2 + i += 1 + } + (a1, a2) + } + + /** Converts an array of triples into three arrays, one containing the elements from each position of the triple. + * + * @tparam A1 the type of the first of three elements in the triple + * @tparam A2 the type of the second of three elements in the triple + * @tparam A3 the type of the third of three elements in the triple + * @param asTriple an implicit conversion which asserts that the element type + * of this Array is a triple. + * @param ct1 a class tag for T1 type parameter that is required to create an instance + * of Array[T1] + * @param ct2 a class tag for T2 type parameter that is required to create an instance + * of Array[T2] + * @param ct3 a class tag for T3 type parameter that is required to create an instance + * of Array[T3] + * @return a triple of Arrays, containing, respectively, the first, second, and third + * elements from each element triple of this Array. + */ + def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3), ct1: ClassTag[A1], ct2: ClassTag[A2], + ct3: ClassTag[A3]): (Array[A1], Array[A2], Array[A3]) = { + val a1 = new Array[A1](xs.length) + val a2 = new Array[A2](xs.length) + val a3 = new Array[A3](xs.length) + var i = 0 + while (i < xs.length) { + val e = asTriple(xs(i)) + a1(i) = e._1 + a2(i) = e._2 + a3(i) = e._3 + i += 1 + } + (a1, a2, a3) + } + + /** Transposes a two dimensional array. + * + * @tparam B Type of row elements. 
+ * @param asArray A function that converts elements of this array to rows - arrays of type `B`. + * @return An array obtained by replacing elements of this array with the rows they represent. + */ + def transpose[B](implicit asArray: A => Array[B]): Array[Array[B]] = { + val aClass = xs.getClass.getComponentType + val bb = new ArrayBuilder.ofRef[Array[B]]()(ClassTag[Array[B]](aClass)) + if (xs.length == 0) bb.result() + else { + def mkRowBuilder() = ArrayBuilder.make[B](ClassTag[B](aClass.getComponentType)) + val bs = new ArrayOps(asArray(xs(0))).map((x: B) => mkRowBuilder()) + for (xs <- this) { + var i = 0 + for (x <- new ArrayOps(asArray(xs))) { + bs(i) += x + i += 1 + } + } + for (b <- new ArrayOps(bs)) bb += b.result() + bb.result() + } + } + + /** Apply `f` to each element for its side effects. + * Note: [U] parameter needed to help scalac's type inference. + */ + def foreach[U](f: A => U): Unit = { + val len = xs.length + var i = 0 + (xs: Any @unchecked) match { + case xs: Array[AnyRef] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Int] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Double] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Long] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Float] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Char] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Byte] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Short] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Boolean] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + } + } + + /** Selects all the elements of this array ignoring the duplicates. + * + * @return a new array consisting of all the elements of this array without duplicates. + */ + def distinct: Array[A] = distinctBy(identity) + + /** Selects all the elements of this array ignoring the duplicates as determined by `==` after applying + * the transforming function `f`. + * + * @param f The transforming function whose result is used to determine the uniqueness of each element + * @tparam B the type of the elements after being transformed by `f` + * @return a new array consisting of all the elements of this array without duplicates. + */ + def distinctBy[B](f: A -> B): Array[A] = + ArrayBuilder.make[A].addAll(iterator.distinctBy(f)).result() + + /** A copy of this array with an element value appended until a given target length is reached. + * + * @param len the target length + * @param elem the padding value + * @tparam B the element type of the returned array. + * @return a new array consisting of + * all elements of this array followed by the minimal number of occurrences of `elem` so + * that the resulting collection has a length of at least `len`. + */ + def padTo[B >: A : ClassTag](len: Int, elem: B): Array[B] = { + var i = xs.length + val newlen = max(i, len) + val dest = Array.copyAs[B](xs, newlen) + while(i < newlen) { + dest(i) = elem + i += 1 + } + dest + } + + /** Produces the range of all indices of this sequence. + * + * @return a `Range` value from `0` to one less than the length of this array. + */ + def indices: Range = Range(0, xs.length) + + /** Partitions this array into a map of arrays according to some discriminator function. + * + * @param f the discriminator function. + * @tparam K the type of keys returned by the discriminator function.
+ * @return A map from keys to arrays such that the following invariant holds: + * {{{ + * (xs groupBy f)(k) = xs filter (x => f(x) == k) + * }}} + * That is, every key `k` is bound to an array of those elements `x` + * for which `f(x)` equals `k`. + */ + def groupBy[K](f: A => K): immutable.Map[K, Array[A]] = { + val m = mutable.Map.empty[K, ArrayBuilder[A]] + val len = xs.length + var i = 0 + while(i < len) { + val elem = xs(i) + val key = f(elem) + val bldr = m.getOrElseUpdate(key, ArrayBuilder.make[A]) + bldr += elem + i += 1 + } + m.view.mapValues(_.result()).toMap + } + + /** + * Partitions this array into a map of arrays according to a discriminator function `key`. + * Each element in a group is transformed into a value of type `B` using the `value` function. + * + * It is equivalent to `groupBy(key).mapValues(_.map(f))`, but more efficient. + * + * {{{ + * case class User(name: String, age: Int) + * + * def namesByAge(users: Array[User]): Map[Int, Array[String]] = + * users.groupMap(_.age)(_.name) + * }}} + * + * @param key the discriminator function + * @param f the element transformation function + * @tparam K the type of keys returned by the discriminator function + * @tparam B the type of values returned by the transformation function + */ + def groupMap[K, B : ClassTag](key: A => K)(f: A => B): immutable.Map[K, Array[B]] = { + val m = mutable.Map.empty[K, ArrayBuilder[B]] + val len = xs.length + var i = 0 + while(i < len) { + val elem = xs(i) + val k = key(elem) + val bldr = m.getOrElseUpdate(k, ArrayBuilder.make[B]) + bldr += f(elem) + i += 1 + } + m.view.mapValues(_.result()).toMap + } + + @`inline` final def toSeq: immutable.Seq[A] = toIndexedSeq + + def toIndexedSeq: immutable.IndexedSeq[A] = + immutable.ArraySeq.unsafeWrapArray(Array.copyOf(xs, xs.length)) + + /** Copy elements of this array to another array. + * Fills the given array `xs` starting at index 0. + * Copying will stop once either all the elements of this array have been copied, + * or the end of the array is reached. + * + * @param xs the array to fill. + * @tparam B the type of the elements of the array. + */ + def copyToArray[B >: A](xs: Array[B]): Int = copyToArray(xs, 0) + + /** Copy elements of this array to another array. + * Fills the given array `xs` starting at index `start`. + * Copying will stop once either all the elements of this array have been copied, + * or the end of the array is reached. + * + * @param xs the array to fill. + * @param start the starting index within the destination array. + * @tparam B the type of the elements of the array. + */ + def copyToArray[B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) + + /** Copy elements of this array to another array. + * Fills the given array `xs` starting at index `start` with at most `len` values. + * Copying will stop once either all the elements of this array have been copied, + * or the end of the array is reached, or `len` elements have been copied. + * + * @param xs the array to fill. + * @param start the starting index within the destination array. + * @param len the maximal number of elements to copy. + * @tparam B the type of the elements of the array. + */ + def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(this.xs.length, xs.length, start, len) + if (copied > 0) { + Array.copy(this.xs, 0, xs, start, copied) + } + copied + } + + /** Create a copy of this array with the specified element type. 
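+ * For example, `Array(1, 2, 3).toArray[Any]` copies the elements into a new `Array[Any]`.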
*/ + def toArray[B >: A: ClassTag]: Array[B] = { + val destination = new Array[B](xs.length) + copyToArray(destination, 0) + destination + } + + /** Counts the number of elements in this array which satisfy a predicate */ + def count(p: A => Boolean): Int = { + var i, res = 0 + val len = xs.length + while(i < len) { + if(p(xs(i))) res += 1 + i += 1 + } + res + } + + // can't use a default arg because we already have another overload with a default arg + /** Tests whether this array starts with the given array. */ + @`inline` def startsWith[B >: A](that: Array[B]): Boolean = startsWith(that, 0) + + /** Tests whether this array contains the given array at a given index. + * + * @param that the array to test + * @param offset the index where the array is searched. + * @return `true` if the array `that` is contained in this array at + * index `offset`, otherwise `false`. + */ + def startsWith[B >: A](that: Array[B], offset: Int): Boolean = { + val safeOffset = offset.max(0) + val thatl = that.length + if(thatl > xs.length-safeOffset) thatl == 0 + else { + var i = 0 + while(i < thatl) { + if(xs(i+safeOffset) != that(i)) return false + i += 1 + } + true + } + } + + /** Tests whether this array ends with the given array. + * + * @param that the array to test + * @return `true` if this array has `that` as a suffix, `false` otherwise. + */ + def endsWith[B >: A](that: Array[B]): Boolean = { + val thatl = that.length + val off = xs.length - thatl + if(off < 0) false + else { + var i = 0 + while(i < thatl) { + if(xs(i+off) != that(i)) return false + i += 1 + } + true + } + } + + /** A copy of this array with one single replaced element. + * @param index the position of the replacement + * @param elem the replacing element + * @return a new array which is a copy of this array with the element at position `index` replaced by `elem`. + * @throws IndexOutOfBoundsException if `index` does not satisfy `0 <= index < length`. + */ + def updated[B >: A : ClassTag](index: Int, elem: B): Array[B] = { + if(index < 0 || index >= xs.length) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${xs.length-1})") + val dest = toArray[B] + dest(index) = elem + dest + } + + @`inline` def view: IndexedSeqView[A] = new ArrayOps.ArrayView[A](xs) + + + /* ************************************************************************************************************ + The remaining methods are provided for completeness but they delegate to mutable.ArraySeq implementations which + may not provide the best possible performance. We need them in `ArrayOps` because their return type + mentions `C` (which is `Array[A]` in `StringOps` and `mutable.ArraySeq[A]` in `mutable.ArraySeq`). + ************************************************************************************************************ */ + + + /** Computes the multiset difference between this array and another sequence. + * + * @param that the sequence of elements to remove + * @return a new array which contains all elements of this array + * except some of occurrences of elements that also appear in `that`. + * If an element value `x` appears + * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form + * part of the result, but any following occurrences will. + */ + def diff[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).diff(that).toArray[A] + + /** Computes the multiset intersection between this array and another sequence. + * + * @param that the sequence of elements to intersect with. 
+ * @return a new array which contains all elements of this array + * which also appear in `that`. + * If an element value `x` appears + * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained + * in the result, but any following occurrences will be omitted. + */ + def intersect[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).intersect(that).toArray[A] + + /** Groups elements in fixed size blocks by passing a "sliding window" + * over them (as opposed to partitioning them, as is done in grouped.) + * @see [[scala.collection.Iterator]], method `sliding` + * + * @param size the number of elements per group + * @param step the distance between the first elements of successive groups + * @return An iterator producing arrays of size `size`, except the + * last element (which may be the only element) will be truncated + * if there are fewer than `size` elements remaining to be grouped. + */ + def sliding(size: Int, step: Int = 1): Iterator[Array[A]] = mutable.ArraySeq.make(xs).sliding(size, step).map(_.toArray[A]) + + /** Iterates over combinations of elements. + * + * A '''combination''' of length `n` is a sequence of `n` elements selected in order of their first index in this sequence. + * + * For example, `"xyx"` has two combinations of length 2. The `x` is selected first: `"xx"`, `"xy"`. + * The sequence `"yx"` is not returned as a combination because it is subsumed by `"xy"`. + * + * If there is more than one way to generate the same combination, only one will be returned. + * + * For example, the result `"xy"` arbitrarily selected one of the `x` elements. + * + * As a further illustration, `"xyxx"` has three different ways to generate `"xy"` because there are three elements `x` + * to choose from. Moreover, there are three unordered pairs `"xx"` but only one is returned. + * + * It is not specified which of these equal combinations is returned. It is an implementation detail + * that should not be relied on. For example, the combination `"xx"` does not necessarily contain + * the first `x` in this sequence. This behavior is observable if the elements compare equal + * but are not identical. + * + * As a consequence, `"xyx".combinations(3).next()` is `"xxy"`: the combination does not reflect the order + * of the original sequence, but the order in which elements were selected, by "first index"; + * the order of each `x` element is also arbitrary. + * + * @return An Iterator which traverses the n-element combinations of this array + * @example {{{ + * Array('a', 'b', 'b', 'b', 'c').combinations(2).map(runtime.ScalaRunTime.stringOf).foreach(println) + * // Array(a, b) + * // Array(a, c) + * // Array(b, b) + * // Array(b, c) + * Array('b', 'a', 'b').combinations(2).map(runtime.ScalaRunTime.stringOf).foreach(println) + * // Array(b, b) + * // Array(b, a) + * }}} + */ + def combinations(n: Int): Iterator[Array[A]] = mutable.ArraySeq.make(xs).combinations(n).map(_.toArray[A]) + + /** Iterates over distinct permutations of elements. + * + * @return An Iterator which traverses the distinct permutations of this array. + * @example {{{ + * Array('a', 'b', 'b').permutations.map(runtime.ScalaRunTime.stringOf).foreach(println) + * // Array(a, b, b) + * // Array(b, a, b) + * // Array(b, b, a) + * }}} + */ + def permutations: Iterator[Array[A]] = mutable.ArraySeq.make(xs).permutations.map(_.toArray[A]) + + // we have another overload here, so we need to duplicate this method + /** Tests whether this array contains the given sequence at a given index. 
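+   *  For example, `Array(1, 2, 3).startsWith(List(2, 3), 1)` evaluates to `true`.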
+ * + * @param that the sequence to test + * @param offset the index where the sequence is searched. + * @return `true` if the sequence `that` is contained in this array at + * index `offset`, otherwise `false`. + */ + def startsWith[B >: A](that: IterableOnce[B], offset: Int = 0): Boolean = mutable.ArraySeq.make(xs).startsWith(that, offset) + + // we have another overload here, so we need to duplicate this method + /** Tests whether this array ends with the given sequence. + * + * @param that the sequence to test + * @return `true` if this array has `that` as a suffix, `false` otherwise. + */ + def endsWith[B >: A](that: Iterable[B]): Boolean = mutable.ArraySeq.make(xs).endsWith(that) +} diff --git a/tests/pos-special/stdlib/collection/BitSet.scala b/tests/pos-special/stdlib/collection/BitSet.scala new file mode 100644 index 000000000000..39c15dbe808f --- /dev/null +++ b/tests/pos-special/stdlib/collection/BitSet.scala @@ -0,0 +1,348 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import java.io.{ObjectInputStream, ObjectOutputStream} + +import scala.annotation.nowarn +import scala.collection.Stepper.EfficientSplit +import scala.collection.mutable.Builder +import language.experimental.captureChecking + +/** Base type of bitsets. + * + * This trait provides most of the operations of a `BitSet` independently of its representation. + * It is inherited by all concrete implementations of bitsets. + * + * @define bitsetinfo + * Bitsets are sets of non-negative integers which are represented as + * variable-size arrays of bits packed into 64-bit words. The lower bound of memory footprint of a bitset is + * determined by the largest number stored in it. + * @define coll bitset + * @define Coll `BitSet` + */ +trait BitSet extends SortedSet[Int] with BitSetOps[BitSet] { + override protected def fromSpecific(coll: IterableOnce[Int]^): BitSet = bitSetFactory.fromSpecific(coll) + override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder + override def empty: BitSet = bitSetFactory.empty + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "BitSet" + override def unsorted: Set[Int] = this +} + +@SerialVersionUID(3L) +object BitSet extends SpecificIterableFactory[Int, BitSet] { + private[collection] final val ordMsg = "No implicit Ordering[${B}] found to build a SortedSet[${B}]. You may want to upcast to a Set[Int] first by calling `unsorted`." + private[collection] final val zipOrdMsg = "No implicit Ordering[${B}] found to build a SortedSet[(Int, ${B})]. You may want to upcast to a Set[Int] first by calling `unsorted`." 
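+
+  // A small usage sketch: bitsets support the usual set algebra on non-negative Ints, e.g.
+  //   BitSet(1, 2, 3) | BitSet(3, 4)   // BitSet(1, 2, 3, 4)
+  //   BitSet(1, 2, 3) ^ BitSet(3, 4)   // BitSet(1, 2, 4) (symmetric difference)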
+
+  def empty: BitSet = immutable.BitSet.empty
+  def newBuilder: Builder[Int, BitSet] = immutable.BitSet.newBuilder
+  def fromSpecific(it: IterableOnce[Int]^): BitSet = immutable.BitSet.fromSpecific(it)
+
+  @SerialVersionUID(3L)
+  private[collection] abstract class SerializationProxy(@transient protected val coll: BitSet) extends Serializable {
+
+    @transient protected var elems: Array[Long] = _
+
+    private[this] def writeObject(out: ObjectOutputStream): Unit = {
+      out.defaultWriteObject()
+      val nwords = coll.nwords
+      out.writeInt(nwords)
+      var i = 0
+      while(i < nwords) {
+        out.writeLong(coll.word(i))
+        i += 1
+      }
+    }
+
+    private[this] def readObject(in: ObjectInputStream): Unit = {
+      in.defaultReadObject()
+      val nwords = in.readInt()
+      elems = new Array[Long](nwords)
+      var i = 0
+      while(i < nwords) {
+        elems(i) = in.readLong()
+        i += 1
+      }
+    }
+
+    protected[this] def readResolve(): Any
+  }
+}
+
+/** Base implementation type of bitsets */
+trait BitSetOps[+C <: BitSet with BitSetOps[C]]
+  extends SortedSetOps[Int, SortedSet, C] { self =>
+  import BitSetOps._
+
+  def bitSetFactory: SpecificIterableFactory[Int, C]
+
+  def unsorted: Set[Int]
+
+  final def ordering: Ordering[Int] = Ordering.Int
+
+  /** The number of words (each with 64 bits) making up the set */
+  protected[collection] def nwords: Int
+
+  /** The word at index `idx`, or 0L if outside the range of the set
+   *  '''Note:''' requires `idx >= 0`
+   */
+  protected[collection] def word(idx: Int): Long
+
+  /** Creates a new set of this kind from an array of longs
+   */
+  protected[collection] def fromBitMaskNoCopy(elems: Array[Long]): C
+
+  def contains(elem: Int): Boolean =
+    0 <= elem && (word(elem >> LogWL) & (1L << elem)) != 0L
+
+  def iterator: Iterator[Int] = iteratorFrom(0)
+
+  def iteratorFrom(start: Int): Iterator[Int] = new AbstractIterator[Int] {
+    private[this] var currentPos = if (start > 0) start >> LogWL else 0
+    private[this] var currentWord = if (start > 0) word(currentPos) & (-1L << (start & (WordLength - 1))) else word(0)
+    final override def hasNext: Boolean = {
+      while (currentWord == 0) {
+        if (currentPos + 1 >= nwords) return false
+        currentPos += 1
+        currentWord = word(currentPos)
+      }
+      true
+    }
+    final override def next(): Int = {
+      if (hasNext) {
+        val bitPos = java.lang.Long.numberOfTrailingZeros(currentWord)
+        currentWord &= currentWord - 1
+        (currentPos << LogWL) + bitPos
+      } else Iterator.empty.next()
+    }
+  }
+
+  override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = {
+    val st = scala.collection.convert.impl.BitSetStepper.from(this)
+    val r =
+      if (shape.shape == StepperShape.IntShape) st
+      else {
+        assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape")
+        AnyStepper.ofParIntStepper(st)
+      }
+    r.asInstanceOf[S with EfficientSplit]
+  }
+
+  override def size: Int = {
+    var s = 0
+    var i = nwords
+    while (i > 0) {
+      i -= 1
+      s += java.lang.Long.bitCount(word(i))
+    }
+    s
+  }
+
+  override def isEmpty: Boolean = 0 until nwords forall (i => word(i) == 0)
+
+  @inline private[this] def smallestInt: Int = {
+    val thisnwords = nwords
+    var i = 0
+    while(i < thisnwords) {
+      val currentWord = word(i)
+      if (currentWord != 0L) {
+        return java.lang.Long.numberOfTrailingZeros(currentWord) + (i * WordLength)
+      }
+      i += 1
+    }
+    throw new UnsupportedOperationException("empty.smallestInt")
+  }
+
+  @inline private[this] def largestInt: Int = {
+    var i = nwords - 1
+    while(i >= 0) {
+      val currentWord = word(i)
+      if (currentWord != 0L) {
+        return ((i + 1)
* WordLength) - java.lang.Long.numberOfLeadingZeros(currentWord) - 1 + } + i -= 1 + } + throw new UnsupportedOperationException("empty.largestInt") + } + + override def max[B >: Int](implicit ord: Ordering[B]): Int = + if (Ordering.Int eq ord) largestInt + else if (Ordering.Int isReverseOf ord) smallestInt + else super.max(ord) + + + override def min[B >: Int](implicit ord: Ordering[B]): Int = + if (Ordering.Int eq ord) smallestInt + else if (Ordering.Int isReverseOf ord) largestInt + else super.min(ord) + + override def foreach[U](f: Int => U): Unit = { + /* NOTE: while loops are significantly faster as of 2.11 and + one major use case of bitsets is performance. Also, there + is nothing to do when all bits are clear, so use that as + the inner loop condition. */ + var i = 0 + while (i < nwords) { + var w = word(i) + var j = i * WordLength + while (w != 0L) { + if ((w&1L) == 1L) f(j) + w = w >>> 1 + j += 1 + } + i += 1 + } + } + + /** Creates a bit mask for this set as a new array of longs + */ + def toBitMask: Array[Long] = { + val a = new Array[Long](nwords) + var i = a.length + while(i > 0) { + i -= 1 + a(i) = word(i) + } + a + } + + def rangeImpl(from: Option[Int], until: Option[Int]): C = { + val a = coll.toBitMask + val len = a.length + if (from.isDefined) { + val f = from.get + val w = f >> LogWL + val b = f & (WordLength - 1) + if (w >= 0) { + java.util.Arrays.fill(a, 0, math.min(w, len), 0) + if (b > 0 && w < len) a(w) &= ~((1L << b) - 1) + } + } + if (until.isDefined) { + val u = until.get + val w = u >> LogWL + val b = u & (WordLength - 1) + if (w < len) { + java.util.Arrays.fill(a, math.max(w + 1, 0), len, 0) + if (w >= 0) a(w) &= (1L << b) - 1 + } + } + coll.fromBitMaskNoCopy(a) + } + + override def concat(other: collection.IterableOnce[Int]): C = other match { + case otherBitset: BitSet => + val len = coll.nwords max otherBitset.nwords + val words = new Array[Long](len) + for (idx <- 0 until len) + words(idx) = this.word(idx) | otherBitset.word(idx) + fromBitMaskNoCopy(words) + case _ => super.concat(other) + } + + override def intersect(other: Set[Int]): C = other match { + case otherBitset: BitSet => + val len = coll.nwords min otherBitset.nwords + val words = new Array[Long](len) + for (idx <- 0 until len) + words(idx) = this.word(idx) & otherBitset.word(idx) + fromBitMaskNoCopy(words) + case _ => super.intersect(other) + } + + abstract override def diff(other: Set[Int]): C = other match { + case otherBitset: BitSet => + val len = coll.nwords + val words = new Array[Long](len) + for (idx <- 0 until len) + words(idx) = this.word(idx) & ~otherBitset.word(idx) + fromBitMaskNoCopy(words) + case _ => super.diff(other) + } + + /** Computes the symmetric difference of this bitset and another bitset by performing + * a bitwise "exclusive-or". + * + * @param other the other bitset to take part in the symmetric difference. + * @return a bitset containing those bits of this + * bitset or the other bitset that are not contained in both bitsets. + */ + def xor(other: BitSet): C = { + val len = coll.nwords max other.nwords + val words = new Array[Long](len) + for (idx <- 0 until len) + words(idx) = coll.word(idx) ^ other.word(idx) + coll.fromBitMaskNoCopy(words) + } + + @`inline` final def ^ (other: BitSet): C = xor(other) + + /** + * Builds a new bitset by applying a function to all elements of this bitset + * @param f the function to apply to each element. 
+ * @return a new bitset resulting from applying the given function ''f'' to + * each element of this bitset and collecting the results + */ + def map(f: Int => Int): C = fromSpecific(new View.Map(this, f)) + + def flatMap(f: Int => IterableOnce[Int]): C = fromSpecific(new View.FlatMap(this, f)) + + def collect(pf: PartialFunction[Int, Int]): C = fromSpecific(super[SortedSetOps].collect(pf)) + + override def partition(p: Int => Boolean): (C, C) = { + val left = filter(p) + (left, this &~ left) + } +} + +object BitSetOps { + + /* Final vals can sometimes be inlined as constants (faster) */ + private[collection] final val LogWL = 6 + private[collection] final val WordLength = 64 + private[collection] final val MaxSize = (Int.MaxValue >> LogWL) + 1 + + private[collection] def updateArray(elems: Array[Long], idx: Int, w: Long): Array[Long] = { + var len = elems.length + while (len > 0 && (elems(len - 1) == 0L || w == 0L && idx == len - 1)) len -= 1 + var newlen = len + if (idx >= newlen && w != 0L) newlen = idx + 1 + val newelems = new Array[Long](newlen) + Array.copy(elems, 0, newelems, 0, len) + if (idx < newlen) newelems(idx) = w + else assert(w == 0L) + newelems + } + + private[collection] def computeWordForFilter(pred: Int => Boolean, isFlipped: Boolean, oldWord: Long, wordIndex: Int): Long = + if (oldWord == 0L) 0L else { + var w = oldWord + val trailingZeroes = java.lang.Long.numberOfTrailingZeros(w) + var jmask = 1L << trailingZeroes + var j = wordIndex * BitSetOps.WordLength + trailingZeroes + val maxJ = (wordIndex + 1) * BitSetOps.WordLength - java.lang.Long.numberOfLeadingZeros(w) + while (j != maxJ) { + if ((w & jmask) != 0L) { + if (pred(j) == isFlipped) { + // j did not pass the filter here + w = w & ~jmask + } + } + jmask = jmask << 1 + j += 1 + } + w + } +} diff --git a/tests/pos-special/stdlib/collection/BufferedIterator.scala b/tests/pos-special/stdlib/collection/BufferedIterator.scala new file mode 100644 index 000000000000..cca40dd31d40 --- /dev/null +++ b/tests/pos-special/stdlib/collection/BufferedIterator.scala @@ -0,0 +1,32 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +import language.experimental.captureChecking + +/** Buffered iterators are iterators which provide a method `head` + * that inspects the next element without discarding it. + */ +trait BufferedIterator[+A] extends Iterator[A] { + + /** Returns next element of iterator without advancing beyond it. + */ + def head: A + + /** Returns an option of the next element of an iterator without advancing beyond it. + * @return the next element of this iterator if it has a next element + * `None` if it does not + */ + def headOption : Option[A] = if (hasNext) Some(head) else None + + override def buffered: this.type = this +} diff --git a/tests/pos-special/stdlib/collection/BuildFrom.scala b/tests/pos-special/stdlib/collection/BuildFrom.scala new file mode 100644 index 000000000000..05df32856d82 --- /dev/null +++ b/tests/pos-special/stdlib/collection/BuildFrom.scala @@ -0,0 +1,128 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.annotation.implicitNotFound +import scala.collection.mutable.Builder +import scala.collection.immutable.WrappedString +import scala.reflect.ClassTag +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure + +/** Builds a collection of type `C` from elements of type `A` when a source collection of type `From` is available. + * Implicit instances of `BuildFrom` are available for all collection types. + * + * @tparam From Type of source collection + * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) + * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) + */ +@implicitNotFound(msg = "Cannot construct a collection of type ${C} with elements of type ${A} based on a collection of type ${From}.") +trait BuildFrom[-From, -A, +C] extends Any { self => + def fromSpecific(from: From)(it: IterableOnce[A]^): C + // !!! this is wrong, we need two versions of fromSpecific; one mapping + // to C^{it} when C is an Iterable, and one mapping to C when C is a Seq, Map, or Set. + // But that requires a large scale refactoring of BuildFrom. The unsafeAssumePure + // calls in this file are needed to sweep that problem under the carpet. + + /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. + * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. */ + def newBuilder(from: From): Builder[A, C] + + @deprecated("Use newBuilder() instead of apply()", "2.13.0") + @`inline` def apply(from: From): Builder[A, C] = newBuilder(from) + + /** Partially apply a BuildFrom to a Factory */ + def toFactory(from: From): Factory[A, C] = new Factory[A, C] { + def fromSpecific(it: IterableOnce[A]^): C = self.fromSpecific(from)(it) + def newBuilder: Builder[A, C] = self.newBuilder(from) + } +} + +object BuildFrom extends BuildFromLowPriority1 { + + /** Build the source collection type from a MapOps */ + implicit def buildFromMapOps[CC[X, Y] <: Map[X, Y] with MapOps[X, Y, CC, _], K0, V0, K, V]: BuildFrom[CC[K0, V0] with Map[K0, V0], (K, V), CC[K, V] with Map[K, V]] = new BuildFrom[CC[K0, V0], (K, V), CC[K, V]] { + //TODO: Reuse a prototype instance + def newBuilder(from: CC[K0, V0]): Builder[(K, V), CC[K, V]] = (from: MapOps[K0, V0, CC, _]).mapFactory.newBuilder[K, V] + def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]^): CC[K, V] = (from: MapOps[K0, V0, CC, _]).mapFactory.from(it) + } + + /** Build the source collection type from a SortedMapOps */ + implicit def buildFromSortedMapOps[CC[X, Y] <: SortedMap[X, Y] with SortedMapOps[X, Y, CC, _], K0, V0, K : Ordering, V]: BuildFrom[CC[K0, V0] with SortedMap[K0, V0], (K, V), CC[K, V] with SortedMap[K, V]] = new BuildFrom[CC[K0, V0], (K, V), CC[K, V]] { + def newBuilder(from: CC[K0, V0]): Builder[(K, V), CC[K, V]] = (from: SortedMapOps[K0, V0, CC, _]).sortedMapFactory.newBuilder[K, V] + def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]^): CC[K, V] = (from: SortedMapOps[K0, V0, CC, _]).sortedMapFactory.from(it) + } + + implicit def buildFromBitSet[C <: BitSet with BitSetOps[C]]: BuildFrom[C, Int, C] = + new BuildFrom[C, Int, C] { + def fromSpecific(from: C)(it: IterableOnce[Int]^): C = from.bitSetFactory.fromSpecific(it) + def newBuilder(from: C): Builder[Int, C] = from.bitSetFactory.newBuilder + } + + implicit val buildFromString: 
BuildFrom[String, Char, String] =
+    new BuildFrom[String, Char, String] {
+      def fromSpecific(from: String)(it: IterableOnce[Char]^): String = Factory.stringFactory.fromSpecific(it)
+      def newBuilder(from: String): Builder[Char, String] = Factory.stringFactory.newBuilder
+    }
+
+  implicit val buildFromWrappedString: BuildFrom[WrappedString, Char, WrappedString] =
+    new BuildFrom[WrappedString, Char, WrappedString] {
+      def fromSpecific(from: WrappedString)(it: IterableOnce[Char]^): WrappedString = WrappedString.fromSpecific(it)
+      def newBuilder(from: WrappedString): mutable.Builder[Char, WrappedString] = WrappedString.newBuilder
+    }
+
+  implicit def buildFromArray[A : ClassTag]: BuildFrom[Array[_], A, Array[A]] =
+    new BuildFrom[Array[_], A, Array[A]] {
+      def fromSpecific(from: Array[_])(it: IterableOnce[A]^): Array[A] = Factory.arrayFactory[A].fromSpecific(it)
+      def newBuilder(from: Array[_]): Builder[A, Array[A]] = Factory.arrayFactory[A].newBuilder
+    }
+
+  implicit def buildFromView[A, B]: BuildFrom[View[A], B, View[B]] =
+    new BuildFrom[View[A], B, View[B]] {
+      def fromSpecific(from: View[A])(it: IterableOnce[B]^): View[B] = View.from(it).unsafeAssumePure
+      def newBuilder(from: View[A]): Builder[B, View[B]] = View.newBuilder
+    }
+
+}
+
+trait BuildFromLowPriority1 extends BuildFromLowPriority2 {
+
+  /** Build the source collection type from an Iterable with SortedOps */
+  // Restating the upper bound of CC in the result type seems redundant, but it serves to prune the
+  // implicit search space for faster compilation and reduced chance of divergence. See the compilation
+  // test in test/junit/scala/collection/BuildFromTest.scala and discussion in https://github.com/scala/scala/pull/10209
+  implicit def buildFromSortedSetOps[CC[X] <: SortedSet[X] with SortedSetOps[X, CC, _], A0, A : Ordering]: BuildFrom[CC[A0] with SortedSet[A0], A, CC[A] with SortedSet[A]] = new BuildFrom[CC[A0], A, CC[A]] {
+    def newBuilder(from: CC[A0]): Builder[A, CC[A]] = (from: SortedSetOps[A0, CC, _]).sortedIterableFactory.newBuilder[A]
+    def fromSpecific(from: CC[A0])(it: IterableOnce[A]^): CC[A] = (from: SortedSetOps[A0, CC, _]).sortedIterableFactory.from(it)
+  }
+
+  implicit def fallbackStringCanBuildFrom[A]: BuildFrom[String, A, immutable.IndexedSeq[A]] =
+    new BuildFrom[String, A, immutable.IndexedSeq[A]] {
+      def fromSpecific(from: String)(it: IterableOnce[A]^): immutable.IndexedSeq[A] = immutable.IndexedSeq.from(it)
+      def newBuilder(from: String): Builder[A, immutable.IndexedSeq[A]] = immutable.IndexedSeq.newBuilder[A]
+    }
+}
+
+trait BuildFromLowPriority2 {
+  /** Build the source collection type from an IterableOps */
+  implicit def buildFromIterableOps[CC[X] <: Iterable[X] with IterableOps[X, CC, _], A0, A]: BuildFrom[CC[A0], A, CC[A]] = new BuildFrom[CC[A0], A, CC[A]] {
+    //TODO: Reuse a prototype instance
+    def newBuilder(from: CC[A0]): Builder[A, CC[A]] = (from: IterableOps[A0, CC, _]).iterableFactory.newBuilder[A]
+    def fromSpecific(from: CC[A0])(it: IterableOnce[A]^): CC[A] = (from: IterableOps[A0, CC, _]).iterableFactory.from(it).unsafeAssumePure
+  }
+
+  implicit def buildFromIterator[A]: BuildFrom[Iterator[_], A, Iterator[A]] = new BuildFrom[Iterator[_], A, Iterator[A]] {
+    def newBuilder(from: Iterator[_]): mutable.Builder[A, Iterator[A]] = Iterator.newBuilder
+    def fromSpecific(from: Iterator[_])(it: IterableOnce[A]^): Iterator[A] = Iterator.from(it).unsafeAssumePure
+  }
+}
diff --git a/tests/pos-special/stdlib/collection/DefaultMap.scala b/tests/pos-special/stdlib/collection/DefaultMap.scala
new
file mode 100644 index 000000000000..baa9eceadae5 --- /dev/null +++ b/tests/pos-special/stdlib/collection/DefaultMap.scala @@ -0,0 +1,21 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +import language.experimental.captureChecking + +/** A default map which builds a default `immutable.Map` implementation for all + * transformations. + */ +@deprecated("DefaultMap is no longer necessary; extend Map directly", "2.13.0") +trait DefaultMap[K, +V] extends Map[K, V] diff --git a/tests/pos-special/stdlib/collection/Factory.scala b/tests/pos-special/stdlib/collection/Factory.scala new file mode 100644 index 000000000000..c50fa395a0fb --- /dev/null +++ b/tests/pos-special/stdlib/collection/Factory.scala @@ -0,0 +1,798 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.collection.immutable.NumericRange +import scala.language.implicitConversions +import scala.collection.mutable.Builder +import scala.annotation.unchecked.uncheckedVariance +import scala.reflect.ClassTag +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure + +/** + * A factory that builds a collection of type `C` with elements of type `A`. + * + * This is a general form of any factory ([[IterableFactory]], + * [[SortedIterableFactory]], [[MapFactory]] and [[SortedMapFactory]]) whose + * element type is fixed. + * + * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) + * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) + */ +trait Factory[-A, +C] extends Pure { + + /** + * @return A collection of type `C` containing the same elements + * as the source collection `it`. + * @param it Source collection + */ + def fromSpecific(it: IterableOnce[A]^): C + + /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. + * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. 
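+   *  A typical builder-based construction (an illustrative sketch, using the implicit
+   *  `Factory[Char, String]` defined in the companion object below):
+   *  {{{
+   *    val b = implicitly[Factory[Char, String]].newBuilder
+   *    b += 'a'; b += 'b'
+   *    b.result() // "ab"
+   *  }}}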
+   */
+  def newBuilder: Builder[A, C]
+}
+
+object Factory {
+
+  implicit val stringFactory: Factory[Char, String] = new StringFactory
+  @SerialVersionUID(3L)
+  private class StringFactory extends Factory[Char, String] with Serializable {
+    def fromSpecific(it: IterableOnce[Char]^): String = {
+      val b = new mutable.StringBuilder(scala.math.max(0, it.knownSize))
+      b ++= it
+      b.result()
+    }
+    def newBuilder: Builder[Char, String] = new mutable.StringBuilder()
+  }
+
+  implicit def arrayFactory[A: ClassTag]: Factory[A, Array[A]] = new ArrayFactory[A]
+  @SerialVersionUID(3L)
+  private class ArrayFactory[A: ClassTag] extends Factory[A, Array[A]] with Serializable {
+    def fromSpecific(it: IterableOnce[A]^): Array[A] = {
+      val b = newBuilder
+      b.sizeHint(scala.math.max(0, it.knownSize))
+      b ++= it
+      b.result()
+    }
+    def newBuilder: Builder[A, Array[A]] = mutable.ArrayBuilder.make[A]
+  }
+
+}
+
+/** Base trait for companion objects of unconstrained collection types that may require
+  * multiple traversals of a source collection to build a target collection `CC`.
+  *
+  * @tparam CC Collection type constructor (e.g. `List`)
+  * @define factoryInfo
+  *   This object provides a set of operations to create $Coll values.
+  *
+  * @define coll collection
+  * @define Coll `Iterable`
+  */
+trait IterableFactory[+CC[_]] extends Serializable, Pure {
+
+  /** Creates a target $coll from an existing source collection
+   *
+   *  @param source Source collection
+   *  @tparam A the type of the collection’s elements
+   *  @return a new $coll with the elements of `source`
+   */
+  def from[A](source: IterableOnce[A]^): CC[A]^{source}
+
+  /** An empty collection
+   *  @tparam A the type of the ${coll}'s elements
+   */
+  def empty[A]: CC[A]
+
+  /** Creates a $coll with the specified elements.
+   *  @tparam A the type of the ${coll}'s elements
+   *  @param elems the elements of the created $coll
+   *  @return a new $coll with elements `elems`
+   */
+  def apply[A](elems: A*): CC[A] = from(elems)
+
+  /** Produces a $coll containing repeated applications of a function to a start value.
+   *
+   *  @param start the start value of the $coll
+   *  @param len the number of elements contained in the $coll
+   *  @param f the function that's repeatedly applied
+   *  @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...`
+   */
+  def iterate[A](start: A, len: Int)(f: A => A): CC[A]^{f} = from(new View.Iterate(start, len)(f))
+
+  /** Produces a $coll that uses a function `f` to produce elements of type `A`
+   *  and update an internal state of type `S`.
+   *
+   *  @param init State initial value
+   *  @param f Computes the next element (or returns `None` to signal
+   *           the end of the collection)
+   *  @tparam A Type of the elements
+   *  @tparam S Type of the internal state
+   *  @return a $coll that produces elements using `f` until `f` returns `None`
+   */
+  def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A]^{f} = from(new View.Unfold(init)(f))
+
+  /** Produces a $coll containing a sequence of increasing integers.
+   *
+   *  @param start the first element of the $coll
+   *  @param end the end value of the $coll (the first value NOT contained)
+   *  @return a $coll with values `start, start + 1, ..., end - 1`
+   */
+  def range[A : Integral](start: A, end: A): CC[A] = from(NumericRange(start, end, implicitly[Integral[A]].one))
+
+  /** Produces a $coll containing equally spaced values in some integer interval.
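+   *  For example, `List.range(0, 10, 3)` produces `List(0, 3, 6, 9)`.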
+ * @param start the start value of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @param step the difference between successive elements of the $coll (must be positive or negative) + * @return a $coll with values `start, start + step, ...` up to, but excluding `end` + */ + def range[A : Integral](start: A, end: A, step: A): CC[A] = from(NumericRange(start, end, step)) + + /** + * @return A builder for $Coll objects. + * @tparam A the type of the ${coll}’s elements + */ + def newBuilder[A]: Builder[A, CC[A]] + + /** Produces a $coll containing the results of some element computation a number of times. + * @param n the number of elements contained in the $coll. + * @param elem the element computation + * @return A $coll that contains the results of `n` evaluations of `elem`. + */ + def fill[A](n: Int)(elem: => A): CC[A]^{elem} = from(new View.Fill(n)(elem)) + + /** Produces a two-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc + ??? // fill(n1)(fill(n2)(elem)) + + /** Produces a three-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc + ??? // fill(n1)(fill(n2, n3)(elem)).unsafeAssumePure + + /** Produces a four-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc + ??? // fill(n1)(fill(n2, n3, n4)(elem)) + + /** Produces a five-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc + ??? 
// fill(n1)(fill(n2, n3, n4, n5)(elem)) + + /** Produces a $coll containing values of a given function over a range of integer values starting from 0. + * @param n The number of elements in the $coll + * @param f The function computing element values + * @return A $coll consisting of elements `f(0), ..., f(n -1)` + */ + def tabulate[A](n: Int)(f: Int => A): CC[A]^{f} = from(new View.Tabulate(n)(f)) + + /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2)` + * for `0 <= i1 < n1` and `0 <= i2 < n2`. + */ + def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc + ??? // tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + + /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc + ??? // tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + + /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc + ??? // tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + + /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc + ??? // tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) + + /** Concatenates all argument collections into a single $coll. + * + * @param xss the collections that are to be concatenated. + * @return the concatenation of all the collections. 
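+   *  For example, `Seq.concat(Seq(1), Nil, Seq(2, 3))` evaluates to `Seq(1, 2, 3)`.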
+ */ + def concat[A](xss: Iterable[A]*): CC[A] = { + from(xss.foldLeft(View.empty[A])(_ ++ _)) + } + + implicit def iterableFactory[A]: Factory[A, CC[A]] = IterableFactory.toFactory(this) +} + +object IterableFactory { + + /** + * Fixes the element type of `factory` to `A` + * @param factory The factory to fix the element type + * @tparam A Type of elements + * @tparam CC Collection type constructor of the factory (e.g. `Seq`, `List`) + * @return A [[Factory]] that uses the given `factory` to build a collection of elements + * of type `A` + */ + implicit def toFactory[A, CC[_]](factory: IterableFactory[CC]): Factory[A, CC[A]] = new ToFactory[A, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[A, CC[_]](factory: IterableFactory[CC]) extends Factory[A, CC[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]^): CC[A] = factory.from[A](it).unsafeAssumePure + // unsafeAssumePure needed but is unsound, since we confuse Seq and Iterable fromSpecific + def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A] + } + + implicit def toBuildFrom[A, CC[_]](factory: IterableFactory[CC]): BuildFrom[Any, A, CC[A]] = + new BuildFrom[Any, A, CC[A]] { + def fromSpecific(from: Any)(it: IterableOnce[A]^): CC[A] = + factory.from(it).unsafeAssumePure // !!! see remark in BuildFrom why this is necessary + def newBuilder(from: Any) = factory.newBuilder + } + + @SerialVersionUID(3L) + class Delegate[CC[_]](delegate: IterableFactory[CC]) extends IterableFactory[CC] { + override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) + def empty[A]: CC[A] = delegate.empty + def from[E](it: IterableOnce[E]^): CC[E]^{it} = delegate.from(it) + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] + } +} + +// !!! Needed to add this separate trait +trait FreeSeqFactory[+CC[A]] extends IterableFactory[CC]: + def from[A](source: IterableOnce[A]^): CC[A] + override def apply[A](elems: A*): CC[A] = from(elems) + +/** + * @tparam CC Collection type constructor (e.g. `List`) + */ +trait SeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends FreeSeqFactory[CC] { + import SeqFactory.UnapplySeqWrapper + final def unapplySeq[A](x: CC[A] @uncheckedVariance): UnapplySeqWrapper[A] = new UnapplySeqWrapper(x) // TODO is uncheckedVariance sound here? 
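+
+  // The wrapper above is what enables sequence patterns against any such CC,
+  // e.g. (a minimal sketch):
+  //   List(1, 2, 3) match { case List(x, rest @ _*) => (x, rest) }  // (1, List(2, 3))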
+} + +object SeqFactory { + @SerialVersionUID(3L) + class Delegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: SeqFactory[CC]) extends SeqFactory[CC] { + override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) + def empty[A]: CC[A] = delegate.empty + def from[E](it: IterableOnce[E]^): CC[E] = delegate.from(it) + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] + } + + final class UnapplySeqWrapper[A](private val c: SeqOps[A, Seq, Seq[A]]) extends AnyVal { + def isEmpty: false = false + def get: UnapplySeqWrapper[A] = this + def lengthCompare(len: Int): Int = c.lengthCompare(len) + def apply(i: Int): A = c(i) + def drop(n: Int): scala.Seq[A] = c match { + case seq: scala.Seq[A] => seq.drop(n) + case _ => c.view.drop(n).toSeq + } + def toSeq: scala.Seq[A] = c.toSeq + } +} + +trait StrictOptimizedSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends SeqFactory[CC] { + + override def fill[A](n: Int)(elem: => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += elem + i += 1 + } + b.result() + } + + override def tabulate[A](n: Int)(f: Int => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += f(i) + i += 1 + } + b.result() + } + + override def concat[A](xss: Iterable[A]*): CC[A] = { + val b = newBuilder[A] + val knownSizes = xss.view.map(_.knownSize) + if (knownSizes forall (_ >= 0)) { + b.sizeHint(knownSizes.sum) + } + for (xs <- xss) b ++= xs + b.result() + } + +} + +/** + * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) + * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait SpecificIterableFactory[-A, +C] extends Factory[A, C] { + this: SpecificIterableFactory[A, C] => + + def empty: C + def apply(xs: A*): C = fromSpecific(xs) + def fill(n: Int)(elem: => A): C = fromSpecific(new View.Fill(n)(elem)) + def newBuilder: Builder[A, C] + + implicit def specificIterableFactory: Factory[A, C] = this +} + +/** + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait MapFactory[+CC[_, _]] extends Serializable, Pure { + + /** + * An empty Map + */ + def empty[K, V]: CC[K, V] + + /** + * A collection of type Map generated from given iterable object. + */ + def from[K, V](it: IterableOnce[(K, V)]^): CC[K, V] + + /** + * A collection of type Map that contains given key/value bindings. + */ + def apply[K, V](elems: (K, V)*): CC[K, V] = from(elems) + + /** + * The default builder for Map objects. + */ + def newBuilder[K, V]: Builder[(K, V), CC[K, V]] + + /** + * The default Factory instance for maps. + */ + implicit def mapFactory[K, V]: Factory[(K, V), CC[K, V]] = MapFactory.toFactory(this) +} + +object MapFactory { + + /** + * Fixes the key and value types of `factory` to `K` and `V`, respectively + * @param factory The factory to fix the key and value types + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor of the factory (e.g. `Map`, `HashMap`, etc.) 
+ * @return A [[Factory]] that uses the given `factory` to build a map with keys of type `K` + * and values of type `V` + */ + implicit def toFactory[K, V, CC[_, _]](factory: MapFactory[CC]): Factory[(K, V), CC[K, V]] = new ToFactory[K, V, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[K, V, CC[_, _]](factory: MapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]^): CC[K, V] = factory.from[K, V](it) + def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] + } + + implicit def toBuildFrom[K, V, CC[_, _]](factory: MapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = + new BuildFrom[Any, (K, V), CC[K, V]] { + def fromSpecific(from: Any)(it: IterableOnce[(K, V)]^) = factory.from(it) + def newBuilder(from: Any) = factory.newBuilder[K, V] + } + + @SerialVersionUID(3L) + class Delegate[C[_, _]](delegate: MapFactory[C]) extends MapFactory[C] { + override def apply[K, V](elems: (K, V)*): C[K, V] = delegate.apply(elems: _*) + def from[K, V](it: IterableOnce[(K, V)]^): C[K, V] = delegate.from(it) + def empty[K, V]: C[K, V] = delegate.empty + def newBuilder[K, V]: Builder[(K, V), C[K, V]] = delegate.newBuilder + } +} + +/** Base trait for companion objects of collections that require an implicit evidence. + * @tparam CC Collection type constructor (e.g. `ArraySeq`) + * @tparam Ev Unary type constructor for the implicit evidence required for an element type + * (typically `Ordering` or `ClassTag`) + * + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait EvidenceIterableFactory[+CC[_], Ev[_]] extends Serializable, Pure { + + def from[E : Ev](it: IterableOnce[E]^): CC[E] + + def empty[A : Ev]: CC[A] + + def apply[A : Ev](xs: A*): CC[A] = from(xs) + + /** Produces a $coll containing the results of some element computation a number of times. + * @param n the number of elements contained in the $coll. + * @param elem the element computation + * @return A $coll that contains the results of `n` evaluations of `elem`. + */ + def fill[A : Ev](n: Int)(elem: => A): CC[A] = from(new View.Fill(n)(elem)) + + /** Produces a $coll containing values of a given function over a range of integer values starting from 0. + * @param n The number of elements in the $coll + * @param f The function computing element values + * @return A $coll consisting of elements `f(0), ..., f(n -1)` + */ + def tabulate[A : Ev](n: Int)(f: Int => A): CC[A] = from(new View.Tabulate(n)(f)) + + /** Produces a $coll containing repeated applications of a function to a start value. + * + * @param start the start value of the $coll + * @param len the number of elements contained in the $coll + * @param f the function that's repeatedly applied + * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` + */ + def iterate[A : Ev](start: A, len: Int)(f: A => A): CC[A] = from(new View.Iterate(start, len)(f)) + + /** Produces a $coll that uses a function `f` to produce elements of type `A` + * and update an internal state of type `S`. 
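+   *  For example, `SortedSet.unfold(1)(s => if (s < 20) Some((s, s * 2)) else None)`
+   *  produces `SortedSet(1, 2, 4, 8, 16)`.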
+   *
+   *  @param init State initial value
+   *  @param f Computes the next element (or returns `None` to signal
+   *           the end of the collection)
+   *  @tparam A Type of the elements
+   *  @tparam S Type of the internal state
+   *  @return a $coll that produces elements using `f` until `f` returns `None`
+   */
+  def unfold[A : Ev, S](init: S)(f: S => Option[(A, S)]): CC[A] = from(new View.Unfold(init)(f))
+
+  def newBuilder[A : Ev]: Builder[A, CC[A]]
+
+  implicit def evidenceIterableFactory[A : Ev]: Factory[A, CC[A]] = EvidenceIterableFactory.toFactory(this)
+}
+
+object EvidenceIterableFactory {
+
+  /**
+   * Fixes the element type of `factory` to `A`
+   * @param factory The factory to fix the element type
+   * @tparam A Type of elements
+   * @tparam CC Collection type constructor of the factory (e.g. `TreeSet`)
+   * @tparam Ev Type constructor of the evidence (usually `Ordering` or `ClassTag`)
+   * @return A [[Factory]] that uses the given `factory` to build a collection of elements
+   *         of type `A`
+   */
+  implicit def toFactory[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]): Factory[A, CC[A]] = new ToFactory[Ev, A, CC](factory)
+
+  @SerialVersionUID(3L)
+  private[this] class ToFactory[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends Factory[A, CC[A]] with Serializable {
+    def fromSpecific(it: IterableOnce[A]^): CC[A] = factory.from[A](it)
+    def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A]
+  }
+
+  implicit def toBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]): BuildFrom[Any, A, CC[A]] = new EvidenceIterableFactoryToBuildFrom(factory)
+  private class EvidenceIterableFactoryToBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends BuildFrom[Any, A, CC[A]] {
+    def fromSpecific(from: Any)(it: IterableOnce[A]^): CC[A] = factory.from[A](it)
+    def newBuilder(from: Any): Builder[A, CC[A]] = factory.newBuilder[A]
+  }
+
+  @SerialVersionUID(3L)
+  class Delegate[CC[_], Ev[_]](delegate: EvidenceIterableFactory[CC, Ev]) extends EvidenceIterableFactory[CC, Ev] {
+    override def apply[A: Ev](xs: A*): CC[A] = delegate.apply(xs: _*)
+    def empty[A : Ev]: CC[A] = delegate.empty
+    def from[E : Ev](it: IterableOnce[E]^): CC[E] = delegate.from(it)
+    def newBuilder[A : Ev]: Builder[A, CC[A]] = delegate.newBuilder[A]
+  }
+}
+
+/** Base trait for companion objects of collections that require an implicit `Ordering`.
+  * @tparam CC Collection type constructor (e.g. `SortedSet`)
+  */
+trait SortedIterableFactory[+CC[_]] extends EvidenceIterableFactory[CC, Ordering]
+
+object SortedIterableFactory {
+  @SerialVersionUID(3L)
+  class Delegate[CC[_]](delegate: EvidenceIterableFactory[CC, Ordering])
+    extends EvidenceIterableFactory.Delegate[CC, Ordering](delegate) with SortedIterableFactory[CC]
+}
+
+/** Base trait for companion objects of collections that require an implicit `ClassTag`.
+  * @tparam CC Collection type constructor (e.g. `ArraySeq`)
+  */
+trait ClassTagIterableFactory[+CC[_]] extends EvidenceIterableFactory[CC, ClassTag] {
+
+  @`inline` private[this] implicit def ccClassTag[X]: ClassTag[CC[X]] =
+    ClassTag.AnyRef.asInstanceOf[ClassTag[CC[X]]] // Good enough for boxed vs primitive arrays
+
+  /** Produces a $coll containing a sequence of increasing integers.
+ * + * @param start the first element of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @return a $coll with values `start, start + 1, ..., end - 1` + */ + def range[A : Integral : ClassTag](start: A, end: A): CC[A] = from(NumericRange(start, end, implicitly[Integral[A]].one)) + + /** Produces a $coll containing equally spaced values in some integer interval. + * @param start the start value of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @param step the difference between successive elements of the $coll (must be positive or negative) + * @return a $coll with values `start, start + step, ...` up to, but excluding `end` + */ + def range[A : Integral : ClassTag](start: A, end: A, step: A): CC[A] = from(NumericRange(start, end, step)) + + /** Produces a two-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. + */ + def fill[A : ClassTag](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance] = fill(n1)(fill(n2)(elem)) + + /** Produces a three-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. + */ + def fill[A : ClassTag](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance] = fill(n1)(fill(n2, n3)(elem)) + + /** Produces a four-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. + */ + def fill[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4)(elem)) + + /** Produces a five-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. + */ + def fill[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4, n5)(elem)) + + /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. 
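+   *  For example, `ArraySeq.tabulate(2, 3)((i, j) => i * 3 + j)` produces
+   *  `ArraySeq(ArraySeq(0, 1, 2), ArraySeq(3, 4, 5))`.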
+ * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2)` + * for `0 <= i1 < n1` and `0 <= i2 < n2`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + + /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + + /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + + /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) +} + +object ClassTagIterableFactory { + @SerialVersionUID(3L) + class Delegate[CC[_]](delegate: EvidenceIterableFactory[CC, ClassTag]) + extends EvidenceIterableFactory.Delegate[CC, ClassTag](delegate) with ClassTagIterableFactory[CC] + + /** An IterableFactory that uses ClassTag.Any as the evidence for every element type. This may or may not be + * sound depending on the use of the `ClassTag` by the collection implementation. 
*/ + @SerialVersionUID(3L) + class AnyIterableDelegate[CC[_]](delegate: ClassTagIterableFactory[CC]) extends IterableFactory[CC] { + def empty[A]: CC[A] = delegate.empty(ClassTag.Any).asInstanceOf[CC[A]] + def from[A](it: IterableOnce[A]^): CC[A] = delegate.from[Any](it)(ClassTag.Any).asInstanceOf[CC[A]] + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder(ClassTag.Any).asInstanceOf[Builder[A, CC[A]]] + override def apply[A](elems: A*): CC[A] = delegate.apply[Any](elems: _*)(ClassTag.Any).asInstanceOf[CC[A]] + override def iterate[A](start: A, len: Int)(f: A => A): CC[A] = delegate.iterate[A](start, len)(f)(ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = delegate.unfold[A, S](init)(f)(ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def range[A](start: A, end: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end)(i, ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def range[A](start: A, end: A, step: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end, step)(i, ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def fill[A](n: Int)(elem: => A): CC[A] = delegate.fill[Any](n)(elem)(ClassTag.Any).asInstanceOf[CC[A]] + override def tabulate[A](n: Int)(f: Int => A): CC[A] = delegate.tabulate[Any](n)(f)(ClassTag.Any).asInstanceOf[CC[A]] + } +} + +/** + * @tparam CC Collection type constructor (e.g. `ArraySeq`) + */ +trait ClassTagSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends ClassTagIterableFactory[CC] { + import SeqFactory.UnapplySeqWrapper + final def unapplySeq[A](x: CC[A] @uncheckedVariance): UnapplySeqWrapper[A] = new UnapplySeqWrapper(x) // TODO is uncheckedVariance sound here? +} + +object ClassTagSeqFactory { + @SerialVersionUID(3L) + class Delegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: ClassTagSeqFactory[CC]) + extends ClassTagIterableFactory.Delegate[CC](delegate) with ClassTagSeqFactory[CC] + + /** A SeqFactory that uses ClassTag.Any as the evidence for every element type. This may or may not be + * sound depending on the use of the `ClassTag` by the collection implementation. */ + @SerialVersionUID(3L) + class AnySeqDelegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: ClassTagSeqFactory[CC]) + extends ClassTagIterableFactory.AnyIterableDelegate[CC](delegate) with SeqFactory[CC] +} + +trait StrictOptimizedClassTagSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends ClassTagSeqFactory[CC] { + + override def fill[A : ClassTag](n: Int)(elem: => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += elem + i += 1 + } + b.result() + } + + override def tabulate[A : ClassTag](n: Int)(f: Int => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += f(i) + i += 1 + } + b.result() + } + +} + +/** + * @define factoryInfo + * This object provides a set of operations to create $Coll values. 
+ * + * @define coll collection + * @define Coll `Iterable` + */ +trait SortedMapFactory[+CC[_, _]] extends Serializable { + this: SortedMapFactory[CC] => + + def empty[K : Ordering, V]: CC[K, V] + + def from[K : Ordering, V](it: IterableOnce[(K, V)]^): CC[K, V] + + def apply[K : Ordering, V](elems: (K, V)*): CC[K, V] = from(elems) + + def newBuilder[K : Ordering, V]: Builder[(K, V), CC[K, V]] + + implicit def sortedMapFactory[K : Ordering, V]: Factory[(K, V), CC[K, V]] = SortedMapFactory.toFactory(this) + +} + +object SortedMapFactory { + + /** + * Implicit conversion that fixes the key and value types of `factory` to `K` and `V`, + * respectively. + * + * @param factory The factory to fix the key and value types + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor of the factory (e.g. `TreeMap`) + * @return A [[Factory]] that uses the given `factory` to build a map with keys of + * type `K` and values of type `V` + */ + implicit def toFactory[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]): Factory[(K, V), CC[K, V]] = new ToFactory[K, V, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]^): CC[K, V] = factory.from[K, V](it) + def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] + } + + implicit def toBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = new SortedMapFactoryToBuildFrom(factory) + private class SortedMapFactoryToBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends BuildFrom[Any, (K, V), CC[K, V]] { + def fromSpecific(from: Any)(it: IterableOnce[(K, V)]^) = factory.from(it) + def newBuilder(from: Any) = factory.newBuilder[K, V] + } + + @SerialVersionUID(3L) + class Delegate[CC[_, _]](delegate: SortedMapFactory[CC]) extends SortedMapFactory[CC] { + override def apply[K: Ordering, V](elems: (K, V)*): CC[K, V] = delegate.apply(elems: _*) + def from[K : Ordering, V](it: IterableOnce[(K, V)]^): CC[K, V] = delegate.from(it) + def empty[K : Ordering, V]: CC[K, V] = delegate.empty + def newBuilder[K : Ordering, V]: Builder[(K, V), CC[K, V]] = delegate.newBuilder + } +} diff --git a/tests/pos-special/stdlib/collection/Hashing.scala b/tests/pos-special/stdlib/collection/Hashing.scala new file mode 100644 index 000000000000..772dcf5c65da --- /dev/null +++ b/tests/pos-special/stdlib/collection/Hashing.scala @@ -0,0 +1,63 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +import language.experimental.captureChecking + + +protected[collection] object Hashing { + + def elemHashCode(key: Any): Int = key.## + + def improve(hcode: Int): Int = { + var h: Int = hcode + ~(hcode << 9) + h = h ^ (h >>> 14) + h = h + (h << 4) + h ^ (h >>> 10) + } + + def computeHash(key: Any): Int = + improve(elemHashCode(key)) + + /** + * Utility method to keep a subset of all bits in a given bitmap + * + * Example + * bitmap (binary): 00000001000000010000000100000001 + * keep (binary): 1010 + * result (binary): 00000001000000000000000100000000 + * + * @param bitmap the bitmap + * @param keep a bitmask containing which bits to keep + * @return the original bitmap with all bits where keep is not 1 set to 0 + */ + def keepBits(bitmap: Int, keep: Int): Int = { + var result = 0 + var current = bitmap + var kept = keep + while (kept != 0) { + // lowest remaining bit in current + val lsb = current ^ (current & (current - 1)) + if ((kept & 1) != 0) { + // mark bit in result bitmap + result |= lsb + } + // clear lowest remaining one bit in current + current &= ~lsb + // look at the next kept bit + kept >>>= 1 + } + result + } + +} diff --git a/tests/pos-special/stdlib/collection/IndexedSeq.scala b/tests/pos-special/stdlib/collection/IndexedSeq.scala new file mode 100644 index 000000000000..a2d4cc942231 --- /dev/null +++ b/tests/pos-special/stdlib/collection/IndexedSeq.scala @@ -0,0 +1,136 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.annotation.{nowarn, tailrec} +import scala.collection.Searching.{Found, InsertionPoint, SearchResult} +import scala.collection.Stepper.EfficientSplit +import scala.math.Ordering +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure + + +/** Base trait for indexed sequences that have efficient `apply` and `length` */ +trait IndexedSeq[+A] extends Seq[A] + with IndexedSeqOps[A, IndexedSeq, IndexedSeq[A]] + with IterableFactoryDefaults[A, IndexedSeq] { + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "IndexedSeq" + + override def iterableFactory: SeqFactory[IndexedSeq] = IndexedSeq +} + +@SerialVersionUID(3L) +object IndexedSeq extends SeqFactory.Delegate[IndexedSeq](immutable.IndexedSeq) + +/** Base trait for indexed Seq operations */ +trait IndexedSeqOps[+A, +CC[_], +C] extends Any with IndexedSeqViewOps[A, CC, C] with SeqOps[A, CC, C] { self => + + def iterator: Iterator[A] = view.iterator + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntIndexedSeqStepper (this.asInstanceOf[IndexedSeqOps[Int, AnyConstr, _]], 0, length) + case StepperShape.LongShape => new LongIndexedSeqStepper (this.asInstanceOf[IndexedSeqOps[Long, AnyConstr, _]], 0, length) + case StepperShape.DoubleShape => new DoubleIndexedSeqStepper(this.asInstanceOf[IndexedSeqOps[Double, AnyConstr, _]], 0, length) + case _ => shape.parUnbox(new AnyIndexedSeqStepper[A](this, 0, length)) + } + s.asInstanceOf[S with EfficientSplit] + } + + override def reverseIterator: Iterator[A] = view.reverseIterator + + 
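+ // Illustrative note (not in the original source): for primitive element types the
+ // stepper above avoids boxing, e.g. `Vector(1, 2, 3).stepper` is an IntStepper with
+ // EfficientSplit, usable for parallel Java streams via scala.jdk.StreamConverters.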
/* TODO 2.14+ uncomment and delete related code in IterableOnce + @tailrec private def foldl[B](start: Int, end: Int, z: B, op: (B, A) => B): B = + if (start == end) z + else foldl(start + 1, end, op(z, apply(start)), op) + */ + + @tailrec private def foldr[B](start: Int, end: Int, z: B, op: (A, B) => B): B = + if (start == end) z + else foldr(start, end - 1, op(apply(end - 1), z), op) + + //override def foldLeft[B](z: B)(op: (B, A) => B): B = foldl(0, length, z, op) + + override def foldRight[B](z: B)(op: (A, B) => B): B = foldr(0, length, z, op) + + //override def reduceLeft[B >: A](op: (B, A) => B): B = if (length > 0) foldl(1, length, apply(0), op) else super.reduceLeft(op) + + //override def reduceRight[B >: A](op: (A, B) => B): B = if (length > 0) foldr(0, length - 1, apply(length - 1), op) else super.reduceRight(op) + + override def view: IndexedSeqView[A] = new IndexedSeqView.Id[A](this) + + @deprecated("Use .view.slice(from, until) instead of .view(from, until)", "2.13.0") + override def view(from: Int, until: Int): IndexedSeqView[A] = view.slice(from, until) + + override protected def reversed: Iterable[A] = new IndexedSeqView.Reverse(this) + + // Override transformation operations to use more efficient views than the default ones + override def prepended[B >: A](elem: B): CC[B] = iterableFactory.from(new IndexedSeqView.Prepended(elem, this)) + + override def take(n: Int): C = fromSpecific(new IndexedSeqView.Take(this, n)) + + override def takeRight(n: Int): C = fromSpecific(new IndexedSeqView.TakeRight(this, n)) + + override def drop(n: Int): C = fromSpecific(new IndexedSeqView.Drop(this, n)) + + override def dropRight(n: Int): C = fromSpecific(new IndexedSeqView.DropRight(this, n)) + + override def map[B](f: A => B): CC[B] = iterableFactory.from(new IndexedSeqView.Map(this, f)).unsafeAssumePure + + override def reverse: C = fromSpecific(new IndexedSeqView.Reverse(this)) + + override def slice(from: Int, until: Int): C = fromSpecific(new IndexedSeqView.Slice(this, from, until)) + + override def head: A = apply(0) + + override def headOption: Option[A] = if (isEmpty) None else Some(head) + + override def last: A = apply(length - 1) + + // We already inherit an efficient `lastOption = if (isEmpty) None else Some(last)` + + override final def lengthCompare(len: Int): Int = Integer.compare(length, len) + + override def knownSize: Int = length + + override final def lengthCompare(that: Iterable[_]^): Int = { + val res = that.sizeCompare(length) + // can't just invert the result, because `-Int.MinValue == Int.MinValue` + if (res == Int.MinValue) 1 else -res + } + + override def search[B >: A](elem: B)(implicit ord: Ordering[B]): SearchResult = + binarySearch(elem, 0, length)(ord) + + override def search[B >: A](elem: B, from: Int, to: Int)(implicit ord: Ordering[B]): SearchResult = + binarySearch(elem, from, to)(ord) + + @tailrec + private[this] def binarySearch[B >: A](elem: B, from: Int, to: Int) + (implicit ord: Ordering[B]): SearchResult = { + if (from < 0) binarySearch(elem, 0, to) + else if (to > length) binarySearch(elem, from, length) + else if (to <= from) InsertionPoint(from) + else { + val idx = from + (to - from - 1) / 2 + math.signum(ord.compare(elem, apply(idx))) match { + case -1 => binarySearch(elem, from, idx)(ord) + case 1 => binarySearch(elem, idx + 1, to)(ord) + case _ => Found(idx) + } + } + } +} diff --git a/tests/pos-special/stdlib/collection/IndexedSeqView.scala b/tests/pos-special/stdlib/collection/IndexedSeqView.scala new file mode 100644 index 
000000000000..a16e06fa707d --- /dev/null +++ b/tests/pos-special/stdlib/collection/IndexedSeqView.scala @@ -0,0 +1,187 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.annotation.nowarn +import language.experimental.captureChecking + +trait IndexedSeqViewOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { + self: IndexedSeqViewOps[A, CC, C]^ => +} + +/** View defined in terms of indexing a range */ +trait IndexedSeqView[+A] extends IndexedSeqViewOps[A, View, View[A]] with SeqView[A] { + self: IndexedSeqView[A]^ => + + override def view: IndexedSeqView[A]^{this} = this + + @deprecated("Use .view.slice(from, until) instead of .view(from, until)", "2.13.0") + override def view(from: Int, until: Int): IndexedSeqView[A]^{this} = view.slice(from, until) + + override def iterator: Iterator[A]^{this} = new IndexedSeqView.IndexedSeqViewIterator(this) + override def reverseIterator: Iterator[A]^{this} = new IndexedSeqView.IndexedSeqViewReverseIterator(this) + + override def appended[B >: A](elem: B): IndexedSeqView[B]^{this} = new IndexedSeqView.Appended(this, elem) + override def prepended[B >: A](elem: B): IndexedSeqView[B]^{this} = new IndexedSeqView.Prepended(elem, this) + override def take(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.Take(this, n) + override def takeRight(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.TakeRight(this, n) + override def drop(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.Drop(this, n) + override def dropRight(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.DropRight(this, n) + override def map[B](f: A => B): IndexedSeqView[B]^{this, f} = new IndexedSeqView.Map(this, f) + override def reverse: IndexedSeqView[A]^{this} = new IndexedSeqView.Reverse(this) + override def slice(from: Int, until: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.Slice(this, from, until) + override def tapEach[U](f: A => U): IndexedSeqView[A]^{this, f} = new IndexedSeqView.Map(this, { (a: A) => f(a); a}) + + def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(this, suffix) + def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(this, suffix) + def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(prefix, this) + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "IndexedSeqView" +} + +object IndexedSeqView { + + @SerialVersionUID(3L) + private[collection] class IndexedSeqViewIterator[A](self: IndexedSeqView[A]^) extends AbstractIterator[A] with Serializable { + this: IndexedSeqViewIterator[A]^ => + private[this] var current = 0 + private[this] var remainder = self.length + override def knownSize: Int = remainder + @inline private[this] def _hasNext: Boolean = remainder > 0 + def hasNext: Boolean = _hasNext + def next(): A = + if (_hasNext) { + val r = self(current) + current += 1 + remainder -= 1 + r + } else Iterator.empty.next() + + override def drop(n: Int): Iterator[A]^{this} = { + if (n > 0) { + current += n + remainder = Math.max(0, remainder - n) + } + 
this + } + + override protected def sliceIterator(from: Int, until: Int): Iterator[A]^{this} = { + + def formatRange(value : Int) : Int = if (value < 0) 0 else if (value > remainder) remainder else value + + val formatFrom = formatRange(from) + val formatUntil = formatRange(until) + remainder = Math.max(0, formatUntil - formatFrom) + current = current + formatFrom + this + } + } + @SerialVersionUID(3L) + private[collection] class IndexedSeqViewReverseIterator[A](self: IndexedSeqView[A]^) extends AbstractIterator[A] with Serializable { + this: IndexedSeqViewReverseIterator[A]^ => + private[this] var remainder = self.length + private[this] var pos = remainder - 1 + @inline private[this] def _hasNext: Boolean = remainder > 0 + def hasNext: Boolean = _hasNext + def next(): A = + if (_hasNext) { + val r = self(pos) + pos -= 1 + remainder -= 1 + r + } else Iterator.empty.next() + + // from < 0 means don't move pos, until < 0 means don't limit remainder + // + override protected def sliceIterator(from: Int, until: Int): Iterator[A]^{this} = { + if (_hasNext) { + if (remainder <= from) remainder = 0 // exhausted by big skip + else if (from <= 0) { // no skip, pos is same + if (until >= 0 && until < remainder) remainder = until // ...limited by until + } + else { + pos -= from // skip ahead + if (until >= 0 && until < remainder) { // ...limited by until + if (until <= from) remainder = 0 // ...exhausted if limit is smaller than skip + else remainder = until - from // ...limited by until, less the skip + } + else remainder -= from // ...otherwise just less the skip + } + } + this + } + } + + /** An `IndexedSeqViewOps` whose collection type and collection type constructor are unknown */ + type SomeIndexedSeqOps[A] = IndexedSeqViewOps[A, AnyConstr, _] + + @SerialVersionUID(3L) + class Id[+A](underlying: SomeIndexedSeqOps[A]^) + extends SeqView.Id(underlying) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Appended[+A](underlying: SomeIndexedSeqOps[A]^, elem: A) + extends SeqView.Appended(underlying, elem) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Prepended[+A](elem: A, underlying: SomeIndexedSeqOps[A]^) + extends SeqView.Prepended(elem, underlying) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Concat[A](prefix: SomeIndexedSeqOps[A]^, suffix: SomeIndexedSeqOps[A]^) + extends SeqView.Concat[A](prefix, suffix) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Take[A](underlying: SomeIndexedSeqOps[A]^, n: Int) + extends SeqView.Take(underlying, n) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class TakeRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int) + extends SeqView.TakeRight(underlying, n) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Drop[A](underlying: SomeIndexedSeqOps[A]^, n: Int) + extends SeqView.Drop[A](underlying, n) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class DropRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int) + extends SeqView.DropRight[A](underlying, n) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Map[A, B](underlying: SomeIndexedSeqOps[A]^, f: A => B) + extends SeqView.Map(underlying, f) with IndexedSeqView[B] + + @SerialVersionUID(3L) + class Reverse[A](underlying: SomeIndexedSeqOps[A]^) extends SeqView.Reverse[A](underlying) with IndexedSeqView[A] { + override def reverse: IndexedSeqView[A] = underlying match { + case x: IndexedSeqView[A] => x + case _ => super.reverse + } + } + + @SerialVersionUID(3L) + class Slice[A](underlying: SomeIndexedSeqOps[A]^, from: Int, until: Int) extends 
AbstractIndexedSeqView[A] { + protected val lo = from max 0 + protected val hi = (until max 0) min underlying.length + protected val len = (hi - lo) max 0 + @throws[IndexOutOfBoundsException] + def apply(i: Int): A = underlying(lo + i) + def length: Int = len + } +} + +/** Explicit instantiation of the `IndexedSeqView` trait to reduce class file size in subclasses. */ +@SerialVersionUID(3L) +abstract class AbstractIndexedSeqView[+A] extends AbstractSeqView[A] with IndexedSeqView[A] diff --git a/tests/pos-special/stdlib/collection/Iterable.scala b/tests/pos-special/stdlib/collection/Iterable.scala new file mode 100644 index 000000000000..bca80d7be108 --- /dev/null +++ b/tests/pos-special/stdlib/collection/Iterable.scala @@ -0,0 +1,1052 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.annotation.nowarn +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.mutable.Builder +import scala.collection.View.{LeftPartitionMapped, RightPartitionMapped} +import language.experimental.captureChecking + +/** Base trait for generic collections. + * + * @tparam A the element type of the collection + * + * @define Coll `Iterable` + * @define coll iterable collection + */ +trait Iterable[+A] extends IterableOnce[A] + with IterableOps[A, Iterable, Iterable[A]] + with IterableFactoryDefaults[A, Iterable] { + this: Iterable[A]^ => + + // The collection itself + @deprecated("toIterable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.7") + final def toIterable: this.type = this + + final protected def coll: this.type = this + + def iterableFactory: IterableFactory[Iterable] = Iterable + + @deprecated("Iterable.seq always returns the iterable itself", "2.13.0") + def seq: this.type = this + + /** Defines the prefix of this object's `toString` representation. + * + * It is recommended to return the name of the concrete collection type, but + * not implementation subclasses. For example, for `ListMap` this method should + * return `"ListMap"`, not `"Map"` (the supertype) or `"Node"` (an implementation + * subclass). + * + * The default implementation returns "Iterable". It is overridden for the basic + * collection kinds "Seq", "IndexedSeq", "LinearSeq", "Buffer", "Set", "Map", + * "SortedSet", "SortedMap" and "View". + * + * @return a string representation which starts the result of `toString` + * applied to this $coll. By default the string prefix is the + * simple name of the collection class $coll. + */ + protected[this] def className: String = stringPrefix + + /** Forwarder to `className` for use in `scala.runtime.ScalaRunTime`. + * + * This allows the proper visibility for `className` to be + * published, but provides the exclusive access needed by + * `scala.runtime.ScalaRunTime.stringOf` (and a few tests in + * the test suite). + */ + private[scala] final def collectionClassName: String = className + + @deprecatedOverriding("Override className instead", "2.13.0") + protected[this] def stringPrefix: String = "Iterable" + + /** Converts this $coll to a string. + * + * @return a string representation of this collection. 
By default this + * string consists of the `className` of this $coll, followed + * by all elements separated by commas and enclosed in parentheses. + */ + override def toString = mkString(className + "(", ", ", ")") + + /** Analogous to `zip` except that the elements in each collection are not consumed until a strict operation is + * invoked on the returned `LazyZip2` decorator. + * + * Calls to `lazyZip` can be chained to support higher arities (up to 4) without incurring the expense of + * constructing and deconstructing intermediary tuples. + * + * {{{ + * val xs = List(1, 2, 3) + * val res = (xs lazyZip xs lazyZip xs lazyZip xs).map((a, b, c, d) => a + b + c + d) + * // res == List(4, 8, 12) + * }}} + * + * @param that the iterable providing the second element of each eventual pair + * @tparam B the type of the second element in each eventual pair + * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs + * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. + */ + def lazyZip[B](that: Iterable[B]^): LazyZip2[A, B, this.type]^{this, that} = new LazyZip2(this, this, that) +} + +/** Base trait for Iterable operations + * + * =VarianceNote= + * + * We require that for all child classes of Iterable the variance of + * the child class and the variance of the `C` parameter passed to `IterableOps` + * are the same. We cannot express this since we lack variance polymorphism. That's + * why we have to resort at some places to write `C[A @uncheckedVariance]`. + * + * @tparam CC type constructor of the collection (e.g. `List`, `Set`). Operations returning a collection + * with a different type of element `B` (e.g. `map`) return a `CC[B]`. + * @tparam C type of the collection (e.g. `List[Int]`, `String`, `BitSet`). Operations returning a collection + * with the same type of element (e.g. `drop`, `filter`) return a `C`. + * + * @define Coll Iterable + * @define coll iterable collection + * @define orderDependent + * + * Note: might return different results for different runs, unless the underlying collection type is ordered. + * @define orderDependentFold + * + * Note: might return different results for different runs, unless the + * underlying collection type is ordered or the operator is associative + * and commutative. + * @define mayNotTerminateInf + * + * Note: may not terminate for infinite-sized collections. + * @define willNotTerminateInf + * + * Note: will not terminate for infinite-sized collections. + * @define undefinedorder + * The order in which operations are performed on elements is unspecified + * and may be nondeterministic. + */ +trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with IterableOnceOps[A, CC, C] { + this: IterableOps[A, CC, C]^ => + + /** + * @return This collection as an `Iterable[A]`. No new collection will be built if `this` is already an `Iterable[A]`. + */ + // Should be `protected def asIterable`, or maybe removed altogether if it's not needed + @deprecated("toIterable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.7") + def toIterable: Iterable[A]^{this} + + /** Converts this $coll to an unspecified Iterable. Will return + * the same collection if this instance is already Iterable. + * @return An Iterable containing all elements of this $coll. 
+ */ + @deprecated("toTraversable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.0") + final def toTraversable: Traversable[A]^{this} = toIterable + + override def isTraversableAgain: Boolean = true + + /** + * @return This collection as a `C`. + */ + protected def coll: C^{this} + + @deprecated("Use coll instead of repr in a collection implementation, use the collection value itself from the outside", "2.13.0") + final def repr: C^{this} = coll + + /** + * Defines how to turn a given `Iterable[A]` into a collection of type `C`. + * + * This process can be done in a strict way or a non-strict way (i.e. without evaluating + * the elements of the resulting collections). In other words, this method defines + * the evaluation model of the collection. + * + * @note When implementing a custom collection type and refining `C` to the new type, this + * method needs to be overridden (the compiler will issue an error otherwise). In the + * common case where `C =:= CC[A]`, this can be done by mixing in the + * [[scala.collection.IterableFactoryDefaults]] trait, which implements the method using + * [[iterableFactory]]. + * + * @note As witnessed by the `@uncheckedVariance` annotation, using this method + * might be unsound. However, as long as it is called with an + * `Iterable[A]` obtained from `this` collection (as it is the case in the + * implementations of operations where we use a `View[A]`), it is safe. + */ + protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]^): C^{coll} + + /** The companion object of this ${coll}, providing various factory methods. + * + * @note When implementing a custom collection type and refining `CC` to the new type, this + * method needs to be overridden to return a factory for the new type (the compiler will + * issue an error otherwise). + */ + def iterableFactory: IterableFactory[CC] + + @deprecated("Use iterableFactory instead", "2.13.0") + @deprecatedOverriding("Use iterableFactory instead", "2.13.0") + @`inline` def companion: IterableFactory[CC] = iterableFactory + + /** + * @return a strict builder for the same collection type. + * + * Note that in the case of lazy collections (e.g. [[scala.collection.View]] or [[scala.collection.immutable.LazyList]]), + * it is possible to implement this method but the resulting `Builder` will break laziness. + * As a consequence, operations should preferably be implemented with `fromSpecific` + * instead of this method. + * + * @note When implementing a custom collection type and refining `C` to the new type, this + * method needs to be overridden (the compiler will issue an error otherwise). In the + * common case where `C =:= CC[A]`, this can be done by mixing in the + * [[scala.collection.IterableFactoryDefaults]] trait, which implements the method using + * [[iterableFactory]]. + * + * @note As witnessed by the `@uncheckedVariance` annotation, using this method might + * be unsound. However, as long as the returned builder is only fed + * with `A` values taken from `this` instance, it is safe. + */ + protected def newSpecificBuilder: Builder[A @uncheckedVariance, C] + + /** The empty iterable of the same type as this iterable + * + * @return an empty iterable of type `C`. + */ + def empty: C = fromSpecific(Nil) + + /** Selects the first element of this $coll. + * $orderDependent + * @return the first element of this $coll. + * @throws NoSuchElementException if the $coll is empty. 
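+ * @example `List(1, 2, 3).head == 1`
+ * @example `Nil.head` throws `NoSuchElementException`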
+ */ + def head: A = iterator.next() + + /** Optionally selects the first element. + * $orderDependent + * @return the first element of this $coll if it is nonempty, + * `None` if it is empty. + */ + def headOption: Option[A] = { + val it = iterator + if (it.hasNext) Some(it.next()) else None + } + + /** Selects the last element. + * $orderDependent + * @return The last element of this $coll. + * @throws NoSuchElementException If the $coll is empty. + */ + def last: A = { + val it = iterator + var lst = it.next() + while (it.hasNext) lst = it.next() + lst + } + + /** Optionally selects the last element. + * $orderDependent + * @return the last element of this $coll if it is nonempty, + * `None` if it is empty. + */ + def lastOption: Option[A] = if (isEmpty) None else Some(last) + + /** A view over the elements of this collection. */ + def view: View[A]^{this} = View.fromIteratorProvider(() => iterator) + + /** Compares the size of this $coll to a test value. + * + * @param otherSize the test value that gets compared with the size. + * @return A value `x` where + * {{{ + * x < 0 if this.size < otherSize + * x == 0 if this.size == otherSize + * x > 0 if this.size > otherSize + * }}} + * + * The method as implemented here does not call `size` directly; its running time + * is `O(size min otherSize)` instead of `O(size)`. The method should be overridden + * if computing `size` is cheap and `knownSize` returns `-1`. + * + * @see [[sizeIs]] + */ + def sizeCompare(otherSize: Int): Int = { + if (otherSize < 0) 1 + else { + val known = knownSize + if (known >= 0) Integer.compare(known, otherSize) + else { + var i = 0 + val it = iterator + while (it.hasNext) { + if (i == otherSize) return 1 + it.next() + i += 1 + } + i - otherSize + } + } + } + + /** Returns a value class containing operations for comparing the size of this $coll to a test value. + * + * These operations are implemented in terms of [[sizeCompare(Int) `sizeCompare(Int)`]], and + * allow the following more readable usages: + * + * {{{ + * this.sizeIs < size // this.sizeCompare(size) < 0 + * this.sizeIs <= size // this.sizeCompare(size) <= 0 + * this.sizeIs == size // this.sizeCompare(size) == 0 + * this.sizeIs != size // this.sizeCompare(size) != 0 + * this.sizeIs >= size // this.sizeCompare(size) >= 0 + * this.sizeIs > size // this.sizeCompare(size) > 0 + * }}} + */ + @inline final def sizeIs: IterableOps.SizeCompareOps^{this} = new IterableOps.SizeCompareOps(this) + + /** Compares the size of this $coll to the size of another `Iterable`. + * + * @param that the `Iterable` whose size is compared with this $coll's size. + * @return A value `x` where + * {{{ + * x < 0 if this.size < that.size + * x == 0 if this.size == that.size + * x > 0 if this.size > that.size + * }}} + * + * The method as implemented here does not call `size` directly; its running time + * is `O(this.size min that.size)` instead of `O(this.size + that.size)`. + * The method should be overridden if computing `size` is cheap and `knownSize` returns `-1`. 
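+ *
+ * For example:
+ * {{{
+ *   List(1, 2).sizeCompare(Vector(1, 2, 3)) < 0 // true: the list is shorter
+ * }}}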
+ */ + def sizeCompare(that: Iterable[_]^): Int = { + val thatKnownSize = that.knownSize + + if (thatKnownSize >= 0) this sizeCompare thatKnownSize + else { + val thisKnownSize = this.knownSize + + if (thisKnownSize >= 0) { + val res = that sizeCompare thisKnownSize + // can't just invert the result, because `-Int.MinValue == Int.MinValue` + if (res == Int.MinValue) 1 else -res + } else { + val thisIt = this.iterator + val thatIt = that.iterator + while (thisIt.hasNext && thatIt.hasNext) { + thisIt.next() + thatIt.next() + } + java.lang.Boolean.compare(thisIt.hasNext, thatIt.hasNext) + } + } + } + + /** A view over a slice of the elements of this collection. */ + @deprecated("Use .view.slice(from, until) instead of .view(from, until)", "2.13.0") + def view(from: Int, until: Int): View[A]^{this} = view.slice(from, until) + + /** Transposes this $coll of iterable collections into + * a $coll of ${coll}s. + * + * The resulting collection's type will be guided by the + * static type of $coll. For example: + * + * {{{ + * val xs = List( + * Set(1, 2, 3), + * Set(4, 5, 6)).transpose + * // xs == List( + * // List(1, 4), + * // List(2, 5), + * // List(3, 6)) + * + * val ys = Vector( + * List(1, 2, 3), + * List(4, 5, 6)).transpose + * // ys == Vector( + * // Vector(1, 4), + * // Vector(2, 5), + * // Vector(3, 6)) + * }}} + * + * $willForceEvaluation + * + * @tparam B the type of the elements of each iterable collection. + * @param asIterable an implicit conversion which asserts that the + * element type of this $coll is an `Iterable`. + * @return a two-dimensional $coll of ${coll}s which has as ''n''th row + * the ''n''th column of this $coll. + * @throws IllegalArgumentException if all collections in this $coll + * are not of the same size. + */ + def transpose[B](implicit asIterable: A -> /*<:<!!!*/ Iterable[B]): CC[CC[B] @uncheckedVariance] = { + if (isEmpty) + return iterableFactory.empty[CC[B]] + + def fail = throw new IllegalArgumentException("transpose requires all collections have the same size") + + val headSize = asIterable(head).size + val bs: scala.collection.immutable.IndexedSeq[Builder[B, CC[B]]] = scala.collection.immutable.IndexedSeq.fill(headSize)(iterableFactory.newBuilder[B]) + for (xs <- this) { + var i = 0 + for (x <- asIterable(xs)) { + if (i >= headSize) fail + bs(i) += x + i += 1 + } + if (i != headSize) + fail + } + iterableFactory.from(bs.map(_.result())).asInstanceOf // !!! needed for cc + } + + def filter(pred: A => Boolean): C^{this, pred} = fromSpecific(new View.Filter(this, pred, isFlipped = false)) + + def filterNot(pred: A => Boolean): C^{this, pred} = fromSpecific(new View.Filter(this, pred, isFlipped = true)) + + /** Creates a non-strict filter of this $coll. + * + * Note: the difference between `c filter p` and `c withFilter p` is that + * the former creates a new collection, whereas the latter only + * restricts the domain of subsequent `map`, `flatMap`, `foreach`, + * and `withFilter` operations. + * $orderDependent + * + * @param p the predicate used to test elements. + * @return an object of class `WithFilter`, which supports + * `map`, `flatMap`, `foreach`, and `withFilter` operations. + * All these operations apply to those elements of this $coll + * which satisfy the predicate `p`. + */ + def withFilter(p: A => Boolean): collection.WithFilter[A, CC]^{this, p} = new IterableOps.WithFilter(this, p) + + /** A pair of, first, all elements that satisfy predicate `p` and, second, + * all elements that do not. Interesting because it splits a collection in two. + * + * The default implementation provided here needs to traverse the collection twice. + * Strict collections have an overridden version of `partition` in `StrictOptimizedIterableOps`, + * which requires only a single traversal. 
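+ *
+ * For example:
+ * {{{
+ *   List(1, 2, 3, 4).partition(_ % 2 == 0) // (List(2, 4), List(1, 3))
+ * }}}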
+ */ + def partition(p: A => Boolean): (C^{this, p}, C^{this, p}) = { + val first = new View.Filter(this, p, false) + val second = new View.Filter(this, p, true) + (fromSpecific(first), fromSpecific(second)) + } + + override def splitAt(n: Int): (C^{this}, C^{this}) = (take(n), drop(n)) + + def take(n: Int): C^{this} = fromSpecific(new View.Take(this, n)) + + /** Selects the last ''n'' elements. + * $orderDependent + * @param n the number of elements to take from this $coll. + * @return a $coll consisting only of the last `n` elements of this $coll, + * or else the whole $coll, if it has less than `n` elements. + * If `n` is negative, returns an empty $coll. + */ + def takeRight(n: Int): C^{this} = fromSpecific(new View.TakeRight(this, n)) + + /** Takes longest prefix of elements that satisfy a predicate. + * $orderDependent + * @param p The predicate used to test elements. + * @return the longest prefix of this $coll whose elements all satisfy + * the predicate `p`. + */ + def takeWhile(p: A => Boolean): C^{this, p} = fromSpecific(new View.TakeWhile(this, p)) + + def span(p: A => Boolean): (C^{this, p}, C^{this, p}) = (takeWhile(p), dropWhile(p)) + + def drop(n: Int): C^{this} = fromSpecific(new View.Drop(this, n)) + + /** Selects all elements except last ''n'' ones. + * $orderDependent + * @param n the number of elements to drop from this $coll. + * @return a $coll consisting of all elements of this $coll except the last `n` ones, or else the + * empty $coll, if this $coll has less than `n` elements. + * If `n` is negative, don't drop any elements. + */ + def dropRight(n: Int): C^{this} = fromSpecific(new View.DropRight(this, n)) + + def dropWhile(p: A => Boolean): C^{this, p} = fromSpecific(new View.DropWhile(this, p)) + + /** Partitions elements in fixed size ${coll}s. + * @see [[scala.collection.Iterator]], method `grouped` + * + * @param size the number of elements per group + * @return An iterator producing ${coll}s of size `size`, except the + * last will be less than size `size` if the elements don't divide evenly. + */ + def grouped(size: Int): Iterator[C^{this}]^{this} = + iterator.grouped(size).map(fromSpecific) + + /** Groups elements in fixed size blocks by passing a "sliding window" + * over them (as opposed to partitioning them, as is done in `grouped`.) + * + * An empty collection returns an empty iterator, and a non-empty + * collection containing fewer elements than the window size returns + * an iterator that will produce the original collection as its only + * element. + * @see [[scala.collection.Iterator]], method `sliding` + * + * @param size the number of elements per group + * @return An iterator producing ${coll}s of size `size`, except for a + * non-empty collection with less than `size` elements, which + * returns an iterator that produces the source collection itself + * as its only element. + * @example `List().sliding(2) = empty iterator` + * @example `List(1).sliding(2) = Iterator(List(1))` + * @example `List(1, 2).sliding(2) = Iterator(List(1, 2))` + * @example `List(1, 2, 3).sliding(2) = Iterator(List(1, 2), List(2, 3))` + */ + def sliding(size: Int): Iterator[C^{this}]^{this} = sliding(size, 1) + + /** Groups elements in fixed size blocks by passing a "sliding window" + * over them (as opposed to partitioning them, as is done in grouped.) + * + * The returned iterator will be empty when called on an empty collection. 
+ * The last element the iterator produces may be smaller than the window + * size when the original collection isn't exhausted by the window before + * it and its last element isn't skipped by the step before it. + * + * @see [[scala.collection.Iterator]], method `sliding` + * + * @param size the number of elements per group + * @param step the distance between the first elements of successive + * groups + * @return An iterator producing ${coll}s of size `size`, except the last + * element (which may be the only element) will be smaller + * if there are fewer than `size` elements remaining to be grouped. + * @example `List(1, 2, 3, 4, 5).sliding(2, 2) = Iterator(List(1, 2), List(3, 4), List(5))` + * @example `List(1, 2, 3, 4, 5, 6).sliding(2, 3) = Iterator(List(1, 2), List(4, 5))` + */ + def sliding(size: Int, step: Int): Iterator[C^{this}]^{this} = + iterator.sliding(size, step).map(fromSpecific) + + /** The rest of the collection without its first element. */ + def tail: C^{this} = { + if (isEmpty) throw new UnsupportedOperationException + drop(1) + } + + /** The initial part of the collection without its last element. + * $willForceEvaluation + */ + def init: C^{this} = { + if (isEmpty) throw new UnsupportedOperationException + dropRight(1) + } + + def slice(from: Int, until: Int): C^{this} = + fromSpecific(new View.Drop(new View.Take(this, until), from)) + + /** Partitions this $coll into a map of ${coll}s according to some discriminator function. + * + * $willForceEvaluation + * + * @param f the discriminator function. + * @tparam K the type of keys returned by the discriminator function. + * @return A map from keys to ${coll}s such that the following invariant holds: + * {{{ + * (xs groupBy f)(k) = xs filter (x => f(x) == k) + * }}} + * That is, every key `k` is bound to a $coll of those elements `x` + * for which `f(x)` equals `k`. + * + */ + def groupBy[K](f: A => K): immutable.Map[K, C] = { + val m = mutable.Map.empty[K, Builder[A, C]] + val it = iterator + while (it.hasNext) { + val elem = it.next() + val key = f(elem) + val bldr = m.getOrElseUpdate(key, newSpecificBuilder) + bldr += elem + } + var result = immutable.HashMap.empty[K, C] + val mapIt = m.iterator + while (mapIt.hasNext) { + val (k, v) = mapIt.next() + result = result.updated(k, v.result()) + } + result + } + + /** + * Partitions this $coll into a map of ${coll}s according to a discriminator function `key`. + * Each element in a group is transformed into a value of type `B` using the `value` function. + * + * It is equivalent to `groupBy(key).mapValues(_.map(f))`, but more efficient. 
+ * + * {{{ + * case class User(name: String, age: Int) + * + * def namesByAge(users: Seq[User]): Map[Int, Seq[String]] = + * users.groupMap(_.age)(_.name) + * }}} + * + * $willForceEvaluation + * + * @param key the discriminator function + * @param f the element transformation function + * @tparam K the type of keys returned by the discriminator function + * @tparam B the type of values returned by the transformation function + */ + def groupMap[K, B](key: A => K)(f: A => B): immutable.Map[K, CC[B]] = { + val m = mutable.Map.empty[K, Builder[B, CC[B]]] + for (elem <- this) { + val k = key(elem) + val bldr = m.getOrElseUpdate(k, iterableFactory.newBuilder[B]) + bldr += f(elem) + } + class Result extends runtime.AbstractFunction1[(K, Builder[B, CC[B]]), Unit] { + var built = immutable.Map.empty[K, CC[B]] + def apply(kv: (K, Builder[B, CC[B]])) = + built = built.updated(kv._1, kv._2.result()) + } + val result = new Result + m.foreach(result) + result.built + } + + /** + * Partitions this $coll into a map according to a discriminator function `key`. All the values that + * have the same discriminator are then transformed by the `f` function and then reduced into a + * single value with the `reduce` function. + * + * It is equivalent to `groupBy(key).mapValues(_.map(f).reduce(reduce))`, but more efficient. + * + * {{{ + * def occurrences[A](as: Seq[A]): Map[A, Int] = + * as.groupMapReduce(identity)(_ => 1)(_ + _) + * }}} + * + * $willForceEvaluation + */ + def groupMapReduce[K, B](key: A => K)(f: A => B)(reduce: (B, B) => B): immutable.Map[K, B] = { + val m = mutable.Map.empty[K, B] + for (elem <- this) { + val k = key(elem) + val v = + m.get(k) match { + case Some(b) => reduce(b, f(elem)) + case None => f(elem) + } + m.put(k, v) + } + m.to(immutable.Map) + } + + /** Computes a prefix scan of the elements of the collection. + * + * Note: The neutral element `z` may be applied more than once. + * + * @tparam B element type of the resulting collection + * @param z neutral element for the operator `op` + * @param op the associative operator for the scan + * + * @return a new $coll containing the prefix scan of the elements in this $coll + */ + def scan[B >: A](z: B)(op: (B, B) => B): CC[B]^{this, op} = scanLeft(z)(op) + + def scanLeft[B](z: B)(op: (B, A) => B): CC[B]^{this, op} = iterableFactory.from(new View.ScanLeft(this, z, op)) + + /** Produces a collection containing cumulative results of applying the operator going right to left. + * The head of the collection is the last cumulative result. 
+ * $willNotTerminateInf + * $orderDependent + * $willForceEvaluation + * + * Example: + * {{{ + * List(1, 2, 3, 4).scanRight(0)(_ + _) == List(10, 9, 7, 4, 0) + * }}} + * + * @tparam B the type of the elements in the resulting collection + * @param z the initial value + * @param op the binary operator applied to the intermediate result and the element + * @return collection with intermediate results + */ + def scanRight[B](z: B)(op: (A, B) => B): CC[B]^{this, op} = { + class Scanner extends runtime.AbstractFunction1[A, Unit] { + var acc = z + var scanned = acc :: immutable.Nil + def apply(x: A) = { + acc = op(x, acc) + scanned ::= acc + } + } + val scanner = new Scanner + reversed.foreach(scanner) + iterableFactory.from(scanner.scanned) + } + + def map[B](f: A => B): CC[B]^{this, f} = iterableFactory.from(new View.Map(this, f)) + + def flatMap[B](f: A => IterableOnce[B]^): CC[B]^{this, f} = iterableFactory.from(new View.FlatMap(this, f)) + + def flatten[B](implicit asIterable: A -> IterableOnce[B]): CC[B]^{this} = flatMap(asIterable) + + def collect[B](pf: PartialFunction[A, B]^): CC[B]^{this, pf} = + iterableFactory.from(new View.Collect(this, pf)) + + /** Applies a function `f` to each element of the $coll and returns a pair of ${coll}s: the first one + * made of those values returned by `f` that were wrapped in [[scala.util.Left]], and the second + * one made of those wrapped in [[scala.util.Right]]. + * + * Example: + * {{{ + * val xs = $Coll(1, "one", 2, "two", 3, "three") partitionMap { + * case i: Int => Left(i) + * case s: String => Right(s) + * } + * // xs == ($Coll(1, 2, 3), + * // $Coll(one, two, three)) + * }}} + * + * @tparam A1 the element type of the first resulting collection + * @tparam A2 the element type of the second resulting collection + * @param f the 'split function' mapping the elements of this $coll to an [[scala.util.Either]] + * + * @return a pair of ${coll}s: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]], + * and the second one made of those wrapped in [[scala.util.Right]]. + */ + def partitionMap[A1, A2](f: A => Either[A1, A2]): (CC[A1]^{this, f}, CC[A2]^{this, f}) = { + val left: View[A1]^{f, this} = new LeftPartitionMapped(this, f) + val right: View[A2]^{f, this} = new RightPartitionMapped(this, f) + (iterableFactory.from(left), iterableFactory.from(right)) + } + + /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the + * right hand operand. The element type of the $coll is the most specific superclass encompassing + * the element types of the two operands. + * + * @param suffix the iterable to append. + * @tparam B the element type of the returned collection. + * @return a new $coll which contains all elements + * of this $coll followed by all elements of `suffix`. + */ + def concat[B >: A](suffix: IterableOnce[B]^): CC[B]^{this, suffix} = iterableFactory.from(suffix match { + case xs: Iterable[B] => new View.Concat(this, xs) + case xs => iterator ++ suffix.iterator + }) + + /** Alias for `concat` */ + @`inline` final def ++ [B >: A](suffix: IterableOnce[B]^): CC[B]^{this, suffix} = concat(suffix) + + /** Returns a $coll formed from this $coll and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is longer than the other, its remaining elements are ignored. 
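+ *
+ * For example:
+ * {{{
+ *   List(1, 2, 3).zip(List("a", "b")) // List((1, "a"), (2, "b"))
+ * }}}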
+ * + * @param that The iterable providing the second half of each result pair + * @tparam B the type of the second half of the returned pairs + * @return a new $coll containing pairs consisting of corresponding elements of this $coll and `that`. + * The length of the returned collection is the minimum of the lengths of this $coll and `that`. + */ + def zip[B](that: IterableOnce[B]^): CC[(A @uncheckedVariance, B)]^{this, that} = iterableFactory.from(that match { // sound bcs of VarianceNote + case that: Iterable[B] => new View.Zip(this, that) + case _ => iterator.zip(that) + }) + + def zipWithIndex: CC[(A @uncheckedVariance, Int)]^{this} = iterableFactory.from(new View.ZipWithIndex(this)) + + /** Returns a $coll formed from this $coll and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is shorter than the other, + * placeholder elements are used to extend the shorter collection to the length of the longer. + * + * @param that the iterable providing the second half of each result pair + * @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`. + * @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll. + * @return a new collection of type `That` containing pairs consisting of + * corresponding elements of this $coll and `that`. The length + * of the returned collection is the maximum of the lengths of this $coll and `that`. + * If this $coll is shorter than `that`, `thisElem` values are used to pad the result. + * If `that` is shorter than this $coll, `thatElem` values are used to pad the result. + */ + def zipAll[A1 >: A, B](that: Iterable[B]^, thisElem: A1, thatElem: B): CC[(A1, B)]^{this, that} = iterableFactory.from(new View.ZipAll(this, that, thisElem, thatElem)) + + /** Converts this $coll of pairs into two collections of the first and second + * half of each pair. + * + * {{{ + * val xs = $Coll( + * (1, "one"), + * (2, "two"), + * (3, "three")).unzip + * // xs == ($Coll(1, 2, 3), + * // $Coll(one, two, three)) + * }}} + * + * @tparam A1 the type of the first half of the element pairs + * @tparam A2 the type of the second half of the element pairs + * @param asPair an implicit conversion which asserts that the element type + * of this $coll is a pair. + * @return a pair of ${coll}s, containing the first, respectively second + * half of each element pair of this $coll. + */ + def unzip[A1, A2](implicit asPair: A -> (A1, A2)): (CC[A1]^{this}, CC[A2]^{this}) = { + val first: View[A1]^{this} = new View.Map[A, A1](this, asPair(_)._1) + val second: View[A2]^{this} = new View.Map[A, A2](this, asPair(_)._2) + (iterableFactory.from(first), iterableFactory.from(second)) + } + + /** Converts this $coll of triples into three collections of the first, second, + * and third element of each triple. + * + * {{{ + * val xs = $Coll( + * (1, "one", '1'), + * (2, "two", '2'), + * (3, "three", '3')).unzip3 + * // xs == ($Coll(1, 2, 3), + * // $Coll(one, two, three), + * // $Coll(1, 2, 3)) + * }}} + * + * @tparam A1 the type of the first member of the element triples + * @tparam A2 the type of the second member of the element triples + * @tparam A3 the type of the third member of the element triples + * @param asTriple an implicit conversion which asserts that the element type + * of this $coll is a triple. + * @return a triple of ${coll}s, containing the first, second, respectively + * third member of each element triple of this $coll. 
+ */ + def unzip3[A1, A2, A3](implicit asTriple: A -> (A1, A2, A3)): (CC[A1]^{this}, CC[A2]^{this}, CC[A3]^{this}) = { + val first: View[A1]^{this} = new View.Map[A, A1](this, asTriple(_)._1) + val second: View[A2]^{this} = new View.Map[A, A2](this, asTriple(_)._2) + val third: View[A3]^{this} = new View.Map[A, A3](this, asTriple(_)._3) + (iterableFactory.from(first), iterableFactory.from(second), iterableFactory.from(third)) + } + + /** Iterates over the tails of this $coll. The first value will be this + * $coll and the final one will be an empty $coll, with the intervening + * values the results of successive applications of `tail`. + * + * @return an iterator over all the tails of this $coll + * @example `List(1,2,3).tails = Iterator(List(1,2,3), List(2,3), List(3), Nil)` + */ + def tails: Iterator[C^{this}]^{this} = iterateUntilEmpty(_.tail) + + /** Iterates over the inits of this $coll. The first value will be this + * $coll and the final one will be an empty $coll, with the intervening + * values the results of successive applications of `init`. + * + * $willForceEvaluation + * + * @return an iterator over all the inits of this $coll + * @example `List(1,2,3).inits = Iterator(List(1,2,3), List(1,2), List(1), Nil)` + */ + def inits: Iterator[C^{this}]^{this} = iterateUntilEmpty(_.init) + + override def tapEach[U](f: A => U): C^{this, f} = fromSpecific(new View.Map(this, { (a: A) => f(a); a })) + + // A helper for tails and inits. + private[this] def iterateUntilEmpty(f: Iterable[A]^{this} => Iterable[A]^{this}): Iterator[C^{this}]^{this, f} = { + // toIterable ties the knot between `this: IterableOnceOps[A, CC, C]` and `this.tail: C` + // `this.tail.tail` doesn't compile as `C` is unbounded + // `Iterable.from(this)` would eagerly copy non-immutable collections + val it = Iterator.iterate(toIterable: @nowarn("cat=deprecation"))(f) + .takeWhile((itble: Iterable[A]^) => itble.iterator.nonEmpty) + // CC TODO type annotation for itble needed. + // The previous code `.takeWhile(_.iterator.nonEmpty)` does not work. + (it ++ Iterator.single(Iterable.empty)).map(fromSpecific) + } + + @deprecated("Use ++ instead of ++: for collections of type Iterable", "2.13.0") + def ++:[B >: A](that: IterableOnce[B]^): CC[B]^{this, that} = iterableFactory.from(that match { + case xs: Iterable[B] => new View.Concat(xs, this) + case _ => that.iterator ++ iterator + }) +} + +object IterableOps { + + /** Operations for comparing the size of a collection to a test value. + * + * These operations are implemented in terms of + * [[scala.collection.IterableOps.sizeCompare(Int) `sizeCompare(Int)`]]. + */ + final class SizeCompareOps private[collection](val it: IterableOps[_, AnyConstr, _]^) extends AnyVal { + this: SizeCompareOps^{it} => + /** Tests if the size of the collection is less than some value. */ + @inline def <(size: Int): Boolean = it.sizeCompare(size) < 0 + /** Tests if the size of the collection is less than or equal to some value. */ + @inline def <=(size: Int): Boolean = it.sizeCompare(size) <= 0 + /** Tests if the size of the collection is equal to some value. */ + @inline def ==(size: Int): Boolean = it.sizeCompare(size) == 0 + /** Tests if the size of the collection is not equal to some value. */ + @inline def !=(size: Int): Boolean = it.sizeCompare(size) != 0 + /** Tests if the size of the collection is greater than or equal to some value. */ + @inline def >=(size: Int): Boolean = it.sizeCompare(size) >= 0 + /** Tests if the size of the collection is greater than some value. 
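+ *
+ * Because it is backed by `sizeCompare`, this also terminates on collections whose
+ * size is expensive or infinite to compute; for example `LazyList.from(1).sizeIs > 5`
+ * is `true` and forces only the first few elements.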
*/ + @inline def >(size: Int): Boolean = it.sizeCompare(size) > 0 + } + + /** A trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods + * of trait `Iterable`. + * + * @tparam A Element type (e.g. `Int`) + * @tparam CC Collection type constructor (e.g. `List`) + * + * @define coll collection + */ + @SerialVersionUID(3L) + class WithFilter[+A, +CC[_]]( + self: IterableOps[A, CC, _]^, + p: A => Boolean + ) extends collection.WithFilter[A, CC] with Serializable { + + protected def filtered: Iterable[A]^{this} = + new View.Filter(self, p, isFlipped = false) + + def map[B](f: A => B): CC[B]^{this, f} = + self.iterableFactory.from(new View.Map(filtered, f)) + + def flatMap[B](f: A => IterableOnce[B]^): CC[B]^{this, f} = + self.iterableFactory.from(new View.FlatMap(filtered, f)) + + def foreach[U](f: A => U): Unit = filtered.foreach(f) + + def withFilter(q: A => Boolean): WithFilter[A, CC]^{this, q} = + new WithFilter(self, (a: A) => p(a) && q(a)) + + } + +} + +@SerialVersionUID(3L) +object Iterable extends IterableFactory.Delegate[Iterable](immutable.Iterable) { + + def single[A](a: A): Iterable[A] = new AbstractIterable[A] { + override def iterator = Iterator.single(a) + override def knownSize = 1 + override def head = a + override def headOption = Some(a) + override def last = a + override def lastOption = Some(a) + override def view = new View.Single(a) + override def take(n: Int) = if (n > 0) this else Iterable.empty + override def takeRight(n: Int) = if (n > 0) this else Iterable.empty + override def drop(n: Int) = if (n > 0) Iterable.empty else this + override def dropRight(n: Int) = if (n > 0) Iterable.empty else this + override def tail = Iterable.empty + override def init = Iterable.empty + } +} + +/** Explicit instantiation of the `Iterable` trait to reduce class file size in subclasses. */ +abstract class AbstractIterable[+A] extends Iterable[A] + +/** This trait provides default implementations for the factory methods `fromSpecific` and + * `newSpecificBuilder` that need to be refined when implementing a collection type that refines + * the `CC` and `C` type parameters. + * + * The default implementations in this trait can be used in the common case when `CC[A]` is the + * same as `C`. + */ +trait IterableFactoryDefaults[+A, +CC[x] <: IterableOps[x, CC, CC[x]]] extends IterableOps[A, CC, CC[A @uncheckedVariance]] { + protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]^): CC[A @uncheckedVariance]^{coll} = iterableFactory.from(coll) + protected def newSpecificBuilder: Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = iterableFactory.newBuilder[A] + + // overridden for efficiency, since we know CC[A] =:= C + override def empty: CC[A @uncheckedVariance] = iterableFactory.empty +} + +/** This trait provides default implementations for the factory methods `fromSpecific` and + * `newSpecificBuilder` that need to be refined when implementing a collection type that refines + * the `CC` and `C` type parameters. It is used for collections that have an additional constraint, + * expressed by the `evidenceIterableFactory` method. + * + * The default implementations in this trait can be used in the common case when `CC[A]` is the + * same as `C`. 
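+ *
+ * A hypothetical sketch (all names invented for illustration):
+ * {{{
+ *   class SortedBag[A](implicit val iterableEvidence: Ordering[A])
+ *     extends Iterable[A] with EvidenceIterableFactoryDefaults[A, SortedBag, Ordering] {
+ *     protected def evidenceIterableFactory: EvidenceIterableFactory[SortedBag, Ordering] = SortedBag
+ *     // ... iterator, etc.
+ *   }
+ * }}}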
+ */ +trait EvidenceIterableFactoryDefaults[+A, +CC[x] <: IterableOps[x, CC, CC[x]], Ev[_]] extends IterableOps[A, CC, CC[A @uncheckedVariance]] { + protected def evidenceIterableFactory: EvidenceIterableFactory[CC, Ev] + implicit protected def iterableEvidence: Ev[A @uncheckedVariance] + override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]^): CC[A @uncheckedVariance]^{coll} = evidenceIterableFactory.from(coll) + override protected def newSpecificBuilder: Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = evidenceIterableFactory.newBuilder[A] + override def empty: CC[A @uncheckedVariance] = evidenceIterableFactory.empty +} + +/** This trait provides default implementations for the factory methods `fromSpecific` and + * `newSpecificBuilder` that need to be refined when implementing a collection type that refines + * the `CC` and `C` type parameters. It is used for sorted sets. + * + * Note that in sorted sets, the `CC` type of the set is not the same as the `CC` type for the + * underlying iterable (which is fixed to `Set` in [[SortedSetOps]]). This trait has therefore + * two type parameters `CC` and `WithFilterCC`. The `withFilter` method inherited from + * `IterableOps` is overridden with a compatible default implementation. + * + * The default implementations in this trait can be used in the common case when `CC[A]` is the + * same as `C`. + */ +trait SortedSetFactoryDefaults[+A, + +CC[X] <: SortedSet[X] with SortedSetOps[X, CC, CC[X]], + +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Set[x]] extends SortedSetOps[A @uncheckedVariance, CC, CC[A @uncheckedVariance]] { + self: IterableOps[A, WithFilterCC, _] => + + override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]^): CC[A @uncheckedVariance]^{coll} = sortedIterableFactory.from(coll)(ordering) + override protected def newSpecificBuilder: mutable.Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = sortedIterableFactory.newBuilder[A](ordering) + override def empty: CC[A @uncheckedVariance] = sortedIterableFactory.empty(ordering) + + override def withFilter(p: A => Boolean): SortedSetOps.WithFilter[A, WithFilterCC, CC]^{p} = + new SortedSetOps.WithFilter[A, WithFilterCC, CC](this, p) +} + + +/** This trait provides default implementations for the factory methods `fromSpecific` and + * `newSpecificBuilder` that need to be refined when implementing a collection type that refines + * the `CC` and `C` type parameters. It is used for maps. + * + * Note that in maps, the `CC` type of the map is not the same as the `CC` type for the + * underlying iterable (which is fixed to `Map` in [[MapOps]]). This trait has therefore + * two type parameters `CC` and `WithFilterCC`. The `withFilter` method inherited from + * `IterableOps` is overridden with a compatible default implementation. + * + * The default implementations in this trait can be used in the common case when `CC[A]` is the + * same as `C`. 
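+ *
+ * For example, `immutable.HashMap` mixes in `MapFactoryDefaults[K, V, HashMap, Iterable]`,
+ * so its `fromSpecific` and `newSpecificBuilder` are derived from `HashMap`'s `mapFactory`.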
+ */ +trait MapFactoryDefaults[K, +V, + +CC[x, y] <: IterableOps[(x, y), Iterable, Iterable[(x, y)]], + +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Iterable[x]] extends MapOps[K, V, CC, CC[K, V @uncheckedVariance]] with IterableOps[(K, V), WithFilterCC, CC[K, V @uncheckedVariance]] { + this: MapFactoryDefaults[K, V, CC, WithFilterCC] => + override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]^): CC[K, V @uncheckedVariance]^{coll} = mapFactory.from(coll) + override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = mapFactory.newBuilder[K, V] + override def empty: CC[K, V @uncheckedVariance] = (this: AnyRef) match { + // Implemented here instead of in TreeSeqMap since overriding empty in TreeSeqMap is not forwards compatible (should be moved) + case self: immutable.TreeSeqMap[_, _] => immutable.TreeSeqMap.empty(self.orderedBy).asInstanceOf[CC[K, V]] + case _ => mapFactory.empty + } + + override def withFilter(p: ((K, V)) => Boolean): MapOps.WithFilter[K, V, WithFilterCC, CC]^{p} = + new MapOps.WithFilter[K, V, WithFilterCC, CC](this, p) +} + +/** This trait provides default implementations for the factory methods `fromSpecific` and + * `newSpecificBuilder` that need to be refined when implementing a collection type that refines + * the `CC` and `C` type parameters. It is used for sorted maps. + * + * Note that in sorted maps, the `CC` type of the map is not the same as the `CC` type for the + * underlying map (which is fixed to `Map` in [[SortedMapOps]]). This trait has therefore + * three type parameters `CC`, `WithFilterCC` and `UnsortedCC`. The `withFilter` method inherited + * from `IterableOps` is overridden with a compatible default implementation. + * + * The default implementations in this trait can be used in the common case when `CC[A]` is the + * same as `C`. + */ +trait SortedMapFactoryDefaults[K, +V, + +CC[x, y] <: Map[x, y] with SortedMapOps[x, y, CC, CC[x, y]] with UnsortedCC[x, y], + +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Iterable[x], + +UnsortedCC[x, y] <: Map[x, y]] extends SortedMapOps[K, V, CC, CC[K, V @uncheckedVariance]] with MapOps[K, V, UnsortedCC, CC[K, V @uncheckedVariance]] { + self: IterableOps[(K, V), WithFilterCC, _] => + + override def empty: CC[K, V @uncheckedVariance] = sortedMapFactory.empty(ordering) + override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]^): CC[K, V @uncheckedVariance]^{coll} = sortedMapFactory.from(coll)(ordering) + override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = sortedMapFactory.newBuilder[K, V](ordering) + + override def withFilter(p: ((K, V)) => Boolean): collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC]^{p} = + new collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC](this, p) +} diff --git a/tests/pos-special/stdlib/collection/IterableOnce.scala b/tests/pos-special/stdlib/collection/IterableOnce.scala new file mode 100644 index 000000000000..a0b184410428 --- /dev/null +++ b/tests/pos-special/stdlib/collection/IterableOnce.scala @@ -0,0 +1,1365 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection + +import scala.annotation.tailrec +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.mutable.StringBuilder +import scala.language.implicitConversions +import scala.math.{Numeric, Ordering} +import scala.reflect.ClassTag +import scala.runtime.AbstractFunction2 +import language.experimental.captureChecking + +/** + * A template trait for collections which can be traversed either once only + * or one or more times. + * + * Note: `IterableOnce` does not extend [[IterableOnceOps]]. This is different from the general + * design of the collections library, which uses the following pattern: + * {{{ + * trait Seq extends Iterable with SeqOps + * trait SeqOps extends IterableOps + * + * trait IndexedSeq extends Seq with IndexedSeqOps + * trait IndexedSeqOps extends SeqOps + * }}} + * + * The goal is to provide a minimal interface without any sequential operations. This allows + * third-party extensions like the Scala parallel collections to integrate at the level of IterableOnce + * without inheriting unwanted implementations. + * + * @define coll collection + */ +trait IterableOnce[+A] extends Any { + this: IterableOnce[A]^ => + + /** Iterator can be used only once */ + def iterator: Iterator[A]^{this} + + /** Returns a [[scala.collection.Stepper]] for the elements of this collection. + * + * The Stepper enables creating a Java stream to operate on the collection, see + * [[scala.jdk.StreamConverters]]. For collections holding primitive values, the Stepper can be + * used as an iterator which doesn't box the elements. + * + * The implicit [[scala.collection.StepperShape]] parameter defines the resulting Stepper type according to the + * element type of this collection. + * + * - For collections of `Int`, `Short`, `Byte` or `Char`, an [[scala.collection.IntStepper]] is returned + * - For collections of `Double` or `Float`, a [[scala.collection.DoubleStepper]] is returned + * - For collections of `Long`, a [[scala.collection.LongStepper]] is returned + * - For any other element type, an [[scala.collection.AnyStepper]] is returned + * + * Note that this method is overridden in subclasses and the return type is refined to + * `S with EfficientSplit`, for example [[scala.collection.IndexedSeqOps.stepper]]. For Steppers marked with + * [[scala.collection.Stepper.EfficientSplit]], the converters in [[scala.jdk.StreamConverters]] + * allow creating parallel streams, whereas bare Steppers can be converted only to sequential + * streams. + */ + def stepper[S <: Stepper[_]^{this}](implicit shape: StepperShape[A, S]): S = { + import convert.impl._ + val s: Any = shape.shape match { + case StepperShape.IntShape => new IntIteratorStepper (iterator.asInstanceOf[Iterator[Int]]) + case StepperShape.LongShape => new LongIteratorStepper (iterator.asInstanceOf[Iterator[Long]]) + case StepperShape.DoubleShape => new DoubleIteratorStepper(iterator.asInstanceOf[Iterator[Double]]) + case _ => shape.seqUnbox(new AnyIteratorStepper[A](iterator)) + } + s.asInstanceOf[S] + } + + /** @return The number of elements in this $coll, if it can be cheaply computed, + * -1 otherwise. Cheaply usually means: Not requiring a collection traversal. + */ + def knownSize: Int = -1 +} + +final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) extends AnyVal { + @deprecated("Use .iterator.withFilter(...) instead", "2.13.0") + def withFilter(f: A => Boolean): Iterator[A]^{f} = it.iterator.withFilter(f) + + @deprecated("Use .iterator.reduceLeftOption(...)
instead", "2.13.0") + def reduceLeftOption(f: (A, A) => A): Option[A] = it.iterator.reduceLeftOption(f) + + @deprecated("Use .iterator.min instead", "2.13.0") + def min(implicit ord: Ordering[A]): A = it.iterator.min + + @deprecated("Use .iterator.nonEmpty instead", "2.13.0") + def nonEmpty: Boolean = it.iterator.nonEmpty + + @deprecated("Use .iterator.max instead", "2.13.0") + def max(implicit ord: Ordering[A]): A = it.iterator.max + + @deprecated("Use .iterator.reduceRight(...) instead", "2.13.0") + def reduceRight(f: (A, A) => A): A = it.iterator.reduceRight(f) + + @deprecated("Use .iterator.maxBy(...) instead", "2.13.0") + def maxBy[B](f: A -> B)(implicit cmp: Ordering[B]): A = it.iterator.maxBy(f) + + @deprecated("Use .iterator.reduceLeft(...) instead", "2.13.0") + def reduceLeft(f: (A, A) => A): A = it.iterator.reduceLeft(f) + + @deprecated("Use .iterator.sum instead", "2.13.0") + def sum(implicit num: Numeric[A]): A = it.iterator.sum + + @deprecated("Use .iterator.product instead", "2.13.0") + def product(implicit num: Numeric[A]): A = it.iterator.product + + @deprecated("Use .iterator.count(...) instead", "2.13.0") + def count(f: A => Boolean): Int = it.iterator.count(f) + + @deprecated("Use .iterator.reduceOption(...) instead", "2.13.0") + def reduceOption(f: (A, A) => A): Option[A] = it.iterator.reduceOption(f) + + @deprecated("Use .iterator.minBy(...) instead", "2.13.0") + def minBy[B](f: A -> B)(implicit cmp: Ordering[B]): A = it.iterator.minBy(f) + + @deprecated("Use .iterator.size instead", "2.13.0") + def size: Int = it.iterator.size + + @deprecated("Use .iterator.forall(...) instead", "2.13.0") + def forall(f: A => Boolean): Boolean = it.iterator.forall(f) + + @deprecated("Use .iterator.collectFirst(...) instead", "2.13.0") + def collectFirst[B](f: PartialFunction[A, B]): Option[B] = it.iterator.collectFirst(f) + + @deprecated("Use .iterator.filter(...) instead", "2.13.0") + def filter(f: A => Boolean): Iterator[A]^{f} = it.iterator.filter(f) + + @deprecated("Use .iterator.exists(...) instead", "2.13.0") + def exists(f: A => Boolean): Boolean = it.iterator.exists(f) + + @deprecated("Use .iterator.copyToBuffer(...) instead", "2.13.0") + def copyToBuffer(dest: mutable.Buffer[A]): Unit = it.iterator.copyToBuffer(dest) + + @deprecated("Use .iterator.reduce(...) instead", "2.13.0") + def reduce(f: (A, A) => A): A = it.iterator.reduce(f) + + @deprecated("Use .iterator.reduceRightOption(...) instead", "2.13.0") + def reduceRightOption(f: (A, A) => A): Option[A] = it.iterator.reduceRightOption(f) + + @deprecated("Use .iterator.toIndexedSeq instead", "2.13.0") + def toIndexedSeq: IndexedSeq[A] = it.iterator.toIndexedSeq + + @deprecated("Use .iterator.foreach(...) 
instead", "2.13.0") + @`inline` def foreach[U](f: A => U): Unit = it match { + case it: Iterable[A] => it.foreach(f) + case _ => it.iterator.foreach(f) + } + + @deprecated("Use .iterator.to(factory) instead", "2.13.0") + def to[C1](factory: Factory[A, C1]): C1 = factory.fromSpecific(it) + + @deprecated("Use .iterator.to(ArrayBuffer) instead", "2.13.0") + def toBuffer[B >: A]: mutable.Buffer[B] = mutable.ArrayBuffer.from(it) + + @deprecated("Use .iterator.toArray", "2.13.0") + def toArray[B >: A: ClassTag]: Array[B] = it match { + case it: Iterable[B] => it.toArray[B] + case _ => it.iterator.toArray[B] + } + + @deprecated("Use .iterator.to(List) instead", "2.13.0") + def toList: immutable.List[A] = immutable.List.from(it) + + @deprecated("Use .iterator.to(Set) instead", "2.13.0") + @`inline` def toSet[B >: A]: immutable.Set[B] = immutable.Set.from(it) + + @deprecated("Use .iterator.to(Iterable) instead", "2.13.0") + @`inline` final def toTraversable: Traversable[A] = toIterable + + @deprecated("Use .iterator.to(Iterable) instead", "2.13.0") + @`inline` final def toIterable: Iterable[A] = Iterable.from(it) + + @deprecated("Use .iterator.to(Seq) instead", "2.13.0") + @`inline` def toSeq: immutable.Seq[A] = immutable.Seq.from(it) + + @deprecated("Use .iterator.to(LazyList) instead", "2.13.0") + @`inline` def toStream: immutable.Stream[A] = immutable.Stream.from(it) + + @deprecated("Use .iterator.to(Vector) instead", "2.13.0") + @`inline` def toVector: immutable.Vector[A] = immutable.Vector.from(it) + + @deprecated("Use .iterator.to(Map) instead", "2.13.0") + def toMap[K, V](implicit ev: A <:< (K, V)): immutable.Map[K, V] = + immutable.Map.from(it.asInstanceOf[IterableOnce[(K, V)]]) + + @deprecated("Use .iterator instead", "2.13.0") + @`inline` def toIterator: Iterator[A] = it.iterator + + @deprecated("Use .iterator.isEmpty instead", "2.13.0") + def isEmpty: Boolean = it match { + case it: Iterable[A] => it.isEmpty + case _ => it.iterator.isEmpty + } + + @deprecated("Use .iterator.mkString instead", "2.13.0") + def mkString(start: String, sep: String, end: String): String = it match { + case it: Iterable[A] => it.mkString(start, sep, end) + case _ => it.iterator.mkString(start, sep, end) + } + + @deprecated("Use .iterator.mkString instead", "2.13.0") + def mkString(sep: String): String = it match { + case it: Iterable[A] => it.mkString(sep) + case _ => it.iterator.mkString(sep) + } + + @deprecated("Use .iterator.mkString instead", "2.13.0") + def mkString: String = it match { + case it: Iterable[A] => it.mkString + case _ => it.iterator.mkString + } + + @deprecated("Use .iterator.find instead", "2.13.0") + def find(p: A => Boolean): Option[A] = it.iterator.find(p) + + @deprecated("Use .iterator.foldLeft instead", "2.13.0") + @`inline` def foldLeft[B](z: B)(op: (B, A) => B): B = it.iterator.foldLeft(z)(op) + + @deprecated("Use .iterator.foldRight instead", "2.13.0") + @`inline` def foldRight[B](z: B)(op: (A, B) => B): B = it.iterator.foldRight(z)(op) + + @deprecated("Use .iterator.fold instead", "2.13.0") + def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = it.iterator.fold(z)(op) + + @deprecated("Use .iterator.foldLeft instead", "2.13.0") + @`inline` def /: [B](z: B)(op: (B, A) => B): B = foldLeft[B](z)(op) + + @deprecated("Use .iterator.foldRight instead", "2.13.0") + @`inline` def :\ [B](z: B)(op: (A, B) => B): B = foldRight[B](z)(op) + + @deprecated("Use .iterator.map instead or consider requiring an Iterable", "2.13.0") + def map[B](f: A => B): IterableOnce[B]^{f} = it match { + case it: 
Iterable[A] => it.map(f) + case _ => it.iterator.map(f) + } + + @deprecated("Use .iterator.flatMap instead or consider requiring an Iterable", "2.13.0") + def flatMap[B](f: A => IterableOnce[B]^): IterableOnce[B]^{f} = it match { + case it: Iterable[A] => it.flatMap(f) + case _ => it.iterator.flatMap(f) + } + + @deprecated("Use .iterator.sameElements instead", "2.13.0") + def sameElements[B >: A](that: IterableOnce[B]): Boolean = it.iterator.sameElements(that) +} + +object IterableOnce { + @`inline` implicit def iterableOnceExtensionMethods[A](it: IterableOnce[A]): IterableOnceExtensionMethods[A] = + new IterableOnceExtensionMethods[A](it) + + /** Computes the number of elements to copy to an array from a source IterableOnce + * + * @param srcLen the length of the source collection + * @param destLen the length of the destination array + * @param start the index in the destination array at which to start copying elements to + * @param len the requested number of elements to copy (we may only be able to copy less than this) + * @return the number of elements that will be copied to the destination array + */ + @inline private[collection] def elemsToCopyToArray(srcLen: Int, destLen: Int, start: Int, len: Int): Int = + math.max(math.min(math.min(len, srcLen), destLen - start), 0) + + /** Calls `copyToArray` on the given collection, regardless of whether or not it is an `Iterable`. */ + @inline private[collection] def copyElemsToArray[A, B >: A]( + elems: IterableOnce[A]^, + xs: Array[B], + start: Int = 0, + len: Int = Int.MaxValue): Int = + elems match { + case src: Iterable[A] => src.copyToArray[B](xs, start, len) + case src => src.iterator.copyToArray[B](xs, start, len) + } + + @inline private[collection] def checkArraySizeWithinVMLimit(size: Int): Unit = { + import scala.runtime.PStatics.VM_MaxArraySize + if (size > VM_MaxArraySize) { + throw new Exception(s"Size of array-backed collection exceeds VM array size limit of ${VM_MaxArraySize}") + } + } +} + +/** This implementation trait can be mixed into an `IterableOnce` to get the basic methods that are shared between + * `Iterator` and `Iterable`. The `IterableOnce` must support multiple calls to `iterator` but may or may not + * return the same `Iterator` every time. + * + * @define orderDependent + * + * Note: might return different results for different runs, unless the underlying collection type is ordered. + * @define orderDependentFold + * + * Note: might return different results for different runs, unless the + * underlying collection type is ordered or the operator is associative + * and commutative. + * @define mayNotTerminateInf + * + * Note: may not terminate for infinite-sized collections. + * @define willNotTerminateInf + * + * Note: will not terminate for infinite-sized collections. + * @define willForceEvaluation + * Note: Even when applied to a view or a lazy collection it will always force the elements. + * @define consumesIterator + * After calling this method, one should discard the iterator it was called + * on. Using it is undefined and subject to change. + * @define undefinedorder + * The order in which operations are performed on elements is unspecified + * and may be nondeterministic. 
+ * @define coll collection + * + */ +trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => + /////////////////////////////////////////////////////////////// Abstract methods that must be implemented + + import IterableOnceOps.Maximized + + /** Produces a $coll containing cumulative results of applying the + * operator going left to right, including the initial value. + * + * $willNotTerminateInf + * $orderDependent + * + * @tparam B the type of the elements in the resulting collection + * @param z the initial value + * @param op the binary operator applied to the intermediate result and the element + * @return collection with intermediate results + */ + def scanLeft[B](z: B)(op: (B, A) => B): CC[B]^{this, op} + + /** Selects all elements of this $coll which satisfy a predicate. + * + * @param p the predicate used to test elements. + * @return a new $coll consisting of all elements of this $coll that satisfy the given + * predicate `p`. The order of the elements is preserved. + */ + def filter(p: A => Boolean): C^{this, p} + + /** Selects all elements of this $coll which do not satisfy a predicate. + * + * @param p the predicate used to test elements. + * @return a new $coll consisting of all elements of this $coll that do not satisfy the given + * predicate `p`. Their order may not be preserved. + */ + def filterNot(p: A => Boolean): C^{this, p} + + /** Selects the first ''n'' elements. + * $orderDependent + * @param n the number of elements to take from this $coll. + * @return a $coll consisting only of the first `n` elements of this $coll, + * or else the whole $coll, if it has fewer than `n` elements. + * If `n` is negative, returns an empty $coll. + */ + def take(n: Int): C^{this} + + /** Takes longest prefix of elements that satisfy a predicate. + * $orderDependent + * @param p The predicate used to test elements. + * @return the longest prefix of this $coll whose elements all satisfy + * the predicate `p`. + */ + def takeWhile(p: A => Boolean): C^{this, p} + + /** Selects all elements except first ''n'' ones. + * $orderDependent + * @param n the number of elements to drop from this $coll. + * @return a $coll consisting of all elements of this $coll except the first `n` ones, or else the + * empty $coll, if this $coll has fewer than `n` elements. + * If `n` is negative, don't drop any elements. + */ + def drop(n: Int): C^{this} + + /** Drops longest prefix of elements that satisfy a predicate. + * $orderDependent + * @param p The predicate used to test elements. + * @return the longest suffix of this $coll whose first element + * does not satisfy the predicate `p`. + */ + def dropWhile(p: A => Boolean): C^{this, p} + + /** Selects an interval of elements. The returned $coll is made up + * of all elements `x` which satisfy the invariant: + * {{{ + * from <= indexOf(x) < until + * }}} + * $orderDependent + * + * @param from the lowest index to include from this $coll. + * @param until the lowest index to EXCLUDE from this $coll. + * @return a $coll containing the elements greater than or equal to + * index `from` extending up to (but not including) index `until` + * of this $coll. + */ + def slice(from: Int, until: Int): C^{this} + + /** Builds a new $coll by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned $coll. + * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results.
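+ * + * For example: + * {{{ + * List(1, 2, 3).map(_ * 2) // List(2, 4, 6) + * Iterator(1, 2, 3).map(_ * 2).toList // List(2, 4, 6) + * }}}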
+ */ + def map[B](f: A => B): CC[B]^{this, f} + + /** Builds a new $coll by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * For example: + * + * {{{ + * def getWords(lines: Seq[String]): Seq[String] = lines flatMap (line => line split "\\W+") + * }}} + * + * The type of the resulting collection is guided by the static type of $coll. This might + * cause unexpected results sometimes. For example: + * + * {{{ + * // lettersOf will return a Seq[Char] of likely repeated letters, instead of a Set + * def lettersOf(words: Seq[String]) = words flatMap (word => word.toSet) + * + * // lettersOf will return a Set[Char], not a Seq + * def lettersOf(words: Seq[String]) = words.toSet flatMap ((word: String) => word.toSeq) + * + * // xs will be an Iterable[Int] + * val xs = Map("a" -> List(11,111), "b" -> List(22,222)).flatMap(_._2) + * + * // ys will be a Map[Int, Int] + * val ys = Map("a" -> List(1 -> 11,1 -> 111), "b" -> List(2 -> 22,2 -> 222)).flatMap(_._2) + * }}} + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned collection. + * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. + */ + def flatMap[B](f: A => IterableOnce[B]^): CC[B]^{this, f} + + /** Converts this $coll of iterable collections into + * a $coll formed by the elements of these iterable + * collections. + * + * The resulting collection's type will be guided by the + * type of $coll. For example: + * + * {{{ + * val xs = List( + * Set(1, 2, 3), + * Set(1, 2, 3) + * ).flatten + * // xs == List(1, 2, 3, 1, 2, 3) + * + * val ys = Set( + * List(1, 2, 3), + * List(3, 2, 1) + * ).flatten + * // ys == Set(1, 2, 3) + * }}} + * + * @tparam B the type of the elements of each iterable collection. + * @param asIterable an implicit conversion which asserts that the element + * type of this $coll is an `Iterable`. + * @return a new $coll resulting from concatenating all element ${coll}s. + */ + def flatten[B](implicit asIterable: A -> IterableOnce[B]): CC[B]^{this} + + /** Builds a new $coll by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @tparam B the element type of the returned $coll. + * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. + */ + def collect[B](pf: PartialFunction[A, B]^): CC[B]^{this, pf} + + /** Zips this $coll with its indices. + * + * @return A new $coll containing pairs consisting of all elements of this $coll paired with their index. + * Indices start at `0`. + * @example + * `List("a", "b", "c").zipWithIndex == List(("a", 0), ("b", 1), ("c", 2))` + */ + def zipWithIndex: CC[(A @uncheckedVariance, Int)]^{this} + + /** Splits this $coll into a prefix/suffix pair according to a predicate. + * + * Note: `c span p` is equivalent to (but possibly more efficient than) + * `(c takeWhile p, c dropWhile p)`, provided the evaluation of the + * predicate `p` does not cause any side-effects. + * $orderDependent + * + * @param p the test predicate + * @return a pair consisting of the longest prefix of this $coll whose + * elements all satisfy `p`, and the rest of this $coll. 
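+ * + * For example: + * {{{ + * List(1, 2, 3, 4, 1).span(_ < 3) // (List(1, 2), List(3, 4, 1)) + * }}}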
+ */ + def span(p: A => Boolean): (C^{this, p}, C^{this, p}) + + /** Splits this $coll into a prefix/suffix pair at a given position. + * + * Note: `c splitAt n` is equivalent to (but possibly more efficient than) + * `(c take n, c drop n)`. + * $orderDependent + * + * @param n the position at which to split. + * @return a pair of ${coll}s consisting of the first `n` + * elements of this $coll, and the other elements. + */ + def splitAt(n: Int): (C^{this}, C^{this}) = { + class Spanner extends runtime.AbstractFunction1[A, Boolean] { + var i = 0 + def apply(a: A) = i < n && { i += 1 ; true } + } + val spanner = new Spanner + span(spanner) + } + + /** Applies a side-effecting function to each element in this collection. + * Strict collections will apply `f` to their elements immediately, while lazy collections + * like Views and LazyLists will only apply `f` on each element if and when that element + * is evaluated, and each time that element is evaluated. + * + * @param f a function to apply to each element in this $coll + * @tparam U the return type of f + * @return The same logical collection as this + */ + def tapEach[U](f: A => U): C^{this, f} + + /////////////////////////////////////////////////////////////// Concrete methods based on iterator + + /** Tests whether this $coll is known to have a finite size. + * All strict collections are known to have finite size. For a non-strict + * collection such as `Stream`, the predicate returns `'''true'''` if all + * elements have been computed. It returns `'''false'''` if the stream is + * not yet evaluated to the end. Non-empty Iterators usually return + * `'''false'''` even if they were created from a collection with a known + * finite size. + * + * Note: many collection methods will not work on collections of infinite sizes. + * The typical failure mode is an infinite loop. These methods always attempt a + * traversal without checking first that `hasDefiniteSize` returns `'''true'''`. + * However, checking `hasDefiniteSize` can provide an assurance that size is + * well-defined and non-termination is not a concern. + * + * @deprecated This method is deprecated in 2.13 because it does not provide any + * actionable information. As noted above, even the collection library itself + * does not use it. When there is no guarantee that a collection is finite, it + * is generally best to attempt a computation anyway and document that it will + * not terminate for infinite collections rather than backing out because this + * would prevent performing the computation on collections that are in fact + * finite even though `hasDefiniteSize` returns `false`. + * + * @see method `knownSize` for a more useful alternative + * + * @return `'''true'''` if this collection is known to have finite size, + * `'''false'''` otherwise. + */ + @deprecated("Check .knownSize instead of .hasDefiniteSize for more actionable information (see scaladoc for details)", "2.13.0") + def hasDefiniteSize: Boolean = true + + /** Tests whether this $coll can be repeatedly traversed. Always + * true for Iterables and false for Iterators unless overridden. + * + * @return `true` if it is repeatedly traversable, `false` otherwise. + */ + def isTraversableAgain: Boolean = false + + /** Apply `f` to each element for its side effects + * Note: [U] parameter needed to help scalac's type inference. + */ + def foreach[U](f: A => U): Unit = { + val it = iterator + while(it.hasNext) f(it.next()) + } + + /** Tests whether a predicate holds for all elements of this $coll. 
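+ * + * For example: + * {{{ + * List(1, 2, 3).forall(_ > 0) // true + * List(1, -2, 3).forall(_ > 0) // false + * List.empty[Int].forall(_ > 0) // true (vacuously) + * }}}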
+ * + * $mayNotTerminateInf + * + * @param p the predicate used to test elements. + * @return `true` if this $coll is empty or the given predicate `p` + * holds for all elements of this $coll, otherwise `false`. + */ + def forall(p: A => Boolean): Boolean = { + var res = true + val it = iterator + while (res && it.hasNext) res = p(it.next()) + res + } + + /** Tests whether a predicate holds for at least one element of this $coll. + * + * $mayNotTerminateInf + * + * @param p the predicate used to test elements. + * @return `true` if the given predicate `p` is satisfied by at least one element of this $coll, otherwise `false` + */ + def exists(p: A => Boolean): Boolean = { + var res = false + val it = iterator + while (!res && it.hasNext) res = p(it.next()) + res + } + + /** Counts the number of elements in the $coll which satisfy a predicate. + * + * $willNotTerminateInf + * + * @param p the predicate used to test elements. + * @return the number of elements satisfying the predicate `p`. + */ + def count(p: A => Boolean): Int = { + var res = 0 + val it = iterator + while (it.hasNext) if (p(it.next())) res += 1 + res + } + + /** Finds the first element of the $coll satisfying a predicate, if any. + * + * $mayNotTerminateInf + * $orderDependent + * + * @param p the predicate used to test elements. + * @return an option value containing the first element in the $coll + * that satisfies `p`, or `None` if none exists. + */ + def find(p: A => Boolean): Option[A] = { + val it = iterator + while (it.hasNext) { + val a = it.next() + if (p(a)) return Some(a) + } + None + } + + // in future, move to IndexedSeqOps + private def foldl[X >: A, B](seq: IndexedSeq[X], start: Int, z: B, op: (B, X) => B): B = { + @tailrec def loop(at: Int, end: Int, acc: B): B = + if (at == end) acc + else loop(at + 1, end, op(acc, seq(at))) + loop(start, seq.length, z) + } + + private def foldr[X >: A, B >: X](seq: IndexedSeq[X], op: (X, B) => B): B = { + @tailrec def loop(at: Int, acc: B): B = + if (at == 0) acc + else loop(at - 1, op(seq(at - 1), acc)) + loop(seq.length - 1, seq(seq.length - 1)) + } + + /** Applies a binary operator to a start value and all elements of this $coll, + * going left to right. + * + * $willNotTerminateInf + * $orderDependentFold + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return the result of inserting `op` between consecutive elements of this $coll, + * going left to right with the start value `z` on the left: + * `op(...op(z, x,,1,,), x,,2,,, ..., x,,n,,)` where `x,,1,,, ..., x,,n,,` + * are the elements of this $coll. + * Returns `z` if this $coll is empty. + */ + def foldLeft[B](z: B)(op: (B, A) => B): B = this match { + case seq: IndexedSeq[A @unchecked] => foldl[A, B](seq, 0, z, op) + case _ => + var result = z + val it = iterator + while (it.hasNext) { + result = op(result, it.next()) + } + result + } + + /** Applies a binary operator to all elements of this $coll and a start value, + * going right to left. + * + * $willNotTerminateInf + * $orderDependentFold + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return the result of inserting `op` between consecutive elements of this $coll, + * going right to left with the start value `z` on the right: + * `op(x,,1,,, op(x,,2,,, ... op(x,,n,,, z)...))` where `x,,1,,, ..., x,,n,,` + * are the elements of this $coll. + * Returns `z` if this $coll is empty. 
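+ * + * For example, with a non-associative operator the grouping is visible: + * {{{ + * List(1, 2, 3).foldRight(0)(_ - _) // 1 - (2 - (3 - 0)) == 2 + * }}}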
+ */ + def foldRight[B](z: B)(op: (A, B) => B): B = reversed.foldLeft(z)((b, a) => op(a, b)) + + @deprecated("Use foldLeft instead of /:", "2.13.0") + @`inline` final def /: [B](z: B)(op: (B, A) => B): B = foldLeft[B](z)(op) + + @deprecated("Use foldRight instead of :\\", "2.13.0") + @`inline` final def :\ [B](z: B)(op: (A, B) => B): B = foldRight[B](z)(op) + + /** Folds the elements of this $coll using the specified associative binary operator. + * The default implementation in `IterableOnce` is equivalent to `foldLeft` but may be + * overridden for more efficient traversal orders. + * + * $undefinedorder + * $willNotTerminateInf + * + * @tparam A1 a type parameter for the binary operator, a supertype of `A`. + * @param z a neutral element for the fold operation; may be added to the result + * an arbitrary number of times, and must not change the result (e.g., `Nil` for list concatenation, + * 0 for addition, or 1 for multiplication). + * @param op a binary operator that must be associative. + * @return the result of applying the fold operator `op` between all the elements and `z`, or `z` if this $coll is empty. + */ + def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = foldLeft(z)(op) + + /** Reduces the elements of this $coll using the specified associative binary operator. + * + * $undefinedorder + * + * @tparam B A type parameter for the binary operator, a supertype of `A`. + * @param op A binary operator that must be associative. + * @return The result of applying reduce operator `op` between all the elements if the $coll is nonempty. + * @throws UnsupportedOperationException if this $coll is empty. + */ + def reduce[B >: A](op: (B, B) => B): B = reduceLeft(op) + + /** Reduces the elements of this $coll, if any, using the specified + * associative binary operator. + * + * $undefinedorder + * + * @tparam B A type parameter for the binary operator, a supertype of `A`. + * @param op A binary operator that must be associative. + * @return An option value containing result of applying reduce operator `op` between all + * the elements if the collection is nonempty, and `None` otherwise. + */ + def reduceOption[B >: A](op: (B, B) => B): Option[B] = reduceLeftOption(op) + + /** Applies a binary operator to all elements of this $coll, + * going left to right. + * $willNotTerminateInf + * $orderDependentFold + * + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return the result of inserting `op` between consecutive elements of this $coll, + * going left to right: + * `op( op( ... op(x,,1,,, x,,2,,) ..., x,,n-1,,), x,,n,,)` where `x,,1,,, ..., x,,n,,` + * are the elements of this $coll. + * @throws UnsupportedOperationException if this $coll is empty. + */ + def reduceLeft[B >: A](op: (B, A) => B): B = this match { + case seq: IndexedSeq[A @unchecked] if seq.length > 0 => foldl(seq, 1, seq(0), op) + case _ if knownSize == 0 => throw new UnsupportedOperationException("empty.reduceLeft") + case _ => reduceLeftIterator[B](throw new UnsupportedOperationException("empty.reduceLeft"))(op) + } + private final def reduceLeftIterator[B >: A](onEmpty: => B)(op: (B, A) => B): B = { + val it = iterator + if (it.hasNext) { + var acc: B = it.next() + while (it.hasNext) + acc = op(acc, it.next()) + acc + } + else onEmpty + } + + /** Applies a binary operator to all elements of this $coll, going right to left. + * $willNotTerminateInf + * $orderDependentFold + * + * @param op the binary operator. + * @tparam B the result type of the binary operator. 
+ * @return the result of inserting `op` between consecutive elements of this $coll, + * going right to left: + * `op(x,,1,,, op(x,,2,,, ..., op(x,,n-1,,, x,,n,,)...))` where `x,,1,,, ..., x,,n,,` + * are the elements of this $coll. + * @throws UnsupportedOperationException if this $coll is empty. + */ + def reduceRight[B >: A](op: (A, B) => B): B = this match { + case seq: IndexedSeq[A @unchecked] if seq.length > 0 => foldr[A, B](seq, op) + case _ if knownSize == 0 => throw new UnsupportedOperationException("empty.reduceRight") + case _ => reversed.reduceLeft[B]((x, y) => op(y, x)) // reduceLeftIterator + } + + /** Optionally applies a binary operator to all elements of this $coll, going left to right. + * $willNotTerminateInf + * $orderDependentFold + * + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return an option value containing the result of `reduceLeft(op)` if this $coll is nonempty, + * `None` otherwise. + */ + def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] = + knownSize match { + case -1 => reduceLeftOptionIterator[B](op) + case 0 => None + case _ => Some(reduceLeft(op)) + } + private final def reduceLeftOptionIterator[B >: A](op: (B, A) => B): Option[B] = reduceOptionIterator[A, B](iterator)(op) + private final def reduceOptionIterator[X >: A, B >: X](it: Iterator[X]^)(op: (B, X) => B): Option[B] = { + if (it.hasNext) { + var acc: B = it.next() + while (it.hasNext) + acc = op(acc, it.next()) + Some(acc) + } + else None + } + + /** Optionally applies a binary operator to all elements of this $coll, going + * right to left. + * $willNotTerminateInf + * $orderDependentFold + * + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return an option value containing the result of `reduceRight(op)` if this $coll is nonempty, + * `None` otherwise. + */ + def reduceRightOption[B >: A](op: (A, B) => B): Option[B] = + knownSize match { + case -1 => reduceOptionIterator[A, B](reversed.iterator)((x, y) => op(y, x)) + case 0 => None + case _ => Some(reduceRight(op)) + } + + /** Tests whether the $coll is empty. + * + * Note: The default implementation creates and discards an iterator. + * + * Note: Implementations in subclasses that are not repeatedly iterable must take + * care not to consume any elements when `isEmpty` is called. + * + * @return `true` if the $coll contains no elements, `false` otherwise. + */ + def isEmpty: Boolean = + knownSize match { + case -1 => !iterator.hasNext + case 0 => true + case _ => false + } + + /** Tests whether the $coll is not empty. + * + * @return `true` if the $coll contains at least one element, `false` otherwise. + */ + @deprecatedOverriding("nonEmpty is defined as !isEmpty; override isEmpty instead", "2.13.0") + def nonEmpty: Boolean = !isEmpty + + /** The size of this $coll. + * + * $willNotTerminateInf + * + * @return the number of elements in this $coll. + */ + def size: Int = + if (knownSize >= 0) knownSize + else { + val it = iterator + var len = 0 + while (it.hasNext) { len += 1; it.next() } + len + } + + @deprecated("Use `dest ++= coll` instead", "2.13.0") + @inline final def copyToBuffer[B >: A](dest: mutable.Buffer[B]): Unit = dest ++= this + + /** Copy elements to an array, returning the number of elements written. + * + * Fills the given array `xs` starting at index `0` with values of this $coll. + * + * Copying will stop once either all the elements of this $coll have been copied, + * or the end of the array is reached.
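+ * + * For example: + * {{{ + * val xs = new Array[Int](4) + * List(1, 2, 3).copyToArray(xs) // returns 3; xs is now Array(1, 2, 3, 0) + * }}}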
+ * + * @param xs the array to fill. + * @tparam B the type of the elements of the array. + * @return the number of elements written to the array + * + * @note Reuse: $consumesIterator + */ + @deprecatedOverriding("This should always forward to the 3-arg version of this method", since = "2.13.4") + def copyToArray[B >: A](xs: Array[B]): Int = copyToArray(xs, 0, Int.MaxValue) + + /** Copy elements to an array, returning the number of elements written. + * + * Fills the given array `xs` starting at index `start` with values of this $coll. + * + * Copying will stop once either all the elements of this $coll have been copied, + * or the end of the array is reached. + * + * @param xs the array to fill. + * @param start the starting index of xs. + * @tparam B the type of the elements of the array. + * @return the number of elements written to the array + * + * @note Reuse: $consumesIterator + */ + @deprecatedOverriding("This should always forward to the 3-arg version of this method", since = "2.13.4") + def copyToArray[B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) + + /** Copy elements to an array, returning the number of elements written. + * + * Fills the given array `xs` starting at index `start` with at most `len` elements of this $coll. + * + * Copying will stop once either all the elements of this $coll have been copied, + * or the end of the array is reached, or `len` elements have been copied. + * + * @param xs the array to fill. + * @param start the starting index of xs. + * @param len the maximal number of elements to copy. + * @tparam B the type of the elements of the array. + * @return the number of elements written to the array + * + * @note Reuse: $consumesIterator + */ + def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val it = iterator + var i = start + val end = start + math.min(len, xs.length - start) + while (i < end && it.hasNext) { + xs(i) = it.next() + i += 1 + } + i - start + } + + /** Sums the elements of this collection. + * + * The default implementation uses `reduce` for a known non-empty collection, `foldLeft` otherwise. + * + * $willNotTerminateInf + * + * @param num an implicit parameter defining a set of numeric operations + * which includes the `+` operator to be used in forming the sum. + * @tparam B the result type of the `+` operator. + * @return the sum of all elements of this $coll with respect to the `+` operator in `num`. + */ + def sum[B >: A](implicit num: Numeric[B]): B = + knownSize match { + case -1 => foldLeft(num.zero)(num.plus) + case 0 => num.zero + case _ => reduce(num.plus) + } + + /** Multiplies together the elements of this collection. + * + * The default implementation uses `reduce` for a known non-empty collection, `foldLeft` otherwise. + * + * $willNotTerminateInf + * + * @param num an implicit parameter defining a set of numeric operations + * which includes the `*` operator to be used in forming the product. + * @tparam B the result type of the `*` operator. + * @return the product of all elements of this $coll with respect to the `*` operator in `num`. + */ + def product[B >: A](implicit num: Numeric[B]): B = + knownSize match { + case -1 => foldLeft(num.one)(num.times) + case 0 => num.one + case _ => reduce(num.times) + } + + /** Finds the smallest element. + * + * $willNotTerminateInf + * + * @param ord An ordering to be used for comparing elements. + * @tparam B The type over which the ordering is defined. + * @throws UnsupportedOperationException if this $coll is empty. 
+ * @return the smallest element of this $coll with respect to the ordering `ord`. + * + */ + def min[B >: A](implicit ord: Ordering[B]): A = + knownSize match { + case -1 => reduceLeftIterator[A](throw new UnsupportedOperationException("empty.min"))(ord.min) + case 0 => throw new UnsupportedOperationException("empty.min") + case _ => reduceLeft(ord.min) + } + + /** Finds the smallest element. + * + * $willNotTerminateInf + * + * @param ord An ordering to be used for comparing elements. + * @tparam B The type over which the ordering is defined. + * @return an option value containing the smallest element of this $coll + * with respect to the ordering `ord`. + */ + def minOption[B >: A](implicit ord: Ordering[B]): Option[A] = + knownSize match { + case -1 => reduceLeftOptionIterator[A](ord.min) + case 0 => None + case _ => Some(reduceLeft(ord.min)) + } + + /** Finds the largest element. + * + * $willNotTerminateInf + * + * @param ord An ordering to be used for comparing elements. + * @tparam B The type over which the ordering is defined. + * @throws UnsupportedOperationException if this $coll is empty. + * @return the largest element of this $coll with respect to the ordering `ord`. + */ + def max[B >: A](implicit ord: Ordering[B]): A = + knownSize match { + case -1 => reduceLeftIterator[A](throw new UnsupportedOperationException("empty.max"))(ord.max) + case 0 => throw new UnsupportedOperationException("empty.max") + case _ => reduceLeft(ord.max) + } + + /** Finds the largest element. + * + * $willNotTerminateInf + * + * @param ord An ordering to be used for comparing elements. + * @tparam B The type over which the ordering is defined. + * @return an option value containing the largest element of this $coll with + * respect to the ordering `ord`. + */ + def maxOption[B >: A](implicit ord: Ordering[B]): Option[A] = + knownSize match { + case -1 => reduceLeftOptionIterator[A](ord.max) + case 0 => None + case _ => Some(reduceLeft(ord.max)) + } + + /** Finds the first element which yields the largest value measured by function f. + * + * $willNotTerminateInf + * + * @param ord An ordering to be used for comparing elements. + * @tparam B The result type of the function f. + * @param f The measuring function. + * @throws UnsupportedOperationException if this $coll is empty. + * @return the first element of this $coll with the largest value measured by function f + * with respect to the ordering `ord`. + */ + def maxBy[B](f: A -> B)(implicit ord: Ordering[B]): A = + knownSize match { + case 0 => throw new UnsupportedOperationException("empty.maxBy") + case _ => foldLeft(new Maximized[A, B]("maxBy")(f)(ord.gt))((m, a) => m(m, a)).result + } + + /** Finds the first element which yields the largest value measured by function f. + * + * $willNotTerminateInf + * + * @param ord An ordering to be used for comparing elements. + * @tparam B The result type of the function f. + * @param f The measuring function. + * @return an option value containing the first element of this $coll with the + * largest value measured by function f with respect to the ordering `ord`. + */ + def maxByOption[B](f: A -> B)(implicit ord: Ordering[B]): Option[A] = + knownSize match { + case 0 => None + case _ => foldLeft(new Maximized[A, B]("maxBy")(f)(ord.gt))((m, a) => m(m, a)).toOption + } + + /** Finds the first element which yields the smallest value measured by function f. + * + * $willNotTerminateInf + * + * @param ord An ordering to be used for comparing elements. + * @tparam B The result type of the function f.
+ * @param f The measuring function. + * @throws UnsupportedOperationException if this $coll is empty. + * @return the first element of this $coll with the smallest value measured by function f + * with respect to the ordering `ord`. + */ + def minBy[B](f: A -> B)(implicit ord: Ordering[B]): A = + knownSize match { + case 0 => throw new UnsupportedOperationException("empty.minBy") + case _ => foldLeft(new Maximized[A, B]("minBy")(f)(ord.lt))((m, a) => m(m, a)).result + } + + /** Finds the first element which yields the smallest value measured by function f. + * + * $willNotTerminateInf + * + * @param ord An ordering to be used for comparing elements. + * @tparam B The result type of the function f. + * @param f The measuring function. + * @return an option value containing the first element of this $coll + * with the smallest value measured by function f + * with respect to the ordering `ord`. + */ + def minByOption[B](f: A -> B)(implicit ord: Ordering[B]): Option[A] = + knownSize match { + case 0 => None + case _ => foldLeft(new Maximized[A, B]("minBy")(f)(ord.lt))((m, a) => m(m, a)).toOption + } + + /** Finds the first element of the $coll for which the given partial + * function is defined, and applies the partial function to it. + * + * $mayNotTerminateInf + * $orderDependent + * + * @param pf the partial function + * @return an option value containing pf applied to the first + * value for which it is defined, or `None` if none exists. + * @example `Seq("a", 1, 5L).collectFirst({ case x: Int => x*10 }) = Some(10)` + */ + def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = { + // Presumably the fastest way to get in and out of a partial function is for a sentinel function to return itself + // (Tested to be lower-overhead than runWith. Would be better yet to not need to (formally) allocate it) + val sentinel: scala.Function1[A, Any] = new scala.runtime.AbstractFunction1[A, Any] { + def apply(a: A) = this + } + val it = iterator + while (it.hasNext) { + val x = pf.applyOrElse(it.next(), sentinel) + if (x.asInstanceOf[AnyRef] ne sentinel) return Some(x.asInstanceOf[B]) + } + None + } + + @deprecated("`aggregate` is not relevant for sequential collections. Use `foldLeft(z)(seqop)` instead.", "2.13.0") + def aggregate[B](z: => B)(seqop: (B, A) => B, combop: (B, B) => B): B = foldLeft(z)(seqop) + + /** Tests whether every element of this collection's iterator relates to the + * corresponding element of another collection by satisfying a test predicate. + * + * $willNotTerminateInf + * + * @param that the other collection + * @param p the test predicate, which relates elements from both collections + * @tparam B the type of the elements of `that` + * @return `true` if both collections have the same length and + * `p(x, y)` is `true` for all corresponding elements `x` of this iterator + * and `y` of `that`, otherwise `false` + */ + def corresponds[B](that: IterableOnce[B])(p: (A, B) => Boolean): Boolean = { + val a = iterator + val b = that.iterator + + while (a.hasNext && b.hasNext) { + if (!p(a.next(), b.next())) return false + } + + a.hasNext == b.hasNext + } + + /** Displays all elements of this $coll in a string using start, end, and separator strings. + * + * Delegates to addString, which can be overridden. + * + * @param start the starting string. + * @param sep the separator string. + * @param end the ending string. + * @return a string representation of this $coll. The resulting string + * begins with the string `start` and ends with the string + * `end`.
Inside, the string representations (w.r.t. the method + * `toString`) of all elements of this $coll are separated by + * the string `sep`. + * + * @example `List(1, 2, 3).mkString("(", "; ", ")") = "(1; 2; 3)"` + */ + final def mkString(start: String, sep: String, end: String): String = + if (knownSize == 0) start + end + else addString(new StringBuilder(), start, sep, end).result() + + /** Displays all elements of this $coll in a string using a separator string. + * + * Delegates to addString, which can be overridden. + * + * @param sep the separator string. + * @return a string representation of this $coll. In the resulting string + * the string representations (w.r.t. the method `toString`) + * of all elements of this $coll are separated by the string `sep`. + * + * @example `List(1, 2, 3).mkString("|") = "1|2|3"` + */ + @inline final def mkString(sep: String): String = mkString("", sep, "") + + /** Displays all elements of this $coll in a string. + * + * Delegates to addString, which can be overridden. + * + * @return a string representation of this $coll. In the resulting string + * the string representations (w.r.t. the method `toString`) + * of all elements of this $coll follow each other without any + * separator string. + */ + @inline final def mkString: String = mkString("") + + /** Appends all elements of this $coll to a string builder using start, end, and separator strings. + * The written text begins with the string `start` and ends with the string `end`. + * Inside, the string representations (w.r.t. the method `toString`) + * of all elements of this $coll are separated by the string `sep`. + * + * Example: + * + * {{{ + * scala> val a = List(1,2,3,4) + * a: List[Int] = List(1, 2, 3, 4) + * + * scala> val b = new StringBuilder() + * b: StringBuilder = + * + * scala> a.addString(b , "List(" , ", " , ")") + * res5: StringBuilder = List(1, 2, 3, 4) + * }}} + * + * @param b the string builder to which elements are appended. + * @param start the starting string. + * @param sep the separator string. + * @param end the ending string. + * @return the string builder `b` to which elements were appended. + */ + def addString(b: StringBuilder, start: String, sep: String, end: String): b.type = { + val jsb = b.underlying + if (start.length != 0) jsb.append(start) + val it = iterator + if (it.hasNext) { + jsb.append(it.next()) + while (it.hasNext) { + jsb.append(sep) + jsb.append(it.next()) + } + } + if (end.length != 0) jsb.append(end) + b + } + + /** Appends all elements of this $coll to a string builder using a separator string. + * The written text consists of the string representations (w.r.t. the method `toString`) + * of all elements of this $coll, separated by the string `sep`. + * + * Example: + * + * {{{ + * scala> val a = List(1,2,3,4) + * a: List[Int] = List(1, 2, 3, 4) + * + * scala> val b = new StringBuilder() + * b: StringBuilder = + * + * scala> a.addString(b, ", ") + * res0: StringBuilder = 1, 2, 3, 4 + * }}} + * + * @param b the string builder to which elements are appended. + * @param sep the separator string. + * @return the string builder `b` to which elements were appended. + */ + @inline final def addString(b: StringBuilder, sep: String): b.type = addString(b, "", sep, "") + + /** Appends all elements of this $coll to a string builder. + * The written text consists of the string representations (w.r.t. the method + * `toString`) of all elements of this $coll without any separator string. 
+ * + * Example: + * + * {{{ + * scala> val a = List(1,2,3,4) + * a: List[Int] = List(1, 2, 3, 4) + * + * scala> val b = new StringBuilder() + * b: StringBuilder = + * + * scala> val h = a.addString(b) + * h: StringBuilder = 1234 + * }}} + * + * @param b the string builder to which elements are appended. + * @return the string builder `b` to which elements were appended. + */ + @inline final def addString(b: StringBuilder): b.type = addString(b, "") + + /** Given a collection factory `factory`, convert this collection to the appropriate + * representation for the current element type `A`. Example uses: + * + * {{{ + * xs.to(List) + * xs.to(ArrayBuffer) + * xs.to(BitSet) // for xs: Iterable[Int] + * }}} + */ + def to[C1](factory: Factory[A, C1]): C1 = factory.fromSpecific(this) + + @deprecated("Use .iterator instead of .toIterator", "2.13.0") + @`inline` final def toIterator: Iterator[A]^{this} = iterator + + def toList: immutable.List[A] = immutable.List.from(this) + + def toVector: immutable.Vector[A] = immutable.Vector.from(this) + + def toMap[K, V](implicit ev: A <:< (K, V)): immutable.Map[K, V] = + immutable.Map.from(this.asInstanceOf[IterableOnce[(K, V)]]) + + def toSet[B >: A]: immutable.Set[B] = immutable.Set.from(this) + + /** @return This collection as a `Seq[A]`. This is equivalent to `to(Seq)` but might be faster. + */ + def toSeq: immutable.Seq[A] = immutable.Seq.from(this) + + def toIndexedSeq: immutable.IndexedSeq[A] = immutable.IndexedSeq.from(this) + + @deprecated("Use .to(LazyList) instead of .toStream", "2.13.0") + @`inline` final def toStream: immutable.Stream[A] = to(immutable.Stream) + + @`inline` final def toBuffer[B >: A]: mutable.Buffer[B] = mutable.Buffer.from(this) + + /** Convert collection to array. + * + * Implementation note: DO NOT call [[Array.from]] from this method. + */ + def toArray[B >: A: ClassTag]: Array[B] = + if (knownSize >= 0) { + val destination = new Array[B](knownSize) + copyToArray(destination, 0) + destination + } + else mutable.ArrayBuilder.make[B].addAll(this).result() + + // For internal use + protected def reversed: Iterable[A] = { + var xs: immutable.List[A] = immutable.Nil + val it = iterator + while (it.hasNext) xs = it.next() :: xs + xs + } +} + +object IterableOnceOps: + + // Moved out of trait IterableOnceOps to here, since universal traits cannot + // have nested classes in Scala 3 + private class Maximized[X, B](descriptor: String)(f: X -> B)(cmp: (B, B) -> Boolean) extends AbstractFunction2[Maximized[X, B], X, Maximized[X, B]] { + var maxElem: X = null.asInstanceOf[X] + var maxF: B = null.asInstanceOf[B] + var nonEmpty = false + def toOption: Option[X] = if (nonEmpty) Some(maxElem) else None + def result: X = if (nonEmpty) maxElem else throw new UnsupportedOperationException(s"empty.$descriptor") + def apply(m: Maximized[X, B], a: X): Maximized[X, B] = + if (m.nonEmpty) { + val fa = f(a) + if (cmp(fa, maxF)) { + maxF = fa + maxElem = a + } + m + } + else { + m.nonEmpty = true + m.maxElem = a + m.maxF = f(a) + m + } + } +end IterableOnceOps \ No newline at end of file diff --git a/tests/pos-special/stdlib/collection/Iterator.scala b/tests/pos-special/stdlib/collection/Iterator.scala new file mode 100644 index 000000000000..57a12767320a --- /dev/null +++ b/tests/pos-special/stdlib/collection/Iterator.scala @@ -0,0 +1,1306 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.collection.mutable.{ArrayBuffer, ArrayBuilder, Builder, ImmutableBuilder} +import scala.annotation.tailrec +import scala.annotation.unchecked.uncheckedVariance +import scala.runtime.Statics +import language.experimental.captureChecking + + +/** Iterators are data structures that allow one to iterate over a sequence + * of elements. They have a `hasNext` method for checking + * if there is a next element available, and a `next` method + * which returns the next element and advances the iterator. + * + * An iterator is mutable: most operations on it change its state. While it is often used + * to iterate through the elements of a collection, it can also be used without + * being backed by any collection (see constructors on the companion object). + * + * It is of particular importance to note that, unless stated otherwise, ''one should never + * use an iterator after calling a method on it''. The two most important exceptions + * are also the sole abstract methods: `next` and `hasNext`. + * + * Both these methods can be called any number of times without having to discard the + * iterator. Note that even `hasNext` may cause mutation -- such as when iterating + * from an input stream, where it will block until the stream is closed or some + * input becomes available. + * + * Consider this example for safe and unsafe use: + * + * {{{ + * def f[A](it: Iterator[A]) = { + * if (it.hasNext) { // Safe to reuse "it" after "hasNext" + * it.next() // Safe to reuse "it" after "next" + * val remainder = it.drop(2) // it is *not* safe to use "it" again after this line! + * remainder.take(2) // it is *not* safe to use "remainder" after this line! + * } else it + * } + * }}} + * + * @define mayNotTerminateInf + * Note: may not terminate for infinite iterators. + * @define preservesIterator + * The iterator remains valid for further use whatever result is returned. + * @define consumesIterator + * After calling this method, one should discard the iterator it was called + * on. Using it is undefined and subject to change. + * @define consumesAndProducesIterator + * After calling this method, one should discard the iterator it was called + * on, and use only the iterator that was returned. Using the old iterator + * is undefined, subject to change, and may result in changes to the new + * iterator as well. + * @define consumesTwoAndProducesOneIterator + * After calling this method, one should discard the iterator it was called + * on, as well as the one passed as a parameter, and use only the iterator + * that was returned. Using the old iterators is undefined, subject to change, + * and may result in changes to the new iterator as well. + * @define consumesOneAndProducesTwoIterators + * After calling this method, one should discard the iterator it was called + * on, and use only the iterators that were returned. Using the old iterator + * is undefined, subject to change, and may result in changes to the new + * iterators as well. + * @define coll iterator + */ +trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Iterator[A]] { + self: Iterator[A]^ => + + /** Check if there is a next element available.
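+ * + * For example: + * {{{ + * val it = Iterator(1, 2) + * it.hasNext // true; can be called repeatedly without advancing + * it.next() // 1 + * }}}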
+ * + * @return `true` if there is a next element, `false` otherwise + * @note Reuse: $preservesIterator + */ + def hasNext: Boolean + + @deprecated("hasDefiniteSize on Iterator is the same as isEmpty", "2.13.0") + @`inline` override final def hasDefiniteSize = isEmpty + + /** Return the next element and advance the iterator. + * + * @throws NoSuchElementException if there is no next element. + * @return the next element. + * @note Reuse: Advances the iterator, which may exhaust the elements. It is valid to + * make additional calls on the iterator. + */ + @throws[NoSuchElementException] + def next(): A + + @inline final def iterator: Iterator[A]^{this} = this + + /** Wraps the value of `next()` in an option. + * + * @return `Some(next)` if a next element exists, `None` otherwise. + */ + def nextOption(): Option[A] = if (hasNext) Some(next()) else None + + /** Tests whether this iterator contains a given value as an element. + * $mayNotTerminateInf + * + * @param elem the element to test. + * @return `true` if this iterator produces some value that is + * is equal (as determined by `==`) to `elem`, `false` otherwise. + * @note Reuse: $consumesIterator + */ + def contains(elem: Any): Boolean = exists(_ == elem) // Note--this seems faster than manual inlining! + + /** Creates a buffered iterator from this iterator. + * + * @see [[scala.collection.BufferedIterator]] + * @return a buffered iterator producing the same values as this iterator. + * @note Reuse: $consumesAndProducesIterator + */ + def buffered: BufferedIterator[A]^{this} = new AbstractIterator[A] with BufferedIterator[A] { + private[this] var hd: A = _ + private[this] var hdDefined: Boolean = false + + def head: A = { + if (!hdDefined) { + hd = next() + hdDefined = true + } + hd + } + + override def knownSize = { + val thisSize = self.knownSize + if (thisSize >= 0 && hdDefined) thisSize + 1 + else thisSize + } + + def hasNext = + hdDefined || self.hasNext + + def next() = + if (hdDefined) { + hdDefined = false + hd + } else self.next() + } + + /** A flexible iterator for transforming an `Iterator[A]` into an + * `Iterator[Seq[A]]`, with configurable sequence size, step, and + * strategy for dealing with remainder elements which don't fit evenly + * into the last group. + * + * A `GroupedIterator` is yielded by `grouped` and by `sliding`, + * where the `step` may differ from the group `size`. + */ + class GroupedIterator[B >: A](self: Iterator[B]^, size: Int, step: Int) extends AbstractIterator[immutable.Seq[B]] { + + require(size >= 1 && step >= 1, f"size=$size%d and step=$step%d, but both must be positive") + + private[this] var buffer: Array[B] = null // current result + private[this] var prev: Array[B] = null // if sliding, overlap from previous result + private[this] var first = true // if !first, advancing may skip ahead + private[this] var filled = false // whether the buffer is "hot" + private[this] var partial = true // whether to emit partial sequence + private[this] var padding: () -> B = null // what to pad short sequences with + private[this] def pad = padding != null // irrespective of partial flag + private[this] def newBuilder = { + val b = ArrayBuilder.make[Any] + val k = self.knownSize + if (k > 0) b.sizeHint(k min size) // if k < size && !partial, buffer will grow on padding + b + } + + /** Specifies a fill element used to pad a partial segment + * so that all segments have the same size. + * + * Any previous setting of `withPartial` is ignored, + * as the last group will always be padded to `size` elements. 
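+ *
+ * For example (a sketch; the pad value `0` is illustrative):
+ * {{{
+ * // Returns two groups, (1, 2, 3) and (4, 5, 0): the short last group is padded
+ * (1 to 5).iterator.grouped(3).withPadding(0).toList
+ * }}}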
+ * + * The by-name argument is evaluated for each fill element. + * + * @param x The element that will be appended to the last segment, if necessary. + * @return The same iterator, and ''not'' a new iterator. + * @note This method mutates the iterator it is called on, which can be safely used afterwards. + * @note This method is mutually exclusive with `withPartial`. + * @group Configuration + */ + def withPadding(x: -> B): this.type = { + padding = () => x + partial = true // redundant, as padding always results in complete segment + this + } + /** Specify whether to drop the last segment if it has less than `size` elements. + * + * If this flag is `false`, elements of a partial segment at the end of the iterator + * are not returned. + * + * The flag defaults to `true`. + * + * Any previous setting of `withPadding` is ignored, + * as the last group will never be padded. + * A partial segment is either retained or dropped, per the flag. + * + * @param x `true` if partial segments may be returned, `false` otherwise. + * @return The same iterator, and ''not'' a new iterator. + * @note This method mutates the iterator it is called on, which can be safely used afterwards. + * @note This method is mutually exclusive with `withPadding`. + * @group Configuration + */ + def withPartial(x: Boolean): this.type = { + partial = x + padding = null + this + } + + /** Eagerly fetch `size` elements to buffer. + * + * If buffer is dirty and stepping, copy prefix. + * If skipping, skip ahead. + * Fetch remaining elements. + * If unable to deliver size, then pad if padding enabled, otherwise drop segment. + * Returns true if successful in delivering `count` elements, + * or padded segment, or partial segment. + */ + private def fulfill(): Boolean = { + val builder = newBuilder + var done = false + // keep prefix of previous buffer if stepping + if (prev != null) builder.addAll(prev) + // skip ahead + if (!first && step > size) { + var dropping = step - size + while (dropping > 0 && self.hasNext) { + self.next(): Unit + dropping -= 1 + } + done = dropping > 0 // skip failed + } + var index = builder.length + if (!done) { + // advance to rest of segment if possible + while (index < size && self.hasNext) { + builder.addOne(self.next()) + index += 1 + } + // if unable to complete segment, pad if possible + if (index < size && pad) { + builder.sizeHint(size) + while (index < size) { + builder.addOne(padding()) + index += 1 + } + } + } + // segment must have data, and must be complete unless they allow partial + val ok = index > 0 && (partial || index == size) + if (ok) buffer = builder.result().asInstanceOf[Array[B]] + else prev = null + ok + } + + // fill() returns false if no more sequences can be produced + private def fill(): Boolean = filled || { filled = self.hasNext && fulfill() ; filled } + + def hasNext = fill() + + @throws[NoSuchElementException] + def next(): immutable.Seq[B] = + if (!fill()) Iterator.empty.next() + else { + filled = false + // if stepping, retain overlap in prev + if (step < size) { + if (first) prev = buffer.drop(step) + else if (buffer.length == size) Array.copy(src = buffer, srcPos = step, dest = prev, destPos = 0, length = size - step) + else prev = null + } + val res = immutable.ArraySeq.unsafeWrapArray(buffer).asInstanceOf[immutable.ArraySeq[B]] + buffer = null + first = false + res + } + } + + /** A copy of this $coll with an element value appended until a given target length is reached. 
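+ *
+ * For example (a sketch; the values are illustrative):
+ * {{{
+ * // Returns List(1, 2, 3, 0, 0)
+ * Iterator(1, 2, 3).padTo(5, 0).toList
+ * }}}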
+ *
+ * @param len the target length
+ * @param elem the padding value
+ * @tparam B the element type of the returned $coll.
+ * @return a new $coll consisting of
+ * all elements of this $coll followed by the minimal number of occurrences of `elem` so
+ * that the resulting collection has a length of at least `len`.
+ */
+ def padTo[B >: A](len: Int, elem: B): Iterator[B]^{this} = new AbstractIterator[B] {
+ private[this] var i = 0
+
+ override def knownSize: Int = {
+ val thisSize = self.knownSize
+ if (thisSize < 0) -1
+ else thisSize max (len - i)
+ }
+
+ def next(): B = {
+ val b =
+ if (self.hasNext) self.next()
+ else if (i < len) elem
+ else Iterator.empty.next()
+ i += 1
+ b
+ }
+
+ def hasNext: Boolean = self.hasNext || i < len
+ }
+
+ /** Partitions this iterator into two iterators according to a predicate.
+ *
+ * @param p the predicate on which to partition
+ * @return a pair of iterators: the iterator that satisfies the predicate
+ * `p` and the iterator that does not.
+ * The relative order of the elements in the resulting iterators
+ * is the same as in the original iterator.
+ * @note Reuse: $consumesOneAndProducesTwoIterators
+ */
+ def partition(p: A => Boolean): (Iterator[A]^{this, p}, Iterator[A]^{this, p}) = {
+ val (a, b) = duplicate
+ (a filter p, b filterNot p)
+ }
+
+ /** Returns an iterator which groups this iterator into fixed size
+ * blocks. Example usages:
+ * {{{
+ * // Returns List(List(1, 2, 3), List(4, 5, 6), List(7))
+ * (1 to 7).iterator.grouped(3).toList
+ * // Returns List(List(1, 2, 3), List(4, 5, 6))
+ * (1 to 7).iterator.grouped(3).withPartial(false).toList
+ * // Returns List(List(1, 2, 3), List(4, 5, 6), List(7, 20, 25))
+ * // Illustrating that withPadding's argument is by-name.
+ * val it2 = Iterator.iterate(20)(_ + 5)
+ * (1 to 7).iterator.grouped(3).withPadding(it2.next).toList
+ * }}}
+ *
+ * @note Reuse: $consumesAndProducesIterator
+ */
+ def grouped[B >: A](size: Int): GroupedIterator[B]^{this} =
+ new GroupedIterator[B](self, size, size)
+
+ /** Returns an iterator which presents a "sliding window" view of
+ * this iterator. The first argument is the window size, and
+ * the second argument `step` is how far to advance the window
+ * on each iteration. The `step` defaults to `1`.
+ *
+ * The returned `GroupedIterator` can be configured to either
+ * pad a partial result to size `size` or suppress the partial
+ * result entirely.
+ *
+ * Example usages:
+ * {{{
+ * // Returns List(ArraySeq(1, 2, 3), ArraySeq(2, 3, 4), ArraySeq(3, 4, 5))
+ * (1 to 5).iterator.sliding(3).toList
+ * // Returns List(ArraySeq(1, 2, 3, 4), ArraySeq(4, 5))
+ * (1 to 5).iterator.sliding(4, 3).toList
+ * // Returns List(ArraySeq(1, 2, 3, 4))
+ * (1 to 5).iterator.sliding(4, 3).withPartial(false).toList
+ * // Returns List(ArraySeq(1, 2, 3, 4), ArraySeq(4, 5, 20, 25))
+ * // Illustrating that withPadding's argument is by-name.
+ * val it2 = Iterator.iterate(20)(_ + 5)
+ * (1 to 5).iterator.sliding(4, 3).withPadding(it2.next).toList
+ * }}}
+ *
+ * @param size the number of elements per group
+ * @param step the distance between the first elements of successive
+ * groups
+ * @return A `GroupedIterator` producing `Seq[B]`s of size `size`, except the
+ * last element (which may be the only element) will be truncated
+ * if there are fewer than `size` elements remaining to be grouped.
+ * This behavior can be configured.
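+ *
+ * When `step` exceeds `size`, the elements between consecutive windows are
+ * skipped, e.g. (a sketch; the values are illustrative):
+ * {{{
+ * // Returns List(ArraySeq(1, 2), ArraySeq(4, 5)): the 3 and the 6 are skipped
+ * (1 to 6).iterator.sliding(2, 3).toList
+ * }}}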
+ * + * @note Reuse: $consumesAndProducesIterator + */ + def sliding[B >: A](size: Int, step: Int = 1): GroupedIterator[B]^{this} = + new GroupedIterator[B](self, size, step) + + def scanLeft[B](z: B)(op: (B, A) => B): Iterator[B]^{this, op} = new AbstractIterator[B] { + // We use an intermediate iterator that iterates through the first element `z` + // and then that will be modified to iterate through the collection + private[this] var current: Iterator[B]^{self, op} = + new AbstractIterator[B] { + override def knownSize = { + val thisSize = self.knownSize + + if (thisSize < 0) -1 + else thisSize + 1 + } + def hasNext: Boolean = true + def next(): B = { + // Here we change our self-reference to a new iterator that iterates through `self` + current = new AbstractIterator[B] { + private[this] var acc = z + def next(): B = { + acc = op(acc, self.next()) + acc + } + def hasNext: Boolean = self.hasNext + override def knownSize = self.knownSize + } + z + } + } + override def knownSize = current.knownSize + def next(): B = current.next() + def hasNext: Boolean = current.hasNext + } + + @deprecated("Call scanRight on an Iterable instead.", "2.13.0") + def scanRight[B](z: B)(op: (A, B) => B): Iterator[B]^{this} = + ArrayBuffer.from[A](this).scanRight(z)(op).iterator + + def indexWhere(p: A => Boolean, from: Int = 0): Int = { + var i = math.max(from, 0) + val dropped = drop(from) + while (dropped.hasNext) { + if (p(dropped.next())) return i + i += 1 + } + -1 + } + + /** Returns the index of the first occurrence of the specified + * object in this iterable object. + * $mayNotTerminateInf + * + * @param elem element to search for. + * @return the index of the first occurrence of `elem` in the values produced by this iterator, + * or -1 if such an element does not exist until the end of the iterator is reached. + * @note Reuse: $consumesIterator + */ + def indexOf[B >: A](elem: B): Int = indexOf(elem, 0) + + /** Returns the index of the first occurrence of the specified object in this iterable object + * after or at some start index. + * $mayNotTerminateInf + * + * @param elem element to search for. + * @param from the start index + * @return the index `>= from` of the first occurrence of `elem` in the values produced by this + * iterator, or -1 if such an element does not exist until the end of the iterator is + * reached. 
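+ *
+ * For example (a sketch; the values are illustrative):
+ * {{{
+ * // Returns 3: the occurrence at index 1 lies before `from`
+ * Iterator(5, 6, 7, 6).indexOf(6, 2)
+ * }}}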
+ * @note Reuse: $consumesIterator + */ + def indexOf[B >: A](elem: B, from: Int): Int = { + var i = 0 + while (i < from && hasNext) { + next() + i += 1 + } + + while (hasNext) { + if (next() == elem) return i + i += 1 + } + -1 + } + + @inline final def length: Int = size + + @deprecatedOverriding("isEmpty is defined as !hasNext; override hasNext instead", "2.13.0") + override def isEmpty: Boolean = !hasNext + + def filter(p: A => Boolean): Iterator[A]^{this, p} = filterImpl(p, isFlipped = false) + + def filterNot(p: A => Boolean): Iterator[A]^{this, p} = filterImpl(p, isFlipped = true) + + private[collection] def filterImpl(p: A => Boolean, isFlipped: Boolean): Iterator[A]^{this, p} = new AbstractIterator[A] { + private[this] var hd: A = _ + private[this] var hdDefined: Boolean = false + + def hasNext: Boolean = hdDefined || { + if (!self.hasNext) return false + hd = self.next() + while (p(hd) == isFlipped) { + if (!self.hasNext) return false + hd = self.next() + } + hdDefined = true + true + } + + def next() = + if (hasNext) { + hdDefined = false + hd + } + else Iterator.empty.next() + } + + /** Creates an iterator over all the elements of this iterator that + * satisfy the predicate `p`. The order of the elements + * is preserved. + * + * '''Note:''' `withFilter` is the same as `filter` on iterators. It exists so that + * for-expressions with filters work over iterators. + * + * @param p the predicate used to test values. + * @return an iterator which produces those values of this iterator which satisfy the predicate `p`. + * @note Reuse: $consumesAndProducesIterator + */ + def withFilter(p: A => Boolean): Iterator[A]^{this, p} = filter(p) + + def collect[B](pf: PartialFunction[A, B]^): Iterator[B]^{this, pf} = new AbstractIterator[B] with (A -> B) { + // Manually buffer to avoid extra layer of wrapping with buffered + private[this] var hd: B = _ + + // Little state machine to keep track of where we are + // Seek = 0; Found = 1; Empty = -1 + // Not in vals because scalac won't make them static (@inline def only works with -optimize) + // BE REALLY CAREFUL TO KEEP COMMENTS AND NUMBERS IN SYNC! + private[this] var status = 0/*Seek*/ + + def apply(value: A): B = Statics.pfMarker.asInstanceOf[B] + + def hasNext = { + val marker = Statics.pfMarker + while (status == 0/*Seek*/) { + if (self.hasNext) { + val x = self.next() + val v = pf.applyOrElse(x, this) + if (marker ne v.asInstanceOf[AnyRef]) { + hd = v + status = 1/*Found*/ + } + } + else status = -1/*Empty*/ + } + status == 1/*Found*/ + } + def next() = if (hasNext) { status = 0/*Seek*/; hd } else Iterator.empty.next() + } + + /** + * Builds a new iterator from this one without any duplicated elements on it. + * @return iterator with distinct elements + * + * @note Reuse: $consumesIterator + */ + def distinct: Iterator[A]^{this} = distinctBy(identity) + + /** + * Builds a new iterator from this one without any duplicated elements as determined by `==` after applying + * the transforming function `f`. 
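+ *
+ * For example (a sketch; the strings are illustrative):
+ * {{{
+ * // Returns List("apple", "pear"): "avocado" duplicates the key of "apple"
+ * Iterator("apple", "avocado", "pear").distinctBy(_.head).toList
+ * }}}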
+ * + * @param f The transforming function whose result is used to determine the uniqueness of each element + * @tparam B the type of the elements after being transformed by `f` + * @return iterator with distinct elements + * + * @note Reuse: $consumesIterator + */ + def distinctBy[B](f: A -> B): Iterator[A]^{this} = new AbstractIterator[A] { + + private[this] val traversedValues = mutable.HashSet.empty[B] + private[this] var nextElementDefined: Boolean = false + private[this] var nextElement: A = _ + + def hasNext: Boolean = nextElementDefined || (self.hasNext && { + val a = self.next() + if (traversedValues.add(f(a))) { + nextElement = a + nextElementDefined = true + true + } + else hasNext + }) + + def next(): A = + if (hasNext) { + nextElementDefined = false + nextElement + } else { + Iterator.empty.next() + } + } + + def map[B](f: A => B): Iterator[B]^{this, f} = new AbstractIterator[B] { + override def knownSize = self.knownSize + def hasNext = self.hasNext + def next() = f(self.next()) + } + + def flatMap[B](f: A => IterableOnce[B]^): Iterator[B]^{this, f} = new AbstractIterator[B] { + private[this] var cur: Iterator[B]^{f} = Iterator.empty + /** Trillium logic boolean: -1 = unknown, 0 = false, 1 = true */ + private[this] var _hasNext: Int = -1 + + private[this] def nextCur(): Unit = { + cur = null + cur = f(self.next()).iterator + _hasNext = -1 + } + + def hasNext: Boolean = { + if (_hasNext == -1) { + while (!cur.hasNext) { + if (!self.hasNext) { + _hasNext = 0 + // since we know we are exhausted, we can release cur for gc, and as well replace with + // static Iterator.empty which will support efficient subsequent `hasNext`/`next` calls + cur = Iterator.empty + return false + } + nextCur() + } + _hasNext = 1 + true + } else _hasNext == 1 + } + def next(): B = { + if (hasNext) { + _hasNext = -1 + } + cur.next() + } + } + + def flatten[B](implicit ev: A -> IterableOnce[B]): Iterator[B]^{this} = + flatMap[B](ev) + + def concat[B >: A](xs: => IterableOnce[B]^): Iterator[B]^{this, xs} = new Iterator.ConcatIterator[B](self).concat(xs) + + @`inline` final def ++ [B >: A](xs: => IterableOnce[B]^): Iterator[B]^{this, xs} = concat(xs) + + def take(n: Int): Iterator[A]^{this} = sliceIterator(0, n max 0) + + def takeWhile(p: A => Boolean): Iterator[A]^{self, p} = new AbstractIterator[A] { + private[this] var hd: A = _ + private[this] var hdDefined: Boolean = false + private[this] var tail: Iterator[A]^{self} = self + + def hasNext = hdDefined || tail.hasNext && { + hd = tail.next() + if (p(hd)) hdDefined = true + else tail = Iterator.empty + hdDefined + } + def next() = if (hasNext) { hdDefined = false; hd } else Iterator.empty.next() + } + + def drop(n: Int): Iterator[A]^{this} = sliceIterator(n, -1) + + def dropWhile(p: A => Boolean): Iterator[A]^{this, p} = new AbstractIterator[A] { + // Magic value: -1 = hasn't dropped, 0 = found first, 1 = defer to parent iterator + private[this] var status = -1 + // Local buffering to avoid double-wrap with .buffered + private[this] var fst: A = _ + def hasNext: Boolean = + if (status == 1) self.hasNext + else if (status == 0) true + else { + while (self.hasNext) { + val a = self.next() + if (!p(a)) { + fst = a + status = 0 + return true + } + } + status = 1 + false + } + def next() = + if (hasNext) { + if (status == 1) self.next() + else { + status = 1 + fst + } + } + else Iterator.empty.next() + } + + /** + * @inheritdoc + * + * @note Reuse: $consumesOneAndProducesTwoIterators + */ + def span(p: A => Boolean): (Iterator[A]^{this, p}, 
Iterator[A]^{this, p}) = { + /* + * Giving a name to following iterator (as opposed to trailing) because + * anonymous class is represented as a structural type that trailing + * iterator is referring (the finish() method) and thus triggering + * handling of structural calls. It's not what's intended here. + */ + final class Leading extends AbstractIterator[A] { + private[this] var lookahead: mutable.Queue[A] = null + private[this] var hd: A = _ + /* Status is kept with magic numbers + * 1 means next element is in hd and we're still reading into this iterator + * 0 means we're still reading but haven't found a next element + * -1 means we are done reading into the iterator, so we must rely on lookahead + * -2 means we are done but have saved hd for the other iterator to use as its first element + */ + private[this] var status = 0 + private def store(a: A): Unit = { + if (lookahead == null) lookahead = new mutable.Queue[A] + lookahead += a + } + def hasNext = { + if (status < 0) (lookahead ne null) && lookahead.nonEmpty + else if (status > 0) true + else { + if (self.hasNext) { + hd = self.next() + status = if (p(hd)) 1 else -2 + } + else status = -1 + status > 0 + } + } + def next() = { + if (hasNext) { + if (status == 1) { status = 0; hd } + else lookahead.dequeue() + } + else Iterator.empty.next() + } + @tailrec + def finish(): Boolean = status match { + case -2 => status = -1 ; true + case -1 => false + case 1 => store(hd) ; status = 0 ; finish() + case 0 => + status = -1 + while (self.hasNext) { + val a = self.next() + if (p(a)) store(a) + else { + hd = a + return true + } + } + false + } + def trailer: A = hd + } + + val leading = new Leading + + val trailing = new AbstractIterator[A] { + private[this] var myLeading = leading + /* Status flag meanings: + * -1 not yet accessed + * 0 single element waiting in leading + * 1 defer to self + * 2 self.hasNext already + * 3 exhausted + */ + private[this] var status = -1 + def hasNext = status match { + case 3 => false + case 2 => true + case 1 => if (self.hasNext) { status = 2 ; true } else { status = 3 ; false } + case 0 => true + case _ => + if (myLeading.finish()) { status = 0 ; true } else { status = 1 ; myLeading = null ; hasNext } + } + def next() = { + if (hasNext) { + if (status == 0) { + status = 1 + val res = myLeading.trailer + myLeading = null + res + } else { + status = 1 + self.next() + } + } + else Iterator.empty.next() + } + } + + (leading, trailing) + } + + def slice(from: Int, until: Int): Iterator[A]^{this} = sliceIterator(from, until max 0) + + /** Creates an optionally bounded slice, unbounded if `until` is negative. 
*/
+ protected def sliceIterator(from: Int, until: Int): Iterator[A]^{this} = {
+ val lo = from max 0
+ val rest =
+ if (until < 0) -1 // unbounded
+ else if (until <= lo) 0 // empty
+ else until - lo // finite
+
+ if (rest == 0) Iterator.empty
+ else new Iterator.SliceIterator(this, lo, rest)
+ }
+
+ def zip[B](that: IterableOnce[B]^): Iterator[(A, B)]^{this, that} = new AbstractIterator[(A, B)] {
+ val thatIterator = that.iterator
+ override def knownSize = self.knownSize min thatIterator.knownSize
+ def hasNext = self.hasNext && thatIterator.hasNext
+ def next() = (self.next(), thatIterator.next())
+ }
+
+ def zipAll[A1 >: A, B](that: IterableOnce[B]^, thisElem: A1, thatElem: B): Iterator[(A1, B)]^{this, that} = new AbstractIterator[(A1, B)] {
+ val thatIterator = that.iterator
+ override def knownSize = {
+ val thisSize = self.knownSize
+ val thatSize = thatIterator.knownSize
+ if (thisSize < 0 || thatSize < 0) -1
+ else thisSize max thatSize
+ }
+ def hasNext = self.hasNext || thatIterator.hasNext
+ def next(): (A1, B) = {
+ val next1 = self.hasNext
+ val next2 = thatIterator.hasNext
+ if(!(next1 || next2)) throw new NoSuchElementException
+ (if(next1) self.next() else thisElem, if(next2) thatIterator.next() else thatElem)
+ }
+ }
+
+ def zipWithIndex: Iterator[(A, Int)]^{this} = new AbstractIterator[(A, Int)] {
+ var idx = 0
+ override def knownSize = self.knownSize
+ def hasNext = self.hasNext
+ def next() = {
+ val ret = (self.next(), idx)
+ idx += 1
+ ret
+ }
+ }
+
+ /** Checks whether corresponding elements of the given iterable collection
+ * compare equal (with respect to `==`) to elements of this $coll.
+ *
+ * @param that the collection to compare
+ * @tparam B the type of the elements of collection `that`.
+ * @return `true` if both collections contain equal elements in the same order, `false` otherwise.
+ *
+ * @inheritdoc
+ */
+ def sameElements[B >: A](that: IterableOnce[B]^): Boolean = {
+ val those = that.iterator
+ while (hasNext && those.hasNext)
+ if (next() != those.next())
+ return false
+ // At that point we know that *at least one* iterator has no next element
+ // If *both* of them have no elements then the collections are the same
+ hasNext == those.hasNext
+ }
+
+ /** Creates two new iterators that both iterate over the same elements
+ * as this iterator (in the same order). The duplicate iterators are
+ * considered equal if they are positioned at the same element.
+ *
+ * Given that most methods on iterators will make the original iterator
+ * unfit for further use, this method provides a reliable way of calling
+ * multiple such methods on an iterator.
+ *
+ * @return a pair of iterators
+ * @note The implementation may allocate temporary storage for elements
+ * iterated by one iterator but not yet by the other.
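+ *
+ * For example (a sketch; the values are illustrative):
+ * {{{
+ * val (a, b) = Iterator(1, 2, 3).duplicate
+ * a.toList // List(1, 2, 3); the elements are buffered so `b` can replay them
+ * b.toList // List(1, 2, 3)
+ * }}}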
+ * @note Reuse: $consumesOneAndProducesTwoIterators + */ + def duplicate: (Iterator[A]^{this}, Iterator[A]^{this}) = { + val gap = new scala.collection.mutable.Queue[A] + var ahead: Iterator[A] = null // ahead is captured by Partner, so A is not recognized as parametric + class Partner extends AbstractIterator[A] { + override def knownSize: Int = self.synchronized { + val thisSize = self.knownSize + + if (this eq ahead) thisSize + else if (thisSize < 0 || gap.knownSize < 0) -1 + else thisSize + gap.knownSize + } + def hasNext: Boolean = self.synchronized { + (this ne ahead) && !gap.isEmpty || self.hasNext + } + def next(): A = self.synchronized { + if (gap.isEmpty) ahead = this + if (this eq ahead) { + val e = self.next() + gap enqueue e + e + } else gap.dequeue() + } + // to verify partnerhood we use reference equality on gap because + // type testing does not discriminate based on origin. + private def compareGap(queue: scala.collection.mutable.Queue[A]) = gap eq queue + override def hashCode = gap.hashCode() + override def equals(other: Any) = other match { + case x: Partner => x.compareGap(gap) && gap.isEmpty + case _ => super.equals(other) + } + } + (new Partner, new Partner) + } + + /** Returns this iterator with patched values. + * Patching at negative indices is the same as patching starting at 0. + * Patching at indices at or larger than the length of the original iterator appends the patch to the end. + * If more values are replaced than actually exist, the excess is ignored. + * + * @param from The start index from which to patch + * @param patchElems The iterator of patch values + * @param replaced The number of values in the original iterator that are replaced by the patch. + * @note Reuse: $consumesTwoAndProducesOneIterator + */ + def patch[B >: A](from: Int, patchElems: Iterator[B]^, replaced: Int): Iterator[B]^{this, patchElems} = + new AbstractIterator[B] { + private[this] var origElems = self + // > 0 => that many more elems from `origElems` before switching to `patchElems` + // 0 => need to drop elems from `origElems` and start using `patchElems` + // -1 => have dropped elems from `origElems`, will be using `patchElems` until it's empty + // and then using what's left of `origElems` after the drop + private[this] var state = if (from > 0) from else 0 + + // checks state and handles 0 => -1 + @inline private[this] def switchToPatchIfNeeded(): Unit = + if (state == 0) { + origElems = origElems drop replaced + state = -1 + } + + def hasNext: Boolean = { + switchToPatchIfNeeded() + origElems.hasNext || patchElems.hasNext + } + + def next(): B = { + switchToPatchIfNeeded() + if (state < 0 /* == -1 */) { + if (patchElems.hasNext) patchElems.next() + else origElems.next() + } + else { + if (origElems.hasNext) { + state -= 1 + origElems.next() + } + else { + state = -1 + patchElems.next() + } + } + } + } + + override def tapEach[U](f: A => U): Iterator[A]^{this, f} = new AbstractIterator[A] { + override def knownSize = self.knownSize + override def hasNext = self.hasNext + override def next() = { + val _next = self.next() + f(_next) + _next + } + } + + /** Converts this iterator to a string. 
+ *
+ * @return `"<iterator>"`
+ * @note Reuse: $preservesIterator
+ */
+ override def toString = "<iterator>"
+
+ @deprecated("Iterator.seq always returns the iterator itself", "2.13.0")
+ def seq: this.type = this
+}
+
+@SerialVersionUID(3L)
+object Iterator extends IterableFactory[Iterator] {
+
+ private[this] val _empty: Iterator[Nothing] = new AbstractIterator[Nothing] {
+ def hasNext = false
+ def next() = throw new NoSuchElementException("next on empty iterator")
+ override def knownSize: Int = 0
+ override protected def sliceIterator(from: Int, until: Int) = this
+ }
+
+ /** Creates a target $coll from an existing source collection
+ *
+ * @param source Source collection
+ * @tparam A the type of the collection’s elements
+ * @return a new $coll with the elements of `source`
+ */
+ override def from[A](source: IterableOnce[A]^): Iterator[A]^{source} = source.iterator
+
+ /** The iterator which produces no values. */
+ @`inline` final def empty[T]: Iterator[T] = _empty
+
+ def single[A](a: A): Iterator[A] = new AbstractIterator[A] {
+ private[this] var consumed: Boolean = false
+ def hasNext = !consumed
+ def next() = if (consumed) empty.next() else { consumed = true; a }
+ override protected def sliceIterator(from: Int, until: Int) =
+ if (consumed || from > 0 || until == 0) empty
+ else this
+ }
+
+ override def apply[A](xs: A*): Iterator[A] = xs.iterator
+
+ /**
+ * @return A builder for $Coll objects.
+ * @tparam A the type of the ${coll}’s elements
+ */
+ def newBuilder[A]: Builder[A, Iterator[A]] =
+ new ImmutableBuilder[A, Iterator[A]](empty[A]) {
+ override def addOne(elem: A): this.type = { elems = elems ++ single(elem); this }
+ }
+
+ /** Creates an iterator that produces the results of some element computation a number of times.
+ *
+ * @param len the number of elements returned by the iterator.
+ * @param elem the element computation
+ * @return An iterator that produces the results of `len` evaluations of `elem`.
+ */
+ override def fill[A](len: Int)(elem: => A): Iterator[A]^{elem} = new AbstractIterator[A] {
+ private[this] var i = 0
+ override def knownSize: Int = (len - i) max 0
+ def hasNext: Boolean = i < len
+ def next(): A =
+ if (hasNext) { i += 1; elem }
+ else empty.next()
+ }
+
+ /** Creates an iterator producing the values of a given function over a range of integer values starting from 0.
+ *
+ * @param end The number of elements returned by the iterator
+ * @param f The function computing element values
+ * @return An iterator that produces the values `f(0), ..., f(end - 1)`.
+ */
+ override def tabulate[A](end: Int)(f: Int => A): Iterator[A]^{f} = new AbstractIterator[A] {
+ private[this] var i = 0
+ override def knownSize: Int = (end - i) max 0
+ def hasNext: Boolean = i < end
+ def next(): A =
+ if (hasNext) { val result = f(i); i += 1; result }
+ else empty.next()
+ }
+
+ /** Creates an infinite-length iterator which returns successive values from some start value.
+ *
+ * @param start the start value of the iterator
+ * @return the iterator producing the infinite sequence of values `start, start + 1, start + 2, ...`
+ */
+ def from(start: Int): Iterator[Int] = from(start, 1)
+
+ /** Creates an infinite-length iterator returning values equally spaced apart.
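+ *
+ * For example (a sketch; the values are illustrative):
+ * {{{
+ * // Returns List(10, 15, 20)
+ * Iterator.from(10, 5).take(3).toList
+ * }}}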
+ *
+ * @param start the start value of the iterator
+ * @param step the increment between successive values
+ * @return the iterator producing the infinite sequence of values `start, start + 1 * step, start + 2 * step, ...`
+ */
+ def from(start: Int, step: Int): Iterator[Int] = new AbstractIterator[Int] {
+ private[this] var i = start
+ def hasNext: Boolean = true
+ def next(): Int = { val result = i; i += step; result }
+ }
+
+ /** Creates an iterator returning successive values in some integer interval.
+ *
+ * @param start the start value of the iterator
+ * @param end the end value of the iterator (the first value NOT returned)
+ * @return the iterator producing values `start, start + 1, ..., end - 1`
+ */
+ def range(start: Int, end: Int): Iterator[Int] = range(start, end, 1)
+
+ /** An iterator producing equally spaced values in some integer interval.
+ *
+ * @param start the start value of the iterator
+ * @param end the end value of the iterator (the first value NOT returned)
+ * @param step the increment value of the iterator (must be positive or negative)
+ * @return the iterator producing values `start, start + step, ...` up to, but excluding `end`
+ */
+ def range(start: Int, end: Int, step: Int): Iterator[Int] = new AbstractIterator[Int] {
+ if (step == 0) throw new IllegalArgumentException("zero step")
+ private[this] var i = start
+ private[this] var hasOverflowed = false
+ override def knownSize: Int = {
+ val size = math.ceil((end.toLong - i.toLong) / step.toDouble)
+ if (size < 0) 0
+ else if (size > Int.MaxValue) -1
+ else size.toInt
+ }
+ def hasNext: Boolean = {
+ (step <= 0 || i < end) && (step >= 0 || i > end) && !hasOverflowed
+ }
+ def next(): Int =
+ if (hasNext) {
+ val result = i
+ val nextValue = i + step
+ hasOverflowed = (step > 0) == nextValue < i
+ i = nextValue
+ result
+ }
+ else empty.next()
+ }
+
+ /** Creates an infinite iterator that repeatedly applies a given function to the previous result.
+ *
+ * @param start the start value of the iterator
+ * @param f the function that's repeatedly applied
+ * @return the iterator producing the infinite sequence of values `start, f(start), f(f(start)), ...`
+ */
+ def iterate[T](start: T)(f: T => T): Iterator[T]^{f} = new AbstractIterator[T] {
+ private[this] var first = true
+ private[this] var acc = start
+ def hasNext: Boolean = true
+ def next(): T = {
+ if (first) first = false
+ else acc = f(acc)
+
+ acc
+ }
+ }
+
+ /** Creates an Iterator that uses a function `f` to produce elements of type `A`
+ * and update an internal state of type `S`.
+ *
+ * @param init State initial value
+ * @param f Computes the next element (or returns `None` to signal
+ * the end of the collection)
+ * @tparam A Type of the elements
+ * @tparam S Type of the internal state
+ * @return an Iterator that produces elements using `f` until `f` returns `None`
+ */
+ override def unfold[A, S](init: S)(f: S => Option[(A, S)]): Iterator[A]^{f} = new UnfoldIterator(init)(f)
+
+ /** Creates an infinite-length iterator returning the results of evaluating an expression.
+ * The expression is recomputed for every element.
+ *
+ * @param elem the element computation.
+ * @return the iterator containing an infinite number of results of evaluating `elem`.
+ */
+ def continually[A](elem: => A): Iterator[A]^{elem} = new AbstractIterator[A] {
+ def hasNext = true
+ def next() = elem
+ }
+
+ /** Creates an iterator to which other iterators can be appended efficiently.
+ * Nested ConcatIterators are merged to avoid blowing the stack.
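+ *
+ * A sketch of the behaviour this backs (the fold and bound are illustrative;
+ * many chained `++` calls remain stack-safe):
+ * {{{
+ * val it = (1 to 10000).foldLeft(Iterator.empty[Int])((acc, i) => acc ++ Iterator(i))
+ * it.sum // 50005000, with no stack overflow
+ * }}}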
+ */ + private final class ConcatIterator[+A](val from: Iterator[A]^) extends AbstractIterator[A] { + private var current: Iterator[A]^{from*} = from + private var tail: ConcatIteratorCell[A @uncheckedVariance] = null + private var last: ConcatIteratorCell[A @uncheckedVariance] = null + private var currentHasNextChecked = false + + def hasNext = + if (currentHasNextChecked) true + else if (current == null) false + else if (current.hasNext) { + currentHasNextChecked = true + true + } + else { + // If we advanced the current iterator to a ConcatIterator, merge it into this one + @tailrec def merge(): Unit = + if (current.isInstanceOf[ConcatIterator[_]]) { + val c = current.asInstanceOf[ConcatIterator[A]] + current = c.current + currentHasNextChecked = c.currentHasNextChecked + if (c.tail != null) { + if (last == null) last = c.last + c.last.tail = tail + tail = c.tail + } + merge() + } + + // Advance current to the next non-empty iterator + // current is set to null when all iterators are exhausted + @tailrec def advance(): Boolean = + if (tail == null) { + current = null + last = null + false + } + else { + current = tail.headIterator + if (last eq tail) last = last.tail + tail = tail.tail + merge() + if (currentHasNextChecked) true + else if (current != null && current.hasNext) { + currentHasNextChecked = true + true + } else advance() + } + + advance() + } + + def next() = + if (hasNext) { + currentHasNextChecked = false + current.next() + } else Iterator.empty.next() + + override def concat[B >: A](that: => IterableOnce[B]^): Iterator[B]^{this, that} = { + val c: ConcatIteratorCell[A] = new ConcatIteratorCell[B](that, null).asInstanceOf + if (tail == null) { + tail = c + last = c + } + else { + last.tail = c + last = c + } + if (current == null) current = Iterator.empty + this + } + } + + private[this] final class ConcatIteratorCell[A](head: => IterableOnce[A]^, var tail: ConcatIteratorCell[A]) { + def headIterator: Iterator[A]^{this} = head.iterator // CC todo: can't use {head} as capture set, gives "cannot establish a reference" + } + + /** Creates a delegating iterator capped by a limit count. Negative limit means unbounded. + * Lazily skip to start on first evaluation. Avoids daisy-chained iterators due to slicing. 
+ */ + private[scala] final class SliceIterator[A](val underlying: Iterator[A]^, start: Int, limit: Int) extends AbstractIterator[A] { + private[this] var remaining = limit + private[this] var dropping = start + @inline private def unbounded = remaining < 0 + private def skip(): Unit = + while (dropping > 0) { + if (underlying.hasNext) { + underlying.next() + dropping -= 1 + } else + dropping = 0 + } + override def knownSize: Int = { + val size = underlying.knownSize + if (size < 0) -1 + else { + val dropSize = 0 max (size - dropping) + if (unbounded) dropSize + else remaining min dropSize + } + } + def hasNext = { skip(); remaining != 0 && underlying.hasNext } + def next() = { + skip() + if (remaining > 0) { + remaining -= 1 + underlying.next() + } + else if (unbounded) underlying.next() + else empty.next() + } + override protected def sliceIterator(from: Int, until: Int): Iterator[A]^{underlying} = { + val lo = from max 0 + def adjustedBound = + if (unbounded) -1 + else 0 max (remaining - lo) + val rest = + if (until < 0) adjustedBound // respect current bound, if any + else if (until <= lo) 0 // empty + else if (unbounded) until - lo // now finite + else adjustedBound min (until - lo) // keep lesser bound + if (rest == 0) empty + else { + dropping += lo + remaining = rest + this + } + } + } + + /** Creates an iterator that uses a function `f` to produce elements of + * type `A` and update an internal state of type `S`. + */ + private final class UnfoldIterator[A, S](init: S)(f: S => Option[(A, S)])extends AbstractIterator[A] { + private[this] var state: S = init + private[this] var nextResult: Option[(A, S)] = null + + override def hasNext: Boolean = { + if (nextResult eq null) { + nextResult = { + val res = f(state) + if (res eq null) throw new NullPointerException("null during unfold") + res + } + state = null.asInstanceOf[S] // allow GC + } + nextResult.isDefined + } + + override def next(): A = { + if (hasNext) { + val (value, newState) = nextResult.get + state = newState + nextResult = null + value + } else Iterator.empty.next() + } + } +} + +/** Explicit instantiation of the `Iterator` trait to reduce class file size in subclasses. */ +abstract class AbstractIterator[+A] extends Iterator[A]: + this: Iterator[A]^ => diff --git a/tests/pos-special/stdlib/collection/JavaConverters.scala b/tests/pos-special/stdlib/collection/JavaConverters.scala new file mode 100644 index 000000000000..69130eae1829 --- /dev/null +++ b/tests/pos-special/stdlib/collection/JavaConverters.scala @@ -0,0 +1,336 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import java.util.{concurrent => juc} +import java.{lang => jl, util => ju} + +import scala.collection.convert._ +import scala.language.implicitConversions +import language.experimental.captureChecking + +/** A variety of decorators that enable converting between + * Scala and Java collections using extension methods, `asScala` and `asJava`. + * + * The extension methods return adapters for the corresponding API. 
+ *
+ * The following conversions are supported via `asScala` and `asJava`:
+ *{{{
+ * scala.collection.Iterable <=> java.lang.Iterable
+ * scala.collection.Iterator <=> java.util.Iterator
+ * scala.collection.mutable.Buffer <=> java.util.List
+ * scala.collection.mutable.Set <=> java.util.Set
+ * scala.collection.mutable.Map <=> java.util.Map
+ * scala.collection.concurrent.Map <=> java.util.concurrent.ConcurrentMap
+ *}}}
+ * The following conversions are supported via `asScala` and through
+ * specially-named extension methods to convert to Java collections, as shown:
+ *{{{
+ * scala.collection.Iterable <=> java.util.Collection (via asJavaCollection)
+ * scala.collection.Iterator <=> java.util.Enumeration (via asJavaEnumeration)
+ * scala.collection.mutable.Map <=> java.util.Dictionary (via asJavaDictionary)
+ *}}}
+ * In addition, the following one-way conversions are provided via `asJava`:
+ *{{{
+ * scala.collection.Seq => java.util.List
+ * scala.collection.mutable.Seq => java.util.List
+ * scala.collection.Set => java.util.Set
+ * scala.collection.Map => java.util.Map
+ *}}}
+ * The following one-way conversion is provided via `asScala`:
+ *{{{
+ * java.util.Properties => scala.collection.mutable.Map
+ *}}}
+ * In all cases, converting from a source type to a target type and back
+ * again will return the original source object. For example:
+ * {{{
+ * import scala.collection.JavaConverters._
+ *
+ * val source = new scala.collection.mutable.ListBuffer[Int]
+ * val target: java.util.List[Int] = source.asJava
+ * val other: scala.collection.mutable.Buffer[Int] = target.asScala
+ * assert(source eq other)
+ * }}}
+ * Alternatively, the conversion methods have descriptive names and can be invoked explicitly.
+ * {{{
+ * scala> val vs = java.util.Arrays.asList("hi", "bye")
+ * vs: java.util.List[String] = [hi, bye]
+ *
+ * scala> val ss = asScalaIterator(vs.iterator)
+ * ss: Iterator[String] = <iterator>
+ *
+ * scala> .toList
+ * res0: List[String] = List(hi, bye)
+ *
+ * scala> val ss = asScalaBuffer(vs)
+ * ss: scala.collection.mutable.Buffer[String] = Buffer(hi, bye)
+ * }}}
+ */
+@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0")
+object JavaConverters extends AsJavaConverters with AsScalaConverters {
+ @deprecated("Use `asJava` instead", "2.13.0")
+ def asJavaIterator[A](i: Iterator[A]): ju.Iterator[A] = asJava(i)
+
+ @deprecated("Use `asJava` instead", "2.13.0")
+ def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = asJava(i)
+
+ @deprecated("Use `asJava` instead", "2.13.0")
+ def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = asJava(b)
+
+ @deprecated("Use `asJava` instead", "2.13.0")
+ def mutableSeqAsJavaList[A](s: mutable.Seq[A]): ju.List[A] = asJava(s)
+
+ @deprecated("Use `asJava` instead", "2.13.0")
+ def seqAsJavaList[A](s: Seq[A]): ju.List[A] = asJava(s)
+
+ @deprecated("Use `asJava` instead", "2.13.0")
+ def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = asJava(s)
+
+ @deprecated("Use `asJava` instead", "2.13.0")
+ def setAsJavaSet[A](s: Set[A]): ju.Set[A] = asJava(s)
+
+ @deprecated("Use `asJava` instead", "2.13.0")
+ def mutableMapAsJavaMap[K, V](m: mutable.Map[K, V]): ju.Map[K, V] = asJava(m)
+
+ @deprecated("Use `asJava` instead", "2.13.0")
+ def mapAsJavaMap[K, V](m: Map[K, V]): ju.Map[K, V] = asJava(m)
+
+ @deprecated("Use `asJava` instead", "2.13.0")
+ def mapAsJavaConcurrentMap[K, V](m: concurrent.Map[K, V]): juc.ConcurrentMap[K, V] = asJava(m)
+
+
+ @deprecated("Use `asScala` instead", "2.13.0")
+ def asScalaIterator[A](i:
ju.Iterator[A]): Iterator[A] = asScala(i) + + @deprecated("Use `asScala` instead", "2.13.0") + def enumerationAsScalaIterator[A](i: ju.Enumeration[A]): Iterator[A] = asScala(i) + + @deprecated("Use `asScala` instead", "2.13.0") + def iterableAsScalaIterable[A](i: jl.Iterable[A]): Iterable[A] = asScala(i) + + @deprecated("Use `asScala` instead", "2.13.0") + def collectionAsScalaIterable[A](i: ju.Collection[A]): Iterable[A] = asScala(i) + + @deprecated("Use `asScala` instead", "2.13.0") + def asScalaBuffer[A](l: ju.List[A]): mutable.Buffer[A] = asScala(l) + + @deprecated("Use `asScala` instead", "2.13.0") + def asScalaSet[A](s: ju.Set[A]): mutable.Set[A] = asScala(s) + + @deprecated("Use `asScala` instead", "2.13.0") + def mapAsScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = asScala(m) + + @deprecated("Use `asScala` instead", "2.13.0") + def mapAsScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = asScala(m) + + @deprecated("Use `asScala` instead", "2.13.0") + def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = asScala(p) + + @deprecated("Use `asScala` instead", "2.13.0") + def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = asScala(p) + + // Deprecated implicit conversions for code that directly imports them + + /** + * Adds an `asJava` method that implicitly converts a Scala `Iterator` to a Java `Iterator`. + * @see [[asJavaIterator]] + */ + implicit def asJavaIteratorConverter[A](i : Iterator[A]): AsJava[ju.Iterator[A]] = + new AsJava(asJavaIterator(i)) + + /** + * Adds an `asJavaEnumeration` method that implicitly converts a Scala `Iterator` to a Java `Enumeration`. + * @see [[asJavaEnumeration]] + */ + implicit def asJavaEnumerationConverter[A](i : Iterator[A]): AsJavaEnumeration[A] = + new AsJavaEnumeration(i) + + /** + * Adds an `asJava` method that implicitly converts a Scala `Iterable` to a Java `Iterable`. + * @see [[asJavaIterable]] + */ + implicit def asJavaIterableConverter[A](i : Iterable[A]): AsJava[jl.Iterable[A]] = + new AsJava(asJavaIterable(i)) + + /** + * Adds an `asJavaCollection` method that implicitly converts a Scala `Iterable` to an immutable Java `Collection`. + * @see [[asJavaCollection]] + */ + implicit def asJavaCollectionConverter[A](i : Iterable[A]): AsJavaCollection[A] = + new AsJavaCollection(i) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `Buffer` to a Java `List`. + * @see [[bufferAsJavaList]] + */ + implicit def bufferAsJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] = + new AsJava(bufferAsJavaList(b)) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `Seq` to a Java `List`. + * @see [[mutableSeqAsJavaList]] + */ + implicit def mutableSeqAsJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] = + new AsJava(mutableSeqAsJavaList(b)) + + /** + * Adds an `asJava` method that implicitly converts a Scala `Seq` to a Java `List`. + * @see [[seqAsJavaList]] + */ + implicit def seqAsJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] = + new AsJava(seqAsJavaList(b)) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `Set` to a Java `Set`. + * @see [[mutableSetAsJavaSet]] + */ + implicit def mutableSetAsJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] = + new AsJava(mutableSetAsJavaSet(s)) + + /** + * Adds an `asJava` method that implicitly converts a Scala `Set` to a Java `Set`. 
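+ *
+ * For example (a sketch; the set is illustrative):
+ * {{{
+ * import scala.collection.JavaConverters._
+ * val js: java.util.Set[Int] = Set(1, 2, 3).asJava
+ * }}}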
+ * @see [[setAsJavaSet]] + */ + implicit def setAsJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] = + new AsJava(setAsJavaSet(s)) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `Map` to a Java `Map`. + * @see [[mutableMapAsJavaMap]] + */ + implicit def mutableMapAsJavaMapConverter[K, V](m : mutable.Map[K, V]): AsJava[ju.Map[K, V]] = + new AsJava(mutableMapAsJavaMap(m)) + + /** + * Adds an `asJavaDictionary` method that implicitly converts a Scala mutable `Map` to a Java `Dictionary`. + * @see [[asJavaDictionary]] + */ + implicit def asJavaDictionaryConverter[K, V](m : mutable.Map[K, V]): AsJavaDictionary[K, V] = + new AsJavaDictionary(m) + + /** + * Adds an `asJava` method that implicitly converts a Scala `Map` to a Java `Map`. + * @see [[mapAsJavaMap]] + */ + implicit def mapAsJavaMapConverter[K, V](m : Map[K, V]): AsJava[ju.Map[K, V]] = + new AsJava(mapAsJavaMap(m)) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`. + * @see [[mapAsJavaConcurrentMap]]. + */ + implicit def mapAsJavaConcurrentMapConverter[K, V](m: concurrent.Map[K, V]): AsJava[juc.ConcurrentMap[K, V]] = + new AsJava(mapAsJavaConcurrentMap(m)) + + + /** + * Adds an `asScala` method that implicitly converts a Java `Iterator` to a Scala `Iterator`. + * @see [[asScalaIterator]] + */ + implicit def asScalaIteratorConverter[A](i : ju.Iterator[A]): AsScala[Iterator[A]] = + new AsScala(asScalaIterator(i)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Enumeration` to a Scala `Iterator`. + * @see [[enumerationAsScalaIterator]] + */ + implicit def enumerationAsScalaIteratorConverter[A](i : ju.Enumeration[A]): AsScala[Iterator[A]] = + new AsScala(enumerationAsScalaIterator(i)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Iterable` to a Scala `Iterable`. + * @see [[iterableAsScalaIterable]] + */ + implicit def iterableAsScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] = + new AsScala(iterableAsScalaIterable(i)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Collection` to an Scala `Iterable`. + * @see [[collectionAsScalaIterable]] + */ + implicit def collectionAsScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] = + new AsScala(collectionAsScalaIterable(i)) + + /** + * Adds an `asScala` method that implicitly converts a Java `List` to a Scala mutable `Buffer`. + * @see [[asScalaBuffer]] + */ + implicit def asScalaBufferConverter[A](l : ju.List[A]): AsScala[mutable.Buffer[A]] = + new AsScala(asScalaBuffer(l)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Set` to a Scala mutable `Set`. + * @see [[asScalaSet]] + */ + implicit def asScalaSetConverter[A](s : ju.Set[A]): AsScala[mutable.Set[A]] = + new AsScala(asScalaSet(s)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Map` to a Scala mutable `Map`. + * @see [[mapAsScalaMap]] + */ + implicit def mapAsScalaMapConverter[K, V](m : ju.Map[K, V]): AsScala[mutable.Map[K, V]] = + new AsScala(mapAsScalaMap(m)) + + /** + * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap` to a Scala mutable `concurrent.Map`. + * @see [[mapAsScalaConcurrentMap]] + */ + implicit def mapAsScalaConcurrentMapConverter[K, V](m: juc.ConcurrentMap[K, V]): AsScala[concurrent.Map[K, V]] = + new AsScala(mapAsScalaConcurrentMap(m)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Dictionary` to a Scala mutable `Map`. 
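+ *
+ * For example (a sketch; the `Hashtable` is illustrative):
+ * {{{
+ * import scala.collection.JavaConverters._
+ * val d: java.util.Dictionary[String, Int] = new java.util.Hashtable()
+ * val m = d.asScala // scala.collection.mutable.Map[String, Int]
+ * }}}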
+ * @see [[dictionaryAsScalaMap]] + */ + implicit def dictionaryAsScalaMapConverter[K, V](p: ju.Dictionary[K, V]): AsScala[mutable.Map[K, V]] = + new AsScala(dictionaryAsScalaMap(p)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Properties` to a Scala mutable `Map[String, String]`. + * @see [[propertiesAsScalaMap]] + */ + implicit def propertiesAsScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] = + new AsScala(propertiesAsScalaMap(p)) + + + /** Generic class containing the `asJava` converter method */ + class AsJava[A](op: => A) { + /** Converts a Scala collection to the corresponding Java collection */ + def asJava: A = op + } + + /** Generic class containing the `asScala` converter method */ + class AsScala[A](op: => A) { + /** Converts a Java collection to the corresponding Scala collection */ + def asScala: A = op + } + + /** Generic class containing the `asJavaCollection` converter method */ + class AsJavaCollection[A](i: Iterable[A]) { + /** Converts a Scala `Iterable` to a Java `Collection` */ + def asJavaCollection: ju.Collection[A] = JavaConverters.asJavaCollection(i) + } + + /** Generic class containing the `asJavaEnumeration` converter method */ + class AsJavaEnumeration[A](i: Iterator[A]) { + /** Converts a Scala `Iterator` to a Java `Enumeration` */ + def asJavaEnumeration: ju.Enumeration[A] = JavaConverters.asJavaEnumeration(i) + } + + /** Generic class containing the `asJavaDictionary` converter method */ + class AsJavaDictionary[K, V](m : mutable.Map[K, V]) { + /** Converts a Scala `Map` to a Java `Dictionary` */ + def asJavaDictionary: ju.Dictionary[K, V] = JavaConverters.asJavaDictionary(m) + } +} diff --git a/tests/pos-special/stdlib/collection/LazyZipOps.scala b/tests/pos-special/stdlib/collection/LazyZipOps.scala new file mode 100644 index 000000000000..1bb4173d219f --- /dev/null +++ b/tests/pos-special/stdlib/collection/LazyZipOps.scala @@ -0,0 +1,423 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.language.implicitConversions +import language.experimental.captureChecking + +/** Decorator representing lazily zipped pairs. + * + * @define coll pair + * @define willNotTerminateInf + * + * Note: will not terminate for infinite-sized collections. + */ +final class LazyZip2[+El1, +El2, C1] private[collection](src: C1, coll1: Iterable[El1]^, coll2: Iterable[El2]^) { + + /** Zips `that` iterable collection with an existing `LazyZip2`. The elements in each collection are + * not consumed until a strict operation is invoked on the returned `LazyZip3` decorator. + * + * @param that the iterable providing the third element of each eventual triple + * @tparam B the type of the third element in each eventual triple + * @return a decorator `LazyZip3` that allows strict operations to be performed on the lazily evaluated tuples or + * chained calls to `lazyZip`. Implicit conversion to `Iterable[(El1, El2, B)]` is also supported. 
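+ *
+ * For example (a sketch; the lists are illustrative):
+ * {{{
+ * // Returns List(111, 222): no intermediate collection of pairs is built
+ * (List(1, 2) lazyZip List(10, 20) lazyZip List(100, 200)).map(_ + _ + _)
+ * }}}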
+ */ + def lazyZip[B](that: Iterable[B]^): LazyZip3[El1, El2, B, C1]^{this, that} = new LazyZip3(src, coll1, coll2, that) + + def map[B, C](f: (El1, El2) => B)(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + def hasNext = elems1.hasNext && elems2.hasNext + def next() = f(elems1.next(), elems2.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty + }) + } + + def flatMap[B, C](f: (El1, El2) => Iterable[B])(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] var _current: Iterator[B] = Iterator.empty + private def current = { + while (!_current.hasNext && elems1.hasNext && elems2.hasNext) + _current = f(elems1.next(), elems2.next()).iterator + _current + } + def hasNext = current.hasNext + def next() = current.next() + } + override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty + }) + } + + def filter[C](p: (El1, El2) => Boolean)(implicit bf: BuildFrom[C1, (El1, El2), C]): C = { + bf.fromSpecific(src)(new AbstractView[(El1, El2)] { + def iterator = new AbstractIterator[(El1, El2)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] var _current: (El1, El2) = _ + private def current = { + while ((_current eq null) && elems1.hasNext && elems2.hasNext) { + val e1 = elems1.next() + val e2 = elems2.next() + if (p(e1, e2)) _current = (e1, e2) + } + _current + } + def hasNext = current ne null + def next() = { + val c = current + if (c ne null) { + _current = null + c + } else Iterator.empty.next() + } + } + override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.hasNext + }) + } + + def exists(p: (El1, El2) => Boolean): Boolean = { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + var res = false + + while (!res && elems1.hasNext && elems2.hasNext) res = p(elems1.next(), elems2.next()) + + res + } + + def forall(p: (El1, El2) => Boolean): Boolean = !exists((el1, el2) => !p(el1, el2)) + + def foreach[U](f: (El1, El2) => U): Unit = { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + + while (elems1.hasNext && elems2.hasNext) f(elems1.next(), elems2.next()) + } + + private def toIterable: View[(El1, El2)] = new AbstractView[(El1, El2)] { + def iterator = new AbstractIterator[(El1, El2)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + def hasNext = elems1.hasNext && elems2.hasNext + def next() = (elems1.next(), elems2.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty + } + + private def zipKnownSize: Int = { + val s1 = coll1.knownSize + if (s1 == 0) 0 else { + val s2 = coll2.knownSize + if (s2 == 0) 0 else s1 min s2 + } + } + + override def toString = s"$coll1.lazyZip($coll2)" +} + +object LazyZip2 { + implicit def lazyZip2ToIterable[El1, El2](zipped2: LazyZip2[El1, El2, _]): View[(El1, El2)] = zipped2.toIterable +} + + +/** Decorator representing lazily zipped triples. 
+ * + * @define coll triple + * @define willNotTerminateInf + * + * Note: will not terminate for infinite-sized collections. + */ +final class LazyZip3[+El1, +El2, +El3, C1] private[collection](src: C1, + coll1: Iterable[El1]^, + coll2: Iterable[El2]^, + coll3: Iterable[El3]^) { + + /** Zips `that` iterable collection with an existing `LazyZip3`. The elements in each collection are + * not consumed until a strict operation is invoked on the returned `LazyZip4` decorator. + * + * @param that the iterable providing the fourth element of each eventual 4-tuple + * @tparam B the type of the fourth element in each eventual 4-tuple + * @return a decorator `LazyZip4` that allows strict operations to be performed on the lazily evaluated tuples. + * Implicit conversion to `Iterable[(El1, El2, El3, B)]` is also supported. + */ + def lazyZip[B](that: Iterable[B]^): LazyZip4[El1, El2, El3, B, C1]^{this, that} = new LazyZip4(src, coll1, coll2, coll3, that) + + def map[B, C](f: (El1, El2, El3) => B)(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext + def next() = f(elems1.next(), elems2.next(), elems3.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty + }) + } + + def flatMap[B, C](f: (El1, El2, El3) => Iterable[B])(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] var _current: Iterator[B] = Iterator.empty + private def current = { + while (!_current.hasNext && elems1.hasNext && elems2.hasNext && elems3.hasNext) + _current = f(elems1.next(), elems2.next(), elems3.next()).iterator + _current + } + def hasNext = current.hasNext + def next() = current.next() + } + override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + }) + } + + def filter[C](p: (El1, El2, El3) => Boolean)(implicit bf: BuildFrom[C1, (El1, El2, El3), C]): C = { + bf.fromSpecific(src)(new AbstractView[(El1, El2, El3)] { + def iterator = new AbstractIterator[(El1, El2, El3)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] var _current: (El1, El2, El3) = _ + private def current = { + while ((_current eq null) && elems1.hasNext && elems2.hasNext && elems3.hasNext) { + val e1 = elems1.next() + val e2 = elems2.next() + val e3 = elems3.next() + if (p(e1, e2, e3)) _current = (e1, e2, e3) + } + _current + } + def hasNext = current ne null + def next() = { + val c = current + if (c ne null) { + _current = null + c + } else Iterator.empty.next() + } + } + override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + }) + } + + def exists(p: (El1, El2, El3) => Boolean): Boolean = { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + val elems3 = coll3.iterator + var res = false + + while (!res && elems1.hasNext && 
elems2.hasNext && elems3.hasNext) + res = p(elems1.next(), elems2.next(), elems3.next()) + + res + } + + def forall(p: (El1, El2, El3) => Boolean): Boolean = !exists((el1, el2, el3) => !p(el1, el2, el3)) + + def foreach[U](f: (El1, El2, El3) => U): Unit = { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + val elems3 = coll3.iterator + + while (elems1.hasNext && elems2.hasNext && elems3.hasNext) + f(elems1.next(), elems2.next(), elems3.next()) + } + + private def toIterable: View[(El1, El2, El3)] = new AbstractView[(El1, El2, El3)] { + def iterator = new AbstractIterator[(El1, El2, El3)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext + def next() = (elems1.next(), elems2.next(), elems3.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty + } + + private def zipKnownSize: Int = { + val s1 = coll1.knownSize + if (s1 == 0) 0 else { + val s2 = coll2.knownSize + if (s2 == 0) 0 else { + val s3 = coll3.knownSize + if (s3 == 0) 0 else s1 min s2 min s3 + } + } + } + + override def toString = s"$coll1.lazyZip($coll2).lazyZip($coll3)" +} + +object LazyZip3 { + implicit def lazyZip3ToIterable[El1, El2, El3](zipped3: LazyZip3[El1, El2, El3, _]): View[(El1, El2, El3)] = zipped3.toIterable +} + + + +/** Decorator representing lazily zipped 4-tuples. + * + * @define coll tuple + * @define willNotTerminateInf + * + * Note: will not terminate for infinite-sized collections. + */ +final class LazyZip4[+El1, +El2, +El3, +El4, C1] private[collection](src: C1, + coll1: Iterable[El1]^, + coll2: Iterable[El2]^, + coll3: Iterable[El3]^, + coll4: Iterable[El4]^) { + + def map[B, C](f: (El1, El2, El3, El4) => B)(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] val elems4 = coll4.iterator + def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext + def next() = f(elems1.next(), elems2.next(), elems3.next(), elems4.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty || coll4.isEmpty + }) + } + + def flatMap[B, C](f: (El1, El2, El3, El4) => Iterable[B])(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] val elems4 = coll4.iterator + private[this] var _current: Iterator[B] = Iterator.empty + private def current = { + while (!_current.hasNext && elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) + _current = f(elems1.next(), elems2.next(), elems3.next(), elems4.next()).iterator + _current + } + def hasNext = current.hasNext + def next() = current.next() + } + override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0 || coll4.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + }) + } + + def filter[C](p: (El1, El2, El3, El4) => Boolean)(implicit bf: BuildFrom[C1, (El1, El2, El3, El4), C]): C = { + 
bf.fromSpecific(src)(new AbstractView[(El1, El2, El3, El4)] { + def iterator = new AbstractIterator[(El1, El2, El3, El4)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] val elems4 = coll4.iterator + private[this] var _current: (El1, El2, El3, El4) = _ + private def current = { + while ((_current eq null) && elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) { + val e1 = elems1.next() + val e2 = elems2.next() + val e3 = elems3.next() + val e4 = elems4.next() + if (p(e1, e2, e3, e4)) _current = (e1, e2, e3, e4) + } + _current + } + def hasNext = current ne null + def next() = { + val c = current + if (c ne null) { + _current = null + c + } else Iterator.empty.next() + } + } + override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0 || coll4.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + }) + } + + def exists(p: (El1, El2, El3, El4) => Boolean): Boolean = { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + val elems3 = coll3.iterator + val elems4 = coll4.iterator + var res = false + + while (!res && elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) + res = p(elems1.next(), elems2.next(), elems3.next(), elems4.next()) + + res + } + + def forall(p: (El1, El2, El3, El4) => Boolean): Boolean = !exists((el1, el2, el3, el4) => !p(el1, el2, el3, el4)) + + def foreach[U](f: (El1, El2, El3, El4) => U): Unit = { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + val elems3 = coll3.iterator + val elems4 = coll4.iterator + + while (elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) + f(elems1.next(), elems2.next(), elems3.next(), elems4.next()) + } + + private def toIterable: View[(El1, El2, El3, El4)] = new AbstractView[(El1, El2, El3, El4)] { + def iterator = new AbstractIterator[(El1, El2, El3, El4)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] val elems4 = coll4.iterator + def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext + def next() = (elems1.next(), elems2.next(), elems3.next(), elems4.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty || coll4.isEmpty + } + + private def zipKnownSize: Int = { + val s1 = coll1.knownSize + if (s1 == 0) 0 else { + val s2 = coll2.knownSize + if (s2 == 0) 0 else { + val s3 = coll3.knownSize + if (s3 == 0) 0 else { + val s4 = coll4.knownSize + if (s4 == 0) 0 else s1 min s2 min s3 min s4 + } + } + } + } + + override def toString = s"$coll1.lazyZip($coll2).lazyZip($coll3).lazyZip($coll4)" +} + +object LazyZip4 { + implicit def lazyZip4ToIterable[El1, El2, El3, El4](zipped4: LazyZip4[El1, El2, El3, El4, _]): View[(El1, El2, El3, El4)] = + zipped4.toIterable +} diff --git a/tests/pos-special/stdlib/collection/LinearSeq.scala b/tests/pos-special/stdlib/collection/LinearSeq.scala new file mode 100644 index 000000000000..393f5fda4187 --- /dev/null +++ b/tests/pos-special/stdlib/collection/LinearSeq.scala @@ -0,0 +1,311 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.annotation.{nowarn, tailrec} +import language.experimental.captureChecking + +/** Base trait for linearly accessed sequences that have efficient `head` and + * `tail` operations. + * Known subclasses: List, LazyList + */ +trait LinearSeq[+A] extends Seq[A] + with LinearSeqOps[A, LinearSeq, LinearSeq[A]] + with IterableFactoryDefaults[A, LinearSeq] { + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "LinearSeq" + + override def iterableFactory: SeqFactory[LinearSeq] = LinearSeq +} + +@SerialVersionUID(3L) +object LinearSeq extends SeqFactory.Delegate[LinearSeq](immutable.LinearSeq) + +/** Base trait for linear Seq operations */ +trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeqOps[A, CC, C]] extends AnyRef with SeqOps[A, CC, C] { + + /** @inheritdoc + * + * Note: *Must* be overridden in subclasses. The default implementation that is inherited from [[SeqOps]] + * uses `lengthCompare`, which is defined here to use `isEmpty`. + */ + override def isEmpty: Boolean + + /** @inheritdoc + * + * Note: *Must* be overridden in subclasses. The default implementation is inherited from [[IterableOps]]. + */ + def head: A + + /** @inheritdoc + * + * Note: *Must* be overridden in subclasses. The default implementation is inherited from [[IterableOps]]. + */ + def tail: C + + override def headOption: Option[A] = + if (isEmpty) None else Some(head) + + def iterator: Iterator[A] = + if (knownSize == 0) Iterator.empty + else new LinearSeqIterator[A](this) + + def length: Int = { + var these = coll + var len = 0 + while (these.nonEmpty) { + len += 1 + these = these.tail + } + len + } + + override def last: A = { + if (isEmpty) throw new NoSuchElementException("LinearSeq.last") + else { + var these = coll + var scout = tail + while (scout.nonEmpty) { + these = scout + scout = scout.tail + } + these.head + } + } + + override def lengthCompare(len: Int): Int = { + @tailrec def loop(i: Int, xs: LinearSeq[A]): Int = { + if (i == len) + if (xs.isEmpty) 0 else 1 + else if (xs.isEmpty) + -1 + else + loop(i + 1, xs.tail) + } + if (len < 0) 1 + else loop(0, coll) + } + + override def lengthCompare(that: Iterable[_]^): Int = { + val thatKnownSize = that.knownSize + + if (thatKnownSize >= 0) this lengthCompare thatKnownSize + else that match { + case that: LinearSeq[_] => + var thisSeq = this + var thatSeq = that + while (thisSeq.nonEmpty && thatSeq.nonEmpty) { + thisSeq = thisSeq.tail + thatSeq = thatSeq.tail + } + java.lang.Boolean.compare(thisSeq.nonEmpty, thatSeq.nonEmpty) + case _ => + var thisSeq = this + val thatIt = that.iterator + while (thisSeq.nonEmpty && thatIt.hasNext) { + thisSeq = thisSeq.tail + thatIt.next() + } + java.lang.Boolean.compare(thisSeq.nonEmpty, thatIt.hasNext) + } + } + + override def isDefinedAt(x: Int): Boolean = x >= 0 && lengthCompare(x) > 0 + + // `apply` is defined in terms of `drop`, which is in turn defined in + // terms of `tail`. 
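+ // For example, xs(2) walks two `tail` steps and then takes `head`:
+ //   LinearSeq(1, 2, 3)(2) == LinearSeq(1, 2, 3).drop(2).head // == 3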
+ @throws[IndexOutOfBoundsException] + override def apply(n: Int): A = { + if (n < 0) throw new IndexOutOfBoundsException(n.toString) + val skipped = drop(n) + if (skipped.isEmpty) throw new IndexOutOfBoundsException(n.toString) + skipped.head + } + + override def foreach[U](f: A => U): Unit = { + var these: LinearSeq[A] = coll + while (!these.isEmpty) { + f(these.head) + these = these.tail + } + } + + override def forall(p: A => Boolean): Boolean = { + var these: LinearSeq[A] = coll + while (!these.isEmpty) { + if (!p(these.head)) return false + these = these.tail + } + true + } + + override def exists(p: A => Boolean): Boolean = { + var these: LinearSeq[A] = coll + while (!these.isEmpty) { + if (p(these.head)) return true + these = these.tail + } + false + } + + override def contains[A1 >: A](elem: A1): Boolean = { + var these: LinearSeq[A] = coll + while (!these.isEmpty) { + if (these.head == elem) return true + these = these.tail + } + false + } + + override def find(p: A => Boolean): Option[A] = { + var these: LinearSeq[A] = coll + while (!these.isEmpty) { + if (p(these.head)) return Some(these.head) + these = these.tail + } + None + } + + override def foldLeft[B](z: B)(op: (B, A) => B): B = { + var acc = z + var these: LinearSeq[A] = coll + while (!these.isEmpty) { + acc = op(acc, these.head) + these = these.tail + } + acc + } + + override def sameElements[B >: A](that: IterableOnce[B]^): Boolean = { + @tailrec def linearSeqEq(a: LinearSeq[B], b: LinearSeq[B]): Boolean = + (a eq b) || { + if (a.nonEmpty && b.nonEmpty && a.head == b.head) { + linearSeqEq(a.tail, b.tail) + } + else { + a.isEmpty && b.isEmpty + } + } + + that match { + case that: LinearSeq[B] => linearSeqEq(coll, that) + case _ => super.sameElements(that) + } + } + + override def segmentLength(p: A => Boolean, from: Int): Int = { + var i = 0 + var seq = drop(from) + while (seq.nonEmpty && p(seq.head)) { + i += 1 + seq = seq.tail + } + i + } + + override def indexWhere(p: A => Boolean, from: Int): Int = { + var i = math.max(from, 0) + var these: LinearSeq[A] = this drop from + while (these.nonEmpty) { + if (p(these.head)) + return i + + i += 1 + these = these.tail + } + -1 + } + + override def lastIndexWhere(p: A => Boolean, end: Int): Int = { + var i = 0 + var these: LinearSeq[A] = coll + var last = -1 + while (!these.isEmpty && i <= end) { + if (p(these.head)) last = i + these = these.tail + i += 1 + } + last + } + + override def findLast(p: A => Boolean): Option[A] = { + var these: LinearSeq[A] = coll + var found = false + var last: A = null.asInstanceOf[A] // don't use `Option`, to prevent excessive `Some` allocation + while (these.nonEmpty) { + val elem = these.head + if (p(elem)) { + found = true + last = elem + } + these = these.tail + } + if (found) Some(last) else None + } + + override def tails: Iterator[C] = { + val end = Iterator.single(empty) + Iterator.iterate(coll)(_.tail).takeWhile(_.nonEmpty) ++ end + } +} + +trait StrictOptimizedLinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with StrictOptimizedLinearSeqOps[A, CC, C]] extends AnyRef with LinearSeqOps[A, CC, C] with StrictOptimizedSeqOps[A, CC, C] { + // A more efficient iterator implementation than the default LinearSeqIterator + override def iterator: Iterator[A] = new AbstractIterator[A] { + private[this] var current = StrictOptimizedLinearSeqOps.this + def hasNext = !current.isEmpty + def next() = { val r = current.head; current = current.tail; r } + } + + // Optimized version of `drop` that avoids copying + override def drop(n: Int): C 
= { + @tailrec def loop(n: Int, s: C): C = + if (n <= 0 || s.isEmpty) s + else loop(n - 1, s.tail) + loop(n, coll) + } + + override def dropWhile(p: A => Boolean): C = { + @tailrec def loop(s: C): C = + if (s.nonEmpty && p(s.head)) loop(s.tail) + else s + loop(coll) + } +} + +/** A specialized Iterator for LinearSeqs that is lazy enough for Stream and LazyList. This is accomplished by not + * evaluating the tail after returning the current head. + */ +private[collection] final class LinearSeqIterator[A](coll: LinearSeqOps[A, LinearSeq, LinearSeq[A]]) extends AbstractIterator[A] { + // A call-by-need cell + private[this] final class LazyCell(st: => LinearSeqOps[A, LinearSeq, LinearSeq[A]]) { lazy val v = st } + + private[this] var these: LazyCell = { + // Reassign reference to avoid creating a private class field and holding a reference to the head. + // LazyCell would otherwise close over `coll`. + val initialHead = coll + new LazyCell(initialHead) + } + + def hasNext: Boolean = these.v.nonEmpty + + def next(): A = + if (isEmpty) Iterator.empty.next() + else { + val cur = these.v + val result = cur.head + these = new LazyCell(cur.tail) + result + } +} diff --git a/tests/pos-special/stdlib/collection/Map.scala b/tests/pos-special/stdlib/collection/Map.scala new file mode 100644 index 000000000000..8ab25a3c13e0 --- /dev/null +++ b/tests/pos-special/stdlib/collection/Map.scala @@ -0,0 +1,409 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.annotation.nowarn +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable.StringBuilder +import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure + +/** Base Map type */ +trait Map[K, +V] + extends Iterable[(K, V)] + with MapOps[K, V, Map, Map[K, V]] + with MapFactoryDefaults[K, V, Map, Iterable] + with Equals + with Pure { + + def mapFactory: scala.collection.MapFactory[Map] = Map + + def canEqual(that: Any): Boolean = true + + /** + * Equality of maps is implemented using the lookup method [[get]]. This method returns `true` if + * - the argument `o` is a `Map`, + * - the two maps have the same [[size]], and + * - for every `(key, value)` pair in this map, `other.get(key) == Some(value)`. + * + * The implementation of `equals` checks the [[canEqual]] method, so subclasses of `Map` can narrow down the equality + * to specific map types. The `Map` implementations in the standard library can all be compared, their `canEqual` + * methods return `true`. + * + * Note: The `equals` method only respects the equality laws (symmetry, transitivity) if the two maps use the same + * key equivalence function in their lookup operation. For example, the key equivalence operation in a + * [[scala.collection.immutable.TreeMap]] is defined by its ordering. Comparing a `TreeMap` with a `HashMap` leads + * to unexpected results if `ordering.equiv(k1, k2)` (used for lookup in `TreeMap`) is different from `k1 == k2` + * (used for lookup in `HashMap`). 
+ * + * {{{ + * scala> import scala.collection.immutable._ + * scala> val ord: Ordering[String] = _ compareToIgnoreCase _ + * + * scala> TreeMap("A" -> 1)(ord) == HashMap("a" -> 1) + * val res0: Boolean = false + * + * scala> HashMap("a" -> 1) == TreeMap("A" -> 1)(ord) + * val res1: Boolean = true + * }}} + * + * + * @param o The map to which this map is compared + * @return `true` if the two maps are equal according to the description + */ + override def equals(o: Any): Boolean = + (this eq o.asInstanceOf[AnyRef]) || (o match { + case map: Map[K @unchecked, _] if map.canEqual(this) => + (this.size == map.size) && { + try this.forall(kv => map.getOrElse(kv._1, Map.DefaultSentinelFn()) == kv._2) + catch { case _: ClassCastException => false } // PR #9565 / scala/bug#12228 + } + case _ => + false + }) + + override def hashCode(): Int = MurmurHash3.mapHash(this) + + // These two methods are not in MapOps so that MapView is not forced to implement them + @deprecated("Use - or removed on an immutable Map", "2.13.0") + def - (key: K): Map[K, V] + @deprecated("Use -- or removedAll on an immutable Map", "2.13.0") + def - (key1: K, key2: K, keys: K*): Map[K, V] + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "Map" + + override def toString(): String = super[Iterable].toString() // Because `Function1` overrides `toString` too +} + +/** Base Map implementation type + * + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC type constructor of the map (e.g. `HashMap`). Operations returning a collection + * with a different type of entries `(L, W)` (e.g. `map`) return a `CC[L, W]`. + * @tparam C type of the map (e.g. `HashMap[Int, String]`). Operations returning a collection + * with the same type of element (e.g. `drop`, `filter`) return a `C`. + * @define coll map + * @define Coll `Map` + */ +// Note: the upper bound constraint on CC is useful only to +// erase CC to IterableOps instead of Object +trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] + extends IterableOps[(K, V), Iterable, C] + with PartialFunction[K, V] { + this: MapOps[K, V, CC, C]^ => + + override def view: MapView[K, V]^{this} = new MapView.Id(this) + + /** Returns a [[Stepper]] for the keys of this map. See method [[stepper]]. */ + def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntIteratorStepper (keysIterator.asInstanceOf[Iterator[Int]]) + case StepperShape.LongShape => new LongIteratorStepper (keysIterator.asInstanceOf[Iterator[Long]]) + case StepperShape.DoubleShape => new DoubleIteratorStepper(keysIterator.asInstanceOf[Iterator[Double]]) + case _ => shape.seqUnbox(new AnyIteratorStepper(keysIterator)) + } + s.asInstanceOf[S] + } + + /** Returns a [[Stepper]] for the values of this map. See method [[stepper]]. 
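+ *
+ * For example, one way to step over the values:
+ * {{{
+ * val st = Map(1 -> "a", 2 -> "b").valueStepper
+ * while (st.hasStep) println(st.nextStep())
+ * }}}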
*/ + def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntIteratorStepper (valuesIterator.asInstanceOf[Iterator[Int]]) + case StepperShape.LongShape => new LongIteratorStepper (valuesIterator.asInstanceOf[Iterator[Long]]) + case StepperShape.DoubleShape => new DoubleIteratorStepper(valuesIterator.asInstanceOf[Iterator[Double]]) + case _ => shape.seqUnbox(new AnyIteratorStepper(valuesIterator)) + } + s.asInstanceOf[S] + } + + /** Similar to `fromIterable`, but returns a Map collection type. + * Note that the return type is now `CC[K2, V2]`. + */ + @`inline` protected final def mapFromIterable[K2, V2](it: Iterable[(K2, V2)]^): CC[K2, V2] = mapFactory.from(it) + + /** The companion object of this map, providing various factory methods. + * + * @note When implementing a custom collection type and refining `CC` to the new type, this + * method needs to be overridden to return a factory for the new type (the compiler will + * issue an error otherwise). + */ + def mapFactory: MapFactory[CC] + + /** Optionally returns the value associated with a key. + * + * @param key the key value + * @return an option value containing the value associated with `key` in this map, + * or `None` if none exists. + */ + def get(key: K): Option[V] + + /** Returns the value associated with a key, or a default value if the key is not contained in the map. + * @param key the key. + * @param default a computation that yields a default value in case no binding for `key` is + * found in the map. + * @tparam V1 the result type of the default computation. + * @return the value associated with `key` if it exists, + * otherwise the result of the `default` computation. + */ + def getOrElse[V1 >: V](key: K, default: => V1): V1 = get(key) match { + case Some(v) => v + case None => default + } + + /** Retrieves the value which is associated with the given key. This + * method invokes the `default` method of the map if there is no mapping + * from the given key to a value. Unless overridden, the `default` method throws a + * `NoSuchElementException`. + * + * @param key the key + * @return the value associated with the given key, or the result of the + * map's `default` method, if none exists. + */ + @throws[NoSuchElementException] + def apply(key: K): V = get(key) match { + case None => default(key) + case Some(value) => value + } + + override /*PartialFunction*/ def applyOrElse[K1 <: K, V1 >: V](x: K1, default: K1 => V1): V1 = getOrElse(x, default(x)) + + /** Collects all keys of this map in a set. + * @return a set containing all keys of this map. + */ + def keySet: Set[K] = new KeySet + + /** The implementation class of the set returned by `keySet`. + */ + protected class KeySet extends AbstractSet[K] with GenKeySet with DefaultSerializable { + def diff(that: Set[K]): Set[K] = fromSpecific(this.view.filterNot(that)) + } + + /** A generic trait that is reused by keyset implementations */ + protected trait GenKeySet { this: Set[K] => + def iterator: Iterator[K] = MapOps.this.keysIterator + def contains(key: K): Boolean = MapOps.this.contains(key) + override def size: Int = MapOps.this.size + override def knownSize: Int = MapOps.this.knownSize + override def isEmpty: Boolean = MapOps.this.isEmpty + } + + /** Collects all keys of this map in an iterable collection. + * + * @return the keys of this map as an iterable. 
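+ *
+ * For example:
+ * {{{
+ * Map("a" -> 1, "b" -> 2).keys // Set(a, b)
+ * }}}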
+ */ + def keys: Iterable[K] = keySet + + /** Collects all values of this map in an iterable collection. + * + * @return the values of this map as an iterable. + */ + def values: Iterable[V] = new AbstractIterable[V] with DefaultSerializable { + override def knownSize: Int = MapOps.this.knownSize + override def iterator: Iterator[V] = valuesIterator + } + + /** Creates an iterator for all keys. + * + * @return an iterator over all keys. + */ + def keysIterator: Iterator[K] = new AbstractIterator[K] { + val iter = MapOps.this.iterator + def hasNext = iter.hasNext + def next() = iter.next()._1 + } + + /** Creates an iterator for all values in this map. + * + * @return an iterator over all values that are associated with some key in this map. + */ + def valuesIterator: Iterator[V] = new AbstractIterator[V] { + val iter = MapOps.this.iterator + def hasNext = iter.hasNext + def next() = iter.next()._2 + } + + /** Apply `f` to each key/value pair for its side effects + * Note: [U] parameter needed to help scalac's type inference. + */ + def foreachEntry[U](f: (K, V) => U): Unit = { + val it = iterator + while (it.hasNext) { + val next = it.next() + f(next._1, next._2) + } + } + + /** Filters this map by retaining only keys satisfying a predicate. + * @param p the predicate used to test keys + * @return an immutable map consisting only of those key value pairs of this map where the key satisfies + * the predicate `p`. The resulting map wraps the original map without copying any elements. + */ + @deprecated("Use .view.filterKeys(f). A future version will include a strict version of this method (for now, .view.filterKeys(p).toMap).", "2.13.0") + def filterKeys(p: K => Boolean): MapView[K, V]^{this, p} = new MapView.FilterKeys(this, p) + + /** Transforms this map by applying a function to every retrieved value. + * @param f the function used to transform values of this map. + * @return a map view which maps every key of this map + * to `f(this(key))`. The resulting map wraps the original map without copying any elements. + */ + @deprecated("Use .view.mapValues(f). A future version will include a strict version of this method (for now, .view.mapValues(f).toMap).", "2.13.0") + def mapValues[W](f: V => W): MapView[K, W]^{this, f} = new MapView.MapValues(this, f) + + /** Defines the default value computation for the map, + * returned when a key is not found + * The method implemented here throws an exception, + * but it might be overridden in subclasses. + * + * @param key the given key value for which a binding is missing. + * @throws NoSuchElementException + */ + @throws[NoSuchElementException] + def default(key: K): V = + throw new NoSuchElementException("key not found: " + key) + + /** Tests whether this map contains a binding for a key. + * + * @param key the key + * @return `true` if there is a binding for `key` in this map, `false` otherwise. + */ + def contains(key: K): Boolean = get(key).isDefined + + + /** Tests whether this map contains a binding for a key. This method, + * which implements an abstract method of trait `PartialFunction`, + * is equivalent to `contains`. + * + * @param key the key + * @return `true` if there is a binding for `key` in this map, `false` otherwise. + */ + def isDefinedAt(key: K): Boolean = contains(key) + + /** Builds a new map by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. 
+ * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. + */ + def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = mapFactory.from(new View.Map(this, f)) + + /** Builds a new collection by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @tparam K2 the key type of the returned $coll. + * @tparam V2 the value type of the returned $coll. + * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. + */ + def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]): CC[K2, V2] = + mapFactory.from(new View.Collect(this, pf)) + + /** Builds a new map by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. + */ + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2] = mapFactory.from(new View.FlatMap(this, f)) + + /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the + * right hand operand. The element type of the $coll is the most specific superclass encompassing + * the element types of the two operands. + * + * @param suffix the iterable to append. + * @return a new $coll which contains all elements + * of this $coll followed by all elements of `suffix`. + */ + def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]^): CC[K, V2] = mapFactory.from(suffix match { + case it: Iterable[(K, V2)] => new View.Concat(this, it) + case _ => iterator.concat(suffix.iterator) + }) + + // Not final because subclasses refine the result type, e.g. 
in SortedMap, the result type is + // SortedMap's CC, while Map's CC is fixed to Map + /** Alias for `concat` */ + /*@`inline` final*/ def ++ [V2 >: V](xs: collection.IterableOnce[(K, V2)]^): CC[K, V2] = concat(xs) + + override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = + iterator.map { case (k, v) => s"$k -> $v" }.addString(sb, start, sep, end) + + @deprecated("Consider requiring an immutable Map or fall back to Map.concat.", "2.13.0") + def + [V1 >: V](kv: (K, V1)): CC[K, V1] = + mapFactory.from(new View.Appended(this, kv)) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = + mapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems)) + + @deprecated("Consider requiring an immutable Map.", "2.13.0") + @`inline` def -- (keys: IterableOnce[K]^): C = { + lazy val keysSet = keys.iterator.to(immutable.Set) + fromSpecific(this.view.filterKeys(k => !keysSet.contains(k))).unsafeAssumePure + } + + @deprecated("Use ++ instead of ++: for collections of type Iterable", "2.13.0") + def ++: [V1 >: V](that: IterableOnce[(K,V1)]^): CC[K,V1] = { + val thatIterable: Iterable[(K, V1)]^{that} = that match { + case that: Iterable[(K, V1)] => that + case that => View.from(that) + } + mapFactory.from(new View.Concat(thatIterable, this)) + } +} + +object MapOps { + /** Specializes `WithFilter` for Map collection types by adding overloads to transformation + * operations that can return a Map. + * + * @define coll map collection + */ + @SerialVersionUID(3L) + class WithFilter[K, +V, +IterableCC[_], +CC[_, _] <: IterableOps[_, AnyConstr, _]]( + self: (MapOps[K, V, CC, _] with IterableOps[(K, V), IterableCC, _])^, + p: ((K, V)) => Boolean + ) extends IterableOps.WithFilter[(K, V), IterableCC](self, p) with Serializable { + + def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2]^{this, f} = + self.mapFactory.from(new View.Map(filtered, f)) + + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2]^{this, f} = + self.mapFactory.from(new View.FlatMap(filtered, f)) + + override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, CC]^{this, q} = + new WithFilter[K, V, IterableCC, CC](self, (kv: (K, V)) => p(kv) && q(kv)) + + } + +} + +/** + * $factoryInfo + * @define coll map + * @define Coll `Map` + */ +@SerialVersionUID(3L) +object Map extends MapFactory.Delegate[Map](immutable.Map) { + private val DefaultSentinel: AnyRef = new AnyRef + private val DefaultSentinelFn: () => AnyRef = () => DefaultSentinel +} + +/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */ +abstract class AbstractMap[K, +V] extends AbstractIterable[(K, V)] with Map[K, V] diff --git a/tests/pos-special/stdlib/collection/MapView.scala b/tests/pos-special/stdlib/collection/MapView.scala new file mode 100644 index 000000000000..595fe20538d3 --- /dev/null +++ b/tests/pos-special/stdlib/collection/MapView.scala @@ -0,0 +1,196 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection + +import scala.annotation.nowarn +import scala.collection.MapView.SomeMapOps +import scala.collection.mutable.Builder +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure + +trait MapView[K, +V] + extends MapOps[K, V, ({ type l[X, Y] = View[(X, Y)] })#l, View[(K, V)]] + with View[(K, V)] { + this: MapView[K, V]^ => + + override def view: MapView[K, V]^{this} = this + + // Ideally this returns a `View`, but bincompat + /** Creates a view over all keys of this map. + * + * @return the keys of this map as a view. + */ + override def keys: Iterable[K]^{this} = new MapView.Keys(this) + + // Ideally this returns a `View`, but bincompat + /** Creates a view over all values of this map. + * + * @return the values of this map as a view. + */ + override def values: Iterable[V]^{this} = new MapView.Values(this) + + /** Filters this map by retaining only keys satisfying a predicate. + * @param p the predicate used to test keys + * @return an immutable map consisting only of those key value pairs of this map where the key satisfies + * the predicate `p`. The resulting map wraps the original map without copying any elements. + */ + override def filterKeys(p: K => Boolean): MapView[K, V]^{this, p} = new MapView.FilterKeys(this, p) + + /** Transforms this map by applying a function to every retrieved value. + * @param f the function used to transform values of this map. + * @return a map view which maps every key of this map + * to `f(this(key))`. The resulting map wraps the original map without copying any elements. + */ + override def mapValues[W](f: V => W): MapView[K, W]^{this, f} = new MapView.MapValues(this, f) + + override def filter(pred: ((K, V)) => Boolean): MapView[K, V]^{this, pred} = new MapView.Filter(this, false, pred) + + override def filterNot(pred: ((K, V)) => Boolean): MapView[K, V]^{this, pred} = new MapView.Filter(this, true, pred) + + override def partition(p: ((K, V)) => Boolean): (MapView[K, V]^{this, p}, MapView[K, V]^{this, p}) = (filter(p), filterNot(p)) + + override def tapEach[U](f: ((K, V)) => U): MapView[K, V]^{this, f} = new MapView.TapEach(this, f) + + def mapFactory: MapViewFactory = MapView + + override def empty: MapView[K, V] = mapFactory.empty + + override def withFilter(p: ((K, V)) => Boolean): MapOps.WithFilter[K, V, View, ({ type l[X, Y] = View[(X, Y)] })#l]^{this, p} = new MapOps.WithFilter(this, p) + + override def toString: String = super[View].toString + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "MapView" +} + +object MapView extends MapViewFactory { + + /** An `IterableOps` whose collection type and collection type constructor are unknown */ + type SomeIterableConstr[X, Y] = IterableOps[_, AnyConstr, _] + /** A `MapOps` whose collection type and collection type constructor are (mostly) unknown */ + type SomeMapOps[K, +V] = MapOps[K, V, SomeIterableConstr, _] + + @SerialVersionUID(3L) + object EmptyMapView extends AbstractMapView[Any, Nothing] { + // !!! 
cc problem: crash when we replace the line with + // private val EmptyMapView: MapView[Any, Nothing] = new AbstractMapView[Any, Nothing] { + override def get(key: Any): Option[Nothing] = None + override def iterator: Iterator[Nothing] = Iterator.empty[Nothing] + override def knownSize: Int = 0 + override def isEmpty: Boolean = true + override def filterKeys(p: Any => Boolean): MapView[Any, Nothing] = this + override def mapValues[W](f: Nothing => W): MapView[Any, Nothing] = this + override def filter(pred: ((Any, Nothing)) => Boolean): MapView[Any, Nothing] = this + override def filterNot(pred: ((Any, Nothing)) => Boolean): MapView[Any, Nothing] = this + override def partition(p: ((Any, Nothing)) => Boolean): (MapView[Any, Nothing], MapView[Any, Nothing]) = (this, this) + } + + @SerialVersionUID(3L) + class Id[K, +V](underlying: SomeMapOps[K, V]^) extends AbstractMapView[K, V] { + def get(key: K): Option[V] = underlying.get(key) + def iterator: Iterator[(K, V)]^{this} = underlying.iterator + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + // Ideally this is public, but bincompat + @SerialVersionUID(3L) + private class Keys[K](underlying: SomeMapOps[K, _]^) extends AbstractView[K] { + def iterator: Iterator[K]^{this} = underlying.keysIterator + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + // Ideally this is public, but bincompat + @SerialVersionUID(3L) + private class Values[+V](underlying: SomeMapOps[_, V]^) extends AbstractView[V] { + def iterator: Iterator[V]^{this} = underlying.valuesIterator + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + @SerialVersionUID(3L) + class MapValues[K, +V, +W](underlying: SomeMapOps[K, V]^, f: V => W) extends AbstractMapView[K, W] { + def iterator: Iterator[(K, W)]^{this} = underlying.iterator.map(kv => (kv._1, f(kv._2))) + def get(key: K): Option[W] = underlying.get(key).map(f) + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + @SerialVersionUID(3L) + class FilterKeys[K, +V](underlying: SomeMapOps[K, V]^, p: K => Boolean) extends AbstractMapView[K, V] { + def iterator: Iterator[(K, V)]^{this} = underlying.iterator.filter { case (k, _) => p(k) } + def get(key: K): Option[V] = if (p(key)) underlying.get(key) else None + override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + } + + @SerialVersionUID(3L) + class Filter[K, +V](underlying: SomeMapOps[K, V]^, isFlipped: Boolean, p: ((K, V)) => Boolean) extends AbstractMapView[K, V] { + def iterator: Iterator[(K, V)]^{this} = underlying.iterator.filterImpl(p, isFlipped) + def get(key: K): Option[V] = underlying.get(key) match { + case s @ Some(v) if p((key, v)) != isFlipped => s + case _ => None + } + override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + } + + @SerialVersionUID(3L) + class TapEach[K, +V, +U](underlying: SomeMapOps[K, V]^, f: ((K, V)) => U) extends AbstractMapView[K, V] { + override def get(key: K): Option[V] = { + underlying.get(key) match { + case s @ Some(v) => + f((key, v)) + s + case None => None + } + } + override def iterator: Iterator[(K, V)]^{this} = underlying.iterator.tapEach(f) + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = 
underlying.isEmpty + } + + override def newBuilder[X, Y]: Builder[(X, Y), MapView[X, Y]] = mutable.HashMap.newBuilder[X, Y].mapResult(_.view) + + override def empty[K, V]: MapView[K, V] = EmptyMapView.asInstanceOf[MapView[K, V]] + + override def from[K, V](it: IterableOnce[(K, V)]^): View[(K, V)] = + View.from(it).unsafeAssumePure + // unsafeAssumePure needed here since MapViewFactory inherits from MapFactory, + // and the latter assumes maps are strict, so from's result captures nothing. + + override def from[K, V](it: SomeMapOps[K, V]^): MapView[K, V]^{it} = it match { + case mv: MapView[K, V] => mv + case other => new MapView.Id(other) + } + + override def apply[K, V](elems: (K, V)*): MapView[K, V] = from(elems.toMap) +} + +trait MapViewFactory extends collection.MapFactory[({ type l[X, Y] = View[(X, Y)]})#l] { + + def newBuilder[X, Y]: Builder[(X, Y), MapView[X, Y]] + + def empty[X, Y]: MapView[X, Y] + + def from[K, V](it: SomeMapOps[K, V]^): MapView[K, V]^{it} + + override def apply[K, V](elems: (K, V)*): MapView[K, V] = from(elems.toMap) +} + +/** Explicit instantiation of the `MapView` trait to reduce class file size in subclasses. */ +@SerialVersionUID(3L) +abstract class AbstractMapView[K, +V] extends AbstractView[(K, V)] with MapView[K, V]: + this: AbstractMapView[K, V]^ => + diff --git a/tests/pos-special/stdlib/collection/Searching.scala b/tests/pos-special/stdlib/collection/Searching.scala new file mode 100644 index 000000000000..f5139422e24c --- /dev/null +++ b/tests/pos-special/stdlib/collection/Searching.scala @@ -0,0 +1,58 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.language.implicitConversions +import scala.collection.generic.IsSeq +import language.experimental.captureChecking + +object Searching { + + /** The result of performing a search on a sorted sequence + * + * Example usage: + * + * {{{ + * val list = List(1, 3, 4, 5) // list must be sorted before searching + * list.search(4) // Found(2) + * list.search(2) // InsertionPoint(1) + * }}} + * + * */ + sealed abstract class SearchResult { + /** The index corresponding to the element searched for in the sequence, if it was found, + * or the index where the element would be inserted in the sequence, if it was not in the sequence */ + def insertionPoint: Int + } + + /** The result of performing a search on a sorted sequence, where the element was found. 
+ * + * @param foundIndex the index corresponding to the element searched for in the sequence + */ + case class Found(foundIndex: Int) extends SearchResult { + override def insertionPoint: Int = foundIndex + } + + /** The result of performing a search on a sorted sequence, where the element was not found + * + * @param insertionPoint the index where the element would be inserted in the sequence + */ + case class InsertionPoint(insertionPoint: Int) extends SearchResult + + @deprecated("Search methods are defined directly on SeqOps and do not require scala.collection.Searching any more", "2.13.0") + class SearchImpl[Repr, A](private val coll: SeqOps[A, AnyConstr, _]) extends AnyVal + + @deprecated("Search methods are defined directly on SeqOps and do not require scala.collection.Searching any more", "2.13.0") + implicit def search[Repr, A](coll: Repr)(implicit fr: IsSeq[Repr]): SearchImpl[Repr, fr.A] = + new SearchImpl(fr.conversion(coll)) +} diff --git a/tests/pos-special/stdlib/collection/Seq.scala b/tests/pos-special/stdlib/collection/Seq.scala new file mode 100644 index 000000000000..334546d67dad --- /dev/null +++ b/tests/pos-special/stdlib/collection/Seq.scala @@ -0,0 +1,1197 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.collection.immutable.Range +import scala.util.hashing.MurmurHash3 +import Searching.{Found, InsertionPoint, SearchResult} +import scala.annotation.nowarn +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure + +/** Base trait for sequence collections + * + * @tparam A the element type of the collection + */ +trait Seq[+A] + extends Iterable[A] + with PartialFunction[Int, A] + with SeqOps[A, Seq, Seq[A]] + with IterableFactoryDefaults[A, Seq] + with Equals { + this: Seq[A] => + + override def iterableFactory: SeqFactory[Seq] = Seq + + def canEqual(that: Any): Boolean = true + + override def equals(o: Any): Boolean = + (this eq o.asInstanceOf[AnyRef]) || (o match { + case seq: Seq[A @unchecked] if seq.canEqual(this) => sameElements(seq) + case _ => false + }) + + override def hashCode(): Int = MurmurHash3.seqHash(this) + + override def toString(): String = super[Iterable].toString() + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "Seq" +} + +/** + * $factoryInfo + * @define coll sequence + * @define Coll `Seq` + */ +@SerialVersionUID(3L) +object Seq extends SeqFactory.Delegate[Seq](immutable.Seq) + +/** Base trait for Seq operations + * + * @tparam A the element type of the collection + * @tparam CC type constructor of the collection (e.g. `List`, `Set`). Operations returning a collection + * with a different type of element `B` (e.g. `map`) return a `CC[B]`. + * @tparam C type of the collection (e.g. `List[Int]`, `String`, `BitSet`). Operations returning a collection + * with the same type of element (e.g. `drop`, `filter`) return a `C`. + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * + * Note: may not terminate for infinite-sized collections. + * + * @define willNotTerminateInf + * + * Note: will not terminate for infinite-sized collections. 
+ *
+ * @define coll sequence
+ * @define Coll `Seq`
+ */
+trait SeqOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { self =>
+
+ override def view: SeqView[A] = new SeqView.Id[A](this)
+
+ def iterableFactory: FreeSeqFactory[CC]
+
+ /** Get the element at the specified index. This operation is provided for convenience in `Seq`. It should
+ * not be assumed to be efficient unless you have an `IndexedSeq`. */
+ @throws[IndexOutOfBoundsException]
+ def apply(i: Int): A
+
+ /** The length (number of elements) of the $coll. `size` is an alias for `length` in `Seq` collections. */
+ def length: Int
+
+ /** A copy of the $coll with an element prepended.
+ *
+ * Also, the original $coll is not modified, so you will want to capture the result.
+ *
+ * Example:
+ * {{{
+ * scala> val x = List(1)
+ * x: List[Int] = List(1)
+ *
+ * scala> val y = 2 +: x
+ * y: List[Int] = List(2, 1)
+ *
+ * scala> println(x)
+ * List(1)
+ * }}}
+ *
+ * @param elem the prepended element
+ * @tparam B the element type of the returned $coll.
+ *
+ * @return a new $coll consisting of `elem` followed
+ * by all elements of this $coll.
+ */
+ def prepended[B >: A](elem: B): CC[B] = iterableFactory.from(new View.Prepended(elem, this))
+
+ /** Alias for `prepended`.
+ *
+ * Note that :-ending operators are right associative (see example).
+ * A mnemonic for `+:` vs. `:+` is: the COLon goes on the COLlection side.
+ */
+ @`inline` final def +: [B >: A](elem: B): CC[B] = prepended(elem)
+
+ /** A copy of this $coll with an element appended.
+ *
+ * $willNotTerminateInf
+ *
+ * Example:
+ * {{{
+ * scala> val a = List(1)
+ * a: List[Int] = List(1)
+ *
+ * scala> val b = a :+ 2
+ * b: List[Int] = List(1, 2)
+ *
+ * scala> println(a)
+ * List(1)
+ * }}}
+ *
+ * @param elem the appended element
+ * @tparam B the element type of the returned $coll.
+ * @return a new $coll consisting of
+ * all elements of this $coll followed by `elem`.
+ */
+ def appended[B >: A](elem: B): CC[B] = iterableFactory.from(new View.Appended(this, elem))
+
+ /** Alias for `appended`
+ *
+ * Note that :-ending operators are right associative (see example).
+ * A mnemonic for `+:` vs. `:+` is: the COLon goes on the COLlection side.
+ */
+ @`inline` final def :+ [B >: A](elem: B): CC[B] = appended(elem)
+
+ /** As with `:++`, returns a new collection containing the elements from the left operand followed by the
+ * elements from the right operand.
+ *
+ * It differs from `:++` in that the right operand determines the type of
+ * the resulting collection rather than the left one.
+ * Mnemonic: the COLon is on the side of the new COLlection type.
+ *
+ * @param prefix the iterable to prepend.
+ * @tparam B the element type of the returned collection.
+ * @return a new $coll which contains all elements of `prefix` followed
+ * by all the elements of this $coll.
+ */
+ def prependedAll[B >: A](prefix: IterableOnce[B]^): CC[B] = iterableFactory.from(prefix match {
+ case prefix: Iterable[B] => new View.Concat(prefix, this)
+ case _ => prefix.iterator ++ iterator
+ })
+
+ /** Alias for `prependedAll` */
+ @`inline` override final def ++: [B >: A](prefix: IterableOnce[B]^): CC[B] = prependedAll(prefix)
+
+ /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the
+ * right hand operand. The element type of the $coll is the most specific superclass encompassing
+ * the element types of the two operands.
+ *
+ * @param suffix the iterable to append.
+ * @tparam B the element type of the returned collection.
+ * @return a new collection of type `CC[B]` which contains all elements
+ * of this $coll followed by all elements of `suffix`.
+ */
+ def appendedAll[B >: A](suffix: IterableOnce[B]^): CC[B] =
+ super.concat(suffix).unsafeAssumePure
+
+ /** Alias for `appendedAll` */
+ @`inline` final def :++ [B >: A](suffix: IterableOnce[B]^): CC[B] = appendedAll(suffix)
+
+ // Make `concat` an alias for `appendedAll` so that it benefits from performance
+ // overrides of this method
+ @`inline` final override def concat[B >: A](suffix: IterableOnce[B]^): CC[B] = appendedAll(suffix)
+
+ /** Produces a new sequence which contains all elements of this $coll and also all elements of
+ * a given sequence. `xs union ys` is equivalent to `xs ++ ys`.
+ *
+ * @param that the sequence to add.
+ * @tparam B the element type of the returned $coll.
+ * @return a new collection which contains all elements of this $coll
+ * followed by all elements of `that`.
+ */
+ @deprecated("Use `concat` instead", "2.13.0")
+ @inline final def union[B >: A](that: Seq[B]): CC[B] = concat(that)
+
+ final override def size: Int = length
+
+ /** Selects all the elements of this $coll ignoring the duplicates.
+ *
+ * @return a new $coll consisting of all the elements of this $coll without duplicates.
+ */
+ def distinct: C = distinctBy(identity)
+
+ /** Selects all the elements of this $coll ignoring the duplicates as determined by `==` after applying
+ * the transforming function `f`.
+ *
+ * @param f The transforming function whose result is used to determine the uniqueness of each element
+ * @tparam B the type of the elements after being transformed by `f`
+ * @return a new $coll consisting of all the elements of this $coll without duplicates.
+ */
+ def distinctBy[B](f: A -> B): C = fromSpecific(new View.DistinctBy(this, f))
+
+ /** Returns a new $coll with elements in reversed order.
+ *
+ * $willNotTerminateInf
+ * $willForceEvaluation
+ *
+ * @return A new $coll with all elements of this $coll in reversed order.
+ */
+ def reverse: C = fromSpecific(reversed)
+
+ /** An iterator yielding elements in reversed order.
+ *
+ * $willNotTerminateInf
+ *
+ * Note: `xs.reverseIterator` is the same as `xs.reverse.iterator` but might be more efficient.
+ *
+ * @return an iterator yielding the elements of this $coll in reversed order
+ */
+ override def reverseIterator: Iterator[A] = reversed.iterator
+
+ /** Tests whether this $coll contains the given sequence at a given index.
+ *
+ * '''Note''': If both the receiver object `this` and the argument
+ * `that` are infinite sequences, this method may not terminate.
+ *
+ * @param that the sequence to test
+ * @param offset the index where the sequence is searched.
+ * @return `true` if the sequence `that` is contained in this $coll at
+ * index `offset`, otherwise `false`.
+ */
+ def startsWith[B >: A](that: IterableOnce[B]^, offset: Int = 0): Boolean = {
+ val i = iterator drop offset
+ val j = that.iterator
+ while (j.hasNext && i.hasNext)
+ if (i.next() != j.next())
+ return false
+
+ !j.hasNext
+ }
+
+ /** Tests whether this $coll ends with the given sequence.
+ * $willNotTerminateInf
+ * @param that the sequence to test
+ * @return `true` if this $coll has `that` as a suffix, `false` otherwise.
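+ *
+ * For example:
+ * {{{
+ * List(1, 2, 3, 4).endsWith(List(3, 4)) // true
+ * List(1, 2, 3, 4).endsWith(List(2, 3)) // false
+ * }}}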
+ */ + def endsWith[B >: A](that: Iterable[B]^): Boolean = { + if (that.isEmpty) true + else { + val i = iterator.drop(length - that.size) + val j = that.iterator + while (i.hasNext && j.hasNext) + if (i.next() != j.next()) + return false + + !j.hasNext + } + } + + /** Tests whether this $coll contains given index. + * + * The implementations of methods `apply` and `isDefinedAt` turn a `Seq[A]` into + * a `PartialFunction[Int, A]`. + * + * @param idx the index to test + * @return `true` if this $coll contains an element at position `idx`, `false` otherwise. + */ + def isDefinedAt(idx: Int): Boolean = idx >= 0 && lengthIs > idx + + /** A copy of this $coll with an element value appended until a given target length is reached. + * + * @param len the target length + * @param elem the padding value + * @tparam B the element type of the returned $coll. + * @return a new $coll consisting of + * all elements of this $coll followed by the minimal number of occurrences of `elem` so + * that the resulting collection has a length of at least `len`. + */ + def padTo[B >: A](len: Int, elem: B): CC[B] = iterableFactory.from(new View.PadTo(this, len, elem)) + + /** Computes the length of the longest segment that starts from the first element + * and whose elements all satisfy some predicate. + * + * $mayNotTerminateInf + * + * @param p the predicate used to test elements. + * @return the length of the longest segment of this $coll that starts from the first element + * such that every element of the segment satisfies the predicate `p`. + */ + final def segmentLength(p: A => Boolean): Int = segmentLength(p, 0) + + /** Computes the length of the longest segment that starts from some index + * and whose elements all satisfy some predicate. + * + * $mayNotTerminateInf + * + * @param p the predicate used to test elements. + * @param from the index where the search starts. + * @return the length of the longest segment of this $coll starting from index `from` + * such that every element of the segment satisfies the predicate `p`. + */ + def segmentLength(p: A => Boolean, from: Int): Int = { + var i = 0 + val it = iterator.drop(from) + while (it.hasNext && p(it.next())) + i += 1 + i + } + + /** Returns the length of the longest prefix whose elements all satisfy some predicate. + * + * $mayNotTerminateInf + * + * @param p the predicate used to test elements. + * @return the length of the longest prefix of this $coll + * such that every element of the segment satisfies the predicate `p`. + */ + @deprecated("Use segmentLength instead of prefixLength", "2.13.0") + @`inline` final def prefixLength(p: A => Boolean): Int = segmentLength(p, 0) + + /** Finds index of the first element satisfying some predicate after or at some start index. + * + * $mayNotTerminateInf + * + * @param p the predicate used to test elements. + * @param from the start index + * @return the index `>= from` of the first element of this $coll that satisfies the predicate `p`, + * or `-1`, if none exists. + */ + def indexWhere(p: A => Boolean, from: Int): Int = iterator.indexWhere(p, from) + + /** Finds index of the first element satisfying some predicate. + * + * $mayNotTerminateInf + * + * @param p the predicate used to test elements. + * @return the index `>= 0` of the first element of this $coll that satisfies the predicate `p`, + * or `-1`, if none exists. 
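+ *
+ * For example:
+ * {{{
+ * List(1, 2, 3, 4).indexWhere(_ % 2 == 0) // 1
+ * List(1, 3, 5).indexWhere(_ % 2 == 0)    // -1
+ * }}}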
+ */ + @deprecatedOverriding("Override indexWhere(p, from) instead - indexWhere(p) calls indexWhere(p, 0)", "2.13.0") + def indexWhere(p: A => Boolean): Int = indexWhere(p, 0) + + /** Finds index of first occurrence of some value in this $coll after or at some start index. + * + * @param elem the element value to search for. + * @tparam B the type of the element `elem`. + * @param from the start index + * @return the index `>= from` of the first element of this $coll that is equal (as determined by `==`) + * to `elem`, or `-1`, if none exists. + */ + def indexOf[B >: A](elem: B, from: Int): Int = indexWhere(elem == _, from) + + /** Finds index of first occurrence of some value in this $coll. + * + * @param elem the element value to search for. + * @tparam B the type of the element `elem`. + * @return the index `>= 0` of the first element of this $coll that is equal (as determined by `==`) + * to `elem`, or `-1`, if none exists. + */ + @deprecatedOverriding("Override indexOf(elem, from) instead - indexOf(elem) calls indexOf(elem, 0)", "2.13.0") + def indexOf[B >: A](elem: B): Int = indexOf(elem, 0) + + /** Finds index of last occurrence of some value in this $coll before or at a given end index. + * + * $willNotTerminateInf + * + * @param elem the element value to search for. + * @param end the end index. + * @tparam B the type of the element `elem`. + * @return the index `<= end` of the last element of this $coll that is equal (as determined by `==`) + * to `elem`, or `-1`, if none exists. + */ + def lastIndexOf[B >: A](elem: B, end: Int = length - 1): Int = lastIndexWhere(elem == _, end) + + /** Finds index of last element satisfying some predicate before or at given end index. + * + * $willNotTerminateInf + * + * @param p the predicate used to test elements. + * @return the index `<= end` of the last element of this $coll that satisfies the predicate `p`, + * or `-1`, if none exists. + */ + def lastIndexWhere(p: A => Boolean, end: Int): Int = { + var i = length - 1 + val it = reverseIterator + while (it.hasNext && { val elem = it.next(); (i > end || !p(elem)) }) i -= 1 + i + } + + /** Finds index of last element satisfying some predicate. + * + * $willNotTerminateInf + * + * @param p the predicate used to test elements. + * @return the index of the last element of this $coll that satisfies the predicate `p`, + * or `-1`, if none exists. + */ + @deprecatedOverriding("Override lastIndexWhere(p, end) instead - lastIndexWhere(p) calls lastIndexWhere(p, Int.MaxValue)", "2.13.0") + def lastIndexWhere(p: A => Boolean): Int = lastIndexWhere(p, Int.MaxValue) + + @inline private[this] def toGenericSeq: scala.collection.Seq[A] = this match { + case s: scala.collection.Seq[A] => s + case _ => toSeq + } + + /** Finds first index after or at a start index where this $coll contains a given sequence as a slice. + * $mayNotTerminateInf + * @param that the sequence to test + * @param from the start index + * @return the first index `>= from` such that the elements of this $coll starting at this index + * match the elements of sequence `that`, or `-1` if no such subsequence exists. 
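+ * @example {{{
+ * // Illustrative values:
+ * List(1, 2, 3, 2, 3).indexOfSlice(List(2, 3), 0) // 1
+ * List(1, 2, 3, 2, 3).indexOfSlice(List(2, 3), 2) // 3
+ * }}}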
+ */ + // TODO Should be implemented in a way that preserves laziness + def indexOfSlice[B >: A](that: Seq[B], from: Int): Int = + if (that.isEmpty && from == 0) 0 + else { + val l = knownSize + val tl = that.knownSize + if (l >= 0 && tl >= 0) { + val clippedFrom = math.max(0, from) + if (from > l) -1 + else if (tl < 1) clippedFrom + else if (l < tl) -1 + else SeqOps.kmpSearch(toGenericSeq, clippedFrom, l, that, 0, tl, forward = true) + } + else { + var i = from + var s: scala.collection.Seq[A] = toGenericSeq.drop(i) + while (!s.isEmpty) { + if (s startsWith that) + return i + + i += 1 + s = s.tail + } + -1 + } + } + + /** Finds first index where this $coll contains a given sequence as a slice. + * $mayNotTerminateInf + * @param that the sequence to test + * @return the first index `>= 0` such that the elements of this $coll starting at this index + * match the elements of sequence `that`, or `-1` if no such subsequence exists. + */ + @deprecatedOverriding("Override indexOfSlice(that, from) instead - indexOfSlice(that) calls indexOfSlice(that, 0)", "2.13.0") + def indexOfSlice[B >: A](that: Seq[B]): Int = indexOfSlice(that, 0) + + /** Finds last index before or at a given end index where this $coll contains a given sequence as a slice. + * + * $willNotTerminateInf + * + * @param that the sequence to test + * @param end the end index + * @return the last index `<= end` such that the elements of this $coll starting at this index + * match the elements of sequence `that`, or `-1` if no such subsequence exists. + */ + def lastIndexOfSlice[B >: A](that: Seq[B], end: Int): Int = { + val l = length + val tl = that.length + val clippedL = math.min(l-tl, end) + + if (end < 0) -1 + else if (tl < 1) clippedL + else if (l < tl) -1 + else SeqOps.kmpSearch(toGenericSeq, 0, clippedL+tl, that, 0, tl, forward = false) + } + + /** Finds last index where this $coll contains a given sequence as a slice. + * + * $willNotTerminateInf + * + * @param that the sequence to test + * @return the last index such that the elements of this $coll starting at this index + * match the elements of sequence `that`, or `-1` if no such subsequence exists. + */ + @deprecatedOverriding("Override lastIndexOfSlice(that, end) instead - lastIndexOfSlice(that) calls lastIndexOfSlice(that, Int.MaxValue)", "2.13.0") + def lastIndexOfSlice[B >: A](that: Seq[B]): Int = lastIndexOfSlice(that, Int.MaxValue) + + /** Finds the last element of the $coll satisfying a predicate, if any. + * + * $willNotTerminateInf + * + * @param p the predicate used to test elements. + * @return an option value containing the last element in the $coll + * that satisfies `p`, or `None` if none exists. + */ + def findLast(p: A => Boolean): Option[A] = { + val it = reverseIterator + while (it.hasNext) { + val elem = it.next() + if (p(elem)) return Some(elem) + } + None + } + + /** Tests whether this $coll contains a given sequence as a slice. + * $mayNotTerminateInf + * @param that the sequence to test + * @return `true` if this $coll contains a slice with the same elements + * as `that`, otherwise `false`. + */ + def containsSlice[B >: A](that: Seq[B]): Boolean = indexOfSlice(that) != -1 + + /** Tests whether this $coll contains a given value as an element. + * $mayNotTerminateInf + * + * @param elem the element to test. + * @return `true` if this $coll has an element that is equal (as + * determined by `==`) to `elem`, `false` otherwise. + */ + def contains[A1 >: A](elem: A1): Boolean = exists (_ == elem) + + @deprecated("Use .reverseIterator.map(f).to(...) 
instead of .reverseMap(f)", "2.13.0") + def reverseMap[B](f: A => B): CC[B] = iterableFactory.from(new View.Map(View.fromIteratorProvider(() => reverseIterator), f)) + + /** Iterates over distinct permutations of elements. + * + * $willForceEvaluation + * + * @return An Iterator which traverses the distinct permutations of this $coll. + * @example {{{ + * Seq('a', 'b', 'b').permutations.foreach(println) + * // List(a, b, b) + * // List(b, a, b) + * // List(b, b, a) + * }}} + */ + def permutations: Iterator[C] = + if (isEmpty) Iterator.single(coll) + else new PermutationsItr + + /** Iterates over combinations of elements. + * + * A '''combination''' of length `n` is a sequence of `n` elements selected in order of their first index in this sequence. + * + * For example, `"xyx"` has two combinations of length 2. The `x` is selected first: `"xx"`, `"xy"`. + * The sequence `"yx"` is not returned as a combination because it is subsumed by `"xy"`. + * + * If there is more than one way to generate the same combination, only one will be returned. + * + * For example, the result `"xy"` arbitrarily selected one of the `x` elements. + * + * As a further illustration, `"xyxx"` has three different ways to generate `"xy"` because there are three elements `x` + * to choose from. Moreover, there are three unordered pairs `"xx"` but only one is returned. + * + * It is not specified which of these equal combinations is returned. It is an implementation detail + * that should not be relied on. For example, the combination `"xx"` does not necessarily contain + * the first `x` in this sequence. This behavior is observable if the elements compare equal + * but are not identical. + * + * As a consequence, `"xyx".combinations(3).next()` is `"xxy"`: the combination does not reflect the order + * of the original sequence, but the order in which elements were selected, by "first index"; + * the order of each `x` element is also arbitrary. + * + * $willForceEvaluation + * + * @return An Iterator which traverses the n-element combinations of this $coll. 
+ * @example {{{ + * Seq('a', 'b', 'b', 'b', 'c').combinations(2).foreach(println) + * // List(a, b) + * // List(a, c) + * // List(b, b) + * // List(b, c) + * Seq('b', 'a', 'b').combinations(2).foreach(println) + * // List(b, b) + * // List(b, a) + * }}} + */ + def combinations(n: Int): Iterator[C] = + if (n < 0 || n > size) Iterator.empty + else new CombinationsItr(n) + + private class PermutationsItr extends AbstractIterator[C] { + private[this] val (elms, idxs) = init() + private[this] var _hasNext = true + + def hasNext = _hasNext + @throws[NoSuchElementException] + def next(): C = { + if (!hasNext) + Iterator.empty.next() + + val forcedElms = new mutable.ArrayBuffer[A](elms.size) ++= elms + val result = (newSpecificBuilder ++= forcedElms).result() + var i = idxs.length - 2 + while(i >= 0 && idxs(i) >= idxs(i+1)) + i -= 1 + + if (i < 0) + _hasNext = false + else { + var j = idxs.length - 1 + while(idxs(j) <= idxs(i)) j -= 1 + swap(i,j) + + val len = (idxs.length - i) / 2 + var k = 1 + while (k <= len) { + swap(i+k, idxs.length - k) + k += 1 + } + } + result + } + private def swap(i: Int, j: Int): Unit = { + val tmpI = idxs(i) + idxs(i) = idxs(j) + idxs(j) = tmpI + val tmpE = elms(i) + elms(i) = elms(j) + elms(j) = tmpE + } + + private[this] def init() = { + val m = mutable.HashMap[A, Int]() + //val s1 = self.toGenericSeq map (e => (e, m.getOrElseUpdate(e, m.size))) + //val s2: Seq[(A, Int)] = s1 sortBy (_._2) + //val (es, is) = s2.unzip(using Predef.$conforms[(A, Int)]) + val (es, is) = (self.toGenericSeq map (e => (e, m.getOrElseUpdate(e, m.size))) sortBy (_._2)).unzip + + (es.to(mutable.ArrayBuffer), is.toArray) + } + } + + private class CombinationsItr(n: Int) extends AbstractIterator[C] { + // generating all nums such that: + // (1) nums(0) + .. + nums(length-1) = n + // (2) 0 <= nums(i) <= cnts(i), where 0 <= i <= cnts.length-1 + private[this] val (elms, cnts, nums) = init() + private[this] val offs = cnts.scanLeft(0)(_ + _) + private[this] var _hasNext = true + + def hasNext = _hasNext + def next(): C = { + if (!hasNext) + Iterator.empty.next() + + /* Calculate this result. */ + val buf = newSpecificBuilder + for(k <- 0 until nums.length; j <- 0 until nums(k)) + buf += elms(offs(k)+j) + val res = buf.result() + + /* Prepare for the next call to next. */ + var idx = nums.length - 1 + while (idx >= 0 && nums(idx) == cnts(idx)) + idx -= 1 + + idx = nums.lastIndexWhere(_ > 0, idx - 1) + + if (idx < 0) + _hasNext = false + else { + // OPT: hand rolled version of `sum = nums.view(idx + 1, nums.length).sum + 1` + var sum = 1 + var i = idx + 1 + while (i < nums.length) { + sum += nums(i) + i += 1 + } + nums(idx) -= 1 + for (k <- (idx+1) until nums.length) { + nums(k) = sum min cnts(k) + sum -= nums(k) + } + } + + res + } + + /** Rearrange seq to newSeq a0a0..a0a1..a1...ak..ak such that + * seq.count(_ == aj) == cnts(j) + * + * @return (newSeq,cnts,nums) + */ + private def init(): (IndexedSeq[A], Array[Int], Array[Int]) = { + val m = mutable.HashMap[A, Int]() + + // e => (e, weight(e)) + val (es, is) = (self.toGenericSeq map (e => (e, m.getOrElseUpdate(e, m.size))) sortBy (_._2)).unzip + val cs = new Array[Int](m.size) + is foreach (i => cs(i) += 1) + val ns = new Array[Int](cs.length) + + var r = n + 0 until ns.length foreach { k => + ns(k) = r min cs(k) + r -= ns(k) + } + (es.to(IndexedSeq), cs, ns) + } + } + + /** Sorts this $coll according to an Ordering. + * + * The sort is stable. 
That is, elements that are equal (as determined by + * `ord.compare`) appear in the same order in the sorted sequence as in the original. + * + * @see [[scala.math.Ordering]] + * + * $willForceEvaluation + * + * @param ord the ordering to be used to compare elements. + * @return a $coll consisting of the elements of this $coll + * sorted according to the ordering `ord`. + */ + def sorted[B >: A](implicit ord: Ordering[B]): C = { + val len = this.length + val b = newSpecificBuilder + if (len == 1) b += head + else if (len > 1) { + b.sizeHint(len) + val arr = new Array[Any](len) + copyToArray(arr) + java.util.Arrays.sort(arr.asInstanceOf[Array[AnyRef]], ord.asInstanceOf[Ordering[AnyRef]]) + var i = 0 + while (i < len) { + b += arr(i).asInstanceOf[A] + i += 1 + } + } + b.result() + } + + /** Sorts this $coll according to a comparison function. + * $willNotTerminateInf + * $willForceEvaluation + * + * The sort is stable. That is, elements that are equal + * (`lt` returns false for both directions of comparison) + * appear in the same order in the sorted sequence as in the original. + * + * @param lt a predicate that is true if + * its first argument strictly precedes its second argument in + * the desired ordering. + * @return a $coll consisting of the elements of this $coll + * sorted according to the comparison function `lt`. + * @example {{{ + * List("Steve", "Bobby", "Tom", "John", "Bob").sortWith((x, y) => x.take(3).compareTo(y.take(3)) < 0) = + * List("Bobby", "Bob", "John", "Steve", "Tom") + * }}} + */ + def sortWith(lt: (A, A) => Boolean): C = sorted(Ordering.fromLessThan(lt)) + + /** Sorts this $coll according to the Ordering which results from transforming + * an implicitly given Ordering with a transformation function. + * $willNotTerminateInf + * $willForceEvaluation + * + * The sort is stable. That is, elements that are equal (as determined by + * `ord.compare`) appear in the same order in the sorted sequence as in the original. + * + * @see [[scala.math.Ordering]] + * @param f the transformation function mapping elements + * to some other domain `B`. + * @param ord the ordering assumed on domain `B`. + * @tparam B the target type of the transformation `f`, and the type where + * the ordering `ord` is defined. + * @return a $coll consisting of the elements of this $coll + * sorted according to the ordering where `x < y` if + * `ord.lt(f(x), f(y))`. + * + * @example {{{ + * val words = "The quick brown fox jumped over the lazy dog".split(' ') + * // this works because scala.Ordering will implicitly provide an Ordering[Tuple2[Int, Char]] + * words.sortBy(x => (x.length, x.head)) + * res0: Array[String] = Array(The, dog, fox, the, lazy, over, brown, quick, jumped) + * }}} + */ + def sortBy[B](f: A => B)(implicit ord: Ordering[B]): C = sorted(ord on f) + + /** Produces the range of all indices of this sequence. + * $willForceEvaluation + * + * @return a `Range` value from `0` to one less than the length of this $coll. + */ + def indices: Range = Range(0, length) + + override final def sizeCompare(otherSize: Int): Int = lengthCompare(otherSize) + + /** Compares the length of this $coll to a test value. + * + * @param len the test value that gets compared with the length. + * @return A value `x` where + * {{{ + * x < 0 if this.length < len + * x == 0 if this.length == len + * x > 0 if this.length > len + * }}} + * The method as implemented here does not call `length` directly; its running time + * is `O(length min len)` instead of `O(length)`. 
The method should be overridden
+ * if computing `length` is cheap and `knownSize` returns `-1`.
+ *
+ * @see [[lengthIs]]
+ */
+ def lengthCompare(len: Int): Int = super.sizeCompare(len)
+
+ override final def sizeCompare(that: Iterable[_]^): Int = lengthCompare(that)
+
+ /** Compares the length of this $coll to the size of another `Iterable`.
+ *
+ * @param that the `Iterable` whose size is compared with this $coll's length.
+ * @return A value `x` where
+ * {{{
+ * x < 0 if this.length < that.size
+ * x == 0 if this.length == that.size
+ * x > 0 if this.length > that.size
+ * }}}
+ * The method as implemented here does not call `length` or `size` directly; its running time
+ * is `O(this.length min that.size)` instead of `O(this.length + that.size)`.
+ * The method should be overridden if computing `size` is cheap and `knownSize` returns `-1`.
+ */
+ def lengthCompare(that: Iterable[_]^): Int = super.sizeCompare(that)
+
+ /** Returns a value class containing operations for comparing the length of this $coll to a test value.
+ *
+ * These operations are implemented in terms of [[lengthCompare(Int) `lengthCompare(Int)`]], and
+ * allow the following more readable usages:
+ *
+ * {{{
+ * this.lengthIs < len // this.lengthCompare(len) < 0
+ * this.lengthIs <= len // this.lengthCompare(len) <= 0
+ * this.lengthIs == len // this.lengthCompare(len) == 0
+ * this.lengthIs != len // this.lengthCompare(len) != 0
+ * this.lengthIs >= len // this.lengthCompare(len) >= 0
+ * this.lengthIs > len // this.lengthCompare(len) > 0
+ * }}}
+ */
+ @inline final def lengthIs: IterableOps.SizeCompareOps = new IterableOps.SizeCompareOps(this)
+
+ override def isEmpty: Boolean = lengthCompare(0) == 0
+
+ /** Are the elements of this collection the same (and in the same order)
+ * as those of `that`?
+ */
+ def sameElements[B >: A](that: IterableOnce[B]^): Boolean = {
+ val thisKnownSize = knownSize
+ val knownSizeDifference = thisKnownSize != -1 && {
+ val thatKnownSize = that.knownSize
+ thatKnownSize != -1 && thisKnownSize != thatKnownSize
+ }
+ !knownSizeDifference && iterator.sameElements(that)
+ }
+
+ /** Tests whether every element of this $coll relates to the
+ * corresponding element of another sequence by satisfying a test predicate.
+ *
+ * @param that the other sequence
+ * @param p the test predicate, which relates elements from both sequences
+ * @tparam B the type of the elements of `that`
+ * @return `true` if both sequences have the same length and
+ * `p(x, y)` is `true` for all corresponding elements `x` of this $coll
+ * and `y` of `that`, otherwise `false`.
+ */
+ def corresponds[B](that: Seq[B])(p: (A, B) => Boolean): Boolean = {
+ val i = iterator
+ val j = that.iterator
+ while (i.hasNext && j.hasNext)
+ if (!p(i.next(), j.next()))
+ return false
+ !i.hasNext && !j.hasNext
+ }
+
+ /** Computes the multiset difference between this $coll and another sequence.
+ *
+ * @param that the sequence of elements to remove
+ * @return a new $coll which contains all elements of this $coll
+ * except some occurrences of elements that also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
+ * part of the result, but any following occurrences will.
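+ * @example {{{
+ * // Multiset semantics, illustrated:
+ * List(1, 2, 2, 3, 2).diff(List(2, 2)) // List(1, 3, 2): only the first two occurrences of 2 are removed
+ * }}}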
+ */ + def diff[B >: A](that: Seq[B]): C = { + val occ = occCounts[B](that) + fromSpecific(iterator.filter { x => + var include = false + occ.updateWith(x) { + case None => { + include = true + None + } + case Some(1) => None + case Some(n) => Some(n - 1) + } + include + }) + } + + /** Computes the multiset intersection between this $coll and another sequence. + * + * @param that the sequence of elements to intersect with. + * @return a new $coll which contains all elements of this $coll + * which also appear in `that`. + * If an element value `x` appears + * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained + * in the result, but any following occurrences will be omitted. + */ + def intersect[B >: A](that: Seq[B]): C = { + val occ = occCounts[B](that) + fromSpecific(iterator.filter { x => + var include = true + occ.updateWith(x) { + case None => { + include = false + None + } + case Some(1) => None + case Some(n) => Some(n - 1) + } + include + }) + } + + /** Produces a new $coll where a slice of elements in this $coll is replaced by another sequence. + * + * Patching at negative indices is the same as patching starting at 0. + * Patching at indices at or larger than the length of the original $coll appends the patch to the end. + * If more values are replaced than actually exist, the excess is ignored. + * + * @param from the index of the first replaced element + * @param other the replacement sequence + * @param replaced the number of elements to drop in the original $coll + * @tparam B the element type of the returned $coll. + * @return a new $coll consisting of all elements of this $coll + * except that `replaced` elements starting from `from` are replaced + * by all the elements of `other`. + */ + def patch[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): CC[B] = + iterableFactory.from(new View.Patched(this, from, other, replaced)) + + /** A copy of this $coll with one single replaced element. + * @param index the position of the replacement + * @param elem the replacing element + * @tparam B the element type of the returned $coll. + * @return a new $coll which is a copy of this $coll with the element at position `index` replaced by `elem`. + * @throws IndexOutOfBoundsException if `index` does not satisfy `0 <= index < length`. In case of a + * lazy collection this exception may be thrown at a later time or not at + * all (if the end of the collection is never evaluated). + */ + def updated[B >: A](index: Int, elem: B): CC[B] = { + if(index < 0) throw new IndexOutOfBoundsException(index.toString) + val k = knownSize + if(k >= 0 && index >= k) throw new IndexOutOfBoundsException(index.toString) + iterableFactory.from(new View.Updated(this, index, elem)) + } + + protected[collection] def occCounts[B](sq: Seq[B]): mutable.Map[B, Int] = { + val occ = new mutable.HashMap[B, Int]() + for (y <- sq) occ.updateWith(y) { + case None => Some(1) + case Some(n) => Some(n + 1) + } + occ + } + + /** Search this sorted sequence for a specific element. If the sequence is an + * `IndexedSeq`, a binary search is used. Otherwise, a linear search is used. + * + * The sequence should be sorted with the same `Ordering` before calling; otherwise, + * the results are undefined. + * + * @see [[scala.collection.IndexedSeq]] + * @see [[scala.math.Ordering]] + * @see [[scala.collection.SeqOps]], method `sorted` + * + * @param elem the element to find. + * @param ord the ordering to be used to compare elements. 
+ * + * @return a `Found` value containing the index corresponding to the element in the + * sequence, or the `InsertionPoint` where the element would be inserted if + * the element is not in the sequence. + */ + def search[B >: A](elem: B)(implicit ord: Ordering[B]): SearchResult = + linearSearch(view, elem, 0)(ord) + + /** Search within an interval in this sorted sequence for a specific element. If this + * sequence is an `IndexedSeq`, a binary search is used. Otherwise, a linear search + * is used. + * + * The sequence should be sorted with the same `Ordering` before calling; otherwise, + * the results are undefined. + * + * @see [[scala.collection.IndexedSeq]] + * @see [[scala.math.Ordering]] + * @see [[scala.collection.SeqOps]], method `sorted` + * + * @param elem the element to find. + * @param from the index where the search starts. + * @param to the index following where the search ends. + * @param ord the ordering to be used to compare elements. + * + * @return a `Found` value containing the index corresponding to the element in the + * sequence, or the `InsertionPoint` where the element would be inserted if + * the element is not in the sequence. + * + * @note if `to <= from`, the search space is empty, and an `InsertionPoint` at `from` + * is returned + */ + def search[B >: A](elem: B, from: Int, to: Int) (implicit ord: Ordering[B]): SearchResult = + linearSearch(view.slice(from, to), elem, math.max(0, from))(ord) + + private[this] def linearSearch[B >: A](c: View[A], elem: B, offset: Int) + (implicit ord: Ordering[B]): SearchResult = { + var idx = offset + val it = c.iterator + while (it.hasNext) { + val cur = it.next() + if (ord.equiv(elem, cur)) return Found(idx) + else if (ord.lt(elem, cur)) return InsertionPoint(idx) + idx += 1 + } + InsertionPoint(idx) + } +} + +object SeqOps { + + // KMP search utilities + + /** A KMP implementation, based on the undoubtedly reliable wikipedia entry. + * Note: I made this private to keep it from entering the API. That can be reviewed. + * + * @param S Sequence that may contain target + * @param m0 First index of S to consider + * @param m1 Last index of S to consider (exclusive) + * @param W Target sequence + * @param n0 First index of W to match + * @param n1 Last index of W to match (exclusive) + * @param forward Direction of search (from beginning==true, from end==false) + * @return Index of start of sequence if found, -1 if not (relative to beginning of S, not m0). + */ + private def kmpSearch[B](S: scala.collection.Seq[B], m0: Int, m1: Int, W: scala.collection.Seq[B], n0: Int, n1: Int, forward: Boolean): Int = { + // Check for redundant case when target has single valid element + def clipR(x: Int, y: Int) = if (x < y) x else -1 + def clipL(x: Int, y: Int) = if (x > y) x else -1 + + if (n1 == n0+1) { + if (forward) + clipR(S.indexOf(W(n0), m0), m1) + else + clipL(S.lastIndexOf(W(n0), m1-1), m0-1) + } + + // Check for redundant case when both sequences are same size + else if (m1-m0 == n1-n0) { + // Accepting a little slowness for the uncommon case. 
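+ // (With an equal-length window and target, the only possible match is the
+ // whole window, so a single element-by-element comparison suffices.)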
+ if (S.iterator.slice(m0, m1).sameElements(W.iterator.slice(n0, n1))) m0 + else -1 + } + // Now we know we actually need KMP search, so do it + else S match { + case xs: scala.collection.IndexedSeq[_] => + // We can index into S directly; it should be adequately fast + val Wopt = kmpOptimizeWord(W, n0, n1, forward) + val T = kmpJumpTable(Wopt, n1-n0) + var i, m = 0 + val zero = if (forward) m0 else m1-1 + val delta = if (forward) 1 else -1 + while (i+m < m1-m0) { + if (Wopt(i) == S(zero+delta*(i+m))) { + i += 1 + if (i == n1-n0) return (if (forward) m+m0 else m1-m-i) + } + else { + val ti = T(i) + m += i - ti + if (i > 0) i = ti + } + } + -1 + case _ => + // We had better not index into S directly! + val iter = S.iterator.drop(m0) + val Wopt = kmpOptimizeWord(W, n0, n1, forward = true) + val T = kmpJumpTable(Wopt, n1-n0) + val cache = new Array[AnyRef](n1-n0) // Ring buffer--need a quick way to do a look-behind + var largest = 0 + var i, m = 0 + var answer = -1 + while (m+m0+n1-n0 <= m1) { + while (i+m >= largest) { + cache(largest%(n1-n0)) = iter.next().asInstanceOf[AnyRef] + largest += 1 + } + if (Wopt(i) == cache((i+m)%(n1-n0)).asInstanceOf[B]) { + i += 1 + if (i == n1-n0) { + if (forward) return m+m0 + else { + i -= 1 + answer = m+m0 + val ti = T(i) + m += i - ti + if (i > 0) i = ti + } + } + } + else { + val ti = T(i) + m += i - ti + if (i > 0) i = ti + } + } + answer + } + } + + /** Make sure a target sequence has fast, correctly-ordered indexing for KMP. + * + * @param W The target sequence + * @param n0 The first element in the target sequence that we should use + * @param n1 The far end of the target sequence that we should use (exclusive) + * @return Target packed in an IndexedSeq (taken from iterator unless W already is an IndexedSeq) + */ + private def kmpOptimizeWord[B](W: scala.collection.Seq[B], n0: Int, n1: Int, forward: Boolean): IndexedSeqView[B] = W match { + case iso: IndexedSeq[B] => + // Already optimized for indexing--use original (or custom view of original) + if (forward && n0==0 && n1==W.length) iso.view + else if (forward) new AbstractIndexedSeqView[B] { + val length = n1 - n0 + def apply(x: Int) = iso(n0 + x) + } + else new AbstractIndexedSeqView[B] { + def length = n1 - n0 + def apply(x: Int) = iso(n1 - 1 - x) + } + case _ => + // W is probably bad at indexing. Pack in array (in correct orientation) + // Would be marginally faster to special-case each direction + new AbstractIndexedSeqView[B] { + private[this] val Warr = new Array[AnyRef](n1-n0) + private[this] val delta = if (forward) 1 else -1 + private[this] val done = if (forward) n1-n0 else -1 + val wit = W.iterator.drop(n0) + var i = if (forward) 0 else (n1-n0-1) + while (i != done) { + Warr(i) = wit.next().asInstanceOf[AnyRef] + i += delta + } + + val length = n1 - n0 + def apply(x: Int) = Warr(x).asInstanceOf[B] + } + } + + /** Make a jump table for KMP search. + * + * @param Wopt The target sequence + * @param wlen Just in case we're only IndexedSeq and not IndexedSeqOptimized + * @return KMP jump table for target sequence + */ + private def kmpJumpTable[B](Wopt: IndexedSeqView[B], wlen: Int) = { + val arr = new Array[Int](wlen) + var pos = 2 + var cnd = 0 + arr(0) = -1 + arr(1) = 0 + while (pos < wlen) { + if (Wopt(pos-1) == Wopt(cnd)) { + arr(pos) = cnd + 1 + pos += 1 + cnd += 1 + } + else if (cnd > 0) { + cnd = arr(cnd) + } + else { + arr(pos) = 0 + pos += 1 + } + } + arr + } +} + +/** Explicit instantiation of the `Seq` trait to reduce class file size in subclasses. 
*/ +abstract class AbstractSeq[+A] extends AbstractIterable[A] with Seq[A] diff --git a/tests/pos-special/stdlib/collection/SeqMap.scala b/tests/pos-special/stdlib/collection/SeqMap.scala new file mode 100644 index 000000000000..a7f2c629b61d --- /dev/null +++ b/tests/pos-special/stdlib/collection/SeqMap.scala @@ -0,0 +1,41 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +import language.experimental.captureChecking + +import scala.annotation.nowarn + +/** + * A generic trait for ordered maps. Concrete classes have to provide + * functionality for the abstract methods in `SeqMap`. + * + * Note that when checking for equality [[SeqMap]] does not take into account + * ordering. + * + * @tparam K the type of the keys contained in this linked map. + * @tparam V the type of the values associated with the keys in this linked map. + * @define coll immutable seq map + * @define Coll `immutable.SeqMap` + */ + +trait SeqMap[K, +V] extends Map[K, V] + with MapOps[K, V, SeqMap, SeqMap[K, V]] + with MapFactoryDefaults[K, V, SeqMap, Iterable] { + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "SeqMap" + + override def mapFactory: MapFactory[SeqMap] = SeqMap +} + +object SeqMap extends MapFactory.Delegate[immutable.SeqMap](immutable.SeqMap) + diff --git a/tests/pos-special/stdlib/collection/SeqView.scala b/tests/pos-special/stdlib/collection/SeqView.scala new file mode 100644 index 000000000000..7ec22cd54c83 --- /dev/null +++ b/tests/pos-special/stdlib/collection/SeqView.scala @@ -0,0 +1,232 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.annotation.nowarn +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure +import scala.annotation.unchecked.uncheckedCaptures + +/** !!! Scala 2 difference: Need intermediate trait SeqViewOps to collect the + * necessary functionality over which SeqViews are defined, and at the same + * time allowing impure operations. Scala 2 uses SeqOps here, but SeqOps is + * pure, whereas SeqViews are Iterables which can be impure (for instance, + * mapping a SeqView with an impure function gives an impure view). 
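+ *
+ * For instance, mapping a view with an impure function `f` yields a view
+ * whose capture set includes `f`: the `map` method below returns
+ * `SeqView[B]^{this, f}`.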
+ */
+trait SeqViewOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] {
+ self: SeqViewOps[A, CC, C]^ =>
+
+ def length: Int
+ def apply(x: Int): A
+ def appended[B >: A](elem: B): CC[B]^{this}
+ def prepended[B >: A](elem: B): CC[B]^{this}
+ def reverse: C^{this}
+ def sorted[B >: A](implicit ord: Ordering[B]): C^{this}
+
+ def reverseIterator: Iterator[A]^{this} = reversed.iterator
+}
+
+trait SeqView[+A] extends SeqViewOps[A, View, View[A]] with View[A] {
+ self: SeqView[A]^ =>
+
+ override def view: SeqView[A]^{this} = this
+
+ override def map[B](f: A => B): SeqView[B]^{this, f} = new SeqView.Map(this, f)
+ override def appended[B >: A](elem: B): SeqView[B]^{this} = new SeqView.Appended(this, elem)
+ override def prepended[B >: A](elem: B): SeqView[B]^{this} = new SeqView.Prepended(elem, this)
+ override def reverse: SeqView[A]^{this} = new SeqView.Reverse(this)
+ override def take(n: Int): SeqView[A]^{this} = new SeqView.Take(this, n)
+ override def drop(n: Int): SeqView[A]^{this} = new SeqView.Drop(this, n)
+ override def takeRight(n: Int): SeqView[A]^{this} = new SeqView.TakeRight(this, n)
+ override def dropRight(n: Int): SeqView[A]^{this} = new SeqView.DropRight(this, n)
+ override def tapEach[U](f: A => U): SeqView[A]^{this, f} = new SeqView.Map(this, { (a: A) => f(a); a })
+
+ def concat[B >: A](suffix: SeqView.SomeSeqOps[B]): SeqView[B]^{this} = new SeqView.Concat(this, suffix)
+ def appendedAll[B >: A](suffix: SeqView.SomeSeqOps[B]): SeqView[B]^{this} = new SeqView.Concat(this, suffix)
+ def prependedAll[B >: A](prefix: SeqView.SomeSeqOps[B]): SeqView[B]^{this} = new SeqView.Concat(prefix, this)
+
+ override def sorted[B >: A](implicit ord: Ordering[B]): SeqView[A]^{this} = new SeqView.Sorted(this, ord)
+
+ @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+ override protected[this] def stringPrefix: String = "SeqView"
+}
+
+object SeqView {
+
+ /** A `SeqViewOps` whose collection type and collection type constructor are unknown */
+ private type SomeSeqOps[+A] = SeqViewOps[A, AnyConstr, _]
+
+ /** A view that doesn’t apply any transformation to an underlying sequence */
+ @SerialVersionUID(3L)
+ class Id[+A](underlying: SomeSeqOps[A]^) extends AbstractSeqView[A] {
+ def apply(idx: Int): A = underlying.apply(idx)
+ def length: Int = underlying.length
+ def iterator: Iterator[A]^{this} = underlying.iterator
+ override def knownSize: Int = underlying.knownSize
+ override def isEmpty: Boolean = underlying.isEmpty
+ }
+
+ @SerialVersionUID(3L)
+ class Map[+A, +B](underlying: SomeSeqOps[A]^, f: A => B) extends View.Map[A, B](underlying, f) with SeqView[B] {
+ def apply(idx: Int): B = f(underlying(idx))
+ def length: Int = underlying.length
+ }
+
+ @SerialVersionUID(3L)
+ class Appended[+A](underlying: SomeSeqOps[A]^, elem: A) extends View.Appended(underlying, elem) with SeqView[A] {
+ def apply(idx: Int): A = if (idx == underlying.length) elem else underlying(idx)
+ def length: Int = underlying.length + 1
+ }
+
+ @SerialVersionUID(3L)
+ class Prepended[+A](elem: A, underlying: SomeSeqOps[A]^) extends View.Prepended(elem, underlying) with SeqView[A] {
+ def apply(idx: Int): A = if (idx == 0) elem else underlying(idx - 1)
+ def length: Int = underlying.length + 1
+ }
+
+ @SerialVersionUID(3L)
+ class Concat[A](prefix: SomeSeqOps[A]^, suffix: SomeSeqOps[A]^) extends View.Concat[A](prefix, suffix) with SeqView[A] {
+ def apply(idx: Int): A = {
+ val l = prefix.length
+ if (idx < l) prefix(idx) else suffix(idx - l)
+ }
+ def length: Int = prefix.length
+ suffix.length + } + + @SerialVersionUID(3L) + class Reverse[A](underlying: SomeSeqOps[A]^) extends AbstractSeqView[A] { + def apply(i: Int) = underlying.apply(size - 1 - i) + def length = underlying.size + def iterator: Iterator[A]^{this} = underlying.reverseIterator + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + @SerialVersionUID(3L) + class Take[+A](underlying: SomeSeqOps[A]^, n: Int) extends View.Take(underlying, n) with SeqView[A] { + def apply(idx: Int): A = if (idx < n) { + underlying(idx) + } else { + throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${if (underlying.knownSize >= 0) knownSize - 1 else "unknown"})") + } + def length: Int = underlying.length min normN + } + + @SerialVersionUID(3L) + class TakeRight[+A](underlying: SomeSeqOps[A]^, n: Int) extends View.TakeRight(underlying, n) with SeqView[A] { + private[this] val delta = (underlying.size - (n max 0)) max 0 + def length = underlying.size - delta + @throws[IndexOutOfBoundsException] + def apply(i: Int) = underlying.apply(i + delta) + } + + @SerialVersionUID(3L) + class Drop[A](underlying: SomeSeqOps[A]^, n: Int) extends View.Drop[A](underlying, n) with SeqView[A] { + def length = (underlying.size - normN) max 0 + @throws[IndexOutOfBoundsException] + def apply(i: Int) = underlying.apply(i + normN) + override def drop(n: Int): SeqView[A]^{this} = new Drop(underlying, this.n + n) + } + + @SerialVersionUID(3L) + class DropRight[A](underlying: SomeSeqOps[A]^, n: Int) extends View.DropRight[A](underlying, n) with SeqView[A] { + private[this] val len = (underlying.size - (n max 0)) max 0 + def length = len + @throws[IndexOutOfBoundsException] + def apply(i: Int) = underlying.apply(i) + } + + @SerialVersionUID(3L) + class Sorted[A, B >: A] private (private[this] var underlying: SomeSeqOps[A]^, + private[this] val len: Int, + ord: Ordering[B]) + extends SeqView[A] { + outer: Sorted[A, B]^ => + + // force evaluation immediately by calling `length` so infinite collections + // hang on `sorted`/`sortWith`/`sortBy` rather than on arbitrary method calls + def this(underlying: SomeSeqOps[A]^, ord: Ordering[B]) = this(underlying, underlying.length, ord) + + @SerialVersionUID(3L) + private[this] class ReverseSorted extends SeqView[A] { + private[this] lazy val _reversed = new SeqView.Reverse(_sorted) + + def apply(i: Int): A = _reversed.apply(i) + def length: Int = len + def iterator: Iterator[A]^{this} = Iterator.empty ++ _reversed.iterator // very lazy + override def knownSize: Int = len + override def isEmpty: Boolean = len == 0 + override def to[C1](factory: Factory[A, C1]): C1 = _reversed.to(factory) + override def reverse: SeqView[A]^{outer} = outer + override protected def reversed: Iterable[A] = outer.unsafeAssumePure + + override def sorted[B1 >: A](implicit ord1: Ordering[B1]): SeqView[A]^{this} = + if (ord1 == Sorted.this.ord) outer.unsafeAssumePure + else if (ord1.isReverseOf(Sorted.this.ord)) this + else new Sorted(elems, len, ord1) + } + + @volatile private[this] var evaluated = false + + private[this] lazy val _sorted: Seq[A] = { + val res = { + val len = this.len + if (len == 0) Nil + else if (len == 1) List(underlying.head) + else { + val arr = new Array[Any](len) // Array[Any] =:= Array[AnyRef] + underlying.copyToArray(arr) + java.util.Arrays.sort(arr.asInstanceOf[Array[AnyRef]], ord.asInstanceOf[Ordering[AnyRef]]) + // casting the Array[AnyRef] to Array[A] and creating an ArraySeq from it + // is safe because: + // - the ArraySeq 
is immutable, and items that are not of type A
+ // cannot be added to it
+ // - we know it only contains items of type A (and if this collection
+ // contains items of another type, we'd get a CCE anyway)
+ // - the cast doesn't actually do anything in the runtime because the
+ // type of A is not known and Array[_] is Array[AnyRef]
+ immutable.ArraySeq.unsafeWrapArray(arr.asInstanceOf[Array[A]])
+ }
+ }
+ evaluated = true
+ underlying = null
+ res
+ }
+
+ private[this] def elems: SomeSeqOps[A]^{this} = {
+ val orig = underlying
+ if (evaluated) _sorted else orig
+ }
+
+ def apply(i: Int): A = _sorted.apply(i)
+ def length: Int = len
+ def iterator: Iterator[A]^{this} = Iterator.empty ++ _sorted.iterator // very lazy
+ override def knownSize: Int = len
+ override def isEmpty: Boolean = len == 0
+ override def to[C1](factory: Factory[A, C1]): C1 = _sorted.to(factory)
+ override def reverse: SeqView[A] = new ReverseSorted
+ // we know `_sorted` is either tiny or has efficient random access,
+ // so this is acceptable for `reversed`
+ override protected def reversed: Iterable[A] = new ReverseSorted
+
+ override def sorted[B1 >: A](implicit ord1: Ordering[B1]): SeqView[A]^{this} =
+ if (ord1 == this.ord) this
+ else if (ord1.isReverseOf(this.ord)) reverse
+ else new Sorted(elems, len, ord1)
+ }
+}
+
+/** Explicit instantiation of the `SeqView` trait to reduce class file size in subclasses. */
+@SerialVersionUID(3L)
+abstract class AbstractSeqView[+A] extends AbstractView[A] with SeqView[A]
diff --git a/tests/pos-special/stdlib/collection/Set.scala b/tests/pos-special/stdlib/collection/Set.scala
new file mode 100644
index 000000000000..a9c279b82a49
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/Set.scala
@@ -0,0 +1,271 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+
+import scala.util.hashing.MurmurHash3
+import java.lang.String
+
+import scala.annotation.nowarn
+import language.experimental.captureChecking
+
+/** Base trait for set collections.
+ */
+trait Set[A]
+ extends Iterable[A]
+ with SetOps[A, Set, Set[A]]
+ with Equals
+ with IterableFactoryDefaults[A, Set]
+ with Pure {
+ self: Set[A] =>
+
+ def canEqual(that: Any) = true
+
+ /**
+ * Equality of sets is implemented using the lookup method [[contains]]. This method returns `true` if
+ * - the argument `that` is a `Set`,
+ * - the two sets have the same [[size]], and
+ * - for every `element` of this set, `that.contains(element) == true`.
+ *
+ * The implementation of `equals` checks the [[canEqual]] method, so subclasses of `Set` can narrow down the equality
+ * to specific set types. The `Set` implementations in the standard library can all be compared; their `canEqual`
+ * methods return `true`.
+ *
+ * Note: The `equals` method only respects the equality laws (symmetry, transitivity) if the two sets use the same
+ * element equivalence function in their lookup operation. For example, the element equivalence operation in a
+ * [[scala.collection.immutable.TreeSet]] is defined by its ordering. Comparing a `TreeSet` with a `HashSet` leads
+ * to unexpected results if `ordering.equiv(e1, e2)` (used for lookup in `TreeSet`) is different from `e1 == e2`
+ * (used for lookup in `HashSet`).
+ *
+ * {{{
+ * scala> import scala.collection.immutable._
+ * scala> val ord: Ordering[String] = _ compareToIgnoreCase _
+ *
+ * scala> TreeSet("A")(ord) == HashSet("a")
+ * val res0: Boolean = false
+ *
+ * scala> HashSet("a") == TreeSet("A")(ord)
+ * val res1: Boolean = true
+ * }}}
+ *
+ *
+ * @param that The set to which this set is compared
+ * @return `true` if the two sets are equal according to the description
+ */
+ override def equals(that: Any): Boolean =
+ (this eq that.asInstanceOf[AnyRef]) || (that match {
+ case set: Set[A @unchecked] if set.canEqual(this) =>
+ (this.size == set.size) && {
+ try this.subsetOf(set)
+ catch { case _: ClassCastException => false } // PR #9565 / scala/bug#12228
+ }
+ case _ =>
+ false
+ })
+
+ override def hashCode(): Int = MurmurHash3.setHash(this)
+
+ override def iterableFactory: IterableFactory[Set] = Set
+
+ @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+ override protected[this] def stringPrefix: String = "Set"
+
+ override def toString(): String = super[Iterable].toString() // Because `Function1` overrides `toString` too
+}
+
+/** Base trait for set operations
+ *
+ * @define coll set
+ * @define Coll `Set`
+ */
+trait SetOps[A, +CC[_], +C <: SetOps[A, CC, C]]
+ extends IterableOps[A, CC, C], (A -> Boolean) { self =>
+
+ def contains(elem: A): Boolean
+
+ /** Tests if some element is contained in this set.
+ *
+ * This method is equivalent to `contains`. It allows sets to be interpreted as predicates.
+ * @param elem the element to test for membership.
+ * @return `true` if `elem` is contained in this set, `false` otherwise.
+ */
+ @`inline` final def apply(elem: A): Boolean = this.contains(elem)
+
+ /** Tests whether this set is a subset of another set.
+ *
+ * @param that the set to test.
+ * @return `true` if this set is a subset of `that`, i.e. if
+ * every element of this set is also an element of `that`.
+ */
+ def subsetOf(that: Set[A]): Boolean = this.forall(that)
+
+ /** An iterator over all subsets of this set of the given size.
+ * If the requested size is impossible, an empty iterator is returned.
+ *
+ * @param len the size of the subsets.
+ * @return the iterator.
+ */
+ def subsets(len: Int): Iterator[C] = {
+ if (len < 0 || len > size) Iterator.empty
+ else new SubsetsItr(this.to(IndexedSeq), len)
+ }
+
+ /** An iterator over all subsets of this set.
+ *
+ * @return the iterator.
+ */
+ def subsets(): Iterator[C] = new AbstractIterator[C] {
+ private[this] val elms = SetOps.this.to(IndexedSeq)
+ private[this] var len = 0
+ private[this] var itr: Iterator[C] = Iterator.empty
+
+ def hasNext = len <= elms.size || itr.hasNext
+ def next() = {
+ if (!itr.hasNext) {
+ if (len > elms.size) Iterator.empty.next()
+ else {
+ itr = new SubsetsItr(elms, len)
+ len += 1
+ }
+ }
+
+ itr.next()
+ }
+ }
+
+ /** An Iterator including all subsets containing exactly len elements.
+ * If the elements of this set are ordered, then the subsets will also be generated in the same order.
+ * ListSet(1,2,3).subsets => {{1},{2},{3},{1,2},{1,3},{2,3},{1,2,3}}
+ *
+ * $willForceEvaluation
+ *
+ */
+ private class SubsetsItr(elms: IndexedSeq[A], len: Int) extends AbstractIterator[C] {
+ private[this] val idxs = Array.range(0, len+1)
+ private[this] var _hasNext = true
+ idxs(len) = elms.size
+
+ def hasNext = _hasNext
+ @throws[NoSuchElementException]
+ def next(): C = {
+ if (!hasNext) Iterator.empty.next()
+
+ val buf = newSpecificBuilder
+ idxs.slice(0, len) foreach (idx => buf += elms(idx))
+ val result = buf.result()
+
+ var i = len - 1
+ while (i >= 0 && idxs(i) == idxs(i+1)-1) i -= 1
+
+ if (i < 0) _hasNext = false
+ else {
+ idxs(i) += 1
+ for (j <- (i+1) until len)
+ idxs(j) = idxs(j-1) + 1
+ }
+
+ result
+ }
+ }
+
+ /** Computes the intersection between this set and another set.
+ *
+ * @param that the set to intersect with.
+ * @return a new set consisting of all elements that are both in this
+ * set and in the given set `that`.
+ */
+ def intersect(that: Set[A]): C = this.filter(that)
+
+ /** Alias for `intersect` */
+ @`inline` final def & (that: Set[A]): C = intersect(that)
+
+ /** Computes the difference of this set and another set.
+ *
+ * @param that the set of elements to exclude.
+ * @return a set containing those elements of this
+ * set that are not also contained in the given set `that`.
+ */
+ def diff(that: Set[A]): C
+
+ /** Alias for `diff` */
+ @`inline` final def &~ (that: Set[A]): C = this diff that
+
+ @deprecated("Consider requiring an immutable Set", "2.13.0")
+ def -- (that: IterableOnce[A]): C = {
+ val toRemove = that.iterator.to(immutable.Set)
+ fromSpecific(view.filterNot(toRemove))
+ }
+
+ @deprecated("Consider requiring an immutable Set or fall back to Set.diff", "2.13.0")
+ def - (elem: A): C = diff(Set(elem))
+
+ @deprecated("Use &~ with an explicit collection argument instead of - with varargs", "2.13.0")
+ def - (elem1: A, elem2: A, elems: A*): C = diff(elems.toSet + elem1 + elem2)
+
+ /** Creates a new $coll by adding all elements contained in another collection to this $coll, omitting duplicates.
+ *
+ * This method takes a collection of elements and adds all elements, omitting duplicates, into this $coll.
+ *
+ * Example:
+ * {{{
+ * scala> val a = Set(1, 2) concat Set(2, 3)
+ * a: scala.collection.immutable.Set[Int] = Set(1, 2, 3)
+ * }}}
+ *
+ * @param that the collection containing the elements to add.
+ * @return a new $coll with the given elements added, omitting duplicates.
+ */
+ def concat(that: collection.IterableOnce[A]): C = this match {
+ case optimizedSet @ (_ : scala.collection.immutable.Set.Set1[A] | _: scala.collection.immutable.Set.Set2[A] | _: scala.collection.immutable.Set.Set3[A] | _: scala.collection.immutable.Set.Set4[A]) =>
+ // StrictOptimizedSetOps optimization of concat (these Sets cannot extend StrictOptimizedSetOps because of binary-incompatible return type; cf. PR #10036)
+ var result = optimizedSet.asInstanceOf[scala.collection.immutable.SetOps[A, scala.collection.immutable.Set, scala.collection.immutable.Set[A]]]
+ val it = that.iterator
+ while (it.hasNext) result = result + it.next()
+ result.asInstanceOf[C]
+ case _ => fromSpecific(that match {
+ case that: collection.Iterable[A] => new View.Concat(this, that)
+ case _ => iterator.concat(that.iterator)
+ })
+ }
+
+ @deprecated("Consider requiring an immutable Set or fall back to Set.union", "2.13.0")
+ def + (elem: A): C = fromSpecific(new View.Appended(this, elem))
+
+ @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0")
+ def + (elem1: A, elem2: A, elems: A*): C = fromSpecific(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems))
+
+ /** Alias for `concat` */
+ @`inline` final def ++ (that: collection.IterableOnce[A]): C = concat(that)
+
+ /** Computes the union between this set and another set.
+ *
+ * @param that the set to form the union with.
+ * @return a new set consisting of all elements that are in this
+ * set or in the given set `that`.
+ */
+ @`inline` final def union(that: Set[A]): C = concat(that)
+
+ /** Alias for `union` */
+ @`inline` final def | (that: Set[A]): C = concat(that)
+}
+
+/**
+ * $factoryInfo
+ * @define coll set
+ * @define Coll `Set`
+ */
+@SerialVersionUID(3L)
+object Set extends IterableFactory.Delegate[Set](immutable.Set)
+
+/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. */
+abstract class AbstractSet[A] extends AbstractIterable[A] with Set[A]
diff --git a/tests/pos-special/stdlib/collection/SortedMap.scala b/tests/pos-special/stdlib/collection/SortedMap.scala
new file mode 100644
index 000000000000..7b9381ebb078
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/SortedMap.scala
@@ -0,0 +1,222 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */ + +package scala +package collection + +import scala.annotation.{implicitNotFound, nowarn} +import language.experimental.captureChecking + +/** A Map whose keys are sorted according to a [[scala.math.Ordering]]*/ +trait SortedMap[K, +V] + extends Map[K, V] + with SortedMapOps[K, V, SortedMap, SortedMap[K, V]] + with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map]{ + + def unsorted: Map[K, V] = this + + def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "SortedMap" + + override def equals(that: Any): Boolean = that match { + case _ if this eq that.asInstanceOf[AnyRef] => true + case sm: SortedMap[K @unchecked, _] if sm.ordering == this.ordering => + (sm canEqual this) && + (this.size == sm.size) && { + val i1 = this.iterator + val i2 = sm.iterator + var allEqual = true + while (allEqual && i1.hasNext) { + val kv1 = i1.next() + val kv2 = i2.next() + allEqual = ordering.equiv(kv1._1, kv2._1) && kv1._2 == kv2._2 + } + allEqual + } + case _ => super.equals(that) + } +} + +trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends MapOps[K, V, Map, C] + with SortedOps[K, C] + with Pure { + + /** The companion object of this sorted map, providing various factory methods. + * + * @note When implementing a custom collection type and refining `CC` to the new type, this + * method needs to be overridden to return a factory for the new type (the compiler will + * issue an error otherwise). + */ + def sortedMapFactory: SortedMapFactory[CC] + + /** Similar to `mapFromIterable`, but returns a SortedMap collection type. + * Note that the return type is now `CC[K2, V2]`. + */ + @`inline` protected final def sortedMapFromIterable[K2, V2](it: Iterable[(K2, V2)])(implicit ordering: Ordering[K2]): CC[K2, V2] = sortedMapFactory.from(it) + + def unsorted: Map[K, V] + + /** + * Creates an iterator over all the key/value pairs + * contained in this map having a key greater than or + * equal to `start` according to the ordering of + * this map. x.iteratorFrom(y) is equivalent + * to but often more efficient than x.from(y).iterator. + * + * @param start The lower bound (inclusive) + * on the keys to be returned + */ + def iteratorFrom(start: K): Iterator[(K, V)] + + /** + * Creates an iterator over all the keys(or elements) contained in this + * collection greater than or equal to `start` + * according to the ordering of this collection. x.keysIteratorFrom(y) + * is equivalent to but often more efficient than + * x.from(y).keysIterator. + * + * @param start The lower bound (inclusive) + * on the keys to be returned + */ + def keysIteratorFrom(start: K): Iterator[K] + + /** + * Creates an iterator over all the values contained in this + * map that are associated with a key greater than or equal to `start` + * according to the ordering of this map. x.valuesIteratorFrom(y) is + * equivalent to but often more efficient than + * x.from(y).valuesIterator. + * + * @param start The lower bound (inclusive) + * on the keys to be returned + */ + def valuesIteratorFrom(start: K): Iterator[V] = iteratorFrom(start).map(_._2) + + def firstKey: K = head._1 + def lastKey: K = last._1 + + /** Find the element with smallest key larger than or equal to a given key. + * @param key The given key. + * @return `None` if there is no such node. 
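+ * @example {{{
+ * // Illustrative values, using immutable.TreeMap (a SortedMap):
+ * import scala.collection.immutable.TreeMap
+ * TreeMap(1 -> "a", 3 -> "c").minAfter(2) // Some((3, "c"))
+ * TreeMap(1 -> "a", 3 -> "c").minAfter(4) // None
+ * }}}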
+ */ + def minAfter(key: K): Option[(K, V)] = rangeFrom(key).headOption + + /** Find the element with largest key less than a given key. + * @param key The given key. + * @return `None` if there is no such node. + */ + def maxBefore(key: K): Option[(K, V)] = rangeUntil(key).lastOption + + def rangeTo(to: K): C = { + val i = keySet.rangeFrom(to).iterator + if (i.isEmpty) return coll + val next = i.next() + if (ordering.compare(next, to) == 0) + if (i.isEmpty) coll + else rangeUntil(i.next()) + else + rangeUntil(next) + } + + override def keySet: SortedSet[K] = new KeySortedSet + + /** The implementation class of the set returned by `keySet` */ + protected class KeySortedSet extends SortedSet[K] with GenKeySet with GenKeySortedSet { + def diff(that: Set[K]): SortedSet[K] = fromSpecific(view.filterNot(that)) + def rangeImpl(from: Option[K], until: Option[K]): SortedSet[K] = { + val map = SortedMapOps.this.rangeImpl(from, until) + new map.KeySortedSet + } + } + + /** A generic trait that is reused by sorted keyset implementations */ + protected trait GenKeySortedSet extends GenKeySet { this: SortedSet[K] => + implicit def ordering: Ordering[K] = SortedMapOps.this.ordering + def iteratorFrom(start: K): Iterator[K] = SortedMapOps.this.keysIteratorFrom(start) + } + + // And finally, we add new overloads taking an ordering + /** Builds a new sorted map by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. + */ + def map[K2, V2](f: ((K, V)) => (K2, V2))(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f)) + + /** Builds a new sorted map by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. + */ + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + sortedMapFactory.from(new View.FlatMap(this, f)) + + /** Builds a new sorted map by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. 
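+ * @example {{{
+ * // Illustrative values, using immutable.TreeMap (a SortedMap):
+ * import scala.collection.immutable.TreeMap
+ * TreeMap(1 -> "a", 2 -> "b", 3 -> "c")
+ * .collect { case (k, v) if k % 2 == 1 => (k, v.toUpperCase) } // TreeMap(1 -> "A", 3 -> "C")
+ * }}}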
+ */ + def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + sortedMapFactory.from(new View.Collect(this, pf)) + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): CC[K, V2] = sortedMapFactory.from(suffix match { + case it: Iterable[(K, V2)] => new View.Concat(this, it) + case _ => iterator.concat(suffix.iterator) + })(ordering) + + /** Alias for `concat` */ + @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]^): CC[K, V2] = concat(xs) + + @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") + override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = sortedMapFactory.from(new View.Appended(this, kv))(ordering) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems))(ordering) +} + +object SortedMapOps { + private[collection] final val ordMsg = "No implicit Ordering[${K2}] found to build a SortedMap[${K2}, ${V2}]. You may want to upcast to a Map[${K}, ${V}] first by calling `unsorted`." + + /** Specializes `MapWithFilter` for sorted Map collections + * + * @define coll sorted map collection + */ + class WithFilter[K, +V, +IterableCC[_], +MapCC[X, Y] <: Map[X, Y], +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _]]( + self: SortedMapOps[K, V, CC, _] with MapOps[K, V, MapCC, _] with IterableOps[(K, V), IterableCC, _], + p: ((K, V)) => Boolean + ) extends MapOps.WithFilter[K, V, IterableCC, MapCC](self, p) { + + def map[K2 : Ordering, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = + self.sortedMapFactory.from(new View.Map(filtered, f)) + + def flatMap[K2 : Ordering, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2] = + self.sortedMapFactory.from(new View.FlatMap(filtered, f)) + + override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, MapCC, CC]^{this, q} = + new WithFilter[K, V, IterableCC, MapCC, CC](self, (kv: (K, V)) => p(kv) && q(kv)) + + } + +} + +@SerialVersionUID(3L) +object SortedMap extends SortedMapFactory.Delegate[SortedMap](immutable.SortedMap) diff --git a/tests/pos-special/stdlib/collection/SortedOps.scala b/tests/pos-special/stdlib/collection/SortedOps.scala new file mode 100644 index 000000000000..16751d86d9d5 --- /dev/null +++ b/tests/pos-special/stdlib/collection/SortedOps.scala @@ -0,0 +1,91 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import language.experimental.captureChecking + +/** Base trait for sorted collections */ +trait SortedOps[A, +C] { + + def ordering: Ordering[A] + + /** Returns the first key of the collection. */ + def firstKey: A + + /** Returns the last key of the collection. */ + def lastKey: A + + /** Comparison function that orders keys. */ + @deprecated("Use ordering.compare instead", "2.13.0") + @deprecatedOverriding("Use ordering.compare instead", "2.13.0") + @inline def compare(k0: A, k1: A): Int = ordering.compare(k0, k1) + + /** Creates a ranged projection of this collection. 
Any mutations in the + * ranged projection will update this collection and vice versa. + * + * Note: keys are not guaranteed to be consistent between this collection + * and the projection. This is the case for buffers where indexing is + * relative to the projection. + * + * @param from The lower-bound (inclusive) of the ranged projection. + * `None` if there is no lower bound. + * @param until The upper-bound (exclusive) of the ranged projection. + * `None` if there is no upper bound. + */ + def rangeImpl(from: Option[A], until: Option[A]): C + + /** Creates a ranged projection of this collection with both a lower-bound + * and an upper-bound. + * + * @param from The lower-bound (inclusive) of the ranged projection. + * @param until The upper-bound (exclusive) of the ranged projection. + */ + def range(from: A, until: A): C = rangeImpl(Some(from), Some(until)) + + /** Creates a ranged projection of this collection with no upper-bound. + * + * @param from The lower-bound (inclusive) of the ranged projection. + */ + @deprecated("Use rangeFrom", "2.13.0") + final def from(from: A): C = rangeFrom(from) + + /** Creates a ranged projection of this collection with no upper-bound. + * + * @param from The lower-bound (inclusive) of the ranged projection. + */ + def rangeFrom(from: A): C = rangeImpl(Some(from), None) + + /** Creates a ranged projection of this collection with no lower-bound. + * + * @param until The upper-bound (exclusive) of the ranged projection. + */ + @deprecated("Use rangeUntil", "2.13.0") + final def until(until: A): C = rangeUntil(until) + + /** Creates a ranged projection of this collection with no lower-bound. + * + * @param until The upper-bound (exclusive) of the ranged projection. + */ + def rangeUntil(until: A): C = rangeImpl(None, Some(until)) + + /** Create a range projection of this collection with no lower-bound. + * @param to The upper-bound (inclusive) of the ranged projection. + */ + @deprecated("Use rangeTo", "2.13.0") + final def to(to: A): C = rangeTo(to) + + /** Create a range projection of this collection with no lower-bound. + * @param to The upper-bound (inclusive) of the ranged projection. + */ + def rangeTo(to: A): C +} diff --git a/tests/pos-special/stdlib/collection/SortedSet.scala b/tests/pos-special/stdlib/collection/SortedSet.scala new file mode 100644 index 000000000000..fb2f879edcd2 --- /dev/null +++ b/tests/pos-special/stdlib/collection/SortedSet.scala @@ -0,0 +1,190 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection + +import scala.annotation.{implicitNotFound, nowarn} +import scala.annotation.unchecked.uncheckedVariance +import language.experimental.captureChecking + +/** Base type of sorted sets */ +trait SortedSet[A] extends Set[A] + with SortedSetOps[A, SortedSet, SortedSet[A]] + with SortedSetFactoryDefaults[A, SortedSet, Set] { + + def unsorted: Set[A] = this + + def sortedIterableFactory: SortedIterableFactory[SortedSet] = SortedSet + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "SortedSet" + + override def equals(that: Any): Boolean = that match { + case _ if this eq that.asInstanceOf[AnyRef] => true + case ss: SortedSet[A @unchecked] if ss.ordering == this.ordering => + (ss canEqual this) && + (this.size == ss.size) && { + val i1 = this.iterator + val i2 = ss.iterator + var allEqual = true + while (allEqual && i1.hasNext) + allEqual = ordering.equiv(i1.next(), i2.next()) + allEqual + } + case _ => + super.equals(that) + } + +} + +trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] + extends SetOps[A, Set, C] + with SortedOps[A, C] { + + /** The companion object of this sorted set, providing various factory methods. + * + * @note When implementing a custom collection type and refining `CC` to the new type, this + * method needs to be overridden to return a factory for the new type (the compiler will + * issue an error otherwise). + */ + def sortedIterableFactory: SortedIterableFactory[CC] + + def unsorted: Set[A] + + /** + * Creates an iterator that contains all values from this collection + * greater than or equal to `start` according to the ordering of + * this collection. x.iteratorFrom(y) is equivalent to but will usually + * be more efficient than x.from(y).iterator + * + * @param start The lower-bound (inclusive) of the iterator + */ + def iteratorFrom(start: A): Iterator[A] + + @deprecated("Use `iteratorFrom` instead.", "2.13.0") + @`inline` def keysIteratorFrom(start: A): Iterator[A] = iteratorFrom(start) + + def firstKey: A = head + def lastKey: A = last + + /** Find the smallest element larger than or equal to a given key. + * @param key The given key. + * @return `None` if there is no such node. + */ + def minAfter(key: A): Option[A] = rangeFrom(key).headOption + + /** Find the largest element less than a given key. + * @param key The given key. + * @return `None` if there is no such node. + */ + def maxBefore(key: A): Option[A] = rangeUntil(key).lastOption + + override def min[B >: A](implicit ord: Ordering[B]): A = + if (isEmpty) throw new UnsupportedOperationException("empty.min") + else if (ord == ordering) head + else if (ord isReverseOf ordering) last + else super.min[B] // need the type annotation for it to infer the correct implicit + + override def max[B >: A](implicit ord: Ordering[B]): A = + if (isEmpty) throw new UnsupportedOperationException("empty.max") + else if (ord == ordering) last + else if (ord isReverseOf ordering) head + else super.max[B] // need the type annotation for it to infer the correct implicit + + def rangeTo(to: A): C = { + val i = rangeFrom(to).iterator + if (i.isEmpty) return coll + val next = i.next() + if (ordering.compare(next, to) == 0) + if (i.isEmpty) coll + else rangeUntil(i.next()) + else + rangeUntil(next) + } + + /** Builds a new sorted collection by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. 
+ * @tparam B the element type of the returned collection. + * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. + */ + def map[B](f: A => B)(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + sortedIterableFactory.from(new View.Map(this, f)) + + /** Builds a new sorted collection by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned collection. + * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. + */ + def flatMap[B](f: A => IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + sortedIterableFactory.from(new View.FlatMap(this, f)) + + /** Returns a $coll formed from this $coll and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is longer than the other, its remaining elements are ignored. + * + * @param that The iterable providing the second half of each result pair + * @tparam B the type of the second half of the returned pairs + * @return a new $coll containing pairs consisting of corresponding elements of this $coll and `that`. + * The length of the returned collection is the minimum of the lengths of this $coll and `that`. + */ + def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.zipOrdMsg) ev: Ordering[(A @uncheckedVariance, B)]): CC[(A @uncheckedVariance, B)] = // sound bcs of VarianceNote + sortedIterableFactory.from(that match { + case that: Iterable[B] => new View.Zip(this, that) + case _ => iterator.zip(that) + }) + + /** Builds a new sorted collection by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @tparam B the element type of the returned collection. + * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. + */ + def collect[B](pf: scala.PartialFunction[A, B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + sortedIterableFactory.from(new View.Collect(this, pf)) +} + +object SortedSetOps { + private[collection] final val ordMsg = "No implicit Ordering[${B}] found to build a SortedSet[${B}]. You may want to upcast to a Set[${A}] first by calling `unsorted`." + private[collection] final val zipOrdMsg = "No implicit Ordering[${B}] found to build a SortedSet[(${A}, ${B})]. You may want to upcast to a Set[${A}] first by calling `unsorted`." 
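+
+  // A usage sketch for the ordering-requiring overloads above (hypothetical code,
+  // not part of this object): mapping to a type without an Ordering trips `ordMsg`,
+  // while `unsorted` sidesteps the requirement.
+  //   import scala.collection.immutable.TreeSet
+  //   case class Point(x: Int)                 // no Ordering[Point] in scope
+  //   TreeSet(1, 2, 3).map(Point(_))           // does not compile: ordMsg is reported
+  //   TreeSet(1, 2, 3).unsorted.map(Point(_))  // compiles: Set[Point]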
+
+  /** Specialize `WithFilter` for sorted collections
+   *
+   * @define coll sorted collection
+   */
+  class WithFilter[+A, +IterableCC[_], +CC[X] <: SortedSet[X]](
+    self: SortedSetOps[A, CC, _] with IterableOps[A, IterableCC, _],
+    p: A => Boolean
+  ) extends IterableOps.WithFilter[A, IterableCC](self, p) {
+
+    def map[B : Ordering](f: A => B): CC[B] =
+      self.sortedIterableFactory.from(new View.Map(filtered, f))
+
+    def flatMap[B : Ordering](f: A => IterableOnce[B]): CC[B] =
+      self.sortedIterableFactory.from(new View.FlatMap(filtered, f))
+
+    override def withFilter(q: A => Boolean): WithFilter[A, IterableCC, CC]^{this, q} =
+      new WithFilter[A, IterableCC, CC](self, (a: A) => p(a) && q(a))
+  }
+
+}
+
+@SerialVersionUID(3L)
+object SortedSet extends SortedIterableFactory.Delegate[SortedSet](immutable.SortedSet)
+
diff --git a/tests/pos-special/stdlib/collection/Stepper.scala b/tests/pos-special/stdlib/collection/Stepper.scala
new file mode 100644
index 000000000000..0a0ac0075990
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/Stepper.scala
@@ -0,0 +1,378 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+
+import java.util.function.{Consumer, DoubleConsumer, IntConsumer, LongConsumer}
+import java.util.{PrimitiveIterator, Spliterator, Iterator => JIterator}
+import java.{lang => jl}
+import language.experimental.captureChecking
+
+import scala.collection.Stepper.EfficientSplit
+
+/** Steppers exist to enable creating Java streams over Scala collections, see
+ * [[scala.jdk.StreamConverters]]. Besides that use case, they allow iterating over collections
+ * holding unboxed primitives (e.g., `Array[Int]`) without boxing the elements.
+ *
+ * Steppers have an iterator-like interface with methods `hasStep` and `nextStep()`. The difference
+ * from iterators - and the reason `Stepper` is not a subtype of `Iterator` - is that there are
+ * hand-specialized variants of `Stepper` for `Int`, `Long` and `Double` ([[IntStepper]], etc.).
+ * These enable iterating over collections holding unboxed primitives (e.g., Arrays,
+ * [[scala.jdk.Accumulator]]s) without boxing the elements.
+ *
+ * The selection of primitive types (`Int`, `Long` and `Double`) matches the hand-specialized
+ * variants of Java Streams ([[java.util.stream.Stream]], [[java.util.stream.IntStream]], etc.)
+ * and the corresponding Java Spliterators ([[java.util.Spliterator]], [[java.util.Spliterator.OfInt]], etc.).
+ *
+ * Steppers can be converted to Scala Iterators, Java Iterators and Java Spliterators. Primitive
+ * Steppers are converted to the corresponding primitive Java Iterators and Spliterators.
+ *
+ * @tparam A the element type of the Stepper
+ */
+trait Stepper[@specialized(Double, Int, Long) +A] {
+  this: Stepper[A]^ =>
+
+  /** Check if there's an element available. */
+  def hasStep: Boolean
+
+  /** Return the next element and advance the stepper */
+  def nextStep(): A
+
+  /** Split this stepper, if applicable. The elements of the current Stepper are split up between
+   * the resulting Stepper and the current stepper.
+   *
+   * May return `null`, in which case the current Stepper yields the same elements as before.
+   *
+   * See method `trySplit` in [[java.util.Spliterator]].
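+   *
+   * A minimal draining sketch (assuming some `Stepper[Int]` value `st`): the returned
+   * stepper, when non-null, yields a prefix of the remaining elements.
+   * {{{
+   * def drain(st: Stepper[Int]): List[Int] = {
+   *   val prefix = st.trySplit()               // may be null
+   *   val front = if (prefix == null) Nil else drain(prefix)
+   *   var rest = List.empty[Int]
+   *   while (st.hasStep) rest ::= st.nextStep()
+   *   front ::: rest.reverse
+   * }
+   * }}}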
+   */
+  def trySplit(): Stepper[A]
+
+  /** Returns an estimate of the number of elements of this Stepper, or [[Long.MaxValue]]. See
+   * method `estimateSize` in [[java.util.Spliterator]].
+   */
+  def estimateSize: Long
+
+  /** Returns a set of characteristics of this Stepper and its elements. See method
+   * `characteristics` in [[java.util.Spliterator]].
+   */
+  def characteristics: Int
+
+  /** Returns a [[java.util.Spliterator]] corresponding to this Stepper.
+   *
+   * Note that the return type is `Spliterator[_]` instead of `Spliterator[A]` to allow returning
+   * a [[java.util.Spliterator.OfInt]] (which is a `Spliterator[Integer]`) in the subclass [[IntStepper]]
+   * (which is a `Stepper[Int]`).
+   */
+  def spliterator[B >: A]: Spliterator[_]
+
+  /** Returns a Java [[java.util.Iterator]] corresponding to this Stepper.
+   *
+   * Note that the return type is `Iterator[_]` instead of `Iterator[A]` to allow returning
+   * a [[java.util.PrimitiveIterator.OfInt]] (which is an `Iterator[Integer]`) in the subclass
+   * [[IntStepper]] (which is a `Stepper[Int]`).
+   */
+  def javaIterator[B >: A]: JIterator[_]
+
+  /** Returns an [[Iterator]] corresponding to this Stepper. Note that Iterators corresponding to
+   * primitive Steppers box the elements.
+   */
+  def iterator: Iterator[A] = new AbstractIterator[A] {
+    def hasNext: Boolean = hasStep
+    def next(): A = nextStep()
+  }
+}
+
+object Stepper {
+  /** A marker trait that indicates that a `Stepper` can call `trySplit` with at worst O(log N) time
+   * and space complexity, and that the division is likely to be reasonably even. Steppers marked
+   * with `EfficientSplit` can be converted to parallel streams with the `asJavaParStream` method
+   * defined in [[scala.jdk.StreamConverters]].
+   */
+  trait EfficientSplit
+
+  private[collection] final def throwNSEE(): Nothing = throw new NoSuchElementException("Empty Stepper")
+
+  /* These adapter classes can wrap an AnyStepper of a numeric type into a possibly widened primitive Stepper type.
+   * This provides a basis for more efficient stream processing on unboxed values provided that the original source
+   * of the data is boxed. In other cases native implementations of the primitive stepper types should be provided
+   * (see for example IntArrayStepper and WidenedByteArrayStepper).
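+   *
+   * A hedged sketch of the wrapping (the value `boxed` is assumed, not part of this file):
+   *   val boxed: AnyStepper[Int] = ...                        // steps over boxed Integers
+   *   val unboxed: IntStepper = new UnboxingIntStepper(boxed)
+   *   while (unboxed.hasStep) { val i: Int = unboxed.nextStep() }  // primitive Int steps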
*/ + + private[collection] class UnboxingDoubleStepper(st: AnyStepper[Double]) extends DoubleStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Double = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): DoubleStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingDoubleStepper(s) + } + } + + private[collection] class UnboxingIntStepper(st: AnyStepper[Int]) extends IntStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Int = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): IntStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingIntStepper(s) + } + } + + private[collection] class UnboxingLongStepper(st: AnyStepper[Long]) extends LongStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Long = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): LongStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingLongStepper(s) + } + } + + private[collection] class UnboxingByteStepper(st: AnyStepper[Byte]) extends IntStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Int = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): IntStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingByteStepper(s) + } + } + + private[collection] class UnboxingCharStepper(st: AnyStepper[Char]) extends IntStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Int = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): IntStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingCharStepper(s) + } + } + + private[collection] class UnboxingShortStepper(st: AnyStepper[Short]) extends IntStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Int = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): IntStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingShortStepper(s) + } + } + + private[collection] class UnboxingFloatStepper(st: AnyStepper[Float]) extends DoubleStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Double = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): DoubleStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingFloatStepper(s) + } + } +} + +/** A Stepper for arbitrary element types. See [[Stepper]]. 
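+ *
+ *  A hedged sketch of the conversions this trait adds (the value `st` is assumed):
+ *  {{{
+ *  val st: AnyStepper[String] = ...
+ *  val sp: java.util.Spliterator[String] = st.spliterator  // boxed Spliterator
+ *  val it: java.util.Iterator[String] = st.javaIterator    // boxed Java iterator
+ *  }}}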
*/ +trait AnyStepper[+A] extends Stepper[A] { + this: AnyStepper[A]^ => + + def trySplit(): AnyStepper[A] + + def spliterator[B >: A]: Spliterator[B]^{this} = new AnyStepper.AnyStepperSpliterator(this) + + def javaIterator[B >: A]: JIterator[B] = new JIterator[B] { + def hasNext: Boolean = hasStep + def next(): B = nextStep() + } +} + +object AnyStepper { + class AnyStepperSpliterator[A](s: AnyStepper[A]^) extends Spliterator[A] { + def tryAdvance(c: Consumer[_ >: A]): Boolean = + if (s.hasStep) { c.accept(s.nextStep()); true } else false + def trySplit(): Spliterator[A]^{this} = { + val sp = s.trySplit() + if (sp == null) null else sp.spliterator + } + def estimateSize(): Long = s.estimateSize + def characteristics(): Int = s.characteristics + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: Consumer[_ >: A]): Unit = + while (s.hasStep) { c.accept(s.nextStep()) } + } + + def ofSeqDoubleStepper(st: DoubleStepper): AnyStepper[Double] = new BoxedDoubleStepper(st) + def ofParDoubleStepper(st: DoubleStepper with EfficientSplit): AnyStepper[Double] with EfficientSplit = new BoxedDoubleStepper(st) with EfficientSplit + + def ofSeqIntStepper(st: IntStepper): AnyStepper[Int] = new BoxedIntStepper(st) + def ofParIntStepper(st: IntStepper with EfficientSplit): AnyStepper[Int] with EfficientSplit = new BoxedIntStepper(st) with EfficientSplit + + def ofSeqLongStepper(st: LongStepper): AnyStepper[Long] = new BoxedLongStepper(st) + def ofParLongStepper(st: LongStepper with EfficientSplit): AnyStepper[Long] with EfficientSplit = new BoxedLongStepper(st) with EfficientSplit + + private[collection] class BoxedDoubleStepper(st: DoubleStepper) extends AnyStepper[Double] { + def hasStep: Boolean = st.hasStep + def nextStep(): Double = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): AnyStepper[Double] = { + val s = st.trySplit() + if (s == null) null else new BoxedDoubleStepper(s) + } + } + + private[collection] class BoxedIntStepper(st: IntStepper) extends AnyStepper[Int] { + def hasStep: Boolean = st.hasStep + def nextStep(): Int = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): AnyStepper[Int] = { + val s = st.trySplit() + if (s == null) null else new BoxedIntStepper(s) + } + } + + private[collection] class BoxedLongStepper(st: LongStepper) extends AnyStepper[Long] { + def hasStep: Boolean = st.hasStep + def nextStep(): Long = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): AnyStepper[Long] = { + val s = st.trySplit() + if (s == null) null else new BoxedLongStepper(s) + } + } +} + +/** A Stepper for Ints. See [[Stepper]]. 
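+ *
+ *  `nextStep()` returns a primitive `Int` and `spliterator` is a `Spliterator.OfInt`,
+ *  so no boxing occurs. A sketch (relying on the standard `stepper` extension for
+ *  `Array[Int]`, stated here as an assumption):
+ *  {{{
+ *  val st: IntStepper = Array(1, 2, 3).stepper
+ *  var sum = 0
+ *  while (st.hasStep) sum += st.nextStep()  // sum == 6
+ *  }}}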
*/ +trait IntStepper extends Stepper[Int] { + this: IntStepper^ => + + def trySplit(): IntStepper + + def spliterator[B >: Int]: Spliterator.OfInt^{this} = new IntStepper.IntStepperSpliterator(this) + + def javaIterator[B >: Int]: PrimitiveIterator.OfInt = new PrimitiveIterator.OfInt { + def hasNext: Boolean = hasStep + def nextInt(): Int = nextStep() + } +} +object IntStepper { + class IntStepperSpliterator(s: IntStepper^) extends Spliterator.OfInt { + def tryAdvance(c: IntConsumer): Boolean = + if (s.hasStep) { c.accept(s.nextStep()); true } else false + // Override for efficiency: don't wrap the function and call the `tryAdvance` overload + override def tryAdvance(c: Consumer[_ >: jl.Integer]): Boolean = (c: AnyRef) match { + case ic: IntConsumer => tryAdvance(ic) + case _ => if (s.hasStep) { c.accept(jl.Integer.valueOf(s.nextStep())); true } else false + } + // override required for dotty#6152 + override def trySplit(): Spliterator.OfInt^{this} = { + val sp = s.trySplit() + if (sp == null) null else sp.spliterator + } + def estimateSize(): Long = s.estimateSize + def characteristics(): Int = s.characteristics + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: IntConsumer): Unit = + while (s.hasStep) { c.accept(s.nextStep()) } + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: Consumer[_ >: jl.Integer]): Unit = (c: AnyRef) match { + case ic: IntConsumer => forEachRemaining(ic) + case _ => while (s.hasStep) { c.accept(jl.Integer.valueOf(s.nextStep())) } + } + } +} + +/** A Stepper for Doubles. See [[Stepper]]. */ +trait DoubleStepper extends Stepper[Double] { + this: DoubleStepper^ => + def trySplit(): DoubleStepper + + def spliterator[B >: Double]: Spliterator.OfDouble^{this} = new DoubleStepper.DoubleStepperSpliterator(this) + + def javaIterator[B >: Double]: PrimitiveIterator.OfDouble^{this} = new PrimitiveIterator.OfDouble { + def hasNext: Boolean = hasStep + def nextDouble(): Double = nextStep() + } +} + +object DoubleStepper { + class DoubleStepperSpliterator(s: DoubleStepper^) extends Spliterator.OfDouble { + def tryAdvance(c: DoubleConsumer): Boolean = + if (s.hasStep) { c.accept(s.nextStep()); true } else false + // Override for efficiency: don't wrap the function and call the `tryAdvance` overload + override def tryAdvance(c: Consumer[_ >: jl.Double]): Boolean = (c: AnyRef) match { + case ic: DoubleConsumer => tryAdvance(ic) + case _ => if (s.hasStep) { c.accept(java.lang.Double.valueOf(s.nextStep())); true } else false + } + // override required for dotty#6152 + override def trySplit(): Spliterator.OfDouble^{this} = { + val sp = s.trySplit() + if (sp == null) null else sp.spliterator + } + def estimateSize(): Long = s.estimateSize + def characteristics(): Int = s.characteristics + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: DoubleConsumer): Unit = + while (s.hasStep) { c.accept(s.nextStep()) } + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: Consumer[_ >: jl.Double]): Unit = (c: AnyRef) match { + case ic: DoubleConsumer => forEachRemaining(ic) + case _ => while (s.hasStep) { c.accept(jl.Double.valueOf(s.nextStep())) } + } + } +} + +/** A Stepper for Longs. See [[Stepper]]. 
*/ +trait LongStepper extends Stepper[Long] { + this: LongStepper^ => + + def trySplit(): LongStepper^{this} + + def spliterator[B >: Long]: Spliterator.OfLong^{this} = new LongStepper.LongStepperSpliterator(this) + + def javaIterator[B >: Long]: PrimitiveIterator.OfLong^{this} = new PrimitiveIterator.OfLong { + def hasNext: Boolean = hasStep + def nextLong(): Long = nextStep() + } +} + +object LongStepper { + class LongStepperSpliterator(s: LongStepper^) extends Spliterator.OfLong { + def tryAdvance(c: LongConsumer): Boolean = + if (s.hasStep) { c.accept(s.nextStep()); true } else false + // Override for efficiency: don't wrap the function and call the `tryAdvance` overload + override def tryAdvance(c: Consumer[_ >: jl.Long]): Boolean = (c: AnyRef) match { + case ic: LongConsumer => tryAdvance(ic) + case _ => if (s.hasStep) { c.accept(java.lang.Long.valueOf(s.nextStep())); true } else false + } + // override required for dotty#6152 + override def trySplit(): Spliterator.OfLong^{this} = { + val sp = s.trySplit() + if (sp == null) null else sp.spliterator + } + def estimateSize(): Long = s.estimateSize + def characteristics(): Int = s.characteristics + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: LongConsumer): Unit = + while (s.hasStep) { c.accept(s.nextStep()) } + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: Consumer[_ >: jl.Long]): Unit = (c: AnyRef) match { + case ic: LongConsumer => forEachRemaining(ic) + case _ => while (s.hasStep) { c.accept(jl.Long.valueOf(s.nextStep())) } + } + } +} diff --git a/tests/pos-special/stdlib/collection/StepperShape.scala b/tests/pos-special/stdlib/collection/StepperShape.scala new file mode 100644 index 000000000000..c6b520400d89 --- /dev/null +++ b/tests/pos-special/stdlib/collection/StepperShape.scala @@ -0,0 +1,115 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import java.{lang => jl} + +import language.experimental.captureChecking +import scala.collection.Stepper.EfficientSplit + +/** An implicit StepperShape instance is used in the [[IterableOnce.stepper]] to return a possibly + * specialized Stepper `S` according to the element type `T`. + */ +sealed trait StepperShape[-T, S <: Stepper[_]^] extends Pure { + /** Return the Int constant (as defined in the `StepperShape` companion object) for this `StepperShape`. */ + def shape: StepperShape.Shape + + /** Create an unboxing primitive sequential Stepper from a boxed `AnyStepper`. + * This is an identity operation for reference shapes. */ + def seqUnbox(st: AnyStepper[T]): S + + /** Create an unboxing primitive parallel (i.e. `with EfficientSplit`) Stepper from a boxed `AnyStepper`. + * This is an identity operation for reference shapes. 
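+   *
+   *  For instance (a hedged sketch of how [[IterableOnce.stepper]] applies the shape):
+   *  {{{
+   *  val s1 = List(1, 2, 3).stepper   // intStepperShape selected: an IntStepper
+   *  val s2 = List("a", "b").stepper  // anyStepperShape selected: an AnyStepper[String]
+   *  }}}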
*/ + def parUnbox(st: AnyStepper[T] with EfficientSplit): S with EfficientSplit +} + +object StepperShape extends StepperShapeLowPriority1 { + class Shape private[StepperShape] (private val s: Int) extends AnyVal + + // reference + val ReferenceShape = new Shape(0) + + // primitive + val IntShape = new Shape(1) + val LongShape = new Shape(2) + val DoubleShape = new Shape(3) + + // widening + val ByteShape = new Shape(4) + val ShortShape = new Shape(5) + val CharShape = new Shape(6) + val FloatShape = new Shape(7) + + implicit val intStepperShape: StepperShape[Int, IntStepper] = new StepperShape[Int, IntStepper] { + def shape = IntShape + def seqUnbox(st: AnyStepper[Int]): IntStepper = new Stepper.UnboxingIntStepper(st) + def parUnbox(st: AnyStepper[Int] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingIntStepper(st) with EfficientSplit + } + implicit val jIntegerStepperShape: StepperShape[jl.Integer, IntStepper] = intStepperShape.asInstanceOf[StepperShape[jl.Integer, IntStepper]] + + implicit val longStepperShape: StepperShape[Long, LongStepper] = new StepperShape[Long, LongStepper] { + def shape = LongShape + def seqUnbox(st: AnyStepper[Long]): LongStepper = new Stepper.UnboxingLongStepper(st) + def parUnbox(st: AnyStepper[Long] with EfficientSplit): LongStepper with EfficientSplit = new Stepper.UnboxingLongStepper(st) with EfficientSplit + } + implicit val jLongStepperShape: StepperShape[jl.Long, LongStepper] = longStepperShape.asInstanceOf[StepperShape[jl.Long, LongStepper]] + + implicit val doubleStepperShape: StepperShape[Double, DoubleStepper] = new StepperShape[Double, DoubleStepper] { + def shape = DoubleShape + def seqUnbox(st: AnyStepper[Double]): DoubleStepper = new Stepper.UnboxingDoubleStepper(st) + def parUnbox(st: AnyStepper[Double] with EfficientSplit): DoubleStepper with EfficientSplit = new Stepper.UnboxingDoubleStepper(st) with EfficientSplit + } + implicit val jDoubleStepperShape: StepperShape[jl.Double, DoubleStepper] = doubleStepperShape.asInstanceOf[StepperShape[jl.Double, DoubleStepper]] + + implicit val byteStepperShape: StepperShape[Byte, IntStepper] = new StepperShape[Byte, IntStepper] { + def shape = ByteShape + def seqUnbox(st: AnyStepper[Byte]): IntStepper = new Stepper.UnboxingByteStepper(st) + def parUnbox(st: AnyStepper[Byte] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingByteStepper(st) with EfficientSplit + } + implicit val jByteStepperShape: StepperShape[jl.Byte, IntStepper] = byteStepperShape.asInstanceOf[StepperShape[jl.Byte, IntStepper]] + + implicit val shortStepperShape: StepperShape[Short, IntStepper] = new StepperShape[Short, IntStepper] { + def shape = ShortShape + def seqUnbox(st: AnyStepper[Short]): IntStepper = new Stepper.UnboxingShortStepper(st) + def parUnbox(st: AnyStepper[Short] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingShortStepper(st) with EfficientSplit + } + implicit val jShortStepperShape: StepperShape[jl.Short, IntStepper] = shortStepperShape.asInstanceOf[StepperShape[jl.Short, IntStepper]] + + implicit val charStepperShape: StepperShape[Char, IntStepper] = new StepperShape[Char, IntStepper] { + def shape = CharShape + def seqUnbox(st: AnyStepper[Char]): IntStepper = new Stepper.UnboxingCharStepper(st) + def parUnbox(st: AnyStepper[Char] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingCharStepper(st) with EfficientSplit + } + implicit val jCharacterStepperShape: StepperShape[jl.Character, IntStepper] = 
charStepperShape.asInstanceOf[StepperShape[jl.Character, IntStepper]] + + implicit val floatStepperShape: StepperShape[Float, DoubleStepper] = new StepperShape[Float, DoubleStepper] { + def shape = FloatShape + def seqUnbox(st: AnyStepper[Float]): DoubleStepper = new Stepper.UnboxingFloatStepper(st) + def parUnbox(st: AnyStepper[Float] with EfficientSplit): DoubleStepper with EfficientSplit = new Stepper.UnboxingFloatStepper(st) with EfficientSplit + } + implicit val jFloatStepperShape: StepperShape[jl.Float, DoubleStepper] = floatStepperShape.asInstanceOf[StepperShape[jl.Float, DoubleStepper]] +} + +trait StepperShapeLowPriority1 extends StepperShapeLowPriority2 { + implicit def anyStepperShape[T]: StepperShape[T, AnyStepper[T]] = anyStepperShapePrototype.asInstanceOf[StepperShape[T, AnyStepper[T]]] +} + +trait StepperShapeLowPriority2 { + implicit def baseStepperShape[T]: StepperShape[T, Stepper[T]] = anyStepperShapePrototype.asInstanceOf[StepperShape[T, Stepper[T]]] + + protected val anyStepperShapePrototype: StepperShape[AnyRef, Stepper[AnyRef]] = new StepperShape[AnyRef, Stepper[AnyRef]] { + def shape = StepperShape.ReferenceShape + def seqUnbox(st: AnyStepper[AnyRef]): Stepper[AnyRef] = st + def parUnbox(st: AnyStepper[AnyRef] with EfficientSplit): Stepper[AnyRef] with EfficientSplit = st + } +} \ No newline at end of file diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedIterableOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedIterableOps.scala new file mode 100644 index 000000000000..5b504a2469b5 --- /dev/null +++ b/tests/pos-special/stdlib/collection/StrictOptimizedIterableOps.scala @@ -0,0 +1,286 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.annotation.nowarn +import scala.annotation.unchecked.uncheckedVariance +import scala.runtime.Statics +import language.experimental.captureChecking + +/** + * Trait that overrides iterable operations to take advantage of strict builders. 
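+ *
+ * Strict implementations evaluate eagerly into a builder instead of going through a
+ * lazy `View`. As a hedged illustration, `map` here behaves like the pattern used by
+ * `strictOptimizedMap` below:
+ * {{{
+ * val b = iterableFactory.newBuilder[B]
+ * iterator.foreach(a => b += f(a))
+ * b.result()
+ * }}}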
+ * + * @tparam A Elements type + * @tparam CC Collection type constructor + * @tparam C Collection type + */ +trait StrictOptimizedIterableOps[+A, +CC[_], +C] + extends Any + with IterableOps[A, CC, C] { + this: StrictOptimizedIterableOps[A, CC, C] => + + // Optimized, push-based version of `partition` + override def partition(p: A => Boolean): (C, C) = { + val l, r = newSpecificBuilder + iterator.foreach(x => (if (p(x)) l else r) += x) + (l.result(), r.result()) + } + + override def span(p: A => Boolean): (C, C) = { + val first = newSpecificBuilder + val second = newSpecificBuilder + val it = iterator + var inFirst = true + while (it.hasNext && inFirst) { + val a = it.next() + if (p(a)) { + first += a + } else { + second += a + inFirst = false + } + } + while (it.hasNext) { + second += it.next() + } + (first.result(), second.result()) + } + + override def unzip[A1, A2](implicit asPair: A -> (A1, A2)): (CC[A1], CC[A2]) = { + val first = iterableFactory.newBuilder[A1] + val second = iterableFactory.newBuilder[A2] + foreach { a => + val pair = asPair(a) + first += pair._1 + second += pair._2 + } + (first.result(), second.result()) + } + + override def unzip3[A1, A2, A3](implicit asTriple: A -> (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = { + val b1 = iterableFactory.newBuilder[A1] + val b2 = iterableFactory.newBuilder[A2] + val b3 = iterableFactory.newBuilder[A3] + + foreach { xyz => + val triple = asTriple(xyz) + b1 += triple._1 + b2 += triple._2 + b3 += triple._3 + } + (b1.result(), b2.result(), b3.result()) + } + + // The implementations of the following operations are not fundamentally different from + // the view-based implementations, but they turn out to be slightly faster because + // a couple of indirection levels are removed + + override def map[B](f: A => B): CC[B] = + strictOptimizedMap(iterableFactory.newBuilder, f) + + /** + * @param b Builder to use to build the resulting collection + * @param f Element transformation function + * @tparam B Type of elements of the resulting collection (e.g. `String`) + * @tparam C2 Type of the resulting collection (e.g. `List[String]`) + * @return The resulting collection + */ + @inline protected[this] final def strictOptimizedMap[B, C2](b: mutable.Builder[B, C2], f: A => B): C2 = { + val it = iterator + while (it.hasNext) { + b += f(it.next()) + } + b.result() + } + + override def flatMap[B](f: A => IterableOnce[B]^): CC[B] = + strictOptimizedFlatMap(iterableFactory.newBuilder, f) + + /** + * @param b Builder to use to build the resulting collection + * @param f Element transformation function + * @tparam B Type of elements of the resulting collection (e.g. `String`) + * @tparam C2 Type of the resulting collection (e.g. `List[String]`) + * @return The resulting collection + */ + @inline protected[this] final def strictOptimizedFlatMap[B, C2](b: mutable.Builder[B, C2], f: A => IterableOnce[B]^): C2 = { + val it = iterator + while (it.hasNext) { + b ++= f(it.next()) + } + b.result() + } + + /** + * @param that Elements to concatenate to this collection + * @param b Builder to use to build the resulting collection + * @tparam B Type of elements of the resulting collections (e.g. `Int`) + * @tparam C2 Type of the resulting collection (e.g. 
`List[Int]`) + * @return The resulting collection + */ + @inline protected[this] final def strictOptimizedConcat[B >: A, C2](that: IterableOnce[B]^, b: mutable.Builder[B, C2]): C2 = { + b ++= this + b ++= that + b.result() + } + + override def collect[B](pf: PartialFunction[A, B]^): CC[B] = + strictOptimizedCollect(iterableFactory.newBuilder, pf) + + /** + * @param b Builder to use to build the resulting collection + * @param pf Element transformation partial function + * @tparam B Type of elements of the resulting collection (e.g. `String`) + * @tparam C2 Type of the resulting collection (e.g. `List[String]`) + * @return The resulting collection + */ + @inline protected[this] final def strictOptimizedCollect[B, C2](b: mutable.Builder[B, C2], pf: PartialFunction[A, B]^): C2 = { + val marker = Statics.pfMarker + val it = iterator + while (it.hasNext) { + val elem = it.next() + val v = pf.applyOrElse(elem, ((x: A) => marker).asInstanceOf[Function[A, B]]) + if (marker ne v.asInstanceOf[AnyRef]) b += v + } + b.result() + } + + override def flatten[B](implicit toIterableOnce: A -> IterableOnce[B]): CC[B] = + strictOptimizedFlatten(iterableFactory.newBuilder) + + /** + * @param b Builder to use to build the resulting collection + * @param toIterableOnce Evidence that `A` can be seen as an `IterableOnce[B]` + * @tparam B Type of elements of the resulting collection (e.g. `Int`) + * @tparam C2 Type of the resulting collection (e.g. `List[Int]`) + * @return The resulting collection + */ + @inline protected[this] final def strictOptimizedFlatten[B, C2](b: mutable.Builder[B, C2])(implicit toIterableOnce: A -> IterableOnce[B]): C2 = { + val it = iterator + while (it.hasNext) { + b ++= toIterableOnce(it.next()) + } + b.result() + } + + override def zip[B](that: IterableOnce[B]^): CC[(A @uncheckedVariance, B)] = + strictOptimizedZip(that, iterableFactory.newBuilder[(A, B)]) + + /** + * @param that Collection to zip with this collection + * @param b Builder to use to build the resulting collection + * @tparam B Type of elements of the second collection (e.g. `String`) + * @tparam C2 Type of the resulting collection (e.g. 
`List[(Int, String)]`) + * @return The resulting collection + */ + @inline protected[this] final def strictOptimizedZip[B, C2](that: IterableOnce[B]^, b: mutable.Builder[(A, B), C2]): C2 = { + val it1 = iterator + val it2 = that.iterator + while (it1.hasNext && it2.hasNext) { + b += ((it1.next(), it2.next())) + } + b.result() + } + + override def zipWithIndex: CC[(A @uncheckedVariance, Int)] = { + val b = iterableFactory.newBuilder[(A, Int)] + var i = 0 + val it = iterator + while (it.hasNext) { + b += ((it.next(), i)) + i += 1 + } + b.result() + } + + override def scanLeft[B](z: B)(op: (B, A) => B): CC[B] = { + val b = iterableFactory.newBuilder[B] + b.sizeHint(this, delta = 0) + var acc = z + b += acc + val it = iterator + while (it.hasNext) { + acc = op(acc, it.next()) + b += acc + } + b.result() + } + + override def filter(pred: A => Boolean): C = filterImpl(pred, isFlipped = false) + + override def filterNot(pred: A => Boolean): C = filterImpl(pred, isFlipped = true) + + protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): C = { + val b = newSpecificBuilder + val it = iterator + while (it.hasNext) { + val elem = it.next() + if (pred(elem) != isFlipped) { + b += elem + } + } + b.result() + } + + // Optimized, push-based version of `partitionMap` + override def partitionMap[A1, A2](f: A => Either[A1, A2]): (CC[A1], CC[A2]) = { + val l = iterableFactory.newBuilder[A1] + val r = iterableFactory.newBuilder[A2] + foreach { x => + f(x) match { + case Left(x1) => l += x1 + case Right(x2) => r += x2 + } + } + (l.result(), r.result()) + } + + // Optimization avoids creation of second collection + override def tapEach[U](f: A => U): C = { + foreach(f) + coll + } + + /** A collection containing the last `n` elements of this collection. + * $willForceEvaluation + */ + override def takeRight(n: Int): C = { + val b = newSpecificBuilder + b.sizeHintBounded(n, toIterable: @nowarn("cat=deprecation")) + val lead = iterator drop n + val it = iterator + while (lead.hasNext) { + lead.next() + it.next() + } + while (it.hasNext) b += it.next() + b.result() + } + + /** The rest of the collection without its `n` last elements. For + * linear, immutable collections this should avoid making a copy. + * $willForceEvaluation + */ + override def dropRight(n: Int): C = { + val b = newSpecificBuilder + if (n >= 0) b.sizeHint(this, delta = -n) + val lead = iterator drop n + val it = iterator + while (lead.hasNext) { + b += it.next() + lead.next() + } + b.result() + } +} diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala new file mode 100644 index 000000000000..a9c5e0af43b3 --- /dev/null +++ b/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala @@ -0,0 +1,50 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +import language.experimental.captureChecking + +/** + * Trait that overrides map operations to take advantage of strict builders. 
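+ *
+ * For example, `concat` pushes both sides into a fresh map builder rather than
+ * building a lazy view first (a sketch of the `strictOptimizedConcat` pattern used below):
+ * {{{
+ * val b = mapFactory.newBuilder[K, V2]
+ * b ++= this
+ * b ++= suffix
+ * b.result()
+ * }}}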
+ * + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor + * @tparam C Collection type + */ +trait StrictOptimizedMapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] + extends MapOps[K, V, CC, C] + with StrictOptimizedIterableOps[(K, V), Iterable, C] + with Pure { + + override def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = + strictOptimizedMap(mapFactory.newBuilder, f) + + override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2] = + strictOptimizedFlatMap(mapFactory.newBuilder, f) + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): CC[K, V2] = + strictOptimizedConcat(suffix, mapFactory.newBuilder) + + override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]): CC[K2, V2] = + strictOptimizedCollect(mapFactory.newBuilder, pf) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = { + val b = mapFactory.newBuilder[K, V1] + b ++= this + b += elem1 + b += elem2 + if (elems.nonEmpty) b ++= elems + b.result() + } +} diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala new file mode 100644 index 000000000000..b8af7e5f172b --- /dev/null +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala @@ -0,0 +1,113 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +import language.experimental.captureChecking + +/** + * Trait that overrides operations on sequences in order + * to take advantage of strict builders. 
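+ *
+ * For instance, `prepended` below can pre-size its builder when the size is known
+ * (a sketch of that pattern):
+ * {{{
+ * val b = iterableFactory.newBuilder[B]
+ * if (knownSize >= 0) b.sizeHint(size + 1)
+ * b += elem
+ * b ++= this
+ * b.result()
+ * }}}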
+ */ +trait StrictOptimizedSeqOps [+A, +CC[_], +C] + extends Any + with SeqOps[A, CC, C] + with StrictOptimizedIterableOps[A, CC, C] { + + override def distinctBy[B](f: A -> B): C = { + val builder = newSpecificBuilder + val seen = mutable.HashSet.empty[B] + val it = this.iterator + while (it.hasNext) { + val next = it.next() + if (seen.add(f(next))) builder += next + } + builder.result() + } + + override def prepended[B >: A](elem: B): CC[B] = { + val b = iterableFactory.newBuilder[B] + if (knownSize >= 0) { + b.sizeHint(size + 1) + } + b += elem + b ++= this + b.result() + } + + override def appended[B >: A](elem: B): CC[B] = { + val b = iterableFactory.newBuilder[B] + if (knownSize >= 0) { + b.sizeHint(size + 1) + } + b ++= this + b += elem + b.result() + } + + override def appendedAll[B >: A](suffix: IterableOnce[B]^): CC[B] = + strictOptimizedConcat(suffix, iterableFactory.newBuilder) + + override def prependedAll[B >: A](prefix: IterableOnce[B]^): CC[B] = { + val b = iterableFactory.newBuilder[B] + b ++= prefix + b ++= this + b.result() + } + + override def padTo[B >: A](len: Int, elem: B): CC[B] = { + val b = iterableFactory.newBuilder[B] + val L = size + b.sizeHint(math.max(L, len)) + var diff = len - L + b ++= this + while (diff > 0) { + b += elem + diff -= 1 + } + b.result() + } + + override def diff[B >: A](that: Seq[B]): C = + if (isEmpty || that.isEmpty) coll + else { + val occ = occCounts[B](that) + val b = newSpecificBuilder + for (x <- this) { + occ.updateWith(x) { + case None => { + b.addOne(x) + None + } + case Some(1) => None + case Some(n) => Some(n - 1) + } + } + b.result() + } + + override def intersect[B >: A](that: Seq[B]): C = + if (isEmpty || that.isEmpty) empty + else { + val occ = occCounts[B](that) + val b = newSpecificBuilder + for (x <- this) { + occ.updateWith(x) { + case None => None + case Some(n) => { + b.addOne(x) + if (n == 1) None else Some(n - 1) + } + } + } + b.result() + } +} diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala new file mode 100644 index 000000000000..8ed337fff998 --- /dev/null +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala @@ -0,0 +1,30 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +import language.experimental.captureChecking + +/** + * Trait that overrides set operations to take advantage of strict builders. + * + * @tparam A Elements type + * @tparam CC Collection type constructor + * @tparam C Collection type + */ +trait StrictOptimizedSetOps[A, +CC[_], +C <: SetOps[A, CC, C]] + extends SetOps[A, CC, C] + with StrictOptimizedIterableOps[A, CC, C] { + + override def concat(that: IterableOnce[A]): C = + strictOptimizedConcat(that, newSpecificBuilder) + +} diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala new file mode 100644 index 000000000000..9a9e6e367922 --- /dev/null +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala @@ -0,0 +1,47 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.annotation.implicitNotFound +import language.experimental.captureChecking + +/** + * Trait that overrides sorted map operations to take advantage of strict builders. + * + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor + * @tparam C Collection type + */ +trait StrictOptimizedSortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends SortedMapOps[K, V, CC, C] + with StrictOptimizedMapOps[K, V, Map, C] { + + override def map[K2, V2](f: ((K, V)) => (K2, V2))(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + strictOptimizedMap(sortedMapFactory.newBuilder, f) + + override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + strictOptimizedFlatMap(sortedMapFactory.newBuilder, f) + + override def concat[V2 >: V](xs: IterableOnce[(K, V2)]^): CC[K, V2] = + strictOptimizedConcat(xs, sortedMapFactory.newBuilder(ordering)) + + override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + strictOptimizedCollect(sortedMapFactory.newBuilder, pf) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = { + val m = ((this + elem1).asInstanceOf[Map[K, V]] + elem2).asInstanceOf[CC[K, V1]] + if(elems.isEmpty) m else m.concat(elems).asInstanceOf[CC[K, V1]] + } +} diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala new file mode 100644 index 000000000000..ded7deabccca --- /dev/null +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala @@ -0,0 +1,42 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.annotation.implicitNotFound +import scala.annotation.unchecked.uncheckedVariance + +/** + * Trait that overrides sorted set operations to take advantage of strict builders. 
+ * + * @tparam A Elements type + * @tparam CC Collection type constructor + * @tparam C Collection type + */ +trait StrictOptimizedSortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] + extends SortedSetOps[A, CC, C] + with StrictOptimizedSetOps[A, Set, C] { + + override def map[B](f: A => B)(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + strictOptimizedMap(sortedIterableFactory.newBuilder, f) + + override def flatMap[B](f: A => IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + strictOptimizedFlatMap(sortedIterableFactory.newBuilder, f) + + override def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.zipOrdMsg) ev: Ordering[(A @uncheckedVariance, B)]): CC[(A @uncheckedVariance, B)] = + strictOptimizedZip(that, sortedIterableFactory.newBuilder[(A, B)]) + + override def collect[B](pf: PartialFunction[A, B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + strictOptimizedCollect(sortedIterableFactory.newBuilder, pf) + +} diff --git a/tests/pos-special/stdlib/collection/StringOps.scala b/tests/pos-special/stdlib/collection/StringOps.scala new file mode 100644 index 000000000000..f570531def98 --- /dev/null +++ b/tests/pos-special/stdlib/collection/StringOps.scala @@ -0,0 +1,1649 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import java.lang.{StringBuilder => JStringBuilder} + +import scala.collection.Stepper.EfficientSplit +import scala.collection.convert.impl.{CharStringStepper, CodePointStringStepper} +import scala.collection.immutable.{ArraySeq, WrappedString} +import scala.collection.mutable.StringBuilder +import scala.math.{ScalaNumber, max, min} +import scala.reflect.ClassTag +import scala.util.matching.Regex +import language.experimental.captureChecking + +object StringOps { + // just statics for companion class. + private final val LF = 0x0A + private final val FF = 0x0C + private final val CR = 0x0D + private final val SU = 0x1A + + private class StringIterator(private[this] val s: String) extends AbstractIterator[Char] { + private[this] var pos = 0 + def hasNext: Boolean = pos < s.length + def next(): Char = { + if (pos >= s.length) Iterator.empty.next() + val r = s.charAt(pos) + pos += 1 + r + } + } + + private class ReverseIterator(private[this] val s: String) extends AbstractIterator[Char] { + private[this] var pos = s.length-1 + def hasNext: Boolean = pos >= 0 + def next(): Char = { + if (pos < 0) Iterator.empty.next() + val r = s.charAt(pos) + pos -= 1 + r + } + } + + private class GroupedIterator(s: String, groupSize: Int) extends AbstractIterator[String] { + private[this] var pos = 0 + def hasNext: Boolean = pos < s.length + def next(): String = { + if(pos >= s.length) Iterator.empty.next() + val r = s.slice(pos, pos+groupSize) + pos += groupSize + r + } + } + + /** A lazy filtered string. No filtering is applied until one of `foreach`, `map` or `flatMap` is called. */ + class WithFilter(p: Char => Boolean, s: String) { + + /** Apply `f` to each element for its side effects. + * Note: [U] parameter needed to help scalac's type inference. 
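+     *
+     *  A small usage sketch (assuming the implicit `StringOps` conversion on `String`
+     *  and its `withFilter` method):
+     *  {{{
+     *  "a1b2".withFilter(_.isDigit).foreach(print)  // prints 12
+     *  }}}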
+     */
+    def foreach[U](f: Char => U): Unit = {
+      val len = s.length
+      var i = 0
+      while(i < len) {
+        val x = s.charAt(i)
+        if(p(x)) f(x)
+        i += 1
+      }
+    }
+
+    /** Builds a new collection by applying a function to all chars of this filtered string.
+     *
+     * @param f the function to apply to each char.
+     * @return a new collection resulting from applying the given function
+     *         `f` to each char of this string and collecting the results.
+     */
+    def map[B](f: Char => B): immutable.IndexedSeq[B] = {
+      val len = s.length
+      val b = immutable.IndexedSeq.newBuilder[B]
+      b.sizeHint(len)
+      var i = 0
+      while (i < len) {
+        val x = s.charAt(i)
+        if(p(x)) b.addOne(f(x))
+        i += 1
+      }
+      b.result()
+    }
+
+    /** Builds a new string by applying a function to all chars of this filtered string.
+     *
+     * @param f the function to apply to each char.
+     * @return a new string resulting from applying the given function
+     *         `f` to each char of this string and collecting the results.
+     */
+    def map(f: Char => Char): String = {
+      val len = s.length
+      val sb = new JStringBuilder(len)
+      var i = 0
+      while (i < len) {
+        val x = s.charAt(i)
+        if(p(x)) sb.append(f(x))
+        i += 1
+      }
+      sb.toString
+    }
+
+    /** Builds a new collection by applying a function to all chars of this filtered string
+     * and using the elements of the resulting collections.
+     *
+     * @param f the function to apply to each char.
+     * @return a new collection resulting from applying the given collection-valued function
+     *         `f` to each char of this string and concatenating the results.
+     */
+    def flatMap[B](f: Char => IterableOnce[B]^): immutable.IndexedSeq[B] = {
+      val len = s.length
+      val b = immutable.IndexedSeq.newBuilder[B]
+      var i = 0
+      while (i < len) {
+        val x = s.charAt(i)
+        if(p(x)) b.addAll(f(x))
+        i += 1
+      }
+      b.result()
+    }
+
+    /** Builds a new string by applying a function to all chars of this filtered string
+     * and using the elements of the resulting Strings.
+     *
+     * @param f the function to apply to each char.
+     * @return a new string resulting from applying the given string-valued function
+     *         `f` to each char of this string and concatenating the results.
+     */
+    def flatMap(f: Char => String): String = {
+      val len = s.length
+      val sb = new JStringBuilder
+      var i = 0
+      while (i < len) {
+        val x = s.charAt(i)
+        if(p(x)) sb.append(f(x))
+        i += 1
+      }
+      sb.toString
+    }
+
+    /** Creates a new non-strict filter which combines this filter with the given predicate. */
+    def withFilter(q: Char => Boolean): WithFilter^{p, q} = new WithFilter(a => p(a) && q(a), s)
+  }
+
+  /** Avoid an allocation in [[collect]]. */
+  private val fallback: Any => Any = _ => fallback
+}
+
+/** Provides extension methods for strings.
+ *
+ *  Some of these methods treat strings as a plain collection of [[Char]]s
+ *  without any regard for Unicode handling. Unless the user takes Unicode
+ *  handling into account or makes sure the strings don't require such handling,
+ *  these methods may result in unpaired or invalidly paired surrogate code
+ *  units.
+ *
+ *  @define unicodeunaware This method treats a string as a plain sequence of
+ *                         Char code units and makes no attempt to keep
+ *                         surrogate pairs or codepoint sequences together.
+ *                         The user is responsible for making sure such cases
+ *                         are handled correctly. Failing to do so may result in
+ *                         an invalid Unicode string.
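+ *
+ *  @note As a hedged illustration of the Unicode caveat: a code point outside the
+ *        BMP occupies two `Char`s, so char-level slicing can split a surrogate pair:
+ *        {{{
+ *        val s = "a\ud83d\ude04"  // 'a' followed by an emoji (one surrogate pair)
+ *        s.length                 // 3 code units, though only 2 code points
+ *        s.take(2)                // ends in an unpaired high surrogate
+ *        }}}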
+ */ +final class StringOps(private val s: String) extends AnyVal { + import StringOps._ + + @`inline` def view: StringView = new StringView(s) + + @`inline` def size: Int = s.length + + @`inline` def knownSize: Int = s.length + + /** Get the char at the specified index. */ + @`inline` def apply(i: Int): Char = s.charAt(i) + + def sizeCompare(otherSize: Int): Int = Integer.compare(s.length, otherSize) + + def lengthCompare(len: Int): Int = Integer.compare(s.length, len) + + def sizeIs: Int = s.length + + def lengthIs: Int = s.length + + /** Builds a new collection by applying a function to all chars of this string. + * + * @param f the function to apply to each char. + * @return a new collection resulting from applying the given function + * `f` to each char of this string and collecting the results. + */ + def map[B](f: Char => B): immutable.IndexedSeq[B] = { + val len = s.length + val dst = new Array[AnyRef](len) + var i = 0 + while (i < len) { + dst(i) = f(s charAt i).asInstanceOf[AnyRef] + i += 1 + } + new ArraySeq.ofRef(dst).asInstanceOf[immutable.IndexedSeq[B]] + } + + /** Builds a new string by applying a function to all chars of this string. + * + * @param f the function to apply to each char. + * @return a new string resulting from applying the given function + * `f` to each char of this string and collecting the results. + */ + def map(f: Char => Char): String = { + val len = s.length + val dst = new Array[Char](len) + var i = 0 + while (i < len) { + dst(i) = f(s charAt i) + i += 1 + } + new String(dst) + } + + /** Builds a new collection by applying a function to all chars of this string + * and using the elements of the resulting collections. + * + * @param f the function to apply to each char. + * @return a new collection resulting from applying the given collection-valued function + * `f` to each char of this string and concatenating the results. + */ + def flatMap[B](f: Char => IterableOnce[B]^): immutable.IndexedSeq[B] = { + val len = s.length + val b = immutable.IndexedSeq.newBuilder[B] + var i = 0 + while (i < len) { + b.addAll(f(s.charAt(i))) + i += 1 + } + b.result() + } + + /** Builds a new string by applying a function to all chars of this string + * and using the elements of the resulting strings. + * + * @param f the function to apply to each char. + * @return a new string resulting from applying the given string-valued function + * `f` to each char of this string and concatenating the results. + */ + def flatMap(f: Char => String): String = { + val len = s.length + val sb = new JStringBuilder + var i = 0 + while (i < len) { + sb append f(s.charAt(i)) + i += 1 + } + sb.toString + } + + /** Builds a new String by applying a partial function to all chars of this String + * on which the function is defined. + * + * @param pf the partial function which filters and maps the String. + * @return a new String resulting from applying the given partial function + * `pf` to each char on which it is defined and collecting the results. + */ + def collect(pf: PartialFunction[Char, Char]): String = { + val fallback: Any => Any = StringOps.fallback + var i = 0 + val b = new StringBuilder + while (i < s.length) { + val v = pf.applyOrElse(s.charAt(i), fallback) + if (v.asInstanceOf[AnyRef] ne fallback) b.addOne(v.asInstanceOf[Char]) + i += 1 + } + b.result() + } + + /** Builds a new collection by applying a partial function to all chars of this String + * on which the function is defined. + * + * @param pf the partial function which filters and maps the String. 
+ * @tparam B the element type of the returned collection. + * @return a new collection resulting from applying the given partial function + * `pf` to each char on which it is defined and collecting the results. + */ + def collect[B](pf: PartialFunction[Char, B]): immutable.IndexedSeq[B] = { + val fallback: Any => Any = StringOps.fallback + var i = 0 + val b = immutable.IndexedSeq.newBuilder[B] + while (i < s.length) { + val v = pf.applyOrElse(s.charAt(i), fallback) + if (v.asInstanceOf[AnyRef] ne fallback) b.addOne(v.asInstanceOf[B]) + i += 1 + } + b.result() + } + + /** Returns a new collection containing the chars from this string followed by the elements from the + * right hand operand. + * + * @param suffix the collection to append. + * @return a new collection which contains all chars + * of this string followed by all elements of `suffix`. + */ + def concat[B >: Char](suffix: IterableOnce[B]^): immutable.IndexedSeq[B] = { + val b = immutable.IndexedSeq.newBuilder[B] + val k = suffix.knownSize + b.sizeHint(s.length + (if(k >= 0) k else 16)) + b.addAll(new WrappedString(s)) + b.addAll(suffix) + b.result() + } + + /** Returns a new string containing the chars from this string followed by the chars from the + * right hand operand. + * + * @param suffix the collection to append. + * @return a new string which contains all chars + * of this string followed by all chars of `suffix`. + */ + def concat(suffix: IterableOnce[Char]^): String = { + val k = suffix.knownSize + val sb = new JStringBuilder(s.length + (if(k >= 0) k else 16)) + sb.append(s) + for (ch <- suffix.iterator) sb.append(ch) + sb.toString + } + + /** Returns a new string containing the chars from this string followed by the chars from the + * right hand operand. + * + * @param suffix the string to append. + * @return a new string which contains all chars + * of this string followed by all chars of `suffix`. + */ + @`inline` def concat(suffix: String): String = s + suffix + + /** Alias for `concat` */ + @`inline` def ++[B >: Char](suffix: Iterable[B]^): immutable.IndexedSeq[B] = concat(suffix) + + /** Alias for `concat` */ + @`inline` def ++(suffix: IterableOnce[Char]^): String = concat(suffix) + + /** Alias for `concat` */ + def ++(xs: String): String = concat(xs) + + /** Returns a collection with an element appended until a given target length is reached. + * + * @param len the target length + * @param elem the padding value + * @return a collection consisting of + * this string followed by the minimal number of occurrences of `elem` so + * that the resulting collection has a length of at least `len`. + */ + def padTo[B >: Char](len: Int, elem: B): immutable.IndexedSeq[B] = { + val sLen = s.length + if (sLen >= len) new WrappedString(s) else { + val b = immutable.IndexedSeq.newBuilder[B] + b.sizeHint(len) + b.addAll(new WrappedString(s)) + var i = sLen + while (i < len) { + b.addOne(elem) + i += 1 + } + b.result() + } + } + + /** Returns a string with a char appended until a given target length is reached. + * + * @param len the target length + * @param elem the padding value + * @return a string consisting of + * this string followed by the minimal number of occurrences of `elem` so + * that the resulting string has a length of at least `len`. 
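+ * @example For instance:
+ * {{{
+ * "pad".padTo(6, '#')    // "pad###"
+ * "padded".padTo(3, '#') // "padded" (already at least 3 chars long)
+ * }}}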
+ */
+  def padTo(len: Int, elem: Char): String = {
+    val sLen = s.length
+    if (sLen >= len) s else {
+      val sb = new JStringBuilder(len)
+      sb.append(s)
+      // With JDK 11, this can be written as:
+      // sb.append(String.valueOf(elem).repeat(len - sLen))
+      var i = sLen
+      while (i < len) {
+        sb.append(elem)
+        i += 1
+      }
+      sb.toString
+    }
+  }
+
+  /** A copy of the string with an element prepended */
+  def prepended[B >: Char](elem: B): immutable.IndexedSeq[B] = {
+    val b = immutable.IndexedSeq.newBuilder[B]
+    b.sizeHint(s.length + 1)
+    b.addOne(elem)
+    b.addAll(new WrappedString(s))
+    b.result()
+  }
+
+  /** Alias for `prepended` */
+  @`inline` def +: [B >: Char] (elem: B): immutable.IndexedSeq[B] = prepended(elem)
+
+  /** A copy of the string with a char prepended */
+  def prepended(c: Char): String =
+    new JStringBuilder(s.length + 1).append(c).append(s).toString
+
+  /** Alias for `prepended` */
+  @`inline` def +: (c: Char): String = prepended(c)
+
+  /** A copy of the string with all elements from a collection prepended */
+  def prependedAll[B >: Char](prefix: IterableOnce[B]^): immutable.IndexedSeq[B] = {
+    val b = immutable.IndexedSeq.newBuilder[B]
+    val k = prefix.knownSize
+    b.sizeHint(s.length + (if(k >= 0) k else 16))
+    b.addAll(prefix)
+    b.addAll(new WrappedString(s))
+    b.result()
+  }
+
+  /** Alias for `prependedAll` */
+  @`inline` def ++: [B >: Char] (prefix: IterableOnce[B]^): immutable.IndexedSeq[B] = prependedAll(prefix)
+
+  /** A copy of the string with another string prepended */
+  def prependedAll(prefix: String): String = prefix + s
+
+  /** Alias for `prependedAll` */
+  @`inline` def ++: (prefix: String): String = prependedAll(prefix)
+
+  /** A copy of the string with an element appended */
+  def appended[B >: Char](elem: B): immutable.IndexedSeq[B] = {
+    val b = immutable.IndexedSeq.newBuilder[B]
+    b.sizeHint(s.length + 1)
+    b.addAll(new WrappedString(s))
+    b.addOne(elem)
+    b.result()
+  }
+
+  /** Alias for `appended` */
+  @`inline` def :+ [B >: Char](elem: B): immutable.IndexedSeq[B] = appended(elem)
+
+  /** A copy of the string with a char appended */
+  def appended(c: Char): String =
+    new JStringBuilder(s.length + 1).append(s).append(c).toString
+
+  /** Alias for `appended` */
+  @`inline` def :+ (c: Char): String = appended(c)
+
+  /** A copy of the string with all elements from a collection appended */
+  @`inline` def appendedAll[B >: Char](suffix: IterableOnce[B]^): immutable.IndexedSeq[B] =
+    concat(suffix)
+
+  /** Alias for `appendedAll` */
+  @`inline` def :++ [B >: Char](suffix: IterableOnce[B]^): immutable.IndexedSeq[B] =
+    concat(suffix)
+
+  /** A copy of the string with another string appended */
+  @`inline` def appendedAll(suffix: String): String = s + suffix
+
+  /** Alias for `appendedAll` */
+  @`inline` def :++ (suffix: String): String = s + suffix
+
+  /** Produces a new collection where a slice of characters in this string is replaced by another collection.
+   *
+   *  Patching at negative indices is the same as patching starting at 0.
+   *  Patching at indices at or larger than the length of the original string appends the patch to the end.
+   *  If more values are replaced than actually exist, the excess is ignored.
+   *
+   *  @param from     the index of the first replaced char
+   *  @param other    the replacement collection
+   *  @param replaced the number of chars to drop in the original string
+   *  @return         a new collection consisting of all chars of this string
+   *                  except that `replaced` chars starting from `from` are replaced
+   *                  by `other`.
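+   *  @example For instance, replacing three chars starting at index 1:
+   *           {{{
+   *           "abcdef".patch(1, List('X', 'Y'), 3) // e.g. Vector('a', 'X', 'Y', 'e', 'f')
+   *           }}}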
+ */ + def patch[B >: Char](from: Int, other: IterableOnce[B]^, replaced: Int): immutable.IndexedSeq[B] = { + val len = s.length + @`inline` def slc(off: Int, length: Int): WrappedString = + new WrappedString(s.substring(off, off+length)) + val b = immutable.IndexedSeq.newBuilder[B] + val k = other.knownSize + if(k >= 0) b.sizeHint(len + k - replaced) + val chunk1 = if(from > 0) min(from, len) else 0 + if(chunk1 > 0) b.addAll(slc(0, chunk1)) + b ++= other + val remaining = len - chunk1 - replaced + if(remaining > 0) b.addAll(slc(len - remaining, remaining)) + b.result() + } + + /** Produces a new collection where a slice of characters in this string is replaced by another collection. + * + * Patching at negative indices is the same as patching starting at 0. + * Patching at indices at or larger than the length of the original string appends the patch to the end. + * If more values are replaced than actually exist, the excess is ignored. + * + * @param from the index of the first replaced char + * @param other the replacement string + * @param replaced the number of chars to drop in the original string + * @return a new string consisting of all chars of this string + * except that `replaced` chars starting from `from` are replaced + * by `other`. + * @note $unicodeunaware + */ + def patch(from: Int, other: IterableOnce[Char]^, replaced: Int): String = + patch(from, other.iterator.mkString, replaced) + + /** Produces a new string where a slice of characters in this string is replaced by another string. + * + * Patching at negative indices is the same as patching starting at 0. + * Patching at indices at or larger than the length of the original string appends the patch to the end. + * If more values are replaced than actually exist, the excess is ignored. + * + * @param from the index of the first replaced char + * @param other the replacement string + * @param replaced the number of chars to drop in the original string + * @return a new string consisting of all chars of this string + * except that `replaced` chars starting from `from` are replaced + * by `other`. + * @note $unicodeunaware + */ + def patch(from: Int, other: String, replaced: Int): String = { + val len = s.length + val sb = new JStringBuilder(len + other.size - replaced) + val chunk1 = if(from > 0) min(from, len) else 0 + if(chunk1 > 0) sb.append(s, 0, chunk1) + sb.append(other) + val remaining = len - chunk1 - replaced + if(remaining > 0) sb.append(s, len - remaining, len) + sb.toString + } + + /** A copy of this string with one single replaced element. + * @param index the position of the replacement + * @param elem the replacing element + * @return a new string which is a copy of this string with the element at position `index` replaced by `elem`. + * @throws IndexOutOfBoundsException if `index` does not satisfy `0 <= index < length`. + * @note $unicodeunaware + */ + def updated(index: Int, elem: Char): String = { + val sb = new JStringBuilder(s.length).append(s) + sb.setCharAt(index, elem) + sb.toString + } + + /** Tests whether this string contains the given character. + * + * @param elem the character to test. + * @return `true` if this string has an element that is equal (as + * determined by `==`) to `elem`, `false` otherwise. + */ + def contains(elem: Char): Boolean = s.indexOf(elem) >= 0 + + /** Displays all elements of this string in a string using start, end, and + * separator strings. + * + * @param start the starting string. + * @param sep the separator string. + * @param end the ending string. 
+ * @return The resulting string + * begins with the string `start` and ends with the string + * `end`. Inside, the string chars of this string are separated by + * the string `sep`. + * @note $unicodeunaware + */ + final def mkString(start: String, sep: String, end: String): String = + addString(new StringBuilder(), start, sep, end).toString + + /** Displays all elements of this string in a string using a separator string. + * + * @param sep the separator string. + * @return In the resulting string + * the chars of this string are separated by the string `sep`. + * @note $unicodeunaware + */ + @inline final def mkString(sep: String): String = + if (sep.isEmpty || s.length < 2) s + else mkString("", sep, "") + + /** Returns this string */ + @inline final def mkString: String = s + + /** Appends this string to a string builder. */ + @inline final def addString(b: StringBuilder): b.type = b.append(s) + + /** Appends this string to a string builder using a separator string. */ + @inline final def addString(b: StringBuilder, sep: String): b.type = + addString(b, "", sep, "") + + /** Appends this string to a string builder using start, end and separator strings. */ + final def addString(b: StringBuilder, start: String, sep: String, end: String): b.type = { + val jsb = b.underlying + if (start.length != 0) jsb.append(start) + val len = s.length + if (len != 0) { + if (sep.isEmpty) jsb.append(s) + else { + jsb.ensureCapacity(jsb.length + len + end.length + (len - 1) * sep.length) + jsb.append(s.charAt(0)) + var i = 1 + while (i < len) { + jsb.append(sep) + jsb.append(s.charAt(i)) + i += 1 + } + } + } + if (end.length != 0) jsb.append(end) + b + } + + /** Selects an interval of elements. The returned string is made up + * of all elements `x` which satisfy the invariant: + * {{{ + * from <= indexOf(x) < until + * }}} + * + * @param from the lowest index to include from this string. + * @param until the lowest index to EXCLUDE from this string. + * @return a string containing the elements greater than or equal to + * index `from` extending up to (but not including) index `until` + * of this string. + * @note $unicodeunaware + */ + def slice(from: Int, until: Int): String = { + val start = from max 0 + val end = until min s.length + + if (start >= end) "" + else s.substring(start, end) + } + + // Note: String.repeat is added in JDK 11. + /** Return the current string concatenated `n` times. + */ + def *(n: Int): String = { + if (n <= 0) { + "" + } else { + val sb = new JStringBuilder(s.length * n) + var i = 0 + while (i < n) { + sb.append(s) + i += 1 + } + sb.toString + } + } + + @`inline` private[this] def isLineBreak(c: Char) = c == CR || c == LF + @`inline` private[this] def isLineBreak2(c0: Char, c: Char) = c0 == CR && c == LF + + /** Strip the trailing line separator from this string if there is one. + * The line separator is taken as `"\n"`, `"\r"`, or `"\r\n"`. + */ + def stripLineEnd: String = + if (s.isEmpty) s + else { + var i = s.length - 1 + val last = apply(i) + if (!isLineBreak(last)) s + else { + if (i > 0 && isLineBreak2(apply(i - 1), last)) i -= 1 + s.substring(0, i) + } + } + + /** Return an iterator of all lines embedded in this string, + * including trailing line separator characters. + * + * The empty string yields an empty iterator. + */ + def linesWithSeparators: Iterator[String] = linesSeparated(stripped = false) + + /** Lines in this string, where a line is terminated by + * `"\n"`, `"\r"`, `"\r\n"`, or the end of the string. + * A line may be empty. 
Line terminators are removed. + */ + def linesIterator: Iterator[String] = linesSeparated(stripped = true) + + // if `stripped`, exclude the line separators + private def linesSeparated(stripped: Boolean): Iterator[String] = new AbstractIterator[String] { + def hasNext: Boolean = !done + def next(): String = if (done) Iterator.empty.next() else advance() + + private[this] val len = s.length + private[this] var index = 0 + @`inline` private def done = index >= len + private def advance(): String = { + val start = index + while (!done && !isLineBreak(apply(index))) index += 1 + var end = index + if (!done) { + val c = apply(index) + index += 1 + if (!done && isLineBreak2(c, apply(index))) index += 1 + if (!stripped) end = index + } + s.substring(start, end) + } + } + + /** Return all lines in this string in an iterator, excluding trailing line + * end characters; i.e., apply `.stripLineEnd` to all lines + * returned by `linesWithSeparators`. + */ + @deprecated("Use `linesIterator`, because JDK 11 adds a `lines` method on String", "2.13.0") + def lines: Iterator[String] = linesIterator + + /** Returns this string with first character converted to upper case. + * If the first character of the string is capitalized, it is returned unchanged. + * This method does not convert characters outside the Basic Multilingual Plane (BMP). + */ + def capitalize: String = + if (s == null || s.length == 0 || !s.charAt(0).isLower) s + else updated(0, s.charAt(0).toUpper) + + /** Returns this string with the given `prefix` stripped. If this string does not + * start with `prefix`, it is returned unchanged. + */ + def stripPrefix(prefix: String) = + if (s startsWith prefix) s.substring(prefix.length) + else s + + /** Returns this string with the given `suffix` stripped. If this string does not + * end with `suffix`, it is returned unchanged. + */ + def stripSuffix(suffix: String) = + if (s endsWith suffix) s.substring(0, s.length - suffix.length) + else s + + /** Replace all literal occurrences of `literal` with the literal string `replacement`. + * This method is equivalent to [[java.lang.String#replace(CharSequence,CharSequence)]]. + * + * @param literal the string which should be replaced everywhere it occurs + * @param replacement the replacement string + * @return the resulting string + */ + @deprecated("Use `s.replace` as an exact replacement", "2.13.2") + def replaceAllLiterally(literal: String, replacement: String): String = s.replace(literal, replacement) + + /** For every line in this string: + * + * Strip a leading prefix consisting of blanks or control characters + * followed by `marginChar` from the line. + */ + def stripMargin(marginChar: Char): String = { + val sb = new JStringBuilder(s.length) + for (line <- linesWithSeparators) { + val len = line.length + var index = 0 + while (index < len && line.charAt(index) <= ' ') index += 1 + val stripped = + if (index < len && line.charAt(index) == marginChar) line.substring(index + 1) + else line + sb.append(stripped) + } + sb.toString + } + + /** For every line in this string: + * + * Strip a leading prefix consisting of blanks or control characters + * followed by `|` from the line. 
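+ * @example For instance:
+ * {{{
+ * """hello
+ *   |world""".stripMargin // "hello\nworld"
+ * }}}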
+ */
+  def stripMargin: String = stripMargin('|')
+
+  private[this] def escape(ch: Char): String = if (
+    (ch >= 'a') && (ch <= 'z') ||
+      (ch >= 'A') && (ch <= 'Z') ||
+      (ch >= '0' && ch <= '9')) ch.toString
+  else "\\" + ch
+
+  /** Split this string around the separator character
+   *
+   * If this string is the empty string, returns an array of strings
+   * that contains a single empty string.
+   *
+   * If this string is not the empty string, returns an array containing
+   * the substrings terminated by the start of the string, the end of the
+   * string or the separator character, excluding empty trailing substrings
+   *
+   * If the separator character is a surrogate character, only split on
+   * matching surrogate characters if they are not part of a surrogate pair
+   *
+   * The behaviour follows, and is implemented in terms of String.split(re: String)
+   *
+   *
+   * @example {{{
+   * "a.b".split('.') //returns Array("a", "b")
+   *
+   * //splitting the empty string always returns the array with a single
+   * //empty string
+   * "".split('.') //returns Array("")
+   *
+   * //only trailing empty substrings are removed
+   * "a.".split('.') //returns Array("a")
+   * ".a.".split('.') //returns Array("", "a")
+   * "..a..".split('.') //returns Array("", "", "a")
+   *
+   * //all parts are empty and trailing
+   * ".".split('.') //returns Array()
+   * "..".split('.') //returns Array()
+   *
+   * //surrogate pairs
+   * val high = 0xD852.toChar
+   * val low = 0xDF62.toChar
+   * val highstring = high.toString
+   * val lowstring = low.toString
+   *
+   * //well-formed surrogate pairs are not split
+   * val highlow = highstring + lowstring
+   * highlow.split(high) //returns Array(highlow)
+   *
+   * //bare surrogate characters are split
+   * val bare = "_" + highstring + "_"
+   * bare.split(high) //returns Array("_", "_")
+   *
+   * }}}
+   *
+   * @param separator the character used as a delimiter
+   */
+  def split(separator: Char): Array[String] = s.split(escape(separator))
+
+  @throws(classOf[java.util.regex.PatternSyntaxException])
+  def split(separators: Array[Char]): Array[String] = {
+    val re = separators.foldLeft("[")(_+escape(_)) + "]"
+    s.split(re)
+  }
+
+  /** You can follow a string with `.r`, turning it into a `Regex`. E.g.
+   *
+   *  `"""A\w*""".r` is the regular expression for ASCII-only identifiers starting with `A`.
+   *
+   *  `"""(?<month>\d\d)-(?<day>\d\d)-(?<year>\d\d\d\d)""".r` matches dates
+   *  and provides its subcomponents through groups named "month", "day" and
+   *  "year".
+   */
+  def r: Regex = new Regex(s)
+
+  /** You can follow a string with `.r(g1, ... , gn)`, turning it into a `Regex`,
+   *  with group names g1 through gn.
+   *
+   *  `"""(\d\d)-(\d\d)-(\d\d\d\d)""".r("month", "day", "year")` matches dates
+   *  and provides its subcomponents through groups named "month", "day" and
+   *  "year".
+   *
+   *  @param groupNames The names of the groups in the pattern, in the order they appear.
+   */
+  @deprecated("use inline group names like (?<year>X) instead", "2.13.7")
+  def r(groupNames: String*): Regex = new Regex(s, groupNames: _*)
+
+  /**
+   * @throws java.lang.IllegalArgumentException If the string does not contain a parsable `Boolean`.
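+   * @example For instance (matching is case-insensitive):
+   * {{{
+   * "TRUE".toBoolean // true
+   * "yes".toBoolean  // throws java.lang.IllegalArgumentException
+   * }}}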
+ */ + def toBoolean: Boolean = toBooleanImpl(s) + + /** + * Try to parse as a `Boolean` + * @return `Some(true)` if the string is "true" case insensitive, + * `Some(false)` if the string is "false" case insensitive, + * and `None` if the string is anything else + * @throws java.lang.NullPointerException if the string is `null` + */ + def toBooleanOption: Option[Boolean] = StringParsers.parseBool(s) + + /** + * Parse as a `Byte` (string must contain only decimal digits and optional leading `-` or `+`). + * @throws java.lang.NumberFormatException If the string does not contain a parsable `Byte`. + */ + def toByte: Byte = java.lang.Byte.parseByte(s) + + /** + * Try to parse as a `Byte` + * @return `Some(value)` if the string contains a valid byte value, otherwise `None` + * @throws java.lang.NullPointerException if the string is `null` + */ + def toByteOption: Option[Byte] = StringParsers.parseByte(s) + + /** + * Parse as a `Short` (string must contain only decimal digits and optional leading `-` or `+`). + * @throws java.lang.NumberFormatException If the string does not contain a parsable `Short`. + */ + def toShort: Short = java.lang.Short.parseShort(s) + + /** + * Try to parse as a `Short` + * @return `Some(value)` if the string contains a valid short value, otherwise `None` + * @throws java.lang.NullPointerException if the string is `null` + */ + def toShortOption: Option[Short] = StringParsers.parseShort(s) + + /** + * Parse as an `Int` (string must contain only decimal digits and optional leading `-` or `+`). + * @throws java.lang.NumberFormatException If the string does not contain a parsable `Int`. + */ + def toInt: Int = java.lang.Integer.parseInt(s) + + /** + * Try to parse as an `Int` + * @return `Some(value)` if the string contains a valid Int value, otherwise `None` + * @throws java.lang.NullPointerException if the string is `null` + */ + def toIntOption: Option[Int] = StringParsers.parseInt(s) + + /** + * Parse as a `Long` (string must contain only decimal digits and optional leading `-` or `+`). + * @throws java.lang.NumberFormatException If the string does not contain a parsable `Long`. + */ + def toLong: Long = java.lang.Long.parseLong(s) + + /** + * Try to parse as a `Long` + * @return `Some(value)` if the string contains a valid long value, otherwise `None` + * @throws java.lang.NullPointerException if the string is `null` + */ + def toLongOption: Option[Long] = StringParsers.parseLong(s) + + /** + * Parse as a `Float` (surrounding whitespace is removed with a `trim`). + * @throws java.lang.NumberFormatException If the string does not contain a parsable `Float`. + * @throws java.lang.NullPointerException If the string is null. + */ + def toFloat: Float = java.lang.Float.parseFloat(s) + + /** + * Try to parse as a `Float` + * @return `Some(value)` if the string is a parsable `Float`, `None` otherwise + * @throws java.lang.NullPointerException If the string is null + */ + def toFloatOption: Option[Float] = StringParsers.parseFloat(s) + + /** + * Parse as a `Double` (surrounding whitespace is removed with a `trim`). + * @throws java.lang.NumberFormatException If the string does not contain a parsable `Double`. + * @throws java.lang.NullPointerException If the string is null. 
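+   * @example For instance:
+   * {{{
+   * "3.14".toDouble  // 3.14
+   * " 2.5 ".toDouble // 2.5 (surrounding whitespace is trimmed)
+   * }}}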
+ */
+  def toDouble: Double = java.lang.Double.parseDouble(s)
+
+  /**
+   * Try to parse as a `Double`
+   * @return `Some(value)` if the string is a parsable `Double`, `None` otherwise
+   * @throws java.lang.NullPointerException If the string is null
+   */
+  def toDoubleOption: Option[Double] = StringParsers.parseDouble(s)
+
+  private[this] def toBooleanImpl(s: String): Boolean =
+    if (s == null) throw new IllegalArgumentException("For input string: \"null\"")
+    else if (s.equalsIgnoreCase("true")) true
+    else if (s.equalsIgnoreCase("false")) false
+    else throw new IllegalArgumentException("For input string: \""+s+"\"")
+
+  def toArray[B >: Char](implicit tag: ClassTag[B]): Array[B] =
+    if (tag == ClassTag.Char) s.toCharArray.asInstanceOf[Array[B]]
+    else new WrappedString(s).toArray[B]
+
+  private[this] def unwrapArg(arg: Any): AnyRef = arg match {
+    case x: ScalaNumber => x.underlying
+    case x              => x.asInstanceOf[AnyRef]
+  }
+
+  /** Uses the underlying string as a pattern (in a fashion similar to
+   *  printf in C), and uses the supplied arguments to fill in the
+   *  holes.
+   *
+   *  The interpretation of the formatting patterns is described in
+   *  [[java.util.Formatter]], with the addition that
+   *  classes deriving from `ScalaNumber` (such as [[scala.BigInt]] and
+   *  [[scala.BigDecimal]]) are unwrapped to pass a type which `Formatter`
+   *  understands.
+   *
+   *  @param args the arguments used to instantiate the pattern.
+   *  @throws java.lang.IllegalArgumentException
+   */
+  def format(args: Any*): String =
+    java.lang.String.format(s, args map unwrapArg: _*)
+
+  /** Like `format(args*)` but takes an initial `Locale` parameter
+   *  which influences formatting as in `java.lang.String`'s format.
+   *
+   *  The interpretation of the formatting patterns is described in
+   *  [[java.util.Formatter]], with the addition that
+   *  classes deriving from `ScalaNumber` (such as `scala.BigInt` and
+   *  `scala.BigDecimal`) are unwrapped to pass a type which `Formatter`
+   *  understands.
+   *
+   *  @param l    an instance of `java.util.Locale`
+   *  @param args the arguments used to instantiate the pattern.
+   *  @throws java.lang.IllegalArgumentException
+   */
+  def formatLocal(l: java.util.Locale, args: Any*): String =
+    java.lang.String.format(l, s, args map unwrapArg: _*)
+
+  def compare(that: String): Int = s.compareTo(that)
+
+  /** Returns true if `this` is less than `that` */
+  def < (that: String): Boolean = compare(that) < 0
+
+  /** Returns true if `this` is greater than `that`. */
+  def > (that: String): Boolean = compare(that) > 0
+
+  /** Returns true if `this` is less than or equal to `that`. */
+  def <= (that: String): Boolean = compare(that) <= 0
+
+  /** Returns true if `this` is greater than or equal to `that`. */
+  def >= (that: String): Boolean = compare(that) >= 0
+
+  /** Counts the number of chars in this string which satisfy a predicate */
+  def count(p: (Char) => Boolean): Int = {
+    var i, res = 0
+    val len = s.length
+    while(i < len) {
+      if(p(s.charAt(i))) res += 1
+      i += 1
+    }
+    res
+  }
+
+  /** Apply `f` to each element for its side effects.
+   *  Note: [U] parameter needed to help scalac's type inference.
+   */
+  def foreach[U](f: Char => U): Unit = {
+    val len = s.length
+    var i = 0
+    while(i < len) {
+      f(s.charAt(i))
+      i += 1
+    }
+  }
+
+  /** Tests whether a predicate holds for all chars of this string.
+   *
+   *  @param p the predicate used to test elements.
+   *  @return  `true` if this string is empty or the given predicate `p`
+   *           holds for all chars of this string, otherwise `false`.
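+   *  @example For instance:
+   *           {{{
+   *           "12345".forall(_.isDigit) // true
+   *           "".forall(_.isDigit)      // true (vacuously, on the empty string)
+   *           }}}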
+ */ + def forall(@deprecatedName("f", "2.13.3") p: Char => Boolean): Boolean = { + var i = 0 + val len = s.length + while(i < len) { + if(!p(s.charAt(i))) return false + i += 1 + } + true + } + + /** Applies a binary operator to a start value and all chars of this string, + * going left to right. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return the result of inserting `op` between consecutive chars of this string, + * going left to right with the start value `z` on the left: + * {{{ + * op(...op(z, x_1), x_2, ..., x_n) + * }}} + * where `x,,1,,, ..., x,,n,,` are the chars of this string. + * Returns `z` if this string is empty. + */ + def foldLeft[B](z: B)(op: (B, Char) => B): B = { + var v = z + var i = 0 + val len = s.length + while(i < len) { + v = op(v, s.charAt(i)) + i += 1 + } + v + } + + /** Applies a binary operator to all chars of this string and a start value, + * going right to left. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return the result of inserting `op` between consecutive chars of this string, + * going right to left with the start value `z` on the right: + * {{{ + * op(x_1, op(x_2, ... op(x_n, z)...)) + * }}} + * where `x,,1,,, ..., x,,n,,` are the chars of this string. + * Returns `z` if this string is empty. + */ + def foldRight[B](z: B)(op: (Char, B) => B): B = { + var v = z + var i = s.length - 1 + while(i >= 0) { + v = op(s.charAt(i), v) + i -= 1 + } + v + } + + /** Folds the chars of this string using the specified associative binary operator. + * + * @tparam A1 a type parameter for the binary operator, a supertype of Char. + * @param z a neutral element for the fold operation; may be added to the result + * an arbitrary number of times, and must not change the result (e.g., `Nil` for list concatenation, + * 0 for addition, or 1 for multiplication). + * @param op a binary operator that must be associative. + * @return the result of applying the fold operator `op` between all the chars and `z`, or `z` if this string is empty. + */ + @`inline` def fold[A1 >: Char](z: A1)(op: (A1, A1) => A1): A1 = foldLeft(z)(op) + + /** Selects the first char of this string. + * @return the first char of this string. + * @throws NoSuchElementException if the string is empty. + */ + def head: Char = if(s.isEmpty) throw new NoSuchElementException("head of empty String") else s.charAt(0) + + /** Optionally selects the first char. + * @return the first char of this string if it is nonempty, + * `None` if it is empty. + */ + def headOption: Option[Char] = + if(s.isEmpty) None else Some(s.charAt(0)) + + /** Selects the last char of this string. + * @return the last char of this string. + * @throws NoSuchElementException if the string is empty. + */ + def last: Char = if(s.isEmpty) throw new NoSuchElementException("last of empty String") else s.charAt(s.length-1) + + /** Optionally selects the last char. + * @return the last char of this string if it is nonempty, + * `None` if it is empty. + */ + def lastOption: Option[Char] = + if(s.isEmpty) None else Some(s.charAt(s.length-1)) + + /** Produces the range of all indices of this string. + * + * @return a `Range` value from `0` to one less than the length of this string. + */ + def indices: Range = Range(0, s.length) + + /** Iterator can be used only once */ + def iterator: Iterator[Char] = new StringIterator(s) + + /** Stepper can be used with Java 8 Streams. 
This method is equivalent to a call to + * [[charStepper]]. See also [[codePointStepper]]. + */ + @`inline` def stepper: IntStepper with EfficientSplit = charStepper + + /** Steps over characters in this string. Values are packed in `Int` for efficiency + * and compatibility with Java 8 Streams which have an efficient specialization for `Int`. + */ + @`inline` def charStepper: IntStepper with EfficientSplit = new CharStringStepper(s, 0, s.length) + + /** Steps over code points in this string. + */ + @`inline` def codePointStepper: IntStepper with EfficientSplit = new CodePointStringStepper(s, 0, s.length) + + /** Tests whether the string is not empty. */ + @`inline` def nonEmpty: Boolean = !s.isEmpty + + /** Returns new sequence with elements in reversed order. + * @note $unicodeunaware + */ + def reverse: String = new JStringBuilder(s).reverse().toString + + /** An iterator yielding chars in reversed order. + * + * Note: `xs.reverseIterator` is the same as `xs.reverse.iterator` but implemented more efficiently. + * + * @return an iterator yielding the chars of this string in reversed order + */ + def reverseIterator: Iterator[Char] = new ReverseIterator(s) + + /** Creates a non-strict filter of this string. + * + * @note the difference between `c filter p` and `c withFilter p` is that + * the former creates a new string, whereas the latter only + * restricts the domain of subsequent `map`, `flatMap`, `foreach`, + * and `withFilter` operations. + * + * @param p the predicate used to test elements. + * @return an object of class `stringOps.WithFilter`, which supports + * `map`, `flatMap`, `foreach`, and `withFilter` operations. + * All these operations apply to those chars of this string + * which satisfy the predicate `p`. + */ + def withFilter(p: Char => Boolean): StringOps.WithFilter^{p} = new StringOps.WithFilter(p, s) + + /** The rest of the string without its first char. + * @note $unicodeunaware + */ + def tail: String = slice(1, s.length) + + /** The initial part of the string without its last char. + * @note $unicodeunaware + */ + def init: String = slice(0, s.length-1) + + /** A string containing the first `n` chars of this string. + * @note $unicodeunaware + */ + def take(n: Int): String = slice(0, min(n, s.length)) + + /** The rest of the string without its `n` first chars. + * @note $unicodeunaware + */ + def drop(n: Int): String = slice(min(n, s.length), s.length) + + /** A string containing the last `n` chars of this string. + * @note $unicodeunaware + */ + def takeRight(n: Int): String = drop(s.length - max(n, 0)) + + /** The rest of the string without its `n` last chars. + * @note $unicodeunaware + */ + def dropRight(n: Int): String = take(s.length - max(n, 0)) + + /** Iterates over the tails of this string. The first value will be this + * string and the final one will be an empty string, with the intervening + * values the results of successive applications of `tail`. + * + * @return an iterator over all the tails of this string + * @note $unicodeunaware + */ + def tails: Iterator[String] = iterateUntilEmpty(_.tail) + + /** Iterates over the inits of this string. The first value will be this + * string and the final one will be an empty string, with the intervening + * values the results of successive applications of `init`. + * + * @return an iterator over all the inits of this string + * @note $unicodeunaware + */ + def inits: Iterator[String] = iterateUntilEmpty(_.init) + + // A helper for tails and inits. 
+ private[this] def iterateUntilEmpty(f: String => String): Iterator[String]^{f} = + Iterator.iterate(s)(f).takeWhile(x => !x.isEmpty) ++ Iterator.single("") + + /** Selects all chars of this string which satisfy a predicate. */ + def filter(pred: Char => Boolean): String = { + val len = s.length + val sb = new JStringBuilder(len) + var i = 0 + while (i < len) { + val x = s.charAt(i) + if(pred(x)) sb.append(x) + i += 1 + } + if(len == sb.length()) s else sb.toString + } + + /** Selects all chars of this string which do not satisfy a predicate. */ + @`inline` def filterNot(pred: Char => Boolean): String = filter(c => !pred(c)) + + /** Copy chars of this string to an array. + * Fills the given array `xs` starting at index 0. + * Copying will stop once either the entire string has been copied + * or the end of the array is reached + * + * @param xs the array to fill. + */ + @`inline` def copyToArray(xs: Array[Char]): Int = + copyToArray(xs, 0, Int.MaxValue) + + /** Copy chars of this string to an array. + * Fills the given array `xs` starting at index `start`. + * Copying will stop once either the entire string has been copied + * or the end of the array is reached + * + * @param xs the array to fill. + * @param start the starting index. + */ + @`inline` def copyToArray(xs: Array[Char], start: Int): Int = + copyToArray(xs, start, Int.MaxValue) + + /** Copy chars of this string to an array. + * Fills the given array `xs` starting at index `start` with at most `len` chars. + * Copying will stop once either the entire string has been copied, + * or the end of the array is reached or `len` chars have been copied. + * + * @param xs the array to fill. + * @param start the starting index. + * @param len the maximal number of elements to copy. + */ + def copyToArray(xs: Array[Char], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(s.length, xs.length, start, len) + if (copied > 0) { + s.getChars(0, copied, xs, start) + } + copied + } + + /** Finds index of the first char satisfying some predicate after or at some start index. + * + * @param p the predicate used to test elements. + * @param from the start index + * @return the index `>= from` of the first element of this string that satisfies the predicate `p`, + * or `-1`, if none exists. + */ + def indexWhere(p: Char => Boolean, from: Int = 0): Int = { + val len = s.length + var i = from + while(i < len) { + if(p(s.charAt(i))) return i + i += 1 + } + -1 + } + + /** Finds index of the last char satisfying some predicate before or at some end index. + * + * @param p the predicate used to test elements. + * @param end the end index + * @return the index `<= end` of the last element of this string that satisfies the predicate `p`, + * or `-1`, if none exists. + */ + def lastIndexWhere(p: Char => Boolean, end: Int = Int.MaxValue): Int = { + val len = s.length + var i = min(end, len-1) + while(i >= 0) { + if(p(s.charAt(i))) return i + i -= 1 + } + -1 + } + + /** Tests whether a predicate holds for at least one char of this string. */ + def exists(p: Char => Boolean): Boolean = indexWhere(p) != -1 + + /** Finds the first char of the string satisfying a predicate, if any. + * + * @param p the predicate used to test elements. + * @return an option value containing the first element in the string + * that satisfies `p`, or `None` if none exists. 
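+ * @example For instance:
+ * {{{
+ * "Hello".find(_.isUpper) // Some('H')
+ * "hello".find(_.isDigit) // None
+ * }}}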
+ */ + def find(p: Char => Boolean): Option[Char] = indexWhere(p) match { + case -1 => None + case i => Some(s.charAt(i)) + } + + /** Drops longest prefix of chars that satisfy a predicate. + * + * @param p The predicate used to test elements. + * @return the longest suffix of this string whose first element + * does not satisfy the predicate `p`. + */ + def dropWhile(p: Char => Boolean): String = indexWhere(c => !p(c)) match { + case -1 => "" + case i => s.substring(i) + } + + /** Takes longest prefix of chars that satisfy a predicate. */ + def takeWhile(p: Char => Boolean): String = indexWhere(c => !p(c)) match { + case -1 => s + case i => s.substring(0, i) + } + + /** Splits this string into two at a given position. + * Note: `c splitAt n` is equivalent to `(c take n, c drop n)`. + * + * @param n the position at which to split. + * @return a pair of strings consisting of the first `n` + * chars of this string, and the other chars. + * @note $unicodeunaware + */ + def splitAt(n: Int): (String, String) = (take(n), drop(n)) + + /** Splits this string into a prefix/suffix pair according to a predicate. + * + * Note: `c span p` is equivalent to (but more efficient than) + * `(c takeWhile p, c dropWhile p)`, provided the evaluation of the + * predicate `p` does not cause any side-effects. + * + * @param p the test predicate + * @return a pair consisting of the longest prefix of this string whose + * chars all satisfy `p`, and the rest of this string. + */ + def span(p: Char => Boolean): (String, String) = indexWhere(c => !p(c)) match { + case -1 => (s, "") + case i => (s.substring(0, i), s.substring(i)) + } + + /** Partitions elements in fixed size strings. + * @see [[scala.collection.Iterator]], method `grouped` + * + * @param size the number of elements per group + * @return An iterator producing strings of size `size`, except the + * last will be less than size `size` if the elements don't divide evenly. + * @note $unicodeunaware + */ + def grouped(size: Int): Iterator[String] = new StringOps.GroupedIterator(s, size) + + /** A pair of, first, all chars that satisfy predicate `p` and, second, all chars that do not. */ + def partition(p: Char => Boolean): (String, String) = { + val res1, res2 = new JStringBuilder + var i = 0 + val len = s.length + while(i < len) { + val x = s.charAt(i) + (if(p(x)) res1 else res2).append(x) + i += 1 + } + (res1.toString, res2.toString) + } + + /** Applies a function `f` to each character of the string and returns a pair of strings: the first one + * made of those characters returned by `f` that were wrapped in [[scala.util.Left]], and the second + * one made of those wrapped in [[scala.util.Right]]. + * + * Example: + * {{{ + * val xs = "1one2two3three" partitionMap { c => + * if (c > 'a') Left(c) else Right(c) + * } + * // xs == ("onetwothree", "123") + * }}} + * + * @param f the 'split function' mapping the elements of this string to an [[scala.util.Either]] + * + * @return a pair of strings: the first one made of those characters returned by `f` that were wrapped in [[scala.util.Left]], + * and the second one made of those wrapped in [[scala.util.Right]]. 
+ */ + def partitionMap(f: Char => Either[Char,Char]): (String, String) = { + val res1, res2 = new JStringBuilder + var i = 0 + val len = s.length + while(i < len) { + f(s.charAt(i)) match { + case Left(c) => res1.append(c) + case Right(c) => res2.append(c) + } + i += 1 + } + (res1.toString, res2.toString) + } + + /** Analogous to `zip` except that the elements in each collection are not consumed until a strict operation is + * invoked on the returned `LazyZip2` decorator. + * + * Calls to `lazyZip` can be chained to support higher arities (up to 4) without incurring the expense of + * constructing and deconstructing intermediary tuples. + * + * {{{ + * val xs = List(1, 2, 3) + * val res = (xs lazyZip xs lazyZip xs lazyZip xs).map((a, b, c, d) => a + b + c + d) + * // res == List(4, 8, 12) + * }}} + * + * @param that the iterable providing the second element of each eventual pair + * @tparam B the type of the second element in each eventual pair + * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs + * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. + */ + def lazyZip[B](that: Iterable[B]^): LazyZip2[Char, B, String]^{that} = new LazyZip2(s, new WrappedString(s), that) + + + /* ************************************************************************************************************ + The remaining methods are provided for completeness but they delegate to WrappedString implementations which + may not provide the best possible performance. We need them in `StringOps` because their return type + mentions `C` (which is `String` in `StringOps` and `WrappedString` in `WrappedString`). + ************************************************************************************************************ */ + + + /** Computes the multiset difference between this string and another sequence. + * + * @param that the sequence of chars to remove + * @return a new string which contains all chars of this string + * except some of occurrences of elements that also appear in `that`. + * If an element value `x` appears + * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form + * part of the result, but any following occurrences will. + * @note $unicodeunaware + */ + def diff[B >: Char](that: Seq[B]): String = new WrappedString(s).diff(that).unwrap + + /** Computes the multiset intersection between this string and another sequence. + * + * @param that the sequence of chars to intersect with. + * @return a new string which contains all chars of this string + * which also appear in `that`. + * If an element value `x` appears + * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained + * in the result, but any following occurrences will be omitted. + * @note $unicodeunaware + */ + def intersect[B >: Char](that: Seq[B]): String = new WrappedString(s).intersect(that).unwrap + + /** Selects all distinct chars of this string ignoring the duplicates. + * + * @note $unicodeunaware + */ + def distinct: String = new WrappedString(s).distinct.unwrap + + /** Selects all distinct chars of this string ignoring the duplicates as determined by `==` after applying + * the transforming function `f`. + * + * @param f The transforming function whose result is used to determine the uniqueness of each element + * @tparam B the type of the elements after being transformed by `f` + * @return a new string consisting of all the chars of this string without duplicates. 
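+ * @example For instance, keeping the first char seen for each lower-cased key:
+ * {{{
+ * "aAbB".distinctBy(_.toLower) // "ab"
+ * }}}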
+ * @note $unicodeunaware + */ + def distinctBy[B](f: Char -> B): String = new WrappedString(s).distinctBy(f).unwrap + + /** Sorts the characters of this string according to an Ordering. + * + * The sort is stable. That is, elements that are equal (as determined by + * `ord.compare`) appear in the same order in the sorted sequence as in the original. + * + * @see [[scala.math.Ordering]] + * + * @param ord the ordering to be used to compare elements. + * @return a string consisting of the chars of this string + * sorted according to the ordering `ord`. + * @note $unicodeunaware + */ + def sorted[B >: Char](implicit ord: Ordering[B]): String = new WrappedString(s).sorted(ord).unwrap + + /** Sorts this string according to a comparison function. + * + * The sort is stable. That is, elements that are equal (as determined by + * `lt`) appear in the same order in the sorted sequence as in the original. + * + * @param lt the comparison function which tests whether + * its first argument precedes its second argument in + * the desired ordering. + * @return a string consisting of the elements of this string + * sorted according to the comparison function `lt`. + * @note $unicodeunaware + */ + def sortWith(lt: (Char, Char) => Boolean): String = new WrappedString(s).sortWith(lt).unwrap + + /** Sorts this string according to the Ordering which results from transforming + * an implicitly given Ordering with a transformation function. + * + * The sort is stable. That is, elements that are equal (as determined by + * `ord.compare`) appear in the same order in the sorted sequence as in the original. + * + * @see [[scala.math.Ordering]] + * @param f the transformation function mapping elements + * to some other domain `B`. + * @param ord the ordering assumed on domain `B`. + * @tparam B the target type of the transformation `f`, and the type where + * the ordering `ord` is defined. + * @return a string consisting of the chars of this string + * sorted according to the ordering where `x < y` if + * `ord.lt(f(x), f(y))`. + * @note $unicodeunaware + */ + def sortBy[B](f: Char => B)(implicit ord: Ordering[B]): String = new WrappedString(s).sortBy(f)(ord).unwrap + + /** Partitions this string into a map of strings according to some discriminator function. + * + * @param f the discriminator function. + * @tparam K the type of keys returned by the discriminator function. + * @return A map from keys to strings such that the following invariant holds: + * {{{ + * (xs groupBy f)(k) = xs filter (x => f(x) == k) + * }}} + * That is, every key `k` is bound to a string of those elements `x` + * for which `f(x)` equals `k`. + * @note $unicodeunaware + */ + def groupBy[K](f: Char => K): immutable.Map[K, String] = new WrappedString(s).groupBy(f).view.mapValues(_.unwrap).toMap + + /** Groups chars in fixed size blocks by passing a "sliding window" + * over them (as opposed to partitioning them, as is done in grouped.) + * @see [[scala.collection.Iterator]], method `sliding` + * + * @param size the number of chars per group + * @param step the distance between the first chars of successive groups + * @return An iterator producing strings of size `size`, except the + * last element (which may be the only element) will be truncated + * if there are fewer than `size` chars remaining to be grouped. + * @note $unicodeunaware + */ + def sliding(size: Int, step: Int = 1): Iterator[String] = new WrappedString(s).sliding(size, step).map(_.unwrap) + + /** Iterates over combinations of elements. 
+ * + * A '''combination''' of length `n` is a sequence of `n` elements selected in order of their first index in this sequence. + * + * For example, `"xyx"` has two combinations of length 2. The `x` is selected first: `"xx"`, `"xy"`. + * The sequence `"yx"` is not returned as a combination because it is subsumed by `"xy"`. + * + * If there is more than one way to generate the same combination, only one will be returned. + * + * For example, the result `"xy"` arbitrarily selected one of the `x` elements. + * + * As a further illustration, `"xyxx"` has three different ways to generate `"xy"` because there are three elements `x` + * to choose from. Moreover, there are three unordered pairs `"xx"` but only one is returned. + * + * It is not specified which of these equal combinations is returned. It is an implementation detail + * that should not be relied on. For example, the combination `"xx"` does not necessarily contain + * the first `x` in this sequence. This behavior is observable if the elements compare equal + * but are not identical. + * + * As a consequence, `"xyx".combinations(3).next()` is `"xxy"`: the combination does not reflect the order + * of the original sequence, but the order in which elements were selected, by "first index"; + * the order of each `x` element is also arbitrary. + * + * @return An Iterator which traverses the n-element combinations of this string. + * @example {{{ + * "abbbc".combinations(2).foreach(println) + * // ab + * // ac + * // bb + * // bc + * "bab".combinations(2).foreach(println) + * // bb + * // ba + * }}} + * @note $unicodeunaware + */ + def combinations(n: Int): Iterator[String] = new WrappedString(s).combinations(n).map(_.unwrap) + + /** Iterates over distinct permutations of elements. + * + * @return An Iterator which traverses the distinct permutations of this string. + * @example {{{ + * "abb".permutations.foreach(println) + * // abb + * // bab + * // bba + * }}} + * @note $unicodeunaware + */ + def permutations: Iterator[String] = new WrappedString(s).permutations.map(_.unwrap) +} + +final case class StringView(s: String) extends AbstractIndexedSeqView[Char] { + def length = s.length + @throws[StringIndexOutOfBoundsException] + def apply(n: Int) = s.charAt(n) + override def toString: String = s"StringView($s)" +} diff --git a/tests/pos-special/stdlib/collection/StringParsers.scala b/tests/pos-special/stdlib/collection/StringParsers.scala new file mode 100644 index 000000000000..47281815da71 --- /dev/null +++ b/tests/pos-special/stdlib/collection/StringParsers.scala @@ -0,0 +1,320 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection + +import scala.annotation.tailrec +import language.experimental.captureChecking + +/** A module containing the implementations of parsers from strings to numeric types, and boolean + */ +private[scala] object StringParsers { + + //compile-time constant helpers + + //Int.MinValue == -2147483648 + private final val intOverflowBoundary = -214748364 + private final val intOverflowDigit = 9 + //Long.MinValue == -9223372036854775808L + private final val longOverflowBoundary = -922337203685477580L + private final val longOverflowDigit = 9 + + @inline + private[this] final def decValue(ch: Char): Int = java.lang.Character.digit(ch, 10) + + @inline + private[this] final def stepToOverflow(from: String, len: Int, agg: Int, isPositive: Boolean, min: Int): Option[Int] = { + @tailrec + def rec(i: Int, agg: Int): Option[Int] = + if (agg < min) None + else if (i == len) { + if (!isPositive) Some(agg) + else if (agg == min) None + else Some(-agg) + } + else { + val digit = decValue(from.charAt(i)) + if (digit == -1) None + else rec(i + 1, agg * 10 - digit) + } + rec(1, agg) + } + + @inline + private[this] final def isDigit(c: Char): Boolean = c >= '0' && c <= '9' + + //bool + @inline + final def parseBool(from: String): Option[Boolean] = + if (from.equalsIgnoreCase("true")) Some(true) + else if (from.equalsIgnoreCase("false")) Some(false) + else None + + //integral types + final def parseByte(from: String): Option[Byte] = { + val len = from.length() + //empty strings parse to None + if (len == 0) None + else { + val first = from.charAt(0) + val v = decValue(first) + if (len == 1) { + //"+" and "-" parse to None + if (v > -1) Some(v.toByte) + else None + } + else if (v > -1) stepToOverflow(from, len, -v, true, Byte.MinValue).map(_.toByte) + else if (first == '+') stepToOverflow(from, len, 0, true, Byte.MinValue).map(_.toByte) + else if (first == '-') stepToOverflow(from, len, 0, false, Byte.MinValue).map(_.toByte) + else None + } + } + + final def parseShort(from: String): Option[Short] = { + val len = from.length() + //empty strings parse to None + if (len == 0) None + else { + val first = from.charAt(0) + val v = decValue(first) + if (len == 1) { + //"+" and "-" parse to None + if (v > -1) Some(v.toShort) + else None + } + else if (v > -1) stepToOverflow(from, len, -v, true, Short.MinValue).map(_.toShort) + else if (first == '+') stepToOverflow(from, len, 0, true, Short.MinValue).map(_.toShort) + else if (first == '-') stepToOverflow(from, len, 0, false, Short.MinValue).map(_.toShort) + else None + } + } + + final def parseInt(from: String): Option[Int] = { + val len = from.length() + + @tailrec + def step(i: Int, agg: Int, isPositive: Boolean): Option[Int] = { + if (i == len) { + if (!isPositive) Some(agg) + else if (agg == Int.MinValue) None + else Some(-agg) + } + else if (agg < intOverflowBoundary) None + else { + val digit = decValue(from.charAt(i)) + if (digit == -1 || (agg == intOverflowBoundary && digit == intOverflowDigit)) None + else step(i + 1, (agg * 10) - digit, isPositive) + } + } + //empty strings parse to None + if (len == 0) None + else { + val first = from.charAt(0) + val v = decValue(first) + if (len == 1) { + //"+" and "-" parse to None + if (v > -1) Some(v) + else None + } + else if (v > -1) step(1, -v, true) + else if (first == '+') step(1, 0, true) + else if (first == '-') step(1, 0, false) + else None + } + } + + final def parseLong(from: String): Option[Long] = { + //like parseInt, but Longer + val len = from.length() + + @tailrec + def 
step(i: Int, agg: Long, isPositive: Boolean): Option[Long] = { + if (i == len) { + if (isPositive && agg == Long.MinValue) None + else if (isPositive) Some(-agg) + else Some(agg) + } + else if (agg < longOverflowBoundary) None + else { + val digit = decValue(from.charAt(i)) + if (digit == -1 || (agg == longOverflowBoundary && digit == longOverflowDigit)) None + else step(i + 1, agg * 10 - digit, isPositive) + } + } + //empty strings parse to None + if (len == 0) None + else { + val first = from.charAt(0) + val v = decValue(first).toLong + if (len == 1) { + //"+" and "-" parse to None + if (v > -1) Some(v) + else None + } + else if (v > -1) step(1, -v, true) + else if (first == '+') step(1, 0, true) + else if (first == '-') step(1, 0, false) + else None + } + } + + //floating point + final def checkFloatFormat(format: String): Boolean = { + //indices are tracked with a start index which points *at* the first index + //and an end index which points *after* the last index + //so that slice length === end - start + //thus start == end <=> empty slice + //and format.substring(start, end) is equivalent to the slice + + //some utilities for working with index bounds into the original string + @inline + def forAllBetween(start: Int, end: Int, pred: Char => Boolean): Boolean = { + @tailrec + def rec(i: Int): Boolean = i >= end || pred(format.charAt(i)) && rec(i + 1) + rec(start) + } + + //one after last index for the predicate to hold, or `from` if none hold + //may point after the end of the string + @inline + def skipIndexWhile(predicate: Char => Boolean, from: Int, until: Int): Int = { + @tailrec @inline + def rec(i: Int): Int = if ((i < until) && predicate(format.charAt(i))) rec(i + 1) + else i + rec(from) + } + + + def isHexFloatLiteral(startIndex: Int, endIndex: Int): Boolean = { + def isHexDigit(ch: Char) = ((ch >= '0' && ch <= '9') || + (ch >= 'a' && ch <= 'f') || + (ch >= 'A' && ch <= 'F')) + + def prefixOK(startIndex: Int, endIndex: Int): Boolean = { + val len = endIndex - startIndex + (len > 0) && { + //the prefix part is + //hexDigits + //hexDigits. + //hexDigits.hexDigits + //.hexDigits + //but not . 
+ if (format.charAt(startIndex) == '.') { + (len > 1) && forAllBetween(startIndex + 1, endIndex, isHexDigit) + } else { + val noLeading = skipIndexWhile(isHexDigit, startIndex, endIndex) + (noLeading >= endIndex) || + ((format.charAt(noLeading) == '.') && forAllBetween(noLeading + 1, endIndex, isHexDigit)) + } + } + } + + def postfixOK(startIndex: Int, endIndex: Int): Boolean = + (startIndex < endIndex) && { + (forAllBetween(startIndex, endIndex, isDigit)) || { + val startchar = format.charAt(startIndex) + (startchar == '+' || startchar == '-') && + (endIndex - startIndex > 1) && + forAllBetween(startIndex + 1, endIndex, isDigit) + } + } + // prefix [pP] postfix + val pIndex = format.indexWhere(ch => ch == 'p' || ch == 'P', startIndex) + (pIndex <= endIndex) && prefixOK(startIndex, pIndex) && postfixOK(pIndex + 1, endIndex) + } + + def isDecFloatLiteral(startIndex: Int, endIndex: Int): Boolean = { + //invariant: endIndex > startIndex + + def isExp(c: Char): Boolean = c == 'e' || c == 'E' + + def expOK(startIndex: Int, endIndex: Int): Boolean = + (startIndex < endIndex) && { + val startChar = format.charAt(startIndex) + if (startChar == '+' || startChar == '-') + (endIndex > (startIndex + 1)) && + skipIndexWhile(isDigit, startIndex + 1, endIndex) == endIndex + else skipIndexWhile(isDigit, startIndex, endIndex) == endIndex + } + + //significant can be one of + //* digits.digits + //* .digits + //* digits. + //but not just . + val startChar = format.charAt(startIndex) + if (startChar == '.') { + val noSignificant = skipIndexWhile(isDigit, startIndex + 1, endIndex) + // a digit is required followed by optional exp + (noSignificant > startIndex + 1) && (noSignificant >= endIndex || + isExp(format.charAt(noSignificant)) && expOK(noSignificant + 1, endIndex) + ) + } + else if (isDigit(startChar)) { + // one set of digits, then optionally a period, then optionally another set of digits, then optionally an exponent + val noInt = skipIndexWhile(isDigit, startIndex, endIndex) + // just the digits + (noInt == endIndex) || { + if (format.charAt(noInt) == '.') { + val noSignificant = skipIndexWhile(isDigit, noInt + 1, endIndex) + (noSignificant >= endIndex) || //no exponent + isExp(format.charAt(noSignificant)) && expOK(noSignificant + 1, endIndex) + } else + isExp(format.charAt(noInt)) && expOK(noInt + 1, endIndex) + } + } + else false + } + + //count 0x00 to 0x20 as "whitespace", and nothing else + val unspacedStart = format.indexWhere(ch => ch.toInt > 0x20) + val unspacedEnd = format.lastIndexWhere(ch => ch.toInt > 0x20) + 1 + + if (unspacedStart == -1 || unspacedStart >= unspacedEnd || unspacedEnd <= 0) false + else { + //all formats can have a sign + val unsigned = { + val startchar = format.charAt(unspacedStart) + if (startchar == '-' || startchar == '+') unspacedStart + 1 else unspacedStart + } + if (unsigned >= unspacedEnd) false + //that's it for NaN and Infinity + else if (format.charAt(unsigned) == 'N') format.substring(unsigned, unspacedEnd) == "NaN" + else if (format.charAt(unsigned) == 'I') format.substring(unsigned, unspacedEnd) == "Infinity" + else { + //all other formats can have a format suffix + val desuffixed = { + val endchar = format.charAt(unspacedEnd - 1) + if (endchar == 'f' || endchar == 'F' || endchar == 'd' || endchar == 'D') unspacedEnd - 1 + else unspacedEnd + } + val len = desuffixed - unsigned + if (len <= 0) false + else if (len >= 2 && (format.charAt(unsigned + 1) == 'x' || format.charAt(unsigned + 1) == 'X')) + format.charAt(unsigned) == '0' && 
isHexFloatLiteral(unsigned + 2, desuffixed) + else isDecFloatLiteral(unsigned, desuffixed) + } + } + } + + @inline + def parseFloat(from: String): Option[Float] = + if (checkFloatFormat(from)) Some(java.lang.Float.parseFloat(from)) + else None + + @inline + def parseDouble(from: String): Option[Double] = + if (checkFloatFormat(from)) Some(java.lang.Double.parseDouble(from)) + else None + +} diff --git a/tests/pos-special/stdlib/collection/View.scala b/tests/pos-special/stdlib/collection/View.scala new file mode 100644 index 000000000000..8e2ee3ad9e32 --- /dev/null +++ b/tests/pos-special/stdlib/collection/View.scala @@ -0,0 +1,542 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.annotation.{nowarn, tailrec} +import scala.collection.mutable.{ArrayBuffer, Builder} +import scala.collection.immutable.LazyList +import language.experimental.captureChecking + +/** Views are collections whose transformation operations are non strict: the resulting elements + * are evaluated only when the view is effectively traversed (e.g. using `foreach` or `foldLeft`), + * or when the view is converted to a strict collection type (using the `to` operation). + * @define coll view + * @define Coll `View` + */ +trait View[+A] extends Iterable[A] with IterableOps[A, View, View[A]] with IterableFactoryDefaults[A, View] with Serializable { + this: View[A]^ => + + override def view: View[A]^{this} = this + + override def iterableFactory: IterableFactory[View] = View + + override def empty: scala.collection.View[A] = iterableFactory.empty + + override def toString: String = className + "()" + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "View" + + @deprecated("Views no longer know about their underlying collection type; .force always returns an IndexedSeq", "2.13.0") + @`inline` def force: IndexedSeq[A] = toIndexedSeq +} + +/** This object reifies operations on views as case classes + * + * @define Coll View + * @define coll view + */ +@SerialVersionUID(3L) +object View extends IterableFactory[View] { + + /** + * @return A `View[A]` whose underlying iterator is provided by the `it` parameter-less function. + * + * @param it Function creating the iterator to be used by the view. This function must always return + * a fresh `Iterator`, otherwise the resulting view will be effectively iterable only once. + * + * @tparam A View element type + */ + def fromIteratorProvider[A](it: () => Iterator[A]^): View[A]^{it} = new AbstractView[A] { + def iterator: Iterator[A]^{it} = it() + } + + /** + * @return A view iterating over the given `Iterable` + * + * @param it The `IterableOnce` to view. A proper `Iterable` is used directly. If it is really only + * `IterableOnce` it gets memoized on the first traversal. 
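+   *
+   * An illustrative sketch of the three cases handled below:
+   * {{{
+   *   View.from(View(1, 2, 3))     // already a view: returned as is
+   *   View.from(List(1, 2, 3))     // proper Iterable: iterated anew on each traversal
+   *   View.from(Iterator(1, 2, 3)) // IterableOnce only: memoized via LazyList
+   * }}}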
+ * + * @tparam E View element type + */ + def from[E](it: IterableOnce[E]^): View[E]^{it} = it match { + case it: View[E] => it + case it: Iterable[E] => View.fromIteratorProvider(() => it.iterator) + case _ => LazyList.from(it).view + } + + def empty[A]: View[A] = Empty + + def newBuilder[A]: Builder[A, View[A]] = ArrayBuffer.newBuilder[A].mapResult(from) + + override def apply[A](xs: A*): View[A] = new Elems(xs: _*) + + /** The empty view */ + @SerialVersionUID(3L) + case object Empty extends AbstractView[Nothing] { + def iterator = Iterator.empty + override def knownSize = 0 + override def isEmpty: Boolean = true + } + + /** A view with exactly one element */ + @SerialVersionUID(3L) + class Single[A](a: A) extends AbstractView[A] { + def iterator: Iterator[A] = Iterator.single(a) + override def knownSize: Int = 1 + override def isEmpty: Boolean = false + } + + /** A view with given elements */ + @SerialVersionUID(3L) + class Elems[A](xs: A*) extends AbstractView[A], Pure { + def iterator = xs.iterator + override def knownSize = xs.knownSize + override def isEmpty: Boolean = xs.isEmpty + } + + /** A view containing the results of some element computation a number of times. */ + @SerialVersionUID(3L) + class Fill[A](n: Int)(elem: => A) extends AbstractView[A] { + def iterator: Iterator[A]^{elem} = Iterator.fill(n)(elem) + override def knownSize: Int = 0 max n + override def isEmpty: Boolean = n <= 0 + } + + /** A view containing values of a given function over a range of integer values starting from 0. */ + @SerialVersionUID(3L) + class Tabulate[A](n: Int)(f: Int => A) extends AbstractView[A] { + def iterator: Iterator[A]^{f} = Iterator.tabulate(n)(f) + override def knownSize: Int = 0 max n + override def isEmpty: Boolean = n <= 0 + } + + /** A view containing repeated applications of a function to a start value */ + @SerialVersionUID(3L) + class Iterate[A](start: A, len: Int)(f: A => A) extends AbstractView[A] { + def iterator: Iterator[A]^{f} = Iterator.iterate(start)(f).take(len) + override def knownSize: Int = 0 max len + override def isEmpty: Boolean = len <= 0 + } + + /** A view that uses a function `f` to produce elements of type `A` and update + * an internal state `S`. + */ + @SerialVersionUID(3L) + class Unfold[A, S](initial: S)(f: S => Option[(A, S)]) extends AbstractView[A] { + def iterator: Iterator[A]^{f} = Iterator.unfold(initial)(f) + } + + /** An `IterableOps` whose collection type and collection type constructor are unknown */ + type SomeIterableOps[A] = IterableOps[A, AnyConstr, _] + + /** A view that filters an underlying collection. 
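+   *
+   * (Illustrative) The predicate is applied only when the view is traversed;
+   * `isFlipped = true` inverts the test, as `filterNot` does:
+   * {{{
+   *   val evens = View.Filter(List(1, 2, 3, 4), (_: Int) % 2 == 0, isFlipped = false)
+   *   evens.toList  // List(2, 4): the predicate runs only here
+   * }}}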
*/ + @SerialVersionUID(3L) + class Filter[A](val underlying: SomeIterableOps[A]^, val p: A => Boolean, val isFlipped: Boolean) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying, p} = underlying.iterator.filterImpl(p, isFlipped) + override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + } + + object Filter { + def apply[A](underlying: Iterable[A]^, p: A => Boolean, isFlipped: Boolean): Filter[A]^{underlying, p} = + underlying match { + case filter: Filter[A] if filter.isFlipped == isFlipped => new Filter(filter.underlying, a => filter.p(a) && p(a), isFlipped) + case _ => new Filter(underlying, p, isFlipped) + } + } + + /** A view that removes the duplicated elements as determined by the transformation function `f` */ + @SerialVersionUID(3L) + class DistinctBy[A, B](underlying: SomeIterableOps[A]^, f: A -> B) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = underlying.iterator.distinctBy(f) + override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + @SerialVersionUID(3L) + class LeftPartitionMapped[A, A1, A2](underlying: SomeIterableOps[A]^, f: A => Either[A1, A2]) extends AbstractView[A1] { + def iterator: Iterator[A1]^{underlying, f} = new AbstractIterator[A1] { + private[this] val self = underlying.iterator + private[this] var hd: A1 = _ + private[this] var hdDefined: Boolean = false + def hasNext = hdDefined || { + @tailrec + def findNext(): Boolean = + if (self.hasNext) { + f(self.next()) match { + case Left(a1) => hd = a1; hdDefined = true; true + case Right(_) => findNext() + } + } else false + findNext() + } + def next() = + if (hasNext) { + hdDefined = false + hd + } else Iterator.empty.next() + } + } + + @SerialVersionUID(3L) + class RightPartitionMapped[A, A1, A2](underlying: SomeIterableOps[A]^, f: A => Either[A1, A2]) extends AbstractView[A2] { + def iterator: Iterator[A2]^{this} = new AbstractIterator[A2] { + private[this] val self = underlying.iterator + private[this] var hd: A2 = _ + private[this] var hdDefined: Boolean = false + def hasNext = hdDefined || { + @tailrec + def findNext(): Boolean = + if (self.hasNext) { + f(self.next()) match { + case Left(_) => findNext() + case Right(a2) => hd = a2; hdDefined = true; true + } + } else false + findNext() + } + def next() = + if (hasNext) { + hdDefined = false + hd + } else Iterator.empty.next() + } + } + + /** A view that drops leading elements of the underlying collection. */ + @SerialVersionUID(3L) + class Drop[A](underlying: SomeIterableOps[A]^, n: Int) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = underlying.iterator.drop(n) + protected val normN = n max 0 + override def knownSize = { + val size = underlying.knownSize + if (size >= 0) (size - normN) max 0 else -1 + } + override def isEmpty: Boolean = iterator.isEmpty + } + + /** A view that drops trailing elements of the underlying collection. 
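+   *
+   * (Illustrative) Because the underlying size may be unknown, the iterator
+   * buffers up to `n` elements to find where to stop:
+   * {{{
+   *   List(1, 2, 3, 4).view.dropRight(2).toList  // List(1, 2)
+   * }}}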
*/ + @SerialVersionUID(3L) + class DropRight[A](underlying: SomeIterableOps[A]^, n: Int) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = dropRightIterator(underlying.iterator, n) + protected val normN = n max 0 + override def knownSize = { + val size = underlying.knownSize + if (size >= 0) (size - normN) max 0 else -1 + } + override def isEmpty: Boolean = + if(knownSize >= 0) knownSize == 0 + else iterator.isEmpty + } + + @SerialVersionUID(3L) + class DropWhile[A](underlying: SomeIterableOps[A]^, p: A => Boolean) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying, p} = underlying.iterator.dropWhile(p) + override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + } + + /** A view that takes leading elements of the underlying collection. */ + @SerialVersionUID(3L) + class Take[+A](underlying: SomeIterableOps[A]^, n: Int) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = underlying.iterator.take(n) + protected val normN = n max 0 + override def knownSize = { + val size = underlying.knownSize + if (size >= 0) size min normN else -1 + } + override def isEmpty: Boolean = iterator.isEmpty + } + + /** A view that takes trailing elements of the underlying collection. */ + @SerialVersionUID(3L) + class TakeRight[+A](underlying: SomeIterableOps[A]^, n: Int) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = takeRightIterator(underlying.iterator, n) + protected val normN = n max 0 + override def knownSize = { + val size = underlying.knownSize + if (size >= 0) size min normN else -1 + } + override def isEmpty: Boolean = + if(knownSize >= 0) knownSize == 0 + else iterator.isEmpty + } + + @SerialVersionUID(3L) + class TakeWhile[A](underlying: SomeIterableOps[A]^, p: A => Boolean) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying, p} = underlying.iterator.takeWhile(p) + override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + } + + @SerialVersionUID(3L) + class ScanLeft[+A, +B](underlying: SomeIterableOps[A]^, z: B, op: (B, A) => B) extends AbstractView[B] { + def iterator: Iterator[B]^{underlying, op} = underlying.iterator.scanLeft(z)(op) + override def knownSize: Int = { + val size = underlying.knownSize + if (size >= 0) size + 1 else -1 + } + override def isEmpty: Boolean = iterator.isEmpty + } + + /** A view that maps elements of the underlying collection. */ + @SerialVersionUID(3L) + class Map[+A, +B](underlying: SomeIterableOps[A]^, f: A => B) extends AbstractView[B] { + def iterator: Iterator[B]^{underlying, f} = underlying.iterator.map(f) + override def knownSize = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + /** A view that flatmaps elements of the underlying collection. */ + @SerialVersionUID(3L) + class FlatMap[A, B](underlying: SomeIterableOps[A]^, f: A => IterableOnce[B]^) extends AbstractView[B] { + def iterator: Iterator[B]^{underlying, f} = underlying.iterator.flatMap(f) + override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + } + + /** A view that collects elements of the underlying collection. 
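+   *
+   * (Illustrative)
+   * {{{
+   *   List[Any](1, "two", 3).view.collect { case n: Int => n * 10 }.toList
+   *   // List(10, 30)
+   * }}}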
*/ + @SerialVersionUID(3L) + class Collect[+A, B](underlying: SomeIterableOps[A]^, pf: PartialFunction[A, B]^) extends AbstractView[B] { + def iterator: Iterator[B]^{underlying, pf} = underlying.iterator.collect(pf) + } + + /** A view that concatenates elements of the prefix collection or iterator with the elements + * of the suffix collection or iterator. + */ + @SerialVersionUID(3L) + class Concat[A](prefix: SomeIterableOps[A]^, suffix: SomeIterableOps[A]^) extends AbstractView[A] { + def iterator: Iterator[A]^{prefix, suffix} = prefix.iterator ++ suffix.iterator + override def knownSize = { + val prefixSize = prefix.knownSize + if (prefixSize >= 0) { + val suffixSize = suffix.knownSize + if (suffixSize >= 0) prefixSize + suffixSize + else -1 + } + else -1 + } + override def isEmpty: Boolean = prefix.isEmpty && suffix.isEmpty + } + + /** A view that zips elements of the underlying collection with the elements + * of another collection. + */ + @SerialVersionUID(3L) + class Zip[A, B](underlying: SomeIterableOps[A]^, other: Iterable[B]^) extends AbstractView[(A, B)] { + def iterator: Iterator[(A, B)]^{underlying, other} = underlying.iterator.zip(other) + override def knownSize = { + val s1 = underlying.knownSize + if (s1 == 0) 0 else { + val s2 = other.knownSize + if (s2 == 0) 0 else s1 min s2 + } + } + override def isEmpty: Boolean = underlying.isEmpty || other.isEmpty + } + + /** A view that zips elements of the underlying collection with the elements + * of another collection. If one of the two collections is shorter than the other, + * placeholder elements are used to extend the shorter collection to the length of the longer. + */ + @SerialVersionUID(3L) + class ZipAll[A, B](underlying: SomeIterableOps[A]^, other: Iterable[B]^, thisElem: A, thatElem: B) extends AbstractView[(A, B)] { + def iterator: Iterator[(A, B)]^{underlying, other} = underlying.iterator.zipAll(other, thisElem, thatElem) + override def knownSize = { + val s1 = underlying.knownSize + if(s1 == -1) -1 else { + val s2 = other.knownSize + if(s2 == -1) -1 else s1 max s2 + } + } + override def isEmpty: Boolean = underlying.isEmpty && other.isEmpty + } + + /** A view that appends an element to its elements */ + @SerialVersionUID(3L) + class Appended[+A](underlying: SomeIterableOps[A]^, elem: A) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = + val ct = new Concat(underlying, new View.Single(elem)) + ct.iterator // CC TODO breakout into `ct` needed, otherwise "cannot establish a reference" error + override def knownSize: Int = { + val size = underlying.knownSize + if (size >= 0) size + 1 else -1 + } + override def isEmpty: Boolean = false + } + + /** A view that prepends an element to its elements */ + @SerialVersionUID(3L) + class Prepended[+A](elem: A, underlying: SomeIterableOps[A]^) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = + val ct = new Concat(new View.Single(elem), underlying) + ct.iterator // CC TODO breakout into `ct` needed, otherwise "cannot establish a reference" error + override def knownSize: Int = { + val size = underlying.knownSize + if (size >= 0) size + 1 else -1 + } + override def isEmpty: Boolean = false + } + + @SerialVersionUID(3L) + class Updated[A](underlying: SomeIterableOps[A]^, index: Int, elem: A) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = new AbstractIterator[A] { + private[this] val it = underlying.iterator + private[this] var i = 0 + def next(): A = { + val value = if (i == index) { it.next(); elem } else it.next() + i += 
1 + value + } + def hasNext: Boolean = + if(it.hasNext) true + else if(index >= i) throw new IndexOutOfBoundsException(index.toString) + else false + } + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = iterator.isEmpty + } + + @SerialVersionUID(3L) + private[collection] class Patched[A](underlying: SomeIterableOps[A]^, from: Int, other: IterableOnce[A]^, replaced: Int) extends AbstractView[A] { + // we may be unable to traverse `other` more than once, so we need to cache it if that's the case + private val _other: Iterable[A]^{other} = other match { + case other: Iterable[A] => other + case other => LazyList.from(other) + } + + def iterator: Iterator[A]^{underlying, other} = underlying.iterator.patch(from, _other.iterator, replaced) + override def knownSize: Int = if (underlying.knownSize == 0 && _other.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = if (knownSize == 0) true else iterator.isEmpty + } + + @SerialVersionUID(3L) + class ZipWithIndex[A](underlying: SomeIterableOps[A]^) extends AbstractView[(A, Int)] { + def iterator: Iterator[(A, Int)]^{underlying} = underlying.iterator.zipWithIndex + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + @SerialVersionUID(3L) + class PadTo[A](underlying: SomeIterableOps[A]^, len: Int, elem: A) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = underlying.iterator.padTo(len, elem) + + override def knownSize: Int = { + val size = underlying.knownSize + if (size >= 0) size max len else -1 + } + override def isEmpty: Boolean = underlying.isEmpty && len <= 0 + } + + private[collection] def takeRightIterator[A](it: Iterator[A]^, n: Int): Iterator[A]^{it} = { + val k = it.knownSize + if(k == 0 || n <= 0) Iterator.empty + else if(n == Int.MaxValue) it + else if(k > 0) it.drop((k-n) max 0) + else new TakeRightIterator[A](it, n) + } + + private final class TakeRightIterator[A](underlying: Iterator[A]^, maxlen: Int) extends AbstractIterator[A] { + private[this] var current: Iterator[A]^{underlying} = underlying + private[this] var len: Int = -1 + private[this] var pos: Int = 0 + private[this] var buf: ArrayBuffer[AnyRef] = _ + def init(): Unit = if(buf eq null) { + buf = new ArrayBuffer[AnyRef](maxlen min 256) + len = 0 + while(current.hasNext) { + val n = current.next().asInstanceOf[AnyRef] + if(pos >= buf.length) buf.addOne(n) + else buf(pos) = n + pos += 1 + if(pos == maxlen) pos = 0 + len += 1 + } + current = null + if(len > maxlen) len = maxlen + pos = pos - len + if(pos < 0) pos += maxlen + } + override def knownSize = len + def hasNext: Boolean = { + init() + len > 0 + } + def next(): A = { + init() + if(len == 0) Iterator.empty.next() + else { + val x = buf(pos).asInstanceOf[A] + pos += 1 + if(pos == maxlen) pos = 0 + len -= 1 + x + } + } + override def drop(n: Int): Iterator[A]^{this} = { + init() + if (n > 0) { + len = (len - n) max 0 + pos = (pos + n) % maxlen + } + this + } + } + + private[collection] def dropRightIterator[A](it: Iterator[A]^, n: Int): Iterator[A]^{it} = { + if(n <= 0) it + else { + val k = it.knownSize + if(k >= 0) it.take(k - n) + else new DropRightIterator[A](it, n) + } + } + + private final class DropRightIterator[A](underlying: Iterator[A]^, maxlen: Int) extends AbstractIterator[A] { + private[this] var len: Int = -1 // known size or -1 if the end of `underlying` has not been seen yet + private[this] var pos: Int = 0 + private[this] var buf: ArrayBuffer[AnyRef] = _ + def init(): Unit = 
if(buf eq null) { + buf = new ArrayBuffer[AnyRef](maxlen min 256) + while(pos < maxlen && underlying.hasNext) { + buf.addOne(underlying.next().asInstanceOf[AnyRef]) + pos += 1 + } + if(!underlying.hasNext) len = 0 + pos = 0 + } + override def knownSize = len + def hasNext: Boolean = { + init() + len != 0 + } + def next(): A = { + if(!hasNext) Iterator.empty.next() + else { + val x = buf(pos).asInstanceOf[A] + if(len == -1) { + buf(pos) = underlying.next().asInstanceOf[AnyRef] + if(!underlying.hasNext) len = 0 + } else len -= 1 + pos += 1 + if(pos == maxlen) pos = 0 + x + } + } + } +} + +/** Explicit instantiation of the `View` trait to reduce class file size in subclasses. */ +@SerialVersionUID(3L) +abstract class AbstractView[+A] extends scala.collection.AbstractIterable[A] with View[A] diff --git a/tests/pos-special/stdlib/collection/WithFilter.scala b/tests/pos-special/stdlib/collection/WithFilter.scala new file mode 100644 index 000000000000..0f3830e9fe25 --- /dev/null +++ b/tests/pos-special/stdlib/collection/WithFilter.scala @@ -0,0 +1,72 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +import language.experimental.captureChecking + +/** A template trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods + * of trait `Iterable`. + * + * @tparam A Element type (e.g. `Int`) + * @tparam CC Collection type constructor (e.g. `List`) + * + * @define coll collection + */ +@SerialVersionUID(3L) +abstract class WithFilter[+A, +CC[_]] extends Serializable { + this: WithFilter[A, CC]^ => + + /** Builds a new collection by applying a function to all elements of the + * `filtered` outer $coll. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned collection. + * @return a new $coll resulting from applying + * the given function `f` to each element of the filtered outer $coll + * and collecting the results. + */ + def map[B](f: A => B): CC[B]^{this, f} + + /** Builds a new collection by applying a function to all elements of the + * `filtered` outer $coll containing this `WithFilter` instance that satisfy + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned collection. + * @return a new $coll resulting from applying + * the given collection-valued function `f` to each element + * of the filtered outer $coll and + * concatenating the results. + */ + def flatMap[B](f: A => IterableOnce[B]^): CC[B]^{this, f} + + /** Applies a function `f` to all elements of the `filtered` outer $coll. + * + * @param f the function that is applied for its side-effect to every element. + * The result of function `f` is discarded. + * + * @tparam U the type parameter describing the result of function `f`. + * This result will always be ignored. Typically `U` is `Unit`, + * but this is not necessary. + */ + def foreach[U](f: A => U): Unit + + /** Further refines the filter for this `filtered` $coll. + * + * @param q the predicate used to test elements. + * @return an object of class `WithFilter`, which supports + * `map`, `flatMap`, `foreach`, and `withFilter` operations. + * All these operations apply to those elements of this $coll which + * also satisfy both `p` and `q` predicates. 
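+   *
+   * (Illustrative) Chained guards in a `for`-comprehension desugar to exactly
+   * such nested `withFilter` calls, so no intermediate collection is built:
+   * {{{
+   *   for (x <- xs if x > 0 if x % 2 == 0) yield x * 2
+   *   // ~ xs.withFilter(x => x > 0).withFilter(x => x % 2 == 0).map(x => x * 2)
+   * }}}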
+   */
+  def withFilter(q: A => Boolean): WithFilter[A, CC]^{this, q}
+
+}
diff --git a/tests/pos-special/stdlib/collection/concurrent/BasicNode.java b/tests/pos-special/stdlib/collection/concurrent/BasicNode.java
new file mode 100644
index 000000000000..c6ec91e4fde8
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/concurrent/BasicNode.java
@@ -0,0 +1,19 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.concurrent;
+
+public abstract class BasicNode {
+
+    public abstract String string(int lev);
+
+}
diff --git a/tests/pos-special/stdlib/collection/concurrent/CNodeBase.java b/tests/pos-special/stdlib/collection/concurrent/CNodeBase.java
new file mode 100644
index 000000000000..ddffa365234e
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/concurrent/CNodeBase.java
@@ -0,0 +1,37 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.concurrent;
+
+import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
+
+abstract class CNodeBase<K, V> extends MainNode<K, V> {
+
+    @SuppressWarnings("unchecked")
+    public static final AtomicIntegerFieldUpdater<CNodeBase<?, ?>> updater =
+            AtomicIntegerFieldUpdater.newUpdater((Class<CNodeBase<?, ?>>) (Class<?>) CNodeBase.class, "csize");
+
+    public volatile int csize = -1;
+
+    public boolean CAS_SIZE(int oldval, int nval) {
+        return updater.compareAndSet(this, oldval, nval);
+    }
+
+    public void WRITE_SIZE(int nval) {
+        updater.set(this, nval);
+    }
+
+    public int READ_SIZE() {
+        return updater.get(this);
+    }
+
+}
diff --git a/tests/pos-special/stdlib/collection/concurrent/Gen.java b/tests/pos-special/stdlib/collection/concurrent/Gen.java
new file mode 100644
index 000000000000..07af2983f32d
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/concurrent/Gen.java
@@ -0,0 +1,15 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.concurrent;
+
+final class Gen {}
diff --git a/tests/pos-special/stdlib/collection/concurrent/INodeBase.java b/tests/pos-special/stdlib/collection/concurrent/INodeBase.java
new file mode 100644
index 000000000000..dfb99806594f
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/concurrent/INodeBase.java
@@ -0,0 +1,39 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.concurrent;
+
+import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
+
+abstract class INodeBase<K, V> extends BasicNode {
+
+    @SuppressWarnings("unchecked")
+    public static final AtomicReferenceFieldUpdater<INodeBase<?, ?>, MainNode<?, ?>> updater =
+            AtomicReferenceFieldUpdater.newUpdater((Class<INodeBase<?, ?>>) (Class<?>) INodeBase.class, (Class<MainNode<?, ?>>) (Class<?>) MainNode.class, "mainnode");
+
+    static final Object RESTART = new Object();
+
+    static final Object NO_SUCH_ELEMENT_SENTINEL = new Object();
+
+    public volatile MainNode<K, V> mainnode = null;
+
+    public final Gen gen;
+
+    public INodeBase(Gen generation) {
+        gen = generation;
+    }
+
+    public BasicNode prev() {
+        return null;
+    }
+
+}
diff --git a/tests/pos-special/stdlib/collection/concurrent/MainNode.java b/tests/pos-special/stdlib/collection/concurrent/MainNode.java
new file mode 100644
index 000000000000..f7f022974e9e
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/concurrent/MainNode.java
@@ -0,0 +1,46 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.concurrent;
+
+import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
+
+abstract class MainNode<K, V> extends BasicNode {
+
+    @SuppressWarnings("unchecked")
+    public static final AtomicReferenceFieldUpdater<MainNode<?, ?>, MainNode<?, ?>> updater =
+            AtomicReferenceFieldUpdater.newUpdater((Class<MainNode<?, ?>>) (Class<?>) MainNode.class, (Class<MainNode<?, ?>>) (Class<?>) MainNode.class, "prev");
+
+    public volatile MainNode<K, V> prev = null;
+
+    public abstract int cachedSize(Object ct);
+
+    // standard contract
+    public abstract int knownSize();
+
+    public boolean CAS_PREV(MainNode<K, V> oldval, MainNode<K, V> nval) {
+        return updater.compareAndSet(this, oldval, nval);
+    }
+
+    public void WRITE_PREV(MainNode<K, V> nval) {
+        updater.set(this, nval);
+    }
+
+    // do we need this? unclear in the javadocs...
+    // apparently not - volatile reads are supposed to be safe
+    // regardless of whether there are concurrent ARFU updates
+    @Deprecated @SuppressWarnings("unchecked")
+    public MainNode<K, V> READ_PREV() {
+        return (MainNode<K, V>) updater.get(this);
+    }
+
+}
diff --git a/tests/pos-special/stdlib/collection/concurrent/Map.scala b/tests/pos-special/stdlib/collection/concurrent/Map.scala
new file mode 100644
index 000000000000..d985dad2edc5
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/concurrent/Map.scala
@@ -0,0 +1,193 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection.concurrent
+
+import language.experimental.captureChecking
+import scala.annotation.tailrec
+
+/** A template trait for mutable maps that allow concurrent access.
+ *
+ * $concurrentmapinfo
+ *
+ * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#concurrent_maps "Scala's Collection Library overview"]]
+ * section on `Concurrent Maps` for more information.
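+ *
+ * (Illustrative) The atomic operations below replace racy check-then-act
+ * sequences on an ordinary mutable map:
+ * {{{
+ *   val m: concurrent.Map[String, Int] = concurrent.TrieMap.empty[String, Int]
+ *   m.putIfAbsent("counter", 1) match {
+ *     case None    => // this thread installed the initial value
+ *     case Some(_) => // another thread won the race; its value is kept
+ *   }
+ * }}}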
+ * + * @tparam K the key type of the map + * @tparam V the value type of the map + * + * @define Coll `concurrent.Map` + * @define coll concurrent map + * @define concurrentmapinfo + * This is a base trait for all Scala concurrent map implementations. It + * provides all of the methods a `Map` does, with the difference that all the + * changes are atomic. It also describes methods specific to concurrent maps. + * + * '''Note''': The concurrent maps do not accept `'''null'''` for keys or values. + * + * @define atomicop + * This is an atomic operation. + */ +trait Map[K, V] extends scala.collection.mutable.Map[K, V] { + + /** + * Associates the given key with a given value, unless the key was already + * associated with some other value. + * + * $atomicop + * + * @param k key with which the specified value is to be associated with + * @param v value to be associated with the specified key + * @return `Some(oldvalue)` if there was a value `oldvalue` previously + * associated with the specified key, or `None` if there was no + * mapping for the specified key + */ + def putIfAbsent(k: K, v: V): Option[V] + + /** + * Removes the entry for the specified key if it's currently mapped to the + * specified value. + * + * $atomicop + * + * @param k key for which the entry should be removed + * @param v value expected to be associated with the specified key if + * the removal is to take place + * @return `true` if the removal took place, `false` otherwise + */ + def remove(k: K, v: V): Boolean + + /** + * Replaces the entry for the given key only if it was previously mapped to + * a given value. + * + * $atomicop + * + * @param k key for which the entry should be replaced + * @param oldvalue value expected to be associated with the specified key + * if replacing is to happen + * @param newvalue value to be associated with the specified key + * @return `true` if the entry was replaced, `false` otherwise + */ + def replace(k: K, oldvalue: V, newvalue: V): Boolean + + /** + * Replaces the entry for the given key only if it was previously mapped + * to some value. + * + * $atomicop + * + * @param k key for which the entry should be replaced + * @param v value to be associated with the specified key + * @return `Some(v)` if the given key was previously mapped to some value `v`, or `None` otherwise + */ + def replace(k: K, v: V): Option[V] + + override def getOrElseUpdate(key: K, op: => V): V = get(key) match { + case Some(v) => v + case None => + val v = op + putIfAbsent(key, v) match { + case Some(ov) => ov + case None => v + } + } + + /** + * Removes the entry for the specified key if it's currently mapped to the + * specified value. Comparison to the specified value is done using reference + * equality. + * + * Not all map implementations can support removal based on reference + * equality, and for those implementations, object equality is used instead. + * + * $atomicop + * + * @param k key for which the entry should be removed + * @param v value expected to be associated with the specified key if + * the removal is to take place + * @return `true` if the removal took place, `false` otherwise + */ + // TODO: make part of the API in a future version + private[collection] def removeRefEq(k: K, v: V): Boolean = remove(k, v) + + /** + * Replaces the entry for the given key only if it was previously mapped to + * a given value. Comparison to the specified value is done using reference + * equality. 
+ * + * Not all map implementations can support replacement based on reference + * equality, and for those implementations, object equality is used instead. + * + * $atomicop + * + * @param k key for which the entry should be replaced + * @param oldValue value expected to be associated with the specified key + * if replacing is to happen + * @param newValue value to be associated with the specified key + * @return `true` if the entry was replaced, `false` otherwise + */ + // TODO: make part of the API in a future version + private[collection] def replaceRefEq(k: K, oldValue: V, newValue: V): Boolean = replace(k, oldValue, newValue) + + /** + * Update a mapping for the specified key and its current optionally-mapped value + * (`Some` if there is current mapping, `None` if not). + * + * If the remapping function returns `Some(v)`, the mapping is updated with the new value `v`. + * If the remapping function returns `None`, the mapping is removed (or remains absent if initially absent). + * If the function itself throws an exception, the exception is rethrown, and the current mapping is left unchanged. + * + * If the map is updated by another concurrent access, the remapping function will be retried until successfully updated. + * + * @param key the key value + * @param remappingFunction a partial function that receives current optionally-mapped value and return a new mapping + * @return the new value associated with the specified key + */ + override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = updateWithAux(key)(remappingFunction) + + @tailrec + private def updateWithAux(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + val previousValue = get(key) + val nextValue = remappingFunction(previousValue) + previousValue match { + case Some(prev) => nextValue match { + case Some(next) => if (replaceRefEq(key, prev, next)) return nextValue + case _ => if (removeRefEq(key, prev)) return None + } + case _ => nextValue match { + case Some(next) => if (putIfAbsent(key, next).isEmpty) return nextValue + case _ => return None + } + } + updateWithAux(key)(remappingFunction) + } + + private[collection] def filterInPlaceImpl(p: (K, V) => Boolean): this.type = { + val it = iterator + while (it.hasNext) { + val (k, v) = it.next() + if (!p(k, v)) removeRefEq(k, v) + } + this + } + + private[collection] def mapValuesInPlaceImpl(f: (K, V) => V): this.type = { + val it = iterator + while (it.hasNext) { + val (k, v) = it.next() + replaceRefEq(k, v, f(k, v)) + } + this + } +} diff --git a/tests/pos-special/stdlib/collection/concurrent/TrieMap.scala b/tests/pos-special/stdlib/collection/concurrent/TrieMap.scala new file mode 100644 index 000000000000..8a34eddf0bdb --- /dev/null +++ b/tests/pos-special/stdlib/collection/concurrent/TrieMap.scala @@ -0,0 +1,1206 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package concurrent + +import java.util.concurrent.atomic._ +import scala.{unchecked => uc} +import scala.annotation.tailrec +import scala.collection.concurrent.TrieMap.RemovalPolicy +import scala.collection.generic.DefaultSerializable +import scala.collection.immutable.{List, Nil} +import scala.collection.mutable.GrowableBuilder +import scala.util.Try +import scala.util.hashing.Hashing +import language.experimental.captureChecking + +private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: Equiv[K]) extends INodeBase[K, V](g) { + import INodeBase._ + + WRITE(bn) + + def this(g: Gen, equiv: Equiv[K]) = this(null, g, equiv) + + def WRITE(nval: MainNode[K, V]) = INodeBase.updater.set(this, nval) + + def CAS(old: MainNode[K, V], n: MainNode[K, V]) = INodeBase.updater.compareAndSet(this, old, n) + + def gcasRead(ct: TrieMap[K, V]): MainNode[K, V] = GCAS_READ(ct) + + def GCAS_READ(ct: TrieMap[K, V]): MainNode[K, V] = { + val m = /*READ*/mainnode + val prevval = /*READ*/m.prev + if (prevval eq null) m + else GCAS_Complete(m, ct) + } + + @tailrec private def GCAS_Complete(m: MainNode[K, V], ct: TrieMap[K, V]): MainNode[K, V] = if (m eq null) null else { + // complete the GCAS + val prev = /*READ*/m.prev + val ctr = ct.readRoot(abort = true) + + prev match { + case null => + m + case fn: FailedNode[_, _] => // try to commit to previous value + if (CAS(m, fn.prev)) fn.prev + else GCAS_Complete(/*READ*/mainnode, ct) + case vn: MainNode[_, _] => + // Assume that you've read the root from the generation G. + // Assume that the snapshot algorithm is correct. + // ==> you can only reach nodes in generations <= G. + // ==> `gen` is <= G. + // We know that `ctr.gen` is >= G. + // ==> if `ctr.gen` = `gen` then they are both equal to G. + // ==> otherwise, we know that either `ctr.gen` > G, `gen` < G, + // or both + if ((ctr.gen eq gen) && ct.nonReadOnly) { + // try to commit + if (m.CAS_PREV(prev, null)) m + else GCAS_Complete(m, ct) + } else { + // try to abort + m.CAS_PREV(prev, new FailedNode(prev)) + GCAS_Complete(/*READ*/mainnode, ct) + } + } + } + + def GCAS(old: MainNode[K, V], n: MainNode[K, V], ct: TrieMap[K, V]): Boolean = { + n.WRITE_PREV(old) + if (CAS(old, n)) { + GCAS_Complete(n, ct) + /*READ*/n.prev eq null + } else false + } + + private def equal(k1: K, k2: K, ct: TrieMap[K, V]) = ct.equality.equiv(k1, k2) + + private def inode(cn: MainNode[K, V]) = { + val nin = new INode[K, V](gen, equiv) + nin.WRITE(cn) + nin + } + + def copyToGen(ngen: Gen, ct: TrieMap[K, V]) = { + val nin = new INode[K, V](ngen, equiv) + val main = GCAS_READ(ct) + nin.WRITE(main) + nin + } + + /** Inserts a key value pair, overwriting the old pair if the keys match. + * + * @return true if successful, false otherwise + */ + @tailrec def rec_insert(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Boolean = { + val m = GCAS_READ(ct) // use -Yinline! 
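+
+    // Sketch of the CNode case below: the branch index is the next 5-bit slice of
+    // the hash (idx = (hc >>> lev) & 0x1f), `flag` is that branch's bitmap bit, and
+    // `pos` is the branch's physical slot: the popcount of all lower bitmap bits.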
+ + m match { + case cn: CNode[K, V] => // 1) a multiway node + val idx = (hc >>> lev) & 0x1f + val flag = 1 << idx + val bmp = cn.bitmap + val mask = flag - 1 + val pos = Integer.bitCount(bmp & mask) + if ((bmp & flag) != 0) { + // 1a) insert below + cn.array(pos) match { + case in: INode[K, V] @uc => + if (startgen eq in.gen) in.rec_insert(k, v, hc, lev + 5, this, startgen, ct) + else { + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insert(k, v, hc, lev, parent, startgen, ct) + else false + } + case sn: SNode[K, V] @uc => + if (sn.hc == hc && equal(sn.k, k, ct)) GCAS(cn, cn.updatedAt(pos, new SNode(sn.k, v, hc), gen), ct) + else { + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen, equiv)), gen) + GCAS(cn, nn, ct) + } + case basicNode => throw new MatchError(basicNode) + } + } else { + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val ncnode = rn.insertedAt(pos, flag, k, v, hc, gen) + GCAS(cn, ncnode, ct) + } + case tn: TNode[K, V] => + clean(parent, ct, lev - 5) + false + case ln: LNode[K, V] => // 3) an l-node + val nn = ln.inserted(k, v) + GCAS(ln, nn, ct) + case mainNode => throw new MatchError(mainNode) + } + } + + + + /** Inserts a new key value pair, given that a specific condition is met. + * + * @param cond KEY_PRESENT_OR_ABSENT - don't care if the key was there, insert or overwrite + * KEY_ABSENT - key wasn't there, insert only, do not overwrite + * KEY_PRESENT - key was there, overwrite only, do not insert + * other value `v` - only overwrite if the current value is this + * @param fullEquals whether to use reference or full equals when comparing `v` to the current value + * @param hc the hashcode of `k` + * + * @return null if unsuccessful, Option[V] otherwise (indicating previous value bound to the key) + */ + @tailrec def rec_insertif(k: K, v: V, hc: Int, cond: AnyRef, fullEquals: Boolean, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Option[V] = { + val m = GCAS_READ(ct) // use -Yinline! 
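+
+    // Return convention below: `null` means a CAS lost a race and the caller must
+    // retry from the root, while `None`/`Some(v)` are definitive answers.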
+ + m match { + case cn: CNode[K, V] => // 1) a multiway node + val idx = (hc >>> lev) & 0x1f + val flag = 1 << idx + val bmp = cn.bitmap + val mask = flag - 1 + val pos = Integer.bitCount(bmp & mask) + if ((bmp & flag) != 0) { + // 1a) insert below + cn.array(pos) match { + case in: INode[K, V] @uc => + if (startgen eq in.gen) in.rec_insertif(k, v, hc, cond, fullEquals, lev + 5, this, startgen, ct) + else { + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insertif(k, v, hc, cond, fullEquals, lev, parent, startgen, ct) + else null + } + case sn: SNode[K, V] @uc => cond match { + case INode.KEY_PRESENT_OR_ABSENT => + if (sn.hc == hc && equal(sn.k, k, ct)) { + if (GCAS(cn, cn.updatedAt(pos, new SNode(sn.k, v, hc), gen), ct)) Some(sn.v) else null + } else { + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen, equiv)), gen) + if (GCAS(cn, nn, ct)) None + else null + } + case INode.KEY_ABSENT => + if (sn.hc == hc && equal(sn.k, k, ct)) Some(sn.v) + else { + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen, equiv)), gen) + if (GCAS(cn, nn, ct)) None + else null + } + case INode.KEY_PRESENT => + if (sn.hc == hc && equal(sn.k, k, ct)) { + if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null + } else None + case otherv => + if (sn.hc == hc && equal(sn.k, k, ct) && (if (fullEquals) sn.v == otherv else sn.v.asInstanceOf[AnyRef] eq otherv)) { + if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null + } else None + } + case basicNode => throw new MatchError(basicNode) + } + } else cond match { + case INode.KEY_PRESENT_OR_ABSENT | INode.KEY_ABSENT => + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val ncnode = rn.insertedAt(pos, flag, k, v, hc, gen) + if (GCAS(cn, ncnode, ct)) None else null + case INode.KEY_PRESENT => None + case otherv => None + } + case sn: TNode[K, V] => + clean(parent, ct, lev - 5) + null + case ln: LNode[K, V] => // 3) an l-node + def insertln() = { + val nn = ln.inserted(k, v) + GCAS(ln, nn, ct) + } + cond match { + case INode.KEY_PRESENT_OR_ABSENT => + val optv = ln.get(k) + if (insertln()) optv else null + case INode.KEY_ABSENT => + ln.get(k) match { + case None => if (insertln()) None else null + case optv => optv + } + case INode.KEY_PRESENT => + ln.get(k) match { + case Some(v0) => if (insertln()) Some(v0) else null + case None => None + } + case otherv => + ln.get(k) match { + case Some(v0) if (if (fullEquals) v0 == otherv else v0.asInstanceOf[AnyRef] eq otherv) => + if (insertln()) Some(otherv.asInstanceOf[V]) else null + case _ => None + } + } + case mainNode => throw new MatchError(mainNode) + } + } + + /** Looks up the value associated with the key. + * + * @param hc the hashcode of `k` + * + * @return NO_SUCH_ELEMENT_SENTINEL if no value has been found, RESTART if the operation wasn't successful, + * or any other value otherwise + */ + @tailrec def rec_lookup(k: K, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): AnyRef = { + val m = GCAS_READ(ct) // use -Yinline! 
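+
+    // Each recursive step below consumes another 5 bits of the hash; a miss yields
+    // NO_SUCH_ELEMENT_SENTINEL, and RESTART is returned when a stale generation
+    // forces the lookup to be retried from the root.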
+ + m match { + case cn: CNode[K, V] => // 1) a multinode + val idx = (hc >>> lev) & 0x1f + val flag = 1 << idx + val bmp = cn.bitmap + if ((bmp & flag) == 0) NO_SUCH_ELEMENT_SENTINEL // 1a) bitmap shows no binding + else { // 1b) bitmap contains a value - descend + val pos = if (bmp == 0xffffffff) idx else Integer.bitCount(bmp & (flag - 1)) + val sub = cn.array(pos) + sub match { + case in: INode[K, V] @uc => + if (ct.isReadOnly || (startgen eq in.gen)) in.rec_lookup(k, hc, lev + 5, this, startgen, ct) + else { + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_lookup(k, hc, lev, parent, startgen, ct) + else RESTART + } + case sn: SNode[K, V] @uc => // 2) singleton node + if (sn.hc == hc && equal(sn.k, k, ct)) sn.v.asInstanceOf[AnyRef] + else NO_SUCH_ELEMENT_SENTINEL + case basicNode => throw new MatchError(basicNode) + } + } + case tn: TNode[_, _] => // 3) non-live node + def cleanReadOnly(tn: TNode[K, V]) = if (ct.nonReadOnly) { + clean(parent, ct, lev - 5) + RESTART + } else { + if (tn.hc == hc && tn.k == k) tn.v.asInstanceOf[AnyRef] + else NO_SUCH_ELEMENT_SENTINEL + } + cleanReadOnly(tn) + case ln: LNode[K, V] => // 5) an l-node + ln.get(k).asInstanceOf[Option[AnyRef]].getOrElse(NO_SUCH_ELEMENT_SENTINEL) + case mainNode => throw new MatchError(mainNode) + } + } + + /** Removes the key associated with the given value. + * + * @param hc the hashcode of `k` + * + * @param removalPolicy policy deciding whether to remove `k` based on `v` and the + * current value associated with `k` (Always, FullEquals, or ReferenceEq) + * + * @return null if not successful, an Option[V] indicating the previous value otherwise + */ + def rec_remove( + k: K, + v: V, + removalPolicy: Int, + hc: Int, + lev: Int, + parent: INode[K, V], + startgen: Gen, + ct: TrieMap[K, V]): Option[V] = { + + val m = GCAS_READ(ct) // use -Yinline! 
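+
+    // Besides removing the entry, the code below keeps the trie compact: a CNode
+    // left with a single SNode is contracted to a tombed node (`toContracted`),
+    // and `cleanParent` then compresses the parent level.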
+ + m match { + case cn: CNode[K, V] => + val idx = (hc >>> lev) & 0x1f + val bmp = cn.bitmap + val flag = 1 << idx + if ((bmp & flag) == 0) None + else { + val pos = Integer.bitCount(bmp & (flag - 1)) + val sub = cn.array(pos) + val res = sub match { + case in: INode[K, V] @uc => + if (startgen eq in.gen) in.rec_remove(k, v, removalPolicy, hc, lev + 5, this, startgen, ct) + else { + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_remove(k, v, removalPolicy, hc, lev, parent, startgen, ct) + else null + } + case sn: SNode[K, V] @uc => + if (sn.hc == hc && equal(sn.k, k, ct) && RemovalPolicy.shouldRemove(removalPolicy)(sn.v, v)) { + val ncn = cn.removedAt(pos, flag, gen).toContracted(lev) + if (GCAS(cn, ncn, ct)) Some(sn.v) else null + } else None + case basicNode => throw new MatchError(basicNode) + } + + if (res == None || (res eq null)) res + else { + @tailrec def cleanParent(nonlive: AnyRef): Unit = { + val pm = parent.GCAS_READ(ct) + pm match { + case cn: CNode[K, V] => + val idx = (hc >>> (lev - 5)) & 0x1f + val bmp = cn.bitmap + val flag = 1 << idx + if ((bmp & flag) == 0) {} // somebody already removed this i-node, we're done + else { + val pos = Integer.bitCount(bmp & (flag - 1)) + val sub = cn.array(pos) + if (sub eq this) (nonlive: @uc) match { + case tn: TNode[K, V] @uc => + val ncn = cn.updatedAt(pos, tn.copyUntombed, gen).toContracted(lev - 5) + if (!parent.GCAS(cn, ncn, ct)) + if (ct.readRoot().gen == startgen) cleanParent(nonlive) + } + } + case _ => // parent is no longer a cnode, we're done + } + } + + if (parent ne null) { // never tomb at root + val n = GCAS_READ(ct) + if (n.isInstanceOf[TNode[_, _]]) + cleanParent(n) + } + + res + } + } + case tn: TNode[K, V] => + clean(parent, ct, lev - 5) + null + case ln: LNode[K, V] => + if (removalPolicy == RemovalPolicy.Always) { + val optv = ln.get(k) + val nn = ln.removed(k, ct) + if (GCAS(ln, nn, ct)) optv else null + } else ln.get(k) match { + case optv @ Some(v0) if RemovalPolicy.shouldRemove(removalPolicy)(v, v0) => + val nn = ln.removed(k, ct) + if (GCAS(ln, nn, ct)) optv else null + case _ => None + } + case mainNode => throw new MatchError(mainNode) + } + } + + private def clean(nd: INode[K, V], ct: TrieMap[K, V], lev: Int): Unit = { + val m = nd.GCAS_READ(ct) + m match { + case cn: CNode[K, V] => nd.GCAS(cn, cn.toCompressed(ct, lev, gen), ct) + case _ => + } + } + + def isNullInode(ct: TrieMap[K, V]) = GCAS_READ(ct) eq null + + def cachedSize(ct: TrieMap[K, V]): Int = + GCAS_READ(ct).cachedSize(ct) + + def knownSize(ct: TrieMap[K, V]): Int = + GCAS_READ(ct).knownSize() + + /* this is a quiescent method! 
*/
+  def string(lev: Int) = "%sINode -> %s".format(" " * lev, mainnode match {
+    case null => "<null>"
+    case tn: TNode[_, _] => "TNode(%s, %s, %d, !)".format(tn.k, tn.v, tn.hc)
+    case cn: CNode[_, _] => cn.string(lev)
+    case ln: LNode[_, _] => ln.string(lev)
+    case x => "<elem: %s>".format(x)
+  })
+
+}
+
+
+private[concurrent] object INode {
+  ////////////////////////////////////////////////////////////////////////////////////////////////////
+  // Arguments for `cond` argument in TrieMap#rec_insertif
+  ////////////////////////////////////////////////////////////////////////////////////////////////////
+  final val KEY_PRESENT = new AnyRef
+  final val KEY_ABSENT = new AnyRef
+  final val KEY_PRESENT_OR_ABSENT = new AnyRef
+
+  def newRootNode[K, V](equiv: Equiv[K]) = {
+    val gen = new Gen
+    val cn = new CNode[K, V](0, new Array(0), gen)
+    new INode[K, V](cn, gen, equiv)
+  }
+}
+
+
+private[concurrent] final class FailedNode[K, V](p: MainNode[K, V]) extends MainNode[K, V] {
+  WRITE_PREV(p)
+
+  def string(lev: Int) = throw new UnsupportedOperationException
+
+  def cachedSize(ct: AnyRef): Int = throw new UnsupportedOperationException
+
+  def knownSize: Int = throw new UnsupportedOperationException
+
+  override def toString = "FailedNode(%s)".format(p)
+}
+
+
+private[concurrent] trait KVNode[K, V] {
+  def kvPair: (K, V)
+}
+
+
+private[collection] final class SNode[K, V](final val k: K, final val v: V, final val hc: Int)
+  extends BasicNode with KVNode[K, V] {
+  def copy = new SNode(k, v, hc)
+  def copyTombed = new TNode(k, v, hc)
+  def copyUntombed = new SNode(k, v, hc)
+  def kvPair = (k, v)
+  def string(lev: Int) = (" " * lev) + "SNode(%s, %s, %x)".format(k, v, hc)
+}
+
+// Tomb Node, used to ensure proper ordering during removals
+private[collection] final class TNode[K, V](final val k: K, final val v: V, final val hc: Int)
+  extends MainNode[K, V] with KVNode[K, V] {
+  def copy = new TNode(k, v, hc)
+  def copyTombed = new TNode(k, v, hc)
+  def copyUntombed = new SNode(k, v, hc)
+  def kvPair = (k, v)
+  def cachedSize(ct: AnyRef): Int = 1
+  def knownSize: Int = 1
+  def string(lev: Int) = (" " * lev) + "TNode(%s, %s, %x, !)".format(k, v, hc)
+}
+
+// List Node, leaf node that handles hash collisions
+private[collection] final class LNode[K, V](val entries: List[(K, V)], equiv: Equiv[K])
+  extends MainNode[K, V] {
+
+  def this(k: K, v: V, equiv: Equiv[K]) = this((k -> v) :: Nil, equiv)
+
+  def this(k1: K, v1: V, k2: K, v2: V, equiv: Equiv[K]) =
+    this(if (equiv.equiv(k1, k2)) (k2 -> v2) :: Nil else (k1 -> v1) :: (k2 -> v2) :: Nil, equiv)
+
+  def inserted(k: K, v: V) = {
+    var k0: K = k
+    @tailrec
+    def remove(elems: List[(K, V)], acc: List[(K, V)]): List[(K, V)] = {
+      if (elems.isEmpty) acc
+      else if (equiv.equiv(elems.head._1, k)) {
+        k0 = elems.head._1
+        acc ::: elems.tail
+      } else remove(elems.tail, elems.head :: acc)
+    }
+    val e = remove(entries, Nil)
+    new LNode((k0 -> v) :: e, equiv)
+  }
+
+  def removed(k: K, ct: TrieMap[K, V]): MainNode[K, V] = {
+    val updmap = entries.filterNot(entry => equiv.equiv(entry._1, k))
+    if (updmap.sizeIs > 1) new LNode(updmap, equiv)
+    else {
+      val (k, v) = updmap.iterator.next()
+      new TNode(k, v, ct.computeHash(k)) // create it tombed so that it gets compressed on subsequent accesses
+    }
+  }
+
+  def get(k: K): Option[V] = entries.find(entry => equiv.equiv(entry._1, k)).map(_._2)
+
+  def cachedSize(ct: AnyRef): Int = entries.size
+
+  def knownSize: Int = -1 // shouldn't ever be empty, and the size of a list is not known
+
+  def string(lev: Int) = (" " * lev) +
"LNode(%s)".format(entries.mkString(", ")) + +} + +// Ctrie Node, contains bitmap and array of references to branch nodes +private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[BasicNode], val gen: Gen) extends CNodeBase[K, V] { + // this should only be called from within read-only snapshots + def cachedSize(ct: AnyRef): Int = { + val currsz = READ_SIZE() + if (currsz != -1) currsz + else { + val sz = computeSize(ct.asInstanceOf[TrieMap[K, V]]) + while (READ_SIZE() == -1) CAS_SIZE(-1, sz) + READ_SIZE() + } + } + + def knownSize: Int = READ_SIZE() // this should only ever return -1 if unknown + + // lends itself towards being parallelizable by choosing + // a random starting offset in the array + // => if there are concurrent size computations, they start + // at different positions, so they are more likely to + // to be independent + private def computeSize(ct: TrieMap[K, V]): Int = { + var i = 0 + var sz = 0 + val offset = + if (array.length > 0) + //util.Random.nextInt(array.length) /* <-- benchmarks show that this causes observable contention */ + java.util.concurrent.ThreadLocalRandom.current.nextInt(0, array.length) + else 0 + while (i < array.length) { + val pos = (i + offset) % array.length + array(pos) match { + case sn: SNode[_, _] => sz += 1 + case in: INode[K, V] @uc => sz += in.cachedSize(ct) + case basicNode => throw new MatchError(basicNode) + } + i += 1 + } + sz + } + + def updatedAt(pos: Int, nn: BasicNode, gen: Gen) = { + val len = array.length + val narr = new Array[BasicNode](len) + Array.copy(array, 0, narr, 0, len) + narr(pos) = nn + new CNode[K, V](bitmap, narr, gen) + } + + def removedAt(pos: Int, flag: Int, gen: Gen) = { + val arr = array + val len = arr.length + val narr = new Array[BasicNode](len - 1) + Array.copy(arr, 0, narr, 0, pos) + Array.copy(arr, pos + 1, narr, pos, len - pos - 1) + new CNode[K, V](bitmap ^ flag, narr, gen) + } + + def insertedAt(pos: Int, flag: Int, k: K, v: V, hc: Int, gen: Gen) = { + val len = array.length + val bmp = bitmap + val narr = new Array[BasicNode](len + 1) + Array.copy(array, 0, narr, 0, pos) + narr(pos) = new SNode(k, v, hc) + Array.copy(array, pos, narr, pos + 1, len - pos) + new CNode[K, V](bmp | flag, narr, gen) + } + + /** Returns a copy of this cnode such that all the i-nodes below it are copied + * to the specified generation `ngen`. 
+ */ + def renewed(ngen: Gen, ct: TrieMap[K, V]) = { + var i = 0 + val arr = array + val len = arr.length + val narr = new Array[BasicNode](len) + while (i < len) { + arr(i) match { + case in: INode[K, V] @uc => narr(i) = in.copyToGen(ngen, ct) + case bn: BasicNode => narr(i) = bn + } + i += 1 + } + new CNode[K, V](bitmap, narr, ngen) + } + + private def resurrect(inode: INode[K, V], inodemain: AnyRef): BasicNode = inodemain match { + case tn: TNode[_, _] => tn.copyUntombed + case _ => inode + } + + def toContracted(lev: Int): MainNode[K, V] = if (array.length == 1 && lev > 0) array(0) match { + case sn: SNode[K, V] @uc => sn.copyTombed + case _ => this + } else this + + // - if the branching factor is 1 for this CNode, and the child + // is a tombed SNode, returns its tombed version + // - otherwise, if there is at least one non-null node below, + // returns the version of this node with at least some null-inodes + // removed (those existing when the op began) + // - if there are only null-i-nodes below, returns null + def toCompressed(ct: TrieMap[K, V], lev: Int, gen: Gen) = { + val bmp = bitmap + var i = 0 + val arr = array + val tmparray = new Array[BasicNode](arr.length) + while (i < arr.length) { // construct new bitmap + val sub = arr(i) + sub match { + case in: INode[K, V] @uc => + val inodemain = in.gcasRead(ct) + assert(inodemain ne null) + tmparray(i) = resurrect(in, inodemain) + case sn: SNode[K, V] @uc => + tmparray(i) = sn + case basicNode => throw new MatchError(basicNode) + } + i += 1 + } + + new CNode[K, V](bmp, tmparray, gen).toContracted(lev) + } + + def string(lev: Int): String = "CNode %x\n%s".format(bitmap, array.map(_.string(lev + 1)).mkString("\n")) + + override def toString = { + def elems: Seq[String] = array.flatMap { + case sn: SNode[K, V] @uc => Iterable.single(sn.kvPair._2.toString) + case in: INode[K, V] @uc => Iterable.single(augmentString(in.toString).drop(14) + "(" + in.gen + ")") + case basicNode => throw new MatchError(basicNode) + } + f"CNode(sz: ${elems.size}%d; ${elems.sorted.mkString(", ")})" + } +} + +private[concurrent] object CNode { + + def dual[K, V](x: SNode[K, V], xhc: Int, y: SNode[K, V], yhc: Int, lev: Int, gen: Gen, equiv: Equiv[K]): MainNode[K, V] = if (lev < 35) { + val xidx = (xhc >>> lev) & 0x1f + val yidx = (yhc >>> lev) & 0x1f + val bmp = (1 << xidx) | (1 << yidx) + if (xidx == yidx) { + val subinode = new INode[K, V](gen, equiv)//(TrieMap.inodeupdater) + subinode.mainnode = dual(x, xhc, y, yhc, lev + 5, gen, equiv) + new CNode(bmp, Array(subinode), gen) + } else { + if (xidx < yidx) new CNode(bmp, Array(x, y), gen) + else new CNode(bmp, Array(y, x), gen) + } + } else { + new LNode(x.k, x.v, y.k, y.v, equiv) + } + +} + + +private[concurrent] case class RDCSS_Descriptor[K, V](old: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]) { + @volatile var committed = false +} + + +/** A concurrent hash-trie or TrieMap is a concurrent thread-safe lock-free + * implementation of a hash array mapped trie. It is used to implement the + * concurrent map abstraction. It has particularly scalable concurrent insert + * and remove operations and is memory-efficient. It supports O(1), atomic, + * lock-free snapshots which are used to implement linearizable lock-free size, + * iterator and clear operations. The cost of evaluating the (lazy) snapshot is + * distributed across subsequent updates, thus making snapshot evaluation horizontally scalable. 
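+ *
+ * (Illustrative) Snapshots make consistent reads cheap:
+ * {{{
+ *   val m = TrieMap("a" -> 1)
+ *   val snap = m.snapshot()  // O(1); copying is deferred to subsequent updates
+ *   m.update("b", 2)
+ *   snap.contains("b")       // false: the snapshot is unaffected
+ * }}}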
+ *
+ * For details, see: [[http://lampwww.epfl.ch/~prokopec/ctries-snapshot.pdf]]
+ */
+@SerialVersionUID(-5212455458703321708L)
+final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater[TrieMap[K, V], AnyRef], hashf: Hashing[K], ef: Equiv[K])
+  extends scala.collection.mutable.AbstractMap[K, V]
+    with scala.collection.concurrent.Map[K, V]
+    with scala.collection.mutable.MapOps[K, V, TrieMap, TrieMap[K, V]]
+    with scala.collection.MapFactoryDefaults[K, V, TrieMap, mutable.Iterable]
+    with DefaultSerializable {
+
+  private[this] var hashingobj = if (hashf.isInstanceOf[Hashing.Default[_]]) new TrieMap.MangledHashing[K] else hashf
+  private[this] var equalityobj = ef
+  @transient
+  private[this] var rootupdater = rtupd
+  def hashing = hashingobj
+  def equality = equalityobj
+  @volatile private var root = r
+
+  def this(hashf: Hashing[K], ef: Equiv[K]) = this(
+    INode.newRootNode(ef),
+    AtomicReferenceFieldUpdater.newUpdater(classOf[TrieMap[K, V]], classOf[AnyRef], "root"),
+    hashf,
+    ef
+  )
+
+  def this() = this(Hashing.default, Equiv.universal)
+
+  override def mapFactory: MapFactory[TrieMap] = TrieMap
+
+  /* internal methods */
+
+  private def writeObject(out: java.io.ObjectOutputStream): Unit = {
+    out.writeObject(hashingobj)
+    out.writeObject(equalityobj)
+
+    val it = iterator
+    while (it.hasNext) {
+      val (k, v) = it.next()
+      out.writeObject(k)
+      out.writeObject(v)
+    }
+    out.writeObject(TrieMapSerializationEnd)
+  }
+
+  private def readObject(in: java.io.ObjectInputStream): Unit = {
+    root = INode.newRootNode(equality)
+    rootupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[TrieMap[K, V]], classOf[AnyRef], "root")
+
+    hashingobj = in.readObject().asInstanceOf[Hashing[K]]
+    equalityobj = in.readObject().asInstanceOf[Equiv[K]]
+
+    // read key/value pairs until the sentinel; initializing with `null` (rather
+    // than an eager first read) ensures the first key is not dropped
+    var obj: AnyRef = null
+
+    while (obj != TrieMapSerializationEnd) {
+      obj = in.readObject()
+      if (obj != TrieMapSerializationEnd) {
+        val k = obj.asInstanceOf[K]
+        val v = in.readObject().asInstanceOf[V]
+        update(k, v)
+      }
+    }
+  }
+
+  private def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv)
+
+  private[collection] def readRoot(abort: Boolean = false): INode[K, V] = RDCSS_READ_ROOT(abort)
+
+  private[concurrent] def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = {
+    val r = /*READ*/root
+    r match {
+      case in: INode[K, V] @uc => in
+      case desc: RDCSS_Descriptor[K, V] @uc => RDCSS_Complete(abort)
+      case x => throw new MatchError(x)
+    }
+  }
+
+  @tailrec private def RDCSS_Complete(abort: Boolean): INode[K, V] = {
+    val v = /*READ*/root
+    v match {
+      case in: INode[K, V] @uc => in
+      case desc: RDCSS_Descriptor[K, V] @uc =>
+        val RDCSS_Descriptor(ov, exp, nv) = desc
+        if (abort) {
+          if (CAS_ROOT(desc, ov)) ov
+          else RDCSS_Complete(abort)
+        } else {
+          val oldmain = ov.gcasRead(this)
+          if (oldmain eq exp) {
+            if (CAS_ROOT(desc, nv)) {
+              desc.committed = true
+              nv
+            } else RDCSS_Complete(abort)
+          } else {
+            if (CAS_ROOT(desc, ov)) ov
+            else RDCSS_Complete(abort)
+          }
+        }
+      case x => throw new MatchError(x)
+    }
+  }
+
+  private def RDCSS_ROOT(ov: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]): Boolean = {
+    val desc = RDCSS_Descriptor(ov, expectedmain, nv)
+    if (CAS_ROOT(ov, desc)) {
+      RDCSS_Complete(abort = false)
+      /*READ*/desc.committed
+    } else false
+  }
+
+  @tailrec private def inserthc(k: K, hc: Int, v: V): Unit = {
+    val r = RDCSS_READ_ROOT()
+    if (!r.rec_insert(k, v, hc, 0, null, r.gen, this)) inserthc(k, hc, v)
+  }
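+  // Illustrative sketch of the RDCSS protocol implemented above, reduced to a
+  // plain AtomicReference (hypothetical helper, not used by this class; it
+  // omits the cooperative completion that RDCSS_Complete performs on behalf
+  // of concurrent readers):
+  //
+  //   import java.util.concurrent.atomic.AtomicReference
+  //
+  //   final class MiniRdcss[A <: AnyRef](init: A) {
+  //     private case class Desc(ov: A, exp: AnyRef, nv: A)
+  //     private val root = new AtomicReference[AnyRef](init)
+  //     def rdcss(ov: A, exp: AnyRef, nv: A, second: A => AnyRef): Boolean = {
+  //       val d = Desc(ov, exp, nv)
+  //       if (!root.compareAndSet(ov, d)) return false   // first compare failed
+  //       val ok = second(ov) eq exp                     // second compare
+  //       root.compareAndSet(d, if (ok) nv else ov)      // commit or roll back
+  //       ok
+  //     }
+  //   }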
+  @tailrec private def insertifhc(k: K, hc: Int, v: V, cond: AnyRef, fullEquals: Boolean): Option[V] = {
+    val r = RDCSS_READ_ROOT()
+
+    val ret = r.rec_insertif(k, v, hc, cond, fullEquals, 0, null, r.gen, this)
+    if (ret eq null) insertifhc(k, hc, v, cond, fullEquals)
+    else ret
+  }
+
+  /** Finds the value associated with the given key
+   *
+   * @param k the key to look up
+   * @param hc the hashcode of `k`
+   *
+   * @return the value of type `V` associated with `k`, if it exists; otherwise, `INodeBase.NO_SUCH_ELEMENT_SENTINEL`
+   */
+  @tailrec private def lookuphc(k: K, hc: Int): AnyRef = {
+    val r = RDCSS_READ_ROOT()
+    val res = r.rec_lookup(k, hc, 0, null, r.gen, this)
+    if (res eq INodeBase.RESTART) lookuphc(k, hc)
+    else res
+  }
+
+  /** Removes a key-value pair from the map
+   *
+   * @param k the key to remove
+   * @param v the value to compare with the value found associated with the key
+   * @param removalPolicy policy deciding whether to remove `k` based on `v` and the
+   *                      current value associated with `k` (Always, FullEquals, or ReferenceEq)
+   * @return an Option[V] indicating the previous value
+   */
+  @tailrec private def removehc(k: K, v: V, removalPolicy: Int, hc: Int): Option[V] = {
+    val r = RDCSS_READ_ROOT()
+    val res = r.rec_remove(k, v, removalPolicy, hc, 0, null, r.gen, this)
+    if (res ne null) res
+    else removehc(k, v, removalPolicy, hc)
+  }
+
+
+  def string = RDCSS_READ_ROOT().string(0)
+
+  /* public methods */
+
+  def isReadOnly = rootupdater eq null
+
+  def nonReadOnly = rootupdater ne null
+
+  /** Returns a snapshot of this TrieMap.
+   *  This operation is lock-free and linearizable.
+   *
+   *  The snapshot is lazily updated - the first time a branch in the snapshot
+   *  or in this TrieMap is accessed, it is rewritten.
+   *  This means that the work of rebuilding both the snapshot and this
+   *  TrieMap is distributed across all the threads doing updates or accesses
+   *  subsequent to the snapshot creation.
+   */
+  @tailrec def snapshot(): TrieMap[K, V] = {
+    val r = RDCSS_READ_ROOT()
+    val expmain = r.gcasRead(this)
+    if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new TrieMap(r.copyToGen(new Gen, this), rootupdater, hashing, equality)
+    else snapshot()
+  }
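+  // Illustrative usage sketch of the snapshot semantics described above
+  // (hypothetical values):
+  //
+  //   val m = TrieMap("a" -> 1, "b" -> 2)
+  //   val snap = m.snapshot()          // O(1), lock-free
+  //   m.update("a", 100)               // later writes are invisible to `snap`
+  //   assert(snap("a") == 1 && m("a") == 100)
+
+  /** Returns a read-only snapshot of this TrieMap.
+   *  This operation is lock-free and linearizable.
+   *
+   *  The snapshot is lazily updated - the first time a branch
+   *  of this TrieMap is accessed, it is rewritten. The work of creating
+   *  the snapshot is thus distributed across subsequent updates
+   *  and accesses on this TrieMap by all threads.
+   *  Note that, unlike with the `snapshot` method, the snapshot itself is
+   *  never rewritten; on the other hand, the obtained snapshot cannot be modified.
+   *
+   *  This method is used by other methods such as `size` and `iterator`.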
+   */
+  @tailrec def readOnlySnapshot(): scala.collection.Map[K, V] = {
+    val r = RDCSS_READ_ROOT()
+    val expmain = r.gcasRead(this)
+    if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new TrieMap(r, null, hashing, equality)
+    else readOnlySnapshot()
+  }
+
+  @tailrec override def clear(): Unit = {
+    val r = RDCSS_READ_ROOT()
+    if (!RDCSS_ROOT(r, r.gcasRead(this), INode.newRootNode[K, V](equality))) clear()
+  }
+
+  def computeHash(k: K) = hashingobj.hash(k)
+
+  @deprecated("Use getOrElse(k, null) instead.", "2.13.0")
+  def lookup(k: K): V = {
+    val hc = computeHash(k)
+    val lookupRes = lookuphc(k, hc)
+    val res = if (lookupRes == INodeBase.NO_SUCH_ELEMENT_SENTINEL) null else lookupRes
+    res.asInstanceOf[V]
+  }
+
+  override def apply(k: K): V = {
+    val hc = computeHash(k)
+    val res = lookuphc(k, hc)
+    if (res eq INodeBase.NO_SUCH_ELEMENT_SENTINEL) throw new NoSuchElementException
+    else res.asInstanceOf[V]
+  }
+
+  def get(k: K): Option[V] = {
+    val hc = computeHash(k)
+    val res = lookuphc(k, hc)
+    if (res eq INodeBase.NO_SUCH_ELEMENT_SENTINEL) None else Some(res).asInstanceOf[Option[V]]
+  }
+
+  override def put(key: K, value: V): Option[V] = {
+    val hc = computeHash(key)
+    insertifhc(key, hc, value, INode.KEY_PRESENT_OR_ABSENT, fullEquals = false /* unused */)
+  }
+
+  override def update(k: K, v: V): Unit = {
+    val hc = computeHash(k)
+    inserthc(k, hc, v)
+  }
+
+  def addOne(kv: (K, V)) = {
+    update(kv._1, kv._2)
+    this
+  }
+
+  override def remove(k: K): Option[V] = {
+    val hc = computeHash(k)
+    removehc(k = k, v = null.asInstanceOf[V], RemovalPolicy.Always, hc = hc)
+  }
+
+  def subtractOne(k: K) = {
+    remove(k)
+    this
+  }
+
+  def putIfAbsent(k: K, v: V): Option[V] = {
+    val hc = computeHash(k)
+    insertifhc(k, hc, v, INode.KEY_ABSENT, fullEquals = false /* unused */)
+  }
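+  // Illustrative sketch of the conditional operations this class provides
+  // through `scala.collection.concurrent.Map` (hypothetical values):
+  //
+  //   val m = TrieMap("k" -> "v1")
+  //   m.putIfAbsent("k", "v2")          // Some("v1"): key present, no change
+  //   m.remove("k", "other")            // false: stored value does not match
+  //   m.replace("k", "v1", "v2")        // true: CAS-style replacement
+
+  // TODO once computeIfAbsent is added to concurrent.Map,
+  // move the comment there and tweak the 'at most once' part
+  /** If the specified key is not already in the map, computes its value using
+   *  the given thunk `op` and enters it into the map.
+   *
+   *  If the specified mapping function throws an exception,
+   *  that exception is rethrown.
+   *
+   *  Note: This method will invoke `op` at most once.
+   *  However, `op` may be invoked without the result being added to the map if
+   *  a concurrent process is also trying to add a value corresponding to the
+   *  same key `k`.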
+   *
+   * @param k the key to modify
+   * @param op the expression that computes the value
+   * @return the value associated with the key, either the one already present
+   *         or the newly computed one
+   */
+  override def getOrElseUpdate(k: K, op: => V): V = {
+    val hc = computeHash(k)
+    lookuphc(k, hc) match {
+      case INodeBase.NO_SUCH_ELEMENT_SENTINEL =>
+        val v = op
+        insertifhc(k, hc, v, INode.KEY_ABSENT, fullEquals = false /* unused */) match {
+          case Some(oldValue) => oldValue
+          case None => v
+        }
+      case oldValue => oldValue.asInstanceOf[V]
+    }
+  }
+
+  def remove(k: K, v: V): Boolean = {
+    val hc = computeHash(k)
+    removehc(k, v, RemovalPolicy.FullEquals, hc).nonEmpty
+  }
+
+  override private[collection] def removeRefEq(k: K, v: V): Boolean = {
+    val hc = computeHash(k)
+    removehc(k, v, RemovalPolicy.ReferenceEq, hc).nonEmpty
+  }
+
+  def replace(k: K, oldvalue: V, newvalue: V): Boolean = {
+    val hc = computeHash(k)
+    insertifhc(k, hc, newvalue, oldvalue.asInstanceOf[AnyRef], fullEquals = true).nonEmpty
+  }
+
+  override private[collection] def replaceRefEq(k: K, oldValue: V, newValue: V): Boolean = {
+    val hc = computeHash(k)
+    insertifhc(k, hc, newValue, oldValue.asInstanceOf[AnyRef], fullEquals = false).nonEmpty
+  }
+
+  def replace(k: K, v: V): Option[V] = {
+    val hc = computeHash(k)
+    insertifhc(k, hc, v, INode.KEY_PRESENT, fullEquals = false /* unused */)
+  }
+
+  def iterator: Iterator[(K, V)] = {
+    if (nonReadOnly) readOnlySnapshot().iterator
+    else new TrieMapIterator(0, this)
+  }
+
+  ////////////////////////////////////////////////////////////////////////////
+  //
+  // scala/bug#10177 These methods need overrides as the inherited implementations
+  // call `.iterator` more than once, which doesn't guarantee a coherent
+  // view of the data if there is a concurrent writer.
+  // Note that we don't need overrides for keysIterator or valuesIterator.
+  // TrieMapTest validates the behaviour.
+  override def values: Iterable[V] = {
+    if (nonReadOnly) readOnlySnapshot().values
+    else super.values
+  }
+  override def keySet: Set[K] = {
+    if (nonReadOnly) readOnlySnapshot().keySet
+    else super.keySet
+  }
+
+  override def view: MapView[K, V] = if (nonReadOnly) readOnlySnapshot().view else super.view
+
+  @deprecated("Use .view.filterKeys(f). A future version will include a strict version of this method (for now, .view.filterKeys(p).toMap).", "2.13.0")
+  override def filterKeys(p: K => Boolean): collection.MapView[K, V]^{p} = view.filterKeys(p)
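+  // Illustrative sketch of why the overrides above matter: each returned
+  // collection is taken from a single read-only snapshot, so a concurrent
+  // writer cannot make it internally inconsistent (hypothetical values):
+  //
+  //   val m  = TrieMap(1 -> "a")
+  //   val ks = m.keySet                 // backed by one snapshot
+  //   m.update(2, "b")                  // not reflected in `ks`
+  //   assert(!ks.contains(2) && m.contains(2))
+
+  @deprecated("Use .view.mapValues(f).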
A future version will include a strict version of this method (for now, .view.mapValues(f).toMap).", "2.13.0") + override def mapValues[W](f: V => W): collection.MapView[K, W]^{f} = view.mapValues(f) + // END extra overrides + /////////////////////////////////////////////////////////////////// + + override def size: Int = + if (nonReadOnly) readOnlySnapshot().size + else RDCSS_READ_ROOT().cachedSize(this) + override def knownSize: Int = + if (nonReadOnly) -1 + else RDCSS_READ_ROOT().knownSize(this) + override def isEmpty: Boolean = + (if (nonReadOnly) readOnlySnapshot() else this).sizeIs == 0 // sizeIs checks knownSize + override protected[this] def className = "TrieMap" + + override def lastOption: Option[(K, V)] = if (isEmpty) None else Try(last).toOption +} + + +@SerialVersionUID(3L) +object TrieMap extends MapFactory[TrieMap] { + + def empty[K, V]: TrieMap[K, V] = new TrieMap[K, V] + + def from[K, V](it: IterableOnce[(K, V)]^): TrieMap[K, V] = new TrieMap[K, V]() ++= it + + def newBuilder[K, V]: mutable.GrowableBuilder[(K, V), TrieMap[K, V]] = new GrowableBuilder(empty[K, V]) + + @transient + val inodeupdater: AtomicReferenceFieldUpdater[INodeBase[_, _], MainNode[_, _]] = AtomicReferenceFieldUpdater.newUpdater(classOf[INodeBase[_, _]], classOf[MainNode[_, _]], "mainnode") + + class MangledHashing[K] extends Hashing[K] { + def hash(k: K): Int = scala.util.hashing.byteswap32(k.##) + } + + private[concurrent] object RemovalPolicy { + final val Always = 0 + final val FullEquals = 1 + final val ReferenceEq = 2 + + def shouldRemove[V](removalPolicy: Int)(a: V, b: V): Boolean = + removalPolicy match { + case Always => true + case FullEquals => a == b + case ReferenceEq => a.asInstanceOf[AnyRef] eq b.asInstanceOf[AnyRef] + } + } +} + +// non-final as an extension point for parallel collections +private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: TrieMap[K, V], mustInit: Boolean = true) extends AbstractIterator[(K, V)] { + private val stack = new Array[Array[BasicNode]](7) + private val stackpos = new Array[Int](7) + private var depth = -1 + private var subiter: Iterator[(K, V)] = null + private var current: KVNode[K, V] = null + + if (mustInit) initialize() + + def hasNext = (current ne null) || (subiter ne null) + + def next() = if (hasNext) { + var r: (K, V) = null + if (subiter ne null) { + r = subiter.next() + checkSubiter() + } else { + r = current.kvPair + advance() + } + r + } else Iterator.empty.next() + + private def readin(in: INode[K, V]) = in.gcasRead(ct) match { + case cn: CNode[K, V] => + depth += 1 + stack(depth) = cn.array + stackpos(depth) = -1 + advance() + case tn: TNode[K, V] => + current = tn + case ln: LNode[K, V] => + subiter = ln.entries.iterator + checkSubiter() + case null => + current = null + case mainNode => throw new MatchError(mainNode) + } + + private def checkSubiter() = if (!subiter.hasNext) { + subiter = null + advance() + } + + private def initialize(): Unit = { + assert(ct.isReadOnly) + + val r = ct.RDCSS_READ_ROOT() + readin(r) + } + + @tailrec + final def advance(): Unit = if (depth >= 0) { + val npos = stackpos(depth) + 1 + if (npos < stack(depth).length) { + stackpos(depth) = npos + stack(depth)(npos) match { + case sn: SNode[K, V] @uc => current = sn + case in: INode[K, V] @uc => readin(in) + case basicNode => throw new MatchError(basicNode) + } + } else { + depth -= 1 + advance() + } + } else current = null + + protected def newIterator(_lev: Int, _ct: TrieMap[K, V], _mustInit: Boolean): TrieMapIterator[K, V] = new 
TrieMapIterator[K, V](_lev, _ct, _mustInit)
+
+  protected def dupTo(it: TrieMapIterator[K, V]): Unit = {
+    it.level = this.level
+    it.ct = this.ct
+    it.depth = this.depth
+    it.current = this.current
+
+    // these need a deep copy
+    Array.copy(this.stack, 0, it.stack, 0, 7)
+    Array.copy(this.stackpos, 0, it.stackpos, 0, 7)
+
+    // this one needs to be evaluated
+    if (this.subiter == null) it.subiter = null
+    else {
+      val lst = this.subiter.to(immutable.List)
+      this.subiter = lst.iterator
+      it.subiter = lst.iterator
+    }
+  }
+
+  /** Returns a sequence of iterators over subsets of this iterator.
+   *  It's used to ease the implementation of splitters for a parallel version of the TrieMap.
+   */
+  protected def subdivide(): Seq[Iterator[(K, V)]] = if (subiter ne null) {
+    // the case where an LNode is being iterated
+    val it = newIterator(level + 1, ct, _mustInit = false)
+    it.depth = -1
+    it.subiter = this.subiter
+    it.current = null
+    this.subiter = null
+    advance()
+    this.level += 1
+    Seq(it, this)
+  } else if (depth == -1) {
+    this.level += 1
+    Seq(this)
+  } else {
+    var d = 0
+    while (d <= depth) {
+      val rem = stack(d).length - 1 - stackpos(d)
+      if (rem > 0) {
+        val (arr1, arr2) = stack(d).drop(stackpos(d) + 1).splitAt(rem / 2)
+        stack(d) = arr1
+        stackpos(d) = -1
+        val it = newIterator(level + 1, ct, _mustInit = false)
+        val xss: Array[Array[BasicNode]] = it.stack.asInstanceOf
+        // !!! cc split into separate xss and asInstanceOf needed because cc gets confused with
+        // two-dimensional invariant arrays
+        xss(0) = arr2
+        it.stackpos(0) = -1
+        it.depth = 0
+        it.advance() // <-- fix it
+        this.level += 1
+        return Seq(this, it)
+      }
+      d += 1
+    }
+    this.level += 1
+    Seq(this)
+  }
+
+}
+
+/** Only used for ctrie serialization. */
+@SerialVersionUID(3L)
+private[concurrent] case object TrieMapSerializationEnd
diff --git a/tests/pos-special/stdlib/collection/convert/AsJavaConverters.scala b/tests/pos-special/stdlib/collection/convert/AsJavaConverters.scala
new file mode 100644
index 000000000000..bfae792c5107
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/convert/AsJavaConverters.scala
@@ -0,0 +1,261 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package convert
+
+import java.util.{concurrent => juc}
+import java.{lang => jl, util => ju}
+
+import scala.{unchecked => uc}
+import language.experimental.captureChecking
+
+/** Defines converter methods from Scala to Java collections.
+ *  These methods are available through the [[scala.jdk.javaapi.CollectionConverters]] object.
+ */
+trait AsJavaConverters {
+  import JavaCollectionWrappers._
+
+  /**
+   * Converts a Scala `Iterator` to a Java `Iterator`.
+   *
+   * The returned Java `Iterator` is backed by the provided Scala `Iterator` and any side-effects of
+   * using it via the Java interface will be visible via the Scala interface and vice versa.
+   *
+   * If the Scala `Iterator` was previously obtained from an implicit or explicit call of
+   * `asScala` then the original Java `Iterator` will be returned.
+   *
+   * @param i The Scala `Iterator` to be converted.
+   * @return A Java `Iterator` view of the argument.
+   */
+  def asJava[A](i: Iterator[A]): ju.Iterator[A] = i match {
+    case null => null
+    case wrapper: JIteratorWrapper[A @uc] => wrapper.underlying
+    case _ => new IteratorWrapper(i)
+  }
+
+  /**
+   * Converts a Scala `Iterator` to a Java `Enumeration`.
+   *
+   * The returned Java `Enumeration` is backed by the provided Scala `Iterator` and any side-effects
+   * of using it via the Java interface will be visible via the Scala interface and vice versa.
+   *
+   * If the Scala `Iterator` was previously obtained from an implicit or explicit call of
+   * `asScala` then the original Java `Enumeration` will be returned.
+   *
+   * @param i The Scala `Iterator` to be converted.
+   * @return A Java `Enumeration` view of the argument.
+   */
+  def asJavaEnumeration[A](i: Iterator[A]): ju.Enumeration[A] = i match {
+    case null => null
+    case wrapper: JEnumerationWrapper[A @uc] => wrapper.underlying
+    case _ => new IteratorWrapper(i)
+  }
+
+  /**
+   * Converts a Scala `Iterable` to a Java `Iterable`.
+   *
+   * The returned Java `Iterable` is backed by the provided Scala `Iterable` and any side-effects of
+   * using it via the Java interface will be visible via the Scala interface and vice versa.
+   *
+   * If the Scala `Iterable` was previously obtained from an implicit or explicit call of
+   * `asScala` then the original Java `Iterable` will be returned.
+   *
+   * @param i The Scala `Iterable` to be converted.
+   * @return A Java `Iterable` view of the argument.
+   */
+  def asJava[A](i: Iterable[A]): jl.Iterable[A] = i match {
+    case null => null
+    case wrapper: JIterableWrapper[A @uc] => wrapper.underlying
+    case _ => new IterableWrapper(i)
+  }
+
+  /**
+   * Converts a Scala `Iterable` to an immutable Java `Collection`.
+   *
+   * If the Scala `Iterable` was previously obtained from an implicit or explicit call of
+   * `asScala` then the original Java `Collection` will be returned.
+   *
+   * @param i The Scala `Iterable` to be converted.
+   * @return A Java `Collection` view of the argument.
+   */
+  def asJavaCollection[A](i: Iterable[A]): ju.Collection[A] = i match {
+    case null => null
+    case wrapper: JCollectionWrapper[A @uc] => wrapper.underlying
+    case _ => new IterableWrapper(i)
+  }
+
+  /**
+   * Converts a Scala mutable `Buffer` to a Java `List`.
+   *
+   * The returned Java `List` is backed by the provided Scala `Buffer` and any side-effects of using
+   * it via the Java interface will be visible via the Scala interface and vice versa.
+   *
+   * If the Scala `Buffer` was previously obtained from an implicit or explicit call of
+   * `asScala` then the original Java `List` will be returned.
+   *
+   * @param b The Scala `Buffer` to be converted.
+   * @return A Java `List` view of the argument.
+   */
+  def asJava[A](b: mutable.Buffer[A]): ju.List[A] = b match {
+    case null => null
+    case wrapper: JListWrapper[A @uc] => wrapper.underlying
+    case _ => new MutableBufferWrapper(b)
+  }
+
+  /**
+   * Converts a Scala mutable `Seq` to a Java `List`.
+   *
+   * The returned Java `List` is backed by the provided Scala `Seq` and any side-effects of using it
+   * via the Java interface will be visible via the Scala interface and vice versa.
+   *
+   * If the Scala `Seq` was previously obtained from an implicit or explicit call of
+   * `asScala` then the original Java `List` will be returned.
+   *
+   * @param s The Scala `Seq` to be converted.
+   * @return A Java `List` view of the argument.
+   */
+  def asJava[A](s: mutable.Seq[A]): ju.List[A] = s match {
+    case null => null
+    case wrapper: JListWrapper[A @uc] => wrapper.underlying
+    case _ => new MutableSeqWrapper(s)
+  }
+
+  /**
+   * Converts a Scala `Seq` to a Java `List`.
+   *
+   * The returned Java `List` is backed by the provided Scala `Seq` and any side-effects of using it
+   * via the Java interface will be visible via the Scala interface and vice versa.
+   *
+   * If the Scala `Seq` was previously obtained from an implicit or explicit call of
+   * `asScala` then the original Java `List` will be returned.
+   *
+   * @param s The Scala `Seq` to be converted.
+   * @return A Java `List` view of the argument.
+   */
+  def asJava[A](s: Seq[A]): ju.List[A] = s match {
+    case null => null
+    case wrapper: JListWrapper[A @uc] => wrapper.underlying
+    case _ => new SeqWrapper(s)
+  }
+
+  /**
+   * Converts a Scala mutable `Set` to a Java `Set`.
+   *
+   * The returned Java `Set` is backed by the provided Scala `Set` and any side-effects of using it
+   * via the Java interface will be visible via the Scala interface and vice versa.
+   *
+   * If the Scala `Set` was previously obtained from an implicit or explicit call of
+   * `asScala` then the original Java `Set` will be returned.
+   *
+   * @param s The Scala mutable `Set` to be converted.
+   * @return A Java `Set` view of the argument.
+   */
+  def asJava[A](s: mutable.Set[A]): ju.Set[A] = s match {
+    case null => null
+    case wrapper: JSetWrapper[A @uc] => wrapper.underlying
+    case _ => new MutableSetWrapper(s)
+  }
+
+  /**
+   * Converts a Scala `Set` to a Java `Set`.
+   *
+   * The returned Java `Set` is backed by the provided Scala `Set` and any side-effects of using it
+   * via the Java interface will be visible via the Scala interface and vice versa.
+   *
+   * If the Scala `Set` was previously obtained from an implicit or explicit call of
+   * `asScala` then the original Java `Set` will be returned.
+   *
+   * @param s The Scala `Set` to be converted.
+   * @return A Java `Set` view of the argument.
+   */
+  def asJava[A](s: Set[A]): ju.Set[A] = s match {
+    case null => null
+    case wrapper: JSetWrapper[A @uc] => wrapper.underlying
+    case _ => new SetWrapper(s)
+  }
+
+  /**
+   * Converts a Scala mutable `Map` to a Java `Map`.
+   *
+   * The returned Java `Map` is backed by the provided Scala `Map` and any side-effects of using it
+   * via the Java interface will be visible via the Scala interface and vice versa.
+   *
+   * If the Scala `Map` was previously obtained from an implicit or explicit call of
+   * `asScala` then the original Java `Map` will be returned.
+   *
+   * @param m The Scala mutable `Map` to be converted.
+   * @return A Java `Map` view of the argument.
+   */
+  def asJava[K, V](m: mutable.Map[K, V]): ju.Map[K, V] = m match {
+    case null => null
+    case wrapper: JMapWrapper[K @uc, V @uc] => wrapper.underlying
+    case _ => new MutableMapWrapper(m)
+  }
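+  // Illustrative round-trip sketch of the view semantics documented above
+  // (assumes `import scala.jdk.CollectionConverters._` in user code):
+  //
+  //   val buf = mutable.ArrayBuffer(1, 2)
+  //   val jl: ju.List[Int] = buf.asJava
+  //   jl.add(3)                         // visible through the Scala side
+  //   assert(buf.length == 3)
+  //   assert(jl.asScala eq buf)         // unwraps to the original, no re-wrap
+
+  /**
+   * Converts a Scala mutable `Map` to a Java `Dictionary`.
+   *
+   * The returned Java `Dictionary` is backed by the provided Scala `Map` and any
+   * side-effects of using it via the Java interface will be visible via the Scala interface and
+   * vice versa.
+   *
+   * If the Scala `Map` was previously obtained from an implicit or explicit call of
+   * `asScala` then the original Java `Dictionary` will be returned.
+   *
+   * @param m The Scala `Map` to be converted.
+   * @return A Java `Dictionary` view of the argument.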
+ */ + def asJavaDictionary[K, V](m: mutable.Map[K, V]): ju.Dictionary[K, V] = m match { + case null => null + case wrapper: JDictionaryWrapper[K @uc, V @uc] => wrapper.underlying + case _ => new DictionaryWrapper(m) + } + + /** + * Converts a Scala `Map` to a Java `Map`. + * + * The returned Java `Map` is backed by the provided Scala `Map` and any side-effects of using it + * via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Map` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `Map` will be returned. + * + * @param m The Scala `Map` to be converted. + * @return A Java `Map` view of the argument. + */ + def asJava[K, V](m: Map[K, V]): ju.Map[K, V] = m match { + case null => null + case wrapper: JMapWrapper[K @uc, V @uc] => wrapper.underlying + case _ => new MapWrapper(m) + } + + /** + * Converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`. + * + * The returned Java `ConcurrentMap` is backed by the provided Scala `concurrent.Map` and any + * side-effects of using it via the Java interface will be visible via the Scala interface and + * vice versa. + * + * If the Scala `concurrent.Map` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `ConcurrentMap` will be returned. + * + * @param m The Scala `concurrent.Map` to be converted. + * @return A Java `ConcurrentMap` view of the argument. + */ + def asJava[K, V](m: concurrent.Map[K, V]): juc.ConcurrentMap[K, V] = m match { + case null => null + case wrapper: JConcurrentMapWrapper[K @uc, V @uc] => wrapper.underlying + case _ => new ConcurrentMapWrapper(m) + } +} diff --git a/tests/pos-special/stdlib/collection/convert/AsJavaExtensions.scala b/tests/pos-special/stdlib/collection/convert/AsJavaExtensions.scala new file mode 100644 index 000000000000..14268f7aa165 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/AsJavaExtensions.scala @@ -0,0 +1,109 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package convert + +import java.util.{concurrent => juc} +import java.{lang => jl, util => ju} +import language.experimental.captureChecking + +/** Defines `asJava` extension methods, available through [[scala.jdk.CollectionConverters]]. */ +trait AsJavaExtensions { + import scala.jdk.javaapi.{CollectionConverters => conv} + + implicit class IteratorHasAsJava[A](i: Iterator[A]) { + /** Converts a Scala `Iterator` to a Java `Iterator`, see + * [[AsJavaConverters.asJava[A](i:Iterator[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: ju.Iterator[A] = conv.asJava(i) + + /** Converts a Scala `Iterator` to a Java `Enumeration`, see + * [[AsJavaConverters.asJavaEnumeration `scala.jdk.javaapi.CollectionConverters.asJavaEnumeration`]]. + */ + def asJavaEnumeration: ju.Enumeration[A] = conv.asJavaEnumeration(i) + } + + implicit class IterableHasAsJava[A](i: Iterable[A]) { + /** Converts a Scala `Iterable` to a Java `Iterable`, see + * [[AsJavaConverters.asJava[A](i:Iterable[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. 
+     */
+    def asJava: jl.Iterable[A] = conv.asJava(i)
+
+    /** Converts a Scala `Iterable` to a Java `Collection`, see
+     *  [[AsJavaConverters.asJavaCollection `scala.jdk.javaapi.CollectionConverters.asJavaCollection`]].
+     */
+    def asJavaCollection: ju.Collection[A] = conv.asJavaCollection(i)
+  }
+
+  implicit class BufferHasAsJava[A](b: mutable.Buffer[A]) {
+    /** Converts a Scala `Buffer` to a Java `List`, see
+     *  [[AsJavaConverters.asJava[A](b:scala\.collection\.mutable\.Buffer[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]].
+     */
+    def asJava: ju.List[A] = conv.asJava(b)
+  }
+
+  implicit class MutableSeqHasAsJava[A](s: mutable.Seq[A]) {
+    /** Converts a Scala `Seq` to a Java `List`, see
+     *  [[AsJavaConverters.asJava[A](s:scala\.collection\.mutable\.Seq[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]].
+     */
+    def asJava: ju.List[A] = conv.asJava(s)
+  }
+
+  implicit class SeqHasAsJava[A](s: Seq[A]) {
+    /** Converts a Scala `Seq` to a Java `List`, see
+     *  [[AsJavaConverters.asJava[A](s:scala\.collection\.Seq[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]].
+     */
+    def asJava: ju.List[A] = conv.asJava(s)
+  }
+
+  implicit class MutableSetHasAsJava[A](s: mutable.Set[A]) {
+    /** Converts a Scala `mutable.Set` to a Java `Set`, see
+     *  [[AsJavaConverters.asJava[A](s:scala\.collection\.mutable\.Set[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]].
+     */
+    def asJava: ju.Set[A] = conv.asJava(s)
+  }
+
+  implicit class SetHasAsJava[A](s: Set[A]) {
+    /** Converts a Scala `Set` to a Java `Set`, see
+     *  [[AsJavaConverters.asJava[A](s:scala\.collection\.Set[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]].
+     */
+    def asJava: ju.Set[A] = conv.asJava(s)
+  }
+
+  implicit class MutableMapHasAsJava[K, V](m: mutable.Map[K, V]) {
+    /** Converts a Scala `mutable.Map` to a Java `Map`, see
+     *  [[AsJavaConverters.asJava[K,V](m:scala\.collection\.mutable\.Map[K,V])* `scala.jdk.javaapi.CollectionConverters.asJava`]].
+     */
+    def asJava: ju.Map[K, V] = conv.asJava(m)
+
+    /** Converts a Scala `mutable.Map` to a Java `Dictionary`, see
+     *  [[AsJavaConverters.asJavaDictionary `scala.jdk.javaapi.CollectionConverters.asJavaDictionary`]].
+     */
+    def asJavaDictionary: ju.Dictionary[K, V] = conv.asJavaDictionary(m)
+  }
+
+  implicit class MapHasAsJava[K, V](m: Map[K, V]) {
+    /** Converts a Scala `Map` to a Java `Map`, see
+     *  [[AsJavaConverters.asJava[K,V](m:scala\.collection\.Map[K,V])* `scala.jdk.javaapi.CollectionConverters.asJava`]].
+     */
+    def asJava: ju.Map[K, V] = conv.asJava(m)
+  }
+
+  implicit class ConcurrentMapHasAsJava[K, V](m: concurrent.Map[K, V]) {
+    /** Converts a Scala `concurrent.Map` to a Java `ConcurrentMap`, see
+     *  [[AsJavaConverters.asJava[K,V](m:scala\.collection\.concurrent\.Map[K,V])* `scala.jdk.javaapi.CollectionConverters.asJava`]].
+     */
+    def asJava: juc.ConcurrentMap[K, V] = conv.asJava(m)
+  }
+}
diff --git a/tests/pos-special/stdlib/collection/convert/AsScalaConverters.scala b/tests/pos-special/stdlib/collection/convert/AsScalaConverters.scala
new file mode 100644
index 000000000000..6cc02b13bb06
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/convert/AsScalaConverters.scala
@@ -0,0 +1,208 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */ + +package scala +package collection +package convert + +import java.util.{concurrent => juc} +import java.{lang => jl, util => ju} + +import scala.{unchecked => uc} +import language.experimental.captureChecking + +/** Defines converter methods from Java to Scala collections. + * These methods are available through the [[scala.jdk.javaapi.CollectionConverters]] object. + */ +trait AsScalaConverters { + import JavaCollectionWrappers._ + + /** + * Converts a Java `Iterator` to a Scala `Iterator`. + * + * The returned Scala `Iterator` is backed by the provided Java `Iterator` and any side-effects of + * using it via the Scala interface will be visible via the Java interface and vice versa. + * + * If the Java `Iterator` was previously obtained from an implicit or explicit call of + * `asJava` then the original Scala `Iterator` will be returned. + * + * @param i The Java `Iterator` to be converted. + * @return A Scala `Iterator` view of the argument. + */ + def asScala[A](i: ju.Iterator[A]): Iterator[A] = i match { + case null => null + case wrapper: IteratorWrapper[A @uc] => wrapper.underlying + case _ => new JIteratorWrapper(i) + } + + /** + * Converts a Java `Enumeration` to a Scala `Iterator`. + * + * The returned Scala `Iterator` is backed by the provided Java `Enumeration` and any side-effects + * of using it via the Scala interface will be visible via the Java interface and vice versa. + * + * If the Java `Enumeration` was previously obtained from an implicit or explicit call of + * `asJavaEnumeration` then the original Scala `Iterator` will be returned. + * + * @param e The Java `Enumeration` to be converted. + * @return A Scala `Iterator` view of the argument. + */ + def asScala[A](e: ju.Enumeration[A]): Iterator[A] = e match { + case null => null + case wrapper: IteratorWrapper[A @uc] => wrapper.underlying + case _ => new JEnumerationWrapper(e) + } + + /** + * Converts a Java `Iterable` to a Scala `Iterable`. + * + * The returned Scala `Iterable` is backed by the provided Java `Iterable` and any side-effects of + * using it via the Scala interface will be visible via the Java interface and vice versa. + * + * If the Java `Iterable` was previously obtained from an implicit or explicit call of + * `asJava` then the original Scala `Iterable` will be returned. + * + * @param i The Java `Iterable` to be converted. + * @return A Scala `Iterable` view of the argument. + */ + def asScala[A](i: jl.Iterable[A]): Iterable[A] = i match { + case null => null + case wrapper: IterableWrapper[A @uc] => wrapper.underlying + case _ => new JIterableWrapper(i) + } + + /** + * Converts a Java `Collection` to a Scala `Iterable`. + * + * If the Java `Collection` was previously obtained from an implicit or explicit call of + * `asJavaCollection` then the original Scala `Iterable` will be returned. + * + * @param c The Java `Collection` to be converted. + * @return A Scala `Iterable` view of the argument. + */ + def asScala[A](c: ju.Collection[A]): Iterable[A] = c match { + case null => null + case wrapper: IterableWrapper[A @uc] => wrapper.underlying + case _ => new JCollectionWrapper(c) + } + + /** + * Converts a Java `List` to a Scala mutable `Buffer`. + * + * The returned Scala `Buffer` is backed by the provided Java `List` and any side-effects of using + * it via the Scala interface will be visible via the Java interface and vice versa. + * + * If the Java `List` was previously obtained from an implicit or explicit call of + * `asJava` then the original Scala `Buffer` will be returned. 
+   *
+   * @param l The Java `List` to be converted.
+   * @return A Scala mutable `Buffer` view of the argument.
+   */
+  def asScala[A](l: ju.List[A]): mutable.Buffer[A] = l match {
+    case null => null
+    case wrapper: MutableBufferWrapper[A @uc] => wrapper.underlying
+    case _ => new JListWrapper(l)
+  }
+
+  /**
+   * Converts a Java `Set` to a Scala mutable `Set`.
+   *
+   * The returned Scala `Set` is backed by the provided Java `Set` and any side-effects of using it
+   * via the Scala interface will be visible via the Java interface and vice versa.
+   *
+   * If the Java `Set` was previously obtained from an implicit or explicit call of
+   * `asJava` then the original Scala `Set` will be returned.
+   *
+   * @param s The Java `Set` to be converted.
+   * @return A Scala mutable `Set` view of the argument.
+   */
+  def asScala[A](s: ju.Set[A]): mutable.Set[A] = s match {
+    case null => null
+    case wrapper: MutableSetWrapper[A @uc] => wrapper.underlying
+    case _ => new JSetWrapper(s)
+  }
+
+  /**
+   * Converts a Java `Map` to a Scala mutable `Map`.
+   *
+   * The returned Scala `Map` is backed by the provided Java `Map` and any side-effects of using it
+   * via the Scala interface will be visible via the Java interface and vice versa.
+   *
+   * If the Java `Map` was previously obtained from an implicit or explicit call of
+   * `asJava` then the original Scala `Map` will be returned.
+   *
+   * If the wrapped map is synchronized (e.g. from `java.util.Collections.synchronizedMap`), it is
+   * your responsibility to wrap all non-atomic operations with `underlying.synchronized`.
+   * This includes `get`, as `java.util.Map`'s API does not allow for an atomic `get` when `null`
+   * values may be present.
+   *
+   * @param m The Java `Map` to be converted.
+   * @return A Scala mutable `Map` view of the argument.
+   */
+  def asScala[K, V](m: ju.Map[K, V]): mutable.Map[K, V] = m match {
+    case null => null
+    case wrapper: MutableMapWrapper[K @uc, V @uc] => wrapper.underlying
+    case _ => new JMapWrapper(m)
+  }
+
+  /**
+   * Converts a Java `ConcurrentMap` to a Scala mutable `ConcurrentMap`.
+   *
+   * The returned Scala `ConcurrentMap` is backed by the provided Java `ConcurrentMap` and any
+   * side-effects of using it via the Scala interface will be visible via the Java interface and
+   * vice versa.
+   *
+   * If the Java `ConcurrentMap` was previously obtained from an implicit or explicit call of
+   * `asJava` then the original Scala `ConcurrentMap` will be returned.
+   *
+   * @param m The Java `ConcurrentMap` to be converted.
+   * @return A Scala mutable `ConcurrentMap` view of the argument.
+   */
+  def asScala[K, V](m: juc.ConcurrentMap[K, V]): concurrent.Map[K, V] = m match {
+    case null => null
+    case wrapper: ConcurrentMapWrapper[K @uc, V @uc] => wrapper.underlyingConcurrentMap
+    case _ => new JConcurrentMapWrapper(m)
+  }
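+  // Illustrative sketch of the synchronized-map caveat above: guard compound
+  // check-then-act sequences manually (hypothetical values; assumes
+  // `import scala.jdk.CollectionConverters._` in user code):
+  //
+  //   val sync = ju.Collections.synchronizedMap(new ju.HashMap[String, Int]())
+  //   val m = sync.asScala
+  //   val v = sync.synchronized {       // makes the get-or-insert atomic
+  //     m.getOrElse("k", { m.put("k", 1); 1 })
+  //   }
+
+  /**
+   * Converts a Java `Dictionary` to a Scala mutable `Map`.
+   *
+   * The returned Scala `Map` is backed by the provided Java `Dictionary` and any side-effects of
+   * using it via the Scala interface will be visible via the Java interface and vice versa.
+   *
+   * If the Java `Dictionary` was previously obtained from an implicit or explicit call of
+   * `asJavaDictionary` then the original Scala `Map` will be returned.
+   *
+   * @param d The Java `Dictionary` to be converted.
+   * @return A Scala mutable `Map` view of the argument.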
+ */ + def asScala[K, V](d: ju.Dictionary[K, V]): mutable.Map[K, V] = d match { + case null => null + case wrapper: DictionaryWrapper[K @uc, V @uc] => wrapper.underlying + case _ => new JDictionaryWrapper(d) + } + + /** + * Converts a Java `Properties` to a Scala mutable `Map[String, String]`. + * + * The returned Scala `Map[String, String]` is backed by the provided Java `Properties` and any + * side-effects of using it via the Scala interface will be visible via the Java interface and + * vice versa. + * + * @param p The Java `Properties` to be converted. + * @return A Scala mutable `Map[String, String]` view of the argument. + */ + def asScala(p: ju.Properties): mutable.Map[String, String] = p match { + case null => null + case _ => new JPropertiesWrapper(p) + } +} diff --git a/tests/pos-special/stdlib/collection/convert/AsScalaExtensions.scala b/tests/pos-special/stdlib/collection/convert/AsScalaExtensions.scala new file mode 100644 index 000000000000..d60bfc7f60a1 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/AsScalaExtensions.scala @@ -0,0 +1,94 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package convert + +import java.util.{concurrent => juc} +import java.{lang => jl, util => ju} +import language.experimental.captureChecking + +/** Defines `asScala` extension methods, available through [[scala.jdk.CollectionConverters]]. */ +trait AsScalaExtensions { + import scala.jdk.javaapi.{CollectionConverters => conv} + + implicit class IteratorHasAsScala[A](i: ju.Iterator[A]) { + /** Converts a Java `Iterator` to a Scala `Iterator`, see + * [[AsScalaConverters.asScala[A](i:java\.util\.Iterator[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: Iterator[A] = conv.asScala(i) + } + + implicit class EnumerationHasAsScala[A](e: ju.Enumeration[A]) { + /** Converts a Java `Enumeration` to a Scala `Iterator`, see + * [[AsScalaConverters.asScala[A](e:java\.util\.Enumeration[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: Iterator[A] = conv.asScala(e) + } + + implicit class IterableHasAsScala[A](i: jl.Iterable[A]) { + /** Converts a Java `Iterable` to a Scala `Iterable`, see + * [[AsScalaConverters.asScala[A](i:Iterable[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: Iterable[A] = conv.asScala(i) + } + + implicit class CollectionHasAsScala[A](c: ju.Collection[A]) { + /** Converts a Java `Collection` to a Scala `Iterable`, see + * [[AsScalaConverters.asScala[A](c:java\.util\.Collection[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: Iterable[A] = conv.asScala(c) + } + + implicit class ListHasAsScala[A](l: ju.List[A]) { + /** Converts a Java `List` to a Scala `Buffer`, see + * [[AsScalaConverters.asScala[A](l:java\.util\.List[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: mutable.Buffer[A] = conv.asScala(l) + } + + implicit class SetHasAsScala[A](s: ju.Set[A]) { + /** Converts a Java `Set` to a Scala `Set`, see + * [[AsScalaConverters.asScala[A](s:java\.util\.Set[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. 
+     */
+    def asScala: mutable.Set[A] = conv.asScala(s)
+  }
+
+  implicit class MapHasAsScala[K, V](m: ju.Map[K, V]) {
+    /** Converts a Java `Map` to a Scala `Map`, see
+     *  [[AsScalaConverters.asScala[A,B](m:java\.util\.Map[A,B])* `scala.jdk.javaapi.CollectionConverters.asScala`]].
+     */
+    def asScala: mutable.Map[K, V] = conv.asScala(m)
+  }
+
+  implicit class ConcurrentMapHasAsScala[K, V](m: juc.ConcurrentMap[K, V]) {
+    /** Converts a Java `ConcurrentMap` to a Scala `concurrent.Map`, see
+     *  [[AsScalaConverters.asScala[A,B](m:java\.util\.concurrent\.ConcurrentMap[A,B])* `scala.jdk.javaapi.CollectionConverters.asScala`]].
+     */
+    def asScala: concurrent.Map[K, V] = conv.asScala(m)
+  }
+
+  implicit class DictionaryHasAsScala[K, V](d: ju.Dictionary[K, V]) {
+    /** Converts a Java `Dictionary` to a Scala `Map`, see
+     *  [[AsScalaConverters.asScala[A,B](d:java\.util\.Dictionary[A,B])* `scala.jdk.javaapi.CollectionConverters.asScala`]].
+     */
+    def asScala: mutable.Map[K, V] = conv.asScala(d)
+  }
+
+  implicit class PropertiesHasAsScala(i: ju.Properties) {
+    /** Converts a Java `Properties` to a Scala `Map`, see
+     *  [[AsScalaConverters.asScala(p:java\.util\.Properties)* `scala.jdk.javaapi.CollectionConverters.asScala`]].
+     */
+    def asScala: mutable.Map[String, String] = conv.asScala(i)
+  }
+}
diff --git a/tests/pos-special/stdlib/collection/convert/ImplicitConversions.scala b/tests/pos-special/stdlib/collection/convert/ImplicitConversions.scala
new file mode 100644
index 000000000000..1bc284462ff1
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/convert/ImplicitConversions.scala
@@ -0,0 +1,182 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package convert
+
+import java.util.{concurrent => juc}
+import java.{lang => jl, util => ju}
+
+import scala.collection.JavaConverters._
+import scala.language.implicitConversions
+import language.experimental.captureChecking
+
+/** Defines implicit converter methods from Java to Scala collections. */
+@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0")
+trait ToScalaImplicits {
+  /** Implicitly converts a Java `Iterator` to a Scala `Iterator`.
+   * @see [[JavaConverters.asScalaIterator]]
+   */
+  implicit def `iterator asScala`[A](it: ju.Iterator[A]): Iterator[A] = asScalaIterator(it)
+
+  /** Implicitly converts a Java `Enumeration` to a Scala `Iterator`.
+   * @see [[JavaConverters.enumerationAsScalaIterator]]
+   */
+  implicit def `enumeration AsScalaIterator`[A](i: ju.Enumeration[A]): Iterator[A] = enumerationAsScalaIterator(i)
+
+  /** Implicitly converts a Java `Iterable` to a Scala `Iterable`.
+   * @see [[JavaConverters.iterableAsScalaIterable]]
+   */
+  implicit def `iterable AsScalaIterable`[A](i: jl.Iterable[A]): Iterable[A] = iterableAsScalaIterable(i)
+
+  /** Implicitly converts a Java `Collection` to a Scala `Iterable`.
+   * @see [[JavaConverters.collectionAsScalaIterable]]
+   */
+  implicit def `collection AsScalaIterable`[A](i: ju.Collection[A]): Iterable[A] = collectionAsScalaIterable(i)
+
+  /** Implicitly converts a Java `List` to a Scala mutable `Buffer`.
+   * @see [[JavaConverters.asScalaBuffer]]
+   */
+  implicit def `list asScalaBuffer`[A](l: ju.List[A]): mutable.Buffer[A] = asScalaBuffer(l)
+
+  /** Implicitly converts a Java `Set` to a Scala mutable `Set`.
+   * @see [[JavaConverters.asScalaSet]]
+   */
+  implicit def `set asScala`[A](s: ju.Set[A]): mutable.Set[A] = asScalaSet(s)
+
+  /** Implicitly converts a Java `Map` to a Scala mutable `Map`.
+   * @see [[JavaConverters.mapAsScalaMap]]
+   */
+  implicit def `map AsScala`[K, V](m: ju.Map[K, V]): mutable.Map[K, V] = mapAsScalaMap(m)
+
+  /** Implicitly converts a Java `ConcurrentMap` to a Scala mutable `ConcurrentMap`.
+   * @see [[JavaConverters.mapAsScalaConcurrentMap]]
+   */
+  implicit def `map AsScalaConcurrentMap`[K, V](m: juc.ConcurrentMap[K, V]): concurrent.Map[K, V] = mapAsScalaConcurrentMap(m)
+
+  /** Implicitly converts a Java `Dictionary` to a Scala mutable `Map`.
+   * @see [[JavaConverters.dictionaryAsScalaMap]]
+   */
+  implicit def `dictionary AsScalaMap`[K, V](p: ju.Dictionary[K, V]): mutable.Map[K, V] = dictionaryAsScalaMap(p)
+
+  /** Implicitly converts a Java `Properties` to a Scala mutable `Map[String, String]`.
+   * @see [[JavaConverters.propertiesAsScalaMap]]
+   */
+  implicit def `properties AsScalaMap`(p: ju.Properties): mutable.Map[String, String] = propertiesAsScalaMap(p)
+}
+
+/** Defines implicit conversions from Scala to Java collections. */
+@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0")
+trait ToJavaImplicits {
+  /** Implicitly converts a Scala `Iterator` to a Java `Iterator`.
+   * @see [[JavaConverters.asJavaIterator]]
+   */
+  implicit def `iterator asJava`[A](it: Iterator[A]): ju.Iterator[A] = asJavaIterator(it)
+
+  /** Implicitly converts a Scala `Iterator` to a Java `Enumeration`.
+   * @see [[JavaConverters.asJavaEnumeration]]
+   */
+  implicit def `enumeration asJava`[A](it: Iterator[A]): ju.Enumeration[A] = asJavaEnumeration(it)
+
+  /** Implicitly converts a Scala `Iterable` to a Java `Iterable`.
+   * @see [[JavaConverters.asJavaIterable]]
+   */
+  implicit def `iterable asJava`[A](i: Iterable[A]): jl.Iterable[A] = asJavaIterable(i)
+
+  /** Implicitly converts a Scala `Iterable` to an immutable Java `Collection`.
+   * @see [[JavaConverters.asJavaCollection]]
+   */
+  implicit def `collection asJava`[A](it: Iterable[A]): ju.Collection[A] = asJavaCollection(it)
+
+  /** Implicitly converts a Scala mutable `Buffer` to a Java `List`.
+   * @see [[JavaConverters.bufferAsJavaList]]
+   */
+  implicit def `buffer AsJavaList`[A](b: mutable.Buffer[A]): ju.List[A] = bufferAsJavaList(b)
+
+  /** Implicitly converts a Scala mutable `Seq` to a Java `List`.
+   * @see [[JavaConverters.mutableSeqAsJavaList]]
+   */
+  implicit def `mutableSeq AsJavaList`[A](seq: mutable.Seq[A]): ju.List[A] = mutableSeqAsJavaList(seq)
+
+  /** Implicitly converts a Scala `Seq` to a Java `List`.
+   * @see [[JavaConverters.seqAsJavaList]]
+   */
+  implicit def `seq AsJavaList`[A](seq: Seq[A]): ju.List[A] = seqAsJavaList(seq)
+
+  /** Implicitly converts a Scala mutable `Set` to a Java `Set`.
+   * @see [[JavaConverters.mutableSetAsJavaSet]]
+   */
+  implicit def `mutableSet AsJavaSet`[A](s: mutable.Set[A]): ju.Set[A] = mutableSetAsJavaSet(s)
+
+  /** Implicitly converts a Scala `Set` to a Java `Set`.
+   * @see [[JavaConverters.setAsJavaSet]]
+   */
+  implicit def `set AsJavaSet`[A](s: Set[A]): ju.Set[A] = setAsJavaSet(s)
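+  // Illustrative sketch: with these implicits in scope, Scala collections can
+  // be passed directly where Java types are expected (deprecated style;
+  // `takesJava` is a hypothetical method):
+  //
+  //   import scala.collection.convert.ImplicitConversions._
+  //   def takesJava(l: ju.List[Int]): Int = l.size()
+  //   takesJava(mutable.ArrayBuffer(1, 2, 3))   // converted implicitly
+
+  /** Implicitly converts a Scala mutable `Map` to a Java `Map`.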
+ * @see [[JavaConverters.mutableMapAsJavaMap]] + */ + implicit def `mutableMap AsJavaMap`[K, V](m: mutable.Map[K, V]): ju.Map[K, V] = mutableMapAsJavaMap(m) + + /** Implicitly converts a Scala mutable `Map` to a Java `Dictionary`. + * @see [[JavaConverters.asJavaDictionary]] + */ + implicit def `dictionary asJava`[K, V](m: mutable.Map[K, V]): ju.Dictionary[K, V] = asJavaDictionary(m) + + /** Implicitly converts a Scala `Map` to a Java `Map`. + * @see [[JavaConverters.mapAsJavaMap]] + */ + implicit def `map AsJavaMap`[K, V](m: Map[K, V]): ju.Map[K, V] = mapAsJavaMap(m) + + /** Implicitly converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`. + * @see [[JavaConverters.mapAsJavaConcurrentMap]] + */ + implicit def `map AsJavaConcurrentMap`[K, V](m: concurrent.Map[K, V]): juc.ConcurrentMap[K, V] = mapAsJavaConcurrentMap(m) +} + +/** + * Convenience for miscellaneous implicit conversions from Scala to Java collections API. + * + * It is recommended to use explicit conversions provided by [[collection.JavaConverters]] instead. + * Implicit conversions may cause unexpected issues, see [[ImplicitConversions]]. + */ +@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0") +object ImplicitConversionsToJava extends ToJavaImplicits + +/** + * Convenience for miscellaneous implicit conversions from Java to Scala collections API. + * + * It is recommended to use explicit conversions provided by [[collection.JavaConverters]] instead. + * Implicit conversions may cause unexpected issues, see [[ImplicitConversions]]. + */ +@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0") +object ImplicitConversionsToScala extends ToScalaImplicits + +/** + * Convenience for miscellaneous implicit conversions between Java and Scala collections API. + * + * It is recommended to use explicit conversions provided by [[collection.JavaConverters]] instead. + * Implicit conversions may cause unexpected issues. Example: + * + * {{{ + * import collection.convert.ImplicitConversions._ + * case class StringBox(s: String) + * val m = Map(StringBox("one") -> "uno") + * m.get("one") + * }}} + * + * The above example returns `null` instead of producing a type error at compile-time. The map is + * implicitly converted to a `java.util.Map` which provides a method `get(x: AnyRef)`. + */ +@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0") +object ImplicitConversions extends ToScalaImplicits with ToJavaImplicits diff --git a/tests/pos-special/stdlib/collection/convert/JavaCollectionWrappers.scala b/tests/pos-special/stdlib/collection/convert/JavaCollectionWrappers.scala new file mode 100644 index 000000000000..e826bdeb23db --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/JavaCollectionWrappers.scala @@ -0,0 +1,616 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package convert + +import java.util.{concurrent => juc} +import java.util.{NavigableMap} +import java.{lang => jl, util => ju} + +import scala.jdk.CollectionConverters._ +import scala.util.Try +import scala.util.chaining._ +import scala.util.control.ControlThrowable +import language.experimental.captureChecking +import annotation.unchecked.uncheckedCaptures + +/** Wrappers for exposing Scala collections as Java collections and vice-versa */ +@SerialVersionUID(3L) +// not private[convert] because `WeakHashMap` uses JMapWrapper +private[collection] object JavaCollectionWrappers extends Serializable { + @SerialVersionUID(3L) + class IteratorWrapper[A](val underlying: Iterator[A]) extends ju.Iterator[A] with ju.Enumeration[A] with Serializable { + def hasNext = underlying.hasNext + def next() = underlying.next() + def hasMoreElements = underlying.hasNext + def nextElement() = underlying.next() + override def remove() = throw new UnsupportedOperationException + } + + @SerialVersionUID(3L) + class JIteratorWrapper[A](val underlying: ju.Iterator[A]) extends AbstractIterator[A] with Iterator[A] with Serializable { + def hasNext = underlying.hasNext + def next() = underlying.next + } + + @SerialVersionUID(3L) + class JEnumerationWrapper[A](val underlying: ju.Enumeration[A]) extends AbstractIterator[A] with Iterator[A] with Serializable { + def hasNext = underlying.hasMoreElements + def next() = underlying.nextElement + } + + trait IterableWrapperTrait[A] extends ju.AbstractCollection[A] { + val underlying: Iterable[A] + def size = underlying.size + override def iterator = new IteratorWrapper(underlying.iterator) + override def isEmpty = underlying.isEmpty + } + + @SerialVersionUID(3L) + class IterableWrapper[A](val underlying: Iterable[A]) extends ju.AbstractCollection[A] with IterableWrapperTrait[A] with Serializable { + import scala.runtime.Statics._ + override def equals(other: Any): Boolean = + other match { + case other: IterableWrapper[_] => underlying.equals(other.underlying) + case _ => false + } + override def hashCode = finalizeHash(mix(mix(0xcafebabe, "IterableWrapper".hashCode), anyHash(underlying)), 1) + } + + @SerialVersionUID(3L) + class JIterableWrapper[A](val underlying: jl.Iterable[A]) + extends AbstractIterable[A] + with StrictOptimizedIterableOps[A, Iterable, Iterable[A]] + with Serializable { + def iterator = underlying.iterator.asScala + override def iterableFactory = mutable.ArrayBuffer + override def isEmpty: Boolean = !underlying.iterator().hasNext + } + + @SerialVersionUID(3L) + class JCollectionWrapper[A](val underlying: ju.Collection[A]) + extends AbstractIterable[A] + with StrictOptimizedIterableOps[A, Iterable, Iterable[A]] + with Serializable { + def iterator = underlying.iterator.asScala + override def size = underlying.size + override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize + override def isEmpty = underlying.isEmpty + override def iterableFactory = mutable.ArrayBuffer + } + + @SerialVersionUID(3L) + class SeqWrapper[A](val underlying: Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] with Serializable { + def get(i: Int) = underlying(i) + } + + @SerialVersionUID(3L) + class MutableSeqWrapper[A](val underlying: mutable.Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] with Serializable { + def get(i: Int) = underlying(i) + override def set(i: Int, elem: A) = { + val p = underlying(i) + underlying(i) = elem + p + } + } + + @SerialVersionUID(3L) + class 
MutableBufferWrapper[A](val underlying: mutable.Buffer[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] with Serializable { + def get(i: Int) = underlying(i) + override def set(i: Int, elem: A) = { val p = underlying(i); underlying(i) = elem; p } + override def add(elem: A) = { underlying += elem; true } + override def remove(i: Int) = underlying remove i + } + + @SerialVersionUID(3L) + class JListWrapper[A](val underlying: ju.List[A]) + extends mutable.AbstractBuffer[A] + with SeqOps[A, mutable.Buffer, mutable.Buffer[A]] + with StrictOptimizedSeqOps[A, mutable.Buffer, mutable.Buffer[A]] + with IterableFactoryDefaults[A, mutable.Buffer] + with Serializable { + def length = underlying.size + override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize + override def isEmpty = underlying.isEmpty + override def iterator: Iterator[A] = underlying.iterator.asScala + def apply(i: Int) = underlying.get(i) + def update(i: Int, elem: A) = underlying.set(i, elem) + def prepend(elem: A) = { underlying.subList(0, 0) add elem; this } + def addOne(elem: A): this.type = { underlying add elem; this } + def insert(idx: Int,elem: A): Unit = underlying.subList(0, idx).add(elem) + def insertAll(i: Int, elems: IterableOnce[A]^) = { + val ins = underlying.subList(0, i) + elems.iterator.foreach(ins.add(_)) + } + def remove(i: Int) = underlying.remove(i) + def clear() = underlying.clear() + // Note: Clone cannot just call underlying.clone because in Java, only specific collections + // expose clone methods. Generically, they're protected. + override def clone(): JListWrapper[A] = new JListWrapper(new ju.ArrayList[A](underlying)) + def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A]^, replaced: Int): this.type = { + remove(from, replaced) + insertAll(from, patch) + this + } + def remove(from: Int, n: Int): Unit = underlying.subList(from, from+n).clear() + override def iterableFactory = mutable.ArrayBuffer + override def subtractOne(elem: A): this.type = { underlying.remove(elem.asInstanceOf[AnyRef]); this } + } + + @SerialVersionUID(3L) + class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] with Serializable { self => + // Note various overrides to avoid performance gotchas. 
+ override def contains(o: Object): Boolean = { + try { underlying.contains(o.asInstanceOf[A]) } + catch { case cce: ClassCastException => false } + } + override def isEmpty = underlying.isEmpty + def size = underlying.size + def iterator = new ju.Iterator[A] { + val ui = underlying.iterator + var prev: Option[A] = None + def hasNext = ui.hasNext + def next = { val e = ui.next(); prev = Some(e); e } + override def remove() = prev match { + case Some(e) => + underlying match { + case ms: mutable.Set[a] => + ms remove e + prev = None + case _ => + throw new UnsupportedOperationException("remove") + } + case _ => + throw new IllegalStateException("next must be called at least once before remove") + } + } + } + + @SerialVersionUID(3L) + class MutableSetWrapper[A](val underlying: mutable.Set[A]) extends SetWrapper[A](underlying) with Serializable { + override def add(elem: A) = { + val sz = underlying.size + underlying += elem + sz < underlying.size + } + override def remove(elem: AnyRef) = + try underlying.remove(elem.asInstanceOf[A]) + catch { case ex: ClassCastException => false } + override def clear() = underlying.clear() + } + + @SerialVersionUID(3L) + class JSetWrapper[A](val underlying: ju.Set[A]) + extends mutable.AbstractSet[A] + with mutable.SetOps[A, mutable.Set, mutable.Set[A]] + with StrictOptimizedSetOps[A, mutable.Set, mutable.Set[A]] + with Serializable { + + override def size: Int = underlying.size + override def isEmpty: Boolean = underlying.isEmpty + override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize + def iterator: Iterator[A] = underlying.iterator.asScala + + def contains(elem: A): Boolean = underlying.contains(elem) + + def addOne(elem: A): this.type = { underlying add elem; this } + def subtractOne(elem: A): this.type = { underlying remove elem; this } + + override def remove(elem: A): Boolean = underlying remove elem + + override def clear(): Unit = { + underlying.clear() + } + + override def empty: mutable.Set[A] = new JSetWrapper(new ju.HashSet[A]) + + // Note: Clone cannot just call underlying.clone because in Java, only specific collections + // expose clone methods. Generically, they're protected. 
+ override def clone(): mutable.Set[A] = new JSetWrapper[A](new ju.LinkedHashSet[A](underlying)) + + override def iterableFactory: IterableFactory[mutable.Set] = mutable.HashSet + + override def filterInPlace(p: A => Boolean): this.type = { + if (underlying.size() > 0) underlying.removeIf(!p(_)) + this + } + } + + @SerialVersionUID(3L) + class MapWrapper[K, V](underlying: Map[K, V]) extends ju.AbstractMap[K, V] with Serializable { self => + override def size = underlying.size + + override def get(key: AnyRef): V = try { + underlying get key.asInstanceOf[K] match { + case None => null.asInstanceOf[V] + case Some(v) => v + } + } catch { + case ex: ClassCastException => null.asInstanceOf[V] + } + + override def entrySet: ju.Set[ju.Map.Entry[K, V]] = new ju.AbstractSet[ju.Map.Entry[K, V]] { + def size = self.size + + def iterator = new ju.Iterator[ju.Map.Entry[K, V]] { + val ui = underlying.iterator + var prev : Option[K] = None + + def hasNext = ui.hasNext + + def next() = { + val (k, v) = ui.next() + prev = Some(k) + new ju.Map.Entry[K, V] { + def getKey = k + def getValue = v + def setValue(v1 : V) = self.put(k, v1) + + // It's important that this implementation conform to the contract + // specified in the javadocs of java.util.Map.Entry.hashCode + // + // See https://github.com/scala/bug/issues/10663 + override def hashCode = { + (if (k == null) 0 else k.hashCode()) ^ + (if (v == null) 0 else v.hashCode()) + } + + override def equals(other: Any) = other match { + case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue + case _ => false + } + } + } + + override def remove(): Unit = { + prev match { + case Some(k) => + underlying match { + case mm: mutable.Map[a, _] => + mm -= k + prev = None + case _ => + throw new UnsupportedOperationException("remove") + } + case _ => + throw new IllegalStateException("next must be called at least once before remove") + } + } + } + } + + override def containsKey(key: AnyRef): Boolean = try { + // Note: Subclass of collection.Map with specific key type may redirect generic + // contains to specific contains, which will throw a ClassCastException if the + // wrong type is passed. This is why we need a type cast to A inside a try/catch. 
+ underlying.contains(key.asInstanceOf[K]) + } catch { + case ex: ClassCastException => false + } + } + + @SerialVersionUID(3L) + class MutableMapWrapper[K, V](val underlying: mutable.Map[K, V]) extends MapWrapper[K, V](underlying) { + override def put(k: K, v: V) = underlying.put(k, v) match { + case Some(v1) => v1 + case None => null.asInstanceOf[V] + } + + override def remove(k: AnyRef): V = try { + underlying remove k.asInstanceOf[K] match { + case None => null.asInstanceOf[V] + case Some(v) => v + } + } catch { + case ex: ClassCastException => null.asInstanceOf[V] + } + + override def clear() = underlying.clear() + } + + @SerialVersionUID(3L) + abstract class AbstractJMapWrapper[K, V] + extends mutable.AbstractMap[K, V] + with JMapWrapperLike[K, V, mutable.Map, mutable.Map[K, V]] with Serializable + + trait JMapWrapperLike[K, V, +CC[X, Y] <: mutable.MapOps[X, Y, CC, _], +C <: mutable.MapOps[K, V, CC, C]] + extends mutable.MapOps[K, V, CC, C] + with StrictOptimizedMapOps[K, V, CC, C] + with StrictOptimizedIterableOps[(K, V), mutable.Iterable, C] { + + def underlying: ju.Map[K, V] + + override def size = underlying.size + + // support Some(null) if currently bound to null + def get(k: K) = { + val v = underlying.get(k) + if (v != null) + Some(v) + else if (underlying.containsKey(k)) + Some(null.asInstanceOf[V]) + else + None + } + + override def getOrElseUpdate(key: K, op: => V): V = + underlying.computeIfAbsent(key, _ => op) match { + case null => update(key, null.asInstanceOf[V]); null.asInstanceOf[V] + case v => v + } + + def addOne(kv: (K, V)): this.type = { underlying.put(kv._1, kv._2); this } + def subtractOne(key: K): this.type = { underlying remove key; this } + + // support Some(null) if currently bound to null + override def put(k: K, v: V): Option[V] = + if (v == null) { + val present = underlying.containsKey(k) + val result = underlying.put(k, v) + if (present) Some(result) else None + } else { + var result: Option[V @uncheckedCaptures] = None + def recompute(k0: K, v0: V): V = v.tap(_ => + if (v0 != null) result = Some(v0) + else if (underlying.containsKey(k0)) result = Some(null.asInstanceOf[V]) + ) + underlying.compute(k, recompute) + result + } + + override def update(k: K, v: V): Unit = underlying.put(k, v) + + override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + def remap(k: K, v: V): V = + remappingFunction(Option(v)) match { + case Some(null) => throw PutNull + case Some(x) => x + case None => null.asInstanceOf[V] + } + try Option(underlying.compute(key, remap)) + catch { + case PutNull => update(key, null.asInstanceOf[V]); Some(null.asInstanceOf[V]) + } + } + + // support Some(null) if currently bound to null + override def remove(k: K): Option[V] = { + var result: Option[V @uncheckedCaptures] = None + def recompute(k0: K, v0: V): V = { + if (v0 != null) result = Some(v0) + else if (underlying.containsKey(k0)) result = Some(null.asInstanceOf[V]) + null.asInstanceOf[V] + } + underlying.compute(k, recompute) + result + } + + def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] { + val ui = underlying.entrySet.iterator + def hasNext = ui.hasNext + def next() = { val e = ui.next(); (e.getKey, e.getValue) } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + val i = underlying.entrySet().iterator() + while (i.hasNext) { + val entry = i.next() + f(entry.getKey, entry.getValue) + } + } + + override def clear() = underlying.clear() + + } + + /** Wraps a Java map as a Scala one. 
If the map is to support concurrent access, + * use [[JConcurrentMapWrapper]] instead. If the wrapped map is synchronized + * (e.g. from `java.util.Collections.synchronizedMap`), it is your responsibility + * to wrap all non-atomic operations with `underlying.synchronized`. + * This includes `get`, as `java.util.Map`'s API does not allow for an + * atomic `get` when `null` values may be present. + */ + @SerialVersionUID(3L) + class JMapWrapper[K, V](val underlying : ju.Map[K, V]) + extends AbstractJMapWrapper[K, V] with Serializable { + + override def isEmpty: Boolean = underlying.isEmpty + override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize + override def empty = new JMapWrapper(new ju.HashMap[K, V]) + } + + @SerialVersionUID(3L) + class ConcurrentMapWrapper[K, V](underlying: concurrent.Map[K, V]) extends MutableMapWrapper[K, V](underlying) with juc.ConcurrentMap[K, V] { + + def underlyingConcurrentMap: concurrent.Map[K, V] = underlying + + override def putIfAbsent(k: K, v: V) = underlying.putIfAbsent(k, v) match { + case Some(v) => v + case None => null.asInstanceOf[V] + } + + override def remove(k: AnyRef, v: AnyRef) = try { + underlying.remove(k.asInstanceOf[K], v.asInstanceOf[V]) + } catch { + case ex: ClassCastException => + false + } + + override def replace(k: K, v: V): V = underlying.replace(k, v) match { + case Some(v) => v + case None => null.asInstanceOf[V] + } + + override def replace(k: K, oldval: V, newval: V) = underlying.replace(k, oldval, newval) + } + + /** Wraps a concurrent Java map as a Scala one. Single-element concurrent + * access is supported; multi-element operations such as maps and filters + * are not guaranteed to be atomic. + */ + @SerialVersionUID(3L) + class JConcurrentMapWrapper[K, V](val underlying: juc.ConcurrentMap[K, V]) + extends AbstractJMapWrapper[K, V] + with concurrent.Map[K, V] { + + override def get(k: K) = Option(underlying get k) + + override def getOrElseUpdate(key: K, op: => V): V = + underlying.computeIfAbsent(key, _ => op) match { + case null => super/*[concurrent.Map]*/.getOrElseUpdate(key, op) + case v => v + } + + override def isEmpty: Boolean = underlying.isEmpty + override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize + override def empty = new JConcurrentMapWrapper(new juc.ConcurrentHashMap[K, V]) + + def putIfAbsent(k: K, v: V): Option[V] = Option(underlying.putIfAbsent(k, v)) + + def remove(k: K, v: V): Boolean = underlying.remove(k, v) + + def replace(k: K, v: V): Option[V] = Option(underlying.replace(k, v)) + + def replace(k: K, oldvalue: V, newvalue: V): Boolean = underlying.replace(k, oldvalue, newvalue) + + override def lastOption: Option[(K, V)] = + underlying match { + case nav: NavigableMap[K @unchecked, V @unchecked] => Option(nav.lastEntry).map(e => (e.getKey, e.getValue)) + case _ if isEmpty => None + case _ => Try(last).toOption + } + + override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + def remap(k: K, v: V): V = + remappingFunction(Option(v)) match { + case Some(null) => throw PutNull // see scala/scala#10129 + case Some(x) => x + case None => null.asInstanceOf[V] + } + try Option(underlying.compute(key, remap)) + catch { + case PutNull => super/*[concurrent.Map]*/.updateWith(key)(remappingFunction) + } + } + } + + @SerialVersionUID(3L) + class DictionaryWrapper[K, V](val underlying: mutable.Map[K, V]) extends ju.Dictionary[K, V] with Serializable { + def size: Int = underlying.size + def isEmpty: Boolean = underlying.isEmpty + def 
keys: ju.Enumeration[K] = underlying.keysIterator.asJavaEnumeration + def elements: ju.Enumeration[V] = underlying.valuesIterator.asJavaEnumeration + def get(key: AnyRef) = try { + underlying get key.asInstanceOf[K] match { + case None => null.asInstanceOf[V] + case Some(v) => v + } + } catch { + case ex: ClassCastException => null.asInstanceOf[V] + } + def put(key: K, value: V): V = underlying.put(key, value) match { + case Some(v) => v + case None => null.asInstanceOf[V] + } + override def remove(key: AnyRef) = try { + underlying remove key.asInstanceOf[K] match { + case None => null.asInstanceOf[V] + case Some(v) => v + } + } catch { + case ex: ClassCastException => null.asInstanceOf[V] + } + } + + @SerialVersionUID(3L) + class JDictionaryWrapper[K, V](val underlying: ju.Dictionary[K, V]) extends mutable.AbstractMap[K, V] with Serializable { + override def size: Int = underlying.size + override def isEmpty: Boolean = underlying.isEmpty + override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize + + def get(k: K) = Option(underlying get k) + + def addOne(kv: (K, V)): this.type = { underlying.put(kv._1, kv._2); this } + def subtractOne(key: K): this.type = { underlying remove key; this } + + override def put(k: K, v: V): Option[V] = Option(underlying.put(k, v)) + + override def update(k: K, v: V): Unit = { underlying.put(k, v) } + + override def remove(k: K): Option[V] = Option(underlying remove k) + def iterator = underlying.keys.asScala map (k => (k, underlying get k)) + + override def clear() = iterator.foreach(entry => underlying.remove(entry._1)) + + override def mapFactory = mutable.HashMap + } + + @SerialVersionUID(3L) + class JPropertiesWrapper(underlying: ju.Properties) + extends mutable.AbstractMap[String, String] + with mutable.MapOps[String, String, mutable.Map, mutable.Map[String, String]] + with StrictOptimizedMapOps[String, String, mutable.Map, mutable.Map[String, String]] + with StrictOptimizedIterableOps[(String, String), mutable.Iterable, mutable.Map[String, String]] + with Serializable { + + override def size = underlying.size + override def isEmpty: Boolean = underlying.isEmpty + override def knownSize: Int = size + def get(k: String) = { + val v = underlying get k + if (v != null) Some(v.asInstanceOf[String]) else None + } + + def addOne(kv: (String, String)): this.type = { underlying.put(kv._1, kv._2); this } + def subtractOne(key: String): this.type = { underlying remove key; this } + + override def put(k: String, v: String): Option[String] = { + val r = underlying.put(k, v) + if (r != null) Some(r.asInstanceOf[String]) else None + } + + override def update(k: String, v: String): Unit = { underlying.put(k, v) } + + override def remove(k: String): Option[String] = { + val r = underlying remove k + if (r != null) Some(r.asInstanceOf[String]) else None + } + + def iterator: Iterator[(String, String)] = new AbstractIterator[(String, String)] { + val ui = underlying.entrySet.iterator + def hasNext = ui.hasNext + def next() = { + val e = ui.next() + (e.getKey.asInstanceOf[String], e.getValue.asInstanceOf[String]) + } + } + + override def clear() = underlying.clear() + + override def empty = new JPropertiesWrapper(new ju.Properties) + + def getProperty(key: String) = underlying.getProperty(key) + + def getProperty(key: String, defaultValue: String) = + underlying.getProperty(key, defaultValue) + + def setProperty(key: String, value: String) = + underlying.setProperty(key, value) + + override def mapFactory = mutable.HashMap + } + + /** Thrown when 
certain Map operations attempt to put a null value. */ + private val PutNull = new ControlThrowable {} +} diff --git a/tests/pos-special/stdlib/collection/convert/StreamExtensions.scala b/tests/pos-special/stdlib/collection/convert/StreamExtensions.scala new file mode 100644 index 000000000000..ddda95707881 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/StreamExtensions.scala @@ -0,0 +1,481 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert + +import java.util.Spliterator +import java.util.stream._ +import java.{lang => jl} + +import scala.annotation.implicitNotFound +import scala.collection.Stepper.EfficientSplit +import scala.collection._ +import scala.collection.convert.StreamExtensions.{AccumulatorFactoryInfo, StreamShape, StreamUnboxer} +import scala.jdk.CollectionConverters._ +import scala.jdk._ +import language.experimental.captureChecking + +/** Defines extension methods to create Java Streams for Scala collections, available through + * [[scala.jdk.javaapi.StreamConverters]]. + */ +trait StreamExtensions { + // collections + + implicit class IterableHasSeqStream[A](cc: IterableOnce[A]) { + /** Create a sequential [[java.util.stream.Stream Java Stream]] for this collection. If the + * collection contains primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaSeqStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[A, S, St], st: StepperShape[A, St]): S = + s.fromStepper(cc.stepper, par = false) + } + + // Not `CC[X] <: IterableOnce[X]`, but `C` with an extra constraint, to support non-parametric classes like IntAccumulator + implicit class IterableNonGenericHasParStream[A, C <: IterableOnce[_]](c: C)(implicit ev: C <:< IterableOnce[A]) { + private type IterableOnceWithEfficientStepper = IterableOnce[A] { + def stepper[S <: Stepper[_]](implicit shape : StepperShape[A, S]) : S with EfficientSplit + } + + /** Create a parallel [[java.util.stream.Stream Java Stream]] for this collection. If the + * collection contains primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaParStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit + s: StreamShape[A, S, St], + st: StepperShape[A, St], + @implicitNotFound("`parStream` can only be called on collections where `stepper` returns a `Stepper with EfficientSplit`") + isEfficient: C <:< IterableOnceWithEfficientStepper): S = + s.fromStepper(ev(c).stepper, par = true) + } + + // maps + + implicit class MapHasSeqKeyValueStream[K, V, CC[X, Y] <: collection.MapOps[X, Y, collection.Map, _]](cc: CC[K, V]) { + /** Create a sequential [[java.util.stream.Stream Java Stream]] for the keys of this map. If + * the keys are primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaSeqKeyStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[K, S, St], st: StepperShape[K, St]): S = + s.fromStepper(cc.keyStepper, par = false) + + /** Create a sequential [[java.util.stream.Stream Java Stream]] for the values of this map. 
If + * the values are primitives, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaSeqValueStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[V, S, St], st: StepperShape[V, St]): S = + s.fromStepper(cc.valueStepper, par = false) + + // The asJavaSeqStream extension method for IterableOnce doesn't apply because its `CC` takes a single type parameter, whereas the one here takes two + /** Create a sequential [[java.util.stream.Stream Java Stream]] for the `(key, value)` pairs of + * this map. + */ + def asJavaSeqStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[(K, V), S, St], st: StepperShape[(K, V), St]): S = + s.fromStepper(cc.stepper, par = false) + } + + + implicit class MapHasParKeyValueStream[K, V, CC[X, Y] <: collection.MapOps[X, Y, collection.Map, _]](cc: CC[K, V]) { + private type MapOpsWithEfficientKeyStepper = collection.MapOps[K, V, collection.Map, _] { def keyStepper[S <: Stepper[_]](implicit shape : StepperShape[K, S]) : S with EfficientSplit } + private type MapOpsWithEfficientValueStepper = collection.MapOps[K, V, collection.Map, _] { def valueStepper[S <: Stepper[_]](implicit shape : StepperShape[V, S]) : S with EfficientSplit } + private type MapOpsWithEfficientStepper = collection.MapOps[K, V, collection.Map, _] { def stepper[S <: Stepper[_]](implicit shape : StepperShape[(K, V), S]) : S with EfficientSplit } + + /** Create a parallel [[java.util.stream.Stream Java Stream]] for the keys of this map. If + * the keys are primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaParKeyStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit + s: StreamShape[K, S, St], + st: StepperShape[K, St], + @implicitNotFound("parKeyStream can only be called on maps where `keyStepper` returns a `Stepper with EfficientSplit`") + isEfficient: CC[K, V] <:< MapOpsWithEfficientKeyStepper): S = + s.fromStepper(cc.keyStepper, par = true) + + /** Create a parallel [[java.util.stream.Stream Java Stream]] for the values of this map. If + * the values are primitives, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaParValueStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit + s: StreamShape[V, S, St], + st: StepperShape[V, St], + @implicitNotFound("parValueStream can only be called on maps where `valueStepper` returns a `Stepper with EfficientSplit`") + isEfficient: CC[K, V] <:< MapOpsWithEfficientValueStepper): S = + s.fromStepper(cc.valueStepper, par = true) + + // The asJavaParStream extension method for IterableOnce doesn't apply because its `CC` takes a single type parameter, whereas the one here takes two + /** Create a parallel [[java.util.stream.Stream Java Stream]] for the `(key, value)` pairs of + * this map. + */ + def asJavaParStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit + s: StreamShape[(K, V), S, St], + st: StepperShape[(K, V), St], + @implicitNotFound("parStream can only be called on maps where `stepper` returns a `Stepper with EfficientSplit`") + isEfficient: CC[K, V] <:< MapOpsWithEfficientStepper): S = + s.fromStepper(cc.stepper, par = true) + } + + // steppers + + implicit class StepperHasSeqStream[A](stepper: Stepper[A]) { + /** Create a sequential [[java.util.stream.Stream Java Stream]] for this stepper. 
If the + * stepper yields primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaSeqStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[A, S, St], st: StepperShape[A, St]): S = { + val sStepper = stepper match { + case as: AnyStepper[A] => st.seqUnbox(as) + case _ => stepper.asInstanceOf[St] + } + s.fromStepper(sStepper, par = false) + } + } + + implicit class StepperHasParStream[A](stepper: Stepper[A] with EfficientSplit) { + /** Create a parallel [[java.util.stream.Stream Java Stream]] for this stepper. If the + * stepper yields primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaParStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[A, S, St], st: StepperShape[A, St]): S = { + val sStepper = stepper match { + case as: AnyStepper[A] with EfficientSplit => st.parUnbox(as) + case _ => stepper.asInstanceOf[St] + } + s.fromStepper(sStepper, par = true) + } + } + + // arrays + // uses the JDK array spliterators (`DoubleArraySpliterator`). users can also call + // `array.stepper.seqStream`, which then uses the Scala steppers (`DoubleArrayStepper`). the + // steppers are also available on byte/short/char/float arrays (`WidenedByteArrayStepper`), + // JDK spliterators only for double/int/long/reference. + + implicit class DoubleArrayHasSeqParStream(a: Array[Double]) { + /** Create a sequential [[java.util.stream.DoubleStream Java DoubleStream]] for this array. */ + def asJavaSeqStream: DoubleStream = java.util.Arrays.stream(a) + /** Create a parallel [[java.util.stream.DoubleStream Java DoubleStream]] for this array. */ + def asJavaParStream: DoubleStream = asJavaSeqStream.parallel + } + + implicit class IntArrayHasSeqParStream(a: Array[Int]) { + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaSeqStream: IntStream = java.util.Arrays.stream(a) + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaParStream: IntStream = asJavaSeqStream.parallel + } + + implicit class LongArrayHasSeqParStream(a: Array[Long]) { + /** Create a sequential [[java.util.stream.LongStream Java LongStream]] for this array. */ + def asJavaSeqStream: LongStream = java.util.Arrays.stream(a) + /** Create a parallel [[java.util.stream.LongStream Java LongStream]] for this array. */ + def asJavaParStream: LongStream = asJavaSeqStream.parallel + } + + implicit class AnyArrayHasSeqParStream[A <: AnyRef](a: Array[A]) { + /** Create a sequential [[java.util.stream.Stream Java Stream]] for this array. */ + def asJavaSeqStream: Stream[A] = java.util.Arrays.stream(a) + /** Create a parallel [[java.util.stream.Stream Java Stream]] for this array. */ + def asJavaParStream: Stream[A] = asJavaSeqStream.parallel + } + + implicit class ByteArrayHasSeqParStream(a: Array[Byte]) { + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaSeqStream: IntStream = a.stepper.asJavaSeqStream + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaParStream: IntStream = a.stepper.asJavaParStream + } + + implicit class ShortArrayHasSeqParStream(a: Array[Short]) { + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for this array. 
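+ * The `Short` values are widened to `Int`, since the JDK has no primitive `ShortStream`.
+ * A usage sketch (the values are hypothetical):
+ * {{{
+ * Array[Short](1, 2, 3).asJavaSeqStream.sum()  // 6, summed as an IntStream
+ * }}}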
*/ + def asJavaSeqStream: IntStream = a.stepper.asJavaSeqStream + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaParStream: IntStream = a.stepper.asJavaParStream + } + + implicit class CharArrayHasSeqParStream(a: Array[Char]) { + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaSeqStream: IntStream = a.stepper.asJavaSeqStream + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaParStream: IntStream = a.stepper.asJavaParStream + } + + implicit class FloatArrayHasSeqParStream(a: Array[Float]) { + /** Create a sequential [[java.util.stream.DoubleStream Java DoubleStream]] for this array. */ + def asJavaSeqStream: DoubleStream = a.stepper.asJavaSeqStream + /** Create a parallel [[java.util.stream.DoubleStream Java DoubleStream]] for this array. */ + def asJavaParStream: DoubleStream = a.stepper.asJavaParStream + } + + + + // strings + + implicit class StringHasSeqParStream(s: String) { + /** + * A sequential stream on the characters of a string, same as [[asJavaSeqCharStream]]. See also + * [[asJavaSeqCodePointStream]]. + */ + def asJavaSeqStream: IntStream = StreamSupport.intStream(s.stepper.spliterator, /* par = */ false) + /** + * A parallel stream on the characters of a string, same as [[asJavaParCharStream]]. See also + * [[asJavaParCodePointStream]]. + */ + def asJavaParStream: IntStream = StreamSupport.intStream(s.stepper.spliterator, /* par = */ true) + + /** A sequential stream on the characters of a string. See also [[asJavaSeqCodePointStream]]. */ + def asJavaSeqCharStream: IntStream = StreamSupport.intStream(s.charStepper.spliterator, /* par = */ false) + /** A parallel stream on the characters of a string. See also [[asJavaParCodePointStream]]. */ + def asJavaParCharStream: IntStream = StreamSupport.intStream(s.charStepper.spliterator, /* par = */ true) + + /** A sequential stream on the code points of a string. See also [[asJavaSeqCharStream]]. */ + def asJavaSeqCodePointStream: IntStream = StreamSupport.intStream(s.codePointStepper.spliterator, /* par = */ false) + /** A parallel stream on the code points of a string. See also [[asJavaParCharStream]]. */ + def asJavaParCodePointStream: IntStream = StreamSupport.intStream(s.codePointStepper.spliterator, /* par = */ true) + } + + // toScala for streams + + implicit class StreamHasToScala[A](stream: Stream[A]) { + /** + * Copy the elements of this stream into a Scala collection. + * + * Converting a parallel stream to an [[scala.jdk.Accumulator]] using `stream.toScala(Accumulator)` + * builds the result in parallel. + * + * A `toScala(Accumulator)` call automatically converts streams of boxed integers, longs or + * doubles to the primitive accumulators ([[scala.jdk.IntAccumulator]], etc.). + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well.
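+ *
+ * A usage sketch of both paths (the values are hypothetical; the import is the standard
+ * way to bring these extensions into scope):
+ * {{{
+ * import scala.jdk.StreamConverters._
+ *
+ * java.util.stream.Stream.of("a", "b").toScala(List)  // List("a", "b"), converted directly
+ *
+ * // a parallel stream of boxed Ints is collected into a primitive IntAccumulator, in parallel
+ * java.util.stream.IntStream.range(0, 4).boxed().parallel().toScala(scala.jdk.Accumulator)
+ * }}}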
+ */ + def toScala[C1](factory: collection.Factory[A, C1])(implicit info: AccumulatorFactoryInfo[A, C1]): C1 = { + + def anyAcc = stream.collect(AnyAccumulator.supplier[A], AnyAccumulator.adder[A], AnyAccumulator.merger[A]) + if (info.companion == AnyAccumulator) anyAcc.asInstanceOf[C1] + else if (info.companion == IntAccumulator) stream.asInstanceOf[Stream[Int]].collect(IntAccumulator.supplier, IntAccumulator.boxedAdder, IntAccumulator.merger).asInstanceOf[C1] + else if (info.companion == LongAccumulator) stream.asInstanceOf[Stream[Long]].collect(LongAccumulator.supplier, LongAccumulator.boxedAdder, LongAccumulator.merger).asInstanceOf[C1] + else if (info.companion == DoubleAccumulator) stream.asInstanceOf[Stream[Double]].collect(DoubleAccumulator.supplier, DoubleAccumulator.boxedAdder, DoubleAccumulator.merger).asInstanceOf[C1] + else if (stream.isParallel) anyAcc.to(factory) + else factory.fromSpecific(stream.iterator.asScala) + } + + /** Convert a generic Java Stream wrapping a primitive type to a corresponding primitive + * Stream. + */ + def asJavaPrimitiveStream[S](implicit unboxer: StreamUnboxer[A, S]): S = unboxer(stream) + } + + implicit class IntStreamHasToScala(stream: IntStream) { + /** + * Copy the elements of this stream into a Scala collection. + * + * Converting a parallel stream to an [[scala.jdk.Accumulator]] using `stream.toScala(Accumulator)` + * builds the result in parallel. + * + * A `toScala(Accumulator)` call automatically converts the `IntStream` to a primitive + * [[scala.jdk.IntAccumulator]]. + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well. + */ + def toScala[C1](factory: collection.Factory[Int, C1])(implicit info: AccumulatorFactoryInfo[Int, C1]): C1 = { + def intAcc = stream.collect(IntAccumulator.supplier, IntAccumulator.adder, IntAccumulator.merger) + if (info.companion == AnyAccumulator) stream.collect(AnyAccumulator.supplier[Int], AnyAccumulator.unboxedIntAdder, AnyAccumulator.merger[Int]).asInstanceOf[C1] + else if (info.companion == IntAccumulator) intAcc.asInstanceOf[C1] + else if (stream.isParallel) intAcc.to(factory) + else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Int]].asScala) + } + } + + implicit class LongStreamHasToScala(stream: LongStream) { + /** + * Copy the elements of this stream into a Scala collection. + * + * Converting a parallel stream to an [[scala.jdk.Accumulator]] using `stream.toScala(Accumulator)` + * builds the result in parallel. + * + * A `toScala(Accumulator)` call automatically converts the `LongStream` to a primitive + * [[scala.jdk.LongAccumulator]]. + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well.
+ */ + def toScala[C1](factory: collection.Factory[Long, C1])(implicit info: AccumulatorFactoryInfo[Long, C1]): C1 = { + def longAcc = stream.collect(LongAccumulator.supplier, LongAccumulator.adder, LongAccumulator.merger) + if (info.companion == AnyAccumulator) stream.collect(AnyAccumulator.supplier[Long], AnyAccumulator.unboxedLongAdder, AnyAccumulator.merger[Long]).asInstanceOf[C1] + else if (info.companion == LongAccumulator) longAcc.asInstanceOf[C1] + else if (stream.isParallel) longAcc.to(factory) + else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Long]].asScala) + } + } + + implicit class DoubleStreamHasToScala(stream: DoubleStream) { + /** + * Copy the elements of this stream into a Scala collection. + * + * Converting a parallel stream to an [[scala.jdk.Accumulator]] using `stream.toScala(Accumulator)` + * builds the result in parallel. + * + * A `toScala(Accumulator)` call automatically converts the `DoubleStream` to a primitive + * [[scala.jdk.DoubleAccumulator]]. + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well. + */ + def toScala[C1](factory: collection.Factory[Double, C1])(implicit info: AccumulatorFactoryInfo[Double, C1]): C1 = { + def doubleAcc = stream.collect(DoubleAccumulator.supplier, DoubleAccumulator.adder, DoubleAccumulator.merger) + if (info.companion == AnyAccumulator) stream.collect(AnyAccumulator.supplier[Double], AnyAccumulator.unboxedDoubleAdder, AnyAccumulator.merger[Double]).asInstanceOf[C1] + else if (info.companion == DoubleAccumulator) doubleAcc.asInstanceOf[C1] + else if (stream.isParallel) doubleAcc.to(factory) + else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Double]].asScala) + } + } +} + +object StreamExtensions { + /** An implicit StreamShape instance connects element types with the corresponding specialized + * Stream and Stepper types. This is used in `asJavaStream` extension methods to create + * generic or primitive streams according to the element type.
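+ *
+ * For instance, under the instances defined below (a sketch; the values are hypothetical):
+ * {{{
+ * import scala.jdk.StreamConverters._
+ * val ints: java.util.stream.IntStream      = List(1, 2, 3).asJavaSeqStream   // via intStreamShape
+ * val strs: java.util.stream.Stream[String] = List("a", "b").asJavaSeqStream  // via anyStreamShape
+ * }}}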
+ */ + sealed trait StreamShape[T, S <: BaseStream[_, _], St <: Stepper[_]] { + final def fromStepper(st: St, par: Boolean): S = mkStream(st, par) + protected def mkStream(st: St, par: Boolean): S + } + + object StreamShape extends StreamShapeLowPriority1 { + // primitive + implicit val intStreamShape : StreamShape[Int , IntStream , IntStepper] = mkIntStreamShape[Int] + implicit val longStreamShape : StreamShape[Long , LongStream , LongStepper] = mkLongStreamShape[Long] + implicit val doubleStreamShape: StreamShape[Double, DoubleStream, DoubleStepper] = mkDoubleStreamShape[Double] + + // widening + implicit val byteStreamShape : StreamShape[Byte , IntStream , IntStepper] = mkIntStreamShape[Byte] + implicit val shortStreamShape: StreamShape[Short, IntStream , IntStepper] = mkIntStreamShape[Short] + implicit val charStreamShape : StreamShape[Char , IntStream , IntStepper] = mkIntStreamShape[Char] + implicit val floatStreamShape: StreamShape[Float, DoubleStream, DoubleStepper] = mkDoubleStreamShape[Float] + + // boxed java primitives + + implicit val jIntegerStreamShape : StreamShape[jl.Integer , IntStream , IntStepper ] = mkIntStreamShape[jl.Integer] + implicit val jLongStreamShape : StreamShape[jl.Long , LongStream , LongStepper ] = mkLongStreamShape[jl.Long] + implicit val jDoubleStreamShape : StreamShape[jl.Double , DoubleStream, DoubleStepper] = mkDoubleStreamShape[jl.Double] + implicit val jByteStreamShape : StreamShape[jl.Byte , IntStream , IntStepper ] = mkIntStreamShape[jl.Byte] + implicit val jShortStreamShape : StreamShape[jl.Short , IntStream , IntStepper ] = mkIntStreamShape[jl.Short] + implicit val jCharacterStreamShape : StreamShape[jl.Character, IntStream , IntStepper ] = mkIntStreamShape[jl.Character] + implicit val jFloatStreamShape : StreamShape[jl.Float , DoubleStream, DoubleStepper] = mkDoubleStreamShape[jl.Float] + + private def mkIntStreamShape[T]: StreamShape[T, IntStream, IntStepper] = new StreamShape[T, IntStream, IntStepper] { + protected def mkStream(st: IntStepper, par: Boolean): IntStream = StreamSupport.intStream(st.spliterator, par) + } + + private def mkLongStreamShape[T]: StreamShape[T, LongStream, LongStepper] = new StreamShape[T, LongStream, LongStepper] { + protected def mkStream(st: LongStepper, par: Boolean): LongStream = StreamSupport.longStream(st.spliterator, par) + } + + private def mkDoubleStreamShape[T]: StreamShape[T, DoubleStream, DoubleStepper] = new StreamShape[T, DoubleStream, DoubleStepper] { + protected def mkStream(st: DoubleStepper, par: Boolean): DoubleStream = StreamSupport.doubleStream(st.spliterator, par) + } + } + + trait StreamShapeLowPriority1 { + // reference + implicit def anyStreamShape[T]: StreamShape[T, Stream[T], Stepper[T]] = anyStreamShapePrototype.asInstanceOf[StreamShape[T, Stream[T], Stepper[T]]] + + private[this] val anyStreamShapePrototype: StreamShape[AnyRef, Stream[AnyRef], Stepper[AnyRef]] = new StreamShape[AnyRef, Stream[AnyRef], Stepper[AnyRef]] { + def mkStream(s: Stepper[AnyRef], par: Boolean): Stream[AnyRef] = StreamSupport.stream(s.spliterator.asInstanceOf[Spliterator[AnyRef]], par) + } + } + + /** Connects a stream element type `A` to the corresponding, potentially specialized, Stream type. + * Used in the `stream.asJavaPrimitiveStream` extension method. 
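+ * For example (a sketch; the boxed stream is hypothetical):
+ * {{{
+ * import scala.jdk.StreamConverters._
+ * val boxed: java.util.stream.Stream[Int] = java.util.stream.Stream.of(1, 2, 3)
+ * val unboxed: java.util.stream.IntStream = boxed.asJavaPrimitiveStream
+ * }}}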
+ */ + sealed trait StreamUnboxer[A, S] { + def apply(s: Stream[A]): S + } + object StreamUnboxer { + implicit val intStreamUnboxer: StreamUnboxer[Int, IntStream] = new StreamUnboxer[Int, IntStream] { + def apply(s: Stream[Int]): IntStream = s.mapToInt(x => x) + } + implicit val javaIntegerStreamUnboxer: StreamUnboxer[jl.Integer, IntStream] = intStreamUnboxer.asInstanceOf[StreamUnboxer[jl.Integer, IntStream]] + + implicit val longStreamUnboxer: StreamUnboxer[Long, LongStream] = new StreamUnboxer[Long, LongStream] { + def apply(s: Stream[Long]): LongStream = s.mapToLong(x => x) + } + implicit val javaLongStreamUnboxer: StreamUnboxer[jl.Long, LongStream] = longStreamUnboxer.asInstanceOf[StreamUnboxer[jl.Long, LongStream]] + + implicit val doubleStreamUnboxer: StreamUnboxer[Double, DoubleStream] = new StreamUnboxer[Double, DoubleStream] { + def apply(s: Stream[Double]): DoubleStream = s.mapToDouble(x => x) + } + implicit val javaDoubleStreamUnboxer: StreamUnboxer[jl.Double, DoubleStream] = doubleStreamUnboxer.asInstanceOf[StreamUnboxer[jl.Double, DoubleStream]] + } + + + + /** An implicit `AccumulatorFactoryInfo` connects primitive element types to the corresponding + * specialized [[scala.jdk.Accumulator]] factory. This is used in the `stream.toScala` extension methods + * to ensure collecting a primitive stream into a primitive accumulator does not box. + * + * When converting to a collection other than `Accumulator`, the generic + * `noAccumulatorFactoryInfo` is passed. + */ + trait AccumulatorFactoryInfo[A, C] { + val companion: AnyRef + } + trait LowPriorityAccumulatorFactoryInfo { + implicit def noAccumulatorFactoryInfo[A, C]: AccumulatorFactoryInfo[A, C] = noAccumulatorFactoryInfoPrototype.asInstanceOf[AccumulatorFactoryInfo[A, C]] + private val noAccumulatorFactoryInfoPrototype: AccumulatorFactoryInfo[AnyRef, AnyRef] = new AccumulatorFactoryInfo[AnyRef, AnyRef] { + val companion: AnyRef = null + } + } + object AccumulatorFactoryInfo extends LowPriorityAccumulatorFactoryInfo { + implicit def anyAccumulatorFactoryInfo[A]: AccumulatorFactoryInfo[A, AnyAccumulator[A]] = anyAccumulatorFactoryInfoPrototype.asInstanceOf[AccumulatorFactoryInfo[A, AnyAccumulator[A]]] + + private object anyAccumulatorFactoryInfoPrototype extends AccumulatorFactoryInfo[AnyRef, AnyAccumulator[AnyRef]] { + val companion: AnyRef = AnyAccumulator + } + + implicit val intAccumulatorFactoryInfo: AccumulatorFactoryInfo[Int, IntAccumulator] = new AccumulatorFactoryInfo[Int, IntAccumulator] { + val companion: AnyRef = IntAccumulator + } + + implicit val longAccumulatorFactoryInfo: AccumulatorFactoryInfo[Long, LongAccumulator] = new AccumulatorFactoryInfo[Long, LongAccumulator] { + val companion: AnyRef = LongAccumulator + } + + implicit val doubleAccumulatorFactoryInfo: AccumulatorFactoryInfo[Double, DoubleAccumulator] = new AccumulatorFactoryInfo[Double, DoubleAccumulator] { + val companion: AnyRef = DoubleAccumulator + } + + implicit val jIntegerAccumulatorFactoryInfo: AccumulatorFactoryInfo[jl.Integer, IntAccumulator] = intAccumulatorFactoryInfo.asInstanceOf[AccumulatorFactoryInfo[jl.Integer, IntAccumulator]] + implicit val jLongAccumulatorFactoryInfo: AccumulatorFactoryInfo[jl.Long, IntAccumulator] = longAccumulatorFactoryInfo.asInstanceOf[AccumulatorFactoryInfo[jl.Long, IntAccumulator]] + implicit val jDoubleAccumulatorFactoryInfo: AccumulatorFactoryInfo[jl.Double, IntAccumulator] = doubleAccumulatorFactoryInfo.asInstanceOf[AccumulatorFactoryInfo[jl.Double, IntAccumulator]] + } +} diff --git 
a/tests/pos-special/stdlib/collection/convert/impl/ArrayStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/ArrayStepper.scala new file mode 100644 index 000000000000..ba51c7a5a353 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/ArrayStepper.scala @@ -0,0 +1,80 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import scala.collection._ +// import language.experimental.captureChecking // TODO enable + +private[collection] class ObjectArrayStepper[A <: Object](underlying: Array[A], _i0: Int, _iN: Int) + extends IndexedStepperBase[AnyStepper[A], ObjectArrayStepper[A]](_i0, _iN) + with AnyStepper[A] { + def nextStep(): A = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): ObjectArrayStepper[A] = new ObjectArrayStepper[A](underlying, i0, half) +} + +private[collection] class BoxedBooleanArrayStepper(underlying: Array[Boolean], _i0: Int, _iN: Int) + extends IndexedStepperBase[AnyStepper[Boolean], BoxedBooleanArrayStepper](_i0, _iN) + with AnyStepper[Boolean] { + def nextStep(): Boolean = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): BoxedBooleanArrayStepper = new BoxedBooleanArrayStepper(underlying, i0, half) +} + +private[collection] class WidenedByteArrayStepper(underlying: Array[Byte], _i0: Int, _iN: Int) + extends IndexedStepperBase[IntStepper, WidenedByteArrayStepper](_i0, _iN) + with IntStepper { + def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): WidenedByteArrayStepper = new WidenedByteArrayStepper(underlying, i0, half) +} + +private[collection] class WidenedCharArrayStepper(underlying: Array[Char], _i0: Int, _iN: Int) + extends IndexedStepperBase[IntStepper, WidenedCharArrayStepper](_i0, _iN) + with IntStepper { + def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): WidenedCharArrayStepper = new WidenedCharArrayStepper(underlying, i0, half) +} + +private[collection] class WidenedShortArrayStepper(underlying: Array[Short], _i0: Int, _iN: Int) + extends IndexedStepperBase[IntStepper, WidenedShortArrayStepper](_i0, _iN) + with IntStepper { + def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): WidenedShortArrayStepper = new WidenedShortArrayStepper(underlying, i0, half) +} + +private[collection] class WidenedFloatArrayStepper(underlying: Array[Float], _i0: Int, _iN: Int) + extends IndexedStepperBase[DoubleStepper, WidenedFloatArrayStepper](_i0, _iN) + with DoubleStepper { + def nextStep(): Double = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): WidenedFloatArrayStepper = new WidenedFloatArrayStepper(underlying, i0, half) +} + +private[collection] class DoubleArrayStepper(underlying: Array[Double], _i0: Int, _iN: Int) + extends IndexedStepperBase[DoubleStepper, DoubleArrayStepper](_i0, _iN) + with DoubleStepper { + def nextStep(): Double = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else 
Stepper.throwNSEE() + protected def semiclone(half: Int): DoubleArrayStepper = new DoubleArrayStepper(underlying, i0, half) +} + +private[collection] class IntArrayStepper(underlying: Array[Int], _i0: Int, _iN: Int) + extends IndexedStepperBase[IntStepper, IntArrayStepper](_i0, _iN) + with IntStepper { + def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): IntArrayStepper = new IntArrayStepper(underlying, i0, half) +} + +private[collection] class LongArrayStepper(underlying: Array[Long], _i0: Int, _iN: Int) + extends IndexedStepperBase[LongStepper, LongArrayStepper](_i0, _iN) + with LongStepper { + def nextStep(): Long = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): LongArrayStepper = new LongArrayStepper(underlying, i0, half) +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/BinaryTreeStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/BinaryTreeStepper.scala new file mode 100644 index 000000000000..8b2f604b0977 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/BinaryTreeStepper.scala @@ -0,0 +1,249 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import java.util.Spliterator + +import annotation.tailrec +import scala.collection.Stepper.EfficientSplit +import scala.collection._ +// import language.experimental.captureChecking // TODO enable + + +private[collection] object BinaryTreeStepper { + val emptyStack = new Array[AnyRef](0) +} + + +/** A generic stepper that can traverse ordered binary trees. + * The tree is assumed to have all the stuff on the left first, then the root, then everything on the right. + * + * Splits occur at the root of whatever has not yet been traversed (the substepper steps up to but + * does not include the root). + * + * The stepper maintains an internal stack, not relying on the tree traversal to be reversible. Trees with + * nodes that maintain a parent pointer may be traversed slightly faster without a stack, but splitting is + * more awkward. + * + * Algorithmically, this class implements a simple state machine that unrolls the left-leaning links in + * a binary tree onto a stack. At all times, the machine should be in one of these states: + * 1. Empty: `myCurrent` is `null` and `index` is `-1`. `stack` should also be `Array.empty` then. + * 2. Ready: `myCurrent` is not `null` and contains the next `A` to be extracted + * 3. Pending: `myCurrent` is `null` and `stack(index)` contains the next node to visit + * + * Subclasses should allow this class to do all the work of maintaining state; `next` should simply + * reduce `maxLength` by one, and consume `myCurrent` and set it to `null` if `hasNext` is true. 
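+ *
+ * An illustrative sketch with a hypothetical node class (these steppers are
+ * `private[collection]`, so this only compiles inside this package):
+ * {{{
+ * final class Node(val value: Int, val left: Node, val right: Node)
+ * val root = new Node(2, new Node(1, null, null), new Node(3, null, null))
+ * val st = AnyBinaryTreeStepper.from[Int, Node](3, root, _.left, _.right, _.value)
+ * while (st.hasStep) println(st.nextStep())  // prints 1, 2, 3: in-order traversal
+ * }}}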
+ */ +private[collection] abstract class BinaryTreeStepperBase[A, T >: Null <: AnyRef, Sub >: Null, Semi <: Sub with BinaryTreeStepperBase[A, T, _, _]]( + protected var maxLength: Int, protected var myCurrent: T, protected var stack: Array[AnyRef], protected var index: Int, + protected val left: T => T, protected val right: T => T +) +extends EfficientSplit { + /** Unrolls a subtree onto the stack starting from a particular node, returning + * the last node found. This final node is _not_ placed on the stack, and + * may have things to its right. + */ + @tailrec protected final def unroll(from: T): T = { + val l = left(from) + if (l eq null) from + else { + if (index+1 >= stack.length) stack = java.util.Arrays.copyOf(stack, 4 + stack.length*2) + index += 1 + stack(index) = from + unroll(l) + } + } + + /** Takes a subtree whose left side, if any, has already been visited, and unrolls + * the right side of the tree onto the stack, thereby detaching that node of + * the subtree from the stack entirely (so it is ready to use). It returns + * the node that is being detached. Note that the node must _not_ already be + * on the stack. + */ + protected final def detach(node: T): node.type = { + val r = right(node) + if (r ne null) { + val last = unroll(r) + if (index+1 >= stack.length) stack = java.util.Arrays.copyOf(stack, 4 + stack.length*2) + index += 1 + stack(index) = last + } + node + } + + /** Given an empty state and the root of a new tree, initialize the tree properly + * to be in an (appropriate) ready state. Will do all sorts of wrong stuff if the + * tree is not already empty. + * + * Right now overwrites everything so could allow reuse, but isn't used for it. + */ + private[impl] final def initialize(root: T, size: Int): Unit = + if (root eq null) { + maxLength = 0 + myCurrent = null + stack = BinaryTreeStepper.emptyStack + index = -1 + } + else { + maxLength = size + index = -1 + myCurrent = detach(unroll(root)) + } + + protected def semiclone(maxL: Int, myC: T, stk: Array[AnyRef], ix: Int): Semi + + def characteristics: Int = Spliterator.ORDERED + + def estimateSize: Long = if (hasStep) maxLength else 0 + + def hasStep: Boolean = (myCurrent ne null) || (maxLength > 0 && { + if (index < 0) { maxLength = 0; stack = BinaryTreeStepper.emptyStack; false } + else { + val ans = stack(index).asInstanceOf[T] + index -= 1 + myCurrent = detach(ans) + true + } + }) + + /** Splits the tree at the root by giving everything unrolled on the stack to a new stepper, + * detaching the root, and leaving the right-hand side of the root unrolled. + * + * If the tree is empty or only has one element left, it returns `null` instead of splitting. 
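+ *
+ * Typical call pattern (a sketch; `spawn` is a hypothetical stand-in for handing work
+ * to another thread):
+ * {{{
+ * val prefix = stepper.trySplit()
+ * if (prefix != null) spawn(prefix)  // traverse `prefix` concurrently with this stepper
+ * }}}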
+ */ + def trySplit(): Sub = + if (!hasStep || index < 0) null + else { + val root = stack(0).asInstanceOf[T] + val leftStack = + if (index > 0) java.util.Arrays.copyOfRange(stack, 1, index+1) + else BinaryTreeStepper.emptyStack + val leftIndex = index - 1 + val leftCurrent = myCurrent + var leftMax = maxLength + index = -1 + detach(root) + myCurrent = root + leftMax -= 2+index + maxLength -= 2+leftIndex + semiclone(leftMax, leftCurrent, leftStack, leftIndex) + } +} + + +private[collection] final class AnyBinaryTreeStepper[A, T >: Null <: AnyRef]( + _maxLength: Int, _myCurrent: T, _stack: Array[AnyRef], _index: Int, _left: T => T, _right: T => T, protected val extract: T => A +) +extends BinaryTreeStepperBase[A, T, AnyStepper[A], AnyBinaryTreeStepper[A, T]](_maxLength, _myCurrent, _stack, _index, _left, _right) +with AnyStepper[A] { + def nextStep(): A = + if (hasStep) { + val ans = extract(myCurrent) + myCurrent = null + maxLength -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(maxL: Int, myC: T, stk: Array[AnyRef], ix: Int): AnyBinaryTreeStepper[A, T] = + new AnyBinaryTreeStepper[A, T](maxL, myC, stk, ix, left, right, extract) +} +private[collection] object AnyBinaryTreeStepper { + def from[A, T >: Null <: AnyRef](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => A): AnyBinaryTreeStepper[A, T] = { + val ans = new AnyBinaryTreeStepper(0, null, BinaryTreeStepper.emptyStack, -1, left, right, extract) + ans.initialize(root, maxLength) + ans + } +} + + +private[collection] final class DoubleBinaryTreeStepper[T >: Null <: AnyRef]( + _maxLength: Int, _myCurrent: T, _stack: Array[AnyRef], _index: Int, _left: T => T, _right: T => T, protected val extract: T => Double +) +extends BinaryTreeStepperBase[Double, T, DoubleStepper, DoubleBinaryTreeStepper[T]](_maxLength, _myCurrent, _stack, _index, _left, _right) +with DoubleStepper { + def nextStep(): Double = + if (hasStep) { + val ans = extract(myCurrent) + myCurrent = null + maxLength -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(maxL: Int, myC: T, stk: Array[AnyRef], ix: Int): DoubleBinaryTreeStepper[T] = + new DoubleBinaryTreeStepper[T](maxL, myC, stk, ix, left, right, extract) +} +private [collection] object DoubleBinaryTreeStepper { + def from[T >: Null <: AnyRef](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => Double): DoubleBinaryTreeStepper[T] = { + val ans = new DoubleBinaryTreeStepper(0, null, BinaryTreeStepper.emptyStack, -1, left, right, extract) + ans.initialize(root, maxLength) + ans + } +} + + +private[collection] final class IntBinaryTreeStepper[T >: Null <: AnyRef]( + _maxLength: Int, _myCurrent: T, _stack: Array[AnyRef], _index: Int, _left: T => T, _right: T => T, protected val extract: T => Int +) +extends BinaryTreeStepperBase[Int, T, IntStepper, IntBinaryTreeStepper[T]](_maxLength, _myCurrent, _stack, _index, _left, _right) +with IntStepper { + def nextStep(): Int = + if (hasStep) { + val ans = extract(myCurrent) + myCurrent = null + maxLength -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(maxL: Int, myC: T, stk: Array[AnyRef], ix: Int): IntBinaryTreeStepper[T] = + new IntBinaryTreeStepper[T](maxL, myC, stk, ix, left, right, extract) +} +private [collection] object IntBinaryTreeStepper { + def from[T >: Null <: AnyRef](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => Int): IntBinaryTreeStepper[T] = { + val ans = new IntBinaryTreeStepper(0, null, BinaryTreeStepper.emptyStack, -1, left, right, extract) + ans.initialize(root, maxLength) 
+ ans + } +} + + + +private[collection] final class LongBinaryTreeStepper[T >: Null <: AnyRef]( + _maxLength: Int, _myCurrent: T, _stack: Array[AnyRef], _index: Int, _left: T => T, _right: T => T, protected val extract: T => Long +) +extends BinaryTreeStepperBase[Long, T, LongStepper, LongBinaryTreeStepper[T]](_maxLength, _myCurrent, _stack, _index, _left, _right) +with LongStepper { + def nextStep(): Long = + if (hasStep) { + val ans = extract(myCurrent) + myCurrent = null + maxLength -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(maxL: Int, myC: T, stk: Array[AnyRef], ix: Int): LongBinaryTreeStepper[T] = + new LongBinaryTreeStepper[T](maxL, myC, stk, ix, left, right, extract) +} +private [collection] object LongBinaryTreeStepper { + def from[T >: Null <: AnyRef](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => Long): LongBinaryTreeStepper[T] = { + val ans = new LongBinaryTreeStepper(0, null, BinaryTreeStepper.emptyStack, -1, left, right, extract) + ans.initialize(root, maxLength) + ans + } +} + + diff --git a/tests/pos-special/stdlib/collection/convert/impl/BitSetStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/BitSetStepper.scala new file mode 100644 index 000000000000..16801089c39f --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/BitSetStepper.scala @@ -0,0 +1,119 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import scala.collection.Stepper.EfficientSplit +import scala.collection.{BitSetOps, IntStepper, Stepper} +// import language.experimental.captureChecking // TODO enable + + +private[collection] final class BitSetStepper( + private var underlying: BitSetOps[_], + private var cache0: Long, private var cache1: Long, + _i0: Int, _iN: Int, + private var cacheIndex: Int +) +extends InOrderStepperBase[IntStepper, BitSetStepper](_i0, _iN) +with IntStepper { + import BitSetOps.{WordLength, LogWL} + + // When `found` is set, `i0` is an element that exists + protected var found: Boolean = false + + @annotation.tailrec + protected def findNext(): Boolean = + if (i0 >= iN) false + else { + val ix = i0 >> LogWL + if (ix == cacheIndex || ix == cacheIndex+1) { + val i = scanLong(if (ix == cacheIndex) cache0 else cache1, i0 & (WordLength - 1)) + if (i >= 0) { + i0 = (i0 & ~(WordLength - 1)) | i + found = (i0 < iN) + found + } + else { + i0 = (i0 & ~(WordLength - 1)) + WordLength + findNext() + } + } + else if (underlying eq null) { + i0 = iN + found = false + found + } + else { + cacheIndex = ix + cache0 = underlying.word(cacheIndex) + cache1 = if ((iN - 1) >> LogWL == ix) -1L else underlying.word(cacheIndex+1) + findNext() + } + } + + def semiclone(half: Int): BitSetStepper = + if (underlying == null) { + val ans = new BitSetStepper(null, cache0, cache1, i0, half, cacheIndex) + ans.found = found + i0 = half + found = false + ans + } + else { + // Set up new stepper + val ixNewN = (half - 1) >> LogWL + val ans = + new BitSetStepper(if (ixNewN <= cacheIndex + 1) null else underlying, cache0, cache1, i0, half, cacheIndex) + if (found) ans.found = true + + // Advance old stepper to breakpoint + val ixOld0 = half >> LogWL + if (ixOld0 > cacheIndex + 1) { + cache0 = underlying.word(ixOld0) + cache1 = if (((iN - 1) >> LogWL) == 
ixOld0) -1L else underlying.word(ixOld0+1) + cacheIndex = ixOld0 + i0 = half + found = false + } + + // Return new stepper + ans + } + + @annotation.tailrec + private[this] def scanLong(bits: Long, from: Int): Int = + if (from >= WordLength) -1 + else if ((bits & (1L << from)) != 0) from + else scanLong(bits, from + 1) + + def nextStep(): Int = + if (found || findNext()) { + found = false + val ans = i0 + i0 += 1 + ans + } + else Stepper.throwNSEE() +} + +private[collection] object BitSetStepper { + def from(bs: scala.collection.BitSetOps[_]): IntStepper with EfficientSplit = + new BitSetStepper( + if (bs.nwords <= 2) null else bs, + if (bs.nwords <= 0) -1L else bs.word(0), + if (bs.nwords <= 1) -1L else bs.word(1), + 0, + bs.nwords * BitSetOps.WordLength, + 0 + ) +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/ChampStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/ChampStepper.scala new file mode 100644 index 000000000000..12fb471ea768 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/ChampStepper.scala @@ -0,0 +1,246 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import scala.collection.Stepper.EfficientSplit +import scala.collection._ +import scala.collection.immutable.Node +// import language.experimental.captureChecking // TODO enable + +/** A stepper that is a slightly elaborated version of the ChampBaseIterator; + * the main difference is that it knows when it should stop instead of running + * to the end of all trees. + */ +private[collection] abstract class ChampStepperBase[ + A, T <: Node[T], Sub >: Null, Semi <: Sub with ChampStepperBase[A, T, _, _] +](protected var maxSize: Int) +extends EfficientSplit { + import Node.MaxDepth + + // Much of this code is identical to ChampBaseIterator. If you change that, look here too! + + protected var currentValueCursor: Int = 0 + protected var currentValueLength: Int = 0 + protected var currentValueNode: T = _ + + private var currentStackLevel: Int = -1 + private var nodeCursorsAndLengths: Array[Int] = _ + private var nodes: Array[T] = _ + + private def initNodes(): Unit = { + if (nodeCursorsAndLengths eq null) { + nodeCursorsAndLengths = new Array[Int](MaxDepth * 2) + nodes = new Array[Node[T]](MaxDepth).asInstanceOf[Array[T]] + } + } + def initRoot(rootNode: T): Unit = { + if (rootNode.hasNodes) pushNode(rootNode) + if (rootNode.hasPayload) setupPayloadNode(rootNode) + } + + private final def setupPayloadNode(node: T): Unit = { + currentValueNode = node + currentValueCursor = 0 + currentValueLength = node.payloadArity + } + + private final def pushNode(node: T): Unit = { + initNodes() + currentStackLevel = currentStackLevel + 1 + + val cursorIndex = currentStackLevel * 2 + val lengthIndex = currentStackLevel * 2 + 1 + + nodes(currentStackLevel) = node + nodeCursorsAndLengths(cursorIndex) = 0 + nodeCursorsAndLengths(lengthIndex) = node.nodeArity + } + + private final def popNode(): Unit = { + currentStackLevel = currentStackLevel - 1 + } + + /** + * Searches for next node that contains payload values, + * and pushes encountered sub-nodes on a stack for depth-first traversal. 
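Worth noting about the `BitSetStepper` just completed: it caches two words (`cache0`, `cache1`) and touches `underlying` only when the cursor leaves them, with `LogWL = 6` and `WordLength = 64`. A small sketch of that index arithmetic (the `locate` helper is invented for illustration):

```scala
import scala.collection.immutable.BitSet

object BitSetStepperDemo {
  // Element n lives in word n >> LogWL, at bit n & (WordLength - 1).
  val LogWL = 6
  val WordLength = 64
  def locate(n: Int): (Int, Int) = (n >> LogWL, n & (WordLength - 1))

  def main(args: Array[String]): Unit = {
    println(locate(5))   // (0,5): served from cache0
    println(locate(70))  // (1,6): served from cache1
    println(locate(130)) // (2,2): forces the cache-reload branch above
    val s = BitSet(1, 5, 64, 130).stepper // built by BitSetStepper.from
    while (s.hasStep) print(s"${s.nextStep()} ") // 1 5 64 130
    println()
  }
}
```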
+ */ + private final def searchNextValueNode(): Boolean = { + while (currentStackLevel >= 0) { + val cursorIndex = currentStackLevel * 2 + val lengthIndex = currentStackLevel * 2 + 1 + + val nodeCursor = nodeCursorsAndLengths(cursorIndex) + val nodeLength = nodeCursorsAndLengths(lengthIndex) + + if (nodeCursor < nodeLength) { + nodeCursorsAndLengths(cursorIndex) += 1 + + val nextNode = nodes(currentStackLevel).getNode(nodeCursor) + + if (nextNode.hasNodes) { pushNode(nextNode) } + if (nextNode.hasPayload) { setupPayloadNode(nextNode) ; return true } + } else { + popNode() + } + } + false + } + + def characteristics: Int = 0 + + def estimateSize: Long = if (hasStep) maxSize else 0L + + def semiclone(): Semi + + final def hasStep: Boolean = maxSize > 0 && { + val ans = (currentValueCursor < currentValueLength) || searchNextValueNode() + if (!ans) maxSize = 0 + ans + } + + final def trySplit(): Sub = + if (!hasStep) null + else { + var fork = 0 + while (fork <= currentStackLevel && nodeCursorsAndLengths(2*fork) >= nodeCursorsAndLengths(2*fork + 1)) fork += 1 + if (fork > currentStackLevel && currentValueCursor > currentValueLength -2) null + else { + val semi = semiclone() + semi.maxSize = maxSize + semi.currentValueCursor = currentValueCursor + semi.currentValueNode = currentValueNode + if (fork > currentStackLevel) { + // Just need to finish the current node + semi.currentStackLevel = -1 + val i = (currentValueCursor + currentValueLength) >>> 1 + semi.currentValueLength = i + currentValueCursor = i + } + else { + // Need (at least some of) the full stack, so make an identical copy + semi.nodeCursorsAndLengths = java.util.Arrays.copyOf(nodeCursorsAndLengths, nodeCursorsAndLengths.length) + semi.nodes = java.util.Arrays.copyOf(nodes.asInstanceOf[Array[Node[T]]], nodes.length).asInstanceOf[Array[T]] + semi.currentStackLevel = currentStackLevel + semi.currentValueLength = currentValueLength + + // Split the top level of the stack where there's still something to split + // Could make this more efficient by duplicating code from searchNextValueNode + // instead of setting up for it to run normally. But splits tend to be rare, + // so it's not critically important. + // + // Note that this split can be kind of uneven; if we knew how many child nodes there + // were we could do better. 
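+          // (Concretely: the clone keeps the current payload node, copies of the
+          // deeper stack levels, and the first half of the unvisited children at
+          // level `fork`; this stepper marks its deeper levels exhausted and
+          // resumes with the second half of the children at `fork`.)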
+ val i = (nodeCursorsAndLengths(2*fork) + nodeCursorsAndLengths(2*fork + 1)) >>> 1 + semi.nodeCursorsAndLengths(2*fork + 1) = i + var j = currentStackLevel + while (j > fork) { + nodeCursorsAndLengths(2*j) = nodeCursorsAndLengths(2*j + 1) + j -= 1 + } + nodeCursorsAndLengths(2*fork) = i + searchNextValueNode() + } + semi + } + } +} + + +private[collection] final class AnyChampStepper[A, T >: Null <: Node[T]](_maxSize: Int, protected val extract: (T, Int) => A) +extends ChampStepperBase[A, T, AnyStepper[A], AnyChampStepper[A, T]](_maxSize) +with AnyStepper[A] { + def nextStep(): A = + if (hasStep) { + val ans = extract(currentValueNode, currentValueCursor) + currentValueCursor += 1 + maxSize -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(): AnyChampStepper[A, T] = new AnyChampStepper[A, T](0, extract) +} +private[collection] object AnyChampStepper { + def from[A, T >: Null <: Node[T]](maxSize: Int, root: T, extract: (T, Int) => A): AnyChampStepper[A, T] = { + val ans = new AnyChampStepper[A, T](maxSize, extract) + ans.initRoot(root) + ans + } +} + +private[collection] final class DoubleChampStepper[T >: Null <: Node[T]](_maxSize: Int, protected val extract: (T, Int) => Double) +extends ChampStepperBase[Double, T, DoubleStepper, DoubleChampStepper[T]](_maxSize) +with DoubleStepper { + def nextStep(): Double = + if (hasStep) { + val ans = extract(currentValueNode, currentValueCursor) + currentValueCursor += 1 + maxSize -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(): DoubleChampStepper[T] = new DoubleChampStepper[T](0, extract) +} +private[collection] object DoubleChampStepper { + def from[T >: Null <: Node[T]](maxSize: Int, root: T, extract: (T, Int) => Double): DoubleChampStepper[T] = { + val ans = new DoubleChampStepper[T](maxSize, extract) + ans.initRoot(root) + ans + } +} + +private[collection] final class IntChampStepper[T >: Null <: Node[T]](_maxSize: Int, protected val extract: (T, Int) => Int) +extends ChampStepperBase[Int, T, IntStepper, IntChampStepper[T]](_maxSize) +with IntStepper { + def nextStep(): Int = + if (hasStep) { + val ans = extract(currentValueNode, currentValueCursor) + currentValueCursor += 1 + maxSize -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(): IntChampStepper[T] = new IntChampStepper[T](0, extract) +} +private[collection] object IntChampStepper { + def from[T >: Null <: Node[T]](maxSize: Int, root: T, extract: (T, Int) => Int): IntChampStepper[T] = { + val ans = new IntChampStepper[T](maxSize, extract) + ans.initRoot(root) + ans + } +} + +private[collection] final class LongChampStepper[T >: Null <: Node[T]](_maxSize: Int, protected val extract: (T, Int) => Long) +extends ChampStepperBase[Long, T, LongStepper, LongChampStepper[T]](_maxSize) +with LongStepper { + def nextStep(): Long = + if (hasStep) { + val ans = extract(currentValueNode, currentValueCursor) + currentValueCursor += 1 + maxSize -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(): LongChampStepper[T] = new LongChampStepper[T](0, extract) +} +private[collection] object LongChampStepper { + def from[T >: Null <: Node[T]](maxSize: Int, root: T, extract: (T, Int) => Long): LongChampStepper[T] = { + val ans = new LongChampStepper[T](maxSize, extract) + ans.initRoot(root) + ans + } +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/InOrderStepperBase.scala b/tests/pos-special/stdlib/collection/convert/impl/InOrderStepperBase.scala new file mode 100644 index 000000000000..7140c7d673d0 --- /dev/null +++ 
b/tests/pos-special/stdlib/collection/convert/impl/InOrderStepperBase.scala @@ -0,0 +1,54 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import java.util.Spliterator + +import scala.collection.Stepper.EfficientSplit +// import language.experimental.captureChecking // TODO enable + +/** Abstracts all the generic operations of stepping over a collection + * that has an indexable ordering but may have gaps. + * + * For collections that are guaranteed to not have gaps, use `IndexedStepperBase` instead. + */ +private[convert] abstract class InOrderStepperBase[Sub >: Null, Semi <: Sub](protected var i0: Int, protected var iN: Int) +extends EfficientSplit { + /** Set `true` if the element at `i0` is known to be there. `false` if either not known or is a gap. + */ + protected def found: Boolean + + /** Advance `i0` over any gaps, updating internal state so `found` is correct at the new position. + * Returns the new value of `found`. + */ + protected def findNext(): Boolean + + protected def semiclone(half: Int): Semi + + final def hasStep: Boolean = found || findNext() + + def characteristics: Int = Spliterator.ORDERED + + def estimateSize: Long = iN - i0 + + def trySplit(): Sub = { + if (iN-1 > i0) { + val half = (i0 + iN) >>> 1 + val ans = semiclone(half) + i0 = half + ans + } + else null + } +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/IndexedSeqStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/IndexedSeqStepper.scala new file mode 100644 index 000000000000..1e2983fde50d --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/IndexedSeqStepper.scala @@ -0,0 +1,45 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
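The `found`/`findNext()` contract is easiest to see on a toy stepper over an array with gaps. A minimal sketch following the same protocol (class and names invented for illustration):

```scala
// Steps over non-null slots of an array; null marks a gap, mirroring the
// found/findNext protocol of InOrderStepperBase.
final class GappyArrayStepper(a: Array[String], private var i0: Int) {
  private var found = false
  private def findNext(): Boolean = {
    while (!found && i0 < a.length) {
      if (a(i0) != null) found = true else i0 += 1
    }
    found
  }
  def hasStep: Boolean = found || findNext()
  def nextStep(): String =
    if (hasStep) { found = false; val ans = a(i0); i0 += 1; ans }
    else throw new NoSuchElementException("GappyArrayStepper")
}

object GappyArrayDemo {
  def main(args: Array[String]): Unit = {
    val s = new GappyArrayStepper(Array("a", null, null, "b", "c", null), 0)
    while (s.hasStep) print(s"${s.nextStep()} ") // a b c
    println()
  }
}
```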
+ */ + +package scala.collection.convert +package impl + +import scala.collection._ +// import language.experimental.captureChecking // TODO enable + +private[collection] class AnyIndexedSeqStepper[A](underlying: collection.IndexedSeqOps[A, AnyConstr, _], _i0: Int, _iN: Int) + extends IndexedStepperBase[AnyStepper[A], AnyIndexedSeqStepper[A]](_i0, _iN) + with AnyStepper[A] { + def nextStep(): A = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): AnyIndexedSeqStepper[A] = new AnyIndexedSeqStepper[A](underlying, i0, half) +} + +private[collection] class DoubleIndexedSeqStepper[CC <: collection.IndexedSeqOps[Double, AnyConstr, _]](underlying: CC, _i0: Int, _iN: Int) + extends IndexedStepperBase[DoubleStepper, DoubleIndexedSeqStepper[CC]](_i0, _iN) + with DoubleStepper { + def nextStep(): Double = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): DoubleIndexedSeqStepper[CC] = new DoubleIndexedSeqStepper[CC](underlying, i0, half) +} + +private[collection] class IntIndexedSeqStepper[CC <: collection.IndexedSeqOps[Int, AnyConstr, _]](underlying: CC, _i0: Int, _iN: Int) + extends IndexedStepperBase[IntStepper, IntIndexedSeqStepper[CC]](_i0, _iN) + with IntStepper { + def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): IntIndexedSeqStepper[CC] = new IntIndexedSeqStepper[CC](underlying, i0, half) +} + +private[collection] class LongIndexedSeqStepper[CC <: collection.IndexedSeqOps[Long, AnyConstr, _]](underlying: CC, _i0: Int, _iN: Int) + extends IndexedStepperBase[LongStepper, LongIndexedSeqStepper[CC]](_i0, _iN) + with LongStepper { + def nextStep(): Long = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): LongIndexedSeqStepper[CC] = new LongIndexedSeqStepper[CC](underlying, i0, half) +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/IndexedStepperBase.scala b/tests/pos-special/stdlib/collection/convert/impl/IndexedStepperBase.scala new file mode 100644 index 000000000000..cae3809ab077 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/IndexedStepperBase.scala @@ -0,0 +1,41 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
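Which of the four variants above is instantiated is decided by the implicit `StepperShape`, so primitive elements are stepped unboxed. A small sketch, assuming the standard 2.13+ collections API:

```scala
import scala.collection.{IntStepper, Stepper}

object StepperShapeDemo {
  def main(args: Array[String]): Unit = {
    // For Int elements the implicit StepperShape selects the Int variant,
    // so nextStep() returns an unboxed Int.
    val s: IntStepper with Stepper.EfficientSplit = Vector(1, 2, 3).stepper
    var sum = 0
    while (s.hasStep) sum += s.nextStep()
    println(sum) // 6
  }
}
```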
+ */ + +package scala.collection.convert +package impl + +import java.util.Spliterator + +import scala.collection.Stepper.EfficientSplit +// import language.experimental.captureChecking // TODO enable + +/** Abstracts all the generic operations of stepping over an indexable collection */ +private[convert] abstract class IndexedStepperBase[Sub >: Null, Semi <: Sub](protected var i0: Int, protected var iN: Int) + extends EfficientSplit { + protected def semiclone(half: Int): Semi + + def hasStep: Boolean = i0 < iN + + def characteristics: Int = Spliterator.ORDERED + Spliterator.SIZED + Spliterator.SUBSIZED + + def estimateSize: Long = iN - i0 + + def trySplit(): Sub = { + if (iN-1 > i0) { + val half = (i0+iN) >>> 1 + val ans = semiclone(half) + i0 = half + ans + } + else null + } +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/IteratorStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/IteratorStepper.scala new file mode 100644 index 000000000000..393e988959eb --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/IteratorStepper.scala @@ -0,0 +1,130 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import java.util.Spliterator + +import scala.collection.{AnyStepper, DoubleStepper, IntStepper, LongStepper, Stepper} +import scala.jdk.{AnyAccumulator, DoubleAccumulator, IntAccumulator, LongAccumulator} +// import language.experimental.captureChecking // TODO enable + +private[collection] class AnyIteratorStepper[A](_underlying: Iterator[A]) + extends IteratorStepperBase[A, AnyStepper[A], AnyIteratorStepper[A]](_underlying) + with AnyStepper[A] { + protected def semiclone(): AnyIteratorStepper[A] = new AnyIteratorStepper(null) + + def nextStep(): A = if (proxied ne null) proxied.nextStep() else underlying.next() + + def trySplit(): AnyStepper[A] = if (proxied ne null) proxied.trySplit() else { + val acc = new AnyAccumulator[A] + var i = 0 + val n = nextChunkSize & 0xFFFFFFFC + while (i < n && underlying.hasNext) { acc += underlying.next(); i += 1 } + if (i < n || !underlying.hasNext) { + proxied = acc.stepper + proxied.trySplit() + } + else { + val ans = semiclone() + ans.proxied = acc.stepper + nextChunkSize = if ((nextChunkSize&3) == 3) { if (n < 0x40000000) n*2 else n } else nextChunkSize + 1 + ans + } + } +} + +private[collection] class DoubleIteratorStepper(_underlying: Iterator[Double]) + extends IteratorStepperBase[Double, DoubleStepper, DoubleIteratorStepper](_underlying) + with DoubleStepper { + protected def semiclone(): DoubleIteratorStepper = new DoubleIteratorStepper(null) + + def nextStep(): Double = if (proxied ne null) proxied.nextStep() else underlying.next() + + def trySplit(): DoubleStepper = if (proxied ne null) proxied.trySplit() else { + val acc = new DoubleAccumulator + var i = 0 + val n = nextChunkSize & 0xFFFFFFFC + while (i < n && underlying.hasNext) { acc += underlying.next(); i += 1 } + if (i < n || !underlying.hasNext) { + proxied = acc.stepper + proxied.trySplit() + } + else { + val ans = semiclone() + ans.proxied = acc.stepper + nextChunkSize = if ((nextChunkSize&3) == 3) { if (n < 0x40000000) n*2 else n } else nextChunkSize + 1 + ans + } + } +} + +private[collection] class IntIteratorStepper(_underlying: Iterator[Int]) 
+ extends IteratorStepperBase[Int, IntStepper, IntIteratorStepper](_underlying) + with IntStepper { + protected def semiclone(): IntIteratorStepper = new IntIteratorStepper(null) + + def nextStep(): Int = if (proxied ne null) proxied.nextStep() else underlying.next() + + def trySplit(): IntStepper = if (proxied ne null) proxied.trySplit() else { + val acc = new IntAccumulator + var i = 0 + val n = nextChunkSize & 0xFFFFFFFC + while (i < n && underlying.hasNext) { acc += underlying.next(); i += 1 } + if (i < n || !underlying.hasNext) { + proxied = acc.stepper + proxied.trySplit() + } + else { + val ans = semiclone() + ans.proxied = acc.stepper + nextChunkSize = if ((nextChunkSize&3) == 3) { if (n < 0x40000000) n*2 else n } else nextChunkSize + 1 + ans + } + } +} + +private[collection] class LongIteratorStepper(_underlying: Iterator[Long]) + extends IteratorStepperBase[Long, LongStepper, LongIteratorStepper](_underlying) + with LongStepper { + protected def semiclone(): LongIteratorStepper = new LongIteratorStepper(null) + + def nextStep(): Long = if (proxied ne null) proxied.nextStep() else underlying.next() + + def trySplit(): LongStepper = if (proxied ne null) proxied.trySplit() else { + val acc = new LongAccumulator + var i = 0 + val n = nextChunkSize & 0xFFFFFFFC + while (i < n && underlying.hasNext) { acc += underlying.next(); i += 1 } + if (i < n || !underlying.hasNext) { + proxied = acc.stepper + proxied.trySplit() + } + else { + val ans = semiclone() + ans.proxied = acc.stepper + nextChunkSize = if ((nextChunkSize&3) == 3) { if (n < 0x40000000) n*2 else n } else nextChunkSize + 1 + ans + } + } +} + +/** Common functionality for Steppers that step through an Iterator, caching the results as needed when a split is requested. */ +private[convert] abstract class IteratorStepperBase[A, SP >: Null <: Stepper[A], Semi <: SP](final protected val underlying: Iterator[A]) { + final protected var nextChunkSize = 16 + final protected var proxied: SP = null + protected def semiclone(): Semi // Must initialize with null iterator! + def characteristics: Int = if (proxied ne null) Spliterator.ORDERED | Spliterator.SIZED | Spliterator.SUBSIZED else Spliterator.ORDERED + def estimateSize: Long = if (proxied ne null) proxied.estimateSize else Long.MaxValue + def hasStep: Boolean = if (proxied ne null) proxied.hasStep else underlying.hasNext +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/NumericRangeStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/NumericRangeStepper.scala new file mode 100644 index 000000000000..7c122f901839 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/NumericRangeStepper.scala @@ -0,0 +1,39 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
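Iterators cannot split directly, so the iterator steppers above buffer a chunk into an accumulator when `trySplit()` is called, starting at 16 elements (`nextChunkSize` masked to a multiple of 4) and roughly doubling every fourth split. A quick check of that behaviour, assuming the standard API:

```scala
object IteratorSplitDemo {
  def main(args: Array[String]): Unit = {
    val st = Iterator.from(1).take(100).stepper // iterator-backed IntStepper
    val chunk = st.trySplit()                   // buffers the first 16 elements
    var n = 0
    while (chunk.hasStep) { chunk.nextStep(); n += 1 }
    println(n) // 16
    var rest = 0
    while (st.hasStep) { st.nextStep(); rest += 1 }
    println(rest) // 84
  }
}
```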
+ */ + +package scala.collection.convert +package impl + +import scala.collection.{AnyStepper, IntStepper, LongStepper, Stepper} +import scala.collection.immutable.NumericRange +// import language.experimental.captureChecking // TODO enable + +private[collection] class AnyNumericRangeStepper[A](underlying: NumericRange[A], _i0: Int, _iN: Int) +extends IndexedStepperBase[AnyStepper[A], AnyNumericRangeStepper[A]](_i0, _iN) +with AnyStepper[A] { + def nextStep(): A = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + def semiclone(half: Int) = new AnyNumericRangeStepper[A](underlying, i0, half) +} + +private[collection] class IntNumericRangeStepper(underlying: NumericRange[Int], _i0: Int, _iN: Int) +extends IndexedStepperBase[IntStepper, IntNumericRangeStepper](_i0, _iN) +with IntStepper { + def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + def semiclone(half: Int) = new IntNumericRangeStepper(underlying, i0, half) +} + +private[collection] class LongNumericRangeStepper(underlying: NumericRange[Long], _i0: Int, _iN: Int) +extends IndexedStepperBase[LongStepper, LongNumericRangeStepper](_i0, _iN) +with LongStepper { + def nextStep(): Long = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + def semiclone(half: Int) = new LongNumericRangeStepper(underlying, i0, half) +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/RangeStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/RangeStepper.scala new file mode 100644 index 000000000000..50ab623a014e --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/RangeStepper.scala @@ -0,0 +1,41 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import scala.collection.{IntStepper, Stepper} +// import language.experimental.captureChecking // TODO enable + +/** Implements Stepper on an integer Range. You don't actually need the Range to do this, + * so only the relevant parts are included. Because the arguments are protected, they are + * not error-checked; `Range` is required to provide valid arguments. + */ +private[collection] final class RangeStepper(protected var myNext: Int, myStep: Int, _i0: Int, _iN: Int) +extends IndexedStepperBase[IntStepper, RangeStepper](_i0, _iN) +with IntStepper { + def nextStep(): Int = + if (hasStep) { + val ans = myNext + myNext += myStep + i0 += 1 + ans + } + else Stepper.throwNSEE() + protected def semiclone(half: Int): RangeStepper = new RangeStepper(myNext, myStep, i0, half) + override def trySplit(): IntStepper = { + val old_i0 = i0 + val ans = super.trySplit() + myNext += (i0 - old_i0) * myStep + ans + } +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/StringStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/StringStepper.scala new file mode 100644 index 000000000000..fe127b857c45 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/StringStepper.scala @@ -0,0 +1,59 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
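`RangeStepper` stores the next value directly, so its `trySplit()` override must advance `myNext` past the elements handed to the returned half. A quick check, assuming the standard API:

```scala
object RangeSplitDemo {
  def main(args: Array[String]): Unit = {
    val s = (10 until 20).stepper // a RangeStepper: IntStepper with EfficientSplit
    val left = s.trySplit()       // first half: indices 0..4, values 10..14
    while (left.hasStep) print(s"${left.nextStep()} ") // 10 11 12 13 14
    println()
    while (s.hasStep) print(s"${s.nextStep()} ")       // 15 16 17 18 19
    println()
  }
}
```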
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import java.lang.Character.{charCount, isLowSurrogate} +import java.util.Spliterator + +import scala.collection.Stepper.EfficientSplit +import scala.collection.{IntStepper, Stepper} +// import language.experimental.captureChecking // TODO enable + +/** Implements `Stepper` on a `String` where you step through chars packed into `Int`. + */ +private[collection] final class CharStringStepper(underlying: String, _i0: Int, _iN: Int) +extends IndexedStepperBase[IntStepper, CharStringStepper](_i0, _iN) +with IntStepper { + def nextStep(): Int = + if (hasStep) { val j = i0; i0 += 1; underlying.charAt(j) } + else Stepper.throwNSEE() + + def semiclone(half: Int): CharStringStepper = new CharStringStepper(underlying, i0, half) +} + +/** Implements `Stepper` on a `String` where you step through code points. + */ +private[collection] final class CodePointStringStepper(underlying: String, private var i0: Int, private var iN: Int) +extends IntStepper with EfficientSplit { + def characteristics: Int = Spliterator.IMMUTABLE | Spliterator.NONNULL | Spliterator.ORDERED + def estimateSize: Long = iN - i0 + def hasStep: Boolean = i0 < iN + def nextStep(): Int = { + if (hasStep) { + val cp = underlying.codePointAt(i0) + i0 += charCount(cp) + cp + } + else Stepper.throwNSEE() + } + def trySplit(): CodePointStringStepper = + if (iN - 3 > i0) { + var half = (i0 + iN) >>> 1 + if (isLowSurrogate(underlying.charAt(half))) half -= 1 + val ans = new CodePointStringStepper(underlying, i0, half) + i0 = half + ans + } + else null +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/TableStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/TableStepper.scala new file mode 100644 index 000000000000..6329d83bc2a0 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/TableStepper.scala @@ -0,0 +1,139 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
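The two string steppers differ exactly on surrogate pairs, and both are reachable from `StringOps` as `charStepper` and `codePointStepper` in the standard 2.13+ API:

```scala
object StringStepperDemo {
  def main(args: Array[String]): Unit = {
    val s = "a\uD83D\uDE00b" // "a😀b": the emoji is one code point, two chars
    var chars = 0
    val cs = s.charStepper
    while (cs.hasStep) { cs.nextStep(); chars += 1 }
    var codePoints = 0
    val ps = s.codePointStepper
    while (ps.hasStep) { ps.nextStep(); codePoints += 1 }
    println(s"$chars chars, $codePoints code points") // 4 chars, 3 code points
  }
}
```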
+ */ + +package scala.collection.convert +package impl + +import scala.collection.Stepper.EfficientSplit +import scala.collection._ +// import language.experimental.captureChecking // TODO enable + +private[collection] abstract class TableStepperBase[A, I >: Null <: AnyRef, Sub >: Null, Semi <: Sub with TableStepperBase[A, I, _, _]]( + protected var maxLength: Int, protected val table: Array[I], protected var i0: Int, protected val iN: Int +) +extends EfficientSplit { + // Always holds table(i0); if `null` it is time to switch to the next element + protected var myCurrent: I = if (i0 < iN) table(i0) else null + + // Only call this when `myCurrent` is null (meaning we need to advance) + @annotation.tailrec + protected final def findNextCurrent(): Boolean = + if (i0 < iN) { + i0 += 1 + if (i0 >= iN) false + else { + myCurrent = table(i0) + if (myCurrent eq null) findNextCurrent() + else true + } + } + else false + + protected def semiclone(half: Int): Semi + + def characteristics: Int = 0 + + def estimateSize: Long = if (!hasStep) { maxLength = 0; 0 } else maxLength + + def hasStep: Boolean = (myCurrent ne null) || findNextCurrent() + + def trySplit(): Sub = { + if (iN-1 > i0 && maxLength > 0) { + val half = (i0 + iN) >>> 1 + val ans = semiclone(half) + ans.myCurrent = myCurrent + myCurrent = table(half) + var inLeft = if (ans.myCurrent ne null) 1 else 0 + var inRight = if (myCurrent ne null) 1 else 0 + if (iN - i0 < 32) { + var i = i0+1 + while (i < half && (table(i) ne null)) { i += 1; inLeft += 1 } + i = half+1 + while (i < iN && (table(i) ne null)) { i += 1; inRight += 1 } + } + maxLength -= inLeft + ans.maxLength -= inRight + i0 = half + ans + } + else null + } +} + + +private[collection] final class AnyTableStepper[A, I >: Null <: AnyRef]( + _maxLength: Int, _table: Array[I], iterate: I => I, extract: I => A, _i0: Int, _iN: Int +) +extends TableStepperBase[A, I, AnyStepper[A], AnyTableStepper[A, I]](_maxLength, _table, _i0, _iN) +with AnyStepper[A] { + def nextStep(): A = + if (hasStep) { + val ans = extract(myCurrent) + myCurrent = iterate(myCurrent) + ans + } + else Stepper.throwNSEE() + + def semiclone(half: Int): AnyTableStepper[A, I] = new AnyTableStepper[A, I](maxLength, table, iterate, extract, i0, half) +} + + +private[collection] final class DoubleTableStepper[I >: Null <: AnyRef]( + _maxLength: Int, _table: Array[I], iterate: I => I, extract: I => Double, _i0: Int, _iN: Int +) +extends TableStepperBase[Double, I, DoubleStepper, DoubleTableStepper[I]](_maxLength, _table, _i0, _iN) +with DoubleStepper { + def nextStep(): Double = + if (hasStep) { + val ans = extract(myCurrent) + myCurrent = iterate(myCurrent) + ans + } + else Stepper.throwNSEE() + + def semiclone(half: Int): DoubleTableStepper[I] = new DoubleTableStepper[I](maxLength, table, iterate, extract, i0, half) +} + + +private[collection] final class IntTableStepper[I >: Null <: AnyRef]( + _maxLength: Int, _table: Array[I], iterate: I => I, extract: I => Int, _i0: Int, _iN: Int +) +extends TableStepperBase[Int, I, IntStepper, IntTableStepper[I]](_maxLength, _table, _i0, _iN) +with IntStepper { + def nextStep(): Int = + if (hasStep) { + val ans = extract(myCurrent) + myCurrent = iterate(myCurrent) + ans + } + else Stepper.throwNSEE() + + def semiclone(half: Int): IntTableStepper[I] = new IntTableStepper[I](maxLength, table, iterate, extract, i0, half) +} + + +private[collection] final class LongTableStepper[I >: Null <: AnyRef]( + _maxLength: Int, _table: Array[I], iterate: I => I, extract: I => Long, _i0: Int, _iN: Int 
+) +extends TableStepperBase[Long, I, LongStepper, LongTableStepper[I]](_maxLength, _table, _i0, _iN) +with LongStepper { + def nextStep(): Long = + if (hasStep) { + val ans = extract(myCurrent) + myCurrent = iterate(myCurrent) + ans + } + else Stepper.throwNSEE() + + def semiclone(half: Int): LongTableStepper[I] = new LongTableStepper[I](maxLength, table, iterate, extract, i0, half) +} + diff --git a/tests/pos-special/stdlib/collection/convert/impl/VectorStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/VectorStepper.scala new file mode 100644 index 000000000000..504e0dac63ea --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/VectorStepper.scala @@ -0,0 +1,132 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import scala.collection._ +// import language.experimental.captureChecking // TODO enable + +private[convert] abstract class VectorStepperBase[Sub >: Null, Semi <: Sub]( + _i0: Int, + _iN: Int, + protected val displayN: Int, + protected val trunk: Array[AnyRef] +) +extends IndexedStepperBase[Sub, Semi](_i0, _iN) { + protected var index: Int = 32 // Force an advanceData on the first element + protected var leaves: Array[AnyRef] = null + protected var index1: Int = 32 // Force advanceData to defer to initTo on the first element + protected var twigs: Array[AnyRef] = null + + protected final def advanceData(iX: Int): Unit = { + index1 += 1 + if (index1 >= 32) initTo(iX) + else { + leaves = twigs(index1).asInstanceOf[Array[AnyRef]] + index = 0 + } + } + protected final def initTo(iX: Int): Unit = displayN match { + case 0 => + leaves = trunk + index = iX + case 1 => + twigs = trunk + index1 = iX >>> 5 + leaves = twigs(index1).asInstanceOf[Array[AnyRef]] + index = iX & 0x1F + case _ => + var n = displayN + var dataN = trunk + while (n > 2) { + dataN = dataN((iX >> (5*n)) & 0x1F).asInstanceOf[Array[AnyRef]] + n -= 1 + } + twigs = dataN((iX >>> 10) & 0x1F).asInstanceOf[Array[AnyRef]] + index1 = (iX >> 5) & 0x1F + leaves = twigs(index1).asInstanceOf[Array[AnyRef]] + index = iX & 0x1F + } +} + +private[collection] class AnyVectorStepper[A](_i0: Int, _iN: Int, _displayN: Int, _trunk: Array[AnyRef]) +extends VectorStepperBase[AnyStepper[A], AnyVectorStepper[A]](_i0, _iN, _displayN, _trunk) +with AnyStepper[A] { + def nextStep(): A = if (hasStep) { + index += 1 + if (index >= 32) advanceData(i0) + i0 += 1 + leaves(index).asInstanceOf[A] + } else Stepper.throwNSEE() + def semiclone(half: Int): AnyVectorStepper[A] = { + val ans = new AnyVectorStepper[A](i0, half, displayN, trunk) + index = 32 + index1 = 32 + i0 = half + ans + } +} + +private[collection] class DoubleVectorStepper(_i0: Int, _iN: Int, _displayN: Int, _trunk: Array[AnyRef]) +extends VectorStepperBase[DoubleStepper, DoubleVectorStepper](_i0, _iN, _displayN, _trunk) +with DoubleStepper { + def nextStep(): Double = if (hasStep) { + index += 1 + if (index >= 32) advanceData(i0) + i0 += 1 + leaves(index).asInstanceOf[Double] + } else Stepper.throwNSEE() + def semiclone(half: Int): DoubleVectorStepper = { + val ans = new DoubleVectorStepper(i0, half, displayN, trunk) + index = 32 + index1 = 32 + i0 = half + ans + } +} + +private[collection] class IntVectorStepper(_i0: Int, _iN: Int, _displayN: Int, _trunk: 
Array[AnyRef]) +extends VectorStepperBase[IntStepper, IntVectorStepper](_i0, _iN, _displayN, _trunk) +with IntStepper { + def nextStep(): Int = if (hasStep) { + index += 1 + if (index >= 32) advanceData(i0) + i0 += 1 + leaves(index).asInstanceOf[Int] + } else Stepper.throwNSEE() + def semiclone(half: Int): IntVectorStepper = { + val ans = new IntVectorStepper(i0, half, displayN, trunk) + index = 32 + index1 = 32 + i0 = half + ans + } +} + +private[collection] class LongVectorStepper(_i0: Int, _iN: Int, _displayN: Int, _trunk: Array[AnyRef]) +extends VectorStepperBase[LongStepper, LongVectorStepper](_i0, _iN, _displayN, _trunk) +with LongStepper { + def nextStep(): Long = if (hasStep) { + index += 1 + if (index >= 32) advanceData(i0) + i0 += 1 + leaves(index).asInstanceOf[Long] + } else Stepper.throwNSEE() + def semiclone(half: Int): LongVectorStepper = { + val ans = new LongVectorStepper(i0, half, displayN, trunk) + index = 32 + index1 = 32 + i0 = half + ans + } +} diff --git a/tests/pos-special/stdlib/collection/generic/BitOperations.scala b/tests/pos-special/stdlib/collection/generic/BitOperations.scala new file mode 100644 index 000000000000..f76619a004fa --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/BitOperations.scala @@ -0,0 +1,51 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package generic +import language.experimental.captureChecking + + +/** Some bit operations. + * + * See [[https://www.drmaciver.com/2008/08/unsigned-comparison-in-javascala/]] for + * an explanation of unsignedCompare. 
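Before the definitions below, it may help to spell out why the XOR formula behind `unsignedCompare` works: when both operands have the same sign, unsigned order equals signed order; when exactly one is negative, the signed result must be flipped. A quick check against `java.lang.Integer.compareUnsigned`:

```scala
object UnsignedCompareDemo {
  def main(args: Array[String]): Unit = {
    // Same formula as BitOperations.Int.unsignedCompare below.
    def unsignedLt(i: Int, j: Int): Boolean = (i < j) ^ (i < 0) ^ (j < 0)
    val samples = Seq((1, 2), (-1, 1), (Int.MinValue, Int.MaxValue), (3, 3))
    for ((i, j) <- samples) {
      val expected = java.lang.Integer.compareUnsigned(i, j) < 0
      assert(unsignedLt(i, j) == expected)
      println(s"unsignedLt($i, $j) = ${unsignedLt(i, j)}")
    }
  }
}
```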
+ */ +private[collection] object BitOperations { + trait Int { + type Int = scala.Int + def zero(i: Int, mask: Int) = (i & mask) == 0 + def mask(i: Int, mask: Int) = i & (complement(mask - 1) ^ mask) + def hasMatch(key: Int, prefix: Int, m: Int) = mask(key, m) == prefix + def unsignedCompare(i: Int, j: Int) = (i < j) ^ (i < 0) ^ (j < 0) + def shorter(m1: Int, m2: Int) = unsignedCompare(m2, m1) + def complement(i: Int) = (-1) ^ i + def bits(num: Int) = 31 to 0 by -1 map (i => (num >>> i & 1) != 0) + def bitString(num: Int, sep: String = "") = bits(num) map (b => if (b) "1" else "0") mkString sep + def highestOneBit(j: Int) = java.lang.Integer.highestOneBit(j) + } + object Int extends Int + + trait Long { + type Long = scala.Long + def zero(i: Long, mask: Long) = (i & mask) == 0L + def mask(i: Long, mask: Long) = i & (complement(mask - 1) ^ mask) + def hasMatch(key: Long, prefix: Long, m: Long) = mask(key, m) == prefix + def unsignedCompare(i: Long, j: Long) = (i < j) ^ (i < 0L) ^ (j < 0L) + def shorter(m1: Long, m2: Long) = unsignedCompare(m2, m1) + def complement(i: Long) = (-1L) ^ i + def bits(num: Long) = 63L to 0L by -1L map (i => (num >>> i & 1L) != 0L) + def bitString(num: Long, sep: String = "") = bits(num) map (b => if (b) "1" else "0") mkString sep + def highestOneBit(j: Long) = java.lang.Long.highestOneBit(j) + } + object Long extends Long +} diff --git a/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala b/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala new file mode 100644 index 000000000000..e36bb77ebdb8 --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala @@ -0,0 +1,88 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.generic + +import java.io.{ObjectInputStream, ObjectOutputStream} + +import scala.collection.{Factory, Iterable} +import scala.collection.mutable.Builder +import language.experimental.captureChecking + +/** The default serialization proxy for collection implementations. + * + * This class is `final` and requires an extra `Factory` object rather than leaving the details of creating a `Builder` + * to an abstract method that could be implemented by a subclass. This is necessary because the factory is needed + * for deserializing this class's private state, which happens before any subclass fields would be deserialized. Any + * additional state required to create the proper `Builder` needs to be captured by the `factory`. 
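A round-trip sketch for the proxy defined below (object name illustrative): standard collections substitute the proxy in `writeReplace` and are rebuilt through their factory in `readResolve`:

```scala
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

object ProxyRoundTripDemo {
  def main(args: Array[String]): Unit = {
    val original = Vector(1, 2, 3)
    val bytes = new ByteArrayOutputStream
    val out = new ObjectOutputStream(bytes)
    out.writeObject(original) // writeReplace swaps in a DefaultSerializationProxy
    out.close()
    val in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray))
    val copy = in.readObject().asInstanceOf[Vector[Int]] // readResolve rebuilds via the factory
    println(copy == original) // true
  }
}
```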
+ */ +@SerialVersionUID(3L) +final class DefaultSerializationProxy[A](factory: Factory[A, Any], @transient private[this] val coll: Iterable[A]) extends Serializable { + + @transient protected var builder: Builder[A, Any] = _ + + private[this] def writeObject(out: ObjectOutputStream): Unit = { + out.defaultWriteObject() + val k = coll.knownSize + out.writeInt(k) + var count = 0 + coll.foreach { x => + out.writeObject(x) + count += 1 + } + if(k >= 0) { + if(count != k) throw new IllegalStateException(s"Illegal size $count of collection, expected $k") + } else out.writeObject(SerializeEnd) + } + + private[this] def readObject(in: ObjectInputStream): Unit = { + in.defaultReadObject() + builder = factory.newBuilder + val k = in.readInt() + if(k >= 0) { + builder.sizeHint(k) + var count = 0 + while(count < k) { + builder += in.readObject().asInstanceOf[A] + count += 1 + } + } else { + while (true) in.readObject match { + case SerializeEnd => return + case a => builder += a.asInstanceOf[A] + } + } + } + + protected[this] def readResolve(): Any = builder.result() +} + +@SerialVersionUID(3L) +private[collection] case object SerializeEnd + +/** Mix-in trait to enable DefaultSerializationProxy for the standard collection types. Depending on the type + * it is mixed into, it will dynamically choose `iterableFactory`, `mapFactory`, `sortedIterableFactory` or + * `sortedMapFactory` for deserialization into the respective `CC` type. Override `writeReplace` or implement + * it directly without using this trait if you need a non-standard factory or if you want to use a different + * serialization scheme. + */ +trait DefaultSerializable extends Serializable { this: scala.collection.Iterable[_] => + protected[this] def writeReplace(): AnyRef = { + val f: Factory[Any, Any] = this match { + case it: scala.collection.SortedMap[_, _] => it.sortedMapFactory.sortedMapFactory[Any, Any](it.ordering.asInstanceOf[Ordering[Any]]).asInstanceOf[Factory[Any, Any]] + case it: scala.collection.Map[_, _] => it.mapFactory.mapFactory[Any, Any].asInstanceOf[Factory[Any, Any]] + case it: scala.collection.SortedSet[_] => it.sortedIterableFactory.evidenceIterableFactory[Any](it.ordering.asInstanceOf[Ordering[Any]]) + case it => it.iterableFactory.iterableFactory + } + new DefaultSerializationProxy(f, this) + } +} diff --git a/tests/pos-special/stdlib/collection/generic/IsIterable.scala b/tests/pos-special/stdlib/collection/generic/IsIterable.scala new file mode 100644 index 000000000000..c309299b615b --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/IsIterable.scala @@ -0,0 +1,165 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package generic +import language.experimental.captureChecking + +/** A trait which can be used to avoid code duplication when defining extension + * methods that should be applicable both to existing Scala collections (i.e., + * types extending `Iterable`) as well as other (potentially user-defined) + * types that could be converted to a Scala collection type. This trait + * makes it possible to treat Scala collections and types that can be implicitly + * converted to a collection type uniformly. 
For example, one can provide + * extension methods that work both on collection types and on `String`s (`String`s + * do not extend `Iterable`, but can be converted to `Iterable`) + * + * `IsIterable` provides three members: + * + * 1. type member `A`, which represents the element type of the target `Iterable[A]` + * 1. type member `C`, which represents the type returned by transformation operations that preserve the collection’s elements type + * 1. method `apply`, which provides a way to convert between the type we wish to add extension methods to, `Repr`, and `IterableOps[A, Iterable, C]`. + * + * ===Usage=== + * + * One must provide `IsIterable` as an implicit parameter type of an implicit + * conversion. Its usage is shown below. Our objective in the following example + * is to provide a generic extension method `mapReduce` to any type that extends + * or can be converted to `Iterable`. In our example, this includes + * `String`. + * + * {{{ + * import scala.collection.{Iterable, IterableOps} + * import scala.collection.generic.IsIterable + * + * class ExtensionMethods[Repr, I <: IsIterable[Repr]](coll: Repr, it: I) { + * def mapReduce[B](mapper: it.A => B)(reducer: (B, B) => B): B = { + * val iter = it(coll).iterator + * var res = mapper(iter.next()) + * while (iter.hasNext) + * res = reducer(res, mapper(iter.next())) + * res + * } + * } + * + * implicit def withExtensions[Repr](coll: Repr)(implicit it: IsIterable[Repr]): ExtensionMethods[Repr, it.type] = + * new ExtensionMethods(coll, it) + * + * // See it in action! + * List(1, 2, 3).mapReduce(_ * 2)(_ + _) // res0: Int = 12 + * "Yeah, well, you know, that's just, like, your opinion, man.".mapReduce(x => 1)(_ + _) // res1: Int = 59 + *}}} + * + * Here, we begin by creating a class `ExtensionMethods` which contains our + * `mapReduce` extension method. + * + * Note that `ExtensionMethods` takes a constructor argument `coll` of type `Repr`, where + * `Repr` represents (typically) the collection type, and an argument `it` of a subtype of `IsIterable[Repr]`. + * The body of the method starts by converting the `coll` argument to an `IterableOps` in order to + * call the `iterator` method on it. + * The remaining of the implementation is straightforward. + * + * The `withExtensions` implicit conversion makes the `mapReduce` operation available + * on any type `Repr` for which it exists an implicit `IsIterable[Repr]` instance. + * Note how we keep track of the precise type of the implicit `it` argument by using the + * `it.type` singleton type, rather than the wider `IsIterable[Repr]` type. We do that + * so that the information carried by the type members `A` and `C` of the `it` argument + * is not lost. + * + * When the `mapReduce` method is called on some type of which it is not + * a member, implicit search is triggered. Because implicit conversion + * `withExtensions` is generic, it will be applied as long as an implicit + * value of type `IsIterable[Repr]` can be found. Given that the + * `IsIterable` companion object contains implicit members that return values of type + * `IsIterable`, this requirement is typically satisfied, and the chain + * of interactions described in the previous paragraph is set into action. + * (See the `IsIterable` companion object, which contains a precise + * specification of the available implicits.) + * + * ''Note'': Currently, it's not possible to combine the implicit conversion and + * the class with the extension methods into an implicit class due to + * limitations of type inference. 
+ * + * ===Implementing `IsIterable` for New Types=== + * + * One must simply provide an implicit value of type `IsIterable` + * specific to the new type, or an implicit conversion which returns an + * instance of `IsIterable` specific to the new type. + * + * Below is an example of an implementation of the `IsIterable` trait + * where the `Repr` type is `Range`. + * + *{{{ + * implicit val rangeRepr: IsIterable[Range] { type A = Int; type C = IndexedSeq[Int] } = + * new IsIterable[Range] { + * type A = Int + * type C = IndexedSeq[Int] + * def apply(coll: Range): IterableOps[Int, IndexedSeq, IndexedSeq[Int]] = coll + * } + *}}} + * + * (Note that in practice the `IsIterable[Range]` instance is already provided by + * the standard library, and it is defined as an `IsSeq[Range]` instance) + */ +trait IsIterable[Repr] extends IsIterableOnce[Repr] { + + /** The type returned by transformation operations that preserve the same elements + * type (e.g. `filter`, `take`). + * + * In practice, this type is often `Repr` itself, excepted in the case + * of `SeqView[A]` (and other `View[A]` subclasses), where it is “only” `View[A]`. + */ + type C + + @deprecated("'conversion' is now a method named 'apply'", "2.13.0") + override val conversion: Repr => IterableOps[A, Iterable, C] = apply(_) + + /** A conversion from the type `Repr` to `IterableOps[A, Iterable, C]` */ + def apply(coll: Repr): IterableOps[A, Iterable, C] + +} + +object IsIterable extends IsIterableLowPriority { + + // Straightforward case: IterableOps subclasses + implicit def iterableOpsIsIterable[A0, CC0[X] <: IterableOps[X, Iterable, CC0[X]]]: IsIterable[CC0[A0]] { type A = A0; type C = CC0[A0] } = + new IsIterable[CC0[A0]] { + type A = A0 + type C = CC0[A0] + def apply(coll: CC0[A]): IterableOps[A, Iterable, C] = coll + } + + // The `BitSet` type can not be unified with the `CC0` parameter of + // the above definition because it does not take a type parameter. + // Hence the need for a separate case: + implicit def bitSetOpsIsIterable[C0 <: BitSet with BitSetOps[C0]]: IsIterable[C0] { type A = Int; type C = C0 } = + new IsIterable[C0] { + type A = Int + type C = C0 + def apply(coll: C0): IterableOps[Int, Iterable, C0] = coll + } + +} + +trait IsIterableLowPriority { + + // Makes `IsSeq` instances visible in `IsIterable` companion + implicit def isSeqLikeIsIterable[Repr](implicit + isSeqLike: IsSeq[Repr] + ): IsIterable[Repr] { type A = isSeqLike.A; type C = isSeqLike.C } = isSeqLike + + // Makes `IsMap` instances visible in `IsIterable` companion + implicit def isMapLikeIsIterable[Repr](implicit + isMapLike: IsMap[Repr] + ): IsIterable[Repr] { type A = isMapLike.A; type C = isMapLike.C } = isMapLike + +} diff --git a/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala b/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala new file mode 100644 index 000000000000..2836ca2bb520 --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala @@ -0,0 +1,72 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package generic +import language.experimental.captureChecking + +/** Type class witnessing that a collection representation type `Repr` has + * elements of type `A` and has a conversion to `IterableOnce[A]`. + * + * This type enables simple enrichment of `IterableOnce`s with extension + * methods which can make full use of the mechanics of the Scala collections + * framework in their implementation. + * + * Example usage, + * {{{ + * class FilterMapImpl[Repr, I <: IsIterableOnce[Repr]](coll: Repr, it: I) { + * final def filterMap[B, That](f: it.A => Option[B])(implicit bf: BuildFrom[Repr, B, That]): That = { + * val b = bf.newBuilder(coll) + * for(e <- it(coll).iterator) f(e) foreach (b +=) + * b.result() + * } + * } + * implicit def filterMap[Repr](coll: Repr)(implicit it: IsIterableOnce[Repr]): FilterMapImpl[Repr, it.type] = + * new FilterMapImpl(coll, it) + * + * List(1, 2, 3, 4, 5) filterMap (i => if(i % 2 == 0) Some(i) else None) + * // == List(2, 4) + * }}} + */ +trait IsIterableOnce[Repr] { + + /** The type of elements we can traverse over (e.g. `Int`). */ + type A + + @deprecated("'conversion' is now a method named 'apply'", "2.13.0") + val conversion: Repr => IterableOnce[A] = apply(_) + + /** A conversion from the representation type `Repr` to a `IterableOnce[A]`. */ + def apply(coll: Repr): IterableOnce[A] + +} + +object IsIterableOnce extends IsIterableOnceLowPriority { + + // Straightforward case: IterableOnce subclasses + implicit def iterableOnceIsIterableOnce[CC0[A] <: IterableOnce[A], A0]: IsIterableOnce[CC0[A0]] { type A = A0 } = + new IsIterableOnce[CC0[A0]] { + type A = A0 + def apply(coll: CC0[A0]): IterableOnce[A0] = coll + } + +} + +trait IsIterableOnceLowPriority { + + // Makes `IsIterable` instance visible in `IsIterableOnce` companion + implicit def isIterableLikeIsIterableOnce[Repr](implicit + isIterableLike: IsIterable[Repr] + ): IsIterableOnce[Repr] { type A = isIterableLike.A } = isIterableLike + +} diff --git a/tests/pos-special/stdlib/collection/generic/IsMap.scala b/tests/pos-special/stdlib/collection/generic/IsMap.scala new file mode 100644 index 000000000000..ad7254d2dd61 --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/IsMap.scala @@ -0,0 +1,115 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package generic + +import IsMap.Tupled +import scala.collection.immutable.{IntMap, LongMap} +import language.experimental.captureChecking + +/** + * Type class witnessing that a collection type `Repr` + * has keys of type `K`, values of type `V` and has a conversion to + * `MapOps[K, V, Iterable, C]`, for some types `K`, `V` and `C`. + * + * This type enables simple enrichment of `Map`s with extension methods. + * + * @see [[scala.collection.generic.IsIterable]] + * @tparam Repr Collection type (e.g. `Map[Int, String]`) + */ +trait IsMap[Repr] extends IsIterable[Repr] { + + /** The type of keys */ + type K + + /** The type of values */ + type V + + type A = (K, V) + + /** A conversion from the type `Repr` to `MapOps[K, V, Iterable, C]` + * + * @note The third type parameter of the returned `MapOps` value is + * still `Iterable` (and not `Map`) because `MapView[K, V]` only + * extends `MapOps[K, V, View, View[A]]`. 
+ */ + override def apply(c: Repr): MapOps[K, V, Tupled[Iterable]#Ap, C] + +} + +object IsMap { + + /** Convenient type level function that takes a unary type constructor `F[_]` + * and returns a binary type constructor that tuples its parameters and passes + * them to `F`. + * + * `Tupled[F]#Ap` is equivalent to `({ type Ap[X, +Y] = F[(X, Y)] })#Ap`. + */ + type Tupled[F[+_]] = { type Ap[X, Y] = F[(X, Y)] } + + // Map collections + implicit def mapOpsIsMap[CC0[X, Y] <: MapOps[X, Y, Tupled[Iterable]#Ap, CC0[X, Y]], K0, V0]: IsMap[CC0[K0, V0]] { type K = K0; type V = V0; type C = CC0[K, V] } = + new IsMap[CC0[K0, V0]] { + type K = K0 + type V = V0 + type C = CC0[K0, V0] + def apply(c: CC0[K0, V0]): MapOps[K0, V0, Tupled[Iterable]#Ap, C] = c + } + + // MapView + implicit def mapViewIsMap[CC0[X, Y] <: MapView[X, Y], K0, V0]: IsMap[CC0[K0, V0]] { type K = K0; type V = V0; type C = View[(K0, V0)] } = + new IsMap[CC0[K0, V0]] { + type K = K0 + type V = V0 + type C = View[(K, V)] + def apply(c: CC0[K0, V0]): MapOps[K0, V0, Tupled[Iterable]#Ap, View[(K0, V0)]] = c + } + + // AnyRefMap has stricter bounds than the ones used by the mapOpsIsMap definition + implicit def anyRefMapIsMap[K0 <: AnyRef, V0]: IsMap[mutable.AnyRefMap[K0, V0]] { type K = K0; type V = V0; type C = mutable.AnyRefMap[K0, V0] } = + new IsMap[mutable.AnyRefMap[K0, V0]] { + type K = K0 + type V = V0 + type C = mutable.AnyRefMap[K0, V0] + def apply(c: mutable.AnyRefMap[K0, V0]): MapOps[K0, V0, Tupled[Iterable]#Ap, mutable.AnyRefMap[K0, V0]] = c + } + + // IntMap takes one type parameter only whereas mapOpsIsMap uses a parameter CC0 with two type parameters + implicit def intMapIsMap[V0]: IsMap[IntMap[V0]] { type K = Int; type V = V0; type C = IntMap[V0] } = + new IsMap[IntMap[V0]] { + type K = Int + type V = V0 + type C = IntMap[V0] + def apply(c: IntMap[V0]): MapOps[Int, V0, Tupled[Iterable]#Ap, IntMap[V0]] = c + } + + // LongMap is in a similar situation as IntMap + implicit def longMapIsMap[V0]: IsMap[LongMap[V0]] { type K = Long; type V = V0; type C = LongMap[V0] } = + new IsMap[LongMap[V0]] { + type K = Long + type V = V0 + type C = LongMap[V0] + def apply(c: LongMap[V0]): MapOps[Long, V0, Tupled[Iterable]#Ap, LongMap[V0]] = c + } + + // mutable.LongMap is in a similar situation as LongMap and IntMap + implicit def mutableLongMapIsMap[V0]: IsMap[mutable.LongMap[V0]] { type K = Long; type V = V0; type C = mutable.LongMap[V0] } = + new IsMap[mutable.LongMap[V0]] { + type K = Long + type V = V0 + type C = mutable.LongMap[V0] + def apply(c: mutable.LongMap[V0]): MapOps[Long, V0, Tupled[Iterable]#Ap, mutable.LongMap[V0]] = c + } + + +} diff --git a/tests/pos-special/stdlib/collection/generic/IsSeq.scala b/tests/pos-special/stdlib/collection/generic/IsSeq.scala new file mode 100644 index 000000000000..041d74f84d9c --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/IsSeq.scala @@ -0,0 +1,122 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
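Unlike `IsIterable`, `IsMap` ships without a usage example; here is a hedged sketch in the same style (all names invented for illustration):

```scala
import scala.collection.generic.IsMap
import scala.language.implicitConversions

object MapSyntax {
  class MapDecorator[Repr, M <: IsMap[Repr]](coll: Repr, val m: M) {
    // One extension method that works on Map, SortedMap, MapView, IntMap, ...
    def firstKeyOption: Option[m.K] = m(coll).keysIterator.nextOption()
  }

  implicit def withMapDecorator[Repr](coll: Repr)(implicit m: IsMap[Repr]): MapDecorator[Repr, m.type] =
    new MapDecorator(coll, m)
}

object IsMapDemo {
  import MapSyntax._
  def main(args: Array[String]): Unit = {
    println(Map("a" -> 1, "b" -> 2).firstKeyOption) // Some(a)
    println(Map("a" -> 1).view.firstKeyOption)      // works through mapViewIsMap
  }
}
```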
+ */ + +package scala.collection +package generic + +import scala.reflect.ClassTag +import language.experimental.captureChecking +import language.experimental.captureChecking + +/** Type class witnessing that a collection representation type `Repr` has + * elements of type `A` and has a conversion to `SeqOps[A, Iterable, C]`, for + * some types `A` and `C`. + * + * This type enables simple enrichment of `Seq`s with extension methods which + * can make full use of the mechanics of the Scala collections framework in + * their implementation. + * + * @see [[scala.collection.generic.IsIterable]] + */ +trait IsSeq[Repr] extends IsIterable[Repr] { + + @deprecated("'conversion' is now a method named 'apply'", "2.13.0") + override val conversion: Repr => SeqOps[A, Iterable, C] = apply(_) + + /** A conversion from the type `Repr` to `SeqOps[A, Iterable, C]` + * + * @note The second type parameter of the returned `SeqOps` value is + * still `Iterable` (and not `Seq`) because `SeqView[A]` only + * extends `SeqOps[A, View, View[A]]`. + */ + def apply(coll: Repr): SeqOps[A, Iterable, C] +} + +object IsSeq { + + private val seqOpsIsSeqVal: IsSeq[Seq[Any]] = + new IsSeq[Seq[Any]] { + type A = Any + type C = Any + def apply(coll: Seq[Any]): SeqOps[Any, Iterable, Any] = coll + } + + implicit def seqOpsIsSeq[CC0[X] <: SeqOps[X, Iterable, CC0[X]], A0]: IsSeq[CC0[A0]] { type A = A0; type C = CC0[A0] } = + seqOpsIsSeqVal.asInstanceOf[IsSeq[CC0[A0]] { type A = A0; type C = CC0[A0] }] + + /** !!! Under cc, views are not Seqs and can't use SeqOps. + * So this should be renamed to seqViewIsIterable + */ + implicit def seqViewIsSeq[CC0[X] <: SeqView[X], A0]: IsIterable[CC0[A0]] { type A = A0; type C = View[A0] } = + new IsIterable[CC0[A0]] { + type A = A0 + type C = View[A] + def apply(coll: CC0[A0]): IterableOps[A0, View, View[A0]] = coll + } + + /** !!! Under cc, views are not Seqs and can't use SeqOps. 
+ * So this should be renamed to stringViewIsIterable + */ + implicit val stringViewIsSeq: IsIterable[StringView] { type A = Char; type C = View[Char] } = + new IsIterable[StringView] { + type A = Char + type C = View[Char] + def apply(coll: StringView): IterableOps[Char, View, View[Char]] = coll + } + + implicit val stringIsSeq: IsSeq[String] { type A = Char; type C = String } = + new IsSeq[String] { + type A = Char + type C = String + def apply(s: String): SeqOps[Char, immutable.IndexedSeq, String] = + new SeqOps[Char, immutable.ArraySeq, String] { + def length: Int = s.length + def apply(i: Int): Char = s.charAt(i) + def toIterable: Iterable[Char] = new immutable.WrappedString(s) + protected[this] def coll: String = s + protected[this] def fromSpecific(coll: IterableOnce[Char]^): String = coll.iterator.mkString + def iterableFactory: FreeSeqFactory[immutable.ArraySeq] = immutable.ArraySeq.untagged + override def empty: String = "" + protected[this] def newSpecificBuilder: mutable.Builder[Char, String] = new StringBuilder + def iterator: Iterator[Char] = s.iterator + } + } + + implicit def arrayIsSeq[A0 : ClassTag]: IsSeq[Array[A0]] { type A = A0; type C = Array[A0] } = + new IsSeq[Array[A0]] { + type A = A0 + type C = Array[A0] + def apply(a: Array[A0]): SeqOps[A0, Seq, Array[A0]] = + new SeqOps[A, mutable.ArraySeq, Array[A]] { + def apply(i: Int): A = a(i) + def length: Int = a.length + def toIterable: Iterable[A] = mutable.ArraySeq.make[A](a) + protected def coll: Array[A] = a + protected def fromSpecific(coll: IterableOnce[A]^): Array[A] = Array.from(coll) + def iterableFactory: FreeSeqFactory[mutable.ArraySeq] = mutable.ArraySeq.untagged + override def empty: Array[A] = Array.empty[A] + protected def newSpecificBuilder: mutable.Builder[A, Array[A]] = Array.newBuilder + def iterator: Iterator[A] = a.iterator + } + } + + // `Range` can not be unified with the `CC0` parameter of the + // `seqOpsIsSeq` definition because it does not take a type parameter. + // Hence the need for a separate case: + implicit def rangeIsSeq[C0 <: Range]: IsSeq[C0] { type A = Int; type C = immutable.IndexedSeq[Int] } = + new IsSeq[C0] { + type A = Int + type C = immutable.IndexedSeq[Int] + def apply(coll: C0): SeqOps[Int, Seq, immutable.IndexedSeq[Int]] = coll + } + +} diff --git a/tests/pos-special/stdlib/collection/generic/Subtractable.scala b/tests/pos-special/stdlib/collection/generic/Subtractable.scala new file mode 100644 index 000000000000..2c0967dbaf4b --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/Subtractable.scala @@ -0,0 +1,63 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package generic +import language.experimental.captureChecking + +/** This trait represents collection-like objects that can be reduced + * using a '-' operator. It defines variants of `-` and `--` + * as convenience methods in terms of single-element removal `-`. + * + * @tparam A the type of the elements of the $coll. + * @tparam Repr the type of the $coll itself + * @define coll collection + * @define Coll Subtractable + */ +@deprecated("Subtractable is deprecated.
This is now implemented as part of SetOps, MapOps, etc.", "2.13.0") +trait Subtractable[A, +Repr <: Subtractable[A, Repr]] { self => + + /** The representation object of type `Repr` which contains the collection's elements + */ + protected def repr: Repr + + /** Creates a new $coll from this $coll with an element removed. + * @param elem the element to remove + * @return a new collection that contains all elements of the current $coll + * except one less occurrence of `elem`. + */ + def -(elem: A): Repr + + /** Creates a new $coll from this $coll with some elements removed. + * + * This method takes two or more elements to be removed. Another overloaded + * variant of this method handles the case where a single element is + * removed. + * @param elem1 the first element to remove. + * @param elem2 the second element to remove. + * @param elems the remaining elements to remove. + * @return a new $coll that contains all elements of the current $coll + * except one less occurrence of each of the given elements. + */ + def -(elem1: A, elem2: A, elems: A*): Repr = + this - elem1 - elem2 -- elems + + /** Creates a new $coll from this $coll by removing all elements of another + * collection. + * + * @param xs the collection containing the removed elements. + * @return a new $coll that contains all elements of the current $coll + * except one less occurrence of each of the elements of `xs`. + */ + def --(xs: IterableOnce[A]): Repr = (repr /: xs.iterator) (_ - _) +} diff --git a/tests/pos-special/stdlib/collection/generic/package.scala b/tests/pos-special/stdlib/collection/generic/package.scala new file mode 100644 index 000000000000..0ba67c1bf76e --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/package.scala @@ -0,0 +1,35 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +import language.experimental.captureChecking + + +package object generic { + @deprecated("Clearable was moved from collection.generic to collection.mutable", "2.13.0") + type Clearable = scala.collection.mutable.Clearable + + @deprecated("Use scala.collection.BuildFrom instead", "2.13.0") + type CanBuildFrom[-From, -A, +C] = scala.collection.BuildFrom[From, A, C] + + @deprecated("Growable was moved from collection.generic to collection.mutable", "2.13.0") + type Growable[-A] = scala.collection.mutable.Growable[A] + + @deprecated("Shrinkable was moved from collection.generic to collection.mutable", "2.13.0") + type Shrinkable[-A] = scala.collection.mutable.Shrinkable[A] + + @deprecated("Use IsIterable instead", "2.13.0") + type IsTraversableLike[Repr] = IsIterable[Repr] + + @deprecated("Use IsIterableOnce instead", "2.13.0") + type IsTraversableOnce[Repr] = IsIterableOnce[Repr] +} diff --git a/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala b/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala new file mode 100644 index 000000000000..e21490571d29 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala @@ -0,0 +1,691 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0).
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package immutable + +import java.util.Arrays + +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.Stepper.EfficientSplit +import scala.collection.mutable.{ArrayBuffer, ArrayBuilder, Builder, ArraySeq => MutableArraySeq} +import scala.collection.convert.impl._ +import scala.reflect.ClassTag +import scala.runtime.ScalaRunTime +import scala.util.Sorting +import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking + +/** + * An immutable array. + * + * Supports efficient indexed access and has a small memory footprint. + * + * @define coll immutable array + * @define Coll `ArraySeq` + */ +sealed abstract class ArraySeq[+A] + extends AbstractSeq[A] + with IndexedSeq[A] + with IndexedSeqOps[A, ArraySeq, ArraySeq[A]] + with StrictOptimizedSeqOps[A, ArraySeq, ArraySeq[A]] + with EvidenceIterableFactoryDefaults[A, ArraySeq, ClassTag] + with Serializable + with Pure { + + /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. A primitive + * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype + * or subtype of the element type. */ + protected def elemTag: ClassTag[_] + + override def iterableFactory: SeqFactory[ArraySeq] = ArraySeq.untagged + + /** The wrapped mutable `Array` that backs this `ArraySeq`. Any changes to this array will break + * the expected immutability. Its element type does not have to be equal to the element type of this ArraySeq. + * A primitive ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an + * array of a supertype or subtype of the element type. */ + def unsafeArray: Array[_] + + def unsafeArrayAsAnyArray = unsafeArray.asInstanceOf[Array[Any]] + + protected def evidenceIterableFactory: ArraySeq.type = ArraySeq + protected def iterableEvidence: ClassTag[A @uncheckedVariance] = elemTag.asInstanceOf[ClassTag[A]] + + def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit + + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): A + + override def updated[B >: A](index: Int, elem: B): ArraySeq[B] = { + val dest = new Array[Any](length) + Array.copy(unsafeArray, 0, dest, 0, length) + dest(index) = elem + ArraySeq.unsafeWrapArray(dest).asInstanceOf[ArraySeq[B]] + } + + override def map[B](f: A => B): ArraySeq[B] = { + val a = new Array[Any](size) + var i = 0 + while (i < a.length){ + a(i) = f(apply(i)) + i += 1 + } + ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]] + } + + override def prepended[B >: A](elem: B): ArraySeq[B] = + ArraySeq.unsafeWrapArray(unsafeArrayAsAnyArray.prepended(elem)).asInstanceOf[ArraySeq[B]] + + override def appended[B >: A](elem: B): ArraySeq[B] = + ArraySeq.unsafeWrapArray(unsafeArrayAsAnyArray.appended[Any](elem)).asInstanceOf[ArraySeq[B]] + + /** Fast concatenation of two [[ArraySeq]]s. + * + * @return null if optimisation not possible. 
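+ * @note This only works when both backing arrays are object arrays, or both
+ * are primitive arrays of the same element type; on a mismatch this
+ * method returns null and the caller falls back to the generic
+ * builder-based path.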
+ */ + private def appendedAllArraySeq[B >: A](that: ArraySeq[B]): ArraySeq[B] = { + // Optimise concatenation of two ArraySeqs + // For ArraySeqs with sizes of [100, 1000, 10000] this is [3.5, 4.1, 5.2]x as fast + if (isEmpty) + that + else if (that.isEmpty) + this + else { + val thisIsObj = this.unsafeArray.isInstanceOf[Array[AnyRef]] + val thatIsObj = that.unsafeArray.isInstanceOf[Array[AnyRef]] + val mismatch = thisIsObj != thatIsObj + if (mismatch) + // Combining primitives and objects: abort + null + else if (thisIsObj) { + // A and B are objects + val ax = this.unsafeArray.asInstanceOf[Array[A]] + val ay = that.unsafeArray.asInstanceOf[Array[B]] + val len = ax.length + ay.length + val a = new Array[AnyRef](len) + System.arraycopy(ax, 0, a, 0, ax.length) + System.arraycopy(ay, 0, a, ax.length, ay.length) + ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]] + } else { + // A is a primitive and B = A. Use this instance's protected ClassTag. + val ax = this.unsafeArray.asInstanceOf[Array[A]] + val ay = that.unsafeArray.asInstanceOf[Array[A]] + val len = ax.length + ay.length + val a = iterableEvidence.newArray(len) + System.arraycopy(ax, 0, a, 0, ax.length) + System.arraycopy(ay, 0, a, ax.length, ay.length) + ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]] + } + } + } + + override def appendedAll[B >: A](suffix: collection.IterableOnce[B]^): ArraySeq[B] = { + def genericResult = { + val k = suffix.knownSize + if (k == 0) this + else { + val b = ArrayBuilder.make[Any] + if(k >= 0) b.sizeHint(k + unsafeArray.length) + b.addAll(unsafeArray) + b.addAll(suffix) + ArraySeq.unsafeWrapArray(b.result()).asInstanceOf[ArraySeq[B]] + } + } + + suffix match { + case that: ArraySeq[_] => + val result = appendedAllArraySeq(that.asInstanceOf[ArraySeq[B]]) + if (result == null) genericResult + else result + case _ => + genericResult + } + } + + override def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): ArraySeq[B] = { + def genericResult = { + val k = prefix.knownSize + if (k == 0) this + else { + val b = ArrayBuilder.make[Any] + if(k >= 0) b.sizeHint(k + unsafeArray.length) + b.addAll(prefix) + if(k < 0) b.sizeHint(b.length + unsafeArray.length) + b.addAll(unsafeArray) + ArraySeq.unsafeWrapArray(b.result()).asInstanceOf[ArraySeq[B]] + } + } + + prefix match { + case that: ArraySeq[_] => + val result = that.asInstanceOf[ArraySeq[B]].appendedAllArraySeq(this) + if (result == null) genericResult + else result + case _ => + genericResult + } + } + + override def zip[B](that: collection.IterableOnce[B]^): ArraySeq[(A, B)] = + that match { + case bs: ArraySeq[B] => + ArraySeq.tabulate(length min bs.length) { i => + (apply(i), bs(i)) + } + case _ => + strictOptimizedZip[B, ArraySeq[(A, B)]](that, iterableFactory.newBuilder) + } + + private inline def ops[A](xs: Array[A]): ArrayOps[A] = new ArrayOps[A](xs) + + override def take(n: Int): ArraySeq[A] = + if (unsafeArray.length <= n) + this + else + ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).take(n)).asInstanceOf[ArraySeq[A]] + + override def takeRight(n: Int): ArraySeq[A] = + if (unsafeArray.length <= n) + this + else + ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).takeRight(n)).asInstanceOf[ArraySeq[A]] + + override def drop(n: Int): ArraySeq[A] = + if (n <= 0) + this + else + ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).drop(n)).asInstanceOf[ArraySeq[A]] + + override def dropRight(n: Int): ArraySeq[A] = + if (n <= 0) + this + else +
ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).dropRight(n)).asInstanceOf[ArraySeq[A]] + + override def slice(from: Int, until: Int): ArraySeq[A] = + if (from <= 0 && unsafeArray.length <= until) + this + else + ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).slice(from, until)).asInstanceOf[ArraySeq[A]] + + override def foldLeft[B](z: B)(f: (B, A) => B): B = { + // For ArraySeqs with sizes of [100, 1000, 10000] this is [1.3, 1.8, 1.8]x as fast + // as the same while-loop over this instead of unsafeArray. + val array = unsafeArray + var b = z + var i = 0 + while (i < array.length) { + val a = array(i).asInstanceOf[A] + b = f(b, a) + i += 1 + } + b + } + + override def foldRight[B](z: B)(f: (A, B) => B): B = { + // For ArraySeqs with sizes of [100, 1000, 10000] this is [1.6, 1.8, 2.7]x as fast + // as the same while-loop over this instead of unsafeArray. + val array = unsafeArray + var b = z + var i = array.length + while (i > 0) { + i -= 1 + val a = array(i).asInstanceOf[A] + b = f(a, b) + } + b + } + + override def tail: ArraySeq[A] = ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).tail).asInstanceOf[ArraySeq[A]] + + override def reverse: ArraySeq[A] = ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).reverse).asInstanceOf[ArraySeq[A]] + + override protected[this] def className = "ArraySeq" + + override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) + if(copied > 0) { + Array.copy(unsafeArray, 0, xs, start, copied) + } + copied + } + + override protected final def applyPreferredMaxLength: Int = Int.MaxValue + + override def sorted[B >: A](implicit ord: Ordering[B]): ArraySeq[A] = + if(unsafeArray.length <= 1) this + else { + val a = Array.copyAs[AnyRef](unsafeArray, length)(ClassTag.AnyRef) + Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]) + new ArraySeq.ofRef[AnyRef](a).asInstanceOf[ArraySeq[A]] + } +} + +/** + * $factoryInfo + * @define coll immutable array + * @define Coll `ArraySeq` + */ +@SerialVersionUID(3L) +object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => + val untagged: SeqFactory[ArraySeq] = new ClassTagSeqFactory.AnySeqDelegate(self) + + private[this] lazy val emptyImpl = new ArraySeq.ofRef[Nothing](new Array[Nothing](0)) + + def empty[A : ClassTag]: ArraySeq[A] = emptyImpl + + def from[A](it: scala.collection.IterableOnce[A]^)(implicit tag: ClassTag[A]): ArraySeq[A] = it match { + case as: ArraySeq[A] => as + case _ => unsafeWrapArray(Array.from[A](it)) + } + + def newBuilder[A : ClassTag]: Builder[A, ArraySeq[A]] = + ArrayBuffer.newBuilder[A].mapResult(b => unsafeWrapArray[A](b.toArray)) + + override def fill[A : ClassTag](n: Int)(elem: => A): ArraySeq[A] = tabulate(n)(_ => elem) + + override def tabulate[A : ClassTag](n: Int)(f: Int => A): ArraySeq[A] = { + val elements = Array.ofDim[A](scala.math.max(n, 0)) + var i = 0 + while (i < n) { + ScalaRunTime.array_update(elements, i, f(i)) + i = i + 1 + } + ArraySeq.unsafeWrapArray(elements) + } + + /** + * Wrap an existing `Array` into an `ArraySeq` of the proper primitive specialization type + * without copying. Any changes to wrapped array will break the expected immutability. + * + * Note that an array containing boxed primitives can be wrapped in an `ArraySeq` without + * copying. For example, `val a: Array[Any] = Array(1)` is an array of `Object` at runtime, + * containing `Integer`s. 
An `ArraySeq[Int]` can be obtained with a cast: + * `ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[Int]]`. The values are still + * boxed, the resulting instance is an [[ArraySeq.ofRef]]. Writing + * `ArraySeq.unsafeWrapArray(a.asInstanceOf[Array[Int]])` does not work, it throws a + * `ClassCastException` at runtime. + */ + def unsafeWrapArray[T](x: Array[T]): ArraySeq[T] = ((x: @unchecked) match { + case null => null + case x: Array[AnyRef] => new ofRef[AnyRef](x) + case x: Array[Int] => new ofInt(x) + case x: Array[Double] => new ofDouble(x) + case x: Array[Long] => new ofLong(x) + case x: Array[Float] => new ofFloat(x) + case x: Array[Char] => new ofChar(x) + case x: Array[Byte] => new ofByte(x) + case x: Array[Short] => new ofShort(x) + case x: Array[Boolean] => new ofBoolean(x) + case x: Array[Unit] => new ofUnit(x) + }).asInstanceOf[ArraySeq[T]] + + @SerialVersionUID(3L) + final class ofRef[T <: AnyRef](val unsafeArray: Array[T]) extends ArraySeq[T] { + def elemTag = ClassTag[T](unsafeArray.getClass.getComponentType) + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): T = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any): Boolean = that match { + case that: ofRef[_] => + Array.equals( + this.unsafeArray.asInstanceOf[Array[AnyRef]], + that.unsafeArray.asInstanceOf[Array[AnyRef]]) + case _ => super.equals(that) + } + override def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq.ofRef[T] = { + if(unsafeArray.length <= 1) this + else { + val a = unsafeArray.clone() + Arrays.sort(a, ord.asInstanceOf[Ordering[T]]) + new ArraySeq.ofRef(a) + } + } + override def iterator: Iterator[T] = new ArrayOps.ArrayIterator[T](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + new ObjectArrayStepper(unsafeArray, 0, unsafeArray.length) + else shape.parUnbox(new ObjectArrayStepper(unsafeArray, 0, unsafeArray.length).asInstanceOf[AnyStepper[T] with EfficientSplit]) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofByte(val unsafeArray: Array[Byte]) extends ArraySeq[Byte] { + protected def elemTag = ClassTag.Byte + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Byte = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofByte => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Byte](implicit ord: Ordering[B]): ArraySeq[Byte] = + if(length <= 1) this + else if(ord eq Ordering.Byte) { + val a = unsafeArray.clone() + Arrays.sort(a) + new ArraySeq.ofByte(a) + } else super.sorted[B] + override def iterator: Iterator[Byte] = new ArrayOps.ArrayIterator[Byte](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Byte, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedByteArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new WidenedByteArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Byte](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Byte => new ArraySeq.ofByte(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: 
Byte](elem: B): ArraySeq[B] = + elem match { + case b: Byte => new ArraySeq.ofByte(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Byte](elem: B): ArraySeq[B] = + elem match { + case b: Byte => new ArraySeq.ofByte(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofShort(val unsafeArray: Array[Short]) extends ArraySeq[Short] { + protected def elemTag = ClassTag.Short + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Short = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofShort => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Short](implicit ord: Ordering[B]): ArraySeq[Short] = + if(length <= 1) this + else if(ord eq Ordering.Short) { + val a = unsafeArray.clone() + Arrays.sort(a) + new ArraySeq.ofShort(a) + } else super.sorted[B] + override def iterator: Iterator[Short] = new ArrayOps.ArrayIterator[Short](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Short, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedShortArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new WidenedShortArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Short](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Short => new ArraySeq.ofShort(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Short](elem: B): ArraySeq[B] = + elem match { + case b: Short => new ArraySeq.ofShort(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Short](elem: B): ArraySeq[B] = + elem match { + case b: Short => new ArraySeq.ofShort(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofChar(val unsafeArray: Array[Char]) extends ArraySeq[Char] { + protected def elemTag = ClassTag.Char + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Char = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofChar => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Char](implicit ord: Ordering[B]): ArraySeq[Char] = + if(length <= 1) this + else if(ord eq Ordering.Char) { + val a = unsafeArray.clone() + Arrays.sort(a) + new ArraySeq.ofChar(a) + } else super.sorted[B] + override def iterator: Iterator[Char] = new ArrayOps.ArrayIterator[Char](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedCharArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new WidenedCharArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Char](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Char => new ArraySeq.ofChar(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Char](elem: B): ArraySeq[B] = + elem match { + case b: Char => new 
ArraySeq.ofChar(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Char](elem: B): ArraySeq[B] = + elem match { + case b: Char => new ArraySeq.ofChar(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + + override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = + (new MutableArraySeq.ofChar(unsafeArray)).addString(sb, start, sep, end) + } + + @SerialVersionUID(3L) + final class ofInt(val unsafeArray: Array[Int]) extends ArraySeq[Int] { + protected def elemTag = ClassTag.Int + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Int = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofInt => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Int](implicit ord: Ordering[B]): ArraySeq[Int] = + if(length <= 1) this + else if(ord eq Ordering.Int) { + val a = unsafeArray.clone() + Arrays.sort(a) + new ArraySeq.ofInt(a) + } else super.sorted[B] + override def iterator: Iterator[Int] = new ArrayOps.ArrayIterator[Int](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new IntArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new IntArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Int](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Int => new ArraySeq.ofInt(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Int](elem: B): ArraySeq[B] = + elem match { + case b: Int => new ArraySeq.ofInt(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Int](elem: B): ArraySeq[B] = + elem match { + case b: Int => new ArraySeq.ofInt(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofLong(val unsafeArray: Array[Long]) extends ArraySeq[Long] { + protected def elemTag = ClassTag.Long + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Long = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofLong => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Long](implicit ord: Ordering[B]): ArraySeq[Long] = + if(length <= 1) this + else if(ord eq Ordering.Long) { + val a = unsafeArray.clone() + Arrays.sort(a) + new ArraySeq.ofLong(a) + } else super.sorted[B] + override def iterator: Iterator[Long] = new ArrayOps.ArrayIterator[Long](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Long, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParLongStepper(new LongArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new LongArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Long](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Long => new ArraySeq.ofLong(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Long](elem: B): ArraySeq[B] = + elem match { + case b: Long => new 
ArraySeq.ofLong(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Long](elem: B): ArraySeq[B] = + elem match { + case b: Long => new ArraySeq.ofLong(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofFloat(val unsafeArray: Array[Float]) extends ArraySeq[Float] { + protected def elemTag = ClassTag.Float + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Float = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofFloat => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def iterator: Iterator[Float] = new ArrayOps.ArrayIterator[Float](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Float, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new WidenedFloatArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new WidenedFloatArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Float](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Float => new ArraySeq.ofFloat(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Float](elem: B): ArraySeq[B] = + elem match { + case b: Float => new ArraySeq.ofFloat(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Float](elem: B): ArraySeq[B] = + elem match { + case b: Float => new ArraySeq.ofFloat(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofDouble(val unsafeArray: Array[Double]) extends ArraySeq[Double] { + protected def elemTag = ClassTag.Double + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Double = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofDouble => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def iterator: Iterator[Double] = new ArrayOps.ArrayIterator[Double](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Double, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new DoubleArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new DoubleArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Double](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Double => new ArraySeq.ofDouble(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Double](elem: B): ArraySeq[B] = + elem match { + case b: Double => new ArraySeq.ofDouble(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Double](elem: B): ArraySeq[B] = + elem match { + case b: Double => new ArraySeq.ofDouble(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofBoolean(val unsafeArray: Array[Boolean]) extends ArraySeq[Boolean] { + protected def elemTag = ClassTag.Boolean + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Boolean = unsafeArray(i) 
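+    // There is no primitive stepper shape for Boolean, so the `stepper`
+    // implementation below always boxes via BoxedBooleanArrayStepper.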
+ override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofBoolean => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Boolean](implicit ord: Ordering[B]): ArraySeq[Boolean] = + if(length <= 1) this + else if(ord eq Ordering.Boolean) { + val a = unsafeArray.clone() + Sorting.stableSort(a) + new ArraySeq.ofBoolean(a) + } else super.sorted[B] + override def iterator: Iterator[Boolean] = new ArrayOps.ArrayIterator[Boolean](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Boolean, S]): S with EfficientSplit = + new BoxedBooleanArrayStepper(unsafeArray, 0, unsafeArray.length).asInstanceOf[S with EfficientSplit] + override def updated[B >: Boolean](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Boolean => new ArraySeq.ofBoolean(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Boolean](elem: B): ArraySeq[B] = + elem match { + case b: Boolean => new ArraySeq.ofBoolean(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Boolean](elem: B): ArraySeq[B] = + elem match { + case b: Boolean => new ArraySeq.ofBoolean(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofUnit(val unsafeArray: Array[Unit]) extends ArraySeq[Unit] { + protected def elemTag = ClassTag.Unit + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Unit = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofUnit => unsafeArray.length == that.unsafeArray.length + case _ => super.equals(that) + } + override def iterator: Iterator[Unit] = new ArrayOps.ArrayIterator[Unit](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Unit, S]): S with EfficientSplit = + new ObjectArrayStepper[AnyRef](unsafeArray.asInstanceOf[Array[AnyRef]], 0, unsafeArray.length).asInstanceOf[S with EfficientSplit] + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/BitSet.scala b/tests/pos-special/stdlib/collection/immutable/BitSet.scala new file mode 100644 index 000000000000..9c2bfdad54d0 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/BitSet.scala @@ -0,0 +1,376 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import BitSetOps.{LogWL, updateArray} +import mutable.Builder +import scala.annotation.{implicitNotFound, nowarn} +import language.experimental.captureChecking + +/** A class for immutable bitsets. + * $bitsetinfo + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#immutable-bitsets "Scala's Collection Library overview"]] + * section on `Immutable BitSets` for more information. 
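+ *
+ * As an illustrative sketch (example values, not from the original docs),
+ * elements are stored as bits of an array of `Long` words:
+ * {{{
+ * val bits = BitSet(1, 3, 64) // bits 1 and 3 in word 0, bit 0 of word 1
+ * bits.contains(3)            // true
+ * bits.incl(70).excl(1)       // BitSet(3, 64, 70)
+ * }}}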
+ * + * @define Coll `immutable.BitSet` + * @define coll immutable bitset + */ +sealed abstract class BitSet + extends AbstractSet[Int] + with SortedSet[Int] + with SortedSetOps[Int, SortedSet, BitSet] + with StrictOptimizedSortedSetOps[Int, SortedSet, BitSet] + with collection.BitSet + with collection.BitSetOps[BitSet] + with Serializable { + + override def unsorted: Set[Int] = this + + override protected def fromSpecific(coll: IterableOnce[Int]^): BitSet = bitSetFactory.fromSpecific(coll) + override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder + override def empty: BitSet = bitSetFactory.empty + + def bitSetFactory = BitSet + + protected[collection] def fromBitMaskNoCopy(elems: Array[Long]): BitSet = BitSet.fromBitMaskNoCopy(elems) + + def incl(elem: Int): BitSet = { + require(elem >= 0, "bitset element must be >= 0") + if (contains(elem)) this + else { + val idx = elem >> LogWL + updateWord(idx, word(idx) | (1L << elem)) + } + } + + def excl(elem: Int): BitSet = { + require(elem >= 0, "bitset element must be >= 0") + if (contains(elem)) { + val idx = elem >> LogWL + updateWord(idx, word(idx) & ~(1L << elem)) + } else this + } + + /** Update word at index `idx`; enlarge set if `idx` outside range of set. + */ + protected def updateWord(idx: Int, w: Long): BitSet + + override def map(f: Int => Int): BitSet = strictOptimizedMap(newSpecificBuilder, f) + override def map[B](f: Int => B)(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].map(f) + + override def flatMap(f: Int => IterableOnce[Int]): BitSet = strictOptimizedFlatMap(newSpecificBuilder, f) + override def flatMap[B](f: Int => IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].flatMap(f) + + override def collect(pf: PartialFunction[Int, Int]): BitSet = strictOptimizedCollect(newSpecificBuilder, pf) + override def collect[B](pf: scala.PartialFunction[Int, B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].collect(pf) + + // necessary for disambiguation + override def zip[B](that: scala.IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.zipOrdMsg) ev: Ordering[(Int, B)]): SortedSet[(Int, B)] = + super.zip(that) + + protected[this] def writeReplace(): AnyRef = new BitSet.SerializationProxy(this) +} + +/** + * $factoryInfo + * @define Coll `immutable.BitSet` + * @define coll immutable bitset + */ +@nowarn("cat=deprecation&msg=Implementation classes of BitSet should not be accessed directly") +@SerialVersionUID(3L) +object BitSet extends SpecificIterableFactory[Int, BitSet] { + + def fromSpecific(it: scala.collection.IterableOnce[Int]^): BitSet = + it match { + case bs: BitSet => bs + case _ => (newBuilder ++= it).result() + } + + final val empty: BitSet = new BitSet1(0L) + + def newBuilder: Builder[Int, BitSet] = + mutable.BitSet.newBuilder.mapResult(bs => fromBitMaskNoCopy(bs.elems)) + + private def createSmall(a: Long, b: Long): BitSet = if (b == 0L) new BitSet1(a) else new BitSet2(a, b) + + /** A bitset containing all the bits in an array */ + def fromBitMask(elems: Array[Long]): BitSet = { + val len = elems.length + if (len == 0) empty + else if (len == 1) new BitSet1(elems(0)) + else if (len == 2) createSmall(elems(0), elems(1)) + else { + val a = java.util.Arrays.copyOf(elems, len) + new BitSetN(a) + } + } + + /** A bitset containing all the bits in 
an array, wrapping the existing + * array without copying. + */ + def fromBitMaskNoCopy(elems: Array[Long]): BitSet = { + val len = elems.length + if (len == 0) empty + else if (len == 1) new BitSet1(elems(0)) + else if (len == 2) createSmall(elems(0), elems(1)) + else new BitSetN(elems) + } + + @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0") + class BitSet1(val elems: Long) extends BitSet { + protected[collection] def nwords = 1 + protected[collection] def word(idx: Int) = if (idx == 0) elems else 0L + protected[collection] def updateWord(idx: Int, w: Long): BitSet = + if (idx == 0) new BitSet1(w) + else if (idx == 1) createSmall(elems, w) + else this.fromBitMaskNoCopy(updateArray(Array(elems), idx, w)) + + + override def diff(other: collection.Set[Int]): BitSet = other match { + case bs: collection.BitSet => bs.nwords match { + case 0 => this + case _ => + val newElems = elems & ~bs.word(0) + if (newElems == 0L) this.empty else new BitSet1(newElems) + } + case _ => super.diff(other) + } + + override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { + val _elems = BitSetOps.computeWordForFilter(pred, isFlipped, elems, 0) + if (_elems == 0L) this.empty else new BitSet1(_elems) + } + } + + @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0") + class BitSet2(val elems0: Long, val elems1: Long) extends BitSet { + protected[collection] def nwords = 2 + protected[collection] def word(idx: Int) = if (idx == 0) elems0 else if (idx == 1) elems1 else 0L + protected[collection] def updateWord(idx: Int, w: Long): BitSet = + if (idx == 0) new BitSet2(w, elems1) + else if (idx == 1) createSmall(elems0, w) + else this.fromBitMaskNoCopy(updateArray(Array(elems0, elems1), idx, w)) + + + override def diff(other: collection.Set[Int]): BitSet = other match { + case bs: collection.BitSet => bs.nwords match { + case 0 => this + case 1 => + new BitSet2(elems0 & ~bs.word(0), elems1) + case _ => + val _elems0 = elems0 & ~bs.word(0) + val _elems1 = elems1 & ~bs.word(1) + + if (_elems1 == 0L) { + if (_elems0 == 0L) { + this.empty + } else { + new BitSet1(_elems0) + } + } else { + new BitSet2(_elems0, _elems1) + } + } + case _ => super.diff(other) + } + + override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { + val _elems0 = BitSetOps.computeWordForFilter(pred, isFlipped, elems0, 0) + val _elems1 = BitSetOps.computeWordForFilter(pred, isFlipped, elems1, 1) + + if (_elems1 == 0L) { + if (_elems0 == 0L) { + this.empty + } + else new BitSet1(_elems0) + } + else new BitSet2(_elems0, _elems1) + } + } + + @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0") + class BitSetN(val elems: Array[Long]) extends BitSet { + protected[collection] def nwords = elems.length + + protected[collection] def word(idx: Int) = if (idx < nwords) elems(idx) else 0L + + protected[collection] def updateWord(idx: Int, w: Long): BitSet = this.fromBitMaskNoCopy(updateArray(elems, idx, w)) + + override def diff(that: collection.Set[Int]): BitSet = that match { + case bs: collection.BitSet => + /* + * Algorithm: + * + * We iterate, word-by-word, backwards from the shortest of the two bitsets (this, or bs) i.e. the one with + * the fewer words. Two extra concerns for optimization are described below. 
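+ * For each word, the updated value is `word(i) & ~bs.word(i)`: the word keeps
+ * exactly those bits that are not also set in the corresponding word of `bs`.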
+ * + * Array Shrinking: + * If `this` is not longer than `bs`, then since we must iterate through the full array of words, + * we can track the new highest index word which is non-zero, at little additional cost. At the end, the new + * Array[Long] allocated for the returned BitSet will only be of size `maxNonZeroIndex + 1` + * + * Tracking Changes: + * If the two sets are disjoint, then we can return `this`. Therefore, until at least one change is detected, + * we check each word to see whether it has changed from its corresponding word in `this`. Once a single change is + * detected, we stop checking because the cost of the new Array must be paid anyway. + */ + + val bsnwords = bs.nwords + val thisnwords = nwords + if (bsnwords >= thisnwords) { + // here, we may have opportunity to shrink the size of the array + // so, track the highest index which is non-zero. That ( + 1 ) will be our new array length + var i = thisnwords - 1 + var currentWord = 0L + // if there are never any changes, we can return `this` at the end + var anyChanges = false + while (i >= 0 && currentWord == 0L) { + val oldWord = word(i) + currentWord = oldWord & ~bs.word(i) + anyChanges ||= currentWord != oldWord + i -= 1 + } + i match { + case -1 => + if (anyChanges) { + if (currentWord == 0) { + this.empty + } else { + new BitSet1(currentWord) + } + } else { + this + } + case 0 => + val oldFirstWord = word(0) + val firstWord = oldFirstWord & ~bs.word(0) + anyChanges ||= firstWord != oldFirstWord + if (anyChanges) { + new BitSet2(firstWord, currentWord) + } else { + this + } + case _ => + val minimumNonZeroIndex: Int = i + 1 + while (!anyChanges && i >= 0) { + val oldWord = word(i) + currentWord = oldWord & ~bs.word(i) + anyChanges ||= currentWord != oldWord + i -= 1 + } + if (anyChanges) { + val newArray = elems.take(minimumNonZeroIndex + 1) + newArray(i + 1) = currentWord + while (i >= 0) { + newArray(i) = word(i) & ~bs.word(i) + i -= 1 + } + new BitSetN(newArray) + } else { + this + } + } + } else { + var i = bsnwords - 1 + var anyChanges = false + var currentWord = 0L + while (i >= 0 && !anyChanges) { + val oldWord = word(i) + currentWord = oldWord & ~bs.word(i) + anyChanges ||= currentWord != oldWord + i -= 1 + } + if (anyChanges) { + val newElems = elems.clone() + newElems(i + 1) = currentWord + while (i >= 0) { + newElems(i) = word(i) & ~bs.word(i) + i -= 1 + } + this.fromBitMaskNoCopy(newElems) + } else { + this + } + } + case _ => super.diff(that) + } + + + override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { + // here, we may have opportunity to shrink the size of the array + // so, track the highest index which is non-zero.
That ( + 1 ) will be our new array length + var i = nwords - 1 + var currentWord = 0L + // if there are never any changes, we can return `this` at the end + var anyChanges = false + while (i >= 0 && currentWord == 0L) { + val oldWord = word(i) + currentWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldWord, i) + anyChanges ||= currentWord != oldWord + i -= 1 + } + i match { + case -1 => + if (anyChanges) { + if (currentWord == 0) { + this.empty + } else { + new BitSet1(currentWord) + } + } else { + this + } + case 0 => + val oldFirstWord = word(0) + val firstWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldFirstWord, 0) + anyChanges ||= firstWord != oldFirstWord + if (anyChanges) { + new BitSet2(firstWord, currentWord) + } else { + this + } + case _ => + val minimumNonZeroIndex: Int = i + 1 + while (!anyChanges && i >= 0) { + val oldWord = word(i) + currentWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldWord, i) + anyChanges ||= currentWord != oldWord + i -= 1 + } + if (anyChanges) { + val newArray = elems.take(minimumNonZeroIndex + 1) + newArray(i + 1) = currentWord + while (i >= 0) { + newArray(i) = BitSetOps.computeWordForFilter(pred, isFlipped, word(i), i) + i -= 1 + } + new BitSetN(newArray) + } else { + this + } + } + } + + override def toBitMask: Array[Long] = elems.clone() + } + + @SerialVersionUID(3L) + private final class SerializationProxy(coll: BitSet) extends scala.collection.BitSet.SerializationProxy(coll) { + protected[this] def readResolve(): Any = BitSet.fromBitMaskNoCopy(elems) + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala b/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala new file mode 100644 index 000000000000..fc9bcb022874 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala @@ -0,0 +1,253 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection.immutable + + +import java.lang.Integer.bitCount +import java.lang.Math.ceil +import java.lang.System.arraycopy +import language.experimental.captureChecking + +private[collection] object Node { + final val HashCodeLength = 32 + + final val BitPartitionSize = 5 + + final val BitPartitionMask = (1 << BitPartitionSize) - 1 + + final val MaxDepth = ceil(HashCodeLength.toDouble / BitPartitionSize).toInt + + final val BranchingFactor = 1 << BitPartitionSize + + final def maskFrom(hash: Int, shift: Int): Int = (hash >>> shift) & BitPartitionMask + + final def bitposFrom(mask: Int): Int = 1 << mask + + final def indexFrom(bitmap: Int, bitpos: Int): Int = bitCount(bitmap & (bitpos - 1)) + + final def indexFrom(bitmap: Int, mask: Int, bitpos: Int): Int = if (bitmap == -1) mask else indexFrom(bitmap, bitpos) + +} + +private[collection] abstract class Node[T <: Node[T]] { + + def hasNodes: Boolean + + def nodeArity: Int + + def getNode(index: Int): T + + def hasPayload: Boolean + + def payloadArity: Int + + def getPayload(index: Int): Any + + def getHash(index: Int): Int + + def cachedJavaKeySetHashCode: Int + + private final def arrayIndexOutOfBounds(as: Array[_], ix: Int): ArrayIndexOutOfBoundsException = + new ArrayIndexOutOfBoundsException(s"$ix is out of bounds (min 0, max ${as.length-1})") + + protected final def removeElement(as: Array[Int], ix: Int): Array[Int] = { + if (ix < 0) throw arrayIndexOutOfBounds(as, ix) + if (ix > as.length - 1) throw arrayIndexOutOfBounds(as, ix) + val result = new Array[Int](as.length - 1) + arraycopy(as, 0, result, 0, ix) + arraycopy(as, ix + 1, result, ix, as.length - ix - 1) + result + } + + protected final def removeAnyElement(as: Array[Any], ix: Int): Array[Any] = { + if (ix < 0) throw arrayIndexOutOfBounds(as, ix) + if (ix > as.length - 1) throw arrayIndexOutOfBounds(as, ix) + val result = new Array[Any](as.length - 1) + arraycopy(as, 0, result, 0, ix) + arraycopy(as, ix + 1, result, ix, as.length - ix - 1) + result + } + + protected final def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = { + if (ix < 0) throw arrayIndexOutOfBounds(as, ix) + if (ix > as.length) throw arrayIndexOutOfBounds(as, ix) + val result = new Array[Int](as.length + 1) + arraycopy(as, 0, result, 0, ix) + result(ix) = elem + arraycopy(as, ix, result, ix + 1, as.length - ix) + result + } + protected final def insertAnyElement(as: Array[Any], ix: Int, elem: Int): Array[Any] = { + if (ix < 0) throw arrayIndexOutOfBounds(as, ix) + if (ix > as.length) throw arrayIndexOutOfBounds(as, ix) + val result = new Array[Any](as.length + 1) + arraycopy(as, 0, result, 0, ix) + result(ix) = elem + arraycopy(as, ix, result, ix + 1, as.length - ix) + result + } +} + +/** + * Base class for fixed-stack iterators that traverse a hash-trie. The iterator performs a + * depth-first pre-order traversal, which yields first all payload elements of the current + * node before traversing sub-nodes (left to right). + * + * @tparam T the trie node type we are iterating over + */ +private[immutable] abstract class ChampBaseIterator[T <: Node[T]] { + + import Node.MaxDepth + + // Note--this code is duplicated to a large extent both in + // ChampBaseReverseIterator and in convert.impl.ChampStepperBase. + // If you change this code, check those also in case they also + // need to be modified.
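+  //
+  // Stack layout (a sketch of the invariant, inferred from pushNode below):
+  // for stack level `l`, nodes(l) is the node being traversed,
+  // nodeCursorsAndLengths(2 * l) is the cursor into its sub-node array, and
+  // nodeCursorsAndLengths(2 * l + 1) caches its nodeArity.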
+ + protected var currentValueCursor: Int = 0 + protected var currentValueLength: Int = 0 + protected var currentValueNode: T = _ + + private[this] var currentStackLevel: Int = -1 + private[this] var nodeCursorsAndLengths: Array[Int] = _ + private[this] var nodes: Array[T] = _ + private def initNodes(): Unit = { + if (nodeCursorsAndLengths eq null) { + nodeCursorsAndLengths = new Array[Int](MaxDepth * 2) + nodes = new Array[Node[T]](MaxDepth).asInstanceOf[Array[T]] + } + } + + def this(rootNode: T) = { + this() + if (rootNode.hasNodes) pushNode(rootNode) + if (rootNode.hasPayload) setupPayloadNode(rootNode) + } + + private final def setupPayloadNode(node: T): Unit = { + currentValueNode = node + currentValueCursor = 0 + currentValueLength = node.payloadArity + } + + private final def pushNode(node: T): Unit = { + initNodes() + currentStackLevel = currentStackLevel + 1 + + val cursorIndex = currentStackLevel * 2 + val lengthIndex = currentStackLevel * 2 + 1 + + nodes(currentStackLevel) = node + nodeCursorsAndLengths(cursorIndex) = 0 + nodeCursorsAndLengths(lengthIndex) = node.nodeArity + } + + private final def popNode(): Unit = { + currentStackLevel = currentStackLevel - 1 + } + + /** + * Searches for next node that contains payload values, + * and pushes encountered sub-nodes on a stack for depth-first traversal. + */ + private final def searchNextValueNode(): Boolean = { + while (currentStackLevel >= 0) { + val cursorIndex = currentStackLevel * 2 + val lengthIndex = currentStackLevel * 2 + 1 + + val nodeCursor = nodeCursorsAndLengths(cursorIndex) + val nodeLength = nodeCursorsAndLengths(lengthIndex) + + if (nodeCursor < nodeLength) { + nodeCursorsAndLengths(cursorIndex) += 1 + + val nextNode = nodes(currentStackLevel).getNode(nodeCursor) + + if (nextNode.hasNodes) { pushNode(nextNode) } + if (nextNode.hasPayload) { setupPayloadNode(nextNode) ; return true } + } else { + popNode() + } + } + + return false + } + + final def hasNext = (currentValueCursor < currentValueLength) || searchNextValueNode() + +} + +/** + * Base class for fixed-stack iterators that traverse a hash-trie in reverse order. The base + * iterator performs a depth-first post-order traversal, traversing sub-nodes (right to left). + * + * @tparam T the trie node type we are iterating over + */ +private[immutable] abstract class ChampBaseReverseIterator[T <: Node[T]] { + + import Node.MaxDepth + + protected var currentValueCursor: Int = -1 + protected var currentValueNode: T = _ + + private[this] var currentStackLevel: Int = -1 + private[this] val nodeIndex: Array[Int] = new Array[Int](MaxDepth + 1) + private[this] val nodeStack: Array[T] = new Array[Node[T]](MaxDepth + 1).asInstanceOf[Array[T]] + + def this(rootNode: T) = { + this() + pushNode(rootNode) + searchNextValueNode() + } + + private final def setupPayloadNode(node: T): Unit = { + currentValueNode = node + currentValueCursor = node.payloadArity - 1 + } + + private final def pushNode(node: T): Unit = { + currentStackLevel = currentStackLevel + 1 + + nodeStack(currentStackLevel) = node + nodeIndex(currentStackLevel) = node.nodeArity - 1 + } + + private final def popNode(): Unit = { + currentStackLevel = currentStackLevel - 1 + } + + /** + * Searches for rightmost node that contains payload values, + * and pushes encountered sub-nodes on a stack for depth-first traversal. 
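+   * Returns `true` if such a node was found, `false` once the stack has been
+   * exhausted and the iteration is finished.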
+ */ + private final def searchNextValueNode(): Boolean = { + while (currentStackLevel >= 0) { + val nodeCursor = nodeIndex(currentStackLevel) ; nodeIndex(currentStackLevel) = nodeCursor - 1 + + if (nodeCursor >= 0) { + val nextNode = nodeStack(currentStackLevel).getNode(nodeCursor) + pushNode(nextNode) + } else { + val currNode = nodeStack(currentStackLevel) + popNode() + + if (currNode.hasPayload) { setupPayloadNode(currNode) ; return true } + } + } + + return false + } + + final def hasNext = (currentValueCursor >= 0) || searchNextValueNode() + +} diff --git a/tests/pos-special/stdlib/collection/immutable/HashMap.scala b/tests/pos-special/stdlib/collection/immutable/HashMap.scala new file mode 100644 index 000000000000..8faa37625d51 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/HashMap.scala @@ -0,0 +1,2424 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.immutable + +import java.lang.Integer.bitCount +import java.lang.System.arraycopy + +import scala.annotation.unchecked.{uncheckedVariance => uV} +import scala.collection.Hashing.improve +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable, mutable.ReusableBuilder +import scala.collection.{Iterator, MapFactory, MapFactoryDefaults, Stepper, StepperShape, mutable} +import scala.runtime.AbstractFunction2 +import scala.runtime.Statics.releaseFence +import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking + +/** This class implements immutable maps using a Compressed Hash-Array Mapped Prefix-tree. + * See paper https://michael.steindorfer.name/publications/oopsla15.pdf for more details. + * + * @tparam K the type of the keys contained in this hash set. + * @tparam V the type of the values associated with the keys in this hash map. + * + * @define Coll `immutable.HashMap` + * @define coll immutable champ hash map + */ + +final class HashMap[K, +V] private[immutable] (private[immutable] val rootNode: BitmapIndexedMapNode[K, V]) + extends AbstractMap[K, V] + with StrictOptimizedMapOps[K, V, HashMap, HashMap[K, V]] + with MapFactoryDefaults[K, V, HashMap, Iterable] + with DefaultSerializable { + + def this() = this(MapNode.empty) + + // This release fence is present because rootNode may have previously been mutated during construction. 
+ releaseFence() + + override def mapFactory: MapFactory[HashMap] = HashMap + + override def knownSize: Int = rootNode.size + + override def size: Int = rootNode.size + + override def isEmpty: Boolean = rootNode.size == 0 + + override def keySet: Set[K] = if (size == 0) Set.empty else new HashKeySet + + private final class HashKeySet extends ImmutableKeySet { + + private[this] def newKeySetOrThis(newHashMap: HashMap[K, _]): Set[K] = + if (newHashMap eq HashMap.this) this else newHashMap.keySet + private[this] def newKeySetOrThis(newRootNode: BitmapIndexedMapNode[K, _]): Set[K] = + if (newRootNode eq rootNode) this else new HashMap(newRootNode).keySet + + override def incl(elem: K): Set[K] = { + val originalHash = elem.## + val improvedHash = improve(originalHash) + val newNode = rootNode.updated(elem, null.asInstanceOf[V], originalHash, improvedHash, 0, replaceValue = false) + newKeySetOrThis(newNode) + } + override def excl(elem: K): Set[K] = newKeySetOrThis(HashMap.this - elem) + override def filter(pred: K => Boolean): Set[K] = newKeySetOrThis(HashMap.this.filter(kv => pred(kv._1))) + override def filterNot(pred: K => Boolean): Set[K] = newKeySetOrThis(HashMap.this.filterNot(kv => pred(kv._1))) + } + + def iterator: Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else new MapKeyValueTupleIterator[K, V](rootNode) + } + + override def keysIterator: Iterator[K] = { + if (isEmpty) Iterator.empty + else new MapKeyIterator[K, V](rootNode) + } + override def valuesIterator: Iterator[V] = { + if (isEmpty) Iterator.empty + else new MapValueIterator[K, V](rootNode) + } + + protected[immutable] def reverseIterator: Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else new MapKeyValueTupleReverseIterator[K, V](rootNode) + } + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = + shape. 
+      parUnbox(collection.convert.impl.AnyChampStepper.from[(K, V), MapNode[K, V]](size, rootNode, (node, i) => node.getPayload(i)))
+
+  override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = {
+    import collection.convert.impl._
+    val s = shape.shape match {
+      case StepperShape.IntShape    => IntChampStepper.from[   MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i).asInstanceOf[Int])
+      case StepperShape.LongShape   => LongChampStepper.from[  MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i).asInstanceOf[Long])
+      case StepperShape.DoubleShape => DoubleChampStepper.from[MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i).asInstanceOf[Double])
+      case _                        => shape.parUnbox(AnyChampStepper.from[K, MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i)))
+    }
+    s.asInstanceOf[S with EfficientSplit]
+  }
+
+  override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = {
+    import collection.convert.impl._
+    val s = shape.shape match {
+      case StepperShape.IntShape    => IntChampStepper.from[   MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i).asInstanceOf[Int])
+      case StepperShape.LongShape   => LongChampStepper.from[  MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i).asInstanceOf[Long])
+      case StepperShape.DoubleShape => DoubleChampStepper.from[MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i).asInstanceOf[Double])
+      case _                        => shape.parUnbox(AnyChampStepper.from[V, MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i)))
+    }
+    s.asInstanceOf[S with EfficientSplit]
+  }
+
+  override final def contains(key: K): Boolean = {
+    val keyUnimprovedHash = key.##
+    val keyHash = improve(keyUnimprovedHash)
+    rootNode.containsKey(key, keyUnimprovedHash, keyHash, 0)
+  }
+
+  override def apply(key: K): V = {
+    val keyUnimprovedHash = key.##
+    val keyHash = improve(keyUnimprovedHash)
+    rootNode.apply(key, keyUnimprovedHash, keyHash, 0)
+  }
+
+  def get(key: K): Option[V] = {
+    val keyUnimprovedHash = key.##
+    val keyHash = improve(keyUnimprovedHash)
+    rootNode.get(key, keyUnimprovedHash, keyHash, 0)
+  }
+
+  override def getOrElse[V1 >: V](key: K, default: => V1): V1 = {
+    val keyUnimprovedHash = key.##
+    val keyHash = improve(keyUnimprovedHash)
+    rootNode.getOrElse(key, keyUnimprovedHash, keyHash, 0, default)
+  }
+
+  @`inline` private[this] def newHashMapOrThis[V1 >: V](newRootNode: BitmapIndexedMapNode[K, V1]): HashMap[K, V1] =
+    if (newRootNode eq rootNode) this else new HashMap(newRootNode)
+
+  def updated[V1 >: V](key: K, value: V1): HashMap[K, V1] = {
+    val keyUnimprovedHash = key.##
+    newHashMapOrThis(rootNode.updated(key, value, keyUnimprovedHash, improve(keyUnimprovedHash), 0, replaceValue = true))
+  }
+
+  // preemptively overridden in anticipation of performance optimizations
+  override def updatedWith[V1 >: V](key: K)(remappingFunction: Option[V] => Option[V1]): HashMap[K, V1] =
+    super.updatedWith[V1](key)(remappingFunction)
+
+  def removed(key: K): HashMap[K, V] = {
+    val keyUnimprovedHash = key.##
+    newHashMapOrThis(rootNode.removed(key, keyUnimprovedHash, improve(keyUnimprovedHash), 0))
+  }
+
+  override def concat[V1 >: V](that: scala.IterableOnce[(K, V1)]^): HashMap[K, V1] = that match {
+    case hm: HashMap[K, V1] =>
+      if (isEmpty) hm
+      else {
+        val newNode = rootNode.concat(hm.rootNode, 0)
+        if (newNode eq hm.rootNode) hm
+        else newHashMapOrThis(newNode)
+      }
+    case hm: mutable.HashMap[K @unchecked, V @unchecked] =>
+      val iter = hm.nodeIterator
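+      // Fast-path sketch (illustrative, hypothetical values): each mutable-map node already
+      // stores a hash, so `unimproveHash` recovers the original `key.##` without recomputing
+      // it, and the loop below switches to shallowly-mutable in-place updates as soon as the
+      // trie root has been copied once:
+      //   HashMap(1 -> "a").concat(mutable.HashMap(2 -> "b"))  // == HashMap(1 -> "a", 2 -> "b")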
+ var current = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = hm.unimproveHash(next.hash) + val improved = improve(originalHash) + current = current.updated(next.key, next.value, originalHash, improved, 0, replaceValue = true) + + if (current ne rootNode) { + var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + + while (iter.hasNext) { + val next = iter.next() + val originalHash = hm.unimproveHash(next.hash) + shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, next.value, originalHash, improve(originalHash), 0, shallowlyMutableNodeMap) + } + return new HashMap(current) + } + } + this + case lhm: mutable.LinkedHashMap[K @unchecked, V @unchecked] => + val iter = lhm.entryIterator + var current = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhm.unimproveHash(next.hash) + val improved = improve(originalHash) + current = current.updated(next.key, next.value, originalHash, improved, 0, replaceValue = true) + + if (current ne rootNode) { + var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhm.unimproveHash(next.hash) + shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, next.value, originalHash, improve(originalHash), 0, shallowlyMutableNodeMap) + } + return new HashMap(current) + } + } + this + case _ => + class accum extends AbstractFunction2[K, V1, Unit] with Function1[(K, V1), Unit] { + var changed = false + var shallowlyMutableNodeMap: Int = 0 + var current: BitmapIndexedMapNode[K, V1] = rootNode + def apply(kv: (K, V1)) = apply(kv._1, kv._2) + def apply(key: K, value: V1): Unit = { + val originalHash = key.## + val improved = improve(originalHash) + if (!changed) { + current = current.updated(key, value, originalHash, improved, 0, replaceValue = true) + if (current ne rootNode) { + // Note: We could have started with shallowlyMutableNodeMap = 0, however this way, in the case that + // the first changed key ended up in a subnode beneath root, we mark that root right away as being + // shallowly mutable. + // + // since key->value has just been inserted, and certainly caused a new root node to be created, we can say with + // certainty that it either caused a new subnode to be created underneath `current`, in which case we should + // carry on mutating that subnode, or it ended up as a child data pair of the root, in which case, no harm is + // done by including its bit position in the shallowlyMutableNodeMap anyways. 
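+            //
+            // Illustrative arithmetic (hypothetical hash value): if improved == 0x12345678 then
+            // maskFrom(improved, 0) == improved & 0x1f == 0x18, and bitposFrom(0x18) == 1 << 0x18,
+            // i.e. the root-level slot that the freshly inserted entry occupies.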
+            changed = true
+            shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0))
+          }
+        } else {
+          shallowlyMutableNodeMap = current.updateWithShallowMutations(key, value, originalHash, improved, 0, shallowlyMutableNodeMap)
+        }
+      }
+      }
+      that match {
+        case thatMap: Map[K, V1] =>
+          if (thatMap.isEmpty) this
+          else {
+            val accum = new accum
+            thatMap.foreachEntry(accum)
+            newHashMapOrThis(accum.current)
+          }
+        case _ =>
+          val it = that.iterator
+          if (it.isEmpty) this
+          else {
+            val accum = new accum
+            it.foreach(accum)
+            newHashMapOrThis(accum.current)
+          }
+      }
+  }
+
+  override def tail: HashMap[K, V] = this - head._1
+
+  override def init: HashMap[K, V] = this - last._1
+
+  override def head: (K, V) = iterator.next()
+
+  override def last: (K, V) = reverseIterator.next()
+
+  override def foreach[U](f: ((K, V)) => U): Unit = rootNode.foreach(f)
+
+  override def foreachEntry[U](f: (K, V) => U): Unit = rootNode.foreachEntry(f)
+
+  /** Applies a function to each key, value, and **original** hash value in this Map */
+  @`inline` private[collection] def foreachWithHash(f: (K, V, Int) => Unit): Unit = rootNode.foreachWithHash(f)
+
+  override def equals(that: Any): Boolean =
+    that match {
+      case map: HashMap[_, _] => (this eq map) || (this.rootNode == map.rootNode)
+      case _ => super.equals(that)
+    }
+
+  override def hashCode(): Int = {
+    if (isEmpty) MurmurHash3.emptyMapHash
+    else {
+      // Optimized to avoid recomputation of key hashcodes as these are cached in the nodes and can be assumed to be
+      // immutable.
+      val hashIterator = new MapKeyValueTupleHashIterator(rootNode)
+      val hash = MurmurHash3.unorderedHash(hashIterator, MurmurHash3.mapSeed)
+      // assert(hash == super.hashCode())
+      hash
+    }
+  }
+
+  override protected[this] def className = "HashMap"
+
+  /** Merges this HashMap with another HashMap by combining all key-value pairs of both maps, and delegating to a merge
+   * function to resolve any key collisions between the two HashMaps.
+   *
+   * @example {{{
+   *   val left = HashMap(1 -> 1, 2 -> 1)
+   *   val right = HashMap(2 -> 2, 3 -> 2)
+   *
+   *   val merged = left.merged(right){ case ((k0, v0), (k1, v1)) => (k0 + k1) -> (v0 + v1) }
+   *   // HashMap(1 -> 1, 3 -> 2, 4 -> 3)
+   *
+   * }}}
+   *
+   * @param that the HashMap to merge this HashMap with
+   * @param mergef the merge function which resolves collisions between the two HashMaps. If `mergef` is null, then
+   *               keys from `this` will overwrite keys from `that`, making the behaviour equivalent to
+   *               `that.concat(this)`
+   *
+   * @note In cases where `mergef` returns keys which themselves collide with other keys returned by `mergef`, or
+   *       found in `this` or `that`, it is not defined which value will be chosen.
+   *       For example:
+   *
+   *       Colliding multiple results of merging:
+   *       {{{
+   *         // key `3` collides between a result of merging keys `1` and `2`
+   *         val left = HashMap(1 -> 1, 2 -> 2)
+   *         val right = HashMap(1 -> 1, 2 -> 2)
+   *
+   *         val merged = left.merged(right){ case (_, (_, v1)) => 3 -> v1 }
+   *         // HashMap(3 -> 2) is returned, but it could also have returned HashMap(3 -> 1)
+   *       }}}
+   *       Colliding results of merging with other keys:
+   *       {{{
+   *         // key `2` collides between a result of merging `1`, and existing key `2`
+   *         val left = HashMap(1 -> 1, 2 -> 1)
+   *         val right = HashMap(1 -> 2)
+   *
+   *         val merged = left.merged(right)((_,_) => 2 -> 3)
+   *         // HashMap(2 -> 1) is returned, but it could also have returned HashMap(2 -> 3)
+   *       }}}
+   *
+   */
+  def merged[V1 >: V](that: HashMap[K, V1])(mergef: ((K, V), (K, V1)) => (K, V1)): HashMap[K, V1] =
+    if (mergef == null) {
+      that ++ this
+    } else {
+      if (isEmpty) that
+      else if (that.isEmpty) this
+      else if (size == 1) {
+        val payload@(k, v) = rootNode.getPayload(0)
+        val originalHash = rootNode.getHash(0)
+        val improved = improve(originalHash)
+
+        if (that.rootNode.containsKey(k, originalHash, improved, 0)) {
+          val thatPayload = that.rootNode.getTuple(k, originalHash, improved, 0)
+          val (mergedK, mergedV) = mergef(payload, thatPayload)
+          val mergedOriginalHash = mergedK.##
+          val mergedImprovedHash = improve(mergedOriginalHash)
+          new HashMap(that.rootNode.removed(thatPayload._1, originalHash, improved, 0).updated(mergedK, mergedV, mergedOriginalHash, mergedImprovedHash, 0, replaceValue = true))
+        } else {
+          new HashMap(that.rootNode.updated(k, v, originalHash, improved, 0, replaceValue = true))
+        }
+      } else if (that.size == 1) {
+        val thatPayload@(k, v) = that.rootNode.getPayload(0)
+        val thatOriginalHash = that.rootNode.getHash(0)
+        val thatImproved = improve(thatOriginalHash)
+
+        if (rootNode.containsKey(k, thatOriginalHash, thatImproved, 0)) {
+          val payload = rootNode.getTuple(k, thatOriginalHash, thatImproved, 0)
+          val (mergedK, mergedV) = mergef(payload, thatPayload)
+          val mergedOriginalHash = mergedK.##
+          val mergedImprovedHash = improve(mergedOriginalHash)
+          new HashMap(rootNode.updated(mergedK, mergedV, mergedOriginalHash, mergedImprovedHash, 0, replaceValue = true))
+        } else {
+          new HashMap(rootNode.updated(k, v, thatOriginalHash, thatImproved, 0, replaceValue = true))
+        }
+      } else {
+        val builder = new HashMapBuilder[K, V1]
+        rootNode.mergeInto(that.rootNode, builder, 0)(mergef)
+        builder.result()
+      }
+    }
+
+  override def transform[W](f: (K, V) => W): HashMap[K, W] =
+    newHashMapOrThis(rootNode.transform[Any](f)).asInstanceOf[HashMap[K, W]]
+
+  override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): HashMap[K, V] = {
+    val newRootNode = rootNode.filterImpl(pred, isFlipped)
+    if (newRootNode eq rootNode) this
+    else if (newRootNode.size == 0) HashMap.empty
+    else new HashMap(newRootNode)
+  }
+
+  override def removedAll(keys: IterableOnce[K]^): HashMap[K, V] = {
+    if (isEmpty) {
+      this
+    } else {
+      keys match {
+        case hashSet: HashSet[K] =>
+          if (hashSet.isEmpty) {
+            this
+          } else {
+            // TODO: Remove all keys from the hashSet in a sub-linear fashion by only visiting the nodes in the tree
+            // This can be a direct port of the implementation of `SetNode[A]#diff(SetNode[A])`
+            val newRootNode = new MapNodeRemoveAllSetNodeIterator(hashSet.rootNode).removeAll(rootNode)
+            if (newRootNode eq rootNode) this
+            else if (newRootNode.size <= 0) HashMap.empty
+            else new HashMap(newRootNode)
+          }
+        case hashSet: collection.mutable.HashSet[K] =>
+          if (hashSet.isEmpty) {
+            this
+          } else {
+            val iter = hashSet.nodeIterator
+            var curr = rootNode
+
+            while (iter.hasNext) {
+              val next = iter.next()
+              val originalHash = hashSet.unimproveHash(next.hash)
+              val improved = improve(originalHash)
+              curr = curr.removed(next.key, originalHash, improved, 0)
+              if (curr.size == 0) {
+                return HashMap.empty
+              }
+            }
+            newHashMapOrThis(curr)
+          }
+        case lhashSet: collection.mutable.LinkedHashSet[K] =>
+          if (lhashSet.isEmpty) {
+            this
+          } else {
+            val iter = lhashSet.entryIterator
+            var curr = rootNode
+
+            while (iter.hasNext) {
+              val next = iter.next()
+              val originalHash = lhashSet.unimproveHash(next.hash)
+              val improved = improve(originalHash)
+              curr = curr.removed(next.key, originalHash, improved, 0)
+              if (curr.size == 0) {
+                return HashMap.empty
+              }
+            }
+            newHashMapOrThis(curr)
+          }
+        case _ =>
+          val iter = keys.iterator
+          var curr = rootNode
+          while (iter.hasNext) {
+            val next = iter.next()
+            val originalHash = next.##
+            val improved = improve(originalHash)
+            curr = curr.removed(next, originalHash, improved, 0)
+            if (curr.size == 0) {
+              return HashMap.empty
+            }
+          }
+          newHashMapOrThis(curr)
+      }
+    }
+  }
+
+  override def partition(p: ((K, V)) => Boolean): (HashMap[K, V], HashMap[K, V]) = {
+    // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+    // in a minor release without breaking binary compatibility.
+    //
+    // In particular, `partition` could be optimized to traverse the trie node-by-node, splitting each node into two,
+    // based on the result of applying `p` to its elements and subnodes.
+    super.partition(p)
+  }
+
+  override def take(n: Int): HashMap[K, V] = {
+    // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+    // in a minor release without breaking binary compatibility.
+    //
+    // In particular, `take` could be optimized to construct a new trie structure by visiting each node, and including
+    // those nodes in the resulting trie, until `n` total elements have been included.
+    super.take(n)
+  }
+
+  override def takeRight(n: Int): HashMap[K, V] = {
+    // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+    // in a minor release without breaking binary compatibility.
+    //
+    // In particular, `takeRight` could be optimized to construct a new trie structure by visiting each node in reverse,
+    // and including those nodes in the resulting trie, until `n` total elements have been included.
+    super.takeRight(n)
+  }
+
+  override def takeWhile(p: ((K, V)) => Boolean): HashMap[K, V] = {
+    // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+    // in a minor release without breaking binary compatibility.
+    //
+    // In particular, `takeWhile` could be optimized to construct a new trie structure by visiting each node, and
+    // including those nodes in the resulting trie, until `p` returns `false`
+    super.takeWhile(p)
+  }
+
+  override def dropWhile(p: ((K, V)) => Boolean): HashMap[K, V] = {
+    // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+    // in a minor release without breaking binary compatibility.
+    //
+    // In particular, `dropWhile` could be optimized to construct a new trie structure by visiting each node, and
+    // dropping those nodes in the resulting trie, until `p` first returns `false`
+    super.dropWhile(p)
+  }
+
+  override def dropRight(n: Int): HashMap[K, V] = {
+    // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+    // in a minor release without breaking binary compatibility.
+    //
+    // In particular, `dropRight` could be optimized to construct a new trie structure by visiting each node, in reverse
+    // order, and dropping all nodes until `n` elements have been dropped
+    super.dropRight(n)
+  }
+
+  override def drop(n: Int): HashMap[K, V] = {
+    // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+    // in a minor release without breaking binary compatibility.
+    //
+    // In particular, `drop` could be optimized to construct a new trie structure by visiting each node, and
+    // dropping all nodes until `n` elements have been dropped
+    super.drop(n)
+  }
+
+  override def span(p: ((K, V)) => Boolean): (HashMap[K, V], HashMap[K, V]) = {
+    // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+    // in a minor release without breaking binary compatibility.
+    //
+    // In particular, `span` could be optimized to construct a new trie structure by visiting each node, and
+    // keeping each node and element until `p` returns false, then including the remaining nodes in the second result.
+    // This would avoid having to rebuild most of the trie, and would eliminate the need to perform hashing and equality
+    // checks.
+    super.span(p)
+  }
+
+}
+
+private[immutable] object MapNode {
+
+  private final val EmptyMapNode = new BitmapIndexedMapNode(0, 0, Array.empty, Array.empty, 0, 0)
+
+  def empty[K, V]: BitmapIndexedMapNode[K, V] = EmptyMapNode.asInstanceOf[BitmapIndexedMapNode[K, V]]
+
+  final val TupleLength = 2
+
+}
+
+
+private[immutable] sealed abstract class MapNode[K, +V] extends Node[MapNode[K, V @uV]] {
+  def apply(key: K, originalHash: Int, hash: Int, shift: Int): V
+
+  def get(key: K, originalHash: Int, hash: Int, shift: Int): Option[V]
+
+  def getOrElse[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int, f: => V1): V1
+
+  def containsKey(key: K, originalHash: Int, hash: Int, shift: Int): Boolean
+
+  /** Returns a MapNode with the passed key-value assignment added
+   *
+   * @param key the key to add to the MapNode
+   * @param value the value to associate with `key`
+   * @param originalHash the original hash of `key`
+   * @param hash the improved hash of `key`
+   * @param shift the shift of the node (distanceFromRoot * BitPartitionSize)
+   * @param replaceValue if true, then the value currently associated to `key` will be replaced with the passed value
+   *                     argument.
+   *                     if false, then the key will be inserted if not already present, however if the key is present
+   *                     then the passed value will not replace the current value. That is, if `false`, then this
+   *                     method has `update if not exists` semantics.
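+   *
+   * A minimal usage sketch of the two modes (`node`, `h` and `ih` are hypothetical, for illustration only):
+   * {{{
+   *   val h = "k".##; val ih = improve(h)
+   *   node.updated("k", 1, h, ih, 0, replaceValue = true)  // upsert: always stores 1 under "k"
+   *   node.updated("k", 1, h, ih, 0, replaceValue = false) // keeps an existing value for "k", inserts otherwise
+   * }}}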
+ */ + def updated[V1 >: V](key: K, value: V1, originalHash: Int, hash: Int, shift: Int, replaceValue: Boolean): MapNode[K, V1] + + def removed[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int): MapNode[K, V1] + + def hasNodes: Boolean + + def nodeArity: Int + + def getNode(index: Int): MapNode[K, V] + + def hasPayload: Boolean + + def payloadArity: Int + + def getKey(index: Int): K + + def getValue(index: Int): V + + def getPayload(index: Int): (K, V) + + def size: Int + + def foreach[U](f: ((K, V)) => U): Unit + + def foreachEntry[U](f: (K, V) => U): Unit + + def foreachWithHash(f: (K, V, Int) => Unit): Unit + + def transform[W](f: (K, V) => W): MapNode[K, W] + + def copy(): MapNode[K, V] + + def concat[V1 >: V](that: MapNode[K, V1], shift: Int): MapNode[K, V1] + + def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): MapNode[K, V] + + /** Merges this node with that node, adding each resulting tuple to `builder` + * + * `this` should be a node from `left` hashmap in `left.merged(right)(mergef)` + * + * @param that node from the "right" HashMap. Must also be at the same "path" or "position" within the right tree, + * as `this` is, within the left tree + */ + def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit + + /** Returns the exact (equal by reference) key, and value, associated to a given key. + * If the key is not bound to a value, then an exception is thrown + */ + def getTuple(key: K, originalHash: Int, hash: Int, shift: Int): (K, V) + + /** Adds all key-value pairs to a builder */ + def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): Unit +} + +private final class BitmapIndexedMapNode[K, +V]( + var dataMap: Int, + var nodeMap: Int, + var content: Array[Any], + var originalHashes: Array[Int], + var size: Int, + var cachedJavaKeySetHashCode: Int) extends MapNode[K, V] { + + releaseFence() + + import MapNode._ + import Node._ + + /* + assert(checkInvariantContentIsWellTyped()) + assert(checkInvariantSubNodesAreCompacted()) + + private final def checkInvariantSubNodesAreCompacted(): Boolean = + new MapKeyValueTupleIterator[K, V](this).size - payloadArity >= 2 * nodeArity + + private final def checkInvariantContentIsWellTyped(): Boolean = { + val predicate1 = TupleLength * payloadArity + nodeArity == content.length + + val predicate2 = Range(0, TupleLength * payloadArity) + .forall(i => content(i).isInstanceOf[MapNode[_, _]] == false) + + val predicate3 = Range(TupleLength * payloadArity, content.length) + .forall(i => content(i).isInstanceOf[MapNode[_, _]] == true) + + predicate1 && predicate2 && predicate3 + } + */ + + def getKey(index: Int): K = content(TupleLength * index).asInstanceOf[K] + def getValue(index: Int): V = content(TupleLength * index + 1).asInstanceOf[V] + + def getPayload(index: Int) = Tuple2( + content(TupleLength * index).asInstanceOf[K], + content(TupleLength * index + 1).asInstanceOf[V]) + + override def getHash(index: Int): Int = originalHashes(index) + + def getNode(index: Int): MapNode[K, V] = + content(content.length - 1 - index).asInstanceOf[MapNode[K, V]] + + def apply(key: K, originalHash: Int, keyHash: Int, shift: Int): V = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + if (key == getKey(index)) getValue(index) else throw new NoSuchElementException(s"key not found: $key") + } else if ((nodeMap & bitpos) != 0) { + getNode(indexFrom(nodeMap, mask, 
bitpos)).apply(key, originalHash, keyHash, shift + BitPartitionSize) + } else { + throw new NoSuchElementException(s"key not found: $key") + } + } + + def get(key: K, originalHash: Int, keyHash: Int, shift: Int): Option[V] = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val key0 = this.getKey(index) + if (key == key0) Some(this.getValue(index)) else None + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + this.getNode(index).get(key, originalHash, keyHash, shift + BitPartitionSize) + } else { + None + } + } + + override def getTuple(key: K, originalHash: Int, hash: Int, shift: Int): (K, V) = { + val mask = maskFrom(hash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val payload = getPayload(index) + if (key == payload._1) payload else throw new NoSuchElementException + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + getNode(index).getTuple(key, originalHash, hash, shift + BitPartitionSize) + } else { + throw new NoSuchElementException + } + } + + def getOrElse[V1 >: V](key: K, originalHash: Int, keyHash: Int, shift: Int, f: => V1): V1 = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val key0 = this.getKey(index) + if (key == key0) getValue(index) else f + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + this.getNode(index).getOrElse(key, originalHash, keyHash, shift + BitPartitionSize, f) + } else { + f + } + } + + override def containsKey(key: K, originalHash: Int, keyHash: Int, shift: Int): Boolean = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + // assert(hashes(index) == computeHash(this.getKey(index)), (hashes.toSeq, content.toSeq, index, key, keyHash, shift)) + (originalHashes(index) == originalHash) && key == getKey(index) + } else if ((nodeMap & bitpos) != 0) { + getNode(indexFrom(nodeMap, mask, bitpos)).containsKey(key, originalHash, keyHash, shift + BitPartitionSize) + } else { + false + } + } + + + def updated[V1 >: V](key: K, value: V1, originalHash: Int, keyHash: Int, shift: Int, replaceValue: Boolean): BitmapIndexedMapNode[K, V1] = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val key0 = getKey(index) + val key0UnimprovedHash = getHash(index) + if (key0UnimprovedHash == originalHash && key0 == key) { + if (replaceValue) { + val value0 = this.getValue(index) + if ((key0.asInstanceOf[AnyRef] eq key.asInstanceOf[AnyRef]) && (value0.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef])) + this + else copyAndSetValue(bitpos, key, value) + } else this + } else { + val value0 = this.getValue(index) + val key0Hash = improve(key0UnimprovedHash) + val subNodeNew = mergeTwoKeyValPairs(key0, value0, key0UnimprovedHash, key0Hash, key, value, originalHash, keyHash, shift + BitPartitionSize) + + copyAndMigrateFromInlineToNode(bitpos, key0Hash, subNodeNew) + } + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + val subNodeNew = subNode.updated(key, value, originalHash, keyHash, shift + BitPartitionSize, 
replaceValue) + + if (subNodeNew eq subNode) this else copyAndSetNode(bitpos, subNode, subNodeNew) + } else copyAndInsertValue(bitpos, key, originalHash, keyHash, value) + } + + /** A variant of `updated` which performs shallow mutations on the root (`this`), and if possible, on immediately + * descendant child nodes (only one level beneath `this`) + * + * The caller should pass a bitmap of child nodes of this node, which this method may mutate. + * If this method may mutate a child node, then if the updated key-value belongs in that child node, it will + * be shallowly mutated (its children will not be mutated). + * + * If instead this method may not mutate the child node in which the to-be-updated key-value pair belongs, then + * that child will be updated immutably, but the result will be mutably re-inserted as a child of this node. + * + * @param key the key to update + * @param value the value to set `key` to + * @param originalHash key.## + * @param keyHash the improved hash + * @param shallowlyMutableNodeMap bitmap of child nodes of this node, which can be shallowly mutated + * during the call to this method + * + * @return Int which is the bitwise OR of shallowlyMutableNodeMap and any freshly created nodes, which will be + * available for mutations in subsequent calls. + */ + def updateWithShallowMutations[V1 >: V](key: K, value: V1, originalHash: Int, keyHash: Int, shift: Int, shallowlyMutableNodeMap: Int): Int = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val key0 = getKey(index) + val key0UnimprovedHash = getHash(index) + if (key0UnimprovedHash == originalHash && key0 == key) { + val value0 = this.getValue(index) + if (!((key0.asInstanceOf[AnyRef] eq key.asInstanceOf[AnyRef]) && (value0.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]))) { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + content(idx + 1) = value + } + shallowlyMutableNodeMap + } else { + val value0 = this.getValue(index) + val key0Hash = improve(key0UnimprovedHash) + + val subNodeNew = mergeTwoKeyValPairs(key0, value0, key0UnimprovedHash, key0Hash, key, value, originalHash, keyHash, shift + BitPartitionSize) + migrateFromInlineToNodeInPlace(bitpos, key0Hash, subNodeNew) + shallowlyMutableNodeMap | bitpos + } + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + val subNodeSize = subNode.size + val subNodeHashCode = subNode.cachedJavaKeySetHashCode + + var returnMutableNodeMap = shallowlyMutableNodeMap + + val subNodeNew: MapNode[K, V1] = subNode match { + case subNodeBm: BitmapIndexedMapNode[K, V] if (bitpos & shallowlyMutableNodeMap) != 0 => + subNodeBm.updateWithShallowMutations(key, value, originalHash, keyHash, shift + BitPartitionSize, 0) + subNodeBm + case _ => + val result = subNode.updated(key, value, originalHash, keyHash, shift + BitPartitionSize, replaceValue = true) + if (result ne subNode) { + returnMutableNodeMap |= bitpos + } + result + } + + this.content(this.content.length - 1 - this.nodeIndex(bitpos)) = subNodeNew + this.size = this.size - subNodeSize + subNodeNew.size + this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - subNodeHashCode + subNodeNew.cachedJavaKeySetHashCode + returnMutableNodeMap + } else { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length + TupleLength) + + // copy 'src' and insert 2 
element(s) at position 'idx'
+      arraycopy(src, 0, dst, 0, idx)
+      dst(idx) = key
+      dst(idx + 1) = value
+      arraycopy(src, idx, dst, idx + TupleLength, src.length - idx)
+
+      this.dataMap |= bitpos
+      this.content = dst
+      this.originalHashes = insertElement(originalHashes, dataIx, originalHash)
+      this.size += 1
+      this.cachedJavaKeySetHashCode += keyHash
+      shallowlyMutableNodeMap
+    }
+  }
+
+  def removed[V1 >: V](key: K, originalHash: Int, keyHash: Int, shift: Int): BitmapIndexedMapNode[K, V1] = {
+    val mask = maskFrom(keyHash, shift)
+    val bitpos = bitposFrom(mask)
+
+    if ((dataMap & bitpos) != 0) {
+      val index = indexFrom(dataMap, mask, bitpos)
+      val key0 = this.getKey(index)
+
+      if (key0 == key) {
+        if (this.payloadArity == 2 && this.nodeArity == 0) {
+          /*
+           * Create new node with remaining pair. The new node will either a) become the new root
+           * returned, or b) be unwrapped and inlined during returning.
+           */
+          val newDataMap = if (shift == 0) (dataMap ^ bitpos) else bitposFrom(maskFrom(keyHash, 0))
+          if (index == 0)
+            new BitmapIndexedMapNode[K, V1](newDataMap, 0, Array(getKey(1), getValue(1)), Array(originalHashes(1)), 1, improve(getHash(1)))
+          else
+            new BitmapIndexedMapNode[K, V1](newDataMap, 0, Array(getKey(0), getValue(0)), Array(originalHashes(0)), 1, improve(getHash(0)))
+        } else copyAndRemoveValue(bitpos, keyHash)
+      } else this
+    } else if ((nodeMap & bitpos) != 0) {
+      val index = indexFrom(nodeMap, mask, bitpos)
+      val subNode = this.getNode(index)
+
+      val subNodeNew = subNode.removed(key, originalHash, keyHash, shift + BitPartitionSize)
+      // assert(subNodeNew.size != 0, "Sub-node must have at least one element.")
+
+      if (subNodeNew eq subNode) return this
+
+      // cache just in case subNodeNew is a hashCollision node, in which case a little arithmetic is avoided
+      // in Vector#length
+      val subNodeNewSize = subNodeNew.size
+
+      if (subNodeNewSize == 1) {
+        if (this.size == subNode.size) {
+          // subNode is the only child (no other data or node children of `this` exist)
+          // escalate (singleton or empty) result
+          subNodeNew.asInstanceOf[BitmapIndexedMapNode[K, V]]
+        } else {
+          // inline value (move to front)
+          copyAndMigrateFromNodeToInline(bitpos, subNode, subNodeNew)
+        }
+      } else if (subNodeNewSize > 1) {
+        // modify current node (set replacement node)
+        copyAndSetNode(bitpos, subNode, subNodeNew)
+      } else this
+    } else this
+  }
+
+  def mergeTwoKeyValPairs[V1 >: V](key0: K, value0: V1, originalHash0: Int, keyHash0: Int, key1: K, value1: V1, originalHash1: Int, keyHash1: Int, shift: Int): MapNode[K, V1] = {
+    // assert(key0 != key1)
+
+    if (shift >= HashCodeLength) {
+      new HashCollisionMapNode[K, V1](originalHash0, keyHash0, Vector((key0, value0), (key1, value1)))
+    } else {
+      val mask0 = maskFrom(keyHash0, shift)
+      val mask1 = maskFrom(keyHash1, shift)
+      val newCachedHash = keyHash0 + keyHash1
+
+      if (mask0 != mask1) {
+        // unique prefixes, payload fits on same level
+        val dataMap = bitposFrom(mask0) | bitposFrom(mask1)
+
+        if (mask0 < mask1) {
+          new BitmapIndexedMapNode[K, V1](dataMap, 0, Array(key0, value0, key1, value1), Array(originalHash0, originalHash1), 2, newCachedHash)
+        } else {
+          new BitmapIndexedMapNode[K, V1](dataMap, 0, Array(key1, value1, key0, value0), Array(originalHash1, originalHash0), 2, newCachedHash)
+        }
+      } else {
+        // identical prefixes, payload must be disambiguated deeper in the trie
+        val nodeMap = bitposFrom(mask0)
+        val node = mergeTwoKeyValPairs(key0, value0, originalHash0, keyHash0, key1, value1, originalHash1, keyHash1, shift + BitPartitionSize)
+        new
BitmapIndexedMapNode[K, V1](0, nodeMap, Array(node), Array.emptyIntArray, node.size, node.cachedJavaKeySetHashCode) + } + } + } + + def hasNodes: Boolean = nodeMap != 0 + + def nodeArity: Int = bitCount(nodeMap) + + def hasPayload: Boolean = dataMap != 0 + + def payloadArity: Int = bitCount(dataMap) + + def dataIndex(bitpos: Int) = bitCount(dataMap & (bitpos - 1)) + + def nodeIndex(bitpos: Int) = bitCount(nodeMap & (bitpos - 1)) + + def copyAndSetValue[V1 >: V](bitpos: Int, newKey: K, newValue: V1): BitmapIndexedMapNode[K, V1] = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length) + + // copy 'src' and set 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, src.length) + //dst(idx) = newKey + dst(idx + 1) = newValue + new BitmapIndexedMapNode[K, V1](dataMap, nodeMap, dst, originalHashes, size, cachedJavaKeySetHashCode) + } + + def copyAndSetNode[V1 >: V](bitpos: Int, oldNode: MapNode[K, V1], newNode: MapNode[K, V1]): BitmapIndexedMapNode[K, V1] = { + val idx = this.content.length - 1 - this.nodeIndex(bitpos) + + val src = this.content + val dst = new Array[Any](src.length) + + // copy 'src' and set 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, src.length) + dst(idx) = newNode + new BitmapIndexedMapNode[K, V1]( + dataMap, + nodeMap, + dst, + originalHashes, + size - oldNode.size + newNode.size, + cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + newNode.cachedJavaKeySetHashCode + ) + } + + def copyAndInsertValue[V1 >: V](bitpos: Int, key: K, originalHash: Int, keyHash: Int, value: V1): BitmapIndexedMapNode[K, V1] = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length + TupleLength) + + // copy 'src' and insert 2 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + dst(idx) = key + dst(idx + 1) = value + arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) + + val dstHashes = insertElement(originalHashes, dataIx, originalHash) + + new BitmapIndexedMapNode[K, V1](dataMap | bitpos, nodeMap, dst, dstHashes, size + 1, cachedJavaKeySetHashCode + keyHash) + } + + def copyAndRemoveValue(bitpos: Int, keyHash: Int): BitmapIndexedMapNode[K, V] = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length - TupleLength) + + // copy 'src' and remove 2 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + arraycopy(src, idx + TupleLength, dst, idx, src.length - idx - TupleLength) + + val dstHashes = removeElement(originalHashes, dataIx) + + new BitmapIndexedMapNode[K, V](dataMap ^ bitpos, nodeMap, dst, dstHashes, size - 1, cachedJavaKeySetHashCode - keyHash) + } + + /** Variant of `copyAndMigrateFromInlineToNode` which mutates `this` rather than returning a new node. 
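+ * Mutation is safe here only because callers invoke this on nodes that are not yet shared
+ * (e.g. a root freshly copied during a bulk update); the published trie remains persistent.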
+ * + * @param bitpos the bit position of the data to migrate to node + * @param keyHash the improved hash of the key currently at `bitpos` + * @param node the node to place at `bitpos` beneath `this` + */ + def migrateFromInlineToNodeInPlace[V1 >: V](bitpos: Int, keyHash: Int, node: MapNode[K, V1]): this.type = { + val dataIx = dataIndex(bitpos) + val idxOld = TupleLength * dataIx + val idxNew = this.content.length - TupleLength - nodeIndex(bitpos) + + val src = this.content + val dst = new Array[Any](src.length - TupleLength + 1) + + // copy 'src' and remove 2 element(s) at position 'idxOld' and + // insert 1 element(s) at position 'idxNew' + // assert(idxOld <= idxNew) + arraycopy(src, 0, dst, 0, idxOld) + arraycopy(src, idxOld + TupleLength, dst, idxOld, idxNew - idxOld) + dst(idxNew) = node + arraycopy(src, idxNew + TupleLength, dst, idxNew + 1, src.length - idxNew - TupleLength) + + val dstHashes = removeElement(originalHashes, dataIx) + + this.dataMap = dataMap ^ bitpos + this.nodeMap = nodeMap | bitpos + this.content = dst + this.originalHashes = dstHashes + this.size = size - 1 + node.size + this.cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - keyHash + node.cachedJavaKeySetHashCode + this + } + + def copyAndMigrateFromInlineToNode[V1 >: V](bitpos: Int, keyHash: Int, node: MapNode[K, V1]): BitmapIndexedMapNode[K, V1] = { + val dataIx = dataIndex(bitpos) + val idxOld = TupleLength * dataIx + val idxNew = this.content.length - TupleLength - nodeIndex(bitpos) + + val src = this.content + val dst = new Array[Any](src.length - TupleLength + 1) + + // copy 'src' and remove 2 element(s) at position 'idxOld' and + // insert 1 element(s) at position 'idxNew' + // assert(idxOld <= idxNew) + arraycopy(src, 0, dst, 0, idxOld) + arraycopy(src, idxOld + TupleLength, dst, idxOld, idxNew - idxOld) + dst(idxNew) = node + arraycopy(src, idxNew + TupleLength, dst, idxNew + 1, src.length - idxNew - TupleLength) + + val dstHashes = removeElement(originalHashes, dataIx) + + new BitmapIndexedMapNode[K, V1]( + dataMap = dataMap ^ bitpos, + nodeMap = nodeMap | bitpos, + content = dst, + originalHashes = dstHashes, + size = size - 1 + node.size, + cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - keyHash + node.cachedJavaKeySetHashCode + ) + } + + def copyAndMigrateFromNodeToInline[V1 >: V](bitpos: Int, oldNode: MapNode[K, V1], node: MapNode[K, V1]): BitmapIndexedMapNode[K, V1] = { + val idxOld = this.content.length - 1 - nodeIndex(bitpos) + val dataIxNew = dataIndex(bitpos) + val idxNew = TupleLength * dataIxNew + + val key = node.getKey(0) + val value = node.getValue(0) + val src = this.content + val dst = new Array[Any](src.length - 1 + TupleLength) + + // copy 'src' and remove 1 element(s) at position 'idxOld' and + // insert 2 element(s) at position 'idxNew' + // assert(idxOld >= idxNew) + arraycopy(src, 0, dst, 0, idxNew) + dst(idxNew) = key + dst(idxNew + 1) = value + arraycopy(src, idxNew, dst, idxNew + TupleLength, idxOld - idxNew) + arraycopy(src, idxOld + 1, dst, idxOld + TupleLength, src.length - idxOld - 1) + val hash = node.getHash(0) + val dstHashes = insertElement(originalHashes, dataIxNew, hash) + new BitmapIndexedMapNode[K, V1]( + dataMap = dataMap | bitpos, + nodeMap = nodeMap ^ bitpos, + content = dst, + originalHashes = dstHashes, + size = size - oldNode.size + 1, + cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + node.cachedJavaKeySetHashCode + ) + } + + override def foreach[U](f: ((K, V)) => U): Unit = { + val iN = payloadArity // 
arity doesn't change during this operation + var i = 0 + while (i < iN) { + f(getPayload(i)) + i += 1 + } + + val jN = nodeArity // arity doesn't change during this operation + var j = 0 + while (j < jN) { + getNode(j).foreach(f) + j += 1 + } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + val iN = payloadArity // arity doesn't change during this operation + var i = 0 + while (i < iN) { + f(getKey(i), getValue(i)) + i += 1 + } + + val jN = nodeArity // arity doesn't change during this operation + var j = 0 + while (j < jN) { + getNode(j).foreachEntry(f) + j += 1 + } + } + + override def foreachWithHash(f: (K, V, Int) => Unit): Unit = { + var i = 0 + val iN = payloadArity // arity doesn't change during this operation + while (i < iN) { + f(getKey(i), getValue(i), getHash(i)) + i += 1 + } + + val jN = nodeArity // arity doesn't change during this operation + var j = 0 + while (j < jN) { + getNode(j).foreachWithHash(f) + j += 1 + } + } + override def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): Unit = { + var i = 0 + val iN = payloadArity + val jN = nodeArity + while (i < iN) { + builder.addOne(getKey(i), getValue(i), getHash(i)) + i += 1 + } + + var j = 0 + while (j < jN) { + getNode(j).buildTo(builder) + j += 1 + } + } + + override def transform[W](f: (K, V) => W): BitmapIndexedMapNode[K, W] = { + var newContent: Array[Any] = null + val iN = payloadArity // arity doesn't change during this operation + val jN = nodeArity // arity doesn't change during this operation + val newContentLength = content.length + var i = 0 + while (i < iN) { + val key = getKey(i) + val value = getValue(i) + val newValue = f(key, value) + if (newContent eq null) { + if (newValue.asInstanceOf[AnyRef] ne value.asInstanceOf[AnyRef]) { + newContent = content.clone() + newContent(TupleLength * i + 1) = newValue + } + } else { + newContent(TupleLength * i + 1) = newValue + } + i += 1 + } + + var j = 0 + while (j < jN) { + val node = getNode(j) + val newNode = node.transform(f) + if (newContent eq null) { + if (newNode ne node) { + newContent = content.clone() + newContent(newContentLength - j - 1) = newNode + } + } else + newContent(newContentLength - j - 1) = newNode + j += 1 + } + if (newContent eq null) this.asInstanceOf[BitmapIndexedMapNode[K, W]] + else new BitmapIndexedMapNode[K, W](dataMap, nodeMap, newContent, originalHashes, size, cachedJavaKeySetHashCode) + } + + override def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit = that match { + case bm: BitmapIndexedMapNode[K, V] @unchecked => + if (size == 0) { + that.buildTo(builder) + return + } else if (bm.size == 0) { + buildTo(builder) + return + } + + val allMap = dataMap | bm.dataMap | nodeMap | bm.nodeMap + + val minIndex: Int = Integer.numberOfTrailingZeros(allMap) + val maxIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) + + { + var index = minIndex + var leftIdx = 0 + var rightIdx = 0 + + while (index < maxIndex) { + val bitpos = bitposFrom(index) + + if ((bitpos & dataMap) != 0) { + val leftKey = getKey(leftIdx) + val leftValue = getValue(leftIdx) + val leftOriginalHash = getHash(leftIdx) + if ((bitpos & bm.dataMap) != 0) { + // left data and right data + val rightKey = bm.getKey(rightIdx) + val rightValue = bm.getValue(rightIdx) + val rightOriginalHash = bm.getHash(rightIdx) + if (leftOriginalHash == rightOriginalHash && leftKey == rightKey) { + builder.addOne(mergef((leftKey, leftValue), (rightKey, rightValue))) + } else { + 
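+ // Same trie position but different keys: no real collision, so both entries are
+ // kept unchanged and `mergef` is not consulted.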
builder.addOne(leftKey, leftValue, leftOriginalHash) + builder.addOne(rightKey, rightValue, rightOriginalHash) + } + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + // left data and right node + val subNode = bm.getNode(bm.nodeIndex(bitpos)) + val leftImprovedHash = improve(leftOriginalHash) + val removed = subNode.removed(leftKey, leftOriginalHash, leftImprovedHash, shift + BitPartitionSize) + if (removed eq subNode) { + // no overlap in leftData and rightNode, just build both children to builder + subNode.buildTo(builder) + builder.addOne(leftKey, leftValue, leftOriginalHash, leftImprovedHash) + } else { + // there is collision, so special treatment for that key + removed.buildTo(builder) + builder.addOne(mergef((leftKey, leftValue), subNode.getTuple(leftKey, leftOriginalHash, leftImprovedHash, shift + BitPartitionSize))) + } + } else { + // left data and nothing on right + builder.addOne(leftKey, leftValue, leftOriginalHash) + } + leftIdx += 1 + } else if ((bitpos & nodeMap) != 0) { + if ((bitpos & bm.dataMap) != 0) { + // left node and right data + val rightKey = bm.getKey(rightIdx) + val rightValue = bm.getValue(rightIdx) + val rightOriginalHash = bm.getHash(rightIdx) + val rightImprovedHash = improve(rightOriginalHash) + + val subNode = getNode(nodeIndex(bitpos)) + val removed = subNode.removed(rightKey, rightOriginalHash, rightImprovedHash, shift + BitPartitionSize) + if (removed eq subNode) { + // no overlap in leftNode and rightData, just build both children to builder + subNode.buildTo(builder) + builder.addOne(rightKey, rightValue, rightOriginalHash, rightImprovedHash) + } else { + // there is collision, so special treatment for that key + removed.buildTo(builder) + builder.addOne(mergef(subNode.getTuple(rightKey, rightOriginalHash, rightImprovedHash, shift + BitPartitionSize), (rightKey, rightValue))) + } + rightIdx += 1 + + } else if ((bitpos & bm.nodeMap) != 0) { + // left node and right node + getNode(nodeIndex(bitpos)).mergeInto(bm.getNode(bm.nodeIndex(bitpos)), builder, shift + BitPartitionSize)(mergef) + } else { + // left node and nothing on right + getNode(nodeIndex(bitpos)).buildTo(builder) + } + } else if ((bitpos & bm.dataMap) != 0) { + // nothing on left, right data + val dataIndex = bm.dataIndex(bitpos) + builder.addOne(bm.getKey(dataIndex),bm.getValue(dataIndex), bm.getHash(dataIndex)) + rightIdx += 1 + + } else if ((bitpos & bm.nodeMap) != 0) { + // nothing on left, right node + bm.getNode(bm.nodeIndex(bitpos)).buildTo(builder) + } + + index += 1 + } + } + case _: HashCollisionMapNode[_, _] => + throw new Exception("Cannot merge BitmapIndexedMapNode with HashCollisionMapNode") + } + + override def equals(that: Any): Boolean = + that match { + case node: BitmapIndexedMapNode[_, _] => + (this eq node) || + (this.cachedJavaKeySetHashCode == node.cachedJavaKeySetHashCode) && + (this.nodeMap == node.nodeMap) && + (this.dataMap == node.dataMap) && + (this.size == node.size) && + java.util.Arrays.equals(this.originalHashes, node.originalHashes) && + deepContentEquality(this.content, node.content, content.length) + case _ => false + } + + @`inline` private def deepContentEquality(a1: Array[Any], a2: Array[Any], length: Int): Boolean = { + if (a1 eq a2) + true + else { + var isEqual = true + var i = 0 + + while (isEqual && i < length) { + isEqual = a1(i) == a2(i) + i += 1 + } + + isEqual + } + } + + override def hashCode(): Int = + throw new UnsupportedOperationException("Trie nodes do not support hashing.") + + override def concat[V1 >: V](that: MapNode[K, 
V1], shift: Int): BitmapIndexedMapNode[K, V1] = that match { + case bm: BitmapIndexedMapNode[K, V] @unchecked => + if (size == 0) return bm + else if (bm.size == 0 || (bm eq this)) return this + else if (bm.size == 1) { + val originalHash = bm.getHash(0) + return this.updated(bm.getKey(0), bm.getValue(0), originalHash, improve(originalHash), shift, replaceValue = true) + } + // if we go through the merge and the result does not differ from `bm`, we can just return `bm`, to improve sharing + // So, `anyChangesMadeSoFar` will be set to `true` as soon as we encounter a difference between the + // currently-being-computed result, and `bm` + var anyChangesMadeSoFar = false + + val allMap = dataMap | bm.dataMap | nodeMap | bm.nodeMap + + // minimumIndex is inclusive -- it is the first index for which there is data or nodes + val minimumBitPos: Int = Node.bitposFrom(Integer.numberOfTrailingZeros(allMap)) + // maximumIndex is inclusive -- it is the last index for which there is data or nodes + // it could not be exclusive, because then upper bound in worst case (Node.BranchingFactor) would be out-of-bound + // of int bitposition representation + val maximumBitPos: Int = Node.bitposFrom(Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) - 1) + + var leftNodeRightNode = 0 + var leftDataRightNode = 0 + var leftNodeRightData = 0 + var leftDataOnly = 0 + var rightDataOnly = 0 + var leftNodeOnly = 0 + var rightNodeOnly = 0 + var leftDataRightDataMigrateToNode = 0 + var leftDataRightDataRightOverwrites = 0 + + var dataToNodeMigrationTargets = 0 + + { + var bitpos = minimumBitPos + var leftIdx = 0 + var rightIdx = 0 + var finished = false + + while (!finished) { + + if ((bitpos & dataMap) != 0) { + if ((bitpos & bm.dataMap) != 0) { + val leftOriginalHash = getHash(leftIdx) + if (leftOriginalHash == bm.getHash(rightIdx) && getKey(leftIdx) == bm.getKey(rightIdx)) { + leftDataRightDataRightOverwrites |= bitpos + } else { + leftDataRightDataMigrateToNode |= bitpos + dataToNodeMigrationTargets |= Node.bitposFrom(Node.maskFrom(improve(leftOriginalHash), shift)) + } + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + leftDataRightNode |= bitpos + } else { + leftDataOnly |= bitpos + } + leftIdx += 1 + } else if ((bitpos & nodeMap) != 0) { + if ((bitpos & bm.dataMap) != 0) { + leftNodeRightData |= bitpos + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + leftNodeRightNode |= bitpos + } else { + leftNodeOnly |= bitpos + } + } else if ((bitpos & bm.dataMap) != 0) { + rightDataOnly |= bitpos + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + rightNodeOnly |= bitpos + } + + if (bitpos == maximumBitPos) { + finished = true + } else { + bitpos = bitpos << 1 + } + } + } + + + val newDataMap = leftDataOnly | rightDataOnly | leftDataRightDataRightOverwrites + + val newNodeMap = + leftNodeRightNode | + leftDataRightNode | + leftNodeRightData | + leftNodeOnly | + rightNodeOnly | + dataToNodeMigrationTargets + + + if ((newDataMap == (rightDataOnly | leftDataRightDataRightOverwrites)) && (newNodeMap == rightNodeOnly)) { + // nothing from `this` will make it into the result -- return early + return bm + } + + val newDataSize = bitCount(newDataMap) + val newContentSize = (MapNode.TupleLength * newDataSize) + bitCount(newNodeMap) + + val newContent = new Array[Any](newContentSize) + val newOriginalHashes = new Array[Int](newDataSize) + var newSize = 0 + var newCachedHashCode = 0 + + { + var leftDataIdx = 0 + var rightDataIdx = 0 + var leftNodeIdx = 0 + var rightNodeIdx = 0 + + val nextShift = 
shift + Node.BitPartitionSize + + var compressedDataIdx = 0 + var compressedNodeIdx = 0 + + var bitpos = minimumBitPos + var finished = false + + while (!finished) { + + if ((bitpos & leftNodeRightNode) != 0) { + val rightNode = bm.getNode(rightNodeIdx) + val newNode = getNode(leftNodeIdx).concat(rightNode, nextShift) + if (rightNode ne newNode) { + anyChangesMadeSoFar = true + } + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + leftNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + + } else if ((bitpos & leftDataRightNode) != 0) { + val newNode = { + val n = bm.getNode(rightNodeIdx) + val leftKey = getKey(leftDataIdx) + val leftValue = getValue(leftDataIdx) + val leftOriginalHash = getHash(leftDataIdx) + val leftImproved = improve(leftOriginalHash) + + val updated = n.updated(leftKey, leftValue, leftOriginalHash, leftImproved, nextShift, replaceValue = false) + + if (updated ne n) { + anyChangesMadeSoFar = true + } + + updated + } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + leftDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } + else if ((bitpos & leftNodeRightData) != 0) { + anyChangesMadeSoFar = true + val newNode = { + val rightOriginalHash = bm.getHash(rightDataIdx) + getNode(leftNodeIdx).updated( + key = bm.getKey(rightDataIdx), + value = bm.getValue(rightDataIdx), + originalHash = bm.getHash(rightDataIdx), + hash = improve(rightOriginalHash), + shift = nextShift, + replaceValue = true + ) + } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftNodeIdx += 1 + rightDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + + } else if ((bitpos & leftDataOnly) != 0) { + anyChangesMadeSoFar = true + val originalHash = originalHashes(leftDataIdx) + newContent(MapNode.TupleLength * compressedDataIdx) = getKey(leftDataIdx).asInstanceOf[AnyRef] + newContent(MapNode.TupleLength * compressedDataIdx + 1) = getValue(leftDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + leftDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + } else if ((bitpos & rightDataOnly) != 0) { + val originalHash = bm.originalHashes(rightDataIdx) + newContent(MapNode.TupleLength * compressedDataIdx) = bm.getKey(rightDataIdx).asInstanceOf[AnyRef] + newContent(MapNode.TupleLength * compressedDataIdx + 1) = bm.getValue(rightDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + rightDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + } else if ((bitpos & leftNodeOnly) != 0) { + anyChangesMadeSoFar = true + val newNode = getNode(leftNodeIdx) + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & rightNodeOnly) != 0) { + val newNode = bm.getNode(rightNodeIdx) + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & leftDataRightDataMigrateToNode) != 0) { + anyChangesMadeSoFar = true + val newNode = { + val leftOriginalHash = getHash(leftDataIdx) + val 
rightOriginalHash = bm.getHash(rightDataIdx)
+
+              bm.mergeTwoKeyValPairs(
+                getKey(leftDataIdx), getValue(leftDataIdx), leftOriginalHash, improve(leftOriginalHash),
+                bm.getKey(rightDataIdx), bm.getValue(rightDataIdx), rightOriginalHash, improve(rightOriginalHash),
+                nextShift
+              )
+            }
+
+            newContent(newContentSize - compressedNodeIdx - 1) = newNode
+            compressedNodeIdx += 1
+            leftDataIdx += 1
+            rightDataIdx += 1
+            newSize += newNode.size
+            newCachedHashCode += newNode.cachedJavaKeySetHashCode
+          } else if ((bitpos & leftDataRightDataRightOverwrites) != 0) {
+            val originalHash = bm.originalHashes(rightDataIdx)
+            newContent(MapNode.TupleLength * compressedDataIdx) = bm.getKey(rightDataIdx).asInstanceOf[AnyRef]
+            newContent(MapNode.TupleLength * compressedDataIdx + 1) = bm.getValue(rightDataIdx).asInstanceOf[AnyRef]
+            newOriginalHashes(compressedDataIdx) = originalHash
+
+            compressedDataIdx += 1
+            rightDataIdx += 1
+            newSize += 1
+            newCachedHashCode += improve(originalHash)
+            leftDataIdx += 1
+          }
+
+          if (bitpos == maximumBitPos) {
+            finished = true
+          } else {
+            bitpos = bitpos << 1
+          }
+        }
+      }
+
+      if (anyChangesMadeSoFar)
+        new BitmapIndexedMapNode(
+          dataMap = newDataMap,
+          nodeMap = newNodeMap,
+          content = newContent,
+          originalHashes = newOriginalHashes,
+          size = newSize,
+          cachedJavaKeySetHashCode = newCachedHashCode
+        )
+      else bm
+
+    case _ =>
+      // should never happen -- hash collisions are never at the same level as bitmapIndexedMapNodes
+      throw new UnsupportedOperationException("Cannot concatenate a HashCollisionMapNode with a BitmapIndexedMapNode")
+  }
+
+  override def copy(): BitmapIndexedMapNode[K, V] = {
+    val contentClone = content.clone()
+    val contentLength = contentClone.length
+    var i = bitCount(dataMap) * TupleLength
+    while (i < contentLength) {
+      contentClone(i) = contentClone(i).asInstanceOf[MapNode[K, V]].copy()
+      i += 1
+    }
+    new BitmapIndexedMapNode[K, V](dataMap, nodeMap, contentClone, originalHashes.clone(), size, cachedJavaKeySetHashCode)
+  }
+
+  override def filterImpl(pred: ((K, V)) => Boolean, flipped: Boolean): BitmapIndexedMapNode[K, V] = {
+    if (size == 0) this
+    else if (size == 1) {
+      if (pred(getPayload(0)) != flipped) this else MapNode.empty
+    } else if (nodeMap == 0) {
+      // Performance optimization for nodes of depth 1:
+      //
+      // this node has no "node" children, all children are inlined data elems, therefore logic is significantly simpler
+      // approach:
+      //   * traverse the content array, accumulating in `newDataMap: Int` any bit positions of keys which pass the filter
+      //   * (bitCount(newDataMap) * TupleLength) tells us the new content array and originalHashes array size, so now perform allocations
+      //   * traverse the content array once more, placing each passing element (according to `newDataMap`) in the new content and originalHashes arrays
+      //
+      // note:
+      //   * this optimization significantly improves performance of not only small trees, but also larger trees, since
+      //     even non-root nodes are affected by this improvement, and large trees will consist of many nodes as
+      //     descendants
+      //
+      val minimumIndex: Int = Integer.numberOfTrailingZeros(dataMap)
+      val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(dataMap)
+
+      var newDataMap = 0
+      var newCachedHashCode = 0
+      var dataIndex = 0
+
+      var i = minimumIndex
+
+      while(i < maximumIndex) {
+        val bitpos = bitposFrom(i)
+
+        if ((bitpos & dataMap) != 0) {
+          val payload = getPayload(dataIndex)
+          val passed = pred(payload) != flipped
+
+          if (passed) {
+            newDataMap |= bitpos
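+            // Accumulate the improved hash incrementally so the filtered node's
+            // cachedJavaKeySetHashCode never needs to be recomputed from scratch.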
newCachedHashCode += improve(getHash(dataIndex)) + } + + dataIndex += 1 + } + + i += 1 + } + + if (newDataMap == 0) { + MapNode.empty + } else if (newDataMap == dataMap) { + this + } else { + val newSize = Integer.bitCount(newDataMap) + val newContent = new Array[Any](newSize * TupleLength) + val newOriginalHashCodes = new Array[Int](newSize) + val newMaximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(newDataMap) + + var j = Integer.numberOfTrailingZeros(newDataMap) + + var newDataIndex = 0 + + + while (j < newMaximumIndex) { + val bitpos = bitposFrom(j) + if ((bitpos & newDataMap) != 0) { + val oldIndex = indexFrom(dataMap, bitpos) + newContent(newDataIndex * TupleLength) = content(oldIndex * TupleLength) + newContent(newDataIndex * TupleLength + 1) = content(oldIndex * TupleLength + 1) + newOriginalHashCodes(newDataIndex) = originalHashes(oldIndex) + newDataIndex += 1 + } + j += 1 + } + + new BitmapIndexedMapNode(newDataMap, 0, newContent, newOriginalHashCodes, newSize, newCachedHashCode) + } + + + } else { + val allMap = dataMap | nodeMap + val minimumIndex: Int = Integer.numberOfTrailingZeros(allMap) + val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) + + var oldDataPassThrough = 0 + + // bitmap of nodes which, when filtered, returned a single-element node. These must be migrated to data + var nodeMigrateToDataTargetMap = 0 + // the queue of single-element, post-filter nodes + var nodesToMigrateToData: mutable.Queue[MapNode[K, V]] = null + + // bitmap of all nodes which, when filtered, returned themselves. They are passed forward to the returned node + var nodesToPassThroughMap = 0 + + // bitmap of any nodes which, after being filtered, returned a node that is not empty, but also not `eq` itself + // These are stored for later inclusion into the final `content` array + // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular) + var mapOfNewNodes = 0 + // each bit in `mapOfNewNodes` corresponds to one element in this queue + var newNodes: mutable.Queue[MapNode[K, V]] = null + + var newDataMap = 0 + var newNodeMap = 0 + var newSize = 0 + var newCachedHashCode = 0 + + var dataIndex = 0 + var nodeIndex = 0 + + var i = minimumIndex + while (i < maximumIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & dataMap) != 0) { + val payload = getPayload(dataIndex) + val passed = pred(payload) != flipped + + if (passed) { + newDataMap |= bitpos + oldDataPassThrough |= bitpos + newSize += 1 + newCachedHashCode += improve(getHash(dataIndex)) + } + + dataIndex += 1 + } else if ((bitpos & nodeMap) != 0) { + val oldSubNode = getNode(nodeIndex) + val newSubNode = oldSubNode.filterImpl(pred, flipped) + + newSize += newSubNode.size + newCachedHashCode += newSubNode.cachedJavaKeySetHashCode + + // if (newSubNode.size == 0) do nothing (drop it) + if (newSubNode.size > 1) { + newNodeMap |= bitpos + if (oldSubNode eq newSubNode) { + nodesToPassThroughMap |= bitpos + } else { + mapOfNewNodes |= bitpos + if (newNodes eq null) { + newNodes = mutable.Queue.empty[MapNode[K, V]] + } + newNodes += newSubNode + } + } else if (newSubNode.size == 1) { + newDataMap |= bitpos + nodeMigrateToDataTargetMap |= bitpos + if (nodesToMigrateToData eq null) { + nodesToMigrateToData = mutable.Queue() + } + nodesToMigrateToData += newSubNode + } + + nodeIndex += 1 + } + + i += 1 + } + + if (newSize == 0) { + MapNode.empty + } else if (newSize == size) { + this + } else { + val newDataSize = bitCount(newDataMap) + val newContentSize = 
(MapNode.TupleLength * newDataSize) + bitCount(newNodeMap) + val newContent = new Array[Any](newContentSize) + val newOriginalHashes = new Array[Int](newDataSize) + + val newAllMap = newDataMap | newNodeMap + val maxIndex = Node.BranchingFactor - Integer.numberOfLeadingZeros(newAllMap) + + // note: We MUST start from the minimum index in the old (`this`) node, otherwise `old{Node,Data}Index` will + // not be incremented properly. Otherwise we could have started at Integer.numberOfTrailingZeros(newAllMap) + var i = minimumIndex + + var oldDataIndex = 0 + var oldNodeIndex = 0 + + var newDataIndex = 0 + var newNodeIndex = 0 + + while (i < maxIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & oldDataPassThrough) != 0) { + newContent(newDataIndex * TupleLength) = getKey(oldDataIndex) + newContent(newDataIndex * TupleLength + 1) = getValue(oldDataIndex) + newOriginalHashes(newDataIndex) = getHash(oldDataIndex) + newDataIndex += 1 + oldDataIndex += 1 + } else if ((bitpos & nodesToPassThroughMap) != 0) { + newContent(newContentSize - newNodeIndex - 1) = getNode(oldNodeIndex) + newNodeIndex += 1 + oldNodeIndex += 1 + } else if ((bitpos & nodeMigrateToDataTargetMap) != 0) { + // we need not check for null here. If nodeMigrateToDataTargetMap != 0, then nodesToMigrateToData must not be null + val node = nodesToMigrateToData.dequeue() + newContent(TupleLength * newDataIndex) = node.getKey(0) + newContent(TupleLength * newDataIndex + 1) = node.getValue(0) + newOriginalHashes(newDataIndex) = node.getHash(0) + newDataIndex += 1 + oldNodeIndex += 1 + } else if ((bitpos & mapOfNewNodes) != 0) { + newContent(newContentSize - newNodeIndex - 1) = newNodes.dequeue() + newNodeIndex += 1 + oldNodeIndex += 1 + } else if ((bitpos & dataMap) != 0) { + oldDataIndex += 1 + } else if ((bitpos & nodeMap) != 0) { + oldNodeIndex += 1 + } + + i += 1 + } + + new BitmapIndexedMapNode[K, V](newDataMap, newNodeMap, newContent, newOriginalHashes, newSize, newCachedHashCode) + } + } + } +} + +private final class HashCollisionMapNode[K, +V]( + val originalHash: Int, + val hash: Int, + var content: Vector[(K, V @uV)] + ) extends MapNode[K, V] { + + import Node._ + + require(content.length >= 2) + + releaseFence() + + private[immutable] def indexOf(key: Any): Int = { + val iter = content.iterator + var i = 0 + while (iter.hasNext) { + if (iter.next()._1 == key) return i + i += 1 + } + -1 + } + + def size: Int = content.length + + def apply(key: K, originalHash: Int, hash: Int, shift: Int): V = get(key, originalHash, hash, shift).getOrElse(throw new NoSuchElementException) + + def get(key: K, originalHash: Int, hash: Int, shift: Int): Option[V] = + if (this.hash == hash) { + val index = indexOf(key) + if (index >= 0) Some(content(index)._2) else None + } else None + + override def getTuple(key: K, originalHash: Int, hash: Int, shift: Int): (K, V) = { + val index = indexOf(key) + if (index >= 0) content(index) else throw new NoSuchElementException + } + + def getOrElse[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int, f: => V1): V1 = { + if (this.hash == hash) { + indexOf(key) match { + case -1 => f + case other => content(other)._2 + } + } else f + } + + override def containsKey(key: K, originalHash: Int, hash: Int, shift: Int): Boolean = + this.hash == hash && indexOf(key) >= 0 + + def contains[V1 >: V](key: K, value: V1, hash: Int, shift: Int): Boolean = + this.hash == hash && { + val index = indexOf(key) + index >= 0 && (content(index)._2.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) + } + + def updated[V1 >:
V](key: K, value: V1, originalHash: Int, hash: Int, shift: Int, replaceValue: Boolean): MapNode[K, V1] = { + val index = indexOf(key) + if (index >= 0) { + if (replaceValue) { + if (content(index)._2.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) { + this + } else { + new HashCollisionMapNode[K, V1](originalHash, hash, content.updated[(K, V1)](index, (key, value))) + } + } else { + this + } + } else { + new HashCollisionMapNode[K, V1](originalHash, hash, content.appended[(K, V1)]((key, value))) + } + } + + def removed[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int): MapNode[K, V1] = { + if (!this.containsKey(key, originalHash, hash, shift)) { + this + } else { + val updatedContent = content.filterNot(keyValuePair => keyValuePair._1 == key) + // assert(updatedContent.size == content.size - 1) + + updatedContent.size match { + case 1 => + val (k, v) = updatedContent(0) + new BitmapIndexedMapNode[K, V1](bitposFrom(maskFrom(hash, 0)), 0, Array(k, v), Array(originalHash), 1, hash) + case _ => new HashCollisionMapNode[K, V1](originalHash, hash, updatedContent) + } + } + } + + def hasNodes: Boolean = false + + def nodeArity: Int = 0 + + def getNode(index: Int): MapNode[K, V] = + throw new IndexOutOfBoundsException("No sub-nodes present in hash-collision leaf node.") + + def hasPayload: Boolean = true + + def payloadArity: Int = content.length + + def getKey(index: Int): K = getPayload(index)._1 + def getValue(index: Int): V = getPayload(index)._2 + + def getPayload(index: Int): (K, V) = content(index) + + override def getHash(index: Int): Int = originalHash + + def foreach[U](f: ((K, V)) => U): Unit = content.foreach(f) + + def foreachEntry[U](f: (K, V) => U): Unit = content.foreach { case (k, v) => f(k, v)} + + override def foreachWithHash(f: (K, V, Int) => Unit): Unit = { + val iter = content.iterator + while (iter.hasNext) { + val next = iter.next() + f(next._1, next._2, originalHash) + } + } + + override def transform[W](f: (K, V) => W): HashCollisionMapNode[K, W] = { + val newContent = Vector.newBuilder[(K, W)] + val contentIter = content.iterator + // true if any values have been transformed to a different value via `f` + var anyChanges = false + while(contentIter.hasNext) { + val (k, v) = contentIter.next() + val newValue = f(k, v) + newContent.addOne((k, newValue)) + anyChanges ||= (v.asInstanceOf[AnyRef] ne newValue.asInstanceOf[AnyRef]) + } + if (anyChanges) new HashCollisionMapNode(originalHash, hash, newContent.result()) + else this.asInstanceOf[HashCollisionMapNode[K, W]] + } + + override def equals(that: Any): Boolean = + that match { + case node: HashCollisionMapNode[_, _] => + (this eq node) || + (this.hash == node.hash) && + (this.content.length == node.content.length) && { + val iter = content.iterator + while (iter.hasNext) { + val (key, value) = iter.next() + val index = node.indexOf(key) + if (index < 0 || value != node.content(index)._2) { + return false + } + } + true + } + case _ => false + } + + override def concat[V1 >: V](that: MapNode[K, V1], shift: Int): HashCollisionMapNode[K, V1] = that match { + case hc: HashCollisionMapNode[K, V1] => + if (hc eq this) { + this + } else { + var newContent: VectorBuilder[(K, V1)] = null + val iter = content.iterator + while (iter.hasNext) { + val nextPayload = iter.next() + if (hc.indexOf(nextPayload._1) < 0) { + if (newContent eq null) { + newContent = new VectorBuilder[(K, V1)]() + newContent.addAll(hc.content) + } + newContent.addOne(nextPayload) + } + } + if (newContent eq null) hc else new 
HashCollisionMapNode(originalHash, hash, newContent.result()) + } + case _: BitmapIndexedMapNode[K, V1] => + // should never happen -- hash collisions are never at the same level as bitmapIndexedMapNodes + throw new UnsupportedOperationException("Cannot concatenate a HashCollisionMapNode with a BitmapIndexedMapNode") + } + + + override def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit = that match { + case hc: HashCollisionMapNode[K, V1] => + val iter = content.iterator + val rightArray = hc.content.toArray[AnyRef] // really Array[(K, V1)] + + def rightIndexOf(key: K): Int = { + var i = 0 + while (i < rightArray.length) { + val elem = rightArray(i) + if ((elem ne null) && (elem.asInstanceOf[(K, V1)])._1 == key) return i + i += 1 + } + -1 + } + + while (iter.hasNext) { + val nextPayload = iter.next() + val index = rightIndexOf(nextPayload._1) + + if (index == -1) { + builder.addOne(nextPayload) + } else { + val rightPayload = rightArray(index).asInstanceOf[(K, V1)] + rightArray(index) = null + + builder.addOne(mergef(nextPayload, rightPayload)) + } + } + + var i = 0 + while (i < rightArray.length) { + val elem = rightArray(i) + if (elem ne null) builder.addOne(elem.asInstanceOf[(K, V1)]) + i += 1 + } + case _: BitmapIndexedMapNode[K, V1] => + throw new Exception("Cannot merge HashCollisionMapNode with BitmapIndexedMapNode") + + } + + override def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): Unit = { + val iter = content.iterator + while (iter.hasNext) { + val (k, v) = iter.next() + builder.addOne(k, v, originalHash, hash) + } + } + + override def filterImpl(pred: ((K, V)) => Boolean, flipped: Boolean): MapNode[K, V] = { + val newContent = content.filterImpl(pred, flipped) + val newContentLength = newContent.length + if (newContentLength == 0) { + MapNode.empty + } else if (newContentLength == 1) { + val (k, v) = newContent.head + new BitmapIndexedMapNode[K, V](bitposFrom(maskFrom(hash, 0)), 0, Array(k, v), Array(originalHash), 1, hash) + } else if (newContentLength == content.length) this + else new HashCollisionMapNode(originalHash, hash, newContent) + } + + override def copy(): HashCollisionMapNode[K, V] = new HashCollisionMapNode[K, V](originalHash, hash, content) + + override def hashCode(): Int = + throw new UnsupportedOperationException("Trie nodes do not support hashing.") + + override def cachedJavaKeySetHashCode: Int = size * hash + +} + +private final class MapKeyIterator[K, V](rootNode: MapNode[K, V]) + extends ChampBaseIterator[MapNode[K, V]](rootNode) with Iterator[K] { + + def next() = { + if (!hasNext) + throw new NoSuchElementException + + val key = currentValueNode.getKey(currentValueCursor) + currentValueCursor += 1 + + key + } + +} + +private final class MapValueIterator[K, V](rootNode: MapNode[K, V]) + extends ChampBaseIterator[MapNode[K, V]](rootNode) with Iterator[V] { + + def next() = { + if (!hasNext) + throw new NoSuchElementException + + val value = currentValueNode.getValue(currentValueCursor) + currentValueCursor += 1 + + value + } +} + +private final class MapKeyValueTupleIterator[K, V](rootNode: MapNode[K, V]) + extends ChampBaseIterator[MapNode[K, V]](rootNode) with Iterator[(K, V)] { + + def next() = { + if (!hasNext) + throw new NoSuchElementException + + val payload = currentValueNode.getPayload(currentValueCursor) + currentValueCursor += 1 + + payload + } + +} + +private final class MapKeyValueTupleReverseIterator[K, V](rootNode: MapNode[K, V]) + extends 
ChampBaseReverseIterator[MapNode[K, V]](rootNode) with Iterator[(K, V)] { + + def next() = { + if (!hasNext) + throw new NoSuchElementException + + val payload = currentValueNode.getPayload(currentValueCursor) + currentValueCursor -= 1 + + payload + } +} + +private final class MapKeyValueTupleHashIterator[K, V](rootNode: MapNode[K, V]) + extends ChampBaseReverseIterator[MapNode[K, V]](rootNode) with Iterator[Any] { + private[this] var hash = 0 + private[this] var value: V = _ + override def hashCode(): Int = MurmurHash3.tuple2Hash(hash, value.##, MurmurHash3.productSeed) + def next() = { + if (!hasNext) + throw new NoSuchElementException + + hash = currentValueNode.getHash(currentValueCursor) + value = currentValueNode.getValue(currentValueCursor) + currentValueCursor -= 1 + this + } +} + +/** Used in HashMap[K, V]#removeAll(HashSet[K]) */ +private final class MapNodeRemoveAllSetNodeIterator[K](rootSetNode: SetNode[K]) extends ChampBaseIterator(rootSetNode) { + /** Returns the result of immutably removing all keys in `rootSetNode` from `rootMapNode` */ + def removeAll[V](rootMapNode: BitmapIndexedMapNode[K, V]): BitmapIndexedMapNode[K, V] = { + var curr = rootMapNode + while (curr.size > 0 && hasNext) { + val originalHash = currentValueNode.getHash(currentValueCursor) + curr = curr.removed( + key = currentValueNode.getPayload(currentValueCursor), + keyHash = improve(originalHash), + originalHash = originalHash, + shift = 0 + ) + currentValueCursor += 1 + } + curr + } +} + +/** + * $factoryInfo + * + * @define Coll `immutable.HashMap` + * @define coll immutable champ hash map + */ +@SerialVersionUID(3L) +object HashMap extends MapFactory[HashMap] { + + @transient + private final val EmptyMap = new HashMap(MapNode.empty) + + def empty[K, V]: HashMap[K, V] = + EmptyMap.asInstanceOf[HashMap[K, V]] + + def from[K, V](source: collection.IterableOnce[(K, V)]^): HashMap[K, V] = + source match { + case hs: HashMap[K, V] => hs + case _ => (newBuilder[K, V] ++= source).result() + } + + /** Create a new Builder which can be reused after calling `result()` without an + * intermediate call to `clear()` in order to build multiple related results. + */ + def newBuilder[K, V]: ReusableBuilder[(K, V), HashMap[K, V]] = new HashMapBuilder[K, V] +} + + +/** A Builder for a HashMap. + * $multipleResults + */ +private[immutable] final class HashMapBuilder[K, V] extends ReusableBuilder[(K, V), HashMap[K, V]] { + import MapNode._ + import Node._ + + private def newEmptyRootNode = new BitmapIndexedMapNode[K, V](0, 0, Array.emptyObjectArray.asInstanceOf[Array[Any]], Array.emptyIntArray, 0, 0) + + /** The last given out HashMap as a return value of `result()`, if any, otherwise null. + * Indicates that on next add, the elements should be copied to an identical structure, before continuing + * mutations. 
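+    * A hypothetical reuse sketch (illustrative only, not part of the original source): +    * {{{ +    *   val b = HashMap.newBuilder[Int, String] +    *   b.addOne(1 -> "a"); val m1 = b.result() +    *   b.addOne(2 -> "b"); val m2 = b.result() // m1 still contains only 1 -> "a" +    * }}}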
*/ + private var aliased: HashMap[K, V] = _ + + private def isAliased: Boolean = aliased != null + + /** The root node of the partially built hashmap */ + private var rootNode: BitmapIndexedMapNode[K, V] = newEmptyRootNode + + private[immutable] def getOrElse[V0 >: V](key: K, value: V0): V0 = + if (rootNode.size == 0) value + else { + val originalHash = key.## + rootNode.getOrElse(key, originalHash, improve(originalHash), 0, value) + } + + /** Inserts element `elem` into array `as` at index `ix`, shifting right the trailing elems */ + private[this] def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = { + if (ix < 0) throw new ArrayIndexOutOfBoundsException + if (ix > as.length) throw new ArrayIndexOutOfBoundsException + val result = new Array[Int](as.length + 1) + arraycopy(as, 0, result, 0, ix) + result(ix) = elem + arraycopy(as, ix, result, ix + 1, as.length - ix) + result + } + + /** Inserts key-value into the BitmapIndexedMapNode. Requires that this is a new key-value pair */ + private[this] def insertValue[V1 >: V](bm: BitmapIndexedMapNode[K, V], bitpos: Int, key: K, originalHash: Int, keyHash: Int, value: V1): Unit = { + val dataIx = bm.dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = bm.content + val dst = new Array[Any](src.length + TupleLength) + + // copy 'src' and insert 2 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + dst(idx) = key + dst(idx + 1) = value + arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) + + val dstHashes = insertElement(bm.originalHashes, dataIx, originalHash) + + bm.dataMap |= bitpos + bm.content = dst + bm.originalHashes = dstHashes + bm.size += 1 + bm.cachedJavaKeySetHashCode += keyHash + } + + /** Upserts a key/value pair into mapNode, mutably */ + private[immutable] def update(mapNode: MapNode[K, V], key: K, value: V, originalHash: Int, keyHash: Int, shift: Int): Unit = { + mapNode match { + case bm: BitmapIndexedMapNode[K, V] => + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + if ((bm.dataMap & bitpos) != 0) { + val index = indexFrom(bm.dataMap, mask, bitpos) + val key0 = bm.getKey(index) + val key0UnimprovedHash = bm.getHash(index) + + if (key0UnimprovedHash == originalHash && key0 == key) { + bm.content(TupleLength * index + 1) = value + } else { + val value0 = bm.getValue(index) + val key0Hash = improve(key0UnimprovedHash) + + val subNodeNew: MapNode[K, V] = + bm.mergeTwoKeyValPairs(key0, value0, key0UnimprovedHash, key0Hash, key, value, originalHash, keyHash, shift + BitPartitionSize) + + bm.migrateFromInlineToNodeInPlace(bitpos, key0Hash, subNodeNew) + } + + } else if ((bm.nodeMap & bitpos) != 0) { + val index = indexFrom(bm.nodeMap, mask, bitpos) + val subNode = bm.getNode(index) + val beforeSize = subNode.size + val beforeHash = subNode.cachedJavaKeySetHashCode + update(subNode, key, value, originalHash, keyHash, shift + BitPartitionSize) + bm.size += subNode.size - beforeSize + bm.cachedJavaKeySetHashCode += subNode.cachedJavaKeySetHashCode - beforeHash + } else { + insertValue(bm, bitpos, key, originalHash, keyHash, value) + } + case hc: HashCollisionMapNode[K, V] => + val index = hc.indexOf(key) + if (index < 0) { + hc.content = hc.content.appended((key, value)) + } else { + hc.content = hc.content.updated(index, (key, value)) + } + } + } + + /** If currently referencing aliased structure, copy elements to new mutable structure */ + private[this] def ensureUnaliased() = { + if (isAliased) copyElems() + aliased = null + } + + /** Copy elements to new mutable
structure */ + private[this] def copyElems(): Unit = { + rootNode = rootNode.copy() + } + + override def result(): HashMap[K, V] = + if (rootNode.size == 0) { + HashMap.empty + } else if (aliased != null) { + aliased + } else { + aliased = new HashMap(rootNode) + releaseFence() + aliased + } + + override def addOne(elem: (K, V)): this.type = { + ensureUnaliased() + val h = elem._1.## + val im = improve(h) + update(rootNode, elem._1, elem._2, h, im, 0) + this + } + + def addOne(key: K, value: V): this.type = { + ensureUnaliased() + val originalHash = key.## + update(rootNode, key, value, originalHash, improve(originalHash), 0) + this + } + def addOne(key: K, value: V, originalHash: Int): this.type = { + ensureUnaliased() + update(rootNode, key, value, originalHash, improve(originalHash), 0) + this + } + def addOne(key: K, value: V, originalHash: Int, hash: Int): this.type = { + ensureUnaliased() + update(rootNode, key, value, originalHash, hash, 0) + this + } + + override def addAll(xs: IterableOnce[(K, V)]^): this.type = { + ensureUnaliased() + xs match { + case hm: HashMap[K, V] => + new ChampBaseIterator[MapNode[K, V]](hm.rootNode) { + while(hasNext) { + val originalHash = currentValueNode.getHash(currentValueCursor) + update( + mapNode = rootNode, + key = currentValueNode.getKey(currentValueCursor), + value = currentValueNode.getValue(currentValueCursor), + originalHash = originalHash, + keyHash = improve(originalHash), + shift = 0 + ) + currentValueCursor += 1 + } + }.asInstanceOf // !!! cc gets confused with representation of capture sets in invariant position + case hm: collection.mutable.HashMap[K, V] => + val iter = hm.nodeIterator + while (iter.hasNext) { + val next = iter.next() + val originalHash = hm.unimproveHash(next.hash) + val hash = improve(originalHash) + update(rootNode, next.key, next.value, originalHash, hash, 0) + } + case lhm: collection.mutable.LinkedHashMap[K, V] => + val iter = lhm.entryIterator + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhm.unimproveHash(next.hash) + val hash = improve(originalHash) + update(rootNode, next.key, next.value, originalHash, hash, 0) + } + case thatMap: Map[K, V] => + thatMap.foreachEntry((key, value) => addOne(key, value)) + case other => + val it = other.iterator + while(it.hasNext) addOne(it.next()) + } + + this + } + + override def clear(): Unit = { + aliased = null + if (rootNode.size > 0) { + rootNode = newEmptyRootNode + } + } + + private[collection] def size: Int = rootNode.size + + override def knownSize: Int = rootNode.size +} diff --git a/tests/pos-special/stdlib/collection/immutable/HashSet.scala b/tests/pos-special/stdlib/collection/immutable/HashSet.scala new file mode 100644 index 000000000000..9a3676705201 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/HashSet.scala @@ -0,0 +1,2124 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package immutable + +import java.lang.Integer.{bitCount, numberOfTrailingZeros} +import java.lang.System.arraycopy + +import scala.collection.Hashing.improve +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable.ReusableBuilder +import scala.runtime.Statics.releaseFence +import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking + +/** This class implements immutable sets using a Compressed Hash-Array Mapped Prefix-tree. + * See paper https://michael.steindorfer.name/publications/oopsla15.pdf for more details. + * + * @tparam A the type of the elements contained in this hash set. + * @define Coll `immutable.HashSet` + * @define coll immutable champ hash set + */ +final class HashSet[A] private[immutable](private[immutable] val rootNode: BitmapIndexedSetNode[A]) + extends AbstractSet[A] + with StrictOptimizedSetOps[A, HashSet, HashSet[A]] + with IterableFactoryDefaults[A, HashSet] + with DefaultSerializable { + + def this() = this(SetNode.empty) + + // This release fence is present because rootNode may have previously been mutated during construction. + releaseFence() + + private[this] def newHashSetOrThis(newRootNode: BitmapIndexedSetNode[A]): HashSet[A] = + if (rootNode eq newRootNode) this else new HashSet(newRootNode) + + override def iterableFactory: IterableFactory[HashSet] = HashSet + + override def knownSize: Int = rootNode.size + + override def size: Int = rootNode.size + + override def isEmpty: Boolean = rootNode.size == 0 + + def iterator: Iterator[A] = { + if (isEmpty) Iterator.empty + else new SetIterator[A](rootNode) + } + + protected[immutable] def reverseIterator: Iterator[A] = new SetReverseIterator[A](rootNode) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => IntChampStepper.from[ SetNode[A]](size, rootNode, (node, i) => node.getPayload(i).asInstanceOf[Int]) + case StepperShape.LongShape => LongChampStepper.from[ SetNode[A]](size, rootNode, (node, i) => node.getPayload(i).asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleChampStepper.from[SetNode[A]](size, rootNode, (node, i) => node.getPayload(i).asInstanceOf[Double]) + case _ => shape.parUnbox(AnyChampStepper.from[A, SetNode[A]](size, rootNode, (node, i) => node.getPayload(i))) + } + s.asInstanceOf[S with EfficientSplit] + } + + def contains(element: A): Boolean = { + val elementUnimprovedHash = element.## + val elementHash = improve(elementUnimprovedHash) + rootNode.contains(element, elementUnimprovedHash, elementHash, 0) + } + + def incl(element: A): HashSet[A] = { + val elementUnimprovedHash = element.## + val elementHash = improve(elementUnimprovedHash) + val newRootNode = rootNode.updated(element, elementUnimprovedHash, elementHash, 0) + newHashSetOrThis(newRootNode) + } + + def excl(element: A): HashSet[A] = { + val elementUnimprovedHash = element.## + val elementHash = improve(elementUnimprovedHash) + val newRootNode = rootNode.removed(element, elementUnimprovedHash, elementHash, 0) + newHashSetOrThis(newRootNode) + } + + override def concat(that: IterableOnce[A]): HashSet[A] = + that match { + case hs: HashSet[A] => + if (isEmpty) hs + else { + val newNode = rootNode.concat(hs.rootNode, 0) + if (newNode eq hs.rootNode) hs + else newHashSetOrThis(newNode) + } + case hs: collection.mutable.HashSet[A] => + val iter = 
hs.nodeIterator + var current = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = hs.unimproveHash(next.hash) + val improved = improve(originalHash) + current = current.updated(next.key, originalHash, improved, 0) + + if (current ne rootNode) { + var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + while (iter.hasNext) { + val next = iter.next() + val originalHash = hs.unimproveHash(next.hash) + val improved = improve(originalHash) + shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, originalHash, improved, 0, shallowlyMutableNodeMap) + } + return new HashSet(current) + } + } + this + case lhs: collection.mutable.LinkedHashSet[A] => + val iter = lhs.entryIterator + var current = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhs.unimproveHash(next.hash) + val improved = improve(originalHash) + current = current.updated(next.key, originalHash, improved, 0) + + if (current ne rootNode) { + var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhs.unimproveHash(next.hash) + val improved = improve(originalHash) + shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, originalHash, improved, 0, shallowlyMutableNodeMap) + } + return new HashSet(current) + } + } + this + case _ => + val iter = that.iterator + var current = rootNode + while (iter.hasNext) { + val element = iter.next() + val originalHash = element.## + val improved = improve(originalHash) + current = current.updated(element, originalHash, improved, 0) + + if (current ne rootNode) { + // Note: We could have started with shallowlyMutableNodeMap = 0, however this way, in the case that + // the first changed key ended up in a subnode beneath root, we mark that root right away as being + // shallowly mutable. + // + // since `element` has just been inserted, and certainly caused a new root node to be created, we can say with + // certainty that it either caused a new subnode to be created underneath `current`, in which case we should + // carry on mutating that subnode, or it ended up as a child data pair of the root, in which case, no harm is + // done by including its bit position in the shallowlyMutableNodeMap anyways. 
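+          // Illustration with a hypothetical value (not in the original source): if improved == 0x113, then + // Node.maskFrom(improved, 0) == 0x13 (the low BitPartitionSize == 5 bits) and + // Node.bitposFrom(0x13) == 1 << 0x13, a single set bit marking that root child slot.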
+ var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + while (iter.hasNext) { + val element = iter.next() + val originalHash = element.## + val improved = improve(originalHash) + shallowlyMutableNodeMap = current.updateWithShallowMutations(element, originalHash, improved, 0, shallowlyMutableNodeMap) + } + return new HashSet(current) + } + } + this + } + + override def tail: HashSet[A] = this - head + + override def init: HashSet[A] = this - last + + override def head: A = iterator.next() + + override def last: A = reverseIterator.next() + + override def foreach[U](f: A => U): Unit = rootNode.foreach(f) + + /** Applies a function f to each element, and its corresponding **original** hash, in this Set */ + @`inline` private[collection] def foreachWithHash(f: (A, Int) => Unit): Unit = rootNode.foreachWithHash(f) + + /** Applies a function f to each element, and its corresponding **original** hash, in this Set. + * Stops iterating the first time that f returns `false`. */ + @`inline` private[collection] def foreachWithHashWhile(f: (A, Int) => Boolean): Unit = rootNode.foreachWithHashWhile(f) + + def subsetOf(that: Set[A]): Boolean = if (that.isEmpty) true else that match { + case set: HashSet[A] => rootNode.subsetOf(set.rootNode, 0) + case _ => super.subsetOf(that) + } + + override def equals(that: Any): Boolean = + that match { + case set: HashSet[_] => (this eq set) || (this.rootNode == set.rootNode) + case _ => super.equals(that) + } + + override protected[this] def className = "HashSet" + + override def hashCode(): Int = { + val it = new SetHashIterator(rootNode) + val hash = MurmurHash3.unorderedHash(it, MurmurHash3.setSeed) + //assert(hash == super.hashCode()) + hash + } + + override def diff(that: collection.Set[A]): HashSet[A] = { + if (isEmpty) { + this + } else { + that match { + case hashSet: HashSet[A] => + if (hashSet.isEmpty) this else { + val newRootNode = rootNode.diff(hashSet.rootNode, 0) + if (newRootNode.size == 0) HashSet.empty else newHashSetOrThis(newRootNode) + } + case hashSet: collection.mutable.HashSet[A] => + val iter = hashSet.nodeIterator + var curr = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = hashSet.unimproveHash(next.hash) + val improved = improve(originalHash) + curr = curr.removed(next.key, originalHash, improved, 0) + if (curr ne rootNode) { + if (curr.size == 0) { + return HashSet.empty + } + while (iter.hasNext) { + val next = iter.next() + val originalHash = hashSet.unimproveHash(next.hash) + val improved = improve(originalHash) + + curr.removeWithShallowMutations(next.key, originalHash, improved) + + if (curr.size == 0) { + return HashSet.empty + } + } + return new HashSet(curr) + } + } + this + + case other => + val thatKnownSize = other.knownSize + + if (thatKnownSize == 0) { + this + } else if (thatKnownSize <= size) { + /* this branch intentionally includes the case of thatKnownSize == -1. We know that HashSets are quite fast at look-up, so + we're likely to be the faster of the two at that. */ + removedAllWithShallowMutations(other) + } else { + // TODO: Develop more sophisticated heuristic for which branch to take + filterNot(other.contains) + } + } + + } + } + + /** Immutably removes all elements of `that` from this HashSet + * + * Mutation is used internally, but only on root SetNodes which this method itself creates.
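+    * A hypothetical sketch of the observable behaviour (via the public `removedAll`; not in the original source): +    * {{{ +    *   HashSet(1, 2, 3).removedAll(List(2, 3, 4)) // == HashSet(1) +    * }}}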
+ * + * That is, this method is safe to call on published sets because it does not mutate `this` + */ + private[this] def removedAllWithShallowMutations(that: IterableOnce[A]): HashSet[A] = { + val iter = that.iterator + var curr = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = next.## + val improved = improve(originalHash) + curr = curr.removed(next, originalHash, improved, 0) + if (curr ne rootNode) { + if (curr.size == 0) { + return HashSet.empty + } + while (iter.hasNext) { + val next = iter.next() + val originalHash = next.## + val improved = improve(originalHash) + + curr.removeWithShallowMutations(next, originalHash, improved) + + if (curr.size == 0) { + return HashSet.empty + } + } + return new HashSet(curr) + } + } + this + } + + override def removedAll(that: IterableOnce[A]): HashSet[A] = that match { + case set: scala.collection.Set[A] => diff(set) + case range: Range if range.length > size => + filter { + case i: Int => !range.contains(i) + case _ => true + } + + case _ => + removedAllWithShallowMutations(that) + } + + override def partition(p: A => Boolean): (HashSet[A], HashSet[A]) = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.partition(p) + } + + override def span(p: A => Boolean): (HashSet[A], HashSet[A]) = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.span(p) + } + + override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): HashSet[A] = { + val newRootNode = rootNode.filterImpl(pred, isFlipped) + if (newRootNode eq rootNode) this + else if (newRootNode.size == 0) HashSet.empty + else new HashSet(newRootNode) + } + + override def intersect(that: collection.Set[A]): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.intersect(that) + } + + override def take(n: Int): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.take(n) + } + + override def takeRight(n: Int): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.takeRight(n) + } + + override def takeWhile(p: A => Boolean): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.takeWhile(p) + } + + override def drop(n: Int): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.drop(n) + } + + override def dropRight(n: Int): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. 
+ super.dropRight(n) + } + + override def dropWhile(p: A => Boolean): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.dropWhile(p) + } +} + +private[immutable] object SetNode { + + private final val EmptySetNode = new BitmapIndexedSetNode(0, 0, Array.empty, Array.empty, 0, 0) + + def empty[A]: BitmapIndexedSetNode[A] = EmptySetNode.asInstanceOf[BitmapIndexedSetNode[A]] + + final val TupleLength = 1 + +} + +private[immutable] sealed abstract class SetNode[A] extends Node[SetNode[A]] { + + def contains(element: A, originalHash: Int, hash: Int, shift: Int): Boolean + + def updated(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] + + def removed(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] + + def hasNodes: Boolean + + def nodeArity: Int + + def getNode(index: Int): SetNode[A] + + def hasPayload: Boolean + + def payloadArity: Int + + def getPayload(index: Int): A + + def size: Int + + def foreach[U](f: A => U): Unit + + def subsetOf(that: SetNode[A], shift: Int): Boolean + + def copy(): SetNode[A] + + def filterImpl(pred: A => Boolean, flipped: Boolean): SetNode[A] + + def diff(that: SetNode[A], shift: Int): SetNode[A] + + def concat(that: SetNode[A], shift: Int): SetNode[A] + + def foreachWithHash(f: (A, Int) => Unit): Unit + + def foreachWithHashWhile(f: (A, Int) => Boolean): Boolean +} + +private final class BitmapIndexedSetNode[A]( + var dataMap: Int, + var nodeMap: Int, + var content: Array[Any], + var originalHashes: Array[Int], + var size: Int, + var cachedJavaKeySetHashCode: Int) extends SetNode[A] { + + import Node._ + import SetNode._ + + /* + assert(checkInvariantContentIsWellTyped()) + assert(checkInvariantSubNodesAreCompacted()) + + private final def checkInvariantSubNodesAreCompacted(): Boolean = + new SetIterator[A](this).size - payloadArity >= 2 * nodeArity + + private final def checkInvariantContentIsWellTyped(): Boolean = { + val predicate1 = TupleLength * payloadArity + nodeArity == content.length + + val predicate2 = Range(0, TupleLength * payloadArity) + .forall(i => content(i).isInstanceOf[SetNode[_]] == false) + + val predicate3 = Range(TupleLength * payloadArity, content.length) + .forall(i => content(i).isInstanceOf[SetNode[_]] == true) + + predicate1 && predicate2 && predicate3 + } + */ + + def getPayload(index: Int): A = content(index).asInstanceOf[A] + + override def getHash(index: Int): Int = originalHashes(index) + + def getNode(index: Int): SetNode[A] = content(content.length - 1 - index).asInstanceOf[SetNode[A]] + + def contains(element: A, originalHash: Int, elementHash: Int, shift: Int): Boolean = { + val mask = maskFrom(elementHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + return originalHashes(index) == originalHash && element == this.getPayload(index) + } + + if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + return this.getNode(index).contains(element, originalHash, elementHash, shift + BitPartitionSize) + } + + false + } + + def updated(element: A, originalHash: Int, elementHash: Int, shift: Int): BitmapIndexedSetNode[A] = { + val mask = maskFrom(elementHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val element0 = this.getPayload(index) + + if (element0.asInstanceOf[AnyRef] 
eq element.asInstanceOf[AnyRef]) { + return this + } else { + val element0UnimprovedHash = getHash(index) + val element0Hash = improve(element0UnimprovedHash) + if (originalHash == element0UnimprovedHash && element0 == element) { + return this + } else { + val subNodeNew = mergeTwoKeyValPairs(element0, element0UnimprovedHash, element0Hash, element, originalHash, elementHash, shift + BitPartitionSize) + return copyAndMigrateFromInlineToNode(bitpos, element0Hash, subNodeNew) + } + } + } + if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + + val subNodeNew = subNode.updated(element, originalHash, elementHash, shift + BitPartitionSize) + if (subNode eq subNodeNew) { + return this + } else { + return copyAndSetNode(bitpos, subNode, subNodeNew) + } + } + + copyAndInsertValue(bitpos, element, originalHash, elementHash) + } + /** A variant of `updated` which performs shallow mutations on the root (`this`), and if possible, on immediately + * descendant child nodes (only one level beneath `this`) + * + * The caller should pass a bitmap of child nodes of this node, which this method may mutate. + * If this method may mutate a child node, then if the updated value is located in that child node, it will + * be shallowly mutated (its children will not be mutated). + * + * If instead this method may not mutate the child node in which the to-be-updated value is located, then + * that child will be updated immutably, but the result will be mutably re-inserted as a child of this node. + * + * @param element the element to update + * @param originalHash element.## + * @param elementHash the improved hash + * @param shallowlyMutableNodeMap bitmap of child nodes of this node, which can be shallowly mutated + * during the call to this method + * + * @return Int which is the bitwise OR of shallowlyMutableNodeMap and any freshly created nodes, which will be + * available for mutations in subsequent calls.
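+   * + * A hypothetical call sequence (mirroring how `concat` drives this method; `root`, `e1`, `e2` are illustrative names, not from the original source): + * {{{ + *   var mutable = Node.bitposFrom(Node.maskFrom(improve(e1.##), 0)) // root was just freshly created + *   mutable = root.updateWithShallowMutations(e1, e1.##, improve(e1.##), 0, mutable) + *   mutable = root.updateWithShallowMutations(e2, e2.##, improve(e2.##), 0, mutable) + * }}}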
+ */ + def updateWithShallowMutations(element: A, originalHash: Int, elementHash: Int, shift: Int, shallowlyMutableNodeMap: Int): Int = { + val mask = maskFrom(elementHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val element0 = getPayload(index) + val element0UnimprovedHash = getHash(index) + if (element0UnimprovedHash == originalHash && element0 == element) { + shallowlyMutableNodeMap + } else { + val element0Hash = improve(element0UnimprovedHash) + val subNodeNew = mergeTwoKeyValPairs(element0, element0UnimprovedHash, element0Hash, element, originalHash, elementHash, shift + BitPartitionSize) + migrateFromInlineToNodeInPlace(bitpos, element0Hash, subNodeNew) + shallowlyMutableNodeMap | bitpos + } + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + val subNodeSize = subNode.size + val subNodeCachedJavaKeySetHashCode = subNode.cachedJavaKeySetHashCode + + var returnNodeMap = shallowlyMutableNodeMap + + val subNodeNew: SetNode[A] = subNode match { + case subNodeBm: BitmapIndexedSetNode[A] if (bitpos & shallowlyMutableNodeMap) != 0 => + subNodeBm.updateWithShallowMutations(element, originalHash, elementHash, shift + BitPartitionSize, 0) + subNodeBm + case _ => + val subNodeNew = subNode.updated(element, originalHash, elementHash, shift + BitPartitionSize) + if (subNodeNew ne subNode) { + returnNodeMap |= bitpos + } + subNodeNew + } + + this.content(this.content.length - 1 - this.nodeIndex(bitpos)) = subNodeNew + this.size = this.size - subNodeSize + subNodeNew.size + this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - subNodeCachedJavaKeySetHashCode + subNodeNew.cachedJavaKeySetHashCode + returnNodeMap + } else { + val dataIx = dataIndex(bitpos) + val idx = dataIx + + val src = this.content + val dst = new Array[Any](src.length + TupleLength) + + // copy 'src' and insert 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + dst(idx) = element + arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) + + val dstHashes = insertElement(originalHashes, dataIx, originalHash) + + this.dataMap |= bitpos + this.content = dst + this.originalHashes = dstHashes + this.size += 1 + this.cachedJavaKeySetHashCode += elementHash + shallowlyMutableNodeMap + } + } + + + def removed(element: A, originalHash: Int, elementHash: Int, shift: Int): BitmapIndexedSetNode[A] = { + val mask = maskFrom(elementHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val element0 = this.getPayload(index) + + if (element0 == element) { + if (this.payloadArity == 2 && this.nodeArity == 0) { + /* + * Create new node with remaining pair. The new node will either a) become the new root + * returned, or b) be unwrapped and inlined during returning.
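+           * For instance (hypothetical): removing one of exactly two inlined payloads leaves a + * single pair, and case b) lets the caller inline that pair rather than keep a one-element node alive.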
+ */ + val newDataMap = if (shift == 0) (dataMap ^ bitpos) else bitposFrom(maskFrom(elementHash, 0)) + if (index == 0) + return new BitmapIndexedSetNode[A](newDataMap, 0, Array(getPayload(1)), Array(originalHashes(1)), size - 1, improve(originalHashes(1))) + else + return new BitmapIndexedSetNode[A](newDataMap, 0, Array(getPayload(0)), Array(originalHashes(0)), size - 1, improve(originalHashes(0))) + } + else return copyAndRemoveValue(bitpos, elementHash) + } else return this + } + + if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + + val subNodeNew = subNode.removed(element, originalHash, elementHash, shift + BitPartitionSize) + + if (subNodeNew eq subNode) return this + + // cache just in case subNodeNew is a hashCollision node, in which case a little arithmetic is avoided + // in Vector#length + val subNodeNewSize = subNodeNew.size + + if (subNodeNewSize == 1) { + if (this.size == subNode.size) { + // subNode is the only child (no other data or node children of `this` exist) + // escalate (singleton or empty) result + return subNodeNew.asInstanceOf[BitmapIndexedSetNode[A]] + } else { + // inline value (move to front) + return copyAndMigrateFromNodeToInline(bitpos, elementHash, subNode, subNodeNew) + } + } else if (subNodeNewSize > 1) { + // modify current node (set replacement node) + return copyAndSetNode(bitpos, subNode, subNodeNew) + } + } + + this + } + /** Variant of `removed` which will perform mutation on only the top-level node (`this`), rather than return a new + * node + * + * Should only be called on root nodes, because shift is assumed to be 0 + * + * @param element the element to remove + * @param originalHash the original hash of `element` + * @param elementHash the improved hash of `element` + */ + def removeWithShallowMutations(element: A, originalHash: Int, elementHash: Int): this.type = { + val mask = maskFrom(elementHash, 0) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val element0 = this.getPayload(index) + + if (element0 == element) { + if (this.payloadArity == 2 && this.nodeArity == 0) { + val newDataMap = dataMap ^ bitpos + if (index == 0) { + val newContent = Array[Any](getPayload(1)) + val newOriginalHashes = Array(originalHashes(1)) + val newCachedJavaKeySetHashCode = improve(getHash(1)) + this.content = newContent + this.originalHashes = newOriginalHashes + this.cachedJavaKeySetHashCode = newCachedJavaKeySetHashCode + } else { + val newContent = Array[Any](getPayload(0)) + val newOriginalHashes = Array(originalHashes(0)) + val newCachedJavaKeySetHashCode = improve(getHash(0)) + this.content = newContent + this.originalHashes = newOriginalHashes + this.cachedJavaKeySetHashCode = newCachedJavaKeySetHashCode + } + this.dataMap = newDataMap + this.nodeMap = 0 + this.size = 1 + this + } + else { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length - TupleLength) + + arraycopy(src, 0, dst, 0, idx) + arraycopy(src, idx + TupleLength, dst, idx, src.length - idx - TupleLength) + + val dstHashes = removeElement(originalHashes, dataIx) + + this.dataMap = this.dataMap ^ bitpos + this.content = dst + this.originalHashes = dstHashes + this.size -= 1 + this.cachedJavaKeySetHashCode -= elementHash + this + } + } else this + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + + val
subNodeNew = subNode.removed(element, originalHash, elementHash, BitPartitionSize).asInstanceOf[BitmapIndexedSetNode[A]] + + if (subNodeNew eq subNode) return this + + if (subNodeNew.size == 1) { + if (this.payloadArity == 0 && this.nodeArity == 1) { + this.dataMap = subNodeNew.dataMap + this.nodeMap = subNodeNew.nodeMap + this.content = subNodeNew.content + this.originalHashes = subNodeNew.originalHashes + this.size = subNodeNew.size + this.cachedJavaKeySetHashCode = subNodeNew.cachedJavaKeySetHashCode + this + } else { + migrateFromNodeToInlineInPlace(bitpos, originalHash, elementHash, subNode, subNodeNew) + this + } + } else { + // size must be > 1 + this.content(this.content.length - 1 - this.nodeIndex(bitpos)) = subNodeNew + this.size -= 1 + this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - subNode.cachedJavaKeySetHashCode + subNodeNew.cachedJavaKeySetHashCode + this + } + } else this + } + + def mergeTwoKeyValPairs(key0: A, originalKeyHash0: Int, keyHash0: Int, key1: A, originalKeyHash1: Int, keyHash1: Int, shift: Int): SetNode[A] = { + // assert(key0 != key1) + + if (shift >= HashCodeLength) { + new HashCollisionSetNode[A](originalKeyHash0, keyHash0, Vector(key0, key1)) + } else { + val mask0 = maskFrom(keyHash0, shift) + val mask1 = maskFrom(keyHash1, shift) + + if (mask0 != mask1) { + // unique prefixes, payload fits on same level + val dataMap = bitposFrom(mask0) | bitposFrom(mask1) + val newCachedHashCode = keyHash0 + keyHash1 + + if (mask0 < mask1) { + new BitmapIndexedSetNode[A](dataMap, 0, Array(key0, key1), Array(originalKeyHash0, originalKeyHash1), 2, newCachedHashCode) + } else { + new BitmapIndexedSetNode[A](dataMap, 0, Array(key1, key0), Array(originalKeyHash1, originalKeyHash0), 2, newCachedHashCode) + } + } else { + // identical prefixes, payload must be disambiguated deeper in the trie + val nodeMap = bitposFrom(mask0) + val node = mergeTwoKeyValPairs(key0, originalKeyHash0, keyHash0, key1, originalKeyHash1, keyHash1, shift + BitPartitionSize) + + new BitmapIndexedSetNode[A](0, nodeMap, Array(node), Array.emptyIntArray, node.size, node.cachedJavaKeySetHashCode) + } + } + } + + def hasPayload: Boolean = dataMap != 0 + + def payloadArity: Int = bitCount(dataMap) + + def hasNodes: Boolean = nodeMap != 0 + + def nodeArity: Int = bitCount(nodeMap) + + def dataIndex(bitpos: Int) = bitCount(dataMap & (bitpos - 1)) + + def nodeIndex(bitpos: Int) = bitCount(nodeMap & (bitpos - 1)) + + def copyAndSetNode(bitpos: Int, oldNode: SetNode[A], newNode: SetNode[A]) = { + val idx = this.content.length - 1 - this.nodeIndex(bitpos) + + val src = this.content + val dst = new Array[Any](src.length) + + // copy 'src' and set 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, src.length) + dst(idx) = newNode + new BitmapIndexedSetNode[A]( + dataMap = dataMap, + nodeMap = nodeMap, + content = dst, + originalHashes = originalHashes, + size = size - oldNode.size + newNode.size, + cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + newNode.cachedJavaKeySetHashCode + ) + } + + def copyAndInsertValue(bitpos: Int, key: A, originalHash: Int, elementHash: Int) = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length + 1) + + // copy 'src' and insert 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + dst(idx) = key + arraycopy(src, idx, dst, idx + 1, src.length - idx) + val dstHashes = insertElement(originalHashes, dataIx, originalHash) + + new 
BitmapIndexedSetNode[A](dataMap | bitpos, nodeMap, dst, dstHashes, size + 1, cachedJavaKeySetHashCode + elementHash) + } + + def copyAndSetValue(bitpos: Int, key: A, originalHash: Int, elementHash: Int) = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length) + + // copy 'src' and set 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, src.length) + dst(idx) = key + + new BitmapIndexedSetNode[A](dataMap | bitpos, nodeMap, dst, originalHashes, size, cachedJavaKeySetHashCode) + } + + def copyAndRemoveValue(bitpos: Int, elementHash: Int) = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length - 1) + + // copy 'src' and remove 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + arraycopy(src, idx + 1, dst, idx, src.length - idx - 1) + val dstHashes = removeElement(originalHashes, dataIx) + new BitmapIndexedSetNode[A](dataMap ^ bitpos, nodeMap, dst, dstHashes, size - 1, cachedJavaKeySetHashCode - elementHash) + } + + def copyAndMigrateFromInlineToNode(bitpos: Int, elementHash: Int, node: SetNode[A]) = { + val dataIx = dataIndex(bitpos) + val idxOld = TupleLength * dataIx + val idxNew = this.content.length - TupleLength - nodeIndex(bitpos) + + val src = this.content + val dst = new Array[Any](src.length - 1 + 1) + + // copy 'src' and remove 1 element(s) at position 'idxOld' and + // insert 1 element(s) at position 'idxNew' + // assert(idxOld <= idxNew) + arraycopy(src, 0, dst, 0, idxOld) + arraycopy(src, idxOld + 1, dst, idxOld, idxNew - idxOld) + dst(idxNew) = node + arraycopy(src, idxNew + 1, dst, idxNew + 1, src.length - idxNew - 1) + val dstHashes = removeElement(originalHashes, dataIx) + new BitmapIndexedSetNode[A]( + dataMap = dataMap ^ bitpos, + nodeMap = nodeMap | bitpos, + content = dst, originalHashes = dstHashes, + size = size - 1 + node.size, + cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - elementHash + node.cachedJavaKeySetHashCode + ) + } + /** Variant of `copyAndMigrateFromInlineToNode` which mutates `this` rather than returning a new node. + * + * Note: This method will mutate `this`, and will mutate `this.content` + * + * Mutation of `this.content` will occur as an optimization not possible in maps. Since TupleLength == 1 for sets, + * content array size does not change during inline <-> node migrations. Therefore, since we are updating in-place, + * we reuse this.content by shifting data/nodes around, rather than allocating a new array.
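+   * + * A hypothetical layout sketch (TupleLength == 1; assumes the migrated bitpos is the largest among the node children, so the new node lands next to the remaining payloads): + * {{{ + *   // before: [ p0, p1, nodeB, nodeA ]      payloads grow from the left, nodes from the right + *   // after : [ p0, newNode, nodeB, nodeA ] p1's slot is reused, no new array is allocated + * }}}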
+ * + * @param bitpos the bit position of the data to migrate to node + * @param keyHash the improved hash of the element currently at `bitpos` + * @param node the node to place at `bitpos` + */ + def migrateFromInlineToNodeInPlace(bitpos: Int, keyHash: Int, node: SetNode[A]): this.type = { + val dataIx = dataIndex(bitpos) + val idxOld = TupleLength * dataIx + val idxNew = this.content.length - TupleLength - nodeIndex(bitpos) + + arraycopy(content, idxOld + TupleLength, content, idxOld, idxNew - idxOld) + content(idxNew) = node + + this.dataMap = this.dataMap ^ bitpos + this.nodeMap = this.nodeMap | bitpos + this.originalHashes = removeElement(originalHashes, dataIx) + this.size = this.size - 1 + node.size + this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - keyHash + node.cachedJavaKeySetHashCode + this + } + + def copyAndMigrateFromNodeToInline(bitpos: Int, elementHash: Int, oldNode: SetNode[A], node: SetNode[A]) = { + val idxOld = this.content.length - 1 - nodeIndex(bitpos) + val dataIxNew = dataIndex(bitpos) + val idxNew = TupleLength * dataIxNew + + val src = this.content + val dst = new Array[Any](src.length - 1 + 1) + + // copy 'src' and remove 1 element(s) at position 'idxOld' and + // insert 1 element(s) at position 'idxNew' + // assert(idxOld >= idxNew) + arraycopy(src, 0, dst, 0, idxNew) + dst(idxNew) = node.getPayload(0) + arraycopy(src, idxNew, dst, idxNew + 1, idxOld - idxNew) + arraycopy(src, idxOld + 1, dst, idxOld + 1, src.length - idxOld - 1) + val hash = node.getHash(0) + val dstHashes = insertElement(originalHashes, dataIxNew, hash) + new BitmapIndexedSetNode[A]( + dataMap = dataMap | bitpos, + nodeMap = nodeMap ^ bitpos, + content = dst, + originalHashes = dstHashes, + size = size - oldNode.size + 1, + cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + node.cachedJavaKeySetHashCode + ) + } + + /** Variant of `copyAndMigrateFromNodeToInline` which mutates `this` rather than returning a new node. + * + * Note: This method will mutate `this`, and will mutate `this.content` + * + * Mutation of `this.content` will occur as an optimization not possible in maps. Since TupleLength == 1 for sets, + * content array size does not change during inline <-> node migrations. Therefore, since we are updating in-place, + * we reuse this.content by shifting data/nodes around, rather than allocating a new array.
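+   * + * This is the reverse of the sketch above (hypothetical): the one-element node's payload is shifted + * back into the data region and the vacated node slot is compacted away, again reusing the same array.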
+ * + * @param bitpos the bit position of the node to migrate inline + * @param oldNode the node currently stored at position `bitpos` + * @param node the node containing the single element to migrate inline + */ + def migrateFromNodeToInlineInPlace(bitpos: Int, originalHash: Int, elementHash: Int, oldNode: SetNode[A], node: SetNode[A]): Unit = { + val idxOld = this.content.length - 1 - nodeIndex(bitpos) + val dataIxNew = dataIndex(bitpos) + val element = node.getPayload(0) + arraycopy(content, dataIxNew, content, dataIxNew + 1, idxOld - dataIxNew) + content(dataIxNew) = element + val hash = node.getHash(0) + val dstHashes = insertElement(originalHashes, dataIxNew, hash) + + this.dataMap = this.dataMap | bitpos + this.nodeMap = this.nodeMap ^ bitpos + this.originalHashes = dstHashes + this.size = this.size - oldNode.size + 1 + this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + node.cachedJavaKeySetHashCode + } + + def foreach[U](f: A => U): Unit = { + val thisPayloadArity = payloadArity + var i = 0 + while (i < thisPayloadArity) { + f(getPayload(i)) + i += 1 + } + + val thisNodeArity = nodeArity + var j = 0 + while (j < thisNodeArity) { + getNode(j).foreach(f) + j += 1 + } + } + + def subsetOf(that: SetNode[A], shift: Int): Boolean = if (this eq that) true else that match { + case _: HashCollisionSetNode[A] => false + case node: BitmapIndexedSetNode[A] => + val thisBitmap = this.dataMap | this.nodeMap + val nodeBitmap = node.dataMap | node.nodeMap + + if ((thisBitmap | nodeBitmap) != nodeBitmap) + return false + + var bitmap = thisBitmap & nodeBitmap + var bitsToSkip = numberOfTrailingZeros(bitmap) + + var isValidSubset = true + while (isValidSubset && bitsToSkip < HashCodeLength) { + val bitpos = bitposFrom(bitsToSkip) + + isValidSubset = + if ((this.dataMap & bitpos) != 0) { + if ((node.dataMap & bitpos) != 0) { + // Data x Data + val payload0 = this.getPayload(indexFrom(this.dataMap, bitpos)) + val payload1 = node.getPayload(indexFrom(node.dataMap, bitpos)) + payload0 == payload1 + } else { + // Data x Node + val thisDataIndex = indexFrom(this.dataMap, bitpos) + val payload = this.getPayload(thisDataIndex) + val subNode = that.getNode(indexFrom(node.nodeMap, bitpos)) + val elementUnimprovedHash = getHash(thisDataIndex) + val elementHash = improve(elementUnimprovedHash) + subNode.contains(payload, elementUnimprovedHash, elementHash, shift + BitPartitionSize) + } + } else { + // Node x Node + val subNode0 = this.getNode(indexFrom(this.nodeMap, bitpos)) + val subNode1 = node.getNode(indexFrom(node.nodeMap, bitpos)) + subNode0.subsetOf(subNode1, shift + BitPartitionSize) + } + + val newBitmap = bitmap ^ bitpos + bitmap = newBitmap + bitsToSkip = numberOfTrailingZeros(newBitmap) + } + isValidSubset + } + + override def filterImpl(pred: A => Boolean, flipped: Boolean): BitmapIndexedSetNode[A] = { + if (size == 0) this + else if (size == 1) { + if (pred(getPayload(0)) != flipped) this else SetNode.empty + } else if (nodeMap == 0) { + // Performance optimization for nodes of depth 1: + // + // this node has no "node" children, all children are inlined data elems, therefore the logic is significantly simpler + // approach: + // * traverse the content array, accumulating in `newDataMap: Int` any bit positions of keys which pass the filter + // * (bitCount(newDataMap) * TupleLength) tells us the new content array and originalHashes array size, so now perform allocations + // * traverse the content array once more, placing each passing element (according
to `newDataMap`) in the new content and originalHashes arrays + // + // note: + // * this optimization significantly improves performance of not only small trees, but also larger trees, since + // even non-root nodes are affected by this improvement, and large trees will consist of many nodes as + // descendants + // + val minimumIndex: Int = Integer.numberOfTrailingZeros(dataMap) + val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(dataMap) + + var newDataMap = 0 + var newCachedHashCode = 0 + var dataIndex = 0 + + var i = minimumIndex + + while (i < maximumIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & dataMap) != 0) { + val payload = getPayload(dataIndex) + val passed = pred(payload) != flipped + + if (passed) { + newDataMap |= bitpos + newCachedHashCode += improve(getHash(dataIndex)) + } + + dataIndex += 1 + } + + i += 1 + } + + if (newDataMap == 0) { + SetNode.empty + } else if (newDataMap == dataMap) { + this + } else { + val newSize = Integer.bitCount(newDataMap) + val newContent = new Array[Any](newSize) + val newOriginalHashCodes = new Array[Int](newSize) + val newMaximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(newDataMap) + + var j = Integer.numberOfTrailingZeros(newDataMap) + + var newDataIndex = 0 + + while (j < newMaximumIndex) { + val bitpos = bitposFrom(j) + if ((bitpos & newDataMap) != 0) { + val oldIndex = indexFrom(dataMap, bitpos) + newContent(newDataIndex) = content(oldIndex) + newOriginalHashCodes(newDataIndex) = originalHashes(oldIndex) + newDataIndex += 1 + } + j += 1 + } + + new BitmapIndexedSetNode(newDataMap, 0, newContent, newOriginalHashCodes, newSize, newCachedHashCode) + } + } else { + val allMap = dataMap | nodeMap + val minimumIndex: Int = Integer.numberOfTrailingZeros(allMap) + val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) + + var oldDataPassThrough = 0 + + // bitmap of nodes which, when filtered, returned a single-element node. These must be migrated to data + var nodeMigrateToDataTargetMap = 0 + + // TODO: When filtering results in a single-elem node, simply `(A, originalHash, improvedHash)` could be returned, + // rather than a singleton node (to avoid pointlessly allocating arrays, nodes, which would just be inlined in + // the parent anyways). This would probably involve changing the return type of filterImpl to `AnyRef` which may + // return at runtime a SetNode[A], or a tuple of (A, Int, Int) + + // the queue of single-element, post-filter nodes + var nodesToMigrateToData: mutable.Queue[SetNode[A]] = null + + // bitmap of all nodes which, when filtered, returned themselves.
They are passed forward to the returned node + var nodesToPassThroughMap = 0 + + // bitmap of any nodes which, after being filtered, returned a node that is not empty, but also not `eq` itself + // These are stored for later inclusion into the final `content` array + // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular) + var mapOfNewNodes = 0 + // each bit in `mapOfNewNodes` corresponds to one element in this queue + var newNodes: mutable.Queue[SetNode[A]] = null + + var newDataMap = 0 + var newNodeMap = 0 + var newSize = 0 + var newCachedHashCode = 0 + + var dataIndex = 0 + var nodeIndex = 0 + + var i = minimumIndex + while (i < maximumIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & dataMap) != 0) { + val payload = getPayload(dataIndex) + val passed = pred(payload) != flipped + + if (passed) { + newDataMap |= bitpos + oldDataPassThrough |= bitpos + newSize += 1 + newCachedHashCode += improve(getHash(dataIndex)) + } + + dataIndex += 1 + } else if ((bitpos & nodeMap) != 0) { + val oldSubNode = getNode(nodeIndex) + val newSubNode = oldSubNode.filterImpl(pred, flipped) + + newSize += newSubNode.size + newCachedHashCode += newSubNode.cachedJavaKeySetHashCode + + // if (newSubNode.size == 0) do nothing (drop it) + if (newSubNode.size > 1) { + newNodeMap |= bitpos + if (oldSubNode eq newSubNode) { + nodesToPassThroughMap |= bitpos + } else { + mapOfNewNodes |= bitpos + if (newNodes eq null) { + newNodes = mutable.Queue.empty[SetNode[A]] + } + newNodes += newSubNode + } + } else if (newSubNode.size == 1) { + newDataMap |= bitpos + nodeMigrateToDataTargetMap |= bitpos + if (nodesToMigrateToData eq null) { + nodesToMigrateToData = mutable.Queue.empty[SetNode[A]] + } + nodesToMigrateToData += newSubNode + } + + nodeIndex += 1 + } + + i += 1 + } + + this.newNodeFrom( + newSize = newSize, + newDataMap = newDataMap, + newNodeMap = newNodeMap, + minimumIndex = minimumIndex, + oldDataPassThrough = oldDataPassThrough, + nodesToPassThroughMap = nodesToPassThroughMap, + nodeMigrateToDataTargetMap = nodeMigrateToDataTargetMap, + nodesToMigrateToData = nodesToMigrateToData, + mapOfNewNodes = mapOfNewNodes, + newNodes = newNodes, + newCachedHashCode = newCachedHashCode + ) + } + } + + override def diff(that: SetNode[A], shift: Int): BitmapIndexedSetNode[A] = that match { + case bm: BitmapIndexedSetNode[A] => + if (size == 0) this + else if (size == 1) { + val h = getHash(0) + if (that.contains(getPayload(0), h, improve(h), shift)) SetNode.empty else this + } else { + val allMap = dataMap | nodeMap + val minimumIndex: Int = Integer.numberOfTrailingZeros(allMap) + val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) + + var oldDataPassThrough = 0 + + // bitmap of nodes which, when filtered, returned a single-element node. These must be migrated to data + var nodeMigrateToDataTargetMap = 0 + // the queue of single-element, post-filter nodes + var nodesToMigrateToData: mutable.Queue[SetNode[A]] = null + + // bitmap of all nodes which, when filtered, returned themselves. 
They are passed forward to the returned node + var nodesToPassThroughMap = 0 + + // bitmap of any nodes which, after being filtered, returned a node that is not empty, but also not `eq` itself + // These are stored for later inclusion into the final `content` array + // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular) + var mapOfNewNodes = 0 + // each bit in `mapOfNewNodes` corresponds to one element in this queue + var newNodes: mutable.Queue[SetNode[A]] = null + + var newDataMap = 0 + var newNodeMap = 0 + var newSize = 0 + var newCachedHashCode = 0 + + var dataIndex = 0 + var nodeIndex = 0 + + var i = minimumIndex + while (i < maximumIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & dataMap) != 0) { + val payload = getPayload(dataIndex) + val originalHash = getHash(dataIndex) + val hash = improve(originalHash) + + if (!bm.contains(payload, originalHash, hash, shift)) { + newDataMap |= bitpos + oldDataPassThrough |= bitpos + newSize += 1 + newCachedHashCode += hash + } + + dataIndex += 1 + } else if ((bitpos & nodeMap) != 0) { + val oldSubNode = getNode(nodeIndex) + + val newSubNode: SetNode[A] = + if ((bitpos & bm.dataMap) != 0) { + val thatDataIndex = indexFrom(bm.dataMap, bitpos) + val thatPayload = bm.getPayload(thatDataIndex) + val thatOriginalHash = bm.getHash(thatDataIndex) + val thatHash = improve(thatOriginalHash) + oldSubNode.removed(thatPayload, thatOriginalHash, thatHash, shift + BitPartitionSize) + } else if ((bitpos & bm.nodeMap) != 0) { + oldSubNode.diff(bm.getNode(indexFrom(bm.nodeMap, bitpos)), shift + BitPartitionSize) + } else { + oldSubNode + } + + newSize += newSubNode.size + newCachedHashCode += newSubNode.cachedJavaKeySetHashCode + + // if (newSubNode.size == 0) do nothing (drop it) + if (newSubNode.size > 1) { + newNodeMap |= bitpos + if (oldSubNode eq newSubNode) { + nodesToPassThroughMap |= bitpos + } else { + mapOfNewNodes |= bitpos + if (newNodes eq null) { + newNodes = mutable.Queue.empty[SetNode[A]] + } + newNodes += newSubNode + } + } else if (newSubNode.size == 1) { + newDataMap |= bitpos + nodeMigrateToDataTargetMap |= bitpos + if (nodesToMigrateToData eq null) { + nodesToMigrateToData = mutable.Queue.empty[SetNode[A]] + } + nodesToMigrateToData += newSubNode + } + + nodeIndex += 1 + } + + i += 1 + } + this.newNodeFrom( + newSize = newSize, + newDataMap = newDataMap, + newNodeMap = newNodeMap, + minimumIndex = minimumIndex, + oldDataPassThrough = oldDataPassThrough, + nodesToPassThroughMap = nodesToPassThroughMap, + nodeMigrateToDataTargetMap = nodeMigrateToDataTargetMap, + nodesToMigrateToData = nodesToMigrateToData, + mapOfNewNodes = mapOfNewNodes, + newNodes = newNodes, + newCachedHashCode = newCachedHashCode + ) + } + case _: HashCollisionSetNode[A] => + // this branch should never happen, because HashCollisionSetNodes and BitMapIndexedSetNodes do not occur at the + // same depth + throw new RuntimeException("BitmapIndexedSetNode diff HashCollisionSetNode") + } + + /** Utility method only for use in `diff` and `filterImpl` + * + * @param newSize the size of the new SetNode + * @param newDataMap the dataMap of the new SetNode + * @param newNodeMap the nodeMap of the new SetNode + * @param minimumIndex the minimum index (in range of [0, 31]) for which there are sub-nodes or data beneath the new + * SetNode + * @param oldDataPassThrough bitmap representing all the data that are just passed from `this` to the new + * SetNode + * @param nodesToPassThroughMap bitmap representing all nodes that are just passed from `this` 
to the new SetNode + * @param nodeMigrateToDataTargetMap bitmap representing all positions which will now be data in the new SetNode, + * but which were nodes in `this` + * @param nodesToMigrateToData a queue (in order of child position) of single-element nodes, which will be migrated + * to data, in positions in the `nodeMigrateToDataTargetMap` + * @param mapOfNewNodes bitmap of positions of new nodes to include in the new SetNode + * @param newNodes queue in order of child position, of all new nodes to include in the new SetNode + * @param newCachedHashCode the cached java keyset hashcode of the new SetNode + */ + private[this] def newNodeFrom( + newSize: Int, + newDataMap: Int, + newNodeMap: Int, + minimumIndex: Int, + oldDataPassThrough: Int, + nodesToPassThroughMap: Int, + nodeMigrateToDataTargetMap: Int, + nodesToMigrateToData: mutable.Queue[SetNode[A]], + mapOfNewNodes: Int, + newNodes: mutable.Queue[SetNode[A]], + newCachedHashCode: Int): BitmapIndexedSetNode[A] = { + if (newSize == 0) { + SetNode.empty + } else if (newSize == size) { + this + } else { + val newDataSize = bitCount(newDataMap) + val newContentSize = newDataSize + bitCount(newNodeMap) + val newContent = new Array[Any](newContentSize) + val newOriginalHashes = new Array[Int](newDataSize) + + val newAllMap = newDataMap | newNodeMap + val maxIndex = Node.BranchingFactor - Integer.numberOfLeadingZeros(newAllMap) + + // note: We MUST start from the minimum index in the old (`this`) node, otherwise `old{Node,Data}Index` will + // not be incremented properly. Otherwise we could have started at Integer.numberOfTrailingZeros(newAllMap) + var i = minimumIndex + + var oldDataIndex = 0 + var oldNodeIndex = 0 + + var newDataIndex = 0 + var newNodeIndex = 0 + + while (i < maxIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & oldDataPassThrough) != 0) { + newContent(newDataIndex) = getPayload(oldDataIndex) + newOriginalHashes(newDataIndex) = getHash(oldDataIndex) + newDataIndex += 1 + oldDataIndex += 1 + } else if ((bitpos & nodesToPassThroughMap) != 0) { + newContent(newContentSize - newNodeIndex - 1) = getNode(oldNodeIndex) + newNodeIndex += 1 + oldNodeIndex += 1 + } else if ((bitpos & nodeMigrateToDataTargetMap) != 0) { + // we need not check for null here. If nodeMigrateToDataTargetMap != 0, then nodesToMigrateToData must not be null + val node = nodesToMigrateToData.dequeue() + newContent(newDataIndex) = node.getPayload(0) + newOriginalHashes(newDataIndex) = node.getHash(0) + newDataIndex += 1 + oldNodeIndex += 1 + } else if ((bitpos & mapOfNewNodes) != 0) { + // we need not check for null here.
If mapOfNewNodes != 0, then newNodes must not be null + newContent(newContentSize - newNodeIndex - 1) = newNodes.dequeue() + newNodeIndex += 1 + oldNodeIndex += 1 + } else if ((bitpos & dataMap) != 0) { + oldDataIndex += 1 + } else if ((bitpos & nodeMap) != 0) { + oldNodeIndex += 1 + } + + i += 1 + } + + new BitmapIndexedSetNode[A](newDataMap, newNodeMap, newContent, newOriginalHashes, newSize, newCachedHashCode) + } + } + + + override def equals(that: Any): Boolean = + that match { + case node: BitmapIndexedSetNode[_] => + (this eq node) || + (this.cachedJavaKeySetHashCode == node.cachedJavaKeySetHashCode) && + (this.nodeMap == node.nodeMap) && + (this.dataMap == node.dataMap) && + (this.size == node.size) && + java.util.Arrays.equals(this.originalHashes, node.originalHashes) && + deepContentEquality(this.content, node.content, content.length) + case _ => false + } + + @`inline` private def deepContentEquality(a1: Array[Any], a2: Array[Any], length: Int): Boolean = { + if (a1 eq a2) + true + else { + var isEqual = true + var i = 0 + + while (isEqual && i < length) { + isEqual = a1(i) == a2(i) + i += 1 + } + + isEqual + } + } + + override def hashCode(): Int = + throw new UnsupportedOperationException("Trie nodes do not support hashing.") + + override def copy(): BitmapIndexedSetNode[A] = { + val contentClone = content.clone() + val contentLength = contentClone.length + var i = bitCount(dataMap) + while (i < contentLength) { + contentClone(i) = contentClone(i).asInstanceOf[SetNode[A]].copy() + i += 1 + } + new BitmapIndexedSetNode[A](dataMap, nodeMap, contentClone, originalHashes.clone(), size, cachedJavaKeySetHashCode) + } + + override def concat(that: SetNode[A], shift: Int): BitmapIndexedSetNode[A] = that match { + case bm: BitmapIndexedSetNode[A] => + if (size == 0) return bm + else if (bm.size == 0 || (bm eq this)) return this + else if (bm.size == 1) { + val originalHash = bm.getHash(0) + return this.updated(bm.getPayload(0), originalHash, improve(originalHash), shift) + } + + // if we go through the merge and the result does not differ from `this`, we can just return `this`, to improve sharing + // So, `anyChangesMadeSoFar` will be set to `true` as soon as we encounter a difference between the + // currently-being-computed result, and `this` + var anyChangesMadeSoFar = false + + // bitmap containing `1` in any position that has any descendant in either left or right, either data or node + val allMap = dataMap | bm.dataMap | nodeMap | bm.nodeMap + + // minimumIndex is inclusive -- it is the first index for which there is data or nodes + val minimumBitPos: Int = Node.bitposFrom(Integer.numberOfTrailingZeros(allMap)) + // maximumIndex is inclusive -- it is the last index for which there is data or nodes + // it could not be exclusive, because then upper bound in worst case (Node.BranchingFactor) would be out-of-bound + // of int bitposition representation + val maximumBitPos: Int = Node.bitposFrom(Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) - 1) + + var leftNodeRightNode = 0 + var leftDataRightNode = 0 + var leftNodeRightData = 0 + var leftDataOnly = 0 + var rightDataOnly = 0 + var leftNodeOnly = 0 + var rightNodeOnly = 0 + var leftDataRightDataMigrateToNode = 0 + var leftDataRightDataLeftOverwrites = 0 + + var dataToNodeMigrationTargets = 0 + + { + var bitpos = minimumBitPos + var leftIdx = 0 + var rightIdx = 0 + var finished = false + + while (!finished) { + + if ((bitpos & dataMap) != 0) { + if ((bitpos & bm.dataMap) != 0) { + if (getHash(leftIdx) == 
bm.getHash(rightIdx) && getPayload(leftIdx) == bm.getPayload(rightIdx)) { + leftDataRightDataLeftOverwrites |= bitpos + } else { + leftDataRightDataMigrateToNode |= bitpos + dataToNodeMigrationTargets |= Node.bitposFrom(Node.maskFrom(improve(getHash(leftIdx)), shift)) + } + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + leftDataRightNode |= bitpos + } else { + leftDataOnly |= bitpos + } + leftIdx += 1 + } else if ((bitpos & nodeMap) != 0) { + if ((bitpos & bm.dataMap) != 0) { + leftNodeRightData |= bitpos + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + leftNodeRightNode |= bitpos + } else { + leftNodeOnly |= bitpos + } + } else if ((bitpos & bm.dataMap) != 0) { + rightDataOnly |= bitpos + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + rightNodeOnly |= bitpos + } + + if (bitpos == maximumBitPos) { + finished = true + } else { + bitpos = bitpos << 1 + } + } + } + + + val newDataMap = leftDataOnly | rightDataOnly | leftDataRightDataLeftOverwrites + + val newNodeMap = + leftNodeRightNode | + leftDataRightNode | + leftNodeRightData | + leftNodeOnly | + rightNodeOnly | + dataToNodeMigrationTargets + + + if ((newDataMap == (leftDataOnly | leftDataRightDataLeftOverwrites)) && (newNodeMap == leftNodeOnly)) { + // nothing from `bm` will make it into the result -- return early + return this + } + + val newDataSize = bitCount(newDataMap) + val newContentSize = newDataSize + bitCount(newNodeMap) + + val newContent = new Array[Any](newContentSize) + val newOriginalHashes = new Array[Int](newDataSize) + var newSize = 0 + var newCachedHashCode = 0 + + { + var leftDataIdx = 0 + var rightDataIdx = 0 + var leftNodeIdx = 0 + var rightNodeIdx = 0 + + val nextShift = shift + Node.BitPartitionSize + + var compressedDataIdx = 0 + var compressedNodeIdx = 0 + + var bitpos = minimumBitPos + var finished = false + + while (!finished) { + + if ((bitpos & leftNodeRightNode) != 0) { + val leftNode = getNode(leftNodeIdx) + val newNode = leftNode.concat(bm.getNode(rightNodeIdx), nextShift) + if (leftNode ne newNode) { + anyChangesMadeSoFar = true + } + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + leftNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + + } else if ((bitpos & leftDataRightNode) != 0) { + anyChangesMadeSoFar = true + val newNode = { + val n = bm.getNode(rightNodeIdx) + val leftPayload = getPayload(leftDataIdx) + val leftOriginalHash = getHash(leftDataIdx) + val leftImproved = improve(leftOriginalHash) + n.updated(leftPayload, leftOriginalHash, leftImproved, nextShift) + } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + leftDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } + else if ((bitpos & leftNodeRightData) != 0) { + val newNode = { + val rightOriginalHash = bm.getHash(rightDataIdx) + val leftNode = getNode(leftNodeIdx) + val updated = leftNode.updated( + element = bm.getPayload(rightDataIdx), + originalHash = bm.getHash(rightDataIdx), + hash = improve(rightOriginalHash), + shift = nextShift + ) + if (updated ne leftNode) { + anyChangesMadeSoFar = true + } + updated + } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftNodeIdx += 1 + rightDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + + } else if ((bitpos & leftDataOnly) != 0) { + val originalHash = 
originalHashes(leftDataIdx) + newContent(compressedDataIdx) = getPayload(leftDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + leftDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + } else if ((bitpos & rightDataOnly) != 0) { + anyChangesMadeSoFar = true + val originalHash = bm.originalHashes(rightDataIdx) + newContent(compressedDataIdx) = bm.getPayload(rightDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + rightDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + } else if ((bitpos & leftNodeOnly) != 0) { + val newNode = getNode(leftNodeIdx) + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & rightNodeOnly) != 0) { + anyChangesMadeSoFar = true + val newNode = bm.getNode(rightNodeIdx) + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & leftDataRightDataMigrateToNode) != 0) { + anyChangesMadeSoFar = true + val newNode = { + val leftOriginalHash = getHash(leftDataIdx) + val rightOriginalHash = bm.getHash(rightDataIdx) + + bm.mergeTwoKeyValPairs( + getPayload(leftDataIdx), leftOriginalHash, improve(leftOriginalHash), + bm.getPayload(rightDataIdx), rightOriginalHash, improve(rightOriginalHash), + nextShift + ) + } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftDataIdx += 1 + rightDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & leftDataRightDataLeftOverwrites) != 0) { + val originalHash = bm.originalHashes(rightDataIdx) + newContent(compressedDataIdx) = bm.getPayload(rightDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + rightDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + leftDataIdx += 1 + } + + if (bitpos == maximumBitPos) { + finished = true + } else { + bitpos = bitpos << 1 + } + } + } + + if (anyChangesMadeSoFar) + new BitmapIndexedSetNode( + dataMap = newDataMap, + nodeMap = newNodeMap, + content = newContent, + originalHashes = newOriginalHashes, + size = newSize, + cachedJavaKeySetHashCode = newCachedHashCode + ) + else this + + case _ => + // should never happen -- hash collisions are never at the same level as bitmapIndexedSetNodes + throw new UnsupportedOperationException("Cannot concatenate a HashCollisionSetNode with a BitmapIndexedSetNode") + } + + override def foreachWithHash(f: (A, Int) => Unit): Unit = { + val iN = payloadArity // arity doesn't change during this operation + var i = 0 + while (i < iN) { + f(getPayload(i), getHash(i)) + i += 1 + } + + val jN = nodeArity // arity doesn't change during this operation + var j = 0 + while (j < jN) { + getNode(j).foreachWithHash(f) + j += 1 + } + } + + override def foreachWithHashWhile(f: (A, Int) => Boolean): Boolean = { + val thisPayloadArity = payloadArity + var pass = true + var i = 0 + while (i < thisPayloadArity && pass) { + pass &&= f(getPayload(i), getHash(i)) + i += 1 + } + + val thisNodeArity = nodeArity + var j = 0 + while (j < thisNodeArity && pass) { + pass &&= getNode(j).foreachWithHashWhile(f) + j += 1 + } + pass + } +} + 
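+// Editor's note: a minimal sketch (hypothetical values, not part of the original source) of the
+// CHAMP content-array layout the methods above rely on. Payloads fill `content` left-to-right,
+// sub-nodes fill it right-to-left, and both are addressed by a popcount over the bitmaps.
+// For a node with dataMap = 0b0101 and nodeMap = 0b1000:
+//
+//   dataIndex(bitposFrom(0))              // == 0: the payload for mask bit 0 is content(0)
+//   dataIndex(bitposFrom(2))              // == 1: the payload for mask bit 2 is content(1)
+//   content(content.length - 1)           // the sub-node for mask bit 3 sits at the very end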
+private final class HashCollisionSetNode[A](val originalHash: Int, val hash: Int, var content: Vector[A]) extends SetNode[A] { + + import Node._ + + require(content.length >= 2) + + def contains(element: A, originalHash: Int, hash: Int, shift: Int): Boolean = + this.hash == hash && content.contains(element) + + def updated(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] = + if (this.contains(element, originalHash, hash, shift)) { + this + } else { + new HashCollisionSetNode[A](originalHash, hash, content.appended(element)) + } + + /** + * Remove an element from the hash collision node. + * + * When after deletion only one element remains, we return a bit-mapped indexed node with a + * singleton element and a hash-prefix for trie level 0. This node will then either a) become + * the new root, or b) be unwrapped and inlined deeper in the trie. + */ + def removed(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] = + if (!this.contains(element, originalHash, hash, shift)) { + this + } else { + val updatedContent = content.filterNot(element0 => element0 == element) + // assert(updatedContent.size == content.size - 1) + + updatedContent.size match { + case 1 => new BitmapIndexedSetNode[A](bitposFrom(maskFrom(hash, 0)), 0, Array(updatedContent(0)), Array(originalHash), 1, hash) + case _ => new HashCollisionSetNode[A](originalHash, hash, updatedContent) + } + } + + def hasNodes: Boolean = false + + def nodeArity: Int = 0 + + def getNode(index: Int): SetNode[A] = + throw new IndexOutOfBoundsException("No sub-nodes present in hash-collision leaf node.") + + def hasPayload: Boolean = true + + def payloadArity: Int = content.length + + def getPayload(index: Int): A = content(index) + + override def getHash(index: Int): Int = originalHash + + def size: Int = content.length + + def foreach[U](f: A => U): Unit = { + val iter = content.iterator + while (iter.hasNext) { + f(iter.next()) + } + } + + + override def cachedJavaKeySetHashCode: Int = size * hash + + def subsetOf(that: SetNode[A], shift: Int): Boolean = if (this eq that) true else that match { + case node: HashCollisionSetNode[A] => + this.payloadArity <= node.payloadArity && this.content.forall(node.content.contains) + case _ => + false + } + + override def filterImpl(pred: A => Boolean, flipped: Boolean): SetNode[A] = { + val newContent = content.filterImpl(pred, flipped) + val newContentLength = newContent.length + if (newContentLength == 0) { + SetNode.empty + } else if (newContentLength == 1) { + new BitmapIndexedSetNode[A](bitposFrom(maskFrom(hash, 0)), 0, Array(newContent.head), Array(originalHash), 1, hash) + } else if (newContentLength == content.length) this + else new HashCollisionSetNode(originalHash, hash, newContent) + } + + override def diff(that: SetNode[A], shift: Int): SetNode[A] = + filterImpl(that.contains(_, originalHash, hash, shift), true) + + override def equals(that: Any): Boolean = + that match { + case node: HashCollisionSetNode[_] => + (this eq node) || + (this.hash == node.hash) && + (this.content.size == node.content.size) && + this.content.forall(node.content.contains) + case _ => false + } + + override def hashCode(): Int = + throw new UnsupportedOperationException("Trie nodes do not support hashing.") + + override def copy() = new HashCollisionSetNode[A](originalHash, hash, content) + + override def concat(that: SetNode[A], shift: Int): SetNode[A] = that match { + case hc: HashCollisionSetNode[A] => + if (hc eq this) { + this + } else { + var newContent: VectorBuilder[A] = null
+ val iter = hc.content.iterator + while (iter.hasNext) { + val nextPayload = iter.next() + if (!content.contains(nextPayload)) { + if (newContent eq null) { + newContent = new VectorBuilder() + newContent.addAll(this.content) + } + newContent.addOne(nextPayload) + } + } + if (newContent eq null) this else new HashCollisionSetNode(originalHash, hash, newContent.result()) + } + case _: BitmapIndexedSetNode[A] => + // should never happen -- hash collisions are never at the same level as bitmapIndexedSetNodes + throw new UnsupportedOperationException("Cannot concatenate a HashCollisionSetNode with a BitmapIndexedSetNode") + } + + override def foreachWithHash(f: (A, Int) => Unit): Unit = { + val iter = content.iterator + while (iter.hasNext) { + val next = iter.next() + f(next.asInstanceOf[A], originalHash) + } + } + + override def foreachWithHashWhile(f: (A, Int) => Boolean): Boolean = { + var stillGoing = true + val iter = content.iterator + while (iter.hasNext && stillGoing) { + val next = iter.next() + stillGoing &&= f(next.asInstanceOf[A], originalHash) + } + stillGoing + } +} + +private final class SetIterator[A](rootNode: SetNode[A]) + extends ChampBaseIterator[SetNode[A]](rootNode) with Iterator[A] { + + def next() = { + if (!hasNext) + throw new NoSuchElementException + + val payload = currentValueNode.getPayload(currentValueCursor) + currentValueCursor += 1 + + payload + } + +} + +private final class SetReverseIterator[A](rootNode: SetNode[A]) + extends ChampBaseReverseIterator[SetNode[A]](rootNode) with Iterator[A] { + + def next(): A = { + if (!hasNext) + throw new NoSuchElementException + + val payload = currentValueNode.getPayload(currentValueCursor) + currentValueCursor -= 1 + + payload + } + +} + +private final class SetHashIterator[A](rootNode: SetNode[A]) + extends ChampBaseIterator[SetNode[A]](rootNode) with Iterator[AnyRef] { + private[this] var hash = 0 + override def hashCode(): Int = hash + + def next(): AnyRef = { + if (!hasNext) + throw new NoSuchElementException + + hash = currentValueNode.getHash(currentValueCursor) + currentValueCursor += 1 + this + } + +} + + +/** + * $factoryInfo + * + * @define Coll `immutable.HashSet` + * @define coll immutable champ hash set + */ +@SerialVersionUID(3L) +object HashSet extends IterableFactory[HashSet] { + + @transient + private final val EmptySet = new HashSet(SetNode.empty) + + def empty[A]: HashSet[A] = + EmptySet.asInstanceOf[HashSet[A]] + + def from[A](source: collection.IterableOnce[A]^): HashSet[A] = + source match { + case hs: HashSet[A] => hs + case _ if source.knownSize == 0 => empty[A] + case _ => (newBuilder[A] ++= source).result() + } + + /** Create a new Builder which can be reused after calling `result()` without an + * intermediate call to `clear()` in order to build multiple related results. + */ + def newBuilder[A]: ReusableBuilder[A, HashSet[A]] = new HashSetBuilder +} + +/** Builder for HashSet. + * $multipleResults + */ +private[collection] final class HashSetBuilder[A] extends ReusableBuilder[A, HashSet[A]] { + import Node._ + import SetNode._ + + private def newEmptyRootNode = new BitmapIndexedSetNode[A](0, 0, Array.emptyObjectArray.asInstanceOf[Array[Any]], Array.emptyIntArray, 0, 0) + + /** The last given out HashSet as a return value of `result()`, if any, otherwise null. + * Indicates that on next add, the elements should be copied to an identical structure, before continuing + * mutations. 
*/ + private var aliased: HashSet[A] = _ + + private def isAliased: Boolean = aliased != null + + /** The root node of the partially built hash set */ + private var rootNode: BitmapIndexedSetNode[A] = newEmptyRootNode + + /** Inserts element `elem` into array `as` at index `ix`, shifting right the trailing elems */ + private def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = { + if (ix < 0) throw new ArrayIndexOutOfBoundsException + if (ix > as.length) throw new ArrayIndexOutOfBoundsException + val result = new Array[Int](as.length + 1) + arraycopy(as, 0, result, 0, ix) + result(ix) = elem + arraycopy(as, ix, result, ix + 1, as.length - ix) + result + } + + /** Inserts an element into the bitmap-indexed node. Requires that the element is not already present */ + private def insertValue[A1 >: A](bm: BitmapIndexedSetNode[A], bitpos: Int, key: A, originalHash: Int, keyHash: Int): Unit = { + val dataIx = bm.dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = bm.content + val dst = new Array[Any](src.length + TupleLength) + + // copy 'src' and insert 1 element at position 'idx' + arraycopy(src, 0, dst, 0, idx) + dst(idx) = key + arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) + + val dstHashes = insertElement(bm.originalHashes, dataIx, originalHash) + + bm.dataMap = bm.dataMap | bitpos + bm.content = dst + bm.originalHashes = dstHashes + bm.size += 1 + bm.cachedJavaKeySetHashCode += keyHash + } + + /** Mutates `bm` to replace inline data at bit position `bitpos` with updated key/value */ + private def setValue[A1 >: A](bm: BitmapIndexedSetNode[A], bitpos: Int, elem: A): Unit = { + val dataIx = bm.dataIndex(bitpos) + val idx = TupleLength * dataIx + bm.content(idx) = elem + } + + def update(setNode: SetNode[A], element: A, originalHash: Int, elementHash: Int, shift: Int): Unit = + setNode match { + case bm: BitmapIndexedSetNode[A] => + val mask = maskFrom(elementHash, shift) + val bitpos = bitposFrom(mask) + + if ((bm.dataMap & bitpos) != 0) { + val index = indexFrom(bm.dataMap, mask, bitpos) + val element0 = bm.getPayload(index) + val element0UnimprovedHash = bm.getHash(index) + + if (element0UnimprovedHash == originalHash && element0 == element) { + setValue(bm, bitpos, element0) + } else { + val element0Hash = improve(element0UnimprovedHash) + val subNodeNew = bm.mergeTwoKeyValPairs(element0, element0UnimprovedHash, element0Hash, element, originalHash, elementHash, shift + BitPartitionSize) + bm.migrateFromInlineToNodeInPlace(bitpos, element0Hash, subNodeNew) + } + } else if ((bm.nodeMap & bitpos) != 0) { + val index = indexFrom(bm.nodeMap, mask, bitpos) + val subNode = bm.getNode(index) + val beforeSize = subNode.size + val beforeHashCode = subNode.cachedJavaKeySetHashCode + update(subNode, element, originalHash, elementHash, shift + BitPartitionSize) + bm.size += subNode.size - beforeSize + bm.cachedJavaKeySetHashCode += subNode.cachedJavaKeySetHashCode - beforeHashCode + } else { + insertValue(bm, bitpos, element, originalHash, elementHash) + } + case hc: HashCollisionSetNode[A] => + val index = hc.content.indexOf(element) + if (index < 0) { + hc.content = hc.content.appended(element) + } else { + hc.content = hc.content.updated(index, element) + } + } + + /** If currently referencing aliased structure, copy elements to new mutable structure */ + private def ensureUnaliased(): Unit = { + if (isAliased) copyElems() + aliased = null + } + + /** Copy elements to new mutable structure */ + private def copyElems(): Unit = { + rootNode = rootNode.copy() + }
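+  /* Editor's sketch (illustrative usage only, not part of the original source) of the aliasing
+   * protocol implemented by `ensureUnaliased()` above and `result()` below: the builder can be
+   * reused after `result()` because the first mutation afterwards copies the trie.
+   *
+   *   val b = HashSet.newBuilder[Int]
+   *   b += 1; b += 2
+   *   val s1 = b.result()   // the builder is now aliased to s1
+   *   b += 3                // ensureUnaliased() copies the root node, so s1 keeps its 2 elements
+   *   val s2 = b.result()   // HashSet(1, 2, 3); s1 is untouched
+   */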
+ override def result(): HashSet[A] = + if (rootNode.size == 0) { + HashSet.empty + } else if (aliased != null) { + aliased + } else { + aliased = new HashSet(rootNode) + releaseFence() + aliased + } + + override def addOne(elem: A): this.type = { + ensureUnaliased() + val h = elem.## + val im = improve(h) + update(rootNode, elem, h, im, 0) + this + } + + override def addAll(xs: IterableOnce[A]^) = { + ensureUnaliased() + xs match { + case hm: HashSet[A] => + new ChampBaseIterator[SetNode[A]](hm.rootNode) { + while(hasNext) { + val originalHash = currentValueNode.getHash(currentValueCursor) + update( + setNode = rootNode, + element = currentValueNode.getPayload(currentValueCursor), + originalHash = originalHash, + elementHash = improve(originalHash), + shift = 0 + ) + currentValueCursor += 1 + } + }.asInstanceOf // !!! cc gets confused with representation of capture sets in invariant position + case other => + val it = other.iterator + while(it.hasNext) addOne(it.next()) + } + + this + } + + override def clear(): Unit = { + aliased = null + if (rootNode.size > 0) { + // if rootNode is empty, we will not have given it away anyways, we instead give out the reused Set.empty + rootNode = newEmptyRootNode + } + } + + private[collection] def size: Int = rootNode.size + + override def knownSize: Int = rootNode.size +} diff --git a/tests/pos-special/stdlib/collection/immutable/IntMap.scala b/tests/pos-special/stdlib/collection/immutable/IntMap.scala new file mode 100644 index 000000000000..88148691e5c0 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/IntMap.scala @@ -0,0 +1,503 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package immutable + +import scala.collection.generic.{BitOperations, DefaultSerializationProxy} +import scala.collection.mutable.{Builder, ImmutableBuilder} +import scala.annotation.tailrec +import scala.annotation.unchecked.uncheckedVariance +import scala.language.implicitConversions +import language.experimental.captureChecking + +/** Utility class for integer maps. + */ +private[immutable] object IntMapUtils extends BitOperations.Int { + def branchMask(i: Int, j: Int) = highestOneBit(i ^ j) + + def join[T](p1: Int, t1: IntMap[T], p2: Int, t2: IntMap[T]): IntMap[T] = { + val m = branchMask(p1, p2) + val p = mask(p1, m) + if (zero(p1, m)) IntMap.Bin(p, m, t1, t2) + else IntMap.Bin(p, m, t2, t1) + } + + def bin[T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]): IntMap[T] = (left, right) match { + case (left, IntMap.Nil) => left + case (IntMap.Nil, right) => right + case (left, right) => IntMap.Bin(prefix, mask, left, right) + } +} + +import IntMapUtils._ + +/** A companion object for integer maps. + * + * @define Coll `IntMap` + */ +object IntMap { + def empty[T] : IntMap[T] = IntMap.Nil + + def singleton[T](key: Int, value: T): IntMap[T] = IntMap.Tip(key, value) + + def apply[T](elems: (Int, T)*): IntMap[T] = + elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)) + + def from[V](coll: IterableOnce[(Int, V)]^): IntMap[V] = + newBuilder[V].addAll(coll).result() + + private[immutable] case object Nil extends IntMap[Nothing] { + // Important! 
Without this equals method in place, an infinite + // loop from Map.equals => size => pattern-match-on-Nil => equals + // develops. Case objects and custom equality don't mix without + // careful handling. + override def equals(that : Any) = that match { + case _: this.type => true + case _: IntMap[_] => false // The only empty IntMaps are eq Nil + case _ => super.equals(that) + } + } + + private[immutable] case class Tip[+T](key: Int, value: T) extends IntMap[T] { + def withValue[S](s: S) = + if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap.Tip[S]] + else IntMap.Tip(key, s) + } + + private[immutable] case class Bin[+T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]) extends IntMap[T] { + def bin[S](left: IntMap[S], right: IntMap[S]): IntMap[S] = { + if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[IntMap.Bin[S]] + else IntMap.Bin[S](prefix, mask, left, right) + } + } + + def newBuilder[V]: Builder[(Int, V), IntMap[V]] = + new ImmutableBuilder[(Int, V), IntMap[V]](empty) { + def addOne(elem: (Int, V)): this.type = { elems = elems + elem; this } + } + + implicit def toFactory[V](dummy: IntMap.type): Factory[(Int, V), IntMap[V]] = ToFactory.asInstanceOf[Factory[(Int, V), IntMap[V]]] + + @SerialVersionUID(3L) + private[this] object ToFactory extends Factory[(Int, AnyRef), IntMap[AnyRef]] with Serializable { + def fromSpecific(it: IterableOnce[(Int, AnyRef)]^): IntMap[AnyRef] = IntMap.from[AnyRef](it) + def newBuilder: Builder[(Int, AnyRef), IntMap[AnyRef]] = IntMap.newBuilder[AnyRef] + } + + implicit def toBuildFrom[V](factory: IntMap.type): BuildFrom[Any, (Int, V), IntMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Int, V), IntMap[V]]] + private[this] object ToBuildFrom extends BuildFrom[Any, (Int, AnyRef), IntMap[AnyRef]] { + def fromSpecific(from: Any)(it: IterableOnce[(Int, AnyRef)]^) = IntMap.from(it) + def newBuilder(from: Any) = IntMap.newBuilder[AnyRef] + } + + implicit def iterableFactory[V]: Factory[(Int, V), IntMap[V]] = toFactory(this) + implicit def buildFromIntMap[V]: BuildFrom[IntMap[_], (Int, V), IntMap[V]] = toBuildFrom(this) +} + +// Iterator over a non-empty IntMap. +private[immutable] abstract class IntMapIterator[V, T](it: IntMap[V]) extends AbstractIterator[T] { + + // Basically this uses a simple stack to emulate recursion over the tree. However + // because we know that Ints are at least 32 bits we can have at most 32 IntMap.Bins and + // one IntMap.Tip sitting on the tree at any point. Therefore we know the maximum stack + // depth is 33, which is why `buffer` below is sized accordingly. + var index = 0 + var buffer = new Array[AnyRef](33) + + def pop = { + index -= 1 + buffer(index).asInstanceOf[IntMap[V]] + } + + def push(x: IntMap[V]): Unit = { + buffer(index) = x.asInstanceOf[AnyRef] + index += 1 + } + push(it) + + /** + * What value do we assign to a tip? + */ + def valueOf(tip: IntMap.Tip[V]): T + + def hasNext = index != 0 + @tailrec + final def next(): T = + pop match { + case IntMap.Bin(_, _, t@IntMap.Tip(_, _), right) => { + push(right) + valueOf(t) + } + case IntMap.Bin(_, _, left, right) => { + push(right) + push(left) + next() + } + case t@IntMap.Tip(_, _) => valueOf(t) + // This should never happen. We don't allow IntMap.Nil in subtrees of the IntMap + // and don't return an IntMapIterator for IntMap.Nil.
+ case IntMap.Nil => throw new IllegalStateException("Empty maps not allowed as subtrees") + } +} + +private[immutable] class IntMapEntryIterator[V](it: IntMap[V]) extends IntMapIterator[V, (Int, V)](it) { + def valueOf(tip: IntMap.Tip[V]) = (tip.key, tip.value) +} + +private[immutable] class IntMapValueIterator[V](it: IntMap[V]) extends IntMapIterator[V, V](it) { + def valueOf(tip: IntMap.Tip[V]) = tip.value +} + +private[immutable] class IntMapKeyIterator[V](it: IntMap[V]) extends IntMapIterator[V, Int](it) { + def valueOf(tip: IntMap.Tip[V]) = tip.key +} + +import IntMap._ + +/** Specialised immutable map structure for integer keys, based on + * [[https://ittc.ku.edu/~andygill/papers/IntMap98.pdf Fast Mergeable Integer Maps]] + * by Okasaki and Gill. Essentially a trie based on binary digits of the integers. + * + * '''Note:''' This class is as of 2.8 largely superseded by HashMap. + * + * @tparam T type of the values associated with integer keys. + * + * @define Coll `immutable.IntMap` + * @define coll immutable integer map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +sealed abstract class IntMap[+T] extends AbstractMap[Int, T] + with StrictOptimizedMapOps[Int, T, Map, IntMap[T]] + with Serializable { + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(Int, T) @uncheckedVariance]^): IntMap[T] = + intMapFrom[T](coll) + protected def intMapFrom[V2](coll: scala.collection.IterableOnce[(Int, V2)]^): IntMap[V2] = { + val b = IntMap.newBuilder[V2] + b.sizeHint(coll) + b.addAll(coll) + b.result() + } + override protected def newSpecificBuilder: Builder[(Int, T), IntMap[T]] @uncheckedVariance = + new ImmutableBuilder[(Int, T), IntMap[T]](empty) { + def addOne(elem: (Int, T)): this.type = { elems = elems + elem; this } + } + + override def empty: IntMap[T] = IntMap.Nil + + override def toList = { + val buffer = new scala.collection.mutable.ListBuffer[(Int, T)] + foreach(buffer += _) + buffer.toList + } + + /** + * Iterator over key, value pairs of the map in unsigned order of the keys. + * + * @return an iterator over pairs of integer keys and corresponding values. + */ + def iterator: Iterator[(Int, T)] = this match { + case IntMap.Nil => Iterator.empty + case _ => new IntMapEntryIterator(this) + } + + /** + * Loops over the key, value pairs of the map in unsigned order of the keys. + */ + override final def foreach[U](f: ((Int, T)) => U): Unit = this match { + case IntMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) } + case IntMap.Tip(key, value) => f((key, value)) + case IntMap.Nil => + } + + override def foreachEntry[U](f: (IntMapUtils.Int, T) => U): Unit = this match { + case IntMap.Bin(_, _, left, right) => { left.foreachEntry(f); right.foreachEntry(f) } + case IntMap.Tip(key, value) => f(key, value) + case IntMap.Nil => + } + + override def keysIterator: Iterator[Int] = this match { + case IntMap.Nil => Iterator.empty + case _ => new IntMapKeyIterator(this) + } + + /** + * Loop over the keys of the map. The same as `keys.foreach(f)`, but may + * be more efficient. + * + * @param f The loop body + */ + final def foreachKey[U](f: Int => U): Unit = this match { + case IntMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) } + case IntMap.Tip(key, _) => f(key) + case IntMap.Nil => + } + + override def valuesIterator: Iterator[T] = this match { + case IntMap.Nil => Iterator.empty + case _ => new IntMapValueIterator(this) + } + + /** + * Loop over the values of the map. 
The same as `values.foreach(f)`, but may + * be more efficient. + * + * @param f The loop body + */ + final def foreachValue[U](f: T => U): Unit = this match { + case IntMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) } + case IntMap.Tip(_, value) => f(value) + case IntMap.Nil => + } + + override protected[this] def className = "IntMap" + + override def isEmpty = this eq IntMap.Nil + override def knownSize: Int = if (isEmpty) 0 else super.knownSize + override def filter(f: ((Int, T)) => Boolean): IntMap[T] = this match { + case IntMap.Bin(prefix, mask, left, right) => { + val (newleft, newright) = (left.filter(f), right.filter(f)) + if ((left eq newleft) && (right eq newright)) this + else bin(prefix, mask, newleft, newright) + } + case IntMap.Tip(key, value) => + if (f((key, value))) this + else IntMap.Nil + case IntMap.Nil => IntMap.Nil + } + + override def transform[S](f: (Int, T) => S): IntMap[S] = this match { + case b@IntMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f)) + case t@IntMap.Tip(key, value) => t.withValue(f(key, value)) + case IntMap.Nil => IntMap.Nil + } + + final override def size: Int = this match { + case IntMap.Nil => 0 + case IntMap.Tip(_, _) => 1 + case IntMap.Bin(_, _, left, right) => left.size + right.size + } + + @tailrec + final def get(key: Int): Option[T] = this match { + case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key) + case IntMap.Tip(key2, value) => if (key == key2) Some(value) else None + case IntMap.Nil => None + } + + @tailrec + final override def getOrElse[S >: T](key: Int, default: => S): S = this match { + case IntMap.Nil => default + case IntMap.Tip(key2, value) => if (key == key2) value else default + case IntMap.Bin(prefix, mask, left, right) => + if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default) + } + + @tailrec + final override def apply(key: Int): T = this match { + case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key) + case IntMap.Tip(key2, value) => if (key == key2) value else throw new IllegalArgumentException("Key not found") + case IntMap.Nil => throw new IllegalArgumentException("key not found") + } + + override def + [S >: T] (kv: (Int, S)): IntMap[S] = updated(kv._1, kv._2) + + override def updated[S >: T](key: Int, value: S): IntMap[S] = this match { + case IntMap.Bin(prefix, mask, left, right) => + if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this) + else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updated(key, value), right) + else IntMap.Bin(prefix, mask, left, right.updated(key, value)) + case IntMap.Tip(key2, value2) => + if (key == key2) IntMap.Tip(key, value) + else join(key, IntMap.Tip(key, value), key2, this) + case IntMap.Nil => IntMap.Tip(key, value) + } + + def map[V2](f: ((Int, T)) => (Int, V2)): IntMap[V2] = intMapFrom(new View.Map(this, f)) + + def flatMap[V2](f: ((Int, T)) => IterableOnce[(Int, V2)]): IntMap[V2] = intMapFrom(new View.FlatMap(this, f)) + + override def concat[V1 >: T](that: collection.IterableOnce[(Int, V1)]^): IntMap[V1] = + super.concat(that).asInstanceOf[IntMap[V1]] // Already has correct type but not declared as such + + override def ++ [V1 >: T](that: collection.IterableOnce[(Int, V1)]^): IntMap[V1] = concat(that) + + def collect[V2](pf: PartialFunction[(Int, T), (Int, V2)]): IntMap[V2] = + strictOptimizedCollect(IntMap.newBuilder[V2], pf) + + /** + * Updates the map, using the 
provided function to resolve conflicts if the key is already present. + * + * Equivalent to: + * {{{ + * this.get(key) match { + * case None => this.update(key, value) + * case Some(oldvalue) => this.update(key, f(oldvalue, value)) + * } + * }}} + * + * @tparam S The supertype of values in this `IntMap`. + * @param key The key to update + * @param value The value to use if there is no conflict + * @param f The function used to resolve conflicts. + * @return The updated map. + */ + def updateWith[S >: T](key: Int, value: S, f: (T, S) => S): IntMap[S] = this match { + case IntMap.Bin(prefix, mask, left, right) => + if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this) + else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updateWith(key, value, f), right) + else IntMap.Bin(prefix, mask, left, right.updateWith(key, value, f)) + case IntMap.Tip(key2, value2) => + if (key == key2) IntMap.Tip(key, f(value2, value)) + else join(key, IntMap.Tip(key, value), key2, this) + case IntMap.Nil => IntMap.Tip(key, value) + } + + def removed(key: Int): IntMap[T] = this match { + case IntMap.Bin(prefix, mask, left, right) => + if (!hasMatch(key, prefix, mask)) this + else if (zero(key, mask)) bin(prefix, mask, left - key, right) + else bin(prefix, mask, left, right - key) + case IntMap.Tip(key2, _) => + if (key == key2) IntMap.Nil + else this + case IntMap.Nil => IntMap.Nil + } + + /** + * A combined transform and filter function. Returns an `IntMap` such that + * for each `(key, value)` mapping in this map, if `f(key, value) == None` + * the map contains no mapping for key, and if `f(key, value) == Some(x)` the + * map contains `(key, x)`. + * + * @tparam S The type of the values in the resulting `IntMap`. + * @param f The transforming function. + * @return The modified map. + */ + def modifyOrRemove[S](f: (Int, T) => Option[S]): IntMap[S] = this match { + case IntMap.Bin(prefix, mask, left, right) => + val newleft = left.modifyOrRemove(f) + val newright = right.modifyOrRemove(f) + if ((left eq newleft) && (right eq newright)) this.asInstanceOf[IntMap[S]] + else bin(prefix, mask, newleft, newright) + case IntMap.Tip(key, value) => f(key, value) match { + case None => + IntMap.Nil + case Some(value2) => + //hack to preserve sharing + if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap[S]] + else IntMap.Tip(key, value2) + } + case IntMap.Nil => + IntMap.Nil + } + + /** + * Forms a union map with that map, using the combining function to resolve conflicts. + * + * @tparam S The type of values in `that`, a supertype of values in `this`. + * @param that The map to form a union with. + * @param f The function used to resolve conflicts between two mappings. + * @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`.
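+ * For instance (an editor's illustration, not part of the original doc):
+ * {{{
+ *   val a = IntMap(1 -> 10, 2 -> 20)
+ *   val b = IntMap(2 -> 200, 3 -> 300)
+ *   a.unionWith(b, (key, x, y) => x + y)   // IntMap(1 -> 10, 2 -> 220, 3 -> 300)
+ * }}}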
+ */ + def unionWith[S >: T](that: IntMap[S], f: (Int, S, S) => S): IntMap[S] = (this, that) match { + case (IntMap.Bin(p1, m1, l1, r1), that@(IntMap.Bin(p2, m2, l2, r2))) => + if (shorter(m1, m2)) { + if (!hasMatch(p2, p1, m1)) join(p1, this, p2, that) + else if (zero(p2, m1)) IntMap.Bin(p1, m1, l1.unionWith(that, f), r1) + else IntMap.Bin(p1, m1, l1, r1.unionWith(that, f)) + } else if (shorter(m2, m1)) { + if (!hasMatch(p1, p2, m2)) join(p1, this, p2, that) + else if (zero(p1, m2)) IntMap.Bin(p2, m2, this.unionWith(l2, f), r2) + else IntMap.Bin(p2, m2, l2, this.unionWith(r2, f)) + } + else { + if (p1 == p2) IntMap.Bin(p1, m1, l1.unionWith(l2, f), r1.unionWith(r2, f)) + else join(p1, this, p2, that) + } + case (IntMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x)) + case (x, IntMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y)) + case (IntMap.Nil, x) => x + case (x, IntMap.Nil) => x + } + + /** + * Forms the intersection of these two maps with a combining function. The + * resulting map is a map that has only keys present in both maps and has + * values produced from the original mappings by combining them with `f`. + * + * @tparam S The type of values in `that`. + * @tparam R The type of values in the resulting `IntMap`. + * @param that The map to intersect with. + * @param f The combining function. + * @return Intersection of `this` and `that`, with values for identical keys produced by function `f`. + */ + def intersectionWith[S, R](that: IntMap[S], f: (Int, T, S) => R): IntMap[R] = (this, that) match { + case (IntMap.Bin(p1, m1, l1, r1), that@IntMap.Bin(p2, m2, l2, r2)) => + if (shorter(m1, m2)) { + if (!hasMatch(p2, p1, m1)) IntMap.Nil + else if (zero(p2, m1)) l1.intersectionWith(that, f) + else r1.intersectionWith(that, f) + } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f)) + else { + if (!hasMatch(p1, p2, m2)) IntMap.Nil + else if (zero(p1, m2)) this.intersectionWith(l2, f) + else this.intersectionWith(r2, f) + } + case (IntMap.Tip(key, value), that) => that.get(key) match { + case None => IntMap.Nil + case Some(value2) => IntMap.Tip(key, f(key, value, value2)) + } + case (_, IntMap.Tip(key, value)) => this.get(key) match { + case None => IntMap.Nil + case Some(value2) => IntMap.Tip(key, f(key, value2, value)) + } + case (_, _) => IntMap.Nil + } + + /** + * Left biased intersection. Returns the map that has all the same mappings + * as this but only for keys which are present in the other map. + * + * @tparam R The type of values in `that`. + * @param that The map to intersect with. + * @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`. + */ + def intersection[R](that: IntMap[R]): IntMap[T] = + this.intersectionWith(that, (key: Int, value: T, value2: R) => value) + + def ++[S >: T](that: IntMap[S]) = + this.unionWith[S](that, (key, x, y) => y) + + /** + * The entry with the lowest key value considered in unsigned order. + */ + @tailrec + final def firstKey: Int = this match { + case Bin(_, _, l, r) => l.firstKey + case Tip(k, v) => k + case IntMap.Nil => throw new IllegalStateException("Empty map") + } + + /** + * The entry with the highest key value considered in unsigned order.
+ */ + @tailrec + final def lastKey: Int = this match { + case Bin(_, _, l, r) => r.lastKey + case Tip(k, v) => k + case IntMap.Nil => throw new IllegalStateException("Empty map") + } + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(IntMap.toFactory[T](IntMap), this) +} diff --git a/tests/pos-special/stdlib/collection/immutable/Iterable.scala b/tests/pos-special/stdlib/collection/immutable/Iterable.scala new file mode 100644 index 000000000000..c4f9900eea8b --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/Iterable.scala @@ -0,0 +1,39 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.immutable + +import scala.collection.{IterableFactory, IterableFactoryDefaults} +import language.experimental.captureChecking + +/** A trait for collections that are guaranteed immutable. + * + * @tparam A the element type of the collection + * + * @define coll immutable collection + * @define Coll `immutable.Iterable` + */ +trait Iterable[+A] extends collection.Iterable[A] + with collection.IterableOps[A, Iterable, Iterable[A]] + with IterableFactoryDefaults[A, Iterable] { + this: Iterable[A]^ => + + override def iterableFactory: IterableFactory[Iterable] = Iterable +} + +@SerialVersionUID(3L) +object Iterable extends IterableFactory.Delegate[Iterable](List) { + override def from[E](it: IterableOnce[E]^): Iterable[E]^{it} = it match { + case iterable: Iterable[E] => iterable + case _ => super.from(it) + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala b/tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala new file mode 100644 index 000000000000..959dfbe36679 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala @@ -0,0 +1,1378 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import java.io.{ObjectInputStream, ObjectOutputStream} +import java.lang.{StringBuilder => JStringBuilder} + +import scala.annotation.tailrec +import scala.collection.generic.SerializeEnd +import scala.collection.mutable.{Builder, ReusableBuilder, StringBuilder} +import scala.language.implicitConversions +import scala.runtime.Statics +import language.experimental.captureChecking +import annotation.unchecked.uncheckedCaptures + +/** This class implements an immutable linked list. We call it "lazy" + * because it computes its elements only when they are needed. + * + * The class extends Iterable; it is a replacement for LazyList, which + * implemented Seq. The reason is that under capture checking, we + * assume that all Seqs are strict, and LazyList broke that assumption. + * As a consequence, we declare LazyList deprecated and unsafe for + * capture checking, and replace it with the current class, LazyListIterable. + * + * Elements are memoized; that is, the value of each element is computed at most once. + * + * Elements are computed in-order and are never skipped.
In other words, + * accessing the tail causes the head to be computed first. + * + * How lazy is a `LazyListIterable`? When you have a value of type `LazyListIterable`, you + * don't know yet whether the list is empty or not. If you learn that it is non-empty, + * then you also know that the head has been computed. But the tail is itself + * a `LazyListIterable`, whose emptiness-or-not might remain undetermined. + * + * A `LazyListIterable` may be infinite. For example, `LazyListIterable.from(0)` contains + * all of the natural numbers 0, 1, 2, and so on. For infinite sequences, + * some methods (such as `count`, `sum`, `max` or `min`) will not terminate. + * + * Here is an example: + * + * {{{ + * import scala.math.BigInt + * object Main extends App { + * val fibs: LazyListIterable[BigInt] = + * BigInt(0) #:: BigInt(1) #:: fibs.zip(fibs.tail).map{ n => n._1 + n._2 } + * fibs.take(5).foreach(println) + * } + * + * // prints + * // + * // 0 + * // 1 + * // 1 + * // 2 + * // 3 + * }}} + * + * To illustrate, let's add some output to the definition `fibs`, so we + * see what's going on. + * + * {{{ + * import scala.math.BigInt + * object Main extends App { + * val fibs: LazyListIterable[BigInt] = + * BigInt(0) #:: BigInt(1) #:: + * fibs.zip(fibs.tail).map{ n => + * println(s"Adding \${n._1} and \${n._2}") + * n._1 + n._2 + * } + * fibs.take(5).foreach(println) + * fibs.take(6).foreach(println) + * } + * + * // prints + * // + * // 0 + * // 1 + * // Adding 0 and 1 + * // 1 + * // Adding 1 and 1 + * // 2 + * // Adding 1 and 2 + * // 3 + * + * // And then prints + * // + * // 0 + * // 1 + * // 1 + * // 2 + * // 3 + * // Adding 2 and 3 + * // 5 + * }}} + * + * Note that the definition of `fibs` uses `val` not `def`. The memoization of the + * `LazyListIterable` requires us to have somewhere to store the information and a `val` + * allows us to do that. + * + * Further remarks about the semantics of `LazyListIterable`: + * + * - Though the `LazyListIterable` changes as it is accessed, this does not + * contradict its immutability. Once the values are memoized they do + * not change. Values that have yet to be memoized still "exist", they + * simply haven't been computed yet. + * + * - One must be cautious of memoization; it can eat up memory if you're not + * careful. That's because memoization of the `LazyListIterable` creates a structure much like + * [[scala.collection.immutable.List]]. As long as something is holding on to + * the head, the head holds on to the tail, and so on recursively. + * If, on the other hand, there is nothing holding on to the head (e.g. if we used + * `def` to define the `LazyListIterable`) then once it is no longer being used directly, + * it disappears. + * + * - Note that some operations, including [[drop]], [[dropWhile]], + * [[flatMap]] or [[collect]] may process a large number of intermediate + * elements before returning. + * + * Here's another example. Let's start with the natural numbers and iterate + * over them. 
+  *
+  * {{{
+  * // We'll start with a silly iteration
+  * def loop(s: String, i: Int, iter: Iterator[Int]): Unit = {
+  *   // Stop after 200,000
+  *   if (i < 200001) {
+  *     if (i % 50000 == 0) println(s + i)
+  *     loop(s, iter.next(), iter)
+  *   }
+  * }
+  *
+  * // Our first LazyListIterable definition will be a val definition
+  * val lazylist1: LazyListIterable[Int] = {
+  *   def loop(v: Int): LazyListIterable[Int] = v #:: loop(v + 1)
+  *   loop(0)
+  * }
+  *
+  * // Because lazylist1 is a val, everything that the iterator produces is held
+  * // by virtue of the fact that the head of the LazyListIterable is held in lazylist1
+  * val it1 = lazylist1.iterator
+  * loop("Iterator1: ", it1.next(), it1)
+  *
+  * // We can redefine this LazyListIterable such that all we have is the Iterator left
+  * // and allow the LazyListIterable to be garbage collected as required. Using a def
+  * // to provide the LazyListIterable ensures that no val is holding onto the head as
+  * // is the case with lazylist1
+  * def lazylist2: LazyListIterable[Int] = {
+  *   def loop(v: Int): LazyListIterable[Int] = v #:: loop(v + 1)
+  *   loop(0)
+  * }
+  * val it2 = lazylist2.iterator
+  * loop("Iterator2: ", it2.next(), it2)
+  *
+  * // And, of course, we don't actually need a LazyListIterable at all for such a simple
+  * // problem. There's no reason to use a LazyListIterable if you don't actually need
+  * // one.
+  * val it3 = new Iterator[Int] {
+  *   var i = -1
+  *   def hasNext = true
+  *   def next(): Int = { i += 1; i }
+  * }
+  * loop("Iterator3: ", it3.next(), it3)
+  * }}}
+  *
+  * - In the `fibs` example earlier, the fact that `tail` works at all is of interest.
+  *   `fibs` has an initial `(0, 1, LazyListIterable(...))`, so `tail` is deterministic.
+  *   If we defined `fibs` such that only `0` were concretely known, then the act
+  *   of determining `tail` would require the evaluation of `tail`, so the
+  *   computation would be unable to progress, as in this code:
+  *   {{{
+  *   // The first time we try to access the tail we're going to need more
+  *   // information which will require us to recurse, which will require us to
+  *   // recurse, which...
+  *   lazy val sov: LazyListIterable[Vector[Int]] = Vector(0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 }
+  *   }}}
+  *
+  * The definition of `fibs` above creates more objects than necessary, depending
+  * on how you might want to implement it. The following implementation is more
+  * "cost effective" because it has a more direct route to the numbers themselves:
+  *
+  * {{{
+  * lazy val fib: LazyListIterable[Int] = {
+  *   def loop(h: Int, n: Int): LazyListIterable[Int] = h #:: loop(n, h + n)
+  *   loop(1, 1)
+  * }
+  * }}}
+  *
+  * The head, the tail, and whether the list is empty or not can be initially unknown.
+  * Once any of those are evaluated, they are all known, though if the tail is
+  * built with `#::` or `#:::`, its content still isn't evaluated. Instead, evaluating
+  * the tail's content is deferred until the tail's empty status, head, or tail is
+  * evaluated.
+  *
+  * Delaying the evaluation of whether a LazyListIterable is empty or not until it's needed
+  * allows LazyListIterable to avoid eagerly evaluating any elements on a call to `filter`.
+  *
+  * Only when it's evaluated further (which may be never!) are any of the elements forced.
+  * For example:
+  *
+  * {{{
+  * def tailWithSideEffect: LazyListIterable[Nothing] = {
+  *   println("getting empty LazyListIterable")
+  *   LazyListIterable.empty
+  * }
+  *
+  * val emptyTail = tailWithSideEffect // prints "getting empty LazyListIterable"
+  *
+  * val suspended = 1 #:: tailWithSideEffect // doesn't print anything
+  * val tail = suspended.tail // although the tail is evaluated, *still* nothing is yet printed
+  * val filtered = tail.filter(_ => false) // still nothing is printed
+  * filtered.isEmpty // prints "getting empty LazyListIterable"
+  * }}}
+  *
+  * @tparam A the type of the elements contained in this lazy list.
+  *
+  * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#lazylists "Scala's Collection Library overview"]]
+  *      section on `LazyLists` for more information.
+  * @define Coll `LazyListIterable`
+  * @define coll lazy list
+  * @define orderDependent
+  * @define orderDependentFold
+  * @define appendStackSafety Note: Repeated chaining of calls to append methods (`appended`,
+  *                           `appendedAll`, `lazyAppendedAll`) without forcing any of the
+  *                           intermediate resulting lazy lists may overflow the stack when
+  *                           the final result is forced.
+  * @define preservesLaziness This method preserves laziness; elements are only evaluated
+  *                           individually as needed.
+  * @define initiallyLazy This method does not evaluate anything until an operation is performed
+  *                       on the result (e.g. calling `head` or `tail`, or checking if it is empty).
+  * @define evaluatesAllElements This method evaluates all elements of the collection.
+  */
+@SerialVersionUID(3L)
+final class LazyListIterable[+A] private(private[this] var lazyState: () => LazyListIterable.State[A]^)
+  extends AbstractIterable[A]
+    with Iterable[A]
+    with IterableOps[A, LazyListIterable, LazyListIterable[A]]
+    with IterableFactoryDefaults[A, LazyListIterable]
+    with Serializable {
+  this: LazyListIterable[A]^ =>
+  import LazyListIterable._
+
+  @volatile private[this] var stateEvaluated: Boolean = false
+  @inline private def stateDefined: Boolean = stateEvaluated
+  private[this] var midEvaluation = false
+
+  private lazy val state: State[A]^ = {
+    // if it's already mid-evaluation, we're stuck in an infinite
+    // self-referential loop (also it's empty)
+    if (midEvaluation) {
+      throw new RuntimeException("self-referential LazyListIterable or a derivation thereof has no more elements")
+    }
+    midEvaluation = true
+    val res = try lazyState() finally midEvaluation = false
+    // if we set it to `true` before evaluating, we may infinite loop
+    // if something expects `state` to already be evaluated
+    stateEvaluated = true
+    lazyState = null // allow GC
+    res
+  }
+
+  override def iterableFactory: IterableFactory[LazyListIterable] = LazyListIterable
+
+  override def isEmpty: Boolean = state eq State.Empty
+
+  /** @inheritdoc
+    *
+    * $preservesLaziness
+    */
+  override def knownSize: Int = if (knownIsEmpty) 0 else -1
+
+  override def head: A = state.head
+
+  override def tail: LazyListIterable[A]^{this} = state.tail
+
+  @inline private[this] def knownIsEmpty: Boolean = stateEvaluated && (isEmpty: @inline)
+  @inline private def knownNonEmpty: Boolean = stateEvaluated && !(isEmpty: @inline)
+
+  /** Evaluates all undefined elements of the lazy list.
+    *
+    * This method detects cycles in lazy lists, and terminates after all
+    * elements of the cycle are evaluated. For example:
+    *
+    * {{{
+    * val ring: LazyListIterable[Int] = 1 #:: 2 #:: 3 #:: ring
+    * ring.force
+    * ring.toString
+    *
+    * // prints
+    * //
+    * // LazyListIterable(1, 2, 3, ...)
+    * }}}
+    *
+    * This method will *not* terminate for non-cyclic infinite-sized collections.
+    *
+    * @return this
+    */
+  def force: this.type = {
+    // Use the standard 2x/1x iterator trick for cycle detection ("those" is the slow one)
+    var these, those: LazyListIterable[A]^{this} = this
+    if (!these.isEmpty) {
+      these = these.tail
+    }
+    while (those ne these) {
+      if (these.isEmpty) return this
+      these = these.tail
+      if (these.isEmpty) return this
+      these = these.tail
+      if (these eq those) return this
+      those = those.tail
+    }
+    this
+  }
+
+  /** @inheritdoc
+    *
+    * The iterator returned by this method preserves laziness; elements are
+    * only evaluated individually as needed.
+    */
+  override def iterator: Iterator[A]^{this} =
+    if (knownIsEmpty) Iterator.empty
+    else new LazyIterator(this)
+
+  /** Apply the given function `f` to each element of this linear sequence
+    * (while respecting the order of the elements).
+    *
+    * @param f The function to apply to each element.
+    * @note Overridden here to trigger tail-call optimization, which replaces
+    *       'this' with 'tail' at each iteration. This is absolutely necessary
+    *       for allowing the GC to collect the underlying LazyListIterable as
+    *       elements are consumed.
+    * @note This function will force the realization of the entire LazyListIterable
+    *       unless `f` throws an exception.
+    */
+  @tailrec
+  override def foreach[U](f: A => U): Unit = {
+    if (!isEmpty) {
+      f(head)
+      tail.foreach(f)
+    }
+  }
+
+  /** LazyListIterable specialization of foldLeft which allows GC to collect along the
+    * way.
+    *
+    * @tparam B The type of value being accumulated.
+    * @param z The initial value seeded into the function `op`.
+    * @param op The operation to perform on successive elements of the `LazyListIterable`.
+    * @return The accumulated value from successive applications of `op`.
+    */
+  @tailrec
+  override def foldLeft[B](z: B)(op: (B, A) => B): B =
+    if (isEmpty) z
+    else tail.foldLeft(op(z, head))(op)
+
+  // State.Empty doesn't use the SerializationProxy
+  protected[this] def writeReplace(): AnyRef^{this} =
+    if (knownNonEmpty) new LazyListIterable.SerializationProxy[A](this) else this
+
+  override protected[this] def className = "LazyListIterable"
+
+  /** The lazy list resulting from the concatenation of this lazy list with the argument lazy list.
+    *
+    * $preservesLaziness
+    *
+    * $appendStackSafety
+    *
+    * @param suffix The collection that gets appended to this lazy list
+    * @return The lazy list containing the elements of this lazy list followed by the elements of `suffix`.
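+    *
+    * A minimal sketch of the suffix's laziness (an editorial illustration;
+    * `expensiveSuffix` is a hypothetical name):
+    * {{{
+    * def expensiveSuffix: LazyListIterable[Int] = {
+    *   println("suffix built")
+    *   LazyListIterable(1, 2)
+    * }
+    *
+    * val xs = LazyListIterable(0).lazyAppendedAll(expensiveSuffix) // prints nothing
+    * xs.head      // 0; the suffix is still untouched
+    * xs.tail.head // prints "suffix built", then yields 1
+    * }}}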
+ */ + def lazyAppendedAll[B >: A](suffix: => collection.IterableOnce[B]^): LazyListIterable[B]^{this, suffix} = + newLL { + if (isEmpty) suffix match { + case lazyList: LazyListIterable[B] => lazyList.state // don't recompute the LazyListIterable + case coll if coll.knownSize == 0 => State.Empty + case coll => stateFromIterator(coll.iterator) + } + else sCons(head, tail lazyAppendedAll suffix) + } + + /** @inheritdoc + * + * $preservesLaziness + * + * $appendStackSafety + */ + def appendedAll[B >: A](suffix: IterableOnce[B]^): LazyListIterable[B]^{this, suffix} = + if (knownIsEmpty) LazyListIterable.from(suffix) + else lazyAppendedAll(suffix) + + /** @inheritdoc + * + * $preservesLaziness + * + * $appendStackSafety + */ + def appended[B >: A](elem: B): LazyListIterable[B]^{this} = + if (knownIsEmpty) newLL(sCons(elem, LazyListIterable.empty)) + else lazyAppendedAll(Iterator.single(elem)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def scanLeft[B](z: B)(op: (B, A) => B): LazyListIterable[B]^{this, op} = + if (knownIsEmpty) newLL(sCons(z, LazyListIterable.empty)) + else newLL(scanLeftState(z)(op)) + + private def scanLeftState[B](z: B)(op: (B, A) => B): State[B]^{this, op} = + sCons( + z, + newLL { + if (isEmpty) State.Empty + else tail.scanLeftState(op(z, head))(op) + } + ) + + /** LazyListIterable specialization of reduceLeft which allows GC to collect + * along the way. + * + * @tparam B The type of value being accumulated. + * @param f The operation to perform on successive elements of the `LazyListIterable`. + * @return The accumulated value from successive applications of `f`. + */ + override def reduceLeft[B >: A](f: (B, A) => B): B = { + if (this.isEmpty) throw new UnsupportedOperationException("empty.reduceLeft") + else { + var reducedRes: B = this.head + var left: LazyListIterable[A]^{this} = this.tail + while (!left.isEmpty) { + reducedRes = f(reducedRes, left.head) + left = left.tail + } + reducedRes + } + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def partition(p: A => Boolean): (LazyListIterable[A]^{this, p}, LazyListIterable[A]^{this, p}) = (filter(p), filterNot(p)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def partitionMap[A1, A2](f: A => Either[A1, A2]): (LazyListIterable[A1]^{this, f}, LazyListIterable[A2]^{this, f}) = { + val (left, right) = map(f).partition(_.isLeft) + (left.map(_.asInstanceOf[Left[A1, _]].value), right.map(_.asInstanceOf[Right[_, A2]].value)) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def filter(pred: A => Boolean): LazyListIterable[A]^{this, pred} = + if (knownIsEmpty) LazyListIterable.empty + else LazyListIterable.filterImpl(this, pred, isFlipped = false) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def filterNot(pred: A => Boolean): LazyListIterable[A]^{this, pred} = + if (knownIsEmpty) LazyListIterable.empty + else LazyListIterable.filterImpl(this, pred, isFlipped = true) + + /** A `collection.WithFilter` which allows GC of the head of lazy list during processing. + * + * This method is not particularly useful for a lazy list, as [[filter]] already preserves + * laziness. + * + * The `collection.WithFilter` returned by this method preserves laziness; elements are + * only evaluated individually as needed. 
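+    *
+    * A small sketch (editorial illustration): `withFilter` composes lazily,
+    * even over an infinite lazy list.
+    * {{{
+    * val xs = LazyListIterable.from(1).withFilter(_ % 2 == 0).map(_ * 10)
+    * // nothing has been evaluated yet; forcing a prefix is safe:
+    * xs.take(3).toList // List(20, 40, 60)
+    * }}}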
+ */ + override def withFilter(p: A => Boolean): collection.WithFilter[A, LazyListIterable]^{this, p} = + new LazyListIterable.WithFilter(coll, p) + + /** @inheritdoc + * + * $preservesLaziness + */ + def prepended[B >: A](elem: B): LazyListIterable[B] = newLL(sCons(elem, this)) + + /** @inheritdoc + * + * $preservesLaziness + */ + def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): LazyListIterable[B]^{this, prefix} = + if (knownIsEmpty) LazyListIterable.from(prefix) + else if (prefix.knownSize == 0) this + else newLL(stateFromIteratorConcatSuffix(prefix.iterator)(state)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def map[B](f: A => B): LazyListIterable[B]^{this, f} = + if (knownIsEmpty) LazyListIterable.empty + else (mapImpl(f): @inline) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def tapEach[U](f: A => U): LazyListIterable[A]^{this, f} = map { a => f(a); a } + + private def mapImpl[B](f: A => B): LazyListIterable[B]^{this, f} = + newLL { + if (isEmpty) State.Empty + else sCons(f(head), tail.mapImpl(f)) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def collect[B](pf: PartialFunction[A, B]^): LazyListIterable[B]^{this, pf} = + if (knownIsEmpty) LazyListIterable.empty + else LazyListIterable.collectImpl(this, pf) + + /** @inheritdoc + * + * This method does not evaluate any elements further than + * the first element for which the partial function is defined. + */ + @tailrec + override def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = + if (isEmpty) None + else { + val res = pf.applyOrElse(head, LazyListIterable.anyToMarker.asInstanceOf[A => B]) + if (res.asInstanceOf[AnyRef] eq Statics.pfMarker) tail.collectFirst(pf) + else Some(res) + } + + /** @inheritdoc + * + * This method does not evaluate any elements further than + * the first element matching the predicate. 
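+    *
+    * For instance (an editorial sketch), on a lazy list whose elements print
+    * when computed, only the prefix up to the first match is evaluated:
+    * {{{
+    * val xs = LazyListIterable.from(1).map { i => println(s"evaluated $i"); i }
+    * xs.find(_ > 2) // prints "evaluated 1", "evaluated 2", "evaluated 3"; returns Some(3)
+    * }}}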
+ */ + @tailrec + override def find(p: A => Boolean): Option[A] = + if (isEmpty) None + else { + val elem = head + if (p(elem)) Some(elem) + else tail.find(p) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + // optimisations are not for speed, but for functionality + // see tickets #153, #498, #2147, and corresponding tests in run/ (as well as run/stream_flatmap_odds.scala) + override def flatMap[B](f: A => IterableOnce[B]^): LazyListIterable[B]^{this, f} = + if (knownIsEmpty) LazyListIterable.empty + else LazyListIterable.flatMapImpl(this, f) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def flatten[B](implicit asIterable: A -> IterableOnce[B]): LazyListIterable[B]^{this} = flatMap(asIterable) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def zip[B](that: collection.IterableOnce[B]^): LazyListIterable[(A, B)]^{this, that} = + if (this.knownIsEmpty || that.knownSize == 0) LazyListIterable.empty + else newLL(zipState(that.iterator)) + + private def zipState[B](it: Iterator[B]^): State[(A, B)]^{this, it} = + if (this.isEmpty || !it.hasNext) State.Empty + else sCons((head, it.next()), newLL { tail zipState it }) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def zipWithIndex: LazyListIterable[(A, Int)]^{this} = this zip LazyListIterable.from(0) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def zipAll[A1 >: A, B](that: collection.Iterable[B]^, thisElem: A1, thatElem: B): LazyListIterable[(A1, B)]^{this, that} = { + if (this.knownIsEmpty) { + if (that.knownSize == 0) LazyListIterable.empty + else LazyListIterable.continually(thisElem) zip that + } else { + if (that.knownSize == 0) zip(LazyListIterable.continually(thatElem)) + else newLL(zipAllState(that.iterator, thisElem, thatElem)) + } + } + + private def zipAllState[A1 >: A, B](it: Iterator[B]^, thisElem: A1, thatElem: B): State[(A1, B)]^{this, it} = { + if (it.hasNext) { + if (this.isEmpty) sCons((thisElem, it.next()), newLL { LazyListIterable.continually(thisElem) zipState it }) + else sCons((this.head, it.next()), newLL { this.tail.zipAllState(it, thisElem, thatElem) }) + } else { + if (this.isEmpty) State.Empty + else sCons((this.head, thatElem), this.tail zip LazyListIterable.continually(thatElem)) + } + } + + /** @inheritdoc + * + * This method is not particularly useful for a lazy list, as [[zip]] already preserves + * laziness. + * + * The `collection.LazyZip2` returned by this method preserves laziness; elements are + * only evaluated individually as needed. + */ + // just in case it can be meaningfully overridden at some point + override def lazyZip[B](that: collection.Iterable[B]^): LazyZip2[A, B, LazyListIterable.this.type]^{this, that} = + super.lazyZip(that) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def unzip[A1, A2](implicit asPair: A -> (A1, A2)): (LazyListIterable[A1]^{this}, LazyListIterable[A2]^{this}) = + (map(asPair(_)._1), map(asPair(_)._2)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def unzip3[A1, A2, A3](implicit asTriple: A -> (A1, A2, A3)): (LazyListIterable[A1]^{this}, LazyListIterable[A2]^{this}, LazyListIterable[A3]^{this}) = + (map(asTriple(_)._1), map(asTriple(_)._2), map(asTriple(_)._3)) + + /** @inheritdoc + * + * $initiallyLazy + * Additionally, it preserves laziness for all except the first `n` elements. 
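+    *
+    * For instance (an editorial sketch), dropping a large prefix of an
+    * infinite lazy list returns immediately; the traversal happens only when
+    * the result is first inspected:
+    * {{{
+    * val xs = LazyListIterable.from(0).drop(1000000) // returns at once
+    * xs.head                                         // now walks the prefix; yields 1000000
+    * }}}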
+ */ + override def drop(n: Int): LazyListIterable[A]^{this} = + if (n <= 0) this + else if (knownIsEmpty) LazyListIterable.empty + else LazyListIterable.dropImpl(this, n) + + /** @inheritdoc + * + * $initiallyLazy + * Additionally, it preserves laziness for all elements after the predicate returns `false`. + */ + override def dropWhile(p: A => Boolean): LazyListIterable[A]^{this, p} = + if (knownIsEmpty) LazyListIterable.empty + else LazyListIterable.dropWhileImpl(this, p) + + /** @inheritdoc + * + * $initiallyLazy + */ + override def dropRight(n: Int): LazyListIterable[A]^{this} = { + if (n <= 0) this + else if (knownIsEmpty) LazyListIterable.empty + else newLL { + var scout = this + var remaining = n + // advance scout n elements ahead (or until empty) + while (remaining > 0 && !scout.isEmpty) { + remaining -= 1 + scout = scout.tail + } + dropRightState(scout) + } + } + + private def dropRightState(scout: LazyListIterable[_]^): State[A]^{this, scout} = + if (scout.isEmpty) State.Empty + else sCons(head, newLL(tail.dropRightState(scout.tail))) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def take(n: Int): LazyListIterable[A] = + if (knownIsEmpty) LazyListIterable.empty + else (takeImpl(n): @inline) + + private def takeImpl(n: Int): LazyListIterable[A] = { + if (n <= 0) LazyListIterable.empty + else newLL { + if (isEmpty) State.Empty + else sCons(head, tail.takeImpl(n - 1)) + } + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def takeWhile(p: A => Boolean): LazyListIterable[A]^{this, p} = + if (knownIsEmpty) LazyListIterable.empty + else (takeWhileImpl(p): @inline) + + private def takeWhileImpl(p: A => Boolean): LazyListIterable[A]^{this, p} = + newLL { + if (isEmpty || !p(head)) State.Empty + else sCons(head, tail.takeWhileImpl(p)) + } + + /** @inheritdoc + * + * $initiallyLazy + */ + override def takeRight(n: Int): LazyListIterable[A]^{this} = + if (n <= 0 || knownIsEmpty) LazyListIterable.empty + else LazyListIterable.takeRightImpl(this, n) + + /** @inheritdoc + * + * $initiallyLazy + * Additionally, it preserves laziness for all but the first `from` elements. + */ + override def slice(from: Int, until: Int): LazyListIterable[A]^{this} = take(until).drop(from) + + /** @inheritdoc + * + * $evaluatesAllElements + */ + def reverse: LazyListIterable[A] = reverseOnto(LazyListIterable.empty) + + // need contravariant type B to make the compiler happy - still returns LazyListIterable[A] + @tailrec + private def reverseOnto[B >: A](tl: LazyListIterable[B]): LazyListIterable[B] = + if (isEmpty) tl + else tail.reverseOnto(newLL(sCons(head, tl))) + + @tailrec + private def lengthGt(len: Int): Boolean = + if (len < 0) true + else if (isEmpty) false + else tail.lengthGt(len - 1) + + /** @inheritdoc + * + * The iterator returned by this method mostly preserves laziness; + * a single element ahead of the iterator is evaluated. + */ + override def grouped(size: Int): Iterator[LazyListIterable[A]] = { + require(size > 0, "size must be positive, but was " + size) + slidingImpl(size = size, step = size) + } + + /** @inheritdoc + * + * The iterator returned by this method mostly preserves laziness; + * `size - step max 1` elements ahead of the iterator are evaluated. 
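+    *
+    * A small usage sketch (editorial illustration) over an infinite lazy list:
+    * {{{
+    * val it = LazyListIterable.from(1).sliding(3, 2)
+    * it.next().toList // List(1, 2, 3)
+    * it.next().toList // List(3, 4, 5)
+    * }}}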
+    */
+  override def sliding(size: Int, step: Int): Iterator[LazyListIterable[A]] = {
+    require(size > 0 && step > 0, s"size=$size and step=$step, but both must be positive")
+    slidingImpl(size = size, step = step)
+  }
+
+  @inline private def slidingImpl(size: Int, step: Int): Iterator[LazyListIterable[A]] =
+    if (knownIsEmpty) Iterator.empty
+    else new SlidingIterator[A](this, size = size, step = step)
+
+  /** @inheritdoc
+    *
+    * $preservesLaziness
+    */
+  def padTo[B >: A](len: Int, elem: B): LazyListIterable[B]^{this} = {
+    if (len <= 0) this
+    else newLL {
+      if (isEmpty) LazyListIterable.fill(len)(elem).state
+      else sCons(head, tail.padTo(len - 1, elem))
+    }
+  }
+
+  /** @inheritdoc
+    *
+    * $preservesLaziness
+    */
+  def patch[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): LazyListIterable[B]^{this, other} =
+    if (knownIsEmpty) LazyListIterable from other
+    else patchImpl(from, other, replaced)
+
+  private def patchImpl[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): LazyListIterable[B]^{this, other} =
+    newLL {
+      if (from <= 0) stateFromIteratorConcatSuffix(other.iterator)(LazyListIterable.dropImpl(this, replaced).state)
+      else if (isEmpty) stateFromIterator(other.iterator)
+      else sCons(head, tail.patchImpl(from - 1, other, replaced))
+    }
+
+  /** @inheritdoc
+    *
+    * $evaluatesAllElements
+    */
+  // overridden just in case a lazy implementation is developed at some point
+  override def transpose[B](implicit asIterable: A -> collection.Iterable[B]): LazyListIterable[LazyListIterable[B]]^{this} = super.transpose
+
+  /** @inheritdoc
+    *
+    * $preservesLaziness
+    */
+  def updated[B >: A](index: Int, elem: B): LazyListIterable[B]^{this} =
+    if (index < 0) throw new IndexOutOfBoundsException(s"$index")
+    else updatedImpl(index, elem, index)
+
+  private def updatedImpl[B >: A](index: Int, elem: B, startIndex: Int): LazyListIterable[B]^{this} = {
+    newLL {
+      if (index <= 0) sCons(elem, tail)
+      else if (tail.isEmpty) throw new IndexOutOfBoundsException(startIndex.toString)
+      else sCons(head, tail.updatedImpl(index - 1, elem, startIndex))
+    }
+  }
+
+  /** Appends all elements of this $coll to a string builder using start, end, and separator strings.
+    * The written text begins with the string `start` and ends with the string `end`.
+    * Inside, the string representations (w.r.t. the method `toString`)
+    * of all elements of this $coll are separated by the string `sep`.
+    *
+    * An undefined state is represented with `"<not computed>"` and cycles are represented with `"<cycle>"`.
+    *
+    * $evaluatesAllElements
+    *
+    * @param sb    the string builder to which elements are appended.
+    * @param start the starting string.
+    * @param sep   the separator string.
+    * @param end   the ending string.
+    * @return      the string builder `sb` to which elements were appended.
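+    *
+    * A minimal usage sketch (editorial illustration):
+    * {{{
+    * val sb = new StringBuilder
+    * LazyListIterable(1, 2, 3).addString(sb, "LLI(", ", ", ")")
+    * sb.result() // "LLI(1, 2, 3)"
+    * }}}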
+    */
+  override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = {
+    force
+    addStringNoForce(sb.underlying, start, sep, end)
+    sb
+  }
+
+  private[this] def addStringNoForce(b: JStringBuilder, start: String, sep: String, end: String): JStringBuilder = {
+    b.append(start)
+    if (!stateDefined) b.append("<not computed>")
+    else if (!isEmpty) {
+      b.append(head)
+      var cursor = this
+      inline def appendCursorElement(): Unit = b.append(sep).append(cursor.head)
+      var scout = tail
+      inline def scoutNonEmpty: Boolean = scout.stateDefined && !scout.isEmpty
+      if ((cursor ne scout) && (!scout.stateDefined || (cursor.state ne scout.state))) {
+        cursor = scout
+        if (scoutNonEmpty) {
+          scout = scout.tail
+          // Use 2x/1x iterator trick for cycle detection; the slow iterator can add strings
+          while ((cursor ne scout) && scoutNonEmpty && (cursor.state ne scout.state)) {
+            appendCursorElement()
+            cursor = cursor.tail
+            scout = scout.tail
+            if (scoutNonEmpty) scout = scout.tail
+          }
+        }
+      }
+      if (!scoutNonEmpty) { // Not a cycle, scout hit an end
+        while (cursor ne scout) {
+          appendCursorElement()
+          cursor = cursor.tail
+        }
+        // if cursor (eq scout) has state defined, it is empty; else unknown state
+        if (!cursor.stateDefined) b.append(sep).append("<not computed>")
+      } else {
+        @inline def same(a: LazyListIterable[A]^, b: LazyListIterable[A]^): Boolean = (a eq b) || (a.state eq b.state)
+        // Cycle.
+        // If we have a prefix of length P followed by a cycle of length C,
+        // the scout will be at position (P%C) in the cycle when the cursor
+        // enters it at P. They'll then collide when the scout advances another
+        // C - (P%C) ahead of the cursor.
+        // If we run the scout P farther, then it will be at the start of
+        // the cycle: (C - (P%C) + (P%C)) == C == 0. So if another runner
+        // starts at the beginning of the prefix, they'll collide exactly at
+        // the start of the loop.
+        var runner = this
+        var k = 0
+        while (!same(runner, scout)) {
+          runner = runner.tail
+          scout = scout.tail
+          k += 1
+        }
+        // Now runner and scout are at the beginning of the cycle. Advance
+        // cursor, adding to string, until it hits; then we'll have covered
+        // everything once. If cursor is already at beginning, we'd better
+        // advance one first unless runner didn't go anywhere (in which case
+        // we've already looped once).
+        if (same(cursor, scout) && (k > 0)) {
+          appendCursorElement()
+          cursor = cursor.tail
+        }
+        while (!same(cursor, scout)) {
+          appendCursorElement()
+          cursor = cursor.tail
+        }
+        b.append(sep).append("<cycle>")
+      }
+    }
+    b.append(end)
+  }
+
+  /** $preservesLaziness
+    *
+    * @return a string representation of this collection. An undefined state is
+    *         represented with `"<not computed>"` and cycles are represented with `"<cycle>"`.
+    *
+    *         Examples:
+    *
+    *           - `"LazyListIterable(4, <not computed>)"`, a non-empty lazy list;
+    *           - `"LazyListIterable(1, 2, 3, <not computed>)"`, a lazy list with at least three elements;
+    *           - `"LazyListIterable(1, 2, 3, <cycle>)"`, an infinite lazy list that contains
+    *             a cycle at the fourth element.
+    */
+  override def toString(): String = addStringNoForce(new JStringBuilder(className), "(", ", ", ")").toString
+
+  /** @inheritdoc
+    *
+    * $preservesLaziness
+    */
+  @deprecated("Check .knownSize instead of .hasDefiniteSize for more actionable information (see scaladoc for details)", "2.13.0")
+  override def hasDefiniteSize: Boolean = {
+    if (!stateDefined) false
+    else if (isEmpty) true
+    else {
+      // Two-iterator trick (2x & 1x speed) for cycle detection.
+ var those = this + var these = tail + while (those ne these) { + if (!these.stateDefined) return false + else if (these.isEmpty) return true + these = these.tail + if (!these.stateDefined) return false + else if (these.isEmpty) return true + these = these.tail + if (those eq these) return false + those = those.tail + } + false // Cycle detected + } + } +} + +/** + * $factoryInfo + * @define coll lazy list + * @define Coll `LazyListIterable` + */ +@SerialVersionUID(3L) +object LazyListIterable extends IterableFactory[LazyListIterable] { + // Eagerly evaluate cached empty instance + private[this] val _empty = newLL(State.Empty).force + + private sealed trait State[+A] extends Serializable { + this: State[A]^ => + def head: A + def tail: LazyListIterable[A]^ + } + + private object State { + @SerialVersionUID(3L) + object Empty extends State[Nothing] { + def head: Nothing = throw new NoSuchElementException("head of empty lazy list") + def tail: LazyListIterable[Nothing] = throw new UnsupportedOperationException("tail of empty lazy list") + } + + @SerialVersionUID(3L) + final class Cons[A](val head: A, val tail: LazyListIterable[A]^) extends State[A] + } + + /** Creates a new LazyListIterable. */ + @inline private def newLL[A](state: => State[A]^): LazyListIterable[A]^{state} = new LazyListIterable[A](() => state) + + /** Creates a new State.Cons. */ + @inline private def sCons[A](hd: A, tl: LazyListIterable[A]^): State[A]^{tl} = new State.Cons[A](hd, tl) + + private val anyToMarker: Any => Any = _ => Statics.pfMarker + + /* All of the following `Impl` methods are carefully written so as not to + * leak the beginning of the `LazyListIterable`. They copy the initial `LazyListIterable` (`ll`) into + * `var rest`, which gets closed over as a `scala.runtime.ObjectRef`, thus not permanently + * leaking the head of the `LazyListIterable`. Additionally, the methods are written so that, should + * an exception be thrown by the evaluation of the `LazyListIterable` or any supplied function, they + * can continue their execution where they left off. 
+ */ + + private def filterImpl[A](ll: LazyListIterable[A]^, p: A => Boolean, isFlipped: Boolean): LazyListIterable[A]^{ll, p} = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef: LazyListIterable[A]^{ll*} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric + newLL { + var elem: A = null.asInstanceOf[A] + var found = false + var rest = restRef // var rest = restRef.elem + while (!found && !rest.isEmpty) { + elem = rest.head + found = p(elem) != isFlipped + rest = rest.tail + restRef = rest // restRef.elem = rest + } + if (found) sCons(elem, filterImpl(rest, p, isFlipped)) else State.Empty + } + } + + private def collectImpl[A, B](ll: LazyListIterable[A]^, pf: PartialFunction[A, B]^): LazyListIterable[B]^{ll, pf} = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef: LazyListIterable[A]^{ll*} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric + newLL { + val marker = Statics.pfMarker + val toMarker = anyToMarker.asInstanceOf[A => B] // safe because Function1 is erased + + var res: B = marker.asInstanceOf[B] // safe because B is unbounded + var rest = restRef // var rest = restRef.elem + while((res.asInstanceOf[AnyRef] eq marker) && !rest.isEmpty) { + res = pf.applyOrElse(rest.head, toMarker) + rest = rest.tail + restRef = rest // restRef.elem = rest + } + if (res.asInstanceOf[AnyRef] eq marker) State.Empty + else sCons(res, collectImpl(rest, pf)) + } + } + + private def flatMapImpl[A, B](ll: LazyListIterable[A]^, f: A => IterableOnce[B]^): LazyListIterable[B]^{ll, f} = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef: LazyListIterable[A]^{ll*} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric + newLL { + var it: Iterator[B]^{ll, f} = null + var itHasNext = false + var rest = restRef // var rest = restRef.elem + while (!itHasNext && !rest.isEmpty) { + it = f(rest.head).iterator + itHasNext = it.hasNext + if (!itHasNext) { // wait to advance `rest` because `it.next()` can throw + rest = rest.tail + restRef = rest // restRef.elem = rest + } + } + if (itHasNext) { + val head = it.next() + rest = rest.tail + restRef = rest // restRef.elem = rest + sCons(head, newLL(stateFromIteratorConcatSuffix(it)(flatMapImpl(rest, f).state))) + } else State.Empty + } + } + + private def dropImpl[A](ll: LazyListIterable[A]^, n: Int): LazyListIterable[A]^{ll} = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef: LazyListIterable[A]^{ll*} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric + var iRef = n // val iRef = new IntRef(n) + newLL { + var rest = restRef // var rest = restRef.elem + var i = iRef // var i = iRef.elem + while (i > 0 && !rest.isEmpty) { + rest = rest.tail + restRef = rest // restRef.elem = rest + i -= 1 + iRef = i // iRef.elem = i + } + rest.state + } + } + + private def dropWhileImpl[A](ll: LazyListIterable[A]^, p: A => Boolean): LazyListIterable[A]^{ll, p} = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef: LazyListIterable[A]^{ll*} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric + newLL { + var rest = restRef // var rest = restRef.elem + while (!rest.isEmpty && p(rest.head)) { + rest = rest.tail + restRef = rest // restRef.elem = rest + } + rest.state + } + } + + private def takeRightImpl[A](ll: LazyListIterable[A]^, n: Int): 
LazyListIterable[A]^{ll} = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef: LazyListIterable[A]^{ll*} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric + var scoutRef: LazyListIterable[A]^{ll*} = ll // same situation + var remainingRef = n // val remainingRef = new IntRef(n) + newLL { + var scout = scoutRef // var scout = scoutRef.elem + var remaining = remainingRef // var remaining = remainingRef.elem + // advance `scout` `n` elements ahead (or until empty) + while (remaining > 0 && !scout.isEmpty) { + scout = scout.tail + scoutRef = scout // scoutRef.elem = scout + remaining -= 1 + remainingRef = remaining // remainingRef.elem = remaining + } + var rest = restRef // var rest = restRef.elem + // advance `rest` and `scout` in tandem until `scout` reaches the end + while(!scout.isEmpty) { + scout = scout.tail + scoutRef = scout // scoutRef.elem = scout + rest = rest.tail // can't throw an exception as `scout` has already evaluated its tail + restRef = rest // restRef.elem = rest + } + // `rest` is the last `n` elements (or all of them) + rest.state + } + } + + /** An alternative way of building and matching lazy lists using LazyListIterable.cons(hd, tl). + */ + object cons { + /** A lazy list consisting of a given first element and remaining elements + * @param hd The first element of the result lazy list + * @param tl The remaining elements of the result lazy list + */ + def apply[A](hd: => A, tl: => LazyListIterable[A]^): LazyListIterable[A]^{hd, tl} = newLL(sCons(hd, newLL(tl.state))) + + /** Maps a lazy list to its head and tail */ + def unapply[A](xs: LazyListIterable[A]^): Option[(A, LazyListIterable[A]^{xs})] = #::.unapply(xs) + } + + extension [A](l: => LazyListIterable[A]) + /** Construct a LazyListIterable consisting of a given first element followed by elements + * from another LazyListIterable. + */ + def #:: [B >: A](elem: => B): LazyListIterable[B]^{elem, l} = newLL(sCons(elem, newLL(l.state))) + + /** Construct a LazyListIterable consisting of the concatenation of the given LazyListIterable and + * another LazyListIterable. + */ + def #:::[B >: A](prefix: LazyListIterable[B]^): LazyListIterable[B]^{prefix, l} = prefix lazyAppendedAll l + + object #:: { + def unapply[A](s: LazyListIterable[A]^): Option[(A, LazyListIterable[A]^{s})] = + if (!s.isEmpty) Some((s.head, s.tail)) else None + } + + def from[A](coll: collection.IterableOnce[A]^): LazyListIterable[A]^{coll} = coll match { + case lazyList: LazyListIterable[A] => lazyList + case _ if coll.knownSize == 0 => empty[A] + case _ => newLL(stateFromIterator(coll.iterator)) + } + + def empty[A]: LazyListIterable[A] = _empty + + /** Creates a State from an Iterator, with another State appended after the Iterator + * is empty. + */ + private def stateFromIteratorConcatSuffix[A](it: Iterator[A]^)(suffix: => State[A]^): State[A]^{it, suffix} = + if (it.hasNext) sCons(it.next(), newLL(stateFromIteratorConcatSuffix(it)(suffix))) + else suffix + + /** Creates a State from an IterableOnce. 
*/ + private def stateFromIterator[A](it: Iterator[A]^): State[A]^{it} = + if (it.hasNext) sCons(it.next(), newLL(stateFromIterator(it))) + else State.Empty + + override def concat[A](xss: collection.Iterable[A]*): LazyListIterable[A] = + if (xss.knownSize == 0) empty + else newLL(concatIterator(xss.iterator)) + + private def concatIterator[A](it: Iterator[collection.Iterable[A]]^): State[A]^{it} = + if (!it.hasNext) State.Empty + else stateFromIteratorConcatSuffix(it.next().iterator)(concatIterator(it)) + + /** An infinite LazyListIterable that repeatedly applies a given function to a start value. + * + * @param start the start value of the LazyListIterable + * @param f the function that's repeatedly applied + * @return the LazyListIterable returning the infinite sequence of values `start, f(start), f(f(start)), ...` + */ + def iterate[A](start: => A)(f: A => A): LazyListIterable[A]^{start, f} = + newLL { + val head = start + sCons(head, iterate(f(head))(f)) + } + + /** + * Create an infinite LazyListIterable starting at `start` and incrementing by + * step `step`. + * + * @param start the start value of the LazyListIterable + * @param step the increment value of the LazyListIterable + * @return the LazyListIterable starting at value `start`. + */ + def from(start: Int, step: Int): LazyListIterable[Int] = + newLL(sCons(start, from(start + step, step))) + + /** + * Create an infinite LazyListIterable starting at `start` and incrementing by `1`. + * + * @param start the start value of the LazyListIterable + * @return the LazyListIterable starting at value `start`. + */ + def from(start: Int): LazyListIterable[Int] = from(start, 1) + + /** + * Create an infinite LazyListIterable containing the given element expression (which + * is computed for each occurrence). + * + * @param elem the element composing the resulting LazyListIterable + * @return the LazyListIterable containing an infinite number of elem + */ + def continually[A](elem: => A): LazyListIterable[A]^{elem} = newLL(sCons(elem, continually(elem))) + + override def fill[A](n: Int)(elem: => A): LazyListIterable[A]^{elem} = + if (n > 0) newLL(sCons(elem, fill(n - 1)(elem))) else empty + + override def tabulate[A](n: Int)(f: Int => A): LazyListIterable[A]^{f} = { + def at(index: Int): LazyListIterable[A]^{f} = + if (index < n) newLL(sCons(f(index), at(index + 1))) else empty + + at(0) + } + + // significantly simpler than the iterator returned by Iterator.unfold + override def unfold[A, S](init: S)(f: S => Option[(A, S)]): LazyListIterable[A]^{f} = + newLL { + f(init) match { + case Some((elem, state)) => sCons(elem, unfold(state)(f)) + case None => State.Empty + } + } + + /** The builder returned by this method only evaluates elements + * of collections added to it as needed. + * + * @tparam A the type of the ${coll}’s elements + * @return A builder for $Coll objects. 
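+    *
+    * A small sketch (editorial illustration): even an infinite lazy list can
+    * be added without being evaluated.
+    * {{{
+    * val b = LazyListIterable.newBuilder[Int]
+    * b += 1
+    * b ++= LazyListIterable.from(2) // not evaluated here
+    * b.result().take(3).toList      // List(1, 2, 3)
+    * }}}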
+ */ + def newBuilder[A]: Builder[A, LazyListIterable[A]] = new LazyBuilder[A] + + private class LazyIterator[+A](private[this] var lazyList: LazyListIterable[A]^) extends AbstractIterator[A] { + override def hasNext: Boolean = !lazyList.isEmpty + + override def next(): A = + if (lazyList.isEmpty) Iterator.empty.next() + else { + val res = lazyList.head + lazyList = lazyList.tail + res + } + } + + private class SlidingIterator[A](private[this] var lazyList: LazyListIterable[A]^, size: Int, step: Int) + extends AbstractIterator[LazyListIterable[A]] { + this: SlidingIterator[A]^ => + private val minLen = size - step max 0 + private var first = true + + def hasNext: Boolean = + if (first) !lazyList.isEmpty + else lazyList.lengthGt(minLen) + + def next(): LazyListIterable[A] = { + if (!hasNext) Iterator.empty.next() + else { + first = false + val list = lazyList + lazyList = list.drop(step) + list.take(size) + } + } + } + + private final class WithFilter[A] private[LazyListIterable](lazyList: LazyListIterable[A]^, p: A => Boolean) + extends collection.WithFilter[A, LazyListIterable] { + this: WithFilter[A]^ => + private[this] val filtered = lazyList.filter(p) + def map[B](f: A => B): LazyListIterable[B]^{this, f} = filtered.map(f) + def flatMap[B](f: A => IterableOnce[B]^): LazyListIterable[B]^{this, f} = filtered.flatMap(f) + def foreach[U](f: A => U): Unit = filtered.foreach(f) + def withFilter(q: A => Boolean): collection.WithFilter[A, LazyListIterable]^{this, q} = new WithFilter(filtered, q) + } + + private final class LazyBuilder[A] extends ReusableBuilder[A, LazyListIterable[A]] { + import LazyBuilder._ + + private[this] var next: DeferredState[A] = _ + private[this] var list: LazyListIterable[A] = _ + + clear() + + override def clear(): Unit = { + val deferred = new DeferredState[A] + list = newLL(deferred.eval()) + next = deferred + } + + override def result(): LazyListIterable[A] = { + next init State.Empty + list + } + + override def addOne(elem: A): this.type = { + val deferred = new DeferredState[A] + next init sCons(elem, newLL(deferred.eval())) + next = deferred + this + } + + // lazy implementation which doesn't evaluate the collection being added + override def addAll(xs: IterableOnce[A]^): this.type = { + if (xs.knownSize != 0) { + val deferred = new DeferredState[A] + next.init(stateFromIteratorConcatSuffix(xs.iterator)(deferred.eval())) + next = deferred + } + this + } + } + + private object LazyBuilder { + final class DeferredState[A] { + this: DeferredState[A]^ => + private[this] var _state: (() => State[A]^) @uncheckedCaptures = _ + + def eval(): State[A]^ = { + val state = _state + if (state == null) throw new IllegalStateException("uninitialized") + state() + } + + // racy + def init(state: => State[A]^): Unit = { + if (_state != null) throw new IllegalStateException("already initialized") + _state = () => state + } + } + } + + /** This serialization proxy is used for LazyLists which start with a sequence of evaluated cons cells. + * The forced sequence is serialized in a compact, sequential format, followed by the unevaluated tail, which uses + * standard Java serialization to store the complete structure of unevaluated thunks. This allows the serialization + * of long evaluated lazy lists without exhausting the stack through recursive serialization of cons cells. 
+ */ + @SerialVersionUID(3L) + final class SerializationProxy[A](_coll: LazyListIterable[A]^) extends Serializable { + + @transient protected var coll: LazyListIterable[A]^{this} = _coll + + private[this] def writeObject(out: ObjectOutputStream): Unit = { + out.defaultWriteObject() + var these = coll + while (these.knownNonEmpty) { + out.writeObject(these.head) + these = these.tail + } + out.writeObject(SerializeEnd) + out.writeObject(these) + } + + private[this] def readObject(in: ObjectInputStream): Unit = { + in.defaultReadObject() + val init = new mutable.ListBuffer[A] + var initRead = false + while (!initRead) in.readObject match { + case SerializeEnd => initRead = true + case a => init += a.asInstanceOf[A] + } + val tail = in.readObject().asInstanceOf[LazyListIterable[A]] + // scala/scala#10118: caution that no code path can evaluate `tail.state` + // before the resulting LazyListIterable is returned + val it = init.toList.iterator + coll = newLL(stateFromIteratorConcatSuffix(it)(tail.state)) + } + + private[this] def readResolve(): Any = coll + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/List.scala b/tests/pos-special/stdlib/collection/immutable/List.scala new file mode 100644 index 000000000000..913de8b0be08 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/List.scala @@ -0,0 +1,693 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.annotation.unchecked.uncheckedVariance +import scala.annotation.tailrec +import mutable.{Builder, ListBuffer} +import scala.collection.generic.DefaultSerializable +import scala.runtime.Statics.releaseFence +import language.experimental.captureChecking + +/** A class for immutable linked lists representing ordered collections + * of elements of type `A`. + * + * This class comes with two implementing case classes `scala.Nil` + * and `scala.::` that implement the abstract members `isEmpty`, + * `head` and `tail`. + * + * This class is optimal for last-in-first-out (LIFO), stack-like access patterns. If you need another access + * pattern, for example, random access or FIFO, consider using a collection more suited to this than `List`. + * + * ==Performance== + * '''Time:''' `List` has `O(1)` prepend and head/tail access. Most other operations are `O(n)` on the number of elements in the list. + * This includes the index-based lookup of elements, `length`, `append` and `reverse`. + * + * '''Space:''' `List` implements '''structural sharing''' of the tail list. This means that many operations are either + * zero- or constant-memory cost. 
+  * {{{
+  * val mainList = List(3, 2, 1)
+  * val with4 =    4 :: mainList  // re-uses mainList, costs one :: instance
+  * val with42 =  42 :: mainList  // also re-uses mainList, costs one :: instance
+  * val shorter = mainList.tail   // costs nothing as it uses the same 2::1::Nil instances as mainList
+  * }}}
+  *
+  * @example {{{
+  * // Make a list via the companion object factory
+  * val days = List("Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday")
+  *
+  * // Make a list element-by-element
+  * val when = "AM" :: "PM" :: Nil
+  *
+  * // Pattern match
+  * days match {
+  *   case firstDay :: otherDays =>
+  *     println("The first day of the week is: " + firstDay)
+  *   case Nil =>
+  *     println("There don't seem to be any week days.")
+  * }
+  * }}}
+  *
+  * @note The functional list is characterized by persistence and structural sharing, thus offering considerable
+  *       performance and space consumption benefits in some scenarios if used correctly.
+  *       However, note that objects having multiple references into the same functional list (that is,
+  *       objects that rely on structural sharing), will be serialized and deserialized with multiple lists, one for
+  *       each reference to it. I.e. structural sharing is lost after serialization/deserialization.
+  *
+  * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#lists "Scala's Collection Library overview"]]
+  *      section on `Lists` for more information.
+  *
+  * @define coll list
+  * @define Coll `List`
+  * @define orderDependent
+  * @define orderDependentFold
+  * @define mayNotTerminateInf
+  * @define willNotTerminateInf
+  */
+@SerialVersionUID(3L)
+sealed abstract class List[+A]
+  extends AbstractSeq[A]
+    with LinearSeq[A]
+    with LinearSeqOps[A, List, List[A]]
+    with StrictOptimizedLinearSeqOps[A, List, List[A]]
+    with StrictOptimizedSeqOps[A, List, List[A]]
+    with IterableFactoryDefaults[A, List]
+    with DefaultSerializable {
+
+  override def iterableFactory: SeqFactory[List] = List
+
+  /** Adds an element at the beginning of this list.
+    * @param elem the element to prepend.
+    * @return a list which contains `elem` as first element and
+    *         which continues with this list.
+    * Example:
+    * {{{1 :: List(2, 3) = List(2, 3).::(1) = List(1, 2, 3)}}}
+    */
+  def :: [B >: A](elem: B): List[B] = new ::(elem, this)
+
+  /** Adds the elements of a given list in front of this list.
+    *
+    * Example:
+    * {{{List(1, 2) ::: List(3, 4) = List(3, 4).:::(List(1, 2)) = List(1, 2, 3, 4)}}}
+    *
+    * @param prefix The list elements to prepend.
+    * @return a list resulting from the concatenation of the given
+    *         list `prefix` and this list.
+    */
+  def ::: [B >: A](prefix: List[B]): List[B] =
+    if (isEmpty) prefix
+    else if (prefix.isEmpty) this
+    else {
+      val result = new ::[B](prefix.head, this)
+      var curr = result
+      var that = prefix.tail
+      while (!that.isEmpty) {
+        val temp = new ::[B](that.head, this)
+        curr.next = temp
+        curr = temp
+        that = that.tail
+      }
+      releaseFence()
+      result
+    }
+
+  /** Adds the elements of a given list in reverse order in front of this list.
+    * `xs reverse_::: ys` is equivalent to
+    * `xs.reverse ::: ys` but is more efficient.
+    *
+    * @param prefix the prefix to reverse and then prepend
+    * @return       the concatenation of the reversed prefix and the current list.
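+    *
+    * For instance (an editorial sketch):
+    * {{{
+    * List(1, 2) reverse_::: List(3, 4) // List(2, 1, 3, 4)
+    * }}}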
+ */ + def reverse_:::[B >: A](prefix: List[B]): List[B] = { + var these: List[B] = this + var pres = prefix + while (!pres.isEmpty) { + these = pres.head :: these + pres = pres.tail + } + these + } + + override final def isEmpty: Boolean = this eq Nil + + override def prepended[B >: A](elem: B): List[B] = elem :: this + + override def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): List[B] = prefix match { + case xs: List[B] => xs ::: this + case _ if prefix.knownSize == 0 => this + case b: ListBuffer[B] if this.isEmpty => b.toList + case _ => + val iter = prefix.iterator + if (iter.hasNext) { + val result = new ::[B](iter.next(), this) + var curr = result + while (iter.hasNext) { + val temp = new ::[B](iter.next(), this) + curr.next = temp + curr = temp + } + releaseFence() + result + } else { + this + } + } + + // When calling appendAll with another list `suffix`, avoid copying `suffix` + override def appendedAll[B >: A](suffix: collection.IterableOnce[B]^): List[B] = suffix match { + case xs: List[B] => this ::: xs + case _ => super.appendedAll(suffix) + } + + override def take(n: Int): List[A] = if (isEmpty || n <= 0) Nil else { + val h = new ::(head, Nil) + var t = h + var rest = tail + var i = 1 + while ({if (rest.isEmpty) return this; i < n}) { + i += 1 + val nx = new ::(rest.head, Nil) + t.next = nx + t = nx + rest = rest.tail + } + releaseFence() + h + } + + /** + * @example {{{ + * // Given a list + * val letters = List('a','b','c','d','e') + * + * // `slice` returns all elements beginning at index `from` and afterwards, + * // up until index `until` (excluding index `until`.) + * letters.slice(1,3) // Returns List('b','c') + * }}} + */ + override def slice(from: Int, until: Int): List[A] = { + val lo = scala.math.max(from, 0) + if (until <= lo || isEmpty) Nil + else this drop lo take (until - lo) + } + + override def takeRight(n: Int): List[A] = { + @tailrec + def loop(lead: List[A], lag: List[A]): List[A] = lead match { + case Nil => lag + case _ :: tail => loop(tail, lag.tail) + } + loop(drop(n), this) + } + + // dropRight is inherited from LinearSeq + + override def splitAt(n: Int): (List[A], List[A]) = { + val b = new ListBuffer[A] + var i = 0 + var these = this + while (!these.isEmpty && i < n) { + i += 1 + b += these.head + these = these.tail + } + (b.toList, these) + } + + override def updated[B >: A](index: Int, elem: B): List[B] = { + var i = 0 + var current = this + val prefix = ListBuffer.empty[B] + while (i < index && current.nonEmpty) { + i += 1 + prefix += current.head + current = current.tail + } + if (i == index && current.nonEmpty) { + prefix.prependToList(elem :: current.tail) + } else { + throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${length-1})") + } + } + + final override def map[B](f: A => B): List[B] = { + if (this eq Nil) Nil else { + val h = new ::[B](f(head), Nil) + var t: ::[B] = h + var rest = tail + while (rest ne Nil) { + val nx = new ::(f(rest.head), Nil) + t.next = nx + t = nx + rest = rest.tail + } + releaseFence() + h + } + } + + final override def collect[B](pf: PartialFunction[A, B]^): List[B] = { + if (this eq Nil) Nil else { + var rest = this + var h: ::[B] = null + var x: Any = null + // Special case for first element + while (h eq null) { + x = pf.applyOrElse(rest.head, List.partialNotApplied) + if (x.asInstanceOf[AnyRef] ne List.partialNotApplied) h = new ::(x.asInstanceOf[B], Nil) + rest = rest.tail + if (rest eq Nil) return if (h eq null) Nil else h + } + var t = h + // Remaining elements + while 
(rest ne Nil) { + x = pf.applyOrElse(rest.head, List.partialNotApplied) + if (x.asInstanceOf[AnyRef] ne List.partialNotApplied) { + val nx = new ::(x.asInstanceOf[B], Nil) + t.next = nx + t = nx + } + rest = rest.tail + } + releaseFence() + h + } + } + + final override def flatMap[B](f: A => IterableOnce[B]^): List[B] = { + var rest = this + var h: ::[B] = null + var t: ::[B] = null + while (rest ne Nil) { + val it = f(rest.head).iterator + while (it.hasNext) { + val nx = new ::(it.next(), Nil) + if (t eq null) { + h = nx + } else { + t.next = nx + } + t = nx + } + rest = rest.tail + } + if (h eq null) Nil else {releaseFence(); h} + } + + @inline final override def takeWhile(p: A => Boolean): List[A] = { + val b = new ListBuffer[A] + var these = this + while (!these.isEmpty && p(these.head)) { + b += these.head + these = these.tail + } + b.toList + } + + @inline final override def span(p: A => Boolean): (List[A], List[A]) = { + val b = new ListBuffer[A] + var these = this + while (!these.isEmpty && p(these.head)) { + b += these.head + these = these.tail + } + (b.toList, these) + } + + // Overridden with an implementation identical to the inherited one (at this time) + // solely so it can be finalized and thus inlinable. + @inline final override def foreach[U](f: A => U): Unit = { + var these = this + while (!these.isEmpty) { + f(these.head) + these = these.tail + } + } + + final override def reverse: List[A] = { + var result: List[A] = Nil + var these = this + while (!these.isEmpty) { + result = these.head :: result + these = these.tail + } + result + } + + final override def foldRight[B](z: B)(op: (A, B) => B): B = { + var acc = z + var these: List[A] = reverse + while (!these.isEmpty) { + acc = op(these.head, acc) + these = these.tail + } + acc + } + + // Copy/Paste overrides to avoid interface calls inside loops. 
+ + override final def length: Int = { + var these = this + var len = 0 + while (!these.isEmpty) { + len += 1 + these = these.tail + } + len + } + + override final def lengthCompare(len: Int): Int = { + @tailrec def loop(i: Int, xs: List[A]): Int = { + if (i == len) + if (xs.isEmpty) 0 else 1 + else if (xs.isEmpty) + -1 + else + loop(i + 1, xs.tail) + } + if (len < 0) 1 + else loop(0, coll) + } + + override final def forall(p: A => Boolean): Boolean = { + var these: List[A] = this + while (!these.isEmpty) { + if (!p(these.head)) return false + these = these.tail + } + true + } + + override final def exists(p: A => Boolean): Boolean = { + var these: List[A] = this + while (!these.isEmpty) { + if (p(these.head)) return true + these = these.tail + } + false + } + + override final def contains[A1 >: A](elem: A1): Boolean = { + var these: List[A] = this + while (!these.isEmpty) { + if (these.head == elem) return true + these = these.tail + } + false + } + + override final def find(p: A => Boolean): Option[A] = { + var these: List[A] = this + while (!these.isEmpty) { + if (p(these.head)) return Some(these.head) + these = these.tail + } + None + } + + override def last: A = { + if (isEmpty) throw new NoSuchElementException("List.last") + else { + var these = this + var scout = tail + while (!scout.isEmpty) { + these = scout + scout = scout.tail + } + these.head + } + } + + override def corresponds[B](that: collection.Seq[B])(p: (A, B) => Boolean): Boolean = that match { + case that: LinearSeq[B] => + var i = this + var j = that + while (!(i.isEmpty || j.isEmpty)) { + if (!p(i.head, j.head)) + return false + i = i.tail + j = j.tail + } + i.isEmpty && j.isEmpty + case _ => + super.corresponds(that)(p) + } + + override protected[this] def className = "List" + + /** Builds a new list by applying a function to all elements of this list. + * Like `xs map f`, but returns `xs` unchanged if function + * `f` maps all elements to themselves (as determined by `eq`). + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned collection. + * @return a list resulting from applying the given function + * `f` to each element of this list and collecting the results. + */ + @`inline` final def mapConserve[B >: A <: AnyRef](f: A => B): List[B] = { + // Note to developers: there exists a duplication between this function and `reflect.internal.util.Collections#map2Conserve`. + // If any successful optimization attempts or other changes are made, please rehash them there too. 
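+ // (Editor's illustration, not part of the original source.) If `f` conserves every element, the receiver itself comes back and nothing is allocated; otherwise only the prefix before the first change is copied and any unchanged suffix is shared: + // val xs = List("a", "b", "c") + // xs.mapConserve(identity) eq xs // true + // xs.mapConserve(s => if (s == "c") "C" else s) // List(a, b, C)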
+ @tailrec + def loop(mappedHead: List[B], mappedLast: ::[B], unchanged: List[A], pending: List[A]): List[B] = { + if (pending.isEmpty) { + if (mappedHead eq null) unchanged + else { + mappedLast.next = (unchanged: List[B]) + mappedHead + } + } + else { + val head0 = pending.head + val head1 = f(head0) + + if (head1 eq head0.asInstanceOf[AnyRef]) + loop(mappedHead, mappedLast, unchanged, pending.tail) + else { + var xc = unchanged + var mappedHead1: List[B] = mappedHead + var mappedLast1: ::[B] = mappedLast + while (xc ne pending) { + val next = new ::[B](xc.head, Nil) + if (mappedHead1 eq null) mappedHead1 = next + if (mappedLast1 ne null) mappedLast1.next = next + mappedLast1 = next + xc = xc.tail + } + val next = new ::(head1, Nil) + if (mappedHead1 eq null) mappedHead1 = next + if (mappedLast1 ne null) mappedLast1.next = next + mappedLast1 = next + val tail0 = pending.tail + loop(mappedHead1, mappedLast1, tail0, tail0) + + } + } + } + val result = loop(null, null, this, this) + releaseFence() + result + } + + override def filter(p: A => Boolean): List[A] = filterCommon(p, isFlipped = false) + + override def filterNot(p: A => Boolean): List[A] = filterCommon(p, isFlipped = true) + + private[this] def filterCommon(p: A => Boolean, isFlipped: Boolean): List[A] = { + + // everything seen so far is not included + @tailrec def noneIn(l: List[A]): List[A] = { + if (l.isEmpty) + Nil + else { + val h = l.head + val t = l.tail + if (p(h) != isFlipped) + allIn(l, t) + else + noneIn(t) + } + } + + // everything from 'start' is included; if everything from this point on is in, we can return the original + // start, otherwise, once we discover an element that is out, we must create a new partial list. + @tailrec def allIn(start: List[A], remaining: List[A]): List[A] = { + if (remaining.isEmpty) + start + else { + val x = remaining.head + if (p(x) != isFlipped) + allIn(start, remaining.tail) + else + partialFill(start, remaining) + } + } + + // we have seen elements that should be included, then one that should be excluded; start building + def partialFill(origStart: List[A], firstMiss: List[A]): List[A] = { + val newHead = new ::(origStart.head, Nil) + var toProcess = origStart.tail + var currentLast = newHead + + // we know that all elements are :: until at least firstMiss.tail + while (!(toProcess eq firstMiss)) { + val newElem = new ::(toProcess.head, Nil) + currentLast.next = newElem + currentLast = newElem + toProcess = toProcess.tail + } + + // at this point newHead points to a list which is a duplicate of all the 'in' elements up to the first miss. + // currentLast is the last element in that list. + + // now we are going to try to share as much of the tail as we can, only moving elements across when we have to. + var next = firstMiss.tail + var nextToCopy = next // the next element we would need to copy to our list if we can't share. + while (!next.isEmpty) { + // `next.nonEmpty` is the generally recommended form, but it incurs an extra method call. + val head: A = next.head + if (p(head) != isFlipped) { + next = next.tail + } else { + // it's not a match - do we have outstanding elements?
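+ // (Editor's note, not part of the original source: at this point the cells from + // `nextToCopy` up to but excluding `next` passed the predicate without having been + // copied; the loop below flushes them into the new list, because the miss at `next` + // means their suffix can no longer be shared.)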
+ while (!(nextToCopy eq next)) { + val newElem = new ::(nextToCopy.head, Nil) + currentLast.next = newElem + currentLast = newElem + nextToCopy = nextToCopy.tail + } + nextToCopy = next.tail + next = next.tail + } + } + + // we have remaining elements - they are unchanged; attach them to the end + if (!nextToCopy.isEmpty) + currentLast.next = nextToCopy + + newHead + } + + val result = noneIn(this) + releaseFence() + result + } + + override def partition(p: A => Boolean): (List[A], List[A]) = { + if (isEmpty) List.TupleOfNil + else super.partition(p) match { + case (Nil, xs) => (Nil, this) + case (xs, Nil) => (this, Nil) + case pair => pair + } + } + + final override def toList: List[A] = this + + // Override for performance + override def equals(o: scala.Any): Boolean = { + @tailrec def listEq(a: List[_], b: List[_]): Boolean = + (a eq b) || { + val aEmpty = a.isEmpty + val bEmpty = b.isEmpty + if (!(aEmpty || bEmpty) && a.head == b.head) { + listEq(a.tail, b.tail) + } + else { + aEmpty && bEmpty + } + } + + o match { + case that: List[_] => listEq(this, that) + case _ => super.equals(o) + } + } + + // TODO: uncomment once bincompat allows (reference: scala/scala#9365) + /* + // Override for performance: traverse only as much as needed + // and share tail when nothing needs to be filtered out anymore + override def diff[B >: A](that: collection.Seq[B]): AnyRef = { + if (that.isEmpty || this.isEmpty) this + else if (tail.isEmpty) if (that.contains(head)) Nil else this + else { + val occ = occCounts(that) + val b = new ListBuffer[A]() + @tailrec + def rec(remainder: List[A]): List[A] = { + if(occ.isEmpty) b.prependToList(remainder) + else remainder match { + case Nil => b.result() + case head :: next => { + occ.updateWith(head){ + case None => { + b.append(head) + None + } + case Some(1) => None + case Some(n) => Some(n - 1) + } + rec(next) + } + } + } + rec(this) + } + } + */ + +} + +// Internal code that mutates `next` _must_ call `Statics.releaseFence()`, either immediately, or +// before a newly-allocated, thread-local :: instance is aliased (e.g.
in ListBuffer.toList) +final case class :: [+A](override val head: A, private[scala] var next: List[A @uncheckedVariance]) // sound because `next` is used only locally + extends List[A] { + releaseFence() + override def headOption: Some[A] = Some(head) + override def tail: List[A] = next +} + +case object Nil extends List[Nothing] { + override def head: Nothing = throw new NoSuchElementException("head of empty list") + override def headOption: None.type = None + override def tail: Nothing = throw new UnsupportedOperationException("tail of empty list") + override def last: Nothing = throw new NoSuchElementException("last of empty list") + override def init: Nothing = throw new UnsupportedOperationException("init of empty list") + override def knownSize: Int = 0 + override def iterator: Iterator[Nothing] = Iterator.empty + override def unzip[A1, A2](implicit asPair: Nothing -> (A1, A2)): (List[A1], List[A2]) = EmptyUnzip + + @transient + private[this] val EmptyUnzip = (Nil, Nil) +} + +/** + * $factoryInfo + * @define coll list + * @define Coll `List` + */ +@SerialVersionUID(3L) +object List extends StrictOptimizedSeqFactory[List] { + private val TupleOfNil = (Nil, Nil) + + def from[B](coll: collection.IterableOnce[B]^): List[B] = Nil.prependedAll(coll) + + def newBuilder[A]: Builder[A, List[A]] = new ListBuffer[A]() + + def empty[A]: List[A] = Nil + + @transient + private[collection] val partialNotApplied = new Function1[Any, Any] { def apply(x: Any): Any = this } +} diff --git a/tests/pos-special/stdlib/collection/immutable/ListMap.scala b/tests/pos-special/stdlib/collection/immutable/ListMap.scala new file mode 100644 index 000000000000..06f9b0e91ec1 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/ListMap.scala @@ -0,0 +1,372 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.annotation.tailrec +import scala.collection.mutable.ReusableBuilder +import scala.collection.generic.DefaultSerializable +import scala.runtime.Statics.releaseFence +import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking + +/** + * This class implements immutable maps using a list-based data structure. List map iterators and + * traversal methods visit key-value pairs in the order they were first inserted. + * + * Entries are stored internally in reversed insertion order, which means the newest key is at the + * head of the list. As such, methods such as `head` and `tail` are O(n), while `last` and `init` + * are O(1). Other operations, such as inserting or removing entries, are also O(n), which makes + * this collection suitable only for a small number of elements. + * + * Instances of `ListMap` represent empty maps; they can be either created by calling the + * constructor directly, or by applying the function `ListMap.empty`. 
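+ * + * An illustrative usage (editor's example, not part of the original doc): + * {{{ + * val m = ListMap(1 -> "a", 2 -> "b") + * (m + (3 -> "c")).toList // List((1,a), (2,b), (3,c)): insertion order is kept + * m.updated(1, "x").toList // List((1,x), (2,b)): an existing key keeps its position + * }}}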
+ * + * @tparam K the type of the keys contained in this list map + * @tparam V the type of the values associated with the keys + * + * @define Coll ListMap + * @define coll list map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +sealed class ListMap[K, +V] + extends AbstractMap[K, V] + with SeqMap[K, V] + with StrictOptimizedMapOps[K, V, ListMap, ListMap[K, V]] + with MapFactoryDefaults[K, V, ListMap, Iterable] + with DefaultSerializable { + + override def mapFactory: MapFactory[ListMap] = ListMap + + override def size: Int = 0 + + override def isEmpty: Boolean = true + + override def knownSize: Int = 0 + def get(key: K): Option[V] = None + + def updated[V1 >: V](key: K, value: V1): ListMap[K, V1] = new ListMap.Node[K, V1](key, value, this) + + def removed(key: K): ListMap[K, V] = this + + def iterator: Iterator[(K, V)] = { + var curr: ListMap[K, V] = this + var res: List[(K, V)] = Nil + while (curr.nonEmpty) { + res = (curr.key, curr.value) :: res + curr = curr.next + } + res.iterator + } + + override def keys: Iterable[K] = { + var curr: ListMap[K, V] = this + var res: List[K] = Nil + while (curr.nonEmpty) { + res = curr.key :: res + curr = curr.next + } + res + } + + override def hashCode(): Int = { + if (isEmpty) MurmurHash3.emptyMapHash + else { + // Can't efficiently override foreachEntry directly in ListMap because it would need to preserve iteration + // order by reversing the list first. But mapHash is symmetric so the reversed order is fine here. + val _reversed = new immutable.AbstractMap[K, V] { + override def isEmpty: Boolean = ListMap.this.isEmpty + override def removed(key: K): Map[K, V] = ListMap.this.removed(key) + override def updated[V1 >: V](key: K, value: V1): Map[K, V1] = ListMap.this.updated(key, value) + override def get(key: K): Option[V] = ListMap.this.get(key) + override def iterator: Iterator[(K, V)] = ListMap.this.iterator + override def foreachEntry[U](f: (K, V) => U): Unit = { + var curr: ListMap[K, V] = ListMap.this + while (curr.nonEmpty) { + f(curr.key, curr.value) + curr = curr.next + } + } + } + MurmurHash3.mapHash(_reversed) + } + } + + private[immutable] def key: K = throw new NoSuchElementException("key of empty map") + private[immutable] def value: V = throw new NoSuchElementException("value of empty map") + private[immutable] def next: ListMap[K, V] = throw new NoSuchElementException("next of empty map") + + override def foldRight[Z](z: Z)(op: ((K, V), Z) => Z): Z = ListMap.foldRightInternal(this, z, op) + override protected[this] def className = "ListMap" + +} + +/** + * $factoryInfo + * + * Note that each element insertion takes O(n) time, which means that creating a list map with + * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of + * elements. + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#list-maps "Scala's Collection Library overview"]] + * section on `List Maps` for more information. + * @define Coll ListMap + * @define coll list map + */ +@SerialVersionUID(3L) +object ListMap extends MapFactory[ListMap] { + /** + * Represents an entry in the `ListMap`.
+ */ + private[immutable] final class Node[K, V]( + override private[immutable] val key: K, + private[immutable] var _value: V, + private[immutable] var _init: ListMap[K, V] + ) extends ListMap[K, V] { + releaseFence() + + override private[immutable] def value: V = _value + + override def size: Int = sizeInternal(this, 0) + + @tailrec private[this] def sizeInternal(cur: ListMap[K, V], acc: Int): Int = + if (cur.isEmpty) acc + else sizeInternal(cur.next, acc + 1) + + override def isEmpty: Boolean = false + + override def knownSize: Int = -1 + + @throws[NoSuchElementException] + override def apply(k: K): V = applyInternal(this, k) + + @tailrec private[this] def applyInternal(cur: ListMap[K, V], k: K): V = + if (cur.isEmpty) throw new NoSuchElementException("key not found: " + k) + else if (k == cur.key) cur.value + else applyInternal(cur.next, k) + + override def get(k: K): Option[V] = getInternal(this, k) + + @tailrec private[this] def getInternal(cur: ListMap[K, V], k: K): Option[V] = + if (cur.isEmpty) None + else if (k == cur.key) Some(cur.value) + else getInternal(cur.next, k) + + override def contains(k: K): Boolean = containsInternal(this, k) + + @tailrec private[this] def containsInternal(cur: ListMap[K, V], k: K): Boolean = + if (cur.isEmpty) false + else if (k == cur.key) true + else containsInternal(cur.next, k) + + override def updated[V1 >: V](k: K, v: V1): ListMap[K, V1] = { + + var index = -1 // the index (in reverse) where the key to update exists, if it is found + var found = false // true if the key is found in the map + var isDifferent = false // true if the key was found and the values are different + + { + var curr: ListMap[K, V] = this + + while (curr.nonEmpty && !found) { + if (k == curr.key) { + found = true + isDifferent = v.asInstanceOf[AnyRef] ne curr.value.asInstanceOf[AnyRef] + } + index += 1 + curr = curr.init + } + } + + if (found) { + if (isDifferent) { + var newHead: ListMap.Node[K, V1] = null + var prev: ListMap.Node[K, V1] = null + var curr: ListMap[K, V1] = this + var i = 0 + while (i < index) { + val temp = new ListMap.Node(curr.key, curr.value, null) + if (prev ne null) { + prev._init = temp + } + prev = temp + curr = curr.init + if (newHead eq null) { + newHead = prev + } + i += 1 + } + val newNode = new ListMap.Node(curr.key, v, curr.init) + if (prev ne null) { + prev._init = newNode + } + releaseFence() + if (newHead eq null) newNode else newHead + } else { + this + } + } else { + new ListMap.Node(k, v, this) + } + } + + @tailrec private[this] def removeInternal(k: K, cur: ListMap[K, V], acc: List[ListMap[K, V]]): ListMap[K, V] = + if (cur.isEmpty) acc.last + else if (k == cur.key) acc.foldLeft(cur.next) { (t, h) => new Node(h.key, h.value, t) } + else removeInternal(k, cur.next, cur :: acc) + + override def removed(k: K): ListMap[K, V] = removeInternal(k, this, Nil) + + override private[immutable] def next: ListMap[K, V] = _init + + override def last: (K, V) = (key, value) + override def init: ListMap[K, V] = next + + } + + def empty[K, V]: ListMap[K, V] = EmptyListMap.asInstanceOf[ListMap[K, V]] + + private object EmptyListMap extends ListMap[Any, Nothing] + + def from[K, V](it: collection.IterableOnce[(K, V)]^): ListMap[K, V] = + it match { + case lm: ListMap[K, V] => lm + case lhm: collection.mutable.LinkedHashMap[K, V] => + // by directly iterating through LinkedHashMap entries, we save creating intermediate tuples for each + // key-value pair + var current: ListMap[K, V] = empty[K, V] + var firstEntry = lhm._firstEntry + while (firstEntry ne
null) { + current = new Node(firstEntry.key, firstEntry.value, current) + firstEntry = firstEntry.later + } + current + case _: collection.Map[K, V] | _: collection.MapView[K, V] => + // when creating from a map, we need not handle duplicate keys, so we can just append each key-value to the end + var current: ListMap[K, V] = empty[K, V] + val iter = it.iterator + while (iter.hasNext) { + val (k, v) = iter.next() + current = new Node(k, v, current) + } + current + + case _ => (newBuilder[K, V] ++= it).result() + } + + /** Returns a new ListMap builder + * + * The implementation safely handles additions after `result()` without calling `clear()` + * + * @tparam K the map key type + * @tparam V the map value type + */ + def newBuilder[K, V]: ReusableBuilder[(K, V), ListMap[K, V]] = new ListMapBuilder[K, V] + + @tailrec private def foldRightInternal[K, V, Z](map: ListMap[K, V], prevValue: Z, op: ((K, V), Z) => Z): Z = { + if (map.isEmpty) prevValue + else foldRightInternal(map.init, op(map.last, prevValue), op) + } +} + +/** Builder for ListMap. + * $multipleResults + */ +private[immutable] final class ListMapBuilder[K, V] extends mutable.ReusableBuilder[(K, V), ListMap[K, V]] { + private[this] var isAliased: Boolean = false + private[this] var underlying: ListMap[K, V] = ListMap.empty + + override def clear(): Unit = { + underlying = ListMap.empty + isAliased = false + } + + override def result(): ListMap[K, V] = { + isAliased = true + releaseFence() + underlying + } + + override def addOne(elem: (K, V)): this.type = addOne(elem._1, elem._2) + + @tailrec + private[this] def insertValueAtKeyReturnFound(m: ListMap[K, V], key: K, value: V): Boolean = m match { + case n: ListMap.Node[K, V] => + if (n.key == key) { + n._value = value + true + } else { + insertValueAtKeyReturnFound(n.init, key, value) + } + case _ => false + } + + def addOne(key: K, value: V): this.type = { + if (isAliased) { + underlying = underlying.updated(key, value) + } else { + if (!insertValueAtKeyReturnFound(underlying, key, value)) { + underlying = new ListMap.Node(key, value, underlying) + } + } + this + } + override def addAll(xs: IterableOnce[(K, V)]^): this.type = { + if (isAliased) { + super.addAll(xs) + } else if (underlying.nonEmpty) { + xs match { + case m: collection.Map[K, V] => + // if it is a map, then its keys will not collide with themselves. + // therefore we only need to check the already-existing elements for collisions.
+ // No need to check the entire list + + val iter = m.iterator + var newUnderlying = underlying + while (iter.hasNext) { + val next = iter.next() + if (!insertValueAtKeyReturnFound(underlying, next._1, next._2)) { + newUnderlying = new ListMap.Node[K, V](next._1, next._2, newUnderlying) + } + } + underlying = newUnderlying + this + + case _ => + super.addAll(xs) + } + } else xs match { + case lhm: collection.mutable.LinkedHashMap[K, V] => + // special-casing LinkedHashMap avoids creating an Iterator and a tuple for each key-value pair + var firstEntry = lhm._firstEntry + while (firstEntry ne null) { + underlying = new ListMap.Node(firstEntry.key, firstEntry.value, underlying) + firstEntry = firstEntry.later + } + this + + case _: collection.Map[K, V] | _: collection.MapView[K, V] => + val iter = xs.iterator + while (iter.hasNext) { + val (k, v) = iter.next() + underlying = new ListMap.Node(k, v, underlying) + } + + this + case _ => + super.addAll(xs) + } + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/ListSet.scala b/tests/pos-special/stdlib/collection/immutable/ListSet.scala new file mode 100644 index 000000000000..ab5e8c65600b --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/ListSet.scala @@ -0,0 +1,139 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import mutable.{Builder, ImmutableBuilder} +import scala.annotation.tailrec +import scala.collection.generic.DefaultSerializable +import language.experimental.captureChecking + +/** + * This class implements immutable sets using a list-based data structure. List set iterators and + * traversal methods visit elements in the order they were first inserted. + * + * Elements are stored internally in reversed insertion order, which means the newest element is at + * the head of the list. As such, methods such as `head` and `tail` are O(n), while `last` and + * `init` are O(1). Other operations, such as inserting or removing entries, are also O(n), which + * makes this collection suitable only for a small number of elements. + * + * Instances of `ListSet` represent empty sets; they can be either created by calling the + * constructor directly, or by applying the function `ListSet.empty`.
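+ * + * An illustrative usage (editor's example, not part of the original doc): + * {{{ + * val s = ListSet(1, 2, 3) + 4 + * s.toList // List(1, 2, 3, 4): iteration follows insertion order + * s.last // 4, in O(1); s.head is 1 but costs O(n) + * }}}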
+ * + * @tparam A the type of the elements contained in this list set + * + * @define Coll ListSet + * @define coll list set + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +sealed class ListSet[A] + extends AbstractSet[A] + with StrictOptimizedSetOps[A, ListSet, ListSet[A]] + with IterableFactoryDefaults[A, ListSet] + with DefaultSerializable { + + override protected[this] def className: String = "ListSet" + + override def size: Int = 0 + override def knownSize: Int = 0 + override def isEmpty: Boolean = true + + def contains(elem: A): Boolean = false + + def incl(elem: A): ListSet[A] = new Node(elem) + def excl(elem: A): ListSet[A] = this + + def iterator: scala.collection.Iterator[A] = { + var curr: ListSet[A] = this + var res: List[A] = Nil + while (!curr.isEmpty) { + res = curr.elem :: res + curr = curr.next + } + res.iterator + } + + protected def elem: A = throw new NoSuchElementException("elem of empty set") + protected def next: ListSet[A] = throw new NoSuchElementException("next of empty set") + + override def iterableFactory: IterableFactory[ListSet] = ListSet + + /** + * Represents an entry in the `ListSet`. + */ + protected class Node(override protected val elem: A) extends ListSet[A] { + + override def size = sizeInternal(this, 0) + override def knownSize: Int = -1 + @tailrec private[this] def sizeInternal(n: ListSet[A], acc: Int): Int = + if (n.isEmpty) acc + else sizeInternal(n.next, acc + 1) + + override def isEmpty: Boolean = false + + override def contains(e: A): Boolean = containsInternal(this, e) + + @tailrec private[this] def containsInternal(n: ListSet[A], e: A): Boolean = + !n.isEmpty && (n.elem == e || containsInternal(n.next, e)) + + override def incl(e: A): ListSet[A] = if (contains(e)) this else new Node(e) + + override def excl(e: A): ListSet[A] = removeInternal(e, this, Nil) + + @tailrec private[this] def removeInternal(k: A, cur: ListSet[A], acc: List[ListSet[A]]): ListSet[A] = + if (cur.isEmpty) acc.last + else if (k == cur.elem) acc.foldLeft(cur.next)((t, h) => new t.Node(h.elem)) + else removeInternal(k, cur.next, cur :: acc) + + override protected def next: ListSet[A] = ListSet.this + + override def last: A = elem + + override def init: ListSet[A] = next + } +} + +/** + * $factoryInfo + * + * Note that each element insertion takes O(n) time, which means that creating a list set with + * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of + * elements. 
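+ * + * For example (editor's note, not part of the original doc), `ListSet.from(1 to 10000)` + * performs on the order of n^2^/2 element comparisons while building (n = 10000), so a + * `HashSet` or `TreeSet` should be preferred beyond a handful of elements.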
+ * + * @define Coll ListSet + * @define coll list set + */ +@SerialVersionUID(3L) +object ListSet extends IterableFactory[ListSet] { + + def from[E](it: scala.collection.IterableOnce[E]^): ListSet[E] = + it match { + case ls: ListSet[E] => ls + case _ if it.knownSize == 0 => empty[E] + case _ => (newBuilder[E] ++= it).result() + } + + private object EmptyListSet extends ListSet[Any] { + override def knownSize: Int = 0 + } + private[collection] def emptyInstance: ListSet[Any] = EmptyListSet + + def empty[A]: ListSet[A] = EmptyListSet.asInstanceOf[ListSet[A]] + + def newBuilder[A]: Builder[A, ListSet[A]] = + new ImmutableBuilder[A, ListSet[A]](empty) { + def addOne(elem: A): this.type = { elems = elems + elem; this } + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/LongMap.scala b/tests/pos-special/stdlib/collection/immutable/LongMap.scala new file mode 100644 index 000000000000..1e32cb88767d --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/LongMap.scala @@ -0,0 +1,491 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package immutable + +import java.lang.IllegalStateException + +import scala.collection.generic.{BitOperations, DefaultSerializationProxy} +import scala.collection.mutable.{Builder, ImmutableBuilder, ListBuffer} +import scala.annotation.tailrec +import scala.annotation.unchecked.uncheckedVariance +import scala.language.implicitConversions +import language.experimental.captureChecking + +/** Utility class for long maps. + */ +private[immutable] object LongMapUtils extends BitOperations.Long { + def branchMask(i: Long, j: Long) = highestOneBit(i ^ j) + + def join[T](p1: Long, t1: LongMap[T], p2: Long, t2: LongMap[T]): LongMap[T] = { + val m = branchMask(p1, p2) + val p = mask(p1, m) + if (zero(p1, m)) LongMap.Bin(p, m, t1, t2) + else LongMap.Bin(p, m, t2, t1) + } + + def bin[T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]): LongMap[T] = (left, right) match { + case (left, LongMap.Nil) => left + case (LongMap.Nil, right) => right + case (left, right) => LongMap.Bin(prefix, mask, left, right) + } +} + +import LongMapUtils._ + +/** A companion object for long maps. + * + * @define Coll `LongMap` + */ +object LongMap { + def empty[T]: LongMap[T] = LongMap.Nil + def singleton[T](key: Long, value: T): LongMap[T] = LongMap.Tip(key, value) + def apply[T](elems: (Long, T)*): LongMap[T] = + elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)) + + def from[V](coll: IterableOnce[(Long, V)]^): LongMap[V] = + newBuilder[V].addAll(coll).result() + + def newBuilder[V]: Builder[(Long, V), LongMap[V]] = + new ImmutableBuilder[(Long, V), LongMap[V]](empty) { + def addOne(elem: (Long, V)): this.type = { elems = elems + elem; this } + } + + private[immutable] case object Nil extends LongMap[Nothing] { + // Important, don't remove this! See IntMap for explanation. 
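+ // (Editor's gloss, paraphrasing the comment in IntMap referenced above: without this + // override, Map.equals can call size, whose pattern match on Nil calls equals again, + // recursing forever; case objects and custom equality don't mix without care.)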
+ override def equals(that: Any) = that match { + case _: this.type => true + case _: LongMap[_] => false // The only empty LongMaps are eq Nil + case _ => super.equals(that) + } + } + + private[immutable] case class Tip[+T](key: Long, value: T) extends LongMap[T] { + def withValue[S](s: S) = + if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap.Tip[S]] + else LongMap.Tip(key, s) + } + + private[immutable] case class Bin[+T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]) extends LongMap[T] { + def bin[S](left: LongMap[S], right: LongMap[S]): LongMap[S] = { + if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[LongMap.Bin[S]] + else LongMap.Bin[S](prefix, mask, left, right) + } + } + + implicit def toFactory[V](dummy: LongMap.type): Factory[(Long, V), LongMap[V]] = ToFactory.asInstanceOf[Factory[(Long, V), LongMap[V]]] + + @SerialVersionUID(3L) + private[this] object ToFactory extends Factory[(Long, AnyRef), LongMap[AnyRef]] with Serializable { + def fromSpecific(it: IterableOnce[(Long, AnyRef)]^): LongMap[AnyRef] = LongMap.from[AnyRef](it) + def newBuilder: Builder[(Long, AnyRef), LongMap[AnyRef]] = LongMap.newBuilder[AnyRef] + } + + implicit def toBuildFrom[V](factory: LongMap.type): BuildFrom[Any, (Long, V), LongMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Long, V), LongMap[V]]] + private[this] object ToBuildFrom extends BuildFrom[Any, (Long, AnyRef), LongMap[AnyRef]] { + def fromSpecific(from: Any)(it: IterableOnce[(Long, AnyRef)]^) = LongMap.from(it) + def newBuilder(from: Any) = LongMap.newBuilder[AnyRef] + } + + implicit def iterableFactory[V]: Factory[(Long, V), LongMap[V]] = toFactory(this) + implicit def buildFromLongMap[V]: BuildFrom[LongMap[_], (Long, V), LongMap[V]] = toBuildFrom(this) +} + +// Iterator over a non-empty LongMap. +private[immutable] abstract class LongMapIterator[V, T](it: LongMap[V]) extends AbstractIterator[T] { + + // Basically this uses a simple stack to emulate recursion over the tree. However + // because we know that Longs are only 64 bits we can have at most 64 LongMap.Bins and + // one LongMap.Tip sitting on the tree at any point. Therefore we know the maximum stack + // depth is 65. + var index = 0 + var buffer = new Array[AnyRef](65) + + def pop() = { + index -= 1 + buffer(index).asInstanceOf[LongMap[V]] + } + + def push(x: LongMap[V]): Unit = { + buffer(index) = x.asInstanceOf[AnyRef] + index += 1 + } + push(it) + + /** + * What value do we assign to a tip? + */ + def valueOf(tip: LongMap.Tip[V]): T + + def hasNext = index != 0 + @tailrec + final def next(): T = + pop() match { + case LongMap.Bin(_, _, t@LongMap.Tip(_, _), right) => { + push(right) + valueOf(t) + } + case LongMap.Bin(_, _, left, right) => { + push(right) + push(left) + next() + } + case t@LongMap.Tip(_, _) => valueOf(t) + // This should never happen. We don't allow LongMap.Nil in subtrees of the LongMap + // and don't return a LongMapIterator for LongMap.Nil.
+ case LongMap.Nil => throw new IllegalStateException("Empty maps not allowed as subtrees") + } +} + +private[immutable] class LongMapEntryIterator[V](it: LongMap[V]) extends LongMapIterator[V, (Long, V)](it){ + def valueOf(tip: LongMap.Tip[V]) = (tip.key, tip.value) +} + +private[immutable] class LongMapValueIterator[V](it: LongMap[V]) extends LongMapIterator[V, V](it){ + def valueOf(tip: LongMap.Tip[V]) = tip.value +} + +private[immutable] class LongMapKeyIterator[V](it: LongMap[V]) extends LongMapIterator[V, Long](it){ + def valueOf(tip: LongMap.Tip[V]) = tip.key +} + +/** + * Specialised immutable map structure for long keys, based on + * [[https://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.37.5452 Fast Mergeable Long Maps]] + * by Okasaki and Gill. Essentially a trie based on binary digits of the integers. + * + * Note: This class is as of 2.8 largely superseded by HashMap. + * + * @tparam T type of the values associated with the long keys. + * + * @define Coll `immutable.LongMap` + * @define coll immutable long integer map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +sealed abstract class LongMap[+T] extends AbstractMap[Long, T] + with StrictOptimizedMapOps[Long, T, Map, LongMap[T]] + with Serializable { + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(Long, T) @uncheckedVariance]^): LongMap[T] = { + //TODO should this be the default implementation of this method in StrictOptimizedIterableOps? + val b = newSpecificBuilder + b.sizeHint(coll) + b.addAll(coll) + b.result() + } + override protected def newSpecificBuilder: Builder[(Long, T), LongMap[T]] @uncheckedVariance = + new ImmutableBuilder[(Long, T), LongMap[T]](empty) { + def addOne(elem: (Long, T)): this.type = { elems = elems + elem; this } + } + + override def empty: LongMap[T] = LongMap.Nil + + override def toList = { + val buffer = new ListBuffer[(Long, T)] + foreach(buffer += _) + buffer.toList + } + + /** + * Iterator over key, value pairs of the map in unsigned order of the keys. + * + * @return an iterator over pairs of long keys and corresponding values. + */ + def iterator: Iterator[(Long, T)] = this match { + case LongMap.Nil => Iterator.empty + case _ => new LongMapEntryIterator(this) + } + + /** + * Loops over the key, value pairs of the map in unsigned order of the keys. + */ + override final def foreach[U](f: ((Long, T)) => U): Unit = this match { + case LongMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) } + case LongMap.Tip(key, value) => f((key, value)) + case LongMap.Nil => + } + + override final def foreachEntry[U](f: (Long, T) => U): Unit = this match { + case LongMap.Bin(_, _, left, right) => { left.foreachEntry(f); right.foreachEntry(f) } + case LongMap.Tip(key, value) => f(key, value) + case LongMap.Nil => + } + + override def keysIterator: Iterator[Long] = this match { + case LongMap.Nil => Iterator.empty + case _ => new LongMapKeyIterator(this) + } + + /** + * Loop over the keys of the map. The same as keys.foreach(f), but may + * be more efficient. + * + * @param f The loop body + */ + final def foreachKey[U](f: Long => U): Unit = this match { + case LongMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) } + case LongMap.Tip(key, _) => f(key) + case LongMap.Nil => + } + + override def valuesIterator: Iterator[T] = this match { + case LongMap.Nil => Iterator.empty + case _ => new LongMapValueIterator(this) + } + + /** + * Loop over the values of the map. 
The same as values.foreach(f), but may + * be more efficient. + * + * @param f The loop body + */ + final def foreachValue[U](f: T => U): Unit = this match { + case LongMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) } + case LongMap.Tip(_, value) => f(value) + case LongMap.Nil => + } + + override protected[this] def className = "LongMap" + + override def isEmpty = this eq LongMap.Nil + override def knownSize: Int = if (isEmpty) 0 else super.knownSize + override def filter(f: ((Long, T)) => Boolean): LongMap[T] = this match { + case LongMap.Bin(prefix, mask, left, right) => { + val (newleft, newright) = (left.filter(f), right.filter(f)) + if ((left eq newleft) && (right eq newright)) this + else bin(prefix, mask, newleft, newright) + } + case LongMap.Tip(key, value) => + if (f((key, value))) this + else LongMap.Nil + case LongMap.Nil => LongMap.Nil + } + + override def transform[S](f: (Long, T) => S): LongMap[S] = this match { + case b@LongMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f)) + case t@LongMap.Tip(key, value) => t.withValue(f(key, value)) + case LongMap.Nil => LongMap.Nil + } + + final override def size: Int = this match { + case LongMap.Nil => 0 + case LongMap.Tip(_, _) => 1 + case LongMap.Bin(_, _, left, right) => left.size + right.size + } + + @tailrec + final def get(key: Long): Option[T] = this match { + case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key) + case LongMap.Tip(key2, value) => if (key == key2) Some(value) else None + case LongMap.Nil => None + } + + @tailrec + final override def getOrElse[S >: T](key: Long, default: => S): S = this match { + case LongMap.Nil => default + case LongMap.Tip(key2, value) => if (key == key2) value else default + case LongMap.Bin(prefix, mask, left, right) => + if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default) + } + + @tailrec + final override def apply(key: Long): T = this match { + case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key) + case LongMap.Tip(key2, value) => if (key == key2) value else throw new IllegalArgumentException("key not found") + case LongMap.Nil => throw new IllegalArgumentException("key not found") + } + + override def + [S >: T] (kv: (Long, S)): LongMap[S] = updated(kv._1, kv._2) + + override def updated[S >: T](key: Long, value: S): LongMap[S] = this match { + case LongMap.Bin(prefix, mask, left, right) => + if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this) + else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updated(key, value), right) + else LongMap.Bin(prefix, mask, left, right.updated(key, value)) + case LongMap.Tip(key2, value2) => + if (key == key2) LongMap.Tip(key, value) + else join(key, LongMap.Tip(key, value), key2, this) + case LongMap.Nil => LongMap.Tip(key, value) + } + + /** + * Updates the map, using the provided function to resolve conflicts if the key is already present. + * + * Equivalent to + * {{{ + * this.get(key) match { + * case None => this.updated(key, value) + * case Some(oldvalue) => this.updated(key, f(oldvalue, value)) + * } + * }}} + * + * @tparam S The supertype of values in this `LongMap`. + * @param key The key to update. + * @param value The value to use if there is no conflict. + * @param f The function used to resolve conflicts. + * @return The updated map.
+ */ + def updateWith[S >: T](key: Long, value: S, f: (T, S) => S): LongMap[S] = this match { + case LongMap.Bin(prefix, mask, left, right) => + if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this) + else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updateWith(key, value, f), right) + else LongMap.Bin(prefix, mask, left, right.updateWith(key, value, f)) + case LongMap.Tip(key2, value2) => + if (key == key2) LongMap.Tip(key, f(value2, value)) + else join(key, LongMap.Tip(key, value), key2, this) + case LongMap.Nil => LongMap.Tip(key, value) + } + + def removed(key: Long): LongMap[T] = this match { + case LongMap.Bin(prefix, mask, left, right) => + if (!hasMatch(key, prefix, mask)) this + else if (zero(key, mask)) bin(prefix, mask, left - key, right) + else bin(prefix, mask, left, right - key) + case LongMap.Tip(key2, _) => + if (key == key2) LongMap.Nil + else this + case LongMap.Nil => LongMap.Nil + } + + /** + * A combined transform and filter function. Returns a `LongMap` such that + * for each `(key, value)` mapping in this map, if `f(key, value) == None` + * the map contains no mapping for `key`, and if `f(key, value) == Some(x)` + * the map contains the mapping `(key, x)`. + * + * @tparam S The type of the values in the resulting `LongMap`. + * @param f The transforming function. + * @return The modified map. + */ + def modifyOrRemove[S](f: (Long, T) => Option[S]): LongMap[S] = this match { + case LongMap.Bin(prefix, mask, left, right) => { + val newleft = left.modifyOrRemove(f) + val newright = right.modifyOrRemove(f) + if ((left eq newleft) && (right eq newright)) this.asInstanceOf[LongMap[S]] + else bin(prefix, mask, newleft, newright) + } + case LongMap.Tip(key, value) => f(key, value) match { + case None => LongMap.Nil + case Some(value2) => + //hack to preserve sharing + if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap[S]] + else LongMap.Tip(key, value2) + } + case LongMap.Nil => LongMap.Nil + } + + /** + * Forms a union map with that map, using the combining function to resolve conflicts. + * + * @tparam S The type of values in `that`, a supertype of values in `this`. + * @param that The map to form a union with. + * @param f The function used to resolve conflicts between two mappings. + * @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`. + */ + def unionWith[S >: T](that: LongMap[S], f: (Long, S, S) => S): LongMap[S] = (this, that) match { + case (LongMap.Bin(p1, m1, l1, r1), that@(LongMap.Bin(p2, m2, l2, r2))) => + if (shorter(m1, m2)) { + if (!hasMatch(p2, p1, m1)) join(p1, this, p2, that) + else if (zero(p2, m1)) LongMap.Bin(p1, m1, l1.unionWith(that, f), r1) + else LongMap.Bin(p1, m1, l1, r1.unionWith(that, f)) + } else if (shorter(m2, m1)) { + if (!hasMatch(p1, p2, m2)) join(p1, this, p2, that) + else if (zero(p1, m2)) LongMap.Bin(p2, m2, this.unionWith(l2, f), r2) + else LongMap.Bin(p2, m2, l2, this.unionWith(r2, f)) + } + else { + if (p1 == p2) LongMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f)) + else join(p1, this, p2, that) + } + case (LongMap.Tip(key, value), x) => x.updateWith(key, value, (x, y) => f(key, y, x)) + case (x, LongMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y)) + case (LongMap.Nil, x) => x + case (x, LongMap.Nil) => x + } + + /** + * Forms the intersection of these two maps with a combining function.
The + * resulting map is a map that has only keys present in both maps and has + * values produced from the original mappings by combining them with `f`. + * + * @tparam S The type of values in `that`. + * @tparam R The type of values in the resulting `LongMap`. + * @param that The map to intersect with. + * @param f The combining function. + * @return Intersection of `this` and `that`, with values for identical keys produced by function `f`. + */ + def intersectionWith[S, R](that: LongMap[S], f: (Long, T, S) => R): LongMap[R] = (this, that) match { + case (LongMap.Bin(p1, m1, l1, r1), that@LongMap.Bin(p2, m2, l2, r2)) => + if (shorter(m1, m2)) { + if (!hasMatch(p2, p1, m1)) LongMap.Nil + else if (zero(p2, m1)) l1.intersectionWith(that, f) + else r1.intersectionWith(that, f) + } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f)) + else { + if (!hasMatch(p1, p2, m2)) LongMap.Nil + else if (zero(p1, m2)) this.intersectionWith(l2, f) + else this.intersectionWith(r2, f) + } + case (LongMap.Tip(key, value), that) => that.get(key) match { + case None => LongMap.Nil + case Some(value2) => LongMap.Tip(key, f(key, value, value2)) + } + case (_, LongMap.Tip(key, value)) => this.get(key) match { + case None => LongMap.Nil + case Some(value2) => LongMap.Tip(key, f(key, value2, value)) + } + case (_, _) => LongMap.Nil + } + + /** + * Left biased intersection. Returns the map that has all the same mappings as this but only for keys + * which are present in the other map. + * + * @tparam R The type of values in `that`. + * @param that The map to intersect with. + * @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`. + */ + def intersection[R](that: LongMap[R]): LongMap[T] = + this.intersectionWith(that, (key: Long, value: T, value2: R) => value) + + def ++[S >: T](that: LongMap[S]) = + this.unionWith[S](that, (key, x, y) => y) + + @tailrec + final def firstKey: Long = this match { + case LongMap.Bin(_, _, l, r) => l.firstKey + case LongMap.Tip(k, v) => k + case LongMap.Nil => throw new IllegalStateException("Empty set") + } + + @tailrec + final def lastKey: Long = this match { + case LongMap.Bin(_, _, l, r) => r.lastKey + case LongMap.Tip(k , v) => k + case LongMap.Nil => throw new IllegalStateException("Empty set") + } + + def map[V2](f: ((Long, T)) => (Long, V2)): LongMap[V2] = LongMap.from(new View.Map(coll, f)) + + def flatMap[V2](f: ((Long, T)) => IterableOnce[(Long, V2)]): LongMap[V2] = LongMap.from(new View.FlatMap(coll, f)) + + override def concat[V1 >: T](that: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = + super.concat(that).asInstanceOf[LongMap[V1]] // Already has correct type but not declared as such + + override def ++ [V1 >: T](that: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = concat(that) + + def collect[V2](pf: PartialFunction[(Long, T), (Long, V2)]): LongMap[V2] = + strictOptimizedCollect(LongMap.newBuilder[V2], pf) + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(LongMap.toFactory[T](LongMap), this) +} diff --git a/tests/pos-special/stdlib/collection/immutable/Map.scala b/tests/pos-special/stdlib/collection/immutable/Map.scala new file mode 100644 index 000000000000..1b74883bb612 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/Map.scala @@ -0,0 +1,693 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.generic.DefaultSerializable +import scala.collection.immutable.Map.Map4 +import scala.collection.mutable.{Builder, ReusableBuilder} +import language.experimental.captureChecking + +/** Base type of immutable Maps */ +trait Map[K, +V] + extends Iterable[(K, V)] + with collection.Map[K, V] + with MapOps[K, V, Map, Map[K, V]] + with MapFactoryDefaults[K, V, Map, Iterable] { + + override def mapFactory: scala.collection.MapFactory[Map] = Map + + override final def toMap[K2, V2](implicit ev: (K, V) <:< (K2, V2)): Map[K2, V2] = this.asInstanceOf[Map[K2, V2]] + + /** The same map with a given default function. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefault`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d the function mapping keys to values, used for non-present keys + * @return a wrapper of the map with a default value + */ + def withDefault[V1 >: V](d: K -> V1): Map[K, V1] = new Map.WithDefault[K, V1](this, d) + + /** The same map with a given default value. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefaultValue`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d default value used for non-present keys + * @return a wrapper of the map with a default value + */ + def withDefaultValue[V1 >: V](d: V1): Map[K, V1] = new Map.WithDefault[K, V1](this, _ => d) +} + +/** Base trait of immutable Maps implementations + * + * @define coll immutable map + * @define Coll `immutable.Map` + */ +trait MapOps[K, +V, +CC[X, +Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] + extends IterableOps[(K, V), Iterable, C] + with collection.MapOps[K, V, CC, C] { + + protected def coll: C with CC[K, V] + + /** Removes a key from this map, returning a new map. + * + * @param key the key to be removed + * @return a new map without a binding for ''key'' + */ + def removed(key: K): C + + /** Alias for `removed` */ + @`inline` final def - (key: K): C = removed(key) + + @deprecated("Use -- with an explicit collection", "2.13.0") + def - (key1: K, key2: K, keys: K*): C = removed(key1).removed(key2).removedAll(keys) + + /** Creates a new $coll from this $coll by removing the keys of another + * collection. + * + * $willForceEvaluation + * + * @param keys the collection containing the keys to remove. + * @return a new $coll that contains all mappings of the current $coll + * except those whose key is contained in `keys`. + */ + def removedAll(keys: IterableOnce[K]^): C = keys.iterator.foldLeft[C](coll)(_ - _) + + /** Alias for `removedAll` */ + @`inline` final override def -- (keys: IterableOnce[K]^): C = removedAll(keys) + + /** Creates a new map obtained by updating this map with a given key/value pair. + * @param key the key + * @param value the value + * @tparam V1 the type of the added value + * @return A new map with the new key/value mapping added to this map.
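+ * @example (editor's illustration, not part of the original doc) {{{ + * Map(1 -> "a").updated(2, "b") // Map(1 -> a, 2 -> b) + * }}}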
+ */ + def updated[V1 >: V](key: K, value: V1): CC[K, V1] + + /** + * Update a mapping for the specified key and its current optionally-mapped value + * (`Some` if there is a current mapping, `None` if not). + * + * If the remapping function returns `Some(v)`, the mapping is updated with the new value `v`. + * If the remapping function returns `None`, the mapping is removed (or remains absent if initially absent). + * If the function itself throws an exception, the exception is rethrown, and the current mapping is left unchanged. + * + * @param key the key value + * @param remappingFunction a function that receives the current optionally-mapped value and returns a new optional mapping + * @return A new map with the updated mapping for the key + */ + def updatedWith[V1 >: V](key: K)(remappingFunction: Option[V] => Option[V1]): CC[K,V1] = { + val previousValue = this.get(key) + remappingFunction(previousValue) match { + case None => previousValue.fold(coll)(_ => this.removed(key).coll) + case Some(nextValue) => + if (previousValue.exists(_.asInstanceOf[AnyRef] eq nextValue.asInstanceOf[AnyRef])) coll + else coll.updated(key, nextValue) + } + } + + /** + * Alias for `updated` + * + * @param kv the key/value pair. + * @tparam V1 the type of the value in the key/value pair. + * @return A new map with the new binding added to this map. + */ + override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = updated(kv._1, kv._2) + + /** Transforms all the values of the mappings contained + * in this map with function `f`. + * + * @param f A function over keys and values + * @return the updated map + */ + def transform[W](f: (K, V) => W): CC[K, W] = map { case (k, v) => (k, f(k, v)) } + + override def keySet: Set[K] = new ImmutableKeySet + + /** The implementation class of the set returned by `keySet` */ + protected class ImmutableKeySet extends AbstractSet[K] with GenKeySet with DefaultSerializable { + def incl(elem: K): Set[K] = if (this(elem)) this else empty ++ this + elem + def excl(elem: K): Set[K] = if (this(elem)) empty ++ this - elem else this + } + +} + +trait StrictOptimizedMapOps[K, +V, +CC[X, +Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] + extends MapOps[K, V, CC, C] + with collection.StrictOptimizedMapOps[K, V, CC, C] + with StrictOptimizedIterableOps[(K, V), Iterable, C] { + + override def concat [V1 >: V](that: collection.IterableOnce[(K, V1)]^): CC[K, V1] = { + var result: CC[K, V1] = coll + val it = that.iterator + while (it.hasNext) result = result + it.next() + result + } +} + + +/** + * $factoryInfo + * @define coll immutable map + * @define Coll `immutable.Map` + */ +@SerialVersionUID(3L) +object Map extends MapFactory[Map] { + + @SerialVersionUID(3L) + class WithDefault[K, +V](val underlying: Map[K, V], val defaultValue: K -> V) + extends AbstractMap[K, V] + with MapOps[K, V, Map, WithDefault[K, V]] with Serializable { + + def get(key: K): Option[V] = underlying.get(key) + + override def default(key: K): V = defaultValue(key) + + override def iterableFactory: IterableFactory[Iterable] = underlying.iterableFactory + + def iterator: Iterator[(K, V)] = underlying.iterator + + override def isEmpty: Boolean = underlying.isEmpty + + override def mapFactory: MapFactory[Map] = underlying.mapFactory + + override def concat [V2 >: V](xs: collection.IterableOnce[(K, V2)]^): WithDefault[K, V2] = + new WithDefault(underlying.concat(xs), defaultValue) + + def removed(key: K): WithDefault[K, V] = new WithDefault[K, V](underlying.removed(key), defaultValue) + + def updated[V1 >: V](key: K,
value: V1): WithDefault[K, V1] = + new WithDefault[K, V1](underlying.updated(key, value), defaultValue) + + override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) + + override protected def fromSpecific(coll: collection.IterableOnce[(K, V) @uncheckedVariance]^): WithDefault[K, V] = + new WithDefault[K, V](mapFactory.from(coll), defaultValue) + + override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] @uncheckedVariance = + Map.newBuilder.mapResult((p: Map[K, V]) => new WithDefault[K, V](p, defaultValue)) + } + + def empty[K, V]: Map[K, V] = EmptyMap.asInstanceOf[Map[K, V]] + + def from[K, V](it: collection.IterableOnce[(K, V)]^): Map[K, V] = + it match { + case it: Iterable[_] if it.isEmpty => empty[K, V] + case m: Map[K, V] => m + case _ => (newBuilder[K, V] ++= it).result() + } + + def newBuilder[K, V]: Builder[(K, V), Map[K, V]] = new MapBuilderImpl + + @SerialVersionUID(3L) + private object EmptyMap extends AbstractMap[Any, Nothing] with Serializable { + override def size: Int = 0 + override def knownSize: Int = 0 + override def isEmpty: Boolean = true + override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key) + override def contains(key: Any) = false + def get(key: Any): Option[Nothing] = None + override def getOrElse [V1](key: Any, default: => V1): V1 = default + def iterator: Iterator[(Any, Nothing)] = Iterator.empty + override def keysIterator: Iterator[Any] = Iterator.empty + override def valuesIterator: Iterator[Nothing] = Iterator.empty + def updated [V1] (key: Any, value: V1): Map[Any, V1] = new Map1(key, value) + def removed(key: Any): Map[Any, Nothing] = this + override def concat[V2 >: Nothing](suffix: IterableOnce[(Any, V2)]^): Map[Any, V2] = suffix match { + case m: immutable.Map[Any, V2] => m + case _ => super.concat(suffix) + } + } + + @SerialVersionUID(3L) + final class Map1[K, +V](key1: K, value1: V) extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { + override def size: Int = 1 + override def knownSize: Int = 1 + override def isEmpty: Boolean = false + override def apply(key: K): V = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K): Boolean = key == key1 + def get(key: K): Option[V] = + if (key == key1) Some(value1) else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 else default + def iterator: Iterator[(K, V)] = Iterator.single((key1, value1)) + override def keysIterator: Iterator[K] = Iterator.single(key1) + override def valuesIterator: Iterator[V] = Iterator.single(value1) + def updated[V1 >: V](key: K, value: V1): Map[K, V1] = + if (key == key1) new Map1(key1, value) + else new Map2(key1, value1, key, value) + def removed(key: K): Map[K, V] = + if (key == key1) Map.empty else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)) + } + override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) + override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) + override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = + if (pred((key1, value1)) != isFlipped) this else Map.empty + override def transform[W](f: (K, V) => W): Map[K, W] = { + val walue1 = f(key1, value1) + if (walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) this.asInstanceOf[Map[K, W]] + else new Map1(key1, walue1) + } + override def 
hashCode(): Int = { + import scala.util.hashing.MurmurHash3 + var a, b = 0 + val N = 1 + var c = 1 + + var h = MurmurHash3.tuple2Hash(key1, value1) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.mapSeed + h = MurmurHash3.mix(h, a) + h = MurmurHash3.mix(h, b) + h = MurmurHash3.mixLast(h, c) + MurmurHash3.finalizeHash(h, N) + } + } + + @SerialVersionUID(3L) + final class Map2[K, +V](key1: K, value1: V, key2: K, value2: V) extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { + override def size: Int = 2 + override def knownSize: Int = 2 + override def isEmpty: Boolean = false + override def apply(key: K): V = + if (key == key1) value1 + else if (key == key2) value2 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K): Boolean = (key == key1) || (key == key2) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else default + def iterator: Iterator[(K, V)] = new Map2Iterator[(K, V)] { + override protected def nextResult(k: K, v: V): (K, V) = (k, v) + } + override def keysIterator: Iterator[K] = new Map2Iterator[K] { + override protected def nextResult(k: K, v: V): K = k + } + override def valuesIterator: Iterator[V] = new Map2Iterator[V] { + override protected def nextResult(k: K, v: V): V = v + } + + private abstract class Map2Iterator[A] extends AbstractIterator[A], Pure { + private[this] var i = 0 + override def hasNext: Boolean = i < 2 + override def next(): A = { + val result = i match { + case 0 => nextResult(key1, value1) + case 1 => nextResult(key2, value2) + case _ => Iterator.empty.next() + } + i += 1 + result + } + override def drop(n: Int): Iterator[A] = { i += n; this } + protected def nextResult(k: K, v: V @uncheckedVariance): A + } + def updated[V1 >: V](key: K, value: V1): Map[K, V1] = + if (key == key1) new Map2(key1, value, key2, value2) + else if (key == key2) new Map2(key1, value1, key2, value) + else new Map3(key1, value1, key2, value2, key, value) + def removed(key: K): Map[K, V] = + if (key == key1) new Map1(key2, value2) + else if (key == key2) new Map1(key1, value1) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)) + } + override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) || p((key2, value2)) + override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) && p((key2, value2)) + override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = { + var k1 = null.asInstanceOf[K] + var v1 = null.asInstanceOf[V] + var n = 0 + if (pred((key1, value1)) != isFlipped) { {k1 = key1; v1 = value1}; n += 1} + if (pred((key2, value2)) != isFlipped) { if (n == 0) {k1 = key2; v1 = value2}; n += 1} + + n match { + case 0 => Map.empty + case 1 => new Map1(k1, v1) + case 2 => this + } + } + override def transform[W](f: (K, V) => W): Map[K, W] = { + val walue1 = f(key1, value1) + val walue2 = f(key2, value2) + if ((walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) && + (walue2.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef])) this.asInstanceOf[Map[K, W]] + else new Map2(key1, walue1, key2, walue2) + } + override def hashCode(): Int = { + import scala.util.hashing.MurmurHash3 + var a, b = 0 + val N = 2 + var c = 1 + + var h = MurmurHash3.tuple2Hash(key1, value1) + a += h 
+ b ^= h + c *= h | 1 + + h = MurmurHash3.tuple2Hash(key2, value2) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.mapSeed + h = MurmurHash3.mix(h, a) + h = MurmurHash3.mix(h, b) + h = MurmurHash3.mixLast(h, c) + MurmurHash3.finalizeHash(h, N) + } + } + + @SerialVersionUID(3L) + class Map3[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V) extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { + override def size: Int = 3 + override def knownSize: Int = 3 + override def isEmpty: Boolean = false + override def apply(key: K): V = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K): Boolean = (key == key1) || (key == key2) || (key == key3) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else if (key == key3) Some(value3) + else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else default + def iterator: Iterator[(K, V)] = new Map3Iterator[(K, V)] { + override protected def nextResult(k: K, v: V): (K, V) = (k, v) + } + override def keysIterator: Iterator[K] = new Map3Iterator[K] { + override protected def nextResult(k: K, v: V): K = k + } + override def valuesIterator: Iterator[V] = new Map3Iterator[V] { + override protected def nextResult(k: K, v: V): V = v + } + + private abstract class Map3Iterator[A] extends AbstractIterator[A], Pure { + private[this] var i = 0 + override def hasNext: Boolean = i < 3 + override def next(): A = { + val result = i match { + case 0 => nextResult(key1, value1) + case 1 => nextResult(key2, value2) + case 2 => nextResult(key3, value3) + case _ => Iterator.empty.next() + } + i += 1 + result + } + override def drop(n: Int): Iterator[A] = { i += n; this } + protected def nextResult(k: K, v: V @uncheckedVariance): A + } + def updated[V1 >: V](key: K, value: V1): Map[K, V1] = + if (key == key1) new Map3(key1, value, key2, value2, key3, value3) + else if (key == key2) new Map3(key1, value1, key2, value, key3, value3) + else if (key == key3) new Map3(key1, value1, key2, value2, key3, value) + else new Map4(key1, value1, key2, value2, key3, value3, key, value) + def removed(key: K): Map[K, V] = + if (key == key1) new Map2(key2, value2, key3, value3) + else if (key == key2) new Map2(key1, value1, key3, value3) + else if (key == key3) new Map2(key1, value1, key2, value2) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)); f((key3, value3)) + } + override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) || p((key2, value2)) || p((key3, value3)) + override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) && p((key2, value2)) && p((key3, value3)) + override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = { + var k1, k2 = null.asInstanceOf[K] + var v1, v2 = null.asInstanceOf[V] + var n = 0 + if (pred((key1, value1)) != isFlipped) { { k1 = key1; v1 = value1 }; n += 1} + if (pred((key2, value2)) != isFlipped) { if (n == 0) { k1 = key2; v1 = value2 } else { k2 = key2; v2 = value2 }; n += 1} + if (pred((key3, value3)) != isFlipped) { if (n == 0) { k1 = key3; v1 = value3 } else if (n == 1) { k2 = key3; v2 = value3 }; n += 1} + + n match { + case 0 => Map.empty + case 1 => new Map1(k1, 
v1) + case 2 => new Map2(k1, v1, k2, v2) + case 3 => this + } + } + override def transform[W](f: (K, V) => W): Map[K, W] = { + val walue1 = f(key1, value1) + val walue2 = f(key2, value2) + val walue3 = f(key3, value3) + if ((walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) && + (walue2.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) && + (walue3.asInstanceOf[AnyRef] eq value3.asInstanceOf[AnyRef])) this.asInstanceOf[Map[K, W]] + else new Map3(key1, walue1, key2, walue2, key3, walue3) + } + override def hashCode(): Int = { + import scala.util.hashing.MurmurHash3 + var a, b = 0 + val N = 3 + var c = 1 + + var h = MurmurHash3.tuple2Hash(key1, value1) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.tuple2Hash(key2, value2) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.tuple2Hash(key3, value3) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.mapSeed + h = MurmurHash3.mix(h, a) + h = MurmurHash3.mix(h, b) + h = MurmurHash3.mixLast(h, c) + MurmurHash3.finalizeHash(h, N) + } + } + + @SerialVersionUID(3L) + final class Map4[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V, key4: K, value4: V) + extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { + + override def size: Int = 4 + override def knownSize: Int = 4 + override def isEmpty: Boolean = false + override def apply(key: K): V = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else if (key == key4) value4 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K): Boolean = (key == key1) || (key == key2) || (key == key3) || (key == key4) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else if (key == key3) Some(value3) + else if (key == key4) Some(value4) + else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else if (key == key4) value4 + else default + def iterator: Iterator[(K, V)] = new Map4Iterator[(K, V)] { + override protected def nextResult(k: K, v: V): (K, V) = (k, v) + } + override def keysIterator: Iterator[K] = new Map4Iterator[K] { + override protected def nextResult(k: K, v: V): K = k + } + override def valuesIterator: Iterator[V] = new Map4Iterator[V] { + override protected def nextResult(k: K, v: V): V = v + } + + private abstract class Map4Iterator[A] extends AbstractIterator[A], Pure { + private[this] var i = 0 + override def hasNext: Boolean = i < 4 + override def next(): A = { + val result = i match { + case 0 => nextResult(key1, value1) + case 1 => nextResult(key2, value2) + case 2 => nextResult(key3, value3) + case 3 => nextResult(key4, value4) + case _ => Iterator.empty.next() + } + i += 1 + result + } + override def drop(n: Int): Iterator[A] = { i += n; this } + protected def nextResult(k: K, v: V @uncheckedVariance): A + } + def updated[V1 >: V](key: K, value: V1): Map[K, V1] = + if (key == key1) new Map4(key1, value, key2, value2, key3, value3, key4, value4) + else if (key == key2) new Map4(key1, value1, key2, value, key3, value3, key4, value4) + else if (key == key3) new Map4(key1, value1, key2, value2, key3, value, key4, value4) + else if (key == key4) new Map4(key1, value1, key2, value2, key3, value3, key4, value) + else HashMap.empty[K, V1].updated(key1,value1).updated(key2, value2).updated(key3, value3).updated(key4, value4).updated(key, value) + def removed(key: K): Map[K, V] = 
+ if (key == key1) new Map3(key2, value2, key3, value3, key4, value4) + else if (key == key2) new Map3(key1, value1, key3, value3, key4, value4) + else if (key == key3) new Map3(key1, value1, key2, value2, key4, value4) + else if (key == key4) new Map3(key1, value1, key2, value2, key3, value3) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)); f((key3, value3)); f((key4, value4)) + } + override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) || p((key2, value2)) || p((key3, value3)) || p((key4, value4)) + override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) && p((key2, value2)) && p((key3, value3)) && p((key4, value4)) + override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = { + var k1, k2, k3 = null.asInstanceOf[K] + var v1, v2, v3 = null.asInstanceOf[V] + var n = 0 + if (pred((key1, value1)) != isFlipped) { { k1 = key1; v1 = value1 }; n += 1} + if (pred((key2, value2)) != isFlipped) { if (n == 0) { k1 = key2; v1 = value2 } else { k2 = key2; v2 = value2 }; n += 1} + if (pred((key3, value3)) != isFlipped) { if (n == 0) { k1 = key3; v1 = value3 } else if (n == 1) { k2 = key3; v2 = value3 } else { k3 = key3; v3 = value3}; n += 1} + if (pred((key4, value4)) != isFlipped) { if (n == 0) { k1 = key4; v1 = value4 } else if (n == 1) { k2 = key4; v2 = value4 } else if (n == 2) { k3 = key4; v3 = value4 }; n += 1} + + n match { + case 0 => Map.empty + case 1 => new Map1(k1, v1) + case 2 => new Map2(k1, v1, k2, v2) + case 3 => new Map3(k1, v1, k2, v2, k3, v3) + case 4 => this + } + } + override def transform[W](f: (K, V) => W): Map[K, W] = { + val walue1 = f(key1, value1) + val walue2 = f(key2, value2) + val walue3 = f(key3, value3) + val walue4 = f(key4, value4) + if ((walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) && + (walue2.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) && + (walue3.asInstanceOf[AnyRef] eq value3.asInstanceOf[AnyRef]) && + (walue4.asInstanceOf[AnyRef] eq value4.asInstanceOf[AnyRef])) this.asInstanceOf[Map[K, W]] + else new Map4(key1, walue1, key2, walue2, key3, walue3, key4, walue4) + } + private[immutable] def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): builder.type = + builder.addOne(key1, value1).addOne(key2, value2).addOne(key3, value3).addOne(key4, value4) + override def hashCode(): Int = { + import scala.util.hashing.MurmurHash3 + var a, b = 0 + val N = 4 + var c = 1 + + var h = MurmurHash3.tuple2Hash(key1, value1) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.tuple2Hash(key2, value2) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.tuple2Hash(key3, value3) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.tuple2Hash(key4, value4) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.mapSeed + h = MurmurHash3.mix(h, a) + h = MurmurHash3.mix(h, b) + h = MurmurHash3.mixLast(h, c) + MurmurHash3.finalizeHash(h, N) + } + } +} + +/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. 
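+  * (Subclasses inherit the trait's forwarder methods from this class instead of each generating their own copies.)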
*/ +abstract class AbstractMap[K, +V] extends scala.collection.AbstractMap[K, V] with Map[K, V] + +private[immutable] final class MapBuilderImpl[K, V] extends ReusableBuilder[(K, V), Map[K, V]] { + private[this] var elems: Map[K, V] = Map.empty + private[this] var switchedToHashMapBuilder: Boolean = false + private[this] var hashMapBuilder: HashMapBuilder[K, V] = _ + + private[immutable] def getOrElse[V0 >: V](key: K, value: V0): V0 = + if (hashMapBuilder ne null) hashMapBuilder.getOrElse(key, value) + else elems.getOrElse(key, value) + + override def clear(): Unit = { + elems = Map.empty + if (hashMapBuilder != null) { + hashMapBuilder.clear() + } + switchedToHashMapBuilder = false + } + + override def result(): Map[K, V] = + if (switchedToHashMapBuilder) hashMapBuilder.result() else elems + + def addOne(key: K, value: V): this.type = { + if (switchedToHashMapBuilder) { + hashMapBuilder.addOne(key, value) + } else if (elems.size < 4) { + elems = elems.updated(key, value) + } else { + // assert(elems.size == 4) + if (elems.contains(key)) { + elems = elems.updated(key, value) + } else { + switchedToHashMapBuilder = true + if (hashMapBuilder == null) { + hashMapBuilder = new HashMapBuilder + } + elems.asInstanceOf[Map4[K, V]].buildTo(hashMapBuilder) + hashMapBuilder.addOne(key, value) + } + } + + this + } + + def addOne(elem: (K, V)) = addOne(elem._1, elem._2) + + override def addAll(xs: IterableOnce[(K, V)]^): this.type = + if (switchedToHashMapBuilder) { + hashMapBuilder.addAll(xs) + this + } else { + super.addAll(xs) + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/NumericRange.scala b/tests/pos-special/stdlib/collection/immutable/NumericRange.scala new file mode 100644 index 000000000000..695083faf27d --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/NumericRange.scala @@ -0,0 +1,508 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.immutable + +import scala.collection.Stepper.EfficientSplit +import scala.collection.{AbstractIterator, AnyStepper, IterableFactoryDefaults, Iterator, Stepper, StepperShape} +import language.experimental.captureChecking + +/** `NumericRange` is a more generic version of the + * `Range` class which works with arbitrary types. + * It must be supplied with an `Integral` implementation of the + * range type. + * + * Factories for likely types include `Range.BigInt`, `Range.Long`, + * and `Range.BigDecimal`. `Range.Int` exists for completeness, but + * the `Int`-based `scala.Range` should be more performant. 
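+ *  For example, a `Long`-based range can hold bounds beyond `Int.MaxValue`: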
+ * + * {{{ + * val r1 = Range(0, 100, 1) + * val veryBig = Int.MaxValue.toLong + 1 + * val r2 = Range.Long(veryBig, veryBig + 100, 1) + * assert(r1 sameElements r2.map(_ - veryBig)) + * }}} + * + * @define Coll `NumericRange` + * @define coll numeric range + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@SerialVersionUID(3L) +sealed class NumericRange[T]( + val start: T, + val end: T, + val step: T, + val isInclusive: Boolean +)(implicit + num: Integral[T] +) + extends AbstractSeq[T] + with IndexedSeq[T] + with IndexedSeqOps[T, IndexedSeq, IndexedSeq[T]] + with StrictOptimizedSeqOps[T, IndexedSeq, IndexedSeq[T]] + with IterableFactoryDefaults[T, IndexedSeq] + with Serializable { self => + + override def iterator: Iterator[T] = new NumericRange.NumericRangeIterator(this, num) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = { + import scala.collection.convert._ + import impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntNumericRangeStepper (this.asInstanceOf[NumericRange[Int]], 0, length) + case StepperShape.LongShape => new LongNumericRangeStepper (this.asInstanceOf[NumericRange[Long]], 0, length) + case _ => shape.parUnbox(new AnyNumericRangeStepper[T](this, 0, length).asInstanceOf[AnyStepper[T] with EfficientSplit]) + } + s.asInstanceOf[S with EfficientSplit] + } + + + /** Note that NumericRange must be invariant so that constructs + * such as "1L to 10 by 5" do not infer the range type as AnyVal. + */ + import num._ + + // See comment in Range for why this must be lazy. + override lazy val length: Int = NumericRange.count(start, end, step, isInclusive) + override lazy val isEmpty: Boolean = ( + (num.gt(start, end) && num.gt(step, num.zero)) + || (num.lt(start, end) && num.lt(step, num.zero)) + || (num.equiv(start, end) && !isInclusive) + ) + override def last: T = + if (isEmpty) Nil.head + else locationAfterN(length - 1) + override def init: NumericRange[T] = + if (isEmpty) Nil.init + else new NumericRange(start, end - step, step, isInclusive) + + override def head: T = if (isEmpty) Nil.head else start + override def tail: NumericRange[T] = + if (isEmpty) Nil.tail + else if(isInclusive) new NumericRange.Inclusive(start + step, end, step) + else new NumericRange.Exclusive(start + step, end, step) + + /** Create a new range with the start and end values of this range and + * a new `step`. + */ + def by(newStep: T): NumericRange[T] = copy(start, end, newStep) + + + /** Create a copy of this range. + */ + def copy(start: T, end: T, step: T): NumericRange[T] = + new NumericRange(start, end, step, isInclusive) + + @throws[IndexOutOfBoundsException] + def apply(idx: Int): T = { + if (idx < 0 || idx >= length) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${length - 1})") + else locationAfterN(idx) + } + + override def foreach[@specialized(Specializable.Unit) U](f: T => U): Unit = { + var count = 0 + var current = start + while (count < length) { + f(current) + current += step + count += 1 + } + } + + // TODO: these private methods are straight copies from Range, duplicated + // to guard against any (most likely illusory) performance drop. They should + // be eliminated one way or another. + + // Tests whether a number is within the endpoints, without testing + // whether it is a member of the sequence (i.e. when step > 1.) 
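+  // For example, in NumericRange(0L, 10L, 3L) the value 5L lies within the +  // endpoints (0 and 9) but is not an element (0, 3, 6, 9).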
+  private def isWithinBoundaries(elem: T) = !isEmpty && ( +    (step > zero && start <= elem && elem <= last ) || +      (step < zero &&  last <= elem && elem <= start) +  ) +  // Methods like apply throw exceptions on invalid n, but methods like take/drop +  // are forgiving: therefore the checks are with the methods. +  private def locationAfterN(n: Int): T = start + (step * fromInt(n)) + +  private def crossesTheEndAfterN(n: Int): Boolean = { +    // if we're sure that subtraction in the context of T won't overflow, we use this function +    // to calculate the length of the range +    def unsafeRangeLength(r: NumericRange[T]): T = { +      val diff = num.minus(r.end, r.start) +      val quotient = num.quot(diff, r.step) +      val remainder = num.rem(diff, r.step) +      if (!r.isInclusive && num.equiv(remainder, num.zero)) +        num.max(quotient, num.zero) +      else +        num.max(num.plus(quotient, num.one), num.zero) +    } + +    // detects whether a value survives a round trip to Int and back unchanged. +    def fitsInInteger(value: T): Boolean = num.equiv(num.fromInt(num.toInt(value)), value) + +    val stepIsInTheSameDirectionAsStartToEndVector = +      (num.gt(end, start) && num.gt(step, num.zero)) || (num.lt(end, start) && num.sign(step) == -num.one) + +    if (num.equiv(start, end) || n <= 0 || !stepIsInTheSameDirectionAsStartToEndVector) return n >= 1 + +    val sameSign = num.equiv(num.sign(start), num.sign(end)) + +    if (sameSign) { // subtraction is safe +      val len = unsafeRangeLength(this) +      if (fitsInInteger(len)) n >= num.toInt(len) else num.gteq(num.fromInt(n), len) +    } else { +      // split into two ranges (around zero) in which subtraction is safe +      val stepsRemainderToZero = num.rem(start, step) +      val walksOnZero = num.equiv(stepsRemainderToZero, num.zero) +      val closestToZero = if (walksOnZero) -step else stepsRemainderToZero + +      /* +      When splitting into two ranges, we should be super-careful about one of the sides hitting MinValue of T, +      so we take two steps smaller than zero to ensure unsafeRangeLength won't overflow (taking one step may overflow depending on the step). +      Same thing happens for MaxValue from zero, so we take one step further to ensure the safety of unsafeRangeLength. +      After performing this operation, some elements remain in between and around zero; +      their count is represented by carry.
+      */ +      val (l: NumericRange[T], r: NumericRange[T], carry: Int) = +        if (num.lt(start, num.zero)) { +          if (walksOnZero) { +            val twoStepsAfterLargestNegativeNumber = num.plus(closestToZero, num.times(step, num.fromInt(2))) +            (NumericRange(start, closestToZero, step), copy(twoStepsAfterLargestNegativeNumber, end, step), 2) +          } else { +            (NumericRange(start, closestToZero, step), copy(num.plus(closestToZero, step), end, step), 1) +          } +        } else { +          if (walksOnZero) { +            val twoStepsAfterZero = num.times(step, num.fromInt(2)) +            (copy(twoStepsAfterZero, end, step), NumericRange.inclusive(start, -step, step), 2) +          } else { +            val twoStepsAfterSmallestPositiveNumber = num.plus(closestToZero, num.times(step, num.fromInt(2))) +            (copy(twoStepsAfterSmallestPositiveNumber, end, step), NumericRange.inclusive(start, closestToZero, step), 2) +          } +        } + +      val leftLength = unsafeRangeLength(l) +      val rightLength = unsafeRangeLength(r) + +      // instead of `n >= rightLength + leftLength + carry`, which may cause addition overflow, +      // we can use `(n - leftLength - carry) >= rightLength` (both in Int and in T, depending on whether the lengths fit in an Int) +      if (fitsInInteger(leftLength) && fitsInInteger(rightLength)) +        n - num.toInt(leftLength) - carry >= num.toInt(rightLength) +      else +        num.gteq(num.minus(num.minus(num.fromInt(n), leftLength), num.fromInt(carry)), rightLength) +    } +  } + +  // When one drops everything.  Can't ever have unchecked operations +  // like "end + 1" or "end - 1" because ranges involving Int.{ MinValue, MaxValue } +  // will overflow.  This creates an exclusive range where start == end +  // based on the given value. +  private def newEmptyRange(value: T) = NumericRange(value, value, step) + +  override def take(n: Int): NumericRange[T] = { +    if (n <= 0 || isEmpty) newEmptyRange(start) +    else if (crossesTheEndAfterN(n)) this +    else new NumericRange.Inclusive(start, locationAfterN(n - 1), step) +  } + +  override def drop(n: Int): NumericRange[T] = { +    if (n <= 0 || isEmpty) this +    else if (crossesTheEndAfterN(n)) newEmptyRange(end) +    else copy(locationAfterN(n), end, step) +  } + +  override def splitAt(n: Int): (NumericRange[T], NumericRange[T]) = (take(n), drop(n)) + +  override def reverse: NumericRange[T] = +    if (isEmpty) this +    else { +      val newStep = -step +      if (num.sign(newStep) == num.sign(step)) { +        throw new ArithmeticException("number type is unsigned, and .reverse requires a negative step") +      } else new NumericRange.Inclusive(last, start, newStep) +    } + +  import NumericRange.defaultOrdering + +  override def min[T1 >: T](implicit ord: Ordering[T1]): T = +    // We can take the fast path: +    // - If the Integral of this NumericRange is also the requested Ordering +    //   (Integral <: Ordering). This can happen for custom Integral types. +    // - The Ordering is the default Ordering of a well-known Integral type. +    if ((ord eq num) || defaultOrdering.get(num).exists(ord eq _)) { +      if (num.sign(step) > zero) head +      else last +    } else super.min(ord) + +  override def max[T1 >: T](implicit ord: Ordering[T1]): T = +    // See comment for fast path in min(). +    if ((ord eq num) || defaultOrdering.get(num).exists(ord eq _)) { +      if (num.sign(step) > zero) last +      else head +    } else super.max(ord) + +  // a well-typed contains method.
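+  // For example, NumericRange(0L, 10L, 3L).containsTyped(6L) is true, while +  // containsTyped(5L) is false since (5 - 0) % 3 != 0.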
+ def containsTyped(x: T): Boolean = + isWithinBoundaries(x) && (((x - start) % step) == zero) + + override def contains[A1 >: T](x: A1): Boolean = + try containsTyped(x.asInstanceOf[T]) + catch { case _: ClassCastException => false } + + override def sum[B >: T](implicit num: Numeric[B]): B = { + if (isEmpty) num.zero + else if (size == 1) head + else { + // If there is no overflow, use arithmetic series formula + // a + ... (n terms total) ... + b = n*(a+b)/2 + if ((num eq scala.math.Numeric.IntIsIntegral)|| + (num eq scala.math.Numeric.ShortIsIntegral)|| + (num eq scala.math.Numeric.ByteIsIntegral)|| + (num eq scala.math.Numeric.CharIsIntegral)) { + // We can do math with no overflow in a Long--easy + val exact = (size * ((num toLong head) + (num toInt last))) / 2 + num fromInt exact.toInt + } + else if (num eq scala.math.Numeric.LongIsIntegral) { + // Uh-oh, might be overflow, so we have to divide before we overflow. + // Either numRangeElements or (head + last) must be even, so divide the even one before multiplying + val a = head.toLong + val b = last.toLong + val ans = + if ((size & 1) == 0) (size / 2) * (a + b) + else size * { + // Sum is even, but we might overflow it, so divide in pieces and add back remainder + val ha = a/2 + val hb = b/2 + ha + hb + ((a - 2*ha) + (b - 2*hb)) / 2 + } + ans.asInstanceOf[B] + } + else if ((num eq scala.math.Numeric.BigIntIsIntegral) || + (num eq scala.math.Numeric.BigDecimalAsIfIntegral)) { + // No overflow, so we can use arithmetic series formula directly + // (not going to worry about running out of memory) + val numAsIntegral = num.asInstanceOf[Integral[B]] + import numAsIntegral._ + ((num fromInt size) * (head + last)) / (num fromInt 2) + } + else { + // User provided custom Numeric, so we cannot rely on arithmetic series formula (e.g. won't work on something like Z_6) + if (isEmpty) num.zero + else { + var acc = num.zero + var i = head + var idx = 0 + while(idx < length) { + acc = num.plus(acc, i) + i = i + step + idx = idx + 1 + } + acc + } + } + } + } + + override lazy val hashCode: Int = super.hashCode() + override protected final def applyPreferredMaxLength: Int = Int.MaxValue + + override def equals(other: Any): Boolean = other match { + case x: NumericRange[_] => + (x canEqual this) && (length == x.length) && ( + (isEmpty) || // all empty sequences are equal + (start == x.start && last == x.last) // same length and same endpoints implies equality + ) + case _ => + super.equals(other) + } + + override def toString: String = { + val empty = if (isEmpty) "empty " else "" + val preposition = if (isInclusive) "to" else "until" + val stepped = if (step == 1) "" else s" by $step" + s"${empty}NumericRange $start $preposition $end$stepped" + } + + override protected[this] def className = "NumericRange" +} + +/** A companion object for numeric ranges. 
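+  *  It provides the `apply` and `inclusive` factories, the element-count logic in `count`, and the `Inclusive`/`Exclusive` subclasses.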
+ * @define Coll `NumericRange` + * @define coll numeric range + */ +object NumericRange { + private def bigDecimalCheckUnderflow[T](start: T, end: T, step: T)(implicit num: Integral[T]): Unit = { + def FAIL(boundary: T, step: T): Unit = { + val msg = boundary match { + case bd: BigDecimal => s"Precision ${bd.mc.getPrecision}" + case _ => "Precision" + } + throw new IllegalArgumentException( + s"$msg inadequate to represent steps of size $step near $boundary" + ) + } + if (num.minus(num.plus(start, step), start) != step) FAIL(start, step) + if (num.minus(end, num.minus(end, step)) != step) FAIL(end, step) + } + + /** Calculates the number of elements in a range given start, end, step, and + * whether or not it is inclusive. Throws an exception if step == 0 or + * the number of elements exceeds the maximum Int. + */ + def count[T](start: T, end: T, step: T, isInclusive: Boolean)(implicit num: Integral[T]): Int = { + val zero = num.zero + val upward = num.lt(start, end) + val posStep = num.gt(step, zero) + + if (step == zero) throw new IllegalArgumentException("step cannot be 0.") + else if (start == end) if (isInclusive) 1 else 0 + else if (upward != posStep) 0 + else { + /* We have to be frightfully paranoid about running out of range. + * We also can't assume that the numbers will fit in a Long. + * We will assume that if a > 0, -a can be represented, and if + * a < 0, -a+1 can be represented. We also assume that if we + * can't fit in Int, we can represent 2*Int.MaxValue+3 (at least). + * And we assume that numbers wrap rather than cap when they overflow. + */ + // Check whether we can short-circuit by deferring to Int range. + val startint = num.toInt(start) + if (start == num.fromInt(startint)) { + val endint = num.toInt(end) + if (end == num.fromInt(endint)) { + val stepint = num.toInt(step) + if (step == num.fromInt(stepint)) { + return { + if (isInclusive) Range.inclusive(startint, endint, stepint).length + else Range (startint, endint, stepint).length + } + } + } + } + // If we reach this point, deferring to Int failed. + // Numbers may be big. + if (num.isInstanceOf[Numeric.BigDecimalAsIfIntegral]) { + bigDecimalCheckUnderflow(start, end, step) // Throw exception if math is inaccurate (including no progress at all) + } + val one = num.one + val limit = num.fromInt(Int.MaxValue) + def check(t: T): T = + if (num.gt(t, limit)) throw new IllegalArgumentException("More than Int.MaxValue elements.") + else t + // If the range crosses zero, it might overflow when subtracted + val startside = num.sign(start) + val endside = num.sign(end) + num.toInt{ + if (num.gteq(num.times(startside, endside), zero)) { + // We're sure we can subtract these numbers. + // Note that we do not use .rem because of different conventions for Long and BigInt + val diff = num.minus(end, start) + val quotient = check(num.quot(diff, step)) + val remainder = num.minus(diff, num.times(quotient, step)) + if (!isInclusive && zero == remainder) quotient else check(num.plus(quotient, one)) + } + else { + // We might not even be able to subtract these numbers. + // Jump in three pieces: + // * start to -1 or 1, whichever is closer (waypointA) + // * one step, which will take us at least to 0 (ends at waypointB) + // * (except with really small numbers) + // * there to the end + val negone = num.fromInt(-1) + val startlim = if (posStep) negone else one + //Use start value if the start value is closer to zero than startlim + // * e.g. 
.5 is closer to zero than 1 and -.5 is closer to zero than -1 + val startdiff = { + if ((posStep && num.lt(startlim, start)) || (!posStep && num.gt(startlim, start))) + start + else + num.minus(startlim, start) + } + val startq = check(num.quot(startdiff, step)) + val waypointA = if (startq == zero) start else num.plus(start, num.times(startq, step)) + val waypointB = num.plus(waypointA, step) + check { + if (num.lt(waypointB, end) != upward) { + // No last piece + if (isInclusive && waypointB == end) num.plus(startq, num.fromInt(2)) + else num.plus(startq, one) + } + else { + // There is a last piece + val enddiff = num.minus(end,waypointB) + val endq = check(num.quot(enddiff, step)) + val last = if (endq == zero) waypointB else num.plus(waypointB, num.times(endq, step)) + // Now we have to tally up all the pieces + // 1 for the initial value + // startq steps to waypointA + // 1 step to waypointB + // endq steps to the end (one less if !isInclusive and last==end) + num.plus(startq, num.plus(endq, if (!isInclusive && last==end) one else num.fromInt(2))) + } + } + } + } + } + } + + @SerialVersionUID(3L) + class Inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]) + extends NumericRange(start, end, step, true) { + override def copy(start: T, end: T, step: T): Inclusive[T] = + NumericRange.inclusive(start, end, step) + + def exclusive: Exclusive[T] = NumericRange(start, end, step) + } + + @SerialVersionUID(3L) + class Exclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]) + extends NumericRange(start, end, step, false) { + override def copy(start: T, end: T, step: T): Exclusive[T] = + NumericRange(start, end, step) + + def inclusive: Inclusive[T] = NumericRange.inclusive(start, end, step) + } + + def apply[T](start: T, end: T, step: T)(implicit num: Integral[T]): Exclusive[T] = + new Exclusive(start, end, step) + def inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]): Inclusive[T] = + new Inclusive(start, end, step) + + private[collection] val defaultOrdering = Map[Numeric[_], Ordering[_]]( + Numeric.BigIntIsIntegral -> Ordering.BigInt, + Numeric.IntIsIntegral -> Ordering.Int, + Numeric.ShortIsIntegral -> Ordering.Short, + Numeric.ByteIsIntegral -> Ordering.Byte, + Numeric.CharIsIntegral -> Ordering.Char, + Numeric.LongIsIntegral -> Ordering.Long, + Numeric.BigDecimalAsIfIntegral -> Ordering.BigDecimal + ) + + @SerialVersionUID(3L) + private final class NumericRangeIterator[T](self: NumericRange[T], num: Integral[T]) extends AbstractIterator[T] with Serializable { + import num.mkNumericOps + + private[this] var _hasNext = !self.isEmpty + private[this] var _next: T = self.start + private[this] val lastElement: T = if (_hasNext) self.last else self.start + override def knownSize: Int = if (_hasNext) num.toInt((lastElement - _next) / self.step) + 1 else 0 + def hasNext: Boolean = _hasNext + def next(): T = { + if (!_hasNext) Iterator.empty.next() + val value = _next + _hasNext = value != lastElement + _next = num.plus(value, self.step) + value + } + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/Queue.scala b/tests/pos-special/stdlib/collection/immutable/Queue.scala new file mode 100644 index 000000000000..785cca4b7c3e --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/Queue.scala @@ -0,0 +1,218 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package immutable + +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable.{Builder, ListBuffer} +import language.experimental.captureChecking + +/** `Queue` objects implement data structures that allow inserting +  *  and retrieving elements in a first-in-first-out (FIFO) manner. +  * +  *  `Queue` is implemented as a pair of `List`s, one containing the ''in'' elements and the other the ''out'' elements. +  *  Elements are added to the ''in'' list and removed from the ''out'' list. When the ''out'' list runs dry, the +  *  queue is pivoted by replacing the ''out'' list by ''in.reverse'', and ''in'' by ''Nil''. +  * +  *  Adding items to the queue always has cost `O(1)`. Removing items has cost `O(1)`, except in the case +  *  where a pivot is required, in which case a cost of `O(n)` is incurred, where `n` is the number of elements in the queue. When this happens, +  *  `n` remove operations with `O(1)` cost are guaranteed. Removing an item is on average `O(1)`. +  * +  *  @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#immutable-queues "Scala's Collection Library overview"]] +  *  section on `Immutable Queues` for more information. +  * +  *  @define Coll `immutable.Queue` +  *  @define coll immutable queue +  *  @define mayNotTerminateInf +  *  @define willNotTerminateInf +  */ + +sealed class Queue[+A] protected(protected val in: List[A], protected val out: List[A]) +  extends AbstractSeq[A] +    with LinearSeq[A] +    with LinearSeqOps[A, Queue, Queue[A]] +    with StrictOptimizedLinearSeqOps[A, Queue, Queue[A]] +    with StrictOptimizedSeqOps[A, Queue, Queue[A]] +    with IterableFactoryDefaults[A, Queue] +    with DefaultSerializable { + +  override def iterableFactory: SeqFactory[Queue] = Queue + +  /** Returns the `n`-th element of this queue. +    *  The first element is at position `0`. +    * +    *  @param  n index of the element to return +    *  @return   the element at position `n` in this queue. +    *  @throws IndexOutOfBoundsException if the queue is too short. +    */ +  override def apply(n: Int): A = { +    def indexOutOfRange(): Nothing = throw new IndexOutOfBoundsException(n.toString) + +    var index = 0 +    var curr = out + +    while (index < n && curr.nonEmpty) { +      index += 1 +      curr = curr.tail +    } + +    if (index == n) { +      if (curr.nonEmpty) curr.head +      else if (in.nonEmpty) in.last +      else indexOutOfRange() +    } else { +      val indexFromBack = n - index +      val inLength = in.length +      if (indexFromBack >= inLength) indexOutOfRange() +      else in(inLength - indexFromBack - 1) +    } +  } + +  /** Returns the elements of the queue as an iterator. +    */ +  override def iterator: Iterator[A] = out.iterator.concat(in.reverse) + +  /** Checks if the queue is empty. +    * +    *  @return `true` iff there is no element in the queue. +    */ +  override def isEmpty: Boolean = in.isEmpty && out.isEmpty + +  override def head: A = +    if (out.nonEmpty) out.head +    else if (in.nonEmpty) in.last +    else throw new NoSuchElementException("head on empty queue") + +  override def tail: Queue[A] = +    if (out.nonEmpty) new Queue(in, out.tail) +    else if (in.nonEmpty) new Queue(Nil, in.reverse.tail) +    else throw new NoSuchElementException("tail on empty queue") + +  override def last: A = +    if (in.nonEmpty) in.head +    else if (out.nonEmpty) out.last +    else throw new NoSuchElementException("last on empty queue") + +  /* This is made to avoid inefficient implementation of iterator.
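+     Traversing `in` and `out` directly avoids the `in.reverse` that building the iterator would require.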
*/ +  override def forall(p: A => Boolean): Boolean = +    in.forall(p) && out.forall(p) + +  /* This is made to avoid inefficient implementation of iterator. */ +  override def exists(p: A => Boolean): Boolean = +    in.exists(p) || out.exists(p) + +  override protected[this] def className = "Queue" + +  /** Returns the length of the queue. */ +  override def length: Int = in.length + out.length + +  override def prepended[B >: A](elem: B): Queue[B] = new Queue(in, elem :: out) + +  override def appended[B >: A](elem: B): Queue[B] = enqueue(elem) + +  override def appendedAll[B >: A](that: scala.collection.IterableOnce[B]^): Queue[B] = { +    val newIn = that match { +      case that: Queue[B] => that.in ++ (that.out reverse_::: this.in) +      case that: List[B] => that reverse_::: this.in +      case _ => +        var result: List[B] = this.in +        val iter = that.iterator +        while (iter.hasNext) { +          result = iter.next() :: result +        } +        result +    } +    if (newIn eq this.in) this else new Queue[B](newIn, this.out) +  } + +  /** Creates a new queue with the given element added at the end +    *  of the old queue. +    * +    *  @param  elem        the element to insert +    */ +  def enqueue[B >: A](elem: B): Queue[B] = new Queue(elem :: in, out) + +  /** Creates a new queue with all elements provided by an `Iterable` object +    *  added at the end of the old queue. +    * +    *  The elements are appended in the order they are given out by the +    *  iterator. +    * +    *  @param  iter        an iterable object +    */ +  @deprecated("Use `enqueueAll` instead of `enqueue` to enqueue a collection of elements", "2.13.0") +  @`inline` final def enqueue[B >: A](iter: scala.collection.Iterable[B]) = enqueueAll(iter) + +  /** Creates a new queue with all elements provided by an `Iterable` object +    *  added at the end of the old queue. +    * +    *  The elements are appended in the order they are given out by the +    *  iterator. +    * +    *  @param  iter        an iterable object +    */ +  def enqueueAll[B >: A](iter: scala.collection.Iterable[B]): Queue[B] = appendedAll(iter) + +  /** Returns a tuple with the first element in the queue, +    *  and a new queue with this element removed. +    * +    *  @throws NoSuchElementException if the queue is empty +    *  @return the first element of the queue, paired with a queue of the remaining elements. +    */ +  def dequeue: (A, Queue[A]) = out match { +    case Nil if !in.isEmpty => val rev = in.reverse ; (rev.head, new Queue(Nil, rev.tail)) +    case x :: xs            => (x, new Queue(in, xs)) +    case _                  => throw new NoSuchElementException("dequeue on empty queue") +  } + +  /** Optionally retrieves the first element and a queue of the remaining elements. +    * +    * @return A tuple of the first element of the queue, and a new queue with this element removed. +    *         If the queue is empty, `None` is returned. +    */ +  def dequeueOption: Option[(A, Queue[A])] = if(isEmpty) None else Some(dequeue) + +  /** Returns the first element in the queue, or throws an error if there +    *  is no element contained in the queue. +    * +    *  @throws NoSuchElementException if the queue is empty +    *  @return the first element. +    */ +  def front: A = head + +  /** Returns a string representation of this queue.
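+    *  For example, `Queue(1, 2, 3).toString` yields `"Queue(1, 2, 3)"`.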
+ */ + override def toString(): String = mkString("Queue(", ", ", ")") +} + +/** $factoryInfo + * @define Coll `immutable.Queue` + * @define coll immutable queue + */ +@SerialVersionUID(3L) +object Queue extends StrictOptimizedSeqFactory[Queue] { + def newBuilder[A]: Builder[A, Queue[A]] = new ListBuffer[A] mapResult (x => new Queue[A](Nil, x)) + + def from[A](source: IterableOnce[A]^): Queue[A] = source match { + case q: Queue[A] => q + case _ => + val list = List.from(source) + if (list.isEmpty) empty + else new Queue(Nil, list) + } + + def empty[A]: Queue[A] = EmptyQueue + override def apply[A](xs: A*): Queue[A] = new Queue[A](Nil, xs.toList) + + private object EmptyQueue extends Queue[Nothing](Nil, Nil) { } +} diff --git a/tests/pos-special/stdlib/collection/immutable/Range.scala b/tests/pos-special/stdlib/collection/immutable/Range.scala new file mode 100644 index 000000000000..459591d1a9cb --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/Range.scala @@ -0,0 +1,673 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.immutable + +import scala.collection.Stepper.EfficientSplit +import scala.collection.convert.impl.RangeStepper +import scala.collection.{AbstractIterator, AnyStepper, IterableFactoryDefaults, Iterator, Stepper, StepperShape} +import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking + +/** The `Range` class represents integer values in range + * ''[start;end)'' with non-zero step value `step`. + * It's a special case of an indexed sequence. + * For example: + * + * {{{ + * val r1 = 0 until 10 + * val r2 = r1.start until r1.end by r1.step + 1 + * println(r2.length) // = 5 + * }}} + * + * Ranges that contain more than `Int.MaxValue` elements can be created, but + * these overfull ranges have only limited capabilities. Any method that + * could require a collection of over `Int.MaxValue` length to be created, or + * could be asked to index beyond `Int.MaxValue` elements will throw an + * exception. Overfull ranges can safely be reduced in size by changing + * the step size (e.g. `by 3`) or taking/dropping elements. `contains`, + * `equals`, and access to the ends of the range (`head`, `last`, `tail`, + * `init`) are also permitted on overfull ranges. + * + * @param start the start of this range. + * @param end the end of the range. For exclusive ranges, e.g. + * `Range(0,3)` or `(0 until 3)`, this is one + * step past the last one in the range. For inclusive + * ranges, e.g. `Range.inclusive(0,3)` or `(0 to 3)`, + * it may be in the range if it is not skipped by the step size. + * To find the last element inside a non-empty range, + * use `last` instead. + * @param step the step for the range. + * + * @define coll range + * @define mayNotTerminateInf + * @define willNotTerminateInf + * @define doesNotUseBuilders + * '''Note:''' this method does not use builders to construct a new range, + * and its complexity is O(1). 
+ */ +@SerialVersionUID(3L) +sealed abstract class Range( + val start: Int, + val end: Int, + val step: Int +) + extends AbstractSeq[Int] + with IndexedSeq[Int] + with IndexedSeqOps[Int, IndexedSeq, IndexedSeq[Int]] + with StrictOptimizedSeqOps[Int, IndexedSeq, IndexedSeq[Int]] + with IterableFactoryDefaults[Int, IndexedSeq] + with Serializable { range => + + final override def iterator: Iterator[Int] = new RangeIterator(start, step, lastElement, isEmpty) + + override final def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = { + val st = new RangeStepper(start, step, 0, length) + val r = + if (shape.shape == StepperShape.IntShape) st + else { + assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape") + AnyStepper.ofParIntStepper(st) + } + r.asInstanceOf[S with EfficientSplit] + } + + private[this] def gap = end.toLong - start.toLong + private[this] def isExact = gap % step == 0 + private[this] def hasStub = isInclusive || !isExact + private[this] def longLength = gap / step + ( if (hasStub) 1 else 0 ) + + def isInclusive: Boolean + + final override val isEmpty: Boolean = ( + (start > end && step > 0) + || (start < end && step < 0) + || (start == end && !isInclusive) + ) + + private[this] val numRangeElements: Int = { + if (step == 0) throw new IllegalArgumentException("step cannot be 0.") + else if (isEmpty) 0 + else { + val len = longLength + if (len > scala.Int.MaxValue) -1 + else len.toInt + } + } + + final def length = if (numRangeElements < 0) fail() else numRangeElements + + // This field has a sensible value only for non-empty ranges + private[this] val lastElement = step match { + case 1 => if (isInclusive) end else end-1 + case -1 => if (isInclusive) end else end+1 + case _ => + val remainder = (gap % step).toInt + if (remainder != 0) end - remainder + else if (isInclusive) end + else end - step + } + + /** The last element of this range. This method will return the correct value + * even if there are too many elements to iterate over. + */ + final override def last: Int = + if (isEmpty) throw Range.emptyRangeError("last") else lastElement + final override def head: Int = + if (isEmpty) throw Range.emptyRangeError("head") else start + + /** Creates a new range containing all the elements of this range except the last one. + * + * $doesNotUseBuilders + * + * @return a new range consisting of all the elements of this range except the last one. + */ + final override def init: Range = + if (isEmpty) throw Range.emptyRangeError("init") else dropRight(1) + + /** Creates a new range containing all the elements of this range except the first one. + * + * $doesNotUseBuilders + * + * @return a new range consisting of all the elements of this range except the first one. + */ + final override def tail: Range = { + if (isEmpty) throw Range.emptyRangeError("tail") + if (numRangeElements == 1) newEmptyRange(end) + else if(isInclusive) new Range.Inclusive(start + step, end, step) + else new Range.Exclusive(start + step, end, step) + } + + override def map[B](f: Int => B): IndexedSeq[B] = { + validateMaxLength() + super.map(f) + } + + final protected def copy(start: Int = start, end: Int = end, step: Int = step, isInclusive: Boolean = isInclusive): Range = + if(isInclusive) new Range.Inclusive(start, end, step) else new Range.Exclusive(start, end, step) + + /** Create a new range with the `start` and `end` values of this range and + * a new `step`. 
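+    *  For example, `(0 until 10) by 2` contains the elements 0, 2, 4, 6 and 8.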
+ * + * @return a new range with a different step + */ + final def by(step: Int): Range = copy(start, end, step) + + // Check cannot be evaluated eagerly because we have a pattern where + // ranges are constructed like: "x to y by z" The "x to y" piece + // should not trigger an exception. So the calculation is delayed, + // which means it will not fail fast for those cases where failing was + // correct. + private[this] def validateMaxLength(): Unit = { + if (numRangeElements < 0) + fail() + } + private[this] def description = "%d %s %d by %s".format(start, if (isInclusive) "to" else "until", end, step) + private[this] def fail() = throw new IllegalArgumentException(description + ": seqs cannot contain more than Int.MaxValue elements.") + + @throws[IndexOutOfBoundsException] + final def apply(idx: Int): Int = { + validateMaxLength() + if (idx < 0 || idx >= numRangeElements) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${numRangeElements-1})") + else start + (step * idx) + } + + /*@`inline`*/ final override def foreach[@specialized(Unit) U](f: Int => U): Unit = { + // Implementation chosen on the basis of favorable microbenchmarks + // Note--initialization catches step == 0 so we don't need to here + if (!isEmpty) { + var i = start + while (true) { + f(i) + if (i == lastElement) return + i += step + } + } + } + + override final def indexOf[@specialized(Int) B >: Int](elem: B, from: Int = 0): Int = + elem match { + case i: Int => + val pos = posOf(i) + if (pos >= from) pos else -1 + case _ => super.indexOf(elem, from) + } + + override final def lastIndexOf[@specialized(Int) B >: Int](elem: B, end: Int = length - 1): Int = + elem match { + case i: Int => + val pos = posOf(i) + if (pos <= end) pos else -1 + case _ => super.lastIndexOf(elem, end) + } + + private[this] def posOf(i: Int): Int = + if (contains(i)) (i - start) / step else -1 + + override def sameElements[B >: Int](that: IterableOnce[B]^): Boolean = that match { + case other: Range => + (this.length : @annotation.switch) match { + case 0 => other.isEmpty + case 1 => other.length == 1 && this.start == other.start + case n => other.length == n && ( + (this.start == other.start) + && (this.step == other.step) + ) + } + case _ => super.sameElements(that) + } + + /** Creates a new range containing the first `n` elements of this range. + * + * @param n the number of elements to take. + * @return a new range consisting of `n` first elements. + */ + final override def take(n: Int): Range = + if (n <= 0 || isEmpty) newEmptyRange(start) + else if (n >= numRangeElements && numRangeElements >= 0) this + else { + // May have more than Int.MaxValue elements in range (numRangeElements < 0) + // but the logic is the same either way: take the first n + new Range.Inclusive(start, locationAfterN(n - 1), step) + } + + /** Creates a new range containing all the elements of this range except the first `n` elements. + * + * @param n the number of elements to drop. + * @return a new range consisting of all the elements of this range except `n` first elements. + */ + final override def drop(n: Int): Range = + if (n <= 0 || isEmpty) this + else if (n >= numRangeElements && numRangeElements >= 0) newEmptyRange(end) + else { + // May have more than Int.MaxValue elements (numRangeElements < 0) + // but the logic is the same either way: go forwards n steps, keep the rest + copy(locationAfterN(n), end, step) + } + + /** Creates a new range consisting of the last `n` elements of the range. 
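+    *  For example, `(1 to 10).takeRight(3)` is the range `8 to 10`.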
+ * + * $doesNotUseBuilders + */ + final override def takeRight(n: Int): Range = { + if (n <= 0) newEmptyRange(start) + else if (numRangeElements >= 0) drop(numRangeElements - n) + else { + // Need to handle over-full range separately + val y = last + val x = y - step.toLong*(n-1) + if ((step > 0 && x < start) || (step < 0 && x > start)) this + else Range.inclusive(x.toInt, y, step) + } + } + + /** Creates a new range consisting of the initial `length - n` elements of the range. + * + * $doesNotUseBuilders + */ + final override def dropRight(n: Int): Range = { + if (n <= 0) this + else if (numRangeElements >= 0) take(numRangeElements - n) + else { + // Need to handle over-full range separately + val y = last - step.toInt*n + if ((step > 0 && y < start) || (step < 0 && y > start)) newEmptyRange(start) + else Range.inclusive(start, y.toInt, step) + } + } + + // Advance from the start while we meet the given test + private[this] def argTakeWhile(p: Int => Boolean): Long = { + if (isEmpty) start + else { + var current = start + val stop = last + while (current != stop && p(current)) current += step + if (current != stop || !p(current)) current + else current.toLong + step + } + } + + final override def takeWhile(p: Int => Boolean): Range = { + val stop = argTakeWhile(p) + if (stop==start) newEmptyRange(start) + else { + val x = (stop - step).toInt + if (x == last) this + else Range.inclusive(start, x, step) + } + } + + final override def dropWhile(p: Int => Boolean): Range = { + val stop = argTakeWhile(p) + if (stop == start) this + else { + val x = (stop - step).toInt + if (x == last) newEmptyRange(last) + else Range.inclusive(x + step, last, step) + } + } + + final override def span(p: Int => Boolean): (Range, Range) = { + val border = argTakeWhile(p) + if (border == start) (newEmptyRange(start), this) + else { + val x = (border - step).toInt + if (x == last) (this, newEmptyRange(last)) + else (Range.inclusive(start, x, step), Range.inclusive(x+step, last, step)) + } + } + + /** Creates a new range containing the elements starting at `from` up to but not including `until`. + * + * $doesNotUseBuilders + * + * @param from the element at which to start + * @param until the element at which to end (not included in the range) + * @return a new range consisting of a contiguous interval of values in the old range + */ + final override def slice(from: Int, until: Int): Range = + if (from <= 0) take(until) + else if (until >= numRangeElements && numRangeElements >= 0) drop(from) + else { + val fromValue = locationAfterN(from) + if (from >= until) newEmptyRange(fromValue) + else Range.inclusive(fromValue, locationAfterN(until-1), step) + } + + // Overridden only to refine the return type + final override def splitAt(n: Int): (Range, Range) = (take(n), drop(n)) + + // Methods like apply throw exceptions on invalid n, but methods like take/drop + // are forgiving: therefore the checks are with the methods. + private[this] def locationAfterN(n: Int) = start + (step * n) + + // When one drops everything. Can't ever have unchecked operations + // like "end + 1" or "end - 1" because ranges involving Int.{ MinValue, MaxValue } + // will overflow. This creates an exclusive range where start == end + // based on the given value. + private[this] def newEmptyRange(value: Int) = new Range.Exclusive(value, value, step) + + /** Returns the reverse of this range. + */ + final override def reverse: Range = + if (isEmpty) this + else new Range.Inclusive(last, start, -step) + + /** Make range inclusive. 
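+    *  For example, `(0 until 3).inclusive` is equivalent to `0 to 3`.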
+ */ + final def inclusive: Range = + if (isInclusive) this + else new Range.Inclusive(start, end, step) + + final def contains(x: Int): Boolean = { + if (x == end && !isInclusive) false + else if (step > 0) { + if (x < start || x > end) false + else (step == 1) || (Integer.remainderUnsigned(x - start, step) == 0) + } + else { + if (x < end || x > start) false + else (step == -1) || (Integer.remainderUnsigned(start - x, -step) == 0) + } + } + /* Seq#contains has a type parameter so the optimised contains above doesn't override it */ + override final def contains[B >: Int](elem: B): Boolean = elem match { + case i: Int => this.contains(i) + case _ => super.contains(elem) + } + + final override def sum[B >: Int](implicit num: Numeric[B]): Int = { + if (num eq scala.math.Numeric.IntIsIntegral) { + // this is normal integer range with usual addition. arithmetic series formula can be used + if (isEmpty) 0 + else if (size == 1) head + else ((size * (head.toLong + last)) / 2).toInt + } else { + // user provided custom Numeric, we cannot rely on arithmetic series formula + if (isEmpty) num.toInt(num.zero) + else { + var acc = num.zero + var i = head + while (true) { + acc = num.plus(acc, i) + if (i == lastElement) return num.toInt(acc) + i = i + step + } + 0 // Never hit this--just to satisfy compiler since it doesn't know while(true) has type Nothing + } + } + } + + final override def min[A1 >: Int](implicit ord: Ordering[A1]): Int = + if (ord eq Ordering.Int) { + if (step > 0) head + else last + } else if (Ordering.Int isReverseOf ord) { + if (step > 0) last + else head + } else super.min(ord) + + final override def max[A1 >: Int](implicit ord: Ordering[A1]): Int = + if (ord eq Ordering.Int) { + if (step > 0) last + else head + } else if (Ordering.Int isReverseOf ord) { + if (step > 0) head + else last + } else super.max(ord) + + override def tails: Iterator[Range] = + new AbstractIterator[Range] { + private[this] var i = 0 + override def hasNext = i <= Range.this.length + override def next() = { + if (hasNext) { + val res = Range.this.drop(i) + i += 1 + res + } else { + Iterator.empty.next() + } + } + } + + override def inits: Iterator[Range] = + new AbstractIterator[Range] { + private[this] var i = 0 + override def hasNext = i <= Range.this.length + override def next() = { + if (hasNext) { + val res = Range.this.dropRight(i) + i += 1 + res + } else { + Iterator.empty.next() + } + } + } + override protected final def applyPreferredMaxLength: Int = Int.MaxValue + + final override def equals(other: Any): Boolean = other match { + case x: Range => + // Note: this must succeed for overfull ranges (length > Int.MaxValue) + if (isEmpty) x.isEmpty // empty sequences are equal + else // this is non-empty... 
+ x.nonEmpty && start == x.start && { // ...so other must contain something and have same start + val l0 = last + (l0 == x.last && ( // And same end + start == l0 || step == x.step // And either the same step, or not take any steps + )) + } + case _ => + super.equals(other) + } + + final override def hashCode: Int = + if(length >= 2) MurmurHash3.rangeHash(start, step, lastElement) + else super.hashCode + + final override def toString: String = { + val preposition = if (isInclusive) "to" else "until" + val stepped = if (step == 1) "" else s" by $step" + val prefix = if (isEmpty) "empty " else if (!isExact) "inexact " else "" + s"${prefix}Range $start $preposition $end$stepped" + } + + override protected[this] def className = "Range" + + override def distinct: Range = this + + override def grouped(size: Int): Iterator[Range] = { + require(size >= 1, f"size=$size%d, but size must be positive") + if (isEmpty) { + Iterator.empty + } else { + val s = size + new AbstractIterator[Range] { + private[this] var i = 0 + override def hasNext = Range.this.length > i + override def next() = + if (hasNext) { + val x = Range.this.slice(i, i + s) + i += s + x + } else { + Iterator.empty.next() + } + } + } + } + + override def sorted[B >: Int](implicit ord: Ordering[B]): IndexedSeq[Int] = + if (ord eq Ordering.Int) { + if (step > 0) { + this + } else { + reverse + } + } else { + super.sorted(ord) + } +} + +/** + * Companion object for ranges. + * @define Coll `Range` + * @define coll range + */ +object Range { + + /** Counts the number of range elements. + * precondition: step != 0 + * If the size of the range exceeds Int.MaxValue, the + * result will be negative. + */ + def count(start: Int, end: Int, step: Int, isInclusive: Boolean): Int = { + if (step == 0) + throw new IllegalArgumentException("step cannot be 0.") + + val isEmpty = + if (start == end) !isInclusive + else if (start < end) step < 0 + else step > 0 + + if (isEmpty) 0 + else { + // Counts with Longs so we can recognize too-large ranges. + val gap: Long = end.toLong - start.toLong + val jumps: Long = gap / step + // Whether the size of this range is one larger than the + // number of full-sized jumps. + val hasStub = isInclusive || (gap % step != 0) + val result: Long = jumps + ( if (hasStub) 1 else 0 ) + + if (result > scala.Int.MaxValue) -1 + else result.toInt + } + } + def count(start: Int, end: Int, step: Int): Int = + count(start, end, step, isInclusive = false) + + /** Make a range from `start` until `end` (exclusive) with given step value. + * @note step != 0 + */ + def apply(start: Int, end: Int, step: Int): Range.Exclusive = new Range.Exclusive(start, end, step) + + /** Make a range from `start` until `end` (exclusive) with step value 1. + */ + def apply(start: Int, end: Int): Range.Exclusive = new Range.Exclusive(start, end, 1) + + /** Make an inclusive range from `start` to `end` with given step value. + * @note step != 0 + */ + def inclusive(start: Int, end: Int, step: Int): Range.Inclusive = new Range.Inclusive(start, end, step) + + /** Make an inclusive range from `start` to `end` with step value 1. 
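+    *  For example, `Range.inclusive(1, 3)` is equivalent to `1 to 3`.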
+ */ + def inclusive(start: Int, end: Int): Range.Inclusive = new Range.Inclusive(start, end, 1) + + @SerialVersionUID(3L) + final class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) { + def isInclusive: Boolean = true + } + + @SerialVersionUID(3L) + final class Exclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) { + def isInclusive: Boolean = false + } + + // BigInt and Long are straightforward generic ranges. + object BigInt { + def apply(start: BigInt, end: BigInt, step: BigInt): NumericRange.Exclusive[BigInt] = NumericRange(start, end, step) + def inclusive(start: BigInt, end: BigInt, step: BigInt): NumericRange.Inclusive[BigInt] = NumericRange.inclusive(start, end, step) + } + + object Long { + def apply(start: Long, end: Long, step: Long): NumericRange.Exclusive[Long] = NumericRange(start, end, step) + def inclusive(start: Long, end: Long, step: Long): NumericRange.Inclusive[Long] = NumericRange.inclusive(start, end, step) + } + + // BigDecimal uses an alternative implementation of Numeric in which + // it pretends to be Integral[T] instead of Fractional[T]. See Numeric for + // details. The intention is for it to throw an exception anytime + // imprecision or surprises might result from anything, although this may + // not yet be fully implemented. + object BigDecimal { + implicit val bigDecAsIntegral: Numeric.BigDecimalAsIfIntegral = Numeric.BigDecimalAsIfIntegral + + def apply(start: BigDecimal, end: BigDecimal, step: BigDecimal): NumericRange.Exclusive[BigDecimal] = + NumericRange(start, end, step) + def inclusive(start: BigDecimal, end: BigDecimal, step: BigDecimal): NumericRange.Inclusive[BigDecimal] = + NumericRange.inclusive(start, end, step) + } + + // As there is no appealing default step size for not-really-integral ranges, + // we offer a partially constructed object. + class Partial[T, U](private val f: T -> U) extends AnyVal { + def by(x: T): U = f(x) + override def toString = "Range requires step" + } + + // Illustrating genericity with Int Range, which should have the same behavior + // as the original Range class. However we leave the original Range + // indefinitely, for performance and because the compiler seems to bootstrap + // off it and won't do so with our parameterized version without modifications. 
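+  // For example, both of the following describe the elements 1, 4 and 7; the former as a
+  // Range, the latter as a NumericRange[Int]:
+  //   Range(1, 10, 3)
+  //   Range.Int(1, 10, 3)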
+  object Int {
+    def apply(start: Int, end: Int, step: Int): NumericRange.Exclusive[Int] = NumericRange(start, end, step)
+    def inclusive(start: Int, end: Int, step: Int): NumericRange.Inclusive[Int] = NumericRange.inclusive(start, end, step)
+  }
+
+  private def emptyRangeError(what: String): Throwable =
+    new NoSuchElementException(what + " on empty Range")
+}
+
+/**
+  * @param lastElement The last element included in the Range
+  * @param initiallyEmpty Whether the Range was initially empty or not
+  */
+@SerialVersionUID(3L)
+private class RangeIterator(
+  start: Int,
+  step: Int,
+  lastElement: Int,
+  initiallyEmpty: Boolean
+) extends AbstractIterator[Int] with Serializable {
+  private[this] var _hasNext: Boolean = !initiallyEmpty
+  private[this] var _next: Int = start
+  override def knownSize: Int = if (_hasNext) (lastElement - _next) / step + 1 else 0
+  def hasNext: Boolean = _hasNext
+  @throws[NoSuchElementException]
+  def next(): Int = {
+    if (!_hasNext) Iterator.empty.next()
+    val value = _next
+    _hasNext = value != lastElement
+    _next = value + step
+    value
+  }
+
+  override def drop(n: Int): Iterator[Int] = {
+    if (n > 0) {
+      val longPos = _next.toLong + step * n
+      if (step > 0) {
+        _next = Math.min(lastElement, longPos).toInt
+        _hasNext = longPos <= lastElement
+      }
+      else if (step < 0) {
+        _next = Math.max(lastElement, longPos).toInt
+        _hasNext = longPos >= lastElement
+      }
+    }
+    this
+  }
+}
diff --git a/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala b/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala
new file mode 100644
index 000000000000..69e93093f60c
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala
@@ -0,0 +1,1233 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+
+import scala.annotation.meta.{getter, setter}
+import scala.annotation.tailrec
+import scala.runtime.Statics.releaseFence
+import language.experimental.captureChecking
+
+/** An object containing the red-black tree implementation used by `TreeMap` and `TreeSet`.
+ *
+ * Implementation note: since efficiency is important for data structures this implementation
+ * uses `null` to represent empty trees. This also means pattern matching cannot
+ * easily be used. The API represented by the RedBlackTree object tries to hide these
+ * optimizations behind a reasonably clean API.
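+ *
+ * All bulk operations defined here (`update`, `delete`, `range`, `drop`, `take`, `union`,
+ * `intersect`, `difference`, ...) blacken their result, so trees returned to callers
+ * always have a black root.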
+ */ +private[collection] object RedBlackTree { + + def isEmpty(tree: Tree[_, _]): Boolean = tree eq null + + def contains[A: Ordering](tree: Tree[A, _], x: A): Boolean = lookup(tree, x) ne null + def get[A: Ordering, B](tree: Tree[A, B], x: A): Option[B] = lookup(tree, x) match { + case null => None + case tree => Some(tree.value) + } + + @tailrec + def lookup[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { + val cmp = ordering.compare(x, tree.key) + if (cmp < 0) lookup(tree.left, x) + else if (cmp > 0) lookup(tree.right, x) + else tree + } + private[immutable] abstract class Helper[A](implicit val ordering: Ordering[A]) { + def beforePublish[B](tree: Tree[A, B]): Tree[A, B] = { + if (tree eq null) tree + else if (tree.isMutable) { + val res = tree.mutableBlack.makeImmutable + releaseFence() + res + } else tree.black + } + /** Create a new balanced tree where `newLeft` replaces `tree.left`. + * tree and newLeft are never null */ + protected[this] final def mutableBalanceLeft[A1, B, B1 >: B](tree: Tree[A1, B], newLeft: Tree[A1, B1]): Tree[A1, B1] = { + // Parameter trees + // tree | newLeft + // -- KV R | nl.L nl.KV nl.R + // | nl.R.L nl.R.KV nl.R.R + //Note - unlike the immutable trees we can't consider tree.left eq newLeft + //as the balance operations may mutate the same object + //but that check was mostly to avoid the object creation + if (newLeft.isRed) { + val newLeft_left = newLeft.left + val newLeft_right = newLeft.right + if (isRedTree(newLeft_left)) { + // RED + // black(nl.L) nl.KV black + // nl.R KV R + val resultLeft = newLeft_left.mutableBlack + val resultRight = tree.mutableBlackWithLeft(newLeft_right) + + newLeft.mutableWithLeftRight(resultLeft, resultRight) + } else if (isRedTree(newLeft_right)) { + // RED + // black nl.R.KV black + // nl.L nl.KV nl.R.L nl.R.R KV R + + val newLeft_right_right = newLeft_right.right + + val resultLeft = newLeft.mutableBlackWithRight(newLeft_right.left) + val resultRight = tree.mutableBlackWithLeft(newLeft_right_right) + + newLeft_right.mutableWithLeftRight(resultLeft, resultRight) + } else { + // tree + // newLeft KV R + tree.mutableWithLeft(newLeft) + } + } else { + // tree + // newLeft KV R + tree.mutableWithLeft(newLeft) + } + } + /** Create a new balanced tree where `newRight` replaces `tree.right`. 
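+     * (This is the mirror image of `mutableBalanceLeft` above.)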
+ * tree and newRight are never null */ + protected[this] final def mutableBalanceRight[A1, B, B1 >: B](tree: Tree[A1, B], newRight: Tree[A1, B1]): Tree[A1, B1] = { + // Parameter trees + // tree | newRight + // L KV -- | nr.L nr.KV nr.R + // | nr.L.L nr.L.KV nr.L.R + //Note - unlike the immutable trees we can't consider tree.right eq newRight + //as the balance operations may mutate the same object + //but that check was mostly to avoid the object creation + if (newRight.isRed) { + val newRight_left = newRight.left + if (isRedTree(newRight_left)) { + // RED + // black nr.L.KV black + // L KV nr.L.L nr.L.R nr.KV nr.R + + val resultLeft = tree.mutableBlackWithRight(newRight_left.left) + val resultRight = newRight.mutableBlackWithLeft(newRight_left.right) + + newRight_left.mutableWithLeftRight(resultLeft, resultRight) + + } else { + val newRight_right = newRight.right + if (isRedTree(newRight_right)) { + // RED + // black nr.KV black(nr.R) + // L KV nr.L + + val resultLeft = tree.mutableBlackWithRight(newRight_left) + val resultRight = newRight_right.mutableBlack + + newRight.mutableWithLeftRight(resultLeft, resultRight) + } else { + // tree + // L KV newRight + tree.mutableWithRight(newRight) + } + } + } else { + // tree + // L KV newRight + tree.mutableWithRight(newRight) + } + } + } + private[immutable] class SetHelper[A](implicit ordering: Ordering[A]) extends Helper[A] { + protected[this] final def mutableUpd(tree: Tree[A, Any], k: A): Tree[A, Any] = + if (tree eq null) { + mutableRedTree(k, (), null, null) + } else if (k.asInstanceOf[AnyRef] eq tree.key.asInstanceOf[AnyRef]) { + tree + } else { + val cmp = ordering.compare(k, tree.key) + if (cmp < 0) + mutableBalanceLeft(tree, mutableUpd(tree.left, k)) + else if (cmp > 0) + mutableBalanceRight(tree, mutableUpd(tree.right, k)) + else tree + } + } + private[immutable] class MapHelper[A, B](implicit ordering: Ordering[A]) extends Helper[A] { + protected[this] final def mutableUpd[B1 >: B](tree: Tree[A, B], k: A, v: B1): Tree[A, B1] = + if (tree eq null) { + mutableRedTree(k, v, null, null) + } else if (k.asInstanceOf[AnyRef] eq tree.key.asInstanceOf[AnyRef]) { + tree.mutableWithV(v) + } else { + val cmp = ordering.compare(k, tree.key) + if (cmp < 0) + mutableBalanceLeft(tree, mutableUpd(tree.left, k, v)) + else if (cmp > 0) + mutableBalanceRight(tree, mutableUpd(tree.right, k, v)) + else tree.mutableWithV(v) + } + } + + def count(tree: Tree[_, _]) = if (tree eq null) 0 else tree.count + def update[A: Ordering, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean): Tree[A, B1] = blacken(upd(tree, k, v, overwrite)) + def delete[A: Ordering, B](tree: Tree[A, B], k: A): Tree[A, B] = blacken(del(tree, k)) + def rangeImpl[A: Ordering, B](tree: Tree[A, B], from: Option[A], until: Option[A]): Tree[A, B] = (from, until) match { + case (Some(from), Some(until)) => this.range(tree, from, until) + case (Some(from), None) => this.from(tree, from) + case (None, Some(until)) => this.until(tree, until) + case (None, None) => tree + } + def range[A: Ordering, B](tree: Tree[A, B], from: A, until: A): Tree[A, B] = blacken(doRange(tree, from, until)) + def from[A: Ordering, B](tree: Tree[A, B], from: A): Tree[A, B] = blacken(doFrom(tree, from)) + def to[A: Ordering, B](tree: Tree[A, B], to: A): Tree[A, B] = blacken(doTo(tree, to)) + def until[A: Ordering, B](tree: Tree[A, B], key: A): Tree[A, B] = blacken(doUntil(tree, key)) + + def drop[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = blacken(doDrop(tree, n)) + def take[A: Ordering, B](tree: 
Tree[A, B], n: Int): Tree[A, B] = blacken(doTake(tree, n)) + def slice[A: Ordering, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = blacken(doSlice(tree, from, until)) + + def smallest[A, B](tree: Tree[A, B]): Tree[A, B] = { + if (tree eq null) throw new NoSuchElementException("empty tree") + var result = tree + while (result.left ne null) result = result.left + result + } + def greatest[A, B](tree: Tree[A, B]): Tree[A, B] = { + if (tree eq null) throw new NoSuchElementException("empty tree") + var result = tree + while (result.right ne null) result = result.right + result + } + + def tail[A, B](tree: Tree[A, B]): Tree[A, B] = { + def _tail(tree: Tree[A, B]): Tree[A, B] = + if (tree eq null) throw new NoSuchElementException("empty tree") + else { + val tl = tree.left + if (tl eq null) tree.right + else if (tl.isBlack) balLeft(tree, _tail(tl), tree.right) + else tree.redWithLeft(_tail(tree.left)) + } + blacken(_tail(tree)) + } + + def init[A, B](tree: Tree[A, B]): Tree[A, B] = { + def _init(tree: Tree[A, B]): Tree[A, B] = + if (tree eq null) throw new NoSuchElementException("empty tree") + else { + val tr = tree.right + if (tr eq null) tree.left + else if (tr.isBlack) balRight(tree, tree.left, _init(tr)) + else tree.redWithRight(_init(tr)) + } + blacken(_init(tree)) + } + + /** + * Returns the smallest node with a key larger than or equal to `x`. Returns `null` if there is no such node. + */ + def minAfter[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { + val cmp = ordering.compare(x, tree.key) + if (cmp == 0) tree + else if (cmp < 0) { + val l = minAfter(tree.left, x) + if (l != null) l else tree + } else minAfter(tree.right, x) + } + + /** + * Returns the largest node with a key smaller than `x`. Returns `null` if there is no such node. 
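+   * This is the strict predecessor of `x`; `minAfter` above gives the smallest key greater than or equal to `x`.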
+ */ + def maxBefore[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { + val cmp = ordering.compare(x, tree.key) + if (cmp <= 0) maxBefore(tree.left, x) + else { + val r = maxBefore(tree.right, x) + if (r != null) r else tree + } + } + + def foreach[A,B,U](tree:Tree[A,B], f:((A,B)) => U):Unit = if (tree ne null) _foreach(tree,f) + + def keysEqual[A: Ordering, X, Y](a: Tree[A, X], b: Tree[A, Y]): Boolean = { + if (a eq b) true + else if (a eq null) false + else if (b eq null) false + else a.count == b.count && (new EqualsIterator(a)).sameKeys(new EqualsIterator(b)) + } + def valuesEqual[A: Ordering, X, Y](a: Tree[A, X], b: Tree[A, Y]): Boolean = { + if (a eq b) true + else if (a eq null) false + else if (b eq null) false + else a.count == b.count && (new EqualsIterator(a)).sameValues(new EqualsIterator(b)) + } + def entriesEqual[A: Ordering, X, Y](a: Tree[A, X], b: Tree[A, Y]): Boolean = { + if (a eq b) true + else if (a eq null) false + else if (b eq null) false + else a.count == b.count && (new EqualsIterator(a)).sameEntries(new EqualsIterator(b)) + } + + private[this] def _foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = { + if (tree.left ne null) _foreach(tree.left, f) + f((tree.key, tree.value)) + if (tree.right ne null) _foreach(tree.right, f) + } + + def foreachKey[A, U](tree:Tree[A,_], f: A => U):Unit = if (tree ne null) _foreachKey(tree,f) + + private[this] def _foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = { + if (tree.left ne null) _foreachKey(tree.left, f) + f((tree.key)) + if (tree.right ne null) _foreachKey(tree.right, f) + } + + def foreachEntry[A, B, U](tree:Tree[A,B], f: (A, B) => U):Unit = if (tree ne null) _foreachEntry(tree,f) + + private[this] def _foreachEntry[A, B, U](tree: Tree[A, B], f: (A, B) => U): Unit = { + if (tree.left ne null) _foreachEntry(tree.left, f) + f(tree.key, tree.value) + if (tree.right ne null) _foreachEntry(tree.right, f) + } + + def iterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[(A, B)] = new EntriesIterator(tree, start) + def keysIterator[A: Ordering](tree: Tree[A, _], start: Option[A] = None): Iterator[A] = new KeysIterator(tree, start) + def valuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[B] = new ValuesIterator(tree, start) + + @tailrec + def nth[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = { + val count = this.count(tree.left) + if (n < count) nth(tree.left, n) + else if (n > count) nth(tree.right, n - count - 1) + else tree + } + + def isBlack(tree: Tree[_, _]) = (tree eq null) || tree.isBlack + + @`inline` private[this] def isRedTree(tree: Tree[_, _]) = (tree ne null) && tree.isRed + @`inline` private[this] def isBlackTree(tree: Tree[_, _]) = (tree ne null) && tree.isBlack + + private[this] def blacken[A, B](t: Tree[A, B]): Tree[A, B] = if (t eq null) null else t.black + + // Blacken if the tree is red and has a red child. This is necessary when using methods such as `upd` or `updNth` + // for building subtrees. Use `blacken` instead when building top-level trees. 
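+  // (Rationale: `upd`/`updNth` may return a red node with a red child, which is acceptable while a
+  // subtree is being rebuilt but would violate the no-red-red invariant if linked under a red parent.)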
+ private[this] def maybeBlacken[A, B](t: Tree[A, B]): Tree[A, B] = + if(isBlack(t)) t else if(isRedTree(t.left) || isRedTree(t.right)) t.black else t + + private[this] def mkTree[A, B](isBlack: Boolean, key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = { + val sizeAndColour = sizeOf(left) + sizeOf(right) + 1 | (if(isBlack) initialBlackCount else initialRedCount) + new Tree(key, value.asInstanceOf[AnyRef], left, right, sizeAndColour) + } + + /** Create a new balanced tree where `newLeft` replaces `tree.left`. */ + private[this] def balanceLeft[A, B1](tree: Tree[A, B1], newLeft: Tree[A, B1]): Tree[A, B1] = { + // Parameter trees + // tree | newLeft + // -- KV R | nl.L nl.KV nl.R + // | nl.R.L nl.R.KV nl.R.R + if (tree.left eq newLeft) tree + else { + if (newLeft.isRed) { + val newLeft_left = newLeft.left + val newLeft_right = newLeft.right + if (isRedTree(newLeft_left)) { + // RED + // black(nl.L) nl.KV black + // nl.R KV R + val resultLeft = newLeft_left.black + val resultRight = tree.blackWithLeft(newLeft_right) + + newLeft.withLeftRight(resultLeft, resultRight) + } else if (isRedTree(newLeft_right)) { + // RED + // black nl.R.KV black + // nl.L nl.KV nl.R.L nl.R.R KV R + val newLeft_right_right = newLeft_right.right + + val resultLeft = newLeft.blackWithRight(newLeft_right.left) + val resultRight = tree.blackWithLeft(newLeft_right_right) + + newLeft_right.withLeftRight(resultLeft, resultRight) + } else { + // tree + // newLeft KV R + tree.withLeft(newLeft) + } + } else { + // tree + // newLeft KV R + tree.withLeft(newLeft) + } + } + } + /** Create a new balanced tree where `newRight` replaces `tree.right`. */ + private[this] def balanceRight[A, B1](tree: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { + // Parameter trees + // tree | newRight + // L KV -- | nr.L nr.KV nr.R + // | nr.L.L nr.L.KV nr.L.R + if (tree.right eq newRight) tree + else { + if (newRight.isRed) { + val newRight_left = newRight.left + if (isRedTree(newRight_left)) { + // RED + // black nr.L.KV black + // L KV nr.L.L nr.L.R nr.KV nr.R + val resultLeft = tree.blackWithRight(newRight_left.left) + val resultRight = newRight.blackWithLeft(newRight_left.right) + + newRight_left.withLeftRight(resultLeft, resultRight) + } else { + val newRight_right = newRight.right + if (isRedTree(newRight_right)) { + // RED + // black nr.KV black(nr.R) + // L KV nr.L + val resultLeft = tree.blackWithRight(newRight_left) + val resultRight = newRight_right.black + + newRight.withLeftRight(resultLeft, resultRight) + } else { + // tree + // L KV newRight + tree.withRight(newRight) + } + } + } else { + // tree + // L KV newRight + tree.withRight(newRight) + } + } + } + + private[this] def upd[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean)(implicit ordering: Ordering[A]): Tree[A, B1] = if (tree eq null) { + RedTree(k, v, null, null) + } else if (k.asInstanceOf[AnyRef] eq tree.key.asInstanceOf[AnyRef]) { + if (overwrite) + tree.withV(v) + else tree + } else { + val cmp = ordering.compare(k, tree.key) + if (cmp < 0) + balanceLeft(tree, upd(tree.left, k, v, overwrite)) + else if (cmp > 0) + balanceRight(tree, upd(tree.right, k, v, overwrite)) + else if (overwrite && (v.asInstanceOf[AnyRef] ne tree.value.asInstanceOf[AnyRef])) + tree.withV(v) + else tree + } + private[this] def updNth[A, B, B1 >: B](tree: Tree[A, B], idx: Int, k: A, v: B1): Tree[A, B1] = if (tree eq null) { + RedTree(k, v, null, null) + } else { + val rank = count(tree.left) + 1 + if (idx < rank) + balanceLeft(tree, updNth(tree.left, idx, k, v)) + else 
if (idx > rank)
+      balanceRight(tree, updNth(tree.right, idx - rank, k, v))
+    else tree
+  }
+
+  private[this] def doFrom[A, B](tree: Tree[A, B], from: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
+    if (tree eq null) return null
+    if (ordering.lt(tree.key, from)) return doFrom(tree.right, from)
+    val newLeft = doFrom(tree.left, from)
+    if (newLeft eq tree.left) tree
+    else if (newLeft eq null) upd(tree.right, tree.key, tree.value, overwrite = false)
+    else join(newLeft, tree.key, tree.value, tree.right)
+  }
+  private[this] def doTo[A, B](tree: Tree[A, B], to: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
+    if (tree eq null) return null
+    if (ordering.lt(to, tree.key)) return doTo(tree.left, to)
+    val newRight = doTo(tree.right, to)
+    if (newRight eq tree.right) tree
+    else if (newRight eq null) upd(tree.left, tree.key, tree.value, overwrite = false)
+    else join(tree.left, tree.key, tree.value, newRight)
+  }
+  private[this] def doUntil[A, B](tree: Tree[A, B], until: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
+    if (tree eq null) return null
+    if (ordering.lteq(until, tree.key)) return doUntil(tree.left, until)
+    val newRight = doUntil(tree.right, until)
+    if (newRight eq tree.right) tree
+    else if (newRight eq null) upd(tree.left, tree.key, tree.value, overwrite = false)
+    else join(tree.left, tree.key, tree.value, newRight)
+  }
+
+  private[this] def doRange[A, B](tree: Tree[A, B], from: A, until: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
+    if (tree eq null) return null
+    if (ordering.lt(tree.key, from)) return doRange(tree.right, from, until)
+    if (ordering.lteq(until, tree.key)) return doRange(tree.left, from, until)
+    val newLeft = doFrom(tree.left, from)
+    val newRight = doUntil(tree.right, until)
+    if ((newLeft eq tree.left) && (newRight eq tree.right)) tree
+    else if (newLeft eq null) upd(newRight, tree.key, tree.value, overwrite = false)
+    else if (newRight eq null) upd(newLeft, tree.key, tree.value, overwrite = false)
+    else join(newLeft, tree.key, tree.value, newRight)
+  }
+
+  private[this] def doDrop[A, B](tree: Tree[A, B], n: Int): Tree[A, B] =
+    if((tree eq null) || (n <= 0)) tree
+    else if(n >= tree.count) null
+    else {
+      val l = count(tree.left)
+      if(n > l) doDrop(tree.right, n-l-1)
+      else if(n == l) join(null, tree.key, tree.value, tree.right)
+      else join(doDrop(tree.left, n), tree.key, tree.value, tree.right)
+    }
+
+  private[this] def doTake[A, B](tree: Tree[A, B], n: Int): Tree[A, B] =
+    if((tree eq null) || (n <= 0)) null
+    else if(n >= tree.count) tree
+    else {
+      val l = count(tree.left)
+      if(n <= l) doTake(tree.left, n)
+      else if(n == l+1) maybeBlacken(updNth(tree.left, n, tree.key, tree.value))
+      else join(tree.left, tree.key, tree.value, doTake(tree.right, n-l-1))
+    }
+
+  private[this] def doSlice[A, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] =
+    if((tree eq null) || (from >= until) || (from >= tree.count) || (until <= 0)) null
+    else if((from <= 0) && (until >= tree.count)) tree
+    else {
+      val l = count(tree.left)
+      if(until <= l) doSlice(tree.left, from, until)
+      else if(from > l) doSlice(tree.right, from-l-1, until-l-1)
+      else join(doDrop(tree.left, from), tree.key, tree.value, doTake(tree.right, until-l-1))
+    }
+
+  /*
+   * Forcing direct field access using the @`inline` annotation helps speed up
+   * various operations (especially smallest/greatest and update/delete).
+   *
+   * Unfortunately the direct field access is not guaranteed to work (but
+   * works on the current implementation of the Scala compiler).
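+   * (The @getter/@setter meta-annotations below direct the `@inline` annotation at the
+   * generated accessors rather than at the underlying fields.)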
+   *
+   * An alternative is to implement these classes using plain old Java code...
+   *
+   * Mutability
+   * This implementation encodes both mutable and immutable trees.
+   * Mutable trees are never exposed to user code, but we get significant reductions in both CPU and allocations
+   * by maintaining a mutable tree during internal operations, e.g. while a builder is building a Tree, and in
+   * other bulk APIs such as filter or ++.
+   *
+   * Mutable trees are only used within the confines of a single bulk operation and are not shared.
+   * Mutable trees may transition to become immutable by calling beforePublish.
+   * Mutable trees may have child nodes (left and right) which are immutable Trees (this promotes structural sharing).
+   *
+   * Immutable trees may only have child nodes (left and right) which are immutable Trees, so for an immutable
+   * tree the entire transitive subtree is immutable.
+   *
+   * Colour, mutability and size encoding
+   * The colour of the Tree, its mutability and its size are all encoded in the _count field.
+   * The colour is encoded in the top bit (31) of _count. This allows a mutable tree to change colour without
+   * additional allocation.
+   * Mutable trees always have bits 0 .. 30 (inclusive) set to 0.
+   * Immutable trees always have bits 0 .. 30 containing the size of the transitive subtree.
+   * For example, an immutable black tree of size 5 stores _count = 0x80000005 and an immutable red tree of
+   * size 5 stores _count = 0x00000005; their mutable counterparts store 0x80000000 and 0x00000000 respectively.
+   *
+   * Naming
+   * All of the methods that can yield a mutable result have "mutable" in their name, and generally there
+   * is another, similarly named method which doesn't. This is to aid safety and to reduce the cognitive load when
+   * reviewing changes. e.g.
+   * def upd(...) will update an immutable Tree, producing an immutable Tree
+   * def mutableUpd(...) will update a mutable or immutable Tree and may return a mutable or immutable Tree
+   * a method that has "mutable" in its name may still return an immutable tree if the operation can reuse the existing tree
+   *
+   */
+  private[immutable] final class Tree[A, +B](
+    @(`inline` @getter @setter)     private var _key: A,
+    @(`inline` @getter @setter)     private var _value: AnyRef,
+    @(`inline` @getter @setter)     private var _left: Tree[A, _],
+    @(`inline` @getter @setter)     private var _right: Tree[A, _],
+    @(`inline` @getter @setter)     private var _count: Int)
+  {
+    @`inline` private[RedBlackTree] def isMutable: Boolean = (_count & colourMask) == 0
+    // read-only APIs
+    @`inline` private[RedBlackTree] final def count = {
+      //devTimeAssert((_count & 0x7FFFFFFF) != 0)
+      _count & colourMask
+    }
+    //retain the colour, and mark as mutable
+    @`inline` private def mutableRetainingColour = _count & colourBit
+
+    //inlined here to avoid outer object null checks
+    @`inline` private[RedBlackTree] final def sizeOf(tree: Tree[_,_]) = if (tree eq null) 0 else tree.count
+    @`inline` private[immutable] final def key = _key
+    @`inline` private[immutable] final def value = _value.asInstanceOf[B]
+    @`inline` private[immutable] final def left = _left.asInstanceOf[Tree[A, B]]
+    @`inline` private[immutable] final def right = _right.asInstanceOf[Tree[A, B]]
+    //Note - only used in tests outside RedBlackTree
+    @`inline` private[immutable] final def isBlack = _count < 0
+    //Note - only used in tests outside RedBlackTree
+    @`inline` private[immutable] final def isRed = _count >= 0
+
+    override def toString: String = s"${if(isRed) "RedTree" else "BlackTree"}($key, $value, $left, $right)"
+
+    //mutable APIs
+    private[RedBlackTree] def makeImmutable: Tree[A, B] = {
+      def makeImmutableImpl() = {
+        if (isMutable) {
+          var size = 1
+          if (_left ne null) {
+            _left.makeImmutable
+            size += _left.count
+          }
+
if (_right ne null) { + _right.makeImmutable + size += _right.count + } + _count |= size //retains colour + } + this + } + makeImmutableImpl() + this + } + + private[RedBlackTree] def mutableBlack: Tree[A, B] = { + if (isBlack) this + else if (isMutable) { + _count = initialBlackCount + this + } + else new Tree(_key, _value, _left, _right, initialBlackCount) + } +// private[RedBlackTree] def mutableRed: Tree[A, B] = { +// if (isRed) this +// else if (mutable) { +// _count = initialRedCount +// this +// } +// else new Tree(_key, _value, _left, _right, initialRedCount) +// } + + private[RedBlackTree] def mutableWithV[B1 >: B](newValue: B1): Tree[A, B1] = { + if (newValue.asInstanceOf[AnyRef] eq _value.asInstanceOf[AnyRef]) this + else if (isMutable) { + _value = newValue.asInstanceOf[AnyRef] + this + } else new Tree(_key, newValue.asInstanceOf[AnyRef], _left, _right, mutableRetainingColour) + } + + private[RedBlackTree] def mutableWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { + if (_left eq newLeft) this + else if (isMutable) { + _left = newLeft + this + } else new Tree(_key, _value, newLeft, _right, mutableRetainingColour) + } + private[RedBlackTree] def mutableWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { + if (_right eq newRight) this + else if (isMutable) { + _right = newRight + this + } else new Tree(_key, _value, _left, newRight, mutableRetainingColour) + } + private[RedBlackTree] def mutableWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { + if ((_left eq newLeft) && (_right eq newRight)) this + else if (isMutable) { + _left = newLeft + _right = newRight + this + } else new Tree(_key, _value, newLeft, newRight, mutableRetainingColour) + } + private[RedBlackTree] def mutableBlackWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { + if ((_left eq newLeft) && isBlack) this + else if (isMutable) { + _count = initialBlackCount + _left = newLeft + this + } else new Tree(_key, _value, newLeft, _right, initialBlackCount) + } + private[RedBlackTree] def mutableBlackWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { + if ((_right eq newRight) && isBlack) this + else if (isMutable) { + _count = initialBlackCount + _right = newRight + this + } else new Tree(_key, _value, _left, newRight, initialBlackCount) + } + + private[RedBlackTree] def black: Tree[A, B] = { + //assertNotMutable(this) + if (isBlack) this + else new Tree(_key, _value, _left, _right, _count ^ colourBit) + } + private[RedBlackTree] def red: Tree[A, B] = { + //assertNotMutable(this) + if (isRed) this + else new Tree(_key, _value, _left, _right, _count ^ colourBit) + } + private[RedBlackTree] def withKV[B1 >: B](newKey: A, newValue: B1): Tree[A, B1] = { + //assertNotMutable(this) + if ((newKey.asInstanceOf[AnyRef] eq _key.asInstanceOf[AnyRef]) && + (newValue.asInstanceOf[AnyRef] eq _value.asInstanceOf[AnyRef])) this + else new Tree(newKey, newValue.asInstanceOf[AnyRef], _left, _right, _count) + } + private[RedBlackTree] def withV[B1 >: B](newValue: B1): Tree[A, B1] = { + //assertNotMutable(this) + if (newValue.asInstanceOf[AnyRef] eq _value.asInstanceOf[AnyRef]) this + else new Tree(_key, newValue.asInstanceOf[AnyRef], _left, _right, _count) + } + + private[RedBlackTree] def withLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { + //assertNotMutable(this) + //assertNotMutable(newLeft) + if (newLeft eq _left) this + else { + val size = sizeOf(newLeft) + sizeOf(_right) + 1 + new Tree(key, value.asInstanceOf[AnyRef], newLeft, _right, (_count & colourBit) | size) 
+      }
+    }
+    private[RedBlackTree] def withRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = {
+      //assertNotMutable(this)
+      //assertNotMutable(newRight)
+      if (newRight eq _right) this
+      else {
+        val size = sizeOf(_left) + sizeOf(newRight) + 1
+        new Tree(key, value.asInstanceOf[AnyRef], _left, newRight, (_count & colourBit) | size)
+      }
+    }
+    private[RedBlackTree] def blackWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = {
+      //assertNotMutable(this)
+      //assertNotMutable(newLeft)
+      if ((newLeft eq _left) && isBlack) this
+      else {
+        val size = sizeOf(newLeft) + sizeOf(_right) + 1
+        new Tree(key, value.asInstanceOf[AnyRef], newLeft, _right, initialBlackCount | size)
+      }
+    }
+    private[RedBlackTree] def redWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = {
+      //assertNotMutable(this)
+      //assertNotMutable(newLeft)
+      if ((newLeft eq _left) && isRed) this
+      else {
+        val size = sizeOf(newLeft) + sizeOf(_right) + 1
+        new Tree(key, value.asInstanceOf[AnyRef], newLeft, _right, initialRedCount | size)
+      }
+    }
+    private[RedBlackTree] def blackWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = {
+      //assertNotMutable(this)
+      //assertNotMutable(newRight)
+      if ((newRight eq _right) && isBlack) this
+      else {
+        val size = sizeOf(_left) + sizeOf(newRight) + 1
+        new Tree(key, value.asInstanceOf[AnyRef], _left, newRight, initialBlackCount | size)
+      }
+    }
+    private[RedBlackTree] def redWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = {
+      //assertNotMutable(this)
+      //assertNotMutable(newRight)
+      if ((newRight eq _right) && isRed) this
+      else {
+        val size = sizeOf(_left) + sizeOf(newRight) + 1
+        new Tree(key, value.asInstanceOf[AnyRef], _left, newRight, initialRedCount | size)
+      }
+    }
+    private[RedBlackTree] def withLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = {
+      //assertNotMutable(this)
+      //assertNotMutable(newLeft)
+      //assertNotMutable(newRight)
+      if ((newLeft eq _left) && (newRight eq _right)) this
+      else {
+        val size = sizeOf(newLeft) + sizeOf(newRight) + 1
+        new Tree(key, value.asInstanceOf[AnyRef], newLeft, newRight, (_count & colourBit) | size)
+      }
+    }
+    private[RedBlackTree] def redWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = {
+      //assertNotMutable(this)
+      //assertNotMutable(newLeft)
+      //assertNotMutable(newRight)
+      if ((newLeft eq _left) && (newRight eq _right) && isRed) this
+      else {
+        val size = sizeOf(newLeft) + sizeOf(newRight) + 1
+        new Tree(key, value.asInstanceOf[AnyRef], newLeft, newRight, initialRedCount | size)
+      }
+    }
+    private[RedBlackTree] def blackWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = {
+      //assertNotMutable(this)
+      //assertNotMutable(newLeft)
+      //assertNotMutable(newRight)
+      if ((newLeft eq _left) && (newRight eq _right) && isBlack) this
+      else {
+        val size = sizeOf(newLeft) + sizeOf(newRight) + 1
+        new Tree(key, value.asInstanceOf[AnyRef], newLeft, newRight, initialBlackCount | size)
+      }
+    }
+  }
+  //see #Tree docs "Colour, mutability and size encoding"
+  //we make these final vals because the optimiser inlines them, without reference to the enclosing module
+  private[RedBlackTree] final val colourBit         = 0x80000000
+  //really it's ~colourBit but that doesn't get inlined
+  private[RedBlackTree] final val colourMask        = colourBit - 1
+  private[RedBlackTree] final val initialBlackCount = colourBit
+  private[RedBlackTree] final val initialRedCount   = 0
+
+  @`inline` private[RedBlackTree] def mutableRedTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B])
= new Tree[A,B](key, value.asInstanceOf[AnyRef], left, right, initialRedCount) + @`inline` private[RedBlackTree] def mutableBlackTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new Tree[A,B](key, value.asInstanceOf[AnyRef], left, right, initialBlackCount) + + /** create a new immutable red tree. + * left and right may be null + */ + private[immutable] def RedTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]): Tree[A, B] = { + //assertNotMutable(left) + //assertNotMutable(right) + val size = sizeOf(left) + sizeOf(right) + 1 + new Tree(key, value.asInstanceOf[AnyRef], left, right, initialRedCount | size) + } + private[immutable] def BlackTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]): Tree[A, B] = { + //assertNotMutable(left) + //assertNotMutable(right) + val size = sizeOf(left) + sizeOf(right) + 1 + new Tree(key, value.asInstanceOf[AnyRef], left, right, initialBlackCount | size) + } + @`inline` private def sizeOf(tree:Tree[_,_]) = if (tree eq null) 0 else tree.count + //immutable APIs + //assertions - uncomment decls and callers when changing functionality + // private def devTimeAssert(assertion: Boolean) = { + // //uncomment this during development of the functionality + // assert(assertion) + // } + // private def assertNotMutable(t:Tree[_,_]) = { + // devTimeAssert ((t eq null) || t.count > 0) + // } + private[this] abstract class TreeIterator[A, B, R](root: Tree[A, B], start: Option[A])(protected implicit val ordering: Ordering[A]) extends AbstractIterator[R] { + protected[this] def nextResult(tree: Tree[A, B]): R + + override def hasNext: Boolean = lookahead ne null + + @throws[NoSuchElementException] + override def next(): R = { + val tree = lookahead + if(tree ne null) { + lookahead = findLeftMostOrPopOnEmpty(goRight(tree)) + nextResult(tree) + } else Iterator.empty.next() + } + + @tailrec + protected final def findLeftMostOrPopOnEmpty(tree: Tree[A, B]): Tree[A, B] = + if (tree eq null) popNext() + else if (tree.left eq null) tree + else findLeftMostOrPopOnEmpty(goLeft(tree)) + + @`inline` private[this] def pushNext(tree: Tree[A, B]): Unit = { + stackOfNexts(index) = tree + index += 1 + } + @`inline` protected final def popNext(): Tree[A, B] = if (index == 0) null else { + index -= 1 + stackOfNexts(index) + } + + protected[this] val stackOfNexts = if (root eq null) null else { + /* + * According to "Ralf Hinze. Constructing red-black trees" [https://www.cs.ox.ac.uk/ralf.hinze/publications/#P5] + * the maximum height of a red-black tree is 2*log_2(n + 2) - 2. + * + * According to {@see Integer#numberOfLeadingZeros} ceil(log_2(n)) = (32 - Integer.numberOfLeadingZeros(n - 1)) + * + * Although we don't store the deepest nodes in the path during iteration, + * we potentially do so in `startFrom`. + */ + val maximumHeight = 2 * (32 - Integer.numberOfLeadingZeros(root.count + 2 - 1)) - 2 + new Array[Tree[A, B]](maximumHeight) + } + private[this] var index = 0 + protected var lookahead: Tree[A, B] = + if (start.isDefined) startFrom(start.get) else findLeftMostOrPopOnEmpty(root) + + /** + * Find the leftmost subtree whose key is equal to the given key, or if no such thing, + * the leftmost subtree with the key that would be "next" after it according + * to the ordering. Along the way build up the iterator's path stack so that "next" + * functionality works. 
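+      * In other words, this positions the iterator on the leftmost node whose key is
+      * greater than or equal to `key` under `ordering`.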
+ */ + private[this] def startFrom(key: A) : Tree[A,B] = if (root eq null) null else { + @tailrec def find(tree: Tree[A, B]): Tree[A, B] = + if (tree eq null) popNext() + else find( + if (ordering.lteq(key, tree.key)) goLeft(tree) + else goRight(tree) + ) + find(root) + } + + @`inline` private[this] def goLeft(tree: Tree[A, B]) = { + pushNext(tree) + tree.left + } + + @`inline` protected final def goRight(tree: Tree[A, B]) = tree.right + } + + private[this] class EqualsIterator[A: Ordering, B](tree: Tree[A, B]) extends TreeIterator[A, B, Unit](tree, None) { + override def nextResult(tree: Tree[A, B]) = ??? + + def sameKeys[X](that:EqualsIterator[A,X]): Boolean = { + var equal = true + while (equal && (this.lookahead ne null) && (that.lookahead ne null)) { + if (this.lookahead eq that.lookahead) { + this.lookahead = this.popNext() + that.lookahead = that.popNext() + } else { + equal = (this.lookahead.key.asInstanceOf[AnyRef] eq that.lookahead.key.asInstanceOf[AnyRef]) || + ordering.equiv(this.lookahead.key, that.lookahead.key) + this.lookahead = this.findLeftMostOrPopOnEmpty(this.goRight(this.lookahead)) + that.lookahead = that.findLeftMostOrPopOnEmpty(that.goRight(that.lookahead)) + } + } + equal && (this.lookahead eq null) && (that.lookahead eq null) + } + def sameValues[X](that:EqualsIterator[A,X]): Boolean = { + var equal = true + while (equal && (this.lookahead ne null) && (that.lookahead ne null)) { + if (this.lookahead eq that.lookahead) { + this.lookahead = this.popNext() + that.lookahead = that.popNext() + } else { + equal = this.lookahead.value == that.lookahead.value + this.lookahead = this.findLeftMostOrPopOnEmpty(this.goRight(this.lookahead)) + that.lookahead = that.findLeftMostOrPopOnEmpty(that.goRight(that.lookahead)) + } + } + equal && (this.lookahead eq null) && (that.lookahead eq null) + } + def sameEntries[X](that:EqualsIterator[A,X]): Boolean = { + var equal = true + while (equal && (this.lookahead ne null) && (that.lookahead ne null)) { + if (this.lookahead eq that.lookahead) { + this.lookahead = this.popNext() + that.lookahead = that.popNext() + } else { + equal = ((this.lookahead.key.asInstanceOf[AnyRef] eq that.lookahead.key.asInstanceOf[AnyRef]) || + ordering.equiv(this.lookahead.key, that.lookahead.key)) && this.lookahead.value == that.lookahead.value + this.lookahead = this.findLeftMostOrPopOnEmpty(this.goRight(this.lookahead)) + that.lookahead = that.findLeftMostOrPopOnEmpty(that.goRight(that.lookahead)) + } + } + equal && (this.lookahead eq null) && (that.lookahead eq null) + } + } + private[this] class EntriesIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, (A, B)](tree, focus) { + override def nextResult(tree: Tree[A, B]) = (tree.key, tree.value) + } + + private[this] class KeysIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, A](tree, focus) { + override def nextResult(tree: Tree[A, B]) = tree.key + } + + private[this] class ValuesIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, B](tree, focus) { + override def nextResult(tree: Tree[A, B]) = tree.value + } + + /** Build a Tree suitable for a TreeSet from an ordered sequence of keys */ + def fromOrderedKeys[A](xs: Iterator[A], size: Int): Tree[A, Null] = { + val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes + def f(level: Int, size: Int): Tree[A, Null] = size match { + case 0 => null + case 1 => mkTree(level != maxUsedDepth || level == 1, xs.next(), 
null, null, null)
+      case n =>
+        val leftSize = (size-1)/2
+        val left = f(level+1, leftSize)
+        val x = xs.next()
+        val right = f(level+1, size-1-leftSize)
+        BlackTree(x, null, left, right)
+    }
+    f(1, size)
+  }
+
+  /** Build a Tree suitable for a TreeMap from an ordered sequence of key/value pairs */
+  def fromOrderedEntries[A, B](xs: Iterator[(A, B)], size: Int): Tree[A, B] = {
+    val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes
+    def f(level: Int, size: Int): Tree[A, B] = size match {
+      case 0 => null
+      case 1 =>
+        val (k, v) = xs.next()
+        mkTree(level != maxUsedDepth || level == 1, k, v, null, null)
+      case n =>
+        val leftSize = (size-1)/2
+        val left = f(level+1, leftSize)
+        val (k, v) = xs.next()
+        val right = f(level+1, size-1-leftSize)
+        BlackTree(k, v, left, right)
+    }
+    f(1, size)
+  }
+
+  def transform[A, B, C](t: Tree[A, B], f: (A, B) => C): Tree[A, C] =
+    if(t eq null) null
+    else {
+      val k = t.key
+      val v = t.value
+      val l = t.left
+      val r = t.right
+      val l2 = transform(l, f)
+      val v2 = f(k, v)
+      val r2 = transform(r, f)
+      if((v2.asInstanceOf[AnyRef] eq v.asInstanceOf[AnyRef])
+        && (l2 eq l)
+        && (r2 eq r)) t.asInstanceOf[Tree[A, C]]
+      else mkTree(t.isBlack, k, v2, l2, r2)
+    }
+
+  def filterEntries[A, B](t: Tree[A, B], f: (A, B) => Boolean): Tree[A, B] = if(t eq null) null else {
+    def fk(t: Tree[A, B]): Tree[A, B] = {
+      val k = t.key
+      val v = t.value
+      val l = t.left
+      val r = t.right
+      val l2 = if(l eq null) null else fk(l)
+      val keep = f(k, v)
+      val r2 = if(r eq null) null else fk(r)
+      if(!keep) join2(l2, r2)
+      else if((l2 eq l) && (r2 eq r)) t
+      else join(l2, k, v, r2)
+    }
+    blacken(fk(t))
+  }
+
+  private[this] val null2 = (null, null)
+
+  def partitionEntries[A, B](t: Tree[A, B], p: (A, B) => Boolean): (Tree[A, B], Tree[A, B]) =
+    if (t eq null) null2
+    else {
+      object partitioner {
+        var tmpk, tmpd = null: Tree[A, B] // shared vars to avoid returning tuples from fk
+        def fk(t: Tree[A, B]): Unit = {
+          val k = t.key
+          val v = t.value
+          val l = t.left
+          val r = t.right
+          var l2k, l2d, r2k, r2d = null: Tree[A, B]
+          if (l ne null) {
+            fk(l)
+            l2k = tmpk
+            l2d = tmpd
+          }
+          val keep = p(k, v)
+          if (r ne null) {
+            fk(r)
+            r2k = tmpk
+            r2d = tmpd
+          }
+          val jk =
+            if (!keep) join2(l2k, r2k)
+            else if ((l2k eq l) && (r2k eq r)) t
+            else join(l2k, k, v, r2k)
+          val jd =
+            if (keep) join2(l2d, r2d)
+            else if ((l2d eq l) && (r2d eq r)) t
+            else join(l2d, k, v, r2d)
+          tmpk = jk
+          tmpd = jd
+        }
+      }
+
+      partitioner.fk(t)
+      (blacken(partitioner.tmpk), blacken(partitioner.tmpd))
+    }
+
+  // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees
+  // Constructing Red-Black Trees, Ralf Hinze: [[https://www.cs.ox.ac.uk/ralf.hinze/publications/WAAAPL99b.ps.gz]]
+  // Red-Black Trees in a Functional Setting, Chris Okasaki: [[https://wiki.rice.edu/confluence/download/attachments/2761212/Okasaki-Red-Black.pdf]]
+
+  private[this] def del[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else {
+    val cmp = ordering.compare(k, tree.key)
+    if (cmp < 0) {
+      val newLeft = del(tree.left, k)
+      if (newLeft eq tree.left) tree
+      else if (isBlackTree(tree.left)) balLeft(tree, newLeft, tree.right)
+      else tree.redWithLeft(newLeft)
+    } else if (cmp > 0) {
+      val newRight = del(tree.right, k)
+      if (newRight eq tree.right) tree
+      else if (isBlackTree(tree.right)) balRight(tree, tree.left, newRight)
+      else tree.redWithRight(newRight)
+    } else append(tree.left, tree.right)
+  }
+
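+  // `balance` rebuilds `tree` from subtrees that may carry a red-red violation after a deletion
+  // step: two red children are blackened under a red root; a red child with a red grandchild is
+  // rotated so that the middle key of the offending pair becomes the new root; otherwise the
+  // node is simply re-linked with a black root.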
+ private[this] def balance[A, B](tree: Tree[A,B], tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = + if (isRedTree(tl)) { + if (isRedTree(tr)) tree.redWithLeftRight(tl.black, tr.black) + else if (isRedTree(tl.left)) tl.withLeftRight(tl.left.black, tree.blackWithLeftRight(tl.right, tr)) + else if (isRedTree(tl.right)) tl.right.withLeftRight(tl.blackWithRight(tl.right.left), tree.blackWithLeftRight(tl.right.right, tr)) + else tree.blackWithLeftRight(tl, tr) + } else if (isRedTree(tr)) { + if (isRedTree(tr.right)) tr.withLeftRight(tree.blackWithLeftRight(tl, tr.left), tr.right.black) + else if (isRedTree(tr.left)) tr.left.withLeftRight(tree.blackWithLeftRight(tl, tr.left.left), tr.blackWithLeftRight(tr.left.right, tr.right)) + else tree.blackWithLeftRight(tl, tr) + } else tree.blackWithLeftRight(tl, tr) + + private[this] def balLeft[A, B](tree: Tree[A,B], tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = + if (isRedTree(tl)) tree.redWithLeftRight(tl.black, tr) + else if (isBlackTree(tr)) balance(tree, tl, tr.red) + else if (isRedTree(tr) && isBlackTree(tr.left)) + tr.left.redWithLeftRight(tree.blackWithLeftRight(tl, tr.left.left), balance(tr, tr.left.right, tr.right.red)) + else sys.error("Defect: invariance violation") + + private[this] def balRight[A, B](tree: Tree[A,B], tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = + if (isRedTree(tr)) tree.redWithLeftRight(tl, tr.black) + else if (isBlackTree(tl)) balance(tree, tl.red, tr) + else if (isRedTree(tl) && isBlackTree(tl.right)) + tl.right.redWithLeftRight(balance(tl, tl.left.red, tl.right.left), tree.blackWithLeftRight(tl.right.right, tr)) + else sys.error("Defect: invariance violation") + + /** `append` is similar to `join2` but requires that both subtrees have the same black height */ + private[this] def append[A, B](tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = { + if (tl eq null) tr + else if (tr eq null) tl + else if (tl.isRed) { + if (tr.isRed) { + //tl is red, tr is red + val bc = append(tl.right, tr.left) + if (isRedTree(bc)) bc.withLeftRight(tl.withRight(bc.left), tr.withLeft(bc.right)) + else tl.withRight(tr.withLeft(bc)) + } else { + //tl is red, tr is black + tl.withRight(append(tl.right, tr)) + } + } else { + if (tr.isBlack) { + //tl is black tr is black + val bc = append(tl.right, tr.left) + if (isRedTree(bc)) bc.withLeftRight(tl.withRight(bc.left), tr.withLeft(bc.right)) + else balLeft(tl, tl.left, tr.withLeft(bc)) + } else { + //tl is black tr is red + tr.withLeft(append(tl, tr.left)) + } + } + } + + + // Bulk operations based on "Just Join for Parallel Ordered Sets" (https://www.cs.cmu.edu/~guyb/papers/BFS16.pdf) + // We don't store the black height in the tree so we pass it down into the join methods and derive the black height + // of child nodes from it. Where possible the black height is used directly instead of deriving the rank from it. + // Our trees are supposed to have a black root so we always blacken as the last step of union/intersect/difference. 
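+  // Note that `union` is right-biased: when a key occurs in both trees, the value from `t2` wins
+  // (`_union` below joins with `t2.value`), matching the usual right-bias of map concatenation.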
+ + def union[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = blacken(_union(t1, t2)) + + def intersect[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = blacken(_intersect(t1, t2)) + + def difference[A, B](t1: Tree[A, B], t2: Tree[A, _])(implicit ordering: Ordering[A]): Tree[A, B] = + blacken(_difference(t1, t2.asInstanceOf[Tree[A, B]])) + + /** Compute the rank from a tree and its black height */ + @`inline` private[this] def rank(t: Tree[_, _], bh: Int): Int = { + if(t eq null) 0 + else if(t.isBlack) 2*(bh-1) + else 2*bh-1 + } + + private[this] def joinRight[A, B](tl: Tree[A, B], k: A, v: B, tr: Tree[A, B], bhtl: Int, rtr: Int): Tree[A, B] = { + val rtl = rank(tl, bhtl) + if(rtl == (rtr/2)*2) RedTree(k, v, tl, tr) + else { + val tlBlack = isBlackTree(tl) + val bhtlr = if(tlBlack) bhtl-1 else bhtl + val ttr = joinRight(tl.right, k, v, tr, bhtlr, rtr) + if(tlBlack && isRedTree(ttr) && isRedTree(ttr.right)) + RedTree(ttr.key, ttr.value, + BlackTree(tl.key, tl.value, tl.left, ttr.left), + ttr.right.black) + else mkTree(tlBlack, tl.key, tl.value, tl.left, ttr) + } + } + + private[this] def joinLeft[A, B](tl: Tree[A, B], k: A, v: B, tr: Tree[A, B], rtl: Int, bhtr: Int): Tree[A, B] = { + val rtr = rank(tr, bhtr) + if(rtr == (rtl/2)*2) RedTree(k, v, tl, tr) + else { + val trBlack = isBlackTree(tr) + val bhtrl = if(trBlack) bhtr-1 else bhtr + val ttl = joinLeft(tl, k, v, tr.left, rtl, bhtrl) + if(trBlack && isRedTree(ttl) && isRedTree(ttl.left)) + RedTree(ttl.key, ttl.value, + ttl.left.black, + BlackTree(tr.key, tr.value, ttl.right, tr.right)) + else mkTree(trBlack, tr.key, tr.value, ttl, tr.right) + } + } + + private[this] def join[A, B](tl: Tree[A, B], k: A, v: B, tr: Tree[A, B]): Tree[A, B] = { + @tailrec def h(t: Tree[_, _], i: Int): Int = + if(t eq null) i+1 else h(t.left, if(t.isBlack) i+1 else i) + val bhtl = h(tl, 0) + val bhtr = h(tr, 0) + if(bhtl > bhtr) { + val tt = joinRight(tl, k, v, tr, bhtl, rank(tr, bhtr)) + if(isRedTree(tt) && isRedTree(tt.right)) tt.black + else tt + } else if(bhtr > bhtl) { + val tt = joinLeft(tl, k, v, tr, rank(tl, bhtl), bhtr) + if(isRedTree(tt) && isRedTree(tt.left)) tt.black + else tt + } else mkTree(isRedTree(tl) || isRedTree(tr), k, v, tl, tr) + } + + private[this] def split[A, B](t: Tree[A, B], k2: A)(implicit ordering: Ordering[A]): (Tree[A, B], Tree[A, B], Tree[A, B], A) = + if(t eq null) (null, null, null, k2) + else { + val cmp = ordering.compare(k2, t.key) + if(cmp == 0) (t.left, t, t.right, t.key) + else if(cmp < 0) { + val (ll, b, lr, k1) = split(t.left, k2) + (ll, b, join(lr, t.key, t.value, t.right), k1) + } else { + val (rl, b, rr, k1) = split(t.right, k2) + (join(t.left, t.key, t.value, rl), b, rr, k1) + } + } + + private[this] def splitLast[A, B](t: Tree[A, B]): (Tree[A, B], A, B) = + if(t.right eq null) (t.left, t.key, t.value) + else { + val (tt, kk, vv) = splitLast(t.right) + (join(t.left, t.key, t.value, tt), kk, vv) + } + + private[this] def join2[A, B](tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = + if(tl eq null) tr + else if(tr eq null) tl + else { + val (ttl, k, v) = splitLast(tl) + join(ttl, k, v, tr) + } + + private[this] def _union[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = + if((t1 eq null) || (t1 eq t2)) t2 + else if(t2 eq null) t1 + else { + val (l1, _, r1, k1) = split(t1, t2.key) + val tl = _union(l1, t2.left) + val tr = _union(r1, t2.right) + join(tl, k1, t2.value, tr) + } + + private[this] def _intersect[A, B](t1: 
Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] =
+    if((t1 eq null) || (t2 eq null)) null
+    else if (t1 eq t2) t1
+    else {
+      val (l1, b, r1, k1) = split(t1, t2.key)
+      val tl = _intersect(l1, t2.left)
+      val tr = _intersect(r1, t2.right)
+      if(b ne null) join(tl, k1, t2.value, tr)
+      else join2(tl, tr)
+    }
+
+  private[this] def _difference[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] =
+    if((t1 eq null) || (t2 eq null)) t1
+    else if (t1 eq t2) null
+    else {
+      val (l1, _, r1, k1) = split(t1, t2.key)
+      val tl = _difference(l1, t2.left)
+      val tr = _difference(r1, t2.right)
+      join2(tl, tr)
+    }
+}
diff --git a/tests/pos-special/stdlib/collection/immutable/Seq.scala b/tests/pos-special/stdlib/collection/immutable/Seq.scala
new file mode 100644
index 000000000000..d575c3aaf14a
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/Seq.scala
@@ -0,0 +1,157 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+
+import language.experimental.captureChecking
+
+trait Seq[+A] extends Iterable[A]
+                 with collection.Seq[A]
+                 with SeqOps[A, Seq, Seq[A]]
+                 with IterableFactoryDefaults[A, Seq] {
+
+  override final def toSeq: this.type = this
+
+  override def iterableFactory: SeqFactory[Seq] = Seq
+}
+
+/**
+  * @define coll immutable sequence
+  * @define Coll `immutable.Seq`
+  */
+trait SeqOps[+A, +CC[_], +C] extends Any with collection.SeqOps[A, CC, C]
+
+/**
+  * $factoryInfo
+  * @define coll immutable sequence
+  * @define Coll `immutable.Seq`
+  */
+@SerialVersionUID(3L)
+object Seq extends SeqFactory.Delegate[Seq](List) {
+  override def from[E](it: IterableOnce[E]^): Seq[E] = it match {
+    case s: Seq[E] => s
+    case _ => super.from(it)
+  }
+}
+
+/** Base trait for immutable indexed sequences that have efficient `apply` and `length` */
+trait IndexedSeq[+A] extends Seq[A]
+  with collection.IndexedSeq[A]
+  with IndexedSeqOps[A, IndexedSeq, IndexedSeq[A]]
+  with IterableFactoryDefaults[A, IndexedSeq] {
+
+  final override def toIndexedSeq: IndexedSeq[A] = this
+
+  override def canEqual(that: Any): Boolean = that match {
+    case otherIndexedSeq: IndexedSeq[_] => length == otherIndexedSeq.length && super.canEqual(that)
+    case _ => super.canEqual(that)
+  }
+
+
+  override def sameElements[B >: A](o: IterableOnce[B]^): Boolean = o match {
+    case that: IndexedSeq[_] =>
+      (this eq that) || {
+        val length = this.length
+        var equal = length == that.length
+        if (equal) {
+          var index = 0
+          // For some IndexedSeqs (e.g. Vector) `apply` is less efficient than iterating,
+          // so we compare only the first few elements with `apply` and the rest with an
+          // iterator; where `apply` is cheap for both sequences we use it for the whole
+          // comparison. We default to the smaller of the two preferred lengths.
+          val maxApplyCompare = {
+            val preferredLength = Math.min(applyPreferredMaxLength, that.applyPreferredMaxLength)
+            if (length > (preferredLength.toLong << 1)) preferredLength else length
+          }
+          while (index < maxApplyCompare && equal) {
+            equal = this(index) == that(index)
+            index += 1
+          }
+          if ((index < length) && equal) {
+            val thisIt = this.iterator.drop(index)
+            val thatIt = that.iterator.drop(index)
+            while (equal && thisIt.hasNext) {
+              equal = thisIt.next() == thatIt.next()
+            }
+          }
+        }
+        equal
+      }
+    case _ => super.sameElements(o)
+  }
+
+  /** A hint to the runtime when scanning values:
+    * [[apply]] is preferred for scans whose maximum index is below this value;
+    * [[iterator]] is preferred for scans beyond it.
+    * @return a hint about when to use [[apply]] or [[iterator]]
+    */
+  protected def applyPreferredMaxLength: Int = IndexedSeqDefaults.defaultApplyPreferredMaxLength
+
+  override def iterableFactory: SeqFactory[IndexedSeq] = IndexedSeq
+}
+
+object IndexedSeqDefaults {
+  val defaultApplyPreferredMaxLength: Int =
+    try System.getProperty(
+      "scala.collection.immutable.IndexedSeq.defaultApplyPreferredMaxLength", "64").toInt
+    catch {
+      case _: SecurityException => 64
+    }
+}
+
+@SerialVersionUID(3L)
+object IndexedSeq extends SeqFactory.Delegate[IndexedSeq](Vector) {
+  override def from[E](it: IterableOnce[E]^): IndexedSeq[E] = it match {
+    case is: IndexedSeq[E] => is
+    case _ => super.from(it)
+  }
+}
+
+/** Base trait for immutable indexed Seq operations */
+trait IndexedSeqOps[+A, +CC[_], +C]
+  extends SeqOps[A, CC, C]
+    with collection.IndexedSeqOps[A, CC, C] {
+
+  override def slice(from: Int, until: Int): C = {
+    // since we are immutable we can just share the same collection
+    if (from <= 0 && until >= length) coll
+    else super.slice(from, until)
+  }
+
+}
+
+/** Base trait for immutable linear sequences that have efficient `head` and `tail` */
+trait LinearSeq[+A]
+  extends Seq[A]
+    with collection.LinearSeq[A]
+    with LinearSeqOps[A, LinearSeq, LinearSeq[A]]
+    with IterableFactoryDefaults[A, LinearSeq] {
+
+  override def iterableFactory: SeqFactory[LinearSeq] = LinearSeq
+}
+
+@SerialVersionUID(3L)
+object LinearSeq extends SeqFactory.Delegate[LinearSeq](List) {
+  override def from[E](it: IterableOnce[E]^): LinearSeq[E] = it match {
+    case ls: LinearSeq[E] => ls
+    case _ => super.from(it)
+  }
+}
+
+trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeqOps[A, CC, C]]
+  extends AnyRef with SeqOps[A, CC, C]
+    with collection.LinearSeqOps[A, CC, C]
+
+/** Explicit instantiation of the `Seq` trait to reduce class file size in subclasses. */
+abstract class AbstractSeq[+A] extends scala.collection.AbstractSeq[A] with Seq[A]
diff --git a/tests/pos-special/stdlib/collection/immutable/SeqMap.scala b/tests/pos-special/stdlib/collection/immutable/SeqMap.scala
new file mode 100644
index 000000000000..fdd628f32698
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/SeqMap.scala
@@ -0,0 +1,277 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */ + +package scala +package collection +package immutable + +import scala.collection.mutable.{Builder, ReusableBuilder} +import language.experimental.captureChecking + +/** A base trait for ordered, immutable maps. + * + * Note that the [[equals]] method for [[SeqMap]] compares key-value pairs + * without regard to ordering. + * + * All behavior is defined in terms of the abstract methods in `SeqMap`. + * It is sufficient for concrete subclasses to implement those methods. + * Methods that return a new map, in particular [[removed]] and [[updated]], must preserve ordering. + * + * @tparam K the type of the keys contained in this linked map. + * @tparam V the type of the values associated with the keys in this linked map. + * + * @define coll immutable seq map + * @define Coll `immutable.SeqMap` + */ + +trait SeqMap[K, +V] + extends Map[K, V] + with collection.SeqMap[K, V] + with MapOps[K, V, SeqMap, SeqMap[K, V]] + with MapFactoryDefaults[K, V, SeqMap, Iterable] { + override def mapFactory: MapFactory[SeqMap] = SeqMap +} + + +object SeqMap extends MapFactory[SeqMap] { + def empty[K, V]: SeqMap[K, V] = EmptySeqMap.asInstanceOf[SeqMap[K, V]] + + def from[K, V](it: collection.IterableOnce[(K, V)]^): SeqMap[K, V] = + it match { + case sm: SeqMap[K, V] => sm + case _ => (newBuilder[K, V] ++= it).result() + } + + def newBuilder[K, V]: Builder[(K, V), SeqMap[K, V]] = new SeqMapBuilderImpl + + @SerialVersionUID(3L) + private object EmptySeqMap extends SeqMap[Any, Nothing] with Serializable { + override def size: Int = 0 + override def knownSize: Int = 0 + override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key) + override def contains(key: Any) = false + def get(key: Any): Option[Nothing] = None + override def getOrElse [V1](key: Any, default: => V1): V1 = default + def iterator: Iterator[(Any, Nothing)] = Iterator.empty + def updated [V1] (key: Any, value: V1): SeqMap[Any, V1] = new SeqMap1(key, value) + def removed(key: Any): SeqMap[Any, Nothing] = this + } + + @SerialVersionUID(3L) + private final class SeqMap1[K, +V](key1: K, value1: V) extends SeqMap[K,V] with Serializable { + override def size: Int = 1 + override def knownSize: Int = 1 + override def apply(key: K) = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K) = key == key1 + def get(key: K): Option[V] = + if (key == key1) Some(value1) else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 else default + def iterator = Iterator.single((key1, value1)) + def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = + if (key == key1) new SeqMap1(key1, value) + else new SeqMap2(key1, value1, key, value) + def removed(key: K): SeqMap[K, V] = + if (key == key1) SeqMap.empty else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)) + } + override def foreachEntry[U](f: (K, V) => U): Unit = { + f(key1, value1) + } + } + + @SerialVersionUID(3L) + private final class SeqMap2[K, +V](key1: K, value1: V, key2: K, value2: V) extends SeqMap[K,V] with Serializable { + override def size: Int = 2 + override def knownSize: Int = 2 + override def apply(key: K) = + if (key == key1) value1 + else if (key == key2) value2 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K) = (key == key1) || (key == key2) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else None + override def getOrElse [V1 >: 
V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else default + def iterator = ((key1, value1) :: (key2, value2) :: Nil).iterator + def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = + if (key == key1) new SeqMap2(key1, value, key2, value2) + else if (key == key2) new SeqMap2(key1, value1, key2, value) + else new SeqMap3(key1, value1, key2, value2, key, value) + def removed(key: K): SeqMap[K, V] = + if (key == key1) new SeqMap1(key2, value2) + else if (key == key2) new SeqMap1(key1, value1) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)) + } + override def foreachEntry[U](f: (K, V) => U): Unit = { + f(key1, value1) + f(key2, value2) + } + } + + @SerialVersionUID(3L) + private class SeqMap3[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V) extends SeqMap[K,V] with Serializable { + override def size: Int = 3 + override def knownSize: Int = 3 + override def apply(key: K) = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K) = (key == key1) || (key == key2) || (key == key3) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else if (key == key3) Some(value3) + else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else default + def iterator = ((key1, value1) :: (key2, value2) :: (key3, value3) :: Nil).iterator + def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = + if (key == key1) new SeqMap3(key1, value, key2, value2, key3, value3) + else if (key == key2) new SeqMap3(key1, value1, key2, value, key3, value3) + else if (key == key3) new SeqMap3(key1, value1, key2, value2, key3, value) + else new SeqMap4(key1, value1, key2, value2, key3, value3, key, value) + def removed(key: K): SeqMap[K, V] = + if (key == key1) new SeqMap2(key2, value2, key3, value3) + else if (key == key2) new SeqMap2(key1, value1, key3, value3) + else if (key == key3) new SeqMap2(key1, value1, key2, value2) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)); f((key3, value3)) + } + override def foreachEntry[U](f: (K, V) => U): Unit = { + f(key1, value1) + f(key2, value2) + f(key3, value3) + } + } + + @SerialVersionUID(3L) + private final class SeqMap4[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V, key4: K, value4: V) extends SeqMap[K,V] with Serializable { + override def size: Int = 4 + override def knownSize: Int = 4 + override def apply(key: K) = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else if (key == key4) value4 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K) = (key == key1) || (key == key2) || (key == key3) || (key == key4) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else if (key == key3) Some(value3) + else if (key == key4) Some(value4) + else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else if (key == key4) value4 + else default + def iterator = ((key1, value1) :: (key2, value2) :: (key3, value3) :: (key4, value4) :: Nil).iterator + def updated[V1 >: V](key: K, 
value: V1): SeqMap[K, V1] = + if (key == key1) new SeqMap4(key1, value, key2, value2, key3, value3, key4, value4) + else if (key == key2) new SeqMap4(key1, value1, key2, value, key3, value3, key4, value4) + else if (key == key3) new SeqMap4(key1, value1, key2, value2, key3, value, key4, value4) + else if (key == key4) new SeqMap4(key1, value1, key2, value2, key3, value3, key4, value) + else { + // Directly create the elements for performance reasons + val fields = Vector(key1, key2, key3, key4, key) + val underlying: Map[K, (Int, V1)] = + HashMap( + (key1, (0, value1)), + (key2, (1, value2)), + (key3, (2, value3)), + (key4, (3, value4)), + (key, (4, value)) + ) + new VectorMap(fields, underlying) + } + def removed(key: K): SeqMap[K, V] = + if (key == key1) new SeqMap3(key2, value2, key3, value3, key4, value4) + else if (key == key2) new SeqMap3(key1, value1, key3, value3, key4, value4) + else if (key == key3) new SeqMap3(key1, value1, key2, value2, key4, value4) + else if (key == key4) new SeqMap3(key1, value1, key2, value2, key3, value3) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)); f((key3, value3)); f((key4, value4)) + } + override def foreachEntry[U](f: (K, V) => U): Unit = { + f(key1, value1) + f(key2, value2) + f(key3, value3) + f(key4, value4) + } + + private[SeqMap] def buildTo[V1 >: V](builder: Builder[(K, V1), SeqMap[K, V1]]): builder.type = + builder.addOne((key1, value1)).addOne((key2, value2)).addOne((key3, value3)).addOne((key4, value4)) + } + + private final class SeqMapBuilderImpl[K, V] extends ReusableBuilder[(K, V), SeqMap[K, V]] { + private[this] var elems: SeqMap[K, V] = SeqMap.empty + private[this] var switchedToVectorMapBuilder: Boolean = false + private[this] var vectorMapBuilder: VectorMapBuilder[K, V] = _ + + override def clear(): Unit = { + elems = SeqMap.empty + if (vectorMapBuilder != null) { + vectorMapBuilder.clear() + } + switchedToVectorMapBuilder = false + } + + override def result(): SeqMap[K, V] = + if (switchedToVectorMapBuilder) vectorMapBuilder.result() else elems + + def addOne(elem: (K, V)) = { + if (switchedToVectorMapBuilder) { + vectorMapBuilder.addOne(elem) + } else if (elems.size < 4) { + elems = elems + elem + } else { + // assert(elems.size == 4) + if (elems.contains(elem._1)) { + elems = elems + elem // will not increase the size of the map + } else { + switchedToVectorMapBuilder = true + if (vectorMapBuilder == null) { + vectorMapBuilder = new VectorMapBuilder + } + elems.asInstanceOf[SeqMap4[K, V]].buildTo(vectorMapBuilder) + vectorMapBuilder.addOne(elem) + } + } + + this + } + + override def addAll(xs: IterableOnce[(K, V)]^): this.type = + if (switchedToVectorMapBuilder) { + vectorMapBuilder.addAll(xs) + this + } else { + super.addAll(xs) + } + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/Set.scala b/tests/pos-special/stdlib/collection/immutable/Set.scala new file mode 100644 index 000000000000..4632a35f8484 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/Set.scala @@ -0,0 +1,399 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
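At the usage level, the small-map classes above behave like any other `SeqMap`, preserving insertion order; per `SeqMap4.updated`, a fifth distinct key spills into a `VectorMap`. A quick sketch (the `getClass` name is an implementation detail, not API, and may vary by version):

```scala
import scala.collection.immutable.SeqMap

val m = SeqMap("d" -> 4, "a" -> 1, "c" -> 3, "b" -> 2) // a SeqMap4 internally
m.keys.mkString(",")                                    // "d,a,c,b": insertion order, not key order

// A fifth distinct key promotes the small representation, per SeqMap4.updated above.
val m5 = m + ("e" -> 5)
m5.getClass.getSimpleName                               // typically "VectorMap"
```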
+ */ + +package scala +package collection +package immutable + +import scala.collection.immutable.Set.Set4 +import scala.collection.mutable.{Builder, ReusableBuilder} +import language.experimental.captureChecking + +/** Base trait for immutable set collections */ +trait Set[A] extends Iterable[A] + with collection.Set[A] + with SetOps[A, Set, Set[A]] + with IterableFactoryDefaults[A, Set] { + override def iterableFactory: IterableFactory[Set] = Set +} + +/** Base trait for immutable set operations + * + * @define coll immutable set + * @define Coll `immutable.Set` + */ +trait SetOps[A, +CC[X], +C <: SetOps[A, CC, C]] + extends collection.SetOps[A, CC, C] { + + /** Creates a new set with an additional element, unless the element is + * already present. + * + * @param elem the element to be added + * @return a new set that contains all elements of this set and that also + * contains `elem`. + */ + def incl(elem: A): C + + /** Alias for `incl` */ + override final def + (elem: A): C = incl(elem) // like in collection.Set but not deprecated + + /** Creates a new set with a given element removed from this set. + * + * @param elem the element to be removed + * @return a new set that contains all elements of this set but that does not + * contain `elem`. + */ + def excl(elem: A): C + + /** Alias for `excl` */ + @`inline` final override def - (elem: A): C = excl(elem) + + def diff(that: collection.Set[A]): C = + foldLeft(empty)((result, elem) => if (that contains elem) result else result + elem) + + /** Creates a new $coll from this $coll by removing all elements of another + * collection. + * + * @param that the collection containing the elements to remove. + * @return a new $coll with the given elements removed, omitting duplicates. + */ + def removedAll(that: IterableOnce[A]): C = that.iterator.foldLeft[C](coll)(_ - _) + + /** Alias for removedAll */ + override final def -- (that: IterableOnce[A]): C = removedAll(that) +} + +trait StrictOptimizedSetOps[A, +CC[X], +C <: SetOps[A, CC, C]] + extends SetOps[A, CC, C] + with collection.StrictOptimizedSetOps[A, CC, C] + with StrictOptimizedIterableOps[A, CC, C] { + + override def concat(that: collection.IterableOnce[A]): C = { + var result: C = coll + val it = that.iterator + while (it.hasNext) result = result + it.next() + result + } +} + +/** + * $factoryInfo + * @define coll immutable set + * @define Coll `immutable.Set` + */ +@SerialVersionUID(3L) +object Set extends IterableFactory[Set] { + + def empty[A]: Set[A] = EmptySet.asInstanceOf[Set[A]] + + def from[E](it: collection.IterableOnce[E]^): Set[E] = + it match { + // We want `SortedSet` (and subclasses, such as `BitSet`) to + // rebuild themselves to avoid element type widening issues + case _: SortedSet[E] => (newBuilder[E] ++= it).result() + case _ if it.knownSize == 0 => empty[E] + case s: Set[E] => s + case _ => (newBuilder[E] ++= it).result() + } + + def newBuilder[A]: Builder[A, Set[A]] = new SetBuilderImpl[A] + + /** An optimized representation for immutable empty sets */ + @SerialVersionUID(3L) + private object EmptySet extends AbstractSet[Any] with Serializable { + override def size: Int = 0 + override def isEmpty = true + override def knownSize: Int = size + override def filter(pred: Any => Boolean): Set[Any] = this + override def filterNot(pred: Any => Boolean): Set[Any] = this + override def removedAll(that: IterableOnce[Any]): Set[Any] = this + override def diff(that: collection.Set[Any]): Set[Any] = this + override def subsetOf(that: collection.Set[Any]): Boolean = true + 
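The operations declared in `SetOps` above come with the symbolic aliases `+`, `-`, and `--`; a quick usage sketch:

```scala
import scala.collection.immutable.Set

val s = Set(1, 2, 3)
s + 4             // incl: Set(1, 2, 3, 4)
s - 2             // excl: Set(1, 3)
s -- List(2, 3)   // removedAll: Set(1)
s.diff(Set(2, 3)) // Set(1)
```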
override def intersect(that: collection.Set[Any]): Set[Any] = this + override def view: View[Any] = View.empty + def contains(elem: Any): Boolean = false + def incl(elem: Any): Set[Any] = new Set1(elem) + def excl(elem: Any): Set[Any] = this + def iterator: Iterator[Any] = Iterator.empty + override def foreach[U](f: Any => U): Unit = () + } + private[collection] def emptyInstance: Set[Any] = EmptySet + + @SerialVersionUID(3L) + private abstract class SetNIterator[A](n: Int) extends AbstractIterator[A], Serializable, Pure { + private[this] var current = 0 + private[this] var remainder = n + override def knownSize: Int = remainder + def hasNext = remainder > 0 + def apply(i: Int): A + def next(): A = + if (hasNext) { + val r = apply(current) + current += 1 + remainder -= 1 + r + } else Iterator.empty.next() + + override def drop(n: Int): Iterator[A] = { + if (n > 0) { + current += n + remainder = Math.max(0, remainder - n) + } + this + } + } + + /** An optimized representation for immutable sets of size 1 */ + @SerialVersionUID(3L) + final class Set1[A] private[collection] (elem1: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { + override def size: Int = 1 + override def isEmpty = false + override def knownSize: Int = size + def contains(elem: A): Boolean = elem == elem1 + def incl(elem: A): Set[A] = + if (contains(elem)) this + else new Set2(elem1, elem) + def excl(elem: A): Set[A] = + if (elem == elem1) Set.empty + else this + def iterator: Iterator[A] = Iterator.single(elem1) + override def foreach[U](f: A => U): Unit = f(elem1) + override def exists(p: A => Boolean): Boolean = p(elem1) + override def forall(p: A => Boolean): Boolean = p(elem1) + override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = + if (pred(elem1) != isFlipped) this else Set.empty + + override def find(p: A => Boolean): Option[A] = + if (p(elem1)) Some(elem1) + else None + override def head: A = elem1 + override def tail: Set[A] = Set.empty + } + + /** An optimized representation for immutable sets of size 2 */ + @SerialVersionUID(3L) + final class Set2[A] private[collection] (elem1: A, elem2: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { + override def size: Int = 2 + override def isEmpty = false + override def knownSize: Int = size + def contains(elem: A): Boolean = elem == elem1 || elem == elem2 + def incl(elem: A): Set[A] = + if (contains(elem)) this + else new Set3(elem1, elem2, elem) + def excl(elem: A): Set[A] = + if (elem == elem1) new Set1(elem2) + else if (elem == elem2) new Set1(elem1) + else this + def iterator: Iterator[A] = new SetNIterator[A](size) { + def apply(i: Int) = getElem(i) + } + private def getElem(i: Int) = i match { case 0 => elem1 case 1 => elem2 } + + override def foreach[U](f: A => U): Unit = { + f(elem1); f(elem2) + } + override def exists(p: A => Boolean): Boolean = { + p(elem1) || p(elem2) + } + override def forall(p: A => Boolean): Boolean = { + p(elem1) && p(elem2) + } + override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = { + var r1: A = null.asInstanceOf[A] + var n = 0 + if (pred(elem1) != isFlipped) { r1 = elem1; n += 1} + if (pred(elem2) != isFlipped) { if (n == 0) r1 = elem2; n += 1} + + n match { + case 0 => Set.empty + case 1 => new Set1(r1) + case 2 => this + } + } + override def find(p: A => Boolean): Option[A] = { + if (p(elem1)) Some(elem1) + else if (p(elem2)) Some(elem2) + else None + } + 
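`Set2.filterImpl` above returns `this` when every element survives and the shared empty instance when none does, so filtering can preserve identity on these small sets. A small check, relying on the code shown here rather than on any documented guarantee:

```scala
val pair = Set(1, 2)                 // backed by Set2
assert(pair.filter(_ > 0) eq pair)   // nothing removed: the same instance comes back
assert(pair.filter(_ > 1) == Set(2)) // one survivor: collapses to a Set1
assert(pair.filter(_ > 9).isEmpty)   // no survivors: the shared empty set
```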
override def head: A = elem1 + override def tail: Set[A] = new Set1(elem2) + } + + /** An optimized representation for immutable sets of size 3 */ + @SerialVersionUID(3L) + final class Set3[A] private[collection] (elem1: A, elem2: A, elem3: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { + override def size: Int = 3 + override def isEmpty = false + override def knownSize: Int = size + def contains(elem: A): Boolean = + elem == elem1 || elem == elem2 || elem == elem3 + def incl(elem: A): Set[A] = + if (contains(elem)) this + else new Set4(elem1, elem2, elem3, elem) + def excl(elem: A): Set[A] = + if (elem == elem1) new Set2(elem2, elem3) + else if (elem == elem2) new Set2(elem1, elem3) + else if (elem == elem3) new Set2(elem1, elem2) + else this + def iterator: Iterator[A] = new SetNIterator[A](size) { + def apply(i: Int) = getElem(i) + } + private def getElem(i: Int) = i match { case 0 => elem1 case 1 => elem2 case 2 => elem3 } + + override def foreach[U](f: A => U): Unit = { + f(elem1); f(elem2); f(elem3) + } + override def exists(p: A => Boolean): Boolean = { + p(elem1) || p(elem2) || p(elem3) + } + override def forall(p: A => Boolean): Boolean = { + p(elem1) && p(elem2) && p(elem3) + } + override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = { + var r1, r2: A = null.asInstanceOf[A] + var n = 0 + if (pred(elem1) != isFlipped) { r1 = elem1; n += 1} + if (pred(elem2) != isFlipped) { if (n == 0) r1 = elem2 else r2 = elem2; n += 1} + if (pred(elem3) != isFlipped) { if (n == 0) r1 = elem3 else if (n == 1) r2 = elem3; n += 1} + + n match { + case 0 => Set.empty + case 1 => new Set1(r1) + case 2 => new Set2(r1, r2) + case 3 => this + } + } + override def find(p: A => Boolean): Option[A] = { + if (p(elem1)) Some(elem1) + else if (p(elem2)) Some(elem2) + else if (p(elem3)) Some(elem3) + else None + } + override def head: A = elem1 + override def tail: Set[A] = new Set2(elem2, elem3) + } + + /** An optimized representation for immutable sets of size 4 */ + @SerialVersionUID(3L) + final class Set4[A] private[collection] (elem1: A, elem2: A, elem3: A, elem4: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { + override def size: Int = 4 + override def isEmpty = false + override def knownSize: Int = size + def contains(elem: A): Boolean = + elem == elem1 || elem == elem2 || elem == elem3 || elem == elem4 + def incl(elem: A): Set[A] = + if (contains(elem)) this + else HashSet.empty[A] + elem1 + elem2 + elem3 + elem4 + elem + def excl(elem: A): Set[A] = + if (elem == elem1) new Set3(elem2, elem3, elem4) + else if (elem == elem2) new Set3(elem1, elem3, elem4) + else if (elem == elem3) new Set3(elem1, elem2, elem4) + else if (elem == elem4) new Set3(elem1, elem2, elem3) + else this + def iterator: Iterator[A] = new SetNIterator[A](size) { + def apply(i: Int) = getElem(i) + } + private def getElem(i: Int) = i match { case 0 => elem1 case 1 => elem2 case 2 => elem3 case 3 => elem4 } + + override def foreach[U](f: A => U): Unit = { + f(elem1); f(elem2); f(elem3); f(elem4) + } + override def exists(p: A => Boolean): Boolean = { + p(elem1) || p(elem2) || p(elem3) || p(elem4) + } + override def forall(p: A => Boolean): Boolean = { + p(elem1) && p(elem2) && p(elem3) && p(elem4) + } + override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = { + var r1, r2, r3: A = null.asInstanceOf[A] + var n = 0 + if (pred(elem1) != isFlipped) { r1 = elem1; n += 
1} + if (pred(elem2) != isFlipped) { if (n == 0) r1 = elem2 else r2 = elem2; n += 1} + if (pred(elem3) != isFlipped) { if (n == 0) r1 = elem3 else if (n == 1) r2 = elem3 else r3 = elem3; n += 1} + if (pred(elem4) != isFlipped) { if (n == 0) r1 = elem4 else if (n == 1) r2 = elem4 else if (n == 2) r3 = elem4; n += 1} + + n match { + case 0 => Set.empty + case 1 => new Set1(r1) + case 2 => new Set2(r1, r2) + case 3 => new Set3(r1, r2, r3) + case 4 => this + } + } + + override def find(p: A => Boolean): Option[A] = { + if (p(elem1)) Some(elem1) + else if (p(elem2)) Some(elem2) + else if (p(elem3)) Some(elem3) + else if (p(elem4)) Some(elem4) + else None + } + override def head: A = elem1 + override def tail: Set[A] = new Set3(elem2, elem3, elem4) + + private[immutable] def buildTo(builder: Builder[A, Set[A]]): builder.type = + builder.addOne(elem1).addOne(elem2).addOne(elem3).addOne(elem4) + } +} + +/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. */ +abstract class AbstractSet[A] extends scala.collection.AbstractSet[A] with Set[A] + +/** Builder for Set. + * $multipleResults + */ +private final class SetBuilderImpl[A] extends ReusableBuilder[A, Set[A]] { + private[this] var elems: Set[A] = Set.empty + private[this] var switchedToHashSetBuilder: Boolean = false + private[this] var hashSetBuilder: HashSetBuilder[A] = _ + + override def clear(): Unit = { + elems = Set.empty + if (hashSetBuilder != null) { + hashSetBuilder.clear() + } + switchedToHashSetBuilder = false + } + + override def result(): Set[A] = + if (switchedToHashSetBuilder) hashSetBuilder.result() else elems + + def addOne(elem: A) = { + if (switchedToHashSetBuilder) { + hashSetBuilder.addOne(elem) + } else if (elems.size < 4) { + elems = elems + elem + } else { + // assert(elems.size == 4) + if (elems.contains(elem)) { + () // do nothing + } else { + switchedToHashSetBuilder = true + if (hashSetBuilder == null) { + hashSetBuilder = new HashSetBuilder + } + elems.asInstanceOf[Set4[A]].buildTo(hashSetBuilder) + hashSetBuilder.addOne(elem) + } + } + + this + } + + override def addAll(xs: IterableOnce[A]^): this.type = + if (switchedToHashSetBuilder) { + hashSetBuilder.addAll(xs) + this + } else { + super.addAll(xs) + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/SortedMap.scala b/tests/pos-special/stdlib/collection/immutable/SortedMap.scala new file mode 100644 index 000000000000..9587502fd908 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/SortedMap.scala @@ -0,0 +1,178 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.mutable.Builder +import language.experimental.captureChecking + +/** An immutable map whose key-value pairs are sorted according to an [[scala.math.Ordering]] on the keys. + * + * Allows for range queries to be performed on its keys, and implementations must guarantee that traversal happens in + * sorted order, according to the map's [[scala.math.Ordering]]. 
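Note that `SetBuilderImpl` above only abandons the small representation on a fifth *distinct* element; duplicates are absorbed in place. Sketched usage (the internal builder classes named here are implementation details):

```scala
val b = Set.newBuilder[Int]
b ++= Seq(1, 2, 3, 4) // still the four-element small set
b += 4                // duplicate: no switch to the hash-based builder
b += 5                // fifth distinct element: spills into a HashSetBuilder
b.result()            // Set(1, 2, 3, 4, 5)
```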
+ * + * @example {{{ + * import scala.collection.immutable.SortedMap + * + * // Make a SortedMap via the companion object factory + * val weekdays = SortedMap( + * 2 -> "Monday", + * 3 -> "Tuesday", + * 4 -> "Wednesday", + * 5 -> "Thursday", + * 6 -> "Friday" + * ) + * // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday) + * + * val days = weekdays ++ List(1 -> "Sunday", 7 -> "Saturday") + * // TreeMap(1 -> Sunday, 2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday, 7 -> Saturday) + * + * val day3 = days.get(3) // Some("Tuesday") + * + * val rangeOfDays = days.range(2, 5) // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday) + * + * val daysUntil2 = days.rangeUntil(2) // TreeMap(1 -> Sunday) + * val daysTo2 = days.rangeTo(2) // TreeMap(1 -> Sunday, 2 -> Monday) + * val daysAfter5 = days.rangeFrom(5) // TreeMap(5 -> Thursday, 6 -> Friday, 7 -> Saturday) + * }}} + * + * @tparam K the type of the keys contained in this tree map. + * @tparam V the type of the values associated with the keys. + */ +trait SortedMap[K, +V] + extends Map[K, V] + with collection.SortedMap[K, V] + with SortedMapOps[K, V, SortedMap, SortedMap[K, V]] + with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map] { + + override def unsorted: Map[K, V] = this + + override def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap + + /** The same map with a given default function. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefault`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d the function mapping keys to values, used for non-present keys + * @return a wrapper of the map with a default value + */ + override def withDefault[V1 >: V](d: K -> V1): SortedMap[K, V1] = new SortedMap.WithDefault[K, V1](this, d) + + /** The same map with a given default value. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefaultValue`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. 
+ * + * @param d default value used for non-present keys + * @return a wrapper of the map with a default value + */ + override def withDefaultValue[V1 >: V](d: V1): SortedMap[K, V1] = new SortedMap.WithDefault[K, V1](this, _ => d) +} + +trait SortedMapOps[K, +V, +CC[X, +Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends MapOps[K, V, Map, C] with collection.SortedMapOps[K, V, CC, C] { self => + + protected def coll: C with CC[K, V] + + def unsorted: Map[K, V] + + override def keySet: SortedSet[K] = new ImmutableKeySortedSet + + /** The implementation class of the set returned by `keySet` */ + protected class ImmutableKeySortedSet extends AbstractSet[K] with SortedSet[K] with GenKeySet with GenKeySortedSet { + def rangeImpl(from: Option[K], until: Option[K]): SortedSet[K] = { + val map = self.rangeImpl(from, until) + new map.ImmutableKeySortedSet + } + def incl(elem: K): SortedSet[K] = fromSpecific(this).incl(elem) + def excl(elem: K): SortedSet[K] = fromSpecific(this).excl(elem) + } + + // We override these methods to fix their return type (which would be `Map` otherwise) + def updated[V1 >: V](key: K, value: V1): CC[K, V1] + @`inline` final override def +[V1 >: V](kv: (K, V1)): CC[K, V1] = updated(kv._1, kv._2) + override def updatedWith[V1 >: V](key: K)(remappingFunction: Option[V] => Option[V1]): CC[K, V1] = { + // Implementation has been copied from `MapOps` + val previousValue = this.get(key) + remappingFunction(previousValue) match { + case None => previousValue.fold(coll)(_ => this.removed(key).coll) + case Some(nextValue) => + if (previousValue.exists(_.asInstanceOf[AnyRef] eq nextValue.asInstanceOf[AnyRef])) coll + else coll.updated(key, nextValue) + } + } + override def transform[W](f: (K, V) => W): CC[K, W] = map({ case (k, v) => (k, f(k, v)) })(ordering) +} + +trait StrictOptimizedSortedMapOps[K, +V, +CC[X, +Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends SortedMapOps[K, V, CC, C] + with collection.StrictOptimizedSortedMapOps[K, V, CC, C] + with StrictOptimizedMapOps[K, V, Map, C] { + + override def concat[V2 >: V](xs: collection.IterableOnce[(K, V2)]^): CC[K, V2] = { + var result: CC[K, V2] = coll + val it = xs.iterator + while (it.hasNext) result = result + it.next() + result + } +} + +@SerialVersionUID(3L) +object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) { + + override def from[K: Ordering, V](it: IterableOnce[(K, V)]^): SortedMap[K, V] = it match { + case sm: SortedMap[K, V] if Ordering[K] == sm.ordering => sm + case _ => super.from(it) + } + + final class WithDefault[K, +V](underlying: SortedMap[K, V], defaultValue: K -> V) + extends Map.WithDefault[K, V](underlying, defaultValue) + with SortedMap[K, V] + with SortedMapOps[K, V, SortedMap, WithDefault[K, V]] with Serializable { + + implicit def ordering: Ordering[K] = underlying.ordering + + override def sortedMapFactory: SortedMapFactory[SortedMap] = underlying.sortedMapFactory + + def iteratorFrom(start: K): scala.collection.Iterator[(K, V)] = underlying.iteratorFrom(start) + + def keysIteratorFrom(start: K): scala.collection.Iterator[K] = underlying.keysIteratorFrom(start) + + def rangeImpl(from: Option[K], until: Option[K]): WithDefault[K, V] = + new WithDefault[K, V](underlying.rangeImpl(from, until), defaultValue) + + // Need to override following methods to match type signatures of `SortedMap.WithDefault` + // for operations preserving default value + + override def updated[V1 >: V](key: K, value: V1): 
WithDefault[K, V1] = + new WithDefault[K, V1](underlying.updated(key, value), defaultValue) + + override def concat [V2 >: V](xs: collection.IterableOnce[(K, V2)]^): WithDefault[K, V2] = + new WithDefault( underlying.concat(xs) , defaultValue) + + override def removed(key: K): WithDefault[K, V] = new WithDefault[K, V](underlying.removed(key), defaultValue) + + override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V) @uncheckedVariance]^): WithDefault[K, V] = + new WithDefault[K, V](sortedMapFactory.from(coll), defaultValue) + + override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] @uncheckedVariance = + SortedMap.newBuilder.mapResult((p: SortedMap[K, V]) => new WithDefault[K, V](p, defaultValue)) + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/SortedSet.scala b/tests/pos-special/stdlib/collection/immutable/SortedSet.scala new file mode 100644 index 000000000000..874abcaecda1 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/SortedSet.scala @@ -0,0 +1,58 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable +import language.experimental.captureChecking + +/** Base trait for sorted sets */ +trait SortedSet[A] + extends Set[A] + with collection.SortedSet[A] + with SortedSetOps[A, SortedSet, SortedSet[A]] + with SortedSetFactoryDefaults[A, SortedSet, Set] { + + override def unsorted: Set[A] = this + + override def sortedIterableFactory: SortedIterableFactory[SortedSet] = SortedSet +} + +/** + * @define coll immutable sorted set + * @define Coll `immutable.SortedSet` + */ +trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] + extends SetOps[A, Set, C] + with collection.SortedSetOps[A, CC, C] { + + def unsorted: Set[A] +} + +trait StrictOptimizedSortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] + extends SortedSetOps[A, CC, C] + with collection.StrictOptimizedSortedSetOps[A, CC, C] + with StrictOptimizedSetOps[A, Set, C] { +} + +/** + * $factoryInfo + * @define coll immutable sorted set + * @define Coll `immutable.SortedSet` + */ +@SerialVersionUID(3L) +object SortedSet extends SortedIterableFactory.Delegate[SortedSet](TreeSet) { + override def from[E: Ordering](it: IterableOnce[E]^): SortedSet[E] = it match { + case ss: SortedSet[E] if Ordering[E] == ss.ordering => ss + case _ => super.from(it) + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala b/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala new file mode 100644 index 000000000000..5ceb5435469f --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala @@ -0,0 +1,81 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
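The `withDefault`/`withDefaultValue` contract documented above is easy to misread: the default participates in `apply` only. For example:

```scala
import scala.collection.immutable.SortedMap

val m = SortedMap(1 -> "one").withDefaultValue("?")
m(42)          // "?"   : apply falls back to the default
m.get(42)      // None  : get is unaffected
m.contains(42) // false : so are contains, iterator, keys, ...
```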
+ */ + +package scala +package collection +package immutable +import language.experimental.captureChecking + +/** + * Trait that overrides operations to take advantage of strict builders. + */ +trait StrictOptimizedSeqOps[+A, +CC[_], +C] + extends Any + with SeqOps[A, CC, C] + with collection.StrictOptimizedSeqOps[A, CC, C] + with StrictOptimizedIterableOps[A, CC, C] { + + override def distinctBy[B](f: A -> B): C = { + if (lengthCompare(1) <= 0) coll + else { + val builder = newSpecificBuilder + val seen = mutable.HashSet.empty[B] + val it = this.iterator + var different = false + while (it.hasNext) { + val next = it.next() + if (seen.add(f(next))) builder += next else different = true + } + if (different) builder.result() else coll + } + } + + override def updated[B >: A](index: Int, elem: B): CC[B] = { + if (index < 0) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${if (knownSize>=0) knownSize else "unknown"})") + val b = iterableFactory.newBuilder[B] + if (knownSize >= 0) { + b.sizeHint(size) + } + var i = 0 + val it = iterator + while (i < index && it.hasNext) { + b += it.next() + i += 1 + } + if (!it.hasNext) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${i-1})") + b += elem + it.next() + while (it.hasNext) b += it.next() + b.result() + } + + override def patch[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): CC[B] = { + val b = iterableFactory.newBuilder[B] + var i = 0 + val it = iterator + while (i < from && it.hasNext) { + b += it.next() + i += 1 + } + b ++= other + i = replaced + while (i > 0 && it.hasNext) { + it.next() + i -= 1 + } + while (it.hasNext) b += it.next() + b.result() + } + + override def sorted[B >: A](implicit ord: Ordering[B]): C = super.sorted(ord) + +} diff --git a/tests/pos-special/stdlib/collection/immutable/TreeMap.scala b/tests/pos-special/stdlib/collection/immutable/TreeMap.scala new file mode 100644 index 000000000000..8da2f189a985 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/TreeMap.scala @@ -0,0 +1,371 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.annotation.tailrec +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.immutable.{RedBlackTree => RB} +import scala.collection.mutable.ReusableBuilder +import scala.runtime.AbstractFunction2 +import language.experimental.captureChecking + +/** An immutable SortedMap whose values are stored in a red-black tree. + * + * This class is optimal when range queries will be performed, + * or when traversal in order of an ordering is desired. + * If you only need key lookups, and don't care about the order in which + * key-value pairs are traversed, consider using [[scala.collection.immutable.HashMap]], + * which will generally have better performance. If you need insertion order, + * consider a [[scala.collection.immutable.SeqMap]], which does not need to + * have an ordering supplied.
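The builder-based `updated` and `patch` overrides in `StrictOptimizedSeqOps` above implement the standard strict semantics; for reference:

```scala
val xs = Vector(1, 2, 3, 4)
xs.updated(2, 99)         // Vector(1, 2, 99, 4)
xs.patch(1, Seq(7, 8), 2) // Vector(1, 7, 8, 4): drop 2 elements at index 1, splice in 7 and 8
```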
+ * + * @example {{{ + * import scala.collection.immutable.TreeMap + * + * // Make a TreeMap via the companion object factory + * val weekdays = TreeMap( + * 2 -> "Monday", + * 3 -> "Tuesday", + * 4 -> "Wednesday", + * 5 -> "Thursday", + * 6 -> "Friday" + * ) + * // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday) + * + * val days = weekdays ++ List(1 -> "Sunday", 7 -> "Saturday") + * // TreeMap(1 -> Sunday, 2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday, 7 -> Saturday) + * + * val day3 = days.get(3) // Some("Tuesday") + * + * val rangeOfDays = days.range(2, 5) // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday) + * + * val daysUntil2 = days.rangeUntil(2) // TreeMap(1 -> Sunday) + * val daysTo2 = days.rangeTo(2) // TreeMap(1 -> Sunday, 2 -> Monday) + * val daysAfter5 = days.rangeFrom(5) // TreeMap(5 -> Thursday, 6 -> Friday, 7 -> Saturday) + * }}} + * + * @tparam K the type of the keys contained in this tree map. + * @tparam V the type of the values associated with the keys. + * @param ordering the implicit ordering used to compare objects of type `A`. + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#red-black-trees "Scala's Collection Library overview"]] + * section on `Red-Black Trees` for more information. + * + * @define Coll immutable.TreeMap + * @define coll immutable tree map + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +final class TreeMap[K, +V] private (private val tree: RB.Tree[K, V])(implicit val ordering: Ordering[K]) + extends AbstractMap[K, V] + with SortedMap[K, V] + with StrictOptimizedSortedMapOps[K, V, TreeMap, TreeMap[K, V]] + with SortedMapFactoryDefaults[K, V, TreeMap, Iterable, Map] + with DefaultSerializable { + + def this()(implicit ordering: Ordering[K]) = this(null)(ordering) + private[immutable] def tree0: RB.Tree[K, V] = tree + + private[this] def newMapOrSelf[V1 >: V](t: RB.Tree[K, V1]): TreeMap[K, V1] = if(t eq tree) this else new TreeMap[K, V1](t) + + override def sortedMapFactory: SortedMapFactory[TreeMap] = TreeMap + + def iterator: Iterator[(K, V)] = RB.iterator(tree) + + def keysIteratorFrom(start: K): Iterator[K] = RB.keysIterator(tree, Some(start)) + + override def keySet: TreeSet[K] = new TreeSet(tree)(ordering) + + def iteratorFrom(start: K): Iterator[(K, V)] = RB.iterator(tree, Some(start)) + + override def valuesIteratorFrom(start: K): Iterator[V] = RB.valuesIterator(tree, Some(start)) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = + shape.parUnbox( + scala.collection.convert.impl.AnyBinaryTreeStepper.from[(K, V), RB.Tree[K, V]]( + size, tree, _.left, _.right, x => (x.key, x.value) + ) + ) + + override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Tree[K, V] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree, _.left, _.right, _.key.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[K, T](size, tree, _.left, _.right, _.key)) + } + s.asInstanceOf[S with EfficientSplit] + } + + 
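Note how `keySet` above wraps the map's red-black tree directly rather than copying it, so it is constant-time and stays coupled to the map's ordering:

```scala
import scala.collection.immutable.TreeMap

val tm = TreeMap(3 -> "c", 1 -> "a", 2 -> "b")
val ks = tm.keySet // a TreeSet over the same underlying tree: TreeSet(1, 2, 3)
ks.rangeFrom(2)    // TreeSet(2, 3), using the same ordering as the map
```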
override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Tree[K, V] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.value.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.value.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.value.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[V, T] (size, tree, _.left, _.right, _.value.asInstanceOf[V])) + } + s.asInstanceOf[S with EfficientSplit] + } + + def get(key: K): Option[V] = RB.get(tree, key) + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + val resultOrNull = RB.lookup(tree, key) + if (resultOrNull eq null) default + else resultOrNull.value + } + + def removed(key: K): TreeMap[K,V] = + newMapOrSelf(RB.delete(tree, key)) + + def updated[V1 >: V](key: K, value: V1): TreeMap[K, V1] = + newMapOrSelf(RB.update(tree, key, value, overwrite = true)) + + override def concat[V1 >: V](that: collection.IterableOnce[(K, V1)]^): TreeMap[K, V1] = + newMapOrSelf(that match { + case tm: TreeMap[K, V] @unchecked if ordering == tm.ordering => + RB.union(tree, tm.tree) + case ls: LinearSeq[(K,V1)] => + if (ls.isEmpty) tree //to avoid the creation of the adder + else { + val adder = new Adder[V1] + adder.addAll(ls) + adder.finalTree + } + case _ => + val adder = new Adder[V1] + val it = that.iterator + while (it.hasNext) { + adder.apply(it.next()) + } + adder.finalTree + }) + + override def removedAll(keys: IterableOnce[K]^): TreeMap[K, V] = keys match { + case ts: TreeSet[K] if ordering == ts.ordering => + newMapOrSelf(RB.difference(tree, ts.tree)) + case _ => super.removedAll(keys) + } + + /** A new TreeMap with the entry added is returned, + * assuming that key is not in the TreeMap. 
+ * + * @tparam V1 type of the values of the new bindings, a supertype of `V` + * @param key the key to be inserted + * @param value the value to be associated with `key` + * @return a new $coll with the inserted binding, if it wasn't present in the map + */ + @deprecated("Use `updated` instead", "2.13.0") + def insert[V1 >: V](key: K, value: V1): TreeMap[K, V1] = { + assert(!RB.contains(tree, key)) + updated(key, value) + } + + def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = newMapOrSelf(RB.rangeImpl(tree, from, until)) + + override def minAfter(key: K): Option[(K, V)] = RB.minAfter(tree, key) match { + case null => Option.empty + case x => Some((x.key, x.value)) + } + + override def maxBefore(key: K): Option[(K, V)] = RB.maxBefore(tree, key) match { + case null => Option.empty + case x => Some((x.key, x.value)) + } + + override def range(from: K, until: K): TreeMap[K,V] = newMapOrSelf(RB.range(tree, from, until)) + + override def foreach[U](f: ((K, V)) => U): Unit = RB.foreach(tree, f) + override def foreachEntry[U](f: (K, V) => U): Unit = RB.foreachEntry(tree, f) + override def size: Int = RB.count(tree) + override def knownSize: Int = size + + override def isEmpty = size == 0 + + override def firstKey: K = RB.smallest(tree).key + + override def lastKey: K = RB.greatest(tree).key + + override def head: (K, V) = { + val smallest = RB.smallest(tree) + (smallest.key, smallest.value) + } + + override def last: (K, V) = { + val greatest = RB.greatest(tree) + (greatest.key, greatest.value) + } + + override def tail: TreeMap[K, V] = new TreeMap(RB.tail(tree)) + + override def init: TreeMap[K, V] = new TreeMap(RB.init(tree)) + + override def drop(n: Int): TreeMap[K, V] = { + if (n <= 0) this + else if (n >= size) empty + else new TreeMap(RB.drop(tree, n)) + } + + override def take(n: Int): TreeMap[K, V] = { + if (n <= 0) empty + else if (n >= size) this + else new TreeMap(RB.take(tree, n)) + } + + override def slice(from: Int, until: Int) = { + if (until <= from) empty + else if (from <= 0) take(until) + else if (until >= size) drop(from) + else new TreeMap(RB.slice(tree, from, until)) + } + + override def dropRight(n: Int): TreeMap[K, V] = take(size - math.max(n, 0)) + + override def takeRight(n: Int): TreeMap[K, V] = drop(size - math.max(n, 0)) + + private[this] def countWhile(p: ((K, V)) => Boolean): Int = { + var result = 0 + val it = iterator + while (it.hasNext && p(it.next())) result += 1 + result + } + + override def dropWhile(p: ((K, V)) => Boolean): TreeMap[K, V] = drop(countWhile(p)) + + override def takeWhile(p: ((K, V)) => Boolean): TreeMap[K, V] = take(countWhile(p)) + + override def span(p: ((K, V)) => Boolean): (TreeMap[K, V], TreeMap[K, V]) = splitAt(countWhile(p)) + + override def filter(f: ((K, V)) => Boolean): TreeMap[K, V] = + newMapOrSelf(RB.filterEntries[K, V](tree, (k, v) => f((k, v)))) + + override def partition(p: ((K, V)) => Boolean): (TreeMap[K, V], TreeMap[K, V]) = { + val (l, r) = RB.partitionEntries[K, V](tree, (k, v) => p((k, v))) + (newMapOrSelf(l), newMapOrSelf(r)) + } + + override def transform[W](f: (K, V) => W): TreeMap[K, W] = { + val t2 = RB.transform[K, V, W](tree, f) + if(t2 eq tree) this.asInstanceOf[TreeMap[K, W]] + else new TreeMap(t2) + } + + private final class Adder[B1 >: V] + extends RB.MapHelper[K, B1] with Function1[(K, B1), Unit] { + private var currentMutableTree: RB.Tree[K,B1] = tree0 + def finalTree = beforePublish(currentMutableTree) + override def apply(kv: (K, B1)): Unit = { + currentMutableTree = 
mutableUpd(currentMutableTree, kv._1, kv._2) + } + @tailrec def addAll(ls: LinearSeq[(K, B1)]): Unit = { + if (!ls.isEmpty) { + val kv = ls.head + currentMutableTree = mutableUpd(currentMutableTree, kv._1, kv._2) + addAll(ls.tail) + } + } + } + override def equals(obj: Any): Boolean = obj match { + case that: TreeMap[K @unchecked, _] if ordering == that.ordering => RB.entriesEqual(tree, that.tree) + case _ => super.equals(obj) + } + + override protected[this] def className = "TreeMap" +} + +/** $factoryInfo + * @define Coll immutable.TreeMap + * @define coll immutable tree map + */ +@SerialVersionUID(3L) +object TreeMap extends SortedMapFactory[TreeMap] { + + def empty[K : Ordering, V]: TreeMap[K, V] = new TreeMap() + + def from[K, V](it: IterableOnce[(K, V)]^)(implicit ordering: Ordering[K]): TreeMap[K, V] = + it match { + case tm: TreeMap[K, V] if ordering == tm.ordering => tm + case sm: scala.collection.SortedMap[K, V] if ordering == sm.ordering => + new TreeMap[K, V](RB.fromOrderedEntries(sm.iterator, sm.size)) + case _ => + var t: RB.Tree[K, V] = null + val i = it.iterator + while (i.hasNext) { + val (k, v) = i.next() + t = RB.update(t, k, v, overwrite = true) + } + new TreeMap[K, V](t) + } + + def newBuilder[K, V](implicit ordering: Ordering[K]): ReusableBuilder[(K, V), TreeMap[K, V]] = new TreeMapBuilder[K, V] + + private class TreeMapBuilder[K, V](implicit ordering: Ordering[K]) + extends RB.MapHelper[K, V] + with ReusableBuilder[(K, V), TreeMap[K, V]] { + type Tree = RB.Tree[K, V] + private var tree:Tree = null + + def addOne(elem: (K, V)): this.type = { + tree = mutableUpd(tree, elem._1, elem._2) + this + } + private object adder extends AbstractFunction2[K, V, Unit] { + // we cache tree to avoid the outer access to tree + // in the hot path (apply) + private[this] var accumulator: Tree = null + def addForEach(hasForEach: collection.Map[K, V]): Unit = { + accumulator = tree + hasForEach.foreachEntry(this) + tree = accumulator + // be friendly to GC + accumulator = null + } + + override def apply(key: K, value: V): Unit = { + accumulator = mutableUpd(accumulator, key, value) + } + } + + override def addAll(xs: IterableOnce[(K, V)]^): this.type = { + xs match { + // TODO consider writing a mutable-safe union for TreeSet/TreeMap builder ++= + // for the moment we have to force immutability before the union + // which will waste some time and space + // calling `beforePublish` makes `tree` immutable + case ts: TreeMap[K, V] if ts.ordering == ordering => + if (tree eq null) tree = ts.tree0 + else tree = RB.union(beforePublish(tree), ts.tree0) + case that: collection.Map[K, V] => + //add avoiding creation of tuples + adder.addForEach(that) + case _ => + super.addAll(xs) + } + this + } + + override def clear(): Unit = { + tree = null + } + + override def result(): TreeMap[K, V] = new TreeMap[K, V](beforePublish(tree)) + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala b/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala new file mode 100644 index 000000000000..fe194fa9de5a --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala @@ -0,0 +1,650 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
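`concat` above has a structural fast path: when both operands are `TreeMap`s with the same ordering, the merge goes through `RB.union` on the two red-black trees instead of updating entry by entry. The observable behavior is the usual right-biased merge:

```scala
import scala.collection.immutable.TreeMap

val a = TreeMap(1 -> "a", 2 -> "b")
val b = TreeMap(2 -> "B", 3 -> "c")
a ++ b // TreeMap(1 -> a, 2 -> B, 3 -> c): right-hand values win on key collisions
```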
+ */ + +package scala +package collection +package immutable + +import scala.annotation.tailrec +import language.experimental.captureChecking + +/** This class implements an immutable map that preserves order using + * a hash map for the key to value mapping to provide efficient lookup, + * and a tree for the ordering of the keys to provide efficient + * insertion/modification order traversal and destructuring. + * + * By default insertion order (`TreeSeqMap.OrderBy.Insertion`) + * is used, but modification order (`TreeSeqMap.OrderBy.Modification`) + * can be used instead if so specified at creation. + * + * The `orderingBy(orderBy: TreeSeqMap.OrderBy): TreeSeqMap[K, V]` method + * can be used to switch to the specified ordering for the returned map. + * + * A key can be manually refreshed (i.e. placed at the end) via the + * `refresh(key: K): TreeSeqMap[K, V]` method (regardless of the ordering in + * use). + * + * Internally, an ordinal counter is increased for each insertion/modification + * and then the current ordinal is used as key in the tree map. After 2^32^ + * insertions/modifications the entire map is copied (thus resetting the ordinal + * counter). + * + * @tparam K the type of the keys contained in this map. + * @tparam V the type of the values associated with the keys in this map. + * @define coll immutable tree seq map + * @define Coll `immutable.TreeSeqMap` + */ +final class TreeSeqMap[K, +V] private ( + private val ordering: TreeSeqMap.Ordering[K], + private val mapping: TreeSeqMap.Mapping[K, V], + private val ordinal: Int, + val orderedBy: TreeSeqMap.OrderBy) + extends AbstractMap[K, V] + with SeqMap[K, V] + with MapOps[K, V, TreeSeqMap, TreeSeqMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, TreeSeqMap[K, V]] + with StrictOptimizedMapOps[K, V, TreeSeqMap, TreeSeqMap[K, V]] + with MapFactoryDefaults[K, V, TreeSeqMap, Iterable] { + + import TreeSeqMap._ + + override protected[this] def className: String = "TreeSeqMap" + + override def mapFactory: MapFactory[TreeSeqMap] = TreeSeqMap + + override val size = mapping.size + + override def knownSize: Int = size + + override def isEmpty = size == 0 + + /* + // This should have been overridden in 2.13.0 but wasn't so it will have to wait since it is not forwards compatible + // Now handled in inherited method from scala.collection.MapFactoryDefaults instead. + override def empty = TreeSeqMap.empty[K, V](orderedBy) + */ + + def orderingBy(orderBy: OrderBy): TreeSeqMap[K, V] = { + if (orderBy == this.orderedBy) this + else if (isEmpty) TreeSeqMap.empty(orderBy) + else new TreeSeqMap(ordering, mapping, ordinal, orderBy) + } + + def updated[V1 >: V](key: K, value: V1): TreeSeqMap[K, V1] = { + mapping.get(key) match { + case e if ordinal == -1 && (orderedBy == OrderBy.Modification || e.isEmpty) => + // Reinsert into fresh instance to restart ordinal counting, expensive but only done after 2^32 updates. + TreeSeqMap.empty[K, V1](orderedBy) ++ this + (key -> value) + case Some((o, _)) if orderedBy == OrderBy.Insertion => + new TreeSeqMap( + ordering.include(o, key), + mapping.updated[(Int, V1)](key, (o, value)), + ordinal, // Do not increment the ordinal since the key is already present, i.e. o <= ordinal. 
+ orderedBy) + case Some((o, _)) => + val o1 = increment(ordinal) + new TreeSeqMap( + ordering.exclude(o).append(o1, key), + mapping.updated[(Int, V1)](key, (o1, value)), + o1, + orderedBy) + case None => + val o1 = increment(ordinal) + new TreeSeqMap( + ordering.append(o1, key), + mapping.updated[(Int, V1)](key, (o1, value)), + o1, + orderedBy) + } + } + + def removed(key: K): TreeSeqMap[K, V] = { + mapping.get(key) match { + case Some((o, _)) => + new TreeSeqMap( + ordering.exclude(o), + mapping.removed(key), + ordinal, + orderedBy) + case None => + this + } + } + + def refresh(key: K): TreeSeqMap[K, V] = { + mapping.get(key) match { + case Some((o, _)) => + val o1 = increment(ordinal) + new TreeSeqMap( + ordering.exclude(o).append(o1, key), + mapping, + o1, + orderedBy) + case None => + this + } + } + + def get(key: K): Option[V] = mapping.get(key).map(value) + + def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] { + private[this] val iter = ordering.iterator + + override def hasNext: Boolean = iter.hasNext + + override def next(): (K, V) = binding(iter.next()) + } + + override def keysIterator: Iterator[K] = new AbstractIterator[K] { + private[this] val iter = ordering.iterator + + override def hasNext: Boolean = iter.hasNext + + override def next(): K = iter.next() + } + + override def valuesIterator: Iterator[V] = new AbstractIterator[V] { + private[this] val iter = ordering.iterator + + override def hasNext: Boolean = iter.hasNext + + override def next(): V = value(binding(iter.next())) + } + + override def contains(key: K): Boolean = mapping.contains(key) + + override def head: (K, V) = binding(ordering.head) + + override def headOption = ordering.headOption.map(binding) + + override def last: (K, V) = binding(ordering.last) + + override def lastOption: Option[(K, V)] = ordering.lastOption.map(binding) + + override def tail: TreeSeqMap[K, V] = { + val (head, tail) = ordering.headTail + new TreeSeqMap(tail, mapping.removed(head), ordinal, orderedBy) + } + + override def init: TreeSeqMap[K, V] = { + val (init, last) = ordering.initLast + new TreeSeqMap(init, mapping.removed(last), ordinal, orderedBy) + } + + override def slice(from: Int, until: Int): TreeSeqMap[K, V] = { + val sz = size + if (sz == 0 || from >= until) TreeSeqMap.empty[K, V](orderedBy) + else { + val sz = size + val f = if (from >= 0) from else 0 + val u = if (until <= sz) until else sz + val l = u - f + if (l <= 0) TreeSeqMap.empty[K, V](orderedBy) + else if (l > sz / 2) { + // Remove front and rear incrementally if majority of elements are to be kept + val (front, rest) = ordering.splitAt(f) + val (ong, rear) = rest.splitAt(l) + var mng = this.mapping + val frontIter = front.iterator + while (frontIter.hasNext) { + mng = mng - frontIter.next() + } + val rearIter = rear.iterator + while (rearIter.hasNext) { + mng = mng - rearIter.next() + } + new TreeSeqMap(ong, mng, ordinal, orderedBy) + } else { + // Populate with builder otherwise + val bdr = newBuilder[K, V](orderedBy) + val iter = ordering.iterator + var i = 0 + while (i < f) { + iter.next() + i += 1 + } + while (i < u) { + val k = iter.next() + bdr.addOne((k, mapping(k)._2)) + i += 1 + } + bdr.result() + } + } + } + + override def map[K2, V2](f: ((K, V)) => (K2, V2)): TreeSeqMap[K2, V2] = { + val bdr = newBuilder[K2, V2](orderedBy) + val iter = ordering.iterator + while (iter.hasNext) { + val k = iter.next() + val (_, v) = mapping(k) + val (k2, v2) = f((k, v)) + bdr.addOne((k2, v2)) + } + bdr.result() + } + + override def flatMap[K2, V2](f: ((K, V)) 
=> IterableOnce[(K2, V2)]^): TreeSeqMap[K2, V2] = { + val bdr = newBuilder[K2, V2](orderedBy) + val iter = ordering.iterator + while (iter.hasNext) { + val k = iter.next() + val (_, v) = mapping(k) + val jter = f((k, v)).iterator + while (jter.hasNext) { + val (k2, v2) = jter.next() + bdr.addOne((k2, v2)) + } + } + bdr.result() + } + + override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]): TreeSeqMap[K2, V2] = { + val bdr = newBuilder[K2, V2](orderedBy) + val iter = ordering.iterator + while (iter.hasNext) { + val k = iter.next() + val (_, v) = mapping(k) + pf.runWith({ case (k2, v2) => bdr.addOne((k2, v2)) })((k, v)) + } + bdr.result() + } + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): TreeSeqMap[K, V2] = { + var ong: Ordering[K] = ordering + var mng: Mapping[K, V2] = mapping + var ord = increment(ordinal) + val iter = suffix.iterator + while (iter.hasNext) { + val (k, v2) = iter.next() + mng.get(k) match { + case Some((o, v)) => + if (orderedBy == OrderBy.Insertion && v != v2) mng = mng.updated(k, (o, v2)) + else if (orderedBy == OrderBy.Modification) { + mng = mng.updated(k, (ord, v2)) + ong = ong.exclude(o).append(ord, k) + ord = increment(ord) + } + case None => + mng = mng.updated(k, (ord, v2)) + ong = ong.append(ord, k) + ord = increment(ord) + } + } + new TreeSeqMap[K, V2](ong, mng, ord, orderedBy) + } + + @`inline` private[this] def value(p: (_, V)) = p._2 + @`inline` private[this] def binding(k: K) = mapping(k).copy(_1 = k) +} +object TreeSeqMap extends MapFactory[TreeSeqMap] { + sealed trait OrderBy + object OrderBy { + case object Insertion extends OrderBy + case object Modification extends OrderBy + } + + private val EmptyByInsertion = new TreeSeqMap[Nothing, Nothing](Ordering.empty, HashMap.empty, 0, OrderBy.Insertion) + private val EmptyByModification = new TreeSeqMap[Nothing, Nothing](Ordering.empty, HashMap.empty, 0, OrderBy.Modification) + val Empty = EmptyByInsertion + def empty[K, V]: TreeSeqMap[K, V] = empty(OrderBy.Insertion) + def empty[K, V](orderBy: OrderBy): TreeSeqMap[K, V] = { + if (orderBy == OrderBy.Modification) EmptyByModification + else EmptyByInsertion + }.asInstanceOf[TreeSeqMap[K, V]] + + def from[K, V](it: collection.IterableOnce[(K, V)]^): TreeSeqMap[K, V] = + it match { + case om: TreeSeqMap[K, V] => om + case _ => (newBuilder[K, V] ++= it).result() + } + + @inline private def increment(ord: Int) = if (ord == Int.MaxValue) Int.MinValue else ord + 1 + + def newBuilder[K, V]: mutable.Builder[(K, V), TreeSeqMap[K, V]] = newBuilder(OrderBy.Insertion) + def newBuilder[K, V](orderedBy: OrderBy): mutable.Builder[(K, V), TreeSeqMap[K, V]] = new Builder[K, V](orderedBy) + + final class Builder[K, V](orderedBy: OrderBy) extends mutable.Builder[(K, V), TreeSeqMap[K, V]] { + private[this] val bdr = new MapBuilderImpl[K, (Int, V)] + private[this] var ong = Ordering.empty[K] + private[this] var ord = 0 + private[this] var aliased: TreeSeqMap[K, V] = _ + + override def addOne(elem: (K, V)): this.type = addOne(elem._1, elem._2) + def addOne(key: K, value: V): this.type = { + if (aliased ne null) { + aliased = aliased.updated(key, value) + } else { + bdr.getOrElse(key, null) match { + case (o, v) => + if (orderedBy == OrderBy.Insertion && v != value) bdr.addOne(key, (o, value)) + else if (orderedBy == OrderBy.Modification) { + bdr.addOne(key, (ord, value)) + ong = ong.exclude(o).appendInPlace(ord, key) + ord = increment(ord) + } + case null => + bdr.addOne(key, (ord, value)) + ong = ong.appendInPlace(ord, key) + ord = increment(ord) + 
} + } + this + } + + override def clear(): Unit = { + ong = Ordering.empty + ord = 0 + bdr.clear() + aliased = null + } + + override def result(): TreeSeqMap[K, V] = { + if (aliased eq null) { + aliased = new TreeSeqMap(ong, bdr.result(), ord, orderedBy) + } + aliased + } + } + + private type Mapping[K, +V] = Map[K, (Int, V)] + @annotation.unused + private val Mapping = Map + + /* The ordering implementation below is an adapted version of immutable.IntMap. */ + private[immutable] object Ordering { + import scala.collection.generic.BitOperations.Int._ + + @inline private[immutable] def toBinaryString(i: Int): String = s"$i/${i.toBinaryString}" + + def empty[T]: Ordering[T] = Zero + + def apply[T](elems: (Int, T)*): Ordering[T] = + elems.foldLeft(empty[T])((x, y) => x.include(y._1, y._2)) + + // Iterator over a non-empty Ordering. + final class Iterator[+V](it: Ordering[V]) { + // Basically this uses a simple stack to emulate traversal over the tree. However, + // because we know that Ints are at least 32 bits, we can have at most 32 Bins and + // one Tip sitting on the tree at any point. Therefore we know the maximum stack + // depth is 33. + private[this] var index = 0 + private[this] val buffer = new Array[AnyRef](33) + + private[this] def pop = { + index -= 1 + buffer(index).asInstanceOf[Ordering[V]] + } + + private[this] def push[V2 >: V](x: Ordering[V2]): Unit = { + buffer(index) = x.asInstanceOf[AnyRef] + index += 1 + } + + if (it != Zero) push(it) + + def hasNext = index != 0 + @tailrec + def next(): V = + pop match { + case Bin(_,_, Tip(_, v), right) => + push(right) + v + case Bin(_, _, left, right) => + push(right) + push(left) + next() + case Tip(_, v) => v + // This should never happen. We don't allow Ordering.Zero in subtrees of the Ordering + // and don't return an Ordering.Iterator for Ordering.Zero. + case Zero => throw new IllegalStateException("empty subtree not allowed") + } + } + + object Iterator { + val Empty = new Iterator[Nothing](Ordering.empty[Nothing]) + def empty[V]: Iterator[V] = Empty.asInstanceOf[Iterator[V]] + } + + case object Zero extends Ordering[Nothing] { + // Important! Without this equals method in place, an infinite + // loop from Map.equals => size => pattern-match-on-Zero => equals + // develops. Case objects and custom equality don't mix without + // careful handling.
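+    // Concretely (comment added for clarity, not in the original source): the
+    // override below answers structurally rather than by reference. Any other
+    // Ordering is non-empty and therefore unequal, while non-Ordering values
+    // fall back to the default case-object equality.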
+ override def equals(that: Any): Boolean = that match { + case _: this.type => true + case _: Ordering[_] => false // The only empty Ordering is Zero itself, which is matched above + case _ => super.equals(that) + } + protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = sb ++= s"${prefix}Ø" + } + + final case class Tip[+T](ord: Int, value: T) extends Ordering[T] { + def withValue[S](s: S) = + if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[Tip[S]] + else Tip(ord, s) + protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = sb ++= s"${prefix}Tip(${toBinaryString(ord)} -> $value)\n" + } + + final case class Bin[+T](prefix: Int, mask: Int, left: Ordering[T], var right: Ordering[T] @scala.annotation.unchecked.uncheckedVariance) extends Ordering[T] { + def bin[S](left: Ordering[S], right: Ordering[S]): Ordering[S] = { + if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[Bin[S]] + else Bin[S](prefix, mask, left, right) + } + protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = { + sb ++= s"${prefix}Bin(${toBinaryString(this.prefix)}:${toBinaryString(mask)})\n" + left.format(sb, subPrefix + "├── ", subPrefix + "│ ") + right.format(sb, subPrefix + "└── ", subPrefix + " ") + } + } + + private def branchMask(i: Int, j: Int) = highestOneBit(i ^ j) + + private def join[T](p1: Int, t1: Ordering[T], p2: Int, t2: Ordering[T]): Ordering[T] = { + val m = branchMask(p1, p2) + val p = mask(p1, m) + if (zero(p1, m)) Bin(p, m, t1, t2) + else Bin(p, m, t2, t1) + } + + private def bin[T](prefix: Int, mask: Int, left: Ordering[T], right: Ordering[T]): Ordering[T] = (left, right) match { + case (l, Zero) => l + case (Zero, r) => r + case (l, r) => Bin(prefix, mask, l, r) + } + } + + sealed abstract class Ordering[+T] { + import Ordering._ + import scala.annotation.tailrec + import scala.collection.generic.BitOperations.Int._ + + override final def toString: String = format + final def format: String = { + val sb = new StringBuilder + format(sb, "", "") + sb.toString() + } + protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit + + @tailrec + final def head: T = this match { + case Zero => throw new NoSuchElementException("head of empty map") + case Tip(k, v) => v + case Bin(_, _, l, _) => l.head + } + + @tailrec + final def headOption: Option[T] = this match { + case Zero => None + case Tip(_, v) => Some(v) + case Bin(_, _, l, _) => l.headOption + } + + @tailrec + final def last: T = this match { + case Zero => throw new NoSuchElementException("last of empty map") + case Tip(_, v) => v + case Bin(_, _, _, r) => r.last + } + + @tailrec + final def lastOption: Option[T] = this match { + case Zero => None + case Tip(_, v) => Some(v) + case Bin(_, _, _, r) => r.lastOption + } + + @tailrec + final def ordinal: Int = this match { + case Zero => 0 + case Tip(o, _) => o + case Bin(_, _, _, r) => r.ordinal + } + + final def tail: Ordering[T] = this match { + case Zero => throw new NoSuchElementException("tail of empty map") + case Tip(_, _) => Zero + case Bin(p, m, l, r) => bin(p, m, l.tail, r) + } + + final def headTail: (T, Ordering[T]) = this match { + case Zero => throw new NoSuchElementException("headTail of empty map") + case Tip(_, v) => (v, Zero) + case Bin(p, m, l, r) => + val (head, tail) = l.headTail + (head, bin(p, m, tail, r)) + } + + final def init: Ordering[T] = this match { + case Zero => throw new NoSuchElementException("init of empty map") + case Tip(_, _) => Zero + case Bin(p,
m, l, r) => + bin(p, m, l, r.init) + } + + final def initLast: (Ordering[T], T) = this match { + case Zero => throw new NoSuchElementException("initLast of empty map") + case Tip(_, v) => (Zero, v) + case Bin(p, m, l, r) => + val (init, last) = r.initLast + (bin(p, m, l, init), last) + } + + final def iterator: Iterator[T] = this match { + case Zero => Iterator.empty + case _ => new Iterator(this) + } + + final def include[S >: T](ordinal: Int, value: S): Ordering[S] = this match { + case Zero => + Tip(ordinal, value) + case Tip(o, _) => + if (ordinal == o) Tip(ordinal, value) + else join(ordinal, Tip(ordinal, value), o, this) + case Bin(p, m, l, r) => + if (!hasMatch(ordinal, p, m)) join(ordinal, Tip(ordinal, value), p, this) + else if (zero(ordinal, m)) Bin(p, m, l.include(ordinal, value), r) + else Bin(p, m, l, r.include(ordinal, value)) + } + + final def append[S >: T](ordinal: Int, value: S): Ordering[S] = this match { + case Zero => + Tip(ordinal, value) + case Tip(o, _) => + if (ordinal == o) Tip(ordinal, value) + else join(ordinal, Tip(ordinal, value), o, this) + case Bin(p, m, l, r) => + if (!hasMatch(ordinal, p, m)) join(ordinal, Tip(ordinal, value), p, this) + else if (zero(ordinal, m)) throw new IllegalArgumentException(s"Append called with ordinal out of range: $ordinal is not greater than current max ordinal ${this.ordinal}") + else Bin(p, m, l, r.append(ordinal, value)) + } + + @inline private[collection] final def appendInPlace[S >: T](ordinal: Int, value: S): Ordering[S] = appendInPlace1(null, ordinal, value) + private[collection] final def appendInPlace1[S >: T](parent: Bin[S], ordinal: Int, value: S): Ordering[S] = this match { + case Zero => + Tip(ordinal, value) + case Tip(o, _) if o >= ordinal => + throw new IllegalArgumentException(s"Append called with ordinal out of range: $ordinal is not greater than current max ordinal ${this.ordinal}") + case Tip(o, _) if parent == null => + join(ordinal, Tip(ordinal, value), o, this) + case Tip(o, _) => + parent.right = join(ordinal, Tip(ordinal, value), o, this) + parent + case b @ Bin(p, m, _, r) => + if (!hasMatch(ordinal, p, m)) { + val b2 = join(ordinal, Tip(ordinal, value), p, this) + if (parent != null) { + parent.right = b2 + parent + } else b2 + } else if (zero(ordinal, m)) throw new IllegalArgumentException(s"Append called with ordinal out of range: $ordinal is not greater than current max ordinal ${this.ordinal}") + else { + r.appendInPlace1(b, ordinal, value) + this + } + } + + final def exclude(ordinal: Int): Ordering[T] = this match { + case Zero => + Zero + case Tip(o, _) => + if (ordinal == o) Zero + else this + case Bin(p, m, l, r) => + if (!hasMatch(ordinal, p, m)) this + else if (zero(ordinal, m)) bin(p, m, l.exclude(ordinal), r) + else bin(p, m, l, r.exclude(ordinal)) + } + + final def splitAt(n: Int): (Ordering[T], Ordering[T]) = { + var rear: Ordering[T] = Ordering.empty[T] + var i = n + (modifyOrRemove { (o, v) => + i -= 1 + if (i >= 0) Some(v) + else { + rear = rear.appendInPlace(o, v) + None + } + }, rear) + } + + /** + * A combined transform and filter function. Returns an `Ordering` such that + * for each `(key, value)` mapping in this map, if `f(key, value) == None` + * the map contains no mapping for `key`, and if `f(key, value) == Some(x)` the + * map contains `(key, x)`. + * + * @tparam S The type of the values in the resulting `Ordering`. + * @param f The transforming function. + * @return The modified map.
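+   *
+   * A small usage sketch (illustrative, not part of the original source), using
+   * the `Ordering(ordinal -> value, ...)` factory defined above:
+   * {{{
+   *   val ord = Ordering(1 -> "a", 2 -> "b")
+   *   // keeps ordinal 1 (with a transformed value), removes ordinal 2
+   *   ord.modifyOrRemove((o, v) => if (o == 1) Some(v.toUpperCase) else None)
+   *   // result contains only the mapping 1 -> "A"
+   * }}}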
+ */ + final def modifyOrRemove[S](f: (Int, T) => Option[S]): Ordering[S] = this match { + case Zero => Zero + case Tip(key, value) => + f(key, value) match { + case None => Zero + case Some(value2) => + // hack to preserve sharing + if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[Ordering[S]] + else Tip(key, value2) + } + case Bin(prefix, mask, left, right) => + val l = left.modifyOrRemove(f) + val r = right.modifyOrRemove(f) + if ((left eq l) && (right eq r)) this.asInstanceOf[Ordering[S]] + else bin(prefix, mask, l, r) + } + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/TreeSet.scala b/tests/pos-special/stdlib/collection/immutable/TreeSet.scala new file mode 100644 index 000000000000..097341283719 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/TreeSet.scala @@ -0,0 +1,296 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable.ReusableBuilder +import scala.collection.immutable.{RedBlackTree => RB} +import scala.runtime.AbstractFunction1 +import language.experimental.captureChecking + +/** This class implements immutable sorted sets using a tree. + * + * @tparam A the type of the elements contained in this tree set + * @param ordering the implicit ordering used to compare objects of type `A` + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#red-black-trees "Scala's Collection Library overview"]] + * section on `Red-Black Trees` for more information. 
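+  *
+  * A small usage sketch (illustrative, not part of the original source):
+  * {{{
+  *   val set = TreeSet(3, 1, 2)
+  *   set.head          // 1: iteration follows the element ordering
+  *   set.range(1, 3)   // TreeSet(1, 2): `until` is exclusive
+  * }}}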
+ * + * @define Coll `immutable.TreeSet` + * @define coll immutable tree set + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +final class TreeSet[A] private[immutable] (private[immutable] val tree: RB.Tree[A, Any])(implicit val ordering: Ordering[A]) + extends AbstractSet[A] + with SortedSet[A] + with SortedSetOps[A, TreeSet, TreeSet[A]] + with StrictOptimizedSortedSetOps[A, TreeSet, TreeSet[A]] + with SortedSetFactoryDefaults[A, TreeSet, Set] + with DefaultSerializable { + + if (ordering eq null) throw new NullPointerException("ordering must not be null") + + def this()(implicit ordering: Ordering[A]) = this(null)(ordering) + + override def sortedIterableFactory = TreeSet + + private[this] def newSetOrSelf(t: RB.Tree[A, Any]) = if(t eq tree) this else new TreeSet[A](t) + + override def size: Int = RB.count(tree) + + override def isEmpty = size == 0 + + override def head: A = RB.smallest(tree).key + + override def last: A = RB.greatest(tree).key + + override def tail: TreeSet[A] = new TreeSet(RB.tail(tree)) + + override def init: TreeSet[A] = new TreeSet(RB.init(tree)) + + override def min[A1 >: A](implicit ord: Ordering[A1]): A = { + if ((ord eq ordering) && nonEmpty) { + head + } else { + super.min(ord) + } + } + + override def max[A1 >: A](implicit ord: Ordering[A1]): A = { + if ((ord eq ordering) && nonEmpty) { + last + } else { + super.max(ord) + } + } + + override def drop(n: Int): TreeSet[A] = { + if (n <= 0) this + else if (n >= size) empty + else new TreeSet(RB.drop(tree, n)) + } + + override def take(n: Int): TreeSet[A] = { + if (n <= 0) empty + else if (n >= size) this + else new TreeSet(RB.take(tree, n)) + } + + override def slice(from: Int, until: Int): TreeSet[A] = { + if (until <= from) empty + else if (from <= 0) take(until) + else if (until >= size) drop(from) + else new TreeSet(RB.slice(tree, from, until)) + } + + override def dropRight(n: Int): TreeSet[A] = take(size - math.max(n, 0)) + + override def takeRight(n: Int): TreeSet[A] = drop(size - math.max(n, 0)) + + private[this] def countWhile(p: A => Boolean): Int = { + var result = 0 + val it = iterator + while (it.hasNext && p(it.next())) result += 1 + result + } + override def dropWhile(p: A => Boolean): TreeSet[A] = drop(countWhile(p)) + + override def takeWhile(p: A => Boolean): TreeSet[A] = take(countWhile(p)) + + override def span(p: A => Boolean): (TreeSet[A], TreeSet[A]) = splitAt(countWhile(p)) + + override def foreach[U](f: A => U): Unit = RB.foreachKey(tree, f) + + override def minAfter(key: A): Option[A] = { + val v = RB.minAfter(tree, key) + if (v eq null) Option.empty else Some(v.key) + } + + override def maxBefore(key: A): Option[A] = { + val v = RB.maxBefore(tree, key) + if (v eq null) Option.empty else Some(v.key) + } + + def iterator: Iterator[A] = RB.keysIterator(tree) + + def iteratorFrom(start: A): Iterator[A] = RB.keysIterator(tree, Some(start)) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Tree[A, Any] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree, _.left, _.right, _.key.asInstanceOf[Double]) + case _ => 
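+        // Illustrative comment (not in the original source): element types without a
+        // dedicated primitive shape fall back to a boxed AnyRef tree stepper, which
+        // `parUnbox` then adapts to the requested stepper type.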
shape.parUnbox(AnyBinaryTreeStepper.from[A, T](size, tree, _.left, _.right, _.key)) + } + s.asInstanceOf[S with EfficientSplit] + } + + /** Checks if this set contains element `elem`. + * + * @param elem the element to check for membership. + * @return `true` iff `elem` is contained in this set. + */ + def contains(elem: A): Boolean = RB.contains(tree, elem) + + override def range(from: A, until: A): TreeSet[A] = newSetOrSelf(RB.range(tree, from, until)) + + def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = newSetOrSelf(RB.rangeImpl(tree, from, until)) + + /** Creates a new `TreeSet` with the entry added. + * + * @param elem a new element to add. + * @return a new $coll containing `elem` and all the elements of this $coll. + */ + def incl(elem: A): TreeSet[A] = + newSetOrSelf(RB.update(tree, elem, null, overwrite = false)) + + /** Creates a new `TreeSet` with the entry removed. + * + * @param elem the element to remove. + * @return a new $coll containing all the elements of this $coll except `elem`. + */ + def excl(elem: A): TreeSet[A] = + newSetOrSelf(RB.delete(tree, elem)) + + override def concat(that: collection.IterableOnce[A]): TreeSet[A] = { + val t = that match { + case ts: TreeSet[A] if ordering == ts.ordering => + RB.union(tree, ts.tree) + case _ => + val it = that.iterator + var t = tree + while (it.hasNext) t = RB.update(t, it.next(), null, overwrite = false) + t + } + newSetOrSelf(t) + } + + override def removedAll(that: IterableOnce[A]): TreeSet[A] = that match { + case ts: TreeSet[A] if ordering == ts.ordering => + newSetOrSelf(RB.difference(tree, ts.tree)) + case _ => + //TODO add an implementation of a mutable subtractor similar to TreeMap + //but at least this doesn't create a TreeSet for each iteration + object sub extends AbstractFunction1[A, Unit] { + var currentTree = tree + override def apply(k: A): Unit = { + currentTree = RB.delete(currentTree, k) + } + } + that.iterator.foreach(sub) + newSetOrSelf(sub.currentTree) + } + + override def intersect(that: collection.Set[A]): TreeSet[A] = that match { + case ts: TreeSet[A] if ordering == ts.ordering => + newSetOrSelf(RB.intersect(tree, ts.tree)) + case _ => + super.intersect(that) + } + + override def diff(that: collection.Set[A]): TreeSet[A] = that match { + case ts: TreeSet[A] if ordering == ts.ordering => + newSetOrSelf(RB.difference(tree, ts.tree)) + case _ => + super.diff(that) + } + + override def filter(f: A => Boolean): TreeSet[A] = newSetOrSelf(RB.filterEntries[A, Any](tree, {(k, _) => f(k)})) + + override def partition(p: A => Boolean): (TreeSet[A], TreeSet[A]) = { + val (l, r) = RB.partitionEntries(tree, {(a:A, _: Any) => p(a)}) + (newSetOrSelf(l), newSetOrSelf(r)) + } + + override def equals(obj: Any): Boolean = obj match { + case that: TreeSet[A @unchecked] if ordering == that.ordering => RB.keysEqual(tree, that.tree) + case _ => super.equals(obj) + } + + override protected[this] def className = "TreeSet" +} + +/** + * $factoryInfo + * + * @define Coll `immutable.TreeSet` + * @define coll immutable tree set + */ +@SerialVersionUID(3L) +object TreeSet extends SortedIterableFactory[TreeSet] { + + def empty[A: Ordering]: TreeSet[A] = new TreeSet[A] + + def from[E](it: scala.collection.IterableOnce[E]^)(implicit ordering: Ordering[E]): TreeSet[E] = + it match { + case ts: TreeSet[E] if ordering == ts.ordering => ts + case ss: scala.collection.SortedSet[E] if ordering == ss.ordering => + new TreeSet[E](RB.fromOrderedKeys(ss.iterator, ss.size)) + case r: Range if (ordering eq Ordering.Int) ||
(Ordering.Int isReverseOf ordering) => + val it = if((ordering eq Ordering.Int) == (r.step > 0)) r.iterator else r.reverseIterator + val tree = RB.fromOrderedKeys(it.asInstanceOf[Iterator[E]], r.size) + // The cast is needed to compile with Dotty: + // Dotty doesn't infer that E =:= Int, since instantiation of covariant GADTs is unsound + new TreeSet[E](tree) + case _ => + var t: RB.Tree[E, Null] = null + val i = it.iterator + while (i.hasNext) t = RB.update(t, i.next(), null, overwrite = false) + new TreeSet[E](t) + } + + def newBuilder[A](implicit ordering: Ordering[A]): ReusableBuilder[A, TreeSet[A]] = new TreeSetBuilder[A] + private class TreeSetBuilder[A](implicit ordering: Ordering[A]) + extends RB.SetHelper[A] + with ReusableBuilder[A, TreeSet[A]] { + type Tree = RB.Tree[A, Any] + private [this] var tree:RB.Tree[A, Any] = null + + override def addOne(elem: A): this.type = { + tree = mutableUpd(tree, elem) + this + } + + override def addAll(xs: IterableOnce[A]^): this.type = { + xs match { + // TODO consider writing a mutable-safe union for TreeSet/TreeMap builder ++= + // for the moment we have to force immutability before the union + // which will waste some time and space + // calling `beforePublish` makes `tree` immutable + case ts: TreeSet[A] if ts.ordering == ordering => + if (tree eq null) tree = ts.tree + else tree = RB.union(beforePublish(tree), ts.tree)(ordering) + case ts: TreeMap[A @unchecked, _] if ts.ordering == ordering => + if (tree eq null) tree = ts.tree0 + else tree = RB.union(beforePublish(tree), ts.tree0)(ordering) + case _ => + super.addAll(xs) + } + this + } + + override def clear(): Unit = { + tree = null + } + + override def result(): TreeSet[A] = new TreeSet[A](beforePublish(tree))(ordering) + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/Vector.scala b/tests/pos-special/stdlib/collection/immutable/Vector.scala new file mode 100644 index 000000000000..d584d4a446af --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/Vector.scala @@ -0,0 +1,2475 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection +package immutable + +import java.lang.Math.{abs, max => mmax, min => mmin} +import java.util.Arrays.{copyOf, copyOfRange} +import java.util.{Arrays, Spliterator} + +import scala.annotation.switch +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.immutable.VectorInline._ +import scala.collection.immutable.VectorStatics._ +import scala.collection.mutable.ReusableBuilder +import language.experimental.captureChecking + + +/** $factoryInfo + * @define Coll `Vector` + * @define coll vector + */ +@SerialVersionUID(3L) +object Vector extends StrictOptimizedSeqFactory[Vector] { + + def empty[A]: Vector[A] = Vector0 + + def from[E](it: collection.IterableOnce[E]^): Vector[E] = + it match { + case v: Vector[E] => v + case _ => + val knownSize = it.knownSize + if (knownSize == 0) empty[E] + else if (knownSize > 0 && knownSize <= WIDTH) { + val a1: Arr1 = it match { + case as: ArraySeq.ofRef[_] if as.elemTag.runtimeClass == classOf[AnyRef] => + as.unsafeArray.asInstanceOf[Arr1] + case it: Iterable[E] => + val a1 = new Arr1(knownSize) + it.copyToArray(a1.asInstanceOf[Array[Any]]) + a1 + case _ => + val a1 = new Arr1(knownSize) + it.iterator.copyToArray(a1.asInstanceOf[Array[Any]]) + a1.asInstanceOf[Arr1] + } + new Vector1[E](a1) + } else { + (newBuilder ++= it).result() + } + } + + def newBuilder[A]: ReusableBuilder[A, Vector[A]] = new VectorBuilder[A] + + /** Create a Vector with the same element at each index. + * + * Unlike `fill`, which takes a by-name argument for the value and can thereby + * compute different values for each index, this method guarantees that all + * elements are identical. This allows sparse allocation in O(log n) time and space. + */ + private[collection] def fillSparse[A](n: Int)(elem: A): Vector[A] = { + //TODO Make public; this method is private for now because it is not forward binary compatible + if(n <= 0) Vector0 + else { + val b = new VectorBuilder[A] + b.initSparse(n, elem) + b.result() + } + } + + private val defaultApplyPreferredMaxLength: Int = + try System.getProperty("scala.collection.immutable.Vector.defaultApplyPreferredMaxLength", + "250").toInt + catch { + case _: SecurityException => 250 + } + + private val emptyIterator = new NewVectorIterator(Vector0, 0, 0) +} + + +/** Vector is a general-purpose, immutable data structure. It provides random access and updates + * in O(log n) time, as well as very fast append/prepend/tail/init (amortized O(1), worst case O(log n)). + * Because vectors strike a good balance between fast random selections and fast random functional updates, + * they are currently the default implementation of immutable indexed sequences. + * + * Vectors are implemented by radix-balanced finger trees of width 32. There is a separate subclass + * for each level (0 to 6, with 0 being the empty vector and 6 a tree with a maximum width of 64 at the + * top level). + * + * Tree balancing: + * - Only the first dimension of an array may have a size < WIDTH + * - In a `data` (central) array the first dimension may be up to WIDTH-2 long, in `prefix1` and `suffix1` up + * to WIDTH, and in other `prefix` and `suffix` arrays up to WIDTH-1 + * - `prefix1` and `suffix1` are never empty + * - Balancing does not cross the main data array (i.e. prepending never touches the suffix and appending never touches + * the prefix). 
The level is increased/decreased when the affected side plus main data is already full/empty + * - All arrays are left-aligned and truncated + * + * In addition to the data slices (`prefix1`, `prefix2`, ..., `dataN`, ..., `suffix2`, `suffix1`) we store a running + * count of elements after each prefix for more efficient indexing without having to dereference all prefix arrays. + */ +sealed abstract class Vector[+A] private[immutable] (private[immutable] final val prefix1: Arr1) + extends AbstractSeq[A] + with IndexedSeq[A] + with IndexedSeqOps[A, Vector, Vector[A]] + with StrictOptimizedSeqOps[A, Vector, Vector[A]] + with IterableFactoryDefaults[A, Vector] + with DefaultSerializable { + + override def iterableFactory: SeqFactory[Vector] = Vector + + override final def length: Int = + if(this.isInstanceOf[BigVector[_]]) this.asInstanceOf[BigVector[_]].length0 + else prefix1.length + + override final def iterator: Iterator[A] = + if(this.isInstanceOf[Vector0.type]) Vector.emptyIterator + else new NewVectorIterator(this, length, vectorSliceCount) + + override final protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Vector[A] = { + var i = 0 + val len = prefix1.length + while (i != len) { + if (pred(prefix1(i).asInstanceOf[A]) == isFlipped) { + // each 1 bit indicates that index passes the filter. + // all indices < i are also assumed to pass the filter + var bitmap = 0 + var j = i + 1 + while (j < len) { + if (pred(prefix1(j).asInstanceOf[A]) != isFlipped) { + bitmap |= (1 << j) + } + j += 1 + } + val newLen = i + java.lang.Integer.bitCount(bitmap) + + if(this.isInstanceOf[BigVector[_]]) { + val b = new VectorBuilder[A] + var k = 0 + while(k < i) { + b.addOne(prefix1(k).asInstanceOf[A]) + k += 1 + } + k = i + 1 + while (i != newLen) { + if (((1 << k) & bitmap) != 0) { + b.addOne(prefix1(k).asInstanceOf[A]) + i += 1 + } + k += 1 + } + this.asInstanceOf[BigVector[A]].foreachRest { v => if(pred(v) != isFlipped) b.addOne(v) } + return b.result() + } else { + if (newLen == 0) return Vector0 + val newData = new Array[AnyRef](newLen) + System.arraycopy(prefix1, 0, newData, 0, i) + var k = i + 1 + while (i != newLen) { + if (((1 << k) & bitmap) != 0) { + newData(i) = prefix1(k) + i += 1 + } + k += 1 + } + return new Vector1[A](newData) + } + } + i += 1 + } + if(this.isInstanceOf[BigVector[_]]) { + val b = new VectorBuilder[A] + b.initFrom(prefix1) + this.asInstanceOf[BigVector[A]].foreachRest { v => if(pred(v) != isFlipped) b.addOne(v) } + b.result() + } else this + } + + // Dummy overrides to refine result types for binary compatibility: + override def updated[B >: A](index: Int, elem: B): Vector[B] = super.updated(index, elem) + override def appended[B >: A](elem: B): Vector[B] = super.appended(elem) + override def prepended[B >: A](elem: B): Vector[B] = super.prepended(elem) + override def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): Vector[B] = { + val k = prefix.knownSize + if (k == 0) this + else if (k < 0) super.prependedAll(prefix) + else prependedAll0(prefix, k) + } + + override final def appendedAll[B >: A](suffix: collection.IterableOnce[B]^): Vector[B] = { + val k = suffix.knownSize + if (k == 0) this + else if (k < 0) super.appendedAll(suffix) + else appendedAll0(suffix, k) + } + + protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = { + // k >= 0, k = prefix.knownSize + val tinyAppendLimit = 4 + vectorSliceCount + if (k < tinyAppendLimit /*|| k < (this.size >>> Log2ConcatFaster)*/) { + var v: 
Vector[B] = this + val it = IndexedSeq.from(prefix).reverseIterator + while (it.hasNext) v = it.next() +: v + v + } else if (this.size < (k >>> Log2ConcatFaster) && prefix.isInstanceOf[Vector[_]]) { + var v = prefix.asInstanceOf[Vector[B]] + val it = this.iterator + while (it.hasNext) v = v :+ it.next() + v + } else if (k < this.size - AlignToFaster) { + new VectorBuilder[B].alignTo(k, this).addAll(prefix).addAll(this).result() + } else super.prependedAll(prefix) + } + + protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { + // k >= 0, k = suffix.knownSize + val tinyAppendLimit = 4 + vectorSliceCount + if (k < tinyAppendLimit) { + var v: Vector[B] = this + suffix match { + case it: Iterable[_] => it.asInstanceOf[Iterable[B]].foreach(x => v = v.appended(x)) + case _ => suffix.iterator.foreach(x => v = v.appended(x)) + } + v + } else if (this.size < (k >>> Log2ConcatFaster) && suffix.isInstanceOf[Vector[_]]) { + var v = suffix.asInstanceOf[Vector[B]] + val ri = this.reverseIterator + while (ri.hasNext) v = v.prepended(ri.next()) + v + } else if (this.size < k - AlignToFaster && suffix.isInstanceOf[Vector[_]]) { + val v = suffix.asInstanceOf[Vector[B]] + new VectorBuilder[B].alignTo(this.size, v).addAll(this).addAll(v).result() + } else new VectorBuilder[B].initFrom(this).addAll(suffix).result() + } + + override def className = "Vector" + + @inline override final def take(n: Int): Vector[A] = slice(0, n) + @inline override final def drop(n: Int): Vector[A] = slice(n, length) + @inline override final def takeRight(n: Int): Vector[A] = slice(length - mmax(n, 0), length) + @inline override final def dropRight(n: Int): Vector[A] = slice(0, length - mmax(n, 0)) + override def tail: Vector[A] = slice(1, length) + override def init: Vector[A] = slice(0, length-1) + + /** Like slice but parameters must be 0 <= lo < hi < length */ + protected[this] def slice0(lo: Int, hi: Int): Vector[A] + + /** Number of slices */ + protected[immutable] def vectorSliceCount: Int + /** Slice at index */ + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] + /** Length of all slices up to and including index */ + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int + + override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = iterator.copyToArray(xs, start, len) + + override def toVector: Vector[A] = this + + override protected def applyPreferredMaxLength: Int = Vector.defaultApplyPreferredMaxLength + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + val s = shape.shape match { + case StepperShape.IntShape => new IntVectorStepper(iterator.asInstanceOf[NewVectorIterator[Int]]) + case StepperShape.LongShape => new LongVectorStepper(iterator.asInstanceOf[NewVectorIterator[Long]]) + case StepperShape.DoubleShape => new DoubleVectorStepper(iterator.asInstanceOf[NewVectorIterator[Double]]) + case _ => shape.parUnbox(new AnyVectorStepper[A](iterator.asInstanceOf[NewVectorIterator[A]])) + } + s.asInstanceOf[S with EfficientSplit] + } + + protected[this] def ioob(index: Int): IndexOutOfBoundsException = + new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${length-1})") + + override final def head: A = + if (prefix1.length == 0) throw new NoSuchElementException("empty.head") + else prefix1(0).asInstanceOf[A] + + override final def last: A = { + if(this.isInstanceOf[BigVector[_]]) { + val suffix = this.asInstanceOf[BigVector[_]].suffix1 + if(suffix.length == 0) throw 
new NoSuchElementException("empty.tail") + else suffix(suffix.length-1) + } else prefix1(prefix1.length-1) + }.asInstanceOf[A] + + override final def foreach[U](f: A => U): Unit = { + val c = vectorSliceCount + var i = 0 + while (i < c) { + foreachRec(vectorSliceDim(c, i) - 1, vectorSlice(i), f) + i += 1 + } + } + + // The following definitions are needed for binary compatibility with ParVector + private[collection] def startIndex: Int = 0 + private[collection] def endIndex: Int = length + private[collection] def initIterator[B >: A](s: VectorIterator[B]): Unit = + s.it = iterator.asInstanceOf[NewVectorIterator[B]] +} + + +/** This class only exists because we cannot override `slice` in `Vector` in a binary-compatible way */ +private sealed abstract class VectorImpl[+A](_prefix1: Arr1) extends Vector[A](_prefix1) { + + override final def slice(from: Int, until: Int): Vector[A] = { + val lo = mmax(from, 0) + val hi = mmin(until, length) + if (hi <= lo) Vector0 + else if (hi - lo == length) this + else slice0(lo, hi) + } +} + + +/** Vector with suffix and length fields; all Vector subclasses except Vector1 extend this */ +private sealed abstract class BigVector[+A](_prefix1: Arr1, private[immutable] val suffix1: Arr1, private[immutable] val length0: Int) extends VectorImpl[A](_prefix1) { + + protected[immutable] final def foreachRest[U](f: A => U): Unit = { + val c = vectorSliceCount + var i = 1 + while(i < c) { + foreachRec(vectorSliceDim(c, i)-1, vectorSlice(i), f) + i += 1 + } + } +} + + +/** Empty vector */ +private object Vector0 extends BigVector[Nothing](empty1, empty1, 0) { + + def apply(index: Int): Nothing = throw ioob(index) + + override def updated[B >: Nothing](index: Int, elem: B): Vector[B] = throw ioob(index) + + override def appended[B >: Nothing](elem: B): Vector[B] = new Vector1(wrap1(elem)) + + override def prepended[B >: Nothing](elem: B): Vector[B] = new Vector1(wrap1(elem)) + + override def map[B](f: Nothing => B): Vector[B] = this + + override def tail: Vector[Nothing] = throw new UnsupportedOperationException("empty.tail") + + override def init: Vector[Nothing] = throw new UnsupportedOperationException("empty.init") + + protected[this] def slice0(lo: Int, hi: Int): Vector[Nothing] = this + + protected[immutable] def vectorSliceCount: Int = 0 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = null + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = 0 + + override def equals(o: Any): Boolean = { + if(this eq o.asInstanceOf[AnyRef]) true + else o match { + case that: Vector[_] => false + case o => super.equals(o) + } + } + + override protected[this]def prependedAll0[B >: Nothing](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = + Vector.from(prefix) + + override protected[this]def appendedAll0[B >: Nothing](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = + Vector.from(suffix) + + override protected[this] def ioob(index: Int): IndexOutOfBoundsException = + new IndexOutOfBoundsException(s"$index is out of bounds (empty vector)") +} + +/** Flat ArraySeq-like structure */ +private final class Vector1[+A](_data1: Arr1) extends VectorImpl[A](_data1) { + + @inline def apply(index: Int): A = { + if(index >= 0 && index < prefix1.length) + prefix1(index).asInstanceOf[A] + else throw ioob(index) + } + + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < prefix1.length) + new Vector1(copyUpdate(prefix1, index, elem)) + else throw ioob(index) + } + + override def appended[B >: A](elem: 
B): Vector[B] = { + val len1 = prefix1.length + if(len1 < WIDTH) new Vector1(copyAppend1(prefix1, elem)) + else new Vector2(prefix1, WIDTH, empty2, wrap1(elem), WIDTH+1) + } + + override def prepended[B >: A](elem: B): Vector[B] = { + val len1 = prefix1.length + if(len1 < WIDTH) new Vector1(copyPrepend1(elem, prefix1)) + else new Vector2(wrap1(elem), 1, empty2, prefix1, len1+1) + } + + override def map[B](f: A => B): Vector[B] = new Vector1(mapElems1(prefix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = + new Vector1(copyOfRange(prefix1, lo, hi)) + + override def tail: Vector[A] = + if(prefix1.length == 1) Vector0 + else new Vector1(copyTail(prefix1)) + + override def init: Vector[A] = + if(prefix1.length == 1) Vector0 + else new Vector1(copyInit(prefix1)) + + protected[immutable] def vectorSliceCount: Int = 1 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = prefix1 + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = prefix1.length + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case data1b => new Vector1(data1b) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { + val data1b = append1IfSpace(prefix1, suffix) + if(data1b ne null) new Vector1(data1b) + else super.appendedAll0(suffix, k) + } +} + + +/** 2-dimensional radix-balanced finger tree */ +private final class Vector2[+A](_prefix1: Arr1, private[immutable] val len1: Int, + private[immutable] val data2: Arr2, + _suffix1: Arr1, + _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { + + @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, + data2: Arr2 = data2, + suffix1: Arr1 = suffix1, + length0: Int = length0) = + new Vector2(prefix1, len1, data2, suffix1, length0) + + @inline def apply(index: Int): A = { + if(index >= 0 && index < length0) { + val io = index - len1 + if(io >= 0) { + val i2 = io >>> BITS + val i1 = io & MASK + if(i2 < data2.length) data2(i2)(i1) + else suffix1(io & MASK) + } else prefix1(index) + }.asInstanceOf[A] else throw ioob(index) + } + + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < length0) { + if(index >= len1) { + val io = index - len1 + val i2 = io >>> BITS + val i1 = io & MASK + if(i2 < data2.length) copy(data2 = copyUpdate(data2, i2, i1, elem)) + else copy(suffix1 = copyUpdate(suffix1, i1, elem)) + } else { + copy(prefix1 = copyUpdate(prefix1, index, elem)) + } + } else throw ioob(index) + } + + override def appended[B >: A](elem: B): Vector[B] = { + if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) + else if(data2.length < WIDTH-2) copy(data2 = copyAppend(data2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) + else new Vector3(prefix1, len1, data2, WIDTH*(WIDTH-2) + len1, empty3, wrap2(suffix1), wrap1(elem), length0+1) + } + + override def prepended[B >: A](elem: B): Vector[B] = { + if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, length0 = length0+1) + else if(data2.length < WIDTH-2) copy(wrap1(elem), 1, copyPrepend(prefix1, data2), length0 = length0+1) + else new Vector3(wrap1(elem), 1, wrap2(prefix1), len1+1, empty3, data2, suffix1, length0+1) + } + + override def map[B](f: A => B): Vector[B] = + copy(prefix1 = mapElems1(prefix1, f), data2 = mapElems(2, data2, f), suffix1 = 
mapElems1(suffix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { + val b = new VectorSliceBuilder(lo, hi) + b.consider(1, prefix1) + b.consider(2, data2) + b.consider(1, suffix1) + b.result() + } + + override def tail: Vector[A] = + if(len1 > 1) copy(copyTail(prefix1), len1-1, length0 = length0-1) + else slice0(1, length0) + + override def init: Vector[A] = + if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) + else slice0(0, length0-1) + + protected[immutable] def vectorSliceCount: Int = 3 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { + case 0 => prefix1 + case 1 => data2 + case 2 => suffix1 + } + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { + case 0 => len1 + case 1 => length0 - suffix1.length + case 2 => length0 + } + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case prefix1b => + val diff = prefix1b.length - prefix1.length + copy(prefix1 = prefix1b, + len1 = len1 + diff, + length0 = length0 + diff, + ) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { + val suffix1b = append1IfSpace(suffix1, suffix) + if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + else super.appendedAll0(suffix, k) + } +} + + +/** 3-dimensional radix-balanced finger tree */ +private final class Vector3[+A](_prefix1: Arr1, private[immutable] val len1: Int, + private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, + private[immutable] val data3: Arr3, + private[immutable] val suffix2: Arr2, _suffix1: Arr1, + _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { + + @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, + prefix2: Arr2 = prefix2, len12: Int = len12, + data3: Arr3 = data3, + suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, + length0: Int = length0) = + new Vector3(prefix1, len1, prefix2, len12, data3, suffix2, suffix1, length0) + + @inline def apply(index: Int): A = { + if(index >= 0 && index < length0) { + val io = index - len12 + if(io >= 0) { + val i3 = io >>> BITS2 + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if(i3 < data3.length) data3(i3)(i2)(i1) + else if(i2 < suffix2.length) suffix2(i2)(i1) + else suffix1(i1) + } else if(index >= len1) { + val io = index - len1 + prefix2(io >>> BITS)(io & MASK) + } else prefix1(index) + }.asInstanceOf[A] else throw ioob(index) + } + + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < length0) { + if(index >= len12) { + val io = index - len12 + val i3 = io >>> BITS2 + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if (i3 < data3.length ) copy(data3 = copyUpdate(data3, i3, i2, i1, elem)) + else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) + else copy(suffix1 = copyUpdate(suffix1, i1, elem)) + } else if(index >= len1) { + val io = index - len1 + copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) + } else { + copy(prefix1 = copyUpdate(prefix1, index, elem)) + } + } else throw ioob(index) + } + + override def appended[B >: A](elem: B): Vector[B] = { + if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) + else if(suffix2.length < WIDTH-1) copy(suffix2 = 
copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) + else if(data3.length < WIDTH-2) copy(data3 = copyAppend(data3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else new Vector4(prefix1, len1, prefix2, len12, data3, (WIDTH-2)*WIDTH2 + len12, empty4, wrap3(copyAppend(suffix2, suffix1)), empty2, wrap1(elem), length0+1) + } + + override def prepended[B >: A](elem: B): Vector[B] = { + if (len1 < WIDTH ) copy(prefix1 = copyPrepend1(elem, prefix1), len1 = len1+1, len12 = len12+1, length0 = length0+1) + else if(len12 < WIDTH2 ) copy(prefix1 = wrap1(elem), len1 = 1, prefix2 = copyPrepend(prefix1, prefix2), len12 = len12+1, length0 = length0+1) + else if(data3.length < WIDTH-2) copy(prefix1 = wrap1(elem), len1 = 1, prefix2 = empty2, len12 = 1, data3 = copyPrepend(copyPrepend(prefix1, prefix2), data3), length0 = length0+1) + else new Vector4(wrap1(elem), 1, empty2, 1, wrap3(copyPrepend(prefix1, prefix2)), len12+1, empty4, data3, suffix2, suffix1, length0+1) + } + + override def map[B](f: A => B): Vector[B] = + copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), + data3 = mapElems(3, data3, f), + suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { + val b = new VectorSliceBuilder(lo, hi) + b.consider(1, prefix1) + b.consider(2, prefix2) + b.consider(3, data3) + b.consider(2, suffix2) + b.consider(1, suffix1) + b.result() + } + + override def tail: Vector[A] = + if(len1 > 1) copy(prefix1 = copyTail(prefix1), len1 = len1-1, len12 = len12-1, length0 = length0-1) + else slice0(1, length0) + + override def init: Vector[A] = + if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) + else slice0(0, length0-1) + + protected[immutable] def vectorSliceCount: Int = 5 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { + case 0 => prefix1 + case 1 => prefix2 + case 2 => data3 + case 3 => suffix2 + case 4 => suffix1 + } + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { + case 0 => len1 + case 1 => len12 + case 2 => len12 + data3.length*WIDTH2 + case 3 => length0 - suffix1.length + case 4 => length0 + } + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case prefix1b => + val diff = prefix1b.length - prefix1.length + copy(prefix1 = prefix1b, + len1 = len1 + diff, + len12 = len12 + diff, + length0 = length0 + diff, + ) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { + val suffix1b = append1IfSpace(suffix1, suffix) + if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + else super.appendedAll0(suffix, k) + } +} + + +/** 4-dimensional radix-balanced finger tree */ +private final class Vector4[+A](_prefix1: Arr1, private[immutable] val len1: Int, + private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, + private[immutable] val prefix3: Arr3, private[immutable] val len123: Int, + private[immutable] val data4: Arr4, + private[immutable] val suffix3: Arr3, private[immutable] val suffix2: Arr2, _suffix1: Arr1, + _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { + + @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, + prefix2: 
Arr2 = prefix2, len12: Int = len12, + prefix3: Arr3 = prefix3, len123: Int = len123, + data4: Arr4 = data4, + suffix3: Arr3 = suffix3, suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, + length0: Int = length0) = + new Vector4(prefix1, len1, prefix2, len12, prefix3, len123, data4, suffix3, suffix2, suffix1, length0) + + @inline def apply(index: Int): A = { + if(index >= 0 && index < length0) { + val io = index - len123 + if(io >= 0) { + val i4 = io >>> BITS3 + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if(i4 < data4.length) data4(i4)(i3)(i2)(i1) + else if(i3 < suffix3.length) suffix3(i3)(i2)(i1) + else if(i2 < suffix2.length) suffix2(i2)(i1) + else suffix1(i1) + } else if(index >= len12) { + val io = index - len12 + prefix3(io >>> BITS2)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len1) { + val io = index - len1 + prefix2(io >>> BITS)(io & MASK) + } else prefix1(index) + }.asInstanceOf[A] else throw ioob(index) + } + + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < length0) { + if(index >= len123) { + val io = index - len123 + val i4 = io >>> BITS3 + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if (i4 < data4.length ) copy(data4 = copyUpdate(data4, i4, i3, i2, i1, elem)) + else if(i3 < suffix3.length) copy(suffix3 = copyUpdate(suffix3, i3, i2, i1, elem)) + else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) + else copy(suffix1 = copyUpdate(suffix1, i1, elem)) + } else if(index >= len12) { + val io = index - len12 + copy(prefix3 = copyUpdate(prefix3, io >>> BITS2, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len1) { + val io = index - len1 + copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) + } else { + copy(prefix1 = copyUpdate(prefix1, index, elem)) + } + } else throw ioob(index) + } + + override def appended[B >: A](elem: B): Vector[B] = { + if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) + else if(suffix2.length < WIDTH-1) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix3.length < WIDTH-1) copy(suffix3 = copyAppend(suffix3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(data4.length < WIDTH-2) copy(data4 = copyAppend(data4, copyAppend(suffix3, copyAppend(suffix2, suffix1))), suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else new Vector5(prefix1, len1, prefix2, len12, prefix3, len123, data4, (WIDTH-2)*WIDTH3 + len123, empty5, wrap4(copyAppend(suffix3, copyAppend(suffix2, suffix1))), empty3, empty2, wrap1(elem), length0+1) + } + + override def prepended[B >: A](elem: B): Vector[B] = { + if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, len12 = len12+1, len123 = len123+1, length0 = length0+1) + else if(len12 < WIDTH2 ) copy(wrap1(elem), 1, copyPrepend(prefix1, prefix2), len12+1, len123 = len123+1, length0 = length0+1) + else if(len123 < WIDTH3 ) copy(wrap1(elem), 1, empty2, 1, copyPrepend(copyPrepend(prefix1, prefix2), prefix3), len123+1, length0 = length0+1) + else if(data4.length < WIDTH-2) copy(wrap1(elem), 1, empty2, 1, empty3, 1, copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), data4), length0 = length0+1) + else new Vector5(wrap1(elem), 1, empty2, 1, empty3, 1, wrap4(copyPrepend(copyPrepend(prefix1, prefix2), prefix3)), len123+1, empty5, data4, suffix3, suffix2, suffix1, 
length0+1) + } + + override def map[B](f: A => B): Vector[B] = + copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), prefix3 = mapElems(3, prefix3, f), + data4 = mapElems(4, data4, f), + suffix3 = mapElems(3, suffix3, f), suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { + val b = new VectorSliceBuilder(lo, hi) + b.consider(1, prefix1) + b.consider(2, prefix2) + b.consider(3, prefix3) + b.consider(4, data4) + b.consider(3, suffix3) + b.consider(2, suffix2) + b.consider(1, suffix1) + b.result() + } + + override def tail: Vector[A] = + if(len1 > 1) copy(copyTail(prefix1), len1-1, len12 = len12-1, len123 = len123-1, length0 = length0-1) + else slice0(1, length0) + + override def init: Vector[A] = + if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) + else slice0(0, length0-1) + + protected[immutable] def vectorSliceCount: Int = 7 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { + case 0 => prefix1 + case 1 => prefix2 + case 2 => prefix3 + case 3 => data4 + case 4 => suffix3 + case 5 => suffix2 + case 6 => suffix1 + } + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { + case 0 => len1 + case 1 => len12 + case 2 => len123 + case 3 => len123 + data4.length*WIDTH3 + case 4 => len123 + data4.length*WIDTH3 + suffix3.length*WIDTH2 + case 5 => length0 - suffix1.length + case 6 => length0 + } + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case prefix1b => + val diff = prefix1b.length - prefix1.length + copy(prefix1 = prefix1b, + len1 = len1 + diff, + len12 = len12 + diff, + len123 = len123 + diff, + length0 = length0 + diff, + ) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { + val suffix1b = append1IfSpace(suffix1, suffix) + if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + else super.appendedAll0(suffix, k) + } +} + + +/** 5-dimensional radix-balanced finger tree */ +private final class Vector5[+A](_prefix1: Arr1, private[immutable] val len1: Int, + private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, + private[immutable] val prefix3: Arr3, private[immutable] val len123: Int, + private[immutable] val prefix4: Arr4, private[immutable] val len1234: Int, + private[immutable] val data5: Arr5, + private[immutable] val suffix4: Arr4, private[immutable] val suffix3: Arr3, private[immutable] val suffix2: Arr2, _suffix1: Arr1, + _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { + + @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, + prefix2: Arr2 = prefix2, len12: Int = len12, + prefix3: Arr3 = prefix3, len123: Int = len123, + prefix4: Arr4 = prefix4, len1234: Int = len1234, + data5: Arr5 = data5, + suffix4: Arr4 = suffix4, suffix3: Arr3 = suffix3, suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, + length0: Int = length0) = + new Vector5(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data5, suffix4, suffix3, suffix2, suffix1, length0) + + @inline def apply(index: Int): A = { + if(index >= 0 && index < length0) { + val io = index - len1234 + if(io >= 0) { + val i5 = io >>> BITS4 + val i4 = (io >>> BITS3) & MASK + val i3 = (io >>> BITS2) & MASK + 
val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if(i5 < data5.length) data5(i5)(i4)(i3)(i2)(i1) + else if(i4 < suffix4.length) suffix4(i4)(i3)(i2)(i1) + else if(i3 < suffix3.length) suffix3(i3)(i2)(i1) + else if(i2 < suffix2.length) suffix2(i2)(i1) + else suffix1(i1) + } else if(index >= len123) { + val io = index - len123 + prefix4(io >>> BITS3)((io >>> BITS2) & MASK)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len12) { + val io = index - len12 + prefix3(io >>> BITS2)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len1) { + val io = index - len1 + prefix2(io >>> BITS)(io & MASK) + } else prefix1(index) + }.asInstanceOf[A] else throw ioob(index) + } + + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < length0) { + if(index >= len1234) { + val io = index - len1234 + val i5 = io >>> BITS4 + val i4 = (io >>> BITS3) & MASK + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if (i5 < data5.length ) copy(data5 = copyUpdate(data5, i5, i4, i3, i2, i1, elem)) + else if(i4 < suffix4.length) copy(suffix4 = copyUpdate(suffix4, i4, i3, i2, i1, elem)) + else if(i3 < suffix3.length) copy(suffix3 = copyUpdate(suffix3, i3, i2, i1, elem)) + else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) + else copy(suffix1 = copyUpdate(suffix1, i1, elem)) + } else if(index >= len123) { + val io = index - len123 + copy(prefix4 = copyUpdate(prefix4, io >>> BITS3, (io >>> BITS2) & MASK, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len12) { + val io = index - len12 + copy(prefix3 = copyUpdate(prefix3, io >>> BITS2, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len1) { + val io = index - len1 + copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) + } else { + copy(prefix1 = copyUpdate(prefix1, index, elem)) + } + } else throw ioob(index) + } + + override def appended[B >: A](elem: B): Vector[B] = { + if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) + else if(suffix2.length < WIDTH-1) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix3.length < WIDTH-1) copy(suffix3 = copyAppend(suffix3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix4.length < WIDTH-1) copy(suffix4 = copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1))), suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(data5.length < WIDTH-2) copy(data5 = copyAppend(data5, copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1)))), suffix4 = empty4, suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else new Vector6(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data5, (WIDTH-2)*WIDTH4 + len1234, empty6, wrap5(copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1)))), empty4, empty3, empty2, wrap1(elem), length0+1) + } + + override def prepended[B >: A](elem: B): Vector[B] = { + if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, len12 = len12+1, len123 = len123+1, len1234 = len1234+1, length0 = length0+1) + else if(len12 < WIDTH2 ) copy(wrap1(elem), 1, copyPrepend(prefix1, prefix2), len12+1, len123 = len123+1, len1234 = len1234+1, length0 = length0+1) + else if(len123 < WIDTH3 ) copy(wrap1(elem), 1, empty2, 1, copyPrepend(copyPrepend(prefix1, prefix2), prefix3), len123+1, len1234 = len1234+1, length0 = 
length0+1) + else if(len1234 < WIDTH4 ) copy(wrap1(elem), 1, empty2, 1, empty3, 1, copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), len1234+1, length0 = length0+1) + else if(data5.length < WIDTH-2) copy(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, copyPrepend(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), data5), length0 = length0+1) + else new Vector6(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, wrap5(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4)), len1234+1, empty6, data5, suffix4, suffix3, suffix2, suffix1, length0+1) + } + + override def map[B](f: A => B): Vector[B] = + copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), prefix3 = mapElems(3, prefix3, f), prefix4 = mapElems(4, prefix4, f), + data5 = mapElems(5, data5, f), + suffix4 = mapElems(4, suffix4, f), suffix3 = mapElems(3, suffix3, f), suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { + val b = new VectorSliceBuilder(lo, hi) + b.consider(1, prefix1) + b.consider(2, prefix2) + b.consider(3, prefix3) + b.consider(4, prefix4) + b.consider(5, data5) + b.consider(4, suffix4) + b.consider(3, suffix3) + b.consider(2, suffix2) + b.consider(1, suffix1) + b.result() + } + + override def tail: Vector[A] = + if(len1 > 1) copy(copyTail(prefix1), len1-1, len12 = len12-1, len123 = len123-1, len1234 = len1234-1, length0 = length0-1) + else slice0(1, length0) + + override def init: Vector[A] = + if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) + else slice0(0, length0-1) + + protected[immutable] def vectorSliceCount: Int = 9 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { + case 0 => prefix1 + case 1 => prefix2 + case 2 => prefix3 + case 3 => prefix4 + case 4 => data5 + case 5 => suffix4 + case 6 => suffix3 + case 7 => suffix2 + case 8 => suffix1 + } + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { + case 0 => len1 + case 1 => len12 + case 2 => len123 + case 3 => len1234 + case 4 => len1234 + data5.length*WIDTH4 + case 5 => len1234 + data5.length*WIDTH4 + suffix4.length*WIDTH3 + case 6 => len1234 + data5.length*WIDTH4 + suffix4.length*WIDTH3 + suffix3.length*WIDTH2 + case 7 => length0 - suffix1.length + case 8 => length0 + } + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case prefix1b => + val diff = prefix1b.length - prefix1.length + copy(prefix1 = prefix1b, + len1 = len1 + diff, + len12 = len12 + diff, + len123 = len123 + diff, + len1234 = len1234 + diff, + length0 = length0 + diff, + ) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { + val suffix1b = append1IfSpace(suffix1, suffix) + if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + else super.appendedAll0(suffix, k) + } +} + + +/** 6-dimensional radix-balanced finger tree */ +private final class Vector6[+A](_prefix1: Arr1, private[immutable] val len1: Int, + private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, + private[immutable] val prefix3: Arr3, private[immutable] val len123: Int, + private[immutable] val prefix4: Arr4, private[immutable] val len1234: Int, + private[immutable] val prefix5: Arr5, 
private[immutable] val len12345: Int, + private[immutable] val data6: Arr6, + private[immutable] val suffix5: Arr5, private[immutable] val suffix4: Arr4, private[immutable] val suffix3: Arr3, private[immutable] val suffix2: Arr2, _suffix1: Arr1, + _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { + + @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, + prefix2: Arr2 = prefix2, len12: Int = len12, + prefix3: Arr3 = prefix3, len123: Int = len123, + prefix4: Arr4 = prefix4, len1234: Int = len1234, + prefix5: Arr5 = prefix5, len12345: Int = len12345, + data6: Arr6 = data6, + suffix5: Arr5 = suffix5, suffix4: Arr4 = suffix4, suffix3: Arr3 = suffix3, suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, + length0: Int = length0) = + new Vector6(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, prefix5, len12345, data6, suffix5, suffix4, suffix3, suffix2, suffix1, length0) + + @inline def apply(index: Int): A = { + if(index >= 0 && index < length0) { + val io = index - len12345 + if(io >= 0) { + val i6 = io >>> BITS5 + val i5 = (io >>> BITS4) & MASK + val i4 = (io >>> BITS3) & MASK + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if(i6 < data6.length) data6(i6)(i5)(i4)(i3)(i2)(i1) + else if(i5 < suffix5.length) suffix5(i5)(i4)(i3)(i2)(i1) + else if(i4 < suffix4.length) suffix4(i4)(i3)(i2)(i1) + else if(i3 < suffix3.length) suffix3(i3)(i2)(i1) + else if(i2 < suffix2.length) suffix2(i2)(i1) + else suffix1(i1) + } else if(index >= len1234) { + val io = index - len1234 + prefix5(io >>> BITS4)((io >>> BITS3) & MASK)((io >>> BITS2) & MASK)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len123) { + val io = index - len123 + prefix4(io >>> BITS3)((io >>> BITS2) & MASK)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len12) { + val io = index - len12 + prefix3(io >>> BITS2)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len1) { + val io = index - len1 + prefix2(io >>> BITS)(io & MASK) + } else prefix1(index) + }.asInstanceOf[A] else throw ioob(index) + } + + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < length0) { + if(index >= len12345) { + val io = index - len12345 + val i6 = io >>> BITS5 + val i5 = (io >>> BITS4) & MASK + val i4 = (io >>> BITS3) & MASK + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if (i6 < data6.length ) copy(data6 = copyUpdate(data6, i6, i5, i4, i3, i2, i1, elem)) + else if(i5 < suffix5.length) copy(suffix5 = copyUpdate(suffix5, i5, i4, i3, i2, i1, elem)) + else if(i4 < suffix4.length) copy(suffix4 = copyUpdate(suffix4, i4, i3, i2, i1, elem)) + else if(i3 < suffix3.length) copy(suffix3 = copyUpdate(suffix3, i3, i2, i1, elem)) + else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) + else copy(suffix1 = copyUpdate(suffix1, i1, elem)) + } else if(index >= len1234) { + val io = index - len1234 + copy(prefix5 = copyUpdate(prefix5, io >>> BITS4, (io >>> BITS3) & MASK, (io >>> BITS2) & MASK, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len123) { + val io = index - len123 + copy(prefix4 = copyUpdate(prefix4, io >>> BITS3, (io >>> BITS2) & MASK, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len12) { + val io = index - len12 + copy(prefix3 = copyUpdate(prefix3, io >>> BITS2, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len1) { + val io = index - len1 + copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) + } else 
{ + copy(prefix1 = copyUpdate(prefix1, index, elem)) + } + } else throw ioob(index) + } + + override def appended[B >: A](elem: B): Vector[B] = { + if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) + else if(suffix2.length < WIDTH-1 ) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix3.length < WIDTH-1 ) copy(suffix3 = copyAppend(suffix3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix4.length < WIDTH-1 ) copy(suffix4 = copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1))), suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix5.length < WIDTH-1 ) copy(suffix5 = copyAppend(suffix5, copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1)))), suffix4 = empty4, suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(data6.length < LASTWIDTH-2) copy(data6 = copyAppend(data6, copyAppend(suffix5, copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1))))), suffix5 = empty5, suffix4 = empty4, suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else throw new IllegalArgumentException + } + + override def prepended[B >: A](elem: B): Vector[B] = { + if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, len12 = len12+1, len123 = len123+1, len1234 = len1234+1, len12345 = len12345+1, length0 = length0+1) + else if(len12 < WIDTH2 ) copy(wrap1(elem), 1, copyPrepend(prefix1, prefix2), len12+1, len123 = len123+1, len1234 = len1234+1, len12345 = len12345+1, length0 = length0+1) + else if(len123 < WIDTH3 ) copy(wrap1(elem), 1, empty2, 1, copyPrepend(copyPrepend(prefix1, prefix2), prefix3), len123+1, len1234 = len1234+1, len12345 = len12345+1, length0 = length0+1) + else if(len1234 < WIDTH4 ) copy(wrap1(elem), 1, empty2, 1, empty3, 1, copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), len1234+1, len12345 = len12345+1, length0 = length0+1) + else if(len12345 < WIDTH5 ) copy(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, copyPrepend(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), prefix5), len12345+1, length0 = length0+1) + else if(data6.length < LASTWIDTH-2) copy(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, empty5, 1, copyPrepend(copyPrepend(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), prefix5), data6), length0 = length0+1) + else throw new IllegalArgumentException + } + + override def map[B](f: A => B): Vector[B] = + copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), prefix3 = mapElems(3, prefix3, f), prefix4 = mapElems(4, prefix4, f), prefix5 = mapElems(5, prefix5, f), + data6 = mapElems(6, data6, f), + suffix5 = mapElems(5, suffix5, f), suffix4 = mapElems(4, suffix4, f), suffix3 = mapElems(3, suffix3, f), suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { + val b = new VectorSliceBuilder(lo, hi) + b.consider(1, prefix1) + b.consider(2, prefix2) + b.consider(3, prefix3) + b.consider(4, prefix4) + b.consider(5, prefix5) + b.consider(6, data6) + b.consider(5, suffix5) + b.consider(4, suffix4) + b.consider(3, suffix3) + b.consider(2, suffix2) + b.consider(1, suffix1) + b.result() + } + + override def tail: Vector[A] = + if(len1 > 1) copy(copyTail(prefix1), len1-1, len12 = len12-1, len123 = len123-1, len1234 = 
len1234-1, len12345 = len12345-1, length0 = length0-1) + else slice0(1, length0) + + override def init: Vector[A] = + if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) + else slice0(0, length0-1) + + protected[immutable] def vectorSliceCount: Int = 11 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { + case 0 => prefix1 + case 1 => prefix2 + case 2 => prefix3 + case 3 => prefix4 + case 4 => prefix5 + case 5 => data6 + case 6 => suffix5 + case 7 => suffix4 + case 8 => suffix3 + case 9 => suffix2 + case 10 => suffix1 + } + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { + case 0 => len1 + case 1 => len12 + case 2 => len123 + case 3 => len1234 + case 4 => len12345 + case 5 => len12345 + data6.length*WIDTH5 + case 6 => len12345 + data6.length*WIDTH5 + suffix5.length*WIDTH4 + case 7 => len12345 + data6.length*WIDTH5 + suffix5.length*WIDTH4 + suffix4.length*WIDTH3 + case 8 => len12345 + data6.length*WIDTH5 + suffix5.length*WIDTH4 + suffix4.length*WIDTH3 + suffix3.length*WIDTH2 + case 9 => length0 - suffix1.length + case 10 => length0 + } + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case prefix1b => + val diff = prefix1b.length - prefix1.length + copy(prefix1 = prefix1b, + len1 = len1 + diff, + len12 = len12 + diff, + len123 = len123 + diff, + len1234 = len1234 + diff, + len12345 = len12345 + diff, + length0 = length0 + diff, + ) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { + val suffix1b = append1IfSpace(suffix1, suffix) + if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + else super.appendedAll0(suffix, k) + } +} + + +/** Helper class for vector slicing. It is initialized with the validated start and end index, + * then the vector slices are added in succession with `consider`. No matter what the dimension + * of the originating vector is or where the cut is performed, this always results in a + * structure with the highest-dimensional data in the middle and fingers of decreasing dimension + * at both ends, which can be turned into a new vector with very little rebalancing. 
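+ *
+ * Illustrative sketch (editor's addition, not part of the original sources):
+ * slicing a `Vector5` feeds its nine slices to `consider` in order, mirroring
+ * `Vector5.slice0` above:
+ * {{{
+ *   val b = new VectorSliceBuilder(lo, hi)
+ *   b.consider(1, prefix1); b.consider(2, prefix2); b.consider(3, prefix3)
+ *   b.consider(4, prefix4); b.consider(5, data5); b.consider(4, suffix4)
+ *   b.consider(3, suffix3); b.consider(2, suffix2); b.consider(1, suffix1)
+ *   b.result()
+ * }}}
+ * Each `consider(n, a)` spans `a.length * (1 << (BITS*(n-1)))` elements; only
+ * the overlap with `[lo, hi)` reaches `addSlice`, which copies at most the two
+ * partially-cut arrays per level and reuses every fully-contained one.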
+ */ +private final class VectorSliceBuilder(lo: Int, hi: Int) { + //println(s"***** VectorSliceBuilder($lo, $hi)") + + private[this] val slices = new Array[Array[AnyRef]](11) + private[this] var len, pos, maxDim = 0 + + @inline private[this] def prefixIdx(n: Int) = n-1 + @inline private[this] def suffixIdx(n: Int) = 11-n + + def consider[T <: AnyRef](n: Int, a: Array[T]): Unit = { + //println(s"***** consider($n, /${a.length})") + val count = a.length * (1 << (BITS*(n-1))) + val lo0 = mmax(lo-pos, 0) + val hi0 = mmin(hi-pos, count) + if(hi0 > lo0) { + addSlice(n, a, lo0, hi0) + len += (hi0 - lo0) + } + pos += count + } + + private[this] def addSlice[T <: AnyRef](n: Int, a: Array[T], lo: Int, hi: Int): Unit = { + //println(s"***** addSlice($n, /${a.length}, $lo, $hi)") + if(n == 1) { + add(1, copyOrUse(a, lo, hi)) + } else { + val bitsN = BITS * (n-1) + val widthN = 1 << bitsN + val loN = lo >>> bitsN + val hiN = hi >>> bitsN + val loRest = lo & (widthN - 1) + val hiRest = hi & (widthN - 1) + //println(s"***** bitsN=$bitsN, loN=$loN, hiN=$hiN, loRest=$loRest, hiRest=$hiRest") + if(loRest == 0) { + if(hiRest == 0) { + add(n, copyOrUse(a, loN, hiN)) + } else { + if(hiN > loN) add(n, copyOrUse(a, loN, hiN)) + addSlice(n-1, a(hiN).asInstanceOf[Array[AnyRef]], 0, hiRest) + } + } else { + if(hiN == loN) { + addSlice(n-1, a(loN).asInstanceOf[Array[AnyRef]], loRest, hiRest) + } else { + addSlice(n-1, a(loN).asInstanceOf[Array[AnyRef]], loRest, widthN) + if(hiRest == 0) { + if(hiN > loN+1) add(n, copyOrUse(a, loN+1, hiN)) + } else { + if(hiN > loN+1) add(n, copyOrUse(a, loN+1, hiN)) + addSlice(n-1, a(hiN).asInstanceOf[Array[AnyRef]], 0, hiRest) + } + } + } + } + } + + private[this] def add[T <: AnyRef](n: Int, a: Array[T]): Unit = { + //println(s"***** add($n, /${a.length})") + val idx = + if(n <= maxDim) suffixIdx(n) + else { maxDim = n; prefixIdx(n) } + slices(idx) = a.asInstanceOf[Array[AnyRef]] + } + + def result[A](): Vector[A] = { + //println(s"***** result: $len, $maxDim") + if(len <= 32) { + if(len == 0) Vector0 + else { + val prefix1 = slices(prefixIdx(1)) + val suffix1 = slices(suffixIdx(1)) + //println(s"***** prefix1: ${if(prefix1 == null) "null" else prefix1.mkString("[", ",", "]")}, suffix1: ${if(suffix1 == null) "null" else suffix1.mkString("[", ",", "]")}") + val a: Arr1 = + if(prefix1 ne null) { + if(suffix1 ne null) concatArrays(prefix1, suffix1) + else prefix1 + } else if(suffix1 ne null) suffix1 + else { + val prefix2 = slices(prefixIdx(2)).asInstanceOf[Arr2] + if(prefix2 ne null) prefix2(0) + else { + val suffix2 = slices(suffixIdx(2)).asInstanceOf[Arr2] + suffix2(0) + } + } + new Vector1(a) + } + } else { + balancePrefix(1) + balanceSuffix(1) + var resultDim = maxDim + if(resultDim < 6) { + val pre = slices(prefixIdx(maxDim)) + val suf = slices(suffixIdx(maxDim)) + if((pre ne null) && (suf ne null)) { + // The highest-dimensional data consists of two slices: concatenate if they fit into the main data array, + // otherwise increase the dimension + if(pre.length + suf.length <= WIDTH-2) { + slices(prefixIdx(maxDim)) = concatArrays(pre, suf) + slices(suffixIdx(maxDim)) = null + } else resultDim += 1 + } else { + // A single highest-dimensional slice could have length WIDTH-1 if it came from a prefix or suffix but we + // only allow WIDTH-2 for the main data, so increase the dimension in this case + val one = if(pre ne null) pre else suf + if(one.length > WIDTH-2) resultDim += 1 + } + } + val prefix1 = slices(prefixIdx(1)) + val suffix1 = slices(suffixIdx(1)) + val len1 = 
prefix1.length + val res = (resultDim: @switch) match { + case 2 => + val data2 = dataOr(2, empty2) + new Vector2[A](prefix1, len1, data2, suffix1, len) + case 3 => + val prefix2 = prefixOr(2, empty2) + val data3 = dataOr(3, empty3) + val suffix2 = suffixOr(2, empty2) + val len12 = len1 + (prefix2.length * WIDTH) + new Vector3[A](prefix1, len1, prefix2, len12, data3, suffix2, suffix1, len) + case 4 => + val prefix2 = prefixOr(2, empty2) + val prefix3 = prefixOr(3, empty3) + val data4 = dataOr(4, empty4) + val suffix3 = suffixOr(3, empty3) + val suffix2 = suffixOr(2, empty2) + val len12 = len1 + (prefix2.length * WIDTH) + val len123 = len12 + (prefix3.length * WIDTH2) + new Vector4[A](prefix1, len1, prefix2, len12, prefix3, len123, data4, suffix3, suffix2, suffix1, len) + case 5 => + val prefix2 = prefixOr(2, empty2) + val prefix3 = prefixOr(3, empty3) + val prefix4 = prefixOr(4, empty4) + val data5 = dataOr(5, empty5) + val suffix4 = suffixOr(4, empty4) + val suffix3 = suffixOr(3, empty3) + val suffix2 = suffixOr(2, empty2) + val len12 = len1 + (prefix2.length * WIDTH) + val len123 = len12 + (prefix3.length * WIDTH2) + val len1234 = len123 + (prefix4.length * WIDTH3) + new Vector5[A](prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data5, suffix4, suffix3, suffix2, suffix1, len) + case 6 => + val prefix2 = prefixOr(2, empty2) + val prefix3 = prefixOr(3, empty3) + val prefix4 = prefixOr(4, empty4) + val prefix5 = prefixOr(5, empty5) + val data6 = dataOr(6, empty6) + val suffix5 = suffixOr(5, empty5) + val suffix4 = suffixOr(4, empty4) + val suffix3 = suffixOr(3, empty3) + val suffix2 = suffixOr(2, empty2) + val len12 = len1 + (prefix2.length * WIDTH) + val len123 = len12 + (prefix3.length * WIDTH2) + val len1234 = len123 + (prefix4.length * WIDTH3) + val len12345 = len1234 + (prefix5.length * WIDTH4) + new Vector6[A](prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, prefix5, len12345, data6, suffix5, suffix4, suffix3, suffix2, suffix1, len) + } + res + } + } + + @inline private[this] def prefixOr[T <: AnyRef](n: Int, a: Array[T]): Array[T] = { + val p = slices(prefixIdx(n)) + if(p ne null) p.asInstanceOf[Array[T]] else a + } + + @inline private[this] def suffixOr[T <: AnyRef](n: Int, a: Array[T]): Array[T] = { + val s = slices(suffixIdx(n)) + if(s ne null) s.asInstanceOf[Array[T]] else a + } + + @inline private[this] def dataOr[T <: AnyRef](n: Int, a: Array[T]): Array[T] = { + val p = slices(prefixIdx(n)) + if(p ne null) p.asInstanceOf[Array[T]] + else { + val s = slices(suffixIdx(n)) + if(s ne null) s.asInstanceOf[Array[T]] else a + } + } + + /** Ensure prefix is not empty */ + private[this] def balancePrefix(n: Int): Unit = { + if(slices(prefixIdx(n)) eq null) { + if(n == maxDim) { + slices(prefixIdx(n)) = slices(suffixIdx(n)) + slices(suffixIdx(n)) = null + } else { + balancePrefix(n+1) + val preN1 = slices(prefixIdx(n+1)).asInstanceOf[Array[Array[AnyRef]]] + //assert(preN1 ne null) + slices(prefixIdx(n)) = preN1(0) + if(preN1.length == 1) { + slices(prefixIdx(n+1)) = null + if((maxDim == n+1) && (slices(suffixIdx(n+1)) eq null)) maxDim = n + } else { + slices(prefixIdx(n+1)) = copyOfRange(preN1, 1, preN1.length).asInstanceOf[Array[AnyRef]] + } + } + } + } + + /** Ensure suffix is not empty */ + private[this] def balanceSuffix(n: Int): Unit = { + if(slices(suffixIdx(n)) eq null) { + if(n == maxDim) { + slices(suffixIdx(n)) = slices(prefixIdx(n)) + slices(prefixIdx(n)) = null + } else { + balanceSuffix(n+1) + val sufN1 = 
slices(suffixIdx(n+1)).asInstanceOf[Array[Array[AnyRef]]] + //assert(sufN1 ne null, s"n=$n, maxDim=$maxDim, slices=${slices.mkString(",")}") + slices(suffixIdx(n)) = sufN1(sufN1.length-1) + if(sufN1.length == 1) { + slices(suffixIdx(n+1)) = null + if((maxDim == n+1) && (slices(prefixIdx(n+1)) eq null)) maxDim = n + } else { + slices(suffixIdx(n+1)) = copyOfRange(sufN1, 0, sufN1.length-1).asInstanceOf[Array[AnyRef]] + } + } + } + } + + override def toString: String = + s"VectorSliceBuilder(lo=$lo, hi=$hi, len=$len, pos=$pos, maxDim=$maxDim)" + + private[immutable] def getSlices: Array[Array[AnyRef]] = slices +} + + +final class VectorBuilder[A] extends ReusableBuilder[A, Vector[A]] { + + private[this] var a6: Arr6 = _ + private[this] var a5: Arr5 = _ + private[this] var a4: Arr4 = _ + private[this] var a3: Arr3 = _ + private[this] var a2: Arr2 = _ + private[this] var a1: Arr1 = new Arr1(WIDTH) + private[this] var len1, lenRest, offset = 0 + private[this] var prefixIsRightAligned = false + private[this] var depth = 1 + + @inline private[this] final def setLen(i: Int): Unit = { + len1 = i & MASK + lenRest = i - len1 + } + + override def knownSize: Int = len1 + lenRest - offset + + @inline def size: Int = knownSize + @inline def isEmpty: Boolean = knownSize == 0 + @inline def nonEmpty: Boolean = knownSize != 0 + + def clear(): Unit = { + a6 = null + a5 = null + a4 = null + a3 = null + a2 = null + a1 = new Arr1(WIDTH) + len1 = 0 + lenRest = 0 + offset = 0 + prefixIsRightAligned = false + depth = 1 + } + + private[immutable] def initSparse(size: Int, elem: A): Unit = { + setLen(size) + Arrays.fill(a1, elem) + if(size > WIDTH) { + a2 = new Array(WIDTH) + Arrays.fill(a2.asInstanceOf[Array[AnyRef]], a1) + if(size > WIDTH2) { + a3 = new Array(WIDTH) + Arrays.fill(a3.asInstanceOf[Array[AnyRef]], a2) + if(size > WIDTH3) { + a4 = new Array(WIDTH) + Arrays.fill(a4.asInstanceOf[Array[AnyRef]], a3) + if(size > WIDTH4) { + a5 = new Array(WIDTH) + Arrays.fill(a5.asInstanceOf[Array[AnyRef]], a4) + if(size > WIDTH5) { + a6 = new Array(LASTWIDTH) + Arrays.fill(a6.asInstanceOf[Array[AnyRef]], a5) + depth = 6 + } else depth = 5 + } else depth = 4 + } else depth = 3 + } else depth = 2 + } else depth = 1 + } + + private[immutable] def initFrom(prefix1: Arr1): Unit = { + depth = 1 + setLen(prefix1.length) + a1 = copyOrUse(prefix1, 0, WIDTH) + if(len1 == 0 && lenRest > 0) { + // force advance() on next addition: + len1 = WIDTH + lenRest -= WIDTH + } + } + + private[immutable] def initFrom(v: Vector[_]): this.type = { + (v.vectorSliceCount: @switch) match { + case 0 => + case 1 => + val v1 = v.asInstanceOf[Vector1[_]] + depth = 1 + setLen(v1.prefix1.length) + a1 = copyOrUse(v1.prefix1, 0, WIDTH) + case 3 => + val v2 = v.asInstanceOf[Vector2[_]] + val d2 = v2.data2 + a1 = copyOrUse(v2.suffix1, 0, WIDTH) + depth = 2 + offset = WIDTH - v2.len1 + setLen(v2.length0 + offset) + a2 = new Arr2(WIDTH) + a2(0) = v2.prefix1 + System.arraycopy(d2, 0, a2, 1, d2.length) + a2(d2.length+1) = a1 + case 5 => + val v3 = v.asInstanceOf[Vector3[_]] + val d3 = v3.data3 + val s2 = v3.suffix2 + a1 = copyOrUse(v3.suffix1, 0, WIDTH) + depth = 3 + offset = WIDTH2 - v3.len12 + setLen(v3.length0 + offset) + a3 = new Arr3(WIDTH) + a3(0) = copyPrepend(v3.prefix1, v3.prefix2) + System.arraycopy(d3, 0, a3, 1, d3.length) + a2 = copyOf(s2, WIDTH) + a3(d3.length+1) = a2 + a2(s2.length) = a1 + case 7 => + val v4 = v.asInstanceOf[Vector4[_]] + val d4 = v4.data4 + val s3 = v4.suffix3 + val s2 = v4.suffix2 + a1 = copyOrUse(v4.suffix1, 0, WIDTH) + depth = 4 
+        offset = WIDTH3 - v4.len123
+        setLen(v4.length0 + offset)
+        a4 = new Arr4(WIDTH)
+        a4(0) = copyPrepend(copyPrepend(v4.prefix1, v4.prefix2), v4.prefix3)
+        System.arraycopy(d4, 0, a4, 1, d4.length)
+        a3 = copyOf(s3, WIDTH)
+        a2 = copyOf(s2, WIDTH)
+        a4(d4.length+1) = a3
+        a3(s3.length) = a2
+        a2(s2.length) = a1
+      case 9 =>
+        val v5 = v.asInstanceOf[Vector5[_]]
+        val d5 = v5.data5
+        val s4 = v5.suffix4
+        val s3 = v5.suffix3
+        val s2 = v5.suffix2
+        a1 = copyOrUse(v5.suffix1, 0, WIDTH)
+        depth = 5
+        offset = WIDTH4 - v5.len1234
+        setLen(v5.length0 + offset)
+        a5 = new Arr5(WIDTH)
+        a5(0) = copyPrepend(copyPrepend(copyPrepend(v5.prefix1, v5.prefix2), v5.prefix3), v5.prefix4)
+        System.arraycopy(d5, 0, a5, 1, d5.length)
+        a4 = copyOf(s4, WIDTH)
+        a3 = copyOf(s3, WIDTH)
+        a2 = copyOf(s2, WIDTH)
+        a5(d5.length+1) = a4
+        a4(s4.length) = a3
+        a3(s3.length) = a2
+        a2(s2.length) = a1
+      case 11 =>
+        val v6 = v.asInstanceOf[Vector6[_]]
+        val d6 = v6.data6
+        val s5 = v6.suffix5
+        val s4 = v6.suffix4
+        val s3 = v6.suffix3
+        val s2 = v6.suffix2
+        a1 = copyOrUse(v6.suffix1, 0, WIDTH)
+        depth = 6
+        offset = WIDTH5 - v6.len12345
+        setLen(v6.length0 + offset)
+        a6 = new Arr6(LASTWIDTH)
+        a6(0) = copyPrepend(copyPrepend(copyPrepend(copyPrepend(v6.prefix1, v6.prefix2), v6.prefix3), v6.prefix4), v6.prefix5)
+        System.arraycopy(d6, 0, a6, 1, d6.length)
+        a5 = copyOf(s5, WIDTH)
+        a4 = copyOf(s4, WIDTH)
+        a3 = copyOf(s3, WIDTH)
+        a2 = copyOf(s2, WIDTH)
+        a6(d6.length+1) = a5
+        a5(s5.length) = a4
+        a4(s4.length) = a3
+        a3(s3.length) = a2
+        a2(s2.length) = a1
+    }
+    if(len1 == 0 && lenRest > 0) {
+      // force advance() on next addition:
+      len1 = WIDTH
+      lenRest -= WIDTH
+    }
+    this
+  }
+
+  // TODO Make public; this method is only private for binary compatibility
+  private[collection] def alignTo(before: Int, bigVector: Vector[A]): this.type = {
+    if (len1 != 0 || lenRest != 0)
+      throw new UnsupportedOperationException("A non-empty VectorBuilder cannot be aligned retrospectively. Please call .clear() or use a new VectorBuilder.")
+    val (prefixLength, maxPrefixLength) = bigVector match {
+      case Vector0 => (0, 1)
+      case v1: Vector1[_] => (0, 1)
+      case v2: Vector2[_] => (v2.len1, WIDTH)
+      case v3: Vector3[_] => (v3.len12, WIDTH2)
+      case v4: Vector4[_] => (v4.len123, WIDTH3)
+      case v5: Vector5[_] => (v5.len1234, WIDTH4)
+      case v6: Vector6[_] => (v6.len12345, WIDTH5)
+    }
+    if (maxPrefixLength == 1) return this // does not really make sense to align for a vector of <= 32 elements
+    val overallPrefixLength = (before + prefixLength) % maxPrefixLength
+    offset = (maxPrefixLength - overallPrefixLength) % maxPrefixLength
+    // pretend there are already `offset` elements added
+    advanceN(offset & ~MASK)
+    len1 = offset & MASK
+    prefixIsRightAligned = true
+    this
+  }
+
+  /**
+   * Removes `offset` leading `null`s in the prefix.
+   * This is needed after calling `alignTo` and subsequent additions,
+   * directly before the result is used for creating a new Vector.
+   * Note that the outermost array keeps its length to keep the
+   * Builder re-usable.
+   *
+   * example:
+   *     a2 = Array(null, ..., null, Array(null, .., null, 0, 1, .., x), Array(x+1, .., x+32), ...)
+   * becomes
+   *     a2 = Array(Array(0, 1, .., x), Array(x+1, .., x+32), ..., ?, ..., ?)
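+   *
+   * Worked example (editor's sketch; the concrete numbers are an assumption,
+   * not part of the original sources): with WIDTH = 32 and offset = 40 left
+   * over from `alignTo` at depth 2, the a2 pass shifts the outer array left
+   * by 40 >>> BITS = 1 child and shrinks the offset to 40 % 32 = 8; the a1
+   * pass then drops the remaining 8 leading nulls, leaving offset = 0.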
+ */ + private[this] def leftAlignPrefix(): Unit = { + @inline def shrinkOffsetIfToLarge(width: Int): Unit = { + val newOffset = offset % width + lenRest -= offset - newOffset + offset = newOffset + } + var a: Array[AnyRef] = null // the array we modify + var aParent: Array[AnyRef] = null // a's parent, so aParent(0) == a + if (depth >= 6) { + a = a6.asInstanceOf[Array[AnyRef]] + val i = offset >>> BITS5 + if (i > 0) System.arraycopy(a, i, a, 0, LASTWIDTH - i) + shrinkOffsetIfToLarge(WIDTH5) + if ((lenRest >>> BITS5) == 0) depth = 5 + aParent = a + a = a(0).asInstanceOf[Array[AnyRef]] + } + if (depth >= 5) { + if (a == null) a = a5.asInstanceOf[Array[AnyRef]] + val i = (offset >>> BITS4) & MASK + if (depth == 5) { + if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + a5 = a.asInstanceOf[Arr5] + shrinkOffsetIfToLarge(WIDTH4) + if ((lenRest >>> BITS4) == 0) depth = 4 + } else { + if (i > 0) a = copyOfRange(a, i, WIDTH) + aParent(0) = a + } + aParent = a + a = a(0).asInstanceOf[Array[AnyRef]] + } + if (depth >= 4) { + if (a == null) a = a4.asInstanceOf[Array[AnyRef]] + val i = (offset >>> BITS3) & MASK + if (depth == 4) { + if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + a4 = a.asInstanceOf[Arr4] + shrinkOffsetIfToLarge(WIDTH3) + if ((lenRest >>> BITS3) == 0) depth = 3 + } else { + if (i > 0) a = copyOfRange(a, i, WIDTH) + aParent(0) = a + } + aParent = a + a = a(0).asInstanceOf[Array[AnyRef]] + } + if (depth >= 3) { + if (a == null) a = a3.asInstanceOf[Array[AnyRef]] + val i = (offset >>> BITS2) & MASK + if (depth == 3) { + if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + a3 = a.asInstanceOf[Arr3] + shrinkOffsetIfToLarge(WIDTH2) + if ((lenRest >>> BITS2) == 0) depth = 2 + } else { + if (i > 0) a = copyOfRange(a, i, WIDTH) + aParent(0) = a + } + aParent = a + a = a(0).asInstanceOf[Array[AnyRef]] + } + if (depth >= 2) { + if (a == null) a = a2.asInstanceOf[Array[AnyRef]] + val i = (offset >>> BITS) & MASK + if (depth == 2) { + if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + a2 = a.asInstanceOf[Arr2] + shrinkOffsetIfToLarge(WIDTH) + if ((lenRest >>> BITS) == 0) depth = 1 + } else { + if (i > 0) a = copyOfRange(a, i, WIDTH) + aParent(0) = a + } + aParent = a + a = a(0).asInstanceOf[Array[AnyRef]] + } + if (depth >= 1) { + if (a == null) a = a1.asInstanceOf[Array[AnyRef]] + val i = offset & MASK + if (depth == 1) { + if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + a1 = a.asInstanceOf[Arr1] + len1 -= offset + offset = 0 + } else { + if (i > 0) a = copyOfRange(a, i, WIDTH) + aParent(0) = a + } + } + prefixIsRightAligned = false + } + + def addOne(elem: A): this.type = { + if(len1 == WIDTH) advance() + a1(len1) = elem.asInstanceOf[AnyRef] + len1 += 1 + this + } + + private[this] def addArr1(data: Arr1): Unit = { + val dl = data.length + if(dl > 0) { + if(len1 == WIDTH) advance() + val copy1 = mmin(WIDTH-len1, dl) + val copy2 = dl - copy1 + System.arraycopy(data, 0, a1, len1, copy1) + len1 += copy1 + if(copy2 > 0) { + advance() + System.arraycopy(data, copy1, a1, 0, copy2) + len1 += copy2 + } + } + } + + private[this] def addArrN(slice: Array[AnyRef], dim: Int): Unit = { +// assert(dim >= 2) +// assert(lenRest % WIDTH == 0) +// assert(len1 == 0 || len1 == WIDTH) + if (slice.isEmpty) return + if (len1 == WIDTH) advance() + val sl = slice.length + (dim: @switch) match { + case 2 => + // lenRest is always a multiple of WIDTH + val copy1 = mmin(((WIDTH2 - lenRest) >>> BITS) & MASK, sl) + val copy2 = sl - copy1 + val destPos = (lenRest >>> BITS) & MASK + System.arraycopy(slice, 0, a2, 
destPos, copy1) + advanceN(WIDTH * copy1) + if (copy2 > 0) { + System.arraycopy(slice, copy1, a2, 0, copy2) + advanceN(WIDTH * copy2) + } + case 3 => + if (lenRest % WIDTH2 != 0) { + // lenRest is not multiple of WIDTH2, so this slice does not align, need to try lower dimension + slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 2)) + return + } + val copy1 = mmin(((WIDTH3 - lenRest) >>> BITS2) & MASK, sl) + val copy2 = sl - copy1 + val destPos = (lenRest >>> BITS2) & MASK + System.arraycopy(slice, 0, a3, destPos, copy1) + advanceN(WIDTH2 * copy1) + if (copy2 > 0) { + System.arraycopy(slice, copy1, a3, 0, copy2) + advanceN(WIDTH2 * copy2) + } + case 4 => + if (lenRest % WIDTH3 != 0) { + // lenRest is not multiple of WIDTH3, so this slice does not align, need to try lower dimensions + slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 3)) + return + } + val copy1 = mmin(((WIDTH4 - lenRest) >>> BITS3) & MASK, sl) + val copy2 = sl - copy1 + val destPos = (lenRest >>> BITS3) & MASK + System.arraycopy(slice, 0, a4, destPos, copy1) + advanceN(WIDTH3 * copy1) + if (copy2 > 0) { + System.arraycopy(slice, copy1, a4, 0, copy2) + advanceN(WIDTH3 * copy2) + } + case 5 => + if (lenRest % WIDTH4 != 0) { + // lenRest is not multiple of WIDTH4, so this slice does not align, need to try lower dimensions + slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 4)) + return + } + val copy1 = mmin(((WIDTH5 - lenRest) >>> BITS4) & MASK, sl) + val copy2 = sl - copy1 + val destPos = (lenRest >>> BITS4) & MASK + System.arraycopy(slice, 0, a5, destPos, copy1) + advanceN(WIDTH4 * copy1) + if (copy2 > 0) { + System.arraycopy(slice, copy1, a5, 0, copy2) + advanceN(WIDTH4 * copy2) + } + case 6 => // note width is now LASTWIDTH + if (lenRest % WIDTH5 != 0) { + // lenRest is not multiple of WIDTH5, so this slice does not align, need to try lower dimensions + slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 5)) + return + } + val copy1 = sl + // there is no copy2 because there can't be another a6 to copy to + val destPos = lenRest >>> BITS5 + if (destPos + copy1 > LASTWIDTH) + throw new IllegalArgumentException("exceeding 2^31 elements") + System.arraycopy(slice, 0, a6, destPos, copy1) + advanceN(WIDTH5 * copy1) + } + } + + private[this] def addVector(xs: Vector[A]): this.type = { + val sliceCount = xs.vectorSliceCount + var sliceIdx = 0 + while(sliceIdx < sliceCount) { + val slice = xs.vectorSlice(sliceIdx) + vectorSliceDim(sliceCount, sliceIdx) match { + case 1 => addArr1(slice.asInstanceOf[Arr1]) + case n if len1 == WIDTH || len1 == 0 => + addArrN(slice.asInstanceOf[Array[AnyRef]], n) + case n => foreachRec(n-2, slice, addArr1) + } + sliceIdx += 1 + } + this + } + + override def addAll(xs: IterableOnce[A]^): this.type = xs match { + case v: Vector[_] => + if(len1 == 0 && lenRest == 0 && !prefixIsRightAligned) initFrom(v) + else addVector(v.asInstanceOf[Vector[A]]) + case _ => + super.addAll(xs) + } + + private[this] def advance(): Unit = { + val idx = lenRest + WIDTH + val xor = idx ^ lenRest + lenRest = idx + len1 = 0 + advance1(idx, xor) + } + + private[this] def advanceN(n: Int): Unit = if (n > 0) { + // assert(n % 32 == 0) + val idx = lenRest + n + val xor = idx ^ lenRest + lenRest = idx + len1 = 0 + advance1(idx, xor) + } + + private[this] def advance1(idx: Int, xor: Int): Unit = { + if (xor <= 0) { // level = 6 or something very unexpected happened + throw new IllegalArgumentException(s"advance1($idx, $xor): a1=$a1, a2=$a2, a3=$a3, a4=$a4, a5=$a5, a6=$a6, depth=$depth") + } else if 
(xor < WIDTH2) { // level = 1 + if (depth <= 1) { a2 = new Array(WIDTH); a2(0) = a1; depth = 2 } + a1 = new Array(WIDTH) + a2((idx >>> BITS) & MASK) = a1 + } else if (xor < WIDTH3) { // level = 2 + if (depth <= 2) { a3 = new Array(WIDTH); a3(0) = a2; depth = 3 } + a1 = new Array(WIDTH) + a2 = new Array(WIDTH) + a2((idx >>> BITS) & MASK) = a1 + a3((idx >>> BITS2) & MASK) = a2 + } else if (xor < WIDTH4) { // level = 3 + if (depth <= 3) { a4 = new Array(WIDTH); a4(0) = a3; depth = 4 } + a1 = new Array(WIDTH) + a2 = new Array(WIDTH) + a3 = new Array(WIDTH) + a2((idx >>> BITS) & MASK) = a1 + a3((idx >>> BITS2) & MASK) = a2 + a4((idx >>> BITS3) & MASK) = a3 + } else if (xor < WIDTH5) { // level = 4 + if (depth <= 4) { a5 = new Array(WIDTH); a5(0) = a4; depth = 5 } + a1 = new Array(WIDTH) + a2 = new Array(WIDTH) + a3 = new Array(WIDTH) + a4 = new Array(WIDTH) + a2((idx >>> BITS) & MASK) = a1 + a3((idx >>> BITS2) & MASK) = a2 + a4((idx >>> BITS3) & MASK) = a3 + a5((idx >>> BITS4) & MASK) = a4 + } else { // level = 5 + if (depth <= 5) { a6 = new Array(LASTWIDTH); a6(0) = a5; depth = 6 } + a1 = new Array(WIDTH) + a2 = new Array(WIDTH) + a3 = new Array(WIDTH) + a4 = new Array(WIDTH) + a5 = new Array(WIDTH) + a2((idx >>> BITS) & MASK) = a1 + a3((idx >>> BITS2) & MASK) = a2 + a4((idx >>> BITS3) & MASK) = a3 + a5((idx >>> BITS4) & MASK) = a4 + a6(idx >>> BITS5) = a5 + } + } + + def result(): Vector[A] = { + if (prefixIsRightAligned) leftAlignPrefix() + val len = len1 + lenRest + val realLen = len - offset + if(realLen == 0) Vector.empty + else if(len < 0) throw new IndexOutOfBoundsException(s"Vector cannot have negative size $len") + else if(len <= WIDTH) { + new Vector1(copyIfDifferentSize(a1, realLen)) + } else if(len <= WIDTH2) { + val i1 = (len-1) & MASK + val i2 = (len-1) >>> BITS + val data = copyOfRange(a2, 1, i2) + val prefix1 = a2(0) + val suffix1 = copyIfDifferentSize(a2(i2), i1+1) + new Vector2(prefix1, WIDTH-offset, data, suffix1, realLen) + } else if(len <= WIDTH3) { + val i1 = (len-1) & MASK + val i2 = ((len-1) >>> BITS) & MASK + val i3 = ((len-1) >>> BITS2) + val data = copyOfRange(a3, 1, i3) + val prefix2 = copyTail(a3(0)) + val prefix1 = a3(0)(0) + val suffix2 = copyOf(a3(i3), i2) + val suffix1 = copyIfDifferentSize(a3(i3)(i2), i1+1) + val len1 = prefix1.length + val len12 = len1 + prefix2.length*WIDTH + new Vector3(prefix1, len1, prefix2, len12, data, suffix2, suffix1, realLen) + } else if(len <= WIDTH4) { + val i1 = (len-1) & MASK + val i2 = ((len-1) >>> BITS) & MASK + val i3 = ((len-1) >>> BITS2) & MASK + val i4 = ((len-1) >>> BITS3) + val data = copyOfRange(a4, 1, i4) + val prefix3 = copyTail(a4(0)) + val prefix2 = copyTail(a4(0)(0)) + val prefix1 = a4(0)(0)(0) + val suffix3 = copyOf(a4(i4), i3) + val suffix2 = copyOf(a4(i4)(i3), i2) + val suffix1 = copyIfDifferentSize(a4(i4)(i3)(i2), i1+1) + val len1 = prefix1.length + val len12 = len1 + prefix2.length*WIDTH + val len123 = len12 + prefix3.length*WIDTH2 + new Vector4(prefix1, len1, prefix2, len12, prefix3, len123, data, suffix3, suffix2, suffix1, realLen) + } else if(len <= WIDTH5) { + val i1 = (len-1) & MASK + val i2 = ((len-1) >>> BITS) & MASK + val i3 = ((len-1) >>> BITS2) & MASK + val i4 = ((len-1) >>> BITS3) & MASK + val i5 = ((len-1) >>> BITS4) + val data = copyOfRange(a5, 1, i5) + val prefix4 = copyTail(a5(0)) + val prefix3 = copyTail(a5(0)(0)) + val prefix2 = copyTail(a5(0)(0)(0)) + val prefix1 = a5(0)(0)(0)(0) + val suffix4 = copyOf(a5(i5), i4) + val suffix3 = copyOf(a5(i5)(i4), i3) + val suffix2 = copyOf(a5(i5)(i4)(i3), 
i2) + val suffix1 = copyIfDifferentSize(a5(i5)(i4)(i3)(i2), i1+1) + val len1 = prefix1.length + val len12 = len1 + prefix2.length*WIDTH + val len123 = len12 + prefix3.length*WIDTH2 + val len1234 = len123 + prefix4.length*WIDTH3 + new Vector5(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data, suffix4, suffix3, suffix2, suffix1, realLen) + } else { + val i1 = (len-1) & MASK + val i2 = ((len-1) >>> BITS) & MASK + val i3 = ((len-1) >>> BITS2) & MASK + val i4 = ((len-1) >>> BITS3) & MASK + val i5 = ((len-1) >>> BITS4) & MASK + val i6 = ((len-1) >>> BITS5) + val data = copyOfRange(a6, 1, i6) + val prefix5 = copyTail(a6(0)) + val prefix4 = copyTail(a6(0)(0)) + val prefix3 = copyTail(a6(0)(0)(0)) + val prefix2 = copyTail(a6(0)(0)(0)(0)) + val prefix1 = a6(0)(0)(0)(0)(0) + val suffix5 = copyOf(a6(i6), i5) + val suffix4 = copyOf(a6(i6)(i5), i4) + val suffix3 = copyOf(a6(i6)(i5)(i4), i3) + val suffix2 = copyOf(a6(i6)(i5)(i4)(i3), i2) + val suffix1 = copyIfDifferentSize(a6(i6)(i5)(i4)(i3)(i2), i1+1) + val len1 = prefix1.length + val len12 = len1 + prefix2.length*WIDTH + val len123 = len12 + prefix3.length*WIDTH2 + val len1234 = len123 + prefix4.length*WIDTH3 + val len12345 = len1234 + prefix5.length*WIDTH4 + new Vector6(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, prefix5, len12345, data, suffix5, suffix4, suffix3, suffix2, suffix1, realLen) + } + } + + override def toString: String = + s"VectorBuilder(len1=$len1, lenRest=$lenRest, offset=$offset, depth=$depth)" + + private[immutable] def getData: Array[Array[_]] = Array[Array[AnyRef]]( + a1, a2.asInstanceOf[Array[AnyRef]], a3.asInstanceOf[Array[AnyRef]], a4.asInstanceOf[Array[AnyRef]], + a5.asInstanceOf[Array[AnyRef]], a6.asInstanceOf[Array[AnyRef]] + ).asInstanceOf[Array[Array[_]]] +} + + +/** Compile-time definitions for Vector. No references to this object should appear in bytecode. 
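+ *
+ * Orientation note (editor's addition): with BITS = 5, WIDTH = 32 and
+ * MASK = 31 below, an index decomposes into one 5-bit digit per level; e.g.
+ * index 70 in a two-level structure yields i2 = 70 >>> BITS = 2 and
+ * i1 = 70 & MASK = 6, since 70 = 2*32 + 6. LASTWIDTH = 64 gives the top
+ * level one extra bit so a vector can reach Int.MaxValue (2^31-1) elements
+ * instead of 2^30.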
*/ +private[immutable] object VectorInline { + // compile-time numeric constants + final val BITS = 5 + final val WIDTH = 1 << BITS + final val MASK = WIDTH - 1 + final val BITS2 = BITS * 2 + final val WIDTH2 = 1 << BITS2 + final val BITS3 = BITS * 3 + final val WIDTH3 = 1 << BITS3 + final val BITS4 = BITS * 4 + final val WIDTH4 = 1 << BITS4 + final val BITS5 = BITS * 5 + final val WIDTH5 = 1 << BITS5 + final val LASTWIDTH = WIDTH << 1 // 1 extra bit in the last level to go up to Int.MaxValue (2^31-1) instead of 2^30: + final val Log2ConcatFaster = 5 + final val AlignToFaster = 64 + + type Arr1 = Array[AnyRef] + type Arr2 = Array[Array[AnyRef]] + type Arr3 = Array[Array[Array[AnyRef]]] + type Arr4 = Array[Array[Array[Array[AnyRef]]]] + type Arr5 = Array[Array[Array[Array[Array[AnyRef]]]]] + type Arr6 = Array[Array[Array[Array[Array[Array[AnyRef]]]]]] + + /** Dimension of the slice at index */ + @inline def vectorSliceDim(count: Int, idx: Int): Int = { + val c = count/2 + c+1-abs(idx-c) + } + + @inline def copyOrUse[T <: AnyRef](a: Array[T], start: Int, end: Int): Array[T] = + if(start == 0 && end == a.length) a else copyOfRange[T](a, start, end) + + @inline final def copyTail[T <: AnyRef](a: Array[T]): Array[T] = copyOfRange[T](a, 1, a.length) + + @inline final def copyInit[T <: AnyRef](a: Array[T]): Array[T] = copyOfRange[T](a, 0, a.length-1) + + @inline final def copyIfDifferentSize[T <: AnyRef](a: Array[T], len: Int): Array[T] = + if(a.length == len) a else copyOf[T](a, len) + + @inline final def wrap1(x: Any ): Arr1 = { val a = new Arr1(1); a(0) = x.asInstanceOf[AnyRef]; a } + @inline final def wrap2(x: Arr1): Arr2 = { val a = new Arr2(1); a(0) = x; a } + @inline final def wrap3(x: Arr2): Arr3 = { val a = new Arr3(1); a(0) = x; a } + @inline final def wrap4(x: Arr3): Arr4 = { val a = new Arr4(1); a(0) = x; a } + @inline final def wrap5(x: Arr4): Arr5 = { val a = new Arr5(1); a(0) = x; a } + + @inline final def copyUpdate(a1: Arr1, idx1: Int, elem: Any): Arr1 = { + val a1c = a1.clone() + a1c(idx1) = elem.asInstanceOf[AnyRef] + a1c + } + + @inline final def copyUpdate(a2: Arr2, idx2: Int, idx1: Int, elem: Any): Arr2 = { + val a2c = a2.clone() + a2c(idx2) = copyUpdate(a2c(idx2), idx1, elem) + a2c + } + + @inline final def copyUpdate(a3: Arr3, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr3 = { + val a3c = a3.clone() + a3c(idx3) = copyUpdate(a3c(idx3), idx2, idx1, elem) + a3c + } + + @inline final def copyUpdate(a4: Arr4, idx4: Int, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr4 = { + val a4c = a4.clone() + a4c(idx4) = copyUpdate(a4c(idx4), idx3, idx2, idx1, elem) + a4c + } + + @inline final def copyUpdate(a5: Arr5, idx5: Int, idx4: Int, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr5 = { + val a5c = a5.clone() + a5c(idx5) = copyUpdate(a5c(idx5), idx4, idx3, idx2, idx1, elem) + a5c + } + + @inline final def copyUpdate(a6: Arr6, idx6: Int, idx5: Int, idx4: Int, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr6 = { + val a6c = a6.clone() + a6c(idx6) = copyUpdate(a6c(idx6), idx5, idx4, idx3, idx2, idx1, elem) + a6c + } + + @inline final def concatArrays[T <: AnyRef](a: Array[T], b: Array[T]): Array[T] = { + val dest = copyOf[T](a, a.length+b.length) + System.arraycopy(b, 0, dest, a.length, b.length) + dest + } +} + + +/** Helper methods and constants for Vector. 
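+ *
+ * Editor's note: the copy helpers below follow the usual persistent-update
+ * pattern, e.g. `copyAppend(a, elem)` allocates a copy of `a` one slot longer
+ * and writes `elem` into the new last slot, leaving the original array shared
+ * and untouched. Likewise `mapElems1` returns its input array unchanged until
+ * `f` produces the first element that differs (`ne`), and only starts copying
+ * from that point.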
*/ +private object VectorStatics { + + final def copyAppend1(a: Arr1, elem: Any): Arr1 = { + val alen = a.length + val ac = new Arr1(alen+1) + System.arraycopy(a, 0, ac, 0, alen) + ac(alen) = elem.asInstanceOf[AnyRef] + ac + } + + final def copyAppend[T <: AnyRef](a: Array[T], elem: T): Array[T] = { + val ac = copyOf(a, a.length+1) + ac(ac.length-1) = elem + ac + } + + final def copyPrepend1(elem: Any, a: Arr1): Arr1 = { + val ac = new Arr1(a.length+1) + System.arraycopy(a, 0, ac, 1, a.length) + ac(0) = elem.asInstanceOf[AnyRef] + ac + } + + final def copyPrepend[T <: AnyRef](elem: T, a: Array[T]): Array[T] = { + val ac = java.lang.reflect.Array.newInstance(a.getClass.getComponentType, a.length+1).asInstanceOf[Array[T]] + System.arraycopy(a, 0, ac, 1, a.length) + ac(0) = elem + ac + } + + final val empty1: Arr1 = new Array(0) + final val empty2: Arr2 = new Array(0) + final val empty3: Arr3 = new Array(0) + final val empty4: Arr4 = new Array(0) + final val empty5: Arr5 = new Array(0) + final val empty6: Arr6 = new Array(0) + + final def foreachRec[T <: AnyRef, A, U](level: Int, a: Array[T], f: A => U): Unit = { + var i = 0 + val len = a.length + if(level == 0) { + while(i < len) { + f(a(i).asInstanceOf[A]) + i += 1 + } + } else { + val l = level-1 + while(i < len) { + foreachRec(l, a(i).asInstanceOf[Array[AnyRef]], f) + i += 1 + } + } + } + + final def mapElems1[A, B](a: Arr1, f: A => B): Arr1 = { + var i = 0 + while(i < a.length) { + val v1 = a(i).asInstanceOf[AnyRef] + val v2 = f(v1.asInstanceOf[A]).asInstanceOf[AnyRef] + if(v1 ne v2) + return mapElems1Rest(a, f, i, v2) + i += 1 + } + a + } + + final def mapElems1Rest[A, B](a: Arr1, f: A => B, at: Int, v2: AnyRef): Arr1 = { + val ac = new Arr1(a.length) + if(at > 0) System.arraycopy(a, 0, ac, 0, at) + ac(at) = v2 + var i = at+1 + while(i < a.length) { + ac(i) = f(a(i).asInstanceOf[A]).asInstanceOf[AnyRef] + i += 1 + } + ac + } + + final def mapElems[A, B, T <: AnyRef](n: Int, a: Array[T], f: A => B): Array[T] = { + if(n == 1) + mapElems1[A, B](a.asInstanceOf[Arr1], f).asInstanceOf[Array[T]] + else { + var i = 0 + while(i < a.length) { + val v1 = a(i) + val v2 = mapElems(n-1, v1.asInstanceOf[Array[AnyRef]], f) + if(v1 ne v2) + return mapElemsRest(n, a, f, i, v2) + i += 1 + } + a + } + } + + final def mapElemsRest[A, B, T <: AnyRef](n: Int, a: Array[T], f: A => B, at: Int, v2: AnyRef): Array[T] = { + val ac = java.lang.reflect.Array.newInstance(a.getClass.getComponentType, a.length).asInstanceOf[Array[AnyRef]] + if(at > 0) System.arraycopy(a, 0, ac, 0, at) + ac(at) = v2 + var i = at+1 + while(i < a.length) { + ac(i) = mapElems(n-1, a(i).asInstanceOf[Array[AnyRef]], f) + i += 1 + } + ac.asInstanceOf[Array[T]] + } + + final def prepend1IfSpace(prefix1: Arr1, xs: IterableOnce[_]^): Arr1 = xs match { + case it: Iterable[_] => + if(it.sizeCompare(WIDTH-prefix1.length) <= 0) { + it.size match { + case 0 => null + case 1 => copyPrepend(it.head.asInstanceOf[AnyRef], prefix1) + case s => + val prefix1b = new Arr1(prefix1.length + s) + System.arraycopy(prefix1, 0, prefix1b, s, prefix1.length) + it.copyToArray(prefix1b.asInstanceOf[Array[Any]], 0) + prefix1b + } + } else null + case it => + val s = it.knownSize + if(s > 0 && s <= WIDTH-prefix1.length) { + val prefix1b = new Arr1(prefix1.length + s) + System.arraycopy(prefix1, 0, prefix1b, s, prefix1.length) + it.iterator.copyToArray(prefix1b.asInstanceOf[Array[Any]], 0) + prefix1b + } else null + } + + final def append1IfSpace(suffix1: Arr1, xs: IterableOnce[_]^): Arr1 = xs match { + case it: 
Iterable[_] => + if(it.sizeCompare(WIDTH-suffix1.length) <= 0) { + it.size match { + case 0 => null + case 1 => copyAppend(suffix1, it.head.asInstanceOf[AnyRef]) + case s => + val suffix1b = copyOf(suffix1, suffix1.length + s) + it.copyToArray(suffix1b.asInstanceOf[Array[Any]], suffix1.length) + suffix1b + } + } else null + case it => + val s = it.knownSize + if(s > 0 && s <= WIDTH-suffix1.length) { + val suffix1b = copyOf(suffix1, suffix1.length + s) + it.iterator.copyToArray(suffix1b.asInstanceOf[Array[Any]], suffix1.length) + suffix1b + } else null + } +} + + +private final class NewVectorIterator[A](v: Vector[A], private[this] var totalLength: Int, private[this] val sliceCount: Int) extends Iterator[A] with java.lang.Cloneable { + + private[this] var a1: Arr1 = v.prefix1 + private[this] var a2: Arr2 = _ + private[this] var a3: Arr3 = _ + private[this] var a4: Arr4 = _ + private[this] var a5: Arr5 = _ + private[this] var a6: Arr6 = _ + private[this] var a1len = a1.length + private[this] var i1 = 0 // current index in a1 + private[this] var oldPos = 0 + private[this] var len1 = totalLength // remaining length relative to a1 + + private[this] var sliceIdx = 0 + private[this] var sliceDim = 1 + private[this] var sliceStart = 0 // absolute position + private[this] var sliceEnd = a1len // absolute position + + //override def toString: String = + // s"NewVectorIterator(v=$v, totalLength=$totalLength, sliceCount=$sliceCount): a1len=$a1len, len1=$len1, i1=$i1, sliceEnd=$sliceEnd" + + @inline override def knownSize = len1 - i1 + + @inline def hasNext: Boolean = len1 > i1 + + def next(): A = { + if(i1 == a1len) advance() + val r = a1(i1) + i1 += 1 + r.asInstanceOf[A] + } + + private[this] def advanceSlice(): Unit = { + if(!hasNext) Iterator.empty.next() + sliceIdx += 1 + var slice: Array[_ <: AnyRef] = v.vectorSlice(sliceIdx) + while(slice.length == 0) { + sliceIdx += 1 + slice = v.vectorSlice(sliceIdx) + } + sliceStart = sliceEnd + sliceDim = vectorSliceDim(sliceCount, sliceIdx) + (sliceDim: @switch) match { + case 1 => a1 = slice.asInstanceOf[Arr1] + case 2 => a2 = slice.asInstanceOf[Arr2] + case 3 => a3 = slice.asInstanceOf[Arr3] + case 4 => a4 = slice.asInstanceOf[Arr4] + case 5 => a5 = slice.asInstanceOf[Arr5] + case 6 => a6 = slice.asInstanceOf[Arr6] + } + sliceEnd = sliceStart + slice.length * (1 << (BITS*(sliceDim-1))) + if(sliceEnd > totalLength) sliceEnd = totalLength + if(sliceDim > 1) oldPos = (1 << (BITS*sliceDim))-1 + } + + private[this] def advance(): Unit = { + val pos = i1-len1+totalLength + if(pos == sliceEnd) advanceSlice() + if(sliceDim > 1) { + val io = pos - sliceStart + val xor = oldPos ^ io + advanceA(io, xor) + oldPos = io + } + len1 -= i1 + a1len = mmin(a1.length, len1) + i1 = 0 + } + + private[this] def advanceA(io: Int, xor: Int): Unit = { + if(xor < WIDTH2) { + a1 = a2((io >>> BITS) & MASK) + } else if(xor < WIDTH3) { + a2 = a3((io >>> BITS2) & MASK) + a1 = a2(0) + } else if(xor < WIDTH4) { + a3 = a4((io >>> BITS3) & MASK) + a2 = a3(0) + a1 = a2(0) + } else if(xor < WIDTH5) { + a4 = a5((io >>> BITS4) & MASK) + a3 = a4(0) + a2 = a3(0) + a1 = a2(0) + } else { + a5 = a6(io >>> BITS5) + a4 = a5(0) + a3 = a4(0) + a2 = a3(0) + a1 = a2(0) + } + } + + private[this] def setA(io: Int, xor: Int): Unit = { + if(xor < WIDTH2) { + a1 = a2((io >>> BITS) & MASK) + } else if(xor < WIDTH3) { + a2 = a3((io >>> BITS2) & MASK) + a1 = a2((io >>> BITS) & MASK) + } else if(xor < WIDTH4) { + a3 = a4((io >>> BITS3) & MASK) + a2 = a3((io >>> BITS2) & MASK) + a1 = a2((io >>> BITS) & MASK) + } 
else if(xor < WIDTH5) { + a4 = a5((io >>> BITS4) & MASK) + a3 = a4((io >>> BITS3) & MASK) + a2 = a3((io >>> BITS2) & MASK) + a1 = a2((io >>> BITS) & MASK) + } else { + a5 = a6(io >>> BITS5) + a4 = a5((io >>> BITS4) & MASK) + a3 = a4((io >>> BITS3) & MASK) + a2 = a3((io >>> BITS2) & MASK) + a1 = a2((io >>> BITS) & MASK) + } + } + + override def drop(n: Int): Iterator[A] = { + if(n > 0) { + val oldpos = i1-len1+totalLength + val newpos = mmin(oldpos + n, totalLength) + if(newpos == totalLength) { + i1 = 0 + len1 = 0 + a1len = 0 + } else { + while(newpos >= sliceEnd) advanceSlice() + val io = newpos - sliceStart + if(sliceDim > 1) { + val xor = oldPos ^ io + setA(io, xor) + oldPos = io + } + a1len = a1.length + i1 = io & MASK + len1 = i1 + (totalLength-newpos) + if(a1len > len1) a1len = len1 + } + } + this + } + + override def take(n: Int): Iterator[A] = { + if(n < knownSize) { + val trunc = knownSize - mmax(0, n) + totalLength -= trunc + len1 -= trunc + if(len1 < a1len) a1len = len1 + if(totalLength < sliceEnd) sliceEnd = totalLength + } + this + } + + override def slice(from: Int, until: Int): Iterator[A] = { + val _until = + if(from > 0) { + drop(from) + until - from + } else until + take(_until) + } + + override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val xsLen = xs.length + val total = IterableOnce.elemsToCopyToArray(knownSize, xsLen, start, len) + var copied = 0 + val isBoxed = xs.isInstanceOf[Array[AnyRef]] + while(copied < total) { + if(i1 == a1len) advance() + val count = mmin(total-copied, a1.length-i1) + if(isBoxed) System.arraycopy(a1, i1, xs, start+copied, count) + else Array.copy(a1, i1, xs, start+copied, count) + i1 += count + copied += count + } + total + } + + override def toVector: Vector[A] = + v.slice(i1-len1+totalLength, totalLength) + + protected[immutable] def split(at: Int): NewVectorIterator[A] = { + val it2 = clone().asInstanceOf[NewVectorIterator[A]] + it2.take(at) + drop(at) + it2 + } +} + + +private abstract class VectorStepperBase[A, Sub >: Null <: Stepper[A], Semi <: Sub](it: NewVectorIterator[A]) + extends Stepper[A] with EfficientSplit { + + protected[this] def build(it: NewVectorIterator[A]): Semi + + final def hasStep: Boolean = it.hasNext + + final def characteristics: Int = Spliterator.ORDERED + Spliterator.SIZED + Spliterator.SUBSIZED + + final def estimateSize: Long = it.knownSize + + def trySplit(): Sub = { + val len = it.knownSize + if(len > 1) build(it.split(len >>> 1)) + else null + } + + override final def iterator: Iterator[A] = it +} + +private class AnyVectorStepper[A](it: NewVectorIterator[A]) + extends VectorStepperBase[A, AnyStepper[A], AnyVectorStepper[A]](it) with AnyStepper[A] { + protected[this] def build(it: NewVectorIterator[A]) = new AnyVectorStepper(it) + def nextStep(): A = it.next() +} + +private class DoubleVectorStepper(it: NewVectorIterator[Double]) + extends VectorStepperBase[Double, DoubleStepper, DoubleVectorStepper](it) with DoubleStepper { + protected[this] def build(it: NewVectorIterator[Double]) = new DoubleVectorStepper(it) + def nextStep(): Double = it.next() +} + +private class IntVectorStepper(it: NewVectorIterator[Int]) + extends VectorStepperBase[Int, IntStepper, IntVectorStepper](it) with IntStepper { + protected[this] def build(it: NewVectorIterator[Int]) = new IntVectorStepper(it) + def nextStep(): Int = it.next() +} + +private class LongVectorStepper(it: NewVectorIterator[Long]) + extends VectorStepperBase[Long, LongStepper, LongVectorStepper](it) with LongStepper { + protected[this] 
def build(it: NewVectorIterator[Long]) = new LongVectorStepper(it) + def nextStep(): Long = it.next() +} + + +// The following definitions are needed for binary compatibility with ParVector +private[collection] class VectorIterator[+A](_startIndex: Int, private[this] var endIndex: Int) extends AbstractIterator[A] { + private[immutable] var it: NewVectorIterator[A @uncheckedVariance] = _ + def hasNext: Boolean = it.hasNext + def next(): A = it.next() + private[collection] def remainingElementCount: Int = it.size + private[collection] def remainingVector: Vector[A] = it.toVector +} diff --git a/tests/pos-special/stdlib/collection/immutable/VectorMap.scala b/tests/pos-special/stdlib/collection/immutable/VectorMap.scala new file mode 100644 index 000000000000..2bbcf429e01d --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/VectorMap.scala @@ -0,0 +1,276 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.annotation.tailrec +import language.experimental.captureChecking + +/** This class implements immutable maps using a vector/map-based data structure, which preserves insertion order. + * + * Unlike `ListMap`, `VectorMap` has amortized effectively constant lookup at the expense + * of using extra memory and generally lower performance for other operations + * + * @tparam K the type of the keys contained in this vector map. + * @tparam V the type of the values associated with the keys in this vector map. + * + * @define coll immutable vector map + * @define Coll `immutable.VectorMap` + */ +final class VectorMap[K, +V] private ( + private[immutable] val fields: Vector[Any], + private[immutable] val underlying: Map[K, (Int, V)], dropped: Int) + extends AbstractMap[K, V] + with SeqMap[K, V] + with StrictOptimizedMapOps[K, V, VectorMap, VectorMap[K, V]] + with MapFactoryDefaults[K, V, VectorMap, Iterable] { + + import VectorMap._ + + override protected[this] def className: String = "VectorMap" + + private[immutable] def this(fields: Vector[K], underlying: Map[K, (Int, V)]) = { + this(fields, underlying, 0) + } + + override val size = underlying.size + + override def knownSize: Int = size + + override def isEmpty: Boolean = size == 0 + + def updated[V1 >: V](key: K, value: V1): VectorMap[K, V1] = { + underlying.get(key) match { + case Some((slot, _)) => + new VectorMap(fields, underlying.updated[(Int, V1)](key, (slot, value)), dropped) + case None => + new VectorMap(fields :+ key, underlying.updated[(Int, V1)](key, (fields.length + dropped, value)), dropped) + } + } + + override def withDefault[V1 >: V](d: K -> V1): Map[K, V1] = + new Map.WithDefault(this, d) + + override def withDefaultValue[V1 >: V](d: V1): Map[K, V1] = + new Map.WithDefault[K, V1](this, _ => d) + + def get(key: K): Option[V] = underlying.get(key) match { + case Some(v) => Some(v._2) + case None => None + } + + @tailrec + private def nextValidField(slot: Int): (Int, K) = { + if (slot >= fields.size) (-1, null.asInstanceOf[K]) + else fields(slot) match { + case Tombstone(distance) => + nextValidField(slot + distance) + case k => + (slot, k.asInstanceOf[K]) + } + } + + def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] { + private[this] val fieldsLength = fields.length + private[this] var 
slot = -1 + private[this] var key: K = null.asInstanceOf[K] + + private[this] def advance(): Unit = { + val nextSlot = slot + 1 + if (nextSlot >= fieldsLength) { + slot = fieldsLength + key = null.asInstanceOf[K] + } else { + nextValidField(nextSlot) match { + case (-1, _) => + slot = fieldsLength + key = null.asInstanceOf[K] + case (s, k) => + slot = s + key = k + } + } + } + + advance() + + override def hasNext: Boolean = slot < fieldsLength + + override def next(): (K, V) = { + if (!hasNext) throw new NoSuchElementException("next called on depleted iterator") + val result = (key, underlying(key)._2) + advance() + result + } + } + + // No-Op overrides to allow for more efficient steppers in a minor release. + // Refining the return type to `S with EfficientSplit` is binary compatible. + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S = super.stepper(shape) + + override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S = super.keyStepper(shape) + + override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S = super.valueStepper(shape) + + + def removed(key: K): VectorMap[K, V] = { + if (isEmpty) empty + else { + var fs = fields + val sz = fs.size + underlying.get(key) match { + case Some(_) if size == 1 => empty + case Some((slot, _)) => + val s = slot - dropped + + // Calculate next of kin + val next = + if (s < sz - 1) fs(s + 1) match { + case Tombstone(d) => s + d + 1 + case _ => s + 1 + } else s + 1 + + fs = fs.updated(s, Tombstone(next - s)) + + // Calculate first index of preceding tombstone sequence + val first = + if (s > 0) { + fs(s - 1) match { + case Tombstone(d) if d < 0 => if (s + d >= 0) s + d else 0 + case Tombstone(d) if d == 1 => s - 1 + case Tombstone(d) => throw new IllegalStateException("tombstone indicate wrong position: " + d) + case _ => s + } + }else s + fs = fs.updated(first, Tombstone(next - first)) + + // Calculate last index of succeeding tombstone sequence + val last = next - 1 + if (last != first) { + fs = fs.updated(last, Tombstone(first - 1 - last)) + } + new VectorMap(fs, underlying - key, dropped) + case _ => + this + } + } + } + + override def mapFactory: MapFactory[VectorMap] = VectorMap + + override def contains(key: K): Boolean = underlying.contains(key) + + override def head: (K, V) = iterator.next() + + override def last: (K, V) = { + if (isEmpty) throw new UnsupportedOperationException("empty.last") + val lastSlot = fields.length - 1 + val last = fields.last match { + case Tombstone(d) if d < 0 => fields(lastSlot + d).asInstanceOf[K] + case Tombstone(d) if d == 1 => fields(lastSlot - 1).asInstanceOf[K] + case Tombstone(d) => throw new IllegalStateException("tombstone indicate wrong position: " + d) + case k => k.asInstanceOf[K] + } + (last, underlying(last)._2) + } + + override def lastOption: Option[(K, V)] = { + if (isEmpty) None + else Some(last) + } + + override def tail: VectorMap[K, V] = { + if (isEmpty) throw new UnsupportedOperationException("empty.tail") + val (slot, key) = nextValidField(0) + new VectorMap(fields.drop(slot + 1), underlying - key, dropped + slot + 1) + } + + override def init: VectorMap[K, V] = { + if (isEmpty) throw new UnsupportedOperationException("empty.init") + val lastSlot = fields.size - 1 + val (slot, key) = fields.last match { + case Tombstone(d) if d < 0 => (lastSlot + d, fields(lastSlot + d).asInstanceOf[K]) + case Tombstone(d) if d == 1 => (lastSlot - 1, fields(lastSlot - 1).asInstanceOf[K]) + case Tombstone(d) => throw new 
IllegalStateException("tombstone indicate wrong position: " + d) + case k => (lastSlot, k.asInstanceOf[K]) + } + new VectorMap(fields.dropRight(fields.size - slot), underlying - key, dropped) + } + + override def keys: Vector[K] = keysIterator.toVector + + override def values: Iterable[V] = new Iterable[V] with IterableFactoryDefaults[V, Iterable] { + override def iterator: Iterator[V] = keysIterator.map(underlying(_)._2) + } +} + +object VectorMap extends MapFactory[VectorMap] { + //Class to mark deleted slots in 'fields'. + //When one or more consecutive slots are deleted, the 'distance' of the first 'Tombstone' + // represents the distance to the location of the next undeleted slot (or the last slot in 'fields' +1 if it does not exist). + //When two or more consecutive slots are deleted, the 'distance' of the trailing 'Tombstone' + // represents the distance to the location of the previous undeleted slot ( or -1 if it does not exist) multiplied by -1. + //For other deleted slots, it simply indicates that they have been deleted. + private[VectorMap] final case class Tombstone(distance: Int) + + private[this] final val EmptyMap: VectorMap[Nothing, Nothing] = + new VectorMap[Nothing, Nothing](Vector.empty[Nothing], HashMap.empty[Nothing, (Int, Nothing)]) + + def empty[K, V]: VectorMap[K, V] = EmptyMap.asInstanceOf[VectorMap[K, V]] + + def from[K, V](it: collection.IterableOnce[(K, V)]^): VectorMap[K, V] = + it match { + case vm: VectorMap[K, V] => vm + case _ => (newBuilder[K, V] ++= it).result() + } + + def newBuilder[K, V]: mutable.Builder[(K, V), VectorMap[K, V]] = new VectorMapBuilder[K, V] +} + +private[immutable] final class VectorMapBuilder[K, V] extends mutable.Builder[(K, V), VectorMap[K, V]] { + private[this] val vectorBuilder = new VectorBuilder[K] + private[this] val mapBuilder = new MapBuilderImpl[K, (Int, V)] + private[this] var aliased: VectorMap[K, V] = _ // OK since VectorMapBuilder is private + + override def clear(): Unit = { + vectorBuilder.clear() + mapBuilder.clear() + aliased = null + } + + override def result(): VectorMap[K, V] = { + if (aliased eq null) { + aliased = new VectorMap(vectorBuilder.result(), mapBuilder.result()) + } + aliased + } + def addOne(key: K, value: V): this.type = { + if (aliased ne null) { + aliased = aliased.updated(key, value) + } else { + mapBuilder.getOrElse(key, null) match { + case (slot, _) => + mapBuilder.addOne(key, (slot, value)) + case null => + val vectorSize = vectorBuilder.size + vectorBuilder.addOne(key) + mapBuilder.addOne(key, (vectorSize, value)) + } + } + this + } + + override def addOne(elem: (K, V)): this.type = addOne(elem._1, elem._2) +} diff --git a/tests/pos-special/stdlib/collection/immutable/WrappedString.scala b/tests/pos-special/stdlib/collection/immutable/WrappedString.scala new file mode 100644 index 000000000000..446bdceb3ace --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/WrappedString.scala @@ -0,0 +1,142 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection +package immutable + +import scala.Predef.{wrapString => _, assert} +import scala.collection.Stepper.EfficientSplit +import scala.collection.convert.impl.CharStringStepper +import scala.collection.mutable.{Builder, StringBuilder} +import language.experimental.captureChecking + +/** + * This class serves as a wrapper augmenting `String`s with all the operations + * found in indexed sequences. + * + * The difference between this class and `StringOps` is that calling transformer + * methods such as `filter` and `map` will yield an object of type `WrappedString` + * rather than a `String`. + * + * @param self a string contained within this wrapped string + * + * @define Coll `WrappedString` + * @define coll wrapped string + */ +@SerialVersionUID(3L) +final class WrappedString(private val self: String) extends AbstractSeq[Char] with IndexedSeq[Char] + with IndexedSeqOps[Char, IndexedSeq, WrappedString] + with Serializable + with Pure { + + def apply(i: Int): Char = self.charAt(i) + + override protected def fromSpecific(coll: scala.collection.IterableOnce[Char]^): WrappedString = WrappedString.fromSpecific(coll) + override protected def newSpecificBuilder: Builder[Char, WrappedString] = WrappedString.newBuilder + override def empty: WrappedString = WrappedString.empty + + override def slice(from: Int, until: Int): WrappedString = { + val start = if (from < 0) 0 else from + if (until <= start || start >= self.length) + return WrappedString.empty + + val end = if (until > length) length else until + new WrappedString(self.substring(start, end)) + } + override def length = self.length + override def toString = self + override def view: StringView = new StringView(self) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = { + val st = new CharStringStepper(self, 0, self.length) + val r = + if (shape.shape == StepperShape.CharShape) st + else { + assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape") + AnyStepper.ofParIntStepper(st) + } + r.asInstanceOf[S with EfficientSplit] + } + + override def startsWith[B >: Char](that: IterableOnce[B]^, offset: Int = 0): Boolean = + that match { + case s: WrappedString => self.startsWith(s.self, offset) + case _ => super.startsWith(that, offset) + } + + override def endsWith[B >: Char](that: collection.Iterable[B]^): Boolean = + that match { + case s: WrappedString => self.endsWith(s.self) + case _ => super.endsWith(that) + } + + override def indexOf[B >: Char](elem: B, from: Int = 0): Int = elem match { + case c: Char => self.indexOf(c, from) + case _ => super.indexOf(elem, from) + } + + override def lastIndexOf[B >: Char](elem: B, end: Int = length - 1): Int = + elem match { + case c: Char => self.lastIndexOf(c, end) + case _ => super.lastIndexOf(elem, end) + } + + override def copyToArray[B >: Char](xs: Array[B], start: Int, len: Int): Int = + (xs: Any) match { + case chs: Array[Char] => + val copied = IterableOnce.elemsToCopyToArray(length, chs.length, start, len) + self.getChars(0, copied, chs, start) + copied + case _ => super.copyToArray(xs, start, len) + } + + override def appendedAll[B >: Char](suffix: IterableOnce[B]^): IndexedSeq[B] = + suffix match { + case s: WrappedString => new WrappedString(self concat s.self) + case _ => super.appendedAll(suffix) + } + + override def sameElements[B >: Char](o: IterableOnce[B]^) = o match { + case s: WrappedString => self == s.self + case _ => super.sameElements(o) + } + + override protected[this] 
def className = "WrappedString" + + override protected final def applyPreferredMaxLength: Int = Int.MaxValue + override def equals(other: Any): Boolean = other match { + case that: WrappedString => + this.self == that.self + case _ => + super.equals(other) + } +} + +/** A companion object for wrapped strings. + */ +@SerialVersionUID(3L) +object WrappedString extends SpecificIterableFactory[Char, WrappedString] { + def fromSpecific(it: IterableOnce[Char]^): WrappedString = { + val b = newBuilder + val s = it.knownSize + if(s >= 0) b.sizeHint(s) + b ++= it + b.result() + } + val empty: WrappedString = new WrappedString("") + def newBuilder: Builder[Char, WrappedString] = + new StringBuilder().mapResult(x => new WrappedString(x)) + + implicit class UnwrapOp(private val value: WrappedString) extends AnyVal { + def unwrap: String = value.self + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/package.scala b/tests/pos-special/stdlib/collection/immutable/package.scala new file mode 100644 index 000000000000..985ef22859be --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/package.scala @@ -0,0 +1,29 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +import language.experimental.captureChecking + +package object immutable { + type StringOps = scala.collection.StringOps + val StringOps = scala.collection.StringOps + type StringView = scala.collection.StringView + val StringView = scala.collection.StringView + + @deprecated("Use Iterable instead of Traversable", "2.13.0") + type Traversable[+X] = Iterable[X] + @deprecated("Use Iterable instead of Traversable", "2.13.0") + val Traversable = Iterable + + @deprecated("Use Map instead of DefaultMap", "2.13.0") + type DefaultMap[K, +V] = scala.collection.immutable.Map[K, V] +} diff --git a/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala b/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala new file mode 100644 index 000000000000..8f1ac07e725e --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala @@ -0,0 +1,603 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import scala.annotation.nowarn +import scala.collection.generic.DefaultSerializationProxy +import scala.language.implicitConversions +import language.experimental.captureChecking + + +/** This class implements mutable maps with `AnyRef` keys based on a hash table with open addressing. + * + * Basic map operations on single entries, including `contains` and `get`, + * are typically significantly faster with `AnyRefMap` than [[HashMap]]. + * Note that numbers and characters are not handled specially in AnyRefMap; + * only plain `equals` and `hashCode` are used in comparisons. + * + * Methods that traverse or regenerate the map, including `foreach` and `map`, + * are not in general faster than with `HashMap`. 
The methods `foreachKey`, + * `foreachValue`, `mapValuesNow`, and `transformValues` are, however, faster + * than alternative ways to achieve the same functionality. + * + * Maps with open addressing may become less efficient at lookup after + * repeated addition/removal of elements. Although `AnyRefMap` makes a + * decent attempt to remain efficient regardless, calling `repack` + * on a map that will no longer have elements removed but will be + * used heavily may save both time and storage space. + * + * This map is not intended to contain more than 2^29^ entries (approximately + * 500 million). The maximum capacity is 2^30^, but performance will degrade + * rapidly as 2^30^ is approached. + * + */ +class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K -> V, initialBufferSize: Int, initBlank: Boolean) + extends AbstractMap[K, V] + with MapOps[K, V, Map, AnyRefMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, AnyRefMap[K, V]] + with Serializable { + + import AnyRefMap._ + def this() = this(AnyRefMap.exceptionDefault, 16, true) + + /** Creates a new `AnyRefMap` that returns default values according to a supplied key-value mapping. */ + def this(defaultEntry: K -> V) = this(defaultEntry, 16, true) + + /** Creates a new `AnyRefMap` with an initial buffer of specified size. + * + * An `AnyRefMap` can typically contain half as many elements as its buffer size + * before it requires resizing. + */ + def this(initialBufferSize: Int) = this(AnyRefMap.exceptionDefault, initialBufferSize, true) + + /** Creates a new `AnyRefMap` with specified default values and initial buffer size. */ + def this(defaultEntry: K -> V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) + + private[this] var mask = 0 + private[this] var _size = 0 + private[this] var _vacant = 0 + private[this] var _hashes: Array[Int] = null + private[this] var _keys: Array[AnyRef] = null + private[this] var _values: Array[AnyRef] = null + + if (initBlank) defaultInitialize(initialBufferSize) + + private[this] def defaultInitialize(n: Int): Unit = { + mask = + if (n<0) 0x7 + else (((1 << (32 - java.lang.Integer.numberOfLeadingZeros(n-1))) - 1) & 0x3FFFFFFF) | 0x7 + _hashes = new Array[Int](mask+1) + _keys = new Array[AnyRef](mask+1) + _values = new Array[AnyRef](mask+1) + } + + private[collection] def initializeTo( + m: Int, sz: Int, vc: Int, hz: Array[Int], kz: Array[AnyRef], vz: Array[AnyRef] + ): Unit = { + mask = m; _size = sz; _vacant = vc; _hashes = hz; _keys = kz; _values = vz + } + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]^): AnyRefMap[K,V] = { + var sz = coll.knownSize + if(sz < 0) sz = 4 + val arm = new AnyRefMap[K, V](sz * 2) + coll.iterator.foreach{ case (k,v) => arm(k) = v } + if (arm.size < (sz>>3)) arm.repack() + arm + } + override protected def newSpecificBuilder: Builder[(K, V), AnyRefMap[K,V]] = new AnyRefMapBuilder + + override def size: Int = _size + override def knownSize: Int = size + override def isEmpty: Boolean = _size == 0 + override def empty: AnyRefMap[K,V] = new AnyRefMap(defaultEntry) + + private def imbalanced: Boolean = + (_size + _vacant) > 0.5*mask || _vacant > _size + + private def hashOf(key: K): Int = { + // Note: this method must not return 0 or Int.MinValue, as these indicate no element + if (key eq null) 0x41081989 + else { + val h = key.hashCode + // Part of the MurmurHash3 32 bit finalizer + val i = (h ^ (h >>> 16)) * 0x85EBCA6B + val j = (i ^ (i >>> 13)) & 0x7FFFFFFF + if (j==0) 0x41081989 else j + } + } + + 
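A standalone sketch (not part of this diff) of the two invariants the open-addressing scheme above relies on: the finalized hash is never 0 or Int.MinValue, since those values mark empty and vacated slots in `_hashes`, and lookups probe with a growing quadratic stride:

```scala
// Same MurmurHash3-style finalizer as `hashOf` above, minus the null case.
def finalizeHash(h: Int): Int = {
  val i = (h ^ (h >>> 16)) * 0x85EBCA6B
  val j = (i ^ (i >>> 13)) & 0x7FFFFFFF // clearing the sign bit rules out Int.MinValue
  if (j == 0) 0x41081989 else j         // remap 0, which would read as "empty slot"
}

// First `n` slots visited by the probe sequence used in `seekEntry`:
// start at h & mask, then step by 2*(x+1)*x - 3 for x = 1, 2, ...
def probes(h: Int, mask: Int, n: Int): Seq[Int] = {
  var e = h & mask
  var x = 0
  Seq.fill(n) { val cur = e; x += 1; e = (e + 2 * (x + 1) * x - 3) & mask; cur }
}

assert(finalizeHash("anything".hashCode) > 0)
println(probes(finalizeHash("anything".hashCode), mask = 15, n = 8))
```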
private def seekEntry(h: Int, k: AnyRef): Int = { + var e = h & mask + var x = 0 + var g = 0 + val hashes = _hashes + val keys = _keys + while ({ g = hashes(e); g != 0}) { + if (g == h && { val q = keys(e); (q eq k) || ((q ne null) && (q equals k)) }) return e + x += 1 + e = (e + 2*(x+1)*x - 3) & mask + } + e | MissingBit + } + + @`inline` private def seekEntryOrOpen(h: Int, k: AnyRef): Int = { + var e = h & mask + var x = 0 + var g = 0 + var o = -1 + while ({ g = _hashes(e); g != 0}) { + if (g == h && { val q = _keys(e); (q eq k) || ((q ne null) && (q equals k)) }) return e + else if (o == -1 && g+g == 0) o = e + x += 1 + e = (e + 2*(x+1)*x - 3) & mask + } + if (o >= 0) o | MissVacant else e | MissingBit + } + + override def contains(key: K): Boolean = seekEntry(hashOf(key), key) >= 0 + + override def get(key: K): Option[V] = { + val i = seekEntry(hashOf(key), key) + if (i < 0) None else Some(_values(i).asInstanceOf[V]) + } + + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + val i = seekEntry(hashOf(key), key) + if (i < 0) default else _values(i).asInstanceOf[V] + } + + override def getOrElseUpdate(key: K, defaultValue: => V): V = { + val h = hashOf(key) + var i = seekEntryOrOpen(h, key) + if (i < 0) { + // It is possible that the default value computation was side-effecting + // Our hash table may have resized or even contain what we want now + // (but if it does, we'll replace it) + val value = { + val oh = _hashes + val ans = defaultValue + if (oh ne _hashes) { + i = seekEntryOrOpen(h, key) + if (i >= 0) _size -= 1 + } + ans + } + _size += 1 + val j = i & IndexMask + _hashes(j) = h + _keys(j) = key.asInstanceOf[AnyRef] + _values(j) = value.asInstanceOf[AnyRef] + if ((i & VacantBit) != 0) _vacant -= 1 + else if (imbalanced) repack() + value + } + else _values(i).asInstanceOf[V] + } + + /** Retrieves the value associated with a key, or the default for that type if none exists + * (null for AnyRef, 0 for floats and integers). + * + * Note: this is the fastest way to retrieve a value that may or + * may not exist, if the default null/zero is acceptable. For key/value + * pairs that do exist, `apply` (i.e. `map(key)`) is equally fast. + */ + def getOrNull(key: K): V = { + val i = seekEntry(hashOf(key), key) + (if (i < 0) null else _values(i)).asInstanceOf[V] + } + + /** Retrieves the value associated with a key. + * If the key does not exist in the map, the `defaultEntry` for that key + * will be returned instead; an exception will be thrown if no + * `defaultEntry` was supplied. + */ + override def apply(key: K): V = { + val i = seekEntry(hashOf(key), key) + if (i < 0) defaultEntry(key) else _values(i).asInstanceOf[V] + } + + /** Defers to defaultEntry to find a default value for the key. Throws an + * exception if no other default behavior was specified. + */ + override def default(key: K): V = defaultEntry(key) + + private def repack(newMask: Int): Unit = { + val oh = _hashes + val ok = _keys + val ov = _values + mask = newMask + _hashes = new Array[Int](mask+1) + _keys = new Array[AnyRef](mask+1) + _values = new Array[AnyRef](mask+1) + _vacant = 0 + var i = 0 + while (i < oh.length) { + val h = oh(i) + if (h+h != 0) { + var e = h & mask + var x = 0 + while (_hashes(e) != 0) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } + _hashes(e) = h + _keys(e) = ok(i) + _values(e) = ov(i) + } + i += 1 + } + } + + /** Repacks the contents of this `AnyRefMap` for maximum efficiency of lookup. 
+ * + * For maps that undergo a complex creation process with both addition and + * removal of keys, and then are used heavily with no further removal of + * elements, calling `repack` after the end of the creation can result in + * improved performance. Repacking takes time proportional to the number + * of entries in the map. + */ + def repack(): Unit = { + var m = mask + if (_size + _vacant >= 0.5*mask && !(_vacant > 0.2*mask)) m = ((m << 1) + 1) & IndexMask + while (m > 8 && 8*_size < m) m = m >>> 1 + repack(m) + } + + override def put(key: K, value: V): Option[V] = { + val h = hashOf(key) + val i = seekEntryOrOpen(h, key) + if (i < 0) { + val j = i & IndexMask + _hashes(j) = h + _keys(j) = key + _values(j) = value.asInstanceOf[AnyRef] + _size += 1 + if ((i & VacantBit) != 0) _vacant -= 1 + else if (imbalanced) repack() + None + } + else { + val ans = Some(_values(i).asInstanceOf[V]) + _hashes(i) = h + _values(i) = value.asInstanceOf[AnyRef] + ans + } + } + + /** Updates the map to include a new key-value pair. + * + * This is the fastest way to add an entry to an `AnyRefMap`. + */ + override def update(key: K, value: V): Unit = { + val h = hashOf(key) + val i = seekEntryOrOpen(h, key) + if (i < 0) { + val j = i & IndexMask + _hashes(j) = h + _keys(j) = key + _values(j) = value.asInstanceOf[AnyRef] + _size += 1 + if ((i & VacantBit) != 0) _vacant -= 1 + else if (imbalanced) repack() + } + else { + _hashes(i) = h + _values(i) = value.asInstanceOf[AnyRef] + } + } + + /** Adds a new key/value pair to this map and returns the map. */ + @deprecated("Use `addOne` or `update` instead; infix operations with an operand of multiple args will be deprecated", "2.13.3") + def +=(key: K, value: V): this.type = { update(key, value); this } + + /** Adds a new key/value pair to this map and returns the map. 
*/ + @inline final def addOne(key: K, value: V): this.type = { update(key, value); this } + + @inline override final def addOne(kv: (K, V)): this.type = { update(kv._1, kv._2); this } + + def subtractOne(key: K): this.type = { + val i = seekEntry(hashOf(key), key) + if (i >= 0) { + _size -= 1 + _vacant += 1 + _hashes(i) = Int.MinValue + _keys(i) = null + _values(i) = null + } + this + } + + def iterator: Iterator[(K, V)] = new AnyRefMapIterator[(K, V)] { + protected def nextResult(k: K, v: V) = (k, v) + } + override def keysIterator: Iterator[K] = new AnyRefMapIterator[K] { + protected def nextResult(k: K, v: V) = k + } + override def valuesIterator: Iterator[V] = new AnyRefMapIterator[V] { + protected def nextResult(k: K, v: V) = v + } + + private abstract class AnyRefMapIterator[A] extends AbstractIterator[A] { + private[this] val hz = _hashes + private[this] val kz = _keys + private[this] val vz = _values + + private[this] var index = 0 + + def hasNext: Boolean = index < hz.length && { + var h = hz(index) + while (h+h == 0) { + index += 1 + if (index >= hz.length) return false + h = hz(index) + } + true + } + + def next(): A = { + if (hasNext) { + val ans = nextResult(kz(index).asInstanceOf[K], vz(index).asInstanceOf[V]) + index += 1 + ans + } + else throw new NoSuchElementException("next") + } + + protected def nextResult(k: K, v: V): A + } + + + override def foreach[U](f: ((K,V)) => U): Unit = { + var i = 0 + var e = _size + while (e > 0) { + while(i < _hashes.length && { val h = _hashes(i); h+h == 0 && i < _hashes.length}) i += 1 + if (i < _hashes.length) { + f((_keys(i).asInstanceOf[K], _values(i).asInstanceOf[V])) + i += 1 + e -= 1 + } + else return + } + } + + override def foreachEntry[U](f: (K,V) => U): Unit = { + var i = 0 + var e = _size + while (e > 0) { + while(i < _hashes.length && { val h = _hashes(i); h+h == 0 && i < _hashes.length}) i += 1 + if (i < _hashes.length) { + f(_keys(i).asInstanceOf[K], _values(i).asInstanceOf[V]) + i += 1 + e -= 1 + } + else return + } + } + + override def clone(): AnyRefMap[K, V] = { + val hz = java.util.Arrays.copyOf(_hashes, _hashes.length) + val kz = java.util.Arrays.copyOf(_keys, _keys.length) + val vz = java.util.Arrays.copyOf(_values, _values.length) + val arm = new AnyRefMap[K, V](defaultEntry, 1, false) + arm.initializeTo(mask, _size, _vacant, hz, kz, vz) + arm + } + + @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") + override def + [V1 >: V](kv: (K, V1)): AnyRefMap[K, V1] = AnyRefMap.from(new View.Appended(this, kv)) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): AnyRefMap[K, V1] = { + val m = this + elem1 + elem2 + if(elems.isEmpty) m else m.concat(elems) + } + + override def concat[V2 >: V](xs: scala.collection.IterableOnce[(K, V2)]^): AnyRefMap[K, V2] = { + val arm = clone().asInstanceOf[AnyRefMap[K, V2]] + xs.iterator.foreach(kv => arm += kv) + arm + } + + override def ++[V2 >: V](xs: scala.collection.IterableOnce[(K, V2)]^): AnyRefMap[K, V2] = concat(xs) + + @deprecated("Use m.clone().addOne(k,v) instead of m.updated(k, v)", "2.13.0") + override def updated[V1 >: V](key: K, value: V1): AnyRefMap[K, V1] = + clone().asInstanceOf[AnyRefMap[K, V1]].addOne(key, value) + + private[this] def foreachElement[A,B](elems: Array[AnyRef], f: A => B): Unit = { + var i,j = 0 + while (i < _hashes.length & j < _size) { + val h = _hashes(i) + if (h+h != 0) { + j += 1 + f(elems(i).asInstanceOf[A]) + } + i += 1 + } + } + + /** Applies a function to all keys of this
map. */ + def foreachKey[A](f: K => A): Unit = foreachElement[K,A](_keys, f) + + /** Applies a function to all values of this map. */ + def foreachValue[A](f: V => A): Unit = foreachElement[V,A](_values, f) + + /** Creates a new `AnyRefMap` with different values. + * Unlike `mapValues`, this method generates a new + * collection immediately. + */ + def mapValuesNow[V1](f: V => V1): AnyRefMap[K, V1] = { + val arm = new AnyRefMap[K,V1](AnyRefMap.exceptionDefault, 1, false) + val hz = java.util.Arrays.copyOf(_hashes, _hashes.length) + val kz = java.util.Arrays.copyOf(_keys, _keys.length) + val vz = new Array[AnyRef](_values.length) + var i,j = 0 + while (i < _hashes.length & j < _size) { + val h = _hashes(i) + if (h+h != 0) { + j += 1 + vz(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] + } + i += 1 + } + arm.initializeTo(mask, _size, _vacant, hz, kz, vz) + arm + } + + /** Applies a transformation function to all values stored in this map. + * Note: the default, if any, is not transformed. + */ + @deprecated("Use transformValuesInPlace instead of transformValues", "2.13.0") + @`inline` final def transformValues(f: V => V): this.type = transformValuesInPlace(f) + + /** Applies a transformation function to all values stored in this map. + * Note: the default, if any, is not transformed. + */ + def transformValuesInPlace(f: V => V): this.type = { + var i,j = 0 + while (i < _hashes.length & j < _size) { + val h = _hashes(i) + if (h+h != 0) { + j += 1 + _values(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] + } + i += 1 + } + this + } + + // The implicit dummy parameter is necessary to distinguish these methods from the base methods they overload (not override) + def map[K2 <: AnyRef, V2](f: ((K, V)) => (K2, V2))(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = + AnyRefMap.from(new View.Map(this, f)) + def flatMap[K2 <: AnyRef, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = + AnyRefMap.from(new View.FlatMap(this, f)) + def collect[K2 <: AnyRef, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = + strictOptimizedCollect(AnyRefMap.newBuilder[K2, V2], pf) + + override def clear(): Unit = { + import java.util.Arrays.fill + fill(_keys, null) + fill(_values, null) + fill(_hashes, 0) + _size = 0 + _vacant = 0 + } + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(AnyRefMap.toFactory[K, V](AnyRefMap), this) + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "AnyRefMap" +} + +object AnyRefMap { + private final val IndexMask = 0x3FFFFFFF + private final val MissingBit = 0x80000000 + private final val VacantBit = 0x40000000 + private final val MissVacant = 0xC0000000 + + private class ExceptionDefault extends (Any -> Nothing) with Serializable { + def apply(k: Any): Nothing = throw new NoSuchElementException(if (k == null) "(null)" else k.toString) + } + private val exceptionDefault = new ExceptionDefault + + /** A builder for instances of `AnyRefMap`. + * + * This builder can be reused to create multiple instances. 
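+ * For example (an illustrative sketch, not part of the original source): + * {{{ + * val b = AnyRefMap.newBuilder[String, Int] + * b += (("one", 1)) + * b += (("two", 2)) + * val m = b.result() // AnyRefMap("one" -> 1, "two" -> 2) + * }}}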
+ */ + final class AnyRefMapBuilder[K <: AnyRef, V] extends ReusableBuilder[(K, V), AnyRefMap[K, V]] { + private[collection] var elems: AnyRefMap[K, V] = new AnyRefMap[K, V] + def addOne(entry: (K, V)): this.type = { + elems += entry + this + } + def clear(): Unit = elems = new AnyRefMap[K, V] + def result(): AnyRefMap[K, V] = elems + override def knownSize: Int = elems.knownSize + } + + /** Creates a new `AnyRefMap` with zero or more key/value pairs. */ + def apply[K <: AnyRef, V](elems: (K, V)*): AnyRefMap[K, V] = buildFromIterableOnce(elems) + + def newBuilder[K <: AnyRef, V]: ReusableBuilder[(K, V), AnyRefMap[K, V]] = new AnyRefMapBuilder[K, V] + + private def buildFromIterableOnce[K <: AnyRef, V](elems: IterableOnce[(K, V)]^): AnyRefMap[K, V] = { + var sz = elems.knownSize + if(sz < 0) sz = 4 + val arm = new AnyRefMap[K, V](sz * 2) + elems.iterator.foreach{ case (k,v) => arm(k) = v } + if (arm.size < (sz>>3)) arm.repack() + arm + } + + /** Creates a new empty `AnyRefMap`. */ + def empty[K <: AnyRef, V]: AnyRefMap[K, V] = new AnyRefMap[K, V] + + /** Creates a new empty `AnyRefMap` with the supplied default */ + def withDefault[K <: AnyRef, V](default: K -> V): AnyRefMap[K, V] = new AnyRefMap[K, V](default) + + /** Creates a new `AnyRefMap` from an existing source collection. A source collection + * which is already an `AnyRefMap` gets cloned. + * + * @param source Source collection + * @tparam K the type of the keys + * @tparam V the type of the values + * @return a new `AnyRefMap` with the elements of `source` + */ + def from[K <: AnyRef, V](source: IterableOnce[(K, V)]^): AnyRefMap[K, V] = source match { + case source: AnyRefMap[_, _] => source.clone().asInstanceOf[AnyRefMap[K, V]] + case _ => buildFromIterableOnce(source) + } + + /** Creates a new `AnyRefMap` from arrays of keys and values. + * Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`. + */ + def fromZip[K <: AnyRef, V](keys: Array[K], values: Array[V]): AnyRefMap[K, V] = { + val sz = math.min(keys.length, values.length) + val arm = new AnyRefMap[K, V](sz * 2) + var i = 0 + while (i < sz) { arm(keys(i)) = values(i); i += 1 } + if (arm.size < (sz>>3)) arm.repack() + arm + } + + /** Creates a new `AnyRefMap` from keys and values. + * Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`. 
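+ * For example (an illustrative sketch, not part of the original source): + * {{{ + * AnyRefMap.fromZip(List("a", "b"), List(1, 2)) // AnyRefMap("a" -> 1, "b" -> 2) + * }}}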
+ */ + def fromZip[K <: AnyRef, V](keys: Iterable[K]^, values: Iterable[V]^): AnyRefMap[K, V] = { + val sz = math.min(keys.size, values.size) + val arm = new AnyRefMap[K, V](sz * 2) + val ki = keys.iterator + val vi = values.iterator + while (ki.hasNext && vi.hasNext) arm(ki.next()) = vi.next() + if (arm.size < (sz >> 3)) arm.repack() + arm + } + + implicit def toFactory[K <: AnyRef, V](dummy: AnyRefMap.type): Factory[(K, V), AnyRefMap[K, V]] = ToFactory.asInstanceOf[Factory[(K, V), AnyRefMap[K, V]]] + + @SerialVersionUID(3L) + private[this] object ToFactory extends Factory[(AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] with Serializable { + def fromSpecific(it: IterableOnce[(AnyRef, AnyRef)]^): AnyRefMap[AnyRef, AnyRef] = AnyRefMap.from[AnyRef, AnyRef](it) + def newBuilder: Builder[(AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] = AnyRefMap.newBuilder[AnyRef, AnyRef] + } + + implicit def toBuildFrom[K <: AnyRef, V](factory: AnyRefMap.type): BuildFrom[Any, (K, V), AnyRefMap[K, V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (K, V), AnyRefMap[K, V]]] + private[this] object ToBuildFrom extends BuildFrom[Any, (AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] { + def fromSpecific(from: Any)(it: IterableOnce[(AnyRef, AnyRef)]^) = AnyRefMap.from(it) + def newBuilder(from: Any) = AnyRefMap.newBuilder[AnyRef, AnyRef] + } + + implicit def iterableFactory[K <: AnyRef, V]: Factory[(K, V), AnyRefMap[K, V]] = toFactory[K, V](this) + implicit def buildFromAnyRefMap[K <: AnyRef, V]: BuildFrom[AnyRefMap[_, _], (K, V), AnyRefMap[K, V]] = toBuildFrom(this) +} diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala b/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala new file mode 100644 index 000000000000..85a045c34423 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala @@ -0,0 +1,405 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import java.util.Arrays + +import scala.annotation.nowarn +import scala.annotation.tailrec +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import language.experimental.captureChecking + +/** An implementation of the `Buffer` class using an array to + * represent the assembled sequence internally. Append, update and random + * access take constant time (amortized time). Prepends and removes are + * linear in the buffer size. + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#array-buffers "Scala's Collection Library overview"]] + * section on `Array Buffers` for more information. + + * + * @tparam A the type of this arraybuffer's elements. 
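+ * + * An illustrative sketch of the amortized behaviour (not part of the original source): + * {{{ + * val buf = new scala.collection.mutable.ArrayBuffer[Int](4) + * (1 to 100).foreach(buf += _) // the backing array doubles a handful of times, not 100 + * buf.trimToSize() // optionally release the unused capacity afterwards + * }}}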
+ * + * @define Coll `mutable.ArrayBuffer` + * @define coll array buffer + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@SerialVersionUID(-1582447879429021880L) +class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) + extends AbstractBuffer[A] + with IndexedBuffer[A] + with IndexedSeqOps[A, ArrayBuffer, ArrayBuffer[A]] + with StrictOptimizedSeqOps[A, ArrayBuffer, ArrayBuffer[A]] + with IterableFactoryDefaults[A, ArrayBuffer] + with DefaultSerializable { + + def this() = this(new Array[AnyRef](ArrayBuffer.DefaultInitialSize), 0) + + def this(initialSize: Int) = this(new Array[AnyRef](initialSize max 1), 0) + + @transient private[this] var mutationCount: Int = 0 + + // needs to be `private[collection]` or `protected[collection]` for parallel-collections + protected[collection] var array: Array[AnyRef] = initialElements + protected var size0 = initialSize + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + shape.parUnbox(new ObjectArrayStepper(array, 0, length).asInstanceOf[AnyStepper[A] with EfficientSplit]) + } + + override def knownSize: Int = super[IndexedSeqOps].knownSize + + /** Ensure that the internal array has at least `n` cells. */ + protected def ensureSize(n: Int): Unit = { + array = ArrayBuffer.ensureSize(array, size0, n) + } + + // TODO 3.T: should be `protected`, perhaps `protected[this]` + /** Ensure that the internal array has at least `n` additional cells more than `size0`. */ + private[mutable] def ensureAdditionalSize(n: Int): Unit = { + // `.toLong` to ensure `Long` arithmetic is used and prevent `Int` overflow + array = ArrayBuffer.ensureSize(array, size0, size0.toLong + n) + } + + def sizeHint(size: Int): Unit = + if(size > length && size >= 1) ensureSize(size) + + /** Reduce length to `n`, nulling out all dropped elements */ + private def reduceToSize(n: Int): Unit = { + mutationCount += 1 + Arrays.fill(array, n, size0, null) + size0 = n + } + + /** Trims the ArrayBuffer to an appropriate size for the current + * number of elements (rounding up to the next natural size), + * which may replace the array by a shorter one. + * This allows releasing some unused memory. + */ + def trimToSize(): Unit = { + resize(length) + } + + /** Trims the `array` buffer size down to either a power of 2 + * or Int.MaxValue while keeping first `requiredLength` elements. + */ + private def resize(requiredLength: Int): Unit = + array = ArrayBuffer.downsize(array, requiredLength) + + @inline private def checkWithinBounds(lo: Int, hi: Int) = { + if (lo < 0) throw new IndexOutOfBoundsException(s"$lo is out of bounds (min 0, max ${size0 - 1})") + if (hi > size0) throw new IndexOutOfBoundsException(s"${hi - 1} is out of bounds (min 0, max ${size0 - 1})") + } + + def apply(n: Int): A = { + checkWithinBounds(n, n + 1) + array(n).asInstanceOf[A] + } + + def update(@deprecatedName("n", "2.13.0") index: Int, elem: A): Unit = { + checkWithinBounds(index, index + 1) + mutationCount += 1 + array(index) = elem.asInstanceOf[AnyRef] + } + + def length = size0 + + // TODO: return `IndexedSeqView` rather than `ArrayBufferView` + override def view: ArrayBufferView[A] = new ArrayBufferView(this, () => mutationCount) + + override def iterableFactory: SeqFactory[ArrayBuffer] = ArrayBuffer + + /** Note: This does not actually resize the internal representation. 
+ * See clearAndShrink if you want to also resize internally + */ + def clear(): Unit = reduceToSize(0) + + /** + * Clears this buffer and shrinks the backing array to the given size + * (rounding up to the next natural size). + * @param size the size to shrink the backing array to + */ + def clearAndShrink(size: Int = ArrayBuffer.DefaultInitialSize): this.type = { + clear() + resize(size) + this + } + + def addOne(elem: A): this.type = { + mutationCount += 1 + ensureAdditionalSize(1) + val oldSize = size0 + size0 = oldSize + 1 + this(oldSize) = elem + this + } + + // Overridden to use array copying for efficiency where possible. + override def addAll(elems: IterableOnce[A]^): this.type = { + elems match { + case elems: ArrayBuffer[_] => + val elemsLength = elems.size0 + if (elemsLength > 0) { + mutationCount += 1 + ensureAdditionalSize(elemsLength) + Array.copy(elems.array, 0, array, length, elemsLength) + size0 = length + elemsLength + } + case _ => super.addAll(elems) + } + this + } + + def insert(@deprecatedName("n", "2.13.0") index: Int, elem: A): Unit = { + checkWithinBounds(index, index) + mutationCount += 1 + ensureAdditionalSize(1) + Array.copy(array, index, array, index + 1, size0 - index) + size0 += 1 + this(index) = elem + } + + def prepend(elem: A): this.type = { + insert(0, elem) + this + } + + def insertAll(@deprecatedName("n", "2.13.0") index: Int, elems: IterableOnce[A]^): Unit = { + checkWithinBounds(index, index) + elems match { + case elems: collection.Iterable[A] => + val elemsLength = elems.size + if (elemsLength > 0) { + mutationCount += 1 + ensureAdditionalSize(elemsLength) + val len = size0 + Array.copy(array, index, array, index + elemsLength, len - index) + // if `elems eq this`, this copy is safe because + // - `elems.array eq this.array` + // - we didn't overwrite the values being inserted after moving them in + // the previous line + // - `copyElemsToArray` will call `System.arraycopy` + // - `System.arraycopy` will effectively "read" all the values before + // overwriting any of them when two arrays are the same reference + val actual = IterableOnce.copyElemsToArray(elems, array.asInstanceOf[Array[Any]], index, elemsLength) + if (actual != elemsLength) throw new IllegalStateException(s"Copied $actual of $elemsLength") + size0 = len + elemsLength // update size AFTER the copy, in case we're inserting a proxy + } + case _ => insertAll(index, ArrayBuffer.from(elems)) + } + } + + /** Note: This does not actually resize the internal representation. + * See trimToSize if you want to also resize internally + */ + def remove(@deprecatedName("n", "2.13.0") index: Int): A = { + checkWithinBounds(index, index + 1) + val res = this(index) + Array.copy(array, index + 1, array, index, size0 - (index + 1)) + reduceToSize(size0 - 1) + res + } + + /** Note: This does not actually resize the internal representation.
+ * See trimToSize if you want to also resize internally + */ + def remove(@deprecatedName("n", "2.13.0") index: Int, count: Int): Unit = + if (count > 0) { + checkWithinBounds(index, index + count) + Array.copy(array, index + count, array, index, size0 - (index + count)) + reduceToSize(size0 - count) + } else if (count < 0) { + throw new IllegalArgumentException("removing negative number of elements: " + count) + } + + @deprecated("Use 'this' instance instead", "2.13.0") + @deprecatedOverriding("ArrayBuffer[A] no longer extends Builder[A, ArrayBuffer[A]]", "2.13.0") + @inline def result(): this.type = this + + @deprecated("Use 'new GrowableBuilder(this).mapResult(f)' instead", "2.13.0") + @deprecatedOverriding("ArrayBuffer[A] no longer extends Builder[A, ArrayBuffer[A]]", "2.13.0") + @inline def mapResult[NewTo](f: (ArrayBuffer[A]) => NewTo): Builder[A, NewTo]^{f} = new GrowableBuilder[A, ArrayBuffer[A]](this).mapResult(f) + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "ArrayBuffer" + + override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) + if(copied > 0) { + Array.copy(array, 0, xs, start, copied) + } + copied + } + + /** Sorts this $coll in place according to an Ordering. + * + * @see [[scala.collection.mutable.IndexedSeqOps.sortInPlace]] + * @param ord the ordering to be used to compare elements. + * @return modified input $coll sorted according to the ordering `ord`. + */ + override def sortInPlace[B >: A]()(implicit ord: Ordering[B]): this.type = { + if (length > 1) { + mutationCount += 1 + scala.util.Sorting.stableSort(array.asInstanceOf[Array[B]], 0, length) + } + this + } + + @tailrec private def foldl[B](start: Int, end: Int, z: B, op: (B, A) => B): B = + if (start == end) z + else foldl(start + 1, end, op(z, array(start).asInstanceOf[A]), op) + + @tailrec private def foldr[B](start: Int, end: Int, z: B, op: (A, B) => B): B = + if (start == end) z + else foldr(start, end - 1, op(array(end - 1).asInstanceOf[A], z), op) + + override def foldLeft[B](z: B)(op: (B, A) => B): B = foldl(0, length, z, op) + + override def foldRight[B](z: B)(op: (A, B) => B): B = foldr(0, length, z, op) + + override def reduceLeft[B >: A](op: (B, A) => B): B = if (length > 0) foldl(1, length, array(0).asInstanceOf[B], op) else super.reduceLeft(op) + + override def reduceRight[B >: A](op: (A, B) => B): B = if (length > 0) foldr(0, length - 1, array(length - 1).asInstanceOf[B], op) else super.reduceRight(op) +} + +/** + * Factory object for the `ArrayBuffer` class. 
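+ * + * For example (an illustrative sketch, not part of the original source): + * {{{ + * val buf = ArrayBuffer.from(1 to 3) // a known size lets `from` allocate the array once + * }}}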
+ * + * $factoryInfo + * + * @define coll array buffer + * @define Coll `mutable.ArrayBuffer` + */ +@SerialVersionUID(3L) +object ArrayBuffer extends StrictOptimizedSeqFactory[ArrayBuffer] { + final val DefaultInitialSize = 16 + private[this] val emptyArray = new Array[AnyRef](0) + + def from[B](coll: collection.IterableOnce[B]^): ArrayBuffer[B] = { + val k = coll.knownSize + if (k >= 0) { + // Avoid reallocation of buffer if length is known + val array = ensureSize(emptyArray, 0, k) // don't duplicate sizing logic, and check VM array size limit + val actual = IterableOnce.copyElemsToArray(coll, array.asInstanceOf[Array[Any]]) + if (actual != k) throw new IllegalStateException(s"Copied $actual of $k") + new ArrayBuffer[B](array, k) + } + else new ArrayBuffer[B] ++= coll + } + + def newBuilder[A]: Builder[A, ArrayBuffer[A]] = + new GrowableBuilder[A, ArrayBuffer[A]](empty) { + override def sizeHint(size: Int): Unit = elems.ensureSize(size) + } + + def empty[A]: ArrayBuffer[A] = new ArrayBuffer[A]() + + /** + * @param arrayLen the length of the backing array + * @param targetLen the minimum length to resize up to + * @return -1 if no resizing is needed, or the size for the new array otherwise + */ + private def resizeUp(arrayLen: Long, targetLen: Long): Int = { + if (targetLen <= arrayLen) -1 + else { + if (targetLen > Int.MaxValue) throw new Exception(s"Collections cannot have more than ${Int.MaxValue} elements") + IterableOnce.checkArraySizeWithinVMLimit(targetLen.toInt) // safe because `targetSize <= Int.MaxValue` + + val newLen = math.max(targetLen, math.max(arrayLen * 2, DefaultInitialSize)) + math.min(newLen, scala.runtime.PStatics.VM_MaxArraySize).toInt + } + } + // if necessary, copy (curSize elements of) the array to a new array of capacity n. + // Should use Array.copyOf(array, resizeEnsuring(array.length))? + private def ensureSize(array: Array[AnyRef], curSize: Int, targetSize: Long): Array[AnyRef] = { + val newLen = resizeUp(array.length, targetSize) + if (newLen < 0) array + else { + val res = new Array[AnyRef](newLen) + System.arraycopy(array, 0, res, 0, curSize) + res + } + } + + /** + * @param arrayLen the length of the backing array + * @param targetLen the length to resize down to, if smaller than `arrayLen` + * @return -1 if no resizing is needed, or the size for the new array otherwise + */ + private def resizeDown(arrayLen: Int, targetLen: Int): Int = + if (targetLen >= arrayLen) -1 else math.max(targetLen, 0) + private def downsize(array: Array[AnyRef], targetSize: Int): Array[AnyRef] = { + val newLen = resizeDown(array.length, targetSize) + if (newLen < 0) array + else if (newLen == 0) emptyArray + else { + val res = new Array[AnyRef](newLen) + System.arraycopy(array, 0, res, 0, targetSize) + res + } + } +} + +// TODO: use `CheckedIndexedSeqView.Id` once we can change the return type of `ArrayBuffer#view` +final class ArrayBufferView[A] private[mutable](underlying: ArrayBuffer[A], mutationCount: () -> Int) + extends AbstractIndexedSeqView[A], Pure { + /* Removed since it poses problems for capture checking + @deprecated("never intended to be public; call ArrayBuffer#view instead", since = "2.13.7") + def this(array: Array[AnyRef], length: Int) = { + // this won't actually track mutation, but it would be a pain to have the implementation + // check if we have a method to get the current mutation count or not on every method and + // change what it does based on that. hopefully no one ever calls this. 
+ this({ + val _array: Array[Object] = array + val _length = length + new ArrayBuffer[A](0) { + this.array = _array + this.size0 = _length + }: ArrayBuffer[A] + }, () => 0) + }*/ + + @deprecated("never intended to be public", since = "2.13.7") + def array: Array[AnyRef] = underlying.toArray[Any].asInstanceOf[Array[AnyRef]] + + @throws[IndexOutOfBoundsException] + def apply(n: Int): A = underlying(n) + def length: Int = underlying.length + override protected[this] def className = "ArrayBufferView" + + // we could inherit all these from `CheckedIndexedSeqView`, except this class is public + override def iterator: Iterator[A] = new CheckedIndexedSeqView.CheckedIterator(this, mutationCount()) + override def reverseIterator: Iterator[A] = new CheckedIndexedSeqView.CheckedReverseIterator(this, mutationCount()) + + override def appended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Appended(this, elem)(mutationCount) + override def prepended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Prepended(elem, this)(mutationCount) + override def take(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Take(this, n)(mutationCount) + override def takeRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount) + override def drop(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Drop(this, n)(mutationCount) + override def dropRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount) + override def map[B](f: A => B): IndexedSeqView[B]^{f} = new CheckedIndexedSeqView.Map(this, f)(mutationCount) + override def reverse: IndexedSeqView[A] = new CheckedIndexedSeqView.Reverse(this)(mutationCount) + override def slice(from: Int, until: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount) + override def tapEach[U](f: A => U): IndexedSeqView[A]^{f} = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) + + override def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(prefix, this)(mutationCount) +} diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala new file mode 100644 index 000000000000..be7367c7f021 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala @@ -0,0 +1,523 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import language.experimental.captureChecking +import scala.reflect.ClassTag + +/** A builder class for arrays. + * + * @tparam T the type of the elements for the builder. 
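+ * + * For example (an illustrative sketch, not part of the original source): + * {{{ + * val b = scala.collection.mutable.ArrayBuilder.make[Int] + * b += 1 + * b.addAll(Array(2, 3)) + * val arr = b.result() // Array(1, 2, 3), built by the specialized ofInt builder + * }}}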
+ */ +@SerialVersionUID(3L) +sealed abstract class ArrayBuilder[T] + extends ReusableBuilder[T, Array[T]] + with Serializable { + protected[this] var capacity: Int = 0 + protected[this] def elems: Array[T] + protected var size: Int = 0 + + def length: Int = size + + override def knownSize: Int = size + + protected[this] final def ensureSize(size: Int): Unit = { + if (capacity < size || capacity == 0) { + var newsize = if (capacity == 0) 16 else capacity * 2 + while (newsize < size) newsize *= 2 + resize(newsize) + } + } + + override final def sizeHint(size: Int): Unit = + if (capacity < size) resize(size) + + def clear(): Unit = size = 0 + + protected[this] def resize(size: Int): Unit + + /** Add all elements of an array */ + def addAll(xs: Array[_ <: T]): this.type = addAll(xs, 0, xs.length) + + /** Add a slice of an array */ + def addAll(xs: Array[_ <: T], offset: Int, length: Int): this.type = { + ensureSize(this.size + length) + Array.copy(xs, offset, elems, this.size, length) + size += length + this + } + + override def addAll(xs: IterableOnce[T]^): this.type = { + val k = xs.knownSize + if (k > 0) { + ensureSize(this.size + k) + val actual = IterableOnce.copyElemsToArray(xs, elems, this.size) + if (actual != k) throw new IllegalStateException(s"Copied $actual of $k") + size += k + } else if (k < 0) super.addAll(xs) + this + } +} + +/** A companion object for array builders. + */ +object ArrayBuilder { + + /** Creates a new arraybuilder of type `T`. + * + * @tparam T type of the elements for the array builder, with a `ClassTag` context bound. + * @return a new empty array builder. + */ + @inline def make[T: ClassTag]: ArrayBuilder[T] = { + val tag = implicitly[ClassTag[T]] + tag.runtimeClass match { + case java.lang.Byte.TYPE => new ArrayBuilder.ofByte().asInstanceOf[ArrayBuilder[T]] + case java.lang.Short.TYPE => new ArrayBuilder.ofShort().asInstanceOf[ArrayBuilder[T]] + case java.lang.Character.TYPE => new ArrayBuilder.ofChar().asInstanceOf[ArrayBuilder[T]] + case java.lang.Integer.TYPE => new ArrayBuilder.ofInt().asInstanceOf[ArrayBuilder[T]] + case java.lang.Long.TYPE => new ArrayBuilder.ofLong().asInstanceOf[ArrayBuilder[T]] + case java.lang.Float.TYPE => new ArrayBuilder.ofFloat().asInstanceOf[ArrayBuilder[T]] + case java.lang.Double.TYPE => new ArrayBuilder.ofDouble().asInstanceOf[ArrayBuilder[T]] + case java.lang.Boolean.TYPE => new ArrayBuilder.ofBoolean().asInstanceOf[ArrayBuilder[T]] + case java.lang.Void.TYPE => new ArrayBuilder.ofUnit().asInstanceOf[ArrayBuilder[T]] + case _ => new ArrayBuilder.ofRef[T with AnyRef]()(tag.asInstanceOf[ClassTag[T with AnyRef]]).asInstanceOf[ArrayBuilder[T]] + } + } + + /** A class for array builders for arrays of reference types. + * + * This builder can be reused. + * + * @tparam T type of elements for the array builder, subtype of `AnyRef` with a `ClassTag` context bound. 
+ */ + @SerialVersionUID(3L) + final class ofRef[T <: AnyRef](implicit ct: ClassTag[T]) extends ArrayBuilder[T] { + + protected var elems: Array[T] = _ + + private def mkArray(size: Int): Array[T] = { + if (capacity == size && capacity > 0) elems + else if (elems eq null) new Array[T](size) + else java.util.Arrays.copyOf[T](elems, size) + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: T): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[T] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def clear(): Unit = { + super.clear() + if(elems ne null) java.util.Arrays.fill(elems.asInstanceOf[Array[AnyRef]], null) + } + + override def equals(other: Any): Boolean = other match { + case x: ofRef[_] => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofRef" + } + + /** A class for array builders for arrays of `byte`s. It can be reused. */ + @SerialVersionUID(3L) + final class ofByte extends ArrayBuilder[Byte] { + + protected var elems: Array[Byte] = _ + + private def mkArray(size: Int): Array[Byte] = { + val newelems = new Array[Byte](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Byte): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Byte] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofByte => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofByte" + } + + /** A class for array builders for arrays of `short`s. It can be reused. */ + @SerialVersionUID(3L) + final class ofShort extends ArrayBuilder[Short] { + + protected var elems: Array[Short] = _ + + private def mkArray(size: Int): Array[Short] = { + val newelems = new Array[Short](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Short): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Short] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofShort => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofShort" + } + + /** A class for array builders for arrays of `char`s. It can be reused. 
*/ + @SerialVersionUID(3L) + final class ofChar extends ArrayBuilder[Char] { + + protected var elems: Array[Char] = _ + + private def mkArray(size: Int): Array[Char] = { + val newelems = new Array[Char](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Char): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Char] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofChar => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofChar" + } + + /** A class for array builders for arrays of `int`s. It can be reused. */ + @SerialVersionUID(3L) + final class ofInt extends ArrayBuilder[Int] { + + protected var elems: Array[Int] = _ + + private def mkArray(size: Int): Array[Int] = { + val newelems = new Array[Int](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Int): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Int] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofInt => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofInt" + } + + /** A class for array builders for arrays of `long`s. It can be reused. */ + @SerialVersionUID(3L) + final class ofLong extends ArrayBuilder[Long] { + + protected var elems: Array[Long] = _ + + private def mkArray(size: Int): Array[Long] = { + val newelems = new Array[Long](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Long): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Long] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofLong => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofLong" + } + + /** A class for array builders for arrays of `float`s. It can be reused. 
*/ + @SerialVersionUID(3L) + final class ofFloat extends ArrayBuilder[Float] { + + protected var elems: Array[Float] = _ + + private def mkArray(size: Int): Array[Float] = { + val newelems = new Array[Float](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Float): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Float] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofFloat => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofFloat" + } + + /** A class for array builders for arrays of `double`s. It can be reused. */ + @SerialVersionUID(3L) + final class ofDouble extends ArrayBuilder[Double] { + + protected var elems: Array[Double] = _ + + private def mkArray(size: Int): Array[Double] = { + val newelems = new Array[Double](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Double): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Double] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofDouble => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofDouble" + } + + /** A class for array builders for arrays of `boolean`s. It can be reused. */ + @SerialVersionUID(3L) + class ofBoolean extends ArrayBuilder[Boolean] { + + protected var elems: Array[Boolean] = _ + + private def mkArray(size: Int): Array[Boolean] = { + val newelems = new Array[Boolean](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Boolean): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Boolean] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofBoolean => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofBoolean" + } + + /** A class for array builders for arrays of `Unit` type. It can be reused. 
*/ + @SerialVersionUID(3L) + final class ofUnit extends ArrayBuilder[Unit] { + + protected def elems: Array[Unit] = throw new UnsupportedOperationException() + + def addOne(elem: Unit): this.type = { + size += 1 + this + } + + override def addAll(xs: IterableOnce[Unit]^): this.type = { + size += xs.iterator.size + this + } + + override def addAll(xs: Array[_ <: Unit], offset: Int, length: Int): this.type = { + size += length + this + } + + def result() = { + val ans = new Array[Unit](size) + var i = 0 + while (i < size) { ans(i) = (); i += 1 } + ans + } + + override def equals(other: Any): Boolean = other match { + case x: ofUnit => (size == x.size) + case _ => false + } + + protected[this] def resize(size: Int): Unit = () + + override def toString = "ArrayBuilder.ofUnit" + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala b/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala new file mode 100644 index 000000000000..d72e483a7f60 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala @@ -0,0 +1,646 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import scala.annotation.nowarn +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.reflect.ClassTag +import language.experimental.captureChecking + +/** An implementation of a double-ended queue that internally uses a resizable circular buffer. + * + * Append, prepend, removeHead, removeLast and random-access (indexed-lookup and indexed-replacement) + * take amortized constant time. In general, removals and insertions at i-th index are O(min(i, n-i)) + * and thus insertions and removals from end/beginning are fast. + * + * @note Subclasses ''must'' override the `ofArray` protected method to return a more specific type. + * + * @tparam A the type of this ArrayDeque's elements. + * + * @define Coll `mutable.ArrayDeque` + * @define coll array deque + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +class ArrayDeque[A] protected ( + protected var array: Array[AnyRef], + private[ArrayDeque] var start: Int, + private[ArrayDeque] var end: Int +) extends AbstractBuffer[A] + with IndexedBuffer[A] + with IndexedSeqOps[A, ArrayDeque, ArrayDeque[A]] + with StrictOptimizedSeqOps[A, ArrayDeque, ArrayDeque[A]] + with IterableFactoryDefaults[A, ArrayDeque] + with ArrayDequeOps[A, ArrayDeque, ArrayDeque[A]] + with Cloneable[ArrayDeque[A]] + with DefaultSerializable { + + reset(array, start, end) + + private[this] def reset(array: Array[AnyRef], start: Int, end: Int) = { + assert((array.length & (array.length - 1)) == 0, s"Array.length must be power of 2") + requireBounds(idx = start, until = array.length) + requireBounds(idx = end, until = array.length) + this.array = array + this.start = start + this.end = end + } + + def this(initialSize: Int = ArrayDeque.DefaultInitialSize) = this(ArrayDeque.alloc(initialSize), start = 0, end = 0) + + override def knownSize: Int = super[IndexedSeqOps].knownSize + + // No-Op override to allow for more efficient stepper in a minor release. 
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = super.stepper(shape) + + def apply(idx: Int): A = { + requireBounds(idx) + _get(idx) + } + + def update(idx: Int, elem: A): Unit = { + requireBounds(idx) + _set(idx, elem) + } + + def addOne(elem: A): this.type = { + ensureSize(length + 1) + appendAssumingCapacity(elem) + } + + def prepend(elem: A): this.type = { + ensureSize(length + 1) + prependAssumingCapacity(elem) + } + + @inline private[ArrayDeque] def appendAssumingCapacity(elem: A): this.type = { + array(end) = elem.asInstanceOf[AnyRef] + end = end_+(1) + this + } + + @inline private[ArrayDeque] def prependAssumingCapacity(elem: A): this.type = { + start = start_-(1) + array(start) = elem.asInstanceOf[AnyRef] + this + } + + override def prependAll(elems: IterableOnce[A]^): this.type = { + val it = elems.iterator + if (it.nonEmpty) { + val n = length + // The following code resizes the current collection at most once and traverses elems at most twice + elems.knownSize match { + // Size is too expensive to compute AND we can traverse it only once - can't do much but retry with an IndexedSeq + case srcLength if srcLength < 0 => prependAll(it.to(IndexedSeq: Factory[A, IndexedSeq[A]] /* type ascription needed by Dotty */)) + + // We know for sure we need to resize to hold everything, might as well resize and memcopy upfront + case srcLength if mustGrow(srcLength + n) => + val finalLength = srcLength + n + val array2 = ArrayDeque.alloc(finalLength) + it.copyToArray(array2.asInstanceOf[Array[A]]) + copySliceToArray(srcStart = 0, dest = array2, destStart = srcLength, maxItems = n) + reset(array = array2, start = 0, end = finalLength) + + // Just fill up from (start - srcLength) to (start - 1) and move back start + case srcLength => + // Optimized version of `elems.zipWithIndex.foreach((elem, i) => _set(i - srcLength, elem))` + var i = 0 + while(i < srcLength) { + _set(i - srcLength, it.next()) + i += 1 + } + start = start_-(srcLength) + } + } + this + } + + override def addAll(elems: IterableOnce[A]^): this.type = { + elems.knownSize match { + case srcLength if srcLength > 0 => + ensureSize(srcLength + length) + elems.iterator.foreach(appendAssumingCapacity) + case _ => elems.iterator.foreach(+=) + } + this + } + + def insert(idx: Int, elem: A): Unit = { + requireBounds(idx, length+1) + val n = length + if (idx == 0) { + prepend(elem) + } else if (idx == n) { + addOne(elem) + } else { + val finalLength = n + 1 + if (mustGrow(finalLength)) { + val array2 = ArrayDeque.alloc(finalLength) + copySliceToArray(srcStart = 0, dest = array2, destStart = 0, maxItems = idx) + array2(idx) = elem.asInstanceOf[AnyRef] + copySliceToArray(srcStart = idx, dest = array2, destStart = idx + 1, maxItems = n) + reset(array = array2, start = 0, end = finalLength) + } else if (n <= idx * 2) { + var i = n - 1 + while(i >= idx) { + _set(i + 1, _get(i)) + i -= 1 + } + end = end_+(1) + i += 1 + _set(i, elem) + } else { + var i = 0 + while(i < idx) { + _set(i - 1, _get(i)) + i += 1 + } + start = start_-(1) + _set(i, elem) + } + } + } + + def insertAll(idx: Int, elems: IterableOnce[A]^): Unit = { + requireBounds(idx, length+1) + val n = length + if (idx == 0) { + prependAll(elems) + } else if (idx == n) { + addAll(elems) + } else { + // Get both an iterator and the length of the source (by copying the source to an IndexedSeq if needed) + val (it, srcLength) = { + val _srcLength = elems.knownSize + if (_srcLength >= 0) (elems.iterator, _srcLength) + else { + val indexed = 
IndexedSeq.from(elems) + (indexed.iterator, indexed.size) + } + } + if (it.nonEmpty) { + val finalLength = srcLength + n + // Either we resize right away or move prefix left or suffix right + if (mustGrow(finalLength)) { + val array2 = ArrayDeque.alloc(finalLength) + copySliceToArray(srcStart = 0, dest = array2, destStart = 0, maxItems = idx) + it.copyToArray(array2.asInstanceOf[Array[A]], idx) + copySliceToArray(srcStart = idx, dest = array2, destStart = idx + srcLength, maxItems = n) + reset(array = array2, start = 0, end = finalLength) + } else if (2*idx >= n) { // Cheaper to shift the suffix right + var i = n - 1 + while(i >= idx) { + _set(i + srcLength, _get(i)) + i -= 1 + } + end = end_+(srcLength) + while(it.hasNext) { + i += 1 + _set(i, it.next()) + } + } else { // Cheaper to shift prefix left + var i = 0 + while(i < idx) { + _set(i - srcLength, _get(i)) + i += 1 + } + start = start_-(srcLength) + while(it.hasNext) { + _set(i, it.next()) + i += 1 + } + } + } + } + } + + def remove(idx: Int, count: Int): Unit = { + if (count > 0) { + requireBounds(idx) + val n = length + val removals = Math.min(n - idx, count) + val finalLength = n - removals + val suffixStart = idx + removals + // If we know we can resize after removing, do it right away using arrayCopy + // Else, choose the shorter: either move the prefix (0 until idx) right OR the suffix (idx+removals until n) left + if (shouldShrink(finalLength)) { + val array2 = ArrayDeque.alloc(finalLength) + copySliceToArray(srcStart = 0, dest = array2, destStart = 0, maxItems = idx) + copySliceToArray(srcStart = suffixStart, dest = array2, destStart = idx, maxItems = n) + reset(array = array2, start = 0, end = finalLength) + } else if (2*idx <= finalLength) { // Cheaper to move the prefix right + var i = suffixStart - 1 + while(i >= removals) { + _set(i, _get(i - removals)) + i -= 1 + } + while(i >= 0) { + _set(i, null.asInstanceOf[A]) + i -= 1 + } + start = start_+(removals) + } else { // Cheaper to move the suffix left + var i = idx + while(i < finalLength) { + _set(i, _get(i + removals)) + i += 1 + } + while(i < n) { + _set(i, null.asInstanceOf[A]) + i += 1 + } + end = end_-(removals) + } + } else { + require(count == 0, s"removing negative number of elements: $count") + } + } + + def remove(idx: Int): A = { + val elem = this(idx) + remove(idx, 1) + elem + } + + override def subtractOne(elem: A): this.type = { + val idx = indexOf(elem) + if (idx >= 0) remove(idx, 1) //TODO: SeqOps should be fluent API + this + } + + /** + * + * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while + * @return + */ + def removeHeadOption(resizeInternalRepr: Boolean = false): Option[A] = + if (isEmpty) None else Some(removeHeadAssumingNonEmpty(resizeInternalRepr)) + + /** + * Unsafely remove the first element (throws exception when empty) + * See also removeHeadOption() + * + * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while + * @throws NoSuchElementException when empty + * @return + */ + def removeHead(resizeInternalRepr: Boolean = false): A = + if (isEmpty) throw new NoSuchElementException(s"empty collection") else removeHeadAssumingNonEmpty(resizeInternalRepr) + + @inline private[this] def removeHeadAssumingNonEmpty(resizeInternalRepr: Boolean = false): A = { + val elem = array(start) + array(start) = null + start = start_+(1) + if (resizeInternalRepr) resize(length) + elem.asInstanceOf[A] + } + + /** + * + * @param resizeInternalRepr If 
this is set, resize the internal representation to reclaim space once in a while
+   * @return
+   */
+  def removeLastOption(resizeInternalRepr: Boolean = false): Option[A] =
+    if (isEmpty) None else Some(removeLastAssumingNonEmpty(resizeInternalRepr))
+
+  /**
+   * Unsafely remove the last element (throws exception when empty)
+   * See also removeLastOption()
+   *
+   * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while
+   * @throws NoSuchElementException when empty
+   * @return
+   */
+  def removeLast(resizeInternalRepr: Boolean = false): A =
+    if (isEmpty) throw new NoSuchElementException(s"empty collection") else removeLastAssumingNonEmpty(resizeInternalRepr)
+
+  @`inline` private[this] def removeLastAssumingNonEmpty(resizeInternalRepr: Boolean = false): A = {
+    end = end_-(1)
+    val elem = array(end)
+    array(end) = null
+    if (resizeInternalRepr) resize(length)
+    elem.asInstanceOf[A]
+  }
+
+  /**
+   * Remove all elements from this collection and return the elements while emptying this data structure
+   * @return
+   */
+  def removeAll(): scala.collection.immutable.Seq[A] = {
+    val elems = scala.collection.immutable.Seq.newBuilder[A]
+    elems.sizeHint(length)
+    while(nonEmpty) {
+      elems += removeHeadAssumingNonEmpty()
+    }
+    elems.result()
+  }
+
+  /**
+   * Remove all elements from this collection and return the elements in reverse while emptying this data structure
+   * @return
+   */
+  def removeAllReverse(): scala.collection.immutable.Seq[A] = {
+    val elems = scala.collection.immutable.Seq.newBuilder[A]
+    elems.sizeHint(length)
+    while(nonEmpty) {
+      elems += removeLastAssumingNonEmpty()
+    }
+    elems.result()
+  }
+
+  /**
+   * Returns and removes all elements from the left of this queue which satisfy the given predicate
+   *
+   * @param f   the predicate used for choosing elements
+   * @return
+   */
+  def removeHeadWhile(f: A => Boolean): scala.collection.immutable.Seq[A] = {
+    val elems = scala.collection.immutable.Seq.newBuilder[A]
+    while(headOption.exists(f)) {
+      elems += removeHeadAssumingNonEmpty()
+    }
+    elems.result()
+  }
+
+  /**
+   * Returns and removes all elements from the right of this queue which satisfy the given predicate
+   *
+   * @param f   the predicate used for choosing elements
+   * @return
+   */
+  def removeLastWhile(f: A => Boolean): scala.collection.immutable.Seq[A] = {
+    val elems = scala.collection.immutable.Seq.newBuilder[A]
+    while(lastOption.exists(f)) {
+      elems += removeLastAssumingNonEmpty()
+    }
+    elems.result()
+  }
+
+  /** Returns the first element which satisfies the given predicate after or at some start index
+   *  and removes this element from the collection
+   *
+   *  @param p     the predicate used for choosing the first element
+   *  @param from  the start index
+   *  @return the first element of the queue for which p yields true
+   */
+  def removeFirst(p: A => Boolean, from: Int = 0): Option[A] = {
+    val i = indexWhere(p, from)
+    if (i < 0) None else Some(remove(i))
+  }
+
+  /** Returns all elements in this collection which satisfy the given predicate
+   *  and removes those elements from this collection.
+   *
+   *  @param p   the predicate used for choosing elements
+   *  @return    a sequence of all elements in the queue for which
+   *             p yields true.
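+   *
+   *  A small sketch (illustrative values) of the intended behaviour:
+   *  {{{
+   *  val dq = ArrayDeque(1, 2, 3, 4)
+   *  dq.removeAll(_ % 2 == 0) // returns Seq(2, 4); dq is now ArrayDeque(1, 3)
+   *  }}}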
+   */
+  def removeAll(p: A => Boolean): scala.collection.immutable.Seq[A] = {
+    val res = scala.collection.immutable.Seq.newBuilder[A]
+    var i, j = 0
+    while (i < size) {
+      if (p(this(i))) {
+        res += this(i)
+      } else {
+        if (i != j) {
+          this(j) = this(i)
+        }
+        j += 1
+      }
+      i += 1
+    }
+    if (i != j) takeInPlace(j)
+    res.result()
+  }
+
+  @inline def ensureSize(hint: Int) = if (hint > length && mustGrow(hint)) resize(hint)
+
+  def length = end_-(start)
+
+  override def isEmpty = start == end
+
+  override protected def klone(): ArrayDeque[A] = new ArrayDeque(array.clone(), start = start, end = end)
+
+  override def iterableFactory: SeqFactory[ArrayDeque] = ArrayDeque
+
+  /**
+   * Note: This does not actually resize the internal representation.
+   * See clearAndShrink if you want to also resize internally.
+   */
+  def clear(): Unit = {
+    while(nonEmpty) {
+      removeHeadAssumingNonEmpty()
+    }
+  }
+
+  /**
+   * Clears this buffer and shrinks the backing array to the given size
+   *
+   * @param size the size of the backing array after clearing
+   * @return this buffer
+   */
+  def clearAndShrink(size: Int = ArrayDeque.DefaultInitialSize): this.type = {
+    reset(array = ArrayDeque.alloc(size), start = 0, end = 0)
+    this
+  }
+
+  protected def ofArray(array: Array[AnyRef], end: Int): ArrayDeque[A] =
+    new ArrayDeque[A](array, start = 0, end)
+
+  override def copyToArray[B >: A](dest: Array[B], destStart: Int, len: Int): Int = {
+    val copied = IterableOnce.elemsToCopyToArray(length, dest.length, destStart, len)
+    if (copied > 0) {
+      copySliceToArray(srcStart = 0, dest = dest, destStart = destStart, maxItems = len)
+    }
+    copied
+  }
+
+  override def toArray[B >: A: ClassTag]: Array[B] =
+    copySliceToArray(srcStart = 0, dest = new Array[B](length), destStart = 0, maxItems = length)
+
+  /**
+   * Trims the capacity of this ArrayDeque's instance to be the current size
+   */
+  def trimToSize(): Unit = resize(length)
+
+  // Utils for common modular arithmetic:
+  @inline protected def start_+(idx: Int) = (start + idx) & (array.length - 1)
+  @inline private[this] def start_-(idx: Int) = (start - idx) & (array.length - 1)
+  @inline private[this] def end_+(idx: Int) = (end + idx) & (array.length - 1)
+  @inline private[this] def end_-(idx: Int) = (end - idx) & (array.length - 1)
+
+  // Note: here be overflow dragons! This is used for int overflow
+  // assumptions in resize(). Use caution changing.
+  @inline private[this] def mustGrow(len: Int) = {
+    len >= array.length
+  }
+
+  // Assumes that 0 <= len < array.length!
+  @inline private[this] def shouldShrink(len: Int) = {
+    // To avoid allocation churn, only shrink when array is large
+    // and less than 2/5 filled.
+    array.length > ArrayDeque.StableSize && array.length - len - (len >> 1) > len
+  }
+
+  // Assumes that 0 <= len < array.length!
+  @inline private[this] def canShrink(len: Int) = {
+    array.length > ArrayDeque.DefaultInitialSize && array.length - len > len
+  }
+
+  @inline private[this] def _get(idx: Int): A = array(start_+(idx)).asInstanceOf[A]
+
+  @inline private[this] def _set(idx: Int, elem: A) = array(start_+(idx)) = elem.asInstanceOf[AnyRef]
+
+  // Assumes that 0 <= len.
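+  // A sketch of the contract (illustrative numbers): resize(len) swaps in a
+  // fresh array from ArrayDeque.alloc(len) and compacts the live elements to
+  // start = 0. E.g. a deque holding 5 elements in a 64-slot array satisfies
+  // canShrink(5), so resize(5) moves it into a 16-slot array (alloc(5) == 16).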
+  private[this] def resize(len: Int) = if (mustGrow(len) || canShrink(len)) {
+    val n = length
+    val array2 = copySliceToArray(srcStart = 0, dest = ArrayDeque.alloc(len), destStart = 0, maxItems = n)
+    reset(array = array2, start = 0, end = n)
+  }
+
+  @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+  override protected[this] def stringPrefix = "ArrayDeque"
+}
+
+/**
+  * $factoryInfo
+  * @define coll array deque
+  * @define Coll `ArrayDeque`
+  */
+@SerialVersionUID(3L)
+object ArrayDeque extends StrictOptimizedSeqFactory[ArrayDeque] {
+
+  def from[B](coll: collection.IterableOnce[B]^): ArrayDeque[B] = {
+    val s = coll.knownSize
+    if (s >= 0) {
+      val array = alloc(s)
+      val actual = IterableOnce.copyElemsToArray(coll, array.asInstanceOf[Array[Any]])
+      if (actual != s) throw new IllegalStateException(s"Copied $actual of $s")
+      new ArrayDeque[B](array, start = 0, end = s)
+    } else new ArrayDeque[B]() ++= coll
+  }
+
+  def newBuilder[A]: Builder[A, ArrayDeque[A]] =
+    new GrowableBuilder[A, ArrayDeque[A]](empty) {
+      override def sizeHint(size: Int): Unit = {
+        elems.ensureSize(size)
+      }
+    }
+
+  def empty[A]: ArrayDeque[A] = new ArrayDeque[A]()
+
+  final val DefaultInitialSize = 16
+
+  /**
+    * We try to not repeatedly resize arrays smaller than this
+    */
+  private[ArrayDeque] final val StableSize = 128
+
+  /**
+    * Allocates an array whose size is the next power of 2 > `len`.
+    * The largest possible `len` is 1<<30 - 1.
+    *
+    * @param len
+    * @return
+    */
+  private[mutable] def alloc(len: Int) = {
+    require(len >= 0, s"Non-negative array size required")
+    val size = (1 << 31) >>> java.lang.Integer.numberOfLeadingZeros(len) << 1
+    require(size >= 0, s"ArrayDeque too big - cannot allocate ArrayDeque of length $len")
+    new Array[AnyRef](Math.max(size, DefaultInitialSize))
+  }
+}
+
+trait ArrayDequeOps[A, +CC[_], +C <: AnyRef] extends StrictOptimizedSeqOps[A, CC, C] {
+  protected def array: Array[AnyRef]
+
+  final override def clone(): C = klone()
+
+  protected def klone(): C
+
+  protected def ofArray(array: Array[AnyRef], end: Int): C
+
+  protected def start_+(idx: Int): Int
+
+  @inline protected final def requireBounds(idx: Int, until: Int = length): Unit =
+    if (idx < 0 || idx >= until) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${until-1})")
+
+  /**
+    * This is a more general version of copyToArray - unlike copyToArray, it also accepts a srcStart.
+    * It copies maxItems elements from this collection's srcStart to dest's destStart.
+    * If we reach the end of either collection before we could copy maxItems, we simply stop copying.
+    *
+    * @param dest
+    * @param srcStart
+    * @param destStart
+    * @param maxItems
+    */
+  def copySliceToArray(srcStart: Int, dest: Array[_], destStart: Int, maxItems: Int): dest.type = {
+    requireBounds(destStart, dest.length+1)
+    val toCopy = Math.min(maxItems, Math.min(length - srcStart, dest.length - destStart))
+    if (toCopy > 0) {
+      requireBounds(srcStart)
+      val startIdx = start_+(srcStart)
+      val block1 = Math.min(toCopy, array.length - startIdx)
+      Array.copy(src = array, srcPos = startIdx, dest = dest, destPos = destStart, length = block1)
+      val block2 = toCopy - block1
+      if (block2 > 0) Array.copy(src = array, srcPos = 0, dest = dest, destPos = destStart + block1, length = block2)
+    }
+    dest
+  }
+
+  override def reverse: C = {
+    val n = length
+    val arr = ArrayDeque.alloc(n)
+    var i = 0
+    while(i < n) {
+      arr(i) = this(n - i - 1).asInstanceOf[AnyRef]
+      i += 1
+    }
+    ofArray(arr, n)
+  }
+
+  override def slice(from: Int,
until: Int): C = { + val n = length + val left = Math.max(0, Math.min(n, from)) + val right = Math.max(0, Math.min(n, until)) + val len = right - left + if (len <= 0) { + empty + } else if (len >= n) { + klone() + } else { + val array2 = copySliceToArray(srcStart = left, dest = ArrayDeque.alloc(len), destStart = 0, maxItems = len) + ofArray(array2, len) + } + } + + override def sliding(window: Int, step: Int): Iterator[C] = { + require(window > 0 && step > 0, s"window=$window and step=$step, but both must be positive") + length match { + case 0 => Iterator.empty + case n if n <= window => Iterator.single(slice(0, length)) + case n => + val lag = if (window > step) window - step else 0 + Iterator.range(start = 0, end = n - lag, step = step).map(i => slice(i, i + window)) + } + } + + override def grouped(n: Int): Iterator[C] = sliding(n, n) +} diff --git a/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala b/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala new file mode 100644 index 000000000000..70762e5b340d --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala @@ -0,0 +1,350 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import java.util.Arrays + +import scala.collection.Stepper.EfficientSplit +import scala.collection.convert.impl._ +import scala.reflect.ClassTag +import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking + +/** + * A collection representing `Array[T]`. Unlike `ArrayBuffer` it is always backed by the same + * underlying `Array`, therefore it is not growable or shrinkable. + * + * @tparam T type of the elements in this wrapped array. + * + * @define Coll `ArraySeq` + * @define coll wrapped array + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@SerialVersionUID(3L) +sealed abstract class ArraySeq[T] + extends AbstractSeq[T] + with IndexedSeq[T] + with IndexedSeqOps[T, ArraySeq, ArraySeq[T]] + with StrictOptimizedSeqOps[T, ArraySeq, ArraySeq[T]] + with Serializable + with Pure { + + override def iterableFactory: scala.collection.SeqFactory[ArraySeq] = ArraySeq.untagged + + override protected def fromSpecific(coll: scala.collection.IterableOnce[T]^): ArraySeq[T] = { + val b = ArrayBuilder.make(elemTag).asInstanceOf[ArrayBuilder[T]] + val s = coll.knownSize + if(s > 0) b.sizeHint(s) + b ++= coll + ArraySeq.make(b.result()) + } + override protected def newSpecificBuilder: Builder[T, ArraySeq[T]] = + ArraySeq.newBuilder[T](elemTag.asInstanceOf[ClassTag[T]]).asInstanceOf[Builder[T, ArraySeq[T]]] + override def empty: ArraySeq[T] = ArraySeq.empty(elemTag.asInstanceOf[ClassTag[T]]) + + /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. A primitive + * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype + * or subtype of the element type. */ + def elemTag: ClassTag[_] + + /** Update element at given index */ + def update(@deprecatedName("idx", "2.13.0") index: Int, elem: T): Unit + + /** The underlying array. Its element type does not have to be equal to the element type of this ArraySeq. 
A primitive
+   * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype
+   * or subtype of the element type. */
+  def array: Array[_]
+
+  override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit
+
+  override protected[this] def className = "ArraySeq"
+
+  /** Clones this object, including the underlying Array. */
+  override def clone(): ArraySeq[T] = ArraySeq.make[T](array.clone().asInstanceOf[Array[T]])
+
+  override def copyToArray[B >: T](xs: Array[B], start: Int, len: Int): Int = {
+    val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len)
+    if(copied > 0) {
+      Array.copy(array, 0, xs, start, copied)
+    }
+    copied
+  }
+
+  override def equals(other: Any): Boolean = other match {
+    case that: ArraySeq[_] if this.array.length != that.array.length =>
+      false
+    case _ =>
+      super.equals(other)
+  }
+
+  override def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq[T] =
+    ArraySeq.make(array.asInstanceOf[Array[T]].sorted(ord.asInstanceOf[Ordering[Any]])).asInstanceOf[ArraySeq[T]]
+
+  override def sortInPlace[B >: T]()(implicit ord: Ordering[B]): this.type = {
+    if (length > 1) scala.util.Sorting.stableSort(array.asInstanceOf[Array[B]])
+    this
+  }
+}
+
+/** A companion object used to create instances of `ArraySeq`.
+  */
+@SerialVersionUID(3L)
+object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self =>
+  val untagged: SeqFactory[ArraySeq] = new ClassTagSeqFactory.AnySeqDelegate(self)
+
+  // This is reused for all calls to empty.
+  private[this] val EmptyArraySeq = new ofRef[AnyRef](new Array[AnyRef](0))
+  def empty[T : ClassTag]: ArraySeq[T] = EmptyArraySeq.asInstanceOf[ArraySeq[T]]
+
+  def from[A : ClassTag](it: scala.collection.IterableOnce[A]^): ArraySeq[A] = make(Array.from[A](it))
+
+  def newBuilder[A : ClassTag]: Builder[A, ArraySeq[A]] = ArrayBuilder.make[A].mapResult(make)
+
+  /**
+   * Wrap an existing `Array` into an `ArraySeq` of the proper primitive specialization type
+   * without copying.
+   *
+   * Note that an array containing boxed primitives can be converted to an `ArraySeq` without
+   * copying. For example, `val a: Array[Any] = Array(1)` is an array of `Object` at runtime,
+   * containing `Integer`s. An `ArraySeq[Int]` can be obtained with a cast:
+   * `ArraySeq.make(a).asInstanceOf[ArraySeq[Int]]`. The values are still
+   * boxed, and the resulting instance is an [[ArraySeq.ofRef]]. Writing
+   * `ArraySeq.make(a.asInstanceOf[Array[Int]])` does not work; it throws a `ClassCastException`
+   * at runtime.
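+   *
+   * A sketch of both cases described above (illustrative only):
+   * {{{
+   * val a: Array[Any] = Array(1)                   // Array[Object] at runtime
+   * ArraySeq.make(a).asInstanceOf[ArraySeq[Int]]   // ok: an ofRef over boxed values
+   * // ArraySeq.make(a.asInstanceOf[Array[Int]])   // throws ClassCastException
+   * }}}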
+ */ + def make[T](x: Array[T]): ArraySeq[T] = ((x.asInstanceOf[Array[_]]: @unchecked) match { + case null => null + case x: Array[AnyRef] => new ofRef[AnyRef](x) + case x: Array[Int] => new ofInt(x) + case x: Array[Double] => new ofDouble(x) + case x: Array[Long] => new ofLong(x) + case x: Array[Float] => new ofFloat(x) + case x: Array[Char] => new ofChar(x) + case x: Array[Byte] => new ofByte(x) + case x: Array[Short] => new ofShort(x) + case x: Array[Boolean] => new ofBoolean(x) + case x: Array[Unit] => new ofUnit(x) + }).asInstanceOf[ArraySeq[T]] + + @SerialVersionUID(3L) + final class ofRef[T <: AnyRef](val array: Array[T]) extends ArraySeq[T] { + def elemTag = ClassTag[T](array.getClass.getComponentType) + def length: Int = array.length + def apply(index: Int): T = array(index) + def update(index: Int, elem: T): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofRef[_] => + Array.equals( + this.array.asInstanceOf[Array[AnyRef]], + that.array.asInstanceOf[Array[AnyRef]]) + case _ => super.equals(that) + } + override def iterator: Iterator[T] = new ArrayOps.ArrayIterator[T](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + new ObjectArrayStepper(array, 0, array.length) + else shape.parUnbox(new ObjectArrayStepper(array, 0, array.length).asInstanceOf[AnyStepper[T] with EfficientSplit]) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofByte(val array: Array[Byte]) extends ArraySeq[Byte] { + def elemTag = ClassTag.Byte + def length: Int = array.length + def apply(index: Int): Byte = array(index) + def update(index: Int, elem: Byte): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofByte => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Byte] = new ArrayOps.ArrayIterator[Byte](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Byte, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedByteArrayStepper(array, 0, array.length)) + else new WidenedByteArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofShort(val array: Array[Short]) extends ArraySeq[Short] { + def elemTag = ClassTag.Short + def length: Int = array.length + def apply(index: Int): Short = array(index) + def update(index: Int, elem: Short): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofShort => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Short] = new ArrayOps.ArrayIterator[Short](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Short, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedShortArrayStepper(array, 0, array.length)) + else new WidenedShortArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofChar(val array: Array[Char]) extends ArraySeq[Char] { + def elemTag = ClassTag.Char + def length: Int = array.length + def apply(index: Int): Char = 
array(index) + def update(index: Int, elem: Char): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofChar => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Char] = new ArrayOps.ArrayIterator[Char](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedCharArrayStepper(array, 0, array.length)) + else new WidenedCharArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + + override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = { + val jsb = sb.underlying + if (start.length != 0) jsb.append(start) + val len = array.length + if (len != 0) { + if (sep.isEmpty) jsb.append(array) + else { + jsb.ensureCapacity(jsb.length + len + end.length + (len - 1) * sep.length) + jsb.append(array(0)) + var i = 1 + while (i < len) { + jsb.append(sep) + jsb.append(array(i)) + i += 1 + } + } + } + if (end.length != 0) jsb.append(end) + sb + } + } + + @SerialVersionUID(3L) + final class ofInt(val array: Array[Int]) extends ArraySeq[Int] { + def elemTag = ClassTag.Int + def length: Int = array.length + def apply(index: Int): Int = array(index) + def update(index: Int, elem: Int): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofInt => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Int] = new ArrayOps.ArrayIterator[Int](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new IntArrayStepper(array, 0, array.length)) + else new IntArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofLong(val array: Array[Long]) extends ArraySeq[Long] { + def elemTag = ClassTag.Long + def length: Int = array.length + def apply(index: Int): Long = array(index) + def update(index: Int, elem: Long): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofLong => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Long] = new ArrayOps.ArrayIterator[Long](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Long, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParLongStepper(new LongArrayStepper(array, 0, array.length)) + else new LongArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofFloat(val array: Array[Float]) extends ArraySeq[Float] { + def elemTag = ClassTag.Float + def length: Int = array.length + def apply(index: Int): Float = array(index) + def update(index: Int, elem: Float): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofFloat => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Float] = new ArrayOps.ArrayIterator[Float](array) + override def stepper[S <: 
Stepper[_]](implicit shape: StepperShape[Float, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new WidenedFloatArrayStepper(array, 0, array.length)) + else new WidenedFloatArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofDouble(val array: Array[Double]) extends ArraySeq[Double] { + def elemTag = ClassTag.Double + def length: Int = array.length + def apply(index: Int): Double = array(index) + def update(index: Int, elem: Double): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofDouble => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Double] = new ArrayOps.ArrayIterator[Double](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Double, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new DoubleArrayStepper(array, 0, array.length)) + else new DoubleArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofBoolean(val array: Array[Boolean]) extends ArraySeq[Boolean] { + def elemTag = ClassTag.Boolean + def length: Int = array.length + def apply(index: Int): Boolean = array(index) + def update(index: Int, elem: Boolean): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofBoolean => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Boolean] = new ArrayOps.ArrayIterator[Boolean](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Boolean, S]): S with EfficientSplit = + new BoxedBooleanArrayStepper(array, 0, array.length).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofUnit(val array: Array[Unit]) extends ArraySeq[Unit] { + def elemTag = ClassTag.Unit + def length: Int = array.length + def apply(index: Int): Unit = array(index) + def update(index: Int, elem: Unit): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofUnit => array.length == that.array.length + case _ => super.equals(that) + } + override def iterator: Iterator[Unit] = new ArrayOps.ArrayIterator[Unit](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Unit, S]): S with EfficientSplit = + new ObjectArrayStepper[AnyRef](array.asInstanceOf[Array[AnyRef]], 0, array.length).asInstanceOf[S with EfficientSplit] + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/BitSet.scala b/tests/pos-special/stdlib/collection/mutable/BitSet.scala new file mode 100644 index 000000000000..dcb8a157389b --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/BitSet.scala @@ -0,0 +1,393 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package mutable + +import scala.collection.immutable.Range +import BitSetOps.{LogWL, MaxSize} +import scala.annotation.implicitNotFound +import language.experimental.captureChecking + +/** + * A class for mutable bitsets. + * + * $bitsetinfo + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#mutable-bitsets "Scala's Collection Library overview"]] + * section on `Mutable Bitsets` for more information. + * + * @define Coll `BitSet` + * @define coll bitset + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +class BitSet(protected[collection] final var elems: Array[Long]) + extends AbstractSet[Int] + with SortedSet[Int] + with SortedSetOps[Int, SortedSet, BitSet] + with StrictOptimizedIterableOps[Int, Set, BitSet] + with StrictOptimizedSortedSetOps[Int, SortedSet, BitSet] + with collection.BitSet + with collection.BitSetOps[BitSet] + with Serializable { + + def this(initSize: Int) = this(new Array[Long](math.max((initSize + 63) >> 6, 1))) + + def this() = this(0) + + override protected def fromSpecific(coll: IterableOnce[Int]^): BitSet = bitSetFactory.fromSpecific(coll) + override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder + override def empty: BitSet = bitSetFactory.empty + + def bitSetFactory = BitSet + + override def unsorted: Set[Int] = this + + protected[collection] final def nwords: Int = elems.length + + protected[collection] final def word(idx: Int): Long = + if (idx < nwords) elems(idx) else 0L + + protected[collection] def fromBitMaskNoCopy(elems: Array[Long]): BitSet = + if (elems.length == 0) empty + else new BitSet(elems) + + def addOne(elem: Int): this.type = { + require(elem >= 0) + if (!contains(elem)) { + val idx = elem >> LogWL + updateWord(idx, word(idx) | (1L << elem)) + } + this + } + + def subtractOne(elem: Int): this.type = { + require(elem >= 0) + if (contains(elem)) { + val idx = elem >> LogWL + updateWord(idx, word(idx) & ~(1L << elem)) + } + this + } + + def clear(): Unit = { + elems = new Array[Long](elems.length) + } + + protected final def updateWord(idx: Int, w: Long): Unit = { + ensureCapacity(idx) + elems(idx) = w + } + + protected final def ensureCapacity(idx: Int): Unit = { + require(idx < MaxSize) + if (idx >= nwords) { + var newlen = nwords + while (idx >= newlen) newlen = math.min(newlen * 2, MaxSize) + val elems1 = new Array[Long](newlen) + Array.copy(elems, 0, elems1, 0, nwords) + elems = elems1 + } + } + + def unconstrained: collection.Set[Int] = this + + /** Updates this bitset to the union with another bitset by performing a bitwise "or". + * + * @param other the bitset to form the union with. + * @return the bitset itself. + */ + def |= (other: collection.BitSet): this.type = { + ensureCapacity(other.nwords - 1) + var i = 0 + val othernwords = other.nwords + while (i < othernwords) { + elems(i) = elems(i) | other.word(i) + i += 1 + } + this + } + /** Updates this bitset to the intersection with another bitset by performing a bitwise "and". + * + * @param other the bitset to form the intersection with. + * @return the bitset itself. + */ + def &= (other: collection.BitSet): this.type = { + // Different from other operations: no need to ensure capacity because + // anything beyond the capacity is 0. Since we use other.word which is 0 + // off the end, we also don't need to make sure we stay in bounds there. 
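+    // Sketch (illustrative): for this == {1, 2, 3} and other == {2, 3, 4},
+    // `this &= other` leaves this == {2, 3}; any of our words beyond
+    // other's length are ANDed with other.word(i) == 0 and thus cleared.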
+ var i = 0 + val thisnwords = nwords + while (i < thisnwords) { + elems(i) = elems(i) & other.word(i) + i += 1 + } + this + } + /** Updates this bitset to the symmetric difference with another bitset by performing a bitwise "xor". + * + * @param other the bitset to form the symmetric difference with. + * @return the bitset itself. + */ + def ^= (other: collection.BitSet): this.type = { + ensureCapacity(other.nwords - 1) + var i = 0 + val othernwords = other.nwords + while (i < othernwords) { + + elems(i) = elems(i) ^ other.word(i) + i += 1 + } + this + } + /** Updates this bitset to the difference with another bitset by performing a bitwise "and-not". + * + * @param other the bitset to form the difference with. + * @return the bitset itself. + */ + def &~= (other: collection.BitSet): this.type = { + var i = 0 + val max = Math.min(nwords, other.nwords) + while (i < max) { + elems(i) = elems(i) & ~other.word(i) + i += 1 + } + this + } + + override def clone(): BitSet = new BitSet(java.util.Arrays.copyOf(elems, elems.length)) + + def toImmutable: immutable.BitSet = immutable.BitSet.fromBitMask(elems) + + override def map(f: Int => Int): BitSet = strictOptimizedMap(newSpecificBuilder, f) + override def map[B](f: Int => B)(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].map(f) + + override def flatMap(f: Int => IterableOnce[Int]): BitSet = strictOptimizedFlatMap(newSpecificBuilder, f) + override def flatMap[B](f: Int => IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].flatMap(f) + + override def collect(pf: PartialFunction[Int, Int]): BitSet = strictOptimizedCollect(newSpecificBuilder, pf) + override def collect[B](pf: scala.PartialFunction[Int, B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].collect(pf) + + // necessary for disambiguation + override def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.zipOrdMsg) ev: Ordering[(Int, B)]): SortedSet[(Int, B)] = + super.zip(that) + + override def addAll(xs: IterableOnce[Int]^): this.type = xs match { + case bs: collection.BitSet => + this |= bs + case range: Range => + if (range.nonEmpty) { + val start = range.min + if (start >= 0) { + val end = range.max + val endIdx = end >> LogWL + ensureCapacity(endIdx) + + if (range.step == 1 || range.step == -1) { + val startIdx = start >> LogWL + val wordStart = startIdx * BitSetOps.WordLength + val wordMask = -1L << (start - wordStart) + + if (endIdx > startIdx) { + elems(startIdx) |= wordMask + java.util.Arrays.fill(elems, startIdx + 1, endIdx, -1L) + elems(endIdx) |= -1L >>> (BitSetOps.WordLength - (end - endIdx * BitSetOps.WordLength) - 1) + } else elems(endIdx) |= (wordMask & (-1L >>> (BitSetOps.WordLength - (end - wordStart) - 1))) + } else super.addAll(range) + } else super.addAll(range) + } + this + + case sorted: collection.SortedSet[Int] => + // if `sorted` is using the regular Int ordering, ensure capacity for the largest + // element up front to avoid multiple resizing allocations + if (sorted.nonEmpty) { + val ord = sorted.ordering + if (ord eq Ordering.Int) { + ensureCapacity(sorted.lastKey >> LogWL) + } else if (ord eq Ordering.Int.reverse) { + ensureCapacity(sorted.firstKey >> LogWL) + } + val iter = sorted.iterator + while (iter.hasNext) { + addOne(iter.next()) + } + } + + this + + case other => + super.addAll(other) + } + 
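+  // Illustration of the `Range` fast path in `addAll` above (hypothetical
+  // call): `addAll(0 to 127)` fills elems(0) and elems(1) with -1L directly
+  // instead of adding all 128 elements one by one.
+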
+ override def subsetOf(that: collection.Set[Int]): Boolean = that match { + case bs: collection.BitSet => + val thisnwords = this.nwords + val bsnwords = bs.nwords + val minWords = Math.min(thisnwords, bsnwords) + + // if any bits are set to `1` in words out of range of `bs`, then this is not a subset. Start there + var i = bsnwords + while (i < thisnwords) { + if (word(i) != 0L) return false + i += 1 + } + + // the higher range of `this` is all `0`s, fall back to lower range + var j = 0 + while (j < minWords) { + if ((word(j) & ~bs.word(j)) != 0L) return false + j += 1 + } + + true + case other => + super.subsetOf(other) + } + + override def subtractAll(xs: IterableOnce[Int]^): this.type = xs match { + case bs: collection.BitSet => this &~= bs + case other => super.subtractAll(other) + } + + protected[this] def writeReplace(): AnyRef = new BitSet.SerializationProxy(this) + + override def diff(that: collection.Set[Int]): BitSet = that match { + case bs: collection.BitSet => + /* + * Algorithm: + * + * We iterate, word-by-word, backwards from the shortest of the two bitsets (this, or bs) i.e. the one with + * the fewer words. + * + * Array Shrinking: + * If `this` is not longer than `bs`, then since we must iterate through the full array of words, + * we can track the new highest index word which is non-zero, at little additional cost. At the end, the new + * Array[Long] allocated for the returned BitSet will only be of size `maxNonZeroIndex + 1` + */ + + val bsnwords = bs.nwords + val thisnwords = nwords + if (bsnwords >= thisnwords) { + // here, we may have opportunity to shrink the size of the array + // so, track the highest index which is non-zero. That ( + 1 ) will be our new array length + var i = thisnwords - 1 + var currentWord = 0L + + while (i >= 0 && currentWord == 0L) { + val oldWord = word(i) + currentWord = oldWord & ~bs.word(i) + i -= 1 + } + + if (i < 0) { + fromBitMaskNoCopy(Array(currentWord)) + } else { + val minimumNonZeroIndex: Int = i + 1 + val newArray = elems.take(minimumNonZeroIndex + 1) + newArray(i + 1) = currentWord + while (i >= 0) { + newArray(i) = word(i) & ~bs.word(i) + i -= 1 + } + fromBitMaskNoCopy(newArray) + } + } else { + // here, there is no opportunity to shrink the array size, no use in tracking highest non-zero index + val newElems = elems.clone() + var i = bsnwords - 1 + while (i >= 0) { + newElems(i) = word(i) & ~bs.word(i) + i -= 1 + } + fromBitMaskNoCopy(newElems) + } + case _ => super.diff(that) + } + + override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { + // We filter the BitSet from highest to lowest, so we can determine exactly the highest non-zero word + // index which lets us avoid: + // * over-allocating -- the resulting array will be exactly the right size + // * multiple resizing allocations -- the array is allocated one time, not log(n) times. 
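+    // Sketch (illustrative): filtering {1, 100} with (_ < 64) sees word 1
+    // filter to 0L and word 0 stay non-zero, so the result array has length 1.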
+    var i = nwords - 1
+    var newArray: Array[Long] = null
+    while (i >= 0) {
+      val w = BitSetOps.computeWordForFilter(pred, isFlipped, word(i), i)
+      if (w != 0L) {
+        if (newArray eq null) {
+          newArray = new Array(i + 1)
+        }
+        newArray(i) = w
+      }
+      i -= 1
+    }
+    if (newArray eq null) {
+      empty
+    } else {
+      fromBitMaskNoCopy(newArray)
+    }
+  }
+
+  override def filterInPlace(p: Int => Boolean): this.type = {
+    val thisnwords = nwords
+    var i = 0
+    while (i < thisnwords) {
+      elems(i) = BitSetOps.computeWordForFilter(p, isFlipped = false, elems(i), i)
+      i += 1
+    }
+    this
+  }
+
+  override def toBitMask: Array[Long] = elems.clone()
+}
+
+@SerialVersionUID(3L)
+object BitSet extends SpecificIterableFactory[Int, BitSet] {
+
+  def fromSpecific(it: scala.collection.IterableOnce[Int]^): BitSet = Growable.from(empty, it)
+
+  def empty: BitSet = new BitSet()
+
+  def newBuilder: Builder[Int, BitSet] = new GrowableBuilder(empty)
+
+  /** A bitset containing all the bits in an array */
+  def fromBitMask(elems: Array[Long]): BitSet = {
+    val len = elems.length
+    if (len == 0) empty
+    else {
+      val a = java.util.Arrays.copyOf(elems, len)
+      new BitSet(a)
+    }
+  }
+
+  /** A bitset containing all the bits in an array, wrapping the existing
+    * array without copying.
+    */
+  def fromBitMaskNoCopy(elems: Array[Long]): BitSet = {
+    val len = elems.length
+    if (len == 0) empty
+    else new BitSet(elems)
+  }
+
+  @SerialVersionUID(3L)
+  private final class SerializationProxy(coll: BitSet) extends scala.collection.BitSet.SerializationProxy(coll) {
+    protected[this] def readResolve(): Any = BitSet.fromBitMaskNoCopy(elems)
+  }
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/Buffer.scala b/tests/pos-special/stdlib/collection/mutable/Buffer.scala
new file mode 100644
index 000000000000..1f7fc76972a2
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/Buffer.scala
@@ -0,0 +1,233 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package mutable
+
+import scala.annotation.nowarn
+import language.experimental.captureChecking
+
+
+/** A `Buffer` is a growable and shrinkable `Seq`. */
+trait Buffer[A]
+  extends Seq[A]
+    with SeqOps[A, Buffer, Buffer[A]]
+    with Growable[A]
+    with Shrinkable[A]
+    with IterableFactoryDefaults[A, Buffer] {
+
+  override def iterableFactory: SeqFactory[Buffer] = Buffer
+
+  override def knownSize: Int = super[Seq].knownSize
+
+  //TODO Prepend is a logical choice for a readable name of `+=:` but it conflicts with the renaming of `append` to `add`
+  /** Prepends a single element at the front of this $coll.
+    *
+    *  @param elem  the element to $add.
+    *  @return the $coll itself
+    */
+  def prepend(elem: A): this.type
+
+  /** Appends the given element to this buffer.
+    *
+    *  @param elem  the element to append.
+    */
+  @`inline` final def append(elem: A): this.type = addOne(elem)
+
+  @deprecated("Use appendAll instead", "2.13.0")
+  @`inline` final def append(elems: A*): this.type = addAll(elems)
+
+  /** Appends the elements contained in an iterable object to this buffer.
+    *  @param xs  the iterable object containing the elements to append.
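+    *
+    *  For example (sketch):
+    *  {{{
+    *  val buf = Buffer(1, 2)
+    *  buf.appendAll(List(3, 4)) // buf is now Buffer(1, 2, 3, 4)
+    *  }}}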
+    */
+  @`inline` final def appendAll(xs: IterableOnce[A]^): this.type = addAll(xs)
+
+
+  /** Alias for `prepend` */
+  @`inline` final def +=: (elem: A): this.type = prepend(elem)
+
+  def prependAll(elems: IterableOnce[A]^): this.type = { insertAll(0, elems); this }
+
+  @deprecated("Use prependAll instead", "2.13.0")
+  @`inline` final def prepend(elems: A*): this.type = prependAll(elems)
+
+  /** Alias for `prependAll` */
+  @inline final def ++=:(elems: IterableOnce[A]^): this.type = prependAll(elems)
+
+  /** Inserts a new element at a given index into this buffer.
+    *
+    *  @param idx   the index where the new element is inserted.
+    *  @param elem  the element to insert.
+    *  @throws   IndexOutOfBoundsException if the index `idx` is not in the valid range
+    *            `0 <= idx <= length`.
+    */
+  @throws[IndexOutOfBoundsException]
+  def insert(idx: Int, elem: A): Unit
+
+  /** Inserts new elements at the index `idx`. As opposed to method
+    *  `update`, this method will not replace an element with a new
+    *  one. Instead, it will insert a new element at index `idx`.
+    *
+    *  @param idx    the index where a new element will be inserted.
+    *  @param elems  the iterable object providing all elements to insert.
+    *  @throws IndexOutOfBoundsException if `idx` is out of bounds.
+    */
+  @throws[IndexOutOfBoundsException]
+  def insertAll(idx: Int, elems: IterableOnce[A]^): Unit
+
+  /** Removes the element at a given index position.
+    *
+    *  @param idx  the index which refers to the element to delete.
+    *  @return   the element that was formerly at index `idx`.
+    */
+  @throws[IndexOutOfBoundsException]
+  def remove(idx: Int): A
+
+  /** Removes a given number of elements at a given index position. It takes time linear in
+    *  the buffer size.
+    *
+    *  @param idx    the index which refers to the first element to remove.
+    *  @param count  the number of elements to remove.
+    *  @throws   IndexOutOfBoundsException if the index `idx` is not in the valid range
+    *            `0 <= idx <= length - count` (with `count > 0`).
+    *  @throws   IllegalArgumentException if `count < 0`.
+    */
+  @throws[IndexOutOfBoundsException]
+  @throws[IllegalArgumentException]
+  def remove(idx: Int, count: Int): Unit
+
+  /** Removes a single element from this buffer, at its first occurrence.
+    *  If the buffer does not contain that element, it is unchanged.
+    *
+    *  @param x  the element to remove.
+    *  @return   the buffer itself
+    */
+  def subtractOne (x: A): this.type = {
+    val i = indexOf(x)
+    if (i != -1) remove(i)
+    this
+  }
+
+  /** Removes the first ''n'' elements of this buffer.
+    *
+    *  @param n  the number of elements to remove from the beginning
+    *            of this buffer.
+    */
+  @deprecated("use dropInPlace instead", since = "2.13.4")
+  def trimStart(n: Int): Unit = dropInPlace(n)
+
+  /** Removes the last ''n'' elements of this buffer.
+    *
+    *  @param n  the number of elements to remove from the end
+    *            of this buffer.
+ */ + @deprecated("use dropRightInPlace instead", since = "2.13.4") + def trimEnd(n: Int): Unit = dropRightInPlace(n) + + def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A]^, replaced: Int): this.type + + // +=, ++=, clear inherited from Growable + // Per remark of @ichoran, we should preferably not have these: + // + // def +=:(elem: A): this.type = { insert(0, elem); this } + // def +=:(elem1: A, elem2: A, elems: A*): this.type = elem1 +=: elem2 +=: elems ++=: this + // def ++=:(elems: IterableOnce[A]): this.type = { insertAll(0, elems); this } + + def dropInPlace(n: Int): this.type = { remove(0, normalized(n)); this } + def dropRightInPlace(n: Int): this.type = { + val norm = normalized(n) + remove(length - norm, norm) + this + } + def takeInPlace(n: Int): this.type = { + val norm = normalized(n) + remove(norm, length - norm) + this + } + def takeRightInPlace(n: Int): this.type = { remove(0, length - normalized(n)); this } + def sliceInPlace(start: Int, end: Int): this.type = takeInPlace(end).dropInPlace(start) + private def normalized(n: Int): Int = math.min(math.max(n, 0), length) + + def dropWhileInPlace(p: A => Boolean): this.type = { + val idx = indexWhere(!p(_)) + if (idx < 0) { clear(); this } else dropInPlace(idx) + } + def takeWhileInPlace(p: A => Boolean): this.type = { + val idx = indexWhere(!p(_)) + if (idx < 0) this else takeInPlace(idx) + } + def padToInPlace(len: Int, elem: A): this.type = { + while (length < len) +=(elem) + this + } + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "Buffer" +} + +trait IndexedBuffer[A] extends IndexedSeq[A] + with IndexedSeqOps[A, IndexedBuffer, IndexedBuffer[A]] + with Buffer[A] + with IterableFactoryDefaults[A, IndexedBuffer] { + + override def iterableFactory: SeqFactory[IndexedBuffer] = IndexedBuffer + + def flatMapInPlace(f: A => IterableOnce[A]^): this.type = { + // There's scope for a better implementation which copies elements in place. + var i = 0 + val s = size + val newElems = new Array[(IterableOnce[A]^{f*})](s) + while (i < s) { newElems(i) = f(this(i)); i += 1 } + clear() + i = 0 + while (i < s) { ++=(newElems(i)); i += 1 } + this + } + + def filterInPlace(p: A => Boolean): this.type = { + var i, j = 0 + while (i < size) { + if (p(apply(i))) { + if (i != j) { + this(j) = this(i) + } + j += 1 + } + i += 1 + } + + if (i == j) this else takeInPlace(j) + } + + def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A]^, replaced: Int): this.type = { + val replaced0 = math.min(math.max(replaced, 0), length) + val i = math.min(math.max(from, 0), length) + var j = 0 + val iter = patch.iterator + while (iter.hasNext && j < replaced0 && i + j < length) { + update(i + j, iter.next()) + j += 1 + } + if (iter.hasNext) insertAll(i + j, iter) + else if (j < replaced0) remove(i + j, math.min(replaced0 - j, length - i - j)) + this + } +} + +@SerialVersionUID(3L) +object Buffer extends SeqFactory.Delegate[Buffer](ArrayBuffer) + +@SerialVersionUID(3L) +object IndexedBuffer extends SeqFactory.Delegate[IndexedBuffer](ArrayBuffer) + +/** Explicit instantiation of the `Buffer` trait to reduce class file size in subclasses. 
 */
+abstract class AbstractBuffer[A] extends AbstractSeq[A] with Buffer[A]
diff --git a/tests/pos-special/stdlib/collection/mutable/Builder.scala b/tests/pos-special/stdlib/collection/mutable/Builder.scala
new file mode 100644
index 000000000000..dd57cb75da91
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/Builder.scala
@@ -0,0 +1,92 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.mutable
+
+import language.experimental.captureChecking
+
+
+/** Base trait for collection builders.
+  *
+  * After calling `result()` the behavior of a Builder (which is not also a [[scala.collection.mutable.ReusableBuilder]])
+  * is undefined. No further methods should be called. It is common for mutable collections to be their own non-reusable
+  * Builder, in which case `result()` simply returns `this`.
+  *
+  * @see [[scala.collection.mutable.ReusableBuilder]] for Builders which can be reused after calling `result()`
+  */
+trait Builder[-A, +To] extends Growable[A] {
+  self: Builder[A, To]^ =>
+
+  /** Clears the contents of this builder.
+   *  After execution of this method the builder will contain no elements.
+   */
+  def clear(): Unit
+
+  /** Result collection consisting of all elements appended so far. */
+  def result(): To
+
+  /** Gives a hint how many elements are expected to be added
+   *  when the next `result` is called. Some builder classes
+   *  will optimize their representation based on the hint. However,
+   *  builder implementations are still required to work correctly even if the hint is
+   *  wrong, i.e. a different number of elements is added.
+   *
+   * @param size  the hint how many elements will be added.
+   */
+  def sizeHint(size: Int): Unit = ()
+
+  /** Gives a hint that one expects the `result` of this builder
+   *  to have the same size as the given collection, plus some delta. This will
+   *  provide a hint only if the collection has a known size.
+   *  Some builder classes
+   *  will optimize their representation based on the hint. However,
+   *  builder implementations are still required to work correctly even if the hint is
+   *  wrong, i.e. a different number of elements is added.
+   *
+   * @param coll  the collection which serves as a hint for the result's size.
+   * @param delta a correction to add to the `coll.size` to produce the size hint.
+   */
+  final def sizeHint(coll: scala.collection.IterableOnce[_]^, delta: Int = 0): Unit = {
+    val s = coll.knownSize
+    if (s != -1) sizeHint(s + delta)
+  }
+
+  /** Gives a hint how many elements are expected to be added
+   *  when the next `result` is called, together with an upper bound
+   *  given by the size of some other collection. Some builder classes
+   *  will optimize their representation based on the hint. However,
+   *  builder implementations are still required to work correctly even if the hint is
+   *  wrong, i.e. a different number of elements is added.
+   *
+   * @param size  the hint how many elements will be added.
+   * @param boundingColl  the bounding collection. If it is
+   *                      an IndexedSeqLike, then sizes larger
+   *                      than collection's size are reduced.
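+   *
+   *  Worked example (sketch): if `boundingColl.knownSize == 10`, then
+   *  `sizeHintBounded(1000, boundingColl)` forwards `sizeHint(10)`,
+   *  since the hint is capped by the bound.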
+ */ + // should probably be `boundingColl: IterableOnce[_]`, but binary compatibility + final def sizeHintBounded(size: Int, boundingColl: scala.collection.Iterable[_]^): Unit = { + val s = boundingColl.knownSize + if (s != -1) { + sizeHint(scala.math.min(s, size)) + } + } + + /** A builder resulting from this builder by mapping the result using `f`. */ + def mapResult[NewTo](f: To => NewTo): Builder[A, NewTo]^{this, f} = new Builder[A, NewTo] { + def addOne(x: A): this.type = { self += x; this } + def clear(): Unit = self.clear() + override def addAll(xs: IterableOnce[A]^): this.type = { self ++= xs; this } + override def sizeHint(size: Int): Unit = self.sizeHint(size) + def result(): NewTo = f(self.result()) + override def knownSize: Int = self.knownSize + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala b/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala new file mode 100644 index 000000000000..152b6cc9ffc7 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala @@ -0,0 +1,120 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable +import language.experimental.captureChecking + +private[mutable] trait CheckedIndexedSeqView[+A] extends IndexedSeqView[A] { + this: CheckedIndexedSeqView[A]^ => + + protected val mutationCount: () => Int + + override def iterator: Iterator[A]^{this} = new CheckedIndexedSeqView.CheckedIterator(this, mutationCount()) + override def reverseIterator: Iterator[A]^{this} = new CheckedIndexedSeqView.CheckedReverseIterator(this, mutationCount()) + + override def appended[B >: A](elem: B): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Appended(this, elem)(mutationCount) + override def prepended[B >: A](elem: B): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Prepended(elem, this)(mutationCount) + override def take(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Take(this, n)(mutationCount) + override def takeRight(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount) + override def drop(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Drop(this, n)(mutationCount) + override def dropRight(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount) + override def map[B](f: A => B): IndexedSeqView[B]^{this, f} = new CheckedIndexedSeqView.Map(this, f)(mutationCount) + override def reverse: IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Reverse(this)(mutationCount) + override def slice(from: Int, until: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount) + override def tapEach[U](f: A => U): IndexedSeqView[A]^{this, f} = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) + + override def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]):
IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Concat(prefix, this)(mutationCount) +} + +private[mutable] object CheckedIndexedSeqView { + import IndexedSeqView.SomeIndexedSeqOps + + @SerialVersionUID(3L) + private[mutable] class CheckedIterator[A](self: IndexedSeqView[A]^, mutationCount: => Int) + extends IndexedSeqView.IndexedSeqViewIterator[A](self) { + private[this] val expectedCount = mutationCount + override def hasNext: Boolean = { + MutationTracker.checkMutationsForIteration(expectedCount, mutationCount) + super.hasNext + } + } + + @SerialVersionUID(3L) + private[mutable] class CheckedReverseIterator[A](self: IndexedSeqView[A]^, mutationCount: => Int) + extends IndexedSeqView.IndexedSeqViewReverseIterator[A](self) { + private[this] val expectedCount = mutationCount + override def hasNext: Boolean = { + MutationTracker.checkMutationsForIteration(expectedCount, mutationCount) + super.hasNext + } + } + + @SerialVersionUID(3L) + class Id[+A](underlying: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int) + extends IndexedSeqView.Id(underlying) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Appended[+A](underlying: SomeIndexedSeqOps[A]^, elem: A)(protected val mutationCount: () => Int) + extends IndexedSeqView.Appended(underlying, elem) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Prepended[+A](elem: A, underlying: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int) + extends IndexedSeqView.Prepended(elem, underlying) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Concat[A](prefix: SomeIndexedSeqOps[A]^, suffix: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int) + extends IndexedSeqView.Concat[A](prefix, suffix) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Take[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.Take(underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class TakeRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.TakeRight(underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Drop[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.Drop[A](underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class DropRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.DropRight[A](underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Map[A, B](underlying: SomeIndexedSeqOps[A]^, f: A => B)(protected val mutationCount: () => Int) + extends IndexedSeqView.Map(underlying, f) with CheckedIndexedSeqView[B] + + @SerialVersionUID(3L) + class Reverse[A](underlying: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int) + extends IndexedSeqView.Reverse[A](underlying) with CheckedIndexedSeqView[A] { + override def reverse: IndexedSeqView[A] = underlying match { + case x: IndexedSeqView[A] => x + case _ => super.reverse + } + } + + @SerialVersionUID(3L) + class Slice[A](underlying: SomeIndexedSeqOps[A]^, from: Int, until: Int)(protected val mutationCount: () => Int) + extends AbstractIndexedSeqView[A] with CheckedIndexedSeqView[A] { + protected val lo = from max 0 + protected val hi = (until max 0) min underlying.length + protected val len = (hi - lo) max 0 + @throws[IndexOutOfBoundsException] + def apply(i: Int): A = underlying(lo + i) + def 
length: Int = len + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/Cloneable.scala b/tests/pos-special/stdlib/collection/mutable/Cloneable.scala new file mode 100644 index 000000000000..39149e98cbf0 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/Cloneable.scala @@ -0,0 +1,22 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.mutable +import language.experimental.captureChecking + +/** A trait for cloneable collections. + * + * @tparam C Type of the collection, covariant and with reference types as upper bound. + */ +trait Cloneable[+C <: AnyRef] extends scala.Cloneable { + override def clone(): C = super.clone().asInstanceOf[C] +} diff --git a/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala b/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala new file mode 100644 index 000000000000..0679ab7be201 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala @@ -0,0 +1,889 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.{unchecked => uc} +import scala.annotation.{implicitNotFound, tailrec, unused} +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.generic.DefaultSerializationProxy +import scala.runtime.Statics +import language.experimental.captureChecking + +/** This class implements mutable maps using a hashtable with red-black trees in the buckets for good + * worst-case performance on hash collisions. An `Ordering` is required for the element type. Equality + * as determined by the `Ordering` has to be consistent with `equals` and `hashCode`. Universal equality + * of numeric types is not supported (similar to `AnyRefMap`). + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] + * section on `Hash Tables` for more information. + * + * @define Coll `mutable.CollisionProofHashMap` + * @define coll mutable collision-proof hash map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double)(implicit ordering: Ordering[K]) + extends AbstractMap[K, V] + with MapOps[K, V, Map, CollisionProofHashMap[K, V]] //-- + with StrictOptimizedIterableOps[(K, V), Iterable, CollisionProofHashMap[K, V]] + with StrictOptimizedMapOps[K, V, Map, CollisionProofHashMap[K, V]] { //-- + + private[this] final def sortedMapFactory: SortedMapFactory[CollisionProofHashMap] = CollisionProofHashMap + + def this()(implicit ordering: Ordering[K]) = this(CollisionProofHashMap.defaultInitialCapacity, CollisionProofHashMap.defaultLoadFactor)(ordering) + + import CollisionProofHashMap.Node + private[this] type RBNode = CollisionProofHashMap.RBNode[K, V] + private[this] type LLNode = CollisionProofHashMap.LLNode[K, V] + + /** The actual hash table.
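As a usage note for `Cloneable` above: the override narrows `java.lang.Object.clone` so that mutable collections return their own static type as a shallow copy. A quick sketch, with illustrative results in comments:

```scala
import scala.collection.mutable.ArrayBuffer

val a = ArrayBuffer(1, 2, 3)
val b = a.clone()   // statically an ArrayBuffer[Int], thanks to Cloneable[+C]
b += 4
// a is untouched: ArrayBuffer(1, 2, 3); b is ArrayBuffer(1, 2, 3, 4)
```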
*/ + private[this] var table: Array[Node] = new Array[Node](tableSizeFor(initialCapacity)) + + /** The next size value at which to resize (capacity * load factor). */ + private[this] var threshold: Int = newThreshold(table.length) + + private[this] var contentSize = 0 + + override def size: Int = contentSize + + @`inline` private[this] final def computeHash(o: K): Int = { + val h = if(o.asInstanceOf[AnyRef] eq null) 0 else o.hashCode + h ^ (h >>> 16) + } + + @`inline` private[this] final def index(hash: Int) = hash & (table.length - 1) + + override protected def fromSpecific(coll: (IterableOnce[(K, V)]^) @uncheckedVariance): CollisionProofHashMap[K, V] @uncheckedVariance = CollisionProofHashMap.from(coll) + override protected def newSpecificBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] @uncheckedVariance = CollisionProofHashMap.newBuilder[K, V] + + override def empty: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] + + override def contains(key: K): Boolean = findNode(key) ne null + + def get(key: K): Option[V] = findNode(key) match { + case null => None + case nd => Some(nd match { + case nd: LLNode @uc => nd.value + case nd: RBNode @uc => nd.value + }) + } + + @throws[NoSuchElementException] + override def apply(key: K): V = findNode(key) match { + case null => default(key) + case nd => nd match { + case nd: LLNode @uc => nd.value + case nd: RBNode @uc => nd.value + } + } + + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + val nd = findNode(key) + if (nd eq null) default else nd match { + case nd: LLNode @uc => nd.value + case n => n.asInstanceOf[RBNode].value + } + } + + @`inline` private[this] def findNode(elem: K): Node = { + val hash = computeHash(elem) + table(index(hash)) match { + case null => null + case n: LLNode @uc => n.getNode(elem, hash) + case n => n.asInstanceOf[RBNode].getNode(elem, hash) + } + } + + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt) + if(target > table.length) { + if(size == 0) reallocTable(target) + else growTable(target) + } + } + + override def update(key: K, value: V): Unit = put0(key, value, false) + + override def put(key: K, value: V): Option[V] = put0(key, value, true) match { + case null => None + case sm => sm + } + + def addOne(elem: (K, V)): this.type = { put0(elem._1, elem._2, false); this } + + @`inline` private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + val hash = computeHash(key) + val idx = index(hash) + put0(key, value, getOld, hash, idx) + } + + private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { + val res = table(idx) match { + case n: RBNode @uc => + insert(n, idx, key, hash, value) + case _old => + val old: LLNode = _old.asInstanceOf[LLNode] + if(old eq null) { + table(idx) = new LLNode(key, hash, value, null) + } else { + var remaining = CollisionProofHashMap.treeifyThreshold + var prev: LLNode = null + var n = old + while((n ne null) && n.hash <= hash && remaining > 0) { + if(n.hash == hash && key == n.key) { + val old = n.value + n.value = value + return (if(getOld) Some(old) else null) + } + prev = n + n = n.next + remaining -= 1 + } + if(remaining == 0) { + treeify(old, idx) + return put0(key, value, getOld, hash, idx) + } + if(prev eq null) table(idx) = new LLNode(key, hash, value, old) + else prev.next = new LLNode(key, hash, value, prev.next) + } + true + } + if(res) contentSize += 1 + if(res) 
Some(null.asInstanceOf[V]) else null //TODO + } + + private[this] def treeify(old: LLNode, idx: Int): Unit = { + table(idx) = CollisionProofHashMap.leaf(old.key, old.hash, old.value, red = false, null) + var n: LLNode = old.next + while(n ne null) { + val root = table(idx).asInstanceOf[RBNode] + insertIntoExisting(root, idx, n.key, n.hash, n.value, root) + n = n.next + } + } + + override def addAll(xs: IterableOnce[(K, V)]^): this.type = { + val k = xs.knownSize + if(k > 0) sizeHint(contentSize + k) + super.addAll(xs) + } + + // returns the old value or Statics.pfMarker if not found + private[this] def remove0(elem: K) : Any = { + val hash = computeHash(elem) + val idx = index(hash) + table(idx) match { + case null => Statics.pfMarker + case t: RBNode @uc => + val v = delete(t, idx, elem, hash) + if(v.asInstanceOf[AnyRef] ne Statics.pfMarker) contentSize -= 1 + v + case nd: LLNode @uc if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + contentSize -= 1 + nd.value + case nd: LLNode @uc => + // find an element that matches + var prev = nd + var next = nd.next + while((next ne null) && next.hash <= hash) { + if(next.hash == hash && next.key == elem) { + prev.next = next.next + contentSize -= 1 + return next.value + } + prev = next + next = next.next + } + Statics.pfMarker + } + } + + private[this] abstract class MapIterator[R] extends AbstractIterator[R] { + protected[this] def extract(node: LLNode): R + protected[this] def extract(node: RBNode): R + + private[this] var i = 0 + private[this] var node: Node = null + private[this] val len = table.length + + def hasNext: Boolean = { + if(node ne null) true + else { + while(i < len) { + val n = table(i) + i += 1 + n match { + case null => + case n: RBNode @uc => + node = CollisionProofHashMap.minNodeNonNull(n) + return true + case n: LLNode @uc => + node = n + return true + } + } + false + } + } + + def next(): R = + if(!hasNext) Iterator.empty.next() + else node match { + case n: RBNode @uc => + val r = extract(n) + node = CollisionProofHashMap.successor(n ) + r + case n: LLNode @uc => + val r = extract(n) + node = n.next + r + } + } + + override def keysIterator: Iterator[K] = { + if (isEmpty) Iterator.empty + else new MapIterator[K] { + protected[this] def extract(node: LLNode) = node.key + protected[this] def extract(node: RBNode) = node.key + } + } + + override def iterator: Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else new MapIterator[(K, V)] { + protected[this] def extract(node: LLNode) = (node.key, node.value) + protected[this] def extract(node: RBNode) = (node.key, node.value) + } + } + + private[this] def growTable(newlen: Int) = { + var oldlen = table.length + table = java.util.Arrays.copyOf(table, newlen) + threshold = newThreshold(table.length) + while(oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if(old ne null) splitBucket(old, i, i + oldlen, oldlen) + i += 1 + } + oldlen *= 2 + } + } + + @`inline` private[this] def reallocTable(newlen: Int) = { + table = new Array(newlen) + threshold = newThreshold(table.length) + } + + @`inline` private[this] def splitBucket(tree: Node, lowBucket: Int, highBucket: Int, mask: Int): Unit = tree match { + case t: LLNode @uc => splitBucket(t, lowBucket, highBucket, mask) + case t: RBNode @uc => splitBucket(t, lowBucket, highBucket, mask) + } + + private[this] def splitBucket(list: LLNode, lowBucket: Int, highBucket: Int, mask: Int): Unit = { + val preLow: LLNode = new LLNode(null.asInstanceOf[K], 0, 
null.asInstanceOf[V], null) + val preHigh: LLNode = new LLNode(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) + //preLow.next = null + //preHigh.next = null + var lastLow: LLNode = preLow + var lastHigh: LLNode = preHigh + var n = list + while(n ne null) { + val next = n.next + if((n.hash & mask) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if(list ne preLow.next) table(lowBucket) = preLow.next + if(preHigh.next ne null) { + table(highBucket) = preHigh.next + lastHigh.next = null + } + } + + private[this] def splitBucket(tree: RBNode, lowBucket: Int, highBucket: Int, mask: Int): Unit = { + var lowCount, highCount = 0 + tree.foreachNode((n: RBNode) => if((n.hash & mask) != 0) highCount += 1 else lowCount += 1) + if(highCount != 0) { + if(lowCount == 0) { + table(lowBucket) = null + table(highBucket) = tree + } else { + table(lowBucket) = fromNodes(new CollisionProofHashMap.RBNodesIterator(tree).filter(n => (n.hash & mask) == 0), lowCount) + table(highBucket) = fromNodes(new CollisionProofHashMap.RBNodesIterator(tree).filter(n => (n.hash & mask) != 0), highCount) + } + } + } + + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt + + override def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + } + + override def remove(key: K): Option[V] = { + val v = remove0(key) + if(v.asInstanceOf[AnyRef] eq Statics.pfMarker) None else Some(v.asInstanceOf[V]) + } + + def subtractOne(elem: K): this.type = { remove0(elem); this } + + override def knownSize: Int = size + + override def isEmpty: Boolean = size == 0 + + override def foreach[U](f: ((K, V)) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n match { + case n: LLNode @uc => n.foreach(f) + case n: RBNode @uc => n.foreach(f) + } + i += 1 + } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n match { + case n: LLNode @uc => n.foreachEntry(f) + case n: RBNode @uc => n.foreachEntry(f) + } + i += 1 + } + } + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new CollisionProofHashMap.DeserializationFactory[K, V](table.length, loadFactor, ordering), this) + + override protected[this] def className = "CollisionProofHashMap" + + override def getOrElseUpdate(key: K, defaultValue: => V): V = { + val hash = computeHash(key) + val idx = index(hash) + table(idx) match { + case null => () + case n: LLNode @uc => + val nd = n.getNode(key, hash) + if(nd != null) return nd.value + case n => + val nd = n.asInstanceOf[RBNode].getNode(key, hash) + if(nd != null) return nd.value + } + val table0 = table + val default = defaultValue + if(contentSize + 1 >= threshold) growTable(table.length * 2) + // Avoid recomputing index if the `defaultValue()` or new element hasn't triggered a table resize. + val newIdx = if (table0 eq table) idx else index(hash) + put0(key, default, false, hash, newIdx) + default + } + + ///////////////////// Overrides code from SortedMapOps + + /** Builds a new `CollisionProofHashMap` by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. 
+ * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. + */ + def map[K2, V2](f: ((K, V)) => (K2, V2)) + (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = + sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f)) + + /** Builds a new `CollisionProofHashMap` by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. + */ + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]) + (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = + sortedMapFactory.from(new View.FlatMap(this, f)) + + /** Builds a new sorted map by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. + */ + def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]) + (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = + sortedMapFactory.from(new View.Collect(this, pf)) + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): CollisionProofHashMap[K, V2] = sortedMapFactory.from(suffix match { + case it: Iterable[(K, V2)] => new View.Concat(this, it) + case _ => iterator.concat(suffix.iterator) + }) + + /** Alias for `concat` */ + @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]^): CollisionProofHashMap[K, V2] = concat(xs) + + @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") + override def + [V1 >: V](kv: (K, V1)): CollisionProofHashMap[K, V1] = + sortedMapFactory.from(new View.Appended(this, kv)) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CollisionProofHashMap[K, V1] = + sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems)) + + ///////////////////// RedBlackTree code derived from mutable.RedBlackTree: + + @`inline` private[this] def isRed(node: RBNode) = (node ne null) && node.red + @`inline` private[this] def isBlack(node: RBNode) = (node eq null) || !node.red + + @unused @`inline` private[this] def compare(key: K, hash: Int, node: LLNode): Int = { + val i = hash - node.hash + if(i != 0) i else ordering.compare(key, node.key) + } + + @`inline` private[this] def compare(key: K, hash: Int, node: RBNode): Int = { + /*val i = hash - node.hash + if(i != 0) i else*/ ordering.compare(key, node.key) + } + + // ---- insertion ---- + + @tailrec private[this] final def insertIntoExisting(_root: RBNode, bucket: Int, key: K, hash: Int, value: V, x: RBNode): Boolean = { + val cmp = compare(key, hash, x) + if(cmp == 0) { + x.value = value + false + } else { + val next = if(cmp < 0) x.left else x.right + if(next eq null) { + val z = CollisionProofHashMap.leaf(key, hash, value, red = true, x) + if (cmp < 0) x.left = z else x.right = z + table(bucket) = 
fixAfterInsert(_root, z) + return true + } + else insertIntoExisting(_root, bucket, key, hash, value, next) + } + } + + private[this] final def insert(tree: RBNode, bucket: Int, key: K, hash: Int, value: V): Boolean = { + if(tree eq null) { + table(bucket) = CollisionProofHashMap.leaf(key, hash, value, red = false, null) + true + } else insertIntoExisting(tree, bucket, key, hash, value, tree) + } + + private[this] def fixAfterInsert(_root: RBNode, node: RBNode): RBNode = { + var root = _root + var z = node + while (isRed(z.parent)) { + if (z.parent eq z.parent.parent.left) { + val y = z.parent.parent.right + if (isRed(y)) { + z.parent.red = false + y.red = false + z.parent.parent.red = true + z = z.parent.parent + } else { + if (z eq z.parent.right) { + z = z.parent + root = rotateLeft(root, z) + } + z.parent.red = false + z.parent.parent.red = true + root = rotateRight(root, z.parent.parent) + } + } else { // symmetric cases + val y = z.parent.parent.left + if (isRed(y)) { + z.parent.red = false + y.red = false + z.parent.parent.red = true + z = z.parent.parent + } else { + if (z eq z.parent.left) { + z = z.parent + root = rotateRight(root, z) + } + z.parent.red = false + z.parent.parent.red = true + root = rotateLeft(root, z.parent.parent) + } + } + } + root.red = false + root + } + + // ---- deletion ---- + + // returns the old value or Statics.pfMarker if not found + private[this] def delete(_root: RBNode, bucket: Int, key: K, hash: Int): Any = { + var root = _root + val z = root.getNode(key, hash: Int) + if (z ne null) { + val oldValue = z.value + var y = z + var yIsRed = y.red + var x: RBNode = null + var xParent: RBNode = null + + if (z.left eq null) { + x = z.right + root = transplant(root, z, z.right) + xParent = z.parent + } + else if (z.right eq null) { + x = z.left + root = transplant(root, z, z.left) + xParent = z.parent + } + else { + y = CollisionProofHashMap.minNodeNonNull(z.right) + yIsRed = y.red + x = y.right + + if (y.parent eq z) xParent = y + else { + xParent = y.parent + root = transplant(root, y, y.right) + y.right = z.right + y.right.parent = y + } + root = transplant(root, z, y) + y.left = z.left + y.left.parent = y + y.red = z.red + } + + if (!yIsRed) root = fixAfterDelete(root, x, xParent) + if(root ne _root) table(bucket) = root + oldValue + } else Statics.pfMarker + } + + private[this] def fixAfterDelete(_root: RBNode, node: RBNode, parent: RBNode): RBNode = { + var root = _root + var x = node + var xParent = parent + while ((x ne root) && isBlack(x)) { + if (x eq xParent.left) { + var w = xParent.right + // assert(w ne null) + + if (w.red) { + w.red = false + xParent.red = true + root = rotateLeft(root, xParent) + w = xParent.right + } + if (isBlack(w.left) && isBlack(w.right)) { + w.red = true + x = xParent + } else { + if (isBlack(w.right)) { + w.left.red = false + w.red = true + root = rotateRight(root, w) + w = xParent.right + } + w.red = xParent.red + xParent.red = false + w.right.red = false + root = rotateLeft(root, xParent) + x = root + } + } else { // symmetric cases + var w = xParent.left + // assert(w ne null) + + if (w.red) { + w.red = false + xParent.red = true + root = rotateRight(root, xParent) + w = xParent.left + } + if (isBlack(w.right) && isBlack(w.left)) { + w.red = true + x = xParent + } else { + if (isBlack(w.left)) { + w.right.red = false + w.red = true + root = rotateLeft(root, w) + w = xParent.left + } + w.red = xParent.red + xParent.red = false + w.left.red = false + root = rotateRight(root, xParent) + x = root + } + } + xParent = 
x.parent + } + if (x ne null) x.red = false + root + } + + // ---- helpers ---- + + @`inline` private[this] def rotateLeft(_root: RBNode, x: RBNode): RBNode = { + var root = _root + val y = x.right + x.right = y.left + + val xp = x.parent + if (y.left ne null) y.left.parent = x + y.parent = xp + + if (xp eq null) root = y + else if (x eq xp.left) xp.left = y + else xp.right = y + + y.left = x + x.parent = y + root + } + + @`inline` private[this] def rotateRight(_root: RBNode, x: RBNode): RBNode = { + var root = _root + val y = x.left + x.left = y.right + + val xp = x.parent + if (y.right ne null) y.right.parent = x + y.parent = xp + + if (xp eq null) root = y + else if (x eq xp.right) xp.right = y + else xp.left = y + + y.right = x + x.parent = y + root + } + + /** + * Transplant the node `from` to the place of node `to`. This is done by setting `from` as a child of `to`'s previous + * parent and setting `from`'s parent to the `to`'s previous parent. The children of `from` are left unchanged. + */ + private[this] def transplant(_root: RBNode, to: RBNode, from: RBNode): RBNode = { + var root = _root + if (to.parent eq null) root = from + else if (to eq to.parent.left) to.parent.left = from + else to.parent.right = from + if (from ne null) from.parent = to.parent + root + } + + // building + + def fromNodes(xs: Iterator[Node], size: Int): RBNode = { + val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes + def f(level: Int, size: Int): RBNode = size match { + case 0 => null + case 1 => + val nn = xs.next() + val (key, hash, value) = nn match { + case nn: LLNode @uc => (nn.key, nn.hash, nn.value) + case nn: RBNode @uc => (nn.key, nn.hash, nn.value) + } + new RBNode(key, hash, value, level == maxUsedDepth && level != 1, null, null, null) + case n => + val leftSize = (size-1)/2 + val left = f(level+1, leftSize) + val nn = xs.next() + val right = f(level+1, size-1-leftSize) + val (key, hash, value) = nn match { + case nn: LLNode @uc => (nn.key, nn.hash, nn.value) + case nn: RBNode @uc => (nn.key, nn.hash, nn.value) + } + val n = new RBNode(key, hash, value, false, left, right, null) + if(left ne null) left.parent = n + right.parent = n + n + } + f(1, size) + } +} + +/** + * $factoryInfo + * @define Coll `mutable.CollisionProofHashMap` + * @define coll mutable collision-proof hash map + */ +@SerialVersionUID(3L) +object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { + private[collection] final val ordMsg = "No implicit Ordering[${K2}] found to build a CollisionProofHashMap[${K2}, ${V2}]. You may want to upcast to a Map[${K}, ${V}] first by calling `unsorted`." 
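For orientation, a brief usage sketch of the map defined above. Construction needs an `Ordering` on the key type, which is what the custom `ordMsg` implicit-not-found message reports when one is missing; the results in comments are illustrative:

```scala
import scala.collection.mutable.CollisionProofHashMap

val m = CollisionProofHashMap("a" -> 1, "b" -> 2)  // uses the implicit Ordering[String]
m("c") = 3            // update
m.remove("a")         // Some(1)
m.get("a")            // None
// For a key type with no Ordering in scope, construction does not compile,
// and the ordMsg above is what the compiler prints.
```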
+ + def from[K : Ordering, V](it: scala.collection.IterableOnce[(K, V)]^): CollisionProofHashMap[K, V] = { + val k = it.knownSize + val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity + new CollisionProofHashMap[K, V](cap, defaultLoadFactor) ++= it + } + + def empty[K : Ordering, V]: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] + + def newBuilder[K : Ordering, V]: Builder[(K, V), CollisionProofHashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) + + def newBuilder[K : Ordering, V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), CollisionProofHashMap[K, V]] = + new GrowableBuilder[(K, V), CollisionProofHashMap[K, V]](new CollisionProofHashMap[K, V](initialCapacity, loadFactor)) { + override def sizeHint(size: Int) = elems.sizeHint(size) + } + + /** The default load factor for the hash table */ + final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + final def defaultInitialCapacity: Int = 16 + + @SerialVersionUID(3L) + private final class DeserializationFactory[K, V](val tableLength: Int, val loadFactor: Double, val ordering: Ordering[K]) extends Factory[(K, V), CollisionProofHashMap[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]^): CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V](tableLength, loadFactor)(ordering) ++= it + def newBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] = CollisionProofHashMap.newBuilder(tableLength, loadFactor)(ordering) + } + + @unused @`inline` private def compare[K, V](key: K, hash: Int, node: LLNode[K, V])(implicit ord: Ordering[K]): Int = { + val i = hash - node.hash + if(i != 0) i else ord.compare(key, node.key) + } + + @`inline` private def compare[K, V](key: K, hash: Int, node: RBNode[K, V])(implicit ord: Ordering[K]): Int = { + /*val i = hash - node.hash + if(i != 0) i else*/ ord.compare(key, node.key) + } + + private final val treeifyThreshold = 8 + + // Superclass for RBNode and LLNode to help the JIT with optimizing instance checks, but no shared common fields. + // Keeping calls monomorphic where possible and dispatching manually where needed is faster. 
+ sealed abstract class Node + + /////////////////////////// Red-Black Tree Node + + final class RBNode[K, V](var key: K, var hash: Int, var value: V, var red: Boolean, var left: RBNode[K, V], var right: RBNode[K, V], var parent: RBNode[K, V]) extends Node { + override def toString: String = "RBNode(" + key + ", " + hash + ", " + value + ", " + red + ", " + left + ", " + right + ")" + + @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): RBNode[K, V] = { + val cmp = compare(k, h, this) + if (cmp < 0) { + if(left ne null) left.getNode(k, h) else null + } else if (cmp > 0) { + if(right ne null) right.getNode(k, h) else null + } else this + } + + def foreach[U](f: ((K, V)) => U): Unit = { + if(left ne null) left.foreach(f) + f((key, value)) + if(right ne null) right.foreach(f) + } + + def foreachEntry[U](f: (K, V) => U): Unit = { + if(left ne null) left.foreachEntry(f) + f(key, value) + if(right ne null) right.foreachEntry(f) + } + + def foreachNode[U](f: RBNode[K, V] => U): Unit = { + if(left ne null) left.foreachNode(f) + f(this) + if(right ne null) right.foreachNode(f) + } + } + + @`inline` private def leaf[A, B](key: A, hash: Int, value: B, red: Boolean, parent: RBNode[A, B]): RBNode[A, B] = + new RBNode(key, hash, value, red, null, null, parent) + + @tailrec private def minNodeNonNull[A, B](node: RBNode[A, B]): RBNode[A, B] = + if (node.left eq null) node else minNodeNonNull(node.left) + + /** + * Returns the node that follows `node` in an in-order tree traversal. If `node` has the maximum key (and is, + * therefore, the last node), this method returns `null`. + */ + private def successor[A, B](node: RBNode[A, B]): RBNode[A, B] = { + if (node.right ne null) minNodeNonNull(node.right) + else { + var x = node + var y = x.parent + while ((y ne null) && (x eq y.right)) { + x = y + y = y.parent + } + y + } + } + + private final class RBNodesIterator[A, B](tree: RBNode[A, B])(implicit @unused ord: Ordering[A]) extends AbstractIterator[RBNode[A, B]] { + private[this] var nextNode: RBNode[A, B] = if(tree eq null) null else minNodeNonNull(tree) + + def hasNext: Boolean = nextNode ne null + + @throws[NoSuchElementException] + def next(): RBNode[A, B] = nextNode match { + case null => Iterator.empty.next() + case node => + nextNode = successor(node) + node + } + } + + /////////////////////////// Linked List Node + + private final class LLNode[K, V](var key: K, var hash: Int, var value: V, var next: LLNode[K, V]) extends Node { + override def toString = s"LLNode($key, $value, $hash) -> $next" + + private[this] def eq(a: Any, b: Any): Boolean = + if(a.asInstanceOf[AnyRef] eq null) b.asInstanceOf[AnyRef] eq null else a.asInstanceOf[AnyRef].equals(b) + + @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): LLNode[K, V] = { + if(h == hash && eq(k, key) /*ord.compare(k, key) == 0*/) this + else if((next eq null) || (hash > h)) null + else next.getNode(k, h) + } + + @tailrec def foreach[U](f: ((K, V)) => U): Unit = { + f((key, value)) + if(next ne null) next.foreach(f) + } + + @tailrec def foreachEntry[U](f: (K, V) => U): Unit = { + f(key, value) + if(next ne null) next.foreachEntry(f) + } + + @tailrec def foreachNode[U](f: LLNode[K, V] => U): Unit = { + f(this) + if(next ne null) next.foreachNode(f) + } + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/Growable.scala b/tests/pos-special/stdlib/collection/mutable/Growable.scala new file mode 100644 index 000000000000..3b5eabac37bf --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/Growable.scala @@ -0,0 
+1,102 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import language.experimental.captureChecking + +/** This trait forms part of collections that can be augmented + * using a `+=` operator and that can be cleared of all elements using + * a `clear` method. + * + * @define coll growable collection + * @define Coll `Growable` + * @define add add + * @define Add Add + */ +trait Growable[-A] extends Clearable { + + /** ${Add}s a single element to this $coll. + * + * @param elem the element to $add. + * @return the $coll itself + */ + def addOne(elem: A): this.type + + /** Alias for `addOne` */ + @`inline` final def += (elem: A): this.type = addOne(elem) + + //TODO This causes a conflict in StringBuilder; looks like a compiler bug + //@deprecated("Use addOne or += instead of append", "2.13.0") + //@`inline` final def append(elem: A): Unit = addOne(elem) + + /** ${Add}s two or more elements to this $coll. + * + * @param elem1 the first element to $add. + * @param elem2 the second element to $add. + * @param elems the remaining elements to $add. + * @return the $coll itself + */ + @deprecated("Use `++=` aka `addAll` instead of varargs `+=`; infix operations with an operand of multiple args will be deprecated", "2.13.0") + @`inline` final def += (elem1: A, elem2: A, elems: A*): this.type = this += elem1 += elem2 ++= (elems: IterableOnce[A]) + + /** ${Add}s all elements produced by an IterableOnce to this $coll. + * + * @param xs the IterableOnce producing the elements to $add. + * @return the $coll itself. + */ + def addAll(xs: IterableOnce[A]^): this.type = { + if (xs.asInstanceOf[AnyRef] eq this) addAll(Buffer.from(xs)) // avoid mutating under our own iterator + else { + val it = xs.iterator + while (it.hasNext) { + addOne(it.next()) + } + } + this + } + + /** Alias for `addAll` */ + @`inline` final def ++= (xs: IterableOnce[A]^): this.type = addAll(xs) + + /** @return The number of elements in the collection under construction, if it can be cheaply computed, + * -1 otherwise. The default implementation always returns -1. + */ + def knownSize: Int = -1 +} + +object Growable { + + /** + * Fills a `Growable` instance with the elements of a given iterable + * @param empty Instance to fill + * @param it Elements to add + * @tparam A Element type + * @return The filled instance + */ + def from[A](empty: Growable[A], it: collection.IterableOnce[A]^): empty.type = empty ++= it + +} + +/** This trait forms part of collections that can be cleared + * with a clear() call. + * + * @define coll collection + */ +trait Clearable { + /** Clears the $coll's contents. After this operation, the + * $coll is empty. + */ + def clear(): Unit +} diff --git a/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala new file mode 100644 index 000000000000..4d6f989e6f3d --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala @@ -0,0 +1,37 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
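One detail of `addAll` above deserves emphasis: appending a growable collection to itself is detected with `eq`, and the elements are first copied into a fresh buffer so the iteration never observes its own appends. A sketch, assuming `ListBuffer` preserves this behavior:

```scala
import scala.collection.mutable.ListBuffer

val buf = ListBuffer(1, 2)
buf ++= buf   // self-append is safe: elements are buffered before being re-added
// buf is now ListBuffer(1, 2, 1, 2), not an infinite loop
```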
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.mutable +import language.experimental.captureChecking + +/** The canonical builder for collections that are growable, i.e. that support an + * efficient `+=` method which adds an element to the collection. + * + * GrowableBuilders can produce only a single instance of the collection they are growing. + * + * @define Coll `GrowingBuilder` + * @define coll growing builder + */ +class GrowableBuilder[Elem, To <: Growable[Elem]](protected val elems: To) + extends Builder[Elem, To] { + + def clear(): Unit = elems.clear() + + def result(): To = elems + + def addOne(elem: Elem): this.type = { elems += elem; this } + + override def addAll(xs: IterableOnce[Elem]^): this.type = { elems.addAll(xs); this } + + override def knownSize: Int = elems.knownSize +} diff --git a/tests/pos-special/stdlib/collection/mutable/HashMap.scala b/tests/pos-special/stdlib/collection/mutable/HashMap.scala new file mode 100644 index 000000000000..1ba651b47819 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/HashMap.scala @@ -0,0 +1,655 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.annotation.{nowarn, tailrec} +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializationProxy +import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking + +/** This class implements mutable maps using a hashtable. + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] + * section on `Hash Tables` for more information. + * + * @tparam K the type of the keys contained in this hash map. + * @tparam V the type of the values assigned to keys in this hash map. + * + * @define Coll `mutable.HashMap` + * @define coll mutable hash map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@deprecatedInheritance("HashMap will be made final; use .withDefault for the common use case of computing a default value", "2.13.0") +class HashMap[K, V](initialCapacity: Int, loadFactor: Double) + extends AbstractMap[K, V] + with MapOps[K, V, HashMap, HashMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, HashMap[K, V]] + with StrictOptimizedMapOps[K, V, HashMap, HashMap[K, V]] + with MapFactoryDefaults[K, V, HashMap, Iterable] + with Serializable { + + /* The HashMap class holds the following invariant: + * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i. + * - Every bucket is sorted in ascendent hash order + * - The sum of the lengths of all buckets is equal to contentSize. + */ + def this() = this(HashMap.defaultInitialCapacity, HashMap.defaultLoadFactor) + + import HashMap.Node + + /** The actual hash table. */ + private[this] var table = new Array[Node[K, V]](tableSizeFor(initialCapacity)) + + /** The next size value at which to resize (capacity * load factor). 
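To make the sizing arithmetic concrete: these hash tables keep the table length a power of two, and growth is triggered once the content size reaches `capacity * loadFactor`. A standalone re-implementation of the private helpers, for illustration only:

```scala
// Illustrative copies of the private sizing helpers above, not public API.
def tableSizeFor(capacity: Int): Int =
  (Integer.highestOneBit((capacity - 1).max(4)) * 2).min(1 << 30)

def newThreshold(tableLength: Int, loadFactor: Double): Int =
  (tableLength.toDouble * loadFactor).toInt

tableSizeFor(16)        // 16 (already a power of two)
tableSizeFor(17)        // 32 (next power of two)
newThreshold(16, 0.75)  // 12: the table doubles when a 12th element is added
```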
*/ + private[this] var threshold: Int = newThreshold(table.length) + + private[this] var contentSize = 0 + + override def size: Int = contentSize + + /** Performs the inverse operation of improveHash. In this case, it happens to be identical to improveHash*/ + @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) + + /** Computes the improved hash of an original (`any.##`) hash. */ + @`inline` private[this] def improveHash(originalHash: Int): Int = { + // Improve the hash by xoring the high 16 bits into the low 16 bits just in case entropy is skewed towards the + // high-value bits. We only use the lowest bits to determine the hash bucket. This is the same improvement + // algorithm as in java.util.HashMap. + // + // This function is also its own inverse. That is, for all ints i, improveHash(improveHash(i)) = i + // this allows us to retrieve the original hash when we need it, for instance when appending to an immutable.HashMap + // and that is why unimproveHash simply forwards to this method + originalHash ^ (originalHash >>> 16) + } + + /** Computes the improved hash of this key */ + @`inline` private[this] def computeHash(o: K): Int = improveHash(o.##) + + @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) + + override def contains(key: K): Boolean = findNode(key) ne null + + @`inline` private[this] def findNode(key: K): Node[K, V] = { + val hash = computeHash(key) + table(index(hash)) match { + case null => null + case nd => nd.findNode(key, hash) + } + } + + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt) + if(target > table.length) growTable(target) + } + + override def addAll(xs: IterableOnce[(K, V)]^): this.type = { + sizeHint(xs.knownSize) + + xs match { + case hm: immutable.HashMap[K, V] => + hm.foreachWithHash((k, v, h) => put0(k, v, improveHash(h), getOld = false)) + this + case hm: mutable.HashMap[K, V] => + val iter = hm.nodeIterator + while (iter.hasNext) { + val next = iter.next() + put0(next.key, next.value, next.hash, getOld = false) + } + this + case lhm: mutable.LinkedHashMap[K, V] => + val iter = lhm.entryIterator + while (iter.hasNext) { + val entry = iter.next() + put0(entry.key, entry.value, entry.hash, getOld = false) + } + this + case thatMap: Map[K, V] => + thatMap.foreachEntry { (key: K, value: V) => + put0(key, value, improveHash(key.##), getOld = false) + } + this + case _ => + super.addAll(xs) + } + } + + // Override updateWith for performance, so we can do the update while hashing + // the input key only once and performing one lookup into the hash table + override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + if (getClass != classOf[HashMap[_, _]]) { + // subclasses of HashMap might customise `get` ... 
+ super.updateWith(key)(remappingFunction) + } else { + val hash = computeHash(key) + val indexedHash = index(hash) + + var foundNode: Node[K, V] = null + var previousNode: Node[K, V] = null + table(indexedHash) match { + case null => + case nd => + @tailrec + def findNode(prev: Node[K, V], nd: Node[K, V], k: K, h: Int): Unit = { + if (h == nd.hash && k == nd.key) { + previousNode = prev + foundNode = nd + } + else if ((nd.next eq null) || (nd.hash > h)) () + else findNode(nd, nd.next, k, h) + } + + findNode(null, nd, key, hash) + } + + val previousValue = foundNode match { + case null => None + case nd => Some(nd.value) + } + + val nextValue = remappingFunction(previousValue) + + (previousValue, nextValue) match { + case (None, None) => // do nothing + + case (Some(_), None) => + if (previousNode != null) previousNode.next = foundNode.next + else table(indexedHash) = foundNode.next + contentSize -= 1 + + case (None, Some(value)) => + val newIndexedHash = + if (contentSize + 1 >= threshold) { + growTable(table.length * 2) + index(hash) + } else indexedHash + put0(key, value, false, hash, newIndexedHash) + + case (Some(_), Some(newValue)) => foundNode.value = newValue + } + nextValue + } + } + + override def subtractAll(xs: IterableOnce[K]^): this.type = { + if (size == 0) { + return this + } + + xs match { + case hs: immutable.HashSet[K] => + hs.foreachWithHashWhile { (k, h) => + remove0(k, improveHash(h)) + size > 0 + } + this + case hs: mutable.HashSet[K] => + val iter = hs.nodeIterator + while (iter.hasNext) { + val next = iter.next() + remove0(next.key, next.hash) + if (size == 0) return this + } + this + case lhs: mutable.LinkedHashSet[K] => + val iter = lhs.entryIterator + while (iter.hasNext) { + val next = iter.next() + remove0(next.key, next.hash) + if (size == 0) return this + } + this + case _ => super.subtractAll(xs) + } + } + + /** Adds a key-value pair to this map + * + * @param key the key to add + * @param value the value to add + * @param hash the **improved** hashcode of `key` (see computeHash) + * @param getOld if true, then the previous value for `key` will be returned, otherwise, false + */ + private[this] def put0(key: K, value: V, hash: Int, getOld: Boolean): Some[V] = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + val idx = index(hash) + put0(key, value, getOld, hash, idx) + } + + private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + val hash = computeHash(key) + val idx = index(hash) + put0(key, value, getOld, hash, idx) + } + + + private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { + table(idx) match { + case null => + table(idx) = new Node[K, V](key, hash, value, null) + case old => + var prev: Node[K, V] = null + var n = old + while((n ne null) && n.hash <= hash) { + if(n.hash == hash && key == n.key) { + val old = n.value + n.value = value + return if(getOld) Some(old) else null + } + prev = n + n = n.next + } + if(prev eq null) table(idx) = new Node(key, hash, value, old) + else prev.next = new Node(key, hash, value, prev.next) + } + contentSize += 1 + null + } + + private def remove0(elem: K) : Node[K, V] = remove0(elem, computeHash(elem)) + + /** Removes a key from this map if it exists + * + * @param elem the element to remove + * @param hash the **improved** hashcode of `element` (see computeHash) + * @return the node that contained element if it was present, otherwise null + */ + private[this] def remove0(elem: K, 
hash: Int) : Node[K, V] = { + val idx = index(hash) + table(idx) match { + case null => null + case nd if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + contentSize -= 1 + nd + case nd => + // find an element that matches + var prev = nd + var next = nd.next + while((next ne null) && next.hash <= hash) { + if(next.hash == hash && next.key == elem) { + prev.next = next.next + contentSize -= 1 + return next + } + prev = next + next = next.next + } + null + } + } + + private[this] abstract class HashMapIterator[A] extends AbstractIterator[A] { + private[this] var i = 0 + private[this] var node: Node[K, V] = null + private[this] val len = table.length + + protected[this] def extract(nd: Node[K, V]): A + + def hasNext: Boolean = { + if(node ne null) true + else { + while(i < len) { + val n = table(i) + i += 1 + if(n ne null) { node = n; return true } + } + false + } + } + + def next(): A = + if(!hasNext) Iterator.empty.next() + else { + val r = extract(node) + node = node.next + r + } + } + + override def iterator: Iterator[(K, V)] = + if(size == 0) Iterator.empty + else new HashMapIterator[(K, V)] { + protected[this] def extract(nd: Node[K, V]) = (nd.key, nd.value) + } + + override def keysIterator: Iterator[K] = + if(size == 0) Iterator.empty + else new HashMapIterator[K] { + protected[this] def extract(nd: Node[K, V]) = nd.key + } + + override def valuesIterator: Iterator[V] = + if(size == 0) Iterator.empty + else new HashMapIterator[V] { + protected[this] def extract(nd: Node[K, V]) = nd.value + } + + + /** Returns an iterator over the nodes stored in this HashMap */ + private[collection] def nodeIterator: Iterator[Node[K, V]] = + if(size == 0) Iterator.empty + else new HashMapIterator[Node[K, V]] { + protected[this] def extract(nd: Node[K, V]) = nd + } + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = + shape. + parUnbox(new convert.impl.AnyTableStepper[(K, V), Node[K, V]](size, table, _.next, node => (node.key, node.value), 0, table.length)). 
+ asInstanceOf[S with EfficientSplit] + + override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntTableStepper[Node[K, V]] (size, table, _.next, _.key.asInstanceOf[Int], 0, table.length) + case StepperShape.LongShape => new LongTableStepper[Node[K, V]] (size, table, _.next, _.key.asInstanceOf[Long], 0, table.length) + case StepperShape.DoubleShape => new DoubleTableStepper[Node[K, V]](size, table, _.next, _.key.asInstanceOf[Double], 0, table.length) + case _ => shape.parUnbox(new AnyTableStepper[K, Node[K, V]](size, table, _.next, _.key, 0, table.length)) + } + s.asInstanceOf[S with EfficientSplit] + } + + override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntTableStepper[Node[K, V]] (size, table, _.next, _.value.asInstanceOf[Int], 0, table.length) + case StepperShape.LongShape => new LongTableStepper[Node[K, V]] (size, table, _.next, _.value.asInstanceOf[Long], 0, table.length) + case StepperShape.DoubleShape => new DoubleTableStepper[Node[K, V]](size, table, _.next, _.value.asInstanceOf[Double], 0, table.length) + case _ => shape.parUnbox(new AnyTableStepper[V, Node[K, V]](size, table, _.next, _.value, 0, table.length)) + } + s.asInstanceOf[S with EfficientSplit] + } + + private[this] def growTable(newlen: Int) = { + if (newlen < 0) + throw new RuntimeException(s"new HashMap table size $newlen exceeds maximum") + var oldlen = table.length + threshold = newThreshold(newlen) + if(size == 0) table = new Array(newlen) + else { + table = java.util.Arrays.copyOf(table, newlen) + val preLow: Node[K, V] = new Node(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) + val preHigh: Node[K, V] = new Node(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) + // Split buckets until the new length has been reached. This could be done more + // efficiently when growing an already filled table to more than double the size. + while(oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if(old ne null) { + preLow.next = null + preHigh.next = null + var lastLow: Node[K, V] = preLow + var lastHigh: Node[K, V] = preHigh + var n = old + while(n ne null) { + val next = n.next + if((n.hash & oldlen) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if(old ne preLow.next) table(i) = preLow.next + if(preHigh.next ne null) { + table(i + oldlen) = preHigh.next + lastHigh.next = null + } + } + i += 1 + } + oldlen *= 2 + } + } + } + + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt + + override def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + } + + def get(key: K): Option[V] = findNode(key) match { + case null => None + case nd => Some(nd.value) + } + + @throws[NoSuchElementException] + override def apply(key: K): V = findNode(key) match { + case null => default(key) + case nd => nd.value + } + + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + if (getClass != classOf[HashMap[_, _]]) { + // subclasses of HashMap might customise `get` ... + super.getOrElse(key, default) + } else { + // .. 
but in the common case, we can avoid the Option boxing. + val nd = findNode(key) + if (nd eq null) default else nd.value + } + } + + override def getOrElseUpdate(key: K, defaultValue: => V): V = { + if (getClass != classOf[HashMap[_, _]]) { + // subclasses of HashMap might customise `get` ... + super.getOrElseUpdate(key, defaultValue) + } else { + val hash = computeHash(key) + val idx = index(hash) + val nd = table(idx) match { + case null => null + case nd => nd.findNode(key, hash) + } + if(nd != null) nd.value + else { + val table0 = table + val default = defaultValue + if(contentSize + 1 >= threshold) growTable(table.length * 2) + // Avoid recomputing index if the `defaultValue()` or new element hasn't triggered a table resize. + val newIdx = if (table0 eq table) idx else index(hash) + put0(key, default, false, hash, newIdx) + default + } + } + } + + override def put(key: K, value: V): Option[V] = put0(key, value, true) match { + case null => None + case sm => sm + } + + override def remove(key: K): Option[V] = remove0(key) match { + case null => None + case nd => Some(nd.value) + } + + override def update(key: K, value: V): Unit = put0(key, value, false) + + def addOne(elem: (K, V)): this.type = { put0(elem._1, elem._2, false); this } + + def subtractOne(elem: K): this.type = { remove0(elem); this } + + override def knownSize: Int = size + + override def isEmpty: Boolean = size == 0 + + override def foreach[U](f: ((K, V)) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n.foreach(f) + i += 1 + } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n.foreachEntry(f) + i += 1 + } + } + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new mutable.HashMap.DeserializationFactory[K, V](table.length, loadFactor), this) + + override def filterInPlace(p: (K, V) => Boolean): this.type = { + if (nonEmpty) { + var bucket = 0 + + while (bucket < table.length) { + var head = table(bucket) + + while ((head ne null) && !p(head.key, head.value)) { + head = head.next + contentSize -= 1 + } + + if (head ne null) { + var prev = head + var next = head.next + + while (next ne null) { + if (p(next.key, next.value)) { + prev = next + } else { + prev.next = next.next + contentSize -= 1 + } + next = next.next + } + } + + table(bucket) = head + bucket += 1 + } + } + this + } + + // TODO: rename to `mapValuesInPlace` and override the base version (not binary compatible) + private[mutable] def mapValuesInPlaceImpl(f: (K, V) => V): this.type = { + val len = table.length + var i = 0 + while (i < len) { + var n = table(i) + while (n ne null) { + n.value = f(n.key, n.value) + n = n.next + } + i += 1 + } + this + } + + override def mapFactory: MapFactory[HashMap] = HashMap + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "HashMap" + + override def hashCode: Int = { + if (isEmpty) MurmurHash3.emptyMapHash + else { + val tupleHashIterator = new HashMapIterator[Any] { + var hash: Int = 0 + override def hashCode: Int = hash + override protected[this] def extract(nd: Node[K, V]): Any = { + hash = MurmurHash3.tuple2Hash(unimproveHash(nd.hash), nd.value.##) + this + } + } + MurmurHash3.unorderedHash(tupleHashIterator, MurmurHash3.mapSeed) + } + } +} + +/** + * $factoryInfo + * @define Coll `mutable.HashMap` + * @define coll mutable hash map + */ 
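A property of the hashing scheme above worth spelling out: since `improveHash` only xors the high 16 bits into the low 16 bits, applying it twice restores the original value, which is exactly what `unimproveHash` exploits. A standalone check (the method is private, so it is re-declared here for illustration):

```scala
def improveHash(originalHash: Int): Int = originalHash ^ (originalHash >>> 16)

val h = "example".##
assert(improveHash(improveHash(h)) == h)  // improveHash is its own inverse
```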
+@SerialVersionUID(3L) +object HashMap extends MapFactory[HashMap] { + + def empty[K, V]: HashMap[K, V] = new HashMap[K, V] + + def from[K, V](it: collection.IterableOnce[(K, V)]^): HashMap[K, V] = { + val k = it.knownSize + val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity + new HashMap[K, V](cap, defaultLoadFactor).addAll(it) + } + + def newBuilder[K, V]: Builder[(K, V), HashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) + + def newBuilder[K, V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), HashMap[K, V]] = + new GrowableBuilder[(K, V), HashMap[K, V]](new HashMap[K, V](initialCapacity, loadFactor)) { + override def sizeHint(size: Int) = elems.sizeHint(size) + } + + /** The default load factor for the hash table */ + final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + final def defaultInitialCapacity: Int = 16 + + @SerialVersionUID(3L) + private final class DeserializationFactory[K, V](val tableLength: Int, val loadFactor: Double) extends Factory[(K, V), HashMap[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]^): HashMap[K, V] = new HashMap[K, V](tableLength, loadFactor).addAll(it) + def newBuilder: Builder[(K, V), HashMap[K, V]] = HashMap.newBuilder(tableLength, loadFactor) + } + + private[collection] final class Node[K, V](_key: K, _hash: Int, private[this] var _value: V, private[this] var _next: Node[K, V]) { + def key: K = _key + def hash: Int = _hash + def value: V = _value + def value_= (v: V): Unit = _value = v + def next: Node[K, V] = _next + def next_= (n: Node[K, V]): Unit = _next = n + + @tailrec + def findNode(k: K, h: Int): Node[K, V] = + if(h == _hash && k == _key) this + else if((_next eq null) || (_hash > h)) null + else _next.findNode(k, h) + + @tailrec + def foreach[U](f: ((K, V)) => U): Unit = { + f((_key, _value)) + if(_next ne null) _next.foreach(f) + } + + @tailrec + def foreachEntry[U](f: (K, V) => U): Unit = { + f(_key, _value) + if(_next ne null) _next.foreachEntry(f) + } + + override def toString = s"Node($key, $value, $hash) -> $next" + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/HashSet.scala b/tests/pos-special/stdlib/collection/mutable/HashSet.scala new file mode 100644 index 000000000000..487abc74bb82 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/HashSet.scala @@ -0,0 +1,457 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.annotation.tailrec +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializationProxy +import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking + +/** This class implements mutable sets using a hashtable. + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] + * section on `Hash Tables` for more information. 
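+ * @example A quick usage sketch (added for illustration; not exhaustive):
+ * {{{
+ * val s = HashSet(1, 2)
+ * s += 3   // add an element
+ * s(2)     // true: sets are membership predicates
+ * s -= 1   // remove an element
+ * }}}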
+ * + * @define Coll `mutable.HashSet` + * @define coll mutable hash set + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +final class HashSet[A](initialCapacity: Int, loadFactor: Double) + extends AbstractSet[A] + with SetOps[A, HashSet, HashSet[A]] + with StrictOptimizedIterableOps[A, HashSet, HashSet[A]] + with IterableFactoryDefaults[A, HashSet] + with Serializable { + + def this() = this(HashSet.defaultInitialCapacity, HashSet.defaultLoadFactor) + + import HashSet.Node + + /* The Hashset class holds the following invariant: + * - For each i between 0 and table.length, the bucket at table(i) only contains elements whose hash-index is i. + * - Every bucket is sorted in ascendent hash order + * - The sum of the lengths of all buckets is equal to contentSize. + */ + /** The actual hash table. */ + private[this] var table = new Array[Node[A]](tableSizeFor(initialCapacity)) + + /** The next size value at which to resize (capacity * load factor). */ + private[this] var threshold: Int = newThreshold(table.length) + + private[this] var contentSize = 0 + + override def size: Int = contentSize + + /** Performs the inverse operation of improveHash. In this case, it happens to be identical to improveHash*/ + @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) + + /** Computes the improved hash of an original (`any.##`) hash. */ + private[this] def improveHash(originalHash: Int): Int = { + // Improve the hash by xoring the high 16 bits into the low 16 bits just in case entropy is skewed towards the + // high-value bits. We only use the lowest bits to determine the hash bucket. This is the same improvement + // algorithm as in java.util.HashMap. + originalHash ^ (originalHash >>> 16) + } + + /** Computes the improved hash of this element */ + @`inline` private[this] def computeHash(o: A): Int = improveHash(o.##) + + @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) + + override def contains(elem: A): Boolean = findNode(elem) ne null + + @`inline` private[this] def findNode(elem: A): Node[A] = { + val hash = computeHash(elem) + table(index(hash)) match { + case null => null + case nd => nd.findNode(elem, hash) + } + } + + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt) + if(target > table.length) growTable(target) + } + + override def add(elem: A) : Boolean = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + addElem(elem, computeHash(elem)) + } + + override def addAll(xs: IterableOnce[A]^): this.type = { + sizeHint(xs.knownSize) + xs match { + case hs: immutable.HashSet[A] => + hs.foreachWithHash((k, h) => addElem(k, improveHash(h))) + this + case hs: mutable.HashSet[A] => + val iter = hs.nodeIterator + while (iter.hasNext) { + val next = iter.next() + addElem(next.key, next.hash) + } + this + case lhs: mutable.LinkedHashSet[A] => + val iter = lhs.entryIterator + while (iter.hasNext) { + val next = iter.next() + addElem(next.key, next.hash) + } + this + case _ => super.addAll(xs) + } + } + + override def subtractAll(xs: IterableOnce[A]^): this.type = { + if (size == 0) { + return this + } + + xs match { + case hs: immutable.HashSet[A] => + hs.foreachWithHashWhile { (k, h) => + remove(k, improveHash(h)) + size > 0 + } + this + case hs: mutable.HashSet[A] => + val iter = hs.nodeIterator + while (iter.hasNext) { + val next = iter.next() + remove(next.key, next.hash) + if (size == 0) return this + } + this + case lhs: 
mutable.LinkedHashSet[A] => + val iter = lhs.entryIterator + while (iter.hasNext) { + val next = iter.next() + remove(next.key, next.hash) + if (size == 0) return this + } + this + case _ => super.subtractAll(xs) + } + } + + /** Adds an element to this set + * @param elem element to add + * @param hash the **improved** hash of `elem` (see computeHash) + */ + private[this] def addElem(elem: A, hash: Int) : Boolean = { + val idx = index(hash) + table(idx) match { + case null => + table(idx) = new Node(elem, hash, null) + case old => + var prev: Node[A] = null + var n = old + while((n ne null) && n.hash <= hash) { + if(n.hash == hash && elem == n.key) return false + prev = n + n = n.next + } + if(prev eq null) + table(idx) = new Node(elem, hash, old) + else + prev.next = new Node(elem, hash, prev.next) + } + contentSize += 1 + true + } + + private[this] def remove(elem: A, hash: Int): Boolean = { + val idx = index(hash) + table(idx) match { + case null => false + case nd if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + contentSize -= 1 + true + case nd => + // find an element that matches + var prev = nd + var next = nd.next + while((next ne null) && next.hash <= hash) { + if(next.hash == hash && next.key == elem) { + prev.next = next.next + contentSize -= 1 + return true + } + prev = next + next = next.next + } + false + } + } + + override def remove(elem: A) : Boolean = remove(elem, computeHash(elem)) + + private[this] abstract class HashSetIterator[B] extends AbstractIterator[B] { + private[this] var i = 0 + private[this] var node: Node[A] = null + private[this] val len = table.length + + protected[this] def extract(nd: Node[A]): B + + def hasNext: Boolean = { + if(node ne null) true + else { + while(i < len) { + val n = table(i) + i += 1 + if(n ne null) { node = n; return true } + } + false + } + } + + def next(): B = + if(!hasNext) Iterator.empty.next() + else { + val r = extract(node) + node = node.next + r + } + } + + override def iterator: Iterator[A] = new HashSetIterator[A] { + override protected[this] def extract(nd: Node[A]): A = nd.key + } + + /** Returns an iterator over the nodes stored in this HashSet */ + private[collection] def nodeIterator: Iterator[Node[A]] = new HashSetIterator[Node[A]] { + override protected[this] def extract(nd: Node[A]): Node[A] = nd + } + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntTableStepper[Node[A]] (size, table, _.next, _.key.asInstanceOf[Int], 0, table.length) + case StepperShape.LongShape => new LongTableStepper[Node[A]] (size, table, _.next, _.key.asInstanceOf[Long], 0, table.length) + case StepperShape.DoubleShape => new DoubleTableStepper[Node[A]](size, table, _.next, _.key.asInstanceOf[Double], 0, table.length) + case _ => shape.parUnbox(new AnyTableStepper[A, Node[A]](size, table, _.next, _.key, 0, table.length)) + } + s.asInstanceOf[S with EfficientSplit] + } + + private[this] def growTable(newlen: Int) = { + var oldlen = table.length + threshold = newThreshold(newlen) + if(size == 0) table = new Array(newlen) + else { + table = java.util.Arrays.copyOf(table, newlen) + val preLow: Node[A] = new Node(null.asInstanceOf[A], 0, null) + val preHigh: Node[A] = new Node(null.asInstanceOf[A], 0, null) + // Split buckets until the new length has been reached. 
This could be done more + // efficiently when growing an already filled table to more than double the size. + while(oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if(old ne null) { + preLow.next = null + preHigh.next = null + var lastLow: Node[A] = preLow + var lastHigh: Node[A] = preHigh + var n = old + while(n ne null) { + val next = n.next + if((n.hash & oldlen) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if(old ne preLow.next) table(i) = preLow.next + if(preHigh.next ne null) { + table(i + oldlen) = preHigh.next + lastHigh.next = null + } + } + i += 1 + } + oldlen *= 2 + } + } + } + + override def filterInPlace(p: A => Boolean): this.type = { + if (nonEmpty) { + var bucket = 0 + + while (bucket < table.length) { + var head = table(bucket) + + while ((head ne null) && !p(head.key)) { + head = head.next + contentSize -= 1 + } + + if (head ne null) { + var prev = head + var next = head.next + + while (next ne null) { + if (p(next.key)) { + prev = next + } else { + prev.next = next.next + contentSize -= 1 + } + next = next.next + } + } + + table(bucket) = head + bucket += 1 + } + } + this + } + + /* + private[mutable] def checkTable(): Unit = { + var i = 0 + var count = 0 + var prev: Node[A] = null + while(i < table.length) { + var n = table(i) + prev = null + while(n != null) { + count += 1 + assert(index(n.hash) == i) + if(prev ne null) assert(prev.hash <= n.hash) + prev = n + n = n.next + } + i += 1 + } + assert(contentSize == count) + } + */ + + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt + + def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + } + + override def iterableFactory: IterableFactory[HashSet] = HashSet + + @`inline` def addOne(elem: A): this.type = { add(elem); this } + + @`inline` def subtractOne(elem: A): this.type = { remove(elem); this } + + override def knownSize: Int = size + + override def isEmpty: Boolean = size == 0 + + override def foreach[U](f: A => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n.foreach(f) + i += 1 + } + } + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new HashSet.DeserializationFactory[A](table.length, loadFactor), this) + + override protected[this] def className = "HashSet" + + override def hashCode: Int = { + val setIterator = this.iterator + val hashIterator: Iterator[Any] = + if (setIterator.isEmpty) setIterator + else new HashSetIterator[Any] { + var hash: Int = 0 + override def hashCode: Int = hash + override protected[this] def extract(nd: Node[A]): Any = { + hash = unimproveHash(nd.hash) + this + } + } + MurmurHash3.unorderedHash(hashIterator, MurmurHash3.setSeed) + } +} + +/** + * $factoryInfo + * @define Coll `mutable.HashSet` + * @define coll mutable hash set + */ +@SerialVersionUID(3L) +object HashSet extends IterableFactory[HashSet] { + + def from[B](it: scala.collection.IterableOnce[B]^): HashSet[B] = { + val k = it.knownSize + val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity + new HashSet[B](cap, defaultLoadFactor) ++= it + } + + def empty[A]: HashSet[A] = new HashSet[A] + + def newBuilder[A]: Builder[A, HashSet[A]] = newBuilder(defaultInitialCapacity, 
defaultLoadFactor) + + def newBuilder[A](initialCapacity: Int, loadFactor: Double): Builder[A, HashSet[A]] = + new GrowableBuilder[A, HashSet[A]](new HashSet[A](initialCapacity, loadFactor)) { + override def sizeHint(size: Int) = elems.sizeHint(size) + } + + /** The default load factor for the hash table */ + final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + final def defaultInitialCapacity: Int = 16 + + @SerialVersionUID(3L) + private final class DeserializationFactory[A](val tableLength: Int, val loadFactor: Double) extends Factory[A, HashSet[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]^): HashSet[A] = new HashSet[A](tableLength, loadFactor) ++= it + def newBuilder: Builder[A, HashSet[A]] = HashSet.newBuilder(tableLength, loadFactor) + } + + private[collection] final class Node[K](_key: K, _hash: Int, private[this] var _next: Node[K]) { + def key: K = _key + def hash: Int = _hash + def next: Node[K] = _next + def next_= (n: Node[K]): Unit = _next = n + + @tailrec + def findNode(k: K, h: Int): Node[K] = + if(h == _hash && k == _key) this + else if((_next eq null) || (_hash > h)) null + else _next.findNode(k, h) + + @tailrec + def foreach[U](f: K => U): Unit = { + f(_key) + if(_next ne null) _next.foreach(f) + } + + override def toString = s"Node($key, $hash) -> $next" + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/HashTable.scala b/tests/pos-special/stdlib/collection/mutable/HashTable.scala new file mode 100644 index 000000000000..64af941eac1f --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/HashTable.scala @@ -0,0 +1,418 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.mutable + +import collection.{AbstractIterator, Iterator} + +import java.lang.Integer.{numberOfLeadingZeros, rotateRight} +import scala.util.hashing.byteswap32 + +import java.lang.Integer +import language.experimental.captureChecking + +/** This class can be used to construct data structures that are based + * on hashtables. Class `HashTable[A]` implements a hashtable + * that maps keys of type `A` to values of the fully abstract + * member type `Entry`. Classes that make use of `HashTable` + * have to provide an implementation for `Entry`. + * + * There are mainly two parameters that affect the performance of a hashtable: + * the initial size and the load factor. The size + * refers to the number of buckets in the hashtable, and the load + * factor is a measure of how full the hashtable is allowed to get before + * its size is automatically doubled. Both parameters may be changed by + * overriding the corresponding values in class `HashTable`. + * + * @tparam A type of the elements contained in this hash table. + */ +// Not used in the standard library, but used in scala-parallel-collections +private[collection] trait HashTable[A, B, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashUtils[A] { + // Replacing Entry type parameter by abstract type member here allows to not expose to public + // implementation-specific entry classes such as `DefaultEntry` or `LinkedEntry`. + // However, I'm afraid it's too late now for such breaking change. 
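The scaladoc above notes that the load factor controls when the table doubles. In this implementation the factor is stored as an integer in thousandths (`defaultLoadFactor` is 750, i.e. 75%, defined in the companion object further down). A hedged sketch of the threshold arithmetic, mirroring `newThreshold` and `loadFactorDenum` from this file (the demo object itself is not part of the diff):

```scala
// Illustrative sketch of HashTable's integer threshold math.
object ThresholdSketch {
  val loadFactorDenum = 1000 // denominator: 750/1000 corresponds to 75%

  def newThreshold(loadFactor: Int, capacity: Int): Int =
    ((capacity.toLong * loadFactor) / loadFactorDenum).toInt

  def main(args: Array[String]): Unit = {
    println(newThreshold(750, 16)) // 12: a 16-bucket table resizes past 12 entries
    println(newThreshold(750, 32)) // 24: and past 24 entries after one doubling
  }
}
```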
+ import HashTable._ + + protected var _loadFactor = defaultLoadFactor + + /** The actual hash table. + */ + protected[collection] var table: Array[HashEntry[A, Entry]] = new Array(initialCapacity) + + /** The number of mappings contained in this hash table. + */ + protected[collection] var tableSize: Int = 0 + + final def size: Int = tableSize + + /** The next size value at which to resize (capacity * load factor). + */ + protected[collection] var threshold: Int = initialThreshold(_loadFactor) + + /** The array keeping track of the number of elements in 32 element blocks. + */ + protected var sizemap: Array[Int] = null + + protected var seedvalue: Int = tableSizeSeed + + protected def tableSizeSeed = Integer.bitCount(table.length - 1) + + /** The initial size of the hash table. + */ + protected def initialSize: Int = 16 + + /** The initial threshold. + */ + private def initialThreshold(_loadFactor: Int): Int = newThreshold(_loadFactor, initialCapacity) + + private def initialCapacity = capacity(initialSize) + + private def lastPopulatedIndex = { + var idx = table.length - 1 + while (table(idx) == null && idx > 0) + idx -= 1 + + idx + } + + /** + * Initializes the collection from the input stream. `readEntry` will be called for each + * entry to be read from the input stream. + */ + private[collection] def init(in: java.io.ObjectInputStream, readEntry: => Entry): Unit = { + _loadFactor = in.readInt() + assert(_loadFactor > 0) + + val size = in.readInt() + tableSize = 0 + assert(size >= 0) + + seedvalue = in.readInt() + + val smDefined = in.readBoolean() + + table = new Array(capacity(sizeForThreshold(_loadFactor, size))) + threshold = newThreshold(_loadFactor, table.length) + + if (smDefined) sizeMapInit(table.length) else sizemap = null + + var index = 0 + while (index < size) { + addEntry(readEntry) + index += 1 + } + } + + /** + * Serializes the collection to the output stream by saving the load factor, collection + * size and collection entries. `writeEntry` is responsible for writing an entry to the stream. + * + * `foreachEntry` determines the order in which the key/value pairs are saved to the stream. To + * deserialize, `init` should be used. + */ + private[collection] def serializeTo(out: java.io.ObjectOutputStream, writeEntry: Entry => Unit): Unit = { + out.writeInt(_loadFactor) + out.writeInt(tableSize) + out.writeInt(seedvalue) + out.writeBoolean(isSizeMapDefined) + + foreachEntry(writeEntry) + } + + /** Find entry with given key in table, null if not found. + */ + final def findEntry(key: A): Entry = + findEntry0(key, index(elemHashCode(key))) + + protected[collection] final def findEntry0(key: A, h: Int): Entry = { + var e = table(h).asInstanceOf[Entry] + while (e != null && !elemEquals(e.key, key)) e = e.next + e + } + + /** Add entry to table + * pre: no entry with same key exists + */ + protected[collection] final def addEntry(e: Entry): Unit = { + addEntry0(e, index(elemHashCode(e.key))) + } + + protected[collection] final def addEntry0(e: Entry, h: Int): Unit = { + e.next = table(h).asInstanceOf[Entry] + table(h) = e + tableSize = tableSize + 1 + nnSizeMapAdd(h) + if (tableSize > threshold) + resize(2 * table.length) + } + + /** Find entry with given key in table, or add new one if not found. + * May be somewhat faster then `findEntry`/`addEntry` pair as it + * computes entry's hash index only once. + * Returns entry found in table or null. + * New entries are created by calling `createNewEntry` method. 
+ */ + def findOrAddEntry(key: A, value: B): Entry = { + val h = index(elemHashCode(key)) + val e = findEntry0(key, h) + if (e ne null) e else { addEntry0(createNewEntry(key, value), h); null } + } + + /** Creates new entry to be immediately inserted into the hashtable. + * This method is guaranteed to be called only once and in case that the entry + * will be added. In other words, an implementation may be side-effecting. + */ + def createNewEntry(key: A, value: B): Entry + + /** Remove entry from table if present. + */ + final def removeEntry(key: A) : Entry = { + removeEntry0(key, index(elemHashCode(key))) + } + /** Remove entry from table if present. + */ + private[collection] final def removeEntry0(key: A, h: Int) : Entry = { + var e = table(h).asInstanceOf[Entry] + if (e != null) { + if (elemEquals(e.key, key)) { + table(h) = e.next + tableSize = tableSize - 1 + nnSizeMapRemove(h) + e.next = null + return e + } else { + var e1 = e.next + while (e1 != null && !elemEquals(e1.key, key)) { + e = e1 + e1 = e1.next + } + if (e1 != null) { + e.next = e1.next + tableSize = tableSize - 1 + nnSizeMapRemove(h) + e1.next = null + return e1 + } + } + } + null + } + + /** An iterator returning all entries. + */ + def entriesIterator: Iterator[Entry] = new AbstractIterator[Entry] { + val iterTable = table + var idx = lastPopulatedIndex + var es = iterTable(idx) + + def hasNext = es != null + def next() = { + val res = es + es = es.next + while (es == null && idx > 0) { + idx = idx - 1 + es = iterTable(idx) + } + res.asInstanceOf[Entry] + } + } + + /** Avoid iterator for a 2x faster traversal. */ + def foreachEntry[U](f: Entry => U): Unit = { + val iterTable = table + var idx = lastPopulatedIndex + var es = iterTable(idx) + + while (es != null) { + val next = es.next // Cache next in case f removes es. + f(es.asInstanceOf[Entry]) + es = next + + while (es == null && idx > 0) { + idx -= 1 + es = iterTable(idx) + } + } + } + + /** Remove all entries from table + */ + def clearTable(): Unit = { + var i = table.length - 1 + while (i >= 0) { table(i) = null; i = i - 1 } + tableSize = 0 + nnSizeMapReset(0) + } + + private def resize(newSize: Int): Unit = { + val oldTable = table + table = new Array(newSize) + nnSizeMapReset(table.length) + var i = oldTable.length - 1 + while (i >= 0) { + var e = oldTable(i) + while (e != null) { + val h = index(elemHashCode(e.key)) + val e1 = e.next + e.next = table(h).asInstanceOf[Entry] + table(h) = e + e = e1 + nnSizeMapAdd(h) + } + i = i - 1 + } + threshold = newThreshold(_loadFactor, newSize) + } + + /* Size map handling code */ + + /* + * The following three sizeMap* functions (Add, Remove, Reset) + * are used to update the size map of the hash table. + * + * The size map logically divides the hash table into `sizeMapBucketSize` element buckets + * by keeping an integer entry for each such bucket. Each integer entry simply denotes + * the number of elements in the corresponding bucket. + * Best understood through an example, see: + * table = [/, 1, /, 6, 90, /, -3, 5] (8 entries) + * sizemap = [ 2 | 3 ] (2 entries) + * where sizeMapBucketSize == 4. + * + * By default the size map is not initialized, so these methods don't do anything, thus, + * their impact on hash table performance is negligible. However, if the hash table + * is converted into a parallel hash table, the size map is initialized, as it will be needed + * there. 
+ */ + protected final def nnSizeMapAdd(h: Int) = if (sizemap ne null) { + sizemap(h >> sizeMapBucketBitSize) += 1 + } + + protected final def nnSizeMapRemove(h: Int) = if (sizemap ne null) { + sizemap(h >> sizeMapBucketBitSize) -= 1 + } + + protected final def nnSizeMapReset(tableLength: Int) = if (sizemap ne null) { + val nsize = calcSizeMapSize(tableLength) + if (sizemap.length != nsize) sizemap = new Array[Int](nsize) + else java.util.Arrays.fill(sizemap, 0) + } + + private[collection] final def totalSizeMapBuckets = if (sizeMapBucketSize < table.length) 1 else table.length / sizeMapBucketSize + + protected final def calcSizeMapSize(tableLength: Int) = (tableLength >> sizeMapBucketBitSize) + 1 + + // discards the previous sizemap and only allocates a new one + protected def sizeMapInit(tableLength: Int): Unit = { + sizemap = new Array[Int](calcSizeMapSize(tableLength)) + } + + // discards the previous sizemap and populates the new one + protected final def sizeMapInitAndRebuild() = { + sizeMapInit(table.length) + + // go through the buckets, count elements + var tableidx = 0 + var bucketidx = 0 + val tbl = table + var tableuntil = 0 + if (tbl.length < sizeMapBucketSize) tableuntil = tbl.length else tableuntil = sizeMapBucketSize + val totalbuckets = totalSizeMapBuckets + while (bucketidx < totalbuckets) { + var currbucketsize = 0 + while (tableidx < tableuntil) { + var e = tbl(tableidx) + while (e ne null) { + currbucketsize += 1 + e = e.next + } + tableidx += 1 + } + sizemap(bucketidx) = currbucketsize + tableuntil += sizeMapBucketSize + bucketidx += 1 + } + } + + private[collection] def printSizeMap() = { + println(sizemap.to(collection.immutable.List)) + } + + protected final def sizeMapDisable() = sizemap = null + + protected final def isSizeMapDefined = sizemap ne null + + // override to automatically initialize the size map + protected def alwaysInitSizeMap = false + + /* End of size map handling code */ + + protected def elemEquals(key1: A, key2: A): Boolean = (key1 == key2) + + /** + * Note: we take the most significant bits of the hashcode, not the lower ones + * this is of crucial importance when populating the table in parallel + */ + protected[collection] final def index(hcode: Int): Int = { + val ones = table.length - 1 + val exponent = Integer.numberOfLeadingZeros(ones) + (improve(hcode, seedvalue) >>> exponent) & ones + } +} + +private[collection] object HashTable { + /** The load factor for the hash table (in 0.001 step). + */ + private[collection] final def defaultLoadFactor: Int = 750 // corresponds to 75% + private[collection] final def loadFactorDenum = 1000 // should be loadFactorDenom, but changing that isn't binary compatible + + private[collection] final def newThreshold(_loadFactor: Int, size: Int) = ((size.toLong * _loadFactor) / loadFactorDenum).toInt + + private[collection] final def sizeForThreshold(_loadFactor: Int, thr: Int) = ((thr.toLong * loadFactorDenum) / _loadFactor).toInt + + private[collection] final def capacity(expectedSize: Int) = nextPositivePowerOfTwo(expectedSize) + + trait HashUtils[KeyType] { + protected final def sizeMapBucketBitSize = 5 + // so that: + protected final def sizeMapBucketSize = 1 << sizeMapBucketBitSize + + protected[collection] def elemHashCode(key: KeyType) = key.## + + /** + * Defer to a high-quality hash in [[scala.util.hashing]]. + * The goal is to distribute across bins as well as possible even if a hash code has low entropy at some bits. + *
+ * OLD VERSION - quick, but bad for sequence 0-10000 - little entropy in higher bits - since 2003 + * {{{ + * var h: Int = hcode + ~(hcode << 9) + * h = h ^ (h >>> 14) + * h = h + (h << 4) + * h ^ (h >>> 10) + * }}} + * the rest of the computation is due to SI-5293 + */ + protected final def improve(hcode: Int, seed: Int): Int = rotateRight(byteswap32(hcode), seed) + } + + /** + * Returns a power of two >= `target`. + */ + private[collection] def nextPositivePowerOfTwo(target: Int): Int = 1 << -numberOfLeadingZeros(target - 1) +} + +/** Class used internally. + */ +private[collection] trait HashEntry[A, E <: HashEntry[A, E]] { + val key: A + var next: E = _ +} diff --git a/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala new file mode 100644 index 000000000000..1af98162e9f3 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala @@ -0,0 +1,32 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable +import language.experimental.captureChecking + + +/** + * Reusable builder for immutable collections + */ +abstract class ImmutableBuilder[-A, C <: IterableOnce[_]](empty: C) + extends ReusableBuilder[A, C] { + + protected var elems: C = empty + + def clear(): Unit = { elems = empty } + + def result(): C = elems + + override def knownSize: Int = elems.knownSize +} diff --git a/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala b/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala new file mode 100644 index 000000000000..022970b4c56f --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala @@ -0,0 +1,84 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable +import language.experimental.captureChecking + +trait IndexedSeq[T] extends Seq[T] + with scala.collection.IndexedSeq[T] + with IndexedSeqOps[T, IndexedSeq, IndexedSeq[T]] + with IterableFactoryDefaults[T, IndexedSeq] { + + override def iterableFactory: SeqFactory[IndexedSeq] = IndexedSeq +} + +@SerialVersionUID(3L) +object IndexedSeq extends SeqFactory.Delegate[IndexedSeq](ArrayBuffer) + +trait IndexedSeqOps[A, +CC[_], +C <: AnyRef] + extends scala.collection.IndexedSeqOps[A, CC, C] + with SeqOps[A, CC, C] { + + /** Modifies this $coll by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @return this $coll modified by replacing all elements with the + * result of applying the given function `f` to each element + * of this $coll. + */ + def mapInPlace(f: A => A): this.type = { + var i = 0 + val siz = size + while (i < siz) { this(i) = f(this(i)); i += 1 } + this + } + + /** Sorts this $coll in place according to an Ordering. + * + * @see [[scala.collection.SeqOps.sorted]] + * @param ord the ordering to be used to compare elements. + * @return modified input $coll sorted according to the ordering `ord`. 
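+ * @example A usage sketch (added for illustration):
+ * {{{
+ * val xs = ArrayBuffer(3, 1, 2)
+ * xs.sortInPlace() // xs is now ArrayBuffer(1, 2, 3)
+ * }}}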
+ */ + def sortInPlace[B >: A]()(implicit ord: Ordering[B]): this.type = { + val len = this.length + if (len > 1) { + val arr = new Array[AnyRef](len) + var i = 0 + for (x <- this) { + arr(i) = x.asInstanceOf[AnyRef] + i += 1 + } + java.util.Arrays.sort(arr, ord.asInstanceOf[Ordering[Object]]) + i = 0 + while (i < arr.length) { + update(i, arr(i).asInstanceOf[A]) + i += 1 + } + } + this + } + + /** Sorts this $coll in place according to a comparison function. + * + * @see [[scala.collection.SeqOps.sortWith]] + */ + def sortInPlaceWith(lt: (A, A) => Boolean): this.type = sortInPlace()(Ordering.fromLessThan(lt)) + + /** Sorts this $coll in place according to the Ordering which results from transforming + * an implicitly given Ordering with a transformation function. + * + * @see [[scala.collection.SeqOps.sortBy]] + */ + def sortInPlaceBy[B](f: A => B)(implicit ord: Ordering[B]): this.type = sortInPlace()(ord on f) + +} diff --git a/tests/pos-special/stdlib/collection/mutable/Iterable.scala b/tests/pos-special/stdlib/collection/mutable/Iterable.scala new file mode 100644 index 000000000000..bf286157b376 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/Iterable.scala @@ -0,0 +1,37 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.mutable + +import scala.collection.{IterableFactory, IterableFactoryDefaults} +import language.experimental.captureChecking + +trait Iterable[A] + extends collection.Iterable[A] + with collection.IterableOps[A, Iterable, Iterable[A]] + with IterableFactoryDefaults[A, Iterable] { + this: Iterable[A]^ => + + override def iterableFactory: IterableFactory[Iterable] = Iterable +} + +/** + * $factoryInfo + * @define coll mutable collection + * @define Coll `mutable.Iterable` + */ +@SerialVersionUID(3L) +object Iterable extends IterableFactory.Delegate[Iterable](ArrayBuffer) + +/** Explicit instantiation of the `Iterable` trait to reduce class file size in subclasses. */ +abstract class AbstractIterable[A] extends scala.collection.AbstractIterable[A] with Iterable[A]: + this: AbstractIterable[A]^ => diff --git a/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala b/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala new file mode 100644 index 000000000000..528c39e49bdf --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala @@ -0,0 +1,510 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import scala.annotation.{nowarn, tailrec} +import scala.collection.generic.DefaultSerializable +import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking + + +/** This class implements mutable maps using a hashtable. + * The iterator and all traversal methods of this class visit elements in the order they were inserted. + * + * @tparam K the type of the keys contained in this hash map. + * @tparam V the type of the values assigned to keys in this hash map. 
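+ * @example Iteration follows insertion order (added for illustration):
+ * {{{
+ * val m = LinkedHashMap("b" -> 2)
+ * m("a") = 1
+ * m.toList // List((b,2), (a,1)): insertion order, not key order
+ * }}}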
+ * + * @define Coll `LinkedHashMap` + * @define coll linked hash map + * @define mayNotTerminateInf + * @define willNotTerminateInf + * @define orderDependent + * @define orderDependentFold + */ +@deprecatedInheritance("LinkedHashMap will be made final; use .withDefault for the common use case of computing a default value", "2.13.11") +class LinkedHashMap[K, V] + extends AbstractMap[K, V] + with SeqMap[K, V] + with MapOps[K, V, LinkedHashMap, LinkedHashMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, LinkedHashMap[K, V]] + with StrictOptimizedMapOps[K, V, LinkedHashMap, LinkedHashMap[K, V]] + with MapFactoryDefaults[K, V, LinkedHashMap, Iterable] + with DefaultSerializable { + + override def mapFactory: MapFactory[LinkedHashMap] = LinkedHashMap + + // stepper / keyStepper / valueStepper are not overridden to use XTableStepper because that stepper + // would not return the elements in insertion order + + private[collection] type Entry = LinkedHashMap.LinkedEntry[K, V] + + private[collection] def _firstEntry: Entry = firstEntry + + protected var firstEntry: Entry = null + + protected var lastEntry: Entry = null + + /* Uses the same implementation as mutable.HashMap. The hashtable holds the following invariant: + * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i. + * - Every bucket is sorted in ascendant hash order + * - The sum of the lengths of all buckets is equal to contentSize. + */ + private[this] var table = new Array[Entry](tableSizeFor(LinkedHashMap.defaultinitialSize)) + + private[this] var threshold: Int = newThreshold(table.length) + + private[this] var contentSize = 0 + + override def last: (K, V) = + if (size > 0) (lastEntry.key, lastEntry.value) + else throw new NoSuchElementException("Cannot call .last on empty LinkedHashMap") + + override def lastOption: Option[(K, V)] = + if (size > 0) Some((lastEntry.key, lastEntry.value)) + else None + + override def head: (K, V) = + if (size > 0) (firstEntry.key, firstEntry.value) + else throw new NoSuchElementException("Cannot call .head on empty LinkedHashMap") + + override def headOption: Option[(K, V)] = + if (size > 0) Some((firstEntry.key, firstEntry.value)) + else None + + override def size = contentSize + override def knownSize: Int = size + override def isEmpty: Boolean = size == 0 + + def get(key: K): Option[V] = { + val e = findEntry(key) + if (e == null) None + else Some(e.value) + } + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / LinkedHashMap.defaultLoadFactor).toInt) + if (target > table.length) growTable(target) + } + + override def contains(key: K): Boolean = { + if (getClass eq classOf[LinkedHashMap[_, _]]) + findEntry(key) != null + else + super.contains(key) // A subclass might override `get`, use the default implementation `contains`. + } + + override def put(key: K, value: V): Option[V] = put0(key, value, true) match { + case null => None + case sm => sm + } + + override def update(key: K, value: V): Unit = put0(key, value, false) + + override def remove(key: K): Option[V] = removeEntry0(key) match { + case null => None + case nd => Some(nd.value) + } + + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + if (getClass != classOf[LinkedHashMap[_, _]]) { + // subclasses of LinkedHashMap might customise `get` ... + super.getOrElse(key, default) + } else { + // .. but in the common case, we can avoid the Option boxing. 
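The fast path below works because `findEntry` returns the entry itself (or `null`), so a hit never allocates the `Some` that `get` would. A hypothetical before/after, purely illustrative (the demo object is not part of this diff):

```scala
// Illustrative sketch: both expressions yield 1, but the second one takes the
// fast path and skips the Option allocation on a hit.
object GetOrElseSketch {
  def main(args: Array[String]): Unit = {
    val m = scala.collection.mutable.LinkedHashMap("a" -> 1)
    println(m.get("a").getOrElse(0)) // allocates Some(1), then unwraps it
    println(m.getOrElse("a", 0))     // reads the entry's value directly
  }
}
```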
+ val nd = findEntry(key) + if (nd eq null) default else nd.value + } + } + + override def getOrElseUpdate(key: K, defaultValue: => V): V = { + if (getClass != classOf[LinkedHashMap[_, _]]) { + // subclasses of LinkedHashMap might customise `get` ... + super.getOrElseUpdate(key, defaultValue) + } else { + val hash = computeHash(key) + val idx = index(hash) + val nd = table(idx) match { + case null => null + case nd => nd.findEntry(key, hash) + } + if (nd != null) nd.value + else { + val table0 = table + val default = defaultValue + if (contentSize + 1 >= threshold) growTable(table.length * 2) + // Avoid recomputing index if the `defaultValue()` or new element hasn't triggered a table resize. + val newIdx = if (table0 eq table) idx else index(hash) + put0(key, default, false, hash, newIdx) + default + } + } + } + + private[this] def removeEntry0(elem: K): Entry = removeEntry0(elem, computeHash(elem)) + + /** Removes a key from this map if it exists + * + * @param elem the element to remove + * @param hash the **improved** hashcode of `element` (see computeHash) + * @return the node that contained element if it was present, otherwise null + */ + private[this] def removeEntry0(elem: K, hash: Int): Entry = { + val idx = index(hash) + table(idx) match { + case null => null + case nd if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + deleteEntry(nd) + contentSize -= 1 + nd + case nd => + // find an element that matches + var prev = nd + var next = nd.next + while ((next ne null) && next.hash <= hash) { + if (next.hash == hash && next.key == elem) { + prev.next = next.next + deleteEntry(next) + contentSize -= 1 + return next + } + prev = next + next = next.next + } + null + } + } + + /** Computes the improved hash of an original (`any.##`) hash. 
*/ + @`inline` private[this] def improveHash(originalHash: Int): Int = { + originalHash ^ (originalHash >>> 16) + } + @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) + + /** Computes the improved hash of this key */ + @`inline` private[this] def computeHash(o: K): Int = improveHash(o.##) + + @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) + + @`inline` private[this] def findEntry(key: K): Entry = { + val hash = computeHash(key) + table(index(hash)) match { + case null => null + case nd => nd.findEntry(key, hash) + } + } + + def addOne(kv: (K, V)): this.type = { + put(kv._1, kv._2) + this + } + + def subtractOne(key: K): this.type = { + remove(key) + this + } + + private[this] abstract class LinkedHashMapIterator[T] extends AbstractIterator[T] { + private[this] var cur = firstEntry + def extract(nd: Entry): T + def hasNext: Boolean = cur ne null + def next(): T = + if (hasNext) { val r = extract(cur); cur = cur.later; r } + else Iterator.empty.next() + } + + def iterator: Iterator[(K, V)] = + if (size == 0) Iterator.empty + else new LinkedHashMapIterator[(K, V)] { + def extract(nd: Entry): (K, V) = (nd.key, nd.value) + } + + protected class LinkedKeySet extends KeySet { + override def iterableFactory: IterableFactory[collection.Set] = LinkedHashSet + } + + override def keySet: collection.Set[K] = new LinkedKeySet + + override def keysIterator: Iterator[K] = + if (size == 0) Iterator.empty + else new LinkedHashMapIterator[K] { + def extract(nd: Entry): K = nd.key + } + + private[collection] def entryIterator: Iterator[Entry] = + if (size == 0) Iterator.empty + else new LinkedHashMapIterator[Entry] { + def extract(nd: Entry): Entry = nd + } + + + // Override updateWith for performance, so we can do the update while hashing + // the input key only once and performing one lookup into the hash table + override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + if (getClass != classOf[LinkedHashMap[_, _]]) { + // subclasses of LinkedHashMap might customise `get` ... 
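This override exists so the key is hashed and looked up only once; the `(previousValue, nextValue)` match that follows covers all four combinations. A hedged sketch of the observable behaviour (the demo object and its sample keys/values are assumptions, not part of this diff):

```scala
// Illustrative sketch: the four (previousValue, nextValue) cases of updateWith.
object UpdateWithSketch {
  def main(args: Array[String]): Unit = {
    val m = scala.collection.mutable.LinkedHashMap("a" -> 1)
    m.updateWith("a")(_.map(_ + 1)) // (Some(1), Some(2)): value overwritten in place
    m.updateWith("a")(_ => None)    // (Some(2), None):    entry unlinked and removed
    m.updateWith("b")(_ => Some(7)) // (None,    Some(7)): new entry appended as last
    m.updateWith("c")(_ => None)    // (None,    None):    map left untouched
    println(m)                      // LinkedHashMap(b -> 7)
  }
}
```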
+ super.updateWith(key)(remappingFunction) + } else { + val hash = computeHash(key) + val indexedHash = index(hash) + + var foundEntry: Entry = null + var previousEntry: Entry = null + table(indexedHash) match { + case null => + case nd => + @tailrec + def findEntry(prev: Entry, nd: Entry, k: K, h: Int): Unit = { + if (h == nd.hash && k == nd.key) { + previousEntry = prev + foundEntry = nd + } + else if ((nd.next eq null) || (nd.hash > h)) () + else findEntry(nd, nd.next, k, h) + } + + findEntry(null, nd, key, hash) + } + + val previousValue = foundEntry match { + case null => None + case nd => Some(nd.value) + } + + val nextValue = remappingFunction(previousValue) + + (previousValue, nextValue) match { + case (None, None) => // do nothing + + case (Some(_), None) => + if (previousEntry != null) previousEntry.next = foundEntry.next + else table(indexedHash) = foundEntry.next + deleteEntry(foundEntry) + contentSize -= 1 + + case (None, Some(value)) => + val newIndexedHash = + if (contentSize + 1 >= threshold) { + growTable(table.length * 2) + index(hash) + } else indexedHash + put0(key, value, false, hash, newIndexedHash) + + case (Some(_), Some(newValue)) => foundEntry.value = newValue + } + nextValue + } + } + + override def valuesIterator: Iterator[V] = + if (size == 0) Iterator.empty + else new LinkedHashMapIterator[V] { + def extract(nd: Entry): V = nd.value + } + + + override def foreach[U](f: ((K, V)) => U): Unit = { + var cur = firstEntry + while (cur ne null) { + f((cur.key, cur.value)) + cur = cur.later + } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + var cur = firstEntry + while (cur ne null) { + f(cur.key, cur.value) + cur = cur.later + } + } + + override def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + firstEntry = null + lastEntry = null + } + + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity - 1).max(4)) * 2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * LinkedHashMap.defaultLoadFactor).toInt + + /*create a new entry. If table is empty(firstEntry is null), then the + * new entry will be the firstEntry. If not, just set the new entry to + * be the lastEntry. 
+ * */ + private[this] def createNewEntry(key: K, hash: Int, value: V): Entry = { + val e = new Entry(key, hash, value) + if (firstEntry eq null) firstEntry = e + else { + lastEntry.later = e + e.earlier = lastEntry + } + lastEntry = e + e + } + + /** Delete the entry from the LinkedHashMap, set the `earlier` and `later` pointers correctly */ + private[this] def deleteEntry(e: Entry): Unit = { + if (e.earlier eq null) firstEntry = e.later + else e.earlier.later = e.later + if (e.later eq null) lastEntry = e.earlier + else e.later.earlier = e.earlier + e.earlier = null + e.later = null + e.next = null + } + + private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { + if (contentSize + 1 >= threshold) growTable(table.length * 2) + val hash = computeHash(key) + val idx = index(hash) + put0(key, value, getOld, hash, idx) + } + + private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { + table(idx) match { + case null => + table(idx) = createNewEntry(key, hash, value) + case old => + var prev: Entry = null + var n = old + while ((n ne null) && n.hash <= hash) { + if (n.hash == hash && key == n.key) { + val old = n.value + n.value = value + return if (getOld) Some(old) else null + } + prev = n + n = n.next + } + val nnode = createNewEntry(key, hash, value) + if (prev eq null) { + nnode.next = old + table(idx) = nnode + } else { + nnode.next = prev.next + prev.next = nnode + } + } + contentSize += 1 + null + } + + private[this] def growTable(newlen: Int): Unit = { + if (newlen < 0) + throw new RuntimeException(s"new hash table size $newlen exceeds maximum") + var oldlen = table.length + threshold = newThreshold(newlen) + if (size == 0) table = new Array(newlen) + else { + table = java.util.Arrays.copyOf(table, newlen) + val preLow = new Entry(null.asInstanceOf[K], 0, null.asInstanceOf[V]) + val preHigh = new Entry(null.asInstanceOf[K], 0, null.asInstanceOf[V]) + // Split buckets until the new length has been reached. This could be done more + // efficiently when growing an already filled table to more than double the size. 
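The loop below sends each node either back to its old bucket (the `low` chain) or to `oldIndex + oldlen` (the `high` chain), decided by the single bit `n.hash & oldlen`. A worked sketch of that test, with assumed hash values (not part of this diff):

```scala
// Illustrative sketch: with oldlen = 8, hash 5 (0b0101) stays in bucket 5,
// while hash 13 (0b1101) moves from bucket 13 & 7 = 5 to bucket 5 + 8 = 13.
object BucketSplitSketch {
  def main(args: Array[String]): Unit = {
    val oldlen = 8
    for (h <- List(5, 13)) {
      val oldIdx = h & (oldlen - 1)
      val newIdx = if ((h & oldlen) == 0) oldIdx else oldIdx + oldlen
      println(s"hash $h: bucket $oldIdx -> bucket $newIdx")
    }
  }
}
```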
+ while (oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if (old ne null) { + preLow.next = null + preHigh.next = null + var lastLow = preLow + var lastHigh = preHigh + var n = old + while (n ne null) { + val next = n.next + if ((n.hash & oldlen) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if (old ne preLow.next) table(i) = preLow.next + if (preHigh.next ne null) { + table(i + oldlen) = preHigh.next + lastHigh.next = null + } + } + i += 1 + } + oldlen *= 2 + } + } + } + + override def hashCode: Int = { + if (isEmpty) MurmurHash3.emptyMapHash + else { + val tupleHashIterator = new LinkedHashMapIterator[Any] { + var hash: Int = 0 + override def hashCode: Int = hash + override def extract(nd: Entry): Any = { + hash = MurmurHash3.tuple2Hash(unimproveHash(nd.hash), nd.value.##) + this + } + } + MurmurHash3.unorderedHash(tupleHashIterator, MurmurHash3.mapSeed) + } + } + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "LinkedHashMap" +} + +/** $factoryInfo + * @define Coll `LinkedHashMap` + * @define coll linked hash map + */ +@SerialVersionUID(3L) +object LinkedHashMap extends MapFactory[LinkedHashMap] { + + def empty[K, V] = new LinkedHashMap[K, V] + + def from[K, V](it: collection.IterableOnce[(K, V)]^) = { + val newlhm = empty[K, V] + newlhm.sizeHint(it.knownSize) + newlhm.addAll(it) + newlhm + } + + def newBuilder[K, V] = new GrowableBuilder(empty[K, V]) + + /** Class for the linked hash map entry, used internally. + */ + private[mutable] final class LinkedEntry[K, V](val key: K, val hash: Int, var value: V) { + var earlier: LinkedEntry[K, V] = null + var later: LinkedEntry[K, V] = null + var next: LinkedEntry[K, V] = null + + @tailrec + final def findEntry(k: K, h: Int): LinkedEntry[K, V] = + if (h == hash && k == key) this + else if ((next eq null) || (hash > h)) null + else next.findEntry(k, h) + } + + /** The default load factor for the hash table */ + private[collection] final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + private[collection] final def defaultinitialSize: Int = 16 +} diff --git a/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala b/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala new file mode 100644 index 000000000000..b9db31651ede --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala @@ -0,0 +1,349 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import scala.annotation.{nowarn, tailrec} +import scala.collection.generic.DefaultSerializable +import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking + +/** This class implements mutable sets using a hashtable. + * The iterator and all traversal methods of this class visit elements in the order they were inserted. + * + * @tparam A the type of the elements contained in this set. 
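+ * @example Iteration follows insertion order (added for illustration):
+ * {{{
+ * val s = LinkedHashSet(3, 1)
+ * s += 2
+ * s.toList // List(3, 1, 2): insertion order, not element order
+ * }}}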
+ * + * @define Coll `LinkedHashSet` + * @define coll linked hash set + * @define mayNotTerminateInf + * @define willNotTerminateInf + * @define orderDependent + * @define orderDependentFold + */ +@deprecatedInheritance("LinkedHashSet will be made final", "2.13.11") +class LinkedHashSet[A] + extends AbstractSet[A] + with SetOps[A, LinkedHashSet, LinkedHashSet[A]] + with StrictOptimizedIterableOps[A, LinkedHashSet, LinkedHashSet[A]] + with IterableFactoryDefaults[A, LinkedHashSet] + with DefaultSerializable { + + override def iterableFactory: IterableFactory[LinkedHashSet] = LinkedHashSet + + // stepper is not overridden to use XTableStepper because that stepper would not return the + // elements in insertion order + + /*private*/ type Entry = LinkedHashSet.Entry[A] + + protected var firstEntry: Entry = null + + protected var lastEntry: Entry = null + + /* Uses the same implementation as mutable.HashSet. The hashtable holds the following invariant: + * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i. + * - Every bucket is sorted in ascendant hash order + * - The sum of the lengths of all buckets is equal to contentSize. + */ + private[this] var table = new Array[Entry](tableSizeFor(LinkedHashSet.defaultinitialSize)) + + private[this] var threshold: Int = newThreshold(table.length) + + private[this] var contentSize = 0 + + override def last: A = + if (size > 0) lastEntry.key + else throw new NoSuchElementException("Cannot call .last on empty LinkedHashSet") + + override def lastOption: Option[A] = + if (size > 0) Some(lastEntry.key) + else None + + override def head: A = + if (size > 0) firstEntry.key + else throw new NoSuchElementException("Cannot call .head on empty LinkedHashSet") + + override def headOption: Option[A] = + if (size > 0) Some(firstEntry.key) + else None + + override def size: Int = contentSize + override def knownSize: Int = size + override def isEmpty: Boolean = size == 0 + + def contains(elem: A): Boolean = findEntry(elem) ne null + + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / LinkedHashSet.defaultLoadFactor).toInt) + if (target > table.length) growTable(target) + } + + override def add(elem: A): Boolean = { + if (contentSize + 1 >= threshold) growTable(table.length * 2) + val hash = computeHash(elem) + put0(elem, hash, index(hash)) + } + + def addOne(elem: A): this.type = { + add(elem) + this + } + + def subtractOne(elem: A): this.type = { + remove(elem) + this + } + + override def remove(elem: A): Boolean = remove0(elem, computeHash(elem)) + + private[this] abstract class LinkedHashSetIterator[T] extends AbstractIterator[T] { + private[this] var cur = firstEntry + def extract(nd: Entry): T + def hasNext: Boolean = cur ne null + def next(): T = + if (hasNext) { val r = extract(cur); cur = cur.later; r } + else Iterator.empty.next() + } + + def iterator: Iterator[A] = new LinkedHashSetIterator[A] { + override def extract(nd: Entry): A = nd.key + } + + private[collection] def entryIterator: Iterator[Entry] = new LinkedHashSetIterator[Entry] { + override def extract(nd: Entry): Entry = nd + } + + override def foreach[U](f: A => U): Unit = { + var cur = firstEntry + while (cur ne null) { + f(cur.key) + cur = cur.later + } + } + + override def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + firstEntry = null + lastEntry = null + } + + private[this] def tableSizeFor(capacity: Int) = + 
(Integer.highestOneBit((capacity - 1).max(4)) * 2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * LinkedHashSet.defaultLoadFactor).toInt + + @`inline` private[this] def improveHash(originalHash: Int): Int = { + originalHash ^ (originalHash >>> 16) + } + + @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) + + /** Computes the improved hash of this key */ + @`inline` private[this] def computeHash(o: A): Int = improveHash(o.##) + + @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) + + @`inline` private[this] def findEntry(key: A): Entry = { + val hash = computeHash(key) + table(index(hash)) match { + case null => null + case nd => nd.findEntry(key, hash) + } + } + + /*create a new entry. If table is empty(firstEntry is null), then the + * new entry will be the firstEntry. If not, just set the new entry to + * be the lastEntry. + * */ + private[this] def createNewEntry(key: A, hash: Int): Entry = { + val e = new Entry(key, hash) + if (firstEntry eq null) firstEntry = e + else { + lastEntry.later = e + e.earlier = lastEntry + } + lastEntry = e + e + } + + /** Delete the entry from the LinkedHashSet, set the `earlier` and `later` pointers correctly */ + private[this] def deleteEntry(e: Entry): Unit = { + if (e.earlier eq null) firstEntry = e.later + else e.earlier.later = e.later + if (e.later eq null) lastEntry = e.earlier + else e.later.earlier = e.earlier + e.earlier = null + e.later = null + e.next = null + } + + private[this] def put0(elem: A, hash: Int, idx: Int): Boolean = { + table(idx) match { + case null => + table(idx) = createNewEntry(elem, hash) + case old => + var prev: Entry = null + var n = old + while ((n ne null) && n.hash <= hash) { + if (n.hash == hash && elem == n.key) return false + prev = n + n = n.next + } + val nnode = createNewEntry(elem, hash) + if (prev eq null) { + nnode.next = old + table(idx) = nnode + } else { + nnode.next = prev.next + prev.next = nnode + } + } + contentSize += 1 + true + } + + private[this] def remove0(elem: A, hash: Int): Boolean = { + val idx = index(hash) + table(idx) match { + case null => false + case nd if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + deleteEntry(nd) + contentSize -= 1 + true + case nd => + // find an element that matches + var prev = nd + var next = nd.next + while ((next ne null) && next.hash <= hash) { + if (next.hash == hash && next.key == elem) { + prev.next = next.next + deleteEntry(next) + contentSize -= 1 + return true + } + prev = next + next = next.next + } + false + } + } + + private[this] def growTable(newlen: Int): Unit = { + if (newlen < 0) + throw new RuntimeException(s"new hash table size $newlen exceeds maximum") + var oldlen = table.length + threshold = newThreshold(newlen) + if (size == 0) table = new Array(newlen) + else { + table = java.util.Arrays.copyOf(table, newlen) + val preLow = new Entry(null.asInstanceOf[A], 0) + val preHigh = new Entry(null.asInstanceOf[A], 0) + // Split buckets until the new length has been reached. This could be done more + // efficiently when growing an already filled table to more than double the size. 
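LinkedHashSet reuses the same splitting strategy as the maps above. It is correct because masking with the doubled table length adds exactly one bit to the index, so a node's new bucket is always its old bucket plus either 0 or `oldlen`; and since filtering a chain into `low`/`high` sublists preserves relative order, each bucket stays sorted in ascending hash order. A small sketch of that identity, under assumed values (not part of this diff):

```scala
// Illustrative sketch: the new index under a doubled mask is the old index
// plus either 0 or oldlen -- the identity the split below relies on.
object MaskIdentitySketch {
  def main(args: Array[String]): Unit = {
    val oldlen = 8
    for (h <- 0 until 64) {
      val oldIdx = h & (oldlen - 1)
      val newIdx = h & (2 * oldlen - 1)
      assert(newIdx == oldIdx || newIdx == oldIdx + oldlen)
    }
    println("identity holds for all sampled hashes")
  }
}
```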
+ while (oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if (old ne null) { + preLow.next = null + preHigh.next = null + var lastLow = preLow + var lastHigh = preHigh + var n = old + while (n ne null) { + val next = n.next + if ((n.hash & oldlen) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if (old ne preLow.next) table(i) = preLow.next + if (preHigh.next ne null) { + table(i + oldlen) = preHigh.next + lastHigh.next = null + } + } + i += 1 + } + oldlen *= 2 + } + } + } + + override def hashCode: Int = { + val setHashIterator = + if (isEmpty) this.iterator + else { + new LinkedHashSetIterator[Any] { + var hash: Int = 0 + override def hashCode: Int = hash + override def extract(nd: Entry): Any = { + hash = unimproveHash(nd.hash) + this + } + } + } + MurmurHash3.unorderedHash(setHashIterator, MurmurHash3.setSeed) + } + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "LinkedHashSet" +} + +/** $factoryInfo + * @define Coll `LinkedHashSet` + * @define coll linked hash set + */ +@SerialVersionUID(3L) +object LinkedHashSet extends IterableFactory[LinkedHashSet] { + + override def empty[A]: LinkedHashSet[A] = new LinkedHashSet[A] + + def from[E](it: collection.IterableOnce[E]^) = { + val newlhs = empty[E] + newlhs.sizeHint(it.knownSize) + newlhs.addAll(it) + newlhs + } + + def newBuilder[A] = new GrowableBuilder(empty[A]) + + /** Class for the linked hash set entry, used internally. + */ + private[mutable] final class Entry[A](val key: A, val hash: Int) { + var earlier: Entry[A] = null + var later: Entry[A] = null + var next: Entry[A] = null + + @tailrec + final def findEntry(k: A, h: Int): Entry[A] = + if (h == hash && k == key) this + else if ((next eq null) || (hash > h)) null + else next.findEntry(k, h) + } + + /** The default load factor for the hash table */ + private[collection] final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + private[collection] final def defaultinitialSize: Int = 16 +} + diff --git a/tests/pos-special/stdlib/collection/mutable/ListBuffer.scala b/tests/pos-special/stdlib/collection/mutable/ListBuffer.scala new file mode 100644 index 000000000000..570c815644ee --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ListBuffer.scala @@ -0,0 +1,404 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.annotation.{nowarn, tailrec} +import scala.collection.immutable.{::, List, Nil} +import java.lang.{IllegalArgumentException, IndexOutOfBoundsException} + +import scala.collection.generic.DefaultSerializable +import scala.runtime.Statics.releaseFence +import language.experimental.captureChecking + +/** A `Buffer` implementation backed by a list. It provides constant time + * prepend and append. Most other operations are linear. + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#list-buffers "Scala's Collection Library overview"]] + * section on `List Buffers` for more information. + * + * @tparam A the type of this list buffer's elements. 
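+ * @example Constant-time append, then a cheap result (added for illustration):
+ * {{{
+ * val b = ListBuffer(1, 2)
+ * b += 3   // O(1) append
+ * b.toList // List(1, 2, 3): returns the internal list without copying
+ * }}}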
+ * + * @define Coll `ListBuffer` + * @define coll list buffer + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@SerialVersionUID(-8428291952499836345L) +class ListBuffer[A] + extends AbstractBuffer[A] + with SeqOps[A, ListBuffer, ListBuffer[A]] + with StrictOptimizedSeqOps[A, ListBuffer, ListBuffer[A]] + with ReusableBuilder[A, immutable.List[A]] + with IterableFactoryDefaults[A, ListBuffer] + with DefaultSerializable { + @transient private[this] var mutationCount: Int = 0 + + private var first: List[A] = Nil + private var last0: ::[A] = null + private[this] var aliased = false + private[this] var len = 0 + + private type Predecessor[A0] = ::[A0] /*| Null*/ + + def iterator: Iterator[A] = new MutationTracker.CheckedIterator(first.iterator, mutationCount) + + override def iterableFactory: SeqFactory[ListBuffer] = ListBuffer + + @throws[IndexOutOfBoundsException] + def apply(i: Int) = first.apply(i) + + def length = len + override def knownSize = len + + override def isEmpty: Boolean = len == 0 + + private def copyElems(): Unit = { + val buf = new ListBuffer[A].freshFrom(this) + first = buf.first + last0 = buf.last0 + aliased = false + } + + // we only call this before mutating things, so it's + // a good place to track mutations for the iterator + private def ensureUnaliased(): Unit = { + mutationCount += 1 + if (aliased) copyElems() + } + + // Avoids copying where possible. + override def toList: List[A] = { + aliased = nonEmpty + // We've accumulated a number of mutations to `List.tail` by this stage. + // Make sure they are visible to threads that the client of this ListBuffer might be about + // to share this List with. + releaseFence() + first + } + + def result(): immutable.List[A] = toList + + /** Prepends the elements of this buffer to a given list + * + * @param xs the list to which elements are prepended + */ + def prependToList(xs: List[A]): List[A] = { + if (isEmpty) xs + else { + ensureUnaliased() + last0.next = xs + toList + } + } + + def clear(): Unit = { + mutationCount += 1 + first = Nil + len = 0 + last0 = null + aliased = false + } + + final def addOne(elem: A): this.type = { + ensureUnaliased() + val last1 = new ::[A](elem, Nil) + if (len == 0) first = last1 else last0.next = last1 + last0 = last1 + len += 1 + this + } + + // MUST only be called on fresh instances + private def freshFrom(xs: IterableOnce[A]^): this.type = { + val it = xs.iterator + if (it.hasNext) { + var len = 1 + var last0 = new ::[A](it.next(), Nil) + first = last0 + while (it.hasNext) { + val last1 = new ::[A](it.next(), Nil) + last0.next = last1 + last0 = last1 + len += 1 + } + // copy local vars into instance + this.len = len + this.last0 = last0 + } + this + } + + override final def addAll(xs: IterableOnce[A]^): this.type = { + val it = xs.iterator + if (it.hasNext) { + val fresh = new ListBuffer[A].freshFrom(it) + ensureUnaliased() + if (len == 0) first = fresh.first + else last0.next = fresh.first + last0 = fresh.last0 + len += fresh.length + } + this + } + + override def subtractOne(elem: A): this.type = { + ensureUnaliased() + if (isEmpty) {} + else if (first.head == elem) { + first = first.tail + reduceLengthBy(1) + } + else { + var cursor = first + while (!cursor.tail.isEmpty && cursor.tail.head != elem) { + cursor = cursor.tail + } + if (!cursor.tail.isEmpty) { + val z = cursor.asInstanceOf[::[A]] + if (z.next == last0) + last0 = z + z.next = cursor.tail.tail + reduceLengthBy(1) + } + } + this + } + + /** Reduce 
the length of the buffer, and null out last0 + * if this reduces the length to 0. + */ + private def reduceLengthBy(num: Int): Unit = { + len -= num + if (len <= 0) // obviously shouldn't be < 0, but still better not to leak + last0 = null + } + + private def locate(i: Int): Predecessor[A] = + if (i == 0) null + else if (i == len) last0 + else { + var j = i - 1 + var p = first + while (j > 0) { + p = p.tail + j -= 1 + } + p.asInstanceOf[Predecessor[A]] + } + + private def getNext(p: Predecessor[A]): List[A] = + if (p == null) first else p.next + + def update(idx: Int, elem: A): Unit = { + ensureUnaliased() + if (idx < 0 || idx >= len) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${len-1})") + if (idx == 0) { + val newElem = new :: (elem, first.tail) + if (last0 eq first) { + last0 = newElem + } + first = newElem + } else { + // `p` can not be `null` because the case where `idx == 0` is handled above + val p = locate(idx) + val newElem = new :: (elem, p.tail.tail) + if (last0 eq p.tail) { + last0 = newElem + } + p.asInstanceOf[::[A]].next = newElem + } + } + + def insert(idx: Int, elem: A): Unit = { + ensureUnaliased() + if (idx < 0 || idx > len) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${len-1})") + if (idx == len) addOne(elem) + else { + val p = locate(idx) + val nx = elem :: getNext(p) + if(p eq null) first = nx else p.next = nx + len += 1 + } + } + + def prepend(elem: A): this.type = { + insert(0, elem) + this + } + + // `fresh` must be a `ListBuffer` that only we have access to + private def insertAfter(prev: Predecessor[A], fresh: ListBuffer[A]): Unit = { + if (!fresh.isEmpty) { + val follow = getNext(prev) + if (prev eq null) first = fresh.first else prev.next = fresh.first + fresh.last0.next = follow + len += fresh.length + } + } + + def insertAll(idx: Int, elems: IterableOnce[A]^): Unit = { + if (idx < 0 || idx > len) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${len-1})") + val it = elems.iterator + if (it.hasNext) { + if (idx == len) addAll(it) + else { + val fresh = new ListBuffer[A].freshFrom(it) + ensureUnaliased() + insertAfter(locate(idx), fresh) + } + } + } + + def remove(idx: Int): A = { + ensureUnaliased() + if (idx < 0 || idx >= len) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${len-1})") + val p = locate(idx) + val nx = getNext(p) + if(p eq null) { + first = nx.tail + if(first.isEmpty) last0 = null + } else { + if(last0 eq nx) last0 = p + p.next = nx.tail + } + len -= 1 + nx.head + } + + def remove(idx: Int, count: Int): Unit = + if (count > 0) { + ensureUnaliased() + if (idx < 0 || idx + count > len) throw new IndexOutOfBoundsException(s"$idx to ${idx + count} is out of bounds (min 0, max ${len-1})") + removeAfter(locate(idx), count) + } else if (count < 0) { + throw new IllegalArgumentException("removing negative number of elements: " + count) + } + + private def removeAfter(prev: Predecessor[A], n: Int) = { + @tailrec def ahead(p: List[A], n: Int): List[A] = + if (n == 0) p else ahead(p.tail, n - 1) + val nx = ahead(getNext(prev), n) + if(prev eq null) first = nx else prev.next = nx + if(nx.isEmpty) last0 = prev + len -= n + } + + def mapInPlace(f: A => A): this.type = { + mutationCount += 1 + val buf = new ListBuffer[A] + for (elem <- this) buf += f(elem) + first = buf.first + last0 = buf.last0 + aliased = false // we just assigned from a new instance + this + } + + def flatMapInPlace(f: A => IterableOnce[A]^): this.type = { + mutationCount += 1 + var 
src = first + var dst: List[A] = null + last0 = null + len = 0 + while(!src.isEmpty) { + val it = f(src.head).iterator + while(it.hasNext) { + val v = new ::(it.next(), Nil) + if(dst eq null) dst = v else last0.next = v + last0 = v + len += 1 + } + src = src.tail + } + first = if(dst eq null) Nil else dst + aliased = false // we just rebuilt a fresh, unaliased instance + this + } + + def filterInPlace(p: A => Boolean): this.type = { + ensureUnaliased() + var prev: Predecessor[A] = null + var cur: List[A] = first + while (!cur.isEmpty) { + val follow = cur.tail + if (!p(cur.head)) { + if(prev eq null) first = follow + else prev.next = follow + len -= 1 + } else { + prev = cur.asInstanceOf[Predecessor[A]] + } + cur = follow + } + last0 = prev + this + } + + def patchInPlace(from: Int, patch: collection.IterableOnce[A]^, replaced: Int): this.type = { + val _len = len + val _from = math.max(from, 0) // normalized + val _replaced = math.max(replaced, 0) // normalized + val it = patch.iterator + + val nonEmptyPatch = it.hasNext + val nonEmptyReplace = (_from < _len) && (_replaced > 0) + + // don't want to add a mutation or check aliasing (potentially expensive) + // if there's no patching to do + if (nonEmptyPatch || nonEmptyReplace) { + val fresh = new ListBuffer[A].freshFrom(it) + ensureUnaliased() + val i = math.min(_from, _len) + val n = math.min(_replaced, _len) + val p = locate(i) + removeAfter(p, math.min(n, _len - i)) + insertAfter(p, fresh) + } + this + } + + /** + * Selects the last element. + * + * Runs in constant time. + * + * @return The last element of this $coll. + * @throws NoSuchElementException If the $coll is empty. + */ + override def last: A = if (last0 eq null) throw new NoSuchElementException("last of empty ListBuffer") else last0.head + + /** + * Optionally selects the last element. + * + * Runs in constant time. + * + * @return the last element of this $coll$ if it is nonempty, `None` if it is empty. + */ + override def lastOption: Option[A] = if (last0 eq null) None else Some(last0.head) + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "ListBuffer" + +} + +@SerialVersionUID(3L) +object ListBuffer extends StrictOptimizedSeqFactory[ListBuffer] { + + def from[A](coll: collection.IterableOnce[A]^): ListBuffer[A] = new ListBuffer[A].freshFrom(coll) + + def newBuilder[A]: Builder[A, ListBuffer[A]] = new GrowableBuilder(empty[A]) + + def empty[A]: ListBuffer[A] = new ListBuffer[A] +} diff --git a/tests/pos-special/stdlib/collection/mutable/ListMap.scala b/tests/pos-special/stdlib/collection/mutable/ListMap.scala new file mode 100644 index 000000000000..e4f9322050d5 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ListMap.scala @@ -0,0 +1,83 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.annotation.tailrec +import scala.collection.generic.DefaultSerializable +import scala.collection.immutable.List +import language.experimental.captureChecking + +/** A simple mutable map backed by a list, so it preserves insertion order. + * + * @tparam K the type of the keys contained in this list map. + * @tparam V the type of the values assigned to keys in this list map. 
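+ *
+ *  @example A minimal sketch (editor's addition): every operation walks the backing
+ *           list, so lookups and updates are O(n) and only small maps are practical.
+ *  {{{
+ *  val m = scala.collection.mutable.ListMap("a" -> 1)
+ *  m("b") = 2              // prepends a new binding to the backing list
+ *  m.get("a")              // Some(1), found by linear search
+ *  }}}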
+ * + * @define Coll `mutable.ListMap` + * @define coll mutable list map + * @define mayNotTerminateInf + * @define willNotTerminateInf + * @define orderDependent + * @define orderDependentFold + */ +@deprecated("Use an immutable.ListMap assigned to a var instead of mutable.ListMap", "2.13.0") +class ListMap[K, V] + extends AbstractMap[K, V] + with MapOps[K, V, ListMap, ListMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, ListMap[K, V]] + with StrictOptimizedMapOps[K, V, ListMap, ListMap[K, V]] + with MapFactoryDefaults[K, V, ListMap, Iterable] + with DefaultSerializable { + + override def mapFactory: MapFactory[ListMap] = ListMap + + private[this] var elems: List[(K, V)] = List() + private[this] var siz: Int = 0 + + def get(key: K): Option[V] = elems find (_._1 == key) map (_._2) + def iterator: Iterator[(K, V)] = elems.iterator + + final override def addOne(kv: (K, V)) = { + val (e, key0) = remove(kv._1, elems, List()) + elems = (key0, kv._2) :: e + siz += 1; this + } + + final override def subtractOne(key: K) = { elems = remove(key, elems, List())._1; this } + + @tailrec + private def remove(key: K, elems: List[(K, V)], acc: List[(K, V)]): (List[(K, V)], K) = { + if (elems.isEmpty) (acc, key) + else if (elems.head._1 == key) { siz -= 1; (acc ::: elems.tail, elems.head._1) } + else remove(key, elems.tail, elems.head :: acc) + } + + final override def clear(): Unit = { elems = List(); siz = 0 } + + final override def size: Int = siz + override def knownSize: Int = size + override def isEmpty: Boolean = size == 0 + override protected[this] def stringPrefix = "ListMap" +} + +/** $factoryInfo + * @define Coll `mutable.ListMap` + * @define coll mutable list map + */ +@SerialVersionUID(3L) +@deprecated("Use an immutable.ListMap assigned to a var instead of mutable.ListMap", "2.13.0") +object ListMap extends MapFactory[ListMap] { + def empty[K, V]: ListMap[K, V] = new ListMap[K, V] + def from[K, V](it: IterableOnce[(K, V)]^): ListMap[K,V] = Growable.from(empty[K, V], it) + def newBuilder[K, V]: Builder[(K, V), ListMap[K,V]] = new GrowableBuilder(empty[K, V]) +} diff --git a/tests/pos-special/stdlib/collection/mutable/LongMap.scala b/tests/pos-special/stdlib/collection/mutable/LongMap.scala new file mode 100644 index 000000000000..5ac4757a42eb --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/LongMap.scala @@ -0,0 +1,674 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.collection.generic.DefaultSerializationProxy +import scala.language.implicitConversions +import language.experimental.captureChecking + +/** This class implements mutable maps with `Long` keys based on a hash table with open addressing. + * + * Basic map operations on single entries, including `contains` and `get`, + * are typically substantially faster with `LongMap` than [[HashMap]]. Methods + * that act on the whole map, including `foreach` and `map` are not in + * general expected to be faster than with a generic map, save for those + * that take particular advantage of the internal structure of the map: + * `foreachKey`, `foreachValue`, `mapValuesNow`, and `transformValues`. 
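+ *
+ *  An illustrative sketch (editor's addition) of the single-entry fast paths:
+ *  {{{
+ *  val m = scala.collection.mutable.LongMap.empty[String]
+ *  m(1L) = "one"           // `update` is the fastest way to insert
+ *  m.getOrNull(2L)         // null, without allocating an Option
+ *  m.contains(1L)          // true
+ *  }}}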
+ * + * Maps with open addressing may become less efficient at lookup after + * repeated addition/removal of elements. Although `LongMap` makes a + * decent attempt to remain efficient regardless, calling `repack` + * on a map that will no longer have elements removed but will be + * used heavily may save both time and storage space. + * + * This map is not intended to contain more than 2^29 entries (approximately + * 500 million). The maximum capacity is 2^30, but performance will degrade + * rapidly as 2^30 is approached. + * + */ +final class LongMap[V] private[collection] (defaultEntry: Long -> V, initialBufferSize: Int, initBlank: Boolean) + extends AbstractMap[Long, V] + with MapOps[Long, V, Map, LongMap[V]] + with StrictOptimizedIterableOps[(Long, V), Iterable, LongMap[V]] + with Serializable { + import LongMap._ + + def this() = this(LongMap.exceptionDefault, 16, true) + + // TODO: override clear() with an optimization more tailored for efficiency. + override protected def fromSpecific(coll: scala.collection.IterableOnce[(Long, V)]^): LongMap[V] = { + //TODO should this be the default implementation of this method in StrictOptimizedIterableOps? + val b = newSpecificBuilder + b.sizeHint(coll) + b.addAll(coll) + b.result() + } + override protected def newSpecificBuilder: Builder[(Long, V),LongMap[V]] = new GrowableBuilder(LongMap.empty[V]) + + /** Creates a new `LongMap` that returns default values according to a supplied key-value mapping. */ + def this(defaultEntry: Long -> V) = this(defaultEntry, 16, true) + + /** Creates a new `LongMap` with an initial buffer of specified size. + * + * A LongMap can typically contain half as many elements as its buffer size + * before it requires resizing. + */ + def this(initialBufferSize: Int) = this(LongMap.exceptionDefault, initialBufferSize, true) + + /** Creates a new `LongMap` with specified default values and initial buffer size. 
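+   *
+   *  A usage sketch (editor's addition): the default is consulted only by `apply`;
+   *  nothing is stored for missing keys.
+   *  {{{
+   *  val m = new scala.collection.mutable.LongMap[String](k => s"<none: $k>", 64)
+   *  m(99L)                // "<none: 99>", computed from the default
+   *  }}}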
*/ + def this(defaultEntry: Long -> V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) + + private[this] var mask = 0 + private[this] var extraKeys: Int = 0 + private[this] var zeroValue: AnyRef = null + private[this] var minValue: AnyRef = null + private[this] var _size = 0 + private[this] var _vacant = 0 + private[this] var _keys: Array[Long] = null + private[this] var _values: Array[AnyRef] = null + + if (initBlank) defaultInitialize(initialBufferSize) + + private[this] def defaultInitialize(n: Int) = { + mask = + if (n<0) 0x7 + else (((1 << (32 - java.lang.Integer.numberOfLeadingZeros(n-1))) - 1) & 0x3FFFFFFF) | 0x7 + _keys = new Array[Long](mask+1) + _values = new Array[AnyRef](mask+1) + } + + private[collection] def initializeTo( + m: Int, ek: Int, zv: AnyRef, mv: AnyRef, sz: Int, vc: Int, kz: Array[Long], vz: Array[AnyRef] + ): Unit = { + mask = m; extraKeys = ek; zeroValue = zv; minValue = mv; _size = sz; _vacant = vc; _keys = kz; _values = vz + } + + override def size: Int = _size + (extraKeys+1)/2 + override def knownSize: Int = size + override def isEmpty: Boolean = size == 0 + override def empty: LongMap[V] = new LongMap() + + private def imbalanced: Boolean = + (_size + _vacant) > 0.5*mask || _vacant > _size + + private def toIndex(k: Long): Int = { + // Part of the MurmurHash3 32 bit finalizer + val h = ((k ^ (k >>> 32)) & 0xFFFFFFFFL).toInt + val x = (h ^ (h >>> 16)) * 0x85EBCA6B + (x ^ (x >>> 13)) & mask + } + + private def seekEmpty(k: Long): Int = { + var e = toIndex(k) + var x = 0 + while (_keys(e) != 0) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } + e + } + + private def seekEntry(k: Long): Int = { + var e = toIndex(k) + var x = 0 + var q = 0L + while ({ q = _keys(e); if (q==k) return e; q != 0}) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } + e | MissingBit + } + + private def seekEntryOrOpen(k: Long): Int = { + var e = toIndex(k) + var x = 0 + var q = 0L + while ({ q = _keys(e); if (q==k) return e; q+q != 0}) { + x += 1 + e = (e + 2*(x+1)*x - 3) & mask + } + if (q == 0) return e | MissingBit + val o = e | MissVacant + while ({ q = _keys(e); if (q==k) return e; q != 0}) { + x += 1 + e = (e + 2*(x+1)*x - 3) & mask + } + o + } + + override def contains(key: Long): Boolean = { + if (key == -key) (((key>>>63).toInt+1) & extraKeys) != 0 + else seekEntry(key) >= 0 + } + + override def get(key: Long): Option[V] = { + if (key == -key) { + if ((((key>>>63).toInt+1) & extraKeys) == 0) None + else if (key == 0) Some(zeroValue.asInstanceOf[V]) + else Some(minValue.asInstanceOf[V]) + } + else { + val i = seekEntry(key) + if (i < 0) None else Some(_values(i).asInstanceOf[V]) + } + } + + override def getOrElse[V1 >: V](key: Long, default: => V1): V1 = { + if (key == -key) { + if ((((key>>>63).toInt+1) & extraKeys) == 0) default + else if (key == 0) zeroValue.asInstanceOf[V1] + else minValue.asInstanceOf[V1] + } + else { + val i = seekEntry(key) + if (i < 0) default else _values(i).asInstanceOf[V1] + } + } + + override def getOrElseUpdate(key: Long, defaultValue: => V): V = { + if (key == -key) { + val kbits = (key>>>63).toInt + 1 + if ((kbits & extraKeys) == 0) { + val value = defaultValue + extraKeys |= kbits + if (key == 0) zeroValue = value.asInstanceOf[AnyRef] + else minValue = value.asInstanceOf[AnyRef] + value + } + else if (key == 0) zeroValue.asInstanceOf[V] + else minValue.asInstanceOf[V] + } + else { + var i = seekEntryOrOpen(key) + if (i < 0) { + // It is possible that the default value computation was side-effecting + // Our hash table may have resized or 
even contain what we want now
+        // (but if it does, we'll replace it)
+        val value = {
+          val ok = _keys
+          val ans = defaultValue
+          if (ok ne _keys) {
+            i = seekEntryOrOpen(key)
+            if (i >= 0) _size -= 1
+          }
+          ans
+        }
+        _size += 1
+        val j = i & IndexMask
+        _keys(j) = key
+        _values(j) = value.asInstanceOf[AnyRef]
+        if ((i & VacantBit) != 0) _vacant -= 1
+        else if (imbalanced) repack()
+        value
+      }
+      else _values(i).asInstanceOf[V]
+    }
+  }
+
+  /** Retrieves the value associated with a key, or the default for that type if none exists
+   *  (null for AnyRef, 0 for floats and integers).
+   *
+   *  Note: this is the fastest way to retrieve a value that may or
+   *  may not exist, if the default null/zero is acceptable. For key/value
+   *  pairs that do exist, `apply` (i.e. `map(key)`) is equally fast.
+   */
+  def getOrNull(key: Long): V = {
+    if (key == -key) {
+      if ((((key>>>63).toInt+1) & extraKeys) == 0) null.asInstanceOf[V]
+      else if (key == 0) zeroValue.asInstanceOf[V]
+      else minValue.asInstanceOf[V]
+    }
+    else {
+      val i = seekEntry(key)
+      if (i < 0) null.asInstanceOf[V] else _values(i).asInstanceOf[V]
+    }
+  }
+
+  /** Retrieves the value associated with a key.
+   *  If the key does not exist in the map, the `defaultEntry` for that key
+   *  will be returned instead.
+   */
+  override def apply(key: Long): V = {
+    if (key == -key) {
+      if ((((key>>>63).toInt+1) & extraKeys) == 0) defaultEntry(key)
+      else if (key == 0) zeroValue.asInstanceOf[V]
+      else minValue.asInstanceOf[V]
+    }
+    else {
+      val i = seekEntry(key)
+      if (i < 0) defaultEntry(key) else _values(i).asInstanceOf[V]
+    }
+  }
+
+  /** The user-supplied default value for the key. Throws an exception
+   *  if no other default behavior was specified.
+   */
+  override def default(key: Long) = defaultEntry(key)
+
+  private def repack(newMask: Int): Unit = {
+    val ok = _keys
+    val ov = _values
+    mask = newMask
+    _keys = new Array[Long](mask+1)
+    _values = new Array[AnyRef](mask+1)
+    _vacant = 0
+    var i = 0
+    while (i < ok.length) {
+      val k = ok(i)
+      if (k != -k) {
+        val j = seekEmpty(k)
+        _keys(j) = k
+        _values(j) = ov(i)
+      }
+      i += 1
+    }
+  }
+
+  /** Repacks the contents of this `LongMap` for maximum efficiency of lookup.
+   *
+   *  For maps that undergo a complex creation process with both addition and
+   *  removal of keys, and then are used heavily with no further removal of
+   *  elements, calling `repack` after the end of the creation can result in
+   *  improved performance. Repacking takes time proportional to the number
+   *  of entries in the map.
+   */
+  def repack(): Unit = {
+    var m = mask
+    if (_size + _vacant >= 0.5*mask && !(_vacant > 0.2*mask)) m = ((m << 1) + 1) & IndexMask
+    while (m > 8 && 8*_size < m) m = m >>> 1
+    repack(m)
+  }
+
+  override def put(key: Long, value: V): Option[V] = {
+    if (key == -key) {
+      if (key == 0) {
+        val ans = if ((extraKeys&1) == 1) Some(zeroValue.asInstanceOf[V]) else None
+        zeroValue = value.asInstanceOf[AnyRef]
+        extraKeys |= 1
+        ans
+      }
+      else {
+        val ans = if ((extraKeys&2) == 2) Some(minValue.asInstanceOf[V]) else None
+        minValue = value.asInstanceOf[AnyRef]
+        extraKeys |= 2
+        ans
+      }
+    }
+    else {
+      val i = seekEntryOrOpen(key)
+      if (i < 0) {
+        val j = i & IndexMask
+        _keys(j) = key
+        _values(j) = value.asInstanceOf[AnyRef]
+        _size += 1
+        if ((i & VacantBit) != 0) _vacant -= 1
+        else if (imbalanced) repack()
+        None
+      }
+      else {
+        val ans = Some(_values(i).asInstanceOf[V])
+        _keys(i) = key
+        _values(i) = value.asInstanceOf[AnyRef]
+        ans
+      }
+    }
+  }
+
+  /** Updates the map to include a new key-value pair.
+ * + * This is the fastest way to add an entry to a `LongMap`. + */ + override def update(key: Long, value: V): Unit = { + if (key == -key) { + if (key == 0) { + zeroValue = value.asInstanceOf[AnyRef] + extraKeys |= 1 + } + else { + minValue = value.asInstanceOf[AnyRef] + extraKeys |= 2 + } + } + else { + val i = seekEntryOrOpen(key) + if (i < 0) { + val j = i & IndexMask + _keys(j) = key + _values(j) = value.asInstanceOf[AnyRef] + _size += 1 + if ((i & VacantBit) != 0) _vacant -= 1 + else if (imbalanced) repack() + } + else { + _keys(i) = key + _values(i) = value.asInstanceOf[AnyRef] + } + } + } + + /** Adds a new key/value pair to this map and returns the map. */ + @deprecated("Use `addOne` or `update` instead; infix operations with an operand of multiple args will be deprecated", "2.13.3") + def +=(key: Long, value: V): this.type = { update(key, value); this } + + /** Adds a new key/value pair to this map and returns the map. */ + @inline final def addOne(key: Long, value: V): this.type = { update(key, value); this } + + @inline override final def addOne(kv: (Long, V)): this.type = { update(kv._1, kv._2); this } + + def subtractOne(key: Long): this.type = { + if (key == -key) { + if (key == 0L) { + extraKeys &= 0x2 + zeroValue = null + } + else { + extraKeys &= 0x1 + minValue = null + } + } + else { + val i = seekEntry(key) + if (i >= 0) { + _size -= 1 + _vacant += 1 + _keys(i) = Long.MinValue + _values(i) = null + } + } + this + } + + def iterator: Iterator[(Long, V)] = new AbstractIterator[(Long, V)] { + private[this] val kz = _keys + private[this] val vz = _values + + private[this] var nextPair: (Long, V) = + if (extraKeys==0) null + else if ((extraKeys&1)==1) (0L, zeroValue.asInstanceOf[V]) + else (Long.MinValue, minValue.asInstanceOf[V]) + + private[this] var anotherPair: (Long, V) = + if (extraKeys==3) (Long.MinValue, minValue.asInstanceOf[V]) + else null + + private[this] var index = 0 + + def hasNext: Boolean = nextPair != null || (index < kz.length && { + var q = kz(index) + while (q == -q) { + index += 1 + if (index >= kz.length) return false + q = kz(index) + } + nextPair = (kz(index), vz(index).asInstanceOf[V]) + index += 1 + true + }) + def next() = { + if (nextPair == null && !hasNext) throw new NoSuchElementException("next") + val ans = nextPair + if (anotherPair != null) { + nextPair = anotherPair + anotherPair = null + } + else nextPair = null + ans + } + } + + // TODO PERF override these for efficiency. See immutable.LongMap for how to organize the code. 
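+  // Editor's note: the hand-rolled loops below stop once `j` reaches `_size`
+  // and skip slots where `k == -k`, i.e. empty (0L) or vacated (Long.MinValue)
+  // slots; those two values are stored as keys out of band via `extraKeys`.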
+ override def keysIterator: Iterator[Long] = super.keysIterator + override def valuesIterator: Iterator[V] = super.valuesIterator + + override def foreach[U](f: ((Long,V)) => U): Unit = { + if ((extraKeys & 1) == 1) f((0L, zeroValue.asInstanceOf[V])) + if ((extraKeys & 2) == 2) f((Long.MinValue, minValue.asInstanceOf[V])) + var i,j = 0 + while (i < _keys.length & j < _size) { + val k = _keys(i) + if (k != -k) { + j += 1 + f((k, _values(i).asInstanceOf[V])) + } + i += 1 + } + } + + override def foreachEntry[U](f: (Long,V) => U): Unit = { + if ((extraKeys & 1) == 1) f(0L, zeroValue.asInstanceOf[V]) + if ((extraKeys & 2) == 2) f(Long.MinValue, minValue.asInstanceOf[V]) + var i,j = 0 + while (i < _keys.length & j < _size) { + val k = _keys(i) + if (k != -k) { + j += 1 + f(k, _values(i).asInstanceOf[V]) + } + i += 1 + } + } + + override def clone(): LongMap[V] = { + val kz = java.util.Arrays.copyOf(_keys, _keys.length) + val vz = java.util.Arrays.copyOf(_values, _values.length) + val lm = new LongMap[V](defaultEntry, 1, false) + lm.initializeTo(mask, extraKeys, zeroValue, minValue, _size, _vacant, kz, vz) + lm + } + + @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") + override def +[V1 >: V](kv: (Long, V1)): LongMap[V1] = { + val lm = clone().asInstanceOf[LongMap[V1]] + lm += kv + lm + } + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (Long, V1), elem2: (Long, V1), elems: (Long, V1)*): LongMap[V1] = { + val m = this + elem1 + elem2 + if(elems.isEmpty) m else m.concat(elems) + } + + override def concat[V1 >: V](xs: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = { + val lm = clone().asInstanceOf[LongMap[V1]] + xs.iterator.foreach(kv => lm += kv) + lm + } + + override def ++ [V1 >: V](xs: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = concat(xs) + + @deprecated("Use m.clone().addOne(k,v) instead of m.updated(k, v)", "2.13.0") + override def updated[V1 >: V](key: Long, value: V1): LongMap[V1] = + clone().asInstanceOf[LongMap[V1]].addOne(key, value) + + /** Applies a function to all keys of this map. */ + def foreachKey[A](f: Long => A): Unit = { + if ((extraKeys & 1) == 1) f(0L) + if ((extraKeys & 2) == 2) f(Long.MinValue) + var i,j = 0 + while (i < _keys.length & j < _size) { + val k = _keys(i) + if (k != -k) { + j += 1 + f(k) + } + i += 1 + } + } + + /** Applies a function to all values of this map. */ + def foreachValue[A](f: V => A): Unit = { + if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]) + if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]) + var i,j = 0 + while (i < _keys.length & j < _size) { + val k = _keys(i) + if (k != -k) { + j += 1 + f(_values(i).asInstanceOf[V]) + } + i += 1 + } + } + + /** Creates a new `LongMap` with different values. + * Unlike `mapValues`, this method generates a new + * collection immediately. 
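+   *
+   *  For example (editor's sketch):
+   *  {{{
+   *  val m = scala.collection.mutable.LongMap(1L -> 1, 2L -> 2)
+   *  m.mapValuesNow(_ * 100)   // new LongMap(1 -> 100, 2 -> 200); m is unchanged
+   *  }}}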
+ */ + def mapValuesNow[V1](f: V => V1): LongMap[V1] = { + val zv = if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null + val mv = if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null + val lm = new LongMap[V1](LongMap.exceptionDefault, 1, false) + val kz = java.util.Arrays.copyOf(_keys, _keys.length) + val vz = new Array[AnyRef](_values.length) + var i,j = 0 + while (i < _keys.length & j < _size) { + val k = _keys(i) + if (k != -k) { + j += 1 + vz(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] + } + i += 1 + } + lm.initializeTo(mask, extraKeys, zv, mv, _size, _vacant, kz, vz) + lm + } + + /** Applies a transformation function to all values stored in this map. + * Note: the default, if any, is not transformed. + */ + @deprecated("Use transformValuesInPlace instead of transformValues", "2.13.0") + @`inline` final def transformValues(f: V => V): this.type = transformValuesInPlace(f) + + /** Applies a transformation function to all values stored in this map. + * Note: the default, if any, is not transformed. + */ + def transformValuesInPlace(f: V => V): this.type = { + if ((extraKeys & 1) == 1) zeroValue = f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] + if ((extraKeys & 2) == 2) minValue = f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] + var i,j = 0 + while (i < _keys.length & j < _size) { + val k = _keys(i) + if (k != -k) { + j += 1 + _values(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] + } + i += 1 + } + this + } + + def map[V2](f: ((Long, V)) => (Long, V2)): LongMap[V2] = LongMap.from(new View.Map(coll, f)) + + def flatMap[V2](f: ((Long, V)) => IterableOnce[(Long, V2)]): LongMap[V2] = LongMap.from(new View.FlatMap(coll, f)) + + def collect[V2](pf: PartialFunction[(Long, V), (Long, V2)]): LongMap[V2] = + strictOptimizedCollect(LongMap.newBuilder[V2], pf) + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(LongMap.toFactory[V](LongMap), this) + + override protected[this] def className = "LongMap" +} + +object LongMap { + private final val IndexMask = 0x3FFFFFFF + private final val MissingBit = 0x80000000 + private final val VacantBit = 0x40000000 + private final val MissVacant = 0xC0000000 + + private val exceptionDefault: Long -> Nothing = (k: Long) => throw new NoSuchElementException(k.toString) + + /** A builder for instances of `LongMap`. + * + * This builder can be reused to create multiple instances. + */ + final class LongMapBuilder[V] extends ReusableBuilder[(Long, V), LongMap[V]] { + private[collection] var elems: LongMap[V] = new LongMap[V] + override def addOne(entry: (Long, V)): this.type = { + elems += entry + this + } + def clear(): Unit = elems = new LongMap[V] + def result(): LongMap[V] = elems + override def knownSize: Int = elems.knownSize + } + + /** Creates a new `LongMap` with zero or more key/value pairs. */ + def apply[V](elems: (Long, V)*): LongMap[V] = buildFromIterableOnce(elems) + + private def buildFromIterableOnce[V](elems: IterableOnce[(Long, V)]^): LongMap[V] = { + var sz = elems.knownSize + if(sz < 0) sz = 4 + val lm = new LongMap[V](sz * 2) + elems.iterator.foreach{ case (k,v) => lm(k) = v } + if (lm.size < (sz>>3)) lm.repack() + lm + } + + /** Creates a new empty `LongMap`. */ + def empty[V]: LongMap[V] = new LongMap[V] + + /** Creates a new empty `LongMap` with the supplied default */ + def withDefault[V](default: Long -> V): LongMap[V] = new LongMap[V](default) + + /** Creates a new `LongMap` from an existing source collection. 
A source collection + * which is already a `LongMap` gets cloned. + * + * @param source Source collection + * @tparam A the type of the collection’s elements + * @return a new `LongMap` with the elements of `source` + */ + def from[V](source: IterableOnce[(Long, V)]^): LongMap[V] = source match { + case source: LongMap[_] => source.clone().asInstanceOf[LongMap[V]] + case _ => buildFromIterableOnce(source) + } + + def newBuilder[V]: ReusableBuilder[(Long, V), LongMap[V]] = new LongMapBuilder[V] + + /** Creates a new `LongMap` from arrays of keys and values. + * Equivalent to but more efficient than `LongMap((keys zip values): _*)`. + */ + def fromZip[V](keys: Array[Long], values: Array[V]): LongMap[V] = { + val sz = math.min(keys.length, values.length) + val lm = new LongMap[V](sz * 2) + var i = 0 + while (i < sz) { lm(keys(i)) = values(i); i += 1 } + if (lm.size < (sz>>3)) lm.repack() + lm + } + + /** Creates a new `LongMap` from keys and values. + * Equivalent to but more efficient than `LongMap((keys zip values): _*)`. + */ + def fromZip[V](keys: scala.collection.Iterable[Long], values: scala.collection.Iterable[V]): LongMap[V] = { + val sz = math.min(keys.size, values.size) + val lm = new LongMap[V](sz * 2) + val ki = keys.iterator + val vi = values.iterator + while (ki.hasNext && vi.hasNext) lm(ki.next()) = vi.next() + if (lm.size < (sz >> 3)) lm.repack() + lm + } + + implicit def toFactory[V](dummy: LongMap.type): Factory[(Long, V), LongMap[V]] = ToFactory.asInstanceOf[Factory[(Long, V), LongMap[V]]] + + @SerialVersionUID(3L) + private[this] object ToFactory extends Factory[(Long, AnyRef), LongMap[AnyRef]] with Serializable { + def fromSpecific(it: IterableOnce[(Long, AnyRef)]^): LongMap[AnyRef] = LongMap.from[AnyRef](it) + def newBuilder: Builder[(Long, AnyRef), LongMap[AnyRef]] = LongMap.newBuilder[AnyRef] + } + + implicit def toBuildFrom[V](factory: LongMap.type): BuildFrom[Any, (Long, V), LongMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Long, V), LongMap[V]]] + private object ToBuildFrom extends BuildFrom[Any, (Long, AnyRef), LongMap[AnyRef]] { + def fromSpecific(from: Any)(it: IterableOnce[(Long, AnyRef)]^) = LongMap.from(it) + def newBuilder(from: Any) = LongMap.newBuilder[AnyRef] + } + + implicit def iterableFactory[V]: Factory[(Long, V), LongMap[V]] = toFactory(this) + implicit def buildFromLongMap[V]: BuildFrom[LongMap[_], (Long, V), LongMap[V]] = toBuildFrom(this) +} diff --git a/tests/pos-special/stdlib/collection/mutable/Map.scala b/tests/pos-special/stdlib/collection/mutable/Map.scala new file mode 100644 index 000000000000..dab64ddb1f58 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/Map.scala @@ -0,0 +1,271 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package mutable + +import language.experimental.captureChecking + +/** Base type of mutable Maps */ +trait Map[K, V] + extends Iterable[(K, V)] + with collection.Map[K, V] + with MapOps[K, V, Map, Map[K, V]] + with Growable[(K, V)] + with Shrinkable[K] + with MapFactoryDefaults[K, V, Map, Iterable] { + + override def mapFactory: scala.collection.MapFactory[Map] = Map + + /* + //TODO consider keeping `remove` because it returns the removed entry + @deprecated("Use subtract or -= instead of remove", "2.13.0") + def remove(key: K): Option[V] = { + val old = get(key) + if(old.isDefined) subtract(key) + old + } + */ + + /** The same map with a given default function. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefaultValue`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d the function mapping keys to values, used for non-present keys + * @return a wrapper of the map with a default value + */ + def withDefault(d: K -> V): Map[K, V] = new Map.WithDefault[K, V](this, d) + + /** The same map with a given default value. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefaultValue`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d default value used for non-present keys + * @return a wrapper of the map with a default value + */ + def withDefaultValue(d: V): Map[K, V] = new Map.WithDefault[K, V](this, x => d) +} + +/** + * @define coll mutable map + * @define Coll `mutable.Map` + */ +trait MapOps[K, V, +CC[X, Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] + extends IterableOps[(K, V), Iterable, C] + with collection.MapOps[K, V, CC, C] + with Cloneable[C] + with Builder[(K, V), C] + with Growable[(K, V)] + with Shrinkable[K] + with Pure { + + def result(): C = coll + + @deprecated("Use - or remove on an immutable Map", "2.13.0") + final def - (key: K): C = clone() -= key + + @deprecated("Use -- or removeAll on an immutable Map", "2.13.0") + final def - (key1: K, key2: K, keys: K*): C = clone() -= key1 -= key2 --= keys + + /** Adds a new key/value pair to this map and optionally returns previously bound value. + * If the map already contains a + * mapping for the key, it will be overridden by the new value. + * + * @param key the key to update + * @param value the new value + * @return an option value containing the value associated with the key + * before the `put` operation was executed, or `None` if `key` + * was not defined in the map before. + */ + def put(key: K, value: V): Option[V] = { + val r = get(key) + update(key, value) + r + } + + /** Adds a new key/value pair to this map. + * If the map already contains a + * mapping for the key, it will be overridden by the new value. + * + * @param key The key to update + * @param value The new value + */ + def update(key: K, value: V): Unit = { coll += ((key, value)) } + + /** + * Update a mapping for the specified key and its current optionally-mapped value + * (`Some` if there is current mapping, `None` if not). + * + * If the remapping function returns `Some(v)`, the mapping is updated with the new value `v`. + * If the remapping function returns `None`, the mapping is removed (or remains absent if initially absent). 
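+   *
+   *  For instance (editor's sketch):
+   *  {{{
+   *  val m = scala.collection.mutable.Map("a" -> 1)
+   *  m.updateWith("a")(_.map(_ + 1))   // returns Some(2); "a" now maps to 2
+   *  m.updateWith("a")(_ => None)      // returns None; "a" is removed
+   *  }}}
+   *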
+ * If the function itself throws an exception, the exception is rethrown, and the current mapping is left unchanged. + * + * @param key the key value + * @param remappingFunction a partial function that receives current optionally-mapped value and return a new mapping + * @return the new value associated with the specified key + */ + def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + val previousValue = this.get(key) + val nextValue = remappingFunction(previousValue) + (previousValue, nextValue) match { + case (None, None) => // do nothing + case (Some(_), None) => this.remove(key) + case (_, Some(v)) => this.update(key,v) + } + nextValue + } + + /** If given key is already in this map, returns associated value. + * + * Otherwise, computes value from given expression `op`, stores with key + * in map and returns that value. + * + * Concurrent map implementations may evaluate the expression `op` + * multiple times, or may evaluate `op` without inserting the result. + * + * @param key the key to test + * @param op the computation yielding the value to associate with `key`, if + * `key` is previously unbound. + * @return the value associated with key (either previously or as a result + * of executing the method). + */ + def getOrElseUpdate(key: K, op: => V): V = + get(key) match { + case Some(v) => v + case None => val d = op; this(key) = d; d + } + + /** Removes a key from this map, returning the value associated previously + * with that key as an option. + * @param key the key to be removed + * @return an option value containing the value associated previously with `key`, + * or `None` if `key` was not defined in the map before. + */ + def remove(key: K): Option[V] = { + val r = get(key) + if (r.isDefined) this -= key + r + } + + def clear(): Unit = { keysIterator foreach -= } + + override def clone(): C = empty ++= this + + @deprecated("Use filterInPlace instead", "2.13.0") + @inline final def retain(p: (K, V) => Boolean): this.type = filterInPlace(p) + + /** Retains only those mappings for which the predicate + * `p` returns `true`. + * + * @param p The test predicate + */ + def filterInPlace(p: (K, V) => Boolean): this.type = { + if (!isEmpty) this match { + case tm: concurrent.Map[_, _] => tm.asInstanceOf[concurrent.Map[K, V]].filterInPlaceImpl(p) + case _ => + val array = this.toArray[Any] // scala/bug#7269 toArray avoids ConcurrentModificationException + val arrayLength = array.length + var i = 0 + while (i < arrayLength) { + val (k, v) = array(i).asInstanceOf[(K, V)] + if (!p(k, v)) { + this -= k + } + i += 1 + } + } + this + } + + @deprecated("Use mapValuesInPlace instead", "2.13.0") + @inline final def transform(f: (K, V) => V): this.type = mapValuesInPlace(f) + + /** Applies a transformation function to all values contained in this map. + * The transformation function produces new values from existing keys + * associated values. + * + * @param f the transformation to apply + * @return the map itself. 
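+   *
+   *  For example (editor's sketch):
+   *  {{{
+   *  val m = scala.collection.mutable.Map("a" -> 1, "b" -> 2)
+   *  m.mapValuesInPlace((_, v) => v * 10)   // m is now Map("a" -> 10, "b" -> 20)
+   *  }}}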
+ */ + def mapValuesInPlace(f: (K, V) => V): this.type = { + if (!isEmpty) this match { + case hm: mutable.HashMap[_, _] => hm.asInstanceOf[mutable.HashMap[K, V]].mapValuesInPlaceImpl(f) + case tm: concurrent.Map[_, _] => tm.asInstanceOf[concurrent.Map[K, V]].mapValuesInPlaceImpl(f) + case _ => + val array = this.toArray[Any] + val arrayLength = array.length + var i = 0 + while (i < arrayLength) { + val (k, v) = array(i).asInstanceOf[(K, V)] + update(k, f(k, v)) + i += 1 + } + } + this + } + + @deprecated("Use m.clone().addOne((k,v)) instead of m.updated(k, v)", "2.13.0") + def updated[V1 >: V](key: K, value: V1): CC[K, V1] = + clone().asInstanceOf[CC[K, V1]].addOne((key, value)) + + override def knownSize: Int = super[IterableOps].knownSize +} + +/** + * $factoryInfo + * @define coll mutable map + * @define Coll `mutable.Map` + */ +@SerialVersionUID(3L) +object Map extends MapFactory.Delegate[Map](HashMap) { + + @SerialVersionUID(3L) + class WithDefault[K, V](val underlying: Map[K, V], val defaultValue: K -> V) + extends AbstractMap[K, V] + with MapOps[K, V, Map, WithDefault[K, V]] with Serializable { + + override def default(key: K): V = defaultValue(key) + + def iterator: scala.collection.Iterator[(K, V)] = underlying.iterator + override def isEmpty: Boolean = underlying.isEmpty + override def knownSize: Int = underlying.knownSize + override def mapFactory: MapFactory[Map] = underlying.mapFactory + + override def clear(): Unit = underlying.clear() + + def get(key: K): Option[V] = underlying.get(key) + + def subtractOne(elem: K): WithDefault.this.type = { underlying.subtractOne(elem); this } + + def addOne(elem: (K, V)): WithDefault.this.type = { underlying.addOne(elem); this } + + override def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]^): Map[K, V2] = + underlying.concat(suffix).withDefault(defaultValue) + + override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]^): WithDefault[K, V] = + new WithDefault[K, V](mapFactory.from(coll), defaultValue) + + override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] = + Map.newBuilder.mapResult((p: Map[K, V]) => new WithDefault[K, V](p, defaultValue)) + } + +} + +/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */ +abstract class AbstractMap[K, V] extends scala.collection.AbstractMap[K, V] with Map[K, V] diff --git a/tests/pos-special/stdlib/collection/mutable/MultiMap.scala b/tests/pos-special/stdlib/collection/mutable/MultiMap.scala new file mode 100644 index 000000000000..93b0d6ae4d73 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/MultiMap.scala @@ -0,0 +1,116 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.mutable + +import language.experimental.captureChecking + +/** A trait for mutable maps with multiple values assigned to a key. + * + * This class is typically used as a mixin. It turns maps which map `K` + * to `Set[V]` objects into multimaps that map `K` to `V` objects. 
+ *
+ *  @example {{{
+ *  // first import all necessary types from package `collection.mutable`
+ *  import collection.mutable.{ HashMap, MultiMap, Set }
+ *
+ *  // the easiest way to create a `MultiMap` is to mix it into a normal
+ *  // `Map` instance
+ *  val mm = new HashMap[Int, Set[String]] with MultiMap[Int, String]
+ *
+ *  // to add key-value pairs to a multimap it is important to use
+ *  // the method `addBinding` because standard methods like `+` will
+ *  // overwrite the complete key-value pair instead of adding the
+ *  // value to the existing key
+ *  mm.addBinding(1, "a")
+ *  mm.addBinding(2, "b")
+ *  mm.addBinding(1, "c")
+ *
+ *  // mm now contains `Map(2 -> Set(b), 1 -> Set(c, a))`
+ *
+ *  // to check whether the multimap holds a matching value, use the method
+ *  // `entryExists`, which tests the values bound to a key with a predicate
+ *  mm.entryExists(1, _ == "a") == true
+ *  mm.entryExists(1, _ == "b") == false
+ *  mm.entryExists(2, _ == "b") == true
+ *
+ *  // to remove a previously added value, use the method `removeBinding`
+ *  mm.removeBinding(1, "a")
+ *  mm.entryExists(1, _ == "a") == false
+ *  }}}
+ *
+ *  @define coll multimap
+ *  @define Coll `MultiMap`
+ */
+@deprecated("Use a scala.collection.mutable.MultiDict in the scala-collection-contrib module", "2.13.0")
+trait MultiMap[K, V] extends Map[K, Set[V]] {
+  /** Creates a new set.
+   *
+   *  Classes that use this trait as a mixin can override this method
+   *  to have the desired implementation of sets assigned to new keys.
+   *  By default this is `HashSet`.
+   *
+   *  @return An empty set of values of type `V`.
+   */
+  protected def makeSet: Set[V] = new HashSet[V]
+
+  /** Assigns the specified `value` to a specified `key`. If the key
+   *  already has a binding equal to `value`, nothing is changed;
+   *  otherwise a new binding is added for that `key`.
+   *
+   *  @param key    The key to which to bind the new value.
+   *  @param value  The value to bind to the key.
+   *  @return       A reference to this multimap.
+   */
+  def addBinding(key: K, value: V): this.type = {
+    get(key) match {
+      case None =>
+        val set = makeSet
+        set += value
+        this(key) = set
+      case Some(set) =>
+        set += value
+    }
+    this
+  }
+
+  /** Removes the binding of `value` to `key` if it exists, otherwise this
+   *  operation has no effect.
+   *
+   *  If this was the last value assigned to the specified key, the
+   *  set assigned to that key will be removed as well.
+   *
+   *  @param key    The key of the binding.
+   *  @param value  The value to remove.
+   *  @return       A reference to this multimap.
+   */
+  def removeBinding(key: K, value: V): this.type = {
+    get(key) match {
+      case None =>
+      case Some(set) =>
+        set -= value
+        if (set.isEmpty) this -= key
+    }
+    this
+  }
+
+  /** Checks if there exists a value bound to `key` that satisfies the predicate `p`.
+   *
+   *  @param key  The key for which the predicate is checked.
+   *  @param p    The predicate which a value assigned to the key must satisfy.
+   *  @return     `true` if such a binding exists, `false` otherwise.
+   */
+  def entryExists(key: K, p: V => Boolean): Boolean = get(key) match {
+    case None => false
+    case Some(set) => set exists p
+  }
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/MutationTracker.scala b/tests/pos-special/stdlib/collection/mutable/MutationTracker.scala
new file mode 100644
index 000000000000..3e9b16540031
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/MutationTracker.scala
@@ -0,0 +1,79 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import java.util.ConcurrentModificationException +import language.experimental.captureChecking + +/** + * Utilities to check that mutations to a client that tracks + * its mutations have not occurred since a given point. + * [[Iterator `Iterator`]]s that perform this check automatically + * during iteration can be created by wrapping an `Iterator` + * in a [[MutationTracker.CheckedIterator `CheckedIterator`]], + * or by manually using the [[MutationTracker.checkMutations() `checkMutations`]] + * and [[MutationTracker.checkMutationsForIteration() `checkMutationsForIteration`]] + * methods. + */ +private object MutationTracker { + + /** + * Checks whether or not the actual mutation count differs from + * the expected one, throwing an exception, if it does. + * + * @param expectedCount the expected mutation count + * @param actualCount the actual mutation count + * @param message the exception message in case of mutations + * @throws ConcurrentModificationException if the expected and actual + * mutation counts differ + */ + @throws[ConcurrentModificationException] + def checkMutations(expectedCount: Int, actualCount: Int, message: String): Unit = { + if (actualCount != expectedCount) throw new ConcurrentModificationException(message) + } + + /** + * Checks whether or not the actual mutation count differs from + * the expected one, throwing an exception, if it does. This method + * produces an exception message saying that it was called because a + * backing collection was mutated during iteration. + * + * @param expectedCount the expected mutation count + * @param actualCount the actual mutation count + * @throws ConcurrentModificationException if the expected and actual + * mutation counts differ + */ + @throws[ConcurrentModificationException] + @inline def checkMutationsForIteration(expectedCount: Int, actualCount: Int): Unit = + checkMutations(expectedCount, actualCount, "mutation occurred during iteration") + + /** + * An iterator wrapper that checks if the underlying collection has + * been mutated. + * + * @param underlying the underlying iterator + * @param mutationCount a by-name provider of the current mutation count + * @tparam A the type of the iterator's elements + */ + final class CheckedIterator[A](underlying: Iterator[A]^, mutationCount: => Int) extends AbstractIterator[A] { + private[this] val expectedCount = mutationCount + + def hasNext: Boolean = { + checkMutationsForIteration(expectedCount, mutationCount) + underlying.hasNext + } + def next(): A = underlying.next() + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala b/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala new file mode 100644 index 000000000000..978245d5bffb --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala @@ -0,0 +1,307 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection +package mutable + +import java.lang.Integer.numberOfLeadingZeros +import java.util.ConcurrentModificationException +import scala.collection.generic.DefaultSerializable +import language.experimental.captureChecking + +/** + * @define Coll `OpenHashMap` + * @define coll open hash map + */ +@deprecated("Use HashMap or one of the specialized versions (LongMap, AnyRefMap) instead of OpenHashMap", "2.13.0") +@SerialVersionUID(3L) +object OpenHashMap extends MapFactory[OpenHashMap] { + + def empty[K, V] = new OpenHashMap[K, V] + def from[K, V](it: IterableOnce[(K, V)]^): OpenHashMap[K,V] = empty ++= it + + def newBuilder[K, V]: Builder[(K, V), OpenHashMap[K,V]] = + new GrowableBuilder[(K, V), OpenHashMap[K, V]](empty) + + /** A hash table entry. + * + * The entry is occupied if and only if its `value` is a `Some`; + * deleted if and only if its `value` is `None`. + * If its `key` is not the default value of type `Key`, the entry is occupied. + * If the entry is occupied, `hash` contains the hash value of `key`. + */ + final private class OpenEntry[Key, Value](var key: Key, + var hash: Int, + var value: Option[Value]) + + private[mutable] def nextPositivePowerOfTwo(target: Int): Int = 1 << -numberOfLeadingZeros(target - 1) +} + +/** A mutable hash map based on an open addressing method. The precise scheme is + * undefined, but it should make a reasonable effort to ensure that an insert + * with consecutive hash codes is not unnecessarily penalised. In particular, + * mappings of consecutive integer keys should work without significant + * performance loss. + * + * @tparam Key type of the keys in this map. + * @tparam Value type of the values in this map. + * @param initialSize the initial size of the internal hash table. + * + * @define Coll `OpenHashMap` + * @define coll open hash map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@deprecated("Use HashMap or one of the specialized versions (LongMap, AnyRefMap) instead of OpenHashMap", "2.13.0") +class OpenHashMap[Key, Value](initialSize : Int) + extends AbstractMap[Key, Value] + with MapOps[Key, Value, OpenHashMap, OpenHashMap[Key, Value]] + with StrictOptimizedIterableOps[(Key, Value), Iterable, OpenHashMap[Key, Value]] + with MapFactoryDefaults[Key, Value, OpenHashMap, Iterable] + with DefaultSerializable { + + import OpenHashMap.OpenEntry + private type Entry = OpenEntry[Key, Value] + + /** A default constructor creates a hashmap with initial size `8`. + */ + def this() = this(8) + + override def mapFactory: MapFactory[OpenHashMap] = OpenHashMap + + private[this] val actualInitialSize = OpenHashMap.nextPositivePowerOfTwo(initialSize) + + private[this] var mask = actualInitialSize - 1 + + /** The hash table. + * + * The table's entries are initialized to `null`, indication of an empty slot. + * A slot is either deleted or occupied if and only if the entry is non-`null`. + */ + private[this] var table = new Array[Entry](actualInitialSize) + + private[this] var _size = 0 + private[this] var deleted = 0 + + // Used for tracking inserts so that iterators can determine if concurrent modification has occurred. + private[this] var modCount = 0 + + override def size = _size + override def knownSize: Int = size + private[this] def size_=(s : Int): Unit = _size = s + override def isEmpty: Boolean = _size == 0 + /** Returns a mangled hash code of the provided key. 
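+   *
+   *  (Editor's note: this is the bit-spreading supplemental hash familiar from
+   *  older versions of `java.util.HashMap`; it mixes the high bits into the low
+   *  ones so that masking with a power-of-two table size does not collide on
+   *  keys that differ only in their upper bits.)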
*/ + protected def hashOf(key: Key) = { + var h = key.## + h ^= ((h >>> 20) ^ (h >>> 12)) + h ^ (h >>> 7) ^ (h >>> 4) + } + + /** Increase the size of the table. + * Copy only the occupied slots, effectively eliminating the deleted slots. + */ + private[this] def growTable() = { + val oldSize = mask + 1 + val newSize = 4 * oldSize + val oldTable = table + table = new Array[Entry](newSize) + mask = newSize - 1 + oldTable.foreach( entry => + if (entry != null && entry.value != None) + table(findIndex(entry.key, entry.hash)) = entry ) + deleted = 0 + } + + /** Return the index of the first slot in the hash table (in probe order) + * that is, in order of preference, either occupied by the given key, deleted, or empty. + * + * @param hash hash value for `key` + */ + private[this] def findIndex(key: Key, hash: Int): Int = { + var index = hash & mask + var j = 0 + + // Index of the first slot containing a deleted entry, or -1 if none found yet + var firstDeletedIndex = -1 + + var entry = table(index) + while (entry != null) { + if (entry.hash == hash && entry.key == key && entry.value != None) + return index + + if (firstDeletedIndex == -1 && entry.value == None) + firstDeletedIndex = index + + j += 1 + index = (index + j) & mask + entry = table(index) + } + + if (firstDeletedIndex == -1) index else firstDeletedIndex + } + + // TODO refactor `put` to extract `findOrAddEntry` and implement this in terms of that to avoid Some boxing. + override def update(key: Key, value: Value): Unit = put(key, value) + + @deprecatedOverriding("addOne should not be overridden in order to maintain consistency with put.", "2.11.0") + def addOne (kv: (Key, Value)): this.type = { put(kv._1, kv._2); this } + + @deprecatedOverriding("subtractOne should not be overridden in order to maintain consistency with remove.", "2.11.0") + def subtractOne (key: Key): this.type = { remove(key); this } + + override def put(key: Key, value: Value): Option[Value] = + put(key, hashOf(key), value) + + private def put(key: Key, hash: Int, value: Value): Option[Value] = { + if (2 * (size + deleted) > mask) growTable() + val index = findIndex(key, hash) + val entry = table(index) + if (entry == null) { + table(index) = new OpenEntry(key, hash, Some(value)) + modCount += 1 + size += 1 + None + } else { + val res = entry.value + if (entry.value == None) { + entry.key = key + entry.hash = hash + size += 1 + deleted -= 1 + modCount += 1 + } + entry.value = Some(value) + res + } + } + + /** Delete the hash table slot contained in the given entry. */ + @`inline` + private[this] def deleteSlot(entry: Entry) = { + entry.key = null.asInstanceOf[Key] + entry.hash = 0 + entry.value = None + + size -= 1 + deleted += 1 + } + + override def remove(key : Key): Option[Value] = { + val entry = table(findIndex(key, hashOf(key))) + if (entry != null && entry.value != None) { + val res = entry.value + deleteSlot(entry) + res + } else None + } + + def get(key : Key) : Option[Value] = { + val hash = hashOf(key) + var index = hash & mask + var entry = table(index) + var j = 0 + while(entry != null){ + if (entry.hash == hash && + entry.key == key){ + return entry.value + } + + j += 1 + index = (index + j) & mask + entry = table(index) + } + None + } + + /** An iterator over the elements of this map. Use of this iterator follows + * the same contract for concurrent modification as the foreach method. 
+ * + * @return the iterator + */ + def iterator: Iterator[(Key, Value)] = new OpenHashMapIterator[(Key, Value)] { + override protected def nextResult(node: Entry): (Key, Value) = (node.key, node.value.get) + } + + override def keysIterator: Iterator[Key] = new OpenHashMapIterator[Key] { + override protected def nextResult(node: Entry): Key = node.key + } + override def valuesIterator: Iterator[Value] = new OpenHashMapIterator[Value] { + override protected def nextResult(node: Entry): Value = node.value.get + } + + private abstract class OpenHashMapIterator[A] extends AbstractIterator[A] { + private[this] var index = 0 + private[this] val initialModCount = modCount + + private[this] def advance(): Unit = { + if (initialModCount != modCount) throw new ConcurrentModificationException + while((index <= mask) && (table(index) == null || table(index).value == None)) index+=1 + } + + def hasNext = {advance(); index <= mask } + + def next() = { + advance() + val result = table(index) + index += 1 + nextResult(result) + } + protected def nextResult(node: Entry): A + } + + override def clone() = { + val it = new OpenHashMap[Key, Value] + foreachUndeletedEntry(entry => it.put(entry.key, entry.hash, entry.value.get)) + it + } + + /** Loop over the key, value mappings of this map. + * + * The behaviour of modifying the map during an iteration is as follows: + * - Deleting a mapping is always permitted. + * - Changing the value of mapping which is already present is permitted. + * - Anything else is not permitted. It will usually, but not always, throw an exception. + * + * @tparam U The return type of the specified function `f`, return result of which is ignored. + * @param f The function to apply to each key, value mapping. + */ + override def foreach[U](f : ((Key, Value)) => U): Unit = { + val startModCount = modCount + foreachUndeletedEntry(entry => { + if (modCount != startModCount) throw new ConcurrentModificationException + f((entry.key, entry.value.get))} + ) + } + override def foreachEntry[U](f : (Key, Value) => U): Unit = { + val startModCount = modCount + foreachUndeletedEntry(entry => { + if (modCount != startModCount) throw new ConcurrentModificationException + f(entry.key, entry.value.get)} + ) + } + + private[this] def foreachUndeletedEntry(f : Entry => Unit): Unit = { + table.foreach(entry => if (entry != null && entry.value != None) f(entry)) + } + + override def mapValuesInPlace(f : (Key, Value) => Value): this.type = { + foreachUndeletedEntry(entry => entry.value = Some(f(entry.key, entry.value.get))) + this + } + + override def filterInPlace(f : (Key, Value) => Boolean): this.type = { + foreachUndeletedEntry(entry => if (!f(entry.key, entry.value.get)) deleteSlot(entry)) + this + } + + override protected[this] def stringPrefix = "OpenHashMap" +} diff --git a/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala b/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala new file mode 100644 index 000000000000..ec923b641a71 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala @@ -0,0 +1,403 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection +package mutable + +import scala.collection.generic.DefaultSerializationProxy +import scala.math.Ordering +import language.experimental.captureChecking + +/** A heap-based priority queue. + * + * To prioritize elements of type `A` there must be an implicit + * `Ordering[A]` available at creation. Elements are retrieved + * in priority order by using [[dequeue]] or [[dequeueAll]]. + * + * If multiple elements have the same priority as determined by the ordering for this + * `PriorityQueue`, no guarantees are made regarding the order in which those elements + * are returned by `dequeue` or `dequeueAll`. In particular, that means this + * class does not guarantee first-in-first-out behavior, as may be + * incorrectly inferred from the fact that this data structure is + * called a "queue". + * + * Only the `dequeue` and `dequeueAll` methods will return elements in priority + * order (while removing elements from the heap). Standard collection methods + * such as `drop`, `iterator`, `toList` and `toString` use an arbitrary + * iteration order: they will traverse the heap or remove elements + * in whichever order seems most convenient. + * + * Therefore, printing a `PriorityQueue` will not show elements in priority order, + * though the highest-priority element will be printed first. + * To print the elements in order, it's necessary to `dequeue` them. + * To do this non-destructively, duplicate the `PriorityQueue` first; + * the `clone` method is a suitable way to obtain a disposable copy. + * + * Client keys are assumed to be immutable. Mutating keys may violate + * the invariant of the underlying heap-ordered tree. Note that [[clone]] + * does not rebuild the underlying tree. + * + * {{{ + * scala> val pq = collection.mutable.PriorityQueue(1, 2, 5, 3, 7) + * val pq: scala.collection.mutable.PriorityQueue[Int] = PriorityQueue(7, 3, 5, 1, 2) + * + * scala> pq.toList // also not in order + * val res0: List[Int] = List(7, 3, 5, 1, 2) + * + * scala> pq.clone.dequeueAll + * val res1: Seq[Int] = ArraySeq(7, 5, 3, 2, 1) + * }}} + * + * @tparam A type of the elements in this priority queue. + * @param ord implicit ordering used to compare the elements of type `A`. + * + * @define Coll PriorityQueue + * @define coll priority queue + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +sealed class PriorityQueue[A](implicit val ord: Ordering[A]) + extends AbstractIterable[A] + with Iterable[A] + with IterableOps[A, Iterable, PriorityQueue[A]] + with StrictOptimizedIterableOps[A, Iterable, PriorityQueue[A]] + with Builder[A, PriorityQueue[A]] + with Cloneable[PriorityQueue[A]] + with Growable[A] + with Serializable +{ + + private class ResizableArrayAccess[A0] extends ArrayBuffer[A0] { + override def mapInPlace(f: A0 => A0): this.type = { + var i = 1 // see "we do not use array(0)" comment below (???) + val siz = this.size + while (i < siz) { this(i) = f(this(i)); i += 1 } + this + } + + def p_size0 = size0 + def p_size0_=(s: Int) = size0 = s + def p_array = array + def p_ensureSize(n: Int) = super.ensureSize(n) + def p_ensureAdditionalSize(n: Int) = super.ensureAdditionalSize(n) + def p_swap(a: Int, b: Int): Unit = { + val h = array(a) + array(a) = array(b) + array(b) = h + } + } + + private val resarr = new ResizableArrayAccess[A] + + resarr.p_size0 += 1 // we do not use array(0) TODO: explain -- what is the first element even for? 
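+ // (The slot at index 0 is deliberately unused: with 1-based indexing the parent of slot k is + // k / 2 and its children are 2 * k and 2 * k + 1, which keeps the index arithmetic in + // fixUp/fixDown below simple.)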
+ def length: Int = resarr.length - 1 // adjust length accordingly + override def size: Int = length + override def knownSize: Int = length + override def isEmpty: Boolean = resarr.p_size0 < 2 + + // not eligible for EvidenceIterableFactoryDefaults since C != CC[A] (PriorityQueue[A] != Iterable[A]) + override protected def fromSpecific(coll: scala.collection.IterableOnce[A]^): PriorityQueue[A] = PriorityQueue.from(coll) + override protected def newSpecificBuilder: Builder[A, PriorityQueue[A]] = PriorityQueue.newBuilder + override def empty: PriorityQueue[A] = PriorityQueue.empty + + def mapInPlace(f: A => A): this.type = { + resarr.mapInPlace(f) + heapify(1) + this + } + + def result() = this + + private def toA(x: AnyRef): A = x.asInstanceOf[A] + protected def fixUp(as: Array[AnyRef], m: Int): Unit = { + var k: Int = m + // use `ord` directly to avoid allocating `OrderingOps` + while (k > 1 && ord.lt(toA(as(k / 2)), toA(as(k)))) { + resarr.p_swap(k, k / 2) + k = k / 2 + } + } + + protected def fixDown(as: Array[AnyRef], m: Int, n: Int): Boolean = { + // returns true if any swaps were done (used in heapify) + var k: Int = m + while (n >= 2 * k) { + var j = 2 * k + // use `ord` directly to avoid allocating `OrderingOps` + if (j < n && ord.lt(toA(as(j)), toA(as(j + 1)))) + j += 1 + if (ord.gteq(toA(as(k)), toA(as(j)))) + return k != m + else { + val h = as(k) + as(k) = as(j) + as(j) = h + k = j + } + } + k != m + } + + /** Inserts a single element into the priority queue. + * + * @param elem the element to insert. + * @return this $coll. + */ + def addOne(elem: A): this.type = { + resarr.p_ensureAdditionalSize(1) + resarr.p_array(resarr.p_size0) = elem.asInstanceOf[AnyRef] + fixUp(resarr.p_array, resarr.p_size0) + resarr.p_size0 += 1 + this + } + + override def addAll(xs: IterableOnce[A]^): this.type = { + val from = resarr.p_size0 + for (x <- xs.iterator) unsafeAdd(x) + heapify(from) + this + } + + private def unsafeAdd(elem: A): Unit = { + // like += but skips fixUp, which breaks the ordering invariant + // a series of unsafeAdds MUST be followed by heapify + resarr.p_ensureAdditionalSize(1) + resarr.p_array(resarr.p_size0) = elem.asInstanceOf[AnyRef] + resarr.p_size0 += 1 + } + + private def heapify(from: Int): Unit = { + // elements at indices 1..from-1 were already in heap order before any adds + // elements at indices from..n are newly added, their order must be fixed + val n = length + + if (from <= 2) { + // no pre-existing order to maintain, do the textbook heapify algorithm + for (i <- n/2 to 1 by -1) fixDown(resarr.p_array, i, n) + } + else if (n - from < 4) { + // for very small adds, doing the simplest fix is faster + for (i <- from to n) fixUp(resarr.p_array, i) + } + else { + var min = from/2 // tracks the minimum element in the queue + val queue = scala.collection.mutable.Queue[Int](min) + + // do fixDown on the parents of all the new elements + // except the parent of the first new element, which is in the queue + // (that parent is treated specially because it might be the root) + for (i <- n/2 until min by -1) { + if (fixDown(resarr.p_array, i, n)) { + // there was a swap, so also need to fixDown i's parent + val parent = i/2 + if (parent < min) { // make sure same parent isn't added twice + min = parent + queue += parent + } + } + } + + while (queue.nonEmpty) { + val i = queue.dequeue() + if (fixDown(resarr.p_array, i, n)) { + val parent = i/2 + if (parent < min && parent > 0) { + // the "parent > 0" is to avoid adding the parent of the root + min = parent + queue += 
parent + } + } + } + } + } + + /** Adds all elements provided by an `IterableOnce` object + * into the priority queue. + * + * @param xs an iterable object. + * @return a new priority queue containing elements of both `xs` and `this`. + */ + def ++(xs: IterableOnce[A]): PriorityQueue[A] = { this.clone() ++= xs } + + /** Adds all elements to the queue. + * + * @param elems the elements to add. + */ + def enqueue(elems: A*): Unit = { this ++= elems } + + /** Returns the element with the highest priority in the queue, + * and removes this element from the queue. + * + * @throws NoSuchElementException if the queue is empty + * @return the element with the highest priority. + */ + def dequeue(): A = + if (resarr.p_size0 > 1) { + resarr.p_size0 = resarr.p_size0 - 1 + val result = resarr.p_array(1) + resarr.p_array(1) = resarr.p_array(resarr.p_size0) + resarr.p_array(resarr.p_size0) = null // erase reference from array + fixDown(resarr.p_array, 1, resarr.p_size0 - 1) + toA(result) + } else + throw new NoSuchElementException("no element to remove from heap") + + def dequeueAll[A1 >: A]: immutable.Seq[A1] = { + val b = ArrayBuilder.make[Any] + b.sizeHint(size) + while (nonEmpty) { + b += dequeue() + } + immutable.ArraySeq.unsafeWrapArray(b.result()).asInstanceOf[immutable.ArraySeq[A1]] + } + + /** Returns the element with the highest priority in the queue, + * or throws an error if there is no element contained in the queue. + * + * @return the element with the highest priority. + */ + override def head: A = if (resarr.p_size0 > 1) toA(resarr.p_array(1)) else throw new NoSuchElementException("queue is empty") + + /** Removes all elements from the queue. After this operation is completed, + * the queue will be empty. + */ + def clear(): Unit = { + resarr.clear() + resarr.p_size0 = 1 + } + + /** Returns an iterator which yields all the elements. + * + * Note: The order of elements returned is undefined. + * If you want to traverse the elements in priority queue + * order, use `clone().dequeueAll.iterator`. + * + * @return an iterator over all the elements. + */ + override def iterator: Iterator[A] = resarr.iterator.drop(1) + + /** Returns the reverse of this priority queue. The new priority queue has + * the same elements as the original, but the opposite ordering. + * + * For example, the element with the highest priority in `pq` has the lowest + * priority in `pq.reverse`, and vice versa. + * + * Ties are handled arbitrarily. Elements with equal priority may or + * may not be reversed with respect to each other. + * + * @return the reversed priority queue. + */ + def reverse: PriorityQueue[A] = { + val revq = new PriorityQueue[A]()(ord.reverse) + // copy the existing data into the new array backwards + // this won't put it exactly into the correct order, + // but will require less fixing than copying it in + // the original order + val n = resarr.p_size0 + revq.resarr.p_ensureSize(n) + revq.resarr.p_size0 = n + val from = resarr.p_array + val to = revq.resarr.p_array + for (i <- 1 until n) to(i) = from(n-i) + revq.heapify(1) + revq + } + + + /** Returns an iterator which yields all the elements in the reverse of the order + * produced by the method `iterator`. + * + * Note: The order of elements returned is undefined. + * + * @return an iterator over all the elements.
+ */ + def reverseIterator: Iterator[A] = new AbstractIterator[A] { + private[this] var i = resarr.p_size0 - 1 + def hasNext: Boolean = i >= 1 + def next(): A = { + val n = resarr.p_array(i) + i -= 1 + toA(n) + } + } + + /** Returns a regular queue containing the same elements. + * + * Note: the order of elements is undefined. + */ + def toQueue: Queue[A] = new Queue[A] ++= this.iterator + + /** Returns a textual representation of a queue as a string. + * + * @return the string representation of this queue. + */ + override def toString() = toList.mkString("PriorityQueue(", ", ", ")") + + /** Converts this $coll to a list. + * + * Note: the order of elements is undefined. + * + * @return a list containing all elements of this $coll. + */ + override def toList: immutable.List[A] = immutable.List.from(this.iterator) + + /** This method clones the priority queue. + * + * @return a priority queue with the same elements. + */ + override def clone(): PriorityQueue[A] = { + val pq = new PriorityQueue[A] + val n = resarr.p_size0 + pq.resarr.p_ensureSize(n) + java.lang.System.arraycopy(resarr.p_array, 1, pq.resarr.p_array, 1, n-1) + pq.resarr.p_size0 = n + pq + } + + override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) + if (copied > 0) { + Array.copy(resarr.p_array, 1, xs, start, copied) + } + copied + } + + @deprecated("Use `PriorityQueue` instead", "2.13.0") + def orderedCompanion: PriorityQueue.type = PriorityQueue + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(PriorityQueue.evidenceIterableFactory[A], this) + + override protected[this] def className = "PriorityQueue" +} + + +@SerialVersionUID(3L) +object PriorityQueue extends SortedIterableFactory[PriorityQueue] { + def newBuilder[A : Ordering]: Builder[A, PriorityQueue[A]] = { + new Builder[A, PriorityQueue[A]] { + val pq = new PriorityQueue[A] + def addOne(elem: A): this.type = { pq.unsafeAdd(elem); this } + def result(): PriorityQueue[A] = { pq.heapify(1); pq } + def clear(): Unit = pq.clear() + } + } + + def empty[A : Ordering]: PriorityQueue[A] = new PriorityQueue[A] + + def from[E : Ordering](it: IterableOnce[E]^): PriorityQueue[E] = { + val b = newBuilder[E] + b ++= it + b.result() + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/Queue.scala b/tests/pos-special/stdlib/collection/mutable/Queue.scala new file mode 100644 index 000000000000..884fbab798a5 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/Queue.scala @@ -0,0 +1,139 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.annotation.nowarn +import scala.collection.generic.DefaultSerializable +import language.experimental.captureChecking + + +/** `Queue` objects implement data structures that allow inserting + * and retrieving elements in a first-in-first-out (FIFO) manner.
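+ * + * A small usage sketch: + * {{{ + * val q = Queue(1, 2) + * q.enqueue(3) + * q.dequeue() // returns 1; the queue now contains 2 and 3 + * }}}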
+ * + * @define Coll `mutable.Queue` + * @define coll mutable queue + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +class Queue[A] protected (array: Array[AnyRef], start: Int, end: Int) + extends ArrayDeque[A](array, start, end) + with IndexedSeqOps[A, Queue, Queue[A]] + with StrictOptimizedSeqOps[A, Queue, Queue[A]] + with IterableFactoryDefaults[A, Queue] + with ArrayDequeOps[A, Queue, Queue[A]] + with Cloneable[Queue[A]] + with DefaultSerializable { + + def this(initialSize: Int = ArrayDeque.DefaultInitialSize) = + this(ArrayDeque.alloc(initialSize), start = 0, end = 0) + + override def iterableFactory: SeqFactory[Queue] = Queue + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "Queue" + + /** + * Adds an element to the end of this queue + * + * @param elem the element to add + * @return this + */ + def enqueue(elem: A): this.type = this += elem + + /** Enqueue two or more elements at the end of the queue. The last element + * of the sequence will be at the end of the queue. + * + * @param elems the element sequence. + * @return this + */ + def enqueue(elem1: A, elem2: A, elems: A*): this.type = enqueue(elem1).enqueue(elem2).enqueueAll(elems) + + /** Enqueues all elements in the given iterable object into the queue. The + * last element in the iterable object will be at the end of the queue. + * + * @param elems the iterable object. + * @return this + */ + def enqueueAll(elems: scala.collection.IterableOnce[A]): this.type = this ++= elems + + /** + * Removes the first element from this queue and returns it + * + * @return the removed element + * @throws NoSuchElementException when queue is empty + */ + def dequeue(): A = removeHead() + + /** Returns the first element in the queue which satisfies the + * given predicate, and removes this element from the queue. + * + * @param p the predicate used for choosing the first element + * @return the first element of the queue for which p yields true + */ + def dequeueFirst(p: A => Boolean): Option[A] = + removeFirst(p) + + /** Returns all elements in the queue which satisfy the + * given predicate, and removes those elements from the queue. + * + * @param p the predicate used for choosing elements + * @return a sequence of all elements in the queue for which + * p yields true. + */ + def dequeueAll(p: A => Boolean): scala.collection.immutable.Seq[A] = + removeAll(p) + + /** + * Returns and dequeues all elements from the queue which satisfy the given predicate + * + * @param f the predicate used for choosing elements + * @return The removed elements + */ + def dequeueWhile(f: A => Boolean): scala.collection.Seq[A] = removeHeadWhile(f) + + /** Returns the first element in the queue, or throws an error if there + * is no element contained in the queue. + * + * @return the first element.
+ */ + @`inline` final def front: A = head + + override protected def klone(): Queue[A] = { + val bf = newSpecificBuilder + bf ++= this + bf.result() + } + + override protected def ofArray(array: Array[AnyRef], end: Int): Queue[A] = + new Queue(array, start = 0, end) + +} + +/** + * $factoryInfo + * @define coll queue + * @define Coll `Queue` + */ +@SerialVersionUID(3L) +object Queue extends StrictOptimizedSeqFactory[Queue] { + + def from[A](source: IterableOnce[A]^): Queue[A] = empty ++= source + + def empty[A]: Queue[A] = new Queue + + def newBuilder[A]: Builder[A, Queue[A]] = new GrowableBuilder[A, Queue[A]](empty) + +} diff --git a/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala b/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala new file mode 100644 index 000000000000..4e079bca3245 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala @@ -0,0 +1,653 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.mutable + +import scala.annotation.tailrec +import collection.{AbstractIterator, Iterator} +import java.lang.String +import language.experimental.captureChecking + +/** + * An object containing the red-black tree implementation used by mutable `TreeMaps`. + * + * The trees implemented in this object are *not* thread safe. + */ +private[collection] object RedBlackTree { + + // ---- class structure ---- + + // For performance reasons, this implementation uses `null` references to represent leaves instead of a sentinel node. + // Currently, the internal nodes do not store their subtree size - only the tree object keeps track of their size. + // Therefore, while obtaining the size of the whole tree is O(1), knowing the number of entries inside a range is O(n) + // on the size of the range. 
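+ // A small usage sketch (the object is private[collection], so code like this can only live + // inside the scala.collection package): + // val t = Tree.empty[Int, String] + // insert(t, 1, "one"); insert(t, 2, "two") + // get(t, 2) // Some("two"), with size(t) == 2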
+ + final class Tree[A, B](var root: Node[A, B], var size: Int) { + def treeCopy(): Tree[A, B] = new Tree(copyTree(root), size) + } + + final class Node[A, B](var key: A, var value: B, var red: Boolean, var left: Node[A, B], var right: Node[A, B], var parent: Node[A, B]) { + override def toString: String = "Node(" + key + ", " + value + ", " + red + ", " + left + ", " + right + ")" + } + + object Tree { + def empty[A, B]: Tree[A, B] = new Tree(null, 0) + } + + object Node { + + @`inline` def apply[A, B](key: A, value: B, red: Boolean, + left: Node[A, B], right: Node[A, B], parent: Node[A, B]): Node[A, B] = + new Node(key, value, red, left, right, parent) + + @`inline` def leaf[A, B](key: A, value: B, red: Boolean, parent: Node[A, B]): Node[A, B] = + new Node(key, value, red, null, null, parent) + + def unapply[A, B](t: Node[A, B]) = Some((t.key, t.value, t.left, t.right, t.parent)) + } + + // ---- getters ---- + + def isRed(node: Node[_, _]) = (node ne null) && node.red + def isBlack(node: Node[_, _]) = (node eq null) || !node.red + + // ---- size ---- + + def size(node: Node[_, _]): Int = if (node eq null) 0 else 1 + size(node.left) + size(node.right) + def size(tree: Tree[_, _]): Int = tree.size + def isEmpty(tree: Tree[_, _]) = tree.root eq null + def clear(tree: Tree[_, _]): Unit = { tree.root = null; tree.size = 0 } + + // ---- search ---- + + def get[A: Ordering, B](tree: Tree[A, B], key: A): Option[B] = getNode(tree.root, key) match { + case null => None + case node => Some(node.value) + } + + @tailrec private[this] def getNode[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = + if (node eq null) null + else { + val cmp = ord.compare(key, node.key) + if (cmp < 0) getNode(node.left, key) + else if (cmp > 0) getNode(node.right, key) + else node + } + + def contains[A: Ordering](tree: Tree[A, _], key: A): Boolean = getNode(tree.root, key) ne null + + def min[A, B](tree: Tree[A, B]): Option[(A, B)] = minNode(tree.root) match { + case null => None + case node => Some((node.key, node.value)) + } + + def minKey[A](tree: Tree[A, _]): Option[A] = minNode(tree.root) match { + case null => None + case node => Some(node.key) + } + + private def minNode[A, B](node: Node[A, B]): Node[A, B] = + if (node eq null) null else minNodeNonNull(node) + + @tailrec def minNodeNonNull[A, B](node: Node[A, B]): Node[A, B] = + if (node.left eq null) node else minNodeNonNull(node.left) + + def max[A, B](tree: Tree[A, B]): Option[(A, B)] = maxNode(tree.root) match { + case null => None + case node => Some((node.key, node.value)) + } + + def maxKey[A](tree: Tree[A, _]): Option[A] = maxNode(tree.root) match { + case null => None + case node => Some(node.key) + } + + private def maxNode[A, B](node: Node[A, B]): Node[A, B] = + if (node eq null) null else maxNodeNonNull(node) + + @tailrec def maxNodeNonNull[A, B](node: Node[A, B]): Node[A, B] = + if (node.right eq null) node else maxNodeNonNull(node.right) + + /** + * Returns the first (lowest) map entry with a key equal or greater than `key`. Returns `None` if there is no such + * node. 
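+ * + * For example, in a tree holding the entries 1 -> "a" and 3 -> "c", `minAfter(tree, 2)` yields + * `Some((3, "c"))`, while `minAfter(tree, 4)` yields `None`.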
+ */ + def minAfter[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Option[(A, B)] = + minNodeAfter(tree.root, key) match { + case null => None + case node => Some((node.key, node.value)) + } + + def minKeyAfter[A](tree: Tree[A, _], key: A)(implicit ord: Ordering[A]): Option[A] = + minNodeAfter(tree.root, key) match { + case null => None + case node => Some(node.key) + } + + private[this] def minNodeAfter[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = { + if (node eq null) null + else { + var y: Node[A, B] = null + var x = node + var cmp = 1 + while ((x ne null) && cmp != 0) { + y = x + cmp = ord.compare(key, x.key) + x = if (cmp < 0) x.left else x.right + } + if (cmp <= 0) y else successor(y) + } + } + + /** + * Returns the last (highest) map entry with a key smaller than `key`. Returns `None` if there is no such node. + */ + def maxBefore[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Option[(A, B)] = + maxNodeBefore(tree.root, key) match { + case null => None + case node => Some((node.key, node.value)) + } + + def maxKeyBefore[A](tree: Tree[A, _], key: A)(implicit ord: Ordering[A]): Option[A] = + maxNodeBefore(tree.root, key) match { + case null => None + case node => Some(node.key) + } + + private[this] def maxNodeBefore[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = { + if (node eq null) null + else { + var y: Node[A, B] = null + var x = node + var cmp = 1 + while ((x ne null) && cmp != 0) { + y = x + cmp = ord.compare(key, x.key) + x = if (cmp < 0) x.left else x.right + } + if (cmp > 0) y else predecessor(y) + } + } + + // ---- insertion ---- + + def insert[A, B](tree: Tree[A, B], key: A, value: B)(implicit ord: Ordering[A]): Unit = { + var y: Node[A, B] = null + var x = tree.root + var cmp = 1 + while ((x ne null) && cmp != 0) { + y = x + cmp = ord.compare(key, x.key) + x = if (cmp < 0) x.left else x.right + } + + if (cmp == 0) y.value = value + else { + val z = Node.leaf(key, value, red = true, y) + + if (y eq null) tree.root = z + else if (cmp < 0) y.left = z + else y.right = z + + fixAfterInsert(tree, z) + tree.size += 1 + } + } + + private[this] def fixAfterInsert[A, B](tree: Tree[A, B], node: Node[A, B]): Unit = { + var z = node + while (isRed(z.parent)) { + if (z.parent eq z.parent.parent.left) { + val y = z.parent.parent.right + if (isRed(y)) { + z.parent.red = false + y.red = false + z.parent.parent.red = true + z = z.parent.parent + } else { + if (z eq z.parent.right) { + z = z.parent + rotateLeft(tree, z) + } + z.parent.red = false + z.parent.parent.red = true + rotateRight(tree, z.parent.parent) + } + } else { // symmetric cases + val y = z.parent.parent.left + if (isRed(y)) { + z.parent.red = false + y.red = false + z.parent.parent.red = true + z = z.parent.parent + } else { + if (z eq z.parent.left) { + z = z.parent + rotateRight(tree, z) + } + z.parent.red = false + z.parent.parent.red = true + rotateLeft(tree, z.parent.parent) + } + } + } + tree.root.red = false + } + + // ---- deletion ---- + + def delete[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Unit = { + val z = getNode(tree.root, key) + if (z ne null) { + var y = z + var yIsRed = y.red + var x: Node[A, B] = null + var xParent: Node[A, B] = null + + if (z.left eq null) { + x = z.right + transplant(tree, z, z.right) + xParent = z.parent + } + else if (z.right eq null) { + x = z.left + transplant(tree, z, z.left) + xParent = z.parent + } + else { + y = minNodeNonNull(z.right) + yIsRed = y.red + x = y.right + + if (y.parent 
eq z) xParent = y + else { + xParent = y.parent + transplant(tree, y, y.right) + y.right = z.right + y.right.parent = y + } + transplant(tree, z, y) + y.left = z.left + y.left.parent = y + y.red = z.red + } + + if (!yIsRed) fixAfterDelete(tree, x, xParent) + tree.size -= 1 + } + } + + private[this] def fixAfterDelete[A, B](tree: Tree[A, B], node: Node[A, B], parent: Node[A, B]): Unit = { + var x = node + var xParent = parent + while ((x ne tree.root) && isBlack(x)) { + if (x eq xParent.left) { + var w = xParent.right + // assert(w ne null) + + if (w.red) { + w.red = false + xParent.red = true + rotateLeft(tree, xParent) + w = xParent.right + } + if (isBlack(w.left) && isBlack(w.right)) { + w.red = true + x = xParent + } else { + if (isBlack(w.right)) { + w.left.red = false + w.red = true + rotateRight(tree, w) + w = xParent.right + } + w.red = xParent.red + xParent.red = false + w.right.red = false + rotateLeft(tree, xParent) + x = tree.root + } + } else { // symmetric cases + var w = xParent.left + // assert(w ne null) + + if (w.red) { + w.red = false + xParent.red = true + rotateRight(tree, xParent) + w = xParent.left + } + if (isBlack(w.right) && isBlack(w.left)) { + w.red = true + x = xParent + } else { + if (isBlack(w.left)) { + w.right.red = false + w.red = true + rotateLeft(tree, w) + w = xParent.left + } + w.red = xParent.red + xParent.red = false + w.left.red = false + rotateRight(tree, xParent) + x = tree.root + } + } + xParent = x.parent + } + if (x ne null) x.red = false + } + + // ---- helpers ---- + + /** + * Returns the node that follows `node` in an in-order tree traversal. If `node` has the maximum key (and is, + * therefore, the last node), this method returns `null`. + */ + private[this] def successor[A, B](node: Node[A, B]): Node[A, B] = { + if (node.right ne null) minNodeNonNull(node.right) + else { + var x = node + var y = x.parent + while ((y ne null) && (x eq y.right)) { + x = y + y = y.parent + } + y + } + } + + /** + * Returns the node that precedes `node` in an in-order tree traversal. If `node` has the minimum key (and is, + * therefore, the first node), this method returns `null`. + */ + private[this] def predecessor[A, B](node: Node[A, B]): Node[A, B] = { + if (node.left ne null) maxNodeNonNull(node.left) + else { + var x = node + var y = x.parent + while ((y ne null) && (x eq y.left)) { + x = y + y = y.parent + } + y + } + } + + private[this] def rotateLeft[A, B](tree: Tree[A, B], x: Node[A, B]): Unit = if (x ne null) { + // assert(x.right ne null) + val y = x.right + x.right = y.left + + if (y.left ne null) y.left.parent = x + y.parent = x.parent + + if (x.parent eq null) tree.root = y + else if (x eq x.parent.left) x.parent.left = y + else x.parent.right = y + + y.left = x + x.parent = y + } + + private[this] def rotateRight[A, B](tree: Tree[A, B], x: Node[A, B]): Unit = if (x ne null) { + // assert(x.left ne null) + val y = x.left + x.left = y.right + + if (y.right ne null) y.right.parent = x + y.parent = x.parent + + if (x.parent eq null) tree.root = y + else if (x eq x.parent.right) x.parent.right = y + else x.parent.left = y + + y.right = x + x.parent = y + } + + /** + * Transplant the node `from` to the place of node `to`. This is done by setting `from` as a child of `to`'s previous + * parent and setting `from`'s parent to the `to`'s previous parent. The children of `from` are left unchanged. 
+ */ + private[this] def transplant[A, B](tree: Tree[A, B], to: Node[A, B], from: Node[A, B]): Unit = { + if (to.parent eq null) tree.root = from + else if (to eq to.parent.left) to.parent.left = from + else to.parent.right = from + + if (from ne null) from.parent = to.parent + } + + // ---- tree traversal ---- + + def foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = foreachNode(tree.root, f) + + private[this] def foreachNode[A, B, U](node: Node[A, B], f: ((A, B)) => U): Unit = + if (node ne null) foreachNodeNonNull(node, f) + + private[this] def foreachNodeNonNull[A, B, U](node: Node[A, B], f: ((A, B)) => U): Unit = { + if (node.left ne null) foreachNodeNonNull(node.left, f) + f((node.key, node.value)) + if (node.right ne null) foreachNodeNonNull(node.right, f) + } + + def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = { + def g(node: Node[A, _]): Unit = { + val l = node.left + if(l ne null) g(l) + f(node.key) + val r = node.right + if(r ne null) g(r) + } + val r = tree.root + if(r ne null) g(r) + } + + def foreachEntry[A, B, U](tree: Tree[A, B], f: (A, B) => U): Unit = { + def g(node: Node[A, B]): Unit = { + val l = node.left + if(l ne null) g(l) + f(node.key, node.value) + val r = node.right + if(r ne null) g(r) + } + val r = tree.root + if(r ne null) g(r) + } + + def transform[A, B](tree: Tree[A, B], f: (A, B) => B): Unit = transformNode(tree.root, f) + + private[this] def transformNode[A, B, U](node: Node[A, B], f: (A, B) => B): Unit = + if (node ne null) transformNodeNonNull(node, f) + + private[this] def transformNodeNonNull[A, B, U](node: Node[A, B], f: (A, B) => B): Unit = { + if (node.left ne null) transformNodeNonNull(node.left, f) + node.value = f(node.key, node.value) + if (node.right ne null) transformNodeNonNull(node.right, f) + } + + def iterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[(A, B)] = + new EntriesIterator(tree, start, end) + + def keysIterator[A: Ordering](tree: Tree[A, _], start: Option[A] = None, end: Option[A] = None): Iterator[A] = + new KeysIterator(tree, start, end) + + def valuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[B] = + new ValuesIterator(tree, start, end) + + private[this] abstract class TreeIterator[A, B, R](tree: Tree[A, B], start: Option[A], end: Option[A]) + (implicit ord: Ordering[A]) extends AbstractIterator[R] { + + protected def nextResult(node: Node[A, B]): R + + def hasNext: Boolean = nextNode ne null + + @throws[NoSuchElementException] + def next(): R = nextNode match { + case null => throw new NoSuchElementException("next on empty iterator") + case node => + nextNode = successor(node) + setNullIfAfterEnd() + nextResult(node) + } + + private[this] var nextNode: Node[A, B] = start match { + case None => minNode(tree.root) + case Some(from) => minNodeAfter(tree.root, from) + } + + private[this] def setNullIfAfterEnd(): Unit = + if (end.isDefined && (nextNode ne null) && ord.compare(nextNode.key, end.get) >= 0) + nextNode = null + + setNullIfAfterEnd() + } + + private[this] final class EntriesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A], end: Option[A]) + extends TreeIterator[A, B, (A, B)](tree, start, end) { + + def nextResult(node: Node[A, B]) = (node.key, node.value) + } + + private[this] final class KeysIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A], end: Option[A]) + extends TreeIterator[A, B, A](tree, start, end) { + + def nextResult(node: Node[A, B]) = node.key + } + + 
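+ // Note: EntriesIterator, KeysIterator and ValuesIterator all share TreeIterator's traversal: + // `start` selects the first node via minNodeAfter, and setNullIfAfterEnd stops the walk at the + // first key that is >= `end`, making `end` an exclusive bound. +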
private[this] final class ValuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A], end: Option[A]) + extends TreeIterator[A, B, B](tree, start, end) { + + def nextResult(node: Node[A, B]) = node.value + } + + // ---- debugging ---- + + /** + * Checks if the tree is in a valid state. That happens if: + * - It is a valid binary search tree; + * - All red-black properties are satisfied; + * - All non-null nodes have their `parent` reference correct; + * - The size variable in `tree` corresponds to the actual size of the tree. + */ + def isValid[A: Ordering, B](tree: Tree[A, B]): Boolean = + isValidBST(tree.root) && hasProperParentRefs(tree) && isValidRedBlackTree(tree) && size(tree.root) == tree.size + + /** + * Returns true if all non-null nodes have their `parent` reference correct. + */ + private[this] def hasProperParentRefs[A, B](tree: Tree[A, B]): Boolean = { + + def hasProperParentRefs(node: Node[A, B]): Boolean = { + if (node eq null) true + else { + if ((node.left ne null) && (node.left.parent ne node) || + (node.right ne null) && (node.right.parent ne node)) false + else hasProperParentRefs(node.left) && hasProperParentRefs(node.right) + } + } + + if(tree.root eq null) true + else (tree.root.parent eq null) && hasProperParentRefs(tree.root) + } + + /** + * Returns true if this node follows the properties of a binary search tree. + */ + private[this] def isValidBST[A, B](node: Node[A, B])(implicit ord: Ordering[A]): Boolean = { + if (node eq null) true + else { + if ((node.left ne null) && (ord.compare(node.key, node.left.key) <= 0) || + (node.right ne null) && (ord.compare(node.key, node.right.key) >= 0)) false + else isValidBST(node.left) && isValidBST(node.right) + } + } + + /** + * Returns true if the tree has all the red-black tree properties: if the root node is black, if all children of red + * nodes are black and if the path from any node to any of its null children has the same number of black nodes. 
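+ * For example, a red root, or a red node with a red child, fails this check, as does any pair of + * root-to-null paths that pass through different numbers of black nodes.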
+ */ + private[this] def isValidRedBlackTree[A, B](tree: Tree[A, B]): Boolean = { + + def noRedAfterRed(node: Node[A, B]): Boolean = { + if (node eq null) true + else if (node.red && (isRed(node.left) || isRed(node.right))) false + else noRedAfterRed(node.left) && noRedAfterRed(node.right) + } + + def blackHeight(node: Node[A, B]): Int = { + if (node eq null) 1 + else { + val lh = blackHeight(node.left) + val rh = blackHeight(node.right) + + if (lh == -1 || lh != rh) -1 + else if (isRed(node)) lh + else lh + 1 + } + } + + isBlack(tree.root) && noRedAfterRed(tree.root) && blackHeight(tree.root) >= 0 + } + + // building + + /** Build a Tree suitable for a TreeSet from an ordered sequence of keys */ + def fromOrderedKeys[A](xs: Iterator[A], size: Int): Tree[A, Null] = { + val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes + def f(level: Int, size: Int): Node[A, Null] = size match { + case 0 => null + case 1 => new Node(xs.next(), null, level == maxUsedDepth && level != 1, null, null, null) + case n => + val leftSize = (size-1)/2 + val left = f(level+1, leftSize) + val x = xs.next() + val right = f(level+1, size-1-leftSize) + val n = new Node(x, null, false, left, right, null) + if(left ne null) left.parent = n + right.parent = n + n + } + new Tree(f(1, size), size) + } + + /** Build a Tree suitable for a TreeMap from an ordered sequence of key/value pairs */ + def fromOrderedEntries[A, B](xs: Iterator[(A, B)], size: Int): Tree[A, B] = { + val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes + def f(level: Int, size: Int): Node[A, B] = size match { + case 0 => null + case 1 => + val (k, v) = xs.next() + new Node(k, v, level == maxUsedDepth && level != 1, null, null, null) + case n => + val leftSize = (size-1)/2 + val left = f(level+1, leftSize) + val (k, v) = xs.next() + val right = f(level+1, size-1-leftSize) + val n = new Node(k, v, false, left, right, null) + if(left ne null) left.parent = n + right.parent = n + n + } + new Tree(f(1, size), size) + } + + def copyTree[A, B](n: Node[A, B]): Node[A, B] = + if(n eq null) null else { + val c = new Node(n.key, n.value, n.red, copyTree(n.left), copyTree(n.right), null) + if(c.left != null) c.left.parent = c + if(c.right != null) c.right.parent = c + c + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala new file mode 100644 index 000000000000..246e525e37d9 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala @@ -0,0 +1,56 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import language.experimental.captureChecking + +/** `ReusableBuilder` is a marker trait that indicates that a `Builder` + * can be reused to build more than one instance of a collection. In + * particular, calling `result()` followed by `clear()` will produce a + * collection and reset the builder to begin building a new collection + * of the same type. + * + * In general no method other than `clear()` may be called after `result()`. + * It is up to subclasses to implement and to document other allowed sequences + * of operations (e.g. 
calling other methods after `result()` in order to obtain + * different snapshots of a collection under construction). + * + * @tparam Elem the type of elements that get added to the builder. + * @tparam To the type of collection that is produced. + * + * @define multipleResults + * + * This Builder can be reused after calling `result()` without an + * intermediate call to `clear()` in order to build multiple related results. + */ +trait ReusableBuilder[-Elem, +To] extends Builder[Elem, To] { + /** Clears the contents of this builder. + * After execution of this method, the builder will contain no elements. + * + * If executed immediately after a call to `result()`, this allows a new + * instance of the same type of collection to be built. + */ + override def clear(): Unit // Note: overriding for Scaladoc only! + + /** Produces a collection from the added elements. + * + * After a call to `result`, the behavior of all other methods is undefined + * save for `clear()`. If `clear()` is called, then the builder is reset and + * may be used to build another instance. + * + * @return a collection containing the elements added to this builder. + */ + override def result(): To // Note: overriding for Scaladoc only! +} diff --git a/tests/pos-special/stdlib/collection/mutable/Seq.scala b/tests/pos-special/stdlib/collection/mutable/Seq.scala new file mode 100644 index 000000000000..443eec379c1b --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/Seq.scala @@ -0,0 +1,68 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.mutable + +import scala.collection.{IterableFactoryDefaults, SeqFactory} +import language.experimental.captureChecking + +trait Seq[A] + extends Iterable[A] + with collection.Seq[A] + with SeqOps[A, Seq, Seq[A]] + with IterableFactoryDefaults[A, Seq] { + + override def iterableFactory: SeqFactory[Seq] = Seq +} + +/** + * $factoryInfo + * @define coll mutable sequence + * @define Coll `mutable.Seq` + */ +@SerialVersionUID(3L) +object Seq extends SeqFactory.Delegate[Seq](ArrayBuffer) + +/** + * @define coll mutable sequence + * @define Coll `mutable.Seq` + */ +trait SeqOps[A, +CC[_], +C <: AnyRef] + extends collection.SeqOps[A, CC, C] + with Cloneable[C] { + + override def clone(): C = { + val b = newSpecificBuilder + b ++= this + b.result() + } + + /** Replaces element at given index with a new value. + * + * @param idx the index of the element to replace. + * @param elem the new value. + * @throws IndexOutOfBoundsException if the index is not valid. + */ + @throws[IndexOutOfBoundsException] + def update(idx: Int, elem: A): Unit + + @deprecated("Use `mapInPlace` on an `IndexedSeq` instead", "2.13.0") + @`inline` final def transform(f: A => A): this.type = { + var i = 0 + val siz = size + while (i < siz) { this(i) = f(this(i)); i += 1 } + this + } +} + +/** Explicit instantiation of the `Seq` trait to reduce class file size in subclasses.
*/ +abstract class AbstractSeq[A] extends scala.collection.AbstractSeq[A] with Seq[A] diff --git a/tests/pos-special/stdlib/collection/mutable/SeqMap.scala b/tests/pos-special/stdlib/collection/mutable/SeqMap.scala new file mode 100644 index 000000000000..5740490223b2 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/SeqMap.scala @@ -0,0 +1,39 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable +import language.experimental.captureChecking + +/** + * A generic trait for ordered mutable maps. Concrete classes have to provide + * functionality for the abstract methods in `SeqMap`. + * + * Note that when checking for equality [[SeqMap]] does not take into account + * ordering. + * + * @tparam K the type of the keys contained in this linked map. + * @tparam V the type of the values associated with the keys in this linked map. + * + * @define coll mutable Seq map + * @define Coll `mutable.SeqMap` + */ + +trait SeqMap[K, V] extends Map[K, V] + with collection.SeqMap[K, V] + with MapOps[K, V, SeqMap, SeqMap[K, V]] + with MapFactoryDefaults[K, V, SeqMap, Iterable] { + override def mapFactory: MapFactory[SeqMap] = SeqMap +} + +object SeqMap extends MapFactory.Delegate[SeqMap](LinkedHashMap) diff --git a/tests/pos-special/stdlib/collection/mutable/Set.scala b/tests/pos-special/stdlib/collection/mutable/Set.scala new file mode 100644 index 000000000000..01384e993e89 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/Set.scala @@ -0,0 +1,123 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.mutable + +import scala.collection.{IterableFactory, IterableFactoryDefaults, IterableOps} +import language.experimental.captureChecking + +/** Base trait for mutable sets */ +trait Set[A] + extends Iterable[A] + with collection.Set[A] + with SetOps[A, Set, Set[A]] + with IterableFactoryDefaults[A, Set] { + + override def iterableFactory: IterableFactory[Set] = Set +} + +/** + * @define coll mutable set + * @define Coll `mutable.Set` + */ +trait SetOps[A, +CC[X], +C <: SetOps[A, CC, C]] + extends collection.SetOps[A, CC, C] + with IterableOps[A, CC, C] // only needed so we can use super[IterableOps] below + with Cloneable[C] + with Builder[A, C] + with Growable[A] + with Shrinkable[A] { + + def result(): C = coll + + /** Check whether the set contains the given element, and add it if not. + * + * @param elem the element to be added + * @return true if the element was added + */ + def add(elem: A): Boolean = + !contains(elem) && { + coll += elem; true + } + + /** Updates the presence of a single element in this set. + * + * This method allows one to add or remove an element `elem` + * from this set depending on the value of parameter `included`. 
+ * Typically, one would use the following syntax: + * {{{ + * set(elem) = true // adds element + * set(elem) = false // removes element + * }}} + * + * @param elem the element to be added or removed + * @param included a flag indicating whether element should be included or excluded. + */ + def update(elem: A, included: Boolean): Unit = { + if (included) add(elem) + else remove(elem) + } + + /** Removes an element from this set. + * + * @param elem the element to be removed + * @return true if this set contained the element before it was removed + */ + def remove(elem: A): Boolean = { + val res = contains(elem) + coll -= elem + res + } + + def diff(that: collection.Set[A]): C = + foldLeft(empty)((result, elem) => if (that contains elem) result else result += elem) + + @deprecated("Use filterInPlace instead", "2.13.0") + @inline final def retain(p: A => Boolean): Unit = filterInPlace(p) + + /** Removes all elements from the set which do not satisfy a predicate. + * @param p the predicate used to test elements. Only elements for + * which `p` returns `true` are retained in the set; all others + * are removed. + */ + def filterInPlace(p: A => Boolean): this.type = { + if (nonEmpty) { + val array = this.toArray[Any] // scala/bug#7269 toArray avoids ConcurrentModificationException + val arrayLength = array.length + var i = 0 + while (i < arrayLength) { + val elem = array(i).asInstanceOf[A] + if (!p(elem)) { + this -= elem + } + i += 1 + } + } + this + } + + override def clone(): C = empty ++= this + + override def knownSize: Int = super[IterableOps].knownSize +} + +/** + * $factoryInfo + * @define coll mutable set + * @define Coll `mutable.Set` + */ +@SerialVersionUID(3L) +object Set extends IterableFactory.Delegate[Set](HashSet) + + +/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. */ +abstract class AbstractSet[A] extends scala.collection.AbstractSet[A] with Set[A] diff --git a/tests/pos-special/stdlib/collection/mutable/Shrinkable.scala b/tests/pos-special/stdlib/collection/mutable/Shrinkable.scala new file mode 100644 index 000000000000..de2a24ecf01f --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/Shrinkable.scala @@ -0,0 +1,80 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.mutable + +import scala.annotation.tailrec +import language.experimental.captureChecking + +/** This trait forms part of collections that can be reduced + * using a `-=` operator. + * + * @define coll shrinkable collection + * @define Coll `Shrinkable` + */ +trait Shrinkable[-A] { + + /** Removes a single element from this $coll. + * + * @param elem the element to remove. + * @return the $coll itself + */ + def subtractOne(elem: A): this.type + + /** Alias for `subtractOne` */ + @`inline` final def -= (elem: A): this.type = subtractOne(elem) + + /** Removes two or more elements from this $coll. + * + * @param elem1 the first element to remove. + * @param elem2 the second element to remove. + * @param elems the remaining elements to remove.
+ * @return the $coll itself + */ + @deprecated("Use `--=` aka `subtractAll` instead of varargs `-=`; infix operations with an operand of multiple args will be deprecated", "2.13.3") + def -= (elem1: A, elem2: A, elems: A*): this.type = { + this -= elem1 + this -= elem2 + this --= elems + } + + /** Removes all elements produced by an iterator from this $coll. + * + * @param xs the iterator producing the elements to remove. + * @return the $coll itself + */ + def subtractAll(xs: collection.IterableOnce[A]^): this.type = { + @tailrec def loop(xs: collection.LinearSeq[A]): Unit = { + if (xs.nonEmpty) { + subtractOne(xs.head) + loop(xs.tail) + } + } + if (xs.asInstanceOf[AnyRef] eq this) { // avoid mutating under our own iterator + xs match { + case xs: Clearable => xs.clear() + case xs => subtractAll(Buffer.from(xs)) + } + } else { + xs match { + case xs: collection.LinearSeq[A] => loop(xs) + case xs => xs.iterator.foreach(subtractOne) + } + } + this + } + + /** Alias for `subtractAll` */ + @`inline` final def --= (xs: collection.IterableOnce[A]^): this.type = subtractAll(xs) + +} diff --git a/tests/pos-special/stdlib/collection/mutable/SortedMap.scala b/tests/pos-special/stdlib/collection/mutable/SortedMap.scala new file mode 100644 index 000000000000..8017177f5720 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/SortedMap.scala @@ -0,0 +1,104 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.mutable + +import scala.collection.{SortedMapFactory, SortedMapFactoryDefaults} +import language.experimental.captureChecking + +/** + * Base type for mutable sorted map collections + */ +trait SortedMap[K, V] + extends collection.SortedMap[K, V] + with Map[K, V] + with SortedMapOps[K, V, SortedMap, SortedMap[K, V]] + with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map] { + + override def unsorted: Map[K, V] = this + + override def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap + + /** The same sorted map with a given default function. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefault`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d the function mapping keys to values, used for non-present keys + * @return a wrapper of the map with a default value + */ + override def withDefault(d: K -> V): SortedMap[K, V] = new SortedMap.WithDefault[K, V](this, d) + + /** The same map with a given default value. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefaultValue`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. 
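+ * + * A small usage sketch: + * {{{ + * val m = SortedMap(1 -> "a").withDefaultValue("?") + * m(2) // "?" + * m.get(2) // None, since only `apply` consults the default + * }}}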
+ * + * @param d default value used for non-present keys + * @return a wrapper of the map with a default value + */ + override def withDefaultValue(d: V): SortedMap[K, V] = new SortedMap.WithDefault[K, V](this, _ => d) +} + +trait SortedMapOps[K, V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends collection.SortedMapOps[K, V, CC, C] + with MapOps[K, V, Map, C] { + + def unsorted: Map[K, V] + + @deprecated("Use m.clone().addOne((k,v)) instead of m.updated(k, v)", "2.13.0") + override def updated[V1 >: V](key: K, value: V1): CC[K, V1] = + clone().asInstanceOf[CC[K, V1]].addOne((key, value)) +} + +@SerialVersionUID(3L) +object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) { + + @SerialVersionUID(3L) + final class WithDefault[K, V](underlying: SortedMap[K, V], defaultValue: K -> V) + extends Map.WithDefault[K, V](underlying, defaultValue) + with SortedMap[K, V] + with SortedMapOps[K, V, SortedMap, WithDefault[K, V]] + with Serializable { + + override def sortedMapFactory: SortedMapFactory[SortedMap] = underlying.sortedMapFactory + + def iteratorFrom(start: K): scala.collection.Iterator[(K, V)] = underlying.iteratorFrom(start) + + def keysIteratorFrom(start: K): scala.collection.Iterator[K] = underlying.keysIteratorFrom(start) + + implicit def ordering: Ordering[K] = underlying.ordering + + def rangeImpl(from: Option[K], until: Option[K]): WithDefault[K, V] = + new WithDefault[K, V](underlying.rangeImpl(from, until), defaultValue) + + // Need to override following methods to match type signatures of `SortedMap.WithDefault` + // for operations preserving default value + override def subtractOne(elem: K): WithDefault.this.type = { underlying.subtractOne(elem); this } + + override def addOne(elem: (K, V)): WithDefault.this.type = { underlying.addOne(elem); this } + + override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) + + override def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]^): SortedMap[K, V2] = + underlying.concat(suffix).withDefault(defaultValue) + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]^): WithDefault[K, V] = + new WithDefault[K, V](sortedMapFactory.from(coll), defaultValue) + + override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] = + SortedMap.newBuilder.mapResult((p: SortedMap[K, V]) => new WithDefault[K, V](p, defaultValue)) + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/SortedSet.scala b/tests/pos-special/stdlib/collection/mutable/SortedSet.scala new file mode 100644 index 000000000000..e657fb749d7d --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/SortedSet.scala @@ -0,0 +1,49 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package mutable +import language.experimental.captureChecking + +/** + * Base type for mutable sorted set collections + */ +trait SortedSet[A] + extends Set[A] + with collection.SortedSet[A] + with SortedSetOps[A, SortedSet, SortedSet[A]] + with SortedSetFactoryDefaults[A, SortedSet, Set] { + + override def unsorted: Set[A] = this + + override def sortedIterableFactory: SortedIterableFactory[SortedSet] = SortedSet +} + +/** + * @define coll mutable sorted set + * @define Coll `mutable.SortedSet` + */ +trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] + extends SetOps[A, Set, C] + with collection.SortedSetOps[A, CC, C] { + + def unsorted: Set[A] +} + +/** + * $factoryInfo + * @define coll mutable sorted set + * @define Coll `mutable.SortedSet` + */ +@SerialVersionUID(3L) +object SortedSet extends SortedIterableFactory.Delegate[SortedSet](TreeSet) diff --git a/tests/pos-special/stdlib/collection/mutable/Stack.scala b/tests/pos-special/stdlib/collection/mutable/Stack.scala new file mode 100644 index 000000000000..0292811d2020 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/Stack.scala @@ -0,0 +1,144 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.mutable + +import scala.annotation.{migration, nowarn} +import scala.collection.generic.DefaultSerializable +import scala.collection.{IterableFactoryDefaults, IterableOnce, SeqFactory, StrictOptimizedSeqFactory, StrictOptimizedSeqOps} + +import language.experimental.captureChecking + +/** A stack implements a data structure which allows storing and retrieving + * objects in a last-in-first-out (LIFO) fashion. + * + * Note that operations which consume and produce iterables preserve order, + * rather than reversing it (as would be expected from building a new stack + * by pushing an element at a time). + * + * @tparam A type of the elements contained in this stack. + * + * @define Coll `Stack` + * @define coll stack + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@migration("Stack is now based on an ArrayDeque instead of a linked list", "2.13.0") +class Stack[A] protected (array: Array[AnyRef], start: Int, end: Int) + extends ArrayDeque[A](array, start, end) + with IndexedSeqOps[A, Stack, Stack[A]] + with StrictOptimizedSeqOps[A, Stack, Stack[A]] + with IterableFactoryDefaults[A, Stack] + with ArrayDequeOps[A, Stack, Stack[A]] + with Cloneable[Stack[A]] + with DefaultSerializable { + + def this(initialSize: Int = ArrayDeque.DefaultInitialSize) = + this(ArrayDeque.alloc(initialSize), start = 0, end = 0) + + override def iterableFactory: SeqFactory[Stack] = Stack + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "Stack" + + /** + * Adds an element to the top of this stack + * + * @param elem the element to push + * @return this + */ + def push(elem: A): this.type = prepend(elem) + + /** Push two or more elements onto the stack. The last element + * of the sequence will be on top of the new stack. + * + * @param elems the element sequence. + * @return the stack with the new elements on top.
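+ * + * A small usage sketch: + * {{{ + * val s = Stack(1) + * s.push(2, 3) + * s.top // 3 + * }}}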
+   */
+  def push(elem1: A, elem2: A, elems: A*): this.type = {
+    val k = elems.knownSize
+    ensureSize(length + (if(k >= 0) k + 2 else 3))
+    prepend(elem1).prepend(elem2).pushAll(elems)
+  }
+
+  /** Push all elements in the given iterable object onto the stack. The
+   * last element in the iterable object will be on top of the new stack.
+   *
+   * @param elems the iterable object.
+   * @return the stack with the new elements on top.
+   */
+  def pushAll(elems: scala.collection.IterableOnce[A]): this.type =
+    prependAll(elems match {
+      case it: scala.collection.Seq[A] => it.view.reverse
+      case it => IndexedSeq.from(it).view.reverse
+    })
+
+  /**
+   * Removes the top element from this stack and returns it.
+   *
+   * @return the removed element
+   * @throws NoSuchElementException when stack is empty
+   */
+  def pop(): A = removeHead()
+
+  /**
+   * Pops all elements from this stack and returns them.
+   *
+   * @return The removed elements
+   */
+  def popAll(): scala.collection.Seq[A] = removeAll()
+
+  /**
+   * Returns and removes all elements from the top of this stack which satisfy the given predicate.
+   *
+   * @param f the predicate used for choosing elements
+   * @return The removed elements
+   */
+  def popWhile(f: A => Boolean): scala.collection.Seq[A] = removeHeadWhile(f)
+
+  /** Returns the top element of the stack. This method will not remove
+   * the element from the stack. An error is signaled if there is no
+   * element on the stack.
+   *
+   * @throws NoSuchElementException if the stack is empty
+   * @return the top element
+   */
+  @`inline` final def top: A = head
+
+  override protected def klone(): Stack[A] = {
+    val bf = newSpecificBuilder
+    bf ++= this
+    bf.result()
+  }
+
+  override protected def ofArray(array: Array[AnyRef], end: Int): Stack[A] =
+    new Stack(array, start = 0, end)
+
+}
+
+/**
+ * $factoryInfo
+ * @define coll stack
+ * @define Coll `Stack`
+ */
+@SerialVersionUID(3L)
+object Stack extends StrictOptimizedSeqFactory[Stack] {
+
+  def from[A](source: IterableOnce[A]^): Stack[A] = empty ++= source
+
+  def empty[A]: Stack[A] = new Stack
+
+  def newBuilder[A]: Builder[A, Stack[A]] = new GrowableBuilder[A, Stack[A]](empty)
+
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala b/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala
new file mode 100644
index 000000000000..c7859214821d
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala
@@ -0,0 +1,496 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.mutable
+
+import scala.collection.{IterableFactoryDefaults, IterableOnce}
+import scala.collection.immutable.WrappedString
+import language.experimental.captureChecking
+
+import scala.Predef.{ // unimport char-related implicit conversions to avoid triggering them accidentally
+  genericArrayOps => _,
+  charArrayOps => _,
+  genericWrapArray => _,
+  wrapCharArray => _,
+  wrapString => _,
+  //_
+}
+
+/** A builder of `String` which is also a mutable sequence of characters.
+ *
+ * This class provides an API mostly compatible with `java.lang.StringBuilder`,
+ * except where there are conflicts with the Scala collections API, such as the `reverse` method:
+ * [[reverse]] produces a new `StringBuilder`, and [[reverseInPlace]] mutates this builder.
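+ *
+ * For example, a small sketch of the two `reverse` variants:
+ * {{{
+ * val b = new StringBuilder("abc")
+ * val r = b.reverse    // r.toString == "cba"; b is unchanged
+ * b.reverseInPlace()   // b.toString == "cba"
+ * }}}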
+ * + * Mutating operations return either `this.type`, i.e., the current builder, or `Unit`. + * + * Other methods extract data or information from the builder without mutating it. + * + * The distinction is also reflected in naming conventions used by collections, + * such as `append`, which mutates, and `appended`, which does not, or `reverse`, + * which does not mutate, and `reverseInPlace`, which does. + * + * The `String` result may be obtained using either `result()` or `toString`. + * + * $multipleResults + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#stringbuilders "Scala's Collection Library overview"]] + * section on `StringBuilders` for more information. + * + * @define Coll `mutable.IndexedSeq` + * @define coll string builder + */ +@SerialVersionUID(3L) +final class StringBuilder(val underlying: java.lang.StringBuilder) extends AbstractSeq[Char] + with ReusableBuilder[Char, String] + with IndexedSeq[Char] + with IndexedSeqOps[Char, IndexedSeq, StringBuilder] + with IterableFactoryDefaults[Char, IndexedSeq] + with java.lang.CharSequence + with Serializable { + + def this() = this(new java.lang.StringBuilder) + + /** Constructs a string builder with no characters in it and an + * initial capacity specified by the `capacity` argument. + * + * @param capacity the initial capacity. + * @throws java.lang.NegativeArraySizeException if capacity < 0. + */ + def this(capacity: Int) = this(new java.lang.StringBuilder(capacity)) + + /** Constructs a string builder with initial characters + * equal to characters of `str`. + */ + def this(str: String) = this(new java.lang.StringBuilder(str)) + + /** Constructs a string builder initialized with string value `initValue` + * and with additional character capacity `initCapacity`. + */ + def this(initCapacity: Int, initValue: String) = + this(new java.lang.StringBuilder(initValue.length + initCapacity) append initValue) + + // Methods required to make this an IndexedSeq: + def apply(i: Int): Char = underlying.charAt(i) + + override protected def fromSpecific(coll: scala.collection.IterableOnce[Char]^): StringBuilder = + new StringBuilder() appendAll coll + + override protected def newSpecificBuilder: Builder[Char, StringBuilder] = + new GrowableBuilder(new StringBuilder()) + + override def empty: StringBuilder = new StringBuilder() + + @inline def length: Int = underlying.length + + def length_=(n: Int): Unit = underlying.setLength(n) + + override def knownSize: Int = super[IndexedSeqOps].knownSize + + def addOne(x: Char): this.type = { underlying.append(x); this } + + def clear(): Unit = underlying.setLength(0) + + /** Overloaded version of `addAll` that takes a string */ + def addAll(s: String): this.type = { underlying.append(s); this } + + /** Alias for `addAll` */ + def ++= (s: String): this.type = addAll(s) + + def result() = underlying.toString + + override def toString: String = result() + + override def toArray[B >: Char](implicit ct: scala.reflect.ClassTag[B]) = + ct.runtimeClass match { + case java.lang.Character.TYPE => toCharArray.asInstanceOf[Array[B]] + case _ => super.toArray + } + + /** Returns the contents of this StringBuilder as an `Array[Char]`. + * + * @return An array with the characters from this builder. 
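+   *
+   * For example:
+   * {{{
+   * new StringBuilder("ab").toCharArray   // Array('a', 'b')
+   * }}}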
+   */
+  def toCharArray: Array[Char] = {
+    val len = underlying.length
+    val arr = new Array[Char](len)
+    underlying.getChars(0, len, arr, 0)
+    arr
+  }
+
+  // append* methods delegate to the underlying java.lang.StringBuilder:
+
+  def appendAll(xs: String): this.type = {
+    underlying append xs
+    this
+  }
+
+  /** Appends the string representation of the given argument,
+   * which is converted to a String with `String.valueOf`.
+   *
+   * @param x an `Any` object.
+   * @return this StringBuilder.
+   */
+  def append(x: Any): this.type = {
+    underlying append String.valueOf(x)
+    this
+  }
+
+  /** Appends the given String to this sequence.
+   *
+   * @param s a String.
+   * @return this StringBuilder.
+   */
+  def append(s: String): this.type = {
+    underlying append s
+    this
+  }
+
+  /** Appends the given CharSequence to this sequence.
+   *
+   * @param cs a CharSequence.
+   * @return this StringBuilder.
+   */
+  def append(cs: java.lang.CharSequence): this.type = {
+    underlying.append(cs match {
+      // Both cases call into append(), but java SB
+      // looks up type at runtime and has fast path for SB.
+      case s: StringBuilder => s.underlying
+      case _ => cs
+    })
+    this
+  }
+
+  /** Appends the specified string builder to this sequence.
+   *
+   * @param s the StringBuilder to append.
+   * @return this StringBuilder.
+   */
+  def append(s: StringBuilder): this.type = {
+    underlying append s.underlying
+    this
+  }
+
+  /** Appends all the Chars in the given IterableOnce[Char] to this sequence.
+   *
+   * @param xs the characters to be appended.
+   * @return this StringBuilder.
+   */
+  def appendAll(xs: IterableOnce[Char]^): this.type = {
+    xs match {
+      case x: WrappedString => underlying append x.unwrap
+      case x: ArraySeq.ofChar => underlying append x.array
+      case x: StringBuilder => underlying append x.underlying
+      case _ =>
+        val ks = xs.knownSize
+        if (ks != 0) {
+          val b = underlying
+          if (ks > 0) b.ensureCapacity(b.length + ks)
+          val it = xs.iterator
+          while (it.hasNext) { b append it.next() }
+        }
+    }
+    this
+  }
+
+  /** Appends all the Chars in the given Array[Char] to this sequence.
+   *
+   * @param xs the characters to be appended.
+   * @return a reference to this object.
+   */
+  def appendAll(xs: Array[Char]): this.type = {
+    underlying append xs
+    this
+  }
+
+  /** Appends a portion of the given Array[Char] to this sequence.
+   *
+   * @param xs the Array containing Chars to be appended.
+   * @param offset the index of the first Char to append.
+   * @param len the number of Chars to append.
+   * @return this StringBuilder.
+   */
+  def appendAll(xs: Array[Char], offset: Int, len: Int): this.type = {
+    underlying.append(xs, offset, len)
+    this
+  }
+
+  /** Appends the String representation of the given primitive type
+   * to this sequence. The argument is converted to a String with
+   * String.valueOf.
+   *
+   * @param x a primitive value
+   * @return This StringBuilder.
+   */
+  def append(x: Boolean): this.type = { underlying append x ; this }
+  def append(x: Byte): this.type = append(x.toInt)
+  def append(x: Short): this.type = append(x.toInt)
+  def append(x: Int): this.type = { underlying append x ; this }
+  def append(x: Long): this.type = { underlying append x ; this }
+  def append(x: Float): this.type = { underlying append x ; this }
+  def append(x: Double): this.type = { underlying append x ; this }
+  def append(x: Char): this.type = { underlying append x ; this }
+
+  /** Removes a subsequence of Chars from this sequence, starting at the
+   * given start index (inclusive) and extending to the end index (exclusive)
+   * or to the end of the String, whichever comes first.
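+   *
+   * For example:
+   * {{{
+   * val b = new StringBuilder("abcdef")
+   * b.delete(1, 3)   // b.toString == "adef"
+   * }}}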
+ * + * @param start The beginning index, inclusive. + * @param end The ending index, exclusive. + * @return This StringBuilder. + * @throws StringIndexOutOfBoundsException if start < 0 || start > end + */ + def delete(start: Int, end: Int): this.type = { + underlying.delete(start, end) + this + } + + /** Replaces a subsequence of Chars with the given String. The semantics + * are as in delete, with the String argument then inserted at index 'start'. + * + * @param start The beginning index, inclusive. + * @param end The ending index, exclusive. + * @param str The String to be inserted at the start index. + * @return This StringBuilder. + * @throws StringIndexOutOfBoundsException if start < 0, start > length, or start > end + */ + def replace(start: Int, end: Int, str: String): this.type = { + underlying.replace(start, end, str) + this + } + + /** Inserts a subarray of the given Array[Char] at the given index + * of this sequence. + * + * @param index index at which to insert the subarray. + * @param str the Array from which Chars will be taken. + * @param offset the index of the first Char to insert. + * @param len the number of Chars from 'str' to insert. + * @return This StringBuilder. + * + * @throws StringIndexOutOfBoundsException if index < 0, index > length, + * offset < 0, len < 0, or (offset + len) > str.length. + */ + def insertAll(index: Int, str: Array[Char], offset: Int, len: Int): this.type = { + underlying.insert(index, str, offset, len) + this + } + + /** Inserts the String representation (via String.valueOf) of the given + * argument into this sequence at the given index. + * + * @param index the index at which to insert. + * @param x a value. + * @return this StringBuilder. + * @throws StringIndexOutOfBoundsException if the index is out of bounds. + */ + def insert(index: Int, x: Any): this.type = insert(index, String.valueOf(x)) + + /** Inserts the String into this character sequence. + * + * @param index the index at which to insert. + * @param x a String. + * @return this StringBuilder. + * @throws StringIndexOutOfBoundsException if the index is out of bounds. + */ + def insert(index: Int, x: String): this.type = { + underlying.insert(index, x) + this + } + + /** Inserts the given Seq[Char] into this sequence at the given index. + * + * @param index the index at which to insert. + * @param xs the Seq[Char]. + * @return this StringBuilder. + * @throws StringIndexOutOfBoundsException if the index is out of bounds. + */ + def insertAll(index: Int, xs: IterableOnce[Char]^): this.type = + insertAll(index, (ArrayBuilder.make[Char] ++= xs).result()) + + /** Inserts the given Array[Char] into this sequence at the given index. + * + * @param index the index at which to insert. + * @param xs the Array[Char]. + * @return this StringBuilder. + * @throws StringIndexOutOfBoundsException if the index is out of bounds. + */ + def insertAll(index: Int, xs: Array[Char]): this.type = { + underlying.insert(index, xs) + this + } + + /** Calls String.valueOf on the given primitive value, and inserts the + * String at the given index. + * + * @param index the offset position. + * @param x a primitive value. + * @return this StringBuilder. 
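+ *
+ * For example:
+ * {{{
+ * val b = new StringBuilder("ab")
+ * b.insert(1, 42)   // b.toString == "a42b"
+ * }}}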
+ */ + def insert(index: Int, x: Boolean): this.type = insert(index, String.valueOf(x)) + def insert(index: Int, x: Byte): this.type = insert(index, x.toInt) + def insert(index: Int, x: Short): this.type = insert(index, x.toInt) + def insert(index: Int, x: Int): this.type = insert(index, String.valueOf(x)) + def insert(index: Int, x: Long): this.type = insert(index, String.valueOf(x)) + def insert(index: Int, x: Float): this.type = insert(index, String.valueOf(x)) + def insert(index: Int, x: Double): this.type = insert(index, String.valueOf(x)) + def insert(index: Int, x: Char): this.type = insert(index, String.valueOf(x)) + + /** Sets the length of the character sequence. If the current sequence + * is shorter than the given length, it is padded with nulls; if it is + * longer, it is truncated. + * + * @param len the new length + * @throws IndexOutOfBoundsException if the argument is negative. + */ + def setLength(len: Int): Unit = underlying.setLength(len) + + def update(idx: Int, elem: Char): Unit = underlying.setCharAt(idx, elem) + + + /** Like reverse, but destructively updates the target StringBuilder. + * + * @return the reversed StringBuilder (same as the target StringBuilder) + */ + @deprecated("Use reverseInPlace instead", "2.13.0") + final def reverseContents(): this.type = reverseInPlace() + + /** Like reverse, but destructively updates the target StringBuilder. + * + * @return the reversed StringBuilder (same as the target StringBuilder) + */ + def reverseInPlace(): this.type = { + underlying.reverse() + this + } + + + /** Returns the current capacity, which is the size of the underlying array. + * A new array will be allocated if the current capacity is exceeded. + * + * @return the capacity + */ + def capacity: Int = underlying.capacity + + /** Ensure that the capacity is at least the given argument. + * If the argument is greater than the current capacity, new + * storage will be allocated with size equal to the given + * argument or to `(2 * capacity + 2)`, whichever is larger. + * + * @param newCapacity the minimum desired capacity. + */ + def ensureCapacity(newCapacity: Int): Unit = { underlying.ensureCapacity(newCapacity) } + + /** Returns the Char at the specified index, counting from 0 as in Arrays. + * + * @param index the index to look up + * @return the Char at the given index. + * @throws IndexOutOfBoundsException if the index is out of bounds. + */ + def charAt(index: Int): Char = underlying.charAt(index) + + /** Removes the Char at the specified index. The sequence is + * shortened by one. + * + * @param index The index to remove. + * @return This StringBuilder. + * @throws IndexOutOfBoundsException if the index is out of bounds. + */ + def deleteCharAt(index: Int): this.type = { + underlying.deleteCharAt(index) + this + } + + /** Update the sequence at the given index to hold the specified Char. + * + * @param index the index to modify. + * @param ch the new Char. + * @throws IndexOutOfBoundsException if the index is out of bounds. + */ + def setCharAt(index: Int, ch: Char): this.type = { + underlying.setCharAt(index, ch) + this + } + + /** Returns a new String made up of a subsequence of this sequence, + * beginning at the given index and extending to the end of the sequence. + * + * target.substring(start) is equivalent to target.drop(start) + * + * @param start The starting index, inclusive. + * @return The new String. + * @throws IndexOutOfBoundsException if the index is out of bounds. 
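+   *
+   * For example:
+   * {{{
+   * new StringBuilder("hello").substring(2)   // "llo"
+   * }}}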
+   */
+  def substring(start: Int): String = underlying.substring(start, length)
+
+  /** Returns a new String made up of a subsequence of this sequence,
+   * beginning at the start index (inclusive) and extending to the
+   * end index (exclusive).
+   *
+   * target.substring(start, end) is equivalent to target.slice(start, end).mkString
+   *
+   * @param start The beginning index, inclusive.
+   * @param end The ending index, exclusive.
+   * @return The new String.
+   * @throws StringIndexOutOfBoundsException If either index is out of bounds,
+   *         or if start > end.
+   */
+  def substring(start: Int, end: Int): String = underlying.substring(start, end)
+
+  /** For implementing CharSequence.
+   */
+  def subSequence(start: Int, end: Int): java.lang.CharSequence =
+    underlying.substring(start, end)
+
+  /** Finds the index of the first occurrence of the specified substring.
+   *
+   * @param str the target string to search for
+   * @return the first applicable index where target occurs, or -1 if not found.
+   */
+  def indexOf(str: String): Int = underlying.indexOf(str)
+
+  /** Finds the index of the first occurrence of the specified substring.
+   *
+   * @param str the target string to search for
+   * @param fromIndex the smallest index in the source string to consider
+   * @return the first applicable index where target occurs, or -1 if not found.
+   */
+  def indexOf(str: String, fromIndex: Int): Int = underlying.indexOf(str, fromIndex)
+
+  /** Finds the index of the last occurrence of the specified substring.
+   *
+   * @param str the target string to search for
+   * @return the last applicable index where target occurs, or -1 if not found.
+   */
+  def lastIndexOf(str: String): Int = underlying.lastIndexOf(str)
+
+  /** Finds the index of the last occurrence of the specified substring,
+   * searching backwards.
+   *
+   * @param str the target string to search for
+   * @param fromIndex the highest index in the source string to consider
+   * @return the last applicable index where target occurs, or -1 if not found.
+   */
+  def lastIndexOf(str: String, fromIndex: Int): Int = underlying.lastIndexOf(str, fromIndex)
+
+  /** Tests whether this builder is empty.
+   *
+   * This method is required for JDK15+ compatibility.
+   *
+   * @return `true` if this builder contains nothing, `false` otherwise.
+   */
+  override def isEmpty: Boolean = underlying.length() == 0
+}
+
+object StringBuilder {
+  @deprecated("Use `new StringBuilder()` instead of `StringBuilder.newBuilder`", "2.13.0")
+  def newBuilder = new StringBuilder
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/TreeMap.scala b/tests/pos-special/stdlib/collection/mutable/TreeMap.scala
new file mode 100644
index 000000000000..dbb87a950d74
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/TreeMap.scala
@@ -0,0 +1,258 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package mutable
+
+import scala.collection.Stepper.EfficientSplit
+import scala.collection.generic.DefaultSerializable
+import scala.collection.mutable.{RedBlackTree => RB}
+import language.experimental.captureChecking
+
+/**
+ * A mutable sorted map implemented using a mutable red-black tree as the underlying data structure.
+ *
+ * @param ordering the implicit ordering used to compare objects of type `K`.
+ * @tparam K the type of the keys contained in this tree map. + * @tparam V the type of the values associated with the keys. + * + * @define Coll mutable.TreeMap + * @define coll mutable tree map + */ +sealed class TreeMap[K, V] private (tree: RB.Tree[K, V])(implicit val ordering: Ordering[K]) + extends AbstractMap[K, V] + with SortedMap[K, V] + with SortedMapOps[K, V, TreeMap, TreeMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, TreeMap[K, V]] + with StrictOptimizedMapOps[K, V, Map, TreeMap[K, V]] + with StrictOptimizedSortedMapOps[K, V, TreeMap, TreeMap[K, V]] + with SortedMapFactoryDefaults[K, V, TreeMap, Iterable, Map] + with DefaultSerializable { + + override def sortedMapFactory = TreeMap + + /** + * Creates an empty `TreeMap`. + * @param ord the implicit ordering used to compare objects of type `K`. + * @return an empty `TreeMap`. + */ + def this()(implicit ord: Ordering[K]) = this(RB.Tree.empty)(ord) + + def iterator: Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else RB.iterator(tree) + } + + override def keysIterator: Iterator[K] = { + if (isEmpty) Iterator.empty + else RB.keysIterator(tree, None) + } + + override def valuesIterator: Iterator[V] = { + if (isEmpty) Iterator.empty + else RB.valuesIterator(tree, None) + } + + def keysIteratorFrom(start: K): Iterator[K] = { + if (isEmpty) Iterator.empty + else RB.keysIterator(tree, Some(start)) + } + + def iteratorFrom(start: K): Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else RB.iterator(tree, Some(start)) + } + + override def valuesIteratorFrom(start: K): Iterator[V] = { + if (isEmpty) Iterator.empty + else RB.valuesIterator(tree, Some(start)) + } + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = + shape.parUnbox( + scala.collection.convert.impl.AnyBinaryTreeStepper.from[(K, V), RB.Node[K, V]]( + size, tree.root, _.left, _.right, x => (x.key, x.value) + ) + ) + + override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Node[K, V] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree.root, _.left, _.right, _.key.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[K, T](size, tree.root, _.left, _.right, _.key)) + } + s.asInstanceOf[S with EfficientSplit] + } + + override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Node[K, V] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[V, T] (size, tree.root, _.left, _.right, _.value)) + } + s.asInstanceOf[S with EfficientSplit] + } + + def addOne(elem: (K, V)): this.type = { RB.insert(tree, elem._1, elem._2); this } + + def subtractOne(elem: K): this.type = { RB.delete(tree, 
elem); this } + + override def clear(): Unit = RB.clear(tree) + + def get(key: K): Option[V] = RB.get(tree, key) + + /** + * Creates a ranged projection of this map. Any mutations in the ranged projection will update the original map and + * vice versa. + * + * Only entries with keys between this projection's key range will ever appear as elements of this map, independently + * of whether the entries are added through the original map or through this view. That means that if one inserts a + * key-value in a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the + * newly added entry. Mutations are always reflected in the original map, though. + * + * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower + * bound. + * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper + * bound. + */ + def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = new TreeMapProjection(from, until) + + override def foreach[U](f: ((K, V)) => U): Unit = RB.foreach(tree, f) + override def foreachEntry[U](f: (K, V) => U): Unit = RB.foreachEntry(tree, f) + + override def size: Int = RB.size(tree) + override def knownSize: Int = size + override def isEmpty: Boolean = RB.isEmpty(tree) + + override def contains(key: K): Boolean = RB.contains(tree, key) + + override def head: (K, V) = RB.min(tree).get + + override def last: (K, V) = RB.max(tree).get + + override def minAfter(key: K): Option[(K, V)] = RB.minAfter(tree, key) + + override def maxBefore(key: K): Option[(K, V)] = RB.maxBefore(tree, key) + + override protected[this] def className: String = "TreeMap" + + + /** + * A ranged projection of a [[TreeMap]]. Mutations on this map affect the original map and vice versa. + * + * Only entries with keys between this projection's key range will ever appear as elements of this map, independently + * of whether the entries are added through the original map or through this view. That means that if one inserts a + * key-value in a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the + * newly added entry. Mutations are always reflected in the original map, though. + * + * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower + * bound. + * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper + * bound. + */ + private[this] final class TreeMapProjection(from: Option[K], until: Option[K]) extends TreeMap[K, V](tree) { + + /** + * Given a possible new lower bound, chooses and returns the most constraining one (the maximum). + */ + private[this] def pickLowerBound(newFrom: Option[K]): Option[K] = (from, newFrom) match { + case (Some(fr), Some(newFr)) => Some(ordering.max(fr, newFr)) + case (None, _) => newFrom + case _ => from + } + + /** + * Given a possible new upper bound, chooses and returns the most constraining one (the minimum). + */ + private[this] def pickUpperBound(newUntil: Option[K]): Option[K] = (until, newUntil) match { + case (Some(unt), Some(newUnt)) => Some(ordering.min(unt, newUnt)) + case (None, _) => newUntil + case _ => until + } + + /** + * Returns true if the argument is inside the view bounds (between `from` and `until`). 
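+   *
+   * For example, with `from = Some(1)` and `until = Some(5)`, the keys `1` to `4`
+   * are inside the bounds, while `0` and `5` are not.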
+ */ + private[this] def isInsideViewBounds(key: K): Boolean = { + val afterFrom = from.isEmpty || ordering.compare(from.get, key) <= 0 + val beforeUntil = until.isEmpty || ordering.compare(key, until.get) < 0 + afterFrom && beforeUntil + } + + override def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = + new TreeMapProjection(pickLowerBound(from), pickUpperBound(until)) + + override def get(key: K) = if (isInsideViewBounds(key)) RB.get(tree, key) else None + + override def iterator = if (RB.size(tree) == 0) Iterator.empty else RB.iterator(tree, from, until) + override def keysIterator: Iterator[K] = if (RB.size(tree) == 0) Iterator.empty else RB.keysIterator(tree, from, until) + override def valuesIterator: Iterator[V] = if (RB.size(tree) == 0) Iterator.empty else RB.valuesIterator(tree, from, until) + override def keysIteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.keysIterator(tree, pickLowerBound(Some(start)), until) + override def iteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.iterator(tree, pickLowerBound(Some(start)), until) + override def valuesIteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.valuesIterator(tree, pickLowerBound(Some(start)), until) + override def size = if (RB.size(tree) == 0) 0 else iterator.length + override def knownSize: Int = if (RB.size(tree) == 0) 0 else -1 + override def isEmpty = RB.size(tree) == 0 || !iterator.hasNext + override def contains(key: K) = isInsideViewBounds(key) && RB.contains(tree, key) + + override def head = headOption.get + override def headOption = { + val entry = if (from.isDefined) RB.minAfter(tree, from.get) else RB.min(tree) + (entry, until) match { + case (Some(e), Some(unt)) if ordering.compare(e._1, unt) >= 0 => None + case _ => entry + } + } + + override def last = lastOption.get + override def lastOption = { + val entry = if (until.isDefined) RB.maxBefore(tree, until.get) else RB.max(tree) + (entry, from) match { + case (Some(e), Some(fr)) if ordering.compare(e._1, fr) < 0 => None + case _ => entry + } + } + + // Using the iterator should be efficient enough; if performance is deemed a problem later, specialized + // `foreach(f, from, until)` and `transform(f, from, until)` methods can be created in `RedBlackTree`. See + // https://github.com/scala/scala/pull/4608#discussion_r34307985 for a discussion about this. + override def foreach[U](f: ((K, V)) => U): Unit = iterator.foreach(f) + + override def clone() = super.clone().rangeImpl(from, until) + } + +} + +/** + * $factoryInfo + * + * @define Coll mutable.TreeMap + * @define coll mutable tree map + */ +@SerialVersionUID(3L) +object TreeMap extends SortedMapFactory[TreeMap] { + + def from[K : Ordering, V](it: IterableOnce[(K, V)]^): TreeMap[K, V] = + Growable.from(empty[K, V], it) + + def empty[K : Ordering, V]: TreeMap[K, V] = new TreeMap[K, V]() + + def newBuilder[K: Ordering, V]: Builder[(K, V), TreeMap[K, V]] = new GrowableBuilder(empty[K, V]) + +} diff --git a/tests/pos-special/stdlib/collection/mutable/TreeSet.scala b/tests/pos-special/stdlib/collection/mutable/TreeSet.scala new file mode 100644 index 000000000000..59c68a768351 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/TreeSet.scala @@ -0,0 +1,219 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.mutable + +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable.{RedBlackTree => RB} +import scala.collection.{SortedIterableFactory, SortedSetFactoryDefaults, Stepper, StepperShape, StrictOptimizedIterableOps, StrictOptimizedSortedSetOps, mutable} +import language.experimental.captureChecking + +/** + * A mutable sorted set implemented using a mutable red-black tree as underlying data structure. + * + * @param ordering the implicit ordering used to compare objects of type `A`. + * @tparam A the type of the keys contained in this tree set. + * + * @define Coll mutable.TreeSet + * @define coll mutable tree set + */ +// Original API designed in part by Lucien Pereira +sealed class TreeSet[A] private (private val tree: RB.Tree[A, Null])(implicit val ordering: Ordering[A]) + extends AbstractSet[A] + with SortedSet[A] + with SortedSetOps[A, TreeSet, TreeSet[A]] + with StrictOptimizedIterableOps[A, Set, TreeSet[A]] + with StrictOptimizedSortedSetOps[A, TreeSet, TreeSet[A]] + with SortedSetFactoryDefaults[A, TreeSet, Set] + with DefaultSerializable { + + if (ordering eq null) + throw new NullPointerException("ordering must not be null") + + /** + * Creates an empty `TreeSet`. + * @param ord the implicit ordering used to compare objects of type `A`. + * @return an empty `TreeSet`. + */ + def this()(implicit ord: Ordering[A]) = this(RB.Tree.empty)(ord) + + override def sortedIterableFactory: SortedIterableFactory[TreeSet] = TreeSet + + def iterator: collection.Iterator[A] = RB.keysIterator(tree) + + def iteratorFrom(start: A): collection.Iterator[A] = RB.keysIterator(tree, Some(start)) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Node[A, Null] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree.root, _.left, _.right, _.key.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[A, T](size, tree.root, _.left, _.right, _.key)) + } + s.asInstanceOf[S with EfficientSplit] + } + + def addOne(elem: A): this.type = { + RB.insert(tree, elem, null) + this + } + + def subtractOne(elem: A): this.type = { + RB.delete(tree, elem) + this + } + + def clear(): Unit = RB.clear(tree) + + def contains(elem: A): Boolean = RB.contains(tree, elem) + + def unconstrained: collection.Set[A] = this + + def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = new TreeSetProjection(from, until) + + override protected[this] def className: String = "TreeSet" + + override def size: Int = RB.size(tree) + override def knownSize: Int = size + override def isEmpty: Boolean = RB.isEmpty(tree) + + override def head: A = RB.minKey(tree).get + + override def last: A = RB.maxKey(tree).get + + override def minAfter(key: A): Option[A] = RB.minKeyAfter(tree, key) + + override def maxBefore(key: A): Option[A] = RB.maxKeyBefore(tree, key) + + override def foreach[U](f: A => U): Unit = RB.foreachKey(tree, f) + + + /** + * A ranged projection of a [[TreeSet]]. 
Mutations on this set affect the original set and vice versa. + * + * Only keys between this projection's key range will ever appear as elements of this set, independently of whether + * the elements are added through the original set or through this view. That means that if one inserts an element in + * a view whose key is outside the view's bounds, calls to `contains` will _not_ consider the newly added element. + * Mutations are always reflected in the original set, though. + * + * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower + * bound. + * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper + * bound. + */ + private[this] final class TreeSetProjection(from: Option[A], until: Option[A]) extends TreeSet[A](tree) { + + /** + * Given a possible new lower bound, chooses and returns the most constraining one (the maximum). + */ + private[this] def pickLowerBound(newFrom: Option[A]): Option[A] = (from, newFrom) match { + case (Some(fr), Some(newFr)) => Some(ordering.max(fr, newFr)) + case (None, _) => newFrom + case _ => from + } + + /** + * Given a possible new upper bound, chooses and returns the most constraining one (the minimum). + */ + private[this] def pickUpperBound(newUntil: Option[A]): Option[A] = (until, newUntil) match { + case (Some(unt), Some(newUnt)) => Some(ordering.min(unt, newUnt)) + case (None, _) => newUntil + case _ => until + } + + /** + * Returns true if the argument is inside the view bounds (between `from` and `until`). + */ + private[this] def isInsideViewBounds(key: A): Boolean = { + val afterFrom = from.isEmpty || ordering.compare(from.get, key) <= 0 + val beforeUntil = until.isEmpty || ordering.compare(key, until.get) < 0 + afterFrom && beforeUntil + } + + override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = + new TreeSetProjection(pickLowerBound(from), pickUpperBound(until)) + + override def contains(key: A) = isInsideViewBounds(key) && RB.contains(tree, key) + + override def iterator = RB.keysIterator(tree, from, until) + override def iteratorFrom(start: A) = RB.keysIterator(tree, pickLowerBound(Some(start)), until) + + override def size = if (RB.size(tree) == 0) 0 else iterator.length + override def knownSize: Int = if (RB.size(tree) == 0) 0 else -1 + override def isEmpty: Boolean = RB.size(tree) == 0 || !iterator.hasNext + + override def head: A = headOption.get + override def headOption: Option[A] = { + val elem = if (from.isDefined) RB.minKeyAfter(tree, from.get) else RB.minKey(tree) + (elem, until) match { + case (Some(e), Some(unt)) if ordering.compare(e, unt) >= 0 => None + case _ => elem + } + } + + override def last: A = lastOption.get + override def lastOption = { + val elem = if (until.isDefined) RB.maxKeyBefore(tree, until.get) else RB.maxKey(tree) + (elem, from) match { + case (Some(e), Some(fr)) if ordering.compare(e, fr) < 0 => None + case _ => elem + } + } + + // Using the iterator should be efficient enough; if performance is deemed a problem later, a specialized + // `foreachKey(f, from, until)` method can be created in `RedBlackTree`. See + // https://github.com/scala/scala/pull/4608#discussion_r34307985 for a discussion about this. 
+ override def foreach[U](f: A => U): Unit = iterator.foreach(f) + + override def clone(): mutable.TreeSet[A] = super.clone().rangeImpl(from, until) + + } + +} + +/** + * $factoryInfo + * @define Coll `mutable.TreeSet` + * @define coll mutable tree set + */ +@SerialVersionUID(3L) +object TreeSet extends SortedIterableFactory[TreeSet] { + + def empty[A : Ordering]: TreeSet[A] = new TreeSet[A]() + + def from[E](it: IterableOnce[E]^)(implicit ordering: Ordering[E]): TreeSet[E] = + it match { + case ts: TreeSet[E] if ordering == ts.ordering => + new TreeSet[E](ts.tree.treeCopy()) + case ss: scala.collection.SortedSet[E] if ordering == ss.ordering => + new TreeSet[E](RB.fromOrderedKeys(ss.iterator, ss.size)) + case r: Range if (ordering eq Ordering.Int) || (ordering eq Ordering.Int.reverse) => + val it = if((ordering eq Ordering.Int) == (r.step > 0)) r.iterator else r.reverseIterator + new TreeSet[E](RB.fromOrderedKeys(it.asInstanceOf[Iterator[E]], r.size)) + case _ => + val t: RB.Tree[E, Null] = RB.Tree.empty + val i = it.iterator + while (i.hasNext) RB.insert(t, i.next(), null) + new TreeSet[E](t) + } + + def newBuilder[A](implicit ordering: Ordering[A]): Builder[A, TreeSet[A]] = new ReusableBuilder[A, TreeSet[A]] { + private[this] var tree: RB.Tree[A, Null] = RB.Tree.empty + def addOne(elem: A): this.type = { RB.insert(tree, elem, null); this } + def result(): TreeSet[A] = new TreeSet[A](tree) + def clear(): Unit = { tree = RB.Tree.empty } + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala b/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala new file mode 100644 index 000000000000..cfb6d014ae9d --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala @@ -0,0 +1,443 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.annotation.tailrec +import scala.collection.generic.DefaultSerializable +import scala.reflect.ClassTag +import scala.collection.immutable.Nil +import language.experimental.captureChecking + +/** A buffer that stores elements in an unrolled linked list. + * + * Unrolled linked lists store elements in linked fixed size + * arrays. + * + * Unrolled buffers retain locality and low memory overhead + * properties of array buffers, but offer much more efficient + * element addition, since they never reallocate and copy the + * internal array. + * + * However, they provide `O(n/m)` complexity random access, + * where `n` is the number of elements, and `m` the size of + * internal array chunks. + * + * Ideal to use when: + * - elements are added to the buffer and then all of the + * elements are traversed sequentially + * - two unrolled buffers need to be concatenated (see `concat`) + * + * Better than singly linked lists for random access, but + * should still be avoided for such a purpose. 
+ * + * @define coll unrolled buffer + * @define Coll `UnrolledBuffer` + * + */ +@SerialVersionUID(3L) +sealed class UnrolledBuffer[T](implicit val tag: ClassTag[T]) + extends AbstractBuffer[T] + with Buffer[T] + with Seq[T] + with SeqOps[T, UnrolledBuffer, UnrolledBuffer[T]] + with StrictOptimizedSeqOps[T, UnrolledBuffer, UnrolledBuffer[T]] + with EvidenceIterableFactoryDefaults[T, UnrolledBuffer, ClassTag] + with Builder[T, UnrolledBuffer[T]] + with DefaultSerializable { + + import UnrolledBuffer.Unrolled + + @transient private var headptr = newUnrolled + @transient private var lastptr = headptr + @transient private var sz = 0 + + private[collection] def headPtr = headptr + private[collection] def headPtr_=(head: Unrolled[T]) = headptr = head + private[collection] def lastPtr = lastptr + private[collection] def lastPtr_=(last: Unrolled[T]) = lastptr = last + private[collection] def size_=(s: Int) = sz = s + + protected def evidenceIterableFactory: UnrolledBuffer.type = UnrolledBuffer + protected def iterableEvidence: ClassTag[T] = tag + + override def iterableFactory: SeqFactory[UnrolledBuffer] = UnrolledBuffer.untagged + + protected def newUnrolled: Unrolled[T] = new Unrolled[T](this) + + // The below would allow more flexible behavior without requiring inheritance + // that is risky because all the important internals are private. + // private var myLengthPolicy: Int => Int = x => x + // + // /** Specifies how the array lengths should vary. + // * + // * By default, `UnrolledBuffer` uses arrays of a fixed size. A length + // * policy can be given that changes this scheme to, for instance, an + // * exponential growth. + // * + // * @param nextLength computes the length of the next array from the length of the latest one + // */ + // def setLengthPolicy(nextLength: Int => Int): Unit = { myLengthPolicy = nextLength } + private[collection] def calcNextLength(sz: Int) = sz // myLengthPolicy(sz) + + def classTagCompanion = UnrolledBuffer + + /** Concatenates the target unrolled buffer to this unrolled buffer. + * + * The specified buffer `that` is cleared after this operation. This is + * an O(1) operation. 
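+   *
+   * For example, an illustrative sketch:
+   * {{{
+   * val a = UnrolledBuffer(1, 2)
+   * val b = UnrolledBuffer(3)
+   * a.concat(b)   // a now contains 1, 2, 3; b has been cleared
+   * }}}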
+ * + * @param that the unrolled buffer whose elements are added to this buffer + */ + def concat(that: UnrolledBuffer[T]) = { + // bind the two together + if (!lastptr.bind(that.headptr)) lastptr = that.lastPtr + + // update size + sz += that.sz + + // `that` is no longer usable, so clear it + // here we rely on the fact that `clear` allocates + // new nodes instead of modifying the previous ones + that.clear() + + // return a reference to this + this + } + + def addOne(elem: T) = { + lastptr = lastptr.append(elem) + sz += 1 + this + } + + def clear(): Unit = { + headptr = newUnrolled + lastptr = headptr + sz = 0 + } + + def iterator: Iterator[T] = new AbstractIterator[T] { + var pos: Int = -1 + var node: Unrolled[T] = headptr + scan() + + private def scan(): Unit = { + pos += 1 + while (pos >= node.size) { + pos = 0 + node = node.next + if (node eq null) return + } + } + def hasNext = node ne null + def next() = if (hasNext) { + val r = node.array(pos) + scan() + r + } else Iterator.empty.next() + } + + // this should be faster than the iterator + override def foreach[U](f: T => U) = headptr.foreach(f) + + def result() = this + + def length = sz + + override def knownSize: Int = sz + + def apply(idx: Int) = + if (idx >= 0 && idx < sz) headptr(idx) + else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})") + + def update(idx: Int, newelem: T) = + if (idx >= 0 && idx < sz) headptr(idx) = newelem + else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})") + + def mapInPlace(f: T => T): this.type = { + headptr.mapInPlace(f) + this + } + + def remove(idx: Int) = + if (idx >= 0 && idx < sz) { + sz -= 1 + headptr.remove(idx, this) + } else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})") + + @tailrec final def remove(idx: Int, count: Int): Unit = + if (count > 0) { + remove(idx) + remove(idx, count-1) + } + + def prepend(elem: T) = { + headptr = headptr prepend elem + sz += 1 + this + } + + def insert(idx: Int, elem: T): Unit = + insertAll(idx, elem :: Nil) + + def insertAll(idx: Int, elems: IterableOnce[T]^): Unit = + if (idx >= 0 && idx <= sz) { + sz += headptr.insertAll(idx, elems, this) + } else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})") + + override def subtractOne(elem: T): this.type = { + if (headptr.subtractOne(elem, this)) { + sz -= 1 + } + this + } + + def patchInPlace(from: Int, patch: collection.IterableOnce[T]^, replaced: Int): this.type = { + remove(from, replaced) + insertAll(from, patch) + this + } + + private def writeObject(out: java.io.ObjectOutputStream): Unit = { + out.defaultWriteObject + out writeInt sz + for (elem <- this) out writeObject elem + } + + private def readObject(in: java.io.ObjectInputStream): Unit = { + in.defaultReadObject + + val num = in.readInt + + headPtr = newUnrolled + lastPtr = headPtr + sz = 0 + var i = 0 + while (i < num) { + this += in.readObject.asInstanceOf[T] + i += 1 + } + } + + override def clone(): UnrolledBuffer[T] = new UnrolledBuffer[T] ++= this + + override protected[this] def className = "UnrolledBuffer" +} + + +@SerialVersionUID(3L) +object UnrolledBuffer extends StrictOptimizedClassTagSeqFactory[UnrolledBuffer] { self => + + val untagged: SeqFactory[UnrolledBuffer] = new ClassTagSeqFactory.AnySeqDelegate(self) + + def empty[A : ClassTag]: UnrolledBuffer[A] = new UnrolledBuffer[A] + + def from[A : ClassTag](source: scala.collection.IterableOnce[A]^): UnrolledBuffer[A] = newBuilder[A].addAll(source) + + 
def newBuilder[A : ClassTag]: UnrolledBuffer[A] = new UnrolledBuffer[A] + + final val waterline: Int = 50 + + final def waterlineDenom: Int = 100 + + @deprecated("Use waterlineDenom instead.", "2.13.0") + final val waterlineDelim: Int = waterlineDenom + + private[collection] val unrolledlength = 32 + + /** Unrolled buffer node. + */ + class Unrolled[T: ClassTag] private[collection] (var size: Int, var array: Array[T], var next: Unrolled[T], val buff: UnrolledBuffer[T] = null) { + private[collection] def this() = this(0, new Array[T](unrolledlength), null, null) + private[collection] def this(b: UnrolledBuffer[T]) = this(0, new Array[T](unrolledlength), null, b) + + private def nextlength = if (buff eq null) unrolledlength else buff.calcNextLength(array.length) + + // adds and returns itself or the new unrolled if full + @tailrec final def append(elem: T): Unrolled[T] = if (size < array.length) { + array(size) = elem + size += 1 + this + } else { + next = new Unrolled[T](0, new Array[T](nextlength), null, buff) + next append elem + } + def foreach[U](f: T => U): Unit = { + var unrolled = this + var i = 0 + while (unrolled ne null) { + val chunkarr = unrolled.array + val chunksz = unrolled.size + while (i < chunksz) { + val elem = chunkarr(i) + f(elem) + i += 1 + } + i = 0 + unrolled = unrolled.next + } + } + def mapInPlace(f: T => T): Unit = { + var unrolled = this + var i = 0 + while (unrolled ne null) { + val chunkarr = unrolled.array + val chunksz = unrolled.size + while (i < chunksz) { + val elem = chunkarr(i) + chunkarr(i) = f(elem) + i += 1 + } + i = 0 + unrolled = unrolled.next + } + } + @tailrec final def apply(idx: Int): T = + if (idx < size) array(idx) else next.apply(idx - size) + @tailrec final def update(idx: Int, newelem: T): Unit = + if (idx < size) array(idx) = newelem else next.update(idx - size, newelem) + @tailrec final def locate(idx: Int): Unrolled[T] = + if (idx < size) this else next.locate(idx - size) + def prepend(elem: T) = if (size < array.length) { + // shift the elements of the array right + // then insert the element + shiftright() + array(0) = elem + size += 1 + this + } else { + // allocate a new node and store element + // then make it point to this + val newhead = new Unrolled[T](buff) + newhead append elem + newhead.next = this + newhead + } + // shifts right assuming enough space + private def shiftright(): Unit = { + var i = size - 1 + while (i >= 0) { + array(i + 1) = array(i) + i -= 1 + } + } + // returns pointer to new last if changed + @tailrec final def remove(idx: Int, buffer: UnrolledBuffer[T]): T = + if (idx < size) { + // remove the element + // then try to merge with the next bucket + val r = array(idx) + shiftleft(idx) + size -= 1 + if (tryMergeWithNext()) buffer.lastPtr = this + r + } else next.remove(idx - size, buffer) + + @tailrec final def subtractOne(elem: T, buffer: UnrolledBuffer[T]): Boolean = { + var i = 0 + while (i < size) { + if(array(i) == elem) { + remove(i, buffer) + return true + } + i += 1 + } + if(next ne null) next.subtractOne(elem, buffer) else false + } + + // shifts left elements after `leftb` (overwrites `leftb`) + private def shiftleft(leftb: Int): Unit = { + var i = leftb + while (i < (size - 1)) { + array(i) = array(i + 1) + i += 1 + } + nullout(i, i + 1) + } + protected def tryMergeWithNext() = if (next != null && (size + next.size) < (array.length * waterline / waterlineDenom)) { + // copy the next array, then discard the next node + Array.copy(next.array, 0, array, size, next.size) + size = size + next.size + next 
= next.next + if (next eq null) true else false // checks if last node was thrown out + } else false + + @tailrec final def insertAll(idx: Int, t: scala.collection.IterableOnce[T]^, buffer: UnrolledBuffer[T]): Int = { + if (idx < size) { + // divide this node at the appropriate position and insert all into head + // update new next + val newnextnode = new Unrolled[T](0, new Array(array.length), null, buff) + Array.copy(array, idx, newnextnode.array, 0, size - idx) + newnextnode.size = size - idx + newnextnode.next = next + + // update this + nullout(idx, size) + size = idx + next = null + + // insert everything from iterable to this + var curr = this + var appended = 0 + for (elem <- t.iterator) { + curr = curr append elem + appended += 1 + } + curr.next = newnextnode + + // try to merge the last node of this with the newnextnode and fix tail pointer if needed + if (curr.tryMergeWithNext()) buffer.lastPtr = curr + else if (newnextnode.next eq null) buffer.lastPtr = newnextnode + appended + } + else if (idx == size || (next eq null)) { + var curr = this + var appended = 0 + for (elem <- t.iterator) { + curr = curr append elem + appended += 1 + } + appended + } + else next.insertAll(idx - size, t, buffer) + } + + private def nullout(from: Int, until: Int): Unit = { + var idx = from + while (idx < until) { + array(idx) = null.asInstanceOf[T] // TODO find a way to assign a default here!! + idx += 1 + } + } + + // assumes this is the last node + // `thathead` and `thatlast` are head and last node + // of the other unrolled list, respectively + def bind(thathead: Unrolled[T]) = { + assert(next eq null) + next = thathead + tryMergeWithNext() + } + + override def toString: String = + array.take(size).mkString("Unrolled@%08x".format(System.identityHashCode(this)) + "[" + size + "/" + array.length + "](", ", ", ")") + " -> " + (if (next ne null) next.toString else "") + } +} + +// This is used by scala.collection.parallel.mutable.UnrolledParArrayCombiner: +// Todo -- revisit whether inheritance is the best way to achieve this functionality +private[collection] class DoublingUnrolledBuffer[T](implicit t: ClassTag[T]) extends UnrolledBuffer[T]()(t) { + override def calcNextLength(sz: Int) = if (sz < 10000) sz * 2 else sz + override protected def newUnrolled = new UnrolledBuffer.Unrolled[T](0, new Array[T](4), null, this) +} diff --git a/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala b/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala new file mode 100644 index 000000000000..1f94281a0b59 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala @@ -0,0 +1,56 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import scala.annotation.nowarn +import scala.collection.convert.JavaCollectionWrappers.{JMapWrapper, JMapWrapperLike} +import language.experimental.captureChecking + +/** A hash map with references to entries which are weakly reachable. Entries are + * removed from this map when the key is no longer (strongly) referenced. This class wraps + * `java.util.WeakHashMap`. 
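+ *
+ * For example, a small usage sketch (an entry disappears once its key is
+ * no longer strongly referenced elsewhere):
+ * {{{
+ * val cache = WeakHashMap.empty[String, Int]
+ * cache("answer") = 42
+ * cache.get("answer")   // Some(42) while the key is strongly reachable
+ * }}}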
+ * + * @tparam K type of keys contained in this map + * @tparam V type of values associated with the keys + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#weak-hash-maps "Scala's Collection Library overview"]] + * section on `Weak Hash Maps` for more information. + * + * @define Coll `WeakHashMap` + * @define coll weak hash map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@SerialVersionUID(3L) +class WeakHashMap[K, V] extends JMapWrapper[K, V](new java.util.WeakHashMap) + with JMapWrapperLike[K, V, WeakHashMap, WeakHashMap[K, V]] + with MapFactoryDefaults[K, V, WeakHashMap, Iterable] { + override def empty = new WeakHashMap[K, V] + override def mapFactory: MapFactory[WeakHashMap] = WeakHashMap + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "WeakHashMap" +} + +/** $factoryInfo + * @define Coll `WeakHashMap` + * @define coll weak hash map + */ +@SerialVersionUID(3L) +object WeakHashMap extends MapFactory[WeakHashMap] { + def empty[K, V]: WeakHashMap[K,V] = new WeakHashMap[K, V] + def from[K, V](it: collection.IterableOnce[(K, V)]^): WeakHashMap[K,V] = Growable.from(empty[K, V], it) + def newBuilder[K, V]: Builder[(K, V), WeakHashMap[K,V]] = new GrowableBuilder(WeakHashMap.empty[K, V]) +} + diff --git a/tests/pos-special/stdlib/collection/mutable/package.scala b/tests/pos-special/stdlib/collection/mutable/package.scala new file mode 100644 index 000000000000..d658ca5bc65a --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/package.scala @@ -0,0 +1,42 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */
+
+package scala.collection
+import language.experimental.captureChecking
+
+
+package object mutable {
+  @deprecated("Use ArraySeq instead of WrappedArray; it can represent both boxed and unboxed arrays", "2.13.0")
+  type WrappedArray[X] = ArraySeq[X]
+  @deprecated("Use ArraySeq instead of WrappedArray; it can represent both boxed and unboxed arrays", "2.13.0")
+  val WrappedArray = ArraySeq
+  @deprecated("Use Iterable instead of Traversable", "2.13.0")
+  type Traversable[X] = Iterable[X]
+  @deprecated("Use Iterable instead of Traversable", "2.13.0")
+  val Traversable = Iterable
+  @deprecated("Use Stack instead of ArrayStack; it now uses an array-based implementation", "2.13.0")
+  type ArrayStack[X] = Stack[X]
+  @deprecated("Use Stack instead of ArrayStack; it now uses an array-based implementation", "2.13.0")
+  val ArrayStack = Stack
+
+  @deprecated("mutable.LinearSeq has been removed; use LinearSeq with mutable.Seq instead", "2.13.0")
+  type LinearSeq[X] = Seq[X] with scala.collection.LinearSeq[X]
+
+  @deprecated("GrowingBuilder has been renamed to GrowableBuilder", "2.13.0")
+  type GrowingBuilder[Elem, To <: Growable[Elem]] = GrowableBuilder[Elem, To]
+
+  @deprecated("IndexedOptimizedSeq has been renamed to IndexedSeq", "2.13.0")
+  type IndexedOptimizedSeq[A] = IndexedSeq[A]
+
+  @deprecated("IndexedOptimizedBuffer has been renamed to IndexedBuffer", "2.13.0")
+  type IndexedOptimizedBuffer[A] = IndexedBuffer[A]
+}
diff --git a/tests/pos-special/stdlib/collection/package.scala b/tests/pos-special/stdlib/collection/package.scala
new file mode 100644
index 000000000000..ad4686be1fb2
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/package.scala
@@ -0,0 +1,81 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */ + +package scala +import language.experimental.captureChecking + +package object collection { + @deprecated("Use Iterable instead of Traversable", "2.13.0") + type Traversable[+X] = Iterable[X] + @deprecated("Use Iterable instead of Traversable", "2.13.0") + val Traversable = Iterable + @deprecated("Use IterableOnce instead of TraversableOnce", "2.13.0") + type TraversableOnce[+X] = IterableOnce[X] + @deprecated("Use IterableOnce instead of TraversableOnce", "2.13.0") + val TraversableOnce = IterableOnce + @deprecated("Use SeqOps instead of SeqLike", "2.13.0") + type SeqLike[A, T] = SeqOps[A, Seq, T] + @deprecated("Use SeqOps (for the methods) or IndexedSeqOps (for fast indexed access) instead of ArrayLike", "2.13.0") + type ArrayLike[A] = SeqOps[A, Seq, Seq[A]] + + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenTraversableOnce[+X] = IterableOnce[X] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenTraversableOnce = IterableOnce + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenTraversable[+X] = Iterable[X] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenTraversable = Iterable + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenIterable[+X] = Iterable[X] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenIterable = Iterable + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenSeq[+X] = Seq[X] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenSeq = Seq + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenSet[X] = Set[X] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenSet = Set + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenMap[K, +V] = Map[K, V] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenMap = Map + + /** Needed to circumvent a difficulty between dotty and scalac concerning + * the right top type for a type parameter of kind * -> *. + * In Scalac, we can provide `Any`, as `Any` is kind-polymorphic. In dotty this is not allowed. + * In dotty, we can provide `[X] => Any`. But Scalac does not know lambda syntax. + */ + private[scala] type AnyConstr[X] = Any + + /** An extractor used to head/tail deconstruct sequences. */ + object +: { + /** Splits a sequence into head +: tail. + * @return Some((head, tail)) if sequence is non-empty. None otherwise. + */ + def unapply[A, CC[_] <: Seq[_], C <: SeqOps[A, CC, C]](t: C with SeqOps[A, CC, C]): Option[(A, C)] = + if(t.isEmpty) None + else Some(t.head -> t.tail) + } + + /** An extractor used to init/last deconstruct sequences. */ + object :+ { + /** Splits a sequence into init :+ last. + * @return Some((init, last)) if sequence is non-empty. None otherwise. + */ + def unapply[A, CC[_] <: Seq[_], C <: SeqOps[A, CC, C]](t: C with SeqOps[A, CC, C]): Option[(C, A)] = + if(t.isEmpty) None + else Some(t.init -> t.last) + } +} diff --git a/tests/pos-special/stdlib/runtime/PStatics.scala b/tests/pos-special/stdlib/runtime/PStatics.scala new file mode 100644 index 000000000000..788a56962855 --- /dev/null +++ b/tests/pos-special/stdlib/runtime/PStatics.scala @@ -0,0 +1,19 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
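The `+:` and `:+` extractors defined above deconstruct a sequence from either end; a brief sketch of their use (only the extractors above and standard `Seq` matching are assumed):

```scala
def describe(xs: Seq[Int]): String = xs match
  case head +: tail => s"starts with $head, rest: $tail"
  case _            => "empty"

def lastOf(xs: Seq[Int]): String = xs match
  case init :+ last => s"ends with $last, after: $init"
  case _            => "empty"

// describe(List(1, 2, 3)) == "starts with 1, rest: List(2, 3)"
// lastOf(Vector(1, 2, 3)) == "ends with 3, after: Vector(1, 2)"
```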
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.runtime + +// things that should be in `Statics`, but can't be yet for bincompat reasons +// TODO 3.T: move to `Statics` +private[scala] object PStatics { + final val VM_MaxArraySize = 2147483645 // == `Int.MaxValue - 2`, hotspot limit +} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BCodeBodyBuilder.scala b/tests/pos-with-compiler-cc/backend/jvm/BCodeBodyBuilder.scala index bf10e37943a8..6f067a0e5ef0 100644 --- a/tests/pos-with-compiler-cc/backend/jvm/BCodeBodyBuilder.scala +++ b/tests/pos-with-compiler-cc/backend/jvm/BCodeBodyBuilder.scala @@ -1622,7 +1622,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val equalsMethod: Symbol = { if (l.tpe <:< defn.BoxedNumberClass.info) { if (r.tpe <:< defn.BoxedNumberClass.info) defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumNum) - else if (r.tpe <:< defn.BoxedCharClass.info) NoSymbol // ctx.requiredMethod(BoxesRunTimeTypeRef, nme.equalsNumChar) // this method is private + else if (r.tpe <:< defn.BoxedCharClass.info) defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumChar) else defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumObject) } else defn.BoxesRunTimeModule_externalEquals } diff --git a/tests/pos-with-compiler-cc/backend/jvm/BCodeHelpers.scala b/tests/pos-with-compiler-cc/backend/jvm/BCodeHelpers.scala index 6f83af540bea..2454bca9d653 100644 --- a/tests/pos-with-compiler-cc/backend/jvm/BCodeHelpers.scala +++ b/tests/pos-with-compiler-cc/backend/jvm/BCodeHelpers.scala @@ -209,7 +209,7 @@ trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { } } // end of trait BCPickles - trait BCInnerClassGen extends caps.Pure { + trait BCInnerClassGen extends Pure { def debugLevel = 3 // 0 -> no debug info; 1-> filename; 2-> lines; 3-> varnames @@ -374,7 +374,7 @@ trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { } case Ident(nme.WILDCARD) => // An underscore argument indicates that we want to use the default value for this parameter, so do not emit anything - case t: tpd.RefTree if t.symbol.owner.linkedClass.isAllOf(JavaEnumTrait) => + case t: tpd.RefTree if t.symbol.owner.linkedClass.isAllOf(JavaEnum) => val edesc = innerClasesStore.typeDescriptor(t.tpe) // the class descriptor of the enumeration class. val evalue = t.symbol.javaSimpleName // value the actual enumeration value. 
av.visitEnum(name, edesc, evalue)
diff --git a/tests/pos-with-compiler-cc/backend/jvm/BCodeIdiomatic.scala b/tests/pos-with-compiler-cc/backend/jvm/BCodeIdiomatic.scala
index 5eb8d7a52aa2..9b8d81bbdbd1 100644
--- a/tests/pos-with-compiler-cc/backend/jvm/BCodeIdiomatic.scala
+++ b/tests/pos-with-compiler-cc/backend/jvm/BCodeIdiomatic.scala
@@ -17,7 +17,7 @@ import dotty.tools.dotc.report
 * @version 1.0
 *
 */
-trait BCodeIdiomatic extends caps.Pure {
+trait BCodeIdiomatic extends Pure {
 val int: DottyBackendInterface
 final lazy val bTypes = new BTypesFromSymbols[int.type](int)
@@ -55,7 +55,8 @@ trait BCodeIdiomatic extends caps.Pure {
 case "18" => asm.Opcodes.V18
 case "19" => asm.Opcodes.V19
 case "20" => asm.Opcodes.V20
- case "21" => asm.Opcodes.V21*/
+ case "21" => asm.Opcodes.V21
+ case "22" => asm.Opcodes.V22*/
 }
 lazy val majorVersion: Int = (classfileVersion & 0xFF)
diff --git a/tests/pos-with-compiler-cc/backend/jvm/BTypes.scala b/tests/pos-with-compiler-cc/backend/jvm/BTypes.scala
index dda85e2d5616..f9a3a3aae105 100644
--- a/tests/pos-with-compiler-cc/backend/jvm/BTypes.scala
+++ b/tests/pos-with-compiler-cc/backend/jvm/BTypes.scala
@@ -14,7 +14,7 @@ import scala.tools.asm
 * This representation is immutable and independent of the compiler data structures, hence it can
 * be queried by concurrent threads.
 */
-abstract class BTypes extends caps.Pure {
+abstract class BTypes extends Pure {
 val int: DottyBackendInterface
 import int.given
@@ -47,7 +47,7 @@ abstract class BTypes extends caps.Pure {
 * A BType is either a primitive type, a ClassBType, an ArrayBType of one of these, or a MethodType
 * referring to BTypes.
 */
- /*sealed*/ trait BType extends caps.Pure { // Not sealed for now due to SI-8546
+ /*sealed*/ trait BType extends Pure { // Not sealed for now due to SI-8546
 final override def toString: String = this match {
 case UNIT => "V"
 case BOOL => "Z"
diff --git a/tests/pos-with-compiler-cc/backend/jvm/BTypesFromSymbols.scala b/tests/pos-with-compiler-cc/backend/jvm/BTypesFromSymbols.scala
index 54dafe6f0032..d78008d65cc6 100644
--- a/tests/pos-with-compiler-cc/backend/jvm/BTypesFromSymbols.scala
+++ b/tests/pos-with-compiler-cc/backend/jvm/BTypesFromSymbols.scala
@@ -330,7 +330,7 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I) extends BTypes {
 .addFlagIf(sym.is(Bridge), ACC_BRIDGE | ACC_SYNTHETIC)
 .addFlagIf(sym.is(Artifact), ACC_SYNTHETIC)
 .addFlagIf(sym.isClass && !sym.isInterface, ACC_SUPER)
- .addFlagIf(sym.isAllOf(JavaEnumTrait), ACC_ENUM)
+ .addFlagIf(sym.isAllOf(JavaEnum), ACC_ENUM)
 .addFlagIf(sym.is(JavaVarargs), ACC_VARARGS)
 .addFlagIf(sym.is(Synchronized), ACC_SYNCHRONIZED)
 .addFlagIf(sym.isDeprecated, ACC_DEPRECATED)
diff --git a/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala b/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala
index 71d007370fe7..1af7e5dd705a 100644
--- a/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala
+++ b/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala
@@ -26,7 +26,8 @@ import Decorators.em
 import java.io.DataOutputStream
 import java.nio.channels.ClosedByInterruptException
-import dotty.tools.tasty.{ TastyBuffer, TastyHeaderUnpickler }
+import dotty.tools.tasty.{ TastyBuffer, TastyHeaderUnpickler, UnpicklerConfig }
+import dotty.tools.tasty.core.TastyUnpickler
 import scala.tools.asm
 import scala.tools.asm.Handle
@@ -285,7 +286,7 @@ class GenBCodePipeline(val int: DottyBackendInterface, val primitives: DottyPrim
 throw ex
 finally outstream.close()
- val uuid = new TastyHeaderUnpickler(binary()).readHeader()
+ val
uuid = new TastyHeaderUnpickler(TastyUnpickler.scala3CompilerConfig, binary()).readHeader() val lo = uuid.getMostSignificantBits val hi = uuid.getLeastSignificantBits diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSCodeGen.scala b/tests/pos-with-compiler-cc/backend/sjs/JSCodeGen.scala index 87d816e56192..c670b2de97b1 100644 --- a/tests/pos-with-compiler-cc/backend/sjs/JSCodeGen.scala +++ b/tests/pos-with-compiler-cc/backend/sjs/JSCodeGen.scala @@ -2846,7 +2846,7 @@ class JSCodeGen()(using genCtx: DetachedContext) { private lazy val externalEqualsNumNum: Symbol = defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumNum) private lazy val externalEqualsNumChar: Symbol = - NoSymbol // requiredMethod(BoxesRunTimeTypeRef, nme.equalsNumChar) // this method is private + defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumChar) private lazy val externalEqualsNumObject: Symbol = defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumObject) private lazy val externalEquals: Symbol = @@ -2886,7 +2886,7 @@ class JSCodeGen()(using genCtx: DetachedContext) { val ptfm = ctx.platform if (lsym.derivesFrom(defn.BoxedNumberClass)) { if (rsym.derivesFrom(defn.BoxedNumberClass)) externalEqualsNumNum - else if (rsym.derivesFrom(defn.BoxedCharClass)) externalEqualsNumObject // will be externalEqualsNumChar in 2.12, SI-9030 + else if (rsym.derivesFrom(defn.BoxedCharClass)) externalEqualsNumChar else externalEqualsNumObject } else externalEquals } diff --git a/tests/pos-with-compiler-cc/backend/sjs/ScopedVar.scala b/tests/pos-with-compiler-cc/backend/sjs/ScopedVar.scala index 21462929833c..af7570a6edca 100644 --- a/tests/pos-with-compiler-cc/backend/sjs/ScopedVar.scala +++ b/tests/pos-with-compiler-cc/backend/sjs/ScopedVar.scala @@ -1,6 +1,6 @@ package dotty.tools.backend.sjs -class ScopedVar[A](init: A) extends caps.Pure { +class ScopedVar[A](init: A) extends Pure { import ScopedVar.Assignment private[ScopedVar] var value = init diff --git a/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala b/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala index fd30d441a6ee..7b558c65e425 100644 --- a/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala +++ b/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala @@ -15,7 +15,7 @@ import annotation.internal.sharable /** A base class for things that have positions (currently: modifiers and trees) */ -abstract class Positioned(implicit @constructorOnly src: SourceFile) extends SrcPos, Product, Cloneable, caps.Pure { +abstract class Positioned(implicit @constructorOnly src: SourceFile) extends SrcPos, Product, Cloneable, Pure { import Positioned.{ids, nextId, debugId} private var mySpan: Span = _ diff --git a/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala b/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala index c31bcb76c2c7..2072b43089fb 100644 --- a/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala +++ b/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala @@ -39,7 +39,7 @@ import annotation.retains * if the mapped function is either a bijection or if it is idempotent * on capture references (c.f. doc comment on `map` below). */ -sealed abstract class CaptureSet extends Showable, caps.Pure: +sealed abstract class CaptureSet extends Showable, Pure: import CaptureSet.* /** The elements of this capture set. 
For capture variables,
diff --git a/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala b/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala
index 20708b98cc95..b3250eb7b536 100644
--- a/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala
+++ b/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala
@@ -17,7 +17,7 @@ class ScalaSettings extends SettingGroup with AllScalaSettings
 object ScalaSettings:
 // Keep synchronized with `classfileVersion` in `BCodeIdiomatic`
 private val minTargetVersion = 8
- private val maxTargetVersion = 21
+ private val maxTargetVersion = 22
 def supportedTargetVersions: List[String] =
 (minTargetVersion to maxTargetVersion).toList.map(_.toString)
diff --git a/tests/pos-with-compiler-cc/dotc/core/Annotations.scala b/tests/pos-with-compiler-cc/dotc/core/Annotations.scala
index f307d4a36697..2061bddb9e8a 100644
--- a/tests/pos-with-compiler-cc/dotc/core/Annotations.scala
+++ b/tests/pos-with-compiler-cc/dotc/core/Annotations.scala
@@ -17,7 +17,7 @@ object Annotations {
 if (tree.symbol.isConstructor) tree.symbol.owner
 else tree.tpe.typeSymbol
- abstract class Annotation extends Showable, caps.Pure {
+ abstract class Annotation extends Showable, Pure {
 def tree(using Context): Tree
diff --git a/tests/pos-with-compiler-cc/dotc/core/CheckRealizable.scala b/tests/pos-with-compiler-cc/dotc/core/CheckRealizable.scala
index 47fa84b467d8..d166cec11573 100644
--- a/tests/pos-with-compiler-cc/dotc/core/CheckRealizable.scala
+++ b/tests/pos-with-compiler-cc/dotc/core/CheckRealizable.scala
@@ -13,7 +13,7 @@ import annotation.constructorOnly
 /** Realizability status */
 object CheckRealizable {
- sealed abstract class Realizability(val msg: String) extends caps.Pure {
+ sealed abstract class Realizability(val msg: String) extends Pure {
 def andAlso(other: => Realizability): Realizability =
 if (this == Realizable) other else this
 def mapError(f: Realizability -> Context ?-> Realizability)(using Context): Realizability =
diff --git a/tests/pos-with-compiler-cc/dotc/core/ConstraintHandling.scala b/tests/pos-with-compiler-cc/dotc/core/ConstraintHandling.scala
index 8bf671931260..96e965903010 100644
--- a/tests/pos-with-compiler-cc/dotc/core/ConstraintHandling.scala
+++ b/tests/pos-with-compiler-cc/dotc/core/ConstraintHandling.scala
@@ -707,7 +707,7 @@ trait ConstraintHandling {
 // be a type variable which is now instantiated to `param`, and therefore
 // cannot be used as an instantiation of `param` without creating a loop.
 // If that happens, we run `instanceType` again to find a new instantiation.
- // (we do not check for non-toplevel occurences: those should never occur
+ // (we do not check for non-toplevel occurrences: those should never occur
 // since `addOneBound` disallows recursive lower bounds).
 if constraint.occursAtToplevel(param, widened) then
 instanceType(param, fromBelow, widenUnions, maxLevel)
diff --git a/tests/pos-with-compiler-cc/dotc/core/Denotations.scala b/tests/pos-with-compiler-cc/dotc/core/Denotations.scala
index 246e359f0597..9db285975a0a 100644
--- a/tests/pos-with-compiler-cc/dotc/core/Denotations.scala
+++ b/tests/pos-with-compiler-cc/dotc/core/Denotations.scala
@@ -76,7 +76,7 @@ object Denotations {
 /** A PreDenotation represents a group of single denotations or a single multi-denotation
 * It is used as an optimization to avoid forming MultiDenotations too eagerly.
*/ - abstract class PreDenotation extends caps.Pure { + abstract class PreDenotation extends Pure { /** A denotation in the group exists */ def exists: Boolean diff --git a/tests/pos-with-compiler-cc/dotc/core/NameKinds.scala b/tests/pos-with-compiler-cc/dotc/core/NameKinds.scala index 2ed9a17b9f7e..07c69afdaf24 100644 --- a/tests/pos-with-compiler-cc/dotc/core/NameKinds.scala +++ b/tests/pos-with-compiler-cc/dotc/core/NameKinds.scala @@ -23,14 +23,14 @@ object NameKinds { @sharable private val uniqueNameKinds = util.HashMap[String, UniqueNameKind]() /** A class for the info stored in a derived name */ - abstract class NameInfo extends caps.Pure { + abstract class NameInfo extends Pure { def kind: NameKind def mkString(underlying: TermName): String def map(f: SimpleName => SimpleName): NameInfo = this } /** An abstract base class of classes that define the kind of a derived name info */ - abstract class NameKind(val tag: Int) extends caps.Pure { self => + abstract class NameKind(val tag: Int) extends Pure { self => /** The info class defined by this kind */ type ThisInfo <: Info diff --git a/tests/pos-with-compiler-cc/dotc/core/Names.scala b/tests/pos-with-compiler-cc/dotc/core/Names.scala index e6ea66f4025b..d1eba69c57a0 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Names.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Names.scala @@ -32,7 +32,7 @@ object Names { * in a name table. A derived term name adds a tag, and possibly a number * or a further simple name to some other name. */ - abstract class Name extends Designator, Showable, caps.Pure derives CanEqual { + abstract class Name extends Designator, Showable, Pure derives CanEqual { /** A type for names of the same kind as this name */ type ThisName <: Name diff --git a/tests/pos-with-compiler-cc/dotc/core/Phases.scala b/tests/pos-with-compiler-cc/dotc/core/Phases.scala index 3744b1f21122..809e24b1e6ef 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Phases.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Phases.scala @@ -285,7 +285,7 @@ object Phases { final def isTyper(phase: Phase): Boolean = phase.id == typerPhase.id } - abstract class Phase extends caps.Pure { + abstract class Phase extends Pure { /** A name given to the `Phase` that can be used to debug the compiler. For * instance, it is possible to print trees after a given phase using: diff --git a/tests/pos-with-compiler-cc/dotc/core/Scopes.scala b/tests/pos-with-compiler-cc/dotc/core/Scopes.scala index 7ab68ddf78a2..f5a108a13c19 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Scopes.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Scopes.scala @@ -60,7 +60,7 @@ object Scopes { * or to delete them. These methods are provided by subclass * MutableScope. 
*/ - abstract class Scope extends printing.Showable, caps.Pure { + abstract class Scope extends printing.Showable, Pure { /** The last scope-entry from which all others are reachable via `prev` */ private[dotc] def lastEntry: ScopeEntry | Null diff --git a/tests/pos-with-compiler-cc/dotc/core/TypeErrors.scala b/tests/pos-with-compiler-cc/dotc/core/TypeErrors.scala index 24f281b36785..43eec81467d4 100644 --- a/tests/pos-with-compiler-cc/dotc/core/TypeErrors.scala +++ b/tests/pos-with-compiler-cc/dotc/core/TypeErrors.scala @@ -15,7 +15,7 @@ import ast.untpd import config.Printers.cyclicErrors import language.experimental.pureFunctions -abstract class TypeError(using creationContext: DetachedContext) extends Exception(""), caps.Pure: +abstract class TypeError(using creationContext: DetachedContext) extends Exception(""), Pure: /** Convert to message. This takes an additional Context, so that we * use the context when the message is first produced, i.e. when the TypeError diff --git a/tests/pos-with-compiler-cc/dotc/core/Types.scala b/tests/pos-with-compiler-cc/dotc/core/Types.scala index e4b30888a5dc..90f2e322ac1c 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Types.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Types.scala @@ -91,7 +91,7 @@ object Types { * * Note: please keep in sync with copy in `docs/docs/internals/type-system.md`. */ - abstract class Type extends Hashable, printing.Showable, caps.Pure { + abstract class Type extends Hashable, printing.Showable, Pure { // ----- Tests ----------------------------------------------------- diff --git a/tests/pos-with-compiler-cc/dotc/core/classfile/ClassfileParser.scala b/tests/pos-with-compiler-cc/dotc/core/classfile/ClassfileParser.scala index 7e369dd6d9cc..21140f437114 100644 --- a/tests/pos-with-compiler-cc/dotc/core/classfile/ClassfileParser.scala +++ b/tests/pos-with-compiler-cc/dotc/core/classfile/ClassfileParser.scala @@ -28,7 +28,7 @@ import annotation.retains object ClassfileParser { /** Marker trait for unpicklers that can be embedded in classfiles. */ - trait Embedded extends caps.Pure + trait Embedded extends Pure /** Indicate that there is nothing to unpickle and the corresponding symbols can * be invalidated. */ diff --git a/tests/pos-with-compiler-cc/dotc/interactive/Completion.scala b/tests/pos-with-compiler-cc/dotc/interactive/Completion.scala index 6a3ac439ef5f..ead3e6c413c9 100644 --- a/tests/pos-with-compiler-cc/dotc/interactive/Completion.scala +++ b/tests/pos-with-compiler-cc/dotc/interactive/Completion.scala @@ -538,7 +538,7 @@ object Completion { /** Temporary data structure representing denotations with the same name introduced in a given scope * as a member of a type, by a local definition or by an import clause */ - private case class ScopedDenotations(denots: Seq[SingleDenotation], ctx: DetachedContext) extends caps.Pure + private case class ScopedDenotations(denots: Seq[SingleDenotation], ctx: DetachedContext) extends Pure /** * The completion mode: defines what kinds of symbols should be included in the completion diff --git a/tests/pos-with-compiler-cc/dotc/parsing/Scanners.scala b/tests/pos-with-compiler-cc/dotc/parsing/Scanners.scala index 90c11e4430ec..737a37b2d4ce 100644 --- a/tests/pos-with-compiler-cc/dotc/parsing/Scanners.scala +++ b/tests/pos-with-compiler-cc/dotc/parsing/Scanners.scala @@ -1554,7 +1554,7 @@ object Scanners { * InBraces a pair of braces { ... } * Indented a pair of ... 
tokens */ - abstract class Region(val closedBy: Token) extends caps.Pure: + abstract class Region(val closedBy: Token) extends Pure: /** The region enclosing this one, or `null` for the outermost region */ def outer: Region | Null diff --git a/tests/pos-with-compiler-cc/dotc/printing/Highlighting.scala b/tests/pos-with-compiler-cc/dotc/printing/Highlighting.scala index b5fbbfdf1b3a..018bf65b851d 100644 --- a/tests/pos-with-compiler-cc/dotc/printing/Highlighting.scala +++ b/tests/pos-with-compiler-cc/dotc/printing/Highlighting.scala @@ -28,7 +28,7 @@ object Highlighting { else mod + super.show } - case class HighlightBuffer(hl: Highlight)(using DetachedContext) extends caps.Pure { + case class HighlightBuffer(hl: Highlight)(using DetachedContext) extends Pure { private val buffer = new mutable.ListBuffer[String] buffer += hl.show diff --git a/tests/pos-with-compiler-cc/dotc/printing/Printer.scala b/tests/pos-with-compiler-cc/dotc/printing/Printer.scala index 25429c8fc01b..b9da874cf9ae 100644 --- a/tests/pos-with-compiler-cc/dotc/printing/Printer.scala +++ b/tests/pos-with-compiler-cc/dotc/printing/Printer.scala @@ -15,7 +15,7 @@ import scala.annotation.internal.sharable /** The base class of all printers */ -abstract class Printer extends caps.Pure { +abstract class Printer extends Pure { private var prec: Precedence = GlobalPrec diff --git a/tests/pos-with-compiler-cc/dotc/profile/AsyncHelper.scala b/tests/pos-with-compiler-cc/dotc/profile/AsyncHelper.scala index deb8e172a780..1c52e47ade7e 100644 --- a/tests/pos-with-compiler-cc/dotc/profile/AsyncHelper.scala +++ b/tests/pos-with-compiler-cc/dotc/profile/AsyncHelper.scala @@ -9,7 +9,7 @@ import java.util.concurrent.atomic.AtomicInteger import dotty.tools.dotc.core.Phases.Phase import dotty.tools.dotc.core.Contexts._ -sealed trait AsyncHelper extends caps.Pure { +sealed trait AsyncHelper extends Pure { def newUnboundedQueueFixedThreadPool (nThreads: Int, diff --git a/tests/pos-with-compiler-cc/dotc/profile/Profiler.scala b/tests/pos-with-compiler-cc/dotc/profile/Profiler.scala index bb4f9ffe226b..35c98ac2a0d2 100644 --- a/tests/pos-with-compiler-cc/dotc/profile/Profiler.scala +++ b/tests/pos-with-compiler-cc/dotc/profile/Profiler.scala @@ -65,7 +65,7 @@ case class ProfileRange(start: ProfileSnap, end:ProfileSnap, phase:Phase, purpos def retainedHeapMB: Double = toMegaBytes(end.heapBytes - start.heapBytes) } -sealed trait Profiler extends caps.Pure { +sealed trait Profiler extends Pure { def finished(): Unit diff --git a/tests/pos-with-compiler-cc/dotc/transform/CapturedVars.scala b/tests/pos-with-compiler-cc/dotc/transform/CapturedVars.scala index 3aaf4a23ec1c..f3b3421b8356 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/CapturedVars.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/CapturedVars.scala @@ -34,7 +34,7 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer: override def initContext(ctx: FreshContext): Unit = Captured = ctx.addLocation(util.ReadOnlySet.empty) - private class RefInfo(using DetachedContext) extends caps.Pure { + private class RefInfo(using DetachedContext) extends Pure { /** The classes for which a Ref type exists. 
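 * (these are mapped to the `scala.runtime` box classes, e.g. `IntRef` and
 * `ObjectRef`, when the phase rewrites captured local variables).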
*/
 val refClassKeys: collection.Set[Symbol] =
 defn.ScalaNumericValueClasses() `union` Set(defn.BooleanClass, defn.ObjectClass)
diff --git a/tests/pos-with-compiler-cc/dotc/transform/CrossStageSafety.scala b/tests/pos-with-compiler-cc/dotc/transform/CrossStageSafety.scala
index ca00c87161ef..eba5e0071c01 100644
--- a/tests/pos-with-compiler-cc/dotc/transform/CrossStageSafety.scala
+++ b/tests/pos-with-compiler-cc/dotc/transform/CrossStageSafety.scala
@@ -48,7 +48,7 @@ import scala.annotation.constructorOnly
 * }
 *
 */
-class CrossStageSafety(@constructorOnly ictx: DetachedContext) extends TreeMapWithStages(ictx), Checking, caps.Pure {
+class CrossStageSafety(@constructorOnly ictx: DetachedContext) extends TreeMapWithStages(ictx), Checking, Pure {
 import tpd._
 private val InAnnotation = Property.Key[Unit]()
@@ -278,7 +278,7 @@ class CrossStageSafety(@constructorOnly ictx: DetachedContext) extends TreeMapWi
 object CrossStageSafety {
 import tpd._
- class QuoteTypeTags(span: Span)(using DetachedContext) extends caps.Pure {
+ class QuoteTypeTags(span: Span)(using DetachedContext) extends Pure {
 private val tags = collection.mutable.LinkedHashMap.empty[Symbol, TypeDef]
diff --git a/tests/pos-with-compiler-cc/dotc/transform/ForwardDepChecks.scala b/tests/pos-with-compiler-cc/dotc/transform/ForwardDepChecks.scala
index ada633c04626..8fa0956bed89 100644
--- a/tests/pos-with-compiler-cc/dotc/transform/ForwardDepChecks.scala
+++ b/tests/pos-with-compiler-cc/dotc/transform/ForwardDepChecks.scala
@@ -26,7 +26,7 @@ object ForwardDepChecks:
 /** A class to help in forward reference checking */
 class LevelInfo(val outer: OptLevelInfo, val owner: Symbol, stats: List[Tree])(using DetachedContext)
- extends OptLevelInfo, caps.Pure {
+ extends OptLevelInfo, Pure {
 override val levelAndIndex: LevelAndIndex =
 stats.foldLeft(outer.levelAndIndex, 0) {(mi, stat) =>
 val (m, idx) = mi
diff --git a/tests/pos-with-compiler-cc/dotc/transform/MacroTransform.scala b/tests/pos-with-compiler-cc/dotc/transform/MacroTransform.scala
index bff0e8340c0b..14ccdc5c6f86 100644
--- a/tests/pos-with-compiler-cc/dotc/transform/MacroTransform.scala
+++ b/tests/pos-with-compiler-cc/dotc/transform/MacroTransform.scala
@@ -9,7 +9,7 @@ import Contexts._
 /** A base class for transforms.
 * A transform contains a compiler phase which applies a tree transformer.
 */
-abstract class MacroTransform extends Phase, caps.Pure {
+abstract class MacroTransform extends Phase, Pure {
 import ast.tpd._
diff --git a/tests/pos-with-compiler-cc/dotc/transform/MegaPhase.scala b/tests/pos-with-compiler-cc/dotc/transform/MegaPhase.scala
index 2543a89af4d7..14dd7f17d58a 100644
--- a/tests/pos-with-compiler-cc/dotc/transform/MegaPhase.scala
+++ b/tests/pos-with-compiler-cc/dotc/transform/MegaPhase.scala
@@ -28,7 +28,7 @@ object MegaPhase {
 * - Other: to prepare/transform a tree that does not have a specific prepare/transform
 * method pair.
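 * A minimal sketch of such a phase (an illustrative example only; the name and
 * the exact override set are whatever a real subclass needs):
 * {{{
 * class MyMiniPhase extends MiniPhase:
 *   def phaseName: String = "myMiniPhase"
 *   override def transformApply(tree: Apply)(using Context): Tree =
 *     tree // rewrite Apply nodes here
 * }}}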
*/ - abstract class MiniPhase extends Phase, caps.Pure { + abstract class MiniPhase extends Phase, Pure { private[MegaPhase] var superPhase: MegaPhase = _ private[MegaPhase] var idxInGroup: Int = _ diff --git a/tests/pos-with-compiler-cc/dotc/transform/OverridingPairs.scala b/tests/pos-with-compiler-cc/dotc/transform/OverridingPairs.scala index 5db2872e73e5..9e02b291b14e 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/OverridingPairs.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/OverridingPairs.scala @@ -22,7 +22,7 @@ object OverridingPairs: /** The cursor class * @param base the base class that contains the overriding pairs */ - class Cursor(base: Symbol)(using DetachedContext) extends caps.Pure: + class Cursor(base: Symbol)(using DetachedContext) extends Pure: private val self = base.thisType diff --git a/tests/pos-with-compiler-cc/dotc/transform/PatternMatcher.scala b/tests/pos-with-compiler-cc/dotc/transform/PatternMatcher.scala index a93545aec606..b1b268634736 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/PatternMatcher.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/PatternMatcher.scala @@ -145,7 +145,7 @@ object PatternMatcher { private var nxId = 0 /** The different kinds of plans */ - sealed abstract class Plan extends caps.Pure { val id: Int = nxId; nxId += 1 } + sealed abstract class Plan extends Pure { val id: Int = nxId; nxId += 1 } case class TestPlan(test: Test, var scrutinee: Tree, span: Span, var onSuccess: Plan) extends Plan { diff --git a/tests/pos-with-compiler-cc/dotc/transform/PickleQuotes.scala b/tests/pos-with-compiler-cc/dotc/transform/PickleQuotes.scala index f3ae6a377aab..b27bbdc0fccd 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/PickleQuotes.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/PickleQuotes.scala @@ -134,7 +134,7 @@ class PickleQuotes extends MacroTransform { contents += content val holeType = if isTerm then getTermHoleType(tree.tpe) else getTypeHoleType(tree.tpe) - val hole = cpy.Hole(tree)(content = EmptyTree, TypeTree(holeType)) + val hole = cpy.Hole(tree)(content = EmptyTree, tpt = TypeTree(holeType)) if isTerm then Inlined(EmptyTree, Nil, hole).withSpan(tree.span) else hole case tree: DefTree => val newAnnotations = tree.symbol.annotations.mapconserve { annot => diff --git a/tests/pos-with-compiler-cc/dotc/transform/SymUtils.scala b/tests/pos-with-compiler-cc/dotc/transform/SymUtils.scala index 6010fe2a2a44..0a6fa9217303 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/SymUtils.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/SymUtils.scala @@ -363,8 +363,6 @@ object SymUtils: self.hasAnnotation(defn.ExperimentalAnnot) || isDefaultArgumentOfExperimentalMethod || (!self.is(Package) && self.owner.isInExperimentalScope) - || self.topLevelClass.ownersIterator.exists(p => - p.is(Package) && p.owner.isRoot && p.name == tpnme.dotty) /** The declared self type of this class, as seen from `site`, stripping * all refinements for opaque types. diff --git a/tests/pos-with-compiler-cc/dotc/typer/Checking.scala b/tests/pos-with-compiler-cc/dotc/typer/Checking.scala index c0756508ffff..ec7bd6f8abc9 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/Checking.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/Checking.scala @@ -371,7 +371,7 @@ object Checking { /** Check that `info` of symbol `sym` is not cyclic. * @pre sym is not yet initialized (i.e. its type is a Completer). 
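 * (For instance, the self-reference in an F-bounded definition such as
 * `type Rec <: Ordered[Rec]` is a legal reference of this kind; it ends up
 * wrapped in a `LazyRef` rather than being flagged as a cycle.)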
- * @return `info` where every legal F-bounded reference is proctected + * @return `info` where every legal F-bounded reference is protected * by a `LazyRef`, or `ErrorType` if a cycle was detected and reported. */ def checkNonCyclic(sym: Symbol, info: Type, reportErrors: Boolean)(using Context): Type = { diff --git a/tests/pos-with-compiler-cc/dotc/typer/Implicits.scala b/tests/pos-with-compiler-cc/dotc/typer/Implicits.scala index 6034b10bf6f3..1f7286b02b5a 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/Implicits.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/Implicits.scala @@ -84,7 +84,7 @@ object Implicits: /** A common base class of contextual implicits and of-type implicits which * represents a set of references to implicit definitions. */ - abstract class ImplicitRefs(initctx: DetachedContext) extends caps.Pure { + abstract class ImplicitRefs(initctx: DetachedContext) extends Pure { val irefCtx: DetachedContext = if (initctx eq NoContext) initctx else initctx.retractMode(Mode.ImplicitsEnabled).detach protected given Context = irefCtx @@ -1651,7 +1651,7 @@ end Implicits * recursive references and emit a complete implicit dictionary when the outermost search * is complete. */ -abstract class SearchHistory extends caps.Pure: +abstract class SearchHistory extends Pure: val root: SearchRoot /** Does this search history contain any by name implicit arguments. */ val byname: Boolean @@ -1869,7 +1869,7 @@ final class SearchRoot extends SearchHistory: end SearchRoot /** A set of term references where equality is =:= */ -sealed class TermRefSet(using DetachedContext) extends caps.Pure: +sealed class TermRefSet(using DetachedContext) extends Pure: private val elemsMap = new util.HashMap[TermSymbol, Type | List[Type]] private val elemsBuf = new mutable.ListBuffer[TermSymbol] diff --git a/tests/pos-with-compiler-cc/dotc/typer/ImportInfo.scala b/tests/pos-with-compiler-cc/dotc/typer/ImportInfo.scala index 3c817f054ac6..8f4ab5de30cb 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/ImportInfo.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/ImportInfo.scala @@ -53,7 +53,7 @@ object ImportInfo { class ImportInfo(symf: Context ?-> Symbol, val selectors: List[untpd.ImportSelector], val qualifier: untpd.Tree, - val isRootImport: Boolean = false) extends Showable, caps.Pure { + val isRootImport: Boolean = false) extends Showable, Pure { private def symNameOpt = qualifier match { case ref: untpd.RefTree => Some(ref.name.asTermName) diff --git a/tests/pos-with-compiler-cc/dotc/typer/Inferencing.scala b/tests/pos-with-compiler-cc/dotc/typer/Inferencing.scala index cbf30dd97db9..94a4d6f3e71c 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/Inferencing.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/Inferencing.scala @@ -241,16 +241,16 @@ object Inferencing { * relationship _necessarily_ must hold. * * We accomplish that by: - * - replacing covariant occurences with upper GADT bound - * - replacing contravariant occurences with lower GADT bound - * - leaving invariant occurences alone + * - replacing covariant occurrences with upper GADT bound + * - replacing contravariant occurrences with lower GADT bound + * - leaving invariant occurrences alone * * Examples: * - If we have GADT cstr A <: Int, then for all A <: Int, Option[A] <: Option[Int]. * Therefore, we can approximate Option[A] ~~ Option[Int]. * - If we have A >: S <: T, then for all such A, A => A <: S => T. This * illustrates that it's fine to differently approximate different - * occurences of same type. + * occurrences of same type. 
* - If we have A <: Int and F <: [A] => Option[A] (note the invariance), * then we should approximate F[A] ~~ Option[A]. That is, we should * respect the invariance of the type constructor. @@ -449,7 +449,7 @@ object Inferencing { * +1 means: only covariant occurrences * 0 means: mixed or non-variant occurrences * - * We need to take the occurences in `pt` into account because a type + * We need to take the occurrences in `pt` into account because a type * variable created when typing the current tree might only appear in the * bounds of a type variable in the expected type, for example when * `ConstraintHandling#legalBound` creates type variables when approximating diff --git a/tests/pos-with-compiler-cc/dotc/typer/ProtoTypes.scala b/tests/pos-with-compiler-cc/dotc/typer/ProtoTypes.scala index 77fd2c1d6d66..285f9b983cb4 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/ProtoTypes.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/ProtoTypes.scala @@ -123,7 +123,7 @@ object ProtoTypes { } /** A trait for prototypes that match all types */ - trait MatchAlways extends ProtoType, caps.Pure { + trait MatchAlways extends ProtoType, Pure { def isMatchedBy(tp1: Type, keepConstraint: Boolean)(using Context): Boolean = true def map(tm: TypeMap @retains(caps.cap))(using Context): ProtoType = this def fold[T](x: T, ta: TypeAccumulator[T] @retains(caps.cap))(using Context): T = x @@ -131,7 +131,7 @@ object ProtoTypes { } /** A class marking ignored prototypes that can be revealed by `deepenProto` */ - abstract case class IgnoredProto(ignored: Type) extends CachedGroundType, MatchAlways, caps.Pure: + abstract case class IgnoredProto(ignored: Type) extends CachedGroundType, MatchAlways, Pure: private var myWasDeepened = false override def revealIgnored = ignored override def deepenProto(using Context): Type = @@ -165,7 +165,7 @@ object ProtoTypes { * [ ].name: proto */ abstract case class SelectionProto(name: Name, memberProto: Type, compat: Compatibility, privateOK: Boolean) - extends CachedProxyType, ProtoType, ValueTypeOrProto, caps.Pure { + extends CachedProxyType, ProtoType, ValueTypeOrProto, Pure { /** Is the set of members of this type unknown, in the sense that we * cannot compute a non-trivial upper approximation? This is the case if: @@ -575,7 +575,7 @@ object ProtoTypes { * []: argType => resultType */ abstract case class ViewProto(argType: Type, resType: Type) - extends CachedGroundType, ApplyingProto, caps.Pure { + extends CachedGroundType, ApplyingProto, Pure { override def resultType(using Context): Type = resType diff --git a/tests/pos-with-compiler-cc/dotc/typer/Synthesizer.scala b/tests/pos-with-compiler-cc/dotc/typer/Synthesizer.scala index 7bc9619922db..6ead3134235a 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/Synthesizer.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/Synthesizer.scala @@ -596,7 +596,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): case JavaArrayType(elemTp) => defn.ArrayOf(escapeJavaArray(elemTp)) case _ => tp - private enum ManifestKind extends caps.Pure: // !cc! should all enums be Pure? + private enum ManifestKind extends Pure: // !cc! should all enums be Pure? 
case Full, Opt, Clss /** The kind that should be used for an array element, if we are `OptManifest` then this diff --git a/tests/pos-with-compiler-cc/dotc/util/ReadOnlyMap.scala b/tests/pos-with-compiler-cc/dotc/util/ReadOnlyMap.scala index d24a9ab3ddb2..dbef59d6dee7 100644 --- a/tests/pos-with-compiler-cc/dotc/util/ReadOnlyMap.scala +++ b/tests/pos-with-compiler-cc/dotc/util/ReadOnlyMap.scala @@ -3,7 +3,7 @@ package dotc.util /** A class for the reading part of mutable or immutable maps. */ -abstract class ReadOnlyMap[Key, Value] extends caps.Pure: +abstract class ReadOnlyMap[Key, Value] extends Pure: def lookup(x: Key): Value | Null diff --git a/tests/pos-with-compiler-cc/dotc/util/ReadOnlySet.scala b/tests/pos-with-compiler-cc/dotc/util/ReadOnlySet.scala index 318a04e846fe..2ab59c4c6cb6 100644 --- a/tests/pos-with-compiler-cc/dotc/util/ReadOnlySet.scala +++ b/tests/pos-with-compiler-cc/dotc/util/ReadOnlySet.scala @@ -2,7 +2,7 @@ package dotty.tools.dotc.util /** A class for the readonly part of mutable sets. */ -abstract class ReadOnlySet[T] extends caps.Pure: +abstract class ReadOnlySet[T] extends Pure: /** The entry in the set such that `isEqual(x, entry)`, or else `null`. */ def lookup(x: T): T | Null diff --git a/tests/pos-with-compiler-cc/dotc/util/SourceFile.scala b/tests/pos-with-compiler-cc/dotc/util/SourceFile.scala index 8a5a4828adfd..351cf4c8bf85 100644 --- a/tests/pos-with-compiler-cc/dotc/util/SourceFile.scala +++ b/tests/pos-with-compiler-cc/dotc/util/SourceFile.scala @@ -60,7 +60,7 @@ object ScriptSourceFile { } } -class SourceFile(val file: AbstractFile, computeContent: -> Array[Char]) extends interfaces.SourceFile, caps.Pure { +class SourceFile(val file: AbstractFile, computeContent: -> Array[Char]) extends interfaces.SourceFile, Pure { import SourceFile._ private var myContent: Array[Char] | Null = null diff --git a/tests/pos-with-compiler-cc/dotc/util/SourcePosition.scala b/tests/pos-with-compiler-cc/dotc/util/SourcePosition.scala index ef4350741036..7ab565d58533 100644 --- a/tests/pos-with-compiler-cc/dotc/util/SourcePosition.scala +++ b/tests/pos-with-compiler-cc/dotc/util/SourcePosition.scala @@ -12,7 +12,7 @@ import scala.annotation.internal.sharable /** A source position is comprised of a span and a source file */ case class SourcePosition(source: SourceFile, span: Span, outer: SourcePosition = NoSourcePosition) -extends SrcPos, interfaces.SourcePosition, Showable, caps.Pure { +extends SrcPos, interfaces.SourcePosition, Showable, Pure { def sourcePos(using Context) = this diff --git a/tests/pos-with-compiler/tasty/test-definitions.scala b/tests/pos-with-compiler/tasty/test-definitions.scala index 7bbeeda2083a..f2b6232a8d5e 100644 --- a/tests/pos-with-compiler/tasty/test-definitions.scala +++ b/tests/pos-with-compiler/tasty/test-definitions.scala @@ -244,7 +244,7 @@ object definitions { def isStatic: Boolean // mapped to static Java member def isObject: Boolean // an object or its class (used for a ValDef or a ClassDef extends Modifier respectively) def isTrait: Boolean // a trait (used for a ClassDef) - def isLocal: Boolean // used in conjunction with Private/private[Type] to mean private[this] extends Modifier proctected[this] + def isLocal: Boolean // used in conjunction with Private/private[Type] to mean private[this] extends Modifier protected[this] def isSynthetic: Boolean // generated by Scala compiler def isArtifact: Boolean // to be tagged Java Synthetic def isMutable: Boolean // when used on a ValDef: a var diff --git 
a/tests/pos/10747-shapeless-min-spec.scala b/tests/pos/10747-shapeless-min-spec.scala new file mode 100644 index 000000000000..a0dce79a7830 --- /dev/null +++ b/tests/pos/10747-shapeless-min-spec.scala @@ -0,0 +1,13 @@ +trait Monoidal { + type to[_] <: Tuple +} + +object eithers extends Monoidal { + class Wrap[T] + + type to[t] <: Tuple = Wrap[t] match { + case Wrap[Nothing] => EmptyTuple + case Wrap[other] => other match + case Either[hd, tl] => hd *: to[tl] + } +} diff --git a/tests/pos/10747-shapeless-min.scala b/tests/pos/10747-shapeless-min.scala index fbb8012fc9f2..3599dd2db469 100644 --- a/tests/pos/10747-shapeless-min.scala +++ b/tests/pos/10747-shapeless-min.scala @@ -1,3 +1,5 @@ +//> using options -source:3.3 + trait Monoidal { type to[_] <: Tuple } diff --git a/tests/pos/12800.scala b/tests/pos/12800.scala new file mode 100644 index 000000000000..be625cb894e0 --- /dev/null +++ b/tests/pos/12800.scala @@ -0,0 +1,19 @@ +object Test { + type FieldType2[K, +V] = V with KeyTag2[K, V] + trait KeyTag2[K, +V] extends Any + + type WrapUpper = Tuple + type Wrap[A] = Tuple1[A] + + type Extract[A <: WrapUpper] = A match { + case Wrap[h] => h + } + + summon[Extract[Wrap[FieldType2["foo", Int]]] =:= FieldType2["foo", Int]] + + // This used to cause an error because `Tuple1[FieldType2["foo", Int]]` was + // "provablyEmpty". Since we switched to testing the combination of + // `scrut <: pattern` *and* `provablyDisjoint(scrut, pattern)` instead, this + // particular example compiles, because `FieldType2["foo", Int]` is not + // `provablyDisjoint` from `h` (`Any`). +} diff --git a/tests/pos/16583.scala b/tests/pos/16583.scala new file mode 100644 index 000000000000..06f9f01b1772 --- /dev/null +++ b/tests/pos/16583.scala @@ -0,0 +1,19 @@ +import scala.compiletime.constValueTuple + +val ll0: Tuple3["one", "two", "three"] = constValueTuple[("one", "two", "three")] +val ll1 = constValueTuple[("one", "two", "three")].toList +val ll3: List["one" | ("two" | ("three" | Nothing))] = constValueTuple[("one", "two", "three")].toList +val ll4: List["one" | ("two" | "three")] = constValueTuple[("one", "two", "three")].toList + +inline def labels[Labels <: Tuple](using ev: Tuple.Union[Labels] <:< String): List[String] = + val tmp = constValueTuple[Labels].toList + ev.substituteCo(tmp) + +def test = labels[("one", "two", "three")] + +def toList(x: Tuple): List[Tuple.Union[x.type]] = ??? 
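+// Editorial sketch (names `pair`/`pairList` invented for illustration):
+// constValueTuple materializes a tuple of literal types as a value, and
+// Tuple#toList is typed through Tuple.Union, so the element type is "a" | "b".
+val pair: ("a", "b") = constValueTuple[("a", "b")]
+val pairList: List["a" | "b"] = pair.toList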
+def test2[Labels <: Tuple] = toList((???): Labels) + +def i16654 = + def t1: Tuple = EmptyTuple + val t2 = t1.toList diff --git a/tests/pos/16654.scala b/tests/pos/16654.scala new file mode 100644 index 000000000000..9234c309de88 --- /dev/null +++ b/tests/pos/16654.scala @@ -0,0 +1,7 @@ +def toCsvFlat[A <: Product](a: A)(using m: scala.deriving.Mirror.ProductOf[A]) = { + def flatTuple(any: Any): Tuple = any match + case p: Product => p.productIterator.map(flatTuple).foldLeft(EmptyTuple: Tuple)(_ ++ _) + case a => Tuple1(a) + + val tuple = flatTuple(Tuple.fromProductTyped(a)).toList +} diff --git a/tests/pos/18789.scala b/tests/pos/18789.scala new file mode 100644 index 000000000000..28ddfd56305d --- /dev/null +++ b/tests/pos/18789.scala @@ -0,0 +1,4 @@ +sealed trait TypeId: + case class Foo() extends TypeId + +object TypeId {} diff --git a/tests/pos/8647.scala b/tests/pos/8647.scala index 5e8f839b27ca..f597caef4484 100644 --- a/tests/pos/8647.scala +++ b/tests/pos/8647.scala @@ -1,3 +1,5 @@ +//> using options -source:3.3 + final class Two[A, B]() final class Blaaa diff --git a/tests/pos/9757.scala b/tests/pos/9757.scala index aeecfa0a472f..8f4af92c20ef 100644 --- a/tests/pos/9757.scala +++ b/tests/pos/9757.scala @@ -1,3 +1,5 @@ +//> using options -source:3.3 + type RemoveFrom[R, A] = R match { case A & newType => newType } diff --git a/tests/pos/AE-9a131723f09b9f77c99c52b709965e580a61706e.scala b/tests/pos/AE-9a131723f09b9f77c99c52b709965e580a61706e.scala index 6a7de4da0d2f..76c0c3d731e9 100755 --- a/tests/pos/AE-9a131723f09b9f77c99c52b709965e580a61706e.scala +++ b/tests/pos/AE-9a131723f09b9f77c99c52b709965e580a61706e.scala @@ -1 +1 @@ -object I0 { val i1: PartialFunction[_, Int] = { case i2 => i2 } } +object I0 { val i1: PartialFunction[_, Any] = { case i2 => i2 } } diff --git a/tests/pos/Dynamic.scala b/tests/pos/Dynamic.scala new file mode 100644 index 000000000000..9f3a83468fc1 --- /dev/null +++ b/tests/pos/Dynamic.scala @@ -0,0 +1,5 @@ +//> using options -Xfatal-warnings -deprecation -feature + +package scala + +trait Dynamic extends Any diff --git a/tests/pos/TupleReverse.scala b/tests/pos/TupleReverse.scala new file mode 100644 index 000000000000..9b83280afcf1 --- /dev/null +++ b/tests/pos/TupleReverse.scala @@ -0,0 +1,16 @@ +import scala.Tuple.* +def test[T1, T2, T3, T4] = + summon[Reverse[EmptyTuple] =:= EmptyTuple] + summon[Reverse[T1 *: EmptyTuple] =:= T1 *: EmptyTuple] + summon[Reverse[(T1, T2)] =:= (T2, T1)] + summon[Reverse[(T1, T2, T3)] =:= (T3, T2, T1)] + summon[Reverse[(T1, T2, T3, T4)] =:= (T4, T3, T2, T1)] + + summon[Reverse[(T1, T2, T3, T4)] =:= Reverse[(T1, T2, T3, T4)]] + summon[Reverse[(T1, T2, T3, T4)] <:< Reverse[(Any, Any, Any, Any)]] + +def test2[Tup <: Tuple] = + summon[Reverse[Tup] =:= Reverse[Tup]] + +def test3[T1, T2, T3, T4](tup1: (T1, T2, T3, T4)) = + summon[Reverse[tup1.type] =:= (T4, T3, T2, T1)] diff --git a/tests/pos/TupleReverseOnto.scala b/tests/pos/TupleReverseOnto.scala new file mode 100644 index 000000000000..bef2d0949b29 --- /dev/null +++ b/tests/pos/TupleReverseOnto.scala @@ -0,0 +1,15 @@ +import scala.Tuple.* +def test[T1, T2, T3, T4] = + summon[ReverseOnto[(T1, T2), (T3, T4)] =:= ReverseOnto[(T1, T2), (T3, T4)]] + summon[ReverseOnto[(T1, T2), (T3, T4)] =:= (T2, T1, T3, T4)] + summon[ReverseOnto[(T1, T2), (T3, T4)] <:< (Any, Any, Any, Any)] + summon[ReverseOnto[(T1, T2), (T3, T4)] <:< ReverseOnto[(Any, Any), (Any, Any)]] + summon[ReverseOnto[(T1, T2), (T3, T4)] =:= Concat[Reverse[(T1, T2)], (T3, T4)]] + +def test2[Tup1 <: Tuple, Tup2 <: Tuple] 
= + summon[ReverseOnto[EmptyTuple, Tup1] =:= Tup1] + summon[ReverseOnto[Tup1, EmptyTuple] =:= Reverse[Tup1]] + +def test3[T1, T2, T3, T4](tup1: (T1, T2), tup2: (T3, T4)) = + summon[ReverseOnto[tup1.type, tup2.type] <:< (T2, T1, T3, T4)] + summon[ReverseOnto[tup1.type, tup2.type] =:= T2 *: T1 *: tup2.type] diff --git a/tests/pos/adhoc-extension/A.scala b/tests/pos/adhoc-extension/A.scala new file mode 100644 index 000000000000..efc0415cdc15 --- /dev/null +++ b/tests/pos/adhoc-extension/A.scala @@ -0,0 +1,5 @@ +//> using options -source future -deprecation -Xfatal-warnings + +package adhoc +class A +abstract class Abs diff --git a/tests/pos-custom-args/strict/adhoc-extension/B.scala b/tests/pos/adhoc-extension/B.scala similarity index 100% rename from tests/pos-custom-args/strict/adhoc-extension/B.scala rename to tests/pos/adhoc-extension/B.scala diff --git a/tests/pos/alphanumeric-infix-operator-compat/A_1_c3.0.0.scala b/tests/pos/alphanumeric-infix-operator-compat/A_1_c3.0.0.scala new file mode 100644 index 000000000000..51ea7ec45de6 --- /dev/null +++ b/tests/pos/alphanumeric-infix-operator-compat/A_1_c3.0.0.scala @@ -0,0 +1,3 @@ +class A: + def x(i: Int) = i + infix def y(i: Int) = i diff --git a/tests/pos/alphanumeric-infix-operator-compat/B_1_c3.1.0.scala b/tests/pos/alphanumeric-infix-operator-compat/B_1_c3.1.0.scala new file mode 100644 index 000000000000..2377b01f7fec --- /dev/null +++ b/tests/pos/alphanumeric-infix-operator-compat/B_1_c3.1.0.scala @@ -0,0 +1,3 @@ +class B: + def x(i: Int) = i + infix def y(i: Int) = i diff --git a/tests/pos/alphanumeric-infix-operator-compat/C_1_c3.2.0.scala b/tests/pos/alphanumeric-infix-operator-compat/C_1_c3.2.0.scala new file mode 100644 index 000000000000..c1e11d2547ee --- /dev/null +++ b/tests/pos/alphanumeric-infix-operator-compat/C_1_c3.2.0.scala @@ -0,0 +1,3 @@ +class C: + def x(i: Int) = i + infix def y(i: Int) = i diff --git a/tests/pos/alphanumeric-infix-operator-compat/D_1_c3.3.0.scala b/tests/pos/alphanumeric-infix-operator-compat/D_1_c3.3.0.scala new file mode 100644 index 000000000000..0e05a819694c --- /dev/null +++ b/tests/pos/alphanumeric-infix-operator-compat/D_1_c3.3.0.scala @@ -0,0 +1,3 @@ +class D: + def x(i: Int) = i + infix def y(i: Int) = i diff --git a/tests/pos/alphanumeric-infix-operator-compat/Test3.4_2.scala b/tests/pos/alphanumeric-infix-operator-compat/Test3.4_2.scala new file mode 100644 index 000000000000..dd6367518fb4 --- /dev/null +++ b/tests/pos/alphanumeric-infix-operator-compat/Test3.4_2.scala @@ -0,0 +1,15 @@ +//> using options -Werror + +import language.`3.4` + +def test1(a: A, b: B, c: C, d: D): Unit = + a x 1 // ok: was compiled with 3.0 + b x 1 // ok: was compiled with 3.1 + c x 1 // ok: was compiled with 3.2 + d x 1 // ok: was compiled with 3.3 + + // ok: is marked as infix + a y 2 + b y 2 + c y 2 + d y 2 diff --git a/tests/pos/alphanumeric-infix-operator-compat/TestFuture_2.scala b/tests/pos/alphanumeric-infix-operator-compat/TestFuture_2.scala new file mode 100644 index 000000000000..cf6a9d0adfc2 --- /dev/null +++ b/tests/pos/alphanumeric-infix-operator-compat/TestFuture_2.scala @@ -0,0 +1,13 @@ +import language.future + +def test2(a: A, b: B, c: C, d: D): Unit = + a x 1 // ok: was compiled with 3.0 + b x 1 // ok: was compiled with 3.1 + c x 1 // ok: was compiled with 3.2 + d x 1 // ok: was compiled with 3.3 + + // ok: is marked as infix + a y 2 + b y 2 + c y 2 + d y 2 diff --git a/tests/pos-special/fatal-warnings/annot-constant/Annot_1.java b/tests/pos/annot-constant/Annot_1.java similarity index 100% 
rename from tests/pos-special/fatal-warnings/annot-constant/Annot_1.java rename to tests/pos/annot-constant/Annot_1.java diff --git a/tests/pos-special/fatal-warnings/annot-constant/Constants_1.java b/tests/pos/annot-constant/Constants_1.java similarity index 100% rename from tests/pos-special/fatal-warnings/annot-constant/Constants_1.java rename to tests/pos/annot-constant/Constants_1.java diff --git a/tests/pos/annot-constant/Test_2.scala b/tests/pos/annot-constant/Test_2.scala new file mode 100644 index 000000000000..420b33195a45 --- /dev/null +++ b/tests/pos/annot-constant/Test_2.scala @@ -0,0 +1,8 @@ +//> using options -Xfatal-warnings -deprecation -feature + +package pkg + +object U { + println(Constants_1.foo()) // The same constant in the constant pool is first unpickled here as a boolean + println(Constants_1.BYTE) // ... and here as a byte +} diff --git a/tests/pos/boxmap-paper.scala b/tests/pos/boxmap-paper.scala index 7c2c005e6a61..aa983114ed8a 100644 --- a/tests/pos/boxmap-paper.scala +++ b/tests/pos/boxmap-paper.scala @@ -19,7 +19,7 @@ def lazyMap[A, B](c: Cell[A])(f: A => B): () ->{f} Cell[B] trait IO: def print(s: String): Unit -def test(io: IO^{cap}) = +def test(io: IO^) = val loggedOne: () ->{io} Int = () => { io.print("1"); 1 } diff --git a/tests/pos/cc-backwards-compat/A.scala b/tests/pos/cc-backwards-compat/A.scala new file mode 100644 index 000000000000..90280bf3d1a0 --- /dev/null +++ b/tests/pos/cc-backwards-compat/A.scala @@ -0,0 +1,5 @@ +package p +class A(f: Int => Int): + def foo(f: Int => Int) = ??? + def map(other: Iter): Iter = other + def pair[T](x: T): (T, T) = (x, x) diff --git a/tests/pos/cc-backwards-compat/Iter.scala b/tests/pos/cc-backwards-compat/Iter.scala new file mode 100644 index 000000000000..e2f0775a058f --- /dev/null +++ b/tests/pos/cc-backwards-compat/Iter.scala @@ -0,0 +1,12 @@ +package p +import language.experimental.captureChecking + +class Iter: + self: Iter^ => + +def test(it: Iter^) = + val f: Int ->{it} Int = ??? + val a = new A(f) + val b = a.map(it) // does not work yet + val c = a.pair(it) + val d = a.foo(f) diff --git a/tests/pos/cc-experimental.scala b/tests/pos/cc-experimental.scala new file mode 100644 index 000000000000..4ee1f6732356 --- /dev/null +++ b/tests/pos/cc-experimental.scala @@ -0,0 +1,13 @@ +//> using options -Yno-experimental + +package scala.runtime + +import language.experimental.captureChecking + +object test: + type T = Pure + +class Foo extends Object, Pure: + val x: Pure = ??? 
+ def foo() = ()
+
diff --git a/tests/pos/constvalue-of-failed-match-type.scala b/tests/pos/constvalue-of-failed-match-type.scala
new file mode 100644
index 000000000000..985e8b0c8b56
--- /dev/null
+++ b/tests/pos/constvalue-of-failed-match-type.scala
@@ -0,0 +1,22 @@
+import scala.compiletime.*
+
+object Inlines:
+ inline def testInline[A](): Boolean =
+ inline erasedValue[A] match
+ case _: Tuple =>
+ constValue[Tuple.Size[A & Tuple]] == 2
+ case _ =>
+ false
+end Inlines
+
+case class Foo2(x: Boolean, y: String)
+case class Foo3(x: Boolean, y: String, z: Int)
+
+object Test:
+ def main(args: Array[String]): Unit =
+ // Note: the asserts don't do anything since it's a pos test; they show intent (and pass if we run the test)
+ assert(!Inlines.testInline[Foo2]())
+ assert(!Inlines.testInline[Foo3]())
+ assert(Inlines.testInline[(Boolean, String)]())
+ assert(!Inlines.testInline[(Boolean, String, Int)]())
+end Test
diff --git a/tests/pos/dotty-experimental.scala b/tests/pos/dotty-experimental.scala
new file mode 100644
index 000000000000..9cffddc0b8ba
--- /dev/null
+++ b/tests/pos/dotty-experimental.scala
@@ -0,0 +1,8 @@
+//> using options -Yno-experimental
+
+import language.experimental.captureChecking
+object test {
+
+ val x: caps.Cap = caps.cap
+
+}
diff --git a/tests/pos/erased-args-lifted.scala b/tests/pos/erased-args-lifted.scala
new file mode 100644
index 000000000000..cf68e17dfbbb
--- /dev/null
+++ b/tests/pos/erased-args-lifted.scala
@@ -0,0 +1,14 @@
+//> using options -language:experimental.erasedDefinitions
+
+object Test {
+ def foo(erased a: Int)(b: Int, c: Int) = 42
+ def bar(i: Int): Int = {
+ println(1)
+ 42
+ }
+ def baz: Int = {
+ println(1)
+ 2
+ }
+ foo(bar(baz))(c = baz, b = baz) // force all args to be lifted in vals before the call
+}
diff --git a/tests/pos-custom-args/erased/erased-asInstanceOf.scala b/tests/pos/erased-asInstanceOf.scala
similarity index 78%
rename from tests/pos-custom-args/erased/erased-asInstanceOf.scala
rename to tests/pos/erased-asInstanceOf.scala
index 956e325646d9..692ff3a16b05 100644
--- a/tests/pos-custom-args/erased/erased-asInstanceOf.scala
+++ b/tests/pos/erased-asInstanceOf.scala
@@ -1,3 +1,4 @@
+//> using options -language:experimental.erasedDefinitions
 trait Dataset {
 def select(erased c: Column): Unit = ()
diff --git a/tests/pos-custom-args/erased/erased-class-as-args.scala b/tests/pos/erased-class-as-args.scala
similarity index 89%
rename from tests/pos-custom-args/erased/erased-class-as-args.scala
rename to tests/pos/erased-class-as-args.scala
index 74c827fbd54b..128cd2b818e4 100644
--- a/tests/pos-custom-args/erased/erased-class-as-args.scala
+++ b/tests/pos/erased-class-as-args.scala
@@ -1,3 +1,5 @@
+//> using options -language:experimental.erasedDefinitions
+
 erased class A
 erased class B(val x: Int) extends A
diff --git a/tests/pos-custom-args/erased/erased-deep-context.scala b/tests/pos/erased-deep-context.scala
similarity index 80%
rename from tests/pos-custom-args/erased/erased-deep-context.scala
rename to tests/pos/erased-deep-context.scala
index 049bf14c2e4f..2303feec5747 100644
--- a/tests/pos-custom-args/erased/erased-deep-context.scala
+++ b/tests/pos/erased-deep-context.scala
@@ -1,3 +1,5 @@
+//> using options -language:experimental.erasedDefinitions
+
 object Test {
 def outer1(): Int = {
 def inner(erased a: Int): Int = 0
diff --git a/tests/pos/erased-extension-method.scala b/tests/pos/erased-extension-method.scala
new file mode 100644
index 000000000000..f0d5f502334c
--- /dev/null
+++
b/tests/pos/erased-extension-method.scala @@ -0,0 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + +class IntDeco(x: Int) extends AnyVal { + def foo(erased y: Int) = x +} diff --git a/tests/pos-custom-args/erased/erased-lub-2.scala b/tests/pos/erased-lub-2.scala similarity index 80% rename from tests/pos-custom-args/erased/erased-lub-2.scala rename to tests/pos/erased-lub-2.scala index f0f9b27cc7a4..383275341904 100644 --- a/tests/pos-custom-args/erased/erased-lub-2.scala +++ b/tests/pos/erased-lub-2.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + trait Foo trait PF[A, +B] { diff --git a/tests/pos-custom-args/erased/erased-lub.scala b/tests/pos/erased-lub.scala similarity index 87% rename from tests/pos-custom-args/erased/erased-lub.scala rename to tests/pos/erased-lub.scala index d3d2183c123b..06ee25be79e1 100644 --- a/tests/pos-custom-args/erased/erased-lub.scala +++ b/tests/pos/erased-lub.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + // Verify that expressions below perform correct boxings in erasure. object Test { def id[T](t: T) = t diff --git a/tests/pos-custom-args/erased/erased-soft-keyword.scala b/tests/pos/erased-soft-keyword.scala similarity index 91% rename from tests/pos-custom-args/erased/erased-soft-keyword.scala rename to tests/pos/erased-soft-keyword.scala index fdb884628c7d..fdcc6d85ebd0 100644 --- a/tests/pos-custom-args/erased/erased-soft-keyword.scala +++ b/tests/pos/erased-soft-keyword.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + def f1(x: Int, erased y: Int) = 0 def f2(x: Int, erased: Int) = 0 inline def f3(x: Int, inline erased: Int) = 0 diff --git a/tests/pos/erased-typedef.scala b/tests/pos/erased-typedef.scala new file mode 100644 index 000000000000..bd0e012e6e6c --- /dev/null +++ b/tests/pos/erased-typedef.scala @@ -0,0 +1,10 @@ +//> using options -language:experimental.erasedDefinitions + +trait Monadless[Monad[_]] { + + type M[T] = Monad[T] + + def lift[T](body: T): Monad[T] = ??? + + def unlift[T](m: M[T]): T = ??? 
+} \ No newline at end of file diff --git a/tests/pos/expeimental-flag-with-lang-feature.scala b/tests/pos/expeimental-flag-with-lang-feature.scala new file mode 100644 index 000000000000..9cfb716b1015 --- /dev/null +++ b/tests/pos/expeimental-flag-with-lang-feature.scala @@ -0,0 +1,10 @@ +//> using options -experimental -Yno-experimental + +import scala.language.experimental.erasedDefinitions +import scala.language.experimental.namedTypeArguments + +erased def erasedFun(erased x: Int): Int = x + +def namedTypeArgumentsFun[T, U]: Int = + namedTypeArgumentsFun[T = Int, U = Int] + namedTypeArgumentsFun[U = Int, T = Int] diff --git a/tests/pos/expeimental-flag.scala b/tests/pos/expeimental-flag.scala new file mode 100644 index 000000000000..9d3daf12fddc --- /dev/null +++ b/tests/pos/expeimental-flag.scala @@ -0,0 +1,18 @@ +//> using options -experimental -Yno-experimental + +import scala.annotation.experimental + +class Foo: + def foo: Int = experimentalDef + +class Bar: + def bar: Int = experimentalDef +object Bar: + def bar: Int = experimentalDef + +object Baz: + def bar: Int = experimentalDef + +def toplevelMethod: Int = experimentalDef + +@experimental def experimentalDef: Int = 1 diff --git a/tests/pos-custom-args/no-experimental/experimental-imports-empty.scala b/tests/pos/experimental-imports-empty.scala similarity index 82% rename from tests/pos-custom-args/no-experimental/experimental-imports-empty.scala rename to tests/pos/experimental-imports-empty.scala index 998086c5d9a4..18d83839e7e7 100644 --- a/tests/pos-custom-args/no-experimental/experimental-imports-empty.scala +++ b/tests/pos/experimental-imports-empty.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import annotation.experimental import language.experimental.namedTypeArguments import language.experimental.genericNumberLiterals diff --git a/tests/pos/experimental-imports-top.scala b/tests/pos/experimental-imports-top.scala new file mode 100644 index 000000000000..16f44e48eb32 --- /dev/null +++ b/tests/pos/experimental-imports-top.scala @@ -0,0 +1,7 @@ +//> using options -Yno-experimental + +import language.experimental.erasedDefinitions +import annotation.experimental + +@experimental +erased def f = 1 diff --git a/tests/pos-special/extend-java-enum.scala b/tests/pos/extend-java-enum.scala similarity index 93% rename from tests/pos-special/extend-java-enum.scala rename to tests/pos/extend-java-enum.scala index 323b6d82639f..f58922f81749 100644 --- a/tests/pos-special/extend-java-enum.scala +++ b/tests/pos/extend-java-enum.scala @@ -1,3 +1,5 @@ +//> using options -source 3.0-migration + import java.{lang as jl} final class ConfigSyntax private (name: String, ordinal: Int) diff --git a/tests/pos/fewer-braces.scala b/tests/pos/fewer-braces.scala index a446eeb2b652..a95a0367c7d5 100644 --- a/tests/pos/fewer-braces.scala +++ b/tests/pos/fewer-braces.scala @@ -11,3 +11,57 @@ def Test = x + 1 println(y) + + true // Should not count as the call true() + () + + // Setup + def times(x: Int)(f: => Unit): Unit = + (0 to x).foreach(_ => f) + val credentials = Seq() + object Path {def userHome = File} + object File {def exists = true; def /(s: String) = this} + def Credentials(f: File.type) = f + val xs = List(1,2,3) + def f(x: Int, g: Int => Int) = g(x) + val x = 4 + + // Copied from docs/_docs/reference/other-new-features/indentation.md + + times(10): + println("ah") + println("ha") + + credentials `++`: + val file = Path.userHome / ".credentials" + if file.exists + then Seq(Credentials(file)) + else Seq() + + xs.map: + x 
=> + val y = x - 1 + y * y + + xs.foldLeft(0): (x, y) => + x + y + + { + val x = 4 + f(x: Int, y => + x * ( + y + 1 + ) + + (x + + x) + ) + } + + x match + case 1 => print("I") + case 2 => print("II") + case 3 => print("III") + case 4 => print("IV") + case 5 => print("V") + + println(".") diff --git a/tests/pos/folds.scala b/tests/pos/folds.scala new file mode 100644 index 000000000000..c1cd737f368d --- /dev/null +++ b/tests/pos/folds.scala @@ -0,0 +1,34 @@ + +object Test: + extension [A](xs: List[A]) + def foldl[B](acc: B)(f: (A, B) => B): B = ??? + + val xs = List(1, 2, 3) + + val _ = xs.foldl(List())((y, ys) => y :: ys) + + val _ = xs.foldl(Nil)((y, ys) => y :: ys) + + def partition[a](xs: List[a], pred: a => Boolean): Tuple2[List[a], List[a]] = { + xs.foldRight/*[Tuple2[List[a], List[a]]]*/((List(), List())) { + (x, p) => if (pred (x)) (x :: p._1, p._2) else (p._1, x :: p._2) + } + } + + def snoc[A](xs: List[A], x: A) = x :: xs + + def reverse[A](xs: List[A]) = + xs.foldLeft(Nil)(snoc) + + def reverse2[A](xs: List[A]) = + xs.foldLeft(List())(snoc) + + val ys: Seq[Int] = xs + ys.foldLeft(Seq())((ys, y) => y +: ys) + ys.foldLeft(Nil)((ys, y) => y +: ys) + + def dup[A](xs: List[A]) = + xs.foldRight(Nil)((x, xs) => x :: x :: xs) + + def toSet[A](xs: Seq[A]) = + xs.foldLeft(Set.empty)(_ + _) diff --git a/tests/pos/functorial-functors.scala b/tests/pos/functorial-functors.scala new file mode 100644 index 000000000000..5e810fa989a6 --- /dev/null +++ b/tests/pos/functorial-functors.scala @@ -0,0 +1,14 @@ +class Common: + + trait Functor: + type F[X] + extension [A](x: F[A]) def map[B](f: A => B): F[B] + + trait Monad extends Functor: + extension [A](x: F[A]) + def flatMap[B](f: A => F[B]): F[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) + + def pure[A](x: A): F[A] +end Common + diff --git a/tests/pos/given-loop-prevention.scala b/tests/pos/given-loop-prevention.scala new file mode 100644 index 000000000000..0bae0bb24fed --- /dev/null +++ b/tests/pos/given-loop-prevention.scala @@ -0,0 +1,14 @@ +//> using options -Xfatal-warnings + +class Foo + +object Bar { + given Foo with {} + given List[Foo] = List(summon[Foo]) // ok +} + +object Baz { + @annotation.nowarn + given List[Foo] = List(summon[Foo]) // gives a warning, which is suppressed + given Foo with {} +} diff --git a/tests/pos/help.scala b/tests/pos/help.scala new file mode 100644 index 000000000000..0f674fd3ba3b --- /dev/null +++ b/tests/pos/help.scala @@ -0,0 +1,5 @@ +//> using options -help -V -W -X -Y + +// dummy source for exercising information flags +// +class Help diff --git a/tests/pos/i10107c.scala b/tests/pos/i10107c.scala index b42e62ddb7dc..98b839e29ef4 100644 --- a/tests/pos/i10107c.scala +++ b/tests/pos/i10107c.scala @@ -1,5 +1,3 @@ -import scala.quoted._ - inline def isTrue: Boolean = true inline def oneOf(): String = { diff --git a/tests/pos/i10242.scala b/tests/pos/i10242.scala index 707c4c9f0a0c..10883633971e 100644 --- a/tests/pos/i10242.scala +++ b/tests/pos/i10242.scala @@ -1,4 +1,6 @@ -// https://github.com/lampepfl/dotty/issues/10242 +//> using options -source:3.3 + +// https://github.com/lampepfl/dotty/issues/10242 type Foo[A, B <: A] = A type Bar[A] = A match { diff --git a/tests/pos/i10247.scala b/tests/pos/i10247.scala new file mode 100644 index 000000000000..4268e309a4ae --- /dev/null +++ b/tests/pos/i10247.scala @@ -0,0 +1,22 @@ +//> using options -Xfatal-warnings -deprecation -feature + +// check that deprecation warnings of Red are not caught in its enclosing scope +enum Color(rgb: Int) { + + 
@deprecated("stop using Red", "0.1") + case Red extends Color(0xff0000) + + case Green extends Color(0x00ff00) + + case Blue extends Color(0x0000ff) + + final def colorCode: Option[Int] = this match { + case Red => None + case _ => Some(rgb) + } + +} + +object Color { + val deprecatedMembers = Set(Red) +} diff --git a/tests/pos/i10259.scala b/tests/pos/i10259.scala new file mode 100644 index 000000000000..101a3d869a04 --- /dev/null +++ b/tests/pos/i10259.scala @@ -0,0 +1,10 @@ +//> using options -Xfatal-warnings -deprecation -feature + +trait S[T] extends (T => T): + def apply(x: T) = ??? + extension (x: T) def show: String + +given S[Int] with + extension (x: Int) def show = x.toString + +val x = 10.show diff --git a/tests/pos/i10369.scala b/tests/pos/i10369.scala index 8689c2833664..90261230f75b 100644 --- a/tests/pos/i10369.scala +++ b/tests/pos/i10369.scala @@ -3,12 +3,6 @@ type Upgrade[T] = T match case Char => String case Boolean => Boolean -val upgrade: [t] => t => Upgrade[t] = new PolyFunction: - def apply[T](x: T): Upgrade[T] = x match - case x: Int => x.toDouble - case x: Char => x.toString - case x: Boolean => !x - val upgrade2: [t] => t => Upgrade[t] = [t] => (x: t) => x match case x: Int => x.toDouble case x: Char => x.toString diff --git a/tests/pos/i10383.scala b/tests/pos/i10383.scala new file mode 100644 index 000000000000..f14267a38041 --- /dev/null +++ b/tests/pos/i10383.scala @@ -0,0 +1,3 @@ +//> using options -source future -deprecation -Xfatal-warnings + +def r = BigInt(1) to BigInt(3) // error diff --git a/tests/pos/i10848a.scala b/tests/pos/i10848a.scala new file mode 100644 index 000000000000..434cf716c930 --- /dev/null +++ b/tests/pos/i10848a.scala @@ -0,0 +1,7 @@ +//> using options -language:experimental.erasedDefinitions + +class IsOn[T] +type On +object IsOn { + erased given IsOn[On] = new IsOn[On] +} diff --git a/tests/pos/i10848b.scala b/tests/pos/i10848b.scala new file mode 100644 index 000000000000..bff74663c359 --- /dev/null +++ b/tests/pos/i10848b.scala @@ -0,0 +1,6 @@ +//> using options -language:experimental.erasedDefinitions + +class Foo: + erased given Int = 1 + def foo(using erased x: Int): Unit = () + foo diff --git a/tests/pos/i11022.scala b/tests/pos/i11022.scala index d020669049c5..aa211426387d 100644 --- a/tests/pos/i11022.scala +++ b/tests/pos/i11022.scala @@ -1,3 +1,3 @@ -// scalac: -Werror -deprecation +//> using options -Werror -deprecation @deprecated("no CaseClass") case class CaseClass(rgb: Int) diff --git a/tests/pos/i11184a.scala b/tests/pos/i11184a.scala index b4b0beca45ee..200cc39d5199 100644 --- a/tests/pos/i11184a.scala +++ b/tests/pos/i11184a.scala @@ -1,5 +1,3 @@ -import scala.quoted._ - inline def isTrue: Boolean = true inline def oneOf: String = inline if isTrue then "foo" else "bar" def test1 = oneOf diff --git a/tests/pos/i11184b.scala b/tests/pos/i11184b.scala index 16a12b92d15b..7d5ac480fbaf 100644 --- a/tests/pos/i11184b.scala +++ b/tests/pos/i11184b.scala @@ -1,5 +1,3 @@ -import scala.quoted._ - inline def isTrue(): Boolean = true inline def oneOf: String = inline if isTrue() then "foo" else "bar" def test1 = oneOf diff --git a/tests/pos/i11184c.scala b/tests/pos/i11184c.scala index bcd83fb056a2..138df57e7c44 100644 --- a/tests/pos/i11184c.scala +++ b/tests/pos/i11184c.scala @@ -1,5 +1,3 @@ -import scala.quoted._ - object Foo: inline def isTrue: Boolean = true inline def oneOf: String = inline if Foo.isTrue then "foo" else "bar" diff --git a/tests/pos/i11184d.scala b/tests/pos/i11184d.scala index 3807159a9c5d..37091190a175 
100644 --- a/tests/pos/i11184d.scala +++ b/tests/pos/i11184d.scala @@ -1,5 +1,3 @@ -import scala.quoted._ - inline def isTrue: Boolean = true transparent inline def oneOf: Any = inline if isTrue then isTrue else "bar" def test1 = oneOf diff --git a/tests/pos-special/fatal-warnings/i11729.scala b/tests/pos/i11729.scala similarity index 82% rename from tests/pos-special/fatal-warnings/i11729.scala rename to tests/pos/i11729.scala index 7ba41d081e3b..e97b285ac6a2 100644 --- a/tests/pos-special/fatal-warnings/i11729.scala +++ b/tests/pos/i11729.scala @@ -1,9 +1,11 @@ +//> using options -Xfatal-warnings -deprecation -feature + type Return[X] = X match case List[t] => List[t] case Any => List[X] object Return: - def apply[A](a:A):Return[A] = a match + def apply[A](a:A):Return[A] = a match case a: List[t] => a case a: Any => List(a) diff --git a/tests/pos-custom-args/erased/i11896.scala b/tests/pos/i11896.scala similarity index 100% rename from tests/pos-custom-args/erased/i11896.scala rename to tests/pos/i11896.scala diff --git a/tests/pos/i11982a.scala b/tests/pos/i11982a.scala new file mode 100644 index 000000000000..b3c68443ff04 --- /dev/null +++ b/tests/pos/i11982a.scala @@ -0,0 +1,14 @@ +package tuplefun +object Unpair { + + def pair[A, B](using a: ValueOf[A], b: ValueOf[B]): Tuple2[A, B] = + (a.value, b.value) + + def unpair[X <: Tuple2[?, ?]]( + using a: ValueOf[Tuple.Head[X]], + b: ValueOf[Tuple.Head[Tuple.Tail[X]]] + ): Tuple2[Tuple.Head[X], Tuple.Head[Tuple.Tail[X]]] = + type AA = Tuple.Head[X] + type BB = Tuple.Head[Tuple.Tail[X]] + pair[AA, BB](using a, b) +} diff --git a/tests/pos/i12077.scala b/tests/pos/i12077.scala new file mode 100644 index 000000000000..bab233143c8b --- /dev/null +++ b/tests/pos/i12077.scala @@ -0,0 +1,7 @@ +trait Wrapper[K] +trait Has0[T] + +def test[R](v: Wrapper[Has0[String] with R]):R = ??? + +val zz:Wrapper[Has0[String] with Has0[Int]] = ??? +val _ = test(zz) diff --git a/tests/pos/i12958.scala b/tests/pos/i12958.scala index 4c6cfec1f786..e509973d6898 100644 --- a/tests/pos/i12958.scala +++ b/tests/pos/i12958.scala @@ -1,5 +1,3 @@ -import scala.quoted.* - package mylib: object Export: transparent inline def exported: Any = 1 diff --git a/tests/pos-custom-args/i13044.scala b/tests/pos/i13044.scala similarity index 97% rename from tests/pos-custom-args/i13044.scala rename to tests/pos/i13044.scala index 33a20b5800c8..4c9b8b914062 100644 --- a/tests/pos-custom-args/i13044.scala +++ b/tests/pos/i13044.scala @@ -1,3 +1,5 @@ +//> using options -Xmax-inlines:33 + import scala.deriving.Mirror import scala.compiletime._ diff --git a/tests/pos/i13161.scala b/tests/pos/i13161.scala new file mode 100644 index 000000000000..4c6b2c4c32d0 --- /dev/null +++ b/tests/pos/i13161.scala @@ -0,0 +1,8 @@ +transparent inline def f: String = + inline 10 match + case _ => + inline Some["foo"]("foo") match + case Some(x) => x + +def test = + inline val failMsg = f diff --git a/tests/pos/i13190/B_2.scala b/tests/pos/i13190/B_2.scala deleted file mode 100644 index 2752778afa04..000000000000 --- a/tests/pos/i13190/B_2.scala +++ /dev/null @@ -1,15 +0,0 @@ -import Opaque.* - -object Test { - type FindField[R <: scala.Tuple, K] = R match { - case FieldType[K, f] *: t => f - case _ *: t => FindField[t, K] - } - - val f: FieldType["A", Int] = ??? 
- val f1: Int = f - //val f2: Int = f - - type R = FieldType["A", Int] *: FieldType["B", Double] *: FieldType["C", String] *: FieldType["D", Boolean] *: EmptyTuple - summon[FindField[R, "B"] =:= Double] -} diff --git a/tests/pos-special/fatal-warnings/i13433.scala b/tests/pos/i13433.scala similarity index 94% rename from tests/pos-special/fatal-warnings/i13433.scala rename to tests/pos/i13433.scala index 47a4a520e63a..c38199e3b917 100644 --- a/tests/pos-special/fatal-warnings/i13433.scala +++ b/tests/pos/i13433.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -deprecation -feature + import scala.reflect.TypeTest type Matcher[A] = A match { case String => String } diff --git a/tests/pos-special/fatal-warnings/i13433b.scala b/tests/pos/i13433b.scala similarity index 91% rename from tests/pos-special/fatal-warnings/i13433b.scala rename to tests/pos/i13433b.scala index 5e3625166fc0..e8316f92e330 100644 --- a/tests/pos-special/fatal-warnings/i13433b.scala +++ b/tests/pos/i13433b.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -deprecation -feature + import scala.reflect.ClassTag type Matcher[A] = A match { case String => String } diff --git a/tests/pos/i13548.scala b/tests/pos/i13548.scala index 2f2f62e17878..85df1bcd576e 100644 --- a/tests/pos/i13548.scala +++ b/tests/pos/i13548.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror sealed abstract class Foo[N, A] final case class Bar[B](foo: Foo[B, B]) extends Foo[B, B] class Test: diff --git a/tests/pos/i13558.scala b/tests/pos/i13558.scala index 0c8be379f6a9..6f18b770f467 100644 --- a/tests/pos/i13558.scala +++ b/tests/pos/i13558.scala @@ -1,5 +1,4 @@ package testcode -import language.experimental.relaxedExtensionImports class A diff --git a/tests/pos/i13848.scala b/tests/pos/i13848.scala new file mode 100644 index 000000000000..266f3edcf7ae --- /dev/null +++ b/tests/pos/i13848.scala @@ -0,0 +1,10 @@ +//> using options -Yno-experimental + +import annotation.experimental + +@main +@experimental +def run(): Unit = f + +@experimental +def f = 2 diff --git a/tests/pos/i13990.scala b/tests/pos/i13990.scala new file mode 100644 index 000000000000..80740e47bde0 --- /dev/null +++ b/tests/pos/i13990.scala @@ -0,0 +1,26 @@ + +object Test: + + inline val myInt = 1 << 6 + + // toLong + inline val char2Long: 99L = 'c'.toLong + inline val int2Long: 0L = 0.toLong + inline val long2Long: 0L = 0L.toLong + inline val int2LongPropagated: 64L = myInt.toLong + + // toInt + inline val char2Int: 99 = 'c'.toInt + inline val int2Int: 0 = 0.toInt + inline val long2Int: 0 = 0L.toInt + inline val long2IntWrapped: -2147483648 = 2147483648L.toInt + inline val int2IntPropagated: 64 = myInt.toInt + + // toChar + inline val char2Char: 'c' = 'c'.toChar + inline val int2Char: 'c' = 99.toChar + inline val long2Char: 'c' = 99L.toChar + inline val int2CharPropagated: '@' = myInt.toChar + + // chain everything + inline val wow: 1.0 = 1.toChar.toInt.toLong.toFloat.toDouble diff --git a/tests/pos/i14224.1.scala b/tests/pos/i14224.1.scala new file mode 100644 index 000000000000..c0eaa2eedbcd --- /dev/null +++ b/tests/pos/i14224.1.scala @@ -0,0 +1,11 @@ +//> using options -Werror + +// Derived from the extensive test in the gist in i14224 +// Minimising to the false positive in SealedTrait1.either + +sealed trait Foo[A, A1 <: A] +final case class Bar[A, A1 <: A](value: A1) extends Foo[A, A1] + +class Main: + def test[A, A1 <: A](foo: Foo[A, A1]): A1 = foo match + case Bar(v) => v diff --git a/tests/pos/i14287.scala b/tests/pos/i14287.scala index 
1291dc8adefc..a6063c00a9cc 100644 --- a/tests/pos/i14287.scala +++ b/tests/pos/i14287.scala @@ -1,4 +1,4 @@ -// scalac: -Yno-deep-subtypes +//> using options -Yno-deep-subtypes enum Free[+F[_], A]: case Return(a: A) case Suspend(s: F[A]) diff --git a/tests/pos/i14587.hard-union-tuples.scala b/tests/pos/i14587.hard-union-tuples.scala index e1a086655e5f..0ce4f46a101f 100644 --- a/tests/pos/i14587.hard-union-tuples.scala +++ b/tests/pos/i14587.hard-union-tuples.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror class Test: type Foo = Option[String] | Option[Int] diff --git a/tests/pos/i14588.scala b/tests/pos/i14588.scala new file mode 100644 index 000000000000..80483ace34f6 --- /dev/null +++ b/tests/pos/i14588.scala @@ -0,0 +1,20 @@ +//> using options -Werror + +class Test: + def t1: Unit = + (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23) match + case (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22, x23) => + def t2: Unit = + (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23) match + case (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22, x23, x24) => + def t3: Unit = + (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24) match + case (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22, x23) => + +object Main: + def main(args: Array[String]): Unit = { + val t = new Test + t.t1 + try { t.t2; ??? } catch case _: MatchError => () + try { t.t3; ??? } catch case _: MatchError => () + } diff --git a/tests/pos/i14637.scala b/tests/pos/i14637.scala new file mode 100644 index 000000000000..5ae3b5e7a881 --- /dev/null +++ b/tests/pos/i14637.scala @@ -0,0 +1,8 @@ +//> using options -Xfatal-warnings -deprecation -feature + +class C + +object Givens: + given cOrdering: Ordering[C] with + override def compare(c0: C, c1: C) = 0 + val greeting = "we love Givens" \ No newline at end of file diff --git a/tests/pos/i14807.scala b/tests/pos/i14807.scala index 7d3dbbcbed66..d914e9221f03 100644 --- a/tests/pos/i14807.scala +++ b/tests/pos/i14807.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror enum Foo: case One(value: String) case Two(value: Long, month: java.time.Month) diff --git a/tests/pos/i15029.bootstrap-reg.scala b/tests/pos/i15029.bootstrap-reg.scala index 980781aada07..11081590a13c 100644 --- a/tests/pos/i15029.bootstrap-reg.scala +++ b/tests/pos/i15029.bootstrap-reg.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror // minimisation of a regression that occurred in bootstrapping class Test: def t(a: Boolean, b: Boolean) = (a, b) match diff --git a/tests/pos/i15029.more.scala b/tests/pos/i15029.more.scala index 71b80211b717..fba375cfcb33 100644 --- a/tests/pos/i15029.more.scala +++ b/tests/pos/i15029.more.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror // Like tests/pos/i15029.scala, // but with a more complicated prefix diff --git a/tests/pos/i15029.orig.scala b/tests/pos/i15029.orig.scala index f671f4fa9184..cd7f6465d530 100644 --- a/tests/pos/i15029.orig.scala +++ b/tests/pos/i15029.orig.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror sealed trait Schema[A] object Schema extends RecordInstances diff --git a/tests/pos/i15029.scala b/tests/pos/i15029.scala index eeaa1613ad9b..7a85fe5b30d4 100644 --- a/tests/pos/i15029.scala +++ b/tests/pos/i15029.scala @@ 
-1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror sealed trait Schema[A] sealed trait RecordInstances: diff --git a/tests/pos-custom-args/no-experimental/i15133a.scala b/tests/pos/i15133a.scala similarity index 81% rename from tests/pos-custom-args/no-experimental/i15133a.scala rename to tests/pos/i15133a.scala index c7f55f7838c7..1aff3a5c1cfc 100644 --- a/tests/pos-custom-args/no-experimental/i15133a.scala +++ b/tests/pos/i15133a.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import scala.annotation.experimental @experimental diff --git a/tests/pos-custom-args/no-experimental/i15133b.scala b/tests/pos/i15133b.scala similarity index 81% rename from tests/pos-custom-args/no-experimental/i15133b.scala rename to tests/pos/i15133b.scala index d14d6e84cf6e..4c235d37c698 100644 --- a/tests/pos-custom-args/no-experimental/i15133b.scala +++ b/tests/pos/i15133b.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import scala.annotation.experimental @experimental diff --git a/tests/pos/i15155.scala b/tests/pos/i15155.scala index a00ca742b5d3..ac23409bd0c5 100644 --- a/tests/pos/i15155.scala +++ b/tests/pos/i15155.scala @@ -1,3 +1,5 @@ +//> using options -source:3.3 + import scala.reflect.ClassTag // https://github.com/json4s/json4s/blob/355d8751391773e0d79d04402a4f9fb7bfc684ec/ext/src/main/scala-3/org/json4s/ext/package.scala#L4-L8 type Aux[A] = { type Value = A } @@ -8,4 +10,4 @@ type EnumValue[A <: Enumeration] = A match { // https://github.com/json4s/json4s/blob/355d8751391773e0d79d04402a4f9fb7bfc684ec/ext/src/main/scala/org/json4s/ext/EnumSerializer.scala#L25-L26 class EnumSerializer[E <: Enumeration: ClassTag](enumeration: E) { val EnumerationClass = classOf[EnumValue[E]] -} \ No newline at end of file +} diff --git a/tests/pos/i15166/Test_2.scala b/tests/pos/i15166/Test_2.scala index 53e6da69a42c..b447aacde2ab 100644 --- a/tests/pos/i15166/Test_2.scala +++ b/tests/pos/i15166/Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings object Test { val x: InterfaceAudience_JAVA_ONLY_1.Public = ??? } diff --git a/tests/pos/i15226.scala b/tests/pos/i15226.scala index d40b21e4cedb..dfb61efc6a8c 100644 --- a/tests/pos/i15226.scala +++ b/tests/pos/i15226.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror class Proj { type State = String } sealed trait ProjState: diff --git a/tests/pos/i15289.scala b/tests/pos/i15289.scala index 5eea2fe9f91f..2035d2bb68cb 100644 --- a/tests/pos/i15289.scala +++ b/tests/pos/i15289.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror sealed abstract class Foo[A, B] final case class Bar[C](baz: C) extends Foo[C, C] diff --git a/tests/pos/i15312.scala b/tests/pos/i15312.scala deleted file mode 100644 index 28ce2f9bafe2..000000000000 --- a/tests/pos/i15312.scala +++ /dev/null @@ -1,7 +0,0 @@ -type F[t] = - t match - case {type A = Float} => Int - case {type A = Int} => String - -val a: F[{type A = Float}] = 10 -val b: F[{type A = Int}] = "asd" // Found:("asd" : String) Required: F[Object{A = Int}] \ No newline at end of file diff --git a/tests/pos/i15474.scala b/tests/pos/i15474.scala new file mode 100644 index 000000000000..f2c85120e4b2 --- /dev/null +++ b/tests/pos/i15474.scala @@ -0,0 +1,16 @@ +//> using options -Xfatal-warnings +import scala.language.implicitConversions +import scala.language.future + +object Test2: + given c: Conversion[ String, Int ] = _.toInt // now avoided, was loop not detected, could be used as a fallback to avoid the warning. 
+ +object Prices { + opaque type Price = BigDecimal + + object Price{ + given Ordering[Price] = summon[Ordering[BigDecimal]] // was error, now avoided + } +} + + diff --git a/tests/pos/i15522.scala b/tests/pos/i15522.scala index 9b6b99872080..895867dd0ceb 100644 --- a/tests/pos/i15522.scala +++ b/tests/pos/i15522.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror sealed trait Coverage sealed abstract case class Range(min: Double, max: Double) extends Coverage case object Empty extends Coverage diff --git a/tests/pos/i15523.avoid.scala b/tests/pos/i15523.avoid.scala index afbfc1a69d60..1a4bae86505d 100644 --- a/tests/pos/i15523.avoid.scala +++ b/tests/pos/i15523.avoid.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror // like the original, but with a case body `a` // which caused type avoidance to infinitely recurse sealed trait Parent diff --git a/tests/pos/i15523.scala b/tests/pos/i15523.scala index cf63613c29ac..ca2d843dbe4f 100644 --- a/tests/pos/i15523.scala +++ b/tests/pos/i15523.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror sealed trait Parent final case class Leaf[A, B >: A](a: A, b: B) extends Parent diff --git a/tests/pos/i15579.scala b/tests/pos/i15579.scala index 64b67cf3c069..952fab51de96 100644 --- a/tests/pos/i15579.scala +++ b/tests/pos/i15579.scala @@ -1,4 +1,4 @@ -// scalac: -source:future +//> using options -source:future trait Foo[A]: def map[B](f: A => B): Foo[B] = ??? diff --git a/tests/pos/i15715.scala b/tests/pos/i15715.scala new file mode 100644 index 000000000000..d6db1002aace --- /dev/null +++ b/tests/pos/i15715.scala @@ -0,0 +1,7 @@ +import scala.quoted.* + +def macroImpl(using Quotes) = + val expr = Expr(1) + Some((1, 2)).map { (x, y) => + '{ ${expr} + 1 } + } diff --git a/tests/pos/i15717.scala b/tests/pos/i15717.scala index d625d9ccb039..0e4c3f072163 100644 --- a/tests/pos/i15717.scala +++ b/tests/pos/i15717.scala @@ -1,10 +1,10 @@ -// scalac: -Werror +//> using options -Werror class Test: - def pmat(xs: java.util.Vector[_]): String = xs.get(0) match + def pmat(xs: java.util.Vector[?]): String = xs.get(0) match case d: Double => d.toString() // was: error: unreachable case, which is spurious case _ => "shrug" - def pmatR(xs: java.util.Vector[_]): String = + def pmatR(xs: java.util.Vector[?]): String = val scr = xs.get(0) 1.0 match case `scr` => scr.toString() // for the reverse provablyDisjoint case diff --git a/tests/pos/i15867.scala b/tests/pos/i15867.scala new file mode 100644 index 000000000000..2e62177ba590 --- /dev/null +++ b/tests/pos/i15867.scala @@ -0,0 +1,19 @@ +enum SUB[-A, +B]: + case Refl[S]() extends SUB[S, S] + +class Pow(self: Int): + def **(other: Int): Int = math.pow(self, other).toInt + +given fromList[T]: Conversion[List[T], Pow] = ??? 
+ +given fromInt: Conversion[Int, Pow] = Pow(_) + +def foo[T](t1: T, ev: T SUB List[Int]) = + ev match { case SUB.Refl() => + t1 ** 2 // error + } + +def baz[T](t2: T, ev: T SUB Int) = + ev match { case SUB.Refl() => + t2 ** 2 // works + } diff --git a/tests/pos/i15867.specs2.scala b/tests/pos/i15867.specs2.scala new file mode 100644 index 000000000000..da89b2cba9f0 --- /dev/null +++ b/tests/pos/i15867.specs2.scala @@ -0,0 +1,9 @@ +class Foo: + given Conversion[String, Data] with + def apply(str: String): Data = new Data(str) + + class Data(str: String): + def |(str: String) = new Data(this.str + str) + +class Bar extends Foo: + "str" | "ing" diff --git a/tests/pos/i15926.contra.scala b/tests/pos/i15926.contra.scala new file mode 100644 index 000000000000..2e7d848c923c --- /dev/null +++ b/tests/pos/i15926.contra.scala @@ -0,0 +1,7 @@ +trait Show[-A >: Nothing] + +type MT1[I <: Show[Nothing], N] = I match + case Show[a] => N match + case Int => a + +val a = summon[MT1[Show[String], Int] =:= String] diff --git a/tests/pos/i15926.extract.scala b/tests/pos/i15926.extract.scala new file mode 100644 index 000000000000..45177bd3c946 --- /dev/null +++ b/tests/pos/i15926.extract.scala @@ -0,0 +1,24 @@ +// like pos/i15926.scala +// but with the nested match type extracted +// which is a workaround that fixed the problem +sealed trait Nat +final case class Zero() extends Nat +final case class Succ[+N <: Nat]() extends Nat + +final case class Neg[+N <: Succ[Nat]]() + +type Sum[X, Y] = Y match + case Zero => X + case Succ[y] => Sum[Succ[X], y] + +type IntSum[A, B] = B match + case Neg[b] => IntSumNeg[A, b] + +type IntSumNeg[A, B] = A match + case Neg[a] => Neg[Sum[a, B]] + +type One = Succ[Zero] +type Two = Succ[One] + +class Test: + def test() = summon[IntSum[Neg[One], Neg[One]] =:= Neg[Two]] diff --git a/tests/pos/i15926.min.scala b/tests/pos/i15926.min.scala new file mode 100644 index 000000000000..531467fd4a0f --- /dev/null +++ b/tests/pos/i15926.min.scala @@ -0,0 +1,22 @@ +// like pos/i15926.scala +// but minimised to the subset of paths needed +// to fail the specific test case +sealed trait Nat +final case class Zero() extends Nat +final case class Succ[+N <: Nat]() extends Nat + +final case class Neg[+N <: Succ[Nat]]() + +type Sum[X, Y] = Y match + case Zero => X + case Succ[y] => Sum[Succ[X], y] + +type IntSum[A, B] = B match + case Neg[b] => A match + case Neg[a] => Neg[Sum[a, b]] + +type One = Succ[Zero] +type Two = Succ[One] + +class Test: + def test() = summon[IntSum[Neg[One], Neg[One]] =:= Neg[Two]] diff --git a/tests/pos/i15926.scala b/tests/pos/i15926.scala new file mode 100644 index 000000000000..df4fc2271cd2 --- /dev/null +++ b/tests/pos/i15926.scala @@ -0,0 +1,28 @@ + +@main def main(): Unit = + println(summon[Sum[Minus[Succ[Zero]], Minus[Succ[Zero]]] =:= Minus[Succ[Succ[Zero]]]]) + +sealed trait IntT +sealed trait NatT extends IntT +final case class Zero() extends NatT +final case class Succ[+N <: NatT](n: N) extends NatT +final case class Minus[+N <: Succ[NatT]](n: N) extends IntT + +type NatSum[X <: NatT, Y <: NatT] <: NatT = Y match + case Zero => X + case Succ[y] => NatSum[Succ[X], y] + +type NatDif[X <: NatT, Y <: NatT] <: IntT = Y match + case Zero => X + case Succ[y] => X match + case Zero => Minus[Y] + case Succ[x] => NatDif[x, y] + +type Sum[X <: IntT, Y <: IntT] <: IntT = Y match + case Zero => X + case Minus[y] => X match + case Minus[x] => Minus[NatSum[x, y]] + case _ => NatDif[X, y] + case _ => X match + case Minus[x] => NatDif[Y, x] + case _ => NatSum[X, Y] \ No 
newline at end of file diff --git a/tests/pos/i15964.scala b/tests/pos/i15964.scala index 5713f19e4419..33797da76610 100644 --- a/tests/pos/i15964.scala +++ b/tests/pos/i15964.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror sealed trait T class C extends T diff --git a/tests/pos/i15967.scala b/tests/pos/i15967.scala index 0ef00ae0cea1..1bf03a87cdd4 100644 --- a/tests/pos/i15967.scala +++ b/tests/pos/i15967.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror sealed trait A[-Z] final case class B[Y]() extends A[Y] diff --git a/tests/pos-custom-args/no-experimental/i16091.scala b/tests/pos/i16091.scala similarity index 79% rename from tests/pos-custom-args/no-experimental/i16091.scala rename to tests/pos/i16091.scala index 0324aeacccc9..349e16e6d7e6 100644 --- a/tests/pos-custom-args/no-experimental/i16091.scala +++ b/tests/pos/i16091.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + import scala.annotation.experimental object Macro { diff --git a/tests/pos/i16123.scala b/tests/pos/i16123.scala index 2cba8f6dc96f..4b3cf483d8e7 100644 --- a/tests/pos/i16123.scala +++ b/tests/pos/i16123.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror sealed trait Nat case class Zero() extends Nat case class Succ[N <: Nat](n: N) extends Nat diff --git a/tests/pos/i16186.scala b/tests/pos/i16186.scala new file mode 100644 index 000000000000..9a8948cae529 --- /dev/null +++ b/tests/pos/i16186.scala @@ -0,0 +1,9 @@ +//> using options -Werror + +class Test: + val x = 42 + val tup23 = (x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x) + + tup23 match { + case (_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _) => "Tuple Pattern" + } diff --git a/tests/pos/i16339.scala b/tests/pos/i16339.scala index 72582b778193..95062f516354 100644 --- a/tests/pos/i16339.scala +++ b/tests/pos/i16339.scala @@ -1,7 +1,8 @@ -// scalac: -Werror +//> using options -Werror sealed trait Get[X, +X2 <: X] case class Bar[Y, Y2 <: Y](value: Y2) extends Get[Y, Y2] + class Test: def t1[Z, Z2 <: Z](get: Get[Z, Z2]) = get match case Bar(_) => diff --git a/tests/pos/i16408.min1.scala b/tests/pos/i16408.min1.scala new file mode 100644 index 000000000000..b35199edbfcd --- /dev/null +++ b/tests/pos/i16408.min1.scala @@ -0,0 +1,28 @@ +//> using options -source:3.3 + +object Helpers: + type NodeFun[R] = Matchable // compiles without [R] parameter + + type URIFun[R] = R match + case GetURI[u] => u & NodeFun[R] + + private type GetURI[U] = RDF { type URI = U } +end Helpers + +trait RDF: + type URI + +trait ROps[R <: RDF]: + def auth(uri: Helpers.URIFun[R]): String + +object TraitRDF extends RDF: + override type URI = TraitTypes.UriImpl + + val rops = new ROps[TraitRDF.type] { + override def auth(uri: Helpers.URIFun[TraitRDF.type]): String = ??? + } +end TraitRDF + +object TraitTypes: + trait UriImpl // doesn't compile + // class UriImpl // compiles diff --git a/tests/pos/i16408.min2.scala b/tests/pos/i16408.min2.scala new file mode 100644 index 000000000000..3eb9d395ac4b --- /dev/null +++ b/tests/pos/i16408.min2.scala @@ -0,0 +1,22 @@ +object Helpers: + type NodeFun[R] = Matchable // compiles without [R] parameter + + type URIFun[R] = R match + case RDF[u] => u & NodeFun[R] +end Helpers + +trait RDF[URIParam] + +trait ROps[R <: RDF[?]]: + def auth(uri: Helpers.URIFun[R]): String + +object TraitRDF extends RDF[TraitTypes.UriImpl]: + + val rops = new ROps[TraitRDF.type] { + override def auth(uri: Helpers.URIFun[TraitRDF.type]): String = ??? 
+ } +end TraitRDF + +object TraitTypes: + trait UriImpl // doesn't compile + // class UriImpl // compiles diff --git a/tests/pos/i16408.scala b/tests/pos/i16408.scala new file mode 100644 index 000000000000..fc894baeb958 --- /dev/null +++ b/tests/pos/i16408.scala @@ -0,0 +1,59 @@ +//> using options -source:3.3 + +import scala.util.Try + +trait RDF: + rdf => + + type R = rdf.type + type Node <: Matchable + type URI <: Node + + given rops: ROps[R] +end RDF + +object RDF: + type Node[R <: RDF] = R match + case GetNode[n] => Matchable //n & rNode[R] + + type URI[R <: RDF] <: Node[R] = R match + case GetURI[u] => u & Node[R] + + private type GetNode[N] = RDF { type Node = N } + private type GetURI[U] = RDF { type URI = U } +end RDF + +trait ROps[R <: RDF]: + def mkUri(str: String): Try[RDF.URI[R]] + def auth(uri: RDF.URI[R]): Try[String] + +object TraitTypes: + trait Node: + def value: String + + trait Uri extends Node + + def mkUri(u: String): Uri = + new Uri { def value = u } + +object TraitRDF extends RDF: + import TraitTypes as tz + + override opaque type Node <: Matchable = tz.Node + override opaque type URI <: Node = tz.Uri + + given rops: ROps[R] with + override def mkUri(str: String): Try[RDF.URI[R]] = Try(tz.mkUri(str)) + override def auth(uri: RDF.URI[R]): Try[String] = + Try(java.net.URI.create(uri.value).getAuthority()) + +end TraitRDF + +class Test[R <: RDF](using rops: ROps[R]): + import rops.given + lazy val uriT: Try[RDF.URI[R]] = rops.mkUri("https://bblfish.net/#i") + lazy val x: String = "uri authority=" + uriT.map(u => rops.auth(u)) + +@main def run = + val test = Test[TraitRDF.type] + println(test.x) diff --git a/tests/pos/i16435.scala b/tests/pos/i16435.scala index 3fb36efc55c9..fb738e3041e4 100644 --- a/tests/pos/i16435.scala +++ b/tests/pos/i16435.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror trait Base: type Value inline def oov: Option[Option[Value]] = None diff --git a/tests/pos/i16451.CanForward.scala b/tests/pos/i16451.CanForward.scala index a09a26f22acc..f45eace440e3 100644 --- a/tests/pos/i16451.CanForward.scala +++ b/tests/pos/i16451.CanForward.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror abstract class Namer: private enum CanForward: case Yes diff --git a/tests/pos/i16451.DiffUtil.scala b/tests/pos/i16451.DiffUtil.scala index 3ade8bb73aa7..4a2f16aa05ed 100644 --- a/tests/pos/i16451.DiffUtil.scala +++ b/tests/pos/i16451.DiffUtil.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror object DiffUtil: private sealed trait Patch private final case class Unmodified(str: String) extends Patch diff --git a/tests/pos/i16451.default.scala b/tests/pos/i16451.default.scala index 2751f4901b5f..bc6c7a403be8 100644 --- a/tests/pos/i16451.default.scala +++ b/tests/pos/i16451.default.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror import java.lang.reflect.* import scala.annotation.tailrec diff --git a/tests/pos/i16539.min.scala b/tests/pos/i16539.min.scala index d02e34461ab6..22a96bc12910 100644 --- a/tests/pos/i16539.min.scala +++ b/tests/pos/i16539.min.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror sealed trait Tag[A] sealed trait Foo diff --git a/tests/pos/i16539.scala b/tests/pos/i16539.scala index c5cf45b214ef..e03991e10889 100644 --- a/tests/pos/i16539.scala +++ b/tests/pos/i16539.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror sealed trait Tag[A] enum Hidden: diff --git a/tests/pos/i16596.more.scala b/tests/pos/i16596.more.scala new file mode 100644 
index 000000000000..2470eb9eb3c2 --- /dev/null +++ b/tests/pos/i16596.more.scala @@ -0,0 +1,35 @@ +import scala.compiletime.ops.int.* + +object NatExample { + sealed trait Nat + object Nat { + case object Zero extends Nat + case class Succ[N <: Nat](prev: N) extends Nat + + given zero: Zero.type = Zero + given buildSucc[N <: Nat](using n: N): Succ[N] = Succ(n) + + def value[N <: Nat](using n: N): N = n + + type FromInt[I <: Int] <: Nat = I match + case 0 => Zero.type + case _ => Succ[FromInt[I - 1]] + + summon[FromInt[0] =:= Zero.type] + summon[FromInt[1] =:= Succ[Zero.type]] + summon[FromInt[2] =:= Succ[Succ[Zero.type]]] + summon[FromInt[3] =:= Succ[Succ[Succ[Zero.type]]]] + summon[FromInt[4] =:= Succ[Succ[Succ[Succ[Zero.type]]]]] + + @main def test = { + require(summon[FromInt[0]] == Zero) + require(summon[FromInt[1]] == Succ(Zero)) + require(summon[FromInt[2]] == Succ(Succ(Zero))) + require(summon[FromInt[3]] == Succ(Succ(Succ(Zero)))) + // we can summon 4 if we write it out: + require(summon[Succ[Succ[Succ[Succ[Zero.type]]]]] == Succ(Succ(Succ(Succ(Zero))))) + // was: we cannot summon 4 using the match type + require(summon[FromInt[4]] == Succ(Succ(Succ(Succ(Zero))))) + } + } +} diff --git a/tests/pos/i16596.orig.scala b/tests/pos/i16596.orig.scala new file mode 100644 index 000000000000..0a562c6936c0 --- /dev/null +++ b/tests/pos/i16596.orig.scala @@ -0,0 +1,28 @@ +import scala.compiletime.ops.int + +type Count0[N,T] <: Tuple = (N,T) match + case (0,_) => EmptyTuple + case (N,String) => String *: Count0[int.-[N, 1], String] + case (N,Int) => Int *: Count0[int.-[N, 1], Int] + case (N,Float) => Float *: Count0[int.-[N, 1], Float] + case (N,Double) => Double *: Count0[int.-[N, 1], Double] + + +type Count1[N,T] <: Tuple = (N,T) match + case (0,T) => EmptyTuple + case (N,String) => String *: Count1[int.-[N, 1], String] + case (N,Int) => Int *: Count1[int.-[N, 1], Int] + case (N,Float) => Float *: Count1[int.-[N, 1], Float] + case (N,Double) => Double *: Count1[int.-[N, 1], Double] + +def t01 = summon[Count0[1, Int] =:= Int *: EmptyTuple ] +def t02 = summon[Count0[2, Int] =:= Int *: Int *: EmptyTuple] +def t03 = summon[Count0[3, Int] =:= Int *: Int *: Int *: EmptyTuple] +def t04 = summon[Count0[4, Int] =:= Int *: Int *: Int *: Int *: EmptyTuple] +def t05 = summon[Count0[5, Int] =:= Int *: Int *: Int *: Int *: Int *: EmptyTuple] + +def t11 = summon[Count1[1, Int] =:= Int *: EmptyTuple ] +def t12 = summon[Count1[2, Int] =:= Int *: Int *: EmptyTuple] +def t13 = summon[Count1[3, Int] =:= Int *: Int *: Int *: EmptyTuple] // was: Fail from here +def t14 = summon[Count1[4, Int] =:= Int *: Int *: Int *: Int *: EmptyTuple] +def t15 = summon[Count1[5, Int] =:= Int *: Int *: Int *: Int *: Int *: EmptyTuple] diff --git a/tests/pos/i16596.scala b/tests/pos/i16596.scala new file mode 100644 index 000000000000..679417548df0 --- /dev/null +++ b/tests/pos/i16596.scala @@ -0,0 +1,14 @@ +import scala.compiletime.ops.int, int.- + +type Count[N, T] <: Tuple = (N, T) match + case (0, T) => EmptyTuple + case (N, T) => T *: Count[N - 1, T] + +val a: Count[3, Int] = (1, 2, 3) +val b: Count[4, Int] = (1, 2, 3, 4) +val c: Count[5, Int] = (1, 2, 3, 4, 5) +val d: Count[6, Int] = (1, 2, 3, 4, 5, 6) +val z: Count[23, Int] = ( + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, + 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 23) diff --git a/tests/pos/i16649-irrefutable.scala b/tests/pos/i16649-irrefutable.scala new file mode 100644 index 000000000000..7a5a98733922 --- /dev/null +++ b/tests/pos/i16649-irrefutable.scala @@ -0,0 +1,9 @@ 
+//> using options -Xfatal-warnings -deprecation -feature + +import quoted.* + +def foo(using Quotes)(x: Expr[Int]) = + val '{ $y } = x + val '{ $a: Any } = x + val '{ $b: Int } = x + val '[List[Int]] = Type.of[List[Int]] diff --git a/tests/pos/i16657.scala b/tests/pos/i16657.scala new file mode 100644 index 000000000000..5c2c0aa6567d --- /dev/null +++ b/tests/pos/i16657.scala @@ -0,0 +1,13 @@ +//> using options -Werror + +class Test: + val (_, ( + _, _, _, _, _, _, _, _, _, _, // 10 + _, _, _, _, _, _, _, _, _, _, // 20 + _, c22, _ // 23 + )) = // nested pattern has 23 elems + (0, ( + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, + 1, 2, 3, 4, 5, 6, 7, 8, 9, 20, + 1, 2, 3 + )) // ok, exhaustive, reachable, conforming and irrefutable diff --git a/tests/pos/i16706.scala b/tests/pos/i16706.scala index 87fd015c69bb..e94ce37c0c86 100644 --- a/tests/pos/i16706.scala +++ b/tests/pos/i16706.scala @@ -1,3 +1,5 @@ +//> using options -source:3.3 + import scala.deriving.Mirror import scala.reflect.ClassTag @@ -14,4 +16,4 @@ transparent inline given derived[A]( sealed trait Foo case class FooA(a: Int) extends Foo -val instance = derived[Foo] // error \ No newline at end of file +val instance = derived[Foo] // error diff --git a/tests/pos/i16756.scala b/tests/pos/i16756.scala new file mode 100644 index 000000000000..fa54dccd7eee --- /dev/null +++ b/tests/pos/i16756.scala @@ -0,0 +1,16 @@ +class DependentPoly { + + sealed trait Col[V] { + + trait Wrapper + val wrapper: Wrapper = ??? + } + + object Col1 extends Col[Int] + + object Col2 extends Col[Double] + + val polyFn: [C <: DependentPoly.this.Col[?]] => (x: C) => x.Wrapper = + [C <: Col[?]] => (x: C) => (x.wrapper: x.Wrapper) +} + diff --git a/tests/pos/i16777.scala b/tests/pos/i16777.scala index 4218aea29d9f..302ace3ea9aa 100644 --- a/tests/pos/i16777.scala +++ b/tests/pos/i16777.scala @@ -1,4 +1,4 @@ -// scalac: -Ykind-projector:underscores +//> using options -Ykind-projector:underscores sealed abstract class Free[+S[_, _], +E, +A] { @inline final def flatMap[S1[e, a] >: S[e, a], B, E1 >: E](fun: A => Free[S1, E1, B]): Free[S1, E1, B] = Free.FlatMapped[S1, E, E1, A, B](this, fun) diff --git a/tests/pos/i16808.scala b/tests/pos/i16808.scala new file mode 100644 index 000000000000..43e77634a535 --- /dev/null +++ b/tests/pos/i16808.scala @@ -0,0 +1,4 @@ +//> using options -source future -deprecation -Xfatal-warnings + +def collectKeys[A, B, C](xs: Map[A, B])(f: PartialFunction[A, C]): Map[C, B] = + xs.collect{ case (f(c) , b) => (c, b) } diff --git a/tests/pos/i16920.scala b/tests/pos/i16920.scala index dd4f5804a4fd..d52e7e453e7e 100644 --- a/tests/pos/i16920.scala +++ b/tests/pos/i16920.scala @@ -1,4 +1,3 @@ -import language.experimental.relaxedExtensionImports object One: extension (s: String) diff --git a/tests/pos/i16994.scala b/tests/pos/i16994.scala new file mode 100644 index 000000000000..74fc62acb131 --- /dev/null +++ b/tests/pos/i16994.scala @@ -0,0 +1,2 @@ +type ZZ = String ?=> Int +def f(xs: ZZ*) = xs.zipWithIndex.foreach((f: ZZ, i) => f(using "s")) diff --git a/tests/pos/i17149.scala b/tests/pos/i17149.scala new file mode 100644 index 000000000000..7a659334badb --- /dev/null +++ b/tests/pos/i17149.scala @@ -0,0 +1,6 @@ +type Ext[S <: Seq[_]] = S match { + case Seq[t] => t +} + +val _ = implicitly[Ext[Seq[Int]] =:= Int] // e.scala: Cannot prove that e.Ext[Seq[Int]] =:= Int +val _ = summon[Ext[Seq[Int]] =:= Int] \ No newline at end of file diff --git a/tests/pos/i17186a.scala b/tests/pos/i17186a.scala new file mode 100644 index 000000000000..7160ac001ba8 
--- /dev/null +++ b/tests/pos/i17186a.scala @@ -0,0 +1,5 @@ +type second[X <: Tuple2[Any, Any]] = Tuple.Head[Tuple.Tail[X]] +type middle[X <: Tuple3[Any, Any, Any]] = Tuple.Head[Tuple.Tail[X]] + +val a: Tuple.Head[Tuple.Tail[Tuple2[Int, String]]] = ??? +val b: Tuple.Head[Tuple.Tail[Tuple3[Int, String, Boolean]]] = ??? diff --git a/tests/pos/i17186b.scala b/tests/pos/i17186b.scala new file mode 100644 index 000000000000..36efd647816c --- /dev/null +++ b/tests/pos/i17186b.scala @@ -0,0 +1,5 @@ +type SecondOfTwo[X <: Tuple2[Any, Any]] = Tuple.Head[Tuple.Tail[X]] +val a = implicitly[SecondOfTwo[Tuple2[Int, String]] =:= String] + +type LastOfThree[X <: Tuple3[Any, Any, Any]] = Tuple.Tail[Tuple.Tail[X]] +val b = implicitly[LastOfThree[Tuple3[Int, String, Boolean]] =:= Tuple1[Boolean]] diff --git a/tests/pos/i17192.scala b/tests/pos/i17192.scala new file mode 100644 index 000000000000..129da358b3ba --- /dev/null +++ b/tests/pos/i17192.scala @@ -0,0 +1,11 @@ +class Ifce[BT <: Boolean] extends Selectable: + type RT = BT match + case true => this.type { val v1: Int } + case false => this.type + def cast : RT = this.asInstanceOf[RT] + def selectDynamic(key: String): Any = ??? + +class Test: + def t1: Unit = + val full = (new Ifce[true]).cast + val v1 = full.v1 diff --git a/tests/pos/i17230.min1.scala b/tests/pos/i17230.min1.scala index e2df63e168c1..9ab79433fae2 100644 --- a/tests/pos/i17230.min1.scala +++ b/tests/pos/i17230.min1.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror trait Foo: type Bar[_] diff --git a/tests/pos/i17230.orig.scala b/tests/pos/i17230.orig.scala index d72a0082a116..279ae41fc32e 100644 --- a/tests/pos/i17230.orig.scala +++ b/tests/pos/i17230.orig.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror import scala.util.* trait Transaction { diff --git a/tests/pos/i17257.min.scala b/tests/pos/i17257.min.scala new file mode 100644 index 000000000000..f4e101cdbe21 --- /dev/null +++ b/tests/pos/i17257.min.scala @@ -0,0 +1,16 @@ +//> using options -Yno-deep-subtypes:false +// Minimisation of tests/run-macros/i17257 +// to understand how changes to match type reduction +// impacted this use of Tuple.IsMappedBy. +// +// During match type reduction +// if we do NOT simplify the case lambda parameter instances +// then this large tuple makes TypeComparer breach LogPendingSubTypesThreshold +// which, under -Yno-deep-subtypes, crashes the compilation. +class C[+A] +def foo[T <: Tuple : Tuple.IsMappedBy[C]] = ???
+def bar[X] = foo[( + C[X], C[X], C[X], C[X], C[X], C[X], C[X], C[X], C[X], C[X], + C[X], C[X], C[X], C[X], C[X], C[X], C[X], C[X], C[X], C[X], + C[X], C[X], C[X], +)] diff --git a/tests/pos-special/fatal-warnings/i17314.scala b/tests/pos/i17314.scala similarity index 91% rename from tests/pos-special/fatal-warnings/i17314.scala rename to tests/pos/i17314.scala index 23f988741bed..2d0c409ced10 100644 --- a/tests/pos-special/fatal-warnings/i17314.scala +++ b/tests/pos/i17314.scala @@ -1,4 +1,4 @@ -// scalac: "-Wunused:all" +//> using options -Xfatal-warnings -Wunused:all -deprecation -feature import java.net.URI diff --git a/tests/pos/i17314a.scala b/tests/pos/i17314a.scala new file mode 100644 index 000000000000..4bce56d8bbed --- /dev/null +++ b/tests/pos/i17314a.scala @@ -0,0 +1,12 @@ +//> using options -Xfatal-warnings -Wunused:all -deprecation -feature + +package foo: + class Foo[T] + given Foo[Int] = new Foo[Int] + + +package bar: + import foo.{given foo.Foo[Int]} + import foo.Foo + + val repro: Foo[Int] = summon[Foo[Int]] diff --git a/tests/pos/i17391/Bar.java b/tests/pos/i17391/Bar.java new file mode 100644 index 000000000000..6d432926bde2 --- /dev/null +++ b/tests/pos/i17391/Bar.java @@ -0,0 +1,7 @@ +package mypkg; + +class Base<T> { + public void retainAll(String x) {} +} + +public class Bar<T> extends Base<T> {} diff --git a/tests/pos/i17391/test.scala b/tests/pos/i17391/test.scala new file mode 100644 index 000000000000..dbcc0ec39de8 --- /dev/null +++ b/tests/pos/i17391/test.scala @@ -0,0 +1,11 @@ +def test(): Unit = +{ + val x: Foo[mypkg.Bar[String]] = ??? + val y: mypkg.Bar[String] = ??? + + y.retainAll("fd") // works + x.f.retainAll("fd"); // error + +} + +class Foo[T](val f: T) diff --git a/tests/pos/i17395-spec.scala b/tests/pos/i17395-spec.scala new file mode 100644 index 000000000000..0adeda703b99 --- /dev/null +++ b/tests/pos/i17395-spec.scala @@ -0,0 +1,29 @@ +trait TC[T] + +object TC { + def optionTCForPart[T](implicit tc: TC[ExtractPart[T]]): TC[Option[ExtractPart[T]]] = new TC[Option[ExtractPart[T]]] {} +} + +trait ThingWithPart { + type Part +} + +type ExtractPart[T] = T match { + case PartField[t] => t +} +type PartField[T] = ThingWithPart { type Part = T } + +class ValuePartHolder extends ThingWithPart { + type Part = Value +} + +class Value +object Value { + implicit val tcValue: TC[Value] = new {} +} + +@main def main(): Unit = { +// import Value.tcValue // explicit import works around the issue, but shouldn't be necessary + val tc = TC.optionTCForPart[ValuePartHolder] + println(tc) +} diff --git a/tests/pos/i17395.scala b/tests/pos/i17395.scala new file mode 100644 index 000000000000..dbe1b08ab2d1 --- /dev/null +++ b/tests/pos/i17395.scala @@ -0,0 +1,27 @@ +//> using options -source:3.3 + +trait TC[T] + +object TC { + def optionTCForPart[T](implicit tc: TC[ExtractPart[T]]): TC[Option[ExtractPart[T]]] = new TC[Option[ExtractPart[T]]] {} +} + +type ExtractPart[T] = T match { + case PartField[t] => t +} +type PartField[T] = Any { type Part = T } + +class ValuePartHolder { + type Part = Value +} + +class Value +object Value { + implicit val tcValue: TC[Value] = new {} +} + +@main def main(): Unit = { +// import Value.tcValue // explicit import works around the issue, but shouldn't be necessary + val tc = TC.optionTCForPart[ValuePartHolder] + println(tc) +} diff --git a/tests/pos/i17465.scala b/tests/pos/i17465.scala new file mode 100644 index 000000000000..00a59f7681d2 --- /dev/null +++ b/tests/pos/i17465.scala @@ -0,0 +1,45 @@ +def test1[A, B]: Unit = { + def f[T](x: T{ def
*(y: Int): T }): T = ??? + def test = f[scala.collection.StringOps | String]("Hello") + locally: + val test1 : (scala.collection.StringOps | String) { def *(y: Int): (scala.collection.StringOps | String) } = ??? + val test2 : (scala.collection.StringOps | String) { def *(y: Int): (scala.collection.StringOps | String) } = test1 + + locally: + val test1 : (Int | String) { def foo(x: Int): Int } = ??? + val test2 : (Int | String) { def foo(x: Int): Int } = test1 + + locally: + val test1 : ((Int | String) & Any) { def foo(): Int } = ??? + val test2 : ((Int | String) & Any) { def foo(): Int } = test1 + + locally: + val test1 : Int { def foo(): Int } = ??? + val test2 : Int { def foo(): Int } = test1 + + locally: + val test1 : (Int | String) { def foo(): Int } = ??? + val test2 : (Int | String) & Any = test1 + + locally: + val test1 : (Int | B) { def *(y: Int): Int } = ??? + val test2 : (Int | B) { def *(y: Int): Int } = test1 + + locally: + val test1 : (Int | String) = ??? + val test2 : (Int | String) = test1 + + type Foo = Int | String + locally: + val test1 : Foo { type T = Int } = ??? + val test2 : (Int | String) = test1 +} + +def test2: Unit = { + import reflect.Selectable.reflectiveSelectable + + trait A[T](x: T{ def *(y: Int): T }): + def f: T = x * 2 + + class B extends A("Hello") +} diff --git a/tests/pos/i17525.scala b/tests/pos/i17525.scala new file mode 100644 index 000000000000..7b99d3d7651a --- /dev/null +++ b/tests/pos/i17525.scala @@ -0,0 +1,5 @@ +object Extract { + transparent inline def unapply(value: String): Option[Tuple] = Some((1, "two")) +} +def fail(): Unit = "" match { case Extract(a, b) => f(a, b) } +def f(n: Int, s: String): Unit = () diff --git a/tests/pos/i17556.scala b/tests/pos/i17556.scala new file mode 100644 index 000000000000..9f14cbfbb7c1 --- /dev/null +++ b/tests/pos/i17556.scala @@ -0,0 +1,8 @@ +sealed trait A { + // must be `object` or `case class` + object X extends A + case class Y() extends A +} + +// companion object must exist +object A \ No newline at end of file diff --git a/tests/pos/i17584a.scala b/tests/pos/i17584a.scala new file mode 100644 index 000000000000..6bed4d935831 --- /dev/null +++ b/tests/pos/i17584a.scala @@ -0,0 +1,7 @@ + +import language.experimental.erasedDefinitions +trait A: + erased def g = 1 +trait B extends A: + erased def f = super.g +class C extends B \ No newline at end of file diff --git a/tests/pos/i17588.scala b/tests/pos/i17588.scala new file mode 100644 index 000000000000..5ac63d0dcc05 --- /dev/null +++ b/tests/pos/i17588.scala @@ -0,0 +1,2 @@ +class StringBox(inner: String): + export inner.* \ No newline at end of file diff --git a/tests/pos/i17631.scala b/tests/pos/i17631.scala new file mode 100644 index 000000000000..7b8a064493df --- /dev/null +++ b/tests/pos/i17631.scala @@ -0,0 +1,34 @@ +//> using options -Xfatal-warnings -Wunused:all -deprecation -feature + +object foo { + type Bar +} + +import foo.Bar + +def Test = { + + type Person = { val name: String } + + def good: Person = ??? + def bad1: { val name: String } = ??? + def bad2 = (good: { val name: String }) + def justIs: { val bar: Bar } = ??? + (bad1, bad2, justIs) +} + +class Record(elems: (String, Any)*) extends Selectable: + private val fields = elems.toMap + def selectDynamic(name: String): Any = fields(name) + +object Main { + + type Person = Record { val name: String; val age: Int } + + locally { + def good: Person = ??? + def bad1: Record { val name: String; val age: Int } = ??? 
+ def bad2 = (good: Record { val name: String; val age: Int }) + (bad1, bad2) + } +} diff --git a/tests/pos/i17762.scala b/tests/pos/i17762.scala new file mode 100644 index 000000000000..65275c4619db --- /dev/null +++ b/tests/pos/i17762.scala @@ -0,0 +1,21 @@ +//> using options -Xfatal-warnings -Wunused:all + +class SomeType + +def testIt(st1: SomeType, st2: SomeType): Boolean = + given CanEqual[SomeType, SomeType] = CanEqual.derived + st1 == st2 + +object HasCanEqual: + given f: CanEqual[SomeType, SomeType] = CanEqual.derived + +object UsesCanEqual: + import HasCanEqual.given + def testIt(st1: SomeType, st2: SomeType): Boolean = + st1 == st2 + +object UsesCanEqual2: + import HasCanEqual.f + + def testIt(st1: SomeType, st2: SomeType): Boolean = + st1 == st2 \ No newline at end of file diff --git a/tests/pos/i17763/CopyableBuilder.java b/tests/pos/i17763/CopyableBuilder.java new file mode 100644 index 000000000000..ad19f3404d7e --- /dev/null +++ b/tests/pos/i17763/CopyableBuilder.java @@ -0,0 +1,2 @@ +public interface CopyableBuilder<B extends CopyableBuilder<B, T>, T extends ToCopyableBuilder<T, B>> { +} diff --git a/tests/pos/i17763/Crash.scala b/tests/pos/i17763/Crash.scala new file mode 100644 index 000000000000..e6182c4d2e35 --- /dev/null +++ b/tests/pos/i17763/Crash.scala @@ -0,0 +1 @@ +class Crash diff --git a/tests/pos/i17763/ToCopyableBuilder.java b/tests/pos/i17763/ToCopyableBuilder.java new file mode 100644 index 000000000000..1928385a6cdc --- /dev/null +++ b/tests/pos/i17763/ToCopyableBuilder.java @@ -0,0 +1,2 @@ +public interface ToCopyableBuilder<B extends CopyableBuilder<B, T>, T extends ToCopyableBuilder<T, B>> { +} diff --git a/tests/pos/i17948.all.scala b/tests/pos/i17948.all.scala new file mode 100644 index 000000000000..2c184872ff38 --- /dev/null +++ b/tests/pos/i17948.all.scala @@ -0,0 +1,44 @@ +object O: + opaque type T = Int + + inline def get0: Int = Do.get // no proxy needed + inline def get1: Int = Do.get: O.T // no proxy needed + inline def get2: Int = Do.get: T // proxied + + inline def set0: Unit = Do.set(0) // was: broken + inline def set1: Unit = Do.set(1: O.T) // no proxy needed + inline def set2: Unit = Do.set(2: T) // proxied + + inline def mod0: Int = Do.mod(0) // was: broken + inline def mod1: Int = Do.mod(1): O.T // was: broken + inline def mod2: Int = Do.mod(2): T // was: broken + inline def mod3: Int = Do.mod(3: O.T) // no proxy needed + inline def mod4: Int = Do.mod(4: O.T): O.T // no proxy needed + inline def mod5: Int = Do.mod(5: O.T): T // proxied + inline def mod6: Int = Do.mod(6: T) // proxied + inline def mod7: Int = Do.mod(7: T): O.T // proxied + inline def mod8: Int = Do.mod(8: T): T // proxied + +class Test: + def testGet0: Int = O.get0 + def testGet1: Int = O.get1 + def testGet2: Int = O.get2 + + def testSet0: Unit = O.set0 + def testSet1: Unit = O.set1 + def testSet2: Unit = O.set2 + + def testMod0: Int = O.mod0 + def testMod1: Int = O.mod1 + def testMod2: Int = O.mod2 + def testMod3: Int = O.mod3 + def testMod4: Int = O.mod4 + def testMod5: Int = O.mod5 + def testMod6: Int = O.mod6 + def testMod7: Int = O.mod7 + def testMod8: Int = O.mod8 + +object Do: + def get: O.T = ???
+ def set(x: O.T): Unit = () + def mod(x: O.T): O.T = x diff --git a/tests/pos/i17948.scala b/tests/pos/i17948.scala new file mode 100644 index 000000000000..3fb927e139fd --- /dev/null +++ b/tests/pos/i17948.scala @@ -0,0 +1,12 @@ +object O: + opaque type T = Int + inline def x: Int = P.id(2) + +object P: + def id(x: O.T): O.T = x + +object Test { + def main(args: Array[String]): Unit = println(foo()) + + def foo(): Int = O.x +} diff --git a/tests/pos/i18062.scala b/tests/pos/i18062.scala new file mode 100644 index 000000000000..48863c4349c7 --- /dev/null +++ b/tests/pos/i18062.scala @@ -0,0 +1,14 @@ +trait CB[X] { def get: X } + +trait WrapperConvert[F[_], G[_]]: + def conv[X](fx: F[X]): G[X] + +object WrapperConvert: + implicit def id[F[_]]: WrapperConvert[F, F] = new WrapperConvert[F, F]: + def conv[X](fx: F[X]): F[X] = fx + +transparent inline given convertX[F[_], X](using wc: WrapperConvert[F, CB]): Conversion[F[X], X] = + new Conversion[F[X], X]: + def apply(fx: F[X]) = wc.conv(fx).get + +def test(cb: CB[Int], x: Int): Int = cb + x diff --git a/tests/pos/i18083.scala b/tests/pos/i18083.scala new file mode 100644 index 000000000000..c7e35a51f4d0 --- /dev/null +++ b/tests/pos/i18083.scala @@ -0,0 +1,9 @@ +sealed trait A +case class Sub1() extends A +case object Sub2 extends A + +def test(x: A | Null): Int = + if x == null then return 0 + x match + case Sub1() => 1 + case Sub2 => 2 diff --git a/tests/pos/i18091.scala b/tests/pos/i18091.scala new file mode 100644 index 000000000000..ef896cedb751 --- /dev/null +++ b/tests/pos/i18091.scala @@ -0,0 +1,5 @@ +trait B(val y: Int) + +class C extends B(20) { + def foo(): Unit = println(y) +} \ No newline at end of file diff --git a/tests/pos/i18096.scala b/tests/pos/i18096.scala new file mode 100644 index 000000000000..c2ef9ededdb3 --- /dev/null +++ b/tests/pos/i18096.scala @@ -0,0 +1,4 @@ +trait F1[-T1, +R] extends AnyRef { def apply(v1: T1): R } +class R { def l: List[Any] = Nil } +class S { def m[T](f: F1[R, ? <: List[T]]): S = this } +class T1 { def t1(s: S) = s.m((r: R) => r.l) } diff --git a/tests/pos/i18135.scala b/tests/pos/i18135.scala new file mode 100644 index 000000000000..0be6321dc74d --- /dev/null +++ b/tests/pos/i18135.scala @@ -0,0 +1,26 @@ +class Conf + +class Bar(_conf: Conf) { + implicit val conf: Conf = _conf +} + +class Foo(conf: Conf) extends Bar(conf) +//class Foo(_conf: Conf) extends Bar(_conf) +// using a different name fixes it + +class Test { + def test(foo: Foo) = { + import foo.* + //implicit val conf: Conf = foo.conf + // manually redefining it also fixes it + assert(conf != null) + assert(implicitly[Conf] != null) + } + def test2(foo: Foo) = { + import foo.conf + //implicit val conf: Conf = foo.conf + // manually redefining it also fixes it + assert(conf != null) + assert(implicitly[Conf] != null) + } +} \ No newline at end of file diff --git a/tests/pos/i18175.scala b/tests/pos/i18175.scala new file mode 100644 index 000000000000..2480ddccc320 --- /dev/null +++ b/tests/pos/i18175.scala @@ -0,0 +1,106 @@ +import scala.compiletime.ops.int.{ +, -, Max } +import scala.compiletime.ops.string.{ Substring, Length, Matches, CharAt } + +class Regex[P] private() extends Serializable: + def unapply(s: CharSequence)(implicit n: Regex.Sanitizer[P]): Option[P] = ??? + +object Regex: + def apply[R <: String & Singleton](regex: R): Regex[Compile[R]] = ??? + + abstract class Sanitizer[T] + object Sanitizer: + given Sanitizer[EmptyTuple] = ??? + given stringcase[T <: Tuple: Sanitizer]: Sanitizer[String *: T] = ??? 
+ given optioncase[T <: Tuple: Sanitizer]: Sanitizer[Option[String] *: T] = ??? + given Sanitizer[String] = ??? + given Sanitizer[Option[String]] = ??? + + type Compile[R <: String] = Matches["", R] match + case _ => Reverse[EmptyTuple, Loop[R, 0, Length[R], EmptyTuple, IsPiped[R, 0, Length[R], 0]]] + + type Loop[R <: String, Lo <: Int, Hi <: Int, Acc <: Tuple, Opt <: Int] <: Tuple = Lo match + case Hi => Acc + case _ => CharAt[R, Lo] match + case '\\' => CharAt[R, Lo + 1] match + case 'Q' => Loop[R, ToClosingQE[R, Lo + 2], Hi, Acc, Opt] + case _ => Loop[R, Lo + 2, Hi, Acc, Opt] + case '[' => Loop[R, ToClosingBracket[R, Lo + 1, 0], Hi, Acc, Opt] + case ')' => Loop[R, Lo + 1, Hi, Acc, Max[0, Opt - 1]] + case '(' => Opt match + case 0 => IsMarked[R, ToClosingParenthesis[R, Lo + 1, 0], Hi] match + case true => IsCapturing[R, Lo + 1] match + case false => Loop[R, Lo + 1, Hi, Acc, 1] + case true => Loop[R, Lo + 1, Hi, Option[String] *: Acc, 1] + case false => IsCapturing[R, Lo + 1] match + case false => Loop[R, Lo + 1, Hi, Acc, IsPiped[R, Lo + 1, Hi, 0]] + case true => Loop[R, Lo + 1, Hi, String *: Acc, IsPiped[R, Lo + 1, Hi, 0]] + case _ => IsCapturing[R, Lo + 1] match + case false => Loop[R, Lo + 1, Hi, Acc, Opt + 1] + case true => Loop[R, Lo + 1, Hi, Option[String] *: Acc, Opt + 1] + case _ => Loop[R, Lo + 1, Hi, Acc, Opt] + + type IsCapturing[R <: String, At <: Int] <: Boolean = CharAt[R, At] match + case '?' => CharAt[R, At + 1] match + case '<' => CharAt[R, At + 2] match + case '=' | '!' => false + case _ => true + case _ => false + case _ => true + + type IsMarked[R <: String, At <: Int, Hi <: Int] <: Boolean = At match + case Hi => false + case _ => CharAt[R, At] match + case '?' | '*' => true + case '{' => CharAt[R, At + 1] match + case '0' => true + case _ => false + case _ => false + + type IsPiped[R <: String, At <: Int, Hi <: Int, Lvl <: Int] <: Int = At match + case Hi => 0 + case _ => CharAt[R, At] match + case '\\' => CharAt[R, At + 1] match + case 'Q' => IsPiped[R, ToClosingQE[R, At + 2], Hi, Lvl] + case _ => IsPiped[R, At + 2, Hi, Lvl] + case '[' => IsPiped[R, ToClosingBracket[R, At + 1, 0], Hi, Lvl] + case '(' => IsPiped[R, ToClosingParenthesis[R, At + 1, 0], Hi, Lvl + 1] + case '|' => 1 + case ')' => 0 + case _ => IsPiped[R, At + 1, Hi, Lvl] + + type ToClosingParenthesis[R <: String, At <: Int, Lvl <: Int] <: Int = CharAt[R, At] match + case '\\' => CharAt[R, At + 1] match + case 'Q' => ToClosingParenthesis[R, ToClosingQE[R, At + 2], Lvl] + case _ => ToClosingParenthesis[R, At + 2, Lvl] + case '[' => ToClosingParenthesis[R, ToClosingBracket[R, At + 1, 0], Lvl] + case ')' => Lvl match + case 0 => At + 1 + case _ => ToClosingParenthesis[R, At + 1, Lvl - 1] + case '(' => ToClosingParenthesis[R, At + 1, Lvl + 1] + case _ => ToClosingParenthesis[R, At + 1, Lvl] + + type ToClosingBracket[R <: String, At <: Int, Lvl <: Int] <: Int = CharAt[R, At] match + case '\\' => CharAt[R, At + 1] match + case 'Q' => ToClosingBracket[R, ToClosingQE[R, At + 2], Lvl] + case _ => ToClosingBracket[R, At + 2, Lvl] + case '[' => ToClosingBracket[R, At + 1, Lvl + 1] + case ']' => Lvl match + case 0 => At + 1 + case _ => ToClosingBracket[R, At + 1, Lvl - 1] + case _ => ToClosingBracket[R, At + 1, Lvl] + + type ToClosingQE[R <: String, At <: Int] <: Int = CharAt[R, At] match + case '\\' => CharAt[R, At + 1] match + case 'E' => At + 2 + case _ => ToClosingQE[R, At + 2] + case _ => ToClosingQE[R, At + 1] + + type Reverse[Acc <: Tuple, X <: Tuple] <: Tuple = X match + case x *: xs => Reverse[x *: Acc, xs] 
+ case EmptyTuple => Acc + +object Test: + def main(args: Array[String]): Unit = + val r75 = Regex("(x|y|z[QW])*(longish|loquatious|excessive|overblown[QW])*") + "xyzQzWlongishoverblownW" match + case r75((Some(g0), Some(g1))) => ??? // failure diff --git a/tests/pos/i18183.migration.scala b/tests/pos/i18183.migration.scala new file mode 100644 index 000000000000..b361f578a428 --- /dev/null +++ b/tests/pos/i18183.migration.scala @@ -0,0 +1,38 @@ +//> using options -source:3.0-migration + +// A not-fully-minimal reproduction of the CI failure in http4s, +// encountered while implementing the fix for name "shadowing" in implicit lookup. + +import scala.util.control.NoStackTrace + +final case class EitherT[F[_], A, B](value: F[Either[A, B]]) { + def semiflatMap[D](f: B => F[D])(implicit F: Monad[F]): EitherT[F, A, D] = ??? +} + +trait Applicative[F[_]] { + def pure[A](x: A): F[A] +} +trait Monad[F[_]] extends Applicative[F] +trait Async[F[_]] extends Monad[F] + +final class Request[+F[_]] + +final case class RequestCookie(name: String, content: String) + +final class CSRF2[F[_], G[_]](implicit F: Async[F]) { self => + import CSRF2._ + + def signToken[M[_]](rawToken: String)(implicit F: Async[M]): M[CSRFToken] = ??? + + def refreshedToken[M[_]](implicit F: Async[M]): EitherT[M, CSRFCheckFailed, CSRFToken] = + EitherT(extractRaw("")).semiflatMap(signToken[M]) + + def extractRaw[M[_]: Async](rawToken: String): M[Either[CSRFCheckFailed, String]] = ??? +} + +object CSRF2 { + type CSRFToken + + case object CSRFCheckFailed extends Exception("CSRF Check failed") with NoStackTrace + type CSRFCheckFailed = CSRFCheckFailed.type +} diff --git a/tests/pos/i18211.scala b/tests/pos/i18211.scala new file mode 100644 index 000000000000..c5ec30ba5d61 --- /dev/null +++ b/tests/pos/i18211.scala @@ -0,0 +1,39 @@ +import scala.compiletime.ops.int.* + +type AnyInt[A <: Int] <: Int = A match { + case _ => A +} + +type IndexOf[A, T <: Tuple] <: Int = T match { + case EmptyTuple => -1 + case A *: t => 0 + case _ *: t => + IndexOf[A, t] match { + case -1 => -1 + case AnyInt[a] => S[a] + } +} + +type Indexes[A, T <: Tuple] +object Indexes { + given of[A, T <: Tuple](using IndexOf[A, T] >= 0 =:= true)(using + index: ValueOf[IndexOf[A, T]], + next: Indexes[A, Tuple.Drop[T, S[IndexOf[A, T]]]] + ): Indexes[A, T] = ??? + + given empty[A, T <: Tuple](using IndexOf[A, T] =:= -1): Indexes[A, T] = ??? +} + +class GetAll[A]: + def apply[T <: Tuple](t: T)(using indexes: Indexes[A, T]): List[A] = ???
+ +def getAll[A]: GetAll[A] = new GetAll[A] + +def test = + // the code here tries to collect all values of type [X] from a tuple into a list + + // this works if there are only two strings in the tuple + getAll[String](("str1", 1, "str2", false)) + + // but this did not compile if there were more than two strings in the tuple + getAll[String](("str1", 1, "str2", false, "str3")) diff --git a/tests/pos/i18226.scala b/tests/pos/i18226.scala new file mode 100644 index 000000000000..b583b94744a7 --- /dev/null +++ b/tests/pos/i18226.scala @@ -0,0 +1,11 @@ +trait F[-R] + +trait Row[A] + +def eliminateInt[R](f: F[R & Row[Int]]): F[R] = new F[R] {} + +val x = new F[Row[Int] & Row[String]] {} + +val _ = eliminateInt[Row[String]](x) // compiles OK when given explicit type +val y = eliminateInt(x) // was error +val _: F[Row[String]] = y \ No newline at end of file diff --git a/tests/pos/i18226a.scala b/tests/pos/i18226a.scala new file mode 100644 index 000000000000..e8db52adbc72 --- /dev/null +++ b/tests/pos/i18226a.scala @@ -0,0 +1,19 @@ +class Has[A] +trait Foo + +class TestAspect[+LowerR, -UpperR] + +class Spec[-R] { + def foo[R1 <: R](aspect: TestAspect[R1, R1]): Unit = {} +} + +class SuiteBuilder[R <: Has[_]] { + def toSpec( + spec: Spec[R & Has[Foo]], + aspect: TestAspect[ + R & Has[Foo], + R & Has[Foo] + ] + ) = + spec.foo(aspect) +} \ No newline at end of file diff --git a/tests/pos/i18247.scala b/tests/pos/i18247.scala new file mode 100644 index 000000000000..b3805f5a7d02 --- /dev/null +++ b/tests/pos/i18247.scala @@ -0,0 +1,10 @@ +sealed trait Op +object Op { + case object `==` extends Op +} + +def t1(a: Op): true = { + a match { + case Op.`==` => true // was: won't compile + } +} diff --git a/tests/pos/i18253.orig.scala b/tests/pos/i18253.orig.scala new file mode 100644 index 000000000000..9efe1224ebfd --- /dev/null +++ b/tests/pos/i18253.orig.scala @@ -0,0 +1,15 @@ +import compiletime.ops.int.Max + +trait DFSInt[W <: Int] +trait Candidate[R]: + type OutW <: Int +object Candidate: + given [W <: Int, R <: DFSInt[W]]: Candidate[R] with + type OutW = W + +def foo[R](rhs: R)(using icR: Candidate[R]): DFSInt[Max[8, icR.OutW]] = ??? + +object Test: + def check[A](a: A, clue: Int = 1): Unit = ??? + val x: DFSInt[8] = ??? + check(foo(x)) diff --git a/tests/pos/i18253.scala b/tests/pos/i18253.scala new file mode 100644 index 000000000000..8f395ee8e943 --- /dev/null +++ b/tests/pos/i18253.scala @@ -0,0 +1,14 @@ +import scala.compiletime.ops.int.Max + +trait Foo[A] +trait Bar[B]: + type Out <: Int +object Bar: + given inst[C <: Int]: Bar[C] with + type Out = C + +class Test: + def mkFoo(using bx: Bar[2]): Foo[Max[1, bx.Out]] = ???
+ def check[Y](yy: Y, clue: Int = 1): Unit = () + + def test: Unit = check(mkFoo) diff --git a/tests/pos/i18261.min/Main_0.scala b/tests/pos/i18261.min/Main_0.scala new file mode 100644 index 000000000000..23d7cbe28198 --- /dev/null +++ b/tests/pos/i18261.min/Main_0.scala @@ -0,0 +1,5 @@ +type Id[T] = Any match { case Any => T } + +class Foo[A] +object Foo: + given inst[X, Y <: Id[X]]: Foo[Y] = new Foo[Y] diff --git a/tests/pos/i18261.min/Test_1.scala b/tests/pos/i18261.min/Test_1.scala new file mode 100644 index 000000000000..24536da598d3 --- /dev/null +++ b/tests/pos/i18261.min/Test_1.scala @@ -0,0 +1,4 @@ +class Test: + def test: Unit = + summon[Foo[Int]] + summon[Foo[Long]] diff --git a/tests/pos/i18261/DFBits_0.scala b/tests/pos/i18261/DFBits_0.scala new file mode 100644 index 000000000000..63b5abb495ef --- /dev/null +++ b/tests/pos/i18261/DFBits_0.scala @@ -0,0 +1,7 @@ +trait DFBits[W <: Int] + +trait Candidate[R]: + type OutW <: Int +object Candidate: + given [W <: Int, R <: Foo[DFBits[W]]]: Candidate[R] with + type OutW = W diff --git a/tests/pos/i18261/Foo_0.scala b/tests/pos/i18261/Foo_0.scala new file mode 100644 index 000000000000..bd3486ac7a30 --- /dev/null +++ b/tests/pos/i18261/Foo_0.scala @@ -0,0 +1,2 @@ +type Foo[T] = T match + case Any => T diff --git a/tests/pos/i18261/Test_1.scala b/tests/pos/i18261/Test_1.scala new file mode 100644 index 000000000000..3b91847e3326 --- /dev/null +++ b/tests/pos/i18261/Test_1.scala @@ -0,0 +1,5 @@ +def baz[L](lhs: L)(using icL: Candidate[L]): DFBits[Int] = ??? +object Test: + val x: DFBits[8] = ??? + val z: DFBits[Int] = baz(x) + summon[Candidate[z.type]] diff --git a/tests/pos/i18263.orig.scala b/tests/pos/i18263.orig.scala new file mode 100644 index 000000000000..68b000580f08 --- /dev/null +++ b/tests/pos/i18263.orig.scala @@ -0,0 +1,16 @@ +sealed trait Scope +sealed trait Domain extends Scope +object Domain extends Domain + +trait Baz[T] +def baz(using ck: Scope): Baz[ck.type] = ??? + +class Foo extends scala.reflect.Selectable: + type TScope = Domain + final protected given TScope = Domain + +object ID: + val internal1 = new Foo: + val ii = new Foo: + val x = baz + val z = internal1.ii.x //error diff --git a/tests/pos/i18263.scala b/tests/pos/i18263.scala new file mode 100644 index 000000000000..4fe79999afe7 --- /dev/null +++ b/tests/pos/i18263.scala @@ -0,0 +1,15 @@ +final class Bar +final class Inv[T] +class Foo extends scala.reflect.Selectable: + type Boo = Bar + final given boo1: Boo = new Bar + +class Test: + def mkInv(using bar: Bar): Inv[bar.type] = new Inv() + + def test: Unit = + val foo1 /* : Foo { val foo2: { z1 => Foo { val inv1: Inv[(z1.boo1 : z1.Boo)] }}} */ = new Foo: + val foo2 /* : { z1 => Foo { val inv1: Inv[(z1.boo1 : z1.Boo)] }} */ = new Foo: + val inv1 /* : Inv[( boo1 : Boo)] */ = mkInv /* (this.boo1) */ + val inv2 = foo1.foo2.inv1 // error + () diff --git a/tests/pos/i18275.scala b/tests/pos/i18275.scala new file mode 100644 index 000000000000..2890cd88e49a --- /dev/null +++ b/tests/pos/i18275.scala @@ -0,0 +1,8 @@ +package foo + +enum MyEnum derives _root_.foo.Eq: + case One + +trait Eq[T] +object Eq: + inline def derived[T](using m: scala.deriving.Mirror.Of[T]): Eq[T] = ??? 
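Note on the mechanism exercised by i18275 above: a `derives` clause makes the compiler synthesize a given in the companion of the deriving type that calls the type class's `derived` method, so the test is really checking that a fully-qualified reference (`_root_.foo.Eq`) survives that expansion. A minimal sketch of the equivalent hand-written code, assuming the conventional `derived$Eq` name for the synthesized given (illustrative only, not part of the patch):

```scala
package foo

trait Eq[T]
object Eq:
  // builds an instance from the type's Mirror, as in the test above
  inline def derived[T](using m: scala.deriving.Mirror.Of[T]): Eq[T] = new Eq[T] {}

// `enum MyEnum derives _root_.foo.Eq` behaves roughly as if written:
enum MyEnum:
  case One
object MyEnum:
  given derived$Eq: Eq[MyEnum] = _root_.foo.Eq.derived  // synthesized by the derives clause
```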
diff --git a/tests/pos/i18316.orig.scala b/tests/pos/i18316.orig.scala new file mode 100644 index 000000000000..1d7d6ce8043b --- /dev/null +++ b/tests/pos/i18316.orig.scala @@ -0,0 +1,24 @@ +import scala.language.implicitConversions +object squerel { + trait EqualityExpression + object PrimitiveTypeMode: + implicit def intToTE(f: Int): TypedExpression[Int] = ??? + + trait TypedExpression[A1]: + def ===[A2](b: TypedExpression[A2]): EqualityExpression = ??? +} + +object scalactic { + trait TripleEqualsSupport: + class Equalizer[L](val leftSide: L): + def ===(rightSide: Any): Boolean = ??? + + trait TripleEquals extends TripleEqualsSupport: + implicit def convertToEqualizer[T](left: T): Equalizer[T] = ??? +} + +import squerel.PrimitiveTypeMode._ // remove to make code compile +object Test extends scalactic.TripleEquals { + import squerel.PrimitiveTypeMode._ + val fails: squerel.EqualityExpression = 1 === 1 +} diff --git a/tests/pos/i18316.scala b/tests/pos/i18316.scala new file mode 100644 index 000000000000..82f56a33468a --- /dev/null +++ b/tests/pos/i18316.scala @@ -0,0 +1,13 @@ +class R1 +class R2 + +class Foo { def meth(x: Int): R1 = null } +class Bar { def meth(x: Int): R2 = null } + +object Impl { implicit def mkFoo(i: Int): Foo = null } +trait Trait { implicit def mkBar(i: Int): Bar = null } + +import Impl.mkFoo // remove to make code compile +object Test extends Trait: + import Impl.mkFoo + val fails: R1 = 1.meth(1) diff --git a/tests/pos/i18345.scala b/tests/pos/i18345.scala new file mode 100644 index 000000000000..2064a4ce75e9 --- /dev/null +++ b/tests/pos/i18345.scala @@ -0,0 +1,28 @@ +extension (vec: Seq[Int]) + def iterate[T](body: (() => Int) => T): T = + val iterator = vec.iterator + body(() => iterator.nextOption().getOrElse(0)) + +def withSequence[T](n: Int)(body: Seq[Int] => T): T = + body((0 to n)) + +def test = + + withSequence(2): + _.iterate: next => + next() + next() + next() + next() + + withSequence(2): + _.iterate: + next => + next() + next() + next() + next() + + withSequence(2): x => + x.iterate: + next => + next() + next() + next() + next() + + withSequence(2): x => + x.iterate: next => + next() + next() + next() + next() + diff --git a/tests/pos/i18364.Tup.scala b/tests/pos/i18364.Tup.scala new file mode 100644 index 000000000000..806342934e67 --- /dev/null +++ b/tests/pos/i18364.Tup.scala @@ -0,0 +1,10 @@ +// Capturing the regression hit while implementing the fix for i18364: +// in CI, the "case _" was flagged "Unreachable case except for null", +// because IArray is an opaque alias of Array +object Tup: + /** Convert an immutable array into a tuple of unknown arity and types */ + def fromIArray[T](xs: IArray[T]): Tuple = + val xs2: IArray[Object] = xs match + case xs: IArray[Object] @unchecked => xs + case _ => xs.map(_.asInstanceOf[Object]) + runtime.Tuples.fromIArray(xs2) diff --git a/tests/pos/i18366.scala b/tests/pos/i18366.scala new file mode 100644 index 000000000000..698510ad13a2 --- /dev/null +++ b/tests/pos/i18366.scala @@ -0,0 +1,10 @@ +//> using options -Xfatal-warnings -Wunused:all + +trait Builder { + def foo(): Unit +} + +def repro = + val builder: Builder = ??? + import builder.{foo => bar} + bar() \ No newline at end of file diff --git a/tests/pos/i18453.workaround.scala b/tests/pos/i18453.workaround.scala new file mode 100644 index 000000000000..2c562279f0e8 --- /dev/null +++ b/tests/pos/i18453.workaround.scala @@ -0,0 +1,8 @@ +trait Box[T] + +class Test: + def f[A, B](c: A => A & B)(using ba: Box[A]): Unit = ???
+ + def g[X, Y](using bx: Box[X]): Unit = + def d(t: X): X & Y = t.asInstanceOf[X & Y] + f(u => d(u)) diff --git a/tests/pos/i18453.zio.scala b/tests/pos/i18453.zio.scala new file mode 100644 index 000000000000..32a9ebd0321c --- /dev/null +++ b/tests/pos/i18453.zio.scala @@ -0,0 +1,33 @@ +// Minimised from zio's ZLayer ++ + +// In an attempt to fix i18453 +// this would break zio's ZLayer +// in the "would-error" cases +class Cov[+W]: + def add[X >: W, Y](y: Cov[Y]): Cov[X & Y] = ??? + def pre[Y >: W, X](x: Cov[X]): Cov[X & Y] = ??? + +class Test: + def a1[A, B, C](a: Cov[A], b: Cov[B], c: Cov[C]): Cov[A & B & C] = a.add(b).add(c) + def a2[A, B, C](a: Cov[A], b: Cov[B], c: Cov[C]): Cov[A with B with C] = a.add(b).add(c) // would-error + + def b1[A, B, C](a: Cov[A], b: Cov[B], c: Cov[C]): Cov[A & (B & C)] = a.add(b).add(c) // would-error (a2) + def b2[A, B, C](a: Cov[A], b: Cov[B], c: Cov[C]): Cov[(A & B) & C] = a.add(b).add(c) + def b3[A, B, C](a: Cov[A], b: Cov[B], c: Cov[C]): Cov[A & (B & C)] = a.add(b.add(c)) + def b4[A, B, C](a: Cov[A], b: Cov[B], c: Cov[C]): Cov[(A & B) & C] = a.add(b.add(c)) + + + def c3[A, B, C](a: Cov[A], b: Cov[B], c: Cov[C]): Cov[A & B & C] = a.pre(b).pre(c) + def c4[A, B, C](a: Cov[A], b: Cov[B], c: Cov[C]): Cov[A with B with C] = a.pre(b).pre(c) // would-error + + def d1[A, B, C](a: Cov[A], b: Cov[B], c: Cov[C]): Cov[A & (B & C)] = a.pre(b).pre(c) // would-error (c4) + def d2[A, B, C](a: Cov[A], b: Cov[B], c: Cov[C]): Cov[(A & B) & C] = a.pre(b).pre(c) + def d3[A, B, C](a: Cov[A], b: Cov[B], c: Cov[C]): Cov[A & (B & C)] = a.pre(b.pre(c)) + def d4[A, B, C](a: Cov[A], b: Cov[B], c: Cov[C]): Cov[(A & B) & C] = a.pre(b.pre(c)) + + + def add[X, Y](x: Cov[X], y: Cov[Y]): Cov[X & Y] = ??? + def e1[A, B, C](a: Cov[A], b: Cov[B], c: Cov[C]): Cov[A & (B & C)] = add(add(a, b), c) // alt assoc: ok! + def e2[A, B, C](a: Cov[A], b: Cov[B], c: Cov[C]): Cov[(A & B) & C] = add(add(a, b), c) // reg assoc: ok + def e3[A, B, C](a: Cov[A], b: Cov[B], c: Cov[C]): Cov[A & (B & C)] = add(a, add(b, c)) // reg assoc: ok + def e4[A, B, C](a: Cov[A], b: Cov[B], c: Cov[C]): Cov[(A & B) & C] = add(a, add(b, c)) // alt assoc: ok! diff --git a/tests/pos/i18488.scala b/tests/pos/i18488.scala new file mode 100644 index 000000000000..c225a2c20711 --- /dev/null +++ b/tests/pos/i18488.scala @@ -0,0 +1,15 @@ +trait AbstractTable[T] + +trait Query[E, U] + +class TableQuery[E <: AbstractTable[?]] extends Query[E, Extract[E]] + +type Extract[E] = E match + case AbstractTable[t] => t + +trait BaseCrudRepository[E[T[_]]]: + + type EntityTable <: AbstractTable[E[Option]] + + def filterById: Query[EntityTable, Extract[EntityTable]] = + new TableQuery[EntityTable] diff --git a/tests/pos/i18601.scala b/tests/pos/i18601.scala new file mode 100644 index 000000000000..63468e2d8c32 --- /dev/null +++ b/tests/pos/i18601.scala @@ -0,0 +1,19 @@ +//> using options -Werror +extension (sc: StringContext) + def m: StringContext = sc + def unapply(string: String): Option[String] = + val pattern = sc.parts.head + if string.length == pattern.length then Some(string) else None + +class Test: + def parse(x: PartialFunction[String, String]) = x + + val pf = parse { + case m"x$s" => s + case m"xx$s" => s // was: unreachable + } + + // proof that the second case isn't unreachable (matches "ab") + def t1 = pf.applyOrElse("a", _ => ".") // "a" + def t2 = pf.applyOrElse("ab", _ => ".") // "ab" + def t3 = pf.applyOrElse("abc", _ => ".") // "." 
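For readers unfamiliar with interpolated-string patterns like the `m"x$s"` cases in i18601 above: the compiler rewrites such a pattern into an `unapply` call on the interpolator applied to a `StringContext` built from the literal parts, which is why the two cases match inputs of different lengths and the second case is reachable. A rough runtime equivalent using the test's own extensions (the `demo`, `mx`, and `mxx` names are illustrative):

```scala
extension (sc: StringContext)
  def m: StringContext = sc
  def unapply(string: String): Option[String] =
    val pattern = sc.parts.head
    if string.length == pattern.length then Some(string) else None

@main def demo =
  val mx  = StringContext("x", "").m    // receiver behind  case m"x$s"
  val mxx = StringContext("xx", "").m   // receiver behind  case m"xx$s"
  println(mx.unapply("a"))   // Some(a)  — "x" and "a" have the same length
  println(mxx.unapply("ab")) // Some(ab) — so the second case is reachable
  println(mxx.unapply("a"))  // None     — lengths differ
```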
diff --git a/tests/pos/i18601b.scala b/tests/pos/i18601b.scala new file mode 100644 index 000000000000..5646b909bd67 --- /dev/null +++ b/tests/pos/i18601b.scala @@ -0,0 +1,26 @@ +//> using options -Werror + +// like pos/i18601 +// but with a dedicated SC class +// that made the false positive redundancy warning go away + +extension (sc: StringContext) + def m: SC = SC(sc) + +class SC(sc: StringContext): + def unapply(string: String): Option[String] = + val pattern = sc.parts.head + if string.length == pattern.length then Some(string) else None + +class Test: + def parse(x: PartialFunction[String, String]) = x + + val pf = parse { + case m"x$s" => s + case m"xx$s" => s // was: not unreachable (as a counter-example) + } + + // proof that the second case isn't unreachable (matches "ab") + def t1 = pf.applyOrElse("a", _ => ".") // "a" + def t2 = pf.applyOrElse("ab", _ => ".") // "ab" + def t3 = pf.applyOrElse("abc", _ => ".") // "." diff --git a/tests/pos/i18623.scala b/tests/pos/i18623.scala new file mode 100644 index 000000000000..e34575c6e697 --- /dev/null +++ b/tests/pos/i18623.scala @@ -0,0 +1,15 @@ +final abstract class ForcedRecompilationToken[T] +object ForcedRecompilationToken { + implicit def default: ForcedRecompilationToken["abc"] = null +} + +class GoodNoParens[T](implicit ev: ForcedRecompilationToken[T]) +type BadNoParens[T] = GoodNoParens[T] + +// error +object A extends BadNoParens + +// ok +object B extends BadNoParens() +object C extends GoodNoParens + diff --git a/tests/pos/i18623a.scala b/tests/pos/i18623a.scala new file mode 100644 index 000000000000..043bac046896 --- /dev/null +++ b/tests/pos/i18623a.scala @@ -0,0 +1,20 @@ +final abstract class ForcedRecompilationToken[T] +object ForcedRecompilationToken { + implicit def default: ForcedRecompilationToken["abc"] = null +} + +object x { +class GoodNoParens[T](implicit ev: ForcedRecompilationToken[T]) +} +export x.GoodNoParens as BadNoParens + +// error +object A extends BadNoParens + +// ok +object B extends BadNoParens() +object C extends x.GoodNoParens + +object App extends App { + println("compiled") +} \ No newline at end of file diff --git a/tests/pos/i18626.min1.scala b/tests/pos/i18626.min1.scala new file mode 100644 index 000000000000..ae895db4b29c --- /dev/null +++ b/tests/pos/i18626.min1.scala @@ -0,0 +1,14 @@ +sealed trait Animal +object Cat extends Animal +object Dog extends Animal + +type Mammal = Cat.type | Dog.type + +class Test: + def t1 = + val mammals: List[Mammal] = ??? + val result = mammals.head + val mammal: Mammal = result // was: Type Mismatch Error: + // Found: (result : Animal) + // Required: Mammal + () diff --git a/tests/pos/i18626.scala b/tests/pos/i18626.scala new file mode 100644 index 000000000000..0fda265f3812 --- /dev/null +++ b/tests/pos/i18626.scala @@ -0,0 +1,32 @@ +trait Random[F1[_]]: + def element[T1](list: Seq[T1]): F1[T1] = ??? + +trait Monad[F2[_]]: + def map[A1, B1](fa: F2[A1])(f: A1 => B1): F2[B1] + +object Monad: + extension [F3[_]: Monad, A3](fa: F3[A3]) + def map[B3](f: A3 => B3): F3[B3] = ??? + +sealed trait Animal +object Cat extends Animal +object Dog extends Animal + +type Mammal = Cat.type | Dog.type +val mammals: List[Mammal] = ??? 
+ +class Work[F4[_]](random: Random[F4])(using mf: Monad[F4]): + def result1: F4[Mammal] = + mf.map(fa = random.element(mammals))(a => a) + + def result2: F4[Mammal] = Monad.map(random.element(mammals))(a => a) + + import Monad.* + + def result3: F4[Mammal] = random + .element(mammals) + .map { a => + a // was: Type Mismatch Error: + // Found: (a : Animal) + // Required: Cat.type | Dog.type +} diff --git a/tests/pos/i18649.scala b/tests/pos/i18649.scala new file mode 100644 index 000000000000..d013d5219a1e --- /dev/null +++ b/tests/pos/i18649.scala @@ -0,0 +1,7 @@ +object Test: + // always inferred Nothing for `x` + def contextFunctionWildcardExplicit: ? ?=> String = x ?=> "foo" + + // used to infer TYPEBOUNDS for the type of the argument + def contextFunctionWildcardInserted: ? ?=> String = "foo" +end Test diff --git a/tests/pos/i18654/AbstractQueryPart.java b/tests/pos/i18654/AbstractQueryPart.java new file mode 100644 index 000000000000..d56e0820982a --- /dev/null +++ b/tests/pos/i18654/AbstractQueryPart.java @@ -0,0 +1,9 @@ +package org.jooq.impl; + +import org.jooq.Configuration; + +abstract class AbstractQueryPart { + Configuration configuration() { + return null; + } +} diff --git a/tests/pos/i18654/AbstractRoutine.java b/tests/pos/i18654/AbstractRoutine.java new file mode 100644 index 000000000000..ea8a56c3395e --- /dev/null +++ b/tests/pos/i18654/AbstractRoutine.java @@ -0,0 +1,11 @@ +package org.jooq.impl; + +import org.jooq.Configuration; +import org.jooq.Attachable; + +public abstract class AbstractRoutine<T> extends AbstractQueryPart implements Attachable { + @Override + public final Configuration configuration() { + return null; + } +} diff --git a/tests/pos/i18654/Attachable.java b/tests/pos/i18654/Attachable.java new file mode 100644 index 000000000000..2b458af120bc --- /dev/null +++ b/tests/pos/i18654/Attachable.java @@ -0,0 +1,5 @@ +package org.jooq; + +public interface Attachable { + Configuration configuration(); +} diff --git a/tests/pos/i18654/Configuration.java b/tests/pos/i18654/Configuration.java new file mode 100644 index 000000000000..0dd97aed6730 --- /dev/null +++ b/tests/pos/i18654/Configuration.java @@ -0,0 +1,3 @@ +package org.jooq; + +public interface Configuration {} \ No newline at end of file diff --git a/tests/pos/i18654/MyRoutineScala.scala b/tests/pos/i18654/MyRoutineScala.scala new file mode 100644 index 000000000000..088f18e92741 --- /dev/null +++ b/tests/pos/i18654/MyRoutineScala.scala @@ -0,0 +1,6 @@ +package com.example + +import org.jooq.impl.AbstractRoutine + +// Works in Scala 2.12 and 2.13 but is broken in Scala 3 +class MyRoutineScala extends AbstractRoutine[String] {} diff --git a/tests/pos/i18699.scala b/tests/pos/i18699.scala new file mode 100644 index 000000000000..4bd3fbaad890 --- /dev/null +++ b/tests/pos/i18699.scala @@ -0,0 +1,7 @@ +import language.experimental.captureChecking +trait Cap: + def use: Int = 42 + +def test2(cs: List[Cap^]): Unit = + val t0: Cap^{cs*} = cs.head // error + var t1: Cap^{cs*} = cs.head // error \ No newline at end of file diff --git a/tests/pos/i18713.scala b/tests/pos/i18713.scala new file mode 100644 index 000000000000..0c406aa09d2e --- /dev/null +++ b/tests/pos/i18713.scala @@ -0,0 +1,18 @@ +import language.experimental.relaxedExtensionImports + +class A +object AA: + extension (a: A) + def f = ??? + def f_=(x: String) = ??? + +object BB: + extension (b: Long) + def f = ??? + def f_=(x: String) = ???
+ +def test(a: A) = + import AA.* + import BB.* + a.f + a.f = "aa" diff --git a/tests/pos/i18715.scala b/tests/pos/i18715.scala new file mode 100644 index 000000000000..5023ef211fa3 --- /dev/null +++ b/tests/pos/i18715.scala @@ -0,0 +1,5 @@ +case class Foo(x: Int = 0) + +extension (x: Any) + private def foo = Foo + export foo.apply diff --git a/tests/pos/i18721.min/core.scala b/tests/pos/i18721.min/core.scala new file mode 100644 index 000000000000..02b172fdd73b --- /dev/null +++ b/tests/pos/i18721.min/core.scala @@ -0,0 +1,5 @@ +class Foo +class Bar extends Selectable: + def selectDynamic(name: String): Foo = null + +inline def meth(inline foo: Foo): Foo = foo diff --git a/tests/pos/i18721.min/main.scala b/tests/pos/i18721.min/main.scala new file mode 100644 index 000000000000..a62ee8b0562b --- /dev/null +++ b/tests/pos/i18721.min/main.scala @@ -0,0 +1,3 @@ +class Test: + def t1(bar: Bar { val foo: Foo }): Any = + meth(bar.foo) diff --git a/tests/pos/i18744.scala b/tests/pos/i18744.scala new file mode 100644 index 000000000000..6e2d630c52b8 --- /dev/null +++ b/tests/pos/i18744.scala @@ -0,0 +1,13 @@ +package dotty.tools.dotc.typer + +object Color: + def apply(): Int = ??? + +extension (u: Unit) + def foo(that: String, f: Int => Int): Int = ??? + def foo(that: Long, f: Int => Int): Int = ??? + +def test = + val c = Color() + ().foo("", (_: Int) => c) + ().foo("", (_: Int) => Color()) \ No newline at end of file diff --git a/tests/pos/i18745.scala b/tests/pos/i18745.scala new file mode 100644 index 000000000000..2184acc4770f --- /dev/null +++ b/tests/pos/i18745.scala @@ -0,0 +1,14 @@ +object Color: + def apply(i: Int): Int = i + +type Plane + +object Plane: + extension (plane: Plane) + def zipWith(that: String, f: Int => Int): Int = ??? + def zipWith(that: Int, f: Int => Int): Int = ??? + +import Plane.zipWith + +def test(p: Plane) = + p.zipWith("", (_: Int) => Color(25)) \ No newline at end of file diff --git a/tests/pos/i18768.scala b/tests/pos/i18768.scala new file mode 100644 index 000000000000..67a5bd127200 --- /dev/null +++ b/tests/pos/i18768.scala @@ -0,0 +1,44 @@ +package minimized: + object Module: + object Exportee: + + opaque type Id = Long + + def apply(): Id = ??? + + extension (e: Id) + def updated: Id = ??? 
+ + + object Client: + export Module.* + val x = Exportee().updated + +package original: + object Module: + trait EntityDef: + type Id + type Record + type Entity = (Id, Record) + + extension (e: Entity) + def updated: Entity = e + + case class Exportee() + object Exportee extends EntityDef: + opaque type Id = Long + type Record = Exportee + + def apply(id: Long): Entity = (id, Exportee()) + + object Client: + export Module.* + val x = Exportee(1L).updated + + + object ClientWorkingWithManualExport: + export Module.{Exportee as _, *} + type Exportee = Module.Exportee + val Exportee = Module.Exportee + + val x = Exportee(1L).updated diff --git a/tests/pos/i18769.scala b/tests/pos/i18769.scala new file mode 100644 index 000000000000..be5db80b7727 --- /dev/null +++ b/tests/pos/i18769.scala @@ -0,0 +1,9 @@ +trait Arb[Fx[_]] { + def pure[A](x: A): Fx[A] +} + +class PfOps(private val self: Int) extends AnyVal { + def pf[Fy[_]](m: Arb[Fy]): PartialFunction[Int, Fy[Int]] = { + case x => m.pure(x) + } +} diff --git a/tests/pos/i18784/Macro_1.scala b/tests/pos/i18784/Macro_1.scala new file mode 100644 index 000000000000..2eb93205d5ac --- /dev/null +++ b/tests/pos/i18784/Macro_1.scala @@ -0,0 +1,20 @@ +import scala.quoted.* + +object Macro { + inline def repeated = ${Macro.repeatedImpl} + def repeatedImpl(using Quotes):Expr[List[Int]] = { + import quotes.reflect.* + val args = List(Expr(1), Expr(2)) + val listObjectTerm = '{ List }.asTerm + Apply( + TypeApply( + Select.unique(listObjectTerm, "apply"), + List(TypeTree.of[Int]) + ), + List( + Typed( + Repeated(args.map(_.asTerm), TypeTree.of[Int]), + Inferred(defn.RepeatedParamClass.typeRef.appliedTo(TypeRepr.of[Int])))) + ).asExprOf[List[Int]] + } +} \ No newline at end of file diff --git a/tests/pos/i18784/Test_2.scala b/tests/pos/i18784/Test_2.scala new file mode 100644 index 000000000000..a5fb441b4137 --- /dev/null +++ b/tests/pos/i18784/Test_2.scala @@ -0,0 +1,2 @@ +def Test: Unit = + Macro.repeated diff --git a/tests/pos/i18795.scala b/tests/pos/i18795.scala new file mode 100644 index 000000000000..56b068e9421f --- /dev/null +++ b/tests/pos/i18795.scala @@ -0,0 +1,15 @@ +package example + +object Main extends App with Test { + load("")() +} + +trait Test { + + def load[T]( + doLoad: T + )( + description: T => Option[String] = (x: T) => None // <--- compile error here + ): Unit = ??? + +} \ No newline at end of file diff --git a/tests/pos/i18808.scala b/tests/pos/i18808.scala new file mode 100644 index 000000000000..0556b3285d00 --- /dev/null +++ b/tests/pos/i18808.scala @@ -0,0 +1,9 @@ +//> using options -Werror + +import language.future + +type F[X] = X match + case List[_] => Int + +type G[X] = X match + case List[?] => Int diff --git a/tests/pos/i18867-3.3.scala b/tests/pos/i18867-3.3.scala new file mode 100644 index 000000000000..4eb0ecd9b200 --- /dev/null +++ b/tests/pos/i18867-3.3.scala @@ -0,0 +1,7 @@ +//> using options -Werror + +import language.`3.3` + +def foo(x: Int) = x + +def test = foo _ diff --git a/tests/pos/i18867-3.4.scala b/tests/pos/i18867-3.4.scala new file mode 100644 index 000000000000..e2630c0cb95c --- /dev/null +++ b/tests/pos/i18867-3.4.scala @@ -0,0 +1,5 @@ +import language.`3.4` + +def foo(x: Int) = x + +def test = foo _ // warn diff --git a/tests/pos/i18909.scala b/tests/pos/i18909.scala new file mode 100644 index 000000000000..87d1befc6c41 --- /dev/null +++ b/tests/pos/i18909.scala @@ -0,0 +1,4 @@ +import language.experimental.captureChecking +def foo(): Unit = + val r1: IArray[String] = ??? 
+ val r2: String = IArray.head(r1) \ No newline at end of file diff --git a/tests/pos/i18918.scala b/tests/pos/i18918.scala new file mode 100644 index 000000000000..0ec6f29b2b89 --- /dev/null +++ b/tests/pos/i18918.scala @@ -0,0 +1,17 @@ + +trait SuperTrait { + sealed trait InnerTrait + case class Foo() extends InnerTrait +} + +trait OtherTrait + +trait TraitWithSelfType extends SuperTrait { this: OtherTrait => + summon[deriving.Mirror.Of[Foo]] + summon[deriving.Mirror.Of[InnerTrait]] +} + +object Implementation extends TraitWithSelfType, OtherTrait { + summon[deriving.Mirror.Of[Foo]] + summon[deriving.Mirror.Of[InnerTrait]] +} diff --git a/tests/pos/i18927.scala b/tests/pos/i18927.scala new file mode 100644 index 000000000000..1549335544c7 --- /dev/null +++ b/tests/pos/i18927.scala @@ -0,0 +1,14 @@ +class A + +class B { + val a = new A + + class C(i: Int) { + def this() = { + this(1) + class Inner() { + println(a) + } + } + } +} \ No newline at end of file diff --git a/tests/pos/i18933.scala b/tests/pos/i18933.scala new file mode 100644 index 000000000000..7424344d5edf --- /dev/null +++ b/tests/pos/i18933.scala @@ -0,0 +1,4 @@ +//> using options -Werror + +infix enum Extends[A, B]: + case Ev[B, A <: B]() extends (A Extends B) diff --git a/tests/pos/i19001.case1.scala b/tests/pos/i19001.case1.scala new file mode 100644 index 000000000000..3e1a67caf308 --- /dev/null +++ b/tests/pos/i19001.case1.scala @@ -0,0 +1,19 @@ +import java.util.concurrent.CompletionStage +import scala.concurrent.Future + +trait ActorRef[-T]: + def ask[Res](replyTo: ActorRef[Res] => T): Future[Res] = ??? + +implicit final class FutureOps[T](private val f: Future[T]) extends AnyVal: + def asJava: CompletionStage[T] = ??? + +class AskPattern[Req, Res]: + val actor: ActorRef[Req] = ??? + val messageFactory: ActorRef[Res] => Req = ??? + + def failing(): CompletionStage[Res] = actor.ask(messageFactory.apply).asJava + def workaround1(): CompletionStage[Res] = actor.ask[Res](messageFactory.apply).asJava + def workaround2(): CompletionStage[Res] = actor.ask(messageFactory).asJava + + val jMessageFactory: java.util.function.Function[ActorRef[Res], Req] = ??? + def originalFailingCase(): CompletionStage[Res] = actor.ask(jMessageFactory.apply).asJava diff --git a/tests/pos/i19001.case2.scala b/tests/pos/i19001.case2.scala new file mode 100644 index 000000000000..547441c58ff2 --- /dev/null +++ b/tests/pos/i19001.case2.scala @@ -0,0 +1,16 @@ +import scala.util.{Try, Success, Failure} + +trait ActorRef[-T] +trait ActorContext[T]: + def ask[Req, Res](target: ActorRef[Req], createRequest: ActorRef[Res] => Req)(mapResponse: Try[Res] => T): Unit + +@main def Test = + val context: ActorContext[Int] = ??? + val askMeRef: ActorRef[Request] = ??? + + case class Request(replyTo: ActorRef[Int]) + + context.ask(askMeRef, Request.apply) { + case Success(res) => res // error: expected Int, got Any + case Failure(ex) => throw ex + } diff --git a/tests/pos/i19001.case3.scala b/tests/pos/i19001.case3.scala new file mode 100644 index 000000000000..cc3f8e558fc8 --- /dev/null +++ b/tests/pos/i19001.case3.scala @@ -0,0 +1,12 @@ +trait IO[A]: + def map[B](f: A => B): IO[B] = ??? + +trait RenderResult[T]: + def value: T + +def IOasync[T](f: (Either[Throwable, T] => Unit) => Unit): IO[T] = ??? + +def render[T]: IO[T] = { + def register(cb: Either[Throwable, RenderResult[T]] => Unit): Unit = ??? 
+ IOasync(register).map(_.value) // map should take RenderResult[T], but uses Any +} diff --git a/tests/pos/i19006a.scala b/tests/pos/i19006a.scala new file mode 100644 index 000000000000..d38100997987 --- /dev/null +++ b/tests/pos/i19006a.scala @@ -0,0 +1,11 @@ +import java.util.Map.Entry; +import java.util.function.BiConsumer; +import java.lang.Iterable + +trait HttpHeaders extends Iterable[Entry[String, String]] { + def forEach(action: BiConsumer[String, String]): Unit = ??? +} + +@main def Test = + val headers: HttpHeaders = ??? + headers.forEach((a, b) => ???) diff --git a/tests/pos/i19006b.scala b/tests/pos/i19006b.scala new file mode 100644 index 000000000000..fbec70423ae9 --- /dev/null +++ b/tests/pos/i19006b.scala @@ -0,0 +1,27 @@ +import java.util.function.Function + +trait HttpClient +trait HttpRequest +trait HttpResponse +trait ClientRequestContext + +trait DecoratingHttpClientFunction { + def execute(delegate: HttpClient, ctx: ClientRequestContext, req: HttpRequest): HttpResponse +} + +class AbstractClientOptionsBuilder: + def decorator(fn: Function[? <: HttpClient, ? <: HttpClient]): AbstractClientOptionsBuilder = ??? + def decorator(fn: DecoratingHttpClientFunction): AbstractClientOptionsBuilder = ??? + +class WebClientBuilder extends AbstractClientOptionsBuilder: + override def decorator(fn: Function[? <: HttpClient, ? <: HttpClient]): WebClientBuilder = ??? + override def decorator(fn: DecoratingHttpClientFunction): WebClientBuilder = ??? + +class ArmeriaClientBuilder[F[_]]: + type DecoratingFunction = (HttpClient, ClientRequestContext, HttpRequest) => HttpResponse + def clientBuilder: WebClientBuilder = ??? + + def withDecorator(decorator: DecoratingFunction): ArmeriaClientBuilder[F] = { + clientBuilder.decorator(decorator(_, _, _)) + this + } diff --git a/tests/pos/i19007/MyRunConfigurationScala.scala b/tests/pos/i19007/MyRunConfigurationScala.scala new file mode 100644 index 000000000000..3ce74344cc8b --- /dev/null +++ b/tests/pos/i19007/MyRunConfigurationScala.scala @@ -0,0 +1,2 @@ + +class MyRunConfigurationScala extends RunConfigurationBase diff --git a/tests/pos/i19007/RunConfiguration.java b/tests/pos/i19007/RunConfiguration.java new file mode 100644 index 000000000000..ee67eb4f6c89 --- /dev/null +++ b/tests/pos/i19007/RunConfiguration.java @@ -0,0 +1,4 @@ + +public interface RunConfiguration extends Cloneable { + RunConfiguration clone(); +} \ No newline at end of file diff --git a/tests/pos/i19007/RunConfigurationBase.java b/tests/pos/i19007/RunConfigurationBase.java new file mode 100644 index 000000000000..410c625c746f --- /dev/null +++ b/tests/pos/i19007/RunConfigurationBase.java @@ -0,0 +1,6 @@ +public abstract class RunConfigurationBase extends UserDataHolderBase implements RunConfiguration { + @Override + public RunConfiguration clone() { + return null; + } +} \ No newline at end of file diff --git a/tests/pos/i19007/UserDataHolderBase.java b/tests/pos/i19007/UserDataHolderBase.java new file mode 100644 index 000000000000..a91607086761 --- /dev/null +++ b/tests/pos/i19007/UserDataHolderBase.java @@ -0,0 +1,8 @@ +import java.util.concurrent.atomic.AtomicReference; + +public class UserDataHolderBase extends AtomicReference<Object> { + @Override + protected Object clone() { + return null; + } +} diff --git a/tests/pos/i19009.case1.scala b/tests/pos/i19009.case1.scala new file mode 100644 index 000000000000..84738dcf384a --- /dev/null +++ b/tests/pos/i19009.case1.scala @@ -0,0 +1,18 @@ +trait Player[+P] +trait RatingPeriod[P]: + def games: Map[P,
Vector[ScoreVsPlayer[P]]] + +trait ScoreVsPlayer[+P] + +def updated[P](playerID: P, matchResults: IndexedSeq[ScoreVsPlayer[P]], lookup: P => Option[Player[P]]): Player[P] = ??? + +trait Leaderboard[P]: + def playersByIdInNoParticularOrder: Map[P, Player[P]] + + def after[P2 >: P](ratingPeriod: RatingPeriod[? <: P]): Leaderboard[P2] = + val competingPlayers = ratingPeriod.games.iterator.map { (id, matchResults) => + updated(id, matchResults, playersByIdInNoParticularOrder.get) // error + // workaround: + updated[P](id, matchResults, playersByIdInNoParticularOrder.get) + } + ??? diff --git a/tests/pos/i19009.case2.scala b/tests/pos/i19009.case2.scala new file mode 100644 index 000000000000..8c395aa48e46 --- /dev/null +++ b/tests/pos/i19009.case2.scala @@ -0,0 +1,10 @@ +object NodeOrdering: + def postOrderNumbering[NodeType](cfgEntry: NodeType, expand: NodeType => Iterator[NodeType]): Map[NodeType, Int] = ??? + +trait CfgNode +trait Method extends CfgNode + +def postOrder = + def method: Method = ??? + def expand(x: CfgNode): Iterator[CfgNode] = ??? + NodeOrdering.postOrderNumbering(method, expand) diff --git a/tests/pos/i19009.case3.scala b/tests/pos/i19009.case3.scala new file mode 100644 index 000000000000..b2b17b312af0 --- /dev/null +++ b/tests/pos/i19009.case3.scala @@ -0,0 +1,31 @@ +trait Bound[+E] + +trait SegmentT[E, +S] +object SegmentT: + trait WithPrev[E, +S] extends SegmentT[E, S] + +trait SegmentSeqT[E, +S]: + def getSegmentForBound(bound: Bound[E]): SegmentT[E, S] with S + +abstract class AbstractSegmentSeq[E, +S] extends SegmentSeqT[E, S] + +trait MappedSegmentBase[E, S] + +type MappedSegment[E, S] = AbstractMappedSegmentSeq.MappedSegment[E, S] + +object AbstractMappedSegmentSeq: + type MappedSegment[E, S] = SegmentT[E, MappedSegmentBase[E, S]] with MappedSegmentBase[E, S] + +abstract class AbstractMappedSegmentSeq[E, S] + extends AbstractSegmentSeq[E, MappedSegmentBase[E, S]]: + def originalSeq: SegmentSeqT[E, S] + + final override def getSegmentForBound(bound: Bound[E]): MappedSegment[E, S] = + searchFrontMapper(frontMapperGeneral, originalSeq.getSegmentForBound(bound)) + + protected final def frontMapperGeneral(original: SegmentT[E, S]): MappedSegment[E, S] = ??? + + protected def searchFrontMapper[Seg >: SegmentT.WithPrev[E, S] <: SegmentT[E, S], R]( + mapper: Seg => R, + original: Seg + ): R = ??? 
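The three i19009 cases above, together with the minimal form that follows (i19009.min3.scala), reduce to the same shape: a method whose type parameter has a lower bound receives an eta-expanded method as an argument, and inference used to settle on the bound rather than the argument's actual type. The workaround noted in case1 is explicit type application; a hedged sketch of both the failure and the workaround (the `Sketch` and `use` names are illustrative, not from the patch):

```scala
trait Foo[A]
trait Bar[B] extends Foo[B]

class Sketch[C]:
  def put[X >: Bar[C]](fn: X => Unit, x1: X): Unit = ()
  def id(foo: Foo[C]): Foo[C] = foo

  def use(foo2: Foo[C]): Unit =
    put(id, foo2)         // was: X inferred as Bar[C], rejecting foo2
    put[Foo[C]](id, foo2) // workaround: instantiate X explicitly
```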
diff --git a/tests/pos/i19009.min3.scala b/tests/pos/i19009.min3.scala new file mode 100644 index 000000000000..f59a4485b219 --- /dev/null +++ b/tests/pos/i19009.min3.scala @@ -0,0 +1,9 @@ +trait Foo[A] +trait Bar[B] extends Foo[B] + +class Test[C]: + def put[X >: Bar[C]](fn: X => Unit, x1: X): Unit = () + def id(foo: Foo[C]): Foo[C] = foo + + def t1(foo2: Foo[C]): Unit = + put(id, foo2) // was: error: exp: Bar[C], got (foo2 : Foo[C]) diff --git a/tests/pos/i19013-a.scala b/tests/pos/i19013-a.scala new file mode 100644 index 000000000000..0a6a5e2d1179 --- /dev/null +++ b/tests/pos/i19013-a.scala @@ -0,0 +1,9 @@ +//> using options -Xfatal-warnings + +def handle[E <: Exception](f: => Unit): Option[E] = + try + f + None + catch case e: E @unchecked => Some(e) + +val r: RuntimeException = handle[RuntimeException](throw new Exception()).get diff --git a/tests/pos/i19013-b.scala b/tests/pos/i19013-b.scala new file mode 100644 index 000000000000..a4ec71654ff5 --- /dev/null +++ b/tests/pos/i19013-b.scala @@ -0,0 +1,11 @@ +//> using options -Xfatal-warnings + +case class CustomException(x: Any) extends Exception("") + +def handle[E](f: => Unit): Option[E] = + try + f + None + catch case CustomException(e: E @unchecked ) => Some(e) + +val r: RuntimeException = handle[RuntimeException](throw new Exception()).get \ No newline at end of file diff --git a/tests/pos/i19031.ci-reg1.scala b/tests/pos/i19031.ci-reg1.scala new file mode 100644 index 000000000000..3c15a3eb9afc --- /dev/null +++ b/tests/pos/i19031.ci-reg1.scala @@ -0,0 +1,16 @@ +//> using options -Werror + +sealed trait Mark[T] + +trait Foo[T] +class Bar1[T] extends Foo[T] +class Bar2[T] extends Foo[T] with Mark[T] + +class Test: + def t1(foo: Foo[Int]): Unit = foo match + case _: Mark[t] => + case _ => + + def t2[F <: Foo[Int]](foo: F): Unit = foo match + case _: Mark[t] => + case _ => diff --git a/tests/pos/i19031.ci-reg2.scala b/tests/pos/i19031.ci-reg2.scala new file mode 100644 index 000000000000..e5b12cc17655 --- /dev/null +++ b/tests/pos/i19031.ci-reg2.scala @@ -0,0 +1,15 @@ +//> using options -Werror + +trait Outer: + sealed trait Foo + case class Bar1() extends Foo + case class Bar2() extends Foo + case object Bar3 extends Foo + + def foos: List[Foo] + +class Test: + def t1(out: Outer) = out.foos.collect: + case out.Bar1() => 1 + case out.Bar2() => 2 + case out.Bar3 => 3 diff --git a/tests/pos/i19031.scala b/tests/pos/i19031.scala new file mode 100644 index 000000000000..e56744017255 --- /dev/null +++ b/tests/pos/i19031.scala @@ -0,0 +1,9 @@ +//> using options -Werror + +sealed trait A: + class B extends A + +class Test: + def t1(a: A): Boolean = + a match + case b: A#B => true diff --git a/tests/pos/i19084.scala b/tests/pos/i19084.scala new file mode 100644 index 000000000000..ab44f0e48b43 --- /dev/null +++ b/tests/pos/i19084.scala @@ -0,0 +1,14 @@ +//> using options -Werror + +class Test: + def t1(y: ( + Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, + "Bob", Int, 33, Int, + Int, Int, Int, Int, Int, Int, Int, Int, Int, Int) + ): Unit = y match + case b @ (x0, x1, x2, x3, x4, x5, x6, x7, x8, x9, + "Bob", y1, 33, y2, + z0, z1, z2, z3, z4, z5, z6, z7, z8, z9) + => // was: !!! 
spurious unreachable case warning + () + case _ => () diff --git a/tests/pos/i19157.scala b/tests/pos/i19157.scala new file mode 100644 index 000000000000..019403adba73 --- /dev/null +++ b/tests/pos/i19157.scala @@ -0,0 +1,11 @@ +//> using options -Werror + +class Test: + inline def count(inline x: Boolean) = x match + case true => 1 + case false => 0 + + assert(count(true) == 1) + assert(count(false) == 0) + var x = true + assert(count(x) == 1) diff --git a/tests/pos/i19198.scala b/tests/pos/i19198.scala new file mode 100644 index 000000000000..23550aac807f --- /dev/null +++ b/tests/pos/i19198.scala @@ -0,0 +1,13 @@ +import deriving.Mirror +import compiletime.summonInline + +inline def check1[Tps <: NonEmptyTuple]: Unit = + summonInline[Mirror.Of[Tuple.Head[Tps]]] + +inline def check2[Tps <: NonEmptyTuple]: Unit = + type FromType = Tuple.Head[Tps] + summonInline[Mirror.Of[FromType]] + +@main def Test: Unit = + check1[Option[Int] *: EmptyTuple] // Ok + check2[Option[Int] *: EmptyTuple] // Error: FromType is widened to Any in Synthesizer diff --git a/tests/pos/i19219.orig.scala b/tests/pos/i19219.orig.scala new file mode 100644 index 000000000000..9577edea6c50 --- /dev/null +++ b/tests/pos/i19219.orig.scala @@ -0,0 +1,21 @@ +object Test: + class Custom extends scala.Product1[String]: + def length: Int = ??? + def apply(i: Int): Boolean = ??? + def drop(n: Int): scala.Seq[Boolean] = ??? + def toSeq: scala.Seq[Boolean] = ??? + + def canEqual(that: Any): Boolean = ??? + + val _1: String = ??? + val _2: String = ??? + val _3: Seq[String] = ??? + + class Unapplied: + def isEmpty: Boolean = ??? + def get: Custom = ??? + + object A: + def unapplySeq(i: Int): Unapplied = ??? + + val A(rest: _*) = 1 diff --git a/tests/pos/i19220.orig.scala b/tests/pos/i19220.orig.scala new file mode 100644 index 000000000000..89a3088abb1b --- /dev/null +++ b/tests/pos/i19220.orig.scala @@ -0,0 +1,20 @@ +object Test: + class Custom extends scala.Product1[String]: + def length: Int = ??? + def apply(i: Int): Boolean = ??? + def drop(n: Int): scala.Seq[Boolean] = ??? + def toSeq: scala.Seq[Boolean] = ??? + + def canEqual(that: Any): Boolean = ??? + + val _1: String = ??? + val _3: Seq[String] = ??? + + class Unapplied: + def isEmpty: Boolean = ??? + def get: Custom = ??? + + object A: + def unapplySeq(i: Int): Unapplied = ??? + + val A(a, rest*) = 1 diff --git a/tests/pos/i19221.orig.scala b/tests/pos/i19221.orig.scala new file mode 100644 index 000000000000..7ce5267e3701 --- /dev/null +++ b/tests/pos/i19221.orig.scala @@ -0,0 +1,17 @@ +object Test: + class Custom extends scala.Product1[String]: + def length: Int = ??? + def apply(i: Int): Boolean = ??? + def drop(n: Int): scala.Seq[Boolean] = ??? + def toSeq: scala.Seq[Boolean] = ??? + + def canEqual(that: Any): Boolean = ??? + + val _1: String = ??? + val _2: String = ??? + val _3: Seq[String] = ??? + + object A: + def unapplySeq(i: Int): Custom = ???
+ + val A(a, rest*) = 1 diff --git a/tests/pos/i19221.scala b/tests/pos/i19221.scala new file mode 100644 index 000000000000..1cf80e27bf68 --- /dev/null +++ b/tests/pos/i19221.scala @@ -0,0 +1,49 @@ +// Product component types, and the sequence element type +final class A; final class B; final class C +final class E + +// Conforms to both sequence matches and product sequence matches +class Both extends Product1[A]: + def length: Int = toSeq.length + def apply(i: Int): E = toSeq.apply(i) + def drop(n: Int): Seq[E] = toSeq.drop(n) + def toSeq: Seq[E] = Seq(new E, new E) + + def canEqual(that: Any) = that.isInstanceOf[Both @unchecked] + + val _1: A = new A + val _2: B = new B + val _3: Seq[C] = Seq(new C) + +// Like Both, but with a missing _2 +class AlmostBoth extends Product1[A]: + def length: Int = toSeq.length + def apply(i: Int): E = toSeq.apply(i) + def drop(n: Int): Seq[E] = toSeq.drop(n) + def toSeq: Seq[E] = Seq(new E, new E) + + def canEqual(that: Any) = that.isInstanceOf[AlmostBoth @unchecked] + + val _1: A = new A + val _3: Seq[C] = Seq(new C) + +// An extractor result holder, to return Both or AlmostBoth +class GetBoth { def isEmpty: Boolean = false; def get = new Both } +class GetAlmostBoth { def isEmpty: Boolean = false; def get = new AlmostBoth } + +// The extractors +object Both { def unapplySeq(x: Any): Both = new Both } +object AlmostBoth { def unapplySeq(x: Any): AlmostBoth = new AlmostBoth } +object GetBoth { def unapplySeq(x: Any): GetBoth = new GetBoth } +object GetAlmostBoth { def unapplySeq(x: Any): GetAlmostBoth = new GetAlmostBoth } + +class Test: + def t1a(x: Any): Seq[E] = x match { case Both(es*) => es } + def t1b(x: Any): Seq[E] = x match { case AlmostBoth(es*) => es } + def t1c(x: Any): Seq[E] = x match { case GetBoth(es*) => es } + def t1d(x: Any): Seq[E] = x match { case GetAlmostBoth(es*) => es } + + def t2a(x: Any): (E, Seq[E]) = x match { case Both(e, es*) => (e, es) } + def t2b(x: Any): (E, Seq[E]) = x match { case AlmostBoth(e, es*) => (e, es) } + def t2c(x: Any): (E, Seq[E]) = x match { case GetBoth(e, es*) => (e, es) } + def t2d(x: Any): (E, Seq[E]) = x match { case GetAlmostBoth(e, es*) => (e, es) } diff --git a/tests/pos/i19354.orig.scala b/tests/pos/i19354.orig.scala new file mode 100644 index 000000000000..0301974a9e59 --- /dev/null +++ b/tests/pos/i19354.orig.scala @@ -0,0 +1,15 @@ +import javax.annotation.processing.{ AbstractProcessor, RoundEnvironment } +import javax.lang.model.element.{ ElementKind, PackageElement, TypeElement } + +import java.util as ju + +class P extends AbstractProcessor { + override def process(annotations: ju.Set[? <: TypeElement], roundEnv: RoundEnvironment): Boolean = { + annotations + .stream() + .flatMap(annotation => roundEnv.getElementsAnnotatedWith(annotation).stream()) + .filter(element => element.getKind == ElementKind.PACKAGE) + .map(element => element.asInstanceOf[PackageElement]) + true + } +} diff --git a/tests/pos/i19354.scala b/tests/pos/i19354.scala new file mode 100644 index 000000000000..db1d4961e79f --- /dev/null +++ b/tests/pos/i19354.scala @@ -0,0 +1,7 @@ +class Foo; class Bar +class Test: + def t1(xs: java.util.stream.Stream[?
<: Foo]) = + xs.map(x => take(x)) + + def take(x: Foo) = "" + def take(x: Bar) = "" diff --git a/tests/pos/i19404.scala b/tests/pos/i19404.scala new file mode 100644 index 000000000000..8d6d4406ebb2 --- /dev/null +++ b/tests/pos/i19404.scala @@ -0,0 +1,13 @@ +given ipEncoder[IP <: IpAddress]: Encoder[IP] = Encoder[String].contramap(_.toString) + +class Encoder[A] { + final def contramap[B](f: B => A): Encoder[B] = new Encoder[B] +} + +object Encoder { + final def apply[A](implicit instance: Encoder[A]): Encoder[A] = instance + implicit final val encodeString: Encoder[String] = new Encoder[String] +} + +trait Json +trait IpAddress \ No newline at end of file diff --git a/tests/pos/i19407.scala b/tests/pos/i19407.scala new file mode 100644 index 000000000000..b7440a53540d --- /dev/null +++ b/tests/pos/i19407.scala @@ -0,0 +1,11 @@ +trait GeneratedEnum +trait Decoder[A] + +object Decoder: + given Decoder[Int] = ??? + +object GeneratedEnumDecoder: + + given [A <: GeneratedEnum]: Decoder[A] = + summon[Decoder[Int]] + ??? \ No newline at end of file diff --git a/tests/pos/i19417/defs_1.scala b/tests/pos/i19417/defs_1.scala new file mode 100644 index 000000000000..92dc10990d90 --- /dev/null +++ b/tests/pos/i19417/defs_1.scala @@ -0,0 +1,5 @@ +trait QueryParamDecoder[E]: + def emap[T](fn: E => Either[Throwable, T]): QueryParamDecoder[T] +object QueryParamDecoder: + def apply[T](implicit ev: QueryParamDecoder[T]): QueryParamDecoder[T] = ev + implicit lazy val stringQueryParamDecoder: QueryParamDecoder[String] = ??? \ No newline at end of file diff --git a/tests/pos/i19417/usage_2.scala b/tests/pos/i19417/usage_2.scala new file mode 100644 index 000000000000..c686f46280d7 --- /dev/null +++ b/tests/pos/i19417/usage_2.scala @@ -0,0 +1,2 @@ +given[E](using e: EnumOf[E]): QueryParamDecoder[E] = QueryParamDecoder[String].emap(_ => Right(???)) +trait EnumOf[E] \ No newline at end of file diff --git a/tests/pos/i2673.scala b/tests/pos/i2673.scala new file mode 100644 index 000000000000..cb426cd6be0f --- /dev/null +++ b/tests/pos/i2673.scala @@ -0,0 +1,8 @@ +//> using options -Xfatal-warnings -deprecation -feature + +package Foos + +object Outer { + class X + object x +} diff --git a/tests/pos/i3323.scala b/tests/pos/i3323.scala new file mode 100644 index 000000000000..94d072d4a2fc --- /dev/null +++ b/tests/pos/i3323.scala @@ -0,0 +1,9 @@ +//> using options -Xfatal-warnings -deprecation -feature + +class Foo { + def foo[A](lss: List[List[A]]): Unit = { + lss match { + case xss: List[List[A]] => + } + } +} diff --git a/tests/pos/i3323b.scala b/tests/pos/i3323b.scala new file mode 100644 index 000000000000..0edc6177bbe9 --- /dev/null +++ b/tests/pos/i3323b.scala @@ -0,0 +1,9 @@ +//> using options -Xfatal-warnings -deprecation -feature + +class Foo { + def foo(lss: List[Int]): Unit = { + lss match { + case xss: List[Int] => + } + } +} diff --git a/tests/pos/i3589b.scala b/tests/pos/i3589b.scala new file mode 100644 index 000000000000..f4552c7f8370 --- /dev/null +++ b/tests/pos/i3589b.scala @@ -0,0 +1,9 @@ +//> using options -Xfatal-warnings -deprecation -feature + +class Test { + def test(x: 1 | 2 | 3) = (x: @annotation.switch) match { + case 1 => 1 + case 2 => 2 + case 3 => 3 + } +} diff --git a/tests/pos/i4166.scala b/tests/pos/i4166.scala new file mode 100644 index 000000000000..2ce9d018b614 --- /dev/null +++ b/tests/pos/i4166.scala @@ -0,0 +1,9 @@ +//> using options -Xfatal-warnings -deprecation -feature + +package foo { + class Hello +} + +package bar { + class Hello +} diff --git a/tests/pos/i4185.scala 
b/tests/pos/i4185.scala new file mode 100644 index 000000000000..643f479c59b6 --- /dev/null +++ b/tests/pos/i4185.scala @@ -0,0 +1,6 @@ +//> using options -Xfatal-warnings -deprecation -feature + +object ord { + class Ord + object Ord +} diff --git a/tests/pos/i4674.scala b/tests/pos/i4674.scala new file mode 100644 index 000000000000..3b570a74e80a --- /dev/null +++ b/tests/pos/i4674.scala @@ -0,0 +1,10 @@ +//> using options -Xfatal-warnings -deprecation -feature + +class Test { + def test(x: String) = { + x.foreach { + case 's' => println("s") + case c: Char => println(c) // should compile without warning + } + } +} diff --git a/tests/pos/i5625b.scala b/tests/pos/i5625b.scala index b2621f9020a8..db2092c18bbc 100644 --- a/tests/pos/i5625b.scala +++ b/tests/pos/i5625b.scala @@ -1,3 +1,5 @@ +//> using options -source:3.3 + object Test { type AV[t <: AnyVal] = t @@ -13,4 +15,4 @@ object Test { summon[LeafElem[Array[Int]] =:= Int] summon[LeafElem[Iterable[Int]] =:= Int] summon[LeafElem[Int] =:= Int] -} \ No newline at end of file +} diff --git a/tests/pos-special/fatal-warnings/i5970.scala b/tests/pos/i5970.scala similarity index 75% rename from tests/pos-special/fatal-warnings/i5970.scala rename to tests/pos/i5970.scala index 51adb8cd0535..e2e79f6c3f11 100644 --- a/tests/pos-special/fatal-warnings/i5970.scala +++ b/tests/pos/i5970.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -deprecation -feature + object Test extends App { case class Foo[T](t: T) diff --git a/tests/pos/i6190a.scala b/tests/pos/i6190a.scala new file mode 100644 index 000000000000..b6d21662389e --- /dev/null +++ b/tests/pos/i6190a.scala @@ -0,0 +1,8 @@ +//> using options -Xfatal-warnings -deprecation -feature + +case class Rule(name: String) +object Rule extends (String => Rule) { + def apply(name: String): Rule = new Rule(name) +} + +def foo = List("1", "2").map(Rule) diff --git a/tests/pos/i6190c.scala b/tests/pos/i6190c.scala new file mode 100644 index 000000000000..37a837addb62 --- /dev/null +++ b/tests/pos/i6190c.scala @@ -0,0 +1,5 @@ +//> using options -Xfatal-warnings -deprecation -feature + +case class Rule(name: String) + +def foo = List("1", "2").map(Rule.apply) diff --git a/tests/pos-special/fatal-warnings/i6290.scala b/tests/pos/i6290.scala similarity index 83% rename from tests/pos-special/fatal-warnings/i6290.scala rename to tests/pos/i6290.scala index b5694dfa296a..bc3646f1e1d2 100644 --- a/tests/pos-special/fatal-warnings/i6290.scala +++ b/tests/pos/i6290.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -deprecation -feature -source:3.3 + class TC { type T } class C(using TC { type T = Int }) diff --git a/tests/pos/i6621.scala b/tests/pos/i6621.scala new file mode 100644 index 000000000000..599a75ba776e --- /dev/null +++ b/tests/pos/i6621.scala @@ -0,0 +1,10 @@ +//> using options -Xfatal-warnings -deprecation -feature + +object Unapply { + def unapply(a: Any): Option[(Int, Int)] = + Some((1, 2)) +} + +object Test { + val Unapply(x, y) = "": @unchecked +} diff --git a/tests/pos/i6716.scala b/tests/pos/i6716.scala index 446cd49c9214..f02559af1e82 100644 --- a/tests/pos/i6716.scala +++ b/tests/pos/i6716.scala @@ -1,15 +1,14 @@ -trait Monad[T] +//> using options -Xfatal-warnings -source 3.4 + class Foo -object Foo { - given Monad[Foo] with {} -} -opaque type Bar = Foo object Bar { - given Monad[Bar] = summon[Monad[Foo]] + given Foo with {} + given List[Foo] = List(summon[Foo]) // ok } -object Test { - val mf = summon[Monad[Foo]] - val mb = summon[Monad[Bar]] -} \ No newline at end of file 
+object Baz { + @annotation.nowarn + given List[Foo] = List(summon[Foo]) // gives a warning, which is suppressed + given Foo with {} +} diff --git a/tests/pos-special/fatal-warnings/i7219b.scala b/tests/pos/i7219b.scala similarity index 80% rename from tests/pos-special/fatal-warnings/i7219b.scala rename to tests/pos/i7219b.scala index 869c72b3b7d0..91dc0d136420 100644 --- a/tests/pos-special/fatal-warnings/i7219b.scala +++ b/tests/pos/i7219b.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -deprecation -feature + object Foo { enum MyEnum { case Red diff --git a/tests/pos/i7219c.scala b/tests/pos/i7219c.scala new file mode 100644 index 000000000000..1ddfeca39ff1 --- /dev/null +++ b/tests/pos/i7219c.scala @@ -0,0 +1,20 @@ +//> using options -Xfatal-warnings -deprecation -feature + +object Foo { + enum MyEnum { + case Red + case Blue(msg: String) + } + export MyEnum._ +} + +object Bar { + type Blue = Foo.Blue +} + +import Foo.* + +def foo(a: MyEnum): Seq[Bar.Blue] = a match { + case Red => Seq.empty + case m: Foo.Blue => Seq(m) +} diff --git a/tests/pos/i7296.scala b/tests/pos/i7296.scala new file mode 100644 index 000000000000..67fb3eee48a4 --- /dev/null +++ b/tests/pos/i7296.scala @@ -0,0 +1,4 @@ +//> using options -source future -deprecation -Xfatal-warnings + +class Foo: + private var blah: Double = 0L diff --git a/tests/pos/i7424.scala b/tests/pos/i7424.scala index 33987610519e..416b57bfffe0 100644 --- a/tests/pos/i7424.scala +++ b/tests/pos/i7424.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror object GADT { import =:=._ diff --git a/tests/pos/i7424b.scala b/tests/pos/i7424b.scala index 5fdf878a2330..5407270e1a97 100644 --- a/tests/pos/i7424b.scala +++ b/tests/pos/i7424b.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror object GADT { import =:=._ diff --git a/tests/pos/i7424c.scala b/tests/pos/i7424c.scala index e27b17964253..b40ef00a48dd 100644 --- a/tests/pos/i7424c.scala +++ b/tests/pos/i7424c.scala @@ -1,6 +1,6 @@ -// scalac: -Werror +//> using options -Werror object Main extends App: - enum Extends[A, B]: + infix enum Extends[A, B]: case Ev[B, A <: B]() extends (A Extends B) def cast(a: A): B = this match { diff --git a/tests/pos/i7575.scala b/tests/pos/i7575.scala new file mode 100644 index 000000000000..ea991e649e8c --- /dev/null +++ b/tests/pos/i7575.scala @@ -0,0 +1,3 @@ +//> using options -language:dynamics + +class Foo() extends Dynamic diff --git a/tests/pos-custom-args/erased/i7868.scala b/tests/pos/i7868.scala similarity index 95% rename from tests/pos-custom-args/erased/i7868.scala rename to tests/pos/i7868.scala index 02d6ad0e7ca6..f4d7da6acb5b 100644 --- a/tests/pos-custom-args/erased/i7868.scala +++ b/tests/pos/i7868.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + import language.experimental.namedTypeArguments import scala.compiletime.* import scala.compiletime.ops.int.* diff --git a/tests/pos-custom-args/erased/i7878.scala b/tests/pos/i7878.scala similarity index 85% rename from tests/pos-custom-args/erased/i7878.scala rename to tests/pos/i7878.scala index 63b082d52ca0..05a1b6093e6a 100644 --- a/tests/pos-custom-args/erased/i7878.scala +++ b/tests/pos/i7878.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + object Boom { import scala.compiletime.* trait Fail[A <: Int, B <: Int] diff --git a/tests/pos/i8715.scala b/tests/pos/i8715.scala deleted file mode 100644 index 0490ce53c8cf..000000000000 --- a/tests/pos/i8715.scala +++ /dev/null @@ -1,2 +0,0 @@ 
-@main -def Test = List(42) match { case List(xs @ (ys*)) => xs } diff --git a/tests/pos/i8758.scala b/tests/pos/i8758.scala new file mode 100644 index 000000000000..ad170750c09e --- /dev/null +++ b/tests/pos/i8758.scala @@ -0,0 +1,7 @@ +//> using options -Xfatal-warnings -deprecation -feature + +def test = "?johndoe" match { + case s":$name" => println(s":name $name") + case s"{$name}" => println(s"{name} $name") + case s"?$pos" => println(s"pos $pos") +} diff --git a/tests/pos/i8781.scala b/tests/pos/i8781.scala new file mode 100644 index 000000000000..857ff43b9c0a --- /dev/null +++ b/tests/pos/i8781.scala @@ -0,0 +1,12 @@ +//> using options -Xfatal-warnings -deprecation -feature + +@main +def Test = + + val x: Int | String = 1 + + println(x.isInstanceOf[Int]) + + x match + case _: Int => println("Int") + case _: String => println("String") diff --git a/tests/pos/i8875.scala b/tests/pos/i8875.scala new file mode 100644 index 000000000000..c0de263417e0 --- /dev/null +++ b/tests/pos/i8875.scala @@ -0,0 +1,7 @@ +//> using options -Xprint:getters + +class A { + extension (a: Int) { + def foo: Int = 1 + } +} \ No newline at end of file diff --git a/tests/pos-custom-args/no-experimental/i8945.scala b/tests/pos/i8945.scala similarity index 92% rename from tests/pos-custom-args/no-experimental/i8945.scala rename to tests/pos/i8945.scala index 5dded16f0160..2ae8fc268cbf 100644 --- a/tests/pos-custom-args/no-experimental/i8945.scala +++ b/tests/pos/i8945.scala @@ -1,3 +1,5 @@ +//> using options -Yno-experimental + // src-2/MacroImpl.scala trait Context { object universe { diff --git a/tests/pos-special/fatal-warnings/i8956.scala b/tests/pos/i8956.scala similarity index 86% rename from tests/pos-special/fatal-warnings/i8956.scala rename to tests/pos/i8956.scala index 6b935946de17..a6937a3f2363 100644 --- a/tests/pos-special/fatal-warnings/i8956.scala +++ b/tests/pos/i8956.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -deprecation -feature + type Numeric = Double | Int val v1 = 100 diff --git a/tests/pos-special/fatal-warnings/i9260.scala b/tests/pos/i9260.scala similarity index 82% rename from tests/pos-special/fatal-warnings/i9260.scala rename to tests/pos/i9260.scala index df548f393eea..cf740eb3c096 100644 --- a/tests/pos-special/fatal-warnings/i9260.scala +++ b/tests/pos/i9260.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -deprecation -feature + package asts enum Ast[-T >: Null]: diff --git a/tests/pos/i9267.scala b/tests/pos/i9267.scala new file mode 100644 index 000000000000..49f65ff5e3a1 --- /dev/null +++ b/tests/pos/i9267.scala @@ -0,0 +1,3 @@ +//> using options -Ystop-after:erasure + +class A diff --git a/tests/pos/i9751.scala b/tests/pos/i9751.scala new file mode 100644 index 000000000000..78c9116f77d1 --- /dev/null +++ b/tests/pos/i9751.scala @@ -0,0 +1,13 @@ +//> using options -Xfatal-warnings -deprecation -feature + +object Test { + extension (x: Int) + inline def times(inline op: Unit): Unit = { + var count = 0 + while count < x do + op + count += 1 + } + + 10.times { println("hello") } +} diff --git a/tests/pos-special/fatal-warnings/i9751b.scala b/tests/pos/i9751b.scala similarity index 78% rename from tests/pos-special/fatal-warnings/i9751b.scala rename to tests/pos/i9751b.scala index 6336b0c47fd4..bbbe8052a20c 100644 --- a/tests/pos-special/fatal-warnings/i9751b.scala +++ b/tests/pos/i9751b.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -deprecation -feature + object Test { inline def f(inline x: Boolean): Unit = inline if x then println() diff 
--git a/tests/pos/i9776.scala b/tests/pos/i9776.scala new file mode 100644 index 000000000000..73efe2531918 --- /dev/null +++ b/tests/pos/i9776.scala @@ -0,0 +1,40 @@ +//> using options -Xfatal-warnings -deprecation -feature + +import scala.annotation.switch + +sealed trait Fruit + +object Fruit { + case object Apple extends Fruit + case object Banana extends Fruit + case object Orange extends Fruit + + def isCitrus(fruit: Fruit): Boolean = + (fruit: @switch) match { + case Orange => true + case _ => false + } +} + + +sealed trait TaggedFruit { + def tag: Int +} + +object TaggedFruit { + case object Apple extends TaggedFruit { + val tag = 1 + } + case object Banana extends TaggedFruit { + val tag = 2 + } + case object Orange extends TaggedFruit { + val tag = 3 + } + + def isCitrus(fruit: TaggedFruit): Boolean = + (fruit.tag: @switch) match { + case 3 => true + case _ => false + } +} diff --git a/tests/pos/i9804.scala b/tests/pos/i9804.scala new file mode 100644 index 000000000000..80b0de79b97f --- /dev/null +++ b/tests/pos/i9804.scala @@ -0,0 +1,7 @@ +//> using options -Xfatal-warnings -deprecation -feature + +import scala.quoted.* + +def f[A: Type](e: Expr[A])(using Quotes): Expr[A] = e match { + case '{ $e2 } => e2 +} diff --git a/tests/new/imports-pos.scala b/tests/pos/imports-pos.scala similarity index 88% rename from tests/new/imports-pos.scala rename to tests/pos/imports-pos.scala index a9b90dbf4527..fae48876507d 100644 --- a/tests/new/imports-pos.scala +++ b/tests/pos/imports-pos.scala @@ -1,3 +1,5 @@ +//> using options -source 3.2 + package test; import java.lang.System as S diff --git a/tests/new/infer2-pos.scala b/tests/pos/infer2-pos.scala similarity index 84% rename from tests/new/infer2-pos.scala rename to tests/pos/infer2-pos.scala index 2ce88be544b9..0854623ca93a 100644 --- a/tests/new/infer2-pos.scala +++ b/tests/pos/infer2-pos.scala @@ -1,3 +1,5 @@ +//> using options -source 3.2 + package test class Lst[T] case class cons[T](x: T, xs: Lst[T]) extends Lst[T] diff --git a/tests/pos/inline-eta.scala b/tests/pos/inline-eta.scala new file mode 100644 index 000000000000..1fc0246fba11 --- /dev/null +++ b/tests/pos/inline-eta.scala @@ -0,0 +1,9 @@ +class Foo(x: Int) + +object A: + inline def bar(x: Int): Int = x + val g1 = bar + val g2: Int => Int = bar + + def foo(xs: List[Int]) = + xs.map(Foo.apply) // use the `inline def apply` constructor proxy diff --git a/tests/pos/intersection.scala b/tests/pos/intersection.scala index 094e6a4e9e8e..9b7e15b317e3 100644 --- a/tests/pos/intersection.scala +++ b/tests/pos/intersection.scala @@ -40,4 +40,15 @@ object Test { def fooAB1: Int = fooAB def fooBA = (??? : B with A).f def fooBA1: Int = fooBA -} \ No newline at end of file +} + +object Test2: + class Row[+X] + class A + class B + class C extends Row[A] + class D extends Row[B] + val x: C & D = ??? + val y: Row[A & B] = x + + diff --git a/tests/pos/irrefutable.scala b/tests/pos/irrefutable.scala deleted file mode 100644 index 0a792b644a09..000000000000 --- a/tests/pos/irrefutable.scala +++ /dev/null @@ -1,22 +0,0 @@ -// The test which this should perform but does not -// is that f1 is recognized as irrefutable and f2 is not -// This can be recognized via the generated classes: -// -// A$$anonfun$f1$1.class -// A$$anonfun$f2$1.class -// A$$anonfun$f2$2.class -// -// The extra one in $f2$ is the filter. -// -// !!! Marking with exclamation points so maybe someday -// this test will be finished. 
-class A { - case class Foo[T](x: T) - - def f1(xs: List[Foo[Int]]) = { - for (Foo(x: Int) <- xs) yield x - } - def f2(xs: List[Foo[Any]]) = { - for (Foo(x: Int) <- xs) yield x - } -} diff --git a/tests/pos/jdk-8-app.scala b/tests/pos/jdk-8-app.scala new file mode 100644 index 000000000000..593547b6e377 --- /dev/null +++ b/tests/pos/jdk-8-app.scala @@ -0,0 +1,7 @@ +//> using options -release:8 + +import java.time.LocalDate + +object Jdk8App extends App { + println(LocalDate.now()) +} diff --git a/tests/pos/kind-projector-underscores.scala b/tests/pos/kind-projector-underscores.scala new file mode 100644 index 000000000000..f72a300a64eb --- /dev/null +++ b/tests/pos/kind-projector-underscores.scala @@ -0,0 +1,61 @@ +//> using options -Ykind-projector:underscores + +package kind_projector + +trait Foo[F[_]] +trait Qux[F[_, _]] +trait Baz[F[_], A, B] + +trait FooPlus[+F[+_]] +trait QuxPlus[+F[+_, +_]] +trait BazPlus[+F[+_], +A, +B] + +trait FooMinus[-F[-_]] +trait QuxMinus[-F[-_, -_]] +trait BazMinus[-F[-_], -A, -B] + +class Bar1 extends Foo[Either[Int, _]] +class Bar2 extends Foo[Either[_, Int]] +class Bar3 extends Foo[_ => Int] +class Bar4 extends Foo[Int => _] +class Bar5 extends Foo[(Int, _, Int)] +class Bar6 extends Foo[λ[x => Either[Int, x]]] +class Bar7 extends Qux[λ[(x, y) => Either[y, x]]] +class Bar8 extends Foo[Baz[Int => _, _, Int]] +class Bar9 extends Foo[λ[x => Baz[x => _, Int, x]]] + +class BarPlus1 extends FooPlus[Either[Int, +_]] +class BarPlus2 extends FooPlus[Either[+_, Int]] +class BarPlus3 extends FooPlus[Int => +_] +class BarPlus4 extends FooPlus[(Int, +_, Int)] +class BarPlus5 extends FooPlus[λ[`+x` => Either[Int, x]]] +class BarPlus6 extends QuxPlus[λ[(`+x`, `+y`) => Either[y, x]]] +class BarPlus7 extends FooPlus[BazPlus[Int => +_, +_, Int]] + +class BarMinus1 extends FooMinus[-_ => Int] + +class VarianceAnnotationIsActuallyIgnored1 extends FooPlus[Either[Int, -_]] +class VarianceAnnotationIsActuallyIgnored2 extends FooPlus[Either[-_, Int]] +class VarianceAnnotationIsActuallyIgnored3 extends FooMinus[+_ => Int] +class VarianceAnnotationIsActuallyIgnored4 extends FooPlus[Int => -_] +class VarianceAnnotationIsActuallyIgnored5 extends FooPlus[(Int, -_, Int)] +class VarianceAnnotationIsActuallyIgnored6 extends FooPlus[λ[`-x` => Either[Int, x]]] +class VarianceAnnotationIsActuallyIgnored7 extends QuxPlus[λ[(`-x`, `-y`) => Either[y, x]]] +class VarianceAnnotationIsActuallyIgnored8 extends FooPlus[BazPlus[Int => -_, -_, Int]] +class VarianceAnnotationIsActuallyIgnored9 extends Foo[λ[`-x` => BazPlus[x => -_, Int, x]]] + +class BackticksAreFine1 extends FooPlus[Either[Int, `-_`]] +class BackticksAreFine2 extends FooPlus[Either[`-_`, Int]] +class BackticksAreFine3 extends FooMinus[`+_` => Int] +class BackticksAreFine4 extends FooPlus[Int => `-_`] +class BackticksAreFine5 extends FooPlus[(Int, `-_`, Int)] +class BackticksAreFine6 extends FooPlus[BazPlus[Int => `-_`, `-_`, Int]] +class BackticksAreFine7 extends Foo[λ[`-x` => BazPlus[x => `-_`, Int, x]]] + +class SpacesAreFine1 extends FooPlus[Either[Int, - _ ]] +class SpacesAreFine2 extends FooPlus[Either[ - _ , Int]] +class SpacesAreFine3 extends FooMinus[ + _ => Int] +class SpacesAreFine4 extends FooPlus[Int => - _] +class SpacesAreFine5 extends FooPlus[(Int, - _, Int)] +class SpacesAreFine6 extends FooPlus[BazPlus[Int => - _ , - _, Int]] +class SpacesAreFine7 extends Foo[λ[`-x` => BazPlus[x => - _ , Int, x]]] diff --git a/tests/pos/kind-projector.scala b/tests/pos/kind-projector.scala new file mode 100644 index 
000000000000..ff787d0111e2 --- /dev/null +++ b/tests/pos/kind-projector.scala @@ -0,0 +1,62 @@ +//> using options -Ykind-projector + +package kind_projector + +trait Foo[F[_]] +trait Qux[F[_, _]] +trait Baz[F[_], A, B] + +trait FooPlus[+F[+_]] +trait QuxPlus[+F[+_, +_]] +trait BazPlus[+F[+_], +A, +B] + +trait FooMinus[-F[-_]] +trait QuxMinus[-F[-_, -_]] +trait BazMinus[-F[-_], -A, -B] + +class Bar1 extends Foo[Either[Int, *]] +class Bar2 extends Foo[Either[*, Int]] +class Bar3 extends Foo[* => Int] +class Bar4 extends Foo[Int => *] +class Bar5 extends Foo[(Int, *, Int)] +class Bar6 extends Foo[λ[x => Either[Int, x]]] +class Bar7 extends Qux[λ[(x, y) => Either[y, x]]] +class Bar8 extends Foo[Baz[Int => *, *, Int]] +class Bar9 extends Foo[λ[x => Baz[x => *, Int, x]]] + +class BarPlus1 extends FooPlus[Either[Int, +*]] +class BarPlus2 extends FooPlus[Either[+*, Int]] +class BarPlus3 extends FooPlus[Int => +*] +class BarPlus4 extends FooPlus[(Int, +*, Int)] +class BarPlus5 extends FooPlus[λ[`+x` => Either[Int, x]]] +class BarPlus6 extends QuxPlus[λ[(`+x`, `+y`) => Either[y, x]]] +class BarPlus7 extends FooPlus[BazPlus[Int => +*, +*, Int]] + +class BarMinus1 extends FooMinus[-* => Int] + +class VarianceAnnotationIsActuallyIgnored1 extends FooPlus[Either[Int, -*]] +class VarianceAnnotationIsActuallyIgnored2 extends FooPlus[Either[-*, Int]] +class VarianceAnnotationIsActuallyIgnored3 extends FooMinus[+* => Int] +class VarianceAnnotationIsActuallyIgnored4 extends FooPlus[Int => -*] +class VarianceAnnotationIsActuallyIgnored5 extends FooPlus[(Int, -*, Int)] +class VarianceAnnotationIsActuallyIgnored6 extends FooPlus[λ[`-x` => Either[Int, x]]] +class VarianceAnnotationIsActuallyIgnored7 extends QuxPlus[λ[(`-x`, `-y`) => Either[y, x]]] +class VarianceAnnotationIsActuallyIgnored8 extends FooPlus[BazPlus[Int => -*, -*, Int]] +class VarianceAnnotationIsActuallyIgnored9 extends Foo[λ[`-x` => BazPlus[x => -*, Int, x]]] + +class BackticksAreFine1 extends FooPlus[Either[Int, `-*`]] +class BackticksAreFine2 extends FooPlus[Either[`-*`, Int]] +class BackticksAreFine3 extends FooMinus[`+*` => Int] +class BackticksAreFine4 extends FooPlus[Int => `-*`] +class BackticksAreFine5 extends FooPlus[(Int, `-*`, Int)] +class BackticksAreFine6 extends FooPlus[BazPlus[Int => `-*`, `-*`, Int]] +class BackticksAreFine7 extends Foo[λ[`-x` => BazPlus[x => `-*`, Int, x]]] +class BackticksAreFine8 extends Foo[λ[`x` => BazPlus[x => `*`, Int, x]]] + +// https://github.com/lampepfl/dotty/issues/13141 +// i13141 +object A { + class X { type Blah = Int } + val * = new X + val a: *.Blah = 2 +} diff --git a/tests/pos/looping-givens.scala b/tests/pos/looping-givens.scala new file mode 100644 index 000000000000..0e615c8251df --- /dev/null +++ b/tests/pos/looping-givens.scala @@ -0,0 +1,11 @@ +import language.future + +class A +class B + +given joint(using a: A, b: B): (A & B) = ??? 
+ +def foo(using a: A, b: B) = + given aa: A = summon // error + given bb: B = summon // error + given ab: (A & B) = summon // error diff --git a/tests/new/looping-jsig.scala b/tests/pos/looping-jsig.scala similarity index 93% rename from tests/new/looping-jsig.scala rename to tests/pos/looping-jsig.scala index 18777ed121b4..72f6f48f2d91 100644 --- a/tests/new/looping-jsig.scala +++ b/tests/pos/looping-jsig.scala @@ -1,3 +1,5 @@ +//> using options -source 3.2 + import scala.collection.mutable.* trait BugTrack { diff --git a/tests/pos/match-type-disjoint-transitivity.scala b/tests/pos/match-type-disjoint-transitivity.scala new file mode 100644 index 000000000000..84872d8d2a3a --- /dev/null +++ b/tests/pos/match-type-disjoint-transitivity.scala @@ -0,0 +1,56 @@ +/* Tests that the following property holds for a chosen set of types (S, T, U): + * + * If S <: T and T provably disjoint from U, then S provably disjoint from U. + */ + +class Parent[T] +class Child[T] extends Parent[T] +trait ChildTrait[T] extends Parent[T] + +class OtherClass + +trait Common[A] +trait Left[A] extends Common[A] +trait Right[A] extends Common[A] + +// Since Parent[Boolean] disjoint from Parent[Int], we must have Child[Boolean] also disjoint from Parent[Int] +object Test1: + type MT[X] = X match + case Parent[Int] => Int + case Parent[Boolean] => Boolean + + def test(): Unit = + summon[MT[Parent[Int]] =:= Int] + summon[MT[Parent[Boolean]] =:= Boolean] + + summon[MT[Child[Int]] =:= Int] + summon[MT[Child[Boolean]] =:= Boolean] + end test +end Test1 + +// Since Parent[Int] disjoint from OtherClass, we must have Child[Int] and ChildTrait[T] also disjoint from OtherClass +object Test2: + type MT[X] = X match + case OtherClass => Int + case Parent[Int] => Boolean + + def test(): Unit = + summon[MT[OtherClass] =:= Int] + summon[MT[Parent[Int]] =:= Boolean] + + summon[MT[Child[Int]] =:= Boolean] + summon[MT[ChildTrait[Int]] =:= Boolean] + end test +end Test2 + +// Since Common[Int] is disjoint from Right[Boolean], we must have Left[Int] disjoint from Right[Boolean] +object Test3: + type MT[X] = X match + case Right[Boolean] => Int + case Any => Boolean + + def test(): Unit = + summon[MT[Common[Int]] =:= Boolean] + summon[MT[Left[Int]] =:= Boolean] + end test +end Test3 diff --git a/tests/pos/match-type-enumeration-value-hack.scala b/tests/pos/match-type-enumeration-value-hack.scala new file mode 100644 index 000000000000..b1f0146c012d --- /dev/null +++ b/tests/pos/match-type-enumeration-value-hack.scala @@ -0,0 +1,11 @@ +type EnumValueAux[A] = ({ type Value }) { type Value = A } + +type EnumValue[E <: Enumeration] = E match + case EnumValueAux[t] => t + +object Suit extends Enumeration: + val Hearts, Diamonds, Clubs, Spades = Val() + +object Test: + summon[Suit.Value =:= EnumValue[Suit.type]] +end Test diff --git a/tests/pos/matchable-same-type.scala b/tests/pos/matchable-same-type.scala new file mode 100644 index 000000000000..71ab788d0d16 --- /dev/null +++ b/tests/pos/matchable-same-type.scala @@ -0,0 +1,9 @@ +//> using options -Xfatal-warnings -deprecation -feature + +import scala.language.`future-migration` + +type X +def x: X = ??? 
+def test: Unit = + x match + case y: X => diff --git a/tests/new/matthias1.scala b/tests/pos/matthias1.scala similarity index 86% rename from tests/new/matthias1.scala rename to tests/pos/matthias1.scala index a923a529fe0f..91a79976d382 100644 --- a/tests/new/matthias1.scala +++ b/tests/pos/matthias1.scala @@ -1,3 +1,5 @@ +//> using options -source 3.2 + class A() { class B() { def foo(x: B) = 0 diff --git a/tests/new/michel6.scala b/tests/pos/michel6.scala similarity index 75% rename from tests/new/michel6.scala rename to tests/pos/michel6.scala index b32e8bed75a1..c8e9c590fff4 100644 --- a/tests/new/michel6.scala +++ b/tests/pos/michel6.scala @@ -1,3 +1,5 @@ +//> using options -source 3.2 + object M { def f(x: Int): Unit = {} diff --git a/tests/pos/moduletrans.scala b/tests/pos/moduletrans.scala new file mode 100644 index 000000000000..8982c5bba890 --- /dev/null +++ b/tests/pos/moduletrans.scala @@ -0,0 +1,10 @@ +//> using options -source 3.2 + +object m1 { + + class m() { + def f() = 5 + } + final val m: m = new m() + +} diff --git a/tests/pos/mt-redux-norm.perspective.scala b/tests/pos/mt-redux-norm.perspective.scala new file mode 100644 index 000000000000..96e3841c0d70 --- /dev/null +++ b/tests/pos/mt-redux-norm.perspective.scala @@ -0,0 +1,25 @@ +// while making MT post-redux consistent in its normalisation/simplification +// one version of the change broke a line of the perspective community project in CI +// this is a minimisation of the failure + +import scala.compiletime._, scala.deriving._ + +transparent inline def foo(using m: Mirror): Unit = + constValueTuple[m.MirroredElemLabels].toList.toSet // was: +//-- [E057] Type Mismatch Error: cat.scala:8:14 ---------------------------------- +//8 |def test = foo(using summon[Mirror.Of[Cat]]) +// | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +// |Type argument ("name" : String) | Nothing does not conform to lower bound m$proxy1.MirroredElemLabels match { +// | case EmptyTuple => Nothing +// | case h *: t => h | Tuple.Fold[t, Nothing, [x, y] =>> x | y] +// |} +// |----------------------------------------------------------------------------- +// |Inline stack trace +// |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +// |This location contains code that was inlined from cat.scala:4 +//4 | constValueTuple[m.MirroredElemLabels].toList.toSet +// | ^ + +case class Cat(name: String) + +def test = foo(using summon[Mirror.Of[Cat]]) diff --git a/tests/pos/mt-scrutinee-widen3.scala b/tests/pos/mt-scrutinee-widen3.scala new file mode 100644 index 000000000000..6e06cb4e1d4c --- /dev/null +++ b/tests/pos/mt-scrutinee-widen3.scala @@ -0,0 +1,12 @@ +// Like widen2, but using a.type only, meaning it should typecheck +import scala.util.Random +val x = 42 + +type IsX[T] = + T match + case x.type => true + case _ => false + +def bothXOrNot(a: Int, b: Int)(using IsX[a.type] =:= IsX[a.type]) = ??? 
+ +def test = bothXOrNot(Random.nextInt(), Random.nextInt()) diff --git a/tests/pos/multiple-additional-imports.scala b/tests/pos/multiple-additional-imports.scala index a86c7e8fc342..d5afc7ada7df 100644 --- a/tests/pos/multiple-additional-imports.scala +++ b/tests/pos/multiple-additional-imports.scala @@ -1,4 +1,4 @@ -// scalac: -Yimports:scala,java.lang,scala.Predef,scala.annotation,scala.util.matching +//> using options -Yimports:scala,java.lang,scala.Predef,scala.annotation,scala.util.matching class annotation extends Annotation val s: String = "str" diff --git a/tests/pos-special/fatal-warnings/not-looping-implicit.scala b/tests/pos/not-looping-implicit.scala similarity index 96% rename from tests/pos-special/fatal-warnings/not-looping-implicit.scala rename to tests/pos/not-looping-implicit.scala index a35945bfe373..90fba9f807a7 100644 --- a/tests/pos-special/fatal-warnings/not-looping-implicit.scala +++ b/tests/pos/not-looping-implicit.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -deprecation -feature + import scala.deriving.Mirror import scala.compiletime._ diff --git a/tests/pos/nullarify.scala b/tests/pos/nullarify.scala index 62d16ba11f8b..159b22d8482b 100644 --- a/tests/pos/nullarify.scala +++ b/tests/pos/nullarify.scala @@ -1,3 +1,5 @@ +//> using options -Ycheck:nullarify + object Test { def foo: Int = 2 diff --git a/tests/pos/patmat-exhaustive.scala b/tests/pos/patmat-exhaustive.scala new file mode 100644 index 000000000000..9e3cb7d8f615 --- /dev/null +++ b/tests/pos/patmat-exhaustive.scala @@ -0,0 +1,12 @@ +//> using options -Xfatal-warnings -deprecation -feature + +def foo: Unit = + object O: + sealed abstract class A + class B extends O.A + class C extends O.A + + val x: O.A = ??? + x match + case x: B => ??? + case x: C => ??? 
diff --git a/tests/new/patterns.scala b/tests/pos/patterns.scala similarity index 96% rename from tests/new/patterns.scala rename to tests/pos/patterns.scala index 92a38118d7ca..78cd813dc621 100644 --- a/tests/new/patterns.scala +++ b/tests/pos/patterns.scala @@ -1,3 +1,5 @@ +//> using options -source 3.2 + trait Option[+a] {} case class Some[a](x: a) extends Option[a] { diff --git a/tests/new/patterns1.scala b/tests/pos/patterns1.scala similarity index 90% rename from tests/new/patterns1.scala rename to tests/pos/patterns1.scala index f660ea054360..8ea78d3c0aa5 100644 --- a/tests/new/patterns1.scala +++ b/tests/pos/patterns1.scala @@ -1,3 +1,5 @@ +//> using options -source 3.2 + trait Option[+a] case class Some[a](x: a) extends Option[a] diff --git a/tests/new/pmbug.scala b/tests/pos/pmbug.scala similarity index 82% rename from tests/new/pmbug.scala rename to tests/pos/pmbug.scala index 7d94e7a8bdfd..5cfb1c3d5da0 100644 --- a/tests/new/pmbug.scala +++ b/tests/pos/pmbug.scala @@ -1,3 +1,5 @@ +//> using options -source 3.2 + object Test { def flatten[a](l: List[List[a]]): List[a] = l match { diff --git a/tests/pos/poly-erased-functions.scala b/tests/pos/poly-erased-functions.scala new file mode 100644 index 000000000000..8c7385edb86a --- /dev/null +++ b/tests/pos/poly-erased-functions.scala @@ -0,0 +1,14 @@ +import language.experimental.erasedDefinitions + +object Test: + type T1 = [X] => (erased x: X, y: Int) => Int + type T2 = [X] => (x: X, erased y: Int) => X + + val t1 = [X] => (erased x: X, y: Int) => y + val t2 = [X] => (x: X, erased y: Int) => x + + erased class A + + type T3 = [X] => (x: A, y: X) => X + + val t3 = [X] => (x: A, y: X) => y diff --git a/tests/pos/polymorphic-functions-this.scala b/tests/pos/polymorphic-functions-this.scala new file mode 100644 index 000000000000..91e1b38ed714 --- /dev/null +++ b/tests/pos/polymorphic-functions-this.scala @@ -0,0 +1,10 @@ +trait Foo: + type X + def x: X + val f: [T <: this.X] => (T, this.X) => (T, this.X) = + [T <: this.X] => (x: T, y: this.X) => (x, y) + f(x, x) + + val g: [T <: this.type] => (T, this.type) => (T, this.type) = + [T <: this.type] => (x: T, y: this.type) => (x, y) + g(this, this) diff --git a/tests/pos/private-this-future-migration.scala b/tests/pos/private-this-future-migration.scala new file mode 100644 index 000000000000..cdcc6a2c0321 --- /dev/null +++ b/tests/pos/private-this-future-migration.scala @@ -0,0 +1,5 @@ +import scala.language.`future-migration` + +class Foo: + private[this] def foo: Int = ??? // warn + protected[this] def bar: Int = ??? // warn diff --git a/tests/pos/private-this.scala b/tests/pos/private-this.scala new file mode 100644 index 000000000000..18de91df72cb --- /dev/null +++ b/tests/pos/private-this.scala @@ -0,0 +1,3 @@ +class Foo: + private[this] def foo: Int = ??? + protected[this] def bar: Int = ??? diff --git a/tests/new/private-types-after-typer.scala b/tests/pos/private-types-after-typer.scala similarity index 86% rename from tests/new/private-types-after-typer.scala rename to tests/pos/private-types-after-typer.scala index 5c20cac2a1fb..ed5fc0f2cecd 100644 --- a/tests/new/private-types-after-typer.scala +++ b/tests/pos/private-types-after-typer.scala @@ -1,3 +1,5 @@ +//> using options -source 3.2 + // Testing that the type of the outer accessor in O2 // doesn't crash the compiler over private type escaping scope. 
trait T { diff --git a/tests/pos/projection.scala b/tests/pos/projection.scala new file mode 100644 index 000000000000..04baa8157139 --- /dev/null +++ b/tests/pos/projection.scala @@ -0,0 +1,6 @@ +//> using options -source 3.2 + +class C { type T } +object test { + def x: C#T = ??? +} diff --git a/tests/pos/provably-disjoint-infinite-recursion-1.scala b/tests/pos/provably-disjoint-infinite-recursion-1.scala new file mode 100644 index 000000000000..a6b2e698f5e2 --- /dev/null +++ b/tests/pos/provably-disjoint-infinite-recursion-1.scala @@ -0,0 +1,4 @@ +class Test { + def isTraversableAgain(from: Iterator[Int]): Boolean = + from.isInstanceOf[Iterable[?]] +} diff --git a/tests/pos/provably-disjoint-infinite-recursion-2.scala b/tests/pos/provably-disjoint-infinite-recursion-2.scala new file mode 100644 index 000000000000..204d6bfbbcf3 --- /dev/null +++ b/tests/pos/provably-disjoint-infinite-recursion-2.scala @@ -0,0 +1,10 @@ +type Tupled[A] <: Tuple = A match + case Tuple => A & Tuple + case _ => A *: EmptyTuple + +enum Day: + case Saturday, Sunday + +type Foo = Tupled[Day] + +def foo(): Foo = Day.Saturday *: EmptyTuple diff --git a/tests/pos/scala3mock.scala b/tests/pos/scala3mock.scala new file mode 100644 index 000000000000..73f25701d1c2 --- /dev/null +++ b/tests/pos/scala3mock.scala @@ -0,0 +1,11 @@ +class MockFunction1[T1]: + def expects(v1: T1 | Foo): Any = ??? + def expects(matcher: String): Any = ??? + +def when[T1](f: T1 => Any): MockFunction1[T1] = ??? + +class Foo + +def main = + val f: Foo = new Foo + when((x: Foo) => "").expects(f) diff --git a/tests/new/selftails.scala b/tests/pos/selftails.scala similarity index 95% rename from tests/new/selftails.scala rename to tests/pos/selftails.scala index a4253b80c7b0..8a68e851906b 100644 --- a/tests/new/selftails.scala +++ b/tests/pos/selftails.scala @@ -1,3 +1,5 @@ +//> using options -source 3.2 + package net.liftweb.util /** diff --git a/tests/new/seqtest2.scala b/tests/pos/seqtest2.scala similarity index 85% rename from tests/new/seqtest2.scala rename to tests/pos/seqtest2.scala index 239b1b58168d..3e55c006b3d2 100644 --- a/tests/new/seqtest2.scala +++ b/tests/pos/seqtest2.scala @@ -1,3 +1,5 @@ +//> using options -source 3.2 + object test { val b = List(1, 2, 3); diff --git a/tests/pos/single-additional-import.scala b/tests/pos/single-additional-import.scala index d8ca5b54e05e..432826b74883 100644 --- a/tests/pos/single-additional-import.scala +++ b/tests/pos/single-additional-import.scala @@ -1,2 +1,2 @@ -// scalac: -Yimports:scala.annotation +//> using options -Yimports:scala.annotation class annotation extends Annotation diff --git a/tests/pos/source-import-3-3-migration.scala b/tests/pos/source-import-3-3-migration.scala new file mode 100644 index 000000000000..a8d1d1683288 --- /dev/null +++ b/tests/pos/source-import-3-3-migration.scala @@ -0,0 +1 @@ +import language.`3.3-migration` diff --git a/tests/pos/source-import-3-3.scala b/tests/pos/source-import-3-3.scala new file mode 100644 index 000000000000..4d580541ebcf --- /dev/null +++ b/tests/pos/source-import-3-3.scala @@ -0,0 +1 @@ +import language.`3.3` diff --git a/tests/pos/source-import-3-4-migration.scala b/tests/pos/source-import-3-4-migration.scala new file mode 100644 index 000000000000..5270f165a30b --- /dev/null +++ b/tests/pos/source-import-3-4-migration.scala @@ -0,0 +1 @@ +import language.`3.4-migration` diff --git a/tests/pos/source-import-3-4.scala b/tests/pos/source-import-3-4.scala new file mode 100644 index 000000000000..7ae4f41ac146 --- /dev/null +++ 
b/tests/pos/source-import-3-4.scala @@ -0,0 +1 @@ +import language.`3.4` diff --git a/tests/pos-special/spec-t5545/S_1.scala b/tests/pos/spec-t5545/S_1.scala similarity index 100% rename from tests/pos-special/spec-t5545/S_1.scala rename to tests/pos/spec-t5545/S_1.scala diff --git a/tests/pos-special/spec-t5545/S_2.scala b/tests/pos/spec-t5545/S_2.scala similarity index 100% rename from tests/pos-special/spec-t5545/S_2.scala rename to tests/pos/spec-t5545/S_2.scala diff --git a/tests/pos/stats-in-empty-pkg.scala b/tests/pos/stats-in-empty-pkg.scala new file mode 100644 index 000000000000..cbfade71f8b9 --- /dev/null +++ b/tests/pos/stats-in-empty-pkg.scala @@ -0,0 +1,6 @@ +//> using options -Xfatal-warnings -deprecation -feature + +def foo = 23 +val bar = foo +var baz = bar +type Qux = Int diff --git a/tests/pos-special/fatal-warnings/strict-pattern-bindings-3.0-migration.scala b/tests/pos/strict-pattern-bindings-3.0-migration.scala similarity index 95% rename from tests/pos-special/fatal-warnings/strict-pattern-bindings-3.0-migration.scala rename to tests/pos/strict-pattern-bindings-3.0-migration.scala index bab804d81fac..f1d88af0c152 100644 --- a/tests/pos-special/fatal-warnings/strict-pattern-bindings-3.0-migration.scala +++ b/tests/pos/strict-pattern-bindings-3.0-migration.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -deprecation -feature + // These tests should pass under -Xfatal-warnings with source version less than 3.2 import language.`3.0-migration` diff --git a/tests/pos-special/fatal-warnings/strict-pattern-bindings-3.1.scala b/tests/pos/strict-pattern-bindings-3.1.scala similarity index 94% rename from tests/pos-special/fatal-warnings/strict-pattern-bindings-3.1.scala rename to tests/pos/strict-pattern-bindings-3.1.scala index 8ed183dd1209..bb912204e38a 100644 --- a/tests/pos-special/fatal-warnings/strict-pattern-bindings-3.1.scala +++ b/tests/pos/strict-pattern-bindings-3.1.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -deprecation -feature + // These tests should pass under -Xfatal-warnings with source version less than 3.2 import language.`3.1` diff --git a/tests/pos/switches.scala b/tests/pos/switches.scala new file mode 100644 index 000000000000..bd7e44f1c8cf --- /dev/null +++ b/tests/pos/switches.scala @@ -0,0 +1,55 @@ +//> using options -Xfatal-warnings -deprecation -feature + +import scala.annotation.switch + +class Test { + import Test.* + + def test1(x: Int): Int = (x: @switch) match { + case 1 => 1 + case 2 | 3 | 4 => 2 + case 65 => 3 + case 72 => 4 + } + + def test2(c: Char): Boolean = (c: @switch) match { + case LF | CR | FF | SU => true + case _ => false + } + + // #1313 + def test3(x: Int, y: Int): Int = (x: @switch) match { + case 6 if y > 5 => 1 + case 6 => 2 + case 12 => 3 + case 14 => 4 + case _ => 5 + } + + def test4(x: Byte): Boolean = (x: @switch) match { + case 1 | 2 | 3 => true + case _ => false + } + + def test5(x: Short): Boolean = (x: @switch) match { + case 1 | 2 | 3 => true + case _ => false + } + + def test6(x: IntAnyVal) = (x: @switch) match { + case IntAnyVal(1) => 0 + case IntAnyVal(10) => 1 + case IntAnyVal(100) => 2 + case IntAnyVal(1000) => 3 + case IntAnyVal(10000) => 4 + } +} + +case class IntAnyVal(x: Int) extends AnyVal + +object Test { + final val LF = '\u000A' + final val CR = '\u000D' + final val FF = '\u000C' + final val SU = '\u001A' +} diff --git a/tests/pos-special/fatal-warnings/t10373.scala b/tests/pos/t10373.scala similarity index 87% rename from tests/pos-special/fatal-warnings/t10373.scala rename to 
tests/pos/t10373.scala index da054b9fe365..0d91313f694d 100644 --- a/tests/pos-special/fatal-warnings/t10373.scala +++ b/tests/pos/t10373.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -deprecation -feature + abstract class Foo { def bar(): Unit = this match { case Foo_1() => //do something diff --git a/tests/pos/t16827.scala b/tests/pos/t16827.scala index 17122fd9b580..ba11aac71cba 100644 --- a/tests/pos/t16827.scala +++ b/tests/pos/t16827.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror trait Outer[F[_]]: sealed trait Inner diff --git a/tests/pos/t6205.scala b/tests/pos/t6205.scala index 52078bd5f46f..a50350d20376 100644 --- a/tests/pos/t6205.scala +++ b/tests/pos/t6205.scala @@ -2,7 +2,7 @@ class A[T] class Test1 { def x(backing: Map[A[_], Any]) = - for( (k: A[kt], v) <- backing) + for(case (k: A[kt], v) <- backing) yield (k: A[kt]) } diff --git a/tests/pos-special/fatal-warnings/t6595.scala b/tests/pos/t6595.scala similarity index 90% rename from tests/pos-special/fatal-warnings/t6595.scala rename to tests/pos/t6595.scala index 82cca01c70a4..b89c8f97308f 100644 --- a/tests/pos-special/fatal-warnings/t6595.scala +++ b/tests/pos/t6595.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -deprecation -feature + import scala.annotation.switch class Foo extends AnyRef { diff --git a/tests/pos/t6963c.scala b/tests/pos/t6963c.scala index baf356ab26b7..6effd4082065 100644 --- a/tests/pos/t6963c.scala +++ b/tests/pos/t6963c.scala @@ -1,23 +1,23 @@ -// scalac: -Xmigration:2.9 -Xfatal-warnings +//> using options -Xmigration:2.9 -Xfatal-warnings // import collection.Seq object Test { - def f1(x: Any) = x.isInstanceOf[Seq[_]] + def f1(x: Any) = x.isInstanceOf[Seq[?]] def f2(x: Any) = x match { - case _: Seq[_] => true + case _: Seq[?] => true case _ => false } def f3(x: Any) = x match { - case _: Array[_] => true + case _: Array[?] 
=> true case _ => false } - def f4(x: Any) = x.isInstanceOf[Iterable[_]] + def f4(x: Any) = x.isInstanceOf[Iterable[?]] def f5(x1: Any, x2: Any, x3: AnyRef) = (x1, x2, x3) match { - case (Some(_: Seq[_]), Nil, _) => 1 - case (None, List(_: List[_], _), _) => 2 + case (Some(_: Seq[?]), Nil, _) => 1 + case (None, List(_: List[?], _), _) => 2 case _ => 3 } diff --git a/tests/pos-custom-args/erased/tailrec.scala b/tests/pos/tailrec.scala similarity index 86% rename from tests/pos-custom-args/erased/tailrec.scala rename to tests/pos/tailrec.scala index cebcf4785c7a..95e667c07515 100644 --- a/tests/pos-custom-args/erased/tailrec.scala +++ b/tests/pos/tailrec.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + import scala.annotation.tailrec erased class Foo1 diff --git a/tests/pos-special/fatal-warnings/tasty-parent-unapply.scala b/tests/pos/tasty-parent-unapply.scala similarity index 88% rename from tests/pos-special/fatal-warnings/tasty-parent-unapply.scala rename to tests/pos/tasty-parent-unapply.scala index f76faa8f0e93..0f882ee060d8 100644 --- a/tests/pos-special/fatal-warnings/tasty-parent-unapply.scala +++ b/tests/pos/tasty-parent-unapply.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -deprecation -feature + import scala.quoted.* object Macros { diff --git a/tests/pos/test.scala b/tests/pos/test.scala new file mode 100644 index 000000000000..3bb74b3b1386 --- /dev/null +++ b/tests/pos/test.scala @@ -0,0 +1,4 @@ +//> using options -source 3.2 + +object Test: + def test = 0 diff --git a/tests/pos/tuple-exaustivity.scala b/tests/pos/tuple-exaustivity.scala new file mode 100644 index 000000000000..a27267fc89e5 --- /dev/null +++ b/tests/pos/tuple-exaustivity.scala @@ -0,0 +1,6 @@ +//> using options -Xfatal-warnings -deprecation -feature + +def test(t: Tuple) = + t match + case Tuple() => + case head *: tail => diff --git a/tests/pos-special/fatal-warnings/type-test-matchable.scala b/tests/pos/type-test-matchable.scala similarity index 81% rename from tests/pos-special/fatal-warnings/type-test-matchable.scala rename to tests/pos/type-test-matchable.scala index f6d64d0e8aaa..579af12fa2e7 100644 --- a/tests/pos-special/fatal-warnings/type-test-matchable.scala +++ b/tests/pos/type-test-matchable.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -deprecation -feature + import scala.language.`future-migration` import scala.reflect.TypeTest diff --git a/tests/pos-special/typeclass-scaling.scala b/tests/pos/typeclass-scaling.scala similarity index 99% rename from tests/pos-special/typeclass-scaling.scala rename to tests/pos/typeclass-scaling.scala index 4b809de664f3..0db663de4989 100644 --- a/tests/pos-special/typeclass-scaling.scala +++ b/tests/pos/typeclass-scaling.scala @@ -1,3 +1,5 @@ +//> using options -Xmax-inlines 40 + import scala.collection.mutable import scala.annotation.tailrec diff --git a/tests/pos/typeclasses.scala b/tests/pos/typeclasses.scala new file mode 100644 index 000000000000..07fe5a31ce5d --- /dev/null +++ b/tests/pos/typeclasses.scala @@ -0,0 +1,198 @@ +class Common: + + // this should go in Predef + infix type at [A <: { type This}, B] = A { type This = B } + + trait Ord: + type This + extension (x: This) + def compareTo(y: This): Int + def < (y: This): Boolean = compareTo(y) < 0 + def > (y: This): Boolean = compareTo(y) > 0 + + trait SemiGroup: + type This + extension (x: This) def combine(y: This): This + + trait Monoid extends SemiGroup: + def unit: This + + trait Functor: + type This[A] + extension [A](x: This[A]) def 
map[B](f: A => B): This[B] + + trait Monad extends Functor: + def pure[A](x: A): This[A] + extension [A](x: This[A]) + def flatMap[B](f: A => This[B]): This[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) +end Common + + +object Instances extends Common: + +/* + instance Int: Ord as intOrd with + extension (x: Int) + def compareTo(y: Int) = + if x < y then -1 + else if x > y then +1 + else 0 +*/ + given intOrd: Ord with + type This = Int + extension (x: Int) + def compareTo(y: Int) = + if x < y then -1 + else if x > y then +1 + else 0 +/* + instance List[T: Ord]: Ord as listOrd with + extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = x.compareTo(y) + if (fst != 0) fst else xs1.compareTo(ys1) +*/ + + // Proposed short syntax: + // given listOrd[T: Ord as ord]: Ord at T with + given listOrd[T](using ord: Ord { type This = T }): Ord with + type This = List[T] + extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = x.compareTo(y) + if (fst != 0) fst else xs1.compareTo(ys1) + end listOrd + +/* + instance List: Monad as listMonad with + extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = + xs.flatMap(f) + def pure[A](x: A): List[A] = + List(x) +*/ + + given listMonad: Monad with + type This[A] = List[A] + extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = + xs.flatMap(f) + def pure[A](x: A): List[A] = + List(x) + +/* + type Reader[Ctx] = X =>> Ctx => X + instance Reader[Ctx: _]: Monad as readerMonad with + extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = + ctx => f(r(ctx))(ctx) + def pure[A](x: A): Ctx => A = + ctx => x +*/ + + given readerMonad[Ctx]: Monad with + type This[X] = Ctx => X + extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = + ctx => f(r(ctx))(ctx) + def pure[A](x: A): Ctx => A = + ctx => x + + extension (xs: Seq[String]) + def longestStrings: Seq[String] = + val maxLength = xs.map(_.length).max + xs.filter(_.length == maxLength) + + extension [T](xs: List[T]) + def second = xs.tail.head + def third = xs.tail.tail.head + + // Proposed short syntax: + // extension [M: Monad as m, A](xss: M[M[A]]) + // def flatten: M[A] = + // xss.flatMap(identity) + + extension [M, A](using m: Monad)(xss: m.This[m.This[A]]) + def flatten: m.This[A] = + xss.flatMap(identity) + + // Proposed short syntax: + // def maximum[T: Ord](xs: List[T]): T = + def maximum[T](xs: List[T])(using Ord at T): T = + xs.reduceLeft((x, y) => if (x < y) y else x) + + // Proposed short syntax: + // def descending[T: Ord as asc]: Ord at T = new Ord: + def descending[T](using asc: Ord at T): Ord at T = new Ord: + type This = T + extension (x: T) def compareTo(y: T) = asc.compareTo(y)(x) + + // Proposed short syntax: + // def minimum[T: Ord](xs: List[T]) = + def minimum[T](xs: List[T])(using Ord at T) = + maximum(xs)(using descending) + + def test(): Unit = + val xs = List(1, 2, 3) + println(maximum(xs)) + println(maximum(xs)(using descending)) + println(maximum(xs)(using descending(using intOrd))) + println(minimum(xs)) + +// Adapted from the Rust by Example book: https://doc.rust-lang.org/rust-by-example/trait.html +// +// lines words chars +// wc Scala: 30 115 853 +// wc Rust : 57 193 1466 +trait Animal: + type This + // Associated function signature; `This` refers to the
implementor type. + def apply(name: String): This + + // Method signatures; these will return a string. + extension (self: This) + def name: String + def noise: String + def talk(): Unit = println(s"$name, $noise") +end Animal + +class Sheep(val name: String): + var isNaked = false + def shear() = + if isNaked then + println(s"$name is already naked...") + else + println(s"$name gets a haircut!") + isNaked = true + +/* +instance Sheep: Animal with + def apply(name: String) = Sheep(name) + extension (self: This) + def name: String = self.name + def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" + override def talk(): Unit = + println(s"$name pauses briefly... $noise") +*/ + +// Implement the `Animal` trait for `Sheep`. +given Animal with + type This = Sheep + def apply(name: String) = Sheep(name) + extension (self: This) + def name: String = self.name + def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" + override def talk(): Unit = + println(s"$name pauses briefly... $noise") + +/* + + - In a type pattern, A <: T, A >: T, A: T, A: _ are all allowed and mean + T is a fresh type variable (T can start with a capital letter). + - instance definitions + - `as m` syntax in context bounds and instance definitions + +*/ diff --git a/tests/pos/unchecked-scrutinee.scala b/tests/pos/unchecked-scrutinee.scala new file mode 100644 index 000000000000..72fcd3da14e4 --- /dev/null +++ b/tests/pos/unchecked-scrutinee.scala @@ -0,0 +1,7 @@ +//> using options -Xfatal-warnings -deprecation -feature + +object Test { + (List(1: @unchecked, 2, 3): @unchecked) match { + case a :: as => + } +} \ No newline at end of file diff --git a/tests/pos/uninitialized-future-migration.scala b/tests/pos/uninitialized-future-migration.scala new file mode 100644 index 000000000000..a1e606dc90fb --- /dev/null +++ b/tests/pos/uninitialized-future-migration.scala @@ -0,0 +1,6 @@ +import scala.language.`future-migration` +import scala.compiletime.uninitialized + +class Foo: + var a: Int = _ // warn + var b: Int = uninitialized diff --git a/tests/pos/wildcard-type-syntax-future-migration.scala b/tests/pos/wildcard-type-syntax-future-migration.scala new file mode 100644 index 000000000000..3075c609ffdc --- /dev/null +++ b/tests/pos/wildcard-type-syntax-future-migration.scala @@ -0,0 +1,6 @@ +import scala.language.`future-migration` + +def test = + Seq() match + case _: List[_] => // warn + case _: Seq[?] => diff --git a/tests/pos/wildcard-type-syntax.scala b/tests/pos/wildcard-type-syntax.scala new file mode 100644 index 000000000000..5e354b7b21ac --- /dev/null +++ b/tests/pos/wildcard-type-syntax.scala @@ -0,0 +1,6 @@ +//> using options -Werror + +def test = + Seq() match + case _: List[_] => + case _: Seq[?] => diff --git a/tests/pos/with-type-operator-3.3.scala b/tests/pos/with-type-operator-3.3.scala new file mode 100644 index 000000000000..2b40939d71b0 --- /dev/null +++ b/tests/pos/with-type-operator-3.3.scala @@ -0,0 +1,5 @@ +//> using options -Werror + +import scala.language.`3.3` + +def foo: Int with String = ??? diff --git a/tests/pos/with-type-operator-3.4-migration.scala b/tests/pos/with-type-operator-3.4-migration.scala new file mode 100644 index 000000000000..27761a5e4a7f --- /dev/null +++ b/tests/pos/with-type-operator-3.4-migration.scala @@ -0,0 +1,3 @@ +import scala.language.`3.4-migration` + +def foo: Int with String = ??? 
// warn diff --git a/tests/pos/xfatalWarnings.scala b/tests/pos/xfatalWarnings.scala new file mode 100644 index 000000000000..ba278fc87aa3 --- /dev/null +++ b/tests/pos/xfatalWarnings.scala @@ -0,0 +1,14 @@ +//> using options -nowarn -Xfatal-warnings +// succeeds despite -Xfatal-warnings because of -nowarn + +object xfatalWarnings { + val opt:Option[String] = Some("test") + + opt match { // error when running with -Xfatal-warnings + case None => + } + + object Test { + while (true) {} // should be ok. no "pure expression does nothing in statement position" issued. + } +} diff --git a/tests/printing/annot-printing.check b/tests/printing/annot-printing.check index 99529ef452e7..9369dba4fae3 100644 --- a/tests/printing/annot-printing.check +++ b/tests/printing/annot-printing.check @@ -1,11 +1,11 @@ [[syntax trees at end of typer]] // tests/printing/annot-printing.scala package { import scala.annotation.* - class Foo() extends annotation.Annotation() {} - class Bar(s: String) extends annotation.Annotation() { + class Foo() extends scala.annotation.Annotation() {} + class Bar(s: String) extends scala.annotation.Annotation() { private[this] val s: String } - class Xyz(i: Int) extends annotation.Annotation() { + class Xyz(i: Int) extends scala.annotation.Annotation() { private[this] val i: Int } final lazy module val Xyz: Xyz = new Xyz() diff --git a/tests/printing/i19019.check b/tests/printing/i19019.check new file mode 100644 index 000000000000..1cbaef492ed8 --- /dev/null +++ b/tests/printing/i19019.check @@ -0,0 +1,37 @@ +[[syntax trees at end of typer]] // tests/printing/i19019.scala +package { + final lazy module val ObjectWithSelf: ObjectWithSelf = new ObjectWithSelf() + final module class ObjectWithSelf() extends Object() { + this: ObjectWithSelf.type => + final lazy module val StaticObjectNoSelf: ObjectWithSelf.StaticObjectNoSelf + = new ObjectWithSelf.StaticObjectNoSelf() + final module class StaticObjectNoSelf() extends Object() { + this: ObjectWithSelf.StaticObjectNoSelf.type => + def foo: Any = this + } + final lazy module val StaticObjectWithSelf: + ObjectWithSelf.StaticObjectWithSelf = + new ObjectWithSelf.StaticObjectWithSelf() + final module class StaticObjectWithSelf() extends Object() { + self: ObjectWithSelf.StaticObjectWithSelf.type => + def foo: Any = self + } + class Container() extends Object() { + final lazy module val NonStaticObjectNoSelf: + Container.this.NonStaticObjectNoSelf = + new Container.this.NonStaticObjectNoSelf() + final module class NonStaticObjectNoSelf() extends Object() { + this: Container.this.NonStaticObjectNoSelf.type => + def foo: Any = this + } + final lazy module val NonStaticObjectWithSelf: + Container.this.NonStaticObjectWithSelf = + new Container.this.NonStaticObjectWithSelf() + final module class NonStaticObjectWithSelf() extends Object() { + self: Container.this.NonStaticObjectWithSelf.type => + def foo: Any = self + } + } + } +} + diff --git a/tests/printing/i19019.scala b/tests/printing/i19019.scala new file mode 100644 index 000000000000..b91089b8e00d --- /dev/null +++ b/tests/printing/i19019.scala @@ -0,0 +1,23 @@ +object ObjectWithSelf: + object StaticObjectNoSelf: + def foo: Any = this + end StaticObjectNoSelf + + object StaticObjectWithSelf: + self => + + def foo: Any = self + end StaticObjectWithSelf + + class Container: + object NonStaticObjectNoSelf: + def foo: Any = this + end NonStaticObjectNoSelf + + object NonStaticObjectWithSelf: + self => + + def foo: Any = self + end NonStaticObjectWithSelf + end Container +end ObjectWithSelf 
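// An illustrative sketch, not part of the patch: i19019 above checks how the
// printer renders a module's self reference, the implicit `this` versus an
// explicitly named alias. One reason such an alias exists at all: it remains
// visible in nested scopes where `this` is rebound.
class Outer:
  self =>
  class Inner:
    def enclosing: Outer = self  // `this` here would denote the Inner instance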
diff --git a/tests/printing/transformed/lazy-vals-new.check b/tests/printing/transformed/lazy-vals-new.check index 4b81cd457a38..05471e5677dc 100644 --- a/tests/printing/transformed/lazy-vals-new.check +++ b/tests/printing/transformed/lazy-vals-new.check @@ -23,9 +23,9 @@ package { lazy def x(): Int = { val result: Object = A.x$lzy1 - if result.isInstanceOf[Int] then scala.Int.unbox(result) else - if result.eq(scala.runtime.LazyVals.NullValue) then - scala.Int.unbox(null) else scala.Int.unbox(A.x$lzyINIT1()) + if result.isInstanceOf[Int] then Int.unbox(result) else + if result.eq(scala.runtime.LazyVals.NullValue) then Int.unbox(null) + else Int.unbox(A.x$lzyINIT1()) } private def x$lzyINIT1(): Object = while do @@ -41,7 +41,7 @@ package { var resultNullable: Object = null try { - resultNullable = scala.Int.box(2) + resultNullable = Int.box(2) if resultNullable.eq(null) then result = scala.runtime.LazyVals.NullValue else result = resultNullable diff --git a/tests/rewrites/alphanumeric-infix-operator.check b/tests/rewrites/alphanumeric-infix-operator.check new file mode 100644 index 000000000000..8ff077e856cf --- /dev/null +++ b/tests/rewrites/alphanumeric-infix-operator.check @@ -0,0 +1,3 @@ +extension (x: Int) def foo(y: Int): Int = x + y + +def f: Unit = 2 `foo` 4 diff --git a/tests/rewrites/alphanumeric-infix-operator.scala b/tests/rewrites/alphanumeric-infix-operator.scala new file mode 100644 index 000000000000..450f44834f05 --- /dev/null +++ b/tests/rewrites/alphanumeric-infix-operator.scala @@ -0,0 +1,3 @@ +extension (x: Int) def foo(y: Int): Int = x + y + +def f: Unit = 2 foo 4 diff --git a/tests/rewrites/private-this.check b/tests/rewrites/private-this.check new file mode 100644 index 000000000000..1a6443cdf152 --- /dev/null +++ b/tests/rewrites/private-this.check @@ -0,0 +1,12 @@ +class Foo: + private def foo1: Int = ??? + private def foo2: Int = ??? + private def foo3: Int = ??? + private def foo4: Int = ??? + private def foo5: Int = ??? + + protected def bar1: Int = ??? + protected def bar2: Int = ??? + protected def bar3: Int = ??? + protected def bar4: Int = ??? + protected def bar5: Int = ??? diff --git a/tests/rewrites/private-this.scala b/tests/rewrites/private-this.scala new file mode 100644 index 000000000000..5f5b71f26abe --- /dev/null +++ b/tests/rewrites/private-this.scala @@ -0,0 +1,12 @@ +class Foo: + private[this] def foo1: Int = ??? + private[ this] def foo2: Int = ??? + private[this ] def foo3: Int = ??? + private[ this ] def foo4: Int = ??? + private [this] def foo5: Int = ??? + + protected[this] def bar1: Int = ??? + protected[ this] def bar2: Int = ??? + protected[this ] def bar3: Int = ??? + protected[ this ] def bar4: Int = ??? + protected [this] def bar5: Int = ??? 
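// A sketch of why the private-this rewrite above is safe, not part of the
// patch: Scala 3 dropped Scala 2's object-private semantics, so `private[this]`
// already means exactly the same as plain `private`, and stripping the
// qualifier cannot change accessibility.
class Counter:
  private var n = 0  // was `private[this] var n = 0`; identical meaning in Scala 3
  def next(): Int = { n += 1; n }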
diff --git a/tests/rewrites/rewrites3x-fatal-warnings.scala b/tests/rewrites/rewrites3x-fatal-warnings.scala new file mode 100644 index 000000000000..48e2d35b0fdd --- /dev/null +++ b/tests/rewrites/rewrites3x-fatal-warnings.scala @@ -0,0 +1,10 @@ +import scala.{collection => coll, runtime=>_, _} +import coll._ + +def f(xs: Int*) = xs.sum +def test = + f(List(1, 2, 3): _*) + +def g = { implicit x: Int => + x + 1 +} \ No newline at end of file diff --git a/tests/rewrites/rewrites3x.check b/tests/rewrites/rewrites3x.check new file mode 100644 index 000000000000..c83a80a59375 --- /dev/null +++ b/tests/rewrites/rewrites3x.check @@ -0,0 +1,13 @@ +import scala.{collection as coll, runtime as _, *} +import coll.* + +def f(xs: Int*) = xs.sum +def test = + f(List(1, 2, 3)*) + +def g = { implicit (x: Int) => + x + 1 +} + +def foo(x: Int) = x +def testTrailingUnderscoreEtaExpansion = foo diff --git a/tests/rewrites/rewrites3x.scala b/tests/rewrites/rewrites3x.scala index 48e2d35b0fdd..8d066bb76a91 100644 --- a/tests/rewrites/rewrites3x.scala +++ b/tests/rewrites/rewrites3x.scala @@ -7,4 +7,7 @@ def test = def g = { implicit x: Int => x + 1 -} \ No newline at end of file +} + +def foo(x: Int) = x +def testTrailingUnderscoreEtaExpansion = foo _ diff --git a/tests/rewrites/uninitialized-var.check b/tests/rewrites/uninitialized-var.check new file mode 100644 index 000000000000..3809938512a7 --- /dev/null +++ b/tests/rewrites/uninitialized-var.check @@ -0,0 +1,2 @@ +class Foo: + var a: Int = scala.compiletime.uninitialized diff --git a/tests/rewrites/uninitialized-var.scala b/tests/rewrites/uninitialized-var.scala new file mode 100644 index 000000000000..910734b33350 --- /dev/null +++ b/tests/rewrites/uninitialized-var.scala @@ -0,0 +1,2 @@ +class Foo: + var a: Int = _ diff --git a/tests/rewrites/with-type-operator.check b/tests/rewrites/with-type-operator.check new file mode 100644 index 000000000000..6d59e0eacb95 --- /dev/null +++ b/tests/rewrites/with-type-operator.check @@ -0,0 +1 @@ +def foo: Int & String = ??? diff --git a/tests/rewrites/with-type-operator.scala b/tests/rewrites/with-type-operator.scala new file mode 100644 index 000000000000..6dbd8ded14ee --- /dev/null +++ b/tests/rewrites/with-type-operator.scala @@ -0,0 +1 @@ +def foo: Int with String = ??? 
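Note: the `with-type-operator` rewrite above replaces `with` in type position by the Scala 3 intersection operator `&`. A short sketch, with invented trait names, of what changes and what does not — only the type-level `with` is rewritten, while `with` in an extends clause or `new` expression is left alone:

```scala
trait HasId:
  def id: Long
trait HasName:
  def name: String

// Formerly written `HasId with HasName`; `&` is commutative, so
// `HasId & HasName` and `HasName & HasId` are the same type.
def describe(x: HasId & HasName): String = s"${x.id}: ${x.name}"

@main def demoIntersection: Unit =
  // term-level `with` is untouched by the rewrite
  val both = new HasId with HasName { def id = 1L; def name = "a" }
  println(describe(both))
```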
diff --git a/tests/run-custom-args/Xmacro-settings/compileTimeEnv/Test.scala b/tests/run-custom-args/Xmacro-settings/compileTimeEnv/Test.scala deleted file mode 100644 index 7cd9b03b7bb6..000000000000 --- a/tests/run-custom-args/Xmacro-settings/compileTimeEnv/Test.scala +++ /dev/null @@ -1,30 +0,0 @@ -import scala.compiletime.* - -object Test { - import Logging.* - - def main(args: Array[String]): Unit = { - runLog() - runBasic() - } - - def runLog(): Unit = { - trace("I'm a trace msg") - debug("I'm a debug msg") - info("I'm a info msg") - warn("I'm a warn msg") - } - - def runBasic(): Unit = { - printEnv("a") - printEnv("b") - printEnv("c.b.a") - printEnv("wat") - } - - inline def printEnv(inline k: String): Unit = - inline MacroEnv.get(k) match - case Some(v) => println(s"$k = [$v]") - case None => println(k + " is not defined") - -} diff --git a/tests/run-custom-args/Xmacro-settings/simple/Test.scala b/tests/run-custom-args/Xmacro-settings/simple/Test.scala deleted file mode 100644 index 97587362c835..000000000000 --- a/tests/run-custom-args/Xmacro-settings/simple/Test.scala +++ /dev/null @@ -1,10 +0,0 @@ -import x.* - -object Test { - - def main(args: Array[String]):Unit = - assert(M.settingsContains("one")) - assert(!M.settingsContains("notwo")) - assert(M.settingsContains("two")) - -} diff --git a/tests/run-custom-args/Yread-comments/i12351/Test_2.scala b/tests/run-custom-args/Yread-comments/i12351/Test_2.scala deleted file mode 100644 index 5afd97061411..000000000000 --- a/tests/run-custom-args/Yread-comments/i12351/Test_2.scala +++ /dev/null @@ -1,4 +0,0 @@ -@main def Test(): Unit = { - println(getDocString[Data]) - assert(getDocString[Data].nonEmpty) -} diff --git a/tests/run-custom-args/Yread-comments/i12352/Main.scala b/tests/run-custom-args/Yread-comments/i12352/Main.scala deleted file mode 100644 index 78d0906652b2..000000000000 --- a/tests/run-custom-args/Yread-comments/i12352/Main.scala +++ /dev/null @@ -1,5 +0,0 @@ -@main def Test(): Unit = { - val res = getDocString[scala.quoted.Quotes] - println(res) - assert(res.nonEmpty) -} diff --git a/tests/run-custom-args/Yretain-trees/tasty-definitions-2/Test_2.scala b/tests/run-custom-args/Yretain-trees/tasty-definitions-2/Test_2.scala deleted file mode 100644 index 6bb234543066..000000000000 --- a/tests/run-custom-args/Yretain-trees/tasty-definitions-2/Test_2.scala +++ /dev/null @@ -1,11 +0,0 @@ -object Test { - def main(args: Array[String]): Unit = { - println(Foo.inspectBody(Foo.foo)) - println(Foo.inspectBody(Foo.bar)) - - 3 match { - case x => - println(Foo.inspectBody(x)) - } - } -} diff --git a/tests/run-custom-args/Yretain-trees/tasty-definitions-3/Test_2.scala b/tests/run-custom-args/Yretain-trees/tasty-definitions-3/Test_2.scala deleted file mode 100644 index 3663f00a8198..000000000000 --- a/tests/run-custom-args/Yretain-trees/tasty-definitions-3/Test_2.scala +++ /dev/null @@ -1,15 +0,0 @@ -object Test { - - def main(args: Array[String]): Unit = { - println(Foo.inspectBody(foo)) - println(Foo.inspectBody(bar)) - - 3 match { - case x => - println(Foo.inspectBody(x)) - } - } - - def foo: Int = 1 + 2 - val bar: Int = 2 + 3 -} diff --git a/tests/run-custom-args/Yretain-trees/tasty-extractors-owners/quoted_2.scala b/tests/run-custom-args/Yretain-trees/tasty-extractors-owners/quoted_2.scala deleted file mode 100644 index 5b793c829269..000000000000 --- a/tests/run-custom-args/Yretain-trees/tasty-extractors-owners/quoted_2.scala +++ /dev/null @@ -1,24 +0,0 @@ - -import Macros.* - -object Test { - def main(args: Array[String]): 
Unit = { - printOwners { - def foo = { - def bar = 1 - val bar2 = 2 - bar - } - val foo2 = { - def baz = 3 - val baz2 = 4 - baz - } - class A { - type B = Int - def b = 5 - val b2 = 6 - } - } - } -} diff --git a/tests/run-custom-args/Yretain-trees/tasty-load-tree-1/quoted_2.scala b/tests/run-custom-args/Yretain-trees/tasty-load-tree-1/quoted_2.scala deleted file mode 100644 index 2aa76a7f2822..000000000000 --- a/tests/run-custom-args/Yretain-trees/tasty-load-tree-1/quoted_2.scala +++ /dev/null @@ -1,7 +0,0 @@ - -object Test { - def main(args: Array[String]): Unit = { - println(Foo.inspectBody(Foo.foo)) - println(Foo.inspectBody(Foo.bar)) - } -} diff --git a/tests/run-custom-args/Yretain-trees/tasty-load-tree-2/quoted_2.scala b/tests/run-custom-args/Yretain-trees/tasty-load-tree-2/quoted_2.scala deleted file mode 100644 index 2cffe72c5a74..000000000000 --- a/tests/run-custom-args/Yretain-trees/tasty-load-tree-2/quoted_2.scala +++ /dev/null @@ -1,10 +0,0 @@ - -object Test { - def main(args: Array[String]): Unit = { - println(Foo.inspectBody(foo)) - println(Foo.inspectBody(bar)) - } - - def foo: Int = 1 + 2 - val bar: Int = 2 + 3 -} diff --git a/tests/run-custom-args/captures/colltest5/CollectionStrawManCC5_1.scala b/tests/run-custom-args/captures/colltest5/CollectionStrawManCC5_1.scala index 63aee49f8454..20a6a33d3e02 100644 --- a/tests/run-custom-args/captures/colltest5/CollectionStrawManCC5_1.scala +++ b/tests/run-custom-args/captures/colltest5/CollectionStrawManCC5_1.scala @@ -3,7 +3,7 @@ package strawman.collections import Predef.{augmentString as _, wrapString as _, *} import scala.reflect.ClassTag -import annotation.unchecked.uncheckedVariance +import annotation.unchecked.{uncheckedVariance, uncheckedCaptures} import annotation.tailrec /** A strawman architecture for new collections. 
It contains some @@ -215,7 +215,7 @@ object CollectionStrawMan5 { } def length: Int = if (isEmpty) 0 else 1 + tail.length - protected[this] def newBuilder = new ListBuffer[A] @uncheckedVariance + protected[this] def newBuilder = new ListBuffer[A @uncheckedVariance @uncheckedCaptures] def ++:[B >: A](prefix: List[B]): List[B] = if (prefix.isEmpty) this else Cons(prefix.head, prefix.tail ++: this) @@ -227,7 +227,7 @@ object CollectionStrawMan5 { if (n > 0) tail.drop(n - 1) else this } - case class Cons[+A](x: A, private[collections] var next: List[A @uncheckedVariance]) // sound because `next` is used only locally + case class Cons[+A](x: A, private[collections] var next: List[A @uncheckedVariance @uncheckedCaptures]) // sound because `next` is used only locally extends List[A] { override def isEmpty = false override def head = x @@ -244,7 +244,7 @@ object CollectionStrawMan5 { type C[X] = List[X] def fromIterable[B](coll: Iterable[B]^): List[B] = coll match { case coll: List[B] => coll - case _ => ListBuffer.fromIterable(coll).result + case _ => ListBuffer.fromIterable[B @uncheckedCaptures](coll).result } } diff --git a/tests/run-custom-args/captures/colltest5/Test_2.scala b/tests/run-custom-args/captures/colltest5/Test_2.scala index fbb22039c327..f6f47b536541 100644 --- a/tests/run-custom-args/captures/colltest5/Test_2.scala +++ b/tests/run-custom-args/captures/colltest5/Test_2.scala @@ -61,7 +61,7 @@ object Test { println(xs16) } - def viewOps(xs: View[Int]^{cap}) = { + def viewOps(xs: View[Int]^) = { val strPlusInt: (String, Int) => String = _ + _ val intPlusStr: (Int, String) => String = _ + _ val isEven: Int => Boolean = _ % 2 == 0 diff --git a/tests/run-custom-args/captures/minicheck.scala b/tests/run-custom-args/captures/minicheck.scala index bdc591580482..a6aca38ae704 100644 --- a/tests/run-custom-args/captures/minicheck.scala +++ b/tests/run-custom-args/captures/minicheck.scala @@ -5,7 +5,7 @@ import annotation.{experimental, tailrec, constructorOnly} import collection.mutable import language.`3.3` -case class Symbol(name: String, initOwner: Symbol | Null) extends caps.Pure: +case class Symbol(name: String, initOwner: Symbol | Null) extends Pure: def owner = initOwner.nn private var myInfo: Type = uninitialized def infoOrCompleter: Type = myInfo @@ -29,7 +29,7 @@ object NoSymbol extends Symbol("", null): override def exists: Boolean = false override def orElse(alt: => Symbol): Symbol = alt -abstract class Type extends caps.Pure: +abstract class Type extends Pure: def exists = true def show: String case class IntType()(using @constructorOnly c: Context) extends Type: diff --git a/tests/run-custom-args/erased/erased-1.scala b/tests/run-custom-args/erased/erased-1.scala deleted file mode 100644 index acb8ce68f9bf..000000000000 --- a/tests/run-custom-args/erased/erased-1.scala +++ /dev/null @@ -1,14 +0,0 @@ -object Test { - - def main(args: Array[String]): Unit = { - fun(foo) - } - - def foo = { - println("foo") - 42 - } - def fun(erased boo: Int): Unit = { - println("fun") - } -} diff --git a/tests/run-custom-args/erased/erased-14.scala b/tests/run-custom-args/erased/erased-14.scala deleted file mode 100644 index 6486ba8085c4..000000000000 --- a/tests/run-custom-args/erased/erased-14.scala +++ /dev/null @@ -1,15 +0,0 @@ -object Test { - - def main(args: Array[String]): Unit = { - new Foo - } - -} - -class Foo { - erased val x: Int = { - println("x") - 42 - } - println("Foo") -} diff --git a/tests/run-custom-args/erased/erased-15.check b/tests/run-custom-args/erased/erased-15.check 
deleted file mode 100644 index f1880f44381b..000000000000 --- a/tests/run-custom-args/erased/erased-15.check +++ /dev/null @@ -1 +0,0 @@ -Foo.apply diff --git a/tests/run-custom-args/erased/erased-15.scala b/tests/run-custom-args/erased/erased-15.scala deleted file mode 100644 index 02b70f9125d6..000000000000 --- a/tests/run-custom-args/erased/erased-15.scala +++ /dev/null @@ -1,20 +0,0 @@ -import scala.runtime.ErasedFunction - -object Test { - - def main(args: Array[String]): Unit = { - new Foo().apply(foo) - } - - def foo = { - println("foo") - 42 - } -} - -class Foo extends ErasedFunction { - def apply(erased x: Int): Int = { - println("Foo.apply") - 42 - } -} diff --git a/tests/run-custom-args/erased/erased-19.scala b/tests/run-custom-args/erased/erased-19.scala deleted file mode 100644 index ff6a05f7de3d..000000000000 --- a/tests/run-custom-args/erased/erased-19.scala +++ /dev/null @@ -1,10 +0,0 @@ -object Test { - - def main(args: Array[String]): Unit = { - { - (erased x: Int) => 42 - } - - println("ok") - } -} diff --git a/tests/run-custom-args/erased/erased-23.check b/tests/run-custom-args/erased/erased-23.check deleted file mode 100644 index efbbe4dddf06..000000000000 --- a/tests/run-custom-args/erased/erased-23.check +++ /dev/null @@ -1,2 +0,0 @@ -lambda1 -lambda2 diff --git a/tests/run-custom-args/erased/erased-26.scala b/tests/run-custom-args/erased/erased-26.scala deleted file mode 100644 index eed903ac2753..000000000000 --- a/tests/run-custom-args/erased/erased-26.scala +++ /dev/null @@ -1,6 +0,0 @@ -object Test { - def main(args: Array[String]): Unit = { - col("abc")(true) - } - def col[S](s: String)(erased ev: Boolean): Unit = println(s) -} diff --git a/tests/run-custom-args/erased/erased-3.scala b/tests/run-custom-args/erased/erased-3.scala deleted file mode 100644 index 68d2f8b629c2..000000000000 --- a/tests/run-custom-args/erased/erased-3.scala +++ /dev/null @@ -1,21 +0,0 @@ -object Test { - - def main(args: Array[String]): Unit = { - fun(foo1)(foo2) - } - - def foo1: Int = { - println("foo1") - 42 - } - - def foo2: String = { - println("foo2") - "abc" - } - - def fun(erased a: Int)(erased b: String): Unit = { - println("fun") - } - -} diff --git a/tests/run-custom-args/erased/erased-4.scala b/tests/run-custom-args/erased/erased-4.scala deleted file mode 100644 index 8725a7ebc00f..000000000000 --- a/tests/run-custom-args/erased/erased-4.scala +++ /dev/null @@ -1,25 +0,0 @@ -object Test { - - def main(args: Array[String]): Unit = { - fun(foo1)(foo2) - fun2(foo1)(foo2) - } - - def foo1: Int = { - println("foo1") - 42 - } - - def foo2: String = { - println("foo2") - "abc" - } - - def fun(a: Int)(erased b: String): Unit = { - println("fun " + a) - } - - def fun2(erased a: Int)(b: String): Unit = { - println("fun2 " + b) - } -} diff --git a/tests/run-custom-args/erased/erased-5.scala b/tests/run-custom-args/erased/erased-5.scala deleted file mode 100644 index 043d1a4781a7..000000000000 --- a/tests/run-custom-args/erased/erased-5.scala +++ /dev/null @@ -1,20 +0,0 @@ -object Test { - - def main(args: Array[String]): Unit = { - fun(foo(1))(foo(2))(foo(3))(foo(4)) - fun2(foo(1))(foo(2))(foo(3))(foo(4)) - } - - def foo(i: Int): Int = { - println("foo") - i - } - - def fun(a: Int)(erased b: Int)(c: Int)(erased d: Int): Unit = { - println("fun " + a + " " + c) - } - - def fun2(erased a2: Int)(b2: Int)(erased c2: Int)(d2: Int): Unit = { - println("fun2 " + b2 + " " + d2) - } -} diff --git a/tests/run-custom-args/erased/erased-6.scala b/tests/run-custom-args/erased/erased-6.scala 
deleted file mode 100644 index ec113fbb426b..000000000000 --- a/tests/run-custom-args/erased/erased-6.scala +++ /dev/null @@ -1,16 +0,0 @@ -object Test { - - def main(args: Array[String]): Unit = { - new Foo(foo) - } - - def foo: Int = { - println("foo") - 42 - } - -} - -class Foo(erased a: Int) { - println("Foo") -} diff --git a/tests/run-custom-args/erased/erased-value-class.scala b/tests/run-custom-args/erased/erased-value-class.scala deleted file mode 100644 index a87b0e8bf4af..000000000000 --- a/tests/run-custom-args/erased/erased-value-class.scala +++ /dev/null @@ -1,16 +0,0 @@ -object Test { - - def main(args: Array[String]): Unit = { - new Bar(c)(c).foo() - identity(new Bar(c)(c)).foo() - } - - def c = { - println("c") - 3 - } -} - -class Bar(x: Int)(erased y: Int) extends AnyVal { - def foo() = x -} diff --git a/tests/run-custom-args/erased/lambdas.scala b/tests/run-custom-args/erased/lambdas.scala deleted file mode 100644 index 4c1746283099..000000000000 --- a/tests/run-custom-args/erased/lambdas.scala +++ /dev/null @@ -1,38 +0,0 @@ -// lambdas should parse and work - -type F = (erased Int, String) => String -type S = (Int, erased String) => Int - -def useF(f: F) = f(5, "a") -def useS(f: S) = f(5, "a") - -val ff: F = (erased x, y) => y - -val fs: S = (x, erased y) => x -val fsExpl = (x: Int, erased y: String) => x - -// contextual lambdas should work - -type FC = (Int, erased String) ?=> Int - -def useCtx(f: FC) = f(using 5, "a") - -val fCv: FC = (x, erased y) ?=> x -val fCvExpl = (x: Int, erased y: String) ?=> x - -// nested lambdas should work - -val nested: Int => (String, erased Int) => FC = a => (_, erased _) => (c, erased d) ?=> a + c - -@main def Test() = - assert("a" == useF(ff)) - - assert(5 == useS(fs)) - assert(5 == useS(fsExpl)) - assert(5 == useS { (x, erased y) => x }) - - assert(5 == useCtx(fCv)) - assert(5 == useCtx(fCvExpl)) - assert(5 == useCtx { (x, erased y) ?=> x }) - - assert(6 == useCtx(nested(1)("b", 2))) diff --git a/tests/run-custom-args/erased/quotes-add-erased/Macro_1.scala b/tests/run-custom-args/erased/quotes-add-erased/Macro_1.scala deleted file mode 100644 index 66f8475da96d..000000000000 --- a/tests/run-custom-args/erased/quotes-add-erased/Macro_1.scala +++ /dev/null @@ -1,26 +0,0 @@ -import scala.annotation.MacroAnnotation -import scala.annotation.internal.ErasedParam -import scala.quoted._ - -class NewAnnotation extends scala.annotation.Annotation - -class erasedParamsMethod extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = - import quotes.reflect._ - tree match - case ClassDef(name, ctr, parents, self, body) => - val erasedInt = AnnotatedType(TypeRepr.of[Int], '{ new ErasedParam }.asTerm) - val methType = MethodType(List("x", "y"))(_ => List(erasedInt, TypeRepr.of[Int]), _ => TypeRepr.of[Int]) - - assert(methType.hasErasedParams) - assert(methType.erasedParams == List(true, false)) - - val methSym = Symbol.newMethod(tree.symbol, "takesErased", methType, Flags.EmptyFlags, Symbol.noSymbol) - val methDef = DefDef(methSym, _ => Some(Literal(IntConstant(1)))) - - val clsDef = ClassDef.copy(tree)(name, ctr, parents, self, methDef :: body) - - List(clsDef) - case _ => - report.error("Annotation only supports `class`") - List(tree) diff --git a/tests/run-custom-args/erased/quotes-reflection.check b/tests/run-custom-args/erased/quotes-reflection.check deleted file mode 100644 index 838479e0b7af..000000000000 --- a/tests/run-custom-args/erased/quotes-reflection.check +++ /dev/null 
@@ -1,10 +0,0 @@ -method : () isGiven=false isImplicit=false erasedArgs=List() -method m1: (i: scala.Int) isGiven=true isImplicit=false erasedArgs=List(false) -method m2: (i: scala.Int) isGiven=false isImplicit=false erasedArgs=List(true) -method m3: (i: scala.Int, j: scala.Int) isGiven=false isImplicit=false erasedArgs=List(false, true) -method m4: (i: EC) isGiven=false isImplicit=false erasedArgs=List(true) -val l1: scala.ContextFunction1[scala.Int, scala.Int] -val l2: scala.runtime.ErasedFunction with apply: (x$0: scala.Int @scala.annotation.internal.ErasedParam) isImplicit=false erasedParams=List(true) -val l3: scala.runtime.ErasedFunction with apply: (x$0: scala.Int @scala.annotation.internal.ErasedParam) isImplicit=true erasedParams=List(true) -val l4: scala.runtime.ErasedFunction with apply: (x$0: scala.Int, x$1: scala.Int @scala.annotation.internal.ErasedParam) isImplicit=false erasedParams=List(false, true) -val l5: scala.runtime.ErasedFunction with apply: (x$0: EC @scala.annotation.internal.ErasedParam) isImplicit=false erasedParams=List(true) diff --git a/tests/run-custom-args/erased/quotes-reflection/Macros_1.scala b/tests/run-custom-args/erased/quotes-reflection/Macros_1.scala deleted file mode 100644 index f7b1187433f0..000000000000 --- a/tests/run-custom-args/erased/quotes-reflection/Macros_1.scala +++ /dev/null @@ -1,35 +0,0 @@ -import scala.quoted.* - -inline def inspect[A]: String = - ${ inspect2[A] } - -def inspect2[A: Type](using Quotes): Expr[String] = { - import quotes.reflect.* - - val methods = TypeRepr.of[A].typeSymbol.declarations - val names = methods.map { m => - m.tree match - case dd @ DefDef(name, params, r, body) => - val paramStr = - params.map { - case ps: TermParamClause => - val params = ps.params.map(p => s"${p.name}: ${p.tpt.show}").mkString("(", ", ", ")") - s"$params isGiven=${ps.isGiven} isImplicit=${ps.isImplicit} erasedArgs=${ps.erasedArgs}" - case ps: TypeParamClause => ps.params.map(_.show).mkString("[", ", ", "]") - }.mkString("") - s"method $name: $paramStr" - case vd @ ValDef(name, tpt, body) => - tpt.tpe match - case Refinement(parent, "apply", tpe: MethodType) if parent == defn.ErasedFunctionClass.typeRef => - assert(tpt.tpe.isErasedFunctionType) - - val params = tpe.paramNames.zip(tpe.paramTypes).map((n, t) => s"$n: ${t.show}").mkString("(", ", ", ")") - s"val $name: ${parent.show} with apply: ${params} isImplicit=${tpe.isImplicit} erasedParams=${tpe.erasedParams}" - case _ => - s"val $name: ${tpt.show}" - case td @ TypeDef(name, tpt) => s"type $name: ${tpt.show}" - case _ => s"something else: $m" - } - - Expr(names.mkString("\n")) -} diff --git a/tests/run-custom-args/fatal-warnings/convertible.scala b/tests/run-custom-args/fatal-warnings/convertible.scala deleted file mode 100644 index 3479f53df3c8..000000000000 --- a/tests/run-custom-args/fatal-warnings/convertible.scala +++ /dev/null @@ -1,32 +0,0 @@ -import language.experimental.into - -class Text(val str: String) - -given Conversion[String, Text] = Text(_) - -@main def Test = - - def f(x: into Text, y: => into Text, zs: into Text*) = - println(s"${x.str} ${y.str} ${zs.map(_.str).mkString(" ")}") - - f("abc", "def") // ok - f("abc", "def", "xyz", "uvw") // ok - f("abc", "def", "xyz", Text("uvw")) // ok - - def g(x: into () => Text) = - println(x().str) - - g(() => "hi") - -trait A[X]: - def f(x: X): Unit = () - -trait B[X] extends A[X]: - override def f(x: X) = super.f(x) - -trait C[X] extends A[X]: - override def f(x: into X) = super.f(x) - -class D[X] extends B[X], C[X] - -def f = 
new D[Text].f("abc") diff --git a/tests/run-custom-args/run-macros-erased/macro-erased/1.scala b/tests/run-custom-args/run-macros-erased/macro-erased/1.scala deleted file mode 100644 index 36f583a7dc91..000000000000 --- a/tests/run-custom-args/run-macros-erased/macro-erased/1.scala +++ /dev/null @@ -1,21 +0,0 @@ -import scala.quoted.* - -object Macro { - inline def foo1(i: Int) = $ { case1('{ i }) } - inline def foo2(i: Int) = $ { case2(1)('{ i }) } - inline def foo3(i: Int) = $ { case3('{ i })(1) } - inline def foo4(i: Int) = $ { case4(1)('{ i }, '{ i }) } - inline def foo5(i: Int) = $ { case5('{ i }, '{ i })(1) } - inline def foo6(i: Int) = $ { case6(1)('{ i })('{ i }) } - inline def foo7(i: Int) = $ { case7('{ i })(1)('{ i }) } - inline def foo8(i: Int) = $ { case8('{ i })('{ i })(1) } - - def case1(erased i: Expr[Int])(using Quotes): Expr[Int] = '{ 0 } - def case2 (i: Int)(erased j: Expr[Int])(using Quotes): Expr[Int] = '{ 0 } - def case3(erased i: Expr[Int]) (j: Int)(using Quotes): Expr[Int] = '{ 0 } - def case4 (h: Int)(erased i: Expr[Int], erased j: Expr[Int])(using Quotes): Expr[Int] = '{ 0 } - def case5(erased i: Expr[Int], erased j: Expr[Int]) (h: Int)(using Quotes): Expr[Int] = '{ 0 } - def case6 (h: Int)(erased i: Expr[Int])(erased j: Expr[Int])(using Quotes): Expr[Int] = '{ 0 } - def case7(erased i: Expr[Int]) (h: Int)(erased j: Expr[Int])(using Quotes): Expr[Int] = '{ 0 } - def case8(erased i: Expr[Int])(erased j: Expr[Int]) (h: Int)(using Quotes): Expr[Int] = '{ 0 } -} diff --git a/tests/run-custom-args/tasty-inspector/tastyPaths.check b/tests/run-custom-args/tasty-inspector/tastyPaths.check deleted file mode 100644 index 4aab4bcb2590..000000000000 --- a/tests/run-custom-args/tasty-inspector/tastyPaths.check +++ /dev/null @@ -1,2 +0,0 @@ -List(/tastyPaths/I8163.class) -`reflect.SourceFile.current` cannot be called within the TASTy ispector diff --git a/tests/run-deep-subtype/Tuple-reverse.check b/tests/run-deep-subtype/Tuple-reverse.check new file mode 100644 index 000000000000..e889132e88a5 --- /dev/null +++ b/tests/run-deep-subtype/Tuple-reverse.check @@ -0,0 +1,78 @@ +(0) +(0,0) +(1,0,0) +(2,1,0,0) +(3,2,1,0,0) +(4,3,2,1,0,0) +(5,4,3,2,1,0,0) +(6,5,4,3,2,1,0,0) +(7,6,5,4,3,2,1,0,0) +(8,7,6,5,4,3,2,1,0,0) +(9,8,7,6,5,4,3,2,1,0,0) +(10,9,8,7,6,5,4,3,2,1,0,0) +(11,10,9,8,7,6,5,4,3,2,1,0,0) +(12,11,10,9,8,7,6,5,4,3,2,1,0,0) +(13,12,11,10,9,8,7,6,5,4,3,2,1,0,0) +(14,13,12,11,10,9,8,7,6,5,4,3,2,1,0,0) +(15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0,0) +(16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0,0) +(17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0,0) +(18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0,0) +(19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0,0) +(20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0,0) +(21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0,0) +(22,21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0,0) +(23,22,21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0,0) +(24,23,22,21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0,0) +() +(1) +(2,1) +(3,2,1) +(4,3,2,1) +(5,4,3,2,1) +(6,5,4,3,2,1) +(7,6,5,4,3,2,1) +(8,7,6,5,4,3,2,1) +(9,8,7,6,5,4,3,2,1) +(10,9,8,7,6,5,4,3,2,1) +(11,10,9,8,7,6,5,4,3,2,1) +(12,11,10,9,8,7,6,5,4,3,2,1) +(13,12,11,10,9,8,7,6,5,4,3,2,1) +(14,13,12,11,10,9,8,7,6,5,4,3,2,1) +(15,14,13,12,11,10,9,8,7,6,5,4,3,2,1) +(16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1) +(17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1) +(18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1) +(19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1) 
+(20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1) +(21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1) +(22,21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1) +(23,22,21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1) +(24,23,22,21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1) +(25,24,23,22,21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1) +() +(1) +(2,1) +(3,2,1) +(4,3,2,1) +(5,4,3,2,1) +(6,5,4,3,2,1) +(7,6,5,4,3,2,1) +(8,7,6,5,4,3,2,1) +(9,8,7,6,5,4,3,2,1) +(10,9,8,7,6,5,4,3,2,1) +(11,10,9,8,7,6,5,4,3,2,1) +(12,11,10,9,8,7,6,5,4,3,2,1) +(13,12,11,10,9,8,7,6,5,4,3,2,1) +(14,13,12,11,10,9,8,7,6,5,4,3,2,1) +(15,14,13,12,11,10,9,8,7,6,5,4,3,2,1) +(16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1) +(17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1) +(18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1) +(19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1) +(20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1) +(21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1) +(22,21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1) +(23,22,21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1) +(24,23,22,21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1) +(25,24,23,22,21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1) diff --git a/tests/run-deep-subtype/Tuple-reverse.scala b/tests/run-deep-subtype/Tuple-reverse.scala new file mode 100644 index 000000000000..b0461d167fab --- /dev/null +++ b/tests/run-deep-subtype/Tuple-reverse.scala @@ -0,0 +1,73 @@ +import scala.reflect.ClassTag + +object Test { + def main(args: Array[String]): Unit = { + + def testArray[T: ClassTag](n: Int, elem: Int => T): Unit = { + val t: Int *: Tuple = 0 *: Tuple.fromArray(Array.tabulate(n)(elem)) + println(t.reverse) + } + + for (i <- 0 to 25) + testArray(i, j => j) + + val tuple: Tuple3[Int, Boolean, String] = (1, true, "hello") + val reversedTuple: Tuple3[String, Boolean, Int] = tuple.reverse + + assert(reversedTuple == ("hello", true, 1)) + + println(EmptyTuple.reverse) + println(Tuple1(1).reverse) + println((1, 2).reverse) + println((1, 2, 3).reverse) + println((1, 2, 3, 4).reverse) + println((1, 2, 3, 4, 5).reverse) + println((1, 2, 3, 4, 5, 6).reverse) + println((1, 2, 3, 4, 5, 6, 7).reverse) + println((1, 2, 3, 4, 5, 6, 7, 8).reverse) + println((1, 2, 3, 4, 5, 6, 7, 8, 9).reverse) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10).reverse) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11).reverse) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12).reverse) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13).reverse) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14).reverse) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15).reverse) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16).reverse) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17).reverse) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18).reverse) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19).reverse) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20).reverse) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21).reverse) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22).reverse) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23).reverse) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24).reverse) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 
18, 19, 20, 21, 22, 23, 24, 25).reverse) + + println(Tuple().reverse) + println((1 *: Tuple()).reverse) + println((1 *: 2 *: Tuple()).reverse) + println((1 *: 2 *: 3 *: Tuple()).reverse) + println((1 *: 2 *: 3 *: 4 *: Tuple()).reverse) + println((1 *: 2 *: 3 *: 4 *: 5 *: Tuple()).reverse) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: Tuple()).reverse) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: Tuple()).reverse) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: Tuple()).reverse) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: Tuple()).reverse) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: Tuple()).reverse) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: Tuple()).reverse) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: Tuple()).reverse) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: Tuple()).reverse) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: Tuple()).reverse) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: Tuple()).reverse) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: Tuple()).reverse) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: Tuple()).reverse) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: Tuple()).reverse) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: Tuple()).reverse) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: Tuple()).reverse) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: 21 *: Tuple()).reverse) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: 21 *: 22 *: Tuple()).reverse) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: 21 *: 22 *: 23 *: Tuple()).reverse) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: 21 *: 22 *: 23 *: 24 *: Tuple()).reverse) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: 21 *: 22 *: 23 *: 24 *: 25 *: Tuple()).reverse) + } +} diff --git a/tests/run-custom-args/i5256.scala b/tests/run-deep-subtype/i5256.scala similarity index 100% rename from tests/run-custom-args/i5256.scala rename to tests/run-deep-subtype/i5256.scala diff --git a/tests/run-custom-args/tuple-cons.scala b/tests/run-deep-subtype/tuple-cons.scala similarity index 100% rename from tests/run-custom-args/tuple-cons.scala rename to tests/run-deep-subtype/tuple-cons.scala diff --git a/tests/run-custom-args/Xmacro-settings/compileTimeEnv.check b/tests/run-macros/Xmacro-settings-compileTimeEnv.check similarity index 100% rename from tests/run-custom-args/Xmacro-settings/compileTimeEnv.check rename to tests/run-macros/Xmacro-settings-compileTimeEnv.check diff --git a/tests/run-custom-args/Xmacro-settings/compileTimeEnv/Logging.scala b/tests/run-macros/Xmacro-settings-compileTimeEnv/Logging.scala similarity index 100% rename from tests/run-custom-args/Xmacro-settings/compileTimeEnv/Logging.scala rename to 
tests/run-macros/Xmacro-settings-compileTimeEnv/Logging.scala diff --git a/tests/run-custom-args/Xmacro-settings/compileTimeEnv/MacroEnv.scala b/tests/run-macros/Xmacro-settings-compileTimeEnv/MacroEnv.scala similarity index 100% rename from tests/run-custom-args/Xmacro-settings/compileTimeEnv/MacroEnv.scala rename to tests/run-macros/Xmacro-settings-compileTimeEnv/MacroEnv.scala diff --git a/tests/run-macros/Xmacro-settings-compileTimeEnv/Test.scala b/tests/run-macros/Xmacro-settings-compileTimeEnv/Test.scala new file mode 100644 index 000000000000..620d2b85d185 --- /dev/null +++ b/tests/run-macros/Xmacro-settings-compileTimeEnv/Test.scala @@ -0,0 +1,32 @@ +//> using options -Xmacro-settings:a,b=1,c.b.a=x.y.z=1,myLogger.level=INFO + +import scala.compiletime.* + +object Test { + import Logging.* + + def main(args: Array[String]): Unit = { + runLog() + runBasic() + } + + def runLog(): Unit = { + trace("I'm a trace msg") + debug("I'm a debug msg") + info("I'm a info msg") + warn("I'm a warn msg") + } + + def runBasic(): Unit = { + printEnv("a") + printEnv("b") + printEnv("c.b.a") + printEnv("wat") + } + + inline def printEnv(inline k: String): Unit = + inline MacroEnv.get(k) match + case Some(v) => println(s"$k = [$v]") + case None => println(k + " is not defined") + +} diff --git a/tests/run-custom-args/Xmacro-settings/simple/M1.scala b/tests/run-macros/Xmacro-settings-simple/M1.scala similarity index 100% rename from tests/run-custom-args/Xmacro-settings/simple/M1.scala rename to tests/run-macros/Xmacro-settings-simple/M1.scala diff --git a/tests/run-macros/Xmacro-settings-simple/Test.scala b/tests/run-macros/Xmacro-settings-simple/Test.scala new file mode 100644 index 000000000000..59289bc3642b --- /dev/null +++ b/tests/run-macros/Xmacro-settings-simple/Test.scala @@ -0,0 +1,12 @@ +//> using options -Xmacro-settings:one,two,three + +import x.* + +object Test { + + def main(args: Array[String]):Unit = + assert(M.settingsContains("one")) + assert(!M.settingsContains("notwo")) + assert(M.settingsContains("two")) + +} diff --git a/tests/run-macros/expr-mirror-info.check b/tests/run-macros/expr-mirror-info.check new file mode 100644 index 000000000000..c851c720caa1 --- /dev/null +++ b/tests/run-macros/expr-mirror-info.check @@ -0,0 +1,4 @@ +(Foo,List(x, y, z),Foo[scala.Long],Foo[scala.Long],List(scala.Int, scala.Double, scala.Long)) +(Foo,List(x, y, z),Foo[scala.Long],Foo[scala.Long],List(scala.Int, scala.Double, scala.Long)) +(Bar,List(A, B),Bar,Bar,List(Bar.A.type, Bar.B)) +(Bar,List(A, B),Bar,Bar,List(Bar.A.type, Bar.B)) diff --git a/tests/run-macros/expr-mirror-info/Lib_1.scala b/tests/run-macros/expr-mirror-info/Lib_1.scala new file mode 100644 index 000000000000..d4d50ce71012 --- /dev/null +++ b/tests/run-macros/expr-mirror-info/Lib_1.scala @@ -0,0 +1,52 @@ +import scala.deriving.Mirror +import scala.quoted.* + +object MirroredExpr: + extension (mirror: Expr[Mirror]) + def mirroredMonoType(using Quotes): Option[Type[?]] = + mirror match + case '{ $_ : Mirror { type MirroredMonoType = t } } => Some(Type.of[t]) + case _ => None + + def mirroredType(using Quotes): Option[Type[?]] = + mirror match + case '{ $_ : Mirror { type MirroredType = t } } => Some(Type.of[t]) + case _ => None + + def mirroredLabel(using Quotes): Option[String] = + mirror match + case '{ type label <: String; $_ : Mirror { type MirroredLabel = label } } => + Type.valueOfConstant[label] + case _ => None + + def mirroredElemTypes(using Quotes): Option[List[Type[?]]] = + mirror match + case '{ type labels <: Tuple; $_ : 
Mirror { type MirroredElemTypes = labels } } => + tupleTypes[labels] + case _ => None + + def mirroredElemLabels(using Quotes): Option[List[String]] = + mirror match + case '{ type labels <: Tuple; $_ : Mirror { type MirroredElemLabels = labels } } => + Type.valueOfTuple[labels].map(_.toList.asInstanceOf[List[String]]) + case _ => None + + private def tupleTypes[T <: Tuple : Type](using Quotes): Option[List[Type[?]]] = + import quotes.reflect.* + val cons = Symbol.classSymbol("scala.*:") + def rec(tpe: TypeRepr): Option[List[Type[?]]] = + tpe.widenTermRefByName.dealias match + case AppliedType(fn, tpes) if defn.isTupleClass(fn.typeSymbol) => + tpes.foldRight(Option(List.empty[Type[?]])) { + case (_, None) => None + case (tpe, Some(acc)) => Some(tpe.asType :: acc) + case _ => None + } + case AppliedType(tp, List(headType, tail)) if tp.derivesFrom(cons) => + rec(tail) match + case Some(tailTypes) => Some(headType.asType :: tailTypes) + case None => None + case tpe => + if tpe.derivesFrom(Symbol.classSymbol("scala.EmptyTuple")) then Some(Nil) + else None + rec(TypeRepr.of[T]) diff --git a/tests/run-macros/expr-mirror-info/Macro_1.scala b/tests/run-macros/expr-mirror-info/Macro_1.scala new file mode 100644 index 000000000000..eea6bade48ca --- /dev/null +++ b/tests/run-macros/expr-mirror-info/Macro_1.scala @@ -0,0 +1,41 @@ +import scala.deriving.Mirror +import scala.quoted.* + +inline def reflectMirrorInfo[T](using mirror: Mirror.Of[T]): Any = ${ reflectMirrorInfoExpr[T]('mirror) } + +private def reflectMirrorInfoExpr[T: Type](mirror: Expr[Mirror.Of[T]])(using Quotes): Expr[Any] = + val mirroredLabel: String = MirroredExpr.mirroredLabel(mirror).getOrElse(quotes.reflect.report.errorAndAbort("MirroredLabel not found")) + val mirroredElemLabels = MirroredExpr.mirroredElemLabels(mirror).getOrElse(quotes.reflect.report.errorAndAbort("MirroredElemLabels not found")) + val mirroredMonoType: Type[?] = MirroredExpr.mirroredMonoType(mirror).getOrElse(quotes.reflect.report.errorAndAbort("MirroredMonoType not found")) + val mirroredType: Type[?] = MirroredExpr.mirroredType(mirror).getOrElse(quotes.reflect.report.errorAndAbort("MirroredType not found")) + val mirroredElemTypes: List[Type[?]] = MirroredExpr.mirroredElemTypes(mirror).getOrElse(quotes.reflect.report.errorAndAbort("MirroredElemTypes not found")) + + val mirroredMonoTypeString = mirroredMonoType match + case '[t] => Type.show[t] + val mirroredTypeString = mirroredType match + case '[t] => Type.show[t] + val mirroredElemTypesStrings = mirroredElemTypes.map { + case '[t] => Type.show[t] + } + + Expr((mirroredLabel, mirroredElemLabels, mirroredMonoTypeString, mirroredTypeString, mirroredElemTypesStrings)) + +inline def reflectMirrorInfo2[T](using mirror: Mirror.Of[T]): Any = ${ reflectMirrorInfoExpr2[T]('mirror) } + +private def reflectMirrorInfoExpr2[T: Type](mirror: Expr[Mirror.Of[T]])(using Quotes): Expr[Any] = + import MirroredExpr.* + val mirroredLabel: String = mirror.mirroredLabel.getOrElse(quotes.reflect.report.errorAndAbort("MirroredLabel not found")) + val mirroredElemLabels = mirror.mirroredElemLabels.getOrElse(quotes.reflect.report.errorAndAbort("MirroredElemLabels not found")) + val mirroredMonoType: Type[?] = mirror.mirroredMonoType.getOrElse(quotes.reflect.report.errorAndAbort("MirroredMonoType not found")) + val mirroredType: Type[?] 
= mirror.mirroredType.getOrElse(quotes.reflect.report.errorAndAbort("MirroredType not found")) + val mirroredElemTypes: List[Type[?]] = mirror.mirroredElemTypes.getOrElse(quotes.reflect.report.errorAndAbort("MirroredElemTypes not found")) + + val mirroredMonoTypeString = mirroredMonoType match + case '[t] => Type.show[t] + val mirroredTypeString = mirroredType match + case '[t] => Type.show[t] + val mirroredElemTypesStrings = mirroredElemTypes.map { + case '[t] => Type.show[t] + } + + Expr((mirroredLabel, mirroredElemLabels, mirroredMonoTypeString, mirroredTypeString, mirroredElemTypesStrings)) diff --git a/tests/run-macros/expr-mirror-info/Test_2.scala b/tests/run-macros/expr-mirror-info/Test_2.scala new file mode 100644 index 000000000000..0b72d0032bd1 --- /dev/null +++ b/tests/run-macros/expr-mirror-info/Test_2.scala @@ -0,0 +1,12 @@ +import scala.deriving.Mirror + +case class Foo[T](x: Int, y: Double, z: T) +enum Bar: + case A + case B(b: Int) + +@main def Test: Unit = + println(reflectMirrorInfo[Foo[Long]]) + println(reflectMirrorInfo2[Foo[Long]]) + println(reflectMirrorInfo[Bar]) + println(reflectMirrorInfo2[Bar]) diff --git a/tests/run-macros/i10863.check b/tests/run-macros/i10863.check index 93857b07d72c..a21266bf4833 100644 --- a/tests/run-macros/i10863.check +++ b/tests/run-macros/i10863.check @@ -1 +1 @@ -[A >: scala.Nothing <: scala.Any] => scala.collection.immutable.List[A] +[A >: scala.Nothing <: scala.Any] =>> scala.collection.immutable.List[A] diff --git a/tests/run-custom-args/Yread-comments/i12351/GetDocString_1.scala b/tests/run-macros/i12351/GetDocString_1.scala similarity index 100% rename from tests/run-custom-args/Yread-comments/i12351/GetDocString_1.scala rename to tests/run-macros/i12351/GetDocString_1.scala diff --git a/tests/run-macros/i12351/Test_2.scala b/tests/run-macros/i12351/Test_2.scala new file mode 100644 index 000000000000..e480b3c7e86e --- /dev/null +++ b/tests/run-macros/i12351/Test_2.scala @@ -0,0 +1,6 @@ +//> using options -Yread-docs + +@main def Test(): Unit = { + println(getDocString[Data]) + assert(getDocString[Data].nonEmpty) +} diff --git a/tests/run-custom-args/Yread-comments/i12352/Macro.scala b/tests/run-macros/i12352/Macro.scala similarity index 100% rename from tests/run-custom-args/Yread-comments/i12352/Macro.scala rename to tests/run-macros/i12352/Macro.scala diff --git a/tests/run-macros/i12352/Main.scala b/tests/run-macros/i12352/Main.scala new file mode 100644 index 000000000000..b62bd80eaf2c --- /dev/null +++ b/tests/run-macros/i12352/Main.scala @@ -0,0 +1,7 @@ +//> using options -Yread-docs + +@main def Test(): Unit = { + val res = getDocString[scala.quoted.Quotes] + println(res) + assert(res.nonEmpty) +} diff --git a/tests/run-macros/i16734a.check b/tests/run-macros/i16734a.check new file mode 100644 index 000000000000..730fe709b146 --- /dev/null +++ b/tests/run-macros/i16734a.check @@ -0,0 +1,67 @@ +[F >: scala.Nothing <: [A >: scala.Nothing <: scala.Any] =>> scala.Any] =>> K1Inv[F] +F +A + +[F >: scala.Nothing <: [A >: scala.Nothing <: scala.Any] =>> scala.Any] =>> K1Cov[F] +F ++A + +[F >: scala.Nothing <: [A >: scala.Nothing <: scala.Any] =>> scala.Any] =>> K1Con[F] +F +-A + +[F >: scala.Nothing <: [A >: scala.Nothing <: scala.Any, B >: scala.Nothing <: scala.Any] =>> scala.Any] =>> K2InvInv[F] +F +A, B + +[F >: scala.Nothing <: [A >: scala.Nothing <: scala.Any, B >: scala.Nothing <: scala.Any] =>> scala.Any] =>> K2InvCov[F] +F +A, +B + +[F >: scala.Nothing <: [A >: scala.Nothing <: scala.Any, B >: scala.Nothing <: scala.Any] 
=>> scala.Any] =>> K2InvCon[F] +F +A, -B + +[F >: scala.Nothing <: [A >: scala.Nothing <: scala.Any, B >: scala.Nothing <: scala.Any] =>> scala.Any] =>> K2CovInv[F] +F ++A, B + +[F >: scala.Nothing <: [A >: scala.Nothing <: scala.Any, B >: scala.Nothing <: scala.Any] =>> scala.Any] =>> K2CovCov[F] +F ++A, +B + +[F >: scala.Nothing <: [A >: scala.Nothing <: scala.Any, B >: scala.Nothing <: scala.Any] =>> scala.Any] =>> K2CovCon[F] +F ++A, -B + +[F >: scala.Nothing <: [A >: scala.Nothing <: scala.Any, B >: scala.Nothing <: scala.Any] =>> scala.Any] =>> K2ConInv[F] +F +-A, B + +[F >: scala.Nothing <: [A >: scala.Nothing <: scala.Any, B >: scala.Nothing <: scala.Any] =>> scala.Any] =>> K2ConCov[F] +F +-A, +B + +[F >: scala.Nothing <: [A >: scala.Nothing <: scala.Any, B >: scala.Nothing <: scala.Any] =>> scala.Any] =>> K2ConCon[F] +F +-A, -B + +[G >: scala.Nothing <: [A >: scala.Nothing <: scala.Any, B >: scala.Nothing <: scala.Any, C >: scala.Nothing <: scala.Any, D >: scala.Nothing <: [X1 >: scala.Nothing <: scala.Any, Y1 >: scala.Nothing <: scala.Any, Z1 >: scala.Nothing <: scala.Any] =>> scala.Any, E >: scala.Nothing <: [X2 >: scala.Nothing <: scala.Any, Y2 >: scala.Nothing <: scala.Any, Z2 >: scala.Nothing <: scala.Any] =>> scala.Any, F >: scala.Nothing <: [X3 >: scala.Nothing <: scala.Any, Y3 >: scala.Nothing <: scala.Any, Z3 >: scala.Nothing <: scala.Any] =>> scala.Any] =>> scala.Any] =>> KFunky[G] +G +A, +B, -C, D, +E, -F +X1, +Y1, -Z1 +X2, +Y2, -Z2 +X3, +Y3, -Z3 + +[A >: scala.Nothing <: scala.Any, F >: scala.Nothing <: [B >: scala.Nothing <: scala.Any] =>> scala.Any] =>> F[A] +A, +F +B + +[A >: scala.Nothing <: scala.Any, F >: scala.Nothing <: [B >: scala.Nothing <: scala.Any] =>> scala.Any] =>> F[A] ++A, +F ++B + +[A >: scala.Nothing <: scala.Any, F >: scala.Nothing <: [B >: scala.Nothing <: scala.Any] =>> scala.Any] =>> F[A] +-A, +F +-B + diff --git a/tests/run-macros/i16734a/Macro_1.scala b/tests/run-macros/i16734a/Macro_1.scala new file mode 100644 index 000000000000..eedbe2d0e6a1 --- /dev/null +++ b/tests/run-macros/i16734a/Macro_1.scala @@ -0,0 +1,21 @@ +import scala.quoted.* + +inline def variances[A <: AnyKind]: String = + ${variancesImpl[A]} + +def variancesImpl[A <: AnyKind: Type](using Quotes): Expr[String] = + import quotes.reflect.* + def loop(tpe: TypeRepr): List[String] = + tpe match + case tpe: TypeLambda => + tpe.paramNames.zip(tpe.paramVariances).map { (name, variance) => + if variance == Flags.Covariant then "+" + name + else if variance == Flags.Contravariant then "-" + name + else name + }.mkString(", ") :: tpe.paramTypes.flatMap(loop) + case tpe: TypeBounds => + loop(tpe.low) ++ loop(tpe.hi) + case _ => + Nil + val res = (Type.show[A] :: loop(TypeRepr.of[A])).mkString("", "\n", "\n") + Expr(res) diff --git a/tests/run-macros/i16734a/Test_2.scala b/tests/run-macros/i16734a/Test_2.scala new file mode 100644 index 000000000000..c6b02c145841 --- /dev/null +++ b/tests/run-macros/i16734a/Test_2.scala @@ -0,0 +1,34 @@ +trait K1Inv[F[A]] +trait K1Cov[F[+A]] +trait K1Con[F[-A]] + +trait K2InvInv[F[A, B]] +trait K2InvCov[F[A, +B]] +trait K2InvCon[F[A, -B]] +trait K2CovInv[F[+A, B]] +trait K2CovCov[F[+A, +B]] +trait K2CovCon[F[+A, -B]] +trait K2ConInv[F[-A, B]] +trait K2ConCov[F[-A, +B]] +trait K2ConCon[F[-A, -B]] + + +trait KFunky[G[A, +B, -C, D[X1, +Y1, -Z1], +E[X2, +Y2, -Z2], -F[X3, +Y3, -Z3]]] + +@main def Test = + println(variances[K1Inv]) + println(variances[K1Cov]) + println(variances[K1Con]) + println(variances[K2InvInv]) + println(variances[K2InvCov]) + 
println(variances[K2InvCon]) + println(variances[K2CovInv]) + println(variances[K2CovCov]) + println(variances[K2CovCon]) + println(variances[K2ConInv]) + println(variances[K2ConCov]) + println(variances[K2ConCon]) + println(variances[KFunky]) + println(variances[[A, F[B]] =>> F[A]]) + println(variances[[A, F[+B]] =>> F[A]]) + println(variances[[A, F[-B]] =>> F[A]]) diff --git a/tests/run-macros/i16734b.check b/tests/run-macros/i16734b.check new file mode 100644 index 000000000000..b894ffba4fc4 --- /dev/null +++ b/tests/run-macros/i16734b.check @@ -0,0 +1,36 @@ +type F1Inv +A + +type F1Cov ++A + +type F1Con +-A + +type F2InvInv +A, B + +type F2InvCov +A, +B + +type F2InvCon +A, -B + +type F2CovInv ++A, B + +type F2CovCov ++A, +B + +type F2CovCon ++A, -B + +type F2ConInv +-A, B + +type F2ConCov +-A, +B + +type F2ConCon +-A, -B + diff --git a/tests/run-macros/i16734b/Macro_1.scala b/tests/run-macros/i16734b/Macro_1.scala new file mode 100644 index 000000000000..f1e8e12d308d --- /dev/null +++ b/tests/run-macros/i16734b/Macro_1.scala @@ -0,0 +1,14 @@ +import scala.quoted.* + +inline def typeVariances[A <: AnyKind]: String = + ${variancesImpl[A]} + +def variancesImpl[A <: AnyKind: Type](using Quotes): Expr[String] = + import quotes.reflect.* + val TypeBounds(_, tl: TypeLambda) = TypeRepr.of[A].typeSymbol.info: @unchecked + val variances = tl.paramNames.zip(tl.paramVariances).map { (name, variance) => + if variance == Flags.Covariant then "+" + name + else if variance == Flags.Contravariant then "-" + name + else name + }.mkString(", ") + Expr(TypeRepr.of[A].typeSymbol.toString() + "\n" + variances + "\n") diff --git a/tests/run-macros/i16734b/Test_2.scala b/tests/run-macros/i16734b/Test_2.scala new file mode 100644 index 000000000000..f5481aaf96cf --- /dev/null +++ b/tests/run-macros/i16734b/Test_2.scala @@ -0,0 +1,27 @@ +type F1Inv[A] +type F1Cov[+A] +type F1Con[-A] + +type F2InvInv[A, B] +type F2InvCov[A, +B] +type F2InvCon[A, -B] +type F2CovInv[+A, B] +type F2CovCov[+A, +B] +type F2CovCon[+A, -B] +type F2ConInv[-A, B] +type F2ConCov[-A, +B] +type F2ConCon[-A, -B] + +@main def Test = + println(typeVariances[F1Inv]) + println(typeVariances[F1Cov]) + println(typeVariances[F1Con]) + println(typeVariances[F2InvInv]) + println(typeVariances[F2InvCov]) + println(typeVariances[F2InvCon]) + println(typeVariances[F2CovInv]) + println(typeVariances[F2CovCov]) + println(typeVariances[F2CovCon]) + println(typeVariances[F2ConInv]) + println(typeVariances[F2ConCov]) + println(typeVariances[F2ConCon]) diff --git a/tests/run-macros/i16734c.check b/tests/run-macros/i16734c.check new file mode 100644 index 000000000000..2f33f1c83dd6 --- /dev/null +++ b/tests/run-macros/i16734c.check @@ -0,0 +1,36 @@ +class C1Inv +A + +class C1Cov ++A + +class C1Con +-A + +class C2InvInv +A, B + +class C2InvCov +A, +B + +class C2InvCon +A, -B + +class C2CovInv ++A, B + +class C2CovCov ++A, +B + +class C2CovCon ++A, -B + +class C2ConInv +-A, B + +class C2ConCov +-A, +B + +class C2ConCon +-A, -B + diff --git a/tests/run-macros/i16734c/Macro_1.scala b/tests/run-macros/i16734c/Macro_1.scala new file mode 100644 index 000000000000..755aeb617d8c --- /dev/null +++ b/tests/run-macros/i16734c/Macro_1.scala @@ -0,0 +1,14 @@ +import scala.quoted.* + +inline def classVariances[A <: AnyKind]: String = + ${variancesImpl[A]} + +def variancesImpl[A <: AnyKind: Type](using Quotes): Expr[String] = + import quotes.reflect.* + val variances = TypeRepr.of[A].typeSymbol.typeMembers.filter(_.isTypeParam).map { sym => + if sym.paramVariance == 
Flags.Covariant then "+" + sym.name + else if sym.paramVariance == Flags.Contravariant then "-" + sym.name + else sym.name + } + val res = variances.mkString(TypeRepr.of[A].typeSymbol.toString + "\n", ", ", "\n") + Expr(res) diff --git a/tests/run-macros/i16734c/Test_2.scala b/tests/run-macros/i16734c/Test_2.scala new file mode 100644 index 000000000000..a49e43cba00c --- /dev/null +++ b/tests/run-macros/i16734c/Test_2.scala @@ -0,0 +1,27 @@ +class C1Inv[A] { type T } +class C1Cov[+A] { type T } +class C1Con[-A] { type T } + +class C2InvInv[A, B] { type T } +class C2InvCov[A, +B] { type T } +class C2InvCon[A, -B] { type T } +class C2CovInv[+A, B] { type T } +class C2CovCov[+A, +B] { type T } +class C2CovCon[+A, -B] { type T } +class C2ConInv[-A, B] { type T } +class C2ConCov[-A, +B] { type T } +class C2ConCon[-A, -B] { type T } + +@main def Test = + println(classVariances[C1Inv]) + println(classVariances[C1Cov]) + println(classVariances[C1Con]) + println(classVariances[C2InvInv]) + println(classVariances[C2InvCov]) + println(classVariances[C2InvCon]) + println(classVariances[C2CovInv]) + println(classVariances[C2CovCov]) + println(classVariances[C2CovCon]) + println(classVariances[C2ConInv]) + println(classVariances[C2ConCov]) + println(classVariances[C2ConCon]) diff --git a/tests/run-macros/i17105.check b/tests/run-macros/i17105.check new file mode 100644 index 000000000000..17c45e97b888 --- /dev/null +++ b/tests/run-macros/i17105.check @@ -0,0 +1,8 @@ +case single: [1st case] arg1 outside +case no-param-method (will be eta-expanded): [1st case] placeholder 2 +case curried: [2nd case] arg1, arg2 outside +case methods from outer scope: [1st case] arg1 outer-method +case refinement: Hoe got 1 +case dependent: 1 +case dependent2: 1 +case dependent3: 1 diff --git a/tests/run-macros/i17105/Lib1.scala b/tests/run-macros/i17105/Lib1.scala new file mode 100644 index 000000000000..ed2b145f7914 --- /dev/null +++ b/tests/run-macros/i17105/Lib1.scala @@ -0,0 +1,15 @@ + +// Test case for dependent types +trait DSL { + type N + def toString(n: N): String + val zero: N + def next(n: N): N +} + +object IntDSL extends DSL { + type N = Int + def toString(n: N): String = n.toString() + val zero = 0 + def next(n: N): N = n + 1 +} diff --git a/tests/run-macros/i17105/Macro_2.scala b/tests/run-macros/i17105/Macro_2.scala new file mode 100644 index 000000000000..add0c29f95d3 --- /dev/null +++ b/tests/run-macros/i17105/Macro_2.scala @@ -0,0 +1,34 @@ +import scala.quoted.* +import language.experimental.erasedDefinitions + +inline def testExpr(inline body: Any) = ${ testExprImpl('body) } +def testExprImpl(body: Expr[Any])(using Quotes): Expr[String] = + body match + case '{ def g(y: String) = "placeholder" + y; $a(g): String } => + '{ $a((z: String) => s"[1st case] ${z}") } + case '{ def g(y: String)(z: String) = "placeholder" + y; $a(g): String } => + '{ $a((z1: String) => (z2: String) => s"[2nd case] ${z1}, ${z2}") } + // Refined Types + case '{ + type t + def refined(a: `t`): String = $x(a): String + $y(refined): String + } => + '{ $y($x) } + // Dependent Types + case '{ + def p(dsl: DSL): dsl.N = dsl.zero + $y(p): String + } => + '{ $y((dsl1: DSL) => dsl1.next(dsl1.zero)) } + case '{ + def p(dsl: DSL)(a: dsl.N): dsl.N = a + $y(p): String + } => + '{ $y((dsl: DSL) => (b2: dsl.N) => dsl.next(b2)) } + case '{ + def p(dsl1: DSL)(dsl2: DSL): dsl2.N = dsl2.zero + $y(p): String + } => + '{ $y((dsl1: DSL) => (dsl2: DSL) => dsl2.next(dsl2.zero)) } + case _ => Expr("not matched") diff --git 
a/tests/run-macros/i17105/Test_3.scala b/tests/run-macros/i17105/Test_3.scala new file mode 100644 index 000000000000..c19ac507e1a4 --- /dev/null +++ b/tests/run-macros/i17105/Test_3.scala @@ -0,0 +1,23 @@ +import reflect.Selectable.reflectiveSelectable + +class Hoe { def f(x: Int): String = s"Hoe got ${x}" } + +@main def Test: Unit = + println("case single: " + testExpr { def f(x: String) = "placeholder" + x; f("arg1") + " outside" }) + println("case no-param-method (will be eta-expanded): " + testExpr { def f(x: String) = "placeholder" + x; (() => f)()("placeholder 2") }) + println("case curried: " + testExpr { def f(x: String)(y: String) = "placeholder" + x; f("arg1")("arg2") + " outside" }) + def outer() = " outer-method" + println("case methods from outer scope: " + testExpr { def f(x: String) = "placeholder" + x; f("arg1") + outer() }) + println("case refinement: " + testExpr { def refined(a: { def f(x: Int): String }): String = a.f(1); refined(Hoe()) }) + println("case dependent: " + testExpr { + def p(a: DSL): a.N = a.zero + IntDSL.toString(p(IntDSL)) + }) + println("case dependent2: " + testExpr { + def p(dsl1: DSL)(c: dsl1.N): dsl1.N = c + IntDSL.toString(p(IntDSL)(IntDSL.zero)) + }) + println("case dependent3: " + testExpr { + def p(dsl1: DSL)(dsl2: DSL): dsl2.N = dsl2.zero + IntDSL.toString(p(IntDSL)(IntDSL)) + }) diff --git a/tests/run-macros/i17105b.check b/tests/run-macros/i17105b.check new file mode 100644 index 000000000000..40b736596a70 --- /dev/null +++ b/tests/run-macros/i17105b.check @@ -0,0 +1,3 @@ +case erased: [erased case] +case erased nested: c +case erased nested 2: d diff --git a/tests/run-macros/i17105b/Macro_1.scala b/tests/run-macros/i17105b/Macro_1.scala new file mode 100644 index 000000000000..5456470cc3d8 --- /dev/null +++ b/tests/run-macros/i17105b/Macro_1.scala @@ -0,0 +1,27 @@ +import scala.language.experimental.erasedDefinitions + +import scala.quoted.* + +inline def testExpr(inline body: Any) = ${ testExprImpl('body) } +def testExprImpl(body: Expr[Any])(using Quotes): Expr[String] = + body match + // Erased Types + case '{ def erasedfn(y: String) = "placeholder"; $a(erasedfn): String } => + Expr("This case should not match") + case '{ def erasedfn(erased y: String) = "placeholder"; $a(erasedfn): String } => + '{ $a((erased z: String) => "[erased case]") } + case '{ + def erasedfn(a: String, b: String)(c: String, d: String): String = a + $y(erasedfn): String + } => Expr("This should not match") + case '{ + def erasedfn(a: String, erased b: String)(erased c: String, d: String): String = a + $y(erasedfn): String + } => + '{ $y((a: String, erased b: String) => (erased c: String, d: String) => d) } + case '{ + def erasedfn(a: String, erased b: String)(c: String, erased d: String): String = a + $y(erasedfn): String + } => + '{ $y((a: String, erased b: String) => (c: String, erased d: String) => c) } + case _ => Expr("not matched") diff --git a/tests/run-macros/i17105b/Test_2.scala b/tests/run-macros/i17105b/Test_2.scala new file mode 100644 index 000000000000..46781b558dd4 --- /dev/null +++ b/tests/run-macros/i17105b/Test_2.scala @@ -0,0 +1,12 @@ +import scala.language.experimental.erasedDefinitions + +@main def Test: Unit = + println("case erased: " + testExpr { def erasedfn1(erased x: String) = "placeholder"; erasedfn1("arg1")}) + println("case erased nested: " + testExpr { + def erasedfn2(p: String, erased q: String)(r: String, erased s: String) = p + erasedfn2("a", "b")("c", "d") + }) + println("case erased nested 2: " + testExpr { + def erasedfn2(p: 
String, erased q: String)(erased r: String, s: String) = p + erasedfn2("a", "b")("c", "d") + }) diff --git a/tests/run-macros/i17905.check b/tests/run-macros/i17905.check new file mode 100644 index 000000000000..4635c4a5bf96 --- /dev/null +++ b/tests/run-macros/i17905.check @@ -0,0 +1,3 @@ +case 1: [matched 1st case] another_given outside +case 2: [matched 2nd case] given outside +case 3: [matched 1st case] another_given outside diff --git a/tests/run-macros/i17905/Macro_1.scala b/tests/run-macros/i17905/Macro_1.scala new file mode 100644 index 000000000000..31027855953e --- /dev/null +++ b/tests/run-macros/i17905/Macro_1.scala @@ -0,0 +1,10 @@ +import scala.quoted.* + +inline def testCtxParam(inline body: Any) = ${ testCtxParamImpl('body) } +def testCtxParamImpl(body: Expr[Any])(using Quotes): Expr[String] = + body match + case '{ given i: String = "given"; def g(using s: String) = "placeholder"; $a(g, i): String } => + '{ $a(((s: String) ?=> s"[matched 1st case] ${s}"), "another_given") } + case '{ def g(using s: String) = "placeholder"; $a(g): String } => + '{ $a((s: String) ?=> s"[matched 2nd case] ${s}") } + case _ => Expr("not matched") diff --git a/tests/run-macros/i17905/Test_2.scala b/tests/run-macros/i17905/Test_2.scala new file mode 100644 index 000000000000..7c45a8168e85 --- /dev/null +++ b/tests/run-macros/i17905/Test_2.scala @@ -0,0 +1,6 @@ +@main def Test: Unit = + println("case 1: " + testCtxParam { given String = "given"; def f(using t: String) = "placeholder"; f + " outside" }) + given String = "given" + println("case 2: " + testCtxParam { def f(using t: String) = "placeholder"; f + " outside" }) + /* This is expected to match the first case. The current QuoteMatcher identifies a function with a contextual function. */ + println("case 3: " + testCtxParam { given i: String = "given"; def a(x: String) = "placeholder"; a(i) + " outside" } ) diff --git a/tests/run-macros/i18283.check b/tests/run-macros/i18283.check new file mode 100644 index 000000000000..212aad48383c --- /dev/null +++ b/tests/run-macros/i18283.check @@ -0,0 +1,5 @@ +(Test_2$package.Id,Test_2$package.Id,scala.Long) +(task.Title,task.Title,task.Title) +Task.run +(Test_2$package.Id,Test_2$package.Id,Test_2$package.Id) +(Task.this.Title,Task.this.Title,java.lang.String) diff --git a/tests/run-macros/i18283/Macro_1.scala b/tests/run-macros/i18283/Macro_1.scala new file mode 100644 index 000000000000..5fc0145d28a2 --- /dev/null +++ b/tests/run-macros/i18283/Macro_1.scala @@ -0,0 +1,14 @@ +import scala.quoted.* + +object Macro: + transparent inline def getType[T] = + ${ getTypeImpl[T] } + private def getTypeImpl[T: Type](using Quotes): Expr[Any] = + import quotes.reflect.* + + val tpe = TypeRepr.of[T] + val reprShow = tpe.show + tpe.asType match + case '[t] => + val typeShow = TypeRepr.of[t].show // dealiased type + Expr((Type.show[T], reprShow, typeShow)) diff --git a/tests/run-macros/i18283/Test_2.scala b/tests/run-macros/i18283/Test_2.scala new file mode 100644 index 000000000000..083040b60dac --- /dev/null +++ b/tests/run-macros/i18283/Test_2.scala @@ -0,0 +1,17 @@ +opaque type Id = Long + +class Task: + opaque type Title = String + + def a: Title = "a" + + def run = + println("Task.run") + println(Macro.getType[Id]) + println(Macro.getType[Title]) + +@main def Test = + val task = new Task + println(Macro.getType[Id]) + println(Macro.getType[task.Title]) + task.run diff --git a/tests/run-macros/i18806.check b/tests/run-macros/i18806.check new file mode 100644 index 000000000000..32f95c0d1244 --- /dev/null +++ 
b/tests/run-macros/i18806.check @@ -0,0 +1 @@ +hi \ No newline at end of file diff --git a/tests/run-macros/i18806/Macro_1.scala b/tests/run-macros/i18806/Macro_1.scala new file mode 100644 index 000000000000..461080b67b95 --- /dev/null +++ b/tests/run-macros/i18806/Macro_1.scala @@ -0,0 +1,24 @@ +import scala.annotation.{experimental, MacroAnnotation} +import scala.quoted._ + +@experimental +class gen1 extends MacroAnnotation: + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + import quotes.reflect._ + tree match + case ClassDef(name, ctr, parents, self, body) => + val cls = tree.symbol + // val meth = cls.methodMember("foo").head + // val fooTpe = cls.typeRef.memberType(meth) + + val overrideTpe = MethodType(Nil)(_ => Nil, _ => defn.StringClass.typeRef) + + val fooOverrideSym = Symbol.newMethod(cls, "foo", overrideTpe, Flags.Override, Symbol.noSymbol) + + val fooDef = DefDef(fooOverrideSym, _ => Some(Literal(StringConstant("hi")))) + + val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, fooDef :: body) + List(newClassDef) + case _ => + report.error("Annotation only supports `class`") + List(tree) diff --git a/tests/run-macros/i18806/Test_2.scala b/tests/run-macros/i18806/Test_2.scala new file mode 100644 index 000000000000..3db56c895bdd --- /dev/null +++ b/tests/run-macros/i18806/Test_2.scala @@ -0,0 +1,14 @@ +import scala.annotation.experimental + +class Base: + def foo(): Object = ??? + +@experimental +@gen1 +class Sub extends Base +// > override def foo(): String = "hi" + +@experimental +@main def Test(): Unit = + val sub = new Sub + println(sub.foo()) diff --git a/tests/run-macros/i8514b.check b/tests/run-macros/i8514b.check index 7340243f9771..f7d907e35926 100644 --- a/tests/run-macros/i8514b.check +++ b/tests/run-macros/i8514b.check @@ -1,5 +1,5 @@ B -A[[T >: scala.Nothing <: scala.Any] => P[T], scala.Predef.String] +A[[T >: scala.Nothing <: scala.Any] =>> P[T], scala.Predef.String] java.lang.Object scala.Matchable scala.Any diff --git a/tests/run-macros/macro-erased/Macro_1.scala b/tests/run-macros/macro-erased/Macro_1.scala new file mode 100644 index 000000000000..2e863f24650b --- /dev/null +++ b/tests/run-macros/macro-erased/Macro_1.scala @@ -0,0 +1,23 @@ +import scala.language.experimental.erasedDefinitions + +import scala.quoted.* + +object Macro { + inline def foo1(i: Int) = $ { case1('{ i }) } + inline def foo2(i: Int) = $ { case2(1)('{ i }) } + inline def foo3(i: Int) = $ { case3('{ i })(1) } + inline def foo4(i: Int) = $ { case4(1)('{ i }, '{ i }) } + inline def foo5(i: Int) = $ { case5('{ i }, '{ i })(1) } + inline def foo6(i: Int) = $ { case6(1)('{ i })('{ i }) } + inline def foo7(i: Int) = $ { case7('{ i })(1)('{ i }) } + inline def foo8(i: Int) = $ { case8('{ i })('{ i })(1) } + + def case1(erased i: Expr[Int])(using Quotes): Expr[Int] = '{ 0 } + def case2 (i: Int)(erased j: Expr[Int])(using Quotes): Expr[Int] = '{ 0 } + def case3(erased i: Expr[Int]) (j: Int)(using Quotes): Expr[Int] = '{ 0 } + def case4 (h: Int)(erased i: Expr[Int], erased j: Expr[Int])(using Quotes): Expr[Int] = '{ 0 } + def case5(erased i: Expr[Int], erased j: Expr[Int]) (h: Int)(using Quotes): Expr[Int] = '{ 0 } + def case6 (h: Int)(erased i: Expr[Int])(erased j: Expr[Int])(using Quotes): Expr[Int] = '{ 0 } + def case7(erased i: Expr[Int]) (h: Int)(erased j: Expr[Int])(using Quotes): Expr[Int] = '{ 0 } + def case8(erased i: Expr[Int])(erased j: Expr[Int]) (h: Int)(using Quotes): Expr[Int] = '{ 0 } +} diff --git 
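For a compact reference on the `erased` modifier used throughout `macro-erased/Macro_1.scala` above: an erased parameter participates in type checking but is removed by erasure, so neither the parameter nor its argument survives to runtime. A minimal sketch (hypothetical names, assuming only the experimental `erasedDefinitions` feature these tests enable):

```scala
import scala.language.experimental.erasedDefinitions

// `ev` is available for type checking only; after erasure the
// compiled method takes a single String parameter.
def check(s: String)(erased ev: Int): String = s

@main def demo =
  // The erased argument is type-checked and then compiled away.
  println(check("ok")(42))
```
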
a/tests/run-custom-args/run-macros-erased/macro-erased/2.scala b/tests/run-macros/macro-erased/Test_2.scala similarity index 100% rename from tests/run-custom-args/run-macros-erased/macro-erased/2.scala rename to tests/run-macros/macro-erased/Test_2.scala diff --git a/tests/run/opaque-inline/EmailAddress.scala b/tests/run-macros/opaque-inline/EmailAddress.scala similarity index 100% rename from tests/run/opaque-inline/EmailAddress.scala rename to tests/run-macros/opaque-inline/EmailAddress.scala diff --git a/tests/run/opaque-inline/EmailAddressOps.scala b/tests/run-macros/opaque-inline/EmailAddressOps.scala similarity index 100% rename from tests/run/opaque-inline/EmailAddressOps.scala rename to tests/run-macros/opaque-inline/EmailAddressOps.scala diff --git a/tests/run/opaque-inline/Test.scala b/tests/run-macros/opaque-inline/Test.scala similarity index 100% rename from tests/run/opaque-inline/Test.scala rename to tests/run-macros/opaque-inline/Test.scala diff --git a/tests/run/opaque-inline/TestEmail.scala b/tests/run-macros/opaque-inline/TestEmail.scala similarity index 100% rename from tests/run/opaque-inline/TestEmail.scala rename to tests/run-macros/opaque-inline/TestEmail.scala diff --git a/tests/run-macros/quote-match-more-that-22-splices/Macro_1.scala b/tests/run-macros/quote-match-more-that-22-splices/Macro_1.scala new file mode 100644 index 000000000000..02071e4744ac --- /dev/null +++ b/tests/run-macros/quote-match-more-that-22-splices/Macro_1.scala @@ -0,0 +1,8 @@ +import scala.quoted.* + +inline def f(inline x: Any) = ${ fExpr('x) } + +def fExpr(x: Expr[Any])(using Quotes): Expr[Any] = + x match + case '{ ($x1, $x2, $x3, $x4, $x5, $x6, $x7, $x8, $x9, $x10, $x11, $x12, $x13, $x14, $x15, $x16, $x17, $x18, $x19, $x20, $x21, $x22, $x23, $x24) } => + '{ ($x1, $x2, $x3, $x4, $x5, $x6, $x7, $x8, $x9, $x10, $x11, $x12, $x13, $x14, $x15, $x16, $x17, $x18, $x19, $x20, $x21, $x22, $x23, $x24) } diff --git a/tests/run-macros/quote-match-more-that-22-splices/Test_2.scala b/tests/run-macros/quote-match-more-that-22-splices/Test_2.scala new file mode 100644 index 000000000000..65f917ff3e35 --- /dev/null +++ b/tests/run-macros/quote-match-more-that-22-splices/Test_2.scala @@ -0,0 +1,4 @@ +@main def Test = + val t1 = f((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24)) + val t2 = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24) + assert(t1 == t2) diff --git a/tests/run-custom-args/run-macros-erased/reflect-isFunctionType/macro_1.scala b/tests/run-macros/reflect-isFunctionType/macro_1.scala similarity index 100% rename from tests/run-custom-args/run-macros-erased/reflect-isFunctionType/macro_1.scala rename to tests/run-macros/reflect-isFunctionType/macro_1.scala diff --git a/tests/run-custom-args/run-macros-erased/reflect-isFunctionType/test_2.scala b/tests/run-macros/reflect-isFunctionType/test_2.scala similarity index 98% rename from tests/run-custom-args/run-macros-erased/reflect-isFunctionType/test_2.scala rename to tests/run-macros/reflect-isFunctionType/test_2.scala index 647711ba7ed9..c5cc5fa23ff1 100644 --- a/tests/run-custom-args/run-macros-erased/reflect-isFunctionType/test_2.scala +++ b/tests/run-macros/reflect-isFunctionType/test_2.scala @@ -1,3 +1,5 @@ +import scala.language.experimental.erasedDefinitions + trait Box { type T } diff --git a/tests/run/splice-position.check b/tests/run-macros/splice-position.check similarity index 100% rename from tests/run/splice-position.check rename to 
tests/run-macros/splice-position.check diff --git a/tests/run/splice-position/Test.scala b/tests/run-macros/splice-position/Test.scala similarity index 100% rename from tests/run/splice-position/Test.scala rename to tests/run-macros/splice-position/Test.scala diff --git a/tests/run/splice-position/macros.scala b/tests/run-macros/splice-position/macros.scala similarity index 100% rename from tests/run/splice-position/macros.scala rename to tests/run-macros/splice-position/macros.scala diff --git a/tests/run-macros/tasty-dealiasKeepOpaques.check b/tests/run-macros/tasty-dealiasKeepOpaques.check new file mode 100644 index 000000000000..11835b6febbd --- /dev/null +++ b/tests/run-macros/tasty-dealiasKeepOpaques.check @@ -0,0 +1,10 @@ +java.lang.String +java.lang.String +scala.collection.immutable.List[Test_2$package.A] +scala.collection.immutable.List[scala.Int] +Test_2$package.OA +Test_2$package.OB +Test_2$package.OC[scala.Int] +Test_2$package.OA +Test_2$package.OB +Test_2$package.OC[scala.Int] diff --git a/tests/run-macros/tasty-dealiasKeepOpaques/Macro_1.scala b/tests/run-macros/tasty-dealiasKeepOpaques/Macro_1.scala new file mode 100644 index 000000000000..d1c4a482235d --- /dev/null +++ b/tests/run-macros/tasty-dealiasKeepOpaques/Macro_1.scala @@ -0,0 +1,8 @@ +import scala.quoted.* + +inline def dealiasKeepOpaques[T]: String = ${ impl[T] } + +def impl[T: Type](using Quotes) : Expr[String] = { + import quotes.reflect.* + Expr(TypeRepr.of[T].dealiasKeepOpaques.show) +} diff --git a/tests/run-macros/tasty-dealiasKeepOpaques/Test_2.scala b/tests/run-macros/tasty-dealiasKeepOpaques/Test_2.scala new file mode 100644 index 000000000000..1612816ccbc5 --- /dev/null +++ b/tests/run-macros/tasty-dealiasKeepOpaques/Test_2.scala @@ -0,0 +1,29 @@ +type A = String +type B = List[A] +type C[X] = List[X] + +opaque type OA = String +object OA: + def test = println(dealiasKeepOpaques[OA]) + +opaque type OB = List[A] +object OB: + def test = println(dealiasKeepOpaques[OB]) + +opaque type OC[X] = List[X] +object OC: + def test = println(dealiasKeepOpaques[OC[Int]]) + + +@main def Test: Unit = { + println(dealiasKeepOpaques[String]) + println(dealiasKeepOpaques[A]) + println(dealiasKeepOpaques[B]) + println(dealiasKeepOpaques[C[Int]]) + println(dealiasKeepOpaques[OA]) + println(dealiasKeepOpaques[OB]) + println(dealiasKeepOpaques[OC[Int]]) + OA.test + OB.test + OC.test +} diff --git a/tests/run-macros/tasty-definitions-1.check b/tests/run-macros/tasty-definitions-1.check index 4ac0e6267028..066aeb2eeae4 100644 --- a/tests/run-macros/tasty-definitions-1.check +++ b/tests/run-macros/tasty-definitions-1.check @@ -82,8 +82,8 @@ ContextFunction22 ContextFunction23 ContextFunction24 ContextFunction25 -class java.lang.Exception: Erased function classes are not supported. Use a refined `scala.runtime.ErasedFunction` -ErasedFunction +class java.lang.Exception: Erased function classes are not supported. 
Use a refined `scala.PolyFunction` +PolyFunction Tuple2 Tuple3 Tuple4 diff --git a/tests/run-macros/tasty-definitions-1/quoted_1.scala b/tests/run-macros/tasty-definitions-1/quoted_1.scala index ed210706f567..9a6d6e2edd41 100644 --- a/tests/run-macros/tasty-definitions-1/quoted_1.scala +++ b/tests/run-macros/tasty-definitions-1/quoted_1.scala @@ -65,7 +65,7 @@ object Macros { // should fail printout(defn.FunctionClass(1, isErased = true).name) - printout(defn.ErasedFunctionClass.name) + printout(defn.PolyFunctionClass.name) for (i <- 2 to 22) printout(defn.TupleClass(i).name) diff --git a/tests/run-macros/tasty-definitions-1/quoted_2.scala b/tests/run-macros/tasty-definitions-1/quoted_2.scala index c3824598782c..47167a52f1d9 100644 --- a/tests/run-macros/tasty-definitions-1/quoted_2.scala +++ b/tests/run-macros/tasty-definitions-1/quoted_2.scala @@ -1,3 +1,4 @@ +//> using options -Yretain-trees object Test { def main(args: Array[String]): Unit = { diff --git a/tests/run-custom-args/Yretain-trees/tasty-definitions-2.check b/tests/run-macros/tasty-definitions-2.check similarity index 100% rename from tests/run-custom-args/Yretain-trees/tasty-definitions-2.check rename to tests/run-macros/tasty-definitions-2.check diff --git a/tests/run-custom-args/Yretain-trees/tasty-definitions-2/Macro_1.scala b/tests/run-macros/tasty-definitions-2/Macro_1.scala similarity index 100% rename from tests/run-custom-args/Yretain-trees/tasty-definitions-2/Macro_1.scala rename to tests/run-macros/tasty-definitions-2/Macro_1.scala diff --git a/tests/run-macros/tasty-definitions-2/Test_2.scala b/tests/run-macros/tasty-definitions-2/Test_2.scala new file mode 100644 index 000000000000..d95b8cafddf6 --- /dev/null +++ b/tests/run-macros/tasty-definitions-2/Test_2.scala @@ -0,0 +1,13 @@ +//> using options -Yretain-trees + +object Test { + def main(args: Array[String]): Unit = { + println(Foo.inspectBody(Foo.foo)) + println(Foo.inspectBody(Foo.bar)) + + 3 match { + case x => + println(Foo.inspectBody(x)) + } + } +} diff --git a/tests/run-custom-args/Yretain-trees/tasty-definitions-3.check b/tests/run-macros/tasty-definitions-3.check similarity index 100% rename from tests/run-custom-args/Yretain-trees/tasty-definitions-3.check rename to tests/run-macros/tasty-definitions-3.check diff --git a/tests/run-custom-args/Yretain-trees/tasty-definitions-3/Macro_1.scala b/tests/run-macros/tasty-definitions-3/Macro_1.scala similarity index 100% rename from tests/run-custom-args/Yretain-trees/tasty-definitions-3/Macro_1.scala rename to tests/run-macros/tasty-definitions-3/Macro_1.scala diff --git a/tests/run-macros/tasty-definitions-3/Test_2.scala b/tests/run-macros/tasty-definitions-3/Test_2.scala new file mode 100644 index 000000000000..15d4624883ab --- /dev/null +++ b/tests/run-macros/tasty-definitions-3/Test_2.scala @@ -0,0 +1,17 @@ +//> using options -Yretain-trees + +object Test { + + def main(args: Array[String]): Unit = { + println(Foo.inspectBody(foo)) + println(Foo.inspectBody(bar)) + + 3 match { + case x => + println(Foo.inspectBody(x)) + } + } + + def foo: Int = 1 + 2 + val bar: Int = 2 + 3 +} diff --git a/tests/run-custom-args/Yretain-trees/tasty-extractors-owners.check b/tests/run-macros/tasty-extractors-owners.check similarity index 100% rename from tests/run-custom-args/Yretain-trees/tasty-extractors-owners.check rename to tests/run-macros/tasty-extractors-owners.check diff --git a/tests/run-custom-args/Yretain-trees/tasty-extractors-owners/quoted_1.scala b/tests/run-macros/tasty-extractors-owners/quoted_1.scala 
similarity index 100% rename from tests/run-custom-args/Yretain-trees/tasty-extractors-owners/quoted_1.scala rename to tests/run-macros/tasty-extractors-owners/quoted_1.scala diff --git a/tests/run-macros/tasty-extractors-owners/quoted_2.scala b/tests/run-macros/tasty-extractors-owners/quoted_2.scala new file mode 100644 index 000000000000..75f6585676cc --- /dev/null +++ b/tests/run-macros/tasty-extractors-owners/quoted_2.scala @@ -0,0 +1,25 @@ +//> using options -Yretain-trees + +import Macros.* + +object Test { + def main(args: Array[String]): Unit = { + printOwners { + def foo = { + def bar = 1 + val bar2 = 2 + bar + } + val foo2 = { + def baz = 3 + val baz2 = 4 + baz + } + class A { + type B = Int + def b = 5 + val b2 = 6 + } + } + } +} diff --git a/tests/run-custom-args/Yretain-trees/tasty-load-tree-1.check b/tests/run-macros/tasty-load-tree-1.check similarity index 100% rename from tests/run-custom-args/Yretain-trees/tasty-load-tree-1.check rename to tests/run-macros/tasty-load-tree-1.check diff --git a/tests/run-custom-args/Yretain-trees/tasty-load-tree-1/quoted_1.scala b/tests/run-macros/tasty-load-tree-1/quoted_1.scala similarity index 100% rename from tests/run-custom-args/Yretain-trees/tasty-load-tree-1/quoted_1.scala rename to tests/run-macros/tasty-load-tree-1/quoted_1.scala diff --git a/tests/run-macros/tasty-load-tree-1/quoted_2.scala b/tests/run-macros/tasty-load-tree-1/quoted_2.scala new file mode 100644 index 000000000000..e7e47cb55516 --- /dev/null +++ b/tests/run-macros/tasty-load-tree-1/quoted_2.scala @@ -0,0 +1,8 @@ +//> using options -Yretain-trees + +object Test { + def main(args: Array[String]): Unit = { + println(Foo.inspectBody(Foo.foo)) + println(Foo.inspectBody(Foo.bar)) + } +} diff --git a/tests/run-custom-args/Yretain-trees/tasty-load-tree-2.check b/tests/run-macros/tasty-load-tree-2.check similarity index 100% rename from tests/run-custom-args/Yretain-trees/tasty-load-tree-2.check rename to tests/run-macros/tasty-load-tree-2.check diff --git a/tests/run-custom-args/Yretain-trees/tasty-load-tree-2/quoted_1.scala b/tests/run-macros/tasty-load-tree-2/quoted_1.scala similarity index 100% rename from tests/run-custom-args/Yretain-trees/tasty-load-tree-2/quoted_1.scala rename to tests/run-macros/tasty-load-tree-2/quoted_1.scala diff --git a/tests/run-macros/tasty-load-tree-2/quoted_2.scala b/tests/run-macros/tasty-load-tree-2/quoted_2.scala new file mode 100644 index 000000000000..35362f8c80e9 --- /dev/null +++ b/tests/run-macros/tasty-load-tree-2/quoted_2.scala @@ -0,0 +1,11 @@ +//> using options -Yretain-trees + +object Test { + def main(args: Array[String]): Unit = { + println(Foo.inspectBody(foo)) + println(Foo.inspectBody(bar)) + } + + def foo: Int = 1 + 2 + val bar: Int = 2 + 3 +} diff --git a/tests/run-macros/tasty-simplified.check b/tests/run-macros/tasty-simplified.check index 3afb7916ac63..b70bd119fef6 100644 --- a/tests/run-macros/tasty-simplified.check +++ b/tests/run-macros/tasty-simplified.check @@ -1,4 +1,4 @@ Functor[scala.collection.immutable.List] -Unapply[[F >: scala.Nothing <: [_$9 >: scala.Nothing <: scala.Any] => scala.Any] => Functor[F], Wrap[scala.Int]] -Unapply[[F >: scala.Nothing <: [_$9 >: scala.Nothing <: scala.Any] => scala.Any] => Functor[F], Wrap[Dummy]] +Unapply[[F >: scala.Nothing <: [_$9 >: scala.Nothing <: scala.Any] =>> scala.Any] =>> Functor[F], Wrap[scala.Int]] +Unapply[[F >: scala.Nothing <: [_$9 >: scala.Nothing <: scala.Any] =>> scala.Any] =>> Functor[F], Wrap[Dummy]] Functor[scala.Option] diff --git 
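The `.check` updates above (i8514b, tasty-simplified, plus the type-show and run-staging files below) all stem from one printer change: type lambdas now render with the dedicated arrow `=>>` instead of `=>`, which previously made them look like function types. A sketch of a macro that surfaces this output (the `showType` name is hypothetical; the pattern follows the `type-show` test below):

```scala
import scala.quoted.*

inline def showType[T <: AnyKind]: String = ${ showTypeImpl[T] }

def showTypeImpl[T <: AnyKind: Type](using Quotes): Expr[String] =
  import quotes.reflect.*
  Expr(TypeRepr.of[T].show)
```

With this change, `showType[[X] =>> List[X]]` should print roughly `[X >: scala.Nothing <: scala.Any] =>> scala.collection.immutable.List[X]`, matching the updated expectations.
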
a/tests/run-macros/tasty-simplified/quoted_2.scala b/tests/run-macros/tasty-simplified/quoted_2.scala index dcf100d6e8bb..b6bb99bf7cf9 100644 --- a/tests/run-macros/tasty-simplified/quoted_2.scala +++ b/tests/run-macros/tasty-simplified/quoted_2.scala @@ -1,3 +1,5 @@ +//> using options -source:3.3 + import Macros.simplified object Test { diff --git a/tests/run-macros/term-show.check b/tests/run-macros/term-show.check new file mode 100644 index 000000000000..91ba0308e3db --- /dev/null +++ b/tests/run-macros/term-show.check @@ -0,0 +1,21 @@ +{ + class C() { + def a: scala.Int = 0 + private[this] def b: scala.Int = 0 + private[this] def c: scala.Int = 0 + private[C] def d: scala.Int = 0 + protected def e: scala.Int = 0 + protected[this] def f: scala.Int = 0 + protected[C] def g: scala.Int = 0 + } + () +} +@scala.annotation.internal.SourceFile("tests/run-macros/term-show/Test_2.scala") trait A() extends java.lang.Object { + def imp(x: scala.Int)(implicit str: scala.Predef.String): scala.Int + def use(`x₂`: scala.Int)(using `str₂`: scala.Predef.String): scala.Int + def era(`x₃`: scala.Int)(erased `str₃`: scala.Predef.String): scala.Int + def f1(x1: scala.Int, erased x2: scala.Int): scala.Int + def f2(erased `x1₂`: scala.Int, erased `x2₂`: scala.Int): scala.Int + def f3(using `x1₃`: scala.Int, erased `x2₃`: scala.Int): scala.Int + def f4(using erased `x1₄`: scala.Int, erased `x2₄`: scala.Int): scala.Int +} diff --git a/tests/run-macros/term-show/Macro_1.scala b/tests/run-macros/term-show/Macro_1.scala new file mode 100644 index 000000000000..8e26c715d3ed --- /dev/null +++ b/tests/run-macros/term-show/Macro_1.scala @@ -0,0 +1,15 @@ +import scala.quoted.* + +object TypeToolbox { + inline def show(inline v: Any): String = ${ showImpl('v) } + private def showImpl(using Quotes)(v: Expr[Any]): Expr[String] = + import quotes.reflect.* + Expr(v.show) + + inline def showTree(inline className: String): String = ${ showTreeImpl('className) } + private def showTreeImpl(className: Expr[String])(using Quotes) : Expr[String] = + import quotes.reflect.* + val name = className.valueOrAbort + val res = Symbol.requiredClass(name).tree.show + Expr(res) +} diff --git a/tests/run-macros/term-show/Test_2.scala b/tests/run-macros/term-show/Test_2.scala new file mode 100644 index 000000000000..eebd50576930 --- /dev/null +++ b/tests/run-macros/term-show/Test_2.scala @@ -0,0 +1,30 @@ +import scala.language.experimental.erasedDefinitions + +trait A: + def imp(x: Int)(implicit str: String): Int + def use(x: Int)(using str: String): Int + def era(x: Int)(erased str: String): Int + + def f1(x1: Int, erased x2: Int): Int + def f2(erased x1: Int, erased x2: Int): Int + def f3(using x1: Int, erased x2: Int): Int + def f4(using erased x1: Int, erased x2: Int): Int + +object Test { + import TypeToolbox.* + def main(args: Array[String]): Unit = { + println(show { + class C { + def a = 0 + private def b = 0 + private[this] def c = 0 + private[C] def d = 0 + protected def e = 0 + protected[this] def f = 0 + protected[C] def g = 0 + } + }) + + println(showTree("A")) + } +} diff --git a/tests/run-macros/type-show/Test_2.scala b/tests/run-macros/type-show/Test_2.scala index d741a426cd69..ace303a6596e 100644 --- a/tests/run-macros/type-show/Test_2.scala +++ b/tests/run-macros/type-show/Test_2.scala @@ -9,7 +9,7 @@ object Test { assert(show[Int => Int] == "scala.Function1[scala.Int, scala.Int]") assert(show[(Int, String)] == "scala.Tuple2[scala.Int, scala.Predef.String]") assert(show[[X] =>> X match { case Int => Int }] == - """[X >: 
scala.Nothing <: scala.Any] => X match { + """[X >: scala.Nothing <: scala.Any] =>> X match { | case scala.Int => scala.Int |}""".stripMargin) assert(showStructure[[X] =>> X match { case Int => Int }] == """TypeLambda(List(X), List(TypeBounds(TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Nothing"), TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Any"))), MatchType(TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Any"), ParamRef(binder, 0), List(MatchCase(TypeRef(TermRef(ThisType(TypeRef(NoPrefix(), "")), "scala"), "Int"), TypeRef(TermRef(ThisType(TypeRef(NoPrefix(), "")), "scala"), "Int")))))""") diff --git a/tests/run-staging/i5965.check b/tests/run-staging/i5965.check index 2eab9b0a2ba1..2e991af71892 100644 --- a/tests/run-staging/i5965.check +++ b/tests/run-staging/i5965.check @@ -11,7 +11,7 @@ List(1, 2, 3) } Some(4) { - val y: [V >: scala.Nothing <: scala.Any] => scala.collection.immutable.Map[scala.Int, V][scala.Int] = scala.Predef.Map.apply[scala.Int, scala.Int](scala.Predef.ArrowAssoc[scala.Int](4).->[scala.Int](1)) + val y: [V >: scala.Nothing <: scala.Any] =>> scala.collection.immutable.Map[scala.Int, V][scala.Int] = scala.Predef.Map.apply[scala.Int, scala.Int](scala.Predef.ArrowAssoc[scala.Int](4).->[scala.Int](1)) (y: scala.collection.immutable.Map[scala.Int, scala.Int]) } diff --git a/tests/run-staging/multi-staging.check b/tests/run-staging/multi-staging.check index 76adcfec3034..c5f53e51a7d2 100644 --- a/tests/run-staging/multi-staging.check +++ b/tests/run-staging/multi-staging.check @@ -1,5 +1,5 @@ stage1 code: ((q1: scala.quoted.Quotes) ?=> { val x1: scala.Int = 2 - scala.quoted.runtime.Expr.quote[scala.Int](1.+(scala.quoted.runtime.Expr.splice[scala.Int](((evidence$5: scala.quoted.Quotes) ?=> scala.quoted.Expr.apply[scala.Int](x1)(scala.quoted.ToExpr.IntToExpr[scala.Int])(evidence$5))))).apply(using q1) + scala.quoted.runtime.Expr.quote[scala.Int](1.+(scala.quoted.runtime.Expr.splice[scala.Int](((contextual$5: scala.quoted.Quotes) ?=> scala.quoted.Expr.apply[scala.Int](x1)(scala.quoted.ToExpr.IntToExpr[scala.Int])(contextual$5))))).apply(using q1) }) 3 diff --git a/tests/run-staging/quote-nested-2.check b/tests/run-staging/quote-nested-2.check index 7db9edb0424e..48ecf87577ab 100644 --- a/tests/run-staging/quote-nested-2.check +++ b/tests/run-staging/quote-nested-2.check @@ -1,4 +1,4 @@ ((q: scala.quoted.Quotes) ?=> { val a: scala.quoted.Expr[scala.Int] = scala.quoted.runtime.Expr.quote[scala.Int](4).apply(using q) - ((evidence$2: scala.quoted.Quotes) ?=> a).apply(using q) + ((contextual$2: scala.quoted.Quotes) ?=> a).apply(using q) }) diff --git a/tests/run-staging/quote-nested-5.check b/tests/run-staging/quote-nested-5.check index 53600d16a8da..47d39cc92611 100644 --- a/tests/run-staging/quote-nested-5.check +++ b/tests/run-staging/quote-nested-5.check @@ -1,4 +1,4 @@ ((q: scala.quoted.Quotes) ?=> { val a: scala.quoted.Expr[scala.Int] = scala.quoted.runtime.Expr.quote[scala.Int](4).apply(using q) - ((q2: scala.quoted.Quotes) ?=> ((evidence$2: scala.quoted.Quotes) ?=> a).apply(using q2)) + ((q2: scala.quoted.Quotes) ?=> ((contextual$2: scala.quoted.Quotes) ?=> a).apply(using q2)) }.apply(using q)) diff --git a/tests/run-staging/quote-nested-6.check b/tests/run-staging/quote-nested-6.check index 2ae8b0d26e47..81e20d6782a3 100644 --- a/tests/run-staging/quote-nested-6.check +++ b/tests/run-staging/quote-nested-6.check @@ -1,7 +1,7 @@ { - type T[X] = [A >: scala.Nothing <: scala.Any] => scala.collection.immutable.List[A][X] + type T[X] = [A >: scala.Nothing <: 
scala.Any] =>> scala.collection.immutable.List[A][X] val x: java.lang.String = "foo" - val z: [X >: scala.Nothing <: scala.Any] => scala.collection.immutable.List[X][java.lang.String] = scala.List.apply[java.lang.String](x) + val z: [X >: scala.Nothing <: scala.Any] =>> scala.collection.immutable.List[X][java.lang.String] = scala.List.apply[java.lang.String](x) (x: java.lang.String) } diff --git a/tests/run-custom-args/tasty-inspector/i10359.scala b/tests/run-tasty-inspector/i10359.scala similarity index 100% rename from tests/run-custom-args/tasty-inspector/i10359.scala rename to tests/run-tasty-inspector/i10359.scala diff --git a/tests/run-custom-args/tasty-inspector/i13352.scala b/tests/run-tasty-inspector/i13352.scala similarity index 100% rename from tests/run-custom-args/tasty-inspector/i13352.scala rename to tests/run-tasty-inspector/i13352.scala diff --git a/tests/run-custom-args/tasty-inspector/i14027.scala b/tests/run-tasty-inspector/i14027.scala similarity index 100% rename from tests/run-custom-args/tasty-inspector/i14027.scala rename to tests/run-tasty-inspector/i14027.scala diff --git a/tests/run-custom-args/tasty-inspector/i14785.scala b/tests/run-tasty-inspector/i14785.scala similarity index 100% rename from tests/run-custom-args/tasty-inspector/i14785.scala rename to tests/run-tasty-inspector/i14785.scala diff --git a/tests/run-custom-args/tasty-inspector/i14788.scala b/tests/run-tasty-inspector/i14788.scala similarity index 100% rename from tests/run-custom-args/tasty-inspector/i14788.scala rename to tests/run-tasty-inspector/i14788.scala diff --git a/tests/run-custom-args/tasty-inspector/i14789.scala b/tests/run-tasty-inspector/i14789.scala similarity index 100% rename from tests/run-custom-args/tasty-inspector/i14789.scala rename to tests/run-tasty-inspector/i14789.scala diff --git a/tests/run-custom-args/tasty-inspector/i8163.scala b/tests/run-tasty-inspector/i8163.scala similarity index 100% rename from tests/run-custom-args/tasty-inspector/i8163.scala rename to tests/run-tasty-inspector/i8163.scala diff --git a/tests/run-custom-args/tasty-inspector/i8364.scala b/tests/run-tasty-inspector/i8364.scala similarity index 100% rename from tests/run-custom-args/tasty-inspector/i8364.scala rename to tests/run-tasty-inspector/i8364.scala diff --git a/tests/run-custom-args/tasty-inspector/i8389.scala b/tests/run-tasty-inspector/i8389.scala similarity index 100% rename from tests/run-custom-args/tasty-inspector/i8389.scala rename to tests/run-tasty-inspector/i8389.scala diff --git a/tests/run-custom-args/tasty-inspector/i8460.scala b/tests/run-tasty-inspector/i8460.scala similarity index 100% rename from tests/run-custom-args/tasty-inspector/i8460.scala rename to tests/run-tasty-inspector/i8460.scala diff --git a/tests/run-custom-args/tasty-inspector/i9970.scala b/tests/run-tasty-inspector/i9970.scala similarity index 100% rename from tests/run-custom-args/tasty-inspector/i9970.scala rename to tests/run-tasty-inspector/i9970.scala diff --git a/tests/run-custom-args/tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala similarity index 83% rename from tests/run-custom-args/tasty-inspector/stdlibExperimentalDefinitions.scala rename to tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala index 5ccdb753e9b3..12ea8eb26c47 100644 --- a/tests/run-custom-args/tasty-inspector/stdlibExperimentalDefinitions.scala +++ b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala @@ -49,24 +49,30 @@ val 
experimentalDefinitionInLibrary = Set( //// New feature: capture checking "scala.annotation.capability", - "scala.annotation.internal.WithPureFuns", - "scala.annotation.internal.requiresCapability", "scala.annotation.retains", "scala.annotation.retainsByName", + "scala.Pure", "scala.caps", "scala.caps$", //// New feature: into "scala.annotation.allowConversions", + //// New feature: @publicInBinary + "scala.annotation.publicInBinary", + //// New feature: Macro annotations "scala.annotation.MacroAnnotation", + //// New feature: -Ysafe-init-global + "scala.annotation.init", + "scala.annotation.init$", + "scala.annotation.init$.widen", + "scala.annotation.init$.region", + //// New APIs: Quotes - "scala.quoted.Quotes.reflectModule.FlagsModule.AbsOverride", // Can be stabilized in 3.4.0 (unsure) or later "scala.quoted.Quotes.reflectModule.CompilationInfoModule.XmacroSettings", - "scala.quoted.Quotes.reflectModule.FlagsModule.JavaAnnotation", // Cant be stabilized yet. // Need newClass variant that can add constructor parameters. // Need experimental annotation macros to check that design works. @@ -76,21 +82,20 @@ val experimentalDefinitionInLibrary = Set( "scala.quoted.Quotes.reflectModule.SymbolModule.newModule", "scala.quoted.Quotes.reflectModule.SymbolModule.freshName", "scala.quoted.Quotes.reflectModule.SymbolMethods.info", - // Quotes for functions with erased parameters. - "scala.quoted.Quotes.reflectModule.MethodTypeMethods.erasedParams", - "scala.quoted.Quotes.reflectModule.MethodTypeMethods.hasErasedParams", - "scala.quoted.Quotes.reflectModule.TermParamClauseMethods.erasedArgs", - "scala.quoted.Quotes.reflectModule.TermParamClauseMethods.hasErasedArgs", - "scala.quoted.Quotes.reflectModule.defnModule.ErasedFunctionClass", // New feature: functions with erased parameters. // Need erasedDefinitions enabled. 
- "scala.runtime.ErasedFunction", + "scala.runtime.ErasedFunction", // will be removed (now using PolyFunction) "scala.quoted.Quotes.reflectModule.MethodTypeMethods.erasedParams", "scala.quoted.Quotes.reflectModule.MethodTypeMethods.hasErasedParams", "scala.quoted.Quotes.reflectModule.TermParamClauseMethods.erasedArgs", "scala.quoted.Quotes.reflectModule.TermParamClauseMethods.hasErasedArgs", - "scala.quoted.Quotes.reflectModule.defnModule.ErasedFunctionClass" + + // New feature: reverse method on Tuple + "scala.Tuple.reverse", // can be stabilized in 3.5 + "scala.Tuple$.Reverse", // can be stabilized in 3.5 + "scala.Tuple$.ReverseOnto", // can be stabilized in 3.5 + "scala.runtime.Tuples$.reverse", // can be stabilized in 3.5 ) @@ -117,9 +122,9 @@ val experimentalDefinitionInLibrary = Set( |Found @experimental definition in library not listed: |${missingFromList.toSeq.sorted.mkString("\n")} | - |If added new experimental definitions to the library, add them to the list in tests/run-custom-args/tasty-inspector/stdlibExperimentalDefinitions.scala + |If added new experimental definitions to the library, add them to the list in tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala | - |Test only: sbt "scala3-bootstrapped/testCompilation tests/run-custom-args/tasty-inspector/stdlibExperimentalDefinitions.scala" + |Test only: sbt "scala3-bootstrapped/testCompilation tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala" |""".stripMargin ) assert(missingInLibrary.isEmpty, @@ -128,9 +133,9 @@ val experimentalDefinitionInLibrary = Set( |Listed @experimental definition was not found in the library |${missingInLibrary.toSeq.sorted.mkString("\n")} | - |If experimental definition was removed or stabilized, remove from the list in tests/run-custom-args/tasty-inspector/stdlibExperimentalDefinitions.scala + |If experimental definition was removed or stabilized, remove from the list in tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala | - |Test only: sbt "scala3-bootstrapped/testCompilation tests/run-custom-args/tasty-inspector/stdlibExperimentalDefinitions.scala" + |Test only: sbt "scala3-bootstrapped/testCompilation tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala" |""".stripMargin ) } diff --git a/tests/run-custom-args/tasty-inspector/tasty-documentation-inspector/Foo.scala b/tests/run-tasty-inspector/tasty-documentation-inspector/Foo.scala similarity index 100% rename from tests/run-custom-args/tasty-inspector/tasty-documentation-inspector/Foo.scala rename to tests/run-tasty-inspector/tasty-documentation-inspector/Foo.scala diff --git a/tests/run-custom-args/tasty-inspector/tasty-documentation-inspector/Test.scala b/tests/run-tasty-inspector/tasty-documentation-inspector/Test.scala similarity index 100% rename from tests/run-custom-args/tasty-inspector/tasty-documentation-inspector/Test.scala rename to tests/run-tasty-inspector/tasty-documentation-inspector/Test.scala diff --git a/tests/run-custom-args/tasty-inspector/tasty-inspector/Foo.scala b/tests/run-tasty-inspector/tasty-inspector/Foo.scala similarity index 100% rename from tests/run-custom-args/tasty-inspector/tasty-inspector/Foo.scala rename to tests/run-tasty-inspector/tasty-inspector/Foo.scala diff --git a/tests/run-custom-args/tasty-inspector/tasty-inspector/Test.scala b/tests/run-tasty-inspector/tasty-inspector/Test.scala similarity index 100% rename from tests/run-custom-args/tasty-inspector/tasty-inspector/Test.scala rename to tests/run-tasty-inspector/tasty-inspector/Test.scala diff --git 
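Among the additions to the experimental-definitions whitelist above is `scala.Tuple.reverse` with its type-level companions `Reverse` and `ReverseOnto`. Assuming the member-method shape those names imply (a sketch of plausible usage, not taken from the patch), a caller opted into experimental APIs could write:

```scala
import scala.annotation.experimental

@experimental
@main def demo =
  val t = (1, "two", 3.0)
  // The Reverse/ReverseOnto match types make the result type precise:
  // reversing (Int, String, Double) yields (Double, String, Int).
  val r: (Double, String, Int) = t.reverse
  println(r) // (3.0,two,1)
```
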
a/tests/run-tasty-inspector/tastyPaths.check b/tests/run-tasty-inspector/tastyPaths.check new file mode 100644 index 000000000000..f9d3486040b8 --- /dev/null +++ b/tests/run-tasty-inspector/tastyPaths.check @@ -0,0 +1,2 @@ +List(/tastyPaths/I8163.tasty) +`reflect.SourceFile.current` cannot be called within the TASTy ispector diff --git a/tests/run-custom-args/tasty-inspector/tastyPaths.scala b/tests/run-tasty-inspector/tastyPaths.scala similarity index 100% rename from tests/run-custom-args/tasty-inspector/tastyPaths.scala rename to tests/run-tasty-inspector/tastyPaths.scala diff --git a/tests/run/Pouring.check b/tests/run/Pouring.check index f07f29105c0b..c9ab84a226bb 100644 --- a/tests/run/Pouring.check +++ b/tests/run/Pouring.check @@ -1,2 +1 @@ -Vector(Empty(0), Empty(1), Fill(0), Fill(1), Pour(0,1), Pour(1,0)) -Fill(1) Pour(1,0) Empty(0) Pour(1,0) Fill(1) Pour(1,0) --> Vector(4, 6) +Illegal command line: more arguments expected diff --git a/tests/run/Pouring.scala b/tests/run/Pouring.scala index 6f4611af8bfc..5bb2a92ff8e3 100644 --- a/tests/run/Pouring.scala +++ b/tests/run/Pouring.scala @@ -1,37 +1,35 @@ -class Pouring(capacity: Vector[Int]): - type Glass = Int - type Content = Vector[Int] +type Glass = Int +type Levels = Vector[Int] - enum Move: - def apply(content: Content): Content = this match - case Empty(g) => content.updated(g, 0) - case Fill(g) => content.updated(g, capacity(g)) - case Pour(from, to) => - val amount = content(from) min (capacity(to) - content(to)) - extension (s: Content) def adjust(g: Glass, delta: Int) = s.updated(g, s(g) + delta) - content.adjust(from, -amount).adjust(to, amount) +class Pouring(capacity: Levels): + enum Move: case Empty(glass: Glass) case Fill(glass: Glass) case Pour(from: Glass, to: Glass) + + def apply(levels: Levels): Levels = this match + case Empty(glass) => + levels.updated(glass, 0) + case Fill(glass) => + levels.updated(glass, capacity(glass)) + case Pour(from, to) => + val amount = levels(from) min (capacity(to) - levels(to)) + levels.updated(from, levels(from) - amount) + .updated(to, levels(to) + amount) end Move + val glasses = 0 until capacity.length val moves = - val glasses = 0 until capacity.length - - (for g <- glasses yield Move.Empty(g)) + (for g <- glasses yield Move.Empty(g)) ++ (for g <- glasses yield Move.Fill(g)) ++ (for g1 <- glasses; g2 <- glasses if g1 != g2 yield Move.Pour(g1, g2)) - class Path(history: List[Move], val endContent: Content): + class Path(history: List[Move], val endContent: Levels): def extend(move: Move) = Path(move :: history, move(endContent)) override def toString = s"${history.reverse.mkString(" ")} --> $endContent" - end Path - - val initialContent: Content = capacity.map(x => 0) - val initialPath = Path(Nil, initialContent) - def from(paths: Set[Path], explored: Set[Content]): LazyList[Set[Path]] = + def from(paths: Set[Path], explored: Set[Levels]): LazyList[Set[Path]] = if paths.isEmpty then LazyList.empty else val extensions = @@ -44,6 +42,8 @@ class Pouring(capacity: Vector[Int]): paths #:: from(extensions, explored ++ extensions.map(_.endContent)) def solutions(target: Int): LazyList[Path] = + val initialContent: Levels = capacity.map(_ => 0) + val initialPath = Path(Nil, initialContent) for paths <- from(Set(initialPath), Set(initialContent)) path <- paths @@ -51,7 +51,7 @@ class Pouring(capacity: Vector[Int]): yield path end Pouring -@main def Test = - val problem = Pouring(Vector(4, 7)) - println(problem.moves) - println(problem.solutions(6).head) +@main def Test(target: Int, 
capacities: Int*) = + val problem = Pouring(capacities.toVector) + println(s"Moves: ${problem.moves}") + println(s"Solution: ${problem.solutions(target).headOption}") diff --git a/tests/run/ReplacementMatching.scala b/tests/run/ReplacementMatching.scala index 846f1c0a0966..b233709a7cae 100644 --- a/tests/run/ReplacementMatching.scala +++ b/tests/run/ReplacementMatching.scala @@ -32,12 +32,12 @@ object Test { def groupsMatching: Unit = { val Date = """(\d+)/(\d+)/(\d+)""".r - for (Regex.Groups(a, b, c) <- Date findFirstMatchIn "1/1/2001 marks the start of the millennium. 31/12/2000 doesn't.") { + for (case Regex.Groups(a, b, c) <- Date findFirstMatchIn "1/1/2001 marks the start of the millennium. 31/12/2000 doesn't.") { assert(a == "1") assert(b == "1") assert(c == "2001") } - for (Regex.Groups(a, b, c) <- (Date findAllIn "1/1/2001 marks the start of the millennium. 31/12/2000 doesn't.").matchData) { + for (case Regex.Groups(a, b, c) <- (Date findAllIn "1/1/2001 marks the start of the millennium. 31/12/2000 doesn't.").matchData) { assert(a == "1" || a == "31") assert(b == "1" || b == "12") assert(c == "2001" || c == "2000") diff --git a/tests/run/StringConcat.scala b/tests/run/StringConcat.scala index d0a9c32e1016..96d97bf6e664 100644 --- a/tests/run/StringConcat.scala +++ b/tests/run/StringConcat.scala @@ -1,3 +1,9 @@ +// scalajs: --skip + +// Under JVM 8, +// this sometimes blows a StackOverflowError +// in PrepJSInterop + @main def Test() = { // This should generally obey 15.18.1. of the JLS (String Concatenation Operator +) diff --git a/tests/run-custom-args/fatal-warnings/convertible.check b/tests/run/convertible.check similarity index 100% rename from tests/run-custom-args/fatal-warnings/convertible.check rename to tests/run/convertible.check diff --git a/tests/run/convertible.scala b/tests/run/convertible.scala new file mode 100644 index 000000000000..0670d1949fd9 --- /dev/null +++ b/tests/run/convertible.scala @@ -0,0 +1,34 @@ +//> using options -Xfatal-warnings + +import language.experimental.into + +class Text(val str: String) + +given Conversion[String, Text] = Text(_) + +@main def Test = + + def f(x: into Text, y: => into Text, zs: into Text*) = + println(s"${x.str} ${y.str} ${zs.map(_.str).mkString(" ")}") + + f("abc", "def") // ok + f("abc", "def", "xyz", "uvw") // ok + f("abc", "def", "xyz", Text("uvw")) // ok + + def g(x: into () => Text) = + println(x().str) + + g(() => "hi") + +trait A[X]: + def f(x: X): Unit = () + +trait B[X] extends A[X]: + override def f(x: X) = super.f(x) + +trait C[X] extends A[X]: + override def f(x: into X) = super.f(x) + +class D[X] extends B[X], C[X] + +def f = new D[Text].f("abc") diff --git a/tests/run-custom-args/defaults-serizaliable-no-forwarders.scala b/tests/run/defaults-serizaliable-no-forwarders.scala similarity index 92% rename from tests/run-custom-args/defaults-serizaliable-no-forwarders.scala rename to tests/run/defaults-serizaliable-no-forwarders.scala index b2762cf443bc..93ed985ce2aa 100644 --- a/tests/run-custom-args/defaults-serizaliable-no-forwarders.scala +++ b/tests/run/defaults-serizaliable-no-forwarders.scala @@ -1,3 +1,6 @@ +//> using options -Xmixin-force-forwarders:false +// scalajs: --skip + import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream} trait T1 extends Serializable { diff --git a/tests/run/enum-java.check b/tests/run/enum-java.check index 667e6a4df37c..ad609d5bfe22 100644 --- a/tests/run/enum-java.check +++ b/tests/run/enum-java.check @@ -26,7 +26,7 @@ MONDAY : 0 
TUESDAY : 1 SATURDAY : 2 By-name value: MONDAY -Correctly failed to retrieve illegal name, message: enum case not found: stuff +Correctly failed to retrieve illegal name, message: enum A has no case with name: stuff Collections Test Retrieving Monday: workday diff --git a/tests/run/enum-values.scala b/tests/run/enum-values.scala index 41543e196561..51ad2467b1c8 100644 --- a/tests/run/enum-values.scala +++ b/tests/run/enum-values.scala @@ -61,7 +61,7 @@ enum ClassOnly: // this should still generate the `ordinal` and `fromOrdinal` co catch case e: java.lang.reflect.InvocationTargetException => // TODO: maybe reflect.Selectable should catch this? assert(e.getCause.isInstanceOf[java.util.NoSuchElementException] - && e.getCause.getMessage == ordinal.toString) + && e.getCause.getMessage == s"enum ${companion.getClass.getName.stripSuffix("$")} has no case with ordinal: $ordinal") fetchFromOrdinal(companion = Color, compare = Red, Green, Blue) fetchFromOrdinal(companion = Suits, compare = Clubs, Spades, Diamonds, Hearts) diff --git a/tests/run/enums-java-compat.check b/tests/run/enums-java-compat.check index 0acf636fe377..df26b421acba 100644 --- a/tests/run/enums-java-compat.check +++ b/tests/run/enums-java-compat.check @@ -6,4 +6,4 @@ TUESDAY : 1 SATURDAY : 2 Stuff : 3 By-name value: MONDAY -Correctly failed to retrieve illegal name, message: enum case not found: stuff +Correctly failed to retrieve illegal name, message: enum A has no case with name: stuff diff --git a/tests/run-custom-args/erased/erased-1.check b/tests/run/erased-1.check similarity index 100% rename from tests/run-custom-args/erased/erased-1.check rename to tests/run/erased-1.check diff --git a/tests/run/erased-1.scala b/tests/run/erased-1.scala new file mode 100644 index 000000000000..9875d7b1d68b --- /dev/null +++ b/tests/run/erased-1.scala @@ -0,0 +1,16 @@ +//> using options -language:experimental.erasedDefinitions + +object Test { + + def main(args: Array[String]): Unit = { + fun(foo) + } + + def foo = { + println("foo") + 42 + } + def fun(erased boo: Int): Unit = { + println("fun") + } +} diff --git a/tests/run-custom-args/erased/erased-10.check b/tests/run/erased-10.check similarity index 100% rename from tests/run-custom-args/erased/erased-10.check rename to tests/run/erased-10.check diff --git a/tests/run-custom-args/erased/erased-10.scala b/tests/run/erased-10.scala similarity index 82% rename from tests/run-custom-args/erased/erased-10.scala rename to tests/run/erased-10.scala index 25c86c0267c8..004d07b4de37 100644 --- a/tests/run-custom-args/erased/erased-10.scala +++ b/tests/run/erased-10.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + object Test { def main(args: Array[String]): Unit = { diff --git a/tests/run-custom-args/erased/erased-11.check b/tests/run/erased-11.check similarity index 100% rename from tests/run-custom-args/erased/erased-11.check rename to tests/run/erased-11.check diff --git a/tests/run-custom-args/erased/erased-11.scala b/tests/run/erased-11.scala similarity index 90% rename from tests/run-custom-args/erased/erased-11.scala rename to tests/run/erased-11.scala index 36a8d08f8d7f..0963f5105142 100644 --- a/tests/run-custom-args/erased/erased-11.scala +++ b/tests/run/erased-11.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + object Test { def main(args: Array[String]): Unit = { diff --git a/tests/run-custom-args/erased/erased-12.check b/tests/run/erased-12.check similarity index 100% rename from 
tests/run-custom-args/erased/erased-12.check rename to tests/run/erased-12.check diff --git a/tests/run-custom-args/erased/erased-12.scala b/tests/run/erased-12.scala similarity index 78% rename from tests/run-custom-args/erased/erased-12.scala rename to tests/run/erased-12.scala index cde677050dd6..be641b8b95c2 100644 --- a/tests/run-custom-args/erased/erased-12.scala +++ b/tests/run/erased-12.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + object Test { def main(args: Array[String]): Unit = { diff --git a/tests/run-custom-args/erased/erased-13.check b/tests/run/erased-13.check similarity index 100% rename from tests/run-custom-args/erased/erased-13.check rename to tests/run/erased-13.check diff --git a/tests/run-custom-args/erased/erased-13.scala b/tests/run/erased-13.scala similarity index 75% rename from tests/run-custom-args/erased/erased-13.scala rename to tests/run/erased-13.scala index 442a3eab4c01..55dce8ac3e7b 100644 --- a/tests/run-custom-args/erased/erased-13.scala +++ b/tests/run/erased-13.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + object Test { def main(args: Array[String]): Unit = { diff --git a/tests/run-custom-args/erased/erased-14.check b/tests/run/erased-14.check similarity index 100% rename from tests/run-custom-args/erased/erased-14.check rename to tests/run/erased-14.check diff --git a/tests/run/erased-14.scala b/tests/run/erased-14.scala new file mode 100644 index 000000000000..7d10af210a41 --- /dev/null +++ b/tests/run/erased-14.scala @@ -0,0 +1,17 @@ +//> using options -language:experimental.erasedDefinitions + +object Test { + + def main(args: Array[String]): Unit = { + new Foo + } + +} + +class Foo { + erased val x: Int = { + println("x") + 42 + } + println("Foo") +} diff --git a/tests/run-custom-args/erased/erased-16.check b/tests/run/erased-16.check similarity index 100% rename from tests/run-custom-args/erased/erased-16.check rename to tests/run/erased-16.check diff --git a/tests/run-custom-args/erased/erased-16.scala b/tests/run/erased-16.scala similarity index 82% rename from tests/run-custom-args/erased/erased-16.scala rename to tests/run/erased-16.scala index 879af76765b7..f879165bffa1 100644 --- a/tests/run-custom-args/erased/erased-16.scala +++ b/tests/run/erased-16.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + object Test { def main(args: Array[String]): Unit = { diff --git a/tests/run-custom-args/erased/erased-17.check b/tests/run/erased-17.check similarity index 100% rename from tests/run-custom-args/erased/erased-17.check rename to tests/run/erased-17.check diff --git a/tests/run-custom-args/erased/erased-17.scala b/tests/run/erased-17.scala similarity index 77% rename from tests/run-custom-args/erased/erased-17.scala rename to tests/run/erased-17.scala index 9b9dbea70264..b64b01952192 100644 --- a/tests/run-custom-args/erased/erased-17.scala +++ b/tests/run/erased-17.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + object Test { def main(args: Array[String]): Unit = { diff --git a/tests/run-custom-args/erased/erased-18.check b/tests/run/erased-18.check similarity index 100% rename from tests/run-custom-args/erased/erased-18.check rename to tests/run/erased-18.check diff --git a/tests/run-custom-args/erased/erased-18.scala b/tests/run/erased-18.scala similarity index 76% rename from tests/run-custom-args/erased/erased-18.scala rename to tests/run/erased-18.scala index c53d89113a3a..46f7e44c7309 100644 
--- a/tests/run-custom-args/erased/erased-18.scala +++ b/tests/run/erased-18.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + object Test { def main(args: Array[String]): Unit = { diff --git a/tests/run-custom-args/erased/erased-19.check b/tests/run/erased-19.check similarity index 100% rename from tests/run-custom-args/erased/erased-19.check rename to tests/run/erased-19.check diff --git a/tests/run/erased-19.scala b/tests/run/erased-19.scala new file mode 100644 index 000000000000..4bbcd1dd3bba --- /dev/null +++ b/tests/run/erased-19.scala @@ -0,0 +1,12 @@ +//> using options -language:experimental.erasedDefinitions + +object Test { + + def main(args: Array[String]): Unit = { + { + (erased x: Int) => 42 + } + + println("ok") + } +} diff --git a/tests/run-custom-args/erased/erased-20.check b/tests/run/erased-20.check similarity index 100% rename from tests/run-custom-args/erased/erased-20.check rename to tests/run/erased-20.check diff --git a/tests/run-custom-args/erased/erased-20.scala b/tests/run/erased-20.scala similarity index 76% rename from tests/run-custom-args/erased/erased-20.scala rename to tests/run/erased-20.scala index f54fd417eb83..7d07260c2d08 100644 --- a/tests/run-custom-args/erased/erased-20.scala +++ b/tests/run/erased-20.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + object Test { def main(args: Array[String]): Unit = { diff --git a/tests/run-custom-args/erased/erased-21.check b/tests/run/erased-21.check similarity index 100% rename from tests/run-custom-args/erased/erased-21.check rename to tests/run/erased-21.check diff --git a/tests/run-custom-args/erased/erased-21.scala b/tests/run/erased-21.scala similarity index 77% rename from tests/run-custom-args/erased/erased-21.scala rename to tests/run/erased-21.scala index a85c6ea62f98..804f8f9c53ee 100644 --- a/tests/run-custom-args/erased/erased-21.scala +++ b/tests/run/erased-21.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + object Test { type UU[T] = (erased T) => Int diff --git a/tests/run-custom-args/erased/erased-22.check b/tests/run/erased-22.check similarity index 100% rename from tests/run-custom-args/erased/erased-22.check rename to tests/run/erased-22.check diff --git a/tests/run-custom-args/erased/erased-22.scala b/tests/run/erased-22.scala similarity index 79% rename from tests/run-custom-args/erased/erased-22.scala rename to tests/run/erased-22.scala index 954b67604eb6..df05773e00b1 100644 --- a/tests/run-custom-args/erased/erased-22.scala +++ b/tests/run/erased-22.scala @@ -1,3 +1,4 @@ +//> using options -language:experimental.erasedDefinitions object Test { diff --git a/tests/run-custom-args/erased/erased-25.check b/tests/run/erased-25.check similarity index 100% rename from tests/run-custom-args/erased/erased-25.check rename to tests/run/erased-25.check diff --git a/tests/run-custom-args/erased/erased-25.scala b/tests/run/erased-25.scala similarity index 77% rename from tests/run-custom-args/erased/erased-25.scala rename to tests/run/erased-25.scala index 97a85dd1460d..bcfe4fe6d444 100644 --- a/tests/run-custom-args/erased/erased-25.scala +++ b/tests/run/erased-25.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + object Test { def main(args: Array[String]): Unit = { val ds: Dataset = new Dataset diff --git a/tests/run-custom-args/erased/erased-26.check b/tests/run/erased-26.check similarity index 100% rename from tests/run-custom-args/erased/erased-26.check rename 
to tests/run/erased-26.check diff --git a/tests/run/erased-26.scala b/tests/run/erased-26.scala new file mode 100644 index 000000000000..b3efd62bb9f6 --- /dev/null +++ b/tests/run/erased-26.scala @@ -0,0 +1,8 @@ +//> using options -language:experimental.erasedDefinitions + +object Test { + def main(args: Array[String]): Unit = { + col("abc")(true) + } + def col[S](s: String)(erased ev: Boolean): Unit = println(s) +} diff --git a/tests/run-custom-args/erased/erased-27.check b/tests/run/erased-27.check similarity index 100% rename from tests/run-custom-args/erased/erased-27.check rename to tests/run/erased-27.check diff --git a/tests/run-custom-args/erased/erased-27.scala b/tests/run/erased-27.scala similarity index 78% rename from tests/run-custom-args/erased/erased-27.scala rename to tests/run/erased-27.scala index 68b551520daa..271a9ca47db9 100644 --- a/tests/run-custom-args/erased/erased-27.scala +++ b/tests/run/erased-27.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + object Test { def main(args: Array[String]): Unit = { ({ diff --git a/tests/run-custom-args/erased/erased-28.check b/tests/run/erased-28.check similarity index 100% rename from tests/run-custom-args/erased/erased-28.check rename to tests/run/erased-28.check diff --git a/tests/run-custom-args/erased/erased-28.scala b/tests/run/erased-28.scala similarity index 83% rename from tests/run-custom-args/erased/erased-28.scala rename to tests/run/erased-28.scala index 8b916b810dff..ae33b3dc5844 100644 --- a/tests/run-custom-args/erased/erased-28.scala +++ b/tests/run/erased-28.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + object Test { var a = true def main(args: Array[String]): Unit = { diff --git a/tests/run-custom-args/erased/erased-3.check b/tests/run/erased-3.check similarity index 100% rename from tests/run-custom-args/erased/erased-3.check rename to tests/run/erased-3.check diff --git a/tests/run/erased-3.scala b/tests/run/erased-3.scala new file mode 100644 index 000000000000..a138e7dfd6f7 --- /dev/null +++ b/tests/run/erased-3.scala @@ -0,0 +1,23 @@ +//> using options -language:experimental.erasedDefinitions + +object Test { + + def main(args: Array[String]): Unit = { + fun(foo1)(foo2) + } + + def foo1: Int = { + println("foo1") + 42 + } + + def foo2: String = { + println("foo2") + "abc" + } + + def fun(erased a: Int)(erased b: String): Unit = { + println("fun") + } + +} diff --git a/tests/run-custom-args/erased/erased-4.check b/tests/run/erased-4.check similarity index 100% rename from tests/run-custom-args/erased/erased-4.check rename to tests/run/erased-4.check diff --git a/tests/run/erased-4.scala b/tests/run/erased-4.scala new file mode 100644 index 000000000000..2e1e2ad64bbf --- /dev/null +++ b/tests/run/erased-4.scala @@ -0,0 +1,27 @@ +//> using options -language:experimental.erasedDefinitions + +object Test { + + def main(args: Array[String]): Unit = { + fun(foo1)(foo2) + fun2(foo1)(foo2) + } + + def foo1: Int = { + println("foo1") + 42 + } + + def foo2: String = { + println("foo2") + "abc" + } + + def fun(a: Int)(erased b: String): Unit = { + println("fun " + a) + } + + def fun2(erased a: Int)(b: String): Unit = { + println("fun2 " + b) + } +} diff --git a/tests/run-custom-args/erased/erased-5.check b/tests/run/erased-5.check similarity index 100% rename from tests/run-custom-args/erased/erased-5.check rename to tests/run/erased-5.check diff --git a/tests/run/erased-5.scala b/tests/run/erased-5.scala new file mode 100644 index 
000000000000..ced18c2426be --- /dev/null +++ b/tests/run/erased-5.scala @@ -0,0 +1,22 @@ +//> using options -language:experimental.erasedDefinitions + +object Test { + + def main(args: Array[String]): Unit = { + fun(foo(1))(foo(2))(foo(3))(foo(4)) + fun2(foo(1))(foo(2))(foo(3))(foo(4)) + } + + def foo(i: Int): Int = { + println("foo") + i + } + + def fun(a: Int)(erased b: Int)(c: Int)(erased d: Int): Unit = { + println("fun " + a + " " + c) + } + + def fun2(erased a2: Int)(b2: Int)(erased c2: Int)(d2: Int): Unit = { + println("fun2 " + b2 + " " + d2) + } +} diff --git a/tests/run-custom-args/erased/erased-6.check b/tests/run/erased-6.check similarity index 100% rename from tests/run-custom-args/erased/erased-6.check rename to tests/run/erased-6.check diff --git a/tests/run/erased-6.scala b/tests/run/erased-6.scala new file mode 100644 index 000000000000..4577aa7a03e9 --- /dev/null +++ b/tests/run/erased-6.scala @@ -0,0 +1,18 @@ +//> using options -language:experimental.erasedDefinitions + +object Test { + + def main(args: Array[String]): Unit = { + new Foo(foo) + } + + def foo: Int = { + println("foo") + 42 + } + +} + +class Foo(erased a: Int) { + println("Foo") +} diff --git a/tests/run-custom-args/erased/erased-7.check b/tests/run/erased-7.check similarity index 100% rename from tests/run-custom-args/erased/erased-7.check rename to tests/run/erased-7.check diff --git a/tests/run-custom-args/erased/erased-7.scala b/tests/run/erased-7.scala similarity index 79% rename from tests/run-custom-args/erased/erased-7.scala rename to tests/run/erased-7.scala index 6027c2e1572c..080882503b24 100644 --- a/tests/run-custom-args/erased/erased-7.scala +++ b/tests/run/erased-7.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + object Test { def main(args: Array[String]): Unit = { diff --git a/tests/run-custom-args/erased/erased-8.check b/tests/run/erased-8.check similarity index 100% rename from tests/run-custom-args/erased/erased-8.check rename to tests/run/erased-8.check diff --git a/tests/run-custom-args/erased/erased-8.scala b/tests/run/erased-8.scala similarity index 79% rename from tests/run-custom-args/erased/erased-8.scala rename to tests/run/erased-8.scala index afa637e709ef..ef79e27b75fb 100644 --- a/tests/run-custom-args/erased/erased-8.scala +++ b/tests/run/erased-8.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + object Test { def main(args: Array[String]): Unit = { diff --git a/tests/run-custom-args/erased/erased-9.check b/tests/run/erased-9.check similarity index 100% rename from tests/run-custom-args/erased/erased-9.check rename to tests/run/erased-9.check diff --git a/tests/run-custom-args/erased/erased-9.scala b/tests/run/erased-9.scala similarity index 75% rename from tests/run-custom-args/erased/erased-9.scala rename to tests/run/erased-9.scala index a989bce068a2..1c38d202640d 100644 --- a/tests/run-custom-args/erased/erased-9.scala +++ b/tests/run/erased-9.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + object Test { def main(args: Array[String]): Unit = { diff --git a/tests/run-custom-args/erased/erased-class-are-erased.check b/tests/run/erased-class-are-erased.check similarity index 100% rename from tests/run-custom-args/erased/erased-class-are-erased.check rename to tests/run/erased-class-are-erased.check diff --git a/tests/run-custom-args/erased/erased-class-are-erased.scala b/tests/run/erased-class-are-erased.scala similarity index 80% rename from 
tests/run-custom-args/erased/erased-class-are-erased.scala rename to tests/run/erased-class-are-erased.scala index b48e0265c521..45dc5206e275 100644 --- a/tests/run-custom-args/erased/erased-class-are-erased.scala +++ b/tests/run/erased-class-are-erased.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + object Test { erased class Erased() { println("Oh no!!!") diff --git a/tests/run-custom-args/erased/erased-frameless.check b/tests/run/erased-frameless.check similarity index 100% rename from tests/run-custom-args/erased/erased-frameless.check rename to tests/run/erased-frameless.check diff --git a/tests/run-custom-args/erased/erased-frameless.scala b/tests/run/erased-frameless.scala similarity index 98% rename from tests/run-custom-args/erased/erased-frameless.scala rename to tests/run/erased-frameless.scala index bc52bd4ac8fb..fe654639492a 100644 --- a/tests/run-custom-args/erased/erased-frameless.scala +++ b/tests/run/erased-frameless.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + import scala.annotation.implicitNotFound // Subset of shapeless diff --git a/tests/run/erased-lambdas.scala b/tests/run/erased-lambdas.scala new file mode 100644 index 000000000000..9c107e0fa0d4 --- /dev/null +++ b/tests/run/erased-lambdas.scala @@ -0,0 +1,41 @@ +//> using options -language:experimental.erasedDefinitions +// scalajs: --skip + +// lambdas should parse and work + +type F = (erased Int, String) => String +type S = (Int, erased String) => Int + +def useF(f: F) = f(5, "a") +def useS(f: S) = f(5, "a") + +val ff: F = (erased x, y) => y + +val fs: S = (x, erased y) => x +val fsExpl = (x: Int, erased y: String) => x + +// contextual lambdas should work + +type FC = (Int, erased String) ?=> Int + +def useCtx(f: FC) = f(using 5, "a") + +val fCv: FC = (x, erased y) ?=> x +val fCvExpl = (x: Int, erased y: String) ?=> x + +// nested lambdas should work + +val nested: Int => (String, erased Int) => FC = a => (_, erased _) => (c, erased d) ?=> a + c + +@main def Test() = + assert("a" == useF(ff)) + + assert(5 == useS(fs)) + assert(5 == useS(fsExpl)) + assert(5 == useS { (x, erased y) => x }) + + assert(5 == useCtx(fCv)) + assert(5 == useCtx(fCvExpl)) + assert(5 == useCtx { (x, erased y) ?=> x }) + + assert(6 == useCtx(nested(1)("b", 2))) diff --git a/tests/run-custom-args/erased/erased-machine-state.check b/tests/run/erased-machine-state.check similarity index 100% rename from tests/run-custom-args/erased/erased-machine-state.check rename to tests/run/erased-machine-state.check diff --git a/tests/run-custom-args/erased/erased-machine-state.scala b/tests/run/erased-machine-state.scala similarity index 94% rename from tests/run-custom-args/erased/erased-machine-state.scala rename to tests/run/erased-machine-state.scala index cafe5fa6f1f8..c84f1619366d 100644 --- a/tests/run-custom-args/erased/erased-machine-state.scala +++ b/tests/run/erased-machine-state.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + import scala.annotation.implicitNotFound sealed trait State diff --git a/tests/run-custom-args/erased/erased-poly-ref.check b/tests/run/erased-poly-ref.check similarity index 100% rename from tests/run-custom-args/erased/erased-poly-ref.check rename to tests/run/erased-poly-ref.check diff --git a/tests/run-custom-args/erased/erased-poly-ref.scala b/tests/run/erased-poly-ref.scala similarity index 80% rename from tests/run-custom-args/erased/erased-poly-ref.scala rename to tests/run/erased-poly-ref.scala index 
9f0d3c38d417..59badb71255d 100644 --- a/tests/run-custom-args/erased/erased-poly-ref.scala +++ b/tests/run/erased-poly-ref.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + object Test { def main(args: Array[String]): Unit = { diff --git a/tests/run/erased-select-prefix.check b/tests/run/erased-select-prefix.check new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/tests/run-custom-args/erased/erased-select-prefix.scala b/tests/run/erased-select-prefix.scala similarity index 88% rename from tests/run-custom-args/erased/erased-select-prefix.scala rename to tests/run/erased-select-prefix.scala index 718779ccdcfb..b877a0d209d7 100644 --- a/tests/run-custom-args/erased/erased-select-prefix.scala +++ b/tests/run/erased-select-prefix.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + object Test { def main(args: Array[String]): Unit = { diff --git a/tests/run-custom-args/erased/erased-value-class.check b/tests/run/erased-value-class.check similarity index 100% rename from tests/run-custom-args/erased/erased-value-class.check rename to tests/run/erased-value-class.check diff --git a/tests/run/erased-value-class.scala b/tests/run/erased-value-class.scala new file mode 100644 index 000000000000..37cc233d4aca --- /dev/null +++ b/tests/run/erased-value-class.scala @@ -0,0 +1,18 @@ +//> using options -language:experimental.erasedDefinitions + +object Test { + + def main(args: Array[String]): Unit = { + new Bar(c)(c).foo() + identity(new Bar(c)(c)).foo() + } + + def c = { + println("c") + 3 + } +} + +class Bar(x: Int)(erased y: Int) extends AnyVal { + def foo() = x +} diff --git a/tests/run-custom-args/erased/generic-java-signatures-erased.check b/tests/run/generic-java-signatures-erased.check similarity index 100% rename from tests/run-custom-args/erased/generic-java-signatures-erased.check rename to tests/run/generic-java-signatures-erased.check diff --git a/tests/run-custom-args/erased/generic-java-signatures-erased.scala b/tests/run/generic-java-signatures-erased.scala similarity index 82% rename from tests/run-custom-args/erased/generic-java-signatures-erased.scala rename to tests/run/generic-java-signatures-erased.scala index a9731cc748fb..31c6e07599ad 100644 --- a/tests/run-custom-args/erased/generic-java-signatures-erased.scala +++ b/tests/run/generic-java-signatures-erased.scala @@ -1,3 +1,6 @@ +//> using options -language:experimental.erasedDefinitions +// scalajs: --skip + object MyErased { def f1[U](erased a: Int): Int = 0 } diff --git a/tests/run-custom-args/fatal-warnings/i11050.scala b/tests/run/i11050.scala similarity index 99% rename from tests/run-custom-args/fatal-warnings/i11050.scala rename to tests/run/i11050.scala index f0bdd23031fa..90a6ec84df85 100644 --- a/tests/run-custom-args/fatal-warnings/i11050.scala +++ b/tests/run/i11050.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings + import scala.compiletime.* import scala.deriving.* diff --git a/tests/run-custom-args/erased/i11996.scala b/tests/run/i11996.scala similarity index 89% rename from tests/run-custom-args/erased/i11996.scala rename to tests/run/i11996.scala index 050d36370ef0..9724e12b575e 100644 --- a/tests/run-custom-args/erased/i11996.scala +++ b/tests/run/i11996.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.erasedDefinitions + final class UnivEq[A] object UnivEq: diff --git a/tests/run/i12052/MirrorType.scala b/tests/run/i12052/MirrorType.scala index b8b0b442690e..cdab9a94d600 100644 --- 
a/tests/run/i12052/MirrorType.scala +++ b/tests/run/i12052/MirrorType.scala @@ -1,4 +1,3 @@ -import scala.quoted._ import scala.deriving._ import scala.compiletime.{erasedValue, constValue, summonFrom, summonInline} diff --git a/tests/run/i13215.scala b/tests/run/i13215.scala new file mode 100644 index 000000000000..738eb25d598a --- /dev/null +++ b/tests/run/i13215.scala @@ -0,0 +1,14 @@ +//> using options -Werror -WunstableInlineAccessors + +import scala.annotation.publicInBinary + +package foo { + trait Bar: + inline def baz = Baz + + @publicInBinary private[foo] object Baz +} + +@main def Test: Unit = + val bar = new foo.Bar {} + bar.baz diff --git a/tests/run/i15913.scala b/tests/run/i15913.scala new file mode 100644 index 000000000000..f3e98a3bfd6a --- /dev/null +++ b/tests/run/i15913.scala @@ -0,0 +1,20 @@ +// https://github.com/lampepfl/dotty/issues/15913 + +class injector[F] + +object injectorFactory { + def apply[F](overrides: String*): injector[F] = new injector[F] + + def apply[F]( + bootstrapActivation: Int = ???, + overrides: Seq[String] = Seq.empty, + ): injector[F] = new injector[F] +} + +object Test extends App { + println( + injectorFactory[String]( + bootstrapActivation = 0 + ) + ) +} diff --git a/tests/run/i15988a.scala b/tests/run/i15988a.scala new file mode 100644 index 000000000000..dba5008fd950 --- /dev/null +++ b/tests/run/i15988a.scala @@ -0,0 +1,6 @@ +import scala.compiletime.constValueTuple + +@main def Test: Unit = + assert(constValueTuple[EmptyTuple] == EmptyTuple) + assert(constValueTuple[("foo", 5, 3.14, "bar", false)] == ("foo", 5, 3.14, "bar", false)) + assert(constValueTuple[(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23)] == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23)) diff --git a/tests/run/i15988b.scala b/tests/run/i15988b.scala new file mode 100644 index 000000000000..4b7764d94a18 --- /dev/null +++ b/tests/run/i15988b.scala @@ -0,0 +1,21 @@ +import scala.compiletime.summonAll + +@main def Test: Unit = + assert(summonAll[EmptyTuple] == EmptyTuple) + assert(summonAll[(5, 5, 5)] == (5, 5, 5)) + assert( + summonAll[( + 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, + )] == ( + 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, + )) + +given 5 = 5 diff --git a/tests/run/i16065.scala b/tests/run/i16065.scala new file mode 100644 index 000000000000..59b4f83bc05c --- /dev/null +++ b/tests/run/i16065.scala @@ -0,0 +1,41 @@ +trait Consumer1[T]: + var x: Int = 1 // To force anonymous class generation + def accept(x: T): Unit + +trait Consumer2[T]: + def accept(x: T): Unit + +trait Producer1[T]: + var x: Int = 1 // To force anonymous class generation + def produce(x: Any): T + +trait Producer2[T]: + def produce(x: Any): T + +trait ProdCons1[T]: + var x: Int = 1 // To force anonymous class generation + def apply(x: T): T + +trait ProdCons2[T]: + var x: Int = 1 // To force anonymous class generation + def apply(x: T): T + +object Test { + def main(args: Array[String]): Unit = { + val a1: Consumer1[? >: String] = x => () + a1.accept("foo") + + val a2: Consumer2[? >: String] = x => () + a2.accept("foo") + + val b1: Producer1[? <: String] = x => "" + val bo1: String = b1.produce(1) + + val b2: Producer2[? <: String] = x => "" + val bo2: String = b2.produce(1) + + val c1: ProdCons1[? <: String] = x => x + val c2: ProdCons2[? <: String] = x => x + // Can't do much with `c1` or `c2` but we should still pass Ycheck. 
+ } +} diff --git a/tests/run/i1692.scala b/tests/run/i1692.scala index 09459a073d6e..9416e6101efd 100644 --- a/tests/run/i1692.scala +++ b/tests/run/i1692.scala @@ -22,7 +22,7 @@ class LazyNullable(a: => Int) { lazy val l4Inf = eInf private[this] val i = "I" - // null out i even though the try ends up lifted, because the LazyVals phase runs before the LiftTry phase + // null out i even though the try needs stack stashing lazy val l5 = try i catch { case e: Exception => () } } diff --git a/tests/run/i1692b.scala b/tests/run/i1692b.scala index bd2108038ef4..8f23a0e5a3b3 100644 --- a/tests/run/i1692b.scala +++ b/tests/run/i1692b.scala @@ -24,7 +24,7 @@ class LazyNullable(a: => Int) { @threadUnsafe lazy val l4Inf = try eInf finally () // null out e, since private[this] is inferred private[this] val i = "I" - // null out i even though the try ends up lifted, because the LazyVals phase runs before the LiftTry phase + // null out i even though the try needs stack stashing @threadUnsafe lazy val l5 = try i catch { case e: Exception => () } } diff --git a/tests/run/i17115.check b/tests/run/i17115.check new file mode 100644 index 000000000000..61c83cba41ce --- /dev/null +++ b/tests/run/i17115.check @@ -0,0 +1,2 @@ +4 +5 diff --git a/tests/run/i17115.scala b/tests/run/i17115.scala new file mode 100644 index 000000000000..5a7cac5d0dc1 --- /dev/null +++ b/tests/run/i17115.scala @@ -0,0 +1,9 @@ +trait A[T <: Tuple] { val x: Int } +given empty: A[EmptyTuple] with { val x = 1 } +given inductive[Tup <: NonEmptyTuple](using A[Tuple.Tail[Tup]]): A[Tup] with { val x = summon[A[Tuple.Tail[Tup]]].x + 1 } + +object Test: + def main(args: Array[String]): Unit = + println(summon[A[(String, String, String)]].x) //this line is fine + println(summon[A[(String, String, String, String)]].x) //this line gives error +end Test diff --git a/tests/run/i17555.scala b/tests/run/i17555.scala new file mode 100644 index 000000000000..6458cb7f7efb --- /dev/null +++ b/tests/run/i17555.scala @@ -0,0 +1,17 @@ +class Root { + override def toString() = "Root" +} +trait A extends Root with B { } +trait B { + override def toString() = "B" +} +case class C() extends A { + override def toString() = super.toString() +} +class D() extends A, Serializable { + override def toString() = super.toString() +} + +@main def Test = + assert(C().toString == "B") + assert(D().toString == "B") diff --git a/tests/run/i17584.scala b/tests/run/i17584.scala new file mode 100644 index 000000000000..8444ea250e43 --- /dev/null +++ b/tests/run/i17584.scala @@ -0,0 +1,10 @@ +trait A: + inline def g = 1 +trait B extends A: + def f = super.g +class C extends B + +@main def Test = + val c = C() + assert(c.f == 1) + diff --git a/tests/run/i17584a.scala b/tests/run/i17584a.scala new file mode 100644 index 000000000000..9f0c901159c8 --- /dev/null +++ b/tests/run/i17584a.scala @@ -0,0 +1,11 @@ +trait T: + def g = 2 +trait A extends T: + inline override def g = 1 +trait B extends A: + def f = super.g +class C extends B + +@main def Test = + val c = C() + assert(c.f == 1) \ No newline at end of file diff --git a/tests/run/i17761.check b/tests/run/i17761.check new file mode 100644 index 000000000000..6e31f05b09df --- /dev/null +++ b/tests/run/i17761.check @@ -0,0 +1,8 @@ +Normal +test +ArraySeq(class java.lang.String, int) +ArraySeq(test, 42) +Transparent +test +ArraySeq(class java.lang.String, int) +ArraySeq(test, 42) diff --git a/tests/run/i17761.scala b/tests/run/i17761.scala new file mode 100644 index 000000000000..258773aef940 --- /dev/null +++ 
b/tests/run/i17761.scala @@ -0,0 +1,33 @@ +class MyRecord extends Selectable: + def applyDynamic(name: String, paramClasses: Class[_]*)(args: Any*): Any = { + println(name) + println(paramClasses) + println(args) + () + } + +class MyRecordTransparent extends Selectable: + inline transparent def applyDynamic(name: String, paramClasses: Class[_]*)(args: Any*): Any = { + println(name) + println(paramClasses) + println(args) + () + } + +type Person = MyRecord { + def test(a: String, b: Int): Unit +} + + +type PersonTransparent = MyRecordTransparent { + def test(a: String, b: Int): Unit +} + +val person = MyRecord().asInstanceOf[Person] +val personTransparent = MyRecordTransparent().asInstanceOf[PersonTransparent] + +@main def Test: Unit = + println("Normal") + person.test("test", 42) + println("Transparent") + personTransparent.test("test", 42) \ No newline at end of file diff --git a/tests/run/i18183.findRef.scala b/tests/run/i18183.findRef.scala new file mode 100644 index 000000000000..058f793e4ac1 --- /dev/null +++ b/tests/run/i18183.findRef.scala @@ -0,0 +1,17 @@ +// A minimised reproduction of how an initial change to combineEligibles broke Typer#findRef +case class Foo(n: Int) + +class Test: + import this.toString + + val foo1 = Foo(1) + val foo2 = Foo(2) + + def foo(using Foo): Foo = + import this.* + def bar(using Foo): Foo = summon[Foo] + bar(using foo2) + +object Test extends Test: + def main(args: Array[String]): Unit = + assert(foo(using foo1) eq foo2) diff --git a/tests/run/i18183.given.scala b/tests/run/i18183.given.scala new file mode 100644 index 000000000000..fd8415d383b7 --- /dev/null +++ b/tests/run/i18183.given.scala @@ -0,0 +1,93 @@ +case class Foo(n: Int) + +class Bar(n: Int): + given foo: Foo = new Foo(n) + +class InMethod: + def wild(bar: Bar): Unit = + import bar.* + given foo: Foo = new Foo(2) + assert(foo eq summon[Foo]) + + def givenWild(bar: Bar): Unit = + import bar.{ given, * } + given foo: Foo = new Foo(2) + assert(foo eq summon[Foo]) + + def givn(bar: Bar): Unit = + import bar.given + given foo: Foo = new Foo(2) + assert(foo eq summon[Foo]) + + def givenFoo(bar: Bar): Unit = + import bar.given Foo + given foo: Foo = new Foo(2) + assert(foo eq summon[Foo]) + + def named(bar: Bar): Unit = + import bar.foo + given foo: Foo = new Foo(2) + assert(foo eq summon[Foo]) + + def namedGivenWild(bar: Bar, bar2: Bar): Unit = + import bar.foo, bar2.{ given, * } + assert(bar.foo eq summon[Foo]) + + def givenWildNamed(bar: Bar, bar2: Bar): Unit = + import bar2.{ given, * }, bar.foo + assert(bar.foo eq summon[Foo]) + + def namedWild(bar: Bar, bar2: Bar): Unit = + import bar.foo, bar2.* + assert(bar.foo eq summon[Foo]) + + def wildNamed(bar: Bar, bar2: Bar): Unit = + import bar2.*, bar.foo + assert(bar.foo eq summon[Foo]) + +class InClassWild(bar: Bar): + import bar.* + given foo: Foo = new Foo(2) + assert(foo eq summon[Foo]) + +class InClassGivenWild(bar: Bar): + import bar.{ given, * } + given foo: Foo = new Foo(2) + assert(foo eq summon[Foo]) + +class InClassGiven(bar: Bar): + import bar.given + given foo: Foo = new Foo(2) + assert(foo eq summon[Foo]) + +class InClassGivenFoo(bar: Bar): + import bar.given Foo + given foo: Foo = new Foo(2) + assert(foo eq summon[Foo]) + +class InClassNamed(bar: Bar): + import bar.foo + given foo: Foo = new Foo(2) + assert(foo eq summon[Foo]) + +object Test: + def main(args: Array[String]): Unit = + val bar = new Bar(1) + val bar2 = new Bar(2) + + new InMethod().wild(bar) + new InMethod().givenWild(bar) // was: error + new InMethod().givn(bar) 
// was: error + new InMethod().givenFoo(bar) // was: error + new InMethod().named(bar) // was: error + + new InMethod().namedWild(bar, bar2) + new InMethod().wildNamed(bar, bar2) + new InMethod().namedGivenWild(bar, bar2) // was: error + new InMethod().givenWildNamed(bar, bar2) + + new InClassWild(bar) + new InClassGivenWild(bar) // was: error + new InClassGiven(bar) // was: error + new InClassGivenFoo(bar) // was: error + new InClassNamed(bar) // was: error diff --git a/tests/run/i18183.mixed.scala b/tests/run/i18183.mixed.scala new file mode 100644 index 000000000000..131e73f098de --- /dev/null +++ b/tests/run/i18183.mixed.scala @@ -0,0 +1,141 @@ +case class Foo(n: Int) + +class OldBar(n: Int): + implicit val foo: Foo = new Foo(n) + +class NewBar(n: Int): + given foo: Foo = new Foo(n) + +class OldInMethod: + def wild(bar: OldBar): Unit = + import bar.* + given foo: Foo = new Foo(2) + assert(foo eq summon[Foo]) + + def named(bar: OldBar): Unit = + import bar.foo + given foo: Foo = new Foo(2) + assert(foo eq summon[Foo]) + + def namedWild(bar: OldBar, bar2: NewBar): Unit = + import bar.foo, bar2.* + assert(bar.foo eq summon[Foo]) + + def wildNamed(bar: OldBar, bar2: NewBar): Unit = + import bar2.*, bar.foo + assert(bar.foo eq summon[Foo]) + + def namedGivenWild(bar: OldBar, bar2: NewBar): Unit = + import bar.foo + import bar2.{ given, * } + assert(bar.foo eq summon[Foo]) + + def givenWildNamed(bar: OldBar, bar2: NewBar): Unit = + import bar2.{ given, * }, bar.foo + assert(bar.foo eq summon[Foo]) + +class OldInClassWild(bar: OldBar): + import bar.* + given foo: Foo = new Foo(2) + assert(foo eq summon[Foo]) + +class OldInClassNamed(bar: OldBar): + import bar.foo + given foo: Foo = new Foo(2) + assert(foo eq summon[Foo]) + + +class NewInMethod: + def givenWild(bar: NewBar): Unit = + import bar.{ given, * } + implicit val foo: Foo = new Foo(2) + assert(foo eq summon[Foo]) + + def wild(bar: NewBar): Unit = + import bar.* + implicit val foo: Foo = new Foo(2) + assert(foo eq summon[Foo]) + + def givn(bar: NewBar): Unit = + import bar.given + implicit val foo: Foo = new Foo(2) + assert(foo eq summon[Foo]) + + def givenFoo(bar: NewBar): Unit = + import bar.given Foo + implicit val foo: Foo = new Foo(2) + assert(foo eq summon[Foo]) + + def named(bar: NewBar): Unit = + import bar.foo + implicit val foo: Foo = new Foo(2) + assert(foo eq summon[Foo]) + + def namedWild(bar: NewBar, bar2: OldBar): Unit = + import bar.foo, bar2.* + assert(bar.foo eq summon[Foo]) + + def wildNamed(bar: NewBar, bar2: OldBar): Unit = + import bar2.*, bar.foo + assert(bar.foo eq summon[Foo]) + +class NewInClassGivenWild(bar: NewBar): + import bar.{ given, * } + implicit val foo: Foo = new Foo(2) + assert(foo eq summon[Foo]) + +class NewInClassWild(bar: NewBar): + import bar.* + implicit val foo: Foo = new Foo(2) + assert(foo eq summon[Foo]) + +class NewInClassGiven(bar: NewBar): + import bar.given + implicit val foo: Foo = new Foo(2) + assert(foo eq summon[Foo]) + +class NewInClassGivenFoo(bar: NewBar): + import bar.given Foo + implicit val foo: Foo = new Foo(2) + assert(foo eq summon[Foo]) + +class NewInClassNamed(bar: NewBar): + import bar.foo + implicit val foo: Foo = new Foo(2) + assert(foo eq summon[Foo]) + + +object Test: + def main(args: Array[String]): Unit = + val oldBar = new OldBar(1) + val newBar = new NewBar(1) + val oldBar2 = new OldBar(2) + val newBar2 = new NewBar(2) + + + new OldInMethod().wild(oldBar) // was: error + new OldInMethod().named(oldBar) // was: error + + new OldInMethod().namedWild(oldBar, 
newBar2) + new OldInMethod().wildNamed(oldBar, newBar2) + new OldInMethod().namedGivenWild(oldBar, newBar2) // was: error + new OldInMethod().givenWildNamed(oldBar, newBar2) + + new OldInClassWild(oldBar) // was: error + new OldInClassNamed(oldBar) // was: error + + + new NewInMethod().wild(newBar) + new NewInMethod().givenWild(newBar) // was: error + new NewInMethod().givn(newBar) // was: error + new NewInMethod().givenFoo(newBar) // was: error + new NewInMethod().named(newBar) // was: error + + new NewInMethod().namedWild(newBar, oldBar2) // was: error + new NewInMethod().wildNamed(newBar, oldBar2) + + new NewInClassWild(newBar) + new NewInClassGivenWild(newBar) // was: error + new NewInClassGiven(newBar) // was: error + new NewInClassGivenFoo(newBar) // was: error + new NewInClassNamed(newBar) // was: error diff --git a/tests/run/i18183.scala b/tests/run/i18183.scala new file mode 100644 index 000000000000..b5279db35a94 --- /dev/null +++ b/tests/run/i18183.scala @@ -0,0 +1,49 @@ +case class Foo(n: Int) + +class Bar(n: Int): + implicit val foo: Foo = new Foo(n) + +class InMethod: + def wild(bar: Bar): Unit = + import bar._ + implicit val foo: Foo = new Foo(2) + assert(foo eq implicitly[Foo]) + + def named(bar: Bar): Unit = + import bar.foo + implicit val foo: Foo = new Foo(2) + assert(foo eq implicitly[Foo]) + + def namedWild(bar: Bar, bar2: Bar): Unit = + import bar.foo + import bar2._ + assert(bar.foo eq implicitly[Foo]) + + def wildNamed(bar: Bar, bar2: Bar): Unit = + import bar2._ + import bar.foo + assert(bar.foo eq implicitly[Foo]) + +class InClassWild(bar: Bar): + import bar._ + implicit val foo: Foo = new Foo(2) + assert(foo eq implicitly[Foo]) + +class InClassNamed(bar: Bar): + import bar.foo + implicit val foo: Foo = new Foo(2) + assert(foo eq implicitly[Foo]) + +object Test: + def main(args: Array[String]): Unit = + val bar = new Bar(1) + val bar2 = new Bar(2) + + new InMethod().wild(bar) // was: error + new InMethod().named(bar) // was: error + + new InMethod().namedWild(bar, bar2) // was: error + new InMethod().wildNamed(bar, bar2) + + new InClassWild(bar) // was: error + new InClassNamed(bar) // was: error diff --git a/tests/run/i18315.scala b/tests/run/i18315.scala new file mode 100644 index 000000000000..85824920efbd --- /dev/null +++ b/tests/run/i18315.scala @@ -0,0 +1,15 @@ +trait Sam1: + type T + def apply(x: T): T + +trait Sam2: + var x: Int = 1 // To force anonymous class generation + type T + def apply(x: T): T + +object Test: + def main(args: Array[String]): Unit = + val s1: Sam1 { type T = String } = x => x.trim + s1.apply("foo") + val s2: Sam2 { type T = Int } = x => x + 1 + s2.apply(1) diff --git a/tests/run/i18612-a.scala b/tests/run/i18612-a.scala new file mode 100644 index 000000000000..286c538ab354 --- /dev/null +++ b/tests/run/i18612-a.scala @@ -0,0 +1,6 @@ +class X extends scala.reflect.Selectable: + val `+` = "1" + +@main def Test = + val x = X() + assert(x.selectDynamic("+") == "1") \ No newline at end of file diff --git a/tests/run/i18612-b.scala b/tests/run/i18612-b.scala new file mode 100644 index 000000000000..ed02ea296e41 --- /dev/null +++ b/tests/run/i18612-b.scala @@ -0,0 +1,6 @@ +class X extends scala.reflect.Selectable: + val plus = "1" + +@main def Test = + val x = X() + assert(x.selectDynamic("plus") == "1") \ No newline at end of file diff --git a/tests/run/i18612-c.scala b/tests/run/i18612-c.scala new file mode 100644 index 000000000000..0f4ca99eac38 --- /dev/null +++ b/tests/run/i18612-c.scala @@ -0,0 +1,7 @@ +class X extends 
scala.reflect.Selectable: + def + = "1" + +@main def Test = + val x = X() + assert(x.selectDynamic("+") == "1") + assert(x.applyDynamic("+")() == "1") \ No newline at end of file diff --git a/tests/run/i18612-d.scala b/tests/run/i18612-d.scala new file mode 100644 index 000000000000..82bc9f67f86a --- /dev/null +++ b/tests/run/i18612-d.scala @@ -0,0 +1,7 @@ +class X extends scala.reflect.Selectable: + def plus = "1" + +@main def Test = + val x = X() + assert(x.selectDynamic("plus") == "1") + assert(x.applyDynamic("plus")() == "1") \ No newline at end of file diff --git a/tests/run/i18638.scala b/tests/run/i18638.scala new file mode 100644 index 000000000000..87877b5ba4e7 --- /dev/null +++ b/tests/run/i18638.scala @@ -0,0 +1,18 @@ +type U[H, T] = (Unit, Unit) +object O: + opaque type U[H, T] <: (Unit, Unit) = (Unit, Unit) + def u: U[Int, Int] = ((), ()) + + +def test1(u: (Unit, Unit)) = u._1 +def test2(u: U[Int, Int]) = u._1 +def test3(u: O.U[Int, Int]) = u._1 +def test4() = + (((), ()): U[Int, Int]) match + case ((), ()) => println("ok") + +@main def Test: Unit = + test1(((), ())) + test2(((), ())) + test3(O.u) + test4() diff --git a/tests/run/i18884.check b/tests/run/i18884.check new file mode 100644 index 000000000000..e70c450a107a --- /dev/null +++ b/tests/run/i18884.check @@ -0,0 +1,4 @@ +Foo1(1) +Foo2(2, 3) +Foo3(4, 5) +Foo4(6, 7) diff --git a/tests/run/i18884/A_1_c2.13.12.scala b/tests/run/i18884/A_1_c2.13.12.scala new file mode 100644 index 000000000000..23a2bee52ba5 --- /dev/null +++ b/tests/run/i18884/A_1_c2.13.12.scala @@ -0,0 +1,10 @@ +// scalajs: --skip + +package lib + +case class Foo1(private[lib] var x: Int) {} +case class Foo2(private[lib] var x: Int, private[lib] var y: Int) +case class Foo3(private[lib] var x: Int, var y: Int) +case class Foo4(var x: Int, private[lib] var y: Int) { + val z: Int = x +} diff --git a/tests/run/i18884/B_2.scala b/tests/run/i18884/B_2.scala new file mode 100644 index 000000000000..5c775529b953 --- /dev/null +++ b/tests/run/i18884/B_2.scala @@ -0,0 +1,14 @@ +import lib.* + +@main def Test: Unit = + test(new Foo1(1)) + test(new Foo2(2, 3)) + test(new Foo3(4, 5)) + test(new Foo4(6, 7)) + +def test(any: Any): Unit = + any match + case Foo1(x) => println(s"Foo1($x)") + case Foo2(x, y) => println(s"Foo2($x, $y)") + case Foo3(x, y) => println(s"Foo3($x, $y)") + case Foo4(x, y) => println(s"Foo4($x, $y)") diff --git a/tests/run/i19178.check b/tests/run/i19178.check new file mode 100644 index 000000000000..e948c0f7dd15 --- /dev/null +++ b/tests/run/i19178.check @@ -0,0 +1,4 @@ +Failure(java.util.NoSuchElementException: enum Foo has no case with ordinal: 3) +Failure(java.lang.IllegalArgumentException: enum Foo has no case with name: Bar) +Failure(java.util.NoSuchElementException: enum bar.Bar has no case with ordinal: 4) +Failure(java.lang.IllegalArgumentException: enum bar.Bar has no case with name: Baz) diff --git a/tests/run/i19178.scala b/tests/run/i19178.scala new file mode 100644 index 000000000000..ba38f4f933b0 --- /dev/null +++ b/tests/run/i19178.scala @@ -0,0 +1,14 @@ +enum Foo: + case A + +package bar { + enum Bar: + case B +} + +@main def Test = + import scala.util.Try + println(Try(Foo.fromOrdinal(3))) + println(Try(Foo.valueOf("Bar"))) + println(Try(bar.Bar.fromOrdinal(4))) + println(Try(bar.Bar.valueOf("Baz"))) diff --git a/tests/run/i19224.scala b/tests/run/i19224.scala new file mode 100644 index 000000000000..76396206dc77 --- /dev/null +++ b/tests/run/i19224.scala @@ -0,0 +1,25 @@ +// scalajs: --skip + +object Test extends App { + val 
field = 1 + def x(): Int => String = (i: Int) => i.toString + def y(): () => String = () => field.toString + + locally { + assert(x() == x()) // true on Scala 2, was false on Scala 3... + assert(y() == y()) // also true if `y` accesses object-local fields + + def z(): Int => String = (i: Int) => i.toString + assert(z() != z()) // lambdas in constructor are not lifted to static, so no memoization (Scala 2 lifts them, though). + } + + val t1 = new C + val t2 = new C + + locally { + assert(t1.x() == t2.x()) // true on Scala 2, was false on Scala 3... + } +} +class C { + def x(): Int => String = (i: Int) => i.toString +} \ No newline at end of file diff --git a/tests/run/i4496a.scala b/tests/run/i4496a.scala index f0d6a1b96aea..153770e242f8 100644 --- a/tests/run/i4496a.scala +++ b/tests/run/i4496a.scala @@ -5,7 +5,7 @@ class Foo3 { var a: Int = 10 } object Test { def main(args: Array[String]): Unit = { assert((new Foo1 : {val a: Int}).a == 10) - assert((new Foo2 : {val a: Int}).a == 10) - assert((new Foo3 : {val a: Int}).a == 10) + assert((new Foo2 : {def a: Int}).a == 10) + assert((new Foo3 : {def a: Int}).a == 10) } } diff --git a/tests/run/i4496b.scala b/tests/run/i4496b.scala index 2e777f64e8ac..a6ed5b105e59 100644 --- a/tests/run/i4496b.scala +++ b/tests/run/i4496b.scala @@ -18,9 +18,10 @@ object Test { // Consider one module upcasting all these instances to T. These casts are clearly well-typed. type T = {val a: Int} + type T2 = {def a: Int} def upcast1(v: Foo1): T = v - def upcast2(v: Foo2): T = v - def upcast3(v: Foo3): T = v + def upcast2(v: Foo2): T2 = v + def upcast3(v: Foo3): T2 = v // These accesses are also clearly well-typed def consume(v: T) = v.a @@ -31,24 +32,32 @@ object Test { assert(v.a == 10) } + def consume2(v: T2) = v.a + inline def consumeInl2(v: T2) = v.a + def verify2(v: T2) = { + assert(consume2(v) == 10) + assert(consumeInl2(v) == 10) + assert(v.a == 10) + } + def test(): Unit = { // These calls are also clearly well-typed, hence can't be rejected. verify(upcast1(new Foo1 { val a = 10 })) - verify(upcast2(new Foo2 { val a = 10 })) - verify(upcast3(new Foo3 { var a = 10 })) + verify2(upcast2(new Foo2 { val a = 10 })) + verify2(upcast3(new Foo3 { var a = 10 })) // Ditto, so we must override access control to the class. verify(upcast1(new FooBar1)) - verify(upcast2(new FooBar2)) - verify(upcast3(new FooBar3)) + verify2(upcast2(new FooBar2)) + verify2(upcast3(new FooBar3)) // Other testcases verify(new {val a = 10} : T) - verify(new {var a = 10} : T) - verify(new {def a = 10} : T) + verify2(new {var a = 10} : T2) + verify2(new {def a = 10} : T2) verify(new Bar1 : T) - verify(new Bar2 : T) - verify(new Bar3 : T) + verify2(new Bar2 : T2) + verify2(new Bar3 : T2) } } @@ -85,7 +94,7 @@ object Test { } object TestStructuralVar { - type T = {val a: Int; def a_=(x: Int): Unit} + type T = {def a: Int; def a_=(x: Int): Unit} def upcast3(v: Foo3): T = v def consume(v: T) = v.a inline def consumeInl(v: T) = v.a diff --git a/tests/run/i4866.check b/tests/run/i4866.check deleted file mode 100644 index f16e2a9c94df..000000000000 --- a/tests/run/i4866.check +++ /dev/null @@ -1,2 +0,0 @@ -Foo #lifted: 0 -FooLifted #lifted: 1 diff --git a/tests/run/i4866.scala b/tests/run/i4866.scala deleted file mode 100644 index 092770fb21cd..000000000000 --- a/tests/run/i4866.scala +++ /dev/null @@ -1,21 +0,0 @@ -// scalajs: --skip - -// Test that try-finally aren't lifted, but try-catch are. 
- -class Foo { - val field = try 1 finally () -} - -class FooLifted { - val field = try 1 catch { case e: Exception => () } finally () -} - -object Test extends App { - def numLifted(o: Object) = { - def isLifted(m: java.lang.reflect.Method) = m.getName.startsWith("lifted") - o.getClass.getDeclaredMethods.count(isLifted) - } - - println("Foo #lifted: " + numLifted(new Foo)) - println("FooLifted #lifted: " + numLifted(new FooLifted)) -} diff --git a/tests/run/i5606.scala b/tests/run/i5606.scala index aedfa0617dfc..7c3c3ae4fc40 100644 --- a/tests/run/i5606.scala +++ b/tests/run/i5606.scala @@ -1,3 +1,5 @@ +//> using options -Yretain-trees + object Test extends App { extension [A, B](f: A => B) def `$` (a: A): B = f(a) diff --git a/tests/run/i6716.check b/tests/run/i6716.check new file mode 100644 index 000000000000..bb85bd267288 --- /dev/null +++ b/tests/run/i6716.check @@ -0,0 +1,2 @@ +Foo +Foo diff --git a/tests/run/i6716.scala b/tests/run/i6716.scala new file mode 100644 index 000000000000..3bef45ac7465 --- /dev/null +++ b/tests/run/i6716.scala @@ -0,0 +1,18 @@ +//> using options -Xfatal-warnings -source future + +trait Monad[T]: + def id: String +class Foo +object Foo { + given Monad[Foo] with { def id = "Foo" } +} + +opaque type Bar = Foo +object Bar { + given Monad[Bar] = summon[Monad[Foo]] // was error, fixed by given loop prevention +} + +object Test extends App { + println(summon[Monad[Foo]].id) + println(summon[Monad[Bar]].id) +} \ No newline at end of file diff --git a/tests/run/i8577a.scala b/tests/run/i8577a.scala index fa9d23cb928a..1a5a50c73721 100644 --- a/tests/run/i8577a.scala +++ b/tests/run/i8577a.scala @@ -1,5 +1,3 @@ -import scala.quoted._ - object Macro: opaque type StrCtx = StringContext def apply(ctx: StringContext): StrCtx = ctx diff --git a/tests/run/i8577b.scala b/tests/run/i8577b.scala index 91744cf48527..2e3e7977f871 100644 --- a/tests/run/i8577b.scala +++ b/tests/run/i8577b.scala @@ -1,5 +1,3 @@ -import scala.quoted._ - object Macro: opaque type StrCtx = StringContext def apply(ctx: StringContext): StrCtx = ctx diff --git a/tests/run/i8577c.scala b/tests/run/i8577c.scala index e49743306ce4..bdc89eb7183a 100644 --- a/tests/run/i8577c.scala +++ b/tests/run/i8577c.scala @@ -1,5 +1,3 @@ -import scala.quoted._ - object Macro: opaque type StrCtx = StringContext def apply(ctx: StringContext): StrCtx = ctx diff --git a/tests/run/i8577d.scala b/tests/run/i8577d.scala index 8af8fad7cd56..b05d7c1ccc14 100644 --- a/tests/run/i8577d.scala +++ b/tests/run/i8577d.scala @@ -1,5 +1,3 @@ -import scala.quoted._ - object Macro: opaque type StrCtx = StringContext def apply(ctx: StringContext): StrCtx = ctx diff --git a/tests/run/i8577e.scala b/tests/run/i8577e.scala index 89d391ded2fd..c112f2bfde10 100644 --- a/tests/run/i8577e.scala +++ b/tests/run/i8577e.scala @@ -1,5 +1,3 @@ -import scala.quoted._ - object Macro: opaque type StrCtx = StringContext def apply(ctx: StringContext): StrCtx = ctx diff --git a/tests/run/i8577f.scala b/tests/run/i8577f.scala index 6ddaedc7b39f..eafa5c6f242e 100644 --- a/tests/run/i8577f.scala +++ b/tests/run/i8577f.scala @@ -1,5 +1,3 @@ -import scala.quoted._ - object Macro: opaque type StrCtx = StringContext def apply(ctx: StringContext): StrCtx = ctx diff --git a/tests/run/i8577g.scala b/tests/run/i8577g.scala index ec5019d4c353..e5d0991543c4 100644 --- a/tests/run/i8577g.scala +++ b/tests/run/i8577g.scala @@ -1,5 +1,3 @@ -import scala.quoted._ - object Macro: opaque type StrCtx = StringContext def apply(ctx: StringContext): StrCtx = ctx diff --git 
a/tests/run/i8577h.scala b/tests/run/i8577h.scala index 457c8138d840..7e741cb16cd2 100644 --- a/tests/run/i8577h.scala +++ b/tests/run/i8577h.scala @@ -1,5 +1,3 @@ -import scala.quoted._ - object Macro: opaque type StrCtx = StringContext def apply(ctx: StringContext): StrCtx = ctx diff --git a/tests/run/i8577i.scala b/tests/run/i8577i.scala index 1893425b8782..0a5ee76e07e1 100644 --- a/tests/run/i8577i.scala +++ b/tests/run/i8577i.scala @@ -1,5 +1,3 @@ -import scala.quoted._ - object Macro: opaque type StrCtx = StringContext def apply(ctx: StringContext): StrCtx = ctx diff --git a/tests/run/irrefutable.check b/tests/run/irrefutable.check new file mode 100644 index 000000000000..f0ac130d17af --- /dev/null +++ b/tests/run/irrefutable.check @@ -0,0 +1,5 @@ +=== mapping xs with A.f1 === +=== mapping ys with A.f2 === +filtering Foo(1) in ys +filtering Foo(2) in ys +filtering Foo(3) in ys diff --git a/tests/run/irrefutable.scala b/tests/run/irrefutable.scala new file mode 100644 index 000000000000..c62dcebaf8fb --- /dev/null +++ b/tests/run/irrefutable.scala @@ -0,0 +1,36 @@ +// This tests that A.f1 does not filter its inputs, whereas A.f2 does. +// see also: tests/neg/irrefutable.scala for an example that exercises the requirement to insert case. + +class Lst[+T](val id: String, val underlying: List[T]) { + def map[U](f: T => U): Lst[U] = new Lst(id, underlying.map(f)) + def withFilter(f: T => Boolean): Lst.WithFilter[T] = new Lst.WithFilter(this, f) +} + +object Lst: + class WithFilter[+T](lst: Lst[T], filter: T => Boolean): + def forwardingFilter[T1](filter: T1 => Boolean): T1 => Boolean = t => + println(s"filtering $t in ${lst.id}") + filter(t) + + def map[U](f: T => U): Lst[U] = Lst(lst.id, lst.underlying.withFilter(forwardingFilter(filter)).map(f)) + +case class Foo[T](x: T) + +object A { + def f1(xs: Lst[Foo[Int]]): Lst[Int] = { + for (Foo(x: Int) <- xs) yield x + } + def f2(xs: Lst[Foo[Any]]): Lst[Int] = { + for (case Foo(x: Int) <- xs) yield x + } +} + +@main def Test = + val xs = new Lst("xs", List(Foo(1), Foo(2), Foo(3))) + println("=== mapping xs with A.f1 ===") + val xs1 = A.f1(xs) + assert(xs1.underlying == List(1, 2, 3)) + val ys = new Lst("ys", List(Foo(1: Any), Foo(2: Any), Foo(3: Any))) + println("=== mapping ys with A.f2 ===") + val ys1 = A.f2(ys) + assert(ys1.underlying == List(1, 2, 3)) diff --git a/tests/run/list-apply-eval.scala b/tests/run/list-apply-eval.scala new file mode 100644 index 000000000000..4cbba6d3e6c2 --- /dev/null +++ b/tests/run/list-apply-eval.scala @@ -0,0 +1,88 @@ +object Test: + + var counter = 0 + + def next = + counter += 1 + counter.toString + + def main(args: Array[String]): Unit = + //List.apply is subject to an optimisation in cleanup + //ensure that the arguments are evaluated in the correct order + // Rewritten to: + // val myList: List = new collection.immutable.::(Test.this.next(), new collection.immutable.::(Test.this.next(), new collection.immutable.::(Test.this.next(), scala.collection.immutable.Nil))); + val myList = List(next, next, next) + assert(myList == List("1", "2", "3"), myList) + + val mySeq = Seq(next, next, next) + assert(mySeq == Seq("4", "5", "6"), mySeq) + + val emptyList = List[Int]() + assert(emptyList == Nil) + + // just assert it doesn't throw CCE to List + val queue = scala.collection.mutable.Queue[String]() + + // test for the cast instruction described in checkApplyAvoidsIntermediateArray + def lub(b: Boolean): List[(String, String)] = + if b then List(("foo", "bar")) else Nil + + // from minimising CI failure in oslib
+ // again, the lub of :: and Nil is Product, which breaks ++ (which requires IterableOnce) + def lub2(b: Boolean): Unit = + Seq(1) ++ (if (b) Seq(2) else Nil) + + // Examples of arity and nesting arity + // to find the thresholds and reproduce the behaviour of nsc + def examples(): Unit = + val max1 = List[Object]("1", "2", "3", "4", "5", "6", "7") // 7 cons w/ 7 string heads + nil + val max2 = List[Object]("1", "2", "3", "4", "5", "6", List[Object]()) // 7 cons w/ 6 string heads + 1 nil head + nil + val max3 = List[Object]("1", "2", "3", "4", "5", List[Object]("6")) + val max4 = List[Object]("1", "2", "3", "4", List[Object]("5", "6")) + + val over1 = List[Object]("1", "2", "3", "4", "5", "6", "7", "8") // wrap 8-sized array + val over2 = List[Object]("1", "2", "3", "4", "5", "6", "7", List[Object]()) // wrap 8-sized array + val over3 = List[Object]("1", "2", "3", "4", "5", "6", List[Object]("7")) // wrap 1-sized array with 7 + val over4 = List[Object]("1", "2", "3", "4", "5", List[Object]("6", "7")) // wrap 2 + + val max5 = + List[Object]( + List[Object]( + List[Object]( + List[Object]( + List[Object]( + List[Object]( + List[Object]( + List[Object]( + )))))))) // 7 cons + 1 nil + + val over5 = + List[Object]( + List[Object]( + List[Object]( + List[Object]( + List[Object]( + List[Object]( + List[Object]( + List[Object]( List[Object]() + )))))))) // 7 cons + 1-sized array wrapping nil + + val max6 = + List[Object]( // ::( + "1", "2", List[Object]( // 1, ::(2, ::(::( + "3", "4", List[Object]( // 3, ::(4, ::(::( + List[Object]() // Nil, Nil + ) // ), Nil)) + ) // ), Nil)) + ) // ) + // 7 cons + 4 string heads + 4 nils for nested lists + + val max7 = + List[Object]( // ::( + "1", "2", List[Object]( // 1, ::(2, ::(::( + "3", "4", List[Object]( // 3, ::(4, ::(::( + "5" // 5, Nil + ) // ), Nil)) + ) // ), Nil)) + ) // ) + // 7 cons + 5 string heads + 3 nils for nested lists diff --git a/tests/run-custom-args/no-useless-forwarders.scala b/tests/run/no-useless-forwarders.scala similarity index 86% rename from tests/run-custom-args/no-useless-forwarders.scala rename to tests/run/no-useless-forwarders.scala index e3acfb2a18e5..289bf64fa9ac 100644 --- a/tests/run-custom-args/no-useless-forwarders.scala +++ b/tests/run/no-useless-forwarders.scala @@ -1,3 +1,6 @@ +//> using options -Xmixin-force-forwarders:false +// scalajs: --skip + trait A { def foo(a: Int): Int = a def bar(a: Int): Int diff --git a/tests/run/noProtectedSuper.scala b/tests/run/noProtectedSuper.scala new file mode 100644 index 000000000000..41b0615d12ab --- /dev/null +++ b/tests/run/noProtectedSuper.scala @@ -0,0 +1,38 @@ +import scala.annotation.publicInBinary + +package p { + class A { + @publicInBinary protected def foo(): Int = 1 + @publicInBinary protected def fuzz(): Int = 2 + } +} +package q { + class B extends p.A { // protected accessor for foo + trait BInner { + def bar() = foo() + } + } + trait Inner extends p.A { + def bar() = foo() // shared super accessor for foo + // new super accessor for fuzz + class InnerInner { + def bar() = foo() + def baz() = fuzz() + } + } +} +object Test extends App { + val b = new q.B + val bi = new b.BInner {} + assert(bi.bar() == 1) + + class C extends p.A with q.Inner { + // implements super accessors for foo and fuzz + } + val c = new C + assert(c.bar() == 1) + + val d = new c.InnerInner + assert(d.bar() == 1) + assert(d.baz() == 2) +} diff --git a/tests/run/patmat-bind-typed.scala b/tests/run/patmat-bind-typed.scala index 10de921c5190..065babc8216c 100644 --- 
a/tests/run/patmat-bind-typed.scala +++ b/tests/run/patmat-bind-typed.scala @@ -1,5 +1,5 @@ object Test { - def f(xs: List[Any]) = for (key @ (dummy: String) <- xs) yield key + def f(xs: List[Any]) = for (case key @ (dummy: String) <- xs) yield key def main(args: Array[String]): Unit = { f("abc" :: Nil) foreach println diff --git a/tests/run/polymorphic-erased-functions.scala b/tests/run/polymorphic-erased-functions.scala new file mode 100644 index 000000000000..4086423d8c6a --- /dev/null +++ b/tests/run/polymorphic-erased-functions.scala @@ -0,0 +1,22 @@ +import language.experimental.erasedDefinitions + +object Test extends App { + + // Types + type F1 = [T] => (erased T) => Int + type F2 = [T, U] => (T, erased U) => T + + // Terms + val t1 = [T] => (erased t: T) => 3 + assert(t1(List(1, 2, 3)) == 3) + val t1a: F1 = t1 + val t1b: F1 = [T] => (erased t) => 3 + assert(t1b(List(1, 2, 3)) == 3) + + val t2 = [T, U] => (t: T, erased u: U) => t + assert(t2(1, "abc") == 1) + val t2a: F2 = t2 + val t2b: F2 = [T, U] => (t, erased u) => t + assert(t2b(1, "abc") == 1) + +} diff --git a/tests/run/polymorphic-functions.scala b/tests/run/polymorphic-functions.scala index 35b1469f2c3a..16a63b7efe97 100644 --- a/tests/run/polymorphic-functions.scala +++ b/tests/run/polymorphic-functions.scala @@ -27,7 +27,7 @@ object Test extends App { val t2a: F2 = t2 assert(t2(23, "foo") == Left(23)) - // Polymorphic idenity + // Polymorphic identity val pid = [T] => (t: T) => t // Method with poly function argument @@ -50,7 +50,7 @@ object Test extends App { val pf2 = [T] => (f: [U] => U => U, t: Int) => f(t) val pf20 = pf2(pid, 23) - // Implment/override + // Implement/override val phd = [T] => (ts: List[T]) => ts.headOption trait A { @@ -85,6 +85,13 @@ object Test extends App { val v0a: String = v0 assert(v0 == "foo") + // Used to fail with: Found: ... => List[T] + // Expected: ... => List[x.type] + val md2: [T] => (x: T) => List[x.type] = [T] => (x: T) => List(x) + val x = 1 + val v1 = md2(x) + val v1a: List[x.type] = v1 + // Contextual trait Show[T] { def show(t: T): String } implicit val si: Show[Int] = @@ -99,4 +106,16 @@ object Test extends App { val tt2: [T] => T => T = [T] => ((x: T) => x) val tt3: [T] => T => T = [T] => { (x: T) => x } val tt4: [T] => T => T = [T] => (x: T) => { x } + + // Inferred parameter type + val i1a: [T] => T => T = [T] => x => x + val i2b: [T] => T => T = [S] => x => x + /// This does not work currently because subtyping of polymorphic functions is not implemented. 
+ /// val i2c: [T <: Int] => T => T = [T] => x => x + val i3a: [T, S <: List[T]] => (T, S) => List[T] = + [T, S <: List[T]] => (x, y) => x :: y + val i3b: [T, S <: List[T]] => (T, S) => List[T] = + [S, T <: List[S]] => (x, y) => x :: y + val i4: [T, S <: List[T]] => (T, S) => List[T] = + [T, S <: List[T]] => (x, y: S) => x :: y } diff --git a/tests/run/publicInBinary/Lib_1.scala b/tests/run/publicInBinary/Lib_1.scala new file mode 100644 index 000000000000..a3c6ccea8427 --- /dev/null +++ b/tests/run/publicInBinary/Lib_1.scala @@ -0,0 +1,136 @@ +//> using options -Werror -WunstableInlineAccessors + +package foo + +import scala.annotation.publicInBinary + +class Foo(@publicInBinary private[Foo] val paramVal: Int, @publicInBinary private[Foo] var paramVar: Int): + @publicInBinary + protected val protectedVal: Int = 2 + @publicInBinary + private[foo] val packagePrivateVal: Int = 2 + @publicInBinary + protected var protectedVar: Int = 2 + @publicInBinary + private[foo] var packagePrivateVar: Int = 2 + + inline def foo: Int = + paramVar = 3 + protectedVar = 3 + packagePrivateVar = 3 + paramVal + paramVar + protectedVal + packagePrivateVal + protectedVar + packagePrivateVar + +class Bar() extends Foo(3, 3): + @publicInBinary + override protected val protectedVal: Int = 2 + @publicInBinary + override private[foo] val packagePrivateVal: Int = 2 + + inline def bar: Int = protectedVal + packagePrivateVal + +class Baz() extends Foo(4, 4): + @publicInBinary + override protected val protectedVal: Int = 2 + + @publicInBinary + override private[foo] val packagePrivateVal: Int = 2 + + inline def baz: Int = protectedVal + packagePrivateVal + + +class Qux() extends Foo(5, 5): + inline def qux: Int = protectedVal + packagePrivateVal + + +@publicInBinary given Int = 1 +@publicInBinary given (using Double): Int = 1 + +trait A[T]: + def f: T +@publicInBinary given A[Int] with + def f: Int = 1 +@publicInBinary given (using Double): A[Int] with + def f: Int = 1 + +package inlines { + // Case that needed to be converted with MakeInlineablePassing + class C[T](x: T) { + @publicInBinary private[inlines] def next[U](y: U): (T, U) = (x, y) + } + class TestPassing { + inline def foo[A](x: A): (A, Int) = { + val c = new C[A](x) + c.next(1) + } + inline def bar[A](x: A): (A, String) = { + val c = new C[A](x) + c.next("") + } + } +} + +package foo { + @publicInBinary private object Foo: + @publicInBinary private[foo] def x: Int = 1 + inline def f: Int = Foo.x +} +def testFoo() = foo.f + +def localTest() = + class Foo: + @publicInBinary private[Foo] val a: Int = 1 + @publicInBinary protected val b: Int = 1 + +package traits { + trait Trait: + @publicInBinary private[Trait] val myVal = 1 + @publicInBinary private[Trait] lazy val myLazyVl = 2 + @publicInBinary private[Trait] var myVar = 2 + @publicInBinary private[Trait] def myDef = 3 + @publicInBinary private[Trait] given myGiven: Int = 4 + + @publicInBinary protected val myVal2 = 1 + @publicInBinary protected lazy val myLazyVl2 = 2 + @publicInBinary protected var myVar2 = 2 + @publicInBinary protected def myDef2 = 3 + @publicInBinary protected given myGiven2: Int = 4 + + inline def inlined: Unit = + myVar2 = 1 + myVar = 1 + myVal + myLazyVl + myVar + myDef + myGiven + + myVal2 + myLazyVl2 + myVar2 + myDef2 + myGiven2 + + def testTrait(t: Trait) = t.inlined + + class Baz extends Foo + object Baz extends Foo + + trait Foo: + inline def foo: Any = bar + @publicInBinary private[Foo] def bar: Any = 2 + end Foo + + def test() = + Baz.foo + (new Baz).foo + val baz = new Baz + 
baz.foo +} + +package constructors { + class Foo @publicInBinary private[constructors] (x: Int): + @publicInBinary private[constructors] def this(x: Int, y: Int) = this(x + y) + + class Bar @publicInBinary(@publicInBinary private[Bar] val x: Int): + @publicInBinary private def this(x: Int, y: Int) = this(x + y) + inline def bar: Bar = new Bar(x, x) + + inline def newFoo(x: Int) = new Foo(x) + inline def newFoo(x: Int, y: Int) = new Foo(x, y) +} + +def testConstructors() = + val f = constructors.newFoo(1) + val g = constructors.newFoo(1, 2) + val h = new constructors.Bar(1).bar diff --git a/tests/run/publicInBinary/Test_2.scala b/tests/run/publicInBinary/Test_2.scala new file mode 100644 index 000000000000..3c3e89419057 --- /dev/null +++ b/tests/run/publicInBinary/Test_2.scala @@ -0,0 +1,26 @@ +import foo.* + +@main def Test: Unit = + val foo: Foo = new Foo(1, 2) + foo.foo + + val bar = new Bar() + bar.foo + bar.bar + + val baz = new Baz() + baz.foo + baz.baz + + val qux = new Qux() + qux.foo + qux.qux + + val c = new inlines.TestPassing + c.foo(1) + c.bar(2) + + testFoo() + localTest() + traits.test() + testConstructors() diff --git a/tests/run/quoted-sematics-1.scala b/tests/run/quoted-sematics-1.scala index 84bf754dbc36..4f94c8f3c32c 100644 --- a/tests/run/quoted-sematics-1.scala +++ b/tests/run/quoted-sematics-1.scala @@ -82,7 +82,7 @@ def typeChecks(g: Gamma)(level: 0 | 1)(term: Term): Option[Type] = yield LambdaType(t, res) case App(fun, arg) => // T-App for - LambdaType(t1, t2) <- typeChecks(g)(level)(fun) + case LambdaType(t1, t2) <- typeChecks(g)(level)(fun) `t1` <- typeChecks(g)(level)(arg) yield t2 case Box(body) if level == 0 => // T-Box @@ -90,16 +90,16 @@ def typeChecks(g: Gamma)(level: 0 | 1)(term: Term): Option[Type] = case Lift(body) if level == 0 => // T-Lift for NatType <- typeChecks(g)(0)(body) yield BoxType(NatType) case Splice(body) if level == 1 => // T-Unbox - for BoxType(t) <- typeChecks(g)(0)(body) yield t + for case BoxType(t) <- typeChecks(g)(0)(body) yield t case Match(scrutinee, pat, thenp, elsep) => // T-Pat for - BoxType(t1) <- typeChecks(g)(0)(scrutinee) + case BoxType(t1) <- typeChecks(g)(0)(scrutinee) delta <- typePatChecks(g, t1)(pat) t <- typeChecks(g ++ delta)(0)(thenp) `t` <- typeChecks(g)(0)(elsep) yield t case Fix(t) if level == 0 => - for LambdaType(t1, t2) <- typeChecks(g)(0)(t) yield t2 // T-Fix + for case LambdaType(t1, t2) <- typeChecks(g)(0)(t) yield t2 // T-Fix case _ => None if res.isEmpty then println(s"Failed to type $term at level $level with environment $g") diff --git a/tests/run-custom-args/erased/quotes-add-erased.check b/tests/run/quotes-add-erased.check similarity index 100% rename from tests/run-custom-args/erased/quotes-add-erased.check rename to tests/run/quotes-add-erased.check diff --git a/tests/run/quotes-add-erased/Macro_1.scala b/tests/run/quotes-add-erased/Macro_1.scala new file mode 100644 index 000000000000..56247d45cd23 --- /dev/null +++ b/tests/run/quotes-add-erased/Macro_1.scala @@ -0,0 +1,26 @@ +import scala.annotation.MacroAnnotation +import scala.annotation.internal.ErasedParam +import scala.quoted._ + +class NewAnnotation extends scala.annotation.Annotation + +class erasedParamsMethod extends MacroAnnotation: + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + import quotes.reflect._ + tree match + case ClassDef(name, ctr, parents, self, body) => + val erasedInt = AnnotatedType(TypeRepr.of[Int], '{ new ErasedParam }.asTerm) + val methType = MethodType(List("x", "y"))(_ 
=> List(erasedInt, TypeRepr.of[Int]), _ => TypeRepr.of[Int]) + + assert(methType.hasErasedParams) + assert(methType.erasedParams == List(true, false)) + + val methSym = Symbol.newMethod(tree.symbol, "takesErased", methType, Flags.Override, Symbol.noSymbol) + val methDef = DefDef(methSym, _ => Some(Literal(IntConstant(1)))) + + val clsDef = ClassDef.copy(tree)(name, ctr, parents, self, methDef :: body) + + List(clsDef) + case _ => + report.error("Annotation only supports `class`") + List(tree) diff --git a/tests/run-custom-args/erased/quotes-add-erased/Test_2.scala b/tests/run/quotes-add-erased/Test_2.scala similarity index 100% rename from tests/run-custom-args/erased/quotes-add-erased/Test_2.scala rename to tests/run/quotes-add-erased/Test_2.scala diff --git a/tests/run/quotes-reflection.check b/tests/run/quotes-reflection.check new file mode 100644 index 000000000000..ed01fb47ac5f --- /dev/null +++ b/tests/run/quotes-reflection.check @@ -0,0 +1,10 @@ +method : () isGiven=false isImplicit=false erasedArgs=List() +method m1: (i: scala.Int) isGiven=true isImplicit=false erasedArgs=List(false) +method m2: (i: scala.Int) isGiven=false isImplicit=false erasedArgs=List(true) +method m3: (i: scala.Int, j: scala.Int) isGiven=false isImplicit=false erasedArgs=List(false, true) +method m4: (i: EC) isGiven=false isImplicit=false erasedArgs=List(true) +val l1: scala.ContextFunction1[scala.Int, scala.Int] +val l2: scala.PolyFunction with apply: (x: scala.Int @scala.annotation.internal.ErasedParam) isImplicit=false erasedParams=List(true) +val l3: scala.PolyFunction with apply: (x: scala.Int @scala.annotation.internal.ErasedParam) isImplicit=true erasedParams=List(true) +val l4: scala.PolyFunction with apply: (x: scala.Int, y: scala.Int @scala.annotation.internal.ErasedParam) isImplicit=false erasedParams=List(false, true) +val l5: scala.PolyFunction with apply: (x: EC @scala.annotation.internal.ErasedParam) isImplicit=false erasedParams=List(true) diff --git a/tests/run/quotes-reflection/Macros_1.scala b/tests/run/quotes-reflection/Macros_1.scala new file mode 100644 index 000000000000..5945d39a097a --- /dev/null +++ b/tests/run/quotes-reflection/Macros_1.scala @@ -0,0 +1,35 @@ +import scala.quoted.* + +inline def inspect[A]: String = + ${ inspect2[A] } + +def inspect2[A: Type](using Quotes): Expr[String] = { + import quotes.reflect.* + + val methods = TypeRepr.of[A].typeSymbol.declarations + val names = methods.map { m => + m.tree match + case dd @ DefDef(name, params, r, body) => + val paramStr = + params.map { + case ps: TermParamClause => + val params = ps.params.map(p => s"${p.name}: ${p.tpt.show}").mkString("(", ", ", ")") + s"$params isGiven=${ps.isGiven} isImplicit=${ps.isImplicit} erasedArgs=${ps.erasedArgs}" + case ps: TypeParamClause => ps.params.map(_.show).mkString("[", ", ", "]") + }.mkString("") + s"method $name: $paramStr" + case vd @ ValDef(name, tpt, body) => + tpt.tpe match + case Refinement(parent, "apply", tpe: MethodType) if parent == defn.PolyFunctionClass.typeRef => + assert(tpt.tpe.isErasedFunctionType) + + val params = tpe.paramNames.zip(tpe.paramTypes).map((n, t) => s"$n: ${t.show}").mkString("(", ", ", ")") + s"val $name: ${parent.show} with apply: ${params} isImplicit=${tpe.isImplicit} erasedParams=${tpe.erasedParams}" + case _ => + s"val $name: ${tpt.show}" + case td @ TypeDef(name, tpt) => s"type $name: ${tpt.show}" + case _ => s"something else: $m" + } + + Expr(names.mkString("\n")) +} diff --git a/tests/run-custom-args/erased/quotes-reflection/Test_2.scala 
b/tests/run/quotes-reflection/Test_2.scala similarity index 100% rename from tests/run-custom-args/erased/quotes-reflection/Test_2.scala rename to tests/run/quotes-reflection/Test_2.scala diff --git a/tests/run/string-switch.scala b/tests/run/string-switch.scala index 0c25661972de..ece1cb088926 100644 --- a/tests/run/string-switch.scala +++ b/tests/run/string-switch.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror import annotation.switch import util.Try diff --git a/tests/run/t12290.check b/tests/run/t12290.check index c6ce23a28ef2..f5367692b5f8 100644 --- a/tests/run/t12290.check +++ b/tests/run/t12290.check @@ -64,3 +64,11 @@ XY ==== X Y ==== +582059 +==== +00 +==== +2a +==== +c3bf +==== diff --git a/tests/run/t12290/Test.scala b/tests/run/t12290/Test.scala index e6c96573f032..7b0133e61c1f 100644 --- a/tests/run/t12290/Test.scala +++ b/tests/run/t12290/Test.scala @@ -30,4 +30,16 @@ object Test extends App { println("====") println(valueOf[TextBlocks.Octal.type]) println("====") + println(hexdump(valueOf[TextBlocks.Octal.type])) + println("====") + println(hexdump(valueOf[TextBlocks.Zero.type].toString)) + println("====") + println(hexdump(valueOf[TextBlocks.Magic.type].toString)) + println("====") + println(hexdump(valueOf[TextBlocks.Maxie.type].toString)) + println("====") } + +def hexdump(s: String) = s.getBytes(io.Codec.UTF8.charSet) // java.nio.charset.StandardCharsets.UTF_8 + .map(b => f"${b & 0xff}%02x") + .mkString diff --git a/tests/run/t12290/TextBlocks.java b/tests/run/t12290/TextBlocks.java index 6a827923a052..9dd34e1546a5 100644 --- a/tests/run/t12290/TextBlocks.java +++ b/tests/run/t12290/TextBlocks.java @@ -81,4 +81,7 @@ class TextBlocks { """; final static String Octal = "X\040Y"; + final static char Zero = '\0'; + final static char Magic = '\52'; + final static char Maxie = '\377'; } diff --git a/tests/run/t6406-regextract.scala b/tests/run/t6406-regextract.scala index 18cf28865aba..4d10d3f8775c 100644 --- a/tests/run/t6406-regextract.scala +++ b/tests/run/t6406-regextract.scala @@ -20,10 +20,10 @@ object Test extends App { val t = "Last modified 2011-07-15" val p1 = """(\d\d\d\d)-(\d\d)-(\d\d)""".r val y1: Option[String] = for { - p1(year, month, day) <- p1 findFirstIn t + case p1(year, month, day) <- p1 findFirstIn t } yield year val y2: Option[String] = for { - p1(year, month, day) <- p1 findFirstMatchIn t + case p1(year, month, day) <- p1 findFirstMatchIn t } yield year println(s"$y1 $y2") diff --git a/tests/run/t6443b.scala b/tests/run/t6443b.scala index 9320b1dcfe2f..796fd9d95df4 100644 --- a/tests/run/t6443b.scala +++ b/tests/run/t6443b.scala @@ -2,7 +2,10 @@ trait A { type D >: Null <: C def foo(d: D)(d2: d.type): Unit trait C { - def bar: Unit = foo(null)(null) + def bar: Unit = { + val nul = null + foo(nul)(nul) + } } } object B extends A { diff --git a/tests/run/t6646.scala b/tests/run/t6646.scala index b96851077bf9..d1c84455c216 100644 --- a/tests/run/t6646.scala +++ b/tests/run/t6646.scala @@ -8,9 +8,9 @@ object Test { val l = List(PrimaryKey, NoNull, lower) // withFilter must be generated in these - for (option @ NoNull <- l) println("Found " + option) - for (option @ `lower` <- l) println("Found " + option) - for ((`lower`, i) <- l.zipWithIndex) println("Found " + i) + for (case option @ NoNull <- l) println("Found " + option) + for (case option @ `lower` <- l) println("Found " + option) + for (case (`lower`, i) <- l.zipWithIndex) println("Found " + i) // no withFilter for (X <- List("A single ident is always a pattern")) println(X) 
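The hunks above in quoted-sematics-1.scala, t6406-regextract.scala, and t6646.scala (and in t6968.scala just below) all make the same migration: a refutable pattern in a `for` generator must now be written with a leading `case`, which makes the previously implicit `withFilter` call explicit. Newer source versions warn on the old form (rewritable under `-rewrite -source 3.4-migration`). A minimal standalone sketch of the before/after, for illustration only and not part of the patch:

```scala
// Illustration only: why these hunks add `case` to for-generators.
object ForCaseDemo:
  val mixed: List[Matchable] = List(1, (1, 2), 4, (3, 1))

  // Previously: for ((a, b) <- mixed) yield a
  // The tuple pattern (a, b) is refutable against Matchable, so the
  // generator silently dropped the non-tuple elements via withFilter.

  // Now the filtering intent must be spelled out with `case`:
  val firsts = for case (a, b) <- mixed yield a // List(1, 3)

@main def forCaseDemo(): Unit = println(ForCaseDemo.firsts)
```

Generators whose pattern is irrefutable need no `case`, which is why the final generator in the t6646 hunk above (a bare identifier, always a binder) is left unchanged.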
diff --git a/tests/run/t6968.scala b/tests/run/t6968.scala index c4e47ba0eda8..84a0969c0872 100644 --- a/tests/run/t6968.scala +++ b/tests/run/t6968.scala @@ -1,7 +1,7 @@ object Test { def main(args: Array[String]): Unit = { val mixedList = List(1,(1,2),4,(3,1),(5,4),6) - val as = for((a,b) <- mixedList) yield a + val as = for(case (a,b) <- mixedList) yield a println(as.mkString(", ")) } } diff --git a/tests/run-custom-args/typeclass-derivation1.scala b/tests/run/typeclass-derivation1.scala similarity index 100% rename from tests/run-custom-args/typeclass-derivation1.scala rename to tests/run/typeclass-derivation1.scala diff --git a/tests/run/typeclass-derivation3.check b/tests/run/typeclass-derivation3.check index 12bb24628b0c..34b8a6b81f7e 100644 --- a/tests/run/typeclass-derivation3.check +++ b/tests/run/typeclass-derivation3.check @@ -9,6 +9,6 @@ Cons(hd = Cons(hd = 11, tl = Cons(hd = 22, tl = Cons(hd = 33, tl = Nil))), tl = Cons(hd = Left(x = 1), tl = Cons(hd = Right(x = Pair(x = 2, y = 3)), tl = Nil)) Cons(hd = Left(x = 1), tl = Cons(hd = Right(x = Pair(x = 2, y = 3)), tl = Nil)) true -::(head = 1, next$access$1 = ::(head = 2, next$access$1 = ::(head = 3, next$access$1 = Nil()))) -::(head = ::(head = 1, next$access$1 = Nil()), next$access$1 = ::(head = ::(head = 2, next$access$1 = ::(head = 3, next$access$1 = Nil())), next$access$1 = Nil())) -::(head = Nil(), next$access$1 = ::(head = ::(head = 1, next$access$1 = Nil()), next$access$1 = ::(head = ::(head = 2, next$access$1 = ::(head = 3, next$access$1 = Nil())), next$access$1 = Nil()))) +::(head = 1, next = ::(head = 2, next = ::(head = 3, next = Nil()))) +::(head = ::(head = 1, next = Nil()), next = ::(head = ::(head = 2, next = ::(head = 3, next = Nil())), next = Nil())) +::(head = Nil(), next = ::(head = ::(head = 1, next = Nil()), next = ::(head = ::(head = 2, next = ::(head = 3, next = Nil())), next = Nil()))) diff --git a/tests/run/weak-conformance.scala b/tests/run/weak-conformance.scala index db551cf0ce15..af91698053e2 100644 --- a/tests/run/weak-conformance.scala +++ b/tests/run/weak-conformance.scala @@ -1,5 +1,3 @@ -// scalajs: --skip --pending - import collection.mutable.ArrayBuffer object Test extends App { inline val b = 33 @@ -16,11 +14,11 @@ object Test extends App { val x8 = List(5.toByte, 11) ; x8: List[Byte] val x9: List[AnyVal] = List(1.0f, 0) - assert(x9(0).getClass == classOf[java.lang.Float]) - assert(x9(1).getClass == classOf[java.lang.Float]) // expected type not fully defined, since `List` is covariant + assert(x9(0).isInstanceOf[java.lang.Float]) + assert(x9(1).isInstanceOf[java.lang.Float]) // expected type not fully defined, since `List` is covariant val x10 = List[Any](1.0f, 0) - assert(x10(0).getClass == classOf[java.lang.Float]) - assert(x10(1).getClass == classOf[java.lang.Integer]) + assert(x10(0).isInstanceOf[java.lang.Float]) + assert(x10(1).isInstanceOf[java.lang.Integer]) } locally { @@ -35,11 +33,11 @@ object Test extends App { val x8 = ArrayBuffer(5.toByte, 11) ; x8: ArrayBuffer[Byte] val x9: ArrayBuffer[AnyVal] = ArrayBuffer(1.0f, 0) - assert(x9(0).getClass == classOf[java.lang.Float]) - assert(x9(1).getClass == classOf[java.lang.Integer]) // expected type fully defined since ArrayBuffer is nonvariant + assert(x9(0).isInstanceOf[java.lang.Float]) + assert(x9(1).isInstanceOf[java.lang.Integer]) // expected type fully defined since ArrayBuffer is nonvariant val x10 = ArrayBuffer[Any](1.0f, 0) - assert(x10(0).getClass == classOf[java.lang.Float]) - assert(x10(1).getClass == 
classOf[java.lang.Integer]) + assert(x10(0).isInstanceOf[java.lang.Float]) + assert(x10(1).isInstanceOf[java.lang.Integer]) } locally { @@ -56,11 +54,11 @@ object Test extends App { val x8 = Array(5.toByte, 11) ; x8: Array[Int] val x9: Array[AnyVal] = Array(1.0f, 0) - assert(x9(0).getClass == classOf[java.lang.Float]) - assert(x9(1).getClass == classOf[java.lang.Integer]) // expected type fully defined since Array is nonvariant + assert(x9(0).isInstanceOf[java.lang.Float]) + assert(x9(1).isInstanceOf[java.lang.Integer]) // expected type fully defined since Array is nonvariant val x10 = Array[Any](1.0f, 0) - assert(x10(0).getClass == classOf[java.lang.Float]) - assert(x10(1).getClass == classOf[java.lang.Integer]) + assert(x10(0).isInstanceOf[java.lang.Float]) + assert(x10(1).isInstanceOf[java.lang.Integer]) } locally { @@ -74,4 +72,4 @@ object Test extends App { val x7 = if (true) b else if (true) 33 else 'a' ; x7: Char val x8 = if (true) 5.toByte else 11 ; x8: Byte } -} \ No newline at end of file +} diff --git a/tests/run/whitebox-inline.scala b/tests/run/whitebox-inline.scala index 09e7911caaa1..74266d670075 100644 --- a/tests/run/whitebox-inline.scala +++ b/tests/run/whitebox-inline.scala @@ -1,5 +1,3 @@ -import scala.quoted._ - object Test { def main(args: Array[String]): Unit = { val a: Int = blackbox diff --git a/tests/semanticdb/expect/CaseClass.expect.scala b/tests/semanticdb/expect/CaseClass.expect.scala new file mode 100644 index 000000000000..d0c509855c70 --- /dev/null +++ b/tests/semanticdb/expect/CaseClass.expect.scala @@ -0,0 +1,7 @@ +package caseclass + +case class CaseClass/*<-caseclass::CaseClass#*/(int1/*<-caseclass::CaseClass#int1.*/: Int/*->scala::Int#*/, int2/*<-caseclass::CaseClass#int2.*/: Int/*->scala::Int#*/) + +object CaseClass/*<-caseclass::CaseClass.*/: + def apply/*<-caseclass::CaseClass.apply().*/(int/*<-caseclass::CaseClass.apply().(int)*/: Int/*->scala::Int#*/): CaseClass/*->caseclass::CaseClass#*/ = CaseClass/*->caseclass::CaseClass.*/(int/*->caseclass::CaseClass.apply().(int)*/, 0) + def apply/*<-caseclass::CaseClass.apply(+1).*/(): CaseClass/*->caseclass::CaseClass#*/ = CaseClass/*->caseclass::CaseClass.*/(0, 0) diff --git a/tests/semanticdb/expect/CaseClass.scala b/tests/semanticdb/expect/CaseClass.scala new file mode 100644 index 000000000000..576678112353 --- /dev/null +++ b/tests/semanticdb/expect/CaseClass.scala @@ -0,0 +1,7 @@ +package caseclass + +case class CaseClass(int1: Int, int2: Int) + +object CaseClass: + def apply(int: Int): CaseClass = CaseClass(int, 0) + def apply(): CaseClass = CaseClass(0, 0) diff --git a/tests/semanticdb/expect/Deprecated.expect.scala b/tests/semanticdb/expect/Deprecated.expect.scala new file mode 100644 index 000000000000..f81dd5eacf68 --- /dev/null +++ b/tests/semanticdb/expect/Deprecated.expect.scala @@ -0,0 +1,4 @@ +object Deprecated/*<-_empty_::Deprecated.*/ { + @deprecated/*->scala::deprecated#*/ def deprecatedMethod/*<-_empty_::Deprecated.deprecatedMethod().*/ = ???/*->scala::Predef.`???`().*/ + def main/*<-_empty_::Deprecated.main().*/ = deprecatedMethod/*->_empty_::Deprecated.deprecatedMethod().*/ +} diff --git a/tests/semanticdb/expect/Deprecated.scala b/tests/semanticdb/expect/Deprecated.scala new file mode 100644 index 000000000000..cd7bb5ac61de --- /dev/null +++ b/tests/semanticdb/expect/Deprecated.scala @@ -0,0 +1,4 @@ +object Deprecated { + @deprecated def deprecatedMethod = ??? 
+ def main = deprecatedMethod +} diff --git a/tests/semanticdb/expect/Enums.expect.scala b/tests/semanticdb/expect/Enums.expect.scala index 3e1dc9087db7..404dd14b07d2 100644 --- a/tests/semanticdb/expect/Enums.expect.scala +++ b/tests/semanticdb/expect/Enums.expect.scala @@ -46,7 +46,7 @@ object Enums/*<-_empty_::Enums.*/: enum <:_empty_::Enums.`<:<`.Refl#[C]*/ <:_empty_::Enums.`<:<`#*/ C/*->_empty_::Enums.`<:<`.Refl#[C]*/) - object <:_empty_::Enums.`<:<`.`given_<:<_T_T`().[T]*/ <:_empty_::Enums.`<:<`#*/ T/*->_empty_::Enums.`<:<`.`given_<:<_T_T`().[T]*/) = Refl/*->_empty_::Enums.`<:<`.Refl.*/() extension [A/*<-_empty_::Enums.unwrap().[A]*/, B/*<-_empty_::Enums.unwrap().[B]*/](opt/*<-_empty_::Enums.unwrap().(opt)*/: Option/*->scala::Option#*/[A/*->_empty_::Enums.unwrap().[A]*/]) def unwrap/*<-_empty_::Enums.unwrap().*/(using ev/*<-_empty_::Enums.unwrap().(ev)*/: A/*->_empty_::Enums.unwrap().[A]*/ <:_empty_::Enums.`<:<`#*/ Option/*->scala::Option#*/[B/*->_empty_::Enums.unwrap().[B]*/]): Option/*->scala::Option#*/[B/*->_empty_::Enums.unwrap().[B]*/] = ev/*->_empty_::Enums.unwrap().(ev)*/ match diff --git a/tests/semanticdb/expect/Enums.scala b/tests/semanticdb/expect/Enums.scala index be7e2d6ce5cb..7647c9cafeb7 100644 --- a/tests/semanticdb/expect/Enums.scala +++ b/tests/semanticdb/expect/Enums.scala @@ -46,7 +46,7 @@ object Enums: enum <:<[-A, B]: case Refl[C]() extends (C <:< C) - object <:< : + object `<:<`: given [T]: (T <:< T) = Refl() extension [A, B](opt: Option[A]) def unwrap(using ev: A <:< Option[B]): Option[B] = ev match diff --git a/tests/semanticdb/expect/Extension.expect.scala b/tests/semanticdb/expect/Extension.expect.scala index b40e965d4885..f6f76b17b698 100644 --- a/tests/semanticdb/expect/Extension.expect.scala +++ b/tests/semanticdb/expect/Extension.expect.scala @@ -16,3 +16,12 @@ extension (s/*<-ext::Extension$package.readInto().(s)*/: String/*->scala::Predef trait Functor/*<-ext::Functor#*/[F/*<-ext::Functor#[F]*/[_]]: extension [T/*<-ext::Functor#map().[T]*/](t/*<-ext::Functor#map().(t)*/: F/*->ext::Functor#[F]*/[T/*->ext::Functor#map().[T]*/]) def map/*<-ext::Functor#map().*/[U/*<-ext::Functor#map().[U]*/](f/*<-ext::Functor#map().(f)*/: T/*->ext::Functor#map().[T]*/ => U/*->ext::Functor#map().[U]*/): F/*->ext::Functor#[F]*/[U/*->ext::Functor#map().[U]*/] + +opaque type Deck/*<-ext::Extension$package.Deck#*/ = Long/*->scala::Long#*/ +object Deck/*<-ext::Extension$package.Deck.*/: + extension (data/*<-ext::Extension$package.Deck.fooSize().(data)*/: Deck/*->ext::Extension$package.Deck#*/) + def fooSize/*<-ext::Extension$package.Deck.fooSize().*/: Int/*->scala::Int#*/ = ???/*->scala::Predef.`???`().*/ + +object DeckUsage/*<-ext::DeckUsage.*/: + val deck/*<-ext::DeckUsage.deck.*/: Deck/*->ext::Extension$package.Deck#*/ = ???/*->scala::Predef.`???`().*/ + deck/*->ext::DeckUsage.deck.*/.fooSize/*->ext::Extension$package.Deck.fooSize().*/ diff --git a/tests/semanticdb/expect/Extension.scala b/tests/semanticdb/expect/Extension.scala index c204b1ff7fcc..76a012e4b758 100644 --- a/tests/semanticdb/expect/Extension.scala +++ b/tests/semanticdb/expect/Extension.scala @@ -16,3 +16,12 @@ extension (s: String) trait Functor[F[_]]: extension [T](t: F[T]) def map[U](f: T => U): F[U] + +opaque type Deck = Long +object Deck: + extension (data: Deck) + def fooSize: Int = ??? + +object DeckUsage: + val deck: Deck = ??? 
+ deck.fooSize diff --git a/tests/semanticdb/expect/Givens.expect.scala b/tests/semanticdb/expect/Givens.expect.scala index 5d7a9f5dc798..8cd1ee287096 100644 --- a/tests/semanticdb/expect/Givens.expect.scala +++ b/tests/semanticdb/expect/Givens.expect.scala @@ -4,11 +4,11 @@ package b object Givens/*<-a::b::Givens.*/: extension [A/*<-a::b::Givens.sayHello().[A]*/](any/*<-a::b::Givens.sayHello().(any)*/: A/*->a::b::Givens.sayHello().[A]*/) - def sayHello/*<-a::b::Givens.sayHello().*/ = s"Hello, I am $any/*->a::b::Givens.sayHello().(any)*/"/*->scala::StringContext#s().*/ + def sayHello/*<-a::b::Givens.sayHello().*/ = s/*->scala::StringContext#s().*/"Hello, I am $any/*->a::b::Givens.sayHello().(any)*/" extension [B/*<-a::b::Givens.sayGoodbye().[B]*//*<-a::b::Givens.saySoLong().[B]*/](any/*<-a::b::Givens.sayGoodbye().(any)*//*<-a::b::Givens.saySoLong().(any)*/: B/*->a::b::Givens.sayGoodbye().[B]*//*->a::b::Givens.saySoLong().[B]*/) - def sayGoodbye/*<-a::b::Givens.sayGoodbye().*/ = s"Goodbye, from $any/*->a::b::Givens.sayGoodbye().(any)*/"/*->scala::StringContext#s().*/ - def saySoLong/*<-a::b::Givens.saySoLong().*/ = s"So Long, from $any/*->a::b::Givens.saySoLong().(any)*/"/*->scala::StringContext#s().*/ + def sayGoodbye/*<-a::b::Givens.sayGoodbye().*/ = s/*->scala::StringContext#s().*/"Goodbye, from $any/*->a::b::Givens.sayGoodbye().(any)*/" + def saySoLong/*<-a::b::Givens.saySoLong().*/ = s/*->scala::StringContext#s().*/"So Long, from $any/*->a::b::Givens.saySoLong().(any)*/" val hello1/*<-a::b::Givens.hello1.*/ = 1.sayHello/*->a::b::Givens.sayHello().*/ val goodbye1/*<-a::b::Givens.goodbye1.*/ = 1.sayGoodbye/*->a::b::Givens.sayGoodbye().*/ @@ -22,6 +22,7 @@ object Givens/*<-a::b::Givens.*/: def empty/*<-a::b::Givens.given_Monoid_String.empty().*/ = "" extension (x/*<-a::b::Givens.given_Monoid_String.combine().(x)*/: String/*->scala::Predef.String#*/) def combine/*<-a::b::Givens.given_Monoid_String.combine().*/(y/*<-a::b::Givens.given_Monoid_String.combine().(y)*/: String/*->scala::Predef.String#*/) = x/*->a::b::Givens.given_Monoid_String.combine().(x)*/ +/*->java::lang::String#`+`().*/ y/*->a::b::Givens.given_Monoid_String.combine().(y)*/ - inline given int2String/*<-a::b::Givens.int2String().*/: Conversion/*->scala::Conversion#*/[Int/*->scala::Int#*/, String/*->scala::Predef.String#*/] = _.toString/*->scala::Any#toString().*/ + inline given int2String/*<-a::b::Givens.int2String().*/: Conversion/*->scala::Conversion#*/[Int/*->scala::Int#*/, String/*->scala::Predef.String#*/] with + def apply/*<-a::b::Givens.int2String#apply().*/(x/*<-a::b::Givens.int2String#apply().(x)*/: Int/*->scala::Int#*/): String/*->scala::Predef.String#*/ = x/*->a::b::Givens.int2String#apply().(x)*/.toString/*->scala::Any#toString().*/ def foo/*<-a::b::Givens.foo().*/[A/*<-a::b::Givens.foo().[A]*/](using A/*<-a::b::Givens.foo().(A)*/: Monoid/*->a::b::Givens.Monoid#*/[A/*->a::b::Givens.foo().[A]*/]): A/*->a::b::Givens.foo().[A]*/ = A/*->a::b::Givens.foo().(A)*/.combine/*->a::b::Givens.Monoid#combine().*/(A/*->a::b::Givens.foo().(A)*/.empty/*->a::b::Givens.Monoid#empty().*/)(A/*->a::b::Givens.foo().(A)*/.empty/*->a::b::Givens.Monoid#empty().*/) diff --git a/tests/semanticdb/expect/Givens.scala b/tests/semanticdb/expect/Givens.scala index 819d70cfadca..f0a56e624d5d 100644 --- a/tests/semanticdb/expect/Givens.scala +++ b/tests/semanticdb/expect/Givens.scala @@ -22,6 +22,7 @@ object Givens: def empty = "" extension (x: String) def combine(y: String) = x + y - inline given int2String: Conversion[Int, String] = _.toString + 
inline given int2String: Conversion[Int, String] with + def apply(x: Int): String = x.toString def foo[A](using A: Monoid[A]): A = A.combine(A.empty)(A.empty) diff --git a/tests/semanticdb/expect/ImplicitConversion.expect.scala b/tests/semanticdb/expect/ImplicitConversion.expect.scala index 635ffb4d94c9..7c1708ee7617 100644 --- a/tests/semanticdb/expect/ImplicitConversion.expect.scala +++ b/tests/semanticdb/expect/ImplicitConversion.expect.scala @@ -21,10 +21,10 @@ class ImplicitConversion/*<-example::ImplicitConversion#*/ { val x/*<-example::ImplicitConversion#x.*/: Int/*->scala::Int#*/ = message/*->example::ImplicitConversion#message.*/ // interpolators - s"Hello $message/*->example::ImplicitConversion#message.*/ $number/*->example::ImplicitConversion#number.*/"/*->scala::StringContext#s().*/ - s"""Hello + s/*->scala::StringContext#s().*/"Hello $message/*->example::ImplicitConversion#message.*/ $number/*->example::ImplicitConversion#number.*/" + s/*->scala::StringContext#s().*/"""Hello |$message/*->example::ImplicitConversion#message.*/ - |$number/*->example::ImplicitConversion#number.*/"""/*->scala::StringContext#s().*/.stripMargin/*->scala::collection::StringOps#stripMargin(+1).*/ + |$number/*->example::ImplicitConversion#number.*/""".stripMargin/*->scala::collection::StringOps#stripMargin(+1).*/ val a/*<-example::ImplicitConversion#a.*/: Int/*->scala::Int#*/ = char/*->example::ImplicitConversion#char.*/ val b/*<-example::ImplicitConversion#b.*/: Long/*->scala::Long#*/ = char/*->example::ImplicitConversion#char.*/ diff --git a/tests/semanticdb/expect/StructuralTypes.expect.scala b/tests/semanticdb/expect/StructuralTypes.expect.scala new file mode 100644 index 000000000000..96c7181d6f10 --- /dev/null +++ b/tests/semanticdb/expect/StructuralTypes.expect.scala @@ -0,0 +1,22 @@ +package example + +import reflect.Selectable/*->scala::reflect::Selectable.*/.reflectiveSelectable/*->scala::reflect::Selectable.reflectiveSelectable().*/ + +object StructuralTypes/*<-example::StructuralTypes.*/: + type User/*<-example::StructuralTypes.User#*/ = { + def name/*<-local0*/: String/*->scala::Predef.String#*/ + def age/*<-local1*/: Int/*->scala::Int#*/ + def foo/*<-local3*/(x/*<-local2*/: Int/*->scala::Int#*/): Int/*->scala::Int#*/ + } + + val user/*<-example::StructuralTypes.user.*/ = null.asInstanceOf/*->scala::Any#asInstanceOf().*/[User/*->example::StructuralTypes.User#*/] + user/*->example::StructuralTypes.user.*/.name/*->scala::reflect::Selectable#selectDynamic().*/ + user/*->example::StructuralTypes.user.*/.age/*->scala::reflect::Selectable#selectDynamic().*/ + val fooBar/*<-example::StructuralTypes.fooBar.*/ = user/*->example::StructuralTypes.user.*/ foo/*->scala::reflect::Selectable#applyDynamic().*/ 123 + + val V/*<-example::StructuralTypes.V.*/: Object/*->java::lang::Object#*/ { + def scalameta/*<-local4*/: String/*->scala::Predef.String#*/ + } = /*<-local6*/new: + def scalameta/*<-local5*/ = "4.0" + V/*->example::StructuralTypes.V.*/.scalameta/*->scala::reflect::Selectable#selectDynamic().*/ +end StructuralTypes/*->example::StructuralTypes.*/ \ No newline at end of file diff --git a/tests/semanticdb/expect/StructuralTypes.scala b/tests/semanticdb/expect/StructuralTypes.scala new file mode 100644 index 000000000000..5d10dbe67224 --- /dev/null +++ b/tests/semanticdb/expect/StructuralTypes.scala @@ -0,0 +1,22 @@ +package example + +import reflect.Selectable.reflectiveSelectable + +object StructuralTypes: + type User = { + def name: String + def age: Int + def foo(x: Int): Int + } + + val user = 
null.asInstanceOf[User] + user.name + user.age + val fooBar = user foo 123 + + val V: Object { + def scalameta: String + } = new: + def scalameta = "4.0" + V.scalameta +end StructuralTypes \ No newline at end of file diff --git a/tests/semanticdb/metac.expect b/tests/semanticdb/metac.expect index 0ec8a8e5d84c..c8b652d85469 100644 --- a/tests/semanticdb/metac.expect +++ b/tests/semanticdb/metac.expect @@ -7,7 +7,8 @@ Uri => Access.scala Text => empty Language => Scala Symbols => 9 entries -Occurrences => 18 entries +Occurrences => 19 entries +Diagnostics => 4 entries Symbols: example/Access# => class Access extends Object { self: Access => +8 decls } @@ -23,6 +24,7 @@ example/Access#m7(). => method m7 => Nothing Occurrences: [0:8..0:15): example <- example/ [2:6..2:12): Access <- example/Access# +[3:2..3:2): <- example/Access#``(). [3:14..3:16): m1 <- example/Access#m1(). [3:19..3:22): ??? -> scala/Predef.`???`(). [4:20..4:22): m2 <- example/Access#m2(). @@ -40,6 +42,16 @@ Occurrences: [9:6..9:8): m7 <- example/Access#m7(). [9:11..9:14): ??? -> scala/Predef.`???`(). +Diagnostics: +[3:14..3:16): [warning] unused private member +[4:16..4:16): [warning] The [this] qualifier will be deprecated in the future; it should be dropped. +See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html +This construct can be rewritten automatically under -rewrite -source 3.4-migration. +[4:20..4:22): [warning] unused private member +[7:18..7:18): [warning] The [this] qualifier will be deprecated in the future; it should be dropped. +See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html +This construct can be rewritten automatically under -rewrite -source 3.4-migration. + expect/Advanced.scala --------------------- @@ -49,7 +61,8 @@ Uri => Advanced.scala Text => empty Language => Scala Symbols => 61 entries -Occurrences => 138 entries +Occurrences => 143 entries +Diagnostics => 3 entries Synthetics => 3 entries Symbols: @@ -128,11 +141,13 @@ Occurrences: [5:21..5:31): Selectable -> scala/reflect/Selectable. [5:32..5:52): reflectiveSelectable -> scala/reflect/Selectable.reflectiveSelectable(). [7:6..7:7): C <- advanced/C# +[7:7..7:7): <- advanced/C#``(). [7:8..7:9): T <- advanced/C#[T] [8:6..8:7): t <- advanced/C#t(). [8:9..8:10): T -> advanced/C#[T] [8:13..8:16): ??? -> scala/Predef.`???`(). [11:6..11:16): Structural <- advanced/Structural# +[12:2..12:2): <- advanced/Structural#``(). [12:6..12:8): s1 <- advanced/Structural#s1(). [12:16..12:17): x <- local0 [12:19..12:22): Int -> scala/Int# @@ -162,6 +177,7 @@ Occurrences: [15:27..15:30): Int -> scala/Int# [15:35..15:38): ??? -> scala/Predef.`???`(). [16:8..16:9): T <- advanced/Structural#T# +[16:9..16:9): <- advanced/Structural#T#``(). [16:10..16:11): A <- advanced/Structural#T#[A] [16:19..16:22): foo <- advanced/Structural#T#foo. [16:31..16:32): B <- local12 @@ -172,6 +188,7 @@ Occurrences: [16:57..16:60): foo -> advanced/Structural#T#foo. [16:61..16:62): B -> local12 [19:6..19:15): Wildcards <- advanced/Wildcards# +[20:2..20:2): <- advanced/Wildcards#``(). [20:6..20:8): e1 <- advanced/Wildcards#e1(). [20:10..20:14): List -> scala/package.List# [20:20..20:23): ??? -> scala/Predef.`???`(). @@ -188,21 +205,21 @@ Occurrences: [27:6..27:9): s1x <- advanced/Test.s1x. [27:12..27:13): s -> advanced/Test.s. [27:14..27:16): s1 -> advanced/Structural#s1(). -[27:16..27:18): .x -> scala/reflect/Selectable#selectDynamic(). +[27:17..27:18): x -> scala/reflect/Selectable#selectDynamic(). 
[28:6..28:8): s2 <- advanced/Test.s2. [28:11..28:12): s -> advanced/Test.s. [28:13..28:15): s2 -> advanced/Structural#s2(). [29:6..29:9): s2x <- advanced/Test.s2x. [29:12..29:13): s -> advanced/Test.s. [29:14..29:16): s2 -> advanced/Structural#s2(). -[29:16..29:18): .x -> scala/reflect/Selectable#selectDynamic(). +[29:17..29:18): x -> scala/reflect/Selectable#selectDynamic(). [30:6..30:8): s3 <- advanced/Test.s3. [30:11..30:12): s -> advanced/Test.s. [30:13..30:15): s3 -> advanced/Structural#s3(). [31:6..31:9): s3x <- advanced/Test.s3x. [31:12..31:13): s -> advanced/Test.s. [31:14..31:16): s3 -> advanced/Structural#s3(). -[31:16..31:18): .m -> scala/reflect/Selectable#applyDynamic(). +[31:17..31:18): m -> scala/reflect/Selectable#applyDynamic(). [31:19..31:22): ??? -> scala/Predef.`???`(). [33:6..33:7): e <- advanced/Test.e. [33:14..33:23): Wildcards -> advanced/Wildcards# @@ -237,8 +254,9 @@ Occurrences: [47:11..47:14): foo -> advanced/Test.foo. [47:15..47:16): A -> local17 [47:19..47:22): foo -> advanced/Test.foo. -[47:22..47:24): .a -> scala/reflect/Selectable#selectDynamic(). +[47:23..47:24): a -> scala/reflect/Selectable#selectDynamic(). [52:6..52:13): HKClass <- advanced/HKClass# +[52:13..52:13): <- advanced/HKClass#``(). [52:14..52:15): F <- advanced/HKClass#[F] [52:20..52:21): T <- advanced/HKClass#``().[F][T] [52:28..52:29): U <- advanced/HKClass#``().[F][U] @@ -255,6 +273,13 @@ Occurrences: [53:37..53:38): x -> advanced/HKClass#foo().(x) [53:39..53:47): toString -> scala/Tuple2#toString(). +Diagnostics: +[20:15..20:15): [warning] `_` is deprecated for wildcard arguments of types: use `?` instead +This construct can be rewritten automatically under -rewrite -source 3.4-migration. +[21:15..21:15): [warning] `_` is deprecated for wildcard arguments of types: use `?` instead +This construct can be rewritten automatically under -rewrite -source 3.4-migration. +[40:12..40:15): [warning] unused local definition + Synthetics: [27:12..27:16):s.s1 => reflectiveSelectable(*) [29:12..29:16):s.s2 => reflectiveSelectable(*) @@ -269,7 +294,8 @@ Uri => Annotations.scala Text => empty Language => Scala Symbols => 23 entries -Occurrences => 52 entries +Occurrences => 55 entries +Diagnostics => 2 entries Symbols: annot/Alias. => final object Alias extends Object { self: Alias.type => +2 decls } @@ -310,6 +336,7 @@ Occurrences: [4:35..4:41): macros -> scala/language.experimental.macros. [6:1..6:16): ClassAnnotation -> com/javacp/annot/ClassAnnotation# [7:6..7:17): Annotations <- annot/Annotations# +[7:17..7:17): <- annot/Annotations#``(). [7:19..7:42): TypeParameterAnnotation -> com/javacp/annot/TypeParameterAnnotation# [7:43..7:44): T <- annot/Annotations#[T] [7:47..7:66): ParameterAnnotation -> com/javacp/annot/ParameterAnnotation# @@ -327,6 +354,7 @@ Occurrences: [17:3..17:17): TypeAnnotation -> com/javacp/annot/TypeAnnotation# [18:7..18:8): S <- annot/Annotations#S# [21:6..21:7): B <- annot/B# +[21:7..21:7): <- annot/B#``(). [21:9..21:30): ConstructorAnnotation -> com/javacp/annot/ConstructorAnnotation# [21:33..21:34): x <- annot/B#x. [21:36..21:39): Int -> scala/Int# @@ -343,6 +371,7 @@ Occurrences: [32:8..32:10): TT <- annot/M.m().[TT] [32:13..32:16): Int -> scala/Int# [32:25..32:28): ??? -> scala/Predef.`???`(). +[35:0..35:0): <- annot/T#``(). [35:1..35:16): TraitAnnotation -> com/javacp/annot/TraitAnnotation# [36:6..36:7): T <- annot/T# [38:7..38:12): Alias <- annot/Alias. 
@@ -350,6 +379,10 @@ Occurrences: [39:11..39:26): ClassAnnotation -> com/javacp/annot/ClassAnnotation# [39:28..39:33): param -> scala/annotation/meta/param# +Diagnostics: +[7:67..7:68): [warning] unused explicit parameter +[21:33..21:34): [warning] unused explicit parameter + expect/Anonymous.scala ---------------------- @@ -359,7 +392,8 @@ Uri => Anonymous.scala Text => empty Language => Scala Symbols => 23 entries -Occurrences => 47 entries +Occurrences => 50 entries +Diagnostics => 3 entries Synthetics => 2 entries Symbols: @@ -393,6 +427,7 @@ Occurrences: [1:13..1:21): language -> scala/language. [1:22..1:33): higherKinds -> scala/language.higherKinds. [3:6..3:15): Anonymous <- example/Anonymous# +[4:2..4:2): <- example/Anonymous#``(). [4:8..4:17): Anonymous -> example/Anonymous# [6:6..6:13): locally <- example/Anonymous#locally(). [6:14..6:15): A <- example/Anonymous#locally().[A] @@ -415,11 +450,13 @@ Occurrences: [14:11..14:14): Int -> scala/Int# [14:18..14:21): Int -> scala/Int# [14:29..14:32): ??? -> scala/Predef.`???`(). +[17:2..17:2): <- example/Anonymous#Foo#``(). [17:8..17:11): Foo <- example/Anonymous#Foo# [18:6..18:9): foo <- example/Anonymous#foo. [18:12..18:12): <- local1 [18:16..18:19): Foo -> example/Anonymous#Foo# [20:8..20:11): Bar <- example/Anonymous#Bar# +[21:4..21:4): <- example/Anonymous#Bar#``(). [21:8..21:11): bar <- example/Anonymous#Bar#bar(). [21:13..21:19): String -> scala/Predef.String# [22:6..22:10): bar1 <- example/Anonymous#bar1. @@ -436,6 +473,13 @@ Occurrences: [23:33..23:39): String -> scala/Predef.String# [23:42..23:45): ??? -> scala/Predef.`???`(). +Diagnostics: +[9:14..9:14): [warning] `_` is deprecated for wildcard arguments of types: use `?` instead +This construct can be rewritten automatically under -rewrite -source 3.4-migration. +[9:22..9:22): [warning] `_` is deprecated for wildcard arguments of types: use `?` instead +This construct can be rewritten automatically under -rewrite -source 3.4-migration. +[14:8..14:9): [warning] unused local definition + Synthetics: [10:2..10:9):locally => *[Unit] [13:2..13:9):locally => *[Unit] @@ -449,7 +493,7 @@ Uri => AnonymousGiven.scala Text => empty Language => Scala Symbols => 5 entries -Occurrences => 4 entries +Occurrences => 5 entries Symbols: angiven/AnonymousGiven$package. => final package object angiven extends Object { self: angiven.type => +2 decls } @@ -460,10 +504,65 @@ angiven/Foo#``(). => primary ctor (): Foo Occurrences: [0:8..0:15): angiven <- angiven/ +[2:0..2:0): <- angiven/Foo#``(). [2:6..2:9): Foo <- angiven/Foo# [4:4..4:7): bar <- angiven/AnonymousGiven$package.bar(). [4:14..4:17): Foo -> angiven/Foo# +expect/CaseClass.scala +---------------------- + +Summary: +Schema => SemanticDB v4 +Uri => CaseClass.scala +Text => empty +Language => Scala +Symbols => 22 entries +Occurrences => 17 entries + +Symbols: +caseclass/CaseClass# => case class CaseClass extends Object with Product with Serializable { self: CaseClass => +8 decls } +caseclass/CaseClass#_1(). => method _1 => Int +caseclass/CaseClass#_2(). => method _2 => Int +caseclass/CaseClass#``(). => primary ctor (val param int1: Int, val param int2: Int): CaseClass +caseclass/CaseClass#``().(int1) => val param int1: Int +caseclass/CaseClass#``().(int2) => val param int2: Int +caseclass/CaseClass#copy$default$1(). => method copy$default$1 => Int @uncheckedVariance +caseclass/CaseClass#copy$default$2(). => method copy$default$2 => Int @uncheckedVariance +caseclass/CaseClass#copy(). 
=> method copy (param int1: Int, param int2: Int): CaseClass +caseclass/CaseClass#copy().(int1) => param int1: Int +caseclass/CaseClass#copy().(int2) => param int2: Int +caseclass/CaseClass#int1. => val method int1 Int +caseclass/CaseClass#int2. => val method int2 Int +caseclass/CaseClass. => final object CaseClass extends Object { self: CaseClass.type => +5 decls } +caseclass/CaseClass.apply(). => method apply (param int: Int): CaseClass +caseclass/CaseClass.apply().(int) => param int: Int +caseclass/CaseClass.apply(+1). => method apply (): CaseClass +caseclass/CaseClass.apply(+2). => method apply (param int1: Int, param int2: Int): CaseClass +caseclass/CaseClass.apply(+2).(int1) => param int1: Int +caseclass/CaseClass.apply(+2).(int2) => param int2: Int +caseclass/CaseClass.unapply(). => method unapply (param x$1: CaseClass): CaseClass +caseclass/CaseClass.unapply().(x$1) => param x$1: CaseClass + +Occurrences: +[0:8..0:17): caseclass <- caseclass/ +[2:11..2:20): CaseClass <- caseclass/CaseClass# +[2:20..2:20): <- caseclass/CaseClass#``(). +[2:21..2:25): int1 <- caseclass/CaseClass#int1. +[2:27..2:30): Int -> scala/Int# +[2:32..2:36): int2 <- caseclass/CaseClass#int2. +[2:38..2:41): Int -> scala/Int# +[4:7..4:16): CaseClass <- caseclass/CaseClass. +[5:6..5:11): apply <- caseclass/CaseClass.apply(). +[5:12..5:15): int <- caseclass/CaseClass.apply().(int) +[5:17..5:20): Int -> scala/Int# +[5:23..5:32): CaseClass -> caseclass/CaseClass# +[5:35..5:44): CaseClass -> caseclass/CaseClass. +[5:45..5:48): int -> caseclass/CaseClass.apply().(int) +[6:6..6:11): apply <- caseclass/CaseClass.apply(+1). +[6:15..6:24): CaseClass -> caseclass/CaseClass# +[6:27..6:36): CaseClass -> caseclass/CaseClass. + expect/Classes.scala -------------------- @@ -473,7 +572,8 @@ Uri => Classes.scala Text => empty Language => Scala Symbols => 108 entries -Occurrences => 114 entries +Occurrences => 127 entries +Diagnostics => 6 entries Synthetics => 2 entries Symbols: @@ -593,41 +693,52 @@ Occurrences: [1:22..1:34): experimental -> scala/language.experimental. [1:35..1:41): macros -> scala/language.experimental.macros. [2:6..2:8): C1 <- classes/C1# +[2:8..2:8): <- classes/C1#``(). [2:13..2:15): x1 <- classes/C1#x1. [2:17..2:20): Int -> scala/Int# [2:30..2:36): AnyVal -> scala/AnyVal# [4:6..4:8): C2 <- classes/C2# +[4:8..4:8): <- classes/C2#``(). [4:13..4:15): x2 <- classes/C2#x2. [4:17..4:20): Int -> scala/Int# [4:30..4:36): AnyVal -> scala/AnyVal# [5:7..5:9): C2 <- classes/C2. [7:11..7:13): C3 <- classes/C3# +[7:13..7:13): <- classes/C3#``(). [7:14..7:15): x <- classes/C3#x. [7:17..7:20): Int -> scala/Int# [9:11..9:13): C4 <- classes/C4# +[9:13..9:13): <- classes/C4#``(). [9:14..9:15): x <- classes/C4#x. [9:17..9:20): Int -> scala/Int# [10:7..10:9): C4 <- classes/C4. [12:7..12:8): M <- classes/M. [13:17..13:19): C5 <- classes/M.C5# +[13:19..13:19): <- classes/M.C5#``(). [13:20..13:21): x <- classes/M.C5#x. [13:23..13:26): Int -> scala/Int# [16:11..16:13): C6 <- classes/C6# +[16:13..16:13): <- classes/C6#``(). [16:26..16:27): x <- classes/C6#x. [16:29..16:32): Int -> scala/Int# [18:6..18:8): C7 <- classes/C7# +[18:8..18:8): <- classes/C7#``(). [18:9..18:10): x <- classes/C7#x. [18:12..18:15): Int -> scala/Int# [20:6..20:8): C8 <- classes/C8# +[20:8..20:8): <- classes/C8#``(). [20:27..20:28): x <- classes/C8#x. [20:30..20:33): Int -> scala/Int# [22:6..22:8): C9 <- classes/C9# +[22:8..22:8): <- classes/C9#``(). [22:27..22:28): x <- classes/C9#x(). 
[22:30..22:33): Int -> scala/Int# [24:6..24:9): C10 <- classes/C10# +[24:9..24:9): <- classes/C10#``(). [24:10..24:11): s <- classes/C10#s. [24:16..24:22): String -> scala/Predef.String# [26:6..26:9): C11 <- classes/C11# +[27:2..27:2): <- classes/C11#``(). [27:6..27:9): foo <- classes/C11#foo(). [27:11..27:14): Int -> scala/Int# [27:23..27:26): ??? -> scala/Predef.`???`(). @@ -635,7 +746,9 @@ Occurrences: [28:18..28:21): Int -> scala/Int# [28:24..28:27): ??? -> scala/Predef.`???`(). [31:6..31:9): C12 <- classes/C12# +[33:2..33:2): <- classes/C12#``(). [33:8..33:15): Context <- classes/C12#Context# +[34:4..34:4): <- classes/C12#Context#``(). [34:9..34:13): Expr <- classes/C12#Context#Expr# [34:14..34:15): T <- classes/C12#Context#Expr#[T] [36:6..36:10): foo1 <- classes/C12#foo1(). @@ -702,10 +815,50 @@ Occurrences: [53:4..53:9): local -> local4 [53:10..53:11): + -> scala/Int#`+`(+4). +Diagnostics: +[18:9..18:10): [warning] unused explicit parameter +[20:23..20:23): [warning] The [this] qualifier will be deprecated in the future; it should be dropped. +See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html +This construct can be rewritten automatically under -rewrite -source 3.4-migration. +[20:27..20:28): [warning] unused explicit parameter +[22:23..22:23): [warning] The [this] qualifier will be deprecated in the future; it should be dropped. +See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html +This construct can be rewritten automatically under -rewrite -source 3.4-migration. +[22:27..22:28): [warning] unused explicit parameter +[24:10..24:11): [warning] unused explicit parameter + Synthetics: [51:16..51:27):List(1).map => *[Int] [51:16..51:20):List => *.apply[Int] +expect/Deprecated.scala +----------------------- + +Summary: +Schema => SemanticDB v4 +Uri => Deprecated.scala +Text => empty +Language => Scala +Symbols => 3 entries +Occurrences => 6 entries +Diagnostics => 1 entries + +Symbols: +_empty_/Deprecated. => final object Deprecated extends Object { self: Deprecated.type => +3 decls } +_empty_/Deprecated.deprecatedMethod(). => @deprecated method deprecatedMethod => Nothing +_empty_/Deprecated.main(). => method main => Nothing + +Occurrences: +[0:7..0:17): Deprecated <- _empty_/Deprecated. +[1:3..1:13): deprecated -> scala/deprecated# +[1:18..1:34): deprecatedMethod <- _empty_/Deprecated.deprecatedMethod(). +[1:37..1:40): ??? -> scala/Predef.`???`(). +[2:6..2:10): main <- _empty_/Deprecated.main(). +[2:13..2:29): deprecatedMethod -> _empty_/Deprecated.deprecatedMethod(). + +Diagnostics: +[2:13..2:29): [warning] method deprecatedMethod in object Deprecated is deprecated + expect/Empty.scala ------------------ @@ -715,7 +868,7 @@ Uri => Empty.scala Text => empty Language => Scala Symbols => 6 entries -Occurrences => 8 entries +Occurrences => 10 entries Symbols: _empty_/A# => class A extends Object { self: A => +2 decls } @@ -727,10 +880,12 @@ _empty_/B#a(). => method a => A Occurrences: [0:6..0:7): A <- _empty_/A# +[1:2..1:2): <- _empty_/A#``(). [1:6..1:7): b <- _empty_/A#b(). [1:9..1:10): B -> _empty_/B# [1:13..1:16): ??? -> scala/Predef.`???`(). [4:6..4:7): B <- _empty_/B# +[5:2..5:2): <- _empty_/B#``(). [5:6..5:7): a <- _empty_/B#a(). [5:9..5:10): A -> _empty_/A# [5:13..5:16): ??? -> scala/Predef.`???`(). 
@@ -762,7 +917,8 @@ Uri => EndMarkers.scala Text => empty Language => Scala Symbols => 30 entries -Occurrences => 46 entries +Occurrences => 49 entries +Diagnostics => 3 entries Symbols: endmarkers/Container# => class Container extends Object { self: Container => +5 decls } @@ -799,6 +955,7 @@ local2 => local localDef: => Int Occurrences: [0:8..0:18): endmarkers <- endmarkers/ [2:8..2:17): MultiCtor <- endmarkers/MultiCtor# +[2:17..2:17): <- endmarkers/MultiCtor#``(). [2:22..2:23): i <- endmarkers/MultiCtor#i. [2:25..2:28): Int -> scala/Int# [3:8..3:12): <- endmarkers/MultiCtor#``(+1). @@ -814,6 +971,7 @@ Occurrences: [16:19..16:25): String -> scala/Predef.String# [18:6..18:17): topLevelVar -> endmarkers/EndMarkers$package.topLevelVar(). [20:8..20:17): Container <- endmarkers/Container# +[22:4..22:4): <- endmarkers/Container#``(). [22:8..22:11): foo <- endmarkers/Container#foo(). [24:8..24:11): foo -> endmarkers/Container#foo(). [26:8..26:11): bar <- endmarkers/Container#bar. @@ -834,6 +992,7 @@ Occurrences: [54:8..54:11): foo <- endmarkers/TestObj.foo(). [56:6..56:13): TestObj -> endmarkers/TestObj. [58:8..58:13): Stuff <- endmarkers/Stuff# +[58:13..58:13): <- endmarkers/Stuff#``(). [58:14..58:15): A <- endmarkers/Stuff#[A] [59:9..59:11): do <- endmarkers/Stuff#do(). [59:14..59:15): A -> endmarkers/Stuff#[A] @@ -844,6 +1003,11 @@ Occurrences: [64:14..64:20): String -> scala/Predef.String# [67:4..67:14): endmarkers -> endmarkers/ +Diagnostics: +[38:8..38:16): [warning] unused local definition +[42:8..42:16): [warning] unused local definition +[46:8..46:16): [warning] unused local definition + expect/EndMarkers2.scala ------------------------ @@ -875,7 +1039,7 @@ Uri => EnumVal.scala Text => empty Language => Scala Symbols => 16 entries -Occurrences => 16 entries +Occurrences => 18 entries Symbols: enumVal/A# => trait A extends Object { self: A => +1 decls } @@ -900,8 +1064,10 @@ Occurrences: [2:7..2:12): scala -> scala/ [2:13..2:20): runtime -> scala/runtime/ [2:21..2:30): EnumValue -> scala/runtime/EnumValue. +[5:0..5:0): <- enumVal/A#``(). [5:6..5:7): A <- enumVal/A# [7:5..7:10): Color <- enumVal/Color# +[7:10..7:10): <- enumVal/Color#``(). [7:15..7:18): rgb <- enumVal/Color#rgb. [7:20..7:23): Int -> scala/Int# [8:7..8:10): Red <- enumVal/Color.Red. @@ -922,7 +1088,8 @@ Uri => Enums.scala Text => empty Language => Scala Symbols => 181 entries -Occurrences => 148 entries +Occurrences => 159 entries +Diagnostics => 1 entries Synthetics => 6 entries Symbols: @@ -1112,17 +1279,20 @@ Occurrences: [0:7..0:12): Enums <- _empty_/Enums. [1:9..1:12): <:< -> _empty_/Enums.`<:<`. [3:7..3:13): Colour <- _empty_/Enums.Colour# +[4:4..4:4): <- _empty_/Enums.Colour#``(). [4:11..4:17): Colour -> _empty_/Enums.Colour. [4:18..4:21): Red -> _empty_/Enums.Colour.Red. [5:9..5:12): Red <- _empty_/Enums.Colour.Red. [5:14..5:19): Green <- _empty_/Enums.Colour.Green. [5:21..5:25): Blue <- _empty_/Enums.Colour.Blue. [7:7..7:17): Directions <- _empty_/Enums.Directions# +[8:4..8:4): <- _empty_/Enums.Directions#``(). [8:9..8:14): North <- _empty_/Enums.Directions.North. [8:16..8:20): East <- _empty_/Enums.Directions.East. [8:22..8:27): South <- _empty_/Enums.Directions.South. [8:29..8:33): West <- _empty_/Enums.Directions.West. [10:7..10:12): Suits <- _empty_/Enums.Suits# +[10:21..10:21): <- _empty_/Enums.Suits#``(). [11:9..11:15): Hearts <- _empty_/Enums.Suits.Hearts. [11:17..11:23): Spades <- _empty_/Enums.Suits.Spades. [11:25..11:30): Clubs <- _empty_/Enums.Suits.Clubs. 
@@ -1147,6 +1317,7 @@ Occurrences: [18:11..18:17): Spades -> _empty_/Enums.Suits.Spades. [18:20..18:25): Clubs -> _empty_/Enums.Suits.Clubs. [21:7..21:15): WeekDays <- _empty_/Enums.WeekDays# +[22:4..22:4): <- _empty_/Enums.WeekDays#``(). [22:9..22:15): Monday <- _empty_/Enums.WeekDays.Monday. [23:9..23:16): Tuesday <- _empty_/Enums.WeekDays.Tuesday. [24:9..24:18): Wednesday <- _empty_/Enums.WeekDays.Wednesday. @@ -1155,6 +1326,7 @@ Occurrences: [27:9..27:17): Saturday <- _empty_/Enums.WeekDays.Saturday. [28:9..28:15): Sunday <- _empty_/Enums.WeekDays.Sunday. [30:7..30:11): Coin <- _empty_/Enums.Coin# +[30:11..30:11): <- _empty_/Enums.Coin#``(). [30:12..30:17): value <- _empty_/Enums.Coin#value. [30:19..30:22): Int -> scala/Int# [31:9..31:14): Penny <- _empty_/Enums.Coin.Penny. @@ -1168,12 +1340,15 @@ Occurrences: [35:9..35:15): Dollar <- _empty_/Enums.Coin.Dollar. [35:26..35:30): Coin -> _empty_/Enums.Coin# [37:7..37:12): Maybe <- _empty_/Enums.Maybe# +[37:12..37:12): <- _empty_/Enums.Maybe#``(). [37:14..37:15): A <- _empty_/Enums.Maybe#[A] [38:9..38:13): Just <- _empty_/Enums.Maybe.Just# +[38:13..38:13): <- _empty_/Enums.Maybe.Just#``(). [38:14..38:19): value <- _empty_/Enums.Maybe.Just#value. [38:21..38:22): A -> _empty_/Enums.Maybe.Just#[A] [39:9..39:13): None <- _empty_/Enums.Maybe.None. [41:7..41:10): Tag <- _empty_/Enums.Tag# +[41:10..41:10): <- _empty_/Enums.Tag#``(). [41:11..41:12): A <- _empty_/Enums.Tag#[A] [42:9..42:15): IntTag <- _empty_/Enums.Tag.IntTag. [42:24..42:27): Tag -> _empty_/Enums.Tag# @@ -1182,14 +1357,16 @@ Occurrences: [43:28..43:31): Tag -> _empty_/Enums.Tag# [43:32..43:39): Boolean -> scala/Boolean# [45:7..45:10): <:< <- _empty_/Enums.`<:<`# +[45:10..45:10): <- _empty_/Enums.`<:<`#``(). [45:12..45:13): A <- _empty_/Enums.`<:<`#[A] [45:15..45:16): B <- _empty_/Enums.`<:<`#[B] [46:9..46:13): Refl <- _empty_/Enums.`<:<`.Refl# +[46:13..46:13): <- _empty_/Enums.`<:<`.Refl#``(). [46:14..46:15): C <- _empty_/Enums.`<:<`.Refl#[C] [46:28..46:29): C -> _empty_/Enums.`<:<`.Refl#[C] [46:30..46:33): <:< -> _empty_/Enums.`<:<`# [46:34..46:35): C -> _empty_/Enums.`<:<`.Refl#[C] -[48:9..48:12): <:< <- _empty_/Enums.`<:<`. +[48:10..48:13): <:< <- _empty_/Enums.`<:<`. [49:11..49:12): T <- _empty_/Enums.`<:<`.`given_<:<_T_T`().[T] [49:16..49:17): T -> _empty_/Enums.`<:<`.`given_<:<_T_T`().[T] [49:18..49:21): <:< -> _empty_/Enums.`<:<`# @@ -1220,6 +1397,7 @@ Occurrences: [54:19..54:23): Some -> scala/Some. [54:28..54:34): unwrap -> _empty_/Enums.unwrap(). [56:7..56:13): Planet <- _empty_/Enums.Planet# +[56:13..56:13): <- _empty_/Enums.Planet#``(). [56:14..56:18): mass <- _empty_/Enums.Planet#mass. [56:20..56:26): Double -> scala/Double# [56:28..56:34): radius <- _empty_/Enums.Planet#radius. @@ -1258,6 +1436,9 @@ Occurrences: [68:9..68:16): Neptune <- _empty_/Enums.Planet.Neptune. [68:25..68:31): Planet -> _empty_/Enums.Planet# +Diagnostics: +[30:12..30:17): [warning] unused explicit parameter + Synthetics: [52:9..52:13):Refl => *.unapply[Option[B]] [52:31..52:50):identity[Option[B]] => *[Function1[A, Option[B]]] @@ -1275,7 +1456,7 @@ Uri => EtaExpansion.scala Text => empty Language => Scala Symbols => 3 entries -Occurrences => 8 entries +Occurrences => 9 entries Synthetics => 5 entries Symbols: @@ -1287,6 +1468,7 @@ Occurrences: [0:8..0:15): example <- example/ [2:6..2:18): EtaExpansion <- example/EtaExpansion# [3:2..3:6): Some -> scala/Some. +[3:2..3:2): <- example/EtaExpansion#``(). [3:10..3:13): map -> scala/Option#map(). [3:14..3:22): identity -> scala/Predef.identity(). 
[4:2..4:6): List -> scala/package.List. @@ -1310,6 +1492,7 @@ Text => empty Language => Scala Symbols => 5 entries Occurrences => 23 entries +Diagnostics => 1 entries Symbols: example/Example. => final object Example extends Object { self: Example.type => +3 decls } @@ -1343,6 +1526,9 @@ Occurrences: [9:24..9:32): classTag -> scala/reflect/package.classTag(). [9:33..9:36): Int -> scala/Int# +Diagnostics: +[2:24..2:30): [warning] unused import + expect/Extension.scala ---------------------- @@ -1351,12 +1537,18 @@ Schema => SemanticDB v4 Uri => Extension.scala Text => empty Language => Scala -Symbols => 26 entries -Occurrences => 50 entries +Symbols => 32 entries +Occurrences => 66 entries Synthetics => 1 entries Symbols: -ext/Extension$package. => final package object ext extends Object { self: ext.type => +6 decls } +ext/DeckUsage. => final object DeckUsage extends Object { self: DeckUsage.type => +2 decls } +ext/DeckUsage.deck. => val method deck Deck +ext/Extension$package. => final package object ext extends Object { self: ext.type { opaque type Deck } => +9 decls } +ext/Extension$package.Deck# => opaque type Deck +ext/Extension$package.Deck. => final object Deck extends Object { self: Deck.type => +2 decls } +ext/Extension$package.Deck.fooSize(). => method fooSize (param data: Deck): Int +ext/Extension$package.Deck.fooSize().(data) => param data: Deck ext/Extension$package.`#*#`(). => method #*# (param s: String)(param i: Int): Tuple2[String, Int] ext/Extension$package.`#*#`().(i) => param i: Int ext/Extension$package.`#*#`().(s) => param s: String @@ -1402,6 +1594,7 @@ Occurrences: [8:4..8:5): c <- ext/Extension$package.c. [8:14..8:17): #*# -> ext/Extension$package.`#*#`(). [10:6..10:10): Read <- ext/Read# +[10:10..10:10): <- ext/Read#``(). [10:12..10:13): T <- ext/Read#[T] [11:6..11:16): fromString <- ext/Read#fromString(). [11:17..11:18): s <- ext/Read#fromString().(s) @@ -1422,6 +1615,7 @@ Occurrences: [14:62..14:72): fromString -> ext/Read#fromString(). [14:73..14:74): s -> ext/Extension$package.readInto().(s) [16:6..16:13): Functor <- ext/Functor# +[16:13..16:13): <- ext/Functor#``(). [16:14..16:15): F <- ext/Functor#[F] [17:13..17:14): T <- ext/Functor#map().[T] [17:16..17:17): t <- ext/Functor#map().(t) @@ -1434,6 +1628,20 @@ Occurrences: [17:44..17:45): U -> ext/Functor#map().[U] [17:48..17:49): F -> ext/Functor#[F] [17:50..17:51): U -> ext/Functor#map().[U] +[19:12..19:16): Deck <- ext/Extension$package.Deck# +[19:19..19:23): Long -> scala/Long# +[20:7..20:11): Deck <- ext/Extension$package.Deck. +[21:13..21:17): data <- ext/Extension$package.Deck.fooSize().(data) +[21:19..21:23): Deck -> ext/Extension$package.Deck# +[22:8..22:15): fooSize <- ext/Extension$package.Deck.fooSize(). +[22:17..22:20): Int -> scala/Int# +[22:23..22:26): ??? -> scala/Predef.`???`(). +[24:7..24:16): DeckUsage <- ext/DeckUsage. +[25:6..25:10): deck <- ext/DeckUsage.deck. +[25:12..25:16): Deck -> ext/Extension$package.Deck# +[25:19..25:22): ??? -> scala/Predef.`???`(). +[26:2..26:6): deck -> ext/DeckUsage.deck. +[26:7..26:14): fooSize -> ext/Extension$package.Deck.fooSize(). 
Synthetics: [14:46..14:61):summon[Read[T]] => *(x$2) @@ -1447,7 +1655,7 @@ Uri => ForComprehension.scala Text => empty Language => Scala Symbols => 13 entries -Occurrences => 52 entries +Occurrences => 53 entries Synthetics => 6 entries Symbols: @@ -1468,6 +1676,7 @@ local10 => param f: Tuple4[Int, Int, Int, Int] Occurrences: [0:8..0:15): example <- example/ [2:6..2:22): ForComprehension <- example/ForComprehension# +[3:2..3:2): <- example/ForComprehension#``(). [4:4..4:5): a <- local0 [4:9..4:13): List -> scala/package.List. [5:4..5:5): b <- local1 @@ -1535,12 +1744,12 @@ Schema => SemanticDB v4 Uri => Givens.scala Text => empty Language => Scala -Symbols => 29 entries -Occurrences => 65 entries +Symbols => 33 entries +Occurrences => 72 entries Synthetics => 3 entries Symbols: -a/b/Givens. => final object Givens extends Object { self: Givens.type => +12 decls } +a/b/Givens. => final object Givens extends Object { self: Givens.type => +13 decls } a/b/Givens.Monoid# => trait Monoid [typeparam A ] extends Object { self: Monoid[A] => +4 decls } a/b/Givens.Monoid#[A] => typeparam A a/b/Givens.Monoid#``(). => primary ctor [typeparam A ](): Monoid[A] @@ -1558,7 +1767,11 @@ a/b/Givens.given_Monoid_String.combine().(y) => param y: String a/b/Givens.given_Monoid_String.empty(). => method empty => String <: a/b/Givens.Monoid#empty(). a/b/Givens.goodbye1. => val method goodbye1 String a/b/Givens.hello1. => val method hello1 String -a/b/Givens.int2String(). => final implicit given inline macro int2String => Conversion[Int, String] +a/b/Givens.int2String# => implicit given class int2String extends Conversion[Int, String] { self: int2String => +2 decls } +a/b/Givens.int2String#``(). => primary ctor (): int2String +a/b/Givens.int2String#apply(). => method apply (param x: Int): String <: scala/Conversion#apply()., scala/Function1#apply(). +a/b/Givens.int2String#apply().(x) => param x: Int +a/b/Givens.int2String(). => final implicit given inline macro int2String => int2String a/b/Givens.sayGoodbye(). => method sayGoodbye [typeparam B ](param any: B): String a/b/Givens.sayGoodbye().(any) => param any: B a/b/Givens.sayGoodbye().[B] => typeparam B @@ -1578,8 +1791,8 @@ Occurrences: [5:16..5:19): any <- a/b/Givens.sayHello().(any) [5:21..5:22): A -> a/b/Givens.sayHello().[A] [6:8..6:16): sayHello <- a/b/Givens.sayHello(). +[6:19..6:20): s -> scala/StringContext#s(). [6:34..6:37): any -> a/b/Givens.sayHello().(any) -[6:37..6:38): " -> scala/StringContext#s(). [8:13..8:14): B <- a/b/Givens.sayGoodbye().[B] [8:13..8:14): B <- a/b/Givens.saySoLong().[B] [8:16..8:19): any <- a/b/Givens.sayGoodbye().(any) @@ -1587,11 +1800,11 @@ Occurrences: [8:21..8:22): B -> a/b/Givens.sayGoodbye().[B] [8:21..8:22): B -> a/b/Givens.saySoLong().[B] [9:8..9:18): sayGoodbye <- a/b/Givens.sayGoodbye(). +[9:21..9:22): s -> scala/StringContext#s(). [9:38..9:41): any -> a/b/Givens.sayGoodbye().(any) -[9:41..9:42): " -> scala/StringContext#s(). [10:8..10:17): saySoLong <- a/b/Givens.saySoLong(). +[10:20..10:21): s -> scala/StringContext#s(). [10:37..10:40): any -> a/b/Givens.saySoLong().(any) -[10:40..10:41): " -> scala/StringContext#s(). [12:6..12:12): hello1 <- a/b/Givens.hello1. [12:17..12:25): sayHello -> a/b/Givens.sayHello(). [13:6..13:14): goodbye1 <- a/b/Givens.goodbye1. @@ -1599,6 +1812,7 @@ Occurrences: [14:6..14:13): soLong1 <- a/b/Givens.soLong1. [14:18..14:27): saySoLong -> a/b/Givens.saySoLong(). [16:8..16:14): Monoid <- a/b/Givens.Monoid# +[16:14..16:14): <- a/b/Givens.Monoid#``(). 
[16:15..16:16): A <- a/b/Givens.Monoid#[A] [17:8..17:13): empty <- a/b/Givens.Monoid#empty(). [17:15..17:16): A -> a/b/Givens.Monoid#[A] @@ -1621,21 +1835,27 @@ Occurrences: [22:55..22:56): y -> a/b/Givens.given_Monoid_String.combine().(y) [24:15..24:25): int2String <- a/b/Givens.int2String(). [24:27..24:37): Conversion -> scala/Conversion# +[24:27..24:27): <- a/b/Givens.int2String#``(). [24:38..24:41): Int -> scala/Int# [24:43..24:49): String -> scala/Predef.String# -[24:55..24:63): toString -> scala/Any#toString(). -[26:6..26:9): foo <- a/b/Givens.foo(). -[26:10..26:11): A <- a/b/Givens.foo().[A] -[26:19..26:20): A <- a/b/Givens.foo().(A) -[26:22..26:28): Monoid -> a/b/Givens.Monoid# -[26:29..26:30): A -> a/b/Givens.foo().[A] -[26:34..26:35): A -> a/b/Givens.foo().[A] -[26:38..26:39): A -> a/b/Givens.foo().(A) -[26:40..26:47): combine -> a/b/Givens.Monoid#combine(). -[26:48..26:49): A -> a/b/Givens.foo().(A) -[26:50..26:55): empty -> a/b/Givens.Monoid#empty(). -[26:57..26:58): A -> a/b/Givens.foo().(A) -[26:59..26:64): empty -> a/b/Givens.Monoid#empty(). +[25:8..25:13): apply <- a/b/Givens.int2String#apply(). +[25:14..25:15): x <- a/b/Givens.int2String#apply().(x) +[25:17..25:20): Int -> scala/Int# +[25:23..25:29): String -> scala/Predef.String# +[25:32..25:33): x -> a/b/Givens.int2String#apply().(x) +[25:34..25:42): toString -> scala/Any#toString(). +[27:6..27:9): foo <- a/b/Givens.foo(). +[27:10..27:11): A <- a/b/Givens.foo().[A] +[27:19..27:20): A <- a/b/Givens.foo().(A) +[27:22..27:28): Monoid -> a/b/Givens.Monoid# +[27:29..27:30): A -> a/b/Givens.foo().[A] +[27:34..27:35): A -> a/b/Givens.foo().[A] +[27:38..27:39): A -> a/b/Givens.foo().(A) +[27:40..27:47): combine -> a/b/Givens.Monoid#combine(). +[27:48..27:49): A -> a/b/Givens.foo().(A) +[27:50..27:55): empty -> a/b/Givens.Monoid#empty(). +[27:57..27:58): A -> a/b/Givens.foo().(A) +[27:59..27:64): empty -> a/b/Givens.Monoid#empty(). Synthetics: [12:17..12:25):sayHello => *[Int] @@ -1651,7 +1871,7 @@ Uri => ImplicitConversion.scala Text => empty Language => Scala Symbols => 23 entries -Occurrences => 50 entries +Occurrences => 52 entries Synthetics => 6 entries Symbols: @@ -1685,6 +1905,7 @@ Occurrences: [2:13..2:21): language -> scala/language. [2:22..2:41): implicitConversions -> scala/language.implicitConversions. [4:6..4:24): ImplicitConversion <- example/ImplicitConversion# +[5:2..5:2): <- example/ImplicitConversion#``(). [5:9..5:27): ImplicitConversion -> example/ImplicitConversion. [6:15..6:28): string2Number <- example/ImplicitConversion#string2Number(). [7:6..7:12): string <- example/ImplicitConversion#string2Number().(string) @@ -1702,12 +1923,12 @@ Occurrences: [20:6..20:7): x <- example/ImplicitConversion#x. [20:9..20:12): Int -> scala/Int# [20:15..20:22): message -> example/ImplicitConversion#message. +[23:2..23:3): s -> scala/StringContext#s(). [23:11..23:18): message -> example/ImplicitConversion#message. [23:20..23:26): number -> example/ImplicitConversion#number. -[23:26..23:27): " -> scala/StringContext#s(). +[24:2..24:3): s -> scala/StringContext#s(). [25:7..25:14): message -> example/ImplicitConversion#message. [26:7..26:13): number -> example/ImplicitConversion#number. -[26:15..26:16): " -> scala/StringContext#s(). [26:17..26:28): stripMargin -> scala/collection/StringOps#stripMargin(+1). [28:6..28:7): a <- example/ImplicitConversion#a. [28:9..28:12): Int -> scala/Int# @@ -1717,6 +1938,7 @@ Occurrences: [29:16..29:20): char -> example/ImplicitConversion#char. 
[32:7..32:25): ImplicitConversion <- example/ImplicitConversion. [33:23..33:39): newAny2stringadd <- example/ImplicitConversion.newAny2stringadd# +[33:39..33:39): <- example/ImplicitConversion.newAny2stringadd#``(). [33:40..33:41): A <- example/ImplicitConversion.newAny2stringadd#[A] [33:55..33:59): self <- example/ImplicitConversion.newAny2stringadd#self. [33:61..33:62): A -> example/ImplicitConversion.newAny2stringadd#[A] @@ -1751,6 +1973,7 @@ Text => empty Language => Scala Symbols => 2 entries Occurrences => 16 entries +Diagnostics => 1 entries Symbols: _empty_/Imports$package. => final package object _empty_ extends Object { self: _empty_.type => +2 decls } @@ -1774,6 +1997,9 @@ Occurrences: [3:25..3:28): Int -> scala/Int# [3:30..3:33): Int -> scala/Int# +Diagnostics: +[0:26..0:34): [warning] unused import + expect/InstrumentTyper.scala ---------------------------- @@ -1783,7 +2009,7 @@ Uri => InstrumentTyper.scala Text => empty Language => Scala Symbols => 8 entries -Occurrences => 52 entries +Occurrences => 53 entries Synthetics => 2 entries Symbols: @@ -1812,6 +2038,7 @@ Occurrences: [5:13..5:17): Test -> types/Test. [7:6..7:21): InstrumentTyper <- example/InstrumentTyper# [7:24..7:28): self <- local0 +[7:24..7:24): <- example/InstrumentTyper#``(). [7:30..7:36): AnyRef -> scala/AnyRef# [8:6..8:9): all <- example/InstrumentTyper#all(). [8:12..8:16): List -> scala/package.List. @@ -1863,7 +2090,7 @@ Uri => InventedNames.scala Text => empty Language => Scala Symbols => 45 entries -Occurrences => 61 entries +Occurrences => 66 entries Synthetics => 3 entries Symbols: @@ -1916,12 +2143,15 @@ givens/Z#doZ(). => abstract method doZ => List[T] Occurrences: [0:8..0:14): givens <- givens/ [2:6..2:7): X <- givens/X# +[3:2..3:2): <- givens/X#``(). [3:6..3:9): doX <- givens/X#doX(). [3:11..3:14): Int -> scala/Int# [5:6..5:7): Y <- givens/Y# +[6:2..6:2): <- givens/Y#``(). [6:6..6:9): doY <- givens/Y#doY(). [6:11..6:17): String -> scala/Predef.String# [8:6..8:7): Z <- givens/Z# +[8:7..8:7): <- givens/Z#``(). [8:8..8:9): T <- givens/Z#[T] [9:6..9:9): doZ <- givens/Z#doZ(). [9:11..9:15): List -> scala/package.List# @@ -1944,9 +2174,11 @@ Occurrences: [21:6..21:7): X -> givens/X# [22:6..22:9): doX <- givens/InventedNames$package.given_X.doX(). [24:13..24:14): X -> givens/X# +[24:13..24:13): <- givens/InventedNames$package.given_Y#``(). [24:17..24:18): Y -> givens/Y# [25:6..25:9): doY <- givens/InventedNames$package.given_Y#doY(). [27:7..27:8): T <- givens/InventedNames$package.given_Z_T#[T] +[27:7..27:7): <- givens/InventedNames$package.given_Z_T#``(). [27:11..27:12): Z -> givens/Z# [27:13..27:14): T -> givens/InventedNames$package.given_Z_T#[T] [28:6..28:9): doZ <- givens/InventedNames$package.given_Z_T#doZ(). @@ -1990,7 +2222,7 @@ Uri => Issue1749.scala Text => empty Language => Scala Symbols => 7 entries -Occurrences => 22 entries +Occurrences => 24 entries Synthetics => 3 entries Symbols: @@ -2009,6 +2241,7 @@ Occurrences: [3:18..3:25): Ordered -> scala/math/Ordered. [3:26..3:43): orderingToOrdered -> scala/math/Ordered.orderingToOrdered(). [5:6..5:15): Issue1749 <- example/Issue1749# +[6:2..6:2): <- example/Issue1749#``(). [6:6..6:8): x1 <- example/Issue1749#x1. [7:6..7:8): x2 <- example/Issue1749#x2. [8:3..8:5): x1 -> example/Issue1749#x1. @@ -2017,6 +2250,7 @@ Occurrences: [9:14..9:16): x2 -> example/Issue1749#x2. [9:18..9:20): x2 -> example/Issue1749#x2. [12:6..12:15): Issue1854 <- example/Issue1854# +[13:2..13:2): <- example/Issue1854#``(). [13:6..13:9): map <- example/Issue1854#map. 
[13:12..13:22): collection -> scala/collection/ [13:23..13:30): mutable -> scala/collection/mutable/ @@ -2040,7 +2274,7 @@ Uri => Local.scala Text => empty Language => Scala Symbols => 6 entries -Occurrences => 10 entries +Occurrences => 11 entries Synthetics => 1 entries Symbols: @@ -2054,6 +2288,7 @@ local2 => local id: [typeparam A ](param a: A): A Occurrences: [0:8..0:15): example <- example/ [2:6..2:11): Local <- example/Local# +[3:2..3:2): <- example/Local#``(). [3:6..3:7): a <- example/Local#a(). [4:8..4:10): id <- local2 [4:11..4:12): A <- local0 @@ -2157,7 +2392,7 @@ Uri => MetacJava.scala Text => empty Language => Scala Symbols => 10 entries -Occurrences => 62 entries +Occurrences => 63 entries Symbols: example/MetacJava# => class MetacJava extends Object { self: MetacJava => +9 decls } @@ -2177,6 +2412,7 @@ Occurrences: [2:11..2:17): javacp -> com/javacp/ [4:6..4:15): MetacJava <- example/MetacJava# [5:2..5:8): javacp -> com/javacp/ +[5:2..5:2): <- example/MetacJava#``(). [5:9..5:18): MetacJava -> com/javacp/MetacJava# [5:19..5:30): StaticInner -> com/javacp/MetacJava#StaticInner# [5:31..5:39): isStatic -> com/javacp/MetacJava#StaticInner#isStatic(). @@ -2244,7 +2480,7 @@ Uri => MethodUsages.scala Text => empty Language => Scala Symbols => 3 entries -Occurrences => 80 entries +Occurrences => 81 entries Synthetics => 2 entries Symbols: @@ -2255,6 +2491,7 @@ example/MethodUsages#m. => val method m Methods[Int] Occurrences: [0:8..0:15): example <- example/ [2:6..2:18): MethodUsages <- example/MethodUsages# +[3:2..3:2): <- example/MethodUsages#``(). [3:6..3:7): m <- example/MethodUsages#m. [3:14..3:21): Methods -> example/Methods# [3:22..3:25): Int -> scala/Int# @@ -2347,7 +2584,7 @@ Uri => Methods.scala Text => empty Language => Scala Symbols => 82 entries -Occurrences => 153 entries +Occurrences => 156 entries Symbols: example/Methods# => class Methods [typeparam T ] extends Object { self: Methods[T] => +44 decls } @@ -2443,8 +2680,10 @@ Occurrences: [3:13..3:21): language -> scala/language. [3:22..3:34): existentials -> scala/language.existentials. [5:6..5:13): Methods <- example/Methods# +[5:13..5:13): <- example/Methods#``(). [5:14..5:15): T <- example/Methods#[T] [6:8..6:12): List <- example/Methods#List# +[6:12..6:12): <- example/Methods#List#``(). [6:13..6:14): T <- example/Methods#List#[T] [7:7..7:12): AList <- example/Methods#AList# [7:13..7:14): T <- example/Methods#AList#[T] @@ -2500,6 +2739,7 @@ Occurrences: [17:51..17:54): ??? -> scala/Predef.`???`(). [18:7..18:12): m8(). <- example/Methods#`m8().`(). [18:18..18:21): ??? -> scala/Predef.`???`(). +[19:2..19:2): <- example/Methods#`m9().`#``(). [19:9..19:14): m9(). <- example/Methods#`m9().`# [20:6..20:8): m9 <- example/Methods#m9(). [20:9..20:10): x <- example/Methods#m9().(x) @@ -2597,7 +2837,7 @@ Uri => NamedApplyBlock.scala Text => empty Language => Scala Symbols => 43 entries -Occurrences => 40 entries +Occurrences => 41 entries Symbols: example/NamedApplyBlockCaseClassConstruction. => final object NamedApplyBlockCaseClassConstruction extends Object { self: NamedApplyBlockCaseClassConstruction.type => +6 decls } @@ -2674,6 +2914,7 @@ Occurrences: [6:44..6:45): c -> example/NamedApplyBlockMethods.foo().(c) [9:7..9:43): NamedApplyBlockCaseClassConstruction <- example/NamedApplyBlockCaseClassConstruction. [10:13..10:16): Msg <- example/NamedApplyBlockCaseClassConstruction.Msg# +[10:16..10:16): <- example/NamedApplyBlockCaseClassConstruction.Msg#``(). 
[10:17..10:21): body <- example/NamedApplyBlockCaseClassConstruction.Msg#body. [10:23..10:29): String -> scala/Predef.String# [10:31..10:35): head <- example/NamedApplyBlockCaseClassConstruction.Msg#head. @@ -2695,7 +2936,7 @@ Uri => NamedArguments.scala Text => empty Language => Scala Symbols => 16 entries -Occurrences => 10 entries +Occurrences => 12 entries Symbols: example/NamedArguments# => class NamedArguments extends Object { self: NamedArguments => +4 decls } @@ -2718,7 +2959,9 @@ example/NamedArguments#``(). => primary ctor (): NamedArguments Occurrences: [0:8..0:15): example <- example/ [2:6..2:20): NamedArguments <- example/NamedArguments# +[3:2..3:2): <- example/NamedArguments#``(). [3:13..3:17): User <- example/NamedArguments#User# +[3:17..3:17): <- example/NamedArguments#User#``(). [3:18..3:22): name <- example/NamedArguments#User#name. [3:24..3:30): String -> scala/Predef.String# [4:2..4:6): User -> example/NamedArguments#User. @@ -2736,7 +2979,7 @@ Uri => NewModifiers.scala Text => empty Language => Scala Symbols => 14 entries -Occurrences => 15 entries +Occurrences => 18 entries Symbols: _empty_/NewModifiers$package. => final package object _empty_ extends Object { self: _empty_.type { opaque type OpaqueB } => +2 decls } @@ -2762,12 +3005,15 @@ Occurrences: [5:12..5:19): OpaqueB <- _empty_/NewModifiers$package.OpaqueB# [5:22..5:25): Int -> scala/Int# [7:6..7:23): NewModifiersClass <- _empty_/NewModifiersClass# +[8:2..8:2): <- _empty_/NewModifiersClass#``(). [8:14..8:15): C <- _empty_/NewModifiersClass#C# [8:18..8:21): Int -> scala/Int# [9:8..9:14): Nested <- _empty_/NewModifiersClass#Nested# +[10:4..10:4): <- _empty_/NewModifiersClass#Nested#``(). [10:16..10:28): NestedOpaque <- _empty_/NewModifiersClass#Nested#NestedOpaque# [10:31..10:34): Int -> scala/Int# [14:6..14:23): NewModifiersTrait <- _empty_/NewModifiersTrait# +[15:2..15:2): <- _empty_/NewModifiersTrait#``(). [15:14..15:15): D <- _empty_/NewModifiersTrait#D# [15:18..15:21): Int -> scala/Int# @@ -2800,7 +3046,7 @@ Uri => Overrides.scala Text => empty Language => Scala Symbols => 6 entries -Occurrences => 8 entries +Occurrences => 10 entries Symbols: example/A# => trait A extends Object { self: A => +2 decls } @@ -2813,9 +3059,11 @@ example/B#foo(). => method foo => Int <: example/A#foo(). Occurrences: [0:8..0:15): example <- example/ [2:6..2:7): A <- example/A# +[2:10..2:10): <- example/A#``(). [2:14..2:17): foo <- example/A#foo(). [2:19..2:22): Int -> scala/Int# [3:6..3:7): B <- example/B# +[3:7..3:7): <- example/B#``(). [3:18..3:19): A -> example/A# [3:26..3:29): foo <- example/B#foo(). [3:31..3:34): Int -> scala/Int# @@ -2829,7 +3077,7 @@ Uri => Prefixes.scala Text => empty Language => Scala Symbols => 19 entries -Occurrences => 48 entries +Occurrences => 49 entries Symbols: prefixes/C# => class C extends Object { self: C => +6 decls } @@ -2855,6 +3103,7 @@ prefixes/Test.n3(). => method n3 => T Occurrences: [0:8..0:16): prefixes <- prefixes/ [2:6..2:7): C <- prefixes/C# +[3:2..3:2): <- prefixes/C#``(). [3:7..3:8): T <- prefixes/C#T# [4:6..4:8): m1 <- prefixes/C#m1(). [4:10..4:11): T -> prefixes/C#T# @@ -2911,7 +3160,8 @@ Uri => RecOrRefined.scala Text => empty Language => Scala Symbols => 68 entries -Occurrences => 110 entries +Occurrences => 115 entries +Diagnostics => 1 entries Synthetics => 3 entries Symbols: @@ -3008,6 +3258,7 @@ Occurrences: [4:41..4:42): z <- local9 [4:48..4:51): ??? -> scala/Predef.`???`(). [5:6..5:16): PolyHolder <- example/PolyHolder# +[6:2..6:2): <- example/PolyHolder#``(). 
[6:6..6:9): foo <- example/PolyHolder#foo(). [6:10..6:11): T <- example/PolyHolder#foo().[T] [6:13..6:14): t <- example/PolyHolder#foo().(t) @@ -3042,6 +3293,7 @@ Occurrences: [12:45..12:46): T -> local16 [12:49..12:50): T -> local16 [14:6..14:12): Record <- example/Record# +[14:12..14:12): <- example/Record#``(). [14:13..14:18): elems <- example/Record#elems. [14:21..14:27): String -> scala/Predef.String# [14:29..14:32): Any -> scala/Any# @@ -3062,6 +3314,7 @@ Occurrences: [20:6..20:9): age <- local20 [20:11..20:14): Int -> scala/Int# [24:6..24:7): C <- example/C# +[24:10..24:10): <- example/C#``(). [24:15..24:17): T1 <- example/C#T1# [24:24..24:26): T2 <- example/C#T2# [25:5..25:7): C2 <- example/RecOrRefined$package.C2# @@ -3070,6 +3323,7 @@ Occurrences: [25:28..25:30): T2 <- local22 [25:33..25:35): T1 -> local21 [27:6..27:23): SpecialRefinement <- example/SpecialRefinement# +[28:2..28:2): <- example/SpecialRefinement#``(). [28:6..28:13): pickOne <- example/SpecialRefinement#pickOne(). [28:14..28:15): T <- example/SpecialRefinement#pickOne().[T] [28:17..28:19): as <- example/SpecialRefinement#pickOne().(as) @@ -3077,6 +3331,7 @@ Occurrences: [28:26..28:32): Option -> scala/Option# [28:33..28:36): Any -> scala/Any# [31:6..31:25): PickOneRefinement_1 <- example/PickOneRefinement_1# +[31:25..31:25): <- example/PickOneRefinement_1#``(). [31:26..31:27): S <- example/PickOneRefinement_1#[S] [31:31..31:48): SpecialRefinement -> example/SpecialRefinement# [31:55..31:62): pickOne <- local3 @@ -3096,6 +3351,10 @@ Occurrences: [32:49..32:56): pickOne -> example/SpecialRefinement#pickOne(). [32:57..32:59): as -> example/PickOneRefinement_1#run().(as) +Diagnostics: +[32:60..32:60): [warning] The syntax `x: _*` is no longer supported for vararg splices; use `x*` instead +This construct can be rewritten automatically under -rewrite -source 3.4-migration. + Synthetics: [15:23..15:34):elems.toMap => *[String, Any] [15:23..15:34):elems.toMap => *(refl[Tuple2[String, Any]]) @@ -3142,7 +3401,8 @@ Uri => Selfs.scala Text => empty Language => Scala Symbols => 13 entries -Occurrences => 17 entries +Occurrences => 22 entries +Diagnostics => 1 entries Symbols: local0 => selfparam self: C1 @@ -3161,23 +3421,105 @@ selfs/C6#``(). => primary ctor (): C6 Occurrences: [0:8..0:13): selfs <- selfs/ +[2:0..2:0): <- selfs/B#``(). [2:6..2:7): B <- selfs/B# [4:6..4:8): C1 <- selfs/C1# [4:17..4:18): B -> selfs/B# +[4:17..4:17): <- selfs/C1#``(). [4:21..4:25): self <- local0 [7:6..7:8): C2 <- selfs/C2# [7:17..7:18): B -> selfs/B# +[7:17..7:17): <- selfs/C2#``(). [7:21..7:25): self <- local1 [7:27..7:28): B -> selfs/B# [10:6..10:8): C3 <- selfs/C3# [10:17..10:18): B -> selfs/B# +[10:17..10:17): <- selfs/C3#``(). [10:21..10:25): self <- local2 [10:27..10:28): B -> selfs/B# [10:34..10:36): C1 -> selfs/C1# [13:6..13:8): C6 <- selfs/C6# [13:17..13:18): B -> selfs/B# +[13:17..13:17): <- selfs/C6#``(). [13:27..13:28): B -> selfs/B# +Diagnostics: +[10:29..10:33): [warning] with as a type operator has been deprecated; use & instead +This construct can be rewritten automatically under -rewrite -source 3.4-migration. + +expect/StructuralTypes.scala +---------------------------- + +Summary: +Schema => SemanticDB v4 +Uri => StructuralTypes.scala +Text => empty +Language => Scala +Symbols => 12 entries +Occurrences => 33 entries +Diagnostics => 1 entries +Synthetics => 4 entries + +Symbols: +example/StructuralTypes. 
=> final object StructuralTypes extends Object { self: StructuralTypes.type => +5 decls } +example/StructuralTypes.User# => type User = Object { abstract method foo (param x: Int): Int; abstract method age => Int; abstract method name => String } +example/StructuralTypes.V. => val method V Object { abstract method scalameta => String } +example/StructuralTypes.fooBar. => val method fooBar Int +example/StructuralTypes.user. => val method user User +local0 => abstract method name => String +local1 => abstract method age => Int +local2 => param x: Int +local3 => abstract method foo (param x: Int): Int +local4 => abstract method scalameta => String +local5 => method scalameta => String +local6 => final class $anon extends Object { self: $anon => +2 decls } + +Occurrences: +[0:8..0:15): example <- example/ +[2:7..2:14): reflect -> scala/reflect/ +[2:15..2:25): Selectable -> scala/reflect/Selectable. +[2:26..2:46): reflectiveSelectable -> scala/reflect/Selectable.reflectiveSelectable(). +[4:7..4:22): StructuralTypes <- example/StructuralTypes. +[5:7..5:11): User <- example/StructuralTypes.User# +[6:8..6:12): name <- local0 +[6:14..6:20): String -> scala/Predef.String# +[7:8..7:11): age <- local1 +[7:13..7:16): Int -> scala/Int# +[8:8..8:11): foo <- local3 +[8:12..8:13): x <- local2 +[8:15..8:18): Int -> scala/Int# +[8:21..8:24): Int -> scala/Int# +[11:6..11:10): user <- example/StructuralTypes.user. +[11:18..11:30): asInstanceOf -> scala/Any#asInstanceOf(). +[11:31..11:35): User -> example/StructuralTypes.User# +[12:2..12:6): user -> example/StructuralTypes.user. +[12:7..12:11): name -> scala/reflect/Selectable#selectDynamic(). +[13:2..13:6): user -> example/StructuralTypes.user. +[13:7..13:10): age -> scala/reflect/Selectable#selectDynamic(). +[14:6..14:12): fooBar <- example/StructuralTypes.fooBar. +[14:15..14:19): user -> example/StructuralTypes.user. +[14:20..14:23): foo -> scala/reflect/Selectable#applyDynamic(). +[16:6..16:7): V <- example/StructuralTypes.V. +[16:9..16:15): Object -> java/lang/Object# +[17:8..17:17): scalameta <- local4 +[17:19..17:25): String -> scala/Predef.String# +[18:6..18:6): <- local6 +[19:8..19:17): scalameta <- local5 +[20:2..20:3): V -> example/StructuralTypes.V. +[20:4..20:13): scalameta -> scala/reflect/Selectable#selectDynamic(). +[21:4..21:19): StructuralTypes -> example/StructuralTypes. + +Diagnostics: +[14:20..14:23): [warning] Alphanumeric method foo is not declared infix; it should not be used as infix operator. +Instead, use method syntax .foo(...) or backticked identifier `foo`. +The latter can be rewritten automatically under -rewrite -source 3.4-migration. + +Synthetics: +[12:2..12:6):user => reflectiveSelectable(*) +[13:2..13:6):user => reflectiveSelectable(*) +[14:15..14:19):user => reflectiveSelectable(*) +[20:2..20:3):V => reflectiveSelectable(*) + expect/Synthetic.scala ---------------------- @@ -3187,7 +3529,7 @@ Uri => Synthetic.scala Text => empty Language => Scala Symbols => 52 entries -Occurrences => 132 entries +Occurrences => 136 entries Synthetics => 39 entries Symbols: @@ -3251,6 +3593,7 @@ Occurrences: [2:22..2:41): implicitConversions -> scala/language.implicitConversions. [4:6..4:15): Synthetic <- example/Synthetic# [5:2..5:6): List -> scala/package.List. +[5:2..5:2): <- example/Synthetic#``(). [5:10..5:13): map -> scala/collection/immutable/List#map(). [5:16..5:17): + -> scala/Int#`+`(+4). [6:2..6:7): Array -> scala/Array. @@ -3304,11 +3647,13 @@ Occurrences: [24:8..24:13): apply <- example/Synthetic#s.apply(). 
[26:6..26:11): apply -> example/Synthetic#s.apply(). [27:15..27:18): Bar <- example/Synthetic#s.Bar# +[27:18..27:18): <- example/Synthetic#s.Bar#``(). [28:4..28:7): Bar -> example/Synthetic#s.Bar. [29:9..29:21): asInstanceOf -> scala/Any#asInstanceOf(). [29:22..29:25): Int -> scala/Int# [29:29..29:32): Int -> scala/Int# [32:8..32:9): J <- example/Synthetic#J# +[32:9..32:9): <- example/Synthetic#J#``(). [32:10..32:11): T <- example/Synthetic#J#[T] [32:11..32:11): <- example/Synthetic#J#evidence$1. [32:13..32:21): Manifest -> scala/Predef.Manifest# @@ -3316,6 +3661,7 @@ Occurrences: [32:35..32:40): Array -> scala/Array. [32:41..32:46): empty -> scala/Array.empty(). [32:47..32:48): T -> example/Synthetic#J#[T] +[34:2..34:2): <- example/Synthetic#F#``(). [34:8..34:9): F <- example/Synthetic#F# [35:15..35:23): ordering <- example/Synthetic#ordering. [35:25..35:33): Ordering -> scala/package.Ordering# @@ -3456,7 +3802,7 @@ Uri => Traits.scala Text => empty Language => Scala Symbols => 13 entries -Occurrences => 13 entries +Occurrences => 17 entries Symbols: local0 => final class $anon extends Object with U { self: $anon => +1 decls } @@ -3476,16 +3822,20 @@ traits/V#``(). => primary ctor (): V Occurrences: [0:8..0:14): traits <- traits/ [2:6..2:7): T <- traits/T# +[3:2..3:2): <- traits/T#``(). [3:6..3:7): x <- traits/T#x(). +[6:0..6:0): <- traits/U#``(). [6:13..6:14): U <- traits/U# [7:7..7:8): U <- traits/U. [8:6..8:7): u <- traits/U.u(). [8:9..8:10): U -> traits/U# [8:13..8:13): <- local0 [8:17..8:18): U -> traits/U# +[11:0..11:0): <- traits/C#``(). [11:6..11:7): C <- traits/C# [12:6..12:7): V <- traits/V# [12:10..12:14): self <- local2 +[12:10..12:10): <- traits/V#``(). [12:16..12:17): C -> traits/C# expect/ValPattern.scala @@ -3497,7 +3847,8 @@ Uri => ValPattern.scala Text => empty Language => Scala Symbols => 22 entries -Occurrences => 44 entries +Occurrences => 45 entries +Diagnostics => 3 entries Synthetics => 11 entries Symbols: @@ -3513,7 +3864,7 @@ example/ValPattern#app(). => method app (): Unit example/ValPattern#left. => val method left Int example/ValPattern#leftVar(). => var method leftVar Int example/ValPattern#number1. => val method number1 Int -example/ValPattern#number1Var(). => var method number1Var Int +example/ValPattern#number1Var(). => val method number1Var Int example/ValPattern#q1. => val method q1 Nothing example/ValPattern#right. => val method right Int example/ValPattern#rightVar(). => var method rightVar Int @@ -3522,11 +3873,12 @@ local1 => val local right: Int local2 => val local number1: Int local3 => var local leftVar: Int local4 => var local rightVar: Int -local5 => var local number1Var: Int +local5 => val local number1Var: Int Occurrences: [0:8..0:15): example <- example/ [2:6..2:16): ValPattern <- example/ValPattern# +[4:2..4:2): <- example/ValPattern#``(). [4:7..4:11): left <- example/ValPattern#left. [4:13..4:18): right <- example/ValPattern#right. [5:6..5:10): Some -> scala/Some. 
@@ -3570,6 +3922,11 @@ Occurrences: [39:10..39:17): leftVar -> local3 [40:10..40:18): rightVar -> local4 +Diagnostics: +[30:11..30:18): [warning] unset local variable, consider using an immutable val instead +[30:20..30:28): [warning] unset local variable, consider using an immutable val instead +[31:15..31:25): [warning] unset local variable, consider using an immutable val instead + Synthetics: [5:6..5:10):Some => *.unapply[Int] [6:4..6:8):Some => *.apply[Int] @@ -3592,7 +3949,8 @@ Uri => Vals.scala Text => empty Language => Scala Symbols => 42 entries -Occurrences => 128 entries +Occurrences => 129 entries +Diagnostics => 5 entries Symbols: example/ValUsages. => final object ValUsages extends Object { self: ValUsages.type => +2 decls } @@ -3641,6 +3999,7 @@ local4 => implicit var local yil: Int Occurrences: [0:8..0:15): example <- example/ [2:15..2:19): Vals <- example/Vals# +[2:19..2:19): <- example/Vals#``(). [2:20..2:21): p <- example/Vals#p. [2:23..2:26): Int -> scala/Int# [2:32..2:34): xp <- example/Vals#xp. @@ -3766,7 +4125,22 @@ Occurrences: [48:18..48:19): v -> example/ValUsages.v. [48:20..48:23): yim -> example/Vals#yim(). [49:2..49:3): v -> example/ValUsages.v. -[49:3..49:18): .explicitSetter -> example/Vals#`explicitSetter_=`(). +[49:4..49:18): explicitSetter -> example/Vals#`explicitSetter_=`(). + +Diagnostics: +[2:20..2:21): [warning] unused explicit parameter +[5:16..5:16): [warning] The [this] qualifier will be deprecated in the future; it should be dropped. +See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html +This construct can be rewritten automatically under -rewrite -source 3.4-migration. +[7:16..7:16): [warning] The [this] qualifier will be deprecated in the future; it should be dropped. +See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html +This construct can be rewritten automatically under -rewrite -source 3.4-migration. +[12:16..12:16): [warning] The [this] qualifier will be deprecated in the future; it should be dropped. +See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html +This construct can be rewritten automatically under -rewrite -source 3.4-migration. +[13:16..13:16): [warning] The [this] qualifier will be deprecated in the future; it should be dropped. +See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html +This construct can be rewritten automatically under -rewrite -source 3.4-migration. expect/Vararg.scala ------------------- @@ -3777,7 +4151,7 @@ Uri => Vararg.scala Text => empty Language => Scala Symbols => 6 entries -Occurrences => 10 entries +Occurrences => 11 entries Symbols: example/Vararg# => class Vararg extends Object { self: Vararg => +3 decls } @@ -3790,6 +4164,7 @@ example/Vararg#add2().(a) => param a: Seq[Int]* Occurrences: [0:8..0:15): example <- example/ [2:6..2:12): Vararg <- example/Vararg# +[3:2..3:2): <- example/Vararg#``(). [3:6..3:10): add1 <- example/Vararg#add1(). [3:11..3:12): a <- example/Vararg#add1().(a) [3:14..3:17): Int -> scala/Int# @@ -3809,6 +4184,7 @@ Text => empty Language => Scala Symbols => 8 entries Occurrences => 18 entries +Diagnostics => 1 entries Symbols: _empty_/Test_depmatch. => final object Test_depmatch extends Object { self: Test_depmatch.type => +4 decls } @@ -3840,6 +4216,9 @@ Occurrences: [6:19..6:20): U -> local0 [6:24..6:27): ??? -> scala/Predef.`???`(). 
+Diagnostics: +[6:8..6:9): [warning] unused local definition + expect/example-dir/FileInDir.scala ---------------------------------- @@ -3849,7 +4228,7 @@ Uri => example-dir/FileInDir.scala Text => empty Language => Scala Symbols => 2 entries -Occurrences => 2 entries +Occurrences => 3 entries Symbols: example/FileInDir# => class FileInDir extends Object { self: FileInDir => +1 decls } @@ -3857,6 +4236,7 @@ example/FileInDir#``(). => primary ctor (): FileInDir Occurrences: [0:8..0:15): example <- example/ +[2:0..2:0): <- example/FileInDir#``(). [2:6..2:15): FileInDir <- example/FileInDir# expect/exports-example-Codec.scala @@ -3868,7 +4248,7 @@ Uri => exports-example-Codec.scala Text => empty Language => Scala Symbols => 21 entries -Occurrences => 30 entries +Occurrences => 33 entries Symbols: exports/example/Codec# => trait Codec [typeparam T ] extends Object with Decoder[T] with Encoder[T] { self: Codec[T] => +6 decls } @@ -3897,6 +4277,7 @@ Occurrences: [0:8..0:15): exports -> exports/ [0:16..0:23): example <- exports/example/ [2:6..2:13): Decoder <- exports/example/Decoder# +[2:13..2:13): <- exports/example/Decoder#``(). [2:15..2:16): T <- exports/example/Decoder#[T] [3:6..3:12): decode <- exports/example/Decoder#decode(). [3:13..3:14): a <- exports/example/Decoder#decode().(a) @@ -3904,6 +4285,7 @@ Occurrences: [3:22..3:26): Byte -> scala/Byte# [3:30..3:31): T -> exports/example/Decoder#[T] [6:6..6:13): Encoder <- exports/example/Encoder# +[6:13..6:13): <- exports/example/Encoder#``(). [6:15..6:16): T <- exports/example/Encoder#[T] [7:6..7:12): encode <- exports/example/Encoder#encode(). [7:13..7:14): t <- exports/example/Encoder#encode().(t) @@ -3911,6 +4293,7 @@ Occurrences: [7:20..7:25): Array -> scala/Array# [7:26..7:30): Byte -> scala/Byte# [10:6..10:11): Codec <- exports/example/Codec# +[10:11..10:11): <- exports/example/Codec#``(). [10:12..10:13): T <- exports/example/Codec#[T] [10:15..10:21): decode <- exports/example/Codec#decode. [10:23..10:30): Decoder -> exports/example/Decoder# @@ -3961,7 +4344,7 @@ Uri => filename%20with%20spaces.scala Text => empty Language => Scala Symbols => 2 entries -Occurrences => 2 entries +Occurrences => 3 entries Symbols: example/FilenameWithSpaces# => class FilenameWithSpaces extends Object { self: FilenameWithSpaces => +1 decls } @@ -3969,6 +4352,7 @@ example/FilenameWithSpaces#``(). => primary ctor (): FilenameWithSp Occurrences: [0:8..0:15): example <- example/ +[2:0..2:0): <- example/FilenameWithSpaces#``(). [2:6..2:24): FilenameWithSpaces <- example/FilenameWithSpaces# expect/hk.scala @@ -3980,7 +4364,7 @@ Uri => hk.scala Text => empty Language => Scala Symbols => 30 entries -Occurrences => 52 entries +Occurrences => 54 entries Symbols: hk/EitherMonad# => class EitherMonad [typeparam T ] extends Object with Monad[[E] =>> Either[T, E]] { self: EitherMonad[T] => +2 decls } @@ -4017,6 +4401,7 @@ hk/hk$package.MapV#[_] => type _ Occurrences: [0:8..0:10): hk <- hk/ [2:6..2:11): Monad <- hk/Monad# +[2:11..2:11): <- hk/Monad#``(). [2:12..2:13): M <- hk/Monad#[M] [3:6..3:10): pure <- hk/Monad#pure(). [3:11..3:12): A <- hk/Monad#pure().[A] @@ -4039,6 +4424,7 @@ Occurrences: [4:46..4:47): B -> hk/Monad#flatMap().[B] [4:51..4:54): ??? -> scala/Predef.`???`(). [7:6..7:17): EitherMonad <- hk/EitherMonad# +[7:17..7:17): <- hk/EitherMonad#``(). 
[7:18..7:19): T <- hk/EitherMonad#[T] [7:29..7:34): Monad -> hk/Monad# [7:36..7:37): E <- hk/EitherMonad#``().[E] @@ -4077,7 +4463,7 @@ Uri => i5854.scala Text => empty Language => Scala Symbols => 6 entries -Occurrences => 16 entries +Occurrences => 17 entries Symbols: i5854/B# => class B extends Object { self: B => +4 decls } @@ -4090,6 +4476,7 @@ local0 => type A >: Any <: Nothing Occurrences: [0:8..0:13): i5854 <- i5854/ [2:6..2:7): B <- i5854/B# +[7:2..7:2): <- i5854/B#``(). [7:6..7:7): a <- i5854/B#a. [7:9..7:15): String -> scala/Predef.String# [7:24..7:27): Any -> scala/Any# @@ -4114,7 +4501,8 @@ Uri => i9727.scala Text => empty Language => Scala Symbols => 7 entries -Occurrences => 8 entries +Occurrences => 9 entries +Diagnostics => 1 entries Symbols: i9727/Test# => class Test extends Object { self: Test => +2 decls } @@ -4128,6 +4516,7 @@ i9727/i9727$package.b. => val method b Test Occurrences: [0:8..0:13): i9727 <- i9727/ [2:6..2:10): Test <- i9727/Test# +[2:10..2:10): <- i9727/Test#``(). [2:11..2:12): a <- i9727/Test#a. [2:14..2:17): Int -> scala/Int# [3:4..3:5): a <- i9727/i9727$package.a. @@ -4135,6 +4524,9 @@ Occurrences: [4:4..4:5): b <- i9727/i9727$package.b. [4:12..4:16): Test -> i9727/Test# +Diagnostics: +[2:11..2:12): [warning] unused explicit parameter + expect/i9782.scala ------------------ @@ -4144,7 +4536,7 @@ Uri => i9782.scala Text => empty Language => Scala Symbols => 24 entries -Occurrences => 59 entries +Occurrences => 63 entries Symbols: _empty_/Copy# => trait Copy [typeparam In <: Txn[In], typeparam Out <: Txn[Out]] extends Object { self: Copy[In, Out] => +5 decls } @@ -4174,20 +4566,24 @@ local2 => val local outObj: Repr[Out] & Obj[Out] Occurrences: [1:6..1:9): Txn <- _empty_/Txn# +[1:9..1:9): <- _empty_/Txn#``(). [1:10..1:11): T <- _empty_/Txn#[T] [1:15..1:18): Txn -> _empty_/Txn# [1:19..1:20): T -> _empty_/Txn#[T] [3:6..3:10): Elem <- _empty_/Elem# +[3:10..3:10): <- _empty_/Elem#``(). [3:11..3:12): T <- _empty_/Elem#[T] [3:16..3:19): Txn -> _empty_/Txn# [3:20..3:21): T -> _empty_/Elem#[T] [5:6..5:9): Obj <- _empty_/Obj# +[5:9..5:9): <- _empty_/Obj#``(). [5:10..5:11): T <- _empty_/Obj#[T] [5:15..5:18): Txn -> _empty_/Txn# [5:19..5:20): T -> _empty_/Obj#[T] [5:31..5:35): Elem -> _empty_/Elem# [5:36..5:37): T -> _empty_/Obj#[T] [7:6..7:10): Copy <- _empty_/Copy# +[7:10..7:10): <- _empty_/Copy#``(). [7:11..7:13): In <- _empty_/Copy#[In] [7:17..7:20): Txn -> _empty_/Txn# [7:21..7:23): In -> _empty_/Copy#[In] @@ -4242,7 +4638,7 @@ Uri => inlineconsume.scala Text => empty Language => Scala Symbols => 3 entries -Occurrences => 8 entries +Occurrences => 9 entries Symbols: inlineconsume/Foo# => class Foo extends Object { self: Foo => +2 decls } @@ -4255,6 +4651,7 @@ Occurrences: [2:18..2:28): FakePredef -> inlinedefs/FakePredef. [2:29..2:35): assert -> inlinedefs/FakePredef.assert(). [4:6..4:9): Foo <- inlineconsume/Foo# +[5:2..5:2): <- inlineconsume/Foo#``(). [5:6..5:10): test <- inlineconsume/Foo#test(). [5:13..5:19): assert -> inlinedefs/FakePredef.assert(). [5:22..5:23): > -> scala/Int#`>`(+3). @@ -4296,7 +4693,7 @@ Uri => local-file.scala Text => empty Language => Scala Symbols => 3 entries -Occurrences => 6 entries +Occurrences => 7 entries Synthetics => 1 entries Symbols: @@ -4308,6 +4705,7 @@ Occurrences: [0:8..0:15): example <- example/ [2:7..2:17): local-file <- example/`local-file`# [3:2..3:9): locally -> scala/Predef.locally(). +[3:2..3:2): <- example/`local-file`#``(). 
[4:8..4:13): local <- local0 [5:4..5:9): local -> local0 [5:10..5:11): + -> scala/Int#`+`(+4). @@ -4324,7 +4722,7 @@ Uri => nullary.scala Text => empty Language => Scala Symbols => 17 entries -Occurrences => 29 entries +Occurrences => 31 entries Synthetics => 1 entries Symbols: @@ -4348,6 +4746,7 @@ _empty_/test. => final object test extends Object { self: test.type => +1 decls Occurrences: [0:15..0:26): NullaryTest <- _empty_/NullaryTest# +[0:26..0:26): <- _empty_/NullaryTest#``(). [0:27..0:28): T <- _empty_/NullaryTest#[T] [0:30..0:31): m <- _empty_/NullaryTest#[m] [0:32..0:33): s <- _empty_/NullaryTest#``().[m][s] @@ -4366,6 +4765,7 @@ Occurrences: [8:11..8:19): nullary3 -> _empty_/NullaryTest#nullary3(). [11:6..11:14): Concrete <- _empty_/Concrete# [11:23..11:34): NullaryTest -> _empty_/NullaryTest# +[11:23..11:23): <- _empty_/Concrete#``(). [11:35..11:38): Int -> scala/Int# [11:40..11:44): List -> scala/package.List# [12:6..12:14): nullary2 <- _empty_/Concrete#nullary2(). @@ -4389,7 +4789,7 @@ Uri => recursion.scala Text => empty Language => Scala Symbols => 36 entries -Occurrences => 46 entries +Occurrences => 48 entries Symbols: local0 => type N$1 <: Nat @@ -4433,6 +4833,7 @@ Occurrences: [1:8..1:17): recursion <- recursion/ [3:7..3:11): Nats <- recursion/Nats. [4:15..4:18): Nat <- recursion/Nats.Nat# +[5:4..5:4): <- recursion/Nats.Nat#``(). [5:27..5:29): ++ <- recursion/Nats.Nat#`++`(). [5:32..5:36): Succ -> recursion/Nats.Succ# [5:50..5:54): Succ -> recursion/Nats.Succ. @@ -4451,6 +4852,7 @@ Occurrences: [14:14..14:18): Zero <- recursion/Nats.Zero. [14:27..14:30): Nat -> recursion/Nats.Nat# [15:13..15:17): Succ <- recursion/Nats.Succ# +[15:17..15:17): <- recursion/Nats.Succ#``(). [15:18..15:19): N <- recursion/Nats.Succ#[N] [15:23..15:26): Nat -> recursion/Nats.Nat# [15:28..15:29): p <- recursion/Nats.Succ#p. @@ -4486,7 +4888,7 @@ Uri => semanticdb-Definitions.scala Text => empty Language => Scala Symbols => 10 entries -Occurrences => 7 entries +Occurrences => 9 entries Symbols: a/Definitions. => final object Definitions extends Object { self: Definitions.type => +9 decls } @@ -4506,7 +4908,9 @@ Occurrences: [2:6..2:7): a <- a/Definitions.a. [3:6..3:7): b <- a/Definitions.b(). [4:6..4:7): c <- a/Definitions.c(). +[5:2..5:2): <- a/Definitions.D#``(). [5:8..5:9): D <- a/Definitions.D# +[6:2..6:2): <- a/Definitions.E#``(). [6:8..6:9): E <- a/Definitions.E# expect/semanticdb-Flags.scala @@ -4518,7 +4922,8 @@ Uri => semanticdb-Flags.scala Text => empty Language => Scala Symbols => 50 entries -Occurrences => 73 entries +Occurrences => 78 entries +Diagnostics => 4 entries Synthetics => 2 entries Symbols: @@ -4592,6 +4997,7 @@ Occurrences: [8:13..8:16): Int -> scala/Int# [8:25..8:28): ??? -> scala/Predef.`???`(). [9:17..9:18): C <- flags/p/package.C# +[9:18..9:18): <- flags/p/package.C#``(). [9:20..9:21): T <- flags/p/package.C#[T] [9:24..9:25): U <- flags/p/package.C#[U] [9:27..9:28): V <- flags/p/package.C#[V] @@ -4624,9 +5030,12 @@ Occurrences: [17:7..17:8): V <- flags/p/package.V# [17:12..17:15): Int -> scala/Int# [18:14..18:15): X <- flags/p/package.X. +[19:2..19:2): <- flags/p/package.Y#``(). [19:14..19:15): Y <- flags/p/package.Y# +[20:2..20:2): <- flags/p/package.Z#``(). [20:15..20:16): Z <- flags/p/package.Z# [21:8..21:10): AA <- flags/p/package.AA# +[21:10..21:10): <- flags/p/package.AA#``(). [21:11..21:12): x <- flags/p/package.AA#x. [21:14..21:17): Int -> scala/Int# [21:23..21:24): y <- flags/p/package.AA#y. 
@@ -4634,6 +5043,7 @@ Occurrences: [21:35..21:36): z <- flags/p/package.AA#z(). [21:38..21:41): Int -> scala/Int# [22:8..22:9): S <- flags/p/package.S# +[22:9..22:9): <- flags/p/package.S#``(). [22:11..22:22): specialized -> scala/specialized# [22:23..22:24): T <- flags/p/package.S#[T] [23:6..23:10): List -> scala/package.List. @@ -4648,6 +5058,12 @@ Occurrences: [25:27..25:28): t <- local1 [25:33..25:36): ??? -> scala/Predef.`???`(). +Diagnostics: +[9:30..9:31): [warning] unused explicit parameter +[9:36..9:37): [warning] unused explicit parameter +[9:42..9:43): [warning] unused explicit parameter +[21:11..21:12): [warning] unused explicit parameter + Synthetics: [23:6..23:10):List => *.unapplySeq[Nothing] [24:19..24:23):List => *.unapplySeq[Nothing] @@ -4661,7 +5077,8 @@ Uri => semanticdb-Types.scala Text => empty Language => Scala Symbols => 143 entries -Occurrences => 228 entries +Occurrences => 246 entries +Diagnostics => 4 entries Synthetics => 1 entries Symbols: @@ -4818,6 +5235,7 @@ Occurrences: [3:13..3:21): language -> scala/language. [3:22..3:33): higherKinds -> scala/language.higherKinds. [5:6..5:9): ann <- types/ann# +[5:9..5:9): <- types/ann#``(). [5:10..5:11): T <- types/ann#[T] [5:13..5:14): x <- types/ann#x. [5:16..5:17): T -> types/ann#[T] @@ -4826,25 +5244,36 @@ Occurrences: [5:44..5:60): StaticAnnotation -> scala/annotation/StaticAnnotation# [6:6..6:10): ann1 <- types/ann1# [6:19..6:24): scala -> scala/ +[6:19..6:19): <- types/ann1#``(). [6:25..6:35): annotation -> scala/annotation/ [6:36..6:52): StaticAnnotation -> scala/annotation/StaticAnnotation# [7:6..7:10): ann2 <- types/ann2# [7:19..7:24): scala -> scala/ +[7:19..7:19): <- types/ann2#``(). [7:25..7:35): annotation -> scala/annotation/ [7:36..7:52): StaticAnnotation -> scala/annotation/StaticAnnotation# +[9:0..9:0): <- types/B#``(). [9:6..9:7): B <- types/B# +[11:0..11:0): <- types/C#``(). [11:6..11:7): C <- types/C# [13:6..13:7): P <- types/P# +[14:2..14:2): <- types/P#C#``(). +[14:2..14:2): <- types/P#``(). [14:8..14:9): C <- types/P#C# +[15:2..15:2): <- types/P#X#``(). [15:8..15:9): X <- types/P#X# [16:6..16:7): x <- types/P#x. [16:14..16:15): X -> types/P#X# [19:6..19:7): T <- types/T# +[20:2..20:2): <- types/T#C#``(). +[20:2..20:2): <- types/T#``(). [20:8..20:9): C <- types/T#C# +[21:2..21:2): <- types/T#X#``(). [21:8..21:9): X <- types/T#X# [22:6..22:7): x <- types/T#x. [22:14..22:15): X -> types/T#X# [25:11..25:14): Foo <- types/Foo# +[25:14..25:14): <- types/Foo#``(). [25:15..25:16): s <- types/Foo#s. [27:7..27:10): Foo <- types/Foo. [28:6..28:7): x <- types/Foo.x. @@ -4853,15 +5282,18 @@ Occurrences: [29:17..29:18): x -> types/Foo.x. [32:7..32:11): Test <- types/Test. [33:8..33:9): M <- types/Test.M# +[34:4..34:4): <- types/Test.M#``(). [34:8..34:9): m <- types/Test.M#m(). [34:11..34:14): Int -> scala/Int# [34:17..34:20): ??? -> scala/Predef.`???`(). [37:8..37:9): N <- types/Test.N# +[38:4..38:4): <- types/Test.N#``(). [38:8..38:9): n <- types/Test.N#n(). [38:11..38:14): Int -> scala/Int# [38:17..38:20): ??? -> scala/Predef.`???`(). [41:8..41:9): C <- types/Test.C# [41:18..41:19): M -> types/Test.M# +[41:18..41:18): <- types/Test.C#``(). [42:8..42:9): p <- types/Test.C#p. [42:16..42:17): P -> types/P# [43:8..43:9): x <- types/Test.C#x. @@ -4961,8 +5393,10 @@ Occurrences: [78:11..78:25): ClassInfoType1 <- types/Test.C#ClassInfoType1. [79:10..79:24): ClassInfoType2 <- types/Test.C#ClassInfoType2# [79:33..79:34): B -> types/B# +[79:33..79:33): <- types/Test.C#ClassInfoType2#``(). 
[79:41..79:42): x <- types/Test.C#ClassInfoType2#x(). [80:10..80:24): ClassInfoType3 <- types/Test.C#ClassInfoType3# +[80:24..80:24): <- types/Test.C#ClassInfoType3#``(). [80:25..80:26): T <- types/Test.C#ClassInfoType3#[T] [82:11..82:21): MethodType <- types/Test.C#MethodType. [83:10..83:12): x1 <- types/Test.C#MethodType.x1(). @@ -4995,6 +5429,7 @@ Occurrences: [92:25..92:28): Int -> scala/Int# [92:31..92:34): ??? -> scala/Predef.`???`(). [95:15..95:27): RepeatedType <- types/Test.C#RepeatedType# +[95:27..95:27): <- types/Test.C#RepeatedType#``(). [95:28..95:29): s <- types/Test.C#RepeatedType#s. [95:31..95:37): String -> scala/Predef.String# [96:10..96:12): m1 <- types/Test.C#RepeatedType#m1(). @@ -5039,6 +5474,15 @@ Occurrences: [119:32..119:38): Option -> scala/Option# [119:39..119:42): Int -> scala/Int# +Diagnostics: +[5:13..5:14): [warning] unused explicit parameter +[62:25..62:29): [warning] with as a type operator has been deprecated; use & instead +This construct can be rewritten automatically under -rewrite -source 3.4-migration. +[63:25..63:29): [warning] with as a type operator has been deprecated; use & instead +This construct can be rewritten automatically under -rewrite -source 3.4-migration. +[71:31..71:31): [warning] `_` is deprecated for wildcard arguments of types: use `?` instead +This construct can be rewritten automatically under -rewrite -source 3.4-migration. + Synthetics: [68:20..68:24):@ann => *[Int] @@ -5051,7 +5495,7 @@ Uri => semanticdb-extract.scala Text => empty Language => Scala Symbols => 18 entries -Occurrences => 20 entries +Occurrences => 21 entries Synthetics => 3 entries Symbols: @@ -5093,6 +5537,7 @@ Occurrences: [14:2..14:9): println -> scala/Predef.println(+1). [14:12..14:13): + -> scala/Int#`+`(+4). [16:13..16:16): Foo <- _empty_/AnObject.Foo# +[16:16..16:16): <- _empty_/AnObject.Foo#``(). [16:17..16:18): x <- _empty_/AnObject.Foo#x. 
[16:20..16:23): Int -> scala/Int# diff --git a/tests/sjs-junit/test/org/scalajs/testsuite/jsinterop/NonNativeJSTypeTestScala3.scala b/tests/sjs-junit/test/org/scalajs/testsuite/jsinterop/NonNativeJSTypeTestScala3.scala new file mode 100644 index 000000000000..ceb29d39b6a0 --- /dev/null +++ b/tests/sjs-junit/test/org/scalajs/testsuite/jsinterop/NonNativeJSTypeTestScala3.scala @@ -0,0 +1,78 @@ +package org.scalajs.testsuite.jsinterop + +import org.junit.Assert.* +import org.junit.Test + +import scala.scalajs.js +import scala.scalajs.js.annotation.* + +class NonNativeJSTypeTestScala3 { + import NonNativeJSTypeTestScala3.* + + @Test + def overloadWithVarargOfGenericType(): Unit = { + class OverloadWithVarargOfGenericType extends js.Object { + def overloaded(x: Int): Int = x + def overloaded(xs: (Int, Int)*): Int = xs.size + } + + val obj = new OverloadWithVarargOfGenericType + assertEquals(5, obj.overloaded(5)) + assertEquals(1, obj.overloaded((5, 6))) + assertEquals(2, obj.overloaded((1, 2), (3, 4))) + } + + @Test + def overloadWithVarargOfValueClass(): Unit = { + class OverloadWithVarargOfValueClass extends js.Object { + def overloaded(x: Int): Int = x + def overloaded(xs: VC*): Int = xs.size + } + + val obj = new OverloadWithVarargOfValueClass + assertEquals(5, obj.overloaded(5)) + assertEquals(1, obj.overloaded(new VC(5))) + assertEquals(2, obj.overloaded(new VC(5), new VC(6))) + } + + @Test + def overloadWithVarargOfGenericValueClass(): Unit = { + class OverloadWithVarargOfGenericValueClass extends js.Object { + def overloaded(x: Int): Int = x + def overloaded(xs: GenVC[Int]*): Int = xs.size + } + + val obj = new OverloadWithVarargOfGenericValueClass + assertEquals(5, obj.overloaded(5)) + assertEquals(1, obj.overloaded(new GenVC(5))) + assertEquals(2, obj.overloaded(new GenVC(5), new GenVC(6))) + } + + @Test + def overloadWithVarargOfOpaqueTypeAlias(): Unit = { + import OpaqueContainer.* + + class OverloadWithVarargOfOpaqueTypeAlias extends js.Object { + def overloaded(x: String): Int = x.toInt + def overloaded(xs: OpaqueInt*): Int = xs.size + } + + val obj = new OverloadWithVarargOfOpaqueTypeAlias + assertEquals(5, obj.overloaded("5")) + assertEquals(1, obj.overloaded(fromInt(5))) + assertEquals(2, obj.overloaded(fromInt(5), fromInt(6))) + } +} + +object NonNativeJSTypeTestScala3 { + final class VC(val x: Int) extends AnyVal + + final class GenVC[T](val x: T) extends AnyVal + + object OpaqueContainer { + opaque type OpaqueInt = Int + + def fromInt(x: Int): OpaqueInt = x + def toInt(x: OpaqueInt): Int = x + } +} diff --git a/tests/warn/context-bounds-migration-3.4.check b/tests/warn/context-bounds-migration-3.4.check new file mode 100644 index 000000000000..5341cfbe3ea5 --- /dev/null +++ b/tests/warn/context-bounds-migration-3.4.check @@ -0,0 +1,6 @@ +-- Warning: tests/warn/context-bounds-migration-3.4.scala:9:2 ---------------------------------------------------------- +9 | foo(C[Int]()) // warn + | ^^^ + | Context bounds will map to context parameters. + | A `using` clause is needed to pass explicit arguments to them. + | This code can be rewritten automatically under -rewrite -source 3.4-migration. 
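For context on the check file above: under `-source 3.4`, a context bound desugars to a context parameter, so passing an explicit argument for it requires a `using` clause. A minimal sketch of the desugaring (illustrative only, not part of the diff; `fooDesugared` is a hypothetical name for the spelled-out form):

```scala
class C[T]
given [T]: C[T] = C[T]()

def foo[X: C] = ()                    // under -source 3.4, roughly the same as:
def fooDesugared[X](using C[X]) = ()  // hypothetical spelled-out form

def use =
  foo[Int]                            // context argument resolved from the given
  fooDesugared[Int](using C[Int]())   // explicit argument needs `using`
```

Calling `foo(C[Int]())` without `using` is what triggers the warning, as the test right below exercises.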
diff --git a/tests/warn/context-bounds-migration-3.4.scala b/tests/warn/context-bounds-migration-3.4.scala new file mode 100644 index 000000000000..1094db68f41b --- /dev/null +++ b/tests/warn/context-bounds-migration-3.4.scala @@ -0,0 +1,10 @@ +//> using options -source 3.4 + +class C[T] +def foo[X: C] = () + +given [T]: C[T] = C[T]() + +def Test = + foo(C[Int]()) // warn + foo(using C[Int]()) // ok diff --git a/tests/warn/enum-approx2.check b/tests/warn/enum-approx2.check new file mode 100644 index 000000000000..a75c15b424ff --- /dev/null +++ b/tests/warn/enum-approx2.check @@ -0,0 +1,14 @@ +-- [E030] Match case Unreachable Warning: tests/warn/enum-approx2.scala:7:12 ------------------------------------------- +7 | case Fun(x: Exp[Int => String]) => ??? // warn: unreachable // also: unchecked (hidden) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Unreachable case +-- [E121] Pattern Match Warning: tests/warn/enum-approx2.scala:8:9 ----------------------------------------------------- +8 | case _ => // warn: unreachable-only-null + | ^ + | Unreachable case except for null (if this is intentional, consider writing case null => instead). +-- [E092] Pattern Match Unchecked Warning: tests/warn/enum-approx2.scala:6:13 ------------------------------------------ +6 | case Fun(x: Fun[Int, Double]) => ??? // warn: unchecked + | ^ + |the type test for Fun[Int, Double] cannot be checked at runtime because its type arguments can't be determined from Exp[Int => Int] + | + | longer explanation available when compiling with `-explain` diff --git a/tests/warn/enum-approx2.scala b/tests/warn/enum-approx2.scala new file mode 100644 index 000000000000..38a78cd6a5e9 --- /dev/null +++ b/tests/warn/enum-approx2.scala @@ -0,0 +1,10 @@ +sealed trait Exp[T] +case class Fun[A, B](f: Exp[A => B]) extends Exp[A => B] + +class Test { + def eval(e: Fun[Int, Int]) = e match { + case Fun(x: Fun[Int, Double]) => ??? // warn: unchecked + case Fun(x: Exp[Int => String]) => ??? // warn: unreachable // also: unchecked (hidden) + case _ => // warn: unreachable-only-null + } +} diff --git a/tests/warn/i11178.scala b/tests/warn/i11178.scala new file mode 100644 index 000000000000..a59b899be365 --- /dev/null +++ b/tests/warn/i11178.scala @@ -0,0 +1,36 @@ +trait Box[+T] +case class Foo[+S](s: S) extends Box[S] + +def unwrap2[A](b: Box[A]): A = + b match + case _: Foo[Int] => 0 // warn + +object Test1 { + // Invariant case, OK + sealed trait Bar[A] + + def test[A](bar: Bar[A]) = + bar match { + case _: Bar[Boolean] => ??? // warn + } +} + +object Test2 { + // Covariant case + sealed trait Bar[+A] + + def test[A](bar: Bar[A]) = + bar match { + case _: Bar[Boolean] => ??? // warn + } +} + +object Test3 { + // Contravariant case + sealed trait Bar[-A] + + def test[A](bar: Bar[A]) = + bar match { + case _: Bar[Boolean] => ??? 
// warn + } +} diff --git a/tests/warn/i16451.check b/tests/warn/i16451.check new file mode 100644 index 000000000000..09c2a7df8179 --- /dev/null +++ b/tests/warn/i16451.check @@ -0,0 +1,44 @@ +-- [E030] Match case Unreachable Warning: tests/warn/i16451.scala:14:9 ------------------------------------------------- +14 | case x: Wrapper[Color.Green.type] => None // warn: unreachable // also: unchecked (hidden) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Unreachable case +-- [E030] Match case Unreachable Warning: tests/warn/i16451.scala:22:9 ------------------------------------------------- +22 | case x: Wrapper[Color.Green.type] => None // warn: unreachable // also: unchecked (hidden) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Unreachable case +-- [E092] Pattern Match Unchecked Warning: tests/warn/i16451.scala:13:9 ------------------------------------------------ +13 | case x: Wrapper[Color.Red.type] => Some(x) // warn: unchecked + | ^ + |the type test for Wrapper[(Color.Red : Color)] cannot be checked at runtime because its type arguments can't be determined from Wrapper[Color] + | + | longer explanation available when compiling with `-explain` +-- [E092] Pattern Match Unchecked Warning: tests/warn/i16451.scala:21:9 ------------------------------------------------ +21 | case x: Wrapper[Color.Red.type] => Some(x) // warn: unchecked + | ^ + |the type test for Wrapper[(Color.Red : Color)] cannot be checked at runtime because its type arguments can't be determined from Any + | + | longer explanation available when compiling with `-explain` +-- [E092] Pattern Match Unchecked Warning: tests/warn/i16451.scala:25:9 ------------------------------------------------ +25 | case x: Wrapper[Color.Red.type] => Some(x) // warn: unchecked + | ^ + |the type test for Wrapper[(Color.Red : Color)] cannot be checked at runtime because its type arguments can't be determined from Wrapper[Color] + | + | longer explanation available when compiling with `-explain` +-- [E092] Pattern Match Unchecked Warning: tests/warn/i16451.scala:29:9 ------------------------------------------------ +29 | case x: Wrapper[Color.Red.type] => Some(x) // warn: unchecked + | ^ + |the type test for Wrapper[(Color.Red : Color)] cannot be checked at runtime because its type arguments can't be determined from A1 + | + | longer explanation available when compiling with `-explain` +-- [E092] Pattern Match Unchecked Warning: tests/warn/i16451.scala:34:11 ----------------------------------------------- +34 | case x: Wrapper[Color.Red.type] => x // warn: unchecked + | ^ + |the type test for Wrapper[(Color.Red : Color)] cannot be checked at runtime because its type arguments can't be determined from Wrapper[Color] + | + | longer explanation available when compiling with `-explain` +-- [E092] Pattern Match Unchecked Warning: tests/warn/i16451.scala:39:11 ----------------------------------------------- +39 | case x: Wrapper[Color.Red.type] => x // warn: unchecked + | ^ + |the type test for Wrapper[(Color.Red : Color)] cannot be checked at runtime because its type arguments can't be determined from Wrapper[Color] + | + | longer explanation available when compiling with `-explain` diff --git a/tests/warn/i16451.scala b/tests/warn/i16451.scala new file mode 100644 index 000000000000..138af3632772 --- /dev/null +++ b/tests/warn/i16451.scala @@ -0,0 +1,44 @@ +// +enum Color: + case Red, Green +//sealed trait Color +//object Color: +// case object Red extends Color +// case object Green extends Color + +case class Wrapper[A](value: A) + +object Test: + def 
test_correct(x: Wrapper[Color]): Option[Wrapper[Color.Red.type]] = x match + case x: Wrapper[Color.Red.type] => Some(x) // warn: unchecked + case x: Wrapper[Color.Green.type] => None // warn: unreachable // also: unchecked (hidden) + + def test_different(x: Wrapper[Color]): Option[Wrapper[Color]] = x match + case x @ Wrapper(_: Color.Red.type) => Some(x) + case x @ Wrapper(_: Color.Green.type) => None + + def test_any(x: Any): Option[Wrapper[Color.Red.type]] = x match + case x: Wrapper[Color.Red.type] => Some(x) // warn: unchecked + case x: Wrapper[Color.Green.type] => None // warn: unreachable // also: unchecked (hidden) + + def test_wrong(x: Wrapper[Color]): Option[Wrapper[Color.Red.type]] = x match + case x: Wrapper[Color.Red.type] => Some(x) // warn: unchecked + case null => None + + def t2[A1 <: Wrapper[Color]](x: A1): Option[Wrapper[Color.Red.type]] = x match + case x: Wrapper[Color.Red.type] => Some(x) // warn: unchecked + case null => None + + def test_wrong_seq(xs: Seq[Wrapper[Color]]): Seq[Wrapper[Color.Red.type]] = + xs.collect { + case x: Wrapper[Color.Red.type] => x // warn: unchecked + } + + def test_wrong_seq2(xs: Seq[Wrapper[Color]]): Seq[Wrapper[Color.Red.type]] = + xs.collect { x => x match + case x: Wrapper[Color.Red.type] => x // warn: unchecked + } + + def main(args: Array[String]): Unit = + println(test_wrong_seq(Seq(Wrapper(Color.Red), Wrapper(Color.Green)))) + // outputs: List(Wrapper(Red), Wrapper(Green)) diff --git a/tests/warn/i17429.check b/tests/warn/i17429.check new file mode 100644 index 000000000000..3496f3d6f106 --- /dev/null +++ b/tests/warn/i17429.check @@ -0,0 +1,6 @@ +-- Warning: tests/warn/i17429.scala:3:17 ------------------------------------------------------------------------------- +3 | println(A(1) plus A(2)) // warn + | ^^^^ + | Alphanumeric method plus is not declared infix; it should not be used as infix operator. + | Instead, use method syntax .plus(...) or backticked identifier `plus`. + | The latter can be rewritten automatically under -rewrite -source 3.4-migration. diff --git a/tests/warn/i17429.scala b/tests/warn/i17429.scala new file mode 100644 index 000000000000..4c7616303de5 --- /dev/null +++ b/tests/warn/i17429.scala @@ -0,0 +1,3 @@ +case class A(a:Int): + def plus(a:A) = A(this.a+a.a) + println(A(1) plus A(2)) // warn diff --git a/tests/warn/i18661.scala b/tests/warn/i18661.scala new file mode 100644 index 000000000000..b7f490ae44df --- /dev/null +++ b/tests/warn/i18661.scala @@ -0,0 +1,28 @@ +class Jacket[T]: + sealed trait BodyType: + sealed trait OrganType: + case class Heart() extends Body.Organ + case class Brain() extends Body.Organ + object Organ extends OrganType + sealed trait Organ + object Body extends BodyType + sealed trait Body + +type AnyJacket = Jacket[?] 
+type AnyBodyOrgan = AnyJacket#BodyType#Organ +type AnyBodyOrganHeart = AnyJacket#BodyType#OrganType#Heart +type AnyBodyOrganBrain = AnyJacket#BodyType#OrganType#Brain + +def check( asr : AnyBodyOrgan ) : String = + asr match + case c : AnyBodyOrganHeart => "Heart" + case s : AnyBodyOrganBrain => "Brain" // was: unreachable + +val jacket = new Jacket[Unit] +val heart = new jacket.Body.Organ.Heart() +val brain = new jacket.Body.Organ.Brain() + +@main +def go = + println( check( heart ) ) + println( check( brain ) ) diff --git a/tests/warn/i19013-a.check b/tests/warn/i19013-a.check new file mode 100644 index 000000000000..2b5c4ada7ebf --- /dev/null +++ b/tests/warn/i19013-a.check @@ -0,0 +1,6 @@ +-- [E092] Pattern Match Unchecked Warning: tests/warn/i19013-a.scala:5:13 ---------------------------------------------- +5 | catch case e: E => Some(e) // warn + | ^^^^ + | the type test for E cannot be checked at runtime because it refers to an abstract type member or type parameter + | + | longer explanation available when compiling with `-explain` \ No newline at end of file diff --git a/tests/warn/i19013-a.scala b/tests/warn/i19013-a.scala new file mode 100644 index 000000000000..1c83c7173c86 --- /dev/null +++ b/tests/warn/i19013-a.scala @@ -0,0 +1,7 @@ +def handle[E <: Exception](f: => Unit): Option[E] = + try + f + None + catch case e: E => Some(e) // warn + +val r: RuntimeException = handle[RuntimeException](throw new Exception()).get \ No newline at end of file diff --git a/tests/warn/i19013-b.check b/tests/warn/i19013-b.check new file mode 100644 index 000000000000..f2cee8b58615 --- /dev/null +++ b/tests/warn/i19013-b.check @@ -0,0 +1,6 @@ +-- [E092] Pattern Match Unchecked Warning: tests/warn/i19013-b.scala:7:29 ---------------------------------------------- +7 | catch case CustomException(e: E) => Some(e) // warn + | ^ + | the type test for E cannot be checked at runtime because it refers to an abstract type member or type parameter + | + | longer explanation available when compiling with `-explain` diff --git a/tests/warn/i19013-b.scala b/tests/warn/i19013-b.scala new file mode 100644 index 000000000000..73d342f4f683 --- /dev/null +++ b/tests/warn/i19013-b.scala @@ -0,0 +1,9 @@ +case class CustomException(x: Any) extends Exception("") + +def handle[E](f: => Unit): Option[E] = + try + f + None + catch case CustomException(e: E) => Some(e) // warn + +val r: RuntimeException = handle[RuntimeException](throw new Exception()).get diff --git a/tests/warn/i19084.scala b/tests/warn/i19084.scala new file mode 100644 index 000000000000..0ba033d131be --- /dev/null +++ b/tests/warn/i19084.scala @@ -0,0 +1,17 @@ + + +class Test: + def t1(y: ( + Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, + "Bob", Int, 33, Int, + Int, Int, Int, Int, Int, Int, Int, Int, Int, Int) + ): Unit = y match + case b @ (x0, x1, x2, x3, x4, x5, x6, x7, x8, x9, + "Bob", y1, 33, y2, + z0, z1, z2, z3, z4, z5, z6, z7, z8, z9) + => () + case b @ (x0, x1, x2, x3, x4, x5, x6, x7, x8, x9, // warn: unreachable + "Bob", y1, 33, y2, + z0, z1, z2, z3, z4, z5, z6, z7, z8, z9) + => () + case _ => () diff --git a/tests/warn/i5826.check b/tests/warn/i5826.check new file mode 100644 index 000000000000..18ff50a933cb --- /dev/null +++ b/tests/warn/i5826.check @@ -0,0 +1,30 @@ +-- [E092] Pattern Match Unchecked Warning: tests/warn/i5826.scala:3:9 -------------------------------------------------- +3 | case ls: List[Int] => ls.head // warn: unchecked + | ^ + | the type test for List[Int] cannot be checked at runtime because its type arguments 
can't be determined from A + | + | longer explanation available when compiling with `-explain` +-- [E092] Pattern Match Unchecked Warning: tests/warn/i5826.scala:8:9 -------------------------------------------------- +8 | case ls: List[Int] => ls.head // warn: unchecked + | ^ + |the type test for List[Int] cannot be checked at runtime because its type arguments can't be determined from List[String] + | + | longer explanation available when compiling with `-explain` +-- [E092] Pattern Match Unchecked Warning: tests/warn/i5826.scala:16:9 ------------------------------------------------- +16 | case ls: A[X] => 4 // warn + | ^ + |the type test for Foo.this.A[X] cannot be checked at runtime because its type arguments can't be determined from Foo.this.B[X] + | + | longer explanation available when compiling with `-explain` +-- [E092] Pattern Match Unchecked Warning: tests/warn/i5826.scala:21:9 ------------------------------------------------- +21 | case ls: List[Int] => ls.head // warn, List extends Int => T + | ^ + |the type test for List[Int] cannot be checked at runtime because its type arguments can't be determined from A => Int + | + | longer explanation available when compiling with `-explain` +-- [E092] Pattern Match Unchecked Warning: tests/warn/i5826.scala:27:54 ------------------------------------------------ +27 | def test5[T](x: A[T] | B[T] | Option[T]): Boolean = x.isInstanceOf[C[String]] // warn + | ^ + |the type test for Foo.this.C[String] cannot be checked at runtime because its type arguments can't be determined from Foo.this.A[T] + | + | longer explanation available when compiling with `-explain` diff --git a/tests/warn/i5826.scala b/tests/warn/i5826.scala new file mode 100644 index 000000000000..f54d6e58d033 --- /dev/null +++ b/tests/warn/i5826.scala @@ -0,0 +1,40 @@ +class Foo { + def test[A]: (List[Int] | A) => Int = { + case ls: List[Int] => ls.head // warn: unchecked + case _ => 0 + } + + def test2: List[Int] | List[String] => Int = { + case ls: List[Int] => ls.head // warn: unchecked + } + + trait A[T] + trait B[T] + + // suppose: class C extends A[Int] with B[String] + def test3[X]: A[X] | B[X] => Int = { + case ls: A[X] => 4 // warn + case _ => 0 + } + + def test4[A](x: List[Int] | (A => Int)) = x match { + case ls: List[Int] => ls.head // warn, List extends Int => T + case _ => 0 + } + + final class C[T] extends A[T] + + def test5[T](x: A[T] | B[T] | Option[T]): Boolean = x.isInstanceOf[C[String]] // warn + + def test6[T](x: A[T] | B[T] | Option[T]): Boolean = x.isInstanceOf[C[T]] + + def test7[A](x: Option[Int] | (A => Int)) = x match { + case ls: Option[Int] => ls.head // OK, Option decomposes to Some and None + case _ => 0 + } + + def test8(x: List[Int] | A[String]) = x match { + case ls: List[Int] => ls.head // OK, List decomposes to :: and Nil + case _ => 0 + } +} diff --git a/tests/warn/i8932.scala b/tests/warn/i8932.scala new file mode 100644 index 000000000000..95a4e86e9791 --- /dev/null +++ b/tests/warn/i8932.scala @@ -0,0 +1,12 @@ +sealed trait Foo[+A] +case class Bar[A]() extends Foo[A] + +class Dummy extends Bar[Nothing] with Foo[String] + +def bugReport[A](foo: Foo[A]): Foo[A] = + foo match { + case bar: Bar[A] => bar // warn: unchecked + case dummy: Dummy => ??? 
// warn: unreachable + } + +def test = bugReport(new Dummy: Foo[String]) diff --git a/tests/neg-custom-args/fatal-warnings/suppressed-type-test-warnings.scala b/tests/warn/suppressed-type-test-warnings.scala similarity index 85% rename from tests/neg-custom-args/fatal-warnings/suppressed-type-test-warnings.scala rename to tests/warn/suppressed-type-test-warnings.scala index 92d86b3307e5..63849cb2a1ba 100644 --- a/tests/neg-custom-args/fatal-warnings/suppressed-type-test-warnings.scala +++ b/tests/warn/suppressed-type-test-warnings.scala @@ -11,18 +11,18 @@ object Test { } def err1[A, B](value: Foo[A, B], a: A => Int): B = value match { - case b: Bar[A] => // spurious // error + case b: Bar[A] => // spurious // warn b.x } def err2[A, B](value: Foo[A, B], a: A => Int): B = value match { - case b: Bar[B] => // spurious // error + case b: Bar[B] => // spurious // warn b.x case _ => ??? // avoid fatal inexhaustivity warnings suppressing the uncheckable warning } def fail[A, B](value: Foo[A, B], a: A => Int): B = value match { - case b: Bar[Int] => // error + case b: Bar[Int] => // warn b.x case _ => ??? // avoid fatal inexhaustivity warnings suppressing the uncheckable warning }
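Most of the `[E092]` expectations above stem from type arguments and abstract type members that `isInstanceOf` cannot check after erasure. For completeness, a minimal sketch of the supported workaround, `scala.reflect.TypeTest` (illustrative only, not part of the diff; `handleChecked` is a hypothetical name, adapted from the i19013 pattern above):

```scala
import scala.reflect.TypeTest

// Instead of the unchecked `catch case e: E`, thread a TypeTest that can
// actually check E against the caught Throwable at runtime.
def handleChecked[E <: Exception](f: => Unit)(using tt: TypeTest[Throwable, E]): Option[E] =
  try
    f
    None
  catch case e: Throwable =>
    tt.unapply(e) match
      case Some(matched) => Some(matched)  // e is an E: return it
      case None          => throw e        // not an E: rethrow

@main def demo(): Unit =
  // TypeTest[Throwable, RuntimeException] is synthesized by the compiler
  // here, because RuntimeException is a runtime-checkable class type.
  println(handleChecked[RuntimeException](throw new RuntimeException("boom")))
  println(handleChecked[RuntimeException](()))
```

Callers that instantiate `E` with a runtime-checkable type get a compiler-synthesized instance for free; callers with a truly abstract `E` must supply their own `TypeTest`, which makes the previously silent unsoundness explicit in the signature.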