diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index cf375b147793..cd5c1d717990 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -47,7 +47,7 @@ jobs: test_non_bootstrapped: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2021-03-22 + image: lampepfl/dotty:2023-11-07 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -65,8 +65,8 @@ jobs: && github.repository == 'scala/scala3' )" steps: - - name: Set JDK 16 as default - run: echo "/usr/lib/jvm/java-16-openjdk-amd64/bin" >> $GITHUB_PATH + - name: Set JDK 17 as default + run: echo "/usr/lib/jvm/java-17-openjdk-amd64/bin" >> $GITHUB_PATH ## Workaround for https://github.com/actions/runner/issues/2033 (See https://github.com/scala/scala3/pull/19720) - name: Reset existing repo @@ -99,7 +99,7 @@ jobs: test: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2021-03-22 + image: lampepfl/dotty:2023-11-07 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -119,8 +119,8 @@ jobs: )" steps: - - name: Set JDK 16 as default - run: echo "/usr/lib/jvm/java-16-openjdk-amd64/bin" >> $GITHUB_PATH + - name: Set JDK 17 as default + run: echo "/usr/lib/jvm/java-17-openjdk-amd64/bin" >> $GITHUB_PATH - name: Reset existing repo run: | @@ -141,7 +141,7 @@ jobs: - name: Cmd Tests run: | - ./project/scripts/sbt ";dist/pack; scala3-bootstrapped/compile; scala3-bootstrapped/test ;sbt-test/scripted scala2-compat/*; scala2-library-tasty-tests/test; scala3-compiler-bootstrapped/scala3CompilerCoursierTest:test" + ./project/scripts/sbt ";dist/pack; scala3-bootstrapped/compile; scala3-bootstrapped/test ;sbt-test/scripted scala2-compat/*; scala3-compiler-bootstrapped/scala3CompilerCoursierTest:test" ./project/scripts/cmdTests ./project/scripts/bootstrappedOnlyCmdTests @@ -153,12 +153,12 @@ jobs: run: ./project/scripts/sbt ";set ThisBuild/Build.scala2Library := Build.Scala2LibraryTasty ;scala3-bootstrapped/testCompilation i5; scala3-bootstrapped/testCompilation tests/run/typelevel-peano.scala; scala3-bootstrapped/testOnly dotty.tools.backend.jvm.DottyBytecodeTests" # only test a subset of test to avoid doubling the CI execution time - name: Test with Scala 2 library with CC TASTy (fast) - run: ./project/scripts/sbt "scala2-library-cc/compile; scala2-library-cc-tasty/compile; scala3-bootstrapped/testCompilation i3" + run: ./project/scripts/sbt ";set ThisBuild/Build.scala2Library := Build.Scala2LibraryCCTasty; scala2-library-cc/compile; scala2-library-cc-tasty/compile; scala3-bootstrapped/testCompilation i3" test_scala2_library_tasty: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2021-03-22 + image: lampepfl/dotty:2023-11-07 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -175,8 +175,8 @@ jobs: )" steps: - - name: Set JDK 16 as default - run: echo "/usr/lib/jvm/java-16-openjdk-amd64/bin" >> $GITHUB_PATH + - name: Set JDK 17 as default + run: echo "/usr/lib/jvm/java-17-openjdk-amd64/bin" >> $GITHUB_PATH - name: Reset existing repo run: | @@ -198,9 +198,8 @@ jobs: - name: Test with Scala 2 library TASTy run: ./project/scripts/sbt ";set ThisBuild/Build.scala2Library := Build.Scala2LibraryTasty ;scala3-bootstrapped/test" - # TODO test all the test configurations in non-CC library (currently disabled due to bug while loading the library) - # - name: Test with Scala 2 library with CC TASTy - # run: ./project/scripts/sbt ";set ThisBuild/Build.scala2Library := 
Build.Scala2LibraryCCTasty ;scala3-bootstrapped/test" + - name: Test with Scala 2 library with CC TASTy + run: ./project/scripts/sbt ";set ThisBuild/Build.scala2Library := Build.Scala2LibraryCCTasty ;scala3-bootstrapped/test" test_windows_fast: @@ -227,7 +226,7 @@ jobs: uses: actions/checkout@v4 - name: Test - run: sbt ";scala3-bootstrapped/compile" + run: sbt ";scala3-bootstrapped/compile; scala3-bootstrapped/testCompilation" shell: cmd - name: build binary @@ -282,7 +281,7 @@ jobs: name: MiMa runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2021-03-22 + image: lampepfl/dotty:2023-11-07 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -301,6 +300,9 @@ jobs: && github.repository == 'scala/scala3' )" steps: + - name: Set JDK 17 as default + run: echo "/usr/lib/jvm/java-17-openjdk-amd64/bin" >> $GITHUB_PATH + - name: Reset existing repo run: | git config --global --add safe.directory /__w/scala3/scala3 @@ -330,7 +332,7 @@ jobs: community_build_a: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2021-03-22 + image: lampepfl/dotty:2023-11-07 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -351,6 +353,11 @@ jobs: )" steps: + ###################################################################################### + ## WARNING: DO NOT CHANGE THE JAVA VERSION HERE. SCALA IS DISTRIBUTED USING JAVA 8. ## + ###################################################################################### + - name: Set JDK 8 as default + run: echo "/usr/lib/jvm/java-8-openjdk-amd64/bin" >> $GITHUB_PATH - name: Reset existing repo run: | git config --global --add safe.directory /__w/scala3/scala3 @@ -382,7 +389,7 @@ jobs: community_build_b: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2021-03-22 + image: lampepfl/dotty:2023-11-07 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -403,6 +410,11 @@ jobs: )" steps: + ###################################################################################### + ## WARNING: DO NOT CHANGE THE JAVA VERSION HERE. SCALA IS DISTRIBUTED USING JAVA 8. ## + ###################################################################################### + - name: Set JDK 8 as default + run: echo "/usr/lib/jvm/java-8-openjdk-amd64/bin" >> $GITHUB_PATH - name: Reset existing repo run: | git config --global --add safe.directory /__w/scala3/scala3 @@ -434,7 +446,7 @@ jobs: community_build_c: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2020-11-19 + image: lampepfl/dotty:2023-11-07 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -455,6 +467,11 @@ jobs: )" steps: + ###################################################################################### + ## WARNING: DO NOT CHANGE THE JAVA VERSION HERE. SCALA IS DISTRIBUTED USING JAVA 8. 
## + ###################################################################################### + - name: Set JDK 8 as default + run: echo "/usr/lib/jvm/java-8-openjdk-amd64/bin" >> $GITHUB_PATH - name: Reset existing repo run: | git config --global --add safe.directory /__w/scala3/scala3 @@ -486,7 +503,7 @@ jobs: test_sbt: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2021-03-22 + image: lampepfl/dotty:2023-11-07 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -505,6 +522,9 @@ jobs: )" steps: + - name: Set JDK 17 as default + run: echo "/usr/lib/jvm/java-17-openjdk-amd64/bin" >> $GITHUB_PATH + - name: Reset existing repo run: | git config --global --add safe.directory /__w/scala3/scala3 @@ -528,7 +548,7 @@ jobs: test_java8: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2021-03-22 + image: lampepfl/dotty:2023-11-07 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -551,6 +571,9 @@ jobs: )" steps: + #################################################################################### + ## WARNING: DO NOT CHANGE THE JAVA VERSION HERE. THIS TEST IS SPECIFIC FOR JAVA 8 ## + #################################################################################### - name: Set JDK 8 as default run: echo "/usr/lib/jvm/java-8-openjdk-amd64/bin" >> $GITHUB_PATH @@ -573,7 +596,7 @@ jobs: - name: Test run: | - ./project/scripts/sbt ";dist/pack ;scala3-bootstrapped/compile ;scala3-bootstrapped/test ;sbt-test/scripted scala2-compat/*; scala2-library-tasty-tests/test" + ./project/scripts/sbt ";dist/pack ;scala3-bootstrapped/compile ;scala3-bootstrapped/test ;sbt-test/scripted scala2-compat/*" ./project/scripts/cmdTests ./project/scripts/bootstrappedOnlyCmdTests @@ -584,7 +607,7 @@ jobs: publish_nightly: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2021-03-22 + image: lampepfl/dotty:2023-11-07 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -600,6 +623,11 @@ jobs: SONATYPE_USER: ${{ secrets.SONATYPE_USER_ORGSCALALANG }} steps: + ###################################################################################### + ## WARNING: DO NOT CHANGE THE JAVA VERSION HERE. SCALA IS DISTRIBUTED USING JAVA 8. ## + ###################################################################################### + - name: Set JDK 8 as default + run: echo "/usr/lib/jvm/java-8-openjdk-amd64/bin" >> $GITHUB_PATH - name: Reset existing repo run: | git config --global --add safe.directory /__w/scala3/scala3 @@ -637,7 +665,7 @@ jobs: nightly_documentation: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2021-03-22 + image: lampepfl/dotty:2023-11-07 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -691,7 +719,7 @@ jobs: contents: write # for actions/create-release to create a release runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2021-03-22 + image: lampepfl/dotty:2023-11-07 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -709,6 +737,11 @@ jobs: SONATYPE_USER: ${{ secrets.SONATYPE_USER_ORGSCALALANG }} steps: + ###################################################################################### + ## WARNING: DO NOT CHANGE THE JAVA VERSION HERE. SCALA IS DISTRIBUTED USING JAVA 8. 
## + ###################################################################################### + - name: Set JDK 8 as default + run: echo "/usr/lib/jvm/java-8-openjdk-amd64/bin" >> $GITHUB_PATH - name: Reset existing repo run: | git config --global --add safe.directory /__w/scala3/scala3 @@ -781,7 +814,7 @@ jobs: open_issue_on_failure: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2021-03-22 + image: lampepfl/dotty:2023-11-07 needs: [nightly_documentation, test_windows_full] # The `failure()` expression is true iff at least one of the dependencies # of this job (including transitive dependencies) has failed. diff --git a/.github/workflows/lts-backport.yaml b/.github/workflows/lts-backport.yaml index 090b55b8eaf5..7e8564a634c4 100644 --- a/.github/workflows/lts-backport.yaml +++ b/.github/workflows/lts-backport.yaml @@ -15,7 +15,7 @@ jobs: with: fetch-depth: 0 - uses: coursier/cache-action@v6 - - uses: VirtusLab/scala-cli-setup@v1.1.2 + - uses: VirtusLab/scala-cli-setup@v1.2.0 - run: scala-cli ./project/scripts/addToBackportingProject.scala -- ${{ github.sha }} env: GRAPHQL_API_TOKEN: ${{ secrets.GRAPHQL_API_TOKEN }} diff --git a/.github/workflows/spec.yml b/.github/workflows/spec.yml index 0c09ec170986..94b99e81e044 100644 --- a/.github/workflows/spec.yml +++ b/.github/workflows/spec.yml @@ -45,7 +45,7 @@ jobs: env: USER_FOR_TEST: ${{ secrets.SPEC_DEPLOY_USER }} if: ${{ env.USER_FOR_TEST != '' }} - uses: burnett01/rsync-deployments@6.0.0 + uses: burnett01/rsync-deployments@7.0.0 with: switches: -rzv path: docs/_spec/_site/ diff --git a/.gitignore b/.gitignore index 3d44cdefb941..0fc39ecbae5b 100644 --- a/.gitignore +++ b/.gitignore @@ -99,7 +99,3 @@ docs/_spec/.jekyll-metadata # scaladoc related scaladoc/output/ - -#coverage -coverage/ - diff --git a/MAINTENANCE.md b/MAINTENANCE.md index fd14bab68153..79e55e11d4a9 100644 --- a/MAINTENANCE.md +++ b/MAINTENANCE.md @@ -94,8 +94,8 @@ The following is the list of all the principal areas of the compiler and the cor - Linting (especially unused warnings) / Reporting UX: @szymon-rd ### Infrastructure -- CI: @anatoliykmetyuk -- Community Build: @anatoliykmetyuk +- CI: @hamzaremmal +- Community Build: @hamzaremmal - Open Community Build: @WojciechMazur - Vulpix: @dwijnand, @prolativ - Benchmarks: @mbovel diff --git a/build.sbt b/build.sbt index 1712e80405ae..1bc74e5e23fb 100644 --- a/build.sbt +++ b/build.sbt @@ -16,7 +16,6 @@ val `scala3-bench-bootstrapped` = Build.`scala3-bench-bootstrapped` val `scala3-bench-micro` = Build.`scala3-bench-micro` val `scala2-library-bootstrapped` = Build.`scala2-library-bootstrapped` val `scala2-library-tasty` = Build.`scala2-library-tasty` -val `scala2-library-tasty-tests` = Build.`scala2-library-tasty-tests` val `scala2-library-cc` = Build.`scala2-library-cc` val `scala2-library-cc-tasty` = Build.`scala2-library-cc-tasty` val `tasty-core` = Build.`tasty-core` diff --git a/changelogs/3.4.2-RC1.md b/changelogs/3.4.2-RC1.md new file mode 100644 index 000000000000..464a5f6b086a --- /dev/null +++ b/changelogs/3.4.2-RC1.md @@ -0,0 +1,209 @@ +# Highlights of the release + +- Bump JLine 3.19.0 -> 3.24.1 & sbt 1.9.7 -> 1.9.9 [#19744](https://github.com/lampepfl/dotty/pull/19744) +- Refactor settings & improve dx [#19766](https://github.com/lampepfl/dotty/pull/19766) +- Publish `scala2-library-tasty-experimental` [#19588](https://github.com/lampepfl/dotty/pull/19588) +- Repl - method signatures in autocomplete [#19917](https://github.com/lampepfl/dotty/pull/19917) + +# Other changes and fixes + +## 
Annotations + +- Attempt implicit search for old style `implicit` parameters in Application matchArgs [#19737](https://github.com/lampepfl/dotty/pull/19737) + +## Backend + +- Fix(#17255): cannot find Scala companion module from Java [#19773](https://github.com/lampepfl/dotty/pull/19773) +- Change isStatic to isStaticOwner in hasLocalInstantiation [#19803](https://github.com/lampepfl/dotty/pull/19803) + +## Coverage + +- Port coverage filter options for packages and files [#19727](https://github.com/lampepfl/dotty/pull/19727) + +## Default parameters + +- Lift all non trivial prefixes for default parameters [#19739](https://github.com/lampepfl/dotty/pull/19739) + +## Doctool + +- Prevent HTML/XSS Injection in Scala Search [#19980](https://github.com/lampepfl/dotty/pull/19980) +- Parse search query param in Scaladoc [#19669](https://github.com/lampepfl/dotty/pull/19669) + +## Experimental: Capture Checking + +- Disallow covariant `cap`s in the lower bound of type members [#19624](https://github.com/lampepfl/dotty/pull/19624) +- Ignore orphan parameters inside a retains annotation during Ycheck [#19684](https://github.com/lampepfl/dotty/pull/19684) +- Fix the pickling of `This` inside capture sets [#19797](https://github.com/lampepfl/dotty/pull/19797) +- Add updated to SeqViewOps [#19798](https://github.com/lampepfl/dotty/pull/19798) +- Fix Function tree copier [#19822](https://github.com/lampepfl/dotty/pull/19822) +- Drop FreeSeqFactory from stdlib-cc [#19849](https://github.com/lampepfl/dotty/pull/19849) +- Fix i19859 [#19860](https://github.com/lampepfl/dotty/pull/19860) +- Various fixes to stdlib-cc [#19873](https://github.com/lampepfl/dotty/pull/19873) +- Add more methods in `SeqViewOps` [#19993](https://github.com/lampepfl/dotty/pull/19993) +- Check `This` references in `refersToParamOf` [#20005](https://github.com/lampepfl/dotty/pull/20005) + +## Exports + +- Fix the tparam bounds of exported inherited classes [#18647](https://github.com/lampepfl/dotty/pull/18647) + +## Implicits + +- Prefer extensions over conversions for member selection [#19717](https://github.com/lampepfl/dotty/pull/19717) +- Don't allow implicit conversions on prefixes of type selections [#19934](https://github.com/lampepfl/dotty/pull/19934) +- Make sure typeParams returns a stable result even in the presence of completions [#19974](https://github.com/lampepfl/dotty/pull/19974) + +## Incremental Compilation + +- Fix undercompilation upon ctor change [#19911](https://github.com/lampepfl/dotty/pull/19911) +- Load but not enter case accessors fields in Scala2Unpickler [#19926](https://github.com/lampepfl/dotty/pull/19926) + +## Initialization + +- Add supports for type cast and filtering type for field and method owner in global initialization checker [#19612](https://github.com/lampepfl/dotty/pull/19612) +- Added a second trace for global init checker showing creation of mutable fields [#19996](https://github.com/lampepfl/dotty/pull/19996) +- Suppressing repetitive warnings in the global initialization checker [#19898](https://github.com/lampepfl/dotty/pull/19898) + +## Inline + +- Specialized retained inline FunctionN apply methods [#19801](https://github.com/lampepfl/dotty/pull/19801) +- Avoid crash after StopMacroExpansion [#19883](https://github.com/lampepfl/dotty/pull/19883) +- Check deprecation of inline methods [#19914](https://github.com/lampepfl/dotty/pull/19914) +- Inline transparent implicit parameters when typing Unapply trees [#19646](https://github.com/lampepfl/dotty/pull/19646) +- Restore pre-3.3.2 
behavior of `inline implicit def` [#19877](https://github.com/lampepfl/dotty/pull/19877) + +## Match Types + +- Cover patterns using `reflect.TypeTest` in isMatchTypeShaped [#19923](https://github.com/lampepfl/dotty/pull/19923) +- Rework MatchType recursion in collectParts [#19867](https://github.com/lampepfl/dotty/pull/19867) + +## Nullability + +- Fix #19808: Don't force to compute the owner of a symbol when there is no denotation [#19813](https://github.com/lampepfl/dotty/pull/19813) + +## Parser + +- Add support for JEP-409 (sealed classes) + Add javacOpt directive [#19080](https://github.com/lampepfl/dotty/pull/19080) +- Fix(#16458): regression in xml syntax parsing [#19522](https://github.com/lampepfl/dotty/pull/19522) +- Fix parsing of conditional expressions in parentheses [#19985](https://github.com/lampepfl/dotty/pull/19985) + +## Presentation Compiler + +- Allow range selection on function parameter to select a parameter list [#19777](https://github.com/lampepfl/dotty/pull/19777) + +## Quotes + +- Disallow ill-staged references to local classes [#19869](https://github.com/lampepfl/dotty/pull/19869) +- Add regression test for #19909 [#19915](https://github.com/lampepfl/dotty/pull/19915) +- Detect non `Expr[..]` splice patterns [#19944](https://github.com/lampepfl/dotty/pull/19944) +- Avoid spurious `val` binding in quote pattern [#19948](https://github.com/lampepfl/dotty/pull/19948) +- Add regression test and imporve -Xprint-suspension message [#19688](https://github.com/lampepfl/dotty/pull/19688) + +## REPL + +- Repl truncation copes with null [#17336](https://github.com/lampepfl/dotty/pull/17336) +- Catch stackoverflow errors in the highlighter [#19836](https://github.com/lampepfl/dotty/pull/19836) +- Fix a REPL bad symbolic reference [#19786](https://github.com/lampepfl/dotty/pull/19786) + +## Reflection + +- Fix `TypeTreeTypeTest` to not match `TypeBoundsTree`s [#19485](https://github.com/lampepfl/dotty/pull/19485) +- Improve message when tree cannot be shown as source [#19906](https://github.com/lampepfl/dotty/pull/19906) +- Fix #19732: quotes.reflect.Ref incorrectly casting `This` to `RefTree` [#19930](https://github.com/lampepfl/dotty/pull/19930) +- Add check for parents in Quotes (#19842) [#19870](https://github.com/lampepfl/dotty/pull/19870) + +## Reporting + +- Improve error reporting for missing members [#19800](https://github.com/lampepfl/dotty/pull/19800) +- Avoid repetitions in name hints [#19975](https://github.com/lampepfl/dotty/pull/19975) +- Improve error message when using experimental definitions [#19782](https://github.com/lampepfl/dotty/pull/19782) +- Make -Xprompt work as desired under -Werror [#19765](https://github.com/lampepfl/dotty/pull/19765) +- Fix #19402: emit proper error in absence of using in given definitions [#19714](https://github.com/lampepfl/dotty/pull/19714) +- Bugfix: Choose correct signature is signatureHelp for overloaded methods [#19707](https://github.com/lampepfl/dotty/pull/19707) +- Unify completion pos usage, fix presentation compiler crash in interpolation [#19614](https://github.com/lampepfl/dotty/pull/19614) + +## Scaladoc + +- Fix(#16610): warn ignored Scaladoc on multiple enum cases [#19555](https://github.com/lampepfl/dotty/pull/19555) + +## TASTy format + +- Add patch for undefined behavior with `object $` [#19705](https://github.com/lampepfl/dotty/pull/19705) +- Fix(#19806): wrong tasty of scala module class reference [#19827](https://github.com/lampepfl/dotty/pull/19827) +- Used derived types to type arguments of dependent 
function type [#19838](https://github.com/lampepfl/dotty/pull/19838) + +## Tooling + +- Java TASTy: use new threadsafe writer implementation [#19690](https://github.com/lampepfl/dotty/pull/19690) +- Remove `-Yforce-inline-while-typing` [#19889](https://github.com/lampepfl/dotty/pull/19889) +- Cleanup unnecessary language flag [#19865](https://github.com/lampepfl/dotty/pull/19865) +- Bugfix: Auto imports in worksheets in Scala 3 [#19793](https://github.com/lampepfl/dotty/pull/19793) +- Refine behavior of `-Yno-experimental` [#19741](https://github.com/lampepfl/dotty/pull/19741) + +## Transform + +- Short-circuit isCheckable with classSymbol [#19634](https://github.com/lampepfl/dotty/pull/19634) +- Avoid eta-reduction of `(..., f: T => R, ...) => f.apply(..)` into `f` [#19966](https://github.com/lampepfl/dotty/pull/19966) +- Tweak parameter accessor scheme [#19719](https://github.com/lampepfl/dotty/pull/19719) + +## Typer + +- Update phrasing for NotClassType explain error message [#19635](https://github.com/lampepfl/dotty/pull/19635) +- Fix java typer problems with inner class references and raw types [#19747](https://github.com/lampepfl/dotty/pull/19747) +- Approximate MatchTypes with lub of case bodies, if non-recursive [#19761](https://github.com/lampepfl/dotty/pull/19761) +- Revert broken changes with transparent inline [#19922](https://github.com/lampepfl/dotty/pull/19922) +- Delay hard argument comparisons [#20007](https://github.com/lampepfl/dotty/pull/20007) +- Fix #19607: Allow to instantiate *wildcard* type captures to TypeBounds. [#19627](https://github.com/lampepfl/dotty/pull/19627) +- Fix #19907: Skip soft unions in widenSingle of widenInferred [#19995](https://github.com/lampepfl/dotty/pull/19995) +- Fix untupling of functions in for comprehensions [#19620](https://github.com/lampepfl/dotty/pull/19620) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.4.1..3.42-RC1` these are: + +``` + 46 Nicolas Stucki + 33 Martin Odersky + 25 Dale Wijnand + 22 Hamza REMMAL + 18 Yichen Xu + 17 Jamie Thompson + 15 Szymon Rodziewicz + 11 EnzeXing + 11 i10416 + 7 Paweł Marks + 6 Kacper Korban + 4 Dan13llljws + 4 Katarzyna Marek + 4 Matt Bovel + 4 Som Snytt + 4 noti0na1 + 3 110416 + 3 Eugene Flesselle + 3 Sébastien Doeraene + 3 dependabot[bot] + 2 Bersier + 2 Hamza Remmal + 2 Jakub Ciesluk + 2 João Costa + 2 Jędrzej Rochala + 2 Natsu Kagami + 2 Stephane Bersier + 2 Taro L. 
Saito + 2 aherlihy + 1 Aleksander Boruch-Gruszecki + 1 Aviv Keller + 1 Eugene Yokota + 1 Guillaume Martres + 1 Jan Chyb + 1 Lukas Rytz + 1 Mikołaj Fornal + 1 Olga Mazhara + 1 Ondřej Lhoták + 1 Robert Stoll + 1 Seth Tisue + 1 Valentin Schneeberger + 1 Yilin Wei + 1 willerf +``` diff --git a/changelogs/3.4.2.md b/changelogs/3.4.2.md new file mode 100644 index 000000000000..bb6fcc40c952 --- /dev/null +++ b/changelogs/3.4.2.md @@ -0,0 +1,209 @@ +# Highlights of the release + +- Bump JLine 3.19.0 -> 3.24.1 & sbt 1.9.7 -> 1.9.9 [#19744](https://github.com/lampepfl/dotty/pull/19744) +- Refactor settings & improve dx [#19766](https://github.com/lampepfl/dotty/pull/19766) +- Publish `scala2-library-tasty-experimental` [#19588](https://github.com/lampepfl/dotty/pull/19588) +- Repl - method signatures in autocomplete [#19917](https://github.com/lampepfl/dotty/pull/19917) + +# Other changes and fixes + +## Annotations + +- Attempt implicit search for old style `implicit` parameters in Application matchArgs [#19737](https://github.com/lampepfl/dotty/pull/19737) + +## Backend + +- Fix(#17255): cannot find Scala companion module from Java [#19773](https://github.com/lampepfl/dotty/pull/19773) +- Change isStatic to isStaticOwner in hasLocalInstantiation [#19803](https://github.com/lampepfl/dotty/pull/19803) + +## Coverage + +- Port coverage filter options for packages and files [#19727](https://github.com/lampepfl/dotty/pull/19727) + +## Default parameters + +- Lift all non trivial prefixes for default parameters [#19739](https://github.com/lampepfl/dotty/pull/19739) + +## Doctool + +- Prevent HTML/XSS Injection in Scala Search [#19980](https://github.com/lampepfl/dotty/pull/19980) +- Parse search query param in Scaladoc [#19669](https://github.com/lampepfl/dotty/pull/19669) + +## Experimental: Capture Checking + +- Disallow covariant `cap`s in the lower bound of type members [#19624](https://github.com/lampepfl/dotty/pull/19624) +- Ignore orphan parameters inside a retains annotation during Ycheck [#19684](https://github.com/lampepfl/dotty/pull/19684) +- Fix the pickling of `This` inside capture sets [#19797](https://github.com/lampepfl/dotty/pull/19797) +- Add updated to SeqViewOps [#19798](https://github.com/lampepfl/dotty/pull/19798) +- Fix Function tree copier [#19822](https://github.com/lampepfl/dotty/pull/19822) +- Drop FreeSeqFactory from stdlib-cc [#19849](https://github.com/lampepfl/dotty/pull/19849) +- Fix i19859 [#19860](https://github.com/lampepfl/dotty/pull/19860) +- Various fixes to stdlib-cc [#19873](https://github.com/lampepfl/dotty/pull/19873) +- Add more methods in `SeqViewOps` [#19993](https://github.com/lampepfl/dotty/pull/19993) +- Check `This` references in `refersToParamOf` [#20005](https://github.com/lampepfl/dotty/pull/20005) + +## Exports + +- Fix the tparam bounds of exported inherited classes [#18647](https://github.com/lampepfl/dotty/pull/18647) + +## Implicits + +- Prefer extensions over conversions for member selection [#19717](https://github.com/lampepfl/dotty/pull/19717) +- Don't allow implicit conversions on prefixes of type selections [#19934](https://github.com/lampepfl/dotty/pull/19934) +- Make sure typeParams returns a stable result even in the presence of completions [#19974](https://github.com/lampepfl/dotty/pull/19974) + +## Incremental Compilation + +- Fix undercompilation upon ctor change [#19911](https://github.com/lampepfl/dotty/pull/19911) +- Load but not enter case accessors fields in Scala2Unpickler [#19926](https://github.com/lampepfl/dotty/pull/19926) + 
+## Initialization + +- Add supports for type cast and filtering type for field and method owner in global initialization checker [#19612](https://github.com/lampepfl/dotty/pull/19612) +- Added a second trace for global init checker showing creation of mutable fields [#19996](https://github.com/lampepfl/dotty/pull/19996) +- Suppressing repetitive warnings in the global initialization checker [#19898](https://github.com/lampepfl/dotty/pull/19898) + +## Inline + +- Specialized retained inline FunctionN apply methods [#19801](https://github.com/lampepfl/dotty/pull/19801) +- Avoid crash after StopMacroExpansion [#19883](https://github.com/lampepfl/dotty/pull/19883) +- Check deprecation of inline methods [#19914](https://github.com/lampepfl/dotty/pull/19914) +- Inline transparent implicit parameters when typing Unapply trees [#19646](https://github.com/lampepfl/dotty/pull/19646) +- Restore pre-3.3.2 behavior of `inline implicit def` [#19877](https://github.com/lampepfl/dotty/pull/19877) + +## Match Types + +- Cover patterns using `reflect.TypeTest` in isMatchTypeShaped [#19923](https://github.com/lampepfl/dotty/pull/19923) +- Rework MatchType recursion in collectParts [#19867](https://github.com/lampepfl/dotty/pull/19867) + +## Nullability + +- Fix #19808: Don't force to compute the owner of a symbol when there is no denotation [#19813](https://github.com/lampepfl/dotty/pull/19813) + +## Parser + +- Add support for JEP-409 (sealed classes) + Add javacOpt directive [#19080](https://github.com/lampepfl/dotty/pull/19080) +- Fix(#16458): regression in xml syntax parsing [#19522](https://github.com/lampepfl/dotty/pull/19522) +- Fix parsing of conditional expressions in parentheses [#19985](https://github.com/lampepfl/dotty/pull/19985) + +## Presentation Compiler + +- Allow range selection on function parameter to select a parameter list [#19777](https://github.com/lampepfl/dotty/pull/19777) + +## Quotes + +- Disallow ill-staged references to local classes [#19869](https://github.com/lampepfl/dotty/pull/19869) +- Add regression test for #19909 [#19915](https://github.com/lampepfl/dotty/pull/19915) +- Detect non `Expr[..]` splice patterns [#19944](https://github.com/lampepfl/dotty/pull/19944) +- Avoid spurious `val` binding in quote pattern [#19948](https://github.com/lampepfl/dotty/pull/19948) +- Add regression test and imporve -Xprint-suspension message [#19688](https://github.com/lampepfl/dotty/pull/19688) + +## REPL + +- Repl truncation copes with null [#17336](https://github.com/lampepfl/dotty/pull/17336) +- Catch stackoverflow errors in the highlighter [#19836](https://github.com/lampepfl/dotty/pull/19836) +- Fix a REPL bad symbolic reference [#19786](https://github.com/lampepfl/dotty/pull/19786) + +## Reflection + +- Fix `TypeTreeTypeTest` to not match `TypeBoundsTree`s [#19485](https://github.com/lampepfl/dotty/pull/19485) +- Improve message when tree cannot be shown as source [#19906](https://github.com/lampepfl/dotty/pull/19906) +- Fix #19732: quotes.reflect.Ref incorrectly casting `This` to `RefTree` [#19930](https://github.com/lampepfl/dotty/pull/19930) +- Add check for parents in Quotes (#19842) [#19870](https://github.com/lampepfl/dotty/pull/19870) + +## Reporting + +- Improve error reporting for missing members [#19800](https://github.com/lampepfl/dotty/pull/19800) +- Avoid repetitions in name hints [#19975](https://github.com/lampepfl/dotty/pull/19975) +- Improve error message when using experimental definitions [#19782](https://github.com/lampepfl/dotty/pull/19782) +- Make -Xprompt 
work as desired under -Werror [#19765](https://github.com/lampepfl/dotty/pull/19765) +- Fix #19402: emit proper error in absence of using in given definitions [#19714](https://github.com/lampepfl/dotty/pull/19714) +- Bugfix: Choose correct signature is signatureHelp for overloaded methods [#19707](https://github.com/lampepfl/dotty/pull/19707) +- Unify completion pos usage, fix presentation compiler crash in interpolation [#19614](https://github.com/lampepfl/dotty/pull/19614) + +## Scaladoc + +- Fix(#16610): warn ignored Scaladoc on multiple enum cases [#19555](https://github.com/lampepfl/dotty/pull/19555) + +## TASTy format + +- Add patch for undefined behavior with `object $` [#19705](https://github.com/lampepfl/dotty/pull/19705) +- Fix(#19806): wrong tasty of scala module class reference [#19827](https://github.com/lampepfl/dotty/pull/19827) +- Used derived types to type arguments of dependent function type [#19838](https://github.com/lampepfl/dotty/pull/19838) + +## Tooling + +- Java TASTy: use new threadsafe writer implementation [#19690](https://github.com/lampepfl/dotty/pull/19690) +- Remove `-Yforce-inline-while-typing` [#19889](https://github.com/lampepfl/dotty/pull/19889) +- Cleanup unnecessary language flag [#19865](https://github.com/lampepfl/dotty/pull/19865) +- Bugfix: Auto imports in worksheets in Scala 3 [#19793](https://github.com/lampepfl/dotty/pull/19793) +- Refine behavior of `-Yno-experimental` [#19741](https://github.com/lampepfl/dotty/pull/19741) + +## Transform + +- Short-circuit isCheckable with classSymbol [#19634](https://github.com/lampepfl/dotty/pull/19634) +- Avoid eta-reduction of `(..., f: T => R, ...) => f.apply(..)` into `f` [#19966](https://github.com/lampepfl/dotty/pull/19966) +- Tweak parameter accessor scheme [#19719](https://github.com/lampepfl/dotty/pull/19719) + +## Typer + +- Update phrasing for NotClassType explain error message [#19635](https://github.com/lampepfl/dotty/pull/19635) +- Fix java typer problems with inner class references and raw types [#19747](https://github.com/lampepfl/dotty/pull/19747) +- Approximate MatchTypes with lub of case bodies, if non-recursive [#19761](https://github.com/lampepfl/dotty/pull/19761) +- Revert broken changes with transparent inline [#19922](https://github.com/lampepfl/dotty/pull/19922) +- Delay hard argument comparisons [#20007](https://github.com/lampepfl/dotty/pull/20007) +- Fix #19607: Allow to instantiate *wildcard* type captures to TypeBounds. [#19627](https://github.com/lampepfl/dotty/pull/19627) +- Fix #19907: Skip soft unions in widenSingle of widenInferred [#19995](https://github.com/lampepfl/dotty/pull/19995) +- Fix untupling of functions in for comprehensions [#19620](https://github.com/lampepfl/dotty/pull/19620) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.4.1..3.4.2` these are: + +``` + 46 Nicolas Stucki + 33 Martin Odersky + 25 Dale Wijnand + 22 Hamza REMMAL + 18 Yichen Xu + 17 Jamie Thompson + 15 Szymon Rodziewicz + 11 EnzeXing + 11 i10416 + 9 Paweł Marks + 6 Kacper Korban + 4 Dan13llljws + 4 Katarzyna Marek + 4 Matt Bovel + 4 Som Snytt + 4 noti0na1 + 3 110416 + 3 Eugene Flesselle + 3 Sébastien Doeraene + 3 dependabot[bot] + 2 Bersier + 2 Hamza Remmal + 2 Jakub Ciesluk + 2 João Costa + 2 Jędrzej Rochala + 2 Natsu Kagami + 2 Stephane Bersier + 2 Taro L. 
Saito + 2 aherlihy + 1 Aleksander Boruch-Gruszecki + 1 Aviv Keller + 1 Eugene Yokota + 1 Guillaume Martres + 1 Jan Chyb + 1 Lukas Rytz + 1 Mikołaj Fornal + 1 Olga Mazhara + 1 Ondřej Lhoták + 1 Robert Stoll + 1 Seth Tisue + 1 Valentin Schneeberger + 1 Yilin Wei + 1 willerf +``` diff --git a/community-build/community-projects/munit b/community-build/community-projects/munit index c18fddb143b9..5c77d7316fc6 160000 --- a/community-build/community-projects/munit +++ b/community-build/community-projects/munit @@ -1 +1 @@ -Subproject commit c18fddb143b98e4c026dc118687410d52b187d88 +Subproject commit 5c77d7316fc66adaed64e9532ee0a45a668b01ec diff --git a/community-build/community-projects/specs2 b/community-build/community-projects/specs2 index ba01cca013d9..a618330aa808 160000 --- a/community-build/community-projects/specs2 +++ b/community-build/community-projects/specs2 @@ -1 +1 @@ -Subproject commit ba01cca013d9d99e390d17619664bdedd716e0d7 +Subproject commit a618330aa80833787859dae805d02e45d4304c42 diff --git a/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala b/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala index 6a4f832ce05a..f307a6b0a8eb 100644 --- a/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala +++ b/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala @@ -71,7 +71,7 @@ class CommunityBuildTestC: @Test def jacksonModuleScala = projects.jacksonModuleScala.run() @Test def libretto = projects.libretto.run() @Test def minitest = projects.minitest.run() - @Test def onnxScala = projects.onnxScala.run() + //@Test def onnxScala = projects.onnxScala.run() @Test def oslib = projects.oslib.run() // @Test def oslibWatch = projects.oslibWatch.run() @Test def parboiled2 = projects.parboiled2.run() diff --git a/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala b/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala index 2f8a469169cc..865ee9bf4af9 100644 --- a/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala +++ b/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala @@ -20,24 +20,7 @@ class BackendUtils(val postProcessor: PostProcessor) { import bTypes.* import coreBTypes.jliLambdaMetaFactoryAltMetafactoryHandle - // Keep synchronized with `minTargetVersion` and `maxTargetVersion` in ScalaSettings - lazy val classfileVersion: Int = compilerSettings.target match { - case "8" => asm.Opcodes.V1_8 - case "9" => asm.Opcodes.V9 - case "10" => asm.Opcodes.V10 - case "11" => asm.Opcodes.V11 - case "12" => asm.Opcodes.V12 - case "13" => asm.Opcodes.V13 - case "14" => asm.Opcodes.V14 - case "15" => asm.Opcodes.V15 - case "16" => asm.Opcodes.V16 - case "17" => asm.Opcodes.V17 - case "18" => asm.Opcodes.V18 - case "19" => asm.Opcodes.V19 - case "20" => asm.Opcodes.V20 - case "21" => asm.Opcodes.V21 - case "22" => asm.Opcodes.V22 - } + lazy val classfileVersion: Int = BackendUtils.classfileVersionMap(compilerSettings.target.toInt) lazy val extraProc: Int = { import GenBCodeOps.addFlagIf @@ -184,3 +167,23 @@ class BackendUtils(val postProcessor: PostProcessor) { } } } + +object BackendUtils { + lazy val classfileVersionMap: Map[Int, Int] = Map( + 8 -> asm.Opcodes.V1_8, + 9 -> asm.Opcodes.V9, + 10 -> asm.Opcodes.V10, + 11 -> asm.Opcodes.V11, + 12 -> asm.Opcodes.V12, + 13 -> asm.Opcodes.V13, + 14 -> asm.Opcodes.V14, + 15 -> asm.Opcodes.V15, + 16 -> asm.Opcodes.V16, + 17 -> asm.Opcodes.V17, + 18 -> asm.Opcodes.V18, + 19 -> asm.Opcodes.V19, + 20 -> asm.Opcodes.V20, + 21 -> asm.Opcodes.V21, + 22 -> 
asm.Opcodes.V22, + ) +} diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala index 290df761d117..3abd46ebe8eb 100644 --- a/compiler/src/dotty/tools/dotc/Compiler.scala +++ b/compiler/src/dotty/tools/dotc/Compiler.scala @@ -167,6 +167,8 @@ class Compiler { val rctx = if ctx.settings.Xsemanticdb.value then ctx.addMode(Mode.ReadPositions) + else if ctx.settings.YcheckInitGlobal.value then + ctx.addMode(Mode.ReadPositions) else ctx new Run(this, rctx) diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index 4e0b7d09e95f..d18a2ddc7db0 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -141,7 +141,7 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint |""" val enableXprintSuspensionHint = if ctx.settings.XprintSuspension.value then "" - else "\n\nCompiling with -Xprint-suspension gives more information." + else "\n\nCompile with -Xprint-suspension for information." report.error(em"""Cyclic macro dependencies $where |Compilation stopped since no further progress can be made. | diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 9591bc5a93f0..2d99cf201375 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -178,21 +178,7 @@ object desugar { val valName = normalizeName(vdef, tpt).asTermName var mods1 = vdef.mods - def dropInto(tpt: Tree): Tree = tpt match - case Into(tpt1) => - mods1 = vdef.mods.withAddedAnnotation( - TypedSplice( - Annotation(defn.AllowConversionsAnnot, tpt.span.startPos).tree)) - tpt1 - case ByNameTypeTree(tpt1) => - cpy.ByNameTypeTree(tpt)(dropInto(tpt1)) - case PostfixOp(tpt1, op) if op.name == tpnme.raw.STAR => - cpy.PostfixOp(tpt)(dropInto(tpt1), op) - case _ => - tpt - - val vdef1 = cpy.ValDef(vdef)(name = valName, tpt = dropInto(tpt)) - .withMods(mods1) + val vdef1 = cpy.ValDef(vdef)(name = valName).withMods(mods1) if isSetterNeeded(vdef) then val setterParam = makeSyntheticParameter(tpt = SetterParamTree().watching(vdef)) @@ -1876,8 +1862,11 @@ object desugar { assert(ctx.mode.isExpr || ctx.reporter.errorsReported || ctx.mode.is(Mode.Interactive), ctx.mode) Select(t, op.name) case PrefixOp(op, t) => - val nspace = if (ctx.mode.is(Mode.Type)) tpnme else nme - Select(t, nspace.UNARY_PREFIX ++ op.name) + if op.name == tpnme.into then + Annotated(t, New(ref(defn.IntoAnnot.typeRef), Nil :: Nil)) + else + val nspace = if (ctx.mode.is(Mode.Type)) tpnme else nme + Select(t, nspace.UNARY_PREFIX ++ op.name) case ForDo(enums, body) => makeFor(nme.foreach, nme.foreach, enums, body) orElse tree case ForYield(enums, body) => @@ -1978,10 +1967,7 @@ object desugar { val applyVParams = args.zipWithIndex.map { case (p, n) => makeSyntheticParameter(n + 1, p) } - tree match - case tree: FunctionWithMods => - untpd.FunctionWithMods(applyVParams, result, tree.mods, tree.erasedParams) - case _ => untpd.Function(applyVParams, result) + cpy.Function(tree)(applyVParams, result).asInstanceOf[untpd.Function] } } diff --git a/compiler/src/dotty/tools/dotc/ast/Trees.scala b/compiler/src/dotty/tools/dotc/ast/Trees.scala index c64b636648ee..41899ed661f5 100644 --- a/compiler/src/dotty/tools/dotc/ast/Trees.scala +++ b/compiler/src/dotty/tools/dotc/ast/Trees.scala @@ -304,6 +304,7 @@ object Trees { def withFlags(flags: FlagSet): ThisTree[Untyped] = withMods(untpd.Modifiers(flags)) def 
withAddedFlags(flags: FlagSet): ThisTree[Untyped] = withMods(rawMods | flags) + def withAddedAnnotation(annot: Tree[Untyped]): ThisTree[Untyped] = withMods(rawMods.withAddedAnnotation(annot)) /** Destructively update modifiers. To be used with care. */ def setMods(mods: untpd.Modifiers): Unit = myMods = mods @@ -1254,11 +1255,12 @@ object Trees { case _ => finalize(tree, untpd.Ident(name)(sourceFile(tree))) } def Select(tree: Tree)(qualifier: Tree, name: Name)(using Context): Select = tree match { - case tree: SelectWithSig => - if ((qualifier eq tree.qualifier) && (name == tree.name)) tree - else finalize(tree, SelectWithSig(qualifier, name, tree.sig)(sourceFile(tree))) case tree: Select if (qualifier eq tree.qualifier) && (name == tree.name) => tree - case _ => finalize(tree, untpd.Select(qualifier, name)(sourceFile(tree))) + case _ => + val tree1 = tree match + case tree: SelectWithSig => untpd.SelectWithSig(qualifier, name, tree.sig)(using sourceFile(tree)) + case _ => untpd.Select(qualifier, name)(using sourceFile(tree)) + finalize(tree, tree1) } /** Copy Ident or Select trees */ def Ref(tree: RefTree)(name: Name)(using Context): RefTree = tree match { diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index aabfdd97d7bd..08f3db4981ff 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -118,7 +118,6 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class ContextBounds(bounds: TypeBoundsTree, cxBounds: List[Tree])(implicit @constructorOnly src: SourceFile) extends TypTree case class PatDef(mods: Modifiers, pats: List[Tree], tpt: Tree, rhs: Tree)(implicit @constructorOnly src: SourceFile) extends DefTree case class ExtMethods(paramss: List[ParamClause], methods: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree - case class Into(tpt: Tree)(implicit @constructorOnly src: SourceFile) extends Tree case class MacroTree(expr: Tree)(implicit @constructorOnly src: SourceFile) extends Tree case class ImportSelector(imported: Ident, renamed: Tree = EmptyTree, bound: Tree = EmptyTree)(implicit @constructorOnly src: SourceFile) extends Tree { @@ -552,6 +551,12 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { ValDef(nme.syntheticParamName(n), if (tpt == null) TypeTree() else tpt, EmptyTree) .withFlags(flags) + def isInto(t: Tree)(using Context): Boolean = t match + case PrefixOp(Ident(tpnme.into), _) => true + case Function(_, res) => isInto(res) + case Parens(t) => isInto(t) + case _ => false + def lambdaAbstract(params: List[ValDef] | List[TypeDef], tpt: Tree)(using Context): Tree = params match case Nil => tpt @@ -609,7 +614,11 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { } def Function(tree: Tree)(args: List[Tree], body: Tree)(using Context): Tree = tree match { case tree: Function if (args eq tree.args) && (body eq tree.body) => tree - case _ => finalize(tree, untpd.Function(args, body)(tree.source)) + case _ => + val tree1 = tree match + case tree: FunctionWithMods => untpd.FunctionWithMods(args, body, tree.mods, tree.erasedParams)(using tree.source) + case _ => untpd.Function(args, body)(using tree.source) + finalize(tree, tree1) } def PolyFunction(tree: Tree)(targs: List[Tree], body: Tree)(using Context): Tree = tree match { case tree: PolyFunction if (targs eq tree.targs) && (body eq tree.body) => tree @@ -666,9 +675,6 @@ object untpd extends Trees.Instance[Untyped] with 
UntypedTreeInfo { def ExtMethods(tree: Tree)(paramss: List[ParamClause], methods: List[Tree])(using Context): Tree = tree match case tree: ExtMethods if (paramss eq tree.paramss) && (methods == tree.methods) => tree case _ => finalize(tree, untpd.ExtMethods(paramss, methods)(tree.source)) - def Into(tree: Tree)(tpt: Tree)(using Context): Tree = tree match - case tree: Into if tpt eq tree.tpt => tree - case _ => finalize(tree, untpd.Into(tpt)(tree.source)) def ImportSelector(tree: Tree)(imported: Ident, renamed: Tree, bound: Tree)(using Context): Tree = tree match { case tree: ImportSelector if (imported eq tree.imported) && (renamed eq tree.renamed) && (bound eq tree.bound) => tree case _ => finalize(tree, untpd.ImportSelector(imported, renamed, bound)(tree.source)) @@ -734,8 +740,6 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { cpy.PatDef(tree)(mods, transform(pats), transform(tpt), transform(rhs)) case ExtMethods(paramss, methods) => cpy.ExtMethods(tree)(transformParamss(paramss), transformSub(methods)) - case Into(tpt) => - cpy.Into(tree)(transform(tpt)) case ImportSelector(imported, renamed, bound) => cpy.ImportSelector(tree)(transformSub(imported), transform(renamed), transform(bound)) case Number(_, _) | TypedSplice(_) => @@ -791,8 +795,6 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { this(this(this(x, pats), tpt), rhs) case ExtMethods(paramss, methods) => this(paramss.foldLeft(x)(apply), methods) - case Into(tpt) => - this(x, tpt) case ImportSelector(imported, renamed, bound) => this(this(this(x, imported), renamed), bound) case Number(_, _) => diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 4564bed6db01..de584797f154 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -142,32 +142,19 @@ object CheckCaptures: private val seen = new EqHashSet[TypeRef] - /** Check that there is at least one method containing carrier and defined - * in the scope of tparam. E.g. this is OK: - * def f[T] = { ... var x: T ... } - * So is this: - * class C[T] { def f() = { class D { var x: T }}} - * But this is not OK: - * class C[T] { object o { var x: T }} - */ - extension (tparam: Symbol) def isParametricIn(carrier: Symbol): Boolean = - carrier.exists && { - val encl = carrier.owner.enclosingMethodOrClass - if encl.isClass then tparam.isParametricIn(encl) - else - def recur(encl: Symbol): Boolean = - if tparam.owner == encl then true - else if encl.isStatic || !encl.exists then false - else recur(encl.owner.enclosingMethodOrClass) - recur(encl) - } - def traverse(t: Type) = t.dealiasKeepAnnots match case t: TypeRef => if !seen.contains(t) then seen += t traverseChildren(t) + + // Check the lower bound of path dependent types. + // See issue #19330. 
+ val isMember = t.prefix ne NoPrefix + t.info match + case TypeBounds(lo, _) if isMember => traverse(lo) + case _ => case AnnotatedType(_, ann) if ann.symbol == defn.UncheckedCapturesAnnot => () case t => diff --git a/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala b/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala index 8c31faa43186..b8cb9a2155dc 100644 --- a/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala +++ b/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala @@ -114,6 +114,10 @@ object FileUtils { if classOrModuleName.endsWith("$") && classOrModuleName != "Null$" // scala.runtime.Null$ && classOrModuleName != "Nothing$" // scala.runtime.Nothing$ + // Special case for `object $` in Amonite. + // This is an ad-hoc workaround for Amonite `object $`. See issue #19702 + // This definition is not valid Scala. + && classOrModuleName != "$" then classOrModuleName.stripSuffix("$") else classOrModuleName className + SUFFIX_TASTY diff --git a/compiler/src/dotty/tools/dotc/config/CliCommand.scala b/compiler/src/dotty/tools/dotc/config/CliCommand.scala index 5c24dd57eeba..be97297218fa 100644 --- a/compiler/src/dotty/tools/dotc/config/CliCommand.scala +++ b/compiler/src/dotty/tools/dotc/config/CliCommand.scala @@ -86,7 +86,7 @@ trait CliCommand: protected def isVerbose(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = s.name.startsWith("-V") && s.name != "-V" protected def isWarning(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = - s.name.startsWith("-W") && s.name != "-W" || s.name == "-Xlint" + s.name.startsWith("-W") && s.name != "-W" protected def isAdvanced(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = s.name.startsWith("-X") && s.name != "-X" protected def isPrivate(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = diff --git a/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala b/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala index 2ffe900fbdbf..587f94dad7b3 100644 --- a/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala +++ b/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala @@ -7,7 +7,7 @@ import core.Contexts.* abstract class CompilerCommand extends CliCommand: type ConcreteSettings = ScalaSettings - final def helpMsg(using settings: ScalaSettings)(using SettingsState, Context): String = + final def helpMsg(using settings: ConcreteSettings)(using SettingsState, Context): String = settings.allSettings.find(isHelping) match case Some(s) => s.description case _ => @@ -20,7 +20,7 @@ abstract class CompilerCommand extends CliCommand: else if (settings.XshowPhases.value) phasesMessage else "" - final def isHelpFlag(using settings: ScalaSettings)(using SettingsState): Boolean = + final def isHelpFlag(using settings: ConcreteSettings)(using SettingsState): Boolean = import settings.* val flags = Set(help, Vhelp, Whelp, Xhelp, Yhelp, showPlugins, XshowPhases) flags.exists(_.value) || allSettings.exists(isHelping) diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index 2798828ad9a7..7eb95badd4d0 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -33,8 +33,6 @@ object Feature: val captureChecking = experimental("captureChecking") val into = experimental("into") - val globalOnlyImports: Set[TermName] = Set(pureFunctions, captureChecking) - /** Is `feature` enabled by by a 
command-line setting? The enabling setting is * * -language:feature @@ -135,26 +133,25 @@ object Feature: if !isExperimentalEnabled then report.error( em"""Experimental $which may only be used under experimental mode: - | 1. In a definition marked as @experimental - | 2. Compiling with the -experimental compiler flag - | 3. With a nightly or snapshot version of the compiler$note + | 1. in a definition marked as @experimental, or + | 2. compiling with the -experimental compiler flag, or + | 3. with a nightly or snapshot version of the compiler.$note """, srcPos) private def ccException(sym: Symbol)(using Context): Boolean = ccEnabled && defn.ccExperimental.contains(sym) def checkExperimentalDef(sym: Symbol, srcPos: SrcPos)(using Context) = - if !isExperimentalEnabled then - val experimentalSym = - if sym.hasAnnotation(defn.ExperimentalAnnot) then sym - else if sym.owner.hasAnnotation(defn.ExperimentalAnnot) then sym.owner - else NoSymbol - if !ccException(experimentalSym) then - val symMsg = - if experimentalSym.exists - then i"$experimentalSym is marked @experimental" - else i"$sym inherits @experimental" - report.error(em"$symMsg and therefore may only be used in an experimental scope.", srcPos) + val experimentalSym = + if sym.hasAnnotation(defn.ExperimentalAnnot) then sym + else if sym.owner.hasAnnotation(defn.ExperimentalAnnot) then sym.owner + else NoSymbol + if !ccException(experimentalSym) then + val note = + if experimentalSym.exists + then i"$experimentalSym is marked @experimental" + else i"$sym inherits @experimental" + checkExperimentalFeature("definition", srcPos, s"\n\n$note") /** Check that experimental compiler options are only set for snapshot or nightly compiler versions. */ def checkExperimentalSettings(using Context): Unit = @@ -163,7 +160,7 @@ object Feature: do checkExperimentalFeature(s"feature $setting", NoSourcePosition) def isExperimentalEnabled(using Context): Boolean = - (Properties.experimental || ctx.settings.experimental.value) && !ctx.settings.YnoExperimental.value + (Properties.unstableExperimentalEnabled && !ctx.settings.YnoExperimental.value) || ctx.settings.experimental.value /** Handle language import `import language..` if it is one * of the global imports `pureFunctions` or `captureChecking`. In this case diff --git a/compiler/src/dotty/tools/dotc/config/Properties.scala b/compiler/src/dotty/tools/dotc/config/Properties.scala index 1e9cc82112af..3392882057e7 100644 --- a/compiler/src/dotty/tools/dotc/config/Properties.scala +++ b/compiler/src/dotty/tools/dotc/config/Properties.scala @@ -86,11 +86,12 @@ trait PropertiesTrait { /** Whether the current version of compiler is experimental * - * 1. Snapshot, nightly releases and non-bootstrapped compiler are experimental. - * 2. Features supported by experimental versions of the compiler: - * - research plugins + * Snapshot, nightly releases and non-bootstrapped compiler are experimental. */ - val experimental: Boolean = versionString.contains("SNAPSHOT") || versionString.contains("NIGHTLY") || versionString.contains("nonbootstrapped") + val unstableExperimentalEnabled: Boolean = versionString.contains("SNAPSHOT") || versionString.contains("NIGHTLY") || versionString.contains("nonbootstrapped") + + /** Whether the current version of compiler supports research plugins. 
*/ + val researchPluginEnabled: Boolean = versionString.contains("SNAPSHOT") || versionString.contains("NIGHTLY") || versionString.contains("nonbootstrapped") val copyrightString: String = scalaPropOrElse("copyright.string", "(c) 2002-2017 LAMP/EPFL") diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 85e6ebef751f..687adfe05ca7 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -2,92 +2,84 @@ package dotty.tools.dotc package config import scala.language.unsafeNulls - import dotty.tools.dotc.config.PathResolver.Defaults -import dotty.tools.dotc.config.Settings.{Setting, SettingGroup} +import dotty.tools.dotc.config.Settings.{Setting, SettingGroup, SettingCategory} import dotty.tools.dotc.config.SourceVersion import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.rewrites.Rewrites import dotty.tools.io.{AbstractFile, Directory, JDK9Reflectors, PlainDirectory, NoAbstractFile} import Setting.ChoiceWithHelp +import ScalaSettingCategories.* import scala.util.chaining.* import java.util.zip.Deflater -class ScalaSettings extends SettingGroup with AllScalaSettings - -object ScalaSettings: - // Keep synchronized with `classfileVersion` in `BackendUtils` - private val minTargetVersion = 8 - private val maxTargetVersion = 22 - - def supportedTargetVersions: List[String] = - (minTargetVersion to maxTargetVersion).toList.map(_.toString) - - def supportedReleaseVersions: List[String] = - if scala.util.Properties.isJavaAtLeast("9") then - val jdkVersion = JDK9Reflectors.runtimeVersionMajor(JDK9Reflectors.runtimeVersion()).intValue() - val maxVersion = Math.min(jdkVersion, maxTargetVersion) - (minTargetVersion to maxVersion).toList.map(_.toString) - else List(minTargetVersion).map(_.toString) - - def supportedScalaReleaseVersions: List[String] = - ScalaRelease.values.toList.map(_.show) - - def supportedSourceVersions: List[String] = - SourceVersion.values.toList.map(_.toString) - - def defaultClasspath: String = sys.env.getOrElse("CLASSPATH", ".") - - def defaultPageWidth: Int = { - val defaultWidth = 80 - val columnsVar = System.getenv("COLUMNS") - if columnsVar != null then columnsVar.toInt - else if Properties.isWin then - val ansiconVar = System.getenv("ANSICON") // eg. 
"142x32766 (142x26)" - if ansiconVar != null && ansiconVar.matches("[0-9]+x.*") then - ansiconVar.substring(0, ansiconVar.indexOf("x")).toInt - else defaultWidth - else defaultWidth - } - +enum ScalaSettingCategories(val prefixLetter: String) extends SettingCategory: + // Root settings, a category for setting that are used to configure the core compilation process + case RootSetting extends ScalaSettingCategories("") + // Warning settings, a category for settings that are used to enable and configure warnings + case WarningSetting extends ScalaSettingCategories("W") + // Fork / private settings, a category for settings that enable private or advanced features, mainly used for debugging the compiler + case ForkSetting extends ScalaSettingCategories("Y") + // Advanced settings, a category for settings that enable advanced, often unstable, features + case AdvancedSetting extends ScalaSettingCategories("X") + // Verbose settings, a category to configure the verbosity of the compiler + case VerboseSetting extends ScalaSettingCategories("V") + +object ScalaSettings extends ScalaSettings + +// Kept as seperate type to avoid breaking backward compatibility +abstract class ScalaSettings extends SettingGroup, AllScalaSettings: + val settingsByCategory: Map[SettingCategory, List[Setting[_]]] = + allSettings.groupBy(_.category) + .view.mapValues(_.toList).toMap + .withDefaultValue(Nil) + def categories: List[SettingCategory] = settingsByCategory.keys.toList.sortBy(_.prefixLetter) + val rootSettings: List[Setting[_]] = settingsByCategory(RootSetting).sortBy(_.name) + val warningSettings: List[Setting[_]] = settingsByCategory(WarningSetting).sortBy(_.name) + val forkSettings: List[Setting[_]] = settingsByCategory(ForkSetting).sortBy(_.name) + val advancedSettings: List[Setting[_]] = settingsByCategory(AdvancedSetting).sortBy(_.name) + val verboseSettings: List[Setting[_]] = settingsByCategory(VerboseSetting).sortBy(_.name) + val settingsByAliases: Map[String, Setting[_]] = allSettings.flatMap(s => s.aliases.map(_ -> s)).toMap + + trait AllScalaSettings extends CommonScalaSettings, PluginSettings, VerboseSettings, WarningSettings, XSettings, YSettings: self: SettingGroup => /* Path related settings */ - val semanticdbTarget: Setting[String] = PathSetting("-semanticdb-target", "Specify an alternative output directory for SemanticDB files.", "") - val semanticdbText: Setting[Boolean] = BooleanSetting("-semanticdb-text", "Specifies whether to include source code in SemanticDB files or not.") + val semanticdbTarget: Setting[String] = PathSetting(RootSetting, "semanticdb-target", "Specify an alternative output directory for SemanticDB files.", "") + val semanticdbText: Setting[Boolean] = BooleanSetting(RootSetting, "semanticdb-text", "Specifies whether to include source code in SemanticDB files or not.") - val source: Setting[String] = ChoiceSetting("-source", "source version", "source version", ScalaSettings.supportedSourceVersions, SourceVersion.defaultSourceVersion.toString, aliases = List("--source")) - val uniqid: Setting[Boolean] = BooleanSetting("-uniqid", "Uniquely tag all identifiers in debugging output.", aliases = List("--unique-id")) - val rewrite: Setting[Option[Rewrites]] = OptionSetting[Rewrites]("-rewrite", "When used in conjunction with a `...-migration` source version, rewrites sources to migrate to new version.", aliases = List("--rewrite")) - val fromTasty: Setting[Boolean] = BooleanSetting("-from-tasty", "Compile classes from tasty files. 
The arguments are .tasty or .jar files.", aliases = List("--from-tasty")) + val source: Setting[String] = ChoiceSetting(RootSetting, "source", "source version", "source version", ScalaSettingsProperties.supportedSourceVersions, SourceVersion.defaultSourceVersion.toString, aliases = List("--source")) + val uniqid: Setting[Boolean] = BooleanSetting(RootSetting, "uniqid", "Uniquely tag all identifiers in debugging output.", aliases = List("--unique-id")) + val rewrite: Setting[Option[Rewrites]] = OptionSetting[Rewrites](RootSetting, "rewrite", "When used in conjunction with a `...-migration` source version, rewrites sources to migrate to new version.", aliases = List("--rewrite")) + val fromTasty: Setting[Boolean] = BooleanSetting(RootSetting, "from-tasty", "Compile classes from tasty files. The arguments are .tasty or .jar files.", aliases = List("--from-tasty")) - val newSyntax: Setting[Boolean] = BooleanSetting("-new-syntax", "Require `then` and `do` in control expressions.") - val oldSyntax: Setting[Boolean] = BooleanSetting("-old-syntax", "Require `(...)` around conditions.") - val indent: Setting[Boolean] = BooleanSetting("-indent", "Together with -rewrite, remove {...} syntax when possible due to significant indentation.") - val noindent: Setting[Boolean] = BooleanSetting("-no-indent", "Require classical {...} syntax, indentation is not significant.", aliases = List("-noindent")) + val newSyntax: Setting[Boolean] = BooleanSetting(RootSetting, "new-syntax", "Require `then` and `do` in control expressions.") + val oldSyntax: Setting[Boolean] = BooleanSetting(RootSetting, "old-syntax", "Require `(...)` around conditions.") + val indent: Setting[Boolean] = BooleanSetting(RootSetting, "indent", "Together with -rewrite, remove {...} syntax when possible due to significant indentation.") + val noindent: Setting[Boolean] = BooleanSetting(RootSetting, "no-indent", "Require classical {...} syntax, indentation is not significant.", aliases = List("-noindent")) /* Decompiler settings */ - val printTasty: Setting[Boolean] = BooleanSetting("-print-tasty", "Prints the raw tasty.", aliases = List("--print-tasty")) - val printLines: Setting[Boolean] = BooleanSetting("-print-lines", "Show source code line numbers.", aliases = List("--print-lines")) + val printTasty: Setting[Boolean] = BooleanSetting(RootSetting, "print-tasty", "Prints the raw tasty.", aliases = List("--print-tasty")) + val printLines: Setting[Boolean] = BooleanSetting(RootSetting, "print-lines", "Show source code line numbers.", aliases = List("--print-lines")) /* Scala.js-related settings */ - val scalajsGenStaticForwardersForNonTopLevelObjects: Setting[Boolean] = BooleanSetting("-scalajs-genStaticForwardersForNonTopLevelObjects", "Generate static forwarders even for non-top-level objects (Scala.js only).") - val scalajsMapSourceURI: Setting[List[String]] = MultiStringSetting("-scalajs-mapSourceURI", "uri1[->uri2]", "rebases source URIs from uri1 to uri2 (or to a relative URI) for source maps (Scala.js only).") + val scalajsGenStaticForwardersForNonTopLevelObjects: Setting[Boolean] = BooleanSetting(RootSetting, "scalajs-genStaticForwardersForNonTopLevelObjects", "Generate static forwarders even for non-top-level objects (Scala.js only).") + val scalajsMapSourceURI: Setting[List[String]] = MultiStringSetting(RootSetting, "scalajs-mapSourceURI", "uri1[->uri2]", "rebases source URIs from uri1 to uri2 (or to a relative URI) for source maps (Scala.js only).") val projectUrl: Setting[String] = StringSetting ( - "-project-url", + RootSetting, 
+ "project-url", "project repository homepage", "The source repository of your project.", "" ) - val wikiSyntax: Setting[Boolean] = BooleanSetting("-Xwiki-syntax", "Retains the Scala2 behavior of using Wiki Syntax in Scaladoc.") - - val jvmargs = PrefixSetting("-J", "-J", "Pass directly to the runtime system.") - val defines = PrefixSetting("-Dproperty=value", "-D", "Pass -Dproperty=value directly to the runtime system.") + val wikiSyntax: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xwiki-syntax", "Retains the Scala2 behavior of using Wiki Syntax in Scaladoc.") + val jvmargs = PrefixSetting(RootSetting, "J", "Pass -J directly to the runtime system.") + val defines = PrefixSetting(RootSetting, "D", "Pass -D directly to the runtime system.") end AllScalaSettings /** Settings shared by compiler and scaladoc */ @@ -95,81 +87,85 @@ trait CommonScalaSettings: self: SettingGroup => /* Path related settings */ - val bootclasspath: Setting[String] = PathSetting("-bootclasspath", "Override location of bootstrap class files.", Defaults.scalaBootClassPath, aliases = List("--boot-class-path")) - val extdirs: Setting[String] = PathSetting("-extdirs", "Override location of installed extensions.", Defaults.scalaExtDirs, aliases = List("--extension-directories")) - val javabootclasspath: Setting[String] = PathSetting("-javabootclasspath", "Override java boot classpath.", Defaults.javaBootClassPath, aliases = List("--java-boot-class-path")) - val javaextdirs: Setting[String] = PathSetting("-javaextdirs", "Override java extdirs classpath.", Defaults.javaExtDirs, aliases = List("--java-extension-directories")) - val sourcepath: Setting[String] = PathSetting("-sourcepath", "Specify location(s) of source files.", Defaults.scalaSourcePath, aliases = List("--source-path")) - val sourceroot: Setting[String] = PathSetting("-sourceroot", "Specify workspace root directory.", ".") - - val classpath: Setting[String] = PathSetting("-classpath", "Specify where to find user class files.", ScalaSettings.defaultClasspath, aliases = List("-cp", "--class-path")) - val outputDir: Setting[AbstractFile] = OutputSetting("-d", "directory|jar", "Destination for generated classfiles.", + val bootclasspath: Setting[String] = PathSetting(RootSetting, "bootclasspath", "Override location of bootstrap class files.", Defaults.scalaBootClassPath, aliases = List("--boot-class-path")) + val extdirs: Setting[String] = PathSetting(RootSetting, "extdirs", "Override location of installed extensions.", Defaults.scalaExtDirs, aliases = List("--extension-directories")) + val javabootclasspath: Setting[String] = PathSetting(RootSetting, "javabootclasspath", "Override java boot classpath.", Defaults.javaBootClassPath, aliases = List("--java-boot-class-path")) + val javaextdirs: Setting[String] = PathSetting(RootSetting, "javaextdirs", "Override java extdirs classpath.", Defaults.javaExtDirs, aliases = List("--java-extension-directories")) + val sourcepath: Setting[String] = PathSetting(RootSetting, "sourcepath", "Specify location(s) of source files.", Defaults.scalaSourcePath, aliases = List("--source-path")) + val sourceroot: Setting[String] = PathSetting(RootSetting, "sourceroot", "Specify workspace root directory.", ".") + + val classpath: Setting[String] = PathSetting(RootSetting, "classpath", "Specify where to find user class files.", ScalaSettingsProperties.defaultClasspath, aliases = List("-cp", "--class-path")) + val outputDir: Setting[AbstractFile] = OutputSetting(RootSetting, "d", "directory|jar", "Destination for generated 
classfiles.", new PlainDirectory(Directory("."))) - val color: Setting[String] = ChoiceSetting("-color", "mode", "Colored output", List("always", "never"/*, "auto"*/), "always"/* "auto"*/, aliases = List("--color")) - val verbose: Setting[Boolean] = BooleanSetting("-verbose", "Output messages about what the compiler is doing.", aliases = List("--verbose")) - val version: Setting[Boolean] = BooleanSetting("-version", "Print product version and exit.", aliases = List("--version")) - val help: Setting[Boolean] = BooleanSetting("-help", "Print a synopsis of standard options.", aliases = List("--help", "-h")) - val pageWidth: Setting[Int] = IntSetting("-pagewidth", "Set page width", ScalaSettings.defaultPageWidth, aliases = List("--page-width")) - val silentWarnings: Setting[Boolean] = BooleanSetting("-nowarn", "Silence all warnings.", aliases = List("--no-warnings")) - - val javaOutputVersion: Setting[String] = ChoiceSetting("-java-output-version", "version", "Compile code with classes specific to the given version of the Java platform available on the classpath and emit bytecode for this version. Corresponds to -release flag in javac.", ScalaSettings.supportedReleaseVersions, "", aliases = List("-release", "--release")) - - val deprecation: Setting[Boolean] = BooleanSetting("-deprecation", "Emit warning and location for usages of deprecated APIs.", aliases = List("--deprecation")) - val feature: Setting[Boolean] = BooleanSetting("-feature", "Emit warning and location for usages of features that should be imported explicitly.", aliases = List("--feature")) - val explain: Setting[Boolean] = BooleanSetting("-explain", "Explain errors in more detail.", aliases = List("--explain")) + val color: Setting[String] = ChoiceSetting(RootSetting, "color", "mode", "Colored output", List("always", "never"/*, "auto"*/), "always"/* "auto"*/, aliases = List("--color")) + val verbose: Setting[Boolean] = BooleanSetting(RootSetting, "verbose", "Output messages about what the compiler is doing.", aliases = List("--verbose")) + val version: Setting[Boolean] = BooleanSetting(RootSetting, "version", "Print product version and exit.", aliases = List("--version")) + val help: Setting[Boolean] = BooleanSetting(RootSetting, "help", "Print a synopsis of standard options.", aliases = List("--help", "-h")) + val pageWidth: Setting[Int] = IntSetting(RootSetting, "pagewidth", "Set page width", ScalaSettingsProperties.defaultPageWidth, aliases = List("--page-width")) + val silentWarnings: Setting[Boolean] = BooleanSetting(RootSetting, "nowarn", "Silence all warnings.", aliases = List("--no-warnings")) + + val javaOutputVersion: Setting[String] = ChoiceSetting(RootSetting, "java-output-version", "version", "Compile code with classes specific to the given version of the Java platform available on the classpath and emit bytecode for this version. 
Corresponds to -release flag in javac.", ScalaSettingsProperties.supportedReleaseVersions, "", aliases = List("-release", "--release")) + + val deprecation: Setting[Boolean] = BooleanSetting(RootSetting, "deprecation", "Emit warning and location for usages of deprecated APIs.", aliases = List("--deprecation")) + val feature: Setting[Boolean] = BooleanSetting(RootSetting, "feature", "Emit warning and location for usages of features that should be imported explicitly.", aliases = List("--feature")) + val explain: Setting[Boolean] = BooleanSetting(RootSetting, "explain", "Explain errors in more detail.", aliases = List("--explain")) // -explain-types setting is necessary for cross compilation, since it is mentioned in sbt-tpolecat, for instance // it is otherwise subsumed by -explain, and should be dropped as soon as we can. - val explainTypes: Setting[Boolean] = BooleanSetting("-explain-types", "Explain type errors in more detail (deprecated, use -explain instead).", aliases = List("--explain-types", "-explaintypes")) - val explainCyclic: Setting[Boolean] = BooleanSetting("-explain-cyclic", "Explain cyclic reference errors in more detail.", aliases = List("--explain-cyclic")) - val unchecked: Setting[Boolean] = BooleanSetting("-unchecked", "Enable additional warnings where generated code depends on assumptions.", initialValue = true, aliases = List("--unchecked")) - val language: Setting[List[String]] = MultiStringSetting("-language", "feature", "Enable one or more language features.", aliases = List("--language")) - val experimental: Setting[Boolean] = BooleanSetting("-experimental", "Annotate all top-level definitions with @experimental. This enables the use of experimental features anywhere in the project.") + val explainTypes: Setting[Boolean] = BooleanSetting(RootSetting, "explain-types", "Explain type errors in more detail (deprecated, use -explain instead).", aliases = List("--explain-types", "-explaintypes")) + val explainCyclic: Setting[Boolean] = BooleanSetting(RootSetting, "explain-cyclic", "Explain cyclic reference errors in more detail.", aliases = List("--explain-cyclic")) + val unchecked: Setting[Boolean] = BooleanSetting(RootSetting, "unchecked", "Enable additional warnings where generated code depends on assumptions.", initialValue = true, aliases = List("--unchecked")) + val language: Setting[List[String]] = MultiStringSetting(RootSetting, "language", "feature", "Enable one or more language features.", aliases = List("--language")) + val experimental: Setting[Boolean] = BooleanSetting(RootSetting, "experimental", "Annotate all top-level definitions with @experimental. 
This enables the use of experimental features anywhere in the project.") /* Coverage settings */ - val coverageOutputDir = PathSetting("-coverage-out", "Destination for coverage classfiles and instrumentation data.", "", aliases = List("--coverage-out")) + val coverageOutputDir = PathSetting(RootSetting, "coverage-out", "Destination for coverage classfiles and instrumentation data.", "", aliases = List("--coverage-out")) + val coverageExcludeClasslikes: Setting[List[String]] = MultiStringSetting(RootSetting, "coverage-exclude-classlikes", "packages, classes and modules", "List of regexes for packages, classes and modules to exclude from coverage.", aliases = List("--coverage-exclude-classlikes")) + val coverageExcludeFiles: Setting[List[String]] = MultiStringSetting(RootSetting, "coverage-exclude-files", "files", "List of regexes for files to exclude from coverage.", aliases = List("--coverage-exclude-files")) /* Other settings */ - val encoding: Setting[String] = StringSetting("-encoding", "encoding", "Specify character encoding used by source files.", Properties.sourceEncoding, aliases = List("--encoding")) - val usejavacp: Setting[Boolean] = BooleanSetting("-usejavacp", "Utilize the java.class.path in classpath resolution.", aliases = List("--use-java-class-path")) - val scalajs: Setting[Boolean] = BooleanSetting("-scalajs", "Compile in Scala.js mode (requires scalajs-library.jar on the classpath).", aliases = List("--scalajs")) + val encoding: Setting[String] = StringSetting(RootSetting, "encoding", "encoding", "Specify character encoding used by source files.", Properties.sourceEncoding, aliases = List("--encoding")) + val usejavacp: Setting[Boolean] = BooleanSetting(RootSetting, "usejavacp", "Utilize the java.class.path in classpath resolution.", aliases = List("--use-java-class-path")) + val scalajs: Setting[Boolean] = BooleanSetting(RootSetting, "scalajs", "Compile in Scala.js mode (requires scalajs-library.jar on the classpath).", aliases = List("--scalajs")) end CommonScalaSettings /** -P "plugin" settings. Various tools might support plugins. */ private sealed trait PluginSettings: self: SettingGroup => - val plugin: Setting[List[String]] = MultiStringSetting ("-Xplugin", "paths", "Load a plugin from each classpath.") - val disable: Setting[List[String]] = MultiStringSetting ("-Xplugin-disable", "plugin", "Disable plugins by name.") - val require: Setting[List[String]] = MultiStringSetting ("-Xplugin-require", "plugin", "Abort if a named plugin is not loaded.") - val showPlugins: Setting[Boolean] = BooleanSetting ("-Xplugin-list", "Print a synopsis of loaded plugins.") - val pluginsDir: Setting[String] = StringSetting ("-Xpluginsdir", "path", "Path to search for plugin archives.", Defaults.scalaPluginPath) - val pluginOptions: Setting[List[String]] = MultiStringSetting ("-P", "plugin:opt", "Pass an option to a plugin, e.g. 
-P::") + val plugin: Setting[List[String]] = MultiStringSetting (AdvancedSetting, "Xplugin", "paths", "Load a plugin from each classpath.") + val disable: Setting[List[String]] = MultiStringSetting (AdvancedSetting, "Xplugin-disable", "plugin", "Disable plugins by name.") + val require: Setting[List[String]] = MultiStringSetting (AdvancedSetting, "Xplugin-require", "plugin", "Abort if a named plugin is not loaded.") + val showPlugins: Setting[Boolean] = BooleanSetting (AdvancedSetting, "Xplugin-list", "Print a synopsis of loaded plugins.") + val pluginsDir: Setting[String] = StringSetting (AdvancedSetting, "Xpluginsdir", "path", "Path to search for plugin archives.", Defaults.scalaPluginPath) + val pluginOptions: Setting[List[String]] = MultiStringSetting (RootSetting, "P", "plugin:opt", "Pass an option to a plugin, e.g. -P::") /** -V "Verbose" settings */ private sealed trait VerboseSettings: self: SettingGroup => - val Vhelp: Setting[Boolean] = BooleanSetting("-V", "Print a synopsis of verbose options.") - val Xprint: Setting[List[String]] = PhasesSetting("-Vprint", "Print out program after", aliases = List("-Xprint")) - val XshowPhases: Setting[Boolean] = BooleanSetting("-Vphases", "List compiler phases.", aliases = List("-Xshow-phases")) + val Vhelp: Setting[Boolean] = BooleanSetting(VerboseSetting, "V", "Print a synopsis of verbose options.") + val Xprint: Setting[List[String]] = PhasesSetting(VerboseSetting, "Vprint", "Print out program after", aliases = List("-Xprint")) + val XshowPhases: Setting[Boolean] = BooleanSetting(VerboseSetting, "Vphases", "List compiler phases.", aliases = List("-Xshow-phases")) - val Vprofile: Setting[Boolean] = BooleanSetting("-Vprofile", "Show metrics about sources and internal representations to estimate compile-time complexity.") - val VprofileSortedBy = ChoiceSetting("-Vprofile-sorted-by", "key", "Show metrics about sources and internal representations sorted by given column name", List("name", "path", "lines", "tokens", "tasty", "complexity"), "") - val VprofileDetails = IntSetting("-Vprofile-details", "Show metrics about sources and internal representations of the most complex methods", 0) - val VreplMaxPrintElements: Setting[Int] = IntSetting("-Vrepl-max-print-elements", "Number of elements to be printed before output is truncated.", 1000) - val VreplMaxPrintCharacters: Setting[Int] = IntSetting("-Vrepl-max-print-characters", "Number of characters to be printed before output is truncated.", 50000) + val Vprofile: Setting[Boolean] = BooleanSetting(VerboseSetting, "Vprofile", "Show metrics about sources and internal representations to estimate compile-time complexity.") + val VprofileSortedBy = ChoiceSetting(VerboseSetting, "Vprofile-sorted-by", "key", "Show metrics about sources and internal representations sorted by given column name", List("name", "path", "lines", "tokens", "tasty", "complexity"), "") + val VprofileDetails = IntSetting(VerboseSetting, "Vprofile-details", "Show metrics about sources and internal representations of the most complex methods", 0) + val VreplMaxPrintElements: Setting[Int] = IntSetting(VerboseSetting, "Vrepl-max-print-elements", "Number of elements to be printed before output is truncated.", 1000) + val VreplMaxPrintCharacters: Setting[Int] = IntSetting(VerboseSetting, "Vrepl-max-print-characters", "Number of characters to be printed before output is truncated.", 50000) /** -W "Warnings" settings */ private sealed trait WarningSettings: self: SettingGroup => - val Whelp: Setting[Boolean] = BooleanSetting("-W", "Print a 
synopsis of warning options.")
- val XfatalWarnings: Setting[Boolean] = BooleanSetting("-Werror", "Fail the compilation if there are any warnings.", aliases = List("-Xfatal-warnings"))
- val WvalueDiscard: Setting[Boolean] = BooleanSetting("-Wvalue-discard", "Warn when non-Unit expression results are unused.")
- val WNonUnitStatement = BooleanSetting("-Wnonunit-statement", "Warn when block statements are non-Unit expressions.")
- val WimplausiblePatterns = BooleanSetting("-Wimplausible-patterns", "Warn if comparison with a pattern value looks like it might always fail.")
- val WunstableInlineAccessors = BooleanSetting("-WunstableInlineAccessors", "Warn an inline methods has references to non-stable binary APIs.")
+ val Whelp: Setting[Boolean] = BooleanSetting(WarningSetting, "W", "Print a synopsis of warning options.")
+ val XfatalWarnings: Setting[Boolean] = BooleanSetting(WarningSetting, "Werror", "Fail the compilation if there are any warnings.", aliases = List("-Xfatal-warnings"))
+ val WvalueDiscard: Setting[Boolean] = BooleanSetting(WarningSetting, "Wvalue-discard", "Warn when non-Unit expression results are unused.")
+ val WNonUnitStatement = BooleanSetting(WarningSetting, "Wnonunit-statement", "Warn when block statements are non-Unit expressions.")
+ val WenumCommentDiscard = BooleanSetting(WarningSetting, "Wenum-comment-discard", "Warn when a comment ambiguously assigned to multiple enum cases is discarded.")
+ val WimplausiblePatterns = BooleanSetting(WarningSetting, "Wimplausible-patterns", "Warn if comparison with a pattern value looks like it might always fail.")
+ val WunstableInlineAccessors = BooleanSetting(WarningSetting, "WunstableInlineAccessors", "Warn when an inline method has references to non-stable binary APIs.")
val Wunused: Setting[List[ChoiceWithHelp[String]]] = MultiChoiceHelpSetting(
- name = "-Wunused",
+ WarningSetting,
+ name = "Wunused",
helpArg = "warning",
descr = "Enable or disable specific `unused` warnings",
choices = List(
@@ -228,7 +224,8 @@ private sealed trait WarningSettings:
isChoiceSet("strict-no-implicit-warn")
val Wconf: Setting[List[String]] = MultiStringSetting(
- "-Wconf",
+ WarningSetting,
+ "Wconf",
"patterns",
default = List(),
descr =
@@ -274,33 +271,56 @@ private sealed trait WarningSettings:
|to prevent the shell from expanding patterns.""".stripMargin,
)
+ val Wshadow: Setting[List[ChoiceWithHelp[String]]] = MultiChoiceHelpSetting(
+ WarningSetting,
+ name = "Wshadow",
+ helpArg = "warning",
+ descr = "Enable or disable specific `shadow` warnings",
+ choices = List(
+ ChoiceWithHelp("all", ""),
+ ChoiceWithHelp("private-shadow", "Warn if a private field or class parameter shadows a superclass field"),
+ ChoiceWithHelp("type-parameter-shadow", "Warn when a type parameter shadows a type already in the scope"),
+ ),
+ default = Nil
+ )
+
+ object WshadowHas:
+ def allOr(s: String)(using Context) =
+ Wshadow.value.pipe(us => us.contains("all") || us.contains(s))
+ def privateShadow(using Context) =
+ allOr("private-shadow")
+ def typeParameterShadow(using Context) =
+ allOr("type-parameter-shadow")
+
+
/** -X "Extended" or "Advanced" settings */
private sealed trait XSettings:
self: SettingGroup =>
- val Xhelp: Setting[Boolean] = BooleanSetting("-X", "Print a synopsis of advanced options.")
- val XnoForwarders: Setting[Boolean] = BooleanSetting("-Xno-forwarders", "Do not generate static forwarders in mirror classes.")
- val XmaxInlines: Setting[Int] = IntSetting("-Xmax-inlines", "Maximal number of successive inlines.", 32)
- val
XmaxInlinedTrees: Setting[Int] = IntSetting("-Xmax-inlined-trees", "Maximal number of inlined trees.", 2_000_000) - val Xmigration: Setting[ScalaVersion] = VersionSetting("-Xmigration", "Warn about constructs whose behavior may have changed since version.") - val XprintTypes: Setting[Boolean] = BooleanSetting("-Xprint-types", "Print tree types (debugging option).") - val XprintDiff: Setting[Boolean] = BooleanSetting("-Xprint-diff", "Print changed parts of the tree since last print.") - val XprintDiffDel: Setting[Boolean] = BooleanSetting("-Xprint-diff-del", "Print changed parts of the tree since last print including deleted parts.") - val XprintInline: Setting[Boolean] = BooleanSetting("-Xprint-inline", "Show where inlined code comes from.") - val XprintSuspension: Setting[Boolean] = BooleanSetting("-Xprint-suspension", "Show when code is suspended until macros are compiled.") - val Xprompt: Setting[Boolean] = BooleanSetting("-Xprompt", "Display a prompt after each error (debugging option).") - val XreplDisableDisplay: Setting[Boolean] = BooleanSetting("-Xrepl-disable-display", "Do not display definitions in REPL.") - val XverifySignatures: Setting[Boolean] = BooleanSetting("-Xverify-signatures", "Verify generic signatures in generated bytecode.") - val XignoreScala2Macros: Setting[Boolean] = BooleanSetting("-Xignore-scala2-macros", "Ignore errors when compiling code that calls Scala2 macros, these will fail at runtime.") - val XimportSuggestionTimeout: Setting[Int] = IntSetting("-Ximport-suggestion-timeout", "Timeout (in ms) for searching for import suggestions when errors are reported.", 8000) - val Xsemanticdb: Setting[Boolean] = BooleanSetting("-Xsemanticdb", "Store information in SemanticDB.", aliases = List("-Ysemanticdb")) - val XuncheckedJavaOutputVersion: Setting[String] = ChoiceSetting("-Xunchecked-java-output-version", "target", "Emit bytecode for the specified version of the Java platform. This might produce bytecode that will break at runtime. Corresponds to -target flag in javac. 
When on JDK 9+, consider -java-output-version as a safer alternative.", ScalaSettings.supportedTargetVersions, "", aliases = List("-Xtarget", "--Xtarget")) - val XcheckMacros: Setting[Boolean] = BooleanSetting("-Xcheck-macros", "Check some invariants of macro generated code while expanding macros", aliases = List("--Xcheck-macros")) - val XmainClass: Setting[String] = StringSetting("-Xmain-class", "path", "Class for manifest's Main-Class entry (only useful with -d )", "") - val XimplicitSearchLimit: Setting[Int] = IntSetting("-Ximplicit-search-limit", "Maximal number of expressions to be generated in an implicit search", 50000) + val Xhelp: Setting[Boolean] = BooleanSetting(AdvancedSetting, "X", "Print a synopsis of advanced options.") + val XnoForwarders: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xno-forwarders", "Do not generate static forwarders in mirror classes.") + val XmaxInlines: Setting[Int] = IntSetting(AdvancedSetting, "Xmax-inlines", "Maximal number of successive inlines.", 32) + val XmaxInlinedTrees: Setting[Int] = IntSetting(AdvancedSetting, "Xmax-inlined-trees", "Maximal number of inlined trees.", 2_000_000) + val Xmigration: Setting[ScalaVersion] = VersionSetting(AdvancedSetting, "Xmigration", "Warn about constructs whose behavior may have changed since version.", legacyArgs = true) + val XprintTypes: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xprint-types", "Print tree types (debugging option).") + val XprintDiff: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xprint-diff", "Print changed parts of the tree since last print.") + val XprintDiffDel: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xprint-diff-del", "Print changed parts of the tree since last print including deleted parts.") + val XprintInline: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xprint-inline", "Show where inlined code comes from.") + val XprintSuspension: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xprint-suspension", "Show when code is suspended until macros are compiled.") + val Xprompt: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xprompt", "Display a prompt after each error (debugging option).") + val XreplDisableDisplay: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xrepl-disable-display", "Do not display definitions in REPL.") + val XverifySignatures: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xverify-signatures", "Verify generic signatures in generated bytecode.") + val XignoreScala2Macros: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xignore-scala2-macros", "Ignore errors when compiling code that calls Scala2 macros, these will fail at runtime.") + val XimportSuggestionTimeout: Setting[Int] = IntSetting(AdvancedSetting, "Ximport-suggestion-timeout", "Timeout (in ms) for searching for import suggestions when errors are reported.", 8000) + val Xsemanticdb: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xsemanticdb", "Store information in SemanticDB.", aliases = List("-Ysemanticdb")) + val XuncheckedJavaOutputVersion: Setting[String] = ChoiceSetting(AdvancedSetting, "Xunchecked-java-output-version", "target", "Emit bytecode for the specified version of the Java platform. This might produce bytecode that will break at runtime. Corresponds to -target flag in javac. 
When on JDK 9+, consider -java-output-version as a safer alternative.", ScalaSettingsProperties.supportedTargetVersions, "", aliases = List("-Xtarget", "--Xtarget"))
+ val XcheckMacros: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xcheck-macros", "Check some invariants of macro generated code while expanding macros", aliases = List("--Xcheck-macros"))
+ val XmainClass: Setting[String] = StringSetting(AdvancedSetting, "Xmain-class", "path", "Class for manifest's Main-Class entry (only useful with -d <jar>)", "")
+ val XimplicitSearchLimit: Setting[Int] = IntSetting(AdvancedSetting, "Ximplicit-search-limit", "Maximal number of expressions to be generated in an implicit search", 50000)
val XmixinForceForwarders = ChoiceSetting(
- name = "-Xmixin-force-forwarders",
+ AdvancedSetting,
+ name = "Xmixin-force-forwarders",
helpArg = "mode",
descr = "Generate forwarder methods in classes inhering concrete methods from traits.",
choices = List("true", "junit", "false"),
@@ -311,27 +331,10 @@ private sealed trait XSettings:
def isAtLeastJunit(using Context) = isTruthy || XmixinForceForwarders.value == "junit"
}
- val XmacroSettings: Setting[List[String]] = MultiStringSetting("-Xmacro-settings", "setting1,setting2,..settingN", "List of settings which exposed to the macros")
+ val XmacroSettings: Setting[List[String]] = MultiStringSetting(AdvancedSetting, "Xmacro-settings", "setting1,setting2,..settingN", "List of settings which are exposed to the macros")
- val Xlint: Setting[List[ChoiceWithHelp[String]]] = UncompleteMultiChoiceHelpSetting(
- name = "-Xlint",
- helpArg = "advanced warning",
- descr = "Enable or disable specific `lint` warnings",
- choices = List(
- ChoiceWithHelp("all", ""),
- ChoiceWithHelp("private-shadow", "Warn if a private field or class parameter shadows a superclass field"),
- ChoiceWithHelp("type-parameter-shadow", "Warn when a type parameter shadows a type already in the scope"),
- ),
- default = Nil
- )
-
- object XlintHas:
- def allOr(s: String)(using Context) =
- Xlint.value.pipe(us => us.contains("all") || us.contains(s))
- def privateShadow(using Context) =
- allOr("private-shadow")
- def typeParameterShadow(using Context) =
- allOr("type-parameter-shadow")
+ // Deprecated
+ val Xlint: Setting[_] = DeprecatedSetting(AdvancedSetting, "Xlint", "Enable or disable specific warnings", "Use -Wshadow to enable shadowing lints.")
end XSettings
@@ -339,106 +342,104 @@ end XSettings
private sealed trait YSettings:
self: SettingGroup =>
- val Yhelp: Setting[Boolean] = BooleanSetting("-Y", "Print a synopsis of private options.")
- val Ycheck: Setting[List[String]] = PhasesSetting("-Ycheck", "Check the tree at the end of")
- val YcheckMods: Setting[Boolean] = BooleanSetting("-Ycheck-mods", "Check that symbols and their defining trees have modifiers in sync.")
- val Ydebug: Setting[Boolean] = BooleanSetting("-Ydebug", "Increase the quantity of debugging output.")
- val YdebugTrace: Setting[Boolean] = BooleanSetting("-Ydebug-trace", "Trace core operations.")
- val YdebugFlags: Setting[Boolean] = BooleanSetting("-Ydebug-flags", "Print all flags of definitions.")
- val YdebugMissingRefs: Setting[Boolean] = BooleanSetting("-Ydebug-missing-refs", "Print a stacktrace when a required symbol is missing.")
- val YdebugNames: Setting[Boolean] = BooleanSetting("-Ydebug-names", "Show internal representation of names.")
- val YdebugPos: Setting[Boolean] = BooleanSetting("-Ydebug-pos", "Show full source positions including spans.")
- val YdebugTreeWithId: Setting[Int] =
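With `-Xlint` demoted to a `DeprecatedSetting`, its two lint choices live on under the new `-Wshadow` multi-choice defined earlier in this file. A sketch (an assumption, not code taken from this patch) of how a call site that used `XlintHas` would consult the replacement:

    import dotty.tools.dotc.core.Contexts.*

    // True when -Wshadow:all or -Wshadow:private-shadow was passed,
    // mirroring the old ctx.settings.XlintHas.privateShadow lookup.
    def warnOnPrivateShadow(using Context): Boolean =
      ctx.settings.WshadowHas.privateShadow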
IntSetting("-Ydebug-tree-with-id", "Print the stack trace when the tree with the given id is created.", Int.MinValue) - val YdebugTypeError: Setting[Boolean] = BooleanSetting("-Ydebug-type-error", "Print the stack trace when a TypeError is caught", false) - val YdebugError: Setting[Boolean] = BooleanSetting("-Ydebug-error", "Print the stack trace when any error is caught.", false) - val YdebugUnpickling: Setting[Boolean] = BooleanSetting("-Ydebug-unpickling", "Print the stack trace when an error occurs when reading Tasty.", false) - val YdebugCyclic: Setting[Boolean] = BooleanSetting("-Ydebug-cyclic", "Print the stack trace when a cyclic reference error occurs.", false) - val YtermConflict: Setting[String] = ChoiceSetting("-Yresolve-term-conflict", "strategy", "Resolve term conflicts", List("package", "object", "error"), "error") - val Ylog: Setting[List[String]] = PhasesSetting("-Ylog", "Log operations during") - val YlogClasspath: Setting[Boolean] = BooleanSetting("-Ylog-classpath", "Output information about what classpath is being applied.") - val YdisableFlatCpCaching: Setting[Boolean] = BooleanSetting("-YdisableFlatCpCaching", "Do not cache flat classpath representation of classpath elements from jars across compiler instances.") - - val Yscala2Unpickler: Setting[String] = StringSetting("-Yscala2-unpickler", "", "Control where we may get Scala 2 symbols from. This is either \"always\", \"never\", or a classpath.", "always") - - val YnoImports: Setting[Boolean] = BooleanSetting("-Yno-imports", "Compile without importing scala.*, java.lang.*, or Predef.") - val Yimports: Setting[List[String]] = MultiStringSetting("-Yimports", helpArg="", "Custom root imports. If set, none of scala.*, java.lang.*, or Predef.* will be imported unless explicitly included.") - val YnoGenericSig: Setting[Boolean] = BooleanSetting("-Yno-generic-signatures", "Suppress generation of generic signatures for Java.") - val YnoPredef: Setting[Boolean] = BooleanSetting("-Yno-predef", "Compile without importing Predef.") - val Yskip: Setting[List[String]] = PhasesSetting("-Yskip", "Skip") - val Ydumpclasses: Setting[String] = StringSetting("-Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "") - val YjarCompressionLevel: Setting[Int] = IntChoiceSetting("-Yjar-compression-level", "compression level to use when writing jar files", Deflater.DEFAULT_COMPRESSION to Deflater.BEST_COMPRESSION, Deflater.DEFAULT_COMPRESSION) - val YbackendParallelism: Setting[Int] = IntChoiceSetting("-Ybackend-parallelism", "maximum worker threads for backend", 1 to 16, 1) - val YbackendWorkerQueue: Setting[Int] = IntChoiceSetting("-Ybackend-worker-queue", "backend threads worker queue size", 0 to 1000, 0) - val YstopAfter: Setting[List[String]] = PhasesSetting("-Ystop-after", "Stop after", aliases = List("-stop")) // backward compat - val YstopBefore: Setting[List[String]] = PhasesSetting("-Ystop-before", "Stop before") // stop before erasure as long as we have not debugged it fully - val YshowSuppressedErrors: Setting[Boolean] = BooleanSetting("-Yshow-suppressed-errors", "Also show follow-on errors and warnings that are normally suppressed.") - val YdetailedStats: Setting[Boolean] = BooleanSetting("-Ydetailed-stats", "Show detailed internal compiler stats (needs Stats.enabled to be set to true).") - val YkindProjector: Setting[String] = ChoiceSetting("-Ykind-projector", "[underscores, disable]", "Allow `*` as type lambda placeholder to be 
compatible with kind projector. When invoked as -Ykind-projector:underscores will repurpose `_` to be a type parameter placeholder, this will disable usage of underscore as a wildcard.", List("disable", "", "underscores"), "disable") - val YprintPos: Setting[Boolean] = BooleanSetting("-Yprint-pos", "Show tree positions.") - val YprintPosSyms: Setting[Boolean] = BooleanSetting("-Yprint-pos-syms", "Show symbol definitions positions.") - val YnoDeepSubtypes: Setting[Boolean] = BooleanSetting("-Yno-deep-subtypes", "Throw an exception on deep subtyping call stacks.") - val YnoPatmatOpt: Setting[Boolean] = BooleanSetting("-Yno-patmat-opt", "Disable all pattern matching optimizations.") - val YplainPrinter: Setting[Boolean] = BooleanSetting("-Yplain-printer", "Pretty-print using a plain printer.") - val YprintSyms: Setting[Boolean] = BooleanSetting("-Yprint-syms", "When printing trees print info in symbols instead of corresponding info in trees.") - val YprintDebug: Setting[Boolean] = BooleanSetting("-Yprint-debug", "When printing trees, print some extra information useful for debugging.") - val YprintDebugOwners: Setting[Boolean] = BooleanSetting("-Yprint-debug-owners", "When printing trees, print owners of definitions.") - val YprintLevel: Setting[Boolean] = BooleanSetting("-Yprint-level", "print nesting levels of symbols and type variables.") - val YshowPrintErrors: Setting[Boolean] = BooleanSetting("-Yshow-print-errors", "Don't suppress exceptions thrown during tree printing.") - val YprintTasty: Setting[Boolean] = BooleanSetting("-Yprint-tasty", "Prints the generated TASTY to stdout.") - val YtestPickler: Setting[Boolean] = BooleanSetting("-Ytest-pickler", "Self-test for pickling functionality; should be used with -Ystop-after:pickler.") - val YcheckReentrant: Setting[Boolean] = BooleanSetting("-Ycheck-reentrant", "Check that compiled program does not contain vars that can be accessed from a global root.") - val YdropComments: Setting[Boolean] = BooleanSetting("-Ydrop-docs", "Drop documentation when scanning source files.", aliases = List("-Ydrop-comments")) - val YcookComments: Setting[Boolean] = BooleanSetting("-Ycook-docs", "Cook the documentation (type check `@usecase`, etc.)", aliases = List("-Ycook-comments")) - val YreadComments: Setting[Boolean] = BooleanSetting("-Yread-docs", "Read documentation from tasty.") - val YforceSbtPhases: Setting[Boolean] = BooleanSetting("-Yforce-sbt-phases", "Run the phases used by sbt for incremental compilation (ExtractDependencies and ExtractAPI) even if the compiler is ran outside of sbt, for debugging.") - val YdumpSbtInc: Setting[Boolean] = BooleanSetting("-Ydump-sbt-inc", "For every compiled foo.scala, output the API representation and dependencies used for sbt incremental compilation in foo.inc, implies -Yforce-sbt-phases.") - val YcheckAllPatmat: Setting[Boolean] = BooleanSetting("-Ycheck-all-patmat", "Check exhaustivity and redundancy of all pattern matching (used for testing the algorithm).") - val YcheckConstraintDeps: Setting[Boolean] = BooleanSetting("-Ycheck-constraint-deps", "Check dependency tracking in constraints (used for testing the algorithm).") - val YretainTrees: Setting[Boolean] = BooleanSetting("-Yretain-trees", "Retain trees for top-level classes, accessible from ClassSymbol#tree") - val YshowTreeIds: Setting[Boolean] = BooleanSetting("-Yshow-tree-ids", "Uniquely tag all tree nodes in debugging output.") - val YfromTastyIgnoreList: Setting[List[String]] = MultiStringSetting("-Yfrom-tasty-ignore-list", "file", "List of `tasty` 
files in jar files that will not be loaded when using -from-tasty.") - val YnoExperimental: Setting[Boolean] = BooleanSetting("-Yno-experimental", "Disable experimental language features.") - val YlegacyLazyVals: Setting[Boolean] = BooleanSetting("-Ylegacy-lazy-vals", "Use legacy (pre 3.3.0) implementation of lazy vals.") - val YcompileScala2Library: Setting[Boolean] = BooleanSetting("-Ycompile-scala2-library", "Used when compiling the Scala 2 standard library.") - val YoutputOnlyTasty: Setting[Boolean] = BooleanSetting("-Youtput-only-tasty", "Used to only generate the TASTy file without the classfiles") - - val YprofileEnabled: Setting[Boolean] = BooleanSetting("-Yprofile-enabled", "Enable profiling.") - val YprofileDestination: Setting[String] = StringSetting("-Yprofile-destination", "file", "Where to send profiling output - specify a file, default is to the console.", "") + val Yhelp: Setting[Boolean] = BooleanSetting(ForkSetting, "Y", "Print a synopsis of private options.") + val Ycheck: Setting[List[String]] = PhasesSetting(ForkSetting, "Ycheck", "Check the tree at the end of") + val YcheckMods: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycheck-mods", "Check that symbols and their defining trees have modifiers in sync.") + val Ydebug: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydebug", "Increase the quantity of debugging output.") + val YdebugTrace: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydebug-trace", "Trace core operations.") + val YdebugFlags: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydebug-flags", "Print all flags of definitions.") + val YdebugMissingRefs: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydebug-missing-refs", "Print a stacktrace when a required symbol is missing.") + val YdebugNames: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydebug-names", "Show internal representation of names.") + val YdebugPos: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydebug-pos", "Show full source positions including spans.") + val YdebugTreeWithId: Setting[Int] = IntSetting(ForkSetting, "Ydebug-tree-with-id", "Print the stack trace when the tree with the given id is created.", Int.MinValue) + val YdebugTypeError: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydebug-type-error", "Print the stack trace when a TypeError is caught", false) + val YdebugError: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydebug-error", "Print the stack trace when any error is caught.", false) + val YdebugUnpickling: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydebug-unpickling", "Print the stack trace when an error occurs when reading Tasty.", false) + val YdebugCyclic: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydebug-cyclic", "Print the stack trace when a cyclic reference error occurs.", false) + val YtermConflict: Setting[String] = ChoiceSetting(ForkSetting, "Yresolve-term-conflict", "strategy", "Resolve term conflicts", List("package", "object", "error"), "error") + val Ylog: Setting[List[String]] = PhasesSetting(ForkSetting, "Ylog", "Log operations during") + val YlogClasspath: Setting[Boolean] = BooleanSetting(ForkSetting, "Ylog-classpath", "Output information about what classpath is being applied.") + val YdisableFlatCpCaching: Setting[Boolean] = BooleanSetting(ForkSetting, "YdisableFlatCpCaching", "Do not cache flat classpath representation of classpath elements from jars across compiler instances.") + + val Yscala2Unpickler: Setting[String] = StringSetting(ForkSetting, "Yscala2-unpickler", "", "Control where we may get Scala 2 symbols from. 
This is either \"always\", \"never\", or a classpath.", "always") + + val YnoImports: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-imports", "Compile without importing scala.*, java.lang.*, or Predef.") + val Yimports: Setting[List[String]] = MultiStringSetting(ForkSetting, "Yimports", helpArg="", "Custom root imports. If set, none of scala.*, java.lang.*, or Predef.* will be imported unless explicitly included.") + val YnoGenericSig: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-generic-signatures", "Suppress generation of generic signatures for Java.") + val YnoPredef: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-predef", "Compile without importing Predef.") + val Yskip: Setting[List[String]] = PhasesSetting(ForkSetting, "Yskip", "Skip") + val Ydumpclasses: Setting[String] = StringSetting(ForkSetting, "Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "") + val YjarCompressionLevel: Setting[Int] = IntChoiceSetting(ForkSetting, "Yjar-compression-level", "compression level to use when writing jar files", Deflater.DEFAULT_COMPRESSION to Deflater.BEST_COMPRESSION, Deflater.DEFAULT_COMPRESSION) + val YbackendParallelism: Setting[Int] = IntChoiceSetting(ForkSetting, "Ybackend-parallelism", "maximum worker threads for backend", 1 to 16, 1) + val YbackendWorkerQueue: Setting[Int] = IntChoiceSetting(ForkSetting, "Ybackend-worker-queue", "backend threads worker queue size", 0 to 1000, 0) + val YstopAfter: Setting[List[String]] = PhasesSetting(ForkSetting, "Ystop-after", "Stop after", aliases = List("-stop")) // backward compat + val YstopBefore: Setting[List[String]] = PhasesSetting(ForkSetting, "Ystop-before", "Stop before") // stop before erasure as long as we have not debugged it fully + val YshowSuppressedErrors: Setting[Boolean] = BooleanSetting(ForkSetting, "Yshow-suppressed-errors", "Also show follow-on errors and warnings that are normally suppressed.") + val YdetailedStats: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydetailed-stats", "Show detailed internal compiler stats (needs Stats.enabled to be set to true).") + val YkindProjector: Setting[String] = ChoiceSetting(ForkSetting, "Ykind-projector", "[underscores, enable, disable]", "Allow `*` as type lambda placeholder to be compatible with kind projector. 
When invoked as -Ykind-projector:underscores will repurpose `_` to be a type parameter placeholder, this will disable usage of underscore as a wildcard.", List("disable", "", "underscores"), "disable", legacyArgs = true) + val YprintPos: Setting[Boolean] = BooleanSetting(ForkSetting, "Yprint-pos", "Show tree positions.") + val YprintPosSyms: Setting[Boolean] = BooleanSetting(ForkSetting, "Yprint-pos-syms", "Show symbol definitions positions.") + val YnoDeepSubtypes: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-deep-subtypes", "Throw an exception on deep subtyping call stacks.") + val YnoPatmatOpt: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-patmat-opt", "Disable all pattern matching optimizations.") + val YplainPrinter: Setting[Boolean] = BooleanSetting(ForkSetting, "Yplain-printer", "Pretty-print using a plain printer.") + val YprintSyms: Setting[Boolean] = BooleanSetting(ForkSetting, "Yprint-syms", "When printing trees print info in symbols instead of corresponding info in trees.") + val YprintDebug: Setting[Boolean] = BooleanSetting(ForkSetting, "Yprint-debug", "When printing trees, print some extra information useful for debugging.") + val YprintDebugOwners: Setting[Boolean] = BooleanSetting(ForkSetting, "Yprint-debug-owners", "When printing trees, print owners of definitions.") + val YprintLevel: Setting[Boolean] = BooleanSetting(ForkSetting, "Yprint-level", "print nesting levels of symbols and type variables.") + val YshowPrintErrors: Setting[Boolean] = BooleanSetting(ForkSetting, "Yshow-print-errors", "Don't suppress exceptions thrown during tree printing.") + val YprintTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Yprint-tasty", "Prints the generated TASTY to stdout.") + val YtestPickler: Setting[Boolean] = BooleanSetting(ForkSetting, "Ytest-pickler", "Self-test for pickling functionality; should be used with -Ystop-after:pickler.") + val YtestPicklerCheck: Setting[Boolean] = BooleanSetting(ForkSetting, "Ytest-pickler-check", "Self-test for pickling -print-tasty output; should be used with -Ytest-pickler.") + val YcheckReentrant: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycheck-reentrant", "Check that compiled program does not contain vars that can be accessed from a global root.") + val YdropComments: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydrop-docs", "Drop documentation when scanning source files.", aliases = List("-Ydrop-comments")) + val YcookComments: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycook-docs", "Cook the documentation (type check `@usecase`, etc.)", aliases = List("-Ycook-comments")) + val YreadComments: Setting[Boolean] = BooleanSetting(ForkSetting, "Yread-docs", "Read documentation from tasty.") + val YforceSbtPhases: Setting[Boolean] = BooleanSetting(ForkSetting, "Yforce-sbt-phases", "Run the phases used by sbt for incremental compilation (ExtractDependencies and ExtractAPI) even if the compiler is ran outside of sbt, for debugging.") + val YdumpSbtInc: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydump-sbt-inc", "For every compiled foo.scala, output the API representation and dependencies used for sbt incremental compilation in foo.inc, implies -Yforce-sbt-phases.") + val YcheckAllPatmat: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycheck-all-patmat", "Check exhaustivity and redundancy of all pattern matching (used for testing the algorithm).") + val YcheckConstraintDeps: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycheck-constraint-deps", "Check dependency tracking in constraints (used for testing the 
algorithm).") + val YretainTrees: Setting[Boolean] = BooleanSetting(ForkSetting, "Yretain-trees", "Retain trees for top-level classes, accessible from ClassSymbol#tree") + val YshowTreeIds: Setting[Boolean] = BooleanSetting(ForkSetting, "Yshow-tree-ids", "Uniquely tag all tree nodes in debugging output.") + val YfromTastyIgnoreList: Setting[List[String]] = MultiStringSetting(ForkSetting, "Yfrom-tasty-ignore-list", "file", "List of `tasty` files in jar files that will not be loaded when using -from-tasty.") + val YnoExperimental: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-experimental", "Disable experimental language features by default in NIGHTLY/SNAPSHOT versions of the compiler.") + val YlegacyLazyVals: Setting[Boolean] = BooleanSetting(ForkSetting, "Ylegacy-lazy-vals", "Use legacy (pre 3.3.0) implementation of lazy vals.") + val YcompileScala2Library: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycompile-scala2-library", "Used when compiling the Scala 2 standard library.") + val YoutputOnlyTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Youtput-only-tasty", "Used to only generate the TASTy file without the classfiles") + val YprofileEnabled: Setting[Boolean] = BooleanSetting(ForkSetting, "Yprofile-enabled", "Enable profiling.") + val YprofileDestination: Setting[String] = StringSetting(ForkSetting, "Yprofile-destination", "file", "Where to send profiling output - specify a file, default is to the console.", "") //.withPostSetHook( _ => YprofileEnabled.value = true ) - val YprofileExternalTool: Setting[List[String]] = PhasesSetting("-Yprofile-external-tool", "Enable profiling for a phase using an external tool hook. Generally only useful for a single phase.", "typer") + val YprofileExternalTool: Setting[List[String]] = PhasesSetting(ForkSetting, "Yprofile-external-tool", "Enable profiling for a phase using an external tool hook. Generally only useful for a single phase.", "typer") //.withPostSetHook( _ => YprofileEnabled.value = true ) - val YprofileRunGcBetweenPhases: Setting[List[String]] = PhasesSetting("-Yprofile-run-gc", "Run a GC between phases - this allows heap size to be accurate at the expense of more time. Specify a list of phases, or *", "_") + val YprofileRunGcBetweenPhases: Setting[List[String]] = PhasesSetting(ForkSetting, "Yprofile-run-gc", "Run a GC between phases - this allows heap size to be accurate at the expense of more time. Specify a list of phases, or *", "_") //.withPostSetHook( _ => YprofileEnabled.value = true ) // Experimental language features - val YnoKindPolymorphism: Setting[Boolean] = BooleanSetting("-Yno-kind-polymorphism", "Disable kind polymorphism.") - val YexplicitNulls: Setting[Boolean] = BooleanSetting("-Yexplicit-nulls", "Make reference types non-nullable. Nullable types can be expressed with unions: e.g. 
String|Null.") - val YcheckInit: Setting[Boolean] = BooleanSetting("-Ysafe-init", "Ensure safe initialization of objects.") - val YcheckInitGlobal: Setting[Boolean] = BooleanSetting("-Ysafe-init-global", "Check safe initialization of global objects.") - val YrequireTargetName: Setting[Boolean] = BooleanSetting("-Yrequire-targetName", "Warn if an operator is defined without a @targetName annotation.") - val YrecheckTest: Setting[Boolean] = BooleanSetting("-Yrecheck-test", "Run basic rechecking (internal test only).") - val YccDebug: Setting[Boolean] = BooleanSetting("-Ycc-debug", "Used in conjunction with captureChecking language import, debug info for captured references.") - val YccNew: Setting[Boolean] = BooleanSetting("-Ycc-new", "Used in conjunction with captureChecking language import, try out new variants (debug option)") - val YccLog: Setting[Boolean] = BooleanSetting("-Ycc-log", "Used in conjunction with captureChecking language import, print tracing and debug info") - val YccPrintSetup: Setting[Boolean] = BooleanSetting("-Ycc-print-setup", "Used in conjunction with captureChecking language import, print trees after cc.Setup phase") + val YnoKindPolymorphism: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-kind-polymorphism", "Disable kind polymorphism.") + val YexplicitNulls: Setting[Boolean] = BooleanSetting(ForkSetting, "Yexplicit-nulls", "Make reference types non-nullable. Nullable types can be expressed with unions: e.g. String|Null.") + val YcheckInit: Setting[Boolean] = BooleanSetting(ForkSetting, "Ysafe-init", "Ensure safe initialization of objects.") + val YcheckInitGlobal: Setting[Boolean] = BooleanSetting(ForkSetting, "Ysafe-init-global", "Check safe initialization of global objects.") + val YrequireTargetName: Setting[Boolean] = BooleanSetting(ForkSetting, "Yrequire-targetName", "Warn if an operator is defined without a @targetName annotation.") + val YrecheckTest: Setting[Boolean] = BooleanSetting(ForkSetting, "Yrecheck-test", "Run basic rechecking (internal test only).") + val YccDebug: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycc-debug", "Used in conjunction with captureChecking language import, debug info for captured references.") + val YccNew: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycc-new", "Used in conjunction with captureChecking language import, try out new variants (debug option)") + val YccLog: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycc-log", "Used in conjunction with captureChecking language import, print tracing and debug info") + val YccPrintSetup: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycc-print-setup", "Used in conjunction with captureChecking language import, print trees after cc.Setup phase") /** Area-specific debug output */ - val YexplainLowlevel: Setting[Boolean] = BooleanSetting("-Yexplain-lowlevel", "When explaining type errors, show types at a lower level.") - val YnoDoubleBindings: Setting[Boolean] = BooleanSetting("-Yno-double-bindings", "Assert no namedtype is bound twice (should be enabled only if program is error-free).") - val YshowVarBounds: Setting[Boolean] = BooleanSetting("-Yshow-var-bounds", "Print type variables with their bounds.") - - val YnoDecodeStacktraces: Setting[Boolean] = BooleanSetting("-Yno-decode-stacktraces", "Show raw StackOverflow stacktraces, instead of decoding them into triggering operations.") - val YnoEnrichErrorMessages: Setting[Boolean] = BooleanSetting("-Yno-enrich-error-messages", "Show raw error messages, instead of enriching them with contextual information.") + val 
YexplainLowlevel: Setting[Boolean] = BooleanSetting(ForkSetting, "Yexplain-lowlevel", "When explaining type errors, show types at a lower level.") + val YnoDoubleBindings: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-double-bindings", "Assert no namedtype is bound twice (should be enabled only if program is error-free).") + val YshowVarBounds: Setting[Boolean] = BooleanSetting(ForkSetting, "Yshow-var-bounds", "Print type variables with their bounds.") - val Yinstrument: Setting[Boolean] = BooleanSetting("-Yinstrument", "Add instrumentation code that counts allocations and closure creations.") - val YinstrumentDefs: Setting[Boolean] = BooleanSetting("-Yinstrument-defs", "Add instrumentation code that counts method calls; needs -Yinstrument to be set, too.") + val YnoDecodeStacktraces: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-decode-stacktraces", "Show raw StackOverflow stacktraces, instead of decoding them into triggering operations.") + val YnoEnrichErrorMessages: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-enrich-error-messages", "Show raw error messages, instead of enriching them with contextual information.") - val YforceInlineWhileTyping: Setting[Boolean] = BooleanSetting("-Yforce-inline-while-typing", "Make non-transparent inline methods inline when typing. Emulates the old inlining behavior of 3.0.0-M3.") + val Yinstrument: Setting[Boolean] = BooleanSetting(ForkSetting, "Yinstrument", "Add instrumentation code that counts allocations and closure creations.") + val YinstrumentDefs: Setting[Boolean] = BooleanSetting(ForkSetting, "Yinstrument-defs", "Add instrumentation code that counts method calls; needs -Yinstrument to be set, too.") - val YdebugMacros: Setting[Boolean] = BooleanSetting("-Ydebug-macros", "Show debug info when quote pattern match fails") + val YdebugMacros: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydebug-macros", "Show debug info when quote pattern match fails") // Pipeline compilation options - val YjavaTasty: Setting[Boolean] = BooleanSetting("-Yjava-tasty", "Pickler phase should compute pickles for .java defined symbols for use by build tools") - val YjavaTastyOutput: Setting[AbstractFile] = OutputSetting("-Yjava-tasty-output", "directory|jar", "(Internal use only!) destination for generated .tasty files containing Java type signatures.", NoAbstractFile) - val YallowOutlineFromTasty: Setting[Boolean] = BooleanSetting("-Yallow-outline-from-tasty", "Allow outline TASTy to be loaded with the -from-tasty option.") + val YjavaTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Yjava-tasty", "Pickler phase should compute pickles for .java defined symbols for use by build tools") + val YjavaTastyOutput: Setting[AbstractFile] = OutputSetting(ForkSetting, "Yjava-tasty-output", "directory|jar", "(Internal use only!) 
destination for generated .tasty files containing Java type signatures.", NoAbstractFile) + val YallowOutlineFromTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Yallow-outline-from-tasty", "Allow outline TASTy to be loaded with the -from-tasty option.") end YSettings diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala new file mode 100644 index 000000000000..e8a55dc6e737 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala @@ -0,0 +1,41 @@ +package dotty.tools.dotc +package config + +import dotty.tools.backend.jvm.BackendUtils.classfileVersionMap +import dotty.tools.io.{AbstractFile, Directory, JDK9Reflectors, PlainDirectory, NoAbstractFile} +import scala.language.unsafeNulls + +object ScalaSettingsProperties: + + private lazy val minTargetVersion = classfileVersionMap.keysIterator.min + private lazy val maxTargetVersion = classfileVersionMap.keysIterator.max + + def supportedTargetVersions: List[String] = + (minTargetVersion to maxTargetVersion).toList.map(_.toString) + + def supportedReleaseVersions: List[String] = + if scala.util.Properties.isJavaAtLeast("9") then + val jdkVersion = JDK9Reflectors.runtimeVersionMajor(JDK9Reflectors.runtimeVersion()).intValue() + val maxVersion = Math.min(jdkVersion, maxTargetVersion) + (minTargetVersion to maxVersion).toList.map(_.toString) + else List(minTargetVersion).map(_.toString) + + def supportedScalaReleaseVersions: List[String] = + ScalaRelease.values.toList.map(_.show) + + def supportedSourceVersions: List[String] = + SourceVersion.values.toList.map(_.toString) + + def defaultClasspath: String = sys.env.getOrElse("CLASSPATH", ".") + + def defaultPageWidth: Int = { + val defaultWidth = 80 + val columnsVar = System.getenv("COLUMNS") + if columnsVar != null then columnsVar.toInt + else if Properties.isWin then + val ansiconVar = System.getenv("ANSICON") // eg. 
"142x32766 (142x26)" + if ansiconVar != null && ansiconVar.matches("[0-9]+x.*") then + ansiconVar.substring(0, ansiconVar.indexOf("x")).toInt + else defaultWidth + else defaultWidth + } diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index 79eb2b882f8f..a65072427ba7 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -8,7 +8,9 @@ import core.Contexts.* import dotty.tools.io.{AbstractFile, Directory, JarArchive, PlainDirectory} import annotation.tailrec +import annotation.internal.unshared import collection.mutable.ArrayBuffer +import collection.mutable import reflect.ClassTag import scala.util.{Success, Failure} import dotty.tools.dotc.config.Settings.Setting.ChoiceWithHelp @@ -23,8 +25,12 @@ object Settings: val OptionTag: ClassTag[Option[?]] = ClassTag(classOf[Option[?]]) val OutputTag: ClassTag[AbstractFile] = ClassTag(classOf[AbstractFile]) - class SettingsState(initialValues: Seq[Any]): + trait SettingCategory: + def prefixLetter: String + + class SettingsState(initialValues: Seq[Any], initialChanged: Set[Int] = Set.empty): private val values = ArrayBuffer(initialValues*) + private val changed: mutable.Set[Int] = initialChanged.to(mutable.Set) private var _wasRead: Boolean = false override def toString: String = s"SettingsState(values: ${values.toList})" @@ -33,10 +39,13 @@ object Settings: _wasRead = true values(idx) + def wasChanged(idx: Int): Boolean = changed.contains(idx) + def update(idx: Int, x: Any): SettingsState = - if (_wasRead) then SettingsState(values.toSeq).update(idx, x) + if (_wasRead) then SettingsState(values.toSeq, changed.toSet).update(idx, x) else values(idx) = x + changed.add(idx) this end SettingsState @@ -53,19 +62,37 @@ object Settings: ArgsSummary(sstate, arguments.tail, errors, warnings :+ msg) } + @unshared + val settingCharacters = "[a-zA-Z0-9_\\-]*".r + def validateSettingString(name: String): Unit = + assert(settingCharacters.matches(name), s"Setting string $name contains invalid characters") + + case class Setting[T: ClassTag] private[Settings] ( + category: SettingCategory, name: String, description: String, default: T, helpArg: String = "", choices: Option[Seq[?]] = None, - prefix: String = "", + prefix: Option[String] = None, aliases: List[String] = Nil, depends: List[(Setting[?], Any)] = Nil, ignoreInvalidArgs: Boolean = false, - propertyClass: Option[Class[?]] = None)(private[Settings] val idx: Int) { - - private var changed: Boolean = false + propertyClass: Option[Class[?]] = None, + deprecationMsg: Option[String] = None, + // kept only for -Ykind-projector option compatibility + legacyArgs: Boolean = false)(private[Settings] val idx: Int) { + + validateSettingString(prefix.getOrElse(name)) + aliases.foreach(validateSettingString) + assert(name.startsWith(s"-${category.prefixLetter}"), s"Setting $name does not start with category -$category") + assert(legacyArgs || !choices.exists(_.contains("")), s"Empty string is not supported as a choice for setting $name") + // Without the following assertion, it would be easy to mistakenly try to pass a file to a setting that ignores invalid args. + // Example: -opt Main.scala would be interpreted as -opt:Main.scala, and the source file would be ignored. 
+ assert(!(summon[ClassTag[T]] == ListTag && ignoreInvalidArgs), s"Ignoring invalid args is not supported for multivalue settings: $name") + + val allFullNames: List[String] = s"$name" :: s"-$name" :: aliases def valueIn(state: SettingsState): T = state.value(idx).asInstanceOf[T] @@ -77,6 +104,8 @@ object Settings: def isMultivalue: Boolean = summon[ClassTag[T]] == ListTag + def acceptsNoArg: Boolean = summon[ClassTag[T]] == BooleanTag || summon[ClassTag[T]] == OptionTag || choices.exists(_.contains("")) + def legalChoices: String = choices match { case Some(xs) if xs.isEmpty => "" @@ -89,17 +118,16 @@ object Settings: val ArgsSummary(sstate, arg :: args, errors, warnings) = state: @unchecked def update(value: Any, args: List[String]): ArgsSummary = var dangers = warnings - val value1 = - if changed && isMultivalue then - val value0 = value.asInstanceOf[List[String]] + val valueNew = + if sstate.wasChanged(idx) && isMultivalue then + val valueList = value.asInstanceOf[List[String]] val current = valueIn(sstate).asInstanceOf[List[String]] - value0.filter(current.contains).foreach(s => dangers :+= s"Setting $name set to $s redundantly") - current ++ value0 + valueList.filter(current.contains).foreach(s => dangers :+= s"Setting $name set to $s redundantly") + current ++ valueList else - if changed then dangers :+= s"Flag $name set repeatedly" + if sstate.wasChanged(idx) then dangers :+= s"Flag $name set repeatedly" value - changed = true - ArgsSummary(updateIn(sstate, value1), args, errors, dangers) + ArgsSummary(updateIn(sstate, valueNew), args, errors, dangers) end update def fail(msg: String, args: List[String]) = @@ -140,54 +168,73 @@ object Settings: update(x, args) catch case _: NumberFormatException => fail(s"$argValue is not an integer argument for $name", args) - - def doSet(argRest: String) = ((summon[ClassTag[T]], args): @unchecked) match { - case (BooleanTag, _) => - setBoolean(argRest, args) - case (OptionTag, _) => - update(Some(propertyClass.get.getConstructor().newInstance()), args) - case (ListTag, _) => - if (argRest.isEmpty) missingArg - else - val strings = argRest.split(",").toList - choices match - case Some(valid) => strings.filterNot(valid.contains) match - case Nil => update(strings, args) - case invalid => invalidChoices(invalid) - case _ => update(strings, args) - case (StringTag, _) if argRest.nonEmpty || choices.exists(_.contains("")) => - setString(argRest, args) - case (StringTag, arg2 :: args2) => - if (arg2 startsWith "-") missingArg - else setString(arg2, args2) - case (OutputTag, arg :: args) => - val path = Directory(arg) - val isJar = path.extension == "jar" - if (!isJar && !path.isDirectory) - fail(s"'$arg' does not exist or is not a directory or .jar file", args) - else { - val output = if (isJar) JarArchive.create(path) else new PlainDirectory(path) - update(output, args) - } - case (IntTag, args) if argRest.nonEmpty => - setInt(argRest, args) - case (IntTag, arg2 :: args2) => - setInt(arg2, args2) - case (VersionTag, _) => - ScalaVersion.parse(argRest) match { - case Success(v) => update(v, args) - case Failure(ex) => fail(ex.getMessage, args) - } - case (_, Nil) => - missingArg - } - - def matches(argName: String) = (name :: aliases).exists(_ == argName) - - if (prefix != "" && arg.startsWith(prefix)) - doSet(arg drop prefix.length) - else if (prefix == "" && matches(arg.takeWhile(_ != ':'))) - doSet(arg.dropWhile(_ != ':').drop(1)) + + def setOutput(argValue: String, args: List[String]) = + val path = Directory(argValue) + val isJar = path.extension 
== "jar" + if (!isJar && !path.isDirectory) + fail(s"'$argValue' does not exist or is not a directory or .jar file", args) + else { + val output = if (isJar) JarArchive.create(path) else new PlainDirectory(path) + update(output, args) + } + + def setVersion(argValue: String, args: List[String]) = + ScalaVersion.parse(argValue) match { + case Success(v) => update(v, args) + case Failure(ex) => fail(ex.getMessage, args) + } + + def appendList(strings: List[String], args: List[String]) = + choices match + case Some(valid) => strings.filterNot(valid.contains) match + case Nil => update(strings, args) + case invalid => invalidChoices(invalid) + case _ => update(strings, args) + + + def doSet(argRest: String) = + ((summon[ClassTag[T]], args): @unchecked) match { + case (BooleanTag, _) => + setBoolean(argRest, args) + case (OptionTag, _) => + update(Some(propertyClass.get.getConstructor().newInstance()), args) + case (ct, args) => + val argInArgRest = !argRest.isEmpty || legacyArgs + val argAfterParam = !argInArgRest && args.nonEmpty && (ct == IntTag || !args.head.startsWith("-")) + if argInArgRest then + doSetArg(argRest, args) + else if argAfterParam then + doSetArg(args.head, args.tail) + else missingArg + } + + def doSetArg(arg: String, argsLeft: List[String]) = summon[ClassTag[T]] match + case ListTag => + val strings = arg.split(",").toList + appendList(strings, argsLeft) + case StringTag => + setString(arg, argsLeft) + case OutputTag => + setOutput(arg, argsLeft) + case IntTag => + setInt(arg, argsLeft) + case VersionTag => + setVersion(arg, argsLeft) + case _ => + missingArg + + def matches(argName: String): Boolean = + (allFullNames).exists(_ == argName.takeWhile(_ != ':')) || prefix.exists(arg.startsWith) + + def argValRest: String = + if(prefix.isEmpty) arg.dropWhile(_ != ':').drop(1) else arg.drop(prefix.get.length) + + if matches(arg) then + if deprecationMsg.isDefined then + warn(s"Option $name is deprecated: ${deprecationMsg.get}", args) + else + doSet(argValRest) else state } @@ -214,6 +261,7 @@ object Settings: class SettingGroup { + @unshared private val _allSettings = new ArrayBuffer[Setting[?]] def allSettings: Seq[Setting[?]] = _allSettings.toSeq @@ -281,49 +329,54 @@ object Settings: setting } - def BooleanSetting(name: String, descr: String, initialValue: Boolean = false, aliases: List[String] = Nil): Setting[Boolean] = - publish(Setting(name, descr, initialValue, aliases = aliases)) + def prependName(name: String): String = + assert(!name.startsWith("-"), s"Setting $name cannot start with -") + "-" + name - def StringSetting(name: String, helpArg: String, descr: String, default: String, aliases: List[String] = Nil): Setting[String] = - publish(Setting(name, descr, default, helpArg, aliases = aliases)) + def BooleanSetting(category: SettingCategory, name: String, descr: String, initialValue: Boolean = false, aliases: List[String] = Nil): Setting[Boolean] = + publish(Setting(category, prependName(name), descr, initialValue, aliases = aliases)) - def ChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: String, aliases: List[String] = Nil): Setting[String] = - publish(Setting(name, descr, default, helpArg, Some(choices), aliases = aliases)) + def StringSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: String, aliases: List[String] = Nil): Setting[String] = + publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases)) - def MultiChoiceSetting(name: String, helpArg: 
String, descr: String, choices: List[String], default: List[String], aliases: List[String] = Nil): Setting[List[String]] = - publish(Setting(name, descr, default, helpArg, Some(choices), aliases = aliases)) + def ChoiceSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[String], default: String, aliases: List[String] = Nil, legacyArgs: Boolean = false): Setting[String] = + publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), aliases = aliases, legacyArgs = legacyArgs)) - def MultiChoiceHelpSetting(name: String, helpArg: String, descr: String, choices: List[ChoiceWithHelp[String]], default: List[ChoiceWithHelp[String]], aliases: List[String] = Nil): Setting[List[ChoiceWithHelp[String]]] = - publish(Setting(name, descr, default, helpArg, Some(choices), aliases = aliases)) + def MultiChoiceSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[String], default: List[String], aliases: List[String] = Nil): Setting[List[String]] = + publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), aliases = aliases)) - def UncompleteMultiChoiceHelpSetting(name: String, helpArg: String, descr: String, choices: List[ChoiceWithHelp[String]], default: List[ChoiceWithHelp[String]], aliases: List[String] = Nil): Setting[List[ChoiceWithHelp[String]]] = - publish(Setting(name, descr, default, helpArg, Some(choices), aliases = aliases, ignoreInvalidArgs = true)) + def MultiChoiceHelpSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[ChoiceWithHelp[String]], default: List[ChoiceWithHelp[String]], aliases: List[String] = Nil): Setting[List[ChoiceWithHelp[String]]] = + publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), aliases = aliases)) - def IntSetting(name: String, descr: String, default: Int, aliases: List[String] = Nil): Setting[Int] = - publish(Setting(name, descr, default, aliases = aliases)) + def IntSetting(category: SettingCategory, name: String, descr: String, default: Int, aliases: List[String] = Nil): Setting[Int] = + publish(Setting(category, prependName(name), descr, default, aliases = aliases)) - def IntChoiceSetting(name: String, descr: String, choices: Seq[Int], default: Int): Setting[Int] = - publish(Setting(name, descr, default, choices = Some(choices))) + def IntChoiceSetting(category: SettingCategory, name: String, descr: String, choices: Seq[Int], default: Int): Setting[Int] = + publish(Setting(category, prependName(name), descr, default, choices = Some(choices))) - def MultiStringSetting(name: String, helpArg: String, descr: String, default: List[String] = Nil, aliases: List[String] = Nil): Setting[List[String]] = - publish(Setting(name, descr, default, helpArg, aliases = aliases)) + def MultiStringSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: List[String] = Nil, aliases: List[String] = Nil): Setting[List[String]] = + publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases)) - def OutputSetting(name: String, helpArg: String, descr: String, default: AbstractFile): Setting[AbstractFile] = - publish(Setting(name, descr, default, helpArg)) + def OutputSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: AbstractFile): Setting[AbstractFile] = + publish(Setting(category, prependName(name), descr, default, helpArg)) - def PathSetting(name: String, descr: String, default: 
String, aliases: List[String] = Nil): Setting[String] = - publish(Setting(name, descr, default, aliases = aliases)) + def PathSetting(category: SettingCategory, name: String, descr: String, default: String, aliases: List[String] = Nil): Setting[String] = + publish(Setting(category, prependName(name), descr, default, aliases = aliases)) - def PhasesSetting(name: String, descr: String, default: String = "", aliases: List[String] = Nil): Setting[List[String]] = - publish(Setting(name, descr, if (default.isEmpty) Nil else List(default), aliases = aliases)) + def PhasesSetting(category: SettingCategory, name: String, descr: String, default: String = "", aliases: List[String] = Nil): Setting[List[String]] = + publish(Setting(category, prependName(name), descr, if (default.isEmpty) Nil else List(default), aliases = aliases)) - def PrefixSetting(name: String, pre: String, descr: String): Setting[List[String]] = - publish(Setting(name, descr, Nil, prefix = pre)) + def PrefixSetting(category: SettingCategory, name: String, descr: String): Setting[List[String]] = + val prefix = name.takeWhile(_ != '<') + publish(Setting(category, "-" + name, descr, Nil, prefix = Some(prefix))) - def VersionSetting(name: String, descr: String, default: ScalaVersion = NoScalaVersion): Setting[ScalaVersion] = - publish(Setting(name, descr, default)) + def VersionSetting(category: SettingCategory, name: String, descr: String, default: ScalaVersion = NoScalaVersion, legacyArgs: Boolean = false): Setting[ScalaVersion] = + publish(Setting(category, prependName(name), descr, default, legacyArgs = legacyArgs)) - def OptionSetting[T: ClassTag](name: String, descr: String, aliases: List[String] = Nil): Setting[Option[T]] = - publish(Setting(name, descr, None, propertyClass = Some(summon[ClassTag[T]].runtimeClass), aliases = aliases)) + def OptionSetting[T: ClassTag](category: SettingCategory, name: String, descr: String, aliases: List[String] = Nil): Setting[Option[T]] = + publish(Setting(category, prependName(name), descr, None, propertyClass = Some(summon[ClassTag[T]].runtimeClass), aliases = aliases)) + + def DeprecatedSetting(category: SettingCategory, name: String, descr: String, deprecationMsg: String): Setting[Boolean] = + publish(Setting(category, prependName(name), descr, false, deprecationMsg = Some(deprecationMsg))) } end Settings diff --git a/compiler/src/dotty/tools/dotc/core/Annotations.scala b/compiler/src/dotty/tools/dotc/core/Annotations.scala index ac02baa429b4..45dba97a79f7 100644 --- a/compiler/src/dotty/tools/dotc/core/Annotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Annotations.scala @@ -72,12 +72,11 @@ object Annotations { def refersToParamOf(tl: TermLambda)(using Context): Boolean = val args = arguments if args.isEmpty then false - else tree.existsSubTree { - case id: Ident => id.tpe.stripped match + else tree.existsSubTree: + case id: (Ident | This) => id.tpe.stripped match case TermParamRef(tl1, _) => tl eq tl1 case _ => false case _ => false - } /** A string representation of the annotation. Overridden in BodyAnnotation. 
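
The SettingGroup factories above now take a SettingCategory and prepend the dash themselves, and the Setting constructor asserts that the resulting name matches the category's prefix letter. A rough standalone sketch of that invariant; DemoCategory, DemoForkSetting and fullName are stand-in names, and "Y" as the fork-setting prefix letter is inferred from the -Y... settings earlier in this patch.

    trait DemoCategory { def prefixLetter: String }
    object DemoForkSetting extends DemoCategory { def prefixLetter = "Y" }

    def fullName(category: DemoCategory, name: String): String =
      require(!name.startsWith("-"), s"Setting $name cannot start with -")
      val full = "-" + name
      assert(full.startsWith("-" + category.prefixLetter),
        s"Setting $full does not match category -${category.prefixLetter}")
      full

    @main def nameDemo(): Unit =
      println(fullName(DemoForkSetting, "Ysafe-init"))   // -Ysafe-init
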
*/ diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index da94226b34af..1870956357d6 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -651,11 +651,20 @@ trait ConstraintHandling { def widenOr(tp: Type) = if widenUnions then val tpw = tp.widenUnion - if (tpw ne tp) && !tpw.isTransparent() && (tpw <:< bound) then tpw else tp + if tpw ne tp then + if tpw.isTransparent() then + // Now also widen singletons of soft unions. Before these were skipped + // since widenUnion on soft unions is independent of whether singletons + // are widened or not. This avoids an expensive subtype check in widenSingle, + // see i19907_*.scala for test cases. + widenSingle(tp, skipSoftUnions = false) + else if tpw <:< bound then tpw + else tp + else tp else tp.hardenUnions - def widenSingle(tp: Type) = - val tpw = tp.widenSingletons + def widenSingle(tp: Type, skipSoftUnions: Boolean) = + val tpw = tp.widenSingletons(skipSoftUnions) if (tpw ne tp) && (tpw <:< bound) then tpw else tp def isSingleton(tp: Type): Boolean = tp match @@ -665,7 +674,7 @@ trait ConstraintHandling { val wideInst = if isSingleton(bound) then inst else - val widenedFromSingle = widenSingle(inst) + val widenedFromSingle = widenSingle(inst, skipSoftUnions = widenUnions) val widenedFromUnion = widenOr(widenedFromSingle) val widened = dropTransparentTraits(widenedFromUnion, bound) widenIrreducible(widened) diff --git a/compiler/src/dotty/tools/dotc/core/ContextOps.scala b/compiler/src/dotty/tools/dotc/core/ContextOps.scala index 920da377f9b4..57c369a08de6 100644 --- a/compiler/src/dotty/tools/dotc/core/ContextOps.scala +++ b/compiler/src/dotty/tools/dotc/core/ContextOps.scala @@ -34,26 +34,44 @@ object ContextOps: if (elem.name == name) return elem.sym.denot // return self } val pre = ctx.owner.thisType - if ctx.isJava then javaFindMember(name, pre, required, excluded) + if ctx.isJava then + // Note: I didn't verify if there exists a code path that would require `lookInCompanion = true`, + // it is just to preserve the original behavior. + javaFindMember(name, pre, lookInCompanion = true, required, excluded) else pre.findMember(name, pre, required, excluded) } else // we are in the outermost context belonging to a class; self is invisible here. See inClassContext. - ctx.owner.findMember(name, ctx.owner.thisType, required, excluded) + if ctx.isJava then + javaFindMember(name, ctx.owner.thisType, lookInCompanion = true,required, excluded) + else + ctx.owner.findMember(name, ctx.owner.thisType, required, excluded) else ctx.scope.denotsNamed(name).filterWithFlags(required, excluded).toDenot(NoPrefix) } - final def javaFindMember(name: Name, pre: Type, required: FlagSet = EmptyFlags, excluded: FlagSet = EmptyFlags): Denotation = + /** Look in the prefix with Java semantics. + * @param lookInCompanion If true, try in the companion class of a module as a fallback. + * Note: originally this was used to type Select nodes in Java code, + * but that is no longer the case. + * It is preserved in case it is necessary for denotNamed, but this is unverified. + */ + final def javaFindMember(name: Name, pre: Type, lookInCompanion: Boolean, required: FlagSet = EmptyFlags, excluded: FlagSet = EmptyFlags): Denotation = assert(ctx.isJava) inContext(ctx) { - + import dotty.tools.dotc.core.NameOps.* val preSym = pre.typeSymbol - // 1. Try to search in current type and parents. 
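
The widenOr/widenSingle tweak above lives in type-variable instantiation, whose user-visible flavour is that a soft union arising from unrelated branches is widened to its join when a type is inferred, while an explicitly expected union is kept. The example below is the standard one from the language reference, not a test added by this patch.

    def unionWidenDemo(cond: Boolean): Unit =
      val e = if cond then Left(1) else Right("x")
      // e is inferred as the join Either[Int, String], not Left[Int] | Right[String]
      val kept: Left[Int, Nothing] | Right[Nothing, String] =
        if cond then Left(1) else Right("x")   // an expected union type is kept as-is
      println((e, kept))
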
- val directSearch = pre.findMember(name, pre, required, excluded) + val directSearch = + def asModule = + if name.isTypeName && name.endsWith(StdNames.str.MODULE_SUFFIX) then + pre.findMember(name.stripModuleClassSuffix.moduleClassName, pre, required, excluded) + else NoDenotation + pre.findMember(name, pre, required, excluded) match + case NoDenotation => asModule + case denot => denot // 2. Try to search in companion class if current is an object. - def searchCompanionClass = if preSym.is(Flags.Module) then + def searchCompanionClass = if lookInCompanion && preSym.is(Flags.Module) then preSym.companionClass.thisType.findMember(name, pre, required, excluded) else NoDenotation diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index c5f04d18b7fb..ae21c6fb8763 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -861,8 +861,7 @@ object Contexts { with Phases.PhasesBase with Plugins { - /** The applicable settings */ - val settings: ScalaSettings = new ScalaSettings + val settings: ScalaSettings = ScalaSettings /** The initial context */ val initialCtx: Context = FreshContext.initial(this: @unchecked, settings) diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 3cde29ee3d79..789e744fbfc9 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -642,8 +642,6 @@ class Definitions { @tu lazy val RepeatedParamClass: ClassSymbol = enterSpecialPolyClass(tpnme.REPEATED_PARAM_CLASS, Covariant, Seq(ObjectType, SeqType)) - @tu lazy val IntoType: TypeSymbol = enterAliasType(tpnme.INTO, HKTypeLambda(TypeBounds.empty :: Nil)(_.paramRefs(0))) - // fundamental classes @tu lazy val StringClass: ClassSymbol = requiredClass("java.lang.String") def StringType: Type = StringClass.typeRef @@ -1002,7 +1000,6 @@ class Definitions { @tu lazy val JavaAnnotationClass: ClassSymbol = requiredClass("java.lang.annotation.Annotation") // Annotation classes - @tu lazy val AllowConversionsAnnot: ClassSymbol = requiredClass("scala.annotation.allowConversions") @tu lazy val AnnotationDefaultAnnot: ClassSymbol = requiredClass("scala.annotation.internal.AnnotationDefault") @tu lazy val AssignedNonLocallyAnnot: ClassSymbol = requiredClass("scala.annotation.internal.AssignedNonLocally") @tu lazy val BeanPropertyAnnot: ClassSymbol = requiredClass("scala.beans.BeanProperty") @@ -1018,6 +1015,8 @@ class Definitions { @tu lazy val ImplicitAmbiguousAnnot: ClassSymbol = requiredClass("scala.annotation.implicitAmbiguous") @tu lazy val ImplicitNotFoundAnnot: ClassSymbol = requiredClass("scala.annotation.implicitNotFound") @tu lazy val InlineParamAnnot: ClassSymbol = requiredClass("scala.annotation.internal.InlineParam") + @tu lazy val IntoAnnot: ClassSymbol = requiredClass("scala.annotation.into") + @tu lazy val IntoParamAnnot: ClassSymbol = requiredClass("scala.annotation.internal.$into") @tu lazy val ErasedParamAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ErasedParam") @tu lazy val MainAnnot: ClassSymbol = requiredClass("scala.main") @tu lazy val MappedAlternativeAnnot: ClassSymbol = requiredClass("scala.annotation.internal.MappedAlternative") @@ -2137,7 +2136,6 @@ class Definitions { orType, RepeatedParamClass, ByNameParamClass2x, - IntoType, AnyValClass, NullClass, NothingClass, diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala 
b/compiler/src/dotty/tools/dotc/core/Flags.scala index be43cbc8dfcf..249940d8ff99 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -581,6 +581,7 @@ object Flags { val LazyGiven: FlagSet = Given | Lazy val InlineOrProxy: FlagSet = Inline | InlineProxy // An inline method or inline argument proxy */ val InlineMethod: FlagSet = Inline | Method + val InlineImplicitMethod: FlagSet = Implicit | InlineMethod val InlineParam: FlagSet = Inline | Param val InlineByNameProxy: FlagSet = InlineProxy | Method val JavaEnum: FlagSet = JavaDefined | Enum // A Java enum trait diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index a2e78add1338..9772199678d7 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -131,7 +131,7 @@ object StdNames { val EXCEPTION_RESULT_PREFIX: N = "exceptionResult" val EXPAND_SEPARATOR: N = str.EXPAND_SEPARATOR val IMPORT: N = "" - val INTO: N = "" + val INTO: N = "$into" val MODULE_SUFFIX: N = str.MODULE_SUFFIX val OPS_PACKAGE: N = "" val OVERLOADED: N = "" diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 13eb5ce8b5ba..14ba05568735 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1208,8 +1208,8 @@ object SymDenotations { final def isLocalToCompilationUnit(using Context): Boolean = is(Private) - || owner.ownersIterator.exists(_.isTerm) - || accessBoundary(defn.RootClass).isContainedIn(symbol.topLevelClass) + || owner.ownersIterator.takeWhile(!_.isStaticOwner).exists(_.isTerm) + || accessBoundary(defn.RootClass).isProperlyContainedIn(symbol.topLevelClass) final def isTransparentClass(using Context): Boolean = is(TransparentType) @@ -1512,6 +1512,13 @@ object SymDenotations { def namedType(using Context): NamedType = if (isType) typeRef else termRef + /** Like typeRef, but the prefix is widened. + * + * See tests/neg/i19619/Test.scala + */ + def javaTypeRef(using Context) = + TypeRef(maybeOwner.reachablePrefix.widen, symbol) + /** Like typeRef, but objects in the prefix are represented by their singleton type, * this means we output `pre.O.member` rather than `pre.O$.this.member`. 
* diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index 32a2da8b46b6..78c736649605 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -110,17 +110,28 @@ object Symbols extends SymUtils { } private def computeDenot(lastd: SymDenotation)(using Context): SymDenotation = { + // Written that way so that it comes in at 32 bytes and is therefore inlineable for + // the JIT (reputedly, cutoff is at 35 bytes) util.Stats.record("Symbol.computeDenot") val now = ctx.period checkedPeriod = now - if (lastd.validFor contains now) lastd else recomputeDenot(lastd) + if lastd.validFor.contains(now) then lastd else recomputeDenot(lastd) } /** Overridden in NoSymbol */ protected def recomputeDenot(lastd: SymDenotation)(using Context): SymDenotation = { util.Stats.record("Symbol.recomputeDenot") val newd = lastd.current.asInstanceOf[SymDenotation] - lastDenot = newd + if newd.exists || lastd.initial.validFor.firstPhaseId <= ctx.phaseId then + lastDenot = newd + else + // We are trying to bring forward a symbol that is defined only at a later phase + // (typically, a nested Java class, invisible before erasure). + // In that case, keep lastDenot as it was and set the checked period to lastDenot's + // previous validity, which means we will try another bring forward when the symbol + // is referenced at a later phase. Otherwise we'd get stuck on NoDenotation here. + // See #15562 and test i15562b in ReplCompilerTests + checkedPeriod = lastd.validFor newd } @@ -791,7 +802,7 @@ object Symbols extends SymUtils { cls: ClassSymbol, name: TermName = nme.WILDCARD, selfInfo: Type = NoType)(using Context): TermSymbol = - newSymbol(cls, name, SelfSymFlags, selfInfo orElse cls.classInfo.selfType, coord = cls.coord) + newSymbol(cls, name, SelfSymFlags, selfInfo.orElse(cls.classInfo.selfType), coord = cls.coord) /** Create new type parameters with given owner, names, and flags. * @param boundsFn A function that, given type refs to the newly created @@ -958,7 +969,7 @@ object Symbols extends SymUtils { */ def getPackageClassIfDefined(path: PreName)(using Context): Symbol = staticRef(path.toTypeName, isPackage = true, generateStubs = false) - .disambiguate(_ is PackageClass).symbol + .disambiguate(_.is(PackageClass)).symbol def requiredModule(path: PreName)(using Context): TermSymbol = { val name = path.toTermName diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala index f1edd7cd8f8b..eeb18eaa9cc7 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala @@ -33,15 +33,11 @@ object TypeApplications { */ object EtaExpansion: - def apply(tycon: Type)(using Context): Type = - assert(tycon.typeParams.nonEmpty, tycon) - tycon.etaExpand(tycon.typeParamSymbols) - /** Test that the parameter bounds in a hk type lambda `[X1,...,Xn] => C[X1, ..., Xn]` * contain the bounds of the type parameters of `C`. This is necessary to be able to * contract the hk lambda to `C`. */ - private def weakerBounds(tp: HKTypeLambda, tparams: List[ParamInfo])(using Context): Boolean = + private def weakerBounds(tp: HKTypeLambda, fn: Type)(using Context): Boolean = val onlyEmptyBounds = tp.typeParams.forall(_.paramInfo == TypeBounds.empty) onlyEmptyBounds // Note: this pre-test helps efficiency. 
It is also necessary to workaround #9965 since in some cases @@ -50,18 +46,24 @@ object TypeApplications { // In this case, we can still return true if we know that the hk lambda bounds // are empty anyway. || { + val tparams = fn.typeParams val paramRefs = tparams.map(_.paramRef) + val prefix = fn.normalizedPrefix + val owner = fn.typeSymbol.maybeOwner tp.typeParams.corresponds(tparams) { (param1, param2) => - param2.paramInfo frozen_<:< param1.paramInfo.substParams(tp, paramRefs) + // see tests/neg/variances-constr.scala + // its B parameter should have info <: Any, using class C as the owner + // rather than info <: A, using class Inner2 as the owner + param2.paramInfo.asSeenFrom(prefix, owner) frozen_<:< param1.paramInfo.substParams(tp, paramRefs) } } def unapply(tp: Type)(using Context): Option[Type] = tp match - case tp @ HKTypeLambda(tparams, AppliedType(fn: Type, args)) + case tp @ HKTypeLambda(tparams, AppliedType(fn, args)) if fn.typeSymbol.isClass && tparams.hasSameLengthAs(args) && args.lazyZip(tparams).forall((arg, tparam) => arg == tparam.paramRef) - && weakerBounds(tp, fn.typeParams) => Some(fn) + && weakerBounds(tp, fn) => Some(fn) case _ => None end EtaExpansion @@ -180,7 +182,17 @@ class TypeApplications(val self: Type) extends AnyVal { val tsym = self.symbol if (tsym.isClass) tsym.typeParams else tsym.infoOrCompleter match { - case info: LazyType if isTrivial(self.prefix, tsym) => info.completerTypeParams(tsym) + case info: LazyType if isTrivial(self.prefix, tsym) => + val tparams = info.completerTypeParams(tsym) + if tsym.isCompleted then tsym.info.typeParams + // Completers sometimes represent parameters as symbols where + // the completed type represents them as paramrefs. Make sure we get + // a stable result by calling `typeParams` recursively. Test case + // is pos/i19942.scala, where parameter F0 has initially a Namer#TypeDefCompleter. + // After calling its completerTypeParams, we get a list of parameter symbols + // and as a side effect F0 is completed. Calling typeParams on the completed + // type gives a list of paramrefs. + else tparams case _ => self.info.typeParams } case self: AppliedType => @@ -244,7 +256,7 @@ class TypeApplications(val self: Type) extends AnyVal { def topType(using Context): Type = if self.hasSimpleKind then defn.AnyType - else etaExpand(self.typeParams) match + else self.etaExpand match case tp: HKTypeLambda => tp.derivedLambdaType(resType = tp.resultType.topType) case _ => @@ -301,21 +313,44 @@ class TypeApplications(val self: Type) extends AnyVal { /** Convert a type constructor `TC` which has type parameters `X1, ..., Xn` * to `[X1, ..., Xn] -> TC[X1, ..., Xn]`. */ - def etaExpand(tparams: List[TypeParamInfo])(using Context): Type = - HKTypeLambda.fromParams(tparams, self.appliedTo(tparams.map(_.paramRef))) - //.ensuring(res => res.EtaReduce =:= self, s"res = $res, core = ${res.EtaReduce}, self = $self, hc = ${res.hashCode}") + def etaExpand(using Context): Type = + val tparams = self.typeParams + val resType = self.appliedTo(tparams.map(_.paramRef)) + self.dealias match + case self: TypeRef if tparams.nonEmpty && self.symbol.isClass => + val owner = self.symbol.owner + // Calling asSeenFrom on the type parameter infos is important + // so that class type references within another prefix have + // their type parameters' info fixed. + // e.g. 
from pos/i18569: + // trait M1: + // trait A + // trait F[T <: A] + // object M2 extends M1 + // Type parameter T in M1.F has an upper bound of M1#A + // But eta-expanding M2.F should have type parameters with an upper-bound of M2.A. + // So we take the prefix M2.type and the F symbol's owner, M1, + // to call asSeenFrom on T's info. + HKTypeLambda(tparams.map(_.paramName))( + tl => tparams.map(p => HKTypeLambda.toPInfo(tl.integrate(tparams, p.paramInfo.asSeenFrom(self.prefix, owner)))), + tl => tl.integrate(tparams, resType)) + case _ => + HKTypeLambda.fromParams(tparams, resType) /** If self is not lambda-bound, eta expand it. */ def ensureLambdaSub(using Context): Type = - if (isLambdaSub) self else EtaExpansion(self) + if isLambdaSub then self + else + assert(self.typeParams.nonEmpty, self) + self.etaExpand /** Eta expand if `self` is a (non-lambda) class reference and `bound` is a higher-kinded type */ def etaExpandIfHK(bound: Type)(using Context): Type = { val hkParams = bound.hkTypeParams if (hkParams.isEmpty) self else self match { - case self: TypeRef if self.symbol.isClass && self.typeParams.length == hkParams.length => - EtaExpansion(self) + case self: TypeRef if self.symbol.isClass && self.typeParams.hasSameLengthAs(hkParams) => + etaExpand case _ => self } } diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index b04978357508..b677dae3a38b 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -487,7 +487,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling def widenOK = (tp2.widenSingletons eq tp2) && (tp1.widenSingletons ne tp1) - && inFrozenGadtAndConstraint(recur(tp1.widenSingletons, tp2)) + && inFrozenGadtAndConstraint(recur(tp1.widenSingletons(), tp2)) def joinOK = tp2.dealiasKeepRefiningAnnots match { case tp2: AppliedType if !tp2.tycon.typeSymbol.isClass => @@ -594,7 +594,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling if (base.typeSymbol == cls2) return true } else if tp1.typeParams.nonEmpty && !tp1.isAnyKind then - return recur(tp1, EtaExpansion(tp2)) + return recur(tp1, tp2.etaExpand) fourthTry } @@ -734,7 +734,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case _ => val tparams1 = tp1.typeParams if (tparams1.nonEmpty) - return recur(tp1.etaExpand(tparams1), tp2) || fourthTry + return recur(tp1.etaExpand, tp2) || fourthTry tp2 match { case EtaExpansion(tycon2: TypeRef) if tycon2.symbol.isClass && tycon2.symbol.is(JavaDefined) => recur(tp1, tycon2) || fourthTry @@ -1683,6 +1683,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * @param tparams2 The type parameters of the type constructor applied to `args2` */ def isSubArgs(args1: List[Type], args2: List[Type], tp1: Type, tparams2: List[ParamInfo]): Boolean = { + /** The bounds of parameter `tparam`, where all references to type paramneters * are replaced by corresponding arguments (or their approximations in the case of * wildcard arguments). @@ -1690,12 +1691,35 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling def paramBounds(tparam: Symbol): TypeBounds = tparam.info.substApprox(tparams2.asInstanceOf[List[Symbol]], args2).bounds - def recurArgs(args1: List[Type], args2: List[Type], tparams2: List[ParamInfo]): Boolean = - if (args1.isEmpty) args2.isEmpty + /** Test all arguments. 
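
The prefix-aware etaExpand above is easiest to see with roughly the shape of pos/i18569 cited in the comment: eta-expanding M2.F should give its parameter the bound M2.A, as seen from the prefix M2.type, rather than the bound declared in M1. A hedged sketch of that scenario; with this change the call below should be accepted.

    trait M1:
      trait A
      trait F[T <: A]
    object M2 extends M1

    // G's parameter is bounded by M2.A; eta-expanding M2.F with asSeenFrom on the
    // bound yields [T <: M2.A] =>> M2.F[T], which conforms to G's expected kind.
    def use[G[T <: M2.A]]: Unit = ()
    def etaDemo(): Unit = use[M2.F]
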
Incomplete argument tests (according to isIncomplete) are deferred in + * the first run and picked up in the second. + */ + def recurArgs(args1: List[Type], args2: List[Type], tparams2: List[ParamInfo], + canDefer: Boolean, + deferred1: List[Type], deferred2: List[Type], deferredTparams2: List[ParamInfo]): Boolean = + if args1.isEmpty then + args2.isEmpty + && (deferred1.isEmpty + || recurArgs( + deferred1.reverse, deferred2.reverse, deferredTparams2.reverse, + canDefer = false, Nil, Nil, Nil)) else args2.nonEmpty && tparams2.nonEmpty && { val tparam = tparams2.head val v = tparam.paramVarianceSign + /** An argument test is incomplete if it implies a comparison A <: B where + * A is an AndType or B is an OrType. In these cases we need to run an + * either, which can lose solutions if there are type variables involved. + * So we defer such tests to run last, on the chance that some other argument + * comparison will instantiate or constrain type variables first. + */ + def isIncomplete(arg1: Type, arg2: Type): Boolean = + val arg1d = arg1.strippedDealias + val arg2d = arg2.strippedDealias + (v >= 0) && (arg1d.isInstanceOf[AndType] || arg2d.isInstanceOf[OrType]) + || + (v <= 0) && (arg1d.isInstanceOf[OrType] || arg2d.isInstanceOf[AndType]) + /** Try a capture conversion: * If the original left-hand type `leftRoot` is a path `p.type`, * and the current widened left type is an application with wildcard arguments @@ -1781,10 +1805,26 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling else if v > 0 then isSubType(arg1, arg2) else isSameType(arg2, arg1) - isSubArg(args1.head, args2.head) - } && recurArgs(args1.tail, args2.tail, tparams2.tail) + val arg1 = args1.head + val arg2 = args2.head + val rest1 = args1.tail + if !canDefer + || rest1.isEmpty && deferred1.isEmpty + // skip the incompleteness test if this is the last argument and no previous argument tests were incomplete + || !isIncomplete(arg1, arg2) + then + isSubArg(arg1, arg2) + && recurArgs( + rest1, args2.tail, tparams2.tail, canDefer, + deferred1, deferred2, deferredTparams2) + else + recurArgs( + rest1, args2.tail, tparams2.tail, canDefer, + arg1 :: deferred1, arg2 :: deferred2, tparams2.head :: deferredTparams2) + } + + recurArgs(args1, args2, tparams2, canDefer = true, Nil, Nil, Nil) - recurArgs(args1, args2, tparams2) } /** Test whether `tp1` has a base type of the form `B[T1, ..., Tn]` where @@ -2820,7 +2860,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling tp.symbol match case cls: ClassSymbol => if cls == defn.SingletonClass then defn.AnyType - else if cls.typeParams.nonEmpty then EtaExpansion(tp) + else if cls.typeParams.nonEmpty then tp.etaExpand else tp case sym => if !ctx.erasedTypes && sym == defn.FromJavaObjectSymbol then defn.AnyType @@ -2857,6 +2897,13 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling tp case tp: HKTypeLambda => tp + case tp: ParamRef => + val st = tp.superTypeNormalized + if st.exists then + disjointnessBoundary(st) + else + // workaround for when ParamRef#underlying returns NoType + defn.AnyType case tp: TypeProxy => disjointnessBoundary(tp.superTypeNormalized) case tp: WildcardType => @@ -3054,7 +3101,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling end provablyDisjointTypeArgs protected def explainingTypeComparer(short: Boolean) = ExplainingTypeComparer(comparerContext, short) - protected def trackingTypeComparer = TrackingTypeComparer(comparerContext) + 
protected def matchReducer = MatchReducer(comparerContext) private def inSubComparer[T, Cmp <: TypeComparer](comparer: Cmp)(op: Cmp => T): T = val saved = myInstance @@ -3068,8 +3115,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling inSubComparer(cmp)(op) cmp.lastTrace(header) - def tracked[T](op: TrackingTypeComparer => T)(using Context): T = - inSubComparer(trackingTypeComparer)(op) + def reduceMatchWith[T](op: MatchReducer => T)(using Context): T = + inSubComparer(matchReducer)(op) } object TypeComparer { @@ -3236,14 +3283,14 @@ object TypeComparer { def explained[T](op: ExplainingTypeComparer => T, header: String = "Subtype trace:", short: Boolean = false)(using Context): String = comparing(_.explained(op, header, short)) - def tracked[T](op: TrackingTypeComparer => T)(using Context): T = - comparing(_.tracked(op)) + def reduceMatchWith[T](op: MatchReducer => T)(using Context): T = + comparing(_.reduceMatchWith(op)) def subCaptures(refs1: CaptureSet, refs2: CaptureSet, frozen: Boolean)(using Context): CaptureSet.CompareResult = comparing(_.subCaptures(refs1, refs2, frozen)) } -object TrackingTypeComparer: +object MatchReducer: import printing.*, Texts.* enum MatchResult extends Showable: case Reduced(tp: Type) @@ -3259,38 +3306,16 @@ object TrackingTypeComparer: case Stuck => "Stuck" case NoInstance(fails) => "NoInstance(" ~ Text(fails.map(p.toText(_) ~ p.toText(_)), ", ") ~ ")" -class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { - import TrackingTypeComparer.* +/** A type comparer for reducing match types. + * TODO: Not sure this needs to be a type comparer. Can we make it a + * separate class? + */ +class MatchReducer(initctx: Context) extends TypeComparer(initctx) { + import MatchReducer.* init(initctx) - override def trackingTypeComparer = this - - val footprint: mutable.Set[Type] = mutable.Set[Type]() - - override def bounds(param: TypeParamRef)(using Context): TypeBounds = { - if (param.binder `ne` caseLambda) footprint += param - super.bounds(param) - } - - override def addOneBound(param: TypeParamRef, bound: Type, isUpper: Boolean)(using Context): Boolean = { - if (param.binder `ne` caseLambda) footprint += param - super.addOneBound(param, bound, isUpper) - } - - override def gadtBounds(sym: Symbol)(using Context): TypeBounds | Null = { - if (sym.exists) footprint += sym.typeRef - super.gadtBounds(sym) - } - - override def gadtAddBound(sym: Symbol, b: Type, isUpper: Boolean): Boolean = - if (sym.exists) footprint += sym.typeRef - super.gadtAddBound(sym, b, isUpper) - - override def typeVarInstance(tvar: TypeVar)(using Context): Type = { - footprint += tvar - super.typeVarInstance(tvar) - } + override def matchReducer = this def matchCases(scrut: Type, cases: List[MatchTypeCaseSpec])(using Context): Type = { // a reference for the type parameters poisoned during matching @@ -3410,29 +3435,38 @@ class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { // Actual matching logic val instances = Array.fill[Type](spec.captureCount)(NoType) + val noInstances = mutable.ListBuffer.empty[(TypeName, TypeBounds)] def rec(pattern: MatchTypeCasePattern, scrut: Type, variance: Int, scrutIsWidenedAbstract: Boolean): Boolean = pattern match - case MatchTypeCasePattern.Capture(num, isWildcard) => + case MatchTypeCasePattern.Capture(num, /* isWildcard = */ true) => + // instantiate the wildcard in a way that the subtype test always succeeds + instances(num) = variance match + case 1 => scrut.hiBound // actually 
important if we are not in a class type constructor + case -1 => scrut.loBound + case 0 => scrut + !instances(num).isError + + case MatchTypeCasePattern.Capture(num, /* isWildcard = */ false) => + def failNotSpecific(bounds: TypeBounds): TypeBounds = + noInstances += spec.origMatchCase.paramNames(num) -> bounds + bounds + instances(num) = scrut match case scrut: TypeBounds => - if isWildcard then - // anything will do, as long as it conforms to the bounds for the subsequent `scrut <:< instantiatedPat` test - scrut.hi - else if scrutIsWidenedAbstract then - // always keep the TypeBounds so that we can report the correct NoInstances - scrut + if scrutIsWidenedAbstract then + failNotSpecific(scrut) else variance match case 1 => scrut.hi case -1 => scrut.lo - case 0 => scrut + case 0 => failNotSpecific(scrut) case _ => - if !isWildcard && scrutIsWidenedAbstract && variance != 0 then - // force a TypeBounds to report the correct NoInstances + if scrutIsWidenedAbstract && variance != 0 then + // fail as not specific // the Nothing and Any bounds are used so that they are not displayed; not for themselves in particular - if variance > 0 then TypeBounds(defn.NothingType, scrut) - else TypeBounds(scrut, defn.AnyType) + if variance > 0 then failNotSpecific(TypeBounds(defn.NothingType, scrut)) + else failNotSpecific(TypeBounds(scrut, defn.AnyType)) else scrut !instances(num).isError @@ -3508,12 +3542,8 @@ class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { MatchResult.Stuck if rec(spec.pattern, scrut, variance = 1, scrutIsWidenedAbstract = false) then - if instances.exists(_.isInstanceOf[TypeBounds]) then - MatchResult.NoInstance { - constrainedCaseLambda.paramNames.zip(instances).collect { - case (name, bounds: TypeBounds) => (name, bounds) - } - } + if noInstances.nonEmpty then + MatchResult.NoInstance(noInstances.toList) else val defn.MatchCase(instantiatedPat, reduced) = instantiateParamsSpec(instances, constrainedCaseLambda)(constrainedCaseLambda.resultType): @unchecked diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index b151dcdf8270..701a6360fd3d 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -419,8 +419,9 @@ object Types extends TypeUtils { typeSymbol eq defn.RepeatedParamClass /** Is this a parameter type that allows implicit argument converson? */ - def isConvertibleParam(using Context): Boolean = - typeSymbol eq defn.IntoType + def isInto(using Context): Boolean = this match + case AnnotatedType(_, annot) => annot.symbol == defn.IntoParamAnnot + case _ => false /** Is this the type of a method that has a repeated parameter type as * last parameter type? @@ -1376,15 +1377,15 @@ object Types extends TypeUtils { * and going to the operands of & and |. * Overridden and cached in OrType. 
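
A user-level view of what the capture and "not specific" logic above corresponds to; the Elem match type is the standard example from the language reference, not a test added by this patch.

    type Elem[X] = X match
      case String      => Char
      case Array[t]    => t
      case Iterable[t] => t

    val c: Elem[String] = 'a'        // reduces via the first case
    val i: Elem[Array[Int]] = 1      // the capture t is instantiated to Int

    // For an abstract scrutinee the capture cannot be instantiated to anything
    // specific, so Elem[X] stays unreduced; when reduction is actually required,
    // the failure is reported via the noInstances buffer above.
    def length[X <: Iterable[Int]](x: X, e: Elem[X]): Unit = ()
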
*/ - def widenSingletons(using Context): Type = dealias match { + def widenSingletons(skipSoftUnions: Boolean = false)(using Context): Type = dealias match { case tp: SingletonType => tp.widen case tp: OrType => - val tp1w = tp.widenSingletons + val tp1w = tp.widenSingletons(skipSoftUnions) if (tp1w eq tp) this else tp1w case tp: AndType => - val tp1w = tp.tp1.widenSingletons - val tp2w = tp.tp2.widenSingletons + val tp1w = tp.tp1.widenSingletons(skipSoftUnions) + val tp2w = tp.tp2.widenSingletons(skipSoftUnions) if ((tp.tp1 eq tp1w) && (tp.tp2 eq tp2w)) this else tp1w & tp2w case _ => this @@ -1645,6 +1646,8 @@ object Types extends TypeUtils { pre.refinedInfo match { case tp: AliasingBounds => if (pre.refinedName ne name) loop(pre.parent) else tp.alias + case tp: SingletonType => + if pre.refinedName ne name then loop(pre.parent) else tp case _ => loop(pre.parent) } @@ -1925,7 +1928,9 @@ object Types extends TypeUtils { case res => res } defn.FunctionNOf( - mt.paramInfos.mapConserve(_.translateFromRepeated(toArray = isJava)), + mt.paramInfos.mapConserve: + _.translateFromRepeated(toArray = isJava) + .mapIntoAnnot(defn.IntoParamAnnot, null), result1, isContextual) if mt.hasErasedParams then defn.PolyFunctionOf(mt) @@ -1973,6 +1978,38 @@ object Types extends TypeUtils { case _ => this } + /** A mapping between mapping one kind of into annotation to another or + * dropping into annotations. + * @param from the into annotation to map + * @param to either the replacement annotation symbol, or `null` + * in which case the `from` annotations are dropped. + */ + def mapIntoAnnot(from: ClassSymbol, to: ClassSymbol | Null)(using Context): Type = this match + case self @ AnnotatedType(tp, annot) => + val tp1 = tp.mapIntoAnnot(from, to) + if annot.symbol == from then + if to == null then tp1 + else AnnotatedType(tp1, Annotation(to, annot.tree.span)) + else self.derivedAnnotatedType(tp1, annot) + case AppliedType(tycon, arg :: Nil) if tycon.typeSymbol == defn.RepeatedParamClass => + val arg1 = arg.mapIntoAnnot(from, to) + if arg1 eq arg then this + else AppliedType(tycon, arg1 :: Nil) + case defn.FunctionOf(argTypes, resType, isContextual) => + val resType1 = resType.mapIntoAnnot(from, to) + if resType1 eq resType then this + else defn.FunctionOf(argTypes, resType1, isContextual) + case RefinedType(parent, rname, mt: MethodOrPoly) => + val mt1 = mt.mapIntoAnnot(from, to) + if mt1 eq mt then this + else RefinedType(parent.mapIntoAnnot(from, to), rname, mt1) + case mt: MethodOrPoly => + mt.derivedLambdaType(resType = mt.resType.mapIntoAnnot(from, to)) + case tp: ExprType => + tp.derivedExprType(tp.resType.mapIntoAnnot(from, to)) + case _ => + this + /** A type capturing `ref` */ def capturing(ref: CaptureRef)(using Context): Type = if captureSet.accountsFor(ref) then this @@ -2676,11 +2713,8 @@ object Types extends TypeUtils { * refinement type `T { X = U; ... }` */ def reduceProjection(using Context): Type = - if (isType) { - val reduced = prefix.lookupRefined(name) - if (reduced.exists) reduced else this - } - else this + val reduced = prefix.lookupRefined(name) + if reduced.exists then reduced else this /** Guard against cycles that can arise if given `op` * follows info. 
The problematic cases are a type alias to itself or @@ -2765,14 +2799,14 @@ object Types extends TypeUtils { * (S | T)#A --> S#A | T#A */ def derivedSelect(prefix: Type)(using Context): Type = - if (prefix eq this.prefix) this - else if (prefix.isExactlyNothing) prefix + if prefix eq this.prefix then this + else if prefix.isExactlyNothing then prefix else { + val res = + if (isType && currentValidSymbol.isAllOf(ClassTypeParam)) argForParam(prefix) + else prefix.lookupRefined(name) + if (res.exists) return res if (isType) { - val res = - if (currentValidSymbol.isAllOf(ClassTypeParam)) argForParam(prefix) - else prefix.lookupRefined(name) - if (res.exists) return res if (Config.splitProjections) prefix match { case prefix: AndType => @@ -3585,8 +3619,8 @@ object Types extends TypeUtils { else tp1n.atoms | tp2n.atoms private def computeWidenSingletons()(using Context): Type = - val tp1w = tp1.widenSingletons - val tp2w = tp2.widenSingletons + val tp1w = tp1.widenSingletons() + val tp2w = tp2.widenSingletons() if ((tp1 eq tp1w) && (tp2 eq tp2w)) this else TypeComparer.lub(tp1w, tp2w, isSoft = isSoft) private def ensureAtomsComputed()(using Context): Unit = @@ -3599,9 +3633,11 @@ object Types extends TypeUtils { ensureAtomsComputed() myAtoms - override def widenSingletons(using Context): Type = - ensureAtomsComputed() - myWidened + override def widenSingletons(skipSoftUnions: Boolean)(using Context): Type = + if isSoft && skipSoftUnions then this + else + ensureAtomsComputed() + myWidened def derivedOrType(tp1: Type, tp2: Type, soft: Boolean = isSoft)(using Context): Type = if ((tp1 eq this.tp1) && (tp2 eq this.tp2) && soft == isSoft) this @@ -4123,6 +4159,7 @@ object Types extends TypeUtils { /** Produce method type from parameter symbols, with special mappings for repeated * and inline parameters: * - replace @repeated annotations on Seq or Array types by types + * - map into annotations to $into annotations * - add @inlineParam to inline parameters * - add @erasedParam to erased parameters * - wrap types of parameters that have an @allowConversions annotation with Into[_] @@ -4132,34 +4169,14 @@ object Types extends TypeUtils { case ExprType(resType) => ExprType(addAnnotation(resType, cls, param)) case _ => AnnotatedType(tp, Annotation(cls, param.span)) - def wrapConvertible(tp: Type) = - AppliedType(defn.IntoType.typeRef, tp :: Nil) - - /** Add `Into[..] to the type itself and if it is a function type, to all its - * curried result type(s) as well. 
- */ - def addInto(tp: Type): Type = tp match - case tp @ AppliedType(tycon, args) if tycon.typeSymbol == defn.RepeatedParamClass => - tp.derivedAppliedType(tycon, addInto(args.head) :: Nil) - case tp @ AppliedType(tycon, args) if defn.isFunctionNType(tp) => - wrapConvertible(tp.derivedAppliedType(tycon, args.init :+ addInto(args.last))) - case tp @ defn.RefinedFunctionOf(rinfo) => - wrapConvertible(tp.derivedRefinedType(refinedInfo = addInto(rinfo))) - case tp: MethodOrPoly => - tp.derivedLambdaType(resType = addInto(tp.resType)) - case ExprType(resType) => - ExprType(addInto(resType)) - case _ => - wrapConvertible(tp) - def paramInfo(param: Symbol) = - var paramType = param.info.annotatedToRepeated + var paramType = param.info + .annotatedToRepeated + .mapIntoAnnot(defn.IntoAnnot, defn.IntoParamAnnot) if param.is(Inline) then paramType = addAnnotation(paramType, defn.InlineParamAnnot, param) if param.is(Erased) then paramType = addAnnotation(paramType, defn.ErasedParamAnnot, param) - if param.hasAnnotation(defn.AllowConversionsAnnot) then - paramType = addInto(paramType) paramType apply(params.map(_.name.asTermName))( @@ -5009,6 +5026,8 @@ object Types extends TypeUtils { case ex: Throwable => handleRecursive("normalizing", s"${scrutinee.show} match ..." , ex) + private def thisMatchType = this + def reduced(using Context): Type = { def contextInfo(tp: Type): Type = tp match { @@ -5023,16 +5042,43 @@ object Types extends TypeUtils { tp.underlying } - def updateReductionContext(footprint: collection.Set[Type]): Unit = - reductionContext = util.HashMap() - for (tp <- footprint) - reductionContext(tp) = contextInfo(tp) - typr.println(i"footprint for $this $hashCode: ${footprint.toList.map(x => (x, contextInfo(x)))}%, %") - def isUpToDate: Boolean = - reductionContext.keysIterator.forall { tp => + reductionContext.keysIterator.forall: tp => reductionContext(tp) `eq` contextInfo(tp) - } + + def setReductionContext(): Unit = + new TypeTraverser: + var footprint: Set[Type] = Set() + var deep: Boolean = true + val seen = util.HashSet[Type]() + def traverse(tp: Type) = + if !seen.contains(tp) then + seen += tp + tp match + case tp: NamedType => + if tp.symbol.is(TypeParam) then footprint += tp + traverseChildren(tp) + case _: AppliedType | _: RefinedType => + if deep then traverseChildren(tp) + case TypeBounds(lo, hi) => + traverse(hi) + case tp: TypeVar => + footprint += tp + traverse(tp.underlying) + case tp: TypeParamRef => + footprint += tp + case _ => + traverseChildren(tp) + end traverse + + traverse(scrutinee) + deep = false + cases.foreach(traverse) + reductionContext = util.HashMap() + for tp <- footprint do + reductionContext(tp) = contextInfo(tp) + matchTypes.println(i"footprint for $thisMatchType $hashCode: ${footprint.toList.map(x => (x, contextInfo(x)))}%, %") + end setReductionContext record("MatchType.reduce called") if !Config.cacheMatchReduced @@ -5043,20 +5089,22 @@ object Types extends TypeUtils { record("MatchType.reduce computed") if (myReduced != null) record("MatchType.reduce cache miss") myReduced = - trace(i"reduce match type $this $hashCode", matchTypes, show = true)(withMode(Mode.Type) { - def matchCases(cmp: TrackingTypeComparer): Type = - val saved = ctx.typerState.snapshot() - try cmp.matchCases(scrutinee.normalized, cases.map(MatchTypeCaseSpec.analyze(_))) - catch case ex: Throwable => - handleRecursive("reduce type ", i"$scrutinee match ...", ex) - finally - updateReductionContext(cmp.footprint) - ctx.typerState.resetTo(saved) - // this drops caseLambdas in 
constraint and undoes any typevar - // instantiations during matchtype reduction - - TypeComparer.tracked(matchCases) - }) + trace(i"reduce match type $this $hashCode", matchTypes, show = true): + withMode(Mode.Type): + setReductionContext() + def matchCases(cmp: MatchReducer): Type = + val saved = ctx.typerState.snapshot() + try + cmp.matchCases(scrutinee.normalized, cases.map(MatchTypeCaseSpec.analyze(_))) + catch case ex: Throwable => + handleRecursive("reduce type ", i"$scrutinee match ...", ex) + finally + ctx.typerState.resetTo(saved) + // this drops caseLambdas in constraint and undoes any typevar + // instantiations during matchtype reduction + TypeComparer.reduceMatchWith(matchCases) + + //else println(i"no change for $this $hashCode / $myReduced") myReduced.nn } @@ -6532,7 +6580,7 @@ object Types extends TypeUtils { record(s"foldOver $getClass") record(s"foldOver total") tp match { - case tp: TypeRef => + case tp: NamedType => if stopBecauseStaticOrLocal(tp) then x else val tp1 = tp.prefix.lookupRefined(tp.name) @@ -6561,9 +6609,6 @@ object Types extends TypeUtils { variance = saved this(y, restpe) - case tp: TermRef => - if stopBecauseStaticOrLocal(tp) then x else applyToPrefix(x, tp) - case tp: TypeVar => this(x, tp.underlying) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyAnsiiPrinter.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyAnsiiPrinter.scala index d8d72a4e651e..a3d8cedacb4a 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyAnsiiPrinter.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyAnsiiPrinter.scala @@ -2,7 +2,10 @@ package dotty.tools.dotc package core package tasty -class TastyAnsiiPrinter(bytes: Array[Byte]) extends TastyPrinter(bytes) { +class TastyAnsiiPrinter(bytes: Array[Byte], testPickler: Boolean) extends TastyPrinter(bytes, testPickler) { + + def this(bytes: Array[Byte]) = this(bytes, testPickler = false) + override protected def nameStr(str: String): String = Console.MAGENTA + str + Console.RESET override protected def treeStr(str: String): String = Console.YELLOW + str + Console.RESET override protected def lengthStr(str: String): String = Console.CYAN + str + Console.RESET diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala index a74607dbc9d5..af2097f347ba 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala @@ -13,16 +13,21 @@ import dotty.tools.tasty.TastyFormat.{ASTsSection, PositionsSection, CommentsSec import java.nio.file.{Files, Paths} import dotty.tools.io.{JarArchive, Path} import dotty.tools.tasty.TastyFormat.header +import scala.collection.immutable.BitSet import scala.compiletime.uninitialized import dotty.tools.tasty.TastyBuffer.Addr +import dotty.tools.dotc.core.Names.TermName object TastyPrinter: def showContents(bytes: Array[Byte], noColor: Boolean): String = + showContents(bytes, noColor, testPickler = false) + + def showContents(bytes: Array[Byte], noColor: Boolean, testPickler: Boolean = false): String = val printer = - if noColor then new TastyPrinter(bytes) - else new TastyAnsiiPrinter(bytes) + if noColor then new TastyPrinter(bytes, testPickler) + else new TastyAnsiiPrinter(bytes, testPickler) printer.showContents() def main(args: Array[String]): Unit = { @@ -62,7 +67,9 @@ object TastyPrinter: println(line) } -class TastyPrinter(bytes: Array[Byte]) { +class TastyPrinter(bytes: Array[Byte], val testPickler: Boolean) { + + def 
this(bytes: Array[Byte]) = this(bytes, testPickler = false) class TastyPrinterUnpickler extends TastyUnpickler(bytes) { var namesStart: Addr = uninitialized @@ -77,39 +84,53 @@ class TastyPrinter(bytes: Array[Byte]) { private val unpickler: TastyPrinterUnpickler = new TastyPrinterUnpickler import unpickler.{nameAtRef, unpickle} - private def nameToString(name: Name): String = name.debugString - - private def nameRefToString(ref: NameRef): String = nameToString(nameAtRef(ref)) - private def printHeader(sb: StringBuilder): Unit = val header = unpickler.header sb.append("Header:\n") - sb.append(s" version: ${header.majorVersion}.${header.minorVersion}.${header.experimentalVersion}\n") - sb.append(" tooling: ").append(header.toolingVersion).append("\n") - sb.append(" UUID: ").append(header.uuid).append("\n") - sb.append("\n") + if testPickler then + // these fields are not stable when the TASTy/compiler versions change, so not useful for testing + sb.append(" version: \n") + sb.append(" tooling: \n") + sb.append(" UUID: \n") + else + sb.append(s" version: ${header.majorVersion}.${header.minorVersion}.${header.experimentalVersion}\n") + sb.append(" tooling: ").append(header.toolingVersion).append("\n") + sb.append(" UUID: ").append(header.uuid).append("\n") + end if - private def printNames(sb: StringBuilder): Unit = - sb.append(s"Names (${unpickler.namesEnd.index - unpickler.namesStart.index} bytes, starting from ${unpickler.namesStart.index}):\n") + private def printNames(sb: StringBuilder)(using refs: NameRefs): Unit = + sb.append(sectionHeader( + name = "Names", + count = (unpickler.namesEnd.index - unpickler.namesStart.index).toString, + base = showBase(unpickler.namesStart.index), + lineEnd = true + )) for ((name, idx) <- nameAtRef.contents.zipWithIndex) { val index = nameStr("%6d".format(idx)) - sb.append(index).append(": ").append(nameToString(name)).append("\n") + sb.append(index).append(": ").append(refs.nameRefToString(NameRef(idx))).append("\n") } def showContents(): String = { val sb: StringBuilder = new StringBuilder + given NameRefs = unpickle0(new SourceFileUnpickler)(using NameRefs.empty).getOrElse(NameRefs.empty) printHeader(sb) printNames(sb) - unpickle(new TreeSectionUnpickler(sb)) - unpickle(new PositionSectionUnpickler(sb)) - unpickle(new CommentSectionUnpickler(sb)) - unpickle(new AttributesSectionUnpickler(sb)) + unpickle0(new TreeSectionUnpickler(sb)) + unpickle0(new PositionSectionUnpickler(sb)) + unpickle0(new CommentSectionUnpickler(sb)) + unpickle0(new AttributesSectionUnpickler(sb)) sb.result } - class TreeSectionUnpickler(sb: StringBuilder) extends SectionUnpickler[Unit](ASTsSection) { + def unpickle0[R](sec: PrinterSectionUnpickler[R])(using NameRefs): Option[R] = + unpickle(new SectionUnpickler[R](sec.name) { + def unpickle(reader: TastyReader, nameAtRef: NameTable): R = + sec.unpickle0(reader.subReader(reader.startAddr, reader.endAddr)) // fork so we can visit multiple times + }) + + class TreeSectionUnpickler(sb: StringBuilder) extends PrinterSectionUnpickler[Unit](ASTsSection) { import dotty.tools.tasty.TastyFormat.* - def unpickle(reader: TastyReader, tastyName: NameTable): Unit = { + def unpickle0(reader: TastyReader)(using refs: NameRefs): Unit = { import reader.* var indent = 0 def newLine() = { @@ -119,7 +140,7 @@ class TastyPrinter(bytes: Array[Byte]) { def printNat() = sb.append(treeStr(" " + readNat())) def printName() = { val idx = readNat() - sb.append(nameStr(" " + idx + " [" + nameRefToString(NameRef(idx)) + "]")) + sb.append(nameStr(" " + idx + " 
[" + refs.nameRefToString(NameRef(idx)) + "]")) } def printTree(): Unit = { newLine() @@ -170,19 +191,20 @@ class TastyPrinter(bytes: Array[Byte]) { } indent -= 2 } - sb.append(s"\n\nTrees (${endAddr.index - startAddr.index} bytes, starting from $base):") + sb.append(sectionHeader("Trees", reader, lineEnd = false)) while (!isAtEnd) { printTree() newLine() } + sb.append("\n") } } - class PositionSectionUnpickler(sb: StringBuilder) extends SectionUnpickler[Unit](PositionsSection) { - def unpickle(reader: TastyReader, tastyName: NameTable): Unit = { + class PositionSectionUnpickler(sb: StringBuilder) extends PrinterSectionUnpickler[Unit](PositionsSection) { + def unpickle0(reader: TastyReader)(using tastyName: NameRefs): Unit = { import reader.* val posUnpickler = new PositionUnpickler(reader, tastyName) - sb.append(s"\n\nPositions (${reader.endAddr.index - reader.startAddr.index} bytes, starting from $base):\n") + sb.append(sectionHeader("Positions", reader)) val lineSizes = posUnpickler.lineSizes sb.append(s" lines: ${lineSizes.length}\n") sb.append(s" line sizes:\n") @@ -210,12 +232,12 @@ class TastyPrinter(bytes: Array[Byte]) { } } - class CommentSectionUnpickler(sb: StringBuilder) extends SectionUnpickler[Unit](CommentsSection) { - def unpickle(reader: TastyReader, tastyName: NameTable): Unit = { + class CommentSectionUnpickler(sb: StringBuilder) extends PrinterSectionUnpickler[Unit](CommentsSection) { + def unpickle0(reader: TastyReader)(using NameRefs): Unit = { import reader.* val comments = new CommentUnpickler(reader).comments if !comments.isEmpty then - sb.append(s"\n\nComments (${reader.endAddr.index - reader.startAddr.index} bytes, starting from $base):\n") + sb.append(sectionHeader("Comments", reader)) val sorted = comments.toSeq.sortBy(_._1.index) for ((addr, cmt) <- sorted) { sb.append(treeStr("%6d".format(addr.index))) @@ -224,12 +246,14 @@ class TastyPrinter(bytes: Array[Byte]) { } } - class AttributesSectionUnpickler(sb: StringBuilder) extends SectionUnpickler[Unit](AttributesSection) { + class AttributesSectionUnpickler(sb: StringBuilder) extends PrinterSectionUnpickler[Unit](AttributesSection) { import dotty.tools.tasty.TastyFormat.* - def unpickle(reader: TastyReader, tastyName: NameTable): Unit = { + def unpickle0(reader: TastyReader)(using nameAtRef: NameRefs): Unit = { import reader.* - sb.append(s"\n\nAttributes (${reader.endAddr.index - reader.startAddr.index} bytes, starting from $base):\n") + sb.append(sectionHeader("Attributes", reader)) while !isAtEnd do + // TODO: Should we elide attributes under testPickler? (i.e. 
+ // if we add new attributes many check files will need to be updated) val tag = readByte() sb.append(" ").append(attributeTagToString(tag)) if isBooleanAttrTag(tag) then () @@ -242,6 +266,50 @@ class TastyPrinter(bytes: Array[Byte]) { } } + class NameRefs(sourceFileRefs: Set[NameRef]) extends (NameRef => TermName): + private val isSourceFile = sourceFileRefs.map(_.index).to(BitSet) + + def nameRefToString(ref: NameRef): String = this(ref).debugString + + def apply(ref: NameRef): TermName = + if isSourceFile(ref.index) then NameRefs.elidedSourceFile + else nameAtRef(ref) + + object NameRefs: + import dotty.tools.dotc.core.Names.termName + + private val elidedSourceFile = termName("") + val empty = NameRefs(Set.empty) + + + class SourceFileUnpickler extends PrinterSectionUnpickler[NameRefs](PositionsSection) { + def unpickle0(reader: TastyReader)(using nameAtRef: NameRefs): NameRefs = { + if !testPickler then return NameRefs.empty + val buf = Set.newBuilder[NameRef] + val posUnpickler = new PositionUnpickler(reader, nameAtRef) + val sources = posUnpickler.sourceNameRefs + for ((_, nameRef) <- sources.iterator) { + buf += nameRef + } + NameRefs(buf.result) + } + } + + private final def showBase(index: Int): String = + if testPickler then "" else index.toString() + + private final def sectionHeader(name: String, reader: TastyReader, lineEnd: Boolean = true): String = + val count = reader.endAddr.index - reader.startAddr.index + sectionHeader(name, count.toString, {showBase(reader.base)}, lineEnd) + + private final def sectionHeader(name: String, count: String, base: String, lineEnd: Boolean): String = + val suffix = if lineEnd then "\n" else "" + s"\n$name ($count bytes, starting from $base):$suffix" + + abstract class PrinterSectionUnpickler[T](val name: String) { + def unpickle0(reader: TastyReader)(using refs: NameRefs): T + } + protected def nameStr(str: String): String = str protected def treeStr(str: String): String = str protected def lengthStr(str: String): String = str diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index d70b56fca43d..7d2d95aa9601 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -405,12 +405,21 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { pickleType(tp) } case This(qual) => - if (qual.isEmpty) pickleType(tree.tpe) - else { - writeByte(QUALTHIS) - val ThisType(tref) = tree.tpe: @unchecked - pickleTree(qual.withType(tref)) - } + // This may be needed when pickling a `This` inside a capture set. See #19662 and #19859. + // In this case, we pickle the tree as null.asInstanceOf[tree.tpe]. + // Since the pickled tree is not the same as the input, special handling is needed + // in the tree printer when testing the pickler. See [[PlainPrinter#homogenize]]. 
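Aside on the `TastyPrinter` changes above: a minimal usage sketch of the new `testPickler` flag, which blanks the unstable header fields (version, tooling, UUID), the section base offsets, and source-file name refs so the dump stays stable across compiler upgrades. The `@main` wrapper and the `out/Foo.tasty` path are illustrative assumptions, not part of the patch; only `TastyPrinter.showContents(bytes, noColor, testPickler)` comes from the change itself.

import java.nio.file.{Files, Paths}
import dotty.tools.dotc.core.tasty.TastyPrinter

@main def dumpTasty(): Unit =
  // read raw TASTy bytes from an assumed compiled output file
  val bytes = Files.readAllBytes(Paths.get("out/Foo.tasty"))
  // print the contents with the fields that change between compiler versions elided
  println(TastyPrinter.showContents(bytes, noColor = true, testPickler = true))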
+          inline def pickleCapturedThis =
+            pickleTree(Literal(Constant(null)).cast(tree.tpe).withSpan(tree.span))
+          if (qual.isEmpty)
+            if tree.tpe.isSingleton then pickleType(tree.tpe)
+            else pickleCapturedThis
+          else
+            tree.tpe match
+              case ThisType(tref) =>
+                writeByte(QUALTHIS)
+                pickleTree(qual.withType(tref))
+              case _ => pickleCapturedThis
         case Select(qual, name) =>
           name match {
             case OuterSelectName(_, levels) =>
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
index b95e4df663a1..57c0b2217e9d 100644
--- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
@@ -65,6 +65,15 @@ class TreeUnpickler(reader: TastyReader,
   /** A map from addresses of definition entries to the symbols they define */
   private val symAtAddr = new mutable.HashMap[Addr, Symbol]

+  private def addrOfSymbol(sym: Symbol): Option[Addr] = symAtAddr.iterator.collectFirst {
+    case (addr, s) if s == sym => addr
+  }
+
+  private def locatedSymbol(sym: Symbol)(using Context): String =
+    addrOfSymbol(sym) match
+      case Some(addr) => i"local $sym @ ${addr.index}"
+      case None => i"external $sym"
+
   /** A temporary map from addresses of definition entries to the trees they define.
    *  Used to remember trees of symbols that are created by a completion. Emptied
    *  once the tree is inlined into a larger tree.
@@ -297,7 +306,7 @@ class TreeUnpickler(reader: TastyReader,
     /** The symbol defined by current definition */
     def symbolAtCurrent()(using Context): Symbol = symAtAddr.get(currentAddr) match {
       case Some(sym) =>
-        assert(ctx.owner == sym.owner, i"owner discrepancy for $sym, expected: ${ctx.owner}, found: ${sym.owner}")
+        assert(ctx.owner == sym.owner, i"owner discrepancy for ${locatedSymbol(sym)}, expected: ${locatedSymbol(ctx.owner)}, found: ${locatedSymbol(sym.owner)}")
         sym
       case None =>
         createSymbol()
diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala
index 806f39ee0425..611fda9c1d41 100644
--- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala
+++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala
@@ -445,8 +445,6 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
       // Scala 2 sometimes pickle the same type parameter symbol multiple times
       // (see i11173 for an example), but we should only unpickle it once.
       || tag == TYPEsym && flags.is(TypeParam) && symScope(owner).lookup(name.asTypeName).exists
-      // We discard the private val representing a case accessor. We only load the case accessor def.
-      || flags.isAllOf(CaseAccessor| PrivateLocal, butNot = Method)
     then
       // skip this member
       return NoSymbol
@@ -534,7 +532,10 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
            // parameter unpickling and try to emulate it.
            !completer.areParamsInitialized
          case _ =>
-           true)
+           true) &&
+        // We discard the private val representing a case accessor. We only enter the case accessor def.
+        // We do need to load these symbols to properly unpickle the annotations on the symbol (see sbt-test/scala2-compat/i19421).
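A hypothetical Scala 2.13 source sketching why the case-accessor symbols above must still be created (though no longer entered): the pickle can contain annotation entries attached to the private field behind a case accessor, and unpickling them needs that symbol to exist. This is only the rough shape of the scenario; it is not the actual contents of sbt-test/scala2-compat/i19421, and the `since` annotation is made up for illustration.

// compiled with Scala 2.13, then read from Scala 3 through the Scala 2 unpickler
import scala.annotation.meta.field

class since(version: String) extends scala.annotation.StaticAnnotation

// the @field meta-annotation puts @since on the private field backing `name`
case class User(@(since @field)("1.0") name: String)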
+ !flags.isAllOf(CaseAccessor | PrivateLocal, butNot = Method) if (canEnter) owner.asClass.enter(sym, symScope(owner)) @@ -850,7 +851,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas } else if args.nonEmpty then tycon.safeAppliedTo(EtaExpandIfHK(sym.typeParams, args.map(translateTempPoly))) - else if (sym.typeParams.nonEmpty) tycon.etaExpand(sym.typeParams) + else if (sym.typeParams.nonEmpty) tycon.etaExpand else tycon case TYPEBOUNDStpe => val lo = readTypeRef() diff --git a/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala b/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala index 2f4ecad8859d..98ab8e2b6226 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala @@ -22,9 +22,9 @@ class TASTYRun(comp: Compiler, ictx: Context) extends Run(comp, ictx) { file.extension match case "jar" => JarArchive.open(Path(file.path), create = false).allFileNames() - .map(_.stripPrefix(File.separator)) // change paths from absolute to relative - .filter(e => Path.extension(e) == "tasty" && !fromTastyIgnoreList(e)) - .map(e => e.stripSuffix(".tasty").replace(File.separator, ".")) + .map(_.stripPrefix("/")) // change paths from absolute to relative + .filter(e => Path.extension(e) == "tasty" && !fromTastyIgnoreList(e.replace("/", File.separator))) + .map(e => e.stripSuffix(".tasty").replace("/", ".")) .toList case "tasty" => TastyFileUtil.getClassName(file) case _ => diff --git a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala index 230092898051..65792d09f88c 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala @@ -5,6 +5,7 @@ package inlines import ast.*, core.* import Flags.*, Symbols.*, Types.*, Decorators.*, Constants.*, Contexts.* import StdNames.{tpnme, nme} +import NameOps.* import typer.* import NameKinds.BodyRetainerName import SymDenotations.SymDenotation @@ -54,6 +55,16 @@ object Inlines: def needsInlining(tree: Tree)(using Context): Boolean = tree match { case Block(_, expr) => needsInlining(expr) case _ => + def isUnapplyExpressionWithDummy: Boolean = + // The first step of typing an `unapply` consists in typing the call + // with a dummy argument (see Applications.typedUnApply). We delay the + // inlining of this call. + def rec(tree: Tree): Boolean = tree match + case Apply(_, ProtoTypes.dummyTreeOfType(_) :: Nil) => true + case Apply(fn, _) => rec(fn) + case _ => false + tree.symbol.name.isUnapplyName && rec(tree) + isInlineable(tree.symbol) && !tree.tpe.widenTermRefExpr.isInstanceOf[MethodOrPoly] && StagingLevel.level == 0 @@ -64,12 +75,12 @@ object Inlines: && !ctx.typer.hasInliningErrors && !ctx.base.stopInlining && !ctx.mode.is(Mode.NoInline) + && !isUnapplyExpressionWithDummy } private def needsTransparentInlining(tree: Tree)(using Context): Boolean = tree.symbol.is(Transparent) || ctx.mode.is(Mode.ForceInline) - || ctx.settings.YforceInlineWhileTyping.value /** Try to inline a call to an inline method. Fail with error if the maximal * inline depth is exceeded. @@ -89,7 +100,7 @@ object Inlines: if ctx.isAfterTyper then // During typer we wait with cross version checks until PostTyper, in order // not to provoke cyclic references. See i16116 for a test case. 
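Stepping back to the `isUnapplyExpressionWithDummy` guard added to `Inlines.needsInlining` above: a small sketch of the call shape it targets (the names are illustrative). As the comment in the change explains, typing a pattern like `case Even(k)` first elaborates `Even.unapply(<dummy>)` with a dummy argument (see `Applications.typedUnApply`), so inlining of that call is now deferred until the real scrutinee is supplied.

object Even:
  inline def unapply(n: Int): Option[Int] =
    if n % 2 == 0 then Some(n / 2) else None

def half(n: Int): Int = n match
  case Even(k) => k   // the unapply call is first typed with a dummy argument, inlined later
  case _       => 0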
- CrossVersionChecks.checkExperimentalRef(tree.symbol, tree.srcPos) + CrossVersionChecks.checkRef(tree.symbol, tree.srcPos) if tree.symbol.isConstructor then return tree // error already reported for the inline constructor definition @@ -482,14 +493,14 @@ object Inlines: // Take care that only argument bindings go into `bindings`, since positions are // different for bindings from arguments and bindings from body. - val res = tpd.Inlined(call, bindings, expansion) + val inlined = tpd.Inlined(call, bindings, expansion) - if !hasOpaqueProxies then res + if !hasOpaqueProxies then inlined else val target = - if inlinedMethod.is(Transparent) then call.tpe & res.tpe + if inlinedMethod.is(Transparent) then call.tpe & inlined.tpe else call.tpe - res.ensureConforms(target) + inlined.ensureConforms(target) // Make sure that the sealing with the declared type // is type correct. Without it we might get problems since the // expression's type is the opaque alias but the call's type is diff --git a/compiler/src/dotty/tools/dotc/interactive/Completion.scala b/compiler/src/dotty/tools/dotc/interactive/Completion.scala index 5237f19d19ae..025a2022500d 100644 --- a/compiler/src/dotty/tools/dotc/interactive/Completion.scala +++ b/compiler/src/dotty/tools/dotc/interactive/Completion.scala @@ -90,29 +90,22 @@ object Completion: val completionSymbolKind: Mode = path match - case untpd.Ident(_) :: untpd.Import(_, _) :: _ => Mode.ImportOrExport - case untpd.Ident(_) :: (_: untpd.ImportSelector) :: _ => Mode.ImportOrExport - case untpd.Literal(Constants.Constant(_: String)) :: _ => Mode.Term // literal completions + case GenericImportSelector(sel) => + if sel.imported.span.contains(pos.span) then Mode.ImportOrExport // import scala.@@ + else if sel.isGiven && sel.bound.span.contains(pos.span) then Mode.ImportOrExport + else Mode.None // import scala.{util => u@@} + case GenericImportOrExport(_) => Mode.ImportOrExport | Mode.Scope // import TrieMa@@ + case untpd.Literal(Constants.Constant(_: String)) :: _ => Mode.Term | Mode.Scope // literal completions case (ref: untpd.RefTree) :: _ => - if (ref.name.isTermName) Mode.Term - else if (ref.name.isTypeName) Mode.Type - else Mode.None + val maybeSelectMembers = if ref.isInstanceOf[untpd.Select] then Mode.Member else Mode.Scope - case (sel: untpd.ImportSelector) :: _ => - if sel.imported.span.contains(pos.span) then Mode.ImportOrExport - else Mode.None // Can't help completing the renaming + if (ref.name.isTermName) Mode.Term | maybeSelectMembers + else if (ref.name.isTypeName) Mode.Type | maybeSelectMembers + else Mode.None - case (_: untpd.ImportOrExport) :: _ => Mode.ImportOrExport case _ => Mode.None - val completionKind: Mode = - path match - case Nil | (_: untpd.PackageDef) :: _ => Mode.None - case untpd.Ident(_) :: (_: untpd.ImportSelector) :: _ => Mode.Member - case (_: untpd.Select) :: _ => Mode.Member - case _ => Mode.Scope - - completionSymbolKind | completionKind + completionSymbolKind /** When dealing with in varios palces we check to see if they are * due to incomplete backticks. 
If so, we ensure we get the full prefix @@ -141,18 +134,11 @@ object Completion: i + 1 path match - case (sel: untpd.ImportSelector) :: _ => - completionPrefix(sel.imported :: Nil, pos) - - case untpd.Ident(_) :: (sel: untpd.ImportSelector) :: _ if !sel.isGiven => - if sel.isWildcard then pos.source.content()(pos.point - 1).toString + case GenericImportSelector(sel) => + if sel.isGiven then completionPrefix(sel.bound :: Nil, pos) + else if sel.isWildcard then pos.source.content()(pos.point - 1).toString else completionPrefix(sel.imported :: Nil, pos) - case (tree: untpd.ImportOrExport) :: _ => - tree.selectors.find(_.span.contains(pos.span)).map: selector => - completionPrefix(selector :: Nil, pos) - .getOrElse("") - // Foo.`se will result in Select(Ident(Foo), ) case (select: untpd.Select) :: _ if select.name == nme.ERROR => checkBacktickPrefix(select.source.content(), select.nameSpan.start, select.span.end) @@ -169,6 +155,20 @@ object Completion: end completionPrefix + private object GenericImportSelector: + def unapply(path: List[untpd.Tree]): Option[untpd.ImportSelector] = + path match + case untpd.Ident(_) :: (sel: untpd.ImportSelector) :: _ => Some(sel) + case (sel: untpd.ImportSelector) :: _ => Some(sel) + case _ => None + + private object GenericImportOrExport: + def unapply(path: List[untpd.Tree]): Option[untpd.ImportOrExport] = + path match + case untpd.Ident(_) :: (importOrExport: untpd.ImportOrExport) :: _ => Some(importOrExport) + case (importOrExport: untpd.ImportOrExport) :: _ => Some(importOrExport) + case _ => None + /** Inspect `path` to determine the offset where the completion result should be inserted. */ def completionOffset(untpdPath: List[untpd.Tree]): Int = untpdPath match @@ -211,7 +211,6 @@ object Completion: case tpd.Select(qual, _) :: _ if qual.typeOpt.hasSimpleKind => completer.selectionCompletions(qual) case tpd.Select(qual, _) :: _ => Map.empty case (tree: tpd.ImportOrExport) :: _ => completer.directMemberCompletions(tree.expr) - case (_: untpd.ImportSelector) :: tpd.Import(expr, _) :: _ => completer.directMemberCompletions(expr) case _ => completer.scopeCompletions interactiv.println(i"""completion info with pos = $pos, diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala index e98ff6c9d66d..79282b0e5223 100644 --- a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala @@ -489,6 +489,18 @@ object JavaParsers { addAnnot(scalaDot(jtpnme.VOLATILEkw)) case SYNCHRONIZED | STRICTFP => in.nextToken() + case SEALED => + flags |= Flags.Sealed + in.nextToken() + // JEP-409: Special trick for the 'non-sealed' java keyword + case IDENTIFIER if in.name.toString == "non" => + val lookahead = in.LookaheadScanner() + ({lookahead.nextToken(); lookahead.token}, {lookahead.nextToken(); lookahead.name.toString}) match + case (MINUS, "sealed") => + in.nextToken(); in.nextToken() // skip '-' and 'sealed'. 
Nothing more to do + case _ => + syntaxError(em"Identifier '${in.name}' is not allowed here") + in.nextToken() case _ => val privateWithin: TypeName = if (isPackageAccess && !inInterface) thisPackageName @@ -812,6 +824,17 @@ object JavaParsers { else List() + + def permittedSubclassesOpt(isSealed: Boolean) : List[Tree] = + if in.token == PERMITS && !isSealed then + syntaxError(em"A type declaration that has a permits clause should have a sealed modifier") + if in.token == PERMITS then + in.nextToken() + repsep(() => typ(), COMMA) + else + // JEP-409: Class/Interface may omit the permits clause + Nil + def classDecl(start: Offset, mods: Modifiers): List[Tree] = { accept(CLASS) val nameOffset = in.offset @@ -825,6 +848,7 @@ object JavaParsers { else ObjectTpt() val interfaces = interfacesOpt() + val permittedSubclasses = permittedSubclassesOpt(mods.is(Flags.Sealed)) val (statics, body) = typeBody(CLASS, name) val cls = atSpan(start, nameOffset) { TypeDef(name, makeTemplate(superclass :: interfaces, body, tparams, needsDummyConstr = true)).withMods(mods) @@ -889,6 +913,7 @@ object JavaParsers { } else List(ObjectTpt()) + val permittedSubclasses = permittedSubclassesOpt(mods is Flags.Sealed) val (statics, body) = typeBody(INTERFACE, name) val iface = atSpan(start, nameOffset) { TypeDef( diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala b/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala index f50dcdda438c..3f993195e4f3 100644 --- a/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala @@ -393,7 +393,6 @@ object JavaScanners { '5' | '6' | '7' | '8' | '9' => putChar(ch) nextChar() - case '_' => putChar(ch) nextChar() diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala b/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala index 2b7882173e00..5fd177f384ae 100644 --- a/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala +++ b/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala @@ -10,7 +10,7 @@ object JavaTokens extends TokensCommon { final val javaOnlyKeywords: TokenSet = tokenRange(INSTANCEOF, ASSERT) final val sharedKeywords: BitSet = BitSet( IF, FOR, ELSE, THIS, NULL, NEW, SUPER, ABSTRACT, FINAL, PRIVATE, PROTECTED, - EXTENDS, TRUE, FALSE, CLASS, IMPORT, PACKAGE, DO, THROW, TRY, CATCH, FINALLY, WHILE, RETURN ) + EXTENDS, TRUE, FALSE, CLASS, IMPORT, PACKAGE, DO, THROW, TRY, CATCH, FINALLY, WHILE, RETURN, SEALED) final val primTypes: TokenSet = tokenRange(VOID, DOUBLE) final val keywords: BitSet = sharedKeywords | javaOnlyKeywords | primTypes @@ -22,6 +22,7 @@ object JavaTokens extends TokensCommon { inline val INTERFACE = 105; enter(INTERFACE, "interface") inline val ENUM = 106; enter(ENUM, "enum") inline val IMPLEMENTS = 107; enter(IMPLEMENTS, "implements") + inline val PERMITS = 108; enter(PERMITS, "permits") /** modifiers */ inline val PUBLIC = 110; enter(PUBLIC, "public") diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 6892dfdd94ca..addd54df9d69 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -30,7 +30,7 @@ import scala.annotation.tailrec import rewrites.Rewrites.{patch, overlapsPatch} import reporting.* import config.Feature -import config.Feature.{sourceVersion, migrateTo3, globalOnlyImports} +import config.Feature.{sourceVersion, migrateTo3} import config.SourceVersion.* import config.SourceVersion import 
dotty.tools.dotc.config.MigrationVersion @@ -73,6 +73,9 @@ object Parsers { enum ParseKind: case Expr, Type, Pattern + enum IntoOK: + case Yes, No, Nested + type StageKind = Int object StageKind { val None = 0 @@ -1484,7 +1487,7 @@ object Parsers { /** Same as [[typ]], but if this results in a wildcard it emits a syntax error and * returns a tree for type `Any` instead. */ - def toplevelTyp(): Tree = rejectWildcardType(typ()) + def toplevelTyp(intoOK: IntoOK = IntoOK.No): Tree = rejectWildcardType(typ(intoOK)) private def getFunction(tree: Tree): Option[Function] = tree match { case Parens(tree1) => getFunction(tree1) @@ -1532,13 +1535,24 @@ object Parsers { * PolyFunType ::= HKTypeParamClause '=>' Type * | HKTypeParamClause ‘->’ [CaptureSet] Type -- under pureFunctions * FunTypeArgs ::= InfixType - * | `(' [ [ ‘['erased'] FunArgType {`,' FunArgType } ] `)' - * | '(' [ ‘['erased'] TypedFunParam {',' TypedFunParam } ')' - */ - def typ(): Tree = + * | `(' [ FunArgType {`,' FunArgType } ] `)' + * | '(' [ TypedFunParam {',' TypedFunParam } ')' + * MatchType ::= InfixType `match` <<< TypeCaseClauses >>> + * IntoType ::= [‘into’] IntoTargetType + * | ‘( IntoType ‘)’ + * IntoTargetType ::= Type + * | FunTypeArgs (‘=>’ | ‘?=>’) IntoType + */ + def typ(intoOK: IntoOK = IntoOK.No): Tree = val start = in.offset var imods = Modifiers() - var erasedArgs: ListBuffer[Boolean] = ListBuffer() + val erasedArgs: ListBuffer[Boolean] = ListBuffer() + + def nestedIntoOK(token: Int) = + if token == TLARROW then IntoOK.No + else if intoOK == IntoOK.Nested then IntoOK.Yes + else intoOK + def functionRest(params: List[Tree]): Tree = val paramSpan = Span(start, in.lastOffset) atSpan(start, in.offset) { @@ -1567,7 +1581,9 @@ object Parsers { else accept(ARROW) - val resultType = if isPure then capturesAndResult(typ) else typ() + def resType() = typ(nestedIntoOK(token)) + val resultType = + if isPure then capturesAndResult(resType) else resType() if token == TLARROW then for case ValDef(_, tpt, _) <- params do if isByNameType(tpt) then @@ -1585,98 +1601,120 @@ object Parsers { Function(params, resultType) } - var isValParamList = false + def typeRest(t: Tree) = in.token match + case ARROW | CTXARROW => + erasedArgs.addOne(false) + functionRest(t :: Nil) + case MATCH => + matchType(t) + case FORSOME => + syntaxError(ExistentialTypesNoLongerSupported()) + t + case _ if isPureArrow => + erasedArgs.addOne(false) + functionRest(t :: Nil) + case _ => + if erasedArgs.contains(true) && !t.isInstanceOf[FunctionWithMods] then + syntaxError(ErasedTypesCanOnlyBeFunctionTypes(), implicitKwPos(start)) + t + + def isIntoPrefix: Boolean = + intoOK == IntoOK.Yes + && in.isIdent(nme.into) + && in.featureEnabled(Feature.into) + && canStartTypeTokens.contains(in.lookahead.token) - val t = - if (in.token == LPAREN) { + var isValParamList = false + if in.token == LPAREN then + in.nextToken() + if in.token == RPAREN then in.nextToken() - if (in.token == RPAREN) { - in.nextToken() - functionRest(Nil) - } - else { - val paramStart = in.offset - def addErased() = - erasedArgs.addOne(isErasedKw) - if isErasedKw then { in.skipToken(); } - addErased() - val ts = in.currentRegion.withCommasExpected { + functionRest(Nil) + else + val paramStart = in.offset + def addErased() = + erasedArgs.addOne(isErasedKw) + if isErasedKw then in.skipToken() + addErased() + val args = + in.currentRegion.withCommasExpected: funArgType() match case Ident(name) if name != tpnme.WILDCARD && in.isColon => isValParamList = true - def funParam(start: Offset, mods: 
Modifiers) = { - atSpan(start) { + def funParam(start: Offset, mods: Modifiers) = + atSpan(start): addErased() typedFunParam(in.offset, ident(), imods) - } - } commaSeparatedRest( typedFunParam(paramStart, name.toTermName, imods), () => funParam(in.offset, imods)) case t => - def funParam() = { - addErased() - funArgType() - } - commaSeparatedRest(t, funParam) - } - accept(RPAREN) - if isValParamList || in.isArrow || isPureArrow then - functionRest(ts) - else { - val ts1 = ts.mapConserve { t => - if isByNameType(t) then - syntaxError(ByNameParameterNotSupported(t), t.span) - stripByNameType(t) - else - t - } - val tuple = atSpan(start) { makeTupleOrParens(ts1) } - infixTypeRest( - refinedTypeRest( - withTypeRest( - annotTypeRest( - simpleTypeRest(tuple))))) - } - } - } - else if (in.token == LBRACKET) { - val start = in.offset - val tparams = typeParamClause(ParamOwner.TypeParam) - if (in.token == TLARROW) - atSpan(start, in.skipToken())(LambdaTypeTree(tparams, toplevelTyp())) - else if (in.token == ARROW || isPureArrow(nme.PUREARROW)) { - val arrowOffset = in.skipToken() - val body = toplevelTyp() - atSpan(start, arrowOffset) { - getFunction(body) match { - case Some(f) => - PolyFunction(tparams, body) - case None => - syntaxError(em"Implementation restriction: polymorphic function types must have a value parameter", arrowOffset) - Ident(nme.ERROR.toTypeName) - } - } - } - else { accept(TLARROW); typ() } - } - else if (in.token == INDENT) enclosed(INDENT, typ()) - else infixType() - - in.token match - case ARROW | CTXARROW => - erasedArgs.addOne(false) - functionRest(t :: Nil) - case MATCH => matchType(t) - case FORSOME => syntaxError(ExistentialTypesNoLongerSupported()); t - case _ => - if isPureArrow then - erasedArgs.addOne(false) - functionRest(t :: Nil) + def funArg() = + erasedArgs.addOne(false) + funArgType() + commaSeparatedRest(t, funArg) + accept(RPAREN) + + val intoAllowed = + intoOK == IntoOK.Yes + && args.lengthCompare(1) == 0 + && (!canFollowSimpleTypeTokens.contains(in.token) || followingIsVararg()) + val byNameAllowed = in.isArrow || isPureArrow + + def sanitize(arg: Tree): Tree = arg match + case ByNameTypeTree(t) if !byNameAllowed => + syntaxError(ByNameParameterNotSupported(t), t.span) + t + case PrefixOp(id @ Ident(tpnme.into), t) if !intoAllowed => + syntaxError(em"no `into` modifier allowed here", id.span) + t + case Parens(t) => + cpy.Parens(arg)(sanitize(t)) + case arg: FunctionWithMods => + val body1 = sanitize(arg.body) + if body1 eq arg.body then arg + else FunctionWithMods(arg.args, body1, arg.mods, arg.erasedParams).withSpan(arg.span) + case Function(args, res) if !intoAllowed => + cpy.Function(arg)(args, sanitize(res)) + case arg => + arg + + val args1 = args.mapConserve(sanitize) + if isValParamList || in.isArrow || isPureArrow then + functionRest(args) else - if (erasedArgs.contains(true) && !t.isInstanceOf[FunctionWithMods]) - syntaxError(ErasedTypesCanOnlyBeFunctionTypes(), implicitKwPos(start)) - t + val tuple = atSpan(start)(makeTupleOrParens(args1)) + typeRest: + infixTypeRest: + refinedTypeRest: + withTypeRest: + annotTypeRest: + simpleTypeRest(tuple) + else if in.token == LBRACKET then + val start = in.offset + val tparams = typeParamClause(ParamOwner.TypeParam) + if in.token == TLARROW then + atSpan(start, in.skipToken()): + LambdaTypeTree(tparams, toplevelTyp()) + else if in.token == ARROW || isPureArrow(nme.PUREARROW) then + val arrowOffset = in.skipToken() + val body = toplevelTyp(nestedIntoOK(in.token)) + atSpan(start, arrowOffset): + 
getFunction(body) match + case Some(f) => + PolyFunction(tparams, body) + case None => + syntaxError(em"Implementation restriction: polymorphic function types must have a value parameter", arrowOffset) + Ident(nme.ERROR.toTypeName) + else + accept(TLARROW) + typ() + else if in.token == INDENT then + enclosed(INDENT, typ()) + else if isIntoPrefix then + PrefixOp(typeIdent(), typ(IntoOK.Nested)) + else + typeRest(infixType()) end typ private def makeKindProjectorTypeDef(name: TypeName): TypeDef = { @@ -1713,7 +1751,7 @@ object Parsers { private def implicitKwPos(start: Int): Span = Span(start, start + nme.IMPLICITkw.asSimpleName.length) - /** TypedFunParam ::= id ':' Type */ + /** TypedFunParam ::= [`erased`] id ':' Type */ def typedFunParam(start: Offset, name: TermName, mods: Modifiers = EmptyModifiers): ValDef = atSpan(start) { acceptColon() @@ -2049,18 +2087,13 @@ object Parsers { else core() - private def maybeInto(tp: () => Tree) = - if in.isIdent(nme.into) - && in.featureEnabled(Feature.into) - && canStartTypeTokens.contains(in.lookahead.token) - then atSpan(in.skipToken()) { Into(tp()) } - else tp() - /** FunArgType ::= Type * | `=>' Type * | `->' [CaptureSet] Type */ - val funArgType: () => Tree = () => paramTypeOf(typ) + val funArgType: () => Tree = + () => paramTypeOf(() => typ(IntoOK.Yes)) + // We allow intoOK and filter out afterwards in typ() /** ParamType ::= ParamValueType * | `=>' ParamValueType @@ -2068,16 +2101,22 @@ object Parsers { */ def paramType(): Tree = paramTypeOf(paramValueType) - /** ParamValueType ::= [`into`] Type [`*'] + /** ParamValueType ::= Type [`*'] + * | IntoType + * | ‘(’ IntoType ‘)’ `*' */ - def paramValueType(): Tree = { - val t = maybeInto(toplevelTyp) - if (isIdent(nme.raw.STAR)) { + def paramValueType(): Tree = + val t = toplevelTyp(IntoOK.Yes) + if isIdent(nme.raw.STAR) then + if !t.isInstanceOf[Parens] && isInto(t) then + syntaxError( + em"""`*` cannot directly follow `into` parameter + |the `into` parameter needs to be put in parentheses""", + in.offset) in.nextToken() - atSpan(startOffset(t)) { PostfixOp(t, Ident(tpnme.raw.STAR)) } - } + atSpan(startOffset(t)): + PostfixOp(t, Ident(tpnme.raw.STAR)) else t - } /** TypeArgs ::= `[' Type {`,' Type} `]' * NamedTypeArgs ::= `[' NamedTypeArg {`,' NamedTypeArg} `]' @@ -2153,7 +2192,8 @@ object Parsers { def condExpr(altToken: Token): Tree = val t: Tree = if in.token == LPAREN then - var t: Tree = atSpan(in.offset) { Parens(inParens(exprInParens())) } + var t: Tree = atSpan(in.offset): + makeTupleOrParens(inParensWithCommas(commaSeparated(exprInParens))) if in.token != altToken then if toBeContinued(altToken) then t = inSepRegion(InCond) { @@ -2425,7 +2465,7 @@ object Parsers { Match(t, inBracesOrIndented(caseClauses(() => caseClause()))) } - /** `match' `{' TypeCaseClauses `}' + /** `match' <<< TypeCaseClauses >>> */ def matchType(t: Tree): MatchTypeTree = atSpan(startOffset(t), accept(MATCH)) { @@ -2435,7 +2475,7 @@ object Parsers { /** FunParams ::= Bindings * | id * | `_' - * Bindings ::= `(' [[‘erased’] Binding {`,' Binding}] `)' + * Bindings ::= `(' [Binding {`,' Binding}] `)' */ def funParams(mods: Modifiers, location: Location): List[Tree] = if in.token == LPAREN then @@ -3173,7 +3213,7 @@ object Parsers { * | AccessModifier * | override * | opaque - * LocalModifier ::= abstract | final | sealed | open | implicit | lazy | erased | inline | transparent + * LocalModifier ::= abstract | final | sealed | open | implicit | lazy | inline | transparent | infix | erased */ def modifiers(allowed: 
BitSet = modifierTokens, start: Modifiers = Modifiers()): Modifiers = { @tailrec @@ -3317,7 +3357,7 @@ object Parsers { /** ContextTypes ::= FunArgType {‘,’ FunArgType} */ def contextTypes(paramOwner: ParamOwner, numLeadParams: Int, impliedMods: Modifiers): List[ValDef] = - val tps = commaSeparated(() => paramTypeOf(toplevelTyp)) + val tps = commaSeparated(() => paramTypeOf(() => toplevelTyp())) var counter = numLeadParams def nextIdx = { counter += 1; counter } val paramFlags = if paramOwner.isClass then LocalParamAccessor else Param @@ -3326,7 +3366,7 @@ object Parsers { /** ClsTermParamClause ::= ‘(’ ClsParams ‘)’ | UsingClsTermParamClause * UsingClsTermParamClause::= ‘(’ ‘using’ [‘erased’] (ClsParams | ContextTypes) ‘)’ * ClsParams ::= ClsParam {‘,’ ClsParam} - * ClsParam ::= {Annotation} + * ClsParam ::= {Annotation} [{Modifier} (‘val’ | ‘var’)] Param * * TypelessClause ::= DefTermParamClause * | UsingParamClause @@ -3958,6 +3998,14 @@ object Parsers { if (in.token == COMMA) { in.nextToken() val ids = commaSeparated(() => termIdent()) + if ctx.settings.WenumCommentDiscard.value then + in.getDocComment(start).foreach: comm => + warning( + em"""Ambiguous Scaladoc comment on multiple cases is ignored. + |Remove the comment or make separate cases to add Scaladoc comments to each of them.""", + comm.span.start + ) + PatDef(mods1, id :: ids, TypeTree(), EmptyTree) } else { @@ -4014,7 +4062,7 @@ object Parsers { val tparams = typeParamClauseOpt(ParamOwner.Given) newLineOpt() val vparamss = - if in.token == LPAREN && in.lookahead.isIdent(nme.using) + if in.token == LPAREN && (in.lookahead.isIdent(nme.using) || name != EmptyTermName) then termParamClauses(ParamOwner.Given) else Nil newLinesOpt() diff --git a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala index fbf4e8d701dd..b0a533b2f1df 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala @@ -78,7 +78,7 @@ abstract class TokensCommon { //inline val YIELD = 48; enter(YIELD, "yield") inline val DO = 49; enter(DO, "do") //inline val TRAIT = 50; enter(TRAIT, "trait") - //inline val SEALED = 51; enter(SEALED, "sealed") + inline val SEALED = 51; enter(SEALED, "sealed") inline val THROW = 52; enter(THROW, "throw") inline val TRY = 53; enter(TRY, "try") inline val CATCH = 54; enter(CATCH, "catch") @@ -169,7 +169,7 @@ object Tokens extends TokensCommon { inline val OBJECT = 44; enter(OBJECT, "object") inline val YIELD = 48; enter(YIELD, "yield") inline val TRAIT = 50; enter(TRAIT, "trait") - inline val SEALED = 51; enter(SEALED, "sealed") + //inline val SEALED = 51; enter(SEALED, "sealed") inline val MATCH = 58; enter(MATCH, "match") inline val LAZY = 59; enter(LAZY, "lazy") inline val THEN = 60; enter(THEN, "then") @@ -238,6 +238,9 @@ object Tokens extends TokensCommon { final val canStartPatternTokens = atomicExprTokens | openParensTokens | BitSet(USCORE, QUOTE) + val canFollowSimpleTypeTokens = + BitSet(AT, WITH, COLONop, COLONfollow, COLONeol, LBRACE, IDENTIFIER, BACKQUOTED_IDENT, ARROW, CTXARROW, MATCH, FORSOME) + final val templateIntroTokens: TokenSet = BitSet(CLASS, TRAIT, OBJECT, ENUM, CASECLASS, CASEOBJECT) final val dclIntroTokens: TokenSet = BitSet(DEF, VAL, VAR, TYPE, GIVEN) diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala index 22ef15b6f497..06e8645b82c0 100644 --- a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala 
+++ b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala @@ -373,7 +373,7 @@ object MarkupParsers { while { xSpaceOpt() nextch() - ts.append(element) + content_LT(ts) charComingAfter(xSpaceOpt()) == '<' } do () handle.makeXMLseq(Span(start, curOffset, start), ts) diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala index 05b9f0cf75d7..31176bb2fb2c 100644 --- a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala +++ b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala @@ -129,7 +129,7 @@ trait Plugins { val updatedPlan = Plugins.schedule(plan, pluginPhases) // add research plugins - if Properties.experimental && !ctx.settings.YnoExperimental.value then + if Properties.researchPluginEnabled then plugins.collect { case p: ResearchPlugin => p }.foldRight(updatedPlan) { (plug, plan) => plug.init(options(plug), plan) } diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index 8fc0c568e125..ac7b4ef39604 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -15,7 +15,7 @@ import util.SourcePosition import scala.util.control.NonFatal import scala.annotation.switch import config.{Config, Feature} -import cc.{CapturingType, RetainingType, CaptureSet, ReachCapability, MaybeCapability, isBoxed, levelOwner, retainedElems} +import cc.{CapturingType, RetainingType, CaptureSet, ReachCapability, MaybeCapability, isBoxed, levelOwner, retainedElems, isRetainsLike} class PlainPrinter(_ctx: Context) extends Printer { @@ -60,7 +60,8 @@ class PlainPrinter(_ctx: Context) extends Printer { case OrType(tp1, tp2) => homogenize(tp1) | homogenize(tp2) case AnnotatedType(parent, annot) - if !ctx.mode.is(Mode.Type) && annot.symbol == defn.UncheckedVarianceAnnot => + if !ctx.mode.is(Mode.Type) && annot.symbol == defn.UncheckedVarianceAnnot + || annot.symbol.isRetainsLike => homogenize(parent) case tp: SkolemType => homogenize(tp.info) @@ -241,10 +242,12 @@ class PlainPrinter(_ctx: Context) extends Printer { val refsText = if showAsCap then rootSetText else toTextCaptureSet(refs) toTextCapturing(parent, refsText, boxText) case tp @ RetainingType(parent, refs) => - val refsText = refs match - case ref :: Nil if ref.symbol == defn.captureRoot => rootSetText - case _ => toTextRetainedElems(refs) - toTextCapturing(parent, refsText, "") ~ Str("R").provided(printDebug) + if Feature.ccEnabledSomewhere then + val refsText = refs match + case ref :: Nil if ref.symbol == defn.captureRoot => rootSetText + case _ => toTextRetainedElems(refs) + toTextCapturing(parent, refsText, "") ~ Str("R").provided(printDebug) + else toText(parent) case tp: PreviousErrorType if ctx.settings.XprintTypes.value => "" // do not print previously reported error message because they may try to print this error type again recuresevely case tp: ErrorType => @@ -285,7 +288,11 @@ class PlainPrinter(_ctx: Context) extends Printer { toTextGlobal(tp.resultType) } case AnnotatedType(tpe, annot) => - if annot.symbol == defn.InlineParamAnnot || annot.symbol == defn.ErasedParamAnnot then toText(tpe) + if annot.symbol == defn.InlineParamAnnot || annot.symbol == defn.ErasedParamAnnot + then toText(tpe) + else if (annot.symbol == defn.IntoAnnot || annot.symbol == defn.IntoParamAnnot) + && !printDebug + then atPrec(GlobalPrec)( Str("into ") ~ toText(tpe) ) else toTextLocal(tpe) ~ " " ~ toText(annot) case tp: TypeVar => def 
toTextCaret(tp: Type) = if printDebug then toTextLocal(tp) ~ Str("^") else toText(tp) diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 5d8b448e409c..93e280f8a13c 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -244,7 +244,6 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case _ => val tsym = tycon.typeSymbol if tycon.isRepeatedParam then toTextLocal(args.head) ~ "*" - else if tp.isConvertibleParam then "into " ~ toText(args.head) else if defn.isFunctionSymbol(tsym) then toTextFunction(tp) else if isInfixType(tp) then val l :: r :: Nil = args: @unchecked @@ -647,6 +646,9 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { && Feature.ccEnabled && !printDebug && Phases.checkCapturesPhase.exists // might be missing on -Ytest-pickler then toTextRetainsAnnot + else if annot.symbol.enclosingClass == defn.IntoAnnot && !printDebug then + atPrec(GlobalPrec): + Str("into ") ~ toText(arg) else toTextAnnot case EmptyTree => "" @@ -1008,7 +1010,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { var modsText = modText(constr.mods, constr.symbol, "", isType = false) if (!modsText.isEmpty) modsText = " " ~ modsText if (constr.mods.hasAnnotations && !constr.mods.hasFlags) modsText = modsText ~~ " this" - withEnclosingDef(constr) { addParamssText(tparamsTxt ~~ modsText, constr.trailingParamss) } + val ctorParamss = + // for fake `(x$1: Unit): Foo` constructor, don't print the param (span is not reconstructed correctly) + if constr.symbol.isAllOf(JavaParsers.fakeFlags) then Nil else constr.trailingParamss + withEnclosingDef(constr) { addParamssText(tparamsTxt ~~ modsText, ctorParamss) } } val parentsText = Text(impl.parents.map(constrText), if (ofNew) keywordStr(" with ") else ", ") val derivedText = Text(impl.derived.map(toText(_)), ", ") diff --git a/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala b/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala index 6f65320d2c8e..ce925e336b53 100644 --- a/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala +++ b/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala @@ -124,24 +124,30 @@ object SyntaxHighlighting { } } - val parser = new Parser(source) - val trees = parser.blockStatSeq() - TreeHighlighter.highlight(trees) - - val highlighted = new StringBuilder() - - for (idx <- colorAt.indices) { - val prev = if (idx == 0) NoColor else colorAt(idx - 1) - val curr = colorAt(idx) - if (curr != prev) - highlighted.append(curr) - highlighted.append(in(idx)) - } + try + val parser = new Parser(source) + val trees = parser.blockStatSeq() + TreeHighlighter.highlight(trees) + + + val highlighted = new StringBuilder() - if (colorAt.last != NoColor) - highlighted.append(NoColor) + for (idx <- colorAt.indices) { + val prev = if (idx == 0) NoColor else colorAt(idx - 1) + val curr = colorAt(idx) + if (curr != prev) + highlighted.append(curr) + highlighted.append(in(idx)) + } + + if (colorAt.last != NoColor) + highlighted.append(NoColor) - highlighted.toString + highlighted.toString + catch + case e: StackOverflowError => + in } } + } diff --git a/compiler/src/dotty/tools/dotc/quoted/QuotePatterns.scala b/compiler/src/dotty/tools/dotc/quoted/QuotePatterns.scala index 76961f691617..1ebf2ae5714b 100644 --- a/compiler/src/dotty/tools/dotc/quoted/QuotePatterns.scala +++ 
b/compiler/src/dotty/tools/dotc/quoted/QuotePatterns.scala @@ -28,7 +28,9 @@ object QuotePatterns: /** Check for restricted patterns */ def checkPattern(quotePattern: QuotePattern)(using Context): Unit = new tpd.TreeTraverser { def traverse(tree: Tree)(using Context): Unit = tree match { - case _: SplicePattern => + case tree: SplicePattern => + if !tree.body.typeOpt.derivesFrom(defn.QuotedExprClass) then + report.error(i"Splice pattern must match an Expr[...]", tree.body.srcPos) case tdef: TypeDef if tdef.symbol.isClass => val kind = if tdef.symbol.is(Module) then "objects" else "classes" report.error(em"Implementation restriction: cannot match $kind", tree.srcPos) diff --git a/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala b/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala index ce7477f4da70..270c35d0add7 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala @@ -4,7 +4,7 @@ package reporting import core.Contexts.* import java.io.{ BufferedReader, PrintWriter } -import Diagnostic.Error +import Diagnostic.* /** * This class implements a Reporter that displays messages on a text console @@ -18,9 +18,11 @@ class ConsoleReporter( override def doReport(dia: Diagnostic)(using Context): Unit = { super.doReport(dia) - dia match - case dia: Error if ctx.settings.Xprompt.value => Reporter.displayPrompt(reader, writer) - case _ => + if ctx.settings.Xprompt.value then + dia match + case _: Error => Reporter.displayPrompt(reader, writer) + case _: Warning if ctx.settings.XfatalWarnings.value => Reporter.displayPrompt(reader, writer) + case _ => } } diff --git a/compiler/src/dotty/tools/dotc/reporting/DidYouMean.scala b/compiler/src/dotty/tools/dotc/reporting/DidYouMean.scala index 04b9b518fd5e..14e5e7798e05 100644 --- a/compiler/src/dotty/tools/dotc/reporting/DidYouMean.scala +++ b/compiler/src/dotty/tools/dotc/reporting/DidYouMean.scala @@ -149,7 +149,7 @@ object DidYouMean: if d != 0 || b.sym.is(ModuleClass) => // Avoid repeating the same name in "did you mean" if qualifies(b) then def hint(b: Binding) = prefix ++ showName(b.name, b.sym) - val alts = alternatives(d, rest).map(hint).take(3) + val alts = alternatives(d, rest).filter(_.name != b.name).map(hint).take(3).distinct val suffix = if alts.isEmpty then "" else alts.mkString(" or perhaps ", " or ", "?") s" - did you mean ${hint(b)}?$suffix" else diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index c230e3b4d6d7..484bc88c0983 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -289,7 +289,7 @@ extends NotFoundMsg(MissingIdentID) { } } -class TypeMismatch(found: Type, expected: Type, inTree: Option[untpd.Tree], addenda: => String*)(using Context) +class TypeMismatch(val found: Type, expected: Type, inTree: Option[untpd.Tree], addenda: => String*)(using Context) extends TypeMismatchMsg(found, expected)(TypeMismatchID): def msg(using Context) = @@ -2775,7 +2775,11 @@ extends SyntaxMsg(TargetNameOnTopLevelClassID): class NotClassType(tp: Type)(using Context) extends TypeMsg(NotClassTypeID), ShowMatchTrace(tp): def msg(using Context) = i"$tp is not a class type" - def explain(using Context) = "" + def explain(using Context) = + i"""A class type includes classes and traits in a specific order. 
Defining a class, even an anonymous class, + |requires specifying a linearization order for the traits it extends. For example, `A & B` is not a class type + |because it doesn't specify which trait takes precedence, A or B. For more information about class types, please see the Scala Language Specification. + |Class types also can't have refinements.""" class NotConstant(suffix: String, tp: Type)(using Context) extends TypeMsg(NotConstantID), ShowMatchTrace(tp): diff --git a/compiler/src/dotty/tools/dotc/sbt/package.scala b/compiler/src/dotty/tools/dotc/sbt/package.scala index 379a2e45ce40..dc0df381f08f 100644 --- a/compiler/src/dotty/tools/dotc/sbt/package.scala +++ b/compiler/src/dotty/tools/dotc/sbt/package.scala @@ -4,6 +4,7 @@ import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.core.Symbols.Symbol import dotty.tools.dotc.core.NameOps.stripModuleClassSuffix import dotty.tools.dotc.core.Names.Name +import dotty.tools.dotc.core.Names.termName inline val TermNameHash = 1987 // 300th prime inline val TypeNameHash = 1993 // 301st prime @@ -11,10 +12,15 @@ inline val InlineParamHash = 1997 // 302nd prime extension (sym: Symbol) - def constructorName(using Context) = - sym.owner.fullName ++ ";init;" - - /** Mangle a JVM symbol name in a format better suited for internal uses by sbt. */ - def zincMangledName(using Context): Name = - if (sym.isConstructor) constructorName - else sym.name.stripModuleClassSuffix + /** Mangle a JVM symbol name in a format better suited for internal uses by sbt. + * WARNING: output must not be written to TASTy, as it is not a valid TASTy name. + */ + private[sbt] def zincMangledName(using Context): Name = + if sym.isConstructor then + // TODO: ideally we should avoid unnecessarily caching these Zinc specific + // names in the global chars array. But we would need to restructure + // ExtractDependencies caches to avoid expensive `toString` on + // each member reference. 
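As a standalone illustration of the constructor mangling produced by the `zincMangledName` rewrite in this hunk: dots in the owner's full name become `;` and `;init;` is appended, and the result is only handed to Zinc, never written to TASTy. The helper below is not compiler code and ignores the mangling of special characters that `mangledString` also performs.

// hypothetical helper mirroring the Zinc constructor-name scheme, for illustration only
def zincCtorName(ownerFullName: String): String =
  ownerFullName.replace(".", ";") + ";init;"

// zincCtorName("example.data.Record") == "example;data;Record;init;"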
+      termName(sym.owner.fullName.mangledString.replace(".", ";").nn ++ ";init;")
+    else
+      sym.name.stripModuleClassSuffix
diff --git a/compiler/src/dotty/tools/dotc/staging/HealType.scala b/compiler/src/dotty/tools/dotc/staging/HealType.scala
index 8b77f0774cdc..08e1e91b0bad 100644
--- a/compiler/src/dotty/tools/dotc/staging/HealType.scala
+++ b/compiler/src/dotty/tools/dotc/staging/HealType.scala
@@ -47,7 +47,10 @@ class HealType(pos: SrcPos)(using Context) extends TypeMap {
       checkNotWildcardSplice(tp)
       if level == 0 then tp else getTagRef(prefix)
     case _: TermRef | _: ThisType | NoPrefix =>
-      if levelInconsistentRootOfPath(tp).exists then
+      val inconsistentRoot = levelInconsistentRootOfPath(tp)
+      if inconsistentRoot.isClass && inconsistentRoot.isLocal then
+        levelError(inconsistentRoot, tp, pos)
+      else if inconsistentRoot.exists then
        tryHeal(tp)
      else
        tp
diff --git a/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala b/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala
index a85cabdd5460..87d652bd9133 100644
--- a/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala
@@ -51,7 +51,7 @@ class CheckShadowing extends MiniPhase:
   override def isRunnable(using Context): Boolean =
     super.isRunnable &&
-    ctx.settings.Xlint.value.nonEmpty &&
+    ctx.settings.Wshadow.value.nonEmpty &&
     !ctx.isJava

   // Setup before the traversal
@@ -266,12 +266,12 @@ object CheckShadowing:
   /** Get the shadowing analysis's result */
   def getShadowingResult(using Context): ShadowResult =
     val privateWarnings: List[ShadowWarning] =
-      if ctx.settings.XlintHas.privateShadow then
+      if ctx.settings.WshadowHas.privateShadow then
        privateShadowWarnings.toList
      else
        Nil
     val typeParamWarnings: List[ShadowWarning] =
-      if ctx.settings.XlintHas.typeParameterShadow then
+      if ctx.settings.WshadowHas.typeParameterShadow then
        typeParamShadowWarnings.toList
      else
        Nil
diff --git a/compiler/src/dotty/tools/dotc/transform/EtaReduce.scala b/compiler/src/dotty/tools/dotc/transform/EtaReduce.scala
index a8565d008f46..2b0c49644b09 100644
--- a/compiler/src/dotty/tools/dotc/transform/EtaReduce.scala
+++ b/compiler/src/dotty/tools/dotc/transform/EtaReduce.scala
@@ -43,6 +43,7 @@ class EtaReduce extends MiniPhase:
              arg.isInstanceOf[Ident] && arg.symbol == param.symbol)
            && isPurePath(fn)
            && fn.tpe <:< tree.tpe
+           && !(fn.symbol.is(Flags.Param) && fn.symbol.owner == mdef.symbol) // Do not eta-reduce `(..., f: T => R, ...)
=> f.apply(..)` into `f` && defn.isFunctionClass(fn.tpe.widen.typeSymbol) => report.log(i"eta reducing $tree --> $fn") fn diff --git a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala index f57595293ae1..b976dfaa2f9f 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala @@ -218,18 +218,17 @@ object ExplicitOuter { */ private def needsOuterAlways(cls: ClassSymbol)(using Context): Boolean = needsOuterIfReferenced(cls) && - (!hasLocalInstantiation(cls) || // needs outer because we might not know whether outer is referenced or not + (!hasOnlyLocalInstantiation(cls) || // needs outer because we might not know whether outer is referenced or not cls.mixins.exists(needsOuterIfReferenced) || // needs outer for parent traits cls.info.parents.exists(parent => // needs outer to potentially pass along to parent needsOuterIfReferenced(parent.classSymbol.asClass))) /** Class is only instantiated in the compilation unit where it is defined */ - private def hasLocalInstantiation(cls: ClassSymbol)(using Context): Boolean = + private def hasOnlyLocalInstantiation(cls: ClassSymbol)(using Context): Boolean = // Modules are normally locally instantiated, except if they are declared in a trait, // in which case they will be instantiated in the classes that mix in the trait. - cls.owner.ownersIterator.takeWhile(!_.isStatic).exists(_.isTerm) - || cls.is(Private, butNot = Module) - || cls.is(Module) && !cls.owner.is(Trait) + if cls.is(Module) then !cls.owner.is(Trait) + else cls.isLocalToCompilationUnit /** The outer parameter accessor of cass `cls` */ private def outerParamAccessor(cls: ClassSymbol)(using Context): TermSymbol = diff --git a/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala b/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala index 3ff72d61d41f..f5e0f8c63b58 100644 --- a/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala +++ b/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala @@ -13,12 +13,15 @@ import core.Constants.Constant import core.NameOps.isContextFunction import core.StdNames.nme import core.Types.* +import core.Decorators.* import coverage.* import typer.LiftCoverage import util.{SourcePosition, SourceFile} import util.Spans.Span import localopt.StringInterpolatorOpt import inlines.Inlines +import scala.util.matching.Regex +import java.util.regex.Pattern /** Implements code coverage by inserting calls to scala.runtime.coverage.Invoker * ("instruments" the source code). 
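The hunks that follow compile the new `coverageExcludeClasslikes` and `coverageExcludeFiles` settings into `java.util.regex` patterns and skip instrumentation when the fully-qualified class name or the source path matches one of them. Below is a minimal standalone sketch of that matching behaviour, assuming the patterns arrive as plain strings (the real code reads them from `ctx.settings` and works on `Symbol`s and `SourceFile`s):

```scala
import java.util.regex.Pattern

// Sketch of the exclusion checks; the constructor parameters stand in for the
// coverageExcludeClasslikes / coverageExcludeFiles settings.
final class CoverageFilterSketch(excludeClasslikes: List[String], excludeFiles: List[String]):
  private val classPatterns = excludeClasslikes.map(p => Pattern.compile(p))
  private val filePatterns  = excludeFiles.map(p => Pattern.compile(p))

  /** A classlike is instrumented unless its fully-qualified name matches an exclusion pattern. */
  def isClassIncluded(fqn: String): Boolean =
    !classPatterns.exists(_.matcher(fqn).matches())

  /** A file is instrumented unless its path (minus the `.scala` suffix) matches an exclusion pattern. */
  def isFileIncluded(path: String): Boolean =
    val normalized = path.stripSuffix(".scala")
    !filePatterns.exists(_.matcher(normalized).matches())

@main def coverageFilterDemo(): Unit =
  val filter = CoverageFilterSketch(List(""".*\.internal\..*"""), Nil)
  println(filter.isClassIncluded("myapp.internal.Util")) // false: excluded from coverage
  println(filter.isClassIncluded("myapp.api.Service"))   // true: instrumented
```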
@@ -41,6 +44,9 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: // stores all instrumented statements private val coverage = Coverage() + private var coverageExcludeClasslikePatterns: List[Pattern] = Nil + private var coverageExcludeFilePatterns: List[Pattern] = Nil + override def run(using ctx: Context): Unit = val outputPath = ctx.settings.coverageOutputDir.value @@ -54,10 +60,26 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: .filter(_.nn.getName.nn.startsWith("scoverage")) .foreach(_.nn.delete()) end if + + coverageExcludeClasslikePatterns = ctx.settings.coverageExcludeClasslikes.value.map(_.r.pattern) + coverageExcludeFilePatterns = ctx.settings.coverageExcludeFiles.value.map(_.r.pattern) + super.run Serializer.serialize(coverage, outputPath, ctx.settings.sourceroot.value) + private def isClassIncluded(sym: Symbol)(using Context): Boolean = + val fqn = sym.fullName.toText(ctx.printerFn(ctx)).show + coverageExcludeClasslikePatterns.isEmpty || !coverageExcludeClasslikePatterns.exists( + _.matcher(fqn).nn.matches + ) + + private def isFileIncluded(file: SourceFile)(using Context): Boolean = + val normalizedPath = file.path.replace(".scala", "") + coverageExcludeFilePatterns.isEmpty || !coverageExcludeFilePatterns.exists( + _.matcher(normalizedPath).nn.matches + ) + override protected def newTransformer(using Context) = CoverageTransformer(ctx.settings.coverageOutputDir.value) @@ -269,8 +291,17 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: transformDefDef(tree) case tree: PackageDef => - // only transform the statements of the package - cpy.PackageDef(tree)(tree.pid, transform(tree.stats)) + if isFileIncluded(tree.srcPos.sourcePos.source) && isClassIncluded(tree.symbol) then + // only transform the statements of the package + cpy.PackageDef(tree)(tree.pid, transform(tree.stats)) + else + tree + + case tree: TypeDef => + if isFileIncluded(tree.srcPos.sourcePos.source) && isClassIncluded(tree.symbol) then + super.transform(tree) + else + tree case tree: Assign => // only transform the rhs diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala index 0be66828d58c..b0aed580e824 100644 --- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala +++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala @@ -9,7 +9,7 @@ import tasty.* import config.Printers.{noPrinter, pickling} import config.Feature import java.io.PrintStream -import io.ClassfileWriterOps +import io.FileWriters.TastyWriter import StdNames.{str, nme} import Periods.* import Phases.* @@ -19,8 +19,9 @@ import reporting.{ThrowingReporter, Profile, Message} import collection.mutable import util.concurrent.{Executor, Future} import compiletime.uninitialized -import dotty.tools.io.JarArchive +import dotty.tools.io.{JarArchive, AbstractFile} import dotty.tools.dotc.printing.OutlinePrinter +import scala.annotation.constructorOnly object Pickler { val name: String = "pickler" @@ -32,8 +33,17 @@ object Pickler { */ inline val ParallelPickling = true - class EarlyFileWriter(writer: ClassfileWriterOps): - export writer.{writeTasty, close} + class EarlyFileWriter private (writer: TastyWriter, origin: AbstractFile): + def this(dest: AbstractFile)(using @constructorOnly ctx: Context) = this(TastyWriter(dest), dest) + + export writer.writeTasty + + def close(): Unit = + writer.close() + origin match { + case jar: JarArchive => jar.close() // also close the file system + case _ 
=> + } } /** This phase pickles trees */ @@ -59,6 +69,7 @@ class Pickler extends Phase { // Maps that keep a record if -Ytest-pickler is set. private val beforePickling = new mutable.HashMap[ClassSymbol, String] + private val printedTasty = new mutable.HashMap[ClassSymbol, String] private val pickledBytes = new mutable.HashMap[ClassSymbol, (CompilationUnit, Array[Byte])] /** Drop any elements of this list that are linked module classes of other elements in the list */ @@ -174,7 +185,10 @@ class Pickler extends Phase { else val pickled = computePickled() reportPositionWarnings() - if ctx.settings.YtestPickler.value then pickledBytes(cls) = (unit, pickled) + if ctx.settings.YtestPickler.value then + pickledBytes(cls) = (unit, pickled) + if ctx.settings.YtestPicklerCheck.value then + printedTasty(cls) = TastyPrinter.showContents(pickled, noColor = true, testPickler = true) () => pickled unit.pickled += (cls -> demandPickled) @@ -184,7 +198,7 @@ class Pickler extends Phase { override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = { val sigWriter: Option[Pickler.EarlyFileWriter] = ctx.settings.YjavaTastyOutput.value match case jar: JarArchive if jar.exists => - Some(Pickler.EarlyFileWriter(ClassfileWriterOps(jar))) + Some(Pickler.EarlyFileWriter(jar)) case _ => None val units0 = @@ -225,9 +239,11 @@ class Pickler extends Phase { (cls, pickled) <- unit.pickled if cls.isDefinedInCurrentRun do - val binaryName = cls.binaryClassName.replace('.', java.io.File.separatorChar).nn - val binaryClassName = if (cls.is(Module)) binaryName.stripSuffix(str.MODULE_SUFFIX).nn else binaryName - writer.writeTasty(binaryClassName, pickled()) + val binaryClassName = cls.binaryClassName + val internalName = + if (cls.is(Module)) binaryClassName.stripSuffix(str.MODULE_SUFFIX).nn + else binaryClassName + val _ = writer.writeTasty(internalName, pickled()) count += 1 finally writer.close() @@ -239,15 +255,22 @@ class Pickler extends Phase { private def testUnpickler(using Context): Unit = pickling.println(i"testing unpickler at run ${ctx.runId}") ctx.initialize() + val resolveCheck = ctx.settings.YtestPicklerCheck.value val unpicklers = for ((cls, (unit, bytes)) <- pickledBytes) yield { val unpickler = new DottyUnpickler(unit.source.file, bytes) unpickler.enter(roots = Set.empty) - cls -> (unit, unpickler) + val optCheck = + if resolveCheck then + val resolved = unit.source.file.resolveSibling(s"${cls.name.mangledString}.tastycheck") + if resolved == null then None + else Some(resolved) + else None + cls -> (unit, unpickler, optCheck) } pickling.println("************* entered toplevel ***********") val rootCtx = ctx - for ((cls, (unit, unpickler)) <- unpicklers) do + for ((cls, (unit, unpickler, optCheck)) <- unpicklers) do val testJava = unit.typedAsJava if testJava then if unpickler.unpickler.nameAtRef.contents.exists(_ == nme.FromJavaObject) then @@ -256,6 +279,15 @@ class Pickler extends Phase { val freshUnit = CompilationUnit(rootCtx.compilationUnit.source) freshUnit.needsCaptureChecking = unit.needsCaptureChecking freshUnit.knowsPureFuns = unit.knowsPureFuns + optCheck match + case Some(check) => + import java.nio.charset.StandardCharsets.UTF_8 + val checkContents = String(check.toByteArray, UTF_8) + inContext(rootCtx.fresh.setCompilationUnit(freshUnit)): + testSamePrinted(printedTasty(cls), checkContents, cls, check) + case None => + () + inContext(printerContext(testJava)(using rootCtx.fresh.setCompilationUnit(freshUnit))): testSame(i"$unpickled%\n%", beforePickling(cls), cls) @@ 
-271,4 +303,35 @@ class Pickler extends Phase { | | diff before-pickling.txt after-pickling.txt""") end testSame + + private def testSamePrinted(printed: String, checkContents: String, cls: ClassSymbol, check: AbstractFile)(using Context): Unit = { + for lines <- diff(printed, checkContents) do + output("after-printing.txt", printed) + report.error(em"""TASTy printer difference for $cls in ${cls.source}, did not match ${check}, + | output dumped in after-printing.txt, check diff with `git diff --no-index -- $check after-printing.txt` + | actual output: + |$lines%\n%""") + } + + /** Reuse diff logic from compiler/test/dotty/tools/vulpix/FileDiff.scala */ + private def diff(actual: String, expect: String): Option[Seq[String]] = + import scala.util.Using + import scala.io.Source + val actualLines = Using(Source.fromString(actual))(_.getLines().toList).get + val expectLines = Using(Source.fromString(expect))(_.getLines().toList).get + Option.when(!matches(actualLines, expectLines))(actualLines) + + private def matches(actual: String, expect: String): Boolean = { + import java.io.File + val actual1 = actual.stripLineEnd + val expect1 = expect.stripLineEnd + + // handle check file path mismatch on windows + actual1 == expect1 || File.separatorChar == '\\' && actual1.replace('\\', '/') == expect1 + } + + private def matches(actual: Seq[String], expect: Seq[String]): Boolean = { + actual.length == expect.length + && actual.lazyZip(expect).forall(matches) + } } diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index 63f6af2beb86..3bcec80b5b10 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -116,7 +116,8 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => * This info is used in phase ParamForwarding */ private def forwardParamAccessors(impl: Template)(using Context): Unit = impl.parents match - case superCall @ Apply(fn, superArgs) :: _ if superArgs.nonEmpty => + case superCall @ Apply(fn, superArgs) :: _ + if superArgs.nonEmpty && fn.symbol.isPrimaryConstructor => fn.tpe.widen match case MethodType(superParamNames) => for case stat: ValDef <- impl.body do @@ -368,7 +369,7 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => } case tree @ Inlined(call, bindings, expansion) if !tree.inlinedFromOuterScope => val pos = call.sourcePos - CrossVersionChecks.checkExperimentalRef(call.symbol, pos) + CrossVersionChecks.checkRef(call.symbol, pos) withMode(Mode.NoInline)(transform(call)) val callTrace = Inlines.inlineCallTrace(call.symbol, pos)(using ctx.withSource(pos.source)) cpy.Inlined(tree)(callTrace, transformSub(bindings), transform(expansion)(using inlineContext(tree))) diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index 3d7b81a606ab..44d5caba631a 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -9,14 +9,13 @@ import ast.* import Names.Name import Phases.Phase import DenotTransformers.{DenotTransformer, IdentityDenotTransformer, SymTransformer} -import NamerOps.{methodType, linkConstructorParams} +import NamerOps.linkConstructorParams import NullOpsDecorator.stripNull import typer.ErrorReporting.err import typer.ProtoTypes.* import typer.TypeAssigner.seqLitType import typer.ConstFold import typer.ErrorReporting.{Addenda, NothingToAdd} 
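Stepping back to the `matches`/`diff` helpers added to `Pickler` above for the `YtestPicklerCheck` setting: the comparison is line by line, tolerating Windows path separators in the actual output. A self-contained sketch of that strategy (names are illustrative, not the compiler's API):

```scala
// Standalone sketch of a check-file comparison: equal line counts, and each
// actual line must equal the expected one, optionally after normalizing '\' to '/'.
object CheckFileCompareSketch:
  private def lineMatches(actual: String, expect: String): Boolean =
    val a = actual.stripLineEnd
    val e = expect.stripLineEnd
    a == e || (java.io.File.separatorChar == '\\' && a.replace('\\', '/') == e)

  def matches(actual: String, expect: String): Boolean =
    val as = actual.linesIterator.toList
    val es = expect.linesIterator.toList
    as.length == es.length && as.lazyZip(es).forall(lineMatches)

  /** Returns the actual lines when they do not match the expected contents. */
  def diff(actual: String, expect: String): Option[Seq[String]] =
    Option.when(!matches(actual, expect))(actual.linesIterator.toSeq)
```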
-import NamerOps.methodType
 import config.Printers.recheckr
 import util.Property
 import StdNames.nme
diff --git a/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala b/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala
index 094d6024eb4e..43aef6279cec 100644
--- a/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala
+++ b/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala
@@ -3,7 +3,7 @@ package transform
 import ast.Trees.*, ast.tpd, core.*
 import Contexts.*, Types.*, Decorators.*, Symbols.*, DenotTransformers.*
-import SymDenotations.*, Scopes.*, StdNames.*, NameOps.*, Names.*
+import SymDenotations.*, Scopes.*, StdNames.*, NameOps.*, Names.*, NameKinds.*
 import MegaPhase.MiniPhase
@@ -25,7 +25,24 @@ class SpecializeFunctions extends MiniPhase {
   /** Create forwarders from the generic applys to the specialized ones. */
   override def transformDefDef(ddef: DefDef)(using Context) = {
-    if ddef.name != nme.apply
+    // Note on the special case for inline `apply`s:
+    // `apply` and `apply$retainedBody` are specialized in this transformation.
+    // `apply$retainedBody` has the name kind `BodyRetainerName`; it contains
+    // the runtime implementation of an inline `apply` that implements (or overrides)
+    // the `FunctionN.apply` method. The inline method is not specialized; it will
+    // be replaced with the implementation of `apply$retainedBody`. The following code
+    //   inline def apply(x: Int): Double = x.toDouble:Double
+    //   private def apply$retainedBody(x: Int): Double = x.toDouble:Double
+    // is transformed into
+    //   inline def apply(x: Int): Double = x.toDouble:Double
+    //   private def apply$retainedBody(x: Int): Double = this.apply$mcDI$sp(x)
+    //   def apply$mcDI$sp(v: Int): Double = x.toDouble:Double
+    // after erasure it will become
+    //   def apply(v: Int): Double = this.apply$mcDI$sp(v) // from apply$retainedBody
+    //   def apply$mcDI$sp(v: Int): Double = v.toDouble():Double
+    //   def apply(v1: Object): Object = Double.box(this.apply(Int.unbox(v1))) // erasure bridge
+
+    if ddef.name.asTermName.exclude(BodyRetainerName) != nme.apply
     || ddef.termParamss.length != 1
     || ddef.termParamss.head.length > 2
     || !ctx.owner.isClass
@@ -44,12 +61,12 @@ class SpecializeFunctions extends MiniPhase {
         defn.isSpecializableFunction(cls, paramTypes, retType)
       }
-    if (sym.is(Flags.Deferred) || !isSpecializable) return ddef
+    if (sym.is(Flags.Deferred) || sym.is(Flags.Inline) || !isSpecializable) return ddef
     val specializedApply = newSymbol(
       cls,
       specName.nn,
-      sym.flags | Flags.Synthetic,
+      (sym.flags | Flags.Synthetic) &~ Flags.Private, // Private flag can be set if the name is a BodyRetainerName
       sym.info
     ).entered
diff --git a/compiler/src/dotty/tools/dotc/transform/Splicer.scala b/compiler/src/dotty/tools/dotc/transform/Splicer.scala
index 93ba1845e484..e42f997e7265 100644
--- a/compiler/src/dotty/tools/dotc/transform/Splicer.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Splicer.scala
@@ -72,7 +72,7 @@ object Splicer {
         if !ctx.reporter.hasErrors then
           report.error("Macro expansion was aborted by the macro without any errors reported.
Macros should issue errors to end-users when aborting a macro expansion with StopMacroExpansion.", splicePos) // errors have been emitted - EmptyTree + ref(defn.Predef_undefined).withType(ErrorType(em"macro expansion was stopped")) case ex: StopInterpretation => report.error(ex.msg, ex.pos) ref(defn.Predef_undefined).withType(ErrorType(ex.msg)) diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index 17298a45e01e..4a7548f40f43 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -23,6 +23,7 @@ import collection.mutable import ProtoTypes.* import staging.StagingLevel import inlines.Inlines.inInlineMethod +import cc.{isRetainsLike, CaptureAnnotation} import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions @@ -162,6 +163,13 @@ object TreeChecker { */ def checkNoOrphans(tp0: Type, tree: untpd.Tree = untpd.EmptyTree)(using Context): Type = new TypeMap() { val definedBinders = new java.util.IdentityHashMap[Type, Any] + private var inRetainingAnnot = false + + def insideRetainingAnnot[T](op: => T): T = + val saved = inRetainingAnnot + inRetainingAnnot = true + try op finally inRetainingAnnot = saved + def apply(tp: Type): Type = { tp match { case tp: BindingType => @@ -169,10 +177,20 @@ object TreeChecker { mapOver(tp) definedBinders.remove(tp) case tp: ParamRef => - assert(definedBinders.get(tp.binder) != null, s"orphan param: ${tp.show}, hash of binder = ${System.identityHashCode(tp.binder)}, tree = ${tree.show}, type = $tp0") + val isValidRef = + definedBinders.get(tp.binder) != null + || inRetainingAnnot + // Inside a normal @retains annotation, the captured references could be ill-formed. See issue #19661. + // But this is ok since capture checking does not rely on them. + assert(isValidRef, s"orphan param: ${tp.show}, hash of binder = ${System.identityHashCode(tp.binder)}, tree = ${tree.show}, type = $tp0") case tp: TypeVar => assert(tp.isInstantiated, s"Uninstantiated type variable: ${tp.show}, tree = ${tree.show}") apply(tp.underlying) + case tp @ AnnotatedType(underlying, annot) if annot.symbol.isRetainsLike && !annot.isInstanceOf[CaptureAnnotation] => + val underlying1 = this(underlying) + val annot1 = insideRetainingAnnot: + annot.mapWith(this) + derivedAnnotatedType(tp, underlying1, annot1) case _ => mapOver(tp) } @@ -180,6 +198,17 @@ object TreeChecker { } }.apply(tp0) + def checkParents(sym: ClassSymbol, parents: List[tpd.Tree])(using Context): Unit = + val symbolParents = sym.classInfo.parents.map(_.dealias.typeSymbol) + val treeParents = parents.map(_.tpe.dealias.typeSymbol) + assert(symbolParents == treeParents, + i"""Parents of class symbol differs from the parents in the tree for $sym + | + |Parents in symbol: $symbolParents + |Parents in tree: $treeParents + |""".stripMargin) + end checkParents + /** Run some additional checks on the nodes of the trees. 
Specifically: * * - TypeTree can only appear in TypeApply args, New, Typed tpt, Closure @@ -552,14 +581,7 @@ object TreeChecker { assert(ctx.owner.isClass) val sym = ctx.owner.asClass if !sym.isPrimitiveValueClass then - val symbolParents = sym.classInfo.parents.map(_.dealias.typeSymbol) - val treeParents = impl.parents.map(_.tpe.dealias.typeSymbol) - assert(symbolParents == treeParents, - i"""Parents of class symbol differs from the parents in the tree for $sym - | - |Parents in symbol: $symbolParents - |Parents in tree: $treeParents - |""".stripMargin) + TreeChecker.checkParents(sym, impl.parents) } override def typedTypeDef(tdef: untpd.TypeDef, sym: Symbol)(using Context): Tree = { diff --git a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala index 7cf028c95064..692b3177786d 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala @@ -50,7 +50,8 @@ class Checker extends Phase: Semantic.checkClasses(classes)(using checkCtx) if ctx.settings.YcheckInitGlobal.value then - Objects.checkClasses(classes)(using checkCtx) + val obj = new Objects + obj.checkClasses(classes)(using checkCtx) } units0 diff --git a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala index 763b71619de8..cbbd97e3810e 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala @@ -27,6 +27,7 @@ import scala.collection.immutable.ListSet import scala.collection.mutable import scala.annotation.tailrec import scala.annotation.constructorOnly +import dotty.tools.dotc.core.Flags.AbstractOrTrait /** Check initialization safety of static objects * @@ -65,7 +66,13 @@ import scala.annotation.constructorOnly * whole-program analysis. However, the check is not modular in terms of project boundaries. * */ -object Objects: +import Decorators.* +class Objects(using Context @constructorOnly): + val immutableHashSetBuider: Symbol = requiredClass("scala.collection.immutable.HashSetBuilder") + // TODO: this should really be an annotation on the rhs of the field initializer rather than the field itself. + val HashSetBuilder_rootNode: Symbol = immutableHashSetBuider.requiredValue("rootNode") + + val whiteList = Set(HashSetBuilder_rootNode) // ----------------------------- abstract domain ----------------------------- @@ -196,13 +203,14 @@ object Objects: * * @param owner The static object whose initialization creates the array. 
*/ - case class OfArray(owner: ClassSymbol, regions: Regions.Data)(using @constructorOnly ctx: Context) extends ValueElement: + case class OfArray(owner: ClassSymbol, regions: Regions.Data)(using @constructorOnly ctx: Context, @constructorOnly trace: Trace) extends ValueElement: val klass: ClassSymbol = defn.ArrayClass val addr: Heap.Addr = Heap.arrayAddr(regions, owner) def show(using Context) = "OfArray(owner = " + owner.show + ")" /** * Represents a lambda expression + * @param klass The enclosing class of the anonymous function's creation site */ case class Fun(code: Tree, thisV: ThisValue, klass: ClassSymbol, env: Env.Data) extends ValueElement: def show(using Context) = "Fun(" + code.show + ", " + thisV.show + ", " + klass.show + ")" @@ -453,9 +461,11 @@ object Objects: abstract class Addr: /** The static object which owns the mutable slot */ def owner: ClassSymbol + def getTrace: Trace = Trace.empty /** The address for mutable fields of objects. */ - private case class FieldAddr(regions: Regions.Data, field: Symbol, owner: ClassSymbol) extends Addr + private case class FieldAddr(regions: Regions.Data, field: Symbol, owner: ClassSymbol)(trace: Trace) extends Addr: + override def getTrace: Trace = trace /** The address for mutable local variables . */ private case class LocalVarAddr(regions: Regions.Data, sym: Symbol, owner: ClassSymbol) extends Addr @@ -495,11 +505,11 @@ object Objects: def localVarAddr(regions: Regions.Data, sym: Symbol, owner: ClassSymbol): Addr = LocalVarAddr(regions, sym, owner) - def fieldVarAddr(regions: Regions.Data, sym: Symbol, owner: ClassSymbol): Addr = - FieldAddr(regions, sym, owner) + def fieldVarAddr(regions: Regions.Data, sym: Symbol, owner: ClassSymbol)(using Trace): Addr = + FieldAddr(regions, sym, owner)(summon[Trace]) - def arrayAddr(regions: Regions.Data, owner: ClassSymbol)(using Context): Addr = - FieldAddr(regions, defn.ArrayClass, owner) + def arrayAddr(regions: Regions.Data, owner: ClassSymbol)(using Trace, Context): Addr = + FieldAddr(regions, defn.ArrayClass, owner)(summon[Trace]) def getHeapData()(using mutable: MutableData): Data = mutable.heap @@ -599,6 +609,26 @@ object Objects: case _ => a + def filterType(tpe: Type)(using Context): Value = + tpe match + case t @ SAMType(_, _) if a.isInstanceOf[Fun] => a // if tpe is SAMType and a is Fun, allow it + case _ => + val baseClasses = tpe.baseClasses + if baseClasses.isEmpty then a + else filterClass(baseClasses.head) // could have called ClassSymbol, but it does not handle OrType and AndType + + def filterClass(sym: Symbol)(using Context): Value = + if !sym.isClass then a + else + val klass = sym.asClass + a match + case Cold => Cold + case ref: Ref => if ref.klass.isSubClass(klass) then ref else Bottom + case ValueSet(values) => values.map(v => v.filterClass(klass)).join + case arr: OfArray => if defn.ArrayClass.isSubClass(klass) then arr else Bottom + case fun: Fun => + if klass.isOneOf(AbstractOrTrait) && klass.baseClasses.exists(defn.isFunctionClass) then fun else Bottom + extension (value: Ref | Cold.type) def widenRefOrCold(height : Int)(using Context) : Ref | Cold.type = value.widen(height).asInstanceOf[ThisValue] @@ -617,7 +647,7 @@ object Objects: * @param needResolve Whether the target of the call needs resolution? 
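As a rough intuition for the `filterClass` refinement defined above (narrowing an abstract value to the alternatives compatible with the class that owns the selected member), here is a toy model over plain JVM classes. The real code works on the analysis' abstract `Value` lattice and compiler `Symbol`s; all names below are illustrative.

```scala
// Toy model: alternatives whose class cannot conform to the member's owner are
// dropped (they become Bottom), mirroring the "filter by class" idea.
sealed trait AbsValue
case object Cold extends AbsValue                      // unknown: cannot be refined
case object Bottom extends AbsValue                    // no value
final case class Ref(cls: Class[?]) extends AbsValue   // instance of a known class
final case class Alts(values: Set[AbsValue]) extends AbsValue

def filterClass(v: AbsValue, owner: Class[?]): AbsValue = v match
  case Cold         => Cold
  case Bottom       => Bottom
  case r @ Ref(cls) => if owner.isAssignableFrom(cls) then r else Bottom
  case Alts(vs)     => Alts(vs.map(filterClass(_, owner)) - Bottom)

@main def filterClassDemo(): Unit =
  val v = Alts(Set(Ref(classOf[String]), Ref(classOf[Integer])))
  println(filterClass(v, classOf[CharSequence])) // keeps only the String alternative
```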
*/ def call(value: Value, meth: Symbol, args: List[ArgInfo], receiver: Type, superType: Type, needResolve: Boolean = true): Contextual[Value] = log("call " + meth.show + ", this = " + value.show + ", args = " + args.map(_.value.show), printer, (_: Value).show) { - value match + value.filterClass(meth.owner) match case Cold => report.warning("Using cold alias. " + Trace.show, Trace.position) Bottom @@ -632,12 +662,12 @@ object Objects: if arr.addr.owner == State.currentObject then Heap.read(arr.addr) else - errorReadOtherStaticObject(State.currentObject, arr.addr.owner) + errorReadOtherStaticObject(State.currentObject, arr.addr) Bottom else if target == defn.Array_update then assert(args.size == 2, "Incorrect number of arguments for Array update, found = " + args.size) if arr.addr.owner != State.currentObject then - errorMutateOtherStaticObject(State.currentObject, arr.addr.owner) + errorMutateOtherStaticObject(State.currentObject, arr.addr) else Heap.writeJoin(arr.addr, args.tail.head.value) Bottom @@ -733,7 +763,6 @@ object Objects: * @param args Arguments of the constructor call (all parameter blocks flatten to a list). */ def callConstructor(value: Value, ctor: Symbol, args: List[ArgInfo]): Contextual[Value] = log("call " + ctor.show + ", args = " + args.map(_.value.show), printer, (_: Value).show) { - value match case ref: Ref => if ctor.hasSource then @@ -768,7 +797,7 @@ object Objects: * @param needResolve Whether the target of the selection needs resolution? */ def select(value: Value, field: Symbol, receiver: Type, needResolve: Boolean = true): Contextual[Value] = log("select " + field.show + ", this = " + value.show, printer, (_: Value).show) { - value match + value.filterClass(field.owner) match case Cold => report.warning("Using cold alias", Trace.position) Bottom @@ -789,7 +818,7 @@ object Objects: if addr.owner == State.currentObject then Heap.read(addr) else - errorReadOtherStaticObject(State.currentObject, addr.owner) + errorReadOtherStaticObject(State.currentObject, addr) Bottom else if ref.isObjectRef && ref.klass.hasSource then report.warning("Access uninitialized field " + field.show + ". " + Trace.show, Trace.position) @@ -839,12 +868,12 @@ object Objects: * @param rhsTyp The type of the right-hand side. */ def assign(lhs: Value, field: Symbol, rhs: Value, rhsTyp: Type): Contextual[Value] = log("Assign" + field.show + " of " + lhs.show + ", rhs = " + rhs.show, printer, (_: Value).show) { - lhs match + lhs.filterClass(field.owner) match case fun: Fun => report.warning("[Internal error] unexpected tree in assignment, fun = " + fun.code.show + Trace.show, Trace.position) case arr: OfArray => - report.warning("[Internal error] unexpected tree in assignment, array = " + arr.show + Trace.show, Trace.position) + report.warning("[Internal error] unexpected tree in assignment, array = " + arr.show + " field = " + field + Trace.show, Trace.position) case Cold => report.warning("Assigning to cold aliases is forbidden. " + Trace.show, Trace.position) @@ -858,7 +887,7 @@ object Objects: if ref.hasVar(field) then val addr = ref.varAddr(field) if addr.owner != State.currentObject then - errorMutateOtherStaticObject(State.currentObject, addr.owner) + errorMutateOtherStaticObject(State.currentObject, addr) else Heap.writeJoin(addr, rhs) else @@ -876,8 +905,7 @@ object Objects: * @param args The arguments passsed to the constructor. 
*/ def instantiate(outer: Value, klass: ClassSymbol, ctor: Symbol, args: List[ArgInfo]): Contextual[Value] = log("instantiating " + klass.show + ", outer = " + outer + ", args = " + args.map(_.value.show), printer, (_: Value).show) { - outer match - + outer.filterClass(klass.owner) match case _ : Fun | _: OfArray => report.warning("[Internal error] unexpected outer in instantiating a class, outer = " + outer.show + ", class = " + klass.show + ", " + Trace.show, Trace.position) Bottom @@ -948,7 +976,7 @@ object Objects: if addr.owner == State.currentObject then Heap.read(addr) else - errorReadOtherStaticObject(State.currentObject, addr.owner) + errorReadOtherStaticObject(State.currentObject, addr) Bottom end if case _ => @@ -1000,7 +1028,7 @@ object Objects: Env.getVar(sym) match case Some(addr) => if addr.owner != State.currentObject then - errorMutateOtherStaticObject(State.currentObject, addr.owner) + errorMutateOtherStaticObject(State.currentObject, addr) else Heap.writeJoin(addr, value) case _ => @@ -1091,6 +1119,9 @@ object Objects: instantiate(outer, cls, ctor, args) } + case TypeCast(elem, tpe) => + eval(elem, thisV, klass).filterType(tpe) + case Apply(ref, arg :: Nil) if ref.symbol == defn.InitRegionMethod => val regions2 = Regions.extend(expr.sourcePos) if Regions.exists(expr.sourcePos) then @@ -1549,7 +1580,7 @@ object Objects: report.warning("The argument should be a constant integer value", arg) res.widen(1) case _ => - res.widen(1) + if res.isInstanceOf[Fun] then res.widen(2) else res.widen(1) argInfos += ArgInfo(widened, trace.add(arg.tree), arg.tree) } @@ -1662,8 +1693,8 @@ object Objects: // class body tpl.body.foreach { case vdef : ValDef if !vdef.symbol.is(Flags.Lazy) && !vdef.rhs.isEmpty => - val res = eval(vdef.rhs, thisV, klass) val sym = vdef.symbol + val res = if (whiteList.contains(sym)) Bottom else eval(vdef.rhs, thisV, klass) if sym.is(Flags.Mutable) then val addr = Heap.fieldVarAddr(summon[Regions.Data], sym, State.currentObject) thisV.initVar(sym, addr) @@ -1734,16 +1765,31 @@ object Objects: if cls.isAllOf(Flags.JavaInterface) then Bottom else evalType(tref.prefix, thisV, klass, elideObjectAccess = cls.isStatic) - def errorMutateOtherStaticObject(currentObj: ClassSymbol, otherObj: ClassSymbol)(using Trace, Context) = - val msg = - s"Mutating ${otherObj.show} during initialization of ${currentObj.show}.\n" + - "Mutating other static objects during the initialization of one static object is forbidden. " + Trace.show - - report.warning(msg, Trace.position) - - def errorReadOtherStaticObject(currentObj: ClassSymbol, otherObj: ClassSymbol)(using Trace, Context) = - val msg = - "Reading mutable state of " + otherObj.show + " during initialization of " + currentObj.show + ".\n" + - "Reading mutable state of other static objects is forbidden as it breaks initialization-time irrelevance. 
" + Trace.show - - report.warning(msg, Trace.position) + def printTraceWhenMultiple(trace: Trace)(using Context): String = + if trace.toVector.size > 1 then + Trace.buildStacktrace(trace, "The mutable state is created through: " + System.lineSeparator()) + else "" + + val mutateErrorSet: mutable.Set[(ClassSymbol, ClassSymbol)] = mutable.Set.empty + def errorMutateOtherStaticObject(currentObj: ClassSymbol, addr: Heap.Addr)(using Trace, Context) = + val otherObj = addr.owner + val addr_trace = addr.getTrace + if mutateErrorSet.add((currentObj, otherObj)) then + val msg = + s"Mutating ${otherObj.show} during initialization of ${currentObj.show}.\n" + + "Mutating other static objects during the initialization of one static object is forbidden. " + Trace.show + + printTraceWhenMultiple(addr_trace) + + report.warning(msg, Trace.position) + + val readErrorSet: mutable.Set[(ClassSymbol, ClassSymbol)] = mutable.Set.empty + def errorReadOtherStaticObject(currentObj: ClassSymbol, addr: Heap.Addr)(using Trace, Context) = + val otherObj = addr.owner + val addr_trace = addr.getTrace + if readErrorSet.add((currentObj, otherObj)) then + val msg = + "Reading mutable state of " + otherObj.show + " during initialization of " + currentObj.show + ".\n" + + "Reading mutable state of other static objects is forbidden as it breaks initialization-time irrelevance. " + Trace.show + + printTraceWhenMultiple(addr_trace) + + report.warning(msg, Trace.position) diff --git a/compiler/src/dotty/tools/dotc/transform/init/Util.scala b/compiler/src/dotty/tools/dotc/transform/init/Util.scala index 70390028e84f..756fd1a0a8e7 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Util.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Util.scala @@ -78,6 +78,13 @@ object Util: case _ => None + object TypeCast: + def unapply(tree: Tree)(using Context): Option[(Tree, Type)] = + tree match + case TypeApply(Select(qual, _), typeArgs) if tree.symbol.isTypeCast => + Some(qual, typeArgs.head.tpe) + case _ => None + def resolve(cls: ClassSymbol, sym: Symbol)(using Context): Symbol = log("resove " + cls + ", " + sym, printer, (_: Symbol).show): if sym.isEffectivelyFinal then sym else sym.matchingMember(cls.appliedRef) diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index 012f2affffc1..e1603761f08b 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -773,7 +773,7 @@ object SpaceEngine { } private def exhaustivityCheckable(sel: Tree)(using Context): Boolean = { - val seen = collection.mutable.Set.empty[Type] + val seen = collection.mutable.Set.empty[Symbol] // Possible to check everything, but be compatible with scalac by default def isCheckable(tp: Type): Boolean = @@ -789,7 +789,7 @@ object SpaceEngine { tpw.isRef(defn.BooleanClass) || classSym.isAllOf(JavaEnum) || classSym.is(Case) && { - if seen.add(tpw) then productSelectorTypes(tpw, sel.srcPos).exists(isCheckable(_)) + if seen.add(classSym) then productSelectorTypes(tpw, sel.srcPos).exists(isCheckable(_)) else true // recursive case class: return true and other members can still fail the check } diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 8a1db87a4f92..82f4c89ae203 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -638,7 +638,7 @@ trait 
Applications extends Compatibility { defaultArg.tpe.widen match case _: MethodOrPoly if testOnly => matchArgs(args1, formals1, n + 1) case _ => matchArgs(args1, addTyped(treeToArg(defaultArg)), n + 1) - else if methodType.isContextualMethod && ctx.mode.is(Mode.ImplicitsEnabled) then + else if methodType.isImplicitMethod && ctx.mode.is(Mode.ImplicitsEnabled) then matchArgs(args1, addTyped(treeToArg(implicitArg)), n + 1) else missingArg(n) @@ -1492,7 +1492,7 @@ trait Applications extends Compatibility { val dummyArg = dummyTreeOfType(ownType) val (newUnapplyFn, unapplyApp) = - val unapplyAppCall = withMode(Mode.NoInline): + val unapplyAppCall = typedExpr(untpd.TypedSplice(Apply(unapplyFn, dummyArg :: Nil))) inlinedUnapplyFnAndApp(dummyArg, unapplyAppCall) diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 5fc63f4575e3..56f67574a72d 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -1082,14 +1082,14 @@ trait Checking { /** If `tree` is an application of a new-style implicit conversion (using the apply * method of a `scala.Conversion` instance), check that the expected type is - * a convertible formal parameter type or that implicit conversions are enabled. + * annotated with @$into or that implicit conversions are enabled. */ def checkImplicitConversionUseOK(tree: Tree, expected: Type)(using Context): Unit = val sym = tree.symbol if sym.name == nme.apply && sym.owner.derivesFrom(defn.ConversionClass) && !sym.info.isErroneous - && !expected.isConvertibleParam + && !expected.isInto then def conv = methPart(tree) match case Select(qual, _) => qual.symbol.orElse(sym.owner) @@ -1250,30 +1250,6 @@ trait Checking { def checkDerivedValueClass(clazz: Symbol, stats: List[Tree])(using Context): Unit = Checking.checkDerivedValueClass(clazz, stats) - /** Given a parent `parent` of a class `cls`, if `parent` is a trait check that - * the superclass of `cls` derived from the superclass of `parent`. - * - * An exception is made if `cls` extends `Any`, and `parent` is `java.io.Serializable` - * or `java.lang.Comparable`. These two classes are treated by Scala as universal - * traits. E.g. the following is OK: - * - * ... extends Any with java.io.Serializable - * - * The standard library relies on this idiom. - */ - def checkTraitInheritance(parent: Symbol, cls: ClassSymbol, pos: SrcPos)(using Context): Unit = - parent match { - case parent: ClassSymbol if parent.is(Trait) => - val psuper = parent.superClass - val csuper = cls.superClass - val ok = csuper.derivesFrom(psuper) || - parent.is(JavaDefined) && csuper == defn.AnyClass && - (parent == defn.JavaSerializableClass || parent == defn.ComparableClass) - if (!ok) - report.error(em"illegal trait inheritance: super$csuper does not derive from $parent's super$psuper", pos) - case _ => - } - /** Check that case classes are not inherited by case classes. 
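Returning to `checkImplicitConversionUseOK` above: the user-facing rule is that applying a `scala.Conversion` given implicitly requires either a suitably `into`-marked expected type or the `implicitConversions` language feature. A small example of the feature-import route, with made-up names:

```scala
import scala.language.implicitConversions // without this (or an `into` target), the call below is flagged

final case class UserId(value: Long)
given Conversion[Long, UserId] = (n: Long) => UserId(n)

def lookup(id: UserId): String = s"user ${id.value}"

@main def conversionDemo(): Unit =
  println(lookup(42L)) // the given Conversion[Long, UserId] is inserted implicitly
```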
*/ def checkCaseInheritance(parent: Symbol, caseCls: ClassSymbol, pos: SrcPos)(using Context): Unit = @@ -1650,7 +1626,6 @@ trait NoChecking extends ReChecking { override def checkParentCall(call: Tree, caller: ClassSymbol)(using Context): Unit = () override def checkSimpleKinded(tpt: Tree)(using Context): Tree = tpt override def checkDerivedValueClass(clazz: Symbol, stats: List[Tree])(using Context): Unit = () - override def checkTraitInheritance(parentSym: Symbol, cls: ClassSymbol, pos: SrcPos)(using Context): Unit = () override def checkCaseInheritance(parentSym: Symbol, caseCls: ClassSymbol, pos: SrcPos)(using Context): Unit = () override def checkNoForwardDependencies(vparams: List[ValDef])(using Context): Unit = () override def checkMembersOK(tp: Type, pos: SrcPos)(using Context): Type = tp diff --git a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala index c22d03ca77d7..1e0907ee74a6 100644 --- a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala @@ -24,54 +24,13 @@ class CrossVersionChecks extends MiniPhase: // warnings after the first, but I think it'd be better if we didn't have to // arbitrarily choose one as more important than the other. private def checkUndesiredProperties(sym: Symbol, pos: SrcPos)(using Context): Unit = - checkDeprecated(sym, pos) - checkExperimentalRef(sym, pos) + checkRef(sym, pos) val xMigrationValue = ctx.settings.Xmigration.value if xMigrationValue != NoScalaVersion then checkMigration(sym, pos, xMigrationValue) end checkUndesiredProperties - /**Skip warnings for synthetic members of case classes during declaration and - * scan the chain of outer declaring scopes from the current context - * a deprecation warning will be skipped if one the following holds - * for a given declaring scope: - * - the symbol associated with the scope is also deprecated. - * - if and only if `sym` is an enum case, the scope is either - * a module that declares `sym`, or the companion class of the - * module that declares `sym`. - */ - def skipWarning(sym: Symbol)(using Context): Boolean = - - /** is the owner an enum or its companion and also the owner of sym */ - def isEnumOwner(owner: Symbol)(using Context) = - // pre: sym is an enumcase - if owner.isEnumClass then owner.companionClass eq sym.owner - else if owner.is(ModuleClass) && owner.companionClass.isEnumClass then owner eq sym.owner - else false - - def isDeprecatedOrEnum(owner: Symbol)(using Context) = - // pre: sym is an enumcase - owner.isDeprecated || isEnumOwner(owner) - - (ctx.owner.is(Synthetic) && sym.is(CaseClass)) - || ctx.owner.ownersIterator.exists(if sym.isEnumCase then isDeprecatedOrEnum else _.isDeprecated) - end skipWarning - - - /** If @deprecated is present, and the point of reference is not enclosed - * in either a deprecated member or a scala bridge method, issue a warning. 
-   */
-  private def checkDeprecated(sym: Symbol, pos: SrcPos)(using Context): Unit =
-
-    // Also check for deprecation of the companion class for synthetic methods
-    val toCheck = sym :: (if sym.isAllOf(SyntheticMethod) then sym.owner.companionClass :: Nil else Nil)
-    for sym <- toCheck; annot <- sym.getAnnotation(defn.DeprecatedAnnot) do
-      if !skipWarning(sym) then
-        val msg = annot.argumentConstant(0).map(": " + _.stringValue).getOrElse("")
-        val since = annot.argumentConstant(1).map(" since " + _.stringValue).getOrElse("")
-        report.deprecationWarning(em"${sym.showLocated} is deprecated${since}${msg}", pos)
-
   private def checkExperimentalAnnots(sym: Symbol)(using Context): Unit =
     if sym.exists && !sym.isInExperimentalScope then
       for annot <- sym.annotations if annot.symbol.isExperimental do
@@ -160,11 +119,11 @@ class CrossVersionChecks extends MiniPhase:
       tpe.foreachPart {
         case TypeRef(_, sym: Symbol) =>
           if tree.span.isSourceDerived then
-            checkDeprecated(sym, tree.srcPos)
+            checkDeprecatedRef(sym, tree.srcPos)
             checkExperimentalRef(sym, tree.srcPos)
         case TermRef(_, sym: Symbol) =>
           if tree.span.isSourceDerived then
-            checkDeprecated(sym, tree.srcPos)
+            checkDeprecatedRef(sym, tree.srcPos)
             checkExperimentalRef(sym, tree.srcPos)
         case _ =>
       }
@@ -186,9 +145,55 @@ object CrossVersionChecks:
   val name: String = "crossVersionChecks"
   val description: String = "check issues related to deprecated and experimental"
+  /** Check that a reference to a definition with symbol `sym` meets the cross-version constraints
+   *  for `@deprecated` and `@experimental`.
+   */
+  def checkRef(sym: Symbol, pos: SrcPos)(using Context): Unit =
+    checkDeprecatedRef(sym, pos)
+    checkExperimentalRef(sym, pos)
+
   /** Check that a reference to an experimental definition with symbol `sym` is only
    *  used in an experimental scope
    */
-  def checkExperimentalRef(sym: Symbol, pos: SrcPos)(using Context): Unit =
+  private[CrossVersionChecks] def checkExperimentalRef(sym: Symbol, pos: SrcPos)(using Context): Unit =
     if sym.isExperimental && !ctx.owner.isInExperimentalScope then
       Feature.checkExperimentalDef(sym, pos)
+
+  /** If @deprecated is present, and the point of reference is not enclosed
+   *  in either a deprecated member or a scala bridge method, issue a warning.
+   */
+  private[CrossVersionChecks] def checkDeprecatedRef(sym: Symbol, pos: SrcPos)(using Context): Unit =
+
+    // Also check for deprecation of the companion class for synthetic methods
+    val toCheck = sym :: (if sym.isAllOf(SyntheticMethod) then sym.owner.companionClass :: Nil else Nil)
+    for sym <- toCheck; annot <- sym.getAnnotation(defn.DeprecatedAnnot) do
+      if !skipWarning(sym) then
+        val msg = annot.argumentConstant(0).map(": " + _.stringValue).getOrElse("")
+        val since = annot.argumentConstant(1).map(" since " + _.stringValue).getOrElse("")
+        report.deprecationWarning(em"${sym.showLocated} is deprecated${since}${msg}", pos)
+
+  /** Skip warnings for synthetic members of case classes during declaration and
+   *  scan the chain of outer declaring scopes from the current context;
+   *  a deprecation warning will be skipped if one of the following holds
+   *  for a given declaring scope:
+   *  - the symbol associated with the scope is also deprecated.
+   *  - if and only if `sym` is an enum case, the scope is either
+   *    a module that declares `sym`, or the companion class of the
+   *    module that declares `sym`.
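The user-visible behaviour documented above is unchanged by this move: roughly, a reference to a deprecated symbol from inside another deprecated definition is not reported. A small illustration, using made-up API names:

```scala
@deprecated("use newApi instead", "1.0")
def oldApi(): Int = 1

@deprecated("scheduled for removal", "1.0")
def legacyCaller(): Int = oldApi() // no warning: the enclosing definition is itself deprecated

def freshCaller(): Int = oldApi()  // a deprecation warning is reported for this reference

@main def deprecationDemo(): Unit =
  println(freshCaller())
```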
+   */
+  private def skipWarning(sym: Symbol)(using Context): Boolean =
+
+    /** is the owner an enum or its companion and also the owner of sym */
+    def isEnumOwner(owner: Symbol)(using Context) =
+      // pre: sym is an enumcase
+      if owner.isEnumClass then owner.companionClass eq sym.owner
+      else if owner.is(ModuleClass) && owner.companionClass.isEnumClass then owner eq sym.owner
+      else false
+
+    def isDeprecatedOrEnum(owner: Symbol)(using Context) =
+      // pre: sym is an enumcase
+      owner.isDeprecated || isEnumOwner(owner)
+
+    (ctx.owner.is(Synthetic) && sym.is(CaseClass))
+    || ctx.owner.ownersIterator.exists(if sym.isEnumCase then isDeprecatedOrEnum else _.isDeprecated)
+  end skipWarning
diff --git a/compiler/src/dotty/tools/dotc/typer/Deriving.scala b/compiler/src/dotty/tools/dotc/typer/Deriving.scala
index 6def1ecc30a8..f3be1dcff766 100644
--- a/compiler/src/dotty/tools/dotc/typer/Deriving.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Deriving.scala
@@ -165,7 +165,7 @@ trait Deriving {
       // case (a) ... see description above
       val derivedParams = clsParams.dropRight(instanceArity)
       val instanceType =
-        if (instanceArity == clsArity) clsType.etaExpand(clsParams)
+        if (instanceArity == clsArity) clsType.etaExpand
         else {
           val derivedParamTypes = derivedParams.map(_.typeRef)
diff --git a/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala
index 2c441c2f915e..b09580d51943 100644
--- a/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala
+++ b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala
@@ -122,7 +122,10 @@ abstract class Lifter {
     case TypeApply(fn, targs) =>
       cpy.TypeApply(tree)(liftApp(defs, fn), targs)
     case Select(pre, name) if isPureRef(tree) =>
-      cpy.Select(tree)(liftPrefix(defs, pre), name)
+      val liftedPrefix =
+        if tree.symbol.is(HasDefaultParams) then liftPrefix(defs, pre)
+        else liftNonIdempotentPrefix(defs, pre)
+      cpy.Select(tree)(liftedPrefix, name)
     case Block(stats, expr) =>
       liftApp(defs ++= stats, expr)
     case New(tpt) =>
@@ -138,8 +141,26 @@ abstract class Lifter {
    *
    *  unless `pre` is idempotent.
    */
-  def liftPrefix(defs: mutable.ListBuffer[Tree], tree: Tree)(using Context): Tree =
+  def liftNonIdempotentPrefix(defs: mutable.ListBuffer[Tree], tree: Tree)(using Context): Tree =
     if (isIdempotentExpr(tree)) tree else lift(defs, tree)
+
+  /** Lift prefix `pre` of an application `pre.f(...)` to
+   *
+   *     val x0 = pre
+   *     x0.f(...)
+   *
+   *  unless `pre` is an idempotent reference, a `this` reference, a literal value, or the prefix of an `init` (`New` tree).
+   *
+   *  Note that default arguments will refer to the prefix; we do not want
+   *  to re-evaluate a complex expression each time we access a getter.
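To see why the prefix must be lifted when the callee has default parameters, here is the user-level behaviour this preserves, in a sketch with made-up names: the prefix expression is evaluated once even though every default argument reads it.

```scala
var prefixEvaluations = 0

class Box(val value: Int):
  def addAll(a: Int = value, b: Int = value): Int = a + b

def makeBox(): Box =
  prefixEvaluations += 1
  Box(21)

@main def liftPrefixDemo(): Unit =
  // The compiler lifts `makeBox()` into a temporary, so both default getters
  // read the same Box and the side effect happens exactly once.
  val sum = makeBox().addAll()
  println((sum, prefixEvaluations)) // (42, 1)
```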
+ */ + def liftPrefix(defs: mutable.ListBuffer[Tree], tree: Tree)(using Context): Tree = + tree match + case tree: Literal => tree + case tree: This => tree + case tree: New => tree // prefix of call + case tree: RefTree if isIdempotentExpr(tree) => tree + case _ => lift(defs, tree) } /** No lifting at all */ diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index c3bf2dd822c9..5162b3fed1b9 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -661,15 +661,9 @@ trait ImplicitRunInfo: case t: TypeLambda => for p <- t.paramRefs do partSeen += p traverseChildren(t) - case t: MatchType => - traverseChildren(t) - traverse(t.normalized) - case MatchType.InDisguise(mt) - if !t.isInstanceOf[LazyRef] // skip recursive applications (eg. Tuple.Map) - => - traverse(mt) case t => traverseChildren(t) + traverse(t.normalized) catch case ex: Throwable => handleRecursive("collectParts of", t.show, ex) def apply(tp: Type): collection.Set[Type] = @@ -1308,35 +1302,36 @@ trait Implicits: case alt1: SearchSuccess => var diff = compareAlternatives(alt1, alt2) assert(diff <= 0) // diff > 0 candidates should already have been eliminated in `rank` - if diff == 0 && alt1.isExtension && alt2.isExtension then - // Fall back: if both results are extension method applications, - // compare the extension methods instead of their wrappers. - def stripExtension(alt: SearchSuccess) = methPart(stripApply(alt.tree)).tpe - (stripExtension(alt1), stripExtension(alt2)) match - case (ref1: TermRef, ref2: TermRef) => - // ref1 and ref2 might refer to type variables owned by - // alt1.tstate and alt2.tstate respectively, to compare the - // alternatives correctly we need a TyperState that includes - // constraints from both sides, see - // tests/*/extension-specificity2.scala for test cases. - val constraintsIn1 = alt1.tstate.constraint ne ctx.typerState.constraint - val constraintsIn2 = alt2.tstate.constraint ne ctx.typerState.constraint - def exploreState(alt: SearchSuccess): TyperState = - alt.tstate.fresh(committable = false) - val comparisonState = - if constraintsIn1 && constraintsIn2 then - exploreState(alt1).mergeConstraintWith(alt2.tstate) - else if constraintsIn1 then - exploreState(alt1) - else if constraintsIn2 then - exploreState(alt2) - else - ctx.typerState - - diff = inContext(ctx.withTyperState(comparisonState)) { - compare(ref1, ref2) - } - case _ => + if diff == 0 && alt2.isExtension then + if alt1.isExtension then + // Fall back: if both results are extension method applications, + // compare the extension methods instead of their wrappers. + def stripExtension(alt: SearchSuccess) = methPart(stripApply(alt.tree)).tpe + (stripExtension(alt1), stripExtension(alt2)) match + case (ref1: TermRef, ref2: TermRef) => + // ref1 and ref2 might refer to type variables owned by + // alt1.tstate and alt2.tstate respectively, to compare the + // alternatives correctly we need a TyperState that includes + // constraints from both sides, see + // tests/*/extension-specificity2.scala for test cases. 
+ val constraintsIn1 = alt1.tstate.constraint ne ctx.typerState.constraint + val constraintsIn2 = alt2.tstate.constraint ne ctx.typerState.constraint + def exploreState(alt: SearchSuccess): TyperState = + alt.tstate.fresh(committable = false) + val comparisonState = + if constraintsIn1 && constraintsIn2 then + exploreState(alt1).mergeConstraintWith(alt2.tstate) + else if constraintsIn1 then + exploreState(alt1) + else if constraintsIn2 then + exploreState(alt2) + else + ctx.typerState + + diff = inContext(ctx.withTyperState(comparisonState)): + compare(ref1, ref2) + else // alt1 is a conversion, prefer extension alt2 over it + diff = -1 if diff < 0 then alt2 else if diff > 0 then alt1 else SearchFailure(new AmbiguousImplicits(alt1, alt2, pt, argument), span) diff --git a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala index 7615fbda9f0a..33643a0fae2f 100644 --- a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala +++ b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala @@ -158,7 +158,7 @@ trait ImportSuggestions: // Candidates that are already available without explicit import because they // are already provided by the context (imported or inherited) or because they // are in the implicit scope of `pt`. - val alreadyAvailableCandidates: Set[Symbol] = { + lazy val alreadyAvailableCandidates: Set[Symbol] = { val wildProto = wildApprox(pt) val contextualCandidates = ctx.implicits.eligible(wildProto) val implicitScopeCandidates = ctx.run.nn.implicitScope(wildProto).eligible diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 3af87d311d9d..24721f1cd758 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -1200,7 +1200,7 @@ class Namer { typer: Typer => val forwarderName = checkNoConflict(alias.toTypeName, isPrivate = false, span) var target = pathType.select(sym) if target.typeParams.nonEmpty then - target = target.etaExpand(target.typeParams) + target = target.etaExpand newSymbol( cls, forwarderName, MandatoryExportTypeFlags | (sym.flags & RetainedExportTypeFlags), @@ -1542,17 +1542,20 @@ class Namer { typer: Typer => end parentType /** Check parent type tree `parent` for the following well-formedness conditions: - * (1) It must be a class type with a stable prefix (@see checkClassTypeWithStablePrefix) + * (1) It must be a class type with a stable prefix (unless `isJava`) (@see checkClassTypeWithStablePrefix) * (2) If may not derive from itself * (3) The class is not final * (4) If the class is sealed, it is defined in the same compilation unit as the current class + * (unless defined in Java. 
See JEP-409) + * + * @param isJava If true, the parent type is in Java mode, and we do not require a stable prefix */ - def checkedParentType(parent: untpd.Tree): Type = { + def checkedParentType(parent: untpd.Tree, isJava: Boolean): Type = { val ptype = parentType(parent)(using completerCtx.superCallContext).dealiasKeepAnnots if (cls.isRefinementClass) ptype else { val pt = checkClassType(ptype, parent.srcPos, - traitReq = parent ne parents.head, stablePrefixReq = true) + traitReq = parent ne parents.head, stablePrefixReq = !isJava) if (pt.derivesFrom(cls)) { val addendum = parent match { case Select(qual: Super, _) if Feature.migrateTo3 => @@ -1567,7 +1570,7 @@ class Namer { typer: Typer => if pclazz.is(Final) then report.error(ExtendFinalClass(cls, pclazz), cls.srcPos) else if pclazz.isEffectivelySealed && pclazz.associatedFile != cls.associatedFile then - if pclazz.is(Sealed) then + if pclazz.is(Sealed) && !pclazz.is(JavaDefined) then report.error(UnableToExtendSealedClass(pclazz), cls.srcPos) else if sourceVersion.isAtLeast(future) then checkFeature(nme.adhocExtensions, @@ -1621,7 +1624,9 @@ class Namer { typer: Typer => val parentTypes = defn.adjustForTuple(cls, cls.typeParams, defn.adjustForBoxedUnit(cls, addUsingTraits( - ensureFirstIsClass(cls, parents.map(checkedParentType(_))) + locally: + val isJava = ctx.isJava + ensureFirstIsClass(cls, parents.map(checkedParentType(_, isJava))) ) ) ) diff --git a/compiler/src/dotty/tools/dotc/typer/Nullables.scala b/compiler/src/dotty/tools/dotc/typer/Nullables.scala index cc3fac3a6ffd..914fc0acb89d 100644 --- a/compiler/src/dotty/tools/dotc/typer/Nullables.scala +++ b/compiler/src/dotty/tools/dotc/typer/Nullables.scala @@ -283,7 +283,7 @@ object Nullables: */ def usedOutOfOrder(using Context): Boolean = val refSym = ref.symbol - val refOwner = refSym.owner + val refOwner = refSym.maybeOwner @tailrec def recur(s: Symbol): Boolean = s != NoSymbol diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 53ae7438d381..46c12b244fbb 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -17,8 +17,6 @@ import Inferencing.* import ErrorReporting.* import util.SourceFile import TypeComparer.necessarySubType -import dotty.tools.dotc.core.Flags.Transparent -import dotty.tools.dotc.config.{ Feature, SourceVersion } import scala.annotation.internal.sharable import dotty.tools.dotc.util.Spans.{NoSpan, Span} @@ -108,7 +106,7 @@ object ProtoTypes { if !res then ctx.typerState.constraint = savedConstraint res - /** Constrain result with special case if `meth` is a transparent inlineable method in an inlineable context. + /** Constrain result with special case if `meth` is an inlineable method in an inlineable context. * In that case, we should always succeed and not constrain type parameters in the expected type, * because the actual return type can be a subtype of the currently known return type. * However, we should constrain parameters of the declared return type. 
This distinction is @@ -116,21 +114,8 @@ object ProtoTypes { */ def constrainResult(meth: Symbol, mt: Type, pt: Type)(using Context): Boolean = if (Inlines.isInlineable(meth)) { - // Stricter behaviour in 3.4+: do not apply `wildApprox` to non-transparent inlines - if (Feature.sourceVersion.isAtLeast(SourceVersion.`3.4`)) { - if (meth.is(Transparent)) { - constrainResult(mt, wildApprox(pt)) - // do not constrain the result type of transparent inline methods - true - } else { - constrainResult(mt, pt) - } - } else { - // Best-effort to fix https://github.com/scala/scala3/issues/9685 in the 3.3.x series - // while preserving source compatibility as much as possible - val methodMatchedType = constrainResult(mt, wildApprox(pt)) - meth.is(Transparent) || methodMatchedType - } + constrainResult(mt, wildApprox(pt)) + true } else constrainResult(mt, pt) } @@ -407,7 +392,9 @@ object ProtoTypes { case closureDef(mdef) => hasInnerErrors(mdef.rhs) case _ => t.existsSubTree { t1 => - if t1.typeOpt.isError && t1.span.toSynthetic != t.span.toSynthetic then + if t1.typeOpt.isError + && t.span.toSynthetic != t1.span.toSynthetic + && t.typeOpt != t1.typeOpt then typr.println(i"error subtree $t1 of $t with ${t1.typeOpt}, spans = ${t1.span}, ${t.span}") true else diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index f0914a9f6664..173d5e6b1f7e 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -8,7 +8,7 @@ import Symbols.*, Types.*, Contexts.*, Flags.*, Names.*, NameOps.*, NameKinds.* import StdNames.*, Denotations.*, Phases.*, SymDenotations.* import NameKinds.DefaultGetterName import util.Spans.* -import scala.collection.mutable +import scala.collection.{mutable, immutable} import ast.* import MegaPhase.* import config.Printers.{checks, noPrinter, capt} @@ -368,6 +368,52 @@ object RefChecks { && atPhase(typerPhase): loop(member.info.paramInfoss, other.info.paramInfoss) + /** A map of all occurrences of `into` in a member type. + * Key: number of parameter carrying `into` annotation(s) + * Value: A list of all depths of into annotations, where each + * function arrow increases the depth. 
+ * Example: + * def foo(x: into A, y: => [X] => into (x: X) => into B): C + * produces the map + * (0 -> List(0), 1 -> List(1, 2)) + */ + type IntoOccurrenceMap = immutable.Map[Int, List[Int]] + + def intoOccurrences(tp: Type): IntoOccurrenceMap = + + def traverseInfo(depth: Int, tp: Type): List[Int] = tp match + case AnnotatedType(tp, annot) if annot.symbol == defn.IntoParamAnnot => + depth :: traverseInfo(depth, tp) + case AppliedType(tycon, arg :: Nil) if tycon.typeSymbol == defn.RepeatedParamClass => + traverseInfo(depth, arg) + case defn.FunctionOf(_, resType, _) => + traverseInfo(depth + 1, resType) + case RefinedType(parent, rname, mt: MethodOrPoly) => + traverseInfo(depth, mt) + case tp: MethodOrPoly => + traverseInfo(depth + 1, tp.resType) + case tp: ExprType => + traverseInfo(depth, tp.resType) + case _ => + Nil + + def traverseParams(n: Int, formals: List[Type], acc: IntoOccurrenceMap): IntoOccurrenceMap = + if formals.isEmpty then acc + else + val occs = traverseInfo(0, formals.head) + traverseParams(n + 1, formals.tail, if occs.isEmpty then acc else acc + (n -> occs)) + + def traverse(n: Int, tp: Type, acc: IntoOccurrenceMap): IntoOccurrenceMap = tp match + case tp: PolyType => + traverse(n, tp.resType, acc) + case tp: MethodType => + traverse(n + tp.paramInfos.length, tp.resType, traverseParams(n, tp.paramInfos, acc)) + case _ => + acc + + traverse(0, tp, immutable.Map.empty) + end intoOccurrences + val checker = if makeOverridingPairsChecker == null then OverridingPairsChecker(clazz, self) else makeOverridingPairsChecker(clazz, self) @@ -377,7 +423,7 @@ object RefChecks { */ def checkOverride(checkSubType: (Type, Type) => Context ?=> Boolean, member: Symbol, other: Symbol): Unit = def memberTp(self: Type) = - if (member.isClass) TypeAlias(member.typeRef.etaExpand(member.typeParams)) + if (member.isClass) TypeAlias(member.typeRef.etaExpand) else self.memberInfo(member) def otherTp(self: Type) = self.memberInfo(other) @@ -572,6 +618,8 @@ object RefChecks { overrideError(i"needs to be declared with @targetName(${"\""}${other.targetName}${"\""}) so that external names match") else overrideError("cannot have a @targetName annotation since external names would be different") + else if intoOccurrences(memberTp(self)) != intoOccurrences(otherTp(self)) then + overrideError("has different occurrences of `into` modifiers", compareTypes = true) else if other.is(ParamAccessor) && !isInheritedAccessor(member, other) then // (1.12) report.errorOrMigrationWarning( em"cannot override val parameter ${other.showLocated}", @@ -654,8 +702,8 @@ object RefChecks { val missingMethods = grouped.toList flatMap { case (name, syms) => - val withoutSetters = syms filterNot (_.isSetter) - if (withoutSetters.nonEmpty) withoutSetters else syms + syms.filterConserve(!_.isSetter) + .distinctBy(_.signature) // Avoid duplication for similar definitions (#19731) } def stubImplementations: List[String] = { @@ -666,7 +714,7 @@ object RefChecks { if (regrouped.tail.isEmpty) membersStrings(regrouped.head._2) - else (regrouped.sortBy("" + _._1.name) flatMap { + else (regrouped.sortBy(_._1.name.toString()) flatMap { case (owner, members) => ("// Members declared in " + owner.fullName) +: membersStrings(members) :+ "" }).init @@ -685,7 +733,7 @@ object RefChecks { return } - for (member <- missing) { + for (member <- missingMethods) { def showDclAndLocation(sym: Symbol) = s"${sym.showDcl} in ${sym.owner.showLocated}" def undefined(msg: String) = @@ -1002,9 +1050,9 @@ object RefChecks { end checkNoPrivateOverrides def 
checkVolatile(sym: Symbol)(using Context): Unit = - if sym.isVolatile && !sym.is(Mutable) then + if sym.isVolatile && !sym.is(Mutable) then report.warning(VolatileOnVal(), sym.srcPos) - + /** Check that unary method definition do not receive parameters. * They can only receive inferred parameters such as type parameters and implicit parameters. */ diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala index 8bae3a2fb3a7..96c5e57dde0e 100644 --- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -156,7 +156,10 @@ trait TypeAssigner { val pre = maybeSkolemizePrefix(qualType, name) val mbr = if ctx.isJava then - ctx.javaFindMember(name, pre) + // don't look in the companion class here if qual is a module, + // we use backtracking to instead change the qual to the companion class + // if this fails. + ctx.javaFindMember(name, pre, lookInCompanion = false) else qualType.findMember(name, pre) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index f1db302e958c..0b05bcd078ff 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -461,6 +461,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if (isSelfDenot(defDenot)) curOwner.enclosingClass.thisType else if (ctx.isJava && defDenot.symbol.isStatic) { defDenot.symbol.namedType + } + else if (ctx.isJava && defDenot.symbol.isClass) { + // in a java context a raw identifier to a class should have a widened prefix. + defDenot.symbol.javaTypeRef } else { val effectiveOwner = if (curOwner.isTerm && defDenot.symbol.maybeOwner.isType) @@ -627,7 +631,13 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case checkedType: NamedType if !prefixIsElidable(checkedType) => ref(checkedType).withSpan(tree.span) case _ => - tree.withType(checkedType) + def isScalaModuleRef = checkedType match + case moduleRef: TypeRef if moduleRef.symbol.is(ModuleClass, butNot = JavaDefined) => true + case _ => false + if ctx.isJava && isScalaModuleRef then + cpy.Ident(tree)(tree.name.unmangleClassName).withType(checkedType) + else + tree.withType(checkedType) val tree2 = toNotNullTermRef(tree1, pt) checkLegalValue(tree2, pt) tree2 @@ -710,9 +720,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val elems = qual.tpe.widenTermRefExpr.tupleElementTypes.getOrElse(Nil) typedSelect(tree, pt, qual.cast(defn.tupleType(elems))) else - val tree1 = tryExtensionOrConversion( + val tree1 = { + if selName.isTypeName then EmptyTree + else tryExtensionOrConversion( tree, pt, IgnoredProto(pt), qual, ctx.typerState.ownedVars, this, inSelect = true) - .orElse { + }.orElse { if ctx.gadt.isNarrowing then // try GADT approximation if we're trying to select a member // Member lookup cannot take GADTs into account b/c of cache, so we @@ -761,26 +773,30 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def typeSelectOnTerm(using Context): Tree = val qual = typedExpr(tree.qualifier, shallowSelectionProto(tree.name, pt, this, tree.nameSpan)) - typedSelect(tree, pt, qual).withSpan(tree.span).computeNullable() - - def javaSelectOnType(qual: Tree)(using Context) = - // semantic name conversion for `O$` in java code - if !qual.symbol.is(JavaDefined) then - val tree2 = untpd.cpy.Select(tree)(qual, tree.name.unmangleClassName) - assignType(tree2, qual) + if ctx.isJava then + 
javaSelection(qual) else - assignType(cpy.Select(tree)(qual, tree.name), qual) + typedSelect(tree, pt, qual).withSpan(tree.span).computeNullable() + + def javaSelection(qual: Tree)(using Context) = + val tree1 = assignType(cpy.Select(tree)(qual, tree.name), qual) + tree1.tpe match + case moduleRef: TypeRef if moduleRef.symbol.is(ModuleClass, butNot = JavaDefined) => + // handle unmangling of module names (Foo$ -> Foo[ModuleClass]) + cpy.Select(tree)(qual, tree.name.unmangleClassName).withType(moduleRef) + case _ => + tree1 def tryJavaSelectOnType(using Context): Tree = tree.qualifier match { case sel @ Select(qual, name) => val qual1 = untpd.cpy.Select(sel)(qual, name.toTypeName) val qual2 = typedType(qual1, WildcardType) - javaSelectOnType(qual2) + javaSelection(qual2) case id @ Ident(name) => val qual1 = untpd.cpy.Ident(id)(name.toTypeName) val qual2 = typedType(qual1, WildcardType) - javaSelectOnType(qual2) + javaSelection(qual2) case _ => errorTree(tree, em"cannot convert to type selection") // will never be printed due to fallback @@ -1459,14 +1475,14 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if isErasedClass then arg.withAddedFlags(Erased) else arg } return typedDependent(newParams) - val resTpt = TypeTree(mt.nonDependentResultApprox).withSpan(body.span) - val typeArgs = appDef.termParamss.head.map(_.tpt) :+ resTpt val core = if mt.hasErasedParams then TypeTree(defn.PolyFunctionClass.typeRef) else + val resTpt = TypeTree(mt.nonDependentResultApprox).withSpan(body.span) + val paramTpts = appDef.termParamss.head.map(p => TypeTree(p.tpt.tpe).withSpan(p.tpt.span)) val funSym = defn.FunctionSymbol(numArgs, isContextual, isImpure) val tycon = TypeTree(funSym.typeRef) - AppliedTypeTree(tycon, typeArgs) + AppliedTypeTree(tycon, paramTpts :+ resTpt) RefinedTypeTree(core, List(appDef), ctx.owner.asClass) end typedDependent @@ -1621,15 +1637,16 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case untpd.Annotated(scrut1, _) => isParamRef(scrut1) case untpd.Ident(id) => id == params.head.name fnBody match - case untpd.Match(scrut, untpd.CaseDef(untpd.Tuple(elems), untpd.EmptyTree, rhs) :: Nil) + case untpd.Match(scrut, cases @ untpd.CaseDef(untpd.Tuple(elems), untpd.EmptyTree, rhs) :: Nil) if scrut.span.isSynthetic && isParamRef(scrut) && elems.hasSameLengthAs(protoFormals) => // If `pt` is N-ary function type, convert synthetic lambda // x$1 => x$1 match case (a1, ..., aN) => e // to // (a1, ..., aN) => e val params1 = desugar.patternsToParams(elems) - if params1.hasSameLengthAs(elems) then - desugared = cpy.Function(tree)(params1, rhs) + desugared = if params1.hasSameLengthAs(elems) + then cpy.Function(tree)(params1, rhs) + else desugar.makeCaseLambda(cases, desugar.MatchCheck.IrrefutablePatDef, protoFormals.length) case _ => if desugared.isEmpty then @@ -1809,7 +1826,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // check `pat` here and throw away the result. 
val gadtCtx: Context = ctx.fresh.setFreshGADTBounds val pat1 = typedPattern(pat, selType)(using gadtCtx) - val Typed(_, tpt) = tpd.unbind(tpd.unsplice(pat1)): @unchecked + val tpt = tpd.unbind(tpd.unsplice(pat1)) match + case Typed(_, tpt) => tpt + case UnApply(fun, _, p1 :: _) if fun.symbol == defn.TypeTest_unapply => p1 instantiateMatchTypeProto(pat1, pt) match { case defn.MatchCase(patternTp, _) => tpt.tpe frozen_=:= patternTp case _ => false @@ -2330,9 +2349,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer && checkedArgs(1).tpe.derivesFrom(defn.RuntimeExceptionClass) then report.error(em"throws clause cannot be defined for RuntimeException", checkedArgs(1).srcPos) - else if tycon == defn.IntoType then - // is defined in package scala but this should be hidden from user programs - report.error(em"not found: ", tpt1.srcPos) else if (ctx.isJava) if tycon eq defn.ArrayClass then checkedArgs match { @@ -2373,7 +2389,15 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer report.error(MatchTypeScrutineeCannotBeHigherKinded(sel1Tpe), sel1.srcPos) val pt1 = if (bound1.isEmpty) pt else bound1.tpe val cases1 = tree.cases.mapconserve(typedTypeCase(_, sel1Tpe, pt1)) - assignType(cpy.MatchTypeTree(tree)(bound1, sel1, cases1), bound1, sel1, cases1) + val bound2 = if tree.bound.isEmpty then + val lub = cases1.foldLeft(defn.NothingType: Type): (acc, case1) => + if !acc.exists then NoType + else if case1.body.tpe.isProvisional then NoType + else acc | case1.body.tpe + if lub.exists then TypeTree(lub, inferred = true) + else bound1 + else bound1 + assignType(cpy.MatchTypeTree(tree)(bound2, sel1, cases1), bound2, sel1, cases1) } def typedByNameTypeTree(tree: untpd.ByNameTypeTree)(using Context): ByNameTypeTree = tree.result match @@ -2983,8 +3007,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer /** Translate infix operation expression `l op r` to * * l.op(r) if `op` is left-associative - * { val x = l; r.op(x) } if `op` is right-associative call-by-value and `l` is impure - * r.op(l) if `op` is right-associative call-by-name or `l` is pure + * { val x = l; r.op(x) } if `op` is right-associative call-by-value and `l` is impure, and not in a quote pattern + * r.op(l) if `op` is right-associative call-by-name, or `l` is pure, or in a quote pattern * * Translate infix type `l op r` to `op[l, r]` * Translate infix pattern `l op r` to `op(l, r)` @@ -3001,7 +3025,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typedUnApply(cpy.Apply(tree)(op, l :: r :: Nil), pt) else { val app = typedApply(desugar.binop(l, op, r), pt) - if op.name.isRightAssocOperatorName then + if op.name.isRightAssocOperatorName && !ctx.mode.is(Mode.QuotedExprPattern) then val defs = new mutable.ListBuffer[Tree] def lift(app: Tree): Tree = (app: @unchecked) match case Apply(fn, args) => @@ -3983,10 +4007,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // Reasons NOT to eta expand: // - we reference a constructor + // - we reference an inline implicit def (see #19862) // - we are in a pattern // - the current tree is a synthetic apply which is not expandable (eta-expasion would simply undo that) if arity >= 0 && !tree.symbol.isConstructor + && !tree.symbol.isAllOf(InlineImplicitMethod) && !ctx.mode.is(Mode.Pattern) && !(isSyntheticApply(tree) && !functionExpected) then @@ -4269,16 +4295,18 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def adaptType(tp: Type): Tree = { val tree1 = - if ((pt eq 
AnyTypeConstructorProto) || tp.typeParamSymbols.isEmpty) tree + if (pt eq AnyTypeConstructorProto) || tp.typeParamSymbols.isEmpty then tree else { - val tp1 = - if (ctx.isJava) - // Cook raw type - AppliedType(tree.tpe, tp.typeParams.map(Function.const(TypeBounds.empty))) - else - // Eta-expand higher-kinded type - tree.tpe.etaExpand(tp.typeParamSymbols) - tree.withType(tp1) + if (ctx.isJava) + // Cook raw type + val typeArgs = tp.typeParams.map(Function.const(TypeBounds.empty)) + val tree1 = AppliedTypeTree(tree, typeArgs.map(TypeTree(_))) + val tp1 = AppliedType(tree.tpe, typeArgs) + tree1.withType(tp1) + else + // Eta-expand higher-kinded type + val tp1 = tree.tpe.etaExpand + tree.withType(tp1) } if (ctx.mode.is(Mode.Pattern) || ctx.mode.isQuotedPattern || tree1.tpe <:< pt) tree1 else err.typeMismatch(tree1, pt) diff --git a/compiler/src/dotty/tools/dotc/util/Signatures.scala b/compiler/src/dotty/tools/dotc/util/Signatures.scala index 9131f4f761a2..0bd407261125 100644 --- a/compiler/src/dotty/tools/dotc/util/Signatures.scala +++ b/compiler/src/dotty/tools/dotc/util/Signatures.scala @@ -221,7 +221,7 @@ object Signatures { val funSymbol = fun.symbol val alternatives = if funSymbol.isLocalToBlock then List(funSymbol.denot) else funSymbol.owner.info.member(funSymbol.name).alternatives - val alternativeIndex = alternatives.map(_.symbol).indexOf(funSymbol) max 0 + val alternativeIndex = bestAlternative(alternatives, params, paramssListIndex) (alternativeIndex, alternatives) if alternativeIndex < alternatives.length then @@ -660,24 +660,56 @@ object Signatures { case msg: NoMatchingOverload => msg.alternatives case _ => Nil - val userParamsTypes = params.map(_.tpe) // Assign a score to each alternative (how many parameters are correct so far), and // use that to determine what is the current active signature. + val alternativeIndex = bestAlternative(alternatives, params, paramssIndex) + (alternativeIndex, alternatives) + } + + /** + * Given a list of alternatives, and a list of parameters, returns the index of the best + * alternative, i.e. the alternative that has the most formal parameters matching the given + * arguments and the least number of formal parameters. + * + * @param alternatives The list of alternatives to inspect. + * @param params The parameters that were given at the call site. + * @param paramssIndex Index of paramss we are currently in. + * + * @return The index of the best alternative. + */ + private def bestAlternative(alternatives: List[SingleDenotation], params: List[tpd.Tree], paramssIndex: Int)(using Context): Int = + val userParamsTypes = params.map( + _.tpe match + case e: PreviousErrorType => + /** + * In case: + * def foo(i: Int, s: String): Unit = ??? + * def foo(i: Boolean, s: Int, x: Double): Unit = ??? 
+ * foo(false, @@) + * + * `false` has error type: `Required: Int, Found: Boolean` + */ + e.msg match + case tm: TypeMismatch => + tm.found + case _ => e + case t => t + ) val alternativesScores = alternatives.map { alt => val alreadyCurriedBonus = if (alt.symbol.paramSymss.length > paramssIndex) 1 else 0 - alt.info.stripPoly match - case tpe: MethodType => alreadyCurriedBonus + - userParamsTypes.zip(tpe.paramInfos).takeWhile{ case (t0, t1) => t0 <:< t1 }.size - case _ => 0 + alt.info.stripPoly match + case tpe: MethodType => + val score = alreadyCurriedBonus + + userParamsTypes + .zip(tpe.paramInfos) + .takeWhile { case (t0, t1) =>t0 <:< t1 } + .size + (score, -tpe.paramInfos.length) + case _ => (0, 0) } - - val bestAlternative = - if (alternativesScores.isEmpty) 0 - else alternativesScores.zipWithIndex.maxBy(_._1)._2 - - (bestAlternative, alternatives) - } + if (alternativesScores.isEmpty) 0 + else alternativesScores.zipWithIndex.maxBy(_._1)._2 } diff --git a/compiler/src/dotty/tools/io/ClassfileWriterOps.scala b/compiler/src/dotty/tools/io/ClassfileWriterOps.scala deleted file mode 100644 index c2107ded6f51..000000000000 --- a/compiler/src/dotty/tools/io/ClassfileWriterOps.scala +++ /dev/null @@ -1,50 +0,0 @@ -package dotty.tools.io - -import dotty.tools.io.* -import dotty.tools.dotc.core.Decorators.* -import dotty.tools.dotc.core.Contexts.* -import dotty.tools.dotc.report -import scala.language.unsafeNulls -import scala.annotation.constructorOnly - - -/** Experimental usage - writes bytes to JarArchives */ -class ClassfileWriterOps(outputDir: JarArchive)(using @constructorOnly ictx: Context) { - - type InternalName = String - - // if non-null, classfiles are written to a jar instead of the output directory - private val jarWriter: JarWriter | Null = - val localCtx = ictx - outputDir.underlyingSource.map { source => - if outputDir.isEmpty then - new Jar(source.file).jarWriter() - else inContext(localCtx) { - // Writing to non-empty JAR might be an undefined behaviour, e.g. 
in case if other files where - // created using `AbstractFile.bufferedOutputStream`instead of JarWriter - report.warning(em"Tried to write to non-empty JAR: $source") - null - } - }.getOrElse( - inContext(localCtx) { - report.warning(em"tried to create a file writer for $outputDir, but it had no underlying source.") - null - } - ) - - def writeTasty(className: InternalName, bytes: Array[Byte]): Unit = - writeToJar(className, bytes, ".tasty") - - private def writeToJar(className: InternalName, bytes: Array[Byte], suffix: String): Unit = { - if (jarWriter == null) return - val path = className + suffix - val out = jarWriter.newOutputStream(path) - try out.write(bytes, 0, bytes.length) - finally out.flush() - } - - def close(): Unit = { - if (jarWriter != null) jarWriter.close() - outputDir.close() - } -} diff --git a/compiler/src/dotty/tools/io/FileWriters.scala b/compiler/src/dotty/tools/io/FileWriters.scala new file mode 100644 index 000000000000..4f03194fa4ce --- /dev/null +++ b/compiler/src/dotty/tools/io/FileWriters.scala @@ -0,0 +1,254 @@ +package dotty.tools.io + +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.em +import dotty.tools.dotc.report +import dotty.tools.io.AbstractFile +import dotty.tools.io.JarArchive +import dotty.tools.io.PlainFile + +import java.io.BufferedOutputStream +import java.io.DataOutputStream +import java.io.FileOutputStream +import java.io.IOException +import java.nio.ByteBuffer +import java.nio.channels.ClosedByInterruptException +import java.nio.channels.FileChannel +import java.nio.file.FileAlreadyExistsException +import java.nio.file.Files +import java.nio.file.Path +import java.nio.file.StandardOpenOption +import java.nio.file.attribute.FileAttribute +import java.util +import java.util.concurrent.ConcurrentHashMap +import java.util.zip.CRC32 +import java.util.zip.Deflater +import java.util.zip.ZipEntry +import java.util.zip.ZipOutputStream +import scala.language.unsafeNulls + +/** Copied from `dotty.tools.backend.jvm.ClassfileWriters` but no `PostProcessorFrontendAccess` needed */ +object FileWriters { + type InternalName = String + type NullableFile = AbstractFile | Null + + /** + * The interface to writing classfiles. GeneratedClassHandler calls these methods to generate the + * directory and files that are created, and eventually calls `close` when the writing is complete. + * + * The companion object is responsible for constructing a appropriate and optimal implementation for + * the supplied settings. + * + * Operations are threadsafe. + */ + sealed trait TastyWriter { + /** + * Write a `.tasty` file. + * + * @param name the internal name of the class, e.g. "scala.Option" + */ + def writeTasty(name: InternalName, bytes: Array[Byte])(using Context): NullableFile + + /** + * Close the writer. Behavior is undefined after a call to `close`. 
+ */ + def close(): Unit + + protected def classToRelativePath(className: InternalName): String = + className.replace('.', '/').nn + ".tasty" + } + + object TastyWriter { + + def apply(output: AbstractFile)(using Context): TastyWriter = { + + // In Scala 2, depending on the cardinality of distinct output dirs, MultiClassWriter could have been used + // In Dotty we always use a single output directory + val basicTastyWriter = new SingleTastyWriter( + FileWriter(output, None) + ) + + basicTastyWriter + } + + private final class SingleTastyWriter(underlying: FileWriter) extends TastyWriter { + + override def writeTasty(className: InternalName, bytes: Array[Byte])(using Context): NullableFile = { + underlying.writeFile(classToRelativePath(className), bytes) + } + + override def close(): Unit = underlying.close() + } + + } + + sealed trait FileWriter { + def writeFile(relativePath: String, bytes: Array[Byte])(using Context): NullableFile + def close(): Unit + } + + object FileWriter { + def apply(file: AbstractFile, jarManifestMainClass: Option[String])(using Context): FileWriter = + if (file.isInstanceOf[JarArchive]) { + val jarCompressionLevel = ctx.settings.YjarCompressionLevel.value + // Writing to a non-empty JAR might be undefined behaviour, e.g. in case other files were + // created using `AbstractFile.bufferedOutputStream` instead of JarWriter + val jarFile = file.underlyingSource.getOrElse{ + throw new IllegalStateException("No underlying source for jar") + } + assert(file.isEmpty, s"Unsafe writing to non-empty JAR: $jarFile") + new JarEntryWriter(jarFile, jarManifestMainClass, jarCompressionLevel) + } + else if (file.isVirtual) new VirtualFileWriter(file) + else if (file.isDirectory) new DirEntryWriter(file.file.toPath.nn) + else throw new IllegalStateException(s"don't know how to handle an output of $file [${file.getClass}]") + } + + private final class JarEntryWriter(file: AbstractFile, mainClass: Option[String], compressionLevel: Int) extends FileWriter { + //keep these imports local - avoid confusion with scala naming + import java.util.jar.Attributes.Name.{MANIFEST_VERSION, MAIN_CLASS} + import java.util.jar.{JarOutputStream, Manifest} + + val storeOnly = compressionLevel == Deflater.NO_COMPRESSION + + val jarWriter: JarOutputStream = { + import scala.util.Properties.* + val manifest = new Manifest + val attrs = manifest.getMainAttributes.nn + attrs.put(MANIFEST_VERSION, "1.0") + attrs.put(ScalaCompilerVersion, versionNumberString) + mainClass.foreach(c => attrs.put(MAIN_CLASS, c)) + + val jar = new JarOutputStream(new BufferedOutputStream(new FileOutputStream(file.file), 64000), manifest) + jar.setLevel(compressionLevel) + if (storeOnly) jar.setMethod(ZipOutputStream.STORED) + jar + } + + lazy val crc = new CRC32 + + override def writeFile(relativePath: String, bytes: Array[Byte])(using Context): NullableFile = this.synchronized { + val entry = new ZipEntry(relativePath) + if (storeOnly) { + // When using compression method `STORED`, the ZIP spec requires the CRC and compressed/ + // uncompressed sizes to be written before the data. The JarOutputStream could compute the + // values while writing the data, but not patch them into the stream after the fact. So we + // need to pre-compute them here. The compressed size is taken from size. + // https://stackoverflow.com/questions/1206970/how-to-create-uncompressed-zip-archive-in-java/5868403 + // With compression method `DEFLATED` JarOutputStream computes and sets the values. 
+ entry.setSize(bytes.length) + crc.reset() + crc.update(bytes) + entry.setCrc(crc.getValue) + } + jarWriter.putNextEntry(entry) + try jarWriter.write(bytes, 0, bytes.length) + finally jarWriter.flush() + null + } + + override def close(): Unit = this.synchronized(jarWriter.close()) + } + + private final class DirEntryWriter(base: Path) extends FileWriter { + val builtPaths = new ConcurrentHashMap[Path, java.lang.Boolean]() + val noAttributes = Array.empty[FileAttribute[?]] + private val isWindows = scala.util.Properties.isWin + + private def checkName(component: Path)(using Context): Unit = if (isWindows) { + val specials = raw"(?i)CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9]".r + val name = component.toString + def warnSpecial(): Unit = report.warning(em"path component is special Windows device: ${name}") + specials.findPrefixOf(name).foreach(prefix => if (prefix.length == name.length || name(prefix.length) == '.') warnSpecial()) + } + + def ensureDirForPath(baseDir: Path, filePath: Path)(using Context): Unit = { + import java.lang.Boolean.TRUE + val parent = filePath.getParent + if (!builtPaths.containsKey(parent)) { + parent.iterator.forEachRemaining(checkName) + try Files.createDirectories(parent, noAttributes*) + catch { + case e: FileAlreadyExistsException => + // `createDirectories` reports this exception if `parent` is an existing symlink to a directory + // but that's fine for us (and common enough, `scalac -d /tmp` on mac targets a symlink). + if (!Files.isDirectory(parent)) + throw new FileConflictException(s"Can't create directory $parent; there is an existing (non-directory) file in its path", e) + } + builtPaths.put(baseDir, TRUE) + var current = parent + while ((current ne null) && (null ne builtPaths.put(current, TRUE))) { + current = current.getParent + } + } + checkName(filePath.getFileName()) + } + + // the common case is that we are creating a new file, and on MS Windows the create-and-truncate is expensive + // because there is no option in the Windows API that corresponds to this, so the truncate is applied as a separate call + // even if the file is new. 
+ // as this is rare, it's best to always try to create a new file, and if that fails, then open with truncate + + private val fastOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE) + private val fallbackOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING) + + override def writeFile(relativePath: String, bytes: Array[Byte])(using Context): NullableFile = { + val path = base.resolve(relativePath) + try { + ensureDirForPath(base, path) + val os = if (isWindows) { + try FileChannel.open(path, fastOpenOptions) + catch { + case _: FileAlreadyExistsException => FileChannel.open(path, fallbackOpenOptions) + } + } else FileChannel.open(path, fallbackOpenOptions) + + try os.write(ByteBuffer.wrap(bytes), 0L) + catch { + case ex: ClosedByInterruptException => + try Files.deleteIfExists(path) // don't leave an empty or half-written classfile around after an interrupt + catch { case _: Throwable => () } + throw ex + } + os.close() + } catch { + case e: FileConflictException => + report.error(em"error writing ${path.toString}: ${e.getMessage}") + case e: java.nio.file.FileSystemException => + if (ctx.settings.Ydebug.value) e.printStackTrace() + report.error(em"error writing ${path.toString}: ${e.getClass.getName} ${e.getMessage}") + } + AbstractFile.getFile(path) + } + + override def close(): Unit = () + } + + private final class VirtualFileWriter(base: AbstractFile) extends FileWriter { + private def getFile(base: AbstractFile, path: String): AbstractFile = { + def ensureDirectory(dir: AbstractFile): AbstractFile = + if (dir.isDirectory) dir + else throw new FileConflictException(s"${base.path}/${path}: ${dir.path} is not a directory") + val components = path.split('/') + var dir = base + for (i <- 0 until components.length - 1) dir = ensureDirectory(dir) subdirectoryNamed components(i).toString + ensureDirectory(dir) fileNamed components.last.toString + } + + private def writeBytes(outFile: AbstractFile, bytes: Array[Byte]): Unit = { + val out = new DataOutputStream(outFile.bufferedOutput) + try out.write(bytes, 0, bytes.length) + finally out.close() + } + + override def writeFile(relativePath: String, bytes: Array[Byte])(using Context): NullableFile = { + val outFile = getFile(base, relativePath) + writeBytes(outFile, bytes) + outFile + } + override def close(): Unit = () + } + + /** Can't output a file due to the state of the file system. */ + class FileConflictException(msg: String, cause: Throwable = null) extends IOException(msg, cause) +} diff --git a/compiler/src/dotty/tools/repl/Rendering.scala b/compiler/src/dotty/tools/repl/Rendering.scala index 517815615f2a..d5688d1038b4 100644 --- a/compiler/src/dotty/tools/repl/Rendering.scala +++ b/compiler/src/dotty/tools/repl/Rendering.scala @@ -50,36 +50,40 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): // We need to use the ScalaRunTime class coming from the scala-library // on the user classpath, and not the one available in the current // classloader, so we use reflection instead of simply calling - // `ScalaRunTime.replStringOf`. Probe for new API without extraneous newlines. - // For old API, try to clean up extraneous newlines by stripping suffix and maybe prefix newline. + // `ScalaRunTime.stringOf`. Also probe for new stringOf that does string quoting, etc. 
val scalaRuntime = Class.forName("scala.runtime.ScalaRunTime", true, myClassLoader) val renderer = "stringOf" - def stringOfMaybeTruncated(value: Object, maxElements: Int): String = { - try { - val meth = scalaRuntime.getMethod(renderer, classOf[Object], classOf[Int], classOf[Boolean]) - val truly = java.lang.Boolean.TRUE - meth.invoke(null, value, maxElements, truly).asInstanceOf[String] - } catch { - case _: NoSuchMethodException => - val meth = scalaRuntime.getMethod(renderer, classOf[Object], classOf[Int]) - meth.invoke(null, value, maxElements).asInstanceOf[String] - } - } - - (value: Object, maxElements: Int, maxCharacters: Int) => { - // `ScalaRuntime.stringOf` may truncate the output, in which case we want to indicate that fact to the user - // In order to figure out if it did get truncated, we invoke it twice - once with the `maxElements` that we - // want to print, and once without a limit. If the first is shorter, truncation did occur. - val notTruncated = stringOfMaybeTruncated(value, Int.MaxValue) - val maybeTruncatedByElementCount = stringOfMaybeTruncated(value, maxElements) - val maybeTruncated = truncate(maybeTruncatedByElementCount, maxCharacters) - - // our string representation may have been truncated by element and/or character count - // if so, append an info string - but only once - if (notTruncated.length == maybeTruncated.length) maybeTruncated - else s"$maybeTruncated ... large output truncated, print value to show all" - } - + val stringOfInvoker: (Object, Int) => String = + def richStringOf: (Object, Int) => String = + val method = scalaRuntime.getMethod(renderer, classOf[Object], classOf[Int], classOf[Boolean]) + val richly = java.lang.Boolean.TRUE // add a repl option for enriched output + (value, maxElements) => method.invoke(null, value, maxElements, richly).asInstanceOf[String] + def poorStringOf: (Object, Int) => String = + try + val method = scalaRuntime.getMethod(renderer, classOf[Object], classOf[Int]) + (value, maxElements) => method.invoke(null, value, maxElements).asInstanceOf[String] + catch case _: NoSuchMethodException => (value, maxElements) => String.valueOf(value).take(maxElements) + try richStringOf + catch case _: NoSuchMethodException => poorStringOf + def stringOfMaybeTruncated(value: Object, maxElements: Int): String = stringOfInvoker(value, maxElements) + + // require value != null + // `ScalaRuntime.stringOf` returns null iff value.toString == null, let caller handle that. + // `ScalaRuntime.stringOf` may truncate the output, in which case we want to indicate that fact to the user + // In order to figure out if it did get truncated, we invoke it twice - once with the `maxElements` that we + // want to print, and once without a limit. If the first is shorter, truncation did occur. + // Note that `stringOf` has new API in flight to handle truncation, see stringOfMaybeTruncated. + (value: Object, maxElements: Int, maxCharacters: Int) => + stringOfMaybeTruncated(value, Int.MaxValue) match + case null => null + case notTruncated => + val maybeTruncated = + val maybeTruncatedByElementCount = stringOfMaybeTruncated(value, maxElements) + truncate(maybeTruncatedByElementCount, maxCharacters) + // our string representation may have been truncated by element and/or character count + // if so, append an info string - but only once + if notTruncated.length == maybeTruncated.length then maybeTruncated + else s"$maybeTruncated ... 
large output truncated, print value to show all" } myClassLoader } @@ -90,13 +94,18 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): else str.substring(0, str.offsetByCodePoints(0, maxPrintCharacters - 1)) /** Return a String representation of a value we got from `classLoader()`. */ - private[repl] def replStringOf(value: Object)(using Context): String = + private[repl] def replStringOf(sym: Symbol, value: Object)(using Context): String = assert(myReplStringOf != null, "replStringOf should only be called on values creating using `classLoader()`, but `classLoader()` has not been called so far") val maxPrintElements = ctx.settings.VreplMaxPrintElements.valueIn(ctx.settingsState) val maxPrintCharacters = ctx.settings.VreplMaxPrintCharacters.valueIn(ctx.settingsState) - val res = myReplStringOf(value, maxPrintElements, maxPrintCharacters) - if res == null then "null // non-null reference has null-valued toString" else res + // stringOf returns null if value.toString returns null. Show some text as a fallback. + def fallback = s"""null // result of "${sym.name}.toString" is null""" + if value == null then "null" else + myReplStringOf(value, maxPrintElements, maxPrintCharacters) match + case null => fallback + case res => res + end if /** Load the value of the symbol using reflection. * @@ -108,17 +117,15 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): val symValue = resObj .getDeclaredMethods.find(_.getName == sym.name.encode.toString) .flatMap(result => rewrapValueClass(sym.info.classSymbol, result.invoke(null))) - val valueString = symValue.map(replStringOf) + symValue + .filter(_ => sym.is(Flags.Method) || sym.info != defn.UnitType) + .map(value => stripReplPrefix(replStringOf(sym, value))) - if (!sym.is(Flags.Method) && sym.info == defn.UnitType) - None + private def stripReplPrefix(s: String): String = + if (s.startsWith(REPL_WRAPPER_NAME_PREFIX)) + s.drop(REPL_WRAPPER_NAME_PREFIX.length).dropWhile(c => c.isDigit || c == '$') else - valueString.map { s => - if (s.startsWith(REPL_WRAPPER_NAME_PREFIX)) - s.drop(REPL_WRAPPER_NAME_PREFIX.length).dropWhile(c => c.isDigit || c == '$') - else - s - } + s /** Rewrap value class to their Wrapper class * diff --git a/compiler/src/dotty/tools/repl/ReplDriver.scala b/compiler/src/dotty/tools/repl/ReplDriver.scala index 5226ef0b4546..0d64c88d9228 100644 --- a/compiler/src/dotty/tools/repl/ReplDriver.scala +++ b/compiler/src/dotty/tools/repl/ReplDriver.scala @@ -31,6 +31,7 @@ import dotty.tools.dotc.util.{SourceFile, SourcePosition} import dotty.tools.dotc.{CompilationUnit, Driver} import dotty.tools.dotc.config.CompilerCommand import dotty.tools.io.* +import dotty.tools.repl.Rendering.showUser import dotty.tools.runner.ScalaClassLoader.* import org.jline.reader.* @@ -149,11 +150,36 @@ class ReplDriver(settings: Array[String], /** Blockingly read a line, getting back a parse result */ def readLine()(using state: State): ParseResult = { - val completer: Completer = { (_, line, candidates) => - val comps = completions(line.cursor, line.line, state) - candidates.addAll(comps.asJava) - } given Context = state.context + val completer: Completer = { (lineReader, line, candidates) => + def makeCandidate(label: String) = { + new Candidate( + /* value = */ label, + /* displ = */ stripBackTicks(label), // displayed value + /* group = */ null, // can be used to group completions together + /* descr = */ null, // TODO use for documentation? 
+ /* suffix = */ null, + /* key = */ null, + /* complete = */ false // if true adds space when completing + ) + } + val comps = completionsWithSignatures(line.cursor, line.line, state) + candidates.addAll(comps.map(_.label).distinct.map(makeCandidate).asJava) + val lineWord = line.word() + comps.filter(c => c.label == lineWord && c.symbols.nonEmpty) match + case Nil => + case exachMatches => + val terminal = lineReader.nn.getTerminal + lineReader.callWidget(LineReader.CLEAR) + terminal.writer.println() + exachMatches.foreach: exact => + exact.symbols.foreach: sym => + terminal.writer.println(SyntaxHighlighting.highlight(sym.showUser)) + lineReader.callWidget(LineReader.REDRAW_LINE) + lineReader.callWidget(LineReader.REDISPLAY) + terminal.flush() + } + try { val line = terminal.readLine(completer) ParseResult(line) @@ -229,24 +255,25 @@ class ReplDriver(settings: Array[String], else label - /** Extract possible completions at the index of `cursor` in `expr` */ + @deprecated("Use completionsWithSignatures instead", "3.4.2") protected final def completions(cursor: Int, expr: String, state0: State): List[Candidate] = - def makeCandidate(label: String) = { - + completionsWithSignatures(cursor, expr, state0).map: c => new Candidate( - /* value = */ label, - /* displ = */ stripBackTicks(label), // displayed value + /* value = */ c.label, + /* displ = */ stripBackTicks(c.label), // displayed value /* group = */ null, // can be used to group completions together /* descr = */ null, // TODO use for documentation? /* suffix = */ null, /* key = */ null, /* complete = */ false // if true adds space when completing ) - } + end completions + /** Extract possible completions at the index of `cursor` in `expr` */ + protected final def completionsWithSignatures(cursor: Int, expr: String, state0: State): List[Completion] = if expr.startsWith(":") then ParseResult.commands.collect { - case command if command._1.startsWith(expr) => makeCandidate(command._1) + case command if command._1.startsWith(expr) => Completion(command._1, "", List()) } else given state: State = newRun(state0) @@ -259,11 +286,10 @@ class ReplDriver(settings: Array[String], unit.tpdTree = tpdTree given Context = state.context.fresh.setCompilationUnit(unit) val srcPos = SourcePosition(file, Span(cursor)) - val completions = try Completion.completions(srcPos)._2 catch case NonFatal(_) => Nil - completions.map(_.label).distinct.map(makeCandidate) + try Completion.completions(srcPos)._2 catch case NonFatal(_) => Nil } .getOrElse(Nil) - end completions + end completionsWithSignatures protected def interpret(res: ParseResult, quiet: Boolean = false)(using state: State): State = { res match { diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index 04d8d7bc51a0..256940645ec3 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -24,6 +24,7 @@ import scala.quoted.runtime.impl.printers.* import scala.reflect.TypeTest import dotty.tools.dotc.core.NameKinds.ExceptionBinderName +import dotty.tools.dotc.transform.TreeChecker object QuotesImpl { @@ -253,6 +254,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler (cdef.name.toString, cdef.constructor, cdef.parents, cdef.self, rhs.body) def module(module: Symbol, parents: List[Tree /* Term | TypeTree */], body: List[Statement]): (ValDef, ClassDef) = { + if xCheckMacro then 
TreeChecker.checkParents(module.moduleClass.asClass, parents) val cls = module.moduleClass val clsDef = ClassDef(cls, parents, body) val newCls = Apply(Select(New(TypeIdent(cls)), cls.primaryConstructor), Nil) @@ -466,7 +468,14 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler withDefaultPos(tpd.ref(tp).asInstanceOf[tpd.RefTree]) def apply(sym: Symbol): Ref = assert(sym.isTerm) - withDefaultPos(tpd.ref(sym).asInstanceOf[tpd.RefTree]) + val refTree = tpd.ref(sym) match + case t @ tpd.This(ident) => // not a RefTree, so we need to work around this - issue #19732 + // ident in `This` can be a TypeIdent of sym, so we manually prepare the ref here, + // knowing that the owner is actually `This`. + val term = Select(This(sym.owner), sym) + term.asInstanceOf[tpd.RefTree] + case other => other.asInstanceOf[tpd.RefTree] + withDefaultPos(refTree) end Ref type Ident = tpd.Ident @@ -1105,7 +1114,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object TypeTreeTypeTest extends TypeTest[Tree, TypeTree]: def unapply(x: Tree): Option[TypeTree & x.type] = x match - case x: (tpd.TypeBoundsTree & x.type) => None + case TypeBoundsTreeTypeTest(_) => None case x: (tpd.Tree & x.type) if x.isType => Some(x) case _ => None end TypeTreeTypeTest diff --git a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala index b27016045051..9aec7fc17ed7 100644 --- a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala +++ b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala @@ -188,7 +188,7 @@ object SourceCode { case Select(newTree: New, _) => printType(newTree.tpe)(using Some(cdef.symbol)) case parent: Term => - throw new MatchError(parent.show(using Printer.TreeStructure)) + cannotBeShownAsSource(parent.show(using Printer.TreeStructure)) } def printSeparated(list: List[Tree /* Term | TypeTree */]): Unit = list match { @@ -536,7 +536,7 @@ object SourceCode { printCaseDef(tree) case _ => - throw new MatchError(tree.show(using Printer.TreeStructure)) + cannotBeShownAsSource(tree.show(using Printer.TreeStructure)) } @@ -934,7 +934,7 @@ object SourceCode { case Ident("unapply" | "unapplySeq") => this += fun.symbol.owner.fullName.stripSuffix("$") case _ => - throw new MatchError(fun.show(using Printer.TreeStructure)) + cannotBeShownAsSource(fun.show(using Printer.TreeStructure)) } inParens(printPatterns(patterns, ", ")) @@ -953,7 +953,7 @@ object SourceCode { printTree(v) case _ => - throw new MatchError(pattern.show(using Printer.TreeStructure)) + cannotBeShownAsSource(pattern.show(using Printer.TreeStructure)) } @@ -1079,7 +1079,7 @@ object SourceCode { printTypeTree(tpt) case _ => - throw new MatchError(tree.show(using Printer.TreeStructure)) + cannotBeShownAsSource(tree.show(using Printer.TreeStructure)) } @@ -1248,7 +1248,7 @@ object SourceCode { printType(rhs) case _ => - throw new MatchError(tpe.show(using Printer.TypeReprStructure)) + cannotBeShownAsSource(tpe.show(using Printer.TypeReprStructure)) } private def printSelector(sel: Selector): this.type = sel match { @@ -1287,7 +1287,7 @@ object SourceCode { val sym = annot.tpe.typeSymbol sym != Symbol.requiredClass("scala.forceInline") && sym.maybeOwner != Symbol.requiredPackage("scala.annotation.internal") - case x => throw new MatchError(x.show(using Printer.TreeStructure)) + case x => cannotBeShownAsSource(x.show(using Printer.TreeStructure)) } printAnnotations(annots) if (annots.nonEmpty) this += 
" " @@ -1458,6 +1458,9 @@ object SourceCode { } } + private def cannotBeShownAsSource(x: String): Nothing = + throw new Exception(s"$x does not have a source representation") + private object SpecialOp { def unapply(arg: Tree): Option[(String, List[Term])] = arg match { case arg @ Apply(fn, args) => diff --git a/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala b/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala index 8f9a9bd69a50..b8dfa833c437 100644 --- a/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala +++ b/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala @@ -76,7 +76,7 @@ class CoursierScalaTests: def emptyArgsEqualsRepl() = val output = CoursierScalaTests.csScalaCmd() - assertTrue(output.mkString("\n").contains("Unable to create a system terminal")) // Scala attempted to create REPL so we can assume it is working + assertTrue(output.mkString("\n").contains("Unable to create a terminal")) // Scala attempted to create REPL so we can assume it is working emptyArgsEqualsRepl() def run() = @@ -133,7 +133,7 @@ class CoursierScalaTests: def replWithArgs() = val output = CoursierScalaTests.csScalaCmd("-source", "3.0-migration") - assertTrue(output.mkString("\n").contains("Unable to create a system terminal")) // Scala attempted to create REPL so we can assume it is working + assertTrue(output.mkString("\n").contains("Unable to create a terminal")) // Scala attempted to create REPL so we can assume it is working replWithArgs() def argumentFile() = diff --git a/compiler/test-resources/repl/erased b/compiler/test-resources/repl/erased index 9a67c1963769..fd9067692dbe 100644 --- a/compiler/test-resources/repl/erased +++ b/compiler/test-resources/repl/erased @@ -1,2 +1,3 @@ +scala> import scala.language.experimental.erasedDefinitions scala> def f(erased a: Int): Int = ??? def f(erased a: Int): Int diff --git a/compiler/test-resources/repl/erased-implicit b/compiler/test-resources/repl/erased-implicit index 5b1ec654c2aa..43624a5a9a08 100644 --- a/compiler/test-resources/repl/erased-implicit +++ b/compiler/test-resources/repl/erased-implicit @@ -1,2 +1,3 @@ +scala> import scala.language.experimental.erasedDefinitions scala> def f(using erased a: Int): Int = ??? 
def f(using erased a: Int): Int diff --git a/compiler/test-resources/type-printer/test-definitions b/compiler/test-resources/type-printer/test-definitions index ce33846472cf..cdda5f65cb0e 100644 --- a/compiler/test-resources/type-printer/test-definitions +++ b/compiler/test-resources/type-printer/test-definitions @@ -19,5 +19,7 @@ scala> trait E scala> implicit def x: Int = 1 def x: Int +scala> import scala.language.experimental.erasedDefinitions + scala> erased def y: Int = 1 def y: Int diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index b0da78f0a1eb..32f8cdef1386 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -29,6 +29,7 @@ i16649-irrefutable.scala strict-pattern-bindings-3.0-migration.scala i17186b.scala i11982a.scala +i17255 # Tree is huge and blows stack for printing Text i7034.scala diff --git a/compiler/test/dotc/run-test-pickling.blacklist b/compiler/test/dotc/run-test-pickling.blacklist index 9f19b439135c..954a64db1b66 100644 --- a/compiler/test/dotc/run-test-pickling.blacklist +++ b/compiler/test/dotc/run-test-pickling.blacklist @@ -44,3 +44,5 @@ t6138 t6138-2 i12656.scala trait-static-forwarder +i17255 + diff --git a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala index 94d42952a6eb..51390e35b527 100644 --- a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala +++ b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala @@ -1733,6 +1733,58 @@ class DottyBytecodeTests extends DottyBytecodeTest { assertSameCode(instructions, expected) } } + + @Test def newInPrefixesOfDefaultParam = { + val source = + s"""class A: + | def f(x: Int = 1): Int = x + | + |class Test: + | def meth1() = (new A).f() + | def meth2() = { val a = new A; a.f() } + """.stripMargin + + checkBCode(source) { dir => + val clsIn = dir.lookupName("Test.class", directory = false).input + val clsNode = loadClassNode(clsIn) + val meth1 = getMethod(clsNode, "meth1") + val meth2 = getMethod(clsNode, "meth2") + + val instructions1 = instructionsFromMethod(meth1) + val instructions2 = instructionsFromMethod(meth2) + + assert(instructions1 == instructions2, + "`assert` was not properly inlined in `meth1`\n" + + diffInstructions(instructions1, instructions2)) + } + } + + @Test def newInDependentOfDefaultParam = { + val source = + s"""class A: + | def i: Int = 1 + | + |class Test: + | def f(a: A)(x: Int = a.i): Int = x + | def meth1() = f(new A)() + | def meth2() = { val a = new A; f(a)() } + """.stripMargin + + checkBCode(source) { dir => + val clsIn = dir.lookupName("Test.class", directory = false).input + val clsNode = loadClassNode(clsIn) + val meth1 = getMethod(clsNode, "meth1") + val meth2 = getMethod(clsNode, "meth2") + + val instructions1 = instructionsFromMethod(meth1) + val instructions2 = instructionsFromMethod(meth2) + + assert(instructions1 == instructions2, + "`assert` was not properly inlined in `meth1`\n" + + diffInstructions(instructions1, instructions2)) + } + } + } object invocationReceiversTestCode { diff --git a/compiler/test/dotty/tools/backend/jvm/PublicInBinaryTests.scala b/compiler/test/dotty/tools/backend/jvm/PublicInBinaryTests.scala index 25b46532e58b..eebb2b23247a 100644 --- a/compiler/test/dotty/tools/backend/jvm/PublicInBinaryTests.scala +++ b/compiler/test/dotty/tools/backend/jvm/PublicInBinaryTests.scala @@ -39,6 +39,11 @@ class PublicInBinaryTests extends 
DottyBytecodeTest { private def checkPublicClass(classNode: ClassNode): Unit = assert((classNode.access & privateOrProtectedOpcode) == 0) + override def initCtx = + val ctx0 = super.initCtx + ctx0.setSetting(ctx0.settings.experimental, true) + ctx0.setSetting(ctx0.settings.YnoExperimental, true) + @Test def publicInBinaryDef(): Unit = { val code = diff --git a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala index ab2adddbb16e..5e9a01a77ca7 100644 --- a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala @@ -206,7 +206,7 @@ class BootstrappedOnlyCompilationTests { object BootstrappedOnlyCompilationTests extends ParallelTesting { // Test suite configuration -------------------------------------------------- - def maxDuration = 60.seconds + def maxDuration = 100.seconds def numberOfSlaves = Runtime.getRuntime().availableProcessors() def safeMode = Properties.testsSafeMode def isInteractive = SummaryReport.isInteractive diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index 64c2bd3b8807..a96a4ea09102 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -31,7 +31,7 @@ class CompilationTests { @Test def pos: Unit = { implicit val testGroup: TestGroup = TestGroup("compilePos") var tests = List( - compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init", "-Wunused:all", "-Xlint:private-shadow", "-Xlint:type-parameter-shadow"), FileFilter.include(TestSources.posLintingAllowlist)), + compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init", "-Wunused:all", "-Wshadow:private-shadow", "-Wshadow:type-parameter-shadow"), FileFilter.include(TestSources.posLintingAllowlist)), compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init"), FileFilter.exclude(TestSources.posLintingAllowlist)), compileFilesInDir("tests/pos-deep-subtype", allowDeepSubtypes), compileFilesInDir("tests/pos-special/sourcepath/outer", defaultOptions.and("-sourcepath", "tests/pos-special/sourcepath")), diff --git a/compiler/test/dotty/tools/dotc/ScalaCommandTest.scala b/compiler/test/dotty/tools/dotc/ScalaCommandTest.scala index 53f063a9e767..852a232e4c6a 100644 --- a/compiler/test/dotty/tools/dotc/ScalaCommandTest.scala +++ b/compiler/test/dotty/tools/dotc/ScalaCommandTest.scala @@ -17,7 +17,7 @@ class ScalaCommandTest: def temporaryFolder = _temporaryFolder @Test def `Simple one parameter`: Unit = inContext { - val settings = config.ScalaSettings() + val settings = config.ScalaSettings val args = "-cp path/to/classes1:other/path/to/classes2 files".split(" ") val summary = ScalacCommand.distill(args, settings)() given SettingsState = summary.sstate @@ -26,7 +26,7 @@ class ScalaCommandTest: } @Test def `Unfold @file`: Unit = inContext { - val settings = config.ScalaSettings() + val settings = config.ScalaSettings val file = temporaryFolder.newFile("config") val writer = java.io.FileWriter(file); writer.write("-sourceroot myNewRoot someMoreFiles"); diff --git a/compiler/test/dotty/tools/dotc/SettingsTests.scala b/compiler/test/dotty/tools/dotc/SettingsTests.scala index 8c571a321548..8125a80f29f8 100644 --- a/compiler/test/dotty/tools/dotc/SettingsTests.scala +++ b/compiler/test/dotty/tools/dotc/SettingsTests.scala @@ -8,6 +8,7 @@ import vulpix.TestConfiguration import 
core.Contexts.{Context, ContextBase} import dotty.tools.dotc.config.Settings._ +import dotty.tools.dotc.config.ScalaSettingCategories._ import dotty.tools.vulpix.TestConfiguration.mkClasspath import java.nio.file._ @@ -43,8 +44,8 @@ class SettingsTests { @Test def acceptUnconstrained: Unit = object Settings extends SettingGroup: - val foo = StringSetting("-foo", "foo", "Foo", "a") - val bar = IntSetting("-bar", "Bar", 0) + val foo = StringSetting(RootSetting, "foo", "foo", "Foo", "a") + val bar = IntSetting(RootSetting, "bar", "Bar", 0) val args = List("-foo", "b", "-bar", "1") val summary = Settings.processArguments(args, true) @@ -72,7 +73,7 @@ class SettingsTests { @Test def `dont crash on many options`: Unit = object Settings extends SettingGroup: - val option = BooleanSetting("-option", "Some option") + val option = BooleanSetting(RootSetting, "option", "Some option") val limit = 6000 val args = List.fill(limit)("-option") @@ -87,7 +88,7 @@ class SettingsTests { @Test def `bad option warning consumes an arg`: Unit = object Settings extends SettingGroup: - val option = BooleanSetting("-option", "Some option") + val option = BooleanSetting(RootSetting, "option", "Some option") val args = List("-adoption", "dogs", "cats") val summary = Settings.processArguments(args, processAll = true) @@ -97,7 +98,7 @@ class SettingsTests { @Test def `bad option settings throws`: Unit = object Settings extends SettingGroup: - val option = BooleanSetting("-option", "Some option") + val option = BooleanSetting(RootSetting, "option", "Some option") def checkMessage(s: String): (Throwable => Boolean) = t => if t.getMessage == s then true @@ -112,12 +113,12 @@ class SettingsTests { @Test def validateChoices: Unit = object Settings extends SettingGroup: - val foo = ChoiceSetting("-foo", "foo", "Foo", List("a", "b"), "a") - val bar = IntChoiceSetting("-bar", "Bar", List(0, 1, 2), 0) - val baz = IntChoiceSetting("-baz", "Baz", 0 to 10, 10) + val foo = ChoiceSetting(RootSetting, "foo", "foo", "Foo", List("a", "b"), "a") + val bar = IntChoiceSetting(RootSetting, "bar", "Bar", List(0, 1, 2), 0) + val baz = IntChoiceSetting(RootSetting, "baz", "Baz", 0 to 10, 10) - val quux = ChoiceSetting("-quux", "quux", "Quux", List(), "") - val quuz = IntChoiceSetting("-quuz", "Quuz", List(), 0) + val quux = ChoiceSetting(RootSetting, "quux", "quux", "Quux", List(), "") + val quuz = IntChoiceSetting(RootSetting, "quuz", "Quuz", List(), 0) locally { val args = List("-foo", "b", "-bar", "1", "-baz", "5") @@ -169,7 +170,7 @@ class SettingsTests { @Test def `Allow IntSetting's to be set with a colon`: Unit = object Settings extends SettingGroup: - val foo = IntSetting("-foo", "foo", 80) + val foo = IntSetting(RootSetting, "foo", "foo", 80) import Settings._ val args = List("-foo:100") @@ -181,10 +182,10 @@ class SettingsTests { @Test def `Set BooleanSettings correctly`: Unit = object Settings extends SettingGroup: - val foo = BooleanSetting("-foo", "foo", false) - val bar = BooleanSetting("-bar", "bar", true) - val baz = BooleanSetting("-baz", "baz", false) - val qux = BooleanSetting("-qux", "qux", false) + val foo = BooleanSetting(RootSetting, "foo", "foo", false) + val bar = BooleanSetting(RootSetting, "bar", "bar", true) + val baz = BooleanSetting(RootSetting, "baz", "baz", false) + val qux = BooleanSetting(RootSetting, "qux", "qux", false) import Settings._ val args = List("-foo:true", "-bar:false", "-baz", "-qux:true", "-qux:false") diff --git a/compiler/test/dotty/tools/dotc/TastyBootstrapTests.scala 
b/compiler/test/dotty/tools/dotc/TastyBootstrapTests.scala index 50e07f388dc4..27311497de9c 100644 --- a/compiler/test/dotty/tools/dotc/TastyBootstrapTests.scala +++ b/compiler/test/dotty/tools/dotc/TastyBootstrapTests.scala @@ -60,7 +60,6 @@ class TastyBootstrapTests { val lib = compileList("lib", librarySources, defaultOptions.and("-Ycheck-reentrant", - "-language:experimental.erasedDefinitions", // support declaration of scala.compiletime.erasedValue // "-source", "future", // TODO: re-enable once library uses updated syntax for vararg splices, wildcard imports, and import renaming ))(libGroup) diff --git a/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala b/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala index a1014043724e..e958a5925fce 100644 --- a/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala +++ b/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala @@ -3,7 +3,7 @@ package config import CommandLineParser.tokenize import Settings._ - +import dotty.tools.dotc.config.ScalaSettingCategories._ import org.junit.Test import org.junit.Assert._ import core.Decorators.toMessage @@ -12,7 +12,7 @@ class ScalaSettingsTests: @Test def `A setting with aliases is accepted`: Unit = class MySettings extends SettingGroup: - val classpath: Setting[String] = PathSetting("-classpath", "Specify where to find user class files.", ".", aliases = List("--class-path", "-cp")) + val classpath: Setting[String] = PathSetting(RootSetting, "classpath", "Specify where to find user class files.", ".", aliases = List("--class-path", "-cp")) val settings = MySettings() val args = tokenize("-cp path/to/classes1:other/path/to/classes2") val summary = ArgsSummary(settings.defaultState, args, errors = Nil, warnings = Nil) @@ -25,7 +25,7 @@ class ScalaSettingsTests: @Test def `A multistring setting is multivalued`: Unit = class SUT extends SettingGroup: - val language: Setting[List[String]] = MultiStringSetting("-language", "feature", "Enable one or more language features.") + val language: Setting[List[String]] = MultiStringSetting(RootSetting, "language", "feature", "Enable one or more language features.") val sut = SUT() val args = tokenize("-language:implicitConversions,dynamics") val sumy = ArgsSummary(sut.defaultState, args, errors = Nil, warnings = Nil) @@ -39,7 +39,7 @@ class ScalaSettingsTests: @Test def `t9719 Apply -language more than once`: Unit = class SUT extends SettingGroup: - val language: Setting[List[String]] = MultiStringSetting("-language", "feature", "Enable one or more language features.") + val language: Setting[List[String]] = MultiStringSetting(RootSetting, "language", "feature", "Enable one or more language features.") val sut = SUT() val args = tokenize("-language:implicitConversions -language:dynamics") val sumy = ArgsSummary(sut.defaultState, args, errors = Nil, warnings = Nil) @@ -53,7 +53,7 @@ class ScalaSettingsTests: @Test def `Warn if multistring element is supplied multiply`: Unit = class SUT extends SettingGroup: - val language: Setting[List[String]] = MultiStringSetting("-language", "feature", "Enable one or more language features.") + val language: Setting[List[String]] = MultiStringSetting(RootSetting, "language", "feature", "Enable one or more language features.") val sut = SUT() val args = tokenize("-language:dynamics -language:implicitConversions -language:dynamics") val sumy = ArgsSummary(sut.defaultState, args, errors = Nil, warnings = Nil) @@ -67,7 +67,7 @@ class ScalaSettingsTests: @Test def `WConf setting is parsed`: Unit 
= import reporting.{Action, Diagnostic, NoExplanation} - val sets = new ScalaSettings + val sets = ScalaSettings val args = List("-Wconf:cat=deprecation:s,cat=feature:e", "-Wconf:msg=a problem\\.:s") val sumy = ArgsSummary(sets.defaultState, args, errors = Nil, warnings = Nil) val proc = sets.processArguments(sumy, processAll = true, skipped = Nil) @@ -85,7 +85,7 @@ class ScalaSettingsTests: @Test def `i18367 rightmost WConf flags take precedence over flags to the left`: Unit = import reporting.{Action, Diagnostic} - val sets = new ScalaSettings + val sets = ScalaSettings val args = List("-Wconf:cat=deprecation:e", "-Wconf:cat=deprecation:s") val sumy = ArgsSummary(sets.defaultState, args, errors = Nil, warnings = Nil) val proc = sets.processArguments(sumy, processAll = true, skipped = Nil) diff --git a/compiler/test/dotty/tools/dotc/interactive/CustomCompletion.scala b/compiler/test/dotty/tools/dotc/interactive/CustomCompletion.scala deleted file mode 100644 index 7b422a1164ae..000000000000 --- a/compiler/test/dotty/tools/dotc/interactive/CustomCompletion.scala +++ /dev/null @@ -1,130 +0,0 @@ -package dotty.tools.dotc.interactive - -import dotty.tools.dotc.ast.tpd._ -import dotty.tools.dotc.ast.untpd -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Denotations.SingleDenotation -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.NameOps._ -import dotty.tools.dotc.core.Names.{Name, termName} -import dotty.tools.dotc.core.StdNames.nme -import dotty.tools.dotc.core.Symbols.{Symbol, defn} -import dotty.tools.dotc.core.TypeError -import dotty.tools.dotc.util.Chars.{isOperatorPart, isScalaLetter} -import dotty.tools.dotc.util.SourcePosition - -object CustomCompletion { - - def completions( - pos: SourcePosition, - dependencyCompleteOpt: Option[String => (Int, Seq[String])], - enableDeep: Boolean - )(using Context): (Int, List[Completion]) = { - val path = Interactive.pathTo(ctx.compilationUnit.tpdTree, pos.span) - computeCompletions(pos, path, dependencyCompleteOpt, enableDeep)(using Interactive.contextOfPath(path)) - } - - def computeCompletions( - pos: SourcePosition, - path: List[Tree], - dependencyCompleteOpt: Option[String => (Int, Seq[String])], - enableDeep: Boolean - )(using Context): (Int, List[Completion]) = { - val mode = Completion.completionMode(path, pos) - val prefix = Completion.completionPrefix(path, pos) - val completer = new DeepCompleter(mode, prefix, pos) - - var extra = List.empty[Completion] - - val completions = path match { - case Select(qual, _) :: _ => completer.selectionCompletions(qual) - case Import(Ident(name), _) :: _ if name.decode.toString == "$ivy" && dependencyCompleteOpt.nonEmpty => - val complete = dependencyCompleteOpt.get - val (pos, completions) = complete(prefix) - val input0 = prefix.take(pos) - extra ++= completions.distinct.toList - .map(s => Completion(label(termName(input0 + s)), "", Nil)) - Map.empty - case Import(expr, _) :: _ => completer.directMemberCompletions(expr) - case (_: untpd.ImportSelector) :: Import(expr, _) :: _ => completer.directMemberCompletions(expr) - case _ => - completer.scopeCompletions ++ { - if (enableDeep) completer.deepCompletions - else Nil - } - } - - val describedCompletions = extra ++ describeCompletions(completions) - val offset = Completion.completionOffset(path) - - (pos.span.start - prefix.length, describedCompletions) - } - - private type CompletionMap = Map[Name, Seq[SingleDenotation]] - - private def describeCompletions(completions: CompletionMap)(using Context): 
List[Completion] = { - for - (name, denots) <- completions.toList - denot <- denots - yield - Completion(label(name), Completion.description(denot), List(denot.symbol)) - } - - class DeepCompleter(mode: Completion.Mode, prefix: String, pos: SourcePosition) extends Completion.Completer(mode, prefix, pos): - def deepCompletions(using Context): Map[Name, Seq[SingleDenotation]] = { - - def allMembers(s: Symbol) = - try s.info.allMembers - catch { - case _: dotty.tools.dotc.core.TypeError => Nil - } - def rec(t: Symbol): Seq[Symbol] = { - val children = - if (t.is(Package) || t.is(PackageVal) || t.is(PackageClass)) { - allMembers(t).map(_.symbol).filter(_ != t).flatMap(rec) - } else Nil - - t +: children.toSeq - } - - val syms = for { - member <- allMembers(defn.RootClass).map(_.symbol).toList - sym <- rec(member) - if sym.name.toString.startsWith(prefix) - } yield sym - - syms.map(sym => (sym.fullName, List(sym: SingleDenotation))).toMap - } - - private val bslash = '\\' - private val specialChars = Set('[', ']', '(', ')', '{', '}', '.', ',', ';') - - def label(name: Name): String = { - - def maybeQuote(name: Name, recurse: Boolean): String = - if (recurse && name.isTermName) - name.asTermName.qualToString(maybeQuote(_, true), maybeQuote(_, false)) - else { - // initially adapted from - // https://github.com/scala/scala/blob/decbd53f1bde4600c8ff860f30a79f028a8e431d/src/reflect/scala/reflect/internal/Printers.scala#L573-L584 - val decName = name.decode.toString - val hasSpecialChar = decName.exists { ch => - specialChars(ch) || ch.isWhitespace - } - def isOperatorLike = (name.isOperatorName || decName.exists(isOperatorPart)) && - decName.exists(isScalaLetter) && - !decName.contains(bslash) - lazy val term = name.toTermName - - val needsBackTicks = hasSpecialChar || - isOperatorLike || - nme.keywords(term) && term != nme.USCOREkw - - if (needsBackTicks) s"`$decName`" - else decName - } - - maybeQuote(name, true) - } -} - diff --git a/compiler/test/dotty/tools/dotc/interactive/CustomCompletionTests.scala b/compiler/test/dotty/tools/dotc/interactive/CustomCompletionTests.scala deleted file mode 100644 index a43a5cafce21..000000000000 --- a/compiler/test/dotty/tools/dotc/interactive/CustomCompletionTests.scala +++ /dev/null @@ -1,171 +0,0 @@ -package dotty.tools -package dotc.interactive - -import dotc.ast.tpd -import dotc.{CompilationUnit, Compiler, Run} -import dotc.core.Contexts.Context -import dotc.core.Mode -import dotc.reporting.StoreReporter -import dotc.util.{SourceFile, SourcePosition} -import dotc.util.Spans.Span - -import org.junit.Test - -class CustomCompletionTests extends DottyTest: - - private def completions( - input: String, - dependencyCompleter: Option[String => (Int, Seq[String])] = None, - deep: Boolean = false, - extraDefinitions: String = "" - ): (Int, Seq[Completion]) = - val prefix = extraDefinitions + """ - object Wrapper { - val expr = { - """ - val suffix = """ - } - } - """ - - val allCode = prefix + input + suffix - val index = prefix.length + input.length - - val run = new Run( - new Compiler, - initialCtx.fresh - .addMode(Mode.ReadPositions | Mode.Interactive) - // discard errors - comment out this line to print them in the console - .setReporter(new StoreReporter(null)) - .setSetting(initialCtx.settings.YstopAfter, List("typer")) - ) - val file = SourceFile.virtual("", allCode, maybeIncomplete = true) - given ctx: Context = run.runContext.withSource(file) - val unit = CompilationUnit(file) - ctx - .run.nn - .compileUnits(unit :: Nil, ctx) - - // ignoring compilation 
errors here - the input code - // to complete likely doesn't compile - - unit.tpdTree = { - import tpd._ - unit.tpdTree match { - case PackageDef(_, p) => - p.reverseIterator.collectFirst { - case TypeDef(_, tmpl: Template) => - tmpl.body - .collectFirst { case dd: ValDef if dd.name.show == "expr" => dd } - .getOrElse(sys.error("Unexpected tree shape")) - } - .getOrElse(sys.error("Unexpected tree shape")) - case _ => sys.error("Unexpected tree shape") - } - } - val ctx1 = ctx.fresh.setCompilationUnit(unit) - val srcPos = SourcePosition(file, Span(index)) - val (offset0, completions) = - if (deep || dependencyCompleter.nonEmpty) - CustomCompletion.completions(srcPos, dependencyCompleteOpt = dependencyCompleter, enableDeep = deep)(using ctx1) - else - Completion.completions(srcPos)(using ctx1) - val offset = offset0 - prefix.length - (offset, completions) - - - @Test def simple(): Unit = - val prefix = "scala.collection.immutable." - val input = prefix + "Ma" - - val (offset, completions0) = completions(input) - val labels = completions0.map(_.label) - - assert(offset == prefix.length) - assert(labels.contains("Map")) - - @Test def custom(): Unit = - val prefix = "import $ivy." - val input = prefix + "scala" - - val dependencies = Seq( - "scalaCompiler", - "scalaLibrary", - "other" - ) - val (offset, completions0) = completions( - input, - dependencyCompleter = Some { dep => - val matches = dependencies.filter(_.startsWith(dep)) - (0, matches) - } - ) - val labels = completions0.map(_.label) - - assert(offset == prefix.length) - assert(labels.contains("scalaCompiler")) - assert(labels.contains("scalaLibrary")) - assert(labels.length == 2) - - @Test def backTicks(): Unit = - val prefix = "Foo." - val input = prefix + "a" - - val extraDefinitions = - """object Foo { def a1 = 2; def `a-b` = 3 } - |""".stripMargin - val (offset, completions0) = completions( - input, - extraDefinitions = extraDefinitions, - deep = true // Enables CustomCompleter - ) - val labels = completions0.map(_.label) - - assert(offset == prefix.length) - assert(labels.contains("a1")) - assert(labels.contains("`a-b`")) - - @Test def backTicksDependencies(): Unit = - val prefix = "import $ivy." - val input = prefix + "`org.scala-lang:scala-`" - - val dependencies = Seq( - "org.scala-lang:scala-compiler", - "org.scala-lang:scala-library", - "other" - ) - val (offset, completions0) = completions( - input, - dependencyCompleter = Some { dep => - val matches = dependencies.filter(_.startsWith(dep)) - (0, matches) - } - ) - val labels = completions0.map(_.label) - - // Seems backticks mess with that for now... 
- // assert(offset == prefix.length) - assert(labels.contains("`org.scala-lang:scala-compiler`")) - assert(labels.contains("`org.scala-lang:scala-library`")) - assert(labels.length == 2) - - @Test def deep(): Unit = - val prefix = "" - val input = prefix + "ListBuf" - - val (offset, completions0) = completions(input, deep = true) - val labels = completions0.map(_.label) - - assert(offset == prefix.length) - assert(labels.contains("scala.collection.mutable.ListBuffer")) - - @Test def deepType(): Unit = - val prefix = "" - val input = prefix + "Function2" - - val (offset, completions0) = completions(input, deep = true) - val labels = completions0.map(_.label) - - assert(offset == prefix.length) - assert(labels.contains("scala.Function2")) - diff --git a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala index 26092b73f107..ecae111604cf 100644 --- a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala +++ b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala @@ -4,8 +4,9 @@ import scala.language.unsafeNulls import java.util.regex.Pattern -import org.junit.Assert.{assertTrue => assert, _} -import org.junit.{Ignore, Test} +import org.junit.Assert.{assertEquals, assertFalse, assertTrue} +import org.junit.Assert.{assertTrue => assert} +import org.junit.Test import dotty.tools.dotc.core.Contexts.Context class ReplCompilerTests extends ReplTest: @@ -107,28 +108,21 @@ class ReplCompilerTests extends ReplTest: assertEquals(expected, lines()) } - // FIXME: Tests are not run in isolation, the classloader is corrupted after the first exception - @Ignore @Test def i3305: Unit = { - initially { - run("null.toString") - assert(storedOutput().startsWith("java.lang.NullPointerException")) - } + @Test def `i3305 SOE meh`: Unit = initially: + run("def foo: Int = 1 + foo; foo") + assert(storedOutput().startsWith("java.lang.StackOverflowError")) - initially { - run("def foo: Int = 1 + foo; foo") - assert(storedOutput().startsWith("def foo: Int\njava.lang.StackOverflowError")) - } + @Test def `i3305 NPE`: Unit = initially: + run("null.toString") + assert(storedOutput().startsWith("java.lang.NullPointerException")) - initially { - run("""throw new IllegalArgumentException("Hello")""") - assert(storedOutput().startsWith("java.lang.IllegalArgumentException: Hello")) - } + @Test def `i3305 IAE`: Unit = initially: + run("""throw new IllegalArgumentException("Hello")""") + assertTrue(storedOutput().startsWith("java.lang.IllegalArgumentException: Hello")) - initially { - run("val (x, y) = null") - assert(storedOutput().startsWith("scala.MatchError: null")) - } - } + @Test def `i3305 ME`: Unit = initially: + run("val (x, y) = null") + assert(storedOutput().startsWith("scala.MatchError: null")) @Test def i2789: Unit = initially { run("(x: Int) => println(x)") @@ -409,6 +403,51 @@ class ReplCompilerTests extends ReplTest: @Test def `i13097 expect template after colon` = contextually: assert(ParseResult.isIncomplete("class C:")) + @Test def i15562: Unit = initially { + val s1 = run("List(1, 2).filter(_ % 2 == 0).foreach(println)") + assertEquals("2", storedOutput().trim) + s1 + } andThen { s1 ?=> + val comp = tabComplete("List(1, 2).filter(_ % 2 == 0).fore") + assertEquals(List("foreach"), comp.distinct) + s1 + } andThen { + val s2 = run("List(1, 2).filter(_ % 2 == 0).foreach(println)") + assertEquals("2", storedOutput().trim) + s2 + } + + @Test def i15562b: Unit = initially { + val s1 = run("List(1, 2).filter(_ % 2 == 0).foreach(println)") + 
assertEquals("2", storedOutput().trim) + s1 + } andThen { s1 ?=> + val comp = tabComplete("val x = false + true; List(1, 2).filter(_ % 2 == 0).fore") + assertEquals(List("foreach"), comp.distinct) + s1 + } andThen { + val s2 = run("List(1, 2).filter(_ % 2 == 0).foreach(println)") + assertEquals("2", storedOutput().trim) + s2 + } + + @Test def `i17333 print null result of toString`: Unit = + initially: + run("val tpolecat = new Object { override def toString(): String = null }") + .andThen: + val last = lines().last + assertTrue(last, last.startsWith("val tpolecat: Object = null")) + assertTrue(last, last.endsWith("""// result of "tpolecat.toString" is null""")) + + @Test def `i17333 print toplevel object with null toString`: Unit = + initially: + run("object tpolecat { override def toString(): String = null }") + .andThen: + run("tpolecat") + val last = lines().last + assertTrue(last, last.startsWith("val res0: tpolecat.type = null")) + assertTrue(last, last.endsWith("""// result of "res0.toString" is null""")) + object ReplCompilerTests: private val pattern = Pattern.compile("\\r[\\n]?|\\n"); @@ -458,3 +497,16 @@ class ReplVerboseTests extends ReplTest(ReplTest.defaultOptions :+ "-verbose"): } end ReplVerboseTests + +class ReplHighlightTests extends ReplTest(ReplTest.defaultOptions.filterNot(_.startsWith("-color")) :+ "-color:always"): + @Test def i18596: Unit = initially: + run("""(1 to 500).foldRight("x") { case (_, n) => s"$n" }""") + + @Test def i16904: Unit = initially: + run(""""works not fine"* 10000""") + + run(""" + case class Tree(left: Tree, right: Tree) + def deepTree(depth: Int): Tree + deepTree(300)""") + diff --git a/compiler/test/dotty/tools/repl/ReplTest.scala b/compiler/test/dotty/tools/repl/ReplTest.scala index 8fbf635c9a17..3925b61d7de0 100644 --- a/compiler/test/dotty/tools/repl/ReplTest.scala +++ b/compiler/test/dotty/tools/repl/ReplTest.scala @@ -40,6 +40,10 @@ extends ReplDriver(options, new PrintStream(out, true, StandardCharsets.UTF_8.na def contextually[A](op: Context ?=> A): A = op(using initialState.context) + /** Returns the `(, )`*/ + def tabComplete(src: String)(implicit state: State): List[String] = + completionsWithSignatures(src.length, src, state).map(_.label).sorted.distinct + extension [A](state: State) infix def andThen(op: State ?=> A): A = op(using state) @@ -104,6 +108,6 @@ extends ReplDriver(options, new PrintStream(out, true, StandardCharsets.UTF_8.na } object ReplTest: - val commonOptions = Array("-color:never", "-language:experimental.erasedDefinitions", "-pagewidth", "80") + val commonOptions = Array("-color:never", "-pagewidth", "80") val defaultOptions = commonOptions ++ Array("-classpath", TestConfiguration.basicClasspath) lazy val withStagingOptions = commonOptions ++ Array("-classpath", TestConfiguration.withStagingClasspath) diff --git a/compiler/test/dotty/tools/repl/TabcompleteTests.scala b/compiler/test/dotty/tools/repl/TabcompleteTests.scala index 0bce525e1469..e4c3a2557e7d 100644 --- a/compiler/test/dotty/tools/repl/TabcompleteTests.scala +++ b/compiler/test/dotty/tools/repl/TabcompleteTests.scala @@ -8,10 +8,6 @@ import org.junit.Test /** These tests test input that has proved problematic */ class TabcompleteTests extends ReplTest { - /** Returns the `(, )`*/ - private def tabComplete(src: String)(implicit state: State): List[String] = - completions(src.length, src, state).map(_.value).sorted - @Test def tabCompleteList = initially { val comp = tabComplete("List.r") assertEquals(List("range"), comp.distinct) diff --git 
a/compiler/test/dotty/tools/utils.scala b/compiler/test/dotty/tools/utils.scala index 8161631acb44..a8c480088e08 100644 --- a/compiler/test/dotty/tools/utils.scala +++ b/compiler/test/dotty/tools/utils.scala @@ -81,7 +81,14 @@ def toolArgsFor(tool: ToolName, filename: Option[String])(lines: List[String]): // groups are (name, args) // note: ideally we would replace everything that requires this to use directive syntax, however scalajs: --skip has no directive equivalent yet. private val toolArg = raw"(?://|/\*| \*) ?(?i:(${ToolName.values.mkString("|")})):((?:[^*]|\*(?!/))*)".r.unanchored + +// ================================================================================================ +// =================================== VULPIX DIRECTIVES ========================================== +// ================================================================================================ + +/** Directive to specify to vulpix the options to pass to Dotty */ private val directiveOptionsArg = raw"//> using options (.*)".r.unanchored +private val directiveJavacOptions = raw"//> using javacOpt (.*)".r.unanchored // Inspect the lines for compiler options of the form // `//> using options args`, `// scalajs: args`, `/* scalajs: args`, ` * scalajs: args` etc. @@ -90,10 +97,15 @@ private val directiveOptionsArg = raw"//> using options (.*)".r.unanchored def toolArgsParse(lines: List[String], filename: Option[String]): List[(String,String)] = lines.flatMap { case toolArg("scalac", _) => sys.error(s"`// scalac: args` not supported. Please use `//> using options args`${filename.fold("")(f => s" in file $f")}") + case toolArg("javac", _) => sys.error(s"`// javac: args` not supported. Please use `//> using javacOpt args`${filename.fold("")(f => s" in file $f")}") case toolArg(name, args) => List((name, args)) case _ => Nil } ++ - lines.flatMap { case directiveOptionsArg(args) => List(("scalac", args)) case _ => Nil } + lines.flatMap { + case directiveOptionsArg(args) => List(("scalac", args)) + case directiveJavacOptions(args) => List(("javac", args)) + case _ => Nil + } import org.junit.Test import org.junit.Assert._ @@ -104,6 +116,6 @@ class ToolArgsTest: @Test def `tool is present`: Unit = assertEquals("-hey" :: Nil, toolArgsFor(ToolName.Test, None)("// test: -hey" :: Nil)) @Test def `missing tool is absent`: Unit = assertEquals(Nil, toolArgsFor(ToolName.Javac, None)("// test: -hey" :: Nil)) @Test def `multitool is present`: Unit = - assertEquals("-hey" :: Nil, toolArgsFor(ToolName.Test, None)("// test: -hey" :: "// javac: -d /tmp" :: Nil)) - assertEquals("-d" :: "/tmp" :: Nil, toolArgsFor(ToolName.Javac, None)("// test: -hey" :: "// javac: -d /tmp" :: Nil)) + assertEquals("-hey" :: Nil, toolArgsFor(ToolName.Test, None)("// test: -hey" :: "// java: -d /tmp" :: Nil)) + assertEquals("-d" :: "/tmp" :: Nil, toolArgsFor(ToolName.Java, None)("// test: -hey" :: "// java: -d /tmp" :: Nil)) end ToolArgsTest diff --git a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala index 59d5d3d542fd..e9975ed25b6d 100644 --- a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala +++ b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala @@ -498,6 +498,7 @@ trait ParallelTesting extends RunnerOrchestration { self => case None => true def scalacOptions = toolArgs.getOrElse(ToolName.Scalac, Nil) + def javacOptions = toolArgs.getOrElse(ToolName.Javac, Nil) val flags = flags0 .and(scalacOptions*) @@ -512,11 +513,10 @@ trait ParallelTesting extends 
RunnerOrchestration { self => def compileWithJavac(fs: Array[String]) = if (fs.nonEmpty) { val fullArgs = Array( - "javac", "-encoding", StandardCharsets.UTF_8.name, - ) ++ flags.javacFlags ++ fs + ) ++ flags.javacFlags ++ javacOptions++ fs - val process = Runtime.getRuntime.exec(fullArgs) + val process = Runtime.getRuntime.exec("javac" +: fullArgs) val output = Source.fromInputStream(process.getErrorStream).mkString if waitForJudiciously(process) != 0 then Some(output) diff --git a/compiler/test/dotty/tools/vulpix/TestConfiguration.scala b/compiler/test/dotty/tools/vulpix/TestConfiguration.scala index 04be00fe921e..1defe3f4f53d 100644 --- a/compiler/test/dotty/tools/vulpix/TestConfiguration.scala +++ b/compiler/test/dotty/tools/vulpix/TestConfiguration.scala @@ -66,7 +66,9 @@ object TestConfiguration { val yCheckOptions = Array("-Ycheck:all") val commonOptions = Array("-indent") ++ checkOptions ++ noCheckOptions ++ yCheckOptions + val noYcheckCommonOptions = Array("-indent") ++ checkOptions ++ noCheckOptions val defaultOptions = TestFlags(basicClasspath, commonOptions) + val noYcheckOptions = TestFlags(basicClasspath, noYcheckCommonOptions) val unindentOptions = TestFlags(basicClasspath, Array("-no-indent") ++ checkOptions ++ noCheckOptions ++ yCheckOptions) val withCompilerOptions = defaultOptions.withClasspath(withCompilerClasspath).withRunClasspath(withCompilerClasspath) diff --git a/docs/_docs/contributing/getting-started.md b/docs/_docs/contributing/getting-started.md index 63e968902600..071cbeb0c0a2 100644 --- a/docs/_docs/contributing/getting-started.md +++ b/docs/_docs/contributing/getting-started.md @@ -49,8 +49,8 @@ Scala](https://www.jetbrains.com/help/idea/discover-intellij-idea-for-scala.html Start by cloning the repository: ```bash -$ git clone https://github.com/lampepfl/dotty.git -$ cd dotty +$ git clone https://github.com/scala/scala3.git +$ cd scala3 ``` Dotty provides a standard sbt build: compiling, running and starting a repl can diff --git a/docs/_docs/contributing/testing.md b/docs/_docs/contributing/testing.md index 039b37ead8bf..9ea02f071cb6 100644 --- a/docs/_docs/contributing/testing.md +++ b/docs/_docs/contributing/testing.md @@ -141,6 +141,11 @@ checkfiles with the test outputs. $ sbt > testCompilation --update-checkfiles ``` +Or for ScalaJS +```bash +$ sbt +> sjsCompilerTests/testOnly -- -Ddotty.tests.updateCheckfiles=TRUE +``` Use `--help` to see all the options ```bash diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index aa8cd15f00a0..10f068e53c7f 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -20,6 +20,8 @@ productions map to AST nodes. The following description of Scala tokens uses literal characters `‘c’` when referring to the ASCII fragment `\u0000` – `\u007F`. +Informal descriptions are typeset as `“some comment”`. + ## Lexical Syntax The lexical syntax of Scala is given by the following grammar in EBNF form: @@ -99,7 +101,10 @@ semi ::= ‘;’ | nl {nl} ## Optional Braces -The lexical analyzer also inserts `indent` and `outdent` tokens that represent regions of indented code [at certain points](../reference/other-new-features/indentation.md) +The principle of optional braces is that any keyword that can be followed by `{` can also be followed by an indented block, without needing an intervening `:`. +(Allowing an optional `:` would be counterproductive since it would introduce several ways to do the same thing.) 
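As a quick illustration of the principle stated above, here is a minimal sketch (the method and names are invented for the example): a keyword such as `else` or the `=` of a definition, which may be followed by `{`, may instead be followed by an indented region with no intervening `:`.

```scala
// Minimal sketch of the optional-braces principle: `=`, `then`, `else`, and
// the arrow of a `case` may each be followed by an indented block instead of
// a braced one, with no `:` in between.
def sign(x: Int): String =
  if x > 0 then
    "positive"
  else
    x match
      case 0 => "zero"
      case _ => "negative"
```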
+ +The lexical analyzer inserts `indent` and `outdent` tokens that represent regions of indented code [at certain points](../reference/other-new-features/indentation.md). In the context-free productions below we use the notation `<<< ts >>>` to indicate a token sequence `ts` that is either enclosed in a pair of braces `{ ts }` or that constitutes an indented region `indent ts outdent`. Analogously, the @@ -201,14 +206,19 @@ SimpleType1 ::= id Singleton ::= SimpleRef | SimpleLiteral | Singleton ‘.’ id -FunArgType ::= [`erased`] Type - | [`erased`] ‘=>’ Type PrefixOp(=>, t) +FunArgType ::= Type + | ‘=>’ Type PrefixOp(=>, t) FunArgTypes ::= FunArgType { ‘,’ FunArgType } ParamType ::= [‘=>’] ParamValueType -ParamValueType ::= [‘into’] ExactParamType Into(t) -ExactParamType ::= ParamValueType [‘*’] PostfixOp(t, "*") +ParamValueType ::= Type [‘*’] PostfixOp(t, "*") + | IntoType + | ‘(’ IntoType ‘)’ ‘*’ PostfixOp(t, "*") +IntoType ::= [‘into’] IntoTargetType Into(t) + | ‘(’ IntoType ‘)’ +IntoTargetType ::= Type + | FunTypeArgs (‘=>’ | ‘?=>’) IntoType TypeArgs ::= ‘[’ Types ‘]’ ts -Refinement ::= :<<< [RefineDef] {semi [RefineDef]} >>> ds +Refinement ::= :<<< [RefineDcl] {semi [RefineDcl]} >>> ds TypeBounds ::= [‘>:’ Type] [‘<:’ Type] TypeBoundsTree(lo, hi) TypeParamBounds ::= TypeBounds {‘:’ Type} ContextBounds(typeBounds, tps) Types ::= Type {‘,’ Type} @@ -223,7 +233,7 @@ BlockResult ::= FunParams (‘=>’ | ‘?=>’) Block | HkTypeParamClause ‘=>’ Block | Expr1 FunParams ::= Bindings - | [`erased`] id + | id | ‘_’ Expr1 ::= [‘inline’] ‘if’ ‘(’ Expr ‘)’ {nl} Expr [[semi] ‘else’ Expr] If(Parens(cond), thenp, elsep?) | [‘inline’] ‘if’ Expr ‘then’ Expr [[semi] ‘else’ Expr] If(cond, thenp, elsep?) @@ -272,7 +282,7 @@ ColonArgument ::= colon [LambdaStart] LambdaStart ::= FunParams (‘=>’ | ‘?=>’) | HkTypeParamClause ‘=>’ Quoted ::= ‘'’ ‘{’ Block ‘}’ - | ‘'’ ‘[’ Type ‘]’ + | ‘'’ ‘[’ TypeBlock ‘]’ ExprSplice ::= spliceId -- if inside quoted block | ‘$’ ‘{’ Block ‘}’ -- unless inside quoted pattern | ‘$’ ‘{’ Pattern ‘}’ -- when inside quoted pattern @@ -294,6 +304,8 @@ BlockStat ::= Import | Extension | Expr1 | EndMarker +TypeBlock ::= {TypeBlockStat semi} Type +TypeBlockStat ::= ‘type’ {nl} TypeDef ForExpr ::= ‘for’ ‘(’ Enumerators0 ‘)’ {nl} [‘do‘ | ‘yield’] Expr ForYield(enums, expr) / ForDo(enums, expr) | ‘for’ ‘{’ Enumerators0 ‘}’ {nl} [‘do‘ | ‘yield’] Expr @@ -353,7 +365,7 @@ ClsParamClause ::= [nl] ‘(’ ClsParams ‘)’ | [nl] ‘(’ ‘using’ (ClsParams | FunArgTypes) ‘)’ ClsParams ::= ClsParam {‘,’ ClsParam} ClsParam ::= {Annotation} ValDef(mods, id, tpe, expr) -- point of mods on val/var - [{Modifier} (‘val’ | ‘var’) | ‘inline’] Param + [{Modifier} (‘val’ | ‘var’)] Param DefParamClauses ::= DefParamClause { DefParamClause } -- and two DefTypeParamClause cannot be adjacent DefParamClause ::= DefTypeParamClause @@ -376,8 +388,8 @@ Param ::= id ‘:’ ParamType [‘=’ Expr] ### Bindings and Imports ```ebnf -Bindings ::= ‘(’[`erased`] [Binding {‘,’ [`erased`] Binding}] ‘)’ -Binding ::= (id | ‘_’) [‘:’ Type] ValDef(_, id, tpe, EmptyTree) +Bindings ::= ‘(’ [Binding {‘,’ Binding}] ‘)’ +Binding ::= [`erased`] (id | ‘_’) [‘:’ Type] ValDef(_, id, tpe, EmptyTree) Modifier ::= LocalModifier | AccessModifier @@ -390,6 +402,10 @@ LocalModifier ::= ‘abstract’ | ‘implicit’ | ‘lazy’ | ‘inline’ + | ‘transparent’ + | ‘infix’ + | ‘erased’ + AccessModifier ::= (‘private’ | ‘protected’) [AccessQualifier] AccessQualifier ::= ‘[’ id ‘]’ @@ -414,9 +430,11 @@ EndMarkerTag ::= id | ‘if’ | ‘while’ | ‘for’ | ‘match’ | ### Definitions ```ebnf 
-RefineDef ::= ‘val’ ValDef - | ‘def’ DefDef +RefineDcl ::= ‘val’ ValDcl + | ‘def’ DefDcl | ‘type’ {nl} TypeDef +ValDcl ::= ids ‘:’ Type +DefDcl ::= DefSig ‘:’ Type Def ::= ‘val’ PatDef | ‘var’ PatDef @@ -461,7 +479,6 @@ TemplateBody ::= :<<< [SelfType] TemplateStat {semi TemplateStat} >>> TemplateStat ::= Import | Export | {Annotation [nl]} {Modifier} Def - | {Annotation [nl]} {Modifier} Dcl | Extension | Expr1 | EndMarker diff --git a/docs/_docs/reference/experimental/into-modifier.md b/docs/_docs/reference/experimental/into-modifier.md index 2ee4c74539b3..54da5f976320 100644 --- a/docs/_docs/reference/experimental/into-modifier.md +++ b/docs/_docs/reference/experimental/into-modifier.md @@ -32,10 +32,10 @@ The `into` modifier on the type of `elems` means that implicit conversions can b `into` also allows conversions on the results of function arguments. For instance, consider the new proposed signature of the `flatMap` method on `List[A]`: ```scala - def flatMap[B](f: into A => IterableOnce[B]): List[B] + def flatMap[B](f: A => into IterableOnce[B]): List[B] ``` -This allows a conversion of the actual argument to the function type `A => IterableOnce[B]`. Crucially, it also allows that conversion to be applied to -the function result. So the following would work: +This accepts all actual arguments `f` that, when applied to an `A`, give a result +that is convertible to `IterableOnce[B]`. So the following would work: ```scala scala> val xs = List(1, 2, 3) scala> xs.flatMap(x => x.toString * x) @@ -49,7 +49,7 @@ When applied to a vararg parameter, `into` allows a conversion on each argument number of `IterableOnce[Char]` arguments, and also allows implicit conversions into `IterableOnce[Char]`: ```scala -def concatAll(xss: into IterableOnce[Char]*): List[Char] = +def concatAll(xss: (into IterableOnce[Char])*): List[Char] = xss.foldLeft(List[Char]())(_ ++ _) ``` Here, the call @@ -58,24 +58,63 @@ concatAll(List('a'), "bc", Array('d', 'e')) ``` would apply two _different_ implicit conversions: the conversion from `String` to `Iterable[Char]` gets applied to the second argument and the conversion from `Array[Char]` to `Iterable[Char]` gets applied to the third argument. +Note that a vararg parameter type with into modifiers needs to be put in parentheses, as is shown in the example above. This is to make the precedence clear: each element of the argument sequence is converted by itself. + ## Retrofitting Scala 2 libraries -A new annotation `allowConversions` has the same effect as an `into` modifier. It is defined as an `@experimental` class in package `scala.annotation`. It is intended to be used for retrofitting Scala 2 library code so that Scala 3 conversions can be applied to arguments without language imports. For instance, the definitions of +There is also an annotation `@into` in the `scala.annotation` package that has +the same effect as an `into` modifier. It is intended to be used for retrofitting Scala 2 library code so that Scala 3 conversions can be applied to arguments without language imports. For instance, the definitions of `++` and `flatMap` in the Scala 2.13 `List` class could be retrofitted as follows. 
```scala - def ++ (@allowConversions elems: IterableOnce[A]): List[A] - def flatMap[B](@allowConversions f: A => IterableOnce[B]): List[B] + def ++ (elems: IterableOnce[A] @into): List[A] + def flatMap[B](f: A => IterableOnce[B] @into): List[B] +``` +For Scala 3 code, the `into` modifier is preferred, because it adheres to the principle that annotations should not influence typing and type inference in Scala. + +## Restrictions + +The `into` modifier is only allowed in the types of method parameters. It can be given either for the whole type, or some result type of a top-level function type, but not anywhere else. The `into` modifier does not propagate outside the method. In particular, a partially applied method does not propagate `into` modifiers to its result. + +**Example:** + +Say we have +```scala +def f(x: Int)(y: into Text): Unit +``` +then +```scala +f(3) : Text => Unit +``` +Note the `into` modifier is no longer present on the type of `f(3)`. Therefore, follow-on arguments to `f(3)` do not allow implicit conversions. Generally it is not possible to +define function types that allow implicit conversions on their arguments, but it is possible to define SAM types that allow conversions. E.g. +```scala +trait ConvArg: + def apply(x: into Text): Unit + +val x: ConvArg = f(3)(_) +``` + +Note this is similar to the way vararg parameters are handled in Scala. If we have +```scala +def g(x: Int)(y: Int*): Unit +``` +then +```scala +g(4) : Seq[Int] => Unit ``` -For Scala 3 code, the `into` modifier is preferred. First, because it is shorter, -and second, because it adheres to the principle that annotations should not influence -typing and type inference in Scala. +Observe that the vararg annotation also got dropped in the result type of `g(4)`. ## Syntax changes The addition to the grammar is: ``` -ParamType ::= [‘=>’] ParamValueType -ParamValueType ::= [‘into‘] ExactParamType -ExactParamType ::= Type [‘*’] +ParamType ::= [‘=>’] ParamValueType +ParamValueType ::= Type [‘*’] + | IntoType + | ‘(’ IntoType ‘)’ ‘*’ +IntoType ::= [‘into’] IntoTargetType + | ‘(’ IntoType ‘)’ +IntoTargetType ::= Type + | FunTypeArgs (‘=>’ | ‘?=>’) IntoType ``` -As the grammar shows, `into` can only applied to the type of a parameter; it is illegal in other positions. +As the grammar shows, `into` can only be applied in the type of a parameter; it is illegal in other positions. Also, `into` modifiers in vararg types have to be enclosed in parentheses. 
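To tie the pieces of this page together, here is a minimal, hedged sketch covering both `into` forms: the whole parameter type and the parenthesized vararg element type. The `Text` type and its conversion are invented for the example, and the sketch assumes the experimental `into` feature is enabled; the `scala.language.experimental.into` import shown is the assumed way to enable it and may differ across compiler versions.

```scala
// Hedged sketch only: `Text` and its Conversion are invented for illustration.
// Assumes the experimental `into` feature is enabled; the language import
// below is the assumed switch and may vary by compiler version.
import scala.language.experimental.into

case class Text(str: String)
given Conversion[String, Text] = Text(_)

// `into` on the whole parameter type: the argument may be converted implicitly.
def render(txt: into Text): String = txt.str

// `into` on a vararg element type must be parenthesized, as noted above;
// the conversion is then applied to each element separately.
def renderAll(txts: (into Text)*): String =
  txts.map(_.str).mkString(" ")

@main def intoDemo =
  println(render("hello"))             // String converted to Text
  println(renderAll("a", Text("b")))   // conversion applied element-wise
```

Without `into`, the same call sites would need a language import for implicit conversions (or would trigger a feature warning), which is exactly the friction the modifier is meant to remove.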
diff --git a/docs/_docs/reference/syntax.md b/docs/_docs/reference/syntax.md index bf2c27d57863..1980bc4e0ab2 100644 --- a/docs/_docs/reference/syntax.md +++ b/docs/_docs/reference/syntax.md @@ -294,7 +294,7 @@ BlockStat ::= Import | Expr1 | EndMarker TypeBlock ::= {TypeBlockStat semi} Type -TypeBlockStat ::= ‘type’ {nl} TypeDcl +TypeBlockStat ::= ‘type’ {nl} TypeDef ForExpr ::= ‘for’ ‘(’ Enumerators0 ‘)’ {nl} [‘do‘ | ‘yield’] Expr | ‘for’ ‘{’ Enumerators0 ‘}’ {nl} [‘do‘ | ‘yield’] Expr @@ -350,8 +350,12 @@ ClsParamClauses ::= {ClsParamClause} [[nl] ‘(’ [‘implicit’] ClsParams ClsParamClause ::= [nl] ‘(’ ClsParams ‘)’ | [nl] ‘(’ ‘using’ (ClsParams | FunArgTypes) ‘)’ ClsParams ::= ClsParam {‘,’ ClsParam} -ClsParam ::= {Annotation} [{Modifier} (‘val’ | ‘var’) | ‘inline’] Param +ClsParam ::= {Annotation} [{Modifier} (‘val’ | ‘var’)] Param +DefParamClauses ::= DefParamClause { DefParamClause } -- and two DefTypeParamClause cannot be adjacent +DefParamClause ::= DefTypeParamClause + | DefTermParamClause + | UsingParamClause TypelessClauses ::= TypelessClause {TypelessClause} TypelessClause ::= DefTermParamClause | UsingParamClause @@ -383,6 +387,8 @@ LocalModifier ::= ‘abstract’ | ‘implicit’ | ‘lazy’ | ‘inline’ + | ‘transparent’ + | ‘infix’ AccessModifier ::= (‘private’ | ‘protected’) [AccessQualifier] AccessQualifier ::= ‘[’ id ‘]’ @@ -409,24 +415,22 @@ EndMarkerTag ::= id | ‘if’ | ‘while’ | ‘for’ | ‘match’ | ``` RefineDcl ::= ‘val’ ValDcl | ‘def’ DefDcl - | ‘type’ {nl} TypeDcl -Dcl ::= RefineDcl - | ‘var’ VarDcl + | ‘type’ {nl} TypeDef ValDcl ::= ids ‘:’ Type -VarDcl ::= ids ‘:’ Type DefDcl ::= DefSig ‘:’ Type -DefSig ::= id [DefTypeParamClause] [TypelessClauses] [DefImplicitClause] -TypeDcl ::= id [TypeParamClause] {FunParamClause} TypeBounds Def ::= ‘val’ PatDef | ‘var’ PatDef | ‘def’ DefDef - | ‘type’ {nl} TypeDcl + | ‘type’ {nl} TypeDef | TmplDef -PatDef ::= ids [‘:’ Type] ‘=’ Expr - | Pattern2 [‘:’ Type] ‘=’ Expr -DefDef ::= DefSig [‘:’ Type] ‘=’ Expr - | ‘this’ TypelessClauses [DefImplicitClause] ‘=’ ConstrExpr +PatDef ::= ids [‘:’ Type] [‘=’ Expr] + | Pattern2 [‘:’ Type] [‘=’ Expr] PatDef(_, pats, tpe?, expr) +DefDef ::= DefSig [‘:’ Type] [‘=’ Expr] DefDef(_, name, paramss, tpe, expr) + | ‘this’ TypelessClauses [DefImplicitClause] ‘=’ ConstrExpr DefDef(_, , vparamss, EmptyTree, expr | Block) +DefSig ::= id [DefParamClauses] [DefImplicitClause] +TypeDef ::= id [TypeParamClause] {FunParamClause} TypeBounds TypeDefTree(_, name, tparams, bound + [‘=’ Type] TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef | [‘case’] ‘object’ ObjectDef @@ -458,7 +462,6 @@ TemplateBody ::= :<<< [SelfType] TemplateStat {semi TemplateStat} >>> TemplateStat ::= Import | Export | {Annotation [nl]} {Modifier} Def - | {Annotation [nl]} {Modifier} Dcl | Extension | Expr1 | EndMarker diff --git a/docs/_layouts/search.html b/docs/_layouts/search.html index 355a7e83eeb4..581e3948e649 100644 --- a/docs/_layouts/search.html +++ b/docs/_layouts/search.html @@ -47,11 +47,11 @@

Member Results

// Set search term and title: var searchTerm = decodeURIComponent(parameters["searchTerm"]); - document.getElementById("searching-for").innerHTML = 'Search results for "' + searchTerm + '"'; + document.getElementById("searching-for").innerText = 'Search results for "' + searchTerm + '"'; document.title = searchTerm + ' - Search results'; if (!window.Worker) { - document.getElementById("searching-for").innerHTML = + document.getElementById("searching-for").innerText = "Couldn't search for \"" + searchTerm + "\", " + "web workers not supported. Please update your browser."; } diff --git a/docs/_spec/04-basic-definitions.md b/docs/_spec/04-basic-definitions.md index 369709b52bff..28eb2d43a627 100644 --- a/docs/_spec/04-basic-definitions.md +++ b/docs/_spec/04-basic-definitions.md @@ -710,6 +710,22 @@ class C extends I { Here, it is OK to leave out the result type of `factorial` in `C`, even though the method is recursive. +### Tail-Recursive Call Elimination + +Method definitions which contain self-recursive invocations in tail position are optimized for stack safety. +Self-invocations which are the last operation before returning from the method are replaced with jumps to the beginning of the method, much as in a while loop. +Sibling-invocations, in which a method calls itself but with a different instance as receiver, are also optimized. + +This transform is performed automatically by the compiler whenever possible. +A method definition bearing the annotation, `scala.annotation.tailrec`, will fail to compile if the transform is not possible. +(The annotation is intended for cases where deoptimization would likely result in a stack overflow.) + +```scala +@annotation.tailrec +def sum(xs: List[Int], acc: Int): Int = + xs match { case h :: t => sum(t, acc + h) case _ => acc } +``` + + assert( + xml match + case Seq(elm: Elem, comment: Comment) if + elm.label == "div" && + elm.child(0) == Atom(Text("FooBar")) && + comment.label == " /.modal-content " + => true + case _ => false + , + xml + ) + } +} + +package scala.xml { + type MetaData = AnyRef + + trait NamespaceBinding + object TopScope extends NamespaceBinding + object Null + abstract class Node { + def label: String + def child: Seq[Node] + override def toString = label + child.mkString + } + class Comment(commentText: String) extends Node{ + def label = commentText + def child = Nil + } + class Elem(prefix: String, val label: String, attributes1: MetaData, scope: NamespaceBinding, minimizeEmpty: Boolean, val child: Node*) extends Node + class NodeBuffer extends Seq[Node] { + val nodes = scala.collection.mutable.ArrayBuffer.empty[Node] + def &+(o: Any): NodeBuffer = + o match { + case n: Node => nodes.addOne(n) ; this + case t: Text => nodes.addOne(Atom(t)) ; this + } + // Members declared in scala.collection.IterableOnce + def iterator: Iterator[scala.xml.Node] = nodes.iterator + // Members declared in scala.collection.SeqOps + def apply(i: Int): scala.xml.Node = nodes(i) + def length: Int = nodes.length + } + case class Text(text: String) + case class Atom(t: Text) extends Node { + def label = t.text + def child = Nil + } +} \ No newline at end of file diff --git a/tests/run/i17255/J.java b/tests/run/i17255/J.java new file mode 100644 index 000000000000..3706e53cbbc3 --- /dev/null +++ b/tests/run/i17255/J.java @@ -0,0 +1,29 @@ +package p; + +public class J { + public static J j = new J(); + + public static p.J f() { + return p.J.j; + } + + public static Module$ module2() { + return p.Module$.MODULE$; + } + + public static p.Module$ 
module() { + return p.Module$.MODULE$; + } + + public static Module.InnerModule$ innermodule2() { + return p.Module.InnerModule$.MODULE$; + } + + public static p.Module.InnerModule$ innermodule() { + return p.Module.InnerModule$.MODULE$; + } + + public String toString() { + return "J"; + } +} diff --git a/tests/run/i17255/Module.scala b/tests/run/i17255/Module.scala new file mode 100644 index 000000000000..9b7153edbfd1 --- /dev/null +++ b/tests/run/i17255/Module.scala @@ -0,0 +1,18 @@ +// scalajs: --skip +package p { + object Module { + override def toString = "Module" + + object InnerModule { + override def toString = "InnerModule" + } + } +} + +object Test extends App { + assert(p.J.f().toString == "J") + assert(p.J.module().toString == "Module") + assert(p.J.module2().toString == "Module") + assert(p.J.innermodule().toString == "InnerModule") + assert(p.J.innermodule2().toString == "InnerModule") +} diff --git a/tests/run/i19619/InnerClass.java b/tests/run/i19619/InnerClass.java new file mode 100644 index 000000000000..32bb1642a232 --- /dev/null +++ b/tests/run/i19619/InnerClass.java @@ -0,0 +1,67 @@ +// InnerClass.java + +package lib; + +public class InnerClass { + + public class Inner { + public U innerField; + + public Inner(U innerField) { + this.innerField = innerField; + } + + public U getInnerField() { + return innerField; + } + } + + public class Outer { + + public class Nested { + + public U outerField; + public V innerField; + + public Nested(U outerField, V innerField) { + this.outerField = outerField; + this.innerField = innerField; + } + + public U getOuterField() { + return outerField; + } + + public V getInnerField() { + return innerField; + } + } + } + + public Inner createInner(U innerField) { + return new Inner<>(innerField); + } + + public Outer.Nested createNested(U outerField, V innerField) { + Outer outer = new Outer<>(); + return outer.new Nested<>(outerField, innerField); + } + + public static InnerClass.Inner createInnerStatic(U innerField) { + InnerClass innerClass = new InnerClass(); + return innerClass.new Inner<>(innerField); + } + + public static InnerClass.Outer.Nested createNestedStatic(U outerField, V innerField) { + InnerClass innerClass = new InnerClass(); + InnerClass.Outer outer = innerClass.new Outer<>(); + return outer.new Nested<>(outerField, innerField); + } + + public static void consumeNestedStatic(InnerClass.Outer.Nested nested) { + } + + public static void consumeNestedStatic2(Outer.Nested nested) { + } + +} diff --git a/tests/run/i19619/InnerClassGen.java b/tests/run/i19619/InnerClassGen.java new file mode 100644 index 000000000000..3a691aa0608f --- /dev/null +++ b/tests/run/i19619/InnerClassGen.java @@ -0,0 +1,80 @@ +// InnerClassGen.java + +package lib; + +public class InnerClassGen { + + public class Inner { + public T rootField; + public U innerField; + + public Inner(T rootField, U innerField) { + this.rootField = rootField; + this.innerField = innerField; + } + + public T getRootField() { + return rootField; + } + + public U getInnerField() { + return innerField; + } + } + + public class Outer { + + public class Nested { + public T rootField; + public U outerField; + public V innerField; + + public Nested(T rootField, U outerField, V innerField) { + this.rootField = rootField; + this.outerField = outerField; + this.innerField = innerField; + } + + public T getRootField() { + return rootField; + } + + public U getOuterField() { + return outerField; + } + + public V getInnerField() { + return innerField; + } + } + } + + public static 
class OuterStatic { + public static class NestedStatic { + } + } + + public Inner createInner(T rootField, U innerField) { + return new Inner<>(rootField, innerField); + } + + public Outer.Nested createNested(T rootField, U outerField, V innerField) { + Outer outer = new Outer<>(); + return outer.new Nested<>(rootField, outerField, innerField); + } + + public static InnerClassGen.Inner createInnerStatic(T rootField, U innerField) { + InnerClassGen innerClassGen = new InnerClassGen<>(); + return innerClassGen.new Inner<>(rootField, innerField); + } + + public static InnerClassGen.Outer.Nested createNestedStatic(T rootField, U outerField, V innerField) { + InnerClassGen innerClassGen = new InnerClassGen<>(); + InnerClassGen.Outer outer = innerClassGen.new Outer<>(); + return outer.new Nested<>(rootField, outerField, innerField); + } + + public static void consumeNestedStatic(InnerClassGen.Outer.Nested nested) { + } + +} diff --git a/tests/run/i19619/InnerClassSub.java b/tests/run/i19619/InnerClassSub.java new file mode 100644 index 000000000000..5a1d1f4d3857 --- /dev/null +++ b/tests/run/i19619/InnerClassSub.java @@ -0,0 +1,54 @@ +// InnerClass.java + +package lib; + +public class InnerClassSub extends InnerClass { + + public class InnerSub extends Inner { + public InnerSub(U innerField) { + super(innerField); + } + } + + public class OuterSub extends Outer { + public OuterSub() { + super(); + } + } + + public Inner createInnerSub(U innerField) { + return new InnerSub<>(innerField); + } + + public Outer.Nested createNestedSub(U outerField, V innerField) { + OuterSub outer = new OuterSub<>(); + return outer.new Nested<>(outerField, innerField); + } + + public InnerClass.Inner createInnerSub2(U innerField) { + return new InnerSub<>(innerField); + } + + public InnerClass.Outer.Nested createNestedSub2(U outerField, V innerField) { + OuterSub outer = new OuterSub<>(); + return outer.new Nested<>(outerField, innerField); + } + + public static InnerClass.Inner createInnerStatic(U innerField) { + InnerClassSub innerClass = new InnerClassSub(); + return innerClass.new Inner<>(innerField); + } + + public static InnerClass.Outer.Nested createNestedStatic(U outerField, V innerField) { + InnerClassSub innerClass = new InnerClassSub(); + InnerClassSub.Outer outer = innerClass.new Outer<>(); + return outer.new Nested<>(outerField, innerField); + } + + public static void consumeNestedStatic(InnerClass.Outer.Nested nested) { + } + + public static void consumeNestedStatic2(Outer.Nested nested) { + } + +} diff --git a/tests/run/i19619/RawTypes.java b/tests/run/i19619/RawTypes.java new file mode 100644 index 000000000000..4373a04093eb --- /dev/null +++ b/tests/run/i19619/RawTypes.java @@ -0,0 +1,18 @@ +// RawTypes.java + +package lib; + +public class RawTypes { + + public class C { + public class D { + } + } + + public static void mii_Raw_Raw(RawTypes.C.D d) { + } + + public static void mii_Raw_Raw2(C.D d) { + } + +} diff --git a/tests/run/i19619/Test.scala b/tests/run/i19619/Test.scala new file mode 100644 index 000000000000..871e7a490ea0 --- /dev/null +++ b/tests/run/i19619/Test.scala @@ -0,0 +1,61 @@ +// scalajs: --skip + +import lib.InnerClass +import lib.InnerClassGen +import lib.RawTypes +import lib.InnerClassSub + +@main def Test = + + locally: + val ici: InnerClass = new InnerClass() + // val ici_inner1: ici.Inner[Long] = ici.createInner[Long](47L) // error + val ici_inner2: InnerClass#Inner[Long] = ici.createInner[Long](47L) + val ici_inner3: InnerClass#Inner[Long] = 
InnerClass.createInnerStatic[Long](47L)
+
+    val ici_outer: InnerClass#Outer[Long] = new ici.Outer[Long]()
+    val ici_nested1: InnerClass#Outer[Long]#Nested[Int] = new ici_outer.Nested[Int](47L, 23)
+    val ici_nested2: InnerClass#Outer[Long]#Nested[Int] = ici.createNested[Long, Int](47L, 23)
+    val ici_nested3: InnerClass#Outer[Long]#Nested[Int] = InnerClass.createNestedStatic[Long, Int](47L, 23)
+
+    InnerClass.consumeNestedStatic(ici_nested3)
+    InnerClass.consumeNestedStatic2(ici_nested3)
+
+  locally:
+    val ici: InnerClassGen[String] = new InnerClassGen()
+    // val ici_inner1: ici.Inner[Long] = ici.createInner[Long]("Hello", 47L) // error
+    val ici_inner2: InnerClassGen[String]#Inner[Long] = ici.createInner[Long]("Hello", 47L)
+    val ici_inner3: InnerClassGen[String]#Inner[Long] = InnerClassGen.createInnerStatic[String, Long]("Hello", 47L)
+
+    val ici_outer: InnerClassGen[String]#Outer[Long] = new ici.Outer[Long]()
+    val ici_nested1: InnerClassGen[String]#Outer[Long]#Nested[Int] = new ici_outer.Nested[Int]("Hello", 47L, 23)
+    val ici_nested2: InnerClassGen[String]#Outer[Long]#Nested[Int] = ici.createNested[Long, Int]("Hello", 47L, 23)
+    val ici_nested3: InnerClassGen[String]#Outer[Long]#Nested[Int] = InnerClassGen.createNestedStatic[String, Long, Int]("Hello", 47L, 23)
+
+    InnerClassGen.consumeNestedStatic(ici_nested3)
+
+  locally:
+    val rt: RawTypes = new RawTypes()
+    val c: RawTypes#C[String] = new rt.C[String]()
+
+    val cd_ii: RawTypes#C[String]#D[String] = new c.D[String]()
+    val cd_ii_Raw: RawTypes#C[?]#D[?] = cd_ii
+
+    RawTypes.mii_Raw_Raw(cd_ii_Raw)
+    RawTypes.mii_Raw_Raw2(cd_ii_Raw)
+
+  locally:
+    val ici: InnerClassSub = new InnerClassSub()
+    // val ici_inner1: ici.Inner[Long] = ici.createInner[Long](47L) // error
+    val ici_inner2: InnerClass#Inner[Long] = ici.createInnerSub[Long](47L)
+    val ici_inner2_2: InnerClass#Inner[Long] = ici.createInnerSub2[Long](47L)
+    val ici_inner3: InnerClass#Inner[Long] = InnerClassSub.createInnerStatic[Long](47L)
+
+    val ici_outer: InnerClassSub#Outer[Long] = new ici.Outer[Long]()
+    val ici_nested1: InnerClassSub#Outer[Long]#Nested[Int] = new ici_outer.Nested[Int](47L, 23)
+    val ici_nested2: InnerClass#Outer[Long]#Nested[Int] = ici.createNestedSub[Long, Int](47L, 23)
+    val ici_nested2_2: InnerClass#Outer[Long]#Nested[Int] = ici.createNestedSub2[Long, Int](47L, 23)
+    val ici_nested3: InnerClass#Outer[Long]#Nested[Int] = InnerClassSub.createNestedStatic[Long, Int](47L, 23)
+
+    InnerClass.consumeNestedStatic(ici_nested3)
+    InnerClass.consumeNestedStatic2(ici_nested3)
diff --git a/tests/run/i19629/Test_2.scala b/tests/run/i19629/Test_2.scala
new file mode 100644
index 000000000000..c9d1ee7f79e5
--- /dev/null
+++ b/tests/run/i19629/Test_2.scala
@@ -0,0 +1,12 @@
+
+class Container[Y1, G[_]]:
+  lazy val outer: Knit[CP, G] = new:
+    type Res = Y1
+    def visit[R](caseInFst: [F1[_], Y] => (k: Knit[CP, F1]) => (ev: TypeEqK[G, [x] =>> CP[F1[x], Y]]) => R): R =
+      caseInFst[G, Res](outer)(new TypeEqK[G, [x] =>> CP[G[x], Res]] {})
+
+@main def Test =
+  val knit = new Container[Unit, Option].outer
+  val res = knit.visit:
+    [F1[_], Y] => (k: Knit[CP, F1]) => (ev: TypeEqK[Option, [x] =>> CP[F1[x], Y]]) => 42
+  assert(res == 42)
diff --git a/tests/run/i19629/lib_1.scala b/tests/run/i19629/lib_1.scala
new file mode 100644
index 000000000000..ec96e94e70dd
--- /dev/null
+++ b/tests/run/i19629/lib_1.scala
@@ -0,0 +1,10 @@
+trait CP[A,B]
+trait TypeEqK[F[_], G[_]]
+
+trait Knit[CP[_, _], F[_]] {
+  type Res
+
+  def visit[R](
+    caseInFst: [F1[_], Y] => (k: Knit[CP, F1]) => (ev: TypeEqK[F, [x] =>> CP[F1[x], Y]]) => R
+  ): R
+}
diff --git a/tests/run/i19711.scala b/tests/run/i19711.scala
new file mode 100644
index 000000000000..a9ef03b398e2
--- /dev/null
+++ b/tests/run/i19711.scala
@@ -0,0 +1,29 @@
+class Foo(val s: Any):
+  def this(s: String) =
+    this(0)
+class Bar(s: String) extends Foo(s):
+  def foo = s
+
+class Foo2(val s: Any)
+class Bar2(s: String) extends Foo2(s):
+  def foo = s
+
+case class Config(_config: String)
+
+abstract class Foo3(val config: Config) {
+  def this(config: String) = {
+    this(Config(config))
+  }
+}
+
+class Bar3(config: String) extends Foo3(config) {
+  def foo(): Unit = {
+    config.getClass()
+  }
+}
+
+
+@main def Test =
+  Bar("").foo
+  Bar2("").foo
+  Bar3("").foo()
diff --git a/tests/run/i19724.scala b/tests/run/i19724.scala
new file mode 100644
index 000000000000..0ed6fcb94c57
--- /dev/null
+++ b/tests/run/i19724.scala
@@ -0,0 +1,18 @@
+class F0 extends (() => Double):
+  inline def apply(): Double = 1.toDouble
+
+class F1 extends (Int => Double):
+  inline def apply(v: Int): Double = v.toDouble
+
+class F2 extends ((Int, Int) => Double):
+  inline def apply(v1: Int, v2: Int): Double = (v1 + v2).toDouble
+
+@main def Test =
+  val f0: (() => Double) = new F0
+  assert(f0() == 1.0)
+
+  val f1: (Int => Double) = new F1
+  assert(f1(3) == 3.0)
+
+  val f2: ((Int, Int) => Double) = new F2
+  assert(f2(3, 2) == 5.0)
diff --git a/tests/run/main-annotation-birthday.scala b/tests/run/main-annotation-birthday.scala
index 563bb2628e62..32cf28784ced 100644
--- a/tests/run/main-annotation-birthday.scala
+++ b/tests/run/main-annotation-birthday.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-dash-dash.scala b/tests/run/main-annotation-dash-dash.scala
index bf4a1a4e238c..3fe0f47983d5 100644
--- a/tests/run/main-annotation-dash-dash.scala
+++ b/tests/run/main-annotation-dash-dash.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-default-value-1.scala b/tests/run/main-annotation-default-value-1.scala
index 40888c947bb6..cf4ba79e1aff 100644
--- a/tests/run/main-annotation-default-value-1.scala
+++ b/tests/run/main-annotation-default-value-1.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-default-value-2.scala b/tests/run/main-annotation-default-value-2.scala
index 76ee1bb5cc9f..8b60e6197405 100644
--- a/tests/run/main-annotation-default-value-2.scala
+++ b/tests/run/main-annotation-default-value-2.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-example.scala b/tests/run/main-annotation-example.scala
index 478ab6baeb4b..926496e595e7 100644
--- a/tests/run/main-annotation-example.scala
+++ b/tests/run/main-annotation-example.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.*
diff --git a/tests/run/main-annotation-flags.scala b/tests/run/main-annotation-flags.scala
index 7e502585489d..8a579e6e2d00 100644
--- a/tests/run/main-annotation-flags.scala
+++ b/tests/run/main-annotation-flags.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-help-override.scala b/tests/run/main-annotation-help-override.scala
index d174e13ac891..bfff85c5a353 100644
--- a/tests/run/main-annotation-help-override.scala
+++ b/tests/run/main-annotation-help-override.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-help.scala b/tests/run/main-annotation-help.scala
index e7fdf7c362de..d68bb0d7e874 100644
--- a/tests/run/main-annotation-help.scala
+++ b/tests/run/main-annotation-help.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-homemade-annot-1.scala b/tests/run/main-annotation-homemade-annot-1.scala
index 3caa2c7e6b00..3106dae4006f 100644
--- a/tests/run/main-annotation-homemade-annot-1.scala
+++ b/tests/run/main-annotation-homemade-annot-1.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.concurrent._
diff --git a/tests/run/main-annotation-homemade-annot-2.scala b/tests/run/main-annotation-homemade-annot-2.scala
index a131aa336d47..980241ff93d3 100644
--- a/tests/run/main-annotation-homemade-annot-2.scala
+++ b/tests/run/main-annotation-homemade-annot-2.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.collection.mutable
diff --git a/tests/run/main-annotation-homemade-annot-3.scala b/tests/run/main-annotation-homemade-annot-3.scala
index d024ba300665..4a894777c562 100644
--- a/tests/run/main-annotation-homemade-annot-3.scala
+++ b/tests/run/main-annotation-homemade-annot-3.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.*
diff --git a/tests/run/main-annotation-homemade-annot-4.scala b/tests/run/main-annotation-homemade-annot-4.scala
index b4ea510aa677..b50e89523475 100644
--- a/tests/run/main-annotation-homemade-annot-4.scala
+++ b/tests/run/main-annotation-homemade-annot-4.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.*
diff --git a/tests/run/main-annotation-homemade-annot-5.scala b/tests/run/main-annotation-homemade-annot-5.scala
index fc5e34e41d6c..a129a51da7eb 100644
--- a/tests/run/main-annotation-homemade-annot-5.scala
+++ b/tests/run/main-annotation-homemade-annot-5.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.*
diff --git a/tests/run/main-annotation-homemade-annot-6.scala b/tests/run/main-annotation-homemade-annot-6.scala
index 39d258803901..5a92e6382d3d 100644
--- a/tests/run/main-annotation-homemade-annot-6.scala
+++ b/tests/run/main-annotation-homemade-annot-6.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.*
diff --git a/tests/run/main-annotation-homemade-parser-1.scala b/tests/run/main-annotation-homemade-parser-1.scala
index be2adc4a5f72..94d43bf19cc5 100644
--- a/tests/run/main-annotation-homemade-parser-1.scala
+++ b/tests/run/main-annotation-homemade-parser-1.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-homemade-parser-2.scala b/tests/run/main-annotation-homemade-parser-2.scala
index 1a51400d9f37..4f40f9b42b27 100644
--- a/tests/run/main-annotation-homemade-parser-2.scala
+++ b/tests/run/main-annotation-homemade-parser-2.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-homemade-parser-3.scala b/tests/run/main-annotation-homemade-parser-3.scala
index 1e5a4c0dae00..066e40f1b3a0 100644
--- a/tests/run/main-annotation-homemade-parser-3.scala
+++ b/tests/run/main-annotation-homemade-parser-3.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-homemade-parser-4.scala b/tests/run/main-annotation-homemade-parser-4.scala
index fc087354c16e..668aa040380c 100644
--- a/tests/run/main-annotation-homemade-parser-4.scala
+++ b/tests/run/main-annotation-homemade-parser-4.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-homemade-parser-5.scala b/tests/run/main-annotation-homemade-parser-5.scala
index f7443f34cab3..123631312ef7 100644
--- a/tests/run/main-annotation-homemade-parser-5.scala
+++ b/tests/run/main-annotation-homemade-parser-5.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-multiple.scala b/tests/run/main-annotation-multiple.scala
index 62a5fa04cc18..dbc66d0df9ca 100644
--- a/tests/run/main-annotation-multiple.scala
+++ b/tests/run/main-annotation-multiple.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-named-params.scala b/tests/run/main-annotation-named-params.scala
index 8d677d65978d..4cfa2c8049b4 100644
--- a/tests/run/main-annotation-named-params.scala
+++ b/tests/run/main-annotation-named-params.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-newMain.scala b/tests/run/main-annotation-newMain.scala
index a28e9d2ab96a..5b00a46ce7e9 100644
--- a/tests/run/main-annotation-newMain.scala
+++ b/tests/run/main-annotation-newMain.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.*
diff --git a/tests/run/main-annotation-no-parameters-no-parens.scala b/tests/run/main-annotation-no-parameters-no-parens.scala
index 7ba87c82c745..b62fd55538de 100644
--- a/tests/run/main-annotation-no-parameters-no-parens.scala
+++ b/tests/run/main-annotation-no-parameters-no-parens.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-no-parameters.scala b/tests/run/main-annotation-no-parameters.scala
index 951254f07bf7..fc92a5680e07 100644
--- a/tests/run/main-annotation-no-parameters.scala
+++ b/tests/run/main-annotation-no-parameters.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-overload.scala b/tests/run/main-annotation-overload.scala
index 939f2044f227..60f9b68a58a2 100644
--- a/tests/run/main-annotation-overload.scala
+++ b/tests/run/main-annotation-overload.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-param-annot-1.scala b/tests/run/main-annotation-param-annot-1.scala
index d16e4fac8848..5cf29b9f4efb 100644
--- a/tests/run/main-annotation-param-annot-1.scala
+++ b/tests/run/main-annotation-param-annot-1.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-param-annot-2.scala b/tests/run/main-annotation-param-annot-2.scala
index 0ee99038910a..76033f24e614 100644
--- a/tests/run/main-annotation-param-annot-2.scala
+++ b/tests/run/main-annotation-param-annot-2.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-param-annot-invalid-params.scala b/tests/run/main-annotation-param-annot-invalid-params.scala
index 076c7ff80750..46bc812863b1 100644
--- a/tests/run/main-annotation-param-annot-invalid-params.scala
+++ b/tests/run/main-annotation-param-annot-invalid-params.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-return-type-1.scala b/tests/run/main-annotation-return-type-1.scala
index 641786661613..1366cceeba8a 100644
--- a/tests/run/main-annotation-return-type-1.scala
+++ b/tests/run/main-annotation-return-type-1.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-return-type-2.scala b/tests/run/main-annotation-return-type-2.scala
index 986c1f0eae94..e2dc6b8ae4e6 100644
--- a/tests/run/main-annotation-return-type-2.scala
+++ b/tests/run/main-annotation-return-type-2.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-short-name.scala b/tests/run/main-annotation-short-name.scala
index 72e37acdce5c..4a179fb793e1 100644
--- a/tests/run/main-annotation-short-name.scala
+++ b/tests/run/main-annotation-short-name.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-simple.scala b/tests/run/main-annotation-simple.scala
index 47407c85e36c..7d2fd501849b 100644
--- a/tests/run/main-annotation-simple.scala
+++ b/tests/run/main-annotation-simple.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-top-level.scala b/tests/run/main-annotation-top-level.scala
index 52ca42205d9e..3e2bb7bb2fb4 100644
--- a/tests/run/main-annotation-top-level.scala
+++ b/tests/run/main-annotation-top-level.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-types.scala b/tests/run/main-annotation-types.scala
index 249c9d010268..0ee6220a1196 100644
--- a/tests/run/main-annotation-types.scala
+++ b/tests/run/main-annotation-types.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-vararg-1.scala b/tests/run/main-annotation-vararg-1.scala
index a1b99cd5b150..0227054e0189 100644
--- a/tests/run/main-annotation-vararg-1.scala
+++ b/tests/run/main-annotation-vararg-1.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-vararg-2.scala b/tests/run/main-annotation-vararg-2.scala
index becb5958ad8e..8521795388b2 100644
--- a/tests/run/main-annotation-vararg-2.scala
+++ b/tests/run/main-annotation-vararg-2.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-wrong-param-1.scala b/tests/run/main-annotation-wrong-param-1.scala
index ef6ada37b9e3..6c9e9e991136 100644
--- a/tests/run/main-annotation-wrong-param-1.scala
+++ b/tests/run/main-annotation-wrong-param-1.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-wrong-param-names.scala b/tests/run/main-annotation-wrong-param-names.scala
index c428c955b5cd..90622d543bf1 100644
--- a/tests/run/main-annotation-wrong-param-names.scala
+++ b/tests/run/main-annotation-wrong-param-names.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-wrong-param-number.scala b/tests/run/main-annotation-wrong-param-number.scala
index e607a85e4dc2..b8ef8c0ea9e7 100644
--- a/tests/run/main-annotation-wrong-param-number.scala
+++ b/tests/run/main-annotation-wrong-param-number.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-annotation-wrong-param-type.scala b/tests/run/main-annotation-wrong-param-type.scala
index 2228a8faf7df..0fbae70a48a5 100644
--- a/tests/run/main-annotation-wrong-param-type.scala
+++ b/tests/run/main-annotation-wrong-param-type.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 import scala.annotation.newMain
diff --git a/tests/run/main-calculator-example.scala b/tests/run/main-calculator-example.scala
index 857792f11967..fc2e1397009b 100644
--- a/tests/run/main-calculator-example.scala
+++ b/tests/run/main-calculator-example.scala
@@ -1,3 +1,4 @@
+//> using options -experimental -Yno-experimental
 // scalajs: --skip

 sealed trait Expression:
@@ -23,7 +24,7 @@
 import scala.annotation.{ MainAnnotation, experimental }
 import scala.annotation.MainAnnotation.{ Info, Parameter }
 import scala.util.CommandLineParser.FromString
-@experimental class showAndEval extends MainAnnotation[FromString, Expression]:
+class showAndEval extends MainAnnotation[FromString, Expression]:
   def command(info: Info, args: Seq[String]): Option[Seq[String]] =
     assert(info.parameters.forall(param => param.typeName == "Number"), "Only Number parameters allowed")
     println(s"executing ${info.name} with inputs: ${args.mkString(" ")}")
diff --git a/tests/run/noProtectedSuper.scala b/tests/run/noProtectedSuper.scala
index 41b0615d12ab..999a8a06c4fa 100644
--- a/tests/run/noProtectedSuper.scala
+++ b/tests/run/noProtectedSuper.scala
@@ -1,3 +1,5 @@
+//> using options -experimental -Yno-experimental
+
 import scala.annotation.publicInBinary

 package p {
diff --git a/tests/run/polymorphic-erased-functions.scala b/tests/run/polymorphic-erased-functions.scala
index 4086423d8c6a..40fd548a67af 100644
--- a/tests/run/polymorphic-erased-functions.scala
+++ b/tests/run/polymorphic-erased-functions.scala
@@ -3,8 +3,8 @@ import language.experimental.erasedDefinitions

 object Test extends App {
   // Types
-  type F1 = [T] => (erased T) => Int
-  type F2 = [T, U] => (T, erased U) => T
+  type F1 = [T] => (erased x: T) => Int
+  type F2 = [T, U] => (t: T, erased u: U) => T

   // Terms
   val t1 = [T] => (erased t: T) => 3
diff --git a/tests/run/publicInBinary/Lib_1.scala b/tests/run/publicInBinary/Lib_1.scala
index a3c6ccea8427..86895ba40706 100644
--- a/tests/run/publicInBinary/Lib_1.scala
+++ b/tests/run/publicInBinary/Lib_1.scala
@@ -1,4 +1,4 @@
-//> using options -Werror -WunstableInlineAccessors
+//> using options -experimental -Yno-experimental -Werror -WunstableInlineAccessors

 package foo

diff --git a/tests/run/quotes-add-erased/Macro_1.scala b/tests/run/quotes-add-erased/Macro_1.scala
index 56247d45cd23..5b95051a3744 100644
--- a/tests/run/quotes-add-erased/Macro_1.scala
+++ b/tests/run/quotes-add-erased/Macro_1.scala
@@ -1,3 +1,5 @@
+//> using options -experimental -Yno-experimental
+
 import scala.annotation.MacroAnnotation
 import scala.annotation.internal.ErasedParam
 import scala.quoted._
diff --git a/tests/run/quotes-reflection/Macros_1.scala b/tests/run/quotes-reflection/Macros_1.scala
index 5945d39a097a..c9fe6eb38c99 100644
--- a/tests/run/quotes-reflection/Macros_1.scala
+++ b/tests/run/quotes-reflection/Macros_1.scala
@@ -1,3 +1,5 @@
+//> using options -experimental -Yno-experimental
+
 import scala.quoted.*

 inline def inspect[A]: String =
diff --git a/tests/run/t9915/C_1.java b/tests/run/t9915/C_1.java
index 4269cf74e058..ec94b1412741 100644
--- a/tests/run/t9915/C_1.java
+++ b/tests/run/t9915/C_1.java
@@ -1,6 +1,3 @@
-/*
- * javac: -encoding UTF-8
- */
 public class C_1 {
     public static final String NULLED = "X\000ABC";
     public static final String SUPPED = "𐒈𐒝𐒑𐒛𐒐𐒘𐒕𐒖";
diff --git a/tests/run/tupled-function-andThen.scala b/tests/run/tupled-function-andThen.scala
index 94236e9267e1..0068143f9d3f 100644
--- a/tests/run/tupled-function-andThen.scala
+++ b/tests/run/tupled-function-andThen.scala
@@ -1,3 +1,5 @@
+//> using options -experimental -Yno-experimental
+
 import scala.util.TupledFunction

 object Test {
diff --git a/tests/run/tupled-function-apply.scala b/tests/run/tupled-function-apply.scala
index 7d2162a565ac..69cfeef91dd1 100644
--- a/tests/run/tupled-function-apply.scala
+++ b/tests/run/tupled-function-apply.scala
@@ -1,3 +1,5 @@
+//> using options -experimental -Yno-experimental
+
 import scala.util.TupledFunction

 object Test {
diff --git a/tests/run/tupled-function-compose.scala b/tests/run/tupled-function-compose.scala
index 4cf83563274d..d984b8a9184a 100644
--- a/tests/run/tupled-function-compose.scala
+++ b/tests/run/tupled-function-compose.scala
@@ -1,3 +1,5 @@
+//> using options -experimental -Yno-experimental
+
 import scala.util.TupledFunction
 object Test {
   def main(args: Array[String]): Unit = {
diff --git a/tests/run/tupled-function-extension-method.scala b/tests/run/tupled-function-extension-method.scala
index be5ccbd5ca17..fc3319aa4c15 100644
--- a/tests/run/tupled-function-extension-method.scala
+++ b/tests/run/tupled-function-extension-method.scala
@@ -1,3 +1,5 @@
+//> using options -experimental -Yno-experimental
+
 import scala.util.TupledFunction
 object Test {
   def main(args: Array[String]): Unit = {
diff --git a/tests/run/tupled-function-tupled.scala b/tests/run/tupled-function-tupled.scala
index 6e7d94b3ac3d..5a799be167c3 100644
--- a/tests/run/tupled-function-tupled.scala
+++ b/tests/run/tupled-function-tupled.scala
@@ -1,3 +1,5 @@
+//> using options -experimental -Yno-experimental
+
 import scala.util.TupledFunction

 object Test {
diff --git a/tests/run/tupled-function-untupled.scala b/tests/run/tupled-function-untupled.scala
index e5e3d8c29c3a..34b81c74c4f6 100644
--- a/tests/run/tupled-function-untupled.scala
+++ b/tests/run/tupled-function-untupled.scala
@@ -1,3 +1,5 @@
+//> using options -experimental -Yno-experimental
+
 import scala.util.TupledFunction
 object Test {
   def main(args: Array[String]): Unit = {
diff --git a/tests/warn/convertible.scala b/tests/warn/convertible.scala
index c98006ecdc9b..b701cac32cec 100644
--- a/tests/warn/convertible.scala
+++ b/tests/warn/convertible.scala
@@ -18,13 +18,27 @@ object Test:

   def g(x: into Text) =
     println(x.str)
+  def g2(x: into Text) =
+    println(x.str)
+
+  def g3(x: Text) =
+    println(x.str)

   g("abc") // OK
   val gg = g
-  gg("abc") // straight eta expansion is also OK
+  gg("abc") // warn, eta expansion does not preserve into
+
+  val c1 = if ??? then g else g2
+  c1("abc") // warn, eta expansion does not preserve into
+
+  val c2 = if ??? then g else g3
+  c2("abc") // warn, eta expansion does not preserve into
+
+  val c3 = if ??? then g3 else g
+  c3("abc") // warn, eta expansion does not preserve into

   def h1[X](x: X)(y: X): Unit = ()
   def h(x: into Text) =
     val y = h1(x)
-    y("abc") // warn, inference through type variable does not propagate
\ No newline at end of file
+    y("abc") // warn, eta expansion does not preserve into
\ No newline at end of file
diff --git a/tests/warn/i16610.check b/tests/warn/i16610.check
new file mode 100644
index 000000000000..4eb69b5c551a
--- /dev/null
+++ b/tests/warn/i16610.check
@@ -0,0 +1,5 @@
+-- Warning: tests/warn/i16610.scala:12:2 -------------------------------------------------------------------------------
+12 |  /** // warn
+   |  ^
+   |  Ambiguous Scaladoc comment on multiple cases is ignored.
+   |  Remove the comment or make separate cases to add Scaladoc comments to each of them.
diff --git a/tests/warn/i16610.scala b/tests/warn/i16610.scala
new file mode 100644
index 000000000000..7657d23b7fd2
--- /dev/null
+++ b/tests/warn/i16610.scala
@@ -0,0 +1,16 @@
+//> using options -Wenum-comment-discard
+/**
+ * Description of enum
+ */
+enum MyEnum {
+
+  /**
+   * Description of case 1
+   */
+  case MyCase1
+
+  /** // warn
+   * Description of case 2 and 3
+   */
+  case MyCase2, MyCase3
+}
\ No newline at end of file
diff --git a/tests/warn/i17612a.scala b/tests/warn/i17612a.scala
index 7473d8fd9ec9..87fddf9c180f 100644
--- a/tests/warn/i17612a.scala
+++ b/tests/warn/i17612a.scala
@@ -1,4 +1,4 @@
-//> using options -Xlint:private-shadow -source:3.3
+//> using options -Wshadow:private-shadow -source:3.3

 object i17612a:
   class Base(var x: Int, val y: Int, var z: Int):
diff --git a/tests/warn/i17613a.scala b/tests/warn/i17613a.scala
index 6ee55a5cf973..5e841a79d105 100644
--- a/tests/warn/i17613a.scala
+++ b/tests/warn/i17613a.scala
@@ -1,4 +1,4 @@
-//> using options -Xlint:type-parameter-shadow
+//> using options -Wshadow:type-parameter-shadow

 object i17613a:
   class B:
diff --git a/tests/warn/i19913.check b/tests/warn/i19913.check
new file mode 100644
index 000000000000..4f532393aa45
--- /dev/null
+++ b/tests/warn/i19913.check
@@ -0,0 +1,8 @@
+-- Deprecation Warning: tests/warn/i19913.scala:13:25 ------------------------------------------------------------------
+13 |  Deprecated.inlineMethod() // warn
+   |  ^^^^^^^^^^^^^^^^^^^^^^^^^
+   |  method inlineMethod in object Deprecated is deprecated: test
+-- Deprecation Warning: tests/warn/i19913.scala:12:13 ------------------------------------------------------------------
+12 |  Deprecated.method() // warn
+   |  ^^^^^^^^^^^^^^^^^
+   |  method method in object Deprecated is deprecated: test
diff --git a/tests/warn/i19913.scala b/tests/warn/i19913.scala
new file mode 100644
index 000000000000..f3b0a95858a4
--- /dev/null
+++ b/tests/warn/i19913.scala
@@ -0,0 +1,13 @@
+//> using options -deprecation
+
+object Deprecated:
+
+  @deprecated("test")
+  def method() = ???
+
+  @deprecated("test")
+  inline def inlineMethod() = ???
+
+object Test:
+  Deprecated.method() // warn
+  Deprecated.inlineMethod() // warn