diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index ecf8f993f90b0..93388ddd24075 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -7,6 +7,6 @@ tracking issue or there are none, feel free to ignore this. This PR will get automatically assigned to a reviewer. In case you would like a specific user to review your work, you can assign it to them by using - r\? (with the `\` removed) + r? --> diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f6df348b72140..c650df6a0ec2a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2,7 +2,7 @@ # and also on pushes to special branches (auto, try). # # The actual definition of the executed jobs is calculated by a Python -# script located at src/ci/github-actions/calculate-job-matrix.py, which +# script located at src/ci/github-actions/ci.py, which # uses job definition data from src/ci/github-actions/jobs.yml. # You should primarily modify the `jobs.yml` file if you want to modify # what jobs are executed in CI. @@ -46,7 +46,7 @@ jobs: # If you want to modify CI jobs, take a look at src/ci/github-actions/jobs.yml. calculate_matrix: name: Calculate job matrix - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 outputs: jobs: ${{ steps.jobs.outputs.jobs }} run_type: ${{ steps.jobs.outputs.run_type }} @@ -56,10 +56,10 @@ jobs: - name: Calculate the CI job matrix env: COMMIT_MESSAGE: ${{ github.event.head_commit.message }} - run: python3 src/ci/github-actions/calculate-job-matrix.py >> $GITHUB_OUTPUT + run: python3 src/ci/github-actions/ci.py calculate-job-matrix >> $GITHUB_OUTPUT id: jobs job: - name: ${{ matrix.name }} + name: ${{ matrix.full_name }} needs: [ calculate_matrix ] runs-on: "${{ matrix.os }}" defaults: @@ -67,7 +67,7 @@ jobs: shell: ${{ contains(matrix.os, 'windows') && 'msys2 {0}' || 'bash' }} timeout-minutes: 360 env: - CI_JOB_NAME: ${{ matrix.image }} + CI_JOB_NAME: ${{ matrix.name }} CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse # commit of PR sha or commit sha. `GITHUB_SHA` is not accurate for PRs. HEAD_SHA: ${{ github.event.pull_request.head.sha || github.sha }} @@ -233,16 +233,17 @@ jobs: env: DATADOG_SITE: datadoghq.com DATADOG_API_KEY: ${{ secrets.DATADOG_API_KEY }} - DD_GITHUB_JOB_NAME: ${{ matrix.name }} + DD_GITHUB_JOB_NAME: ${{ matrix.full_name }} run: | - npm install -g @datadog/datadog-ci@^2.x.x - python3 src/ci/scripts/upload-build-metrics.py build/cpu-usage.csv + cd src/ci + npm ci + python3 scripts/upload-build-metrics.py ../../build/cpu-usage.csv # This job is used to tell bors the final status of the build, as there is no practical way to detect # when a workflow is successful listening to webhooks only in our current bors implementation (homu).
outcome: name: bors build finished - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 needs: [ calculate_matrix, job ] # !cancelled() executes the job regardless of whether the previous jobs passed or failed if: ${{ !cancelled() && contains(fromJSON('["auto", "try"]'), needs.calculate_matrix.outputs.run_type) }} diff --git a/.github/workflows/dependencies.yml b/.github/workflows/dependencies.yml index b7b5a03bd41f3..98d8c14f7d185 100644 --- a/.github/workflows/dependencies.yml +++ b/.github/workflows/dependencies.yml @@ -27,7 +27,7 @@ jobs: not-waiting-on-bors: if: github.repository_owner == 'rust-lang' name: skip if S-waiting-on-bors - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 steps: - env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -47,7 +47,7 @@ jobs: if: github.repository_owner == 'rust-lang' name: update dependencies needs: not-waiting-on-bors - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 steps: - name: checkout the source code uses: actions/checkout@v4 @@ -94,7 +94,7 @@ jobs: if: github.repository_owner == 'rust-lang' name: amend PR needs: update - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 permissions: contents: write pull-requests: write diff --git a/.github/workflows/ghcr.yml b/.github/workflows/ghcr.yml new file mode 100644 index 0000000000000..0fcd1b178163e --- /dev/null +++ b/.github/workflows/ghcr.yml @@ -0,0 +1,57 @@ +# Mirror DockerHub images used by the Rust project to ghcr.io. +# Images are available at https://github.com/orgs/rust-lang/packages. +# +# In some CI jobs, we pull images from ghcr.io instead of Docker Hub because +# Docker Hub has a rate limit, while ghcr.io doesn't. +# Those images are pushed to ghcr.io by this job. +# +# Note that authenticating to DockerHub or other registries isn't possible +# for PR jobs, because forks can't access secrets. +# That's why we use ghcr.io: it has no rate limit and it doesn't require authentication. + +name: GHCR + +on: + workflow_dispatch: + schedule: + # Run daily at midnight UTC + - cron: '0 0 * * *' + +jobs: + mirror: + name: DockerHub mirror + runs-on: ubuntu-24.04 + if: github.repository == 'rust-lang/rust' + permissions: + # Needed to write to the ghcr.io registry + packages: write + steps: + - uses: actions/checkout@v4 + with: + persist-credentials: false + + - name: Log in to registry + run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.repository_owner }} --password-stdin + + # Download crane in the current directory. + # We use crane because it copies the docker image for all the architectures available in + # DockerHub for the image. 
+ # Learn more about crane at + # https://github.com/google/go-containerregistry/blob/main/cmd/crane/README.md + - name: Download crane + run: | + curl -sL "https://github.com/google/go-containerregistry/releases/download/${VERSION}/go-containerregistry_${OS}_${ARCH}.tar.gz" | tar -xzf - + env: + VERSION: v0.20.2 + OS: Linux + ARCH: x86_64 + + - name: Mirror DockerHub + run: | + # DockerHub image we want to mirror + image="ubuntu:22.04" + + # Mirror image from DockerHub to ghcr.io + ./crane copy \ + docker.io/${image} \ + ghcr.io/${{ github.repository_owner }}/${image} diff --git a/.gitmodules b/.gitmodules index 07fd44c2b2da2..f9bd42edab3f4 100644 --- a/.gitmodules +++ b/.gitmodules @@ -22,10 +22,6 @@ path = library/stdarch url = https://github.com/rust-lang/stdarch.git shallow = true -[submodule "src/doc/rustc-dev-guide"] - path = src/doc/rustc-dev-guide - url = https://github.com/rust-lang/rustc-dev-guide.git - shallow = true [submodule "src/doc/edition-guide"] path = src/doc/edition-guide url = https://github.com/rust-lang/edition-guide.git diff --git a/.mailmap b/.mailmap index 874a42656c512..eb6a9fcabca99 100644 --- a/.mailmap +++ b/.mailmap @@ -48,6 +48,7 @@ Andrew Poelstra Anhad Singh Antoine Plaskowski Anton Löfgren +apiraino Araam Borhanian Araam Borhanian Areski Belaid areski @@ -62,7 +63,10 @@ Austin Seipp Ayaz Hafiz Aydin Kim aydin.kim Ayush Mishra +Ashley Mannix asrar +b-naber +b-naber BaoshanPang Barosl Lee Barosl LEE Bastian Kersting @@ -84,7 +88,8 @@ boolean_coercion Boris Egorov bors bors[bot] <26634292+bors[bot]@users.noreply.github.com> bors bors[bot] -Boxy +BoxyUwU +BoxyUwU Braden Nelson Brandon Sanderson Brandon Sanderson Brett Cannon Brett Cannon @@ -98,6 +103,8 @@ Caleb Cartwright Caleb Jones Noah Lev Noah Lev <37223377+camelid@users.noreply.github.com> +Catherine +Catherine cameron1024 Camille Gillot Carl-Anton Ingmarsson @@ -133,11 +140,13 @@ Clement Miao Clément Renault Cliff Dyer Clinton Ryan +Taylor Cramer ember arlynx Crazycolorz5 csmoe <35686186+csmoe@users.noreply.github.com> Cyryl Płotnicki Damien Schoof +Dan Gohman Dan Robertson Daniel Campoverde Daniel J Rollins @@ -179,10 +188,17 @@ Eduardo Bautista <=> Eduardo Bautista Eduardo Broto Edward Shen +Jacob Finkelman +Jacob Finkelman +Jacob Finkelman +Jacob Finkelman +Jacob Finkelman Elliott Slaughter Elly Fong-Jones Eric Holk Eric Holk +Eric Holk +Eric Holk Eric Holmes Eric Reed Erick Tryzelaar @@ -206,6 +222,7 @@ Felix S. Klock II Félix Saparelli Flaper Fesp Florian Berger +Florian Gilcher Florian Wilkens Florian Wilkens François Mockers Frank Steffahn @@ -240,6 +257,8 @@ Herman J. Radtke III Herman J. Radtke III Hrvoje Nikšić Hsiang-Cheng Yang +Huon Wilson +Huon Wilson Ian Jackson Ian Jackson Ian Jackson @@ -252,9 +271,13 @@ ivan tkachenko J. J. 
Weber Jack Huey Jacob +Jacob Hoffman-Andrews Jacob Greenfield Jacob Pratt Jacob Pratt +Jake Goulding +Jake Goulding +Jake Goulding Jake Vossen Jakob Degen Jakob Lautrup Nysom @@ -287,6 +310,7 @@ Jerry Hardee Jesús Rubio Jethro Beekman Jian Zeng +Jieyou Xu Jieyou Xu <39484203+jieyouxu@users.noreply.github.com> Jihyun Yu Jihyun Yu jihyun @@ -322,9 +346,12 @@ Josh Holmer Josh Stone Josh Stone Julia Ryan +Jubilee Young <46493976+workingjubilee@users.noreply.github.com> +Jubilee Young Julian Knodt jumbatm <30644300+jumbatm@users.noreply.github.com> Junyoung Cho +Jynn Nelson Jynn Nelson Jynn Nelson Jynn Nelson @@ -385,12 +412,14 @@ Marcell Pardavi Marcus Klaas de Vries Margaret Meyerhofer Mark Mansi +Mark Mansi Mark Rousskov Mark Sinclair Mark Sinclair =Mark Sinclair <=125axel125@gmail.com> Markus Legner Markus Westerlind Markus Martin Carton +Martin Carton Martin Habovštiak Martin Hafskjold Thoresen Martin Nordholts @@ -415,6 +444,7 @@ Melody Horn Mendes mental mibac138 <5672750+mibac138@users.noreply.github.com> +Michael Howell Michael Williams Michael Woerister Michael Woerister @@ -431,6 +461,7 @@ Ms2ger msizanoen1 Mukilan Thiagarajan Nadrieril Feneanar +Nadrieril Feneanar Nadrieril Feneanar NAKASHIMA, Makoto NAKASHIMA, Makoto @@ -447,15 +478,23 @@ Nicholas Bishop Nicholas Bishop Nicholas Nethercote Nicholas Nethercote +Nick Cameron +Nick Fitzgerald Nick Platt Niclas Schwarzlose <15schnic@gmail.com> Nicolas Abram Nicole Mazzuca +Niko Matsakis +Niko Matsakis +Noratrieb <48135649+Noratrieb@users.noreply.github.com> Noratrieb <48135649+Noratrieb@users.noreply.github.com> <48135649+Nilstrieb@users.noreply.github.com> Noratrieb <48135649+Noratrieb@users.noreply.github.com> +Noratrieb <48135649+Noratrieb@users.noreply.github.com> Noratrieb <48135649+Noratrieb@users.noreply.github.com> Nif Ward Nika Layzell +Nikita Popov +Nikita Popov NODA Kai Oğuz Ağcayazı Oğuz Ağcayazı @@ -516,6 +555,7 @@ Ricky Hosfelt Ritiek Malhotra Rob Arnold Rob Arnold Rob Arnold +Robert Collins Robert Foss robertfoss Robert Gawdzik Robert Gawdzik ☢ Robert Habermeier @@ -553,7 +593,15 @@ Simon Sapin Simonas Kazlauskas Simonas Kazlauskas Simonas Kazlauskas Siva Prasad +Skgland <3877590+Skgland@users.noreply.github.com> +Skgland <3877590+Skgland@users.noreply.github.com> +Skgland <3877590+Skgland@users.noreply.github.com> Smittyvb +Sophia June Turner <547158+sophiajt@users.noreply.github.com> +Sophia June Turner <547158+sophiajt@users.noreply.github.com> <547158+jntrnr@users.noreply.github.com> +Sophia June Turner <547158+sophiajt@users.noreply.github.com> +Sophia June Turner <547158+sophiajt@users.noreply.github.com> +Sophia June Turner <547158+sophiajt@users.noreply.github.com> Srinivas Reddy Thatiparthy Stanislav Tkach startling @@ -586,8 +634,10 @@ Tim Diekmann Tim Hutt Tim JIANG Tim Joseph Dumol +Tim Neumann Timothy Maloney Tomas Koutsky +Tomasz Miąsko Torsten Weber Torsten Weber Trevor Gross @@ -607,7 +657,9 @@ Valerii Lashmanov Vitali Haravy Vitali Haravy Vitaly Shukela Waffle Lapkin -Waffle Lapkin +Waffle Lapkin +Weihang Lo +Weihang Lo Wesley Wiser whitequark William Ting diff --git a/COPYRIGHT b/COPYRIGHT index 428a438a086e9..4dad74f234eaa 100644 --- a/COPYRIGHT +++ b/COPYRIGHT @@ -3,6 +3,7 @@ Short version for non-lawyers: The Rust Project is dual-licensed under Apache 2.0 and MIT terms. +It is Copyright (c) The Rust Project Contributors. Longer version: @@ -11,374 +12,23 @@ copyright assignment is required to contribute to the Rust project. Some files include explicit copyright notices and/or license notices. 
For full authorship information, see the version control history or -https://thanks.rust-lang.org - -Except as otherwise noted (below and/or in individual files), Rust is -licensed under the Apache License, Version 2.0 or - or the MIT license - or , at your option. - - -The Rust Project includes packages written by third parties. -The following third party packages are included, and carry -their own copyright notices and license terms: - -* LLVM, located in src/llvm-project, is licensed under the following - terms. - - ============================================================================== - The LLVM Project is under the Apache License v2.0 with LLVM Exceptions: - ============================================================================== - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - - ---- LLVM Exceptions to the Apache 2.0 License ---- - - As an exception, if, as a result of your compiling your source code, portions - of this Software are embedded into an Object form of such source code, you - may redistribute such embedded portions in such Object form without complying - with the conditions of Sections 4(a), 4(b) and 4(d) of the License. - - In addition, if you combine or link compiled forms of this Software with - software that is licensed under the GPLv2 ("Combined Software") and if a - court of competent jurisdiction determines that the patent provision (Section - 3), the indemnity provision (Section 9) or other Section of the License - conflicts with the conditions of the GPLv2, you may retroactively and - prospectively choose to deem waived or otherwise exclude such Section(s) of - the License, but only in their entirety and only with respect to the Combined - Software. - - ============================================================================== - Software from third parties included in the LLVM Project: - ============================================================================== - The LLVM Project contains third party software which is under different license - terms. All such code will be identified clearly using at least one of two - mechanisms: - 1) It will be in a separate directory tree with its own `LICENSE.txt` or - `LICENSE` file at the top containing the specific license and restrictions - which apply to that software, or - 2) It will contain specific license and restriction terms at the top of every - file. - - ============================================================================== - Legacy LLVM License (https://llvm.org/docs/DeveloperPolicy.html#legacy): - ============================================================================== - University of Illinois/NCSA - Open Source License - - Copyright (c) 2003-2019 University of Illinois at Urbana-Champaign. - All rights reserved. 
- - Developed by: - - LLVM Team - - University of Illinois at Urbana-Champaign - - http://llvm.org - - Permission is hereby granted, free of charge, to any person obtaining a copy of - this software and associated documentation files (the "Software"), to deal with - the Software without restriction, including without limitation the rights to - use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies - of the Software, and to permit persons to whom the Software is furnished to do - so, subject to the following conditions: - - * Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimers. - - * Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimers in the - documentation and/or other materials provided with the distribution. - - * Neither the names of the LLVM Team, University of Illinois at - Urbana-Champaign, nor the names of its contributors may be used to - endorse or promote products derived from this Software without specific - prior written permission. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS - FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE - SOFTWARE. - -* Portions of the FFI code for interacting with the native ABI - is derived from the Clay programming language, which carries - the following license. - - Copyright (C) 2008-2010 Tachyon Technologies. - All rights reserved. - - Redistribution and use in source and binary forms, with - or without modification, are permitted provided that the - following conditions are met: - - 1. Redistributions of source code must retain the above - copyright notice, this list of conditions and the - following disclaimer. - - 2. Redistributions in binary form must reproduce the - above copyright notice, this list of conditions and - the following disclaimer in the documentation and/or - other materials provided with the distribution. - - THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR - IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A - PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - DEVELOPERS AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, - INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF - USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING - NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE - USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY - OF SUCH DAMAGE. - -* Portions of internationalization code use code or data from Unicode, which - carry the following license: - - UNICODE LICENSE V3 - - COPYRIGHT AND PERMISSION NOTICE - - Copyright © 1991-2024 Unicode, Inc. - - NOTICE TO USER: Carefully read the following legal agreement. 
BY - DOWNLOADING, INSTALLING, COPYING OR OTHERWISE USING DATA FILES, AND/OR - SOFTWARE, YOU UNEQUIVOCALLY ACCEPT, AND AGREE TO BE BOUND BY, ALL OF THE - TERMS AND CONDITIONS OF THIS AGREEMENT. IF YOU DO NOT AGREE, DO NOT - DOWNLOAD, INSTALL, COPY, DISTRIBUTE OR USE THE DATA FILES OR SOFTWARE. - - Permission is hereby granted, free of charge, to any person obtaining a - copy of data files and any associated documentation (the "Data Files") or - software and any associated documentation (the "Software") to deal in the - Data Files or Software without restriction, including without limitation - the rights to use, copy, modify, merge, publish, distribute, and/or sell - copies of the Data Files or Software, and to permit persons to whom the - Data Files or Software are furnished to do so, provided that either (a) - this copyright and permission notice appear with all copies of the Data - Files or Software, or (b) this copyright and permission notice appear in - associated Documentation. - - THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY - KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF - MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF - THIRD PARTY RIGHTS. - - IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE - BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, - OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, - WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, - ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THE DATA - FILES OR SOFTWARE. - - Except as contained in this notice, the name of a copyright holder shall - not be used in advertising or otherwise to promote the sale, use or other - dealings in these Data Files or Software without prior written - authorization of the copyright holder. + + +Except as otherwise noted, Rust is licensed under the Apache License, Version +2.0 <LICENSE-APACHE> or <http://www.apache.org/licenses/LICENSE-2.0> or the MIT +license <LICENSE-MIT> or <http://opensource.org/licenses/MIT>, at your option. + +We track licenses for third-party materials in two ways: + +* We use [REUSE](https://reuse.software) to track license information for + in-tree source files - both those authored by the Rust project and those + authored by third parties. See `REUSE.toml`, and our cached output of the + `reuse` tool which is committed to `license-metadata.json`. +* We use `cargo` to track license information for out-of-tree dependencies. + +These two sources of information are collected by the tool `generate-copyright` +into a file called `COPYRIGHT.html`, which is shipped with each binary release +of Rust. Please refer to that file for detailed information as to the components of +any given Rust release. We also produce a `COPYRIGHT-library.html` file which only +covers the subset of source code used in the Rust Standard Library, as opposed
+to the toolchain as a whole.
diff --git a/Cargo.lock b/Cargo.lock index 5a8f2e2f9cee4..2eaa9d3684663 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -57,9 +57,9 @@ dependencies = [ [[package]] name = "allocator-api2" -version = "0.2.20" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45862d1c77f2228b9e10bc609d5bc203d86ebc9b87ad8d5d5167a6c9abf739d9" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "ammonia" @@ -101,12 +101,12 @@ dependencies = [ [[package]] name = "annotate-snippets" -version = "0.11.4" +version = "0.11.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24e35ed54e5ea7997c14ed4c70ba043478db1112e98263b3b035907aa197d991" +checksum = "710e8eae58854cdc1790fcb56cca04d712a17be849eeb81da2a724bf4bae2bc4" dependencies = [ "anstyle", - "unicode-width 0.1.14", + "unicode-width 0.2.0", ] [[package]] @@ -182,12 +182,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.93" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775" -dependencies = [ - "backtrace", -] +checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" [[package]] name = "ar_archive_writer" @@ -195,7 +192,7 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "01667f6f40216b9a0b2945e05fed5f1ad0ab6470e69cb9378001e37b1c0668e4" dependencies = [ - "object 0.36.5", + "object 0.36.7", ] [[package]] @@ -257,9 +254,9 @@ dependencies = [ [[package]] name = "bitflags" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" +checksum = "1be3f42a67d6d345ecd59f675f3f012d6974981560836e938c22b424b85ce1be" [[package]] name = "blake3" @@ -285,9 +282,9 @@ dependencies = [ [[package]] name = "bstr" -version = "1.11.0" +version = "1.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a68f1f47cdf0ec8ee4b941b2eee2a80cb796db73118c0dd09ac63fbe405be22" +checksum = "531a9155a481e2ee699d4f98f43c0ca4ff8ee1bfd55c31e9e98fb29d2b176fe0" dependencies = [ "memchr", "regex-automata 0.4.9", @@ -367,7 +364,7 @@ dependencies = [ name = "cargo-miri" version = "0.1.0" dependencies = [ - "cargo_metadata", + "cargo_metadata 0.18.1", "directories", "rustc-build-sysroot", "rustc_tools_util", @@ -396,7 +393,21 @@ dependencies = [ "semver", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", +] + +[[package]] +name = "cargo_metadata" +version = "0.19.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8769706aad5d996120af43197bf46ef6ad0fda35216b4505f926a365a232d924" +dependencies = [ + "camino", + "cargo-platform", + "semver", + "serde", + "serde_json", + "thiserror 2.0.11", ] [[package]] @@ -405,9 +416,9 @@ version = "0.1.0" [[package]] name = "cc" -version = "1.2.0" +version = "1.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1aeb932158bd710538c73702db6945cb68a8fb08c519e6e12706b94263b36db8" +checksum = "a012a0df96dd6d06ba9a1b29d6402d1a5d77c6befd2566afdc26e10603dc93d7" dependencies = [ "shlex", ] @@ -426,9 +437,9 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" [[package]] name = "chrono" -version = "0.4.38" +version = "0.4.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825" dependencies = [ "android-tzdata", "iana-time-zone", @@ -470,9 +481,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.21" +version = "4.5.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb3b4b9e5a7c7514dfa52869339ee98b3156b0bfb4e8a77c4ff4babb64b1604f" +checksum = "a8eb5e908ef3a6efbe1ed62520fb7287959888c88485abe072543190ecc66783" dependencies = [ "clap_builder", "clap_derive", @@ -490,9 +501,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.21" +version = "4.5.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b17a95aa67cc7b5ebd32aa5370189aa0d79069ef1c64ce893bd30fb24bff20ec" +checksum = "96b01801b5fc6a0a232407abc821660c9c6d25a1cafc0d4f85f29fb8d9afc121" dependencies = [ "anstream", "anstyle", @@ -503,37 +514,37 @@ dependencies = [ [[package]] name = "clap_complete" -version = "4.5.38" +version = "4.5.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9647a559c112175f17cf724dc72d3645680a883c58481332779192b0d8e7a01" +checksum = "33a7e468e750fa4b6be660e8b5651ad47372e8fb114030b594c2d75d48c5ffd0" dependencies = [ "clap", ] [[package]] name = "clap_derive" -version = "4.5.18" +version = "4.5.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" +checksum = "54b755194d6389280185988721fffba69495eed5ee9feeee9a599b53db80318c" dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", ] [[package]] name = "clap_lex" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afb84c814227b90d6895e01398aee0d8033c00e7466aca416fb6a8e0eb19d8a7" +checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" [[package]] name = "clippy" -version = "0.1.85" +version = "0.1.86" dependencies = [ "anstream", - "cargo_metadata", + "cargo_metadata 0.18.1", "clippy_config", "clippy_lints", "clippy_utils", @@ -550,7 +561,7 @@ dependencies = [ "rustc_tools_util", "serde", "serde_json", - "syn 2.0.90", + "syn 2.0.96", "tempfile", "termize", "tokio", @@ -561,7 +572,7 @@ dependencies = [ [[package]] name = "clippy_config" -version = "0.1.85" +version = "0.1.86" dependencies = [ "clippy_utils", "itertools", @@ -586,10 +597,10 @@ dependencies = [ [[package]] name = "clippy_lints" -version = "0.1.85" +version = "0.1.86" dependencies = [ "arrayvec", - "cargo_metadata", + "cargo_metadata 0.18.1", "clippy_config", "clippy_utils", "itertools", @@ -609,7 +620,7 @@ dependencies = [ [[package]] name = "clippy_utils" -version = "0.1.85" +version = "0.1.86" dependencies = [ "arrayvec", "itertools", @@ -660,7 +671,7 @@ dependencies = [ "nom", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", ] [[package]] @@ -683,12 +694,12 @@ checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" [[package]] name = "colored" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbf2150cce219b664a8a70df7a1f933836724b503f8a413af9365b4dcc4d90b8" +checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c" dependencies = [ "lazy_static", - "windows-sys 0.48.0", + "windows-sys 0.59.0", ] [[package]] @@ -727,15 +738,15 @@ dependencies = [ [[package]] name = "console" -version = 
"0.15.8" +version = "0.15.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" +checksum = "ea3c6ecd8059b57859df5c69830340ed3c41d30e3da0c1cbed90a96ac853041b" dependencies = [ "encode_unicode", - "lazy_static", "libc", - "unicode-width 0.1.14", - "windows-sys 0.52.0", + "once_cell", + "unicode-width 0.2.0", + "windows-sys 0.59.0", ] [[package]] @@ -782,18 +793,18 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.13" +version = "0.5.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" +checksum = "06ba6d68e24814cb8de6bb986db8222d3a027d15872cabc0d18817bc3c0e4471" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-deque" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" dependencies = [ "crossbeam-epoch", "crossbeam-utils", @@ -810,9 +821,9 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.20" +version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = "crypto-common" @@ -885,7 +896,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.90", + "syn 2.0.96", ] [[package]] @@ -896,7 +907,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.90", + "syn 2.0.96", ] [[package]] @@ -933,7 +944,7 @@ checksum = "62d671cc41a825ebabc75757b62d3d168c577f9149b2d49ece1dad1f72119d25" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", ] [[package]] @@ -954,7 +965,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", ] [[package]] @@ -964,7 +975,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" dependencies = [ "derive_builder_core", - "syn 2.0.90", + "syn 2.0.96", ] [[package]] @@ -976,7 +987,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", ] [[package]] @@ -1054,7 +1065,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", ] [[package]] @@ -1101,15 +1112,15 @@ dependencies = [ [[package]] name = "encode_unicode" -version = "0.3.6" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" +checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" [[package]] name = "env_filter" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f2c92ceda6ceec50f43169f9ee8424fe2db276791afde7b2cd8bc084cb376ab" +checksum = "186e05a59d4c50738528153b83b0b0194d3a29507dfec16eccd4b342903397d0" dependencies = [ "log", "regex", @@ -1117,9 +1128,9 @@ dependencies = [ [[package]] name = "env_logger" -version = "0.11.5" +version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "e13fa619b91fb2381732789fc5de83b45675e882f66623b7d8cb4f643017018d" +checksum = "dcaee3d8e3cfc3fd92428d477bc97fc29ec8716d180c0d74c643bb26166660e0" dependencies = [ "anstream", "anstyle", @@ -1153,9 +1164,9 @@ dependencies = [ [[package]] name = "expect-test" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e0be0a561335815e06dab7c62e50353134c796e7a6155402a64bcff66b6a5e0" +checksum = "63af43ff4431e848fb47472a920f14fa71c24de13255a5692e93d4e90302acb0" dependencies = [ "dissimilar", "once_cell", @@ -1179,9 +1190,9 @@ checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649" [[package]] name = "fastrand" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "486f806e73c5707928240ddc295403b1b93c96a02038563881c4a2fd84b81ac4" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "field-offset" @@ -1212,7 +1223,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" dependencies = [ "crc32fast", - "miniz_oxide 0.8.0", + "miniz_oxide 0.8.2", ] [[package]] @@ -1246,7 +1257,7 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2a530c4694a6a8d528794ee9bbd8ba0122e779629ac908d15ad5a7ae7763a33d" dependencies = [ - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -1257,9 +1268,9 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "foldhash" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f81ec6369c545a7d40e4589b5597581fa1c441fe1cce96dd1de43159910a36a2" +checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f" [[package]] name = "form_urlencoded" @@ -1351,7 +1362,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", ] [[package]] @@ -1389,11 +1400,11 @@ name = "generate-copyright" version = "0.1.0" dependencies = [ "anyhow", - "cargo_metadata", + "cargo_metadata 0.18.1", "rinja", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -1463,9 +1474,9 @@ dependencies = [ [[package]] name = "glob" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" +checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" [[package]] name = "globset" @@ -1491,17 +1502,18 @@ dependencies = [ [[package]] name = "handlebars" -version = "6.2.0" +version = "6.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd4ccde012831f9a071a637b0d4e31df31c0f6c525784b35ae76a9ac6bc1e315" +checksum = "3d6b224b95c1e668ac0270325ad563b2eef1469fbbb8959bc7c692c844b813d9" dependencies = [ + "derive_builder", "log", "num-order", "pest", "pest_derive", "serde", "serde_json", - "thiserror", + "thiserror 2.0.11", ] [[package]] @@ -1550,11 +1562,11 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "home" -version = "0.5.9" +version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" +checksum = 
"589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -1585,7 +1597,7 @@ dependencies = [ "markup5ever", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", ] [[package]] @@ -1774,7 +1786,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", ] [[package]] @@ -1942,9 +1954,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.74" +version = "0.3.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a865e038f7f6ed956f788f0d7d60c541fff74c7bd74272c5d4cf15c63743e705" +checksum = "6717b6b5b077764fb5966237269cb3c64edddde4b14ce42647430a78ced9e7b7" dependencies = [ "once_cell", "wasm-bindgen", @@ -2012,9 +2024,9 @@ checksum = "baff4b617f7df3d896f97fe922b64817f6cd9a756bb81d40f8883f2f66dcb401" [[package]] name = "libc" -version = "0.2.167" +version = "0.2.169" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09d6582e104315a817dff97f75133544b2e094ee22447d2acf4a74e189ba06fc" +checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" [[package]] name = "libdbus-sys" @@ -2074,9 +2086,9 @@ dependencies = [ [[package]] name = "libz-sys" -version = "1.1.20" +version = "1.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2d16453e800a8cf6dd2fc3eb4bc99b786a9b90c663b8559a5b1a041bf89e472" +checksum = "df9b68e50e6e0b26f672573834882eb57759f6db9b3be2ea3c35c91188bb4eaa" dependencies = [ "cc", "libc", @@ -2103,9 +2115,9 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.4.14" +version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" [[package]] name = "litemap" @@ -2123,7 +2135,7 @@ version = "0.0.1" dependencies = [ "anyhow", "clap", - "thiserror", + "thiserror 1.0.69", "tracing", "tracing-subscriber", ] @@ -2305,9 +2317,9 @@ dependencies = [ [[package]] name = "miniz_oxide" -version = "0.8.0" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" +checksum = "4ffbe83022cedc1d264172192511ae958937694cd57ce297164951b8b3568394" dependencies = [ "adler2", ] @@ -2526,9 +2538,9 @@ dependencies = [ [[package]] name = "object" -version = "0.36.5" +version = "0.36.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" dependencies = [ "crc32fast", "flate2", @@ -2536,7 +2548,7 @@ dependencies = [ "indexmap", "memchr", "ruzstd", - "wasmparser 0.218.0", + "wasmparser 0.222.0", ] [[package]] @@ -2701,20 +2713,20 @@ dependencies = [ [[package]] name = "pest" -version = "2.7.14" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "879952a81a83930934cbf1786752d6dedc3b1f29e8f8fb2ad1d0a36f377cf442" +checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc" dependencies = [ "memchr", - "thiserror", + "thiserror 2.0.11", "ucd-trie", ] [[package]] name = "pest_derive" -version = "2.7.14" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "d214365f632b123a47fd913301e14c946c61d1c183ee245fa76eb752e59a02dd" +checksum = "816518421cfc6887a0d62bf441b6ffb4536fcc926395a69e1a85852d4363f57e" dependencies = [ "pest", "pest_generator", @@ -2722,22 +2734,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.7.14" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb55586734301717aea2ac313f50b2eb8f60d2fc3dc01d190eefa2e625f60c4e" +checksum = "7d1396fd3a870fc7838768d171b4616d5c91f6cc25e377b673d714567d99377b" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", ] [[package]] name = "pest_meta" -version = "2.7.14" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b75da2a70cf4d9cb76833c990ac9cd3923c9a8905a8929789ce347c84564d03d" +checksum = "e1e58089ea25d717bfd31fb534e4f3afcc2cc569c70de3e239778991ea3b7dea" dependencies = [ "once_cell", "pest", @@ -2746,21 +2758,21 @@ dependencies = [ [[package]] name = "phf" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" +checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078" dependencies = [ - "phf_shared 0.11.2", + "phf_shared 0.11.3", ] [[package]] name = "phf_codegen" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8d39688d359e6b34654d328e262234662d16cc0f60ec8dcbe5e718709342a5a" +checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a" dependencies = [ - "phf_generator 0.11.2", - "phf_shared 0.11.2", + "phf_generator 0.11.3", + "phf_shared 0.11.3", ] [[package]] @@ -2775,11 +2787,11 @@ dependencies = [ [[package]] name = "phf_generator" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" +checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" dependencies = [ - "phf_shared 0.11.2", + "phf_shared 0.11.3", "rand", ] @@ -2789,23 +2801,23 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" dependencies = [ - "siphasher", + "siphasher 0.3.11", ] [[package]] name = "phf_shared" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" +checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" dependencies = [ - "siphasher", + "siphasher 1.0.1", ] [[package]] name = "pin-project-lite" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" [[package]] name = "pin-utils" @@ -2875,9 +2887,9 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" [[package]] name = "proc-macro2" -version = "1.0.92" +version = "1.0.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" +checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" dependencies = [ "unicode-ident", ] @@ 
-2952,9 +2964,9 @@ checksum = "07589615d719a60c8dd8a4622e7946465dfef20d1a428f969e3443e7386d5f45" [[package]] name = "quote" -version = "1.0.37" +version = "1.0.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" +checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" dependencies = [ "proc-macro2", ] @@ -3020,9 +3032,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.7" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f" +checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" dependencies = [ "bitflags", ] @@ -3035,7 +3047,7 @@ checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" dependencies = [ "getrandom", "libredox", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -3140,7 +3152,7 @@ dependencies = [ "rinja_parser", "rustc-hash 2.1.0", "serde", - "syn 2.0.90", + "syn 2.0.96", ] [[package]] @@ -3169,7 +3181,7 @@ dependencies = [ "build_helper", "gimli 0.31.1", "libc", - "object 0.36.5", + "object 0.36.7", "regex", "serde_json", "similar", @@ -3249,9 +3261,9 @@ dependencies = [ [[package]] name = "rustc-stable-hash" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5c9f15eec8235d7cb775ee6f81891db79b98fd54ba1ad8fae565b88ef1ae4e2" +checksum = "2febf9acc5ee5e99d1ad0afcdbccc02d87aa3f857a1f01f825b80eacf8edfcd1" [[package]] name = "rustc-std-workspace-alloc" @@ -3332,6 +3344,7 @@ version = "0.0.0" dependencies = [ "rustc_ast", "rustc_ast_pretty", + "rustc_attr_parsing", "rustc_data_structures", "rustc_errors", "rustc_fluent_macro", @@ -3339,6 +3352,7 @@ dependencies = [ "rustc_index", "rustc_macros", "rustc_middle", + "rustc_parse", "rustc_session", "rustc_span", "rustc_target", @@ -3385,16 +3399,11 @@ version = "0.0.0" dependencies = [ "rustc_abi", "rustc_ast", - "rustc_ast_pretty", "rustc_data_structures", - "rustc_errors", - "rustc_feature", - "rustc_fluent_macro", - "rustc_lexer", "rustc_macros", "rustc_serialize", - "rustc_session", "rustc_span", + "thin-vec", ] [[package]] @@ -3409,11 +3418,13 @@ dependencies = [ "rustc_errors", "rustc_feature", "rustc_fluent_macro", + "rustc_hir", "rustc_lexer", "rustc_macros", "rustc_serialize", "rustc_session", "rustc_span", + "thin-vec", ] [[package]] @@ -3466,6 +3477,7 @@ dependencies = [ "rustc_expand", "rustc_feature", "rustc_fluent_macro", + "rustc_hir", "rustc_index", "rustc_lexer", "rustc_lint_defs", @@ -3485,10 +3497,11 @@ name = "rustc_codegen_llvm" version = "0.0.0" dependencies = [ "bitflags", + "gimli 0.30.0", "itertools", "libc", "measureme", - "object 0.36.5", + "object 0.36.7", "rustc-demangle", "rustc_abi", "rustc_ast", @@ -3527,7 +3540,7 @@ dependencies = [ "either", "itertools", "libc", - "object 0.36.5", + "object 0.36.7", "pathdiff", "regex", "rustc_abi", @@ -3711,17 +3724,19 @@ dependencies = [ name = "rustc_errors" version = "0.0.0" dependencies = [ - "annotate-snippets 0.11.4", + "annotate-snippets 0.11.5", "derive_setters", "rustc_abi", "rustc_ast", "rustc_ast_pretty", + "rustc_attr_data_structures", "rustc_data_structures", "rustc_error_codes", "rustc_error_messages", "rustc_fluent_macro", "rustc_hir", "rustc_index", + "rustc_lexer", "rustc_lint_defs", "rustc_macros", "rustc_serialize", @@ -3748,6 +3763,7 @@ dependencies = [ "rustc_errors", "rustc_feature", 
"rustc_fluent_macro", + "rustc_hir", "rustc_lexer", "rustc_lint_defs", "rustc_macros", @@ -3774,12 +3790,12 @@ dependencies = [ name = "rustc_fluent_macro" version = "0.0.0" dependencies = [ - "annotate-snippets 0.11.4", + "annotate-snippets 0.11.5", "fluent-bundle", "fluent-syntax", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", "unic-langid", ] @@ -3799,6 +3815,7 @@ dependencies = [ "rustc_abi", "rustc_arena", "rustc_ast", + "rustc_attr_data_structures", "rustc_data_structures", "rustc_index", "rustc_macros", @@ -3845,6 +3862,7 @@ dependencies = [ "rustc_abi", "rustc_ast", "rustc_ast_pretty", + "rustc_attr_parsing", "rustc_hir", "rustc_span", ] @@ -3914,7 +3932,7 @@ version = "0.0.0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", ] [[package]] @@ -4062,7 +4080,7 @@ version = "0.0.0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", "synstructure", ] @@ -4102,7 +4120,6 @@ name = "rustc_middle" version = "0.0.0" dependencies = [ "bitflags", - "derive-where", "either", "field-offset", "gsgdt", @@ -4146,6 +4163,7 @@ dependencies = [ "rustc_apfloat", "rustc_arena", "rustc_ast", + "rustc_attr_parsing", "rustc_data_structures", "rustc_errors", "rustc_fluent_macro", @@ -4294,7 +4312,6 @@ dependencies = [ "rustc_fluent_macro", "rustc_hir", "rustc_index", - "rustc_lexer", "rustc_macros", "rustc_middle", "rustc_privacy", @@ -4372,6 +4389,7 @@ dependencies = [ "parking_lot", "rustc-rayon-core", "rustc_ast", + "rustc_attr_data_structures", "rustc_data_structures", "rustc_errors", "rustc_feature", @@ -4422,6 +4440,7 @@ version = "0.0.0" dependencies = [ "bitflags", "rustc_abi", + "rustc_ast", "rustc_data_structures", "rustc_hir", "rustc_middle", @@ -4515,6 +4534,7 @@ dependencies = [ "punycode", "rustc-demangle", "rustc_abi", + "rustc_ast", "rustc_data_structures", "rustc_errors", "rustc_hir", @@ -4529,7 +4549,7 @@ name = "rustc_target" version = "0.0.0" dependencies = [ "bitflags", - "object 0.36.5", + "object 0.36.7", "rustc_abi", "rustc_data_structures", "rustc_fs_util", @@ -4651,7 +4671,7 @@ version = "0.0.0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", "synstructure", ] @@ -4729,7 +4749,7 @@ checksum = "81864b097046da5df3758fdc6e4822bbb70afa06317e8ca45ea1b51cb8c5e5a4" dependencies = [ "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "tracing", ] @@ -4740,7 +4760,7 @@ dependencies = [ "proc-macro2", "quote", "serde", - "syn 2.0.90", + "syn 2.0.96", ] [[package]] @@ -4750,7 +4770,7 @@ dependencies = [ "annotate-snippets 0.9.2", "anyhow", "bytecount", - "cargo_metadata", + "cargo_metadata 0.18.1", "clap", "clap-cargo", "diff", @@ -4763,7 +4783,7 @@ dependencies = [ "serde", "serde_json", "term", - "thiserror", + "thiserror 1.0.69", "toml 0.7.8", "tracing", "tracing-subscriber", @@ -4774,22 +4794,22 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.41" +version = "0.38.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7f649912bc1495e167a6edee79151c84b1bad49748cb4f1f1167f459f6224f6" +checksum = "a78891ee6bf2340288408954ac787aa063d8e8817e9f53abb37c695c6d834ef6" dependencies = [ "bitflags", "errno", "libc", "linux-raw-sys", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "rustversion" -version = "1.0.18" +version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248" +checksum = 
"f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4" [[package]] name = "ruzstd" @@ -4842,49 +4862,49 @@ version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e14e4d63b804dc0c7ec4a1e52bcb63f02c7ac94476755aa579edac21e01f915d" dependencies = [ - "self_cell 1.0.4", + "self_cell 1.1.0", ] [[package]] name = "self_cell" -version = "1.0.4" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d369a96f978623eb3dc28807c4852d6cc617fed53da5d3c400feff1ef34a714a" +checksum = "c2fdfc24bc566f839a2da4c4295b82db7d25a24253867d5c64355abb5799bdbe" [[package]] name = "semver" -version = "1.0.23" +version = "1.0.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" +checksum = "3cb6eb87a131f756572d7fb904f6e7b68633f09cca868c5df1c4b8d1a694bbba" dependencies = [ "serde", ] [[package]] name = "serde" -version = "1.0.215" +version = "1.0.217" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6513c1ad0b11a9376da888e3e0baa0077f1aed55c17f50e7b2397136129fb88f" +checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.215" +version = "1.0.217" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0" +checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", ] [[package]] name = "serde_json" -version = "1.0.133" +version = "1.0.135" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" +checksum = "2b0d7ba2887406110130a978386c4e1befb98c674b4fba677954e4db976630d9" dependencies = [ "indexmap", "itoa", @@ -4957,6 +4977,12 @@ version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" +[[package]] +name = "siphasher" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" + [[package]] name = "slab" version = "0.4.9" @@ -4994,9 +5020,9 @@ dependencies = [ [[package]] name = "spdx" -version = "0.10.7" +version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bae30cc7bfe3656d60ee99bf6836f472b0c53dddcbf335e253329abb16e535a2" +checksum = "58b69356da67e2fc1f542c71ea7e654a361a79c938e4424392ecf4fa065d2193" dependencies = [ "smallvec", ] @@ -5009,7 +5035,7 @@ checksum = "53d7ac03c67c572d85049d6db815e20a4a19b41b3d5cca732ac582342021ad77" dependencies = [ "nom", "serde", - "thiserror", + "thiserror 1.0.69", "tracing", ] @@ -5026,7 +5052,7 @@ dependencies = [ "spdx-expression", "strum", "strum_macros", - "thiserror", + "thiserror 1.0.69", "uuid", ] @@ -5135,9 +5161,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.90" +version = "2.0.96" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "919d3b74a5dd0ccd15aeb8f93e7006bd9e14c295087c9896a110f490752bcf31" +checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80" dependencies = [ "proc-macro2", "quote", @@ -5152,7 +5178,7 @@ checksum = 
"c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", ] [[package]] @@ -5189,12 +5215,13 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.14.0" +version = "3.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c" +checksum = "9a8a559c81686f576e8cd0290cd2a24a2a9ad80c98b3478856500fcbd7acd704" dependencies = [ "cfg-if", "fastrand", + "getrandom", "once_cell", "rustix", "windows-sys 0.59.0", @@ -5274,7 +5301,16 @@ version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" dependencies = [ - "thiserror-impl", + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d452f284b73e6d76dd36758a0c8684b1d5be31f92b89d07fd5822175732206fc" +dependencies = [ + "thiserror-impl 2.0.11", ] [[package]] @@ -5285,7 +5321,18 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26afc1baea8a989337eeb52b6e72a039780ce45c3edfcc9c5b9d112feeb173c2" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", ] [[package]] @@ -5296,7 +5343,7 @@ checksum = "813ba76597db32dc4f6992fd8bf8f394715b88d352fd97401da67dab6283b4c6" dependencies = [ "gimli 0.30.0", "hashbrown 0.14.5", - "object 0.36.5", + "object 0.36.7", "tracing", ] @@ -5324,7 +5371,7 @@ name = "tidy" version = "0.1.0" dependencies = [ "build_helper", - "cargo_metadata", + "cargo_metadata 0.19.1", "fluent-syntax", "ignore", "miropt-test-tools", @@ -5352,9 +5399,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.36" +version = "0.3.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21" dependencies = [ "deranged", "itoa", @@ -5373,9 +5420,9 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.18" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de" dependencies = [ "num-conv", "time-core", @@ -5393,9 +5440,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.8.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" +checksum = "022db8904dfa342efe721985167e9fcd16c29b226db4397ed752a761cfce81e8" dependencies = [ "tinyvec_macros", ] @@ -5408,9 +5455,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.41.1" +version = "1.43.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22cfb5bee7a6a52939ca9224d6ac897bb669134078daa8735560897f69de4d33" +checksum = "3d61fa4ffa3de412bfea335c6ecff681de2b609ba3c77ef3e00e521813a9ed9e" dependencies = [ "backtrace", "bytes", @@ -5486,7 
+5533,7 @@ checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", ] [[package]] @@ -5598,11 +5645,11 @@ version = "0.26.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32ee4c40e5a5f9fa6864ff976473e5d6a6e9884b6ce68b40690d9f87e1994c83" dependencies = [ - "annotate-snippets 0.11.4", + "annotate-snippets 0.11.5", "anyhow", "bstr", "cargo-platform", - "cargo_metadata", + "cargo_metadata 0.18.1", "color-eyre", "colored", "comma", @@ -5657,15 +5704,15 @@ checksum = "1ed7f4237ba393424195053097c1516bd4590dc82b84f2f97c5c69e12704555b" dependencies = [ "proc-macro-hack", "quote", - "syn 2.0.90", + "syn 2.0.96", "unic-langid-impl", ] [[package]] name = "unicase" -version = "2.8.0" +version = "2.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e51b68083f157f853b6379db119d1c1be0e6e4dec98101079dec41f6f5cf6df" +checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" [[package]] name = "unicode-ident" @@ -5795,9 +5842,9 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.11.0" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a" +checksum = "b913a3b5fe84142e269d63cc62b64319ccaf89b748fc31fe025177f767a756c4" dependencies = [ "getrandom", ] @@ -5844,9 +5891,9 @@ checksum = "0f76d9fa52234153eeb40b088de91a8c13dc28a912cf6f31cd89ca4bac9024e0" [[package]] name = "wasm-bindgen" -version = "0.2.97" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d15e63b4482863c109d70a7b8706c1e364eb6ea449b201a76c5b89cedcec2d5c" +checksum = "a474f6281d1d70c17ae7aa6a613c87fce69a127e2624002df63dcb39d6cf6396" dependencies = [ "cfg-if", "once_cell", @@ -5855,24 +5902,23 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.97" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d36ef12e3aaca16ddd3f67922bc63e48e953f126de60bd33ccc0101ef9998cd" +checksum = "5f89bb38646b4f81674e8f5c3fb81b562be1fd936d84320f3264486418519c79" dependencies = [ "bumpalo", "log", - "once_cell", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.97" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "705440e08b42d3e4b36de7d66c944be628d579796b8090bfa3471478a2260051" +checksum = "2cc6181fd9a7492eef6fef1f33961e3695e4579b9872a6f7c83aee556666d4fe" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -5880,22 +5926,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.97" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98c9ae5a76e46f4deecd0f0255cc223cfa18dc9b261213b8aa0c7b36f61b3f1d" +checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.97" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ee99da9c5ba11bd675621338ef6fa52296b76b83305e9b6e5c77d4c286d6d49" +checksum = "943aab3fdaaa029a6e0271b35ea10b72b943135afe9bffca82384098ad0e06a6" 
[[package]] name = "wasm-component-ld" @@ -5936,12 +5982,12 @@ dependencies = [ [[package]] name = "wasm-encoder" -version = "0.221.0" +version = "0.223.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de35b6c3ef1f53ac7a31b5e69bc00f1542ea337e7e7162dc34c68b537ff82690" +checksum = "7e636076193fa68103e937ac951b5f2f587624097017d764b8984d9c0f149464" dependencies = [ "leb128", - "wasmparser 0.221.0", + "wasmparser 0.223.0", ] [[package]] @@ -5960,15 +6006,6 @@ dependencies = [ "wasmparser 0.219.1", ] -[[package]] -name = "wasmparser" -version = "0.218.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b09e46c7fceceaa72b2dd1a8a137ea7fd8f93dfaa69806010a709918e496c5dc" -dependencies = [ - "bitflags", -] - [[package]] name = "wasmparser" version = "0.219.1" @@ -5985,9 +6022,18 @@ dependencies = [ [[package]] name = "wasmparser" -version = "0.221.0" +version = "0.222.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8659e755615170cfe20da468865c989da78c5da16d8652e69a75acda02406a92" +checksum = "4adf50fde1b1a49c1add6a80d47aea500c88db70551805853aa8b88f3ea27ab5" +dependencies = [ + "bitflags", +] + +[[package]] +name = "wasmparser" +version = "0.223.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5a99faceb1a5a84dd6084ec4bfa4b2ab153b5793b43fd8f58b89232634afc35" dependencies = [ "bitflags", "indexmap", @@ -5996,22 +6042,22 @@ dependencies = [ [[package]] name = "wast" -version = "221.0.0" +version = "223.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d8eb1933d493dd07484a255c3f52236123333f5befaa3be36182a50d393ec54" +checksum = "d59b2ba8a2ff9f06194b7be9524f92e45e70149f4dacc0d0c7ad92b59ac875e4" dependencies = [ "bumpalo", "leb128", "memchr", "unicode-width 0.2.0", - "wasm-encoder 0.221.0", + "wasm-encoder 0.223.0", ] [[package]] name = "wat" -version = "1.221.0" +version = "1.223.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c813fd4e5b2b97242830b56e7b7dc5479bc17aaa8730109be35e61909af83993" +checksum = "662786915c427e4918ff01eabb3c4756d4d947cd8f635761526b4cc9da2eaaad" dependencies = [ "wast", ] @@ -6077,7 +6123,7 @@ dependencies = [ "rayon", "serde", "serde_json", - "syn 2.0.90", + "syn 2.0.96", "windows-metadata", ] @@ -6110,7 +6156,7 @@ checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", ] [[package]] @@ -6121,7 +6167,7 @@ checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", ] [[package]] @@ -6353,9 +6399,9 @@ checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" [[package]] name = "xattr" -version = "1.3.1" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8da84f1a25939b27f6820d92aed108f83ff920fdf11a7b19366c27c4cda81d4f" +checksum = "e105d177a3871454f754b33bb0ee637ecaaac997446375fd3e5d43a2ed00c909" dependencies = [ "libc", "linux-raw-sys", @@ -6400,7 +6446,7 @@ checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", "synstructure", ] @@ -6422,7 +6468,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", ] [[package]] @@ -6442,7 +6488,7 @@ checksum = 
"595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", "synstructure", ] @@ -6465,5 +6511,5 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.96", ] diff --git a/LICENSE-MIT b/LICENSE-MIT index 31aa79387f27e..a2070c4be7acd 100644 --- a/LICENSE-MIT +++ b/LICENSE-MIT @@ -1,3 +1,5 @@ +Copyright (c) The Rust Project Contributors + Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the diff --git a/RELEASES.md b/RELEASES.md index 99733bade32c4..c4b36ed988bd2 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -1,3 +1,119 @@ +Version 1.84.0 (2025-01-09) +========================== + + + +Language +-------- +- [Allow `#[deny]` inside `#[forbid]` as a no-op](https://github.com/rust-lang/rust/pull/121560/) +- [Show a warning when `-Ctarget-feature` is used to toggle features that can lead to unsoundness due to ABI mismatches](https://github.com/rust-lang/rust/pull/129884) +- [Use the next-generation trait solver in coherence](https://github.com/rust-lang/rust/pull/130654) +- [Allow coercions to drop the principal of trait objects](https://github.com/rust-lang/rust/pull/131857) +- [Support `/` as the path separator for `include!()` in all cases on Windows](https://github.com/rust-lang/rust/pull/125205) +- [Taking a raw ref (`raw (const|mut)`) of a deref of a pointer (`*ptr`) is now safe](https://github.com/rust-lang/rust/pull/129248) +- [Stabilize s390x inline assembly](https://github.com/rust-lang/rust/pull/131258) +- [Stabilize Arm64EC inline assembly](https://github.com/rust-lang/rust/pull/131781) +- [Lint against creating pointers to immediately dropped temporaries](https://github.com/rust-lang/rust/pull/128985) +- [Execute drop glue when unwinding in an `extern "C"` function](https://github.com/rust-lang/rust/pull/129582) + + + +Compiler +-------- +- [Add `--print host-tuple` flag to print the host target tuple and affirm the "target tuple" terminology over "target triple"](https://github.com/rust-lang/rust/pull/125579) +- [Declaring functions with a calling convention not supported on the current target now triggers a hard error](https://github.com/rust-lang/rust/pull/129935) +- [Set up indirect access to external data for `loongarch64-unknown-linux-{musl,ohos}`](https://github.com/rust-lang/rust/pull/131583) +- [Enable XRay instrumentation for LoongArch Linux targets](https://github.com/rust-lang/rust/pull/131818) +- [Extend the `unexpected_cfgs` lint to also warn in external macros](https://github.com/rust-lang/rust/pull/132577) +- [Stabilize WebAssembly `multivalue`, `reference-types`, and `tail-call` target features](https://github.com/rust-lang/rust/pull/131080) +- [Added Tier 2 support for the `wasm32v1-none` target](https://github.com/rust-lang/rust/pull/131487) + + + +Libraries +--------- +- [Implement `From<&mut {slice}>` for `Box/Rc/Arc<{slice}>`](https://github.com/rust-lang/rust/pull/129329) +- [Move `::copysign`, `::abs`, `::signum` to `core`](https://github.com/rust-lang/rust/pull/131304) +- [Add `LowerExp` and `UpperExp` implementations to `NonZero`](https://github.com/rust-lang/rust/pull/131377) +- [Implement `FromStr` for `CString` and `TryFrom` for `String`](https://github.com/rust-lang/rust/pull/130608) +- [`std::os::darwin` has been made public](https://github.com/rust-lang/rust/pull/123723) + 
+ + +Stabilized APIs +--------------- + +- [`Ipv6Addr::is_unique_local`](https://doc.rust-lang.org/stable/core/net/struct.Ipv6Addr.html#method.is_unique_local) +- [`Ipv6Addr::is_unicast_link_local`](https://doc.rust-lang.org/stable/core/net/struct.Ipv6Addr.html#method.is_unicast_link_local) +- [`core::ptr::with_exposed_provenance`](https://doc.rust-lang.org/stable/core/ptr/fn.with_exposed_provenance.html) +- [`core::ptr::with_exposed_provenance_mut`](https://doc.rust-lang.org/stable/core/ptr/fn.with_exposed_provenance_mut.html) +- [`::addr`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.addr) +- [`::expose_provenance`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.expose_provenance) +- [`::with_addr`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.with_addr) +- [`::map_addr`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.map_addr) +- [`::isqrt`](https://doc.rust-lang.org/stable/core/primitive.i32.html#method.isqrt) +- [`::checked_isqrt`](https://doc.rust-lang.org/stable/core/primitive.i32.html#method.checked_isqrt) +- [`::isqrt`](https://doc.rust-lang.org/stable/core/primitive.u32.html#method.isqrt) +- [`NonZero::isqrt`](https://doc.rust-lang.org/stable/core/num/struct.NonZero.html#impl-NonZero%3Cu128%3E/method.isqrt) +- [`core::ptr::without_provenance`](https://doc.rust-lang.org/stable/core/ptr/fn.without_provenance.html) +- [`core::ptr::without_provenance_mut`](https://doc.rust-lang.org/stable/core/ptr/fn.without_provenance_mut.html) +- [`core::ptr::dangling`](https://doc.rust-lang.org/stable/core/ptr/fn.dangling.html) +- [`core::ptr::dangling_mut`](https://doc.rust-lang.org/stable/core/ptr/fn.dangling_mut.html) +- [`Pin::as_deref_mut`](https://doc.rust-lang.org/stable/core/pin/struct.Pin.html#method.as_deref_mut) + +These APIs are now stable in const contexts + +- [`AtomicBool::from_ptr`](https://doc.rust-lang.org/stable/core/sync/atomic/struct.AtomicBool.html#method.from_ptr) +- [`AtomicPtr::from_ptr`](https://doc.rust-lang.org/stable/core/sync/atomic/struct.AtomicPtr.html#method.from_ptr) +- [`AtomicU8::from_ptr`](https://doc.rust-lang.org/stable/core/sync/atomic/struct.AtomicU8.html#method.from_ptr) +- [`AtomicU16::from_ptr`](https://doc.rust-lang.org/stable/core/sync/atomic/struct.AtomicU16.html#method.from_ptr) +- [`AtomicU32::from_ptr`](https://doc.rust-lang.org/stable/core/sync/atomic/struct.AtomicU32.html#method.from_ptr) +- [`AtomicU64::from_ptr`](https://doc.rust-lang.org/stable/core/sync/atomic/struct.AtomicU64.html#method.from_ptr) +- [`AtomicUsize::from_ptr`](https://doc.rust-lang.org/stable/core/sync/atomic/struct.AtomicUsize.html#method.from_ptr) +- [`AtomicI8::from_ptr`](https://doc.rust-lang.org/stable/core/sync/atomic/struct.AtomicI8.html#method.from_ptr) +- [`AtomicI16::from_ptr`](https://doc.rust-lang.org/stable/core/sync/atomic/struct.AtomicI16.html#method.from_ptr) +- [`AtomicI32::from_ptr`](https://doc.rust-lang.org/stable/core/sync/atomic/struct.AtomicI32.html#method.from_ptr) +- [`AtomicI64::from_ptr`](https://doc.rust-lang.org/stable/core/sync/atomic/struct.AtomicI64.html#method.from_ptr) +- [`AtomicIsize::from_ptr`](https://doc.rust-lang.org/stable/core/sync/atomic/struct.AtomicIsize.html#method.from_ptr) +- [`::is_null`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.is_null-1) +- [`::as_ref`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.as_ref-1) +- 
[`::as_mut`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.as_mut) +- [`Pin::new`](https://doc.rust-lang.org/stable/core/pin/struct.Pin.html#method.new) +- [`Pin::new_unchecked`](https://doc.rust-lang.org/stable/core/pin/struct.Pin.html#method.new_unchecked) +- [`Pin::get_ref`](https://doc.rust-lang.org/stable/core/pin/struct.Pin.html#method.get_ref) +- [`Pin::into_ref`](https://doc.rust-lang.org/stable/core/pin/struct.Pin.html#method.into_ref) +- [`Pin::get_mut`](https://doc.rust-lang.org/stable/core/pin/struct.Pin.html#method.get_mut) +- [`Pin::get_unchecked_mut`](https://doc.rust-lang.org/stable/core/pin/struct.Pin.html#method.get_unchecked_mut) +- [`Pin::static_ref`](https://doc.rust-lang.org/stable/core/pin/struct.Pin.html#method.static_ref) +- [`Pin::static_mut`](https://doc.rust-lang.org/stable/core/pin/struct.Pin.html#method.static_mut) + + + +Cargo +----- +- [Stabilize MSRV-aware resolver config](https://github.com/rust-lang/cargo/pull/14639/) +- [Stabilize resolver v3](https://github.com/rust-lang/cargo/pull/14754/) + + + +Rustdoc +------- + +- [rustdoc-search: improve type-driven search](https://github.com/rust-lang/rust/pull/127589) + + + +Compatibility Notes +------------------- +- [Enable by default the `LSX` target feature for LoongArch Linux targets](https://github.com/rust-lang/rust/pull/132140) +- [The unstable `-Zprofile` flag (“gcov-style” coverage instrumentation) has been removed.](https://github.com/rust-lang/rust/pull/131829) This does not affect the stable flags for coverage instrumentation (`-Cinstrument-coverage`) and profile-guided optimization (`-Cprofile-generate`, `-Cprofile-use`), which are unrelated and remain available. +- Support for the target named `wasm32-wasi` has been removed as the target is now named `wasm32-wasip1`. This completes the [transition](https://github.com/rust-lang/compiler-team/issues/607) [plan](https://github.com/rust-lang/compiler-team/issues/695) for this target following [the introduction of `wasm32-wasip1`](https://github.com/rust-lang/rust/pull/120468) in Rust 1.78. Compiler warnings on [use of `wasm32-wasi`](https://github.com/rust-lang/rust/pull/126662) introduced in Rust 1.81 are now gone as well as the target is removed. +- [The syntax `&pin (mut|const) T` is now parsed as a type which in theory could affect macro expansion results in some edge cases](https://github.com/rust-lang/rust/pull/130635#issuecomment-2375462821) +- [Legacy syntax for calling `std::arch` functions is no longer permitted to declare items or bodies (such as closures, inline consts, or async blocks).](https://github.com/rust-lang/rust/pull/130443#issuecomment-2445678945) +- [Declaring functions with a calling convention not supported on the current target now triggers a hard error](https://github.com/rust-lang/rust/pull/129935) +- [The next-generation trait solver is now enabled for coherence, fixing multiple soundness issues](https://github.com/rust-lang/rust/pull/130654) + Version 1.83.0 (2024-11-28) ========================== @@ -2067,7 +2183,7 @@ Language -------- - [Stabilize default_alloc_error_handler](https://github.com/rust-lang/rust/pull/102318/) - This allows usage of `alloc` on stable without requiring the + This allows usage of `alloc` on stable without requiring the definition of a handler for allocation failure. Defining custom handlers is still unstable. 
- [Stabilize `efiapi` calling convention.](https://github.com/rust-lang/rust/pull/105795/) - [Remove implicit promotion for types with drop glue](https://github.com/rust-lang/rust/pull/105085/) diff --git a/compiler/rustc_abi/src/callconv.rs b/compiler/rustc_abi/src/callconv.rs index ee63e46e88c1d..400395f99ff01 100644 --- a/compiler/rustc_abi/src/callconv.rs +++ b/compiler/rustc_abi/src/callconv.rs @@ -206,7 +206,7 @@ impl<'a, Ty> TyAndLayout<'a, Ty> { let (mut result, mut total) = from_fields_at(*self, Size::ZERO)?; match &self.variants { - abi::Variants::Single { .. } => {} + abi::Variants::Single { .. } | abi::Variants::Empty => {} abi::Variants::Multiple { variants, .. } => { // Treat enum variants like union members. // HACK(eddyb) pretend the `enum` field (discriminant) diff --git a/compiler/rustc_abi/src/extern_abi/mod.rs b/compiler/rustc_abi/src/extern_abi/mod.rs index f7e41280131fa..130834d560f72 100644 --- a/compiler/rustc_abi/src/extern_abi/mod.rs +++ b/compiler/rustc_abi/src/extern_abi/mod.rs @@ -1,8 +1,7 @@ use std::fmt; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; -use rustc_span::symbol::sym; -use rustc_span::{Span, Symbol}; +use rustc_span::{Span, Symbol, sym}; #[cfg(test)] mod tests; @@ -46,6 +45,9 @@ pub enum ExternAbi { PtxKernel, Msp430Interrupt, X86Interrupt, + /// An entry-point function called by the GPU's host + // FIXME: should not be callable from Rust on GPU targets, is for host's use only + GpuKernel, EfiApi, AvrInterrupt, AvrNonBlockingInterrupt, @@ -123,6 +125,7 @@ const AbiDatas: &[AbiData] = &[ AbiData { abi: Abi::PtxKernel, name: "ptx-kernel" }, AbiData { abi: Abi::Msp430Interrupt, name: "msp430-interrupt" }, AbiData { abi: Abi::X86Interrupt, name: "x86-interrupt" }, + AbiData { abi: Abi::GpuKernel, name: "gpu-kernel" }, AbiData { abi: Abi::EfiApi, name: "efiapi" }, AbiData { abi: Abi::AvrInterrupt, name: "avr-interrupt" }, AbiData { abi: Abi::AvrNonBlockingInterrupt, name: "avr-non-blocking-interrupt" }, @@ -193,6 +196,10 @@ pub fn is_enabled( s } +/// Returns whether the ABI is stable to use. +/// +/// Note that there is a separate check determining whether the ABI is even supported +/// on the current target; see `fn is_abi_supported` in `rustc_target::spec`. 
pub fn is_stable(name: &str) -> Result<(), AbiDisabled> { match name { // Stable @@ -236,6 +243,10 @@ pub fn is_stable(name: &str) -> Result<(), AbiDisabled> { feature: sym::abi_x86_interrupt, explain: "x86-interrupt ABI is experimental and subject to change", }), + "gpu-kernel" => Err(AbiDisabled::Unstable { + feature: sym::abi_gpu_kernel, + explain: "gpu-kernel ABI is experimental and subject to change", + }), "avr-interrupt" | "avr-non-blocking-interrupt" => Err(AbiDisabled::Unstable { feature: sym::abi_avr_interrupt, explain: "avr-interrupt and avr-non-blocking-interrupt ABIs are experimental and subject to change", @@ -290,20 +301,21 @@ impl Abi { PtxKernel => 19, Msp430Interrupt => 20, X86Interrupt => 21, - EfiApi => 22, - AvrInterrupt => 23, - AvrNonBlockingInterrupt => 24, - CCmseNonSecureCall => 25, - CCmseNonSecureEntry => 26, + GpuKernel => 22, + EfiApi => 23, + AvrInterrupt => 24, + AvrNonBlockingInterrupt => 25, + CCmseNonSecureCall => 26, + CCmseNonSecureEntry => 27, // Cross-platform ABIs - System { unwind: false } => 27, - System { unwind: true } => 28, - RustIntrinsic => 29, - RustCall => 30, - Unadjusted => 31, - RustCold => 32, - RiscvInterruptM => 33, - RiscvInterruptS => 34, + System { unwind: false } => 28, + System { unwind: true } => 29, + RustIntrinsic => 30, + RustCall => 31, + Unadjusted => 32, + RustCold => 33, + RiscvInterruptM => 34, + RiscvInterruptS => 35, }; debug_assert!( AbiDatas diff --git a/compiler/rustc_abi/src/layout.rs b/compiler/rustc_abi/src/layout.rs index e6d66f608dae9..b8773f9ff38f6 100644 --- a/compiler/rustc_abi/src/layout.rs +++ b/compiler/rustc_abi/src/layout.rs @@ -119,6 +119,8 @@ impl LayoutCalculator { .chain(Niche::from_scalar(dl, Size::ZERO, a)) .max_by_key(|niche| niche.available(dl)); + let combined_seed = a.size(&self.cx).bytes().wrapping_add(b.size(&self.cx).bytes()); + LayoutData { variants: Variants::Single { index: VariantIdx::new(0) }, fields: FieldsShape::Arbitrary { @@ -131,6 +133,7 @@ impl LayoutCalculator { size, max_repr_align: None, unadjusted_abi_align: align.abi, + randomization_seed: combined_seed, } } @@ -213,8 +216,9 @@ impl LayoutCalculator { &self, ) -> LayoutData { let dl = self.cx.data_layout(); + // This is also used for uninhabited enums, so we use `Variants::Empty`. 
LayoutData { - variants: Variants::Single { index: VariantIdx::new(0) }, + variants: Variants::Empty, fields: FieldsShape::Primitive, backend_repr: BackendRepr::Uninhabited, largest_niche: None, @@ -222,6 +226,7 @@ impl LayoutCalculator { size: Size::ZERO, max_repr_align: None, unadjusted_abi_align: dl.i8_align.abi, + randomization_seed: 0, } } @@ -384,6 +389,11 @@ impl LayoutCalculator { return Err(LayoutCalculatorError::EmptyUnion); }; + let combined_seed = only_variant + .iter() + .map(|v| v.randomization_seed) + .fold(repr.field_shuffle_seed, |acc, seed| acc.wrapping_add(seed)); + Ok(LayoutData { variants: Variants::Single { index: only_variant_idx }, fields: FieldsShape::Union(union_field_count), @@ -393,6 +403,7 @@ impl LayoutCalculator { size: size.align_to(align.abi), max_repr_align, unadjusted_abi_align, + randomization_seed: combined_seed, }) } @@ -649,6 +660,11 @@ impl LayoutCalculator { BackendRepr::Memory { sized: true } }; + let combined_seed = variant_layouts + .iter() + .map(|v| v.randomization_seed) + .fold(repr.field_shuffle_seed, |acc, seed| acc.wrapping_add(seed)); + let layout = LayoutData { variants: Variants::Multiple { tag: niche_scalar, @@ -670,6 +686,7 @@ impl LayoutCalculator { align, max_repr_align, unadjusted_abi_align, + randomization_seed: combined_seed, }; Some(TmpLayout { layout, variants: variant_layouts }) @@ -960,6 +977,11 @@ impl LayoutCalculator { let largest_niche = Niche::from_scalar(dl, Size::ZERO, tag); + let combined_seed = layout_variants + .iter() + .map(|v| v.randomization_seed) + .fold(repr.field_shuffle_seed, |acc, seed| acc.wrapping_add(seed)); + let tagged_layout = LayoutData { variants: Variants::Multiple { tag, @@ -977,6 +999,7 @@ impl LayoutCalculator { size, max_repr_align, unadjusted_abi_align, + randomization_seed: combined_seed, }; let tagged_layout = TmpLayout { layout: tagged_layout, variants: layout_variants }; @@ -1004,8 +1027,8 @@ impl LayoutCalculator { Variants::Multiple { tag, tag_encoding, tag_field, .. } => { Variants::Multiple { tag, tag_encoding, tag_field, variants: best_layout.variants } } - Variants::Single { .. } => { - panic!("encountered a single-variant enum during multi-variant layout") + Variants::Single { .. } | Variants::Empty => { + panic!("encountered a single-variant or empty enum during multi-variant layout") } }; Ok(best_layout.layout) @@ -1029,12 +1052,15 @@ impl LayoutCalculator { let mut max_repr_align = repr.align; let mut inverse_memory_index: IndexVec = fields.indices().collect(); let optimize_field_order = !repr.inhibit_struct_field_reordering(); - if optimize_field_order && fields.len() > 1 { - let end = - if let StructKind::MaybeUnsized = kind { fields.len() - 1 } else { fields.len() }; - let optimizing = &mut inverse_memory_index.raw[..end]; - let fields_excluding_tail = &fields.raw[..end]; + let end = if let StructKind::MaybeUnsized = kind { fields.len() - 1 } else { fields.len() }; + let optimizing = &mut inverse_memory_index.raw[..end]; + let fields_excluding_tail = &fields.raw[..end]; + // unsizable tail fields are excluded so that we use the same seed for the sized and unsized layouts. + let field_seed = fields_excluding_tail + .iter() + .fold(0u64, |acc, f| acc.wrapping_add(f.randomization_seed)); + if optimize_field_order && fields.len() > 1 { // If `-Z randomize-layout` was enabled for the type definition we can shuffle // the field ordering to try and catch some code making assumptions about layouts // we don't guarantee. 
@@ -1045,8 +1071,9 @@ impl LayoutCalculator { use rand::seq::SliceRandom; // `ReprOptions.field_shuffle_seed` is a deterministic seed we can use to randomize field // ordering. - let mut rng = - rand_xoshiro::Xoshiro128StarStar::seed_from_u64(repr.field_shuffle_seed); + let mut rng = rand_xoshiro::Xoshiro128StarStar::seed_from_u64( + field_seed.wrapping_add(repr.field_shuffle_seed), + ); // Shuffle the ordering of the fields. optimizing.shuffle(&mut rng); @@ -1343,6 +1370,8 @@ impl LayoutCalculator { unadjusted_abi_align }; + let seed = field_seed.wrapping_add(repr.field_shuffle_seed); + Ok(LayoutData { variants: Variants::Single { index: VariantIdx::new(0) }, fields: FieldsShape::Arbitrary { offsets, memory_index }, @@ -1352,6 +1381,7 @@ impl LayoutCalculator { size, max_repr_align, unadjusted_abi_align, + randomization_seed: seed, }) } diff --git a/compiler/rustc_abi/src/lib.rs b/compiler/rustc_abi/src/lib.rs index 15a27c0b6ee0d..8ad33749f3473 100644 --- a/compiler/rustc_abi/src/lib.rs +++ b/compiler/rustc_abi/src/lib.rs @@ -8,6 +8,38 @@ #![warn(unreachable_pub)] // tidy-alphabetical-end +/*! ABI handling for rustc + +## What is an "ABI"? + +Literally, "application binary interface", which means it is everything about how code interacts, +at the machine level, with other code. This means it technically covers all of the following: +- object binary format for e.g. relocations or offset tables +- in-memory layout of types +- procedure calling conventions + +When we discuss "ABI" in the context of rustc, we are probably discussing calling conventions. +To describe those `rustc_abi` also covers type layout, as it must for values passed on the stack. +Despite `rustc_abi` being about calling conventions, it is good to remember these usages exist. +You will encounter all of them and more if you study target-specific codegen enough! +Even in general conversation, when someone says "the Rust ABI is unstable", it may allude to +either or both of +- `repr(Rust)` types have a mostly-unspecified layout +- `extern "Rust" fn(A) -> R` has an unspecified calling convention + +## Crate Goal + +ABI is a foundational concept, so the `rustc_abi` crate serves as an equally foundational crate. +It cannot carry all details relevant to an ABI: those permeate code generation and linkage. +Instead, `rustc_abi` is intended to provide the interface for reasoning about the binary interface. +It should contain traits and types that other crates then use in their implementation. +For example, a platform's `extern "C" fn` calling convention will be implemented in `rustc_target` +but `rustc_abi` contains the types for calculating layout and describing register-passing. +This makes it easier to describe things in the same way across targets, codegen backends, and +even other Rust compilers, such as rust-analyzer! + +*/ + use std::fmt; #[cfg(feature = "nightly")] use std::iter::Step; @@ -1504,10 +1536,12 @@ impl BackendRepr { #[derive(PartialEq, Eq, Hash, Clone, Debug)] #[cfg_attr(feature = "nightly", derive(HashStable_Generic))] pub enum Variants { + /// A type with no valid variants. Must be uninhabited. + Empty, + /// Single enum variants, structs/tuples, unions, and all non-ADTs. Single { - /// Always 0 for non-enums/generators. - /// For enums without a variant, this is an invalid index! + /// Always `0` for types that cannot have multiple variants. 
index: VariantIdx, }, @@ -1685,6 +1719,18 @@ pub struct LayoutData { /// Only used on aarch64-linux, where the argument passing ABI ignores the requested alignment /// in some cases. pub unadjusted_abi_align: Align, + + /// The randomization seed based on this type's own repr and its fields. + /// + /// Since randomization is toggled on a per-crate basis even crates that do not have randomization + /// enabled should still calculate a seed so that downstream uses can use it to distinguish different + /// types. + /// + /// For every T and U for which we do not guarantee that a repr(Rust) `Foo` can be coerced or + /// transmuted to `Foo` we aim to create probalistically distinct seeds so that Foo can choose + /// to reorder its fields based on that information. The current implementation is a conservative + /// approximation of this goal. + pub randomization_seed: u64, } impl LayoutData { @@ -1705,6 +1751,30 @@ impl LayoutData { let largest_niche = Niche::from_scalar(cx, Size::ZERO, scalar); let size = scalar.size(cx); let align = scalar.align(cx); + + let range = scalar.valid_range(cx); + + // All primitive types for which we don't have subtype coercions should get a distinct seed, + // so that types wrapping them can use randomization to arrive at distinct layouts. + // + // Some type information is already lost at this point, so as an approximation we derive + // the seed from what remains. For example on 64-bit targets usize and u64 can no longer + // be distinguished. + let randomization_seed = size + .bytes() + .wrapping_add( + match scalar.primitive() { + Primitive::Int(_, true) => 1, + Primitive::Int(_, false) => 2, + Primitive::Float(_) => 3, + Primitive::Pointer(_) => 4, + } << 32, + ) + // distinguishes references from pointers + .wrapping_add((range.start as u64).rotate_right(16)) + // distinguishes char from u32 and bool from u8 + .wrapping_add((range.end as u64).rotate_right(16)); + LayoutData { variants: Variants::Single { index: VariantIdx::new(0) }, fields: FieldsShape::Primitive, @@ -1714,6 +1784,7 @@ impl LayoutData { align, max_repr_align: None, unadjusted_abi_align: align.abi, + randomization_seed, } } } @@ -1736,6 +1807,7 @@ where variants, max_repr_align, unadjusted_abi_align, + ref randomization_seed, } = self; f.debug_struct("Layout") .field("size", size) @@ -1746,6 +1818,7 @@ where .field("variants", variants) .field("max_repr_align", max_repr_align) .field("unadjusted_abi_align", unadjusted_abi_align) + .field("randomization_seed", randomization_seed) .finish() } } diff --git a/compiler/rustc_arena/src/lib.rs b/compiler/rustc_arena/src/lib.rs index 4d8525989cc59..b21ccba93bb44 100644 --- a/compiler/rustc_arena/src/lib.rs +++ b/compiler/rustc_arena/src/lib.rs @@ -78,7 +78,7 @@ impl ArenaChunk { // been initialized. 
unsafe { let slice = self.storage.as_mut(); - ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut slice[..len])); + slice[..len].assume_init_drop(); } } } diff --git a/compiler/rustc_ast/src/ast.rs b/compiler/rustc_ast/src/ast.rs index 262e418ecbf74..8e73df63ef54f 100644 --- a/compiler/rustc_ast/src/ast.rs +++ b/compiler/rustc_ast/src/ast.rs @@ -31,8 +31,7 @@ use rustc_data_structures::sync::Lrc; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; pub use rustc_span::AttrId; use rustc_span::source_map::{Spanned, respan}; -use rustc_span::symbol::{Ident, Symbol, kw, sym}; -use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span}; +use rustc_span::{ErrorGuaranteed, Ident, Span, Symbol, kw, sym}; use thin_vec::{ThinVec, thin_vec}; pub use crate::format::*; @@ -242,15 +241,15 @@ impl AngleBracketedArg { } } -impl Into> for AngleBracketedArgs { - fn into(self) -> P { - P(GenericArgs::AngleBracketed(self)) +impl From for P { + fn from(val: AngleBracketedArgs) -> Self { + P(GenericArgs::AngleBracketed(val)) } } -impl Into> for ParenthesizedArgs { - fn into(self) -> P { - P(GenericArgs::Parenthesized(self)) +impl From for P { + fn from(val: ParenthesizedArgs) -> Self { + P(GenericArgs::Parenthesized(val)) } } @@ -388,22 +387,15 @@ impl GenericParam { /// Represents lifetime, type and const parameters attached to a declaration of /// a function, enum, trait, etc. -#[derive(Clone, Encodable, Decodable, Debug)] +#[derive(Clone, Encodable, Decodable, Debug, Default)] pub struct Generics { pub params: ThinVec, pub where_clause: WhereClause, pub span: Span, } -impl Default for Generics { - /// Creates an instance of `Generics`. - fn default() -> Generics { - Generics { params: ThinVec::new(), where_clause: Default::default(), span: DUMMY_SP } - } -} - /// A where-clause in a definition. -#[derive(Clone, Encodable, Decodable, Debug)] +#[derive(Clone, Encodable, Decodable, Debug, Default)] pub struct WhereClause { /// `true` if we ate a `where` token. /// @@ -420,12 +412,6 @@ impl WhereClause { } } -impl Default for WhereClause { - fn default() -> WhereClause { - WhereClause { has_where_token: false, predicates: ThinVec::new(), span: DUMMY_SP } - } -} - /// A single predicate in a where-clause. #[derive(Clone, Encodable, Decodable, Debug)] pub struct WherePredicate { @@ -637,7 +623,7 @@ impl Pat { PatKind::Wild | PatKind::Rest | PatKind::Never - | PatKind::Lit(_) + | PatKind::Expr(_) | PatKind::Range(..) | PatKind::Ident(..) | PatKind::Path(..) @@ -815,8 +801,8 @@ pub enum PatKind { /// A reference pattern (e.g., `&mut (a, b)`). Ref(P, Mutability), - /// A literal. - Lit(P), + /// A literal, const block or path. + Expr(P), /// A range pattern (e.g., `1...2`, `1..2`, `1..`, `..2`, `1..=2`, `..=2`). Range(Option>, Option>, Spanned), @@ -859,6 +845,8 @@ pub enum PatKind { pub enum PatFieldsRest { /// `module::StructName { field, ..}` Rest, + /// `module::StructName { field, syntax error }` + Recovered(ErrorGuaranteed), /// `module::StructName { field }` None, } @@ -1321,11 +1309,15 @@ impl Expr { } pub fn precedence(&self) -> ExprPrecedence { - match self.kind { - ExprKind::Closure(..) => ExprPrecedence::Closure, + match &self.kind { + ExprKind::Closure(closure) => { + match closure.fn_decl.output { + FnRetTy::Default(_) => ExprPrecedence::Jump, + FnRetTy::Ty(_) => ExprPrecedence::Unambiguous, + } + } ExprKind::Break(..) - | ExprKind::Continue(..) | ExprKind::Ret(..) | ExprKind::Yield(..) | ExprKind::Yeet(..) @@ -1359,6 +1351,7 @@ impl Expr { | ExprKind::Block(..) | ExprKind::Call(..) 
| ExprKind::ConstBlock(_) + | ExprKind::Continue(..) | ExprKind::Field(..) | ExprKind::ForLoop { .. } | ExprKind::FormatArgs(..) diff --git a/compiler/rustc_ast/src/attr/mod.rs b/compiler/rustc_ast/src/attr/mod.rs index 8ee3d4d590cd3..51f1858001321 100644 --- a/compiler/rustc_ast/src/attr/mod.rs +++ b/compiler/rustc_ast/src/attr/mod.rs @@ -1,12 +1,10 @@ //! Functions dealing with attributes and meta items. use std::fmt::Debug; -use std::iter; use std::sync::atomic::{AtomicU32, Ordering}; use rustc_index::bit_set::GrowableBitSet; -use rustc_span::Span; -use rustc_span::symbol::{Ident, Symbol, sym}; +use rustc_span::{Ident, Span, Symbol, sym}; use smallvec::{SmallVec, smallvec}; use thin_vec::{ThinVec, thin_vec}; @@ -17,7 +15,9 @@ use crate::ast::{ }; use crate::ptr::P; use crate::token::{self, CommentKind, Delimiter, Token}; -use crate::tokenstream::{DelimSpan, LazyAttrTokenStream, Spacing, TokenStream, TokenTree}; +use crate::tokenstream::{ + DelimSpan, LazyAttrTokenStream, Spacing, TokenStream, TokenStreamIter, TokenTree, +}; use crate::util::comments; use crate::util::literal::escape_string_symbol; @@ -366,12 +366,9 @@ impl MetaItem { } } - fn from_tokens<'a, I>(tokens: &mut iter::Peekable) -> Option - where - I: Iterator, - { + fn from_tokens(iter: &mut TokenStreamIter<'_>) -> Option { // FIXME: Share code with `parse_path`. - let tt = tokens.next().map(|tt| TokenTree::uninterpolate(tt)); + let tt = iter.next().map(|tt| TokenTree::uninterpolate(tt)); let path = match tt.as_deref() { Some(&TokenTree::Token( Token { kind: ref kind @ (token::Ident(..) | token::PathSep), span }, @@ -379,9 +376,9 @@ impl MetaItem { )) => 'arm: { let mut segments = if let &token::Ident(name, _) = kind { if let Some(TokenTree::Token(Token { kind: token::PathSep, .. }, _)) = - tokens.peek() + iter.peek() { - tokens.next(); + iter.next(); thin_vec![PathSegment::from_ident(Ident::new(name, span))] } else { break 'arm Path::from_ident(Ident::new(name, span)); @@ -391,16 +388,16 @@ impl MetaItem { }; loop { if let Some(&TokenTree::Token(Token { kind: token::Ident(name, _), span }, _)) = - tokens.next().map(|tt| TokenTree::uninterpolate(tt)).as_deref() + iter.next().map(|tt| TokenTree::uninterpolate(tt)).as_deref() { segments.push(PathSegment::from_ident(Ident::new(name, span))); } else { return None; } if let Some(TokenTree::Token(Token { kind: token::PathSep, .. }, _)) = - tokens.peek() + iter.peek() { - tokens.next(); + iter.next(); } else { break; } @@ -421,8 +418,8 @@ impl MetaItem { } _ => return None, }; - let list_closing_paren_pos = tokens.peek().map(|tt| tt.span().hi()); - let kind = MetaItemKind::from_tokens(tokens)?; + let list_closing_paren_pos = iter.peek().map(|tt| tt.span().hi()); + let kind = MetaItemKind::from_tokens(iter)?; let hi = match &kind { MetaItemKind::NameValue(lit) => lit.span.hi(), MetaItemKind::List(..) => list_closing_paren_pos.unwrap_or(path.span.hi()), @@ -439,12 +436,12 @@ impl MetaItem { impl MetaItemKind { // public because it can be called in the hir pub fn list_from_tokens(tokens: TokenStream) -> Option> { - let mut tokens = tokens.trees().peekable(); + let mut iter = tokens.iter(); let mut result = ThinVec::new(); - while tokens.peek().is_some() { - let item = MetaItemInner::from_tokens(&mut tokens)?; + while iter.peek().is_some() { + let item = MetaItemInner::from_tokens(&mut iter)?; result.push(item); - match tokens.next() { + match iter.next() { None | Some(TokenTree::Token(Token { kind: token::Comma, .. 
}, _)) => {} _ => return None, } @@ -452,12 +449,10 @@ impl MetaItemKind { Some(result) } - fn name_value_from_tokens<'a>( - tokens: &mut impl Iterator, - ) -> Option { - match tokens.next() { + fn name_value_from_tokens(iter: &mut TokenStreamIter<'_>) -> Option { + match iter.next() { Some(TokenTree::Delimited(.., Delimiter::Invisible(_), inner_tokens)) => { - MetaItemKind::name_value_from_tokens(&mut inner_tokens.trees()) + MetaItemKind::name_value_from_tokens(&mut inner_tokens.iter()) } Some(TokenTree::Token(token, _)) => { MetaItemLit::from_token(token).map(MetaItemKind::NameValue) @@ -466,19 +461,17 @@ impl MetaItemKind { } } - fn from_tokens<'a>( - tokens: &mut iter::Peekable>, - ) -> Option { - match tokens.peek() { + fn from_tokens(iter: &mut TokenStreamIter<'_>) -> Option { + match iter.peek() { Some(TokenTree::Delimited(.., Delimiter::Parenthesis, inner_tokens)) => { let inner_tokens = inner_tokens.clone(); - tokens.next(); + iter.next(); MetaItemKind::list_from_tokens(inner_tokens).map(MetaItemKind::List) } Some(TokenTree::Delimited(..)) => None, Some(TokenTree::Token(Token { kind: token::Eq, .. }, _)) => { - tokens.next(); - MetaItemKind::name_value_from_tokens(tokens) + iter.next(); + MetaItemKind::name_value_from_tokens(iter) } _ => Some(MetaItemKind::Word), } @@ -594,22 +587,19 @@ impl MetaItemInner { self.meta_item().is_some() } - fn from_tokens<'a, I>(tokens: &mut iter::Peekable) -> Option - where - I: Iterator, - { - match tokens.peek() { + fn from_tokens(iter: &mut TokenStreamIter<'_>) -> Option { + match iter.peek() { Some(TokenTree::Token(token, _)) if let Some(lit) = MetaItemLit::from_token(token) => { - tokens.next(); + iter.next(); return Some(MetaItemInner::Lit(lit)); } Some(TokenTree::Delimited(.., Delimiter::Invisible(_), inner_tokens)) => { - tokens.next(); - return MetaItemInner::from_tokens(&mut inner_tokens.trees().peekable()); + iter.next(); + return MetaItemInner::from_tokens(&mut inner_tokens.iter()); } _ => {} } - MetaItem::from_tokens(tokens).map(MetaItemInner::MetaItem) + MetaItem::from_tokens(iter).map(MetaItemInner::MetaItem) } } @@ -733,6 +723,8 @@ impl MetaItemLit { pub trait AttributeExt: Debug { fn id(&self) -> AttrId; + /// For a single-segment attribute (i.e., `#[attr]` and not `#[path::atrr]`), + /// return the name of the attribute, else return the empty identifier. 
fn name_or_empty(&self) -> Symbol { self.ident().unwrap_or_else(Ident::empty).name } diff --git a/compiler/rustc_ast/src/entry.rs b/compiler/rustc_ast/src/entry.rs index fffcb3ef98890..ab1413d6080e5 100644 --- a/compiler/rustc_ast/src/entry.rs +++ b/compiler/rustc_ast/src/entry.rs @@ -1,5 +1,4 @@ -use rustc_span::Symbol; -use rustc_span::symbol::sym; +use rustc_span::{Symbol, sym}; use crate::attr::{self, AttributeExt}; diff --git a/compiler/rustc_ast/src/expand/allocator.rs b/compiler/rustc_ast/src/expand/allocator.rs index bee7dfb61da87..dd8d5ae624a3c 100644 --- a/compiler/rustc_ast/src/expand/allocator.rs +++ b/compiler/rustc_ast/src/expand/allocator.rs @@ -1,5 +1,5 @@ use rustc_macros::HashStable_Generic; -use rustc_span::symbol::{Symbol, sym}; +use rustc_span::{Symbol, sym}; #[derive(Clone, Debug, Copy, Eq, PartialEq, HashStable_Generic)] pub enum AllocatorKind { diff --git a/compiler/rustc_ast/src/expand/autodiff_attrs.rs b/compiler/rustc_ast/src/expand/autodiff_attrs.rs index 05714731b9d4d..7ef8bc1797384 100644 --- a/compiler/rustc_ast/src/expand/autodiff_attrs.rs +++ b/compiler/rustc_ast/src/expand/autodiff_attrs.rs @@ -6,7 +6,6 @@ use std::fmt::{self, Display, Formatter}; use std::str::FromStr; -use crate::expand::typetree::TypeTree; use crate::expand::{Decodable, Encodable, HashStable_Generic}; use crate::ptr::P; use crate::{Ty, TyKind}; @@ -79,10 +78,6 @@ pub struct AutoDiffItem { /// The name of the function being generated pub target: String, pub attrs: AutoDiffAttrs, - /// Describe the memory layout of input types - pub inputs: Vec, - /// Describe the memory layout of the output type - pub output: TypeTree, } #[derive(Clone, Eq, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] pub struct AutoDiffAttrs { @@ -262,22 +257,14 @@ impl AutoDiffAttrs { !matches!(self.mode, DiffMode::Error | DiffMode::Source) } - pub fn into_item( - self, - source: String, - target: String, - inputs: Vec, - output: TypeTree, - ) -> AutoDiffItem { - AutoDiffItem { source, target, inputs, output, attrs: self } + pub fn into_item(self, source: String, target: String) -> AutoDiffItem { + AutoDiffItem { source, target, attrs: self } } } impl fmt::Display for AutoDiffItem { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Differentiating {} -> {}", self.source, self.target)?; - write!(f, " with attributes: {:?}", self.attrs)?; - write!(f, " with inputs: {:?}", self.inputs)?; - write!(f, " with output: {:?}", self.output) + write!(f, " with attributes: {:?}", self.attrs) } } diff --git a/compiler/rustc_ast/src/expand/mod.rs b/compiler/rustc_ast/src/expand/mod.rs index d259677e98e3d..04c8162932369 100644 --- a/compiler/rustc_ast/src/expand/mod.rs +++ b/compiler/rustc_ast/src/expand/mod.rs @@ -1,8 +1,8 @@ //! Definitions shared by macros / syntax extensions and e.g. `rustc_middle`. 
use rustc_macros::{Decodable, Encodable, HashStable_Generic}; +use rustc_span::Ident; use rustc_span::def_id::DefId; -use rustc_span::symbol::Ident; use crate::MetaItem; diff --git a/compiler/rustc_ast/src/format.rs b/compiler/rustc_ast/src/format.rs index d5900d83e4d1a..b93846c1fe6f3 100644 --- a/compiler/rustc_ast/src/format.rs +++ b/compiler/rustc_ast/src/format.rs @@ -1,10 +1,10 @@ use rustc_data_structures::fx::FxHashMap; use rustc_macros::{Decodable, Encodable}; -use rustc_span::Span; -use rustc_span::symbol::{Ident, Symbol}; +use rustc_span::{Ident, Span, Symbol}; use crate::Expr; use crate::ptr::P; +use crate::token::LitKind; // Definitions: // @@ -46,6 +46,10 @@ pub struct FormatArgs { pub span: Span, pub template: Vec, pub arguments: FormatArguments, + /// The raw, un-split format string literal, with no escaping or processing. + /// + /// Generally only useful for lints that care about the raw bytes the user wrote. + pub uncooked_fmt_str: (LitKind, Symbol), } /// A piece of a format template string. diff --git a/compiler/rustc_ast/src/mut_visit.rs b/compiler/rustc_ast/src/mut_visit.rs index f07266a3e2d93..aa88e8369d5b6 100644 --- a/compiler/rustc_ast/src/mut_visit.rs +++ b/compiler/rustc_ast/src/mut_visit.rs @@ -13,9 +13,8 @@ use std::panic; use rustc_data_structures::flat_map_in_place::FlatMapInPlace; use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_data_structures::sync::Lrc; -use rustc_span::Span; use rustc_span::source_map::Spanned; -use rustc_span::symbol::Ident; +use rustc_span::{Ident, Span}; use smallvec::{Array, SmallVec, smallvec}; use thin_vec::ThinVec; @@ -1513,7 +1512,7 @@ pub fn walk_pat(vis: &mut T, pat: &mut P) { vis.visit_ident(ident); visit_opt(sub, |sub| vis.visit_pat(sub)); } - PatKind::Lit(e) => vis.visit_expr(e), + PatKind::Expr(e) => vis.visit_expr(e), PatKind::TupleStruct(qself, path, elems) => { vis.visit_qself(qself); vis.visit_path(path); @@ -1597,7 +1596,7 @@ fn walk_inline_asm_sym( fn walk_format_args(vis: &mut T, fmt: &mut FormatArgs) { // FIXME: visit the template exhaustively. - let FormatArgs { span, template: _, arguments } = fmt; + let FormatArgs { span, template: _, arguments, uncooked_fmt_str: _ } = fmt; for FormatArgument { kind, expr } in arguments.all_args_mut() { match kind { FormatArgumentKind::Named(ident) | FormatArgumentKind::Captured(ident) => { diff --git a/compiler/rustc_ast/src/ptr.rs b/compiler/rustc_ast/src/ptr.rs index 97c714df8fd2f..dd923305cdfd9 100644 --- a/compiler/rustc_ast/src/ptr.rs +++ b/compiler/rustc_ast/src/ptr.rs @@ -158,9 +158,9 @@ impl From> for P<[T]> { } } -impl Into> for P<[T]> { - fn into(self) -> Vec { - self.into_vec() +impl From> for Vec { + fn from(val: P<[T]>) -> Self { + val.into_vec() } } diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs index 678f43e351137..3b7367d1ee209 100644 --- a/compiler/rustc_ast/src/token.rs +++ b/compiler/rustc_ast/src/token.rs @@ -11,11 +11,10 @@ use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_data_structures::sync::Lrc; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; use rustc_span::edition::Edition; +use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, kw, sym}; #[allow(clippy::useless_attribute)] // FIXME: following use of `hidden_glob_reexports` incorrectly triggers `useless_attribute` lint. 
#[allow(hidden_glob_reexports)] -use rustc_span::symbol::{Ident, Symbol}; -use rustc_span::symbol::{kw, sym}; -use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span}; +use rustc_span::{Ident, Symbol}; use crate::ast; use crate::ptr::P; @@ -904,12 +903,14 @@ impl Token { self.is_non_raw_ident_where(|id| id.name == kw) } - /// Returns `true` if the token is a given keyword, `kw` or if `case` is `Insensitive` and this token is an identifier equal to `kw` ignoring the case. + /// Returns `true` if the token is a given keyword, `kw` or if `case` is `Insensitive` and this + /// token is an identifier equal to `kw` ignoring the case. pub fn is_keyword_case(&self, kw: Symbol, case: Case) -> bool { self.is_keyword(kw) || (case == Case::Insensitive && self.is_non_raw_ident_where(|id| { - id.name.as_str().to_lowercase() == kw.as_str().to_lowercase() + // Do an ASCII case-insensitive match, because all keywords are ASCII. + id.name.as_str().eq_ignore_ascii_case(kw.as_str()) })) } @@ -917,6 +918,11 @@ impl Token { self.is_non_raw_ident_where(Ident::is_path_segment_keyword) } + /// Don't use this unless you're doing something very loose and heuristic-y. + pub fn is_any_keyword(&self) -> bool { + self.is_non_raw_ident_where(Ident::is_any_keyword) + } + /// Returns true for reserved identifiers used internally for elided lifetimes, /// unnamed method parameters, crate root module, error recovery etc. pub fn is_special_ident(&self) -> bool { diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs index c6b6addc946e8..e7b393d869d2b 100644 --- a/compiler/rustc_ast/src/tokenstream.rs +++ b/compiler/rustc_ast/src/tokenstream.rs @@ -99,7 +99,7 @@ where CTX: crate::HashStableContext, { fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) { - for sub_tt in self.trees() { + for sub_tt in self.iter() { sub_tt.hash_stable(hcx, hasher); } } @@ -406,7 +406,7 @@ impl Eq for TokenStream {} impl PartialEq for TokenStream { fn eq(&self, other: &TokenStream) -> bool { - self.trees().eq(other.trees()) + self.iter().eq(other.iter()) } } @@ -423,24 +423,24 @@ impl TokenStream { self.0.len() } - pub fn trees(&self) -> RefTokenTreeCursor<'_> { - RefTokenTreeCursor::new(self) + pub fn get(&self, index: usize) -> Option<&TokenTree> { + self.0.get(index) } - pub fn into_trees(self) -> TokenTreeCursor { - TokenTreeCursor::new(self) + pub fn iter(&self) -> TokenStreamIter<'_> { + TokenStreamIter::new(self) } /// Compares two `TokenStream`s, checking equality without regarding span information. pub fn eq_unspanned(&self, other: &TokenStream) -> bool { - let mut t1 = self.trees(); - let mut t2 = other.trees(); - for (t1, t2) in iter::zip(&mut t1, &mut t2) { - if !t1.eq_unspanned(t2) { + let mut iter1 = self.iter(); + let mut iter2 = other.iter(); + for (tt1, tt2) in iter::zip(&mut iter1, &mut iter2) { + if !tt1.eq_unspanned(tt2) { return false; } } - t1.next().is_none() && t2.next().is_none() + iter1.next().is_none() && iter2.next().is_none() } /// Create a token stream containing a single token with alone spacing. The @@ -509,7 +509,7 @@ impl TokenStream { #[must_use] pub fn flattened(&self) -> TokenStream { fn can_skip(stream: &TokenStream) -> bool { - stream.trees().all(|tree| match tree { + stream.iter().all(|tree| match tree { TokenTree::Token(token, _) => !matches!( token.kind, token::NtIdent(..) | token::NtLifetime(..) | token::Interpolated(..) 
@@ -522,7 +522,7 @@ impl TokenStream { return self.clone(); } - self.trees().map(|tree| TokenStream::flatten_token_tree(tree)).collect() + self.iter().map(|tree| TokenStream::flatten_token_tree(tree)).collect() } // If `vec` is not empty, try to glue `tt` onto its last token. The return @@ -665,25 +665,26 @@ impl TokenStream { } } -/// By-reference iterator over a [`TokenStream`], that produces `&TokenTree` -/// items. #[derive(Clone)] -pub struct RefTokenTreeCursor<'t> { +pub struct TokenStreamIter<'t> { stream: &'t TokenStream, index: usize, } -impl<'t> RefTokenTreeCursor<'t> { +impl<'t> TokenStreamIter<'t> { fn new(stream: &'t TokenStream) -> Self { - RefTokenTreeCursor { stream, index: 0 } + TokenStreamIter { stream, index: 0 } } - pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> { - self.stream.0.get(self.index + n) + // Peeking could be done via `Peekable`, but most iterators need peeking, + // and this is simple and avoids the need to use `peekable` and `Peekable` + // at all the use sites. + pub fn peek(&self) -> Option<&'t TokenTree> { + self.stream.0.get(self.index) } } -impl<'t> Iterator for RefTokenTreeCursor<'t> { +impl<'t> Iterator for TokenStreamIter<'t> { type Item = &'t TokenTree; fn next(&mut self) -> Option<&'t TokenTree> { @@ -694,39 +695,6 @@ impl<'t> Iterator for RefTokenTreeCursor<'t> { } } -/// Owning by-value iterator over a [`TokenStream`], that produces `&TokenTree` -/// items. -/// -/// Doesn't impl `Iterator` because Rust doesn't permit an owning iterator to -/// return `&T` from `next`; the need for an explicit lifetime in the `Item` -/// associated type gets in the way. Instead, use `next_ref` (which doesn't -/// involve associated types) for getting individual elements, or -/// `RefTokenTreeCursor` if you really want an `Iterator`, e.g. in a `for` -/// loop. 
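A rough, self-contained analogue of the new `TokenStreamIter` above, using a plain slice instead of the real `TokenStream`, to show the built-in `peek` that replaces the old cursor's `look_ahead`:

// `Tok` stands in for `TokenTree`; this is a sketch of the shape, not the rustc type.
#[derive(Clone)]
struct Iter<'a, Tok> {
    items: &'a [Tok],
    index: usize,
}

impl<'a, Tok> Iter<'a, Tok> {
    fn new(items: &'a [Tok]) -> Self {
        Iter { items, index: 0 }
    }

    // Built-in peeking, so call sites don't have to wrap the iterator in `Peekable`.
    fn peek(&self) -> Option<&'a Tok> {
        self.items.get(self.index)
    }
}

impl<'a, Tok> Iterator for Iter<'a, Tok> {
    type Item = &'a Tok;

    fn next(&mut self) -> Option<&'a Tok> {
        let item = self.items.get(self.index)?;
        self.index += 1;
        Some(item)
    }
}

fn main() {
    let tokens = ["fn", "main", "(", ")"];
    let mut it = Iter::new(&tokens);
    assert_eq!(it.peek(), Some(&"fn"));
    assert_eq!(it.next(), Some(&"fn"));
    assert_eq!(it.peek(), Some(&"main"));
}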
-#[derive(Clone, Debug)] -pub struct TokenTreeCursor { - pub stream: TokenStream, - index: usize, -} - -impl TokenTreeCursor { - fn new(stream: TokenStream) -> Self { - TokenTreeCursor { stream, index: 0 } - } - - #[inline] - pub fn next_ref(&mut self) -> Option<&TokenTree> { - self.stream.0.get(self.index).map(|tree| { - self.index += 1; - tree - }) - } - - pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> { - self.stream.0.get(self.index + n) - } -} - #[derive(Debug, Copy, Clone, PartialEq, Encodable, Decodable, HashStable_Generic)] pub struct DelimSpan { pub open: Span, diff --git a/compiler/rustc_ast/src/util/comments/tests.rs b/compiler/rustc_ast/src/util/comments/tests.rs index f88b534a0c114..7a87ccea62a7e 100644 --- a/compiler/rustc_ast/src/util/comments/tests.rs +++ b/compiler/rustc_ast/src/util/comments/tests.rs @@ -1,4 +1,4 @@ -#![cfg_attr(not(bootstrap), allow(rustc::symbol_intern_string_literal))] +#![allow(rustc::symbol_intern_string_literal)] use rustc_span::create_default_session_globals_then; diff --git a/compiler/rustc_ast/src/util/literal.rs b/compiler/rustc_ast/src/util/literal.rs index 498df5a71441b..4459cb962e8e9 100644 --- a/compiler/rustc_ast/src/util/literal.rs +++ b/compiler/rustc_ast/src/util/literal.rs @@ -5,8 +5,7 @@ use std::{ascii, fmt, str}; use rustc_lexer::unescape::{ MixedUnit, Mode, byte_from_char, unescape_byte, unescape_char, unescape_mixed, unescape_unicode, }; -use rustc_span::Span; -use rustc_span::symbol::{Symbol, kw, sym}; +use rustc_span::{Span, Symbol, kw, sym}; use tracing::debug; use crate::ast::{self, LitKind, MetaItemLit, StrStyle}; diff --git a/compiler/rustc_ast/src/util/parser.rs b/compiler/rustc_ast/src/util/parser.rs index e88bf27021aff..8f2b7a23c01c8 100644 --- a/compiler/rustc_ast/src/util/parser.rs +++ b/compiler/rustc_ast/src/util/parser.rs @@ -1,4 +1,4 @@ -use rustc_span::symbol::kw; +use rustc_span::kw; use crate::ast::{self, BinOpKind}; use crate::token::{self, BinOpToken, Token}; @@ -153,9 +153,10 @@ impl AssocOp { match *self { Assign | AssignOp(_) => Fixity::Right, As | Multiply | Divide | Modulus | Add | Subtract | ShiftLeft | ShiftRight | BitAnd - | BitXor | BitOr | Less | Greater | LessEqual | GreaterEqual | Equal | NotEqual - | LAnd | LOr => Fixity::Left, - DotDot | DotDotEq => Fixity::None, + | BitXor | BitOr | LAnd | LOr => Fixity::Left, + Less | Greater | LessEqual | GreaterEqual | Equal | NotEqual | DotDot | DotDotEq => { + Fixity::None + } } } @@ -231,8 +232,7 @@ impl AssocOp { #[derive(Clone, Copy, PartialEq, PartialOrd)] pub enum ExprPrecedence { - Closure, - // return, break, yield + // return, break, yield, closures Jump, // = += -= *= /= %= &= |= ^= <<= >>= Assign, diff --git a/compiler/rustc_ast/src/visit.rs b/compiler/rustc_ast/src/visit.rs index 211d13659ee59..1d6d7330757de 100644 --- a/compiler/rustc_ast/src/visit.rs +++ b/compiler/rustc_ast/src/visit.rs @@ -15,8 +15,7 @@ pub use rustc_ast_ir::visit::VisitorResult; pub use rustc_ast_ir::{try_visit, visit_opt, walk_list, walk_visitable_list}; -use rustc_span::Span; -use rustc_span::symbol::Ident; +use rustc_span::{Ident, Span}; use crate::ast::*; use crate::ptr::P; @@ -681,7 +680,7 @@ pub fn walk_pat<'a, V: Visitor<'a>>(visitor: &mut V, pattern: &'a Pat) -> V::Res try_visit!(visitor.visit_ident(ident)); visit_opt!(visitor, visit_pat, optional_subpattern); } - PatKind::Lit(expression) => try_visit!(visitor.visit_expr(expression)), + PatKind::Expr(expression) => try_visit!(visitor.visit_expr(expression)), PatKind::Range(lower_bound, upper_bound, _end) => { 
visit_opt!(visitor, visit_expr, lower_bound); visit_opt!(visitor, visit_expr, upper_bound); @@ -1062,7 +1061,7 @@ pub fn walk_inline_asm_sym<'a, V: Visitor<'a>>( } pub fn walk_format_args<'a, V: Visitor<'a>>(visitor: &mut V, fmt: &'a FormatArgs) -> V::Result { - let FormatArgs { span: _, template: _, arguments } = fmt; + let FormatArgs { span: _, template: _, arguments, uncooked_fmt_str: _ } = fmt; for FormatArgument { kind, expr } in arguments.all_args() { match kind { FormatArgumentKind::Named(ident) | FormatArgumentKind::Captured(ident) => { diff --git a/compiler/rustc_ast_lowering/Cargo.toml b/compiler/rustc_ast_lowering/Cargo.toml index 8cc4521e0a78d..bf84013613309 100644 --- a/compiler/rustc_ast_lowering/Cargo.toml +++ b/compiler/rustc_ast_lowering/Cargo.toml @@ -10,6 +10,7 @@ doctest = false # tidy-alphabetical-start rustc_ast = { path = "../rustc_ast" } rustc_ast_pretty = { path = "../rustc_ast_pretty" } +rustc_attr_parsing = { path = "../rustc_attr_parsing" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } @@ -17,6 +18,7 @@ rustc_hir = { path = "../rustc_hir" } rustc_index = { path = "../rustc_index" } rustc_macros = { path = "../rustc_macros" } rustc_middle = { path = "../rustc_middle" } +rustc_parse = { path = "../rustc_parse" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } diff --git a/compiler/rustc_ast_lowering/src/asm.rs b/compiler/rustc_ast_lowering/src/asm.rs index 569a15b0e07ba..2f1f1269ece57 100644 --- a/compiler/rustc_ast_lowering/src/asm.rs +++ b/compiler/rustc_ast_lowering/src/asm.rs @@ -7,8 +7,7 @@ use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap}; use rustc_hir as hir; use rustc_hir::def::{DefKind, Res}; use rustc_session::parse::feature_err; -use rustc_span::symbol::kw; -use rustc_span::{Span, sym}; +use rustc_span::{Span, kw, sym}; use rustc_target::asm; use super::LoweringContext; diff --git a/compiler/rustc_ast_lowering/src/block.rs b/compiler/rustc_ast_lowering/src/block.rs index 88ce6f80e10bd..1d9ca6bb9c8cb 100644 --- a/compiler/rustc_ast_lowering/src/block.rs +++ b/compiler/rustc_ast_lowering/src/block.rs @@ -108,7 +108,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { }; let span = self.lower_span(l.span); let source = hir::LocalSource::Normal; - self.lower_attrs(hir_id, &l.attrs); + self.lower_attrs(hir_id, &l.attrs, l.span); self.arena.alloc(hir::LetStmt { hir_id, ty, pat, init, els, span, source }) } diff --git a/compiler/rustc_ast_lowering/src/delegation.rs b/compiler/rustc_ast_lowering/src/delegation.rs index 70c94f4019ae2..266e77c0e02e6 100644 --- a/compiler/rustc_ast_lowering/src/delegation.rs +++ b/compiler/rustc_ast_lowering/src/delegation.rs @@ -46,8 +46,7 @@ use rustc_errors::ErrorGuaranteed; use rustc_hir::def_id::DefId; use rustc_middle::span_bug; use rustc_middle::ty::{Asyncness, ResolverAstLowering}; -use rustc_span::Span; -use rustc_span::symbol::Ident; +use rustc_span::{Ident, Span}; use rustc_target::spec::abi; use {rustc_ast as ast, rustc_hir as hir}; @@ -189,7 +188,14 @@ impl<'hir> LoweringContext<'_, 'hir> { ) -> hir::FnSig<'hir> { let header = if let Some(local_sig_id) = sig_id.as_local() { match self.resolver.delegation_fn_sigs.get(&local_sig_id) { - Some(sig) => self.lower_fn_header(sig.header, hir::Safety::Safe), + Some(sig) => self.lower_fn_header( + sig.header, + // HACK: we override the default safety instead of generating 
attributes from the ether. + // We are not forwarding the attributes, as the delegation fn sigs are collected on the ast, + // and here we need the hir attributes. + if sig.target_feature { hir::Safety::Unsafe } else { hir::Safety::Safe }, + &[], + ), None => self.generate_header_error(), } } else { @@ -199,7 +205,11 @@ impl<'hir> LoweringContext<'_, 'hir> { Asyncness::No => hir::IsAsync::NotAsync, }; hir::FnHeader { - safety: sig.safety, + safety: if self.tcx.codegen_fn_attrs(sig_id).safe_target_features { + hir::HeaderSafety::SafeTargetFeatures + } else { + hir::HeaderSafety::Normal(sig.safety) + }, constness: self.tcx.constness(sig_id), asyncness, abi: sig.abi, @@ -385,7 +395,7 @@ impl<'hir> LoweringContext<'_, 'hir> { fn generate_header_error(&self) -> hir::FnHeader { hir::FnHeader { - safety: hir::Safety::Safe, + safety: hir::Safety::Safe.into(), constness: hir::Constness::NotConst, asyncness: hir::IsAsync::NotAsync, abi: abi::Abi::Rust, diff --git a/compiler/rustc_ast_lowering/src/errors.rs b/compiler/rustc_ast_lowering/src/errors.rs index 2564d4e27726c..f727691bf479c 100644 --- a/compiler/rustc_ast_lowering/src/errors.rs +++ b/compiler/rustc_ast_lowering/src/errors.rs @@ -1,8 +1,7 @@ use rustc_errors::codes::*; use rustc_errors::{Diag, DiagArgFromDisplay, EmissionGuarantee, SubdiagMessageOp, Subdiagnostic}; use rustc_macros::{Diagnostic, Subdiagnostic}; -use rustc_span::symbol::Ident; -use rustc_span::{Span, Symbol}; +use rustc_span::{Ident, Span, Symbol}; #[derive(Diagnostic)] #[diag(ast_lowering_generic_type_with_parentheses, code = E0214)] diff --git a/compiler/rustc_ast_lowering/src/expr.rs b/compiler/rustc_ast_lowering/src/expr.rs index 3290580634316..5a43fcad21041 100644 --- a/compiler/rustc_ast_lowering/src/expr.rs +++ b/compiler/rustc_ast_lowering/src/expr.rs @@ -13,8 +13,7 @@ use rustc_middle::span_bug; use rustc_middle::ty::TyCtxt; use rustc_session::errors::report_lit_error; use rustc_span::source_map::{Spanned, respan}; -use rustc_span::symbol::{Ident, Symbol, kw, sym}; -use rustc_span::{DUMMY_SP, DesugaringKind, Span}; +use rustc_span::{DUMMY_SP, DesugaringKind, Ident, Span, Symbol, kw, sym}; use thin_vec::{ThinVec, thin_vec}; use visit::{Visitor, walk_expr}; @@ -78,9 +77,8 @@ impl<'hir> LoweringContext<'_, 'hir> { self.attrs.insert( ex.hir_id.local_id, &*self.arena.alloc_from_iter( - e.attrs - .iter() - .map(|a| self.lower_attr(a)) + self.lower_attrs_vec(&e.attrs, e.span) + .into_iter() .chain(old_attrs.iter().cloned()), ), ); @@ -99,21 +97,11 @@ impl<'hir> LoweringContext<'_, 'hir> { } let expr_hir_id = self.lower_node_id(e.id); - self.lower_attrs(expr_hir_id, &e.attrs); + self.lower_attrs(expr_hir_id, &e.attrs, e.span); let kind = match &e.kind { ExprKind::Array(exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)), - ExprKind::ConstBlock(c) => { - let c = self.with_new_scopes(c.value.span, |this| { - let def_id = this.local_def_id(c.id); - hir::ConstBlock { - def_id, - hir_id: this.lower_node_id(c.id), - body: this.lower_const_body(c.value.span, Some(&c.value)), - } - }); - hir::ExprKind::ConstBlock(c) - } + ExprKind::ConstBlock(c) => hir::ExprKind::ConstBlock(self.lower_const_block(c)), ExprKind::Repeat(expr, count) => { let expr = self.lower_expr(expr); let count = self.lower_array_length_to_const_arg(count); @@ -154,18 +142,7 @@ impl<'hir> LoweringContext<'_, 'hir> { let ohs = self.lower_expr(ohs); hir::ExprKind::Unary(op, ohs) } - ExprKind::Lit(token_lit) => { - let lit_kind = match LitKind::from_token_lit(*token_lit) { - Ok(lit_kind) => lit_kind, - Err(err) 
=> { - let guar = - report_lit_error(&self.tcx.sess.psess, err, *token_lit, e.span); - LitKind::Err(guar) - } - }; - let lit = self.arena.alloc(respan(self.lower_span(e.span), lit_kind)); - hir::ExprKind::Lit(lit) - } + ExprKind::Lit(token_lit) => hir::ExprKind::Lit(self.lower_lit(token_lit, e.span)), ExprKind::IncludedBytes(bytes) => { let lit = self.arena.alloc(respan( self.lower_span(e.span), @@ -404,6 +381,32 @@ impl<'hir> LoweringContext<'_, 'hir> { }) } + pub(crate) fn lower_const_block(&mut self, c: &AnonConst) -> hir::ConstBlock { + self.with_new_scopes(c.value.span, |this| { + let def_id = this.local_def_id(c.id); + hir::ConstBlock { + def_id, + hir_id: this.lower_node_id(c.id), + body: this.lower_const_body(c.value.span, Some(&c.value)), + } + }) + } + + pub(crate) fn lower_lit( + &mut self, + token_lit: &token::Lit, + span: Span, + ) -> &'hir Spanned { + let lit_kind = match LitKind::from_token_lit(*token_lit) { + Ok(lit_kind) => lit_kind, + Err(err) => { + let guar = report_lit_error(&self.tcx.sess.psess, err, *token_lit, span); + LitKind::Err(guar) + } + }; + self.arena.alloc(respan(self.lower_span(span), lit_kind)) + } + fn lower_unop(&mut self, u: UnOp) -> hir::UnOp { match u { UnOp::Deref => hir::UnOp::Deref, @@ -643,7 +646,7 @@ impl<'hir> LoweringContext<'_, 'hir> { let guard = arm.guard.as_ref().map(|cond| self.lower_expr(cond)); let hir_id = self.next_id(); let span = self.lower_span(arm.span); - self.lower_attrs(hir_id, &arm.attrs); + self.lower_attrs(hir_id, &arm.attrs, arm.span); let is_never_pattern = pat.is_never_pattern(); let body = if let Some(body) = &arm.body && !is_never_pattern @@ -801,15 +804,19 @@ impl<'hir> LoweringContext<'_, 'hir> { span, Some(Lrc::clone(&self.allow_gen_future)), ); - self.lower_attrs(inner_hir_id, &[Attribute { - kind: AttrKind::Normal(ptr::P(NormalAttr::from_ident(Ident::new( - sym::track_caller, - span, - )))), - id: self.tcx.sess.psess.attr_id_generator.mk_attr_id(), - style: AttrStyle::Outer, - span: unstable_span, - }]); + self.lower_attrs( + inner_hir_id, + &[Attribute { + kind: AttrKind::Normal(ptr::P(NormalAttr::from_ident(Ident::new( + sym::track_caller, + span, + )))), + id: self.tcx.sess.psess.attr_id_generator.mk_attr_id(), + style: AttrStyle::Outer, + span: unstable_span, + }], + span, + ); } } @@ -1614,7 +1621,7 @@ impl<'hir> LoweringContext<'_, 'hir> { fn lower_expr_field(&mut self, f: &ExprField) -> hir::ExprField<'hir> { let hir_id = self.lower_node_id(f.id); - self.lower_attrs(hir_id, &f.attrs); + self.lower_attrs(hir_id, &f.attrs, f.span); hir::ExprField { hir_id, ident: self.lower_ident(f.ident), @@ -1877,7 +1884,7 @@ impl<'hir> LoweringContext<'_, 'hir> { // // Also, add the attributes to the outer returned expr node. 
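The extracted `lower_lit` helper above follows a parse-or-recover shape: cook the literal if possible, otherwise continue with an error value. A simplified sketch with std types standing in for `LitKind` and `ErrorGuaranteed`:

#[derive(Debug, PartialEq)]
enum Lit {
    Int(i64),
    Err(String), // stands in for `LitKind::Err(ErrorGuaranteed)`
}

fn lower_lit(raw: &str) -> Lit {
    match raw.parse::<i64>() {
        Ok(n) => Lit::Int(n),
        // Report the problem, then keep lowering with an error literal instead of bailing out.
        Err(e) => Lit::Err(e.to_string()),
    }
}

fn main() {
    assert_eq!(lower_lit("42"), Lit::Int(42));
    assert!(matches!(lower_lit("4x"), Lit::Err(_)));
}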
let expr = self.expr_drop_temps_mut(for_span, match_expr); - self.lower_attrs(expr.hir_id, &e.attrs); + self.lower_attrs(expr.hir_id, &e.attrs, e.span); expr } @@ -1934,7 +1941,7 @@ impl<'hir> LoweringContext<'_, 'hir> { let val_ident = Ident::with_dummy_span(sym::val); let (val_pat, val_pat_nid) = self.pat_ident(span, val_ident); let val_expr = self.expr_ident(span, val_ident, val_pat_nid); - self.lower_attrs(val_expr.hir_id, &attrs); + self.lower_attrs(val_expr.hir_id, &attrs, span); let continue_pat = self.pat_cf_continue(unstable_span, val_pat); self.arm(continue_pat, val_expr) }; @@ -1964,7 +1971,7 @@ impl<'hir> LoweringContext<'_, 'hir> { } else { self.arena.alloc(self.expr(try_span, hir::ExprKind::Ret(Some(from_residual_expr)))) }; - self.lower_attrs(ret_expr.hir_id, &attrs); + self.lower_attrs(ret_expr.hir_id, &attrs, ret_expr.span); let break_pat = self.pat_cf_break(try_span, residual_local); self.arm(break_pat, ret_expr) diff --git a/compiler/rustc_ast_lowering/src/format.rs b/compiler/rustc_ast_lowering/src/format.rs index 653116e1fe00d..22aa1e6fc201a 100644 --- a/compiler/rustc_ast_lowering/src/format.rs +++ b/compiler/rustc_ast_lowering/src/format.rs @@ -6,8 +6,7 @@ use rustc_ast::*; use rustc_data_structures::fx::FxIndexMap; use rustc_hir as hir; use rustc_session::config::FmtDebug; -use rustc_span::symbol::{Ident, kw}; -use rustc_span::{Span, Symbol, sym}; +use rustc_span::{Ident, Span, Symbol, kw, sym}; use super::LoweringContext; diff --git a/compiler/rustc_ast_lowering/src/index.rs b/compiler/rustc_ast_lowering/src/index.rs index c3ff7b4b89722..29d4fb9ef2597 100644 --- a/compiler/rustc_ast_lowering/src/index.rs +++ b/compiler/rustc_ast_lowering/src/index.rs @@ -209,6 +209,14 @@ impl<'a, 'hir> Visitor<'hir> for NodeCollector<'a, 'hir> { }); } + fn visit_pat_expr(&mut self, expr: &'hir PatExpr<'hir>) { + self.insert(expr.span, expr.hir_id, Node::PatExpr(expr)); + + self.with_parent(expr.hir_id, |this| { + intravisit::walk_pat_expr(this, expr); + }); + } + fn visit_pat_field(&mut self, field: &'hir PatField<'hir>) { self.insert(field.span, field.hir_id, Node::PatField(field)); self.with_parent(field.hir_id, |this| { diff --git a/compiler/rustc_ast_lowering/src/item.rs b/compiler/rustc_ast_lowering/src/item.rs index ad4410c57dd0e..f3041b053a265 100644 --- a/compiler/rustc_ast_lowering/src/item.rs +++ b/compiler/rustc_ast_lowering/src/item.rs @@ -10,8 +10,7 @@ use rustc_index::{IndexSlice, IndexVec}; use rustc_middle::span_bug; use rustc_middle::ty::{ResolverAstLowering, TyCtxt}; use rustc_span::edit_distance::find_best_match_for_name; -use rustc_span::symbol::{Ident, kw, sym}; -use rustc_span::{DesugaringKind, Span, Symbol}; +use rustc_span::{DUMMY_SP, DesugaringKind, Ident, Span, Symbol, kw, sym}; use rustc_target::spec::abi; use smallvec::{SmallVec, smallvec}; use thin_vec::ThinVec; @@ -94,7 +93,8 @@ impl<'a, 'hir> ItemLowerer<'a, 'hir> { debug_assert_eq!(self.resolver.node_id_to_def_id[&CRATE_NODE_ID], CRATE_DEF_ID); self.with_lctx(CRATE_NODE_ID, |lctx| { let module = lctx.lower_mod(&c.items, &c.spans); - lctx.lower_attrs(hir::CRATE_HIR_ID, &c.attrs); + // FIXME(jdonszelman): is dummy span ever a problem here? 
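The `continue`/`break` arms built in the `?`-lowering above correspond, roughly, to this source-level desugaring (simplified to `Result` and stable APIs; the real lowering goes through the `Try` trait and `ControlFlow`):

use std::num::ParseIntError;

fn parse_two(a: &str, b: &str) -> Result<i32, ParseIntError> {
    let x = match a.parse::<i32>() {
        Ok(val) => val,                         // the "continue" arm binds `val`
        Err(residual) => return Err(residual),  // the "break" arm returns early
    };
    let y = b.parse::<i32>()?; // same shape, written with `?`
    Ok(x + y)
}

fn main() {
    assert_eq!(parse_two("2", "3"), Ok(5));
    assert!(parse_two("2", "x").is_err());
}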
+ lctx.lower_attrs(hir::CRATE_HIR_ID, &c.attrs, DUMMY_SP); hir::OwnerNode::Crate(module) }) } @@ -158,7 +158,7 @@ impl<'hir> LoweringContext<'_, 'hir> { let mut ident = i.ident; let vis_span = self.lower_span(i.vis.span); let hir_id = hir::HirId::make_owner(self.current_hir_id_owner.def_id); - let attrs = self.lower_attrs(hir_id, &i.attrs); + let attrs = self.lower_attrs(hir_id, &i.attrs, i.span); let kind = self.lower_item_kind(i.span, i.id, hir_id, &mut ident, attrs, vis_span, &i.kind); let item = hir::Item { owner_id: hir_id.expect_owner(), @@ -223,6 +223,7 @@ impl<'hir> LoweringContext<'_, 'hir> { decl, coroutine_kind, body.as_deref(), + attrs, ); let itctx = ImplTraitContext::Universal; @@ -231,10 +232,10 @@ impl<'hir> LoweringContext<'_, 'hir> { }); let sig = hir::FnSig { decl, - header: this.lower_fn_header(*header, hir::Safety::Safe), + header: this.lower_fn_header(*header, hir::Safety::Safe, attrs), span: this.lower_span(*fn_sig_span), }; - hir::ItemKind::Fn(sig, generics, body_id) + hir::ItemKind::Fn { sig, generics, body: body_id, has_body: body.is_some() } }) } ItemKind::Mod(_, mod_kind) => match mod_kind { @@ -436,11 +437,12 @@ impl<'hir> LoweringContext<'_, 'hir> { } ItemKind::Delegation(box delegation) => { let delegation_results = self.lower_delegation(delegation, id); - hir::ItemKind::Fn( - delegation_results.sig, - delegation_results.generics, - delegation_results.body_id, - ) + hir::ItemKind::Fn { + sig: delegation_results.sig, + generics: delegation_results.generics, + body: delegation_results.body_id, + has_body: true, + } } ItemKind::MacCall(..) | ItemKind::DelegationMac(..) => { panic!("macros should have been expanded by now") @@ -609,7 +611,7 @@ impl<'hir> LoweringContext<'_, 'hir> { fn lower_foreign_item(&mut self, i: &ForeignItem) -> &'hir hir::ForeignItem<'hir> { let hir_id = hir::HirId::make_owner(self.current_hir_id_owner.def_id); let owner_id = hir_id.expect_owner(); - self.lower_attrs(hir_id, &i.attrs); + let attrs = self.lower_attrs(hir_id, &i.attrs, i.span); let item = hir::ForeignItem { owner_id, ident: self.lower_ident(i.ident), @@ -633,7 +635,7 @@ impl<'hir> LoweringContext<'_, 'hir> { }); // Unmarked safety in unsafe block defaults to unsafe. 
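The `ItemKind::Fn` change above turns a tuple variant into a struct variant so call sites can name the new `has_body` flag. A small local sketch of the match ergonomics this buys (field types are placeholders, not the real HIR types):

#[allow(dead_code)]
enum ItemKind {
    // before: Fn(Sig, Generics, BodyId)
    Fn { sig: u32, generics: u32, body: u32, has_body: bool },
    Static,
}

fn describe(item: &ItemKind) -> &'static str {
    match item {
        ItemKind::Fn { has_body: true, .. } => "function with a body",
        ItemKind::Fn { has_body: false, .. } => "bodyless function, e.g. a required trait method",
        ItemKind::Static => "static item",
    }
}

fn main() {
    let f = ItemKind::Fn { sig: 0, generics: 0, body: 0, has_body: false };
    assert_eq!(describe(&f), "bodyless function, e.g. a required trait method");
}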
- let header = self.lower_fn_header(sig.header, hir::Safety::Unsafe); + let header = self.lower_fn_header(sig.header, hir::Safety::Unsafe, attrs); hir::ForeignItemKind::Fn( hir::FnSig { header, decl, span: self.lower_span(sig.span) }, @@ -667,7 +669,7 @@ impl<'hir> LoweringContext<'_, 'hir> { fn lower_variant(&mut self, v: &Variant) -> hir::Variant<'hir> { let hir_id = self.lower_node_id(v.id); - self.lower_attrs(hir_id, &v.attrs); + self.lower_attrs(hir_id, &v.attrs, v.span); hir::Variant { hir_id, def_id: self.local_def_id(v.id), @@ -729,7 +731,7 @@ impl<'hir> LoweringContext<'_, 'hir> { ) -> hir::FieldDef<'hir> { let ty = self.lower_ty(&f.ty, ImplTraitContext::Disallowed(ImplTraitPosition::FieldTy)); let hir_id = self.lower_node_id(f.id); - self.lower_attrs(hir_id, &f.attrs); + self.lower_attrs(hir_id, &f.attrs, f.span); hir::FieldDef { span: self.lower_span(f.span), hir_id, @@ -748,7 +750,7 @@ impl<'hir> LoweringContext<'_, 'hir> { fn lower_trait_item(&mut self, i: &AssocItem) -> &'hir hir::TraitItem<'hir> { let hir_id = hir::HirId::make_owner(self.current_hir_id_owner.def_id); - self.lower_attrs(hir_id, &i.attrs); + let attrs = self.lower_attrs(hir_id, &i.attrs, i.span); let trait_item_def_id = hir_id.expect_owner(); let (generics, kind, has_default) = match &i.kind { @@ -775,6 +777,7 @@ impl<'hir> LoweringContext<'_, 'hir> { i.id, FnDeclKind::Trait, sig.header.coroutine_kind, + attrs, ); (generics, hir::TraitItemKind::Fn(sig, hir::TraitFn::Required(names)), false) } @@ -786,6 +789,7 @@ impl<'hir> LoweringContext<'_, 'hir> { &sig.decl, sig.header.coroutine_kind, Some(body), + attrs, ); let (generics, sig) = self.lower_method_sig( generics, @@ -793,6 +797,7 @@ impl<'hir> LoweringContext<'_, 'hir> { i.id, FnDeclKind::Trait, sig.header.coroutine_kind, + attrs, ); (generics, hir::TraitItemKind::Fn(sig, hir::TraitFn::Provided(body_id)), true) } @@ -878,7 +883,7 @@ impl<'hir> LoweringContext<'_, 'hir> { let has_value = true; let (defaultness, _) = self.lower_defaultness(i.kind.defaultness(), has_value); let hir_id = hir::HirId::make_owner(self.current_hir_id_owner.def_id); - self.lower_attrs(hir_id, &i.attrs); + let attrs = self.lower_attrs(hir_id, &i.attrs, i.span); let (generics, kind) = match &i.kind { AssocItemKind::Const(box ConstItem { generics, ty, expr, .. 
}) => self.lower_generics( @@ -901,6 +906,7 @@ impl<'hir> LoweringContext<'_, 'hir> { &sig.decl, sig.header.coroutine_kind, body.as_deref(), + attrs, ); let (generics, sig) = self.lower_method_sig( generics, @@ -908,6 +914,7 @@ impl<'hir> LoweringContext<'_, 'hir> { i.id, if self.is_in_trait_impl { FnDeclKind::Impl } else { FnDeclKind::Inherent }, sig.header.coroutine_kind, + attrs, ); (generics, hir::ImplItemKind::Fn(sig, body_id)) @@ -1033,7 +1040,7 @@ impl<'hir> LoweringContext<'_, 'hir> { fn lower_param(&mut self, param: &Param) -> hir::Param<'hir> { let hir_id = self.lower_node_id(param.id); - self.lower_attrs(hir_id, ¶m.attrs); + self.lower_attrs(hir_id, ¶m.attrs, param.span); hir::Param { hir_id, pat: self.lower_pat(¶m.pat), @@ -1055,20 +1062,8 @@ impl<'hir> LoweringContext<'_, 'hir> { }) } - fn lower_fn_body_block( - &mut self, - span: Span, - decl: &FnDecl, - body: Option<&Block>, - ) -> hir::BodyId { - self.lower_fn_body(decl, |this| this.lower_block_expr_opt(span, body)) - } - - fn lower_block_expr_opt(&mut self, span: Span, block: Option<&Block>) -> hir::Expr<'hir> { - match block { - Some(block) => self.lower_block_expr(block), - None => self.expr_err(span, self.dcx().has_errors().unwrap()), - } + fn lower_fn_body_block(&mut self, decl: &FnDecl, body: &Block) -> hir::BodyId { + self.lower_fn_body(decl, |this| this.lower_block_expr(body)) } pub(super) fn lower_const_body(&mut self, span: Span, expr: Option<&Expr>) -> hir::BodyId { @@ -1090,9 +1085,37 @@ impl<'hir> LoweringContext<'_, 'hir> { decl: &FnDecl, coroutine_kind: Option, body: Option<&Block>, + attrs: &'hir [hir::Attribute], ) -> hir::BodyId { - let (Some(coroutine_kind), Some(body)) = (coroutine_kind, body) else { - return self.lower_fn_body_block(span, decl, body); + let Some(body) = body else { + // Functions without a body are an error, except if this is an intrinsic. For those we + // create a fake body so that the entire rest of the compiler doesn't have to deal with + // this as a special case. + return self.lower_fn_body(decl, |this| { + if attrs.iter().any(|a| a.name_or_empty() == sym::rustc_intrinsic) { + let empty_block = hir::Block { + hir_id: this.next_id(), + stmts: &[], + expr: None, + rules: hir::BlockCheckMode::DefaultBlock, + span, + targeted_by_break: false, + }; + let loop_ = hir::ExprKind::Loop( + this.arena.alloc(empty_block), + None, + hir::LoopSource::Loop, + span, + ); + hir::Expr { hir_id: this.next_id(), kind: loop_, span } + } else { + this.expr_err(span, this.dcx().has_errors().unwrap()) + } + }); + }; + let Some(coroutine_kind) = coroutine_kind else { + // Typical case: not a coroutine. + return self.lower_fn_body_block(decl, body); }; self.lower_body(|this| { let (parameters, expr) = this.lower_coroutine_body_with_moved_arguments( @@ -1172,9 +1195,7 @@ impl<'hir> LoweringContext<'_, 'hir> { // we can keep the same name for the parameter. // This lets rustdoc render it correctly in documentation. hir::PatKind::Binding(_, _, ident, _) => (ident, false), - hir::PatKind::Wild => { - (Ident::with_dummy_span(rustc_span::symbol::kw::Underscore), false) - } + hir::PatKind::Wild => (Ident::with_dummy_span(rustc_span::kw::Underscore), false), _ => { // Replace the ident for bindings that aren't simple. 
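The fake body synthesized for bodyless `#[rustc_intrinsic]` functions above is just `loop {}`. That works for any signature because an infinite loop has type `!`, which coerces to every return type. Minimal illustration:

// Never actually called; it only has to type-check, exactly like the synthetic
// intrinsic body built in the lowering code above.
#[allow(dead_code)]
fn placeholder_body() -> u64 {
    loop {}
}

fn main() {
    println!("placeholder_body type-checks without ever producing a u64");
}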
let name = format!("__arg{index}"); @@ -1322,8 +1343,9 @@ impl<'hir> LoweringContext<'_, 'hir> { id: NodeId, kind: FnDeclKind, coroutine_kind: Option, + attrs: &[hir::Attribute], ) -> (&'hir hir::Generics<'hir>, hir::FnSig<'hir>) { - let header = self.lower_fn_header(sig.header, hir::Safety::Safe); + let header = self.lower_fn_header(sig.header, hir::Safety::Safe, attrs); let itctx = ImplTraitContext::Universal; let (generics, decl) = self.lower_generics(generics, id, itctx, |this| { this.lower_fn_decl(&sig.decl, id, sig.span, kind, coroutine_kind) @@ -1335,14 +1357,28 @@ impl<'hir> LoweringContext<'_, 'hir> { &mut self, h: FnHeader, default_safety: hir::Safety, + attrs: &[hir::Attribute], ) -> hir::FnHeader { let asyncness = if let Some(CoroutineKind::Async { span, .. }) = h.coroutine_kind { hir::IsAsync::Async(span) } else { hir::IsAsync::NotAsync }; + + let safety = self.lower_safety(h.safety, default_safety); + + // Treat safe `#[target_feature]` functions as unsafe, but also remember that we did so. + let safety = if attrs.iter().any(|attr| attr.has_name(sym::target_feature)) + && safety.is_safe() + && !self.tcx.sess.target.is_like_wasm + { + hir::HeaderSafety::SafeTargetFeatures + } else { + safety.into() + }; + hir::FnHeader { - safety: self.lower_safety(h.safety, default_safety), + safety, asyncness, constness: self.lower_constness(h.constness), abi: self.lower_extern(h.ext), diff --git a/compiler/rustc_ast_lowering/src/lib.rs b/compiler/rustc_ast_lowering/src/lib.rs index 41fa4c134427f..75e05f810e7a1 100644 --- a/compiler/rustc_ast_lowering/src/lib.rs +++ b/compiler/rustc_ast_lowering/src/lib.rs @@ -35,6 +35,7 @@ #![doc(rust_logo)] #![feature(assert_matches)] #![feature(box_patterns)] +#![feature(if_let_guard)] #![feature(let_chains)] #![feature(rustdoc_internals)] #![warn(unreachable_pub)] @@ -42,6 +43,7 @@ use rustc_ast::node_id::NodeMap; use rustc_ast::{self as ast, *}; +use rustc_attr_parsing::{AttributeParser, OmitDoc}; use rustc_data_structures::captures::Captures; use rustc_data_structures::fingerprint::Fingerprint; use rustc_data_structures::sorted_map::SortedMap; @@ -133,10 +135,13 @@ struct LoweringContext<'a, 'hir> { allow_async_iterator: Lrc<[Symbol]>, allow_for_await: Lrc<[Symbol]>, allow_async_fn_traits: Lrc<[Symbol]>, + + attribute_parse_context: AttributeParser<'hir>, } impl<'a, 'hir> LoweringContext<'a, 'hir> { fn new(tcx: TyCtxt<'hir>, resolver: &'a mut ResolverAstLowering) -> Self { + let registered_tools = tcx.registered_tools(()).iter().map(|x| x.name).collect(); Self { // Pseudo-globals. tcx, @@ -176,6 +181,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { // FIXME(gen_blocks): how does `closure_track_caller`/`async_fn_track_caller` // interact with `gen`/`async gen` blocks allow_async_iterator: [sym::gen_future, sym::async_iterator].into(), + + attribute_parse_context: AttributeParser::new( + tcx.sess, + tcx.features(), + registered_tools, + ), } } @@ -211,7 +222,6 @@ impl ResolverAstLowering { None } - /// Obtains resolution for a `NodeId` with a single resolution. 
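`hir::Safety::Safe.into()` and the `HeaderSafety::SafeTargetFeatures` branch above rely on a conversion from `Safety` into the new `HeaderSafety` type. A simplified local model of that relationship (the real enums live in `rustc_hir`; treat this as a sketch, not their definitions):

#[allow(dead_code)]
#[derive(Debug, PartialEq, Clone, Copy)]
enum Safety { Safe, Unsafe }

#[allow(dead_code)]
#[derive(Debug, PartialEq, Clone, Copy)]
enum HeaderSafety {
    SafeTargetFeatures, // a safe fn that carries #[target_feature]
    Normal(Safety),
}

impl From<Safety> for HeaderSafety {
    fn from(s: Safety) -> Self {
        HeaderSafety::Normal(s)
    }
}

fn main() {
    let h: HeaderSafety = Safety::Safe.into();
    assert_eq!(h, HeaderSafety::Normal(Safety::Safe));
}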
fn get_partial_res(&self, id: NodeId) -> Option { self.partial_res_map.get(&id).copied() } @@ -846,45 +856,27 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { ret } - fn lower_attrs(&mut self, id: HirId, attrs: &[Attribute]) -> &'hir [hir::Attribute] { + fn lower_attrs( + &mut self, + id: HirId, + attrs: &[Attribute], + target_span: Span, + ) -> &'hir [hir::Attribute] { if attrs.is_empty() { &[] } else { + let lowered_attrs = self.lower_attrs_vec(attrs, target_span); + debug_assert_eq!(id.owner, self.current_hir_id_owner); - let ret = self.arena.alloc_from_iter(attrs.iter().map(|a| self.lower_attr(a))); + let ret = self.arena.alloc_from_iter(lowered_attrs); debug_assert!(!ret.is_empty()); self.attrs.insert(id.local_id, ret); ret } } - fn lower_attr(&self, attr: &Attribute) -> hir::Attribute { - // Note that we explicitly do not walk the path. Since we don't really - // lower attributes (we use the AST version) there is nowhere to keep - // the `HirId`s. We don't actually need HIR version of attributes anyway. - // Tokens are also not needed after macro expansion and parsing. - let kind = match attr.kind { - AttrKind::Normal(ref normal) => hir::AttrKind::Normal(Box::new(hir::AttrItem { - unsafety: self.lower_safety(normal.item.unsafety, hir::Safety::Safe), - path: hir::AttrPath { - segments: normal - .item - .path - .segments - .iter() - .map(|i| i.ident) - .collect::>() - .into_boxed_slice(), - span: normal.item.path.span, - }, - args: self.lower_attr_args(&normal.item.args), - })), - AttrKind::DocComment(comment_kind, data) => { - hir::AttrKind::DocComment(comment_kind, data) - } - }; - - hir::Attribute { kind, id: attr.id, style: attr.style, span: self.lower_span(attr.span) } + fn lower_attrs_vec(&self, attrs: &[Attribute], target_span: Span) -> Vec { + self.attribute_parse_context.parse_attribute_list(attrs, target_span, OmitDoc::Lower) } fn alias_attrs(&mut self, id: HirId, target_id: HirId) { @@ -896,34 +888,6 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { } } - fn lower_attr_args(&self, args: &AttrArgs) -> hir::AttrArgs { - match args { - AttrArgs::Empty => hir::AttrArgs::Empty, - AttrArgs::Delimited(args) => hir::AttrArgs::Delimited(self.lower_delim_args(args)), - // This is an inert key-value attribute - it will never be visible to macros - // after it gets lowered to HIR. Therefore, we can extract literals to handle - // nonterminals in `#[doc]` (e.g. `#[doc = $e]`). - &AttrArgs::Eq { eq_span, ref expr } => { - // In valid code the value always ends up as a single literal. Otherwise, a dummy - // literal suffices because the error is handled elsewhere. 
- let lit = if let ExprKind::Lit(token_lit) = expr.kind - && let Ok(lit) = MetaItemLit::from_token_lit(token_lit, expr.span) - { - lit - } else { - let guar = self.dcx().has_errors().unwrap(); - MetaItemLit { - symbol: kw::Empty, - suffix: None, - kind: LitKind::Err(guar), - span: DUMMY_SP, - } - }; - hir::AttrArgs::Eq { eq_span, expr: lit } - } - } - } - fn lower_delim_args(&self, args: &DelimArgs) -> DelimArgs { DelimArgs { dspan: args.dspan, delim: args.delim, tokens: args.tokens.flattened() } } @@ -1824,7 +1788,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { let (name, kind) = self.lower_generic_param_kind(param, source); let hir_id = self.lower_node_id(param.id); - self.lower_attrs(hir_id, ¶m.attrs); + self.lower_attrs(hir_id, ¶m.attrs, param.span()); hir::GenericParam { hir_id, def_id: self.local_def_id(param.id), @@ -1846,11 +1810,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { GenericParamKind::Lifetime => { // AST resolution emitted an error on those parameters, so we lower them using // `ParamName::Error`. + let ident = self.lower_ident(param.ident); let param_name = if let Some(LifetimeRes::Error) = self.resolver.get_lifetime_res(param.id) { - ParamName::Error + ParamName::Error(ident) } else { - let ident = self.lower_ident(param.ident); ParamName::Plain(ident) }; let kind = diff --git a/compiler/rustc_ast_lowering/src/pat.rs b/compiler/rustc_ast_lowering/src/pat.rs index c4bae084a3f8c..93edf21c401be 100644 --- a/compiler/rustc_ast_lowering/src/pat.rs +++ b/compiler/rustc_ast_lowering/src/pat.rs @@ -1,11 +1,13 @@ +use std::sync::Arc; + use rustc_ast::ptr::P; use rustc_ast::*; use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_hir as hir; use rustc_hir::def::Res; -use rustc_span::Span; -use rustc_span::source_map::Spanned; -use rustc_span::symbol::Ident; +use rustc_middle::span_bug; +use rustc_span::source_map::{Spanned, respan}; +use rustc_span::{Ident, Span}; use super::errors::{ ArbitraryExpressionInPattern, ExtraDoubleDot, MisplacedDoubleDot, SubTupleBinding, @@ -36,8 +38,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { lower_sub, ); } - PatKind::Lit(e) => { - break hir::PatKind::Lit(self.lower_expr_within_pat(e, false)); + PatKind::Expr(e) => { + break hir::PatKind::Expr(self.lower_expr_within_pat(e, false)); } PatKind::TupleStruct(qself, path, pats) => { let qpath = self.lower_qpath( @@ -82,7 +84,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { let fs = self.arena.alloc_from_iter(fields.iter().map(|f| { let hir_id = self.lower_node_id(f.id); - self.lower_attrs(hir_id, &f.attrs); + self.lower_attrs(hir_id, &f.attrs, f.span); hir::PatField { hir_id, @@ -92,7 +94,14 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { span: self.lower_span(f.span), } })); - break hir::PatKind::Struct(qpath, fs, *etc == ast::PatFieldsRest::Rest); + break hir::PatKind::Struct( + qpath, + fs, + matches!( + etc, + ast::PatFieldsRest::Rest | ast::PatFieldsRest::Recovered(_) + ), + ); } PatKind::Tuple(pats) => { let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple"); @@ -114,8 +123,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { self.lower_range_end(end, e2.is_some()), ); } - // FIXME(guard_patterns): lower pattern guards to HIR - PatKind::Guard(inner, _) => pattern = inner, + PatKind::Guard(inner, cond) => { + break hir::PatKind::Guard(self.lower_pat(inner), self.lower_expr(cond)); + } PatKind::Slice(pats) => break self.lower_pat_slice(pats), PatKind::Rest => { // If we reach here the `..` pattern is not semantically allowed. 
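`lower_expr_within_pat` above only has to handle the restricted expression grammar that pattern position allows: literals (possibly negated), paths, const blocks and included bytes. For reference, the stable surface syntax this corresponds to:

fn classify(n: i32) -> &'static str {
    const LIMIT: i32 = 10; // a path usable as a range-pattern endpoint
    match n {
        -1 => "negated literal pattern",
        0..=LIMIT => "range pattern with path endpoints",
        _ => "anything else",
    }
}

fn main() {
    assert_eq!(classify(-1), "negated literal pattern");
    assert_eq!(classify(7), "range pattern with path endpoints");
    assert_eq!(classify(99), "anything else");
}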
@@ -360,24 +370,54 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { // } // m!(S); // ``` - fn lower_expr_within_pat(&mut self, expr: &Expr, allow_paths: bool) -> &'hir hir::Expr<'hir> { - match &expr.kind { - ExprKind::Lit(..) - | ExprKind::ConstBlock(..) - | ExprKind::IncludedBytes(..) - | ExprKind::Err(_) - | ExprKind::Dummy => {} - ExprKind::Path(..) if allow_paths => {} - ExprKind::Unary(UnOp::Neg, inner) if matches!(inner.kind, ExprKind::Lit(_)) => {} + fn lower_expr_within_pat( + &mut self, + expr: &Expr, + allow_paths: bool, + ) -> &'hir hir::PatExpr<'hir> { + let err = |guar| hir::PatExprKind::Lit { + lit: self.arena.alloc(respan(self.lower_span(expr.span), LitKind::Err(guar))), + negated: false, + }; + let kind = match &expr.kind { + ExprKind::Lit(lit) => { + hir::PatExprKind::Lit { lit: self.lower_lit(lit, expr.span), negated: false } + } + ExprKind::ConstBlock(c) => hir::PatExprKind::ConstBlock(self.lower_const_block(c)), + ExprKind::IncludedBytes(bytes) => hir::PatExprKind::Lit { + lit: self.arena.alloc(respan( + self.lower_span(expr.span), + LitKind::ByteStr(Arc::clone(bytes), StrStyle::Cooked), + )), + negated: false, + }, + ExprKind::Err(guar) => err(*guar), + ExprKind::Dummy => span_bug!(expr.span, "lowered ExprKind::Dummy"), + ExprKind::Path(qself, path) if allow_paths => hir::PatExprKind::Path(self.lower_qpath( + expr.id, + qself, + path, + ParamMode::Optional, + AllowReturnTypeNotation::No, + ImplTraitContext::Disallowed(ImplTraitPosition::Path), + None, + )), + ExprKind::Unary(UnOp::Neg, inner) if let ExprKind::Lit(lit) = &inner.kind => { + hir::PatExprKind::Lit { lit: self.lower_lit(lit, expr.span), negated: true } + } _ => { let pattern_from_macro = expr.is_approximately_pattern(); let guar = self.dcx().emit_err(ArbitraryExpressionInPattern { span: expr.span, pattern_from_macro_note: pattern_from_macro, }); - return self.arena.alloc(self.expr_err(expr.span, guar)); + err(guar) } - } - self.lower_expr(expr) + }; + self.arena.alloc(hir::PatExpr { + hir_id: self.lower_node_id(expr.id), + span: expr.span, + kind, + }) } } diff --git a/compiler/rustc_ast_lowering/src/path.rs b/compiler/rustc_ast_lowering/src/path.rs index 133793e26ea3c..043144a54649b 100644 --- a/compiler/rustc_ast_lowering/src/path.rs +++ b/compiler/rustc_ast_lowering/src/path.rs @@ -6,8 +6,7 @@ use rustc_hir::def::{DefKind, PartialRes, Res}; use rustc_hir::def_id::DefId; use rustc_middle::span_bug; use rustc_session::parse::add_feature_diagnostics; -use rustc_span::symbol::{Ident, kw, sym}; -use rustc_span::{BytePos, DUMMY_SP, DesugaringKind, Span, Symbol}; +use rustc_span::{BytePos, DUMMY_SP, DesugaringKind, Ident, Span, Symbol, kw, sym}; use smallvec::{SmallVec, smallvec}; use tracing::{debug, instrument}; diff --git a/compiler/rustc_ast_passes/messages.ftl b/compiler/rustc_ast_passes/messages.ftl index 5a0ec865f9d4d..25944392a52a9 100644 --- a/compiler/rustc_ast_passes/messages.ftl +++ b/compiler/rustc_ast_passes/messages.ftl @@ -207,8 +207,6 @@ ast_passes_precise_capturing_duplicated = duplicate `use<...>` precise capturing ast_passes_precise_capturing_not_allowed_here = `use<...>` precise capturing syntax not allowed in {$loc} -ast_passes_stability_outside_std = stability attributes may not be used outside of the standard library - ast_passes_static_without_body = free static item without body .suggestion = provide a definition for the static diff --git a/compiler/rustc_ast_passes/src/ast_validation.rs b/compiler/rustc_ast_passes/src/ast_validation.rs index 3f071f5ecb520..236ca7ba70357 100644 --- 
a/compiler/rustc_ast_passes/src/ast_validation.rs +++ b/compiler/rustc_ast_passes/src/ast_validation.rs @@ -34,8 +34,7 @@ use rustc_session::lint::builtin::{ PATTERNS_IN_FNS_WITHOUT_BODY, }; use rustc_session::lint::{BuiltinLintDiag, LintBuffer}; -use rustc_span::Span; -use rustc_span::symbol::{Ident, kw, sym}; +use rustc_span::{Ident, Span, kw, sym}; use rustc_target::spec::abi; use thin_vec::thin_vec; @@ -921,7 +920,9 @@ impl<'a> Visitor<'a> for AstValidator<'a> { ItemKind::Fn(box Fn { defaultness, sig, generics, body }) => { self.check_defaultness(item.span, *defaultness); - if body.is_none() { + let is_intrinsic = + item.attrs.iter().any(|a| a.name_or_empty() == sym::rustc_intrinsic); + if body.is_none() && !is_intrinsic { self.dcx().emit_err(errors::FnWithoutBody { span: item.span, replace_span: self.ending_semi_or_hi(item.span), diff --git a/compiler/rustc_ast_passes/src/errors.rs b/compiler/rustc_ast_passes/src/errors.rs index 9b600e3ee92a1..0d8a7734ccdfc 100644 --- a/compiler/rustc_ast_passes/src/errors.rs +++ b/compiler/rustc_ast_passes/src/errors.rs @@ -4,8 +4,7 @@ use rustc_ast::ParamKindOrd; use rustc_errors::codes::*; use rustc_errors::{Applicability, Diag, EmissionGuarantee, SubdiagMessageOp, Subdiagnostic}; use rustc_macros::{Diagnostic, Subdiagnostic}; -use rustc_span::symbol::Ident; -use rustc_span::{Span, Symbol}; +use rustc_span::{Ident, Span, Symbol}; use crate::fluent_generated as fluent; @@ -733,13 +732,6 @@ pub(crate) struct AssociatedSuggestion2 { pub potential_assoc: Ident, } -#[derive(Diagnostic)] -#[diag(ast_passes_stability_outside_std, code = E0734)] -pub(crate) struct StabilityOutsideStd { - #[primary_span] - pub span: Span, -} - #[derive(Diagnostic)] #[diag(ast_passes_feature_on_non_nightly, code = E0554)] pub(crate) struct FeatureOnNonNightly { diff --git a/compiler/rustc_ast_passes/src/feature_gate.rs b/compiler/rustc_ast_passes/src/feature_gate.rs index c10b329649713..160f837e82967 100644 --- a/compiler/rustc_ast_passes/src/feature_gate.rs +++ b/compiler/rustc_ast_passes/src/feature_gate.rs @@ -4,9 +4,8 @@ use rustc_ast::{NodeId, PatKind, attr, token}; use rustc_feature::{AttributeGate, BUILTIN_ATTRIBUTE_MAP, BuiltinAttribute, Features, GateIssue}; use rustc_session::Session; use rustc_session::parse::{feature_err, feature_err_issue, feature_warn}; -use rustc_span::Span; use rustc_span::source_map::Spanned; -use rustc_span::symbol::{Symbol, sym}; +use rustc_span::{Span, Symbol, sym}; use rustc_target::spec::abi; use thin_vec::ThinVec; @@ -209,18 +208,6 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { ); } } - - // Emit errors for non-staged-api crates. - if !self.features.staged_api() { - if attr.has_name(sym::unstable) - || attr.has_name(sym::stable) - || attr.has_name(sym::rustc_const_unstable) - || attr.has_name(sym::rustc_const_stable) - || attr.has_name(sym::rustc_default_body_unstable) - { - self.sess.dcx().emit_err(errors::StabilityOutsideStd { span: attr.span }); - } - } } fn visit_item(&mut self, i: &'a ast::Item) { @@ -243,7 +230,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { } } - ast::ItemKind::Struct(..) => { + ast::ItemKind::Struct(..) | ast::ItemKind::Enum(..) | ast::ItemKind::Union(..) 
=> { for attr in attr::filter_by_name(&i.attrs, sym::repr) { for item in attr.meta_item_list().unwrap_or_else(ThinVec::new) { if item.has_name(sym::simd) { @@ -264,7 +251,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { &self, negative_impls, span.to(of_trait.as_ref().map_or(span, |t| t.path.span)), - "negative trait bounds are not yet fully implemented; \ + "negative trait bounds are not fully implemented; \ use marker types for now" ); } @@ -693,7 +680,7 @@ fn check_new_solver_banned_features(sess: &Session, features: &Features) { .find(|feat| feat.gate_name == sym::generic_const_exprs) .map(|feat| feat.attr_sp) { - #[cfg_attr(not(bootstrap), allow(rustc::symbol_intern_string_literal))] + #[allow(rustc::symbol_intern_string_literal)] sess.dcx().emit_err(errors::IncompatibleFeatures { spans: vec![gce_span], f1: Symbol::intern("-Znext-solver=globally"), diff --git a/compiler/rustc_ast_pretty/src/pprust/state.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs index 41fded027cb30..9b958ed6b0d6b 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state.rs @@ -24,8 +24,8 @@ use rustc_ast::{ use rustc_data_structures::sync::Lrc; use rustc_span::edition::Edition; use rustc_span::source_map::{SourceMap, Spanned}; -use rustc_span::symbol::{Ident, IdentPrinter, Symbol, kw, sym}; -use rustc_span::{BytePos, CharPos, DUMMY_SP, FileName, Pos, Span}; +use rustc_span::symbol::IdentPrinter; +use rustc_span::{BytePos, CharPos, DUMMY_SP, FileName, Ident, Pos, Span, Symbol, kw, sym}; use thin_vec::ThinVec; use crate::pp::Breaks::{Consistent, Inconsistent}; @@ -725,7 +725,7 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere // E.g. we have seen cases where a proc macro can handle `a :: b` but not // `a::b`. See #117433 for some examples. fn print_tts(&mut self, tts: &TokenStream, convert_dollar_crate: bool) { - let mut iter = tts.trees().peekable(); + let mut iter = tts.iter().peekable(); while let Some(tt) = iter.next() { let spacing = self.print_tt(tt, convert_dollar_crate); if let Some(next) = iter.peek() { @@ -1204,8 +1204,10 @@ impl<'a> State<'a> { } ast::TyKind::Path(Some(qself), path) => self.print_qpath(path, qself, false), ast::TyKind::TraitObject(bounds, syntax) => { - if *syntax == ast::TraitObjectSyntax::Dyn { - self.word_nbsp("dyn"); + match syntax { + ast::TraitObjectSyntax::Dyn => self.word_nbsp("dyn"), + ast::TraitObjectSyntax::DynStar => self.word_nbsp("dyn*"), + ast::TraitObjectSyntax::None => {} } self.print_type_bounds(bounds); } @@ -1654,11 +1656,14 @@ impl<'a> State<'a> { }, |f| f.pat.span, ); - if *etc == ast::PatFieldsRest::Rest { + if let ast::PatFieldsRest::Rest | ast::PatFieldsRest::Recovered(_) = etc { if !fields.is_empty() { self.word_space(","); } self.word(".."); + if let ast::PatFieldsRest::Recovered(_) = etc { + self.word("/* recovered parse error */"); + } } if !empty { self.space(); @@ -1696,7 +1701,7 @@ impl<'a> State<'a> { self.print_pat(inner); } } - PatKind::Lit(e) => self.print_expr(e, FixupContext::default()), + PatKind::Expr(e) => self.print_expr(e, FixupContext::default()), PatKind::Range(begin, end, Spanned { node: end_kind, .. 
}) => { if let Some(e) = begin { self.print_expr(e, FixupContext::default()); diff --git a/compiler/rustc_ast_pretty/src/pprust/state/expr.rs b/compiler/rustc_ast_pretty/src/pprust/state/expr.rs index dce76fb1e7707..4b1374ceef31e 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state/expr.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state/expr.rs @@ -213,7 +213,9 @@ impl<'a> State<'a> { fn print_expr_call(&mut self, func: &ast::Expr, args: &[P], fixup: FixupContext) { let needs_paren = match func.kind { - ast::ExprKind::Field(..) => true, + // In order to call a named field, needs parens: `(self.fun)()` + // But not for an unnamed field: `self.0()` + ast::ExprKind::Field(_, name) => !name.is_numeric(), _ => func.precedence() < ExprPrecedence::Unambiguous, }; @@ -245,19 +247,21 @@ impl<'a> State<'a> { base_args: &[P], fixup: FixupContext, ) { - // Unlike in `print_expr_call`, no change to fixup here because + // The fixup here is different than in `print_expr_call` because // statement boundaries never occur in front of a `.` (or `?`) token. // - // match () { _ => f }.method(); + // Needs parens: + // + // (loop { break x; })(); + // + // Does not need parens: + // + // loop { break x; }.method(); // - // Parenthesizing only for precedence and not with regard to statement - // boundaries, `$receiver.method()` can be parsed back as a statement - // containing an expression if and only if `$receiver` can be parsed as - // a statement containing an expression. self.print_expr_cond_paren( receiver, receiver.precedence() < ExprPrecedence::Unambiguous, - fixup, + fixup.leftmost_subexpression_with_dot(), ); self.word("."); @@ -503,7 +507,7 @@ impl<'a> State<'a> { self.print_expr_cond_paren( expr, expr.precedence() < ExprPrecedence::Unambiguous, - fixup, + fixup.leftmost_subexpression_with_dot(), ); self.word_nbsp(".match"); } @@ -567,7 +571,7 @@ impl<'a> State<'a> { self.print_expr_cond_paren( expr, expr.precedence() < ExprPrecedence::Unambiguous, - fixup, + fixup.leftmost_subexpression_with_dot(), ); self.word(".await"); } @@ -606,7 +610,7 @@ impl<'a> State<'a> { self.print_expr_cond_paren( expr, expr.precedence() < ExprPrecedence::Unambiguous, - fixup, + fixup.leftmost_subexpression_with_dot(), ); self.word("."); self.print_ident(*ident); @@ -763,7 +767,11 @@ impl<'a> State<'a> { } } ast::ExprKind::Try(e) => { - self.print_expr_cond_paren(e, e.precedence() < ExprPrecedence::Unambiguous, fixup); + self.print_expr_cond_paren( + e, + e.precedence() < ExprPrecedence::Unambiguous, + fixup.leftmost_subexpression_with_dot(), + ); self.word("?") } ast::ExprKind::TryBlock(blk) => { diff --git a/compiler/rustc_ast_pretty/src/pprust/state/fixup.rs b/compiler/rustc_ast_pretty/src/pprust/state/fixup.rs index 6f5382ce61d3b..3ef21f5cb29eb 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state/fixup.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state/fixup.rs @@ -1,7 +1,9 @@ use rustc_ast::Expr; use rustc_ast::util::{classify, parser}; -#[derive(Copy, Clone, Debug)] +// The default amount of fixing is minimal fixing, so all fixups are set to `false` by `Default`. +// Fixups should be turned on in a targeted fashion where needed. +#[derive(Copy, Clone, Debug, Default)] pub(crate) struct FixupContext { /// Print expression such that it can be parsed back as a statement /// consisting of the original expression. @@ -93,20 +95,6 @@ pub(crate) struct FixupContext { parenthesize_exterior_struct_lit: bool, } -/// The default amount of fixing is minimal fixing. 
Fixups should be turned on -/// in a targeted fashion where needed. -impl Default for FixupContext { - fn default() -> Self { - FixupContext { - stmt: false, - leftmost_subexpression_in_stmt: false, - match_arm: false, - leftmost_subexpression_in_match_arm: false, - parenthesize_exterior_struct_lit: false, - } - } -} - impl FixupContext { /// Create the initial fixup for printing an expression in statement /// position. @@ -150,6 +138,20 @@ impl FixupContext { } } + /// Transform this fixup into the one that should apply when printing a + /// leftmost subexpression followed by a `.` or `?` token, which confer + /// different statement boundary rules compared to other leftmost + /// subexpressions. + pub(crate) fn leftmost_subexpression_with_dot(self) -> Self { + FixupContext { + stmt: self.stmt || self.leftmost_subexpression_in_stmt, + leftmost_subexpression_in_stmt: false, + match_arm: self.match_arm || self.leftmost_subexpression_in_match_arm, + leftmost_subexpression_in_match_arm: false, + ..self + } + } + /// Transform this fixup into the one that should apply when printing any /// subexpression that is neither a leftmost subexpression nor surrounded in /// delimiters. diff --git a/compiler/rustc_ast_pretty/src/pprust/state/item.rs b/compiler/rustc_ast_pretty/src/pprust/state/item.rs index 1ae765c0130f6..897c275d850c2 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state/item.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state/item.rs @@ -3,7 +3,7 @@ use itertools::{Itertools, Position}; use rustc_ast as ast; use rustc_ast::ModKind; use rustc_ast::ptr::P; -use rustc_span::symbol::Ident; +use rustc_span::Ident; use crate::pp::Breaks::Inconsistent; use crate::pprust::state::fixup::FixupContext; diff --git a/compiler/rustc_ast_pretty/src/pprust/tests.rs b/compiler/rustc_ast_pretty/src/pprust/tests.rs index 01e5dff34b7a6..4c42dd1f2023f 100644 --- a/compiler/rustc_ast_pretty/src/pprust/tests.rs +++ b/compiler/rustc_ast_pretty/src/pprust/tests.rs @@ -1,6 +1,5 @@ use rustc_ast as ast; -use rustc_span::symbol::Ident; -use rustc_span::{DUMMY_SP, create_default_session_globals_then}; +use rustc_span::{DUMMY_SP, Ident, create_default_session_globals_then}; use thin_vec::ThinVec; use super::*; diff --git a/compiler/rustc_attr_data_structures/Cargo.toml b/compiler/rustc_attr_data_structures/Cargo.toml index 2ee58f24470e9..28aafcfcf6fa6 100644 --- a/compiler/rustc_attr_data_structures/Cargo.toml +++ b/compiler/rustc_attr_data_structures/Cargo.toml @@ -5,16 +5,11 @@ edition = "2021" [dependencies] # tidy-alphabetical-start -rustc_abi = { path = "../rustc_abi" } -rustc_ast = { path = "../rustc_ast" } -rustc_ast_pretty = { path = "../rustc_ast_pretty" } -rustc_data_structures = { path = "../rustc_data_structures" } -rustc_errors = { path = "../rustc_errors" } -rustc_feature = { path = "../rustc_feature" } -rustc_fluent_macro = { path = "../rustc_fluent_macro" } -rustc_lexer = { path = "../rustc_lexer" } -rustc_macros = { path = "../rustc_macros" } -rustc_serialize = { path = "../rustc_serialize" } -rustc_session = { path = "../rustc_session" } -rustc_span = { path = "../rustc_span" } +rustc_abi = {path = "../rustc_abi"} +rustc_ast = {path = "../rustc_ast"} +rustc_data_structures = {path = "../rustc_data_structures"} +rustc_macros = {path = "../rustc_macros"} +rustc_serialize = {path = "../rustc_serialize"} +rustc_span = {path = "../rustc_span"} +thin-vec = "0.2.12" # tidy-alphabetical-end diff --git a/compiler/rustc_attr_data_structures/src/attributes.rs 
b/compiler/rustc_attr_data_structures/src/attributes.rs index 8986bec57de76..f886f889b7d80 100644 --- a/compiler/rustc_attr_data_structures/src/attributes.rs +++ b/compiler/rustc_attr_data_structures/src/attributes.rs @@ -1,9 +1,12 @@ use rustc_abi::Align; -use rustc_ast as ast; +use rustc_ast::token::CommentKind; +use rustc_ast::{self as ast, AttrStyle}; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; +use rustc_span::hygiene::Transparency; use rustc_span::{Span, Symbol}; +use thin_vec::ThinVec; -use crate::RustcVersion; +use crate::{DefaultBodyStability, PartialConstStability, RustcVersion, Stability}; #[derive(Copy, Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] pub enum InlineAttr { @@ -11,6 +14,22 @@ pub enum InlineAttr { Hint, Always, Never, + /// `#[rustc_force_inline]` forces inlining to happen in the MIR inliner - it reports an error + /// if the inlining cannot happen. It is limited to only free functions so that the calls + /// can always be resolved. + Force { + attr_span: Span, + reason: Option, + }, +} + +impl InlineAttr { + pub fn always(&self) -> bool { + match self { + InlineAttr::Always | InlineAttr::Force { .. } => true, + InlineAttr::None | InlineAttr::Hint | InlineAttr::Never => false, + } + } } #[derive(Clone, Encodable, Decodable, Debug, PartialEq, Eq, HashStable_Generic)] @@ -26,7 +45,7 @@ pub enum OptimizeAttr { Size, } -#[derive(Clone, Debug, Encodable, Decodable)] +#[derive(Clone, Debug, Encodable, Decodable, HashStable_Generic)] pub enum DiagnosticAttribute { // tidy-alphabetical-start DoNotRecommend, @@ -34,7 +53,7 @@ pub enum DiagnosticAttribute { // tidy-alphabetical-end } -#[derive(PartialEq, Debug, Encodable, Decodable, Copy, Clone)] +#[derive(PartialEq, Debug, Encodable, Decodable, Copy, Clone, HashStable_Generic)] pub enum ReprAttr { ReprInt(IntType), ReprRust, @@ -43,6 +62,8 @@ pub enum ReprAttr { ReprSimd, ReprTransparent, ReprAlign(Align), + // this one is just so we can emit a lint for it + ReprEmpty, } pub use ReprAttr::*; @@ -52,7 +73,7 @@ pub enum TransparencyError { } #[derive(Eq, PartialEq, Debug, Copy, Clone)] -#[derive(Encodable, Decodable)] +#[derive(Encodable, Decodable, HashStable_Generic)] pub enum IntType { SignedInt(ast::IntTy), UnsignedInt(ast::UintTy), @@ -104,3 +125,64 @@ impl Deprecation { matches!(self.since, DeprecatedSince::RustcVersion(_)) } } + +/// Attributes represent parsed, *built in* attributes. That means, +/// attributes that are not actually ever expanded. They're instead used as markers, +/// to guide the compilation process in various way in most every stage of the compiler. +/// These are kept around after the AST, into the HIR and further on. +/// +/// The word parsed could be a little misleading here, because the parser already parses +/// attributes early on. However, the result, an [`ast::Attribute`] +/// is only parsed at a high level, still containing a token stream in many cases. That is +/// because the structure of the contents varies from attribute to attribute. +/// With a parsed attribute I mean that each attribute is processed individually into a +/// final structure, which on-site (the place where the attribute is useful for, think the +/// the place where `must_use` is checked) little to no extra parsing or validating needs to +/// happen. 
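The new `InlineAttr::always` helper added earlier in this hunk uses an exhaustive match so that a future variant can't be silently forgotten. A trimmed-down copy (the fields of `Force` are dropped here for brevity):

#[allow(dead_code)]
#[derive(Clone, Copy)]
enum InlineAttr {
    None,
    Hint,
    Always,
    Never,
    Force, // the real variant carries `attr_span` and `reason`
}

impl InlineAttr {
    fn always(&self) -> bool {
        match self {
            InlineAttr::Always | InlineAttr::Force => true,
            InlineAttr::None | InlineAttr::Hint | InlineAttr::Never => false,
        }
    }
}

fn main() {
    assert!(InlineAttr::Force.always());
    assert!(!InlineAttr::Hint.always());
}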
+/// +/// For more docs, look in [`rustc_attr`](https://doc.rust-lang.org/stable/nightly-rustc/rustc_attr/index.html) +#[derive(Clone, Debug, HashStable_Generic, Encodable, Decodable)] +pub enum AttributeKind { + // tidy-alphabetical-start + AllowConstFnUnstable(ThinVec), + AllowInternalUnstable(ThinVec<(Symbol, Span)>), + BodyStability { + stability: DefaultBodyStability, + /// Span of the `#[rustc_default_body_unstable(...)]` attribute + span: Span, + }, + Confusables { + symbols: ThinVec, + // FIXME(jdonszelmann): remove when target validation code is moved + first_span: Span, + }, + ConstStability { + stability: PartialConstStability, + /// Span of the `#[rustc_const_stable(...)]` or `#[rustc_const_unstable(...)]` attribute + span: Span, + }, + ConstStabilityIndirect, + Deprecation { + deprecation: Deprecation, + span: Span, + }, + Diagnostic(DiagnosticAttribute), + Doc, + /// A doc comment (e.g. `/// ...`, `//! ...`, `/** ... */`, `/*! ... */`). + /// Doc attributes (e.g. `#[doc="..."]`) are represented with the `Normal` + /// variant (which is much less compact and thus more expensive). + DocComment { + style: AttrStyle, + kind: CommentKind, + span: Span, + comment: Symbol, + }, + MacroTransparency(Transparency), + Repr(ThinVec<(ReprAttr, Span)>), + Stability { + stability: Stability, + /// Span of the `#[stable(...)]` or `#[unstable(...)]` attribute + span: Span, + }, + // tidy-alphabetical-end +} diff --git a/compiler/rustc_attr_data_structures/src/lib.rs b/compiler/rustc_attr_data_structures/src/lib.rs index 4f204aeab640f..93d4aa7ddf30e 100644 --- a/compiler/rustc_attr_data_structures/src/lib.rs +++ b/compiler/rustc_attr_data_structures/src/lib.rs @@ -11,6 +11,10 @@ mod stability; mod version; pub use attributes::*; -pub(crate) use rustc_session::HashStableContext; pub use stability::*; pub use version::*; + +/// Requirements for a `StableHashingContext` to be used in this crate. +/// This is a hack to allow using the `HashStable_Generic` derive macro +/// instead of implementing everything in `rustc_middle`. +pub trait HashStableContext: rustc_ast::HashStableContext + rustc_abi::HashStableContext {} diff --git a/compiler/rustc_attr_data_structures/src/stability.rs b/compiler/rustc_attr_data_structures/src/stability.rs index 021fe40e3e04c..dfda04387ec5a 100644 --- a/compiler/rustc_attr_data_structures/src/stability.rs +++ b/compiler/rustc_attr_data_structures/src/stability.rs @@ -9,7 +9,12 @@ use crate::RustcVersion; /// `since` field of the `#[stable]` attribute. /// /// For more, see [this pull request](https://github.com/rust-lang/rust/pull/100591). -pub const VERSION_PLACEHOLDER: &str = "CURRENT_RUSTC_VERSION"; +pub const VERSION_PLACEHOLDER: &str = concat!("CURRENT_RUSTC_VERSIO", "N"); +// Note that the `concat!` macro above prevents `src/tools/replace-version-placeholder` from +// replacing the constant with the current version. Hardcoding the tool to skip this file doesn't +// work as the file can (and at some point will) be moved around. +// +// Turning the `concat!` macro into a string literal will make Pietro cry. That'd be sad :( /// Represents the following attributes: /// @@ -96,6 +101,16 @@ impl PartialConstStability { } } +#[derive(Encodable, Decodable, PartialEq, Copy, Clone, Debug, Eq, Hash)] +#[derive(HashStable_Generic)] +pub enum AllowedThroughUnstableModules { + /// This does not get a deprecation warning. We still generally would prefer people to use the + /// fully stable path, and a warning will likely be emitted in the future. 
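The `concat!` trick used for `VERSION_PLACEHOLDER` above still yields the exact placeholder string at compile time; it only keeps the literal out of the source text that the replace-version-placeholder tool searches for. Quick check:

const PLACEHOLDER: &str = concat!("CURRENT_RUSTC_VERSIO", "N");

fn main() {
    assert_eq!(PLACEHOLDER, "CURRENT_RUSTC_VERSION");
}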
+ WithoutDeprecation, + /// Emit the given deprecation warning. + WithDeprecation(Symbol), +} + /// The available stability levels. #[derive(Encodable, Decodable, PartialEq, Copy, Clone, Debug, Eq, Hash)] #[derive(HashStable_Generic)] @@ -132,9 +147,8 @@ pub enum StabilityLevel { Stable { /// Rust release which stabilized this feature. since: StableSince, - /// Is this item allowed to be referred to on stable, despite being contained in unstable - /// modules? - allowed_through_unstable_modules: bool, + /// This is `Some` if this item allowed to be referred to on stable via unstable modules. + allowed_through_unstable_modules: Option, }, } diff --git a/compiler/rustc_attr_parsing/Cargo.toml b/compiler/rustc_attr_parsing/Cargo.toml index 7ccedf40c3fa9..adbfd588fe565 100644 --- a/compiler/rustc_attr_parsing/Cargo.toml +++ b/compiler/rustc_attr_parsing/Cargo.toml @@ -13,9 +13,11 @@ rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } rustc_feature = { path = "../rustc_feature" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } +rustc_hir = { path = "../rustc_hir" } rustc_lexer = { path = "../rustc_lexer" } rustc_macros = { path = "../rustc_macros" } rustc_serialize = { path = "../rustc_serialize" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } +thin-vec = "0.2.12" # tidy-alphabetical-end diff --git a/compiler/rustc_attr_parsing/messages.ftl b/compiler/rustc_attr_parsing/messages.ftl index faa2865cb9130..4d44cdfe50b69 100644 --- a/compiler/rustc_attr_parsing/messages.ftl +++ b/compiler/rustc_attr_parsing/messages.ftl @@ -6,6 +6,8 @@ attr_parsing_deprecated_item_suggestion = .help = add `#![feature(deprecated_suggestion)]` to the crate root .note = see #94785 for more details +attr_parsing_empty_confusables = + expected at least one confusable name attr_parsing_expected_one_cfg_pattern = expected 1 cfg-pattern @@ -21,8 +23,8 @@ attr_parsing_expects_feature_list = attr_parsing_expects_features = `{$name}` expects feature names -attr_parsing_incorrect_meta_item = - incorrect meta item +attr_parsing_incorrect_meta_item = expected a quoted string literal +attr_parsing_incorrect_meta_item_suggestion = consider surrounding this with quotes attr_parsing_incorrect_repr_format_align_one_arg = incorrect `repr(align)` attribute format: `align` takes exactly one argument in parentheses @@ -88,18 +90,19 @@ attr_parsing_multiple_stability_levels = attr_parsing_non_ident_feature = 'feature' is not an identifier +attr_parsing_repr_ident = + meta item in `repr` must be an identifier attr_parsing_rustc_allowed_unstable_pairing = `rustc_allowed_through_unstable_modules` attribute must be paired with a `stable` attribute -attr_parsing_rustc_const_stable_indirect_pairing = - `const_stable_indirect` attribute does not make sense on `rustc_const_stable` function, its behavior is already implied - attr_parsing_rustc_promotable_pairing = `rustc_promotable` attribute must be paired with either a `rustc_const_unstable` or a `rustc_const_stable` attribute attr_parsing_soft_no_args = `soft` should not have any arguments +attr_parsing_stability_outside_std = stability attributes may not be used outside of the standard library + attr_parsing_unknown_meta_item = unknown meta item '{$item}' .label = expected one of {$expected} @@ -107,6 +110,10 @@ attr_parsing_unknown_meta_item = attr_parsing_unknown_version_literal = unknown version literal format, assuming it refers to a future version +attr_parsing_unrecognized_repr_hint = + 
unrecognized representation hint + .help = valid reprs are `Rust` (default), `C`, `align`, `packed`, `transparent`, `simd`, `i8`, `u8`, `i16`, `u16`, `i32`, `u32`, `i64`, `u64`, `i128`, `u128`, `isize`, `usize` + attr_parsing_unstable_cfg_target_compact = compact `cfg(target(..))` is experimental and subject to change @@ -122,3 +129,8 @@ attr_parsing_unsupported_literal_generic = unsupported literal attr_parsing_unsupported_literal_suggestion = consider removing the prefix + +attr_parsing_unused_multiple = + multiple `{$name}` attributes + .suggestion = remove this attribute + .note = attribute also specified here diff --git a/compiler/rustc_attr_parsing/src/attributes/allow_unstable.rs b/compiler/rustc_attr_parsing/src/attributes/allow_unstable.rs index b9f841800abca..d37ede86cfd2a 100644 --- a/compiler/rustc_attr_parsing/src/attributes/allow_unstable.rs +++ b/compiler/rustc_attr_parsing/src/attributes/allow_unstable.rs @@ -1,49 +1,67 @@ -use rustc_ast::attr::{AttributeExt, filter_by_name}; -use rustc_session::Session; -use rustc_span::symbol::{Symbol, sym}; +use std::iter; +use rustc_attr_data_structures::AttributeKind; +use rustc_span::{Span, Symbol, sym}; + +use super::{CombineAttributeParser, ConvertFn}; +use crate::context::AcceptContext; +use crate::parser::ArgParser; use crate::session_diagnostics; -pub fn allow_internal_unstable<'a>( - sess: &'a Session, - attrs: &'a [impl AttributeExt], -) -> impl Iterator + 'a { - allow_unstable(sess, attrs, sym::allow_internal_unstable) +pub(crate) struct AllowInternalUnstableParser; +impl CombineAttributeParser for AllowInternalUnstableParser { + const PATH: &'static [rustc_span::Symbol] = &[sym::allow_internal_unstable]; + type Item = (Symbol, Span); + const CONVERT: ConvertFn = AttributeKind::AllowInternalUnstable; + + fn extend<'a>( + cx: &'a AcceptContext<'a>, + args: &'a ArgParser<'a>, + ) -> impl IntoIterator + 'a { + parse_unstable(cx, args, Self::PATH[0]).into_iter().zip(iter::repeat(cx.attr_span)) + } } -pub fn rustc_allow_const_fn_unstable<'a>( - sess: &'a Session, - attrs: &'a [impl AttributeExt], -) -> impl Iterator + 'a { - allow_unstable(sess, attrs, sym::rustc_allow_const_fn_unstable) +pub(crate) struct AllowConstFnUnstableParser; +impl CombineAttributeParser for AllowConstFnUnstableParser { + const PATH: &'static [rustc_span::Symbol] = &[sym::rustc_allow_const_fn_unstable]; + type Item = Symbol; + const CONVERT: ConvertFn = AttributeKind::AllowConstFnUnstable; + + fn extend<'a>( + cx: &'a AcceptContext<'a>, + args: &'a ArgParser<'a>, + ) -> impl IntoIterator + 'a { + parse_unstable(cx, args, Self::PATH[0]) + } } -fn allow_unstable<'a>( - sess: &'a Session, - attrs: &'a [impl AttributeExt], +fn parse_unstable<'a>( + cx: &AcceptContext<'_>, + args: &'a ArgParser<'a>, symbol: Symbol, -) -> impl Iterator + 'a { - let attrs = filter_by_name(attrs, symbol); - let list = attrs - .filter_map(move |attr| { - attr.meta_item_list().or_else(|| { - sess.dcx().emit_err(session_diagnostics::ExpectsFeatureList { - span: attr.span(), - name: symbol.to_ident_string(), - }); - None - }) - }) - .flatten(); - - list.into_iter().filter_map(move |it| { - let name = it.ident().map(|ident| ident.name); - if name.is_none() { - sess.dcx().emit_err(session_diagnostics::ExpectsFeatures { - span: it.span(), +) -> impl IntoIterator { + let mut res = Vec::new(); + + let Some(list) = args.list() else { + cx.emit_err(session_diagnostics::ExpectsFeatureList { + span: cx.attr_span, + name: symbol.to_ident_string(), + }); + return res; + }; + + for param in 
list.mixed() { + let param_span = param.span(); + if let Some(ident) = param.meta_item().and_then(|i| i.word_without_args()) { + res.push(ident.name); + } else { + cx.emit_err(session_diagnostics::ExpectsFeatures { + span: param_span, name: symbol.to_ident_string(), }); } - name - }) + } + + res } diff --git a/compiler/rustc_attr_parsing/src/attributes/cfg.rs b/compiler/rustc_attr_parsing/src/attributes/cfg.rs index 8cf8e86100f9a..0d6d521b40c61 100644 --- a/compiler/rustc_attr_parsing/src/attributes/cfg.rs +++ b/compiler/rustc_attr_parsing/src/attributes/cfg.rs @@ -1,6 +1,4 @@ -//! Parsing and validation of builtin attributes - -use rustc_ast::{self as ast, LitKind, MetaItem, MetaItemInner, MetaItemKind, MetaItemLit, NodeId}; +use rustc_ast::{LitKind, MetaItem, MetaItemInner, MetaItemKind, MetaItemLit, NodeId}; use rustc_ast_pretty::pprust; use rustc_attr_data_structures::RustcVersion; use rustc_feature::{Features, GatedCfg, find_gated_cfg}; @@ -9,11 +7,11 @@ use rustc_session::config::ExpectedValues; use rustc_session::lint::BuiltinLintDiag; use rustc_session::lint::builtin::UNEXPECTED_CFGS; use rustc_session::parse::feature_err; -use rustc_span::Span; -use rustc_span::symbol::{Symbol, kw, sym}; +use rustc_span::symbol::kw; +use rustc_span::{Span, Symbol, sym}; -use crate::util::UnsupportedLiteralReason; -use crate::{fluent_generated, parse_version, session_diagnostics}; +use crate::session_diagnostics::{self, UnsupportedLiteralReason}; +use crate::{fluent_generated, parse_version}; #[derive(Clone, Debug)] pub struct Condition { @@ -26,7 +24,7 @@ pub struct Condition { /// Tests if a cfg-pattern matches the cfg set pub fn cfg_matches( - cfg: &ast::MetaItemInner, + cfg: &MetaItemInner, sess: &Session, lint_node_id: NodeId, features: Option<&Features>, @@ -81,7 +79,7 @@ fn gate_cfg(gated_cfg: &GatedCfg, cfg_span: Span, sess: &Session, features: &Fea /// Evaluate a cfg-like condition (with `any` and `all`), using `eval` to /// evaluate individual items. pub fn eval_condition( - cfg: &ast::MetaItemInner, + cfg: &MetaItemInner, sess: &Session, features: Option<&Features>, eval: &mut impl FnMut(Condition) -> bool, @@ -89,8 +87,8 @@ pub fn eval_condition( let dcx = sess.dcx(); let cfg = match cfg { - ast::MetaItemInner::MetaItem(meta_item) => meta_item, - ast::MetaItemInner::Lit(MetaItemLit { kind: LitKind::Bool(b), .. }) => { + MetaItemInner::MetaItem(meta_item) => meta_item, + MetaItemInner::Lit(MetaItemLit { kind: LitKind::Bool(b), .. }) => { if let Some(features) = features { // we can't use `try_gate_cfg` as symbols don't differentiate between `r#true` // and `true`, and we want to keep the former working without feature gate @@ -119,7 +117,7 @@ pub fn eval_condition( }; match &cfg.kind { - ast::MetaItemKind::List(mis) if cfg.name_or_empty() == sym::version => { + MetaItemKind::List(mis) if cfg.name_or_empty() == sym::version => { try_gate_cfg(sym::version, cfg.span, sess, features); let (min_version, span) = match &mis[..] { [MetaItemInner::Lit(MetaItemLit { kind: LitKind::Str(sym, ..), span, .. 
})] => { @@ -151,7 +149,7 @@ pub fn eval_condition( RustcVersion::CURRENT >= min_version } } - ast::MetaItemKind::List(mis) => { + MetaItemKind::List(mis) => { for mi in mis.iter() { if mi.meta_item_or_bool().is_none() { dcx.emit_err(session_diagnostics::UnsupportedLiteral { @@ -210,12 +208,7 @@ pub fn eval_condition( seg.ident.name = Symbol::intern(&format!("target_{}", seg.ident.name)); } - res & eval_condition( - &ast::MetaItemInner::MetaItem(mi), - sess, - features, - eval, - ) + res & eval_condition(&MetaItemInner::MetaItem(mi), sess, features, eval) }) } _ => { @@ -227,7 +220,7 @@ pub fn eval_condition( } } } - ast::MetaItemKind::Word | MetaItemKind::NameValue(..) if cfg.path.segments.len() != 1 => { + MetaItemKind::Word | MetaItemKind::NameValue(..) if cfg.path.segments.len() != 1 => { dcx.emit_err(session_diagnostics::CfgPredicateIdentifier { span: cfg.path.span }); true } @@ -240,7 +233,7 @@ pub fn eval_condition( }); true } - ast::MetaItemKind::Word | ast::MetaItemKind::NameValue(..) => { + MetaItemKind::Word | MetaItemKind::NameValue(..) => { let ident = cfg.ident().expect("multi-segment cfg predicate"); eval(Condition { name: ident.name, diff --git a/compiler/rustc_attr_parsing/src/attributes/confusables.rs b/compiler/rustc_attr_parsing/src/attributes/confusables.rs index 672fcf05eda37..6cff952fcf229 100644 --- a/compiler/rustc_attr_parsing/src/attributes/confusables.rs +++ b/compiler/rustc_attr_parsing/src/attributes/confusables.rs @@ -1,21 +1,58 @@ -//! Parsing and validation of builtin attributes +use rustc_attr_data_structures::AttributeKind; +use rustc_span::{Span, Symbol, sym}; +use thin_vec::ThinVec; -use rustc_ast::MetaItemInner; -use rustc_ast::attr::AttributeExt; -use rustc_span::symbol::Symbol; +use super::{AcceptMapping, AttributeParser}; +use crate::context::FinalizeContext; +use crate::session_diagnostics; -/// Read the content of a `rustc_confusables` attribute, and return the list of candidate names. -pub fn parse_confusables(attr: &impl AttributeExt) -> Option> { - let metas = attr.meta_item_list()?; +#[derive(Default)] +pub(crate) struct ConfusablesParser { + confusables: ThinVec, + first_span: Option, +} + +impl AttributeParser for ConfusablesParser { + const ATTRIBUTES: AcceptMapping = &[(&[sym::rustc_confusables], |this, cx, args| { + let Some(list) = args.list() else { + // FIXME(jdonszelmann): error when not a list? Bring validation code here. + // NOTE: currently subsequent attributes are silently ignored using + // tcx.get_attr(). 
+ return; + }; + + if list.is_empty() { + cx.emit_err(session_diagnostics::EmptyConfusables { span: cx.attr_span }); + } + + for param in list.mixed() { + let span = param.span(); - let mut candidates = Vec::new(); + let Some(lit) = param.lit() else { + cx.emit_err(session_diagnostics::IncorrectMetaItem { + span, + suggestion: Some(session_diagnostics::IncorrectMetaItemSuggestion { + lo: span.shrink_to_lo(), + hi: span.shrink_to_hi(), + }), + }); + continue; + }; - for meta in metas { - let MetaItemInner::Lit(meta_lit) = meta else { + this.confusables.push(lit.symbol); + } + + this.first_span.get_or_insert(cx.attr_span); + })]; + + fn finalize(self, _cx: &FinalizeContext<'_>) -> Option { + if self.confusables.is_empty() { return None; - }; - candidates.push(meta_lit.symbol); - } + } - Some(candidates) + Some(AttributeKind::Confusables { + symbols: self.confusables, + first_span: self.first_span.unwrap(), + }) + } } diff --git a/compiler/rustc_attr_parsing/src/attributes/deprecation.rs b/compiler/rustc_attr_parsing/src/attributes/deprecation.rs index 2f9872cc31131..7c5b74e64c5f0 100644 --- a/compiler/rustc_attr_parsing/src/attributes/deprecation.rs +++ b/compiler/rustc_attr_parsing/src/attributes/deprecation.rs @@ -1,122 +1,129 @@ -//! Parsing and validation of builtin attributes +use rustc_attr_data_structures::{AttributeKind, DeprecatedSince, Deprecation}; +use rustc_span::symbol::Ident; +use rustc_span::{Span, Symbol, sym}; -use rustc_ast::attr::AttributeExt; -use rustc_ast::{MetaItem, MetaItemInner}; -use rustc_ast_pretty::pprust; -use rustc_attr_data_structures::{DeprecatedSince, Deprecation}; -use rustc_feature::Features; -use rustc_session::Session; -use rustc_span::Span; -use rustc_span::symbol::{Symbol, sym}; +use super::SingleAttributeParser; +use super::util::parse_version; +use crate::context::AcceptContext; +use crate::parser::ArgParser; +use crate::session_diagnostics; +use crate::session_diagnostics::UnsupportedLiteralReason; -use super::util::UnsupportedLiteralReason; -use crate::{parse_version, session_diagnostics}; +pub(crate) struct DeprecationParser; -/// Finds the deprecation attribute. `None` if none exists. -pub fn find_deprecation( - sess: &Session, - features: &Features, - attrs: &[impl AttributeExt], -) -> Option<(Deprecation, Span)> { - let mut depr: Option<(Deprecation, Span)> = None; - let is_rustc = features.staged_api(); - - 'outer: for attr in attrs { - if !attr.has_name(sym::deprecated) { - continue; +fn get( + cx: &AcceptContext<'_>, + ident: Ident, + param_span: Span, + arg: &ArgParser<'_>, + item: &mut Option, +) -> bool { + if item.is_some() { + cx.emit_err(session_diagnostics::MultipleItem { + span: param_span, + item: ident.to_string(), + }); + return false; + } + if let Some(v) = arg.name_value() { + if let Some(value_str) = v.value_as_str() { + *item = Some(value_str); + true + } else { + let lit = v.value_as_lit(); + cx.emit_err(session_diagnostics::UnsupportedLiteral { + span: v.value_span, + reason: UnsupportedLiteralReason::DeprecatedString, + is_bytestr: lit.kind.is_bytestr(), + start_point_span: cx.sess().source_map().start_point(lit.span), + }); + false } + } else { + // FIXME(jdonszelmann): suggestion? 
+ cx.emit_err(session_diagnostics::IncorrectMetaItem { span: param_span, suggestion: None }); + false + } +} + +impl SingleAttributeParser for DeprecationParser { + const PATH: &'static [rustc_span::Symbol] = &[sym::deprecated]; + + fn on_duplicate(cx: &AcceptContext<'_>, first_span: rustc_span::Span) { + // FIXME(jdonszelmann): merge with errors from check_attrs.rs + cx.emit_err(session_diagnostics::UnusedMultiple { + this: cx.attr_span, + other: first_span, + name: sym::deprecated, + }); + } + + fn convert(cx: &AcceptContext<'_>, args: &ArgParser<'_>) -> Option { + let features = cx.features(); let mut since = None; let mut note = None; let mut suggestion = None; - if attr.is_doc_comment() { - continue; - } else if attr.is_word() { - } else if let Some(value) = attr.value_str() { - note = Some(value) - } else if let Some(list) = attr.meta_item_list() { - let get = |meta: &MetaItem, item: &mut Option| { - if item.is_some() { - sess.dcx().emit_err(session_diagnostics::MultipleItem { - span: meta.span, - item: pprust::path_to_string(&meta.path), + let is_rustc = features.staged_api(); + + if let Some(value) = args.name_value() + && let Some(value_str) = value.value_as_str() + { + note = Some(value_str) + } else if let Some(list) = args.list() { + for param in list.mixed() { + let param_span = param.span(); + let Some(param) = param.meta_item() else { + cx.emit_err(session_diagnostics::UnsupportedLiteral { + span: param_span, + reason: UnsupportedLiteralReason::DeprecatedKvPair, + is_bytestr: false, + start_point_span: cx.sess().source_map().start_point(param_span), }); - return false; - } - if let Some(v) = meta.value_str() { - *item = Some(v); - true - } else { - if let Some(lit) = meta.name_value_literal() { - sess.dcx().emit_err(session_diagnostics::UnsupportedLiteral { - span: lit.span, - reason: UnsupportedLiteralReason::DeprecatedString, - is_bytestr: lit.kind.is_bytestr(), - start_point_span: sess.source_map().start_point(lit.span), - }); - } else { - sess.dcx() - .emit_err(session_diagnostics::IncorrectMetaItem { span: meta.span }); - } - false - } - }; + return None; + }; - for meta in &list { - match meta { - MetaItemInner::MetaItem(mi) => match mi.name_or_empty() { - sym::since => { - if !get(mi, &mut since) { - continue 'outer; - } - } - sym::note => { - if !get(mi, &mut note) { - continue 'outer; - } - } - sym::suggestion => { - if !features.deprecated_suggestion() { - sess.dcx().emit_err( - session_diagnostics::DeprecatedItemSuggestion { - span: mi.span, - is_nightly: sess.is_nightly_build(), - details: (), - }, - ); - } + let (ident, arg) = param.word_or_empty(); - if !get(mi, &mut suggestion) { - continue 'outer; - } + match ident.name { + sym::since => { + if !get(cx, ident, param_span, arg, &mut since) { + return None; + } + } + sym::note => { + if !get(cx, ident, param_span, arg, &mut note) { + return None; } - _ => { - sess.dcx().emit_err(session_diagnostics::UnknownMetaItem { - span: meta.span(), - item: pprust::path_to_string(&mi.path), - expected: if features.deprecated_suggestion() { - &["since", "note", "suggestion"] - } else { - &["since", "note"] - }, + } + sym::suggestion => { + if !features.deprecated_suggestion() { + cx.emit_err(session_diagnostics::DeprecatedItemSuggestion { + span: param_span, + is_nightly: cx.sess().is_nightly_build(), + details: (), }); - continue 'outer; } - }, - MetaItemInner::Lit(lit) => { - sess.dcx().emit_err(session_diagnostics::UnsupportedLiteral { - span: lit.span, - reason: UnsupportedLiteralReason::DeprecatedKvPair, - is_bytestr: 
false, - start_point_span: sess.source_map().start_point(lit.span), + + if !get(cx, ident, param_span, arg, &mut suggestion) { + return None; + } + } + _ => { + cx.emit_err(session_diagnostics::UnknownMetaItem { + span: param_span, + item: ident.to_string(), + expected: if features.deprecated_suggestion() { + &["since", "note", "suggestion"] + } else { + &["since", "note"] + }, }); - continue 'outer; + return None; } } } - } else { - continue; } let since = if let Some(since) = since { @@ -127,23 +134,24 @@ pub fn find_deprecation( } else if let Some(version) = parse_version(since) { DeprecatedSince::RustcVersion(version) } else { - sess.dcx().emit_err(session_diagnostics::InvalidSince { span: attr.span() }); + cx.emit_err(session_diagnostics::InvalidSince { span: cx.attr_span }); DeprecatedSince::Err } } else if is_rustc { - sess.dcx().emit_err(session_diagnostics::MissingSince { span: attr.span() }); + cx.emit_err(session_diagnostics::MissingSince { span: cx.attr_span }); DeprecatedSince::Err } else { DeprecatedSince::Unspecified }; if is_rustc && note.is_none() { - sess.dcx().emit_err(session_diagnostics::MissingNote { span: attr.span() }); - continue; + cx.emit_err(session_diagnostics::MissingNote { span: cx.attr_span }); + return None; } - depr = Some((Deprecation { since, note, suggestion }, attr.span())); + Some(AttributeKind::Deprecation { + deprecation: Deprecation { since, note, suggestion }, + span: cx.attr_span, + }) } - - depr } diff --git a/compiler/rustc_attr_parsing/src/attributes/mod.rs b/compiler/rustc_attr_parsing/src/attributes/mod.rs index a78e0b54b64aa..11033af8e753f 100644 --- a/compiler/rustc_attr_parsing/src/attributes/mod.rs +++ b/compiler/rustc_attr_parsing/src/attributes/mod.rs @@ -1,17 +1,150 @@ -mod allow_unstable; -mod cfg; -mod confusables; -mod deprecation; -mod repr; -mod stability; -mod transparency; - -pub mod util; - -pub use allow_unstable::*; -pub use cfg::*; -pub use confusables::*; -pub use deprecation::*; -pub use repr::*; -pub use stability::*; -pub use transparency::*; +//! You can find more docs on what groups are on [`AttributeParser`] itself. +//! However, for many types of attributes, implementing [`AttributeParser`] is not necessary. +//! It allows for a lot of flexibility you might not want. +//! +//! Specifically, you might not care about managing the state of your [`AttributeParser`] +//! state machine yourself. In this case you can choose to implement: +//! +//! - [`SingleAttributeParser`]: makes it easy to implement an attribute which should error if it +//! appears more than once in a list of attributes +//! - [`CombineAttributeParser`]: makes it easy to implement an attribute which should combine the +//! contents of attributes, if an attribute appear multiple times in a list +//! +//! Attributes should be added to [`ATTRIBUTE_GROUP_MAPPING`](crate::context::ATTRIBUTE_GROUP_MAPPING) to be parsed. 
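To make the shape of this design concrete, here is a minimal sketch (not part of this diff) of what a parser for a hypothetical word-only marker attribute could look like when written against the traits introduced above. The attribute name `rustc_example` and the `AttributeKind::Example` variant are invented for illustration; the trait items and context methods used (`PATH`, `on_duplicate`, `convert`, `cx.attr_span`, `cx.emit_err`, `args.no_args()`) are the ones this PR adds.

use rustc_attr_data_structures::AttributeKind;
use rustc_span::{Span, sym};

use super::SingleAttributeParser;
use crate::context::AcceptContext;
use crate::parser::ArgParser;
use crate::session_diagnostics;

// Hypothetical parser for a `#[rustc_example]` marker attribute (illustration only;
// `sym::rustc_example` and `AttributeKind::Example` do not exist in this PR).
pub(crate) struct ExampleParser;

impl SingleAttributeParser for ExampleParser {
    const PATH: &'static [rustc_span::Symbol] = &[sym::rustc_example];

    fn on_duplicate(cx: &AcceptContext<'_>, first_span: Span) {
        // Mirrors `DeprecationParser`: keep the first occurrence, report the rest.
        cx.emit_err(session_diagnostics::UnusedMultiple {
            this: cx.attr_span,
            other: first_span,
            name: sym::rustc_example,
        });
    }

    fn convert(_cx: &AcceptContext<'_>, args: &ArgParser<'_>) -> Option<AttributeKind> {
        // Word-only attribute: anything with arguments is skipped here; a real
        // parser would emit a diagnostic instead of silently returning `None`.
        if !args.no_args() {
            return None;
        }
        Some(AttributeKind::Example)
    }
}

Registering such a parser then amounts to adding `Single<ExampleParser>` to `ATTRIBUTE_GROUP_MAPPING` in `context.rs`, as shown further down in this diff.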
+ +use std::marker::PhantomData; + +use rustc_attr_data_structures::AttributeKind; +use rustc_span::Span; +use thin_vec::ThinVec; + +use crate::context::{AcceptContext, FinalizeContext}; +use crate::parser::ArgParser; + +pub(crate) mod allow_unstable; +pub(crate) mod cfg; +pub(crate) mod confusables; +pub(crate) mod deprecation; +pub(crate) mod repr; +pub(crate) mod stability; +pub(crate) mod transparency; +pub(crate) mod util; + +type AcceptFn = fn(&mut T, &AcceptContext<'_>, &ArgParser<'_>); +type AcceptMapping = &'static [(&'static [rustc_span::Symbol], AcceptFn)]; + +/// An [`AttributeParser`] is a type which searches for syntactic attributes. +/// +/// Parsers are often tiny state machines that gets to see all syntactical attributes on an item. +/// [`Default::default`] creates a fresh instance that sits in some kind of initial state, usually that the +/// attribute it is looking for was not yet seen. +/// +/// Then, it defines what paths this group will accept in [`AttributeParser::ATTRIBUTES`]. +/// These are listed as pairs, of symbols and function pointers. The function pointer will +/// be called when that attribute is found on an item, which can influence the state of the little +/// state machine. +/// +/// Finally, after all attributes on an item have been seen, and possibly been accepted, +/// the [`finalize`](AttributeParser::finalize) functions for all attribute parsers are called. Each can then report +/// whether it has seen the attribute it has been looking for. +/// +/// The state machine is automatically reset to parse attributes on the next item. +pub(crate) trait AttributeParser: Default + 'static { + /// The symbols for the attributes that this parser is interested in. + /// + /// If an attribute has this symbol, the `accept` function will be called on it. + const ATTRIBUTES: AcceptMapping; + + /// The parser has gotten a chance to accept the attributes on an item, + /// here it can produce an attribute. + fn finalize(self, cx: &FinalizeContext<'_>) -> Option; +} + +/// Alternative to [`AttributeParser`] that automatically handles state management. +/// A slightly simpler and more restricted way to convert attributes. +/// Assumes that an attribute can only appear a single time on an item, +/// and errors when it sees more. +/// +/// [`Single where T: SingleAttributeParser`](Single) implements [`AttributeParser`]. +/// +/// [`SingleAttributeParser`] can only convert attributes one-to-one, and cannot combine multiple +/// attributes together like is necessary for `#[stable()]` and `#[unstable()]` for example. +pub(crate) trait SingleAttributeParser: 'static { + const PATH: &'static [rustc_span::Symbol]; + + /// Caled when a duplicate attribute is found. + /// + /// `first_span` is the span of the first occurrence of this attribute. 
+ // FIXME(jdonszelmann): default error + fn on_duplicate(cx: &AcceptContext<'_>, first_span: Span); + + /// Converts a single syntactical attribute to a single semantic attribute, or [`AttributeKind`] + fn convert(cx: &AcceptContext<'_>, args: &ArgParser<'_>) -> Option; +} + +pub(crate) struct Single(PhantomData, Option<(AttributeKind, Span)>); + +impl Default for Single { + fn default() -> Self { + Self(Default::default(), Default::default()) + } +} + +impl AttributeParser for Single { + const ATTRIBUTES: AcceptMapping = &[(T::PATH, |group: &mut Single, cx, args| { + if let Some((_, s)) = group.1 { + T::on_duplicate(cx, s); + return; + } + + if let Some(pa) = T::convert(cx, args) { + group.1 = Some((pa, cx.attr_span)); + } + })]; + + fn finalize(self, _cx: &FinalizeContext<'_>) -> Option { + Some(self.1?.0) + } +} + +type ConvertFn = fn(ThinVec) -> AttributeKind; + +/// Alternative to [`AttributeParser`] that automatically handles state management. +/// If multiple attributes appear on an element, combines the values of each into a +/// [`ThinVec`]. +/// [`Combine where T: CombineAttributeParser`](Combine) implements [`AttributeParser`]. +/// +/// [`CombineAttributeParser`] can only convert a single kind of attribute, and cannot combine multiple +/// attributes together like is necessary for `#[stable()]` and `#[unstable()]` for example. +pub(crate) trait CombineAttributeParser: 'static { + const PATH: &'static [rustc_span::Symbol]; + + type Item; + const CONVERT: ConvertFn; + + /// Converts a single syntactical attribute to a number of elements of the semantic attribute, or [`AttributeKind`] + fn extend<'a>( + cx: &'a AcceptContext<'a>, + args: &'a ArgParser<'a>, + ) -> impl IntoIterator + 'a; +} + +pub(crate) struct Combine( + PhantomData, + ThinVec<::Item>, +); + +impl Default for Combine { + fn default() -> Self { + Self(Default::default(), Default::default()) + } +} + +impl AttributeParser for Combine { + const ATTRIBUTES: AcceptMapping = + &[(T::PATH, |group: &mut Combine, cx, args| group.1.extend(T::extend(cx, args)))]; + + fn finalize(self, _cx: &FinalizeContext<'_>) -> Option { + if self.1.is_empty() { None } else { Some(T::CONVERT(self.1)) } + } +} diff --git a/compiler/rustc_attr_parsing/src/attributes/repr.rs b/compiler/rustc_attr_parsing/src/attributes/repr.rs index 008edfe6366eb..39940db0f6b9d 100644 --- a/compiler/rustc_attr_parsing/src/attributes/repr.rs +++ b/compiler/rustc_attr_parsing/src/attributes/repr.rs @@ -1,15 +1,13 @@ -//! Parsing and validation of builtin attributes - use rustc_abi::Align; -use rustc_ast::attr::AttributeExt; -use rustc_ast::{self as ast, MetaItemKind}; -use rustc_attr_data_structures::IntType; -use rustc_attr_data_structures::ReprAttr::*; -use rustc_session::Session; -use rustc_span::symbol::{Symbol, sym}; +use rustc_ast::{IntTy, LitIntType, LitKind, UintTy}; +use rustc_attr_data_structures::{AttributeKind, IntType, ReprAttr}; +use rustc_span::{Span, Symbol, sym}; -use crate::ReprAttr; -use crate::session_diagnostics::{self, IncorrectReprFormatGenericCause}; +use super::{CombineAttributeParser, ConvertFn}; +use crate::context::AcceptContext; +use crate::parser::{ArgParser, MetaItemListParser, MetaItemParser}; +use crate::session_diagnostics; +use crate::session_diagnostics::IncorrectReprFormatGenericCause; /// Parse #[repr(...)] forms. 
/// @@ -18,185 +16,215 @@ use crate::session_diagnostics::{self, IncorrectReprFormatGenericCause}; /// the same discriminant size that the corresponding C enum would or C /// structure layout, `packed` to remove padding, and `transparent` to delegate representation /// concerns to the only non-ZST field. -pub fn find_repr_attrs(sess: &Session, attr: &impl AttributeExt) -> Vec { - if attr.has_name(sym::repr) { parse_repr_attr(sess, attr) } else { Vec::new() } -} +// FIXME(jdonszelmann): is a vec the right representation here even? isn't it just a struct? +pub(crate) struct ReprParser; -pub fn parse_repr_attr(sess: &Session, attr: &impl AttributeExt) -> Vec { - assert!(attr.has_name(sym::repr), "expected `#[repr(..)]`, found: {attr:?}"); - let mut acc = Vec::new(); - let dcx = sess.dcx(); - - if let Some(items) = attr.meta_item_list() { - for item in items { - let mut recognised = false; - if item.is_word() { - let hint = match item.name_or_empty() { - sym::Rust => Some(ReprRust), - sym::C => Some(ReprC), - sym::packed => Some(ReprPacked(Align::ONE)), - sym::simd => Some(ReprSimd), - sym::transparent => Some(ReprTransparent), - sym::align => { - sess.dcx().emit_err(session_diagnostics::InvalidReprAlignNeedArg { - span: item.span(), - }); - recognised = true; - None - } - name => int_type_of_word(name).map(ReprInt), - }; - - if let Some(h) = hint { - recognised = true; - acc.push(h); - } - } else if let Some((name, value)) = item.singleton_lit_list() { - let mut literal_error = None; - let mut err_span = item.span(); - if name == sym::align { - recognised = true; - match parse_alignment(&value.kind) { - Ok(literal) => acc.push(ReprAlign(literal)), - Err(message) => { - err_span = value.span; - literal_error = Some(message) - } - }; - } else if name == sym::packed { - recognised = true; - match parse_alignment(&value.kind) { - Ok(literal) => acc.push(ReprPacked(literal)), - Err(message) => { - err_span = value.span; - literal_error = Some(message) - } - }; - } else if matches!(name, sym::Rust | sym::C | sym::simd | sym::transparent) - || int_type_of_word(name).is_some() - { - recognised = true; - sess.dcx().emit_err(session_diagnostics::InvalidReprHintNoParen { - span: item.span(), - name: name.to_ident_string(), - }); - } - if let Some(literal_error) = literal_error { - sess.dcx().emit_err(session_diagnostics::InvalidReprGeneric { - span: err_span, - repr_arg: name.to_ident_string(), - error_part: literal_error, - }); - } - } else if let Some(meta_item) = item.meta_item() { - match &meta_item.kind { - MetaItemKind::NameValue(value) => { - if meta_item.has_name(sym::align) || meta_item.has_name(sym::packed) { - let name = meta_item.name_or_empty().to_ident_string(); - recognised = true; - sess.dcx().emit_err(session_diagnostics::IncorrectReprFormatGeneric { - span: item.span(), - repr_arg: &name, - cause: IncorrectReprFormatGenericCause::from_lit_kind( - item.span(), - &value.kind, - &name, - ), - }); - } else if matches!( - meta_item.name_or_empty(), - sym::Rust | sym::C | sym::simd | sym::transparent - ) || int_type_of_word(meta_item.name_or_empty()).is_some() - { - recognised = true; - sess.dcx().emit_err(session_diagnostics::InvalidReprHintNoValue { - span: meta_item.span, - name: meta_item.name_or_empty().to_ident_string(), - }); - } - } - MetaItemKind::List(nested_items) => { - if meta_item.has_name(sym::align) { - recognised = true; - if let [nested_item] = nested_items.as_slice() { - sess.dcx().emit_err( - session_diagnostics::IncorrectReprFormatExpectInteger { - span: 
nested_item.span(), - }, - ); - } else { - sess.dcx().emit_err( - session_diagnostics::IncorrectReprFormatAlignOneArg { - span: meta_item.span, - }, - ); - } - } else if meta_item.has_name(sym::packed) { - recognised = true; - if let [nested_item] = nested_items.as_slice() { - sess.dcx().emit_err( - session_diagnostics::IncorrectReprFormatPackedExpectInteger { - span: nested_item.span(), - }, - ); - } else { - sess.dcx().emit_err( - session_diagnostics::IncorrectReprFormatPackedOneOrZeroArg { - span: meta_item.span, - }, - ); - } - } else if matches!( - meta_item.name_or_empty(), - sym::Rust | sym::C | sym::simd | sym::transparent - ) || int_type_of_word(meta_item.name_or_empty()).is_some() - { - recognised = true; - sess.dcx().emit_err(session_diagnostics::InvalidReprHintNoParen { - span: meta_item.span, - name: meta_item.name_or_empty().to_ident_string(), - }); - } - } - _ => (), - } - } - if !recognised { - // Not a word we recognize. This will be caught and reported by - // the `check_mod_attrs` pass, but this pass doesn't always run - // (e.g. if we only pretty-print the source), so we have to gate - // the `span_delayed_bug` call as follows: - if sess.opts.pretty.map_or(true, |pp| pp.needs_analysis()) { - dcx.span_delayed_bug(item.span(), "unrecognized representation hint"); - } +impl CombineAttributeParser for ReprParser { + type Item = (ReprAttr, Span); + const PATH: &'static [rustc_span::Symbol] = &[sym::repr]; + const CONVERT: ConvertFn = AttributeKind::Repr; + + fn extend<'a>( + cx: &'a AcceptContext<'a>, + args: &'a ArgParser<'a>, + ) -> impl IntoIterator + 'a { + let mut reprs = Vec::new(); + + let Some(list) = args.list() else { + return reprs; + }; + + if list.is_empty() { + // this is so validation can emit a lint + reprs.push((ReprAttr::ReprEmpty, cx.attr_span)); + } + + for param in list.mixed() { + if let Some(_) = param.lit() { + cx.emit_err(session_diagnostics::ReprIdent { span: cx.attr_span }); + continue; } + + let span = param.span(); + reprs.extend(param.meta_item().and_then(|mi| parse_repr(cx, &mi)).map(|r| (r, span))); } + + reprs } - acc +} + +macro_rules! 
int_pat { + () => { + sym::i8 + | sym::u8 + | sym::i16 + | sym::u16 + | sym::i32 + | sym::u32 + | sym::i64 + | sym::u64 + | sym::i128 + | sym::u128 + | sym::isize + | sym::usize + }; } fn int_type_of_word(s: Symbol) -> Option { - use rustc_attr_data_structures::IntType::*; + use IntType::*; match s { - sym::i8 => Some(SignedInt(ast::IntTy::I8)), - sym::u8 => Some(UnsignedInt(ast::UintTy::U8)), - sym::i16 => Some(SignedInt(ast::IntTy::I16)), - sym::u16 => Some(UnsignedInt(ast::UintTy::U16)), - sym::i32 => Some(SignedInt(ast::IntTy::I32)), - sym::u32 => Some(UnsignedInt(ast::UintTy::U32)), - sym::i64 => Some(SignedInt(ast::IntTy::I64)), - sym::u64 => Some(UnsignedInt(ast::UintTy::U64)), - sym::i128 => Some(SignedInt(ast::IntTy::I128)), - sym::u128 => Some(UnsignedInt(ast::UintTy::U128)), - sym::isize => Some(SignedInt(ast::IntTy::Isize)), - sym::usize => Some(UnsignedInt(ast::UintTy::Usize)), + sym::i8 => Some(SignedInt(IntTy::I8)), + sym::u8 => Some(UnsignedInt(UintTy::U8)), + sym::i16 => Some(SignedInt(IntTy::I16)), + sym::u16 => Some(UnsignedInt(UintTy::U16)), + sym::i32 => Some(SignedInt(IntTy::I32)), + sym::u32 => Some(UnsignedInt(UintTy::U32)), + sym::i64 => Some(SignedInt(IntTy::I64)), + sym::u64 => Some(UnsignedInt(UintTy::U64)), + sym::i128 => Some(SignedInt(IntTy::I128)), + sym::u128 => Some(UnsignedInt(UintTy::U128)), + sym::isize => Some(SignedInt(IntTy::Isize)), + sym::usize => Some(UnsignedInt(UintTy::Usize)), _ => None, } } -pub fn parse_alignment(node: &ast::LitKind) -> Result { - if let ast::LitKind::Int(literal, ast::LitIntType::Unsuffixed) = node { +fn parse_repr(cx: &AcceptContext<'_>, param: &MetaItemParser<'_>) -> Option { + use ReprAttr::*; + + // FIXME(jdonszelmann): invert the parsing here to match on the word first and then the + // structure. 
+ let (ident, args) = param.word_or_empty(); + + match (ident.name, args) { + (sym::align, ArgParser::NoArgs) => { + cx.emit_err(session_diagnostics::InvalidReprAlignNeedArg { span: ident.span }); + None + } + (sym::align, ArgParser::List(l)) => parse_repr_align(cx, l, param.span(), AlignKind::Align), + + (sym::packed, ArgParser::NoArgs) => Some(ReprPacked(Align::ONE)), + (sym::packed, ArgParser::List(l)) => { + parse_repr_align(cx, l, param.span(), AlignKind::Packed) + } + + (sym::align | sym::packed, ArgParser::NameValue(l)) => { + cx.emit_err(session_diagnostics::IncorrectReprFormatGeneric { + span: param.span(), + // FIXME(jdonszelmann) can just be a string in the diag type + repr_arg: &ident.to_string(), + cause: IncorrectReprFormatGenericCause::from_lit_kind( + param.span(), + &l.value_as_lit().kind, + ident.name.as_str(), + ), + }); + None + } + + (sym::Rust, ArgParser::NoArgs) => Some(ReprRust), + (sym::C, ArgParser::NoArgs) => Some(ReprC), + (sym::simd, ArgParser::NoArgs) => Some(ReprSimd), + (sym::transparent, ArgParser::NoArgs) => Some(ReprTransparent), + (i @ int_pat!(), ArgParser::NoArgs) => { + // int_pat!() should make sure it always parses + Some(ReprInt(int_type_of_word(i).unwrap())) + } + + ( + sym::Rust | sym::C | sym::simd | sym::transparent | int_pat!(), + ArgParser::NameValue(_), + ) => { + cx.emit_err(session_diagnostics::InvalidReprHintNoValue { + span: param.span(), + name: ident.to_string(), + }); + None + } + (sym::Rust | sym::C | sym::simd | sym::transparent | int_pat!(), ArgParser::List(_)) => { + cx.emit_err(session_diagnostics::InvalidReprHintNoParen { + span: param.span(), + name: ident.to_string(), + }); + None + } + + _ => { + cx.emit_err(session_diagnostics::UnrecognizedReprHint { span: param.span() }); + None + } + } +} + +enum AlignKind { + Packed, + Align, +} + +fn parse_repr_align( + cx: &AcceptContext<'_>, + list: &MetaItemListParser<'_>, + param_span: Span, + align_kind: AlignKind, +) -> Option { + use AlignKind::*; + + let Some(align) = list.single() else { + match align_kind { + Packed => { + cx.emit_err(session_diagnostics::IncorrectReprFormatPackedOneOrZeroArg { + span: param_span, + }); + } + Align => { + cx.dcx().emit_err(session_diagnostics::IncorrectReprFormatAlignOneArg { + span: param_span, + }); + } + } + + return None; + }; + + let Some(lit) = align.lit() else { + match align_kind { + Packed => { + cx.emit_err(session_diagnostics::IncorrectReprFormatPackedExpectInteger { + span: align.span(), + }); + } + Align => { + cx.emit_err(session_diagnostics::IncorrectReprFormatExpectInteger { + span: align.span(), + }); + } + } + + return None; + }; + + match parse_alignment(&lit.kind) { + Ok(literal) => Some(match align_kind { + AlignKind::Packed => ReprAttr::ReprPacked(literal), + AlignKind::Align => ReprAttr::ReprAlign(literal), + }), + Err(message) => { + cx.emit_err(session_diagnostics::InvalidReprGeneric { + span: lit.span, + repr_arg: match align_kind { + Packed => "packed".to_string(), + Align => "align".to_string(), + }, + error_part: message, + }); + None + } + } +} + +fn parse_alignment(node: &LitKind) -> Result { + if let LitKind::Int(literal, LitIntType::Unsuffixed) = node { // `Align::from_bytes` accepts 0 as an input, check is_power_of_two() first if literal.get().is_power_of_two() { // Only possible error is larger than 2^29 diff --git a/compiler/rustc_attr_parsing/src/attributes/stability.rs b/compiler/rustc_attr_parsing/src/attributes/stability.rs index 0d5bd105f057b..734c5dec86aef 100644 --- 
a/compiler/rustc_attr_parsing/src/attributes/stability.rs +++ b/compiler/rustc_attr_parsing/src/attributes/stability.rs @@ -1,258 +1,257 @@ -//! Parsing and validation of builtin attributes - use std::num::NonZero; -use rustc_ast::MetaItem; -use rustc_ast::attr::AttributeExt; -use rustc_ast_pretty::pprust; use rustc_attr_data_structures::{ - ConstStability, DefaultBodyStability, Stability, StabilityLevel, StableSince, UnstableReason, - VERSION_PLACEHOLDER, + AllowedThroughUnstableModules, AttributeKind, DefaultBodyStability, PartialConstStability, + Stability, StabilityLevel, StableSince, UnstableReason, VERSION_PLACEHOLDER, }; -use rustc_errors::ErrorGuaranteed; -use rustc_session::Session; -use rustc_span::Span; -use rustc_span::symbol::{Symbol, sym}; - -use crate::attributes::util::UnsupportedLiteralReason; -use crate::{parse_version, session_diagnostics}; - -/// Collects stability info from `stable`/`unstable`/`rustc_allowed_through_unstable_modules` -/// attributes in `attrs`. Returns `None` if no stability attributes are found. -pub fn find_stability( - sess: &Session, - attrs: &[impl AttributeExt], - item_sp: Span, -) -> Option<(Stability, Span)> { - let mut stab: Option<(Stability, Span)> = None; - let mut allowed_through_unstable_modules = false; - - for attr in attrs { - match attr.name_or_empty() { - sym::rustc_allowed_through_unstable_modules => allowed_through_unstable_modules = true, - sym::unstable => { - if stab.is_some() { - sess.dcx().emit_err(session_diagnostics::MultipleStabilityLevels { - span: attr.span(), - }); - break; - } +use rustc_span::{ErrorGuaranteed, Span, Symbol, sym}; + +use super::util::parse_version; +use super::{AcceptMapping, AttributeParser, SingleAttributeParser}; +use crate::context::{AcceptContext, FinalizeContext}; +use crate::parser::{ArgParser, MetaItemParser}; +use crate::session_diagnostics::{self, UnsupportedLiteralReason}; + +macro_rules! reject_outside_std { + ($cx: ident) => { + // Emit errors for non-staged-api crates. + if !$cx.features().staged_api() { + $cx.emit_err(session_diagnostics::StabilityOutsideStd { span: $cx.attr_span }); + return; + } + }; +} - if let Some((feature, level)) = parse_unstability(sess, attr) { - stab = Some((Stability { level, feature }, attr.span())); - } - } - sym::stable => { - if stab.is_some() { - sess.dcx().emit_err(session_diagnostics::MultipleStabilityLevels { - span: attr.span(), - }); - break; - } - if let Some((feature, level)) = parse_stability(sess, attr) { - stab = Some((Stability { level, feature }, attr.span())); - } - } - _ => {} +#[derive(Default)] +pub(crate) struct StabilityParser { + allowed_through_unstable_modules: Option, + stability: Option<(Stability, Span)>, +} + +impl StabilityParser { + /// Checks, and emits an error when a stability (or unstability) was already set, which would be a duplicate. 
+ fn check_duplicate(&self, cx: &AcceptContext<'_>) -> bool { + if let Some((_, _)) = self.stability { + cx.emit_err(session_diagnostics::MultipleStabilityLevels { span: cx.attr_span }); + true + } else { + false } } +} - if allowed_through_unstable_modules { - match &mut stab { - Some(( +impl AttributeParser for StabilityParser { + const ATTRIBUTES: AcceptMapping = &[ + (&[sym::stable], |this, cx, args| { + reject_outside_std!(cx); + if !this.check_duplicate(cx) + && let Some((feature, level)) = parse_stability(cx, args) + { + this.stability = Some((Stability { level, feature }, cx.attr_span)); + } + }), + (&[sym::unstable], |this, cx, args| { + reject_outside_std!(cx); + if !this.check_duplicate(cx) + && let Some((feature, level)) = parse_unstability(cx, args) + { + this.stability = Some((Stability { level, feature }, cx.attr_span)); + } + }), + (&[sym::rustc_allowed_through_unstable_modules], |this, cx, args| { + reject_outside_std!(cx); + this.allowed_through_unstable_modules = + Some(match args.name_value().and_then(|i| i.value_as_str()) { + Some(msg) => AllowedThroughUnstableModules::WithDeprecation(msg), + None => AllowedThroughUnstableModules::WithoutDeprecation, + }); + }), + ]; + + fn finalize(mut self, cx: &FinalizeContext<'_>) -> Option { + if let Some(atum) = self.allowed_through_unstable_modules { + if let Some(( Stability { - level: StabilityLevel::Stable { allowed_through_unstable_modules, .. }, + level: StabilityLevel::Stable { ref mut allowed_through_unstable_modules, .. }, .. }, _, - )) => *allowed_through_unstable_modules = true, - _ => { - sess.dcx() - .emit_err(session_diagnostics::RustcAllowedUnstablePairing { span: item_sp }); + )) = self.stability + { + *allowed_through_unstable_modules = Some(atum); + } else { + cx.dcx().emit_err(session_diagnostics::RustcAllowedUnstablePairing { + span: cx.target_span, + }); } } - } - stab + let (stability, span) = self.stability?; + + Some(AttributeKind::Stability { stability, span }) + } } -/// Collects stability info from `rustc_const_stable`/`rustc_const_unstable`/`rustc_promotable` -/// attributes in `attrs`. Returns `None` if no stability attributes are found. 
-pub fn find_const_stability( - sess: &Session, - attrs: &[impl AttributeExt], - item_sp: Span, -) -> Option<(ConstStability, Span)> { - let mut const_stab: Option<(ConstStability, Span)> = None; - let mut promotable = false; - let mut const_stable_indirect = false; - - for attr in attrs { - match attr.name_or_empty() { - sym::rustc_promotable => promotable = true, - sym::rustc_const_stable_indirect => const_stable_indirect = true, - sym::rustc_const_unstable => { - if const_stab.is_some() { - sess.dcx().emit_err(session_diagnostics::MultipleStabilityLevels { - span: attr.span(), - }); - break; - } +// FIXME(jdonszelmann) change to Single +#[derive(Default)] +pub(crate) struct BodyStabilityParser { + stability: Option<(DefaultBodyStability, Span)>, +} - if let Some((feature, level)) = parse_unstability(sess, attr) { - const_stab = Some(( - ConstStability { - level, - feature, - const_stable_indirect: false, - promotable: false, - }, - attr.span(), - )); - } - } - sym::rustc_const_stable => { - if const_stab.is_some() { - sess.dcx().emit_err(session_diagnostics::MultipleStabilityLevels { - span: attr.span(), - }); - break; - } - if let Some((feature, level)) = parse_stability(sess, attr) { - const_stab = Some(( - ConstStability { - level, - feature, - const_stable_indirect: false, - promotable: false, - }, - attr.span(), - )); - } +impl AttributeParser for BodyStabilityParser { + const ATTRIBUTES: AcceptMapping = + &[(&[sym::rustc_default_body_unstable], |this, cx, args| { + reject_outside_std!(cx); + if this.stability.is_some() { + cx.dcx() + .emit_err(session_diagnostics::MultipleStabilityLevels { span: cx.attr_span }); + } else if let Some((feature, level)) = parse_unstability(cx, args) { + this.stability = Some((DefaultBodyStability { level, feature }, cx.attr_span)); } - _ => {} - } - } + })]; - // Merge promotable and const_stable_indirect into stability info - if promotable { - match &mut const_stab { - Some((stab, _)) => stab.promotable = promotable, - _ => { - _ = sess - .dcx() - .emit_err(session_diagnostics::RustcPromotablePairing { span: item_sp }) - } - } + fn finalize(self, _cx: &FinalizeContext<'_>) -> Option { + let (stability, span) = self.stability?; + + Some(AttributeKind::BodyStability { stability, span }) } - if const_stable_indirect { - match &mut const_stab { - Some((stab, _)) => { - if stab.is_const_unstable() { - stab.const_stable_indirect = true; - } else { - _ = sess.dcx().emit_err(session_diagnostics::RustcConstStableIndirectPairing { - span: item_sp, - }) - } - } - _ => { - // This function has no const stability attribute, but has `const_stable_indirect`. - // We ignore that; unmarked functions are subject to recursive const stability - // checks by default so we do carry out the user's intent. - } - } +} + +pub(crate) struct ConstStabilityIndirectParser; +// FIXME(jdonszelmann): single word attribute group when we have these +impl SingleAttributeParser for ConstStabilityIndirectParser { + const PATH: &'static [rustc_span::Symbol] = &[sym::rustc_const_stable_indirect]; + + // ignore + fn on_duplicate(_cx: &AcceptContext<'_>, _first_span: Span) {} + + fn convert(_cx: &AcceptContext<'_>, _args: &ArgParser<'_>) -> Option { + Some(AttributeKind::ConstStabilityIndirect) } +} - const_stab +#[derive(Default)] +pub(crate) struct ConstStabilityParser { + promotable: bool, + stability: Option<(PartialConstStability, Span)>, } -/// Calculates the const stability for a const function in a `-Zforce-unstable-if-unmarked` crate -/// without the `staged_api` feature. 
-pub fn unmarked_crate_const_stab( - _sess: &Session, - attrs: &[impl AttributeExt], - regular_stab: Stability, -) -> ConstStability { - assert!(regular_stab.level.is_unstable()); - // The only attribute that matters here is `rustc_const_stable_indirect`. - // We enforce recursive const stability rules for those functions. - let const_stable_indirect = - attrs.iter().any(|a| a.name_or_empty() == sym::rustc_const_stable_indirect); - ConstStability { - feature: regular_stab.feature, - const_stable_indirect, - promotable: false, - level: regular_stab.level, +impl ConstStabilityParser { + /// Checks, and emits an error when a stability (or unstability) was already set, which would be a duplicate. + fn check_duplicate(&self, cx: &AcceptContext<'_>) -> bool { + if let Some((_, _)) = self.stability { + cx.emit_err(session_diagnostics::MultipleStabilityLevels { span: cx.attr_span }); + true + } else { + false + } } } -/// Collects stability info from `rustc_default_body_unstable` attributes in `attrs`. -/// Returns `None` if no stability attributes are found. -pub fn find_body_stability( - sess: &Session, - attrs: &[impl AttributeExt], -) -> Option<(DefaultBodyStability, Span)> { - let mut body_stab: Option<(DefaultBodyStability, Span)> = None; - - for attr in attrs { - if attr.has_name(sym::rustc_default_body_unstable) { - if body_stab.is_some() { - sess.dcx() - .emit_err(session_diagnostics::MultipleStabilityLevels { span: attr.span() }); - break; +impl AttributeParser for ConstStabilityParser { + const ATTRIBUTES: AcceptMapping = &[ + (&[sym::rustc_const_stable], |this, cx, args| { + reject_outside_std!(cx); + + if !this.check_duplicate(cx) + && let Some((feature, level)) = parse_stability(cx, args) + { + this.stability = Some(( + PartialConstStability { level, feature, promotable: false }, + cx.attr_span, + )); } - - if let Some((feature, level)) = parse_unstability(sess, attr) { - body_stab = Some((DefaultBodyStability { level, feature }, attr.span())); + }), + (&[sym::rustc_const_unstable], |this, cx, args| { + reject_outside_std!(cx); + if !this.check_duplicate(cx) + && let Some((feature, level)) = parse_unstability(cx, args) + { + this.stability = Some(( + PartialConstStability { level, feature, promotable: false }, + cx.attr_span, + )); + } + }), + (&[sym::rustc_promotable], |this, cx, _| { + reject_outside_std!(cx); + this.promotable = true; + }), + ]; + + fn finalize(mut self, cx: &FinalizeContext<'_>) -> Option { + if self.promotable { + if let Some((ref mut stab, _)) = self.stability { + stab.promotable = true; + } else { + cx.dcx() + .emit_err(session_diagnostics::RustcPromotablePairing { span: cx.target_span }); } } - } - body_stab + let (stability, span) = self.stability?; + + Some(AttributeKind::ConstStability { stability, span }) + } } -fn insert_or_error(sess: &Session, meta: &MetaItem, item: &mut Option) -> Option<()> { +/// Tries to insert the value of a `key = value` meta item into an option. 
+/// +/// Emits an error when either the option was already Some, or the arguments weren't of form +/// `name = value` +fn insert_value_into_option_or_error( + cx: &AcceptContext<'_>, + param: &MetaItemParser<'_>, + item: &mut Option, +) -> Option<()> { if item.is_some() { - sess.dcx().emit_err(session_diagnostics::MultipleItem { - span: meta.span, - item: pprust::path_to_string(&meta.path), + cx.emit_err(session_diagnostics::MultipleItem { + span: param.span(), + item: param.path_without_args().to_string(), }); None - } else if let Some(v) = meta.value_str() { - *item = Some(v); + } else if let Some(v) = param.args().name_value() + && let Some(s) = v.value_as_str() + { + *item = Some(s); Some(()) } else { - sess.dcx().emit_err(session_diagnostics::IncorrectMetaItem { span: meta.span }); + cx.emit_err(session_diagnostics::IncorrectMetaItem { + span: param.span(), + suggestion: None, + }); None } } /// Read the content of a `stable`/`rustc_const_stable` attribute, and return the feature name and /// its stability information. -fn parse_stability(sess: &Session, attr: &impl AttributeExt) -> Option<(Symbol, StabilityLevel)> { - let metas = attr.meta_item_list()?; - +pub(crate) fn parse_stability( + cx: &AcceptContext<'_>, + args: &ArgParser<'_>, +) -> Option<(Symbol, StabilityLevel)> { let mut feature = None; let mut since = None; - for meta in metas { - let Some(mi) = meta.meta_item() else { - sess.dcx().emit_err(session_diagnostics::UnsupportedLiteral { - span: meta.span(), + + for param in args.list()?.mixed() { + let param_span = param.span(); + let Some(param) = param.meta_item() else { + cx.emit_err(session_diagnostics::UnsupportedLiteral { + span: param_span, reason: UnsupportedLiteralReason::Generic, is_bytestr: false, - start_point_span: sess.source_map().start_point(meta.span()), + start_point_span: cx.sess().source_map().start_point(param_span), }); return None; }; - match mi.name_or_empty() { - sym::feature => insert_or_error(sess, mi, &mut feature)?, - sym::since => insert_or_error(sess, mi, &mut since)?, + match param.word_or_empty_without_args().name { + sym::feature => insert_value_into_option_or_error(cx, ¶m, &mut feature)?, + sym::since => insert_value_into_option_or_error(cx, ¶m, &mut since)?, _ => { - sess.dcx().emit_err(session_diagnostics::UnknownMetaItem { - span: meta.span(), - item: pprust::path_to_string(&mi.path), + cx.emit_err(session_diagnostics::UnknownMetaItem { + span: param_span, + item: param.path_without_args().to_string(), expected: &["feature", "since"], }); return None; @@ -263,9 +262,9 @@ fn parse_stability(sess: &Session, attr: &impl AttributeExt) -> Option<(Symbol, let feature = match feature { Some(feature) if rustc_lexer::is_ident(feature.as_str()) => Ok(feature), Some(_bad_feature) => { - Err(sess.dcx().emit_err(session_diagnostics::NonIdentFeature { span: attr.span() })) + Err(cx.emit_err(session_diagnostics::NonIdentFeature { span: cx.attr_span })) } - None => Err(sess.dcx().emit_err(session_diagnostics::MissingFeature { span: attr.span() })), + None => Err(cx.emit_err(session_diagnostics::MissingFeature { span: cx.attr_span })), }; let since = if let Some(since) = since { @@ -274,63 +273,65 @@ fn parse_stability(sess: &Session, attr: &impl AttributeExt) -> Option<(Symbol, } else if let Some(version) = parse_version(since) { StableSince::Version(version) } else { - sess.dcx().emit_err(session_diagnostics::InvalidSince { span: attr.span() }); + cx.emit_err(session_diagnostics::InvalidSince { span: cx.attr_span }); StableSince::Err } } else { - 
sess.dcx().emit_err(session_diagnostics::MissingSince { span: attr.span() }); + cx.emit_err(session_diagnostics::MissingSince { span: cx.attr_span }); StableSince::Err }; match feature { Ok(feature) => { - let level = StabilityLevel::Stable { since, allowed_through_unstable_modules: false }; + let level = StabilityLevel::Stable { since, allowed_through_unstable_modules: None }; Some((feature, level)) } Err(ErrorGuaranteed { .. }) => None, } } -/// Read the content of a `unstable`/`rustc_const_unstable`/`rustc_default_body_unstable` +// Read the content of a `unstable`/`rustc_const_unstable`/`rustc_default_body_unstable` /// attribute, and return the feature name and its stability information. -fn parse_unstability(sess: &Session, attr: &impl AttributeExt) -> Option<(Symbol, StabilityLevel)> { - let metas = attr.meta_item_list()?; - +pub(crate) fn parse_unstability( + cx: &AcceptContext<'_>, + args: &ArgParser<'_>, +) -> Option<(Symbol, StabilityLevel)> { let mut feature = None; let mut reason = None; let mut issue = None; let mut issue_num = None; let mut is_soft = false; let mut implied_by = None; - for meta in metas { - let Some(mi) = meta.meta_item() else { - sess.dcx().emit_err(session_diagnostics::UnsupportedLiteral { - span: meta.span(), + for param in args.list()?.mixed() { + let Some(param) = param.meta_item() else { + cx.emit_err(session_diagnostics::UnsupportedLiteral { + span: param.span(), reason: UnsupportedLiteralReason::Generic, is_bytestr: false, - start_point_span: sess.source_map().start_point(meta.span()), + start_point_span: cx.sess().source_map().start_point(param.span()), }); return None; }; - match mi.name_or_empty() { - sym::feature => insert_or_error(sess, mi, &mut feature)?, - sym::reason => insert_or_error(sess, mi, &mut reason)?, + let (word, args) = param.word_or_empty(); + match word.name { + sym::feature => insert_value_into_option_or_error(cx, ¶m, &mut feature)?, + sym::reason => insert_value_into_option_or_error(cx, ¶m, &mut reason)?, sym::issue => { - insert_or_error(sess, mi, &mut issue)?; + insert_value_into_option_or_error(cx, ¶m, &mut issue)?; - // These unwraps are safe because `insert_or_error` ensures the meta item + // These unwraps are safe because `insert_value_into_option_or_error` ensures the meta item // is a name/value pair string literal. 
issue_num = match issue.unwrap().as_str() { "none" => None, - issue => match issue.parse::>() { + issue_str => match issue_str.parse::>() { Ok(num) => Some(num), Err(err) => { - sess.dcx().emit_err( + cx.emit_err( session_diagnostics::InvalidIssueString { - span: mi.span, + span: param.span(), cause: session_diagnostics::InvalidIssueStringCause::from_int_error_kind( - mi.name_value_literal_span().unwrap(), + args.name_value().unwrap().value_span, err.kind(), ), }, @@ -341,16 +342,16 @@ fn parse_unstability(sess: &Session, attr: &impl AttributeExt) -> Option<(Symbol }; } sym::soft => { - if !mi.is_word() { - sess.dcx().emit_err(session_diagnostics::SoftNoArgs { span: mi.span }); + if !args.no_args() { + cx.emit_err(session_diagnostics::SoftNoArgs { span: param.span() }); } is_soft = true; } - sym::implied_by => insert_or_error(sess, mi, &mut implied_by)?, + sym::implied_by => insert_value_into_option_or_error(cx, ¶m, &mut implied_by)?, _ => { - sess.dcx().emit_err(session_diagnostics::UnknownMetaItem { - span: meta.span(), - item: pprust::path_to_string(&mi.path), + cx.emit_err(session_diagnostics::UnknownMetaItem { + span: param.span(), + item: param.path_without_args().to_string(), expected: &["feature", "reason", "issue", "soft", "implied_by"], }); return None; @@ -361,14 +362,13 @@ fn parse_unstability(sess: &Session, attr: &impl AttributeExt) -> Option<(Symbol let feature = match feature { Some(feature) if rustc_lexer::is_ident(feature.as_str()) => Ok(feature), Some(_bad_feature) => { - Err(sess.dcx().emit_err(session_diagnostics::NonIdentFeature { span: attr.span() })) + Err(cx.emit_err(session_diagnostics::NonIdentFeature { span: cx.attr_span })) } - None => Err(sess.dcx().emit_err(session_diagnostics::MissingFeature { span: attr.span() })), + None => Err(cx.emit_err(session_diagnostics::MissingFeature { span: cx.attr_span })), }; - let issue = issue.ok_or_else(|| { - sess.dcx().emit_err(session_diagnostics::MissingIssue { span: attr.span() }) - }); + let issue = + issue.ok_or_else(|| cx.emit_err(session_diagnostics::MissingIssue { span: cx.attr_span })); match (feature, issue) { (Ok(feature), Ok(_)) => { diff --git a/compiler/rustc_attr_parsing/src/attributes/transparency.rs b/compiler/rustc_attr_parsing/src/attributes/transparency.rs index f4065a7704841..ad83a1f7af80c 100644 --- a/compiler/rustc_attr_parsing/src/attributes/transparency.rs +++ b/compiler/rustc_attr_parsing/src/attributes/transparency.rs @@ -1,36 +1,33 @@ -use rustc_ast::attr::AttributeExt; -use rustc_attr_data_structures::TransparencyError; +use rustc_attr_data_structures::AttributeKind; use rustc_span::hygiene::Transparency; use rustc_span::sym; -pub fn find_transparency( - attrs: &[impl AttributeExt], - macro_rules: bool, -) -> (Transparency, Option) { - let mut transparency = None; - let mut error = None; - for attr in attrs { - if attr.has_name(sym::rustc_macro_transparency) { - if let Some((_, old_span)) = transparency { - error = Some(TransparencyError::MultipleTransparencyAttrs(old_span, attr.span())); - break; - } else if let Some(value) = attr.value_str() { - transparency = Some(( - match value { - sym::transparent => Transparency::Transparent, - sym::semitransparent => Transparency::SemiTransparent, - sym::opaque => Transparency::Opaque, - _ => { - error = - Some(TransparencyError::UnknownTransparency(value, attr.span())); - continue; - } - }, - attr.span(), - )); +use super::{AcceptContext, SingleAttributeParser}; +use crate::parser::ArgParser; + +pub(crate) struct TransparencyParser; + +// 
FIXME(jdonszelmann): make these proper diagnostics
+#[allow(rustc::untranslatable_diagnostic)]
+#[allow(rustc::diagnostic_outside_of_impl)]
+impl SingleAttributeParser for TransparencyParser {
+    const PATH: &'static [rustc_span::Symbol] = &[sym::rustc_macro_transparency];
+
+    fn on_duplicate(cx: &crate::context::AcceptContext<'_>, first_span: rustc_span::Span) {
+        cx.dcx().span_err(vec![first_span, cx.attr_span], "multiple macro transparency attributes");
+    }
+
+    fn convert(cx: &AcceptContext<'_>, args: &ArgParser<'_>) -> Option<AttributeKind> {
+        match args.name_value().and_then(|nv| nv.value_as_str()) {
+            Some(sym::transparent) => Some(Transparency::Transparent),
+            Some(sym::semitransparent) => Some(Transparency::SemiTransparent),
+            Some(sym::opaque) => Some(Transparency::Opaque),
+            Some(other) => {
+                cx.dcx().span_err(cx.attr_span, format!("unknown macro transparency: `{other}`"));
+                None
            }
+            None => None,
        }
+        .map(AttributeKind::MacroTransparency)
    }
-    let fallback = if macro_rules { Transparency::SemiTransparent } else { Transparency::Opaque };
-    (transparency.map_or(fallback, |t| t.0), error)
 }
diff --git a/compiler/rustc_attr_parsing/src/attributes/util.rs b/compiler/rustc_attr_parsing/src/attributes/util.rs
index c59d2fec1dcb7..05a9029c59aa5 100644
--- a/compiler/rustc_attr_parsing/src/attributes/util.rs
+++ b/compiler/rustc_attr_parsing/src/attributes/util.rs
@@ -1,23 +1,7 @@
 use rustc_ast::attr::{AttributeExt, first_attr_value_str_by_name};
 use rustc_attr_data_structures::RustcVersion;
 use rustc_feature::is_builtin_attr_name;
-use rustc_span::symbol::{Symbol, sym};
-
-pub(crate) enum UnsupportedLiteralReason {
-    Generic,
-    CfgString,
-    CfgBoolean,
-    DeprecatedString,
-    DeprecatedKvPair,
-}
-
-pub fn is_builtin_attr(attr: &impl AttributeExt) -> bool {
-    attr.is_doc_comment() || attr.ident().is_some_and(|ident| is_builtin_attr_name(ident.name))
-}
-
-pub fn find_crate_name(attrs: &[impl AttributeExt]) -> Option<Symbol> {
-    first_attr_value_str_by_name(attrs, sym::crate_name)
-}
+use rustc_span::{Symbol, sym};
 
 /// Parse a rustc version number written inside string literal in an attribute,
 /// like appears in `since = "1.0.0"`.
Suffixes like "-dev" and "-nightly" are @@ -34,3 +18,11 @@ pub fn parse_version(s: Symbol) -> Option { let patch = digits.next().unwrap_or("0").parse().ok()?; Some(RustcVersion { major, minor, patch }) } + +pub fn is_builtin_attr(attr: &impl AttributeExt) -> bool { + attr.is_doc_comment() || attr.ident().is_some_and(|ident| is_builtin_attr_name(ident.name)) +} + +pub fn find_crate_name(attrs: &[impl AttributeExt]) -> Option { + first_attr_value_str_by_name(attrs, sym::crate_name) +} diff --git a/compiler/rustc_attr_parsing/src/context.rs b/compiler/rustc_attr_parsing/src/context.rs new file mode 100644 index 0000000000000..a1019a9006bca --- /dev/null +++ b/compiler/rustc_attr_parsing/src/context.rs @@ -0,0 +1,344 @@ +use std::cell::RefCell; +use std::collections::BTreeMap; +use std::ops::Deref; +use std::sync::LazyLock; + +use rustc_ast::{self as ast, DelimArgs}; +use rustc_attr_data_structures::AttributeKind; +use rustc_errors::{DiagCtxtHandle, Diagnostic}; +use rustc_feature::Features; +use rustc_hir::{AttrArgs, AttrItem, AttrPath, Attribute, HashIgnoredAttrId}; +use rustc_session::Session; +use rustc_span::symbol::kw; +use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, Symbol, sym}; + +use crate::attributes::allow_unstable::{AllowConstFnUnstableParser, AllowInternalUnstableParser}; +use crate::attributes::confusables::ConfusablesParser; +use crate::attributes::deprecation::DeprecationParser; +use crate::attributes::repr::ReprParser; +use crate::attributes::stability::{ + BodyStabilityParser, ConstStabilityIndirectParser, ConstStabilityParser, StabilityParser, +}; +use crate::attributes::transparency::TransparencyParser; +use crate::attributes::{AttributeParser as _, Combine, Single}; +use crate::parser::{ArgParser, MetaItemParser}; + +macro_rules! attribute_groups { + ( + pub(crate) static $name: ident = [$($names: ty),* $(,)?]; + ) => { + pub(crate) static $name: LazyLock<( + BTreeMap<&'static [Symbol], Vec, &ArgParser<'_>) + Send + Sync>>>, + Vec) -> Option>> + )> = LazyLock::new(|| { + let mut accepts = BTreeMap::<_, Vec, &ArgParser<'_>) + Send + Sync>>>::new(); + let mut finalizes = Vec::) -> Option>>::new(); + + $( + { + thread_local! { + static STATE_OBJECT: RefCell<$names> = RefCell::new(<$names>::default()); + }; + + for (k, v) in <$names>::ATTRIBUTES { + accepts.entry(*k).or_default().push(Box::new(|cx, args| { + STATE_OBJECT.with_borrow_mut(|s| { + v(s, cx, args) + }) + })); + } + + finalizes.push(Box::new(|cx| { + let state = STATE_OBJECT.take(); + state.finalize(cx) + })); + } + )* + + (accepts, finalizes) + }); + }; +} + +attribute_groups!( + pub(crate) static ATTRIBUTE_GROUP_MAPPING = [ + // tidy-alphabetical-start + BodyStabilityParser, + ConfusablesParser, + ConstStabilityParser, + StabilityParser, + // tidy-alphabetical-end + + // tidy-alphabetical-start + Combine, + Combine, + Combine, + // tidy-alphabetical-end + + // tidy-alphabetical-start + Single, + Single, + Single, + // tidy-alphabetical-end + ]; +); + +/// Context given to every attribute parser when accepting +/// +/// Gives [`AttributeParser`]s enough information to create errors, for example. 
+pub(crate) struct AcceptContext<'a> { + pub(crate) group_cx: &'a FinalizeContext<'a>, + /// The span of the attribute currently being parsed + pub(crate) attr_span: Span, +} + +impl<'a> AcceptContext<'a> { + pub(crate) fn emit_err(&self, diag: impl Diagnostic<'a>) -> ErrorGuaranteed { + if !self.limit_diagnostics { + self.dcx().emit_err(diag) + } else { + self.dcx() + .span_delayed_bug(self.attr_span, "diagnostic limited during attribute parsing") + } + } +} + +impl<'a> Deref for AcceptContext<'a> { + type Target = FinalizeContext<'a>; + + fn deref(&self) -> &Self::Target { + &self.group_cx + } +} + +/// Context given to every attribute parser during finalization. +/// +/// Gives [`AttributeParser`](crate::attributes::AttributeParser)s enough information to create errors, for example. +pub(crate) struct FinalizeContext<'a> { + /// The parse context, gives access to the session and the + /// diagnostics context. + pub(crate) cx: &'a AttributeParser<'a>, + /// The span of the syntactical component this attribute was applied to + pub(crate) target_span: Span, +} + +impl<'a> Deref for FinalizeContext<'a> { + type Target = AttributeParser<'a>; + + fn deref(&self) -> &Self::Target { + &self.cx + } +} + +#[derive(PartialEq, Clone, Copy, Debug)] +pub enum OmitDoc { + Lower, + Skip, +} + +/// Context created once, for example as part of the ast lowering +/// context, through which all attributes can be lowered. +pub struct AttributeParser<'sess> { + #[allow(dead_code)] // FIXME(jdonszelmann): needed later to verify we parsed all attributes + tools: Vec, + sess: &'sess Session, + features: Option<&'sess Features>, + + /// *only* parse attributes with this symbol. + /// + /// Used in cases where we want the lowering infrastructure for + /// parse just a single attribute. + parse_only: Option, + + /// Can be used to instruct parsers to reduce the number of diagnostics it emits. + /// Useful when using `parse_limited` and you know the attr will be reparsed later. + pub(crate) limit_diagnostics: bool, +} + +impl<'sess> AttributeParser<'sess> { + /// This method allows you to parse attributes *before* you have access to features or tools. + /// One example where this is necessary, is to parse `feature` attributes themselves for + /// example. + /// + /// Try to use this as little as possible. Attributes *should* be lowered during `rustc_ast_lowering`. + /// Some attributes require access to features to parse, which would crash if you tried to do so + /// through [`parse_limited`](Self::parse_limited). + /// + /// To make sure use is limited, supply a `Symbol` you'd like to parse. Only attributes with + /// that symbol are picked out of the list of instructions and parsed. Those are returned. 
+    pub fn parse_limited(
+        sess: &'sess Session,
+        attrs: &[ast::Attribute],
+        sym: Symbol,
+        target_span: Span,
+        limit_diagnostics: bool,
+    ) -> Option<Attribute> {
+        let mut parsed = Self {
+            sess,
+            features: None,
+            tools: Vec::new(),
+            parse_only: Some(sym),
+            limit_diagnostics,
+        }
+        .parse_attribute_list(attrs, target_span, OmitDoc::Skip);
+
+        assert!(parsed.len() <= 1);
+
+        parsed.pop()
+    }
+
+    pub fn new(sess: &'sess Session, features: &'sess Features, tools: Vec<Symbol>) -> Self {
+        Self { sess, features: Some(features), tools, parse_only: None, limit_diagnostics: false }
+    }
+
+    pub(crate) fn sess(&self) -> &'sess Session {
+        self.sess
+    }
+
+    pub(crate) fn features(&self) -> &'sess Features {
+        self.features.expect("features not available at this point in the compiler")
+    }
+
+    pub(crate) fn dcx(&self) -> DiagCtxtHandle<'sess> {
+        self.sess.dcx()
+    }
+
+    /// Parse a list of attributes.
+    ///
+    /// `target_span` is the span of the thing this list of attributes is applied to,
+    /// and when `omit_doc` is set, doc attributes are filtered out.
+    pub fn parse_attribute_list<'a>(
+        &'a self,
+        attrs: &'a [ast::Attribute],
+        target_span: Span,
+        omit_doc: OmitDoc,
+    ) -> Vec<Attribute> {
+        let mut attributes = Vec::new();
+
+        let group_cx = FinalizeContext { cx: self, target_span };
+
+        for attr in attrs {
+            // if we're only looking for a single attribute,
+            // skip all the ones we don't care about
+            if let Some(expected) = self.parse_only {
+                if attr.name_or_empty() != expected {
+                    continue;
+                }
+            }
+
+            // sometimes, for example for `#![doc = include_str!("readme.md")]`,
+            // doc still contains a non-literal. You might say, when we're lowering attributes
+            // that's expanded right? But no, sometimes, when parsing attributes on macros,
+            // we already use the lowering logic and these are still there. So, when `omit_doc`
+            // is set we *also* want to ignore these
+            if omit_doc == OmitDoc::Skip && attr.name_or_empty() == sym::doc {
+                continue;
+            }
+
+            match &attr.kind {
+                ast::AttrKind::DocComment(comment_kind, symbol) => {
+                    if omit_doc == OmitDoc::Skip {
+                        continue;
+                    }
+
+                    attributes.push(Attribute::Parsed(AttributeKind::DocComment {
+                        style: attr.style,
+                        kind: *comment_kind,
+                        span: attr.span,
+                        comment: *symbol,
+                    }))
+                }
+                // // FIXME: make doc attributes go through a proper attribute parser
+                // ast::AttrKind::Normal(n) if n.name_or_empty() == sym::doc => {
+                //     let p = GenericMetaItemParser::from_attr(&n, self.dcx());
+                //
+                //     attributes.push(Attribute::Parsed(AttributeKind::DocComment {
+                //         style: attr.style,
+                //         kind: CommentKind::Line,
+                //         span: attr.span,
+                //         comment: p.args().name_value(),
+                //     }))
+                // }
+                ast::AttrKind::Normal(n) => {
+                    let parser = MetaItemParser::from_attr(n, self.dcx());
+                    let (path, args) = parser.deconstruct();
+                    let parts = path.segments().map(|i| i.name).collect::<Vec<_>>();
+
+                    if let Some(accepts) = ATTRIBUTE_GROUP_MAPPING.0.get(parts.as_slice()) {
+                        for f in accepts {
+                            let cx = AcceptContext { group_cx: &group_cx, attr_span: attr.span };
+
+                            f(&cx, &args)
+                        }
+                    } else {
+                        // if we're here, we must be compiling a tool attribute... Or someone forgot to
+                        // parse their fancy new attribute. Let's warn them in any case. If you are that
+                        // person, and you really think your attribute should remain unparsed, carefully
+                        // read the documentation in this module and if you still think so you can add an
+                        // exception to this assertion.
+ + // FIXME(jdonszelmann): convert other attributes, and check with this that + // we caught em all + // const FIXME_TEMPORARY_ATTR_ALLOWLIST: &[Symbol] = &[sym::cfg]; + // assert!( + // self.tools.contains(&parts[0]) || true, + // // || FIXME_TEMPORARY_ATTR_ALLOWLIST.contains(&parts[0]), + // "attribute {path} wasn't parsed and isn't a know tool attribute", + // ); + + attributes.push(Attribute::Unparsed(Box::new(AttrItem { + path: AttrPath::from_ast(&n.item.path), + args: self.lower_attr_args(&n.item.args), + id: HashIgnoredAttrId { attr_id: attr.id }, + style: attr.style, + span: attr.span, + }))); + } + } + } + } + + let mut parsed_attributes = Vec::new(); + for f in &ATTRIBUTE_GROUP_MAPPING.1 { + if let Some(attr) = f(&group_cx) { + parsed_attributes.push(Attribute::Parsed(attr)); + } + } + + attributes.extend(parsed_attributes); + + attributes + } + + fn lower_attr_args(&self, args: &ast::AttrArgs) -> AttrArgs { + match args { + ast::AttrArgs::Empty => AttrArgs::Empty, + ast::AttrArgs::Delimited(args) => AttrArgs::Delimited(DelimArgs { + dspan: args.dspan, + delim: args.delim, + tokens: args.tokens.flattened(), + }), + // This is an inert key-value attribute - it will never be visible to macros + // after it gets lowered to HIR. Therefore, we can extract literals to handle + // nonterminals in `#[doc]` (e.g. `#[doc = $e]`). + ast::AttrArgs::Eq { eq_span, expr } => { + // In valid code the value always ends up as a single literal. Otherwise, a dummy + // literal suffices because the error is handled elsewhere. + let lit = if let ast::ExprKind::Lit(token_lit) = expr.kind + && let Ok(lit) = ast::MetaItemLit::from_token_lit(token_lit, expr.span) + { + lit + } else { + let guar = self.dcx().has_errors().unwrap(); + ast::MetaItemLit { + symbol: kw::Empty, + suffix: None, + kind: ast::LitKind::Err(guar), + span: DUMMY_SP, + } + }; + AttrArgs::Eq { eq_span: *eq_span, expr: lit } + } + } + } +} diff --git a/compiler/rustc_attr_parsing/src/lib.rs b/compiler/rustc_attr_parsing/src/lib.rs index a1264a6875f69..f354fc842ed7e 100644 --- a/compiler/rustc_attr_parsing/src/lib.rs +++ b/compiler/rustc_attr_parsing/src/lib.rs @@ -1,8 +1,41 @@ -//! Functions and types dealing with attributes and meta items. +//! Centralized logic for parsing and attributes. //! -//! FIXME(Centril): For now being, much of the logic is still in `rustc_ast::attr`. -//! The goal is to move the definition of `MetaItem` and things that don't need to be in `syntax` -//! to this crate. +//! Part of a series of crates: +//! - rustc_attr_data_structures: contains types that the parsers parse into +//! - rustc_attr_parsing: this crate +//! - (in the future): rustc_attr_validation +//! +//! History: Check out [#131229](https://github.com/rust-lang/rust/issues/131229). +//! There used to be only one definition of attributes in the compiler: `ast::Attribute`. +//! These were then parsed or validated or both in places distributed all over the compiler. +//! This was a mess... +//! +//! Attributes are markers on items. Most are actually attribute-like proc-macros, and are expanded +//! but some remain as the so-called built-in attributes. These are not macros at all, and really +//! are just markers to guide the compilation process. An example is `#[inline(...)]` which changes +//! how code for functions is generated. Built-in attributes aren't macros because there's no rust +//! syntax they could expand to. +//! +//! In this crate, syntactical attributes (sequences of tokens that look like +//! 
`#[something(something else)]`) are parsed into more semantic attributes, markers on items. +//! Multiple syntactic attributes might influence a single semantic attribute. For example, +//! `#[stable(...)]` and `#[unstable()]` cannot occur together, and both semantically define +//! a "stability" of an item. So, the stability attribute has an +//! [`AttributeParser`](attributes::AttributeParser) that recognizes both the `#[stable()]` +//! and `#[unstable()]` syntactic attributes, and at the end produce a single [`AttributeKind::Stability`]. +//! +//! As a rule of thumb, when a syntactical attribute can be applied more than once, they should be +//! combined into a single semantic attribute. For example: +//! +//! ``` +//! #[repr(C)] +//! #[repr(packed)] +//! struct Meow {} +//! ``` +//! +//! should result in a single `AttributeKind::Repr` containing a list of repr annotations, in this +//! case `C` and `packed`. This is equivalent to writing `#[repr(C, packed)]` in a single +//! syntactical annotation. // tidy-alphabetical-start #![allow(internal_features)] @@ -12,11 +45,56 @@ #![warn(unreachable_pub)] // tidy-alphabetical-end +#[macro_use] mod attributes; +mod context; +pub mod parser; mod session_diagnostics; -pub use attributes::*; +pub use attributes::cfg::*; +pub use attributes::util::{find_crate_name, is_builtin_attr, parse_version}; +pub use context::{AttributeParser, OmitDoc}; pub use rustc_attr_data_structures::*; -pub use util::{find_crate_name, is_builtin_attr, parse_version}; rustc_fluent_macro::fluent_messages! { "../messages.ftl" } + +/// Finds attributes in sequences of attributes by pattern matching. +/// +/// A little like `matches` but for attributes. +/// +/// ```rust,no_run +/// // finds the repr attribute +/// if let Some(r) = find_attr!(attrs, AttributeKind::Repr(r) => r) { +/// +/// } +/// +/// // checks if one has matched +/// if find_attr!(attrs, AttributeKind::Repr(_)) { +/// +/// } +/// ``` +#[macro_export] +macro_rules! find_attr { + ($attributes_list: expr, $pattern: pat $(if $guard: expr)?) => {{ + $crate::find_attr!($attributes_list, $pattern $(if $guard)? => ()).is_some() + }}; + + ($attributes_list: expr, $pattern: pat $(if $guard: expr)? => $e: expr) => {{ + fn check_attribute_iterator<'a>(_: &'_ impl IntoIterator) {} + check_attribute_iterator(&$attributes_list); + + let find_attribute = |iter| { + for i in $attributes_list { + match i { + rustc_hir::Attribute::Parsed($pattern) $(if $guard: expr)? 
=> { + return Some($e); + } + _ => {} + } + } + + None + }; + find_attribute($attributes_list) + }}; +} diff --git a/compiler/rustc_attr_parsing/src/parser.rs b/compiler/rustc_attr_parsing/src/parser.rs new file mode 100644 index 0000000000000..0a7bf14b9f14f --- /dev/null +++ b/compiler/rustc_attr_parsing/src/parser.rs @@ -0,0 +1,626 @@ +use core::panic; +use std::fmt::{Debug, Display}; +use std::iter::Peekable; + +use rustc_ast::token::{self, Delimiter, Token}; +use rustc_ast::tokenstream::{TokenStreamIter, TokenTree}; +use rustc_ast::{AttrArgs, DelimArgs, Expr, ExprKind, LitKind, MetaItemLit, NormalAttr, Path}; +use rustc_ast_pretty::pprust; +use rustc_errors::DiagCtxtHandle; +use rustc_hir::{self as hir, AttrPath}; +use rustc_span::symbol::{Ident, kw}; +use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, Symbol}; + +pub struct SegmentIterator<'a> { + offset: usize, + path: &'a PathParser<'a>, +} + +impl<'a> Iterator for SegmentIterator<'a> { + type Item = &'a Ident; + + fn next(&mut self) -> Option { + if self.offset >= self.path.len() { + return None; + } + + let res = match self.path { + PathParser::Ast(ast_path) => &ast_path.segments[self.offset].ident, + PathParser::Attr(attr_path) => &attr_path.segments[self.offset], + }; + + self.offset += 1; + Some(res) + } +} + +#[derive(Clone, Debug)] +pub enum PathParser<'a> { + Ast(&'a Path), + Attr(AttrPath), +} + +impl<'a> PathParser<'a> { + pub fn get_attribute_path(&self) -> hir::AttrPath { + AttrPath { + segments: self.segments().copied().collect::>().into_boxed_slice(), + span: self.span(), + } + } + + pub fn segments(&'a self) -> impl Iterator { + SegmentIterator { offset: 0, path: self } + } + + pub fn span(&self) -> Span { + match self { + PathParser::Ast(path) => path.span, + PathParser::Attr(attr_path) => attr_path.span, + } + } + + pub fn len(&self) -> usize { + match self { + PathParser::Ast(path) => path.segments.len(), + PathParser::Attr(attr_path) => attr_path.segments.len(), + } + } + + pub fn segments_is(&self, segments: &[Symbol]) -> bool { + self.len() == segments.len() && self.segments().zip(segments).all(|(a, b)| a.name == *b) + } + + pub fn word(&self) -> Option { + (self.len() == 1).then(|| **self.segments().next().as_ref().unwrap()) + } + + pub fn word_or_empty(&self) -> Ident { + self.word().unwrap_or_else(Ident::empty) + } + + /// Asserts that this MetaItem is some specific word. + /// + /// See [`word`](Self::word) for examples of what a word is. 
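// An aside, not part of the patch: a small sketch of how the PathParser API above separates
// single-segment "words" from longer paths. The `describe` helper is hypothetical; it uses only
// methods introduced in this file plus `format!`, and is only meaningful inside the compiler tree.
fn describe(path: &PathParser<'_>) -> String {
    match path.word() {
        Some(ident) => format!("single-segment attribute name `{ident}` (like `inline` in `#[inline]`)"),
        None => format!("multi-segment attribute path `{path}` (like `rustfmt::skip` in `#[rustfmt::skip]`)"),
    }
}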
+ pub fn word_is(&self, sym: Symbol) -> bool { + self.word().map(|i| i.name == sym).unwrap_or(false) + } +} + +impl Display for PathParser<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + PathParser::Ast(path) => write!(f, "{}", pprust::path_to_string(path)), + PathParser::Attr(attr_path) => write!(f, "{attr_path}"), + } + } +} + +#[derive(Clone, Debug)] +#[must_use] +pub enum ArgParser<'a> { + NoArgs, + List(MetaItemListParser<'a>), + NameValue(NameValueParser), +} + +impl<'a> ArgParser<'a> { + pub fn span(&self) -> Option { + match self { + Self::NoArgs => None, + Self::List(l) => Some(l.span), + Self::NameValue(n) => Some(n.value_span.with_lo(n.eq_span.lo())), + } + } + + pub fn from_attr_args(value: &'a AttrArgs, dcx: DiagCtxtHandle<'a>) -> Self { + match value { + AttrArgs::Empty => Self::NoArgs, + AttrArgs::Delimited(args) if args.delim == Delimiter::Parenthesis => { + Self::List(MetaItemListParser::new(args, dcx)) + } + AttrArgs::Delimited(args) => { + Self::List(MetaItemListParser { sub_parsers: vec![], span: args.dspan.entire() }) + } + AttrArgs::Eq { eq_span, expr } => Self::NameValue(NameValueParser { + eq_span: *eq_span, + value: expr_to_lit(dcx, &expr), + value_span: expr.span, + }), + } + } + + /// Asserts that this MetaItem is a list + /// + /// Some examples: + /// + /// - `#[allow(clippy::complexity)]`: `(clippy::complexity)` is a list + /// - `#[rustfmt::skip::macros(target_macro_name)]`: `(target_macro_name)` is a list + pub fn list(&self) -> Option<&MetaItemListParser<'a>> { + match self { + Self::List(l) => Some(l), + Self::NameValue(_) | Self::NoArgs => None, + } + } + + /// Asserts that this MetaItem is a name-value pair. + /// + /// Some examples: + /// + /// - `#[clippy::cyclomatic_complexity = "100"]`: `clippy::cyclomatic_complexity = "100"` is a name value pair, + /// where the name is a path (`clippy::cyclomatic_complexity`). You already checked the path + /// to get an `ArgParser`, so this method will effectively only assert that the `= "100"` is + /// there + /// - `#[doc = "hello"]`: `doc = "hello` is also a name value pair + pub fn name_value(&self) -> Option<&NameValueParser> { + match self { + Self::NameValue(n) => Some(n), + Self::List(_) | Self::NoArgs => None, + } + } + + /// Asserts that there are no arguments + pub fn no_args(&self) -> bool { + matches!(self, Self::NoArgs) + } +} + +/// Inside lists, values could be either literals, or more deeply nested meta items. +/// This enum represents that. +/// +/// Choose which one you want using the provided methods. +#[derive(Debug, Clone)] +pub enum MetaItemOrLitParser<'a> { + MetaItemParser(MetaItemParser<'a>), + Lit(MetaItemLit), + Err(Span, ErrorGuaranteed), +} + +impl<'a> MetaItemOrLitParser<'a> { + pub fn span(&self) -> Span { + match self { + MetaItemOrLitParser::MetaItemParser(generic_meta_item_parser) => { + generic_meta_item_parser.span() + } + MetaItemOrLitParser::Lit(meta_item_lit) => meta_item_lit.span, + MetaItemOrLitParser::Err(span, _) => *span, + } + } + + pub fn lit(&self) -> Option<&MetaItemLit> { + match self { + MetaItemOrLitParser::Lit(meta_item_lit) => Some(meta_item_lit), + _ => None, + } + } + + pub fn meta_item(&self) -> Option<&MetaItemParser<'a>> { + match self { + MetaItemOrLitParser::MetaItemParser(parser) => Some(parser), + _ => None, + } + } +} + +/// Utility that deconstructs a MetaItem into usable parts. 
+/// +/// MetaItems are syntactically extremely flexible, but specific attributes want to parse +/// them in custom, more restricted ways. This can be done using this struct. +/// +/// MetaItems consist of some path, and some args. The args could be empty. In other words: +/// +/// - `name` -> args are empty +/// - `name(...)` -> args are a [`list`](ArgParser::list), which is the bit between the parentheses +/// - `name = value`-> arg is [`name_value`](ArgParser::name_value), where the argument is the +/// `= value` part +/// +/// The syntax of MetaItems can be found at +#[derive(Clone)] +pub struct MetaItemParser<'a> { + path: PathParser<'a>, + args: ArgParser<'a>, +} + +impl<'a> Debug for MetaItemParser<'a> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("MetaItemParser") + .field("path", &self.path) + .field("args", &self.args) + .finish() + } +} + +impl<'a> MetaItemParser<'a> { + /// Create a new parser from a [`NormalAttr`], which is stored inside of any + /// [`ast::Attribute`](Attribute) + pub fn from_attr(attr: &'a NormalAttr, dcx: DiagCtxtHandle<'a>) -> Self { + Self { + path: PathParser::Ast(&attr.item.path), + args: ArgParser::from_attr_args(&attr.item.args, dcx), + } + } +} + +impl<'a> MetaItemParser<'a> { + pub fn span(&self) -> Span { + if let Some(other) = self.args.span() { + self.path.span().with_hi(other.hi()) + } else { + self.path.span() + } + } + + /// Gets just the path, without the args. + pub fn path_without_args(&self) -> PathParser<'a> { + self.path.clone() + } + + /// Gets just the args parser, without caring about the path. + pub fn args(&self) -> &ArgParser<'a> { + &self.args + } + + pub fn deconstruct(&self) -> (PathParser<'a>, &ArgParser<'a>) { + (self.path_without_args(), self.args()) + } + + /// Asserts that this MetaItem starts with a path. Some examples: + /// + /// - `#[rustfmt::skip]`: `rustfmt::skip` is a path + /// - `#[allow(clippy::complexity)]`: `clippy::complexity` is a path + /// - `#[inline]`: `inline` is a single segment path + /// - `#[inline(always)]`: `always` is a single segment path, but `inline` is *not and + /// should be parsed using [`list`](Self::list) + pub fn path(&self) -> (PathParser<'a>, &ArgParser<'a>) { + self.deconstruct() + } + + /// Asserts that this MetaItem starts with a word, or single segment path. + /// Doesn't return the args parser. + /// + /// For examples. see [`Self::word`] + pub fn word_without_args(&self) -> Option { + Some(self.word()?.0) + } + + /// Like [`word`](Self::word), but returns an empty symbol instead of None + pub fn word_or_empty_without_args(&self) -> Ident { + self.word_or_empty().0 + } + + /// Asserts that this MetaItem starts with a word, or single segment path. 
+ /// + /// Some examples: + /// - `#[inline]`: `inline` is a word + /// - `#[rustfmt::skip]`: `rustfmt::skip` is a path, and not a word + /// - `#[inline(always)]`: `always` is a word, but `inline` is *not and should be parsed + /// using [`path_list`](Self::path_list) + /// - `#[allow(clippy::complexity)]`: `clippy::complexity` is *not* a word, and should instead be parsed + /// using [`path`](Self::path) + pub fn word(&self) -> Option<(Ident, &ArgParser<'a>)> { + let (path, args) = self.deconstruct(); + Some((path.word()?, args)) + } + + /// Like [`word`](Self::word), but returns an empty symbol instead of None + pub fn word_or_empty(&self) -> (Ident, &ArgParser<'a>) { + let (path, args) = self.deconstruct(); + (path.word().unwrap_or(Ident::empty()), args) + } + + /// Asserts that this MetaItem starts with some specific word. + /// + /// See [`word`](Self::word) for examples of what a word is. + pub fn word_is(&self, sym: Symbol) -> Option<&ArgParser<'a>> { + self.path_without_args().word_is(sym).then(|| self.args()) + } + + /// Asserts that this MetaItem starts with some specific path. + /// + /// See [`word`](Self::path) for examples of what a word is. + pub fn path_is(&self, segments: &[Symbol]) -> Option<&ArgParser<'a>> { + self.path_without_args().segments_is(segments).then(|| self.args()) + } +} + +#[derive(Clone)] +pub struct NameValueParser { + pub eq_span: Span, + value: MetaItemLit, + pub value_span: Span, +} + +impl Debug for NameValueParser { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("NameValueParser") + .field("eq_span", &self.eq_span) + .field("value", &self.value) + .field("value_span", &self.value_span) + .finish() + } +} + +impl NameValueParser { + pub fn value_as_lit(&self) -> &MetaItemLit { + &self.value + } + + pub fn value_as_str(&self) -> Option { + self.value_as_lit().kind.str() + } +} + +fn expr_to_lit(dcx: DiagCtxtHandle<'_>, expr: &Expr) -> MetaItemLit { + // In valid code the value always ends up as a single literal. Otherwise, a dummy + // literal suffices because the error is handled elsewhere. + if let ExprKind::Lit(token_lit) = expr.kind + && let Ok(lit) = MetaItemLit::from_token_lit(token_lit, expr.span) + { + lit + } else { + let guar = dcx.has_errors().unwrap(); + MetaItemLit { symbol: kw::Empty, suffix: None, kind: LitKind::Err(guar), span: DUMMY_SP } + } +} + +struct MetaItemListParserContext<'a> { + // the tokens inside the delimiters, so `#[some::attr(a b c)]` would have `a b c` inside + inside_delimiters: Peekable>, + dcx: DiagCtxtHandle<'a>, +} + +impl<'a> MetaItemListParserContext<'a> { + fn done(&mut self) -> bool { + self.inside_delimiters.peek().is_none() + } + + fn next_path(&mut self) -> Option { + // FIXME: Share code with `parse_path`. + let tt = self.inside_delimiters.next().map(|tt| TokenTree::uninterpolate(tt)); + + match tt.as_deref() { + Some(&TokenTree::Token( + Token { kind: ref kind @ (token::Ident(..) | token::PathSep), span }, + _, + )) => { + let mut segments = if let &token::Ident(name, _) = kind { + if let Some(TokenTree::Token(Token { kind: token::PathSep, .. 
}, _)) = + self.inside_delimiters.peek() + { + self.inside_delimiters.next(); + vec![Ident::new(name, span)] + } else { + return Some(AttrPath { + segments: vec![Ident::new(name, span)].into_boxed_slice(), + span, + }); + } + } else { + vec![Ident::new(kw::PathRoot, span)] + }; + loop { + if let Some(&TokenTree::Token(Token { kind: token::Ident(name, _), span }, _)) = + self.inside_delimiters + .next() + .map(|tt| TokenTree::uninterpolate(tt)) + .as_deref() + { + segments.push(Ident::new(name, span)); + } else { + unreachable!() + } + if let Some(TokenTree::Token(Token { kind: token::PathSep, .. }, _)) = + self.inside_delimiters.peek() + { + self.inside_delimiters.next(); + } else { + break; + } + } + let span = span.with_hi(segments.last().unwrap().span.hi()); + Some(AttrPath { segments: segments.into_boxed_slice(), span }) + } + Some(TokenTree::Token( + Token { kind: token::OpenDelim(_) | token::CloseDelim(_), .. }, + _, + )) => { + panic!("Should be `AttrTokenTree::Delimited`, not delim tokens: {:?}", tt); + } + Some(x) => { + // malformed attributes can get here. We can't crash, but somewhere else should've + // already warned for this. + self.dcx.span_delayed_bug( + x.span(), + format!("unexpected token {x:?} in built-in attribute path"), + ); + None + } + None => None, + } + } + + fn value(&mut self) -> Option { + match self.inside_delimiters.next() { + Some(TokenTree::Delimited(.., Delimiter::Invisible(_), inner_tokens)) => { + MetaItemListParserContext { + inside_delimiters: inner_tokens.iter().peekable(), + dcx: self.dcx, + } + .value() + } + Some(TokenTree::Token(ref token, _)) => { + if let Some(s) = MetaItemLit::from_token(token) { Some(s) } else { None } + } + x => unreachable!("{x:?}"), + } + } + + // parses one element on the inside of a list attribute like `#[my_attr( )]` + // + // parses a path followed be either: + // 1. nothing (a word attr) + // 2. a parenthesized list + // 3. an equals sign and a literal (name-value) + // + // Can also parse *just* a literal. This is for cases like as `#[my_attr("literal")]` + // where no path is given before the literal + // + // Some exceptions too for interpolated attributes which are already pre-processed + fn next(&mut self) -> Option> { + // a list element is either a literal + if let Some(TokenTree::Token(token, _)) = self.inside_delimiters.peek() + && let Some(lit) = MetaItemLit::from_token(token) + { + self.inside_delimiters.next(); + return Some(MetaItemOrLitParser::Lit(lit)); + } + + // or a path. + let path = + if let Some(TokenTree::Token(Token { kind: token::Interpolated(nt), span, .. }, _)) = + self.inside_delimiters.peek() + { + match &**nt { + // or maybe a full nt meta including the path but we return immediately + token::Nonterminal::NtMeta(item) => { + self.inside_delimiters.next(); + + return Some(MetaItemOrLitParser::MetaItemParser(MetaItemParser { + path: PathParser::Ast(&item.path), + args: ArgParser::from_attr_args(&item.args, self.dcx), + })); + } + // an already interpolated path from a macro expansion is a path, no need to parse + // one from tokens + token::Nonterminal::NtPath(path) => { + self.inside_delimiters.next(); + + AttrPath::from_ast(path) + } + _ => { + self.inside_delimiters.next(); + // we go into this path if an expr ended up in an attribute that + // expansion did not turn into a literal. Say, `#[repr(align(macro!()))]` + // where the macro didn't expand to a literal. An error is already given + // for this at this point, and then we do continue. This makes this path + // reachable... 
+ let e = self.dcx.span_delayed_bug( + *span, + "expr in place where literal is expected (builtin attr parsing)", + ); + + return Some(MetaItemOrLitParser::Err(*span, e)); + } + } + } else { + self.next_path()? + }; + + // Paths can be followed by: + // - `(more meta items)` (another list) + // - `= lit` (a name-value) + // - nothing + Some(MetaItemOrLitParser::MetaItemParser(match self.inside_delimiters.peek() { + Some(TokenTree::Delimited(dspan, _, Delimiter::Parenthesis, inner_tokens)) => { + self.inside_delimiters.next(); + + MetaItemParser { + path: PathParser::Attr(path), + args: ArgParser::List(MetaItemListParser::new_tts( + inner_tokens.iter(), + dspan.entire(), + self.dcx, + )), + } + } + Some(TokenTree::Delimited(span, ..)) => { + self.inside_delimiters.next(); + self.dcx.span_delayed_bug(span.entire(), "wrong delimiters"); + return None; + } + Some(TokenTree::Token(Token { kind: token::Eq, span }, _)) => { + self.inside_delimiters.next(); + let value = self.value()?; + MetaItemParser { + path: PathParser::Attr(path), + args: ArgParser::NameValue(NameValueParser { + eq_span: *span, + value_span: value.span, + value, + }), + } + } + _ => MetaItemParser { path: PathParser::Attr(path), args: ArgParser::NoArgs }, + })) + } + + fn parse(mut self, span: Span) -> MetaItemListParser<'a> { + let mut sub_parsers = Vec::new(); + + while !self.done() { + let Some(n) = self.next() else { + continue; + }; + sub_parsers.push(n); + + match self.inside_delimiters.peek() { + None | Some(TokenTree::Token(Token { kind: token::Comma, .. }, _)) => { + self.inside_delimiters.next(); + } + Some(_) => {} + } + } + + MetaItemListParser { sub_parsers, span } + } +} + +#[derive(Debug, Clone)] +pub struct MetaItemListParser<'a> { + sub_parsers: Vec>, + pub span: Span, +} + +impl<'a> MetaItemListParser<'a> { + fn new(delim: &'a DelimArgs, dcx: DiagCtxtHandle<'a>) -> MetaItemListParser<'a> { + MetaItemListParser::new_tts(delim.tokens.iter(), delim.dspan.entire(), dcx) + } + + fn new_tts(tts: TokenStreamIter<'a>, span: Span, dcx: DiagCtxtHandle<'a>) -> Self { + MetaItemListParserContext { inside_delimiters: tts.peekable(), dcx }.parse(span) + } + + /// Lets you pick and choose as what you want to parse each element in the list + pub fn mixed<'s>(&'s self) -> impl Iterator> + 's { + self.sub_parsers.iter() + } + + pub fn len(&self) -> usize { + self.sub_parsers.len() + } + + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Asserts that every item in the list is another list starting with a word. + /// + /// See [`MetaItemParser::word`] for examples of words. + pub fn all_word_list<'s>(&'s self) -> Option)>> { + self.mixed().map(|i| i.meta_item()?.word()).collect() + } + + /// Asserts that every item in the list is another list starting with a full path. + /// + /// See [`MetaItemParser::path`] for examples of paths. + pub fn all_path_list<'s>(&'s self) -> Option, &'s ArgParser<'a>)>> { + self.mixed().map(|i| Some(i.meta_item()?.path())).collect() + } + + /// Returns Some if the list contains only a single element. + /// + /// Inside the Some is the parser to parse this single element. 
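// An aside, not part of the patch: how the pieces above compose when walking the arguments of
// something like `#[deprecated(since = "1.0.0", note = "old")]`. The `collect_name_values`
// helper is a hypothetical caller, assuming the in-tree `Symbol` type; it is not code from this PR.
fn collect_name_values(args: &ArgParser<'_>) -> Vec<(Symbol, Option<Symbol>)> {
    let mut out = Vec::new();
    if let Some(list) = args.list() {
        for element in list.mixed() {
            // each element is either a nested meta item or a bare literal
            if let Some(meta) = element.meta_item() {
                let (name, inner_args) = meta.word_or_empty();
                out.push((name.name, inner_args.name_value().and_then(|nv| nv.value_as_str())));
            }
        }
    }
    out
}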
+ pub fn single(&self) -> Option<&MetaItemOrLitParser<'a>> { + let mut iter = self.mixed(); + iter.next().filter(|_| iter.next().is_none()) + } +} diff --git a/compiler/rustc_attr_parsing/src/session_diagnostics.rs b/compiler/rustc_attr_parsing/src/session_diagnostics.rs index b1d8ec497740d..6d495b7a8bf82 100644 --- a/compiler/rustc_attr_parsing/src/session_diagnostics.rs +++ b/compiler/rustc_attr_parsing/src/session_diagnostics.rs @@ -6,9 +6,16 @@ use rustc_errors::{Applicability, Diag, DiagCtxtHandle, Diagnostic, EmissionGuar use rustc_macros::{Diagnostic, Subdiagnostic}; use rustc_span::{Span, Symbol}; -use crate::attributes::util::UnsupportedLiteralReason; use crate::fluent_generated as fluent; +pub(crate) enum UnsupportedLiteralReason { + Generic, + CfgString, + CfgBoolean, + DeprecatedString, + DeprecatedKvPair, +} + #[derive(Diagnostic)] #[diag(attr_parsing_expected_one_cfg_pattern, code = E0536)] pub(crate) struct ExpectedOneCfgPattern { @@ -39,6 +46,21 @@ pub(crate) struct MultipleItem { pub(crate) struct IncorrectMetaItem { #[primary_span] pub span: Span, + + #[subdiagnostic] + pub suggestion: Option, +} + +#[derive(Subdiagnostic)] +#[multipart_suggestion( + attr_parsing_incorrect_meta_item_suggestion, + applicability = "maybe-incorrect" +)] +pub(crate) struct IncorrectMetaItemSuggestion { + #[suggestion_part(code = "\"")] + pub lo: Span, + #[suggestion_part(code = "\"")] + pub hi: Span, } /// Error code: E0541 @@ -331,13 +353,6 @@ pub(crate) struct RustcPromotablePairing { pub span: Span, } -#[derive(Diagnostic)] -#[diag(attr_parsing_rustc_const_stable_indirect_pairing)] -pub(crate) struct RustcConstStableIndirectPairing { - #[primary_span] - pub span: Span, -} - #[derive(Diagnostic)] #[diag(attr_parsing_rustc_allowed_unstable_pairing, code = E0789)] pub(crate) struct RustcAllowedUnstablePairing { @@ -417,3 +432,44 @@ pub(crate) struct UnknownVersionLiteral { #[primary_span] pub span: Span, } + +// FIXME(jdonszelmann) duplicated from `rustc_passes`, remove once `check_attr` is integrated. 
+#[derive(Diagnostic)] +#[diag(attr_parsing_unused_multiple)] +pub(crate) struct UnusedMultiple { + #[primary_span] + #[suggestion(code = "", applicability = "machine-applicable")] + pub this: Span, + #[note] + pub other: Span, + pub name: Symbol, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_stability_outside_std, code = E0734)] +pub(crate) struct StabilityOutsideStd { + #[primary_span] + pub span: Span, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_empty_confusables)] +pub(crate) struct EmptyConfusables { + #[primary_span] + pub span: Span, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_repr_ident, code = E0565)] +pub(crate) struct ReprIdent { + #[primary_span] + pub span: Span, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_unrecognized_repr_hint, code = E0552)] +#[help] +pub(crate) struct UnrecognizedReprHint { + #[primary_span] + pub span: Span, +} diff --git a/compiler/rustc_borrowck/messages.ftl b/compiler/rustc_borrowck/messages.ftl index ee4b2f95cb151..ada20e5c614f8 100644 --- a/compiler/rustc_borrowck/messages.ftl +++ b/compiler/rustc_borrowck/messages.ftl @@ -213,6 +213,10 @@ borrowck_suggest_create_fresh_reborrow = borrowck_suggest_iterate_over_slice = consider iterating over a slice of the `{$ty}`'s content to avoid moving into the `for` loop +borrowck_tail_expr_drop_order = relative drop order changing in Rust 2024 + .label = this temporary value will be dropped at the end of the block + .note = consider using a `let` binding to ensure the value will live long enough + borrowck_ty_no_impl_copy = {$is_partial_move -> [true] partial move diff --git a/compiler/rustc_borrowck/src/borrow_set.rs b/compiler/rustc_borrowck/src/borrow_set.rs index ff838fbbb8868..303fa469332e0 100644 --- a/compiler/rustc_borrowck/src/borrow_set.rs +++ b/compiler/rustc_borrowck/src/borrow_set.rs @@ -2,7 +2,7 @@ use std::fmt; use std::ops::Index; use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::mir::visit::{MutatingUseContext, NonUseContext, PlaceContext, Visitor}; use rustc_middle::mir::{self, Body, Local, Location, traversal}; use rustc_middle::span_bug; @@ -11,7 +11,6 @@ use rustc_mir_dataflow::move_paths::MoveData; use tracing::debug; use crate::BorrowIndex; -use crate::path_utils::allow_two_phase_borrow; use crate::place_ext::PlaceExt; pub struct BorrowSet<'tcx> { @@ -132,7 +131,7 @@ impl<'tcx> fmt::Display for BorrowData<'tcx> { pub enum LocalsStateAtExit { AllAreInvalidated, - SomeAreInvalidated { has_storage_dead_or_moved: BitSet }, + SomeAreInvalidated { has_storage_dead_or_moved: DenseBitSet }, } impl LocalsStateAtExit { @@ -141,7 +140,7 @@ impl LocalsStateAtExit { body: &Body<'tcx>, move_data: &MoveData<'tcx>, ) -> Self { - struct HasStorageDead(BitSet); + struct HasStorageDead(DenseBitSet); impl<'tcx> Visitor<'tcx> for HasStorageDead { fn visit_local(&mut self, local: Local, ctx: PlaceContext, _: Location) { @@ -154,7 +153,8 @@ impl LocalsStateAtExit { if locals_are_invalidated_at_exit { LocalsStateAtExit::AllAreInvalidated } else { - let mut has_storage_dead = HasStorageDead(BitSet::new_empty(body.local_decls.len())); + let mut has_storage_dead = + HasStorageDead(DenseBitSet::new_empty(body.local_decls.len())); has_storage_dead.visit_body(body); let mut has_storage_dead_or_moved = has_storage_dead.0; for move_out in &move_data.moves { @@ -350,7 +350,7 @@ impl<'a, 'tcx> GatherBorrows<'a, 'tcx> { start_location, assigned_place, borrow_index, ); - if !allow_two_phase_borrow(kind) { + 
if !kind.allows_two_phase_borrow() { debug!(" -> {:?}", start_location); return; } diff --git a/compiler/rustc_borrowck/src/consumers.rs b/compiler/rustc_borrowck/src/consumers.rs index 74de766ba2317..5a89f7c351cf6 100644 --- a/compiler/rustc_borrowck/src/consumers.rs +++ b/compiler/rustc_borrowck/src/consumers.rs @@ -8,11 +8,12 @@ use rustc_middle::ty::TyCtxt; pub use super::borrow_set::{BorrowData, BorrowSet, TwoPhaseActivation}; pub use super::constraints::OutlivesConstraint; pub use super::dataflow::{BorrowIndex, Borrows, calculate_borrows_out_of_scope_at_location}; -pub use super::facts::{AllFacts as PoloniusInput, PoloniusRegionVid, RustcFacts}; -pub use super::location::{LocationTable, RichLocation}; -pub use super::nll::PoloniusOutput; pub use super::place_ext::PlaceExt; pub use super::places_conflict::{PlaceConflictBias, places_conflict}; +pub use super::polonius::legacy::{ + PoloniusFacts as PoloniusInput, PoloniusLocationTable, PoloniusOutput, PoloniusRegionVid, + RichLocation, RustcFacts, +}; pub use super::region_infer::RegionInferenceContext; /// Options determining the output behavior of [`get_body_with_borrowck_facts`]. @@ -32,7 +33,7 @@ pub enum ConsumerOptions { /// without significant slowdowns. /// /// Implies [`RegionInferenceContext`](ConsumerOptions::RegionInferenceContext), - /// and additionally retrieve the [`LocationTable`] and [`PoloniusInput`] that + /// and additionally retrieve the [`PoloniusLocationTable`] and [`PoloniusInput`] that /// would be given to Polonius. Critically, this does not run Polonius, which /// one may want to avoid due to performance issues on large bodies. PoloniusInputFacts, @@ -70,7 +71,7 @@ pub struct BodyWithBorrowckFacts<'tcx> { /// The table that maps Polonius points to locations in the table. /// Populated when using [`ConsumerOptions::PoloniusInputFacts`] /// or [`ConsumerOptions::PoloniusOutputFacts`]. - pub location_table: Option, + pub location_table: Option, /// Polonius input facts. /// Populated when using [`ConsumerOptions::PoloniusInputFacts`] /// or [`ConsumerOptions::PoloniusOutputFacts`]. diff --git a/compiler/rustc_borrowck/src/dataflow.rs b/compiler/rustc_borrowck/src/dataflow.rs index dc4eab766c932..7511a55b03ae5 100644 --- a/compiler/rustc_borrowck/src/dataflow.rs +++ b/compiler/rustc_borrowck/src/dataflow.rs @@ -2,7 +2,7 @@ use std::fmt; use rustc_data_structures::fx::FxIndexMap; use rustc_data_structures::graph; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::mir::{ self, BasicBlock, Body, CallReturnPlaces, Location, Place, TerminatorEdges, }; @@ -12,7 +12,7 @@ use rustc_mir_dataflow::impls::{ EverInitializedPlaces, EverInitializedPlacesDomain, MaybeUninitializedPlaces, MaybeUninitializedPlacesDomain, }; -use rustc_mir_dataflow::{Analysis, GenKill, JoinSemiLattice, SwitchIntEdgeEffects}; +use rustc_mir_dataflow::{Analysis, GenKill, JoinSemiLattice}; use tracing::debug; use crate::{BorrowSet, PlaceConflictBias, PlaceExt, RegionInferenceContext, places_conflict}; @@ -101,16 +101,6 @@ impl<'a, 'tcx> Analysis<'tcx> for Borrowck<'a, 'tcx> { // This is only reachable from `iterate_to_fixpoint`, which this analysis doesn't use. unreachable!(); } - - fn apply_switch_int_edge_effects( - &mut self, - _block: BasicBlock, - _discr: &mir::Operand<'tcx>, - _apply_edge_effects: &mut impl SwitchIntEdgeEffects, - ) { - // This is only reachable from `iterate_to_fixpoint`, which this analysis doesn't use. 
- unreachable!(); - } } impl JoinSemiLattice for BorrowckDomain { @@ -190,26 +180,35 @@ pub struct Borrows<'a, 'tcx> { } struct OutOfScopePrecomputer<'a, 'tcx> { - visited: BitSet, + visited: DenseBitSet, visit_stack: Vec, body: &'a Body<'tcx>, regioncx: &'a RegionInferenceContext<'tcx>, borrows_out_of_scope_at_location: FxIndexMap>, } -impl<'a, 'tcx> OutOfScopePrecomputer<'a, 'tcx> { - fn new(body: &'a Body<'tcx>, regioncx: &'a RegionInferenceContext<'tcx>) -> Self { - OutOfScopePrecomputer { - visited: BitSet::new_empty(body.basic_blocks.len()), +impl<'tcx> OutOfScopePrecomputer<'_, 'tcx> { + fn compute( + body: &Body<'tcx>, + regioncx: &RegionInferenceContext<'tcx>, + borrow_set: &BorrowSet<'tcx>, + ) -> FxIndexMap> { + let mut prec = OutOfScopePrecomputer { + visited: DenseBitSet::new_empty(body.basic_blocks.len()), visit_stack: vec![], body, regioncx, borrows_out_of_scope_at_location: FxIndexMap::default(), + }; + for (borrow_index, borrow_data) in borrow_set.iter_enumerated() { + let borrow_region = borrow_data.region; + let location = borrow_data.reserve_location; + prec.precompute_borrows_out_of_scope(borrow_index, borrow_region, location); } + + prec.borrows_out_of_scope_at_location } -} -impl<'tcx> OutOfScopePrecomputer<'_, 'tcx> { fn precompute_borrows_out_of_scope( &mut self, borrow_index: BorrowIndex, @@ -290,19 +289,11 @@ pub fn calculate_borrows_out_of_scope_at_location<'tcx>( regioncx: &RegionInferenceContext<'tcx>, borrow_set: &BorrowSet<'tcx>, ) -> FxIndexMap> { - let mut prec = OutOfScopePrecomputer::new(body, regioncx); - for (borrow_index, borrow_data) in borrow_set.iter_enumerated() { - let borrow_region = borrow_data.region; - let location = borrow_data.reserve_location; - - prec.precompute_borrows_out_of_scope(borrow_index, borrow_region, location); - } - - prec.borrows_out_of_scope_at_location + OutOfScopePrecomputer::compute(body, regioncx, borrow_set) } struct PoloniusOutOfScopePrecomputer<'a, 'tcx> { - visited: BitSet, + visited: DenseBitSet, visit_stack: Vec, body: &'a Body<'tcx>, regioncx: &'a RegionInferenceContext<'tcx>, @@ -310,19 +301,30 @@ struct PoloniusOutOfScopePrecomputer<'a, 'tcx> { loans_out_of_scope_at_location: FxIndexMap>, } -impl<'a, 'tcx> PoloniusOutOfScopePrecomputer<'a, 'tcx> { - fn new(body: &'a Body<'tcx>, regioncx: &'a RegionInferenceContext<'tcx>) -> Self { - Self { - visited: BitSet::new_empty(body.basic_blocks.len()), +impl<'tcx> PoloniusOutOfScopePrecomputer<'_, 'tcx> { + fn compute( + body: &Body<'tcx>, + regioncx: &RegionInferenceContext<'tcx>, + borrow_set: &BorrowSet<'tcx>, + ) -> FxIndexMap> { + // The in-tree polonius analysis computes loans going out of scope using the + // set-of-loans model. + let mut prec = PoloniusOutOfScopePrecomputer { + visited: DenseBitSet::new_empty(body.basic_blocks.len()), visit_stack: vec![], body, regioncx, loans_out_of_scope_at_location: FxIndexMap::default(), + }; + for (loan_idx, loan_data) in borrow_set.iter_enumerated() { + let issuing_region = loan_data.region; + let loan_issued_at = loan_data.reserve_location; + prec.precompute_loans_out_of_scope(loan_idx, issuing_region, loan_issued_at); } + + prec.loans_out_of_scope_at_location } -} -impl<'tcx> PoloniusOutOfScopePrecomputer<'_, 'tcx> { /// Loans are in scope while they are live: whether they are contained within any live region. /// In the location-insensitive analysis, a loan will be contained in a region if the issuing /// region can reach it in the subset graph. So this is a reachability problem. 
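// An aside, not part of the patch: the scope check described above boils down to graph
// reachability over the region subset graph. A self-contained toy version, with regions as
// plain indices and the subset graph as adjacency lists (names here are illustrative only):
fn region_reaches(subset_graph: &[Vec<usize>], issuing: usize, live: usize) -> bool {
    let mut visited = vec![false; subset_graph.len()];
    let mut stack = vec![issuing];
    while let Some(region) = stack.pop() {
        if region == live {
            return true;
        }
        if visited[region] {
            continue;
        }
        visited[region] = true;
        stack.extend(&subset_graph[region]);
    }
    false
}
// e.g. with edges 0 -> 1 -> 2, `region_reaches(&[vec![1], vec![2], vec![]], 0, 2)` is true.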
@@ -335,10 +337,17 @@ impl<'tcx> PoloniusOutOfScopePrecomputer<'_, 'tcx> { let sccs = self.regioncx.constraint_sccs(); let universal_regions = self.regioncx.universal_regions(); + // The loop below was useful for the location-insensitive analysis but shouldn't be + // impactful in the location-sensitive case. It seems that it does, however, as without it a + // handful of tests fail. That likely means some liveness or outlives data related to choice + // regions is missing + // FIXME: investigate the impact of loans traversing applied member constraints and why some + // tests fail otherwise. + // // We first handle the cases where the loan doesn't go out of scope, depending on the // issuing region's successors. for successor in graph::depth_first_search(&self.regioncx.region_graph(), issuing_region) { - // 1. Via applied member constraints + // Via applied member constraints // // The issuing region can flow into the choice regions, and they are either: // - placeholders or free regions themselves, @@ -356,14 +365,6 @@ impl<'tcx> PoloniusOutOfScopePrecomputer<'_, 'tcx> { return; } } - - // 2. Via regions that are live at all points: placeholders and free regions. - // - // If the issuing region outlives such a region, its loan escapes the function and - // cannot go out of scope. We can early return. - if self.regioncx.is_region_live_at_all_points(successor) { - return; - } } let first_block = loan_issued_at.block; @@ -471,34 +472,12 @@ impl<'a, 'tcx> Borrows<'a, 'tcx> { regioncx: &RegionInferenceContext<'tcx>, borrow_set: &'a BorrowSet<'tcx>, ) -> Self { - let mut borrows_out_of_scope_at_location = - calculate_borrows_out_of_scope_at_location(body, regioncx, borrow_set); - - // The in-tree polonius analysis computes loans going out of scope using the set-of-loans - // model, and makes sure they're identical to the existing computation of the set-of-points - // model. - if tcx.sess.opts.unstable_opts.polonius.is_next_enabled() { - let mut polonius_prec = PoloniusOutOfScopePrecomputer::new(body, regioncx); - for (loan_idx, loan_data) in borrow_set.iter_enumerated() { - let issuing_region = loan_data.region; - let loan_issued_at = loan_data.reserve_location; - - polonius_prec.precompute_loans_out_of_scope( - loan_idx, - issuing_region, - loan_issued_at, - ); - } - - assert_eq!( - borrows_out_of_scope_at_location, polonius_prec.loans_out_of_scope_at_location, - "polonius loan scopes differ from NLL borrow scopes, for body {:?}", - body.span, - ); - - borrows_out_of_scope_at_location = polonius_prec.loans_out_of_scope_at_location; - } - + let borrows_out_of_scope_at_location = + if !tcx.sess.opts.unstable_opts.polonius.is_next_enabled() { + calculate_borrows_out_of_scope_at_location(body, regioncx, borrow_set) + } else { + PoloniusOutOfScopePrecomputer::compute(body, regioncx, borrow_set) + }; Borrows { tcx, body, borrow_set, borrows_out_of_scope_at_location } } @@ -569,7 +548,7 @@ impl<'a, 'tcx> Borrows<'a, 'tcx> { } } -type BorrowsDomain = BitSet; +type BorrowsDomain = DenseBitSet; /// Forward dataflow computation of the set of borrows that are in scope at a particular location. 
/// - we gen the introduced loans @@ -585,7 +564,7 @@ impl<'tcx> rustc_mir_dataflow::Analysis<'tcx> for Borrows<'_, 'tcx> { fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain { // bottom = nothing is reserved or activated yet; - BitSet::new_empty(self.borrow_set.len()) + DenseBitSet::new_empty(self.borrow_set.len()) } fn initialize_start_block(&self, _: &mir::Body<'tcx>, _: &mut Self::Domain) { diff --git a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs index 8dcc5324fdfb0..da59f9f9ebd5d 100644 --- a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs @@ -20,7 +20,7 @@ use rustc_middle::bug; use rustc_middle::hir::nested_filter::OnlyBodies; use rustc_middle::mir::tcx::PlaceTy; use rustc_middle::mir::{ - self, AggregateKind, BindingForm, BorrowKind, CallSource, ClearCrossCrate, ConstraintCategory, + self, AggregateKind, BindingForm, BorrowKind, ClearCrossCrate, ConstraintCategory, FakeBorrowKind, FakeReadCause, LocalDecl, LocalInfo, LocalKind, Location, MutBorrowKind, Operand, Place, PlaceRef, ProjectionElem, Rvalue, Statement, StatementKind, Terminator, TerminatorKind, VarBindingForm, VarDebugInfoContents, @@ -30,14 +30,13 @@ use rustc_middle::ty::{ self, PredicateKind, Ty, TyCtxt, TypeSuperVisitable, TypeVisitor, Upcast, suggest_constraining_type_params, }; -use rustc_middle::util::CallKind; use rustc_mir_dataflow::move_paths::{InitKind, MoveOutIndex, MovePathIndex}; use rustc_span::def_id::{DefId, LocalDefId}; use rustc_span::hygiene::DesugaringKind; -use rustc_span::symbol::{Ident, kw, sym}; -use rustc_span::{BytePos, Span, Symbol}; +use rustc_span::{BytePos, Ident, Span, Symbol, kw, sym}; use rustc_trait_selection::error_reporting::InferCtxtErrorExt; use rustc_trait_selection::error_reporting::traits::FindExprBySpan; +use rustc_trait_selection::error_reporting::traits::call_kind::CallKind; use rustc_trait_selection::infer::InferCtxtExt; use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt as _; use rustc_trait_selection::traits::{Obligation, ObligationCause, ObligationCtxt}; @@ -47,7 +46,7 @@ use super::explain_borrow::{BorrowExplanation, LaterUseKind}; use super::{DescribePlaceOpt, RegionName, RegionNameSource, UseSpans}; use crate::borrow_set::{BorrowData, TwoPhaseActivation}; use crate::diagnostics::conflict_errors::StorageDeadOrDrop::LocalStorageDead; -use crate::diagnostics::{CapturedMessageOpt, Instance, find_all_local_uses}; +use crate::diagnostics::{CapturedMessageOpt, call_kind, find_all_local_uses}; use crate::prefixes::IsPrefixOf; use crate::{InitializationRequiringAction, MirBorrowckCtxt, WriteKind, borrowck_errors}; @@ -306,7 +305,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { } if let UseSpans::FnSelfUse { - kind: CallKind::DerefCoercion { deref_target, deref_target_ty, .. }, + kind: CallKind::DerefCoercion { deref_target_span, deref_target_ty, .. }, .. 
} = use_spans { @@ -316,8 +315,10 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { )); // Check first whether the source is accessible (issue #87060) - if self.infcx.tcx.sess.source_map().is_span_accessible(deref_target) { - err.span_note(deref_target, "deref defined here"); + if let Some(deref_target_span) = deref_target_span + && self.infcx.tcx.sess.source_map().is_span_accessible(deref_target_span) + { + err.span_note(deref_target_span, "deref defined here"); } } @@ -1517,15 +1518,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { }); self.explain_why_borrow_contains_point(location, borrow, None) - .add_explanation_to_diagnostic( - self.infcx.tcx, - self.body, - &self.local_names, - &mut err, - "", - Some(borrow_span), - None, - ); + .add_explanation_to_diagnostic(&self, &mut err, "", Some(borrow_span), None); self.suggest_copy_for_type_in_cloned_ref(&mut err, place); let typeck_results = self.infcx.tcx.typeck(self.mir_def_id()); if let Some(expr) = self.find_expr(borrow_span) { @@ -1592,15 +1585,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { }); self.explain_why_borrow_contains_point(location, borrow, None) - .add_explanation_to_diagnostic( - self.infcx.tcx, - self.body, - &self.local_names, - &mut err, - "", - None, - None, - ); + .add_explanation_to_diagnostic(&self, &mut err, "", None, None); err } @@ -1887,9 +1872,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { } explanation.add_explanation_to_diagnostic( - self.infcx.tcx, - self.body, - &self.local_names, + &self, &mut err, first_borrow_desc, None, @@ -2699,22 +2682,19 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { return; } - let mut sugg = vec![]; let sm = self.infcx.tcx.sess.source_map(); - - if let Some(span) = finder.closure_arg_span { - sugg.push((sm.next_point(span.shrink_to_lo()).shrink_to_hi(), finder.suggest_arg)); - } - for span in finder.closure_change_spans { - sugg.push((span, "this".to_string())); - } - - for (span, suggest) in finder.closure_call_changes { - sugg.push((span, suggest)); - } + let sugg = finder + .closure_arg_span + .map(|span| (sm.next_point(span.shrink_to_lo()).shrink_to_hi(), finder.suggest_arg)) + .into_iter() + .chain( + finder.closure_change_spans.into_iter().map(|span| (span, "this".to_string())), + ) + .chain(finder.closure_call_changes) + .collect(); err.multipart_suggestion_verbose( - "try explicitly pass `&Self` into the Closure as an argument", + "try explicitly passing `&Self` into the closure as an argument", sugg, Applicability::MachineApplicable, ); @@ -3047,15 +3027,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { if let BorrowExplanation::MustBeValidFor { .. 
} = explanation { } else { - explanation.add_explanation_to_diagnostic( - self.infcx.tcx, - self.body, - &self.local_names, - &mut err, - "", - None, - None, - ); + explanation.add_explanation_to_diagnostic(&self, &mut err, "", None, None); } } else { err.span_label(borrow_span, "borrowed value does not live long enough"); @@ -3068,15 +3040,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { } }); - explanation.add_explanation_to_diagnostic( - self.infcx.tcx, - self.body, - &self.local_names, - &mut err, - "", - Some(borrow_span), - None, - ); + explanation.add_explanation_to_diagnostic(&self, &mut err, "", Some(borrow_span), None); } err @@ -3129,15 +3093,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { _ => {} } - explanation.add_explanation_to_diagnostic( - self.infcx.tcx, - self.body, - &self.local_names, - &mut err, - "", - None, - None, - ); + explanation.add_explanation_to_diagnostic(&self, &mut err, "", None, None); self.buffer_error(err); } @@ -3310,15 +3266,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { } _ => {} } - explanation.add_explanation_to_diagnostic( - self.infcx.tcx, - self.body, - &self.local_names, - &mut err, - "", - None, - None, - ); + explanation.add_explanation_to_diagnostic(&self, &mut err, "", None, None); borrow_spans.args_subdiag(&mut err, |args_span| { crate::session_diagnostics::CaptureArgLabel::Capture { @@ -3809,15 +3757,8 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { } }); - self.explain_why_borrow_contains_point(location, loan, None).add_explanation_to_diagnostic( - self.infcx.tcx, - self.body, - &self.local_names, - &mut err, - "", - None, - None, - ); + self.explain_why_borrow_contains_point(location, loan, None) + .add_explanation_to_diagnostic(&self, &mut err, "", None, None); self.explain_deref_coercion(loan, &mut err); @@ -3826,38 +3767,27 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { fn explain_deref_coercion(&mut self, loan: &BorrowData<'tcx>, err: &mut Diag<'_>) { let tcx = self.infcx.tcx; - if let ( - Some(Terminator { - kind: TerminatorKind::Call { call_source: CallSource::OverloadedOperator, .. }, - .. - }), - Some((method_did, method_args)), - ) = ( - &self.body[loan.reserve_location.block].terminator, - rustc_middle::util::find_self_call( + if let Some(Terminator { kind: TerminatorKind::Call { call_source, fn_span, .. }, .. }) = + &self.body[loan.reserve_location.block].terminator + && let Some((method_did, method_args)) = rustc_middle::util::find_self_call( tcx, self.body, loan.assigned_place.local, loan.reserve_location.block, - ), - ) { - if tcx.is_diagnostic_item(sym::deref_method, method_did) { - let deref_target = - tcx.get_diagnostic_item(sym::deref_target).and_then(|deref_target| { - Instance::try_resolve( - tcx, - self.infcx.typing_env(self.infcx.param_env), - deref_target, - method_args, - ) - .transpose() - }); - if let Some(Ok(instance)) = deref_target { - let deref_target_ty = - instance.ty(tcx, self.infcx.typing_env(self.infcx.param_env)); - err.note(format!("borrow occurs due to deref coercion to `{deref_target_ty}`")); - err.span_note(tcx.def_span(instance.def_id()), "deref defined here"); - } + ) + && let CallKind::DerefCoercion { deref_target_span, deref_target_ty, .. 
} = call_kind( + self.infcx.tcx, + self.infcx.typing_env(self.infcx.param_env), + method_did, + method_args, + *fn_span, + call_source.from_hir_call(), + Some(self.infcx.tcx.fn_arg_names(method_did)[0]), + ) + { + err.note(format!("borrow occurs due to deref coercion to `{deref_target_ty}`")); + if let Some(deref_target_span) = deref_target_span { + err.span_note(deref_target_span, "deref defined here"); } } } diff --git a/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs b/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs index 45a8ef0adb6a7..5c0c1d0eb86af 100644 --- a/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs +++ b/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs @@ -5,10 +5,9 @@ use std::assert_matches::assert_matches; -use rustc_errors::{Applicability, Diag}; +use rustc_errors::{Applicability, Diag, EmissionGuarantee}; use rustc_hir as hir; use rustc_hir::intravisit::Visitor; -use rustc_index::IndexSlice; use rustc_infer::infer::NllRegionVariableOrigin; use rustc_middle::middle::resolve_bound_vars::ObjectLifetimeDefault; use rustc_middle::mir::{ @@ -17,15 +16,16 @@ use rustc_middle::mir::{ }; use rustc_middle::ty::adjustment::PointerCoercion; use rustc_middle::ty::{self, RegionVid, Ty, TyCtxt}; -use rustc_span::symbol::{Symbol, kw}; -use rustc_span::{DesugaringKind, Span, sym}; +use rustc_span::{DesugaringKind, Span, kw, sym}; use rustc_trait_selection::error_reporting::traits::FindExprBySpan; +use rustc_trait_selection::error_reporting::traits::call_kind::CallKind; use tracing::{debug, instrument}; use super::{RegionName, UseSpans, find_use}; use crate::borrow_set::BorrowData; +use crate::constraints::OutlivesConstraint; use crate::nll::ConstraintDescription; -use crate::region_infer::{BlameConstraint, Cause, ExtraConstraintInfo}; +use crate::region_infer::{BlameConstraint, Cause}; use crate::{MirBorrowckCtxt, WriteKind}; #[derive(Debug)] @@ -43,7 +43,7 @@ pub(crate) enum BorrowExplanation<'tcx> { span: Span, region_name: RegionName, opt_place_desc: Option, - extra_info: Vec, + path: Vec>, }, Unexplained, } @@ -61,16 +61,18 @@ impl<'tcx> BorrowExplanation<'tcx> { pub(crate) fn is_explained(&self) -> bool { !matches!(self, BorrowExplanation::Unexplained) } - pub(crate) fn add_explanation_to_diagnostic( + pub(crate) fn add_explanation_to_diagnostic( &self, - tcx: TyCtxt<'tcx>, - body: &Body<'tcx>, - local_names: &IndexSlice>, - err: &mut Diag<'_>, + cx: &MirBorrowckCtxt<'_, '_, 'tcx>, + err: &mut Diag<'_, G>, borrow_desc: &str, borrow_span: Option, multiple_borrow_span: Option<(Span, Span)>, ) { + let tcx = cx.infcx.tcx; + let body = cx.body; + let local_names = &cx.local_names; + if let Some(span) = borrow_span { let def_id = body.source.def_id(); if let Some(node) = tcx.hir().get_if_local(def_id) @@ -306,7 +308,7 @@ impl<'tcx> BorrowExplanation<'tcx> { ref region_name, ref opt_place_desc, from_closure: _, - ref extra_info, + ref path, } => { region_name.highlight_region_name(err); @@ -328,13 +330,8 @@ impl<'tcx> BorrowExplanation<'tcx> { ); }; - for extra in extra_info { - match extra { - ExtraConstraintInfo::PlaceholderFromPredicate(span) => { - err.span_note(*span, "due to current limitations in the borrow checker, this implies a `'static` lifetime"); - } - } - } + cx.add_placeholder_from_predicate_note(err, &path); + cx.add_sized_or_copy_bound_info(err, category, &path); if let ConstraintCategory::Cast { is_implicit_coercion: true, @@ -349,10 +346,10 @@ impl<'tcx> BorrowExplanation<'tcx> { } } - fn add_object_lifetime_default_note( + fn 
add_object_lifetime_default_note( &self, tcx: TyCtxt<'tcx>, - err: &mut Diag<'_>, + err: &mut Diag<'_, G>, unsize_ty: Ty<'tcx>, ) { if let ty::Adt(def, args) = unsize_ty.kind() { @@ -406,9 +403,9 @@ impl<'tcx> BorrowExplanation<'tcx> { } } - fn add_lifetime_bound_suggestion_to_diagnostic( + fn add_lifetime_bound_suggestion_to_diagnostic( &self, - err: &mut Diag<'_>, + err: &mut Diag<'_, G>, category: &ConstraintCategory<'tcx>, span: Span, region_name: &RegionName, @@ -435,14 +432,14 @@ impl<'tcx> BorrowExplanation<'tcx> { } } -fn suggest_rewrite_if_let( +fn suggest_rewrite_if_let( tcx: TyCtxt<'_>, expr: &hir::Expr<'_>, pat: &str, init: &hir::Expr<'_>, conseq: &hir::Expr<'_>, alt: Option<&hir::Expr<'_>>, - err: &mut Diag<'_>, + err: &mut Diag<'_, G>, ) { let source_map = tcx.sess.source_map(); err.span_note( @@ -487,8 +484,9 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, 'tcx> { &self, borrow_region: RegionVid, outlived_region: RegionVid, - ) -> (ConstraintCategory<'tcx>, bool, Span, Option, Vec) { - let (blame_constraint, extra_info) = self.regioncx.best_blame_constraint( + ) -> (ConstraintCategory<'tcx>, bool, Span, Option, Vec>) + { + let (blame_constraint, path) = self.regioncx.best_blame_constraint( borrow_region, NllRegionVariableOrigin::FreeRegion, |r| self.regioncx.provides_universal_region(r, borrow_region, outlived_region), @@ -497,7 +495,7 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, 'tcx> { let outlived_fr_name = self.give_region_a_name(outlived_region); - (category, from_closure, cause.span, outlived_fr_name, extra_info) + (category, from_closure, cause.span, outlived_fr_name, path) } /// Returns structured explanation for *why* the borrow contains the @@ -596,7 +594,7 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, 'tcx> { None => { if let Some(region) = self.to_error_region_vid(borrow_region_vid) { - let (category, from_closure, span, region_name, extra_info) = + let (category, from_closure, span, region_name, path) = self.free_region_constraint_info(borrow_region_vid, region); if let Some(region_name) = region_name { let opt_place_desc = self.describe_place(borrow.borrowed_place.as_ref()); @@ -606,7 +604,7 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, 'tcx> { span, region_name, opt_place_desc, - extra_info, + path, } } else { debug!("Could not generate a region name"); @@ -636,6 +634,39 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, 'tcx> { // Used in a closure. (LaterUseKind::ClosureCapture, capture_kind_span, Some(path_span)) } + // In the case that the borrowed value (probably a temporary) + // overlaps with the method's receiver, then point at the method. + UseSpans::FnSelfUse { + var_span: span, + kind: CallKind::Normal { desugaring: None, .. }, + .. + } if span + .overlaps(self.body.local_decls[borrow.assigned_place.local].source_info.span) => + { + if let TerminatorKind::Call { func, call_source: CallSource::Normal, .. } = + &self.body.basic_blocks[location.block].terminator().kind + { + // Just point to the function, to reduce the chance of overlapping spans. + let function_span = match func { + Operand::Constant(c) => c.span, + Operand::Copy(place) | Operand::Move(place) => { + if let Some(l) = place.as_local() { + let local_decl = &self.body.local_decls[l]; + if self.local_names[l].is_none() { + local_decl.source_info.span + } else { + span + } + } else { + span + } + } + }; + (LaterUseKind::Call, function_span, None) + } else { + (LaterUseKind::Other, span, None) + } + } UseSpans::PatUse(span) | UseSpans::OtherUse(span) | UseSpans::FnSelfUse { var_span: span, .. 
} => { diff --git a/compiler/rustc_borrowck/src/diagnostics/mod.rs b/compiler/rustc_borrowck/src/diagnostics/mod.rs index 92afad62aa4c2..bd6f77156ca99 100644 --- a/compiler/rustc_borrowck/src/diagnostics/mod.rs +++ b/compiler/rustc_borrowck/src/diagnostics/mod.rs @@ -1,28 +1,32 @@ //! Borrow checker diagnostics. +use std::collections::BTreeMap; + use rustc_abi::{FieldIdx, VariantIdx}; -use rustc_errors::{Applicability, Diag, MultiSpan}; +use rustc_data_structures::fx::FxIndexMap; +use rustc_errors::{Applicability, Diag, EmissionGuarantee, MultiSpan}; use rustc_hir::def::{CtorKind, Namespace}; use rustc_hir::{self as hir, CoroutineKind, LangItem}; use rustc_index::IndexSlice; -use rustc_infer::infer::BoundRegionConversionTime; +use rustc_infer::infer::{ + BoundRegionConversionTime, NllRegionVariableOrigin, RegionVariableOrigin, +}; use rustc_infer::traits::SelectionError; use rustc_middle::bug; use rustc_middle::mir::tcx::PlaceTy; use rustc_middle::mir::{ - AggregateKind, CallSource, ConstOperand, FakeReadCause, Local, LocalInfo, LocalKind, Location, - Operand, Place, PlaceRef, ProjectionElem, Rvalue, Statement, StatementKind, Terminator, - TerminatorKind, + AggregateKind, CallSource, ConstOperand, ConstraintCategory, FakeReadCause, Local, LocalInfo, + LocalKind, Location, Operand, Place, PlaceRef, ProjectionElem, Rvalue, Statement, + StatementKind, Terminator, TerminatorKind, }; use rustc_middle::ty::print::Print; -use rustc_middle::ty::{self, Instance, Ty, TyCtxt}; -use rustc_middle::util::{CallDesugaringKind, call_kind}; -use rustc_mir_dataflow::move_paths::{InitLocation, LookupResult}; +use rustc_middle::ty::{self, Ty, TyCtxt}; +use rustc_mir_dataflow::move_paths::{InitLocation, LookupResult, MoveOutIndex}; use rustc_span::def_id::LocalDefId; use rustc_span::source_map::Spanned; -use rustc_span::symbol::sym; -use rustc_span::{DUMMY_SP, Span, Symbol}; +use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, Symbol, sym}; use rustc_trait_selection::error_reporting::InferCtxtErrorExt; +use rustc_trait_selection::error_reporting::traits::call_kind::{CallDesugaringKind, call_kind}; use rustc_trait_selection::infer::InferCtxtExt; use rustc_trait_selection::traits::{ FulfillmentErrorCode, type_known_to_meet_bound_modulo_regions, @@ -31,7 +35,9 @@ use tracing::debug; use super::MirBorrowckCtxt; use super::borrow_set::BorrowData; +use crate::constraints::OutlivesConstraint; use crate::fluent_generated as fluent; +use crate::nll::ConstraintDescription; use crate::session_diagnostics::{ CaptureArgLabel, CaptureReasonLabel, CaptureReasonNote, CaptureReasonSuggest, CaptureVarCause, CaptureVarKind, CaptureVarPathUseCause, OnClosureNote, @@ -57,7 +63,7 @@ pub(crate) use mutability_errors::AccessKind; pub(crate) use outlives_suggestion::OutlivesSuggestionBuilder; pub(crate) use region_errors::{ErrorConstraintInfo, RegionErrorKind, RegionErrors}; pub(crate) use region_name::{RegionName, RegionNameSource}; -pub(crate) use rustc_middle::util::CallKind; +pub(crate) use rustc_trait_selection::error_reporting::traits::call_kind::CallKind; pub(super) struct DescribePlaceOpt { including_downcast: bool, @@ -69,6 +75,126 @@ pub(super) struct DescribePlaceOpt { pub(super) struct IncludingTupleField(pub(super) bool); +enum BufferedDiag<'infcx> { + Error(Diag<'infcx>), + NonError(Diag<'infcx, ()>), +} + +impl<'infcx> BufferedDiag<'infcx> { + fn sort_span(&self) -> Span { + match self { + BufferedDiag::Error(diag) => diag.sort_span, + BufferedDiag::NonError(diag) => diag.sort_span, + } + } +} + +#[derive(Default)] 
+pub(crate) struct BorrowckDiagnosticsBuffer<'infcx, 'tcx> { + /// This field keeps track of move errors that are to be reported for given move indices. + /// + /// There are situations where many errors can be reported for a single move out (see + /// #53807) and we want only the best of those errors. + /// + /// The `report_use_of_moved_or_uninitialized` function checks this map and replaces the + /// diagnostic (if there is one) if the `Place` of the error being reported is a prefix of + /// the `Place` of the previous most diagnostic. This happens instead of buffering the + /// error. Once all move errors have been reported, any diagnostics in this map are added + /// to the buffer to be emitted. + /// + /// `BTreeMap` is used to preserve the order of insertions when iterating. This is necessary + /// when errors in the map are being re-added to the error buffer so that errors with the + /// same primary span come out in a consistent order. + buffered_move_errors: BTreeMap, (PlaceRef<'tcx>, Diag<'infcx>)>, + + buffered_mut_errors: FxIndexMap, usize)>, + + /// Buffer of diagnostics to be reported. A mixture of error and non-error diagnostics. + buffered_diags: Vec>, +} + +impl<'infcx, 'tcx> BorrowckDiagnosticsBuffer<'infcx, 'tcx> { + pub(crate) fn buffer_non_error(&mut self, diag: Diag<'infcx, ()>) { + self.buffered_diags.push(BufferedDiag::NonError(diag)); + } +} + +impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { + pub(crate) fn buffer_error(&mut self, diag: Diag<'infcx>) { + self.diags_buffer.buffered_diags.push(BufferedDiag::Error(diag)); + } + + pub(crate) fn buffer_non_error(&mut self, diag: Diag<'infcx, ()>) { + self.diags_buffer.buffer_non_error(diag); + } + + pub(crate) fn buffer_move_error( + &mut self, + move_out_indices: Vec, + place_and_err: (PlaceRef<'tcx>, Diag<'infcx>), + ) -> bool { + if let Some((_, diag)) = + self.diags_buffer.buffered_move_errors.insert(move_out_indices, place_and_err) + { + // Cancel the old diagnostic so we don't ICE + diag.cancel(); + false + } else { + true + } + } + + pub(crate) fn get_buffered_mut_error(&mut self, span: Span) -> Option<(Diag<'infcx>, usize)> { + // FIXME(#120456) - is `swap_remove` correct? + self.diags_buffer.buffered_mut_errors.swap_remove(&span) + } + + pub(crate) fn buffer_mut_error(&mut self, span: Span, diag: Diag<'infcx>, count: usize) { + self.diags_buffer.buffered_mut_errors.insert(span, (diag, count)); + } + + pub(crate) fn emit_errors(&mut self) -> Option { + let mut res = self.infcx.tainted_by_errors(); + + // Buffer any move errors that we collected and de-duplicated. + for (_, (_, diag)) in std::mem::take(&mut self.diags_buffer.buffered_move_errors) { + // We have already set tainted for this error, so just buffer it. + self.buffer_error(diag); + } + for (_, (mut diag, count)) in std::mem::take(&mut self.diags_buffer.buffered_mut_errors) { + if count > 10 { + #[allow(rustc::diagnostic_outside_of_impl)] + #[allow(rustc::untranslatable_diagnostic)] + diag.note(format!("...and {} other attempted mutable borrows", count - 10)); + } + self.buffer_error(diag); + } + + if !self.diags_buffer.buffered_diags.is_empty() { + self.diags_buffer.buffered_diags.sort_by_key(|buffered_diag| buffered_diag.sort_span()); + for buffered_diag in self.diags_buffer.buffered_diags.drain(..) 
{ + match buffered_diag { + BufferedDiag::Error(diag) => res = Some(diag.emit()), + BufferedDiag::NonError(diag) => diag.emit(), + } + } + } + + res + } + + pub(crate) fn has_buffered_diags(&self) -> bool { + self.diags_buffer.buffered_diags.is_empty() + } + + pub(crate) fn has_move_error( + &self, + move_out_indices: &[MoveOutIndex], + ) -> Option<&(PlaceRef<'tcx>, Diag<'infcx>)> { + self.diags_buffer.buffered_move_errors.get(move_out_indices) + } +} + impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { /// Adds a suggestion when a closure is invoked twice with a moved variable or when a closure /// is moved after being invoked. @@ -497,6 +623,52 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { region.print(&mut printer).unwrap(); printer.into_buffer() } + + /// Add a note to region errors and borrow explanations when higher-ranked regions in predicates + /// implicitly introduce an "outlives `'static`" constraint. + fn add_placeholder_from_predicate_note( + &self, + err: &mut Diag<'_, G>, + path: &[OutlivesConstraint<'tcx>], + ) { + let predicate_span = path.iter().find_map(|constraint| { + let outlived = constraint.sub; + if let Some(origin) = self.regioncx.var_infos.get(outlived) + && let RegionVariableOrigin::Nll(NllRegionVariableOrigin::Placeholder(_)) = + origin.origin + && let ConstraintCategory::Predicate(span) = constraint.category + { + Some(span) + } else { + None + } + }); + + if let Some(span) = predicate_span { + err.span_note(span, "due to current limitations in the borrow checker, this implies a `'static` lifetime"); + } + } + + /// Add a label to region errors and borrow explanations when outlives constraints arise from + /// proving a type implements `Sized` or `Copy`. + fn add_sized_or_copy_bound_info( + &self, + err: &mut Diag<'_, G>, + blamed_category: ConstraintCategory<'tcx>, + path: &[OutlivesConstraint<'tcx>], + ) { + for sought_category in [ConstraintCategory::SizedBound, ConstraintCategory::CopyBound] { + if sought_category != blamed_category + && let Some(sought_constraint) = path.iter().find(|c| c.category == sought_category) + { + let label = format!( + "requirement occurs due to {}", + sought_category.description().trim_end() + ); + err.span_label(sought_constraint.span, label); + } + } + } } /// The span(s) associated to a use of a place. @@ -539,9 +711,6 @@ impl UseSpans<'_> { UseSpans::ClosureUse { args_span: span, .. } | UseSpans::PatUse(span) | UseSpans::OtherUse(span) => span, - UseSpans::FnSelfUse { fn_call_span, kind: CallKind::DerefCoercion { .. }, .. } => { - fn_call_span - } UseSpans::FnSelfUse { var_span, .. } => var_span, } } @@ -552,9 +721,6 @@ impl UseSpans<'_> { UseSpans::ClosureUse { path_span: span, .. } | UseSpans::PatUse(span) | UseSpans::OtherUse(span) => span, - UseSpans::FnSelfUse { fn_call_span, kind: CallKind::DerefCoercion { .. }, .. } => { - fn_call_span - } UseSpans::FnSelfUse { var_span, .. } => var_span, } } @@ -565,9 +731,6 @@ impl UseSpans<'_> { UseSpans::ClosureUse { capture_kind_span: span, .. } | UseSpans::PatUse(span) | UseSpans::OtherUse(span) => span, - UseSpans::FnSelfUse { fn_call_span, kind: CallKind::DerefCoercion { .. }, .. } => { - fn_call_span - } UseSpans::FnSelfUse { var_span, .. 
} => var_span, } } @@ -879,6 +1042,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { kind, }; } + normal_ret } diff --git a/compiler/rustc_borrowck/src/diagnostics/move_errors.rs b/compiler/rustc_borrowck/src/diagnostics/move_errors.rs index 4ba6b2e94ec56..beacbdbd3fa7b 100644 --- a/compiler/rustc_borrowck/src/diagnostics/move_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/move_errors.rs @@ -1,9 +1,10 @@ #![allow(rustc::diagnostic_outside_of_impl)] #![allow(rustc::untranslatable_diagnostic)] +use rustc_data_structures::fx::FxHashSet; use rustc_errors::{Applicability, Diag}; use rustc_hir::intravisit::Visitor; -use rustc_hir::{CaptureBy, ExprKind, HirId, Node}; +use rustc_hir::{self as hir, CaptureBy, ExprKind, HirId, Node}; use rustc_middle::bug; use rustc_middle::mir::*; use rustc_middle::ty::{self, Ty}; @@ -683,48 +684,126 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { } fn add_move_error_suggestions(&self, err: &mut Diag<'_>, binds_to: &[Local]) { - let mut suggestions: Vec<(Span, String, String)> = Vec::new(); + /// A HIR visitor to associate each binding with a `&` or `&mut` that could be removed to + /// make it bind by reference instead (if possible) + struct BindingFinder<'tcx> { + typeck_results: &'tcx ty::TypeckResults<'tcx>, + hir: rustc_middle::hir::map::Map<'tcx>, + /// Input: the span of the pattern we're finding bindings in + pat_span: Span, + /// Input: the spans of the bindings we're providing suggestions for + binding_spans: Vec, + /// Internal state: have we reached the pattern we're finding bindings in? + found_pat: bool, + /// Internal state: the innermost `&` or `&mut` "above" the visitor + ref_pat: Option<&'tcx hir::Pat<'tcx>>, + /// Internal state: could removing a `&` give bindings unexpected types? + has_adjustments: bool, + /// Output: for each input binding, the `&` or `&mut` to remove to make it by-ref + ref_pat_for_binding: Vec<(Span, Option<&'tcx hir::Pat<'tcx>>)>, + /// Output: ref patterns that can't be removed straightforwardly + cannot_remove: FxHashSet, + } + impl<'tcx> Visitor<'tcx> for BindingFinder<'tcx> { + type NestedFilter = rustc_middle::hir::nested_filter::OnlyBodies; + + fn nested_visit_map(&mut self) -> Self::Map { + self.hir + } + + fn visit_expr(&mut self, ex: &'tcx hir::Expr<'tcx>) -> Self::Result { + // Don't walk into const patterns or anything else that might confuse this + if !self.found_pat { + hir::intravisit::walk_expr(self, ex) + } + } + + fn visit_pat(&mut self, p: &'tcx hir::Pat<'tcx>) { + if p.span == self.pat_span { + self.found_pat = true; + } + + let parent_has_adjustments = self.has_adjustments; + self.has_adjustments |= + self.typeck_results.pat_adjustments().contains_key(p.hir_id); + + // Track the innermost `&` or `&mut` enclosing bindings, to suggest removing it. + let parent_ref_pat = self.ref_pat; + if let hir::PatKind::Ref(..) = p.kind { + self.ref_pat = Some(p); + // To avoid edition-dependent logic to figure out how many refs this `&` can + // peel off, simply don't remove the "parent" `&`. + self.cannot_remove.extend(parent_ref_pat.map(|r| r.hir_id)); + if self.has_adjustments { + // Removing this `&` could give child bindings unexpected types, so don't. + self.cannot_remove.insert(p.hir_id); + // As long the `&` stays, child patterns' types should be as expected. 
+ self.has_adjustments = false; + } + } + + if let hir::PatKind::Binding(_, _, ident, _) = p.kind { + // the spans in `binding_spans` encompass both the ident and binding mode + if let Some(&bind_sp) = + self.binding_spans.iter().find(|bind_sp| bind_sp.contains(ident.span)) + { + self.ref_pat_for_binding.push((bind_sp, self.ref_pat)); + } else { + // we've encountered a binding that we're not reporting a move error for. + // we don't want to change its type, so don't remove the surrounding `&`. + if let Some(ref_pat) = self.ref_pat { + self.cannot_remove.insert(ref_pat.hir_id); + } + } + } + + hir::intravisit::walk_pat(self, p); + self.ref_pat = parent_ref_pat; + self.has_adjustments = parent_has_adjustments; + } + } + let mut pat_span = None; + let mut binding_spans = Vec::new(); for local in binds_to { let bind_to = &self.body.local_decls[*local]; - if let LocalInfo::User(BindingForm::Var(VarBindingForm { pat_span, .. })) = + if let LocalInfo::User(BindingForm::Var(VarBindingForm { pat_span: pat_sp, .. })) = *bind_to.local_info() { - let Ok(pat_snippet) = self.infcx.tcx.sess.source_map().span_to_snippet(pat_span) - else { - continue; - }; - let Some(stripped) = pat_snippet.strip_prefix('&') else { - suggestions.push(( - bind_to.source_info.span.shrink_to_lo(), - "consider borrowing the pattern binding".to_string(), - "ref ".to_string(), - )); - continue; - }; - let inner_pat_snippet = stripped.trim_start(); - let (pat_span, suggestion, to_remove) = if inner_pat_snippet.starts_with("mut") - && inner_pat_snippet["mut".len()..].starts_with(rustc_lexer::is_whitespace) - { - let inner_pat_snippet = inner_pat_snippet["mut".len()..].trim_start(); - let pat_span = pat_span.with_hi( - pat_span.lo() - + BytePos((pat_snippet.len() - inner_pat_snippet.len()) as u32), - ); - (pat_span, String::new(), "mutable borrow") - } else { - let pat_span = pat_span.with_hi( - pat_span.lo() - + BytePos( - (pat_snippet.len() - inner_pat_snippet.trim_start().len()) as u32, - ), - ); - (pat_span, String::new(), "borrow") - }; - suggestions.push(( - pat_span, - format!("consider removing the {to_remove}"), - suggestion, - )); + pat_span = Some(pat_sp); + binding_spans.push(bind_to.source_info.span); + } + } + let Some(pat_span) = pat_span else { return }; + + let hir = self.infcx.tcx.hir(); + let Some(body) = hir.maybe_body_owned_by(self.mir_def_id()) else { return }; + let typeck_results = self.infcx.tcx.typeck(self.mir_def_id()); + let mut finder = BindingFinder { + typeck_results, + hir, + pat_span, + binding_spans, + found_pat: false, + ref_pat: None, + has_adjustments: false, + ref_pat_for_binding: Vec::new(), + cannot_remove: FxHashSet::default(), + }; + finder.visit_body(body); + + let mut suggestions = Vec::new(); + for (binding_span, opt_ref_pat) in finder.ref_pat_for_binding { + if let Some(ref_pat) = opt_ref_pat + && !finder.cannot_remove.contains(&ref_pat.hir_id) + && let hir::PatKind::Ref(subpat, mutbl) = ref_pat.kind + && let Some(ref_span) = ref_pat.span.trim_end(subpat.span) + { + let mutable_str = if mutbl.is_mut() { "mutable " } else { "" }; + let msg = format!("consider removing the {mutable_str}borrow"); + suggestions.push((ref_span, msg, "".to_string())); + } else { + let msg = "consider borrowing the pattern binding".to_string(); + suggestions.push((binding_span.shrink_to_lo(), msg, "ref ".to_string())); } } suggestions.sort_unstable_by_key(|&(span, _, _)| span); diff --git a/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs 
b/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs index 4fb7b22f2896e..a6ca038282d96 100644 --- a/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs @@ -10,20 +10,19 @@ use rustc_hir::intravisit::Visitor; use rustc_hir::{self as hir, BindingMode, ByRef, Node}; use rustc_middle::bug; use rustc_middle::hir::place::PlaceBase; +use rustc_middle::mir::visit::PlaceContext; use rustc_middle::mir::{ self, BindingForm, Local, LocalDecl, LocalInfo, LocalKind, Location, Mutability, Place, PlaceRef, ProjectionElem, }; use rustc_middle::ty::{self, InstanceKind, Ty, TyCtxt, Upcast}; -use rustc_span::symbol::{Symbol, kw}; -use rustc_span::{BytePos, DesugaringKind, Span, sym}; +use rustc_span::{BytePos, DesugaringKind, Span, Symbol, kw, sym}; use rustc_trait_selection::error_reporting::InferCtxtErrorExt; use rustc_trait_selection::infer::InferCtxtExt; use rustc_trait_selection::traits; use tracing::debug; use crate::diagnostics::BorrowedContentSource; -use crate::util::FindAssignments; use crate::{MirBorrowckCtxt, session_diagnostics}; #[derive(Copy, Clone, Debug, Eq, PartialEq)] @@ -576,7 +575,8 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { // ---------- place self.err.multipart_suggestions( format!( - "to modify a `{}`, use `.get_mut()`, `.insert()` or the entry API", + "use `.insert()` to insert a value into a `{}`, `.get_mut()` \ + to modify it, or the entry API for more flexibility", self.ty, ), vec![ @@ -593,16 +593,17 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { (rv.span.shrink_to_hi(), ")".to_string()), ], vec![ - // val.get_mut(index).map(|v| { *v = rv; }); + // if let Some(v) = val.get_mut(index) { *v = rv; } + (val.span.shrink_to_lo(), "if let Some(val) = ".to_string()), ( val.span.shrink_to_hi().with_hi(index.span.lo()), ".get_mut(".to_string(), ), ( index.span.shrink_to_hi().with_hi(place.span.hi()), - ").map(|val| { *val".to_string(), + ") { *val".to_string(), ), - (rv.span.shrink_to_hi(), "; })".to_string()), + (rv.span.shrink_to_hi(), "; }".to_string()), ], vec![ // let x = val.entry(index).or_insert(rv); @@ -623,21 +624,22 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { self.suggested = true; } else if let hir::ExprKind::MethodCall(_path, receiver, _, sp) = expr.kind && let hir::ExprKind::Index(val, index, _) = receiver.kind - && expr.span == self.assign_span + && receiver.span == self.assign_span { // val[index].path(args..); self.err.multipart_suggestion( format!("to modify a `{}` use `.get_mut()`", self.ty), vec![ + (val.span.shrink_to_lo(), "if let Some(val) = ".to_string()), ( val.span.shrink_to_hi().with_hi(index.span.lo()), ".get_mut(".to_string(), ), ( index.span.shrink_to_hi().with_hi(receiver.span.hi()), - ").map(|val| val".to_string(), + ") { val".to_string(), ), - (sp.shrink_to_hi(), ")".to_string()), + (sp.shrink_to_hi(), "; }".to_string()), ], Applicability::MachineApplicable, ); @@ -979,7 +981,9 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { let arg = match hir.get_if_local(callee_def_id) { Some( - hir::Node::Item(hir::Item { ident, kind: hir::ItemKind::Fn(sig, ..), .. }) + hir::Node::Item(hir::Item { + ident, kind: hir::ItemKind::Fn { sig, .. }, .. + }) | hir::Node::TraitItem(hir::TraitItem { ident, kind: hir::TraitItemKind::Fn(sig, _), @@ -1018,7 +1022,9 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { // ...otherwise we are probably in the tail expression of the function, point at the // return type. 
match self.infcx.tcx.hir_node_by_def_id(hir.get_parent_item(fn_call_id).def_id) { - hir::Node::Item(hir::Item { ident, kind: hir::ItemKind::Fn(sig, ..), .. }) + hir::Node::Item(hir::Item { + ident, kind: hir::ItemKind::Fn { sig, .. }, .. + }) | hir::Node::TraitItem(hir::TraitItem { ident, kind: hir::TraitItemKind::Fn(sig, _), @@ -1082,6 +1088,38 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { } } + /// Finds all statements that assign directly to local (i.e., X = ...) and returns their + /// locations. + fn find_assignments(&self, local: Local) -> Vec { + use rustc_middle::mir::visit::Visitor; + + struct FindLocalAssignmentVisitor { + needle: Local, + locations: Vec, + } + + impl<'tcx> Visitor<'tcx> for FindLocalAssignmentVisitor { + fn visit_local( + &mut self, + local: Local, + place_context: PlaceContext, + location: Location, + ) { + if self.needle != local { + return; + } + + if place_context.is_place_assignment() { + self.locations.push(location); + } + } + } + + let mut visitor = FindLocalAssignmentVisitor { needle: local, locations: vec![] }; + visitor.visit_body(self.body); + visitor.locations + } + fn suggest_make_local_mut(&self, err: &mut Diag<'_>, local: Local, name: Symbol) { let local_decl = &self.body.local_decls[local]; @@ -1115,7 +1153,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { })) => { // check if the RHS is from desugaring let opt_assignment_rhs_span = - self.body.find_assignments(local).first().map(|&location| { + self.find_assignments(local).first().map(|&location| { if let Some(mir::Statement { source_info: _, kind: @@ -1474,16 +1512,27 @@ fn suggest_ampmut<'tcx>( // let x: &i32 = &'a 5; // ^^ lifetime annotation not allowed // - if let Some(assignment_rhs_span) = opt_assignment_rhs_span - && let Ok(src) = tcx.sess.source_map().span_to_snippet(assignment_rhs_span) - && let Some(stripped) = src.strip_prefix('&') + if let Some(rhs_span) = opt_assignment_rhs_span + && let Ok(rhs_str) = tcx.sess.source_map().span_to_snippet(rhs_span) + && let Some(rhs_str_no_amp) = rhs_str.strip_prefix('&') { - let is_raw_ref = stripped.trim_start().starts_with("raw "); - // We don't support raw refs yet - if is_raw_ref { - return None; + // Suggest changing `&raw const` to `&raw mut` if applicable. + if rhs_str_no_amp.trim_start().strip_prefix("raw const").is_some() { + let const_idx = rhs_str.find("const").unwrap() as u32; + let const_span = rhs_span + .with_lo(rhs_span.lo() + BytePos(const_idx)) + .with_hi(rhs_span.lo() + BytePos(const_idx + "const".len() as u32)); + + return Some(AmpMutSugg { + has_sugg: true, + span: const_span, + suggestion: "mut".to_owned(), + additional: None, + }); } - let is_mut = if let Some(rest) = stripped.trim_start().strip_prefix("mut") { + + // Figure out if rhs already is `&mut`. + let is_mut = if let Some(rest) = rhs_str_no_amp.trim_start().strip_prefix("mut") { match rest.chars().next() { // e.g. `&mut x` Some(c) if c.is_whitespace() => true, @@ -1500,9 +1549,8 @@ fn suggest_ampmut<'tcx>( // if the reference is already mutable then there is nothing we can do // here. if !is_mut { - let span = assignment_rhs_span; // shrink the span to just after the `&` in `&variable` - let span = span.with_lo(span.lo() + BytePos(1)).shrink_to_lo(); + let span = rhs_span.with_lo(rhs_span.lo() + BytePos(1)).shrink_to_lo(); // FIXME(Ezrashaw): returning is bad because we still might want to // update the annotated type, see #106857. 
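The mutability_errors.rs hunks above rewrite the index-assignment suggestions for map-like types: instead of proposing a `.get_mut(..).map(|val| { .. })` chain, the diagnostic now proposes an `if let Some(val) = ..get_mut(..) { .. }` form, alongside the existing `.insert(..)` and entry-API alternatives. As a rough sketch of the shape of code the revised suggestions produce (the `HashMap`, `val`, `index`, and `rv` names here are illustrative placeholders, not taken from the patch):

    use std::collections::HashMap;

    fn main() {
        let mut val: HashMap<&str, i32> = HashMap::new();
        let index = "k";
        let rv = 1;

        // Previously suggested shape: `val.get_mut(index).map(|val| { *val = rv; });`
        // Shape suggested after this change:
        if let Some(v) = val.get_mut(index) {
            *v = rv;
        }

        // The other alternatives the diagnostic still offers:
        val.insert(index, rv);
        let _entry = val.entry(index).or_insert(rv);
    }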
diff --git a/compiler/rustc_borrowck/src/diagnostics/opaque_suggestions.rs b/compiler/rustc_borrowck/src/diagnostics/opaque_suggestions.rs index d77c53a398447..876b8f214b017 100644 --- a/compiler/rustc_borrowck/src/diagnostics/opaque_suggestions.rs +++ b/compiler/rustc_borrowck/src/diagnostics/opaque_suggestions.rs @@ -31,7 +31,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { diag: &mut Diag<'_>, ) { // We look at all the locals. Why locals? Because it's the best thing - // I could think of that's correlated with the *instantiated* higer-ranked + // I could think of that's correlated with the *instantiated* higher-ranked // binder for calls, since we don't really store those anywhere else. for ty in self.body.local_decls.iter().map(|local| local.ty) { if !ty.has_opaque_types() { diff --git a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs index 6ea5c44dc0170..f0baa20648cd2 100644 --- a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs @@ -13,10 +13,9 @@ use rustc_hir::{PolyTraitRef, TyKind, WhereBoundPredicate}; use rustc_infer::infer::{NllRegionVariableOrigin, RelateParamBound}; use rustc_middle::bug; use rustc_middle::hir::place::PlaceBase; -use rustc_middle::mir::{ConstraintCategory, ReturnConstraint}; +use rustc_middle::mir::{AnnotationSource, ConstraintCategory, ReturnConstraint}; use rustc_middle::ty::{self, GenericArgs, Region, RegionVid, Ty, TyCtxt, TypeVisitor}; -use rustc_span::Span; -use rustc_span::symbol::{Ident, kw}; +use rustc_span::{Ident, Span, kw}; use rustc_trait_selection::error_reporting::InferCtxtErrorExt; use rustc_trait_selection::error_reporting::infer::nice_region_error::{ self, HirTraitObjectVisitor, NiceRegionError, TraitObjectVisitor, find_anon_type, @@ -30,7 +29,7 @@ use tracing::{debug, instrument, trace}; use super::{OutlivesSuggestionBuilder, RegionName, RegionNameSource}; use crate::nll::ConstraintDescription; use crate::region_infer::values::RegionElement; -use crate::region_infer::{BlameConstraint, ExtraConstraintInfo, TypeTest}; +use crate::region_infer::{BlameConstraint, TypeTest}; use crate::session_diagnostics::{ FnMutError, FnMutReturnTypeErr, GenericDoesNotLiveLongEnough, LifetimeOutliveErr, LifetimeReturnCategoryErr, RequireStaticErr, VarHereDenote, @@ -50,8 +49,8 @@ impl<'tcx> ConstraintDescription for ConstraintCategory<'tcx> { ConstraintCategory::Cast { is_implicit_coercion: false, .. } => "cast ", ConstraintCategory::Cast { is_implicit_coercion: true, .. } => "coercion ", ConstraintCategory::CallArgument(_) => "argument ", - ConstraintCategory::TypeAnnotation => "type annotation ", - ConstraintCategory::ClosureBounds => "closure body ", + ConstraintCategory::TypeAnnotation(AnnotationSource::GenericArg) => "generic argument ", + ConstraintCategory::TypeAnnotation(_) => "type annotation ", ConstraintCategory::SizedBound => "proving this value is `Sized` ", ConstraintCategory::CopyBound => "copying this value ", ConstraintCategory::OpaqueType => "opaque type ", @@ -189,7 +188,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { /// Returns `true` if a closure is inferred to be an `FnMut` closure. 
fn is_closure_fn_mut(&self, fr: RegionVid) -> bool { if let Some(ty::ReLateParam(late_param)) = self.to_error_region(fr).as_deref() - && let ty::BoundRegionKind::ClosureEnv = late_param.bound_region + && let ty::LateParamRegionKind::ClosureEnv = late_param.kind && let DefiningTy::Closure(_, args) = self.regioncx.universal_regions().defining_ty { return args.as_closure().kind() == ty::ClosureKind::FnMut; @@ -441,10 +440,9 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { ) { debug!("report_region_error(fr={:?}, outlived_fr={:?})", fr, outlived_fr); - let (blame_constraint, extra_info) = - self.regioncx.best_blame_constraint(fr, fr_origin, |r| { - self.regioncx.provides_universal_region(r, fr, outlived_fr) - }); + let (blame_constraint, path) = self.regioncx.best_blame_constraint(fr, fr_origin, |r| { + self.regioncx.provides_universal_region(r, fr, outlived_fr) + }); let BlameConstraint { category, cause, variance_info, .. } = blame_constraint; debug!("report_region_error: category={:?} {:?} {:?}", category, cause, variance_info); @@ -555,13 +553,8 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { } } - for extra in extra_info { - match extra { - ExtraConstraintInfo::PlaceholderFromPredicate(span) => { - diag.span_note(span, "due to current limitations in the borrow checker, this implies a `'static` lifetime"); - } - } - } + self.add_placeholder_from_predicate_note(&mut diag, &path); + self.add_sized_or_copy_bound_info(&mut diag, category, &path); self.buffer_error(diag); } @@ -848,7 +841,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { return; }; - let fn_returns = self.infcx.tcx.return_type_impl_or_dyn_traits(suitable_region.def_id); + let fn_returns = self.infcx.tcx.return_type_impl_or_dyn_traits(suitable_region.scope); let param = if let Some(param) = find_param_with_region(self.infcx.tcx, self.mir_def_id(), f, outlived_f) @@ -875,7 +868,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { Some(arg), captures, Some((param.param_ty_span, param.param_ty.to_string())), - Some(suitable_region.def_id), + Some(suitable_region.scope), ); return; } @@ -883,7 +876,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { let Some((alias_tys, alias_span, lt_addition_span)) = self .infcx .tcx - .return_type_impl_or_dyn_traits_with_type_alias(suitable_region.def_id) + .return_type_impl_or_dyn_traits_with_type_alias(suitable_region.scope) else { return; }; @@ -1018,18 +1011,20 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { return; }; - let Some((ty_sub, _)) = - self.infcx.tcx.is_suitable_region(self.mir_def_id(), sub).and_then(|anon_reg| { - find_anon_type(self.infcx.tcx, self.mir_def_id(), sub, &anon_reg.bound_region) - }) + let Some((ty_sub, _)) = self + .infcx + .tcx + .is_suitable_region(self.mir_def_id(), sub) + .and_then(|_| find_anon_type(self.infcx.tcx, self.mir_def_id(), sub)) else { return; }; - let Some((ty_sup, _)) = - self.infcx.tcx.is_suitable_region(self.mir_def_id(), sup).and_then(|anon_reg| { - find_anon_type(self.infcx.tcx, self.mir_def_id(), sup, &anon_reg.bound_region) - }) + let Some((ty_sup, _)) = self + .infcx + .tcx + .is_suitable_region(self.mir_def_id(), sup) + .and_then(|_| find_anon_type(self.infcx.tcx, self.mir_def_id(), sup)) else { return; }; diff --git a/compiler/rustc_borrowck/src/diagnostics/region_name.rs b/compiler/rustc_borrowck/src/diagnostics/region_name.rs index d49dee8514457..9349b46ec5b0c 100644 --- a/compiler/rustc_borrowck/src/diagnostics/region_name.rs +++ 
b/compiler/rustc_borrowck/src/diagnostics/region_name.rs @@ -5,14 +5,13 @@ use std::fmt::{self, Display}; use std::iter; use rustc_data_structures::fx::IndexEntry; -use rustc_errors::Diag; +use rustc_errors::{Diag, EmissionGuarantee}; use rustc_hir as hir; use rustc_hir::def::{DefKind, Res}; use rustc_middle::ty::print::RegionHighlightMode; use rustc_middle::ty::{self, GenericArgKind, GenericArgsRef, RegionVid, Ty}; use rustc_middle::{bug, span_bug}; -use rustc_span::symbol::{Symbol, kw, sym}; -use rustc_span::{DUMMY_SP, Span}; +use rustc_span::{DUMMY_SP, Span, Symbol, kw, sym}; use rustc_trait_selection::error_reporting::InferCtxtErrorExt; use tracing::{debug, instrument}; @@ -109,7 +108,7 @@ impl RegionName { } } - pub(crate) fn highlight_region_name(&self, diag: &mut Diag<'_>) { + pub(crate) fn highlight_region_name(&self, diag: &mut Diag<'_, G>) { match &self.source { RegionNameSource::NamedLateParamRegion(span) | RegionNameSource::NamedEarlyParamRegion(span) => { @@ -300,17 +299,17 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, 'tcx> { Some(RegionName { name: kw::StaticLifetime, source: RegionNameSource::Static }) } - ty::ReLateParam(late_param) => match late_param.bound_region { - ty::BoundRegionKind::Named(region_def_id, name) => { + ty::ReLateParam(late_param) => match late_param.kind { + ty::LateParamRegionKind::Named(region_def_id, name) => { // Get the span to point to, even if we don't use the name. let span = tcx.hir().span_if_local(region_def_id).unwrap_or(DUMMY_SP); debug!( "bound region named: {:?}, is_named: {:?}", name, - late_param.bound_region.is_named() + late_param.kind.is_named() ); - if late_param.bound_region.is_named() { + if late_param.kind.is_named() { // A named region that is actually named. Some(RegionName { name, @@ -332,7 +331,7 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, 'tcx> { } } - ty::BoundRegionKind::ClosureEnv => { + ty::LateParamRegionKind::ClosureEnv => { let def_ty = self.regioncx.universal_regions().defining_ty; let closure_kind = match def_ty { @@ -369,7 +368,7 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, 'tcx> { }) } - ty::BoundRegionKind::Anon => None, + ty::LateParamRegionKind::Anon(_) => None, }, ty::ReBound(..) 
@@ -459,11 +458,8 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, 'tcx> { ) -> RegionNameHighlight { let mut highlight = RegionHighlightMode::default(); highlight.highlighting_region_vid(self.infcx.tcx, needle_fr, counter); - let type_name = self - .infcx - .err_ctxt() - .extract_inference_diagnostics_data(ty.into(), Some(highlight)) - .name; + let type_name = + self.infcx.err_ctxt().extract_inference_diagnostics_data(ty.into(), highlight).name; debug!( "highlight_if_we_cannot_match_hir_ty: type_name={:?} needle_fr={:?}", @@ -874,7 +870,7 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, 'tcx> { let type_name = self .infcx .err_ctxt() - .extract_inference_diagnostics_data(yield_ty.into(), Some(highlight)) + .extract_inference_diagnostics_data(yield_ty.into(), highlight) .name; let yield_span = match tcx.hir_node(self.mir_hir_id()) { diff --git a/compiler/rustc_borrowck/src/diagnostics/var_name.rs b/compiler/rustc_borrowck/src/diagnostics/var_name.rs index bda7024126732..191c0444c742e 100644 --- a/compiler/rustc_borrowck/src/diagnostics/var_name.rs +++ b/compiler/rustc_borrowck/src/diagnostics/var_name.rs @@ -1,8 +1,7 @@ use rustc_index::IndexSlice; use rustc_middle::mir::{Body, Local}; use rustc_middle::ty::{self, RegionVid, TyCtxt}; -use rustc_span::Span; -use rustc_span::symbol::Symbol; +use rustc_span::{Span, Symbol}; use tracing::debug; use crate::region_infer::RegionInferenceContext; diff --git a/compiler/rustc_borrowck/src/lib.rs b/compiler/rustc_borrowck/src/lib.rs index 18b7984e90dc1..45764f9c6b867 100644 --- a/compiler/rustc_borrowck/src/lib.rs +++ b/compiler/rustc_borrowck/src/lib.rs @@ -6,6 +6,7 @@ #![feature(assert_matches)] #![feature(box_patterns)] #![feature(file_buffered)] +#![feature(if_let_guard)] #![feature(let_chains)] #![feature(never_type)] #![feature(rustc_attrs)] @@ -15,18 +16,19 @@ #![warn(unreachable_pub)] // tidy-alphabetical-end +use std::borrow::Cow; use std::cell::RefCell; -use std::collections::BTreeMap; use std::marker::PhantomData; -use std::ops::Deref; +use std::ops::{ControlFlow, Deref}; use rustc_abi::FieldIdx; use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; use rustc_data_structures::graph::dominators::Dominators; -use rustc_errors::Diag; +use rustc_errors::LintDiagnostic; use rustc_hir as hir; +use rustc_hir::CRATE_HIR_ID; use rustc_hir::def_id::LocalDefId; -use rustc_index::bit_set::{BitSet, MixedBitSet}; +use rustc_index::bit_set::{DenseBitSet, MixedBitSet}; use rustc_index::{IndexSlice, IndexVec}; use rustc_infer::infer::{ InferCtxt, NllRegionVariableOrigin, RegionVariableOrigin, TyCtxtInferExt, @@ -41,10 +43,10 @@ use rustc_mir_dataflow::impls::{ EverInitializedPlaces, MaybeInitializedPlaces, MaybeUninitializedPlaces, }; use rustc_mir_dataflow::move_paths::{ - InitIndex, InitLocation, LookupResult, MoveData, MoveOutIndex, MovePathIndex, + InitIndex, InitLocation, LookupResult, MoveData, MovePathIndex, }; use rustc_mir_dataflow::{Analysis, EntryStates, Results, ResultsVisitor, visit_results}; -use rustc_session::lint::builtin::UNUSED_MUT; +use rustc_session::lint::builtin::{TAIL_EXPR_DROP_ORDER, UNUSED_MUT}; use rustc_span::{Span, Symbol}; use smallvec::SmallVec; use tracing::{debug, instrument}; @@ -52,12 +54,13 @@ use tracing::{debug, instrument}; use crate::borrow_set::{BorrowData, BorrowSet}; use crate::consumers::{BodyWithBorrowckFacts, ConsumerOptions}; use crate::dataflow::{BorrowIndex, Borrowck, BorrowckDomain, Borrows}; -use crate::diagnostics::{AccessKind, IllegalMoveOriginKind, MoveError, RegionName}; -use crate::location::LocationTable; -use 
crate::nll::PoloniusOutput; +use crate::diagnostics::{ + AccessKind, BorrowckDiagnosticsBuffer, IllegalMoveOriginKind, MoveError, RegionName, +}; use crate::path_utils::*; use crate::place_ext::PlaceExt; use crate::places_conflict::{PlaceConflictBias, places_conflict}; +use crate::polonius::legacy::{PoloniusLocationTable, PoloniusOutput}; use crate::prefixes::PrefixSet; use crate::region_infer::RegionInferenceContext; use crate::renumber::RegionCtxt; @@ -69,8 +72,6 @@ mod constraints; mod dataflow; mod def_use; mod diagnostics; -mod facts; -mod location; mod member_constraints; mod nll; mod path_utils; @@ -84,7 +85,6 @@ mod session_diagnostics; mod type_check; mod universal_regions; mod used_muts; -mod util; /// A public API provided for the Rust compiler consumers. pub mod consumers; @@ -120,11 +120,10 @@ fn mir_borrowck(tcx: TyCtxt<'_>, def: LocalDefId) -> &BorrowCheckResult<'_> { return tcx.arena.alloc(result); } - let promoted: &IndexSlice<_, _> = &promoted.borrow(); - let opt_closure_req = do_mir_borrowck(tcx, input_body, promoted, None).0; + let borrowck_result = do_mir_borrowck(tcx, input_body, &*promoted.borrow(), None).0; debug!("mir_borrowck done"); - tcx.arena.alloc(opt_closure_req) + tcx.arena.alloc(borrowck_result) } /// Perform the actual borrow checking. @@ -141,6 +140,9 @@ fn do_mir_borrowck<'tcx>( ) -> (BorrowCheckResult<'tcx>, Option>>) { let def = input_body.source.def_id().expect_local(); let infcx = BorrowckInferCtxt::new(tcx, def); + if let Some(e) = input_body.tainted_by_errors { + infcx.set_tainted_by_errors(e); + } let mut local_names = IndexVec::from_elem(None, &input_body.local_decls); for var_debug_info in &input_body.var_debug_info { @@ -162,13 +164,6 @@ fn do_mir_borrowck<'tcx>( } } - let diags = &mut diags::BorrowckDiags::new(); - - // Gather the upvars of a closure, if any. - if let Some(e) = input_body.tainted_by_errors { - infcx.set_tainted_by_errors(e); - } - // Replace all regions with fresh inference variables. This // requires first making our own copy of the MIR. This copy will // be modified (in place) to contain non-lexical lifetimes. It @@ -184,7 +179,7 @@ fn do_mir_borrowck<'tcx>( infcx.register_predefined_opaques_for_next_solver(def); } - let location_table = LocationTable::new(body); + let location_table = PoloniusLocationTable::new(body); let move_data = MoveData::gather_moves(body, tcx, |_| true); let promoted_move_data = promoted @@ -206,6 +201,7 @@ fn do_mir_borrowck<'tcx>( polonius_output, opt_closure_req, nll_errors, + localized_outlives_constraints, } = nll::compute_regions( &infcx, free_regions, @@ -218,13 +214,21 @@ fn do_mir_borrowck<'tcx>( consumer_options, ); - // Dump MIR results into a file, if that is enabled. This let us + // Dump MIR results into a file, if that is enabled. This lets us // write unit-tests, as well as helping with debugging. nll::dump_nll_mir(&infcx, body, ®ioncx, &opt_closure_req, &borrow_set); // We also have a `#[rustc_regions]` annotation that causes us to dump // information. 
- nll::dump_annotation(&infcx, body, ®ioncx, &opt_closure_req, &opaque_type_values, diags); + let diags_buffer = &mut BorrowckDiagnosticsBuffer::default(); + nll::dump_annotation( + &infcx, + body, + ®ioncx, + &opt_closure_req, + &opaque_type_values, + diags_buffer, + ); let movable_coroutine = // The first argument is the coroutine type passed by value @@ -246,7 +250,8 @@ fn do_mir_borrowck<'tcx>( infcx: &infcx, body: promoted_body, move_data: &move_data, - location_table: &location_table, // no need to create a real one for the promoted, it is not used + // no need to create a real location table for the promoted, it is not used + location_table: &location_table, movable_coroutine, fn_self_span_reported: Default::default(), locals_are_invalidated_at_exit, @@ -263,7 +268,7 @@ fn do_mir_borrowck<'tcx>( next_region_name: RefCell::new(1), polonius_output: None, move_errors: Vec::new(), - diags, + diags_buffer, }; MoveVisitor { ctxt: &mut promoted_mbcx }.visit_body(promoted_body); promoted_mbcx.report_move_errors(); @@ -302,7 +307,7 @@ fn do_mir_borrowck<'tcx>( next_region_name: RefCell::new(1), polonius_output, move_errors: Vec::new(), - diags, + diags_buffer, }; // Compute and report region errors, if any. @@ -318,6 +323,16 @@ fn do_mir_borrowck<'tcx>( mbcx.report_move_errors(); + // If requested, dump polonius MIR. + polonius::dump_polonius_mir( + &infcx, + body, + ®ioncx, + &borrow_set, + localized_outlives_constraints, + &opt_closure_req, + ); + // For each non-user used mutable variable, check if it's been assigned from // a user-declared local. If so, then put that local into the used_mut set. // Note that this set is expected to be small - only upvars from closures @@ -337,35 +352,7 @@ fn do_mir_borrowck<'tcx>( mbcx.gather_used_muts(temporary_used_locals, unused_mut_locals); debug!("mbcx.used_mut: {:?}", mbcx.used_mut); - let used_mut = std::mem::take(&mut mbcx.used_mut); - for local in mbcx.body.mut_vars_and_args_iter().filter(|local| !used_mut.contains(local)) { - let local_decl = &mbcx.body.local_decls[local]; - let lint_root = match &mbcx.body.source_scopes[local_decl.source_info.scope].local_data { - ClearCrossCrate::Set(data) => data.lint_root, - _ => continue, - }; - - // Skip over locals that begin with an underscore or have no name - match mbcx.local_names[local] { - Some(name) => { - if name.as_str().starts_with('_') { - continue; - } - } - None => continue, - } - - let span = local_decl.source_info.span; - if span.desugaring_kind().is_some() { - // If the `mut` arises as part of a desugaring, we should ignore it. - continue; - } - - let mut_span = tcx.sess.source_map().span_until_non_whitespace(span); - - tcx.emit_node_span_lint(UNUSED_MUT, lint_root, span, VarNeedNotMut { span: mut_span }) - } - + mbcx.lint_unused_mut(); let tainted_by_errors = mbcx.emit_errors(); let result = BorrowCheckResult { @@ -530,7 +517,7 @@ struct MirBorrowckCtxt<'a, 'infcx, 'tcx> { /// Map from MIR `Location` to `LocationIndex`; created /// when MIR borrowck begins. - location_table: &'a LocationTable, + location_table: &'a PoloniusLocationTable, movable_coroutine: bool, /// This keeps track of whether local variables are free-ed when the function @@ -590,7 +577,7 @@ struct MirBorrowckCtxt<'a, 'infcx, 'tcx> { /// Results of Polonius analysis. 
polonius_output: Option>, - diags: &'a mut diags::BorrowckDiags<'infcx, 'tcx>, + diags_buffer: &'a mut BorrowckDiagnosticsBuffer<'infcx, 'tcx>, move_errors: Vec>, } @@ -653,9 +640,11 @@ impl<'a, 'tcx> ResultsVisitor<'a, 'tcx, Borrowck<'a, 'tcx>> for MirBorrowckCtxt< | StatementKind::Coverage(..) // These do not actually affect borrowck | StatementKind::ConstEvalCounter - // This do not affect borrowck - | StatementKind::BackwardIncompatibleDropHint { .. } | StatementKind::StorageLive(..) => {} + // This does not affect borrowck + StatementKind::BackwardIncompatibleDropHint { place, reason: BackwardIncompatibleDropReason::Edition2024 } => { + self.check_backward_incompatible_drop(location, (**place, span), state); + } StatementKind::StorageDead(local) => { self.access_place( location, @@ -840,7 +829,6 @@ use self::ReadOrWrite::{Activation, Read, Reservation, Write}; #[derive(Copy, Clone, PartialEq, Eq, Debug)] enum ArtificialField { - ArrayLength, FakeBorrow, } @@ -1025,6 +1013,24 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> { } } + fn borrows_in_scope<'s>( + &self, + location: Location, + state: &'s BorrowckDomain, + ) -> Cow<'s, DenseBitSet> { + if let Some(polonius) = &self.polonius_output { + // Use polonius output if it has been enabled. + let location = self.location_table.start_index(location); + let mut polonius_output = DenseBitSet::new_empty(self.borrow_set.len()); + for &idx in polonius.errors_at(location) { + polonius_output.insert(idx); + } + Cow::Owned(polonius_output) + } else { + Cow::Borrowed(&state.borrows) + } + } + #[instrument(level = "debug", skip(self, state))] fn check_access_for_conflict( &mut self, @@ -1036,18 +1042,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> { ) -> bool { let mut error_reported = false; - // Use polonius output if it has been enabled. - let mut polonius_output; - let borrows_in_scope = if let Some(polonius) = &self.polonius_output { - let location = self.location_table.start_index(location); - polonius_output = BitSet::new_empty(self.borrow_set.len()); - for &idx in polonius.errors_at(location) { - polonius_output.insert(idx); - } - &polonius_output - } else { - &state.borrows - }; + let borrows_in_scope = self.borrows_in_scope(location, state); each_borrow_involving_path( self, @@ -1072,31 +1067,31 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> { rw, (borrow_index, borrow), ); - Control::Continue + ControlFlow::Continue(()) } (Read(_), BorrowKind::Shared | BorrowKind::Fake(_)) | ( Read(ReadKind::Borrow(BorrowKind::Fake(FakeBorrowKind::Shallow))), BorrowKind::Mut { .. }, - ) => Control::Continue, + ) => ControlFlow::Continue(()), (Reservation(_), BorrowKind::Fake(_) | BorrowKind::Shared) => { // This used to be a future compatibility warning (to be // disallowed on NLL). See rust-lang/rust#56254 - Control::Continue + ControlFlow::Continue(()) } (Write(WriteKind::Move), BorrowKind::Fake(FakeBorrowKind::Shallow)) => { // Handled by initialization checks. - Control::Continue + ControlFlow::Continue(()) } (Read(kind), BorrowKind::Mut { .. }) => { // Reading from mere reservations of mutable-borrows is OK. 
if !is_active(this.dominators(), borrow, location) { - assert!(allow_two_phase_borrow(borrow.kind)); - return Control::Continue; + assert!(borrow.kind.allows_two_phase_borrow()); + return ControlFlow::Continue(()); } error_reported = true; @@ -1112,7 +1107,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> { this.buffer_error(err); } } - Control::Break + ControlFlow::Break(()) } (Reservation(kind) | Activation(kind, _) | Write(kind), _) => { @@ -1159,7 +1154,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> { this.report_illegal_mutation_of_borrowed(location, place_span, borrow) } } - Control::Break + ControlFlow::Break(()) } }, ); @@ -1167,6 +1162,61 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> { error_reported } + /// Through #123739, backward incompatible drops (BIDs) are introduced. + /// We would like to emit lints whether borrow checking fails at these future drop locations. + #[instrument(level = "debug", skip(self, state))] + fn check_backward_incompatible_drop( + &mut self, + location: Location, + (place, place_span): (Place<'tcx>, Span), + state: &BorrowckDomain, + ) { + let tcx = self.infcx.tcx; + // If this type does not need `Drop`, then treat it like a `StorageDead`. + // This is needed because we track the borrows of refs to thread locals, + // and we'll ICE because we don't track borrows behind shared references. + let sd = if place.ty(self.body, tcx).ty.needs_drop(tcx, self.body.typing_env(tcx)) { + AccessDepth::Drop + } else { + AccessDepth::Shallow(None) + }; + + let borrows_in_scope = self.borrows_in_scope(location, state); + + // This is a very simplified version of `Self::check_access_for_conflict`. + // We are here checking on BIDs and specifically still-live borrows of data involving the BIDs. + each_borrow_involving_path( + self, + self.infcx.tcx, + self.body, + (sd, place), + self.borrow_set, + |borrow_index| borrows_in_scope.contains(borrow_index), + |this, _borrow_index, borrow| { + if matches!(borrow.kind, BorrowKind::Fake(_)) { + return ControlFlow::Continue(()); + } + let borrowed = this.retrieve_borrow_spans(borrow).var_or_use_path_span(); + let explain = this.explain_why_borrow_contains_point( + location, + borrow, + Some((WriteKind::StorageDeadOrDrop, place)), + ); + this.infcx.tcx.node_span_lint( + TAIL_EXPR_DROP_ORDER, + CRATE_HIR_ID, + borrowed, + |diag| { + session_diagnostics::TailExprDropOrder { borrowed }.decorate_lint(diag); + explain.add_explanation_to_diagnostic(&this, diag, "", None, None); + }, + ); + // We may stop at the first case + ControlFlow::Break(()) + }, + ); + } + fn mutate_place( &mut self, location: Location, @@ -1203,7 +1253,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> { } BorrowKind::Mut { .. } => { let wk = WriteKind::MutableBorrow(bk); - if allow_two_phase_borrow(bk) { + if bk.allows_two_phase_borrow() { (Deep, Reservation(wk)) } else { (Deep, Write(wk)) @@ -1288,16 +1338,11 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> { ); } - &(Rvalue::Len(place) | Rvalue::Discriminant(place)) => { - let af = match *rvalue { - Rvalue::Len(..) => Some(ArtificialField::ArrayLength), - Rvalue::Discriminant(..) => None, - _ => unreachable!(), - }; + &Rvalue::Discriminant(place) => { self.access_place( location, (place, span), - (Shallow(af), Read(ReadKind::Copy)), + (Shallow(None), Read(ReadKind::Copy)), LocalMutationIsAllowed::No, state, ); @@ -1633,6 +1678,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> { | ty::CoroutineWitness(..) 
| ty::Never | ty::Tuple(_) + | ty::UnsafeBinder(_) | ty::Alias(_, _) | ty::Param(_) | ty::Bound(_, _) @@ -1674,6 +1720,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> { | ty::Dynamic(_, _, _) | ty::CoroutineWitness(..) | ty::Never + | ty::UnsafeBinder(_) | ty::Alias(_, _) | ty::Param(_) | ty::Bound(_, _) @@ -2393,144 +2440,36 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> { // `BasicBlocks` computes dominators on-demand and caches them. self.body.basic_blocks.dominators() } -} - -mod diags { - use rustc_errors::ErrorGuaranteed; - - use super::*; - - enum BufferedDiag<'infcx> { - Error(Diag<'infcx>), - NonError(Diag<'infcx, ()>), - } - - impl<'infcx> BufferedDiag<'infcx> { - fn sort_span(&self) -> Span { - match self { - BufferedDiag::Error(diag) => diag.sort_span, - BufferedDiag::NonError(diag) => diag.sort_span, - } - } - } - - pub(crate) struct BorrowckDiags<'infcx, 'tcx> { - /// This field keeps track of move errors that are to be reported for given move indices. - /// - /// There are situations where many errors can be reported for a single move out (see - /// #53807) and we want only the best of those errors. - /// - /// The `report_use_of_moved_or_uninitialized` function checks this map and replaces the - /// diagnostic (if there is one) if the `Place` of the error being reported is a prefix of - /// the `Place` of the previous most diagnostic. This happens instead of buffering the - /// error. Once all move errors have been reported, any diagnostics in this map are added - /// to the buffer to be emitted. - /// - /// `BTreeMap` is used to preserve the order of insertions when iterating. This is necessary - /// when errors in the map are being re-added to the error buffer so that errors with the - /// same primary span come out in a consistent order. - buffered_move_errors: BTreeMap, (PlaceRef<'tcx>, Diag<'infcx>)>, - - buffered_mut_errors: FxIndexMap, usize)>, - - /// Buffer of diagnostics to be reported. A mixture of error and non-error diagnostics. - buffered_diags: Vec>, - } - - impl<'infcx, 'tcx> BorrowckDiags<'infcx, 'tcx> { - pub(crate) fn new() -> Self { - BorrowckDiags { - buffered_move_errors: BTreeMap::new(), - buffered_mut_errors: Default::default(), - buffered_diags: Default::default(), - } - } - - pub(crate) fn buffer_error(&mut self, diag: Diag<'infcx>) { - self.buffered_diags.push(BufferedDiag::Error(diag)); - } - - pub(crate) fn buffer_non_error(&mut self, diag: Diag<'infcx, ()>) { - self.buffered_diags.push(BufferedDiag::NonError(diag)); - } - } - - impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { - pub(crate) fn buffer_error(&mut self, diag: Diag<'infcx>) { - self.diags.buffer_error(diag); - } - - pub(crate) fn buffer_non_error(&mut self, diag: Diag<'infcx, ()>) { - self.diags.buffer_non_error(diag); - } - - pub(crate) fn buffer_move_error( - &mut self, - move_out_indices: Vec, - place_and_err: (PlaceRef<'tcx>, Diag<'infcx>), - ) -> bool { - if let Some((_, diag)) = - self.diags.buffered_move_errors.insert(move_out_indices, place_and_err) - { - // Cancel the old diagnostic so we don't ICE - diag.cancel(); - false - } else { - true - } - } - - pub(crate) fn get_buffered_mut_error( - &mut self, - span: Span, - ) -> Option<(Diag<'infcx>, usize)> { - // FIXME(#120456) - is `swap_remove` correct? 
- self.diags.buffered_mut_errors.swap_remove(&span) - } - - pub(crate) fn buffer_mut_error(&mut self, span: Span, diag: Diag<'infcx>, count: usize) { - self.diags.buffered_mut_errors.insert(span, (diag, count)); - } - pub(crate) fn emit_errors(&mut self) -> Option { - let mut res = self.infcx.tainted_by_errors(); - - // Buffer any move errors that we collected and de-duplicated. - for (_, (_, diag)) in std::mem::take(&mut self.diags.buffered_move_errors) { - // We have already set tainted for this error, so just buffer it. - self.diags.buffer_error(diag); - } - for (_, (mut diag, count)) in std::mem::take(&mut self.diags.buffered_mut_errors) { - if count > 10 { - #[allow(rustc::diagnostic_outside_of_impl)] - #[allow(rustc::untranslatable_diagnostic)] - diag.note(format!("...and {} other attempted mutable borrows", count - 10)); - } - self.diags.buffer_error(diag); - } + fn lint_unused_mut(&self) { + let tcx = self.infcx.tcx; + let body = self.body; + for local in body.mut_vars_and_args_iter().filter(|local| !self.used_mut.contains(local)) { + let local_decl = &body.local_decls[local]; + let lint_root = match &body.source_scopes[local_decl.source_info.scope].local_data { + ClearCrossCrate::Set(data) => data.lint_root, + _ => continue, + }; - if !self.diags.buffered_diags.is_empty() { - self.diags.buffered_diags.sort_by_key(|buffered_diag| buffered_diag.sort_span()); - for buffered_diag in self.diags.buffered_diags.drain(..) { - match buffered_diag { - BufferedDiag::Error(diag) => res = Some(diag.emit()), - BufferedDiag::NonError(diag) => diag.emit(), + // Skip over locals that begin with an underscore or have no name + match self.local_names[local] { + Some(name) => { + if name.as_str().starts_with('_') { + continue; } } + None => continue, } - res - } + let span = local_decl.source_info.span; + if span.desugaring_kind().is_some() { + // If the `mut` arises as part of a desugaring, we should ignore it. + continue; + } - pub(crate) fn has_buffered_diags(&self) -> bool { - self.diags.buffered_diags.is_empty() - } + let mut_span = tcx.sess.source_map().span_until_non_whitespace(span); - pub(crate) fn has_move_error( - &self, - move_out_indices: &[MoveOutIndex], - ) -> Option<&(PlaceRef<'tcx>, Diag<'infcx>)> { - self.diags.buffered_move_errors.get(move_out_indices) + tcx.emit_node_span_lint(UNUSED_MUT, lint_root, span, VarNeedNotMut { span: mut_span }) } } } diff --git a/compiler/rustc_borrowck/src/member_constraints.rs b/compiler/rustc_borrowck/src/member_constraints.rs index fc621a3b8283d..a0adf471fd31f 100644 --- a/compiler/rustc_borrowck/src/member_constraints.rs +++ b/compiler/rustc_borrowck/src/member_constraints.rs @@ -4,10 +4,9 @@ use std::ops::Index; use rustc_data_structures::captures::Captures; use rustc_data_structures::fx::FxIndexMap; use rustc_index::{IndexSlice, IndexVec}; -use rustc_middle::infer::MemberConstraint; use rustc_middle::ty::{self, Ty}; use rustc_span::Span; -use tracing::debug; +use tracing::instrument; /// Compactly stores a set of `R0 member of [R1...Rn]` constraints, /// indexed by the region `R0`. @@ -23,7 +22,7 @@ where /// Stores the data about each `R0 member of [R1..Rn]` constraint. /// These are organized into a linked list, so each constraint /// contains the index of the next constraint with the same `R0`. - constraints: IndexVec>, + constraints: IndexVec>, /// Stores the `R1..Rn` regions for *all* sets. 
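For context on the `lint_unused_mut` pass shown above (before the member-constraints changes): it warns on `mut` bindings that were never mutated, skipping names that start with `_` and, in the real pass, spans introduced by desugarings. A toy version of just that filtering, with made-up inputs rather than MIR locals:

```rust
/// Which `mut` locals deserve an `unused_mut`-style warning: those never
/// mutated, excluding names starting with `_`. (The real pass additionally
/// skips locals whose span comes from a desugaring.)
fn unused_mut_candidates<'a>(mut_locals: &[(&'a str, bool)]) -> Vec<&'a str> {
    mut_locals
        .iter()
        // Tuple is (name, was actually mutated).
        .filter(|&&(name, mutated)| !mutated && !name.starts_with('_'))
        .map(|&(name, _)| name)
        .collect()
}

fn main() {
    let locals = [("x", false), ("_scratch", false), ("y", true)];
    assert_eq!(unused_mut_candidates(&locals), vec!["x"]);
}
```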
For any given /// constraint, we keep two indices so that we can pull out a @@ -33,7 +32,7 @@ where /// Represents a `R0 member of [R1..Rn]` constraint #[derive(Debug)] -pub(crate) struct NllMemberConstraint<'tcx> { +pub(crate) struct MemberConstraint<'tcx> { next_constraint: Option, /// The span where the hidden type was instantiated. @@ -70,37 +69,34 @@ impl Default for MemberConstraintSet<'_, ty::RegionVid> { } impl<'tcx> MemberConstraintSet<'tcx, ty::RegionVid> { + pub(crate) fn is_empty(&self) -> bool { + self.constraints.is_empty() + } + /// Pushes a member constraint into the set. - /// - /// The input member constraint `m_c` is in the form produced by - /// the `rustc_middle::infer` code. - /// - /// The `to_region_vid` callback fn is used to convert the regions - /// within into `RegionVid` format -- it typically consults the - /// `UniversalRegions` data structure that is known to the caller - /// (but which this code is unaware of). - pub(crate) fn push_constraint( + #[instrument(level = "debug", skip(self))] + pub(crate) fn add_member_constraint( &mut self, - m_c: &MemberConstraint<'tcx>, - mut to_region_vid: impl FnMut(ty::Region<'tcx>) -> ty::RegionVid, + key: ty::OpaqueTypeKey<'tcx>, + hidden_ty: Ty<'tcx>, + definition_span: Span, + member_region_vid: ty::RegionVid, + choice_regions: &[ty::RegionVid], ) { - debug!("push_constraint(m_c={:?})", m_c); - let member_region_vid: ty::RegionVid = to_region_vid(m_c.member_region); let next_constraint = self.first_constraints.get(&member_region_vid).cloned(); let start_index = self.choice_regions.len(); - let end_index = start_index + m_c.choice_regions.len(); - debug!("push_constraint: member_region_vid={:?}", member_region_vid); - let constraint_index = self.constraints.push(NllMemberConstraint { + self.choice_regions.extend(choice_regions); + let end_index = self.choice_regions.len(); + let constraint_index = self.constraints.push(MemberConstraint { next_constraint, member_region_vid, - definition_span: m_c.definition_span, - hidden_ty: m_c.hidden_ty, - key: m_c.key, + definition_span, + hidden_ty, + key, start_index, end_index, }); self.first_constraints.insert(member_region_vid, constraint_index); - self.choice_regions.extend(m_c.choice_regions.iter().map(|&r| to_region_vid(r))); } } @@ -182,7 +178,7 @@ where /// R0 member of [R1..Rn] /// ``` pub(crate) fn choice_regions(&self, pci: NllMemberConstraintIndex) -> &[ty::RegionVid] { - let NllMemberConstraint { start_index, end_index, .. } = &self.constraints[pci]; + let MemberConstraint { start_index, end_index, .. 
} = &self.constraints[pci]; &self.choice_regions[*start_index..*end_index] } } @@ -191,9 +187,9 @@ impl<'tcx, R> Index for MemberConstraintSet<'tcx, R> where R: Copy + Eq, { - type Output = NllMemberConstraint<'tcx>; + type Output = MemberConstraint<'tcx>; - fn index(&self, i: NllMemberConstraintIndex) -> &NllMemberConstraint<'tcx> { + fn index(&self, i: NllMemberConstraintIndex) -> &MemberConstraint<'tcx> { &self.constraints[i] } } @@ -215,7 +211,7 @@ where /// target_list: A -> B -> C -> D -> E -> F -> (None) /// ``` fn append_list( - constraints: &mut IndexSlice>, + constraints: &mut IndexSlice>, target_list: NllMemberConstraintIndex, source_list: NllMemberConstraintIndex, ) { diff --git a/compiler/rustc_borrowck/src/nll.rs b/compiler/rustc_borrowck/src/nll.rs index d7ea8e1bcc2cc..35264bd1a7075 100644 --- a/compiler/rustc_borrowck/src/nll.rs +++ b/compiler/rustc_borrowck/src/nll.rs @@ -21,30 +21,33 @@ use rustc_mir_dataflow::impls::MaybeInitializedPlaces; use rustc_mir_dataflow::move_paths::MoveData; use rustc_mir_dataflow::points::DenseLocationMap; use rustc_session::config::MirIncludeSpans; -use rustc_span::symbol::sym; +use rustc_span::sym; use tracing::{debug, instrument}; use crate::borrow_set::BorrowSet; use crate::consumers::ConsumerOptions; -use crate::diagnostics::RegionErrors; -use crate::facts::{AllFacts, AllFactsExt, RustcFacts}; -use crate::location::LocationTable; +use crate::diagnostics::{BorrowckDiagnosticsBuffer, RegionErrors}; +use crate::polonius::LocalizedOutlivesConstraintSet; +use crate::polonius::legacy::{ + PoloniusFacts, PoloniusFactsExt, PoloniusLocationTable, PoloniusOutput, +}; use crate::region_infer::RegionInferenceContext; use crate::type_check::{self, MirTypeckResults}; use crate::universal_regions::UniversalRegions; use crate::{BorrowckInferCtxt, polonius, renumber}; -pub type PoloniusOutput = Output; - /// The output of `nll::compute_regions`. This includes the computed `RegionInferenceContext`, any /// closure requirements to propagate, and any generated errors. pub(crate) struct NllOutput<'tcx> { pub regioncx: RegionInferenceContext<'tcx>, pub opaque_type_values: FxIndexMap>, - pub polonius_input: Option>, + pub polonius_input: Option>, pub polonius_output: Option>, pub opt_closure_req: Option>, pub nll_errors: RegionErrors<'tcx>, + + /// When using `-Zpolonius=next`: the localized typeck and liveness constraints. + pub localized_outlives_constraints: Option, } /// Rewrites the regions in the MIR to use NLL variables, also scraping out the set of universal @@ -79,7 +82,7 @@ pub(crate) fn compute_regions<'a, 'tcx>( universal_regions: UniversalRegions<'tcx>, body: &Body<'tcx>, promoted: &IndexSlice>, - location_table: &LocationTable, + location_table: &PoloniusLocationTable, flow_inits: ResultsCursor<'a, 'tcx, MaybeInitializedPlaces<'a, 'tcx>>, move_data: &MoveData<'tcx>, borrow_set: &BorrowSet<'tcx>, @@ -90,58 +93,69 @@ pub(crate) fn compute_regions<'a, 'tcx>( || is_polonius_legacy_enabled; let polonius_output = consumer_options.map(|c| c.polonius_output()).unwrap_or_default() || is_polonius_legacy_enabled; - let mut all_facts = - (polonius_input || AllFacts::enabled(infcx.tcx)).then_some(AllFacts::default()); + let mut polonius_facts = + (polonius_input || PoloniusFacts::enabled(infcx.tcx)).then_some(PoloniusFacts::default()); - let elements = Rc::new(DenseLocationMap::new(body)); + let location_map = Rc::new(DenseLocationMap::new(body)); // Run the MIR type-checker. 
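The `MemberConstraintSet` changes above keep the same compact layout: constraints with the same member region form an intrusive linked list via `next_constraint`, and each constraint's choice regions are a `start..end` slice of one shared vector. A simplified sketch of that storage scheme using plain `usize` indices instead of the compiler's newtyped indices and region vids:

```rust
use std::collections::HashMap;

#[derive(Debug)]
struct Constraint {
    /// Next constraint with the same member region, forming a linked list.
    next: Option<usize>,
    member_region: u32,
    /// This constraint's choice regions are `choice_regions[start..end]`.
    start: usize,
    end: usize,
}

#[derive(Default)]
struct ConstraintSet {
    constraints: Vec<Constraint>,
    /// `R1..Rn` for *all* constraints, stored back to back.
    choice_regions: Vec<u32>,
    /// Head of the per-region linked list.
    first: HashMap<u32, usize>,
}

impl ConstraintSet {
    fn add(&mut self, member_region: u32, choices: &[u32]) {
        let start = self.choice_regions.len();
        self.choice_regions.extend_from_slice(choices);
        let end = self.choice_regions.len();
        let next = self.first.get(&member_region).copied();
        let idx = self.constraints.len();
        self.constraints.push(Constraint { next, member_region, start, end });
        self.first.insert(member_region, idx);
    }

    fn choices(&self, idx: usize) -> &[u32] {
        let c = &self.constraints[idx];
        &self.choice_regions[c.start..c.end]
    }
}

fn main() {
    let mut set = ConstraintSet::default();
    set.add(0, &[1, 2]);
    set.add(0, &[3]);
    assert_eq!(set.choices(0), &[1, 2]);
    assert_eq!(set.choices(1), &[3]);
    // Both constraints share the member region; the second links back to the first.
    assert_eq!(set.constraints[1].member_region, 0);
    assert_eq!(set.constraints[1].next, Some(0));
}
```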
- let MirTypeckResults { constraints, universal_region_relations, opaque_type_values } = - type_check::type_check( - infcx, - body, - promoted, - universal_regions, - location_table, - borrow_set, - &mut all_facts, - flow_inits, - move_data, - Rc::clone(&elements), - ); + let MirTypeckResults { + constraints, + universal_region_relations, + opaque_type_values, + polonius_context, + } = type_check::type_check( + infcx, + body, + promoted, + universal_regions, + location_table, + borrow_set, + &mut polonius_facts, + flow_inits, + move_data, + Rc::clone(&location_map), + ); // Create the region inference context, taking ownership of the // region inference data that was contained in `infcx`, and the // base constraints generated by the type-check. - let var_origins = infcx.get_region_var_origins(); + let var_infos = infcx.get_region_var_infos(); // If requested, emit legacy polonius facts. polonius::legacy::emit_facts( - &mut all_facts, + &mut polonius_facts, infcx.tcx, location_table, body, borrow_set, move_data, &universal_region_relations, + &constraints, ); let mut regioncx = RegionInferenceContext::new( infcx, - var_origins, + var_infos, constraints, universal_region_relations, - elements, + location_map, ); + // If requested for `-Zpolonius=next`, convert NLL constraints to localized outlives constraints + // and use them to compute loan liveness. + let localized_outlives_constraints = polonius_context.as_ref().map(|polonius_context| { + polonius_context.compute_loan_liveness(infcx.tcx, &mut regioncx, body, borrow_set) + }); + // If requested: dump NLL facts, and run legacy polonius analysis. - let polonius_output = all_facts.as_ref().and_then(|all_facts| { + let polonius_output = polonius_facts.as_ref().and_then(|polonius_facts| { if infcx.tcx.sess.opts.unstable_opts.nll_facts { let def_id = body.source.def_id(); let def_path = infcx.tcx.def_path(def_id); let dir_path = PathBuf::from(&infcx.tcx.sess.opts.unstable_opts.nll_facts_dir) .join(def_path.to_filename_friendly_no_crate()); - all_facts.write_to_dir(dir_path, location_table).unwrap(); + polonius_facts.write_to_dir(dir_path, location_table).unwrap(); } if polonius_output { @@ -150,7 +164,7 @@ pub(crate) fn compute_regions<'a, 'tcx>( let algorithm = Algorithm::from_str(&algorithm).unwrap(); debug!("compute_regions: using polonius algorithm {:?}", algorithm); let _prof_timer = infcx.tcx.prof.generic_activity("polonius_analysis"); - Some(Box::new(Output::compute(all_facts, algorithm, false))) + Some(Box::new(Output::compute(polonius_facts, algorithm, false))) } else { None } @@ -170,10 +184,11 @@ pub(crate) fn compute_regions<'a, 'tcx>( NllOutput { regioncx, opaque_type_values: remapped_opaque_tys, - polonius_input: all_facts.map(Box::new), + polonius_input: polonius_facts.map(Box::new), polonius_output, opt_closure_req: closure_region_requirements, nll_errors, + localized_outlives_constraints, } } @@ -214,40 +229,7 @@ pub(super) fn dump_nll_mir<'tcx>( &0, body, |pass_where, out| { - match pass_where { - // Before the CFG, dump out the values for each region variable. 
- PassWhere::BeforeCFG => { - regioncx.dump_mir(tcx, out)?; - writeln!(out, "|")?; - - if let Some(closure_region_requirements) = closure_region_requirements { - writeln!(out, "| Free Region Constraints")?; - for_each_region_constraint(tcx, closure_region_requirements, &mut |msg| { - writeln!(out, "| {msg}") - })?; - writeln!(out, "|")?; - } - - if borrow_set.len() > 0 { - writeln!(out, "| Borrows")?; - for (borrow_idx, borrow_data) in borrow_set.iter_enumerated() { - writeln!( - out, - "| {:?}: issued at {:?} in {:?}", - borrow_idx, borrow_data.reserve_location, borrow_data.region - )?; - } - writeln!(out, "|")?; - } - } - - PassWhere::BeforeLocation(_) => {} - - PassWhere::AfterTerminator(_) => {} - - PassWhere::BeforeBlock(_) | PassWhere::AfterLocation(_) | PassWhere::AfterCFG => {} - } - Ok(()) + emit_nll_mir(tcx, regioncx, closure_region_requirements, borrow_set, pass_where, out) }, options, ); @@ -265,6 +247,51 @@ pub(super) fn dump_nll_mir<'tcx>( }; } +/// Produces the actual NLL MIR sections to emit during the dumping process. +pub(crate) fn emit_nll_mir<'tcx>( + tcx: TyCtxt<'tcx>, + regioncx: &RegionInferenceContext<'tcx>, + closure_region_requirements: &Option>, + borrow_set: &BorrowSet<'tcx>, + pass_where: PassWhere, + out: &mut dyn io::Write, +) -> io::Result<()> { + match pass_where { + // Before the CFG, dump out the values for each region variable. + PassWhere::BeforeCFG => { + regioncx.dump_mir(tcx, out)?; + writeln!(out, "|")?; + + if let Some(closure_region_requirements) = closure_region_requirements { + writeln!(out, "| Free Region Constraints")?; + for_each_region_constraint(tcx, closure_region_requirements, &mut |msg| { + writeln!(out, "| {msg}") + })?; + writeln!(out, "|")?; + } + + if borrow_set.len() > 0 { + writeln!(out, "| Borrows")?; + for (borrow_idx, borrow_data) in borrow_set.iter_enumerated() { + writeln!( + out, + "| {:?}: issued at {:?} in {:?}", + borrow_idx, borrow_data.reserve_location, borrow_data.region + )?; + } + writeln!(out, "|")?; + } + } + + PassWhere::BeforeLocation(_) => {} + + PassWhere::AfterTerminator(_) => {} + + PassWhere::BeforeBlock(_) | PassWhere::AfterLocation(_) | PassWhere::AfterCFG => {} + } + Ok(()) +} + #[allow(rustc::diagnostic_outside_of_impl)] #[allow(rustc::untranslatable_diagnostic)] pub(super) fn dump_annotation<'tcx, 'infcx>( @@ -273,7 +300,7 @@ pub(super) fn dump_annotation<'tcx, 'infcx>( regioncx: &RegionInferenceContext<'tcx>, closure_region_requirements: &Option>, opaque_type_values: &FxIndexMap>, - diags: &mut crate::diags::BorrowckDiags<'infcx, 'tcx>, + diagnostics_buffer: &mut BorrowckDiagnosticsBuffer<'infcx, 'tcx>, ) { let tcx = infcx.tcx; let base_def_id = tcx.typeck_root_def_id(body.source.def_id()); @@ -319,7 +346,7 @@ pub(super) fn dump_annotation<'tcx, 'infcx>( err.note(format!("Inferred opaque type values:\n{opaque_type_values:#?}")); } - diags.buffer_non_error(err); + diagnostics_buffer.buffer_non_error(err); } fn for_each_region_constraint<'tcx>( diff --git a/compiler/rustc_borrowck/src/path_utils.rs b/compiler/rustc_borrowck/src/path_utils.rs index 12a37f56fcf96..2c94a32d369ce 100644 --- a/compiler/rustc_borrowck/src/path_utils.rs +++ b/compiler/rustc_borrowck/src/path_utils.rs @@ -1,26 +1,14 @@ +use std::ops::ControlFlow; + use rustc_abi::FieldIdx; use rustc_data_structures::graph::dominators::Dominators; -use rustc_middle::mir::{BasicBlock, Body, BorrowKind, Location, Place, PlaceRef, ProjectionElem}; +use rustc_middle::mir::{BasicBlock, Body, Location, Place, PlaceRef, ProjectionElem}; use 
rustc_middle::ty::TyCtxt; use tracing::debug; use crate::borrow_set::{BorrowData, BorrowSet, TwoPhaseActivation}; use crate::{AccessDepth, BorrowIndex, places_conflict}; -/// Returns `true` if the borrow represented by `kind` is -/// allowed to be split into separate Reservation and -/// Activation phases. -pub(super) fn allow_two_phase_borrow(kind: BorrowKind) -> bool { - kind.allows_two_phase_borrow() -} - -/// Control for the path borrow checking code -#[derive(Copy, Clone, PartialEq, Eq, Debug)] -pub(super) enum Control { - Continue, - Break, -} - /// Encapsulates the idea of iterating over every borrow that involves a particular path pub(super) fn each_borrow_involving_path<'tcx, F, I, S>( s: &mut S, @@ -31,7 +19,7 @@ pub(super) fn each_borrow_involving_path<'tcx, F, I, S>( is_candidate: I, mut op: F, ) where - F: FnMut(&mut S, BorrowIndex, &BorrowData<'tcx>) -> Control, + F: FnMut(&mut S, BorrowIndex, &BorrowData<'tcx>) -> ControlFlow<()>, I: Fn(BorrowIndex) -> bool, { let (access, place) = access_place; @@ -62,7 +50,7 @@ pub(super) fn each_borrow_involving_path<'tcx, F, I, S>( i, borrowed, place, access ); let ctrl = op(s, i, borrowed); - if ctrl == Control::Break { + if matches!(ctrl, ControlFlow::Break(_)) { return; } } diff --git a/compiler/rustc_borrowck/src/places_conflict.rs b/compiler/rustc_borrowck/src/places_conflict.rs index 679e111caa984..560b8c0349ad6 100644 --- a/compiler/rustc_borrowck/src/places_conflict.rs +++ b/compiler/rustc_borrowck/src/places_conflict.rs @@ -203,8 +203,7 @@ fn place_components_conflict<'tcx>( let base_ty = base.ty(body, tcx).ty; match (elem, base_ty.kind(), access) { - (_, _, Shallow(Some(ArtificialField::ArrayLength))) - | (_, _, Shallow(Some(ArtificialField::FakeBorrow))) => { + (_, _, Shallow(Some(ArtificialField::FakeBorrow))) => { // The array length is like additional fields on the // type; it does not overlap any existing data there. // Furthermore, if cannot actually be a prefix of any diff --git a/compiler/rustc_borrowck/src/polonius/constraints.rs b/compiler/rustc_borrowck/src/polonius/constraints.rs new file mode 100644 index 0000000000000..50f59dd0dee67 --- /dev/null +++ b/compiler/rustc_borrowck/src/polonius/constraints.rs @@ -0,0 +1,45 @@ +use rustc_middle::ty::RegionVid; +use rustc_mir_dataflow::points::PointIndex; + +/// A localized outlives constraint reifies the CFG location where the outlives constraint holds, +/// within the origins themselves as if they were different from point to point: from `a: b` +/// outlives constraints to `a@p: b@p`, where `p` is the point in the CFG. +/// +/// This models two sources of constraints: +/// - constraints that traverse the subsets between regions at a given point, `a@p: b@p`. These +/// depend on typeck constraints generated via assignments, calls, etc. (In practice there are +/// subtleties where a statement's effect only starts being visible at the successor point, via +/// the "result" of that statement). +/// - constraints that traverse the CFG via the same region, `a@p: a@q`, where `p` is a predecessor +/// of `q`. These depend on the liveness of the regions at these points, as well as their +/// variance. +/// +/// The `source` origin at `from` flows into the `target` origin at `to`. +/// +/// This dual of NLL's [crate::constraints::OutlivesConstraint] therefore encodes the +/// position-dependent outlives constraints used by Polonius, to model the flow-sensitive loan +/// propagation via reachability within a graph of localized constraints. 
+#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] +pub(crate) struct LocalizedOutlivesConstraint { + pub source: RegionVid, + pub from: PointIndex, + pub target: RegionVid, + pub to: PointIndex, +} + +/// A container of [LocalizedOutlivesConstraint]s that can be turned into a traversable +/// `rustc_data_structures` graph. +#[derive(Clone, Default, Debug)] +pub(crate) struct LocalizedOutlivesConstraintSet { + pub outlives: Vec, +} + +impl LocalizedOutlivesConstraintSet { + pub(crate) fn push(&mut self, constraint: LocalizedOutlivesConstraint) { + if constraint.source == constraint.target && constraint.from == constraint.to { + // 'a@p: 'a@p is pretty uninteresting + return; + } + self.outlives.push(constraint); + } +} diff --git a/compiler/rustc_borrowck/src/polonius/dump.rs b/compiler/rustc_borrowck/src/polonius/dump.rs new file mode 100644 index 0000000000000..a6d8014903440 --- /dev/null +++ b/compiler/rustc_borrowck/src/polonius/dump.rs @@ -0,0 +1,104 @@ +use std::io; + +use rustc_middle::mir::pretty::{PrettyPrintMirOptions, dump_mir_with_options}; +use rustc_middle::mir::{Body, ClosureRegionRequirements, PassWhere}; +use rustc_middle::ty::TyCtxt; +use rustc_session::config::MirIncludeSpans; + +use crate::borrow_set::BorrowSet; +use crate::polonius::{LocalizedOutlivesConstraint, LocalizedOutlivesConstraintSet}; +use crate::{BorrowckInferCtxt, RegionInferenceContext}; + +/// `-Zdump-mir=polonius` dumps MIR annotated with NLL and polonius specific information. +// Note: this currently duplicates most of NLL MIR, with some additions for the localized outlives +// constraints. This is ok for now as this dump will change in the near future to an HTML file to +// become more useful. +pub(crate) fn dump_polonius_mir<'tcx>( + infcx: &BorrowckInferCtxt<'tcx>, + body: &Body<'tcx>, + regioncx: &RegionInferenceContext<'tcx>, + borrow_set: &BorrowSet<'tcx>, + localized_outlives_constraints: Option, + closure_region_requirements: &Option>, +) { + let tcx = infcx.tcx; + if !tcx.sess.opts.unstable_opts.polonius.is_next_enabled() { + return; + } + + let localized_outlives_constraints = localized_outlives_constraints + .expect("missing localized constraints with `-Zpolonius=next`"); + + // We want the NLL extra comments printed by default in NLL MIR dumps (they were removed in + // #112346). Specifying `-Z mir-include-spans` on the CLI still has priority: for example, + // they're always disabled in mir-opt tests to make working with blessed dumps easier. + let options = PrettyPrintMirOptions { + include_extra_comments: matches!( + tcx.sess.opts.unstable_opts.mir_include_spans, + MirIncludeSpans::On | MirIncludeSpans::Nll + ), + }; + + dump_mir_with_options( + tcx, + false, + "polonius", + &0, + body, + |pass_where, out| { + emit_polonius_mir( + tcx, + regioncx, + closure_region_requirements, + borrow_set, + &localized_outlives_constraints, + pass_where, + out, + ) + }, + options, + ); +} + +/// Produces the actual NLL + Polonius MIR sections to emit during the dumping process. 
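The doc comment above frames loan propagation as reachability over `(region, point)` nodes connected by localized constraints. A rough, self-contained sketch of that idea using a plain BFS and integers for regions and points (the real implementation uses rustc's graph and bit-set types):

```rust
use std::collections::{HashMap, HashSet, VecDeque};

/// `source@from: target@to`, in the spirit of the constraint struct above.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct Edge {
    source: u32,
    from: u32,
    target: u32,
    to: u32,
}

/// Which `(region, point)` nodes a loan issued at `start` can reach by
/// following localized outlives constraints.
fn reachable(edges: &[Edge], start: (u32, u32)) -> HashSet<(u32, u32)> {
    let mut succ: HashMap<(u32, u32), Vec<(u32, u32)>> = HashMap::new();
    for e in edges {
        succ.entry((e.source, e.from)).or_default().push((e.target, e.to));
    }

    let mut seen = HashSet::from([start]);
    let mut queue = VecDeque::from([start]);
    while let Some(node) = queue.pop_front() {
        for &next in succ.get(&node).into_iter().flatten() {
            if seen.insert(next) {
                queue.push_back(next);
            }
        }
    }
    seen
}

fn main() {
    // 'r0 at point 0 flows into 'r1 at point 0, which stays live into point 1.
    let edges = [
        Edge { source: 0, from: 0, target: 1, to: 0 },
        Edge { source: 1, from: 0, target: 1, to: 1 },
    ];
    let live = reachable(&edges, (0, 0));
    assert!(live.contains(&(1, 1)));
    assert!(!live.contains(&(0, 1)));
}
```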
+fn emit_polonius_mir<'tcx>( + tcx: TyCtxt<'tcx>, + regioncx: &RegionInferenceContext<'tcx>, + closure_region_requirements: &Option>, + borrow_set: &BorrowSet<'tcx>, + localized_outlives_constraints: &LocalizedOutlivesConstraintSet, + pass_where: PassWhere, + out: &mut dyn io::Write, +) -> io::Result<()> { + // Emit the regular NLL front-matter + crate::nll::emit_nll_mir( + tcx, + regioncx, + closure_region_requirements, + borrow_set, + pass_where.clone(), + out, + )?; + + let liveness = regioncx.liveness_constraints(); + + // Add localized outlives constraints + match pass_where { + PassWhere::BeforeCFG => { + if localized_outlives_constraints.outlives.len() > 0 { + writeln!(out, "| Localized constraints")?; + + for constraint in &localized_outlives_constraints.outlives { + let LocalizedOutlivesConstraint { source, from, target, to } = constraint; + let from = liveness.location_from_point(*from); + let to = liveness.location_from_point(*to); + writeln!(out, "| {source:?} at {from:?} -> {target:?} at {to:?}")?; + } + writeln!(out, "|")?; + } + } + _ => {} + } + + Ok(()) +} diff --git a/compiler/rustc_borrowck/src/polonius/legacy/accesses.rs b/compiler/rustc_borrowck/src/polonius/legacy/accesses.rs new file mode 100644 index 0000000000000..edd7ca578b727 --- /dev/null +++ b/compiler/rustc_borrowck/src/polonius/legacy/accesses.rs @@ -0,0 +1,84 @@ +use rustc_middle::mir::visit::{MutatingUseContext, PlaceContext, Visitor}; +use rustc_middle::mir::{Body, Local, Location, Place}; +use rustc_middle::ty::TyCtxt; +use rustc_mir_dataflow::move_paths::{LookupResult, MoveData}; +use tracing::debug; + +use super::{LocationIndex, PoloniusFacts, PoloniusLocationTable}; +use crate::def_use::{self, DefUse}; +use crate::universal_regions::UniversalRegions; + +/// Emit polonius facts for variable defs, uses, drops, and path accesses. +pub(crate) fn emit_access_facts<'tcx>( + tcx: TyCtxt<'tcx>, + facts: &mut PoloniusFacts, + body: &Body<'tcx>, + location_table: &PoloniusLocationTable, + move_data: &MoveData<'tcx>, + universal_regions: &UniversalRegions<'tcx>, +) { + let mut extractor = AccessFactsExtractor { facts, move_data, location_table }; + extractor.visit_body(body); + + for (local, local_decl) in body.local_decls.iter_enumerated() { + debug!("add use_of_var_derefs_origin facts - local={:?}, type={:?}", local, local_decl.ty); + tcx.for_each_free_region(&local_decl.ty, |region| { + let region_vid = universal_regions.to_region_vid(region); + facts.use_of_var_derefs_origin.push((local, region_vid.into())); + }); + } +} + +/// MIR visitor extracting point-wise facts about accesses. 
+struct AccessFactsExtractor<'a, 'tcx> { + facts: &'a mut PoloniusFacts, + move_data: &'a MoveData<'tcx>, + location_table: &'a PoloniusLocationTable, +} + +impl<'tcx> AccessFactsExtractor<'_, 'tcx> { + fn location_to_index(&self, location: Location) -> LocationIndex { + self.location_table.mid_index(location) + } +} + +impl<'a, 'tcx> Visitor<'tcx> for AccessFactsExtractor<'a, 'tcx> { + fn visit_local(&mut self, local: Local, context: PlaceContext, location: Location) { + match def_use::categorize(context) { + Some(DefUse::Def) => { + debug!("AccessFactsExtractor - emit def"); + self.facts.var_defined_at.push((local, self.location_to_index(location))); + } + Some(DefUse::Use) => { + debug!("AccessFactsExtractor - emit use"); + self.facts.var_used_at.push((local, self.location_to_index(location))); + } + Some(DefUse::Drop) => { + debug!("AccessFactsExtractor - emit drop"); + self.facts.var_dropped_at.push((local, self.location_to_index(location))); + } + _ => (), + } + } + + fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, location: Location) { + self.super_place(place, context, location); + + match context { + PlaceContext::NonMutatingUse(_) + | PlaceContext::MutatingUse(MutatingUseContext::Borrow) => { + let path = match self.move_data.rev_lookup.find(place.as_ref()) { + LookupResult::Exact(path) | LookupResult::Parent(Some(path)) => path, + _ => { + // There's no path access to emit. + return; + } + }; + debug!("AccessFactsExtractor - emit path access ({path:?}, {location:?})"); + self.facts.path_accessed_at_base.push((path, self.location_to_index(location))); + } + + _ => {} + } + } +} diff --git a/compiler/rustc_borrowck/src/facts.rs b/compiler/rustc_borrowck/src/polonius/legacy/facts.rs similarity index 79% rename from compiler/rustc_borrowck/src/facts.rs rename to compiler/rustc_borrowck/src/polonius/legacy/facts.rs index ef8e757a54780..64389b11a651a 100644 --- a/compiler/rustc_borrowck/src/facts.rs +++ b/compiler/rustc_borrowck/src/polonius/legacy/facts.rs @@ -4,18 +4,20 @@ use std::fs::{self, File}; use std::io::Write; use std::path::Path; -use polonius_engine::{AllFacts as PoloniusFacts, Atom}; +use polonius_engine::{AllFacts, Atom, Output}; use rustc_macros::extension; use rustc_middle::mir::Local; use rustc_middle::ty::{RegionVid, TyCtxt}; use rustc_mir_dataflow::move_paths::MovePathIndex; +use super::{LocationIndex, PoloniusLocationTable}; use crate::BorrowIndex; -use crate::location::{LocationIndex, LocationTable}; #[derive(Copy, Clone, Debug)] pub struct RustcFacts; +pub type PoloniusOutput = Output; + rustc_index::newtype_index! { /// A (kinda) newtype of `RegionVid` so we can implement `Atom` on it. #[orderable] @@ -47,11 +49,11 @@ impl polonius_engine::FactTypes for RustcFacts { type Path = MovePathIndex; } -pub type AllFacts = PoloniusFacts; +pub type PoloniusFacts = AllFacts; -#[extension(pub(crate) trait AllFactsExt)] -impl AllFacts { - /// Returns `true` if there is a need to gather `AllFacts` given the +#[extension(pub(crate) trait PoloniusFactsExt)] +impl PoloniusFacts { + /// Returns `true` if there is a need to gather `PoloniusFacts` given the /// current `-Z` flags. 
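`emit_access_facts` and its visitor above record `(variable, location)` tuples for definitions, uses, and drops. A toy version of that categorization over a flat list of accesses, with illustrative names rather than real MIR:

```rust
#[derive(Copy, Clone, Debug)]
enum Access {
    Def,
    Use,
    Drop,
}

#[derive(Default, Debug)]
struct Facts {
    var_defined_at: Vec<(u32, usize)>,
    var_used_at: Vec<(u32, usize)>,
    var_dropped_at: Vec<(u32, usize)>,
}

/// Dispatch each `(variable, access)` pair, at its position in the list, into
/// the matching fact table, as the extractor does per visited local.
fn extract(accesses: &[(u32, Access)], facts: &mut Facts) {
    for (location, &(var, access)) in accesses.iter().enumerate() {
        match access {
            Access::Def => facts.var_defined_at.push((var, location)),
            Access::Use => facts.var_used_at.push((var, location)),
            Access::Drop => facts.var_dropped_at.push((var, location)),
        }
    }
}

fn main() {
    let mut facts = Facts::default();
    extract(&[(0, Access::Def), (0, Access::Use), (0, Access::Drop)], &mut facts);
    assert_eq!(facts.var_used_at, vec![(0, 1)]);
    assert_eq!(facts.var_dropped_at, vec![(0, 2)]);
}
```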
fn enabled(tcx: TyCtxt<'_>) -> bool { tcx.sess.opts.unstable_opts.nll_facts @@ -61,7 +63,7 @@ impl AllFacts { fn write_to_dir( &self, dir: impl AsRef, - location_table: &LocationTable, + location_table: &PoloniusLocationTable, ) -> Result<(), Box> { let dir: &Path = dir.as_ref(); fs::create_dir_all(dir)?; @@ -117,7 +119,7 @@ impl Atom for LocationIndex { } struct FactWriter<'w> { - location_table: &'w LocationTable, + location_table: &'w PoloniusLocationTable, dir: &'w Path, } @@ -139,7 +141,7 @@ trait FactRow { fn write( &self, out: &mut dyn Write, - location_table: &LocationTable, + location_table: &PoloniusLocationTable, ) -> Result<(), Box>; } @@ -147,7 +149,7 @@ impl FactRow for PoloniusRegionVid { fn write( &self, out: &mut dyn Write, - location_table: &LocationTable, + location_table: &PoloniusLocationTable, ) -> Result<(), Box> { write_row(out, location_table, &[self]) } @@ -161,7 +163,7 @@ where fn write( &self, out: &mut dyn Write, - location_table: &LocationTable, + location_table: &PoloniusLocationTable, ) -> Result<(), Box> { write_row(out, location_table, &[&self.0, &self.1]) } @@ -176,7 +178,7 @@ where fn write( &self, out: &mut dyn Write, - location_table: &LocationTable, + location_table: &PoloniusLocationTable, ) -> Result<(), Box> { write_row(out, location_table, &[&self.0, &self.1, &self.2]) } @@ -192,7 +194,7 @@ where fn write( &self, out: &mut dyn Write, - location_table: &LocationTable, + location_table: &PoloniusLocationTable, ) -> Result<(), Box> { write_row(out, location_table, &[&self.0, &self.1, &self.2, &self.3]) } @@ -200,7 +202,7 @@ where fn write_row( out: &mut dyn Write, - location_table: &LocationTable, + location_table: &PoloniusLocationTable, columns: &[&dyn FactCell], ) -> Result<(), Box> { for (index, c) in columns.iter().enumerate() { @@ -211,41 +213,41 @@ fn write_row( } trait FactCell { - fn to_string(&self, location_table: &LocationTable) -> String; + fn to_string(&self, location_table: &PoloniusLocationTable) -> String; } impl FactCell for BorrowIndex { - fn to_string(&self, _location_table: &LocationTable) -> String { + fn to_string(&self, _location_table: &PoloniusLocationTable) -> String { format!("{self:?}") } } impl FactCell for Local { - fn to_string(&self, _location_table: &LocationTable) -> String { + fn to_string(&self, _location_table: &PoloniusLocationTable) -> String { format!("{self:?}") } } impl FactCell for MovePathIndex { - fn to_string(&self, _location_table: &LocationTable) -> String { + fn to_string(&self, _location_table: &PoloniusLocationTable) -> String { format!("{self:?}") } } impl FactCell for PoloniusRegionVid { - fn to_string(&self, _location_table: &LocationTable) -> String { + fn to_string(&self, _location_table: &PoloniusLocationTable) -> String { format!("{self:?}") } } impl FactCell for RegionVid { - fn to_string(&self, _location_table: &LocationTable) -> String { + fn to_string(&self, _location_table: &PoloniusLocationTable) -> String { format!("{self:?}") } } impl FactCell for LocationIndex { - fn to_string(&self, location_table: &LocationTable) -> String { - format!("{:?}", location_table.to_location(*self)) + fn to_string(&self, location_table: &PoloniusLocationTable) -> String { + format!("{:?}", location_table.to_rich_location(*self)) } } diff --git a/compiler/rustc_borrowck/src/polonius/legacy/loan_invalidations.rs b/compiler/rustc_borrowck/src/polonius/legacy/loan_invalidations.rs index f646beeecf7b3..0ad91ae51a3a0 100644 --- a/compiler/rustc_borrowck/src/polonius/legacy/loan_invalidations.rs +++ 
b/compiler/rustc_borrowck/src/polonius/legacy/loan_invalidations.rs @@ -1,3 +1,5 @@ +use std::ops::ControlFlow; + use rustc_data_structures::graph::dominators::Dominators; use rustc_middle::bug; use rustc_middle::mir::visit::Visitor; @@ -9,9 +11,8 @@ use rustc_middle::mir::{ use rustc_middle::ty::TyCtxt; use tracing::debug; +use super::{PoloniusFacts, PoloniusLocationTable}; use crate::borrow_set::BorrowSet; -use crate::facts::AllFacts; -use crate::location::LocationTable; use crate::path_utils::*; use crate::{ AccessDepth, Activation, ArtificialField, BorrowIndex, Deep, LocalMutationIsAllowed, Read, @@ -21,22 +22,22 @@ use crate::{ /// Emit `loan_invalidated_at` facts. pub(super) fn emit_loan_invalidations<'tcx>( tcx: TyCtxt<'tcx>, - all_facts: &mut AllFacts, - location_table: &LocationTable, + facts: &mut PoloniusFacts, body: &Body<'tcx>, + location_table: &PoloniusLocationTable, borrow_set: &BorrowSet<'tcx>, ) { let dominators = body.basic_blocks.dominators(); let mut visitor = - LoanInvalidationsGenerator { all_facts, borrow_set, tcx, location_table, body, dominators }; + LoanInvalidationsGenerator { facts, borrow_set, tcx, location_table, body, dominators }; visitor.visit_body(body); } struct LoanInvalidationsGenerator<'a, 'tcx> { tcx: TyCtxt<'tcx>, - all_facts: &'a mut AllFacts, - location_table: &'a LocationTable, + facts: &'a mut PoloniusFacts, body: &'a Body<'tcx>, + location_table: &'a PoloniusLocationTable, dominators: &'a Dominators, borrow_set: &'a BorrowSet<'tcx>, } @@ -151,7 +152,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LoanInvalidationsGenerator<'a, 'tcx> { let resume = self.location_table.start_index(resume.start_location()); for (i, data) in borrow_set.iter_enumerated() { if borrow_of_local_data(data.borrowed_place) { - self.all_facts.loan_invalidated_at.push((resume, i)); + self.facts.loan_invalidated_at.push((resume, i)); } } @@ -165,7 +166,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LoanInvalidationsGenerator<'a, 'tcx> { let start = self.location_table.start_index(location); for (i, data) in borrow_set.iter_enumerated() { if borrow_of_local_data(data.borrowed_place) { - self.all_facts.loan_invalidated_at.push((start, i)); + self.facts.loan_invalidated_at.push((start, i)); } } } @@ -261,7 +262,7 @@ impl<'a, 'tcx> LoanInvalidationsGenerator<'a, 'tcx> { } BorrowKind::Mut { .. } => { let wk = WriteKind::MutableBorrow(bk); - if allow_two_phase_borrow(bk) { + if bk.allows_two_phase_borrow() { (Deep, Reservation(wk)) } else { (Deep, Write(wk)) @@ -299,16 +300,11 @@ impl<'a, 'tcx> LoanInvalidationsGenerator<'a, 'tcx> { self.consume_operand(location, op); } - &(Rvalue::Len(place) | Rvalue::Discriminant(place)) => { - let af = match rvalue { - Rvalue::Len(..) => Some(ArtificialField::ArrayLength), - Rvalue::Discriminant(..) => None, - _ => unreachable!(), - }; + &Rvalue::Discriminant(place) => { self.access_place( location, place, - (Shallow(af), Read(ReadKind::Copy)), + (Shallow(None), Read(ReadKind::Copy)), LocalMutationIsAllowed::No, ); } @@ -384,8 +380,8 @@ impl<'a, 'tcx> LoanInvalidationsGenerator<'a, 'tcx> { // Reading from mere reservations of mutable-borrows is OK. 
if !is_active(this.dominators, borrow, location) { // If the borrow isn't active yet, reads don't invalidate it - assert!(allow_two_phase_borrow(borrow.kind)); - return Control::Continue; + assert!(borrow.kind.allows_two_phase_borrow()); + return ControlFlow::Continue(()); } // Unique and mutable borrows are invalidated by reads from any @@ -401,7 +397,7 @@ impl<'a, 'tcx> LoanInvalidationsGenerator<'a, 'tcx> { this.emit_loan_invalidated_at(borrow_index, location); } } - Control::Continue + ControlFlow::Continue(()) }, ); } @@ -409,7 +405,7 @@ impl<'a, 'tcx> LoanInvalidationsGenerator<'a, 'tcx> { /// Generates a new `loan_invalidated_at(L, B)` fact. fn emit_loan_invalidated_at(&mut self, b: BorrowIndex, l: Location) { let lidx = self.location_table.start_index(l); - self.all_facts.loan_invalidated_at.push((lidx, b)); + self.facts.loan_invalidated_at.push((lidx, b)); } fn check_activations(&mut self, location: Location) { diff --git a/compiler/rustc_borrowck/src/polonius/legacy/loan_kills.rs b/compiler/rustc_borrowck/src/polonius/legacy/loan_kills.rs index 68e0865ab82c9..098c922bf7b91 100644 --- a/compiler/rustc_borrowck/src/polonius/legacy/loan_kills.rs +++ b/compiler/rustc_borrowck/src/polonius/legacy/loan_kills.rs @@ -6,20 +6,19 @@ use rustc_middle::mir::{ use rustc_middle::ty::TyCtxt; use tracing::debug; +use super::{PoloniusFacts, PoloniusLocationTable}; use crate::borrow_set::BorrowSet; -use crate::facts::AllFacts; -use crate::location::LocationTable; use crate::places_conflict; /// Emit `loan_killed_at` and `cfg_edge` facts at the same time. pub(super) fn emit_loan_kills<'tcx>( tcx: TyCtxt<'tcx>, - all_facts: &mut AllFacts, - location_table: &LocationTable, + facts: &mut PoloniusFacts, body: &Body<'tcx>, + location_table: &PoloniusLocationTable, borrow_set: &BorrowSet<'tcx>, ) { - let mut visitor = LoanKillsGenerator { borrow_set, tcx, location_table, all_facts, body }; + let mut visitor = LoanKillsGenerator { borrow_set, tcx, location_table, facts, body }; for (bb, data) in body.basic_blocks.iter_enumerated() { visitor.visit_basic_block_data(bb, data); } @@ -27,8 +26,8 @@ pub(super) fn emit_loan_kills<'tcx>( struct LoanKillsGenerator<'a, 'tcx> { tcx: TyCtxt<'tcx>, - all_facts: &'a mut AllFacts, - location_table: &'a LocationTable, + facts: &'a mut PoloniusFacts, + location_table: &'a PoloniusLocationTable, borrow_set: &'a BorrowSet<'tcx>, body: &'a Body<'tcx>, } @@ -36,12 +35,12 @@ struct LoanKillsGenerator<'a, 'tcx> { impl<'a, 'tcx> Visitor<'tcx> for LoanKillsGenerator<'a, 'tcx> { fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) { // Also record CFG facts here. - self.all_facts.cfg_edge.push(( + self.facts.cfg_edge.push(( self.location_table.start_index(location), self.location_table.mid_index(location), )); - self.all_facts.cfg_edge.push(( + self.facts.cfg_edge.push(( self.location_table.mid_index(location), self.location_table.start_index(location.successor_within_block()), )); @@ -63,15 +62,15 @@ impl<'a, 'tcx> Visitor<'tcx> for LoanKillsGenerator<'a, 'tcx> { fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) { // Also record CFG facts here. 
- self.all_facts.cfg_edge.push(( + self.facts.cfg_edge.push(( self.location_table.start_index(location), self.location_table.mid_index(location), )); let successor_blocks = terminator.successors(); - self.all_facts.cfg_edge.reserve(successor_blocks.size_hint().0); + self.facts.cfg_edge.reserve(successor_blocks.size_hint().0); for successor_block in successor_blocks { - self.all_facts.cfg_edge.push(( + self.facts.cfg_edge.push(( self.location_table.mid_index(location), self.location_table.start_index(successor_block.start_location()), )); @@ -128,7 +127,7 @@ impl<'tcx> LoanKillsGenerator<'_, 'tcx> { if places_conflict { let location_index = self.location_table.mid_index(location); - self.all_facts.loan_killed_at.push((borrow_index, location_index)); + self.facts.loan_killed_at.push((borrow_index, location_index)); } } } @@ -140,9 +139,9 @@ impl<'tcx> LoanKillsGenerator<'_, 'tcx> { fn record_killed_borrows_for_local(&mut self, local: Local, location: Location) { if let Some(borrow_indices) = self.borrow_set.local_map.get(&local) { let location_index = self.location_table.mid_index(location); - self.all_facts.loan_killed_at.reserve(borrow_indices.len()); + self.facts.loan_killed_at.reserve(borrow_indices.len()); for &borrow_index in borrow_indices { - self.all_facts.loan_killed_at.push((borrow_index, location_index)); + self.facts.loan_killed_at.push((borrow_index, location_index)); } } } diff --git a/compiler/rustc_borrowck/src/location.rs b/compiler/rustc_borrowck/src/polonius/legacy/location.rs similarity index 86% rename from compiler/rustc_borrowck/src/location.rs rename to compiler/rustc_borrowck/src/polonius/legacy/location.rs index fbe5f94ed42ca..5f816bb9bbdc4 100644 --- a/compiler/rustc_borrowck/src/location.rs +++ b/compiler/rustc_borrowck/src/polonius/legacy/location.rs @@ -13,7 +13,7 @@ use tracing::debug; /// granularity through outlives relations; however, the rich location /// table serves another purpose: it compresses locations from /// multiple words into a single u32. -pub struct LocationTable { +pub struct PoloniusLocationTable { num_points: usize, statements_before_block: IndexVec, } @@ -30,7 +30,7 @@ pub enum RichLocation { Mid(Location), } -impl LocationTable { +impl PoloniusLocationTable { pub(crate) fn new(body: &Body<'_>) -> Self { let mut num_points = 0; let statements_before_block = body @@ -43,8 +43,8 @@ impl LocationTable { }) .collect(); - debug!("LocationTable(statements_before_block={:#?})", statements_before_block); - debug!("LocationTable: num_points={:#?}", num_points); + debug!("PoloniusLocationTable(statements_before_block={:#?})", statements_before_block); + debug!("PoloniusLocationTable: num_points={:#?}", num_points); Self { num_points, statements_before_block } } @@ -65,7 +65,7 @@ impl LocationTable { LocationIndex::from_usize(start_index + statement_index * 2 + 1) } - pub fn to_location(&self, index: LocationIndex) -> RichLocation { + pub fn to_rich_location(&self, index: LocationIndex) -> RichLocation { let point_index = index.index(); // Find the basic block. 
We have a vector with the @@ -97,6 +97,13 @@ impl LocationTable { RichLocation::Mid(Location { block, statement_index }) } } + + pub fn to_location(&self, index: LocationIndex) -> Location { + match self.to_rich_location(index) { + RichLocation::Start(location) => location, + RichLocation::Mid(location) => location, + } + } } impl LocationIndex { diff --git a/compiler/rustc_borrowck/src/polonius/legacy/mod.rs b/compiler/rustc_borrowck/src/polonius/legacy/mod.rs index 9fccc00bdaf0e..95820c07a02f8 100644 --- a/compiler/rustc_borrowck/src/polonius/legacy/mod.rs +++ b/compiler/rustc_borrowck/src/polonius/legacy/mod.rs @@ -3,18 +3,27 @@ //! Will be removed in the future, once the in-tree `-Zpolonius=next` implementation reaches feature //! parity. -use rustc_middle::mir::{Body, LocalKind, Location, START_BLOCK}; -use rustc_middle::ty::TyCtxt; +use std::iter; + +use either::Either; +use rustc_middle::mir::{Body, Local, LocalKind, Location, START_BLOCK}; +use rustc_middle::ty::{GenericArg, TyCtxt}; use rustc_mir_dataflow::move_paths::{InitKind, InitLocation, MoveData}; use tracing::debug; use crate::borrow_set::BorrowSet; -use crate::facts::{AllFacts, PoloniusRegionVid}; -use crate::location::LocationTable; +use crate::constraints::OutlivesConstraint; +use crate::type_check::MirTypeckRegionConstraints; use crate::type_check::free_region_relations::UniversalRegionRelations; +use crate::universal_regions::UniversalRegions; +mod accesses; mod loan_invalidations; mod loan_kills; +mod location; +pub use self::location::*; +mod facts; +pub use self::facts::*; /// When requested, emit most of the facts needed by polonius: /// - moves and assignments @@ -22,42 +31,52 @@ mod loan_kills; /// - CFG points and edges /// - loan kills /// - loan invalidations +/// - access facts such as variable definitions, uses, drops, and path accesses +/// - outlives constraints /// /// The rest of the facts are emitted during typeck and liveness. pub(crate) fn emit_facts<'tcx>( - all_facts: &mut Option, + facts: &mut Option, tcx: TyCtxt<'tcx>, - location_table: &LocationTable, + location_table: &PoloniusLocationTable, body: &Body<'tcx>, borrow_set: &BorrowSet<'tcx>, - move_data: &MoveData<'_>, - universal_region_relations: &UniversalRegionRelations<'_>, + move_data: &MoveData<'tcx>, + universal_region_relations: &UniversalRegionRelations<'tcx>, + constraints: &MirTypeckRegionConstraints<'tcx>, ) { - let Some(all_facts) = all_facts else { + let Some(facts) = facts else { // We don't do anything if there are no facts to fill. return; }; let _prof_timer = tcx.prof.generic_activity("polonius_fact_generation"); - emit_move_facts(all_facts, move_data, location_table, body); - emit_universal_region_facts(all_facts, borrow_set, universal_region_relations); - emit_cfg_and_loan_kills_facts(all_facts, tcx, location_table, body, borrow_set); - emit_loan_invalidations_facts(all_facts, tcx, location_table, body, borrow_set); + emit_move_facts(facts, body, location_table, move_data); + emit_universal_region_facts(facts, borrow_set, universal_region_relations); + loan_kills::emit_loan_kills(tcx, facts, body, location_table, borrow_set); + loan_invalidations::emit_loan_invalidations(tcx, facts, body, location_table, borrow_set); + accesses::emit_access_facts( + tcx, + facts, + body, + location_table, + move_data, + &universal_region_relations.universal_regions, + ); + emit_outlives_facts(facts, location_table, constraints); } /// Emit facts needed for move/init analysis: moves and assignments. 
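The renamed `PoloniusLocationTable` above packs a `(block, statement, Start|Mid)` location into a single index: each block owns a contiguous run of `2 * (statements + 1)` points, recorded as prefix sums, and decoding scans for the owning block. A simplified sketch of the encode/decode arithmetic (the `block_statement_counts` input is hypothetical; the real table is built from MIR basic blocks):

```rust
/// Start/mid flavor of a point, as in `RichLocation`.
#[derive(Debug, PartialEq)]
enum Rich {
    Start { block: usize, statement: usize },
    Mid { block: usize, statement: usize },
}

struct LocationTable {
    /// For each block, the index of its first point
    /// (prefix sums of `2 * (statements + 1)`).
    statements_before_block: Vec<usize>,
}

impl LocationTable {
    /// `block_statement_counts[b]` = number of statements in block `b`;
    /// the terminator adds one extra location per block.
    fn new(block_statement_counts: &[usize]) -> Self {
        let mut acc = 0;
        let statements_before_block = block_statement_counts
            .iter()
            .map(|&n| {
                let start = acc;
                acc += (n + 1) * 2;
                start
            })
            .collect();
        Self { statements_before_block }
    }

    fn start_index(&self, block: usize, statement: usize) -> usize {
        self.statements_before_block[block] + statement * 2
    }

    fn mid_index(&self, block: usize, statement: usize) -> usize {
        self.start_index(block, statement) + 1
    }

    fn to_rich_location(&self, index: usize) -> Rich {
        // Find the last block whose first point is <= index.
        let block =
            self.statements_before_block.iter().rposition(|&first| first <= index).unwrap();
        let offset = index - self.statements_before_block[block];
        let statement = offset / 2;
        if offset % 2 == 0 {
            Rich::Start { block, statement }
        } else {
            Rich::Mid { block, statement }
        }
    }
}

fn main() {
    // Two blocks with 2 and 1 statements respectively.
    let table = LocationTable::new(&[2, 1]);
    let idx = table.mid_index(1, 0);
    assert_eq!(idx, 7);
    assert_eq!(table.to_rich_location(idx), Rich::Mid { block: 1, statement: 0 });
}
```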
fn emit_move_facts( - all_facts: &mut AllFacts, - move_data: &MoveData<'_>, - location_table: &LocationTable, + facts: &mut PoloniusFacts, body: &Body<'_>, + location_table: &PoloniusLocationTable, + move_data: &MoveData<'_>, ) { - all_facts - .path_is_var - .extend(move_data.rev_lookup.iter_locals_enumerated().map(|(l, r)| (r, l))); + facts.path_is_var.extend(move_data.rev_lookup.iter_locals_enumerated().map(|(l, r)| (r, l))); for (child, move_path) in move_data.move_paths.iter_enumerated() { if let Some(parent) = move_path.parent { - all_facts.child_path.push((child, parent)); + facts.child_path.push((child, parent)); } } @@ -83,14 +102,14 @@ fn emit_move_facts( // The initialization happened in (or rather, when arriving at) // the successors, but not in the unwind block. let first_statement = Location { block: successor, statement_index: 0 }; - all_facts + facts .path_assigned_at_base .push((init.path, location_table.start_index(first_statement))); } } else { // In all other cases, the initialization just happens at the // midpoint, like any other effect. - all_facts + facts .path_assigned_at_base .push((init.path, location_table.mid_index(location))); } @@ -98,7 +117,7 @@ fn emit_move_facts( // Arguments are initialized on function entry InitLocation::Argument(local) => { assert!(body.local_kind(local) == LocalKind::Arg); - all_facts.path_assigned_at_base.push((init.path, fn_entry_start)); + facts.path_assigned_at_base.push((init.path, fn_entry_start)); } } } @@ -107,20 +126,20 @@ fn emit_move_facts( if body.local_kind(local) != LocalKind::Arg { // Non-arguments start out deinitialised; we simulate this with an // initial move: - all_facts.path_moved_at_base.push((path, fn_entry_start)); + facts.path_moved_at_base.push((path, fn_entry_start)); } } // moved_out_at // deinitialisation is assumed to always happen! - all_facts + facts .path_moved_at_base .extend(move_data.moves.iter().map(|mo| (mo.path, location_table.mid_index(mo.source)))); } /// Emit universal regions facts, and their relations. fn emit_universal_region_facts( - all_facts: &mut AllFacts, + facts: &mut PoloniusFacts, borrow_set: &BorrowSet<'_>, universal_region_relations: &UniversalRegionRelations<'_>, ) { @@ -131,7 +150,7 @@ fn emit_universal_region_facts( // added to the existing number of loans, as if they succeeded them in the set. // let universal_regions = &universal_region_relations.universal_regions; - all_facts + facts .universal_region .extend(universal_regions.universal_regions_iter().map(PoloniusRegionVid::from)); let borrow_count = borrow_set.len(); @@ -144,7 +163,7 @@ fn emit_universal_region_facts( for universal_region in universal_regions.universal_regions_iter() { let universal_region_idx = universal_region.index(); let placeholder_loan_idx = borrow_count + universal_region_idx; - all_facts.placeholder.push((universal_region.into(), placeholder_loan_idx.into())); + facts.placeholder.push((universal_region.into(), placeholder_loan_idx.into())); } // 2: the universal region relations `outlives` constraints are emitted as @@ -156,29 +175,51 @@ fn emit_universal_region_facts( fr1={:?}, fr2={:?}", fr1, fr2 ); - all_facts.known_placeholder_subset.push((fr1.into(), fr2.into())); + facts.known_placeholder_subset.push((fr1.into(), fr2.into())); } } } -/// Emit facts about loan invalidations. -fn emit_loan_invalidations_facts<'tcx>( - all_facts: &mut AllFacts, +/// For every potentially drop()-touched region `region` in `local`'s type +/// (`kind`), emit a `drop_of_var_derefs_origin(local, origin)` fact. 
+pub(crate) fn emit_drop_facts<'tcx>( tcx: TyCtxt<'tcx>, - location_table: &LocationTable, - body: &Body<'tcx>, - borrow_set: &BorrowSet<'tcx>, + local: Local, + kind: &GenericArg<'tcx>, + universal_regions: &UniversalRegions<'tcx>, + facts: &mut Option, ) { - loan_invalidations::emit_loan_invalidations(tcx, all_facts, location_table, body, borrow_set); + debug!("emit_drop_facts(local={:?}, kind={:?}", local, kind); + let Some(facts) = facts.as_mut() else { return }; + let _prof_timer = tcx.prof.generic_activity("polonius_fact_generation"); + tcx.for_each_free_region(kind, |drop_live_region| { + let region_vid = universal_regions.to_region_vid(drop_live_region); + facts.drop_of_var_derefs_origin.push((local, region_vid.into())); + }); } -/// Emit facts about CFG points and edges, as well as locations where loans are killed. -fn emit_cfg_and_loan_kills_facts<'tcx>( - all_facts: &mut AllFacts, - tcx: TyCtxt<'tcx>, - location_table: &LocationTable, - body: &Body<'tcx>, - borrow_set: &BorrowSet<'tcx>, +/// Emit facts about the outlives constraints: the `subset` base relation, i.e. not a transitive +/// closure. +fn emit_outlives_facts<'tcx>( + facts: &mut PoloniusFacts, + location_table: &PoloniusLocationTable, + constraints: &MirTypeckRegionConstraints<'tcx>, ) { - loan_kills::emit_loan_kills(tcx, all_facts, location_table, body, borrow_set); + facts.subset_base.extend(constraints.outlives_constraints.outlives().iter().flat_map( + |constraint: &OutlivesConstraint<'_>| { + if let Some(from_location) = constraint.locations.from_location() { + Either::Left(iter::once(( + constraint.sup.into(), + constraint.sub.into(), + location_table.mid_index(from_location), + ))) + } else { + Either::Right( + location_table.all_points().map(move |location| { + (constraint.sup.into(), constraint.sub.into(), location) + }), + ) + } + }, + )); } diff --git a/compiler/rustc_borrowck/src/polonius/liveness_constraints.rs b/compiler/rustc_borrowck/src/polonius/liveness_constraints.rs new file mode 100644 index 0000000000000..75ee29c9d0d50 --- /dev/null +++ b/compiler/rustc_borrowck/src/polonius/liveness_constraints.rs @@ -0,0 +1,336 @@ +use std::collections::BTreeMap; + +use rustc_index::bit_set::SparseBitMatrix; +use rustc_index::interval::SparseIntervalMatrix; +use rustc_middle::mir::{Body, Location}; +use rustc_middle::ty::relate::{self, Relate, RelateResult, TypeRelation}; +use rustc_middle::ty::{self, RegionVid, Ty, TyCtxt, TypeVisitable}; +use rustc_mir_dataflow::points::PointIndex; + +use super::{ + ConstraintDirection, LocalizedOutlivesConstraint, LocalizedOutlivesConstraintSet, + PoloniusContext, +}; +use crate::region_infer::values::LivenessValues; +use crate::universal_regions::UniversalRegions; + +impl PoloniusContext { + /// Record the variance of each region contained within the given value. + pub(crate) fn record_live_region_variance<'tcx>( + &mut self, + tcx: TyCtxt<'tcx>, + universal_regions: &UniversalRegions<'tcx>, + value: impl TypeVisitable> + Relate>, + ) { + let mut extractor = VarianceExtractor { + tcx, + ambient_variance: ty::Variance::Covariant, + directions: &mut self.live_region_variances, + universal_regions, + }; + extractor.relate(value, value).expect("Can't have a type error relating to itself"); + } + + /// Unlike NLLs, in polonius we traverse the cfg to look for regions live across an edge, so we + /// need to transpose the "points where each region is live" matrix to a "live regions per point" + /// matrix. 
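`emit_outlives_facts` above emits a `subset_base` tuple either at a constraint's single origin point or, when the constraint carries no location, at every point; `Either` lets one `flat_map` closure return both iterator shapes. A self-contained sketch of the same trick, using a small local enum in place of the `either` crate and plain integers for regions and points:

```rust
/// Minimal stand-in for `either::Either`, restricted to iterators.
enum OneOrAll<L, R> {
    One(L),
    All(R),
}

impl<L, R, T> Iterator for OneOrAll<L, R>
where
    L: Iterator<Item = T>,
    R: Iterator<Item = T>,
{
    type Item = T;
    fn next(&mut self) -> Option<T> {
        match self {
            OneOrAll::One(it) => it.next(),
            OneOrAll::All(it) => it.next(),
        }
    }
}

/// A constraint either has a concrete location or applies everywhere.
struct Constraint {
    sup: u32,
    sub: u32,
    location: Option<usize>,
}

fn subset_base_facts(constraints: &[Constraint], all_points: usize) -> Vec<(u32, u32, usize)> {
    constraints
        .iter()
        .flat_map(|c| match c.location {
            // One fact at the constraint's own location...
            Some(p) => OneOrAll::One(std::iter::once((c.sup, c.sub, p))),
            // ...or one fact per point when there is no location.
            None => OneOrAll::All((0..all_points).map(move |p| (c.sup, c.sub, p))),
        })
        .collect()
}

fn main() {
    let constraints = [
        Constraint { sup: 1, sub: 0, location: Some(3) },
        Constraint { sup: 2, sub: 1, location: None },
    ];
    let facts = subset_base_facts(&constraints, 2);
    assert_eq!(facts, vec![(1, 0, 3), (2, 1, 0), (2, 1, 1)]);
}
```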
+ // FIXME: avoid this conversion by always storing liveness data in this shape in the rest of + // borrowck. + pub(crate) fn record_live_regions_per_point( + &mut self, + num_regions: usize, + points_per_live_region: &SparseIntervalMatrix, + ) { + let mut live_regions_per_point = SparseBitMatrix::new(num_regions); + for region in points_per_live_region.rows() { + for point in points_per_live_region.row(region).unwrap().iter() { + live_regions_per_point.insert(point, region); + } + } + self.live_regions = Some(live_regions_per_point); + } +} + +/// Propagate loans throughout the CFG: for each statement in the MIR, create localized outlives +/// constraints for loans that are propagated to the next statements. +pub(super) fn create_liveness_constraints<'tcx>( + body: &Body<'tcx>, + liveness: &LivenessValues, + live_regions: &SparseBitMatrix, + live_region_variances: &BTreeMap, + universal_regions: &UniversalRegions<'tcx>, + localized_outlives_constraints: &mut LocalizedOutlivesConstraintSet, +) { + for (block, bb) in body.basic_blocks.iter_enumerated() { + let statement_count = bb.statements.len(); + for statement_index in 0..=statement_count { + let current_location = Location { block, statement_index }; + let current_point = liveness.point_from_location(current_location); + + if statement_index < statement_count { + // Intra-block edges, straight line constraints from each point to its successor + // within the same block. + let next_location = Location { block, statement_index: statement_index + 1 }; + let next_point = liveness.point_from_location(next_location); + propagate_loans_between_points( + current_point, + next_point, + live_regions, + live_region_variances, + universal_regions, + localized_outlives_constraints, + ); + } else { + // Inter-block edges, from the block's terminator to each successor block's entry + // point. + for successor_block in bb.terminator().successors() { + let next_location = Location { block: successor_block, statement_index: 0 }; + let next_point = liveness.point_from_location(next_location); + propagate_loans_between_points( + current_point, + next_point, + live_regions, + live_region_variances, + universal_regions, + localized_outlives_constraints, + ); + } + } + } + } +} + +/// Propagate loans within a region between two points in the CFG, if that region is live at both +/// the source and target points. +fn propagate_loans_between_points( + current_point: PointIndex, + next_point: PointIndex, + live_regions: &SparseBitMatrix, + live_region_variances: &BTreeMap, + universal_regions: &UniversalRegions<'_>, + localized_outlives_constraints: &mut LocalizedOutlivesConstraintSet, +) { + // Universal regions are semantically live at all points. + // Note: we always have universal regions but they're not always (or often) involved in the + // subset graph. For now, we emit all their edges unconditionally, but some of these subgraphs + // will be disconnected from the rest of the graph and thus, unnecessary. + // + // FIXME: only emit the edges of universal regions that existential regions can reach. + for region in universal_regions.universal_regions_iter() { + localized_outlives_constraints.push(LocalizedOutlivesConstraint { + source: region, + from: current_point, + target: region, + to: next_point, + }); + } + + let Some(current_live_regions) = live_regions.row(current_point) else { + // There are no constraints to add: there are no live regions at the current point. 
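`record_live_regions_per_point` above transposes the region-to-live-points matrix into a point-to-live-regions matrix, since the CFG traversal asks which regions are live at a given point. A rough sketch of that transpose, with `BTreeSet`s standing in for the sparse matrices:

```rust
use std::collections::BTreeSet;

/// Transpose "points per live region" into "live regions per point".
fn live_regions_per_point(
    num_points: usize,
    points_per_region: &[BTreeSet<usize>],
) -> Vec<BTreeSet<usize>> {
    let mut per_point = vec![BTreeSet::new(); num_points];
    for (region, points) in points_per_region.iter().enumerate() {
        for &point in points {
            per_point[point].insert(region);
        }
    }
    per_point
}

fn main() {
    // Region 0 is live at points {0, 1}; region 1 only at point 1.
    let points_per_region = [BTreeSet::from([0, 1]), BTreeSet::from([1])];
    let per_point = live_regions_per_point(2, &points_per_region);
    assert_eq!(per_point[0], BTreeSet::from([0]));
    assert_eq!(per_point[1], BTreeSet::from([0, 1]));
}
```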
+ return; + }; + let Some(next_live_regions) = live_regions.row(next_point) else { + // There are no constraints to add: there are no live regions at the next point. + return; + }; + + for region in next_live_regions.iter() { + if !current_live_regions.contains(region) { + continue; + } + + // `region` is indeed live at both points, add a constraint between them, according to + // variance. + if let Some(&direction) = live_region_variances.get(®ion) { + add_liveness_constraint( + region, + current_point, + next_point, + direction, + localized_outlives_constraints, + ); + } else { + // Note: there currently are cases related to promoted and const generics, where we + // don't yet have variance information (possibly about temporary regions created when + // typeck sanitizes the promoteds). Until that is done, we conservatively fallback to + // maximizing reachability by adding a bidirectional edge here. This will not limit + // traversal whatsoever, and thus propagate liveness when needed. + // + // FIXME: add the missing variance information and remove this fallback bidirectional + // edge. + let fallback = ConstraintDirection::Bidirectional; + add_liveness_constraint( + region, + current_point, + next_point, + fallback, + localized_outlives_constraints, + ); + } + } +} + +/// Adds `LocalizedOutlivesConstraint`s between two connected points, according to the given edge +/// direction. +fn add_liveness_constraint( + region: RegionVid, + current_point: PointIndex, + next_point: PointIndex, + direction: ConstraintDirection, + localized_outlives_constraints: &mut LocalizedOutlivesConstraintSet, +) { + match direction { + ConstraintDirection::Forward => { + // Covariant cases: loans flow in the regular direction, from the current point to the + // next point. + localized_outlives_constraints.push(LocalizedOutlivesConstraint { + source: region, + from: current_point, + target: region, + to: next_point, + }); + } + ConstraintDirection::Backward => { + // Contravariant cases: loans flow in the inverse direction, from the next point to the + // current point. + localized_outlives_constraints.push(LocalizedOutlivesConstraint { + source: region, + from: next_point, + target: region, + to: current_point, + }); + } + ConstraintDirection::Bidirectional => { + // For invariant cases, loans can flow in both directions: we add both edges. + localized_outlives_constraints.push(LocalizedOutlivesConstraint { + source: region, + from: current_point, + target: region, + to: next_point, + }); + localized_outlives_constraints.push(LocalizedOutlivesConstraint { + source: region, + from: next_point, + target: region, + to: current_point, + }); + } + } +} + +/// Extracts variances for regions contained within types. Follows the same structure as +/// `rustc_infer`'s `Generalizer`: we try to relate a type with itself to track and extract the +/// variances of regions. +struct VarianceExtractor<'a, 'tcx> { + tcx: TyCtxt<'tcx>, + ambient_variance: ty::Variance, + directions: &'a mut BTreeMap, + universal_regions: &'a UniversalRegions<'tcx>, +} + +impl<'tcx> VarianceExtractor<'_, 'tcx> { + fn record_variance(&mut self, region: ty::Region<'tcx>, variance: ty::Variance) { + // We're only interested in the variance of vars and free regions. 
+ // + // Note: even if we currently bail for two cases of unexpected region kinds here, missing + // variance data is not a soundness problem: the regions with missing variance will still be + // present in the constraint graph as they are live, and liveness edges construction has a + // fallback for this case. + // + // FIXME: that being said, we need to investigate these cases better to not ignore regions + // in general. + if region.is_bound() { + // We ignore these because they cannot be turned into the vids we need. + return; + } + + if region.is_erased() { + // These cannot be turned into a vid either, and we also ignore them: the fact that they + // show up here looks like either an issue upstream or a combination with unexpectedly + // continuing compilation too far when we're in a tainted by errors situation. + // + // FIXME: investigate the `generic_const_exprs` test that triggers this issue, + // `ui/const-generics/generic_const_exprs/issue-97047-ice-2.rs` + return; + } + + let direction = match variance { + ty::Covariant => ConstraintDirection::Forward, + ty::Contravariant => ConstraintDirection::Backward, + ty::Invariant => ConstraintDirection::Bidirectional, + ty::Bivariant => { + // We don't add edges for bivariant cases. + return; + } + }; + + let region = self.universal_regions.to_region_vid(region); + self.directions + .entry(region) + .and_modify(|entry| { + // If there's already a recorded direction for this region, we combine the two: + // - combining the same direction is idempotent + // - combining different directions is trivially bidirectional + if entry != &direction { + *entry = ConstraintDirection::Bidirectional; + } + }) + .or_insert(direction); + } +} + +impl<'tcx> TypeRelation> for VarianceExtractor<'_, 'tcx> { + fn cx(&self) -> TyCtxt<'tcx> { + self.tcx + } + + fn relate_with_variance>>( + &mut self, + variance: ty::Variance, + _info: ty::VarianceDiagInfo>, + a: T, + b: T, + ) -> RelateResult<'tcx, T> { + let old_ambient_variance = self.ambient_variance; + self.ambient_variance = self.ambient_variance.xform(variance); + let r = self.relate(a, b)?; + self.ambient_variance = old_ambient_variance; + Ok(r) + } + + fn tys(&mut self, a: Ty<'tcx>, b: Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> { + assert_eq!(a, b); // we are misusing TypeRelation here; both LHS and RHS ought to be == + relate::structurally_relate_tys(self, a, b) + } + + fn regions( + &mut self, + a: ty::Region<'tcx>, + b: ty::Region<'tcx>, + ) -> RelateResult<'tcx, ty::Region<'tcx>> { + assert_eq!(a, b); // we are misusing TypeRelation here; both LHS and RHS ought to be == + self.record_variance(a, self.ambient_variance); + Ok(a) + } + + fn consts( + &mut self, + a: ty::Const<'tcx>, + b: ty::Const<'tcx>, + ) -> RelateResult<'tcx, ty::Const<'tcx>> { + assert_eq!(a, b); // we are misusing TypeRelation here; both LHS and RHS ought to be == + relate::structurally_relate_consts(self, a, b) + } + + fn binders( + &mut self, + a: ty::Binder<'tcx, T>, + _: ty::Binder<'tcx, T>, + ) -> RelateResult<'tcx, ty::Binder<'tcx, T>> + where + T: Relate>, + { + self.relate(a.skip_binder(), a.skip_binder())?; + Ok(a) + } +} diff --git a/compiler/rustc_borrowck/src/polonius/loan_liveness.rs b/compiler/rustc_borrowck/src/polonius/loan_liveness.rs new file mode 100644 index 0000000000000..c519453652fe8 --- /dev/null +++ b/compiler/rustc_borrowck/src/polonius/loan_liveness.rs @@ -0,0 +1,270 @@ +use std::collections::{BTreeMap, BTreeSet}; + +use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet}; +use 
rustc_middle::mir::visit::Visitor; +use rustc_middle::mir::{ + Body, Local, Location, Place, Rvalue, Statement, StatementKind, Terminator, TerminatorKind, +}; +use rustc_middle::ty::{RegionVid, TyCtxt}; +use rustc_mir_dataflow::points::PointIndex; + +use super::{LiveLoans, LocalizedOutlivesConstraintSet}; +use crate::dataflow::BorrowIndex; +use crate::region_infer::values::LivenessValues; +use crate::{BorrowSet, PlaceConflictBias, places_conflict}; + +/// With the full graph of constraints, we can compute loan reachability, stop at kills, and trace +/// loan liveness throughout the CFG. +pub(super) fn compute_loan_liveness<'tcx>( + tcx: TyCtxt<'tcx>, + body: &Body<'tcx>, + liveness: &LivenessValues, + borrow_set: &BorrowSet<'tcx>, + localized_outlives_constraints: &LocalizedOutlivesConstraintSet, +) -> LiveLoans { + let mut live_loans = LiveLoans::new(borrow_set.len()); + + // FIXME: it may be preferable for kills to be encoded in the edges themselves, to simplify and + // likely make traversal (and constraint generation) more efficient. We also display kills on + // edges when visualizing the constraint graph anyways. + let kills = collect_kills(body, tcx, borrow_set); + + let graph = index_constraints(&localized_outlives_constraints); + let mut visited = FxHashSet::default(); + let mut stack = Vec::new(); + + // Compute reachability per loan by traversing each loan's subgraph starting from where it is + // introduced. + for (loan_idx, loan) in borrow_set.iter_enumerated() { + visited.clear(); + stack.clear(); + + let start_node = LocalizedNode { + region: loan.region, + point: liveness.point_from_location(loan.reserve_location), + }; + stack.push(start_node); + + while let Some(node) = stack.pop() { + if !visited.insert(node) { + continue; + } + + // Record the loan as being live on entry to this point. + live_loans.insert(node.point, loan_idx); + + // Here, we have a conundrum. There's currently a weakness in our theory, in that + // we're using a single notion of reachability to represent what used to be _two_ + // different transitive closures. It didn't seem impactful when coming up with the + // single-graph and reachability through space (regions) + time (CFG) concepts, but in + // practice the combination of time-traveling with kills is more impactful than + // initially anticipated. + // + // Kills should prevent a loan from reaching its successor points in the CFG, but not + // while time-traveling: we're not actually at that CFG point, but looking for + // predecessor regions that contain the loan. One of the two TCs we had pushed the + // transitive subset edges to each point instead of having backward edges, and the + // problem didn't exist before. In the abstract, naive reachability is not enough to + // model this, we'd need a slightly different solution. For example, maybe with a + // two-step traversal: + // - at each point we first traverse the subgraph (and possibly time-travel) looking for + // exit nodes while ignoring kills, + // - and then when we're back at the current point, we continue normally. + // + // Another (less annoying) subtlety is that kills and the loan use-map are + // flow-insensitive. Kills can actually appear in places before a loan is introduced, or + // at a location that is actually unreachable in the CFG from the introduction point, + // and these can also be encountered during time-traveling. 
+ // + // The simplest change that made sense to "fix" the issues above is taking into + // account kills that are: + // - reachable from the introduction point + // - encountered during forward traversal. Note that this is not transitive like the + // two-step traversal described above: only kills encountered on exit via a backward + // edge are ignored. + // + // In our test suite, there are a couple of cases where kills are encountered while + // time-traveling, however as far as we can tell, always in cases where they would be + // unreachable. We have reason to believe that this is a property of the single-graph + // approach (but haven't proved it yet): + // - reachable kills while time-traveling would also be encountered via regular + // traversal + // - it makes _some_ sense to ignore unreachable kills, but subtleties around dead code + // in general need to be better thought through (like they were for NLLs). + // - ignoring kills is a conservative approximation: the loan is still live and could + // cause false positive errors at another place access. Soundness issues in this + // domain should look more like the absence of reachability instead. + // + // This is enough in practice to pass tests, and therefore is what we have implemented + // for now. + // + // FIXME: all of the above. Analyze potential unsoundness, possibly in concert with a + // borrowck implementation in a-mir-formality, fuzzing, or manually crafting + // counter-examples. + + // Continuing traversal will depend on whether the loan is killed at this point, and + // whether we're time-traveling. + let current_location = liveness.location_from_point(node.point); + let is_loan_killed = + kills.get(¤t_location).is_some_and(|kills| kills.contains(&loan_idx)); + + for succ in outgoing_edges(&graph, node) { + // If the loan is killed at this point, it is killed _on exit_. But only during + // forward traversal. + if is_loan_killed { + let destination = liveness.location_from_point(succ.point); + if current_location.is_predecessor_of(destination, body) { + continue; + } + } + stack.push(succ); + } + } + } + + live_loans +} + +/// The localized constraint graph is currently the per-node map of its physical edges. In the +/// future, we'll add logical edges to model constraints that hold at all points in the CFG. +type LocalizedConstraintGraph = FxHashMap>; + +/// A node in the graph to be traversed, one of the two vertices of a localized outlives constraint. +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +struct LocalizedNode { + region: RegionVid, + point: PointIndex, +} + +/// Traverses the constraints and returns the indexable graph of edges per node. +fn index_constraints(constraints: &LocalizedOutlivesConstraintSet) -> LocalizedConstraintGraph { + let mut edges = LocalizedConstraintGraph::default(); + for constraint in &constraints.outlives { + let source = LocalizedNode { region: constraint.source, point: constraint.from }; + let target = LocalizedNode { region: constraint.target, point: constraint.to }; + edges.entry(source).or_default().insert(target); + } + + edges +} + +/// Returns the outgoing edges of a given node, not its transitive closure. +fn outgoing_edges( + graph: &LocalizedConstraintGraph, + node: LocalizedNode, +) -> impl Iterator + use<'_> { + graph.get(&node).into_iter().flat_map(|edges| edges.iter().copied()) +} + +/// Traverses the MIR and collects kills. 
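In isolation, the traversal in `compute_loan_liveness` above is plain graph reachability over (region, point) nodes, with kills only pruning edges that move forward in the CFG. A minimal editorial sketch of that idea, using invented plain types (`Node`, `reachable`, `is_forward_edge`) rather than the compiler's `LocalizedNode`, `PointIndex`, and `BorrowIndex`:

```rust
use std::collections::{HashMap, HashSet};

#[derive(Copy, Clone, PartialEq, Eq, Hash)]
struct Node {
    region: u32,
    point: u32,
}

/// Every node reachable from `start`, skipping forward edges out of points
/// where the loan is killed; backward ("time-traveling") edges are still taken.
fn reachable(
    graph: &HashMap<Node, Vec<Node>>,
    start: Node,
    killed_at: &HashSet<u32>,
    is_forward_edge: impl Fn(u32, u32) -> bool,
) -> HashSet<Node> {
    let mut visited = HashSet::new();
    let mut stack = vec![start];
    while let Some(node) = stack.pop() {
        if !visited.insert(node) {
            continue;
        }
        let killed_here = killed_at.contains(&node.point);
        for &succ in graph.get(&node).into_iter().flatten() {
            // Kills stop propagation only along forward CFG edges.
            if killed_here && is_forward_edge(node.point, succ.point) {
                continue;
            }
            stack.push(succ);
        }
    }
    visited
}
```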
+fn collect_kills<'tcx>( + body: &Body<'tcx>, + tcx: TyCtxt<'tcx>, + borrow_set: &BorrowSet<'tcx>, +) -> BTreeMap> { + let mut collector = KillsCollector { borrow_set, tcx, body, kills: BTreeMap::default() }; + for (block, data) in body.basic_blocks.iter_enumerated() { + collector.visit_basic_block_data(block, data); + } + collector.kills +} + +struct KillsCollector<'a, 'tcx> { + body: &'a Body<'tcx>, + tcx: TyCtxt<'tcx>, + borrow_set: &'a BorrowSet<'tcx>, + + /// The set of loans killed at each location. + kills: BTreeMap>, +} + +// This visitor has a similar structure to the `Borrows` dataflow computation with respect to kills, +// and the datalog polonius fact generation for the `loan_killed_at` relation. +impl<'tcx> KillsCollector<'_, 'tcx> { + /// Records the borrows on the specified place as `killed`. For example, when assigning to a + /// local, or on a call's return destination. + fn record_killed_borrows_for_place(&mut self, place: Place<'tcx>, location: Location) { + // For the reasons described in graph traversal, we also filter out kills + // unreachable from the loan's introduction point, as they would stop traversal when + // e.g. checking for reachability in the subset graph through invariance constraints + // higher up. + let filter_unreachable_kills = |loan| { + let introduction = self.borrow_set[loan].reserve_location; + let reachable = introduction.is_predecessor_of(location, self.body); + reachable + }; + + let other_borrows_of_local = self + .borrow_set + .local_map + .get(&place.local) + .into_iter() + .flat_map(|bs| bs.iter()) + .copied(); + + // If the borrowed place is a local with no projections, all other borrows of this + // local must conflict. This is purely an optimization so we don't have to call + // `places_conflict` for every borrow. + if place.projection.is_empty() { + if !self.body.local_decls[place.local].is_ref_to_static() { + self.kills + .entry(location) + .or_default() + .extend(other_borrows_of_local.filter(|&loan| filter_unreachable_kills(loan))); + } + return; + } + + // By passing `PlaceConflictBias::NoOverlap`, we conservatively assume that any given + // pair of array indices are not equal, so that when `places_conflict` returns true, we + // will be assured that two places being compared definitely denotes the same sets of + // locations. + let definitely_conflicting_borrows = other_borrows_of_local + .filter(|&i| { + places_conflict( + self.tcx, + self.body, + self.borrow_set[i].borrowed_place, + place, + PlaceConflictBias::NoOverlap, + ) + }) + .filter(|&loan| filter_unreachable_kills(loan)); + + self.kills.entry(location).or_default().extend(definitely_conflicting_borrows); + } + + /// Records the borrows on the specified local as `killed`. + fn record_killed_borrows_for_local(&mut self, local: Local, location: Location) { + if let Some(borrow_indices) = self.borrow_set.local_map.get(&local) { + self.kills.entry(location).or_default().extend(borrow_indices.iter()); + } + } +} + +impl<'tcx> Visitor<'tcx> for KillsCollector<'_, 'tcx> { + fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) { + // Make sure there are no remaining borrows for locals that have gone out of scope. + if let StatementKind::StorageDead(local) = statement.kind { + self.record_killed_borrows_for_local(local, location); + } + + self.super_statement(statement, location); + } + + fn visit_assign(&mut self, place: &Place<'tcx>, rvalue: &Rvalue<'tcx>, location: Location) { + // When we see `X = ...`, then kill borrows of `(*X).foo` and so forth. 
+ self.record_killed_borrows_for_place(*place, location); + self.super_assign(place, rvalue, location); + } + + fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) { + // A `Call` terminator's return value can be a local which has borrows, so we need to record + // those as killed as well. + if let TerminatorKind::Call { destination, .. } = terminator.kind { + self.record_killed_borrows_for_place(destination, location); + } + + self.super_terminator(terminator, location); + } +} diff --git a/compiler/rustc_borrowck/src/polonius/mod.rs b/compiler/rustc_borrowck/src/polonius/mod.rs index 9c1583f198862..52a5f75d8a239 100644 --- a/compiler/rustc_borrowck/src/polonius/mod.rs +++ b/compiler/rustc_borrowck/src/polonius/mod.rs @@ -1 +1,140 @@ +//! Polonius analysis and support code: +//! - dedicated constraints +//! - conversion from NLL constraints +//! - debugging utilities +//! - etc. +//! +//! The current implementation models the flow-sensitive borrow-checking concerns as a graph +//! containing both information about regions and information about the control flow. +//! +//! Loan propagation is seen as a reachability problem (with some subtleties) between where the loan +//! is introduced and a given point. +//! +//! Constraints arising from type-checking allow loans to flow from region to region at the same CFG +//! point. Constraints arising from liveness allow loans to flow within from point to point, between +//! live regions at these points. +//! +//! Edges can be bidirectional to encode invariant relationships, and loans can flow "back in time" +//! to traverse these constraints arising earlier in the CFG. +//! +//! When incorporating kills in the traversal, the loans reaching a given point are considered live. +//! +//! After this, the usual NLL process happens. These live loans are fed into a dataflow analysis +//! combining them with the points where loans go out of NLL scope (the frontier where they stop +//! propagating to a live region), to yield the "loans in scope" or "active loans", at a given +//! point. +//! +//! Illegal accesses are still computed by checking whether one of these resulting loans is +//! invalidated. +//! +//! More information on this simple approach can be found in the following links, and in the future +//! in the rustc dev guide: +//! - +//! - +//! + +mod constraints; +mod dump; pub(crate) mod legacy; +mod liveness_constraints; +mod loan_liveness; +mod typeck_constraints; + +use std::collections::BTreeMap; + +use rustc_index::bit_set::SparseBitMatrix; +use rustc_middle::mir::Body; +use rustc_middle::ty::{RegionVid, TyCtxt}; +use rustc_mir_dataflow::points::PointIndex; + +pub(crate) use self::constraints::*; +pub(crate) use self::dump::dump_polonius_mir; +use self::liveness_constraints::create_liveness_constraints; +use self::loan_liveness::compute_loan_liveness; +use self::typeck_constraints::convert_typeck_constraints; +use crate::dataflow::BorrowIndex; +use crate::{BorrowSet, RegionInferenceContext}; + +pub(crate) type LiveLoans = SparseBitMatrix; + +/// This struct holds the data needed to create the Polonius localized constraints. +pub(crate) struct PoloniusContext { + /// The set of regions that are live at a given point in the CFG, used to create localized + /// outlives constraints between regions that are live at connected points in the CFG. 
+ live_regions: Option>, + + /// The expected edge direction per live region: the kind of directed edge we'll create as + /// liveness constraints depends on the variance of types with respect to each contained region. + live_region_variances: BTreeMap, +} + +/// The direction a constraint can flow into. Used to create liveness constraints according to +/// variance. +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +enum ConstraintDirection { + /// For covariant cases, we add a forward edge `O at P1 -> O at P2`. + Forward, + + /// For contravariant cases, we add a backward edge `O at P2 -> O at P1` + Backward, + + /// For invariant cases, we add both the forward and backward edges `O at P1 <-> O at P2`. + Bidirectional, +} + +impl PoloniusContext { + pub(crate) fn new() -> PoloniusContext { + Self { live_region_variances: BTreeMap::new(), live_regions: None } + } + + /// Computes live loans using the set of loans model for `-Zpolonius=next`. + /// + /// First, creates a constraint graph combining regions and CFG points, by: + /// - converting NLL typeck constraints to be localized + /// - encoding liveness constraints + /// + /// Then, this graph is traversed, and combined with kills, reachability is recorded as loan + /// liveness, to be used by the loan scope and active loans computations. + pub(crate) fn compute_loan_liveness<'tcx>( + &self, + tcx: TyCtxt<'tcx>, + regioncx: &mut RegionInferenceContext<'tcx>, + body: &Body<'tcx>, + borrow_set: &BorrowSet<'tcx>, + ) -> LocalizedOutlivesConstraintSet { + let mut localized_outlives_constraints = LocalizedOutlivesConstraintSet::default(); + convert_typeck_constraints( + tcx, + body, + regioncx.liveness_constraints(), + regioncx.outlives_constraints(), + regioncx.universal_regions(), + &mut localized_outlives_constraints, + ); + + let live_regions = self.live_regions.as_ref().expect( + "live regions per-point data should have been created at the end of MIR typeck", + ); + create_liveness_constraints( + body, + regioncx.liveness_constraints(), + live_regions, + &self.live_region_variances, + regioncx.universal_regions(), + &mut localized_outlives_constraints, + ); + + // Now that we have a complete graph, we can compute reachability to trace the liveness of + // loans for the next step in the chain, the NLL loan scope and active loans computations. + let live_loans = compute_loan_liveness( + tcx, + body, + regioncx.liveness_constraints(), + borrow_set, + &localized_outlives_constraints, + ); + regioncx.record_live_loans(live_loans); + + localized_outlives_constraints + } +} diff --git a/compiler/rustc_borrowck/src/polonius/typeck_constraints.rs b/compiler/rustc_borrowck/src/polonius/typeck_constraints.rs new file mode 100644 index 0000000000000..8235b844886e0 --- /dev/null +++ b/compiler/rustc_borrowck/src/polonius/typeck_constraints.rs @@ -0,0 +1,241 @@ +use rustc_data_structures::fx::FxHashSet; +use rustc_middle::mir::{Body, Location, Statement, StatementKind, Terminator, TerminatorKind}; +use rustc_middle::ty::{TyCtxt, TypeVisitable}; +use rustc_mir_dataflow::points::PointIndex; + +use super::{LocalizedOutlivesConstraint, LocalizedOutlivesConstraintSet}; +use crate::constraints::OutlivesConstraint; +use crate::region_infer::values::LivenessValues; +use crate::type_check::Locations; +use crate::universal_regions::UniversalRegions; + +/// Propagate loans throughout the subset graph at a given point (with some subtleties around the +/// location where effects start to be visible). 
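"Localizing" a typeck constraint means pinning both sides of `'sup: 'sub` to CFG points, turning it into an edge of the combined region × CFG-point graph. A minimal editorial sketch of that shape, with invented plain types (`LocalizedEdge`, `u32` indices) standing in for the real `LocalizedOutlivesConstraint`, `RegionVid`, and `PointIndex`:

```rust
/// One edge of the localized constraint graph: `source` at point `from`
/// outlives `target` at point `to`.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct LocalizedEdge {
    source: u32,
    from: u32,
    target: u32,
    to: u32,
}

/// The common case in `convert_typeck_constraints` below: a constraint arising
/// at a single statement stays at that statement's point.
fn localize_at_point(sup: u32, sub: u32, point: u32) -> LocalizedEdge {
    LocalizedEdge { source: sup, from: point, target: sub, to: point }
}

/// Constraints that hold at all points are expanded into one such edge per
/// CFG point (see the `Locations::All` arm below).
fn localize_at_all_points(
    sup: u32,
    sub: u32,
    points: impl Iterator<Item = u32>,
) -> Vec<LocalizedEdge> {
    points.map(|p| localize_at_point(sup, sub, p)).collect()
}
```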
+pub(super) fn convert_typeck_constraints<'tcx>( + tcx: TyCtxt<'tcx>, + body: &Body<'tcx>, + liveness: &LivenessValues, + outlives_constraints: impl Iterator>, + universal_regions: &UniversalRegions<'tcx>, + localized_outlives_constraints: &mut LocalizedOutlivesConstraintSet, +) { + for outlives_constraint in outlives_constraints { + match outlives_constraint.locations { + Locations::All(_) => { + // For now, turn logical constraints holding at all points into physical edges at + // every point in the graph. + // FIXME: encode this into *traversal* instead. + for (block, bb) in body.basic_blocks.iter_enumerated() { + let statement_count = bb.statements.len(); + for statement_index in 0..=statement_count { + let current_location = Location { block, statement_index }; + let current_point = liveness.point_from_location(current_location); + + localized_outlives_constraints.push(LocalizedOutlivesConstraint { + source: outlives_constraint.sup, + from: current_point, + target: outlives_constraint.sub, + to: current_point, + }); + } + } + } + + Locations::Single(location) => { + // This constraint is marked as holding at one location, we localize it to that + // location or its successor, depending on the corresponding MIR + // statement/terminator. Unfortunately, they all show up from typeck as coming "on + // entry", so for now we modify them to take effects that should apply "on exit" + // into account. + // + // FIXME: this approach is subtle, complicated, and hard to test, so we should track + // this information better in MIR typeck instead, for example with a new `Locations` + // variant that contains which node is crossing over between entry and exit. + let point = liveness.point_from_location(location); + let localized_constraint = if let Some(stmt) = + body[location.block].statements.get(location.statement_index) + { + localize_statement_constraint( + tcx, + body, + stmt, + liveness, + &outlives_constraint, + location, + point, + universal_regions, + ) + } else { + assert_eq!(location.statement_index, body[location.block].statements.len()); + let terminator = body[location.block].terminator(); + localize_terminator_constraint( + tcx, + body, + terminator, + liveness, + &outlives_constraint, + point, + universal_regions, + ) + }; + localized_outlives_constraints.push(localized_constraint); + } + } + } +} + +/// For a given outlives constraint arising from a MIR statement, localize the constraint with the +/// needed CFG `from`-`to` intra-block nodes. +fn localize_statement_constraint<'tcx>( + tcx: TyCtxt<'tcx>, + body: &Body<'tcx>, + stmt: &Statement<'tcx>, + liveness: &LivenessValues, + outlives_constraint: &OutlivesConstraint<'tcx>, + current_location: Location, + current_point: PointIndex, + universal_regions: &UniversalRegions<'tcx>, +) -> LocalizedOutlivesConstraint { + match &stmt.kind { + StatementKind::Assign(box (lhs, rhs)) => { + // To create localized outlives constraints without midpoints, we rely on the property + // that no input regions from the RHS of the assignment will flow into themselves: they + // should not appear in the output regions in the LHS. We believe this to be true by + // construction of the MIR, via temporaries, and assert it here. 
+ // + // We think we don't need midpoints because: + // - every LHS Place has a unique set of regions that don't appear elsewhere + // - this implies that for them to be part of the RHS, the same Place must be read and + // written + // - and that should be impossible in MIR + // + // When we have a more complete implementation in the future, tested with crater, etc, + // we can relax this to a debug assert instead, or remove it. + assert!( + { + let mut lhs_regions = FxHashSet::default(); + tcx.for_each_free_region(lhs, |region| { + let region = universal_regions.to_region_vid(region); + lhs_regions.insert(region); + }); + + let mut rhs_regions = FxHashSet::default(); + tcx.for_each_free_region(rhs, |region| { + let region = universal_regions.to_region_vid(region); + rhs_regions.insert(region); + }); + + // The intersection between LHS and RHS regions should be empty. + lhs_regions.is_disjoint(&rhs_regions) + }, + "there should be no common regions between the LHS and RHS of an assignment" + ); + + // As mentioned earlier, we should be tracking these better upstream but: we want to + // relate the types on entry to the type of the place on exit. That is, outlives + // constraints on the RHS are on entry, and outlives constraints to/from the LHS are on + // exit (i.e. on entry to the successor location). + let lhs_ty = body.local_decls[lhs.local].ty; + let successor_location = Location { + block: current_location.block, + statement_index: current_location.statement_index + 1, + }; + let successor_point = liveness.point_from_location(successor_location); + compute_constraint_direction( + tcx, + outlives_constraint, + &lhs_ty, + current_point, + successor_point, + universal_regions, + ) + } + _ => { + // For the other cases, we localize an outlives constraint to where it arises. + LocalizedOutlivesConstraint { + source: outlives_constraint.sup, + from: current_point, + target: outlives_constraint.sub, + to: current_point, + } + } + } +} + +/// For a given outlives constraint arising from a MIR terminator, localize the constraint with the +/// needed CFG `from`-`to` inter-block nodes. +fn localize_terminator_constraint<'tcx>( + tcx: TyCtxt<'tcx>, + body: &Body<'tcx>, + terminator: &Terminator<'tcx>, + liveness: &LivenessValues, + outlives_constraint: &OutlivesConstraint<'tcx>, + current_point: PointIndex, + universal_regions: &UniversalRegions<'tcx>, +) -> LocalizedOutlivesConstraint { + // FIXME: check if other terminators need the same handling as `Call`s, in particular + // Assert/Yield/Drop. A handful of tests are failing with Drop related issues, as well as some + // coroutine tests, and that may be why. + match &terminator.kind { + // FIXME: also handle diverging calls. + TerminatorKind::Call { destination, target: Some(target), .. } => { + // Calls are similar to assignments, and thus follow the same pattern. If there is a + // target for the call we also relate what flows into the destination here to entry to + // that successor. 
+ let destination_ty = destination.ty(&body.local_decls, tcx); + let successor_location = Location { block: *target, statement_index: 0 }; + let successor_point = liveness.point_from_location(successor_location); + compute_constraint_direction( + tcx, + outlives_constraint, + &destination_ty, + current_point, + successor_point, + universal_regions, + ) + } + _ => { + // Typeck constraints guide loans between regions at the current point, so we do that in + // the general case, and liveness will take care of making them flow to the terminator's + // successors. + LocalizedOutlivesConstraint { + source: outlives_constraint.sup, + from: current_point, + target: outlives_constraint.sub, + to: current_point, + } + } + } +} +/// For a given outlives constraint and CFG edge, returns the localized constraint with the +/// appropriate `from`-`to` direction. This is computed according to whether the constraint flows to +/// or from a free region in the given `value`, some kind of result for an effectful operation, like +/// the LHS of an assignment. +fn compute_constraint_direction<'tcx>( + tcx: TyCtxt<'tcx>, + outlives_constraint: &OutlivesConstraint<'tcx>, + value: &impl TypeVisitable>, + current_point: PointIndex, + successor_point: PointIndex, + universal_regions: &UniversalRegions<'tcx>, +) -> LocalizedOutlivesConstraint { + let mut to = current_point; + let mut from = current_point; + tcx.for_each_free_region(value, |region| { + let region = universal_regions.to_region_vid(region); + if region == outlives_constraint.sub { + // This constraint flows into the result, its effects start becoming visible on exit. + to = successor_point; + } else if region == outlives_constraint.sup { + // This constraint flows from the result, its effects start becoming visible on exit. 
+ from = successor_point; + } + }); + + LocalizedOutlivesConstraint { + source: outlives_constraint.sup, + from, + target: outlives_constraint.sub, + to, + } +} diff --git a/compiler/rustc_borrowck/src/region_infer/mod.rs b/compiler/rustc_borrowck/src/region_infer/mod.rs index 0eecf98a6ede5..d2268c4779d63 100644 --- a/compiler/rustc_borrowck/src/region_infer/mod.rs +++ b/compiler/rustc_borrowck/src/region_infer/mod.rs @@ -13,15 +13,16 @@ use rustc_infer::infer::region_constraints::{GenericKind, VarInfos, VerifyBound, use rustc_infer::infer::{InferCtxt, NllRegionVariableOrigin, RegionVariableOrigin}; use rustc_middle::bug; use rustc_middle::mir::{ - BasicBlock, Body, ClosureOutlivesRequirement, ClosureOutlivesSubject, ClosureOutlivesSubjectTy, - ClosureRegionRequirements, ConstraintCategory, Local, Location, ReturnConstraint, - TerminatorKind, + AnnotationSource, BasicBlock, Body, ClosureOutlivesRequirement, ClosureOutlivesSubject, + ClosureOutlivesSubjectTy, ClosureRegionRequirements, ConstraintCategory, Local, Location, + ReturnConstraint, TerminatorKind, }; use rustc_middle::traits::{ObligationCause, ObligationCauseCode}; use rustc_middle::ty::fold::fold_regions; use rustc_middle::ty::{self, RegionVid, Ty, TyCtxt, TypeFoldable, UniverseIndex}; use rustc_mir_dataflow::points::DenseLocationMap; use rustc_span::Span; +use rustc_span::hygiene::DesugaringKind; use tracing::{debug, instrument, trace}; use crate::BorrowckInferCtxt; @@ -30,7 +31,8 @@ use crate::constraints::{ConstraintSccIndex, OutlivesConstraint, OutlivesConstra use crate::dataflow::BorrowIndex; use crate::diagnostics::{RegionErrorKind, RegionErrors, UniverseInfo}; use crate::member_constraints::{MemberConstraintSet, NllMemberConstraintIndex}; -use crate::nll::PoloniusOutput; +use crate::polonius::LiveLoans; +use crate::polonius::legacy::PoloniusOutput; use crate::region_infer::reverse_sccs::ReverseSccGraph; use crate::region_infer::values::{LivenessValues, RegionElement, RegionValues, ToElementIndex}; use crate::type_check::free_region_relations::UniversalRegionRelations; @@ -315,11 +317,6 @@ enum Trace<'tcx> { NotVisited, } -#[derive(Clone, PartialEq, Eq, Debug)] -pub(crate) enum ExtraConstraintInfo { - PlaceholderFromPredicate(Span), -} - #[instrument(skip(infcx, sccs), level = "debug")] fn sccs_info<'tcx>(infcx: &BorrowckInferCtxt<'tcx>, sccs: &ConstraintSccs) { use crate::renumber::RegionCtxt; @@ -396,7 +393,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { var_infos: VarInfos, constraints: MirTypeckRegionConstraints<'tcx>, universal_region_relations: Frozen>, - elements: Rc, + location_map: Rc, ) -> Self { let universal_regions = &universal_region_relations.universal_regions; let MirTypeckRegionConstraints { @@ -440,7 +437,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { } let mut scc_values = - RegionValues::new(elements, universal_regions.len(), placeholder_indices); + RegionValues::new(location_map, universal_regions.len(), placeholder_indices); for region in liveness_constraints.regions() { let scc = constraint_sccs.scc(region); @@ -571,7 +568,9 @@ impl<'tcx> RegionInferenceContext<'tcx> { /// Given a universal region in scope on the MIR, returns the /// corresponding index. /// - /// (Panics if `r` is not a registered universal region.) + /// Panics if `r` is not a registered universal region, most notably + /// if it is a placeholder. Handling placeholders requires access to the + /// `MirTypeckRegionConstraints`. 
pub(crate) fn to_region_vid(&self, r: ty::Region<'tcx>) -> RegionVid { self.universal_regions().to_region_vid(r) } @@ -795,7 +794,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { // If the member region lives in a higher universe, we currently choose // the most conservative option by leaving it unchanged. - if !self.constraint_sccs().annotation(scc).min_universe().is_root() { return; } @@ -823,12 +821,14 @@ impl<'tcx> RegionInferenceContext<'tcx> { } debug!(?choice_regions, "after ub"); - // At this point we can pick any member of `choice_regions`, but to avoid potential - // non-determinism we will pick the *unique minimum* choice. + // At this point we can pick any member of `choice_regions` and would like to choose + // it to be a small as possible. To avoid potential non-determinism we will pick the + // smallest such choice. // // Because universal regions are only partially ordered (i.e, not every two regions are // comparable), we will ignore any region that doesn't compare to all others when picking // the minimum choice. + // // For example, consider `choice_regions = ['static, 'a, 'b, 'c, 'd, 'e]`, where // `'static: 'a, 'static: 'b, 'a: 'c, 'b: 'c, 'c: 'd, 'c: 'e`. // `['d, 'e]` are ignored because they do not compare - the same goes for `['a, 'b]`. @@ -853,6 +853,8 @@ impl<'tcx> RegionInferenceContext<'tcx> { return; }; + // As we require `'scc: 'min_choice`, we have definitely already computed + // its `scc_values` at this point. let min_choice_scc = self.constraint_sccs.scc(min_choice); debug!(?min_choice, ?min_choice_scc); if self.scc_values.add_region(scc, min_choice_scc) { @@ -973,7 +975,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { propagated_outlives_requirements: &mut Vec>, ) -> bool { let tcx = infcx.tcx; - let TypeTest { generic_kind, lower_bound, span: blame_span, ref verify_bound } = *type_test; + let TypeTest { generic_kind, lower_bound, span: blame_span, verify_bound: _ } = *type_test; let generic_ty = generic_kind.to_ty(tcx); let Some(subject) = self.try_promote_type_test_subject(infcx, generic_ty) else { @@ -1011,25 +1013,10 @@ impl<'tcx> RegionInferenceContext<'tcx> { // For each region outlived by lower_bound find a non-local, // universal region (it may be the same region) and add it to // `ClosureOutlivesRequirement`. + let mut found_outlived_universal_region = false; for ur in self.scc_values.universal_regions_outlived_by(r_scc) { + found_outlived_universal_region = true; debug!("universal_region_outlived_by ur={:?}", ur); - // Check whether we can already prove that the "subject" outlives `ur`. - // If so, we don't have to propagate this requirement to our caller. - // - // To continue the example from the function, if we are trying to promote - // a requirement that `T: 'X`, and we know that `'X = '1 + '2` (i.e., the union - // `'1` and `'2`), then in this loop `ur` will be `'1` (and `'2`). So here - // we check whether `T: '1` is something we *can* prove. If so, no need - // to propagate that requirement. - // - // This is needed because -- particularly in the case - // where `ur` is a local bound -- we are sometimes in a - // position to prove things that our caller cannot. See - // #53570 for an example. 
- if self.eval_verify_bound(infcx, generic_ty, ur, &verify_bound) { - continue; - } - let non_local_ub = self.universal_region_relations.non_local_upper_bounds(ur); debug!(?non_local_ub); @@ -1051,6 +1038,11 @@ impl<'tcx> RegionInferenceContext<'tcx> { propagated_outlives_requirements.push(requirement); } } + // If we succeed to promote the subject, i.e. it only contains non-local regions, + // and fail to prove the type test inside of the closure, the `lower_bound` has to + // also be at least as large as some universal region, as the type test is otherwise + // trivial. + assert!(found_outlived_universal_region); true } @@ -1943,10 +1935,16 @@ impl<'tcx> RegionInferenceContext<'tcx> { from_region: RegionVid, from_region_origin: NllRegionVariableOrigin, target_test: impl Fn(RegionVid) -> bool, - ) -> (BlameConstraint<'tcx>, Vec) { + ) -> (BlameConstraint<'tcx>, Vec>) { // Find all paths - let (path, target_region) = - self.find_constraint_paths_between_regions(from_region, target_test).unwrap(); + let (path, target_region) = self + .find_constraint_paths_between_regions(from_region, target_test) + .or_else(|| { + self.find_constraint_paths_between_regions(from_region, |r| { + self.cannot_name_placeholder(from_region, r) + }) + }) + .unwrap(); debug!( "path={:#?}", path.iter() @@ -1959,25 +1957,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { .collect::>() ); - let mut extra_info = vec![]; - for constraint in path.iter() { - let outlived = constraint.sub; - let Some(origin) = self.var_infos.get(outlived) else { - continue; - }; - let RegionVariableOrigin::Nll(NllRegionVariableOrigin::Placeholder(p)) = origin.origin - else { - continue; - }; - debug!(?constraint, ?p); - let ConstraintCategory::Predicate(span) = constraint.category else { - continue; - }; - extra_info.push(ExtraConstraintInfo::PlaceholderFromPredicate(span)); - // We only want to point to one - break; - } - // We try to avoid reporting a `ConstraintCategory::Predicate` as our best constraint. // Instead, we use it to produce an improved `ObligationCauseCode`. // FIXME - determine what we should do if we encounter multiple @@ -1996,42 +1975,8 @@ impl<'tcx> RegionInferenceContext<'tcx> { }) .unwrap_or_else(|| ObligationCauseCode::Misc); - // Classify each of the constraints along the path. - let mut categorized_path: Vec> = path - .iter() - .map(|constraint| BlameConstraint { - category: constraint.category, - from_closure: constraint.from_closure, - cause: ObligationCause::new(constraint.span, CRATE_DEF_ID, cause_code.clone()), - variance_info: constraint.variance_info, - }) - .collect(); - debug!("categorized_path={:#?}", categorized_path); - - // To find the best span to cite, we first try to look for the - // final constraint that is interesting and where the `sup` is - // not unified with the ultimate target region. The reason - // for this is that we have a chain of constraints that lead - // from the source to the target region, something like: - // - // '0: '1 ('0 is the source) - // '1: '2 - // '2: '3 - // '3: '4 - // '4: '5 - // '5: '6 ('6 is the target) - // - // Some of those regions are unified with `'6` (in the same - // SCC). We want to screen those out. After that point, the - // "closest" constraint we have to the end is going to be the - // most likely to be the point where the value escapes -- but - // we still want to screen for an "interesting" point to - // highlight (e.g., a call site or something). 
- let target_scc = self.constraint_sccs.scc(target_region); - let mut range = 0..path.len(); - - // As noted above, when reporting an error, there is typically a chain of constraints - // leading from some "source" region which must outlive some "target" region. + // When reporting an error, there is typically a chain of constraints leading from some + // "source" region which must outlive some "target" region. // In most cases, we prefer to "blame" the constraints closer to the target -- // but there is one exception. When constraints arise from higher-ranked subtyping, // we generally prefer to blame the source value, @@ -2072,78 +2017,114 @@ impl<'tcx> RegionInferenceContext<'tcx> { | NllRegionVariableOrigin::Existential { from_forall: true } => false, }; - let find_region = |i: &usize| { - let constraint = &path[*i]; - - let constraint_sup_scc = self.constraint_sccs.scc(constraint.sup); - - if blame_source { - match categorized_path[*i].category { - ConstraintCategory::OpaqueType - | ConstraintCategory::Boring - | ConstraintCategory::BoringNoLocation - | ConstraintCategory::Internal - | ConstraintCategory::Predicate(_) => false, - ConstraintCategory::TypeAnnotation - | ConstraintCategory::Return(_) - | ConstraintCategory::Yield => true, - _ => constraint_sup_scc != target_scc, - } + // To pick a constraint to blame, we organize constraints by how interesting we expect them + // to be in diagnostics, then pick the most interesting one closest to either the source or + // the target on our constraint path. + let constraint_interest = |constraint: &OutlivesConstraint<'tcx>| { + // Try to avoid blaming constraints from desugarings, since they may not clearly match + // match what users have written. As an exception, allow blaming returns generated by + // `?` desugaring, since the correspondence is fairly clear. + let category = if let Some(kind) = constraint.span.desugaring_kind() + && (kind != DesugaringKind::QuestionMark + || !matches!(constraint.category, ConstraintCategory::Return(_))) + { + ConstraintCategory::Boring } else { - !matches!( - categorized_path[*i].category, - ConstraintCategory::OpaqueType - | ConstraintCategory::Boring - | ConstraintCategory::BoringNoLocation - | ConstraintCategory::Internal - | ConstraintCategory::Predicate(_) - ) - } - }; - - let best_choice = - if blame_source { range.rev().find(find_region) } else { range.find(find_region) }; - - debug!(?best_choice, ?blame_source, ?extra_info); + constraint.category + }; - if let Some(i) = best_choice { - if let Some(next) = categorized_path.get(i + 1) { - if matches!(categorized_path[i].category, ConstraintCategory::Return(_)) - && next.category == ConstraintCategory::OpaqueType + match category { + // Returns usually provide a type to blame and have specially written diagnostics, + // so prioritize them. + ConstraintCategory::Return(_) => 0, + // Unsizing coercions are interesting, since we have a note for that: + // `BorrowExplanation::add_object_lifetime_default_note`. + // FIXME(dianne): That note shouldn't depend on a coercion being blamed; see issue + // #131008 for an example of where we currently don't emit it but should. + // Once the note is handled properly, this case should be removed. Until then, it + // should be as limited as possible; the note is prone to false positives and this + // constraint usually isn't best to blame. 
+ ConstraintCategory::Cast { + unsize_to: Some(unsize_ty), + is_implicit_coercion: true, + } if target_region == self.universal_regions().fr_static + // Mirror the note's condition, to minimize how often this diverts blame. + && let ty::Adt(_, args) = unsize_ty.kind() + && args.iter().any(|arg| arg.as_type().is_some_and(|ty| ty.is_trait())) + // Mimic old logic for this, to minimize false positives in tests. + && !path + .iter() + .any(|c| matches!(c.category, ConstraintCategory::TypeAnnotation(_))) => { - // The return expression is being influenced by the return type being - // impl Trait, point at the return type and not the return expr. - return (next.clone(), extra_info); + 1 } + // Between other interesting constraints, order by their position on the `path`. + ConstraintCategory::Yield + | ConstraintCategory::UseAsConst + | ConstraintCategory::UseAsStatic + | ConstraintCategory::TypeAnnotation( + AnnotationSource::Ascription + | AnnotationSource::Declaration + | AnnotationSource::OpaqueCast, + ) + | ConstraintCategory::Cast { .. } + | ConstraintCategory::CallArgument(_) + | ConstraintCategory::CopyBound + | ConstraintCategory::SizedBound + | ConstraintCategory::Assignment + | ConstraintCategory::Usage + | ConstraintCategory::ClosureUpvar(_) => 2, + // Generic arguments are unlikely to be what relates regions together + ConstraintCategory::TypeAnnotation(AnnotationSource::GenericArg) => 3, + // We handle predicates and opaque types specially; don't prioritize them here. + ConstraintCategory::Predicate(_) | ConstraintCategory::OpaqueType => 4, + // `Boring` constraints can correspond to user-written code and have useful spans, + // but don't provide any other useful information for diagnostics. + ConstraintCategory::Boring => 5, + // `BoringNoLocation` constraints can point to user-written code, but are less + // specific, and are not used for relations that would make sense to blame. + ConstraintCategory::BoringNoLocation => 6, + // Do not blame internal constraints. + ConstraintCategory::Internal => 7, + ConstraintCategory::IllegalUniverse => 8, } + }; - if categorized_path[i].category == ConstraintCategory::Return(ReturnConstraint::Normal) - { - let field = categorized_path.iter().find_map(|p| { - if let ConstraintCategory::ClosureUpvar(f) = p.category { - Some(f) - } else { - None - } - }); - - if let Some(field) = field { - categorized_path[i].category = - ConstraintCategory::Return(ReturnConstraint::ClosureUpvar(field)); - } - } + let best_choice = if blame_source { + path.iter().enumerate().rev().min_by_key(|(_, c)| constraint_interest(c)).unwrap().0 + } else { + path.iter().enumerate().min_by_key(|(_, c)| constraint_interest(c)).unwrap().0 + }; - return (categorized_path[i].clone(), extra_info); - } + debug!(?best_choice, ?blame_source); - // If that search fails, that is.. unusual. Maybe everything - // is in the same SCC or something. In that case, find what - // appears to be the most interesting point to report to the - // user via an even more ad-hoc guess. - categorized_path.sort_by_key(|p| p.category); - debug!("sorted_path={:#?}", categorized_path); + let best_constraint = if let Some(next) = path.get(best_choice + 1) + && matches!(path[best_choice].category, ConstraintCategory::Return(_)) + && next.category == ConstraintCategory::OpaqueType + { + // The return expression is being influenced by the return type being + // impl Trait, point at the return type and not the return expr. 
+ *next + } else if path[best_choice].category == ConstraintCategory::Return(ReturnConstraint::Normal) + && let Some(field) = path.iter().find_map(|p| { + if let ConstraintCategory::ClosureUpvar(f) = p.category { Some(f) } else { None } + }) + { + OutlivesConstraint { + category: ConstraintCategory::Return(ReturnConstraint::ClosureUpvar(field)), + ..path[best_choice] + } + } else { + path[best_choice] + }; - (categorized_path.remove(0), extra_info) + let blame_constraint = BlameConstraint { + category: best_constraint.category, + from_closure: best_constraint.from_closure, + cause: ObligationCause::new(best_constraint.span, CRATE_DEF_ID, cause_code.clone()), + variance_info: best_constraint.variance_info, + }; + (blame_constraint, path) } pub(crate) fn universe_info(&self, universe: ty::UniverseIndex) -> UniverseInfo<'tcx> { @@ -2191,28 +2172,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { self.constraint_graph.region_graph(&self.constraints, self.universal_regions().fr_static) } - /// Returns whether the given region is considered live at all points: whether it is a - /// placeholder or a free region. - pub(crate) fn is_region_live_at_all_points(&self, region: RegionVid) -> bool { - // FIXME: there must be a cleaner way to find this information. At least, when - // higher-ranked subtyping is abstracted away from the borrowck main path, we'll only - // need to check whether this is a universal region. - let origin = self.region_definition(region).origin; - let live_at_all_points = matches!( - origin, - NllRegionVariableOrigin::Placeholder(_) | NllRegionVariableOrigin::FreeRegion - ); - live_at_all_points - } - - /// Returns whether the `loan_idx` is live at the given `location`: whether its issuing - /// region is contained within the type of a variable that is live at this point. - /// Note: for now, the sets of live loans is only available when using `-Zpolonius=next`. - pub(crate) fn is_loan_live_at(&self, loan_idx: BorrowIndex, location: Location) -> bool { - let point = self.liveness_constraints.point_from_location(location); - self.liveness_constraints.is_loan_live_at(loan_idx, point) - } - /// Returns the representative `RegionVid` for a given SCC. /// See `RegionTracker` for how a region variable ID is chosen. /// @@ -2224,13 +2183,31 @@ impl<'tcx> RegionInferenceContext<'tcx> { fn scc_representative(&self, scc: ConstraintSccIndex) -> RegionVid { self.constraint_sccs.annotation(scc).representative } + + pub(crate) fn liveness_constraints(&self) -> &LivenessValues { + &self.liveness_constraints + } + + /// When using `-Zpolonius=next`, records the given live loans for the loan scopes and active + /// loans dataflow computations. + pub(crate) fn record_live_loans(&mut self, live_loans: LiveLoans) { + self.liveness_constraints.record_live_loans(live_loans); + } + + /// Returns whether the `loan_idx` is live at the given `location`: whether its issuing + /// region is contained within the type of a variable that is live at this point. + /// Note: for now, the sets of live loans is only available when using `-Zpolonius=next`. + pub(crate) fn is_loan_live_at(&self, loan_idx: BorrowIndex, location: Location) -> bool { + let point = self.liveness_constraints.point_from_location(location); + self.liveness_constraints.is_loan_live_at(loan_idx, point) + } } impl<'tcx> RegionDefinition<'tcx> { fn new(universe: ty::UniverseIndex, rv_origin: RegionVariableOrigin) -> Self { // Create a new region definition. 
Note that, for free // regions, the `external_name` field gets updated later in - // `init_universal_regions`. + // `init_free_and_bound_regions`. let origin = match rv_origin { RegionVariableOrigin::Nll(origin) => origin, diff --git a/compiler/rustc_borrowck/src/region_infer/opaque_types.rs b/compiler/rustc_borrowck/src/region_infer/opaque_types.rs index 7164a129235f0..7c484327e3118 100644 --- a/compiler/rustc_borrowck/src/region_infer/opaque_types.rs +++ b/compiler/rustc_borrowck/src/region_infer/opaque_types.rs @@ -134,12 +134,13 @@ impl<'tcx> RegionInferenceContext<'tcx> { // the hidden type becomes the opaque type itself. In this case, this was an opaque // usage of the opaque type and we can ignore it. This check is mirrored in typeck's // writeback. - // FIXME(-Znext-solver): This should be unnecessary with the new solver. - if let ty::Alias(ty::Opaque, alias_ty) = ty.kind() - && alias_ty.def_id == opaque_type_key.def_id.to_def_id() - && alias_ty.args == opaque_type_key.args - { - continue; + if !infcx.next_trait_solver() { + if let ty::Alias(ty::Opaque, alias_ty) = ty.kind() + && alias_ty.def_id == opaque_type_key.def_id.to_def_id() + && alias_ty.args == opaque_type_key.args + { + continue; + } } // Sometimes two opaque types are the same only after we remap the generic parameters // back to the opaque type definition. E.g. we may have `OpaqueType` mapped to @@ -151,7 +152,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { let (Ok(e) | Err(e)) = prev .build_mismatch_error( &OpaqueHiddenType { ty, span: concrete_type.span }, - opaque_type_key.def_id, infcx.tcx, ) .map(|d| d.emit()); diff --git a/compiler/rustc_borrowck/src/region_infer/values.rs b/compiler/rustc_borrowck/src/region_infer/values.rs index a16bce63839b0..f1bcb353dc61c 100644 --- a/compiler/rustc_borrowck/src/region_infer/values.rs +++ b/compiler/rustc_borrowck/src/region_infer/values.rs @@ -11,6 +11,7 @@ use rustc_mir_dataflow::points::{DenseLocationMap, PointIndex}; use tracing::debug; use crate::BorrowIndex; +use crate::polonius::LiveLoans; rustc_index::newtype_index! { /// A single integer representing a `ty::Placeholder`. @@ -38,7 +39,7 @@ pub(crate) enum RegionElement { /// an interval matrix storing liveness ranges for each region-vid. pub(crate) struct LivenessValues { /// The map from locations to points. - elements: Rc, + location_map: Rc, /// Which regions are live. This is exclusive with the fine-grained tracking in `points`, and /// currently only used for validating promoteds (which don't care about more precise tracking). @@ -50,39 +51,18 @@ pub(crate) struct LivenessValues { /// region is live, only that it is. points: Option>, - /// When using `-Zpolonius=next`, for each point: the loans flowing into the live regions at - /// that point. - pub(crate) loans: Option, -} - -/// Data used to compute the loans that are live at a given point in the CFG, when using -/// `-Zpolonius=next`. -pub(crate) struct LiveLoans { - /// The set of loans that flow into a given region. When individual regions are marked as live - /// in the CFG, these inflowing loans are recorded as live. - pub(crate) inflowing_loans: SparseBitMatrix, - - /// The set of loans that are live at a given point in the CFG. - pub(crate) live_loans: SparseBitMatrix, -} - -impl LiveLoans { - pub(crate) fn new(num_loans: usize) -> Self { - LiveLoans { - live_loans: SparseBitMatrix::new(num_loans), - inflowing_loans: SparseBitMatrix::new(num_loans), - } - } + /// When using `-Zpolonius=next`, the set of loans that are live at a given point in the CFG. 
+ live_loans: Option, } impl LivenessValues { /// Create an empty map of regions to locations where they're live. - pub(crate) fn with_specific_points(elements: Rc) -> Self { + pub(crate) fn with_specific_points(location_map: Rc) -> Self { LivenessValues { live_regions: None, - points: Some(SparseIntervalMatrix::new(elements.num_points())), - elements, - loans: None, + points: Some(SparseIntervalMatrix::new(location_map.num_points())), + location_map, + live_loans: None, } } @@ -90,15 +70,23 @@ impl LivenessValues { /// /// Unlike `with_specific_points`, does not track exact locations where something is live, only /// which regions are live. - pub(crate) fn without_specific_points(elements: Rc) -> Self { + pub(crate) fn without_specific_points(location_map: Rc) -> Self { LivenessValues { live_regions: Some(Default::default()), points: None, - elements, - loans: None, + location_map, + live_loans: None, } } + /// Returns the liveness matrix of points where each region is live. Panics if the liveness + /// values have been created without any per-point data (that is, for promoteds). + pub(crate) fn points(&self) -> &SparseIntervalMatrix { + self.points + .as_ref() + .expect("this `LivenessValues` wasn't created using `with_specific_points`") + } + /// Iterate through each region that has a value in this set. pub(crate) fn regions(&self) -> impl Iterator + '_ { self.points.as_ref().expect("use with_specific_points").rows() @@ -114,20 +102,13 @@ impl LivenessValues { /// Records `region` as being live at the given `location`. pub(crate) fn add_location(&mut self, region: RegionVid, location: Location) { - let point = self.elements.point_from_location(location); + let point = self.location_map.point_from_location(location); debug!("LivenessValues::add_location(region={:?}, location={:?})", region, location); if let Some(points) = &mut self.points { points.insert(region, point); - } else if self.elements.point_in_range(point) { + } else if self.location_map.point_in_range(point) { self.live_regions.as_mut().unwrap().insert(region); } - - // When available, record the loans flowing into this region as live at the given point. - if let Some(loans) = self.loans.as_mut() { - if let Some(inflowing) = loans.inflowing_loans.row(region) { - loans.live_loans.union_row(point, inflowing); - } - } } /// Records `region` as being live at all the given `points`. @@ -135,20 +116,9 @@ impl LivenessValues { debug!("LivenessValues::add_points(region={:?}, points={:?})", region, points); if let Some(this) = &mut self.points { this.union_row(region, points); - } else if points.iter().any(|point| self.elements.point_in_range(point)) { + } else if points.iter().any(|point| self.location_map.point_in_range(point)) { self.live_regions.as_mut().unwrap().insert(region); } - - // When available, record the loans flowing into this region as live at the given points. - if let Some(loans) = self.loans.as_mut() { - if let Some(inflowing) = loans.inflowing_loans.row(region) { - if !inflowing.is_empty() { - for point in points.iter() { - loans.live_loans.union_row(point, inflowing); - } - } - } - } } /// Records `region` as being live at all the control-flow points. @@ -162,7 +132,7 @@ impl LivenessValues { /// Returns whether `region` is marked live at the given `location`. 
pub(crate) fn is_live_at(&self, region: RegionVid, location: Location) -> bool { - let point = self.elements.point_from_location(location); + let point = self.location_map.point_from_location(location); if let Some(points) = &self.points { points.row(region).is_some_and(|r| r.contains(point)) } else { @@ -183,28 +153,39 @@ impl LivenessValues { .row(region) .into_iter() .flat_map(|set| set.iter()) - .take_while(|&p| self.elements.point_in_range(p)) + .take_while(|&p| self.location_map.point_in_range(p)) } /// For debugging purposes, returns a pretty-printed string of the points where the `region` is /// live. pub(crate) fn pretty_print_live_points(&self, region: RegionVid) -> String { pretty_print_region_elements( - self.live_points(region).map(|p| RegionElement::Location(self.elements.to_location(p))), + self.live_points(region) + .map(|p| RegionElement::Location(self.location_map.to_location(p))), ) } #[inline] pub(crate) fn point_from_location(&self, location: Location) -> PointIndex { - self.elements.point_from_location(location) + self.location_map.point_from_location(location) + } + + #[inline] + pub(crate) fn location_from_point(&self, point: PointIndex) -> Location { + self.location_map.to_location(point) + } + + /// When using `-Zpolonius=next`, records the given live loans for the loan scopes and active + /// loans dataflow computations. + pub(crate) fn record_live_loans(&mut self, live_loans: LiveLoans) { + self.live_loans = Some(live_loans); } /// When using `-Zpolonius=next`, returns whether the `loan_idx` is live at the given `point`. pub(crate) fn is_loan_live_at(&self, loan_idx: BorrowIndex, point: PointIndex) -> bool { - self.loans + self.live_loans .as_ref() .expect("Accessing live loans requires `-Zpolonius=next`") - .live_loans .contains(point, loan_idx) } } @@ -259,7 +240,7 @@ impl PlaceholderIndices { /// because (since it is returned) it must live for at least `'a`. But /// it would also contain various points from within the function. pub(crate) struct RegionValues { - elements: Rc, + location_map: Rc, placeholder_indices: PlaceholderIndices, points: SparseIntervalMatrix, free_regions: SparseBitMatrix, @@ -274,14 +255,14 @@ impl RegionValues { /// Each of the regions in num_region_variables will be initialized with an /// empty set of points and no causal information. 
pub(crate) fn new( - elements: Rc, + location_map: Rc, num_universal_regions: usize, placeholder_indices: PlaceholderIndices, ) -> Self { - let num_points = elements.num_points(); + let num_points = location_map.num_points(); let num_placeholders = placeholder_indices.len(); Self { - elements, + location_map, points: SparseIntervalMatrix::new(num_points), placeholder_indices, free_regions: SparseBitMatrix::new(num_universal_regions), @@ -323,7 +304,7 @@ impl RegionValues { end: usize, ) -> Option { let row = self.points.row(r)?; - let block = self.elements.entry_point(block); + let block = self.location_map.entry_point(block); let start = block.plus(start); let end = block.plus(end); let first_unset = row.first_unset_in(start..=end)?; @@ -362,8 +343,8 @@ impl RegionValues { pub(crate) fn locations_outlived_by<'a>(&'a self, r: N) -> impl Iterator + 'a { self.points.row(r).into_iter().flat_map(move |set| { set.iter() - .take_while(move |&p| self.elements.point_in_range(p)) - .map(move |p| self.elements.to_location(p)) + .take_while(move |&p| self.location_map.point_in_range(p)) + .map(move |p| self.location_map.to_location(p)) }) } @@ -417,12 +398,12 @@ pub(crate) trait ToElementIndex: Debug + Copy { impl ToElementIndex for Location { fn add_to_row(self, values: &mut RegionValues, row: N) -> bool { - let index = values.elements.point_from_location(self); + let index = values.location_map.point_from_location(self); values.points.insert(row, index) } fn contained_in_row(self, values: &RegionValues, row: N) -> bool { - let index = values.elements.point_from_location(self); + let index = values.location_map.point_from_location(self); values.points.contains(row, index) } } @@ -451,14 +432,14 @@ impl ToElementIndex for ty::PlaceholderRegion { /// For debugging purposes, returns a pretty-printed string of the given points. 
pub(crate) fn pretty_print_points( - elements: &DenseLocationMap, + location_map: &DenseLocationMap, points: impl IntoIterator, ) -> String { pretty_print_region_elements( points .into_iter() - .take_while(|&p| elements.point_in_range(p)) - .map(|p| elements.to_location(p)) + .take_while(|&p| location_map.point_in_range(p)) + .map(|p| location_map.to_location(p)) .map(RegionElement::Location), ) } @@ -531,12 +512,12 @@ fn pretty_print_region_elements(elements: impl IntoIterator), Placeholder(Symbol), diff --git a/compiler/rustc_borrowck/src/session_diagnostics.rs b/compiler/rustc_borrowck/src/session_diagnostics.rs index 627444a4ce5b8..4be5d0dbf4284 100644 --- a/compiler/rustc_borrowck/src/session_diagnostics.rs +++ b/compiler/rustc_borrowck/src/session_diagnostics.rs @@ -480,3 +480,10 @@ pub(crate) struct SimdIntrinsicArgConst { pub arg: usize, pub intrinsic: String, } + +#[derive(LintDiagnostic)] +#[diag(borrowck_tail_expr_drop_order)] +pub(crate) struct TailExprDropOrder { + #[label] + pub borrowed: Span, +} diff --git a/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs b/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs index 918efac2a2018..4b7f53213886e 100644 --- a/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs +++ b/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs @@ -77,17 +77,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { #[instrument(skip(self), level = "debug")] pub(super) fn convert_all(&mut self, query_constraints: &QueryRegionConstraints<'tcx>) { - let QueryRegionConstraints { outlives, member_constraints } = query_constraints; - - // Annoying: to invoke `self.to_region_vid`, we need access to - // `self.constraints`, but we also want to be mutating - // `self.member_constraints`. For now, just swap out the value - // we want and replace at the end. - let mut tmp = std::mem::take(&mut self.constraints.member_constraints); - for member_constraint in member_constraints { - tmp.push_constraint(member_constraint, |r| self.to_region_vid(r)); - } - self.constraints.member_constraints = tmp; + let QueryRegionConstraints { outlives } = query_constraints; for &(predicate, constraint_category) in outlives { self.convert(predicate, constraint_category); @@ -295,13 +285,8 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { match result { Ok(TypeOpOutput { output: ty, constraints, .. 
}) => { - if let Some(constraints) = constraints { - assert!( - constraints.member_constraints.is_empty(), - "no member constraints expected from normalizing: {:#?}", - constraints.member_constraints - ); - next_outlives_predicates.extend(constraints.outlives.iter().copied()); + if let Some(QueryRegionConstraints { outlives }) = constraints { + next_outlives_predicates.extend(outlives.iter().copied()); } ty } diff --git a/compiler/rustc_borrowck/src/type_check/free_region_relations.rs b/compiler/rustc_borrowck/src/type_check/free_region_relations.rs index ea965eb654583..edf612f4e97a4 100644 --- a/compiler/rustc_borrowck/src/type_check/free_region_relations.rs +++ b/compiler/rustc_borrowck/src/type_check/free_region_relations.rs @@ -5,13 +5,14 @@ use rustc_infer::infer::canonical::QueryRegionConstraints; use rustc_infer::infer::outlives::env::RegionBoundPairs; use rustc_infer::infer::region_constraints::GenericKind; use rustc_infer::infer::{InferCtxt, outlives}; +use rustc_infer::traits::ScrubbedTraitError; use rustc_middle::mir::ConstraintCategory; use rustc_middle::traits::ObligationCause; use rustc_middle::traits::query::OutlivesBound; use rustc_middle::ty::{self, RegionVid, Ty, TypeVisitableExt}; use rustc_span::{ErrorGuaranteed, Span}; -use rustc_trait_selection::error_reporting::InferCtxtErrorExt; -use rustc_trait_selection::solve::deeply_normalize; +use rustc_trait_selection::solve::NoSolution; +use rustc_trait_selection::traits::query::type_op::custom::CustomTypeOp; use rustc_trait_selection::traits::query::type_op::{self, TypeOp}; use tracing::{debug, instrument}; use type_op::TypeOpOutput; @@ -229,24 +230,14 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { let mut constraints = vec![]; let mut known_type_outlives_obligations = vec![]; for bound in param_env.caller_bounds() { - let Some(mut outlives) = bound.as_type_outlives_clause() else { continue }; - - // In the new solver, normalize the type-outlives obligation assumptions. - if self.infcx.next_trait_solver() { - match deeply_normalize( - self.infcx.at(&ObligationCause::misc(span, defining_ty_def_id), param_env), + if let Some(outlives) = bound.as_type_outlives_clause() { + self.normalize_and_push_type_outlives_obligation( outlives, - ) { - Ok(normalized_outlives) => { - outlives = normalized_outlives; - } - Err(e) => { - self.infcx.err_ctxt().report_fulfillment_errors(e); - } - } - } - - known_type_outlives_obligations.push(outlives); + span, + &mut known_type_outlives_obligations, + &mut constraints, + ); + }; } let unnormalized_input_output_tys = self @@ -356,6 +347,44 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { } } + fn normalize_and_push_type_outlives_obligation( + &self, + mut outlives: ty::PolyTypeOutlivesPredicate<'tcx>, + span: Span, + known_type_outlives_obligations: &mut Vec>, + constraints: &mut Vec<&QueryRegionConstraints<'tcx>>, + ) { + // In the new solver, normalize the type-outlives obligation assumptions. 
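The new `normalize_and_push_type_outlives_obligation` helper that follows has a simple overall shape: try to normalize each caller-bound outlives assumption, report and skip it on failure, and otherwise push the normalized bound (plus any side constraints). As a rough, standalone model of that control flow only, with toy string "types" and an invented failure condition rather than the compiler's trait-solver machinery:

    use std::collections::HashMap;

    // Toy stand-in for "normalize each outlives assumption before recording it":
    // aliases are expanded through a map; assumptions that fail to normalize are
    // reported and skipped instead of aborting the whole pass.
    fn push_normalized<'a>(
        assumption: &'a str,
        aliases: &HashMap<&str, &'a str>,
        known_obligations: &mut Vec<&'a str>,
    ) {
        let normalized: &'a str = match aliases.get(assumption) {
            Some(expanded) => *expanded,
            // Invented failure case, standing in for a normalization error.
            None if assumption.starts_with('?') => {
                eprintln!("could not normalize {assumption:?}"); // `delayed_bug` in the compiler
                return;
            }
            None => assumption,
        };
        known_obligations.push(normalized);
    }

    fn main() {
        let aliases = HashMap::from([("Alias<T>: 'a", "Vec<T>: 'a")]);
        let mut known = Vec::new();
        push_normalized("Alias<T>: 'a", &aliases, &mut known);
        push_normalized("?unresolved", &aliases, &mut known);
        assert_eq!(known, vec!["Vec<T>: 'a"]);
    }

The patch itself continues with the guarded normalization below, performed only under the new trait solver.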
+ if self.infcx.next_trait_solver() { + let Ok(TypeOpOutput { + output: normalized_outlives, + constraints: constraints_normalize, + error_info: _, + }) = CustomTypeOp::new( + |ocx| { + ocx.deeply_normalize( + &ObligationCause::dummy_with_span(span), + self.param_env, + outlives, + ) + .map_err(|_: Vec>| NoSolution) + }, + "normalize type outlives obligation", + ) + .fully_perform(self.infcx, span) + else { + self.infcx.dcx().delayed_bug(format!("could not normalize {outlives:?}")); + return; + }; + outlives = normalized_outlives; + if let Some(c) = constraints_normalize { + constraints.push(c); + } + } + + known_type_outlives_obligations.push(outlives); + } + /// Update the type of a single local, which should represent /// either the return type of the MIR or one of its arguments. At /// the same time, compute and add any implied bounds that come diff --git a/compiler/rustc_borrowck/src/type_check/liveness/local_use_map.rs b/compiler/rustc_borrowck/src/type_check/liveness/local_use_map.rs index 695a1cdac0d79..6182b68f6f411 100644 --- a/compiler/rustc_borrowck/src/type_check/liveness/local_use_map.rs +++ b/compiler/rustc_borrowck/src/type_check/liveness/local_use_map.rs @@ -82,7 +82,7 @@ impl<'a> Iterator for AppearancesIter<'a> { impl LocalUseMap { pub(crate) fn build( live_locals: &[Local], - elements: &DenseLocationMap, + location_map: &DenseLocationMap, body: &Body<'_>, ) -> Self { let nones = IndexVec::from_elem(None, &body.local_decls); @@ -101,7 +101,7 @@ impl LocalUseMap { IndexVec::from_elem(false, &body.local_decls); live_locals.iter().for_each(|&local| locals_with_use_data[local] = true); - LocalUseMapBuild { local_use_map: &mut local_use_map, elements, locals_with_use_data } + LocalUseMapBuild { local_use_map: &mut local_use_map, location_map, locals_with_use_data } .visit_body(body); local_use_map @@ -125,7 +125,7 @@ impl LocalUseMap { struct LocalUseMapBuild<'me> { local_use_map: &'me mut LocalUseMap, - elements: &'me DenseLocationMap, + location_map: &'me DenseLocationMap, // Vector used in `visit_local` to signal which `Local`s do we need // def/use/drop information on, constructed from `live_locals` (that @@ -147,7 +147,7 @@ impl Visitor<'_> for LocalUseMapBuild<'_> { DefUse::Use => &mut self.local_use_map.first_use_at[local], DefUse::Drop => &mut self.local_use_map.first_drop_at[local], }; - let point_index = self.elements.point_from_location(location); + let point_index = self.location_map.point_from_location(location); let appearance_index = self .local_use_map .appearances diff --git a/compiler/rustc_borrowck/src/type_check/liveness/mod.rs b/compiler/rustc_borrowck/src/type_check/liveness/mod.rs index 05e4a176a6d19..4e0b2a4e29681 100644 --- a/compiler/rustc_borrowck/src/type_check/liveness/mod.rs +++ b/compiler/rustc_borrowck/src/type_check/liveness/mod.rs @@ -3,6 +3,7 @@ use rustc_data_structures::fx::FxHashSet; use rustc_middle::mir::visit::{TyContext, Visitor}; use rustc_middle::mir::{Body, Local, Location, SourceInfo}; use rustc_middle::span_bug; +use rustc_middle::ty::relate::Relate; use rustc_middle::ty::visit::TypeVisitable; use rustc_middle::ty::{GenericArgsRef, Region, RegionVid, Ty, TyCtxt}; use rustc_mir_dataflow::ResultsCursor; @@ -13,11 +14,11 @@ use tracing::debug; use super::TypeChecker; use crate::constraints::OutlivesConstraintSet; +use crate::polonius::PoloniusContext; use crate::region_infer::values::LivenessValues; use crate::universal_regions::UniversalRegions; mod local_use_map; -mod polonius; mod trace; /// Combines liveness analysis with 
initialization analysis to @@ -31,26 +32,32 @@ mod trace; pub(super) fn generate<'a, 'tcx>( typeck: &mut TypeChecker<'_, 'tcx>, body: &Body<'tcx>, - elements: &DenseLocationMap, + location_map: &DenseLocationMap, flow_inits: ResultsCursor<'a, 'tcx, MaybeInitializedPlaces<'a, 'tcx>>, move_data: &MoveData<'tcx>, ) { debug!("liveness::generate"); - let free_regions = regions_that_outlive_free_regions( - typeck.infcx.num_region_vars(), - &typeck.universal_regions, - &typeck.constraints.outlives_constraints, - ); + // NLLs can avoid computing some liveness data here because its constraints are + // location-insensitive, but that doesn't work in polonius: locals whose type contains a region + // that outlives a free region are not necessarily live everywhere in a flow-sensitive setting, + // unlike NLLs. + let free_regions = if !typeck.tcx().sess.opts.unstable_opts.polonius.is_next_enabled() { + regions_that_outlive_free_regions( + typeck.infcx.num_region_vars(), + &typeck.universal_regions, + &typeck.constraints.outlives_constraints, + ) + } else { + typeck.universal_regions.universal_regions_iter().collect() + }; let (relevant_live_locals, boring_locals) = compute_relevant_live_locals(typeck.tcx(), &free_regions, body); - polonius::emit_access_facts(typeck, body, move_data); - trace::trace( typeck, body, - elements, + location_map, flow_inits, move_data, relevant_live_locals, @@ -59,7 +66,13 @@ pub(super) fn generate<'a, 'tcx>( // Mark regions that should be live where they appear within rvalues or within a call: like // args, regions, and types. - record_regular_live_regions(typeck.tcx(), &mut typeck.constraints.liveness_constraints, body); + record_regular_live_regions( + typeck.tcx(), + &mut typeck.constraints.liveness_constraints, + &typeck.universal_regions, + &mut typeck.polonius_context, + body, + ); } // The purpose of `compute_relevant_live_locals` is to define the subset of `Local` @@ -133,9 +146,12 @@ fn regions_that_outlive_free_regions<'tcx>( fn record_regular_live_regions<'tcx>( tcx: TyCtxt<'tcx>, liveness_constraints: &mut LivenessValues, + universal_regions: &UniversalRegions<'tcx>, + polonius_context: &mut Option, body: &Body<'tcx>, ) { - let mut visitor = LiveVariablesVisitor { tcx, liveness_constraints }; + let mut visitor = + LiveVariablesVisitor { tcx, liveness_constraints, universal_regions, polonius_context }; for (bb, data) in body.basic_blocks.iter_enumerated() { visitor.visit_basic_block_data(bb, data); } @@ -145,6 +161,8 @@ fn record_regular_live_regions<'tcx>( struct LiveVariablesVisitor<'a, 'tcx> { tcx: TyCtxt<'tcx>, liveness_constraints: &'a mut LivenessValues, + universal_regions: &'a UniversalRegions<'tcx>, + polonius_context: &'a mut Option, } impl<'a, 'tcx> Visitor<'tcx> for LiveVariablesVisitor<'a, 'tcx> { @@ -187,12 +205,17 @@ impl<'a, 'tcx> LiveVariablesVisitor<'a, 'tcx> { /// all regions appearing in the type of `value` must be live at `location`. fn record_regions_live_at(&mut self, value: T, location: Location) where - T: TypeVisitable>, + T: TypeVisitable> + Relate>, { debug!("record_regions_live_at(value={:?}, location={:?})", value, location); self.tcx.for_each_free_region(&value, |live_region| { let live_region_vid = live_region.as_var(); self.liveness_constraints.add_location(live_region_vid, location); }); + + // When using `-Zpolonius=next`, we record the variance of each live region. 
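The guarded call just below records, per live region, the variance with which it occurred. For intuition only, here is a standalone toy model of "remember a variance per region index"; the merge policy shown (invariant overrides covariant) and the names (`PoloniusContextSketch`) are assumptions made for the sketch, not a description of what `PoloniusContext` actually does.

    use std::collections::HashMap;

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum Variance {
        Covariant,
        Invariant,
    }

    // Toy "polonius context": remembers, per region index, the most restrictive
    // variance seen so far (invariant wins over covariant in this sketch).
    #[derive(Default)]
    struct PoloniusContextSketch {
        live_region_variances: HashMap<usize, Variance>,
    }

    impl PoloniusContextSketch {
        fn record_live_region_variance(&mut self, region: usize, variance: Variance) {
            self.live_region_variances
                .entry(region)
                .and_modify(|v| {
                    if variance == Variance::Invariant {
                        *v = Variance::Invariant;
                    }
                })
                .or_insert(variance);
        }
    }

    fn main() {
        let mut cx = PoloniusContextSketch::default();
        cx.record_live_region_variance(0, Variance::Covariant);
        cx.record_live_region_variance(0, Variance::Invariant);
        assert_eq!(cx.live_region_variances.get(&0), Some(&Variance::Invariant));
    }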
+ if let Some(polonius_context) = self.polonius_context { + polonius_context.record_live_region_variance(self.tcx, self.universal_regions, value); + } } } diff --git a/compiler/rustc_borrowck/src/type_check/liveness/polonius.rs b/compiler/rustc_borrowck/src/type_check/liveness/polonius.rs deleted file mode 100644 index 5ffba94ee6821..0000000000000 --- a/compiler/rustc_borrowck/src/type_check/liveness/polonius.rs +++ /dev/null @@ -1,123 +0,0 @@ -use rustc_middle::mir::visit::{MutatingUseContext, PlaceContext, Visitor}; -use rustc_middle::mir::{Body, Local, Location, Place}; -use rustc_middle::ty::GenericArg; -use rustc_mir_dataflow::move_paths::{LookupResult, MoveData, MovePathIndex}; -use tracing::debug; - -use super::TypeChecker; -use crate::def_use::{self, DefUse}; -use crate::location::{LocationIndex, LocationTable}; - -type VarPointRelation = Vec<(Local, LocationIndex)>; -type PathPointRelation = Vec<(MovePathIndex, LocationIndex)>; - -/// Emit polonius facts for variable defs, uses, drops, and path accesses. -pub(super) fn emit_access_facts<'a, 'tcx>( - typeck: &mut TypeChecker<'a, 'tcx>, - body: &Body<'tcx>, - move_data: &MoveData<'tcx>, -) { - if let Some(facts) = typeck.all_facts.as_mut() { - debug!("emit_access_facts()"); - - let _prof_timer = typeck.infcx.tcx.prof.generic_activity("polonius_fact_generation"); - let location_table = typeck.location_table; - - let mut extractor = AccessFactsExtractor { - var_defined_at: &mut facts.var_defined_at, - var_used_at: &mut facts.var_used_at, - var_dropped_at: &mut facts.var_dropped_at, - path_accessed_at_base: &mut facts.path_accessed_at_base, - location_table, - move_data, - }; - extractor.visit_body(body); - - for (local, local_decl) in body.local_decls.iter_enumerated() { - debug!( - "add use_of_var_derefs_origin facts - local={:?}, type={:?}", - local, local_decl.ty - ); - let universal_regions = &typeck.universal_regions; - typeck.infcx.tcx.for_each_free_region(&local_decl.ty, |region| { - let region_vid = universal_regions.to_region_vid(region); - facts.use_of_var_derefs_origin.push((local, region_vid.into())); - }); - } - } -} - -/// For every potentially drop()-touched region `region` in `local`'s type -/// (`kind`), emit a Polonius `use_of_var_derefs_origin(local, origin)` fact. -pub(super) fn emit_drop_facts<'tcx>( - typeck: &mut TypeChecker<'_, 'tcx>, - local: Local, - kind: &GenericArg<'tcx>, -) { - debug!("emit_drop_facts(local={:?}, kind={:?}", local, kind); - if let Some(facts) = typeck.all_facts.as_mut() { - let _prof_timer = typeck.infcx.tcx.prof.generic_activity("polonius_fact_generation"); - let universal_regions = &typeck.universal_regions; - typeck.infcx.tcx.for_each_free_region(kind, |drop_live_region| { - let region_vid = universal_regions.to_region_vid(drop_live_region); - facts.drop_of_var_derefs_origin.push((local, region_vid.into())); - }); - } -} - -/// MIR visitor extracting point-wise facts about accesses. 
-struct AccessFactsExtractor<'a, 'tcx> { - var_defined_at: &'a mut VarPointRelation, - var_used_at: &'a mut VarPointRelation, - location_table: &'a LocationTable, - var_dropped_at: &'a mut VarPointRelation, - move_data: &'a MoveData<'tcx>, - path_accessed_at_base: &'a mut PathPointRelation, -} - -impl<'tcx> AccessFactsExtractor<'_, 'tcx> { - fn location_to_index(&self, location: Location) -> LocationIndex { - self.location_table.mid_index(location) - } -} - -impl<'a, 'tcx> Visitor<'tcx> for AccessFactsExtractor<'a, 'tcx> { - fn visit_local(&mut self, local: Local, context: PlaceContext, location: Location) { - match def_use::categorize(context) { - Some(DefUse::Def) => { - debug!("AccessFactsExtractor - emit def"); - self.var_defined_at.push((local, self.location_to_index(location))); - } - Some(DefUse::Use) => { - debug!("AccessFactsExtractor - emit use"); - self.var_used_at.push((local, self.location_to_index(location))); - } - Some(DefUse::Drop) => { - debug!("AccessFactsExtractor - emit drop"); - self.var_dropped_at.push((local, self.location_to_index(location))); - } - _ => (), - } - } - - fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, location: Location) { - self.super_place(place, context, location); - - match context { - PlaceContext::NonMutatingUse(_) - | PlaceContext::MutatingUse(MutatingUseContext::Borrow) => { - let path = match self.move_data.rev_lookup.find(place.as_ref()) { - LookupResult::Exact(path) | LookupResult::Parent(Some(path)) => path, - _ => { - // There's no path access to emit. - return; - } - }; - debug!("AccessFactsExtractor - emit path access ({path:?}, {location:?})"); - self.path_accessed_at_base.push((path, self.location_to_index(location))); - } - - _ => {} - } - } -} diff --git a/compiler/rustc_borrowck/src/type_check/liveness/trace.rs b/compiler/rustc_borrowck/src/type_check/liveness/trace.rs index 539d3f97a6387..c564d85616e25 100644 --- a/compiler/rustc_borrowck/src/type_check/liveness/trace.rs +++ b/compiler/rustc_borrowck/src/type_check/liveness/trace.rs @@ -1,10 +1,11 @@ use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_index::interval::IntervalSet; use rustc_infer::infer::canonical::QueryRegionConstraints; use rustc_infer::infer::outlives::for_liveness; use rustc_middle::mir::{BasicBlock, Body, ConstraintCategory, Local, Location}; use rustc_middle::traits::query::DropckOutlivesResult; +use rustc_middle::ty::relate::Relate; use rustc_middle::ty::{Ty, TyCtxt, TypeVisitable, TypeVisitableExt}; use rustc_mir_dataflow::ResultsCursor; use rustc_mir_dataflow::impls::MaybeInitializedPlaces; @@ -14,10 +15,9 @@ use rustc_span::DUMMY_SP; use rustc_trait_selection::traits::query::type_op::{DropckOutlives, TypeOp, TypeOpOutput}; use tracing::debug; -use crate::location::RichLocation; -use crate::region_infer::values::{self, LiveLoans}; +use crate::polonius; +use crate::region_infer::values; use crate::type_check::liveness::local_use_map::LocalUseMap; -use crate::type_check::liveness::polonius; use crate::type_check::{NormalizeLocation, TypeChecker}; /// This is the heart of the liveness computation. 
For each variable X @@ -37,49 +37,18 @@ use crate::type_check::{NormalizeLocation, TypeChecker}; pub(super) fn trace<'a, 'tcx>( typeck: &mut TypeChecker<'_, 'tcx>, body: &Body<'tcx>, - elements: &DenseLocationMap, + location_map: &DenseLocationMap, flow_inits: ResultsCursor<'a, 'tcx, MaybeInitializedPlaces<'a, 'tcx>>, move_data: &MoveData<'tcx>, relevant_live_locals: Vec, boring_locals: Vec, ) { - let local_use_map = &LocalUseMap::build(&relevant_live_locals, elements, body); - - // When using `-Zpolonius=next`, compute the set of loans that can reach a given region. - if typeck.tcx().sess.opts.unstable_opts.polonius.is_next_enabled() { - let borrow_set = &typeck.borrow_set; - let mut live_loans = LiveLoans::new(borrow_set.len()); - let outlives_constraints = &typeck.constraints.outlives_constraints; - let graph = outlives_constraints.graph(typeck.infcx.num_region_vars()); - let region_graph = - graph.region_graph(outlives_constraints, typeck.universal_regions.fr_static); - - // Traverse each issuing region's constraints, and record the loan as flowing into the - // outlived region. - for (loan, issuing_region_data) in borrow_set.iter_enumerated() { - for succ in rustc_data_structures::graph::depth_first_search( - ®ion_graph, - issuing_region_data.region, - ) { - // We don't need to mention that a loan flows into its issuing region. - if succ == issuing_region_data.region { - continue; - } - - live_loans.inflowing_loans.insert(succ, loan); - } - } - - // Store the inflowing loans in the liveness constraints: they will be used to compute live - // loans when liveness data is recorded there. - typeck.constraints.liveness_constraints.loans = Some(live_loans); - }; - + let local_use_map = &LocalUseMap::build(&relevant_live_locals, location_map, body); let cx = LivenessContext { typeck, body, flow_inits, - elements, + location_map, local_use_map, move_data, drop_data: FxIndexMap::default(), @@ -100,7 +69,7 @@ struct LivenessContext<'a, 'typeck, 'b, 'tcx> { typeck: &'a mut TypeChecker<'typeck, 'tcx>, /// Defines the `PointIndex` mapping - elements: &'a DenseLocationMap, + location_map: &'a DenseLocationMap, /// MIR we are analyzing. body: &'a Body<'tcx>, @@ -129,7 +98,7 @@ struct LivenessResults<'a, 'typeck, 'b, 'tcx> { cx: LivenessContext<'a, 'typeck, 'b, 'tcx>, /// Set of points that define the current local. - defs: BitSet, + defs: DenseBitSet, /// Points where the current variable is "use live" -- meaning /// that there is a future "full use" that may use its value. @@ -149,10 +118,10 @@ struct LivenessResults<'a, 'typeck, 'b, 'tcx> { impl<'a, 'typeck, 'b, 'tcx> LivenessResults<'a, 'typeck, 'b, 'tcx> { fn new(cx: LivenessContext<'a, 'typeck, 'b, 'tcx>) -> Self { - let num_points = cx.elements.num_points(); + let num_points = cx.location_map.num_points(); LivenessResults { cx, - defs: BitSet::new_empty(num_points), + defs: DenseBitSet::new_empty(num_points), use_live_at: IntervalSet::new(num_points), drop_live_at: IntervalSet::new(num_points), drop_locations: vec![], @@ -210,51 +179,40 @@ impl<'a, 'typeck, 'b, 'tcx> LivenessResults<'a, 'typeck, 'b, 'tcx> { /// /// Add facts for all locals with free regions, since regions may outlive /// the function body only at certain nodes in the CFG. 
- fn add_extra_drop_facts(&mut self, relevant_live_locals: &[Local]) -> Option<()> { + fn add_extra_drop_facts(&mut self, relevant_live_locals: &[Local]) { // This collect is more necessary than immediately apparent // because these facts go into `add_drop_live_facts_for()`, - // which also writes to `all_facts`, and so this is genuinely + // which also writes to `polonius_facts`, and so this is genuinely // a simultaneous overlapping mutable borrow. // FIXME for future hackers: investigate whether this is // actually necessary; these facts come from Polonius // and probably maybe plausibly does not need to go back in. // It may be necessary to just pick out the parts of // `add_drop_live_facts_for()` that make sense. + let Some(facts) = self.cx.typeck.polonius_facts.as_ref() else { return }; let facts_to_add: Vec<_> = { - let drop_used = &self.cx.typeck.all_facts.as_ref()?.var_dropped_at; - let relevant_live_locals: FxIndexSet<_> = relevant_live_locals.iter().copied().collect(); - drop_used + facts + .var_dropped_at .iter() - .filter_map(|(local, location_index)| { - let local_ty = self.cx.body.local_decls[*local].ty; - if relevant_live_locals.contains(local) || !local_ty.has_free_regions() { + .filter_map(|&(local, location_index)| { + let local_ty = self.cx.body.local_decls[local].ty; + if relevant_live_locals.contains(&local) || !local_ty.has_free_regions() { return None; } - let location = match self.cx.typeck.location_table.to_location(*location_index) - { - RichLocation::Start(l) => l, - RichLocation::Mid(l) => l, - }; - - Some((*local, local_ty, location)) + let location = self.cx.typeck.location_table.to_location(location_index); + Some((local, local_ty, location)) }) .collect() }; - // FIXME: these locations seem to have a special meaning (e.g. everywhere, at the end, - // ...), but I don't know which one. Please help me rename it to something descriptive! - // Also, if this IntervalSet is used in many places, it maybe should have a newtype'd - // name with a description of what it means for future mortals passing by. - let locations = IntervalSet::new(self.cx.elements.num_points()); - + let live_at = IntervalSet::new(self.cx.location_map.num_points()); for (local, local_ty, location) in facts_to_add { - self.cx.add_drop_live_facts_for(local, local_ty, &[location], &locations); + self.cx.add_drop_live_facts_for(local, local_ty, &[location], &live_at); } - Some(()) } /// Clear the value of fields that are "per local variable". @@ -290,7 +248,7 @@ impl<'a, 'typeck, 'b, 'tcx> LivenessResults<'a, 'typeck, 'b, 'tcx> { // * Inclusively, the block start // * Exclusively, the previous definition (if it's in this block) // * Exclusively, the previous live_at setting (an optimization) - let block_start = self.cx.elements.to_block_start(p); + let block_start = self.cx.location_map.to_block_start(p); let previous_defs = self.defs.last_set_in(block_start..=p); let previous_live_at = self.use_live_at.last_set_in(block_start..=p); @@ -314,12 +272,12 @@ impl<'a, 'typeck, 'b, 'tcx> LivenessResults<'a, 'typeck, 'b, 'tcx> { // terminators of predecessor basic blocks. Push those onto the // stack so that the next iteration(s) will process them. 
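The renamed `location_map` calls below perform that backward step at point granularity. As a rough, block-granularity model of the same idea (walk backwards from a use, stop at the definition, and enqueue predecessors as block entries are reached), using plain indices instead of `PointIndex`/`BasicBlock`; this is purely illustrative and not the compiler's implementation:

    // Backward walk over a CFG given as predecessor lists: returns the blocks on
    // whose entry the variable is live, for a value defined in `def_block` and
    // used in `use_block`.
    fn blocks_live_on_entry(preds: &[Vec<usize>], def_block: usize, use_block: usize) -> Vec<usize> {
        let mut live = vec![false; preds.len()];
        let mut stack = vec![use_block];
        while let Some(block) = stack.pop() {
            if live[block] || block == def_block {
                continue; // stop at the definition, or if already visited
            }
            live[block] = true;
            // "Push the predecessors onto the stack so the next iterations process them."
            stack.extend(preds[block].iter().copied());
        }
        (0..preds.len()).filter(|&b| live[b]).collect()
    }

    fn main() {
        // CFG: 0 -> 1 -> 2 -> 3, with 1 also branching directly to 3.
        let preds = vec![vec![], vec![0], vec![1], vec![1, 2]];
        // A value defined in block 0 and used in block 3 is live on entry to 1, 2 and 3.
        assert_eq!(blocks_live_on_entry(&preds, 0, 3), vec![1, 2, 3]);
    }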
- let block = self.cx.elements.to_location(block_start).block; + let block = self.cx.location_map.to_location(block_start).block; self.stack.extend( self.cx.body.basic_blocks.predecessors()[block] .iter() .map(|&pred_bb| self.cx.body.terminator_loc(pred_bb)) - .map(|pred_loc| self.cx.elements.point_from_location(pred_loc)), + .map(|pred_loc| self.cx.location_map.point_from_location(pred_loc)), ); } } @@ -342,7 +300,7 @@ impl<'a, 'typeck, 'b, 'tcx> LivenessResults<'a, 'typeck, 'b, 'tcx> { // Find the drops where `local` is initialized. for drop_point in self.cx.local_use_map.drops(local) { - let location = self.cx.elements.to_location(drop_point); + let location = self.cx.location_map.to_location(drop_point); debug_assert_eq!(self.cx.body.terminator_loc(location.block), location,); if self.cx.initialized_at_terminator(location.block, mpi) @@ -378,7 +336,7 @@ impl<'a, 'typeck, 'b, 'tcx> LivenessResults<'a, 'typeck, 'b, 'tcx> { debug!( "compute_drop_live_points_for_block(mpi={:?}, term_point={:?})", self.cx.move_data.move_paths[mpi].place, - self.cx.elements.to_location(term_point), + self.cx.location_map.to_location(term_point), ); // We are only invoked with terminators where `mpi` is @@ -388,12 +346,15 @@ impl<'a, 'typeck, 'b, 'tcx> LivenessResults<'a, 'typeck, 'b, 'tcx> { // Otherwise, scan backwards through the statements in the // block. One of them may be either a definition or use // live point. - let term_location = self.cx.elements.to_location(term_point); + let term_location = self.cx.location_map.to_location(term_point); debug_assert_eq!(self.cx.body.terminator_loc(term_location.block), term_location,); let block = term_location.block; - let entry_point = self.cx.elements.entry_point(term_location.block); + let entry_point = self.cx.location_map.entry_point(term_location.block); for p in (entry_point..term_point).rev() { - debug!("compute_drop_live_points_for_block: p = {:?}", self.cx.elements.to_location(p)); + debug!( + "compute_drop_live_points_for_block: p = {:?}", + self.cx.location_map.to_location(p) + ); if self.defs.contains(p) { debug!("compute_drop_live_points_for_block: def site"); @@ -439,7 +400,7 @@ impl<'a, 'typeck, 'b, 'tcx> LivenessResults<'a, 'typeck, 'b, 'tcx> { } let pred_term_loc = self.cx.body.terminator_loc(pred_block); - let pred_term_point = self.cx.elements.point_from_location(pred_term_loc); + let pred_term_point = self.cx.location_map.point_from_location(pred_term_loc); // If the terminator of this predecessor either *assigns* // our value or is a "normal use", then stop. @@ -532,13 +493,9 @@ impl<'tcx> LivenessContext<'_, '_, '_, 'tcx> { /// Stores the result that all regions in `value` are live for the /// points `live_at`. 
- fn add_use_live_facts_for( - &mut self, - value: impl TypeVisitable>, - live_at: &IntervalSet, - ) { + fn add_use_live_facts_for(&mut self, value: Ty<'tcx>, live_at: &IntervalSet) { debug!("add_use_live_facts_for(value={:?})", value); - Self::make_all_regions_live(self.elements, self.typeck, value, live_at); + Self::make_all_regions_live(self.location_map, self.typeck, value, live_at); } /// Some variable with type `live_ty` is "drop live" at `location` @@ -562,7 +519,7 @@ impl<'tcx> LivenessContext<'_, '_, '_, 'tcx> { dropped_local, dropped_ty, drop_locations, - values::pretty_print_points(self.elements, live_at.iter()), + values::pretty_print_points(self.location_map, live_at.iter()), ); let drop_data = self.drop_data.entry(dropped_ty).or_insert_with({ @@ -589,21 +546,27 @@ impl<'tcx> LivenessContext<'_, '_, '_, 'tcx> { // All things in the `outlives` array may be touched by // the destructor and must be live at this point. for &kind in &drop_data.dropck_result.kinds { - Self::make_all_regions_live(self.elements, self.typeck, kind, live_at); - polonius::emit_drop_facts(self.typeck, dropped_local, &kind); + Self::make_all_regions_live(self.location_map, self.typeck, kind, live_at); + polonius::legacy::emit_drop_facts( + self.typeck.tcx(), + dropped_local, + &kind, + self.typeck.universal_regions, + self.typeck.polonius_facts, + ); } } fn make_all_regions_live( - elements: &DenseLocationMap, + location_map: &DenseLocationMap, typeck: &mut TypeChecker<'_, 'tcx>, - value: impl TypeVisitable>, + value: impl TypeVisitable> + Relate>, live_at: &IntervalSet, ) { debug!("make_all_regions_live(value={:?})", value); debug!( "make_all_regions_live: live_at={}", - values::pretty_print_points(elements, live_at.iter()), + values::pretty_print_points(location_map, live_at.iter()), ); value.visit_with(&mut for_liveness::FreeRegionsVisitor { @@ -615,6 +578,15 @@ impl<'tcx> LivenessContext<'_, '_, '_, 'tcx> { typeck.constraints.liveness_constraints.add_points(live_region_vid, live_at); }, }); + + // When using `-Zpolonius=next`, we record the variance of each live region. 
+ if let Some(polonius_context) = typeck.polonius_context { + polonius_context.record_live_region_variance( + typeck.infcx.tcx, + typeck.universal_regions, + value, + ); + } } fn compute_drop_data(typeck: &TypeChecker<'_, 'tcx>, dropped_ty: Ty<'tcx>) -> DropData<'tcx> { diff --git a/compiler/rustc_borrowck/src/type_check/mod.rs b/compiler/rustc_borrowck/src/type_check/mod.rs index 90d327b0ad20d..eca8a688ff4a2 100644 --- a/compiler/rustc_borrowck/src/type_check/mod.rs +++ b/compiler/rustc_borrowck/src/type_check/mod.rs @@ -3,8 +3,7 @@ use std::rc::Rc; use std::{fmt, iter, mem}; -use either::Either; -use rustc_abi::{FIRST_VARIANT, FieldIdx}; +use rustc_abi::FieldIdx; use rustc_data_structures::frozen::Frozen; use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; use rustc_errors::ErrorGuaranteed; @@ -40,20 +39,17 @@ use rustc_mir_dataflow::move_paths::MoveData; use rustc_mir_dataflow::points::DenseLocationMap; use rustc_span::def_id::CRATE_DEF_ID; use rustc_span::source_map::Spanned; -use rustc_span::symbol::sym; -use rustc_span::{DUMMY_SP, Span}; -use rustc_trait_selection::traits::query::type_op::custom::{ - CustomTypeOp, scrape_region_constraints, -}; +use rustc_span::{DUMMY_SP, Span, sym}; +use rustc_trait_selection::traits::query::type_op::custom::scrape_region_constraints; use rustc_trait_selection::traits::query::type_op::{TypeOp, TypeOpOutput}; use tracing::{debug, instrument, trace}; use crate::borrow_set::BorrowSet; use crate::constraints::{OutlivesConstraint, OutlivesConstraintSet}; use crate::diagnostics::UniverseInfo; -use crate::facts::AllFacts; -use crate::location::LocationTable; use crate::member_constraints::MemberConstraintSet; +use crate::polonius::PoloniusContext; +use crate::polonius::legacy::{PoloniusFacts, PoloniusLocationTable}; use crate::region_infer::TypeTest; use crate::region_infer::values::{LivenessValues, PlaceholderIndex, PlaceholderIndices}; use crate::renumber::RegionCtxt; @@ -77,20 +73,12 @@ macro_rules! span_mirbug { }) } -macro_rules! 
span_mirbug_and_err { - ($context:expr, $elem:expr, $($message:tt)*) => ({ - { - span_mirbug!($context, $elem, $($message)*); - $context.error() - } - }) -} - mod canonical; mod constraint_conversion; pub(crate) mod free_region_relations; mod input_output; pub(crate) mod liveness; +mod opaque_types; mod relate_tys; /// Type checks the given `mir` in the context of the inference @@ -107,33 +95,32 @@ mod relate_tys; /// # Parameters /// /// - `infcx` -- inference context to use -/// - `param_env` -- parameter environment to use for trait solving /// - `body` -- MIR body to type-check /// - `promoted` -- map of promoted constants within `body` /// - `universal_regions` -- the universal regions from `body`s function signature -/// - `location_table` -- MIR location map of `body` +/// - `location_table` -- for datalog polonius, the map between `Location`s and `RichLocation`s /// - `borrow_set` -- information about borrows occurring in `body` -/// - `all_facts` -- when using Polonius, this is the generated set of Polonius facts +/// - `polonius_facts` -- when using Polonius, this is the generated set of Polonius facts /// - `flow_inits` -- results of a maybe-init dataflow analysis /// - `move_data` -- move-data constructed when performing the maybe-init dataflow analysis -/// - `elements` -- MIR region map +/// - `location_map` -- map between MIR `Location` and `PointIndex` pub(crate) fn type_check<'a, 'tcx>( infcx: &BorrowckInferCtxt<'tcx>, body: &Body<'tcx>, promoted: &IndexSlice>, universal_regions: UniversalRegions<'tcx>, - location_table: &LocationTable, + location_table: &PoloniusLocationTable, borrow_set: &BorrowSet<'tcx>, - all_facts: &mut Option, + polonius_facts: &mut Option, flow_inits: ResultsCursor<'a, 'tcx, MaybeInitializedPlaces<'a, 'tcx>>, move_data: &MoveData<'tcx>, - elements: Rc, + location_map: Rc, ) -> MirTypeckResults<'tcx> { let implicit_region_bound = ty::Region::new_var(infcx.tcx, universal_regions.fr_fn_body); let mut constraints = MirTypeckRegionConstraints { placeholder_indices: PlaceholderIndices::default(), placeholder_index_to_region: IndexVec::default(), - liveness_constraints: LivenessValues::with_specific_points(Rc::clone(&elements)), + liveness_constraints: LivenessValues::with_specific_points(Rc::clone(&location_map)), outlives_constraints: OutlivesConstraintSet::default(), member_constraints: MemberConstraintSet::default(), type_tests: Vec::default(), @@ -153,9 +140,21 @@ pub(crate) fn type_check<'a, 'tcx>( &mut constraints, ); + let pre_obligations = infcx.take_registered_region_obligations(); + assert!( + pre_obligations.is_empty(), + "there should be no incoming region obligations = {pre_obligations:#?}", + ); + debug!(?normalized_inputs_and_output); - let mut checker = TypeChecker { + let mut polonius_context = if infcx.tcx.sess.opts.unstable_opts.polonius.is_next_enabled() { + Some(PoloniusContext::new()) + } else { + None + }; + + let mut typeck = TypeChecker { infcx, last_span: body.span, body, @@ -166,95 +165,37 @@ pub(crate) fn type_check<'a, 'tcx>( reported_errors: Default::default(), universal_regions: &universal_region_relations.universal_regions, location_table, - all_facts, + polonius_facts, borrow_set, constraints: &mut constraints, + polonius_context: &mut polonius_context, }; - checker.check_user_type_annotations(); + typeck.check_user_type_annotations(); - let mut verifier = TypeVerifier { cx: &mut checker, promoted, last_span: body.span }; + let mut verifier = TypeVerifier { typeck: &mut typeck, promoted, last_span: body.span }; 
verifier.visit_body(body); - checker.typeck_mir(body); - checker.equate_inputs_and_outputs(body, &normalized_inputs_and_output); - checker.check_signature_annotation(body); - - liveness::generate(&mut checker, body, &elements, flow_inits, move_data); - - translate_outlives_facts(&mut checker); - let opaque_type_values = infcx.take_opaque_types(); - - let opaque_type_values = opaque_type_values - .into_iter() - .map(|(opaque_type_key, decl)| { - let _: Result<_, ErrorGuaranteed> = checker.fully_perform_op( - Locations::All(body.span), - ConstraintCategory::OpaqueType, - CustomTypeOp::new( - |ocx| { - ocx.infcx.register_member_constraints( - opaque_type_key, - decl.hidden_type.ty, - decl.hidden_type.span, - ); - Ok(()) - }, - "opaque_type_map", - ), - ); - let hidden_type = infcx.resolve_vars_if_possible(decl.hidden_type); - trace!("finalized opaque type {:?} to {:#?}", opaque_type_key, hidden_type.ty.kind()); - if hidden_type.has_non_region_infer() { - infcx.dcx().span_bug( - decl.hidden_type.span, - format!("could not resolve {:#?}", hidden_type.ty.kind()), - ); - } + typeck.typeck_mir(body); + typeck.equate_inputs_and_outputs(body, &normalized_inputs_and_output); + typeck.check_signature_annotation(body); - // Convert all regions to nll vars. - let (opaque_type_key, hidden_type) = - fold_regions(infcx.tcx, (opaque_type_key, hidden_type), |region, _| { - match region.kind() { - ty::ReVar(_) => region, - ty::RePlaceholder(placeholder) => { - checker.constraints.placeholder_region(infcx, placeholder) - } - _ => ty::Region::new_var( - infcx.tcx, - checker.universal_regions.to_region_vid(region), - ), - } - }); + liveness::generate(&mut typeck, body, &location_map, flow_inits, move_data); - (opaque_type_key, hidden_type) - }) - .collect(); + let opaque_type_values = + opaque_types::take_opaques_and_register_member_constraints(&mut typeck); - MirTypeckResults { constraints, universal_region_relations, opaque_type_values } -} + if let Some(polonius_context) = typeck.polonius_context.as_mut() { + let num_regions = infcx.num_region_vars(); + let points_per_live_region = typeck.constraints.liveness_constraints.points(); + polonius_context.record_live_regions_per_point(num_regions, points_per_live_region); + } -fn translate_outlives_facts(typeck: &mut TypeChecker<'_, '_>) { - if let Some(facts) = typeck.all_facts { - let _prof_timer = typeck.infcx.tcx.prof.generic_activity("polonius_fact_generation"); - let location_table = typeck.location_table; - facts.subset_base.extend( - typeck.constraints.outlives_constraints.outlives().iter().flat_map( - |constraint: &OutlivesConstraint<'_>| { - if let Some(from_location) = constraint.locations.from_location() { - Either::Left(iter::once(( - constraint.sup.into(), - constraint.sub.into(), - location_table.mid_index(from_location), - ))) - } else { - Either::Right(location_table.all_points().map(move |location| { - (constraint.sup.into(), constraint.sub.into(), location) - })) - } - }, - ), - ); + MirTypeckResults { + constraints, + universal_region_relations, + opaque_type_values, + polonius_context, } } @@ -270,13 +211,11 @@ enum FieldAccessError { OutOfRange { field_count: usize }, } -/// Verifies that MIR types are sane to not crash further checks. +/// Verifies that MIR types are sane. /// -/// The sanitize_XYZ methods here take an MIR object and compute its -/// type, calling `span_mirbug` and returning an error type if there -/// is a problem. +/// FIXME: This should be merged with the actual `TypeChecker`. 
struct TypeVerifier<'a, 'b, 'tcx> { - cx: &'a mut TypeChecker<'b, 'tcx>, + typeck: &'a mut TypeChecker<'b, 'tcx>, promoted: &'b IndexSlice>, last_span: Span, } @@ -289,18 +228,95 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { } fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, location: Location) { - self.sanitize_place(place, location, context); + self.super_place(place, context, location); + let tcx = self.tcx(); + let place_ty = place.ty(self.body(), tcx); + if let PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy) = context { + let trait_ref = ty::TraitRef::new( + tcx, + tcx.require_lang_item(LangItem::Copy, Some(self.last_span)), + [place_ty.ty], + ); + + // To have a `Copy` operand, the type `T` of the + // value must be `Copy`. Note that we prove that `T: Copy`, + // rather than using the `is_copy_modulo_regions` + // test. This is important because + // `is_copy_modulo_regions` ignores the resulting region + // obligations and assumes they pass. This can result in + // bounds from `Copy` impls being unsoundly ignored (e.g., + // #29149). Note that we decide to use `Copy` before knowing + // whether the bounds fully apply: in effect, the rule is + // that if a value of some type could implement `Copy`, then + // it must. + self.typeck.prove_trait_ref( + trait_ref, + location.to_locations(), + ConstraintCategory::CopyBound, + ); + } + } + + fn visit_projection_elem( + &mut self, + place: PlaceRef<'tcx>, + elem: PlaceElem<'tcx>, + context: PlaceContext, + location: Location, + ) { + let tcx = self.tcx(); + let base_ty = place.ty(self.body(), tcx); + match elem { + // All these projections don't add any constraints, so there's nothing to + // do here. We check their invariants in the MIR validator after all. + ProjectionElem::Deref + | ProjectionElem::Index(_) + | ProjectionElem::ConstantIndex { .. } + | ProjectionElem::Subslice { .. } + | ProjectionElem::Downcast(..) 
=> {} + ProjectionElem::Field(field, fty) => { + let fty = self.typeck.normalize(fty, location); + let ty = base_ty.field_ty(tcx, field); + let ty = self.typeck.normalize(ty, location); + debug!(?fty, ?ty); + + if let Err(terr) = self.typeck.relate_types( + ty, + context.ambient_variance(), + fty, + location.to_locations(), + ConstraintCategory::Boring, + ) { + span_mirbug!(self, place, "bad field access ({:?}: {:?}): {:?}", ty, fty, terr); + } + } + ProjectionElem::OpaqueCast(ty) => { + let ty = self.typeck.normalize(ty, location); + self.typeck + .relate_types( + ty, + context.ambient_variance(), + base_ty.ty, + location.to_locations(), + ConstraintCategory::TypeAnnotation(AnnotationSource::OpaqueCast), + ) + .unwrap(); + } + ProjectionElem::Subtype(_) => { + bug!("ProjectionElem::Subtype shouldn't exist in borrowck") + } + } } fn visit_const_operand(&mut self, constant: &ConstOperand<'tcx>, location: Location) { debug!(?constant, ?location, "visit_const_operand"); self.super_const_operand(constant, location); - let ty = self.sanitize_type(constant, constant.const_.ty()); + let ty = constant.const_.ty(); - self.cx.infcx.tcx.for_each_free_region(&ty, |live_region| { - let live_region_vid = self.cx.universal_regions.to_region_vid(live_region); - self.cx.constraints.liveness_constraints.add_location(live_region_vid, location); + self.typeck.infcx.tcx.for_each_free_region(&ty, |live_region| { + let live_region_vid = self.typeck.universal_regions.to_region_vid(live_region); + self.typeck.constraints.liveness_constraints.add_location(live_region_vid, location); }); // HACK(compiler-errors): Constants that are gathered into Body.required_consts @@ -312,14 +328,14 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { }; if let Some(annotation_index) = constant.user_ty { - if let Err(terr) = self.cx.relate_type_and_user_type( + if let Err(terr) = self.typeck.relate_type_and_user_type( constant.const_.ty(), ty::Invariant, &UserTypeProjection { base: annotation_index, projs: vec![] }, locations, - ConstraintCategory::Boring, + ConstraintCategory::TypeAnnotation(AnnotationSource::GenericArg), ) { - let annotation = &self.cx.user_type_annotations[annotation_index]; + let annotation = &self.typeck.user_type_annotations[annotation_index]; span_mirbug!( self, constant, @@ -348,9 +364,12 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { promoted: &Body<'tcx>, ty, san_ty| { - if let Err(terr) = - verifier.cx.eq_types(ty, san_ty, locations, ConstraintCategory::Boring) - { + if let Err(terr) = verifier.typeck.eq_types( + ty, + san_ty, + locations, + ConstraintCategory::Boring, + ) { span_mirbug!( verifier, promoted, @@ -363,26 +382,26 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { }; let promoted_body = &self.promoted[promoted]; - self.sanitize_promoted(promoted_body, location); + self.verify_promoted(promoted_body, location); let promoted_ty = promoted_body.return_ty(); check_err(self, promoted_body, ty, promoted_ty); } else { - self.cx.ascribe_user_type( + self.typeck.ascribe_user_type( constant.const_.ty(), ty::UserType::new(ty::UserTypeKind::TypeOf(uv.def, UserArgs { args: uv.args, user_self_ty: None, })), - locations.span(self.cx.body), + locations.span(self.typeck.body), ); } } else if let Some(static_def_id) = constant.check_static_ptr(tcx) { let unnormalized_ty = tcx.type_of(static_def_id).instantiate_identity(); - let normalized_ty = self.cx.normalize(unnormalized_ty, locations); + let normalized_ty = self.typeck.normalize(unnormalized_ty, 
locations); let literal_ty = constant.const_.ty().builtin_deref(true).unwrap(); - if let Err(terr) = self.cx.eq_types( + if let Err(terr) = self.typeck.eq_types( literal_ty, normalized_ty, locations, @@ -394,7 +413,7 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { if let ty::FnDef(def_id, args) = *constant.const_.ty().kind() { let instantiated_predicates = tcx.predicates_of(def_id).instantiate(tcx, args); - self.cx.normalize_and_prove_instantiated_predicates( + self.typeck.normalize_and_prove_instantiated_predicates( def_id, instantiated_predicates, locations, @@ -404,7 +423,7 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { tcx.impl_of_method(def_id).map(|imp| tcx.def_kind(imp)), Some(DefKind::Impl { of_trait: true }) )); - self.cx.prove_predicates( + self.typeck.prove_predicates( args.types().map(|ty| ty::ClauseKind::WellFormed(ty.into())), locations, ConstraintCategory::Boring, @@ -413,15 +432,8 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { } } - fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) { - self.super_rvalue(rvalue, location); - let rval_ty = rvalue.ty(self.body(), self.tcx()); - self.sanitize_type(rvalue, rval_ty); - } - fn visit_local_decl(&mut self, local: Local, local_decl: &LocalDecl<'tcx>) { self.super_local_decl(local, local_decl); - self.sanitize_type(local_decl, local_decl.ty); if let Some(user_ty) = &local_decl.user_ty { for (user_ty, span) in user_ty.projections_and_spans() { @@ -438,12 +450,12 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { local_decl.ty }; - if let Err(terr) = self.cx.relate_type_and_user_type( + if let Err(terr) = self.typeck.relate_type_and_user_type( ty, ty::Invariant, user_ty, Locations::All(*span), - ConstraintCategory::TypeAnnotation, + ConstraintCategory::TypeAnnotation(AnnotationSource::Declaration), ) { span_mirbug!( self, @@ -460,7 +472,6 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { } fn visit_body(&mut self, body: &Body<'tcx>) { - self.sanitize_type(&"return type", body.return_ty()); // The types of local_decls are checked above which is called in super_body. self.super_body(body); } @@ -468,98 +479,41 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { fn body(&self) -> &Body<'tcx> { - self.cx.body + self.typeck.body } fn tcx(&self) -> TyCtxt<'tcx> { - self.cx.infcx.tcx - } - - fn sanitize_type(&mut self, parent: &dyn fmt::Debug, ty: Ty<'tcx>) -> Ty<'tcx> { - if ty.has_escaping_bound_vars() || ty.references_error() { - span_mirbug_and_err!(self, parent, "bad type {:?}", ty) - } else { - ty - } - } - - /// Checks that the types internal to the `place` match up with - /// what would be expected. 
- #[instrument(level = "debug", skip(self, location), ret)] - fn sanitize_place( - &mut self, - place: &Place<'tcx>, - location: Location, - context: PlaceContext, - ) -> PlaceTy<'tcx> { - let mut place_ty = PlaceTy::from_ty(self.body().local_decls[place.local].ty); - - for elem in place.projection.iter() { - if place_ty.variant_index.is_none() { - if let Err(guar) = place_ty.ty.error_reported() { - return PlaceTy::from_ty(Ty::new_error(self.tcx(), guar)); - } - } - place_ty = self.sanitize_projection(place_ty, elem, place, location, context); - } - - if let PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy) = context { - let tcx = self.tcx(); - let trait_ref = ty::TraitRef::new( - tcx, - tcx.require_lang_item(LangItem::Copy, Some(self.last_span)), - [place_ty.ty], - ); - - // To have a `Copy` operand, the type `T` of the - // value must be `Copy`. Note that we prove that `T: Copy`, - // rather than using the `is_copy_modulo_regions` - // test. This is important because - // `is_copy_modulo_regions` ignores the resulting region - // obligations and assumes they pass. This can result in - // bounds from `Copy` impls being unsoundly ignored (e.g., - // #29149). Note that we decide to use `Copy` before knowing - // whether the bounds fully apply: in effect, the rule is - // that if a value of some type could implement `Copy`, then - // it must. - self.cx.prove_trait_ref( - trait_ref, - location.to_locations(), - ConstraintCategory::CopyBound, - ); - } - - place_ty + self.typeck.infcx.tcx } - fn sanitize_promoted(&mut self, promoted_body: &'b Body<'tcx>, location: Location) { + fn verify_promoted(&mut self, promoted_body: &'b Body<'tcx>, location: Location) { // Determine the constraints from the promoted MIR by running the type // checker on the promoted MIR, then transfer the constraints back to // the main MIR, changing the locations to the provided location. - let parent_body = mem::replace(&mut self.cx.body, promoted_body); + let parent_body = mem::replace(&mut self.typeck.body, promoted_body); // Use new sets of constraints and closure bounds so that we can // modify their locations. - let all_facts = &mut None; + let polonius_facts = &mut None; let mut constraints = Default::default(); let mut liveness_constraints = LivenessValues::without_specific_points(Rc::new(DenseLocationMap::new(promoted_body))); // Don't try to add borrow_region facts for the promoted MIR let mut swap_constraints = |this: &mut Self| { - mem::swap(this.cx.all_facts, all_facts); - mem::swap(&mut this.cx.constraints.outlives_constraints, &mut constraints); - mem::swap(&mut this.cx.constraints.liveness_constraints, &mut liveness_constraints); + mem::swap(this.typeck.polonius_facts, polonius_facts); + mem::swap(&mut this.typeck.constraints.outlives_constraints, &mut constraints); + mem::swap(&mut this.typeck.constraints.liveness_constraints, &mut liveness_constraints); }; swap_constraints(self); self.visit_body(promoted_body); - self.cx.typeck_mir(promoted_body); + self.typeck.typeck_mir(promoted_body); - self.cx.body = parent_body; + self.typeck.body = parent_body; // Merge the outlives constraints back in, at the given location. swap_constraints(self); @@ -575,7 +529,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { // temporary from the user's point of view. 
constraint.category = ConstraintCategory::Boring; } - self.cx.constraints.outlives_constraints.push(constraint) + self.typeck.constraints.outlives_constraints.push(constraint) } // If the region is live at least one location in the promoted MIR, // then add a liveness constraint to the main MIR for this region @@ -585,241 +539,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { // unordered. #[allow(rustc::potential_query_instability)] for region in liveness_constraints.live_regions_unordered() { - self.cx.constraints.liveness_constraints.add_location(region, location); - } - } - - #[instrument(skip(self, location), ret, level = "debug")] - fn sanitize_projection( - &mut self, - base: PlaceTy<'tcx>, - pi: PlaceElem<'tcx>, - place: &Place<'tcx>, - location: Location, - context: PlaceContext, - ) -> PlaceTy<'tcx> { - let tcx = self.tcx(); - let base_ty = base.ty; - match pi { - ProjectionElem::Deref => { - let deref_ty = base_ty.builtin_deref(true); - PlaceTy::from_ty(deref_ty.unwrap_or_else(|| { - span_mirbug_and_err!(self, place, "deref of non-pointer {:?}", base_ty) - })) - } - ProjectionElem::Index(i) => { - let index_ty = Place::from(i).ty(self.body(), tcx).ty; - if index_ty != tcx.types.usize { - PlaceTy::from_ty(span_mirbug_and_err!(self, i, "index by non-usize {:?}", i)) - } else { - PlaceTy::from_ty(base_ty.builtin_index().unwrap_or_else(|| { - span_mirbug_and_err!(self, place, "index of non-array {:?}", base_ty) - })) - } - } - ProjectionElem::ConstantIndex { .. } => { - // consider verifying in-bounds - PlaceTy::from_ty(base_ty.builtin_index().unwrap_or_else(|| { - span_mirbug_and_err!(self, place, "index of non-array {:?}", base_ty) - })) - } - ProjectionElem::Subslice { from, to, from_end } => { - PlaceTy::from_ty(match base_ty.kind() { - ty::Array(inner, _) => { - assert!(!from_end, "array subslices should not use from_end"); - Ty::new_array(tcx, *inner, to - from) - } - ty::Slice(..) => { - assert!(from_end, "slice subslices should use from_end"); - base_ty - } - _ => span_mirbug_and_err!(self, place, "slice of non-array {:?}", base_ty), - }) - } - ProjectionElem::Downcast(maybe_name, index) => match base_ty.kind() { - ty::Adt(adt_def, _args) if adt_def.is_enum() => { - if index.as_usize() >= adt_def.variants().len() { - PlaceTy::from_ty(span_mirbug_and_err!( - self, - place, - "cast to variant #{:?} but enum only has {:?}", - index, - adt_def.variants().len() - )) - } else { - PlaceTy { ty: base_ty, variant_index: Some(index) } - } - } - // We do not need to handle coroutines here, because this runs - // before the coroutine transform stage. 
- _ => { - let ty = if let Some(name) = maybe_name { - span_mirbug_and_err!( - self, - place, - "can't downcast {:?} as {:?}", - base_ty, - name - ) - } else { - span_mirbug_and_err!(self, place, "can't downcast {:?}", base_ty) - }; - PlaceTy::from_ty(ty) - } - }, - ProjectionElem::Field(field, fty) => { - let fty = self.sanitize_type(place, fty); - let fty = self.cx.normalize(fty, location); - match self.field_ty(place, base, field, location) { - Ok(ty) => { - let ty = self.cx.normalize(ty, location); - debug!(?fty, ?ty); - - if let Err(terr) = self.cx.relate_types( - ty, - self.get_ambient_variance(context), - fty, - location.to_locations(), - ConstraintCategory::Boring, - ) { - span_mirbug!( - self, - place, - "bad field access ({:?}: {:?}): {:?}", - ty, - fty, - terr - ); - } - } - Err(FieldAccessError::OutOfRange { field_count }) => span_mirbug!( - self, - place, - "accessed field #{} but variant only has {}", - field.index(), - field_count - ), - } - PlaceTy::from_ty(fty) - } - ProjectionElem::Subtype(_) => { - bug!("ProjectionElem::Subtype shouldn't exist in borrowck") - } - ProjectionElem::OpaqueCast(ty) => { - let ty = self.sanitize_type(place, ty); - let ty = self.cx.normalize(ty, location); - self.cx - .relate_types( - ty, - self.get_ambient_variance(context), - base.ty, - location.to_locations(), - ConstraintCategory::TypeAnnotation, - ) - .unwrap(); - PlaceTy::from_ty(ty) - } - } - } - - fn error(&mut self) -> Ty<'tcx> { - Ty::new_misc_error(self.tcx()) - } - - fn get_ambient_variance(&self, context: PlaceContext) -> ty::Variance { - use rustc_middle::mir::visit::NonMutatingUseContext::*; - use rustc_middle::mir::visit::NonUseContext::*; - - match context { - PlaceContext::MutatingUse(_) => ty::Invariant, - PlaceContext::NonUse(StorageDead | StorageLive | VarDebugInfo) => ty::Invariant, - PlaceContext::NonMutatingUse( - Inspect | Copy | Move | PlaceMention | SharedBorrow | FakeBorrow | RawBorrow - | Projection, - ) => ty::Covariant, - PlaceContext::NonUse(AscribeUserTy(variance)) => variance, - } - } - - fn field_ty( - &mut self, - parent: &dyn fmt::Debug, - base_ty: PlaceTy<'tcx>, - field: FieldIdx, - location: Location, - ) -> Result, FieldAccessError> { - let tcx = self.tcx(); - - let (variant, args) = match base_ty { - PlaceTy { ty, variant_index: Some(variant_index) } => match *ty.kind() { - ty::Adt(adt_def, args) => (adt_def.variant(variant_index), args), - ty::Coroutine(def_id, args) => { - let mut variants = args.as_coroutine().state_tys(def_id, tcx); - let Some(mut variant) = variants.nth(variant_index.into()) else { - bug!( - "variant_index of coroutine out of range: {:?}/{:?}", - variant_index, - args.as_coroutine().state_tys(def_id, tcx).count() - ); - }; - return match variant.nth(field.index()) { - Some(ty) => Ok(ty), - None => Err(FieldAccessError::OutOfRange { field_count: variant.count() }), - }; - } - _ => bug!("can't have downcast of non-adt non-coroutine type"), - }, - PlaceTy { ty, variant_index: None } => match *ty.kind() { - ty::Adt(adt_def, args) if !adt_def.is_enum() => { - (adt_def.variant(FIRST_VARIANT), args) - } - ty::Closure(_, args) => { - return match args.as_closure().upvar_tys().get(field.index()) { - Some(&ty) => Ok(ty), - None => Err(FieldAccessError::OutOfRange { - field_count: args.as_closure().upvar_tys().len(), - }), - }; - } - ty::CoroutineClosure(_, args) => { - return match args.as_coroutine_closure().upvar_tys().get(field.index()) { - Some(&ty) => Ok(ty), - None => Err(FieldAccessError::OutOfRange { - field_count: 
args.as_coroutine_closure().upvar_tys().len(), - }), - }; - } - ty::Coroutine(_, args) => { - // Only prefix fields (upvars and current state) are - // accessible without a variant index. - return match args.as_coroutine().prefix_tys().get(field.index()) { - Some(ty) => Ok(*ty), - None => Err(FieldAccessError::OutOfRange { - field_count: args.as_coroutine().prefix_tys().len(), - }), - }; - } - ty::Tuple(tys) => { - return match tys.get(field.index()) { - Some(&ty) => Ok(ty), - None => Err(FieldAccessError::OutOfRange { field_count: tys.len() }), - }; - } - _ => { - return Ok(span_mirbug_and_err!( - self, - parent, - "can't project out of {:?}", - base_ty - )); - } - }, - }; - - if let Some(field) = variant.fields.get(field) { - Ok(self.cx.normalize(field.ty(tcx, args), location)) - } else { - Err(FieldAccessError::OutOfRange { field_count: variant.fields.len() }) + self.typeck.constraints.liveness_constraints.add_location(region, location); } } } @@ -840,10 +560,12 @@ struct TypeChecker<'a, 'tcx> { implicit_region_bound: ty::Region<'tcx>, reported_errors: FxIndexSet<(Ty<'tcx>, Span)>, universal_regions: &'a UniversalRegions<'tcx>, - location_table: &'a LocationTable, - all_facts: &'a mut Option, + location_table: &'a PoloniusLocationTable, + polonius_facts: &'a mut Option, borrow_set: &'a BorrowSet<'tcx>, constraints: &'a mut MirTypeckRegionConstraints<'tcx>, + /// When using `-Zpolonius=next`, the helper data used to create polonius constraints. + polonius_context: &'a mut Option, } /// Holder struct for passing results from MIR typeck to the rest of the non-lexical regions @@ -852,6 +574,7 @@ pub(crate) struct MirTypeckResults<'tcx> { pub(crate) constraints: MirTypeckRegionConstraints<'tcx>, pub(crate) universal_region_relations: Frozen>, pub(crate) opaque_type_values: FxIndexMap, OpaqueHiddenType<'tcx>>, + pub(crate) polonius_context: Option, } /// A collection of region constraints that must be satisfied for the @@ -981,6 +704,14 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { self.body } + fn to_region_vid(&mut self, r: ty::Region<'tcx>) -> RegionVid { + if let ty::RePlaceholder(placeholder) = r.kind() { + self.constraints.placeholder_region(self.infcx, placeholder).as_var() + } else { + self.universal_regions.to_region_vid(r) + } + } + fn unsized_feature_enabled(&self) -> bool { let features = self.tcx().features(); features.unsized_locals() || features.unsized_fn_params() @@ -1196,7 +927,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { ty::Invariant, &UserTypeProjection { base: annotation_index, projs: vec![] }, location.to_locations(), - ConstraintCategory::Boring, + ConstraintCategory::TypeAnnotation(AnnotationSource::GenericArg), ) { let annotation = &self.user_type_annotations[annotation_index]; span_mirbug!( @@ -1231,7 +962,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { *variance, projection, Locations::All(stmt.source_info.span), - ConstraintCategory::TypeAnnotation, + ConstraintCategory::TypeAnnotation(AnnotationSource::Ascription), ) { let annotation = &self.user_type_annotations[projection.base]; span_mirbug!( @@ -1495,6 +1226,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { Some(l) if !body.local_decls[l].is_user_variable() => { ConstraintCategory::Boring } + // The return type of a call is interesting for diagnostics. 
_ => ConstraintCategory::Assignment, }; @@ -1922,7 +1654,20 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { match *cast_kind { CastKind::PointerCoercion(PointerCoercion::ReifyFnPointer, coercion_source) => { let is_implicit_coercion = coercion_source == CoercionSource::Implicit; - let src_sig = op.ty(body, tcx).fn_sig(tcx); + let src_ty = op.ty(body, tcx); + let mut src_sig = src_ty.fn_sig(tcx); + if let ty::FnDef(def_id, _) = src_ty.kind() + && let ty::FnPtr(_, target_hdr) = *ty.kind() + && tcx.codegen_fn_attrs(def_id).safe_target_features + && target_hdr.safety.is_safe() + && let Some(safe_sig) = tcx.adjust_target_feature_sig( + *def_id, + src_sig, + body.source.def_id(), + ) + { + src_sig = safe_sig; + } // HACK: This shouldn't be necessary... We can remove this when we actually // get binders with where clauses, then elaborate implied bounds into that @@ -2438,7 +2183,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { ty_left, common_ty, location.to_locations(), - ConstraintCategory::Boring, + ConstraintCategory::CallArgument(None), ) .unwrap_or_else(|err| { bug!("Could not equate type variable with {:?}: {:?}", ty_left, err) @@ -2447,7 +2192,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { ty_right, common_ty, location.to_locations(), - ConstraintCategory::Boring, + ConstraintCategory::CallArgument(None), ) { span_mirbug!( self, @@ -2490,7 +2235,6 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { Rvalue::RawPtr(..) | Rvalue::ThreadLocalRef(..) - | Rvalue::Len(..) | Rvalue::Discriminant(..) | Rvalue::NullaryOp(NullOp::OffsetOf(..), _) => {} } @@ -2506,7 +2250,6 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { | Rvalue::Repeat(..) | Rvalue::Ref(..) | Rvalue::RawPtr(..) - | Rvalue::Len(..) | Rvalue::Cast(..) | Rvalue::ShallowInitBox(..) | Rvalue::BinaryOp(..) @@ -2598,18 +2341,18 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { borrowed_place: &Place<'tcx>, ) { // These constraints are only meaningful during borrowck: - let Self { borrow_set, location_table, all_facts, constraints, .. } = self; + let Self { borrow_set, location_table, polonius_facts, constraints, .. } = self; // In Polonius mode, we also push a `loan_issued_at` fact // linking the loan to the region (in some cases, though, // there is no loan associated with this borrow expression -- // that occurs when we are borrowing an unsafe place, for // example). - if let Some(all_facts) = all_facts { + if let Some(polonius_facts) = polonius_facts { let _prof_timer = self.infcx.tcx.prof.generic_activity("polonius_fact_generation"); if let Some(borrow_index) = borrow_set.get_index_of(&location) { let region_vid = borrow_region.as_var(); - all_facts.loan_issued_at.push(( + polonius_facts.loan_issued_at.push(( region_vid.into(), borrow_index, location_table.mid_index(location), diff --git a/compiler/rustc_borrowck/src/type_check/opaque_types.rs b/compiler/rustc_borrowck/src/type_check/opaque_types.rs new file mode 100644 index 0000000000000..ad4e006c21ae8 --- /dev/null +++ b/compiler/rustc_borrowck/src/type_check/opaque_types.rs @@ -0,0 +1,335 @@ +use std::iter; + +use rustc_data_structures::fx::FxIndexMap; +use rustc_middle::span_bug; +use rustc_middle::ty::fold::fold_regions; +use rustc_middle::ty::{ + self, GenericArgKind, OpaqueHiddenType, OpaqueTypeKey, Ty, TyCtxt, TypeSuperVisitable, + TypeVisitable, TypeVisitableExt, TypeVisitor, +}; +use tracing::{debug, trace}; + +use super::{MemberConstraintSet, TypeChecker}; + +/// Once we're done with typechecking the body, we take all the opaque types +/// defined by this function and add their 'member constraints'. 
+pub(super) fn take_opaques_and_register_member_constraints<'tcx>( + typeck: &mut TypeChecker<'_, 'tcx>, +) -> FxIndexMap<OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>> { + let infcx = typeck.infcx; + // Annoying: to invoke `typeck.to_region_vid`, we need access to + // `typeck.constraints`, but we also want to be mutating + // `typeck.member_constraints`. For now, just swap out the value + // we want and replace at the end. + let mut member_constraints = std::mem::take(&mut typeck.constraints.member_constraints); + let opaque_types = infcx + .take_opaque_types() + .into_iter() + .map(|(opaque_type_key, hidden_type)| { + let hidden_type = infcx.resolve_vars_if_possible(hidden_type); + register_member_constraints( + typeck, + &mut member_constraints, + opaque_type_key, + hidden_type, + ); + trace!("finalized opaque type {:?} to {:#?}", opaque_type_key, hidden_type.ty.kind()); + if hidden_type.has_non_region_infer() { + span_bug!(hidden_type.span, "could not resolve {:?}", hidden_type.ty); + } + + // Convert all regions to nll vars. + let (opaque_type_key, hidden_type) = + fold_regions(infcx.tcx, (opaque_type_key, hidden_type), |r, _| { + ty::Region::new_var(infcx.tcx, typeck.to_region_vid(r)) + }); + + (opaque_type_key, hidden_type) + }) + .collect(); + assert!(typeck.constraints.member_constraints.is_empty()); + typeck.constraints.member_constraints = member_constraints; + opaque_types +} + +/// Given the map `opaque_types` containing the opaque +/// `impl Trait` types whose underlying, hidden types are being +/// inferred, this method adds constraints to the regions +/// appearing in those underlying hidden types to ensure that they +/// at least do not refer to random scopes within the current +/// function. These constraints are not (quite) sufficient to +/// guarantee that the regions are actually legal values; that +/// final condition is imposed after region inference is done. +/// +/// # The Problem +/// +/// Let's work through an example to explain how it works. Assume +/// the current function is as follows: +/// +/// ```text +/// fn foo<'a, 'b>(..) -> (impl Bar<'a>, impl Bar<'b>) +/// ``` +/// +/// Here, we have two `impl Trait` types whose values are being +/// inferred (the `impl Bar<'a>` and the `impl +/// Bar<'b>`). Conceptually, this is sugar for a setup where we +/// define underlying opaque types (`Foo1`, `Foo2`) and then, in +/// the return type of `foo`, we *reference* those definitions: +/// +/// ```text +/// type Foo1<'x> = impl Bar<'x>; +/// type Foo2<'x> = impl Bar<'x>; +/// fn foo<'a, 'b>(..) -> (Foo1<'a>, Foo2<'b>) { .. } +/// // ^^^^ ^^ +/// // | | +/// // | args +/// // def_id +/// ``` +/// +/// As indicated in the comments above, each of those references +/// is (in the compiler) basically a set of generic parameters (`args`) +/// applied to the type of a suitable `def_id` (which identifies +/// `Foo1` or `Foo2`). +/// +/// Now, at this point in compilation, what we have done is to +/// replace each of the references (`Foo1<'a>`, `Foo2<'b>`) with +/// fresh inference variables C1 and C2. We wish to use the values +/// of these variables to infer the underlying types of `Foo1` and +/// `Foo2`. That is, this gives rise to higher-order (pattern) unification +/// constraints like: +/// +/// ```text +/// for<'a> (Foo1<'a> = C1) +/// for<'b> (Foo2<'b> = C2) +/// ``` +/// +/// For these equations to be satisfiable, the types `C1` and `C2` +/// can only refer to a limited set of regions. For example, `C1` +/// can only refer to `'static` and `'a`, and `C2` can only refer +/// to `'static` and `'b`.
The job of this function is to impose that +/// constraint. +/// +/// Up to this point, C1 and C2 are basically just random type +/// inference variables, and hence they may contain arbitrary +/// regions. In fact, it is fairly likely that they do! Consider +/// this possible definition of `foo`: +/// +/// ```text +/// fn foo<'a, 'b>(x: &'a i32, y: &'b i32) -> (impl Bar<'a>, impl Bar<'b>) { +/// (&*x, &*y) +/// } +/// ``` +/// +/// Here, the values for the concrete types of the two impl +/// traits will include inference variables: +/// +/// ```text +/// &'0 i32 +/// &'1 i32 +/// ``` +/// +/// Ordinarily, the subtyping rules would ensure that these are +/// sufficiently large. But since `impl Bar<'a>` isn't a specific +/// type per se, we don't get such constraints by default. This +/// is where this function comes into play. It adds extra +/// constraints to ensure that all the regions which appear in the +/// inferred type are regions that could validly appear. +/// +/// This is actually a bit of a tricky constraint in general. We +/// want to say that each variable (e.g., `'0`) can only take on +/// values that were supplied as arguments to the opaque type +/// (e.g., `'a` for `Foo1<'a>`) or `'static`, which is always in +/// scope. We don't have a constraint quite of this kind in the current +/// region checker. +/// +/// # The Solution +/// +/// We generally prefer to make `<=` constraints, since they +/// integrate best into the region solver. To do that, we find the +/// "minimum" of all the arguments that appear in the args: that +/// is, some region which is less than all the others. In the case +/// of `Foo1<'a>`, that would be `'a` (it's the only choice, after +/// all). Then we apply that as a least bound to the variables +/// (e.g., `'a <= '0`). +/// +/// In some cases, there is no minimum. Consider this example: +/// +/// ```text +/// fn baz<'a, 'b>() -> impl Trait<'a, 'b> { ... } +/// ``` +/// +/// Here we would report a more complex "in constraint", like `'r +/// in ['a, 'b, 'static]` (where `'r` is some region appearing in +/// the hidden type). +/// +/// # Constrain regions, not the hidden concrete type +/// +/// Note that generating constraints on each region `Rc` is *not* +/// the same as generating an outlives constraint on `Tc` itself. +/// For example, if we had a function like this: +/// +/// ``` +/// # #![feature(type_alias_impl_trait)] +/// # fn main() {} +/// # trait Foo<'a> {} +/// # impl<'a, T> Foo<'a> for (&'a u32, T) {} +/// fn foo<'a, T>(x: &'a u32, y: T) -> impl Foo<'a> { +/// (x, y) +/// } +/// +/// // Equivalent to: +/// # mod dummy { use super::*; +/// type FooReturn<'a, T> = impl Foo<'a>; +/// fn foo<'a, T>(x: &'a u32, y: T) -> FooReturn<'a, T> { +/// (x, y) +/// } +/// # } +/// ``` +/// +/// then the hidden type `Tc` would be `(&'0 u32, T)` (where `'0` +/// is an inference variable). If we generated a constraint that +/// `Tc: 'a`, then this would incorrectly require that `T: 'a` -- +/// but this is not necessary, because the opaque type we +/// create will be allowed to reference `T`. So we only generate a +/// constraint that `'0: 'a`. 
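As a concrete counterpart to the discussion above (an illustrative sketch of my own, not part of this patch; `Bar`, `ok`, and `bad` are invented names): in the function below the hidden type is `&'0 i32`, and the member constraints force `'0` to be one of the choice regions (`'a` or `'static`), which is what makes returning the parameter legal and returning a local illegal.

```rust
trait Bar<'a> {}
impl<'a> Bar<'a> for &'a i32 {}

// Hidden type: `&'0 i32`. The member constraint requires `'0` to equal one
// of the opaque type's arguments (`'a`) or `'static`; choosing `'a` works.
fn ok<'a>(x: &'a i32) -> impl Bar<'a> {
    x
}

// Returning a borrow of a local would leave no valid choice region, so a
// definition like this is rejected by region checking:
//
//     fn bad<'a>(_: &'a i32) -> impl Bar<'a> {
//         let local = 0;
//         &local // error: `local` does not live long enough
//     }

fn main() {
    let v = 5;
    let _b = ok(&v);
}
```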
+fn register_member_constraints<'tcx>( + typeck: &mut TypeChecker<'_, 'tcx>, + member_constraints: &mut MemberConstraintSet<'tcx, ty::RegionVid>, + opaque_type_key: OpaqueTypeKey<'tcx>, + OpaqueHiddenType { span, ty: hidden_ty }: OpaqueHiddenType<'tcx>, +) { + let tcx = typeck.tcx(); + let hidden_ty = typeck.infcx.resolve_vars_if_possible(hidden_ty); + debug!(?hidden_ty); + + let variances = tcx.variances_of(opaque_type_key.def_id); + debug!(?variances); + + // For a case like `impl Foo<'a, 'b>`, we would generate a constraint + // `'r in ['a, 'b, 'static]` for each region `'r` that appears in the + // hidden type (i.e., it must be equal to `'a`, `'b`, or `'static`). + // + // `conflict1` and `conflict2` are the two region bounds that we + // detected which were unrelated. They are used for diagnostics. + + // Create the set of choice regions: each region in the hidden + // type can be equal to any of the region parameters of the + // opaque type definition. + let fr_static = typeck.universal_regions.fr_static; + let choice_regions: Vec<_> = opaque_type_key + .args + .iter() + .enumerate() + .filter(|(i, _)| variances[*i] == ty::Invariant) + .filter_map(|(_, arg)| match arg.unpack() { + GenericArgKind::Lifetime(r) => Some(typeck.to_region_vid(r)), + GenericArgKind::Type(_) | GenericArgKind::Const(_) => None, + }) + .chain(iter::once(fr_static)) + .collect(); + + // FIXME(#42940): This should use the `FreeRegionsVisitor`, but that's + // not currently sound until we have existential regions. + hidden_ty.visit_with(&mut ConstrainOpaqueTypeRegionVisitor { + tcx, + op: |r| { + member_constraints.add_member_constraint( + opaque_type_key, + hidden_ty, + span, + typeck.to_region_vid(r), + &choice_regions, + ) + }, + }); +} + +/// Visitor that requires that (almost) all regions in the type visited outlive +/// `least_region`. We cannot use `push_outlives_components` because regions in +/// closure signatures are not included in their outlives components. We need to +/// ensure all regions outlive the given bound so that we don't end up with, +/// say, `ReVar` appearing in a return type and causing ICEs when other +/// functions end up with region constraints involving regions from other +/// functions. +/// +/// We also cannot use `for_each_free_region` because for closures it includes +/// the regions parameters from the enclosing item. +/// +/// We ignore any type parameters because impl trait values are assumed to +/// capture all the in-scope type parameters. 
+struct ConstrainOpaqueTypeRegionVisitor<'tcx, OP: FnMut(ty::Region<'tcx>)> { + tcx: TyCtxt<'tcx>, + op: OP, +} + +impl<'tcx, OP> TypeVisitor> for ConstrainOpaqueTypeRegionVisitor<'tcx, OP> +where + OP: FnMut(ty::Region<'tcx>), +{ + fn visit_binder>>(&mut self, t: &ty::Binder<'tcx, T>) { + t.super_visit_with(self); + } + + fn visit_region(&mut self, r: ty::Region<'tcx>) { + match *r { + // ignore bound regions, keep visiting + ty::ReBound(_, _) => {} + _ => (self.op)(r), + } + } + + fn visit_ty(&mut self, ty: Ty<'tcx>) { + // We're only interested in types involving regions + if !ty.flags().intersects(ty::TypeFlags::HAS_FREE_REGIONS) { + return; + } + + match ty.kind() { + ty::Closure(_, args) => { + // Skip lifetime parameters of the enclosing item(s) + + for upvar in args.as_closure().upvar_tys() { + upvar.visit_with(self); + } + args.as_closure().sig_as_fn_ptr_ty().visit_with(self); + } + + ty::CoroutineClosure(_, args) => { + // Skip lifetime parameters of the enclosing item(s) + + for upvar in args.as_coroutine_closure().upvar_tys() { + upvar.visit_with(self); + } + + args.as_coroutine_closure().signature_parts_ty().visit_with(self); + } + + ty::Coroutine(_, args) => { + // Skip lifetime parameters of the enclosing item(s) + // Also skip the witness type, because that has no free regions. + + for upvar in args.as_coroutine().upvar_tys() { + upvar.visit_with(self); + } + args.as_coroutine().return_ty().visit_with(self); + args.as_coroutine().yield_ty().visit_with(self); + args.as_coroutine().resume_ty().visit_with(self); + } + + ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. }) => { + // Skip lifetime parameters that are not captures. + let variances = self.tcx.variances_of(*def_id); + + for (v, s) in std::iter::zip(variances, args.iter()) { + if *v != ty::Bivariant { + s.visit_with(self); + } + } + } + + _ => { + ty.super_visit_with(self); + } + } + } +} diff --git a/compiler/rustc_borrowck/src/type_check/relate_tys.rs b/compiler/rustc_borrowck/src/type_check/relate_tys.rs index 752b2bf1a246e..59e2eee41d3fc 100644 --- a/compiler/rustc_borrowck/src/type_check/relate_tys.rs +++ b/compiler/rustc_borrowck/src/type_check/relate_tys.rs @@ -13,8 +13,7 @@ use rustc_middle::traits::query::NoSolution; use rustc_middle::ty::fold::FnMutDelegate; use rustc_middle::ty::relate::combine::{super_combine_consts, super_combine_tys}; use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt}; -use rustc_span::symbol::sym; -use rustc_span::{Span, Symbol}; +use rustc_span::{Span, Symbol, sym}; use tracing::{debug, instrument}; use crate::constraints::OutlivesConstraint; diff --git a/compiler/rustc_borrowck/src/universal_regions.rs b/compiler/rustc_borrowck/src/universal_regions.rs index bb64d646ff3ce..26af86c0cdd49 100644 --- a/compiler/rustc_borrowck/src/universal_regions.rs +++ b/compiler/rustc_borrowck/src/universal_regions.rs @@ -33,8 +33,7 @@ use rustc_middle::ty::{ TyCtxt, TypeVisitableExt, }; use rustc_middle::{bug, span_bug}; -use rustc_span::ErrorGuaranteed; -use rustc_span::symbol::{kw, sym}; +use rustc_span::{ErrorGuaranteed, kw, sym}; use tracing::{debug, instrument}; use crate::BorrowckInferCtxt; @@ -338,7 +337,7 @@ impl<'tcx> UniversalRegions<'tcx> { self.indices.indices.iter().map(|(&r, &v)| (r, v)) } - /// See `UniversalRegionIndices::to_region_vid`. + /// See [UniversalRegionIndices::to_region_vid]. 
pub(crate) fn to_region_vid(&self, r: ty::Region<'tcx>) -> RegionVid { self.indices.to_region_vid(r) } @@ -468,15 +467,13 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> { self.infcx.tcx.local_parent(self.mir_def), |r| { debug!(?r); - if !indices.indices.contains_key(&r) { - let region_vid = { - let name = r.get_name_or_anon(); - self.infcx.next_nll_region_var(FR, || RegionCtxt::LateBound(name)) - }; - - debug!(?region_vid); - indices.insert_late_bound_region(r, region_vid.as_var()); - } + let region_vid = { + let name = r.get_name_or_anon(); + self.infcx.next_nll_region_var(FR, || RegionCtxt::LateBound(name)) + }; + + debug!(?region_vid); + indices.insert_late_bound_region(r, region_vid.as_var()); }, ); @@ -485,21 +482,17 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> { self.infcx.num_region_vars() }; - // "Liberate" the late-bound regions. These correspond to - // "local" free regions. + // Converse of above, if this is a function/closure then the late-bound regions declared + // on its signature are local. + // + // We manually loop over `bound_inputs_and_output` instead of using + // `for_each_late_bound_region_in_item` as we may need to add the otherwise + // implicit `ClosureEnv` region. let bound_inputs_and_output = self.compute_inputs_and_output(&indices, defining_ty); - - let inputs_and_output = self.infcx.replace_bound_regions_with_nll_infer_vars( - FR, - self.mir_def, - bound_inputs_and_output, - &mut indices, - ); - // Converse of above, if this is a function/closure then the late-bound regions declared on its - // signature are local. - for_each_late_bound_region_in_item(self.infcx.tcx, self.mir_def, |r| { - debug!(?r); - if !indices.indices.contains_key(&r) { + for (idx, bound_var) in bound_inputs_and_output.bound_vars().iter().enumerate() { + if let ty::BoundVariableKind::Region(kind) = bound_var { + let kind = ty::LateParamRegionKind::from_bound(ty::BoundVar::from_usize(idx), kind); + let r = ty::Region::new_late_param(self.infcx.tcx, self.mir_def.to_def_id(), kind); let region_vid = { let name = r.get_name_or_anon(); self.infcx.next_nll_region_var(FR, || RegionCtxt::LateBound(name)) @@ -508,7 +501,12 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> { debug!(?region_vid); indices.insert_late_bound_region(r, region_vid.as_var()); } - }); + } + let inputs_and_output = self.infcx.replace_bound_regions_with_nll_infer_vars( + self.mir_def, + bound_inputs_and_output, + &indices, + ); let (unnormalized_output_ty, mut unnormalized_input_tys) = inputs_and_output.split_last().unwrap(); @@ -833,30 +831,19 @@ impl<'tcx> BorrowckInferCtxt<'tcx> { #[instrument(level = "debug", skip(self, indices))] fn replace_bound_regions_with_nll_infer_vars( &self, - origin: NllRegionVariableOrigin, all_outlive_scope: LocalDefId, value: ty::Binder<'tcx, T>, - indices: &mut UniversalRegionIndices<'tcx>, + indices: &UniversalRegionIndices<'tcx>, ) -> T where T: TypeFoldable>, { let (value, _map) = self.tcx.instantiate_bound_regions(value, |br| { debug!(?br); + let kind = ty::LateParamRegionKind::from_bound(br.var, br.kind); let liberated_region = - ty::Region::new_late_param(self.tcx, all_outlive_scope.to_def_id(), br.kind); - let region_vid = { - let name = match br.kind.get_name() { - Some(name) => name, - _ => sym::anon, - }; - - self.next_nll_region_var(origin, || RegionCtxt::Bound(name)) - }; - - indices.insert_late_bound_region(liberated_region, region_vid.as_var()); - debug!(?liberated_region, ?region_vid); - region_vid + ty::Region::new_late_param(self.tcx, 
all_outlive_scope.to_def_id(), kind); + ty::Region::new_var(self.tcx, indices.to_region_vid(liberated_region)) }); value } @@ -870,7 +857,7 @@ impl<'tcx> UniversalRegionIndices<'tcx> { /// well. These are used for error reporting. fn insert_late_bound_region(&mut self, r: ty::Region<'tcx>, vid: ty::RegionVid) { debug!("insert_late_bound_region({:?}, {:?})", r, vid); - self.indices.insert(r, vid); + assert_eq!(self.indices.insert(r, vid), None); } /// Converts `r` into a local inference variable: `r` can either @@ -881,6 +868,10 @@ impl<'tcx> UniversalRegionIndices<'tcx> { /// reference those regions from the `ParamEnv`. It is also used /// during initialization. Relies on the `indices` map having been /// fully initialized. + /// + /// Panics if `r` is not a registered universal region, most notably + /// if it is a placeholder. Handling placeholders requires access to the + /// `MirTypeckRegionConstraints`. fn to_region_vid(&self, r: ty::Region<'tcx>) -> RegionVid { if let ty::ReVar(..) = *r { r.as_var() @@ -942,12 +933,13 @@ fn for_each_late_bound_region_in_item<'tcx>( return; } - for bound_var in tcx.late_bound_vars(tcx.local_def_id_to_hir_id(mir_def_id)) { - let ty::BoundVariableKind::Region(bound_region) = bound_var else { - continue; - }; - let liberated_region = - ty::Region::new_late_param(tcx, mir_def_id.to_def_id(), bound_region); - f(liberated_region); + for (idx, bound_var) in + tcx.late_bound_vars(tcx.local_def_id_to_hir_id(mir_def_id)).iter().enumerate() + { + if let ty::BoundVariableKind::Region(kind) = bound_var { + let kind = ty::LateParamRegionKind::from_bound(ty::BoundVar::from_usize(idx), kind); + let liberated_region = ty::Region::new_late_param(tcx, mir_def_id.to_def_id(), kind); + f(liberated_region); + } } } diff --git a/compiler/rustc_borrowck/src/util/collect_writes.rs b/compiler/rustc_borrowck/src/util/collect_writes.rs deleted file mode 100644 index 55f1073176aef..0000000000000 --- a/compiler/rustc_borrowck/src/util/collect_writes.rs +++ /dev/null @@ -1,35 +0,0 @@ -use rustc_middle::mir::visit::{PlaceContext, Visitor}; -use rustc_middle::mir::{Body, Local, Location}; - -pub(crate) trait FindAssignments { - // Finds all statements that assign directly to local (i.e., X = ...) - // and returns their locations. - fn find_assignments(&self, local: Local) -> Vec; -} - -impl<'tcx> FindAssignments for Body<'tcx> { - fn find_assignments(&self, local: Local) -> Vec { - let mut visitor = FindLocalAssignmentVisitor { needle: local, locations: vec![] }; - visitor.visit_body(self); - visitor.locations - } -} - -// The Visitor walks the MIR to return the assignment statements corresponding -// to a Local. 
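For context on the late-bound-region bookkeeping in the `universal_regions.rs` changes above (`for_each_late_bound_region_in_item` and `insert_late_bound_region`): those helpers deal with lifetimes that are bound on a function's signature rather than on the item itself. The snippet below is an illustrative user-level sketch, not part of this patch; `late` and `early` are invented names.

```rust
// 'a is late-bound: it appears only in the signature, so it is instantiated
// at each call site (or kept higher-ranked in a fn-pointer type).
fn late<'a>(x: &'a u32) -> &'a u32 {
    x
}

// Here 'a also appears in the bound `T: 'a`, which makes it early-bound:
// it becomes part of the function's generic arguments.
fn early<'a, T: 'a>(x: &'a T) -> &'a T {
    x
}

fn main() {
    // A higher-ranked fn pointer is only possible because 'a is late-bound.
    let f: for<'a> fn(&'a u32) -> &'a u32 = late;
    let v = 1u32;
    assert_eq!(*f(&v), 1);
    assert_eq!(*early(&v), 1);
}
```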
-struct FindLocalAssignmentVisitor { - needle: Local, - locations: Vec, -} - -impl<'tcx> Visitor<'tcx> for FindLocalAssignmentVisitor { - fn visit_local(&mut self, local: Local, place_context: PlaceContext, location: Location) { - if self.needle != local { - return; - } - - if place_context.is_place_assignment() { - self.locations.push(location); - } - } -} diff --git a/compiler/rustc_borrowck/src/util/mod.rs b/compiler/rustc_borrowck/src/util/mod.rs deleted file mode 100644 index 5f2960b768b29..0000000000000 --- a/compiler/rustc_borrowck/src/util/mod.rs +++ /dev/null @@ -1,3 +0,0 @@ -mod collect_writes; - -pub(crate) use collect_writes::FindAssignments; diff --git a/compiler/rustc_builtin_macros/Cargo.toml b/compiler/rustc_builtin_macros/Cargo.toml index b50cb35b8e983..5650c5083cf08 100644 --- a/compiler/rustc_builtin_macros/Cargo.toml +++ b/compiler/rustc_builtin_macros/Cargo.toml @@ -20,6 +20,7 @@ rustc_errors = { path = "../rustc_errors" } rustc_expand = { path = "../rustc_expand" } rustc_feature = { path = "../rustc_feature" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } +rustc_hir = { path = "../rustc_hir" } rustc_index = { path = "../rustc_index" } rustc_lexer = { path = "../rustc_lexer" } rustc_lint_defs = { path = "../rustc_lint_defs" } diff --git a/compiler/rustc_builtin_macros/messages.ftl b/compiler/rustc_builtin_macros/messages.ftl index 87d3d288013a3..4cac7cb93f581 100644 --- a/compiler/rustc_builtin_macros/messages.ftl +++ b/compiler/rustc_builtin_macros/messages.ftl @@ -197,6 +197,8 @@ builtin_macros_format_redundant_args = redundant {$n -> builtin_macros_format_remove_raw_ident = remove the `r#` +builtin_macros_format_reorder_format_parameter = did you mean `{$replacement}`? + builtin_macros_format_requires_string = requires at least a format string argument builtin_macros_format_string_invalid = invalid format string: {$desc} @@ -249,9 +251,9 @@ builtin_macros_naked_functions_testing_attribute = .label = function marked with testing attribute here .naked_attribute = `#[naked]` is incompatible with testing attributes -builtin_macros_no_default_variant = no default declared - .help = make a unit variant default by placing `#[default]` above it - .suggestion = make `{$ident}` default +builtin_macros_no_default_variant = `#[derive(Default)]` on enum with no `#[default]` + .label = this enum needs a unit variant marked with `#[default]` + .suggestion = make this unit variant default by placing `#[default]` on it builtin_macros_non_abi = at least one abi must be provided as an argument to `clobber_abi` diff --git a/compiler/rustc_builtin_macros/src/alloc_error_handler.rs b/compiler/rustc_builtin_macros/src/alloc_error_handler.rs index 0caad997b9df1..d2b4e1ca824fd 100644 --- a/compiler/rustc_builtin_macros/src/alloc_error_handler.rs +++ b/compiler/rustc_builtin_macros/src/alloc_error_handler.rs @@ -3,8 +3,7 @@ use rustc_ast::{ self as ast, Fn, FnHeader, FnSig, Generics, ItemKind, Safety, Stmt, StmtKind, TyKind, }; use rustc_expand::base::{Annotatable, ExtCtxt}; -use rustc_span::Span; -use rustc_span::symbol::{Ident, kw, sym}; +use rustc_span::{Ident, Span, kw, sym}; use thin_vec::{ThinVec, thin_vec}; use crate::errors; diff --git a/compiler/rustc_builtin_macros/src/asm.rs b/compiler/rustc_builtin_macros/src/asm.rs index 14ac3cd74e884..5062cf55bb9ad 100644 --- a/compiler/rustc_builtin_macros/src/asm.rs +++ b/compiler/rustc_builtin_macros/src/asm.rs @@ -1,23 +1,22 @@ use ast::token::IdentIsRaw; use lint::BuiltinLintDiag; -use rustc_ast::AsmMacro; use rustc_ast::ptr::P; 
-use rustc_ast::token::{self, Delimiter}; use rustc_ast::tokenstream::TokenStream; +use rustc_ast::{AsmMacro, token}; use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; use rustc_errors::PResult; use rustc_expand::base::*; use rustc_index::bit_set::GrowableBitSet; -use rustc_parse::parser::Parser; +use rustc_parse::exp; +use rustc_parse::parser::{ExpKeywordPair, Parser}; use rustc_session::lint; -use rustc_span::symbol::{Ident, Symbol, kw, sym}; -use rustc_span::{ErrorGuaranteed, InnerSpan, Span}; +use rustc_span::{ErrorGuaranteed, Ident, InnerSpan, Span, Symbol, kw}; use rustc_target::asm::InlineAsmArch; use smallvec::smallvec; use {rustc_ast as ast, rustc_parse_format as parse}; use crate::errors; -use crate::util::expr_to_spanned_string; +use crate::util::{ExprToSpannedString, expr_to_spanned_string}; pub struct AsmArgs { pub templates: Vec>, @@ -39,16 +38,16 @@ pub struct AsmArgs { /// - `Err(_)` if the current token matches the keyword, but was not expected fn eat_operand_keyword<'a>( p: &mut Parser<'a>, - symbol: Symbol, + exp: ExpKeywordPair, asm_macro: AsmMacro, ) -> PResult<'a, bool> { if matches!(asm_macro, AsmMacro::Asm) { - Ok(p.eat_keyword(symbol)) + Ok(p.eat_keyword(exp)) } else { let span = p.token.span; - if p.eat_keyword_noexpect(symbol) { + if p.eat_keyword_noexpect(exp.kw) { // in gets printed as `r#in` otherwise - let symbol = if symbol == kw::In { "in" } else { symbol.as_str() }; + let symbol = if exp.kw == kw::In { "in" } else { exp.kw.as_str() }; Err(p.dcx().create_err(errors::AsmUnsupportedOperand { span, symbol, @@ -96,13 +95,13 @@ pub fn parse_asm_args<'a>( let mut allow_templates = true; while p.token != token::Eof { - if !p.eat(&token::Comma) { + if !p.eat(exp!(Comma)) { if allow_templates { // After a template string, we always expect *only* a comma... return Err(dcx.create_err(errors::AsmExpectedComma { span: p.token.span })); } else { // ...after that delegate to `expect` to also include the other expected tokens. - return Err(p.expect(&token::Comma).err().unwrap()); + return Err(p.expect(exp!(Comma)).err().unwrap()); } } if p.token == token::Eof { @@ -110,14 +109,14 @@ pub fn parse_asm_args<'a>( } // accept trailing commas // Parse clobber_abi - if p.eat_keyword(sym::clobber_abi) { + if p.eat_keyword(exp!(ClobberAbi)) { parse_clobber_abi(p, &mut args)?; allow_templates = false; continue; } // Parse options - if p.eat_keyword(sym::options) { + if p.eat_keyword(exp!(Options)) { parse_options(p, &mut args, asm_macro)?; allow_templates = false; continue; @@ -129,7 +128,7 @@ pub fn parse_asm_args<'a>( let name = if p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq) { let (ident, _) = p.token.ident().unwrap(); p.bump(); - p.expect(&token::Eq)?; + p.expect(exp!(Eq))?; allow_templates = false; Some(ident.name) } else { @@ -137,57 +136,57 @@ pub fn parse_asm_args<'a>( }; let mut explicit_reg = false; - let op = if eat_operand_keyword(p, kw::In, asm_macro)? { + let op = if eat_operand_keyword(p, exp!(In), asm_macro)? { let reg = parse_reg(p, &mut explicit_reg)?; - if p.eat_keyword(kw::Underscore) { + if p.eat_keyword(exp!(Underscore)) { let err = dcx.create_err(errors::AsmUnderscoreInput { span: p.token.span }); return Err(err); } let expr = p.parse_expr()?; ast::InlineAsmOperand::In { reg, expr } - } else if eat_operand_keyword(p, sym::out, asm_macro)? { + } else if eat_operand_keyword(p, exp!(Out), asm_macro)? { let reg = parse_reg(p, &mut explicit_reg)?; - let expr = if p.eat_keyword(kw::Underscore) { None } else { Some(p.parse_expr()?) 
}; + let expr = if p.eat_keyword(exp!(Underscore)) { None } else { Some(p.parse_expr()?) }; ast::InlineAsmOperand::Out { reg, expr, late: false } - } else if eat_operand_keyword(p, sym::lateout, asm_macro)? { + } else if eat_operand_keyword(p, exp!(Lateout), asm_macro)? { let reg = parse_reg(p, &mut explicit_reg)?; - let expr = if p.eat_keyword(kw::Underscore) { None } else { Some(p.parse_expr()?) }; + let expr = if p.eat_keyword(exp!(Underscore)) { None } else { Some(p.parse_expr()?) }; ast::InlineAsmOperand::Out { reg, expr, late: true } - } else if eat_operand_keyword(p, sym::inout, asm_macro)? { + } else if eat_operand_keyword(p, exp!(Inout), asm_macro)? { let reg = parse_reg(p, &mut explicit_reg)?; - if p.eat_keyword(kw::Underscore) { + if p.eat_keyword(exp!(Underscore)) { let err = dcx.create_err(errors::AsmUnderscoreInput { span: p.token.span }); return Err(err); } let expr = p.parse_expr()?; - if p.eat(&token::FatArrow) { + if p.eat(exp!(FatArrow)) { let out_expr = - if p.eat_keyword(kw::Underscore) { None } else { Some(p.parse_expr()?) }; + if p.eat_keyword(exp!(Underscore)) { None } else { Some(p.parse_expr()?) }; ast::InlineAsmOperand::SplitInOut { reg, in_expr: expr, out_expr, late: false } } else { ast::InlineAsmOperand::InOut { reg, expr, late: false } } - } else if eat_operand_keyword(p, sym::inlateout, asm_macro)? { + } else if eat_operand_keyword(p, exp!(Inlateout), asm_macro)? { let reg = parse_reg(p, &mut explicit_reg)?; - if p.eat_keyword(kw::Underscore) { + if p.eat_keyword(exp!(Underscore)) { let err = dcx.create_err(errors::AsmUnderscoreInput { span: p.token.span }); return Err(err); } let expr = p.parse_expr()?; - if p.eat(&token::FatArrow) { + if p.eat(exp!(FatArrow)) { let out_expr = - if p.eat_keyword(kw::Underscore) { None } else { Some(p.parse_expr()?) }; + if p.eat_keyword(exp!(Underscore)) { None } else { Some(p.parse_expr()?) }; ast::InlineAsmOperand::SplitInOut { reg, in_expr: expr, out_expr, late: true } } else { ast::InlineAsmOperand::InOut { reg, expr, late: true } } - } else if eat_operand_keyword(p, sym::label, asm_macro)? { + } else if eat_operand_keyword(p, exp!(Label), asm_macro)? 
{ let block = p.parse_block()?; ast::InlineAsmOperand::Label { block } - } else if p.eat_keyword(kw::Const) { + } else if p.eat_keyword(exp!(Const)) { let anon_const = p.parse_expr_anon_const()?; ast::InlineAsmOperand::Const { anon_const } - } else if p.eat_keyword(sym::sym) { + } else if p.eat_keyword(exp!(Sym)) { let expr = p.parse_expr()?; let ast::ExprKind::Path(qself, path) = &expr.kind else { let err = dcx.create_err(errors::AsmSymNoPath { span: expr.span }); @@ -390,31 +389,31 @@ fn parse_options<'a>( ) -> PResult<'a, ()> { let span_start = p.prev_token.span; - p.expect(&token::OpenDelim(Delimiter::Parenthesis))?; - - while !p.eat(&token::CloseDelim(Delimiter::Parenthesis)) { - const OPTIONS: [(Symbol, ast::InlineAsmOptions); ast::InlineAsmOptions::COUNT] = [ - (sym::pure, ast::InlineAsmOptions::PURE), - (sym::nomem, ast::InlineAsmOptions::NOMEM), - (sym::readonly, ast::InlineAsmOptions::READONLY), - (sym::preserves_flags, ast::InlineAsmOptions::PRESERVES_FLAGS), - (sym::noreturn, ast::InlineAsmOptions::NORETURN), - (sym::nostack, ast::InlineAsmOptions::NOSTACK), - (sym::may_unwind, ast::InlineAsmOptions::MAY_UNWIND), - (sym::att_syntax, ast::InlineAsmOptions::ATT_SYNTAX), - (kw::Raw, ast::InlineAsmOptions::RAW), + p.expect(exp!(OpenParen))?; + + while !p.eat(exp!(CloseParen)) { + const OPTIONS: [(ExpKeywordPair, ast::InlineAsmOptions); ast::InlineAsmOptions::COUNT] = [ + (exp!(Pure), ast::InlineAsmOptions::PURE), + (exp!(Nomem), ast::InlineAsmOptions::NOMEM), + (exp!(Readonly), ast::InlineAsmOptions::READONLY), + (exp!(PreservesFlags), ast::InlineAsmOptions::PRESERVES_FLAGS), + (exp!(Noreturn), ast::InlineAsmOptions::NORETURN), + (exp!(Nostack), ast::InlineAsmOptions::NOSTACK), + (exp!(MayUnwind), ast::InlineAsmOptions::MAY_UNWIND), + (exp!(AttSyntax), ast::InlineAsmOptions::ATT_SYNTAX), + (exp!(Raw), ast::InlineAsmOptions::RAW), ]; 'blk: { - for (symbol, option) in OPTIONS { + for (exp, option) in OPTIONS { let kw_matched = if asm_macro.is_supported_option(option) { - p.eat_keyword(symbol) + p.eat_keyword(exp) } else { - p.eat_keyword_noexpect(symbol) + p.eat_keyword_noexpect(exp.kw) }; if kw_matched { - try_set_option(p, args, asm_macro, symbol, option); + try_set_option(p, args, asm_macro, exp.kw, option); break 'blk; } } @@ -423,10 +422,10 @@ fn parse_options<'a>( } // Allow trailing commas - if p.eat(&token::CloseDelim(Delimiter::Parenthesis)) { + if p.eat(exp!(CloseParen)) { break; } - p.expect(&token::Comma)?; + p.expect(exp!(Comma))?; } let new_span = span_start.to(p.prev_token.span); @@ -438,14 +437,14 @@ fn parse_options<'a>( fn parse_clobber_abi<'a>(p: &mut Parser<'a>, args: &mut AsmArgs) -> PResult<'a, ()> { let span_start = p.prev_token.span; - p.expect(&token::OpenDelim(Delimiter::Parenthesis))?; + p.expect(exp!(OpenParen))?; - if p.eat(&token::CloseDelim(Delimiter::Parenthesis)) { + if p.eat(exp!(CloseParen)) { return Err(p.dcx().create_err(errors::NonABI { span: p.token.span })); } let mut new_abis = Vec::new(); - while !p.eat(&token::CloseDelim(Delimiter::Parenthesis)) { + while !p.eat(exp!(CloseParen)) { match p.parse_str_lit() { Ok(str_lit) => { new_abis.push((str_lit.symbol_unescaped, str_lit.span)); @@ -457,10 +456,10 @@ fn parse_clobber_abi<'a>(p: &mut Parser<'a>, args: &mut AsmArgs) -> PResult<'a, }; // Allow trailing commas - if p.eat(&token::CloseDelim(Delimiter::Parenthesis)) { + if p.eat(exp!(CloseParen)) { break; } - p.expect(&token::Comma)?; + p.expect(exp!(Comma))?; } let full_span = span_start.to(p.prev_token.span); @@ -483,7 +482,7 @@ fn 
parse_reg<'a>( p: &mut Parser<'a>, explicit_reg: &mut bool, ) -> PResult<'a, ast::InlineAsmRegOrRegClass> { - p.expect(&token::OpenDelim(Delimiter::Parenthesis))?; + p.expect(exp!(OpenParen))?; let result = match p.token.uninterpolate().kind { token::Ident(name, IdentIsRaw::No) => ast::InlineAsmRegOrRegClass::RegClass(name), token::Literal(token::Lit { kind: token::LitKind::Str, symbol, suffix: _ }) => { @@ -497,7 +496,7 @@ fn parse_reg<'a>( } }; p.bump(); - p.expect(&token::CloseDelim(Delimiter::Parenthesis))?; + p.expect(exp!(CloseParen))?; Ok(result) } @@ -528,7 +527,12 @@ fn expand_preparsed_asm( let msg = "asm template must be a string literal"; let template_sp = template_expr.span; let template_is_mac_call = matches!(template_expr.kind, ast::ExprKind::MacCall(_)); - let (template_str, template_style, template_span) = { + let ExprToSpannedString { + symbol: template_str, + style: template_style, + span: template_span, + .. + } = { let ExpandResult::Ready(mac) = expr_to_spanned_string(ecx, template_expr, msg) else { return ExpandResult::Retry(()); }; diff --git a/compiler/rustc_builtin_macros/src/assert.rs b/compiler/rustc_builtin_macros/src/assert.rs index 599b180f8793d..c659b1cff59b8 100644 --- a/compiler/rustc_builtin_macros/src/assert.rs +++ b/compiler/rustc_builtin_macros/src/assert.rs @@ -7,9 +7,9 @@ use rustc_ast::{DelimArgs, Expr, ExprKind, MacCall, Path, PathSegment, UnOp, tok use rustc_ast_pretty::pprust; use rustc_errors::PResult; use rustc_expand::base::{DummyResult, ExpandResult, ExtCtxt, MacEager, MacroExpanderResult}; +use rustc_parse::exp; use rustc_parse::parser::Parser; -use rustc_span::symbol::{Ident, Symbol, sym}; -use rustc_span::{DUMMY_SP, Span}; +use rustc_span::{DUMMY_SP, Ident, Span, Symbol, sym}; use thin_vec::thin_vec; use crate::edition_panic::use_panic_2021; @@ -144,7 +144,7 @@ fn parse_assert<'a>(cx: &ExtCtxt<'a>, sp: Span, stream: TokenStream) -> PResult< cx.dcx().emit_err(errors::AssertMissingComma { span: parser.token.span, comma }); parse_custom_message(&mut parser) - } else if parser.eat(&token::Comma) { + } else if parser.eat(exp!(Comma)) { parse_custom_message(&mut parser) } else { None diff --git a/compiler/rustc_builtin_macros/src/assert/context.rs b/compiler/rustc_builtin_macros/src/assert/context.rs index eb07975d8afd1..bb9dc651cec25 100644 --- a/compiler/rustc_builtin_macros/src/assert/context.rs +++ b/compiler/rustc_builtin_macros/src/assert/context.rs @@ -8,8 +8,7 @@ use rustc_ast::{ use rustc_ast_pretty::pprust; use rustc_data_structures::fx::FxHashSet; use rustc_expand::base::ExtCtxt; -use rustc_span::Span; -use rustc_span::symbol::{Ident, Symbol, sym}; +use rustc_span::{Ident, Span, Symbol, sym}; use thin_vec::{ThinVec, thin_vec}; pub(super) struct Context<'cx, 'a> { diff --git a/compiler/rustc_builtin_macros/src/autodiff.rs b/compiler/rustc_builtin_macros/src/autodiff.rs index 66bb11ca52281..8960cf6ab598f 100644 --- a/compiler/rustc_builtin_macros/src/autodiff.rs +++ b/compiler/rustc_builtin_macros/src/autodiff.rs @@ -20,8 +20,7 @@ mod llvm_enzyme { PatKind, TyKind, }; use rustc_expand::base::{Annotatable, ExtCtxt}; - use rustc_span::symbol::{Ident, kw, sym}; - use rustc_span::{Span, Symbol}; + use rustc_span::{Ident, Span, Symbol, kw, sym}; use thin_vec::{ThinVec, thin_vec}; use tracing::{debug, trace}; @@ -35,7 +34,7 @@ mod llvm_enzyme { FnRetTy::Default(_) => false, } } - fn first_ident(x: &MetaItemInner) -> rustc_span::symbol::Ident { + fn first_ident(x: &MetaItemInner) -> rustc_span::Ident { let segments = 
&x.meta_item().unwrap().path.segments; assert!(segments.len() == 1); segments[0].ident diff --git a/compiler/rustc_builtin_macros/src/cfg.rs b/compiler/rustc_builtin_macros/src/cfg.rs index 6e90f1682e309..85b8ef79c0504 100644 --- a/compiler/rustc_builtin_macros/src/cfg.rs +++ b/compiler/rustc_builtin_macros/src/cfg.rs @@ -6,6 +6,7 @@ use rustc_ast::token; use rustc_ast::tokenstream::TokenStream; use rustc_errors::PResult; use rustc_expand::base::{DummyResult, ExpandResult, ExtCtxt, MacEager, MacroExpanderResult}; +use rustc_parse::exp; use rustc_span::Span; use {rustc_ast as ast, rustc_attr_parsing as attr}; @@ -48,9 +49,9 @@ fn parse_cfg<'a>( let cfg = p.parse_meta_item_inner()?; - let _ = p.eat(&token::Comma); + let _ = p.eat(exp!(Comma)); - if !p.eat(&token::Eof) { + if !p.eat(exp!(Eof)) { return Err(cx.dcx().create_err(errors::OneCfgPattern { span })); } diff --git a/compiler/rustc_builtin_macros/src/cfg_accessible.rs b/compiler/rustc_builtin_macros/src/cfg_accessible.rs index 23578781a833f..5f203dd5d1131 100644 --- a/compiler/rustc_builtin_macros/src/cfg_accessible.rs +++ b/compiler/rustc_builtin_macros/src/cfg_accessible.rs @@ -4,8 +4,7 @@ use rustc_ast as ast; use rustc_expand::base::{Annotatable, ExpandResult, ExtCtxt, Indeterminate, MultiItemModifier}; use rustc_feature::AttributeTemplate; use rustc_parse::validate_attr; -use rustc_span::Span; -use rustc_span::symbol::sym; +use rustc_span::{Span, sym}; use crate::errors; diff --git a/compiler/rustc_builtin_macros/src/cfg_eval.rs b/compiler/rustc_builtin_macros/src/cfg_eval.rs index d46a1bd3d3139..53c61831b4229 100644 --- a/compiler/rustc_builtin_macros/src/cfg_eval.rs +++ b/compiler/rustc_builtin_macros/src/cfg_eval.rs @@ -12,8 +12,7 @@ use rustc_expand::configure; use rustc_feature::Features; use rustc_parse::parser::{ForceCollect, Parser}; use rustc_session::Session; -use rustc_span::Span; -use rustc_span::symbol::sym; +use rustc_span::{Span, sym}; use smallvec::SmallVec; use tracing::instrument; diff --git a/compiler/rustc_builtin_macros/src/concat.rs b/compiler/rustc_builtin_macros/src/concat.rs index a28801f66dd10..c200539e12872 100644 --- a/compiler/rustc_builtin_macros/src/concat.rs +++ b/compiler/rustc_builtin_macros/src/concat.rs @@ -2,7 +2,7 @@ use rustc_ast::tokenstream::TokenStream; use rustc_ast::{ExprKind, LitKind, UnOp}; use rustc_expand::base::{DummyResult, ExpandResult, ExtCtxt, MacEager, MacroExpanderResult}; use rustc_session::errors::report_lit_error; -use rustc_span::symbol::Symbol; +use rustc_span::Symbol; use crate::errors; use crate::util::get_exprs_from_tts; diff --git a/compiler/rustc_builtin_macros/src/concat_idents.rs b/compiler/rustc_builtin_macros/src/concat_idents.rs index b459cc3007d37..a721f5b84c557 100644 --- a/compiler/rustc_builtin_macros/src/concat_idents.rs +++ b/compiler/rustc_builtin_macros/src/concat_idents.rs @@ -3,8 +3,7 @@ use rustc_ast::token::{self, Token}; use rustc_ast::tokenstream::{TokenStream, TokenTree}; use rustc_ast::{AttrVec, DUMMY_NODE_ID, Expr, ExprKind, Path, Ty, TyKind}; use rustc_expand::base::{DummyResult, ExpandResult, ExtCtxt, MacResult, MacroExpanderResult}; -use rustc_span::Span; -use rustc_span::symbol::{Ident, Symbol}; +use rustc_span::{Ident, Span, Symbol}; use crate::errors; @@ -19,7 +18,7 @@ pub(crate) fn expand_concat_idents<'cx>( } let mut res_str = String::new(); - for (i, e) in tts.trees().enumerate() { + for (i, e) in tts.iter().enumerate() { if i & 1 == 1 { match e { TokenTree::Token(Token { kind: token::Comma, .. 
}, _) => {} diff --git a/compiler/rustc_builtin_macros/src/derive.rs b/compiler/rustc_builtin_macros/src/derive.rs index 60450d085f6c0..2653a9f48b9b6 100644 --- a/compiler/rustc_builtin_macros/src/derive.rs +++ b/compiler/rustc_builtin_macros/src/derive.rs @@ -6,8 +6,7 @@ use rustc_expand::base::{ use rustc_feature::AttributeTemplate; use rustc_parse::validate_attr; use rustc_session::Session; -use rustc_span::symbol::{Ident, sym}; -use rustc_span::{ErrorGuaranteed, Span}; +use rustc_span::{ErrorGuaranteed, Ident, Span, sym}; use crate::cfg_eval::cfg_eval; use crate::errors; diff --git a/compiler/rustc_builtin_macros/src/deriving/clone.rs b/compiler/rustc_builtin_macros/src/deriving/clone.rs index f79227d52a813..78f50ba8d29ed 100644 --- a/compiler/rustc_builtin_macros/src/deriving/clone.rs +++ b/compiler/rustc_builtin_macros/src/deriving/clone.rs @@ -1,8 +1,7 @@ use rustc_ast::{self as ast, Generics, ItemKind, MetaItem, VariantData}; use rustc_data_structures::fx::FxHashSet; use rustc_expand::base::{Annotatable, ExtCtxt}; -use rustc_span::Span; -use rustc_span::symbol::{Ident, kw, sym}; +use rustc_span::{Ident, Span, kw, sym}; use thin_vec::{ThinVec, thin_vec}; use crate::deriving::generic::ty::*; diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/eq.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/eq.rs index bd6f4eb8d9945..5790350203a5a 100644 --- a/compiler/rustc_builtin_macros/src/deriving/cmp/eq.rs +++ b/compiler/rustc_builtin_macros/src/deriving/cmp/eq.rs @@ -1,8 +1,7 @@ use rustc_ast::{self as ast, MetaItem}; use rustc_data_structures::fx::FxHashSet; use rustc_expand::base::{Annotatable, ExtCtxt}; -use rustc_span::Span; -use rustc_span::symbol::sym; +use rustc_span::{Span, sym}; use thin_vec::{ThinVec, thin_vec}; use crate::deriving::generic::ty::*; diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs index 433cbfaa6cb08..1ed44c20bc61e 100644 --- a/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs +++ b/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs @@ -1,7 +1,6 @@ use rustc_ast::MetaItem; use rustc_expand::base::{Annotatable, ExtCtxt}; -use rustc_span::Span; -use rustc_span::symbol::{Ident, sym}; +use rustc_span::{Ident, Span, sym}; use thin_vec::thin_vec; use crate::deriving::generic::ty::*; diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs index ae1e5f4a2198c..4b93b3414c76b 100644 --- a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs +++ b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs @@ -1,8 +1,7 @@ use rustc_ast::ptr::P; use rustc_ast::{BinOpKind, BorrowKind, Expr, ExprKind, MetaItem, Mutability}; use rustc_expand::base::{Annotatable, ExtCtxt}; -use rustc_span::Span; -use rustc_span::symbol::sym; +use rustc_span::{Span, sym}; use thin_vec::thin_vec; use crate::deriving::generic::ty::*; diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs index 99a9832505327..7958e037555d5 100644 --- a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs +++ b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs @@ -1,7 +1,6 @@ use rustc_ast::{ExprKind, ItemKind, MetaItem, PatKind}; use rustc_expand::base::{Annotatable, ExtCtxt}; -use rustc_span::Span; -use rustc_span::symbol::{Ident, sym}; +use rustc_span::{Ident, Span, sym}; use thin_vec::thin_vec; use crate::deriving::generic::ty::*; 
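Looping back to the `asm.rs` parser changes above, where operand keywords are now matched through `exp!` pairs: the snippet below is a user-level sketch of the operand forms that grammar accepts (`in`, `inout`, `options(...)`, trailing commas). It is illustrative only, x86_64-specific, and not part of this patch; the `add` function is an invented example.

```rust
use std::arch::asm;

// x86_64-only sketch of the operand keywords the asm parser recognises.
fn add(a: u64, b: u64) -> u64 {
    let out: u64;
    unsafe {
        asm!(
            "add {0}, {1}",
            // `inout`: `a` goes in, the sum comes out of the same register.
            inout(reg) a => out,
            // `in`: a plain input operand.
            in(reg) b,
            // `options(...)`: the flag list handled by `parse_options` above.
            options(pure, nomem, nostack),
        );
    }
    out
}

fn main() {
    assert_eq!(add(2, 3), 5);
}
```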
diff --git a/compiler/rustc_builtin_macros/src/deriving/coerce_pointee.rs b/compiler/rustc_builtin_macros/src/deriving/coerce_pointee.rs index b91d6ced12349..9f3da6b6d35e7 100644 --- a/compiler/rustc_builtin_macros/src/deriving/coerce_pointee.rs +++ b/compiler/rustc_builtin_macros/src/deriving/coerce_pointee.rs @@ -6,12 +6,12 @@ use rustc_ast::{ self as ast, GenericArg, GenericBound, GenericParamKind, ItemKind, MetaItem, TraitBoundModifiers, VariantData, WherePredicate, }; -use rustc_attr_parsing as attr; +use rustc_attr_parsing::{AttributeKind, AttributeParser, ReprTransparent}; use rustc_data_structures::flat_map_in_place::FlatMapInPlace; use rustc_expand::base::{Annotatable, ExtCtxt}; +use rustc_hir::Attribute; use rustc_macros::Diagnostic; -use rustc_span::symbol::{Ident, sym}; -use rustc_span::{Span, Symbol}; +use rustc_span::{Ident, Span, Symbol, sym}; use thin_vec::{ThinVec, thin_vec}; use crate::errors; @@ -33,11 +33,11 @@ pub(crate) fn expand_deriving_coerce_pointee( let (name_ident, generics) = if let Annotatable::Item(aitem) = item && let ItemKind::Struct(struct_data, g) = &aitem.kind { - let is_transparent = aitem.attrs.iter().any(|attr| { - attr::find_repr_attrs(cx.sess, attr) - .into_iter() - .any(|r| matches!(r, attr::ReprTransparent)) - }); + let is_transparent = matches!( + AttributeParser::parse_limited(cx.sess, &aitem.attrs, sym::repr, span, true), + Some(Attribute::Parsed(AttributeKind::Repr(r))) if r.iter().any(|(r, _)| r == &ReprTransparent) + ); + if !is_transparent { cx.dcx().emit_err(RequireTransparent { span }); return; diff --git a/compiler/rustc_builtin_macros/src/deriving/debug.rs b/compiler/rustc_builtin_macros/src/deriving/debug.rs index 06a598cf3b0ef..eb01ca3941d83 100644 --- a/compiler/rustc_builtin_macros/src/deriving/debug.rs +++ b/compiler/rustc_builtin_macros/src/deriving/debug.rs @@ -1,8 +1,7 @@ use rustc_ast::{self as ast, EnumDef, MetaItem}; use rustc_expand::base::{Annotatable, ExtCtxt}; use rustc_session::config::FmtDebug; -use rustc_span::Span; -use rustc_span::symbol::{Ident, Symbol, sym}; +use rustc_span::{Ident, Span, Symbol, sym}; use thin_vec::{ThinVec, thin_vec}; use crate::deriving::generic::ty::*; diff --git a/compiler/rustc_builtin_macros/src/deriving/decodable.rs b/compiler/rustc_builtin_macros/src/deriving/decodable.rs index 469092e7b1cf0..6348560496e95 100644 --- a/compiler/rustc_builtin_macros/src/deriving/decodable.rs +++ b/compiler/rustc_builtin_macros/src/deriving/decodable.rs @@ -3,8 +3,7 @@ use rustc_ast::ptr::P; use rustc_ast::{self as ast, Expr, MetaItem, Mutability}; use rustc_expand::base::{Annotatable, ExtCtxt}; -use rustc_span::Span; -use rustc_span::symbol::{Ident, Symbol, sym}; +use rustc_span::{Ident, Span, Symbol, sym}; use thin_vec::{ThinVec, thin_vec}; use crate::deriving::generic::ty::*; diff --git a/compiler/rustc_builtin_macros/src/deriving/default.rs b/compiler/rustc_builtin_macros/src/deriving/default.rs index 6b1a6effad758..3c7bebd0f192e 100644 --- a/compiler/rustc_builtin_macros/src/deriving/default.rs +++ b/compiler/rustc_builtin_macros/src/deriving/default.rs @@ -4,8 +4,7 @@ use rustc_ast as ast; use rustc_ast::visit::visit_opt; use rustc_ast::{EnumDef, VariantData, attr}; use rustc_expand::base::{Annotatable, DummyResult, ExtCtxt}; -use rustc_span::symbol::{Ident, kw, sym}; -use rustc_span::{ErrorGuaranteed, Span}; +use rustc_span::{ErrorGuaranteed, Ident, Span, kw, sym}; use smallvec::SmallVec; use thin_vec::{ThinVec, thin_vec}; @@ -43,7 +42,9 @@ pub(crate) fn expand_deriving_default( StaticStruct(_, 
fields) => { default_struct_substructure(cx, trait_span, substr, fields) } - StaticEnum(enum_def, _) => default_enum_substructure(cx, trait_span, enum_def), + StaticEnum(enum_def, _) => { + default_enum_substructure(cx, trait_span, enum_def, item.span()) + } _ => cx.dcx().span_bug(trait_span, "method in `derive(Default)`"), } })), @@ -97,9 +98,10 @@ fn default_enum_substructure( cx: &ExtCtxt<'_>, trait_span: Span, enum_def: &EnumDef, + item_span: Span, ) -> BlockOrExpr { let expr = match try { - let default_variant = extract_default_variant(cx, enum_def, trait_span)?; + let default_variant = extract_default_variant(cx, enum_def, trait_span, item_span)?; validate_default_attribute(cx, default_variant)?; default_variant } { @@ -147,6 +149,7 @@ fn extract_default_variant<'a>( cx: &ExtCtxt<'_>, enum_def: &'a EnumDef, trait_span: Span, + item_span: Span, ) -> Result<&'a rustc_ast::Variant, ErrorGuaranteed> { let default_variants: SmallVec<[_; 1]> = enum_def .variants @@ -164,9 +167,10 @@ fn extract_default_variant<'a>( .filter(|variant| !attr::contains_name(&variant.attrs, sym::non_exhaustive)); let suggs = possible_defaults - .map(|v| errors::NoDefaultVariantSugg { span: v.span, ident: v.ident }) + .map(|v| errors::NoDefaultVariantSugg { span: v.span.shrink_to_lo() }) .collect(); - let guar = cx.dcx().emit_err(errors::NoDefaultVariant { span: trait_span, suggs }); + let guar = + cx.dcx().emit_err(errors::NoDefaultVariant { span: trait_span, item_span, suggs }); return Err(guar); } diff --git a/compiler/rustc_builtin_macros/src/deriving/encodable.rs b/compiler/rustc_builtin_macros/src/deriving/encodable.rs index 5c7583f2a77d8..20aacb2caca20 100644 --- a/compiler/rustc_builtin_macros/src/deriving/encodable.rs +++ b/compiler/rustc_builtin_macros/src/deriving/encodable.rs @@ -87,8 +87,7 @@ use rustc_ast::{AttrVec, ExprKind, MetaItem, Mutability}; use rustc_expand::base::{Annotatable, ExtCtxt}; -use rustc_span::Span; -use rustc_span::symbol::{Ident, Symbol, sym}; +use rustc_span::{Ident, Span, Symbol, sym}; use thin_vec::{ThinVec, thin_vec}; use crate::deriving::generic::ty::*; diff --git a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs index 1c5ff6c9ef247..fb9fe36161dc0 100644 --- a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs +++ b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs @@ -185,10 +185,10 @@ use rustc_ast::{ self as ast, AnonConst, BindingMode, ByRef, EnumDef, Expr, GenericArg, GenericParamKind, Generics, Mutability, PatKind, VariantData, }; -use rustc_attr_parsing as attr; +use rustc_attr_parsing::{AttributeKind, AttributeParser, ReprPacked}; use rustc_expand::base::{Annotatable, ExtCtxt}; -use rustc_span::symbol::{Ident, Symbol, kw, sym}; -use rustc_span::{DUMMY_SP, Span}; +use rustc_hir::Attribute; +use rustc_span::{DUMMY_SP, Ident, Span, Symbol, kw, sym}; use thin_vec::{ThinVec, thin_vec}; use ty::{Bounds, Path, Ref, Self_, Ty}; @@ -481,14 +481,10 @@ impl<'a> TraitDef<'a> { ) { match item { Annotatable::Item(item) => { - let is_packed = item.attrs.iter().any(|attr| { - for r in attr::find_repr_attrs(cx.sess, attr) { - if let attr::ReprPacked(_) = r { - return true; - } - } - false - }); + let is_packed = matches!( + AttributeParser::parse_limited(cx.sess, &item.attrs, sym::repr, item.span, true), + Some(Attribute::Parsed(AttributeKind::Repr(r))) if r.iter().any(|(x, _)| matches!(x, ReprPacked(..))) + ); let newitem = match &item.kind { ast::ItemKind::Struct(struct_def, generics) => 
self.expand_struct_def( diff --git a/compiler/rustc_builtin_macros/src/deriving/generic/ty.rs b/compiler/rustc_builtin_macros/src/deriving/generic/ty.rs index b118cbfbd913b..af6dc62db7a90 100644 --- a/compiler/rustc_builtin_macros/src/deriving/generic/ty.rs +++ b/compiler/rustc_builtin_macros/src/deriving/generic/ty.rs @@ -6,8 +6,7 @@ use rustc_ast::ptr::P; use rustc_ast::{self as ast, Expr, GenericArg, GenericParamKind, Generics, SelfKind}; use rustc_expand::base::ExtCtxt; use rustc_span::source_map::respan; -use rustc_span::symbol::{Ident, Symbol, kw}; -use rustc_span::{DUMMY_SP, Span}; +use rustc_span::{DUMMY_SP, Ident, Span, Symbol, kw}; use thin_vec::ThinVec; /// A path, e.g., `::std::option::Option::` (global). Has support diff --git a/compiler/rustc_builtin_macros/src/deriving/hash.rs b/compiler/rustc_builtin_macros/src/deriving/hash.rs index 2d1f5b70f7746..6e6dbe19e4d1a 100644 --- a/compiler/rustc_builtin_macros/src/deriving/hash.rs +++ b/compiler/rustc_builtin_macros/src/deriving/hash.rs @@ -1,7 +1,6 @@ use rustc_ast::{MetaItem, Mutability}; use rustc_expand::base::{Annotatable, ExtCtxt}; -use rustc_span::Span; -use rustc_span::symbol::sym; +use rustc_span::{Span, sym}; use thin_vec::thin_vec; use crate::deriving::generic::ty::*; diff --git a/compiler/rustc_builtin_macros/src/deriving/mod.rs b/compiler/rustc_builtin_macros/src/deriving/mod.rs index 681fbd1651db1..ec058b4131376 100644 --- a/compiler/rustc_builtin_macros/src/deriving/mod.rs +++ b/compiler/rustc_builtin_macros/src/deriving/mod.rs @@ -4,8 +4,7 @@ use rustc_ast as ast; use rustc_ast::ptr::P; use rustc_ast::{GenericArg, MetaItem}; use rustc_expand::base::{Annotatable, ExpandResult, ExtCtxt, MultiItemModifier}; -use rustc_span::Span; -use rustc_span::symbol::{Symbol, sym}; +use rustc_span::{Span, Symbol, sym}; use thin_vec::{ThinVec, thin_vec}; macro path_local($x:ident) { diff --git a/compiler/rustc_builtin_macros/src/edition_panic.rs b/compiler/rustc_builtin_macros/src/edition_panic.rs index c4164a7db9a47..b39c9861fd643 100644 --- a/compiler/rustc_builtin_macros/src/edition_panic.rs +++ b/compiler/rustc_builtin_macros/src/edition_panic.rs @@ -3,9 +3,8 @@ use rustc_ast::token::Delimiter; use rustc_ast::tokenstream::{DelimSpan, TokenStream}; use rustc_ast::*; use rustc_expand::base::*; -use rustc_span::Span; use rustc_span::edition::Edition; -use rustc_span::symbol::sym; +use rustc_span::{Span, sym}; /// This expands to either /// - `$crate::panic::panic_2015!(...)` or diff --git a/compiler/rustc_builtin_macros/src/env.rs b/compiler/rustc_builtin_macros/src/env.rs index 43e2bf1796fc7..8831261759c3c 100644 --- a/compiler/rustc_builtin_macros/src/env.rs +++ b/compiler/rustc_builtin_macros/src/env.rs @@ -10,8 +10,7 @@ use rustc_ast::token::{self, LitKind}; use rustc_ast::tokenstream::TokenStream; use rustc_ast::{AstDeref, ExprKind, GenericArg, Mutability}; use rustc_expand::base::{DummyResult, ExpandResult, ExtCtxt, MacEager, MacroExpanderResult}; -use rustc_span::Span; -use rustc_span::symbol::{Ident, Symbol, kw, sym}; +use rustc_span::{Ident, Span, Symbol, kw, sym}; use thin_vec::thin_vec; use crate::errors; diff --git a/compiler/rustc_builtin_macros/src/errors.rs b/compiler/rustc_builtin_macros/src/errors.rs index c9bd3371e559d..6213bd802c751 100644 --- a/compiler/rustc_builtin_macros/src/errors.rs +++ b/compiler/rustc_builtin_macros/src/errors.rs @@ -4,8 +4,7 @@ use rustc_errors::{ SubdiagMessageOp, Subdiagnostic, }; use rustc_macros::{Diagnostic, Subdiagnostic}; -use rustc_span::symbol::Ident; -use rustc_span::{Span, 
Symbol}; +use rustc_span::{Ident, Span, Symbol}; #[derive(Diagnostic)] #[diag(builtin_macros_requires_cfg_pattern)] @@ -370,26 +369,21 @@ pub(crate) struct DerivePathArgsValue { } #[derive(Diagnostic)] -#[diag(builtin_macros_no_default_variant)] -#[help] +#[diag(builtin_macros_no_default_variant, code = E0665)] pub(crate) struct NoDefaultVariant { #[primary_span] pub(crate) span: Span, + #[label] + pub(crate) item_span: Span, #[subdiagnostic] pub(crate) suggs: Vec, } #[derive(Subdiagnostic)] -#[suggestion( - builtin_macros_suggestion, - code = "#[default] {ident}", - applicability = "maybe-incorrect", - style = "tool-only" -)] +#[suggestion(builtin_macros_suggestion, code = "#[default] ", applicability = "maybe-incorrect")] pub(crate) struct NoDefaultVariantSugg { #[primary_span] pub(crate) span: Span, - pub(crate) ident: Ident, } #[derive(Diagnostic)] @@ -624,6 +618,17 @@ pub(crate) enum InvalidFormatStringSuggestion { #[primary_span] span: Span, }, + #[suggestion( + builtin_macros_format_reorder_format_parameter, + code = "{replacement}", + style = "verbose", + applicability = "machine-applicable" + )] + ReorderFormatParameter { + #[primary_span] + span: Span, + replacement: String, + }, } #[derive(Diagnostic)] diff --git a/compiler/rustc_builtin_macros/src/format.rs b/compiler/rustc_builtin_macros/src/format.rs index 32730ac3867f5..5202fe26c401e 100644 --- a/compiler/rustc_builtin_macros/src/format.rs +++ b/compiler/rustc_builtin_macros/src/format.rs @@ -12,12 +12,12 @@ use rustc_errors::{Applicability, Diag, MultiSpan, PResult, SingleLabelManySpans use rustc_expand::base::*; use rustc_lint_defs::builtin::NAMED_ARGUMENTS_USED_POSITIONALLY; use rustc_lint_defs::{BufferedEarlyLint, BuiltinLintDiag, LintId}; +use rustc_parse::exp; use rustc_parse_format as parse; -use rustc_span::symbol::{Ident, Symbol}; -use rustc_span::{BytePos, ErrorGuaranteed, InnerSpan, Span}; +use rustc_span::{BytePos, ErrorGuaranteed, Ident, InnerSpan, Span, Symbol}; use crate::errors; -use crate::util::expr_to_spanned_string; +use crate::util::{ExprToSpannedString, expr_to_spanned_string}; // The format_args!() macro is expanded in three steps: // 1. 
First, `parse_args` will parse the `(literal, arg, arg, name=arg, name=arg)` syntax, @@ -94,12 +94,12 @@ fn parse_args<'a>(ecx: &ExtCtxt<'a>, sp: Span, tts: TokenStream) -> PResult<'a, let mut first = true; while p.token != token::Eof { - if !p.eat(&token::Comma) { + if !p.eat(exp!(Comma)) { if first { - p.clear_expected_tokens(); + p.clear_expected_token_types(); } - match p.expect(&token::Comma) { + match p.expect(exp!(Comma)) { Err(err) => { match token::TokenKind::Comma.similar_tokens() { Some(tks) if tks.contains(&p.token.kind) => { @@ -123,7 +123,7 @@ fn parse_args<'a>(ecx: &ExtCtxt<'a>, sp: Span, tts: TokenStream) -> PResult<'a, match p.token.ident() { Some((ident, _)) if p.look_ahead(1, |t| *t == token::Eq) => { p.bump(); - p.expect(&token::Eq)?; + p.expect(exp!(Eq))?; let expr = p.parse_expr()?; if let Some((_, prev)) = args.by_name(ident.name) { ecx.dcx().emit_err(errors::FormatDuplicateArg { @@ -166,13 +166,18 @@ fn make_format_args( let MacroInput { fmtstr: efmt, mut args, is_direct_literal } = input; - let (fmt_str, fmt_style, fmt_span) = { + let ExprToSpannedString { + symbol: fmt_str, + span: fmt_span, + style: fmt_style, + uncooked_symbol: uncooked_fmt_str, + } = { let ExpandResult::Ready(mac) = expr_to_spanned_string(ecx, efmt.clone(), msg) else { return ExpandResult::Retry(()); }; match mac { Ok(mut fmt) if append_newline => { - fmt.0 = Symbol::intern(&format!("{}\n", fmt.0)); + fmt.symbol = Symbol::intern(&format!("{}\n", fmt.symbol)); fmt } Ok(fmt) => fmt, @@ -316,6 +321,13 @@ fn make_format_args( e.sugg_ = Some(errors::InvalidFormatStringSuggestion::RemoveRawIdent { span }) } } + parse::Suggestion::ReorderFormatParameter(span, replacement) => { + let span = fmt_span.from_inner(InnerSpan::new(span.start, span.end)); + e.sugg_ = Some(errors::InvalidFormatStringSuggestion::ReorderFormatParameter { + span, + replacement, + }); + } } let guar = ecx.dcx().emit_err(e); return ExpandResult::Ready(Err(guar)); @@ -584,7 +596,12 @@ fn make_format_args( } } - ExpandResult::Ready(Ok(FormatArgs { span: fmt_span, template, arguments: args })) + ExpandResult::Ready(Ok(FormatArgs { + span: fmt_span, + template, + arguments: args, + uncooked_fmt_str, + })) } fn invalid_placeholder_type_error( diff --git a/compiler/rustc_builtin_macros/src/global_allocator.rs b/compiler/rustc_builtin_macros/src/global_allocator.rs index b4b18409a18eb..8388e9dcafb8c 100644 --- a/compiler/rustc_builtin_macros/src/global_allocator.rs +++ b/compiler/rustc_builtin_macros/src/global_allocator.rs @@ -7,8 +7,7 @@ use rustc_ast::{ Stmt, StmtKind, Ty, TyKind, }; use rustc_expand::base::{Annotatable, ExtCtxt}; -use rustc_span::Span; -use rustc_span::symbol::{Ident, Symbol, kw, sym}; +use rustc_span::{Ident, Span, Symbol, kw, sym}; use thin_vec::{ThinVec, thin_vec}; use crate::errors; diff --git a/compiler/rustc_builtin_macros/src/lib.rs b/compiler/rustc_builtin_macros/src/lib.rs index cc4a974e7578c..6071d36f8ebbc 100644 --- a/compiler/rustc_builtin_macros/src/lib.rs +++ b/compiler/rustc_builtin_macros/src/lib.rs @@ -24,7 +24,7 @@ extern crate proc_macro; use rustc_expand::base::{MacroExpanderFn, ResolverExpand, SyntaxExtensionKind}; use rustc_expand::proc_macro::BangProcMacro; -use rustc_span::symbol::sym; +use rustc_span::sym; use crate::deriving::*; diff --git a/compiler/rustc_builtin_macros/src/pattern_type.rs b/compiler/rustc_builtin_macros/src/pattern_type.rs index 90b5f097b32b3..a600a9f316a7a 100644 --- a/compiler/rustc_builtin_macros/src/pattern_type.rs +++ b/compiler/rustc_builtin_macros/src/pattern_type.rs 
@@ -3,7 +3,8 @@ use rustc_ast::tokenstream::TokenStream; use rustc_ast::{Pat, Ty, ast}; use rustc_errors::PResult; use rustc_expand::base::{self, DummyResult, ExpandResult, ExtCtxt, MacroExpanderResult}; -use rustc_span::{Span, sym}; +use rustc_parse::exp; +use rustc_span::Span; pub(crate) fn expand<'cx>( cx: &'cx mut ExtCtxt<'_>, @@ -24,7 +25,7 @@ fn parse_pat_ty<'a>(cx: &mut ExtCtxt<'a>, stream: TokenStream) -> PResult<'a, (P let mut parser = cx.new_parser_from_tts(stream); let ty = parser.parse_ty()?; - parser.expect_keyword(sym::is)?; + parser.expect_keyword(exp!(Is))?; let pat = parser.parse_pat_no_top_alt(None, None)?; Ok((ty, pat)) diff --git a/compiler/rustc_builtin_macros/src/proc_macro_harness.rs b/compiler/rustc_builtin_macros/src/proc_macro_harness.rs index 707c36d5046a9..dee185ff0c95c 100644 --- a/compiler/rustc_builtin_macros/src/proc_macro_harness.rs +++ b/compiler/rustc_builtin_macros/src/proc_macro_harness.rs @@ -11,8 +11,7 @@ use rustc_feature::Features; use rustc_session::Session; use rustc_span::hygiene::AstPass; use rustc_span::source_map::SourceMap; -use rustc_span::symbol::{Ident, Symbol, kw, sym}; -use rustc_span::{DUMMY_SP, Span}; +use rustc_span::{DUMMY_SP, Ident, Span, Symbol, kw, sym}; use smallvec::smallvec; use thin_vec::{ThinVec, thin_vec}; diff --git a/compiler/rustc_builtin_macros/src/source_util.rs b/compiler/rustc_builtin_macros/src/source_util.rs index 946fbe918e6a6..123b96f6bcaaa 100644 --- a/compiler/rustc_builtin_macros/src/source_util.rs +++ b/compiler/rustc_builtin_macros/src/source_util.rs @@ -16,8 +16,7 @@ use rustc_parse::parser::{ForceCollect, Parser}; use rustc_parse::{new_parser_from_file, unwrap_or_emit_fatal}; use rustc_session::lint::builtin::INCOMPLETE_INCLUDE; use rustc_span::source_map::SourceMap; -use rustc_span::symbol::Symbol; -use rustc_span::{Pos, Span}; +use rustc_span::{Pos, Span, Symbol}; use smallvec::SmallVec; use crate::errors; diff --git a/compiler/rustc_builtin_macros/src/standard_library_imports.rs b/compiler/rustc_builtin_macros/src/standard_library_imports.rs index a3fc53344ab14..1a3f4d2d44908 100644 --- a/compiler/rustc_builtin_macros/src/standard_library_imports.rs +++ b/compiler/rustc_builtin_macros/src/standard_library_imports.rs @@ -3,10 +3,9 @@ use rustc_expand::base::{ExtCtxt, ResolverExpand}; use rustc_expand::expand::ExpansionConfig; use rustc_feature::Features; use rustc_session::Session; -use rustc_span::DUMMY_SP; use rustc_span::edition::Edition::*; use rustc_span::hygiene::AstPass; -use rustc_span::symbol::{Ident, Symbol, kw, sym}; +use rustc_span::{DUMMY_SP, Ident, Symbol, kw, sym}; use thin_vec::thin_vec; pub fn inject( diff --git a/compiler/rustc_builtin_macros/src/test.rs b/compiler/rustc_builtin_macros/src/test.rs index 664e935ee14a3..3f73ddbdd297e 100644 --- a/compiler/rustc_builtin_macros/src/test.rs +++ b/compiler/rustc_builtin_macros/src/test.rs @@ -9,8 +9,7 @@ use rustc_ast::{self as ast, GenericParamKind, attr}; use rustc_ast_pretty::pprust; use rustc_errors::{Applicability, Diag, Level}; use rustc_expand::base::*; -use rustc_span::symbol::{Ident, Symbol, sym}; -use rustc_span::{ErrorGuaranteed, FileNameDisplayPreference, Span}; +use rustc_span::{ErrorGuaranteed, FileNameDisplayPreference, Ident, Span, Symbol, sym}; use thin_vec::{ThinVec, thin_vec}; use tracing::debug; diff --git a/compiler/rustc_builtin_macros/src/test_harness.rs b/compiler/rustc_builtin_macros/src/test_harness.rs index e7ff65e08f9cd..4644659894368 100644 --- a/compiler/rustc_builtin_macros/src/test_harness.rs +++ 
b/compiler/rustc_builtin_macros/src/test_harness.rs @@ -16,8 +16,7 @@ use rustc_lint_defs::BuiltinLintDiag; use rustc_session::Session; use rustc_session::lint::builtin::UNNAMEABLE_TEST_ITEMS; use rustc_span::hygiene::{AstPass, SyntaxContext, Transparency}; -use rustc_span::symbol::{Ident, Symbol, sym}; -use rustc_span::{DUMMY_SP, Span}; +use rustc_span::{DUMMY_SP, Ident, Span, Symbol, sym}; use rustc_target::spec::PanicStrategy; use smallvec::smallvec; use thin_vec::{ThinVec, thin_vec}; diff --git a/compiler/rustc_builtin_macros/src/trace_macros.rs b/compiler/rustc_builtin_macros/src/trace_macros.rs index e624d1da66bd5..8264c17b4d171 100644 --- a/compiler/rustc_builtin_macros/src/trace_macros.rs +++ b/compiler/rustc_builtin_macros/src/trace_macros.rs @@ -1,7 +1,6 @@ use rustc_ast::tokenstream::{TokenStream, TokenTree}; use rustc_expand::base::{DummyResult, ExpandResult, ExtCtxt, MacroExpanderResult}; -use rustc_span::Span; -use rustc_span::symbol::kw; +use rustc_span::{Span, kw}; use crate::errors; @@ -10,9 +9,9 @@ pub(crate) fn expand_trace_macros( sp: Span, tt: TokenStream, ) -> MacroExpanderResult<'static> { - let mut cursor = tt.trees(); + let mut iter = tt.iter(); let mut err = false; - let value = match &cursor.next() { + let value = match iter.next() { Some(TokenTree::Token(token, _)) if token.is_keyword(kw::True) => true, Some(TokenTree::Token(token, _)) if token.is_keyword(kw::False) => false, _ => { @@ -20,7 +19,7 @@ pub(crate) fn expand_trace_macros( false } }; - err |= cursor.next().is_some(); + err |= iter.next().is_some(); if err { cx.dcx().emit_err(errors::TraceMacros { span: sp }); } else { diff --git a/compiler/rustc_builtin_macros/src/util.rs b/compiler/rustc_builtin_macros/src/util.rs index 2a28dfaf3c430..38fec2bff14c8 100644 --- a/compiler/rustc_builtin_macros/src/util.rs +++ b/compiler/rustc_builtin_macros/src/util.rs @@ -7,7 +7,7 @@ use rustc_expand::expand::AstFragment; use rustc_feature::AttributeTemplate; use rustc_lint_defs::BuiltinLintDiag; use rustc_lint_defs::builtin::DUPLICATE_MACRO_ATTRIBUTES; -use rustc_parse::{parser, validate_attr}; +use rustc_parse::{exp, parser, validate_attr}; use rustc_session::errors::report_lit_error; use rustc_span::{BytePos, Span, Symbol}; @@ -57,7 +57,17 @@ pub(crate) fn warn_on_duplicate_attribute(ecx: &ExtCtxt<'_>, item: &Annotatable, /// `Ok` represents successfully retrieving the string literal at the correct /// position, e.g., `println("abc")`. -type ExprToSpannedStringResult<'a> = Result<(Symbol, ast::StrStyle, Span), UnexpectedExprKind<'a>>; +pub(crate) type ExprToSpannedStringResult<'a> = Result>; + +pub(crate) struct ExprToSpannedString { + pub symbol: Symbol, + pub style: ast::StrStyle, + pub span: Span, + /// The raw string literal, with no escaping or processing. + /// + /// Generally only useful for lints that care about the raw bytes the user wrote. + pub uncooked_symbol: (ast::token::LitKind, Symbol), +} /// - `Ok` is returned when the conversion to a string literal is unsuccessful, /// but another type of expression is obtained instead. 
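The util.rs hunk above replaces the `(Symbol, StrStyle, Span)` tuple returned by `expr_to_spanned_string` with the named `ExprToSpannedString` struct, which additionally records the literal exactly as the user wrote it in `uncooked_symbol`. A minimal caller sketch, assuming the crate's existing imports (`rustc_span::Symbol`, `rustc_ast as ast`); the call sites updated in the following hunks use the same destructuring pattern:

    // Illustrative only: take the cooked symbol and string style, ignoring the
    // span and the uncooked (as-written) form, as `expr_to_string` now does.
    fn symbol_and_style(s: ExprToSpannedString) -> (Symbol, ast::StrStyle) {
        let ExprToSpannedString { symbol, style, .. } = s;
        (symbol, style)
    }
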
@@ -90,7 +100,12 @@ pub(crate) fn expr_to_spanned_string<'a>( ExpandResult::Ready(Err(match expr.kind { ast::ExprKind::Lit(token_lit) => match ast::LitKind::from_token_lit(token_lit) { Ok(ast::LitKind::Str(s, style)) => { - return ExpandResult::Ready(Ok((s, style, expr.span))); + return ExpandResult::Ready(Ok(ExprToSpannedString { + symbol: s, + style, + span: expr.span, + uncooked_symbol: (token_lit.kind, token_lit.symbol), + })); } Ok(ast::LitKind::ByteStr(..)) => { let mut err = cx.dcx().struct_span_err(expr.span, err_msg); @@ -128,7 +143,7 @@ pub(crate) fn expr_to_string( Ok((err, _)) => err.emit(), Err(guar) => guar, }) - .map(|(symbol, style, _)| (symbol, style)) + .map(|ExprToSpannedString { symbol, style, .. }| (symbol, style)) }) } @@ -183,7 +198,7 @@ pub(crate) fn get_single_str_spanned_from_tts( Ok((err, _)) => err.emit(), Err(guar) => guar, }) - .map(|(symbol, _style, span)| (symbol, span)) + .map(|ExprToSpannedString { symbol, span, .. }| (symbol, span)) }) } @@ -204,7 +219,7 @@ pub(crate) fn get_single_expr_from_tts( Ok(ret) => ret, Err(guar) => return ExpandResult::Ready(Err(guar)), }; - let _ = p.eat(&token::Comma); + let _ = p.eat(exp!(Comma)); if p.token != token::Eof { cx.dcx().emit_err(errors::OnlyOneArgument { span, name }); @@ -237,7 +252,7 @@ pub(crate) fn get_exprs_from_tts( let expr = cx.expander().fully_expand_fragment(AstFragment::Expr(expr)).make_expr(); es.push(expr); - if p.eat(&token::Comma) { + if p.eat(exp!(Comma)) { continue; } if p.token != token::Eof { diff --git a/compiler/rustc_codegen_cranelift/.github/workflows/main.yml b/compiler/rustc_codegen_cranelift/.github/workflows/main.yml index 2ee94146c1a4e..a8333df77e6e7 100644 --- a/compiler/rustc_codegen_cranelift/.github/workflows/main.yml +++ b/compiler/rustc_codegen_cranelift/.github/workflows/main.yml @@ -56,11 +56,6 @@ jobs: - os: macos-latest env: TARGET_TRIPLE: x86_64-apple-darwin - # cross-compile from Linux to Windows using mingw - - os: ubuntu-latest - env: - TARGET_TRIPLE: x86_64-pc-windows-gnu - apt_deps: gcc-mingw-w64-x86-64 wine-stable - os: ubuntu-latest env: TARGET_TRIPLE: aarch64-unknown-linux-gnu @@ -113,15 +108,6 @@ jobs: - name: Prepare dependencies run: ./y.sh prepare - # The Wine version shipped with Ubuntu 22.04 doesn't implement bcryptprimitives.dll - - name: Build bcryptprimitives.dll shim for Wine - if: matrix.os == 'ubuntu-latest' && matrix.env.TARGET_TRIPLE == 'x86_64-pc-windows-gnu' - run: | - rustup target add x86_64-pc-windows-gnu - mkdir wine_shims - rustc patches/bcryptprimitives.rs -Copt-level=3 -Clto=fat --out-dir wine_shims --target x86_64-pc-windows-gnu - echo "WINEPATH=$(pwd)/wine_shims" >> $GITHUB_ENV - - name: Build run: ./y.sh build --sysroot none @@ -135,9 +121,6 @@ jobs: # This is roughly config rust-lang/rust uses for testing - name: Test with LLVM sysroot - # Skip native x86_64-pc-windows-gnu. It is way too slow and cross-compiled - # x86_64-pc-windows-gnu covers at least part of the tests. 
- if: matrix.os != 'windows-latest' || matrix.env.TARGET_TRIPLE != 'x86_64-pc-windows-gnu' env: TARGET_TRIPLE: ${{ matrix.env.TARGET_TRIPLE }} run: ./y.sh test --sysroot llvm --no-unstable-features @@ -215,10 +198,6 @@ jobs: - os: macos-latest env: TARGET_TRIPLE: aarch64-apple-darwin - # cross-compile from Linux to Windows using mingw - - os: ubuntu-latest - env: - TARGET_TRIPLE: x86_64-pc-windows-gnu - os: windows-latest env: TARGET_TRIPLE: x86_64-pc-windows-msvc @@ -243,12 +222,6 @@ jobs: if: matrix.os == 'macos-latest' && matrix.env.TARGET_TRIPLE == 'x86_64-apple-darwin' run: rustup set default-host x86_64-apple-darwin - - name: Install MinGW toolchain - if: matrix.os == 'ubuntu-latest' && matrix.env.TARGET_TRIPLE == 'x86_64-pc-windows-gnu' - run: | - sudo apt-get update - sudo apt-get install -y gcc-mingw-w64-x86-64 - - name: Prepare dependencies run: ./y.sh prepare @@ -262,19 +235,11 @@ jobs: run: tar cvfJ cg_clif.tar.xz dist - name: Upload prebuilt cg_clif - if: matrix.os == 'windows-latest' || matrix.env.TARGET_TRIPLE != 'x86_64-pc-windows-gnu' uses: actions/upload-artifact@v4 with: name: cg_clif-${{ matrix.env.TARGET_TRIPLE }} path: cg_clif.tar.xz - - name: Upload prebuilt cg_clif (cross compile) - if: matrix.os != 'windows-latest' && matrix.env.TARGET_TRIPLE == 'x86_64-pc-windows-gnu' - uses: actions/upload-artifact@v4 - with: - name: cg_clif-${{ runner.os }}-cross-x86_64-mingw - path: cg_clif.tar.xz - release: runs-on: ubuntu-latest timeout-minutes: 10 diff --git a/compiler/rustc_codegen_cranelift/Cargo.lock b/compiler/rustc_codegen_cranelift/Cargo.lock index d81e7214961f6..b5aba86079fc6 100644 --- a/compiler/rustc_codegen_cranelift/Cargo.lock +++ b/compiler/rustc_codegen_cranelift/Cargo.lock @@ -3,28 +3,22 @@ version = 4 [[package]] -name = "ahash" -version = "0.8.11" +name = "allocator-api2" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" -dependencies = [ - "cfg-if", - "once_cell", - "version_check", - "zerocopy", -] +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "anyhow" -version = "1.0.86" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" +checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" [[package]] name = "arbitrary" -version = "1.3.2" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110" +checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223" [[package]] name = "bitflags" @@ -37,6 +31,9 @@ name = "bumpalo" version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" +dependencies = [ + "allocator-api2", +] [[package]] name = "cfg-if" @@ -46,24 +43,24 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "cranelift-bforest" -version = "0.114.0" +version = "0.115.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ba4f80548f22dc9c43911907b5e322c5555544ee85f785115701e6a28c9abe1" +checksum = "ac89549be94911dd0e839b4a7db99e9ed29c17517e1c026f61066884c168aa3c" dependencies = [ "cranelift-entity", ] [[package]] name = "cranelift-bitset" -version = "0.114.0" +version = 
"0.115.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "005884e3649c3e5ff2dc79e8a94b138f11569cc08a91244a292714d2a86e9156" +checksum = "b9bd49369f76c77e34e641af85d0956869237832c118964d08bf5f51f210875a" [[package]] name = "cranelift-codegen" -version = "0.114.0" +version = "0.115.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe4036255ec33ce9a37495dfbcfc4e1118fd34e693eff9a1e106336b7cd16a9b" +checksum = "fd96ce9cf8efebd7f5ab8ced5a0ce44250280bbae9f593d74a6d7effc3582a35" dependencies = [ "bumpalo", "cranelift-bforest", @@ -74,7 +71,7 @@ dependencies = [ "cranelift-entity", "cranelift-isle", "gimli", - "hashbrown", + "hashbrown 0.14.5", "log", "regalloc2", "rustc-hash", @@ -85,42 +82,42 @@ dependencies = [ [[package]] name = "cranelift-codegen-meta" -version = "0.114.0" +version = "0.115.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7ca74f4b68319da11d39e894437cb6e20ec7c2e11fbbda823c3bf207beedff7" +checksum = "5a68e358827afe4bfb6239fcbf6fbd5ac56206ece8a99c8f5f9bbd518773281a" dependencies = [ "cranelift-codegen-shared", ] [[package]] name = "cranelift-codegen-shared" -version = "0.114.0" +version = "0.115.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "897e54f433a0269c4187871aa06d452214d5515d228d5bdc22219585e9eef895" +checksum = "e184c9767afbe73d50c55ec29abcf4c32f9baf0d9d22b86d58c4d55e06dee181" [[package]] name = "cranelift-control" -version = "0.114.0" +version = "0.115.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29cb4018f5bf59fb53f515fa9d80e6f8c5ce19f198dc538984ebd23ecf8965ec" +checksum = "5cc7664f2a66f053e33f149e952bb5971d138e3af637f5097727ed6dc0ed95dd" dependencies = [ "arbitrary", ] [[package]] name = "cranelift-entity" -version = "0.114.0" +version = "0.115.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "305399fd781a2953ac78c1396f02ff53144f39c33eb7fc7789cf4e8936d13a96" +checksum = "118597e3a9cf86c3556fa579a7a23b955fa18231651a52a77a2475d305a9cf84" dependencies = [ "cranelift-bitset", ] [[package]] name = "cranelift-frontend" -version = "0.114.0" +version = "0.115.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9230b460a128d53653456137751d27baf567947a3ab8c0c4d6e31fd08036d81e" +checksum = "7638ea1efb069a0aa18d8ee67401b6b0d19f6bfe5de5e9ede348bfc80bb0d8c7" dependencies = [ "cranelift-codegen", "log", @@ -130,15 +127,15 @@ dependencies = [ [[package]] name = "cranelift-isle" -version = "0.114.0" +version = "0.115.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b961e24ae3ec9813a24a15ae64bbd2a42e4de4d79a7f3225a412e3b94e78d1c8" +checksum = "15c53e1152a0b01c4ed2b1e0535602b8e86458777dd9d18b28732b16325c7dc0" [[package]] name = "cranelift-jit" -version = "0.114.0" +version = "0.115.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62699329d4ced20fe281fbaef45e11b473b7ab310491b4bdebcd8b818a8ef7fe" +checksum = "36972cab12ff246afe8d45b6a427669cf814bd393c661e5e8a8dedc26a81c73f" dependencies = [ "anyhow", "cranelift-codegen", @@ -156,9 +153,9 @@ dependencies = [ [[package]] name = "cranelift-module" -version = "0.114.0" +version = "0.115.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f20b0b51ba962dac30fc7e812b86e4390d908acd4f59bcc8ac7610a8f3e0977" +checksum = "11841b3f54ac480db1e8e8d5678ba901a13b387012d315e3f8fba3e7b7a80447" dependencies = [ "anyhow", "cranelift-codegen", @@ 
-167,9 +164,9 @@ dependencies = [ [[package]] name = "cranelift-native" -version = "0.114.0" +version = "0.115.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d5bd76df6c9151188dfa428c863b33da5b34561b67f43c0cf3f24a794f9fa1f" +checksum = "7b7d8f895444fa52dd7bdd0bed11bf007a7fb43af65a6deac8fcc4094c6372f7" dependencies = [ "cranelift-codegen", "libc", @@ -178,9 +175,9 @@ dependencies = [ [[package]] name = "cranelift-object" -version = "0.114.0" +version = "0.115.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee231640a7ecceedd0f1f2782d9288db6a6908cc70675ed9427e3bf0ea6daacd" +checksum = "8e235ddfd19f100855ad03358c7ae0a13070c38a000701054cab46458cca6e81" dependencies = [ "anyhow", "cranelift-codegen", @@ -212,6 +209,12 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649" +[[package]] +name = "foldhash" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f" + [[package]] name = "gimli" version = "0.31.1" @@ -228,31 +231,37 @@ name = "hashbrown" version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" + +[[package]] +name = "hashbrown" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" dependencies = [ - "ahash", + "foldhash", ] [[package]] name = "indexmap" -version = "2.2.6" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" +checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" dependencies = [ "equivalent", - "hashbrown", + "hashbrown 0.15.2", ] [[package]] name = "libc" -version = "0.2.155" +version = "0.2.169" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" +checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" [[package]] name = "libloading" -version = "0.8.4" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e310b3a6b5907f99202fcdb4960ff45b93735d7c7d96b760fcff8db2dc0e103d" +checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" dependencies = [ "cfg-if", "windows-targets", @@ -281,50 +290,45 @@ checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "object" -version = "0.36.2" +version = "0.36.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f203fa8daa7bb185f760ae12bd8e097f63d17041dcdcaf675ac54cdf863170e" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" dependencies = [ "crc32fast", - "hashbrown", + "hashbrown 0.15.2", "indexmap", "memchr", ] -[[package]] -name = "once_cell" -version = "1.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" - [[package]] name = "proc-macro2" -version = "1.0.86" +version = "1.0.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" +checksum = 
"37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" dependencies = [ "unicode-ident", ] [[package]] name = "quote" -version = "1.0.36" +version = "1.0.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" dependencies = [ "proc-macro2", ] [[package]] name = "regalloc2" -version = "0.10.2" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12908dbeb234370af84d0579b9f68258a0f67e201412dd9a2814e6f45b2fc0f0" +checksum = "145c1c267e14f20fb0f88aa76a1c5ffec42d592c1d28b3cd9148ae35916158d3" dependencies = [ - "hashbrown", + "allocator-api2", + "bumpalo", + "hashbrown 0.15.2", "log", "rustc-hash", - "slice-group-by", "smallvec", ] @@ -342,9 +346,9 @@ dependencies = [ [[package]] name = "rustc-hash" -version = "2.0.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152" +checksum = "c7fb8039b3032c191086b10f11f319a6e99e1e82889c5cc6046f515c9db1d497" [[package]] name = "rustc_codegen_cranelift" @@ -366,30 +370,24 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.210" +version = "1.0.217" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a" +checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.210" +version = "1.0.217" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" +checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" dependencies = [ "proc-macro2", "quote", "syn", ] -[[package]] -name = "slice-group-by" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "826167069c09b99d56f31e9ae5c99049e932a98c9dc2dac47645b08dbbf76ba7" - [[package]] name = "smallvec" version = "1.13.2" @@ -404,9 +402,9 @@ checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" [[package]] name = "syn" -version = "2.0.70" +version = "2.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f0209b68b3613b093e0ec905354eccaedcfe83b8cb37cbdeae64026c3064c16" +checksum = "46f71c0377baf4ef1cc3e3402ded576dccc315800fbc62dfc7fe04b009773b4a" dependencies = [ "proc-macro2", "quote", @@ -421,21 +419,15 @@ checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1" [[package]] name = "unicode-ident" -version = "1.0.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" - -[[package]] -name = "version_check" -version = "0.9.4" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" [[package]] name = "wasmtime-jit-icache-coherence" -version = "27.0.0" +version = "28.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91b218a92866f74f35162f5d03a4e0f62cd0e1cc624285b1014275e5d4575fad" +checksum = "d40d7722b9e1fbeae135715710a8a2570b1e6cf72b74dd653962d89831c6c70d" 
dependencies = [ "anyhow", "cfg-if", @@ -524,23 +516,3 @@ name = "windows_x86_64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" - -[[package]] -name = "zerocopy" -version = "0.7.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" -dependencies = [ - "zerocopy-derive", -] - -[[package]] -name = "zerocopy-derive" -version = "0.7.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] diff --git a/compiler/rustc_codegen_cranelift/Cargo.toml b/compiler/rustc_codegen_cranelift/Cargo.toml index b2fed3c490edb..bfdbc3e768a69 100644 --- a/compiler/rustc_codegen_cranelift/Cargo.toml +++ b/compiler/rustc_codegen_cranelift/Cargo.toml @@ -8,12 +8,12 @@ crate-type = ["dylib"] [dependencies] # These have to be in sync with each other -cranelift-codegen = { version = "0.114.0", default-features = false, features = ["std", "unwind", "all-native-arch"] } -cranelift-frontend = { version = "0.114.0" } -cranelift-module = { version = "0.114.0" } -cranelift-native = { version = "0.114.0" } -cranelift-jit = { version = "0.114.0", optional = true } -cranelift-object = { version = "0.114.0" } +cranelift-codegen = { version = "0.115.0", default-features = false, features = ["std", "unwind", "all-native-arch"] } +cranelift-frontend = { version = "0.115.0" } +cranelift-module = { version = "0.115.0" } +cranelift-native = { version = "0.115.0" } +cranelift-jit = { version = "0.115.0", optional = true } +cranelift-object = { version = "0.115.0" } target-lexicon = "0.12.0" gimli = { version = "0.31", default-features = false, features = ["write"] } object = { version = "0.36", default-features = false, features = ["std", "read_core", "write", "archive", "coff", "elf", "macho", "pe"] } @@ -23,6 +23,14 @@ libloading = { version = "0.8.0", optional = true } smallvec = "1.8.1" [patch.crates-io] +# Uncomment to use an unreleased version of cranelift +#cranelift-codegen = { git = "https://github.com/bytecodealliance/wasmtime.git", branch = "release-28.0.0", version = "0.115.0" } +#cranelift-frontend = { git = "https://github.com/bytecodealliance/wasmtime.git", branch = "release-28.0.0", version = "0.115.0" } +#cranelift-module = { git = "https://github.com/bytecodealliance/wasmtime.git", branch = "release-28.0.0", version = "0.115.0" } +#cranelift-native = { git = "https://github.com/bytecodealliance/wasmtime.git", branch = "release-28.0.0", version = "0.115.0" } +#cranelift-jit = { git = "https://github.com/bytecodealliance/wasmtime.git", branch = "release-28.0.0", version = "0.115.0" } +#cranelift-object = { git = "https://github.com/bytecodealliance/wasmtime.git", branch = "release-28.0.0", version = "0.115.0" } + # Uncomment to use local checkout of cranelift #cranelift-codegen = { path = "../wasmtime/cranelift/codegen" } #cranelift-frontend = { path = "../wasmtime/cranelift/frontend" } diff --git a/compiler/rustc_codegen_cranelift/build_system/bench.rs b/compiler/rustc_codegen_cranelift/build_system/bench.rs index 73a0f325fc212..8359b7b527903 100644 --- a/compiler/rustc_codegen_cranelift/build_system/bench.rs +++ b/compiler/rustc_codegen_cranelift/build_system/bench.rs @@ -16,11 +16,7 @@ static SIMPLE_RAYTRACER_REPO: GitRepo = GitRepo::github( "", ); 
-pub(crate) fn benchmark(dirs: &Dirs, bootstrap_host_compiler: &Compiler) { - benchmark_simple_raytracer(dirs, bootstrap_host_compiler); -} - -fn benchmark_simple_raytracer(dirs: &Dirs, bootstrap_host_compiler: &Compiler) { +pub(crate) fn benchmark(dirs: &Dirs, compiler: &Compiler) { if std::process::Command::new("hyperfine").output().is_err() { eprintln!("Hyperfine not installed"); eprintln!("Hint: Try `cargo install hyperfine` to install hyperfine"); @@ -39,9 +35,9 @@ fn benchmark_simple_raytracer(dirs: &Dirs, bootstrap_host_compiler: &Compiler) { }; eprintln!("[BENCH COMPILE] ebobby/simple-raytracer"); - let cargo_clif = dirs - .dist_dir - .join(get_file_name(&bootstrap_host_compiler.rustc, "cargo_clif", "bin").replace('_', "-")); + let cargo_clif = &compiler.cargo; + let rustc_clif = &compiler.rustc; + let rustflags = &compiler.rustflags.join("\x1f"); let manifest_path = SIMPLE_RAYTRACER_REPO.source_dir().to_path(dirs).join("Cargo.toml"); let target_dir = dirs.build_dir.join("simple_raytracer"); @@ -56,22 +52,24 @@ fn benchmark_simple_raytracer(dirs: &Dirs, bootstrap_host_compiler: &Compiler) { target_dir = target_dir.display(), ); let clif_build_cmd = format!( - "RUSTC=rustc {cargo_clif} build --manifest-path {manifest_path} --target-dir {target_dir} && (rm build/raytracer_cg_clif || true) && ln build/simple_raytracer/debug/main build/raytracer_cg_clif", + "RUSTC={rustc_clif} CARGO_ENCODED_RUSTFLAGS=\"{rustflags}\" {cargo_clif} build --manifest-path {manifest_path} --target-dir {target_dir} && (rm build/raytracer_cg_clif || true) && ln build/simple_raytracer/debug/main build/raytracer_cg_clif", cargo_clif = cargo_clif.display(), + rustc_clif = rustc_clif.display(), manifest_path = manifest_path.display(), target_dir = target_dir.display(), ); let clif_build_opt_cmd = format!( - "RUSTC=rustc {cargo_clif} build --manifest-path {manifest_path} --target-dir {target_dir} --release && (rm build/raytracer_cg_clif_opt || true) && ln build/simple_raytracer/release/main build/raytracer_cg_clif_opt", + "RUSTC={rustc_clif} CARGO_ENCODED_RUSTFLAGS=\"{rustflags}\" {cargo_clif} build --manifest-path {manifest_path} --target-dir {target_dir} --release && (rm build/raytracer_cg_clif_opt || true) && ln build/simple_raytracer/release/main build/raytracer_cg_clif_opt", cargo_clif = cargo_clif.display(), + rustc_clif = rustc_clif.display(), manifest_path = manifest_path.display(), target_dir = target_dir.display(), ); - let bench_compile_markdown = dirs.dist_dir.join("bench_compile.md"); + let bench_compile_markdown = dirs.build_dir.join("bench_compile.md"); let bench_compile = hyperfine_command( - 1, + 0, bench_runs, Some(&clean_cmd), &[ @@ -92,23 +90,14 @@ fn benchmark_simple_raytracer(dirs: &Dirs, bootstrap_host_compiler: &Compiler) { eprintln!("[BENCH RUN] ebobby/simple-raytracer"); - let bench_run_markdown = dirs.dist_dir.join("bench_run.md"); - - let raytracer_cg_llvm = Path::new(".").join(get_file_name( - &bootstrap_host_compiler.rustc, - "raytracer_cg_llvm", - "bin", - )); - let raytracer_cg_clif = Path::new(".").join(get_file_name( - &bootstrap_host_compiler.rustc, - "raytracer_cg_clif", - "bin", - )); - let raytracer_cg_clif_opt = Path::new(".").join(get_file_name( - &bootstrap_host_compiler.rustc, - "raytracer_cg_clif_opt", - "bin", - )); + let bench_run_markdown = dirs.build_dir.join("bench_run.md"); + + let raytracer_cg_llvm = + Path::new(".").join(get_file_name(&compiler.rustc, "raytracer_cg_llvm", "bin")); + let raytracer_cg_clif = + Path::new(".").join(get_file_name(&compiler.rustc, 
"raytracer_cg_clif", "bin")); + let raytracer_cg_clif_opt = + Path::new(".").join(get_file_name(&compiler.rustc, "raytracer_cg_clif_opt", "bin")); let mut bench_run = hyperfine_command( 0, bench_runs, diff --git a/compiler/rustc_codegen_cranelift/build_system/build_sysroot.rs b/compiler/rustc_codegen_cranelift/build_system/build_sysroot.rs index e47e982991622..a73e3c87d43d0 100644 --- a/compiler/rustc_codegen_cranelift/build_system/build_sysroot.rs +++ b/compiler/rustc_codegen_cranelift/build_system/build_sysroot.rs @@ -33,14 +33,7 @@ pub(crate) fn build_sysroot( let cg_clif_dylib_path = match cg_clif_dylib_src { CodegenBackend::Local(src_path) => { // Copy the backend - let cg_clif_dylib_path = if cfg!(windows) { - // Windows doesn't have rpath support, so the cg_clif dylib needs to be next to the - // binaries. - dist_dir.join("bin") - } else { - dist_dir.join("lib") - } - .join(src_path.file_name().unwrap()); + let cg_clif_dylib_path = dist_dir.join("lib").join(src_path.file_name().unwrap()); try_hard_link(src_path, &cg_clif_dylib_path); CodegenBackend::Local(cg_clif_dylib_path) } @@ -102,19 +95,14 @@ pub(crate) fn build_sysroot( .install_into_sysroot(dist_dir); } - let mut target_compiler = { - let rustc_clif = dist_dir.join(wrapper_base_name.replace("____", "rustc-clif")); - let rustdoc_clif = dist_dir.join(wrapper_base_name.replace("____", "rustdoc-clif")); - - Compiler { - cargo: bootstrap_host_compiler.cargo.clone(), - rustc: rustc_clif.clone(), - rustdoc: rustdoc_clif.clone(), - rustflags: vec![], - rustdocflags: vec![], - triple: target_triple, - runner: vec![], - } + let mut target_compiler = Compiler { + cargo: bootstrap_host_compiler.cargo.clone(), + rustc: dist_dir.join(wrapper_base_name.replace("____", "rustc-clif")), + rustdoc: dist_dir.join(wrapper_base_name.replace("____", "rustdoc-clif")), + rustflags: vec![], + rustdocflags: vec![], + triple: target_triple, + runner: vec![], }; if !is_native { target_compiler.set_cross_linker_and_runner(); diff --git a/compiler/rustc_codegen_cranelift/build_system/config.rs b/compiler/rustc_codegen_cranelift/build_system/config.rs index ef540cf1f822b..37bc4c5d78275 100644 --- a/compiler/rustc_codegen_cranelift/build_system/config.rs +++ b/compiler/rustc_codegen_cranelift/build_system/config.rs @@ -33,23 +33,3 @@ pub(crate) fn get_bool(name: &str) -> bool { true } } - -pub(crate) fn get_value(name: &str) -> Option { - let values = load_config_file() - .into_iter() - .filter(|(key, _)| key == name) - .map(|(_, val)| val) - .collect::>(); - if values.is_empty() { - None - } else if values.len() == 1 { - if values[0].is_none() { - eprintln!("Config `{}` missing value", name); - process::exit(1); - } - values.into_iter().next().unwrap() - } else { - eprintln!("Config `{}` given multiple values: {:?}", name, values); - process::exit(1); - } -} diff --git a/compiler/rustc_codegen_cranelift/build_system/main.rs b/compiler/rustc_codegen_cranelift/build_system/main.rs index 99e6146657f34..3ff9751a3ef2d 100644 --- a/compiler/rustc_codegen_cranelift/build_system/main.rs +++ b/compiler/rustc_codegen_cranelift/build_system/main.rs @@ -156,10 +156,8 @@ fn main() { let cargo = rustc_info::get_cargo_path(); let rustc = rustc_info::get_rustc_path(); let rustdoc = rustc_info::get_rustdoc_path(); - let triple = std::env::var("HOST_TRIPLE") - .ok() - .or_else(|| config::get_value("host")) - .unwrap_or_else(|| rustc_info::get_host_triple(&rustc)); + let triple = + std::env::var("HOST_TRIPLE").unwrap_or_else(|_| rustc_info::get_host_triple(&rustc)); Compiler 
{ cargo, rustc, @@ -170,10 +168,8 @@ fn main() { runner: vec![], } }; - let target_triple = std::env::var("TARGET_TRIPLE") - .ok() - .or_else(|| config::get_value("target")) - .unwrap_or_else(|| bootstrap_host_compiler.triple.clone()); + let target_triple = + std::env::var("TARGET_TRIPLE").unwrap_or_else(|_| bootstrap_host_compiler.triple.clone()); let dirs = path::Dirs { source_dir: current_dir.clone(), @@ -247,7 +243,7 @@ fn main() { ); } Command::Bench => { - build_sysroot::build_sysroot( + let compiler = build_sysroot::build_sysroot( &dirs, sysroot_kind, &cg_clif_dylib, @@ -255,7 +251,7 @@ fn main() { rustup_toolchain_name.as_deref(), target_triple, ); - bench::benchmark(&dirs, &bootstrap_host_compiler); + bench::benchmark(&dirs, &compiler); } } } diff --git a/compiler/rustc_codegen_cranelift/build_system/path.rs b/compiler/rustc_codegen_cranelift/build_system/path.rs index 20a81156b71d9..d6a6558b2be29 100644 --- a/compiler/rustc_codegen_cranelift/build_system/path.rs +++ b/compiler/rustc_codegen_cranelift/build_system/path.rs @@ -11,20 +11,11 @@ pub(crate) struct Dirs { #[doc(hidden)] #[derive(Debug, Copy, Clone)] -pub(crate) enum PathBase { +enum PathBase { Source, Build, } -impl PathBase { - fn to_path(self, dirs: &Dirs) -> PathBuf { - match self { - PathBase::Source => dirs.source_dir.clone(), - PathBase::Build => dirs.build_dir.clone(), - } - } -} - #[derive(Debug, Copy, Clone)] pub(crate) struct RelPath { base: PathBase, @@ -41,6 +32,9 @@ impl RelPath { } pub(crate) fn to_path(&self, dirs: &Dirs) -> PathBuf { - self.base.to_path(dirs).join(self.suffix) + match self.base { + PathBase::Source => dirs.source_dir.join(self.suffix), + PathBase::Build => dirs.build_dir.join(self.suffix), + } } } diff --git a/compiler/rustc_codegen_cranelift/build_system/tests.rs b/compiler/rustc_codegen_cranelift/build_system/tests.rs index 08736db8ba0c8..8de419a0c4eb2 100644 --- a/compiler/rustc_codegen_cranelift/build_system/tests.rs +++ b/compiler/rustc_codegen_cranelift/build_system/tests.rs @@ -73,8 +73,6 @@ const BASE_SYSROOT_SUITE: &[TestCase] = &[ "example/arbitrary_self_types_pointers_and_wrappers.rs", &[], ), - TestCase::build_lib("build.alloc_system", "example/alloc_system.rs", "lib"), - TestCase::build_bin_and_run("aot.alloc_example", "example/alloc_example.rs", &[]), TestCase::jit_bin("jit.std_example", "example/std_example.rs", "arg"), TestCase::build_bin_and_run("aot.std_example", "example/std_example.rs", &["arg"]), TestCase::build_bin_and_run("aot.dst_field_align", "example/dst-field-align.rs", &[]), @@ -89,7 +87,6 @@ const BASE_SYSROOT_SUITE: &[TestCase] = &[ &[], ), TestCase::build_bin_and_run("aot.float-minmax-pass", "example/float-minmax-pass.rs", &[]), - TestCase::build_bin_and_run("aot.mod_bench", "example/mod_bench.rs", &[]), TestCase::build_bin_and_run("aot.issue-72793", "example/issue-72793.rs", &[]), TestCase::build_bin("aot.issue-59326", "example/issue-59326.rs"), TestCase::build_bin_and_run("aot.neon", "example/neon.rs", &[]), diff --git a/compiler/rustc_codegen_cranelift/config.txt b/compiler/rustc_codegen_cranelift/config.txt index b63597f60fc6f..f578cbef35e68 100644 --- a/compiler/rustc_codegen_cranelift/config.txt +++ b/compiler/rustc_codegen_cranelift/config.txt @@ -1,15 +1,5 @@ # This file allows configuring the build system. -# Which triple to produce a compiler toolchain for. -# -# Defaults to the default triple of rustc on the host system. -#host = x86_64-unknown-linux-gnu - -# Which triple to build libraries (core/alloc/std/test/proc_macro) for. 
-# -# Defaults to `host`. -#target = x86_64-unknown-linux-gnu - # Disables cleaning of the sysroot dir. This will cause old compiled artifacts to be re-used when # the sysroot source hasn't changed. This is useful when the codegen backend hasn't been modified. # This option can be changed while the build system is already running for as long as sysroot @@ -31,15 +21,12 @@ aot.mini_core_hello_world testsuite.base_sysroot aot.arbitrary_self_types_pointers_and_wrappers aot.issue_91827_extern_types -build.alloc_system -aot.alloc_example jit.std_example aot.std_example aot.dst_field_align aot.subslice-patterns-const-eval aot.track-caller-attribute aot.float-minmax-pass -aot.mod_bench aot.issue-72793 aot.issue-59326 aot.neon diff --git a/compiler/rustc_codegen_cranelift/example/alloc_example.rs b/compiler/rustc_codegen_cranelift/example/alloc_example.rs deleted file mode 100644 index da70ca7943983..0000000000000 --- a/compiler/rustc_codegen_cranelift/example/alloc_example.rs +++ /dev/null @@ -1,44 +0,0 @@ -#![feature(start, core_intrinsics, alloc_error_handler, lang_items)] -#![allow(internal_features)] -#![no_std] - -extern crate alloc; -extern crate alloc_system; - -use alloc::boxed::Box; - -use alloc_system::System; - -#[global_allocator] -static ALLOC: System = System; - -#[cfg_attr(unix, link(name = "c"))] -#[cfg_attr(target_env = "msvc", link(name = "msvcrt"))] -extern "C" { - fn puts(s: *const u8) -> i32; -} - -#[panic_handler] -fn panic_handler(_: &core::panic::PanicInfo<'_>) -> ! { - core::intrinsics::abort(); -} - -#[alloc_error_handler] -fn alloc_error_handler(_: alloc::alloc::Layout) -> ! { - core::intrinsics::abort(); -} - -#[lang = "eh_personality"] -fn eh_personality() -> ! { - loop {} -} - -#[start] -fn main(_argc: isize, _argv: *const *const u8) -> isize { - let world: Box<&str> = Box::new("Hello World!\0"); - unsafe { - puts(*world as *const str as *const u8); - } - - 0 -} diff --git a/compiler/rustc_codegen_cranelift/example/alloc_system.rs b/compiler/rustc_codegen_cranelift/example/alloc_system.rs deleted file mode 100644 index 2884c9c32ae4d..0000000000000 --- a/compiler/rustc_codegen_cranelift/example/alloc_system.rs +++ /dev/null @@ -1,124 +0,0 @@ -// SPDX-License-Identifier: MIT OR Apache-2.0 -// SPDX-FileCopyrightText: The Rust Project Developers (see https://thanks.rust-lang.org) - -#![no_std] - -pub struct System; - -#[cfg(any(windows, unix, target_os = "redox"))] -mod realloc_fallback { - use core::alloc::{GlobalAlloc, Layout}; - use core::{cmp, ptr}; - impl super::System { - pub(crate) unsafe fn realloc_fallback( - &self, - ptr: *mut u8, - old_layout: Layout, - new_size: usize, - ) -> *mut u8 { - // Docs for GlobalAlloc::realloc require this to be valid: - let new_layout = Layout::from_size_align_unchecked(new_size, old_layout.align()); - let new_ptr = GlobalAlloc::alloc(self, new_layout); - if !new_ptr.is_null() { - let size = cmp::min(old_layout.size(), new_size); - ptr::copy_nonoverlapping(ptr, new_ptr, size); - GlobalAlloc::dealloc(self, ptr, old_layout); - } - new_ptr - } - } -} -#[cfg(any(unix, target_os = "redox"))] -mod platform { - use core::alloc::{GlobalAlloc, Layout}; - use core::ffi::c_void; - use core::ptr; - - use System; - extern "C" { - fn posix_memalign(memptr: *mut *mut c_void, align: usize, size: usize) -> i32; - fn free(p: *mut c_void); - } - unsafe impl GlobalAlloc for System { - #[inline] - unsafe fn alloc(&self, layout: Layout) -> *mut u8 { - aligned_malloc(&layout) - } - #[inline] - unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 { - 
let ptr = self.alloc(layout.clone()); - if !ptr.is_null() { - ptr::write_bytes(ptr, 0, layout.size()); - } - ptr - } - #[inline] - unsafe fn dealloc(&self, ptr: *mut u8, _layout: Layout) { - free(ptr as *mut c_void) - } - #[inline] - unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 { - self.realloc_fallback(ptr, layout, new_size) - } - } - unsafe fn aligned_malloc(layout: &Layout) -> *mut u8 { - let mut out = ptr::null_mut(); - let ret = posix_memalign(&mut out, layout.align(), layout.size()); - if ret != 0 { ptr::null_mut() } else { out as *mut u8 } - } -} -#[cfg(windows)] -#[allow(nonstandard_style)] -mod platform { - use core::alloc::{GlobalAlloc, Layout}; - - use System; - type LPVOID = *mut u8; - type HANDLE = LPVOID; - type SIZE_T = usize; - type DWORD = u32; - type BOOL = i32; - extern "system" { - fn GetProcessHeap() -> HANDLE; - fn HeapAlloc(hHeap: HANDLE, dwFlags: DWORD, dwBytes: SIZE_T) -> LPVOID; - fn HeapFree(hHeap: HANDLE, dwFlags: DWORD, lpMem: LPVOID) -> BOOL; - fn GetLastError() -> DWORD; - } - #[repr(C)] - struct Header(*mut u8); - const HEAP_ZERO_MEMORY: DWORD = 0x00000008; - unsafe fn get_header<'a>(ptr: *mut u8) -> &'a mut Header { - &mut *(ptr as *mut Header).sub(1) - } - unsafe fn align_ptr(ptr: *mut u8, align: usize) -> *mut u8 { - let aligned = ptr.add(align - (ptr as usize & (align - 1))); - *get_header(aligned) = Header(ptr); - aligned - } - #[inline] - unsafe fn allocate_with_flags(layout: Layout, flags: DWORD) -> *mut u8 { - let size = layout.size() + layout.align(); - let ptr = HeapAlloc(GetProcessHeap(), flags, size); - (if ptr.is_null() { ptr } else { align_ptr(ptr, layout.align()) }) as *mut u8 - } - unsafe impl GlobalAlloc for System { - #[inline] - unsafe fn alloc(&self, layout: Layout) -> *mut u8 { - allocate_with_flags(layout, 0) - } - #[inline] - unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 { - allocate_with_flags(layout, HEAP_ZERO_MEMORY) - } - #[inline] - unsafe fn dealloc(&self, ptr: *mut u8, _layout: Layout) { - let header = get_header(ptr); - let err = HeapFree(GetProcessHeap(), 0, header.0 as LPVOID); - debug_assert!(err != 0, "Failed to free heap memory: {}", GetLastError()); - } - #[inline] - unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 { - self.realloc_fallback(ptr, layout, new_size) - } - } -} diff --git a/compiler/rustc_codegen_cranelift/example/mod_bench.rs b/compiler/rustc_codegen_cranelift/example/mod_bench.rs deleted file mode 100644 index 11a3e8fc72d8d..0000000000000 --- a/compiler/rustc_codegen_cranelift/example/mod_bench.rs +++ /dev/null @@ -1,37 +0,0 @@ -#![feature(start, core_intrinsics, lang_items)] -#![allow(internal_features)] -#![no_std] - -#[cfg_attr(unix, link(name = "c"))] -#[cfg_attr(target_env = "msvc", link(name = "msvcrt"))] -extern "C" {} - -#[panic_handler] -fn panic_handler(_: &core::panic::PanicInfo<'_>) -> ! 
{ - core::intrinsics::abort(); -} - -#[lang = "eh_personality"] -fn eh_personality() {} - -// Required for rustc_codegen_llvm -#[no_mangle] -unsafe extern "C" fn _Unwind_Resume() { - core::intrinsics::unreachable(); -} - -#[start] -fn main(_argc: isize, _argv: *const *const u8) -> isize { - for i in 2..10_000_000 { - black_box((i + 1) % i); - } - - 0 -} - -#[inline(never)] -fn black_box(i: u32) { - if i != 1 { - core::intrinsics::abort(); - } -} diff --git a/compiler/rustc_codegen_cranelift/patches/0029-stdlib-Disable-f16-and-f128-in-compiler-builtins.patch b/compiler/rustc_codegen_cranelift/patches/0029-stdlib-Disable-f16-and-f128-in-compiler-builtins.patch index 6012af6a8dd2a..bf58e48515855 100644 --- a/compiler/rustc_codegen_cranelift/patches/0029-stdlib-Disable-f16-and-f128-in-compiler-builtins.patch +++ b/compiler/rustc_codegen_cranelift/patches/0029-stdlib-Disable-f16-and-f128-in-compiler-builtins.patch @@ -16,8 +16,8 @@ index 7165c3e48af..968552ad435 100644 [dependencies] core = { path = "../core" } --compiler_builtins = { version = "=0.1.138", features = ['rustc-dep-of-std'] } -+compiler_builtins = { version = "=0.1.138", features = ['rustc-dep-of-std', 'no-f16-f128'] } +-compiler_builtins = { version = "=0.1.143", features = ['rustc-dep-of-std'] } ++compiler_builtins = { version = "=0.1.143", features = ['rustc-dep-of-std', 'no-f16-f128'] } [dev-dependencies] rand = { version = "0.8.5", default-features = false, features = ["alloc"] } diff --git a/compiler/rustc_codegen_cranelift/patches/bcryptprimitives.rs b/compiler/rustc_codegen_cranelift/patches/bcryptprimitives.rs deleted file mode 100644 index 4d186485aac1d..0000000000000 --- a/compiler/rustc_codegen_cranelift/patches/bcryptprimitives.rs +++ /dev/null @@ -1,22 +0,0 @@ -// Shim for bcryptprimitives.dll. The Wine version shipped with Ubuntu 22.04 -// doesn't support it yet. 
Authored by @ChrisDenton - -#![crate_type = "cdylib"] -#![allow(nonstandard_style)] - -#[no_mangle] -pub unsafe extern "system" fn ProcessPrng(mut pbData: *mut u8, mut cbData: usize) -> i32 { - while cbData > 0 { - let size = core::cmp::min(cbData, u32::MAX as usize); - RtlGenRandom(pbData, size as u32); - cbData -= size; - pbData = pbData.add(size); - } - 1 -} - -#[link(name = "advapi32")] -extern "system" { - #[link_name = "SystemFunction036"] - pub fn RtlGenRandom(RandomBuffer: *mut u8, RandomBufferLength: u32) -> u8; -} diff --git a/compiler/rustc_codegen_cranelift/rust-toolchain b/compiler/rustc_codegen_cranelift/rust-toolchain index 8d935df4d1f2f..e4c3dd708fd98 100644 --- a/compiler/rustc_codegen_cranelift/rust-toolchain +++ b/compiler/rustc_codegen_cranelift/rust-toolchain @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-12-06" +channel = "nightly-2025-01-10" components = ["rust-src", "rustc-dev", "llvm-tools"] profile = "minimal" diff --git a/compiler/rustc_codegen_cranelift/scripts/cargo-clif.rs b/compiler/rustc_codegen_cranelift/scripts/cargo-clif.rs index 1e14f41d4a2c2..ebbb687961058 100644 --- a/compiler/rustc_codegen_cranelift/scripts/cargo-clif.rs +++ b/compiler/rustc_codegen_cranelift/scripts/cargo-clif.rs @@ -16,7 +16,7 @@ fn main() { if let Some(name) = option_env!("BUILTIN_BACKEND") { rustflags.push(format!("-Zcodegen-backend={name}")); } else { - let dylib = sysroot.join(if cfg!(windows) { "bin" } else { "lib" }).join( + let dylib = sysroot.join("lib").join( env::consts::DLL_PREFIX.to_string() + "rustc_codegen_cranelift" + env::consts::DLL_SUFFIX, diff --git a/compiler/rustc_codegen_cranelift/scripts/rustc-clif.rs b/compiler/rustc_codegen_cranelift/scripts/rustc-clif.rs index a27b9983bf181..528031af82a84 100644 --- a/compiler/rustc_codegen_cranelift/scripts/rustc-clif.rs +++ b/compiler/rustc_codegen_cranelift/scripts/rustc-clif.rs @@ -11,7 +11,7 @@ fn main() { sysroot = sysroot.parent().unwrap(); } - let cg_clif_dylib_path = sysroot.join(if cfg!(windows) { "bin" } else { "lib" }).join( + let cg_clif_dylib_path = sysroot.join("lib").join( env::consts::DLL_PREFIX.to_string() + "rustc_codegen_cranelift" + env::consts::DLL_SUFFIX, ); diff --git a/compiler/rustc_codegen_cranelift/scripts/rustdoc-clif.rs b/compiler/rustc_codegen_cranelift/scripts/rustdoc-clif.rs index 1cad312bb7918..6ebe060d8bbd1 100644 --- a/compiler/rustc_codegen_cranelift/scripts/rustdoc-clif.rs +++ b/compiler/rustc_codegen_cranelift/scripts/rustdoc-clif.rs @@ -11,7 +11,7 @@ fn main() { sysroot = sysroot.parent().unwrap(); } - let cg_clif_dylib_path = sysroot.join(if cfg!(windows) { "bin" } else { "lib" }).join( + let cg_clif_dylib_path = sysroot.join("lib").join( env::consts::DLL_PREFIX.to_string() + "rustc_codegen_cranelift" + env::consts::DLL_SUFFIX, ); diff --git a/compiler/rustc_codegen_cranelift/scripts/test_rustc_tests.sh b/compiler/rustc_codegen_cranelift/scripts/test_rustc_tests.sh index e291ec204649e..e569da90cf7b4 100755 --- a/compiler/rustc_codegen_cranelift/scripts/test_rustc_tests.sh +++ b/compiler/rustc_codegen_cranelift/scripts/test_rustc_tests.sh @@ -128,7 +128,11 @@ rm tests/ui/abi/large-byval-align.rs # exceeds implementation limit of Cranelift # should work when using ./x.py test the way it is intended # ============================================================ rm -r tests/run-make/remap-path-prefix-dwarf # requires llvm-dwarfdump +rm -r tests/run-make/strip # same rm -r tests/run-make/compiler-builtins # Expects lib/rustlib/src/rust to contains the standard library source +rm -r 
tests/run-make/missing-unstable-trait-bound # This disables support for unstable features, but running cg_clif needs some unstable features +rm -r tests/run-make/const-trait-stable-toolchain # same +rm -r tests/run-make/incr-add-rust-src-component # genuine bugs # ============ @@ -196,5 +200,5 @@ index e7ae773ffa1d3..04bc2d7787da7 100644 EOF echo "[TEST] rustc test suite" -COMPILETEST_FORCE_STAGE0=1 ./x.py test --stage 0 --test-args=--nocapture tests/{codegen-units,run-make,ui,incremental} +COMPILETEST_FORCE_STAGE0=1 ./x.py test --stage 0 --test-args=--no-capture tests/{codegen-units,run-make,ui,incremental} popd diff --git a/compiler/rustc_codegen_cranelift/src/abi/mod.rs b/compiler/rustc_codegen_cranelift/src/abi/mod.rs index 2466bfe60c7ab..2c99597922e8f 100644 --- a/compiler/rustc_codegen_cranelift/src/abi/mod.rs +++ b/compiler/rustc_codegen_cranelift/src/abi/mod.rs @@ -65,7 +65,11 @@ pub(crate) fn conv_to_call_conv(sess: &Session, c: Conv, default_call_conv: Call sess.dcx().fatal("C-cmse-nonsecure-entry call conv is not yet implemented"); } - Conv::Msp430Intr | Conv::PtxKernel | Conv::AvrInterrupt | Conv::AvrNonBlockingInterrupt => { + Conv::Msp430Intr + | Conv::PtxKernel + | Conv::GpuKernel + | Conv::AvrInterrupt + | Conv::AvrNonBlockingInterrupt => { unreachable!("tried to use {c:?} call conv which only exists on an unsupported target"); } } diff --git a/compiler/rustc_codegen_cranelift/src/base.rs b/compiler/rustc_codegen_cranelift/src/base.rs index 34066eb83fc02..956a024fa4dc3 100644 --- a/compiler/rustc_codegen_cranelift/src/base.rs +++ b/compiler/rustc_codegen_cranelift/src/base.rs @@ -828,12 +828,6 @@ fn codegen_stmt<'tcx>( fx.bcx.ins().nop(); } } - Rvalue::Len(place) => { - let place = codegen_place(fx, place); - let usize_layout = fx.layout_of(fx.tcx.types.usize); - let len = codegen_array_len(fx, place); - lval.write_cvalue(fx, CValue::by_val(len, usize_layout)); - } Rvalue::ShallowInitBox(ref operand, content_ty) => { let content_ty = fx.monomorphize(content_ty); let box_layout = fx.layout_of(Ty::new_box(fx.tcx, content_ty)); diff --git a/compiler/rustc_codegen_cranelift/src/codegen_i128.rs b/compiler/rustc_codegen_cranelift/src/codegen_i128.rs index 734574338d049..dcfd7ddabbc42 100644 --- a/compiler/rustc_codegen_cranelift/src/codegen_i128.rs +++ b/compiler/rustc_codegen_cranelift/src/codegen_i128.rs @@ -62,9 +62,8 @@ pub(crate) fn maybe_codegen<'tcx>( } } -pub(crate) fn maybe_codegen_checked<'tcx>( +pub(crate) fn maybe_codegen_mul_checked<'tcx>( fx: &mut FunctionCx<'_, '_, 'tcx>, - bin_op: BinOp, lhs: CValue<'tcx>, rhs: CValue<'tcx>, ) -> Option> { @@ -77,33 +76,22 @@ pub(crate) fn maybe_codegen_checked<'tcx>( } let is_signed = type_sign(lhs.layout().ty); - - match bin_op { - BinOp::BitAnd | BinOp::BitOr | BinOp::BitXor => unreachable!(), - BinOp::Add | BinOp::Sub => None, - BinOp::Mul => { - let out_ty = Ty::new_tup(fx.tcx, &[lhs.layout().ty, fx.tcx.types.bool]); - let out_place = CPlace::new_stack_slot(fx, fx.layout_of(out_ty)); - let param_types = vec![ - AbiParam::special(fx.pointer_type, ArgumentPurpose::StructReturn), - AbiParam::new(types::I128), - AbiParam::new(types::I128), - ]; - let args = [out_place.to_ptr().get_addr(fx), lhs.load_scalar(fx), rhs.load_scalar(fx)]; - fx.lib_call( - if is_signed { "__rust_i128_mulo" } else { "__rust_u128_mulo" }, - param_types, - vec![], - &args, - ); - Some(out_place.to_cvalue(fx)) - } - BinOp::AddUnchecked | BinOp::SubUnchecked | BinOp::MulUnchecked => unreachable!(), - BinOp::AddWithOverflow | BinOp::SubWithOverflow | 
BinOp::MulWithOverflow => unreachable!(), - BinOp::Offset => unreachable!("offset should only be used on pointers, not 128bit ints"), - BinOp::Div | BinOp::Rem => unreachable!(), - BinOp::Cmp => unreachable!(), - BinOp::Lt | BinOp::Le | BinOp::Eq | BinOp::Ge | BinOp::Gt | BinOp::Ne => unreachable!(), - BinOp::Shl | BinOp::ShlUnchecked | BinOp::Shr | BinOp::ShrUnchecked => unreachable!(), - } + let oflow_out_place = CPlace::new_stack_slot(fx, fx.layout_of(fx.tcx.types.i32)); + let param_types = vec![ + AbiParam::new(types::I128), + AbiParam::new(types::I128), + AbiParam::special(fx.pointer_type, ArgumentPurpose::Normal), + ]; + let args = [lhs.load_scalar(fx), rhs.load_scalar(fx), oflow_out_place.to_ptr().get_addr(fx)]; + let ret = fx.lib_call( + if is_signed { "__rust_i128_mulo" } else { "__rust_u128_mulo" }, + param_types, + vec![AbiParam::new(types::I128)], + &args, + ); + let mul = ret[0]; + let oflow = oflow_out_place.to_cvalue(fx).load_scalar(fx); + let oflow = clif_intcast(fx, oflow, types::I8, false); + let layout = fx.layout_of(Ty::new_tup(fx.tcx, &[lhs.layout().ty, fx.tcx.types.bool])); + Some(CValue::by_val_pair(mul, oflow, layout)) } diff --git a/compiler/rustc_codegen_cranelift/src/compiler_builtins.rs b/compiler/rustc_codegen_cranelift/src/compiler_builtins.rs index 4154a62234c12..2484c10848ed3 100644 --- a/compiler/rustc_codegen_cranelift/src/compiler_builtins.rs +++ b/compiler/rustc_codegen_cranelift/src/compiler_builtins.rs @@ -3,7 +3,7 @@ use std::ffi::c_int; #[cfg(feature = "jit")] use std::ffi::c_void; -// FIXME replace with core::ffi::c_size_t once stablized +// FIXME replace with core::ffi::c_size_t once stabilized #[allow(non_camel_case_types)] #[cfg(feature = "jit")] type size_t = usize; @@ -43,7 +43,7 @@ builtin_functions! { fn __divti3(n: i128, d: i128) -> i128; fn __umodti3(n: u128, d: u128) -> u128; fn __modti3(n: i128, d: i128) -> i128; - fn __rust_u128_mulo(a: u128, b: u128) -> (u128, bool); + fn __rust_u128_mulo(a: u128, b: u128, oflow: &mut i32) -> u128; // floats fn __floattisf(i: i128) -> f32; diff --git a/compiler/rustc_codegen_cranelift/src/discriminant.rs b/compiler/rustc_codegen_cranelift/src/discriminant.rs index 45794a4266589..4d0d5dc60eba6 100644 --- a/compiler/rustc_codegen_cranelift/src/discriminant.rs +++ b/compiler/rustc_codegen_cranelift/src/discriminant.rs @@ -18,6 +18,7 @@ pub(crate) fn codegen_set_discriminant<'tcx>( return; } match layout.variants { + Variants::Empty => unreachable!("we already handled uninhabited types"), Variants::Single { index } => { assert_eq!(index, variant_index); } @@ -85,6 +86,7 @@ pub(crate) fn codegen_get_discriminant<'tcx>( } let (tag_scalar, tag_field, tag_encoding) = match &layout.variants { + Variants::Empty => unreachable!("we already handled uninhabited types"), Variants::Single { index } => { let discr_val = layout .ty diff --git a/compiler/rustc_codegen_cranelift/src/driver/aot.rs b/compiler/rustc_codegen_cranelift/src/driver/aot.rs index 4fc30b69123dd..fe578e44770f7 100644 --- a/compiler/rustc_codegen_cranelift/src/driver/aot.rs +++ b/compiler/rustc_codegen_cranelift/src/driver/aot.rs @@ -333,10 +333,9 @@ fn make_module(sess: &Session, name: String) -> UnwindModule { let mut builder = ObjectBuilder::new(isa, name + ".o", cranelift_module::default_libcall_names()).unwrap(); - // Unlike cg_llvm, cg_clif defaults to disabling -Zfunction-sections. For cg_llvm binary size - // is important, while cg_clif cares more about compilation times. 
Enabling -Zfunction-sections - // can easily double the amount of time necessary to perform linking. - builder.per_function_section(sess.opts.unstable_opts.function_sections.unwrap_or(false)); + builder.per_function_section( + sess.opts.unstable_opts.function_sections.unwrap_or(sess.target.function_sections), + ); UnwindModule::new(ObjectModule::new(builder), true) } diff --git a/compiler/rustc_codegen_cranelift/src/driver/jit.rs b/compiler/rustc_codegen_cranelift/src/driver/jit.rs index 4be4291021dfa..9ca930e14da58 100644 --- a/compiler/rustc_codegen_cranelift/src/driver/jit.rs +++ b/compiler/rustc_codegen_cranelift/src/driver/jit.rs @@ -287,14 +287,14 @@ fn dep_symbol_lookup_fn( let mut dylib_paths = Vec::new(); - let data = &crate_info.dependency_formats[&rustc_session::config::CrateType::Executable].1; + let data = &crate_info.dependency_formats[&rustc_session::config::CrateType::Executable]; // `used_crates` is in reverse postorder in terms of dependencies. Reverse the order here to // get a postorder which ensures that all dependencies of a dylib are loaded before the dylib // itself. This helps the dynamic linker to find dylibs not in the regular dynamic library // search path. for &cnum in crate_info.used_crates.iter().rev() { let src = &crate_info.used_crate_source[&cnum]; - match data[cnum.as_usize() - 1] { + match data[cnum] { Linkage::NotLinked | Linkage::IncludedFromDylib => {} Linkage::Static => { let name = crate_info.crate_name[&cnum]; diff --git a/compiler/rustc_codegen_cranelift/src/driver/mod.rs b/compiler/rustc_codegen_cranelift/src/driver/mod.rs index fb0eed07c1971..ffd47cace3807 100644 --- a/compiler/rustc_codegen_cranelift/src/driver/mod.rs +++ b/compiler/rustc_codegen_cranelift/src/driver/mod.rs @@ -73,12 +73,14 @@ impl Drop for TimingGuard { impl cranelift_codegen::timing::Profiler for MeasuremeProfiler { fn start_pass(&self, pass: cranelift_codegen::timing::Pass) -> Box { - let mut timing_guard = - TimingGuard { profiler: std::mem::ManuallyDrop::new(self.0.clone()), inner: None }; + let mut timing_guard = Box::new(TimingGuard { + profiler: std::mem::ManuallyDrop::new(self.0.clone()), + inner: None, + }); timing_guard.inner = Some( unsafe { &*(&*timing_guard.profiler as &SelfProfilerRef as *const SelfProfilerRef) } .generic_activity(pass.description()), ); - Box::new(timing_guard) + timing_guard } } diff --git a/compiler/rustc_codegen_cranelift/src/inline_asm.rs b/compiler/rustc_codegen_cranelift/src/inline_asm.rs index 33726056cc1cb..6ff75f75d3b28 100644 --- a/compiler/rustc_codegen_cranelift/src/inline_asm.rs +++ b/compiler/rustc_codegen_cranelift/src/inline_asm.rs @@ -136,7 +136,7 @@ pub(crate) fn codegen_inline_asm_terminator<'tcx>( fx.bcx.ins().jump(destination_block, &[]); } None => { - fx.bcx.ins().trap(TrapCode::user(0 /* unreachable */).unwrap()); + fx.bcx.ins().trap(TrapCode::user(1 /* unreachable */).unwrap()); } } } diff --git a/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs b/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs index 5f1b71eff6b32..2e5813556aafa 100644 --- a/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs +++ b/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs @@ -23,7 +23,7 @@ use rustc_middle::ty::GenericArgsRef; use rustc_middle::ty::layout::ValidityRequirement; use rustc_middle::ty::print::{with_no_trimmed_paths, with_no_visible_paths}; use rustc_span::source_map::Spanned; -use rustc_span::symbol::{Symbol, sym}; +use rustc_span::{Symbol, sym}; pub(crate) use self::llvm::codegen_llvm_intrinsic_call; use 
crate::cast::clif_intcast; diff --git a/compiler/rustc_codegen_cranelift/src/intrinsics/simd.rs b/compiler/rustc_codegen_cranelift/src/intrinsics/simd.rs index e0ebe30752afa..6d71b8e8abab4 100644 --- a/compiler/rustc_codegen_cranelift/src/intrinsics/simd.rs +++ b/compiler/rustc_codegen_cranelift/src/intrinsics/simd.rs @@ -1136,7 +1136,7 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>( _ => { fx.tcx.dcx().span_err(span, format!("Unknown SIMD intrinsic {}", intrinsic)); // Prevent verifier error - fx.bcx.ins().trap(TrapCode::user(0 /* unreachable */).unwrap()); + fx.bcx.ins().trap(TrapCode::user(1 /* unreachable */).unwrap()); return; } } diff --git a/compiler/rustc_codegen_cranelift/src/lib.rs b/compiler/rustc_codegen_cranelift/src/lib.rs index c38ef82e5b80c..dc5d80e7a3451 100644 --- a/compiler/rustc_codegen_cranelift/src/lib.rs +++ b/compiler/rustc_codegen_cranelift/src/lib.rs @@ -27,6 +27,8 @@ extern crate rustc_metadata; extern crate rustc_session; extern crate rustc_span; extern crate rustc_target; +#[macro_use] +extern crate tracing; // This prevents duplicating functions and statics that are already part of the host rustc process. #[allow(unused_extern_crates)] @@ -208,6 +210,7 @@ impl CodegenBackend for CraneliftCodegenBackend { need_metadata_module: bool, ) -> Box { tcx.dcx().abort_if_errors(); + info!("codegen crate {}", tcx.crate_name(LOCAL_CRATE)); let config = self.config.clone().unwrap_or_else(|| { BackendConfig::from_opts(&tcx.sess.opts.cg.llvm_args) .unwrap_or_else(|err| tcx.sess.dcx().fatal(err)) diff --git a/compiler/rustc_codegen_cranelift/src/main_shim.rs b/compiler/rustc_codegen_cranelift/src/main_shim.rs index e480f21b9df80..e6bf0d5b47e4c 100644 --- a/compiler/rustc_codegen_cranelift/src/main_shim.rs +++ b/compiler/rustc_codegen_cranelift/src/main_shim.rs @@ -2,8 +2,7 @@ use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext}; use rustc_hir::LangItem; use rustc_middle::ty::{AssocKind, GenericArg}; use rustc_session::config::{EntryFnType, sigpipe}; -use rustc_span::DUMMY_SP; -use rustc_span::symbol::Ident; +use rustc_span::{DUMMY_SP, Ident}; use crate::prelude::*; diff --git a/compiler/rustc_codegen_cranelift/src/num.rs b/compiler/rustc_codegen_cranelift/src/num.rs index fb18f45d7dcad..f44e2459a784d 100644 --- a/compiler/rustc_codegen_cranelift/src/num.rs +++ b/compiler/rustc_codegen_cranelift/src/num.rs @@ -2,10 +2,10 @@ use crate::prelude::*; -pub(crate) fn bin_op_to_intcc(bin_op: BinOp, signed: bool) -> Option { +pub(crate) fn bin_op_to_intcc(bin_op: BinOp, signed: bool) -> IntCC { use BinOp::*; use IntCC::*; - Some(match bin_op { + match bin_op { Eq => Equal, Lt => { if signed { @@ -36,8 +36,8 @@ pub(crate) fn bin_op_to_intcc(bin_op: BinOp, signed: bool) -> Option { UnsignedGreaterThan } } - _ => return None, - }) + _ => unreachable!(), + } } fn codegen_three_way_compare<'tcx>( @@ -48,8 +48,8 @@ fn codegen_three_way_compare<'tcx>( ) -> CValue<'tcx> { // This emits `(lhs > rhs) - (lhs < rhs)`, which is cranelift's preferred form per // - let gt_cc = crate::num::bin_op_to_intcc(BinOp::Gt, signed).unwrap(); - let lt_cc = crate::num::bin_op_to_intcc(BinOp::Lt, signed).unwrap(); + let gt_cc = crate::num::bin_op_to_intcc(BinOp::Gt, signed); + let lt_cc = crate::num::bin_op_to_intcc(BinOp::Lt, signed); let gt = fx.bcx.ins().icmp(gt_cc, lhs, rhs); let lt = fx.bcx.ins().icmp(lt_cc, lhs, rhs); let val = fx.bcx.ins().isub(gt, lt); @@ -63,11 +63,7 @@ fn codegen_compare_bin_op<'tcx>( lhs: Value, rhs: Value, ) -> CValue<'tcx> { - if bin_op == BinOp::Cmp { - return 
codegen_three_way_compare(fx, signed, lhs, rhs); - } - - let intcc = crate::num::bin_op_to_intcc(bin_op, signed).unwrap(); + let intcc = crate::num::bin_op_to_intcc(bin_op, signed); let val = fx.bcx.ins().icmp(intcc, lhs, rhs); CValue::by_val(val, fx.layout_of(fx.tcx.types.bool)) } @@ -79,7 +75,7 @@ pub(crate) fn codegen_binop<'tcx>( in_rhs: CValue<'tcx>, ) -> CValue<'tcx> { match bin_op { - BinOp::Eq | BinOp::Lt | BinOp::Le | BinOp::Ne | BinOp::Ge | BinOp::Gt | BinOp::Cmp => { + BinOp::Eq | BinOp::Lt | BinOp::Le | BinOp::Ne | BinOp::Ge | BinOp::Gt => { match in_lhs.layout().ty.kind() { ty::Bool | ty::Uint(_) | ty::Int(_) | ty::Char => { let signed = type_sign(in_lhs.layout().ty); @@ -91,6 +87,16 @@ pub(crate) fn codegen_binop<'tcx>( _ => {} } } + BinOp::Cmp => match in_lhs.layout().ty.kind() { + ty::Bool | ty::Uint(_) | ty::Int(_) | ty::Char => { + let signed = type_sign(in_lhs.layout().ty); + let lhs = in_lhs.load_scalar(fx); + let rhs = in_rhs.load_scalar(fx); + + return codegen_three_way_compare(fx, signed, lhs, rhs); + } + _ => {} + }, _ => {} } @@ -200,10 +206,6 @@ pub(crate) fn codegen_checked_int_binop<'tcx>( let lhs = in_lhs.load_scalar(fx); let rhs = in_rhs.load_scalar(fx); - if let Some(res) = crate::codegen_i128::maybe_codegen_checked(fx, bin_op, in_lhs, in_rhs) { - return res; - } - let signed = type_sign(in_lhs.layout().ty); let (res, has_overflow) = match bin_op { @@ -236,6 +238,10 @@ pub(crate) fn codegen_checked_int_binop<'tcx>( (val, has_overflow) } BinOp::Mul => { + if let Some(res) = crate::codegen_i128::maybe_codegen_mul_checked(fx, in_lhs, in_rhs) { + return res; + } + let ty = fx.bcx.func.dfg.value_type(lhs); match ty { types::I8 | types::I16 | types::I32 if !signed => { @@ -357,14 +363,12 @@ pub(crate) fn codegen_float_binop<'tcx>( _ => bug!(), }; - let ret_val = fx.lib_call( + fx.lib_call( name, vec![AbiParam::new(ty), AbiParam::new(ty)], vec![AbiParam::new(ty)], &[lhs, rhs], - )[0]; - - return CValue::by_val(ret_val, in_lhs.layout()); + )[0] } BinOp::Eq | BinOp::Lt | BinOp::Le | BinOp::Ne | BinOp::Ge | BinOp::Gt => { let fltcc = match bin_op { @@ -431,13 +435,9 @@ pub(crate) fn codegen_ptr_binop<'tcx>( BinOp::Lt | BinOp::Le | BinOp::Ge | BinOp::Gt => { let ptr_eq = fx.bcx.ins().icmp(IntCC::Equal, lhs_ptr, rhs_ptr); - let ptr_cmp = - fx.bcx.ins().icmp(bin_op_to_intcc(bin_op, false).unwrap(), lhs_ptr, rhs_ptr); - let extra_cmp = fx.bcx.ins().icmp( - bin_op_to_intcc(bin_op, false).unwrap(), - lhs_extra, - rhs_extra, - ); + let ptr_cmp = fx.bcx.ins().icmp(bin_op_to_intcc(bin_op, false), lhs_ptr, rhs_ptr); + let extra_cmp = + fx.bcx.ins().icmp(bin_op_to_intcc(bin_op, false), lhs_extra, rhs_extra); fx.bcx.ins().select(ptr_eq, extra_cmp, ptr_cmp) } diff --git a/compiler/rustc_codegen_gcc/.github/workflows/ci.yml b/compiler/rustc_codegen_gcc/.github/workflows/ci.yml index 704d7b9c2fd6a..73ec6b84a155e 100644 --- a/compiler/rustc_codegen_gcc/.github/workflows/ci.yml +++ b/compiler/rustc_codegen_gcc/.github/workflows/ci.yml @@ -22,7 +22,6 @@ jobs: - { gcc: "gcc-13.deb" } - { gcc: "gcc-13-without-int128.deb" } commands: [ - "--mini-tests", "--std-tests", # FIXME: re-enable asm tests when GCC can emit in the right syntax. 
# "--asm-tests", @@ -79,6 +78,7 @@ jobs: run: | ./y.sh prepare --only-libcore ./y.sh build --sysroot + ./y.sh test --mini-tests cargo test - name: Run y.sh cargo build @@ -87,7 +87,7 @@ jobs: - name: Clean run: | - ./y.sh clean all + ./y.sh clean all - name: Prepare dependencies run: | @@ -95,9 +95,6 @@ jobs: git config --global user.name "User" ./y.sh prepare - - name: Add more failing tests because the sysroot is not compiled with LTO - run: cat tests/failing-non-lto-tests.txt >> tests/failing-ui-tests.txt - - name: Run tests run: | ./y.sh test --release --clean --build-sysroot ${{ matrix.commands }} diff --git a/compiler/rustc_codegen_gcc/.github/workflows/failures.yml b/compiler/rustc_codegen_gcc/.github/workflows/failures.yml index 2c1ed9ad42948..f33d9fcc58257 100644 --- a/compiler/rustc_codegen_gcc/.github/workflows/failures.yml +++ b/compiler/rustc_codegen_gcc/.github/workflows/failures.yml @@ -90,15 +90,12 @@ jobs: if: matrix.libgccjit_version.gcc != 'libgccjit12.so' run: ./y.sh prepare - - name: Add more failing tests because the sysroot is not compiled with LTO - run: cat tests/failing-non-lto-tests.txt >> tests/failing-ui-tests.txt - - name: Run tests # TODO: re-enable those tests for libgccjit 12. if: matrix.libgccjit_version.gcc != 'libgccjit12.so' id: tests run: | - ${{ matrix.libgccjit_version.env_extra }} ./y.sh test --release --clean --build-sysroot --test-failing-rustc ${{ matrix.libgccjit_version.extra }} | tee output_log + ${{ matrix.libgccjit_version.env_extra }} ./y.sh test --release --clean --build-sysroot --test-failing-rustc ${{ matrix.libgccjit_version.extra }} 2>&1 | tee output_log rg --text "test result" output_log >> $GITHUB_STEP_SUMMARY - name: Run failing ui pattern tests for ICE @@ -106,7 +103,7 @@ jobs: if: matrix.libgccjit_version.gcc != 'libgccjit12.so' id: ui-tests run: | - ${{ matrix.libgccjit_version.env_extra }} ./y.sh test --release --test-failing-ui-pattern-tests ${{ matrix.libgccjit_version.extra }} | tee output_log_ui + ${{ matrix.libgccjit_version.env_extra }} ./y.sh test --release --test-failing-ui-pattern-tests ${{ matrix.libgccjit_version.extra }} 2>&1 | tee output_log_ui if grep -q "the compiler unexpectedly panicked" output_log_ui; then echo "Error: 'the compiler unexpectedly panicked' found in output logs. CI Error!!" exit 1 diff --git a/compiler/rustc_codegen_gcc/.github/workflows/gcc12.yml b/compiler/rustc_codegen_gcc/.github/workflows/gcc12.yml index 7dcad21a02e1d..4c2ce91e86eef 100644 --- a/compiler/rustc_codegen_gcc/.github/workflows/gcc12.yml +++ b/compiler/rustc_codegen_gcc/.github/workflows/gcc12.yml @@ -82,9 +82,6 @@ jobs: #- name: Add more failing tests for GCC 12 #run: cat tests/failing-ui-tests12.txt >> tests/failing-ui-tests.txt - #- name: Add more failing tests because the sysroot is not compiled with LTO - #run: cat tests/failing-non-lto-tests.txt >> tests/failing-ui-tests.txt - #- name: Run tests #run: | #./y.sh test --release --clean --build-sysroot ${{ matrix.commands }} --no-default-features diff --git a/compiler/rustc_codegen_gcc/.github/workflows/m68k.yml b/compiler/rustc_codegen_gcc/.github/workflows/m68k.yml index 1c864e04413f6..07bb372b3606d 100644 --- a/compiler/rustc_codegen_gcc/.github/workflows/m68k.yml +++ b/compiler/rustc_codegen_gcc/.github/workflows/m68k.yml @@ -23,7 +23,6 @@ jobs: fail-fast: false matrix: commands: [ - "--mini-tests", "--std-tests", # TODO(antoyo): fix those on m68k. 
#"--test-libcore", @@ -93,6 +92,7 @@ jobs: run: | ./y.sh prepare --only-libcore --cross ./y.sh build --sysroot --features compiler_builtins/no-f16-f128 --target-triple m68k-unknown-linux-gnu + ./y.sh test --mini-tests CG_GCC_TEST_TARGET=m68k-unknown-linux-gnu cargo test ./y.sh clean all @@ -102,9 +102,6 @@ jobs: git config --global user.name "User" ./y.sh prepare --cross - - name: Add more failing tests because the sysroot is not compiled with LTO - run: cat tests/failing-non-lto-tests.txt >> tests/failing-ui-tests.txt - - name: Run tests run: | ./y.sh test --release --clean --build-sysroot --sysroot-features compiler_builtins/no-f16-f128 ${{ matrix.commands }} diff --git a/compiler/rustc_codegen_gcc/.github/workflows/release.yml b/compiler/rustc_codegen_gcc/.github/workflows/release.yml index d5c06a836db94..60e0943c87dac 100644 --- a/compiler/rustc_codegen_gcc/.github/workflows/release.yml +++ b/compiler/rustc_codegen_gcc/.github/workflows/release.yml @@ -13,7 +13,7 @@ env: jobs: build: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 strategy: fail-fast: false @@ -54,6 +54,7 @@ jobs: run: | ./y.sh prepare --only-libcore EMBED_LTO_BITCODE=1 ./y.sh build --sysroot --release --release-sysroot + ./y.sh test --mini-tests cargo test ./y.sh clean all @@ -70,4 +71,9 @@ jobs: run: | # FIXME(antoyo): we cannot enable LTO for stdarch tests currently because of some failing LTO tests using proc-macros. echo -n 'lto = "fat"' >> build_system/build_sysroot/Cargo.toml - EMBED_LTO_BITCODE=1 ./y.sh test --release --clean --release-sysroot --build-sysroot ${{ matrix.commands }} + EMBED_LTO_BITCODE=1 ./y.sh test --release --clean --release-sysroot --build-sysroot --keep-lto-tests ${{ matrix.commands }} + + - name: Run y.sh cargo build + run: | + EMBED_LTO_BITCODE=1 CHANNEL="release" ./y.sh cargo build --release --manifest-path tests/hello-world/Cargo.toml + # TODO: grep the asm output for "call my_func" and fail if it is found. diff --git a/compiler/rustc_codegen_gcc/.github/workflows/stdarch.yml b/compiler/rustc_codegen_gcc/.github/workflows/stdarch.yml index d8818eefa96d8..d5ae6144496f1 100644 --- a/compiler/rustc_codegen_gcc/.github/workflows/stdarch.yml +++ b/compiler/rustc_codegen_gcc/.github/workflows/stdarch.yml @@ -73,10 +73,6 @@ jobs: echo "LD_LIBRARY_PATH="$(./y.sh info | grep -v Using) >> $GITHUB_ENV echo "LIBRARY_PATH="$(./y.sh info | grep -v Using) >> $GITHUB_ENV - - name: Build (part 2) - run: | - cargo test - - name: Clean if: ${{ !matrix.cargo_runner }} run: | @@ -92,6 +88,7 @@ jobs: if: ${{ !matrix.cargo_runner }} run: | ./y.sh test --release --clean --release-sysroot --build-sysroot --mini-tests --std-tests --test-libcore + cargo test - name: Run stdarch tests if: ${{ !matrix.cargo_runner }} diff --git a/compiler/rustc_codegen_gcc/.rustfmt.toml b/compiler/rustc_codegen_gcc/.rustfmt.toml index 725aec25a0718..a11bc41680d24 100644 --- a/compiler/rustc_codegen_gcc/.rustfmt.toml +++ b/compiler/rustc_codegen_gcc/.rustfmt.toml @@ -1,3 +1,5 @@ -version = "Two" +style_edition = "2024" use_small_heuristics = "Max" merge_derives = false +group_imports = "StdExternalCrate" +imports_granularity = "Module" diff --git a/compiler/rustc_codegen_gcc/Cargo.lock b/compiler/rustc_codegen_gcc/Cargo.lock index 6b06e7d7f278a..636e75b94a3f2 100644 --- a/compiler/rustc_codegen_gcc/Cargo.lock +++ b/compiler/rustc_codegen_gcc/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. 
-version = 3 +version = 4 [[package]] name = "aho-corasick" @@ -11,12 +11,40 @@ dependencies = [ "memchr", ] +[[package]] +name = "bitflags" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" + [[package]] name = "boml" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85fdb93f04c73bff54305fa437ffea5449c41edcaadfe882f35836206b166ac5" +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "errno" +version = "0.3.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" +dependencies = [ + "libc", + "windows-sys", +] + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + [[package]] name = "fm" version = "0.2.2" @@ -28,18 +56,18 @@ dependencies = [ [[package]] name = "gccjit" -version = "2.2.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bb376e98c82d9284c3a17fc1d6bf9bc921055418950238d7a553c27a7e1f6ab" +checksum = "72fd91f4adbf02b53cfc73c97bc33c5f253009043f30c56a5ec08dd5c8094dc8" dependencies = [ "gccjit_sys", ] [[package]] name = "gccjit_sys" -version = "0.3.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93b4b1be553b5df790bf25ca2a1d6add81727dc29f8d5c8742468ed306d621d1" +checksum = "0fb7b8f48a75e2cfe78c3d9a980b32771c34ffd12d196021ab3f98c49fbd2f0d" dependencies = [ "libc", ] @@ -77,9 +105,15 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.150" +version = "0.2.168" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c" +checksum = "5aaeb2981e0606ca11d79718f8bb01164f1d6ed75080182d3abf017e6d244b6d" + +[[package]] +name = "linux-raw-sys" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" [[package]] name = "memchr" @@ -97,6 +131,12 @@ dependencies = [ "libc", ] +[[package]] +name = "once_cell" +version = "1.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" + [[package]] name = "regex" version = "1.8.4" @@ -121,6 +161,20 @@ dependencies = [ "boml", "gccjit", "lang_tester", + "tempfile", +] + +[[package]] +name = "rustix" +version = "0.38.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f93dc38ecbab2eb790ff964bb77fa94faf256fd3e73285fd7ba0903b76bedb85" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys", + "windows-sys", ] [[package]] @@ -132,6 +186,19 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "tempfile" +version = "3.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c" +dependencies = [ + "cfg-if", + "fastrand", + "once_cell", + "rustix", + "windows-sys", +] + [[package]] name = "termcolor" version = "1.2.0" @@ -205,3 +272,76 @@ name = "winapi-x86_64-pc-windows-gnu" version 
= "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_gnullvm", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" diff --git a/compiler/rustc_codegen_gcc/Cargo.toml b/compiler/rustc_codegen_gcc/Cargo.toml index 4828b7ddb1686..63d37358561e6 100644 --- a/compiler/rustc_codegen_gcc/Cargo.toml +++ b/compiler/rustc_codegen_gcc/Cargo.toml @@ -22,15 +22,16 @@ master = ["gccjit/master"] default = ["master"] [dependencies] -gccjit = "2.2" +gccjit = "2.4" #gccjit = { git = "https://github.com/rust-lang/gccjit.rs" } # Local copy. #gccjit = { path = "../gccjit.rs" } [dev-dependencies] -lang_tester = "0.8.0" boml = "0.3.1" +lang_tester = "0.8.0" +tempfile = "3.7.1" [profile.dev] # By compiling dependencies with optimizations, performing tests gets much faster. 
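For reference, the __rust_u128_mulo helper that the cg_clif hunks earlier in this patch rely on (see the codegen_i128.rs and compiler_builtins.rs changes above) now returns the 128-bit product by value and reports overflow through an out-parameter instead of returning a (u128, bool) pair. A minimal sketch of that calling convention in plain Rust, using a hypothetical demo_u128_mulo stand-in rather than the real compiler-builtins symbol:

// Hypothetical stand-in for the `__rust_u128_mulo(a, b, &mut oflow) -> u128`
// convention shown in the patch; the real symbol is provided by compiler-builtins.
fn demo_u128_mulo(a: u128, b: u128, oflow: &mut i32) -> u128 {
    let (product, overflowed) = a.overflowing_mul(b);
    // The callee writes the overflow flag through the out-parameter...
    *oflow = overflowed as i32;
    // ...and returns the (wrapping) product by value.
    product
}

fn main() {
    let mut oflow = 0;
    let product = demo_u128_mulo(u128::MAX, 2, &mut oflow);
    // The caller rebuilds the `(value, overflowed)` pair from the return value and
    // the flag, which is what cg_clif's maybe_codegen_mul_checked does above.
    assert_eq!((product, oflow != 0), (u128::MAX.wrapping_mul(2), true));
}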
diff --git a/compiler/rustc_codegen_gcc/build_system/src/test.rs b/compiler/rustc_codegen_gcc/build_system/src/test.rs index c69e240c01de4..7cc7336612c79 100644 --- a/compiler/rustc_codegen_gcc/build_system/src/test.rs +++ b/compiler/rustc_codegen_gcc/build_system/src/test.rs @@ -93,6 +93,7 @@ struct TestArg { sysroot_panic_abort: bool, config_info: ConfigInfo, sysroot_features: Vec, + keep_lto_tests: bool, } impl TestArg { @@ -128,6 +129,9 @@ impl TestArg { "--sysroot-panic-abort" => { test_arg.sysroot_panic_abort = true; } + "--keep-lto-tests" => { + test_arg.keep_lto_tests = true; + } "--sysroot-features" => match args.next() { Some(feature) if !feature.is_empty() => { test_arg.sysroot_features.push(feature); @@ -194,7 +198,7 @@ fn build_if_no_backend(env: &Env, args: &TestArg) -> Result<(), String> { } fn clean(_env: &Env, args: &TestArg) -> Result<(), String> { - let _ = std::fs::remove_dir_all(&args.config_info.cargo_target_dir); + let _ = remove_dir_all(&args.config_info.cargo_target_dir); let path = Path::new(&args.config_info.cargo_target_dir).join("gccjit"); create_dir(&path) } @@ -641,7 +645,7 @@ fn test_projects(env: &Env, args: &TestArg) -> Result<(), String> { //failing test is fixed upstream. //"https://github.com/marshallpierce/rust-base64", // FIXME: one test is OOM-killed. // TODO: ignore the base64 test that is OOM-killed. - "https://github.com/time-rs/time", + //"https://github.com/time-rs/time", // FIXME: one test fails (https://github.com/time-rs/time/issues/719). "https://github.com/rust-lang/log", "https://github.com/bitflags/bitflags", //"https://github.com/serde-rs/serde", // FIXME: one test fails. @@ -835,8 +839,7 @@ fn valid_ui_error_pattern_test(file: &str) -> bool { .any(|to_ignore| file.ends_with(to_ignore)) } -#[rustfmt::skip] -fn contains_ui_error_patterns(file_path: &Path) -> Result { +fn contains_ui_error_patterns(file_path: &Path, keep_lto_tests: bool) -> Result { // Tests generating errors. let file = File::open(file_path) .map_err(|error| format!("Failed to read `{}`: {:?}", file_path.display(), error))?; @@ -849,22 +852,38 @@ fn contains_ui_error_patterns(file_path: &Path) -> Result { "//@ error-pattern:", "//@ build-fail", "//@ run-fail", + "//@ known-bug", "-Cllvm-args", "//~", "thread", ] - .iter() - .any(|check| line.contains(check)) + .iter() + .any(|check| line.contains(check)) + { + return Ok(true); + } + + if !keep_lto_tests + && (line.contains("-Clto") + || line.contains("-C lto") + || line.contains("compile-flags: -Clinker-plugin-lto")) + && !line.contains("-Clto=thin") { return Ok(true); } + if line.contains("//[") && line.contains("]~") { return Ok(true); } } - if file_path.display().to_string().contains("ambiguous-4-extern.rs") { + let file_path = file_path.display().to_string(); + if file_path.contains("ambiguous-4-extern.rs") { eprintln!("nothing found for {file_path:?}"); } + // The files in this directory contain errors. + if file_path.contains("/error-emitter/") { + return Ok(true); + } Ok(false) } @@ -903,7 +922,7 @@ where rust_path.join("tests/ui"), &mut |_dir| Ok(()), &mut |file_path| { - if contains_ui_error_patterns(file_path)? { + if contains_ui_error_patterns(file_path, args.keep_lto_tests)? 
{ Ok(()) } else { remove_file(file_path).map_err(|e| e.to_string()) @@ -928,7 +947,7 @@ where .iter() .any(|name| *name == dir_name) { - std::fs::remove_dir_all(dir).map_err(|error| { + remove_dir_all(dir).map_err(|error| { format!("Failed to remove folder `{}`: {:?}", dir.display(), error) })?; } @@ -940,27 +959,42 @@ where // These two functions are used to remove files that are known to not be working currently // with the GCC backend to reduce noise. - fn dir_handling(dir: &Path) -> Result<(), String> { - if dir.file_name().map(|name| name == "auxiliary").unwrap_or(true) { - return Ok(()); - } + fn dir_handling(keep_lto_tests: bool) -> impl Fn(&Path) -> Result<(), String> { + move |dir| { + if dir.file_name().map(|name| name == "auxiliary").unwrap_or(true) { + return Ok(()); + } - walk_dir(dir, &mut dir_handling, &mut file_handling, false) - } - fn file_handling(file_path: &Path) -> Result<(), String> { - if !file_path.extension().map(|extension| extension == "rs").unwrap_or(false) { - return Ok(()); + walk_dir( + dir, + &mut dir_handling(keep_lto_tests), + &mut file_handling(keep_lto_tests), + false, + ) } - let path_str = file_path.display().to_string().replace("\\", "/"); - if valid_ui_error_pattern_test(&path_str) { - return Ok(()); - } else if contains_ui_error_patterns(file_path)? { - return remove_file(&file_path); + } + + fn file_handling(keep_lto_tests: bool) -> impl Fn(&Path) -> Result<(), String> { + move |file_path| { + if !file_path.extension().map(|extension| extension == "rs").unwrap_or(false) { + return Ok(()); + } + let path_str = file_path.display().to_string().replace("\\", "/"); + if valid_ui_error_pattern_test(&path_str) { + return Ok(()); + } else if contains_ui_error_patterns(file_path, keep_lto_tests)? { + return remove_file(&file_path); + } + Ok(()) } - Ok(()) } - walk_dir(rust_path.join("tests/ui"), &mut dir_handling, &mut file_handling, false)?; + walk_dir( + rust_path.join("tests/ui"), + &mut dir_handling(args.keep_lto_tests), + &mut file_handling(args.keep_lto_tests), + false, + )?; } let nb_parts = args.nb_parts.unwrap_or(0); if nb_parts > 0 { @@ -1173,7 +1207,7 @@ fn remove_files_callback<'a>( files.split('\n').map(|line| line.trim()).filter(|line| !line.is_empty()) { let path = rust_path.join(file); - if let Err(e) = std::fs::remove_dir_all(&path) { + if let Err(e) = remove_dir_all(&path) { println!("Failed to remove directory `{}`: {}", path.display(), e); } } diff --git a/compiler/rustc_codegen_gcc/example/mini_core.rs b/compiler/rustc_codegen_gcc/example/mini_core.rs index cdd151613df84..5a4ee0a198cef 100644 --- a/compiler/rustc_codegen_gcc/example/mini_core.rs +++ b/compiler/rustc_codegen_gcc/example/mini_core.rs @@ -170,6 +170,14 @@ impl Add for usize { } } +impl Add for isize { + type Output = Self; + + fn add(self, rhs: Self) -> Self { + self + rhs + } +} + #[lang = "sub"] pub trait Sub { type Output; @@ -681,7 +689,7 @@ impl Index for [T] { } } -extern { +extern "C" { type VaListImpl; } diff --git a/compiler/rustc_codegen_gcc/example/mini_core_hello_world.rs b/compiler/rustc_codegen_gcc/example/mini_core_hello_world.rs index dcfa34cb729d8..1d51e0a1856ba 100644 --- a/compiler/rustc_codegen_gcc/example/mini_core_hello_world.rs +++ b/compiler/rustc_codegen_gcc/example/mini_core_hello_world.rs @@ -258,13 +258,13 @@ fn main() { assert_eq!(((|()| 42u8) as fn(()) -> u8)(()), 42); - extern { + extern "C" { #[linkage = "weak"] static ABC: *const u8; } { - extern { + extern "C" { #[linkage = "weak"] static ABC: *const u8; } diff --git 
a/compiler/rustc_codegen_gcc/example/mod_bench.rs b/compiler/rustc_codegen_gcc/example/mod_bench.rs index cae911c1073d5..e8a9cade74745 100644 --- a/compiler/rustc_codegen_gcc/example/mod_bench.rs +++ b/compiler/rustc_codegen_gcc/example/mod_bench.rs @@ -3,7 +3,7 @@ #![allow(internal_features)] #[link(name = "c")] -extern {} +extern "C" {} #[panic_handler] fn panic_handler(_: &core::panic::PanicInfo<'_>) -> ! { diff --git a/compiler/rustc_codegen_gcc/example/std_example.rs b/compiler/rustc_codegen_gcc/example/std_example.rs index 9e43b4635f0d3..5fa1e0afb0603 100644 --- a/compiler/rustc_codegen_gcc/example/std_example.rs +++ b/compiler/rustc_codegen_gcc/example/std_example.rs @@ -7,7 +7,7 @@ use std::arch::x86_64::*; use std::io::Write; use std::ops::Coroutine; -extern { +extern "C" { pub fn printf(format: *const i8, ...) -> i32; } diff --git a/compiler/rustc_codegen_gcc/libgccjit.version b/compiler/rustc_codegen_gcc/libgccjit.version index b9bbbd324c3b9..ff58accec1d4c 100644 --- a/compiler/rustc_codegen_gcc/libgccjit.version +++ b/compiler/rustc_codegen_gcc/libgccjit.version @@ -1 +1 @@ -e744a9459d33864067214741daf5c5bc2a7b88c6 +45648c2edd4ecd862d9f08196d3d6c6ccba79f07 diff --git a/compiler/rustc_codegen_gcc/patches/0022-core-Disable-not-compiling-tests.patch b/compiler/rustc_codegen_gcc/patches/0022-core-Disable-not-compiling-tests.patch index b2ab05691ecbf..70e3e2ba7fee1 100644 --- a/compiler/rustc_codegen_gcc/patches/0022-core-Disable-not-compiling-tests.patch +++ b/compiler/rustc_codegen_gcc/patches/0022-core-Disable-not-compiling-tests.patch @@ -1,7 +1,7 @@ -From 18793c6109890493ceb3ff36549849a36e3d8022 Mon Sep 17 00:00:00 2001 +From af0e237f056fa838c77463381a19b0dc993c0a35 Mon Sep 17 00:00:00 2001 From: None Date: Sun, 1 Sep 2024 11:42:17 -0400 -Subject: [PATCH] [core] Disable not compiling tests +Subject: [PATCH] Disable not compiling tests --- library/core/tests/Cargo.toml | 14 ++++++++++++++ @@ -30,14 +30,15 @@ index 0000000..ca326ac +rand = { version = "0.8.5", default-features = false } +rand_xorshift = { version = "0.3.0", default-features = false } diff --git a/library/core/tests/lib.rs b/library/core/tests/lib.rs -index 1e336bf..5800ebb 100644 +index a4a7946..ecfe43f 100644 --- a/library/core/tests/lib.rs +++ b/library/core/tests/lib.rs @@ -1,4 +1,5 @@ // tidy-alphabetical-start +#![cfg(test)] - #![cfg_attr(bootstrap, feature(offset_of_nested))] #![cfg_attr(target_has_atomic = "128", feature(integer_atomics))] #![cfg_attr(test, feature(cfg_match))] --- -2.46.0 + #![feature(alloc_layout_extra)] +-- +2.47.1 + diff --git a/compiler/rustc_codegen_gcc/patches/libgccjit12/0001-core-Disable-portable-simd-test.patch b/compiler/rustc_codegen_gcc/patches/libgccjit12/0001-core-Disable-portable-simd-test.patch index 01461987ffb14..9ef5e0e4f4672 100644 --- a/compiler/rustc_codegen_gcc/patches/libgccjit12/0001-core-Disable-portable-simd-test.patch +++ b/compiler/rustc_codegen_gcc/patches/libgccjit12/0001-core-Disable-portable-simd-test.patch @@ -27,5 +27,4 @@ index b71786c..cf484d5 100644 mod slice; mod str; mod str_lossy; --- -2.45.2 +-- 2.45.2 diff --git a/compiler/rustc_codegen_gcc/rust-toolchain b/compiler/rustc_codegen_gcc/rust-toolchain index dca3b0c22e4e5..940b3de9f7453 100644 --- a/compiler/rustc_codegen_gcc/rust-toolchain +++ b/compiler/rustc_codegen_gcc/rust-toolchain @@ -1,3 +1,3 @@ [toolchain] -channel = "nightly-2024-08-11" +channel = "nightly-2025-01-12" components = ["rust-src", "rustc-dev", "llvm-tools-preview"] diff --git a/compiler/rustc_codegen_gcc/src/allocator.rs 
b/compiler/rustc_codegen_gcc/src/allocator.rs index f13a75648aea8..416f3231a13c4 100644 --- a/compiler/rustc_codegen_gcc/src/allocator.rs +++ b/compiler/rustc_codegen_gcc/src/allocator.rs @@ -1,6 +1,6 @@ -#[cfg(feature = "master")] -use gccjit::FnAttribute; use gccjit::{Context, FunctionType, GlobalKind, ToRValue, Type}; +#[cfg(feature = "master")] +use gccjit::{FnAttribute, VarAttribute}; use rustc_ast::expand::allocator::{ ALLOCATOR_METHODS, AllocatorKind, AllocatorTy, NO_ALLOC_SHIM_IS_UNSTABLE, alloc_error_handler_name, default_fn_name, global_fn_name, @@ -10,6 +10,8 @@ use rustc_middle::ty::TyCtxt; use rustc_session::config::OomStrategy; use crate::GccContext; +#[cfg(feature = "master")] +use crate::base::symbol_visibility_to_gcc; pub(crate) unsafe fn codegen( tcx: TyCtxt<'_>, @@ -70,12 +72,20 @@ pub(crate) unsafe fn codegen( let name = OomStrategy::SYMBOL.to_string(); let global = context.new_global(None, GlobalKind::Exported, i8, name); + #[cfg(feature = "master")] + global.add_attribute(VarAttribute::Visibility(symbol_visibility_to_gcc( + tcx.sess.default_visibility(), + ))); let value = tcx.sess.opts.unstable_opts.oom.should_panic(); let value = context.new_rvalue_from_int(i8, value as i32); global.global_set_initializer_rvalue(value); let name = NO_ALLOC_SHIM_IS_UNSTABLE.to_string(); let global = context.new_global(None, GlobalKind::Exported, i8, name); + #[cfg(feature = "master")] + global.add_attribute(VarAttribute::Visibility(symbol_visibility_to_gcc( + tcx.sess.default_visibility(), + ))); let value = context.new_rvalue_from_int(i8, 0); global.global_set_initializer_rvalue(value); } @@ -105,15 +115,9 @@ fn create_wrapper_function( ); #[cfg(feature = "master")] - match tcx.sess.default_visibility() { - rustc_target::spec::SymbolVisibility::Hidden => { - func.add_attribute(FnAttribute::Visibility(gccjit::Visibility::Hidden)) - } - rustc_target::spec::SymbolVisibility::Protected => { - func.add_attribute(FnAttribute::Visibility(gccjit::Visibility::Protected)) - } - rustc_target::spec::SymbolVisibility::Interposable => {} - } + func.add_attribute(FnAttribute::Visibility(symbol_visibility_to_gcc( + tcx.sess.default_visibility(), + ))); if tcx.sess.must_emit_unwind_tables() { // TODO(antoyo): emit unwind tables. diff --git a/compiler/rustc_codegen_gcc/src/attributes.rs b/compiler/rustc_codegen_gcc/src/attributes.rs index 028a5ab5f716b..69b04dd579693 100644 --- a/compiler/rustc_codegen_gcc/src/attributes.rs +++ b/compiler/rustc_codegen_gcc/src/attributes.rs @@ -20,7 +20,7 @@ fn inline_attr<'gcc, 'tcx>( ) -> Option> { match inline { InlineAttr::Hint => Some(FnAttribute::Inline), - InlineAttr::Always => Some(FnAttribute::AlwaysInline), + InlineAttr::Always | InlineAttr::Force { .. 
} => Some(FnAttribute::AlwaysInline), InlineAttr::Never => { if cx.sess().target.arch != "amdgpu" { Some(FnAttribute::NoInline) diff --git a/compiler/rustc_codegen_gcc/src/back/lto.rs b/compiler/rustc_codegen_gcc/src/back/lto.rs index ed92f9c52412c..e419bd1809900 100644 --- a/compiler/rustc_codegen_gcc/src/back/lto.rs +++ b/compiler/rustc_codegen_gcc/src/back/lto.rs @@ -35,16 +35,13 @@ use rustc_middle::bug; use rustc_middle::dep_graph::WorkProduct; use rustc_middle::middle::exported_symbols::{SymbolExportInfo, SymbolExportLevel}; use rustc_session::config::{CrateType, Lto}; +use rustc_target::spec::RelocModel; use tempfile::{TempDir, tempdir}; use crate::back::write::save_temp_bitcode; use crate::errors::{DynamicLinkingWithLTO, LtoBitcodeFromRlib, LtoDisallowed, LtoDylib}; use crate::{GccCodegenBackend, GccContext, SyncContext, to_gcc_opt_level}; -/// We keep track of the computed LTO cache keys from the previous -/// session to determine which CGUs we can reuse. -//pub const THIN_LTO_KEYS_INCR_COMP_FILE_NAME: &str = "thin-lto-past-keys.bin"; - pub fn crate_type_allows_lto(crate_type: CrateType) -> bool { match crate_type { CrateType::Executable | CrateType::Dylib | CrateType::Staticlib | CrateType::Cdylib => true, @@ -54,7 +51,7 @@ pub fn crate_type_allows_lto(crate_type: CrateType) -> bool { struct LtoData { // TODO(antoyo): use symbols_below_threshold. - //symbols_below_threshold: Vec, + //symbols_below_threshold: Vec, upstream_modules: Vec<(SerializedModule, CString)>, tmp_path: TempDir, } @@ -83,7 +80,7 @@ fn prepare_lto( let symbol_filter = &|&(ref name, info): &(String, SymbolExportInfo)| { if info.level.is_below_threshold(export_threshold) || info.used { - Some(CString::new(name.as_str()).unwrap()) + Some(name.clone()) } else { None } @@ -91,7 +88,7 @@ fn prepare_lto( let exported_symbols = cgcx.exported_symbols.as_ref().expect("needs exported symbols for LTO"); let mut symbols_below_threshold = { let _timer = cgcx.prof.generic_activity("GCC_lto_generate_symbols_below_threshold"); - exported_symbols[&LOCAL_CRATE].iter().filter_map(symbol_filter).collect::>() + exported_symbols[&LOCAL_CRATE].iter().filter_map(symbol_filter).collect::>() }; info!("{} symbols to preserve in this crate", symbols_below_threshold.len()); @@ -159,11 +156,7 @@ fn prepare_lto( } } - Ok(LtoData { - //symbols_below_threshold, - upstream_modules, - tmp_path, - }) + Ok(LtoData { upstream_modules, tmp_path }) } fn save_as_file(obj: &[u8], path: &Path) -> Result<(), LtoBitcodeFromRlib> { @@ -191,7 +184,7 @@ pub(crate) fn run_fat( cached_modules, lto_data.upstream_modules, lto_data.tmp_path, - //&symbols_below_threshold, + //<o_data.symbols_below_threshold, ) } @@ -202,7 +195,7 @@ fn fat_lto( cached_modules: Vec<(SerializedModule, WorkProduct)>, mut serialized_modules: Vec<(SerializedModule, CString)>, tmp_path: TempDir, - //symbols_below_threshold: &[*const libc::c_char], + //symbols_below_threshold: &[String], ) -> Result, FatalError> { let _timer = cgcx.prof.generic_activity("GCC_fat_lto_build_monolithic_module"); info!("going for a fat lto"); @@ -327,6 +320,7 @@ fn fat_lto( ptr as *const *const libc::c_char, symbols_below_threshold.len() as libc::size_t, );*/ + save_temp_bitcode(cgcx, &module, "lto.after-restriction"); //} } @@ -363,8 +357,6 @@ pub(crate) fn run_thin( let dcx = cgcx.create_dcx(); let dcx = dcx.handle(); let lto_data = prepare_lto(cgcx, dcx)?; - /*let symbols_below_threshold = - symbols_below_threshold.iter().map(|c| c.as_ptr()).collect::>();*/ if cgcx.opts.cg.linker_plugin_lto.enabled() { 
unreachable!( "We should never reach this case if the LTO step \ @@ -377,7 +369,8 @@ pub(crate) fn run_thin( modules, lto_data.upstream_modules, lto_data.tmp_path, - cached_modules, /*, &symbols_below_threshold*/ + cached_modules, + //<o_data.symbols_below_threshold, ) } @@ -428,7 +421,7 @@ fn thin_lto( serialized_modules: Vec<(SerializedModule, CString)>, tmp_path: TempDir, cached_modules: Vec<(SerializedModule, WorkProduct)>, - //symbols_below_threshold: &[*const libc::c_char], + //_symbols_below_threshold: &[String], ) -> Result<(Vec>, Vec), FatalError> { let _timer = cgcx.prof.generic_activity("LLVM_thin_lto_global_analysis"); info!("going for that thin, thin LTO"); @@ -640,7 +633,13 @@ pub unsafe fn optimize_thin_module( } }; let module = ModuleCodegen { - module_llvm: GccContext { context, should_combine_object_files, temp_dir: None }, + module_llvm: GccContext { + context, + should_combine_object_files, + // TODO(antoyo): use the correct relocation model here. + relocation_model: RelocModel::Pic, + temp_dir: None, + }, name: thin_module.name().to_string(), kind: ModuleKind::Regular, }; @@ -660,9 +659,7 @@ pub unsafe fn optimize_thin_module( { let _timer = cgcx.prof.generic_activity_with_arg("LLVM_thin_lto_rename", thin_module.name()); - if !llvm::LLVMRustPrepareThinLTORename(thin_module.shared.data.0, llmod, target) { - return Err(write::llvm_err(&dcx, LlvmError::PrepareThinLtoModule)); - } + unsafe { llvm::LLVMRustPrepareThinLTORename(thin_module.shared.data.0, llmod, target) }; save_temp_bitcode(cgcx, &module, "thin-lto-after-rename"); } diff --git a/compiler/rustc_codegen_gcc/src/back/write.rs b/compiler/rustc_codegen_gcc/src/back/write.rs index 802968979c722..51c5ba73e32bc 100644 --- a/compiler/rustc_codegen_gcc/src/back/write.rs +++ b/compiler/rustc_codegen_gcc/src/back/write.rs @@ -1,6 +1,6 @@ use std::{env, fs}; -use gccjit::OutputKind; +use gccjit::{Context, OutputKind}; use rustc_codegen_ssa::back::link::ensure_removed; use rustc_codegen_ssa::back::write::{BitcodeSection, CodegenContext, EmitObj, ModuleConfig}; use rustc_codegen_ssa::{CompiledModule, ModuleCodegen}; @@ -10,6 +10,7 @@ use rustc_session::config::OutputType; use rustc_span::fatal_error::FatalError; use rustc_target::spec::SplitDebuginfo; +use crate::base::add_pic_option; use crate::errors::CopyBitcode; use crate::{GccCodegenBackend, GccContext}; @@ -31,51 +32,87 @@ pub(crate) unsafe fn codegen( // NOTE: Only generate object files with GIMPLE when this environment variable is set for // now because this requires a particular setup (same gcc/lto1/lto-wrapper commit as libgccjit). - // TODO: remove this environment variable. + // TODO(antoyo): remove this environment variable. 
let fat_lto = env::var("EMBED_LTO_BITCODE").as_deref() == Ok("1"); let bc_out = cgcx.output_filenames.temp_path(OutputType::Bitcode, module_name); let obj_out = cgcx.output_filenames.temp_path(OutputType::Object, module_name); - if config.bitcode_needed() && fat_lto { - let _timer = cgcx - .prof - .generic_activity_with_arg("GCC_module_codegen_make_bitcode", &*module.name); - - // TODO(antoyo) - /*if let Some(bitcode_filename) = bc_out.file_name() { - cgcx.prof.artifact_size( - "llvm_bitcode", - bitcode_filename.to_string_lossy(), - data.len() as u64, - ); - }*/ - - if config.emit_bc || config.emit_obj == EmitObj::Bitcode { + if config.bitcode_needed() { + if fat_lto { let _timer = cgcx .prof - .generic_activity_with_arg("GCC_module_codegen_emit_bitcode", &*module.name); - context.add_command_line_option("-flto=auto"); - context.add_command_line_option("-flto-partition=one"); - // TODO: remove since we don't want fat objects when it is for Bitcode only. - context.add_command_line_option("-ffat-lto-objects"); - context - .compile_to_file(OutputKind::ObjectFile, bc_out.to_str().expect("path to str")); - } + .generic_activity_with_arg("GCC_module_codegen_make_bitcode", &*module.name); - if config.emit_obj == EmitObj::ObjectCode(BitcodeSection::Full) { - let _timer = cgcx - .prof - .generic_activity_with_arg("GCC_module_codegen_embed_bitcode", &*module.name); - // TODO(antoyo): maybe we should call embed_bitcode to have the proper iOS fixes? - //embed_bitcode(cgcx, llcx, llmod, &config.bc_cmdline, data); - - context.add_command_line_option("-flto=auto"); - context.add_command_line_option("-flto-partition=one"); - context.add_command_line_option("-ffat-lto-objects"); - // TODO(antoyo): Send -plugin/usr/lib/gcc/x86_64-pc-linux-gnu/11.1.0/liblto_plugin.so to linker (this should be done when specifying the appropriate rustc cli argument). - context - .compile_to_file(OutputKind::ObjectFile, bc_out.to_str().expect("path to str")); + // TODO(antoyo) + /*if let Some(bitcode_filename) = bc_out.file_name() { + cgcx.prof.artifact_size( + "llvm_bitcode", + bitcode_filename.to_string_lossy(), + data.len() as u64, + ); + }*/ + + if config.emit_bc || config.emit_obj == EmitObj::Bitcode { + let _timer = cgcx.prof.generic_activity_with_arg( + "GCC_module_codegen_emit_bitcode", + &*module.name, + ); + context.add_command_line_option("-flto=auto"); + context.add_command_line_option("-flto-partition=one"); + // TODO(antoyo): remove since we don't want fat objects when it is for Bitcode only. + context.add_command_line_option("-ffat-lto-objects"); + context.compile_to_file( + OutputKind::ObjectFile, + bc_out.to_str().expect("path to str"), + ); + } + + if config.emit_obj == EmitObj::ObjectCode(BitcodeSection::Full) { + let _timer = cgcx.prof.generic_activity_with_arg( + "GCC_module_codegen_embed_bitcode", + &*module.name, + ); + // TODO(antoyo): maybe we should call embed_bitcode to have the proper iOS fixes? + //embed_bitcode(cgcx, llcx, llmod, &config.bc_cmdline, data); + + context.add_command_line_option("-flto=auto"); + context.add_command_line_option("-flto-partition=one"); + context.add_command_line_option("-ffat-lto-objects"); + // TODO(antoyo): Send -plugin/usr/lib/gcc/x86_64-pc-linux-gnu/11.1.0/liblto_plugin.so to linker (this should be done when specifying the appropriate rustc cli argument). 
+ context.compile_to_file( + OutputKind::ObjectFile, + bc_out.to_str().expect("path to str"), + ); + } + } else { + if config.emit_bc || config.emit_obj == EmitObj::Bitcode { + let _timer = cgcx.prof.generic_activity_with_arg( + "GCC_module_codegen_emit_bitcode", + &*module.name, + ); + context.compile_to_file( + OutputKind::ObjectFile, + bc_out.to_str().expect("path to str"), + ); + } + + if config.emit_obj == EmitObj::ObjectCode(BitcodeSection::Full) { + // TODO(antoyo): we might want to emit an error here, saying to set the + // environment variable EMBED_LTO_BITCODE. + let _timer = cgcx.prof.generic_activity_with_arg( + "GCC_module_codegen_embed_bitcode", + &*module.name, + ); + // TODO(antoyo): maybe we should call embed_bitcode to have the proper iOS fixes? + //embed_bitcode(cgcx, llcx, llmod, &config.bc_cmdline, data); + + // TODO(antoyo): Send -plugin/usr/lib/gcc/x86_64-pc-linux-gnu/11.1.0/liblto_plugin.so to linker (this should be done when specifying the appropriate rustc cli argument). + context.compile_to_file( + OutputKind::ObjectFile, + bc_out.to_str().expect("path to str"), + ); + } + } } @@ -123,6 +160,8 @@ pub(crate) unsafe fn codegen( // NOTE: without -fuse-linker-plugin, we get the following error: // lto1: internal compiler error: decompressed stream: Destination buffer is too small + // TODO(antoyo): since we do not do LTO when the linker is invoked anymore, perhaps + // the following flag is not necessary anymore. context.add_driver_option("-fuse-linker-plugin"); } @@ -131,11 +170,43 @@ pub(crate) unsafe fn codegen( // /usr/bin/ld: cannot find -lgcc_s: No such file or directory context.add_driver_option("-nostdlib"); - // NOTE: this doesn't actually generate an executable. With the above flags, it combines the .o files together in another .o. - context.compile_to_file( - OutputKind::Executable, - obj_out.to_str().expect("path to str"), - ); + let path = obj_out.to_str().expect("path to str"); + + if fat_lto { + let lto_path = format!("{}.lto", path); + // FIXME(antoyo): The LTO frontend generates the following warning: + // ../build_sysroot/sysroot_src/library/core/src/num/dec2flt/lemire.rs:150:15: warning: type of ‘_ZN4core3num7dec2flt5table17POWER_OF_FIVE_12817ha449a68fb31379e4E’ does not match original declaration [-Wlto-type-mismatch] + // 150 | let (lo5, hi5) = POWER_OF_FIVE_128[index]; + // | ^ + // lto1: note: ‘_ZN4core3num7dec2flt5table17POWER_OF_FIVE_12817ha449a68fb31379e4E’ was previously declared here + // + // This option is to mute it to make the UI tests pass with LTO enabled. + context.add_driver_option("-Wno-lto-type-mismatch"); + // NOTE: this doesn't actually generate an executable. With the above + // flags, it combines the .o files together in another .o. + context.compile_to_file(OutputKind::Executable, &lto_path); + + let context = Context::default(); + if cgcx.target_arch == "x86" || cgcx.target_arch == "x86_64" { + // NOTE: it seems we need to use add_driver_option instead of + // add_command_line_option here because we use the LTO frontend via gcc. + context.add_driver_option("-masm=intel"); + } + + // NOTE: these two options are needed to invoke LTO to produce an object file. + // We need to initiate a second compilation because the arguments "-x lto" + // need to be at the very beginning. 
+ context.add_driver_option("-x"); + context.add_driver_option("lto"); + add_pic_option(&context, module.module_llvm.relocation_model); + context.add_driver_option(lto_path); + + context.compile_to_file(OutputKind::ObjectFile, path); + } else { + // NOTE: this doesn't actually generate an executable. With the above + // flags, it combines the .o files together in another .o. + context.compile_to_file(OutputKind::Executable, path); + } } else { context.compile_to_file( OutputKind::ObjectFile, diff --git a/compiler/rustc_codegen_gcc/src/base.rs b/compiler/rustc_codegen_gcc/src/base.rs index 18aa32754e1d9..c9701fb9885c1 100644 --- a/compiler/rustc_codegen_gcc/src/base.rs +++ b/compiler/rustc_codegen_gcc/src/base.rs @@ -3,7 +3,7 @@ use std::env; use std::sync::Arc; use std::time::Instant; -use gccjit::{CType, FunctionType, GlobalKind}; +use gccjit::{CType, Context, FunctionType, GlobalKind}; use rustc_codegen_ssa::base::maybe_create_entry_wrapper; use rustc_codegen_ssa::mono_item::MonoItemExt; use rustc_codegen_ssa::traits::DebugInfoCodegenMethods; @@ -15,21 +15,32 @@ use rustc_middle::mir::mono::Visibility; use rustc_middle::ty::TyCtxt; use rustc_session::config::DebugInfo; use rustc_span::Symbol; -use rustc_target::spec::PanicStrategy; +#[cfg(feature = "master")] +use rustc_target::spec::SymbolVisibility; +use rustc_target::spec::{PanicStrategy, RelocModel}; use crate::builder::Builder; use crate::context::CodegenCx; use crate::{GccContext, LockedTargetInfo, SyncContext, gcc_util, new_context}; #[cfg(feature = "master")] -pub fn visibility_to_gcc(linkage: Visibility) -> gccjit::Visibility { - match linkage { +pub fn visibility_to_gcc(visibility: Visibility) -> gccjit::Visibility { + match visibility { Visibility::Default => gccjit::Visibility::Default, Visibility::Hidden => gccjit::Visibility::Hidden, Visibility::Protected => gccjit::Visibility::Protected, } } +#[cfg(feature = "master")] +pub fn symbol_visibility_to_gcc(visibility: SymbolVisibility) -> gccjit::Visibility { + match visibility { + SymbolVisibility::Hidden => gccjit::Visibility::Hidden, + SymbolVisibility::Protected => gccjit::Visibility::Protected, + SymbolVisibility::Interposable => gccjit::Visibility::Default, + } +} + pub fn global_linkage_to_gcc(linkage: Linkage) -> GlobalKind { match linkage { Linkage::External => GlobalKind::Imported, @@ -140,9 +151,7 @@ pub fn compile_codegen_unit( }); } - if tcx.sess.relocation_model() == rustc_target::spec::RelocModel::Static { - context.add_command_line_option("-fno-pie"); - } + add_pic_option(&context, tcx.sess.relocation_model()); let target_cpu = gcc_util::target_cpu(tcx.sess); if target_cpu != "generic" { @@ -199,12 +208,13 @@ pub fn compile_codegen_unit( let f32_type_supported = target_info.supports_target_dependent_type(CType::Float32); let f64_type_supported = target_info.supports_target_dependent_type(CType::Float64); let f128_type_supported = target_info.supports_target_dependent_type(CType::Float128); + let u128_type_supported = target_info.supports_target_dependent_type(CType::UInt128t); // TODO: improve this to avoid passing that many arguments. 
let cx = CodegenCx::new( &context, cgu, tcx, - target_info.supports_128bit_int(), + u128_type_supported, f16_type_supported, f32_type_supported, f64_type_supported, @@ -235,6 +245,7 @@ pub fn compile_codegen_unit( name: cgu_name.to_string(), module_llvm: GccContext { context: Arc::new(SyncContext::new(context)), + relocation_model: tcx.sess.relocation_model(), should_combine_object_files: false, temp_dir: None, }, @@ -244,3 +255,24 @@ pub fn compile_codegen_unit( (module, cost) } + +pub fn add_pic_option<'gcc>(context: &Context<'gcc>, relocation_model: RelocModel) { + match relocation_model { + rustc_target::spec::RelocModel::Static => { + context.add_command_line_option("-fno-pie"); + context.add_driver_option("-fno-pie"); + } + rustc_target::spec::RelocModel::Pic => { + context.add_command_line_option("-fPIC"); + // NOTE: we use both add_command_line_option and add_driver_option because the usage in + // this module (compile_codegen_unit) requires add_command_line_option while the usage + // in the back::write module (codegen) requires add_driver_option. + context.add_driver_option("-fPIC"); + } + rustc_target::spec::RelocModel::Pie => { + context.add_command_line_option("-fPIE"); + context.add_driver_option("-fPIE"); + } + model => eprintln!("Unsupported relocation model: {:?}", model), + } +} diff --git a/compiler/rustc_codegen_gcc/src/builder.rs b/compiler/rustc_codegen_gcc/src/builder.rs index 9a142326ad196..89e5cf1b8c6b6 100644 --- a/compiler/rustc_codegen_gcc/src/builder.rs +++ b/compiler/rustc_codegen_gcc/src/builder.rs @@ -1102,18 +1102,24 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> { val: RValue<'gcc>, ptr: RValue<'gcc>, align: Align, - _flags: MemFlags, + flags: MemFlags, ) -> RValue<'gcc> { let ptr = self.check_store(val, ptr); let destination = ptr.dereference(self.location); // NOTE: libgccjit does not support specifying the alignment on the assignment, so we cast // to type so it gets the proper alignment. let destination_type = destination.to_rvalue().get_type().unqualified(); - let aligned_type = destination_type.get_aligned(align.bytes()).make_pointer(); - let aligned_destination = self.cx.context.new_bitcast(self.location, ptr, aligned_type); - let aligned_destination = aligned_destination.dereference(self.location); - self.llbb().add_assignment(self.location, aligned_destination, val); - // TODO(antoyo): handle align and flags. + let align = if flags.contains(MemFlags::UNALIGNED) { 1 } else { align.bytes() }; + let mut modified_destination_type = destination_type.get_aligned(align); + if flags.contains(MemFlags::VOLATILE) { + modified_destination_type = modified_destination_type.make_volatile(); + } + + let modified_ptr = + self.cx.context.new_cast(self.location, ptr, modified_destination_type.make_pointer()); + let modified_destination = modified_ptr.dereference(self.location); + self.llbb().add_assignment(self.location, modified_destination, val); + // TODO(antoyo): handle `MemFlags::NONTEMPORAL`. // NOTE: dummy value here since it's never used. FIXME(antoyo): API should not return a value here? // When adding support for NONTEMPORAL, make sure to not just emit MOVNT on x86; see the // LLVM backend for details. 
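[Editorial aside, not part of the patch: the `store_with_flags` hunk just above maps `MemFlags::UNALIGNED` to an alignment of 1 and `MemFlags::VOLATILE` to a volatile-qualified destination type before emitting the assignment, leaving `MemFlags::NONTEMPORAL` as a TODO. A minimal plain-Rust sketch of the behaviours being requested, using only standard-library pointer writes; the real lowering goes through libgccjit, and this sketch branches on the flags separately where the patch can apply them together.]

```rust
use std::ptr;

// Sketch only: mirrors what the flags ask of the backend, not the libgccjit lowering.
unsafe fn store_with_flags<T>(dst: *mut T, val: T, unaligned: bool, volatile: bool) {
    if volatile {
        // MemFlags::VOLATILE: the write must not be elided or reordered away.
        ptr::write_volatile(dst, val);
    } else if unaligned {
        // MemFlags::UNALIGNED: assume an alignment of 1 for the destination.
        ptr::write_unaligned(dst, val);
    } else {
        ptr::write(dst, val);
    }
}

fn main() {
    let mut x = 0u32;
    unsafe { store_with_flags(&mut x as *mut u32, 7, false, true) };
    assert_eq!(x, 7);
}
```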
@@ -1236,13 +1242,13 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> { } fn ptrtoint(&mut self, value: RValue<'gcc>, dest_ty: Type<'gcc>) -> RValue<'gcc> { - let usize_value = self.cx.const_bitcast(value, self.cx.type_isize()); + let usize_value = self.cx.context.new_cast(None, value, self.cx.type_isize()); self.intcast(usize_value, dest_ty, false) } fn inttoptr(&mut self, value: RValue<'gcc>, dest_ty: Type<'gcc>) -> RValue<'gcc> { let usize_value = self.intcast(value, self.cx.type_isize(), false); - self.cx.const_bitcast(usize_value, dest_ty) + self.cx.context.new_cast(None, usize_value, dest_ty) } fn bitcast(&mut self, value: RValue<'gcc>, dest_ty: Type<'gcc>) -> RValue<'gcc> { @@ -1901,6 +1907,15 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> { v2: RValue<'gcc>, mask: RValue<'gcc>, ) -> RValue<'gcc> { + // NOTE: if the `mask` is a constant value, the following code will copy it in many places, + // which will make GCC create a lot (+4000) local variables in some cases. + // So we assign it to an explicit local variable once to avoid this. + let func = self.current_func(); + let mask_var = func.new_local(self.location, mask.get_type(), "mask"); + let block = self.block; + block.add_assignment(self.location, mask_var, mask); + let mask = mask_var.to_rvalue(); + // TODO(antoyo): use a recursive unqualified() here. let vector_type = v1.get_type().unqualified().dyncast_vector().expect("vector type"); let element_type = vector_type.get_element_type(); @@ -1917,18 +1932,35 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> { self.int_type }; - let vector_type = - mask.get_type().dyncast_vector().expect("simd_shuffle mask should be of vector type"); - let mask_num_units = vector_type.get_num_units(); - let mut mask_elements = vec![]; - for i in 0..mask_num_units { - let index = self.context.new_rvalue_from_long(self.cx.type_u32(), i as _); - mask_elements.push(self.context.new_cast( - self.location, - self.extract_element(mask, index).to_rvalue(), - mask_element_type, - )); - } + // NOTE: this condition is needed because we call shuffle_vector in the implementation of + // simd_gather. + let mut mask_elements = if let Some(vector_type) = mask.get_type().dyncast_vector() { + let mask_num_units = vector_type.get_num_units(); + let mut mask_elements = vec![]; + for i in 0..mask_num_units { + let index = self.context.new_rvalue_from_long(self.cx.type_u32(), i as _); + mask_elements.push(self.context.new_cast( + self.location, + self.extract_element(mask, index).to_rvalue(), + mask_element_type, + )); + } + mask_elements + } else { + let struct_type = mask.get_type().is_struct().expect("mask should be of struct type"); + let mask_num_units = struct_type.get_field_count(); + let mut mask_elements = vec![]; + for i in 0..mask_num_units { + let field = struct_type.get_field(i as i32); + mask_elements.push(self.context.new_cast( + self.location, + mask.access_field(self.location, field).to_rvalue(), + mask_element_type, + )); + } + mask_elements + }; + let mask_num_units = mask_elements.len(); // NOTE: the mask needs to be the same length as the input vectors, so add the missing // elements in the mask if needed. 
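[Editorial aside, not part of the patch: the `ptrtoint`/`inttoptr` hunks above replace `const_bitcast` with `context.new_cast`, presumably because a pointer/integer conversion is a value cast rather than a reinterpretation of the same bits as another type. In surface Rust the operation being modelled is just an `as` cast in each direction; this sketch is illustrative only and not strict-provenance clean.]

```rust
// Sketch of the semantics only; the patch itself operates on gccjit RValues.
fn ptrtoint(p: *const u32) -> usize {
    p as usize // pointer -> integer value cast
}

fn inttoptr(addr: usize) -> *const u32 {
    addr as *const u32 // integer -> pointer value cast
}

fn main() {
    let v = 42u32;
    let addr = ptrtoint(&v);
    let p = inttoptr(addr);
    // SAFETY: `p` was derived from a live reference to `v` just above.
    assert_eq!(unsafe { *p }, 42);
}
```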
diff --git a/compiler/rustc_codegen_gcc/src/callee.rs b/compiler/rustc_codegen_gcc/src/callee.rs index 65972a03e8355..c133ae4fcdd2a 100644 --- a/compiler/rustc_codegen_gcc/src/callee.rs +++ b/compiler/rustc_codegen_gcc/src/callee.rs @@ -72,95 +72,74 @@ pub fn get_fn<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, instance: Instance<'tcx>) attributes::from_fn_attrs(cx, func, instance); - let instance_def_id = instance.def_id(); - - // TODO(antoyo): set linkage and attributes. - - // Apply an appropriate linkage/visibility value to our item that we - // just declared. - // - // This is sort of subtle. Inside our codegen unit we started off - // compilation by predefining all our own `MonoItem` instances. That - // is, everything we're codegenning ourselves is already defined. That - // means that anything we're actually codegenning in this codegen unit - // will have hit the above branch in `get_declared_value`. As a result, - // we're guaranteed here that we're declaring a symbol that won't get - // defined, or in other words we're referencing a value from another - // codegen unit or even another crate. - // - // So because this is a foreign value we blanket apply an external - // linkage directive because it's coming from a different object file. - // The visibility here is where it gets tricky. This symbol could be - // referencing some foreign crate or foreign library (an `extern` - // block) in which case we want to leave the default visibility. We may - // also, though, have multiple codegen units. It could be a - // monomorphization, in which case its expected visibility depends on - // whether we are sharing generics or not. The important thing here is - // that the visibility we apply to the declaration is the same one that - // has been applied to the definition (wherever that definition may be). - let is_generic = instance.args.non_erasable_generics().next().is_some(); - - if is_generic { - // This is a monomorphization. Its expected visibility depends - // on whether we are in share-generics mode. - - if cx.tcx.sess.opts.share_generics() { - // We are in share_generics mode. - - if let Some(instance_def_id) = instance_def_id.as_local() { - // This is a definition from the current crate. If the - // definition is unreachable for downstream crates or - // the current crate does not re-export generics, the - // definition of the instance will have been declared - // as `hidden`. - if cx.tcx.is_unreachable_local_definition(instance_def_id) + #[cfg(feature = "master")] + { + let instance_def_id = instance.def_id(); + + // TODO(antoyo): set linkage and attributes. + + // Apply an appropriate linkage/visibility value to our item that we + // just declared. + // + // This is sort of subtle. Inside our codegen unit we started off + // compilation by predefining all our own `MonoItem` instances. That + // is, everything we're codegenning ourselves is already defined. That + // means that anything we're actually codegenning in this codegen unit + // will have hit the above branch in `get_declared_value`. As a result, + // we're guaranteed here that we're declaring a symbol that won't get + // defined, or in other words we're referencing a value from another + // codegen unit or even another crate. + // + // So because this is a foreign value we blanket apply an external + // linkage directive because it's coming from a different object file. + // The visibility here is where it gets tricky. 
This symbol could be + // referencing some foreign crate or foreign library (an `extern` + // block) in which case we want to leave the default visibility. We may + // also, though, have multiple codegen units. It could be a + // monomorphization, in which case its expected visibility depends on + // whether we are sharing generics or not. The important thing here is + // that the visibility we apply to the declaration is the same one that + // has been applied to the definition (wherever that definition may be). + let is_generic = instance.args.non_erasable_generics().next().is_some(); + + let is_hidden = if is_generic { + // This is a monomorphization of a generic function. + if !(cx.tcx.sess.opts.share_generics() + || tcx.codegen_fn_attrs(instance_def_id).inline + == rustc_attr_parsing::InlineAttr::Never) + { + // When not sharing generics, all instances are in the same + // crate and have hidden visibility. + true + } else if let Some(instance_def_id) = instance_def_id.as_local() { + // This is a monomorphization of a generic function + // defined in the current crate. It is hidden if: + // - the definition is unreachable for downstream + // crates, or + // - the current crate does not re-export generics + // (because the crate is a C library or executable) + cx.tcx.is_unreachable_local_definition(instance_def_id) || !cx.tcx.local_crate_exports_generics() - { - #[cfg(feature = "master")] - func.add_attribute(FnAttribute::Visibility(Visibility::Hidden)); - } } else { // This is a monomorphization of a generic function - // defined in an upstream crate. - if instance.upstream_monomorphization(tcx).is_some() { - // This is instantiated in another crate. It cannot - // be `hidden`. - } else { - // This is a local instantiation of an upstream definition. - // If the current crate does not re-export it - // (because it is a C library or an executable), it - // will have been declared `hidden`. - if !cx.tcx.local_crate_exports_generics() { - #[cfg(feature = "master")] - func.add_attribute(FnAttribute::Visibility(Visibility::Hidden)); - } - } + // defined in an upstream crate. It is hidden if: + // - it is instantiated in this crate, and + // - the current crate does not re-export generics + instance.upstream_monomorphization(tcx).is_none() + && !cx.tcx.local_crate_exports_generics() } } else { - // When not sharing generics, all instances are in the same - // crate and have hidden visibility - #[cfg(feature = "master")] + // This is a non-generic function. It is hidden if: + // - it is instantiated in the local crate, and + // - it is defined an upstream crate (non-local), or + // - it is not reachable + cx.tcx.is_codegened_item(instance_def_id) + && (!instance_def_id.is_local() + || !cx.tcx.is_reachable_non_generic(instance_def_id)) + }; + if is_hidden { func.add_attribute(FnAttribute::Visibility(Visibility::Hidden)); } - } else { - // This is a non-generic function - if cx.tcx.is_codegened_item(instance_def_id) { - // This is a function that is instantiated in the local crate - - if instance_def_id.is_local() { - // This is function that is defined in the local crate. - // If it is not reachable, it is hidden. - if !cx.tcx.is_reachable_non_generic(instance_def_id) { - #[cfg(feature = "master")] - func.add_attribute(FnAttribute::Visibility(Visibility::Hidden)); - } - } else { - // This is a function from an upstream crate that has - // been instantiated here. These are always hidden. 
- #[cfg(feature = "master")] - func.add_attribute(FnAttribute::Visibility(Visibility::Hidden)); - } - } } func diff --git a/compiler/rustc_codegen_gcc/src/common.rs b/compiler/rustc_codegen_gcc/src/common.rs index 0d3e7083d56ad..f43743fc2a41b 100644 --- a/compiler/rustc_codegen_gcc/src/common.rs +++ b/compiler/rustc_codegen_gcc/src/common.rs @@ -240,14 +240,14 @@ impl<'gcc, 'tcx> ConstCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> { } }; let ptr_type = base_addr.get_type(); - let base_addr = self.const_bitcast(base_addr, self.usize_type); + let base_addr = self.context.new_cast(None, base_addr, self.usize_type); let offset = self.context.new_rvalue_from_long(self.usize_type, offset.bytes() as i64); - let ptr = self.const_bitcast(base_addr + offset, ptr_type); + let ptr = self.context.new_cast(None, base_addr + offset, ptr_type); if !matches!(layout.primitive(), Pointer(_)) { self.const_bitcast(ptr.dereference(None).to_rvalue(), ty) } else { - self.const_bitcast(ptr, ty) + self.context.new_cast(None, ptr, ty) } } } diff --git a/compiler/rustc_codegen_gcc/src/consts.rs b/compiler/rustc_codegen_gcc/src/consts.rs index 6dc2f4ed66881..1631ecfeecf3f 100644 --- a/compiler/rustc_codegen_gcc/src/consts.rs +++ b/compiler/rustc_codegen_gcc/src/consts.rs @@ -252,7 +252,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> { let global = self.declare_global( sym, gcc_type, - GlobalKind::Exported, + GlobalKind::Imported, is_tls, fn_attrs.link_section, ); @@ -404,7 +404,6 @@ fn check_and_apply_linkage<'gcc, 'tcx>( // TODO(antoyo): set linkage. let value = cx.const_ptrcast(global1.get_address(None), gcc_type); global2.global_set_initializer_rvalue(value); - // TODO(antoyo): use global_set_initializer() when it will work. global2 } else { // Generate an external declaration. diff --git a/compiler/rustc_codegen_gcc/src/context.rs b/compiler/rustc_codegen_gcc/src/context.rs index f67dcf0cb116d..c81c53359fd18 100644 --- a/compiler/rustc_codegen_gcc/src/context.rs +++ b/compiler/rustc_codegen_gcc/src/context.rs @@ -386,6 +386,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> { impl<'gcc, 'tcx> BackendTypes for CodegenCx<'gcc, 'tcx> { type Value = RValue<'gcc>; type Metadata = RValue<'gcc>; + // TODO(antoyo): change to Function<'gcc>. type Function = RValue<'gcc>; type BasicBlock = Block<'gcc>; diff --git a/compiler/rustc_codegen_gcc/src/debuginfo.rs b/compiler/rustc_codegen_gcc/src/debuginfo.rs index 6aeb656c1ab40..d3aeb7f3bdeb3 100644 --- a/compiler/rustc_codegen_gcc/src/debuginfo.rs +++ b/compiler/rustc_codegen_gcc/src/debuginfo.rs @@ -4,7 +4,7 @@ use gccjit::{Location, RValue}; use rustc_codegen_ssa::mir::debuginfo::{DebugScope, FunctionDebugContext, VariableKind}; use rustc_codegen_ssa::traits::{DebugInfoBuilderMethods, DebugInfoCodegenMethods}; use rustc_data_structures::sync::Lrc; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_index::{Idx, IndexVec}; use rustc_middle::mir::{self, Body, SourceScope}; use rustc_middle::ty::{Instance, PolyExistentialTraitRef, Ty}; @@ -69,7 +69,7 @@ fn compute_mir_scopes<'gcc, 'tcx>( ) { // Find all scopes with variables defined in them. let variables = if cx.sess().opts.debuginfo == DebugInfo::Full { - let mut vars = BitSet::new_empty(mir.source_scopes.len()); + let mut vars = DenseBitSet::new_empty(mir.source_scopes.len()); // FIXME(eddyb) take into account that arguments always have debuginfo, // irrespective of their name (assuming full debuginfo is enabled). 
// NOTE(eddyb) actually, on second thought, those are always in the @@ -82,7 +82,7 @@ fn compute_mir_scopes<'gcc, 'tcx>( // Nothing to emit, of course. None }; - let mut instantiated = BitSet::new_empty(mir.source_scopes.len()); + let mut instantiated = DenseBitSet::new_empty(mir.source_scopes.len()); // Instantiate all scopes. for idx in 0..mir.source_scopes.len() { let scope = SourceScope::new(idx); @@ -101,9 +101,9 @@ fn make_mir_scope<'gcc, 'tcx>( cx: &CodegenCx<'gcc, 'tcx>, _instance: Instance<'tcx>, mir: &Body<'tcx>, - variables: &Option<BitSet<SourceScope>>, + variables: &Option<DenseBitSet<SourceScope>>, debug_context: &mut FunctionDebugContext<'tcx, (), Location<'gcc>>, - instantiated: &mut BitSet<SourceScope>, + instantiated: &mut DenseBitSet<SourceScope>, scope: SourceScope, ) { if instantiated.contains(scope) { diff --git a/compiler/rustc_codegen_gcc/src/errors.rs b/compiler/rustc_codegen_gcc/src/errors.rs index 56849cc861098..1b59b9ac169ab 100644 --- a/compiler/rustc_codegen_gcc/src/errors.rs +++ b/compiler/rustc_codegen_gcc/src/errors.rs @@ -28,6 +28,7 @@ pub(crate) struct UnstableCTargetFeature<'a> { #[diag(codegen_gcc_forbidden_ctarget_feature)] pub(crate) struct ForbiddenCTargetFeature<'a> { pub feature: &'a str, + pub enabled: &'a str, pub reason: &'a str, } @@ -39,10 +40,6 @@ pub(crate) enum PossibleFeature<'a> { None, } -#[derive(Diagnostic)] -#[diag(codegen_gcc_lto_not_supported)] -pub(crate) struct LTONotSupported; - #[derive(Diagnostic)] #[diag(codegen_gcc_unwinding_inline_asm)] pub(crate) struct UnwindingInlineAsm { diff --git a/compiler/rustc_codegen_gcc/src/gcc_util.rs b/compiler/rustc_codegen_gcc/src/gcc_util.rs index 058a874501b26..560aff43d6538 100644 --- a/compiler/rustc_codegen_gcc/src/gcc_util.rs +++ b/compiler/rustc_codegen_gcc/src/gcc_util.rs @@ -1,9 +1,11 @@ +use std::iter::FromIterator; + #[cfg(feature = "master")] use gccjit::Context; use rustc_codegen_ssa::codegen_attrs::check_tied_features; use rustc_codegen_ssa::errors::TargetFeatureDisableOrEnable; -use rustc_data_structures::fx::FxHashMap; -use rustc_middle::bug; +use rustc_data_structures::fx::{FxHashMap, FxHashSet}; +use rustc_data_structures::unord::UnordSet; use rustc_session::Session; use rustc_target::target_features::RUSTC_SPECIFIC_FEATURES; use smallvec::{SmallVec, smallvec}; @@ -37,100 +39,149 @@ pub(crate) fn global_gcc_features(sess: &Session, diagnostics: bool) -> Vec return None, - Some(c @ ('+' | '-')) => c, - Some(_) => { - if diagnostics { - sess.dcx().emit_warn(UnknownCTargetFeaturePrefix { feature: s }); - } - return None; - } - }; - - // Get the backend feature name, if any. - // This excludes rustc-specific features, that do not get passed down to GCC. - let feature = backend_feature_name(s)?; - // Warn against use of GCC specific feature names on the CLI.
- if diagnostics { - let feature_state = known_features.iter().find(|&&(v, _, _)| v == feature); - match feature_state { - None => { - let rust_feature = - known_features.iter().find_map(|&(rust_feature, _, _)| { - let gcc_features = to_gcc_features(sess, rust_feature); - if gcc_features.contains(&feature) - && !gcc_features.contains(&rust_feature) - { - Some(rust_feature) - } else { - None - } - }); - let unknown_feature = if let Some(rust_feature) = rust_feature { - UnknownCTargetFeature { - feature, - rust_feature: PossibleFeature::Some { rust_feature }, - } - } else { - UnknownCTargetFeature { feature, rust_feature: PossibleFeature::None } - }; - sess.dcx().emit_warn(unknown_feature); - } - Some((_, stability, _)) => { - if let Err(reason) = - stability.toggle_allowed(&sess.target, enable_disable == '+') + + // Ensure that all ABI-required features are enabled, and the ABI-forbidden ones + // are disabled. + let abi_feature_constraints = sess.target.abi_required_features(); + let abi_incompatible_set = + FxHashSet::from_iter(abi_feature_constraints.incompatible.iter().copied()); + + // Compute implied features + let mut all_rust_features = vec![]; + for feature in sess.opts.cg.target_feature.split(',') { + if let Some(feature) = feature.strip_prefix('+') { + all_rust_features.extend( + UnordSet::from(sess.target.implied_target_features(std::iter::once(feature))) + .to_sorted_stable_ord() + .iter() + .map(|&&s| (true, s)), + ) + } else if let Some(feature) = feature.strip_prefix('-') { + // FIXME: Why do we not remove implied features on "-" here? + // We do the equivalent above in `target_features_cfg`. + // See . + all_rust_features.push((false, feature)); + } else if !feature.is_empty() && diagnostics { + sess.dcx().emit_warn(UnknownCTargetFeaturePrefix { feature }); + } + } + // Remove features that are meant for rustc, not codegen. + all_rust_features.retain(|&(_, feature)| { + // Retain if it is not a rustc feature + !RUSTC_SPECIFIC_FEATURES.contains(&feature) + }); + + // Check feature validity. + if diagnostics { + for &(enable, feature) in &all_rust_features { + let feature_state = known_features.iter().find(|&&(v, _, _)| v == feature); + match feature_state { + None => { + let rust_feature = known_features.iter().find_map(|&(rust_feature, _, _)| { + let gcc_features = to_gcc_features(sess, rust_feature); + if gcc_features.contains(&feature) && !gcc_features.contains(&rust_feature) { - sess.dcx().emit_warn(ForbiddenCTargetFeature { feature, reason }); - } else if stability.requires_nightly().is_some() { - // An unstable feature. Warn about using it. (It makes little sense - // to hard-error here since we just warn about fully unknown - // features above). - sess.dcx().emit_warn(UnstableCTargetFeature { feature }); + Some(rust_feature) + } else { + None } + }); + let unknown_feature = if let Some(rust_feature) = rust_feature { + UnknownCTargetFeature { + feature, + rust_feature: PossibleFeature::Some { rust_feature }, + } + } else { + UnknownCTargetFeature { feature, rust_feature: PossibleFeature::None } + }; + sess.dcx().emit_warn(unknown_feature); + } + Some(&(_, stability, _)) => { + if let Err(reason) = stability.toggle_allowed() { + sess.dcx().emit_warn(ForbiddenCTargetFeature { + feature, + enabled: if enable { "enabled" } else { "disabled" }, + reason, + }); + } else if stability.requires_nightly().is_some() { + // An unstable feature. Warn about using it. (It makes little sense + // to hard-error here since we just warn about fully unknown + // features above). 
+ sess.dcx().emit_warn(UnstableCTargetFeature { feature }); } } + } - // FIXME(nagisa): figure out how to not allocate a full hashset here. - featsmap.insert(feature, enable_disable == '+'); + // Ensure that the features we enable/disable are compatible with the ABI. + if enable { + if abi_incompatible_set.contains(feature) { + sess.dcx().emit_warn(ForbiddenCTargetFeature { + feature, + enabled: "enabled", + reason: "this feature is incompatible with the target ABI", + }); + } + } else { + // FIXME: we have to request implied features here since + // negative features do not handle implied features above. + for &required in abi_feature_constraints.required.iter() { + let implied = sess.target.implied_target_features(std::iter::once(required)); + if implied.contains(feature) { + sess.dcx().emit_warn(ForbiddenCTargetFeature { + feature, + enabled: "disabled", + reason: "this feature is required by the target ABI", + }); + } + } } - // ... otherwise though we run through `to_gcc_features` when + // FIXME(nagisa): figure out how to not allocate a full hashset here. + featsmap.insert(feature, enable); + } + } + + // To be sure the ABI-relevant features are all in the right state, we explicitly + // (un)set them here. This means if the target spec sets those features wrong, + // we will silently correct them rather than silently producing wrong code. + // (The target sanity check tries to catch this, but we can't know which features are + // enabled in GCC by default so we can't be fully sure about that check.) + // We add these at the beginning of the list so that `-Ctarget-features` can + // still override it... that's unsound, but more compatible with past behavior. + all_rust_features.splice( + 0..0, + abi_feature_constraints + .required + .iter() + .map(|&f| (true, f)) + .chain(abi_feature_constraints.incompatible.iter().map(|&f| (false, f))), + ); + + // Translate this into GCC features. + let feats = + all_rust_features.iter().flat_map(|&(enable, feature)| { + let enable_disable = if enable { '+' } else { '-' }; + // We run through `to_gcc_features` when // passing requests down to GCC. This means that all in-language // features also work on the command line instead of having two // different names when the GCC name and the Rust name differ. - Some( - to_gcc_features(sess, feature) - .iter() - .flat_map(|feat| to_gcc_features(sess, feat).into_iter()) - .map(|feature| { - if enable_disable == '-' { - format!("-{}", feature) - } else { - feature.to_string() - } - }) - .collect::>(), - ) - }) - .flatten(); + to_gcc_features(sess, feature) + .iter() + .flat_map(|feat| to_gcc_features(sess, feat).into_iter()) + .map(|feature| { + if enable_disable == '-' { + format!("-{}", feature) + } else { + feature.to_string() + } + }) + .collect::>() + }); features.extend(feats); if diagnostics { @@ -146,26 +197,12 @@ pub(crate) fn global_gcc_features(sess: &Session, diagnostics: bool) -> Vec Option<&str> { - // features must start with a `+` or `-`. - let feature = s.strip_prefix(&['+', '-'][..]).unwrap_or_else(|| { - bug!("target feature `{}` must begin with a `+` or `-`", s); - }); - // Rustc-specific feature requests like `+crt-static` or `-crt-static` - // are not passed down to GCC. 
- if RUSTC_SPECIFIC_FEATURES.contains(&feature) { - return None; - } - Some(feature) -} - // To find a list of GCC's names, check https://gcc.gnu.org/onlinedocs/gcc/Function-Attributes.html pub fn to_gcc_features<'a>(sess: &Session, s: &'a str) -> SmallVec<[&'a str; 2]> { let arch = if sess.target.arch == "x86_64" { "x86" } else { &*sess.target.arch }; match (arch, s) { + // FIXME: seems like x87 does not exist? + ("x86", "x87") => smallvec![], ("x86", "sse4.2") => smallvec!["sse4.2", "crc32"], ("x86", "pclmulqdq") => smallvec!["pclmul"], ("x86", "rdrand") => smallvec!["rdrnd"], diff --git a/compiler/rustc_codegen_gcc/src/int.rs b/compiler/rustc_codegen_gcc/src/int.rs index 02b760dc7334c..fe6a65bed03b0 100644 --- a/compiler/rustc_codegen_gcc/src/int.rs +++ b/compiler/rustc_codegen_gcc/src/int.rs @@ -90,7 +90,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> { } } } - } else if a_type.is_vector() && a_type.is_vector() { + } else if a_type.is_vector() && b_type.is_vector() { a >> b } else if a_native && !b_native { self.gcc_lshr(a, self.gcc_int_cast(b, a_type)) @@ -322,36 +322,26 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> { }, } } else { - match new_kind { - Int(I128) | Uint(U128) => { - let func_name = match oop { - OverflowOp::Add => match new_kind { - Int(I128) => "__rust_i128_addo", - Uint(U128) => "__rust_u128_addo", - _ => unreachable!(), - }, - OverflowOp::Sub => match new_kind { - Int(I128) => "__rust_i128_subo", - Uint(U128) => "__rust_u128_subo", - _ => unreachable!(), - }, - OverflowOp::Mul => match new_kind { - Int(I128) => "__rust_i128_mulo", // TODO(antoyo): use __muloti4d instead? - Uint(U128) => "__rust_u128_mulo", - _ => unreachable!(), - }, - }; - return self.operation_with_overflow(func_name, lhs, rhs); - } - _ => match oop { - OverflowOp::Mul => match new_kind { - Int(I32) => "__mulosi4", - Int(I64) => "__mulodi4", - _ => unreachable!(), - }, - _ => unimplemented!("overflow operation for {:?}", new_kind), + let (func_name, width) = match oop { + OverflowOp::Add => match new_kind { + Int(I128) => ("__rust_i128_addo", 128), + Uint(U128) => ("__rust_u128_addo", 128), + _ => unreachable!(), }, - } + OverflowOp::Sub => match new_kind { + Int(I128) => ("__rust_i128_subo", 128), + Uint(U128) => ("__rust_u128_subo", 128), + _ => unreachable!(), + }, + OverflowOp::Mul => match new_kind { + Int(I32) => ("__mulosi4", 32), + Int(I64) => ("__mulodi4", 64), + Int(I128) => ("__rust_i128_mulo", 128), // TODO(antoyo): use __muloti4d instead? + Uint(U128) => ("__rust_u128_mulo", 128), + _ => unreachable!(), + }, + }; + return self.operation_with_overflow(func_name, lhs, rhs, width); }; let intrinsic = self.context.get_builtin_function(name); @@ -364,80 +354,87 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> { (res.dereference(self.location).to_rvalue(), overflow) } + /// Non-`__builtin_*` overflow operations with a `fn(T, T, &mut i32) -> T` signature. 
pub fn operation_with_overflow( &self, func_name: &str, lhs: RValue<'gcc>, rhs: RValue<'gcc>, + width: u64, ) -> (RValue<'gcc>, RValue<'gcc>) { let a_type = lhs.get_type(); let b_type = rhs.get_type(); debug_assert!(a_type.dyncast_array().is_some()); debug_assert!(b_type.dyncast_array().is_some()); + let overflow_type = self.i32_type; + let overflow_param_type = overflow_type.make_pointer(); + let res_type = a_type; + + let overflow_value = + self.current_func().new_local(self.location, overflow_type, "overflow"); + let overflow_addr = overflow_value.get_address(self.location); + let param_a = self.context.new_parameter(self.location, a_type, "a"); let param_b = self.context.new_parameter(self.location, b_type, "b"); - let result_field = self.context.new_field(self.location, a_type, "result"); - let overflow_field = self.context.new_field(self.location, self.bool_type, "overflow"); - - let ret_ty = Ty::new_tup(self.tcx, &[self.tcx.types.i128, self.tcx.types.bool]); + let param_overflow = + self.context.new_parameter(self.location, overflow_param_type, "overflow"); + + let a_elem_type = a_type.dyncast_array().expect("non-array a value"); + debug_assert!(a_elem_type.is_integral()); + let res_ty = match width { + 32 => self.tcx.types.i32, + 64 => self.tcx.types.i64, + 128 => self.tcx.types.i128, + _ => unreachable!("unexpected integer size"), + }; let layout = self .tcx - .layout_of(ty::TypingEnv::fully_monomorphized().as_query_input(ret_ty)) + .layout_of(ty::TypingEnv::fully_monomorphized().as_query_input(res_ty)) .unwrap(); let arg_abi = ArgAbi { layout, mode: PassMode::Direct(ArgAttributes::new()) }; let mut fn_abi = FnAbi { - args: vec![arg_abi.clone(), arg_abi.clone()].into_boxed_slice(), + args: vec![arg_abi.clone(), arg_abi.clone(), arg_abi.clone()].into_boxed_slice(), ret: arg_abi, c_variadic: false, - fixed_count: 2, + fixed_count: 3, conv: Conv::C, can_unwind: false, }; fn_abi.adjust_for_foreign_abi(self.cx, spec::abi::Abi::C { unwind: false }).unwrap(); - let indirect = matches!(fn_abi.ret.mode, PassMode::Indirect { .. }); - - let return_type = self - .context - .new_struct_type(self.location, "result_overflow", &[result_field, overflow_field]); - let result = if indirect { - let return_value = - self.current_func().new_local(self.location, return_type.as_type(), "return_value"); - let return_param_type = return_type.as_type().make_pointer(); - let return_param = - self.context.new_parameter(self.location, return_param_type, "return_value"); + let ret_indirect = matches!(fn_abi.ret.mode, PassMode::Indirect { .. 
}); + + let result = if ret_indirect { + let res_value = self.current_func().new_local(self.location, res_type, "result_value"); + let res_addr = res_value.get_address(self.location); + let res_param_type = res_type.make_pointer(); + let param_res = self.context.new_parameter(self.location, res_param_type, "result"); + let func = self.context.new_function( self.location, FunctionType::Extern, self.type_void(), - &[return_param, param_a, param_b], + &[param_res, param_a, param_b, param_overflow], func_name, false, ); - self.llbb().add_eval( - self.location, - self.context.new_call(self.location, func, &[ - return_value.get_address(self.location), - lhs, - rhs, - ]), - ); - return_value.to_rvalue() + let _void = + self.context.new_call(self.location, func, &[res_addr, lhs, rhs, overflow_addr]); + res_value.to_rvalue() } else { let func = self.context.new_function( self.location, FunctionType::Extern, - return_type.as_type(), - &[param_a, param_b], + res_type, + &[param_a, param_b, param_overflow], func_name, false, ); - self.context.new_call(self.location, func, &[lhs, rhs]) + self.context.new_call(self.location, func, &[lhs, rhs, overflow_addr]) }; - let overflow = result.access_field(self.location, overflow_field); - let int_result = result.access_field(self.location, result_field); - (int_result, overflow) + + (result, self.context.new_cast(self.location, overflow_value, self.bool_type).to_rvalue()) } pub fn gcc_icmp( @@ -660,7 +657,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> { } } } - } else if a_type.is_vector() && a_type.is_vector() { + } else if a_type.is_vector() && b_type.is_vector() { a << b } else if a_native && !b_native { self.gcc_shl(a, self.gcc_int_cast(b, a_type)) diff --git a/compiler/rustc_codegen_gcc/src/intrinsic/llvm.rs b/compiler/rustc_codegen_gcc/src/intrinsic/llvm.rs index 0a448ded6b1ae..231307def291f 100644 --- a/compiler/rustc_codegen_gcc/src/intrinsic/llvm.rs +++ b/compiler/rustc_codegen_gcc/src/intrinsic/llvm.rs @@ -421,7 +421,7 @@ pub fn adjust_intrinsic_arguments<'a, 'b, 'gcc, 'tcx>( | "__builtin_ia32_xsaveopt64" => { let new_args = args.to_vec(); let thirty_two = builder.context.new_rvalue_from_int(new_args[1].get_type(), 32); - let arg2 = new_args[1] << thirty_two | new_args[2]; + let arg2 = (new_args[1] << thirty_two) | new_args[2]; let arg2_type = gcc_func.get_param_type(1); let arg2 = builder.context.new_cast(None, arg2, arg2_type); args = vec![new_args[0], arg2].into(); diff --git a/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs b/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs index 69326f409bb3e..48606f5f91c0f 100644 --- a/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs +++ b/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs @@ -13,15 +13,16 @@ use rustc_codegen_ssa::common::IntPredicate; use rustc_codegen_ssa::errors::InvalidMonomorphization; use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue}; use rustc_codegen_ssa::mir::place::{PlaceRef, PlaceValue}; +#[cfg(feature = "master")] +use rustc_codegen_ssa::traits::MiscCodegenMethods; use rustc_codegen_ssa::traits::{ - ArgAbiBuilderMethods, BuilderMethods, ConstCodegenMethods, IntrinsicCallBuilderMethods, + ArgAbiBuilderMethods, BaseTypeCodegenMethods, BuilderMethods, ConstCodegenMethods, + IntrinsicCallBuilderMethods, }; -#[cfg(feature = "master")] -use rustc_codegen_ssa::traits::{BaseTypeCodegenMethods, MiscCodegenMethods}; use rustc_middle::bug; -use rustc_middle::ty::layout::LayoutOf; #[cfg(feature = "master")] -use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt, HasTypingEnv}; +use 
rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt}; +use rustc_middle::ty::layout::{HasTypingEnv, LayoutOf}; use rustc_middle::ty::{self, Instance, Ty}; use rustc_span::{Span, Symbol, sym}; use rustc_target::abi::HasDataLayout; @@ -66,7 +67,7 @@ fn get_simple_intrinsic<'gcc, 'tcx>( sym::log2f64 => "log2", sym::fmaf32 => "fmaf", sym::fmaf64 => "fma", - // FIXME: calling `fma` from libc without FMA target feature uses expensive sofware emulation + // FIXME: calling `fma` from libc without FMA target feature uses expensive software emulation sym::fmuladdf32 => "fmaf", // TODO: use gcc intrinsic analogous to llvm.fmuladd.f32 sym::fmuladdf64 => "fma", // TODO: use gcc intrinsic analogous to llvm.fmuladd.f64 sym::fabsf32 => "fabsf", @@ -139,6 +140,18 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tc &args.iter().map(|arg| arg.immediate()).collect::>(), ) } + sym::fmaf16 => { + // TODO(antoyo): use the correct builtin for f16. + let func = self.cx.context.get_builtin_function("fmaf"); + let args: Vec<_> = args + .iter() + .map(|arg| { + self.cx.context.new_cast(self.location, arg.immediate(), self.cx.type_f32()) + }) + .collect(); + let result = self.cx.context.new_call(self.location, func, &args); + self.cx.context.new_cast(self.location, result, self.cx.type_f16()) + } sym::is_val_statically_known => { let a = args[0].immediate(); let builtin = self.context.get_builtin_function("__builtin_constant_p"); @@ -988,7 +1001,8 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> { 128 => "__rust_i128_addo", _ => unreachable!(), }; - let (int_result, overflow) = self.operation_with_overflow(func_name, lhs, rhs); + let (int_result, overflow) = + self.operation_with_overflow(func_name, lhs, rhs, width); self.llbb().add_assignment(self.location, res, int_result); overflow }; @@ -1058,7 +1072,8 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> { 128 => "__rust_i128_subo", _ => unreachable!(), }; - let (int_result, overflow) = self.operation_with_overflow(func_name, lhs, rhs); + let (int_result, overflow) = + self.operation_with_overflow(func_name, lhs, rhs, width); self.llbb().add_assignment(self.location, res, int_result); overflow }; diff --git a/compiler/rustc_codegen_gcc/src/intrinsic/simd.rs b/compiler/rustc_codegen_gcc/src/intrinsic/simd.rs index 79d1a06dd467f..1be452e5d05de 100644 --- a/compiler/rustc_codegen_gcc/src/intrinsic/simd.rs +++ b/compiler/rustc_codegen_gcc/src/intrinsic/simd.rs @@ -379,7 +379,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>( // Make sure this is actually a SIMD vector. let idx_ty = args[2].layout.ty; let n: u64 = if idx_ty.is_simd() - && matches!(idx_ty.simd_size_and_type(bx.cx.tcx).1.kind(), ty::Uint(ty::UintTy::U32)) + && matches!(*idx_ty.simd_size_and_type(bx.cx.tcx).1.kind(), ty::Uint(ty::UintTy::U32)) { idx_ty.simd_size_and_type(bx.cx.tcx).0 } else { @@ -829,6 +829,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>( | sym::simd_flog | sym::simd_floor | sym::simd_fma + | sym::simd_relaxed_fma | sym::simd_fpow | sym::simd_fpowi | sym::simd_fsin diff --git a/compiler/rustc_codegen_gcc/src/lib.rs b/compiler/rustc_codegen_gcc/src/lib.rs index f2efa981f976f..f6ad0c79de545 100644 --- a/compiler/rustc_codegen_gcc/src/lib.rs +++ b/compiler/rustc_codegen_gcc/src/lib.rs @@ -27,6 +27,8 @@ // Some "regular" crates we want to share with rustc extern crate object; extern crate smallvec; +// FIXME(antoyo): clippy bug: remove the #[allow] when it's fixed. 
+#[allow(unused_extern_crates)] extern crate tempfile; #[macro_use] extern crate tracing; @@ -88,11 +90,11 @@ use std::sync::atomic::Ordering; use std::sync::{Arc, Mutex}; use back::lto::{ThinBuffer, ThinData}; -use errors::LTONotSupported; use gccjit::{CType, Context, OptimizationLevel}; #[cfg(feature = "master")] use gccjit::{TargetInfo, Version}; use rustc_ast::expand::allocator::AllocatorKind; +use rustc_ast::expand::autodiff_attrs::AutoDiffItem; use rustc_codegen_ssa::back::lto::{LtoModuleCodegen, SerializedModule, ThinModule}; use rustc_codegen_ssa::back::write::{ CodegenContext, FatLtoInput, ModuleConfig, TargetMachineFactoryFn, @@ -108,9 +110,10 @@ use rustc_middle::dep_graph::{WorkProduct, WorkProductId}; use rustc_middle::ty::TyCtxt; use rustc_middle::util::Providers; use rustc_session::Session; -use rustc_session::config::{Lto, OptLevel, OutputFilenames}; +use rustc_session::config::{OptLevel, OutputFilenames}; use rustc_span::Symbol; use rustc_span::fatal_error::FatalError; +use rustc_target::spec::RelocModel; use tempfile::TempDir; use crate::back::lto::ModuleBuffer; @@ -140,11 +143,15 @@ impl TargetInfo { false } - fn supports_128bit_int(&self) -> bool { - self.supports_128bit_integers.load(Ordering::SeqCst) - } - - fn supports_target_dependent_type(&self, _typ: CType) -> bool { + fn supports_target_dependent_type(&self, typ: CType) -> bool { + match typ { + CType::UInt128t | CType::Int128t => { + if self.supports_128bit_integers.load(Ordering::SeqCst) { + return true; + } + } + _ => (), + } false } } @@ -165,10 +172,6 @@ impl LockedTargetInfo { self.info.lock().expect("lock").cpu_supports(feature) } - fn supports_128bit_int(&self) -> bool { - self.info.lock().expect("lock").supports_128bit_int() - } - fn supports_target_dependent_type(&self, typ: CType) -> bool { self.info.lock().expect("lock").supports_target_dependent_type(typ) } @@ -201,10 +204,6 @@ impl CodegenBackend for GccCodegenBackend { #[cfg(feature = "master")] gccjit::set_global_personality_function_name(b"rust_eh_personality\0"); - if sess.lto() == Lto::Thin { - sess.dcx().emit_warn(LTONotSupported {}); - } - #[cfg(not(feature = "master"))] { let temp_dir = TempDir::new().expect("cannot create temporary directory"); @@ -296,6 +295,7 @@ impl ExtraBackendMethods for GccCodegenBackend { ) -> Self::Module { let mut mods = GccContext { context: Arc::new(SyncContext::new(new_context(tcx))), + relocation_model: tcx.sess.relocation_model(), should_combine_object_files: false, temp_dir: None, }; @@ -327,6 +327,9 @@ impl ExtraBackendMethods for GccCodegenBackend { pub struct GccContext { context: Arc, + /// This field is needed in order to be able to set the flag -fPIC when necessary when doing + /// LTO. + relocation_model: RelocModel, should_combine_object_files: bool, // Temporary directory used by LTO. We keep it here so that it's not removed before linking. 
temp_dir: Option, @@ -439,6 +442,15 @@ impl WriteBackendMethods for GccCodegenBackend { ) -> Result, FatalError> { back::write::link(cgcx, dcx, modules) } + fn autodiff( + _cgcx: &CodegenContext, + _tcx: TyCtxt<'_>, + _module: &ModuleCodegen, + _diff_fncs: Vec, + _config: &ModuleConfig, + ) -> Result<(), FatalError> { + unimplemented!() + } } /// This is the entrypoint for a hot plugged rustc_codegen_gccjit @@ -482,10 +494,10 @@ fn target_features_cfg( sess.target .rust_target_features() .iter() - .filter(|(_, gate, _)| gate.in_cfg()) - .filter_map(|(feature, gate, _)| { + .filter(|&&(_, gate, _)| gate.in_cfg()) + .filter_map(|&(feature, gate, _)| { if sess.is_nightly_build() || allow_unstable || gate.requires_nightly().is_none() { - Some(*feature) + Some(feature) } else { None } diff --git a/compiler/rustc_codegen_gcc/tests/failing-non-lto-tests.txt b/compiler/rustc_codegen_gcc/tests/failing-non-lto-tests.txt deleted file mode 100644 index 384dfdc26fb51..0000000000000 --- a/compiler/rustc_codegen_gcc/tests/failing-non-lto-tests.txt +++ /dev/null @@ -1,11 +0,0 @@ -tests/ui/issues/issue-44056.rs -tests/ui/lto/fat-lto.rs -tests/ui/lto/debuginfo-lto.rs -tests/ui/lto/lto-many-codegen-units.rs -tests/ui/lto/issue-100772.rs -tests/ui/lto/lto-rustc-loads-linker-plugin.rs -tests/ui/panic-runtime/lto-unwind.rs -tests/ui/sanitizer/issue-111184-cfi-coroutine-witness.rs -tests/ui/sepcomp/sepcomp-lib-lto.rs -tests/ui/lto/lto-opt-level-s.rs -tests/ui/lto/lto-opt-level-z.rs diff --git a/compiler/rustc_codegen_gcc/tests/failing-ui-tests.txt b/compiler/rustc_codegen_gcc/tests/failing-ui-tests.txt index 457072b1a5bcb..082958bfe1f85 100644 --- a/compiler/rustc_codegen_gcc/tests/failing-ui-tests.txt +++ b/compiler/rustc_codegen_gcc/tests/failing-ui-tests.txt @@ -69,20 +69,22 @@ tests/ui/mir/mir_heavy_promoted.rs tests/ui/consts/const_cmp_type_id.rs tests/ui/consts/issue-73976-monomorphic.rs tests/ui/consts/issue-94675.rs -tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop-fail.rs -tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop.rs +tests/ui/traits/const-traits/const-drop-fail.rs +tests/ui/traits/const-traits/const-drop.rs tests/ui/runtime/on-broken-pipe/child-processes.rs -tests/ui/sanitizer/cfi-assoc-ty-lifetime-issue-123053.rs -tests/ui/sanitizer/cfi-async-closures.rs -tests/ui/sanitizer/cfi-closures.rs -tests/ui/sanitizer/cfi-complex-receiver.rs -tests/ui/sanitizer/cfi-coroutine.rs -tests/ui/sanitizer/cfi-drop-in-place.rs -tests/ui/sanitizer/cfi-drop-no-principal.rs -tests/ui/sanitizer/cfi-fn-ptr.rs -tests/ui/sanitizer/cfi-self-ref.rs -tests/ui/sanitizer/cfi-supertraits.rs -tests/ui/sanitizer/cfi-virtual-auto.rs +tests/ui/sanitizer/cfi/assoc-ty-lifetime-issue-123053.rs +tests/ui/sanitizer/cfi/async-closures.rs +tests/ui/sanitizer/cfi/closures.rs +tests/ui/sanitizer/cfi/complex-receiver.rs +tests/ui/sanitizer/cfi/coroutine.rs +tests/ui/sanitizer/cfi/drop-in-place.rs +tests/ui/sanitizer/cfi/drop-no-principal.rs +tests/ui/sanitizer/cfi/fn-ptr.rs +tests/ui/sanitizer/cfi/self-ref.rs +tests/ui/sanitizer/cfi/supertraits.rs +tests/ui/sanitizer/cfi/virtual-auto.rs +tests/ui/sanitizer/cfi/sized-associated-ty.rs +tests/ui/sanitizer/cfi/can-reveal-opaques.rs tests/ui/sanitizer/kcfi-mangling.rs tests/ui/statics/const_generics.rs tests/ui/backtrace/dylib-dep.rs @@ -91,6 +93,7 @@ tests/ui/delegation/fn-header.rs tests/ui/consts/zst_no_llvm_alloc.rs tests/ui/consts/const-eval/parse_ints.rs tests/ui/simd/intrinsic/generic-arithmetic-pass.rs +tests/ui/simd/intrinsic/generic-as.rs tests/ui/backtrace/backtrace.rs 
tests/ui/lifetimes/tail-expr-lock-poisoning.rs tests/ui/runtime/rt-explody-panic-payloads.rs @@ -118,5 +121,4 @@ tests/ui/codegen/equal-pointers-unequal/strict-provenance/print3.rs tests/ui/codegen/equal-pointers-unequal/strict-provenance/inline2.rs tests/ui/codegen/equal-pointers-unequal/strict-provenance/segfault.rs tests/ui/codegen/equal-pointers-unequal/strict-provenance/zero.rs -tests/ui/sanitizer/cfi-sized-associated-ty.rs -tests/ui/sanitizer/cfi-can-reveal-opaques.rs +tests/ui/simd/simd-bitmask-notpow2.rs diff --git a/compiler/rustc_codegen_gcc/tests/failing-ui-tests12.txt b/compiler/rustc_codegen_gcc/tests/failing-ui-tests12.txt index 1d9bdaa552c6b..b10d4bc82aa8d 100644 --- a/compiler/rustc_codegen_gcc/tests/failing-ui-tests12.txt +++ b/compiler/rustc_codegen_gcc/tests/failing-ui-tests12.txt @@ -11,7 +11,6 @@ tests/ui/simd/array-type.rs tests/ui/simd/intrinsic/float-minmax-pass.rs tests/ui/simd/intrinsic/generic-arithmetic-pass.rs tests/ui/simd/intrinsic/generic-arithmetic-saturating-pass.rs -tests/ui/simd/intrinsic/generic-as.rs tests/ui/simd/intrinsic/generic-cast-pass.rs tests/ui/simd/intrinsic/generic-cast-pointer-width.rs tests/ui/simd/intrinsic/generic-comparison-pass.rs diff --git a/compiler/rustc_codegen_gcc/tests/hello-world/Cargo.lock b/compiler/rustc_codegen_gcc/tests/hello-world/Cargo.lock new file mode 100644 index 0000000000000..fe252db442539 --- /dev/null +++ b/compiler/rustc_codegen_gcc/tests/hello-world/Cargo.lock @@ -0,0 +1,14 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 4 + +[[package]] +name = "hello_world" +version = "0.0.0" +dependencies = [ + "mylib", +] + +[[package]] +name = "mylib" +version = "0.1.0" diff --git a/compiler/rustc_codegen_gcc/tests/hello-world/Cargo.toml b/compiler/rustc_codegen_gcc/tests/hello-world/Cargo.toml index 0b8cdc63fbe84..c6e22f642f673 100644 --- a/compiler/rustc_codegen_gcc/tests/hello-world/Cargo.toml +++ b/compiler/rustc_codegen_gcc/tests/hello-world/Cargo.toml @@ -1,4 +1,12 @@ [package] name = "hello_world" +edition = "2024" [dependencies] +mylib = { path = "mylib" } + +[profile.dev] +lto = "thin" + +[profile.release] +lto = "fat" diff --git a/compiler/rustc_codegen_gcc/tests/hello-world/mylib/Cargo.lock b/compiler/rustc_codegen_gcc/tests/hello-world/mylib/Cargo.lock new file mode 100644 index 0000000000000..c8a0bfc635474 --- /dev/null +++ b/compiler/rustc_codegen_gcc/tests/hello-world/mylib/Cargo.lock @@ -0,0 +1,5 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+[[package]] +name = "mylib" +version = "0.1.0" diff --git a/compiler/rustc_codegen_gcc/tests/hello-world/mylib/Cargo.toml b/compiler/rustc_codegen_gcc/tests/hello-world/mylib/Cargo.toml new file mode 100644 index 0000000000000..d15f62bfb6dca --- /dev/null +++ b/compiler/rustc_codegen_gcc/tests/hello-world/mylib/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "mylib" +version = "0.1.0" +authors = ["Antoni Boucher "] +edition = "2018" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] diff --git a/compiler/rustc_codegen_gcc/tests/hello-world/mylib/src/lib.rs b/compiler/rustc_codegen_gcc/tests/hello-world/mylib/src/lib.rs new file mode 100644 index 0000000000000..8d3d111bd1993 --- /dev/null +++ b/compiler/rustc_codegen_gcc/tests/hello-world/mylib/src/lib.rs @@ -0,0 +1,7 @@ +pub fn my_func(a: i32, b: i32) -> i32 { + let mut res = a; + for i in a..b { + res += i; + } + res +} diff --git a/compiler/rustc_codegen_gcc/tests/hello-world/src/main.rs b/compiler/rustc_codegen_gcc/tests/hello-world/src/main.rs index e7a11a969c037..71c78d364ac88 100644 --- a/compiler/rustc_codegen_gcc/tests/hello-world/src/main.rs +++ b/compiler/rustc_codegen_gcc/tests/hello-world/src/main.rs @@ -1,3 +1,5 @@ +use mylib::my_func; + fn main() { - println!("Hello, world!"); + println!("{}", my_func(5, 10)); } diff --git a/compiler/rustc_codegen_gcc/tests/lang_tests_common.rs b/compiler/rustc_codegen_gcc/tests/lang_tests_common.rs index aecea37ab5a91..64c932a265819 100644 --- a/compiler/rustc_codegen_gcc/tests/lang_tests_common.rs +++ b/compiler/rustc_codegen_gcc/tests/lang_tests_common.rs @@ -1,10 +1,8 @@ //! The common code for `tests/lang_tests_*.rs` -use std::{ - env::{self, current_dir}, - path::{Path, PathBuf}, - process::Command, -}; +use std::env::{self, current_dir}; +use std::path::{Path, PathBuf}; +use std::process::Command; use boml::Toml; use lang_tester::LangTester; @@ -22,14 +20,20 @@ pub fn main_inner(profile: Profile) { let tempdir = TempDir::new().expect("temp dir"); let current_dir = current_dir().expect("current dir"); let current_dir = current_dir.to_str().expect("current dir").to_string(); - let toml = Toml::parse(include_str!("../config.toml")).expect("Failed to parse `config.toml`"); - let gcc_path = if let Ok(gcc_path) = toml.get_string("gcc-path") { - PathBuf::from(gcc_path.to_string()) - } else { - // then we try to retrieve it from the `target` folder. - let commit = include_str!("../libgccjit.version").trim(); - Path::new("build/libgccjit").join(commit) - }; + + let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR")); + + let gcc_path = std::fs::read_to_string(manifest_dir.join("config.toml")) + .ok() + .and_then(|v| { + let toml = Toml::parse(&v).expect("Failed to parse `config.toml`"); + toml.get_string("gcc-path").map(PathBuf::from).ok() + }) + .unwrap_or_else(|| { + // then we try to retrieve it from the `target` folder. 
+ let commit = include_str!("../libgccjit.version").trim(); + Path::new("build/libgccjit").join(commit) + }); let gcc_path = Path::new(&gcc_path) .canonicalize() @@ -83,6 +87,8 @@ pub fn main_inner(profile: Profile) { &format!("{}/build/build_sysroot/sysroot/", current_dir), "-C", "link-arg=-lc", + "--extern", + "mini_core=target/out/libmini_core.rlib", "-o", exe.to_str().expect("to_str"), path.to_str().expect("to_str"), diff --git a/compiler/rustc_codegen_gcc/tests/run/array.rs b/compiler/rustc_codegen_gcc/tests/run/array.rs index d8de9f28d4cb9..c3c08c29c6dba 100644 --- a/compiler/rustc_codegen_gcc/tests/run/array.rs +++ b/compiler/rustc_codegen_gcc/tests/run/array.rs @@ -7,38 +7,12 @@ // 5 // 10 -#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs)] -#![allow(internal_features)] +#![feature(no_core, start)] #![no_std] #![no_core] -/* - * Core - */ - -// Because we don't have core yet. -#[lang = "sized"] -pub trait Sized {} - -#[lang = "copy"] -trait Copy { -} - -impl Copy for isize {} -impl Copy for usize {} -impl Copy for i32 {} -impl Copy for u8 {} -impl Copy for i8 {} -impl Copy for i16 {} -impl Copy for *mut T {} - -#[lang = "receiver"] -trait Receiver { -} - -#[lang = "freeze"] -pub(crate) unsafe auto trait Freeze {} +extern crate mini_core; mod libc { #[link(name = "c")] @@ -48,182 +22,6 @@ mod libc { } } -#[lang = "index"] -pub trait Index { - type Output: ?Sized; - fn index(&self, index: Idx) -> &Self::Output; -} - -impl Index for [T; 3] { - type Output = T; - - fn index(&self, index: usize) -> &Self::Output { - &self[index] - } -} - -impl Index for [T] { - type Output = T; - - fn index(&self, index: usize) -> &Self::Output { - &self[index] - } -} - -#[lang = "drop_in_place"] -#[allow(unconditional_recursion)] -pub unsafe fn drop_in_place(to_drop: *mut T) { - // Code here does not matter - this is replaced by the - // real drop glue by the compiler. - drop_in_place(to_drop); -} - -#[lang = "panic"] -#[track_caller] -#[no_mangle] -pub fn panic(_msg: &'static str) -> ! { - unsafe { - libc::puts("Panicking\0" as *const str as *const u8); - intrinsics::abort(); - } -} - -#[lang = "panic_location"] -struct PanicLocation { - file: &'static str, - line: u32, - column: u32, -} - -#[lang = "panic_bounds_check"] -#[track_caller] -#[no_mangle] -fn panic_bounds_check(index: usize, len: usize) -> ! { - unsafe { - libc::printf("index out of bounds: the len is %d but the index is %d\n\0" as *const str as *const i8, len, index); - intrinsics::abort(); - } -} - -mod intrinsics { - #[rustc_nounwind] - #[rustc_intrinsic] - #[rustc_intrinsic_must_be_overridden] - pub fn abort() -> ! 
{ - loop {} - } -} - -#[lang = "add"] -trait Add { - type Output; - - fn add(self, rhs: RHS) -> Self::Output; -} - -impl Add for u8 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for i8 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for i32 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for usize { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for isize { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -#[lang = "sub"] -pub trait Sub { - type Output; - - fn sub(self, rhs: RHS) -> Self::Output; -} - -impl Sub for usize { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self - rhs - } -} - -impl Sub for isize { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self - rhs - } -} - -impl Sub for u8 { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self - rhs - } -} - -impl Sub for i8 { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self - rhs - } -} - -impl Sub for i16 { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self - rhs - } -} - -#[track_caller] -#[lang = "panic_const_add_overflow"] -pub fn panic_const_add_overflow() -> ! { - panic("attempt to add with overflow"); -} - -#[track_caller] -#[lang = "panic_const_sub_overflow"] -pub fn panic_const_sub_overflow() -> ! { - panic("attempt to subtract with overflow"); -} - -/* - * Code - */ - static mut ONE: usize = 1; fn make_array() -> [u8; 3] { diff --git a/compiler/rustc_codegen_gcc/tests/run/closure.rs b/compiler/rustc_codegen_gcc/tests/run/closure.rs index b0d0ca4ee8dfa..46c47bc54ed01 100644 --- a/compiler/rustc_codegen_gcc/tests/run/closure.rs +++ b/compiler/rustc_codegen_gcc/tests/run/closure.rs @@ -8,200 +8,20 @@ // Int argument: 2 // Both args: 11 -#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics, - unboxed_closures, rustc_attrs)] -#![allow(internal_features)] +#![feature(no_core, start)] #![no_std] #![no_core] -/* - * Core - */ - -// Because we don't have core yet. -#[lang = "sized"] -pub trait Sized {} - -#[lang = "copy"] -trait Copy { -} - -impl Copy for isize {} -impl Copy for usize {} -impl Copy for i32 {} -impl Copy for u32 {} -impl Copy for u8 {} -impl Copy for i8 {} -impl Copy for *mut T {} - -#[lang = "receiver"] -trait Receiver { -} - -#[lang = "freeze"] -pub(crate) unsafe auto trait Freeze {} +extern crate mini_core; mod libc { #[link(name = "c")] extern "C" { - pub fn puts(s: *const u8) -> i32; pub fn printf(format: *const i8, ...) -> i32; } } -#[lang = "index"] -pub trait Index { - type Output: ?Sized; - fn index(&self, index: Idx) -> &Self::Output; -} - -impl Index for [T; 3] { - type Output = T; - - fn index(&self, index: usize) -> &Self::Output { - &self[index] - } -} - -impl Index for [T] { - type Output = T; - - fn index(&self, index: usize) -> &Self::Output { - &self[index] - } -} - -#[lang = "drop_in_place"] -#[allow(unconditional_recursion)] -pub unsafe fn drop_in_place(to_drop: *mut T) { - // Code here does not matter - this is replaced by the - // real drop glue by the compiler. - drop_in_place(to_drop); -} - -#[lang = "panic_location"] -struct PanicLocation { - file: &'static str, - line: u32, - column: u32, -} - -#[lang = "panic_bounds_check"] -#[track_caller] -#[no_mangle] -fn panic_bounds_check(index: usize, len: usize) -> ! 
{ - unsafe { - libc::printf("index out of bounds: the len is %d but the index is %d\n\0" as *const str as *const i8, len, index); - intrinsics::abort(); - } -} - -mod intrinsics { - #[rustc_nounwind] - #[rustc_intrinsic] - #[rustc_intrinsic_must_be_overridden] - pub fn abort() -> ! { - loop {} - } -} - -#[lang = "tuple_trait"] -pub trait Tuple {} - -#[lang = "unsize"] -pub trait Unsize {} - -#[lang = "coerce_unsized"] -pub trait CoerceUnsized {} - -impl<'a, 'b: 'a, T: ?Sized + Unsize, U: ?Sized> CoerceUnsized<&'a U> for &'b T {} -impl<'a, T: ?Sized + Unsize, U: ?Sized> CoerceUnsized<&'a mut U> for &'a mut T {} -impl, U: ?Sized> CoerceUnsized<*const U> for *const T {} -impl, U: ?Sized> CoerceUnsized<*mut U> for *mut T {} - -#[lang = "fn_once"] -#[rustc_paren_sugar] -pub trait FnOnce { - #[lang = "fn_once_output"] - type Output; - - extern "rust-call" fn call_once(self, args: Args) -> Self::Output; -} - -#[lang = "fn_mut"] -#[rustc_paren_sugar] -pub trait FnMut: FnOnce { - extern "rust-call" fn call_mut(&mut self, args: Args) -> Self::Output; -} - -#[lang = "add"] -trait Add { - type Output; - - fn add(self, rhs: RHS) -> Self::Output; -} - -impl Add for u8 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for i8 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for i32 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for usize { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for isize { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -#[lang = "panic"] -#[track_caller] -#[no_mangle] -pub fn panic(_msg: &'static str) -> ! { - unsafe { - libc::puts("Panicking\0" as *const str as *const u8); - intrinsics::abort(); - } -} - -#[track_caller] -#[lang = "panic_const_add_overflow"] -pub fn panic_const_add_overflow() -> ! { - panic("attempt to add with overflow"); -} - -/* - * Code - */ - #[start] fn main(mut argc: isize, _argv: *const *const u8) -> isize { let string = "Arg: %d\n\0"; diff --git a/compiler/rustc_codegen_gcc/tests/run/condition.rs b/compiler/rustc_codegen_gcc/tests/run/condition.rs index 770b18a89e3e0..039ef94eaa717 100644 --- a/compiler/rustc_codegen_gcc/tests/run/condition.rs +++ b/compiler/rustc_codegen_gcc/tests/run/condition.rs @@ -5,304 +5,20 @@ // stdout: true // 1 -#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs)] -#![allow(internal_features)] +#![feature(no_core, start)] #![no_std] #![no_core] -/* - * Core - */ - -// Because we don't have core yet. -#[lang = "sized"] -pub trait Sized {} - -#[lang = "copy"] -trait Copy { -} - -impl Copy for isize {} -impl Copy for usize {} -impl Copy for u64 {} -impl Copy for i32 {} -impl Copy for u32 {} -impl Copy for bool {} -impl Copy for u16 {} -impl Copy for i16 {} -impl Copy for char {} -impl Copy for i8 {} -impl Copy for u8 {} -impl Copy for *mut T {} - -#[lang = "receiver"] -trait Receiver { -} - -#[lang = "freeze"] -pub(crate) unsafe auto trait Freeze {} +extern crate mini_core; mod libc { #[link(name = "c")] extern "C" { pub fn printf(format: *const i8, ...) 
-> i32; - pub fn puts(s: *const u8) -> i32; - } -} - -#[lang = "index"] -pub trait Index { - type Output: ?Sized; - fn index(&self, index: Idx) -> &Self::Output; -} - -impl Index for [T; 3] { - type Output = T; - - fn index(&self, index: usize) -> &Self::Output { - &self[index] - } -} - -impl Index for [T] { - type Output = T; - - fn index(&self, index: usize) -> &Self::Output { - &self[index] - } -} - -#[lang = "drop_in_place"] -#[allow(unconditional_recursion)] -pub unsafe fn drop_in_place(to_drop: *mut T) { - // Code here does not matter - this is replaced by the - // real drop glue by the compiler. - drop_in_place(to_drop); -} - -#[lang = "panic"] -#[track_caller] -#[no_mangle] -pub fn panic(_msg: &'static str) -> ! { - unsafe { - libc::puts("Panicking\0" as *const str as *const u8); - intrinsics::abort(); - } -} - -#[lang = "panic_location"] -struct PanicLocation { - file: &'static str, - line: u32, - column: u32, -} - -#[lang = "panic_bounds_check"] -#[track_caller] -#[no_mangle] -fn panic_bounds_check(index: usize, len: usize) -> ! { - unsafe { - libc::printf("index out of bounds: the len is %d but the index is %d\n\0" as *const str as *const i8, len, index); - intrinsics::abort(); - } -} - -mod intrinsics { - #[rustc_nounwind] - #[rustc_intrinsic] - #[rustc_intrinsic_must_be_overridden] - pub fn abort() -> ! { - loop {} - } -} - -#[lang = "add"] -trait Add { - type Output; - - fn add(self, rhs: RHS) -> Self::Output; -} - -impl Add for u8 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for i8 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for i32 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for usize { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for isize { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -#[lang = "sub"] -pub trait Sub { - type Output; - - fn sub(self, rhs: RHS) -> Self::Output; -} - -impl Sub for usize { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self - rhs - } -} - -impl Sub for isize { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self - rhs - } -} - -impl Sub for u8 { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self - rhs - } -} - -impl Sub for i8 { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self - rhs } } -impl Sub for i16 { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self - rhs - } -} - -#[lang = "eq"] -pub trait PartialEq { - fn eq(&self, other: &Rhs) -> bool; - fn ne(&self, other: &Rhs) -> bool; -} - -impl PartialEq for u8 { - fn eq(&self, other: &u8) -> bool { - (*self) == (*other) - } - fn ne(&self, other: &u8) -> bool { - (*self) != (*other) - } -} - -impl PartialEq for u16 { - fn eq(&self, other: &u16) -> bool { - (*self) == (*other) - } - fn ne(&self, other: &u16) -> bool { - (*self) != (*other) - } -} - -impl PartialEq for u32 { - fn eq(&self, other: &u32) -> bool { - (*self) == (*other) - } - fn ne(&self, other: &u32) -> bool { - (*self) != (*other) - } -} - - -impl PartialEq for u64 { - fn eq(&self, other: &u64) -> bool { - (*self) == (*other) - } - fn ne(&self, other: &u64) -> bool { - (*self) != (*other) - } -} - -impl PartialEq for usize { - fn eq(&self, other: &usize) -> bool { - (*self) == (*other) - } - fn ne(&self, other: &usize) -> bool { - (*self) != (*other) - } -} - -impl PartialEq for i8 { - fn eq(&self, other: &i8) -> bool { - 
(*self) == (*other) - } - fn ne(&self, other: &i8) -> bool { - (*self) != (*other) - } -} - -impl PartialEq for i32 { - fn eq(&self, other: &i32) -> bool { - (*self) == (*other) - } - fn ne(&self, other: &i32) -> bool { - (*self) != (*other) - } -} - -impl PartialEq for isize { - fn eq(&self, other: &isize) -> bool { - (*self) == (*other) - } - fn ne(&self, other: &isize) -> bool { - (*self) != (*other) - } -} - -impl PartialEq for char { - fn eq(&self, other: &char) -> bool { - (*self) == (*other) - } - fn ne(&self, other: &char) -> bool { - (*self) != (*other) - } -} - -/* - * Code - */ - #[start] fn main(argc: isize, _argv: *const *const u8) -> isize { unsafe { diff --git a/compiler/rustc_codegen_gcc/tests/run/fun_ptr.rs b/compiler/rustc_codegen_gcc/tests/run/fun_ptr.rs index 523544ee6bbbb..ed1bf72bb2754 100644 --- a/compiler/rustc_codegen_gcc/tests/run/fun_ptr.rs +++ b/compiler/rustc_codegen_gcc/tests/run/fun_ptr.rs @@ -4,212 +4,20 @@ // status: 0 // stdout: 1 -#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs)] -#![allow(internal_features)] +#![feature(no_core, start)] #![no_std] #![no_core] -/* - * Core - */ - -// Because we don't have core yet. -#[lang = "sized"] -pub trait Sized {} - -#[lang = "copy"] -trait Copy { -} - -impl Copy for isize {} -impl Copy for usize {} -impl Copy for i32 {} -impl Copy for u8 {} -impl Copy for i8 {} -impl Copy for i16 {} -impl Copy for *mut T {} - -#[lang = "receiver"] -trait Receiver { -} - -#[lang = "freeze"] -pub(crate) unsafe auto trait Freeze {} +extern crate mini_core; mod libc { #[link(name = "c")] extern "C" { pub fn printf(format: *const i8, ...) -> i32; - pub fn puts(s: *const u8) -> i32; - } -} - -#[lang = "index"] -pub trait Index { - type Output: ?Sized; - fn index(&self, index: Idx) -> &Self::Output; -} - -impl Index for [T; 3] { - type Output = T; - - fn index(&self, index: usize) -> &Self::Output { - &self[index] - } -} - -impl Index for [T] { - type Output = T; - - fn index(&self, index: usize) -> &Self::Output { - &self[index] - } -} - -#[lang = "drop_in_place"] -#[allow(unconditional_recursion)] -pub unsafe fn drop_in_place(to_drop: *mut T) { - // Code here does not matter - this is replaced by the - // real drop glue by the compiler. - drop_in_place(to_drop); -} - -#[lang = "panic"] -#[track_caller] -#[no_mangle] -pub fn panic(_msg: &'static str) -> ! { - unsafe { - libc::puts("Panicking\0" as *const str as *const u8); - intrinsics::abort(); - } -} - -#[lang = "panic_location"] -struct PanicLocation { - file: &'static str, - line: u32, - column: u32, -} - -#[lang = "panic_bounds_check"] -#[track_caller] -#[no_mangle] -fn panic_bounds_check(index: usize, len: usize) -> ! { - unsafe { - libc::printf("index out of bounds: the len is %d but the index is %d\n\0" as *const str as *const i8, len, index); - intrinsics::abort(); - } -} - -mod intrinsics { - #[rustc_nounwind] - #[rustc_intrinsic] - #[rustc_intrinsic_must_be_overridden] - pub fn abort() -> ! 
{ - loop {} - } -} - -#[lang = "add"] -trait Add { - type Output; - - fn add(self, rhs: RHS) -> Self::Output; -} - -impl Add for u8 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for i8 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for i32 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for usize { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for isize { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -#[lang = "sub"] -pub trait Sub { - type Output; - - fn sub(self, rhs: RHS) -> Self::Output; -} - -impl Sub for usize { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self - rhs - } -} - -impl Sub for isize { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self - rhs - } -} - -impl Sub for u8 { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self - rhs } } -impl Sub for i8 { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self - rhs - } -} - -impl Sub for i16 { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self - rhs - } -} - - -/* - * Code - */ - fn i16_as_i8(a: i16) -> i8 { a as i8 } diff --git a/compiler/rustc_codegen_gcc/tests/run/operations.rs b/compiler/rustc_codegen_gcc/tests/run/operations.rs index 2e3c021d5f772..0e44fc580b8c4 100644 --- a/compiler/rustc_codegen_gcc/tests/run/operations.rs +++ b/compiler/rustc_codegen_gcc/tests/run/operations.rs @@ -38,8 +38,8 @@ pub trait Deref { fn deref(&self) -> &Self::Target; } -#[lang = "receiver"] -trait Receiver { +#[lang = "legacy_receiver"] +trait LegacyReceiver { } #[lang = "freeze"] diff --git a/compiler/rustc_codegen_gcc/tests/run/ptr_cast.rs b/compiler/rustc_codegen_gcc/tests/run/ptr_cast.rs index c7510d16449ef..2b8812ad51c5e 100644 --- a/compiler/rustc_codegen_gcc/tests/run/ptr_cast.rs +++ b/compiler/rustc_codegen_gcc/tests/run/ptr_cast.rs @@ -4,212 +4,20 @@ // status: 0 // stdout: 1 -#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs)] -#![allow(internal_features)] +#![feature(no_core, start)] #![no_std] #![no_core] -/* - * Core - */ - -// Because we don't have core yet. -#[lang = "sized"] -pub trait Sized {} - -#[lang = "copy"] -trait Copy { -} - -impl Copy for isize {} -impl Copy for usize {} -impl Copy for i32 {} -impl Copy for u8 {} -impl Copy for i8 {} -impl Copy for i16 {} -impl Copy for *mut T {} - -#[lang = "receiver"] -trait Receiver { -} - -#[lang = "freeze"] -pub(crate) unsafe auto trait Freeze {} +extern crate mini_core; mod libc { #[link(name = "c")] extern "C" { pub fn printf(format: *const i8, ...) -> i32; - pub fn puts(s: *const u8) -> i32; - } -} - -#[lang = "index"] -pub trait Index { - type Output: ?Sized; - fn index(&self, index: Idx) -> &Self::Output; -} - -impl Index for [T; 3] { - type Output = T; - - fn index(&self, index: usize) -> &Self::Output { - &self[index] - } -} - -impl Index for [T] { - type Output = T; - - fn index(&self, index: usize) -> &Self::Output { - &self[index] - } -} - -#[lang = "drop_in_place"] -#[allow(unconditional_recursion)] -pub unsafe fn drop_in_place(to_drop: *mut T) { - // Code here does not matter - this is replaced by the - // real drop glue by the compiler. - drop_in_place(to_drop); -} - -#[lang = "panic"] -#[track_caller] -#[no_mangle] -pub fn panic(_msg: &'static str) -> ! 
{ - unsafe { - libc::puts("Panicking\0" as *const str as *const u8); - intrinsics::abort(); - } -} - -#[lang = "panic_location"] -struct PanicLocation { - file: &'static str, - line: u32, - column: u32, -} - -#[lang = "panic_bounds_check"] -#[track_caller] -#[no_mangle] -fn panic_bounds_check(index: usize, len: usize) -> ! { - unsafe { - libc::printf("index out of bounds: the len is %d but the index is %d\n\0" as *const str as *const i8, len, index); - intrinsics::abort(); - } -} - -mod intrinsics { - #[rustc_nounwind] - #[rustc_intrinsic] - #[rustc_intrinsic_must_be_overridden] - pub fn abort() -> ! { - loop {} - } -} - -#[lang = "add"] -trait Add { - type Output; - - fn add(self, rhs: RHS) -> Self::Output; -} - -impl Add for u8 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for i8 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for i32 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for usize { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for isize { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -#[lang = "sub"] -pub trait Sub { - type Output; - - fn sub(self, rhs: RHS) -> Self::Output; -} - -impl Sub for usize { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self - rhs - } -} - -impl Sub for isize { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self - rhs - } -} - -impl Sub for u8 { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self - rhs } } -impl Sub for i8 { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self - rhs - } -} - -impl Sub for i16 { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self - rhs - } -} - - -/* - * Code - */ - static mut ONE: usize = 1; fn make_array() -> [u8; 3] { diff --git a/compiler/rustc_codegen_gcc/tests/run/return-tuple.rs b/compiler/rustc_codegen_gcc/tests/run/return-tuple.rs index 8d40deb8c85ef..f2a5a2e4384df 100644 --- a/compiler/rustc_codegen_gcc/tests/run/return-tuple.rs +++ b/compiler/rustc_codegen_gcc/tests/run/return-tuple.rs @@ -15,18 +15,18 @@ #[lang = "copy"] pub unsafe trait Copy {} -unsafe impl Copy for bool {} -unsafe impl Copy for u8 {} -unsafe impl Copy for u16 {} -unsafe impl Copy for u32 {} -unsafe impl Copy for u64 {} -unsafe impl Copy for usize {} -unsafe impl Copy for i8 {} -unsafe impl Copy for i16 {} -unsafe impl Copy for i32 {} -unsafe impl Copy for isize {} -unsafe impl Copy for f32 {} -unsafe impl Copy for char {} +impl Copy for bool {} +impl Copy for u8 {} +impl Copy for u16 {} +impl Copy for u32 {} +impl Copy for u64 {} +impl Copy for usize {} +impl Copy for i8 {} +impl Copy for i16 {} +impl Copy for i32 {} +impl Copy for isize {} +impl Copy for f32 {} +impl Copy for char {} mod libc { #[link(name = "c")] @@ -43,8 +43,8 @@ mod libc { #[lang = "sized"] pub trait Sized {} -#[lang = "receiver"] -trait Receiver { +#[lang = "legacy_receiver"] +trait LegacyReceiver { } #[lang = "freeze"] diff --git a/compiler/rustc_codegen_gcc/tests/run/slice.rs b/compiler/rustc_codegen_gcc/tests/run/slice.rs index 35ad594ecdea3..fba93fc155495 100644 --- a/compiler/rustc_codegen_gcc/tests/run/slice.rs +++ b/compiler/rustc_codegen_gcc/tests/run/slice.rs @@ -4,36 +4,12 @@ // status: 0 // stdout: 5 -#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs)] -#![allow(internal_features)] +#![feature(no_core, start)] 
#![no_std] #![no_core] -/* - * Core - */ - -// Because we don't have core yet. -#[lang = "sized"] -pub trait Sized {} - -#[lang = "copy"] -trait Copy { -} - -impl Copy for isize {} -impl Copy for usize {} -impl Copy for i32 {} -impl Copy for u32 {} -impl Copy for *mut T {} - -#[lang = "receiver"] -trait Receiver { -} - -#[lang = "freeze"] -pub(crate) unsafe auto trait Freeze {} +extern crate mini_core; mod libc { #[link(name = "c")] @@ -42,79 +18,6 @@ mod libc { } } -#[lang = "index"] -pub trait Index { - type Output: ?Sized; - fn index(&self, index: Idx) -> &Self::Output; -} - -impl Index for [T; 3] { - type Output = T; - - fn index(&self, index: usize) -> &Self::Output { - &self[index] - } -} - -impl Index for [T] { - type Output = T; - - fn index(&self, index: usize) -> &Self::Output { - &self[index] - } -} - -#[lang = "unsize"] -pub trait Unsize {} - -#[lang = "coerce_unsized"] -pub trait CoerceUnsized {} - -impl<'a, 'b: 'a, T: ?Sized + Unsize, U: ?Sized> CoerceUnsized<&'a U> for &'b T {} -impl<'a, T: ?Sized + Unsize, U: ?Sized> CoerceUnsized<&'a mut U> for &'a mut T {} -impl, U: ?Sized> CoerceUnsized<*const U> for *const T {} -impl, U: ?Sized> CoerceUnsized<*mut U> for *mut T {} - -#[lang = "drop_in_place"] -#[allow(unconditional_recursion)] -pub unsafe fn drop_in_place(to_drop: *mut T) { - // Code here does not matter - this is replaced by the - // real drop glue by the compiler. - drop_in_place(to_drop); -} - -#[lang = "panic_location"] -struct PanicLocation { - file: &'static str, - line: u32, - column: u32, -} - -#[lang = "panic_bounds_check"] -#[track_caller] -#[no_mangle] -fn panic_bounds_check(index: usize, len: usize) -> ! { - unsafe { - libc::printf("index out of bounds: the len is %d but the index is %d\n\0" as *const str as *const i8, len, index); - intrinsics::abort(); - } -} - -mod intrinsics { - use super::Sized; - - #[rustc_nounwind] - #[rustc_intrinsic] - #[rustc_intrinsic_must_be_overridden] - pub fn abort() -> ! 
{ - loop {} - } -} - -/* - * Code - */ - static mut TWO: usize = 2; fn index_slice(s: &[u32]) -> u32 { diff --git a/compiler/rustc_codegen_gcc/tests/run/volatile2.rs b/compiler/rustc_codegen_gcc/tests/run/volatile2.rs new file mode 100644 index 0000000000000..a177b817ab35a --- /dev/null +++ b/compiler/rustc_codegen_gcc/tests/run/volatile2.rs @@ -0,0 +1,113 @@ +// Compiler: +// +// Run-time: +// status: 0 + +mod libc { + #[link(name = "c")] + extern "C" { + pub fn puts(s: *const u8) -> i32; + + pub fn sigaction(signum: i32, act: *const sigaction, oldact: *mut sigaction) -> i32; + pub fn mmap(addr: *mut (), len: usize, prot: i32, flags: i32, fd: i32, offset: i64) -> *mut (); + pub fn mprotect(addr: *mut (), len: usize, prot: i32) -> i32; + } + + pub const PROT_READ: i32 = 1; + pub const PROT_WRITE: i32 = 2; + pub const MAP_PRIVATE: i32 = 0x0002; + pub const MAP_ANONYMOUS: i32 = 0x0020; + pub const MAP_FAILED: *mut u8 = !0 as *mut u8; + + /// glibc sigaction + #[repr(C)] + pub struct sigaction { + pub sa_sigaction: Option, + pub sa_mask: [u32; 32], + pub sa_flags: i32, + pub sa_restorer: Option, + } + + pub const SA_SIGINFO: i32 = 0x00000004; + pub const SIGSEGV: i32 = 11; +} + +static mut COUNT: u32 = 0; +static mut STORAGE: *mut u8 = core::ptr::null_mut(); +const PAGE_SIZE: usize = 1 << 15; + +fn main() { + unsafe { + // Register a segfault handler + libc::sigaction( + libc::SIGSEGV, + &libc::sigaction { + sa_sigaction: Some(segv_handler), + sa_flags: libc::SA_SIGINFO, + ..core::mem::zeroed() + }, + core::ptr::null_mut(), + ); + + STORAGE = libc::mmap( + core::ptr::null_mut(), + PAGE_SIZE * 2, + 0, + libc::MAP_PRIVATE | libc::MAP_ANONYMOUS, + -1, + 0, + ).cast(); + if STORAGE == libc::MAP_FAILED { + panic!("error: mmap failed"); + } + + let p_count = (&mut COUNT) as *mut u32; + p_count.write_volatile(0); + + // Trigger segfaults + STORAGE.add(0).write_volatile(1); + STORAGE.add(PAGE_SIZE).write_volatile(1); + STORAGE.add(0).write_volatile(1); + STORAGE.add(PAGE_SIZE).write_volatile(1); + STORAGE.add(0).write_volatile(1); + STORAGE.add(PAGE_SIZE).write_volatile(1); + STORAGE.add(0).read_volatile(); + STORAGE.add(PAGE_SIZE).read_volatile(); + STORAGE.add(0).read_volatile(); + STORAGE.add(PAGE_SIZE).read_volatile(); + STORAGE.add(0).read_volatile(); + STORAGE.add(PAGE_SIZE).read_volatile(); + STORAGE.add(0).write_volatile(1); + STORAGE.add(PAGE_SIZE).write_volatile(1); + + // The segfault handler should have been called for every `write_volatile` and + // `read_volatile` in `STORAGE`. If the compiler ignores volatility, some of these writes + // will be combined, causing a different number of segfaults. + // + // This `p_count` read is done by a volatile read. If the compiler + // ignores volatility, the compiler will speculate that `*p_count` is + // unchanged and remove this check, failing the test. 
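A minimal sketch, separate from the test itself, of the volatile guarantee that the count check below relies on: plain stores to the same location may be coalesced by the optimizer, while volatile accesses must each be performed exactly as written, which is what keeps the number of SIGSEGVs (one per access) deterministic.

fn plain_writes(p: *mut u8) {
    unsafe {
        *p = 1; // the optimizer may drop this store in favor of the next one
        *p = 2;
    }
}

fn volatile_writes(p: *mut u8) {
    unsafe {
        p.write_volatile(1); // must be performed
        p.write_volatile(2); // must also be performed
    }
}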
+ if p_count.read_volatile() != 14 { + panic!("error: segfault count mismatch: {}", p_count.read_volatile()); + } + } +} + +unsafe extern "C" fn segv_handler(_: i32, _: *mut (), _: *mut ()) { + let p_count = (&mut COUNT) as *mut u32; + p_count.write_volatile(p_count.read_volatile() + 1); + let count = p_count.read_volatile(); + + // Toggle the protected page so that the handler will be called for + // each `write_volatile` + libc::mprotect( + STORAGE.cast(), + PAGE_SIZE, + if count % 2 == 1 { libc::PROT_READ | libc::PROT_WRITE } else { 0 }, + ); + libc::mprotect( + STORAGE.add(PAGE_SIZE).cast(), + PAGE_SIZE, + if count % 2 == 0 { libc::PROT_READ | libc::PROT_WRITE } else { 0 }, + ); +} diff --git a/compiler/rustc_codegen_llvm/Cargo.toml b/compiler/rustc_codegen_llvm/Cargo.toml index 689986d642d90..c44d1a5e5c220 100644 --- a/compiler/rustc_codegen_llvm/Cargo.toml +++ b/compiler/rustc_codegen_llvm/Cargo.toml @@ -9,6 +9,7 @@ test = false [dependencies] # tidy-alphabetical-start bitflags = "2.4.1" +gimli = "0.30" itertools = "0.12" libc = "0.2" measureme = "11" diff --git a/compiler/rustc_codegen_llvm/messages.ftl b/compiler/rustc_codegen_llvm/messages.ftl index 63c64269eb805..9585848cbf033 100644 --- a/compiler/rustc_codegen_llvm/messages.ftl +++ b/compiler/rustc_codegen_llvm/messages.ftl @@ -1,3 +1,5 @@ +codegen_llvm_autodiff_without_lto = using the autodiff feature requires using fat-lto + codegen_llvm_copy_bitcode = failed to copy bitcode to object file: {$err} codegen_llvm_dynamic_linking_with_lto = @@ -8,7 +10,7 @@ codegen_llvm_dynamic_linking_with_lto = codegen_llvm_fixed_x18_invalid_arch = the `-Zfixed-x18` flag is not supported on the `{$arch}` architecture codegen_llvm_forbidden_ctarget_feature = - target feature `{$feature}` cannot be toggled with `-Ctarget-feature`: {$reason} + target feature `{$feature}` cannot be {$enabled} with `-Ctarget-feature`: {$reason} .note = this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! 
codegen_llvm_forbidden_ctarget_feature_issue = for more information, see issue #116344 @@ -22,8 +24,6 @@ codegen_llvm_invalid_minimum_alignment_not_power_of_two = codegen_llvm_invalid_minimum_alignment_too_large = invalid minimum global alignment: {$align} is too large -codegen_llvm_invalid_target_feature_prefix = target feature `{$feature}` must begin with a `+` or `-`" - codegen_llvm_load_bitcode = failed to load bitcode of module "{$name}" codegen_llvm_load_bitcode_with_llvm_err = failed to load bitcode of module "{$name}": {$llvm_err} @@ -47,6 +47,8 @@ codegen_llvm_parse_bitcode_with_llvm_err = failed to parse bitcode for LTO modul codegen_llvm_parse_target_machine_config = failed to parse target machine config to target machine: {$error} +codegen_llvm_prepare_autodiff = failed to prepare autodiff: src: {$src}, target: {$target}, {$error} +codegen_llvm_prepare_autodiff_with_llvm_err = failed to prepare autodiff: {$llvm_err}, src: {$src}, target: {$target}, {$error} codegen_llvm_prepare_thin_lto_context = failed to prepare thin LTO context codegen_llvm_prepare_thin_lto_context_with_llvm_err = failed to prepare thin LTO context: {$llvm_err} diff --git a/compiler/rustc_codegen_llvm/src/abi.rs b/compiler/rustc_codegen_llvm/src/abi.rs index 1d35138b01348..31ee0eeca11fa 100644 --- a/compiler/rustc_codegen_llvm/src/abi.rs +++ b/compiler/rustc_codegen_llvm/src/abi.rs @@ -1,3 +1,4 @@ +use std::borrow::Borrow; use std::cmp; use libc::c_uint; @@ -312,7 +313,7 @@ impl<'ll, 'tcx> ArgAbiBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> { pub(crate) trait FnAbiLlvmExt<'ll, 'tcx> { fn llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type; fn ptr_to_llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type; - fn llvm_cconv(&self) -> llvm::CallConv; + fn llvm_cconv(&self, cx: &CodegenCx<'ll, 'tcx>) -> llvm::CallConv; /// Apply attributes to a function declaration/definition. 
fn apply_attrs_llfn( @@ -404,8 +405,8 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { cx.type_ptr_ext(cx.data_layout().instruction_address_space) } - fn llvm_cconv(&self) -> llvm::CallConv { - self.conv.into() + fn llvm_cconv(&self, cx: &CodegenCx<'ll, 'tcx>) -> llvm::CallConv { + llvm::CallConv::from_conv(self.conv, cx.tcx.sess.target.arch.borrow()) } fn apply_attrs_llfn( @@ -617,7 +618,7 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { } } - let cconv = self.llvm_cconv(); + let cconv = self.llvm_cconv(&bx.cx); if cconv != llvm::CCallConv { llvm::SetInstructionCallConv(callsite, cconv); } @@ -655,8 +656,8 @@ impl<'tcx> AbiBuilderMethods<'tcx> for Builder<'_, '_, 'tcx> { } } -impl From for llvm::CallConv { - fn from(conv: Conv) -> Self { +impl llvm::CallConv { + pub fn from_conv(conv: Conv, arch: &str) -> Self { match conv { Conv::C | Conv::Rust @@ -666,6 +667,15 @@ impl From for llvm::CallConv { Conv::Cold => llvm::ColdCallConv, Conv::PreserveMost => llvm::PreserveMost, Conv::PreserveAll => llvm::PreserveAll, + Conv::GpuKernel => { + if arch == "amdgpu" { + llvm::AmdgpuKernel + } else if arch == "nvptx64" { + llvm::PtxKernel + } else { + panic!("Architecture {arch} does not support GpuKernel calling convention"); + } + } Conv::AvrInterrupt => llvm::AvrInterrupt, Conv::AvrNonBlockingInterrupt => llvm::AvrNonBlockingInterrupt, Conv::ArmAapcs => llvm::ArmAapcsCallConv, diff --git a/compiler/rustc_codegen_llvm/src/attributes.rs b/compiler/rustc_codegen_llvm/src/attributes.rs index f8454fd9960b6..95e0481b035e2 100644 --- a/compiler/rustc_codegen_llvm/src/attributes.rs +++ b/compiler/rustc_codegen_llvm/src/attributes.rs @@ -37,7 +37,9 @@ fn inline_attr<'ll>(cx: &CodegenCx<'ll, '_>, inline: InlineAttr) -> Option<&'ll } match inline { InlineAttr::Hint => Some(AttributeKind::InlineHint.create_attr(cx.llcx)), - InlineAttr::Always => Some(AttributeKind::AlwaysInline.create_attr(cx.llcx)), + InlineAttr::Always | InlineAttr::Force { .. } => { + Some(AttributeKind::AlwaysInline.create_attr(cx.llcx)) + } InlineAttr::Never => { if cx.sess().target.arch != "amdgpu" { Some(AttributeKind::NoInline.create_attr(cx.llcx)) @@ -472,7 +474,11 @@ pub(crate) fn llfn_attrs_from_instance<'ll, 'tcx>( let allocated_pointer = AttributeKind::AllocatedPointer.create_attr(cx.llcx); attributes::apply_to_llfn(llfn, AttributePlace::Argument(0), &[allocated_pointer]); } - if let Some(align) = codegen_fn_attrs.alignment { + // function alignment can be set globally with the `-Zmin-function-alignment=` flag; + // the alignment from a `#[repr(align())]` is used if it specifies a higher alignment. 
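A minimal sketch of how the two optional alignments combine in the `Ord::max` call below, with hypothetical numeric values standing in for `Align`: `Option` orders `None` below any `Some`, so whichever alignment is present wins, and if both are present the larger one wins.

let global: Option<u32> = Some(16); // e.g. -Zmin-function-alignment=16
let per_fn: Option<u32> = Some(64); // e.g. a larger per-function alignment
assert_eq!(Ord::max(global, per_fn), Some(64));
assert_eq!(Ord::max(None, per_fn), Some(64));
assert_eq!(Ord::max(global, None), Some(16));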
+ if let Some(align) = + Ord::max(cx.tcx.sess.opts.unstable_opts.min_function_alignment, codegen_fn_attrs.alignment) + { llvm::set_alignment(llfn, align); } if let Some(backchain) = backchain_attr(cx) { diff --git a/compiler/rustc_codegen_llvm/src/back/lto.rs b/compiler/rustc_codegen_llvm/src/back/lto.rs index f6d2ec24da6f2..78c759bbe8c03 100644 --- a/compiler/rustc_codegen_llvm/src/back/lto.rs +++ b/compiler/rustc_codegen_llvm/src/back/lto.rs @@ -1,7 +1,6 @@ use std::collections::BTreeMap; use std::ffi::{CStr, CString}; use std::fs::File; -use std::mem::ManuallyDrop; use std::path::Path; use std::sync::Arc; use std::{io, iter, slice}; @@ -9,7 +8,7 @@ use std::{io, iter, slice}; use object::read::archive::ArchiveFile; use rustc_codegen_ssa::back::lto::{LtoModuleCodegen, SerializedModule, ThinModule, ThinShared}; use rustc_codegen_ssa::back::symbol_export; -use rustc_codegen_ssa::back::write::{CodegenContext, FatLtoInput, TargetMachineFactoryConfig}; +use rustc_codegen_ssa::back::write::{CodegenContext, FatLtoInput}; use rustc_codegen_ssa::traits::*; use rustc_codegen_ssa::{ModuleCodegen, ModuleKind, looks_like_rust_object_file}; use rustc_data_structures::fx::FxHashMap; @@ -604,7 +603,14 @@ pub(crate) fn run_pass_manager( debug!("running the pass manager"); let opt_stage = if thin { llvm::OptStage::ThinLTO } else { llvm::OptStage::FatLTO }; let opt_level = config.opt_level.unwrap_or(config::OptLevel::No); - unsafe { write::llvm_optimize(cgcx, dcx, module, config, opt_level, opt_stage) }?; + + // If this rustc version was build with enzyme/autodiff enabled, and if users applied the + // `#[autodiff]` macro at least once, then we will later call llvm_optimize a second time. + let first_run = true; + debug!("running llvm pm opt pipeline"); + unsafe { + write::llvm_optimize(cgcx, dcx, module, config, opt_level, opt_stage, first_run)?; + } debug!("lto done"); Ok(()) } @@ -699,18 +705,15 @@ pub(crate) unsafe fn optimize_thin_module( let dcx = dcx.handle(); let module_name = &thin_module.shared.module_names[thin_module.idx]; - let tm_factory_config = TargetMachineFactoryConfig::new(cgcx, module_name.to_str().unwrap()); - let tm = (cgcx.tm_factory)(tm_factory_config).map_err(|e| write::llvm_err(dcx, e))?; // Right now the implementation we've got only works over serialized // modules, so we create a fresh new LLVM context and parse the module // into that context. One day, however, we may do this for upstream // crates but for locally codegened modules we may be able to reuse // that LLVM Context and Module. - let llcx = unsafe { llvm::LLVMRustContextCreate(cgcx.fewer_names) }; - let llmod_raw = parse_module(llcx, module_name, thin_module.data(), dcx)? 
as *const _; + let module_llvm = ModuleLlvm::parse(cgcx, module_name, thin_module.data(), dcx)?; let mut module = ModuleCodegen { - module_llvm: ModuleLlvm { llmod_raw, llcx, tm: ManuallyDrop::new(tm) }, + module_llvm, name: thin_module.name().to_string(), kind: ModuleKind::Regular, }; @@ -730,11 +733,7 @@ pub(crate) unsafe fn optimize_thin_module( { let _timer = cgcx.prof.generic_activity_with_arg("LLVM_thin_lto_rename", thin_module.name()); - if unsafe { - !llvm::LLVMRustPrepareThinLTORename(thin_module.shared.data.0, llmod, target) - } { - return Err(write::llvm_err(dcx, LlvmError::PrepareThinLtoModule)); - } + unsafe { llvm::LLVMRustPrepareThinLTORename(thin_module.shared.data.0, llmod, target) }; save_temp_bitcode(cgcx, &module, "thin-lto-after-rename"); } diff --git a/compiler/rustc_codegen_llvm/src/back/owned_target_machine.rs b/compiler/rustc_codegen_llvm/src/back/owned_target_machine.rs index 44c30d22a9e9c..4cbd49aa44d48 100644 --- a/compiler/rustc_codegen_llvm/src/back/owned_target_machine.rs +++ b/compiler/rustc_codegen_llvm/src/back/owned_target_machine.rs @@ -25,7 +25,7 @@ impl OwnedTargetMachine { model: llvm::CodeModel, reloc: llvm::RelocModel, level: llvm::CodeGenOptLevel, - use_soft_fp: bool, + float_abi: llvm::FloatAbi, function_sections: bool, data_sections: bool, unique_section_names: bool, @@ -57,7 +57,7 @@ impl OwnedTargetMachine { model, reloc, level, - use_soft_fp, + float_abi, function_sections, data_sections, unique_section_names, diff --git a/compiler/rustc_codegen_llvm/src/back/write.rs b/compiler/rustc_codegen_llvm/src/back/write.rs index 66ca4e2b4738c..509b24dd703a6 100644 --- a/compiler/rustc_codegen_llvm/src/back/write.rs +++ b/compiler/rustc_codegen_llvm/src/back/write.rs @@ -25,10 +25,9 @@ use rustc_session::Session; use rustc_session::config::{ self, Lto, OutputType, Passes, RemapPathScopeComponents, SplitDwarfKind, SwitchWithOptPath, }; -use rustc_span::symbol::sym; -use rustc_span::{BytePos, InnerSpan, Pos, SpanData, SyntaxContext}; -use rustc_target::spec::{CodeModel, RelocModel, SanitizerSet, SplitDebuginfo, TlsModel}; -use tracing::debug; +use rustc_span::{BytePos, InnerSpan, Pos, SpanData, SyntaxContext, sym}; +use rustc_target::spec::{CodeModel, FloatAbi, RelocModel, SanitizerSet, SplitDebuginfo, TlsModel}; +use tracing::{debug, trace}; use crate::back::lto::ThinBuffer; use crate::back::owned_target_machine::OwnedTargetMachine; @@ -182,6 +181,14 @@ pub(crate) fn to_llvm_code_model(code_model: Option) -> llvm::CodeMod } } +fn to_llvm_float_abi(float_abi: Option) -> llvm::FloatAbi { + match float_abi { + None => llvm::FloatAbi::Default, + Some(FloatAbi::Soft) => llvm::FloatAbi::Soft, + Some(FloatAbi::Hard) => llvm::FloatAbi::Hard, + } +} + pub(crate) fn target_machine_factory( sess: &Session, optlvl: config::OptLevel, @@ -190,12 +197,12 @@ pub(crate) fn target_machine_factory( let reloc_model = to_llvm_relocation_model(sess.relocation_model()); let (opt_level, _) = to_llvm_opt_settings(optlvl); - let use_softfp = if sess.target.arch == "arm" && sess.target.abi == "eabihf" { - sess.opts.cg.soft_float + let float_abi = if sess.target.arch == "arm" && sess.opts.cg.soft_float { + llvm::FloatAbi::Soft } else { // `validate_commandline_args_with_session_available` has already warned about this being // ignored. Let's make sure LLVM doesn't suddenly start using this flag on more targets. 
- false + to_llvm_float_abi(sess.target.llvm_floatabi) }; let ffunction_sections = @@ -291,7 +298,7 @@ pub(crate) fn target_machine_factory( code_model, reloc_model, opt_level, - use_softfp, + float_abi, ffunction_sections, fdata_sections, funique_section_names, @@ -530,9 +537,35 @@ pub(crate) unsafe fn llvm_optimize( config: &ModuleConfig, opt_level: config::OptLevel, opt_stage: llvm::OptStage, + skip_size_increasing_opts: bool, ) -> Result<(), FatalError> { - let unroll_loops = - opt_level != config::OptLevel::Size && opt_level != config::OptLevel::SizeMin; + // Enzyme: + // The whole point of compiler based AD is to differentiate optimized IR instead of unoptimized + // source code. However, benchmarks show that optimizations increasing the code size + // tend to reduce AD performance. Therefore deactivate them before AD, then differentiate the code + // and finally re-optimize the module, now with all optimizations available. + // FIXME(ZuseZ4): In a future update we could figure out how to only optimize individual functions getting + // differentiated. + + let unroll_loops; + let vectorize_slp; + let vectorize_loop; + + // When we build rustc with enzyme/autodiff support, we want to postpone size-increasing + // optimizations until after differentiation. FIXME(ZuseZ4): Before shipping on nightly, + // we should make this more granular, or at least check that the user has at least one autodiff + // call in their code, to justify altering the compilation pipeline. + if skip_size_increasing_opts && cfg!(llvm_enzyme) { + unroll_loops = false; + vectorize_slp = false; + vectorize_loop = false; + } else { + unroll_loops = + opt_level != config::OptLevel::Size && opt_level != config::OptLevel::SizeMin; + vectorize_slp = config.vectorize_slp; + vectorize_loop = config.vectorize_loop; + } + trace!(?unroll_loops, ?vectorize_slp, ?vectorize_loop); let using_thin_buffers = opt_stage == llvm::OptStage::PreLinkThinLTO || config.bitcode_needed(); let pgo_gen_path = get_pgo_gen_path(config); let pgo_use_path = get_pgo_use_path(config); @@ -596,8 +629,8 @@ pub(crate) unsafe fn llvm_optimize( using_thin_buffers, config.merge_functions, unroll_loops, - config.vectorize_slp, - config.vectorize_loop, + vectorize_slp, + vectorize_loop, config.no_builtins, config.emit_lifetime_markers, sanitizer_options.as_ref(), @@ -641,6 +674,8 @@ pub(crate) unsafe fn optimize( unsafe { llvm::LLVMWriteBitcodeToFile(llmod, out.as_ptr()) }; } + // FIXME(ZuseZ4): support SanitizeHWAddress and prevent illegal/unsupported opts + if let Some(opt_level) = config.opt_level { let opt_stage = match cgcx.lto { Lto::Fat => llvm::OptStage::PreLinkFatLTO, @@ -648,7 +683,20 @@ pub(crate) unsafe fn optimize( _ if cgcx.opts.cg.linker_plugin_lto.enabled() => llvm::OptStage::PreLinkThinLTO, _ => llvm::OptStage::PreLinkNoLTO, }; - return unsafe { llvm_optimize(cgcx, dcx, module, config, opt_level, opt_stage) }; + + // If we know that we will later run AD, then we disable vectorization and loop unrolling + let skip_size_increasing_opts = cfg!(llvm_enzyme); + return unsafe { + llvm_optimize( + cgcx, + dcx, + module, + config, + opt_level, + opt_stage, + skip_size_increasing_opts, + ) + }; } Ok(()) } diff --git a/compiler/rustc_codegen_llvm/src/base.rs b/compiler/rustc_codegen_llvm/src/base.rs index f62310bd94808..d05faf5577b01 100644 --- a/compiler/rustc_codegen_llvm/src/base.rs +++ b/compiler/rustc_codegen_llvm/src/base.rs @@ -23,7 +23,7 @@ use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs; use rustc_middle::mir::mono::{Linkage, 
Visibility}; use rustc_middle::ty::TyCtxt; use rustc_session::config::DebugInfo; -use rustc_span::symbol::Symbol; +use rustc_span::Symbol; use rustc_target::spec::SanitizerSet; use super::ModuleLlvm; diff --git a/compiler/rustc_codegen_llvm/src/builder.rs b/compiler/rustc_codegen_llvm/src/builder.rs index b5bb7630ca6c9..5a34b52e6ef30 100644 --- a/compiler/rustc_codegen_llvm/src/builder.rs +++ b/compiler/rustc_codegen_llvm/src/builder.rs @@ -2,6 +2,8 @@ use std::borrow::Cow; use std::ops::Deref; use std::{iter, ptr}; +pub(crate) mod autodiff; + use libc::{c_char, c_uint}; use rustc_abi as abi; use rustc_abi::{Align, Size, WrappingRange}; @@ -608,14 +610,6 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { } fn range_metadata(&mut self, load: &'ll Value, range: WrappingRange) { - if self.sess().target.arch == "amdgpu" { - // amdgpu/LLVM does something weird and thinks an i64 value is - // split into a v2i32, halving the bitwidth LLVM expects, - // tripping an assertion. So, for now, just disable this - // optimization. - return; - } - if self.cx.sess().opts.optimize == OptLevel::No { // Don't emit metadata we're not going to use return; diff --git a/compiler/rustc_codegen_llvm/src/builder/autodiff.rs b/compiler/rustc_codegen_llvm/src/builder/autodiff.rs new file mode 100644 index 0000000000000..38f7eaa090f91 --- /dev/null +++ b/compiler/rustc_codegen_llvm/src/builder/autodiff.rs @@ -0,0 +1,344 @@ +use std::ptr; + +use rustc_ast::expand::autodiff_attrs::{AutoDiffAttrs, AutoDiffItem, DiffActivity, DiffMode}; +use rustc_codegen_ssa::ModuleCodegen; +use rustc_codegen_ssa::back::write::ModuleConfig; +use rustc_codegen_ssa::traits::{BaseTypeCodegenMethods, BuilderMethods}; +use rustc_errors::FatalError; +use rustc_middle::ty::TyCtxt; +use rustc_session::config::Lto; +use tracing::{debug, trace}; + +use crate::back::write::{llvm_err, llvm_optimize}; +use crate::builder::Builder; +use crate::declare::declare_raw_fn; +use crate::errors::LlvmError; +use crate::llvm::AttributePlace::Function; +use crate::llvm::{Metadata, True}; +use crate::value::Value; +use crate::{CodegenContext, LlvmCodegenBackend, ModuleLlvm, attributes, context, llvm}; + +fn get_params(fnc: &Value) -> Vec<&Value> { + unsafe { + let param_num = llvm::LLVMCountParams(fnc) as usize; + let mut fnc_args: Vec<&Value> = vec![]; + fnc_args.reserve(param_num); + llvm::LLVMGetParams(fnc, fnc_args.as_mut_ptr()); + fnc_args.set_len(param_num); + fnc_args + } +} + +/// When differentiating `fn_to_diff`, take a `outer_fn` and generate another +/// function with expected naming and calling conventions[^1] which will be +/// discovered by the enzyme LLVM pass and its body populated with the differentiated +/// `fn_to_diff`. `outer_fn` is then modified to have a call to the generated +/// function and handle the differences between the Rust calling convention and +/// Enzyme. +/// [^1]: +// FIXME(ZuseZ4): `outer_fn` should include upstream safety checks to +// cover some assumptions of enzyme/autodiff, which could lead to UB otherwise. +fn generate_enzyme_call<'ll, 'tcx>( + cx: &context::CodegenCx<'ll, 'tcx>, + fn_to_diff: &'ll Value, + outer_fn: &'ll Value, + attrs: AutoDiffAttrs, +) { + let inputs = attrs.input_activity; + let output = attrs.ret_activity; + + // We have to pick the name depending on whether we want forward or reverse mode autodiff. 
+ // FIXME(ZuseZ4): The new pass based approach should not need the {Forward/Reverse}First method anymore, since + // it will handle higher-order derivatives correctly automatically (in theory). Currently + // higher-order derivatives fail, so we should debug that before adjusting this code. + let mut ad_name: String = match attrs.mode { + DiffMode::Forward => "__enzyme_fwddiff", + DiffMode::Reverse => "__enzyme_autodiff", + DiffMode::ForwardFirst => "__enzyme_fwddiff", + DiffMode::ReverseFirst => "__enzyme_autodiff", + _ => panic!("logic bug in autodiff, unrecognized mode"), + } + .to_string(); + + // add outer_fn name to ad_name to make it unique, in case users apply autodiff to multiple + // functions. Unwrap will only panic, if LLVM gave us an invalid string. + let name = llvm::get_value_name(outer_fn); + let outer_fn_name = std::ffi::CStr::from_bytes_with_nul(name).unwrap().to_str().unwrap(); + ad_name.push_str(outer_fn_name.to_string().as_str()); + + // Let us assume the user wrote the following function square: + // + // ```llvm + // define double @square(double %x) { + // entry: + // %0 = fmul double %x, %x + // ret double %0 + // } + // ``` + // + // The user now applies autodiff to the function square, in which case fn_to_diff will be `square`. + // Our macro generates the following placeholder code (slightly simplified): + // + // ```llvm + // define double @dsquare(double %x) { + // ; placeholder code + // return 0.0; + // } + // ``` + // + // so our `outer_fn` will be `dsquare`. The unsafe code section below now removes the placeholder + // code and inserts an autodiff call. We also add a declaration for the __enzyme_autodiff call. + // Again, the arguments to all functions are slightly simplified. + // ```llvm + // declare double @__enzyme_autodiff_square(...) + // + // define double @dsquare(double %x) { + // entry: + // %0 = tail call double (...) @__enzyme_autodiff_square(double (double)* nonnull @square, double %x) + // ret double %0 + // } + // ``` + unsafe { + // On LLVM-IR, we can luckily declare __enzyme_ functions without specifying the input + // arguments. We do however need to declare them with their correct return type. + // We already figured the correct return type out in our frontend, when generating the outer_fn, + // so we can now just go ahead and use that. FIXME(ZuseZ4): This doesn't handle sret yet. + let fn_ty = llvm::LLVMGlobalGetValueType(outer_fn); + let ret_ty = llvm::LLVMGetReturnType(fn_ty); + + // LLVM can figure out the input types on it's own, so we take a shortcut here. + let enzyme_ty = llvm::LLVMFunctionType(ret_ty, ptr::null(), 0, True); + + //FIXME(ZuseZ4): the CC/Addr/Vis values are best effort guesses, we should look at tests and + // think a bit more about what should go here. + let cc = llvm::LLVMGetFunctionCallConv(outer_fn); + let ad_fn = declare_raw_fn( + cx, + &ad_name, + llvm::CallConv::try_from(cc).expect("invalid callconv"), + llvm::UnnamedAddr::No, + llvm::Visibility::Default, + enzyme_ty, + ); + + // Otherwise LLVM might inline our temporary code before the enzyme pass has a chance to + // do it's work. 
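For orientation, a hypothetical Rust-level counterpart of the `square`/`dsquare` IR shown above, simplified in the same way as that example (the real macro-generated placeholder and its signature may differ):

fn square(x: f64) -> f64 {
    x * x
}

// Placeholder emitted by the autodiff frontend; the code that follows erases
// its body and replaces it with a call to the generated __enzyme_* function.
fn dsquare(x: f64) -> f64 {
    0.0
}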
+ let attr = llvm::AttributeKind::NoInline.create_attr(cx.llcx); + attributes::apply_to_llfn(ad_fn, Function, &[attr]); + + // first, remove all calls from fnc + let entry = llvm::LLVMGetFirstBasicBlock(outer_fn); + let br = llvm::LLVMRustGetTerminator(entry); + llvm::LLVMRustEraseInstFromParent(br); + + let last_inst = llvm::LLVMRustGetLastInstruction(entry).unwrap(); + let mut builder = Builder::build(cx, entry); + + let num_args = llvm::LLVMCountParams(&fn_to_diff); + let mut args = Vec::with_capacity(num_args as usize + 1); + args.push(fn_to_diff); + + let enzyme_const = cx.create_metadata("enzyme_const".to_string()).unwrap(); + let enzyme_out = cx.create_metadata("enzyme_out".to_string()).unwrap(); + let enzyme_dup = cx.create_metadata("enzyme_dup".to_string()).unwrap(); + let enzyme_dupnoneed = cx.create_metadata("enzyme_dupnoneed".to_string()).unwrap(); + let enzyme_primal_ret = cx.create_metadata("enzyme_primal_return".to_string()).unwrap(); + + match output { + DiffActivity::Dual => { + args.push(cx.get_metadata_value(enzyme_primal_ret)); + } + DiffActivity::Active => { + args.push(cx.get_metadata_value(enzyme_primal_ret)); + } + _ => {} + } + + trace!("matching autodiff arguments"); + // We now handle the issue that Rust level arguments not always match the llvm-ir level + // arguments. A slice, `&[f32]`, for example, is represented as a pointer and a length on + // llvm-ir level. The number of activities matches the number of Rust level arguments, so we + // need to match those. + // FIXME(ZuseZ4): This logic is a bit more complicated than it should be, can we simplify it + // using iterators and peek()? + let mut outer_pos: usize = 0; + let mut activity_pos = 0; + let outer_args: Vec<&llvm::Value> = get_params(outer_fn); + while activity_pos < inputs.len() { + let activity = inputs[activity_pos as usize]; + // Duplicated arguments received a shadow argument, into which enzyme will write the + // gradient. + let (activity, duplicated): (&Metadata, bool) = match activity { + DiffActivity::None => panic!("not a valid input activity"), + DiffActivity::Const => (enzyme_const, false), + DiffActivity::Active => (enzyme_out, false), + DiffActivity::ActiveOnly => (enzyme_out, false), + DiffActivity::Dual => (enzyme_dup, true), + DiffActivity::DualOnly => (enzyme_dupnoneed, true), + DiffActivity::Duplicated => (enzyme_dup, true), + DiffActivity::DuplicatedOnly => (enzyme_dupnoneed, true), + DiffActivity::FakeActivitySize => (enzyme_const, false), + }; + let outer_arg = outer_args[outer_pos]; + args.push(cx.get_metadata_value(activity)); + args.push(outer_arg); + if duplicated { + // We know that duplicated args by construction have a following argument, + // so this can not be out of bounds. + let next_outer_arg = outer_args[outer_pos + 1]; + let next_outer_ty = cx.val_ty(next_outer_arg); + // FIXME(ZuseZ4): We should add support for Vec here too, but it's less urgent since + // vectors behind references (&Vec) are already supported. Users can not pass a + // Vec by value for reverse mode, so this would only help forward mode autodiff. + let slice = { + if activity_pos + 1 >= inputs.len() { + // If there is no arg following our ptr, it also can't be a slice, + // since that would lead to a ptr, int pair. + false + } else { + let next_activity = inputs[activity_pos + 1]; + // We analyze the MIR types and add this dummy activity if we visit a slice. 
+ next_activity == DiffActivity::FakeActivitySize + } + }; + if slice { + // A duplicated slice will have the following two outer_fn arguments: + // (..., ptr1, int1, ptr2, int2, ...). We add the following llvm-ir to our __enzyme call: + // (..., metadata! enzyme_dup, ptr, ptr, int1, ...). + // FIXME(ZuseZ4): We will upstream a safety check later which asserts that + // int2 >= int1, which means the shadow vector is large enough to store the gradient. + assert!(llvm::LLVMRustGetTypeKind(next_outer_ty) == llvm::TypeKind::Integer); + let next_outer_arg2 = outer_args[outer_pos + 2]; + let next_outer_ty2 = cx.val_ty(next_outer_arg2); + assert!(llvm::LLVMRustGetTypeKind(next_outer_ty2) == llvm::TypeKind::Pointer); + let next_outer_arg3 = outer_args[outer_pos + 3]; + let next_outer_ty3 = cx.val_ty(next_outer_arg3); + assert!(llvm::LLVMRustGetTypeKind(next_outer_ty3) == llvm::TypeKind::Integer); + args.push(next_outer_arg2); + args.push(cx.get_metadata_value(enzyme_const)); + args.push(next_outer_arg); + outer_pos += 4; + activity_pos += 2; + } else { + // A duplicated pointer will have the following two outer_fn arguments: + // (..., ptr, ptr, ...). We add the following llvm-ir to our __enzyme call: + // (..., metadata! enzyme_dup, ptr, ptr, ...). + assert!(llvm::LLVMRustGetTypeKind(next_outer_ty) == llvm::TypeKind::Pointer); + args.push(next_outer_arg); + outer_pos += 2; + activity_pos += 1; + } + } else { + // We do not differentiate with resprect to this argument. + // We already added the metadata and argument above, so just increase the counters. + outer_pos += 1; + activity_pos += 1; + } + } + + let call = builder.call(enzyme_ty, None, None, ad_fn, &args, None, None); + + // This part is a bit iffy. LLVM requires that a call to an inlineable function has some + // metadata attachted to it, but we just created this code oota. Given that the + // differentiated function already has partly confusing metadata, and given that this + // affects nothing but the auttodiff IR, we take a shortcut and just steal metadata from the + // dummy code which we inserted at a higher level. + // FIXME(ZuseZ4): Work with Enzyme core devs to clarify what debug metadata issues we have, + // and how to best improve it for enzyme core and rust-enzyme. + let md_ty = cx.get_md_kind_id("dbg"); + if llvm::LLVMRustHasMetadata(last_inst, md_ty) { + let md = llvm::LLVMRustDIGetInstMetadata(last_inst) + .expect("failed to get instruction metadata"); + let md_todiff = cx.get_metadata_value(md); + llvm::LLVMSetMetadata(call, md_ty, md_todiff); + } else { + // We don't panic, since depending on whether we are in debug or release mode, we might + // have no debug info to copy, which would then be ok. + trace!("no dbg info"); + } + // Now that we copied the metadata, get rid of dummy code. + llvm::LLVMRustEraseInstBefore(entry, last_inst); + llvm::LLVMRustEraseInstFromParent(last_inst); + + if cx.val_ty(outer_fn) != cx.type_void() { + builder.ret(call); + } else { + builder.ret_void(); + } + + // Let's crash in case that we messed something up above and generated invalid IR. 
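As a worked illustration of the argument matching performed in the loop above (hypothetical function, not part of the patch): a slice is a single Rust-level parameter, but it lowers to a pointer plus a length at the LLVM-IR level, so its activity is followed by a `FakeActivitySize` entry for the length, and a duplicated slice consumes four outer arguments (ptr, len, shadow ptr, shadow len).

// Rust level: two parameters; activity list, e.g. in reverse mode,
// [Duplicated, FakeActivitySize, Const], where the dummy entry stands in
// for the slice length.
fn dot(xs: &[f32], scale: f32) -> f32 {
    xs.iter().map(|x| x * scale).sum()
}
// LLVM-IR level, conceptually: ptr %xs.ptr, i64 %xs.len, float %scale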
+ llvm::LLVMRustVerifyFunction( + outer_fn, + llvm::LLVMRustVerifierFailureAction::LLVMAbortProcessAction, + ); + } +} + +pub(crate) fn differentiate<'ll, 'tcx>( + module: &'ll ModuleCodegen, + cgcx: &CodegenContext, + tcx: TyCtxt<'tcx>, + diff_items: Vec, + config: &ModuleConfig, +) -> Result<(), FatalError> { + for item in &diff_items { + trace!("{}", item); + } + + let diag_handler = cgcx.create_dcx(); + let (_, cgus) = tcx.collect_and_partition_mono_items(()); + let cx = context::CodegenCx::new(tcx, &cgus.first().unwrap(), &module.module_llvm); + + // Before dumping the module, we want all the TypeTrees to become part of the module. + for item in diff_items.iter() { + let name = item.source.clone(); + let fn_def: Option<&llvm::Value> = cx.get_function(&name); + let Some(fn_def) = fn_def else { + return Err(llvm_err(diag_handler.handle(), LlvmError::PrepareAutoDiff { + src: item.source.clone(), + target: item.target.clone(), + error: "could not find source function".to_owned(), + })); + }; + debug!(?item.target); + let fn_target: Option<&llvm::Value> = cx.get_function(&item.target); + let Some(fn_target) = fn_target else { + return Err(llvm_err(diag_handler.handle(), LlvmError::PrepareAutoDiff { + src: item.source.clone(), + target: item.target.clone(), + error: "could not find target function".to_owned(), + })); + }; + + generate_enzyme_call(&cx, fn_def, fn_target, item.attrs.clone()); + } + + // FIXME(ZuseZ4): support SanitizeHWAddress and prevent illegal/unsupported opts + + if let Some(opt_level) = config.opt_level { + let opt_stage = match cgcx.lto { + Lto::Fat => llvm::OptStage::PreLinkFatLTO, + Lto::Thin | Lto::ThinLocal => llvm::OptStage::PreLinkThinLTO, + _ if cgcx.opts.cg.linker_plugin_lto.enabled() => llvm::OptStage::PreLinkThinLTO, + _ => llvm::OptStage::PreLinkNoLTO, + }; + // This is our second opt call, so now we run all opts, + // to make sure we get the best performance. + let skip_size_increasing_opts = false; + trace!("running Module Optimization after differentiation"); + unsafe { + llvm_optimize( + cgcx, + diag_handler.handle(), + module, + config, + opt_level, + opt_stage, + skip_size_increasing_opts, + )? 
+ }; + } + trace!("done with differentiate()"); + + Ok(()) +} diff --git a/compiler/rustc_codegen_llvm/src/context.rs b/compiler/rustc_codegen_llvm/src/context.rs index c602d99ff9d80..6534575184256 100644 --- a/compiler/rustc_codegen_llvm/src/context.rs +++ b/compiler/rustc_codegen_llvm/src/context.rs @@ -1,6 +1,6 @@ use std::borrow::Borrow; use std::cell::{Cell, RefCell}; -use std::ffi::{CStr, c_uint}; +use std::ffi::{CStr, c_char, c_uint}; use std::str; use rustc_abi::{HasDataLayout, TargetDataLayout, VariantIdx}; @@ -600,6 +600,31 @@ impl<'ll, 'tcx> CodegenCx<'ll, 'tcx> { llvm::set_section(g, c"llvm.metadata"); } } + + pub(crate) fn get_metadata_value(&self, metadata: &'ll Metadata) -> &'ll Value { + unsafe { llvm::LLVMMetadataAsValue(self.llcx, metadata) } + } + + pub(crate) fn get_function(&self, name: &str) -> Option<&'ll Value> { + let name = SmallCStr::new(name); + unsafe { llvm::LLVMGetNamedFunction(self.llmod, name.as_ptr()) } + } + + pub(crate) fn get_md_kind_id(&self, name: &str) -> u32 { + unsafe { + llvm::LLVMGetMDKindIDInContext( + self.llcx, + name.as_ptr() as *const c_char, + name.len() as c_uint, + ) + } + } + + pub(crate) fn create_metadata(&self, name: String) -> Option<&'ll Metadata> { + Some(unsafe { + llvm::LLVMMDStringInContext2(self.llcx, name.as_ptr() as *const c_char, name.len()) + }) + } } impl<'ll, 'tcx> MiscCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> { @@ -716,7 +741,10 @@ impl<'ll, 'tcx> MiscCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> { if self.get_declared_value(entry_name).is_none() { Some(self.declare_entry_fn( entry_name, - self.sess().target.entry_abi.into(), + llvm::CallConv::from_conv( + self.sess().target.entry_abi, + self.sess().target.arch.borrow(), + ), llvm::UnnamedAddr::Global, fn_type, )) diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/ffi.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/ffi.rs index 1f133141c18fd..b617f4d37f5bf 100644 --- a/compiler/rustc_codegen_llvm/src/coverageinfo/ffi.rs +++ b/compiler/rustc_codegen_llvm/src/coverageinfo/ffi.rs @@ -1,6 +1,4 @@ -use rustc_middle::mir::coverage::{CounterId, CovTerm, ExpressionId, SourceRegion}; - -use crate::coverageinfo::mapgen::LocalFileId; +use rustc_middle::mir::coverage::{CounterId, CovTerm, ExpressionId}; /// Must match the layout of `LLVMRustCounterKind`. #[derive(Copy, Clone, Debug)] @@ -126,30 +124,16 @@ pub(crate) struct CoverageSpan { /// Local index into the function's local-to-global file ID table. /// The value at that index is itself an index into the coverage filename /// table in the CGU's `__llvm_covmap` section. - file_id: u32, + pub(crate) file_id: u32, /// 1-based starting line of the source code span. - start_line: u32, + pub(crate) start_line: u32, /// 1-based starting column of the source code span. - start_col: u32, + pub(crate) start_col: u32, /// 1-based ending line of the source code span. - end_line: u32, + pub(crate) end_line: u32, /// 1-based ending column of the source code span. High bit must be unset. - end_col: u32, -} - -impl CoverageSpan { - pub(crate) fn from_source_region( - local_file_id: LocalFileId, - code_region: &SourceRegion, - ) -> Self { - let file_id = local_file_id.as_u32(); - let &SourceRegion { start_line, start_col, end_line, end_col } = code_region; - // Internally, LLVM uses the high bit of `end_col` to distinguish between - // code regions and gap regions, so it can't be used by the column number. 
- assert!(end_col & (1u32 << 31) == 0, "high bit of `end_col` must be unset: {end_col:#X}"); - Self { file_id, start_line, start_col, end_line, end_col } - } + pub(crate) end_col: u32, } /// Holds tables of the various region types in one struct. @@ -184,7 +168,7 @@ impl Regions { #[derive(Clone, Debug)] #[repr(C)] pub(crate) struct CodeRegion { - pub(crate) span: CoverageSpan, + pub(crate) cov_span: CoverageSpan, pub(crate) counter: Counter, } @@ -192,7 +176,7 @@ pub(crate) struct CodeRegion { #[derive(Clone, Debug)] #[repr(C)] pub(crate) struct BranchRegion { - pub(crate) span: CoverageSpan, + pub(crate) cov_span: CoverageSpan, pub(crate) true_counter: Counter, pub(crate) false_counter: Counter, } @@ -201,7 +185,7 @@ pub(crate) struct BranchRegion { #[derive(Clone, Debug)] #[repr(C)] pub(crate) struct MCDCBranchRegion { - pub(crate) span: CoverageSpan, + pub(crate) cov_span: CoverageSpan, pub(crate) true_counter: Counter, pub(crate) false_counter: Counter, pub(crate) mcdc_branch_params: mcdc::BranchParameters, @@ -211,6 +195,6 @@ pub(crate) struct MCDCBranchRegion { #[derive(Clone, Debug)] #[repr(C)] pub(crate) struct MCDCDecisionRegion { - pub(crate) span: CoverageSpan, + pub(crate) cov_span: CoverageSpan, pub(crate) mcdc_decision_params: mcdc::DecisionParameters, } diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/llvm_cov.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/llvm_cov.rs index 086cf1f44a03e..2cd7fa3225acd 100644 --- a/compiler/rustc_codegen_llvm/src/coverageinfo/llvm_cov.rs +++ b/compiler/rustc_codegen_llvm/src/coverageinfo/llvm_cov.rs @@ -40,11 +40,10 @@ pub(crate) fn create_pgo_func_name_var<'ll>( } } -pub(crate) fn write_filenames_to_buffer<'a>( - filenames: impl IntoIterator, -) -> Vec { +pub(crate) fn write_filenames_to_buffer(filenames: &[impl AsRef]) -> Vec { let (pointers, lengths) = filenames .into_iter() + .map(AsRef::as_ref) .map(|s: &str| (s.as_c_char_ptr(), s.len())) .unzip::<_, _, Vec<_>, Vec<_>>(); diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/map_data.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/map_data.rs deleted file mode 100644 index c5d1ebdfe7c5f..0000000000000 --- a/compiler/rustc_codegen_llvm/src/coverageinfo/map_data.rs +++ /dev/null @@ -1,84 +0,0 @@ -use rustc_data_structures::captures::Captures; -use rustc_middle::mir::coverage::{ - CovTerm, CoverageIdsInfo, Expression, FunctionCoverageInfo, Mapping, MappingKind, Op, - SourceRegion, -}; - -use crate::coverageinfo::ffi::{Counter, CounterExpression, ExprKind}; - -pub(crate) struct FunctionCoverage<'tcx> { - pub(crate) function_coverage_info: &'tcx FunctionCoverageInfo, - /// If `None`, the corresponding function is unused. - ids_info: Option<&'tcx CoverageIdsInfo>, -} - -impl<'tcx> FunctionCoverage<'tcx> { - pub(crate) fn new_used( - function_coverage_info: &'tcx FunctionCoverageInfo, - ids_info: &'tcx CoverageIdsInfo, - ) -> Self { - Self { function_coverage_info, ids_info: Some(ids_info) } - } - - pub(crate) fn new_unused(function_coverage_info: &'tcx FunctionCoverageInfo) -> Self { - Self { function_coverage_info, ids_info: None } - } - - /// Returns true for a used (called) function, and false for an unused function. - pub(crate) fn is_used(&self) -> bool { - self.ids_info.is_some() - } - - /// Return the source hash, generated from the HIR node structure, and used to indicate whether - /// or not the source code structure changed between different compilations. 
- pub(crate) fn source_hash(&self) -> u64 { - if self.is_used() { self.function_coverage_info.function_source_hash } else { 0 } - } - - /// Convert this function's coverage expression data into a form that can be - /// passed through FFI to LLVM. - pub(crate) fn counter_expressions( - &self, - ) -> impl Iterator + ExactSizeIterator + Captures<'_> { - // We know that LLVM will optimize out any unused expressions before - // producing the final coverage map, so there's no need to do the same - // thing on the Rust side unless we're confident we can do much better. - // (See `CounterExpressionsMinimizer` in `CoverageMappingWriter.cpp`.) - - self.function_coverage_info.expressions.iter().map(move |&Expression { lhs, op, rhs }| { - CounterExpression { - lhs: self.counter_for_term(lhs), - kind: match op { - Op::Add => ExprKind::Add, - Op::Subtract => ExprKind::Subtract, - }, - rhs: self.counter_for_term(rhs), - } - }) - } - - /// Converts this function's coverage mappings into an intermediate form - /// that will be used by `mapgen` when preparing for FFI. - pub(crate) fn counter_regions( - &self, - ) -> impl Iterator + ExactSizeIterator { - self.function_coverage_info.mappings.iter().map(move |mapping| { - let Mapping { kind, source_region } = mapping; - let kind = - kind.map_terms(|term| if self.is_zero_term(term) { CovTerm::Zero } else { term }); - (kind, source_region) - }) - } - - fn counter_for_term(&self, term: CovTerm) -> Counter { - if self.is_zero_term(term) { Counter::ZERO } else { Counter::from_term(term) } - } - - fn is_zero_term(&self, term: CovTerm) -> bool { - match self.ids_info { - Some(ids_info) => ids_info.is_zero_term(term), - // This function is unused, so all coverage counters/expressions are zero. - None => true, - } - } -} diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs index 4f2af73252751..b3ad2a0e4098f 100644 --- a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs +++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs @@ -1,28 +1,28 @@ -use std::iter; +use std::sync::Arc; -use itertools::Itertools as _; +use itertools::Itertools; use rustc_abi::Align; use rustc_codegen_ssa::traits::{ BaseTypeCodegenMethods, ConstCodegenMethods, StaticCodegenMethods, }; -use rustc_data_structures::fx::{FxHashSet, FxIndexMap, FxIndexSet}; +use rustc_data_structures::fx::{FxHashSet, FxIndexMap}; use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_index::IndexVec; +use rustc_middle::mir; use rustc_middle::ty::{self, TyCtxt}; -use rustc_middle::{bug, mir}; use rustc_session::RemapFileNameExt; use rustc_session::config::RemapPathScopeComponents; use rustc_span::def_id::DefIdSet; -use rustc_span::{Span, Symbol}; +use rustc_span::{SourceFile, StableSourceFileId}; use tracing::debug; use crate::common::CodegenCx; use crate::coverageinfo::llvm_cov; -use crate::coverageinfo::map_data::FunctionCoverage; use crate::coverageinfo::mapgen::covfun::prepare_covfun_record; use crate::llvm; mod covfun; +mod spans; /// Generates and exports the coverage map, which is embedded in special /// linker sections in the final binary. @@ -49,46 +49,40 @@ pub(crate) fn finalize(cx: &CodegenCx<'_, '_>) { debug!("Generating coverage map for CodegenUnit: `{}`", cx.codegen_unit.name()); - // In order to show that unused functions have coverage counts of zero (0), LLVM requires the - // functions exist. 
Generate synthetic functions with a (required) single counter, and add the - // MIR `Coverage` code regions to the `function_coverage_map`, before calling - // `ctx.take_function_coverage_map()`. - if cx.codegen_unit.is_code_coverage_dead_code_cgu() { - add_unused_functions(cx); - } - // FIXME(#132395): Can this be none even when coverage is enabled? - let function_coverage_map = match cx.coverage_cx { - Some(ref cx) => cx.take_function_coverage_map(), + let instances_used = match cx.coverage_cx { + Some(ref cx) => cx.instances_used.borrow(), None => return, }; - if function_coverage_map.is_empty() { - // This CGU has no functions with coverage instrumentation. - return; - } - - let all_file_names = function_coverage_map - .iter() - .map(|(_, fn_cov)| fn_cov.function_coverage_info.body_span) - .map(|span| span_file_name(tcx, span)); - let global_file_table = GlobalFileTable::new(all_file_names); - - // Encode all filenames referenced by coverage mappings in this CGU. - let filenames_buffer = global_file_table.make_filenames_buffer(tcx); - // The `llvm-cov` tool uses this hash to associate each covfun record with - // its corresponding filenames table, since the final binary will typically - // contain multiple covmap records from different compilation units. - let filenames_hash = llvm_cov::hash_bytes(&filenames_buffer); - let mut unused_function_names = Vec::new(); + // The order of entries in this global file table needs to be deterministic, + // and ideally should also be independent of the details of stable-hashing, + // because coverage tests snapshots (`.cov-map`) can observe the order and + // would need to be re-blessed if it changes. As long as those requirements + // are satisfied, the order can be arbitrary. + let mut global_file_table = GlobalFileTable::new(); - let covfun_records = function_coverage_map - .into_iter() - .filter_map(|(instance, function_coverage)| { - prepare_covfun_record(tcx, &global_file_table, instance, &function_coverage) - }) + let mut covfun_records = instances_used + .iter() + .copied() + // Sort by symbol name, so that the global file table is built in an + // order that doesn't depend on the stable-hash-based order in which + // instances were visited during codegen. + .sorted_by_cached_key(|&instance| tcx.symbol_name(instance).name) + .filter_map(|instance| prepare_covfun_record(tcx, &mut global_file_table, instance, true)) .collect::>(); + // In a single designated CGU, also prepare covfun records for functions + // in this crate that were instrumented for coverage, but are unused. + if cx.codegen_unit.is_code_coverage_dead_code_cgu() { + let mut unused_instances = gather_unused_function_instances(cx); + // Sort the unused instances by symbol name, for the same reason as the used ones. + unused_instances.sort_by_cached_key(|&instance| tcx.symbol_name(instance).name); + covfun_records.extend(unused_instances.into_iter().filter_map(|instance| { + prepare_covfun_record(tcx, &mut global_file_table, instance, false) + })); + } + // If there are no covfun records for this CGU, don't generate a covmap record. // Emitting a covmap record without any covfun records causes `llvm-cov` to // fail when generating coverage reports, and if there are no covfun records @@ -98,6 +92,15 @@ pub(crate) fn finalize(cx: &CodegenCx<'_, '_>) { return; } + // Encode all filenames referenced by coverage mappings in this CGU. 
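A small sketch of the determinism idea behind the new `sorted_by_cached_key` call above: order items by a stable, human-meaningful key (the symbol name, in the diff) rather than by the order in which they were first observed. The `(id, symbol)` pairs below are invented, and std's `sort_by_cached_key` stands in for the itertools adapter used in the patch.

```rust
fn main() {
    // Hypothetical (instance id, symbol name) pairs, observed in arbitrary order.
    let mut instances = vec![(7usize, "b::baz"), (2, "a::foo"), (5, "a::bar")];

    // Each key is computed once; the diff keys on `tcx.symbol_name(instance).name`.
    instances.sort_by_cached_key(|&(_, sym)| sym);

    let order: Vec<_> = instances.iter().map(|&(_, sym)| sym).collect();
    assert_eq!(order, ["a::bar", "a::foo", "b::baz"]);
}
```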
+ let filenames_buffer = global_file_table.make_filenames_buffer(tcx); + // The `llvm-cov` tool uses this hash to associate each covfun record with + // its corresponding filenames table, since the final binary will typically + // contain multiple covmap records from different compilation units. + let filenames_hash = llvm_cov::hash_bytes(&filenames_buffer); + + let mut unused_function_names = vec![]; + for covfun in &covfun_records { unused_function_names.extend(covfun.mangled_function_name_if_unused()); @@ -129,54 +132,51 @@ pub(crate) fn finalize(cx: &CodegenCx<'_, '_>) { generate_covmap_record(cx, covmap_version, &filenames_buffer); } -/// Maps "global" (per-CGU) file ID numbers to their underlying filenames. +/// Maps "global" (per-CGU) file ID numbers to their underlying source files. struct GlobalFileTable { - /// This "raw" table doesn't include the working dir, so a filename's + /// This "raw" table doesn't include the working dir, so a file's /// global ID is its index in this set **plus one**. - raw_file_table: FxIndexSet, + raw_file_table: FxIndexMap>, } impl GlobalFileTable { - fn new(all_file_names: impl IntoIterator) -> Self { - // Collect all of the filenames into a set. Filenames usually come in - // contiguous runs, so we can dedup adjacent ones to save work. - let mut raw_file_table = all_file_names.into_iter().dedup().collect::>(); - - // Sort the file table by its actual string values, not the arbitrary - // ordering of its symbols. - raw_file_table.sort_unstable_by(|a, b| a.as_str().cmp(b.as_str())); - - Self { raw_file_table } + fn new() -> Self { + Self { raw_file_table: FxIndexMap::default() } } - fn global_file_id_for_file_name(&self, file_name: Symbol) -> GlobalFileId { - let raw_id = self.raw_file_table.get_index_of(&file_name).unwrap_or_else(|| { - bug!("file name not found in prepared global file table: {file_name}"); - }); + fn global_file_id_for_file(&mut self, file: &Arc) -> GlobalFileId { + // Ensure the given file has a table entry, and get its index. + let entry = self.raw_file_table.entry(file.stable_id); + let raw_id = entry.index(); + entry.or_insert_with(|| Arc::clone(file)); + // The raw file table doesn't include an entry for the working dir // (which has ID 0), so add 1 to get the correct ID. GlobalFileId::from_usize(raw_id + 1) } fn make_filenames_buffer(&self, tcx: TyCtxt<'_>) -> Vec { + let mut table = Vec::with_capacity(self.raw_file_table.len() + 1); + // LLVM Coverage Mapping Format version 6 (zero-based encoded as 5) // requires setting the first filename to the compilation directory. // Since rustc generates coverage maps with relative paths, the // compilation directory can be combined with the relative paths // to get absolute paths, if needed. - use rustc_session::RemapFileNameExt; - use rustc_session::config::RemapPathScopeComponents; - let working_dir: &str = &tcx - .sess - .opts - .working_dir - .for_scope(tcx.sess, RemapPathScopeComponents::MACRO) - .to_string_lossy(); - - // Insert the working dir at index 0, before the other filenames. - let filenames = - iter::once(working_dir).chain(self.raw_file_table.iter().map(Symbol::as_str)); - llvm_cov::write_filenames_to_buffer(filenames) + table.push( + tcx.sess + .opts + .working_dir + .for_scope(tcx.sess, RemapPathScopeComponents::MACRO) + .to_string_lossy(), + ); + + // Add the regular entries after the base directory. 
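A rough stand-in for the get-or-insert-and-return-index behaviour of the new `GlobalFileTable::global_file_id_for_file`. The real table is an `FxIndexMap` keyed by `StableSourceFileId`; this sketch keys by `String`, but keeps the +1 offset for the reserved working-directory slot.

```rust
use std::collections::HashMap;

struct FileTable {
    index_of: HashMap<String, usize>,
    files: Vec<String>,
}

impl FileTable {
    fn new() -> Self {
        Self { index_of: HashMap::new(), files: Vec::new() }
    }

    /// Returns the global ID for `file`, inserting it on first use.
    /// IDs start at 1 because slot 0 is reserved for the working directory.
    fn global_id_for(&mut self, file: &str) -> usize {
        if let Some(&raw_id) = self.index_of.get(file) {
            return raw_id + 1;
        }
        let raw_id = self.files.len();
        self.files.push(file.to_owned());
        self.index_of.insert(file.to_owned(), raw_id);
        raw_id + 1
    }
}

fn main() {
    let mut table = FileTable::new();
    assert_eq!(table.global_id_for("src/lib.rs"), 1);
    assert_eq!(table.global_id_for("src/main.rs"), 2);
    // Asking again for an already-seen file returns the same ID.
    assert_eq!(table.global_id_for("src/lib.rs"), 1);
}
```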
+ table.extend(self.raw_file_table.values().map(|file| { + file.name.for_scope(tcx.sess, RemapPathScopeComponents::MACRO).to_string_lossy() + })); + + llvm_cov::write_filenames_to_buffer(&table) } } @@ -189,7 +189,7 @@ rustc_index::newtype_index! { /// An index into a function's list of global file IDs. That underlying list /// of local-to-global mappings will be embedded in the function's record in /// the `__llvm_covfun` linker section. - pub(crate) struct LocalFileId {} + struct LocalFileId {} } /// Holds a mapping from "local" (per-function) file IDs to "global" (per-CGU) @@ -215,13 +215,6 @@ impl VirtualFileMapping { } } -fn span_file_name(tcx: TyCtxt<'_>, span: Span) -> Symbol { - let source_file = tcx.sess.source_map().lookup_source_file(span.lo()); - let name = - source_file.name.for_scope(tcx.sess, RemapPathScopeComponents::MACRO).to_string_lossy(); - Symbol::intern(&name) -} - /// Generates the contents of the covmap record for this CGU, which mostly /// consists of a header and a list of filenames. The record is then stored /// as a global variable in the `__llvm_covmap` section. @@ -264,39 +257,35 @@ fn generate_covmap_record<'ll>(cx: &CodegenCx<'ll, '_>, version: u32, filenames_ /// coverage map (in a single designated CGU) so that we still emit coverage mappings for them. /// We also end up adding their symbol names to a special global array that LLVM will include in /// its embedded coverage data. -fn add_unused_functions(cx: &CodegenCx<'_, '_>) { +fn gather_unused_function_instances<'tcx>(cx: &CodegenCx<'_, 'tcx>) -> Vec> { assert!(cx.codegen_unit.is_code_coverage_dead_code_cgu()); let tcx = cx.tcx; let usage = prepare_usage_sets(tcx); let is_unused_fn = |def_id: LocalDefId| -> bool { - let def_id = def_id.to_def_id(); - - // To be eligible for "unused function" mappings, a definition must: - // - Be function-like + // Usage sets expect `DefId`, so convert from `LocalDefId`. + let d: DefId = LocalDefId::to_def_id(def_id); + // To be potentially eligible for "unused function" mappings, a definition must: + // - Be eligible for coverage instrumentation // - Not participate directly in codegen (or have lost all its coverage statements) // - Not have any coverage statements inlined into codegenned functions - tcx.def_kind(def_id).is_fn_like() - && (!usage.all_mono_items.contains(&def_id) - || usage.missing_own_coverage.contains(&def_id)) - && !usage.used_via_inlining.contains(&def_id) + tcx.is_eligible_for_coverage(def_id) + && (!usage.all_mono_items.contains(&d) || usage.missing_own_coverage.contains(&d)) + && !usage.used_via_inlining.contains(&d) }; - // Scan for unused functions that were instrumented for coverage. - for def_id in tcx.mir_keys(()).iter().copied().filter(|&def_id| is_unused_fn(def_id)) { - // Get the coverage info from MIR, skipping functions that were never instrumented. - let body = tcx.optimized_mir(def_id); - let Some(function_coverage_info) = body.function_coverage_info.as_deref() else { continue }; - - // FIXME(79651): Consider trying to filter out dummy instantiations of - // unused generic functions from library crates, because they can produce - // "unused instantiation" in coverage reports even when they are actually - // used by some downstream crate in the same binary. + // FIXME(#79651): Consider trying to filter out dummy instantiations of + // unused generic functions from library crates, because they can produce + // "unused instantiation" in coverage reports even when they are actually + // used by some downstream crate in the same binary. 
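The `is_unused_fn` predicate, restated over plain integer IDs and `HashSet`s so its three conditions can be read in isolation; the real code obtains these sets from `tcx` queries rather than taking them as parameters.

```rust
use std::collections::HashSet;

fn is_unused_fn(
    def_id: u32,
    eligible_for_coverage: &HashSet<u32>,
    all_mono_items: &HashSet<u32>,
    missing_own_coverage: &HashSet<u32>,
    used_via_inlining: &HashSet<u32>,
) -> bool {
    eligible_for_coverage.contains(&def_id)
        && (!all_mono_items.contains(&def_id) || missing_own_coverage.contains(&def_id))
        && !used_via_inlining.contains(&def_id)
}

fn main() {
    let eligible: HashSet<u32> = [1, 2, 3].into_iter().collect();
    let mono: HashSet<u32> = [1].into_iter().collect();
    let missing: HashSet<u32> = HashSet::new();
    let inlined: HashSet<u32> = [3].into_iter().collect();

    // 2 is eligible, never monomorphized, and never inlined: unused.
    assert!(is_unused_fn(2, &eligible, &mono, &missing, &inlined));
    // 1 was codegenned; 3 was reached via inlining: both count as used.
    assert!(!is_unused_fn(1, &eligible, &mono, &missing, &inlined));
    assert!(!is_unused_fn(3, &eligible, &mono, &missing, &inlined));
}
```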
- debug!("generating unused fn: {def_id:?}"); - add_unused_function_coverage(cx, def_id, function_coverage_info); - } + tcx.mir_keys(()) + .iter() + .copied() + .filter(|&def_id| is_unused_fn(def_id)) + .map(|def_id| make_dummy_instance(tcx, def_id)) + .collect::>() } struct UsageSets<'tcx> { @@ -361,16 +350,11 @@ fn prepare_usage_sets<'tcx>(tcx: TyCtxt<'tcx>) -> UsageSets<'tcx> { UsageSets { all_mono_items, used_via_inlining, missing_own_coverage } } -fn add_unused_function_coverage<'tcx>( - cx: &CodegenCx<'_, 'tcx>, - def_id: LocalDefId, - function_coverage_info: &'tcx mir::coverage::FunctionCoverageInfo, -) { - let tcx = cx.tcx; - let def_id = def_id.to_def_id(); +fn make_dummy_instance<'tcx>(tcx: TyCtxt<'tcx>, local_def_id: LocalDefId) -> ty::Instance<'tcx> { + let def_id = local_def_id.to_def_id(); // Make a dummy instance that fills in all generics with placeholders. - let instance = ty::Instance::new( + ty::Instance::new( def_id, ty::GenericArgs::for_item(tcx, def_id, |param, _| { if let ty::GenericParamDefKind::Lifetime = param.kind { @@ -379,9 +363,5 @@ fn add_unused_function_coverage<'tcx>( tcx.mk_param_from_def(param) } }), - ); - - // An unused function's mappings will all be rewritten to map to zero. - let function_coverage = FunctionCoverage::new_unused(function_coverage_info); - cx.coverage_cx().function_coverage_map.borrow_mut().insert(instance, function_coverage); + ) } diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs index 33e7a0f2f201b..5428d776f41e8 100644 --- a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs +++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs @@ -10,15 +10,16 @@ use rustc_abi::Align; use rustc_codegen_ssa::traits::{ BaseTypeCodegenMethods, ConstCodegenMethods, StaticCodegenMethods, }; -use rustc_middle::bug; -use rustc_middle::mir::coverage::MappingKind; +use rustc_middle::mir::coverage::{ + CovTerm, CoverageIdsInfo, Expression, FunctionCoverageInfo, Mapping, MappingKind, Op, +}; use rustc_middle::ty::{Instance, TyCtxt}; +use rustc_span::Span; use rustc_target::spec::HasTargetSpec; use tracing::debug; use crate::common::CodegenCx; -use crate::coverageinfo::map_data::FunctionCoverage; -use crate::coverageinfo::mapgen::{GlobalFileTable, VirtualFileMapping, span_file_name}; +use crate::coverageinfo::mapgen::{GlobalFileTable, VirtualFileMapping, spans}; use crate::coverageinfo::{ffi, llvm_cov}; use crate::llvm; @@ -45,77 +46,129 @@ impl<'tcx> CovfunRecord<'tcx> { pub(crate) fn prepare_covfun_record<'tcx>( tcx: TyCtxt<'tcx>, - global_file_table: &GlobalFileTable, + global_file_table: &mut GlobalFileTable, instance: Instance<'tcx>, - function_coverage: &FunctionCoverage<'tcx>, + is_used: bool, ) -> Option> { + let fn_cov_info = tcx.instance_mir(instance.def).function_coverage_info.as_deref()?; + let ids_info = tcx.coverage_ids_info(instance.def); + + let expressions = prepare_expressions(fn_cov_info, ids_info, is_used); + let mut covfun = CovfunRecord { mangled_function_name: tcx.symbol_name(instance).name, - source_hash: function_coverage.source_hash(), - is_used: function_coverage.is_used(), + source_hash: if is_used { fn_cov_info.function_source_hash } else { 0 }, + is_used, virtual_file_mapping: VirtualFileMapping::default(), - expressions: function_coverage.counter_expressions().collect::>(), + expressions, regions: ffi::Regions::default(), }; - fill_region_tables(tcx, global_file_table, function_coverage, &mut covfun); + 
fill_region_tables(tcx, global_file_table, fn_cov_info, ids_info, &mut covfun); if covfun.regions.has_no_regions() { - if covfun.is_used { - bug!("a used function should have had coverage mapping data but did not: {covfun:?}"); - } else { - debug!(?covfun, "unused function had no coverage mapping data"); - return None; - } + debug!(?covfun, "function has no mappings to embed; skipping"); + return None; } Some(covfun) } +/// Convert the function's coverage-counter expressions into a form suitable for FFI. +fn prepare_expressions( + fn_cov_info: &FunctionCoverageInfo, + ids_info: &CoverageIdsInfo, + is_used: bool, +) -> Vec { + // If any counters or expressions were removed by MIR opts, replace their + // terms with zero. + let counter_for_term = |term| { + if !is_used || ids_info.is_zero_term(term) { + ffi::Counter::ZERO + } else { + ffi::Counter::from_term(term) + } + }; + + // We know that LLVM will optimize out any unused expressions before + // producing the final coverage map, so there's no need to do the same + // thing on the Rust side unless we're confident we can do much better. + // (See `CounterExpressionsMinimizer` in `CoverageMappingWriter.cpp`.) + fn_cov_info + .expressions + .iter() + .map(move |&Expression { lhs, op, rhs }| ffi::CounterExpression { + lhs: counter_for_term(lhs), + kind: match op { + Op::Add => ffi::ExprKind::Add, + Op::Subtract => ffi::ExprKind::Subtract, + }, + rhs: counter_for_term(rhs), + }) + .collect::>() +} + /// Populates the mapping region tables in the current function's covfun record. fn fill_region_tables<'tcx>( tcx: TyCtxt<'tcx>, - global_file_table: &GlobalFileTable, - function_coverage: &FunctionCoverage<'tcx>, + global_file_table: &mut GlobalFileTable, + fn_cov_info: &'tcx FunctionCoverageInfo, + ids_info: &'tcx CoverageIdsInfo, covfun: &mut CovfunRecord<'tcx>, ) { - let counter_regions = function_coverage.counter_regions(); - if counter_regions.is_empty() { - return; - } - // Currently a function's mappings must all be in the same file as its body span. - let file_name = span_file_name(tcx, function_coverage.function_coverage_info.body_span); + let source_map = tcx.sess.source_map(); + let source_file = source_map.lookup_source_file(fn_cov_info.body_span.lo()); - // Look up the global file ID for that filename. - let global_file_id = global_file_table.global_file_id_for_file_name(file_name); + // Look up the global file ID for that file. + let global_file_id = global_file_table.global_file_id_for_file(&source_file); // Associate that global file ID with a local file ID for this function. let local_file_id = covfun.virtual_file_mapping.local_id_for_global(global_file_id); - debug!(" file id: {local_file_id:?} => {global_file_id:?} = '{file_name:?}'"); let ffi::Regions { code_regions, branch_regions, mcdc_branch_regions, mcdc_decision_regions } = &mut covfun.regions; + let make_cov_span = |span: Span| { + spans::make_coverage_span(local_file_id, source_map, fn_cov_info, &source_file, span) + }; + let discard_all = tcx.sess.coverage_discard_all_spans_in_codegen(); + // For each counter/region pair in this function+file, convert it to a // form suitable for FFI. 
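A self-contained sketch of what `prepare_expressions` does with terms that MIR opts removed: each such term is replaced by zero while the expression list is lowered to its FFI form. All types here (`Term`, `Op`, `FfiExpression`) are local stand-ins, not the compiler's.

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum Term {
    Zero,
    Counter(u32),
}

#[derive(Clone, Copy)]
enum Op {
    Add,
    Subtract,
}

struct Expression {
    lhs: Term,
    op: Op,
    rhs: Term,
}

#[derive(Debug, PartialEq)]
struct FfiExpression {
    lhs: Term,
    is_add: bool,
    rhs: Term,
}

fn prepare_expressions(exprs: &[Expression], is_zero: impl Fn(Term) -> bool) -> Vec<FfiExpression> {
    // Any term known to be zero (unused function, or removed by MIR opts) is folded to Zero.
    let counter_for_term = |term| if is_zero(term) { Term::Zero } else { term };
    exprs
        .iter()
        .map(|&Expression { lhs, op, rhs }| FfiExpression {
            lhs: counter_for_term(lhs),
            is_add: matches!(op, Op::Add),
            rhs: counter_for_term(rhs),
        })
        .collect()
}

fn main() {
    let exprs = [Expression { lhs: Term::Counter(1), op: Op::Add, rhs: Term::Counter(2) }];
    // Pretend counter 2 was optimized out.
    let out = prepare_expressions(&exprs, |t| t == Term::Counter(2));
    assert_eq!(out, vec![FfiExpression { lhs: Term::Counter(1), is_add: true, rhs: Term::Zero }]);
}
```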
- for (mapping_kind, region) in counter_regions { - debug!("Adding counter {mapping_kind:?} to map for {region:?}"); - let span = ffi::CoverageSpan::from_source_region(local_file_id, region); - match mapping_kind { + let is_zero_term = |term| !covfun.is_used || ids_info.is_zero_term(term); + for &Mapping { ref kind, span } in &fn_cov_info.mappings { + // If the mapping refers to counters/expressions that were removed by + // MIR opts, replace those occurrences with zero. + let kind = kind.map_terms(|term| if is_zero_term(term) { CovTerm::Zero } else { term }); + + // Convert the `Span` into coordinates that we can pass to LLVM, or + // discard the span if conversion fails. In rare, cases _all_ of a + // function's spans are discarded, and the rest of coverage codegen + // needs to handle that gracefully to avoid a repeat of #133606. + // We don't have a good test case for triggering that organically, so + // instead we set `-Zcoverage-options=discard-all-spans-in-codegen` + // to force it to occur. + let Some(cov_span) = make_cov_span(span) else { continue }; + if discard_all { + continue; + } + + match kind { MappingKind::Code(term) => { - code_regions.push(ffi::CodeRegion { span, counter: ffi::Counter::from_term(term) }); + code_regions + .push(ffi::CodeRegion { cov_span, counter: ffi::Counter::from_term(term) }); } MappingKind::Branch { true_term, false_term } => { branch_regions.push(ffi::BranchRegion { - span, + cov_span, true_counter: ffi::Counter::from_term(true_term), false_counter: ffi::Counter::from_term(false_term), }); } MappingKind::MCDCBranch { true_term, false_term, mcdc_params } => { mcdc_branch_regions.push(ffi::MCDCBranchRegion { - span, + cov_span, true_counter: ffi::Counter::from_term(true_term), false_counter: ffi::Counter::from_term(false_term), mcdc_branch_params: ffi::mcdc::BranchParameters::from(mcdc_params), @@ -123,7 +176,7 @@ fn fill_region_tables<'tcx>( } MappingKind::MCDCDecision(mcdc_decision_params) => { mcdc_decision_regions.push(ffi::MCDCDecisionRegion { - span, + cov_span, mcdc_decision_params: ffi::mcdc::DecisionParameters::from(mcdc_decision_params), }); } diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/spans.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/spans.rs new file mode 100644 index 0000000000000..6d1d91340c295 --- /dev/null +++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/spans.rs @@ -0,0 +1,126 @@ +use rustc_middle::mir::coverage::FunctionCoverageInfo; +use rustc_span::source_map::SourceMap; +use rustc_span::{BytePos, Pos, SourceFile, Span}; +use tracing::debug; + +use crate::coverageinfo::ffi; +use crate::coverageinfo::mapgen::LocalFileId; + +/// Converts the span into its start line and column, and end line and column. +/// +/// Line numbers and column numbers are 1-based. Unlike most column numbers emitted by +/// the compiler, these column numbers are denoted in **bytes**, because that's what +/// LLVM's `llvm-cov` tool expects to see in coverage maps. +/// +/// Returns `None` if the conversion failed for some reason. This shouldn't happen, +/// but it's hard to rule out entirely (especially in the presence of complex macros +/// or other expansions), and if it does happen then skipping a span or function is +/// better than an ICE or `llvm-cov` failure that the user might have no way to avoid. 
+pub(crate) fn make_coverage_span( + file_id: LocalFileId, + source_map: &SourceMap, + fn_cov_info: &FunctionCoverageInfo, + file: &SourceFile, + span: Span, +) -> Option { + let span = ensure_non_empty_span(source_map, fn_cov_info, span)?; + + let lo = span.lo(); + let hi = span.hi(); + + // Column numbers need to be in bytes, so we can't use the more convenient + // `SourceMap` methods for looking up file coordinates. + let line_and_byte_column = |pos: BytePos| -> Option<(usize, usize)> { + let rpos = file.relative_position(pos); + let line_index = file.lookup_line(rpos)?; + let line_start = file.lines()[line_index]; + // Line numbers and column numbers are 1-based, so add 1 to each. + Some((line_index + 1, (rpos - line_start).to_usize() + 1)) + }; + + let (mut start_line, start_col) = line_and_byte_column(lo)?; + let (mut end_line, end_col) = line_and_byte_column(hi)?; + + // Apply an offset so that code in doctests has correct line numbers. + // FIXME(#79417): Currently we have no way to offset doctest _columns_. + start_line = source_map.doctest_offset_line(&file.name, start_line); + end_line = source_map.doctest_offset_line(&file.name, end_line); + + check_coverage_span(ffi::CoverageSpan { + file_id: file_id.as_u32(), + start_line: start_line as u32, + start_col: start_col as u32, + end_line: end_line as u32, + end_col: end_col as u32, + }) +} + +fn ensure_non_empty_span( + source_map: &SourceMap, + fn_cov_info: &FunctionCoverageInfo, + span: Span, +) -> Option { + if !span.is_empty() { + return Some(span); + } + + let lo = span.lo(); + let hi = span.hi(); + + // The span is empty, so try to expand it to cover an adjacent '{' or '}', + // but only within the bounds of the body span. + let try_next = hi < fn_cov_info.body_span.hi(); + let try_prev = fn_cov_info.body_span.lo() < lo; + if !(try_next || try_prev) { + return None; + } + + source_map + .span_to_source(span, |src, start, end| try { + // Adjusting span endpoints by `BytePos(1)` is normally a bug, + // but in this case we have specifically checked that the character + // we're skipping over is one of two specific ASCII characters, so + // adjusting by exactly 1 byte is correct. + if try_next && src.as_bytes()[end] == b'{' { + Some(span.with_hi(hi + BytePos(1))) + } else if try_prev && src.as_bytes()[start - 1] == b'}' { + Some(span.with_lo(lo - BytePos(1))) + } else { + None + } + }) + .ok()? +} + +/// If `llvm-cov` sees a source region that is improperly ordered (end < start), +/// it will immediately exit with a fatal error. To prevent that from happening, +/// discard regions that are improperly ordered, or might be interpreted in a +/// way that makes them improperly ordered. +fn check_coverage_span(cov_span: ffi::CoverageSpan) -> Option { + let ffi::CoverageSpan { file_id: _, start_line, start_col, end_line, end_col } = cov_span; + + // Line/column coordinates are supposed to be 1-based. If we ever emit + // coordinates of 0, `llvm-cov` might misinterpret them. + let all_nonzero = [start_line, start_col, end_line, end_col].into_iter().all(|x| x != 0); + // Coverage mappings use the high bit of `end_col` to indicate that a + // region is actually a "gap" region, so make sure it's unset. + let end_col_has_high_bit_unset = (end_col & (1 << 31)) == 0; + // If a region is improperly ordered (end < start), `llvm-cov` will exit + // with a fatal error, which is inconvenient for users and hard to debug. 
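The validity checks that `check_coverage_span` applies, restated as a free function over bare `u32` coordinates (a stand-in for `ffi::CoverageSpan`) so the three conditions are easy to exercise on their own.

```rust
fn is_acceptable(start_line: u32, start_col: u32, end_line: u32, end_col: u32) -> bool {
    // Coordinates are 1-based; a zero could be misinterpreted by `llvm-cov`.
    let all_nonzero = [start_line, start_col, end_line, end_col].into_iter().all(|x| x != 0);
    // The high bit of `end_col` marks "gap" regions, so it must be unset here.
    let high_bit_unset = (end_col & (1 << 31)) == 0;
    // `llvm-cov` exits with a fatal error on regions where end < start.
    let is_ordered = (start_line, start_col) <= (end_line, end_col);
    all_nonzero && high_bit_unset && is_ordered
}

fn main() {
    assert!(is_acceptable(3, 5, 3, 9));
    assert!(!is_acceptable(3, 9, 3, 5)); // end before start
    assert!(!is_acceptable(0, 1, 1, 1)); // zero line number
    assert!(!is_acceptable(1, 1, 1, 1 << 31)); // gap-region bit set
}
```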
+ let is_ordered = (start_line, start_col) <= (end_line, end_col); + + if all_nonzero && end_col_has_high_bit_unset && is_ordered { + Some(cov_span) + } else { + debug!( + ?cov_span, + ?all_nonzero, + ?end_col_has_high_bit_unset, + ?is_ordered, + "Skipping source region that would be misinterpreted or rejected by LLVM" + ); + // If this happens in a debug build, ICE to make it easier to notice. + debug_assert!(false, "Improper source region: {cov_span:?}"); + None + } +} diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs index 82b6677e2038f..7311cd9d230c6 100644 --- a/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs +++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs @@ -5,7 +5,7 @@ use rustc_abi::Size; use rustc_codegen_ssa::traits::{ BuilderMethods, ConstCodegenMethods, CoverageInfoBuilderMethods, MiscCodegenMethods, }; -use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; +use rustc_data_structures::fx::{FxHashMap, FxIndexSet}; use rustc_middle::mir::coverage::CoverageKind; use rustc_middle::ty::Instance; use rustc_middle::ty::layout::HasTyCtxt; @@ -13,18 +13,16 @@ use tracing::{debug, instrument}; use crate::builder::Builder; use crate::common::CodegenCx; -use crate::coverageinfo::map_data::FunctionCoverage; use crate::llvm; pub(crate) mod ffi; mod llvm_cov; -pub(crate) mod map_data; mod mapgen; /// Extra per-CGU context/state needed for coverage instrumentation. pub(crate) struct CguCoverageContext<'ll, 'tcx> { /// Coverage data for each instrumented function identified by DefId. - pub(crate) function_coverage_map: RefCell, FunctionCoverage<'tcx>>>, + pub(crate) instances_used: RefCell>>, pub(crate) pgo_func_name_var_map: RefCell, &'ll llvm::Value>>, pub(crate) mcdc_condition_bitmap_map: RefCell, Vec<&'ll llvm::Value>>>, @@ -34,17 +32,13 @@ pub(crate) struct CguCoverageContext<'ll, 'tcx> { impl<'ll, 'tcx> CguCoverageContext<'ll, 'tcx> { pub(crate) fn new() -> Self { Self { - function_coverage_map: Default::default(), + instances_used: RefCell::>::default(), pgo_func_name_var_map: Default::default(), mcdc_condition_bitmap_map: Default::default(), covfun_section_name: Default::default(), } } - fn take_function_coverage_map(&self) -> FxIndexMap, FunctionCoverage<'tcx>> { - self.function_coverage_map.replace(FxIndexMap::default()) - } - /// LLVM use a temp value to record evaluated mcdc test vector of each decision, which is /// called condition bitmap. In order to handle nested decisions, several condition bitmaps can /// be allocated for a function body. These values are named `mcdc.addr.{i}` and are a 32-bit @@ -157,12 +151,7 @@ impl<'tcx> CoverageInfoBuilderMethods<'tcx> for Builder<'_, '_, 'tcx> { // Mark the instance as used in this CGU, for coverage purposes. // This includes functions that were not partitioned into this CGU, // but were MIR-inlined into one of this CGU's functions. - coverage_cx.function_coverage_map.borrow_mut().entry(instance).or_insert_with(|| { - FunctionCoverage::new_used( - function_coverage_info, - bx.tcx.coverage_ids_info(instance.def), - ) - }); + coverage_cx.instances_used.borrow_mut().insert(instance); match *kind { CoverageKind::SpanMarker | CoverageKind::BlockMarker { .. 
} => unreachable!( diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/create_scope_map.rs b/compiler/rustc_codegen_llvm/src/debuginfo/create_scope_map.rs index 07bd0f4d1c171..e545ce386ede2 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/create_scope_map.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/create_scope_map.rs @@ -4,7 +4,7 @@ use rustc_codegen_ssa::mir::debuginfo::{DebugScope, FunctionDebugContext}; use rustc_codegen_ssa::traits::*; use rustc_data_structures::fx::FxHashMap; use rustc_index::Idx; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::mir::{Body, SourceScope}; use rustc_middle::ty::layout::{FnAbiOf, HasTypingEnv}; use rustc_middle::ty::{self, Instance}; @@ -27,7 +27,7 @@ pub(crate) fn compute_mir_scopes<'ll, 'tcx>( ) { // Find all scopes with variables defined in them. let variables = if cx.sess().opts.debuginfo == DebugInfo::Full { - let mut vars = BitSet::new_empty(mir.source_scopes.len()); + let mut vars = DenseBitSet::new_empty(mir.source_scopes.len()); // FIXME(eddyb) take into account that arguments always have debuginfo, // irrespective of their name (assuming full debuginfo is enabled). // NOTE(eddyb) actually, on second thought, those are always in the @@ -40,7 +40,7 @@ pub(crate) fn compute_mir_scopes<'ll, 'tcx>( // Nothing to emit, of course. None }; - let mut instantiated = BitSet::new_empty(mir.source_scopes.len()); + let mut instantiated = DenseBitSet::new_empty(mir.source_scopes.len()); let mut discriminators = FxHashMap::default(); // Instantiate all scopes. for idx in 0..mir.source_scopes.len() { @@ -63,9 +63,9 @@ fn make_mir_scope<'ll, 'tcx>( cx: &CodegenCx<'ll, 'tcx>, instance: Instance<'tcx>, mir: &Body<'tcx>, - variables: &Option>, + variables: &Option>, debug_context: &mut FunctionDebugContext<'tcx, &'ll DIScope, &'ll DILocation>, - instantiated: &mut BitSet, + instantiated: &mut DenseBitSet, discriminators: &mut FxHashMap, scope: SourceScope, ) { diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/dwarf_const.rs b/compiler/rustc_codegen_llvm/src/debuginfo/dwarf_const.rs new file mode 100644 index 0000000000000..408429152223d --- /dev/null +++ b/compiler/rustc_codegen_llvm/src/debuginfo/dwarf_const.rs @@ -0,0 +1,37 @@ +//! Definitions of various DWARF-related constants. + +use libc::c_uint; + +/// Helper macro to let us redeclare gimli's constants as our own constants +/// with a different type, with less risk of copy-paste errors. +macro_rules! declare_constant { + ( + $name:ident : $type:ty + ) => { + #[allow(non_upper_case_globals)] + pub(crate) const $name: $type = ::gimli::constants::$name.0 as $type; + + // Assert that as-cast probably hasn't changed the value. + const _: () = assert!($name as i128 == ::gimli::constants::$name.0 as i128); + }; +} + +declare_constant!(DW_TAG_const_type: c_uint); + +// DWARF languages. +declare_constant!(DW_LANG_Rust: c_uint); + +// DWARF attribute type encodings. +declare_constant!(DW_ATE_boolean: c_uint); +declare_constant!(DW_ATE_float: c_uint); +declare_constant!(DW_ATE_signed: c_uint); +declare_constant!(DW_ATE_unsigned: c_uint); +declare_constant!(DW_ATE_UTF: c_uint); + +// DWARF expression operators. +declare_constant!(DW_OP_deref: u64); +declare_constant!(DW_OP_plus_uconst: u64); +/// Defined by LLVM in `llvm/include/llvm/BinaryFormat/Dwarf.h`. +/// Double-checked by a static assertion in `RustWrapper.cpp`. 
+#[allow(non_upper_case_globals)] +pub(crate) const DW_OP_LLVM_fragment: u64 = 0x1000; diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs b/compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs index aef8642f1998a..2c9f1cda13a8a 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs @@ -7,7 +7,7 @@ use rustc_hir::def_id::LOCAL_CRATE; use rustc_middle::bug; use rustc_middle::middle::debugger_visualizer::DebuggerVisualizerType; use rustc_session::config::{CrateType, DebugInfo}; -use rustc_span::symbol::sym; +use rustc_span::sym; use crate::builder::Builder; use crate::common::CodegenCx; diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs index 592752540224f..88e43e1c678ad 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs @@ -16,13 +16,13 @@ use rustc_middle::ty::{ self, AdtKind, CoroutineArgsExt, Instance, PolyExistentialTraitRef, Ty, TyCtxt, Visibility, }; use rustc_session::config::{self, DebugInfo, Lto}; -use rustc_span::symbol::Symbol; -use rustc_span::{DUMMY_SP, FileName, FileNameDisplayPreference, SourceFile, hygiene}; +use rustc_span::{DUMMY_SP, FileName, FileNameDisplayPreference, SourceFile, Symbol, hygiene}; use rustc_symbol_mangling::typeid_for_trait_ref; use rustc_target::spec::DebuginfoKind; use smallvec::smallvec; use tracing::{debug, instrument}; +pub(crate) use self::type_map::TypeMap; use self::type_map::{DINodeCreationResult, Stub, UniqueTypeId}; use super::CodegenUnitDebugContext; use super::namespace::mangled_name_of_instance; @@ -31,6 +31,7 @@ use super::utils::{ DIB, create_DIArray, debug_context, get_namespace_for_item, is_node_local_to_unit, }; use crate::common::{AsCCharPtr, CodegenCx}; +use crate::debuginfo::dwarf_const; use crate::debuginfo::metadata::type_map::build_type_with_children; use crate::debuginfo::utils::{WidePtrKind, wide_pointer_kind}; use crate::llvm::debuginfo::{ @@ -60,20 +61,6 @@ impl fmt::Debug for llvm::Metadata { } } -// From DWARF 5. -// See http://www.dwarfstd.org/ShowIssue.php?issue=140129.1. -const DW_LANG_RUST: c_uint = 0x1c; -#[allow(non_upper_case_globals)] -const DW_ATE_boolean: c_uint = 0x02; -#[allow(non_upper_case_globals)] -const DW_ATE_float: c_uint = 0x04; -#[allow(non_upper_case_globals)] -const DW_ATE_signed: c_uint = 0x05; -#[allow(non_upper_case_globals)] -const DW_ATE_unsigned: c_uint = 0x07; -#[allow(non_upper_case_globals)] -const DW_ATE_UTF: c_uint = 0x10; - pub(super) const UNKNOWN_LINE_NUMBER: c_uint = 0; pub(super) const UNKNOWN_COLUMN_NUMBER: c_uint = 0; @@ -88,8 +75,6 @@ type SmallVec = smallvec::SmallVec<[T; 16]>; mod enums; mod type_map; -pub(crate) use type_map::TypeMap; - /// Returns from the enclosing function if the type debuginfo node with the given /// unique ID can be found in the type map. macro_rules! return_if_di_node_created_in_meantime { @@ -520,7 +505,7 @@ fn recursion_marker_type_di_node<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>) -> &'ll D name.as_c_char_ptr(), name.len(), cx.tcx.data_layout.pointer_size.bits(), - DW_ATE_unsigned, + dwarf_const::DW_ATE_unsigned, ) } }) @@ -779,6 +764,8 @@ fn build_basic_type_di_node<'ll, 'tcx>( // .natvis visualizers (and perhaps other existing native debuggers?) 
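A compilable sketch of the `declare_constant!` idea from the new `dwarf_const.rs`: re-declare an upstream constant under a different integer type, and let a `const` assertion fail the build if the cast ever changes the value. The `upstream` module here is a stand-in for `gimli::constants`, and the value 0x07 for `DW_ATE_unsigned` matches the hard-coded constant being deleted from `metadata.rs`.

```rust
mod upstream {
    pub const DW_ATE_unsigned: u16 = 0x07;
}

macro_rules! declare_constant {
    ($name:ident : $type:ty) => {
        #[allow(non_upper_case_globals)]
        pub const $name: $type = crate::upstream::$name as $type;

        // Evaluated at compile time; the build fails if the cast truncated the value.
        const _: () = assert!($name as i128 == crate::upstream::$name as i128);
    };
}

declare_constant!(DW_ATE_unsigned: u32);

fn main() {
    assert_eq!(DW_ATE_unsigned, 0x07_u32);
}
```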
let cpp_like_debuginfo = cpp_like_debuginfo(cx.tcx); + use dwarf_const::{DW_ATE_UTF, DW_ATE_boolean, DW_ATE_float, DW_ATE_signed, DW_ATE_unsigned}; + let (name, encoding) = match t.kind() { ty::Never => ("!", DW_ATE_unsigned), ty::Tuple(elements) if elements.is_empty() => { @@ -959,7 +946,7 @@ pub(crate) fn build_compile_unit_di_node<'ll, 'tcx>( let unit_metadata = llvm::LLVMRustDIBuilderCreateCompileUnit( debug_context.builder, - DW_LANG_RUST, + dwarf_const::DW_LANG_Rust, compile_unit_file, producer.as_c_char_ptr(), producer.len(), diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs index d374767f187d8..a72e205c9b249 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs @@ -12,6 +12,7 @@ use rustc_middle::ty::{self, AdtDef, CoroutineArgs, CoroutineArgsExt, Ty}; use smallvec::smallvec; use crate::common::{AsCCharPtr, CodegenCx}; +use crate::debuginfo::dwarf_const::DW_TAG_const_type; use crate::debuginfo::metadata::enums::DiscrResult; use crate::debuginfo::metadata::type_map::{self, Stub, UniqueTypeId}; use crate::debuginfo::metadata::{ @@ -212,21 +213,17 @@ pub(super) fn build_enum_type_di_node<'ll, 'tcx>( ), |cx, enum_type_di_node| { match enum_type_and_layout.variants { - Variants::Single { index: variant_index } => { - if enum_adt_def.variants().is_empty() { - // Uninhabited enums have Variants::Single. We don't generate - // any members for them. - return smallvec![]; - } - - build_single_variant_union_fields( - cx, - enum_adt_def, - enum_type_and_layout, - enum_type_di_node, - variant_index, - ) + Variants::Empty => { + // We don't generate any members for uninhabited types. + return smallvec![]; } + Variants::Single { index: variant_index } => build_single_variant_union_fields( + cx, + enum_adt_def, + enum_type_and_layout, + enum_type_di_node, + variant_index, + ), Variants::Multiple { tag_encoding: TagEncoding::Direct, ref variants, @@ -303,6 +300,7 @@ pub(super) fn build_coroutine_di_node<'ll, 'tcx>( ) } Variants::Single { .. } + | Variants::Empty | Variants::Multiple { tag_encoding: TagEncoding::Niche { .. }, .. } => { bug!( "Encountered coroutine with non-direct-tag layout: {:?}", @@ -569,22 +567,39 @@ fn build_variant_struct_wrapper_type_di_node<'ll, 'tcx>( None, )); - let build_assoc_const = - |name: &str, type_di_node: &'ll DIType, value: u64, align: Align| unsafe { - llvm::LLVMRustDIBuilderCreateStaticMemberType( - DIB(cx), - wrapper_struct_type_di_node, - name.as_c_char_ptr(), - name.len(), - unknown_file_metadata(cx), - UNKNOWN_LINE_NUMBER, - type_di_node, - DIFlags::FlagZero, - Some(cx.const_u64(value)), - align.bits() as u32, - ) + let build_assoc_const = |name: &str, + type_di_node_: &'ll DIType, + value: u64, + align: Align| unsafe { + // FIXME: Currently we force all DISCR_* values to be u64's as LLDB seems to have + // problems inspecting other value types. Since DISCR_* is typically only going to be + // directly inspected via the debugger visualizer - which compares it to the `tag` value + // (whose type is not modified at all) it shouldn't cause any real problems. 
+ let (t_di, align) = if name == ASSOC_CONST_DISCR_NAME { + (type_di_node_, align.bits() as u32) + } else { + let ty_u64 = Ty::new_uint(cx.tcx, ty::UintTy::U64); + (type_di_node(cx, ty_u64), Align::EIGHT.bits() as u32) }; + // must wrap type in a `const` modifier for LLDB to be able to inspect the value of the member + let field_type = + llvm::LLVMRustDIBuilderCreateQualifiedType(DIB(cx), DW_TAG_const_type, t_di); + + llvm::LLVMRustDIBuilderCreateStaticMemberType( + DIB(cx), + wrapper_struct_type_di_node, + name.as_c_char_ptr(), + name.len(), + unknown_file_metadata(cx), + UNKNOWN_LINE_NUMBER, + field_type, + DIFlags::FlagZero, + Some(cx.const_u64(value)), + align, + ) + }; + // We also always have an associated constant for the discriminant value // of the variant. fields.push(build_assoc_const( diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs index 65ab22ad89e81..9f6a5cc89e023 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs @@ -392,7 +392,7 @@ fn compute_discriminant_value<'ll, 'tcx>( variant_index: VariantIdx, ) -> DiscrResult { match enum_type_and_layout.layout.variants() { - &Variants::Single { .. } => DiscrResult::NoDiscriminant, + &Variants::Single { .. } | &Variants::Empty => DiscrResult::NoDiscriminant, &Variants::Multiple { tag_encoding: TagEncoding::Direct, .. } => DiscrResult::Value( enum_type_and_layout.ty.discriminant_for_variant(cx.tcx, variant_index).unwrap().val, ), diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs index 241bf167a81a0..11824398f243e 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs @@ -358,8 +358,8 @@ fn build_discr_member_di_node<'ll, 'tcx>( let containing_scope = enum_or_coroutine_type_di_node; match enum_or_coroutine_type_and_layout.layout.variants() { - // A single-variant enum has no discriminant. - &Variants::Single { .. } => None, + // A single-variant or no-variant enum has no discriminant. + &Variants::Single { .. } | &Variants::Empty => None, &Variants::Multiple { tag_field, .. 
} => { let tag_base_type = tag_base_type(cx.tcx, enum_or_coroutine_type_and_layout); diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs b/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs index a8fdfbed59244..755f4816acf98 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs @@ -19,9 +19,8 @@ use rustc_middle::ty::layout::{HasTypingEnv, LayoutOf}; use rustc_middle::ty::{self, GenericArgsRef, Instance, Ty, TypeVisitableExt}; use rustc_session::Session; use rustc_session::config::{self, DebugInfo}; -use rustc_span::symbol::Symbol; use rustc_span::{ - BytePos, Pos, SourceFile, SourceFileAndLine, SourceFileHash, Span, StableSourceFileId, + BytePos, Pos, SourceFile, SourceFileAndLine, SourceFileHash, Span, StableSourceFileId, Symbol, }; use smallvec::SmallVec; use tracing::debug; @@ -40,6 +39,7 @@ use crate::llvm::debuginfo::{ use crate::value::Value; mod create_scope_map; +mod dwarf_const; mod gdb; pub(crate) mod metadata; mod namespace; @@ -48,6 +48,10 @@ mod utils; use self::create_scope_map::compute_mir_scopes; pub(crate) use self::metadata::build_global_var_di_node; +// FIXME(Zalathar): These `DW_TAG_*` constants are fake values that were +// removed from LLVM in 2015, and are only used by our own `RustWrapper.cpp` +// to decide which C++ API to call. Instead, we should just have two separate +// FFI functions and choose the correct one on the Rust side. #[allow(non_upper_case_globals)] const DW_TAG_auto_variable: c_uint = 0x100; #[allow(non_upper_case_globals)] @@ -153,29 +157,26 @@ impl<'ll> DebugInfoBuilderMethods for Builder<'_, 'll, '_> { indirect_offsets: &[Size], fragment: Option>, ) { + use dwarf_const::{DW_OP_LLVM_fragment, DW_OP_deref, DW_OP_plus_uconst}; + // Convert the direct and indirect offsets and fragment byte range to address ops. - // FIXME(eddyb) use `const`s instead of getting the values via FFI, - // the values should match the ones in the DWARF standard anyway. - let op_deref = || unsafe { llvm::LLVMRustDIBuilderCreateOpDeref() }; - let op_plus_uconst = || unsafe { llvm::LLVMRustDIBuilderCreateOpPlusUconst() }; - let op_llvm_fragment = || unsafe { llvm::LLVMRustDIBuilderCreateOpLLVMFragment() }; let mut addr_ops = SmallVec::<[u64; 8]>::new(); if direct_offset.bytes() > 0 { - addr_ops.push(op_plus_uconst()); + addr_ops.push(DW_OP_plus_uconst); addr_ops.push(direct_offset.bytes() as u64); } for &offset in indirect_offsets { - addr_ops.push(op_deref()); + addr_ops.push(DW_OP_deref); if offset.bytes() > 0 { - addr_ops.push(op_plus_uconst()); + addr_ops.push(DW_OP_plus_uconst); addr_ops.push(offset.bytes() as u64); } } if let Some(fragment) = fragment { // `DW_OP_LLVM_fragment` takes as arguments the fragment's // offset and size, both of them in bits. - addr_ops.push(op_llvm_fragment()); + addr_ops.push(DW_OP_LLVM_fragment); addr_ops.push(fragment.start.bits() as u64); addr_ops.push((fragment.end - fragment.start).bits() as u64); } diff --git a/compiler/rustc_codegen_llvm/src/declare.rs b/compiler/rustc_codegen_llvm/src/declare.rs index d338c84875497..c72b5b5611f2b 100644 --- a/compiler/rustc_codegen_llvm/src/declare.rs +++ b/compiler/rustc_codegen_llvm/src/declare.rs @@ -32,7 +32,7 @@ use crate::{attributes, llvm}; /// /// If there’s a value with the same name already declared, the function will /// update the declaration and return existing Value instead. 
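A sketch of the DWARF expression list that `dbg_var_addr` now builds from plain constants instead of per-op FFI calls. `addr_ops` is a hypothetical helper; the opcode values are the standard DWARF encodings plus LLVM's vendor-extension `DW_OP_LLVM_fragment` value shown in the diff.

```rust
#[allow(non_upper_case_globals)]
const DW_OP_deref: u64 = 0x06;
#[allow(non_upper_case_globals)]
const DW_OP_plus_uconst: u64 = 0x23;
#[allow(non_upper_case_globals)]
const DW_OP_LLVM_fragment: u64 = 0x1000;

fn addr_ops(direct_offset: u64, indirect_offsets: &[u64], fragment_bits: Option<(u64, u64)>) -> Vec<u64> {
    let mut ops = Vec::new();
    if direct_offset > 0 {
        ops.push(DW_OP_plus_uconst);
        ops.push(direct_offset);
    }
    for &offset in indirect_offsets {
        ops.push(DW_OP_deref);
        if offset > 0 {
            ops.push(DW_OP_plus_uconst);
            ops.push(offset);
        }
    }
    if let Some((start_bits, size_bits)) = fragment_bits {
        // `DW_OP_LLVM_fragment` takes the fragment's offset and size, both in bits.
        ops.push(DW_OP_LLVM_fragment);
        ops.push(start_bits);
        ops.push(size_bits);
    }
    ops
}

fn main() {
    // A variable 8 bytes into its slot, reached through one pointer indirection.
    assert_eq!(addr_ops(8, &[0], None), vec![DW_OP_plus_uconst, 8, DW_OP_deref]);
}
```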
-fn declare_raw_fn<'ll>( +pub(crate) fn declare_raw_fn<'ll>( cx: &CodegenCx<'ll, '_>, name: &str, callconv: llvm::CallConv, @@ -125,7 +125,7 @@ impl<'ll, 'tcx> CodegenCx<'ll, 'tcx> { let llfn = declare_raw_fn( self, name, - fn_abi.llvm_cconv(), + fn_abi.llvm_cconv(self), llvm::UnnamedAddr::Global, llvm::Visibility::Default, fn_abi.llvm_type(self), diff --git a/compiler/rustc_codegen_llvm/src/errors.rs b/compiler/rustc_codegen_llvm/src/errors.rs index 3cdb5b971d908..f4c9491f75844 100644 --- a/compiler/rustc_codegen_llvm/src/errors.rs +++ b/compiler/rustc_codegen_llvm/src/errors.rs @@ -37,6 +37,7 @@ pub(crate) struct UnstableCTargetFeature<'a> { #[note(codegen_llvm_forbidden_ctarget_feature_issue)] pub(crate) struct ForbiddenCTargetFeature<'a> { pub feature: &'a str, + pub enabled: &'a str, pub reason: &'a str, } @@ -89,6 +90,11 @@ impl Diagnostic<'_, G> for ParseTargetMachineConfig<'_> { } } +#[derive(Diagnostic)] +#[diag(codegen_llvm_autodiff_without_lto)] +#[note] +pub(crate) struct AutoDiffWithoutLTO; + #[derive(Diagnostic)] #[diag(codegen_llvm_lto_disallowed)] pub(crate) struct LtoDisallowed; @@ -131,6 +137,8 @@ pub enum LlvmError<'a> { PrepareThinLtoModule, #[diag(codegen_llvm_parse_bitcode)] ParseBitcode, + #[diag(codegen_llvm_prepare_autodiff)] + PrepareAutoDiff { src: String, target: String, error: String }, } pub(crate) struct WithLlvmError<'a>(pub LlvmError<'a>, pub String); @@ -152,6 +160,7 @@ impl Diagnostic<'_, G> for WithLlvmError<'_> { } PrepareThinLtoModule => fluent::codegen_llvm_prepare_thin_lto_module_with_llvm_err, ParseBitcode => fluent::codegen_llvm_parse_bitcode_with_llvm_err, + PrepareAutoDiff { .. } => fluent::codegen_llvm_prepare_autodiff_with_llvm_err, }; self.0 .into_diag(dcx, level) @@ -205,12 +214,6 @@ pub(crate) struct MismatchedDataLayout<'a> { pub llvm_layout: &'a str, } -#[derive(Diagnostic)] -#[diag(codegen_llvm_invalid_target_feature_prefix)] -pub(crate) struct InvalidTargetFeaturePrefix<'a> { - pub feature: &'a str, -} - #[derive(Diagnostic)] #[diag(codegen_llvm_fixed_x18_invalid_arch)] pub(crate) struct FixedX18InvalidArch<'a> { diff --git a/compiler/rustc_codegen_llvm/src/intrinsic.rs b/compiler/rustc_codegen_llvm/src/intrinsic.rs index c38c5d4c6442c..cabcfc9b42b4e 100644 --- a/compiler/rustc_codegen_llvm/src/intrinsic.rs +++ b/compiler/rustc_codegen_llvm/src/intrinsic.rs @@ -340,6 +340,37 @@ impl<'ll, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> { self.const_i32(cache_type), ]) } + sym::carrying_mul_add => { + let (size, signed) = fn_args.type_at(0).int_size_and_signed(self.tcx); + + let wide_llty = self.type_ix(size.bits() * 2); + let args = args.as_array().unwrap(); + let [a, b, c, d] = args.map(|a| self.intcast(a.immediate(), wide_llty, signed)); + + let wide = if signed { + let prod = self.unchecked_smul(a, b); + let acc = self.unchecked_sadd(prod, c); + self.unchecked_sadd(acc, d) + } else { + let prod = self.unchecked_umul(a, b); + let acc = self.unchecked_uadd(prod, c); + self.unchecked_uadd(acc, d) + }; + + let narrow_llty = self.type_ix(size.bits()); + let low = self.trunc(wide, narrow_llty); + let bits_const = self.const_uint(wide_llty, size.bits()); + // No need for ashr when signed; LLVM changes it to lshr anyway. + let high = self.lshr(wide, bits_const); + // FIXME: could be `trunc nuw`, even for signed. 
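The arithmetic the new `carrying_mul_add` lowering performs, written out for `u64` operands with `u128` as the double-width type; the actual intrinsic code builds the equivalent LLVM IR for any integer width and signedness.

```rust
fn carrying_mul_add_u64(a: u64, b: u64, c: u64, d: u64) -> (u64, u64) {
    // a*b + c + d cannot overflow 128 bits:
    // the maximum is (2^64-1)^2 + 2*(2^64-1) = 2^128 - 1.
    let wide = (a as u128) * (b as u128) + (c as u128) + (d as u128);
    let low = wide as u64; // truncate to the low half
    let high = (wide >> 64) as u64; // logical shift right, then truncate
    (low, high)
}

fn main() {
    assert_eq!(carrying_mul_add_u64(2, 3, 4, 5), (15, 0));
    assert_eq!(
        carrying_mul_add_u64(u64::MAX, u64::MAX, u64::MAX, u64::MAX),
        (u64::MAX, u64::MAX)
    );
}
```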
+ let high = self.trunc(high, narrow_llty); + + let pair_llty = self.type_struct(&[narrow_llty, narrow_llty], false); + let pair = self.const_poison(pair_llty); + let pair = self.insert_value(pair, low, 0); + let pair = self.insert_value(pair, high, 1); + pair + } sym::ctlz | sym::ctlz_nonzero | sym::cttz diff --git a/compiler/rustc_codegen_llvm/src/lib.rs b/compiler/rustc_codegen_llvm/src/lib.rs index af8562db05477..06afe8bb3ad54 100644 --- a/compiler/rustc_codegen_llvm/src/lib.rs +++ b/compiler/rustc_codegen_llvm/src/lib.rs @@ -17,6 +17,8 @@ #![feature(iter_intersperse)] #![feature(let_chains)] #![feature(rustdoc_internals)] +#![feature(slice_as_array)] +#![feature(try_blocks)] #![warn(unreachable_pub)] // tidy-alphabetical-end @@ -26,9 +28,10 @@ use std::mem::ManuallyDrop; use back::owned_target_machine::OwnedTargetMachine; use back::write::{create_informational_target_machine, create_target_machine}; -use errors::ParseTargetMachineConfig; +use errors::{AutoDiffWithoutLTO, ParseTargetMachineConfig}; pub use llvm_util::target_features_cfg; use rustc_ast::expand::allocator::AllocatorKind; +use rustc_ast::expand::autodiff_attrs::AutoDiffItem; use rustc_codegen_ssa::back::lto::{LtoModuleCodegen, SerializedModule, ThinModule}; use rustc_codegen_ssa::back::write::{ CodegenContext, FatLtoInput, ModuleConfig, TargetMachineFactoryConfig, TargetMachineFactoryFn, @@ -42,8 +45,8 @@ use rustc_middle::dep_graph::{WorkProduct, WorkProductId}; use rustc_middle::ty::TyCtxt; use rustc_middle::util::Providers; use rustc_session::Session; -use rustc_session::config::{OptLevel, OutputFilenames, PrintKind, PrintRequest}; -use rustc_span::symbol::Symbol; +use rustc_session::config::{Lto, OptLevel, OutputFilenames, PrintKind, PrintRequest}; +use rustc_span::Symbol; mod back { pub(crate) mod archive; @@ -231,6 +234,20 @@ impl WriteBackendMethods for LlvmCodegenBackend { fn serialize_module(module: ModuleCodegen) -> (String, Self::ModuleBuffer) { (module.name, back::lto::ModuleBuffer::new(module.module_llvm.llmod())) } + /// Generate autodiff rules + fn autodiff( + cgcx: &CodegenContext, + tcx: TyCtxt<'_>, + module: &ModuleCodegen, + diff_fncs: Vec, + config: &ModuleConfig, + ) -> Result<(), FatalError> { + if cgcx.lto != Lto::Fat { + let dcx = cgcx.create_dcx(); + return Err(dcx.handle().emit_almost_fatal(AutoDiffWithoutLTO)); + } + builder::autodiff::differentiate(module, cgcx, tcx, diff_fncs, config) + } } unsafe impl Send for LlvmCodegenBackend {} // Llvm is on a per-thread basis diff --git a/compiler/rustc_codegen_llvm/src/llvm/enzyme_ffi.rs b/compiler/rustc_codegen_llvm/src/llvm/enzyme_ffi.rs new file mode 100644 index 0000000000000..729d6f62e2431 --- /dev/null +++ b/compiler/rustc_codegen_llvm/src/llvm/enzyme_ffi.rs @@ -0,0 +1,29 @@ +#![allow(non_camel_case_types)] + +use libc::{c_char, c_uint}; + +use super::ffi::{BasicBlock, Metadata, Module, Type, Value}; +use crate::llvm::Bool; +extern "C" { + // Enzyme + pub fn LLVMRustHasMetadata(I: &Value, KindID: c_uint) -> bool; + pub fn LLVMRustEraseInstBefore(BB: &BasicBlock, I: &Value); + pub fn LLVMRustGetLastInstruction<'a>(BB: &BasicBlock) -> Option<&'a Value>; + pub fn LLVMRustDIGetInstMetadata(I: &Value) -> Option<&Metadata>; + pub fn LLVMRustEraseInstFromParent(V: &Value); + pub fn LLVMRustGetTerminator<'a>(B: &BasicBlock) -> &'a Value; + pub fn LLVMRustVerifyFunction(V: &Value, action: LLVMRustVerifierFailureAction) -> Bool; + + pub fn LLVMGetFunctionCallConv(F: &Value) -> c_uint; + pub fn LLVMGetReturnType(T: &Type) -> &Type; + pub fn 
LLVMGetParams(Fnc: &Value, parms: *mut &Value); + pub fn LLVMGetNamedFunction(M: &Module, Name: *const c_char) -> Option<&Value>; +} + +#[repr(C)] +#[derive(Copy, Clone, PartialEq)] +pub enum LLVMRustVerifierFailureAction { + LLVMAbortProcessAction = 0, + LLVMPrintMessageAction = 1, + LLVMReturnStatusAction = 2, +} diff --git a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs index d62632d143168..ec6c84f6f2567 100644 --- a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs +++ b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs @@ -56,25 +56,6 @@ pub enum LLVMRustResult { Failure, } -// Rust version of the C struct with the same name in rustc_llvm/llvm-wrapper/RustWrapper.cpp. -#[repr(C)] -pub struct LLVMRustCOFFShortExport { - pub name: *const c_char, - pub ordinal_present: bool, - /// value of `ordinal` only important when `ordinal_present` is true - pub ordinal: u16, -} - -impl LLVMRustCOFFShortExport { - pub fn new(name: *const c_char, ordinal: Option) -> LLVMRustCOFFShortExport { - LLVMRustCOFFShortExport { - name, - ordinal_present: ordinal.is_some(), - ordinal: ordinal.unwrap_or(0), - } - } -} - /// Translation of LLVM's MachineTypes enum, defined in llvm\include\llvm\BinaryFormat\COFF.h. /// /// We include only architectures supported on Windows. @@ -118,7 +99,7 @@ pub enum ModuleFlagMergeBehavior { /// LLVM CallingConv::ID. Should we wrap this? /// /// See -#[derive(Copy, Clone, PartialEq, Debug)] +#[derive(Copy, Clone, PartialEq, Debug, TryFromU32)] #[repr(C)] pub enum CallConv { CCallConv = 0, @@ -139,6 +120,7 @@ pub enum CallConv { X86_Intr = 83, AvrNonBlockingInterrupt = 84, AvrInterrupt = 85, + AmdgpuKernel = 91, } /// Must match the layout of `LLVMLinkage`. @@ -545,7 +527,7 @@ pub struct SanitizerOptions { pub sanitize_kernel_address_recover: bool, } -/// LLVMRelocMode +/// LLVMRustRelocModel #[derive(Copy, Clone, PartialEq)] #[repr(C)] pub enum RelocModel { @@ -557,6 +539,15 @@ pub enum RelocModel { ROPI_RWPI, } +/// LLVMRustFloatABI +#[derive(Copy, Clone, PartialEq)] +#[repr(C)] +pub enum FloatAbi { + Default, + Soft, + Hard, +} + /// LLVMRustCodeModel #[derive(Copy, Clone)] #[repr(C)] @@ -2018,6 +2009,12 @@ unsafe extern "C" { AlignInBits: u32, ) -> &'a DIDerivedType; + pub fn LLVMRustDIBuilderCreateQualifiedType<'a>( + Builder: &DIBuilder<'a>, + Tag: c_uint, + Type: &'a DIType, + ) -> &'a DIDerivedType; + pub fn LLVMRustDIBuilderCreateLexicalBlock<'a>( Builder: &DIBuilder<'a>, Scope: &'a DIScope, @@ -2181,9 +2178,6 @@ unsafe extern "C" { Location: &'a DILocation, BD: c_uint, ) -> Option<&'a DILocation>; - pub fn LLVMRustDIBuilderCreateOpDeref() -> u64; - pub fn LLVMRustDIBuilderCreateOpPlusUconst() -> u64; - pub fn LLVMRustDIBuilderCreateOpLLVMFragment() -> u64; pub fn LLVMRustWriteTypeToString(Type: &Type, s: &RustString); pub fn LLVMRustWriteValueToString(value_ref: &Value, s: &RustString); @@ -2211,7 +2205,7 @@ unsafe extern "C" { Model: CodeModel, Reloc: RelocModel, Level: CodeGenOptLevel, - UseSoftFP: bool, + FloatABIType: FloatAbi, FunctionSections: bool, DataSections: bool, UniqueSectionNames: bool, @@ -2347,15 +2341,6 @@ unsafe extern "C" { ) -> &'a mut RustArchiveMember<'a>; pub fn LLVMRustArchiveMemberFree<'a>(Member: &'a mut RustArchiveMember<'a>); - pub fn LLVMRustWriteImportLibrary( - ImportName: *const c_char, - Path: *const c_char, - Exports: *const LLVMRustCOFFShortExport, - NumExports: usize, - Machine: u16, - MinGW: bool, - ) -> LLVMRustResult; - pub fn LLVMRustSetDataLayoutFromTargetMachine<'a>(M: &'a Module, TM: &'a 
TargetMachine); pub fn LLVMRustPositionBuilderAtStart<'a>(B: &Builder<'a>, BB: &'a BasicBlock); @@ -2390,7 +2375,7 @@ unsafe extern "C" { Data: &ThinLTOData, Module: &Module, Target: &TargetMachine, - ) -> bool; + ); pub fn LLVMRustPrepareThinLTOResolveWeak(Data: &ThinLTOData, Module: &Module) -> bool; pub fn LLVMRustPrepareThinLTOInternalize(Data: &ThinLTOData, Module: &Module) -> bool; pub fn LLVMRustPrepareThinLTOImport( diff --git a/compiler/rustc_codegen_llvm/src/llvm/mod.rs b/compiler/rustc_codegen_llvm/src/llvm/mod.rs index 909afe35a179b..2592a7df95c29 100644 --- a/compiler/rustc_codegen_llvm/src/llvm/mod.rs +++ b/compiler/rustc_codegen_llvm/src/llvm/mod.rs @@ -22,8 +22,11 @@ use crate::common::AsCCharPtr; pub mod archive_ro; pub mod diagnostic; +pub mod enzyme_ffi; mod ffi; +pub use self::enzyme_ffi::*; + impl LLVMRustResult { pub fn into_result(self) -> Result<(), ()> { match self { diff --git a/compiler/rustc_codegen_llvm/src/llvm_util.rs b/compiler/rustc_codegen_llvm/src/llvm_util.rs index 194438af88c26..c3d7c217861fb 100644 --- a/compiler/rustc_codegen_llvm/src/llvm_util.rs +++ b/compiler/rustc_codegen_llvm/src/llvm_util.rs @@ -15,14 +15,14 @@ use rustc_fs_util::path_to_c_string; use rustc_middle::bug; use rustc_session::Session; use rustc_session::config::{PrintKind, PrintRequest}; -use rustc_span::symbol::Symbol; +use rustc_span::Symbol; use rustc_target::spec::{MergeFunctions, PanicStrategy, SmallDataThresholdSupport}; use rustc_target::target_features::{RUSTC_SPECIAL_FEATURES, RUSTC_SPECIFIC_FEATURES}; use crate::back::write::create_informational_target_machine; use crate::errors::{ - FixedX18InvalidArch, ForbiddenCTargetFeature, InvalidTargetFeaturePrefix, PossibleFeature, - UnknownCTargetFeature, UnknownCTargetFeaturePrefix, UnstableCTargetFeature, + FixedX18InvalidArch, ForbiddenCTargetFeature, PossibleFeature, UnknownCTargetFeature, + UnknownCTargetFeaturePrefix, UnstableCTargetFeature, }; use crate::llvm; @@ -109,7 +109,10 @@ unsafe fn configure_llvm(sess: &Session) { add("-wasm-enable-eh", false); } - if sess.target.os == "emscripten" && sess.panic_strategy() == PanicStrategy::Unwind { + if sess.target.os == "emscripten" + && !sess.opts.unstable_opts.emscripten_wasm_eh + && sess.panic_strategy() == PanicStrategy::Unwind + { add("-enable-emscripten-cxx-exceptions", false); } @@ -348,7 +351,16 @@ pub fn target_features_cfg(sess: &Session, allow_unstable: bool) -> Vec { if enabled { // Also add all transitively implied features. - features.extend(sess.target.implied_target_features(std::iter::once(feature))); + + // We don't care about the order in `features` since the only thing we use it for is the + // `features.contains` below. + #[allow(rustc::potential_query_instability)] + features.extend( + sess.target + .implied_target_features(std::iter::once(feature.as_str())) + .iter() + .map(|s| Symbol::intern(s)), + ); } else { // Remove transitively reverse-implied features. @@ -356,7 +368,11 @@ pub fn target_features_cfg(sess: &Session, allow_unstable: bool) -> Vec // `features.contains` below. #[allow(rustc::potential_query_instability)] features.retain(|f| { - if sess.target.implied_target_features(std::iter::once(*f)).contains(&feature) { + if sess + .target + .implied_target_features(std::iter::once(f.as_str())) + .contains(&feature.as_str()) + { // If `f` if implies `feature`, then `!feature` implies `!f`, so we have to // remove `f`. (This is the standard logical contraposition principle.) 
false @@ -638,7 +654,7 @@ pub(crate) fn global_llvm_features( sess.target .features .split(',') - .filter(|v| !v.is_empty() && backend_feature_name(sess, v).is_some()) + .filter(|v| !v.is_empty()) // Drop +v8plus feature introduced in LLVM 20. .filter(|v| *v != "+v8plus" || get_version() >= (20, 0, 0)) .map(String::from), @@ -651,89 +667,136 @@ pub(crate) fn global_llvm_features( // -Ctarget-features if !only_base_features { let known_features = sess.target.rust_target_features(); + // Will only be filled when `diagnostics` is set! let mut featsmap = FxHashMap::default(); - // insert implied features + // Ensure that all ABI-required features are enabled, and the ABI-forbidden ones + // are disabled. + let abi_feature_constraints = sess.target.abi_required_features(); + let abi_incompatible_set = + FxHashSet::from_iter(abi_feature_constraints.incompatible.iter().copied()); + + // Compute implied features let mut all_rust_features = vec![]; for feature in sess.opts.cg.target_feature.split(',') { - match feature.strip_prefix('+') { - Some(feature) => all_rust_features.extend( - UnordSet::from( - sess.target - .implied_target_features(std::iter::once(Symbol::intern(feature))), - ) - .to_sorted_stable_ord() - .iter() - .map(|s| format!("+{}", s.as_str())), - ), - _ => all_rust_features.push(feature.to_string()), + if let Some(feature) = feature.strip_prefix('+') { + all_rust_features.extend( + UnordSet::from(sess.target.implied_target_features(std::iter::once(feature))) + .to_sorted_stable_ord() + .iter() + .map(|&&s| (true, s)), + ) + } else if let Some(feature) = feature.strip_prefix('-') { + // FIXME: Why do we not remove implied features on "-" here? + // We do the equivalent above in `target_features_cfg`. + // See . + all_rust_features.push((false, feature)); + } else if !feature.is_empty() { + if diagnostics { + sess.dcx().emit_warn(UnknownCTargetFeaturePrefix { feature }); + } } } + // Remove features that are meant for rustc, not LLVM. + all_rust_features.retain(|(_, feature)| { + // Retain if it is not a rustc feature + !RUSTC_SPECIFIC_FEATURES.contains(feature) + }); - let feats = all_rust_features - .iter() - .filter_map(|s| { - let enable_disable = match s.chars().next() { - None => return None, - Some(c @ ('+' | '-')) => c, - Some(_) => { - if diagnostics { - sess.dcx().emit_warn(UnknownCTargetFeaturePrefix { feature: s }); - } - return None; - } - }; - - // Get the backend feature name, if any. - // This excludes rustc-specific features, which do not get passed to LLVM. - let feature = backend_feature_name(sess, s)?; - // Warn against use of LLVM specific feature names and unstable features on the CLI. - if diagnostics { - let feature_state = known_features.iter().find(|&&(v, _, _)| v == feature); - match feature_state { - None => { - let rust_feature = - known_features.iter().find_map(|&(rust_feature, _, _)| { - let llvm_features = to_llvm_features(sess, rust_feature)?; - if llvm_features.contains(feature) - && !llvm_features.contains(rust_feature) - { - Some(rust_feature) - } else { - None - } - }); - let unknown_feature = if let Some(rust_feature) = rust_feature { - UnknownCTargetFeature { - feature, - rust_feature: PossibleFeature::Some { rust_feature }, - } - } else { - UnknownCTargetFeature { - feature, - rust_feature: PossibleFeature::None, + // Check feature validity. 
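A standalone sketch of the `-Ctarget-features` parsing step above: split the comma-separated list into `(enable, feature)` pairs and drop rustc-internal names that never reach LLVM. `RUSTC_SPECIFIC` is a stand-in for `RUSTC_SPECIFIC_FEATURES`, and implied-feature expansion is omitted here.

```rust
const RUSTC_SPECIFIC: &[&str] = &["crt-static"];

fn parse_target_features(s: &str) -> Vec<(bool, &str)> {
    let mut out = Vec::new();
    for feature in s.split(',') {
        if let Some(name) = feature.strip_prefix('+') {
            out.push((true, name));
        } else if let Some(name) = feature.strip_prefix('-') {
            out.push((false, name));
        } else if !feature.is_empty() {
            // The real code warns with `UnknownCTargetFeaturePrefix` here.
        }
    }
    // Rustc-specific features are handled by rustc itself, not passed to LLVM.
    out.retain(|(_, name)| !RUSTC_SPECIFIC.contains(name));
    out
}

fn main() {
    assert_eq!(
        parse_target_features("+sse2,-avx,+crt-static"),
        vec![(true, "sse2"), (false, "avx")]
    );
}
```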
+ if diagnostics { + for &(enable, feature) in &all_rust_features { + let feature_state = known_features.iter().find(|&&(v, _, _)| v == feature); + match feature_state { + None => { + let rust_feature = + known_features.iter().find_map(|&(rust_feature, _, _)| { + let llvm_features = to_llvm_features(sess, rust_feature)?; + if llvm_features.contains(feature) + && !llvm_features.contains(rust_feature) + { + Some(rust_feature) + } else { + None } - }; - sess.dcx().emit_warn(unknown_feature); - } - Some((_, stability, _)) => { - if let Err(reason) = - stability.toggle_allowed(&sess.target, enable_disable == '+') - { - sess.dcx().emit_warn(ForbiddenCTargetFeature { feature, reason }); - } else if stability.requires_nightly().is_some() { - // An unstable feature. Warn about using it. It makes little sense - // to hard-error here since we just warn about fully unknown - // features above. - sess.dcx().emit_warn(UnstableCTargetFeature { feature }); + }); + let unknown_feature = if let Some(rust_feature) = rust_feature { + UnknownCTargetFeature { + feature, + rust_feature: PossibleFeature::Some { rust_feature }, } + } else { + UnknownCTargetFeature { feature, rust_feature: PossibleFeature::None } + }; + sess.dcx().emit_warn(unknown_feature); + } + Some((_, stability, _)) => { + if let Err(reason) = stability.toggle_allowed() { + sess.dcx().emit_warn(ForbiddenCTargetFeature { + feature, + enabled: if enable { "enabled" } else { "disabled" }, + reason, + }); + } else if stability.requires_nightly().is_some() { + // An unstable feature. Warn about using it. It makes little sense + // to hard-error here since we just warn about fully unknown + // features above. + sess.dcx().emit_warn(UnstableCTargetFeature { feature }); } } + } - // FIXME(nagisa): figure out how to not allocate a full hashset here. - featsmap.insert(feature, enable_disable == '+'); + // Ensure that the features we enable/disable are compatible with the ABI. + if enable { + if abi_incompatible_set.contains(feature) { + sess.dcx().emit_warn(ForbiddenCTargetFeature { + feature, + enabled: "enabled", + reason: "this feature is incompatible with the target ABI", + }); + } + } else { + // FIXME: we have to request implied features here since + // negative features do not handle implied features above. + for &required in abi_feature_constraints.required.iter() { + let implied = + sess.target.implied_target_features(std::iter::once(required)); + if implied.contains(feature) { + sess.dcx().emit_warn(ForbiddenCTargetFeature { + feature, + enabled: "disabled", + reason: "this feature is required by the target ABI", + }); + } + } } + // FIXME(nagisa): figure out how to not allocate a full hashset here. + featsmap.insert(feature, enable); + } + } + + // To be sure the ABI-relevant features are all in the right state, we explicitly + // (un)set them here. This means if the target spec sets those features wrong, + // we will silently correct them rather than silently producing wrong code. + // (The target sanity check tries to catch this, but we can't know which features are + // enabled in LLVM by default so we can't be fully sure about that check.) + // We add these at the beginning of the list so that `-Ctarget-features` can + // still override it... that's unsound, but more compatible with past behavior. + all_rust_features.splice( + 0..0, + abi_feature_constraints + .required + .iter() + .map(|&f| (true, f)) + .chain(abi_feature_constraints.incompatible.iter().map(|&f| (false, f))), + ); + + // Translate this into LLVM features. 
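The `splice(0..0, ...)` above is what puts the ABI-mandated toggles at the front of the list, so that later `-Ctarget-feature` entries still get the last word. A minimal sketch of that ordering trick with made-up feature names:

fn main() {
    // User-requested toggles, in command-line order.
    let mut toggles: Vec<(bool, &str)> = vec![(false, "x87"), (true, "avx")];

    // Hypothetical ABI constraints: required features first, forbidden ones after.
    let required = ["x87"];
    let incompatible = ["soft-float"];

    toggles.splice(
        0..0,
        required
            .iter()
            .map(|&f| (true, f))
            .chain(incompatible.iter().map(|&f| (false, f))),
    );

    // ABI defaults come first, so a later user toggle still overrides them.
    assert_eq!(
        toggles,
        vec![(true, "x87"), (false, "soft-float"), (false, "x87"), (true, "avx")]
    );
}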
+ let feats = all_rust_features + .iter() + .filter_map(|&(enable, feature)| { + let enable_disable = if enable { '+' } else { '-' }; // We run through `to_llvm_features` when // passing requests down to LLVM. This means that all in-language // features also work on the command line instead of having two @@ -746,9 +809,9 @@ pub(crate) fn global_llvm_features( enable_disable, llvm_feature.llvm_feature_name )) .chain(llvm_feature.dependency.into_iter().filter_map( - move |feat| match (enable_disable, feat) { - ('-' | '+', TargetFeatureFoldStrength::Both(f)) - | ('+', TargetFeatureFoldStrength::EnableOnly(f)) => { + move |feat| match (enable, feat) { + (_, TargetFeatureFoldStrength::Both(f)) + | (true, TargetFeatureFoldStrength::EnableOnly(f)) => { Some(format!("{enable_disable}{f}")) } _ => None, @@ -780,22 +843,6 @@ pub(crate) fn global_llvm_features( features } -/// Returns a feature name for the given `+feature` or `-feature` string. -/// -/// Only allows features that are backend specific (i.e. not [`RUSTC_SPECIFIC_FEATURES`].) -fn backend_feature_name<'a>(sess: &Session, s: &'a str) -> Option<&'a str> { - // features must start with a `+` or `-`. - let feature = s - .strip_prefix(&['+', '-'][..]) - .unwrap_or_else(|| sess.dcx().emit_fatal(InvalidTargetFeaturePrefix { feature: s })); - // Rustc-specific feature requests like `+crt-static` or `-crt-static` - // are not passed down to LLVM. - if s.is_empty() || RUSTC_SPECIFIC_FEATURES.contains(&feature) { - return None; - } - Some(feature) -} - pub(crate) fn tune_cpu(sess: &Session) -> Option<&str> { let name = sess.opts.unstable_opts.tune_cpu.as_ref()?; Some(handle_native(name)) diff --git a/compiler/rustc_codegen_llvm/src/type_of.rs b/compiler/rustc_codegen_llvm/src/type_of.rs index 2b05e24a7babf..b0b6da869da68 100644 --- a/compiler/rustc_codegen_llvm/src/type_of.rs +++ b/compiler/rustc_codegen_llvm/src/type_of.rs @@ -38,7 +38,7 @@ fn uncached_llvm_type<'a, 'tcx>( if let (&ty::Adt(def, _), &Variants::Single { index }) = (layout.ty.kind(), &layout.variants) { - if def.is_enum() && !def.variants().is_empty() { + if def.is_enum() { write!(&mut name, "::{}", def.variant(index).name).unwrap(); } } diff --git a/compiler/rustc_codegen_ssa/Cargo.toml b/compiler/rustc_codegen_ssa/Cargo.toml index 628543443b372..3254b5d38e7a4 100644 --- a/compiler/rustc_codegen_ssa/Cargo.toml +++ b/compiler/rustc_codegen_ssa/Cargo.toml @@ -8,7 +8,9 @@ edition = "2021" ar_archive_writer = "0.4.2" arrayvec = { version = "0.7", default-features = false } bitflags = "2.4.1" -cc = "1.1.23" +# Pinned so `cargo update` bumps don't cause breakage. Please also update the +# `cc` in `rustc_llvm` if you update the `cc` here. 
+cc = "=1.2.7" either = "1.5.0" itertools = "0.12" pathdiff = "0.2.0" diff --git a/compiler/rustc_codegen_ssa/messages.ftl b/compiler/rustc_codegen_ssa/messages.ftl index 56188714b44fd..484f467068a14 100644 --- a/compiler/rustc_codegen_ssa/messages.ftl +++ b/compiler/rustc_codegen_ssa/messages.ftl @@ -67,7 +67,7 @@ codegen_ssa_failed_to_write = failed to write {$path}: {$error} codegen_ssa_field_associated_value_expected = associated value expected for `{$name}` codegen_ssa_forbidden_target_feature_attr = - target feature `{$feature}` cannot be toggled with `#[target_feature]`: {$reason} + target feature `{$feature}` cannot be enabled with `#[target_feature]`: {$reason} codegen_ssa_ignoring_emit_path = ignoring emit path because multiple .{$extension} files were produced diff --git a/compiler/rustc_codegen_ssa/src/assert_module_sources.rs b/compiler/rustc_codegen_ssa/src/assert_module_sources.rs index d1b1ff88b4a0c..76561766511c5 100644 --- a/compiler/rustc_codegen_ssa/src/assert_module_sources.rs +++ b/compiler/rustc_codegen_ssa/src/assert_module_sources.rs @@ -33,8 +33,7 @@ use rustc_hir::def_id::LOCAL_CRATE; use rustc_middle::mir::mono::CodegenUnitNameBuilder; use rustc_middle::ty::TyCtxt; use rustc_session::Session; -use rustc_span::symbol::sym; -use rustc_span::{Span, Symbol}; +use rustc_span::{Span, Symbol, sym}; use thin_vec::ThinVec; use tracing::debug; @@ -91,7 +90,7 @@ impl<'tcx> AssertModuleSource<'tcx> { other => { self.tcx .dcx() - .emit_fatal(errors::UnknownReuseKind { span: attr.span, kind: other }); + .emit_fatal(errors::UnknownReuseKind { span: attr.span(), kind: other }); } } } else { @@ -99,7 +98,7 @@ impl<'tcx> AssertModuleSource<'tcx> { }; if !self.tcx.sess.opts.unstable_opts.query_dep_graph { - self.tcx.dcx().emit_fatal(errors::MissingQueryDepGraph { span: attr.span }); + self.tcx.dcx().emit_fatal(errors::MissingQueryDepGraph { span: attr.span() }); } if !self.check_config(attr) { @@ -112,7 +111,7 @@ impl<'tcx> AssertModuleSource<'tcx> { if !user_path.starts_with(&crate_name) { self.tcx.dcx().emit_fatal(errors::MalformedCguName { - span: attr.span, + span: attr.span(), user_path, crate_name, }); @@ -142,7 +141,7 @@ impl<'tcx> AssertModuleSource<'tcx> { let cgu_names: Vec<&str> = self.available_cgus.items().map(|cgu| cgu.as_str()).into_sorted_stable_ord(); self.tcx.dcx().emit_err(errors::NoModuleNamed { - span: attr.span, + span: attr.span(), user_path, cgu_name, cgu_names: cgu_names.join(", "), @@ -152,7 +151,7 @@ impl<'tcx> AssertModuleSource<'tcx> { self.cgu_reuse_tracker.set_expectation( cgu_name, user_path, - attr.span, + attr.span(), expected_reuse, comp_kind, ); @@ -172,7 +171,7 @@ impl<'tcx> AssertModuleSource<'tcx> { } } - self.tcx.dcx().emit_fatal(errors::NoField { span: attr.span, name }); + self.tcx.dcx().emit_fatal(errors::NoField { span: attr.span(), name }); } /// Scan for a `cfg="foo"` attribute and check whether we have a diff --git a/compiler/rustc_codegen_ssa/src/back/archive.rs b/compiler/rustc_codegen_ssa/src/back/archive.rs index d4836eb7a1de6..58eb137c06879 100644 --- a/compiler/rustc_codegen_ssa/src/back/archive.rs +++ b/compiler/rustc_codegen_ssa/src/back/archive.rs @@ -2,7 +2,7 @@ use std::env; use std::error::Error; use std::ffi::OsString; use std::fs::{self, File}; -use std::io::{self, Write}; +use std::io::{self, BufWriter, Write}; use std::path::{Path, PathBuf}; use ar_archive_writer::{ @@ -14,7 +14,7 @@ use object::read::macho::FatArch; use rustc_data_structures::fx::FxIndexSet; use rustc_data_structures::memmap::Mmap; use 
rustc_session::Session; -use rustc_span::symbol::Symbol; +use rustc_span::Symbol; use tempfile::Builder as TempFileBuilder; use tracing::trace; @@ -509,9 +509,10 @@ impl<'a> ArArchiveBuilder<'a> { io_error_context("couldn't create a directory for the temp file", err) })?; let archive_tmpfile_path = archive_tmpdir.path().join("tmp.a"); - let mut archive_tmpfile = File::create_new(&archive_tmpfile_path) + let archive_tmpfile = File::create_new(&archive_tmpfile_path) .map_err(|err| io_error_context("couldn't create the temp file", err))?; + let mut archive_tmpfile = BufWriter::new(archive_tmpfile); write_archive_to_stream( &mut archive_tmpfile, &entries, @@ -519,6 +520,8 @@ impl<'a> ArArchiveBuilder<'a> { false, /* is_ec = */ self.sess.target.arch == "arm64ec", )?; + archive_tmpfile.flush()?; + drop(archive_tmpfile); let any_entries = !entries.is_empty(); drop(entries); diff --git a/compiler/rustc_codegen_ssa/src/back/link.rs b/compiler/rustc_codegen_ssa/src/back/link.rs index dee6dd7b262a9..df35b5e8426f1 100644 --- a/compiler/rustc_codegen_ssa/src/back/link.rs +++ b/compiler/rustc_codegen_ssa/src/back/link.rs @@ -15,7 +15,7 @@ use rustc_ast::CRATE_NODE_ID; use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; use rustc_data_structures::memmap::Mmap; use rustc_data_structures::temp_dir::MaybeTempDir; -use rustc_errors::{DiagCtxtHandle, FatalError}; +use rustc_errors::DiagCtxtHandle; use rustc_fs_util::{fix_windows_verbatim_for_gcc, try_canonicalize}; use rustc_hir::def_id::{CrateNum, LOCAL_CRATE}; use rustc_metadata::fs::{METADATA_FILENAME, copy_to_stdout, emit_wrapper_file}; @@ -35,7 +35,7 @@ use rustc_session::utils::NativeLibKind; /// For all the linkers we support, and information they might /// need out of the shared crate context before we get rid of it. 
use rustc_session::{Session, filesearch}; -use rustc_span::symbol::Symbol; +use rustc_span::Symbol; use rustc_target::spec::crt_objects::CrtObjects; use rustc_target::spec::{ Cc, LinkOutputKind, LinkSelfContainedComponents, LinkSelfContainedDefault, LinkerFeatures, @@ -234,8 +234,6 @@ pub fn each_linked_rlib( crate_type: Option, f: &mut dyn FnMut(CrateNum, &Path), ) -> Result<(), errors::LinkRlibError> { - let crates = info.used_crates.iter(); - let fmts = if let Some(crate_type) = crate_type { let Some(fmts) = info.dependency_formats.get(&crate_type) else { return Err(errors::LinkRlibError::MissingFormat); @@ -261,8 +259,9 @@ pub fn each_linked_rlib( info.dependency_formats.first().unwrap().1 }; - for &cnum in crates { - match fmts.get(cnum.as_usize() - 1) { + let used_dep_crates = info.used_crates.iter(); + for &cnum in used_dep_crates { + match fmts.get(cnum) { Some(&Linkage::NotLinked | &Linkage::Dynamic | &Linkage::IncludedFromDylib) => continue, Some(_) => {} None => return Err(errors::LinkRlibError::MissingFormat), @@ -624,7 +623,7 @@ fn link_staticlib( let mut all_rust_dylibs = vec![]; for &cnum in crates { - match fmts.get(cnum.as_usize() - 1) { + match fmts.get(cnum) { Some(&Linkage::Dynamic) => {} _ => continue, } @@ -1009,12 +1008,8 @@ fn link_natively( && (code < 1000 || code > 9999) { let is_vs_installed = windows_registry::find_vs_version().is_ok(); - // FIXME(cc-rs#1265) pass only target arch to find_tool() - let has_linker = windows_registry::find_tool( - sess.opts.target_triple.tuple(), - "link.exe", - ) - .is_some(); + let has_linker = + windows_registry::find_tool(&sess.target.arch, "link.exe").is_some(); sess.dcx().emit_note(errors::LinkExeUnexpectedError); if is_vs_installed && has_linker { @@ -1039,22 +1034,22 @@ fn link_natively( Err(e) => { let linker_not_found = e.kind() == io::ErrorKind::NotFound; - if linker_not_found { - sess.dcx().emit_err(errors::LinkerNotFound { linker_path, error: e }); + let err = if linker_not_found { + sess.dcx().emit_err(errors::LinkerNotFound { linker_path, error: e }) } else { sess.dcx().emit_err(errors::UnableToExeLinker { linker_path, error: e, command_formatted: format!("{cmd:?}"), - }); - } + }) + }; if sess.target.is_like_msvc && linker_not_found { sess.dcx().emit_note(errors::MsvcMissingLinker); sess.dcx().emit_note(errors::CheckInstalledVisualStudio); sess.dcx().emit_note(errors::InsufficientVSCodeProduct); } - FatalError.raise(); + err.raise_fatal(); } } @@ -1105,14 +1100,14 @@ fn link_natively( let stripcmd = "rust-objcopy"; match (strip, crate_type) { (Strip::Debuginfo, _) => { - strip_symbols_with_external_utility(sess, stripcmd, out_filename, &["-S"]) + strip_with_external_utility(sess, stripcmd, out_filename, &["--strip-debug"]) } // Per the manpage, `-x` is the maximum safe strip level for dynamic libraries. (#93988) (Strip::Symbols, CrateType::Dylib | CrateType::Cdylib | CrateType::ProcMacro) => { - strip_symbols_with_external_utility(sess, stripcmd, out_filename, &["-x"]) + strip_with_external_utility(sess, stripcmd, out_filename, &["-x"]) } (Strip::Symbols, _) => { - strip_symbols_with_external_utility(sess, stripcmd, out_filename, &[]) + strip_with_external_utility(sess, stripcmd, out_filename, &["--strip-all"]) } (Strip::None, _) => {} } @@ -1128,9 +1123,7 @@ fn link_natively( let stripcmd = if !sess.host.is_like_solaris { "rust-objcopy" } else { "/usr/bin/strip" }; match strip { // Always preserve the symbol table (-x). 
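The renamed `strip_with_external_utility` boils down to spawning an external tool over the output file. A hedged, standalone sketch of that pattern with `std::process::Command` (the tool name, flags, and error handling here are illustrative, not the compiler's):

use std::path::Path;
use std::process::Command;

// Run an external strip-like utility over `out`; errors are just printed
// here instead of going through the session's diagnostics.
fn strip_with_tool(util: &str, out: &Path, options: &[&str]) {
    let mut cmd = Command::new(util);
    cmd.args(options).arg(out);
    match cmd.output() {
        Ok(output) if output.status.success() => {}
        Ok(output) => eprintln!("`{util}` failed: {}", String::from_utf8_lossy(&output.stderr)),
        Err(e) => eprintln!("unable to run `{util}`: {e}"),
    }
}

fn main() {
    // Illustrative invocation; the path likely does not exist on this machine.
    strip_with_tool("strip", Path::new("target/release/app"), &["--strip-debug"]);
}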
- Strip::Debuginfo => { - strip_symbols_with_external_utility(sess, stripcmd, out_filename, &["-x"]) - } + Strip::Debuginfo => strip_with_external_utility(sess, stripcmd, out_filename, &["-x"]), // Strip::Symbols is handled via the --strip-all linker option. Strip::Symbols => {} Strip::None => {} @@ -1146,15 +1139,11 @@ fn link_natively( match strip { Strip::Debuginfo => { // FIXME: AIX's strip utility only offers option to strip line number information. - strip_symbols_with_external_utility(sess, stripcmd, out_filename, &[ - "-X32_64", "-l", - ]) + strip_with_external_utility(sess, stripcmd, out_filename, &["-X32_64", "-l"]) } Strip::Symbols => { // Must be noted this option might remove symbol __aix_rust_metadata and thus removes .info section which contains metadata. - strip_symbols_with_external_utility(sess, stripcmd, out_filename, &[ - "-X32_64", "-r", - ]) + strip_with_external_utility(sess, stripcmd, out_filename, &["-X32_64", "-r"]) } Strip::None => {} } @@ -1167,12 +1156,7 @@ fn link_natively( } } -fn strip_symbols_with_external_utility( - sess: &Session, - util: &str, - out_filename: &Path, - options: &[&str], -) { +fn strip_with_external_utility(sess: &Session, util: &str, out_filename: &Path, options: &[&str]) { let mut cmd = Command::new(util); cmd.args(options); @@ -2361,8 +2345,8 @@ fn linker_with_args( .crate_info .native_libraries .iter() - .filter_map(|(cnum, libraries)| { - (dependency_linkage[cnum.as_usize() - 1] != Linkage::Static).then_some(libraries) + .filter_map(|(&cnum, libraries)| { + (dependency_linkage[cnum] != Linkage::Static).then_some(libraries) }) .flatten() .collect::>(); @@ -2467,10 +2451,12 @@ fn add_order_independent_options( } if sess.target.os == "emscripten" { - cmd.cc_arg("-s").cc_arg(if sess.panic_strategy() == PanicStrategy::Abort { - "DISABLE_EXCEPTION_CATCHING=1" + cmd.cc_arg(if sess.opts.unstable_opts.emscripten_wasm_eh { + "-fwasm-exceptions" + } else if sess.panic_strategy() == PanicStrategy::Abort { + "-sDISABLE_EXCEPTION_CATCHING=1" } else { - "DISABLE_EXCEPTION_CATCHING=0" + "-sDISABLE_EXCEPTION_CATCHING=0" }); } @@ -2754,7 +2740,7 @@ fn add_upstream_rust_crates( // (e.g. `libstd` when `-C prefer-dynamic` is used). // FIXME: `dependency_formats` can report `profiler_builtins` as `NotLinked` for some // reason, it shouldn't do that because `profiler_builtins` should indeed be linked. 
- let linkage = data[cnum.as_usize() - 1]; + let linkage = data[cnum]; let link_static_crate = linkage == Linkage::Static || (linkage == Linkage::IncludedFromDylib || linkage == Linkage::NotLinked) && (codegen_results.crate_info.compiler_builtins == Some(cnum) diff --git a/compiler/rustc_codegen_ssa/src/back/linker.rs b/compiler/rustc_codegen_ssa/src/back/linker.rs index 301b22f2be4f8..8fb831471a9ba 100644 --- a/compiler/rustc_codegen_ssa/src/back/linker.rs +++ b/compiler/rustc_codegen_ssa/src/back/linker.rs @@ -16,7 +16,7 @@ use rustc_middle::middle::exported_symbols::{ExportedSymbol, SymbolExportInfo, S use rustc_middle::ty::TyCtxt; use rustc_session::Session; use rustc_session::config::{self, CrateType, DebugInfo, LinkerPluginLto, Lto, OptLevel, Strip}; -use rustc_span::symbol::sym; +use rustc_span::sym; use rustc_target::spec::{Cc, LinkOutputKind, LinkerFlavor, Lld}; use tracing::{debug, warn}; @@ -50,8 +50,7 @@ pub(crate) fn get_linker<'a>( self_contained: bool, target_cpu: &'a str, ) -> Box { - // FIXME(cc-rs#1265) pass only target arch to find_tool() - let msvc_tool = windows_registry::find_tool(sess.opts.target_triple.tuple(), "link.exe"); + let msvc_tool = windows_registry::find_tool(&sess.target.arch, "link.exe"); // If our linker looks like a batch script on Windows then to execute this // we'll need to spawn `cmd` explicitly. This is primarily done to handle @@ -1744,15 +1743,10 @@ fn for_each_exported_symbols_include_dep<'tcx>( crate_type: CrateType, mut callback: impl FnMut(ExportedSymbol<'tcx>, SymbolExportInfo, CrateNum), ) { - for &(symbol, info) in tcx.exported_symbols(LOCAL_CRATE).iter() { - callback(symbol, info, LOCAL_CRATE); - } - let formats = tcx.dependency_formats(()); let deps = &formats[&crate_type]; - for (index, dep_format) in deps.iter().enumerate() { - let cnum = CrateNum::new(index + 1); + for (cnum, dep_format) in deps.iter_enumerated() { // For each dependency that we are linking to statically ... 
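These loops now take the crate number straight from the indexed iteration instead of reconstructing it with `index + 1`. A toy sketch of the same pattern over a plain slice (the real code uses an `IndexVec` keyed by `CrateNum`):

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Linkage {
    NotLinked,
    Static,
    Dynamic,
}

fn main() {
    // Toy per-crate table, indexed directly by crate number.
    let fmts = [Linkage::NotLinked, Linkage::Static, Linkage::Dynamic];

    // Iterate with the index as the key, rather than shifting by one.
    let static_deps: Vec<usize> = fmts
        .iter()
        .enumerate()
        .filter(|&(_, &l)| l == Linkage::Static)
        .map(|(cnum, _)| cnum)
        .collect();

    assert_eq!(static_deps, vec![1usize]);
}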
if *dep_format == Linkage::Static { for &(symbol, info) in tcx.exported_symbols(cnum).iter() { diff --git a/compiler/rustc_codegen_ssa/src/back/lto.rs b/compiler/rustc_codegen_ssa/src/back/lto.rs index ab8b06a05fc74..efccf7687a1e4 100644 --- a/compiler/rustc_codegen_ssa/src/back/lto.rs +++ b/compiler/rustc_codegen_ssa/src/back/lto.rs @@ -1,11 +1,14 @@ use std::ffi::CString; use std::sync::Arc; +use rustc_ast::expand::autodiff_attrs::AutoDiffItem; use rustc_data_structures::memmap::Mmap; use rustc_errors::FatalError; +use rustc_middle::ty::TyCtxt; use super::write::CodegenContext; use crate::ModuleCodegen; +use crate::back::write::ModuleConfig; use crate::traits::*; pub struct ThinModule { @@ -81,6 +84,24 @@ impl LtoModuleCodegen { LtoModuleCodegen::Thin(ref m) => m.cost(), } } + + /// Run autodiff on Fat LTO module + pub unsafe fn autodiff( + self, + cgcx: &CodegenContext, + tcx: TyCtxt<'_>, + diff_fncs: Vec, + config: &ModuleConfig, + ) -> Result, FatalError> { + match &self { + LtoModuleCodegen::Fat(module) => { + B::autodiff(cgcx, tcx, &module, diff_fncs, config)?; + } + _ => panic!("autodiff called with non-fat LTO module"), + } + + Ok(self) + } } pub enum SerializedModule { diff --git a/compiler/rustc_codegen_ssa/src/back/write.rs b/compiler/rustc_codegen_ssa/src/back/write.rs index 683defcafee24..b40bb4ed5d2ac 100644 --- a/compiler/rustc_codegen_ssa/src/back/write.rs +++ b/compiler/rustc_codegen_ssa/src/back/write.rs @@ -33,8 +33,7 @@ use rustc_session::config::{ self, CrateType, Lto, OutFileName, OutputFilenames, OutputType, Passes, SwitchWithOptPath, }; use rustc_span::source_map::SourceMap; -use rustc_span::symbol::sym; -use rustc_span::{FileName, InnerSpan, Span, SpanData}; +use rustc_span::{FileName, InnerSpan, Span, SpanData, sym}; use rustc_target::spec::{MergeFunctions, SanitizerSet}; use tracing::debug; diff --git a/compiler/rustc_codegen_ssa/src/base.rs b/compiler/rustc_codegen_ssa/src/base.rs index dab035d3339cb..08f5303bf2013 100644 --- a/compiler/rustc_codegen_ssa/src/base.rs +++ b/compiler/rustc_codegen_ssa/src/base.rs @@ -4,7 +4,9 @@ use std::time::{Duration, Instant}; use itertools::Itertools; use rustc_abi::FIRST_VARIANT; +use rustc_ast as ast; use rustc_ast::expand::allocator::{ALLOCATOR_METHODS, AllocatorKind, global_fn_name}; +use rustc_attr_parsing::OptimizeAttr; use rustc_data_structures::fx::{FxHashMap, FxIndexSet}; use rustc_data_structures::profiling::{get_resident_set_size, print_time_passes_entry}; use rustc_data_structures::sync::{Lrc, par_map}; @@ -24,13 +26,11 @@ use rustc_middle::ty::layout::{HasTyCtxt, HasTypingEnv, LayoutOf, TyAndLayout}; use rustc_middle::ty::{self, Instance, Ty, TyCtxt}; use rustc_session::Session; use rustc_session::config::{self, CrateType, EntryFnType, OptLevel, OutputType}; -use rustc_span::symbol::sym; -use rustc_span::{DUMMY_SP, Symbol}; +use rustc_span::{DUMMY_SP, Symbol, sym}; use rustc_trait_selection::infer::at::ToTrace; use rustc_trait_selection::infer::{BoundRegionConversionTime, TyCtxtInferExt}; use rustc_trait_selection::traits::{ObligationCause, ObligationCtxt}; use tracing::{debug, info}; -use {rustc_ast as ast, rustc_attr_parsing as attr}; use crate::assert_module_sources::CguReuse; use crate::back::link::are_upstream_rust_objects_already_included; @@ -389,7 +389,8 @@ pub(crate) fn build_shift_expr_rhs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( // exceptions. 
This means that the VM does the unwinding for // us pub fn wants_wasm_eh(sess: &Session) -> bool { - sess.target.is_like_wasm && sess.target.os != "emscripten" + sess.target.is_like_wasm + && (sess.target.os != "emscripten" || sess.opts.unstable_opts.emscripten_wasm_eh) } /// Returns `true` if this session's target will use SEH-based unwinding. @@ -1057,8 +1058,8 @@ pub(crate) fn provide(providers: &mut Providers) { let any_for_speed = defids.items().any(|id| { let CodegenFnAttrs { optimize, .. } = tcx.codegen_fn_attrs(*id); match optimize { - attr::OptimizeAttr::None | attr::OptimizeAttr::Size => false, - attr::OptimizeAttr::Speed => true, + OptimizeAttr::None | OptimizeAttr::Size => false, + OptimizeAttr::Speed => true, } }); diff --git a/compiler/rustc_codegen_ssa/src/codegen_attrs.rs b/compiler/rustc_codegen_ssa/src/codegen_attrs.rs index b243f904aee4e..8ac4327077f18 100644 --- a/compiler/rustc_codegen_ssa/src/codegen_attrs.rs +++ b/compiler/rustc_codegen_ssa/src/codegen_attrs.rs @@ -1,6 +1,7 @@ use rustc_ast::attr::list_contains_name; use rustc_ast::{MetaItemInner, attr}; -use rustc_attr_parsing::{InlineAttr, InstructionSetAttr, OptimizeAttr}; +use rustc_attr_parsing::ReprAttr::ReprAlign; +use rustc_attr_parsing::{AttributeKind, InlineAttr, InstructionSetAttr, OptimizeAttr}; use rustc_data_structures::fx::FxHashMap; use rustc_errors::codes::*; use rustc_errors::{DiagMessage, SubdiagMessage, struct_span_code_err}; @@ -16,9 +17,9 @@ use rustc_middle::query::Providers; use rustc_middle::ty::{self as ty, TyCtxt}; use rustc_session::parse::feature_err; use rustc_session::{Session, lint}; -use rustc_span::symbol::Ident; -use rustc_span::{Span, sym}; +use rustc_span::{Ident, Span, sym}; use rustc_target::spec::{SanitizerSet, abi}; +use tracing::debug; use crate::errors; use crate::target_features::{check_target_feature_trait_unsafe, from_target_feature_attr}; @@ -93,12 +94,26 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { if let Fn | AssocFn | Variant | Ctor(..) = def_kind { Some(tcx.fn_sig(did)) } else { - tcx.dcx() - .span_delayed_bug(attr.span, "this attribute can only be applied to functions"); + tcx.dcx().span_delayed_bug( + attr.span(), + "this attribute can only be applied to functions", + ); None } }; + if let hir::Attribute::Parsed(p) = attr { + match p { + AttributeKind::Repr(reprs) => { + codegen_fn_attrs.alignment = reprs + .iter() + .find_map(|(r, _)| if let ReprAlign(x) = r { Some(*x) } else { None }); + } + + _ => {} + } + } + let Some(Ident { name, .. 
}) = attr.ident() else { continue; }; @@ -119,14 +134,14 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { if tcx.opt_item_name(did.to_def_id()).is_some() { codegen_fn_attrs.flags |= CodegenFnAttrFlags::NO_MANGLE; mixed_export_name_no_mangle_lint_state.track_no_mangle( - attr.span, + attr.span(), tcx.local_def_id_to_hir_id(did), attr, ); } else { tcx.dcx() .struct_span_err( - attr.span, + attr.span(), format!( "`#[no_mangle]` cannot be used on {} {} as it has no name", tcx.def_descr_article(did.to_def_id()), @@ -147,7 +162,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { feature_err( &tcx.sess, sym::used_with_arg, - attr.span, + attr.span(), "`#[used(linker)]` is currently unstable", ) .emit(); @@ -159,7 +174,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { feature_err( &tcx.sess, sym::used_with_arg, - attr.span, + attr.span(), "`#[used(compiler)]` is currently unstable", ) .emit(); @@ -167,7 +182,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { codegen_fn_attrs.flags |= CodegenFnAttrFlags::USED; } Some(_) => { - tcx.dcx().emit_err(errors::ExpectedUsedSymbol { span: attr.span }); + tcx.dcx().emit_err(errors::ExpectedUsedSymbol { span: attr.span() }); } None => { // Unfortunately, unconditionally using `llvm.used` causes @@ -212,7 +227,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { { struct_span_code_err!( tcx.dcx(), - attr.span, + attr.span(), E0737, "`#[track_caller]` requires Rust ABI" ) @@ -220,12 +235,12 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { } if is_closure && !tcx.features().closure_track_caller() - && !attr.span.allows_unstable(sym::closure_track_caller) + && !attr.span().allows_unstable(sym::closure_track_caller) { feature_err( &tcx.sess, sym::closure_track_caller, - attr.span, + attr.span(), "`#[track_caller]` on closures is currently unstable", ) .emit(); @@ -239,21 +254,25 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { // so it may not contain any null characters. 
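A tiny illustration of the property checked above: the exported name eventually becomes part of a NUL-terminated symbol string, so an embedded NUL byte has to be caught first (the string here is invented):

fn main() {
    // A candidate export name containing an embedded NUL byte.
    let candidate = "my_symbol\0oops";
    assert!(candidate.as_bytes().contains(&0));
}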
struct_span_code_err!( tcx.dcx(), - attr.span, + attr.span(), E0648, "`export_name` may not contain null characters" ) .emit(); } codegen_fn_attrs.export_name = Some(s); - mixed_export_name_no_mangle_lint_state.track_export_name(attr.span); + mixed_export_name_no_mangle_lint_state.track_export_name(attr.span()); } } sym::target_feature => { - if !tcx.is_closure_like(did.to_def_id()) - && let Some(fn_sig) = fn_sig() - && fn_sig.skip_binder().safety().is_safe() - { + let Some(sig) = tcx.hir_node_by_def_id(did).fn_sig() else { + tcx.dcx().span_delayed_bug(attr.span(), "target_feature applied to non-fn"); + continue; + }; + let safe_target_features = + matches!(sig.header.safety, hir::HeaderSafety::SafeTargetFeatures); + codegen_fn_attrs.safe_target_features = safe_target_features; + if safe_target_features { if tcx.sess.target.is_like_wasm || tcx.sess.opts.actually_rustdoc { // The `#[target_feature]` attribute is allowed on // WebAssembly targets on all functions, including safe @@ -279,13 +298,13 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { feature_err( &tcx.sess, sym::target_feature_11, - attr.span, + attr.span(), "`#[target_feature(..)]` can only be applied to `unsafe` functions", ) .with_span_label(tcx.def_span(did), "not an `unsafe` function") .emit(); } else { - check_target_feature_trait_unsafe(tcx, did, attr.span); + check_target_feature_trait_unsafe(tcx, did, attr.span()); } } from_target_feature_attr( @@ -303,7 +322,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { if tcx.is_mutable_static(did.into()) { let mut diag = tcx.dcx().struct_span_err( - attr.span, + attr.span(), "extern mutable statics are not allowed with `#[linkage]`", ); diag.note( @@ -322,7 +341,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { if let Some(val) = attr.value_str() { if val.as_str().bytes().any(|b| b == 0) { let msg = format!("illegal null byte in link_section value: `{val}`"); - tcx.dcx().span_err(attr.span, msg); + tcx.dcx().span_err(attr.span(), msg); } else { codegen_fn_attrs.link_section = Some(val); } @@ -330,13 +349,13 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { } sym::link_name => codegen_fn_attrs.link_name = attr.value_str(), sym::link_ordinal => { - link_ordinal_span = Some(attr.span); + link_ordinal_span = Some(attr.span()); if let ordinal @ Some(_) = check_link_ordinal(tcx, attr) { codegen_fn_attrs.link_ordinal = ordinal; } } sym::no_sanitize => { - no_sanitize_span = Some(attr.span); + no_sanitize_span = Some(attr.span()); if let Some(list) = attr.meta_item_list() { for item in list.iter() { match item.name_or_empty() { @@ -373,7 +392,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { if !tcx.sess.target.has_thumb_interworking { struct_span_code_err!( tcx.dcx(), - attr.span, + attr.span(), E0779, "target does not support `#[instruction_set]`" ) @@ -390,7 +409,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { _ => { struct_span_code_err!( tcx.dcx(), - attr.span, + attr.span(), E0779, "invalid instruction set specified", ) @@ -402,7 +421,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { [] => { struct_span_code_err!( tcx.dcx(), - attr.span, + attr.span(), E0778, "`#[instruction_set]` requires an argument" ) @@ -412,7 +431,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { _ => { struct_span_code_err!( tcx.dcx(), - attr.span, + attr.span(), E0779, "cannot 
specify more than one instruction set" ) @@ -421,27 +440,6 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { } }) } - sym::repr => { - codegen_fn_attrs.alignment = if let Some(items) = attr.meta_item_list() - && let [item] = items.as_slice() - && let Some((sym::align, literal)) = item.singleton_lit_list() - { - rustc_attr_parsing::parse_alignment(&literal.kind) - .map_err(|msg| { - struct_span_code_err!( - tcx.dcx(), - literal.span, - E0589, - "invalid `repr(align)` attribute: {}", - msg - ) - .emit(); - }) - .ok() - } else { - None - }; - } sym::patchable_function_entry => { codegen_fn_attrs.patchable_function_entry = attr.meta_item_list().and_then(|l| { let mut prefix = None; @@ -507,7 +505,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { } if let (None, None) = (prefix, entry) { - tcx.dcx().span_err(attr.span, "must specify at least one parameter"); + tcx.dcx().span_err(attr.span(), "must specify at least one parameter"); } Some(PatchableFunctionEntry::from_prefix_and_entry( @@ -526,12 +524,14 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { if !attr.has_name(sym::inline) { return ia; } + if attr.is_word() { InlineAttr::Hint } else if let Some(ref items) = attr.meta_item_list() { - inline_span = Some(attr.span); + inline_span = Some(attr.span()); if items.len() != 1 { - struct_span_code_err!(tcx.dcx(), attr.span, E0534, "expected one argument").emit(); + struct_span_code_err!(tcx.dcx(), attr.span(), E0534, "expected one argument") + .emit(); InlineAttr::None } else if list_contains_name(items, sym::always) { InlineAttr::Always @@ -548,6 +548,20 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { ia } }); + codegen_fn_attrs.inline = attrs.iter().fold(codegen_fn_attrs.inline, |ia, attr| { + if !attr.has_name(sym::rustc_force_inline) || !tcx.features().rustc_attrs() { + return ia; + } + + if attr.is_word() { + InlineAttr::Force { attr_span: attr.span(), reason: None } + } else if let Some(val) = attr.value_str() { + InlineAttr::Force { attr_span: attr.span(), reason: Some(val) } + } else { + debug!("`rustc_force_inline` not checked by attribute validation"); + ia + } + }); // naked function MUST NOT be inlined! This attribute is required for the rust compiler itself, // but not for the code generation backend because at that point the naked function will just be @@ -562,12 +576,12 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { } let err = |sp, s| struct_span_code_err!(tcx.dcx(), sp, E0722, "{}", s).emit(); if attr.is_word() { - err(attr.span, "expected one argument"); + err(attr.span(), "expected one argument"); ia } else if let Some(ref items) = attr.meta_item_list() { - inline_span = Some(attr.span); + inline_span = Some(attr.span()); if items.len() != 1 { - err(attr.span, "expected one argument"); + err(attr.span(), "expected one argument"); OptimizeAttr::None } else if list_contains_name(items, sym::size) { OptimizeAttr::Size @@ -597,7 +611,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { // is probably a poor usage of `#[inline(always)]` and easily avoided by not using the attribute. 
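The new `#[rustc_force_inline]` handling above adds a forced-inlining variant next to `#[inline(always)]`, and the checks below switch from comparing against `InlineAttr::Always` to an `always()` helper, presumably so forced inlining is treated the same where that matters. A sketch of what such a helper could look like on a simplified enum (an illustration, not the compiler's definition):

enum InlineAttr {
    None,
    Hint,
    Always,
    // Forced inlining, driven by an attribute rather than a hint to LLVM.
    Force,
}

impl InlineAttr {
    // True for both ordinary `#[inline(always)]` and forced inlining.
    fn always(self) -> bool {
        matches!(self, InlineAttr::Always | InlineAttr::Force)
    }
}

fn main() {
    assert!(InlineAttr::Always.always());
    assert!(InlineAttr::Force.always());
    assert!(!InlineAttr::Hint.always());
    assert!(!InlineAttr::None.always());
}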
if tcx.features().target_feature_11() && tcx.is_closure_like(did.to_def_id()) - && codegen_fn_attrs.inline != InlineAttr::Always + && !codegen_fn_attrs.inline.always() { let owner_id = tcx.parent(did.to_def_id()); if tcx.def_kind(owner_id).has_codegen_attrs() { @@ -607,22 +621,28 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { } } - // If a function uses #[target_feature] it can't be inlined into general + // If a function uses `#[target_feature]` it can't be inlined into general // purpose functions as they wouldn't have the right target features - // enabled. For that reason we also forbid #[inline(always)] as it can't be + // enabled. For that reason we also forbid `#[inline(always)]` as it can't be // respected. - if !codegen_fn_attrs.target_features.is_empty() && codegen_fn_attrs.inline == InlineAttr::Always + // + // `#[rustc_force_inline]` doesn't need to be prohibited here, only + // `#[inline(always)]`, as forced inlining is implemented entirely within + // rustc (and so the MIR inliner can do any necessary checks for compatible target + // features). + // + // This sidesteps the LLVM blockers in enabling `target_features` + + // `inline(always)` to be used together (see rust-lang/rust#116573 and + // llvm/llvm-project#70563). + if !codegen_fn_attrs.target_features.is_empty() + && matches!(codegen_fn_attrs.inline, InlineAttr::Always) { if let Some(span) = inline_span { - tcx.dcx().span_err( - span, - "cannot use `#[inline(always)]` with \ - `#[target_feature]`", - ); + tcx.dcx().span_err(span, "cannot use `#[inline(always)]` with `#[target_feature]`"); } } - if !codegen_fn_attrs.no_sanitize.is_empty() && codegen_fn_attrs.inline == InlineAttr::Always { + if !codegen_fn_attrs.no_sanitize.is_empty() && codegen_fn_attrs.inline.always() { if let (Some(no_sanitize_span), Some(inline_span)) = (no_sanitize_span, inline_span) { let hir_id = tcx.local_def_id_to_hir_id(did); tcx.node_span_lint( @@ -681,7 +701,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { let span = tcx .get_attrs(did, sym::target_feature) .next() - .map_or_else(|| tcx.def_span(did), |a| a.span); + .map_or_else(|| tcx.def_span(did), |a| a.span()); tcx.dcx() .create_err(errors::TargetFeatureDisableOrEnable { features, @@ -733,7 +753,7 @@ fn check_link_ordinal(tcx: TyCtxt<'_>, attr: &hir::Attribute) -> Option { let sole_meta_list = match meta_item_list { Some([item]) => item.lit(), Some(_) => { - tcx.dcx().emit_err(errors::InvalidLinkOrdinalNargs { span: attr.span }); + tcx.dcx().emit_err(errors::InvalidLinkOrdinalNargs { span: attr.span() }); return None; } _ => None, @@ -759,13 +779,13 @@ fn check_link_ordinal(tcx: TyCtxt<'_>, attr: &hir::Attribute) -> Option { } else { let msg = format!("ordinal value in `link_ordinal` is too large: `{ordinal}`"); tcx.dcx() - .struct_span_err(attr.span, msg) + .struct_span_err(attr.span(), msg) .with_note("the value may not exceed `u16::MAX`") .emit(); None } } else { - tcx.dcx().emit_err(errors::InvalidLinkOrdinalFormat { span: attr.span }); + tcx.dcx().emit_err(errors::InvalidLinkOrdinalFormat { span: attr.span() }); None } } @@ -811,7 +831,7 @@ impl<'a> MixedExportNameAndNoMangleState<'a> { export_name: Some(export_name), no_mangle: Some(no_mangle), hir_id: Some(hir_id), - no_mangle_attr: Some(no_mangle_attr), + no_mangle_attr: Some(_), } = self { tcx.emit_node_span_lint( @@ -820,7 +840,7 @@ impl<'a> MixedExportNameAndNoMangleState<'a> { no_mangle, errors::MixedExportNameAndNoMangle { no_mangle, - no_mangle_attr: 
rustc_hir_pretty::attribute_to_string(&tcx, no_mangle_attr), + no_mangle_attr: "#[unsafe(no_mangle)]".to_string(), export_name, removal_span: no_mangle, }, diff --git a/compiler/rustc_codegen_ssa/src/debuginfo/mod.rs b/compiler/rustc_codegen_ssa/src/debuginfo/mod.rs index 88d36b19da432..7c62c03d574c1 100644 --- a/compiler/rustc_codegen_ssa/src/debuginfo/mod.rs +++ b/compiler/rustc_codegen_ssa/src/debuginfo/mod.rs @@ -65,8 +65,8 @@ fn tag_base_type_opt<'tcx>( }); match enum_type_and_layout.layout.variants() { - // A single-variant enum has no discriminant. - Variants::Single { .. } => None, + // A single-variant or no-variant enum has no discriminant. + Variants::Single { .. } | Variants::Empty => None, Variants::Multiple { tag_encoding: TagEncoding::Niche { .. }, tag, .. } => { // Niche tags are always normalized to unsized integers of the correct size. diff --git a/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs b/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs index cf72c2ed7423b..869798d8be194 100644 --- a/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs +++ b/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs @@ -432,6 +432,7 @@ fn push_debuginfo_type_name<'tcx>( push_closure_or_coroutine_name(tcx, def_id, args, qualified, output, visited); } } + ty::UnsafeBinder(_) => todo!("FIXME(unsafe_binders)"), ty::Param(_) | ty::Error(_) | ty::Infer(_) diff --git a/compiler/rustc_codegen_ssa/src/lib.rs b/compiler/rustc_codegen_ssa/src/lib.rs index 7dc8ab38a9764..65c6067c74066 100644 --- a/compiler/rustc_codegen_ssa/src/lib.rs +++ b/compiler/rustc_codegen_ssa/src/lib.rs @@ -42,7 +42,7 @@ use rustc_session::Session; use rustc_session::config::{CrateType, OutputFilenames, OutputType, RUST_CGU_EXT}; use rustc_session::cstore::{self, CrateSource}; use rustc_session::utils::NativeLibKind; -use rustc_span::symbol::Symbol; +use rustc_span::Symbol; pub mod assert_module_sources; pub mod back; diff --git a/compiler/rustc_codegen_ssa/src/mir/analyze.rs b/compiler/rustc_codegen_ssa/src/mir/analyze.rs index 43b8230c679e1..23baab3124ec6 100644 --- a/compiler/rustc_codegen_ssa/src/mir/analyze.rs +++ b/compiler/rustc_codegen_ssa/src/mir/analyze.rs @@ -2,7 +2,7 @@ //! which do not. 
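The analysis below switches to a type spelled `DenseBitSet` for tracking which locals need memory. A tiny self-contained sketch of the idea behind a dense bit set, one bit per index (a toy, not the `rustc_index` implementation):

// Toy dense bit set over `usize` indices, one bit per possible element.
struct ToyBitSet {
    words: Vec<u64>,
}

impl ToyBitSet {
    fn new_empty(len: usize) -> Self {
        ToyBitSet { words: vec![0; len.div_ceil(64)] }
    }

    fn insert(&mut self, i: usize) {
        self.words[i / 64] |= 1 << (i % 64);
    }

    fn contains(&self, i: usize) -> bool {
        (self.words[i / 64] & (1 << (i % 64))) != 0
    }
}

fn main() {
    // e.g. mark locals 3 and 70 as "needs memory" out of 100 locals.
    let mut non_ssa = ToyBitSet::new_empty(100);
    non_ssa.insert(3);
    non_ssa.insert(70);
    assert!(non_ssa.contains(70) && !non_ssa.contains(4));
}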
use rustc_data_structures::graph::dominators::Dominators; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_index::{IndexSlice, IndexVec}; use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor}; use rustc_middle::mir::{self, DefLocation, Location, TerminatorKind, traversal}; @@ -16,7 +16,7 @@ use crate::traits::*; pub(crate) fn non_ssa_locals<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( fx: &FunctionCx<'a, 'tcx, Bx>, traversal_order: &[mir::BasicBlock], -) -> BitSet { +) -> DenseBitSet { let mir = fx.mir; let dominators = mir.basic_blocks.dominators(); let locals = mir @@ -44,7 +44,7 @@ pub(crate) fn non_ssa_locals<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( analyzer.visit_basic_block_data(bb, data); } - let mut non_ssa_locals = BitSet::new_empty(analyzer.locals.len()); + let mut non_ssa_locals = DenseBitSet::new_empty(analyzer.locals.len()); for (local, kind) in analyzer.locals.iter_enumerated() { if matches!(kind, LocalKind::Memory) { non_ssa_locals.insert(local); diff --git a/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs b/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs index d4d7f16db55da..843a996d2bfe5 100644 --- a/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs +++ b/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs @@ -10,8 +10,7 @@ use rustc_middle::ty::layout::{LayoutOf, TyAndLayout}; use rustc_middle::ty::{Instance, Ty}; use rustc_middle::{bug, mir, ty}; use rustc_session::config::DebugInfo; -use rustc_span::symbol::{Symbol, kw}; -use rustc_span::{BytePos, Span, hygiene}; +use rustc_span::{BytePos, Span, Symbol, hygiene, kw}; use super::operand::{OperandRef, OperandValue}; use super::place::{PlaceRef, PlaceValue}; diff --git a/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs b/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs index 299b98c0a4f03..304ac4544ee43 100644 --- a/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs +++ b/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs @@ -75,7 +75,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { // If we're swapping something that's *not* an `OperandValue::Ref`, // then we can do it directly and avoid the alloca. // Otherwise, we'll let the fallback MIR body take care of it. - if let sym::typed_swap = name { + if let sym::typed_swap_nonoverlapping = name { let pointee_ty = fn_args.type_at(0); let pointee_layout = bx.layout_of(pointee_ty); if !bx.is_backend_ref(pointee_layout) diff --git a/compiler/rustc_codegen_ssa/src/mir/mod.rs b/compiler/rustc_codegen_ssa/src/mir/mod.rs index 62f69af3f2f75..3a896071bc6b8 100644 --- a/compiler/rustc_codegen_ssa/src/mir/mod.rs +++ b/compiler/rustc_codegen_ssa/src/mir/mod.rs @@ -1,7 +1,7 @@ use std::iter; use rustc_index::IndexVec; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; use rustc_middle::mir::{UnwindTerminateReason, traversal}; use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt, HasTypingEnv, TyAndLayout}; @@ -293,7 +293,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( // So drop the builder of `start_llbb` to avoid having two at the same time. drop(start_bx); - let mut unreached_blocks = BitSet::new_filled(mir.basic_blocks.len()); + let mut unreached_blocks = DenseBitSet::new_filled(mir.basic_blocks.len()); // Codegen the body of each reachable block using our reverse postorder list. 
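The alignment computation in `naked_asm.rs` above relies on `Option` ordering `None` below every `Some`, so `Ord::max` picks whichever of the `-Zmin-function-alignment` flag and the `#[repr(align)]` attribute asks for more, and `unwrap_or(4)` supplies the default. A small sketch with plain integers standing in for the compiler's `Align` type:

fn main() {
    // Stand-ins for -Zmin-function-alignment and #[repr(align(N))], in bytes.
    let min_function_alignment: Option<u64> = Some(16);
    let attr_alignment: Option<u64> = Some(4);

    // `None` compares below every `Some`, so max() does the right thing even
    // when only one (or neither) source specifies an alignment.
    let align = Ord::max(min_function_alignment, attr_alignment).unwrap_or(4);
    assert_eq!(align, 16);

    let align = Ord::max(None::<u64>, None).unwrap_or(4);
    assert_eq!(align, 4);
}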
for bb in traversal_order { fx.codegen_block(bb); @@ -316,7 +316,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( bx: &mut Bx, fx: &mut FunctionCx<'a, 'tcx, Bx>, - memory_locals: &BitSet, + memory_locals: &DenseBitSet, ) -> Vec> { let mir = fx.mir; let mut idx = 0; diff --git a/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs b/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs index cac3cc587cb45..8df270abc8173 100644 --- a/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs +++ b/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs @@ -132,9 +132,13 @@ fn prefix_and_suffix<'tcx>( let attrs = tcx.codegen_fn_attrs(instance.def_id()); let link_section = attrs.link_section.map(|symbol| symbol.as_str().to_string()); - let align = attrs.alignment.map(|a| a.bytes()).unwrap_or(4); - // See https://sourceware.org/binutils/docs/as/ARM-Directives.html for info on these directives. + // function alignment can be set globally with the `-Zmin-function-alignment=` flag; + // the alignment from a `#[repr(align())]` is used if it specifies a higher alignment. + // if no alignment is specified, an alignment of 4 bytes is used. + let min_function_alignment = tcx.sess.opts.unstable_opts.min_function_alignment; + let align = Ord::max(min_function_alignment, attrs.alignment).map(|a| a.bytes()).unwrap_or(4); + // In particular, `.arm` can also be written `.code 32` and `.thumb` as `.code 16`. let (arch_prefix, arch_suffix) = if is_arm { ( diff --git a/compiler/rustc_codegen_ssa/src/mir/place.rs b/compiler/rustc_codegen_ssa/src/mir/place.rs index a9e80e27ed401..c634f864ffb89 100644 --- a/compiler/rustc_codegen_ssa/src/mir/place.rs +++ b/compiler/rustc_codegen_ssa/src/mir/place.rs @@ -243,6 +243,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> { return bx.cx().const_poison(cast_to); } let (tag_scalar, tag_encoding, tag_field) = match self.layout.variants { + Variants::Empty => unreachable!("we already handled uninhabited types"), Variants::Single { index } => { let discr_val = self .layout @@ -365,9 +366,9 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> { return; } match self.layout.variants { - Variants::Single { index } => { - assert_eq!(index, variant_index); - } + Variants::Empty => unreachable!("we already handled uninhabited types"), + Variants::Single { index } => assert_eq!(index, variant_index), + Variants::Multiple { tag_encoding: TagEncoding::Direct, tag_field, .. 
} => { let ptr = self.project_field(bx, tag_field); let to = diff --git a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs index cf53739223467..31793641d75ed 100644 --- a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs +++ b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs @@ -10,9 +10,9 @@ use rustc_session::config::OptLevel; use rustc_span::{DUMMY_SP, Span}; use tracing::{debug, instrument}; +use super::FunctionCx; use super::operand::{OperandRef, OperandValue}; use super::place::PlaceRef; -use super::{FunctionCx, LocalRef}; use crate::common::IntPredicate; use crate::traits::*; use crate::{MemFlags, base}; @@ -93,23 +93,37 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { return; } - if let OperandValue::Immediate(v) = cg_elem.val { + let try_init_all_same = |bx: &mut Bx, v| { let start = dest.val.llval; let size = bx.const_usize(dest.layout.size.bytes()); - // Use llvm.memset.p0i8.* to initialize all zero arrays - if bx.cx().const_to_opt_u128(v, false) == Some(0) { - let fill = bx.cx().const_u8(0); - bx.memset(start, fill, size, dest.val.align, MemFlags::empty()); - return; + // Use llvm.memset.p0i8.* to initialize all same byte arrays + if let Some(int) = bx.cx().const_to_opt_u128(v, false) { + let bytes = &int.to_le_bytes()[..cg_elem.layout.size.bytes_usize()]; + let first = bytes[0]; + if bytes[1..].iter().all(|&b| b == first) { + let fill = bx.cx().const_u8(first); + bx.memset(start, fill, size, dest.val.align, MemFlags::empty()); + return true; + } } // Use llvm.memset.p0i8.* to initialize byte arrays let v = bx.from_immediate(v); if bx.cx().val_ty(v) == bx.cx().type_i8() { bx.memset(start, v, size, dest.val.align, MemFlags::empty()); - return; + return true; } + false + }; + + match cg_elem.val { + OperandValue::Immediate(v) => { + if try_init_all_same(bx, v) { + return; + } + } + _ => (), } let count = self @@ -593,14 +607,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { self.codegen_place_to_pointer(bx, place, mk_ptr) } - mir::Rvalue::Len(place) => { - let size = self.evaluate_array_len(bx, place); - OperandRef { - val: OperandValue::Immediate(size), - layout: bx.cx().layout_of(bx.tcx().types.usize), - } - } - mir::Rvalue::BinaryOp(op_with_overflow, box (ref lhs, ref rhs)) if let Some(op) = op_with_overflow.overflowing_to_wrapping() => { @@ -800,24 +806,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { } } - fn evaluate_array_len(&mut self, bx: &mut Bx, place: mir::Place<'tcx>) -> Bx::Value { - // ZST are passed as operands and require special handling - // because codegen_place() panics if Local is operand. - if let Some(index) = place.as_local() { - if let LocalRef::Operand(op) = self.locals[index] { - if let ty::Array(_, n) = op.layout.ty.kind() { - let n = n - .try_to_target_usize(bx.tcx()) - .expect("expected monomorphic const in codegen"); - return bx.cx().const_usize(n); - } - } - } - // use common size calculation for non zero-sized types - let cg_value = self.codegen_place(bx, place.as_ref()); - cg_value.len(bx.cx()) - } - /// Codegen an `Rvalue::RawPtr` or `Rvalue::Ref` fn codegen_place_to_pointer( &mut self, @@ -1089,7 +1077,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { mir::Rvalue::Ref(..) | mir::Rvalue::CopyForDeref(..) | mir::Rvalue::RawPtr(..) | - mir::Rvalue::Len(..) | mir::Rvalue::Cast(..) | // (*) mir::Rvalue::ShallowInitBox(..) | // (*) mir::Rvalue::BinaryOp(..) 
| diff --git a/compiler/rustc_codegen_ssa/src/target_features.rs b/compiler/rustc_codegen_ssa/src/target_features.rs index c31e8aa7d31d9..d8b9bdb55da69 100644 --- a/compiler/rustc_codegen_ssa/src/target_features.rs +++ b/compiler/rustc_codegen_ssa/src/target_features.rs @@ -9,8 +9,7 @@ use rustc_middle::middle::codegen_fn_attrs::TargetFeature; use rustc_middle::query::Providers; use rustc_middle::ty::TyCtxt; use rustc_session::parse::feature_err; -use rustc_span::Span; -use rustc_span::symbol::{Symbol, sym}; +use rustc_span::{Span, Symbol, sym}; use rustc_target::target_features; use crate::errors; @@ -20,7 +19,7 @@ use crate::errors; pub(crate) fn from_target_feature_attr( tcx: TyCtxt<'_>, attr: &hir::Attribute, - rust_target_features: &UnordMap, + rust_target_features: &UnordMap, target_features: &mut Vec, ) { let Some(list) = attr.meta_item_list() else { return }; @@ -33,7 +32,7 @@ pub(crate) fn from_target_feature_attr( .emit(); }; let rust_features = tcx.features(); - let mut added_target_features = Vec::new(); + let abi_feature_constraints = tcx.sess.target.abi_required_features(); for item in list { // Only `enable = ...` is accepted in the meta-item list. if !item.has_name(sym::enable) { @@ -48,7 +47,7 @@ pub(crate) fn from_target_feature_attr( }; // We allow comma separation to enable multiple features. - added_target_features.extend(value.as_str().split(',').filter_map(|feature| { + for feature in value.as_str().split(',') { let Some(stability) = rust_target_features.get(feature) else { let msg = format!("the feature named `{feature}` is not valid for this target"); let mut err = tcx.dcx().struct_span_err(item.span(), msg); @@ -60,12 +59,12 @@ pub(crate) fn from_target_feature_attr( } } err.emit(); - return None; + continue; }; // Only allow target features whose feature gates have been enabled // and which are permitted to be toggled. - if let Err(reason) = stability.toggle_allowed(/*enable*/ true) { + if let Err(reason) = stability.toggle_allowed() { tcx.dcx().emit_err(errors::ForbiddenTargetFeatureAttr { span: item.span(), feature, @@ -81,31 +80,25 @@ pub(crate) fn from_target_feature_attr( format!("the target feature `{feature}` is currently unstable"), ) .emit(); + } else { + // Add this and the implied features. + let feature_sym = Symbol::intern(feature); + for &name in tcx.implied_target_features(feature_sym) { + // But ensure the ABI does not forbid enabling this. + // Here we do assume that LLVM doesn't add even more implied features + // we don't know about, at least no features that would have ABI effects! 
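A standalone sketch of the expansion implemented just below: each requested feature pulls in its implied features, and anything on a hypothetical ABI-incompatible list is reported (and simply skipped here, whereas the compiler emits a proper diagnostic). The tables are made up:

use std::collections::{HashMap, HashSet};

fn main() {
    // Hypothetical implication table and ABI constraint set; not the real target data.
    let implied: HashMap<&str, Vec<&str>> =
        HashMap::from([("avx2", vec!["avx2", "avx", "sse4.2"])]);
    let abi_incompatible: HashSet<&str> = HashSet::from(["soft-float"]);

    let requested = ["avx2", "soft-float"];
    let mut enabled: Vec<(&str, bool)> = Vec::new(); // (feature, reached only via implication)

    for feature in requested {
        let closure = implied.get(feature).cloned().unwrap_or_else(|| vec![feature]);
        for name in closure {
            if abi_incompatible.contains(name) {
                eprintln!("feature `{name}` is incompatible with the target ABI");
                continue;
            }
            enabled.push((name, name != feature));
        }
    }

    assert!(enabled.contains(&("avx", true)));
    assert!(!enabled.iter().any(|&(name, _)| name == "soft-float"));
}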
+ if abi_feature_constraints.incompatible.contains(&name.as_str()) { + tcx.dcx().emit_err(errors::ForbiddenTargetFeatureAttr { + span: item.span(), + feature: name.as_str(), + reason: "this feature is incompatible with the target ABI", + }); + } + target_features.push(TargetFeature { name, implied: name != feature_sym }) + } } - Some(Symbol::intern(feature)) - })); - } - - // Add explicit features - target_features.extend( - added_target_features.iter().copied().map(|name| TargetFeature { name, implied: false }), - ); - - // Add implied features - let mut implied_target_features = UnordSet::new(); - for feature in added_target_features.iter() { - implied_target_features.extend(tcx.implied_target_features(*feature).clone()); - } - for feature in added_target_features.iter() { - implied_target_features.remove(feature); + } } - target_features.extend( - implied_target_features - .into_sorted_stable_ord() - .iter() - .copied() - .map(|name| TargetFeature { name, implied: true }), - ) } /// Computes the set of target features used in a function for the purposes of @@ -148,25 +141,28 @@ pub(crate) fn provide(providers: &mut Providers) { *providers = Providers { rust_target_features: |tcx, cnum| { assert_eq!(cnum, LOCAL_CRATE); - let target = &tcx.sess.target; if tcx.sess.opts.actually_rustdoc { // rustdoc needs to be able to document functions that use all the features, so // whitelist them all rustc_target::target_features::all_rust_features() - .map(|(a, b)| (a.to_string(), b.compute_toggleability(target))) + .map(|(a, b)| (a.to_string(), b)) .collect() } else { tcx.sess .target .rust_target_features() .iter() - .map(|(a, b, _)| (a.to_string(), b.compute_toggleability(target))) + .map(|(a, b, _)| (a.to_string(), *b)) .collect() } }, - implied_target_features: |tcx, feature| { + implied_target_features: |tcx, feature: Symbol| { + let feature = feature.as_str(); UnordSet::from(tcx.sess.target.implied_target_features(std::iter::once(feature))) .into_sorted_stable_ord() + .into_iter() + .map(|s| Symbol::intern(s)) + .collect() }, asm_target_features, ..*providers diff --git a/compiler/rustc_codegen_ssa/src/traits/backend.rs b/compiler/rustc_codegen_ssa/src/traits/backend.rs index 4b17db2c49ee3..ebcf118b90380 100644 --- a/compiler/rustc_codegen_ssa/src/traits/backend.rs +++ b/compiler/rustc_codegen_ssa/src/traits/backend.rs @@ -11,7 +11,7 @@ use rustc_middle::ty::TyCtxt; use rustc_middle::util::Providers; use rustc_session::Session; use rustc_session::config::{self, OutputFilenames, PrintRequest}; -use rustc_span::symbol::Symbol; +use rustc_span::Symbol; use super::CodegenObject; use super::write::WriteBackendMethods; diff --git a/compiler/rustc_codegen_ssa/src/traits/builder.rs b/compiler/rustc_codegen_ssa/src/traits/builder.rs index b0138ac8bfed6..3ee13b19f665f 100644 --- a/compiler/rustc_codegen_ssa/src/traits/builder.rs +++ b/compiler/rustc_codegen_ssa/src/traits/builder.rs @@ -382,7 +382,7 @@ pub trait BuilderMethods<'a, 'tcx>: /// Avoids `alloca`s for Immediates and ScalarPairs. /// /// FIXME: Maybe do something smarter for Ref types too? - /// For now, the `typed_swap` intrinsic just doesn't call this for those + /// For now, the `typed_swap_nonoverlapping` intrinsic just doesn't call this for those /// cases (in non-debug), preferring the fallback body instead. 
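The doc comment above refers to the renamed `typed_swap_nonoverlapping` intrinsic. A minimal standalone sketch of the operation it models, using std's raw-pointer swap rather than the backend code path; the safety contract (valid, aligned, nonoverlapping pointers) is the caller's:

use std::ptr;

fn main() {
    let mut a = [1u8, 2, 3];
    let mut b = [9u8, 8, 7];

    // Swap the two values through raw pointers; the pointers must be valid,
    // aligned, and must not overlap, which is what the intrinsic's name promises.
    unsafe { ptr::swap_nonoverlapping(&mut a, &mut b, 1) };

    assert_eq!(a, [9, 8, 7]);
    assert_eq!(b, [1, 2, 3]);
}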
fn typed_place_swap( &mut self, diff --git a/compiler/rustc_codegen_ssa/src/traits/write.rs b/compiler/rustc_codegen_ssa/src/traits/write.rs index aabe9e33c4aa1..51e2255efe142 100644 --- a/compiler/rustc_codegen_ssa/src/traits/write.rs +++ b/compiler/rustc_codegen_ssa/src/traits/write.rs @@ -1,5 +1,7 @@ +use rustc_ast::expand::autodiff_attrs::AutoDiffItem; use rustc_errors::{DiagCtxtHandle, FatalError}; use rustc_middle::dep_graph::WorkProduct; +use rustc_middle::ty::TyCtxt; use crate::back::lto::{LtoModuleCodegen, SerializedModule, ThinModule}; use crate::back::write::{CodegenContext, FatLtoInput, ModuleConfig}; @@ -61,6 +63,13 @@ pub trait WriteBackendMethods: 'static + Sized + Clone { want_summary: bool, ) -> (String, Self::ThinBuffer); fn serialize_module(module: ModuleCodegen) -> (String, Self::ModuleBuffer); + fn autodiff( + cgcx: &CodegenContext, + tcx: TyCtxt<'_>, + module: &ModuleCodegen, + diff_fncs: Vec, + config: &ModuleConfig, + ) -> Result<(), FatalError>; } pub trait ThinBufferMethods: Send + Sync { diff --git a/compiler/rustc_const_eval/messages.ftl b/compiler/rustc_const_eval/messages.ftl index c31c94495d015..4861b7a4430f0 100644 --- a/compiler/rustc_const_eval/messages.ftl +++ b/compiler/rustc_const_eval/messages.ftl @@ -12,8 +12,6 @@ const_eval_already_reported = const_eval_assume_false = `assume` called with `false` -const_eval_await_non_const = - cannot convert `{$ty}` into a future in {const_eval_const_context}s const_eval_bounds_check_failed = indexing out of bounds: the len is {$len} but the index is {$index} const_eval_call_nonzero_intrinsic = @@ -23,11 +21,6 @@ const_eval_closure_call = closures need an RFC before allowed to be called in {const_eval_const_context}s const_eval_closure_fndef_not_const = function defined here, but it is not `const` -const_eval_closure_non_const = - cannot call non-const closure in {const_eval_const_context}s - -const_eval_conditionally_const_call = - cannot call conditionally-const {$def_descr} `{$def_path_str}` in {const_eval_const_context}s const_eval_consider_dereferencing = consider dereferencing here @@ -62,10 +55,6 @@ const_eval_dealloc_incorrect_layout = const_eval_dealloc_kind_mismatch = deallocating {$alloc}, which is {$alloc_kind} memory, using {$kind} deallocation operation -const_eval_deref_coercion_non_const = - cannot perform deref coercion on `{$ty}` in {const_eval_const_context}s - .note = attempting to deref into `{$target_ty}` - .target_note = deref defined here const_eval_deref_function_pointer = accessing {$allocation} which contains a function const_eval_deref_vtable_pointer = @@ -109,9 +98,6 @@ const_eval_extern_type_field = `extern type` field does not have a known offset const_eval_fn_ptr_call = function pointers need an RFC before allowed to be called in {const_eval_const_context}s -const_eval_for_loop_into_iter_non_const = - cannot use `for` loop on `{$ty}` in {const_eval_const_context}s - const_eval_frame_note = {$times -> [0] {const_eval_frame_note_inner} *[other] [... {$times} additional calls {const_eval_frame_note_inner} ...] 
@@ -216,9 +202,6 @@ const_eval_long_running = .label = the const evaluator is currently interpreting this expression .help = the constant being evaluated -const_eval_match_eq_non_const = cannot match on `{$ty}` in {const_eval_const_context}s - .note = `{$ty}` cannot be compared in compile-time, and therefore cannot be used in `match`es - const_eval_max_num_nodes_in_const = maximum number of nodes exceeded in constant {$global_const_id} const_eval_memory_access_test = memory access failed @@ -249,11 +232,26 @@ const_eval_mutable_ref_escaping = If you really want global mutable state, try using an interior mutable `static` or a `static mut`. const_eval_nested_static_in_thread_local = #[thread_local] does not support implicit nested statics, please create explicit static items and refer to them instead + +const_eval_non_const_await = + cannot convert `{$ty}` into a future in {const_eval_const_context}s + +const_eval_non_const_closure = + cannot call {$non_or_conditionally}-const closure in {const_eval_const_context}s + +const_eval_non_const_deref_coercion = + cannot perform {$non_or_conditionally}-const deref coercion on `{$ty}` in {const_eval_const_context}s + .note = attempting to deref into `{$target_ty}` + .target_note = deref defined here + const_eval_non_const_fmt_macro_call = - cannot call non-const formatting macro in {const_eval_const_context}s + cannot call {$non_or_conditionally}-const formatting macro in {const_eval_const_context}s const_eval_non_const_fn_call = - cannot call non-const fn `{$def_path_str}` in {const_eval_const_context}s + cannot call {$non_or_conditionally}-const {$def_descr} `{$def_path_str}` in {const_eval_const_context}s + +const_eval_non_const_for_loop_into_iter = + cannot use `for` loop on `{$ty}` in {const_eval_const_context}s const_eval_non_const_impl = impl defined here, but it is not `const` @@ -261,6 +259,20 @@ const_eval_non_const_impl = const_eval_non_const_intrinsic = cannot call non-const intrinsic `{$name}` in {const_eval_const_context}s +const_eval_non_const_match_eq = cannot match on `{$ty}` in {const_eval_const_context}s + .note = `{$ty}` cannot be compared in compile-time, and therefore cannot be used in `match`es + +const_eval_non_const_operator = + cannot call {$non_or_conditionally}-const operator in {const_eval_const_context}s + +const_eval_non_const_question_branch = + `?` is not allowed on `{$ty}` in {const_eval_const_context}s +const_eval_non_const_question_from_residual = + `?` is not allowed on `{$ty}` in {const_eval_const_context}s + +const_eval_non_const_try_block_from_output = + `try` block cannot convert `{$ty}` to the result in {const_eval_const_context}s + const_eval_not_enough_caller_args = calling a function with fewer arguments than it requires @@ -281,8 +293,6 @@ const_eval_offset_from_unsigned_overflow = *[false] offset } than second: {$a_offset} < {$b_offset} -const_eval_operator_non_const = - cannot call non-const operator in {const_eval_const_context}s const_eval_overflow_arith = arithmetic overflow in `{$intrinsic}` const_eval_overflow_shift = @@ -325,11 +335,6 @@ const_eval_ptr_as_bytes_1 = const_eval_ptr_as_bytes_2 = the absolute address of a pointer is not known at compile-time, so such operations are not supported -const_eval_question_branch_non_const = - `?` is not allowed on `{$ty}` in {const_eval_const_context}s -const_eval_question_from_residual_non_const = - `?` is not allowed on `{$ty}` in {const_eval_const_context}s - const_eval_range = in the range {$lo}..={$hi} const_eval_range_lower = greater or equal to 
{$lo} const_eval_range_singular = equal to {$lo} @@ -379,8 +384,6 @@ const_eval_too_generic = const_eval_too_many_caller_args = calling a function with more arguments than it expected -const_eval_try_block_from_output_non_const = - `try` block cannot convert `{$ty}` to the result in {const_eval_const_context}s const_eval_unallowed_fn_pointer_call = function pointer calls are not allowed in {const_eval_const_context}s const_eval_unallowed_heap_allocations = diff --git a/compiler/rustc_const_eval/src/check_consts/check.rs b/compiler/rustc_const_eval/src/check_consts/check.rs index f4257ad9671f1..6c940124193c7 100644 --- a/compiler/rustc_const_eval/src/check_consts/check.rs +++ b/compiler/rustc_const_eval/src/check_consts/check.rs @@ -10,7 +10,7 @@ use rustc_attr_parsing::{ConstStability, StabilityLevel}; use rustc_errors::{Diag, ErrorGuaranteed}; use rustc_hir::def_id::DefId; use rustc_hir::{self as hir, LangItem}; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_infer::infer::TyCtxtInferExt; use rustc_middle::mir::visit::Visitor; use rustc_middle::mir::*; @@ -35,6 +35,12 @@ use crate::errors; type QualifResults<'mir, 'tcx, Q> = rustc_mir_dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'mir, 'tcx, Q>>; +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +enum ConstConditionsHold { + Yes, + No, +} + #[derive(Default)] pub(crate) struct Qualifs<'mir, 'tcx> { has_mut_interior: Option>, @@ -172,7 +178,7 @@ pub struct Checker<'mir, 'tcx> { /// A set that stores for each local whether it is "transient", i.e. guaranteed to be dead /// when this MIR body returns. - transient_locals: Option>, + transient_locals: Option>, error_emitted: Option, secondary_errors: Vec>, @@ -242,7 +248,7 @@ impl<'mir, 'tcx> Checker<'mir, 'tcx> { // And then check all `Return` in the MIR, and if a local is "maybe live" at a // `Return` then it is definitely not transient. - let mut transient = BitSet::new_filled(ccx.body.local_decls.len()); + let mut transient = DenseBitSet::new_filled(ccx.body.local_decls.len()); // Make sure to only visit reachable blocks, the dataflow engine can ICE otherwise. for (bb, data) in traversal::reachable(&ccx.body) { if matches!(data.terminator().kind, TerminatorKind::Return) { @@ -376,15 +382,15 @@ impl<'mir, 'tcx> Checker<'mir, 'tcx> { callee: DefId, callee_args: ty::GenericArgsRef<'tcx>, call_span: Span, - ) -> bool { + ) -> Option { let tcx = self.tcx; if !tcx.is_conditionally_const(callee) { - return false; + return None; } let const_conditions = tcx.const_conditions(callee).instantiate(tcx, callee_args); if const_conditions.is_empty() { - return false; + return None; } let (infcx, param_env) = tcx.infer_ctxt().build_with_typing_env(self.body.typing_env(tcx)); @@ -413,12 +419,13 @@ impl<'mir, 'tcx> Checker<'mir, 'tcx> { })); let errors = ocx.select_all_or_error(); - if !errors.is_empty() { + if errors.is_empty() { + Some(ConstConditionsHold::Yes) + } else { tcx.dcx() .span_delayed_bug(call_span, "this should have reported a ~const error in HIR"); + Some(ConstConditionsHold::No) } - - true } pub fn check_drop_terminator( @@ -488,8 +495,7 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> { Rvalue::Use(_) | Rvalue::CopyForDeref(..) | Rvalue::Repeat(..) - | Rvalue::Discriminant(..) - | Rvalue::Len(_) => {} + | Rvalue::Discriminant(..) => {} Rvalue::Aggregate(kind, ..) => { if let AggregateKind::Coroutine(def_id, ..) 
= kind.as_ref() @@ -707,9 +713,17 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> { trace!("attempting to call a trait method"); let trait_is_const = tcx.is_const_trait(trait_did); - if trait_is_const { + // Only consider a trait to be const if the const conditions hold. + // Otherwise, it's really misleading to call something "conditionally" + // const when it's very obviously not conditionally const. + if trait_is_const && has_const_conditions == Some(ConstConditionsHold::Yes) { // Trait calls are always conditionally-const. - self.check_op(ops::ConditionallyConstCall { callee, args: fn_args }); + self.check_op(ops::ConditionallyConstCall { + callee, + args: fn_args, + span: *fn_span, + call_source, + }); // FIXME(const_trait_impl): do a more fine-grained check whether this // particular trait can be const-stably called. } else { @@ -726,8 +740,13 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> { } // Even if we know the callee, ensure we can use conditionally-const calls. - if has_const_conditions { - self.check_op(ops::ConditionallyConstCall { callee, args: fn_args }); + if has_const_conditions.is_some() { + self.check_op(ops::ConditionallyConstCall { + callee, + args: fn_args, + span: *fn_span, + call_source, + }); } // At this point, we are calling a function, `callee`, whose `DefId` is known... diff --git a/compiler/rustc_const_eval/src/check_consts/mod.rs b/compiler/rustc_const_eval/src/check_consts/mod.rs index ab68691f1b97a..3afa841e46fb4 100644 --- a/compiler/rustc_const_eval/src/check_consts/mod.rs +++ b/compiler/rustc_const_eval/src/check_consts/mod.rs @@ -4,12 +4,13 @@ //! has interior mutability or needs to be dropped, as well as the visitor that emits errors when //! it finds operations that are invalid in a certain context. +use rustc_attr_parsing::{AttributeKind, find_attr}; use rustc_errors::DiagCtxtHandle; +use rustc_hir as hir; use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_middle::ty::{self, PolyFnSig, TyCtxt}; use rustc_middle::{bug, mir}; use rustc_span::Symbol; -use {rustc_attr_parsing as attr, rustc_hir as hir}; pub use self::qualifs::Qualif; @@ -81,7 +82,9 @@ pub fn rustc_allow_const_fn_unstable( feature_gate: Symbol, ) -> bool { let attrs = tcx.hir().attrs(tcx.local_def_id_to_hir_id(def_id)); - attr::rustc_allow_const_fn_unstable(tcx.sess, attrs).any(|name| name == feature_gate) + + find_attr!(attrs, AttributeKind::AllowConstFnUnstable(syms) => syms.contains(&feature_gate)) + .unwrap_or(false) } /// Returns `true` if the given `const fn` is "safe to expose on stable". 
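The `check.rs` hunk above replaces the old `bool` result with `Option<ConstConditionsHold>`, which distinguishes three outcomes instead of two. A minimal, self-contained sketch of that pattern (the `describe` helper is hypothetical, purely for illustration):

```rust
// `None`          = the callee has no const conditions at all (not conditionally const).
// `Some(Yes)`     = it has const conditions and they hold.
// `Some(No)`      = it has const conditions and they do not hold.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum ConstConditionsHold {
    Yes,
    No,
}

fn describe(result: Option<ConstConditionsHold>) -> &'static str {
    match result {
        None => "callee is not conditionally const",
        Some(ConstConditionsHold::Yes) => "conditionally const, and the conditions hold",
        Some(ConstConditionsHold::No) => "conditionally const, but the conditions do not hold",
    }
}
```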
diff --git a/compiler/rustc_const_eval/src/check_consts/ops.rs b/compiler/rustc_const_eval/src/check_consts/ops.rs index 23f2aa4d0296a..7d103055a7c43 100644 --- a/compiler/rustc_const_eval/src/check_consts/ops.rs +++ b/compiler/rustc_const_eval/src/check_consts/ops.rs @@ -14,9 +14,11 @@ use rustc_middle::ty::{ self, Closure, FnDef, FnPtr, GenericArgKind, GenericArgsRef, Param, TraitRef, Ty, suggest_constraining_type_param, }; -use rustc_middle::util::{CallDesugaringKind, CallKind, call_kind}; -use rustc_span::symbol::sym; -use rustc_span::{BytePos, Pos, Span, Symbol}; +use rustc_session::parse::add_feature_diagnostics; +use rustc_span::{BytePos, Pos, Span, Symbol, sym}; +use rustc_trait_selection::error_reporting::traits::call_kind::{ + CallDesugaringKind, CallKind, call_kind, +}; use rustc_trait_selection::traits::SelectionContext; use tracing::debug; @@ -78,6 +80,8 @@ impl<'tcx> NonConstOp<'tcx> for FnCallIndirect { pub(crate) struct ConditionallyConstCall<'tcx> { pub callee: DefId, pub args: GenericArgsRef<'tcx>, + pub span: Span, + pub call_source: CallSource, } impl<'tcx> NonConstOp<'tcx> for ConditionallyConstCall<'tcx> { @@ -92,16 +96,22 @@ impl<'tcx> NonConstOp<'tcx> for ConditionallyConstCall<'tcx> { } } - fn build_error(&self, ccx: &ConstCx<'_, 'tcx>, span: Span) -> Diag<'tcx> { - ccx.tcx.sess.create_feature_err( - errors::ConditionallyConstCall { - span, - def_path_str: ccx.tcx.def_path_str_with_args(self.callee, self.args), - def_descr: ccx.tcx.def_descr(self.callee), - kind: ccx.const_kind(), - }, - sym::const_trait_impl, - ) + fn build_error(&self, ccx: &ConstCx<'_, 'tcx>, _: Span) -> Diag<'tcx> { + let mut diag = build_error_for_const_call( + ccx, + self.callee, + self.args, + self.span, + self.call_source, + "conditionally", + |_, _, _| {}, + ); + + // Override code and mention feature. + diag.code(E0658); + add_feature_diagnostics(&mut diag, ccx.tcx.sess, sym::const_trait_impl); + + diag } } @@ -119,209 +129,252 @@ impl<'tcx> NonConstOp<'tcx> for FnCallNonConst<'tcx> { #[allow(rustc::diagnostic_outside_of_impl)] #[allow(rustc::untranslatable_diagnostic)] fn build_error(&self, ccx: &ConstCx<'_, 'tcx>, _: Span) -> Diag<'tcx> { - let FnCallNonConst { callee, args, span, call_source } = *self; - let ConstCx { tcx, typing_env, .. } = *ccx; + let tcx = ccx.tcx; let caller = ccx.def_id(); - let diag_trait = |err, self_ty: Ty<'_>, trait_id| { - let trait_ref = TraitRef::from_method(tcx, trait_id, args); - - match self_ty.kind() { - Param(param_ty) => { - debug!(?param_ty); - if let Some(generics) = tcx.hir_node_by_def_id(caller).generics() { - let constraint = with_no_trimmed_paths!(format!( - "~const {}", - trait_ref.print_trait_sugared(), - )); - suggest_constraining_type_param( - tcx, - generics, - err, - param_ty.name.as_str(), - &constraint, - Some(trait_ref.def_id), - None, - ); + let mut err = build_error_for_const_call( + ccx, + self.callee, + self.args, + self.span, + self.call_source, + "non", + |err, self_ty, trait_id| { + // FIXME(const_trait_impl): Do we need any of this on the non-const codepath? 
+ + let trait_ref = TraitRef::from_method(tcx, trait_id, self.args); + + match self_ty.kind() { + Param(param_ty) => { + debug!(?param_ty); + if let Some(generics) = tcx.hir_node_by_def_id(caller).generics() { + let constraint = with_no_trimmed_paths!(format!( + "~const {}", + trait_ref.print_trait_sugared(), + )); + suggest_constraining_type_param( + tcx, + generics, + err, + param_ty.name.as_str(), + &constraint, + Some(trait_ref.def_id), + None, + ); + } } - } - ty::Adt(..) => { - let (infcx, param_env) = tcx.infer_ctxt().build_with_typing_env(typing_env); - let obligation = - Obligation::new(tcx, ObligationCause::dummy(), param_env, trait_ref); - let mut selcx = SelectionContext::new(&infcx); - let implsrc = selcx.select(&obligation); - if let Ok(Some(ImplSource::UserDefined(data))) = implsrc { - // FIXME(const_trait_impl) revisit this - if !tcx.is_const_trait_impl(data.impl_def_id) { - let span = tcx.def_span(data.impl_def_id); - err.subdiagnostic(errors::NonConstImplNote { span }); + ty::Adt(..) => { + let (infcx, param_env) = + tcx.infer_ctxt().build_with_typing_env(ccx.typing_env); + let obligation = + Obligation::new(tcx, ObligationCause::dummy(), param_env, trait_ref); + let mut selcx = SelectionContext::new(&infcx); + let implsrc = selcx.select(&obligation); + if let Ok(Some(ImplSource::UserDefined(data))) = implsrc { + // FIXME(const_trait_impl) revisit this + if !tcx.is_const_trait_impl(data.impl_def_id) { + let span = tcx.def_span(data.impl_def_id); + err.subdiagnostic(errors::NonConstImplNote { span }); + } } } + _ => {} } - _ => {} + }, + ); + + if let ConstContext::Static(_) = ccx.const_kind() { + err.note(fluent_generated::const_eval_lazy_lock); + } + + err + } +} + +/// Build an error message reporting that a function call is not const (or only +/// conditionally const). In case that this call is desugared (like an operator +/// or sugar from something like a `for` loop), try to build a better error message +/// that doesn't call it a method. +fn build_error_for_const_call<'tcx>( + ccx: &ConstCx<'_, 'tcx>, + callee: DefId, + args: ty::GenericArgsRef<'tcx>, + span: Span, + call_source: CallSource, + non_or_conditionally: &'static str, + note_trait_if_possible: impl FnOnce(&mut Diag<'tcx>, Ty<'tcx>, DefId), +) -> Diag<'tcx> { + let tcx = ccx.tcx; + + let call_kind = + call_kind(tcx, ccx.typing_env, callee, args, span, call_source.from_hir_call(), None); + + debug!(?call_kind); + + let mut err = match call_kind { + CallKind::Normal { desugaring: Some((kind, self_ty)), .. } => { + macro_rules! error { + ($err:ident) => { + tcx.dcx().create_err(errors::$err { + span, + ty: self_ty, + kind: ccx.const_kind(), + non_or_conditionally, + }) + }; } - }; - - let call_kind = - call_kind(tcx, ccx.typing_env, callee, args, span, call_source.from_hir_call(), None); - - debug!(?call_kind); - - let mut err = match call_kind { - CallKind::Normal { desugaring: Some((kind, self_ty)), .. } => { - macro_rules! error { - ($err:ident) => { - tcx.dcx().create_err(errors::$err { - span, - ty: self_ty, - kind: ccx.const_kind(), - }) - }; - } - // Don't point at the trait if this is a desugaring... - // FIXME(const_trait_impl): we could perhaps do this for `Iterator`. 
- match kind { - CallDesugaringKind::ForLoopIntoIter | CallDesugaringKind::ForLoopNext => { - error!(NonConstForLoopIntoIter) - } - CallDesugaringKind::QuestionBranch => { - error!(NonConstQuestionBranch) - } - CallDesugaringKind::QuestionFromResidual => { - error!(NonConstQuestionFromResidual) - } - CallDesugaringKind::TryBlockFromOutput => { - error!(NonConstTryBlockFromOutput) - } - CallDesugaringKind::Await => { - error!(NonConstAwait) - } + // Don't point at the trait if this is a desugaring... + // FIXME(const_trait_impl): we could perhaps do this for `Iterator`. + match kind { + CallDesugaringKind::ForLoopIntoIter | CallDesugaringKind::ForLoopNext => { + error!(NonConstForLoopIntoIter) + } + CallDesugaringKind::QuestionBranch => { + error!(NonConstQuestionBranch) + } + CallDesugaringKind::QuestionFromResidual => { + error!(NonConstQuestionFromResidual) + } + CallDesugaringKind::TryBlockFromOutput => { + error!(NonConstTryBlockFromOutput) + } + CallDesugaringKind::Await => { + error!(NonConstAwait) } } - CallKind::FnCall { fn_trait_id, self_ty } => { - let note = match self_ty.kind() { - FnDef(def_id, ..) => { - let span = tcx.def_span(*def_id); - if ccx.tcx.is_const_fn(*def_id) { - span_bug!(span, "calling const FnDef errored when it shouldn't"); - } - - Some(errors::NonConstClosureNote::FnDef { span }) + } + CallKind::FnCall { fn_trait_id, self_ty } => { + let note = match self_ty.kind() { + FnDef(def_id, ..) => { + let span = tcx.def_span(*def_id); + if ccx.tcx.is_const_fn(*def_id) { + span_bug!(span, "calling const FnDef errored when it shouldn't"); } - FnPtr(..) => Some(errors::NonConstClosureNote::FnPtr), - Closure(..) => Some(errors::NonConstClosureNote::Closure), - _ => None, - }; - let mut err = tcx.dcx().create_err(errors::NonConstClosure { + Some(errors::NonConstClosureNote::FnDef { span }) + } + FnPtr(..) => Some(errors::NonConstClosureNote::FnPtr), + Closure(..) => Some(errors::NonConstClosureNote::Closure), + _ => None, + }; + + let mut err = tcx.dcx().create_err(errors::NonConstClosure { + span, + kind: ccx.const_kind(), + note, + non_or_conditionally, + }); + + note_trait_if_possible(&mut err, self_ty, fn_trait_id); + err + } + CallKind::Operator { trait_id, self_ty, .. } => { + let mut err = if let CallSource::MatchCmp = call_source { + tcx.dcx().create_err(errors::NonConstMatchEq { span, kind: ccx.const_kind(), - note, - }); - - diag_trait(&mut err, self_ty, fn_trait_id); - err - } - CallKind::Operator { trait_id, self_ty, .. } => { - let mut err = if let CallSource::MatchCmp = call_source { - tcx.dcx().create_err(errors::NonConstMatchEq { - span, - kind: ccx.const_kind(), - ty: self_ty, - }) - } else { - let mut sugg = None; - - if ccx.tcx.is_lang_item(trait_id, LangItem::PartialEq) { - match (args[0].unpack(), args[1].unpack()) { - (GenericArgKind::Type(self_ty), GenericArgKind::Type(rhs_ty)) - if self_ty == rhs_ty - && self_ty.is_ref() - && self_ty.peel_refs().is_primitive() => - { - let mut num_refs = 0; - let mut tmp_ty = self_ty; - while let rustc_middle::ty::Ref(_, inner_ty, _) = tmp_ty.kind() { - num_refs += 1; - tmp_ty = *inner_ty; - } - let deref = "*".repeat(num_refs); - - if let Ok(call_str) = - ccx.tcx.sess.source_map().span_to_snippet(span) - { - if let Some(eq_idx) = call_str.find("==") { - if let Some(rhs_idx) = call_str[(eq_idx + 2)..] 
- .find(|c: char| !c.is_whitespace()) - { - let rhs_pos = span.lo() - + BytePos::from_usize(eq_idx + 2 + rhs_idx); - let rhs_span = span.with_lo(rhs_pos).with_hi(rhs_pos); - sugg = Some(errors::ConsiderDereferencing { - deref, - span: span.shrink_to_lo(), - rhs_span, - }); - } + ty: self_ty, + non_or_conditionally, + }) + } else { + let mut sugg = None; + + if ccx.tcx.is_lang_item(trait_id, LangItem::PartialEq) { + match (args[0].unpack(), args[1].unpack()) { + (GenericArgKind::Type(self_ty), GenericArgKind::Type(rhs_ty)) + if self_ty == rhs_ty + && self_ty.is_ref() + && self_ty.peel_refs().is_primitive() => + { + let mut num_refs = 0; + let mut tmp_ty = self_ty; + while let rustc_middle::ty::Ref(_, inner_ty, _) = tmp_ty.kind() { + num_refs += 1; + tmp_ty = *inner_ty; + } + let deref = "*".repeat(num_refs); + + if let Ok(call_str) = ccx.tcx.sess.source_map().span_to_snippet(span) { + if let Some(eq_idx) = call_str.find("==") { + if let Some(rhs_idx) = + call_str[(eq_idx + 2)..].find(|c: char| !c.is_whitespace()) + { + let rhs_pos = + span.lo() + BytePos::from_usize(eq_idx + 2 + rhs_idx); + let rhs_span = span.with_lo(rhs_pos).with_hi(rhs_pos); + sugg = Some(errors::ConsiderDereferencing { + deref, + span: span.shrink_to_lo(), + rhs_span, + }); } } } - _ => {} } + _ => {} } - tcx.dcx().create_err(errors::NonConstOperator { - span, - kind: ccx.const_kind(), - sugg, - }) - }; - - diag_trait(&mut err, self_ty, trait_id); - err - } - CallKind::DerefCoercion { deref_target, deref_target_ty, self_ty } => { - // Check first whether the source is accessible (issue #87060) - let target = if tcx.sess.source_map().is_span_accessible(deref_target) { - Some(deref_target) - } else { - None - }; - - let mut err = tcx.dcx().create_err(errors::NonConstDerefCoercion { + } + tcx.dcx().create_err(errors::NonConstOperator { span, - ty: self_ty, kind: ccx.const_kind(), - target_ty: deref_target_ty, - deref_target: target, - }); + sugg, + non_or_conditionally, + }) + }; - diag_trait(&mut err, self_ty, tcx.require_lang_item(LangItem::Deref, Some(span))); - err - } - _ if tcx.opt_parent(callee) == tcx.get_diagnostic_item(sym::ArgumentMethods) => { - ccx.dcx().create_err(errors::NonConstFmtMacroCall { span, kind: ccx.const_kind() }) - } - _ => ccx.dcx().create_err(errors::NonConstFnCall { + note_trait_if_possible(&mut err, self_ty, trait_id); + err + } + CallKind::DerefCoercion { deref_target_span, deref_target_ty, self_ty } => { + // Check first whether the source is accessible (issue #87060) + let target = if let Some(deref_target_span) = deref_target_span + && tcx.sess.source_map().is_span_accessible(deref_target_span) + { + Some(deref_target_span) + } else { + None + }; + + let mut err = tcx.dcx().create_err(errors::NonConstDerefCoercion { span, - def_path_str: ccx.tcx.def_path_str_with_args(callee, args), + ty: self_ty, kind: ccx.const_kind(), - }), - }; + target_ty: deref_target_ty, + deref_target: target, + non_or_conditionally, + }); + + note_trait_if_possible( + &mut err, + self_ty, + tcx.require_lang_item(LangItem::Deref, Some(span)), + ); + err + } + _ if tcx.opt_parent(callee) == tcx.get_diagnostic_item(sym::ArgumentMethods) => { + ccx.dcx().create_err(errors::NonConstFmtMacroCall { + span, + kind: ccx.const_kind(), + non_or_conditionally, + }) + } + _ => ccx.dcx().create_err(errors::NonConstFnCall { + span, + def_descr: ccx.tcx.def_descr(callee), + def_path_str: ccx.tcx.def_path_str_with_args(callee, args), + kind: ccx.const_kind(), + non_or_conditionally, + }), + }; - err.note(format!( - "calls in {}s 
are limited to constant functions, \ + err.note(format!( + "calls in {}s are limited to constant functions, \ tuple structs and tuple variants", - ccx.const_kind(), - )); - - if let ConstContext::Static(_) = ccx.const_kind() { - err.note(fluent_generated::const_eval_lazy_lock); - } + ccx.const_kind(), + )); - err - } + err } /// A call to an `#[unstable]` const fn or `#[rustc_const_unstable]` function. diff --git a/compiler/rustc_const_eval/src/check_consts/post_drop_elaboration.rs b/compiler/rustc_const_eval/src/check_consts/post_drop_elaboration.rs index 951e19b470bab..16e142a85ee6f 100644 --- a/compiler/rustc_const_eval/src/check_consts/post_drop_elaboration.rs +++ b/compiler/rustc_const_eval/src/check_consts/post_drop_elaboration.rs @@ -1,7 +1,7 @@ use rustc_middle::mir::visit::Visitor; use rustc_middle::mir::{self, BasicBlock, Location}; use rustc_middle::ty::TyCtxt; -use rustc_span::symbol::sym; +use rustc_span::sym; use tracing::trace; use super::ConstCx; diff --git a/compiler/rustc_const_eval/src/check_consts/qualifs.rs b/compiler/rustc_const_eval/src/check_consts/qualifs.rs index e244b50a4b5d9..b1b7fb406b105 100644 --- a/compiler/rustc_const_eval/src/check_consts/qualifs.rs +++ b/compiler/rustc_const_eval/src/check_consts/qualifs.rs @@ -230,9 +230,7 @@ where Q::in_any_value_of_ty(cx, rvalue.ty(cx.body, cx.tcx)) } - Rvalue::Discriminant(place) | Rvalue::Len(place) => { - in_place::(cx, in_local, place.as_ref()) - } + Rvalue::Discriminant(place) => in_place::(cx, in_local, place.as_ref()), Rvalue::CopyForDeref(place) => in_place::(cx, in_local, place.as_ref()), diff --git a/compiler/rustc_const_eval/src/check_consts/resolver.rs b/compiler/rustc_const_eval/src/check_consts/resolver.rs index 763c37a41aff0..5a6e7ab2beef4 100644 --- a/compiler/rustc_const_eval/src/check_consts/resolver.rs +++ b/compiler/rustc_const_eval/src/check_consts/resolver.rs @@ -5,7 +5,7 @@ use std::fmt; use std::marker::PhantomData; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::MixedBitSet; use rustc_middle::mir::visit::Visitor; use rustc_middle::mir::{ self, BasicBlock, CallReturnPlaces, Local, Location, Statement, StatementKind, TerminatorEdges, @@ -197,7 +197,6 @@ where | mir::Rvalue::CopyForDeref(..) | mir::Rvalue::ThreadLocalRef(..) | mir::Rvalue::Repeat(..) - | mir::Rvalue::Len(..) | mir::Rvalue::BinaryOp(..) | mir::Rvalue::NullaryOp(..) | mir::Rvalue::UnaryOp(..) @@ -246,12 +245,14 @@ where } #[derive(Debug, PartialEq, Eq)] +/// The state for the `FlowSensitiveAnalysis` dataflow analysis. This domain is likely homogeneous, +/// and has a big size, so we use a bitset that can be sparse (c.f. issue #134404). pub(super) struct State { /// Describes whether a local contains qualif. - pub qualif: BitSet, + pub qualif: MixedBitSet, /// Describes whether a local's address escaped and it might become qualified as a result an /// indirect mutation. 
- pub borrow: BitSet, + pub borrow: MixedBitSet, } impl Clone for State { @@ -320,8 +321,8 @@ where fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain { State { - qualif: BitSet::new_empty(body.local_decls.len()), - borrow: BitSet::new_empty(body.local_decls.len()), + qualif: MixedBitSet::new_empty(body.local_decls.len()), + borrow: MixedBitSet::new_empty(body.local_decls.len()), } } diff --git a/compiler/rustc_const_eval/src/const_eval/dummy_machine.rs b/compiler/rustc_const_eval/src/const_eval/dummy_machine.rs index 817acfcca74bc..46dcebc46e9c8 100644 --- a/compiler/rustc_const_eval/src/const_eval/dummy_machine.rs +++ b/compiler/rustc_const_eval/src/const_eval/dummy_machine.rs @@ -1,9 +1,11 @@ use rustc_middle::mir::interpret::{AllocId, ConstAllocation, InterpResult}; use rustc_middle::mir::*; use rustc_middle::query::TyCtxtAt; +use rustc_middle::ty::Ty; use rustc_middle::ty::layout::TyAndLayout; use rustc_middle::{bug, span_bug, ty}; use rustc_span::def_id::DefId; +use rustc_target::callconv::FnAbi; use crate::interpret::{ self, HasStaticRootDefId, ImmTy, Immediate, InterpCx, PointerArithmetic, interp_ok, @@ -86,7 +88,7 @@ impl<'tcx> interpret::Machine<'tcx> for DummyMachine { fn find_mir_or_eval_fn( _ecx: &mut InterpCx<'tcx, Self>, _instance: ty::Instance<'tcx>, - _abi: rustc_abi::ExternAbi, + _abi: &FnAbi<'tcx, Ty<'tcx>>, _args: &[interpret::FnArg<'tcx, Self::Provenance>], _destination: &interpret::MPlaceTy<'tcx, Self::Provenance>, _target: Option, diff --git a/compiler/rustc_const_eval/src/const_eval/fn_queries.rs b/compiler/rustc_const_eval/src/const_eval/fn_queries.rs index babf99c4c1fe9..35c3e3ed3150e 100644 --- a/compiler/rustc_const_eval/src/const_eval/fn_queries.rs +++ b/compiler/rustc_const_eval/src/const_eval/fn_queries.rs @@ -4,31 +4,29 @@ use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_middle::query::Providers; use rustc_middle::ty::TyCtxt; -fn parent_impl_constness(tcx: TyCtxt<'_>, def_id: LocalDefId) -> hir::Constness { +fn parent_impl_or_trait_constness(tcx: TyCtxt<'_>, def_id: LocalDefId) -> hir::Constness { let parent_id = tcx.local_parent(def_id); - if matches!(tcx.def_kind(parent_id), DefKind::Impl { .. }) - && let Some(header) = tcx.impl_trait_header(parent_id) - { - header.constness - } else { - hir::Constness::NotConst + match tcx.def_kind(parent_id) { + DefKind::Impl { of_trait: true } => tcx.impl_trait_header(parent_id).unwrap().constness, + DefKind::Trait => { + if tcx.is_const_trait(parent_id.into()) { + hir::Constness::Const + } else { + hir::Constness::NotConst + } + } + _ => hir::Constness::NotConst, } } -/// Checks whether an item is considered to be `const`. If it is a constructor, it is const. -/// If it is an assoc method or function, -/// return if it has a `const` modifier. If it is an intrinsic, report whether said intrinsic -/// has a `rustc_const_{un,}stable` attribute. Otherwise, panic. +/// Checks whether a function-like definition is considered to be `const`. fn constness(tcx: TyCtxt<'_>, def_id: LocalDefId) -> hir::Constness { let node = tcx.hir_node_by_def_id(def_id); match node { - hir::Node::Ctor(hir::VariantData::Tuple(..)) - | hir::Node::ImplItem(hir::ImplItem { kind: hir::ImplItemKind::Const(..), .. }) => { - hir::Constness::Const - } - hir::Node::ForeignItem(_) => { - // Foreign items cannot be evaluated at compile-time. + hir::Node::Ctor(hir::VariantData::Tuple(..)) => hir::Constness::Const, + hir::Node::ForeignItem(item) if let hir::ForeignItemKind::Fn(..) 
= item.kind => { + // Foreign functions cannot be evaluated at compile-time. hir::Constness::NotConst } hir::Node::Expr(e) if let hir::ExprKind::Closure(c) = e.kind => c.constness, @@ -40,7 +38,7 @@ fn constness(tcx: TyCtxt<'_>, def_id: LocalDefId) -> hir::Constness { // If the function itself is not annotated with `const`, it may still be a `const fn` // if it resides in a const trait impl. - parent_impl_constness(tcx, def_id) + parent_impl_or_trait_constness(tcx, def_id) } else { tcx.dcx().span_bug( tcx.def_span(def_id), diff --git a/compiler/rustc_const_eval/src/const_eval/machine.rs b/compiler/rustc_const_eval/src/const_eval/machine.rs index 2368385179967..cfdfbdb7880b2 100644 --- a/compiler/rustc_const_eval/src/const_eval/machine.rs +++ b/compiler/rustc_const_eval/src/const_eval/machine.rs @@ -2,7 +2,7 @@ use std::borrow::{Borrow, Cow}; use std::fmt; use std::hash::Hash; -use rustc_abi::{Align, ExternAbi, Size}; +use rustc_abi::{Align, Size}; use rustc_ast::Mutability; use rustc_data_structures::fx::{FxHashMap, FxIndexMap, IndexEntry}; use rustc_hir::def_id::{DefId, LocalDefId}; @@ -13,8 +13,8 @@ use rustc_middle::query::TyCtxtAt; use rustc_middle::ty::layout::{HasTypingEnv, TyAndLayout}; use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_middle::{bug, mir}; -use rustc_span::Span; -use rustc_span::symbol::{Symbol, sym}; +use rustc_span::{Span, Symbol, sym}; +use rustc_target::callconv::FnAbi; use tracing::debug; use super::error::*; @@ -340,7 +340,7 @@ impl<'tcx> interpret::Machine<'tcx> for CompileTimeMachine<'tcx> { fn find_mir_or_eval_fn( ecx: &mut InterpCx<'tcx, Self>, orig_instance: ty::Instance<'tcx>, - _abi: ExternAbi, + _abi: &FnAbi<'tcx, Ty<'tcx>>, args: &[FnArg<'tcx>], dest: &MPlaceTy<'tcx>, ret: Option, @@ -360,10 +360,7 @@ impl<'tcx> interpret::Machine<'tcx> for CompileTimeMachine<'tcx> { // sensitive check here. But we can at least rule out functions that are not const at // all. That said, we have to allow calling functions inside a trait marked with // #[const_trait]. These *are* const-checked! - // FIXME(const_trait_impl): why does `is_const_fn` not classify them as const? - if (!ecx.tcx.is_const_fn(def) && !ecx.tcx.is_const_default_method(def)) - || ecx.tcx.has_attr(def, sym::rustc_do_not_const_check) - { + if !ecx.tcx.is_const_fn(def) || ecx.tcx.has_attr(def, sym::rustc_do_not_const_check) { // We certainly do *not* want to actually call the fn // though, so be sure we return here. throw_unsup_format!("calling non-const function `{}`", instance) diff --git a/compiler/rustc_const_eval/src/const_eval/valtrees.rs b/compiler/rustc_const_eval/src/const_eval/valtrees.rs index 6f51b09323d9b..4ff8aa9a3b418 100644 --- a/compiler/rustc_const_eval/src/const_eval/valtrees.rs +++ b/compiler/rustc_const_eval/src/const_eval/valtrees.rs @@ -178,7 +178,8 @@ fn const_to_valtree_inner<'tcx>( | ty::Closure(..) | ty::CoroutineClosure(..) | ty::Coroutine(..) - | ty::CoroutineWitness(..) => Err(ValTreeCreationError::NonSupportedType(ty)), + | ty::CoroutineWitness(..) + | ty::UnsafeBinder(_) => Err(ValTreeCreationError::NonSupportedType(ty)), } } @@ -358,7 +359,10 @@ pub fn valtree_to_const_value<'tcx>( | ty::FnPtr(..) | ty::Str | ty::Slice(_) - | ty::Dynamic(..) => bug!("no ValTree should have been created for type {:?}", ty.kind()), + | ty::Dynamic(..) 
+ | ty::UnsafeBinder(_) => { + bug!("no ValTree should have been created for type {:?}", ty.kind()) + } } } diff --git a/compiler/rustc_const_eval/src/errors.rs b/compiler/rustc_const_eval/src/errors.rs index 80236ee05b7ff..3fe78171cd944 100644 --- a/compiler/rustc_const_eval/src/errors.rs +++ b/compiler/rustc_const_eval/src/errors.rs @@ -174,16 +174,7 @@ pub(crate) struct NonConstFmtMacroCall { #[primary_span] pub span: Span, pub kind: ConstContext, -} - -#[derive(Diagnostic)] -#[diag(const_eval_conditionally_const_call)] -pub(crate) struct ConditionallyConstCall { - #[primary_span] - pub span: Span, - pub def_path_str: String, - pub def_descr: &'static str, - pub kind: ConstContext, + pub non_or_conditionally: &'static str, } #[derive(Diagnostic)] @@ -192,7 +183,9 @@ pub(crate) struct NonConstFnCall { #[primary_span] pub span: Span, pub def_path_str: String, + pub def_descr: &'static str, pub kind: ConstContext, + pub non_or_conditionally: &'static str, } #[derive(Diagnostic)] @@ -292,68 +285,75 @@ pub struct RawBytesNote { // FIXME(fee1-dead) do not use stringly typed `ConstContext` #[derive(Diagnostic)] -#[diag(const_eval_match_eq_non_const, code = E0015)] +#[diag(const_eval_non_const_match_eq, code = E0015)] #[note] pub struct NonConstMatchEq<'tcx> { #[primary_span] pub span: Span, pub ty: Ty<'tcx>, pub kind: ConstContext, + pub non_or_conditionally: &'static str, } #[derive(Diagnostic)] -#[diag(const_eval_for_loop_into_iter_non_const, code = E0015)] +#[diag(const_eval_non_const_for_loop_into_iter, code = E0015)] pub struct NonConstForLoopIntoIter<'tcx> { #[primary_span] pub span: Span, pub ty: Ty<'tcx>, pub kind: ConstContext, + pub non_or_conditionally: &'static str, } #[derive(Diagnostic)] -#[diag(const_eval_question_branch_non_const, code = E0015)] +#[diag(const_eval_non_const_question_branch, code = E0015)] pub struct NonConstQuestionBranch<'tcx> { #[primary_span] pub span: Span, pub ty: Ty<'tcx>, pub kind: ConstContext, + pub non_or_conditionally: &'static str, } #[derive(Diagnostic)] -#[diag(const_eval_question_from_residual_non_const, code = E0015)] +#[diag(const_eval_non_const_question_from_residual, code = E0015)] pub struct NonConstQuestionFromResidual<'tcx> { #[primary_span] pub span: Span, pub ty: Ty<'tcx>, pub kind: ConstContext, + pub non_or_conditionally: &'static str, } #[derive(Diagnostic)] -#[diag(const_eval_try_block_from_output_non_const, code = E0015)] +#[diag(const_eval_non_const_try_block_from_output, code = E0015)] pub struct NonConstTryBlockFromOutput<'tcx> { #[primary_span] pub span: Span, pub ty: Ty<'tcx>, pub kind: ConstContext, + pub non_or_conditionally: &'static str, } #[derive(Diagnostic)] -#[diag(const_eval_await_non_const, code = E0015)] +#[diag(const_eval_non_const_await, code = E0015)] pub struct NonConstAwait<'tcx> { #[primary_span] pub span: Span, pub ty: Ty<'tcx>, pub kind: ConstContext, + pub non_or_conditionally: &'static str, } #[derive(Diagnostic)] -#[diag(const_eval_closure_non_const, code = E0015)] +#[diag(const_eval_non_const_closure, code = E0015)] pub struct NonConstClosure { #[primary_span] pub span: Span, pub kind: ConstContext, #[subdiagnostic] pub note: Option, + pub non_or_conditionally: &'static str, } #[derive(Subdiagnostic)] @@ -380,17 +380,18 @@ pub struct ConsiderDereferencing { } #[derive(Diagnostic)] -#[diag(const_eval_operator_non_const, code = E0015)] +#[diag(const_eval_non_const_operator, code = E0015)] pub struct NonConstOperator { #[primary_span] pub span: Span, pub kind: ConstContext, #[subdiagnostic] pub sugg: Option, 
+ pub non_or_conditionally: &'static str, } #[derive(Diagnostic)] -#[diag(const_eval_deref_coercion_non_const, code = E0015)] +#[diag(const_eval_non_const_deref_coercion, code = E0015)] #[note] pub struct NonConstDerefCoercion<'tcx> { #[primary_span] @@ -400,6 +401,7 @@ pub struct NonConstDerefCoercion<'tcx> { pub target_ty: Ty<'tcx>, #[note(const_eval_target_note)] pub deref_target: Option, + pub non_or_conditionally: &'static str, } #[derive(Diagnostic)] diff --git a/compiler/rustc_const_eval/src/interpret/call.rs b/compiler/rustc_const_eval/src/interpret/call.rs index ed4a1a9e6478b..e6a34193c9d69 100644 --- a/compiler/rustc_const_eval/src/interpret/call.rs +++ b/compiler/rustc_const_eval/src/interpret/call.rs @@ -519,7 +519,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { return M::call_extra_fn( self, extra, - caller_abi, + caller_fn_abi, args, destination, target, @@ -566,11 +566,12 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { | ty::InstanceKind::ThreadLocalShim(..) | ty::InstanceKind::AsyncDropGlueCtorShim(..) | ty::InstanceKind::Item(_) => { - // We need MIR for this fn + // We need MIR for this fn. + // Note that this can be an intrinsic, if we are executing its fallback body. let Some((body, instance)) = M::find_mir_or_eval_fn( self, instance, - caller_abi, + caller_fn_abi, args, destination, target, @@ -883,19 +884,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { .local_to_op(mir::RETURN_PLACE, None) .expect("return place should always be live"); let dest = self.frame().return_place.clone(); - let res = if self.stack().len() == 1 { - // The initializer of constants and statics will get validated separately - // after the constant has been fully evaluated. While we could fall back to the default - // code path, that will cause -Zenforce-validity to cycle on static initializers. - // Reading from a static's memory is not allowed during its evaluation, and will always - // trigger a cycle error. Validation must read from the memory of the current item. - // For Miri this means we do not validate the root frame return value, - // but Miri anyway calls `read_target_isize` on that so separate validation - // is not needed. - self.copy_op_no_dest_validation(&op, &dest) - } else { - self.copy_op_allow_transmute(&op, &dest) - }; + let res = self.copy_op_allow_transmute(&op, &dest); trace!("return value: {:?}", self.dump_place(&dest.into())); // We delay actually short-circuiting on this error until *after* the stack frame is // popped, since we want this error to be attributed to the caller, whose type defines diff --git a/compiler/rustc_const_eval/src/interpret/discriminant.rs b/compiler/rustc_const_eval/src/interpret/discriminant.rs index 6faac1582ab8d..2f0b1cb6d1ee5 100644 --- a/compiler/rustc_const_eval/src/interpret/discriminant.rs +++ b/compiler/rustc_const_eval/src/interpret/discriminant.rs @@ -44,7 +44,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { } } - /// Read discriminant, return the runtime value as well as the variant index. + /// Read discriminant, return the variant index. /// Can also legally be called on non-enums (e.g. through the discriminant_value intrinsic)! /// /// Will never return an uninhabited variant. @@ -65,21 +65,17 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { // We use "tag" to refer to how the discriminant is encoded in memory, which can be either // straight-forward (`TagEncoding::Direct`) or with a niche (`TagEncoding::Niche`). 
let (tag_scalar_layout, tag_encoding, tag_field) = match op.layout().variants { + Variants::Empty => { + throw_ub!(UninhabitedEnumVariantRead(None)); + } Variants::Single { index } => { - // Do some extra checks on enums. - if ty.is_enum() { - // Hilariously, `Single` is used even for 0-variant enums. - // (See https://github.com/rust-lang/rust/issues/89765). - if ty.ty_adt_def().unwrap().variants().is_empty() { - throw_ub!(UninhabitedEnumVariantRead(index)) - } + if op.layout().is_uninhabited() { // For consistency with `write_discriminant`, and to make sure that // `project_downcast` cannot fail due to strange layouts, we declare immediate UB - // for uninhabited variants. - if op.layout().for_variant(self, index).is_uninhabited() { - throw_ub!(UninhabitedEnumVariantRead(index)) - } + // for uninhabited enums. + throw_ub!(UninhabitedEnumVariantRead(Some(index))); } + // Since the type is inhabited, there must be an index. return interp_ok(index); } Variants::Multiple { tag, ref tag_encoding, tag_field, .. } => { @@ -199,11 +195,13 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { // `uninhabited_enum_branching` MIR pass. It also ensures consistency with // `write_discriminant`. if op.layout().for_variant(self, index).is_uninhabited() { - throw_ub!(UninhabitedEnumVariantRead(index)) + throw_ub!(UninhabitedEnumVariantRead(Some(index))) } interp_ok(index) } + /// Read discriminant, return the user-visible discriminant. + /// Can also legally be called on non-enums (e.g. through the discriminant_value intrinsic)! pub fn discriminant_for_variant( &self, ty: Ty<'tcx>, @@ -243,6 +241,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { } match layout.variants { + abi::Variants::Empty => unreachable!("we already handled uninhabited types"), abi::Variants::Single { .. } => { // The tag of a `Single` enum is like the tag of the niched // variant: there's no tag as the discriminant is encoded diff --git a/compiler/rustc_const_eval/src/interpret/intrinsics.rs b/compiler/rustc_const_eval/src/interpret/intrinsics.rs index a79923e855571..0664a882c1d50 100644 --- a/compiler/rustc_const_eval/src/interpret/intrinsics.rs +++ b/compiler/rustc_const_eval/src/interpret/intrinsics.rs @@ -11,7 +11,7 @@ use rustc_middle::mir::{self, BinOp, ConstValue, NonDivergingIntrinsic}; use rustc_middle::ty::layout::{LayoutOf as _, TyAndLayout, ValidityRequirement}; use rustc_middle::ty::{GenericArgsRef, Ty, TyCtxt}; use rustc_middle::{bug, ty}; -use rustc_span::symbol::{Symbol, sym}; +use rustc_span::{Symbol, sym}; use tracing::trace; use super::memory::MemoryKind; @@ -90,6 +90,7 @@ pub(crate) fn eval_nullary_intrinsic<'tcx>( | ty::CoroutineClosure(_, _) | ty::Coroutine(_, _) | ty::CoroutineWitness(..) + | ty::UnsafeBinder(_) | ty::Never | ty::Tuple(_) | ty::Error(_) => ConstValue::from_target_usize(0u64, &tcx), @@ -423,8 +424,8 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { let result = self.raw_eq_intrinsic(&args[0], &args[1])?; self.write_scalar(result, dest)?; } - sym::typed_swap => { - self.typed_swap_intrinsic(&args[0], &args[1])?; + sym::typed_swap_nonoverlapping => { + self.typed_swap_nonoverlapping_intrinsic(&args[0], &args[1])?; } sym::vtable_size => { @@ -637,19 +638,35 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { } /// Does a *typed* swap of `*left` and `*right`. 
- fn typed_swap_intrinsic( + fn typed_swap_nonoverlapping_intrinsic( &mut self, left: &OpTy<'tcx, >::Provenance>, right: &OpTy<'tcx, >::Provenance>, ) -> InterpResult<'tcx> { let left = self.deref_pointer(left)?; let right = self.deref_pointer(right)?; - debug_assert_eq!(left.layout, right.layout); + assert_eq!(left.layout, right.layout); + assert!(left.layout.is_sized()); let kind = MemoryKind::Stack; let temp = self.allocate(left.layout, kind)?; - self.copy_op(&left, &temp)?; - self.copy_op(&right, &left)?; - self.copy_op(&temp, &right)?; + self.copy_op(&left, &temp)?; // checks alignment of `left` + + // We want to always enforce non-overlapping, even if this is a scalar type. + // Therefore we directly use the underlying `mem_copy` here. + self.mem_copy(right.ptr(), left.ptr(), left.layout.size, /*nonoverlapping*/ true)?; + // This means we also need to do the validation of the value that used to be in `right` + // ourselves. This value is now in `left.` The one that started out in `left` already got + // validated by the copy above. + if M::enforce_validity(self, left.layout) { + self.validate_operand( + &left.clone().into(), + M::enforce_validity_recursively(self, left.layout), + /*reset_provenance_and_padding*/ true, + )?; + } + + self.copy_op(&temp, &right)?; // checks alignment of `right` + self.deallocate_ptr(temp.ptr(), None, kind)?; interp_ok(()) } diff --git a/compiler/rustc_const_eval/src/interpret/machine.rs b/compiler/rustc_const_eval/src/interpret/machine.rs index 9ac2a024ccf37..36e5a2ff750ae 100644 --- a/compiler/rustc_const_eval/src/interpret/machine.rs +++ b/compiler/rustc_const_eval/src/interpret/machine.rs @@ -6,7 +6,7 @@ use std::borrow::{Borrow, Cow}; use std::fmt::Debug; use std::hash::Hash; -use rustc_abi::{Align, ExternAbi, Size}; +use rustc_abi::{Align, Size}; use rustc_apfloat::{Float, FloatConvert}; use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece}; use rustc_middle::query::TyCtxtAt; @@ -15,6 +15,7 @@ use rustc_middle::ty::layout::TyAndLayout; use rustc_middle::{mir, ty}; use rustc_span::Span; use rustc_span::def_id::DefId; +use rustc_target::callconv::FnAbi; use super::{ AllocBytes, AllocId, AllocKind, AllocRange, Allocation, CTFE_ALLOC_SALT, ConstAllocation, @@ -201,7 +202,7 @@ pub trait Machine<'tcx>: Sized { fn find_mir_or_eval_fn( ecx: &mut InterpCx<'tcx, Self>, instance: ty::Instance<'tcx>, - abi: ExternAbi, + abi: &FnAbi<'tcx, Ty<'tcx>>, args: &[FnArg<'tcx, Self::Provenance>], destination: &MPlaceTy<'tcx, Self::Provenance>, target: Option, @@ -213,7 +214,7 @@ pub trait Machine<'tcx>: Sized { fn call_extra_fn( ecx: &mut InterpCx<'tcx, Self>, fn_val: Self::ExtraFnVal, - abi: ExternAbi, + abi: &FnAbi<'tcx, Ty<'tcx>>, args: &[FnArg<'tcx, Self::Provenance>], destination: &MPlaceTy<'tcx, Self::Provenance>, target: Option, @@ -656,7 +657,7 @@ pub macro compile_time_machine(<$tcx: lifetime>) { fn call_extra_fn( _ecx: &mut InterpCx<$tcx, Self>, fn_val: !, - _abi: ExternAbi, + _abi: &FnAbi<$tcx, Ty<$tcx>>, _args: &[FnArg<$tcx>], _destination: &MPlaceTy<$tcx, Self::Provenance>, _target: Option, diff --git a/compiler/rustc_const_eval/src/interpret/memory.rs b/compiler/rustc_const_eval/src/interpret/memory.rs index 027ba9644cba9..0790db984e345 100644 --- a/compiler/rustc_const_eval/src/interpret/memory.rs +++ b/compiler/rustc_const_eval/src/interpret/memory.rs @@ -1359,6 +1359,8 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { let src_alloc = self.get_alloc_raw(src_alloc_id)?; let src_range = alloc_range(src_offset, size); 
assert!(!self.memory.validation_in_progress, "we can't be copying during validation"); + // For the overlapping case, it is crucial that we trigger the read hook + // before the write hook -- the aliasing model cares about the order. M::before_memory_read( tcx, &self.machine, diff --git a/compiler/rustc_const_eval/src/interpret/operand.rs b/compiler/rustc_const_eval/src/interpret/operand.rs index b861ffb6110d2..5d905cff1f216 100644 --- a/compiler/rustc_const_eval/src/interpret/operand.rs +++ b/compiler/rustc_const_eval/src/interpret/operand.rs @@ -704,8 +704,8 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { pub fn read_str(&self, mplace: &MPlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx, &str> { let len = mplace.len(self)?; let bytes = self.read_bytes_ptr_strip_provenance(mplace.ptr(), Size::from_bytes(len))?; - let str = std::str::from_utf8(bytes).map_err(|err| err_ub!(InvalidStr(err)))?; - interp_ok(str) + let s = std::str::from_utf8(bytes).map_err(|err| err_ub!(InvalidStr(err)))?; + interp_ok(s) } /// Read from a local of the current frame. Convenience method for [`InterpCx::local_at_frame_to_op`]. diff --git a/compiler/rustc_const_eval/src/interpret/operator.rs b/compiler/rustc_const_eval/src/interpret/operator.rs index 8b7b78c7129a3..5fa632fc57aaf 100644 --- a/compiler/rustc_const_eval/src/interpret/operator.rs +++ b/compiler/rustc_const_eval/src/interpret/operator.rs @@ -6,7 +6,7 @@ use rustc_middle::mir::interpret::{InterpResult, PointerArithmetic, Scalar}; use rustc_middle::ty::layout::{LayoutOf, TyAndLayout}; use rustc_middle::ty::{self, FloatTy, ScalarInt, Ty}; use rustc_middle::{bug, mir, span_bug}; -use rustc_span::symbol::sym; +use rustc_span::sym; use tracing::trace; use super::{ImmTy, InterpCx, Machine, MemPlaceMeta, interp_ok, throw_ub}; diff --git a/compiler/rustc_const_eval/src/interpret/place.rs b/compiler/rustc_const_eval/src/interpret/place.rs index 810e9356b26c4..c97922ac132b5 100644 --- a/compiler/rustc_const_eval/src/interpret/place.rs +++ b/compiler/rustc_const_eval/src/interpret/place.rs @@ -773,22 +773,6 @@ where interp_ok(()) } - /// Copies the data from an operand to a place. - /// The layouts of the `src` and `dest` may disagree. - /// Does not perform validation of the destination. - /// The only known use case for this function is checking the return - /// value of a static during stack frame popping. - #[inline(always)] - pub(super) fn copy_op_no_dest_validation( - &mut self, - src: &impl Projectable<'tcx, M::Provenance>, - dest: &impl Writeable<'tcx, M::Provenance>, - ) -> InterpResult<'tcx> { - self.copy_op_inner( - src, dest, /* allow_transmute */ true, /* validate_dest */ false, - ) - } - /// Copies the data from an operand to a place. /// The layouts of the `src` and `dest` may disagree. #[inline(always)] @@ -797,9 +781,7 @@ where src: &impl Projectable<'tcx, M::Provenance>, dest: &impl Writeable<'tcx, M::Provenance>, ) -> InterpResult<'tcx> { - self.copy_op_inner( - src, dest, /* allow_transmute */ true, /* validate_dest */ true, - ) + self.copy_op_inner(src, dest, /* allow_transmute */ true) } /// Copies the data from an operand to a place. @@ -810,9 +792,7 @@ where src: &impl Projectable<'tcx, M::Provenance>, dest: &impl Writeable<'tcx, M::Provenance>, ) -> InterpResult<'tcx> { - self.copy_op_inner( - src, dest, /* allow_transmute */ false, /* validate_dest */ true, - ) + self.copy_op_inner(src, dest, /* allow_transmute */ false) } /// Copies the data from an operand to a place. 
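For reference, the `typed_swap_nonoverlapping` intrinsic change earlier in this diff routes one side of the swap through a temporary and copies the other side with an explicitly non-overlapping copy, then re-validates the value that changed places. A plain-Rust sketch of that shape, using `std::ptr` rather than the interpreter's own copy and validation machinery (illustrative only, not compiler code):

```rust
/// Illustrative sketch: swap two non-overlapping places of the same type through a
/// temporary, mirroring the order used above (left -> temp, right -> left via a
/// nonoverlapping copy, temp -> right).
///
/// # Safety
/// `left` and `right` must be valid for reads and writes and must not overlap.
pub unsafe fn typed_swap_nonoverlapping_sketch<T>(left: *mut T, right: *mut T) {
    let temp: T = std::ptr::read(left);
    std::ptr::copy_nonoverlapping(right, left, 1);
    std::ptr::write(right, temp);
}
```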
@@ -824,22 +804,21 @@ where src: &impl Projectable<'tcx, M::Provenance>, dest: &impl Writeable<'tcx, M::Provenance>, allow_transmute: bool, - validate_dest: bool, ) -> InterpResult<'tcx> { // These are technically *two* typed copies: `src` is a not-yet-loaded value, - // so we're going a typed copy at `src` type from there to some intermediate storage. + // so we're doing a typed copy at `src` type from there to some intermediate storage. // And then we're doing a second typed copy from that intermediate storage to `dest`. // But as an optimization, we only make a single direct copy here. // Do the actual copy. self.copy_op_no_validate(src, dest, allow_transmute)?; - if validate_dest && M::enforce_validity(self, dest.layout()) { + if M::enforce_validity(self, dest.layout()) { let dest = dest.to_place(); // Given that there were two typed copies, we have to ensure this is valid at both types, // and we have to ensure this loses provenance and padding according to both types. // But if the types are identical, we only do one pass. - if allow_transmute && src.layout().ty != dest.layout().ty { + if src.layout().ty != dest.layout().ty { self.validate_operand( &dest.transmute(src.layout(), self)?, M::enforce_validity_recursively(self, src.layout()), @@ -1038,9 +1017,9 @@ where /// This is allocated in immutable global memory and deduplicated. pub fn allocate_str_dedup( &mut self, - str: &str, + s: &str, ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> { - let bytes = str.as_bytes(); + let bytes = s.as_bytes(); let ptr = self.allocate_bytes_dedup(bytes)?; // Create length metadata for the string. diff --git a/compiler/rustc_const_eval/src/interpret/projection.rs b/compiler/rustc_const_eval/src/interpret/projection.rs index 65a93784e2cc7..996142d7b03bd 100644 --- a/compiler/rustc_const_eval/src/interpret/projection.rs +++ b/compiler/rustc_const_eval/src/interpret/projection.rs @@ -325,7 +325,7 @@ where let actual_to = if from_end { if from.checked_add(to).is_none_or(|to| to > len) { // This can only be reached in ConstProp and non-rustc-MIR. - throw_ub!(BoundsCheckFailed { len: len, index: from.saturating_add(to) }); + throw_ub!(BoundsCheckFailed { len, index: from.saturating_add(to) }); } len.checked_sub(to).unwrap() } else { diff --git a/compiler/rustc_const_eval/src/interpret/stack.rs b/compiler/rustc_const_eval/src/interpret/stack.rs index 6512675530a45..7d0e0492792a9 100644 --- a/compiler/rustc_const_eval/src/interpret/stack.rs +++ b/compiler/rustc_const_eval/src/interpret/stack.rs @@ -505,6 +505,8 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { // We don't want to do any queries, so there is not much we can do with ADTs. ty::Adt(..) => false, + ty::UnsafeBinder(ty) => is_very_trivially_sized(ty.skip_binder()), + ty::Alias(..) | ty::Param(_) | ty::Placeholder(..) 
=> false, ty::Infer(ty::TyVar(_)) => false, diff --git a/compiler/rustc_const_eval/src/interpret/step.rs b/compiler/rustc_const_eval/src/interpret/step.rs index a26c2eca107c5..32e77fe1024da 100644 --- a/compiler/rustc_const_eval/src/interpret/step.rs +++ b/compiler/rustc_const_eval/src/interpret/step.rs @@ -15,7 +15,7 @@ use tracing::{info, instrument, trace}; use super::{ FnArg, FnVal, ImmTy, Immediate, InterpCx, InterpResult, Machine, MemPlaceMeta, PlaceTy, - Projectable, Scalar, interp_ok, throw_ub, + Projectable, interp_ok, throw_ub, }; use crate::util; @@ -218,12 +218,6 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { self.write_repeat(operand, &dest)?; } - Len(place) => { - let src = self.eval_place(place)?; - let len = src.len(self)?; - self.write_scalar(Scalar::from_target_usize(len, self), &dest)?; - } - Ref(_, borrow_kind, place) => { let src = self.eval_place(place)?; let place = self.force_allocation(&src)?; diff --git a/compiler/rustc_const_eval/src/interpret/validity.rs b/compiler/rustc_const_eval/src/interpret/validity.rs index 273eaf42d87a4..d75df1ad4425b 100644 --- a/compiler/rustc_const_eval/src/interpret/validity.rs +++ b/compiler/rustc_const_eval/src/interpret/validity.rs @@ -26,7 +26,7 @@ use rustc_middle::mir::interpret::{ }; use rustc_middle::ty::layout::{LayoutCx, LayoutOf, TyAndLayout}; use rustc_middle::ty::{self, Ty}; -use rustc_span::symbol::{Symbol, sym}; +use rustc_span::{Symbol, sym}; use tracing::trace; use super::machine::AllocMap; @@ -302,7 +302,7 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> { }; } } - Variants::Single { .. } => {} + Variants::Single { .. } | Variants::Empty => {} } // Now we know we are projecting to a field, so figure out which one. @@ -344,6 +344,7 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> { // Inside a variant PathElem::Field(def.variant(index).fields[FieldIdx::from_usize(field)].name) } + Variants::Empty => panic!("there is no field in Variants::Empty types"), Variants::Multiple { .. } => bug!("we handled variants above"), } } @@ -767,6 +768,7 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> { // Nothing to check. interp_ok(true) } + ty::UnsafeBinder(_) => todo!("FIXME(unsafe_binder)"), // The above should be all the primitive types. The rest is compound, we // check them by visiting their fields/variants. ty::Adt(..) @@ -1010,7 +1012,7 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> { } // Don't forget potential other variants. match &layout.variants { - Variants::Single { .. } => { + Variants::Single { .. } | Variants::Empty => { // Fully handled above. } Variants::Multiple { variants, .. } => { diff --git a/compiler/rustc_const_eval/src/interpret/visitor.rs b/compiler/rustc_const_eval/src/interpret/visitor.rs index 76ab0bb544f05..3647c109a6ed4 100644 --- a/compiler/rustc_const_eval/src/interpret/visitor.rs +++ b/compiler/rustc_const_eval/src/interpret/visitor.rs @@ -218,8 +218,8 @@ pub trait ValueVisitor<'tcx, M: Machine<'tcx>>: Sized { // recurse with the inner type self.visit_variant(v, idx, &inner)?; } - // For single-variant layouts, we already did anything there is to do. - Variants::Single { .. } => {} + // For single-variant layouts, we already did everything there is to do. + Variants::Single { .. 
} | Variants::Empty => {} } interp_ok(()) diff --git a/compiler/rustc_const_eval/src/util/caller_location.rs b/compiler/rustc_const_eval/src/util/caller_location.rs index 6593547cd2383..6dd9447cf5a5f 100644 --- a/compiler/rustc_const_eval/src/util/caller_location.rs +++ b/compiler/rustc_const_eval/src/util/caller_location.rs @@ -3,7 +3,7 @@ use rustc_middle::query::TyCtxtAt; use rustc_middle::ty::layout::LayoutOf; use rustc_middle::ty::{self}; use rustc_middle::{bug, mir}; -use rustc_span::symbol::Symbol; +use rustc_span::Symbol; use tracing::trace; use crate::const_eval::{CanAccessMutGlobal, CompileTimeInterpCx, mk_eval_cx_to_read_const_val}; diff --git a/compiler/rustc_const_eval/src/util/check_validity_requirement.rs b/compiler/rustc_const_eval/src/util/check_validity_requirement.rs index 651a797e97cf6..a729d9325c84a 100644 --- a/compiler/rustc_const_eval/src/util/check_validity_requirement.rs +++ b/compiler/rustc_const_eval/src/util/check_validity_requirement.rs @@ -155,6 +155,7 @@ fn check_validity_requirement_lax<'tcx>( } match &this.variants { + Variants::Empty => return Ok(false), Variants::Single { .. } => { // All fields of this single variant have already been checked above, there is nothing // else to do. diff --git a/compiler/rustc_const_eval/src/util/type_name.rs b/compiler/rustc_const_eval/src/util/type_name.rs index 36c7bed5c1194..e14cd603c5828 100644 --- a/compiler/rustc_const_eval/src/util/type_name.rs +++ b/compiler/rustc_const_eval/src/util/type_name.rs @@ -38,7 +38,8 @@ impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> { | ty::FnPtr(..) | ty::Never | ty::Tuple(_) - | ty::Dynamic(_, _, _) => self.pretty_print_type(ty), + | ty::Dynamic(_, _, _) + | ty::UnsafeBinder(_) => self.pretty_print_type(ty), // Placeholders (all printed as `_` to uniformize them). ty::Param(_) | ty::Bound(..) | ty::Placeholder(_) | ty::Infer(_) | ty::Error(_) => { diff --git a/compiler/rustc_data_structures/src/flock.rs b/compiler/rustc_data_structures/src/flock.rs index e03962a54ecab..292a33d56469a 100644 --- a/compiler/rustc_data_structures/src/flock.rs +++ b/compiler/rustc_data_structures/src/flock.rs @@ -4,6 +4,7 @@ //! green/native threading. This is just a bare-bones enough solution for //! librustdoc, it is not production quality at all. +#[cfg(bootstrap)] cfg_match! { cfg(target_os = "linux") => { mod linux; @@ -27,4 +28,28 @@ cfg_match! { } } +#[cfg(not(bootstrap))] +cfg_match! 
{ + target_os = "linux" => { + mod linux; + use linux as imp; + } + target_os = "redox" => { + mod linux; + use linux as imp; + } + unix => { + mod unix; + use unix as imp; + } + windows => { + mod windows; + use self::windows as imp; + } + _ => { + mod unsupported; + use unsupported as imp; + } +} + pub use imp::Lock; diff --git a/compiler/rustc_data_structures/src/graph/implementation/mod.rs b/compiler/rustc_data_structures/src/graph/implementation/mod.rs index 43fdfe6ee0d73..7724e9347d822 100644 --- a/compiler/rustc_data_structures/src/graph/implementation/mod.rs +++ b/compiler/rustc_data_structures/src/graph/implementation/mod.rs @@ -22,7 +22,7 @@ use std::fmt::Debug; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use tracing::debug; #[cfg(test)] @@ -214,7 +214,7 @@ impl Graph { direction: Direction, entry_node: NodeIndex, ) -> Vec { - let mut visited = BitSet::new_empty(self.len_nodes()); + let mut visited = DenseBitSet::new_empty(self.len_nodes()); let mut stack = vec![]; let mut result = Vec::with_capacity(self.len_nodes()); let mut push_node = |stack: &mut Vec<_>, node: NodeIndex| { @@ -287,7 +287,7 @@ impl<'g, N: Debug, E: Debug> Iterator for AdjacentEdges<'g, N, E> { pub struct DepthFirstTraversal<'g, N, E> { graph: &'g Graph, stack: Vec, - visited: BitSet, + visited: DenseBitSet, direction: Direction, } @@ -297,7 +297,7 @@ impl<'g, N: Debug, E: Debug> DepthFirstTraversal<'g, N, E> { start_node: NodeIndex, direction: Direction, ) -> Self { - let mut visited = BitSet::new_empty(graph.len_nodes()); + let mut visited = DenseBitSet::new_empty(graph.len_nodes()); visited.insert(start_node.node_id()); DepthFirstTraversal { graph, stack: vec![start_node], visited, direction } } diff --git a/compiler/rustc_data_structures/src/graph/iterate/mod.rs b/compiler/rustc_data_structures/src/graph/iterate/mod.rs index cbc6664d853a1..7b4573d7a84c7 100644 --- a/compiler/rustc_data_structures/src/graph/iterate/mod.rs +++ b/compiler/rustc_data_structures/src/graph/iterate/mod.rs @@ -1,6 +1,6 @@ use std::ops::ControlFlow; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_index::{IndexSlice, IndexVec}; use super::{DirectedGraph, StartNode, Successors}; @@ -78,7 +78,7 @@ where { graph: G, stack: Vec, - visited: BitSet, + visited: DenseBitSet, } impl DepthFirstSearch @@ -86,7 +86,7 @@ where G: DirectedGraph + Successors, { pub fn new(graph: G) -> Self { - Self { stack: vec![], visited: BitSet::new_empty(graph.num_nodes()), graph } + Self { stack: vec![], visited: DenseBitSet::new_empty(graph.num_nodes()), graph } } /// Version of `push_start_node` that is convenient for chained @@ -125,6 +125,16 @@ where pub fn visited(&self, node: G::Node) -> bool { self.visited.contains(node) } + + /// Returns a reference to the set of nodes that have been visited, with + /// the same caveats as [`Self::visited`]. + /// + /// When incorporating the visited nodes into another bitset, using bulk + /// operations like `union` or `intersect` can be more efficient than + /// processing each node individually. 
+ pub fn visited_set(&self) -> &DenseBitSet { + &self.visited + } } impl std::fmt::Debug for DepthFirstSearch @@ -207,8 +217,8 @@ where { graph: &'graph G, stack: Vec>, - visited: BitSet, - settled: BitSet, + visited: DenseBitSet, + settled: DenseBitSet, } impl<'graph, G> TriColorDepthFirstSearch<'graph, G> @@ -219,8 +229,8 @@ where TriColorDepthFirstSearch { graph, stack: vec![], - visited: BitSet::new_empty(graph.num_nodes()), - settled: BitSet::new_empty(graph.num_nodes()), + visited: DenseBitSet::new_empty(graph.num_nodes()), + settled: DenseBitSet::new_empty(graph.num_nodes()), } } diff --git a/compiler/rustc_data_structures/src/graph/mod.rs b/compiler/rustc_data_structures/src/graph/mod.rs index 103ddd917bf84..92035e8bc480f 100644 --- a/compiler/rustc_data_structures/src/graph/mod.rs +++ b/compiler/rustc_data_structures/src/graph/mod.rs @@ -4,6 +4,7 @@ pub mod dominators; pub mod implementation; pub mod iterate; mod reference; +pub mod reversed; pub mod scc; pub mod vec_graph; diff --git a/compiler/rustc_data_structures/src/graph/reversed.rs b/compiler/rustc_data_structures/src/graph/reversed.rs new file mode 100644 index 0000000000000..9b726deaa15b6 --- /dev/null +++ b/compiler/rustc_data_structures/src/graph/reversed.rs @@ -0,0 +1,42 @@ +use crate::graph::{DirectedGraph, Predecessors, Successors}; + +/// View that reverses the direction of edges in its underlying graph, so that +/// successors become predecessors and vice-versa. +/// +/// Because of `impl Graph for &G`, the underlying graph can be +/// wrapped by-reference instead of by-value if desired. +#[derive(Clone, Copy, Debug)] +pub struct ReversedGraph { + pub inner: G, +} + +impl ReversedGraph { + pub fn new(inner: G) -> Self { + Self { inner } + } +} + +impl DirectedGraph for ReversedGraph { + type Node = G::Node; + + fn num_nodes(&self) -> usize { + self.inner.num_nodes() + } +} + +// Implementing `StartNode` is not possible in general, because the start node +// of an underlying graph is instead an _end_ node in the reversed graph. +// But would be possible to define another wrapper type that adds an explicit +// start node to its underlying graph, if desired. 
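A minimal sketch of the kind of caller the new `visited_set` accessor (added in the `iterate` hunk above) is aimed at. This is illustrative only, not part of the patch, and it assumes `DepthFirstSearch`'s `Iterator` implementation and the `union` bulk operation on `DenseBitSet`, neither of which is shown in this diff.

```rust
use rustc_data_structures::graph::iterate::DepthFirstSearch;
use rustc_data_structures::graph::{DirectedGraph, Successors};
use rustc_index::bit_set::DenseBitSet;

/// Marks every node reachable from `start` in `reachable`, using one bulk
/// `union` over the search's visited set instead of a per-node `insert` loop.
fn mark_reachable<G: DirectedGraph + Successors>(
    graph: G,
    start: G::Node,
    reachable: &mut DenseBitSet<G::Node>,
) {
    let mut dfs = DepthFirstSearch::new(graph);
    dfs.push_start_node(start);
    // Drive the search to completion so the internal visited set is filled.
    while dfs.next().is_some() {}
    // One word-by-word OR over the bitset, rather than N single insertions.
    reachable.union(dfs.visited_set());
}
```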
+ +impl Successors for ReversedGraph { + fn successors(&self, node: Self::Node) -> impl Iterator { + self.inner.predecessors(node) + } +} + +impl Predecessors for ReversedGraph { + fn predecessors(&self, node: Self::Node) -> impl Iterator { + self.inner.successors(node) + } +} diff --git a/compiler/rustc_data_structures/src/marker.rs b/compiler/rustc_data_structures/src/marker.rs index 2b629024bfe45..6ae97222f77fd 100644 --- a/compiler/rustc_data_structures/src/marker.rs +++ b/compiler/rustc_data_structures/src/marker.rs @@ -72,7 +72,7 @@ impl_dyn_send!( [Vec where T: DynSend, A: std::alloc::Allocator + DynSend] [Box where T: ?Sized + DynSend, A: std::alloc::Allocator + DynSend] [crate::sync::RwLock where T: DynSend] - [crate::tagged_ptr::CopyTaggedPtr where P: Send + crate::tagged_ptr::Pointer, T: Send + crate::tagged_ptr::Tag, const CP: bool] + [crate::tagged_ptr::TaggedRef<'a, P, T> where 'a, P: Sync, T: Send + crate::tagged_ptr::Tag] [rustc_arena::TypedArena where T: DynSend] [indexmap::IndexSet where V: DynSend, S: DynSend] [indexmap::IndexMap where K: DynSend, V: DynSend, S: DynSend] @@ -148,7 +148,7 @@ impl_dyn_sync!( [crate::sync::RwLock where T: DynSend + DynSync] [crate::sync::WorkerLocal where T: DynSend] [crate::intern::Interned<'a, T> where 'a, T: DynSync] - [crate::tagged_ptr::CopyTaggedPtr where P: Sync + crate::tagged_ptr::Pointer, T: Sync + crate::tagged_ptr::Tag, const CP: bool] + [crate::tagged_ptr::TaggedRef<'a, P, T> where 'a, P: Sync, T: Sync + crate::tagged_ptr::Tag] [parking_lot::lock_api::Mutex where R: DynSync, T: ?Sized + DynSend] [parking_lot::lock_api::RwLock where R: DynSync, T: ?Sized + DynSend + DynSync] [indexmap::IndexSet where V: DynSync, S: DynSync] diff --git a/compiler/rustc_data_structures/src/packed.rs b/compiler/rustc_data_structures/src/packed.rs index f54b12b5b532d..c8921536530a2 100644 --- a/compiler/rustc_data_structures/src/packed.rs +++ b/compiler/rustc_data_structures/src/packed.rs @@ -18,6 +18,13 @@ impl Pu128 { } } +impl From for u128 { + #[inline] + fn from(value: Pu128) -> Self { + value.get() + } +} + impl From for Pu128 { #[inline] fn from(value: u128) -> Self { diff --git a/compiler/rustc_data_structures/src/profiling.rs b/compiler/rustc_data_structures/src/profiling.rs index 19050746c2f18..18e98e6c39fa9 100644 --- a/compiler/rustc_data_structures/src/profiling.rs +++ b/compiler/rustc_data_structures/src/profiling.rs @@ -860,6 +860,7 @@ fn get_thread_id() -> u32 { } // Memory reporting +#[cfg(bootstrap)] cfg_match! { cfg(windows) => { pub fn get_resident_set_size() -> Option { @@ -921,5 +922,67 @@ cfg_match! { } } +#[cfg(not(bootstrap))] +cfg_match! 
{ + windows => { + pub fn get_resident_set_size() -> Option { + use std::mem; + + use windows::{ + Win32::System::ProcessStatus::{K32GetProcessMemoryInfo, PROCESS_MEMORY_COUNTERS}, + Win32::System::Threading::GetCurrentProcess, + }; + + let mut pmc = PROCESS_MEMORY_COUNTERS::default(); + let pmc_size = mem::size_of_val(&pmc); + unsafe { + K32GetProcessMemoryInfo( + GetCurrentProcess(), + &mut pmc, + pmc_size as u32, + ) + } + .ok() + .ok()?; + + Some(pmc.WorkingSetSize) + } + } + target_os = "macos" => { + pub fn get_resident_set_size() -> Option { + use libc::{c_int, c_void, getpid, proc_pidinfo, proc_taskinfo, PROC_PIDTASKINFO}; + use std::mem; + const PROC_TASKINFO_SIZE: c_int = mem::size_of::() as c_int; + + unsafe { + let mut info: proc_taskinfo = mem::zeroed(); + let info_ptr = &mut info as *mut proc_taskinfo as *mut c_void; + let pid = getpid() as c_int; + let ret = proc_pidinfo(pid, PROC_PIDTASKINFO, 0, info_ptr, PROC_TASKINFO_SIZE); + if ret == PROC_TASKINFO_SIZE { + Some(info.pti_resident_size as usize) + } else { + None + } + } + } + } + unix => { + pub fn get_resident_set_size() -> Option { + let field = 1; + let contents = fs::read("/proc/self/statm").ok()?; + let contents = String::from_utf8(contents).ok()?; + let s = contents.split_whitespace().nth(field)?; + let npages = s.parse::().ok()?; + Some(npages * 4096) + } + } + _ => { + pub fn get_resident_set_size() -> Option { + None + } + } +} + #[cfg(test)] mod tests; diff --git a/compiler/rustc_data_structures/src/stable_hasher.rs b/compiler/rustc_data_structures/src/stable_hasher.rs index 0872bd2c9acc9..9cd0cc499ca32 100644 --- a/compiler/rustc_data_structures/src/stable_hasher.rs +++ b/compiler/rustc_data_structures/src/stable_hasher.rs @@ -3,7 +3,7 @@ use std::marker::PhantomData; use std::mem; use std::num::NonZero; -use rustc_index::bit_set::{self, BitSet}; +use rustc_index::bit_set::{self, DenseBitSet}; use rustc_index::{Idx, IndexSlice, IndexVec}; use smallvec::SmallVec; @@ -544,7 +544,7 @@ where } } -impl HashStable for BitSet { +impl HashStable for DenseBitSet { fn hash_stable(&self, _ctx: &mut CTX, hasher: &mut StableHasher) { ::std::hash::Hash::hash(self, hasher); } diff --git a/compiler/rustc_data_structures/src/stable_hasher/tests.rs b/compiler/rustc_data_structures/src/stable_hasher/tests.rs index aab50a13af0e6..635f241847c43 100644 --- a/compiler/rustc_data_structures/src/stable_hasher/tests.rs +++ b/compiler/rustc_data_structures/src/stable_hasher/tests.rs @@ -17,9 +17,9 @@ fn hash>(t: &T) -> Hash128 { // Check that bit set hash includes the domain size. #[test] fn test_hash_bit_set() { - use rustc_index::bit_set::BitSet; - let a: BitSet = BitSet::new_empty(1); - let b: BitSet = BitSet::new_empty(2); + use rustc_index::bit_set::DenseBitSet; + let a: DenseBitSet = DenseBitSet::new_empty(1); + let b: DenseBitSet = DenseBitSet::new_empty(2); assert_ne!(a, b); assert_ne!(hash(&a), hash(&b)); } diff --git a/compiler/rustc_data_structures/src/tagged_ptr.rs b/compiler/rustc_data_structures/src/tagged_ptr.rs index 2914eece6796b..94db421f77e89 100644 --- a/compiler/rustc_data_structures/src/tagged_ptr.rs +++ b/compiler/rustc_data_structures/src/tagged_ptr.rs @@ -1,116 +1,26 @@ -//! This module implements tagged pointers. +//! This module implements tagged pointers. In order to utilize the pointer +//! packing, you must have a tag type implementing the [`Tag`] trait. //! -//! In order to utilize the pointer packing, you must have two types: a pointer, -//! and a tag. -//! -//! 
The pointer must implement the [`Pointer`] trait, with the primary -//! requirement being convertible to and from a raw pointer. Note that the -//! pointer must be dereferenceable, so raw pointers generally cannot implement -//! the [`Pointer`] trait. This implies that the pointer must also be non-null. -//! -//! Many common pointer types already implement the [`Pointer`] trait. -//! -//! The tag must implement the [`Tag`] trait. -//! -//! We assert that the tag and the [`Pointer`] types are compatible at compile +//! We assert that the tag and the reference type is compatible at compile //! time. +use std::fmt; +use std::hash::{Hash, Hasher}; +use std::marker::PhantomData; +use std::num::NonZero; use std::ops::Deref; use std::ptr::NonNull; -use std::rc::Rc; -use std::sync::Arc; use crate::aligned::Aligned; +use crate::stable_hasher::{HashStable, StableHasher}; -mod copy; -mod drop; -mod impl_tag; - -pub use copy::CopyTaggedPtr; -pub use drop::TaggedPtr; - -/// This describes the pointer type encapsulated by [`TaggedPtr`] and -/// [`CopyTaggedPtr`]. -/// -/// # Safety -/// -/// The pointer returned from [`into_ptr`] must be a [valid], pointer to -/// [`::Target`]. -/// -/// Note that if `Self` implements [`DerefMut`] the pointer returned from -/// [`into_ptr`] must be valid for writes (and thus calling [`NonNull::as_mut`] -/// on it must be safe). -/// -/// The [`BITS`] constant must be correct. [`BITS`] least-significant bits, -/// must be zero on all pointers returned from [`into_ptr`]. -/// -/// For example, if the alignment of [`Self::Target`] is 2, then `BITS` should be 1. -/// -/// [`BITS`]: Pointer::BITS -/// [`into_ptr`]: Pointer::into_ptr -/// [valid]: std::ptr#safety -/// [`::Target`]: Deref::Target -/// [`Self::Target`]: Deref::Target -/// [`DerefMut`]: std::ops::DerefMut -pub unsafe trait Pointer: Deref { - /// Number of unused (always zero) **least-significant bits** in this - /// pointer, usually related to the pointees alignment. - /// - /// For example if [`BITS`] = `2`, then given `ptr = Self::into_ptr(..)`, - /// `ptr.addr() & 0b11 == 0` must be true. - /// - /// Most likely the value you want to use here is the following, unless - /// your [`Self::Target`] type is unsized (e.g., `ty::List` in rustc) - /// or your pointer is over/under aligned, in which case you'll need to - /// manually figure out what the right type to pass to [`bits_for`] is, or - /// what the value to set here. - /// - /// ```rust - /// # use std::ops::Deref; - /// # use rustc_data_structures::tagged_ptr::bits_for; - /// # struct T; - /// # impl Deref for T { type Target = u8; fn deref(&self) -> &u8 { &0 } } - /// # impl T { - /// const BITS: u32 = bits_for::<::Target>(); - /// # } - /// ``` - /// - /// [`BITS`]: Pointer::BITS - /// [`Self::Target`]: Deref::Target - const BITS: u32; - - /// Turns this pointer into a raw, non-null pointer. - /// - /// The inverse of this function is [`from_ptr`]. - /// - /// This function guarantees that the least-significant [`Self::BITS`] bits - /// are zero. - /// - /// [`from_ptr`]: Pointer::from_ptr - /// [`Self::BITS`]: Pointer::BITS - fn into_ptr(self) -> NonNull; - - /// Re-creates the original pointer, from a raw pointer returned by [`into_ptr`]. - /// - /// # Safety - /// - /// The passed `ptr` must be returned from [`into_ptr`]. - /// - /// This acts as [`ptr::read::()`] semantically, it should not be called more than - /// once on non-[`Copy`] `Pointer`s. 
- /// - /// [`into_ptr`]: Pointer::into_ptr - /// [`ptr::read::()`]: std::ptr::read - unsafe fn from_ptr(ptr: NonNull) -> Self; -} - -/// This describes tags that the [`TaggedPtr`] struct can hold. +/// This describes tags that the [`TaggedRef`] struct can hold. /// /// # Safety /// -/// The [`BITS`] constant must be correct. -/// -/// No more than [`BITS`] least-significant bits may be set in the returned usize. +/// - The [`BITS`] constant must be correct. +/// - No more than [`BITS`] least-significant bits may be set in the returned usize. +/// - [`Eq`] and [`Hash`] must be implementable with the returned `usize` from `into_usize`. /// /// [`BITS`]: Tag::BITS pub unsafe trait Tag: Copy { @@ -166,118 +76,217 @@ pub const fn bits_for_tags(mut tags: &[usize]) -> u32 { bits } -unsafe impl Pointer for Box { - const BITS: u32 = bits_for::(); +/// A covariant [`Copy`] tagged borrow. This is essentially `{ pointer: &'a P, tag: T }` packed +/// in a single reference. +pub struct TaggedRef<'a, Pointee: Aligned + ?Sized, T: Tag> { + /// This is semantically a pair of `pointer: &'a P` and `tag: T` fields, + /// however we pack them in a single pointer, to save space. + /// + /// We pack the tag into the **most**-significant bits of the pointer to + /// ease retrieval of the value. A left shift is a multiplication and + /// those are embeddable in instruction encoding, for example: + /// + /// ```asm + /// // () + /// example::shift_read3: + /// mov eax, dword ptr [8*rdi] + /// ret + /// + /// example::mask_read3: + /// and rdi, -8 + /// mov eax, dword ptr [rdi] + /// ret + /// ``` + /// + /// This is ASM outputted by rustc for reads of values behind tagged + /// pointers for different approaches of tagging: + /// - `shift_read3` uses `<< 3` (the tag is in the most-significant bits) + /// - `mask_read3` uses `& !0b111` (the tag is in the least-significant bits) + /// + /// The shift approach thus produces less instructions and is likely faster + /// (see ). + /// + /// Encoding diagram: + /// ```text + /// [ packed.addr ] + /// [ tag ] [ pointer.addr >> T::BITS ] <-- usize::BITS - T::BITS bits + /// ^ + /// | + /// T::BITS bits + /// ``` + /// + /// The tag can be retrieved by `packed.addr() >> T::BITS` and the pointer + /// can be retrieved by `packed.map_addr(|addr| addr << T::BITS)`. + packed: NonNull, + tag_pointer_ghost: PhantomData<(&'a Pointee, T)>, +} +impl<'a, P, T> TaggedRef<'a, P, T> +where + P: Aligned + ?Sized, + T: Tag, +{ + /// Tags `pointer` with `tag`. + /// + /// [`TaggedRef`]: crate::tagged_ptr::TaggedRef #[inline] - fn into_ptr(self) -> NonNull { - // Safety: pointers from `Box::into_raw` are valid & non-null - unsafe { NonNull::new_unchecked(Box::into_raw(self)) } + pub fn new(pointer: &'a P, tag: T) -> Self { + Self { packed: Self::pack(NonNull::from(pointer), tag), tag_pointer_ghost: PhantomData } } + /// Retrieves the pointer. #[inline] - unsafe fn from_ptr(ptr: NonNull) -> Self { - // Safety: `ptr` comes from `into_ptr` which calls `Box::into_raw` - unsafe { Box::from_raw(ptr.as_ptr()) } + pub fn pointer(self) -> &'a P { + // SAFETY: pointer_raw returns the original pointer + unsafe { self.pointer_raw().as_ref() } } -} - -unsafe impl Pointer for Rc { - const BITS: u32 = bits_for::(); + /// Retrieves the tag. 
#[inline] - fn into_ptr(self) -> NonNull { - // Safety: pointers from `Rc::into_raw` are valid & non-null - unsafe { NonNull::new_unchecked(Rc::into_raw(self).cast_mut()) } + pub fn tag(&self) -> T { + // Unpack the tag, according to the `self.packed` encoding scheme + let tag = self.packed.addr().get() >> Self::TAG_BIT_SHIFT; + + // Safety: + // The shift retrieves the original value from `T::into_usize`, + // satisfying `T::from_usize`'s preconditions. + unsafe { T::from_usize(tag) } } + /// Sets the tag to a new value. #[inline] - unsafe fn from_ptr(ptr: NonNull) -> Self { - // Safety: `ptr` comes from `into_ptr` which calls `Rc::into_raw` - unsafe { Rc::from_raw(ptr.as_ptr()) } + pub fn set_tag(&mut self, tag: T) { + self.packed = Self::pack(self.pointer_raw(), tag); } -} -unsafe impl Pointer for Arc { - const BITS: u32 = bits_for::(); + const TAG_BIT_SHIFT: u32 = usize::BITS - T::BITS; + const ASSERTION: () = { assert!(T::BITS <= bits_for::
<P>
()) }; + /// Pack pointer `ptr` with a `tag`, according to `self.packed` encoding scheme. #[inline] - fn into_ptr(self) -> NonNull { - // Safety: pointers from `Arc::into_raw` are valid & non-null - unsafe { NonNull::new_unchecked(Arc::into_raw(self).cast_mut()) } + fn pack(ptr: NonNull

<P>, tag: T) -> NonNull<P>

{ + // Trigger assert! + let () = Self::ASSERTION; + + let packed_tag = tag.into_usize() << Self::TAG_BIT_SHIFT; + + ptr.map_addr(|addr| { + // Safety: + // - The pointer is `NonNull` => it's address is `NonZero` + // - `P::BITS` least significant bits are always zero (`Pointer` contract) + // - `T::BITS <= P::BITS` (from `Self::ASSERTION`) + // + // Thus `addr >> T::BITS` is guaranteed to be non-zero. + // + // `{non_zero} | packed_tag` can't make the value zero. + + let packed = (addr.get() >> T::BITS) | packed_tag; + unsafe { NonZero::new_unchecked(packed) } + }) } + /// Retrieves the original raw pointer from `self.packed`. #[inline] - unsafe fn from_ptr(ptr: NonNull) -> Self { - // Safety: `ptr` comes from `into_ptr` which calls `Arc::into_raw` - unsafe { Arc::from_raw(ptr.as_ptr()) } + pub(super) fn pointer_raw(&self) -> NonNull
<P>
{ + self.packed.map_addr(|addr| unsafe { NonZero::new_unchecked(addr.get() << T::BITS) }) } } -unsafe impl<'a, T: 'a + ?Sized + Aligned> Pointer for &'a T { - const BITS: u32 = bits_for::(); +impl Copy for TaggedRef<'_, P, T> +where + P: Aligned + ?Sized, + T: Tag, +{ +} +impl Clone for TaggedRef<'_, P, T> +where + P: Aligned + ?Sized, + T: Tag, +{ #[inline] - fn into_ptr(self) -> NonNull { - NonNull::from(self) + fn clone(&self) -> Self { + *self } +} + +impl Deref for TaggedRef<'_, P, T> +where + P: Aligned + ?Sized, + T: Tag, +{ + type Target = P; #[inline] - unsafe fn from_ptr(ptr: NonNull) -> Self { - // Safety: - // `ptr` comes from `into_ptr` which gets the pointer from a reference - unsafe { ptr.as_ref() } + fn deref(&self) -> &Self::Target { + self.pointer() } } -unsafe impl<'a, T: 'a + ?Sized + Aligned> Pointer for &'a mut T { - const BITS: u32 = bits_for::(); +impl fmt::Debug for TaggedRef<'_, P, T> +where + P: Aligned + fmt::Debug + ?Sized, + T: Tag + fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("TaggedRef") + .field("pointer", &self.pointer()) + .field("tag", &self.tag()) + .finish() + } +} +impl PartialEq for TaggedRef<'_, P, T> +where + P: Aligned + ?Sized, + T: Tag, +{ #[inline] - fn into_ptr(self) -> NonNull { - NonNull::from(self) + #[allow(ambiguous_wide_pointer_comparisons)] + fn eq(&self, other: &Self) -> bool { + self.packed == other.packed } +} +impl Eq for TaggedRef<'_, P, T> {} + +impl Hash for TaggedRef<'_, P, T> { #[inline] - unsafe fn from_ptr(mut ptr: NonNull) -> Self { - // Safety: - // `ptr` comes from `into_ptr` which gets the pointer from a reference - unsafe { ptr.as_mut() } + fn hash(&self, state: &mut H) { + self.packed.hash(state); } } -/// A tag type used in [`CopyTaggedPtr`] and [`TaggedPtr`] tests. 
-#[derive(Copy, Clone, Debug, PartialEq, Eq)] -#[cfg(test)] -enum Tag2 { - B00 = 0b00, - B01 = 0b01, - B10 = 0b10, - B11 = 0b11, +impl<'a, P, T, HCX> HashStable for TaggedRef<'a, P, T> +where + P: HashStable + Aligned + ?Sized, + T: Tag + HashStable, +{ + fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) { + self.pointer().hash_stable(hcx, hasher); + self.tag().hash_stable(hcx, hasher); + } } -#[cfg(test)] -unsafe impl Tag for Tag2 { - const BITS: u32 = 2; - - fn into_usize(self) -> usize { - self as _ - } +// Safety: +// `TaggedRef` is semantically just `{ ptr: P, tag: T }`, as such +// it's ok to implement `Sync` as long as `P: Sync, T: Sync` +unsafe impl Sync for TaggedRef<'_, P, T> +where + P: Sync + Aligned + ?Sized, + T: Sync + Tag, +{ +} - unsafe fn from_usize(tag: usize) -> Self { - match tag { - 0b00 => Tag2::B00, - 0b01 => Tag2::B01, - 0b10 => Tag2::B10, - 0b11 => Tag2::B11, - _ => unreachable!(), - } - } +// Safety: +// `TaggedRef` is semantically just `{ ptr: P, tag: T }`, as such +// it's ok to implement `Send` as long as `P: Send, T: Send` +unsafe impl Send for TaggedRef<'_, P, T> +where + P: Sync + Aligned + ?Sized, + T: Send + Tag, +{ } #[cfg(test)] -impl crate::stable_hasher::HashStable for Tag2 { - fn hash_stable(&self, hcx: &mut HCX, hasher: &mut crate::stable_hasher::StableHasher) { - (*self as u8).hash_stable(hcx, hasher); - } -} +mod tests; diff --git a/compiler/rustc_data_structures/src/tagged_ptr/copy.rs b/compiler/rustc_data_structures/src/tagged_ptr/copy.rs deleted file mode 100644 index 25e107b0f413c..0000000000000 --- a/compiler/rustc_data_structures/src/tagged_ptr/copy.rs +++ /dev/null @@ -1,330 +0,0 @@ -use std::fmt; -use std::hash::{Hash, Hasher}; -use std::marker::PhantomData; -use std::mem::ManuallyDrop; -use std::num::NonZero; -use std::ops::{Deref, DerefMut}; -use std::ptr::NonNull; - -use super::{Pointer, Tag}; -use crate::stable_hasher::{HashStable, StableHasher}; - -/// A [`Copy`] tagged pointer. -/// -/// This is essentially `{ pointer: P, tag: T }` packed in a single pointer. -/// -/// You should use this instead of the [`TaggedPtr`] type in all cases where -/// `P` implements [`Copy`]. -/// -/// If `COMPARE_PACKED` is true, then the pointers will be compared and hashed without -/// unpacking. Otherwise we don't implement [`PartialEq`], [`Eq`] and [`Hash`]; -/// if you want that, wrap the [`CopyTaggedPtr`]. -/// -/// [`TaggedPtr`]: crate::tagged_ptr::TaggedPtr -pub struct CopyTaggedPtr -where - P: Pointer, - T: Tag, -{ - /// This is semantically a pair of `pointer: P` and `tag: T` fields, - /// however we pack them in a single pointer, to save space. - /// - /// We pack the tag into the **most**-significant bits of the pointer to - /// ease retrieval of the value. A left shift is a multiplication and - /// those are embeddable in instruction encoding, for example: - /// - /// ```asm - /// // () - /// example::shift_read3: - /// mov eax, dword ptr [8*rdi] - /// ret - /// - /// example::mask_read3: - /// and rdi, -8 - /// mov eax, dword ptr [rdi] - /// ret - /// ``` - /// - /// This is ASM outputted by rustc for reads of values behind tagged - /// pointers for different approaches of tagging: - /// - `shift_read3` uses `<< 3` (the tag is in the most-significant bits) - /// - `mask_read3` uses `& !0b111` (the tag is in the least-significant bits) - /// - /// The shift approach thus produces less instructions and is likely faster - /// (see ). 
- /// - /// Encoding diagram: - /// ```text - /// [ packed.addr ] - /// [ tag ] [ pointer.addr >> T::BITS ] <-- usize::BITS - T::BITS bits - /// ^ - /// | - /// T::BITS bits - /// ``` - /// - /// The tag can be retrieved by `packed.addr() >> T::BITS` and the pointer - /// can be retrieved by `packed.map_addr(|addr| addr << T::BITS)`. - packed: NonNull, - tag_ghost: PhantomData, -} - -// Note that even though `CopyTaggedPtr` is only really expected to work with -// `P: Copy`, can't add `P: Copy` bound, because `CopyTaggedPtr` is used in the -// `TaggedPtr`'s implementation. -impl CopyTaggedPtr -where - P: Pointer, - T: Tag, -{ - /// Tags `pointer` with `tag`. - /// - /// Note that this leaks `pointer`: it won't be dropped when - /// `CopyTaggedPtr` is dropped. If you have a pointer with a significant - /// drop, use [`TaggedPtr`] instead. - /// - /// [`TaggedPtr`]: crate::tagged_ptr::TaggedPtr - #[inline] - pub fn new(pointer: P, tag: T) -> Self { - Self { packed: Self::pack(P::into_ptr(pointer), tag), tag_ghost: PhantomData } - } - - /// Retrieves the pointer. - #[inline] - pub fn pointer(self) -> P - where - P: Copy, - { - // SAFETY: pointer_raw returns the original pointer - // - // Note that this isn't going to double-drop or anything because we have - // P: Copy - unsafe { P::from_ptr(self.pointer_raw()) } - } - - /// Retrieves the tag. - #[inline] - pub fn tag(&self) -> T { - // Unpack the tag, according to the `self.packed` encoding scheme - let tag = self.packed.addr().get() >> Self::TAG_BIT_SHIFT; - - // Safety: - // The shift retrieves the original value from `T::into_usize`, - // satisfying `T::from_usize`'s preconditions. - unsafe { T::from_usize(tag) } - } - - /// Sets the tag to a new value. - #[inline] - pub fn set_tag(&mut self, tag: T) { - self.packed = Self::pack(self.pointer_raw(), tag); - } - - const TAG_BIT_SHIFT: u32 = usize::BITS - T::BITS; - const ASSERTION: () = { assert!(T::BITS <= P::BITS) }; - - /// Pack pointer `ptr` that comes from [`P::into_ptr`] with a `tag`, - /// according to `self.packed` encoding scheme. - /// - /// [`P::into_ptr`]: Pointer::into_ptr - #[inline] - fn pack(ptr: NonNull, tag: T) -> NonNull { - // Trigger assert! - let () = Self::ASSERTION; - - let packed_tag = tag.into_usize() << Self::TAG_BIT_SHIFT; - - ptr.map_addr(|addr| { - // Safety: - // - The pointer is `NonNull` => it's address is `NonZero` - // - `P::BITS` least significant bits are always zero (`Pointer` contract) - // - `T::BITS <= P::BITS` (from `Self::ASSERTION`) - // - // Thus `addr >> T::BITS` is guaranteed to be non-zero. - // - // `{non_zero} | packed_tag` can't make the value zero. - - let packed = (addr.get() >> T::BITS) | packed_tag; - unsafe { NonZero::new_unchecked(packed) } - }) - } - - /// Retrieves the original raw pointer from `self.packed`. - #[inline] - pub(super) fn pointer_raw(&self) -> NonNull { - self.packed.map_addr(|addr| unsafe { NonZero::new_unchecked(addr.get() << T::BITS) }) - } - - /// This provides a reference to the `P` pointer itself, rather than the - /// `Deref::Target`. It is used for cases where we want to call methods - /// that may be implement differently for the Pointer than the Pointee - /// (e.g., `Rc::clone` vs cloning the inner value). - pub(super) fn with_pointer_ref(&self, f: impl FnOnce(&P) -> R) -> R { - // Safety: - // - `self.raw.pointer_raw()` is originally returned from `P::into_ptr` - // and as such is valid for `P::from_ptr`. - // - This also allows us to not care whatever `f` panics or not. 
- // - Even though we create a copy of the pointer, we store it inside - // `ManuallyDrop` and only access it by-ref, so we don't double-drop. - // - // Semantically this is just `f(&self.pointer)` (where `self.pointer` - // is non-packed original pointer). - // - // Note that even though `CopyTaggedPtr` is only really expected to - // work with `P: Copy`, we have to assume `P: ?Copy`, because - // `CopyTaggedPtr` is used in the `TaggedPtr`'s implementation. - let ptr = unsafe { ManuallyDrop::new(P::from_ptr(self.pointer_raw())) }; - f(&ptr) - } -} - -impl Copy for CopyTaggedPtr -where - P: Pointer + Copy, - T: Tag, -{ -} - -impl Clone for CopyTaggedPtr -where - P: Pointer + Copy, - T: Tag, -{ - #[inline] - fn clone(&self) -> Self { - *self - } -} - -impl Deref for CopyTaggedPtr -where - P: Pointer, - T: Tag, -{ - type Target = P::Target; - - #[inline] - fn deref(&self) -> &Self::Target { - // Safety: - // `pointer_raw` returns the original pointer from `P::into_ptr` which, - // by the `Pointer`'s contract, must be valid. - unsafe { self.pointer_raw().as_ref() } - } -} - -impl DerefMut for CopyTaggedPtr -where - P: Pointer + DerefMut, - T: Tag, -{ - #[inline] - fn deref_mut(&mut self) -> &mut Self::Target { - // Safety: - // `pointer_raw` returns the original pointer from `P::into_ptr` which, - // by the `Pointer`'s contract, must be valid for writes if - // `P: DerefMut`. - unsafe { self.pointer_raw().as_mut() } - } -} - -impl fmt::Debug for CopyTaggedPtr -where - P: Pointer + fmt::Debug, - T: Tag + fmt::Debug, -{ - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.with_pointer_ref(|ptr| { - f.debug_struct("CopyTaggedPtr").field("pointer", ptr).field("tag", &self.tag()).finish() - }) - } -} - -impl PartialEq for CopyTaggedPtr -where - P: Pointer, - T: Tag, -{ - #[inline] - #[allow(ambiguous_wide_pointer_comparisons)] - fn eq(&self, other: &Self) -> bool { - self.packed == other.packed - } -} - -impl Eq for CopyTaggedPtr -where - P: Pointer, - T: Tag, -{ -} - -impl Hash for CopyTaggedPtr -where - P: Pointer, - T: Tag, -{ - #[inline] - fn hash(&self, state: &mut H) { - self.packed.hash(state); - } -} - -impl HashStable for CopyTaggedPtr -where - P: Pointer + HashStable, - T: Tag + HashStable, -{ - fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) { - self.with_pointer_ref(|ptr| ptr.hash_stable(hcx, hasher)); - self.tag().hash_stable(hcx, hasher); - } -} - -// Safety: -// `CopyTaggedPtr` is semantically just `{ ptr: P, tag: T }`, as such -// it's ok to implement `Sync` as long as `P: Sync, T: Sync` -unsafe impl Sync for CopyTaggedPtr -where - P: Sync + Pointer, - T: Sync + Tag, -{ -} - -// Safety: -// `CopyTaggedPtr` is semantically just `{ ptr: P, tag: T }`, as such -// it's ok to implement `Send` as long as `P: Send, T: Send` -unsafe impl Send for CopyTaggedPtr -where - P: Send + Pointer, - T: Send + Tag, -{ -} - -/// Test that `new` does not compile if there is not enough alignment for the -/// tag in the pointer. 
-/// -/// ```compile_fail,E0080 -/// use rustc_data_structures::tagged_ptr::{CopyTaggedPtr, Tag}; -/// -/// #[derive(Copy, Clone, Debug, PartialEq, Eq)] -/// enum Tag2 { B00 = 0b00, B01 = 0b01, B10 = 0b10, B11 = 0b11 }; -/// -/// unsafe impl Tag for Tag2 { -/// const BITS: u32 = 2; -/// -/// fn into_usize(self) -> usize { todo!() } -/// unsafe fn from_usize(tag: usize) -> Self { todo!() } -/// } -/// -/// let value = 12u16; -/// let reference = &value; -/// let tag = Tag2::B01; -/// -/// let _ptr = CopyTaggedPtr::<_, _, true>::new(reference, tag); -/// ``` -// For some reason miri does not get the compile error -// probably it `check`s instead of `build`ing? -#[cfg(not(miri))] -const _: () = (); - -#[cfg(test)] -mod tests; diff --git a/compiler/rustc_data_structures/src/tagged_ptr/copy/tests.rs b/compiler/rustc_data_structures/src/tagged_ptr/copy/tests.rs deleted file mode 100644 index 160af8a65d9c2..0000000000000 --- a/compiler/rustc_data_structures/src/tagged_ptr/copy/tests.rs +++ /dev/null @@ -1,50 +0,0 @@ -use std::ptr; - -use crate::hashes::Hash128; -use crate::stable_hasher::{HashStable, StableHasher}; -use crate::tagged_ptr::{CopyTaggedPtr, Pointer, Tag, Tag2}; - -#[test] -fn smoke() { - let value = 12u32; - let reference = &value; - let tag = Tag2::B01; - - let ptr = tag_ptr(reference, tag); - - assert_eq!(ptr.tag(), tag); - assert_eq!(*ptr, 12); - assert!(ptr::eq(ptr.pointer(), reference)); - - let copy = ptr; - - let mut ptr = ptr; - ptr.set_tag(Tag2::B00); - assert_eq!(ptr.tag(), Tag2::B00); - - assert_eq!(copy.tag(), tag); - assert_eq!(*copy, 12); - assert!(ptr::eq(copy.pointer(), reference)); -} - -#[test] -fn stable_hash_hashes_as_tuple() { - let hash_packed = { - let mut hasher = StableHasher::new(); - tag_ptr(&12, Tag2::B11).hash_stable(&mut (), &mut hasher); - hasher.finish::() - }; - - let hash_tupled = { - let mut hasher = StableHasher::new(); - (&12, Tag2::B11).hash_stable(&mut (), &mut hasher); - hasher.finish::() - }; - - assert_eq!(hash_packed, hash_tupled); -} - -/// Helper to create tagged pointers without specifying `COMPARE_PACKED` if it does not matter. -fn tag_ptr(ptr: P, tag: T) -> CopyTaggedPtr { - CopyTaggedPtr::new(ptr, tag) -} diff --git a/compiler/rustc_data_structures/src/tagged_ptr/drop.rs b/compiler/rustc_data_structures/src/tagged_ptr/drop.rs deleted file mode 100644 index 319a8cdd3990a..0000000000000 --- a/compiler/rustc_data_structures/src/tagged_ptr/drop.rs +++ /dev/null @@ -1,178 +0,0 @@ -use std::fmt; -use std::hash::{Hash, Hasher}; -use std::ops::{Deref, DerefMut}; - -use super::{CopyTaggedPtr, Pointer, Tag}; -use crate::stable_hasher::{HashStable, StableHasher}; - -/// A tagged pointer that supports pointers that implement [`Drop`]. -/// -/// This is essentially `{ pointer: P, tag: T }` packed in a single pointer. -/// -/// You should use [`CopyTaggedPtr`] instead of the this type in all cases -/// where `P` implements [`Copy`]. -/// -/// If `COMPARE_PACKED` is true, then the pointers will be compared and hashed without -/// unpacking. Otherwise we don't implement [`PartialEq`], [`Eq`] and [`Hash`]; -/// if you want that, wrap the [`TaggedPtr`]. -pub struct TaggedPtr -where - P: Pointer, - T: Tag, -{ - raw: CopyTaggedPtr, -} - -impl TaggedPtr -where - P: Pointer, - T: Tag, -{ - /// Tags `pointer` with `tag`. - #[inline] - pub fn new(pointer: P, tag: T) -> Self { - TaggedPtr { raw: CopyTaggedPtr::new(pointer, tag) } - } - - /// Retrieves the tag. - #[inline] - pub fn tag(&self) -> T { - self.raw.tag() - } - - /// Sets the tag to a new value. 
- #[inline] - pub fn set_tag(&mut self, tag: T) { - self.raw.set_tag(tag) - } -} - -impl Clone for TaggedPtr -where - P: Pointer + Clone, - T: Tag, -{ - fn clone(&self) -> Self { - let ptr = self.raw.with_pointer_ref(P::clone); - - Self::new(ptr, self.tag()) - } -} - -impl Deref for TaggedPtr -where - P: Pointer, - T: Tag, -{ - type Target = P::Target; - - #[inline] - fn deref(&self) -> &Self::Target { - self.raw.deref() - } -} - -impl DerefMut for TaggedPtr -where - P: Pointer + DerefMut, - T: Tag, -{ - #[inline] - fn deref_mut(&mut self) -> &mut Self::Target { - self.raw.deref_mut() - } -} - -impl Drop for TaggedPtr -where - P: Pointer, - T: Tag, -{ - fn drop(&mut self) { - // No need to drop the tag, as it's Copy - unsafe { - drop(P::from_ptr(self.raw.pointer_raw())); - } - } -} - -impl fmt::Debug for TaggedPtr -where - P: Pointer + fmt::Debug, - T: Tag + fmt::Debug, -{ - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.raw.with_pointer_ref(|ptr| { - f.debug_struct("TaggedPtr").field("pointer", ptr).field("tag", &self.tag()).finish() - }) - } -} - -impl PartialEq for TaggedPtr -where - P: Pointer, - T: Tag, -{ - #[inline] - fn eq(&self, other: &Self) -> bool { - self.raw.eq(&other.raw) - } -} - -impl Eq for TaggedPtr -where - P: Pointer, - T: Tag, -{ -} - -impl Hash for TaggedPtr -where - P: Pointer, - T: Tag, -{ - #[inline] - fn hash(&self, state: &mut H) { - self.raw.hash(state); - } -} - -impl HashStable for TaggedPtr -where - P: Pointer + HashStable, - T: Tag + HashStable, -{ - fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) { - self.raw.hash_stable(hcx, hasher); - } -} - -/// Test that `new` does not compile if there is not enough alignment for the -/// tag in the pointer. -/// -/// ```compile_fail,E0080 -/// use rustc_data_structures::tagged_ptr::{TaggedPtr, Tag}; -/// -/// #[derive(Copy, Clone, Debug, PartialEq, Eq)] -/// enum Tag2 { B00 = 0b00, B01 = 0b01, B10 = 0b10, B11 = 0b11 }; -/// -/// unsafe impl Tag for Tag2 { -/// const BITS: u32 = 2; -/// -/// fn into_usize(self) -> usize { todo!() } -/// unsafe fn from_usize(tag: usize) -> Self { todo!() } -/// } -/// -/// let value = 12u16; -/// let reference = &value; -/// let tag = Tag2::B01; -/// -/// let _ptr = TaggedPtr::<_, _, true>::new(reference, tag); -/// ``` -// For some reason miri does not get the compile error -// probably it `check`s instead of `build`ing? 
-#[cfg(not(miri))] -const _: () = (); - -#[cfg(test)] -mod tests; diff --git a/compiler/rustc_data_structures/src/tagged_ptr/drop/tests.rs b/compiler/rustc_data_structures/src/tagged_ptr/drop/tests.rs deleted file mode 100644 index 4d342c72cc52a..0000000000000 --- a/compiler/rustc_data_structures/src/tagged_ptr/drop/tests.rs +++ /dev/null @@ -1,72 +0,0 @@ -use std::ptr; -use std::sync::Arc; - -use crate::tagged_ptr::{Pointer, Tag, Tag2, TaggedPtr}; - -#[test] -fn smoke() { - let value = 12u32; - let reference = &value; - let tag = Tag2::B01; - - let ptr = tag_ptr(reference, tag); - - assert_eq!(ptr.tag(), tag); - assert_eq!(*ptr, 12); - - let clone = ptr.clone(); - assert_eq!(clone.tag(), tag); - assert_eq!(*clone, 12); - - let mut ptr = ptr; - ptr.set_tag(Tag2::B00); - assert_eq!(ptr.tag(), Tag2::B00); - - assert_eq!(clone.tag(), tag); - assert_eq!(*clone, 12); - assert!(ptr::eq(&*ptr, &*clone)) -} - -#[test] -fn boxed() { - let value = 12u32; - let boxed = Box::new(value); - let tag = Tag2::B01; - - let ptr = tag_ptr(boxed, tag); - - assert_eq!(ptr.tag(), tag); - assert_eq!(*ptr, 12); - - let clone = ptr.clone(); - assert_eq!(clone.tag(), tag); - assert_eq!(*clone, 12); - - let mut ptr = ptr; - ptr.set_tag(Tag2::B00); - assert_eq!(ptr.tag(), Tag2::B00); - - assert_eq!(clone.tag(), tag); - assert_eq!(*clone, 12); - assert!(!ptr::eq(&*ptr, &*clone)) -} - -#[test] -fn arclones() { - let value = 12u32; - let arc = Arc::new(value); - let tag = Tag2::B01; - - let ptr = tag_ptr(arc, tag); - - assert_eq!(ptr.tag(), tag); - assert_eq!(*ptr, 12); - - let clone = ptr.clone(); - assert!(ptr::eq(&*ptr, &*clone)) -} - -/// Helper to create tagged pointers without specifying `COMPARE_PACKED` if it does not matter. -fn tag_ptr(ptr: P, tag: T) -> TaggedPtr { - TaggedPtr::new(ptr, tag) -} diff --git a/compiler/rustc_data_structures/src/tagged_ptr/impl_tag.rs b/compiler/rustc_data_structures/src/tagged_ptr/impl_tag.rs deleted file mode 100644 index f17a0bf26d747..0000000000000 --- a/compiler/rustc_data_structures/src/tagged_ptr/impl_tag.rs +++ /dev/null @@ -1,144 +0,0 @@ -/// Implements [`Tag`] for a given type. -/// -/// You can use `impl_tag` on structs and enums. -/// You need to specify the type and all its possible values, -/// which can only be paths with optional fields. -/// -/// [`Tag`]: crate::tagged_ptr::Tag -/// -/// # Examples -/// -/// Basic usage: -/// -/// ``` -/// #![feature(macro_metavar_expr)] -/// use rustc_data_structures::{impl_tag, tagged_ptr::Tag}; -/// -/// #[derive(Copy, Clone, PartialEq, Debug)] -/// enum SomeTag { -/// A, -/// B, -/// X { v: bool }, -/// Y(bool, bool), -/// } -/// -/// impl_tag! 
{ -/// // The type for which the `Tag` will be implemented -/// impl Tag for SomeTag; -/// // You need to specify all possible tag values: -/// SomeTag::A, // 0 -/// SomeTag::B, // 1 -/// // For variants with fields, you need to specify the fields: -/// SomeTag::X { v: true }, // 2 -/// SomeTag::X { v: false }, // 3 -/// // For tuple variants use named syntax: -/// SomeTag::Y { 0: true, 1: true }, // 4 -/// SomeTag::Y { 0: false, 1: true }, // 5 -/// SomeTag::Y { 0: true, 1: false }, // 6 -/// SomeTag::Y { 0: false, 1: false }, // 7 -/// } -/// -/// // Tag values are assigned in order: -/// assert_eq!(SomeTag::A.into_usize(), 0); -/// assert_eq!(SomeTag::X { v: false }.into_usize(), 3); -/// assert_eq!(SomeTag::Y(false, true).into_usize(), 5); -/// -/// assert_eq!(unsafe { SomeTag::from_usize(1) }, SomeTag::B); -/// assert_eq!(unsafe { SomeTag::from_usize(2) }, SomeTag::X { v: true }); -/// assert_eq!(unsafe { SomeTag::from_usize(7) }, SomeTag::Y(false, false)); -/// ``` -/// -/// Structs are supported: -/// -/// ``` -/// #![feature(macro_metavar_expr)] -/// # use rustc_data_structures::impl_tag; -/// #[derive(Copy, Clone)] -/// struct Flags { a: bool, b: bool } -/// -/// impl_tag! { -/// impl Tag for Flags; -/// Flags { a: true, b: true }, -/// Flags { a: false, b: true }, -/// Flags { a: true, b: false }, -/// Flags { a: false, b: false }, -/// } -/// ``` -/// -/// Not specifying all values results in a compile error: -/// -/// ```compile_fail,E0004 -/// #![feature(macro_metavar_expr)] -/// # use rustc_data_structures::impl_tag; -/// #[derive(Copy, Clone)] -/// enum E { -/// A, -/// B, -/// } -/// -/// impl_tag! { -/// impl Tag for E; -/// E::A, -/// } -/// ``` -#[macro_export] -macro_rules! impl_tag { - ( - impl Tag for $Self:ty; - $( - $($path:ident)::* $( { $( $fields:tt )* })?, - )* - ) => { - // Safety: - // `bits_for_tags` is called on the same `${index()}`-es as - // `into_usize` returns, thus `BITS` constant is correct. - unsafe impl $crate::tagged_ptr::Tag for $Self { - const BITS: u32 = $crate::tagged_ptr::bits_for_tags(&[ - $( - ${index()}, - $( ${ignore($path)} )* - )* - ]); - - #[inline] - fn into_usize(self) -> usize { - // This forbids use of repeating patterns (`Enum::V`&`Enum::V`, etc) - // (or at least it should, see ) - #[forbid(unreachable_patterns)] - match self { - // `match` is doing heavy lifting here, by requiring exhaustiveness - $( - $($path)::* $( { $( $fields )* } )? => ${index()}, - )* - } - } - - #[inline] - unsafe fn from_usize(tag: usize) -> Self { - match tag { - $( - ${index()} => $($path)::* $( { $( $fields )* } )?, - )* - - // Safety: - // `into_usize` only returns `${index()}` of the same - // repetition as we are filtering above, thus if this is - // reached, the safety contract of this function was - // already breached. - _ => unsafe { - debug_assert!( - false, - "invalid tag: {tag}\ - (this is a bug in the caller of `from_usize`)" - ); - std::hint::unreachable_unchecked() - }, - } - } - - } - }; -} - -#[cfg(test)] -mod tests; diff --git a/compiler/rustc_data_structures/src/tagged_ptr/impl_tag/tests.rs b/compiler/rustc_data_structures/src/tagged_ptr/impl_tag/tests.rs deleted file mode 100644 index 62c926153e1e9..0000000000000 --- a/compiler/rustc_data_structures/src/tagged_ptr/impl_tag/tests.rs +++ /dev/null @@ -1,34 +0,0 @@ -#[test] -fn bits_constant() { - use crate::tagged_ptr::Tag; - - #[derive(Copy, Clone)] - struct Unit; - impl_tag! 
{ impl Tag for Unit; Unit, } - assert_eq!(Unit::BITS, 0); - - #[derive(Copy, Clone)] - enum Enum3 { - A, - B, - C, - } - impl_tag! { impl Tag for Enum3; Enum3::A, Enum3::B, Enum3::C, } - assert_eq!(Enum3::BITS, 2); - - #[derive(Copy, Clone)] - struct Eight(bool, bool, bool); - impl_tag! { - impl Tag for Eight; - Eight { 0: true, 1: true, 2: true }, - Eight { 0: true, 1: true, 2: false }, - Eight { 0: true, 1: false, 2: true }, - Eight { 0: true, 1: false, 2: false }, - Eight { 0: false, 1: true, 2: true }, - Eight { 0: false, 1: true, 2: false }, - Eight { 0: false, 1: false, 2: true }, - Eight { 0: false, 1: false, 2: false }, - } - - assert_eq!(Eight::BITS, 3); -} diff --git a/compiler/rustc_data_structures/src/tagged_ptr/tests.rs b/compiler/rustc_data_structures/src/tagged_ptr/tests.rs new file mode 100644 index 0000000000000..b1bdee18d6d43 --- /dev/null +++ b/compiler/rustc_data_structures/src/tagged_ptr/tests.rs @@ -0,0 +1,105 @@ +use std::ptr; + +use super::*; +use crate::hashes::Hash128; +use crate::stable_hasher::{HashStable, StableHasher}; + +/// A tag type used in [`TaggedRef`] tests. +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum Tag2 { + B00 = 0b00, + B01 = 0b01, + B10 = 0b10, + B11 = 0b11, +} + +unsafe impl Tag for Tag2 { + const BITS: u32 = 2; + + fn into_usize(self) -> usize { + self as _ + } + + unsafe fn from_usize(tag: usize) -> Self { + match tag { + 0b00 => Tag2::B00, + 0b01 => Tag2::B01, + 0b10 => Tag2::B10, + 0b11 => Tag2::B11, + _ => unreachable!(), + } + } +} + +impl crate::stable_hasher::HashStable for Tag2 { + fn hash_stable(&self, hcx: &mut HCX, hasher: &mut crate::stable_hasher::StableHasher) { + (*self as u8).hash_stable(hcx, hasher); + } +} + +#[test] +fn smoke() { + let value = 12u32; + let reference = &value; + let tag = Tag2::B01; + + let ptr = TaggedRef::new(reference, tag); + + assert_eq!(ptr.tag(), tag); + assert_eq!(*ptr, 12); + assert!(ptr::eq(ptr.pointer(), reference)); + + let copy = ptr; + + let mut ptr = ptr; + ptr.set_tag(Tag2::B00); + assert_eq!(ptr.tag(), Tag2::B00); + + assert_eq!(copy.tag(), tag); + assert_eq!(*copy, 12); + assert!(ptr::eq(copy.pointer(), reference)); +} + +#[test] +fn stable_hash_hashes_as_tuple() { + let hash_packed = { + let mut hasher = StableHasher::new(); + TaggedRef::new(&12, Tag2::B11).hash_stable(&mut (), &mut hasher); + hasher.finish::() + }; + + let hash_tupled = { + let mut hasher = StableHasher::new(); + (&12, Tag2::B11).hash_stable(&mut (), &mut hasher); + hasher.finish::() + }; + + assert_eq!(hash_packed, hash_tupled); +} + +/// Test that `new` does not compile if there is not enough alignment for the +/// tag in the pointer. +/// +/// ```compile_fail,E0080 +/// use rustc_data_structures::tagged_ptr::{TaggedRef, Tag}; +/// +/// #[derive(Copy, Clone, Debug, PartialEq, Eq)] +/// enum Tag2 { B00 = 0b00, B01 = 0b01, B10 = 0b10, B11 = 0b11 }; +/// +/// unsafe impl Tag for Tag2 { +/// const BITS: u32 = 2; +/// +/// fn into_usize(self) -> usize { todo!() } +/// unsafe fn from_usize(tag: usize) -> Self { todo!() } +/// } +/// +/// let value = 12u16; +/// let reference = &value; +/// let tag = Tag2::B01; +/// +/// let _ptr = TaggedRef::<_, _, true>::new(reference, tag); +/// ``` +// For some reason miri does not get the compile error +// probably it `check`s instead of `build`ing? 
+#[cfg(not(miri))] +const _: () = (); diff --git a/compiler/rustc_data_structures/src/work_queue.rs b/compiler/rustc_data_structures/src/work_queue.rs index ca052e2eac6db..815756edfeba5 100644 --- a/compiler/rustc_data_structures/src/work_queue.rs +++ b/compiler/rustc_data_structures/src/work_queue.rs @@ -1,7 +1,7 @@ use std::collections::VecDeque; use rustc_index::Idx; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; /// A work queue is a handy data structure for tracking work left to /// do. (For example, basic blocks left to process.) It is basically a @@ -11,14 +11,14 @@ use rustc_index::bit_set::BitSet; /// and also use a bit set to track occupancy. pub struct WorkQueue { deque: VecDeque, - set: BitSet, + set: DenseBitSet, } impl WorkQueue { /// Creates a new work queue that starts empty, where elements range from (0..len). #[inline] pub fn with_none(len: usize) -> Self { - WorkQueue { deque: VecDeque::with_capacity(len), set: BitSet::new_empty(len) } + WorkQueue { deque: VecDeque::with_capacity(len), set: DenseBitSet::new_empty(len) } } /// Attempt to enqueue `element` in the work queue. Returns false if it was already present. diff --git a/compiler/rustc_driver_impl/src/lib.rs b/compiler/rustc_driver_impl/src/lib.rs index 9728f848ce88e..0413e5e86348b 100644 --- a/compiler/rustc_driver_impl/src/lib.rs +++ b/compiler/rustc_driver_impl/src/lib.rs @@ -26,7 +26,7 @@ use std::fmt::Write as _; use std::fs::{self, File}; use std::io::{self, IsTerminal, Read, Write}; use std::panic::{self, PanicHookInfo, catch_unwind}; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; use std::process::{self, Command, Stdio}; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::{Arc, OnceLock}; @@ -45,15 +45,15 @@ use rustc_errors::registry::Registry; use rustc_errors::{ColorConfig, DiagCtxt, ErrCode, FatalError, PResult, markdown}; use rustc_feature::find_gated_cfg; use rustc_interface::util::{self, get_codegen_backend}; -use rustc_interface::{Linker, interface, passes}; +use rustc_interface::{Linker, create_and_enter_global_ctxt, interface, passes}; use rustc_lint::unerased_lint_store; use rustc_metadata::creader::MetadataLoader; use rustc_metadata::locator; use rustc_middle::ty::TyCtxt; use rustc_parse::{new_parser_from_file, new_parser_from_source_str, unwrap_or_emit_fatal}; use rustc_session::config::{ - CG_OPTIONS, ErrorOutputType, Input, OutFileName, OutputType, UnstableOptions, Z_OPTIONS, - nightly_options, + CG_OPTIONS, ErrorOutputType, Input, OptionDesc, OutFileName, OutputType, UnstableOptions, + Z_OPTIONS, nightly_options, }; use rustc_session::getopts::{self, Matches}; use rustc_session::lint::{Lint, LintId}; @@ -387,76 +387,69 @@ fn run_compiler( return early_exit(); } - let linker = compiler.enter(|queries| { + // Parse the crate root source code (doesn't parse submodules yet) + // Everything else is parsed during macro expansion. 
+ let krate = passes::parse(sess); + + // If pretty printing is requested: Figure out the representation, print it and exit + if let Some(pp_mode) = sess.opts.pretty { + if pp_mode.needs_ast_map() { + create_and_enter_global_ctxt(compiler, krate, |tcx| { + tcx.ensure().early_lint_checks(()); + pretty::print(sess, pp_mode, pretty::PrintExtra::NeedsAstMap { tcx }); + passes::write_dep_info(tcx); + }); + } else { + pretty::print(sess, pp_mode, pretty::PrintExtra::AfterParsing { krate: &krate }); + } + trace!("finished pretty-printing"); + return early_exit(); + } + + if callbacks.after_crate_root_parsing(compiler, &krate) == Compilation::Stop { + return early_exit(); + } + + if sess.opts.unstable_opts.parse_crate_root_only { + return early_exit(); + } + + let linker = create_and_enter_global_ctxt(compiler, krate, |tcx| { let early_exit = || { sess.dcx().abort_if_errors(); None }; - // Parse the crate root source code (doesn't parse submodules yet) - // Everything else is parsed during macro expansion. - queries.parse(); - - // If pretty printing is requested: Figure out the representation, print it and exit - if let Some(pp_mode) = sess.opts.pretty { - if pp_mode.needs_ast_map() { - queries.global_ctxt().enter(|tcx| { - tcx.ensure().early_lint_checks(()); - pretty::print(sess, pp_mode, pretty::PrintExtra::NeedsAstMap { tcx }); - passes::write_dep_info(tcx); - }); - } else { - let krate = queries.parse(); - pretty::print(sess, pp_mode, pretty::PrintExtra::AfterParsing { - krate: &*krate.borrow(), - }); - } - trace!("finished pretty-printing"); + // Make sure name resolution and macro expansion is run. + let _ = tcx.resolver_for_lowering(); + + if let Some(metrics_dir) = &sess.opts.unstable_opts.metrics_dir { + dump_feature_usage_metrics(tcx, metrics_dir); + } + + if callbacks.after_expansion(compiler, tcx) == Compilation::Stop { return early_exit(); } - if callbacks.after_crate_root_parsing(compiler, &*queries.parse().borrow()) - == Compilation::Stop + passes::write_dep_info(tcx); + + if sess.opts.output_types.contains_key(&OutputType::DepInfo) + && sess.opts.output_types.len() == 1 { return early_exit(); } - if sess.opts.unstable_opts.parse_crate_root_only { + if sess.opts.unstable_opts.no_analysis { return early_exit(); } - queries.global_ctxt().enter(|tcx| { - // Make sure name resolution and macro expansion is run. 
- let _ = tcx.resolver_for_lowering(); - - if let Some(metrics_dir) = &sess.opts.unstable_opts.metrics_dir { - dump_feature_usage_metrics(tcx, metrics_dir); - } + tcx.ensure().analysis(()); - if callbacks.after_expansion(compiler, tcx) == Compilation::Stop { - return early_exit(); - } - - passes::write_dep_info(tcx); - - if sess.opts.output_types.contains_key(&OutputType::DepInfo) - && sess.opts.output_types.len() == 1 - { - return early_exit(); - } - - if sess.opts.unstable_opts.no_analysis { - return early_exit(); - } - - tcx.ensure().analysis(()); - - if callbacks.after_analysis(compiler, tcx) == Compilation::Stop { - return early_exit(); - } + if callbacks.after_analysis(compiler, tcx) == Compilation::Stop { + return early_exit(); + } - Some(Linker::codegen_and_build_linker(tcx, &*compiler.codegen_backend)) - }) + Some(Linker::codegen_and_build_linker(tcx, &*compiler.codegen_backend)) }); // Linking is done outside the `compiler.enter()` so that the @@ -467,7 +460,7 @@ fn run_compiler( }) } -fn dump_feature_usage_metrics(tcxt: TyCtxt<'_>, metrics_dir: &PathBuf) { +fn dump_feature_usage_metrics(tcxt: TyCtxt<'_>, metrics_dir: &Path) { let output_filenames = tcxt.output_filenames(()); let mut metrics_file_name = std::ffi::OsString::from("unstable_feature_usage_metrics-"); let mut metrics_path = output_filenames.with_directory_and_extension(metrics_dir, "json"); @@ -1131,14 +1124,6 @@ pub fn describe_flag_categories(early_dcx: &EarlyDiagCtxt, matches: &Matches) -> return true; } - if cg_flags.iter().any(|x| *x == "no-stack-check") { - early_dcx.early_warn("the `-Cno-stack-check` flag is deprecated and does nothing"); - } - - if cg_flags.iter().any(|x| x.starts_with("inline-threshold")) { - early_dcx.early_warn("the `-Cinline-threshold` flag is deprecated and does nothing (consider using `-Cllvm-args=--inline-threshold=...`)"); - } - if cg_flags.iter().any(|x| *x == "passes=list") { let backend_name = debug_flags.iter().find_map(|x| x.strip_prefix("codegen-backend=")); @@ -1163,18 +1148,16 @@ fn describe_codegen_flags() { print_flag_list("-C", config::CG_OPTIONS); } -fn print_flag_list( - cmdline_opt: &str, - flag_list: &[(&'static str, T, &'static str, &'static str)], -) { - let max_len = flag_list.iter().map(|&(name, _, _, _)| name.chars().count()).max().unwrap_or(0); +fn print_flag_list(cmdline_opt: &str, flag_list: &[OptionDesc]) { + let max_len = + flag_list.iter().map(|opt_desc| opt_desc.name().chars().count()).max().unwrap_or(0); - for &(name, _, _, desc) in flag_list { + for opt_desc in flag_list { safe_println!( " {} {:>width$}=val -- {}", cmdline_opt, - name.replace('_', "-"), - desc, + opt_desc.name().replace('_', "-"), + opt_desc.desc(), width = max_len ); } @@ -1228,8 +1211,8 @@ pub fn handle_options(early_dcx: &EarlyDiagCtxt, args: &[String]) -> Option = match e { getopts::Fail::UnrecognizedOption(ref opt) => CG_OPTIONS .iter() - .map(|&(name, ..)| ('C', name)) - .chain(Z_OPTIONS.iter().map(|&(name, ..)| ('Z', name))) + .map(|opt_desc| ('C', opt_desc.name())) + .chain(Z_OPTIONS.iter().map(|opt_desc| ('Z', opt_desc.name()))) .find(|&(_, name)| *opt == name.replace('_', "-")) .map(|(flag, _)| format!("{e}. Did you mean `-{flag} {opt}`?")), getopts::Fail::ArgumentMissing(ref opt) => { @@ -1395,7 +1378,13 @@ pub fn install_ice_hook( // opt in to less-verbose backtraces by manually setting "RUST_BACKTRACE" // (e.g. 
`RUST_BACKTRACE=1`) if env::var_os("RUST_BACKTRACE").is_none() { - panic::set_backtrace_style(panic::BacktraceStyle::Full); + // HACK: this check is extremely dumb, but we don't really need it to be smarter since this should only happen in the test suite anyway. + let ui_testing = std::env::args().any(|arg| arg == "-Zui-testing"); + if env!("CFG_RELEASE_CHANNEL") == "dev" && !ui_testing { + panic::set_backtrace_style(panic::BacktraceStyle::Short); + } else { + panic::set_backtrace_style(panic::BacktraceStyle::Full); + } } let using_internal_features = Arc::new(std::sync::atomic::AtomicBool::default()); diff --git a/compiler/rustc_driver_impl/src/pretty.rs b/compiler/rustc_driver_impl/src/pretty.rs index 5df960be307b0..93f3d2ab911f8 100644 --- a/compiler/rustc_driver_impl/src/pretty.rs +++ b/compiler/rustc_driver_impl/src/pretty.rs @@ -4,15 +4,13 @@ use std::cell::Cell; use std::fmt::Write; use rustc_ast_pretty::pprust as pprust_ast; -use rustc_errors::FatalError; use rustc_middle::bug; use rustc_middle::mir::{write_mir_graphviz, write_mir_pretty}; use rustc_middle::ty::{self, TyCtxt}; use rustc_session::Session; use rustc_session::config::{OutFileName, PpHirMode, PpMode, PpSourceMode}; use rustc_smir::rustc_internal::pretty::write_smir_pretty; -use rustc_span::FileName; -use rustc_span::symbol::Ident; +use rustc_span::{FileName, Ident}; use tracing::debug; use {rustc_ast as ast, rustc_hir_pretty as pprust_hir}; @@ -312,9 +310,7 @@ pub fn print<'tcx>(sess: &Session, ppm: PpMode, ex: PrintExtra<'tcx>) { let tcx = ex.tcx(); let mut out = String::new(); rustc_hir_analysis::check_crate(tcx); - if tcx.dcx().has_errors().is_some() { - FatalError.raise(); - } + tcx.dcx().abort_if_errors(); debug!("pretty printing THIR tree"); for did in tcx.hir().body_owners() { let _ = writeln!(out, "{:?}:\n{}\n", did, tcx.thir_tree(did)); @@ -325,9 +321,7 @@ pub fn print<'tcx>(sess: &Session, ppm: PpMode, ex: PrintExtra<'tcx>) { let tcx = ex.tcx(); let mut out = String::new(); rustc_hir_analysis::check_crate(tcx); - if tcx.dcx().has_errors().is_some() { - FatalError.raise(); - } + tcx.dcx().abort_if_errors(); debug!("pretty printing THIR flat"); for did in tcx.hir().body_owners() { let _ = writeln!(out, "{:?}:\n{}\n", did, tcx.thir_flat(did)); diff --git a/compiler/rustc_driver_impl/src/signal_handler.rs b/compiler/rustc_driver_impl/src/signal_handler.rs index d4f8199390cc5..08b7d937661fb 100644 --- a/compiler/rustc_driver_impl/src/signal_handler.rs +++ b/compiler/rustc_driver_impl/src/signal_handler.rs @@ -2,7 +2,7 @@ //! Primarily used to extract a backtrace from stack overflow use std::alloc::{Layout, alloc}; -use std::{fmt, mem, ptr}; +use std::{fmt, mem, ptr, slice}; use rustc_interface::util::{DEFAULT_STACK_SIZE, STACK_SIZE}; @@ -35,20 +35,22 @@ macro raw_errln($tokens:tt) { } /// Signal handler installed for SIGSEGV -// FIXME(static_mut_refs): Do not allow `static_mut_refs` lint -#[allow(static_mut_refs)] -extern "C" fn print_stack_trace(_: libc::c_int) { +/// +/// # Safety +/// +/// Caller must ensure that this function is not re-entered. +unsafe extern "C" fn print_stack_trace(_: libc::c_int) { const MAX_FRAMES: usize = 256; - // Reserve data segment so we don't have to malloc in a signal handler, which might fail - // in incredibly undesirable and unexpected ways due to e.g. 
the allocator deadlocking - static mut STACK_TRACE: [*mut libc::c_void; MAX_FRAMES] = [ptr::null_mut(); MAX_FRAMES]; let stack = unsafe { + // Reserve data segment so we don't have to malloc in a signal handler, which might fail + // in incredibly undesirable and unexpected ways due to e.g. the allocator deadlocking + static mut STACK_TRACE: [*mut libc::c_void; MAX_FRAMES] = [ptr::null_mut(); MAX_FRAMES]; // Collect return addresses - let depth = libc::backtrace(STACK_TRACE.as_mut_ptr(), MAX_FRAMES as i32); + let depth = libc::backtrace(&raw mut STACK_TRACE as _, MAX_FRAMES as i32); if depth == 0 { return; } - &STACK_TRACE.as_slice()[0..(depth as _)] + slice::from_raw_parts(&raw const STACK_TRACE as _, depth as _) }; // Just a stack trace is cryptic. Explain what we're doing. diff --git a/compiler/rustc_error_codes/src/error_codes/E0015.md b/compiler/rustc_error_codes/src/error_codes/E0015.md index ac78f66adada0..244cc47624388 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0015.md +++ b/compiler/rustc_error_codes/src/error_codes/E0015.md @@ -7,7 +7,7 @@ fn create_some() -> Option { Some(1) } -// error: cannot call non-const fn `create_some` in constants +// error: cannot call non-const function `create_some` in constants const FOO: Option = create_some(); ``` diff --git a/compiler/rustc_error_codes/src/error_codes/E0207.md b/compiler/rustc_error_codes/src/error_codes/E0207.md index 95e7c9fc76ce2..5b35748f4723c 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0207.md +++ b/compiler/rustc_error_codes/src/error_codes/E0207.md @@ -195,6 +195,30 @@ impl<'a> Contains for Foo { Please note that unconstrained lifetime parameters are not supported if they are being used by an associated type. +In cases where the associated type's lifetime is meant to be tied to the the +self type, and none of the methods on the trait need ownership or different +mutability, then an option is to implement the trait on a borrowed type: + +```rust +struct Foo(i32); + +trait Contents { + type Item; + + fn get(&self) -> Self::Item; +} + +// Note the lifetime `'a` is used both for the self type... +impl<'a> Contents for &'a Foo { + // ...and the associated type. + type Item = &'a i32; + + fn get(&self) -> Self::Item { + &self.0 + } +} +``` + ### Additional information For more information, please see [RFC 447]. diff --git a/compiler/rustc_error_codes/src/error_codes/E0253.md b/compiler/rustc_error_codes/src/error_codes/E0253.md index aea51d4023821..705d1bfc53e59 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0253.md +++ b/compiler/rustc_error_codes/src/error_codes/E0253.md @@ -1,19 +1,19 @@ -Attempt was made to import an unimportable value. This can happen when trying -to import a method from a trait. +Attempt was made to import an unimportable type. This can happen when trying +to import a type from a trait. Erroneous code example: ```compile_fail,E0253 mod foo { pub trait MyTrait { - fn do_something(); + type SomeType; } } -use foo::MyTrait::do_something; -// error: `do_something` is not directly importable +use foo::MyTrait::SomeType; +// error: `SomeType` is not directly importable fn main() {} ``` -It's invalid to directly import methods belonging to a trait or concrete type. +It's invalid to directly import types belonging to a trait. 
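For comparison with the revised E0253 text, a minimal standalone sketch of the working alternative (the module, trait, and type names here are illustrative, not taken from the patch) is to import the trait and then name the associated type through an implementing type:

```rust
mod foo {
    pub trait MyTrait {
        type SomeType;
    }
}

// Import the trait itself rather than the associated type...
use foo::MyTrait;

struct Bar;

impl MyTrait for Bar {
    // ...and give it a concrete associated type.
    type SomeType = u32;
}

fn main() {
    // The associated type is then named through an implementing type.
    let value: <Bar as MyTrait>::SomeType = 5;
    let _ = value;
}
```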
diff --git a/compiler/rustc_error_codes/src/error_codes/E0665.md b/compiler/rustc_error_codes/src/error_codes/E0665.md index ae54d6d15798d..caa944233778b 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0665.md +++ b/compiler/rustc_error_codes/src/error_codes/E0665.md @@ -1,10 +1,9 @@ -#### Note: this error code is no longer emitted by the compiler. - -The `Default` trait was derived on an enum. +The `Default` trait was derived on an enum without specifying the default +variant. Erroneous code example: -```compile_fail +```compile_fail,E0665 #[derive(Default)] enum Food { Sweet, @@ -16,18 +15,30 @@ The `Default` cannot be derived on an enum for the simple reason that the compiler doesn't know which value to pick by default whereas it can for a struct as long as all its fields implement the `Default` trait as well. -If you still want to implement `Default` on your enum, you'll have to do it "by -hand": +For the case where the desired default variant has no payload, you can +annotate it with `#[default]` to derive it: ``` +#[derive(Default)] enum Food { + #[default] Sweet, Salty, } +``` + +In the case where the default variant does have a payload, you will have to +implement `Default` on your enum manually: + +``` +enum Food { + Sweet(i32), + Salty, +} impl Default for Food { fn default() -> Food { - Food::Sweet + Food::Sweet(1) } } ``` diff --git a/compiler/rustc_error_codes/src/error_codes/E0788.md b/compiler/rustc_error_codes/src/error_codes/E0788.md index d655e51fa6636..ba138aed2d12f 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0788.md +++ b/compiler/rustc_error_codes/src/error_codes/E0788.md @@ -1,26 +1,24 @@ -A `#[coverage]` attribute was applied to something which does not show up -in code coverage, or is too granular to be excluded from the coverage report. +A `#[coverage(off|on)]` attribute was found in a position where it is not +allowed. -For now, this attribute can only be applied to function, method, and closure -definitions. In the future, it may be added to statements, blocks, and -expressions, and for the time being, using this attribute in those places -will just emit an `unused_attributes` lint instead of this error. +Coverage attributes can be applied to: +- Function and method declarations that have a body, including trait methods + that have a default implementation. +- Closure expressions, in situations where attributes can be applied to + expressions. +- `impl` blocks (inherent or trait), and modules. Example of erroneous code: ```compile_fail,E0788 -#[coverage(off)] -struct Foo; - -#[coverage(on)] -const FOO: Foo = Foo; +unsafe extern "C" { + #[coverage(off)] + fn foreign_fn(); +} ``` -`#[coverage(off)]` tells the compiler to not generate coverage instrumentation -for a piece of code when the `-C instrument-coverage` flag is passed. Things -like structs and consts are not coverable code, and thus cannot do anything -with this attribute. - -If you wish to apply this attribute to all methods in an impl or module, -manually annotate each method; it is not possible to annotate the entire impl -with a `#[coverage]` attribute. +When using the `-C instrument-coverage` flag, coverage attributes act as a +hint to the compiler that it should instrument or not instrument the +corresponding function or enclosed functions. The precise effect of applying +a coverage attribute is not guaranteed and may change in future compiler +versions. 
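Conversely, a short sketch of positions the revised E0788 text does allow, assuming a toolchain where the `coverage` attribute is usable (on older nightlies it still sits behind `#![feature(coverage_attribute)]`):

```rust
// Allowed positions per the text above: modules, impl blocks, and
// functions/methods that have a body.
#[coverage(off)]
mod perf_sensitive {
    pub struct Counter(pub u64);

    #[coverage(off)]
    impl Counter {
        // A nested `on` re-enables instrumentation for this one method.
        #[coverage(on)]
        pub fn bump(&mut self) {
            self.0 += 1;
        }
    }
}

fn main() {
    let mut c = perf_sensitive::Counter(0);
    c.bump();
    assert_eq!(c.0, 1);
}
```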
diff --git a/compiler/rustc_error_messages/src/lib.rs b/compiler/rustc_error_messages/src/lib.rs index 74b6d63365a58..a51c4140e1760 100644 --- a/compiler/rustc_error_messages/src/lib.rs +++ b/compiler/rustc_error_messages/src/lib.rs @@ -357,9 +357,9 @@ impl From> for DiagMessage { /// subdiagnostic derive refers to typed identifiers that are `DiagMessage`s, so need to be /// able to convert between these, as much as they'll be converted back into `DiagMessage` /// using `with_subdiagnostic_message` eventually. Don't use this other than for the derive. -impl Into for DiagMessage { - fn into(self) -> SubdiagMessage { - match self { +impl From for SubdiagMessage { + fn from(val: DiagMessage) -> Self { + match val { DiagMessage::Str(s) => SubdiagMessage::Str(s), DiagMessage::Translated(s) => SubdiagMessage::Translated(s), DiagMessage::FluentIdentifier(id, None) => SubdiagMessage::FluentIdentifier(id), diff --git a/compiler/rustc_errors/Cargo.toml b/compiler/rustc_errors/Cargo.toml index 06bae57638f3c..091fbe9b72ed2 100644 --- a/compiler/rustc_errors/Cargo.toml +++ b/compiler/rustc_errors/Cargo.toml @@ -10,12 +10,14 @@ derive_setters = "0.1.6" rustc_abi = { path = "../rustc_abi" } rustc_ast = { path = "../rustc_ast" } rustc_ast_pretty = { path = "../rustc_ast_pretty" } +rustc_attr_data_structures = { path = "../rustc_attr_data_structures" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_error_codes = { path = "../rustc_error_codes" } rustc_error_messages = { path = "../rustc_error_messages" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } rustc_hir = { path = "../rustc_hir" } rustc_index = { path = "../rustc_index" } +rustc_lexer = { path = "../rustc_lexer" } rustc_lint_defs = { path = "../rustc_lint_defs" } rustc_macros = { path = "../rustc_macros" } rustc_serialize = { path = "../rustc_serialize" } diff --git a/compiler/rustc_errors/src/diagnostic.rs b/compiler/rustc_errors/src/diagnostic.rs index 4352de3ad25fe..797dcd7b4d1df 100644 --- a/compiler/rustc_errors/src/diagnostic.rs +++ b/compiler/rustc_errors/src/diagnostic.rs @@ -11,8 +11,7 @@ use rustc_error_messages::{FluentValue, fluent_value_from_str_list_sep_by_and}; use rustc_lint_defs::Applicability; use rustc_macros::{Decodable, Encodable}; use rustc_span::source_map::Spanned; -use rustc_span::symbol::Symbol; -use rustc_span::{DUMMY_SP, Span}; +use rustc_span::{DUMMY_SP, Span, Symbol}; use tracing::debug; use crate::snippet::Style; @@ -157,9 +156,9 @@ impl IntoDiagArg for DiagArgValue { } } -impl Into> for DiagArgValue { - fn into(self) -> FluentValue<'static> { - match self { +impl From for FluentValue<'static> { + fn from(val: DiagArgValue) -> Self { + match val { DiagArgValue::Str(s) => From::from(s), DiagArgValue::Number(n) => From::from(n), DiagArgValue::StrListSepByAnd(l) => fluent_value_from_str_list_sep_by_and(l), @@ -881,7 +880,7 @@ impl<'a, G: EmissionGuarantee> Diag<'a, G> { ) } } - /// Show a suggestion that has multiple parts to it, always as it's own subdiagnostic. + /// Show a suggestion that has multiple parts to it, always as its own subdiagnostic. /// In other words, multiple changes need to be applied as part of this suggestion. 
#[rustc_lint_diagnostics] pub fn multipart_suggestion_verbose( diff --git a/compiler/rustc_errors/src/diagnostic_impls.rs b/compiler/rustc_errors/src/diagnostic_impls.rs index b451037132397..dfa54b39e7911 100644 --- a/compiler/rustc_errors/src/diagnostic_impls.rs +++ b/compiler/rustc_errors/src/diagnostic_impls.rs @@ -8,10 +8,10 @@ use std::process::ExitStatus; use rustc_abi::TargetDataLayoutErrors; use rustc_ast::util::parser::ExprPrecedence; use rustc_ast_pretty::pprust; +use rustc_attr_data_structures::RustcVersion; use rustc_macros::Subdiagnostic; -use rustc_span::Span; use rustc_span::edition::Edition; -use rustc_span::symbol::{Ident, MacroRulesNormalizedIdent, Symbol}; +use rustc_span::{Ident, MacroRulesNormalizedIdent, Span, Symbol}; use rustc_target::spec::{PanicStrategy, SplitDebuginfo, StackProtector, TargetTuple}; use rustc_type_ir::{ClosureKind, FloatTy}; use {rustc_ast as ast, rustc_hir as hir}; @@ -96,6 +96,12 @@ into_diag_arg_using_display!( ErrCode, ); +impl IntoDiagArg for RustcVersion { + fn into_diag_arg(self) -> DiagArgValue { + DiagArgValue::Str(Cow::Owned(self.to_string())) + } +} + impl IntoDiagArg for rustc_type_ir::TraitRef { fn into_diag_arg(self) -> DiagArgValue { self.to_string().into_diag_arg() diff --git a/compiler/rustc_errors/src/emitter.rs b/compiler/rustc_errors/src/emitter.rs index ac2f91cdeb3fe..f938352820df7 100644 --- a/compiler/rustc_errors/src/emitter.rs +++ b/compiler/rustc_errors/src/emitter.rs @@ -19,6 +19,7 @@ use derive_setters::Setters; use rustc_data_structures::fx::{FxHashMap, FxIndexMap, FxIndexSet}; use rustc_data_structures::sync::{DynSend, IntoDynSyncSend, Lrc}; use rustc_error_messages::{FluentArgs, SpanLabel}; +use rustc_lexer; use rustc_lint_defs::pluralize; use rustc_span::hygiene::{ExpnKind, MacroKind}; use rustc_span::source_map::SourceMap; @@ -1698,9 +1699,14 @@ impl HumanEmitter { if let Some(source_string) = line.line_index.checked_sub(1).and_then(|l| file.get_line(l)) { + // Whitespace can only be removed (aka considered leading) + // if the lexer considers it whitespace. + // non-rustc_lexer::is_whitespace() chars are reported as an + // error (ex. no-break-spaces \u{a0}), and thus can't be considered + // for removal during error reporting. let leading_whitespace = source_string .chars() - .take_while(|c| c.is_whitespace()) + .take_while(|c| rustc_lexer::is_whitespace(*c)) .map(|c| { match c { // Tabs are displayed as 4 spaces @@ -1709,7 +1715,7 @@ impl HumanEmitter { } }) .sum(); - if source_string.chars().any(|c| !c.is_whitespace()) { + if source_string.chars().any(|c| !rustc_lexer::is_whitespace(c)) { whitespace_margin = min(whitespace_margin, leading_whitespace); } } @@ -2210,6 +2216,11 @@ impl HumanEmitter { show_code_change { for part in parts { + let snippet = if let Ok(snippet) = sm.span_to_snippet(part.span) { + snippet + } else { + String::new() + }; let span_start_pos = sm.lookup_char_pos(part.span.lo()).col_display; let span_end_pos = sm.lookup_char_pos(part.span.hi()).col_display; @@ -2257,13 +2268,80 @@ impl HumanEmitter { } if let DisplaySuggestion::Diff = show_code_change { // Colorize removal with red in diff format. - buffer.set_style_range( - row_num - 2, - (padding as isize + span_start_pos as isize) as usize, - (padding as isize + span_end_pos as isize) as usize, - Style::Removal, - true, - ); + + // Below, there's some tricky buffer indexing going on. `row_num` at this + // point corresponds to: + // + // | + // LL | CODE + // | ++++ <- `row_num` + // + // in the buffer. 
When we have a diff format output, we end up with + // + // | + // LL - OLDER <- row_num - 2 + // LL + NEWER + // | <- row_num + // + // The `row_num - 2` is to select the buffer line that has the "old version + // of the diff" at that point. When the removal is a single line, `i` is + // `0`, `newlines` is `1` so `(newlines - i - 1)` ends up being `0`, so row + // points at `LL - OLDER`. When the removal corresponds to multiple lines, + // we end up with `newlines > 1` and `i` being `0..newlines - 1`. + // + // | + // LL - OLDER <- row_num - 2 - (newlines - last_i - 1) + // LL - CODE + // LL - BEING + // LL - REMOVED <- row_num - 2 - (newlines - first_i - 1) + // LL + NEWER + // | <- row_num + + let newlines = snippet.lines().count(); + if newlines > 0 && row_num > newlines { + // Account for removals where the part being removed spans multiple + // lines. + // FIXME: We check the number of rows because in some cases, like in + // `tests/ui/lint/invalid-nan-comparison-suggestion.rs`, the rendered + // suggestion will only show the first line of code being replaced. The + // proper way of doing this would be to change the suggestion rendering + // logic to show the whole prior snippet, but the current output is not + // too bad to begin with, so we side-step that issue here. + for (i, line) in snippet.lines().enumerate() { + let line = normalize_whitespace(line); + let row = row_num - 2 - (newlines - i - 1); + // On the first line, we highlight between the start of the part + // span, and the end of that line. + // On the last line, we highlight between the start of the line, and + // the column of the part span end. + // On all others, we highlight the whole line. + let start = if i == 0 { + (padding as isize + span_start_pos as isize) as usize + } else { + padding + }; + let end = if i == 0 { + (padding as isize + + span_start_pos as isize + + line.len() as isize) + as usize + } else if i == newlines - 1 { + (padding as isize + span_end_pos as isize) as usize + } else { + (padding as isize + line.len() as isize) as usize + }; + buffer.set_style_range(row, start, end, Style::Removal, true); + } + } else { + // The removed code fits all in one line. + buffer.set_style_range( + row_num - 2, + (padding as isize + span_start_pos as isize) as usize, + (padding as isize + span_end_pos as isize) as usize, + Style::Removal, + true, + ); + } } // length of the code after substitution @@ -2523,7 +2601,7 @@ impl HumanEmitter { buffer.puts(*row_num, max_line_num_len + 1, "+ ", Style::Addition); } [] => { - // FIXME: needed? Doesn't get excercised in any test. + // FIXME: needed? Doesn't get exercised in any test. self.draw_col_separator_no_space(buffer, *row_num, max_line_num_len + 1); } _ => { diff --git a/compiler/rustc_errors/src/lib.rs b/compiler/rustc_errors/src/lib.rs index 25d4146255647..cc5eb9c335e8c 100644 --- a/compiler/rustc_errors/src/lib.rs +++ b/compiler/rustc_errors/src/lib.rs @@ -566,9 +566,7 @@ pub enum StashKey { /// FRU syntax MaybeFruTypo, CallAssocMethod, - TraitMissingMethod, AssociatedTypeSuggestion, - OpaqueHiddenTypeMismatch, MaybeForgetReturn, /// Query cycle detected, stashing in favor of a better error. 
Cycle, @@ -1569,18 +1567,18 @@ impl DiagCtxtInner { debug!(?diagnostic); debug!(?self.emitted_diagnostics); - let already_emitted_sub = |sub: &mut Subdiag| { + let not_yet_emitted = |sub: &mut Subdiag| { debug!(?sub); if sub.level != OnceNote && sub.level != OnceHelp { - return false; + return true; } let mut hasher = StableHasher::new(); sub.hash(&mut hasher); let diagnostic_hash = hasher.finish(); debug!(?diagnostic_hash); - !self.emitted_diagnostics.insert(diagnostic_hash) + self.emitted_diagnostics.insert(diagnostic_hash) }; - diagnostic.children.extract_if(already_emitted_sub).for_each(|_| {}); + diagnostic.children.retain_mut(not_yet_emitted); if already_emitted { let msg = "duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`"; diagnostic.sub(Note, msg, MultiSpan::new()); diff --git a/compiler/rustc_errors/src/markdown/parse.rs b/compiler/rustc_errors/src/markdown/parse.rs index 8dd146c1c337f..7a991a2ace71b 100644 --- a/compiler/rustc_errors/src/markdown/parse.rs +++ b/compiler/rustc_errors/src/markdown/parse.rs @@ -346,7 +346,7 @@ fn parse_with_end_pat<'a>( None } -/// Resturn `(match, residual)` to end of line. The EOL is returned with the +/// Return `(match, residual)` to end of line. The EOL is returned with the /// residual. fn parse_to_newline(buf: &[u8]) -> (&[u8], &[u8]) { buf.iter().position(|ch| *ch == b'\n').map_or((buf, &[]), |pos| buf.split_at(pos)) diff --git a/compiler/rustc_expand/Cargo.toml b/compiler/rustc_expand/Cargo.toml index eb93972387d40..cae7007c2c561 100644 --- a/compiler/rustc_expand/Cargo.toml +++ b/compiler/rustc_expand/Cargo.toml @@ -17,6 +17,7 @@ rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } rustc_feature = { path = "../rustc_feature" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } +rustc_hir = { path = "../rustc_hir" } rustc_lexer = { path = "../rustc_lexer" } rustc_lint_defs = { path = "../rustc_lint_defs" } rustc_macros = { path = "../rustc_macros" } diff --git a/compiler/rustc_expand/src/base.rs b/compiler/rustc_expand/src/base.rs index 82d4847e27a0e..4ce66cd1ae267 100644 --- a/compiler/rustc_expand/src/base.rs +++ b/compiler/rustc_expand/src/base.rs @@ -10,11 +10,12 @@ use rustc_ast::token::Nonterminal; use rustc_ast::tokenstream::TokenStream; use rustc_ast::visit::{AssocCtxt, Visitor}; use rustc_ast::{self as ast, AttrVec, Attribute, HasAttrs, Item, NodeId, PatKind}; -use rustc_attr_parsing::{self as attr, Deprecation, Stability}; +use rustc_attr_parsing::{AttributeKind, Deprecation, Stability, find_attr}; use rustc_data_structures::fx::FxIndexMap; use rustc_data_structures::sync::{self, Lrc}; use rustc_errors::{DiagCtxtHandle, ErrorGuaranteed, PResult}; use rustc_feature::Features; +use rustc_hir as hir; use rustc_lint_defs::{BufferedEarlyLint, RegisteredTools}; use rustc_parse::MACRO_ARGUMENTS; use rustc_parse::parser::Parser; @@ -25,8 +26,7 @@ use rustc_span::def_id::{CrateNum, DefId, LocalDefId}; use rustc_span::edition::Edition; use rustc_span::hygiene::{AstPass, ExpnData, ExpnKind, LocalExpnId, MacroKind}; use rustc_span::source_map::SourceMap; -use rustc_span::symbol::{Ident, Symbol, kw, sym}; -use rustc_span::{DUMMY_SP, FileName, Span}; +use rustc_span::{DUMMY_SP, FileName, Ident, Span, Symbol, kw, sym}; use smallvec::{SmallVec, smallvec}; use thin_vec::ThinVec; @@ -523,7 +523,7 @@ impl MacResult for MacEager { return Some(P(ast::Pat { id: ast::DUMMY_NODE_ID, span: e.span, - kind: PatKind::Lit(e), + kind: PatKind::Expr(e), tokens: None, })); } @@ -838,19 
+838,23 @@ impl SyntaxExtension { /// and other properties converted from attributes. pub fn new( sess: &Session, - features: &Features, kind: SyntaxExtensionKind, span: Span, helper_attrs: Vec, edition: Edition, name: Symbol, - attrs: &[impl AttributeExt], + attrs: &[hir::Attribute], is_local: bool, ) -> SyntaxExtension { let allow_internal_unstable = - rustc_attr_parsing::allow_internal_unstable(sess, attrs).collect::>(); + find_attr!(attrs, AttributeKind::AllowInternalUnstable(i) => i) + .map(|i| i.as_slice()) + .unwrap_or_default(); + // FIXME(jdonszelman): allow_internal_unsafe isn't yet new-style + // let allow_internal_unsafe = find_attr!(attrs, AttributeKind::AllowInternalUnsafe); + let allow_internal_unsafe = + ast::attr::find_by_name(attrs, sym::allow_internal_unsafe).is_some(); - let allow_internal_unsafe = ast::attr::contains_name(attrs, sym::allow_internal_unsafe); let local_inner_macros = ast::attr::find_by_name(attrs, sym::macro_export) .and_then(|macro_export| macro_export.meta_item_list()) .is_some_and(|l| ast::attr::list_contains_name(&l, sym::local_inner_macros)); @@ -867,16 +871,17 @@ impl SyntaxExtension { ) }) .unwrap_or_else(|| (None, helper_attrs)); - let stability = attr::find_stability(sess, attrs, span); - let const_stability = attr::find_const_stability(sess, attrs, span); - let body_stability = attr::find_body_stability(sess, attrs); - if let Some((_, sp)) = const_stability { + + let stability = find_attr!(attrs, AttributeKind::Stability{stability, ..} => *stability); + + // FIXME(jdonszelmann): make it impossible to miss the or_else in the typesystem + if let Some(sp) = find_attr!(attrs, AttributeKind::ConstStability{span, ..} => *span) { sess.dcx().emit_err(errors::MacroConstStability { span: sp, head_span: sess.source_map().guess_head_span(span), }); } - if let Some((_, sp)) = body_stability { + if let Some(sp) = find_attr!(attrs, AttributeKind::BodyStability{span, ..} => *span) { sess.dcx().emit_err(errors::MacroBodyStability { span: sp, head_span: sess.source_map().guess_head_span(span), @@ -887,9 +892,10 @@ impl SyntaxExtension { kind, span, allow_internal_unstable: (!allow_internal_unstable.is_empty()) - .then(|| allow_internal_unstable.into()), - stability: stability.map(|(s, _)| s), - deprecation: attr::find_deprecation(sess, features, attrs).map(|(d, _)| d), + // FIXME(jdonszelmann): avoid the into_iter/collect? 
+ .then(|| allow_internal_unstable.iter().map(|i| i.0).collect::>().into()), + stability, + deprecation: find_attr!(attrs, AttributeKind::Deprecation{deprecation, ..} => *deprecation), helper_attrs, edition, builtin_name, diff --git a/compiler/rustc_expand/src/build.rs b/compiler/rustc_expand/src/build.rs index a673e2e3250d5..8bf09cf96b3f7 100644 --- a/compiler/rustc_expand/src/build.rs +++ b/compiler/rustc_expand/src/build.rs @@ -4,8 +4,7 @@ use rustc_ast::{ self as ast, AttrVec, BlockCheckMode, Expr, LocalKind, MatchKind, PatKind, UnOp, attr, token, }; use rustc_span::source_map::Spanned; -use rustc_span::symbol::{Ident, Symbol, kw, sym}; -use rustc_span::{DUMMY_SP, Span}; +use rustc_span::{DUMMY_SP, Ident, Span, Symbol, kw, sym}; use thin_vec::{ThinVec, thin_vec}; use crate::base::ExtCtxt; @@ -487,7 +486,7 @@ impl<'a> ExtCtxt<'a> { self.pat(span, PatKind::Wild) } pub fn pat_lit(&self, span: Span, expr: P) -> P { - self.pat(span, PatKind::Lit(expr)) + self.pat(span, PatKind::Expr(expr)) } pub fn pat_ident(&self, span: Span, ident: Ident) -> P { self.pat_ident_binding_mode(span, ident, ast::BindingMode::NONE) diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs index 8e500f538a78b..3e3f35332e0d5 100644 --- a/compiler/rustc_expand/src/config.rs +++ b/compiler/rustc_expand/src/config.rs @@ -18,8 +18,7 @@ use rustc_lint_defs::BuiltinLintDiag; use rustc_parse::validate_attr; use rustc_session::Session; use rustc_session::parse::feature_err; -use rustc_span::Span; -use rustc_span::symbol::{Symbol, sym}; +use rustc_span::{STDLIB_STABLE_CRATES, Span, Symbol, sym}; use thin_vec::ThinVec; use tracing::instrument; @@ -108,14 +107,11 @@ pub fn features(sess: &Session, krate_attrs: &[Attribute], crate_name: Symbol) - // If the enabled feature is unstable, record it. if UNSTABLE_LANG_FEATURES.iter().find(|f| name == f.name).is_some() { - // When the ICE comes from core, alloc or std (approximation of the standard - // library), there's a chance that the person hitting the ICE may be using - // -Zbuild-std or similar with an untested target. The bug is probably in the - // standard library and not the compiler in that case, but that doesn't really - // matter - we want a bug report. - if features.internal(name) - && ![sym::core, sym::alloc, sym::std].contains(&crate_name) - { + // When the ICE comes a standard library crate, there's a chance that the person + // hitting the ICE may be using -Zbuild-std or similar with an untested target. + // The bug is probably in the standard library and not the compiler in that case, + // but that doesn't really matter - we want a bug report. + if features.internal(name) && !STDLIB_STABLE_CRATES.contains(&crate_name) { sess.using_internal_features.store(true, std::sync::atomic::Ordering::Relaxed); } @@ -134,7 +130,7 @@ pub fn features(sess: &Session, krate_attrs: &[Attribute], crate_name: Symbol) - // Similar to above, detect internal lib features to suppress // the ICE message that asks for a report. 
- if features.internal(name) && ![sym::core, sym::alloc, sym::std].contains(&crate_name) { + if features.internal(name) && !STDLIB_STABLE_CRATES.contains(&crate_name) { sess.using_internal_features.store(true, std::sync::atomic::Ordering::Relaxed); } } diff --git a/compiler/rustc_expand/src/errors.rs b/compiler/rustc_expand/src/errors.rs index 7bd7c30553913..89bdc7b6dfa5d 100644 --- a/compiler/rustc_expand/src/errors.rs +++ b/compiler/rustc_expand/src/errors.rs @@ -4,8 +4,7 @@ use rustc_ast::ast; use rustc_errors::codes::*; use rustc_macros::{Diagnostic, Subdiagnostic}; use rustc_session::Limit; -use rustc_span::symbol::{Ident, MacroRulesNormalizedIdent}; -use rustc_span::{Span, Symbol}; +use rustc_span::{Ident, MacroRulesNormalizedIdent, Span, Symbol}; #[derive(Diagnostic)] #[diag(expand_expr_repeat_no_syntax_vars)] diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs index 575108a548fcb..ec497f6f8f1f3 100644 --- a/compiler/rustc_expand/src/expand.rs +++ b/compiler/rustc_expand/src/expand.rs @@ -29,8 +29,7 @@ use rustc_session::lint::builtin::{UNUSED_ATTRIBUTES, UNUSED_DOC_COMMENTS}; use rustc_session::parse::feature_err; use rustc_session::{Limit, Session}; use rustc_span::hygiene::SyntaxContext; -use rustc_span::symbol::{Ident, sym}; -use rustc_span::{ErrorGuaranteed, FileName, LocalExpnId, Span}; +use rustc_span::{ErrorGuaranteed, FileName, Ident, LocalExpnId, Span, sym}; use smallvec::SmallVec; use crate::base::*; diff --git a/compiler/rustc_expand/src/mbe.rs b/compiler/rustc_expand/src/mbe.rs index e5d098f63d6d1..4ff8c02bcdb82 100644 --- a/compiler/rustc_expand/src/mbe.rs +++ b/compiler/rustc_expand/src/mbe.rs @@ -16,8 +16,7 @@ use metavar_expr::MetaVarExpr; use rustc_ast::token::{Delimiter, NonterminalKind, Token, TokenKind}; use rustc_ast::tokenstream::{DelimSpacing, DelimSpan}; use rustc_macros::{Decodable, Encodable}; -use rustc_span::Span; -use rustc_span::symbol::Ident; +use rustc_span::{Ident, Span}; /// Contains the sub-token-trees of a "delimited" token tree such as `(a b c)`. /// The delimiters are not represented explicitly in the `tts` vector. 
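The `Delimited` doc-comment above is easier to picture from the macro author's side; as a rough standalone illustration (ordinary `macro_rules!`, not the compiler-internal types), the parenthesized group below reaches the matcher as a single delimited tree whose inner tokens are then walked:

```rust
// `(a b c)` arrives as one delimited token tree; the matcher sees the three
// inner tokens once the surrounding parentheses have been matched.
macro_rules! inner_tokens {
    ( ( $($t:tt)* ) ) => { [$(stringify!($t)),*] };
}

fn main() {
    assert_eq!(inner_tokens!((a b c)), ["a", "b", "c"]);
}
```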
diff --git a/compiler/rustc_expand/src/mbe/diagnostics.rs b/compiler/rustc_expand/src/mbe/diagnostics.rs index 77b8d22892218..9f48835e15d7b 100644 --- a/compiler/rustc_expand/src/mbe/diagnostics.rs +++ b/compiler/rustc_expand/src/mbe/diagnostics.rs @@ -7,8 +7,7 @@ use rustc_macros::Subdiagnostic; use rustc_parse::parser::{Parser, Recovery, token_descr}; use rustc_session::parse::ParseSess; use rustc_span::source_map::SourceMap; -use rustc_span::symbol::Ident; -use rustc_span::{ErrorGuaranteed, Span}; +use rustc_span::{ErrorGuaranteed, Ident, Span}; use tracing::debug; use super::macro_rules::{NoopTracker, parser_from_cx}; diff --git a/compiler/rustc_expand/src/mbe/macro_check.rs b/compiler/rustc_expand/src/mbe/macro_check.rs index 1498b9cbd5d62..729dec2bfbdb1 100644 --- a/compiler/rustc_expand/src/mbe/macro_check.rs +++ b/compiler/rustc_expand/src/mbe/macro_check.rs @@ -115,8 +115,7 @@ use rustc_lint_defs::BuiltinLintDiag; use rustc_session::lint::builtin::{META_VARIABLE_MISUSE, MISSING_FRAGMENT_SPECIFIER}; use rustc_session::parse::ParseSess; use rustc_span::edition::Edition; -use rustc_span::symbol::{MacroRulesNormalizedIdent, kw}; -use rustc_span::{ErrorGuaranteed, Span}; +use rustc_span::{ErrorGuaranteed, MacroRulesNormalizedIdent, Span, kw}; use smallvec::SmallVec; use super::quoted::VALID_FRAGMENT_NAMES_MSG; diff --git a/compiler/rustc_expand/src/mbe/macro_parser.rs b/compiler/rustc_expand/src/mbe/macro_parser.rs index 2a8dddc1e004f..d709fd792817a 100644 --- a/compiler/rustc_expand/src/mbe/macro_parser.rs +++ b/compiler/rustc_expand/src/mbe/macro_parser.rs @@ -82,8 +82,7 @@ use rustc_data_structures::fx::FxHashMap; use rustc_errors::ErrorGuaranteed; use rustc_lint_defs::pluralize; use rustc_parse::parser::{ParseNtResult, Parser, token_descr}; -use rustc_span::Span; -use rustc_span::symbol::{Ident, MacroRulesNormalizedIdent}; +use rustc_span::{Ident, MacroRulesNormalizedIdent, Span}; use crate::mbe::macro_rules::Tracker; use crate::mbe::{KleeneOp, TokenTree}; diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs index ae8e24007bdcc..d402bf3d49554 100644 --- a/compiler/rustc_expand/src/mbe/macro_rules.rs +++ b/compiler/rustc_expand/src/mbe/macro_rules.rs @@ -3,17 +3,17 @@ use std::collections::hash_map::Entry; use std::{mem, slice}; use ast::token::IdentIsRaw; -use rustc_ast::attr::AttributeExt; use rustc_ast::token::NtPatKind::*; use rustc_ast::token::TokenKind::*; use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind}; use rustc_ast::tokenstream::{DelimSpan, TokenStream}; use rustc_ast::{self as ast, DUMMY_NODE_ID, NodeId}; use rustc_ast_pretty::pprust; -use rustc_attr_parsing::{self as attr, TransparencyError}; +use rustc_attr_parsing::{AttributeKind, find_attr}; use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; use rustc_errors::{Applicability, ErrorGuaranteed}; use rustc_feature::Features; +use rustc_hir as hir; use rustc_lint_defs::BuiltinLintDiag; use rustc_lint_defs::builtin::{ RUST_2021_INCOMPATIBLE_OR_PATTERNS, SEMICOLON_IN_EXPRESSIONS_FROM_MACROS, @@ -21,10 +21,9 @@ use rustc_lint_defs::builtin::{ use rustc_parse::parser::{ParseNtResult, Parser, Recovery}; use rustc_session::Session; use rustc_session::parse::ParseSess; -use rustc_span::Span; use rustc_span::edition::Edition; use rustc_span::hygiene::Transparency; -use rustc_span::symbol::{Ident, MacroRulesNormalizedIdent, kw, sym}; +use rustc_span::{Ident, MacroRulesNormalizedIdent, Span, kw, sym}; use tracing::{debug, instrument, trace, 
trace_span}; use super::diagnostics; @@ -372,7 +371,7 @@ pub fn compile_declarative_macro( features: &Features, macro_def: &ast::MacroDef, ident: Ident, - attrs: &[impl AttributeExt], + attrs: &[hir::Attribute], span: Span, node_id: NodeId, edition: Edition, @@ -380,7 +379,6 @@ pub fn compile_declarative_macro( let mk_syn_ext = |expander| { SyntaxExtension::new( sess, - features, SyntaxExtensionKind::LegacyBang(expander), span, Vec::new(), @@ -392,7 +390,6 @@ pub fn compile_declarative_macro( }; let dummy_syn_ext = |guar| (mk_syn_ext(Box::new(DummyExpander(guar))), Vec::new()); - let dcx = sess.dcx(); let lhs_nm = Ident::new(sym::lhs, span); let rhs_nm = Ident::new(sym::rhs, span); let tt_spec = Some(NonterminalKind::TT); @@ -534,16 +531,8 @@ pub fn compile_declarative_macro( check_emission(macro_check::check_meta_variables(&sess.psess, node_id, span, &lhses, &rhses)); - let (transparency, transparency_error) = attr::find_transparency(attrs, macro_rules); - match transparency_error { - Some(TransparencyError::UnknownTransparency(value, span)) => { - dcx.span_err(span, format!("unknown macro transparency: `{value}`")); - } - Some(TransparencyError::MultipleTransparencyAttrs(old_span, new_span)) => { - dcx.span_err(vec![old_span, new_span], "multiple macro transparency attributes"); - } - None => {} - } + let transparency = find_attr!(attrs, AttributeKind::MacroTransparency(x) => *x) + .unwrap_or(Transparency::fallback(macro_rules)); if let Some(guar) = guar { // To avoid warning noise, only consider the rules of this diff --git a/compiler/rustc_expand/src/mbe/metavar_expr.rs b/compiler/rustc_expand/src/mbe/metavar_expr.rs index 810a5d30c7ec4..1ccb070f83a4b 100644 --- a/compiler/rustc_expand/src/mbe/metavar_expr.rs +++ b/compiler/rustc_expand/src/mbe/metavar_expr.rs @@ -1,12 +1,11 @@ use rustc_ast::token::{self, Delimiter, IdentIsRaw, Lit, Token, TokenKind}; -use rustc_ast::tokenstream::{RefTokenTreeCursor, TokenStream, TokenTree}; +use rustc_ast::tokenstream::{TokenStream, TokenStreamIter, TokenTree}; use rustc_ast::{LitIntType, LitKind}; use rustc_ast_pretty::pprust; use rustc_errors::{Applicability, PResult}; use rustc_macros::{Decodable, Encodable}; use rustc_session::parse::ParseSess; -use rustc_span::symbol::Ident; -use rustc_span::{Span, Symbol}; +use rustc_span::{Ident, Span, Symbol}; pub(crate) const RAW_IDENT_ERR: &str = "`${concat(..)}` currently does not support raw identifiers"; pub(crate) const UNSUPPORTED_CONCAT_ELEM_ERR: &str = "expected identifier or string literal"; @@ -39,14 +38,14 @@ impl MetaVarExpr { outer_span: Span, psess: &'psess ParseSess, ) -> PResult<'psess, MetaVarExpr> { - let mut tts = input.trees(); - let ident = parse_ident(&mut tts, psess, outer_span)?; - let Some(TokenTree::Delimited(.., Delimiter::Parenthesis, args)) = tts.next() else { + let mut iter = input.iter(); + let ident = parse_ident(&mut iter, psess, outer_span)?; + let Some(TokenTree::Delimited(.., Delimiter::Parenthesis, args)) = iter.next() else { let msg = "meta-variable expression parameter must be wrapped in parentheses"; return Err(psess.dcx().struct_span_err(ident.span, msg)); }; - check_trailing_token(&mut tts, psess)?; - let mut iter = args.trees(); + check_trailing_token(&mut iter, psess)?; + let mut iter = args.iter(); let rslt = match ident.as_str() { "concat" => { let mut result = Vec::new(); @@ -74,7 +73,7 @@ impl MetaVarExpr { } }; result.push(element); - if iter.look_ahead(0).is_none() { + if iter.peek().is_none() { break; } if !try_eat_comma(&mut iter) { @@ -143,7 +142,7 @@ 
pub(crate) enum MetaVarExprConcatElem { // Checks if there are any remaining tokens. For example, `${ignore(ident ... a b c ...)}` fn check_trailing_token<'psess>( - iter: &mut RefTokenTreeCursor<'_>, + iter: &mut TokenStreamIter<'_>, psess: &'psess ParseSess, ) -> PResult<'psess, ()> { if let Some(tt) = iter.next() { @@ -159,14 +158,14 @@ fn check_trailing_token<'psess>( /// Parse a meta-variable `count` expression: `count(ident[, depth])` fn parse_count<'psess>( - iter: &mut RefTokenTreeCursor<'_>, + iter: &mut TokenStreamIter<'_>, psess: &'psess ParseSess, span: Span, ) -> PResult<'psess, MetaVarExpr> { eat_dollar(iter, psess, span)?; let ident = parse_ident(iter, psess, span)?; let depth = if try_eat_comma(iter) { - if iter.look_ahead(0).is_none() { + if iter.peek().is_none() { return Err(psess.dcx().struct_span_err( span, "`count` followed by a comma must have an associated index indicating its depth", @@ -181,7 +180,7 @@ fn parse_count<'psess>( /// Parses the depth used by index(depth) and len(depth). fn parse_depth<'psess>( - iter: &mut RefTokenTreeCursor<'_>, + iter: &mut TokenStreamIter<'_>, psess: &'psess ParseSess, span: Span, ) -> PResult<'psess, usize> { @@ -204,7 +203,7 @@ fn parse_depth<'psess>( /// Parses an generic ident fn parse_ident<'psess>( - iter: &mut RefTokenTreeCursor<'_>, + iter: &mut TokenStreamIter<'_>, psess: &'psess ParseSess, fallback_span: Span, ) -> PResult<'psess, Ident> { @@ -236,7 +235,7 @@ fn parse_ident_from_token<'psess>( } fn parse_token<'psess, 't>( - iter: &mut RefTokenTreeCursor<'t>, + iter: &mut TokenStreamIter<'t>, psess: &'psess ParseSess, fallback_span: Span, ) -> PResult<'psess, &'t Token> { @@ -251,8 +250,8 @@ fn parse_token<'psess, 't>( /// Tries to move the iterator forward returning `true` if there is a comma. If not, then the /// iterator is not modified and the result is `false`. -fn try_eat_comma(iter: &mut RefTokenTreeCursor<'_>) -> bool { - if let Some(TokenTree::Token(Token { kind: token::Comma, .. }, _)) = iter.look_ahead(0) { +fn try_eat_comma(iter: &mut TokenStreamIter<'_>) -> bool { + if let Some(TokenTree::Token(Token { kind: token::Comma, .. }, _)) = iter.peek() { let _ = iter.next(); return true; } @@ -261,8 +260,8 @@ fn try_eat_comma(iter: &mut RefTokenTreeCursor<'_>) -> bool { /// Tries to move the iterator forward returning `true` if there is a dollar sign. If not, then the /// iterator is not modified and the result is `false`. -fn try_eat_dollar(iter: &mut RefTokenTreeCursor<'_>) -> bool { - if let Some(TokenTree::Token(Token { kind: token::Dollar, .. }, _)) = iter.look_ahead(0) { +fn try_eat_dollar(iter: &mut TokenStreamIter<'_>) -> bool { + if let Some(TokenTree::Token(Token { kind: token::Dollar, .. }, _)) = iter.peek() { let _ = iter.next(); return true; } @@ -271,12 +270,11 @@ fn try_eat_dollar(iter: &mut RefTokenTreeCursor<'_>) -> bool { /// Expects that the next item is a dollar sign. fn eat_dollar<'psess>( - iter: &mut RefTokenTreeCursor<'_>, + iter: &mut TokenStreamIter<'_>, psess: &'psess ParseSess, span: Span, ) -> PResult<'psess, ()> { - if let Some(TokenTree::Token(Token { kind: token::Dollar, .. 
}, _)) = iter.look_ahead(0) { - let _ = iter.next(); + if try_eat_dollar(iter) { return Ok(()); } Err(psess.dcx().struct_span_err( diff --git a/compiler/rustc_expand/src/mbe/quoted.rs b/compiler/rustc_expand/src/mbe/quoted.rs index 36094707faceb..a27d47892e461 100644 --- a/compiler/rustc_expand/src/mbe/quoted.rs +++ b/compiler/rustc_expand/src/mbe/quoted.rs @@ -1,12 +1,12 @@ use rustc_ast::token::{self, Delimiter, IdentIsRaw, NonterminalKind, Token}; +use rustc_ast::tokenstream::TokenStreamIter; use rustc_ast::{NodeId, tokenstream}; use rustc_ast_pretty::pprust; use rustc_feature::Features; use rustc_session::Session; use rustc_session::parse::feature_err; -use rustc_span::Span; use rustc_span::edition::Edition; -use rustc_span::symbol::{Ident, kw, sym}; +use rustc_span::{Ident, Span, kw, sym}; use crate::errors; use crate::mbe::macro_parser::count_metavar_decls; @@ -49,25 +49,25 @@ pub(super) fn parse( // For each token tree in `input`, parse the token into a `self::TokenTree`, consuming // additional trees if need be. - let mut trees = input.trees().peekable(); - while let Some(tree) = trees.next() { + let mut iter = input.iter(); + while let Some(tree) = iter.next() { // Given the parsed tree, if there is a metavar and we are expecting matchers, actually // parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`). - let tree = parse_tree(tree, &mut trees, parsing_patterns, sess, node_id, features, edition); + let tree = parse_tree(tree, &mut iter, parsing_patterns, sess, node_id, features, edition); match tree { TokenTree::MetaVar(start_sp, ident) if parsing_patterns => { // Not consuming the next token immediately, as it may not be a colon - let span = match trees.peek() { + let span = match iter.peek() { Some(&tokenstream::TokenTree::Token( Token { kind: token::Colon, span: colon_span }, _, )) => { // Consume the colon first - trees.next(); + iter.next(); // It's ok to consume the next tree no matter how, // since if it's not a token then it will be an invalid declaration. - match trees.next() { + match iter.next() { Some(tokenstream::TokenTree::Token(token, _)) => match token.ident() { Some((fragment, _)) => { let span = token.span.with_lo(start_sp.lo()); @@ -143,14 +143,14 @@ fn maybe_emit_macro_metavar_expr_concat_feature(features: &Features, sess: &Sess /// # Parameters /// /// - `tree`: the tree we wish to convert. -/// - `outer_trees`: an iterator over trees. We may need to read more tokens from it in order to finish +/// - `outer_iter`: an iterator over trees. We may need to read more tokens from it in order to finish /// converting `tree` /// - `parsing_patterns`: same as [parse]. /// - `sess`: the parsing session. Any errors will be emitted to this session. /// - `features`: language features so we can do feature gating. fn parse_tree<'a>( tree: &'a tokenstream::TokenTree, - outer_trees: &mut impl Iterator, + outer_iter: &mut TokenStreamIter<'a>, parsing_patterns: bool, sess: &Session, node_id: NodeId, @@ -163,15 +163,16 @@ fn parse_tree<'a>( &tokenstream::TokenTree::Token(Token { kind: token::Dollar, span: dollar_span }, _) => { // FIXME: Handle `Invisible`-delimited groups in a more systematic way // during parsing. 
- let mut next = outer_trees.next(); - let mut trees: Box>; - match next { + let mut next = outer_iter.next(); + let mut iter_storage; + let mut iter: &mut TokenStreamIter<'_> = match next { Some(tokenstream::TokenTree::Delimited(.., delim, tts)) if delim.skip() => { - trees = Box::new(tts.trees()); - next = trees.next(); + iter_storage = tts.iter(); + next = iter_storage.next(); + &mut iter_storage } - _ => trees = Box::new(outer_trees), - } + _ => outer_iter, + }; match next { // `tree` is followed by a delimited set of token trees. @@ -230,7 +231,7 @@ fn parse_tree<'a>( let sequence = parse(tts, parsing_patterns, sess, node_id, features, edition); // Get the Kleene operator and optional separator let (separator, kleene) = - parse_sep_and_kleene_op(&mut trees, delim_span.entire(), sess); + parse_sep_and_kleene_op(&mut iter, delim_span.entire(), sess); // Count the number of captured "names" (i.e., named metavars) let num_captures = if parsing_patterns { count_metavar_decls(&sequence) } else { 0 }; @@ -313,11 +314,11 @@ fn kleene_op(token: &Token) -> Option { /// - Ok(Ok((op, span))) if the next token tree is a KleeneOp /// - Ok(Err(tok, span)) if the next token tree is a token but not a KleeneOp /// - Err(span) if the next token tree is not a token -fn parse_kleene_op<'a>( - input: &mut impl Iterator, +fn parse_kleene_op( + iter: &mut TokenStreamIter<'_>, span: Span, ) -> Result, Span> { - match input.next() { + match iter.next() { Some(tokenstream::TokenTree::Token(token, _)) => match kleene_op(token) { Some(op) => Ok(Ok((op, token.span))), None => Ok(Err(token.clone())), @@ -334,22 +335,22 @@ fn parse_kleene_op<'a>( /// itself. Note that here we are parsing the _macro_ itself, rather than trying to match some /// stream of tokens in an invocation of a macro. /// -/// This function will take some input iterator `input` corresponding to `span` and a parsing -/// session `sess`. If the next one (or possibly two) tokens in `input` correspond to a Kleene +/// This function will take some input iterator `iter` corresponding to `span` and a parsing +/// session `sess`. If the next one (or possibly two) tokens in `iter` correspond to a Kleene /// operator and separator, then a tuple with `(separator, KleeneOp)` is returned. Otherwise, an /// error with the appropriate span is emitted to `sess` and a dummy value is returned. -fn parse_sep_and_kleene_op<'a>( - input: &mut impl Iterator, +fn parse_sep_and_kleene_op( + iter: &mut TokenStreamIter<'_>, span: Span, sess: &Session, ) -> (Option, KleeneToken) { // We basically look at two token trees here, denoted as #1 and #2 below - let span = match parse_kleene_op(input, span) { + let span = match parse_kleene_op(iter, span) { // #1 is a `?`, `+`, or `*` KleeneOp Ok(Ok((op, span))) => return (None, KleeneToken::new(op, span)), // #1 is a separator followed by #2, a KleeneOp - Ok(Err(token)) => match parse_kleene_op(input, token.span) { + Ok(Err(token)) => match parse_kleene_op(iter, token.span) { // #2 is the `?` Kleene op, which does not take a separator (error) Ok(Ok((KleeneOp::ZeroOrOne, span))) => { // Error! 
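For readers less familiar with the terminology in `parse_sep_and_kleene_op`, a small standalone `macro_rules!` example of a separator plus Kleene operator (the `?` operator is the one that, per the error case above, takes no separator):

```rust
// In `$( $x:expr ),+` the `,` is the separator (token tree #1 in the
// comments above) and `+` is the Kleene operator (#2).
macro_rules! sum {
    ( $( $x:expr ),+ ) => { 0 $( + $x )+ };
}

fn main() {
    assert_eq!(sum!(1, 2, 3), 6);
}
```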
diff --git a/compiler/rustc_expand/src/mbe/transcribe.rs b/compiler/rustc_expand/src/mbe/transcribe.rs index b77d02e630a21..ef209c2bce123 100644 --- a/compiler/rustc_expand/src/mbe/transcribe.rs +++ b/compiler/rustc_expand/src/mbe/transcribe.rs @@ -11,8 +11,9 @@ use rustc_parse::lexer::nfc_normalize; use rustc_parse::parser::ParseNtResult; use rustc_session::parse::{ParseSess, SymbolGallery}; use rustc_span::hygiene::{LocalExpnId, Transparency}; -use rustc_span::symbol::{Ident, MacroRulesNormalizedIdent, sym}; -use rustc_span::{Span, Symbol, SyntaxContext, with_metavar_spans}; +use rustc_span::{ + Ident, MacroRulesNormalizedIdent, Span, Symbol, SyntaxContext, sym, with_metavar_spans, +}; use smallvec::{SmallVec, smallvec}; use crate::errors::{ @@ -696,8 +697,10 @@ fn transcribe_metavar_expr<'a>( MetaVarExprConcatElem::Var(ident) => { match matched_from_ident(dcx, *ident, interp)? { NamedMatch::MatchedSeq(named_matches) => { - let curr_idx = repeats.last().unwrap().0; - match &named_matches[curr_idx] { + let Some((curr_idx, _)) = repeats.last() else { + return Err(dcx.struct_span_err(sp.entire(), "invalid syntax")); + }; + match &named_matches[*curr_idx] { // FIXME(c410-f3r) Nested repetitions are unimplemented MatchedSeq(_) => unimplemented!(), MatchedSingle(pnr) => { diff --git a/compiler/rustc_expand/src/module.rs b/compiler/rustc_expand/src/module.rs index 85ea42e78ad49..9c35b26772b61 100644 --- a/compiler/rustc_expand/src/module.rs +++ b/compiler/rustc_expand/src/module.rs @@ -2,13 +2,12 @@ use std::iter::once; use std::path::{self, Path, PathBuf}; use rustc_ast::ptr::P; -use rustc_ast::{AttrVec, Attribute, Inline, Item, ModSpans, token}; +use rustc_ast::{AttrVec, Attribute, Inline, Item, ModSpans}; use rustc_errors::{Diag, ErrorGuaranteed}; -use rustc_parse::{new_parser_from_file, unwrap_or_emit_fatal, validate_attr}; +use rustc_parse::{exp, new_parser_from_file, unwrap_or_emit_fatal, validate_attr}; use rustc_session::Session; use rustc_session::parse::ParseSess; -use rustc_span::Span; -use rustc_span::symbol::{Ident, sym}; +use rustc_span::{Ident, Span, sym}; use thin_vec::ThinVec; use crate::base::ModuleData; @@ -71,7 +70,7 @@ pub(crate) fn parse_external_mod( let mut parser = unwrap_or_emit_fatal(new_parser_from_file(&sess.psess, &mp.file_path, Some(span))); let (inner_attrs, items, inner_span) = - parser.parse_mod(&token::Eof).map_err(|err| ModError::ParserError(err))?; + parser.parse_mod(exp!(Eof)).map_err(|err| ModError::ParserError(err))?; attrs.extend(inner_attrs); (items, inner_span, mp.file_path) }; diff --git a/compiler/rustc_expand/src/placeholders.rs b/compiler/rustc_expand/src/placeholders.rs index 9e459bd81a11e..e969f2d4fb55f 100644 --- a/compiler/rustc_expand/src/placeholders.rs +++ b/compiler/rustc_expand/src/placeholders.rs @@ -4,8 +4,7 @@ use rustc_ast::token::Delimiter; use rustc_ast::visit::AssocCtxt; use rustc_ast::{self as ast, Safety}; use rustc_data_structures::fx::FxHashMap; -use rustc_span::DUMMY_SP; -use rustc_span::symbol::Ident; +use rustc_span::{DUMMY_SP, Ident}; use smallvec::{SmallVec, smallvec}; use thin_vec::ThinVec; diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs index 263df235b3ecc..7eb09a64e96ae 100644 --- a/compiler/rustc_expand/src/proc_macro_server.rs +++ b/compiler/rustc_expand/src/proc_macro_server.rs @@ -15,11 +15,10 @@ use rustc_data_structures::sync::Lrc; use rustc_errors::{Diag, ErrorGuaranteed, MultiSpan, PResult}; use rustc_parse::lexer::nfc_normalize; use 
rustc_parse::parser::Parser; -use rustc_parse::{new_parser_from_source_str, source_str_to_stream, unwrap_or_emit_fatal}; +use rustc_parse::{exp, new_parser_from_source_str, source_str_to_stream, unwrap_or_emit_fatal}; use rustc_session::parse::ParseSess; use rustc_span::def_id::CrateNum; -use rustc_span::symbol::{self, Symbol, sym}; -use rustc_span::{BytePos, FileName, Pos, SourceFile, Span}; +use rustc_span::{BytePos, FileName, Pos, SourceFile, Span, Symbol, sym}; use smallvec::{SmallVec, smallvec}; use crate::base::ExtCtxt; @@ -112,9 +111,9 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec { let delimiter = pm::Delimiter::from_internal(delim); @@ -230,7 +229,7 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec { - let ident = symbol::Ident::new(name, span).without_first_quote(); + let ident = rustc_span::Ident::new(name, span).without_first_quote(); trees.extend([ TokenTree::Punct(Punct { ch: b'\'', joint: true, span }), TokenTree::Ident(Ident { sym: ident.name, is_raw: is_raw.into(), span }), @@ -474,7 +473,7 @@ impl server::FreeFunctions for Rustc<'_, '_> { unwrap_or_emit_fatal(new_parser_from_source_str(self.psess(), name, s.to_owned())); let first_span = parser.token.span.data(); - let minus_present = parser.eat(&token::BinOp(token::Minus)); + let minus_present = parser.eat(exp!(Minus)); let lit_span = parser.token.span.data(); let token::Literal(mut lit) = parser.token.kind else { diff --git a/compiler/rustc_feature/src/accepted.rs b/compiler/rustc_feature/src/accepted.rs index c5913ed27cf34..217a7aeb2d7f7 100644 --- a/compiler/rustc_feature/src/accepted.rs +++ b/compiler/rustc_feature/src/accepted.rs @@ -1,6 +1,6 @@ //! List of the accepted feature gates. -use rustc_span::symbol::sym; +use rustc_span::sym; use super::{Feature, to_nonzero}; @@ -73,7 +73,7 @@ declare_features! ( /// Allows free and inherent `async fn`s, `async` blocks, and `.await` expressions. (accepted, async_await, "1.39.0", Some(50547)), /// Allows `async || body` closures. - (accepted, async_closure, "CURRENT_RUSTC_VERSION", Some(62290)), + (accepted, async_closure, "1.85.0", Some(62290)), /// Allows async functions to be declared, implemented, and used in traits. (accepted, async_fn_in_trait, "1.75.0", Some(91611)), /// Allows all literals in attribute lists and values of key-value pairs. @@ -175,6 +175,8 @@ declare_features! ( (accepted, destructuring_assignment, "1.59.0", Some(71126)), /// Allows using the `#[diagnostic]` attribute tool namespace (accepted, diagnostic_namespace, "1.78.0", Some(111996)), + /// Controls errors in trait implementations. + (accepted, do_not_recommend, "1.85.0", Some(51992)), /// Allows `#[doc(alias = "...")]`. (accepted, doc_alias, "1.48.0", Some(50146)), /// Allows `..` in tuple (struct) patterns. @@ -197,7 +199,7 @@ declare_features! ( (accepted, extended_key_value_attributes, "1.54.0", Some(78835)), /// Allows using `efiapi`, `aapcs`, `sysv64` and `win64` as calling /// convention for functions with varargs. - (accepted, extended_varargs_abi_support, "CURRENT_RUSTC_VERSION", Some(100189)), + (accepted, extended_varargs_abi_support, "1.85.0", Some(100189)), /// Allows resolving absolute paths as paths from other crates. (accepted, extern_absolute_paths, "1.30.0", Some(44660)), /// Allows `extern crate foo as bar;`. This puts `bar` into extern prelude. 
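As a quick illustration of the first newly-accepted gate listed above (`async_closure`, recorded as accepted in 1.85.0), a minimal sketch assuming a 1.85-or-later toolchain:

```rust
// An `async || body` closure; calling it yields a future.
async fn demo() -> i32 {
    let add_one = async |x: i32| x + 1;
    add_one(41).await
}

fn main() {
    // No executor here; `demo` is only type-checked in this sketch.
    let _future = demo();
}
```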
diff --git a/compiler/rustc_feature/src/builtin_attrs.rs b/compiler/rustc_feature/src/builtin_attrs.rs index 3bf485c2eb6fb..5510e7e09e50c 100644 --- a/compiler/rustc_feature/src/builtin_attrs.rs +++ b/compiler/rustc_feature/src/builtin_attrs.rs @@ -6,7 +6,7 @@ use AttributeDuplicates::*; use AttributeGate::*; use AttributeType::*; use rustc_data_structures::fx::FxHashMap; -use rustc_span::symbol::{Symbol, sym}; +use rustc_span::{Symbol, sym}; use crate::{Features, Stability}; @@ -37,6 +37,7 @@ const GATED_CFGS: &[GatedCfg] = &[ (sym::sanitizer_cfi_normalize_integers, sym::cfg_sanitizer_cfi, Features::cfg_sanitizer_cfi), // this is consistent with naming of the compiler flag it's for (sym::fmt_debug, sym::fmt_debug, Features::fmt_debug), + (sym::emscripten_wasm_eh, sym::cfg_emscripten_wasm_eh, Features::cfg_emscripten_wasm_eh), ]; /// Find a gated cfg determined by the `pred`icate which is given the cfg's name. @@ -571,7 +572,7 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ // `#[coroutine]` attribute to be applied to closures to make them coroutines instead gated!( coroutine, Normal, template!(Word), ErrorFollowing, - EncodeCrossCrate::No, coroutines, experimental!(coroutines) + EncodeCrossCrate::No, coroutines, experimental!(coroutine) ), // RFC 3543 @@ -622,7 +623,7 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ EncodeCrossCrate::No, "allow_internal_unsafe side-steps the unsafe_code lint", ), rustc_attr!( - rustc_allowed_through_unstable_modules, Normal, template!(Word), + rustc_allowed_through_unstable_modules, Normal, template!(Word, NameValueStr: "deprecation message"), WarnFollowing, EncodeCrossCrate::No, "rustc_allowed_through_unstable_modules special cases accidental stabilizations of stable items \ through unstable paths" @@ -913,22 +914,26 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ rustc_attr!( rustc_deny_explicit_impl, AttributeType::Normal, - template!(List: "implement_via_object = (true|false)"), + template!(Word), ErrorFollowing, EncodeCrossCrate::No, "#[rustc_deny_explicit_impl] enforces that a trait can have no user-provided impls" ), + rustc_attr!( + rustc_do_not_implement_via_object, + AttributeType::Normal, + template!(Word), + ErrorFollowing, + EncodeCrossCrate::No, + "#[rustc_do_not_implement_via_object] opts out of the automatic trait impl for trait objects \ + (`impl Trait for dyn Trait`)" + ), rustc_attr!( rustc_has_incoherent_inherent_impls, AttributeType::Normal, template!(Word), ErrorFollowing, EncodeCrossCrate::Yes, "#[rustc_has_incoherent_inherent_impls] allows the addition of incoherent inherent impls for \ the given type by annotating all impl items with #[rustc_allow_incoherent_impl]." ), - rustc_attr!( - rustc_box, AttributeType::Normal, template!(Word), ErrorFollowing, EncodeCrossCrate::No, - "#[rustc_box] allows creating boxes \ - and it is only intended to be used in `alloc`." 
- ), BuiltinAttribute { name: sym::rustc_diagnostic_item, @@ -1004,7 +1009,7 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ ), gated!( rustc_intrinsic, Normal, template!(Word), ErrorFollowing, EncodeCrossCrate::Yes, intrinsics, - "the `#[rustc_intrinsic]` attribute is used to declare intrinsics with function bodies", + "the `#[rustc_intrinsic]` attribute is used to declare intrinsics as function items", ), gated!( rustc_intrinsic_must_be_overridden, Normal, template!(Word), ErrorFollowing, EncodeCrossCrate::Yes, intrinsics, @@ -1014,6 +1019,10 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ rustc_no_mir_inline, Normal, template!(Word), WarnFollowing, EncodeCrossCrate::Yes, "#[rustc_no_mir_inline] prevents the MIR inliner from inlining a function while not affecting codegen" ), + rustc_attr!( + rustc_force_inline, Normal, template!(Word, NameValueStr: "reason"), WarnFollowing, EncodeCrossCrate::Yes, + "#![rustc_force_inline] forces a free function to be inlined" + ), // ========================================================================== // Internal attributes, Testing: @@ -1188,10 +1197,9 @@ pub static BUILTIN_ATTRIBUTE_MAP: LazyLock> map }); -pub fn is_stable_diagnostic_attribute(sym: Symbol, features: &Features) -> bool { +pub fn is_stable_diagnostic_attribute(sym: Symbol, _features: &Features) -> bool { match sym { - sym::on_unimplemented => true, - sym::do_not_recommend => features.do_not_recommend(), + sym::on_unimplemented | sym::do_not_recommend => true, _ => false, } } diff --git a/compiler/rustc_feature/src/lib.rs b/compiler/rustc_feature/src/lib.rs index 5d27b8f542cbb..0b034a2ae1075 100644 --- a/compiler/rustc_feature/src/lib.rs +++ b/compiler/rustc_feature/src/lib.rs @@ -28,7 +28,7 @@ mod tests; use std::num::NonZero; -use rustc_span::symbol::Symbol; +use rustc_span::Symbol; #[derive(Debug, Clone)] pub struct Feature { @@ -68,6 +68,16 @@ impl UnstableFeatures { /// If `krate` is [`Some`], then setting `RUSTC_BOOTSTRAP=krate` will enable the nightly /// features. Otherwise, only `RUSTC_BOOTSTRAP=1` will work. pub fn from_environment(krate: Option<&str>) -> Self { + Self::from_environment_value(krate, std::env::var("RUSTC_BOOTSTRAP")) + } + + /// Avoid unsafe `std::env::set_var()` by allowing tests to inject + /// `std::env::var("RUSTC_BOOTSTRAP")` with the `env_var_rustc_bootstrap` + /// arg. + fn from_environment_value( + krate: Option<&str>, + env_var_rustc_bootstrap: Result, + ) -> Self { // `true` if this is a feature-staged build, i.e., on the beta or stable channel. let disable_unstable_features = option_env!("CFG_DISABLE_UNSTABLE_FEATURES").is_some_and(|s| s != "0"); @@ -75,7 +85,7 @@ impl UnstableFeatures { let is_unstable_crate = |var: &str| krate.is_some_and(|name| var.split(',').any(|new_krate| new_krate == name)); - let bootstrap = std::env::var("RUSTC_BOOTSTRAP").ok(); + let bootstrap = env_var_rustc_bootstrap.ok(); if let Some(val) = bootstrap.as_deref() { match val { val if val == "1" || is_unstable_crate(val) => return UnstableFeatures::Cheat, diff --git a/compiler/rustc_feature/src/removed.rs b/compiler/rustc_feature/src/removed.rs index e25840ba5fcb6..9aa59375706b4 100644 --- a/compiler/rustc_feature/src/removed.rs +++ b/compiler/rustc_feature/src/removed.rs @@ -1,6 +1,6 @@ //! List of the removed feature gates. -use rustc_span::symbol::sym; +use rustc_span::sym; use super::{Feature, to_nonzero}; @@ -120,7 +120,7 @@ declare_features! ( /// Allows defining generators. 
(removed, generators, "1.21.0", Some(43122), Some("renamed to `coroutines`")), /// An extension to the `generic_associated_types` feature, allowing incomplete features. - (removed, generic_associated_types_extended, "CURRENT_RUSTC_VERSION", Some(95451), + (removed, generic_associated_types_extended, "1.85.0", Some(95451), Some( "feature needs overhaul and reimplementation pending \ better implied higher-ranked implied bounds support" diff --git a/compiler/rustc_feature/src/tests.rs b/compiler/rustc_feature/src/tests.rs index cc0e1f3120965..a5d589171d105 100644 --- a/compiler/rustc_feature/src/tests.rs +++ b/compiler/rustc_feature/src/tests.rs @@ -2,9 +2,11 @@ use super::UnstableFeatures; #[test] fn rustc_bootstrap_parsing() { - let is_bootstrap = |env, krate| { - std::env::set_var("RUSTC_BOOTSTRAP", env); - matches!(UnstableFeatures::from_environment(krate), UnstableFeatures::Cheat) + let is_bootstrap = |env: &str, krate: Option<&str>| { + matches!( + UnstableFeatures::from_environment_value(krate, Ok(env.to_string())), + UnstableFeatures::Cheat + ) }; assert!(is_bootstrap("1", None)); assert!(is_bootstrap("1", Some("x"))); @@ -22,9 +24,11 @@ fn rustc_bootstrap_parsing() { assert!(!is_bootstrap("0", None)); // `RUSTC_BOOTSTRAP=-1` is force-stable, no unstable features allowed. - let is_force_stable = |krate| { - std::env::set_var("RUSTC_BOOTSTRAP", "-1"); - matches!(UnstableFeatures::from_environment(krate), UnstableFeatures::Disallow) + let is_force_stable = |krate: Option<&str>| { + matches!( + UnstableFeatures::from_environment_value(krate, Ok("-1".to_string())), + UnstableFeatures::Disallow + ) }; assert!(is_force_stable(None)); // Does not support specifying any crate. diff --git a/compiler/rustc_feature/src/unstable.rs b/compiler/rustc_feature/src/unstable.rs index 76b9bee4b00dc..1dcde45333102 100644 --- a/compiler/rustc_feature/src/unstable.rs +++ b/compiler/rustc_feature/src/unstable.rs @@ -3,8 +3,7 @@ use std::path::PathBuf; use rustc_data_structures::fx::FxHashSet; -use rustc_span::Span; -use rustc_span::symbol::{Symbol, sym}; +use rustc_span::{Span, Symbol, sym}; use super::{Feature, to_nonzero}; @@ -203,6 +202,8 @@ declare_features! ( (internal, allow_internal_unstable, "1.0.0", None), /// Allows using anonymous lifetimes in argument-position impl-trait. (unstable, anonymous_lifetime_in_impl_trait, "1.63.0", None), + /// Allows access to the emscripten_wasm_eh config, used by panic_unwind and unwind + (internal, cfg_emscripten_wasm_eh, "CURRENT_RUSTC_VERSION", None), /// Allows identifying the `compiler_builtins` crate. (internal, compiler_builtins, "1.13.0", None), /// Allows writing custom MIR @@ -332,7 +333,7 @@ declare_features! ( (unstable, hexagon_target_feature, "1.27.0", Some(44839)), (unstable, lahfsahf_target_feature, "1.78.0", Some(44839)), (unstable, loongarch_target_feature, "1.73.0", Some(44839)), - (unstable, m68k_target_feature, "CURRENT_RUSTC_VERSION", Some(134328)), + (unstable, m68k_target_feature, "1.85.0", Some(134328)), (unstable, mips_target_feature, "1.27.0", Some(44839)), (unstable, powerpc_target_feature, "1.27.0", Some(44839)), (unstable, prfchw_target_feature, "1.78.0", Some(44839)), @@ -343,7 +344,7 @@ declare_features! ( (unstable, sse4a_target_feature, "1.27.0", Some(44839)), (unstable, tbm_target_feature, "1.27.0", Some(44839)), (unstable, wasm_target_feature, "1.30.0", Some(44839)), - (unstable, x87_target_feature, "CURRENT_RUSTC_VERSION", Some(44839)), + (unstable, x87_target_feature, "1.85.0", Some(44839)), // !!!! !!!! !!!! !!!! !!!! !!!! 
!!!! !!!! !!!! !!!! !!!! // Features are listed in alphabetical order. Tidy will fail if you don't keep it this way. // !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! @@ -360,6 +361,8 @@ declare_features! ( (unstable, abi_avr_interrupt, "1.45.0", Some(69664)), /// Allows `extern "C-cmse-nonsecure-call" fn()`. (unstable, abi_c_cmse_nonsecure_call, "1.51.0", Some(81391)), + /// Allows `extern "gpu-kernel" fn()`. + (unstable, abi_gpu_kernel, "CURRENT_RUSTC_VERSION", Some(135467)), /// Allows `extern "msp430-interrupt" fn()`. (unstable, abi_msp430_interrupt, "1.16.0", Some(38487)), /// Allows `extern "ptx-*" fn()`. @@ -379,11 +382,11 @@ declare_features! ( /// Enables experimental inline assembly support for additional architectures. (unstable, asm_experimental_arch, "1.58.0", Some(93335)), /// Enables experimental register support in inline assembly. - (unstable, asm_experimental_reg, "CURRENT_RUSTC_VERSION", Some(133416)), + (unstable, asm_experimental_reg, "1.85.0", Some(133416)), /// Allows using `label` operands in inline assembly. (unstable, asm_goto, "1.78.0", Some(119364)), /// Allows using `label` operands in inline assembly together with output operands. - (unstable, asm_goto_with_outputs, "CURRENT_RUSTC_VERSION", Some(119364)), + (unstable, asm_goto_with_outputs, "1.85.0", Some(119364)), /// Allows the `may_unwind` option in inline assembly. (unstable, asm_unwind, "1.58.0", Some(93334)), /// Allows users to enforce equality of associated constants `TraitImpl`. @@ -391,13 +394,13 @@ declare_features! ( /// Allows associated type defaults. (unstable, associated_type_defaults, "1.2.0", Some(29661)), /// Allows async functions to be called from `dyn Trait`. - (incomplete, async_fn_in_dyn_trait, "CURRENT_RUSTC_VERSION", Some(133119)), + (incomplete, async_fn_in_dyn_trait, "1.85.0", Some(133119)), /// Allows `#[track_caller]` on async functions. (unstable, async_fn_track_caller, "1.73.0", Some(110011)), /// Allows `for await` loops. (unstable, async_for_loop, "1.77.0", Some(118898)), /// Allows `async` trait bound modifier. - (unstable, async_trait_bounds, "CURRENT_RUSTC_VERSION", Some(62290)), + (unstable, async_trait_bounds, "1.85.0", Some(62290)), /// Allows using C-variadics. (unstable, c_variadic, "1.34.0", Some(44930)), /// Allows the use of `#[cfg()]`. @@ -435,7 +438,7 @@ declare_features! ( /// Allows `const || {}` closures in const contexts. (incomplete, const_closures, "1.68.0", Some(106003)), /// Allows using `~const Destruct` bounds and calling drop impls in const contexts. - (unstable, const_destruct, "CURRENT_RUSTC_VERSION", Some(133214)), + (unstable, const_destruct, "1.85.0", Some(133214)), /// Allows `for _ in _` loops in const contexts. (unstable, const_for, "1.56.0", Some(87575)), /// Be more precise when looking for live drops in a const context. @@ -459,15 +462,13 @@ declare_features! ( (unstable, decl_macro, "1.17.0", Some(39412)), /// Allows the use of default values on struct definitions and the construction of struct /// literals with the functional update syntax without a base. - (unstable, default_field_values, "CURRENT_RUSTC_VERSION", Some(132162)), + (unstable, default_field_values, "1.85.0", Some(132162)), /// Allows using `#[deprecated_safe]` to deprecate the safeness of a function or trait (unstable, deprecated_safe, "1.61.0", Some(94978)), /// Allows having using `suggestion` in the `#[deprecated]` attribute. (unstable, deprecated_suggestion, "1.61.0", Some(94785)), /// Allows deref patterns. 
(incomplete, deref_patterns, "1.79.0", Some(87121)), - /// Controls errors in trait implementations. - (unstable, do_not_recommend, "1.67.0", Some(51992)), /// Tells rustdoc to automatically generate `#[doc(cfg(...))]`. (unstable, doc_auto_cfg, "1.58.0", Some(43781)), /// Allows `#[doc(cfg(...))]`. @@ -511,7 +512,7 @@ declare_features! ( /// Allows registering static items globally, possibly across crates, to iterate over at runtime. (unstable, global_registration, "1.80.0", Some(125119)), /// Allows using guards in patterns. - (incomplete, guard_patterns, "CURRENT_RUSTC_VERSION", Some(129967)), + (incomplete, guard_patterns, "1.85.0", Some(129967)), /// Allows using `..=X` as a patterns in slices. (unstable, half_open_range_patterns_in_slices, "1.66.0", Some(67264)), /// Allows `if let` guard in match arms. @@ -522,6 +523,8 @@ declare_features! ( (unstable, impl_trait_in_bindings, "1.64.0", Some(63065)), /// Allows `impl Trait` as output type in `Fn` traits in return position of functions. (unstable, impl_trait_in_fn_trait_return, "1.64.0", Some(99697)), + /// Allows `use` associated functions from traits. + (unstable, import_trait_associated_functions, "CURRENT_RUSTC_VERSION", Some(134691)), /// Allows associated types in inherent impls. (incomplete, inherent_associated_types, "1.52.0", Some(8995)), /// Allow anonymous constants from an inline `const` block in pattern position @@ -640,9 +643,9 @@ declare_features! ( /// not changed from prior instances of the same struct (RFC #2528) (unstable, type_changing_struct_update, "1.58.0", Some(86555)), /// Allows using `unsafe<'a> &'a T` unsafe binder types. - (incomplete, unsafe_binders, "CURRENT_RUSTC_VERSION", Some(130516)), + (incomplete, unsafe_binders, "1.85.0", Some(130516)), /// Allows declaring fields `unsafe`. - (incomplete, unsafe_fields, "CURRENT_RUSTC_VERSION", Some(132922)), + (incomplete, unsafe_fields, "1.85.0", Some(132922)), /// Allows const generic parameters to be defined with types that /// are not `Sized`, e.g. `fn foo() {`. 
(incomplete, unsized_const_params, "1.82.0", Some(95174)), diff --git a/compiler/rustc_hir/Cargo.toml b/compiler/rustc_hir/Cargo.toml index 5bfc4756ec6a7..580967dce75c9 100644 --- a/compiler/rustc_hir/Cargo.toml +++ b/compiler/rustc_hir/Cargo.toml @@ -9,6 +9,7 @@ odht = { version = "0.3.1", features = ["nightly"] } rustc_abi = { path = "../rustc_abi" } rustc_arena = { path = "../rustc_arena" } rustc_ast = { path = "../rustc_ast" } +rustc_attr_data_structures = { path = "../rustc_attr_data_structures" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_index = { path = "../rustc_index" } rustc_macros = { path = "../rustc_macros" } diff --git a/compiler/rustc_hir/src/definitions.rs b/compiler/rustc_hir/src/definitions.rs index d85f9586d4210..dc527240f742a 100644 --- a/compiler/rustc_hir/src/definitions.rs +++ b/compiler/rustc_hir/src/definitions.rs @@ -11,7 +11,7 @@ use rustc_data_structures::stable_hasher::{Hash64, StableHasher}; use rustc_data_structures::unord::UnordMap; use rustc_index::IndexVec; use rustc_macros::{Decodable, Encodable}; -use rustc_span::symbol::{Symbol, kw, sym}; +use rustc_span::{Symbol, kw, sym}; use tracing::{debug, instrument}; pub use crate::def_id::DefPathHash; diff --git a/compiler/rustc_hir/src/hir.rs b/compiler/rustc_hir/src/hir.rs index 56dba0c61e28d..4cff3dc5ba936 100644 --- a/compiler/rustc_hir/src/hir.rs +++ b/compiler/rustc_hir/src/hir.rs @@ -1,18 +1,20 @@ +// ignore-tidy-filelength use std::fmt; use rustc_abi::ExternAbi; -// ignore-tidy-filelength use rustc_ast::attr::AttributeExt; use rustc_ast::token::CommentKind; use rustc_ast::util::parser::{AssocOp, ExprPrecedence}; use rustc_ast::{ - self as ast, AttrId, AttrStyle, DelimArgs, FloatTy, InlineAsmOptions, InlineAsmTemplatePiece, - IntTy, Label, LitKind, MetaItemInner, MetaItemLit, TraitObjectSyntax, UintTy, + self as ast, FloatTy, InlineAsmOptions, InlineAsmTemplatePiece, IntTy, Label, LitIntType, + LitKind, TraitObjectSyntax, UintTy, UnsafeBinderCastKind, }; pub use rustc_ast::{ - BinOp, BinOpKind, BindingMode, BorrowKind, BoundConstness, BoundPolarity, ByRef, CaptureBy, - ImplPolarity, IsAuto, Movability, Mutability, UnOp, UnsafeBinderCastKind, + AttrId, AttrStyle, BinOp, BinOpKind, BindingMode, BorrowKind, BoundConstness, BoundPolarity, + ByRef, CaptureBy, DelimArgs, ImplPolarity, IsAuto, MetaItemInner, MetaItemLit, Movability, + Mutability, UnOp, }; +use rustc_attr_data_structures::AttributeKind; use rustc_data_structures::fingerprint::Fingerprint; use rustc_data_structures::sorted_map::SortedMap; use rustc_index::IndexVec; @@ -20,8 +22,7 @@ use rustc_macros::{Decodable, Encodable, HashStable_Generic}; use rustc_span::def_id::LocalDefId; use rustc_span::hygiene::MacroKind; use rustc_span::source_map::Spanned; -use rustc_span::symbol::{Ident, Symbol, kw, sym}; -use rustc_span::{BytePos, DUMMY_SP, ErrorGuaranteed, Span}; +use rustc_span::{BytePos, DUMMY_SP, ErrorGuaranteed, Ident, Span, Symbol, kw, sym}; use rustc_target::asm::InlineAsmRegOrRegClass; use smallvec::SmallVec; use thin_vec::ThinVec; @@ -35,6 +36,7 @@ use crate::intravisit::FnKind; #[derive(Debug, Copy, Clone, HashStable_Generic)] pub struct Lifetime { + #[stable_hasher(ignore)] pub hir_id: HirId, /// Either "`'a`", referring to a named lifetime definition, @@ -53,6 +55,13 @@ pub enum ParamName { /// Some user-given name like `T` or `'x`. Plain(Ident), + /// Indicates an illegal name was given and an error has been + /// reported (so we should squelch other derived errors). 
+ /// + /// Occurs when, e.g., `'_` is used in the wrong place, or a + /// lifetime name is duplicated. + Error(Ident), + /// Synthetic name generated when user elided a lifetime in an impl header. /// /// E.g., the lifetimes in cases like these: @@ -68,18 +77,13 @@ pub enum ParamName { /// where `'f` is something like `Fresh(0)`. The indices are /// unique per impl, but not necessarily continuous. Fresh, - - /// Indicates an illegal name was given and an error has been - /// reported (so we should squelch other derived errors). Occurs - /// when, e.g., `'_` is used in the wrong place. - Error, } impl ParamName { pub fn ident(&self) -> Ident { match *self { - ParamName::Plain(ident) => ident, - ParamName::Fresh | ParamName::Error => Ident::with_dummy_span(kw::UnderscoreLifetime), + ParamName::Plain(ident) | ParamName::Error(ident) => ident, + ParamName::Fresh => Ident::with_dummy_span(kw::UnderscoreLifetime), } } } @@ -213,6 +217,7 @@ impl Path<'_> { pub struct PathSegment<'hir> { /// The identifier portion of this path segment. pub ident: Ident, + #[stable_hasher(ignore)] pub hir_id: HirId, pub res: Res, @@ -303,6 +308,7 @@ pub enum ConstArgKind<'hir> { #[derive(Clone, Copy, Debug, HashStable_Generic)] pub struct InferArg { + #[stable_hasher(ignore)] pub hir_id: HirId, pub span: Span, } @@ -591,6 +597,7 @@ pub enum GenericParamKind<'hir> { #[derive(Debug, Clone, Copy, HashStable_Generic)] pub struct GenericParam<'hir> { + #[stable_hasher(ignore)] pub hir_id: HirId, pub def_id: LocalDefId, pub name: ParamName, @@ -849,6 +856,7 @@ impl<'hir> Generics<'hir> { /// A single predicate in a where-clause. #[derive(Debug, Clone, Copy, HashStable_Generic)] pub struct WherePredicate<'hir> { + #[stable_hasher(ignore)] pub hir_id: HirId, pub span: Span, pub kind: &'hir WherePredicateKind<'hir>, @@ -957,60 +965,80 @@ pub enum AttrArgs { }, } -#[derive(Clone, Debug, Encodable, Decodable)] -pub enum AttrKind { - /// A normal attribute. - Normal(Box), - - /// A doc comment (e.g. `/// ...`, `//! ...`, `/** ... */`, `/*! ... */`). - /// Doc attributes (e.g. `#[doc="..."]`) are represented with the `Normal` - /// variant (which is much less compact and thus more expensive). - DocComment(CommentKind, Symbol), -} - #[derive(Clone, Debug, HashStable_Generic, Encodable, Decodable)] pub struct AttrPath { pub segments: Box<[Ident]>, pub span: Span, } +impl AttrPath { + pub fn from_ast(path: &ast::Path) -> Self { + AttrPath { + segments: path.segments.iter().map(|i| i.ident).collect::>().into_boxed_slice(), + span: path.span, + } + } +} + +impl fmt::Display for AttrPath { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.segments.iter().map(|i| i.to_string()).collect::>().join("::")) + } +} + #[derive(Clone, Debug, HashStable_Generic, Encodable, Decodable)] pub struct AttrItem { - pub unsafety: Safety, // Not lowered to hir::Path because we have no NodeId to resolve to. pub path: AttrPath, pub args: AttrArgs, -} - -#[derive(Clone, Debug, Encodable, Decodable)] -pub struct Attribute { - pub kind: AttrKind, - pub id: AttrId, + pub id: HashIgnoredAttrId, /// Denotes if the attribute decorates the following construct (outer) /// or the construct this attribute is contained within (inner). pub style: AttrStyle, + /// Span of the entire attribute pub span: Span, } +/// The derived implementation of [`HashStable_Generic`] on [`Attribute`]s shouldn't hash +/// [`AttrId`]s. By wrapping them in this, we make sure we never do. 
+#[derive(Copy, Debug, Encodable, Decodable, Clone)] +pub struct HashIgnoredAttrId { + pub attr_id: AttrId, +} + +#[derive(Clone, Debug, Encodable, Decodable, HashStable_Generic)] +pub enum Attribute { + /// A parsed built-in attribute. + /// + /// Each attribute has a span connected to it. However, you must be somewhat careful using it. + /// That's because sometimes we merge multiple attributes together, like when an item has + /// multiple `repr` attributes. In this case the span might not be very useful. + Parsed(AttributeKind), + + /// An attribute that could not be parsed, out of a token-like representation. + /// This is the case for custom tool attributes. + Unparsed(Box), +} + impl Attribute { pub fn get_normal_item(&self) -> &AttrItem { - match &self.kind { - AttrKind::Normal(normal) => &normal, - AttrKind::DocComment(..) => panic!("unexpected doc comment"), + match &self { + Attribute::Unparsed(normal) => &normal, + _ => panic!("unexpected parsed attribute"), } } pub fn unwrap_normal_item(self) -> AttrItem { - match self.kind { - AttrKind::Normal(normal) => *normal, - AttrKind::DocComment(..) => panic!("unexpected doc comment"), + match self { + Attribute::Unparsed(normal) => *normal, + _ => panic!("unexpected parsed attribute"), } } pub fn value_lit(&self) -> Option<&MetaItemLit> { - match &self.kind { - AttrKind::Normal(n) => match n.as_ref() { - AttrItem { args: AttrArgs::Eq { expr, .. }, .. } => Some(expr), + match &self { + Attribute::Unparsed(n) => match n.as_ref() { + AttrItem { args: AttrArgs::Eq { eq_span: _, expr }, .. } => Some(expr), _ => None, }, _ => None, @@ -1020,12 +1048,15 @@ impl Attribute { impl AttributeExt for Attribute { fn id(&self) -> AttrId { - self.id + match &self { + Attribute::Unparsed(u) => u.id.attr_id, + _ => panic!(), + } } fn meta_item_list(&self) -> Option> { - match &self.kind { - AttrKind::Normal(n) => match n.as_ref() { + match &self { + Attribute::Unparsed(n) => match n.as_ref() { AttrItem { args: AttrArgs::Delimited(d), .. } => { ast::MetaItemKind::list_from_tokens(d.tokens.clone()) } @@ -1045,63 +1076,70 @@ impl AttributeExt for Attribute { /// For a single-segment attribute, returns its name; otherwise, returns `None`. fn ident(&self) -> Option { - match &self.kind { - AttrKind::Normal(n) => { + match &self { + Attribute::Unparsed(n) => { if let [ident] = n.path.segments.as_ref() { Some(*ident) } else { None } } - AttrKind::DocComment(..) => None, + _ => None, } } fn path_matches(&self, name: &[Symbol]) -> bool { - match &self.kind { - AttrKind::Normal(n) => { + match &self { + Attribute::Unparsed(n) => { n.path.segments.len() == name.len() && n.path.segments.iter().zip(name).all(|(s, n)| s.name == *n) } - AttrKind::DocComment(..) => false, + _ => false, } } fn is_doc_comment(&self) -> bool { - matches!(self.kind, AttrKind::DocComment(..)) + matches!(self, Attribute::Parsed(AttributeKind::DocComment { .. })) } fn span(&self) -> Span { - self.span + match &self { + Attribute::Unparsed(u) => u.span, + // FIXME: should not be needed anymore when all attrs are parsed + Attribute::Parsed(AttributeKind::Deprecation { span, .. }) => *span, + a => panic!("can't get the span of an arbitrary parsed attribute: {a:?}"), + } } fn is_word(&self) -> bool { - match &self.kind { - AttrKind::Normal(n) => { + match &self { + Attribute::Unparsed(n) => { matches!(n.args, AttrArgs::Empty) } - AttrKind::DocComment(..) 
=> false, + _ => false, } } fn ident_path(&self) -> Option> { - match &self.kind { - AttrKind::Normal(n) => Some(n.path.segments.iter().copied().collect()), - AttrKind::DocComment(..) => None, + match &self { + Attribute::Unparsed(n) => Some(n.path.segments.iter().copied().collect()), + _ => None, } } fn doc_str(&self) -> Option { - match &self.kind { - AttrKind::DocComment(.., data) => Some(*data), - AttrKind::Normal(_) if self.has_name(sym::doc) => self.value_str(), + match &self { + Attribute::Parsed(AttributeKind::DocComment { comment, .. }) => Some(*comment), + Attribute::Unparsed(_) if self.has_name(sym::doc) => self.value_str(), _ => None, } } fn doc_str_and_comment_kind(&self) -> Option<(Symbol, CommentKind)> { - match &self.kind { - AttrKind::DocComment(kind, data) => Some((*data, *kind)), - AttrKind::Normal(_) if self.name_or_empty() == sym::doc => { + match &self { + Attribute::Parsed(AttributeKind::DocComment { kind, comment, .. }) => { + Some((*comment, *kind)) + } + Attribute::Unparsed(_) if self.name_or_empty() == sym::doc => { self.value_str().map(|s| (s, CommentKind::Line)) } _ => None, @@ -1109,7 +1147,10 @@ impl AttributeExt for Attribute { } fn style(&self) -> AttrStyle { - self.style + match &self { + Attribute::Unparsed(u) => u.style, + _ => panic!(), + } } } @@ -1385,8 +1426,8 @@ impl<'hir> Pat<'hir> { use PatKind::*; match self.kind { - Wild | Never | Lit(_) | Range(..) | Binding(.., None) | Path(_) | Err(_) => true, - Box(s) | Deref(s) | Ref(s, _) | Binding(.., Some(s)) => s.walk_short_(it), + Wild | Never | Expr(_) | Range(..) | Binding(.., None) | Path(_) | Err(_) => true, + Box(s) | Deref(s) | Ref(s, _) | Binding(.., Some(s)) | Guard(s, _) => s.walk_short_(it), Struct(_, fields, _) => fields.iter().all(|field| field.pat.walk_short_(it)), TupleStruct(_, s, _) | Tuple(s, _) | Or(s) => s.iter().all(|p| p.walk_short_(it)), Slice(before, slice, after) => { @@ -1412,8 +1453,8 @@ impl<'hir> Pat<'hir> { use PatKind::*; match self.kind { - Wild | Never | Lit(_) | Range(..) | Binding(.., None) | Path(_) | Err(_) => {} - Box(s) | Deref(s) | Ref(s, _) | Binding(.., Some(s)) => s.walk_(it), + Wild | Never | Expr(_) | Range(..) | Binding(.., None) | Path(_) | Err(_) => {} + Box(s) | Deref(s) | Ref(s, _) | Binding(.., Some(s)) | Guard(s, _) => s.walk_(it), Struct(_, fields, _) => fields.iter().for_each(|field| field.pat.walk_(it)), TupleStruct(_, s, _) | Tuple(s, _) | Or(s) => s.iter().for_each(|p| p.walk_(it)), Slice(before, slice, after) => { @@ -1518,6 +1559,27 @@ impl fmt::Debug for DotDotPos { } } +#[derive(Debug, Clone, Copy, HashStable_Generic)] +pub struct PatExpr<'hir> { + #[stable_hasher(ignore)] + pub hir_id: HirId, + pub span: Span, + pub kind: PatExprKind<'hir>, +} + +#[derive(Debug, Clone, Copy, HashStable_Generic)] +pub enum PatExprKind<'hir> { + Lit { + lit: &'hir Lit, + // FIXME: move this into `Lit` and handle negated literal expressions + // once instead of matching on unop neg expressions everywhere. + negated: bool, + }, + ConstBlock(ConstBlock), + /// A path pattern for a unit struct/variant or a (maybe-associated) constant. + Path(QPath<'hir>), +} + #[derive(Debug, Clone, Copy, HashStable_Generic)] pub enum PatKind<'hir> { /// Represents a wildcard pattern (i.e., `_`). @@ -1562,11 +1624,14 @@ pub enum PatKind<'hir> { /// A reference pattern (e.g., `&mut (a, b)`). Ref(&'hir Pat<'hir>, Mutability), - /// A literal. - Lit(&'hir Expr<'hir>), + /// A literal, const block or path. + Expr(&'hir PatExpr<'hir>), + + /// A guard pattern (e.g., `x if guard(x)`). 
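// Illustrative sketch (not part of this patch): how a downstream consumer might
// match the reworked pattern HIR -- `PatKind::Expr` wrapping a `PatExpr`, plus the
// `Guard` variant declared just below -- given some `pat: &hir::Pat<'_>` obtained elsewhere.
match pat.kind {
    hir::PatKind::Expr(pat_expr) => match pat_expr.kind {
        // Literals carry a separate `negated` flag for now (see the FIXME above).
        hir::PatExprKind::Lit { lit: _, negated } => { let _ = negated; }
        hir::PatExprKind::ConstBlock(_) => { /* inline `const { ... }` block */ }
        hir::PatExprKind::Path(_) => { /* unit struct/variant, or a const path */ }
    },
    // `subpat if cond` used as a pattern.
    hir::PatKind::Guard(_subpat, _cond) => {}
    _ => {}
}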
+ Guard(&'hir Pat<'hir>, &'hir Expr<'hir>), /// A range pattern (e.g., `1..=2` or `1..2`). - Range(Option<&'hir Expr<'hir>>, Option<&'hir Expr<'hir>>, RangeEnd), + Range(Option<&'hir PatExpr<'hir>>, Option<&'hir PatExpr<'hir>>, RangeEnd), /// A slice pattern, `[before_0, ..., before_n, (slice, after_0, ..., after_n)?]`. /// @@ -1586,6 +1651,7 @@ pub enum PatKind<'hir> { /// A statement. #[derive(Debug, Clone, Copy, HashStable_Generic)] pub struct Stmt<'hir> { + #[stable_hasher(ignore)] pub hir_id: HirId, pub kind: StmtKind<'hir>, pub span: Span, @@ -1617,6 +1683,7 @@ pub struct LetStmt<'hir> { pub init: Option<&'hir Expr<'hir>>, /// Else block for a `let...else` binding. pub els: Option<&'hir Block<'hir>>, + #[stable_hasher(ignore)] pub hir_id: HirId, pub span: Span, /// Can be `ForLoopDesugar` if the `let` statement is part of a `for` loop @@ -1913,6 +1980,7 @@ pub type Lit = Spanned; /// `const N: usize = { ... }` with `tcx.hir().opt_const_param_default_param_def_id(..)` #[derive(Copy, Clone, Debug, HashStable_Generic)] pub struct AnonConst { + #[stable_hasher(ignore)] pub hir_id: HirId, pub def_id: LocalDefId, pub body: BodyId, @@ -1922,6 +1990,7 @@ pub struct AnonConst { /// An inline constant expression `const { something }`. #[derive(Copy, Clone, Debug, HashStable_Generic)] pub struct ConstBlock { + #[stable_hasher(ignore)] pub hir_id: HirId, pub def_id: LocalDefId, pub body: BodyId, @@ -1937,6 +2006,7 @@ pub struct ConstBlock { /// [rust lang reference]: https://doc.rust-lang.org/reference/expressions.html #[derive(Debug, Clone, Copy, HashStable_Generic)] pub struct Expr<'hir> { + #[stable_hasher(ignore)] pub hir_id: HirId, pub kind: ExprKind<'hir>, pub span: Span, @@ -1944,11 +2014,15 @@ pub struct Expr<'hir> { impl Expr<'_> { pub fn precedence(&self) -> ExprPrecedence { - match self.kind { - ExprKind::Closure { .. } => ExprPrecedence::Closure, + match &self.kind { + ExprKind::Closure(closure) => { + match closure.fn_decl.output { + FnRetTy::DefaultReturn(_) => ExprPrecedence::Jump, + FnRetTy::Return(_) => ExprPrecedence::Unambiguous, + } + } ExprKind::Break(..) - | ExprKind::Continue(..) | ExprKind::Ret(..) | ExprKind::Yield(..) | ExprKind::Become(..) => ExprPrecedence::Jump, @@ -1974,6 +2048,7 @@ impl Expr<'_> { | ExprKind::Block(..) | ExprKind::Call(..) | ExprKind::ConstBlock(_) + | ExprKind::Continue(..) | ExprKind::Field(..) | ExprKind::If(..) | ExprKind::Index(..) @@ -1991,7 +2066,7 @@ impl Expr<'_> { | ExprKind::UnsafeBinderCast(..) | ExprKind::Err(_) => ExprPrecedence::Unambiguous, - ExprKind::DropTemps(ref expr, ..) => expr.precedence(), + ExprKind::DropTemps(expr, ..) => expr.precedence(), } } @@ -2065,6 +2140,18 @@ impl Expr<'_> { } } + /// Check if expression is an integer literal that can be used + /// where `usize` is expected. + pub fn is_size_lit(&self) -> bool { + matches!( + self.kind, + ExprKind::Lit(Lit { + node: LitKind::Int(_, LitIntType::Unsuffixed | LitIntType::Unsigned(UintTy::Usize)), + .. + }) + ) + } + /// If `Self.kind` is `ExprKind::DropTemps(expr)`, drill down until we get a non-`DropTemps` /// `Expr`. This is used in suggestions to ignore this `ExprKind` as it is semantically /// silent, only signaling the ownership system. 
By doing this, suggestions that check the @@ -2798,6 +2885,7 @@ pub enum ImplItemKind<'hir> { /// * the `f(..): Bound` in `Trait` (feature `return_type_notation`) #[derive(Debug, Clone, Copy, HashStable_Generic)] pub struct AssocItemConstraint<'hir> { + #[stable_hasher(ignore)] pub hir_id: HirId, pub ident: Ident, pub gen_args: &'hir GenericArgs<'hir>, @@ -2866,6 +2954,7 @@ impl<'hir> AssocItemConstraintKind<'hir> { #[derive(Debug, Clone, Copy, HashStable_Generic)] pub struct Ty<'hir> { + #[stable_hasher(ignore)] pub hir_id: HirId, pub kind: TyKind<'hir>, pub span: Span, @@ -3061,6 +3150,7 @@ pub struct UnsafeBinderTy<'hir> { #[derive(Debug, Clone, Copy, HashStable_Generic)] pub struct OpaqueTy<'hir> { + #[stable_hasher(ignore)] pub hir_id: HirId, pub def_id: LocalDefId, pub bounds: GenericBounds<'hir>, @@ -3097,6 +3187,7 @@ impl PreciseCapturingArg<'_> { /// since resolve_bound_vars operates on `Lifetime`s. #[derive(Debug, Clone, Copy, HashStable_Generic)] pub struct PreciseCapturingNonLifetimeArg { + #[stable_hasher(ignore)] pub hir_id: HirId, pub ident: Ident, pub res: Res, @@ -3270,6 +3361,7 @@ impl InlineAsm<'_> { /// Represents a parameter in a function header. #[derive(Debug, Clone, Copy, HashStable_Generic)] pub struct Param<'hir> { + #[stable_hasher(ignore)] pub hir_id: HirId, pub pat: &'hir Pat<'hir>, pub ty_span: Span, @@ -3381,7 +3473,7 @@ impl<'hir> FnRetTy<'hir> { } } - pub fn get_infer_ret_ty(&self) -> Option<&'hir Ty<'hir>> { + pub fn is_suggestable_infer_ty(&self) -> Option<&'hir Ty<'hir>> { if let Self::Return(ty) = self { if ty.is_suggestable_infer_ty() { return Some(*ty); @@ -3427,6 +3519,7 @@ pub struct Variant<'hir> { /// Name of the variant. pub ident: Ident, /// Id of the variant (not the constructor, see `VariantData::ctor_hir_id()`). + #[stable_hasher(ignore)] pub hir_id: HirId, pub def_id: LocalDefId, /// Fields and constructor id of the variant. @@ -3499,6 +3592,7 @@ pub struct FieldDef<'hir> { pub span: Span, pub vis_span: Span, pub ident: Ident, + #[stable_hasher(ignore)] pub hir_id: HirId, pub def_id: LocalDefId, pub ty: &'hir Ty<'hir>, @@ -3523,11 +3617,11 @@ pub enum VariantData<'hir> { /// A tuple variant. /// /// E.g., `Bar(..)` as in `enum Foo { Bar(..) }`. - Tuple(&'hir [FieldDef<'hir>], HirId, LocalDefId), + Tuple(&'hir [FieldDef<'hir>], #[stable_hasher(ignore)] HirId, LocalDefId), /// A unit variant. /// /// E.g., `Bar = ..` as in `enum Foo { Bar = .. }`. - Unit(HirId, LocalDefId), + Unit(#[stable_hasher(ignore)] HirId, LocalDefId), } impl<'hir> VariantData<'hir> { @@ -3634,7 +3728,7 @@ impl<'hir> Item<'hir> { ItemKind::Const(ty, generics, body), (ty, generics, *body); expect_fn, (&FnSig<'hir>, &'hir Generics<'hir>, BodyId), - ItemKind::Fn(sig, generics, body), (sig, generics, *body); + ItemKind::Fn { sig, generics, body, .. }, (sig, generics, *body); expect_macro, (&ast::MacroDef, MacroKind), ItemKind::Macro(def, mk), (def, *mk); @@ -3721,9 +3815,30 @@ impl fmt::Display for Constness { } } +/// The actualy safety specified in syntax. We may treat +/// its safety different within the type system to create a +/// "sound by default" system that needs checking this enum +/// explicitly to allow unsafe operations. +#[derive(Copy, Clone, Debug, HashStable_Generic, PartialEq, Eq)] +pub enum HeaderSafety { + /// A safe function annotated with `#[target_features]`. 
+ /// The type system treats this function as an unsafe function, + /// but safety checking will check this enum to treat it as safe + /// and allowing calling other safe target feature functions with + /// the same features without requiring an additional unsafe block. + SafeTargetFeatures, + Normal(Safety), +} + +impl From for HeaderSafety { + fn from(v: Safety) -> Self { + Self::Normal(v) + } +} + #[derive(Copy, Clone, Debug, HashStable_Generic)] pub struct FnHeader { - pub safety: Safety, + pub safety: HeaderSafety, pub constness: Constness, pub asyncness: IsAsync, pub abi: ExternAbi, @@ -3739,7 +3854,18 @@ impl FnHeader { } pub fn is_unsafe(&self) -> bool { - self.safety.is_unsafe() + self.safety().is_unsafe() + } + + pub fn is_safe(&self) -> bool { + self.safety().is_safe() + } + + pub fn safety(&self) -> Safety { + match self.safety { + HeaderSafety::SafeTargetFeatures => Safety::Unsafe, + HeaderSafety::Normal(safety) => safety, + } } } @@ -3762,7 +3888,15 @@ pub enum ItemKind<'hir> { /// A `const` item. Const(&'hir Ty<'hir>, &'hir Generics<'hir>, BodyId), /// A function declaration. - Fn(FnSig<'hir>, &'hir Generics<'hir>, BodyId), + Fn { + sig: FnSig<'hir>, + generics: &'hir Generics<'hir>, + body: BodyId, + /// Whether this function actually has a body. + /// For functions without a body, `body` is synthesized (to avoid ICEs all over the + /// compiler), but that code should never be translated. + has_body: bool, + }, /// A MBE macro definition (`macro_rules!` or `macro`). Macro(&'hir ast::MacroDef, MacroKind), /// A module. @@ -3813,7 +3947,7 @@ pub struct Impl<'hir> { impl ItemKind<'_> { pub fn generics(&self) -> Option<&Generics<'_>> { Some(match *self { - ItemKind::Fn(_, ref generics, _) + ItemKind::Fn { ref generics, .. } | ItemKind::TyAlias(_, ref generics) | ItemKind::Const(_, ref generics, _) | ItemKind::Enum(_, ref generics) @@ -3832,7 +3966,7 @@ impl ItemKind<'_> { ItemKind::Use(..) => "`use` import", ItemKind::Static(..) => "static item", ItemKind::Const(..) => "constant item", - ItemKind::Fn(..) => "function", + ItemKind::Fn { .. } => "function", ItemKind::Macro(..) => "macro", ItemKind::Mod(..) => "module", ItemKind::ForeignMod { .. } => "extern block", @@ -3998,7 +4132,7 @@ impl<'hir> OwnerNode<'hir> { match self { OwnerNode::TraitItem(TraitItem { kind: TraitItemKind::Fn(fn_sig, _), .. }) | OwnerNode::ImplItem(ImplItem { kind: ImplItemKind::Fn(fn_sig, _), .. }) - | OwnerNode::Item(Item { kind: ItemKind::Fn(fn_sig, _, _), .. }) + | OwnerNode::Item(Item { kind: ItemKind::Fn { sig: fn_sig, .. }, .. }) | OwnerNode::ForeignItem(ForeignItem { kind: ForeignItemKind::Fn(fn_sig, _, _), .. }) => Some(fn_sig), @@ -4010,7 +4144,7 @@ impl<'hir> OwnerNode<'hir> { match self { OwnerNode::TraitItem(TraitItem { kind: TraitItemKind::Fn(fn_sig, _), .. }) | OwnerNode::ImplItem(ImplItem { kind: ImplItemKind::Fn(fn_sig, _), .. }) - | OwnerNode::Item(Item { kind: ItemKind::Fn(fn_sig, _, _), .. }) + | OwnerNode::Item(Item { kind: ItemKind::Fn { sig: fn_sig, .. }, .. }) | OwnerNode::ForeignItem(ForeignItem { kind: ForeignItemKind::Fn(fn_sig, _, _), .. }) => Some(fn_sig.decl), @@ -4024,7 +4158,7 @@ impl<'hir> OwnerNode<'hir> { kind: ItemKind::Static(_, _, body) | ItemKind::Const(_, _, body) - | ItemKind::Fn(_, _, body), + | ItemKind::Fn { body, .. }, .. 
}) | OwnerNode::TraitItem(TraitItem { @@ -4068,33 +4202,33 @@ impl<'hir> OwnerNode<'hir> { } } -impl<'hir> Into> for &'hir Item<'hir> { - fn into(self) -> OwnerNode<'hir> { - OwnerNode::Item(self) +impl<'hir> From<&'hir Item<'hir>> for OwnerNode<'hir> { + fn from(val: &'hir Item<'hir>) -> Self { + OwnerNode::Item(val) } } -impl<'hir> Into> for &'hir ForeignItem<'hir> { - fn into(self) -> OwnerNode<'hir> { - OwnerNode::ForeignItem(self) +impl<'hir> From<&'hir ForeignItem<'hir>> for OwnerNode<'hir> { + fn from(val: &'hir ForeignItem<'hir>) -> Self { + OwnerNode::ForeignItem(val) } } -impl<'hir> Into> for &'hir ImplItem<'hir> { - fn into(self) -> OwnerNode<'hir> { - OwnerNode::ImplItem(self) +impl<'hir> From<&'hir ImplItem<'hir>> for OwnerNode<'hir> { + fn from(val: &'hir ImplItem<'hir>) -> Self { + OwnerNode::ImplItem(val) } } -impl<'hir> Into> for &'hir TraitItem<'hir> { - fn into(self) -> OwnerNode<'hir> { - OwnerNode::TraitItem(self) +impl<'hir> From<&'hir TraitItem<'hir>> for OwnerNode<'hir> { + fn from(val: &'hir TraitItem<'hir>) -> Self { + OwnerNode::TraitItem(val) } } -impl<'hir> Into> for OwnerNode<'hir> { - fn into(self) -> Node<'hir> { - match self { +impl<'hir> From> for Node<'hir> { + fn from(val: OwnerNode<'hir>) -> Self { + match val { OwnerNode::Item(n) => Node::Item(n), OwnerNode::ForeignItem(n) => Node::ForeignItem(n), OwnerNode::ImplItem(n) => Node::ImplItem(n), @@ -4127,6 +4261,10 @@ pub enum Node<'hir> { OpaqueTy(&'hir OpaqueTy<'hir>), Pat(&'hir Pat<'hir>), PatField(&'hir PatField<'hir>), + /// Needed as its own node with its own HirId for tracking + /// the unadjusted type of literals within patterns + /// (e.g. byte str literals not being of slice type). + PatExpr(&'hir PatExpr<'hir>), Arm(&'hir Arm<'hir>), Block(&'hir Block<'hir>), LetStmt(&'hir LetStmt<'hir>), @@ -4183,6 +4321,7 @@ impl<'hir> Node<'hir> { | Node::Block(..) | Node::Ctor(..) | Node::Pat(..) + | Node::PatExpr(..) | Node::Arm(..) | Node::LetStmt(..) | Node::Crate(..) @@ -4200,7 +4339,7 @@ impl<'hir> Node<'hir> { match self { Node::TraitItem(TraitItem { kind: TraitItemKind::Fn(fn_sig, _), .. }) | Node::ImplItem(ImplItem { kind: ImplItemKind::Fn(fn_sig, _), .. }) - | Node::Item(Item { kind: ItemKind::Fn(fn_sig, _, _), .. }) + | Node::Item(Item { kind: ItemKind::Fn { sig: fn_sig, .. }, .. }) | Node::ForeignItem(ForeignItem { kind: ForeignItemKind::Fn(fn_sig, _, _), .. }) => { Some(fn_sig.decl) } @@ -4230,7 +4369,7 @@ impl<'hir> Node<'hir> { match self { Node::TraitItem(TraitItem { kind: TraitItemKind::Fn(fn_sig, _), .. }) | Node::ImplItem(ImplItem { kind: ImplItemKind::Fn(fn_sig, _), .. }) - | Node::Item(Item { kind: ItemKind::Fn(fn_sig, _, _), .. }) + | Node::Item(Item { kind: ItemKind::Fn { sig: fn_sig, .. }, .. }) | Node::ForeignItem(ForeignItem { kind: ForeignItemKind::Fn(fn_sig, _, _), .. }) => { Some(fn_sig) } @@ -4275,7 +4414,7 @@ impl<'hir> Node<'hir> { Node::Item(Item { owner_id, kind: - ItemKind::Const(_, _, body) | ItemKind::Static(.., body) | ItemKind::Fn(.., body), + ItemKind::Const(_, _, body) | ItemKind::Static(.., body) | ItemKind::Fn { body, .. }, .. }) | Node::TraitItem(TraitItem { @@ -4332,7 +4471,7 @@ impl<'hir> Node<'hir> { pub fn fn_kind(self) -> Option> { match self { Node::Item(i) => match i.kind { - ItemKind::Fn(ref sig, ref generics, _) => { + ItemKind::Fn { sig, generics, .. 
} => { Some(FnKind::ItemFn(i.ident, generics, sig.header)) } _ => None, diff --git a/compiler/rustc_hir/src/intravisit.rs b/compiler/rustc_hir/src/intravisit.rs index 5ed5a43d522a8..ef863aca09052 100644 --- a/compiler/rustc_hir/src/intravisit.rs +++ b/compiler/rustc_hir/src/intravisit.rs @@ -66,9 +66,8 @@ use rustc_ast::Label; use rustc_ast::visit::{VisitorResult, try_visit, visit_opt, walk_list}; -use rustc_span::Span; use rustc_span::def_id::LocalDefId; -use rustc_span::symbol::{Ident, Symbol}; +use rustc_span::{Ident, Span, Symbol}; use crate::hir::*; @@ -343,6 +342,9 @@ pub trait Visitor<'v>: Sized { fn visit_pat_field(&mut self, f: &'v PatField<'v>) -> Self::Result { walk_pat_field(self, f) } + fn visit_pat_expr(&mut self, expr: &'v PatExpr<'v>) -> Self::Result { + walk_pat_expr(self, expr) + } fn visit_anon_const(&mut self, c: &'v AnonConst) -> Self::Result { walk_anon_const(self, c) } @@ -510,7 +512,7 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item<'v>) -> V:: try_visit!(visitor.visit_generics(generics)); try_visit!(visitor.visit_nested_body(body)); } - ItemKind::Fn(ref sig, ref generics, body_id) => { + ItemKind::Fn { sig, generics, body: body_id, .. } => { try_visit!(visitor.visit_id(item.hir_id())); try_visit!(visitor.visit_fn( FnKind::ItemFn(item.ident, generics, sig.header), @@ -686,10 +688,10 @@ pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat<'v>) -> V: try_visit!(visitor.visit_ident(ident)); visit_opt!(visitor, visit_pat, optional_subpattern); } - PatKind::Lit(ref expression) => try_visit!(visitor.visit_expr(expression)), + PatKind::Expr(ref expression) => try_visit!(visitor.visit_pat_expr(expression)), PatKind::Range(ref lower_bound, ref upper_bound, _) => { - visit_opt!(visitor, visit_expr, lower_bound); - visit_opt!(visitor, visit_expr, upper_bound); + visit_opt!(visitor, visit_pat_expr, lower_bound); + visit_opt!(visitor, visit_pat_expr, upper_bound); } PatKind::Never | PatKind::Wild | PatKind::Err(_) => (), PatKind::Slice(prepatterns, ref slice_pattern, postpatterns) => { @@ -697,6 +699,10 @@ pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat<'v>) -> V: visit_opt!(visitor, visit_pat, slice_pattern); walk_list!(visitor, visit_pat, postpatterns); } + PatKind::Guard(subpat, condition) => { + try_visit!(visitor.visit_pat(subpat)); + try_visit!(visitor.visit_expr(condition)); + } } V::Result::output() } @@ -707,6 +713,15 @@ pub fn walk_pat_field<'v, V: Visitor<'v>>(visitor: &mut V, field: &'v PatField<' visitor.visit_pat(field.pat) } +pub fn walk_pat_expr<'v, V: Visitor<'v>>(visitor: &mut V, expr: &'v PatExpr<'v>) -> V::Result { + try_visit!(visitor.visit_id(expr.hir_id)); + match &expr.kind { + PatExprKind::Lit { .. } => V::Result::output(), + PatExprKind::ConstBlock(c) => visitor.visit_inline_const(c), + PatExprKind::Path(qpath) => visitor.visit_qpath(qpath, expr.hir_id, expr.span), + } +} + pub fn walk_anon_const<'v, V: Visitor<'v>>(visitor: &mut V, constant: &'v AnonConst) -> V::Result { try_visit!(visitor.visit_id(constant.hir_id)); visitor.visit_nested_body(constant.body) @@ -929,8 +944,8 @@ pub fn walk_generic_param<'v, V: Visitor<'v>>( ) -> V::Result { try_visit!(visitor.visit_id(param.hir_id)); match param.name { - ParamName::Plain(ident) => try_visit!(visitor.visit_ident(ident)), - ParamName::Error | ParamName::Fresh => {} + ParamName::Plain(ident) | ParamName::Error(ident) => try_visit!(visitor.visit_ident(ident)), + ParamName::Fresh => {} } match param.kind { GenericParamKind::Lifetime { .. 
} => {} diff --git a/compiler/rustc_hir/src/lang_items.rs b/compiler/rustc_hir/src/lang_items.rs index 3684695774eac..fae3b778d7bdd 100644 --- a/compiler/rustc_hir/src/lang_items.rs +++ b/compiler/rustc_hir/src/lang_items.rs @@ -11,8 +11,7 @@ use rustc_ast::attr::AttributeExt; use rustc_data_structures::fx::FxIndexMap; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; -use rustc_span::Span; -use rustc_span::symbol::{Symbol, kw, sym}; +use rustc_span::{Span, Symbol, kw, sym}; use crate::def_id::DefId; use crate::{MethodKind, Target}; diff --git a/compiler/rustc_hir/src/pat_util.rs b/compiler/rustc_hir/src/pat_util.rs index 2ebbb4a06b6bf..bb853985c7d8a 100644 --- a/compiler/rustc_hir/src/pat_util.rs +++ b/compiler/rustc_hir/src/pat_util.rs @@ -1,7 +1,6 @@ use std::iter::Enumerate; -use rustc_span::Span; -use rustc_span::symbol::Ident; +use rustc_span::{Ident, Span}; use crate::def::{CtorOf, DefKind, Res}; use crate::def_id::{DefId, DefIdSet}; diff --git a/compiler/rustc_hir/src/stable_hash_impls.rs b/compiler/rustc_hir/src/stable_hash_impls.rs index db0d0fcf3b916..b0cd26d8a914b 100644 --- a/compiler/rustc_hir/src/stable_hash_impls.rs +++ b/compiler/rustc_hir/src/stable_hash_impls.rs @@ -1,19 +1,17 @@ use rustc_data_structures::stable_hasher::{HashStable, StableHasher, ToStableHashKey}; use rustc_span::def_id::DefPathHash; +use crate::HashIgnoredAttrId; use crate::hir::{ - Attribute, AttributeMap, BodyId, Crate, ForeignItemId, ImplItemId, ItemId, OwnerNodes, - TraitItemId, + AttributeMap, BodyId, Crate, ForeignItemId, ImplItemId, ItemId, OwnerNodes, TraitItemId, }; use crate::hir_id::{HirId, ItemLocalId}; /// Requirements for a `StableHashingContext` to be used in this crate. /// This is a hack to allow using the `HashStable_Generic` derive macro /// instead of implementing everything in `rustc_middle`. -pub trait HashStableContext: - rustc_ast::HashStableContext + rustc_target::HashStableContext -{ - fn hash_attr(&mut self, _: &Attribute, hasher: &mut StableHasher); +pub trait HashStableContext: rustc_attr_data_structures::HashStableContext { + fn hash_attr_id(&mut self, id: &HashIgnoredAttrId, hasher: &mut StableHasher); } impl ToStableHashKey for HirId { @@ -116,8 +114,8 @@ impl HashStable for Crate<'_> { } } -impl HashStable for Attribute { +impl HashStable for HashIgnoredAttrId { fn hash_stable(&self, hcx: &mut HirCtx, hasher: &mut StableHasher) { - hcx.hash_attr(self, hasher) + hcx.hash_attr_id(self, hasher) } } diff --git a/compiler/rustc_hir/src/target.rs b/compiler/rustc_hir/src/target.rs index 6ff57396b4a19..f3e8f059c9e3c 100644 --- a/compiler/rustc_hir/src/target.rs +++ b/compiler/rustc_hir/src/target.rs @@ -106,7 +106,7 @@ impl Target { ItemKind::Use(..) => Target::Use, ItemKind::Static { .. } => Target::Static, ItemKind::Const(..) => Target::Const, - ItemKind::Fn(..) => Target::Fn, + ItemKind::Fn { .. } => Target::Fn, ItemKind::Macro(..) => Target::MacroDef, ItemKind::Mod(..) => Target::Mod, ItemKind::ForeignMod { .. 
} => Target::ForeignMod, diff --git a/compiler/rustc_hir/src/tests.rs b/compiler/rustc_hir/src/tests.rs index 953e48a6d3347..e0e63d183c63d 100644 --- a/compiler/rustc_hir/src/tests.rs +++ b/compiler/rustc_hir/src/tests.rs @@ -1,4 +1,4 @@ -#![cfg_attr(not(bootstrap), allow(rustc::symbol_intern_string_literal))] +#![allow(rustc::symbol_intern_string_literal)] use rustc_data_structures::stable_hasher::Hash64; use rustc_span::def_id::{DefPathHash, StableCrateId}; diff --git a/compiler/rustc_hir/src/weak_lang_items.rs b/compiler/rustc_hir/src/weak_lang_items.rs index 337859cd1fbb1..b4e548effd46d 100644 --- a/compiler/rustc_hir/src/weak_lang_items.rs +++ b/compiler/rustc_hir/src/weak_lang_items.rs @@ -1,6 +1,6 @@ //! Validity checking for weak lang items -use rustc_span::symbol::{Symbol, sym}; +use rustc_span::{Symbol, sym}; use crate::LangItem; diff --git a/compiler/rustc_hir_analysis/messages.ftl b/compiler/rustc_hir_analysis/messages.ftl index 0c3ed9b5c609b..d7ab6eca84b3d 100644 --- a/compiler/rustc_hir_analysis/messages.ftl +++ b/compiler/rustc_hir_analysis/messages.ftl @@ -135,7 +135,7 @@ hir_analysis_dispatch_from_dyn_multi = implementing the `DispatchFromDyn` trait hir_analysis_dispatch_from_dyn_repr = structs implementing `DispatchFromDyn` may not have `#[repr(packed)]` or `#[repr(C)]` -hir_analysis_dispatch_from_dyn_zst = the trait `DispatchFromDyn` may only be implemented for structs containing the field being coerced, ZST fields with 1 byte alignment, and nothing else +hir_analysis_dispatch_from_dyn_zst = the trait `DispatchFromDyn` may only be implemented for structs containing the field being coerced, ZST fields with 1 byte alignment that don't mention type/const generics, and nothing else .note = extra field `{$name}` of type `{$ty}` is not allowed hir_analysis_drop_impl_negative = negative `Drop` impls are not supported diff --git a/compiler/rustc_hir_analysis/src/check/check.rs b/compiler/rustc_hir_analysis/src/check/check.rs index 5548a6a6ef797..0a43eaaf0529c 100644 --- a/compiler/rustc_hir_analysis/src/check/check.rs +++ b/compiler/rustc_hir_analysis/src/check/check.rs @@ -2,6 +2,8 @@ use std::cell::LazyCell; use std::ops::ControlFlow; use rustc_abi::FieldIdx; +use rustc_attr_parsing::AttributeKind; +use rustc_attr_parsing::ReprAttr::ReprPacked; use rustc_data_structures::unord::{UnordMap, UnordSet}; use rustc_errors::MultiSpan; use rustc_errors::codes::*; @@ -436,9 +438,9 @@ fn check_opaque_meets_bounds<'tcx>( } else { // Check that any hidden types found during wf checking match the hidden types that `type_of` sees. 
for (mut key, mut ty) in infcx.take_opaque_types() { - ty.hidden_type.ty = infcx.resolve_vars_if_possible(ty.hidden_type.ty); + ty.ty = infcx.resolve_vars_if_possible(ty.ty); key = infcx.resolve_vars_if_possible(key); - sanity_check_found_hidden_type(tcx, key, ty.hidden_type)?; + sanity_check_found_hidden_type(tcx, key, ty)?; } Ok(()) } @@ -575,7 +577,7 @@ fn sanity_check_found_hidden_type<'tcx>( } else { let span = tcx.def_span(key.def_id); let other = ty::OpaqueHiddenType { ty: hidden_ty, span }; - Err(ty.build_mismatch_error(&other, key.def_id, tcx)?.emit()) + Err(ty.build_mismatch_error(&other, tcx)?.emit()) } } @@ -1159,7 +1161,7 @@ fn check_impl_items_against_trait<'tcx>( if let Some(missing_items) = must_implement_one_of { let attr_span = tcx .get_attr(trait_ref.def_id, sym::rustc_must_implement_one_of) - .map(|attr| attr.span); + .map(|attr| attr.span()); missing_items_must_implement_one_of_err( tcx, @@ -1248,11 +1250,13 @@ fn check_simd(tcx: TyCtxt<'_>, sp: Span, def_id: LocalDefId) { pub(super) fn check_packed(tcx: TyCtxt<'_>, sp: Span, def: ty::AdtDef<'_>) { let repr = def.repr(); if repr.packed() { - for attr in tcx.get_attrs(def.did(), sym::repr) { - for r in attr::parse_repr_attr(tcx.sess, attr) { - if let attr::ReprPacked(pack) = r + if let Some(reprs) = + attr::find_attr!(tcx.get_all_attrs(def.did()), AttributeKind::Repr(r) => r) + { + for (r, _) in reprs { + if let ReprPacked(pack) = r && let Some(repr_pack) = repr.pack - && pack != repr_pack + && pack != &repr_pack { struct_span_code_err!( tcx.dcx(), @@ -1464,16 +1468,19 @@ fn check_enum(tcx: TyCtxt<'_>, def_id: LocalDefId) { def.destructor(tcx); // force the destructor to be evaluated if def.variants().is_empty() { - if let Some(attr) = tcx.get_attrs(def_id, sym::repr).next() { - struct_span_code_err!( - tcx.dcx(), - attr.span, - E0084, - "unsupported representation for zero-variant enum" - ) - .with_span_label(tcx.def_span(def_id), "zero-variant enum") - .emit(); - } + attr::find_attr!( + tcx.get_all_attrs(def_id), + AttributeKind::Repr(rs) => { + struct_span_code_err!( + tcx.dcx(), + rs.first().unwrap().1, + E0084, + "unsupported representation for zero-variant enum" + ) + .with_span_label(tcx.def_span(def_id), "zero-variant enum") + .emit(); + } + ); } let repr_type_ty = def.repr().discr_type().to_ty(tcx); @@ -1666,7 +1673,7 @@ fn check_type_alias_type_params_are_used<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalD .collect::>() }); - let mut params_used = BitSet::new_empty(generics.own_params.len()); + let mut params_used = DenseBitSet::new_empty(generics.own_params.len()); for leaf in ty.walk() { if let GenericArgKind::Type(leaf_ty) = leaf.unpack() && let ty::Param(param) = leaf_ty.kind() @@ -1845,13 +1852,18 @@ pub(super) fn check_coroutine_obligations( debug!(?typeck_results.coroutine_stalled_predicates); + let mode = if tcx.next_trait_solver_globally() { + TypingMode::post_borrowck_analysis(tcx, def_id) + } else { + TypingMode::analysis_in_body(tcx, def_id) + }; + let infcx = tcx .infer_ctxt() // typeck writeback gives us predicates with their regions erased. // As borrowck already has checked lifetimes, we do not need to do it again. .ignoring_regions() - // FIXME(#132279): This should eventually use the already defined hidden types. 
- .build(TypingMode::analysis_in_body(tcx, def_id)); + .build(mode); let ocx = ObligationCtxt::new_with_diagnostics(&infcx); for (predicate, cause) in &typeck_results.coroutine_stalled_predicates { @@ -1864,12 +1876,14 @@ pub(super) fn check_coroutine_obligations( return Err(infcx.err_ctxt().report_fulfillment_errors(errors)); } - // Check that any hidden types found when checking these stalled coroutine obligations - // are valid. - for (key, ty) in infcx.take_opaque_types() { - let hidden_type = infcx.resolve_vars_if_possible(ty.hidden_type); - let key = infcx.resolve_vars_if_possible(key); - sanity_check_found_hidden_type(tcx, key, hidden_type)?; + if !tcx.next_trait_solver_globally() { + // Check that any hidden types found when checking these stalled coroutine obligations + // are valid. + for (key, ty) in infcx.take_opaque_types() { + let hidden_type = infcx.resolve_vars_if_possible(ty); + let key = infcx.resolve_vars_if_possible(key); + sanity_check_found_hidden_type(tcx, key, hidden_type)?; + } } Ok(()) diff --git a/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs b/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs index e176fc589995f..4a957d5da242b 100644 --- a/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs +++ b/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs @@ -430,12 +430,12 @@ fn compare_method_predicate_entailment<'tcx>( Ok(()) } -struct RemapLateBound<'a, 'tcx> { +struct RemapLateParam<'a, 'tcx> { tcx: TyCtxt<'tcx>, - mapping: &'a FxIndexMap, + mapping: &'a FxIndexMap, } -impl<'tcx> TypeFolder> for RemapLateBound<'_, 'tcx> { +impl<'tcx> TypeFolder> for RemapLateParam<'_, 'tcx> { fn cx(&self) -> TyCtxt<'tcx> { self.tcx } @@ -445,7 +445,7 @@ impl<'tcx> TypeFolder> for RemapLateBound<'_, 'tcx> { ty::Region::new_late_param( self.tcx, fr.scope, - self.mapping.get(&fr.bound_region).copied().unwrap_or(fr.bound_region), + self.mapping.get(&fr.kind).copied().unwrap_or(fr.kind), ) } else { r @@ -529,6 +529,26 @@ pub(super) fn collect_return_position_impl_trait_in_trait_tys<'tcx>( let infcx = &tcx.infer_ctxt().build(TypingMode::non_body_analysis()); let ocx = ObligationCtxt::new_with_diagnostics(infcx); + // Check that the where clauses of the impl are satisfied by the hybrid param env. + // You might ask -- what does this have to do with RPITIT inference? Nothing. + // We check these because if the where clauses of the signatures do not match + // up, then we don't want to give spurious other errors that point at the RPITITs. + // They're not necessary to check, though, because we already check them in + // `compare_method_predicate_entailment`. + let impl_m_own_bounds = tcx.predicates_of(impl_m_def_id).instantiate_own_identity(); + for (predicate, span) in impl_m_own_bounds { + let normalize_cause = traits::ObligationCause::misc(span, impl_m_def_id); + let predicate = ocx.normalize(&normalize_cause, param_env, predicate); + + let cause = + ObligationCause::new(span, impl_m_def_id, ObligationCauseCode::CompareImplItem { + impl_item_def_id: impl_m_def_id, + trait_item_def_id: trait_m.def_id, + kind: impl_m.kind, + }); + ocx.register_obligation(traits::Obligation::new(tcx, cause, param_env, predicate)); + } + // Normalize the impl signature with fresh variables for lifetime inference. 
let misc_cause = ObligationCause::misc(return_span, impl_m_def_id); let impl_sig = ocx.normalize( diff --git a/compiler/rustc_hir_analysis/src/check/compare_impl_item/refine.rs b/compiler/rustc_hir_analysis/src/check/compare_impl_item/refine.rs index 6eac4ac3baf82..2b14594ea1bf4 100644 --- a/compiler/rustc_hir_analysis/src/check/compare_impl_item/refine.rs +++ b/compiler/rustc_hir_analysis/src/check/compare_impl_item/refine.rs @@ -289,11 +289,16 @@ fn report_mismatched_rpitit_signature<'tcx>( tcx.fn_sig(trait_m_def_id).skip_binder().bound_vars(), tcx.fn_sig(impl_m_def_id).skip_binder().bound_vars(), ) - .filter_map(|(impl_bv, trait_bv)| { + .enumerate() + .filter_map(|(idx, (impl_bv, trait_bv))| { if let ty::BoundVariableKind::Region(impl_bv) = impl_bv && let ty::BoundVariableKind::Region(trait_bv) = trait_bv { - Some((impl_bv, trait_bv)) + let var = ty::BoundVar::from_usize(idx); + Some(( + ty::LateParamRegionKind::from_bound(var, impl_bv), + ty::LateParamRegionKind::from_bound(var, trait_bv), + )) } else { None } @@ -301,7 +306,7 @@ fn report_mismatched_rpitit_signature<'tcx>( .collect(); let mut return_ty = - trait_m_sig.output().fold_with(&mut super::RemapLateBound { tcx, mapping: &mapping }); + trait_m_sig.output().fold_with(&mut super::RemapLateParam { tcx, mapping: &mapping }); if tcx.asyncness(impl_m_def_id).is_async() && tcx.asyncness(trait_m_def_id).is_async() { let ty::Alias(ty::Projection, future_ty) = return_ty.kind() else { diff --git a/compiler/rustc_hir_analysis/src/check/entry.rs b/compiler/rustc_hir_analysis/src/check/entry.rs index f3dd13c84b958..b18b5e2f99ed9 100644 --- a/compiler/rustc_hir_analysis/src/check/entry.rs +++ b/compiler/rustc_hir_analysis/src/check/entry.rs @@ -7,9 +7,8 @@ use rustc_infer::infer::TyCtxtInferExt; use rustc_middle::span_bug; use rustc_middle::ty::{self, Ty, TyCtxt, TypingMode}; use rustc_session::config::EntryFnType; -use rustc_span::Span; use rustc_span::def_id::{CRATE_DEF_ID, DefId, LocalDefId}; -use rustc_span::symbol::sym; +use rustc_span::{Span, sym}; use rustc_trait_selection::error_reporting::InferCtxtErrorExt; use rustc_trait_selection::traits::{self, ObligationCause, ObligationCauseCode}; @@ -45,7 +44,7 @@ fn check_main_fn_ty(tcx: TyCtxt<'_>, main_def_id: DefId) { return None; } match tcx.hir_node_by_def_id(def_id.expect_local()) { - Node::Item(hir::Item { kind: hir::ItemKind::Fn(_, generics, _), .. }) => { + Node::Item(hir::Item { kind: hir::ItemKind::Fn { generics, .. }, .. }) => { generics.params.is_empty().not().then_some(generics.span) } _ => { @@ -59,7 +58,7 @@ fn check_main_fn_ty(tcx: TyCtxt<'_>, main_def_id: DefId) { return None; } match tcx.hir_node_by_def_id(def_id.expect_local()) { - Node::Item(hir::Item { kind: hir::ItemKind::Fn(_, generics, _), .. }) => { + Node::Item(hir::Item { kind: hir::ItemKind::Fn { generics, .. }, .. }) => { Some(generics.where_clause_span) } _ => { @@ -80,7 +79,7 @@ fn check_main_fn_ty(tcx: TyCtxt<'_>, main_def_id: DefId) { return None; } match tcx.hir_node_by_def_id(def_id.expect_local()) { - Node::Item(hir::Item { kind: hir::ItemKind::Fn(fn_sig, _, _), .. }) => { + Node::Item(hir::Item { kind: hir::ItemKind::Fn { sig: fn_sig, .. }, .. 
}) => { Some(fn_sig.decl.output.span()) } _ => { @@ -101,7 +100,7 @@ fn check_main_fn_ty(tcx: TyCtxt<'_>, main_def_id: DefId) { } for attr in tcx.get_attrs(main_def_id, sym::track_caller) { - tcx.dcx().emit_err(errors::TrackCallerOnMain { span: attr.span, annotated: main_span }); + tcx.dcx().emit_err(errors::TrackCallerOnMain { span: attr.span(), annotated: main_span }); error = true; } @@ -202,7 +201,7 @@ fn check_start_fn_ty(tcx: TyCtxt<'_>, start_def_id: DefId) { match start_t.kind() { ty::FnDef(..) => { if let Node::Item(it) = tcx.hir_node(start_id) { - if let hir::ItemKind::Fn(sig, generics, _) = &it.kind { + if let hir::ItemKind::Fn { sig, generics, .. } = &it.kind { let mut error = false; if !generics.params.is_empty() { tcx.dcx().emit_err(errors::StartFunctionParameters { span: generics.span }); @@ -224,7 +223,7 @@ fn check_start_fn_ty(tcx: TyCtxt<'_>, start_def_id: DefId) { for attr in attrs { if attr.has_name(sym::track_caller) { tcx.dcx().emit_err(errors::StartTrackCaller { - span: attr.span, + span: attr.span(), start: start_span, }); error = true; @@ -236,7 +235,7 @@ fn check_start_fn_ty(tcx: TyCtxt<'_>, start_def_id: DefId) { && !tcx.sess.opts.actually_rustdoc { tcx.dcx().emit_err(errors::StartTargetFeature { - span: attr.span, + span: attr.span(), start: start_span, }); error = true; diff --git a/compiler/rustc_hir_analysis/src/check/intrinsic.rs b/compiler/rustc_hir_analysis/src/check/intrinsic.rs index 2e6b511412bb1..8aa95d1c1d516 100644 --- a/compiler/rustc_hir_analysis/src/check/intrinsic.rs +++ b/compiler/rustc_hir_analysis/src/check/intrinsic.rs @@ -8,8 +8,7 @@ use rustc_middle::bug; use rustc_middle::traits::{ObligationCause, ObligationCauseCode}; use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_span::def_id::LocalDefId; -use rustc_span::symbol::sym; -use rustc_span::{Span, Symbol}; +use rustc_span::{Span, Symbol, sym}; use crate::check::check_function_signature; use crate::errors::{ @@ -27,7 +26,7 @@ fn equate_intrinsic_type<'tcx>( sig: ty::PolyFnSig<'tcx>, ) { let (generics, span) = match tcx.hir_node_by_def_id(def_id) { - hir::Node::Item(hir::Item { kind: hir::ItemKind::Fn(_, generics, _), .. }) + hir::Node::Item(hir::Item { kind: hir::ItemKind::Fn { generics, .. }, .. }) | hir::Node::ForeignItem(hir::ForeignItem { kind: hir::ForeignItemKind::Fn(_, _, generics), .. 
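The `ItemKind::Fn` matches rewritten above are part of a mechanical migration that recurs throughout this patch: the variant changes from tuple form to a struct variant that also carries the new `has_body` flag, so call sites switch to named-field patterns. A minimal sketch of the new matching style (illustrative only, assuming some `item: &hir::Item<'_>`):

    // Old shape: if let hir::ItemKind::Fn(sig, generics, body_id) = &item.kind { ... }
    // New shape: named fields, with `..` skipping whatever is not needed (e.g. `has_body`).
    if let hir::ItemKind::Fn { sig, generics, body: body_id, .. } = &item.kind {
        let _ = (sig, generics, body_id);
    }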
@@ -87,6 +86,7 @@ pub fn intrinsic_operation_unsafety(tcx: TyCtxt<'_>, intrinsic_id: LocalDefId) - | sym::assert_inhabited | sym::assert_zero_valid | sym::assert_mem_uninitialized_valid + | sym::box_new | sym::breakpoint | sym::size_of | sym::min_align_of @@ -95,6 +95,7 @@ pub fn intrinsic_operation_unsafety(tcx: TyCtxt<'_>, intrinsic_id: LocalDefId) - | sym::add_with_overflow | sym::sub_with_overflow | sym::mul_with_overflow + | sym::carrying_mul_add | sym::wrapping_add | sym::wrapping_sub | sym::wrapping_mul @@ -437,6 +438,10 @@ pub fn check_intrinsic_type( (1, 0, vec![param(0), param(0)], Ty::new_tup(tcx, &[param(0), tcx.types.bool])) } + sym::carrying_mul_add => { + (2, 0, vec![param(0); 4], Ty::new_tup(tcx, &[param(1), param(0)])) + } + sym::ptr_guaranteed_cmp => ( 1, 0, @@ -497,7 +502,9 @@ pub fn check_intrinsic_type( (1, 0, vec![Ty::new_mut_ptr(tcx, param(0)), param(0)], tcx.types.unit) } - sym::typed_swap => (1, 0, vec![Ty::new_mut_ptr(tcx, param(0)); 2], tcx.types.unit), + sym::typed_swap_nonoverlapping => { + (1, 0, vec![Ty::new_mut_ptr(tcx, param(0)); 2], tcx.types.unit) + } sym::discriminant_value => { let assoc_items = tcx.associated_item_def_ids( @@ -600,6 +607,8 @@ pub fn check_intrinsic_type( sym::ub_checks => (0, 0, Vec::new(), tcx.types.bool), + sym::box_new => (1, 0, vec![param(0)], Ty::new_box(tcx, param(0))), + sym::simd_eq | sym::simd_ne | sym::simd_lt diff --git a/compiler/rustc_hir_analysis/src/check/mod.rs b/compiler/rustc_hir_analysis/src/check/mod.rs index 61e203a1ff668..92b18c80fd82d 100644 --- a/compiler/rustc_hir_analysis/src/check/mod.rs +++ b/compiler/rustc_hir_analysis/src/check/mod.rs @@ -79,7 +79,7 @@ use rustc_data_structures::fx::{FxHashSet, FxIndexMap}; use rustc_errors::{Diag, ErrorGuaranteed, pluralize, struct_span_code_err}; use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_hir::intravisit::Visitor; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_infer::infer::outlives::env::OutlivesEnvironment; use rustc_infer::infer::{self, TyCtxtInferExt as _}; use rustc_infer::traits::ObligationCause; @@ -89,8 +89,7 @@ use rustc_middle::ty::{self, GenericArgs, GenericArgsRef, Ty, TyCtxt, TypingMode use rustc_middle::{bug, span_bug}; use rustc_session::parse::feature_err; use rustc_span::def_id::CRATE_DEF_ID; -use rustc_span::symbol::{Ident, kw, sym}; -use rustc_span::{BytePos, DUMMY_SP, Span, Symbol}; +use rustc_span::{BytePos, DUMMY_SP, Ident, Span, Symbol, kw, sym}; use rustc_trait_selection::error_reporting::InferCtxtErrorExt; use rustc_trait_selection::error_reporting::infer::ObligationCauseExt as _; use rustc_trait_selection::error_reporting::traits::suggestions::ReturnsVisitor; diff --git a/compiler/rustc_hir_analysis/src/check/region.rs b/compiler/rustc_hir_analysis/src/check/region.rs index b9cb48cafdcd2..83c69dc2ef415 100644 --- a/compiler/rustc_hir_analysis/src/check/region.rs +++ b/compiler/rustc_hir_analysis/src/check/region.rs @@ -31,7 +31,7 @@ struct Context { parent: Option<(Scope, ScopeDepth)>, } -struct RegionResolutionVisitor<'tcx> { +struct ScopeResolutionVisitor<'tcx> { tcx: TyCtxt<'tcx>, // The number of expressions and patterns visited in the current body. 
@@ -71,7 +71,7 @@ struct RegionResolutionVisitor<'tcx> { } /// Records the lifetime of a local variable as `cx.var_parent` -fn record_var_lifetime(visitor: &mut RegionResolutionVisitor<'_>, var_id: hir::ItemLocalId) { +fn record_var_lifetime(visitor: &mut ScopeResolutionVisitor<'_>, var_id: hir::ItemLocalId) { match visitor.cx.var_parent { None => { // this can happen in extern fn declarations like @@ -82,7 +82,7 @@ fn record_var_lifetime(visitor: &mut RegionResolutionVisitor<'_>, var_id: hir::I } } -fn resolve_block<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, blk: &'tcx hir::Block<'tcx>) { +fn resolve_block<'tcx>(visitor: &mut ScopeResolutionVisitor<'tcx>, blk: &'tcx hir::Block<'tcx>) { debug!("resolve_block(blk.hir_id={:?})", blk.hir_id); let prev_cx = visitor.cx; @@ -129,7 +129,7 @@ fn resolve_block<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, blk: &'tcx h let mut prev_cx = visitor.cx; visitor.enter_scope(Scope { - id: blk.hir_id.local_id, + local_id: blk.hir_id.local_id, data: ScopeData::Remainder(FirstStatementIndex::new(i)), }); visitor.cx.var_parent = visitor.cx.parent; @@ -154,7 +154,7 @@ fn resolve_block<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, blk: &'tcx h // the first such subscope, which has the block itself as a // parent. visitor.enter_scope(Scope { - id: blk.hir_id.local_id, + local_id: blk.hir_id.local_id, data: ScopeData::Remainder(FirstStatementIndex::new(i)), }); visitor.cx.var_parent = visitor.cx.parent; @@ -184,7 +184,7 @@ fn resolve_block<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, blk: &'tcx h visitor .scope_tree .backwards_incompatible_scope - .insert(local_id, Scope { id: local_id, data: ScopeData::Node }); + .insert(local_id, Scope { local_id, data: ScopeData::Node }); } visitor.visit_expr(tail_expr); } @@ -193,7 +193,7 @@ fn resolve_block<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, blk: &'tcx h visitor.cx = prev_cx; } -fn resolve_arm<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, arm: &'tcx hir::Arm<'tcx>) { +fn resolve_arm<'tcx>(visitor: &mut ScopeResolutionVisitor<'tcx>, arm: &'tcx hir::Arm<'tcx>) { fn has_let_expr(expr: &Expr<'_>) -> bool { match &expr.kind { hir::ExprKind::Binary(_, lhs, rhs) => has_let_expr(lhs) || has_let_expr(rhs), @@ -220,8 +220,8 @@ fn resolve_arm<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, arm: &'tcx hir visitor.cx = prev_cx; } -fn resolve_pat<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, pat: &'tcx hir::Pat<'tcx>) { - visitor.record_child_scope(Scope { id: pat.hir_id.local_id, data: ScopeData::Node }); +fn resolve_pat<'tcx>(visitor: &mut ScopeResolutionVisitor<'tcx>, pat: &'tcx hir::Pat<'tcx>) { + visitor.record_child_scope(Scope { local_id: pat.hir_id.local_id, data: ScopeData::Node }); // If this is a binding then record the lifetime of that binding. if let PatKind::Binding(..) 
= pat.kind { @@ -237,7 +237,7 @@ fn resolve_pat<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, pat: &'tcx hir debug!("resolve_pat - post-increment {} pat = {:?}", visitor.expr_and_pat_count, pat); } -fn resolve_stmt<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, stmt: &'tcx hir::Stmt<'tcx>) { +fn resolve_stmt<'tcx>(visitor: &mut ScopeResolutionVisitor<'tcx>, stmt: &'tcx hir::Stmt<'tcx>) { let stmt_id = stmt.hir_id.local_id; debug!("resolve_stmt(stmt.id={:?})", stmt_id); @@ -256,7 +256,7 @@ fn resolve_stmt<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, stmt: &'tcx h visitor.cx.parent = prev_parent; } -fn resolve_expr<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, expr: &'tcx hir::Expr<'tcx>) { +fn resolve_expr<'tcx>(visitor: &mut ScopeResolutionVisitor<'tcx>, expr: &'tcx hir::Expr<'tcx>) { debug!("resolve_expr - pre-increment {} expr = {:?}", visitor.expr_and_pat_count, expr); let prev_cx = visitor.cx; @@ -420,10 +420,10 @@ fn resolve_expr<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, expr: &'tcx h // properly, we can't miss any types. match expr.kind { - // Manually recurse over closures and inline consts, because they are the only - // case of nested bodies that share the parent environment. - hir::ExprKind::Closure(&hir::Closure { body, .. }) - | hir::ExprKind::ConstBlock(hir::ConstBlock { body, .. }) => { + // Manually recurse over closures, because they are nested bodies + // that share the parent environment. We handle const blocks in + // `visit_inline_const`. + hir::ExprKind::Closure(&hir::Closure { body, .. }) => { let body = visitor.tcx.hir().body(body); visitor.visit_body(body); } @@ -485,7 +485,7 @@ fn resolve_expr<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, expr: &'tcx h } else { ScopeData::IfThen }; - visitor.enter_scope(Scope { id: then.hir_id.local_id, data }); + visitor.enter_scope(Scope { local_id: then.hir_id.local_id, data }); visitor.cx.var_parent = visitor.cx.parent; visitor.visit_expr(cond); visitor.visit_expr(then); @@ -500,7 +500,7 @@ fn resolve_expr<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, expr: &'tcx h } else { ScopeData::IfThen }; - visitor.enter_scope(Scope { id: then.hir_id.local_id, data }); + visitor.enter_scope(Scope { local_id: then.hir_id.local_id, data }); visitor.cx.var_parent = visitor.cx.parent; visitor.visit_expr(cond); visitor.visit_expr(then); @@ -516,7 +516,7 @@ fn resolve_expr<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, expr: &'tcx h if let hir::ExprKind::Yield(_, source) = &expr.kind { // Mark this expr's scope and all parent scopes as containing `yield`. - let mut scope = Scope { id: expr.hir_id.local_id, data: ScopeData::Node }; + let mut scope = Scope { local_id: expr.hir_id.local_id, data: ScopeData::Node }; loop { let span = match expr.kind { hir::ExprKind::Yield(expr, hir::YieldSource::Await { .. }) => { @@ -554,7 +554,7 @@ fn resolve_expr<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, expr: &'tcx h } fn resolve_local<'tcx>( - visitor: &mut RegionResolutionVisitor<'tcx>, + visitor: &mut ScopeResolutionVisitor<'tcx>, pat: Option<&'tcx hir::Pat<'tcx>>, init: Option<&'tcx hir::Expr<'tcx>>, ) { @@ -654,6 +654,7 @@ fn resolve_local<'tcx>( /// | ( ..., P&, ... ) /// | ... "|" P& "|" ... /// | box P& + /// | P& if ... 
/// ``` fn is_binding_pat(pat: &hir::Pat<'_>) -> bool { // Note that the code below looks for *explicit* refs only, that is, it won't @@ -694,14 +695,16 @@ fn resolve_local<'tcx>( | PatKind::TupleStruct(_, subpats, _) | PatKind::Tuple(subpats, _) => subpats.iter().any(|p| is_binding_pat(p)), - PatKind::Box(subpat) | PatKind::Deref(subpat) => is_binding_pat(subpat), + PatKind::Box(subpat) | PatKind::Deref(subpat) | PatKind::Guard(subpat, _) => { + is_binding_pat(subpat) + } PatKind::Ref(_, _) | PatKind::Binding(hir::BindingMode(hir::ByRef::No, _), ..) | PatKind::Wild | PatKind::Never | PatKind::Path(_) - | PatKind::Lit(_) + | PatKind::Expr(_) | PatKind::Range(_, _, _) | PatKind::Err(_) => false, } @@ -722,7 +725,7 @@ fn resolve_local<'tcx>( /// | ( E& ) /// ``` fn record_rvalue_scope_if_borrow_expr<'tcx>( - visitor: &mut RegionResolutionVisitor<'tcx>, + visitor: &mut ScopeResolutionVisitor<'tcx>, expr: &hir::Expr<'_>, blk_id: Option, ) { @@ -779,7 +782,7 @@ fn resolve_local<'tcx>( } } -impl<'tcx> RegionResolutionVisitor<'tcx> { +impl<'tcx> ScopeResolutionVisitor<'tcx> { /// Records the current parent (if any) as the parent of `child_scope`. /// Returns the depth of `child_scope`. fn record_child_scope(&mut self, child_scope: Scope) -> ScopeDepth { @@ -803,9 +806,9 @@ impl<'tcx> RegionResolutionVisitor<'tcx> { // account for the destruction scope representing the scope of // the destructors that run immediately after it completes. if self.terminating_scopes.contains(&id) { - self.enter_scope(Scope { id, data: ScopeData::Destruction }); + self.enter_scope(Scope { local_id: id, data: ScopeData::Destruction }); } - self.enter_scope(Scope { id, data: ScopeData::Node }); + self.enter_scope(Scope { local_id: id, data: ScopeData::Node }); } fn enter_body(&mut self, hir_id: hir::HirId, f: impl FnOnce(&mut Self)) { @@ -822,8 +825,8 @@ impl<'tcx> RegionResolutionVisitor<'tcx> { let outer_pessimistic_yield = mem::replace(&mut self.pessimistic_yield, false); self.terminating_scopes.insert(hir_id.local_id); - self.enter_scope(Scope { id: hir_id.local_id, data: ScopeData::CallSite }); - self.enter_scope(Scope { id: hir_id.local_id, data: ScopeData::Arguments }); + self.enter_scope(Scope { local_id: hir_id.local_id, data: ScopeData::CallSite }); + self.enter_scope(Scope { local_id: hir_id.local_id, data: ScopeData::Arguments }); f(self); @@ -835,7 +838,7 @@ impl<'tcx> RegionResolutionVisitor<'tcx> { } } -impl<'tcx> Visitor<'tcx> for RegionResolutionVisitor<'tcx> { +impl<'tcx> Visitor<'tcx> for ScopeResolutionVisitor<'tcx> { fn visit_block(&mut self, b: &'tcx Block<'tcx>) { resolve_block(self, b); } @@ -903,6 +906,10 @@ impl<'tcx> Visitor<'tcx> for RegionResolutionVisitor<'tcx> { fn visit_local(&mut self, l: &'tcx LetStmt<'tcx>) { resolve_local(self, Some(l.pat), l.init) } + fn visit_inline_const(&mut self, c: &'tcx hir::ConstBlock) { + let body = self.tcx.hir().body(c.body); + self.visit_body(body); + } } /// Per-body `region::ScopeTree`. 
The `DefId` should be the owner `DefId` for the body; @@ -919,7 +926,7 @@ pub(crate) fn region_scope_tree(tcx: TyCtxt<'_>, def_id: DefId) -> &ScopeTree { } let scope_tree = if let Some(body) = tcx.hir().maybe_body_owned_by(def_id.expect_local()) { - let mut visitor = RegionResolutionVisitor { + let mut visitor = ScopeResolutionVisitor { tcx, scope_tree: ScopeTree::default(), expr_and_pat_count: 0, diff --git a/compiler/rustc_hir_analysis/src/check/wfcheck.rs b/compiler/rustc_hir_analysis/src/check/wfcheck.rs index 95ad8225f61bb..dd6adb17c5e47 100644 --- a/compiler/rustc_hir_analysis/src/check/wfcheck.rs +++ b/compiler/rustc_hir_analysis/src/check/wfcheck.rs @@ -23,8 +23,7 @@ use rustc_middle::ty::{ }; use rustc_middle::{bug, span_bug}; use rustc_session::parse::feature_err; -use rustc_span::symbol::{Ident, sym}; -use rustc_span::{DUMMY_SP, Span}; +use rustc_span::{DUMMY_SP, Ident, Span, sym}; use rustc_trait_selection::error_reporting::InferCtxtErrorExt; use rustc_trait_selection::regions::InferCtxtRegionExt; use rustc_trait_selection::traits::misc::{ @@ -294,7 +293,7 @@ fn check_item<'tcx>(tcx: TyCtxt<'tcx>, item: &'tcx hir::Item<'tcx>) -> Result<() } res } - hir::ItemKind::Fn(ref sig, ..) => { + hir::ItemKind::Fn { sig, .. } => { check_item_fn(tcx, def_id, item.ident, item.span, sig.decl) } hir::ItemKind::Static(ty, ..) => { @@ -1121,7 +1120,7 @@ fn check_type_defn<'tcx>( } else { // Evaluate the constant proactively, to emit an error if the constant has // an unconditional error. We only do so if the const has no type params. - let _ = tcx.const_eval_poly(def_id.into()); + let _ = tcx.const_eval_poly(def_id); } } let field_id = field.did.expect_local(); @@ -2008,7 +2007,10 @@ fn check_variances_for_type_defn<'tcx>( } match hir_param.name { - hir::ParamName::Error => {} + hir::ParamName::Error(_) => { + // Don't report a bivariance error for a lifetime that isn't + // even valid to name. + } _ => { let has_explicit_bounds = explicitly_bounded_params.contains(¶meter); report_bivariance(tcx, hir_param, has_explicit_bounds, item); @@ -2340,8 +2342,11 @@ fn lint_redundant_lifetimes<'tcx>( ); // If we are in a function, add its late-bound lifetimes too. 
if matches!(def_kind, DefKind::Fn | DefKind::AssocFn) { - for var in tcx.fn_sig(owner_id).instantiate_identity().bound_vars() { + for (idx, var) in + tcx.fn_sig(owner_id).instantiate_identity().bound_vars().iter().enumerate() + { let ty::BoundVariableKind::Region(kind) = var else { continue }; + let kind = ty::LateParamRegionKind::from_bound(ty::BoundVar::from_usize(idx), kind); lifetimes.push(ty::Region::new_late_param(tcx, owner_id.to_def_id(), kind)); } } diff --git a/compiler/rustc_hir_analysis/src/coherence/builtin.rs b/compiler/rustc_hir_analysis/src/coherence/builtin.rs index 2eea65125b038..b43a808ccdc10 100644 --- a/compiler/rustc_hir_analysis/src/coherence/builtin.rs +++ b/compiler/rustc_hir_analysis/src/coherence/builtin.rs @@ -259,19 +259,37 @@ fn visit_implementation_of_dispatch_from_dyn(checker: &Checker<'_>) -> Result<() let coerced_fields = fields .iter() .filter(|field| { + // Ignore PhantomData fields + let unnormalized_ty = tcx.type_of(field.did).instantiate_identity(); + if tcx + .try_normalize_erasing_regions( + ty::TypingEnv::non_body_analysis(tcx, def_a.did()), + unnormalized_ty, + ) + .unwrap_or(unnormalized_ty) + .is_phantom_data() + { + return false; + } + let ty_a = field.ty(tcx, args_a); let ty_b = field.ty(tcx, args_b); - if let Ok(layout) = - tcx.layout_of(infcx.typing_env(param_env).as_query_input(ty_a)) - { - if layout.is_1zst() { + // FIXME: We could do normalization here, but is it really worth it? + if ty_a == ty_b { + // Allow 1-ZSTs that don't mention type params. + // + // Allowing type params here would allow us to possibly transmute + // between ZSTs, which may be used to create library unsoundness. + if let Ok(layout) = + tcx.layout_of(infcx.typing_env(param_env).as_query_input(ty_a)) + && layout.is_1zst() + && !ty_a.has_non_region_param() + { // ignore 1-ZST fields return false; } - } - if ty_a == ty_b { res = Err(tcx.dcx().emit_err(errors::DispatchFromDynZST { span, name: field.name, @@ -460,8 +478,16 @@ pub(crate) fn coerce_unsized_info<'tcx>( .filter_map(|(i, f)| { let (a, b) = (f.ty(tcx, args_a), f.ty(tcx, args_b)); - if tcx.type_of(f.did).instantiate_identity().is_phantom_data() { - // Ignore PhantomData fields + // Ignore PhantomData fields + let unnormalized_ty = tcx.type_of(f.did).instantiate_identity(); + if tcx + .try_normalize_erasing_regions( + ty::TypingEnv::non_body_analysis(tcx, def_a.did()), + unnormalized_ty, + ) + .unwrap_or(unnormalized_ty) + .is_phantom_data() + { return None; } @@ -673,37 +699,6 @@ fn visit_implementation_of_pointer_like(checker: &Checker<'_>) -> Result<(), Err let impl_span = tcx.def_span(checker.impl_def_id); let self_ty = tcx.impl_trait_ref(checker.impl_def_id).unwrap().instantiate_identity().self_ty(); - // If an ADT is repr(transparent)... - if let ty::Adt(def, args) = *self_ty.kind() - && def.repr().transparent() - { - // FIXME(compiler-errors): This should and could be deduplicated into a query. - // Find the nontrivial field. - let adt_typing_env = ty::TypingEnv::non_body_analysis(tcx, def.did()); - let nontrivial_field = def.all_fields().find(|field_def| { - let field_ty = tcx.type_of(field_def.did).instantiate_identity(); - !tcx.layout_of(adt_typing_env.as_query_input(field_ty)) - .is_ok_and(|layout| layout.layout.is_1zst()) - }); - - if let Some(nontrivial_field) = nontrivial_field { - // Check that the nontrivial field implements `PointerLike`. 
- let nontrivial_field = nontrivial_field.ty(tcx, args); - let (infcx, param_env) = tcx.infer_ctxt().build_with_typing_env(typing_env); - let ocx = ObligationCtxt::new(&infcx); - ocx.register_bound( - ObligationCause::misc(impl_span, checker.impl_def_id), - param_env, - nontrivial_field, - tcx.lang_items().pointer_like().unwrap(), - ); - // FIXME(dyn-star): We should regionck this implementation. - if ocx.select_all_or_error().is_empty() { - return Ok(()); - } - } - } - let is_permitted_primitive = match *self_ty.kind() { ty::Adt(def, _) => def.is_box(), ty::Uint(..) | ty::Int(..) | ty::RawPtr(..) | ty::Ref(..) | ty::FnPtr(..) => true, @@ -717,6 +712,74 @@ fn visit_implementation_of_pointer_like(checker: &Checker<'_>) -> Result<(), Err return Ok(()); } + let why_disqualified = match *self_ty.kind() { + // If an ADT is repr(transparent) + ty::Adt(self_ty_def, args) => { + if self_ty_def.repr().transparent() { + // FIXME(compiler-errors): This should and could be deduplicated into a query. + // Find the nontrivial field. + let adt_typing_env = ty::TypingEnv::non_body_analysis(tcx, self_ty_def.did()); + let nontrivial_field = self_ty_def.all_fields().find(|field_def| { + let field_ty = tcx.type_of(field_def.did).instantiate_identity(); + !tcx.layout_of(adt_typing_env.as_query_input(field_ty)) + .is_ok_and(|layout| layout.layout.is_1zst()) + }); + + if let Some(nontrivial_field) = nontrivial_field { + // Check that the nontrivial field implements `PointerLike`. + let nontrivial_field_ty = nontrivial_field.ty(tcx, args); + let (infcx, param_env) = tcx.infer_ctxt().build_with_typing_env(typing_env); + let ocx = ObligationCtxt::new(&infcx); + ocx.register_bound( + ObligationCause::misc(impl_span, checker.impl_def_id), + param_env, + nontrivial_field_ty, + tcx.lang_items().pointer_like().unwrap(), + ); + // FIXME(dyn-star): We should regionck this implementation. + if ocx.select_all_or_error().is_empty() { + return Ok(()); + } else { + format!( + "the field `{field_name}` of {descr} `{self_ty}` \ + does not implement `PointerLike`", + field_name = nontrivial_field.name, + descr = self_ty_def.descr() + ) + } + } else { + format!( + "the {descr} `{self_ty}` is `repr(transparent)`, \ + but does not have a non-trivial field (it is zero-sized)", + descr = self_ty_def.descr() + ) + } + } else if self_ty_def.is_box() { + // If we got here, then the `layout.is_pointer_like()` check failed + // and this box is not a thin pointer. + + String::from("boxes of dynamically-sized types are too large to be `PointerLike`") + } else { + format!( + "the {descr} `{self_ty}` is not `repr(transparent)`", + descr = self_ty_def.descr() + ) + } + } + ty::Ref(..) => { + // If we got here, then the `layout.is_pointer_like()` check failed + // and this reference is not a thin pointer. + String::from("references to dynamically-sized types are too large to be `PointerLike`") + } + ty::Dynamic(..) | ty::Foreign(..) => { + String::from("types of dynamic or unknown size may not implement `PointerLike`") + } + _ => { + // This is a white lie; it is true everywhere outside the standard library. 
+ format!("only user-defined sized types are eligible for `impl PointerLike`") + } + }; + Err(tcx .dcx() .struct_span_err( @@ -724,5 +787,6 @@ fn visit_implementation_of_pointer_like(checker: &Checker<'_>) -> Result<(), Err "implementation must be applied to type that has the same ABI as a pointer, \ or is `repr(transparent)` and whose field is `PointerLike`", ) + .with_note(why_disqualified) .emit()) } diff --git a/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs b/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs index 2afc2aec1baad..a86dede48bffd 100644 --- a/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs +++ b/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs @@ -13,8 +13,7 @@ use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_middle::bug; use rustc_middle::ty::fast_reject::{SimplifiedType, TreatParams, simplify_type}; use rustc_middle::ty::{self, CrateInherentImpls, Ty, TyCtxt}; -use rustc_span::ErrorGuaranteed; -use rustc_span::symbol::sym; +use rustc_span::{ErrorGuaranteed, sym}; use crate::errors; @@ -179,7 +178,8 @@ impl<'tcx> InherentCollect<'tcx> { | ty::Ref(..) | ty::Never | ty::FnPtr(..) - | ty::Tuple(..) => self.check_primitive_impl(id, self_ty), + | ty::Tuple(..) + | ty::UnsafeBinder(_) => self.check_primitive_impl(id, self_ty), ty::Alias(ty::Projection | ty::Inherent | ty::Opaque, _) | ty::Param(_) => { Err(self.tcx.dcx().emit_err(errors::InherentNominal { span: item_span })) } diff --git a/compiler/rustc_hir_analysis/src/coherence/mod.rs b/compiler/rustc_hir_analysis/src/coherence/mod.rs index 3aad4bafeb5ab..1be4aa2f63ac4 100644 --- a/compiler/rustc_hir_analysis/src/coherence/mod.rs +++ b/compiler/rustc_hir_analysis/src/coherence/mod.rs @@ -206,7 +206,9 @@ fn check_object_overlap<'tcx>( // so this is valid. } else { let mut supertrait_def_ids = tcx.supertrait_def_ids(component_def_id); - if supertrait_def_ids.any(|d| d == trait_def_id) { + if supertrait_def_ids + .any(|d| d == trait_def_id && tcx.trait_def(d).implement_via_object) + { let span = tcx.def_span(impl_def_id); return Err(struct_span_code_err!( tcx.dcx(), diff --git a/compiler/rustc_hir_analysis/src/coherence/orphan.rs b/compiler/rustc_hir_analysis/src/coherence/orphan.rs index eca85c22a40eb..7d651155781a1 100644 --- a/compiler/rustc_hir_analysis/src/coherence/orphan.rs +++ b/compiler/rustc_hir_analysis/src/coherence/orphan.rs @@ -225,7 +225,8 @@ pub(crate) fn orphan_check_impl( | ty::FnDef(..) | ty::FnPtr(..) | ty::Never - | ty::Tuple(..) => (LocalImpl::Allow, NonlocalImpl::DisallowOther), + | ty::Tuple(..) + | ty::UnsafeBinder(_) => (LocalImpl::Allow, NonlocalImpl::DisallowOther), ty::Closure(..) | ty::CoroutineClosure(..) 
diff --git a/compiler/rustc_hir_analysis/src/collect.rs b/compiler/rustc_hir_analysis/src/collect.rs index 8c9da78a65912..37f0fb487a0fd 100644 --- a/compiler/rustc_hir_analysis/src/collect.rs +++ b/compiler/rustc_hir_analysis/src/collect.rs @@ -38,8 +38,7 @@ use rustc_middle::ty::fold::fold_regions; use rustc_middle::ty::util::{Discr, IntTypeExt}; use rustc_middle::ty::{self, AdtKind, Const, IsSuggestable, Ty, TyCtxt, TypingMode}; use rustc_middle::{bug, span_bug}; -use rustc_span::symbol::{Ident, Symbol, kw, sym}; -use rustc_span::{DUMMY_SP, Span}; +use rustc_span::{DUMMY_SP, Ident, Span, Symbol, kw, sym}; use rustc_trait_selection::error_reporting::traits::suggestions::NextTypeParamName; use rustc_trait_selection::infer::InferCtxtExt; use rustc_trait_selection::traits::ObligationCtxt; @@ -132,19 +131,25 @@ pub struct ItemCtxt<'tcx> { /////////////////////////////////////////////////////////////////////////// #[derive(Default)] -pub(crate) struct HirPlaceholderCollector(pub(crate) Vec); +pub(crate) struct HirPlaceholderCollector { + pub spans: Vec, + // If any of the spans points to a const infer var, then suppress any messages + // that may try to turn that const infer into a type parameter. + pub may_contain_const_infer: bool, +} impl<'v> Visitor<'v> for HirPlaceholderCollector { fn visit_ty(&mut self, t: &'v hir::Ty<'v>) { if let hir::TyKind::Infer = t.kind { - self.0.push(t.span); + self.spans.push(t.span); } intravisit::walk_ty(self, t) } fn visit_generic_arg(&mut self, generic_arg: &'v hir::GenericArg<'v>) { match generic_arg { hir::GenericArg::Infer(inf) => { - self.0.push(inf.span); + self.spans.push(inf.span); + self.may_contain_const_infer = true; intravisit::walk_inf(self, inf); } hir::GenericArg::Type(t) => self.visit_ty(t), @@ -153,7 +158,8 @@ impl<'v> Visitor<'v> for HirPlaceholderCollector { } fn visit_const_arg(&mut self, const_arg: &'v hir::ConstArg<'v>) { if let hir::ConstArgKind::Infer(span) = const_arg.kind { - self.0.push(span); + self.may_contain_const_infer = true; + self.spans.push(span); } intravisit::walk_const_arg(self, const_arg) } @@ -278,8 +284,8 @@ fn reject_placeholder_type_signatures_in_item<'tcx>( placeholder_type_error( icx.lowerer(), Some(generics), - visitor.0, - suggest, + visitor.spans, + suggest && !visitor.may_contain_const_infer, None, item.kind.descr(), ); @@ -608,16 +614,16 @@ impl<'tcx> HirTyLowerer<'tcx> for ItemCtxt<'tcx> { hir::FnRetTy::DefaultReturn(..) => tcx.types.unit, }; - if !(visitor.0.is_empty() && infer_replacements.is_empty()) { + if !(visitor.spans.is_empty() && infer_replacements.is_empty()) { // We check for the presence of // `ident_span` to not emit an error twice when we have `fn foo(_: fn() -> _)`. let mut diag = crate::collect::placeholder_type_error_diag( self, generics, - visitor.0, + visitor.spans, infer_replacements.iter().map(|(s, _)| *s).collect(), - true, + !visitor.may_contain_const_infer, hir_ty, "function", ); @@ -713,7 +719,7 @@ fn lower_item(tcx: TyCtxt<'_>, item_id: hir::ItemId) { placeholder_type_error( icx.lowerer(), None, - visitor.0, + visitor.spans, false, None, "static variable", @@ -781,7 +787,7 @@ fn lower_item(tcx: TyCtxt<'_>, item_id: hir::ItemId) { placeholder_type_error( icx.lowerer(), None, - visitor.0, + visitor.spans, false, None, it.kind.descr(), @@ -789,7 +795,7 @@ fn lower_item(tcx: TyCtxt<'_>, item_id: hir::ItemId) { } } - hir::ItemKind::Fn(..) => { + hir::ItemKind::Fn { .. 
} => { tcx.ensure().generics_of(def_id); tcx.ensure().type_of(def_id); tcx.ensure().predicates_of(def_id); @@ -823,7 +829,7 @@ fn lower_trait_item(tcx: TyCtxt<'_>, trait_item_id: hir::TraitItemId) { placeholder_type_error( icx.lowerer(), None, - visitor.0, + visitor.spans, false, None, "associated constant", @@ -838,7 +844,14 @@ fn lower_trait_item(tcx: TyCtxt<'_>, trait_item_id: hir::TraitItemId) { // Account for `type T = _;`. let mut visitor = HirPlaceholderCollector::default(); visitor.visit_trait_item(trait_item); - placeholder_type_error(icx.lowerer(), None, visitor.0, false, None, "associated type"); + placeholder_type_error( + icx.lowerer(), + None, + visitor.spans, + false, + None, + "associated type", + ); } hir::TraitItemKind::Type(_, None) => { @@ -849,7 +862,14 @@ fn lower_trait_item(tcx: TyCtxt<'_>, trait_item_id: hir::TraitItemId) { let mut visitor = HirPlaceholderCollector::default(); visitor.visit_trait_item(trait_item); - placeholder_type_error(icx.lowerer(), None, visitor.0, false, None, "associated type"); + placeholder_type_error( + icx.lowerer(), + None, + visitor.spans, + false, + None, + "associated type", + ); } }; @@ -873,7 +893,14 @@ fn lower_impl_item(tcx: TyCtxt<'_>, impl_item_id: hir::ImplItemId) { let mut visitor = HirPlaceholderCollector::default(); visitor.visit_impl_item(impl_item); - placeholder_type_error(icx.lowerer(), None, visitor.0, false, None, "associated type"); + placeholder_type_error( + icx.lowerer(), + None, + visitor.spans, + false, + None, + "associated type", + ); } hir::ImplItemKind::Const(ty, _) => { // Account for `const T: _ = ..;` @@ -883,7 +910,7 @@ fn lower_impl_item(tcx: TyCtxt<'_>, impl_item_id: hir::ImplItemId) { placeholder_type_error( icx.lowerer(), None, - visitor.0, + visitor.spans, false, None, "associated constant", @@ -1193,7 +1220,7 @@ fn trait_def(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::TraitDef { // and that they are all identifiers .and_then(|attr| match attr.meta_item_list() { Some(items) if items.len() < 2 => { - tcx.dcx().emit_err(errors::MustImplementOneOfAttribute { span: attr.span }); + tcx.dcx().emit_err(errors::MustImplementOneOfAttribute { span: attr.span() }); None } @@ -1205,7 +1232,7 @@ fn trait_def(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::TraitDef { tcx.dcx().emit_err(errors::MustBeNameOfAssociatedFunction { span }); }) .ok() - .zip(Some(attr.span)), + .zip(Some(attr.span())), // Error is reported by `rustc_attr!` None => None, }) @@ -1262,49 +1289,8 @@ fn trait_def(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::TraitDef { no_dups.then_some(list) }); - let mut deny_explicit_impl = false; - let mut implement_via_object = true; - if let Some(attr) = tcx.get_attr(def_id, sym::rustc_deny_explicit_impl) { - deny_explicit_impl = true; - let mut seen_attr = false; - for meta in attr.meta_item_list().iter().flatten() { - if let Some(meta) = meta.meta_item() - && meta.name_or_empty() == sym::implement_via_object - && let Some(lit) = meta.name_value_literal() - { - if seen_attr { - tcx.dcx().span_err(meta.span, "duplicated `implement_via_object` meta item"); - } - seen_attr = true; - - match lit.symbol { - kw::True => { - implement_via_object = true; - } - kw::False => { - implement_via_object = false; - } - _ => { - tcx.dcx().span_err( - meta.span, - format!( - "unknown literal passed to `implement_via_object` attribute: {}", - lit.symbol - ), - ); - } - } - } else { - tcx.dcx().span_err( - meta.span(), - format!("unknown meta item passed to `rustc_deny_explicit_impl` {meta:?}"), - ); - } - } - if !seen_attr { - 
tcx.dcx().span_err(attr.span, "missing `implement_via_object` meta item"); - } - } + let deny_explicit_impl = tcx.has_attr(def_id, sym::rustc_deny_explicit_impl); + let implement_via_object = !tcx.has_attr(def_id, sym::rustc_do_not_implement_via_object); ty::TraitDef { def_id: def_id.to_def_id(), @@ -1339,7 +1325,7 @@ fn fn_sig(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::EarlyBinder<'_, ty::PolyFn generics, .. }) - | Item(hir::Item { kind: ItemKind::Fn(sig, generics, _), .. }) => { + | Item(hir::Item { kind: ItemKind::Fn { sig, generics, .. }, .. }) => { lower_fn_sig_recovering_infer_ret_ty(&icx, sig, generics, def_id) } @@ -1350,7 +1336,7 @@ fn fn_sig(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::EarlyBinder<'_, ty::PolyFn { icx.lowerer().lower_fn_ty( hir_id, - sig.header.safety, + sig.header.safety(), sig.header.abi, sig.decl, Some(generics), @@ -1365,13 +1351,18 @@ fn fn_sig(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::EarlyBinder<'_, ty::PolyFn kind: TraitItemKind::Fn(FnSig { header, decl, span: _ }, _), generics, .. - }) => { - icx.lowerer().lower_fn_ty(hir_id, header.safety, header.abi, decl, Some(generics), None) - } + }) => icx.lowerer().lower_fn_ty( + hir_id, + header.safety(), + header.abi, + decl, + Some(generics), + None, + ), ForeignItem(&hir::ForeignItem { kind: ForeignItemKind::Fn(sig, _, _), .. }) => { let abi = tcx.hir().get_foreign_abi(hir_id); - compute_sig_of_foreign_fn_decl(tcx, def_id, sig.decl, abi, sig.header.safety) + compute_sig_of_foreign_fn_decl(tcx, def_id, sig.decl, abi, sig.header.safety()) } Ctor(data) | Variant(hir::Variant { data, .. }) if data.ctor().is_some() => { @@ -1413,13 +1404,13 @@ fn lower_fn_sig_recovering_infer_ret_ty<'tcx>( generics: &'tcx hir::Generics<'tcx>, def_id: LocalDefId, ) -> ty::PolyFnSig<'tcx> { - if let Some(infer_ret_ty) = sig.decl.output.get_infer_ret_ty() { + if let Some(infer_ret_ty) = sig.decl.output.is_suggestable_infer_ty() { return recover_infer_ret_ty(icx, infer_ret_ty, generics, def_id); } icx.lowerer().lower_fn_ty( icx.tcx().local_def_id_to_hir_id(def_id), - sig.header.safety, + sig.header.safety(), sig.header.abi, sig.decl, Some(generics), @@ -1464,7 +1455,7 @@ fn recover_infer_ret_ty<'tcx>( let mut visitor = HirPlaceholderCollector::default(); visitor.visit_ty(infer_ret_ty); - let mut diag = bad_placeholder(icx.lowerer(), visitor.0, "return type"); + let mut diag = bad_placeholder(icx.lowerer(), visitor.spans, "return type"); let ret_ty = fn_sig.output(); // Don't leak types into signatures unless they're nameable! 
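As context for the placeholder-collection changes above (`HirPlaceholderCollector` now tracks `spans` plus a `may_contain_const_infer` flag, and `bad_placeholder` receives those spans), here is a hedged sketch of the kind of source that reaches the "return type" path; the function is hypothetical and the diagnostic text is paraphrased from stable rustc behavior rather than taken from this patch:

```rust
// Illustration only: `_` in an item signature is recorded as a placeholder span
// and reported, with the concrete type recovered from the body offered as a fix.
fn double(x: i32) -> _ {
    // error[E0121]: the placeholder `_` is not allowed within types on item signatures
    // help: replace with the correct return type: `i32`
    x * 2
}
```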
diff --git a/compiler/rustc_hir_analysis/src/collect/generics_of.rs b/compiler/rustc_hir_analysis/src/collect/generics_of.rs index f52d4f42eca4b..075615de52279 100644 --- a/compiler/rustc_hir_analysis/src/collect/generics_of.rs +++ b/compiler/rustc_hir_analysis/src/collect/generics_of.rs @@ -8,8 +8,7 @@ use rustc_hir::def::DefKind; use rustc_hir::def_id::LocalDefId; use rustc_middle::ty::{self, TyCtxt}; use rustc_session::lint; -use rustc_span::Span; -use rustc_span::symbol::{Symbol, kw}; +use rustc_span::{Span, Symbol, kw}; use tracing::{debug, instrument}; use crate::delegation::inherit_generics_for_delegation_item; diff --git a/compiler/rustc_hir_analysis/src/collect/item_bounds.rs b/compiler/rustc_hir_analysis/src/collect/item_bounds.rs index 0b81f4693712f..d3ff1f7bebe65 100644 --- a/compiler/rustc_hir_analysis/src/collect/item_bounds.rs +++ b/compiler/rustc_hir_analysis/src/collect/item_bounds.rs @@ -371,10 +371,9 @@ pub(super) fn explicit_item_bounds_with_filter( associated_type_bounds(tcx, def_id, opaque_ty.bounds, opaque_ty.span, filter); return ty::EarlyBinder::bind(bounds); } - Some(ty::ImplTraitInTraitData::Impl { .. }) => span_bug!( - tcx.def_span(def_id), - "item bounds for RPITIT in impl to be fed on def-id creation" - ), + Some(ty::ImplTraitInTraitData::Impl { .. }) => { + span_bug!(tcx.def_span(def_id), "RPITIT in impl should not have item bounds") + } None => {} } diff --git a/compiler/rustc_hir_analysis/src/collect/predicates_of.rs b/compiler/rustc_hir_analysis/src/collect/predicates_of.rs index 1a6c0a934360c..8a975786a92f6 100644 --- a/compiler/rustc_hir_analysis/src/collect/predicates_of.rs +++ b/compiler/rustc_hir_analysis/src/collect/predicates_of.rs @@ -9,8 +9,7 @@ use rustc_middle::ty::{ self, GenericPredicates, ImplTraitInTraitData, Ty, TyCtxt, TypeVisitable, TypeVisitor, Upcast, }; use rustc_middle::{bug, span_bug}; -use rustc_span::symbol::Ident; -use rustc_span::{DUMMY_SP, Span}; +use rustc_span::{DUMMY_SP, Ident, Span}; use tracing::{debug, instrument, trace}; use super::item_bounds::explicit_item_bounds_with_filter; @@ -654,7 +653,7 @@ pub(super) fn implied_predicates_with_filter<'tcx>( } } } - PredicateFilter::SelfAndAssociatedTypeBounds => { + PredicateFilter::All | PredicateFilter::SelfAndAssociatedTypeBounds => { for &(pred, span) in implied_bounds { debug!("superbound: {:?}", pred); if let ty::ClauseKind::Trait(bound) = pred.kind().skip_binder() @@ -957,11 +956,20 @@ pub(super) fn const_conditions<'tcx>( bug!("const_conditions invoked for item that is not conditionally const: {def_id:?}"); } + match tcx.opt_rpitit_info(def_id.to_def_id()) { + // RPITITs inherit const conditions of their parent fn + Some( + ty::ImplTraitInTraitData::Impl { fn_def_id } + | ty::ImplTraitInTraitData::Trait { fn_def_id, .. }, + ) => return tcx.const_conditions(fn_def_id), + None => {} + } + let (generics, trait_def_id_and_supertraits, has_parent) = match tcx.hir_node_by_def_id(def_id) { Node::Item(item) => match item.kind { hir::ItemKind::Impl(impl_) => (impl_.generics, None, false), - hir::ItemKind::Fn(_, generics, _) => (generics, None, false), + hir::ItemKind::Fn { generics, .. 
} => (generics, None, false), hir::ItemKind::Trait(_, _, generics, supertraits, _) => { (generics, Some((item.owner_id.def_id, supertraits)), false) } @@ -1028,7 +1036,7 @@ pub(super) fn const_conditions<'tcx>( icx.lowerer().lower_bounds( tcx.types.self_param, - supertraits.into_iter(), + supertraits, &mut bounds, ty::List::empty(), PredicateFilter::ConstIfConst, @@ -1060,19 +1068,29 @@ pub(super) fn explicit_implied_const_bounds<'tcx>( bug!("const_conditions invoked for item that is not conditionally const: {def_id:?}"); } - let bounds = match tcx.hir_node_by_def_id(def_id) { - Node::Item(hir::Item { kind: hir::ItemKind::Trait(..), .. }) => { - implied_predicates_with_filter( - tcx, - def_id.to_def_id(), - PredicateFilter::SelfConstIfConst, - ) - } - Node::TraitItem(hir::TraitItem { kind: hir::TraitItemKind::Type(..), .. }) - | Node::OpaqueTy(_) => { + let bounds = match tcx.opt_rpitit_info(def_id.to_def_id()) { + // RPITIT's bounds are the same as opaque type bounds, but with + // a projection self type. + Some(ty::ImplTraitInTraitData::Trait { .. }) => { explicit_item_bounds_with_filter(tcx, def_id, PredicateFilter::ConstIfConst) } - _ => bug!("explicit_implied_const_bounds called on wrong item: {def_id:?}"), + Some(ty::ImplTraitInTraitData::Impl { .. }) => { + span_bug!(tcx.def_span(def_id), "RPITIT in impl should not have item bounds") + } + None => match tcx.hir_node_by_def_id(def_id) { + Node::Item(hir::Item { kind: hir::ItemKind::Trait(..), .. }) => { + implied_predicates_with_filter( + tcx, + def_id.to_def_id(), + PredicateFilter::SelfConstIfConst, + ) + } + Node::TraitItem(hir::TraitItem { kind: hir::TraitItemKind::Type(..), .. }) + | Node::OpaqueTy(_) => { + explicit_item_bounds_with_filter(tcx, def_id, PredicateFilter::ConstIfConst) + } + _ => bug!("explicit_implied_const_bounds called on wrong item: {def_id:?}"), + }, }; bounds.map_bound(|bounds| { diff --git a/compiler/rustc_hir_analysis/src/collect/resolve_bound_vars.rs b/compiler/rustc_hir_analysis/src/collect/resolve_bound_vars.rs index 0f797bcdae428..e8706d1adfbb6 100644 --- a/compiler/rustc_hir_analysis/src/collect/resolve_bound_vars.rs +++ b/compiler/rustc_hir_analysis/src/collect/resolve_bound_vars.rs @@ -26,9 +26,8 @@ use rustc_middle::middle::resolve_bound_vars::*; use rustc_middle::query::Providers; use rustc_middle::ty::{self, TyCtxt, TypeSuperVisitable, TypeVisitor}; use rustc_middle::{bug, span_bug}; -use rustc_span::Span; use rustc_span::def_id::{DefId, LocalDefId, LocalDefIdMap}; -use rustc_span::symbol::{Ident, sym}; +use rustc_span::{Ident, Span, sym}; use tracing::{debug, debug_span, instrument}; use crate::errors; @@ -661,7 +660,7 @@ impl<'a, 'tcx> Visitor<'tcx> for BoundVarContext<'a, 'tcx> { _ => {} } match item.kind { - hir::ItemKind::Fn(_, generics, _) => { + hir::ItemKind::Fn { generics, .. } => { self.visit_early_late(item.hir_id(), generics, |this| { intravisit::walk_item(this, item); }); @@ -1380,7 +1379,7 @@ impl<'a, 'tcx> BoundVarContext<'a, 'tcx> { } else if let Some(body_id) = outermost_body { let fn_id = self.tcx.hir().body_owner(body_id); match self.tcx.hir_node(fn_id) { - Node::Item(hir::Item { owner_id, kind: hir::ItemKind::Fn(..), .. }) + Node::Item(hir::Item { owner_id, kind: hir::ItemKind::Fn { .. }, .. 
}) | Node::TraitItem(hir::TraitItem { owner_id, kind: hir::TraitItemKind::Fn(..), diff --git a/compiler/rustc_hir_analysis/src/collect/type_of.rs b/compiler/rustc_hir_analysis/src/collect/type_of.rs index 5595504c3d9b1..51e15767b8c70 100644 --- a/compiler/rustc_hir_analysis/src/collect/type_of.rs +++ b/compiler/rustc_hir_analysis/src/collect/type_of.rs @@ -4,16 +4,16 @@ use rustc_errors::{Applicability, StashKey, Suggestions}; use rustc_hir as hir; use rustc_hir::HirId; use rustc_hir::def_id::{DefId, LocalDefId}; +use rustc_hir::intravisit::Visitor; use rustc_middle::query::plumbing::CyclePlaceholder; use rustc_middle::ty::fold::fold_regions; use rustc_middle::ty::print::with_forced_trimmed_paths; use rustc_middle::ty::util::IntTypeExt; use rustc_middle::ty::{self, Article, IsSuggestable, Ty, TyCtxt, TypeVisitableExt}; use rustc_middle::{bug, span_bug}; -use rustc_span::symbol::Ident; -use rustc_span::{DUMMY_SP, Span}; +use rustc_span::{DUMMY_SP, Ident, Span}; -use super::{ItemCtxt, bad_placeholder}; +use super::{HirPlaceholderCollector, ItemCtxt, bad_placeholder}; use crate::errors::TypeofReservedKeywordUsed; use crate::hir_ty_lowering::HirTyLowerer; @@ -294,7 +294,7 @@ pub(super) fn type_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::EarlyBinder<'_ } _ => icx.lower_ty(*self_ty), }, - ItemKind::Fn(..) => { + ItemKind::Fn { .. } => { let args = ty::GenericArgs::identity_for_item(tcx, def_id); Ty::new_fn_def(tcx, def_id.to_def_id(), args) } @@ -413,7 +413,7 @@ fn infer_placeholder_type<'tcx>( kind: &'static str, ) -> Ty<'tcx> { let tcx = cx.tcx(); - let ty = tcx.diagnostic_only_typeck(def_id).node_type(body_id.hir_id); + let ty = tcx.typeck(def_id).node_type(body_id.hir_id); // If this came from a free `const` or `static mut?` item, // then the user may have written e.g. `const A = 42;`. @@ -448,13 +448,37 @@ fn infer_placeholder_type<'tcx>( } }) .unwrap_or_else(|| { - let mut diag = bad_placeholder(cx, vec![span], kind); - - if !ty.references_error() { + let mut visitor = HirPlaceholderCollector::default(); + let node = tcx.hir_node_by_def_id(def_id); + if let Some(ty) = node.ty() { + visitor.visit_ty(ty); + } + // If we have just one span, let's try to steal a const `_` feature error. + let try_steal_span = if !tcx.features().generic_arg_infer() && visitor.spans.len() == 1 + { + visitor.spans.first().copied() + } else { + None + }; + // If we didn't find any infer tys, then just fallback to `span`. + if visitor.spans.is_empty() { + visitor.spans.push(span); + } + let mut diag = bad_placeholder(cx, visitor.spans, kind); + + // HACK(#69396): Stashing and stealing diagnostics does not interact + // well with macros which may delay more than one diagnostic on the + // same span. If this happens, we will fall through to this arm, so + // we need to suppress the suggestion since it's invalid. Ideally we + // would suppress the duplicated error too, but that's really hard. + if span.is_empty() && span.from_expansion() { + // An approximately better primary message + no suggestion... 
+ diag.primary_message("missing type for item"); + } else if !ty.references_error() { if let Some(ty) = ty.make_suggestable(tcx, false, None) { - diag.span_suggestion( + diag.span_suggestion_verbose( span, - "replace with the correct type", + "replace this with a fully-specified type", ty, Applicability::MachineApplicable, ); @@ -465,7 +489,16 @@ fn infer_placeholder_type<'tcx>( )); } } - diag.emit() + + if let Some(try_steal_span) = try_steal_span { + cx.dcx().try_steal_replace_and_emit_err( + try_steal_span, + StashKey::UnderscoreForArrayLengths, + diag, + ) + } else { + diag.emit() + } }); Ty::new_error(tcx, guar) } @@ -473,7 +506,7 @@ fn infer_placeholder_type<'tcx>( fn check_feature_inherent_assoc_ty(tcx: TyCtxt<'_>, span: Span) { if !tcx.features().inherent_associated_types() { use rustc_session::parse::feature_err; - use rustc_span::symbol::sym; + use rustc_span::sym; feature_err( &tcx.sess, sym::inherent_associated_types, diff --git a/compiler/rustc_hir_analysis/src/collect/type_of/opaque.rs b/compiler/rustc_hir_analysis/src/collect/type_of/opaque.rs index 66255829dcffe..d1a1e36c1d5a0 100644 --- a/compiler/rustc_hir_analysis/src/collect/type_of/opaque.rs +++ b/compiler/rustc_hir_analysis/src/collect/type_of/opaque.rs @@ -1,4 +1,3 @@ -use rustc_errors::StashKey; use rustc_hir::def::DefKind; use rustc_hir::def_id::LocalDefId; use rustc_hir::intravisit::{self, Visitor}; @@ -45,7 +44,7 @@ pub(super) fn find_opaque_ty_constraints_for_impl_trait_in_assoc_type( if !hidden.ty.references_error() { for concrete_type in locator.typeck_types { if concrete_type.ty != tcx.erase_regions(hidden.ty) { - if let Ok(d) = hidden.build_mismatch_error(&concrete_type, def_id, tcx) { + if let Ok(d) = hidden.build_mismatch_error(&concrete_type, tcx) { d.emit(); } } @@ -121,7 +120,7 @@ pub(super) fn find_opaque_ty_constraints_for_tait(tcx: TyCtxt<'_>, def_id: Local if !hidden.ty.references_error() { for concrete_type in locator.typeck_types { if concrete_type.ty != tcx.erase_regions(hidden.ty) { - if let Ok(d) = hidden.build_mismatch_error(&concrete_type, def_id, tcx) { + if let Ok(d) = hidden.build_mismatch_error(&concrete_type, tcx) { d.emit(); } } @@ -187,7 +186,7 @@ impl TaitConstraintLocator<'_> { "foreign items cannot constrain opaque types", ); if let Some(hir_sig) = hir_node.fn_sig() - && hir_sig.decl.output.get_infer_ret_ty().is_some() + && hir_sig.decl.output.is_suggestable_infer_ty().is_some() { let guar = self.tcx.dcx().span_delayed_bug( hir_sig.decl.output.span(), @@ -285,9 +284,8 @@ impl TaitConstraintLocator<'_> { debug!(?concrete_type, "found constraint"); if let Some(prev) = &mut self.found { if concrete_type.ty != prev.ty { - let (Ok(guar) | Err(guar)) = prev - .build_mismatch_error(&concrete_type, self.def_id, self.tcx) - .map(|d| d.emit()); + let (Ok(guar) | Err(guar)) = + prev.build_mismatch_error(&concrete_type, self.tcx).map(|d| d.emit()); prev.ty = Ty::new_error(self.tcx, guar); } } else { @@ -361,11 +359,8 @@ pub(super) fn find_opaque_ty_constraints_for_rpit<'tcx>( ); if let Some(prev) = &mut hir_opaque_ty { if concrete_type.ty != prev.ty { - if let Ok(d) = prev.build_mismatch_error(&concrete_type, def_id, tcx) { - d.stash( - tcx.def_span(opaque_type_key.def_id), - StashKey::OpaqueHiddenTypeMismatch, - ); + if let Ok(d) = prev.build_mismatch_error(&concrete_type, tcx) { + d.emit(); } } } else { @@ -435,9 +430,7 @@ impl RpitConstraintChecker<'_> { debug!(?concrete_type, "found constraint"); if concrete_type.ty != self.found.ty { - if let Ok(d) = - 
self.found.build_mismatch_error(&concrete_type, self.def_id, self.tcx) - { + if let Ok(d) = self.found.build_mismatch_error(&concrete_type, self.tcx) { d.emit(); } } diff --git a/compiler/rustc_hir_analysis/src/errors.rs b/compiler/rustc_hir_analysis/src/errors.rs index d46f60b16f5d1..00ba1741ed729 100644 --- a/compiler/rustc_hir_analysis/src/errors.rs +++ b/compiler/rustc_hir_analysis/src/errors.rs @@ -6,8 +6,7 @@ use rustc_errors::{ }; use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic}; use rustc_middle::ty::Ty; -use rustc_span::symbol::Ident; -use rustc_span::{Span, Symbol}; +use rustc_span::{Ident, Span, Symbol}; use crate::fluent_generated as fluent; mod pattern_types; diff --git a/compiler/rustc_hir_analysis/src/hir_ty_lowering/bounds.rs b/compiler/rustc_hir_analysis/src/hir_ty_lowering/bounds.rs index 0404e38a29307..e949d4a11261e 100644 --- a/compiler/rustc_hir_analysis/src/hir_ty_lowering/bounds.rs +++ b/compiler/rustc_hir_analysis/src/hir_ty_lowering/bounds.rs @@ -9,8 +9,7 @@ use rustc_hir::def::{DefKind, Res}; use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_middle::bug; use rustc_middle::ty::{self as ty, IsSuggestable, Ty, TyCtxt}; -use rustc_span::symbol::Ident; -use rustc_span::{ErrorGuaranteed, Span, Symbol, sym}; +use rustc_span::{ErrorGuaranteed, Ident, Span, Symbol, kw, sym}; use rustc_trait_selection::traits; use rustc_type_ir::visit::{TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor}; use smallvec::SmallVec; @@ -745,7 +744,7 @@ fn check_assoc_const_binding_type<'tcx>( let generics = tcx.generics_of(enclosing_item_owner_id); for index in collector.params { let param = generics.param_at(index as _, tcx); - let is_self_param = param.name == rustc_span::symbol::kw::SelfUpper; + let is_self_param = param.name == kw::SelfUpper; guar.get_or_insert(cx.dcx().emit_err(crate::errors::ParamInTyOfAssocConstBinding { span: assoc_const.span, assoc_const, diff --git a/compiler/rustc_hir_analysis/src/hir_ty_lowering/errors.rs b/compiler/rustc_hir_analysis/src/hir_ty_lowering/errors.rs index 00c1f9b332b0a..7a3d921f00e6e 100644 --- a/compiler/rustc_hir_analysis/src/hir_ty_lowering/errors.rs +++ b/compiler/rustc_hir_analysis/src/hir_ty_lowering/errors.rs @@ -16,8 +16,7 @@ use rustc_middle::ty::{ }; use rustc_session::parse::feature_err; use rustc_span::edit_distance::find_best_match_for_name; -use rustc_span::symbol::{Ident, kw, sym}; -use rustc_span::{BytePos, DUMMY_SP, Span, Symbol}; +use rustc_span::{BytePos, DUMMY_SP, Ident, Span, Symbol, kw, sym}; use rustc_trait_selection::error_reporting::traits::report_dyn_incompatibility; use rustc_trait_selection::traits::{ FulfillmentError, TraitAliasExpansionInfo, dyn_compatibility_violations_for_assoc_item, @@ -180,7 +179,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { // all visible traits. If there's one clear winner, just suggest that. 
let visible_traits: Vec<_> = tcx - .all_traits() + .visible_traits() .filter(|trait_def_id| { let viz = tcx.visibility(*trait_def_id); let def_id = self.item_def_id(); diff --git a/compiler/rustc_hir_analysis/src/hir_ty_lowering/generics.rs b/compiler/rustc_hir_analysis/src/hir_ty_lowering/generics.rs index c3f4fc8699a38..df00948dd211c 100644 --- a/compiler/rustc_hir_analysis/src/hir_ty_lowering/generics.rs +++ b/compiler/rustc_hir_analysis/src/hir_ty_lowering/generics.rs @@ -9,7 +9,7 @@ use rustc_middle::ty::{ self, GenericArgsRef, GenericParamDef, GenericParamDefKind, IsSuggestable, Ty, }; use rustc_session::lint::builtin::LATE_BOUND_LIFETIME_ARGUMENTS; -use rustc_span::symbol::{kw, sym}; +use rustc_span::{kw, sym}; use smallvec::SmallVec; use tracing::{debug, instrument}; diff --git a/compiler/rustc_hir_analysis/src/hir_ty_lowering/lint.rs b/compiler/rustc_hir_analysis/src/hir_ty_lowering/lint.rs index fd49e7e44398b..a1f2b8c75948d 100644 --- a/compiler/rustc_hir_analysis/src/hir_ty_lowering/lint.rs +++ b/compiler/rustc_hir_analysis/src/hir_ty_lowering/lint.rs @@ -2,10 +2,12 @@ use rustc_ast::TraitObjectSyntax; use rustc_errors::codes::*; use rustc_errors::{Diag, EmissionGuarantee, ErrorGuaranteed, StashKey, Suggestions}; use rustc_hir as hir; -use rustc_hir::def::{DefKind, Res}; +use rustc_hir::def::{DefKind, Namespace, Res}; +use rustc_hir::def_id::DefId; use rustc_lint_defs::Applicability; use rustc_lint_defs::builtin::BARE_TRAIT_OBJECTS; use rustc_span::Span; +use rustc_span::edit_distance::find_best_match_for_name; use rustc_trait_selection::error_reporting::traits::suggestions::NextTypeParamName; use super::HirTyLowerer; @@ -86,7 +88,12 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { // Check if the impl trait that we are considering is an impl of a local trait. self.maybe_suggest_blanket_trait_impl(self_ty, &mut diag); self.maybe_suggest_assoc_ty_bound(self_ty, &mut diag); - // In case there is an associate type with the same name + self.maybe_suggest_typoed_method( + self_ty, + poly_trait_ref.trait_ref.trait_def_id(), + &mut diag, + ); + // In case there is an associated type with the same name // Add the suggestion to this error if let Some(mut sugg) = tcx.dcx().steal_non_err(self_ty.span, StashKey::AssociatedTypeSuggestion) @@ -96,7 +103,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { s1.append(s2); sugg.cancel(); } - diag.stash(self_ty.span, StashKey::TraitMissingMethod) + Some(diag.emit()) } else { tcx.node_span_lint(BARE_TRAIT_OBJECTS, self_ty.hir_id, self_ty.span, |lint| { lint.primary_message("trait objects without an explicit `dyn` are deprecated"); @@ -189,9 +196,9 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { // 2. Functions inside trait blocks // 3. Functions inside impl blocks let (sig, generics) = match tcx.hir_node_by_def_id(parent_id) { - hir::Node::Item(hir::Item { kind: hir::ItemKind::Fn(sig, generics, _), .. }) => { - (sig, generics) - } + hir::Node::Item(hir::Item { + kind: hir::ItemKind::Fn { sig, generics, .. }, .. + }) => (sig, generics), hir::Node::TraitItem(hir::TraitItem { kind: hir::TraitItemKind::Fn(sig, _), generics, @@ -343,4 +350,44 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { ); } } + + fn maybe_suggest_typoed_method( + &self, + self_ty: &hir::Ty<'_>, + trait_def_id: Option<DefId>, + diag: &mut Diag<'_>, + ) { + let tcx = self.tcx(); + let Some(trait_def_id) = trait_def_id else { + return; + }; + let hir::Node::Expr(hir::Expr { + kind: hir::ExprKind::Path(hir::QPath::TypeRelative(path_ty, segment)), + ..
+ }) = tcx.parent_hir_node(self_ty.hir_id) + else { + return; + }; + if path_ty.hir_id != self_ty.hir_id { + return; + } + let names: Vec<_> = tcx + .associated_items(trait_def_id) + .in_definition_order() + .filter(|assoc| assoc.kind.namespace() == Namespace::ValueNS) + .map(|cand| cand.name) + .collect(); + if let Some(typo) = find_best_match_for_name(&names, segment.ident.name, None) { + diag.span_suggestion_verbose( + segment.ident.span, + format!( + "you may have misspelled this associated item, causing `{}` \ + to be interpreted as a type rather than a trait", + tcx.item_name(trait_def_id), + ), + typo, + Applicability::MaybeIncorrect, + ); + } + } } diff --git a/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs b/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs index a357ade02945f..cb90fff782fb0 100644 --- a/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs +++ b/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs @@ -35,7 +35,7 @@ use rustc_hir::{self as hir, AnonConst, GenericArg, GenericArgs, HirId}; use rustc_infer::infer::{InferCtxt, TyCtxtInferExt}; use rustc_infer::traits::ObligationCause; use rustc_middle::middle::stability::AllowUnstable; -use rustc_middle::mir::interpret::{LitToConstError, LitToConstInput}; +use rustc_middle::mir::interpret::LitToConstInput; use rustc_middle::ty::fold::fold_regions; use rustc_middle::ty::print::PrintPolyTraitRefExt as _; use rustc_middle::ty::{ @@ -45,8 +45,7 @@ use rustc_middle::ty::{ use rustc_middle::{bug, span_bug}; use rustc_session::lint::builtin::AMBIGUOUS_ASSOCIATED_ITEMS; use rustc_span::edit_distance::find_best_match_for_name; -use rustc_span::symbol::{Ident, Symbol, kw}; -use rustc_span::{DUMMY_SP, Span}; +use rustc_span::{DUMMY_SP, Ident, Span, Symbol, kw}; use rustc_trait_selection::infer::InferCtxtExt; use rustc_trait_selection::traits::wf::object_region_bounds; use rustc_trait_selection::traits::{self, ObligationCtxt}; @@ -356,7 +355,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { ty::Region::new_late_param( tcx, scope.to_def_id(), - ty::BoundRegionKind::Named(id.to_def_id(), name), + ty::LateParamRegionKind::Named(id.to_def_id(), name), ) // (*) -- not late-bound, won't change @@ -2263,25 +2262,11 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { _ => None, }; - if let Some(lit_input) = lit_input { - // If an error occurred, ignore that it's a literal and leave reporting the error up to - // mir. - match tcx.at(expr.span).lit_to_const(lit_input) { - Ok(c) => return Some(c), - Err(_) if lit_input.ty.has_aliases() => { - // allow the `ty` to be an alias type, though we cannot handle it here - return None; - } - Err(e) => { - tcx.dcx().span_delayed_bug( - expr.span, - format!("try_lower_anon_const_lit: couldn't lit_to_const {e:?}"), - ); - } - } - } - - None + lit_input + // Allow the `ty` to be an alias type, though we cannot handle it here, we just go through + // the more expensive anon const code path. 
+ .filter(|l| !l.ty.has_aliases()) + .map(|l| tcx.at(expr.span).lit_to_const(l)) } fn lower_delegation_ty(&self, idx: hir::InferDelegationKind) -> Ty<'tcx> { @@ -2319,13 +2304,13 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { self.lower_fn_ty(hir_ty.hir_id, bf.safety, bf.abi, bf.decl, None, Some(hir_ty)), ) } - hir::TyKind::UnsafeBinder(_binder) => { - let guar = self - .dcx() - .struct_span_err(hir_ty.span, "unsafe binders are not yet implemented") - .emit(); - Ty::new_error(tcx, guar) - } + hir::TyKind::UnsafeBinder(binder) => Ty::new_unsafe_binder( + tcx, + ty::Binder::bind_with_vars( + self.lower_ty(binder.inner_ty), + tcx.late_bound_vars(hir_ty.hir_id), + ), + ), hir::TyKind::TraitObject(bounds, lifetime, repr) => { if let Some(guar) = self.prohibit_or_lint_bare_trait_object_ty(hir_ty) { // Don't continue with type analysis if the `dyn` keyword is missing @@ -2450,44 +2435,39 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { Ty::new_error(tcx, err) } hir::PatKind::Range(start, end, include_end) => { - let expr_to_const = |expr: &'tcx hir::Expr<'tcx>| -> ty::Const<'tcx> { - let (expr, neg) = match expr.kind { - hir::ExprKind::Unary(hir::UnOp::Neg, negated) => { - (negated, Some((expr.hir_id, expr.span))) - } - _ => (expr, None), - }; - let (c, c_ty) = match &expr.kind { - hir::ExprKind::Lit(lit) => { + let expr_to_const = |expr: &'tcx hir::PatExpr<'tcx>| -> ty::Const<'tcx> { + let (c, c_ty) = match expr.kind { + hir::PatExprKind::Lit { lit, negated } => { let lit_input = - LitToConstInput { lit: &lit.node, ty, neg: neg.is_some() }; - let ct = match tcx.lit_to_const(lit_input) { - Ok(c) => c, - Err(LitToConstError::Reported(err)) => { - ty::Const::new_error(tcx, err) - } - Err(LitToConstError::TypeError) => todo!(), - }; + LitToConstInput { lit: &lit.node, ty, neg: negated }; + let ct = tcx.lit_to_const(lit_input); (ct, ty) } - hir::ExprKind::Path(hir::QPath::Resolved( + hir::PatExprKind::Path(hir::QPath::Resolved( _, path @ &hir::Path { res: Res::Def(DefKind::ConstParam, def_id), .. }, )) => { - let _ = self.prohibit_generic_args( + match self.prohibit_generic_args( path.segments.iter(), GenericsArgsErrExtend::Param(def_id), - ); - let ty = tcx - .type_of(def_id) - .no_bound_vars() - .expect("const parameter types cannot be generic"); - let ct = self.lower_const_param(def_id, expr.hir_id); - (ct, ty) + ) { + Ok(()) => { + let ty = tcx + .type_of(def_id) + .no_bound_vars() + .expect("const parameter types cannot be generic"); + let ct = self.lower_const_param(def_id, expr.hir_id); + (ct, ty) + } + Err(guar) => ( + ty::Const::new_error(tcx, guar), + Ty::new_error(tcx, guar), + ), + } } _ => { @@ -2498,9 +2478,6 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { } }; self.record_ty(expr.hir_id, c_ty, expr.span); - if let Some((id, span)) = neg { - self.record_ty(id, c_ty, span); - } c }; diff --git a/compiler/rustc_hir_analysis/src/impl_wf_check.rs b/compiler/rustc_hir_analysis/src/impl_wf_check.rs index d9c70c3cee655..42034736ad674 100644 --- a/compiler/rustc_hir_analysis/src/impl_wf_check.rs +++ b/compiler/rustc_hir_analysis/src/impl_wf_check.rs @@ -57,22 +57,24 @@ pub(crate) fn check_impl_wf( tcx: TyCtxt<'_>, impl_def_id: LocalDefId, ) -> Result<(), ErrorGuaranteed> { - let min_specialization = tcx.features().min_specialization(); - let mut res = Ok(()); debug_assert_matches!(tcx.def_kind(impl_def_id), DefKind::Impl { .. }); - res = res.and(enforce_impl_params_are_constrained(tcx, impl_def_id)); - if min_specialization { + + // Check that the args are constrained. 
We queryfied the check for ty/const params + // since unconstrained type/const params cause ICEs in projection, so we want to + // detect those specifically and project those to `TyKind::Error`. + let mut res = tcx.ensure().enforce_impl_non_lifetime_params_are_constrained(impl_def_id); + res = res.and(enforce_impl_lifetime_params_are_constrained(tcx, impl_def_id)); + + if tcx.features().min_specialization() { res = res.and(check_min_specialization(tcx, impl_def_id)); } - res } -fn enforce_impl_params_are_constrained( +pub(crate) fn enforce_impl_lifetime_params_are_constrained( tcx: TyCtxt<'_>, impl_def_id: LocalDefId, ) -> Result<(), ErrorGuaranteed> { - // Every lifetime used in an associated type must be constrained. let impl_self_ty = tcx.type_of(impl_def_id).instantiate_identity(); if impl_self_ty.references_error() { // Don't complain about unconstrained type params when self ty isn't known due to errors. @@ -88,6 +90,7 @@ fn enforce_impl_params_are_constrained( // Compilation must continue in order for other important diagnostics to keep showing up. return Ok(()); } + let impl_generics = tcx.generics_of(impl_def_id); let impl_predicates = tcx.predicates_of(impl_def_id); let impl_trait_ref = tcx.impl_trait_ref(impl_def_id).map(ty::EarlyBinder::instantiate_identity); @@ -121,6 +124,84 @@ fn enforce_impl_params_are_constrained( }) .collect(); + let mut res = Ok(()); + for param in &impl_generics.own_params { + match param.kind { + ty::GenericParamDefKind::Lifetime => { + // This is a horrible concession to reality. I think it'd be + // better to just ban unconstrained lifetimes outright, but in + // practice people do non-hygienic macros like: + // + // ``` + // macro_rules! __impl_slice_eq1 { + // ($Lhs: ty, $Rhs: ty, $Bound: ident) => { + // impl<'a, 'b, A: $Bound, B> PartialEq<$Rhs> for $Lhs where A: PartialEq { + // .... + // } + // } + // } + // ``` + // + // In a concession to backwards compatibility, we continue to + // permit those, so long as the lifetimes aren't used in + // associated types. I believe this is sound, because lifetimes + // used elsewhere are not projected back out. + let param_lt = cgp::Parameter::from(param.to_early_bound_region_data()); + if lifetimes_in_associated_types.contains(¶m_lt) + && !input_parameters.contains(¶m_lt) + { + let mut diag = tcx.dcx().create_err(UnconstrainedGenericParameter { + span: tcx.def_span(param.def_id), + param_name: param.name, + param_def_kind: tcx.def_descr(param.def_id), + const_param_note: false, + const_param_note2: false, + }); + diag.code(E0207); + res = Err(diag.emit()); + } + } + ty::GenericParamDefKind::Type { .. } | ty::GenericParamDefKind::Const { .. } => { + // Enforced in `enforce_impl_non_lifetime_params_are_constrained`. + } + } + } + res +} + +pub(crate) fn enforce_impl_non_lifetime_params_are_constrained( + tcx: TyCtxt<'_>, + impl_def_id: LocalDefId, +) -> Result<(), ErrorGuaranteed> { + let impl_self_ty = tcx.type_of(impl_def_id).instantiate_identity(); + if impl_self_ty.references_error() { + // Don't complain about unconstrained type params when self ty isn't known due to errors. + // (#36836) + tcx.dcx().span_delayed_bug( + tcx.def_span(impl_def_id), + format!( + "potentially unconstrained type parameters weren't evaluated: {impl_self_ty:?}", + ), + ); + // This is super fishy, but our current `rustc_hir_analysis::check_crate` pipeline depends on + // `type_of` having been called much earlier, and thus this value being read from cache. 
+ // Compilation must continue in order for other important diagnostics to keep showing up. + return Ok(()); + } + let impl_generics = tcx.generics_of(impl_def_id); + let impl_predicates = tcx.predicates_of(impl_def_id); + let impl_trait_ref = tcx.impl_trait_ref(impl_def_id).map(ty::EarlyBinder::instantiate_identity); + + impl_trait_ref.error_reported()?; + + let mut input_parameters = cgp::parameters_for_impl(tcx, impl_self_ty, impl_trait_ref); + cgp::identify_constrained_generic_params( + tcx, + impl_predicates, + impl_trait_ref, + &mut input_parameters, + ); + let mut res = Ok(()); for param in &impl_generics.own_params { let err = match param.kind { @@ -129,15 +210,14 @@ fn enforce_impl_params_are_constrained( let param_ty = ty::ParamTy::for_def(param); !input_parameters.contains(&cgp::Parameter::from(param_ty)) } - ty::GenericParamDefKind::Lifetime => { - let param_lt = cgp::Parameter::from(param.to_early_bound_region_data()); - lifetimes_in_associated_types.contains(¶m_lt) && // (*) - !input_parameters.contains(¶m_lt) - } ty::GenericParamDefKind::Const { .. } => { let param_ct = ty::ParamConst::for_def(param); !input_parameters.contains(&cgp::Parameter::from(param_ct)) } + ty::GenericParamDefKind::Lifetime => { + // Enforced in `enforce_impl_type_params_are_constrained`. + false + } }; if err { let const_param_note = matches!(param.kind, ty::GenericParamDefKind::Const { .. }); @@ -153,23 +233,4 @@ fn enforce_impl_params_are_constrained( } } res - - // (*) This is a horrible concession to reality. I think it'd be - // better to just ban unconstrained lifetimes outright, but in - // practice people do non-hygienic macros like: - // - // ``` - // macro_rules! __impl_slice_eq1 { - // ($Lhs: ty, $Rhs: ty, $Bound: ident) => { - // impl<'a, 'b, A: $Bound, B> PartialEq<$Rhs> for $Lhs where A: PartialEq { - // .... - // } - // } - // } - // ``` - // - // In a concession to backwards compatibility, we continue to - // permit those, so long as the lifetimes aren't used in - // associated types. I believe this is sound, because lifetimes - // used elsewhere are not projected back out. } diff --git a/compiler/rustc_hir_analysis/src/lib.rs b/compiler/rustc_hir_analysis/src/lib.rs index 87fd4de26a5af..a42a168234f07 100644 --- a/compiler/rustc_hir_analysis/src/lib.rs +++ b/compiler/rustc_hir_analysis/src/lib.rs @@ -128,6 +128,8 @@ pub fn provide(providers: &mut Providers) { hir_wf_check::provide(providers); *providers = Providers { inherit_sig_for_delegation_item: delegation::inherit_sig_for_delegation_item, + enforce_impl_non_lifetime_params_are_constrained: + impl_wf_check::enforce_impl_non_lifetime_params_are_constrained, ..*providers }; } diff --git a/compiler/rustc_hir_analysis/src/variance/constraints.rs b/compiler/rustc_hir_analysis/src/variance/constraints.rs index 415b23d812b5f..e954d2b9ea48e 100644 --- a/compiler/rustc_hir_analysis/src/variance/constraints.rs +++ b/compiler/rustc_hir_analysis/src/variance/constraints.rs @@ -322,6 +322,11 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { self.add_constraints_from_sig(current, sig_tys.with(hdr), variance); } + ty::UnsafeBinder(ty) => { + // FIXME(unsafe_binders): This is covariant, right? 
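For context, a minimal sketch (not taken from this patch or its tests) of the impl shape that the split-out `enforce_impl_non_lifetime_params_are_constrained` check above rejects with E0207: the unused parameter can never be inferred at any use site, which is the situation the comment above notes can otherwise ICE in projection.

```rust
// Sketch of an impl rejected by the non-lifetime constrained-params check (E0207).
struct Wrapper<T>(T);

// `U` appears in neither the self type, a trait reference, nor any predicate,
// so no call site could ever infer it.
impl<T, U> Wrapper<T> {
    fn make(value: T) -> Self {
        Wrapper(value)
    }
}

fn main() {}
```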
+ self.add_constraints_from_ty(current, ty.skip_binder(), variance); + } + ty::Error(_) => { // we encounter this when walking the trait references for object // types, where we use Error as the Self type diff --git a/compiler/rustc_hir_analysis/src/variance/dump.rs b/compiler/rustc_hir_analysis/src/variance/dump.rs index a0fdf95a83173..31f0adf720d79 100644 --- a/compiler/rustc_hir_analysis/src/variance/dump.rs +++ b/compiler/rustc_hir_analysis/src/variance/dump.rs @@ -2,7 +2,7 @@ use std::fmt::Write; use rustc_hir::def_id::{CRATE_DEF_ID, LocalDefId}; use rustc_middle::ty::{GenericArgs, TyCtxt}; -use rustc_span::symbol::sym; +use rustc_span::sym; fn format_variances(tcx: TyCtxt<'_>, def_id: LocalDefId) -> String { let variances = tcx.variances_of(def_id); diff --git a/compiler/rustc_hir_pretty/Cargo.toml b/compiler/rustc_hir_pretty/Cargo.toml index 9af1fb8279e9f..58a12bdda2545 100644 --- a/compiler/rustc_hir_pretty/Cargo.toml +++ b/compiler/rustc_hir_pretty/Cargo.toml @@ -8,6 +8,7 @@ edition = "2021" rustc_abi = { path = "../rustc_abi" } rustc_ast = { path = "../rustc_ast" } rustc_ast_pretty = { path = "../rustc_ast_pretty" } +rustc_attr_parsing = { path = "../rustc_attr_parsing" } rustc_hir = { path = "../rustc_hir" } rustc_span = { path = "../rustc_span" } # tidy-alphabetical-end diff --git a/compiler/rustc_hir_pretty/src/lib.rs b/compiler/rustc_hir_pretty/src/lib.rs index feb483a8bbb1f..66e7a28043d62 100644 --- a/compiler/rustc_hir_pretty/src/lib.rs +++ b/compiler/rustc_hir_pretty/src/lib.rs @@ -11,18 +11,18 @@ use std::vec; use rustc_abi::ExternAbi; use rustc_ast::util::parser::{self, AssocOp, ExprPrecedence, Fixity}; -use rustc_ast::{DUMMY_NODE_ID, DelimArgs}; +use rustc_ast::{AttrStyle, DUMMY_NODE_ID, DelimArgs}; use rustc_ast_pretty::pp::Breaks::{Consistent, Inconsistent}; use rustc_ast_pretty::pp::{self, Breaks}; use rustc_ast_pretty::pprust::state::MacHeader; use rustc_ast_pretty::pprust::{Comments, PrintState}; +use rustc_attr_parsing::AttributeKind; use rustc_hir::{ BindingMode, ByRef, ConstArgKind, GenericArg, GenericBound, GenericParam, GenericParamKind, HirId, LifetimeParamKind, Node, PatKind, PreciseCapturingArg, RangeEnd, Term, }; use rustc_span::source_map::SourceMap; -use rustc_span::symbol::{Ident, Symbol, kw}; -use rustc_span::{FileName, Span}; +use rustc_span::{FileName, Ident, Span, Symbol, kw}; use {rustc_ast as ast, rustc_hir as hir}; pub fn id_to_string(map: &dyn rustc_hir::intravisit::Map<'_>, hir_id: HirId) -> String { @@ -79,65 +79,43 @@ impl<'a> State<'a> { (self.attrs)(id) } - fn print_inner_attributes(&mut self, attrs: &[hir::Attribute]) -> bool { - self.print_either_attributes(attrs, ast::AttrStyle::Inner, false, true) + fn print_attrs_as_inner(&mut self, attrs: &[hir::Attribute]) { + self.print_either_attributes(attrs, ast::AttrStyle::Inner) } - fn print_outer_attributes(&mut self, attrs: &[hir::Attribute]) -> bool { - self.print_either_attributes(attrs, ast::AttrStyle::Outer, false, true) + fn print_attrs_as_outer(&mut self, attrs: &[hir::Attribute]) { + self.print_either_attributes(attrs, ast::AttrStyle::Outer) } - fn print_either_attributes( - &mut self, - attrs: &[hir::Attribute], - kind: ast::AttrStyle, - is_inline: bool, - trailing_hardbreak: bool, - ) -> bool { - let mut printed = false; - for attr in attrs { - if attr.style == kind { - self.print_attribute_inline(attr, is_inline); - if is_inline { - self.nbsp(); - } - printed = true; - } + fn print_either_attributes(&mut self, attrs: &[hir::Attribute], style: ast::AttrStyle) { + if attrs.is_empty() 
{ + return; } - if printed && trailing_hardbreak && !is_inline { - self.hardbreak_if_not_bol(); + + for attr in attrs { + self.print_attribute_inline(attr, style); } - printed + self.hardbreak_if_not_bol(); } - fn print_attribute_inline(&mut self, attr: &hir::Attribute, is_inline: bool) { - if !is_inline { - self.hardbreak_if_not_bol(); - } - self.maybe_print_comment(attr.span.lo()); - match &attr.kind { - hir::AttrKind::Normal(normal) => { - match attr.style { + fn print_attribute_inline(&mut self, attr: &hir::Attribute, style: AttrStyle) { + match &attr { + hir::Attribute::Unparsed(unparsed) => { + self.maybe_print_comment(unparsed.span.lo()); + match style { ast::AttrStyle::Inner => self.word("#!["), ast::AttrStyle::Outer => self.word("#["), } - if normal.unsafety == hir::Safety::Unsafe { - self.word("unsafe("); - } - self.print_attr_item(&normal, attr.span); - if normal.unsafety == hir::Safety::Unsafe { - self.word(")"); - } + self.print_attr_item(&unparsed, unparsed.span); self.word("]"); } - hir::AttrKind::DocComment(comment_kind, data) => { + hir::Attribute::Parsed(AttributeKind::DocComment { style, kind, comment, .. }) => { self.word(rustc_ast_pretty::pprust::state::doc_comment_to_string( - *comment_kind, - attr.style, - *data, + *kind, *style, *comment, )); self.hardbreak() } + _ => unimplemented!("pretty print parsed attributes"), } } @@ -161,7 +139,7 @@ impl<'a> State<'a> { false, None, *delim, - tokens, + &tokens, true, span, ), @@ -200,6 +178,7 @@ impl<'a> State<'a> { Node::OpaqueTy(o) => self.print_opaque_ty(o), Node::Pat(a) => self.print_pat(a), Node::PatField(a) => self.print_patfield(a), + Node::PatExpr(a) => self.print_pat_expr(a), Node::Arm(a) => self.print_arm(a), Node::Infer(_) => self.word("_"), Node::PreciseCapturingNonLifetimeArg(param) => self.print_ident(param.ident), @@ -294,7 +273,7 @@ where } pub fn attribute_to_string(ann: &dyn PpAnn, attr: &hir::Attribute) -> String { - to_string(ann, |s| s.print_attribute_inline(attr, false)) + to_string(ann, |s| s.print_attribute_inline(attr, AttrStyle::Outer)) } pub fn ty_to_string(ann: &dyn PpAnn, ty: &hir::Ty<'_>) -> String { @@ -349,7 +328,7 @@ impl<'a> State<'a> { } fn print_mod(&mut self, _mod: &hir::Mod<'_>, attrs: &[hir::Attribute]) { - self.print_inner_attributes(attrs); + self.print_attrs_as_inner(attrs); for &item_id in _mod.item_ids { self.ann.nested(self, Nested::Item(item_id)); } @@ -403,8 +382,10 @@ impl<'a> State<'a> { } hir::TyKind::Path(ref qpath) => self.print_qpath(qpath, false), hir::TyKind::TraitObject(bounds, lifetime, syntax) => { - if syntax == ast::TraitObjectSyntax::Dyn { - self.word_space("dyn"); + match syntax { + ast::TraitObjectSyntax::Dyn => self.word_nbsp("dyn"), + ast::TraitObjectSyntax::DynStar => self.word_nbsp("dyn*"), + ast::TraitObjectSyntax::None => {} } let mut first = true; for bound in bounds { @@ -463,7 +444,7 @@ impl<'a> State<'a> { fn print_foreign_item(&mut self, item: &hir::ForeignItem<'_>) { self.hardbreak_if_not_bol(); self.maybe_print_comment(item.span.lo()); - self.print_outer_attributes(self.attrs(item.hir_id())); + self.print_attrs_as_outer(self.attrs(item.hir_id())); match item.kind { hir::ForeignItemKind::Fn(sig, arg_names, generics) => { self.head(""); @@ -567,7 +548,7 @@ impl<'a> State<'a> { self.hardbreak_if_not_bol(); self.maybe_print_comment(item.span.lo()); let attrs = self.attrs(item.hir_id()); - self.print_outer_attributes(attrs); + self.print_attrs_as_outer(attrs); self.ann.pre(self, AnnNode::Item(item)); match item.kind { hir::ItemKind::ExternCrate(orig_name) 
=> { @@ -633,7 +614,7 @@ impl<'a> State<'a> { self.word(";"); self.end(); // end the outer cbox } - hir::ItemKind::Fn(ref sig, generics, body) => { + hir::ItemKind::Fn { sig, generics, body, .. } => { self.head(""); self.print_fn( sig.decl, @@ -663,7 +644,7 @@ impl<'a> State<'a> { self.head("extern"); self.word_nbsp(abi.to_string()); self.bopen(); - self.print_inner_attributes(self.attrs(item.hir_id())); + self.print_attrs_as_inner(self.attrs(item.hir_id())); for item in items { self.ann.nested(self, Nested::ForeignItem(item.id)); } @@ -731,7 +712,7 @@ impl<'a> State<'a> { self.space(); self.bopen(); - self.print_inner_attributes(attrs); + self.print_attrs_as_inner(attrs); for impl_item in items { self.ann.nested(self, Nested::ImplItem(impl_item.id)); } @@ -823,7 +804,7 @@ impl<'a> State<'a> { for v in variants { self.space_if_not_bol(); self.maybe_print_comment(v.span.lo()); - self.print_outer_attributes(self.attrs(v.hir_id)); + self.print_attrs_as_outer(self.attrs(v.hir_id)); self.ibox(INDENT_UNIT); self.print_variant(v); self.word(","); @@ -856,7 +837,7 @@ impl<'a> State<'a> { self.popen(); self.commasep(Inconsistent, struct_def.fields(), |s, field| { s.maybe_print_comment(field.span.lo()); - s.print_outer_attributes(s.attrs(field.hir_id)); + s.print_attrs_as_outer(s.attrs(field.hir_id)); s.print_type(field.ty); }); self.pclose(); @@ -883,7 +864,7 @@ impl<'a> State<'a> { for field in fields { self.hardbreak_if_not_bol(); self.maybe_print_comment(field.span.lo()); - self.print_outer_attributes(self.attrs(field.hir_id)); + self.print_attrs_as_outer(self.attrs(field.hir_id)); self.print_ident(field.ident); self.word_nbsp(":"); self.print_type(field.ty); @@ -919,7 +900,7 @@ impl<'a> State<'a> { self.ann.pre(self, AnnNode::SubItem(ti.hir_id())); self.hardbreak_if_not_bol(); self.maybe_print_comment(ti.span.lo()); - self.print_outer_attributes(self.attrs(ti.hir_id())); + self.print_attrs_as_outer(self.attrs(ti.hir_id())); match ti.kind { hir::TraitItemKind::Const(ty, default) => { self.print_associated_const(ti.ident, ti.generics, ty, default); @@ -947,7 +928,7 @@ impl<'a> State<'a> { self.ann.pre(self, AnnNode::SubItem(ii.hir_id())); self.hardbreak_if_not_bol(); self.maybe_print_comment(ii.span.lo()); - self.print_outer_attributes(self.attrs(ii.hir_id())); + self.print_attrs_as_outer(self.attrs(ii.hir_id())); match ii.kind { hir::ImplItemKind::Const(ty, expr) => { @@ -1050,7 +1031,7 @@ impl<'a> State<'a> { self.ann.pre(self, AnnNode::Block(blk)); self.bopen(); - self.print_inner_attributes(attrs); + self.print_attrs_as_inner(attrs); for st in blk.stmts { self.print_stmt(st); @@ -1240,7 +1221,7 @@ impl<'a> State<'a> { self.space(); } self.cbox(INDENT_UNIT); - self.print_outer_attributes(self.attrs(field.hir_id)); + self.print_attrs_as_outer(self.attrs(field.hir_id)); if !field.is_shorthand { self.print_ident(field.ident); self.word_space(":"); @@ -1437,7 +1418,7 @@ impl<'a> State<'a> { fn print_expr(&mut self, expr: &hir::Expr<'_>) { self.maybe_print_comment(expr.span.lo()); - self.print_outer_attributes(self.attrs(expr.hir_id)); + self.print_attrs_as_outer(self.attrs(expr.hir_id)); self.ibox(INDENT_UNIT); self.ann.pre(self, AnnNode::Expr(expr)); match expr.kind { @@ -1653,8 +1634,8 @@ impl<'a> State<'a> { } hir::ExprKind::UnsafeBinderCast(kind, expr, ty) => { match kind { - hir::UnsafeBinderCastKind::Wrap => self.word("wrap_binder!("), - hir::UnsafeBinderCastKind::Unwrap => self.word("unwrap_binder!("), + ast::UnsafeBinderCastKind::Wrap => self.word("wrap_binder!("), + 
ast::UnsafeBinderCastKind::Unwrap => self.word("unwrap_binder!("), } self.print_expr(expr); if let Some(ty) = ty { @@ -1848,6 +1829,19 @@ impl<'a> State<'a> { } } + fn print_pat_expr(&mut self, expr: &hir::PatExpr<'_>) { + match &expr.kind { + hir::PatExprKind::Lit { lit, negated } => { + if *negated { + self.word("-"); + } + self.print_literal(lit); + } + hir::PatExprKind::ConstBlock(c) => self.print_inline_const(c), + hir::PatExprKind::Path(qpath) => self.print_qpath(qpath, true), + } + } + fn print_pat(&mut self, pat: &hir::Pat<'_>) { self.maybe_print_comment(pat.span.lo()); self.ann.pre(self, AnnNode::Pat(pat)); @@ -1965,17 +1959,17 @@ impl<'a> State<'a> { self.pclose(); } } - PatKind::Lit(e) => self.print_expr(e), + PatKind::Expr(e) => self.print_pat_expr(e), PatKind::Range(begin, end, end_kind) => { if let Some(expr) = begin { - self.print_expr(expr); + self.print_pat_expr(expr); } match end_kind { RangeEnd::Included => self.word("..."), RangeEnd::Excluded => self.word(".."), } if let Some(expr) = end { - self.print_expr(expr); + self.print_pat_expr(expr); } } PatKind::Slice(before, slice, after) => { @@ -1998,6 +1992,12 @@ impl<'a> State<'a> { self.commasep(Inconsistent, after, |s, p| s.print_pat(p)); self.word("]"); } + PatKind::Guard(inner, cond) => { + self.print_pat(inner); + self.space(); + self.word_space("if"); + self.print_expr(cond); + } PatKind::Err(_) => { self.popen(); self.word("/*ERROR*/"); @@ -2012,7 +2012,7 @@ impl<'a> State<'a> { self.space(); } self.cbox(INDENT_UNIT); - self.print_outer_attributes(self.attrs(field.hir_id)); + self.print_attrs_as_outer(self.attrs(field.hir_id)); if !field.is_shorthand { self.print_ident(field.ident); self.word_nbsp(":"); @@ -2022,7 +2022,7 @@ impl<'a> State<'a> { } fn print_param(&mut self, arg: &hir::Param<'_>) { - self.print_outer_attributes(self.attrs(arg.hir_id)); + self.print_attrs_as_outer(self.attrs(arg.hir_id)); self.print_pat(arg.pat); } @@ -2035,7 +2035,7 @@ impl<'a> State<'a> { self.cbox(INDENT_UNIT); self.ann.pre(self, AnnNode::Arm(arm)); self.ibox(0); - self.print_outer_attributes(self.attrs(arm.hir_id)); + self.print_attrs_as_outer(self.attrs(arm.hir_id)); self.print_pat(arm.pat); self.space(); if let Some(ref g) = arm.guard { @@ -2386,7 +2386,7 @@ impl<'a> State<'a> { self.print_fn( decl, hir::FnHeader { - safety, + safety: safety.into(), abi, constness: hir::Constness::NotConst, asyncness: hir::IsAsync::NotAsync, @@ -2402,12 +2402,20 @@ impl<'a> State<'a> { fn print_fn_header_info(&mut self, header: hir::FnHeader) { self.print_constness(header.constness); + let safety = match header.safety { + hir::HeaderSafety::SafeTargetFeatures => { + self.word_nbsp("#[target_feature]"); + hir::Safety::Safe + } + hir::HeaderSafety::Normal(safety) => safety, + }; + match header.asyncness { hir::IsAsync::NotAsync => {} hir::IsAsync::Async(_) => self.word_nbsp("async"), } - self.print_safety(header.safety); + self.print_safety(safety); if header.abi != ExternAbi::Rust { self.word_nbsp("extern"); diff --git a/compiler/rustc_hir_typeck/messages.ftl b/compiler/rustc_hir_typeck/messages.ftl index a93da52b2703c..0f424a39840a3 100644 --- a/compiler/rustc_hir_typeck/messages.ftl +++ b/compiler/rustc_hir_typeck/messages.ftl @@ -165,6 +165,8 @@ hir_typeck_remove_semi_for_coerce_ret = the `match` arms can conform to this ret hir_typeck_remove_semi_for_coerce_semi = the `match` is a statement because of this semicolon, consider removing it hir_typeck_remove_semi_for_coerce_suggestion = remove this semicolon 
+hir_typeck_replace_comma_with_semicolon = replace the comma with a semicolon to create {$descr} + hir_typeck_return_stmt_outside_of_fn_body = {$statement_kind} statement outside of function body .encl_body_label = the {$statement_kind} is part of this body... diff --git a/compiler/rustc_hir_typeck/src/_match.rs b/compiler/rustc_hir_typeck/src/_match.rs index 243313ee8767a..3d40c5ee80450 100644 --- a/compiler/rustc_hir_typeck/src/_match.rs +++ b/compiler/rustc_hir_typeck/src/_match.rs @@ -77,12 +77,20 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let mut prior_non_diverging_arms = vec![]; // Used only for diagnostics. let mut prior_arm = None; for arm in arms { + self.diverges.set(Diverges::Maybe); + if let Some(e) = &arm.guard { - self.diverges.set(Diverges::Maybe); self.check_expr_has_type_or_error(e, tcx.types.bool, |_| {}); + + // FIXME: If this is the first arm and the pattern is irrefutable, + // e.g. `_` or `x`, and the guard diverges, then the whole match + // may also be considered to diverge. We should warn on all subsequent + // arms, too, just like we do for diverging scrutinees above. } - self.diverges.set(Diverges::Maybe); + // N.B. We don't reset diverges here b/c we want to warn in the arm + // if the guard diverges, like: `x if { loop {} } => f()`, and we + // also want to consider the arm to diverge itself. let arm_ty = self.check_expr_with_expectation(arm.body, expected); all_arms_diverge &= self.diverges.get(); @@ -383,7 +391,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { if let hir::Node::Block(block) = node { // check that the body's parent is an fn let parent = self.tcx.parent_hir_node(self.tcx.parent_hir_id(block.hir_id)); - if let (Some(expr), hir::Node::Item(hir::Item { kind: hir::ItemKind::Fn(..), .. })) = + if let (Some(expr), hir::Node::Item(hir::Item { kind: hir::ItemKind::Fn { .. }, .. 
})) = (&block.expr, parent) { // check that the `if` expr without `else` is the fn body's expr diff --git a/compiler/rustc_hir_typeck/src/callee.rs b/compiler/rustc_hir_typeck/src/callee.rs index b430f48965a88..28b1a823daea0 100644 --- a/compiler/rustc_hir_typeck/src/callee.rs +++ b/compiler/rustc_hir_typeck/src/callee.rs @@ -7,15 +7,14 @@ use rustc_hir::def_id::DefId; use rustc_hir::{self as hir, HirId, LangItem}; use rustc_hir_analysis::autoderef::Autoderef; use rustc_infer::infer; -use rustc_infer::traits::{self, Obligation, ObligationCause, ObligationCauseCode}; +use rustc_infer::traits::{Obligation, ObligationCause, ObligationCauseCode}; use rustc_middle::ty::adjustment::{ Adjust, Adjustment, AllowTwoPhase, AutoBorrow, AutoBorrowMutability, }; use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt, TypeVisitableExt}; use rustc_middle::{bug, span_bug}; -use rustc_span::Span; use rustc_span::def_id::LocalDefId; -use rustc_span::symbol::{Ident, sym}; +use rustc_span::{Ident, Span, sym}; use rustc_trait_selection::error_reporting::traits::DefIdOrName; use rustc_trait_selection::infer::InferCtxtExt as _; use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt as _; @@ -513,7 +512,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.register_bound( ty, self.tcx.require_lang_item(hir::LangItem::Tuple, Some(sp)), - traits::ObligationCause::new(sp, self.body_id, ObligationCauseCode::RustCall), + self.cause(sp, ObligationCauseCode::RustCall), ); self.require_type_is_sized(ty, sp, ObligationCauseCode::RustCall); } else { @@ -706,7 +705,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { for id in self.tcx.hir().items() { if let Some(node) = self.tcx.hir().get_if_local(id.owner_id.into()) && let hir::Node::Item(item) = node - && let hir::ItemKind::Fn(..) = item.kind + && let hir::ItemKind::Fn { .. } = item.kind && item.ident.name == segment.ident.name { err.span_label( diff --git a/compiler/rustc_hir_typeck/src/cast.rs b/compiler/rustc_hir_typeck/src/cast.rs index 80b91c215980a..7b07e0ee939c5 100644 --- a/compiler/rustc_hir_typeck/src/cast.rs +++ b/compiler/rustc_hir_typeck/src/cast.rs @@ -43,8 +43,7 @@ use rustc_middle::ty::{self, Ty, TyCtxt, TypeAndMut, TypeVisitableExt, VariantDe use rustc_middle::{bug, span_bug}; use rustc_session::lint; use rustc_span::def_id::LOCAL_CRATE; -use rustc_span::symbol::sym; -use rustc_span::{DUMMY_SP, Span}; +use rustc_span::{DUMMY_SP, Span, sym}; use rustc_trait_selection::infer::InferCtxtExt; use tracing::{debug, instrument}; @@ -117,6 +116,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { Some(&f) => self.pointer_kind(f, span)?, }, + ty::UnsafeBinder(_) => todo!("FIXME(unsafe_binder)"), + // Pointers to foreign types are thin, despite being unsized ty::Foreign(..) => Some(PointerKind::Thin), // We should really try to normalize here. @@ -722,13 +723,11 @@ impl<'a, 'tcx> CastCheck<'tcx> { use rustc_middle::ty::cast::IntTy::*; if self.cast_ty.is_dyn_star() { - if fcx.tcx.features().dyn_star() { - span_bug!(self.span, "should be handled by `coerce`"); - } else { - // Report "casting is invalid" rather than "non-primitive cast" - // if the feature is not enabled. - return Err(CastError::IllegalCast); - } + // This coercion will fail if the feature is not enabled, OR + // if the coercion is (currently) illegal (e.g. `dyn* Foo + Send` + // to `dyn* Foo`). Report "casting is invalid" rather than + // "non-primitive cast". 
+ return Err(CastError::IllegalCast); } let (t_from, t_cast) = match (CastTy::from_ty(self.expr_ty), CastTy::from_ty(self.cast_ty)) diff --git a/compiler/rustc_hir_typeck/src/check.rs b/compiler/rustc_hir_typeck/src/check.rs index 51d7864637346..b0698183b5779 100644 --- a/compiler/rustc_hir_typeck/src/check.rs +++ b/compiler/rustc_hir_typeck/src/check.rs @@ -10,7 +10,7 @@ use rustc_infer::infer::RegionVariableOrigin; use rustc_infer::traits::WellFormedLoc; use rustc_middle::ty::{self, Binder, Ty, TyCtxt}; use rustc_span::def_id::LocalDefId; -use rustc_span::symbol::sym; +use rustc_span::sym; use rustc_trait_selection::traits::{ObligationCause, ObligationCauseCode}; use tracing::{debug, instrument}; diff --git a/compiler/rustc_hir_typeck/src/coercion.rs b/compiler/rustc_hir_typeck/src/coercion.rs index 7d7c9331edf00..6945dbc321697 100644 --- a/compiler/rustc_hir_typeck/src/coercion.rs +++ b/compiler/rustc_hir_typeck/src/coercion.rs @@ -38,6 +38,7 @@ use std::ops::Deref; use rustc_abi::ExternAbi; +use rustc_attr_parsing::InlineAttr; use rustc_errors::codes::*; use rustc_errors::{Applicability, Diag, struct_span_code_err}; use rustc_hir as hir; @@ -59,8 +60,7 @@ use rustc_middle::ty::error::TypeError; use rustc_middle::ty::visit::TypeVisitableExt; use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt}; use rustc_session::parse::feature_err; -use rustc_span::symbol::sym; -use rustc_span::{BytePos, DUMMY_SP, DesugaringKind, Span}; +use rustc_span::{BytePos, DUMMY_SP, DesugaringKind, Span, sym}; use rustc_trait_selection::infer::InferCtxtExt as _; use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt; use rustc_trait_selection::traits::{ @@ -581,11 +581,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { let mut selcx = traits::SelectionContext::new(self); // Create an obligation for `Source: CoerceUnsized`. - let cause = - ObligationCause::new(self.cause.span, self.body_id, ObligationCauseCode::Coercion { - source, - target, - }); + let cause = self.cause(self.cause.span, ObligationCauseCode::Coercion { source, target }); // Use a FIFO queue for this custom fulfillment procedure. // @@ -671,7 +667,12 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { // Dyn-compatibility violations or miscellaneous. Err(err) => { - self.err_ctxt().report_selection_error(obligation.clone(), &obligation, &err); + let guar = self.err_ctxt().report_selection_error( + obligation.clone(), + &obligation, + &err, + ); + self.fcx.set_tainted_by_errors(guar); // Treat this like an obligation and follow through // with the unsizing - the lack of a coercion should // be silent, as it causes a type mismatch later. @@ -738,8 +739,10 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { return Err(TypeError::Mismatch); } - if let ty::Dynamic(a_data, _, _) = a.kind() - && let ty::Dynamic(b_data, _, _) = b.kind() + // FIXME(dyn_star): We should probably allow things like casting from + // `dyn* Foo + Send` to `dyn* Foo`. 
+ if let ty::Dynamic(a_data, _, ty::DynStar) = a.kind() + && let ty::Dynamic(b_data, _, ty::DynStar) = b.kind() && a_data.principal_def_id() == b_data.principal_def_id() { return self.unify_and(a, b, |_| vec![]); @@ -917,19 +920,32 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { match b.kind() { ty::FnPtr(_, b_hdr) => { - let a_sig = a.fn_sig(self.tcx); + let mut a_sig = a.fn_sig(self.tcx); if let ty::FnDef(def_id, _) = *a.kind() { // Intrinsics are not coercible to function pointers if self.tcx.intrinsic(def_id).is_some() { return Err(TypeError::IntrinsicCast); } - // Safe `#[target_feature]` functions are not assignable to safe fn pointers (RFC 2396). + let fn_attrs = self.tcx.codegen_fn_attrs(def_id); + if matches!(fn_attrs.inline, InlineAttr::Force { .. }) { + return Err(TypeError::ForceInlineCast); + } if b_hdr.safety.is_safe() - && !self.tcx.codegen_fn_attrs(def_id).target_features.is_empty() + && self.tcx.codegen_fn_attrs(def_id).safe_target_features { - return Err(TypeError::TargetFeatureCast(def_id)); + // Allow the coercion if the current function has all the features that would be + // needed to call the coercee safely. + if let Some(safe_sig) = self.tcx.adjust_target_feature_sig( + def_id, + a_sig, + self.fcx.body_id.into(), + ) { + a_sig = safe_sig; + } else { + return Err(TypeError::TargetFeatureCast(def_id)); + } } } @@ -1195,6 +1211,17 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { return Ok(prev_ty); } + let is_force_inline = |ty: Ty<'tcx>| { + if let ty::FnDef(did, _) = ty.kind() { + matches!(self.tcx.codegen_fn_attrs(did).inline, InlineAttr::Force { .. }) + } else { + false + } + }; + if is_force_inline(prev_ty) || is_force_inline(new_ty) { + return Err(TypeError::ForceInlineCast); + } + // Special-case that coercion alone cannot handle: // Function items or non-capturing closures of differing IDs or GenericArgs. let (a_sig, b_sig) = { @@ -1316,43 +1343,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } - // Then try to coerce the previous expressions to the type of the new one. - // This requires ensuring there are no coercions applied to *any* of the - // previous expressions, other than noop reborrows (ignoring lifetimes). - for expr in exprs { - let expr = expr.as_coercion_site(); - let noop = match self.typeck_results.borrow().expr_adjustments(expr) { - &[ - Adjustment { kind: Adjust::Deref(_), .. }, - Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl_adj)), .. }, - ] => { - match *self.node_ty(expr.hir_id).kind() { - ty::Ref(_, _, mt_orig) => { - let mutbl_adj: hir::Mutability = mutbl_adj.into(); - // Reborrow that we can safely ignore, because - // the next adjustment can only be a Deref - // which will be merged into it. - mutbl_adj == mt_orig - } - _ => false, - } - } - &[Adjustment { kind: Adjust::NeverToAny, .. }] | &[] => true, - _ => false, - }; - - if !noop { - debug!( - "coercion::try_find_coercion_lub: older expression {:?} had adjustments, requiring LUB", - expr, - ); - - return Err(self - .commit_if_ok(|_| self.at(cause, self.param_env).lub(prev_ty, new_ty)) - .unwrap_err()); - } - } - match self.commit_if_ok(|_| coerce.coerce(prev_ty, new_ty)) { Err(_) => { // Avoid giving strange errors on failed attempts. 
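The `#[target_feature]` fn-pointer coercion change above is easiest to see from the user side. A minimal sketch, assuming a nightly toolchain with `target_feature_11` and an x86_64 target; the function names and the exact accepted shape are illustrative, not taken from this patch or its tests.

```rust
#![feature(target_feature_11)]

#[target_feature(enable = "avx2")]
fn provided() {}

#[target_feature(enable = "avx2")]
fn caller() {
    // Intended to be accepted after this change: `caller` already enables every
    // feature `provided` needs, so the resulting *safe* fn pointer can only be
    // obtained from a context that could call `provided` safely anyway.
    let f: fn() = provided;
    f();
}

fn main() {}
```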
diff --git a/compiler/rustc_hir_typeck/src/demand.rs b/compiler/rustc_hir_typeck/src/demand.rs index d0272651c0809..367e7c6de9532 100644 --- a/compiler/rustc_hir_typeck/src/demand.rs +++ b/compiler/rustc_hir_typeck/src/demand.rs @@ -9,8 +9,7 @@ use rustc_middle::ty::error::{ExpectedFound, TypeError}; use rustc_middle::ty::fold::BottomUpFolder; use rustc_middle::ty::print::with_no_trimmed_paths; use rustc_middle::ty::{self, AssocItem, Ty, TypeFoldable, TypeVisitableExt}; -use rustc_span::symbol::sym; -use rustc_span::{DUMMY_SP, Span}; +use rustc_span::{DUMMY_SP, Ident, Span, sym}; use rustc_trait_selection::infer::InferCtxtExt; use rustc_trait_selection::traits::ObligationCause; use tracing::instrument; @@ -31,7 +30,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { if expr_ty == expected { return; } - self.annotate_alternative_method_deref(err, expr, error); self.explain_self_literal(err, expr, expected, expr_ty); @@ -40,6 +38,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { || self.suggest_missing_unwrap_expect(err, expr, expected, expr_ty) || self.suggest_remove_last_method_call(err, expr, expected) || self.suggest_associated_const(err, expr, expected) + || self.suggest_semicolon_in_repeat_expr(err, expr, expr_ty) || self.suggest_deref_ref_or_into(err, expr, expected, expr_ty, expected_ty_expr) || self.suggest_option_to_bool(err, expr, expr_ty, expected) || self.suggest_compatible_variants(err, expr, expected, expr_ty) @@ -86,6 +85,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.annotate_expected_due_to_let_ty(err, expr, error); self.annotate_loop_expected_due_to_inference(err, expr, error); + if self.annotate_mut_binding_to_immutable_binding(err, expr, error) { + return; + } // FIXME(#73154): For now, we do leak check when coercing function // pointers in typeck, instead of only during borrowck. This can lead @@ -796,6 +798,98 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } + /// Detect the following case + /// + /// ```text + /// fn change_object(mut a: &Ty) { + /// let a = Ty::new(); + /// b = a; + /// } + /// ``` + /// + /// where the user likely meant to modify the value behind there reference, use `a` as an out + /// parameter, instead of mutating the local binding. When encountering this we suggest: + /// + /// ```text + /// fn change_object(a: &'_ mut Ty) { + /// let a = Ty::new(); + /// *b = a; + /// } + /// ``` + fn annotate_mut_binding_to_immutable_binding( + &self, + err: &mut Diag<'_>, + expr: &hir::Expr<'_>, + error: Option>, + ) -> bool { + if let Some(TypeError::Sorts(ExpectedFound { expected, found })) = error + && let ty::Ref(_, inner, hir::Mutability::Not) = expected.kind() + + // The difference between the expected and found values is one level of borrowing. + && self.can_eq(self.param_env, *inner, found) + + // We have an `ident = expr;` assignment. + && let hir::Node::Expr(hir::Expr { kind: hir::ExprKind::Assign(lhs, rhs, _), .. }) = + self.tcx.parent_hir_node(expr.hir_id) + && rhs.hir_id == expr.hir_id + + // We are assigning to some binding. + && let hir::ExprKind::Path(hir::QPath::Resolved( + None, + hir::Path { res: hir::def::Res::Local(hir_id), .. }, + )) = lhs.kind + && let hir::Node::Pat(pat) = self.tcx.hir_node(*hir_id) + + // The pattern we have is an fn argument. + && let hir::Node::Param(hir::Param { ty_span, .. }) = + self.tcx.parent_hir_node(pat.hir_id) + && let item = self.tcx.hir().get_parent_item(pat.hir_id) + && let item = self.tcx.hir_owner_node(item) + && let Some(fn_decl) = item.fn_decl() + + // We have a mutable binding in the argument. 
+ && let hir::PatKind::Binding(hir::BindingMode::MUT, _hir_id, ident, _) = pat.kind + + // Look for the type corresponding to the argument pattern we have in the argument list. + && let Some(ty_sugg) = fn_decl + .inputs + .iter() + .filter_map(|ty| { + if ty.span == *ty_span + && let hir::TyKind::Ref(lt, x) = ty.kind + { + // `&'name Ty` -> `&'name mut Ty` or `&Ty` -> `&mut Ty` + Some(( + x.ty.span.shrink_to_lo(), + format!( + "{}mut ", + if lt.ident.span.lo() == lt.ident.span.hi() { "" } else { " " } + ), + )) + } else { + None + } + }) + .next() + { + let sugg = vec![ + ty_sugg, + (pat.span.until(ident.span), String::new()), + (lhs.span.shrink_to_lo(), "*".to_string()), + ]; + // We suggest changing the argument from `mut ident: &Ty` to `ident: &'_ mut Ty` and the + // assignment from `ident = val;` to `*ident = val;`. + err.multipart_suggestion_verbose( + "you might have meant to mutate the pointed at value being passed in, instead of \ + changing the reference in the local binding", + sugg, + Applicability::MaybeIncorrect, + ); + return true; + } + false + } + fn annotate_alternative_method_deref( &self, err: &mut Diag<'_>, @@ -1118,7 +1212,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { Constructor, } let mut maybe_emit_help = |def_id: hir::def_id::DefId, - callable: rustc_span::symbol::Ident, + callable: Ident, args: &[hir::Expr<'_>], kind: CallableKind| { let arg_idx = args.iter().position(|a| a.hir_id == expr.hir_id).unwrap(); diff --git a/compiler/rustc_hir_typeck/src/errors.rs b/compiler/rustc_hir_typeck/src/errors.rs index 7746a5a7132f8..4eed2bc12388d 100644 --- a/compiler/rustc_hir_typeck/src/errors.rs +++ b/compiler/rustc_hir_typeck/src/errors.rs @@ -10,8 +10,7 @@ use rustc_errors::{ use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic}; use rustc_middle::ty::{self, Ty}; use rustc_span::edition::{Edition, LATEST_STABLE_EDITION}; -use rustc_span::symbol::Ident; -use rustc_span::{Span, Symbol}; +use rustc_span::{Ident, Span, Symbol}; use crate::fluent_generated as fluent; @@ -847,3 +846,16 @@ pub(crate) struct PassFnItemToVariadicFunction { pub sugg_span: Span, pub replace: String, } + +#[derive(Subdiagnostic)] +#[suggestion( + hir_typeck_replace_comma_with_semicolon, + applicability = "machine-applicable", + style = "verbose", + code = "; " +)] +pub(crate) struct ReplaceCommaWithSemicolon { + #[primary_span] + pub comma_span: Span, + pub descr: &'static str, +} diff --git a/compiler/rustc_hir_typeck/src/expr.rs b/compiler/rustc_hir_typeck/src/expr.rs index 66978399efb53..0b1310df6cf15 100644 --- a/compiler/rustc_hir_typeck/src/expr.rs +++ b/compiler/rustc_hir_typeck/src/expr.rs @@ -22,7 +22,6 @@ use rustc_hir::{ExprKind, HirId, QPath}; use rustc_hir_analysis::hir_ty_lowering::{FeedConstTy, HirTyLowerer as _}; use rustc_infer::infer; use rustc_infer::infer::{DefineOpaqueTypes, InferOk}; -use rustc_infer::traits::ObligationCause; use rustc_infer::traits::query::NoSolution; use rustc_middle::ty::adjustment::{Adjust, Adjustment, AllowTwoPhase}; use rustc_middle::ty::error::{ExpectedFound, TypeError}; @@ -30,11 +29,10 @@ use rustc_middle::ty::{self, AdtKind, GenericArgsRef, Ty, TypeVisitableExt}; use rustc_middle::{bug, span_bug}; use rustc_session::errors::ExprParenthesesNeeded; use rustc_session::parse::feature_err; -use rustc_span::Span; use rustc_span::edit_distance::find_best_match_for_name; use rustc_span::hygiene::DesugaringKind; use rustc_span::source_map::Spanned; -use rustc_span::symbol::{Ident, Symbol, kw, sym}; +use rustc_span::{Ident, Span, Symbol, kw, sym}; use 
rustc_trait_selection::infer::InferCtxtExt; use rustc_trait_selection::traits::{self, ObligationCauseCode, ObligationCtxt}; use tracing::{debug, instrument, trace}; @@ -432,6 +430,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { | hir::Node::AssocItemConstraint(_) | hir::Node::TraitRef(_) | hir::Node::PatField(_) + | hir::Node::PatExpr(_) | hir::Node::LetStmt(_) | hir::Node::Synthetic | hir::Node::Err(_) @@ -458,6 +457,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // Does not constitute a read. hir::PatKind::Wild => false, + // Might not constitute a read, since the condition might be false. + hir::PatKind::Guard(_, _) => true, + // This is unnecessarily restrictive when the pattern that doesn't // constitute a read is unreachable. // @@ -483,7 +485,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { | hir::PatKind::Box(_) | hir::PatKind::Ref(_, _) | hir::PatKind::Deref(_) - | hir::PatKind::Lit(_) + | hir::PatKind::Expr(_) | hir::PatKind::Range(_, _, _) | hir::PatKind::Slice(_, _, _) | hir::PatKind::Err(_) => true, @@ -575,8 +577,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.check_expr_index(base, idx, expr, brackets_span) } ExprKind::Yield(value, _) => self.check_expr_yield(value, expr), - ExprKind::UnsafeBinderCast(kind, expr, ty) => { - self.check_expr_unsafe_binder_cast(kind, expr, ty, expected) + ExprKind::UnsafeBinderCast(kind, inner_expr, ty) => { + self.check_expr_unsafe_binder_cast(expr.span, kind, inner_expr, ty, expected) } ExprKind::Err(guar) => Ty::new_error(tcx, guar), } @@ -836,7 +838,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // We always require that the type provided as the value for // a type parameter outlives the moment of instantiation. let args = self.typeck_results.borrow().node_args(expr.hir_id); - self.add_wf_bounds(args, expr); + self.add_wf_bounds(args, expr.span); ty } @@ -1129,7 +1131,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let encl_item_id = self.tcx.hir().get_parent_item(expr.hir_id); if let hir::Node::Item(hir::Item { - kind: hir::ItemKind::Fn(..), span: encl_fn_span, .. + kind: hir::ItemKind::Fn { .. }, + span: encl_fn_span, + .. 
}) | hir::Node::TraitItem(hir::TraitItem { kind: hir::TraitItemKind::Fn(_, hir::TraitFn::Provided(_)), @@ -1175,9 +1179,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { for err in errors { let cause = &mut err.obligation.cause; if let ObligationCauseCode::OpaqueReturnType(None) = cause.code() { - let new_cause = ObligationCause::new( + let new_cause = self.cause( cause.span, - cause.body_id, ObligationCauseCode::OpaqueReturnType(Some((return_expr_ty, hir_id))), ); *cause = new_cause; @@ -1650,14 +1653,94 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { fn check_expr_unsafe_binder_cast( &self, - _kind: hir::UnsafeBinderCastKind, - expr: &'tcx hir::Expr<'tcx>, - _hir_ty: Option<&'tcx hir::Ty<'tcx>>, - _expected: Expectation<'tcx>, + span: Span, + kind: ast::UnsafeBinderCastKind, + inner_expr: &'tcx hir::Expr<'tcx>, + hir_ty: Option<&'tcx hir::Ty<'tcx>>, + expected: Expectation<'tcx>, ) -> Ty<'tcx> { - let guar = - self.dcx().struct_span_err(expr.span, "unsafe binders are not yet implemented").emit(); - Ty::new_error(self.tcx, guar) + self.dcx().span_err(inner_expr.span, "unsafe binder casts are not fully implemented"); + + match kind { + ast::UnsafeBinderCastKind::Wrap => { + let ascribed_ty = + hir_ty.map(|hir_ty| self.lower_ty_saving_user_provided_ty(hir_ty)); + let expected_ty = expected.only_has_type(self); + let binder_ty = match (ascribed_ty, expected_ty) { + (Some(ascribed_ty), Some(expected_ty)) => { + self.demand_eqtype(inner_expr.span, expected_ty, ascribed_ty); + expected_ty + } + (Some(ty), None) | (None, Some(ty)) => ty, + // This will always cause a structural resolve error, but we do it + // so we don't need to manually report an E0282 both on this codepath + // and in the others; it all happens in `structurally_resolve_type`. + (None, None) => self.next_ty_var(inner_expr.span), + }; + + let binder_ty = self.structurally_resolve_type(inner_expr.span, binder_ty); + let hint_ty = match *binder_ty.kind() { + ty::UnsafeBinder(binder) => self.instantiate_binder_with_fresh_vars( + inner_expr.span, + infer::BoundRegionConversionTime::HigherRankedType, + binder.into(), + ), + ty::Error(e) => Ty::new_error(self.tcx, e), + _ => { + let guar = self + .dcx() + .struct_span_err( + hir_ty.map_or(span, |hir_ty| hir_ty.span), + format!( + "`wrap_binder!()` can only wrap into unsafe binder, not {}", + binder_ty.sort_string(self.tcx) + ), + ) + .with_note("unsafe binders are the only valid output of wrap") + .emit(); + Ty::new_error(self.tcx, guar) + } + }; + + self.check_expr_has_type_or_error(inner_expr, hint_ty, |_| {}); + + binder_ty + } + ast::UnsafeBinderCastKind::Unwrap => { + let ascribed_ty = + hir_ty.map(|hir_ty| self.lower_ty_saving_user_provided_ty(hir_ty)); + let hint_ty = ascribed_ty.unwrap_or_else(|| self.next_ty_var(inner_expr.span)); + // FIXME(unsafe_binders): coerce here if needed? + let binder_ty = self.check_expr_has_type_or_error(inner_expr, hint_ty, |_| {}); + + // Unwrap the binder. This will be ambiguous if it's an infer var, and will error + // if it's not an unsafe binder. 
+ let binder_ty = self.structurally_resolve_type(inner_expr.span, binder_ty); + match *binder_ty.kind() { + ty::UnsafeBinder(binder) => self.instantiate_binder_with_fresh_vars( + inner_expr.span, + infer::BoundRegionConversionTime::HigherRankedType, + binder.into(), + ), + ty::Error(e) => Ty::new_error(self.tcx, e), + _ => { + let guar = self + .dcx() + .struct_span_err( + hir_ty.map_or(inner_expr.span, |hir_ty| hir_ty.span), + format!( + "expected unsafe binder, found {} as input of \ + `unwrap_binder!()`", + binder_ty.sort_string(self.tcx) + ), + ) + .with_note("only an unsafe binder type can be unwrapped") + .emit(); + Ty::new_error(self.tcx, guar) + } + } + } + } } fn check_expr_array( @@ -1694,12 +1777,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let parent_node = self.tcx.hir().parent_iter(expr.hir_id).find(|(_, node)| { !matches!(node, hir::Node::Expr(hir::Expr { kind: hir::ExprKind::AddrOf(..), .. })) }); - let Some(( - _, - hir::Node::LetStmt(hir::LetStmt { ty: Some(ty), .. }) - | hir::Node::Item(hir::Item { kind: hir::ItemKind::Const(ty, _, _), .. }), - )) = parent_node - else { + let Some((_, hir::Node::LetStmt(hir::LetStmt { ty: Some(ty), .. }))) = parent_node else { return; }; if let hir::TyKind::Array(_, ct) = ty.peel_refs().kind { @@ -1719,7 +1797,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } - fn check_expr_const_block( + pub(super) fn check_expr_const_block( &self, block: &'tcx hir::ConstBlock, expected: Expectation<'tcx>, @@ -1830,21 +1908,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { }; let lang_item = self.tcx.require_lang_item(LangItem::Copy, None); - let code = traits::ObligationCauseCode::RepeatElementCopy { - is_constable, - elt_type: element_ty, - elt_span: element.span, - elt_stmt_span: self - .tcx - .hir() - .parent_iter(element.hir_id) - .find_map(|(_, node)| match node { - hir::Node::Item(it) => Some(it.span), - hir::Node::Stmt(stmt) => Some(stmt.span), - _ => None, - }) - .expect("array repeat expressions must be inside an item or statement"), - }; + let code = + traits::ObligationCauseCode::RepeatElementCopy { is_constable, elt_span: element.span }; self.require_type_meets(element_ty, element.span, code, lang_item); } @@ -2721,12 +2786,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// Check field access expressions, this works for both structs and tuples. /// Returns the Ty of the field. /// - /// ```not_rust - /// base.field - /// ^^^^^^^^^^ expr - /// ^^^^ base - /// ^^^^^ field - /// ``` + /// ```ignore (illustrative) + /// base.field + /// ^^^^^^^^^^ expr + /// ^^^^ base + /// ^^^^^ field + /// ``` fn check_expr_field( &self, expr: &'tcx hir::Expr<'tcx>, @@ -3777,7 +3842,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // Enums are anyway always sized. But just to safeguard against future // language extensions, let's double-check. 
- self.require_type_is_sized(field_ty, expr.span, ObligationCauseCode::Misc); + self.require_type_is_sized( + field_ty, + expr.span, + ObligationCauseCode::FieldSized { + adt_kind: AdtKind::Enum, + span: self.tcx.def_span(field.did), + last: false, + }, + ); if field.vis.is_accessible_from(sub_def_scope, self.tcx) { self.tcx.check_stability(field.did, Some(expr.hir_id), expr.span, None); @@ -3805,11 +3878,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let field_ty = self.field_ty(expr.span, field, args); if self.tcx.features().offset_of_slice() { - self.require_type_has_static_alignment( - field_ty, - expr.span, - ObligationCauseCode::Misc, - ); + self.require_type_has_static_alignment(field_ty, expr.span); } else { self.require_type_is_sized( field_ty, @@ -3838,11 +3907,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { { if let Some(&field_ty) = tys.get(index) { if self.tcx.features().offset_of_slice() { - self.require_type_has_static_alignment( - field_ty, - expr.span, - ObligationCauseCode::Misc, - ); + self.require_type_has_static_alignment(field_ty, expr.span); } else { self.require_type_is_sized( field_ty, diff --git a/compiler/rustc_hir_typeck/src/expr_use_visitor.rs b/compiler/rustc_hir_typeck/src/expr_use_visitor.rs index ecbae6ac72fa2..1f48b703e4ac8 100644 --- a/compiler/rustc_hir_typeck/src/expr_use_visitor.rs +++ b/compiler/rustc_hir_typeck/src/expr_use_visitor.rs @@ -595,7 +595,7 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx let place_ty = place.place.ty(); needs_to_be_read |= self.is_multivariant_adt(place_ty, pat.span); } - PatKind::Lit(_) | PatKind::Range(..) => { + PatKind::Expr(_) | PatKind::Range(..) => { // If the PatKind is a Lit or a Range then we want // to borrow discr. needs_to_be_read = true; @@ -615,6 +615,7 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx | PatKind::Box(_) | PatKind::Deref(_) | PatKind::Ref(..) + | PatKind::Guard(..) | PatKind::Wild | PatKind::Err(_) => { // If the PatKind is Or, Box, or Ref, the decision is made later @@ -1737,7 +1738,7 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx } } - PatKind::Binding(.., Some(subpat)) => { + PatKind::Binding(.., Some(subpat)) | PatKind::Guard(subpat, _) => { self.cat_pattern(place_with_id, subpat, op)?; } @@ -1802,7 +1803,7 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx PatKind::Path(_) | PatKind::Binding(.., None) - | PatKind::Lit(..) + | PatKind::Expr(..) | PatKind::Range(..) 
| PatKind::Never | PatKind::Wild diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs index 0dacfc9b7bf90..be6d9570e35b4 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs @@ -3,7 +3,7 @@ use std::slice; use rustc_abi::FieldIdx; use rustc_data_structures::fx::FxHashSet; -use rustc_errors::{Applicability, Diag, ErrorGuaranteed, MultiSpan, StashKey}; +use rustc_errors::{Applicability, Diag, ErrorGuaranteed, MultiSpan}; use rustc_hir::def::{CtorOf, DefKind, Res}; use rustc_hir::def_id::DefId; use rustc_hir::intravisit::Visitor; @@ -29,10 +29,9 @@ use rustc_middle::ty::{ }; use rustc_middle::{bug, span_bug}; use rustc_session::lint; -use rustc_span::Span; use rustc_span::def_id::LocalDefId; use rustc_span::hygiene::DesugaringKind; -use rustc_span::symbol::kw; +use rustc_span::{Span, kw}; use rustc_trait_selection::error_reporting::infer::need_type_info::TypeAnnotationNeeded; use rustc_trait_selection::traits::{ self, NormalizeExt, ObligationCauseCode, StructurallyNormalizeExt, @@ -147,18 +146,21 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { debug!("write_ty({:?}, {:?}) in fcx {}", id, self.resolve_vars_if_possible(ty), self.tag()); let mut typeck = self.typeck_results.borrow_mut(); let mut node_ty = typeck.node_types_mut(); - if let Some(ty) = node_ty.get(id) - && let Err(e) = ty.error_reported() - { - // Do not overwrite nodes that were already marked as `{type error}`. This allows us to - // silence unnecessary errors from obligations that were set earlier than a type error - // was produced, but that is overwritten by later analysis. This happens in particular - // for `Sized` obligations introduced in gather_locals. (#117846) - self.set_tainted_by_errors(e); - return; - } - node_ty.insert(id, ty); + if let Some(prev) = node_ty.insert(id, ty) { + if prev.references_error() { + node_ty.insert(id, prev); + } else if !ty.references_error() { + // Could change this to a bug, but there's lots of diagnostic code re-lowering + // or re-typechecking nodes that were already typecked. + // Lots of that diagnostics code relies on subtle effects of re-lowering, so we'll + // let it keep doing that and just ensure that compilation won't succeed. + self.dcx().span_delayed_bug( + self.tcx.hir().span(id), + format!("`{prev}` overridden by `{ty}` for {id:?} in {:?}", self.body_id), + ); + } + } if let Err(e) = ty.error_reported() { self.set_tainted_by_errors(e); @@ -382,7 +384,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { code: traits::ObligationCauseCode<'tcx>, def_id: DefId, ) { - self.register_bound(ty, def_id, traits::ObligationCause::new(span, self.body_id, code)); + self.register_bound(ty, def_id, self.cause(span, code)); } pub(crate) fn require_type_is_sized( @@ -408,12 +410,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } - pub(crate) fn require_type_has_static_alignment( - &self, - ty: Ty<'tcx>, - span: Span, - code: traits::ObligationCauseCode<'tcx>, - ) { + pub(crate) fn require_type_has_static_alignment(&self, ty: Ty<'tcx>, span: Span) { if !ty.references_error() { let tail = self.tcx.struct_tail_raw( ty, @@ -432,7 +429,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } else { // We can't be sure, let's required full `Sized`. 
let lang_item = self.tcx.require_lang_item(LangItem::Sized, None); - self.require_type_meets(ty, span, code, lang_item); + self.require_type_meets(ty, span, ObligationCauseCode::Misc, lang_item); } } } @@ -570,7 +567,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { code: traits::ObligationCauseCode<'tcx>, ) { // WF obligations never themselves fail, so no real need to give a detailed cause: - let cause = traits::ObligationCause::new(span, self.body_id, code); + let cause = self.cause(span, code); self.register_predicate(traits::Obligation::new( self.tcx, cause, @@ -580,11 +577,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } /// Registers obligations that all `args` are well-formed. - pub(crate) fn add_wf_bounds(&self, args: GenericArgsRef<'tcx>, expr: &hir::Expr<'_>) { + pub(crate) fn add_wf_bounds(&self, args: GenericArgsRef<'tcx>, span: Span) { for arg in args.iter().filter(|arg| { matches!(arg.unpack(), GenericArgKind::Type(..) | GenericArgKind::Const(..)) }) { - self.register_wf_obligation(arg, expr.span, ObligationCauseCode::WellFormed(None)); + self.register_wf_obligation(arg, span, ObligationCauseCode::WellFormed(None)); } } @@ -809,17 +806,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let item_name = item_segment.ident; let result = self .resolve_fully_qualified_call(span, item_name, ty.normalized, qself.span, hir_id) - .map(|r| { - // lint bare trait if the method is found in the trait - if span.edition().at_least_rust_2021() { - self.dcx().try_steal_modify_and_emit_err( - qself.span, - StashKey::TraitMissingMethod, - |_err| {}, - ); - } - r - }) .or_else(|error| { let guar = self .dcx() @@ -843,17 +829,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ); } - // Emit the diagnostic for bare traits. (We used to cancel for slightly better - // error messages, but cancelling stashed diagnostics is no longer allowed because - // it causes problems when tracking whether errors have actually occurred.) - if span.edition().at_least_rust_2021() { - self.dcx().try_steal_modify_and_emit_err( - qself.span, - StashKey::TraitMissingMethod, - |_err| {}, - ); - } - if item_name.name != kw::Empty { self.report_method_error( hir_id, @@ -894,7 +869,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.tcx.hir().get_fn_id_for_return_block(blk_id).and_then(|item_id| { match self.tcx.hir_node(item_id) { Node::Item(&hir::Item { - kind: hir::ItemKind::Fn(ref sig, ..), owner_id, .. + kind: hir::ItemKind::Fn { sig, .. }, owner_id, .. }) => Some((owner_id.def_id, sig.decl)), Node::TraitItem(&hir::TraitItem { kind: hir::TraitItemKind::Fn(ref sig, ..), @@ -923,7 +898,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { )) => { let (sig, owner_id) = match self.tcx.parent_hir_node(hir_id) { Node::Item(&hir::Item { - kind: hir::ItemKind::Fn(ref sig, ..), + kind: hir::ItemKind::Fn { ref sig, .. }, owner_id, .. 
}) => (sig, owner_id), @@ -1042,6 +1017,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { def_id, span, ), + Res::Err => { + return ( + Ty::new_error( + tcx, + tcx.dcx().span_delayed_bug(span, "could not resolve path {:?}"), + ), + res, + ); + } _ => bug!("instantiate_value_path on {:?}", res), }; @@ -1105,7 +1089,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { if let Res::Local(hid) = res { let ty = self.local_ty(span, hid); let ty = self.normalize(span, ty); - self.write_ty(hir_id, ty); return (ty, res); } @@ -1425,9 +1408,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let bounds = self.instantiate_bounds(span, def_id, args); for obligation in traits::predicates_for_generics( - |idx, predicate_span| { - traits::ObligationCause::new(span, self.body_id, code(idx, predicate_span)) - }, + |idx, predicate_span| self.cause(span, code(idx, predicate_span)), param_env, bounds, ) { @@ -1560,7 +1541,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { query_result: &Canonical<'tcx, QueryResponse<'tcx, Ty<'tcx>>>, ) -> InferResult<'tcx, Ty<'tcx>> { self.instantiate_query_response_and_region_obligations( - &traits::ObligationCause::misc(span, self.body_id), + &self.misc(span), self.param_env, original_values, query_result, diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/adjust_fulfillment_errors.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/adjust_fulfillment_errors.rs index 6eaba641888ca..ee0436f73e1de 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/adjust_fulfillment_errors.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/adjust_fulfillment_errors.rs @@ -5,8 +5,7 @@ use rustc_hir::def::{DefKind, Res}; use rustc_hir::def_id::DefId; use rustc_infer::traits::ObligationCauseCode; use rustc_middle::ty::{self, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitor}; -use rustc_span::Span; -use rustc_span::symbol::kw; +use rustc_span::{Span, kw}; use rustc_trait_selection::traits; use crate::FnCtxt; diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs index 6b1cceefbeea1..46eed2db2364f 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs @@ -22,8 +22,7 @@ use rustc_middle::ty::visit::TypeVisitableExt; use rustc_middle::ty::{self, IsSuggestable, Ty, TyCtxt}; use rustc_middle::{bug, span_bug}; use rustc_session::Session; -use rustc_span::symbol::{Ident, kw}; -use rustc_span::{DUMMY_SP, Span, sym}; +use rustc_span::{DUMMY_SP, Ident, Span, kw, sym}; use rustc_trait_selection::error_reporting::infer::{FailureCode, ObligationCauseExt}; use rustc_trait_selection::infer::InferCtxtExt; use rustc_trait_selection::traits::{self, ObligationCauseCode, ObligationCtxt, SelectionContext}; @@ -208,7 +207,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.register_wf_obligation( fn_input_ty.into(), arg_expr.span, - ObligationCauseCode::Misc, + ObligationCauseCode::WellFormed(None), ); } @@ -1751,10 +1750,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } - pub(in super::super) fn check_decl(&self, decl: Declaration<'tcx>) { + pub(in super::super) fn check_decl(&self, decl: Declaration<'tcx>) -> Ty<'tcx> { // Determine and write the type which we'll check the pattern against. let decl_ty = self.local_ty(decl.span, decl.hir_id); - self.write_ty(decl.hir_id, decl_ty); // Type check the initializer. if let Some(ref init) = decl.init { @@ -1786,11 +1784,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } self.diverges.set(previous_diverges); } + decl_ty } /// Type check a `let` statement. 
fn check_decl_local(&self, local: &'tcx hir::LetStmt<'tcx>) { - self.check_decl(local.into()); + let ty = self.check_decl(local.into()); + self.write_ty(local.hir_id, ty); if local.pat.is_never_pattern() { self.diverges.set(Diverges::Always { span: local.pat.span, @@ -2041,7 +2041,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { fn parent_item_span(&self, id: HirId) -> Option { let node = self.tcx.hir_node_by_def_id(self.tcx.hir().get_parent_item(id).def_id); match node { - Node::Item(&hir::Item { kind: hir::ItemKind::Fn(_, _, body_id), .. }) + Node::Item(&hir::Item { kind: hir::ItemKind::Fn { body: body_id, .. }, .. }) | Node::ImplItem(&hir::ImplItem { kind: hir::ImplItemKind::Fn(_, body_id), .. }) => { let body = self.tcx.hir().body(body_id); if let ExprKind::Block(block, _) = &body.value.kind { @@ -2189,7 +2189,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } hir::Node::Item(item) => { - if let hir::ItemKind::Fn(..) = item.kind { + if let hir::ItemKind::Fn { .. } = item.kind { break; } } @@ -2460,16 +2460,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { spans.push_span_label( param.span, format!( - "{} {} to match the {} type of this parameter", + "{} need{} to match the {} type of this parameter", display_list_with_comma_and(&other_param_matched_names), - format!( - "need{}", - pluralize!(if other_param_matched_names.len() == 1 { - 0 - } else { - 1 - }) - ), + pluralize!(if other_param_matched_names.len() == 1 { + 0 + } else { + 1 + }), matched_ty, ), ); diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs index b9011e89f04ef..d432199f03735 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs @@ -17,8 +17,7 @@ use rustc_infer::infer; use rustc_infer::traits::Obligation; use rustc_middle::ty::{self, Const, Ty, TyCtxt, TypeVisitableExt}; use rustc_session::Session; -use rustc_span::symbol::Ident; -use rustc_span::{self, DUMMY_SP, Span, sym}; +use rustc_span::{self, DUMMY_SP, Ident, Span, sym}; use rustc_trait_selection::error_reporting::TypeErrCtxt; use rustc_trait_selection::error_reporting::infer::sub_relations::SubRelations; use rustc_trait_selection::traits::{ObligationCause, ObligationCauseCode, ObligationCtxt}; diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs index 21b1768ae6f4d..53e055fdeef44 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs @@ -24,8 +24,7 @@ use rustc_middle::ty::{ }; use rustc_session::errors::ExprParenthesesNeeded; use rustc_span::source_map::Spanned; -use rustc_span::symbol::{Ident, sym}; -use rustc_span::{Span, Symbol}; +use rustc_span::{Ident, Span, Symbol, sym}; use rustc_trait_selection::error_reporting::InferCtxtErrorExt; use rustc_trait_selection::error_reporting::traits::DefIdOrName; use rustc_trait_selection::infer::InferCtxtExt; @@ -982,14 +981,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let hir::Node::Item(hir::Item { kind: - hir::ItemKind::Fn( - hir::FnSig { - decl: hir::FnDecl { inputs: fn_parameters, output: fn_return, .. }, - .. - }, - hir::Generics { params, predicates, .. }, - _body_id, - ), + hir::ItemKind::Fn { + sig: + hir::FnSig { + decl: hir::FnDecl { inputs: fn_parameters, output: fn_return, .. }, + .. + }, + generics: hir::Generics { params, predicates, .. }, + .. + }, .. 
}) = fn_node else { @@ -1320,14 +1320,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let span = expr.span.shrink_to_hi(); let subdiag = if self.type_is_copy_modulo_regions(self.param_env, ty) { errors::OptionResultRefMismatch::Copied { span, def_path } - } else if let Some(clone_did) = self.tcx.lang_items().clone_trait() - && rustc_trait_selection::traits::type_known_to_meet_bound_modulo_regions( - self, - self.param_env, - ty, - clone_did, - ) - { + } else if self.type_is_clone_modulo_regions(self.param_env, ty) { errors::OptionResultRefMismatch::Cloned { span, def_path } } else { return false; @@ -2182,6 +2175,87 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } + /// Suggest replacing comma with semicolon in incorrect repeat expressions + /// like `["_", 10]` or `vec![String::new(), 10]`. + pub(crate) fn suggest_semicolon_in_repeat_expr( + &self, + err: &mut Diag<'_>, + expr: &hir::Expr<'_>, + expr_ty: Ty<'tcx>, + ) -> bool { + // Check if `expr` is contained in array of two elements + if let hir::Node::Expr(array_expr) = self.tcx.parent_hir_node(expr.hir_id) + && let hir::ExprKind::Array(elements) = array_expr.kind + && let [first, second] = &elements[..] + && second.hir_id == expr.hir_id + { + // Span between the two elements of the array + let comma_span = first.span.between(second.span); + + // Check if `expr` is a constant value of type `usize`. + // This can only detect const variable declarations and + // calls to const functions. + + // Checking this here instead of rustc_hir::hir because + // this check needs access to `self.tcx` but rustc_hir + // has no access to `TyCtxt`. + let expr_is_const_usize = expr_ty.is_usize() + && match expr.kind { + ExprKind::Path(QPath::Resolved( + None, + Path { res: Res::Def(DefKind::Const, _), .. }, + )) => true, + ExprKind::Call( + Expr { + kind: + ExprKind::Path(QPath::Resolved( + None, + Path { res: Res::Def(DefKind::Fn, fn_def_id), .. }, + )), + .. + }, + _, + ) => self.tcx.is_const_fn(*fn_def_id), + _ => false, + }; + + // Type of the first element is guaranteed to be checked + // when execution reaches here because `mismatched types` + // error occurs only when type of second element of array + // is not the same as type of first element. + let first_ty = self.typeck_results.borrow().expr_ty(first); + + // `array_expr` is from a macro `vec!["a", 10]` if + // 1. array expression's span is imported from a macro + // 2. first element of array implements `Clone` trait + // 3. second element is an integer literal or is an expression of `usize` like type + if self.tcx.sess.source_map().is_imported(array_expr.span) + && self.type_is_clone_modulo_regions(self.param_env, first_ty) + && (expr.is_size_lit() || expr_ty.is_usize_like()) + { + err.subdiagnostic(errors::ReplaceCommaWithSemicolon { + comma_span, + descr: "a vector", + }); + return true; + } + + // `array_expr` is from an array `["a", 10]` if + // 1. first element of array implements `Copy` trait + // 2. second element is an integer literal or is a const value of type `usize` + if self.type_is_copy_modulo_regions(self.param_env, first_ty) + && (expr.is_size_lit() || expr_is_const_usize) + { + err.subdiagnostic(errors::ReplaceCommaWithSemicolon { + comma_span, + descr: "an array", + }); + return true; + } + } + false + } + /// If the expected type is an enum (Issue #55250) with any variants whose /// sole field is of the found type, suggest such variants. 
(Issue #42764) pub(crate) fn suggest_compatible_variants( @@ -2608,6 +2682,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { if let hir::ExprKind::Unary(hir::UnOp::Deref, inner) = expr.kind && let Some(1) = self.deref_steps_for_suggestion(expected, checked_ty) + && self.typeck_results.borrow().expr_ty(inner).is_ref() { // We have `*&T`, check if what was expected was `&T`. // If so, we may want to suggest removing a `*`. diff --git a/compiler/rustc_hir_typeck/src/intrinsicck.rs b/compiler/rustc_hir_typeck/src/intrinsicck.rs index 789530d35dd81..f4929aae59901 100644 --- a/compiler/rustc_hir_typeck/src/intrinsicck.rs +++ b/compiler/rustc_hir_typeck/src/intrinsicck.rs @@ -40,13 +40,25 @@ fn unpack_option_like<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { } impl<'a, 'tcx> FnCtxt<'a, 'tcx> { + /// FIXME: Move this check out of typeck, since it'll easily cycle when revealing opaques, + /// and we shouldn't need to check anything here if the typeck results are tainted. pub(crate) fn check_transmute(&self, from: Ty<'tcx>, to: Ty<'tcx>, hir_id: HirId) { let tcx = self.tcx; let dl = &tcx.data_layout; let span = tcx.hir().span(hir_id); let normalize = |ty| { let ty = self.resolve_vars_if_possible(ty); - self.tcx.normalize_erasing_regions(self.typing_env(self.param_env), ty) + if let Ok(ty) = + self.tcx.try_normalize_erasing_regions(self.typing_env(self.param_env), ty) + { + ty + } else { + Ty::new_error_with_message( + tcx, + span, + "tried to normalize non-wf type in check_transmute", + ) + } }; let from = normalize(from); let to = normalize(to); diff --git a/compiler/rustc_hir_typeck/src/lib.rs b/compiler/rustc_hir_typeck/src/lib.rs index 91ea34eb54d35..cb21961f36b2d 100644 --- a/compiler/rustc_hir_typeck/src/lib.rs +++ b/compiler/rustc_hir_typeck/src/lib.rs @@ -87,21 +87,7 @@ fn used_trait_imports(tcx: TyCtxt<'_>, def_id: LocalDefId) -> &UnordSet(tcx: TyCtxt<'tcx>, def_id: LocalDefId) -> &'tcx ty::TypeckResults<'tcx> { - let fallback = move || tcx.type_of(def_id.to_def_id()).instantiate_identity(); - typeck_with_fallback(tcx, def_id, fallback, None) -} - -/// Used only to get `TypeckResults` for type inference during error recovery. -/// Currently only used for type inference of `static`s and `const`s to avoid type cycle errors. -fn diagnostic_only_typeck<'tcx>( - tcx: TyCtxt<'tcx>, - def_id: LocalDefId, -) -> &'tcx ty::TypeckResults<'tcx> { - let fallback = move || { - let span = tcx.hir().span(tcx.local_def_id_to_hir_id(def_id)); - Ty::new_error_with_message(tcx, span, "diagnostic only typeck table used") - }; - typeck_with_fallback(tcx, def_id, fallback, None) + typeck_with_fallback(tcx, def_id, None) } /// Same as `typeck` but `inspect` is invoked on evaluation of each root obligation. 
@@ -113,15 +99,13 @@ pub fn inspect_typeck<'tcx>( def_id: LocalDefId, inspect: ObligationInspector<'tcx>, ) -> &'tcx ty::TypeckResults<'tcx> { - let fallback = move || tcx.type_of(def_id.to_def_id()).instantiate_identity(); - typeck_with_fallback(tcx, def_id, fallback, Some(inspect)) + typeck_with_fallback(tcx, def_id, Some(inspect)) } -#[instrument(level = "debug", skip(tcx, fallback, inspector), ret)] +#[instrument(level = "debug", skip(tcx, inspector), ret)] fn typeck_with_fallback<'tcx>( tcx: TyCtxt<'tcx>, def_id: LocalDefId, - fallback: impl Fn() -> Ty<'tcx> + 'tcx, inspector: Option>, ) -> &'tcx ty::TypeckResults<'tcx> { // Closures' typeck results come from their outermost function, @@ -150,8 +134,12 @@ fn typeck_with_fallback<'tcx>( let mut fcx = FnCtxt::new(&root_ctxt, param_env, def_id); if let Some(hir::FnSig { header, decl, .. }) = node.fn_sig() { - let fn_sig = if decl.output.get_infer_ret_ty().is_some() { - fcx.lowerer().lower_fn_ty(id, header.safety, header.abi, decl, None, None) + let fn_sig = if decl.output.is_suggestable_infer_ty().is_some() { + // In the case that we're recovering `fn() -> W<_>` or some other return + // type that has an infer in it, lower the type directly so that it'll + // be correctly filled with infer. We'll use this inference to provide + // a suggestion later on. + fcx.lowerer().lower_fn_ty(id, header.safety(), header.abi, decl, None, None) } else { tcx.fn_sig(def_id).instantiate_identity() }; @@ -159,13 +147,39 @@ fn typeck_with_fallback<'tcx>( check_abi(tcx, span, fn_sig.abi()); // Compute the function signature from point of view of inside the fn. - let fn_sig = tcx.liberate_late_bound_regions(def_id.to_def_id(), fn_sig); - let fn_sig = fcx.normalize(body.value.span, fn_sig); + let mut fn_sig = tcx.liberate_late_bound_regions(def_id.to_def_id(), fn_sig); + + // Normalize the input and output types one at a time, using a different + // `WellFormedLoc` for each. We cannot call `normalize_associated_types` + // on the entire `FnSig`, since this would use the same `WellFormedLoc` + // for each type, preventing the HIR wf check from generating + // a nice error message. + let arg_span = + |idx| decl.inputs.get(idx).map_or(decl.output.span(), |arg: &hir::Ty<'_>| arg.span); + + fn_sig.inputs_and_output = tcx.mk_type_list_from_iter( + fn_sig + .inputs_and_output + .iter() + .enumerate() + .map(|(idx, ty)| fcx.normalize(arg_span(idx), ty)), + ); check_fn(&mut fcx, fn_sig, None, decl, def_id, body, tcx.features().unsized_fn_params()); } else { - let expected_type = infer_type_if_missing(&fcx, node); - let expected_type = expected_type.unwrap_or_else(fallback); + let expected_type = if let Some(infer_ty) = infer_type_if_missing(&fcx, node) { + infer_ty + } else if let Some(ty) = node.ty() + && ty.is_suggestable_infer_ty() + { + // In the case that we're recovering `const X: [T; _]` or some other + // type that has an infer in it, lower the type directly so that it'll + // be correctly filled with infer. We'll use this inference to provide + // a suggestion later on. + fcx.lowerer().lower_ty(ty) + } else { + tcx.type_of(def_id).instantiate_identity() + }; let expected_type = fcx.normalize(body.value.span, expected_type); @@ -506,5 +520,5 @@ fn fatally_break_rust(tcx: TyCtxt<'_>, span: Span) -> ! 
{ pub fn provide(providers: &mut Providers) { method::provide(providers); - *providers = Providers { typeck, diagnostic_only_typeck, used_trait_imports, ..*providers }; + *providers = Providers { typeck, used_trait_imports, ..*providers }; } diff --git a/compiler/rustc_hir_typeck/src/method/confirm.rs b/compiler/rustc_hir_typeck/src/method/confirm.rs index ef431c852e94b..f549ced9dc332 100644 --- a/compiler/rustc_hir_typeck/src/method/confirm.rs +++ b/compiler/rustc_hir_typeck/src/method/confirm.rs @@ -601,7 +601,7 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { self.call_expr.hir_id, idx, ); - traits::ObligationCause::new(self.span, self.body_id, code) + self.cause(self.span, code) }, self.param_env, method_predicates, @@ -611,7 +611,7 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { // this is a projection from a trait reference, so we have to // make sure that the trait reference inputs are well-formed. - self.add_wf_bounds(all_args, self.call_expr); + self.add_wf_bounds(all_args, self.call_expr.span); // the function type must also be well-formed (this is not // implied by the args being well-formed because of inherent diff --git a/compiler/rustc_hir_typeck/src/method/mod.rs b/compiler/rustc_hir_typeck/src/method/mod.rs index e0b6ea0ac9de6..4008021c3a857 100644 --- a/compiler/rustc_hir_typeck/src/method/mod.rs +++ b/compiler/rustc_hir_typeck/src/method/mod.rs @@ -19,8 +19,7 @@ use rustc_middle::ty::{ self, GenericArgs, GenericArgsRef, GenericParamDefKind, Ty, TypeVisitableExt, }; use rustc_middle::{bug, span_bug}; -use rustc_span::symbol::Ident; -use rustc_span::{ErrorGuaranteed, Span}; +use rustc_span::{ErrorGuaranteed, Ident, Span}; use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt; use rustc_trait_selection::traits::{self, NormalizeExt}; use tracing::{debug, instrument}; diff --git a/compiler/rustc_hir_typeck/src/method/prelude_edition_lints.rs b/compiler/rustc_hir_typeck/src/method/prelude_edition_lints.rs index b20592c85d285..69d7a6c97cb36 100644 --- a/compiler/rustc_hir_typeck/src/method/prelude_edition_lints.rs +++ b/compiler/rustc_hir_typeck/src/method/prelude_edition_lints.rs @@ -8,9 +8,7 @@ use rustc_lint::{ARRAY_INTO_ITER, BOXED_SLICE_INTO_ITER}; use rustc_middle::span_bug; use rustc_middle::ty::{self, Ty}; use rustc_session::lint::builtin::{RUST_2021_PRELUDE_COLLISIONS, RUST_2024_PRELUDE_COLLISIONS}; -use rustc_span::Span; -use rustc_span::symbol::kw::{Empty, Underscore}; -use rustc_span::symbol::{Ident, sym}; +use rustc_span::{Ident, STDLIB_STABLE_CRATES, Span, kw, sym}; use rustc_trait_selection::infer::InferCtxtExt; use tracing::debug; @@ -78,7 +76,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { }; // No need to lint if method came from std/core, as that will now be in the prelude - if matches!(self.tcx.crate_name(pick.item.def_id.krate), sym::std | sym::core) { + if STDLIB_STABLE_CRATES.contains(&self.tcx.crate_name(pick.item.def_id.krate)) { return; } @@ -254,7 +252,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } // No need to lint if method came from std/core, as that will now be in the prelude - if matches!(self.tcx.crate_name(pick.item.def_id.krate), sym::std | sym::core) { + if STDLIB_STABLE_CRATES.contains(&self.tcx.crate_name(pick.item.def_id.krate)) { return; } @@ -369,11 +367,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { .collect(); // Find an identifier with which this trait was imported (note that `_` doesn't count). 
- let any_id = import_items - .iter() - .find_map(|item| if item.ident.name != Underscore { Some(item.ident) } else { None }); + let any_id = import_items.iter().find_map(|item| { + if item.ident.name != kw::Underscore { Some(item.ident) } else { None } + }); if let Some(any_id) = any_id { - if any_id.name == Empty { + if any_id.name == kw::Empty { // Glob import, so just use its name. return None; } else { diff --git a/compiler/rustc_hir_typeck/src/method/probe.rs b/compiler/rustc_hir_typeck/src/method/probe.rs index 3b377076545da..116765325a91f 100644 --- a/compiler/rustc_hir_typeck/src/method/probe.rs +++ b/compiler/rustc_hir_typeck/src/method/probe.rs @@ -25,8 +25,7 @@ use rustc_span::def_id::{DefId, LocalDefId}; use rustc_span::edit_distance::{ edit_distance_with_substrings, find_best_match_for_name_with_substrings, }; -use rustc_span::symbol::{Ident, sym}; -use rustc_span::{DUMMY_SP, Span, Symbol}; +use rustc_span::{DUMMY_SP, Ident, Span, Symbol, sym}; use rustc_trait_selection::error_reporting::infer::need_type_info::TypeAnnotationNeeded; use rustc_trait_selection::infer::InferCtxtExt as _; use rustc_trait_selection::traits::query::CanonicalTyGoal; @@ -1230,23 +1229,16 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { if let Some(by_value_pick) = by_value_pick { if let Ok(by_value_pick) = by_value_pick.as_ref() { if by_value_pick.kind == PickKind::InherentImplPick { - if let Err(e) = self.check_for_shadowed_autorefd_method( - by_value_pick, - step, - self_ty, - hir::Mutability::Not, - track_unstable_candidates, - ) { - return Some(Err(e)); - } - if let Err(e) = self.check_for_shadowed_autorefd_method( - by_value_pick, - step, - self_ty, - hir::Mutability::Mut, - track_unstable_candidates, - ) { - return Some(Err(e)); + for mutbl in [hir::Mutability::Not, hir::Mutability::Mut] { + if let Err(e) = self.check_for_shadowed_autorefd_method( + by_value_pick, + step, + self_ty, + mutbl, + track_unstable_candidates, + ) { + return Some(Err(e)); + } } } } @@ -1337,6 +1329,15 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { mutbl: hir::Mutability, track_unstable_candidates: bool, ) -> Result<(), MethodError<'tcx>> { + // The errors emitted by this function are part of + // the arbitrary self types work, and should not impact + // other users. 
+ if !self.tcx.features().arbitrary_self_types() + && !self.tcx.features().arbitrary_self_types_pointers() + { + return Ok(()); + } + // We don't want to remember any of the diagnostic hints from this // shadow search, but we do need to provide Some/None for the // unstable_candidates in order to reflect the behavior of the @@ -1738,8 +1739,8 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { &self, trait_ref: ty::TraitRef<'tcx>, ) -> traits::SelectionResult<'tcx, traits::Selection<'tcx>> { - let cause = traits::ObligationCause::misc(self.span, self.body_id); - let obligation = traits::Obligation::new(self.tcx, cause, self.param_env, trait_ref); + let obligation = + traits::Obligation::new(self.tcx, self.misc(self.span), self.param_env, trait_ref); traits::SelectionContext::new(self).select(&obligation) } @@ -1840,7 +1841,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { self.scope_expr_id, idx, ); - ObligationCause::new(self.span, self.body_id, code) + self.cause(self.span, code) }, self.param_env, impl_bounds, diff --git a/compiler/rustc_hir_typeck/src/method/suggest.rs b/compiler/rustc_hir_typeck/src/method/suggest.rs index caddfc1254082..162fac0beed96 100644 --- a/compiler/rustc_hir_typeck/src/method/suggest.rs +++ b/compiler/rustc_hir_typeck/src/method/suggest.rs @@ -8,13 +8,13 @@ use std::borrow::Cow; use hir::Expr; use rustc_ast::ast::Mutability; -use rustc_attr_parsing::parse_confusables; +use rustc_attr_parsing::{AttributeKind, find_attr}; use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; use rustc_data_structures::sorted_map::SortedMap; use rustc_data_structures::unord::UnordSet; use rustc_errors::codes::*; use rustc_errors::{Applicability, Diag, MultiSpan, StashKey, pluralize, struct_span_code_err}; -use rustc_hir::def::DefKind; +use rustc_hir::def::{CtorKind, DefKind, Res}; use rustc_hir::def_id::DefId; use rustc_hir::intravisit::{self, Visitor}; use rustc_hir::lang_items::LangItem; @@ -27,9 +27,9 @@ use rustc_middle::ty::print::{ }; use rustc_middle::ty::{self, GenericArgKind, IsSuggestable, Ty, TyCtxt, TypeVisitableExt}; use rustc_span::def_id::DefIdSet; -use rustc_span::symbol::{Ident, kw, sym}; use rustc_span::{ - DUMMY_SP, ErrorGuaranteed, ExpnKind, FileName, MacroKind, Span, Symbol, edit_distance, + DUMMY_SP, ErrorGuaranteed, ExpnKind, FileName, Ident, MacroKind, Span, Symbol, edit_distance, + kw, sym, }; use rustc_trait_selection::error_reporting::traits::DefIdOrName; use rustc_trait_selection::error_reporting::traits::on_unimplemented::OnUnimplementedNote; @@ -105,8 +105,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { return false; }; let trait_ref = ty::TraitRef::new(self.tcx, into_iterator_trait, [ty]); - let cause = ObligationCause::new(span, self.body_id, ObligationCauseCode::Misc); - let obligation = Obligation::new(self.tcx, cause, self.param_env, trait_ref); + let obligation = Obligation::new(self.tcx, self.misc(span), self.param_env, trait_ref); if !self.predicate_must_hold_modulo_regions(&obligation) { return false; } @@ -171,6 +170,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { span, .. }) + | hir::Node::PatExpr(&hir::PatExpr { + kind: hir::PatExprKind::Path(QPath::TypeRelative(rcvr, segment)), + span, + .. + }) | hir::Node::Pat(&hir::Pat { kind: hir::PatKind::Path(QPath::TypeRelative(rcvr, segment)) @@ -691,6 +695,30 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ); } + // Check if we wrote `Self::Assoc(1)` as if it were a tuple ctor. 
+ if let SelfSource::QPath(ty) = source + && let hir::TyKind::Path(hir::QPath::Resolved(_, path)) = ty.kind + && let Res::SelfTyAlias { alias_to: impl_def_id, .. } = path.res + && let DefKind::Impl { .. } = self.tcx.def_kind(impl_def_id) + && let Some(candidate) = tcx.associated_items(impl_def_id).find_by_name_and_kind( + self.tcx, + item_name, + ty::AssocKind::Type, + impl_def_id, + ) + && let Some(adt_def) = tcx.type_of(candidate.def_id).skip_binder().ty_adt_def() + && adt_def.is_struct() + && adt_def.non_enum_variant().ctor_kind() == Some(CtorKind::Fn) + { + let def_path = tcx.def_path_str(adt_def.did()); + err.span_suggestion( + ty.span.to(item_name.span), + format!("to construct a value of type `{}`, use the explicit path", def_path), + def_path, + Applicability::MachineApplicable, + ); + } + err }; if tcx.sess.source_map().is_multiline(sugg_span) { @@ -1068,7 +1096,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ) ) { continue; - }; + } match self.tcx.hir().get_if_local(item_def_id) { // Unmet obligation comes from a `derive` macro, point at it once to @@ -1182,8 +1210,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { entry.1.insert((cause_span, "unsatisfied trait bound introduced here")); entry.2.push(p); } - Some(node) => unreachable!("encountered `{node:?}` due to `{cause:#?}`"), - None => (), + _ => { + // It's possible to use well-formedness clauses to get obligations + // which point arbitrary items like ADTs, so there's no use in ICEing + // here if we find that the obligation originates from some other + // node that we don't handle. + } } } let mut spanned_predicates: Vec<_> = spanned_predicates.into_iter().collect(); @@ -1869,9 +1901,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { for inherent_method in self.tcx.associated_items(inherent_impl_did).in_definition_order() { - if let Some(attr) = - self.tcx.get_attr(inherent_method.def_id, sym::rustc_confusables) - && let Some(candidates) = parse_confusables(attr) + if let Some(candidates) = find_attr!(self.tcx.get_all_attrs(inherent_method.def_id), AttributeKind::Confusables{symbols, ..} => symbols) && candidates.contains(&item_name.name) && let ty::AssocKind::Fn = inherent_method.kind { @@ -3489,7 +3519,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let pred = ty::TraitRef::new(self.tcx, unpin_trait, [*rcvr_ty]); let unpin = self.predicate_must_hold_considering_regions(&Obligation::new( self.tcx, - ObligationCause::misc(rcvr.span, self.body_id), + self.misc(rcvr.span), self.param_env, pred, )); diff --git a/compiler/rustc_hir_typeck/src/op.rs b/compiler/rustc_hir_typeck/src/op.rs index 72b930ee84df6..805079afdb006 100644 --- a/compiler/rustc_hir_typeck/src/op.rs +++ b/compiler/rustc_hir_typeck/src/op.rs @@ -11,9 +11,8 @@ use rustc_middle::ty::print::with_no_trimmed_paths; use rustc_middle::ty::{self, IsSuggestable, Ty, TyCtxt, TypeVisitableExt}; use rustc_middle::{bug, span_bug}; use rustc_session::errors::ExprParenthesesNeeded; -use rustc_span::Span; use rustc_span::source_map::Spanned; -use rustc_span::symbol::{Ident, sym}; +use rustc_span::{Ident, Span, sym}; use rustc_trait_selection::infer::InferCtxtExt; use rustc_trait_selection::traits::{FulfillmentError, Obligation, ObligationCtxt}; use rustc_type_ir::TyKind::*; diff --git a/compiler/rustc_hir_typeck/src/pat.rs b/compiler/rustc_hir_typeck/src/pat.rs index e7726845652f3..36094657eafd1 100644 --- a/compiler/rustc_hir_typeck/src/pat.rs +++ b/compiler/rustc_hir_typeck/src/pat.rs @@ -23,14 +23,14 @@ use rustc_session::parse::feature_err; use rustc_span::edit_distance::find_best_match_for_name; use 
rustc_span::hygiene::DesugaringKind; use rustc_span::source_map::Spanned; -use rustc_span::symbol::{Ident, kw, sym}; -use rustc_span::{BytePos, DUMMY_SP, Span}; +use rustc_span::{BytePos, DUMMY_SP, Ident, Span, kw, sym}; use rustc_trait_selection::infer::InferCtxtExt; use rustc_trait_selection::traits::{ObligationCause, ObligationCauseCode}; use tracing::{debug, instrument, trace}; use ty::VariantDef; use super::report_unexpected_variant_res; +use crate::expectation::Expectation; use crate::gather_locals::DeclOrigin; use crate::{FnCtxt, LoweredTy, errors}; @@ -271,7 +271,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { PatKind::Wild | PatKind::Err(_) => expected, // We allow any type here; we ensure that the type is uninhabited during match checking. PatKind::Never => expected, - PatKind::Lit(lt) => self.check_pat_lit(pat.span, lt, expected, ti), + PatKind::Expr(lt) => self.check_pat_lit(pat.span, lt, expected, ti), PatKind::Range(lhs, rhs, _) => self.check_pat_range(pat.span, lhs, rhs, expected, ti), PatKind::Binding(ba, var_id, ident, sub) => { self.check_pat_ident(pat, ba, var_id, ident, sub, expected, pat_info) @@ -280,11 +280,16 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.check_pat_tuple_struct(pat, qpath, subpats, ddpos, expected, pat_info) } PatKind::Path(ref qpath) => { - self.check_pat_path(pat, qpath, path_res.unwrap(), expected, ti) + self.check_pat_path(pat.hir_id, pat.span, qpath, path_res.unwrap(), expected, ti) } PatKind::Struct(ref qpath, fields, has_rest_pat) => { self.check_pat_struct(pat, qpath, fields, has_rest_pat, expected, pat_info) } + PatKind::Guard(pat, cond) => { + self.check_pat(pat, expected, pat_info); + self.check_expr_has_type_or_error(cond, self.tcx.types.bool, |_| {}); + expected + } PatKind::Or(pats) => { for pat in pats { self.check_pat(pat, expected, pat_info); @@ -394,7 +399,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // As a result, we allow `if let 0 = &&0 {}` but not `if let "foo" = &&"foo" {}`. // // Call `resolve_vars_if_possible` here for inline const blocks. - PatKind::Lit(lt) => match self.resolve_vars_if_possible(self.check_expr(lt)).kind() { + PatKind::Expr(lt) => match self.resolve_vars_if_possible(self.check_pat_expr_unadjusted(lt)).kind() { ty::Ref(..) => AdjustMode::Pass, _ => AdjustMode::Peel, }, @@ -423,7 +428,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // An OR-pattern just propagates to each individual alternative. // This is maximally flexible, allowing e.g., `Some(mut x) | &Some(mut x)`. // In that example, `Some(mut x)` results in `Peel` whereas `&Some(mut x)` in `Reset`. - | PatKind::Or(_) => AdjustMode::Pass, + | PatKind::Or(_) + // Like or-patterns, guard patterns just propagate to their subpatterns. + | PatKind::Guard(..) => AdjustMode::Pass, } } @@ -487,10 +494,28 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { (expected, def_br, max_ref_mutbl) } + fn check_pat_expr_unadjusted(&self, lt: &'tcx hir::PatExpr<'tcx>) -> Ty<'tcx> { + let ty = match &lt.kind { + rustc_hir::PatExprKind::Lit { lit, ..
} => { + self.check_expr_lit(lit, Expectation::NoExpectation) + } + rustc_hir::PatExprKind::ConstBlock(c) => { + self.check_expr_const_block(c, Expectation::NoExpectation) + } + rustc_hir::PatExprKind::Path(qpath) => { + let (res, opt_ty, segments) = + self.resolve_ty_and_res_fully_qualified_call(qpath, lt.hir_id, lt.span); + self.instantiate_value_path(segments, opt_ty, res, lt.span, lt.span, lt.hir_id).0 + } + }; + self.write_ty(lt.hir_id, ty); + ty + } + fn check_pat_lit( &self, span: Span, - lt: &hir::Expr<'tcx>, + lt: &hir::PatExpr<'tcx>, expected: Ty<'tcx>, ti: &TopInfo<'tcx>, ) -> Ty<'tcx> { @@ -501,7 +526,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // Byte string patterns behave the same way as array patterns // They can denote both statically and dynamically-sized byte arrays. let mut pat_ty = ty; - if let hir::ExprKind::Lit(Spanned { node: ast::LitKind::ByteStr(..), .. }) = lt.kind { + if let hir::PatExprKind::Lit { + lit: Spanned { node: ast::LitKind::ByteStr(..), .. }, .. + } = lt.kind + { let expected = self.structurally_resolve_type(span, expected); if let ty::Ref(_, inner_ty, _) = *expected.kind() && self.try_structurally_resolve_type(span, inner_ty).is_slice() @@ -518,7 +546,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } if self.tcx.features().string_deref_patterns() - && let hir::ExprKind::Lit(Spanned { node: ast::LitKind::Str(..), .. }) = lt.kind + && let hir::PatExprKind::Lit { + lit: Spanned { node: ast::LitKind::Str(..), .. }, .. + } = lt.kind { let tcx = self.tcx; let expected = self.resolve_vars_if_possible(expected); @@ -559,15 +589,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { fn check_pat_range( &self, span: Span, - lhs: Option<&'tcx hir::Expr<'tcx>>, - rhs: Option<&'tcx hir::Expr<'tcx>>, + lhs: Option<&'tcx hir::PatExpr<'tcx>>, + rhs: Option<&'tcx hir::PatExpr<'tcx>>, expected: Ty<'tcx>, ti: &TopInfo<'tcx>, ) -> Ty<'tcx> { - let calc_side = |opt_expr: Option<&'tcx hir::Expr<'tcx>>| match opt_expr { + let calc_side = |opt_expr: Option<&'tcx hir::PatExpr<'tcx>>| match opt_expr { None => None, Some(expr) => { - let ty = self.check_expr(expr); + let ty = self.check_pat_expr_unadjusted(expr); // Check that the end-point is possibly of numeric or char type. // The early check here is not for correctness, but rather better // diagnostics (e.g. when `&str` is being matched, `expected` will @@ -718,12 +748,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { BindingMode(def_br, Mutability::Mut) } else { // `mut` resets the binding mode on edition <= 2021 - *self - .typeck_results - .borrow_mut() - .rust_2024_migration_desugared_pats_mut() - .entry(pat_info.top_info.hir_id) - .or_default() |= pat.span.at_least_rust_2024(); + self.add_rust_2024_migration_desugared_pat( + pat_info.top_info.hir_id, + pat.span, + ident.span, + "requires binding by-value, but the implicit default is by-reference", + ); BindingMode(ByRef::No, Mutability::Mut) } } @@ -731,12 +761,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { BindingMode(ByRef::Yes(_), _) => { if matches!(def_br, ByRef::Yes(_)) { // `ref`/`ref mut` overrides the binding mode on edition <= 2021 - *self - .typeck_results - .borrow_mut() - .rust_2024_migration_desugared_pats_mut() - .entry(pat_info.top_info.hir_id) - .or_default() |= pat.span.at_least_rust_2024(); + self.add_rust_2024_migration_desugared_pat( + pat_info.top_info.hir_id, + pat.span, + ident.span, + "cannot override to bind by-reference when that is the implicit default", + ); } user_bind_annot } @@ -902,6 +932,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { PatKind::Struct(..) | PatKind::TupleStruct(..) 
| PatKind::Or(..) + | PatKind::Guard(..) | PatKind::Tuple(..) | PatKind::Slice(..) => "binding", @@ -912,7 +943,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { | PatKind::Box(..) | PatKind::Deref(_) | PatKind::Ref(..) - | PatKind::Lit(..) + | PatKind::Expr(..) | PatKind::Range(..) | PatKind::Err(_) => break 'block None, }, @@ -1046,7 +1077,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { fn check_pat_path( &self, - pat: &Pat<'tcx>, + hir_id: HirId, + span: Span, qpath: &hir::QPath<'_>, path_resolution: (Res, Option>, &'tcx [hir::PathSegment<'tcx>]), expected: Ty<'tcx>, @@ -1065,8 +1097,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } Res::Def(DefKind::AssocFn | DefKind::Ctor(_, CtorKind::Fn) | DefKind::Variant, _) => { let expected = "unit struct, unit variant or constant"; - let e = - report_unexpected_variant_res(tcx, res, None, qpath, pat.span, E0533, expected); + let e = report_unexpected_variant_res(tcx, res, None, qpath, span, E0533, expected); return Ty::new_error(tcx, e); } Res::SelfCtor(def_id) => { @@ -1081,7 +1112,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { res, None, qpath, - pat.span, + span, E0533, "unit struct", ); @@ -1100,11 +1131,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // Type-check the path. let (pat_ty, pat_res) = - self.instantiate_value_path(segments, opt_ty, res, pat.span, pat.span, pat.hir_id); + self.instantiate_value_path(segments, opt_ty, res, span, span, hir_id); if let Err(err) = - self.demand_suptype_with_origin(&self.pattern_cause(ti, pat.span), expected, pat_ty) + self.demand_suptype_with_origin(&self.pattern_cause(ti, span), expected, pat_ty) { - self.emit_bad_pat_path(err, pat, res, pat_res, pat_ty, segments); + self.emit_bad_pat_path(err, hir_id, span, res, pat_res, pat_ty, segments); } pat_ty } @@ -1147,13 +1178,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { fn emit_bad_pat_path( &self, mut e: Diag<'_>, - pat: &hir::Pat<'tcx>, + hir_id: HirId, + pat_span: Span, res: Res, pat_res: Res, pat_ty: Ty<'tcx>, segments: &'tcx [hir::PathSegment<'tcx>], ) { - let pat_span = pat.span; if let Some(span) = self.tcx.hir().res_span(pat_res) { e.span_label(span, format!("{} defined here", res.descr())); if let [hir::PathSegment { ident, .. }] = &*segments { @@ -1166,7 +1197,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { res.descr(), ), ); - match self.tcx.parent_hir_node(pat.hir_id) { + match self.tcx.parent_hir_node(hir_id) { hir::Node::PatField(..) 
=> { e.span_suggestion_verbose( ident.span.shrink_to_hi(), @@ -1806,9 +1837,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } else if inexistent_fields.len() == 1 { match pat_field.pat.kind { - PatKind::Lit(expr) + PatKind::Expr(_) if !self.may_coerce( - self.typeck_results.borrow().expr_ty(expr), + self.typeck_results.borrow().node_type(pat_field.pat.hir_id), self.field_ty(field.span, field_def, args), ) => {} _ => { @@ -2266,12 +2297,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // Reset binding mode on old editions if pat_info.binding_mode != ByRef::No { pat_info.binding_mode = ByRef::No; - *self - .typeck_results - .borrow_mut() - .rust_2024_migration_desugared_pats_mut() - .entry(pat_info.top_info.hir_id) - .or_default() |= pat.span.at_least_rust_2024(); + self.add_rust_2024_migration_desugared_pat( + pat_info.top_info.hir_id, + pat.span, + inner.span, + "cannot implicitly match against multiple layers of reference", + ) } } @@ -2630,4 +2661,39 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { _ => (false, ty), } } + + /// Record a pattern that's invalid under Rust 2024 match ergonomics, along with a problematic + /// span, so that the pattern migration lint can desugar it during THIR construction. + fn add_rust_2024_migration_desugared_pat( + &self, + pat_id: HirId, + subpat_span: Span, + cutoff_span: Span, + detailed_label: &str, + ) { + // Try to trim the span we're labeling to just the `&` or binding mode that's an issue. + // If the subpattern's span is from an expansion, the emitted label will not be trimmed. + let source_map = self.tcx.sess.source_map(); + let cutoff_span = source_map + .span_extend_prev_while(cutoff_span, char::is_whitespace) + .unwrap_or(cutoff_span); + // Ensure we use the syntax context and thus edition of `subpat_span`; this will be a hard + // error if the subpattern is of edition >= 2024. + let trimmed_span = subpat_span.until(cutoff_span).with_ctxt(subpat_span.ctxt()); + + // Only provide a detailed label if the problematic subpattern isn't from an expansion. + // In the case that it's from a macro, we'll add a more detailed note in the emitter. + let desc = if subpat_span.from_expansion() { + "default binding mode is reset within expansion" + } else { + detailed_label + }; + + self.typeck_results + .borrow_mut() + .rust_2024_migration_desugared_pats_mut() + .entry(pat_id) + .or_default() + .push((trimmed_span, desc.to_owned())); + } } diff --git a/compiler/rustc_hir_typeck/src/place_op.rs b/compiler/rustc_hir_typeck/src/place_op.rs index 7c667511aa997..ba63507113578 100644 --- a/compiler/rustc_hir_typeck/src/place_op.rs +++ b/compiler/rustc_hir_typeck/src/place_op.rs @@ -7,8 +7,7 @@ use rustc_middle::ty::adjustment::{ PointerCoercion, }; use rustc_middle::ty::{self, Ty}; -use rustc_span::Span; -use rustc_span::symbol::{Ident, sym}; +use rustc_span::{Ident, Span, sym}; use tracing::debug; use {rustc_ast as ast, rustc_hir as hir}; diff --git a/compiler/rustc_hir_typeck/src/upvar.rs b/compiler/rustc_hir_typeck/src/upvar.rs index 4f283644cbe1a..cac891c4e4c4f 100644 --- a/compiler/rustc_hir_typeck/src/upvar.rs +++ b/compiler/rustc_hir_typeck/src/upvar.rs @@ -14,7 +14,7 @@ //! to everything owned by `x`, so the result is the same for something //! like `x.f = 5` and so on (presuming `x` is not a borrowed pointer to a //! struct). These adjustments are performed in -//! `adjust_upvar_borrow_kind()` (you can trace backwards through the code +//! `adjust_for_non_move_closure` (you can trace backwards through the code //! from there). //! //!
The fact that we are inferring borrow kinds as we go results in a @@ -147,15 +147,16 @@ impl<'a, 'tcx> Visitor<'tcx> for InferBorrowKindVisitor<'a, 'tcx> { self.visit_body(body); self.fcx.analyze_closure(expr.hir_id, expr.span, body_id, body, capture_clause); } - hir::ExprKind::ConstBlock(anon_const) => { - let body = self.fcx.tcx.hir().body(anon_const.body); - self.visit_body(body); - } _ => {} } intravisit::walk_expr(self, expr); } + + fn visit_inline_const(&mut self, c: &'tcx hir::ConstBlock) { + let body = self.fcx.tcx.hir().body(c.body); + self.visit_body(body); + } } impl<'a, 'tcx> FnCtxt<'a, 'tcx> { @@ -1684,8 +1685,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // want to capture by ref to allow precise capture using reborrows. // // If the data will be moved out of this place, then the place will be truncated - // at the first Deref in `adjust_upvar_borrow_kind_for_consume` and then moved into - // the closure. + // at the first Deref in `adjust_for_move_closure` and then moved into the closure. hir::CaptureBy::Value { .. } if !place.deref_tys().any(Ty::is_ref) => { ty::UpvarCapture::ByValue } @@ -1936,7 +1936,7 @@ fn drop_location_span(tcx: TyCtxt<'_>, hir_id: HirId) -> Span { let owner_node = tcx.hir_node(owner_id); let owner_span = match owner_node { hir::Node::Item(item) => match item.kind { - hir::ItemKind::Fn(_, _, owner_id) => tcx.hir().span(owner_id.hir_id), + hir::ItemKind::Fn { body: owner_id, .. } => tcx.hir().span(owner_id.hir_id), _ => { bug!("Drop location span error: need to handle more ItemKind '{:?}'", item.kind); } diff --git a/compiler/rustc_hir_typeck/src/writeback.rs b/compiler/rustc_hir_typeck/src/writeback.rs index 6f1e3a0cf8c7b..683cacdff7d78 100644 --- a/compiler/rustc_hir_typeck/src/writeback.rs +++ b/compiler/rustc_hir_typeck/src/writeback.rs @@ -5,7 +5,7 @@ use std::mem; use rustc_data_structures::unord::ExtendUnord; -use rustc_errors::{ErrorGuaranteed, StashKey}; +use rustc_errors::ErrorGuaranteed; use rustc_hir as hir; use rustc_hir::HirId; use rustc_hir::intravisit::{self, Visitor}; @@ -15,8 +15,7 @@ use rustc_middle::ty::adjustment::{Adjust, Adjustment, PointerCoercion}; use rustc_middle::ty::fold::{TypeFoldable, TypeFolder, fold_regions}; use rustc_middle::ty::visit::TypeVisitableExt; use rustc_middle::ty::{self, Ty, TyCtxt, TypeSuperFoldable}; -use rustc_span::Span; -use rustc_span::symbol::sym; +use rustc_span::{Span, sym}; use rustc_trait_selection::error_reporting::infer::need_type_info::TypeAnnotationNeeded; use rustc_trait_selection::solve; use tracing::{debug, instrument}; @@ -247,6 +246,13 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { } } } + + fn visit_const_block(&mut self, span: Span, anon_const: &hir::ConstBlock) { + self.visit_node_id(span, anon_const.hir_id); + + let body = self.tcx().hir().body(anon_const.body); + self.visit_body(body); + } } /////////////////////////////////////////////////////////////////////////// @@ -276,11 +282,8 @@ impl<'cx, 'tcx> Visitor<'tcx> for WritebackCx<'cx, 'tcx> { hir::ExprKind::Field(..) | hir::ExprKind::OffsetOf(..) 
=> { self.visit_field_id(e.hir_id); } - hir::ExprKind::ConstBlock(anon_const) => { - self.visit_node_id(e.span, anon_const.hir_id); - - let body = self.tcx().hir().body(anon_const.body); - self.visit_body(body); + hir::ExprKind::ConstBlock(ref anon_const) => { + self.visit_const_block(e.span, anon_const); } _ => {} } @@ -336,6 +339,14 @@ impl<'cx, 'tcx> Visitor<'tcx> for WritebackCx<'cx, 'tcx> { intravisit::walk_pat(self, p); } + fn visit_pat_expr(&mut self, expr: &'tcx hir::PatExpr<'tcx>) { + self.visit_node_id(expr.span, expr.hir_id); + if let hir::PatExprKind::ConstBlock(c) = &expr.kind { + self.visit_const_block(expr.span, c); + } + intravisit::walk_pat_expr(self, expr); + } + fn visit_local(&mut self, l: &'tcx hir::LetStmt<'tcx>) { intravisit::walk_local(self, l); let var_ty = self.fcx.local_ty(l.span, l.hir_id); @@ -551,15 +562,17 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { // types or by using this function at the end of writeback and running it as a // fixpoint. let opaque_types = self.fcx.infcx.clone_opaque_types(); - for (opaque_type_key, decl) in opaque_types { - let hidden_type = self.resolve(decl.hidden_type, &decl.hidden_type.span); - let opaque_type_key = self.resolve(opaque_type_key, &decl.hidden_type.span); - - if let ty::Alias(ty::Opaque, alias_ty) = hidden_type.ty.kind() - && alias_ty.def_id == opaque_type_key.def_id.to_def_id() - && alias_ty.args == opaque_type_key.args - { - continue; + for (opaque_type_key, hidden_type) in opaque_types { + let hidden_type = self.resolve(hidden_type, &hidden_type.span); + let opaque_type_key = self.resolve(opaque_type_key, &hidden_type.span); + + if !self.fcx.next_trait_solver() { + if let ty::Alias(ty::Opaque, alias_ty) = hidden_type.ty.kind() + && alias_ty.def_id == opaque_type_key.def_id.to_def_id() + && alias_ty.args == opaque_type_key.args + { + continue; + } } // Here we only detect impl trait definition conflicts when they @@ -569,15 +582,8 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { && last_opaque_ty.ty != hidden_type.ty { assert!(!self.fcx.next_trait_solver()); - if let Ok(d) = hidden_type.build_mismatch_error( - &last_opaque_ty, - opaque_type_key.def_id, - self.tcx(), - ) { - d.stash( - self.tcx().def_span(opaque_type_key.def_id), - StashKey::OpaqueHiddenTypeMismatch, - ); + if let Ok(d) = hidden_type.build_mismatch_error(&last_opaque_ty, self.tcx()) { + d.emit(); } } } diff --git a/compiler/rustc_incremental/src/assert_dep_graph.rs b/compiler/rustc_incremental/src/assert_dep_graph.rs index 031c50c45d43f..294ee1ce161ff 100644 --- a/compiler/rustc_incremental/src/assert_dep_graph.rs +++ b/compiler/rustc_incremental/src/assert_dep_graph.rs @@ -47,8 +47,7 @@ use rustc_middle::dep_graph::{ use rustc_middle::hir::nested_filter; use rustc_middle::ty::TyCtxt; use rustc_middle::{bug, span_bug}; -use rustc_span::Span; -use rustc_span::symbol::{Symbol, sym}; +use rustc_span::{Span, Symbol, sym}; use tracing::debug; use {rustc_graphviz as dot, rustc_hir as hir}; @@ -138,13 +137,13 @@ impl<'tcx> IfThisChanged<'tcx> { match DepNode::from_label_string(self.tcx, n.as_str(), def_path_hash) { Ok(n) => n, Err(()) => self.tcx.dcx().emit_fatal(errors::UnrecognizedDepNode { - span: attr.span, + span: attr.span(), name: n, }), } } }; - self.if_this_changed.push((attr.span, def_id.to_def_id(), dep_node)); + self.if_this_changed.push((attr.span(), def_id.to_def_id(), dep_node)); } else if attr.has_name(sym::rustc_then_this_would_need) { let dep_node_interned = self.argument(attr); let dep_node = match dep_node_interned { @@ -152,17 +151,17 @@ 
impl<'tcx> IfThisChanged<'tcx> { match DepNode::from_label_string(self.tcx, n.as_str(), def_path_hash) { Ok(n) => n, Err(()) => self.tcx.dcx().emit_fatal(errors::UnrecognizedDepNode { - span: attr.span, + span: attr.span(), name: n, }), } } None => { - self.tcx.dcx().emit_fatal(errors::MissingDepNode { span: attr.span }); + self.tcx.dcx().emit_fatal(errors::MissingDepNode { span: attr.span() }); } }; self.then_this_would_need.push(( - attr.span, + attr.span(), dep_node_interned.unwrap(), hir_id, dep_node, diff --git a/compiler/rustc_incremental/src/errors.rs b/compiler/rustc_incremental/src/errors.rs index cb21f9758789c..b4a207386dc44 100644 --- a/compiler/rustc_incremental/src/errors.rs +++ b/compiler/rustc_incremental/src/errors.rs @@ -1,8 +1,7 @@ use std::path::{Path, PathBuf}; use rustc_macros::Diagnostic; -use rustc_span::symbol::Ident; -use rustc_span::{Span, Symbol}; +use rustc_span::{Ident, Span, Symbol}; #[derive(Diagnostic)] #[diag(incremental_unrecognized_depnode)] diff --git a/compiler/rustc_incremental/src/persist/dirty_clean.rs b/compiler/rustc_incremental/src/persist/dirty_clean.rs index a85686e590ba2..81b0ec6181da0 100644 --- a/compiler/rustc_incremental/src/persist/dirty_clean.rs +++ b/compiler/rustc_incremental/src/persist/dirty_clean.rs @@ -29,8 +29,7 @@ use rustc_hir::{ use rustc_middle::dep_graph::{DepNode, DepNodeExt, label_strs}; use rustc_middle::hir::nested_filter; use rustc_middle::ty::TyCtxt; -use rustc_span::Span; -use rustc_span::symbol::{Symbol, sym}; +use rustc_span::{Span, Symbol, sym}; use thin_vec::ThinVec; use tracing::debug; @@ -201,7 +200,7 @@ impl<'tcx> DirtyCleanVisitor<'tcx> { let loaded_from_disk = self.loaded_from_disk(attr); for e in except.items().into_sorted_stable_ord() { if !auto.remove(e) { - self.tcx.dcx().emit_fatal(errors::AssertionAuto { span: attr.span, name, e }); + self.tcx.dcx().emit_fatal(errors::AssertionAuto { span: attr.span(), name, e }); } } Assertion { clean: auto, dirty: except, loaded_from_disk } @@ -254,7 +253,7 @@ impl<'tcx> DirtyCleanVisitor<'tcx> { HirItem::Const(..) => ("ItemConst", LABELS_CONST), // A function declaration - HirItem::Fn(..) => ("ItemFn", LABELS_FN), + HirItem::Fn { .. } => ("ItemFn", LABELS_FN), // // A module HirItem::Mod(..) => ("ItemMod", LABELS_HIR_ONLY), @@ -284,7 +283,7 @@ impl<'tcx> DirtyCleanVisitor<'tcx> { HirItem::Impl { .. } => ("ItemKind::Impl", LABELS_IMPL), _ => self.tcx.dcx().emit_fatal(errors::UndefinedCleanDirtyItem { - span: attr.span, + span: attr.span(), kind: format!("{:?}", item.kind), }), } @@ -300,7 +299,7 @@ impl<'tcx> DirtyCleanVisitor<'tcx> { ImplItemKind::Type(..) 
=> ("NodeImplType", LABELS_CONST_IN_IMPL), }, _ => self.tcx.dcx().emit_fatal(errors::UndefinedCleanDirty { - span: attr.span, + span: attr.span(), kind: format!("{node:?}"), }), }; @@ -377,7 +376,7 @@ impl<'tcx> DirtyCleanVisitor<'tcx> { let Some(assertion) = self.assertion_maybe(item_id, attr) else { continue; }; - self.checked_attrs.insert(attr.id); + self.checked_attrs.insert(attr.id()); for label in assertion.clean.items().into_sorted_stable_ord() { let dep_node = DepNode::from_label_string(self.tcx, label, def_path_hash).unwrap(); self.assert_clean(item_span, dep_node); @@ -407,12 +406,13 @@ fn check_config(tcx: TyCtxt<'_>, attr: &Attribute) -> bool { debug!("check_config: searching for cfg {:?}", value); cfg = Some(config.contains(&(value, None))); } else if !(item.has_name(EXCEPT) || item.has_name(LOADED_FROM_DISK)) { - tcx.dcx().emit_err(errors::UnknownItem { span: attr.span, name: item.name_or_empty() }); + tcx.dcx() + .emit_err(errors::UnknownItem { span: attr.span(), name: item.name_or_empty() }); } } match cfg { - None => tcx.dcx().emit_fatal(errors::NoCfg { span: attr.span }), + None => tcx.dcx().emit_fatal(errors::NoCfg { span: attr.span() }), Some(c) => c, } } @@ -446,9 +446,9 @@ impl<'tcx> FindAllAttrs<'tcx> { fn report_unchecked_attrs(&self, mut checked_attrs: FxHashSet) { for attr in &self.found_attrs { - if !checked_attrs.contains(&attr.id) { - self.tcx.dcx().emit_err(errors::UncheckedClean { span: attr.span }); - checked_attrs.insert(attr.id); + if !checked_attrs.contains(&attr.id()) { + self.tcx.dcx().emit_err(errors::UncheckedClean { span: attr.span() }); + checked_attrs.insert(attr.id()); } } } diff --git a/compiler/rustc_index/Cargo.toml b/compiler/rustc_index/Cargo.toml index 33e8e2824c7a9..f27db7a5400ad 100644 --- a/compiler/rustc_index/Cargo.toml +++ b/compiler/rustc_index/Cargo.toml @@ -5,7 +5,7 @@ edition = "2021" [dependencies] # tidy-alphabetical-start -rustc_index_macros = { path = "../rustc_index_macros", default-features = false } +rustc_index_macros = { path = "../rustc_index_macros" } rustc_macros = { path = "../rustc_macros", optional = true } rustc_serialize = { path = "../rustc_serialize", optional = true } smallvec = "1.8.1" diff --git a/compiler/rustc_index/src/bit_set.rs b/compiler/rustc_index/src/bit_set.rs index aba1e93829694..f12df831cb503 100644 --- a/compiler/rustc_index/src/bit_set.rs +++ b/compiler/rustc_index/src/bit_set.rs @@ -97,7 +97,13 @@ macro_rules! bit_relations_inherent_impls { /// A fixed-size bitset type with a dense representation. /// -/// NOTE: Use [`GrowableBitSet`] if you need support for resizing after creation. +/// Note 1: Since this bitset is dense, if your domain is big, and/or relatively +/// homogeneous (for example, with long runs of bits set or unset), then it may +/// be preferable to instead use a [MixedBitSet], or an +/// [IntervalSet](crate::interval::IntervalSet). They should be more suited to +/// sparse, or highly-compressible, domains. +/// +/// Note 2: Use [`GrowableBitSet`] if you need support for resizing after creation. /// /// `T` is an index type, typically a newtyped `usize` wrapper, but it can also /// just be `usize`. @@ -108,33 +114,33 @@ macro_rules! bit_relations_inherent_impls { /// #[cfg_attr(feature = "nightly", derive(Decodable_Generic, Encodable_Generic))] #[derive(Eq, PartialEq, Hash)] -pub struct BitSet { +pub struct DenseBitSet { domain_size: usize, words: SmallVec<[Word; 2]>, marker: PhantomData, } -impl BitSet { +impl DenseBitSet { /// Gets the domain size. 
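For orientation while reading the rename in this file, a small usage sketch of the renamed type. This is illustrative only: `DenseBitSet` lives in rustc's internal `rustc_index` crate, so it assumes code built inside the compiler, and it only exercises calls visible in this diff (`new_empty`, `new_filled`, `insert`, `contains`, `superset`, `domain_size`):

```rust
use rustc_index::bit_set::DenseBitSet;

fn demo() {
    // `T` is an index type; plain `usize` is enough for illustration.
    let mut set: DenseBitSet<usize> = DenseBitSet::new_empty(100);
    assert!(set.insert(7));   // newly inserted, so `insert` returns true
    assert!(!set.insert(7));  // already present, so it returns false
    assert!(set.contains(7));
    assert_eq!(set.domain_size(), 100);

    // A filled set over the same domain is a superset of any other set.
    let filled: DenseBitSet<usize> = DenseBitSet::new_filled(100);
    assert!(filled.superset(&set));
}
```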
pub fn domain_size(&self) -> usize { self.domain_size } } -impl BitSet { +impl DenseBitSet { /// Creates a new, empty bitset with a given `domain_size`. #[inline] - pub fn new_empty(domain_size: usize) -> BitSet { + pub fn new_empty(domain_size: usize) -> DenseBitSet { let num_words = num_words(domain_size); - BitSet { domain_size, words: smallvec![0; num_words], marker: PhantomData } + DenseBitSet { domain_size, words: smallvec![0; num_words], marker: PhantomData } } /// Creates a new, filled bitset with a given `domain_size`. #[inline] - pub fn new_filled(domain_size: usize) -> BitSet { + pub fn new_filled(domain_size: usize) -> DenseBitSet { let num_words = num_words(domain_size); let mut result = - BitSet { domain_size, words: smallvec![!0; num_words], marker: PhantomData }; + DenseBitSet { domain_size, words: smallvec![!0; num_words], marker: PhantomData }; result.clear_excess_bits(); result } @@ -165,7 +171,7 @@ impl BitSet { /// Is `self` is a (non-strict) superset of `other`? #[inline] - pub fn superset(&self, other: &BitSet) -> bool { + pub fn superset(&self, other: &DenseBitSet) -> bool { assert_eq!(self.domain_size, other.domain_size); self.words.iter().zip(&other.words).all(|(a, b)| (a & b) == *b) } @@ -179,7 +185,12 @@ impl BitSet { /// Insert `elem`. Returns whether the set has changed. #[inline] pub fn insert(&mut self, elem: T) -> bool { - assert!(elem.index() < self.domain_size); + assert!( + elem.index() < self.domain_size, + "inserting element at index {} but domain size is {}", + elem.index(), + self.domain_size, + ); let (word_index, mask) = word_index_and_mask(elem); let word_ref = &mut self.words[word_index]; let word = *word_ref; @@ -270,35 +281,57 @@ impl BitSet { } bit_relations_inherent_impls! {} + + /// Sets `self = self | !other`. + /// + /// FIXME: Incorporate this into [`BitRelations`] and fill out + /// implementations for other bitset types, if needed. + pub fn union_not(&mut self, other: &DenseBitSet) { + assert_eq!(self.domain_size, other.domain_size); + + // FIXME(Zalathar): If we were to forcibly _set_ all excess bits before + // the bitwise update, and then clear them again afterwards, we could + // quickly and accurately detect whether the update changed anything. + // But that's only worth doing if there's an actual use-case. + + bitwise(&mut self.words, &other.words, |a, b| a | !b); + // The bitwise update `a | !b` can result in the last word containing + // out-of-domain bits, so we need to clear them. 
+ self.clear_excess_bits(); + } } // dense REL dense -impl BitRelations> for BitSet { - fn union(&mut self, other: &BitSet) -> bool { +impl BitRelations> for DenseBitSet { + fn union(&mut self, other: &DenseBitSet) -> bool { assert_eq!(self.domain_size, other.domain_size); bitwise(&mut self.words, &other.words, |a, b| a | b) } - fn subtract(&mut self, other: &BitSet) -> bool { + fn subtract(&mut self, other: &DenseBitSet) -> bool { assert_eq!(self.domain_size, other.domain_size); bitwise(&mut self.words, &other.words, |a, b| a & !b) } - fn intersect(&mut self, other: &BitSet) -> bool { + fn intersect(&mut self, other: &DenseBitSet) -> bool { assert_eq!(self.domain_size, other.domain_size); bitwise(&mut self.words, &other.words, |a, b| a & b) } } -impl From> for BitSet { +impl From> for DenseBitSet { fn from(bit_set: GrowableBitSet) -> Self { bit_set.bit_set } } -impl Clone for BitSet { +impl Clone for DenseBitSet { fn clone(&self) -> Self { - BitSet { domain_size: self.domain_size, words: self.words.clone(), marker: PhantomData } + DenseBitSet { + domain_size: self.domain_size, + words: self.words.clone(), + marker: PhantomData, + } } fn clone_from(&mut self, from: &Self) { @@ -307,13 +340,13 @@ impl Clone for BitSet { } } -impl fmt::Debug for BitSet { +impl fmt::Debug for DenseBitSet { fn fmt(&self, w: &mut fmt::Formatter<'_>) -> fmt::Result { w.debug_list().entries(self.iter()).finish() } } -impl ToString for BitSet { +impl ToString for DenseBitSet { fn to_string(&self) -> String { let mut result = String::new(); let mut sep = '['; @@ -897,7 +930,7 @@ impl BitRelations> for ChunkedBitSet { } } -impl BitRelations> for BitSet { +impl BitRelations> for DenseBitSet { fn union(&mut self, other: &ChunkedBitSet) -> bool { sequential_update(|elem| self.insert(elem), other.iter()) } @@ -1072,6 +1105,18 @@ impl fmt::Debug for ChunkedBitSet { } } +/// Sets `out_vec[i] = op(out_vec[i], in_vec[i])` for each index `i` in both +/// slices. The slices must have the same length. +/// +/// Returns true if at least one bit in `out_vec` was changed. +/// +/// ## Warning +/// Some bitwise operations (e.g. union-not, xor) can set output bits that were +/// unset in both inputs. If this happens in the last word/chunk of a bitset, +/// it can cause the bitset to contain out-of-domain values, which need to +/// be cleared with `clear_excess_bits_in_final_word`. This also makes the +/// "changed" return value unreliable, because the change might have only +/// affected excess bits. #[inline] fn bitwise(out_vec: &mut [Word], in_vec: &[Word], op: Op) -> bool where @@ -1109,10 +1154,10 @@ where false } -/// A bitset with a mixed representation, using `BitSet` for small and medium -/// bitsets, and `ChunkedBitSet` for large bitsets, i.e. those with enough bits -/// for at least two chunks. This is a good choice for many bitsets that can -/// have large domain sizes (e.g. 5000+). +/// A bitset with a mixed representation, using `DenseBitSet` for small and +/// medium bitsets, and `ChunkedBitSet` for large bitsets, i.e. those with +/// enough bits for at least two chunks. This is a good choice for many bitsets +/// that can have large domain sizes (e.g. 5000+). /// /// `T` is an index type, typically a newtyped `usize` wrapper, but it can also /// just be `usize`. @@ -1122,7 +1167,7 @@ where /// will panic if the bitsets have differing domain sizes.
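The excess-bit handling called out above is the subtle part of `union_not`: `!b` turns on every bit past `domain_size` in the last word, so after the bitwise update those bits must be masked off again. A self-contained sketch of the same idea on raw 64-bit words (an illustration of why the final mask is needed, not the rustc implementation):

```rust
/// Computes `a = a | !b` over `domain_size` bits stored in 64-bit words, then
/// clears the excess bits in the last word so no out-of-domain bit stays set.
fn union_not(a: &mut Vec<u64>, b: &[u64], domain_size: usize) {
    assert_eq!(a.len(), b.len());
    for (wa, wb) in a.iter_mut().zip(b) {
        *wa |= !*wb;
    }
    // `!b` set every bit past `domain_size` in the last word; mask them off
    // so the set stays within its domain.
    let used_bits = domain_size % 64;
    if used_bits != 0 {
        if let Some(last) = a.last_mut() {
            *last &= (1u64 << used_bits) - 1;
        }
    }
}

fn main() {
    // One word, domain of 5 bits: a = {0}, b = {1, 3}.
    let mut a = vec![0b00001u64];
    let b = vec![0b01010u64];
    union_not(&mut a, &b, 5);
    // a | !b over 5 bits is {0} plus {0, 2, 4}, i.e. {0, 2, 4}.
    assert_eq!(a[0], 0b10101);
}
```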
#[derive(PartialEq, Eq)] pub enum MixedBitSet { - Small(BitSet), + Small(DenseBitSet), Large(ChunkedBitSet), } @@ -1139,7 +1184,7 @@ impl MixedBitSet { #[inline] pub fn new_empty(domain_size: usize) -> MixedBitSet { if domain_size <= CHUNK_BITS { - MixedBitSet::Small(BitSet::new_empty(domain_size)) + MixedBitSet::Small(DenseBitSet::new_empty(domain_size)) } else { MixedBitSet::Large(ChunkedBitSet::new_empty(domain_size)) } @@ -1191,6 +1236,14 @@ impl MixedBitSet { } } + #[inline] + pub fn clear(&mut self) { + match self { + MixedBitSet::Small(set) => set.clear(), + MixedBitSet::Large(set) => set.clear(), + } + } + bit_relations_inherent_impls! {} } @@ -1270,7 +1323,7 @@ impl<'a, T: Idx> Iterator for MixedBitIter<'a, T> { /// to or greater than the domain size. #[derive(Clone, Debug, PartialEq)] pub struct GrowableBitSet { - bit_set: BitSet, + bit_set: DenseBitSet, } impl Default for GrowableBitSet { @@ -1293,11 +1346,11 @@ impl GrowableBitSet { } pub fn new_empty() -> GrowableBitSet { - GrowableBitSet { bit_set: BitSet::new_empty(0) } + GrowableBitSet { bit_set: DenseBitSet::new_empty(0) } } pub fn with_capacity(capacity: usize) -> GrowableBitSet { - GrowableBitSet { bit_set: BitSet::new_empty(capacity) } + GrowableBitSet { bit_set: DenseBitSet::new_empty(capacity) } } /// Returns `true` if the set has changed. @@ -1336,8 +1389,8 @@ impl GrowableBitSet { } } -impl From> for GrowableBitSet { - fn from(bit_set: BitSet) -> Self { +impl From> for GrowableBitSet { + fn from(bit_set: DenseBitSet) -> Self { Self { bit_set } } } @@ -1373,7 +1426,7 @@ impl BitMatrix { } /// Creates a new matrix, with `row` used as the value for every row. - pub fn from_row_n(row: &BitSet, num_rows: usize) -> BitMatrix { + pub fn from_row_n(row: &DenseBitSet, num_rows: usize) -> BitMatrix { let num_columns = row.domain_size(); let words_per_row = num_words(num_columns); assert_eq!(words_per_row, row.words.len()); @@ -1471,7 +1524,7 @@ impl BitMatrix { /// Adds the bits from `with` to the bits from row `write`, and /// returns `true` if anything changed. - pub fn union_row_with(&mut self, with: &BitSet, write: R) -> bool { + pub fn union_row_with(&mut self, with: &DenseBitSet, write: R) -> bool { assert!(write.index() < self.num_rows); assert_eq!(with.domain_size(), self.num_columns); let (write_start, write_end) = self.range(write); @@ -1528,8 +1581,8 @@ impl fmt::Debug for BitMatrix { /// A fixed-column-size, variable-row-size 2D bit matrix with a moderately /// sparse representation. /// -/// Initially, every row has no explicit representation. If any bit within a -/// row is set, the entire row is instantiated as `Some()`. +/// Initially, every row has no explicit representation. If any bit within a row +/// is set, the entire row is instantiated as `Some()`. /// Furthermore, any previously uninstantiated rows prior to it will be /// instantiated as `None`. Those prior rows may themselves become fully /// instantiated later on if any of their bits are set. @@ -1543,7 +1596,7 @@ where C: Idx, { num_columns: usize, - rows: IndexVec>>, + rows: IndexVec>>, } impl SparseBitMatrix { @@ -1552,10 +1605,10 @@ impl SparseBitMatrix { Self { num_columns, rows: IndexVec::new() } } - fn ensure_row(&mut self, row: R) -> &mut BitSet { - // Instantiate any missing rows up to and including row `row` with an empty `BitSet`. - // Then replace row `row` with a full `BitSet` if necessary. 
-        self.rows.get_or_insert_with(row, || BitSet::new_empty(self.num_columns))
+    fn ensure_row(&mut self, row: R) -> &mut DenseBitSet<C> {
+        // Instantiate any missing rows up to and including row `row` with an empty `DenseBitSet`.
+        // Then replace row `row` with a full `DenseBitSet` if necessary.
+        self.rows.get_or_insert_with(row, || DenseBitSet::new_empty(self.num_columns))
     }
 
     /// Sets the cell at `(row, column)` to true. Put another way, insert
@@ -1629,17 +1682,17 @@ impl<R: Idx, C: Idx> SparseBitMatrix<R, C> {
         self.row(row).into_iter().flat_map(|r| r.iter())
     }
 
-    pub fn row(&self, row: R) -> Option<&BitSet<C>> {
+    pub fn row(&self, row: R) -> Option<&DenseBitSet<C>> {
         self.rows.get(row)?.as_ref()
     }
 
-    /// Intersects `row` with `set`. `set` can be either `BitSet` or
+    /// Intersects `row` with `set`. `set` can be either `DenseBitSet` or
     /// `ChunkedBitSet`. Has no effect if `row` does not exist.
     ///
     /// Returns true if the row was changed.
     pub fn intersect_row<Set>(&mut self, row: R, set: &Set) -> bool
     where
-        BitSet<C>: BitRelations<Set>,
+        DenseBitSet<C>: BitRelations<Set>,
     {
         match self.rows.get_mut(row) {
             Some(Some(row)) => row.intersect(set),
@@ -1647,13 +1700,13 @@ impl<R: Idx, C: Idx> SparseBitMatrix<R, C> {
         }
     }
 
-    /// Subtracts `set` from `row`. `set` can be either `BitSet` or
+    /// Subtracts `set` from `row`. `set` can be either `DenseBitSet` or
     /// `ChunkedBitSet`. Has no effect if `row` does not exist.
     ///
     /// Returns true if the row was changed.
     pub fn subtract_row<Set>(&mut self, row: R, set: &Set) -> bool
     where
-        BitSet<C>: BitRelations<Set>,
+        DenseBitSet<C>: BitRelations<Set>,
     {
         match self.rows.get_mut(row) {
             Some(Some(row)) => row.subtract(set),
@@ -1661,13 +1714,13 @@ impl<R: Idx, C: Idx> SparseBitMatrix<R, C> {
         }
     }
 
-    /// Unions `row` with `set`. `set` can be either `BitSet` or
+    /// Unions `row` with `set`. `set` can be either `DenseBitSet` or
     /// `ChunkedBitSet`.
     ///
     /// Returns true if the row was changed.
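For illustration only (not part of this diff), a sketch of how `SparseBitMatrix` instantiates rows lazily and how the row operations above accept a dense set; it assumes the usual `rustc_index::bit_set` exports, and the demo function name is ours:

use rustc_index::bit_set::{DenseBitSet, SparseBitMatrix};

fn sparse_bit_matrix_demo() {
    let mut matrix: SparseBitMatrix<usize, usize> = SparseBitMatrix::new(100);
    // No bit has been set in row 3 yet, so it has no explicit representation.
    assert!(matrix.row(3).is_none());

    matrix.insert(3, 42); // Instantiates rows 0..=3; earlier rows stay `None`.
    let mut cols = DenseBitSet::new_empty(100);
    cols.insert(7);
    // `union_row` goes through `ensure_row`, so it works even on untouched rows.
    matrix.union_row(4, &cols);
    assert!(matrix.contains(3, 42) && matrix.contains(4, 7));
}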
     pub fn union_row<Set>(&mut self, row: R, set: &Set) -> bool
     where
-        BitSet<C>: BitRelations<Set>,
+        DenseBitSet<C>: BitRelations<Set>,
     {
         self.ensure_row(row).union(set)
     }
diff --git a/compiler/rustc_index/src/bit_set/tests.rs b/compiler/rustc_index/src/bit_set/tests.rs
index f614232397935..eaa4aafe72134 100644
--- a/compiler/rustc_index/src/bit_set/tests.rs
+++ b/compiler/rustc_index/src/bit_set/tests.rs
@@ -8,7 +8,7 @@ use test::Bencher;
 
 #[test]
 fn test_new_filled() {
     for i in 0..128 {
-        let idx_buf = BitSet::new_filled(i);
+        let idx_buf = DenseBitSet::new_filled(i);
         let elems: Vec<usize> = idx_buf.iter().collect();
         let expected: Vec<usize> = (0..i).collect();
         assert_eq!(elems, expected);
@@ -17,7 +17,7 @@ fn test_new_filled() {
 
 #[test]
 fn bitset_iter_works() {
-    let mut bitset: BitSet<usize> = BitSet::new_empty(100);
+    let mut bitset: DenseBitSet<usize> = DenseBitSet::new_empty(100);
     bitset.insert(1);
     bitset.insert(10);
     bitset.insert(19);
@@ -32,7 +32,7 @@ fn bitset_iter_works() {
 
 #[test]
 fn bitset_iter_works_2() {
-    let mut bitset: BitSet<usize> = BitSet::new_empty(320);
+    let mut bitset: DenseBitSet<usize> = DenseBitSet::new_empty(320);
     bitset.insert(0);
     bitset.insert(127);
     bitset.insert(191);
@@ -43,25 +43,25 @@ fn bitset_iter_works_2() {
 
 #[test]
 fn bitset_clone_from() {
-    let mut a: BitSet<usize> = BitSet::new_empty(10);
+    let mut a: DenseBitSet<usize> = DenseBitSet::new_empty(10);
     a.insert(4);
     a.insert(7);
     a.insert(9);
 
-    let mut b = BitSet::new_empty(2);
+    let mut b = DenseBitSet::new_empty(2);
     b.clone_from(&a);
     assert_eq!(b.domain_size(), 10);
     assert_eq!(b.iter().collect::<Vec<_>>(), [4, 7, 9]);
 
-    b.clone_from(&BitSet::new_empty(40));
+    b.clone_from(&DenseBitSet::new_empty(40));
     assert_eq!(b.domain_size(), 40);
     assert_eq!(b.iter().collect::<Vec<_>>(), []);
 }
 
 #[test]
 fn union_two_sets() {
-    let mut set1: BitSet<usize> = BitSet::new_empty(65);
-    let mut set2: BitSet<usize> = BitSet::new_empty(65);
+    let mut set1: DenseBitSet<usize> = DenseBitSet::new_empty(65);
+    let mut set2: DenseBitSet<usize> = DenseBitSet::new_empty(65);
     assert!(set1.insert(3));
     assert!(!set1.insert(3));
     assert!(set2.insert(5));
@@ -75,6 +75,32 @@ fn union_two_sets() {
     assert!(set1.contains(64));
 }
 
+#[test]
+fn union_not() {
+    let mut a = DenseBitSet::<usize>::new_empty(100);
+    let mut b = DenseBitSet::<usize>::new_empty(100);
+
+    a.insert(3);
+    a.insert(5);
+    a.insert(80);
+    a.insert(81);
+
+    b.insert(5); // Already in `a`.
+    b.insert(7);
+    b.insert(63);
+    b.insert(81); // Already in `a`.
+    b.insert(90);
+
+    a.union_not(&b);
+
+    // After union-not, `a` should contain all values in the domain, except for
+    // the ones that are in `b` and were _not_ already in `a`.
+    assert_eq!(
+        a.iter().collect::<Vec<_>>(),
+        (0usize..100).filter(|&x| !matches!(x, 7 | 63 | 90)).collect::<Vec<_>>(),
+    );
+}
+
 #[test]
 fn chunked_bitset() {
     let mut b0 = ChunkedBitSet::<usize>::new_empty(0);
@@ -268,8 +294,8 @@ fn with_elements_chunked(elements: &[usize], domain_size: usize) -> ChunkedBitSe
     s
 }
 
-fn with_elements_standard(elements: &[usize], domain_size: usize) -> BitSet<usize> {
-    let mut s = BitSet::new_empty(domain_size);
+fn with_elements_standard(elements: &[usize], domain_size: usize) -> DenseBitSet<usize> {
+    let mut s = DenseBitSet::new_empty(domain_size);
     for &e in elements {
         assert!(s.insert(e));
     }
@@ -503,15 +529,15 @@ fn sparse_matrix_operations() {
     matrix.insert(2, 99);
     matrix.insert(4, 0);
 
-    let mut disjoint: BitSet<usize> = BitSet::new_empty(100);
+    let mut disjoint: DenseBitSet<usize> = DenseBitSet::new_empty(100);
     disjoint.insert(33);
 
-    let mut superset = BitSet::new_empty(100);
+    let mut superset = DenseBitSet::new_empty(100);
     superset.insert(22);
     superset.insert(75);
     superset.insert(33);
 
-    let mut subset = BitSet::new_empty(100);
+    let mut subset = DenseBitSet::new_empty(100);
     subset.insert(22);
 
     // SparseBitMatrix::remove
@@ -568,7 +594,7 @@ fn dense_insert_range() {
     where
         R: RangeBounds<usize> + Clone + IntoIterator<Item = usize> + std::fmt::Debug,
     {
-        let mut set = BitSet::new_empty(domain);
+        let mut set = DenseBitSet::new_empty(domain);
         set.insert_range(range.clone());
         for i in set.iter() {
             assert!(range.contains(&i));
@@ -609,7 +635,7 @@ fn dense_insert_range() {
 
 #[test]
 fn dense_last_set_before() {
-    fn easy(set: &BitSet<usize>, needle: impl RangeBounds<usize>) -> Option<usize> {
+    fn easy(set: &DenseBitSet<usize>, needle: impl RangeBounds<usize>) -> Option<usize> {
         let mut last_leq = None;
         for e in set.iter() {
             if needle.contains(&e) {
@@ -620,7 +646,7 @@ fn dense_last_set_before() {
     }
 
     #[track_caller]
-    fn cmp(set: &BitSet<usize>, needle: impl RangeBounds<usize> + Clone + std::fmt::Debug) {
+    fn cmp(set: &DenseBitSet<usize>, needle: impl RangeBounds<usize> + Clone + std::fmt::Debug) {
         assert_eq!(
             set.last_set_in(needle.clone()),
             easy(set, needle.clone()),
@@ -629,7 +655,7 @@ fn dense_last_set_before() {
             set
         );
     }
-    let mut set = BitSet::new_empty(300);
+    let mut set = DenseBitSet::new_empty(300);
     cmp(&set, 50..=50);
     set.insert(WORD_BITS);
     cmp(&set, WORD_BITS..=WORD_BITS);
@@ -645,7 +671,7 @@ fn dense_last_set_before() {
     for i in 0..=WORD_BITS * 2 {
         for j in i..=WORD_BITS * 2 {
             for k in 0..WORD_BITS * 2 {
-                let mut set = BitSet::new_empty(300);
+                let mut set = DenseBitSet::new_empty(300);
                 cmp(&set, i..j);
                 cmp(&set, i..=j);
                 set.insert(k);
@@ -658,7 +684,7 @@ fn dense_last_set_before() {
 
 #[bench]
 fn bench_insert(b: &mut Bencher) {
-    let mut bs = BitSet::new_filled(99999usize);
+    let mut bs = DenseBitSet::new_filled(99999usize);
     b.iter(|| {
         black_box(bs.insert(black_box(100u32)));
     });
@@ -666,7 +692,7 @@ fn bench_insert(b: &mut Bencher) {
 
 #[bench]
 fn bench_remove(b: &mut Bencher) {
-    let mut bs = BitSet::new_filled(99999usize);
+    let mut bs = DenseBitSet::new_filled(99999usize);
    b.iter(|| {
         black_box(bs.remove(black_box(100u32)));
     });
@@ -674,7 +700,7 @@ fn bench_remove(b: &mut Bencher) {
 
 #[bench]
 fn bench_iter(b: &mut Bencher) {
-    let bs = BitSet::new_filled(99999usize);
+    let bs = DenseBitSet::new_filled(99999usize);
     b.iter(|| {
         bs.iter().map(|b: usize| black_box(b)).for_each(drop);
     });
@@ -682,8 +708,8 @@ fn bench_iter(b: &mut Bencher) {
 
 #[bench]
 fn bench_intersect(b: &mut Bencher) {
-    let mut ba: BitSet<u32> = BitSet::new_filled(99999usize);
-    let bb = BitSet::new_filled(99999usize);
+    let mut ba: DenseBitSet<u32> = DenseBitSet::new_filled(99999usize);
+    let bb =
DenseBitSet::new_filled(99999usize); b.iter(|| { ba.intersect(black_box(&bb)); }); diff --git a/compiler/rustc_index_macros/Cargo.toml b/compiler/rustc_index_macros/Cargo.toml index a7c2a1804ddb8..98bc1b6a29bba 100644 --- a/compiler/rustc_index_macros/Cargo.toml +++ b/compiler/rustc_index_macros/Cargo.toml @@ -12,5 +12,4 @@ proc-macro2 = "1" quote = "1" [features] -default = ["nightly"] nightly = [] diff --git a/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs b/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs index fe66d306ceb80..c47f27e871f7a 100644 --- a/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs +++ b/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs @@ -441,6 +441,7 @@ impl<'cx, 'tcx> TypeFolder> for Canonicalizer<'cx, 'tcx> { | ty::FnDef(..) | ty::FnPtr(..) | ty::Dynamic(..) + | ty::UnsafeBinder(_) | ty::Never | ty::Tuple(..) | ty::Alias(..) diff --git a/compiler/rustc_infer/src/infer/canonical/query_response.rs b/compiler/rustc_infer/src/infer/canonical/query_response.rs index 1d3d32ef74979..23f63af778d07 100644 --- a/compiler/rustc_infer/src/infer/canonical/query_response.rs +++ b/compiler/rustc_infer/src/infer/canonical/query_response.rs @@ -155,12 +155,12 @@ impl<'tcx> InferCtxt<'tcx> { .opaque_type_storage .opaque_types .iter() - .map(|(k, v)| (*k, v.hidden_type.ty)) + .map(|(k, v)| (*k, v.ty)) .collect() } fn take_opaque_types_for_query_response(&self) -> Vec<(ty::OpaqueTypeKey<'tcx>, Ty<'tcx>)> { - self.take_opaque_types().into_iter().map(|(k, v)| (k, v.hidden_type.ty)).collect() + self.take_opaque_types().into_iter().map(|(k, v)| (k, v.ty)).collect() } /// Given the (canonicalized) result to a canonical query, @@ -316,16 +316,6 @@ impl<'tcx> InferCtxt<'tcx> { }), ); - // ...also include the query member constraints. 
- output_query_region_constraints.member_constraints.extend( - query_response - .value - .region_constraints - .member_constraints - .iter() - .map(|p_c| instantiate_value(self.tcx, &result_args, p_c.clone())), - ); - let user_result: R = query_response.instantiate_projected(self.tcx, &result_args, |q_r| q_r.value.clone()); @@ -643,7 +633,7 @@ pub fn make_query_region_constraints<'tcx>( outlives_obligations: impl Iterator, ty::Region<'tcx>, ConstraintCategory<'tcx>)>, region_constraints: &RegionConstraintData<'tcx>, ) -> QueryRegionConstraints<'tcx> { - let RegionConstraintData { constraints, verifys, member_constraints } = region_constraints; + let RegionConstraintData { constraints, verifys } = region_constraints; assert!(verifys.is_empty()); @@ -674,5 +664,5 @@ pub fn make_query_region_constraints<'tcx>( })) .collect(); - QueryRegionConstraints { outlives, member_constraints: member_constraints.clone() } + QueryRegionConstraints { outlives } } diff --git a/compiler/rustc_infer/src/infer/mod.rs b/compiler/rustc_infer/src/infer/mod.rs index 544f941dda574..283ebdfa236be 100644 --- a/compiler/rustc_infer/src/infer/mod.rs +++ b/compiler/rustc_infer/src/infer/mod.rs @@ -17,7 +17,6 @@ pub use relate::StructurallyRelateAliases; pub use relate::combine::PredicateEmittingRelation; use rustc_data_structures::captures::Captures; use rustc_data_structures::fx::{FxHashSet, FxIndexMap}; -use rustc_data_structures::sync::Lrc; use rustc_data_structures::undo_log::{Rollback, UndoLogs}; use rustc_data_structures::unify as ut; use rustc_errors::{DiagCtxtHandle, ErrorGuaranteed}; @@ -41,8 +40,7 @@ use rustc_middle::ty::{ GenericParamDefKind, InferConst, IntVid, PseudoCanonicalInput, Ty, TyCtxt, TyVid, TypeVisitable, TypingEnv, TypingMode, }; -use rustc_span::Span; -use rustc_span::symbol::Symbol; +use rustc_span::{Span, Symbol}; use snapshot::undo_log::InferCtxtUndoLogs; use tracing::{debug, instrument}; use type_variable::TypeVariableOrigin; @@ -236,7 +234,7 @@ impl<'tcx> InferCtxtInner<'tcx> { pub fn iter_opaque_types( &self, ) -> impl Iterator, ty::OpaqueHiddenType<'tcx>)> + '_ { - self.opaque_type_storage.opaque_types.iter().map(|(&k, v)| (k, v.hidden_type)) + self.opaque_type_storage.opaque_types.iter().map(|(&k, &v)| (k, v)) } } @@ -686,26 +684,6 @@ impl<'tcx> InferCtxt<'tcx> { self.inner.borrow_mut().unwrap_region_constraints().make_subregion(origin, a, b); } - /// Require that the region `r` be equal to one of the regions in - /// the set `regions`. - #[instrument(skip(self), level = "debug")] - pub fn member_constraint( - &self, - key: ty::OpaqueTypeKey<'tcx>, - definition_span: Span, - hidden_ty: Ty<'tcx>, - region: ty::Region<'tcx>, - in_regions: Lrc>>, - ) { - self.inner.borrow_mut().unwrap_region_constraints().member_constraint( - key, - definition_span, - hidden_ty, - region, - in_regions, - ); - } - /// Processes a `Coerce` predicate from the fulfillment context. /// This is NOT the preferred way to handle coercion, which is to /// invoke `FnCtxt::coerce` or a similar method (see `coercion.rs`). @@ -967,7 +945,7 @@ impl<'tcx> InferCtxt<'tcx> { /// Clone the list of variable regions. This is used only during NLL processing /// to put the set of region variables into the NLL region context. 
- pub fn get_region_var_origins(&self) -> VarInfos { + pub fn get_region_var_infos(&self) -> VarInfos { let inner = self.inner.borrow(); assert!(!UndoLogs::>::in_snapshot(&inner.undo_log)); let storage = inner.region_constraint_storage.as_ref().expect("regions already resolved"); diff --git a/compiler/rustc_infer/src/infer/opaque_types/mod.rs b/compiler/rustc_infer/src/infer/opaque_types/mod.rs index b64686afd2340..f6ef3f40e622d 100644 --- a/compiler/rustc_infer/src/infer/opaque_types/mod.rs +++ b/compiler/rustc_infer/src/infer/opaque_types/mod.rs @@ -1,6 +1,5 @@ use hir::def_id::{DefId, LocalDefId}; use rustc_data_structures::fx::FxIndexMap; -use rustc_data_structures::sync::Lrc; use rustc_hir as hir; use rustc_middle::bug; use rustc_middle::traits::ObligationCause; @@ -8,8 +7,7 @@ use rustc_middle::traits::solve::Goal; use rustc_middle::ty::error::{ExpectedFound, TypeError}; use rustc_middle::ty::fold::BottomUpFolder; use rustc_middle::ty::{ - self, GenericArgKind, OpaqueHiddenType, OpaqueTypeKey, Ty, TyCtxt, TypeFoldable, - TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor, + self, OpaqueHiddenType, OpaqueTypeKey, Ty, TyCtxt, TypeFoldable, TypeVisitableExt, }; use rustc_span::Span; use tracing::{debug, instrument}; @@ -21,20 +19,9 @@ use crate::traits::{self, Obligation, PredicateObligations}; mod table; -pub(crate) type OpaqueTypeMap<'tcx> = FxIndexMap, OpaqueTypeDecl<'tcx>>; +pub(crate) type OpaqueTypeMap<'tcx> = FxIndexMap, OpaqueHiddenType<'tcx>>; pub(crate) use table::{OpaqueTypeStorage, OpaqueTypeTable}; -/// Information about the opaque types whose values we -/// are inferring in this function (these are the `impl Trait` that -/// appear in the return type). -#[derive(Clone, Debug)] -pub struct OpaqueTypeDecl<'tcx> { - /// The hidden types that have been inferred for this opaque type. - /// There can be multiple, but they are all `lub`ed together at the end - /// to obtain the canonical hidden type. - pub hidden_type: OpaqueHiddenType<'tcx>, -} - impl<'tcx> InferCtxt<'tcx> { /// This is a backwards compatibility hack to prevent breaking changes from /// lazy TAIT around RPIT handling. @@ -181,289 +168,6 @@ impl<'tcx> InferCtxt<'tcx> { Err(TypeError::Sorts(ExpectedFound::new(a, b))) } } - - /// Given the map `opaque_types` containing the opaque - /// `impl Trait` types whose underlying, hidden types are being - /// inferred, this method adds constraints to the regions - /// appearing in those underlying hidden types to ensure that they - /// at least do not refer to random scopes within the current - /// function. These constraints are not (quite) sufficient to - /// guarantee that the regions are actually legal values; that - /// final condition is imposed after region inference is done. - /// - /// # The Problem - /// - /// Let's work through an example to explain how it works. Assume - /// the current function is as follows: - /// - /// ```text - /// fn foo<'a, 'b>(..) -> (impl Bar<'a>, impl Bar<'b>) - /// ``` - /// - /// Here, we have two `impl Trait` types whose values are being - /// inferred (the `impl Bar<'a>` and the `impl - /// Bar<'b>`). Conceptually, this is sugar for a setup where we - /// define underlying opaque types (`Foo1`, `Foo2`) and then, in - /// the return type of `foo`, we *reference* those definitions: - /// - /// ```text - /// type Foo1<'x> = impl Bar<'x>; - /// type Foo2<'x> = impl Bar<'x>; - /// fn foo<'a, 'b>(..) -> (Foo1<'a>, Foo2<'b>) { .. 
} - /// // ^^^^ ^^ - /// // | | - /// // | args - /// // def_id - /// ``` - /// - /// As indicating in the comments above, each of those references - /// is (in the compiler) basically generic parameters (`args`) - /// applied to the type of a suitable `def_id` (which identifies - /// `Foo1` or `Foo2`). - /// - /// Now, at this point in compilation, what we have done is to - /// replace each of the references (`Foo1<'a>`, `Foo2<'b>`) with - /// fresh inference variables C1 and C2. We wish to use the values - /// of these variables to infer the underlying types of `Foo1` and - /// `Foo2`. That is, this gives rise to higher-order (pattern) unification - /// constraints like: - /// - /// ```text - /// for<'a> (Foo1<'a> = C1) - /// for<'b> (Foo1<'b> = C2) - /// ``` - /// - /// For these equation to be satisfiable, the types `C1` and `C2` - /// can only refer to a limited set of regions. For example, `C1` - /// can only refer to `'static` and `'a`, and `C2` can only refer - /// to `'static` and `'b`. The job of this function is to impose that - /// constraint. - /// - /// Up to this point, C1 and C2 are basically just random type - /// inference variables, and hence they may contain arbitrary - /// regions. In fact, it is fairly likely that they do! Consider - /// this possible definition of `foo`: - /// - /// ```text - /// fn foo<'a, 'b>(x: &'a i32, y: &'b i32) -> (impl Bar<'a>, impl Bar<'b>) { - /// (&*x, &*y) - /// } - /// ``` - /// - /// Here, the values for the concrete types of the two impl - /// traits will include inference variables: - /// - /// ```text - /// &'0 i32 - /// &'1 i32 - /// ``` - /// - /// Ordinarily, the subtyping rules would ensure that these are - /// sufficiently large. But since `impl Bar<'a>` isn't a specific - /// type per se, we don't get such constraints by default. This - /// is where this function comes into play. It adds extra - /// constraints to ensure that all the regions which appear in the - /// inferred type are regions that could validly appear. - /// - /// This is actually a bit of a tricky constraint in general. We - /// want to say that each variable (e.g., `'0`) can only take on - /// values that were supplied as arguments to the opaque type - /// (e.g., `'a` for `Foo1<'a>`) or `'static`, which is always in - /// scope. We don't have a constraint quite of this kind in the current - /// region checker. - /// - /// # The Solution - /// - /// We generally prefer to make `<=` constraints, since they - /// integrate best into the region solver. To do that, we find the - /// "minimum" of all the arguments that appear in the args: that - /// is, some region which is less than all the others. In the case - /// of `Foo1<'a>`, that would be `'a` (it's the only choice, after - /// all). Then we apply that as a least bound to the variables - /// (e.g., `'a <= '0`). - /// - /// In some cases, there is no minimum. Consider this example: - /// - /// ```text - /// fn baz<'a, 'b>() -> impl Trait<'a, 'b> { ... } - /// ``` - /// - /// Here we would report a more complex "in constraint", like `'r - /// in ['a, 'b, 'static]` (where `'r` is some region appearing in - /// the hidden type). - /// - /// # Constrain regions, not the hidden concrete type - /// - /// Note that generating constraints on each region `Rc` is *not* - /// the same as generating an outlives constraint on `Tc` itself. 
- /// For example, if we had a function like this: - /// - /// ``` - /// # #![feature(type_alias_impl_trait)] - /// # fn main() {} - /// # trait Foo<'a> {} - /// # impl<'a, T> Foo<'a> for (&'a u32, T) {} - /// fn foo<'a, T>(x: &'a u32, y: T) -> impl Foo<'a> { - /// (x, y) - /// } - /// - /// // Equivalent to: - /// # mod dummy { use super::*; - /// type FooReturn<'a, T> = impl Foo<'a>; - /// fn foo<'a, T>(x: &'a u32, y: T) -> FooReturn<'a, T> { - /// (x, y) - /// } - /// # } - /// ``` - /// - /// then the hidden type `Tc` would be `(&'0 u32, T)` (where `'0` - /// is an inference variable). If we generated a constraint that - /// `Tc: 'a`, then this would incorrectly require that `T: 'a` -- - /// but this is not necessary, because the opaque type we - /// create will be allowed to reference `T`. So we only generate a - /// constraint that `'0: 'a`. - #[instrument(level = "debug", skip(self))] - pub fn register_member_constraints( - &self, - opaque_type_key: OpaqueTypeKey<'tcx>, - concrete_ty: Ty<'tcx>, - span: Span, - ) { - let concrete_ty = self.resolve_vars_if_possible(concrete_ty); - debug!(?concrete_ty); - - let variances = self.tcx.variances_of(opaque_type_key.def_id); - debug!(?variances); - - // For a case like `impl Foo<'a, 'b>`, we would generate a constraint - // `'r in ['a, 'b, 'static]` for each region `'r` that appears in the - // hidden type (i.e., it must be equal to `'a`, `'b`, or `'static`). - // - // `conflict1` and `conflict2` are the two region bounds that we - // detected which were unrelated. They are used for diagnostics. - - // Create the set of choice regions: each region in the hidden - // type can be equal to any of the region parameters of the - // opaque type definition. - let choice_regions: Lrc>> = Lrc::new( - opaque_type_key - .args - .iter() - .enumerate() - .filter(|(i, _)| variances[*i] == ty::Invariant) - .filter_map(|(_, arg)| match arg.unpack() { - GenericArgKind::Lifetime(r) => Some(r), - GenericArgKind::Type(_) | GenericArgKind::Const(_) => None, - }) - .chain(std::iter::once(self.tcx.lifetimes.re_static)) - .collect(), - ); - - // FIXME(#42940): This should use the `FreeRegionsVisitor`, but that's - // not currently sound until we have existential regions. - concrete_ty.visit_with(&mut ConstrainOpaqueTypeRegionVisitor { - tcx: self.tcx, - op: |r| { - self.member_constraint( - opaque_type_key, - span, - concrete_ty, - r, - Lrc::clone(&choice_regions), - ) - }, - }); - } -} - -/// Visitor that requires that (almost) all regions in the type visited outlive -/// `least_region`. We cannot use `push_outlives_components` because regions in -/// closure signatures are not included in their outlives components. We need to -/// ensure all regions outlive the given bound so that we don't end up with, -/// say, `ReVar` appearing in a return type and causing ICEs when other -/// functions end up with region constraints involving regions from other -/// functions. -/// -/// We also cannot use `for_each_free_region` because for closures it includes -/// the regions parameters from the enclosing item. -/// -/// We ignore any type parameters because impl trait values are assumed to -/// capture all the in-scope type parameters. 
-struct ConstrainOpaqueTypeRegionVisitor<'tcx, OP: FnMut(ty::Region<'tcx>)> { - tcx: TyCtxt<'tcx>, - op: OP, -} - -impl<'tcx, OP> TypeVisitor> for ConstrainOpaqueTypeRegionVisitor<'tcx, OP> -where - OP: FnMut(ty::Region<'tcx>), -{ - fn visit_binder>>(&mut self, t: &ty::Binder<'tcx, T>) { - t.super_visit_with(self); - } - - fn visit_region(&mut self, r: ty::Region<'tcx>) { - match *r { - // ignore bound regions, keep visiting - ty::ReBound(_, _) => {} - _ => (self.op)(r), - } - } - - fn visit_ty(&mut self, ty: Ty<'tcx>) { - // We're only interested in types involving regions - if !ty.flags().intersects(ty::TypeFlags::HAS_FREE_REGIONS) { - return; - } - - match ty.kind() { - ty::Closure(_, args) => { - // Skip lifetime parameters of the enclosing item(s) - - for upvar in args.as_closure().upvar_tys() { - upvar.visit_with(self); - } - args.as_closure().sig_as_fn_ptr_ty().visit_with(self); - } - - ty::CoroutineClosure(_, args) => { - // Skip lifetime parameters of the enclosing item(s) - - for upvar in args.as_coroutine_closure().upvar_tys() { - upvar.visit_with(self); - } - - args.as_coroutine_closure().signature_parts_ty().visit_with(self); - } - - ty::Coroutine(_, args) => { - // Skip lifetime parameters of the enclosing item(s) - // Also skip the witness type, because that has no free regions. - - for upvar in args.as_coroutine().upvar_tys() { - upvar.visit_with(self); - } - args.as_coroutine().return_ty().visit_with(self); - args.as_coroutine().yield_ty().visit_with(self); - args.as_coroutine().resume_ty().visit_with(self); - } - - ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. }) => { - // Skip lifetime parameters that are not captures. - let variances = self.tcx.variances_of(*def_id); - - for (v, s) in std::iter::zip(variances, args.iter()) { - if *v != ty::Bivariant { - s.visit_with(self); - } - } - } - - _ => { - ty.super_visit_with(self); - } - } - } } impl<'tcx> InferCtxt<'tcx> { diff --git a/compiler/rustc_infer/src/infer/opaque_types/table.rs b/compiler/rustc_infer/src/infer/opaque_types/table.rs index 047d8edad3de7..ba6cc0d783dd3 100644 --- a/compiler/rustc_infer/src/infer/opaque_types/table.rs +++ b/compiler/rustc_infer/src/infer/opaque_types/table.rs @@ -3,7 +3,7 @@ use rustc_middle::bug; use rustc_middle::ty::{self, OpaqueHiddenType, OpaqueTypeKey, Ty}; use tracing::instrument; -use super::{OpaqueTypeDecl, OpaqueTypeMap}; +use super::OpaqueTypeMap; use crate::infer::snapshot::undo_log::{InferCtxtUndoLogs, UndoLog}; #[derive(Default, Debug, Clone)] @@ -11,15 +11,19 @@ pub(crate) struct OpaqueTypeStorage<'tcx> { /// Opaque types found in explicit return types and their /// associated fresh inference variable. Writeback resolves these /// variables to get the concrete type, which can be used to - /// 'de-opaque' OpaqueTypeDecl, after typeck is done with all functions. + /// 'de-opaque' OpaqueHiddenType, after typeck is done with all functions. pub opaque_types: OpaqueTypeMap<'tcx>, } impl<'tcx> OpaqueTypeStorage<'tcx> { #[instrument(level = "debug")] - pub(crate) fn remove(&mut self, key: OpaqueTypeKey<'tcx>, idx: Option>) { - if let Some(idx) = idx { - self.opaque_types.get_mut(&key).unwrap().hidden_type = idx; + pub(crate) fn remove( + &mut self, + key: OpaqueTypeKey<'tcx>, + prev: Option>, + ) { + if let Some(prev) = prev { + *self.opaque_types.get_mut(&key).unwrap() = prev; } else { // FIXME(#120456) - is `swap_remove` correct? 
match self.opaque_types.swap_remove(&key) { @@ -59,13 +63,12 @@ impl<'a, 'tcx> OpaqueTypeTable<'a, 'tcx> { key: OpaqueTypeKey<'tcx>, hidden_type: OpaqueHiddenType<'tcx>, ) -> Option> { - if let Some(decl) = self.storage.opaque_types.get_mut(&key) { - let prev = std::mem::replace(&mut decl.hidden_type, hidden_type); + if let Some(entry) = self.storage.opaque_types.get_mut(&key) { + let prev = std::mem::replace(entry, hidden_type); self.undo_log.push(UndoLog::OpaqueTypes(key, Some(prev))); return Some(prev.ty); } - let decl = OpaqueTypeDecl { hidden_type }; - self.storage.opaque_types.insert(key, decl); + self.storage.opaque_types.insert(key, hidden_type); self.undo_log.push(UndoLog::OpaqueTypes(key, None)); None } diff --git a/compiler/rustc_infer/src/infer/outlives/obligations.rs b/compiler/rustc_infer/src/infer/outlives/obligations.rs index b1d5d688295d9..0383c81f2af0d 100644 --- a/compiler/rustc_infer/src/infer/outlives/obligations.rs +++ b/compiler/rustc_infer/src/infer/outlives/obligations.rs @@ -363,6 +363,13 @@ where return; } + if alias_ty.has_non_region_infer() { + self.tcx + .dcx() + .span_delayed_bug(origin.span(), "an alias has infers during region solving"); + return; + } + // This case is thorny for inference. The fundamental problem is // that there are many cases where we have choice, and inference // doesn't like choice (the current region inference in @@ -388,26 +395,9 @@ where // Compute the bounds we can derive from the environment. This // is an "approximate" match -- in some cases, these bounds // may not apply. - let mut approx_env_bounds = self.verify_bound.approx_declared_bounds_from_env(alias_ty); + let approx_env_bounds = self.verify_bound.approx_declared_bounds_from_env(alias_ty); debug!(?approx_env_bounds); - // Remove outlives bounds that we get from the environment but - // which are also deducible from the trait. This arises (cc - // #55756) in cases where you have e.g., `>::Item: - // 'a` in the environment but `trait Foo<'b> { type Item: 'b - // }` in the trait definition. - approx_env_bounds.retain(|bound_outlives| { - // OK to skip binder because we only manipulate and compare against other values from - // the same binder. e.g. if we have (e.g.) `for<'a> >::Item: 'a` in - // `bound`, the `'a` will be a `^1` (bound, debruijn index == innermost) region. If the - // declaration is `trait Trait<'b> { type Item: 'b; }`, then - // `projection_declared_bounds_from_trait` will be invoked with `['b => ^1]` and so we - // will get `^1` returned. - let bound = bound_outlives.skip_binder(); - let ty::Alias(_, alias_ty) = bound.0.kind() else { bug!("expected AliasTy") }; - self.verify_bound.declared_bounds_from_definition(*alias_ty).all(|r| r != bound.1) - }); - // If declared bounds list is empty, the only applicable rule is // OutlivesProjectionComponent. 
If there are inference variables, // then, we can break down the outlives into more primitive @@ -425,7 +415,7 @@ where let is_opaque = alias_ty.kind(self.tcx) == ty::Opaque; if approx_env_bounds.is_empty() && trait_bounds.is_empty() - && (alias_ty.has_infer() || is_opaque) + && (alias_ty.has_infer_regions() || is_opaque) { debug!("no declared bounds"); let opt_variances = is_opaque.then(|| self.tcx.variances_of(alias_ty.def_id)); diff --git a/compiler/rustc_infer/src/infer/outlives/verify.rs b/compiler/rustc_infer/src/infer/outlives/verify.rs index 247fbc259652e..7a21c2883d1ac 100644 --- a/compiler/rustc_infer/src/infer/outlives/verify.rs +++ b/compiler/rustc_infer/src/infer/outlives/verify.rs @@ -192,7 +192,7 @@ impl<'cx, 'tcx> VerifyBoundCx<'cx, 'tcx> { /// Obviously these must be approximate -- they are in fact both *over* and /// and *under* approximated: /// - /// * Over-approximated because we erase regions, so + /// * Over-approximated because we don't consider equality of regions. /// * Under-approximated because we look for syntactic equality and so for complex types /// like `>::Item` or whatever we may fail to figure out /// all the subtleties. @@ -205,13 +205,14 @@ impl<'cx, 'tcx> VerifyBoundCx<'cx, 'tcx> { erased_ty: Ty<'tcx>, ) -> Vec> { let tcx = self.tcx; + let mut bounds = vec![]; // To start, collect bounds from user environment. Note that // parameter environments are already elaborated, so we don't // have to worry about that. - let param_bounds = self.caller_bounds.iter().copied().filter(move |outlives_predicate| { + bounds.extend(self.caller_bounds.iter().copied().filter(move |outlives_predicate| { super::test_type_match::can_match_erased_ty(tcx, *outlives_predicate, erased_ty) - }); + })); // Next, collect regions we scraped from the well-formedness // constraints in the fn signature. To do that, we walk the list @@ -224,37 +225,27 @@ impl<'cx, 'tcx> VerifyBoundCx<'cx, 'tcx> { // The problem is that the type of `x` is `&'a A`. To be // well-formed, then, A must outlive `'a`, but we don't know that // this holds from first principles. - let from_region_bound_pairs = - self.region_bound_pairs.iter().filter_map(|&OutlivesPredicate(p, r)| { - debug!( - "declared_generic_bounds_from_env_for_erased_ty: region_bound_pair = {:?}", - (r, p) - ); - // Fast path for the common case. - match (&p, erased_ty.kind()) { - // In outlive routines, all types are expected to be fully normalized. - // And therefore we can safely use structural equality for alias types. - (GenericKind::Param(p1), ty::Param(p2)) if p1 == p2 => {} - (GenericKind::Placeholder(p1), ty::Placeholder(p2)) if p1 == p2 => {} - (GenericKind::Alias(a1), ty::Alias(_, a2)) if a1.def_id == a2.def_id => {} - _ => return None, - } + bounds.extend(self.region_bound_pairs.iter().filter_map(|&OutlivesPredicate(p, r)| { + debug!( + "declared_generic_bounds_from_env_for_erased_ty: region_bound_pair = {:?}", + (r, p) + ); + // Fast path for the common case. + match (&p, erased_ty.kind()) { + // In outlive routines, all types are expected to be fully normalized. + // And therefore we can safely use structural equality for alias types. 
+ (GenericKind::Param(p1), ty::Param(p2)) if p1 == p2 => {} + (GenericKind::Placeholder(p1), ty::Placeholder(p2)) if p1 == p2 => {} + (GenericKind::Alias(a1), ty::Alias(_, a2)) if a1.def_id == a2.def_id => {} + _ => return None, + } - let p_ty = p.to_ty(tcx); - let erased_p_ty = self.tcx.erase_regions(p_ty); - (erased_p_ty == erased_ty) - .then_some(ty::Binder::dummy(ty::OutlivesPredicate(p_ty, r))) - }); + let p_ty = p.to_ty(tcx); + let erased_p_ty = self.tcx.erase_regions(p_ty); + (erased_p_ty == erased_ty).then_some(ty::Binder::dummy(ty::OutlivesPredicate(p_ty, r))) + })); - param_bounds - .chain(from_region_bound_pairs) - .inspect(|bound| { - debug!( - "declared_generic_bounds_from_env_for_erased_ty: result predicate = {:?}", - bound - ) - }) - .collect() + bounds } /// Given a projection like `>::Bar`, returns any bounds diff --git a/compiler/rustc_infer/src/infer/region_constraints/mod.rs b/compiler/rustc_infer/src/infer/region_constraints/mod.rs index 270217e26b78c..6496f38269a5e 100644 --- a/compiler/rustc_infer/src/infer/region_constraints/mod.rs +++ b/compiler/rustc_infer/src/infer/region_constraints/mod.rs @@ -4,7 +4,6 @@ use std::ops::Range; use std::{cmp, fmt, mem}; use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::sync::Lrc; use rustc_data_structures::undo_log::UndoLogs; use rustc_data_structures::unify as ut; use rustc_index::IndexVec; @@ -12,7 +11,6 @@ use rustc_macros::{TypeFoldable, TypeVisitable}; use rustc_middle::infer::unify_key::{RegionVariableValue, RegionVidKey}; use rustc_middle::ty::{self, ReBound, ReStatic, ReVar, Region, RegionVid, Ty, TyCtxt}; use rustc_middle::{bug, span_bug}; -use rustc_span::Span; use tracing::{debug, instrument}; use self::CombineMapType::*; @@ -22,8 +20,6 @@ use crate::infer::snapshot::undo_log::{InferCtxtUndoLogs, Snapshot}; mod leak_check; -pub use rustc_middle::infer::MemberConstraint; - #[derive(Clone, Default)] pub struct RegionConstraintStorage<'tcx> { /// For each `RegionVid`, the corresponding `RegionVariableOrigin`. @@ -73,11 +69,6 @@ pub struct RegionConstraintData<'tcx> { /// be a region variable (or neither, as it happens). pub constraints: Vec<(Constraint<'tcx>, SubregionOrigin<'tcx>)>, - /// Constraints of the form `R0 member of [R1, ..., Rn]`, meaning that - /// `R0` must be equal to one of the regions `R1..Rn`. These occur - /// with `impl Trait` quite frequently. - pub member_constraints: Vec>, - /// A "verify" is something that we need to verify after inference /// is done, but which does not directly affect inference in any /// way. @@ -308,10 +299,6 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { self.storage.var_infos.len() } - pub fn region_constraint_data(&self) -> &RegionConstraintData<'tcx> { - &self.storage.data - } - /// Takes (and clears) the current set of constraints. Note that /// the set of variables remains intact, but all relationships /// between them are reset. 
This is used during NLL checking to @@ -466,29 +453,6 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { } } - pub(super) fn member_constraint( - &mut self, - key: ty::OpaqueTypeKey<'tcx>, - definition_span: Span, - hidden_ty: Ty<'tcx>, - member_region: ty::Region<'tcx>, - choice_regions: Lrc>>, - ) { - debug!("member_constraint({:?} in {:#?})", member_region, choice_regions); - - if choice_regions.iter().any(|&r| r == member_region) { - return; - } - - self.storage.data.member_constraints.push(MemberConstraint { - key, - definition_span, - hidden_ty, - member_region, - choice_regions, - }); - } - #[instrument(skip(self, origin), level = "debug")] pub(super) fn make_subregion( &mut self, @@ -745,8 +709,8 @@ impl<'tcx> RegionConstraintData<'tcx> { /// Returns `true` if this region constraint data contains no constraints, and `false` /// otherwise. pub fn is_empty(&self) -> bool { - let RegionConstraintData { constraints, member_constraints, verifys } = self; - constraints.is_empty() && member_constraints.is_empty() && verifys.is_empty() + let RegionConstraintData { constraints, verifys } = self; + constraints.is_empty() && verifys.is_empty() } } diff --git a/compiler/rustc_infer/src/traits/util.rs b/compiler/rustc_infer/src/traits/util.rs index a977bd15d3bb9..ab8ada1596c81 100644 --- a/compiler/rustc_infer/src/traits/util.rs +++ b/compiler/rustc_infer/src/traits/util.rs @@ -1,7 +1,6 @@ use rustc_data_structures::fx::FxHashSet; use rustc_middle::ty::{self, ToPolyTraitRef, TyCtxt}; -use rustc_span::Span; -use rustc_span::symbol::Ident; +use rustc_span::{Ident, Span}; pub use rustc_type_ir::elaborate::*; use crate::traits::{self, Obligation, ObligationCauseCode, PredicateObligation}; diff --git a/compiler/rustc_interface/src/interface.rs b/compiler/rustc_interface/src/interface.rs index 91f190c6a28de..1456255ea14cd 100644 --- a/compiler/rustc_interface/src/interface.rs +++ b/compiler/rustc_interface/src/interface.rs @@ -22,9 +22,8 @@ use rustc_session::config::{self, Cfg, CheckCfg, ExpectedValues, Input, OutFileN use rustc_session::filesearch::{self, sysroot_candidates}; use rustc_session::parse::ParseSess; use rustc_session::{CompilerIO, EarlyDiagCtxt, Session, lint}; -use rustc_span::FileName; use rustc_span::source_map::{FileLoader, RealFileLoader, SourceMapInputs}; -use rustc_span::symbol::sym; +use rustc_span::{FileName, sym}; use tracing::trace; use crate::util; diff --git a/compiler/rustc_interface/src/lib.rs b/compiler/rustc_interface/src/lib.rs index 1c4dda2a4367e..a2a29612e489f 100644 --- a/compiler/rustc_interface/src/lib.rs +++ b/compiler/rustc_interface/src/lib.rs @@ -8,7 +8,7 @@ // tidy-alphabetical-end mod callbacks; -mod errors; +pub mod errors; pub mod interface; pub mod passes; mod proc_macro_decls; @@ -17,8 +17,8 @@ pub mod util; pub use callbacks::setup_callbacks; pub use interface::{Config, run_compiler}; -pub use passes::DEFAULT_QUERY_PROVIDERS; -pub use queries::{Linker, Queries}; +pub use passes::{DEFAULT_QUERY_PROVIDERS, create_and_enter_global_ctxt, parse}; +pub use queries::Linker; #[cfg(test)] mod tests; diff --git a/compiler/rustc_interface/src/passes.rs b/compiler/rustc_interface/src/passes.rs index f8351a81bec64..aff66e48fbbee 100644 --- a/compiler/rustc_interface/src/passes.rs +++ b/compiler/rustc_interface/src/passes.rs @@ -19,7 +19,7 @@ use rustc_incremental::setup_dep_graph; use rustc_lint::{BufferedEarlyLint, EarlyCheckNode, LintStore, unerased_lint_store}; use rustc_metadata::creader::CStore; use rustc_middle::arena::Arena; -use rustc_middle::ty::{self, 
GlobalCtxt, RegisteredTools, TyCtxt}; +use rustc_middle::ty::{self, CurrentGcx, GlobalCtxt, RegisteredTools, TyCtxt}; use rustc_middle::util::Providers; use rustc_parse::{ new_parser_from_file, new_parser_from_source_str, unwrap_or_emit_fatal, validate_attr, @@ -32,8 +32,7 @@ use rustc_session::cstore::Untracked; use rustc_session::output::{collect_crate_types, filename_for_input, find_crate_name}; use rustc_session::search_paths::PathKind; use rustc_session::{Limit, Session}; -use rustc_span::symbol::{Symbol, sym}; -use rustc_span::{ErrorGuaranteed, FileName, SourceFileHash, SourceFileHashAlgorithm}; +use rustc_span::{ErrorGuaranteed, FileName, SourceFileHash, SourceFileHashAlgorithm, Symbol, sym}; use rustc_target::spec::PanicStrategy; use rustc_trait_selection::traits; use tracing::{info, instrument}; @@ -41,7 +40,7 @@ use tracing::{info, instrument}; use crate::interface::Compiler; use crate::{errors, proc_macro_decls, util}; -pub(crate) fn parse<'a>(sess: &'a Session) -> ast::Crate { +pub fn parse<'a>(sess: &'a Session) -> ast::Crate { let krate = sess .time("parse_crate", || { let mut parser = unwrap_or_emit_fatal(match &sess.io.input { @@ -711,13 +710,11 @@ pub static DEFAULT_QUERY_PROVIDERS: LazyLock = LazyLock::new(|| { *providers }); -pub(crate) fn create_global_ctxt<'tcx>( - compiler: &'tcx Compiler, +pub fn create_and_enter_global_ctxt FnOnce(TyCtxt<'tcx>) -> T>( + compiler: &Compiler, mut krate: rustc_ast::Crate, - gcx_cell: &'tcx OnceLock>, - arena: &'tcx WorkerLocal>, - hir_arena: &'tcx WorkerLocal>, -) -> &'tcx GlobalCtxt<'tcx> { + f: F, +) -> T { let sess = &compiler.sess; rustc_builtin_macros::cmdline_attrs::inject( @@ -765,44 +762,63 @@ pub(crate) fn create_global_ctxt<'tcx>( let incremental = dep_graph.is_fully_enabled(); - sess.time("setup_global_ctxt", || { - let qcx = gcx_cell.get_or_init(move || { - TyCtxt::create_global_ctxt( - sess, - crate_types, - stable_crate_id, - arena, - hir_arena, - untracked, - dep_graph, - rustc_query_impl::query_callbacks(arena), - rustc_query_impl::query_system( - providers.queries, - providers.extern_queries, - query_result_on_disk_cache, - incremental, - ), - providers.hooks, - compiler.current_gcx.clone(), - ) - }); - - qcx.enter(|tcx| { - let feed = tcx.create_crate_num(stable_crate_id).unwrap(); - assert_eq!(feed.key(), LOCAL_CRATE); - feed.crate_name(crate_name); + let gcx_cell = OnceLock::new(); + let arena = WorkerLocal::new(|_| Arena::default()); + let hir_arena = WorkerLocal::new(|_| rustc_hir::Arena::default()); + + // This closure is necessary to force rustc to perform the correct lifetime + // subtyping for GlobalCtxt::enter to be allowed. 
+ let inner: Box< + dyn for<'tcx> FnOnce( + &'tcx Session, + CurrentGcx, + &'tcx OnceLock>, + &'tcx WorkerLocal>, + &'tcx WorkerLocal>, + F, + ) -> T, + > = Box::new(move |sess, current_gcx, gcx_cell, arena, hir_arena, f| { + TyCtxt::create_global_ctxt( + gcx_cell, + sess, + crate_types, + stable_crate_id, + arena, + hir_arena, + untracked, + dep_graph, + rustc_query_impl::query_callbacks(arena), + rustc_query_impl::query_system( + providers.queries, + providers.extern_queries, + query_result_on_disk_cache, + incremental, + ), + providers.hooks, + current_gcx, + |tcx| { + let feed = tcx.create_crate_num(stable_crate_id).unwrap(); + assert_eq!(feed.key(), LOCAL_CRATE); + feed.crate_name(crate_name); + + let feed = tcx.feed_unit_query(); + feed.features_query(tcx.arena.alloc(rustc_expand::config::features( + tcx.sess, + &pre_configured_attrs, + crate_name, + ))); + feed.crate_for_resolver(tcx.arena.alloc(Steal::new((krate, pre_configured_attrs)))); + feed.output_filenames(Arc::new(outputs)); + + let res = f(tcx); + // FIXME maybe run finish even when a fatal error occured? or at least tcx.alloc_self_profile_query_strings()? + tcx.finish(); + res + }, + ) + }); - let feed = tcx.feed_unit_query(); - feed.features_query(tcx.arena.alloc(rustc_expand::config::features( - sess, - &pre_configured_attrs, - crate_name, - ))); - feed.crate_for_resolver(tcx.arena.alloc(Steal::new((krate, pre_configured_attrs)))); - feed.output_filenames(Arc::new(outputs)); - }); - qcx - }) + inner(&compiler.sess, compiler.current_gcx.clone(), &gcx_cell, &arena, &hir_arena, f) } /// Runs all analyses that we guarantee to run, even if errors were reported in earlier analyses. diff --git a/compiler/rustc_interface/src/proc_macro_decls.rs b/compiler/rustc_interface/src/proc_macro_decls.rs index 2c8014d8b3ab9..82593dbc2b7ac 100644 --- a/compiler/rustc_interface/src/proc_macro_decls.rs +++ b/compiler/rustc_interface/src/proc_macro_decls.rs @@ -2,7 +2,7 @@ use rustc_ast::attr; use rustc_hir::def_id::LocalDefId; use rustc_middle::query::Providers; use rustc_middle::ty::TyCtxt; -use rustc_span::symbol::sym; +use rustc_span::sym; fn proc_macro_decls_static(tcx: TyCtxt<'_>, (): ()) -> Option { let mut decls = None; diff --git a/compiler/rustc_interface/src/queries.rs b/compiler/rustc_interface/src/queries.rs index bb2ad3b3dd014..c8914c9be9c01 100644 --- a/compiler/rustc_interface/src/queries.rs +++ b/compiler/rustc_interface/src/queries.rs @@ -1,117 +1,18 @@ use std::any::Any; -use std::cell::{RefCell, RefMut}; use std::sync::Arc; -use rustc_ast as ast; use rustc_codegen_ssa::CodegenResults; use rustc_codegen_ssa::traits::CodegenBackend; -use rustc_data_structures::steal::Steal; use rustc_data_structures::svh::Svh; -use rustc_data_structures::sync::{OnceLock, WorkerLocal}; use rustc_hir::def_id::LOCAL_CRATE; -use rustc_middle::arena::Arena; use rustc_middle::dep_graph::DepGraph; -use rustc_middle::ty::{GlobalCtxt, TyCtxt}; +use rustc_middle::ty::TyCtxt; use rustc_session::Session; use rustc_session::config::{self, OutputFilenames, OutputType}; use crate::errors::FailedWritingFile; -use crate::interface::Compiler; use crate::passes; -/// Represent the result of a query. -/// -/// This result can be stolen once with the [`steal`] method and generated with the [`compute`] method. -/// -/// [`steal`]: Steal::steal -/// [`compute`]: Self::compute -pub struct Query { - /// `None` means no value has been computed yet. 
- result: RefCell>>, -} - -impl Query { - fn compute T>(&self, f: F) -> QueryResult<'_, T> { - QueryResult(RefMut::map( - self.result.borrow_mut(), - |r: &mut Option>| -> &mut Steal { - r.get_or_insert_with(|| Steal::new(f())) - }, - )) - } -} - -pub struct QueryResult<'a, T>(RefMut<'a, Steal>); - -impl<'a, T> std::ops::Deref for QueryResult<'a, T> { - type Target = RefMut<'a, Steal>; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl<'a, T> std::ops::DerefMut for QueryResult<'a, T> { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} - -impl<'a, 'tcx> QueryResult<'a, &'tcx GlobalCtxt<'tcx>> { - pub fn enter(&mut self, f: impl FnOnce(TyCtxt<'tcx>) -> T) -> T { - (*self.0).borrow().enter(f) - } -} - -pub struct Queries<'tcx> { - compiler: &'tcx Compiler, - gcx_cell: OnceLock>, - - arena: WorkerLocal>, - hir_arena: WorkerLocal>, - - parse: Query, - // This just points to what's in `gcx_cell`. - gcx: Query<&'tcx GlobalCtxt<'tcx>>, -} - -impl<'tcx> Queries<'tcx> { - pub fn new(compiler: &'tcx Compiler) -> Queries<'tcx> { - Queries { - compiler, - gcx_cell: OnceLock::new(), - arena: WorkerLocal::new(|_| Arena::default()), - hir_arena: WorkerLocal::new(|_| rustc_hir::Arena::default()), - parse: Query { result: RefCell::new(None) }, - gcx: Query { result: RefCell::new(None) }, - } - } - - pub fn finish(&'tcx self) { - if let Some(gcx) = self.gcx_cell.get() { - gcx.finish(); - } - } - - pub fn parse(&self) -> QueryResult<'_, ast::Crate> { - self.parse.compute(|| passes::parse(&self.compiler.sess)) - } - - pub fn global_ctxt(&'tcx self) -> QueryResult<'tcx, &'tcx GlobalCtxt<'tcx>> { - self.gcx.compute(|| { - let krate = self.parse().steal(); - - passes::create_global_ctxt( - self.compiler, - krate, - &self.gcx_cell, - &self.arena, - &self.hir_arena, - ) - }) - } -} - pub struct Linker { dep_graph: DepGraph, output_filenames: Arc, @@ -186,22 +87,3 @@ impl Linker { codegen_backend.link(sess, codegen_results, &self.output_filenames) } } - -impl Compiler { - pub fn enter(&self, f: F) -> T - where - F: for<'tcx> FnOnce(&'tcx Queries<'tcx>) -> T, - { - // Must declare `_timer` first so that it is dropped after `queries`. - let _timer; - let queries = Queries::new(self); - let ret = f(&queries); - - // The timer's lifetime spans the dropping of `queries`, which contains - // the global context. 
- _timer = self.sess.timer("free_global_ctxt"); - queries.finish(); - - ret - } -} diff --git a/compiler/rustc_interface/src/tests.rs b/compiler/rustc_interface/src/tests.rs index e76e9ca9f8529..07c4b8987216c 100644 --- a/compiler/rustc_interface/src/tests.rs +++ b/compiler/rustc_interface/src/tests.rs @@ -23,8 +23,8 @@ use rustc_session::utils::{CanonicalizedPath, NativeLib, NativeLibKind}; use rustc_session::{CompilerIO, EarlyDiagCtxt, Session, build_session, filesearch, getopts}; use rustc_span::edition::{DEFAULT_EDITION, Edition}; use rustc_span::source_map::{RealFileLoader, SourceMapInputs}; -use rustc_span::symbol::sym; -use rustc_span::{FileName, SourceFileHashAlgorithm}; +use rustc_span::{FileName, SourceFileHashAlgorithm, sym}; +use rustc_target::abi::Align; use rustc_target::spec::{ CodeModel, FramePointer, LinkerFlavorCli, MergeFunctions, OnBrokenPipe, PanicStrategy, RelocModel, RelroLevel, SanitizerSet, SplitDebuginfo, StackProtector, TlsModel, WasmCAbi, @@ -583,6 +583,7 @@ fn test_codegen_options_tracking_hash() { untracked!(dlltool, Some(PathBuf::from("custom_dlltool.exe"))); untracked!(extra_filename, String::from("extra-filename")); untracked!(incremental, Some(String::from("abc"))); + untracked!(inline_threshold, Some(0xf007ba11)); // `link_arg` is omitted because it just forwards to `link_args`. untracked!(link_args, vec![String::from("abc"), String::from("def")]); untracked!(link_self_contained, LinkSelfContained::on()); @@ -614,7 +615,6 @@ fn test_codegen_options_tracking_hash() { tracked!(embed_bitcode, false); tracked!(force_frame_pointers, FramePointer::Always); tracked!(force_unwind_tables, Some(true)); - tracked!(inline_threshold, Some(0xf007ba11)); tracked!(instrument_coverage, InstrumentCoverage::Yes); tracked!(link_dead_code, Some(true)); tracked!(linker_plugin_lto, LinkerPluginLto::LinkerPluginAuto); @@ -767,7 +767,11 @@ fn test_unstable_options_tracking_hash() { }) ); tracked!(codegen_backend, Some("abc".to_string())); - tracked!(coverage_options, CoverageOptions { level: CoverageLevel::Mcdc, no_mir_spans: true }); + tracked!(coverage_options, CoverageOptions { + level: CoverageLevel::Mcdc, + no_mir_spans: true, + discard_all_spans_in_codegen: true + }); tracked!(crate_attr, vec!["abc".to_string()]); tracked!(cross_crate_inline_threshold, InliningThreshold::Always); tracked!(debug_info_for_profiling, true); @@ -779,6 +783,7 @@ fn test_unstable_options_tracking_hash() { tracked!(dwarf_version, Some(5)); tracked!(embed_source, true); tracked!(emit_thin_lto, false); + tracked!(emscripten_wasm_eh, true); tracked!(export_executable_symbols, true); tracked!(fewer_names, Some(true)); tracked!(fixed_x18, true); @@ -803,6 +808,7 @@ fn test_unstable_options_tracking_hash() { tracked!(location_detail, LocationDetail { file: true, line: false, column: false }); tracked!(maximal_hir_to_mir_coverage, true); tracked!(merge_functions, Some(MergeFunctions::Disabled)); + tracked!(min_function_alignment, Some(Align::EIGHT)); tracked!(mir_emit_retag, true); tracked!(mir_enable_passes, vec![("DestProp".to_string(), false)]); tracked!(mir_keep_place_mention, true); diff --git a/compiler/rustc_interface/src/util.rs b/compiler/rustc_interface/src/util.rs index 54fbb743b9317..984b8104f539f 100644 --- a/compiler/rustc_interface/src/util.rs +++ b/compiler/rustc_interface/src/util.rs @@ -18,7 +18,7 @@ use rustc_session::{EarlyDiagCtxt, Session, filesearch}; use rustc_span::edit_distance::find_best_match_for_name; use rustc_span::edition::Edition; use 
rustc_span::source_map::SourceMapInputs; -use rustc_span::symbol::sym; +use rustc_span::sym; use rustc_target::spec::Target; use tracing::info; diff --git a/compiler/rustc_lint/messages.ftl b/compiler/rustc_lint/messages.ftl index 2ecd5051f9e9d..7522e21d0ef09 100644 --- a/compiler/rustc_lint/messages.ftl +++ b/compiler/rustc_lint/messages.ftl @@ -536,9 +536,6 @@ lint_non_camel_case_type = {$sort} `{$name}` should have an upper camel case nam .suggestion = convert the identifier to upper camel case .label = should have an UpperCamelCase name -lint_non_existent_doc_keyword = found non-existing keyword `{$keyword}` used in `#[doc(keyword = "...")]` - .help = only existing keywords are allowed in core/std - lint_non_fmt_panic = panic message is not a string literal .note = this usage of `{$name}!()` is deprecated; it will be a hard error in Rust 2021 .more_info_note = for more information, see diff --git a/compiler/rustc_lint/src/builtin.rs b/compiler/rustc_lint/src/builtin.rs index 2d3ecb6943c56..badc6c74a933f 100644 --- a/compiler/rustc_lint/src/builtin.rs +++ b/compiler/rustc_lint/src/builtin.rs @@ -39,8 +39,7 @@ pub use rustc_session::lint::builtin::*; use rustc_session::{declare_lint, declare_lint_pass, impl_lint_pass}; use rustc_span::edition::Edition; use rustc_span::source_map::Spanned; -use rustc_span::symbol::{Ident, Symbol, kw, sym}; -use rustc_span::{BytePos, InnerSpan, Span}; +use rustc_span::{BytePos, Ident, InnerSpan, Span, Symbol, kw, sym}; use rustc_target::asm::InlineAsmArch; use rustc_trait_selection::infer::{InferCtxtExt, TyCtxtInferExt}; use rustc_trait_selection::traits::misc::type_allowed_to_implement_copy; @@ -1023,7 +1022,7 @@ impl<'tcx> LateLintPass<'tcx> for InvalidNoMangleItems { GenericParamKind::Lifetime { .. } => {} GenericParamKind::Type { .. } | GenericParamKind::Const { .. } => { cx.emit_span_lint(NO_MANGLE_GENERIC_ITEMS, span, BuiltinNoMangleGeneric { - suggestion: no_mangle_attr.span, + suggestion: no_mangle_attr.span(), }); break; } @@ -1031,7 +1030,7 @@ impl<'tcx> LateLintPass<'tcx> for InvalidNoMangleItems { } }; match it.kind { - hir::ItemKind::Fn(.., generics, _) => { + hir::ItemKind::Fn { generics, .. } => { if let Some(no_mangle_attr) = attr::find_by_name(attrs, sym::no_mangle) { check_no_mangle_on_generic_fn(no_mangle_attr, None, generics, it.span); } @@ -1236,7 +1235,7 @@ impl<'tcx> LateLintPass<'tcx> for UngatedAsyncFnTrackCaller { { cx.emit_span_lint( UNGATED_ASYNC_FN_TRACK_CALLER, - attr.span, + attr.span(), BuiltinUngatedAsyncFnTrackCaller { label: span, session: &cx.tcx.sess }, ); } @@ -1245,8 +1244,8 @@ impl<'tcx> LateLintPass<'tcx> for UngatedAsyncFnTrackCaller { declare_lint! { /// The `unreachable_pub` lint triggers for `pub` items not reachable from other crates - that - /// means neither directly accessible, nor reexported, nor leaked through things like return - /// types. + /// means neither directly accessible, nor reexported (with `pub use`), nor leaked through + /// things like return types (which the [`unnameable_types`] lint can detect if desired). /// /// ### Example /// @@ -1272,9 +1271,10 @@ declare_lint! { /// `pub(crate)` visibility is recommended to be used instead. This more clearly expresses the /// intent that the item is only visible within its own crate. /// - /// This lint is "allow" by default because it will trigger for a large - /// amount existing Rust code, and has some false-positives. Eventually it - /// is desired for this to become warn-by-default. 
+ /// This lint is "allow" by default because it will trigger for a large amount of existing Rust code. + /// Eventually it is desired for this to become warn-by-default. + /// + /// [`unnameable_types`]: #unnameable-types pub UNREACHABLE_PUB, Allow, "`pub` items not reachable from crate root" @@ -1303,9 +1303,9 @@ impl UnreachablePub { cx.effective_visibilities.effective_vis(def_id).map(|effective_vis| { effective_vis.at_level(rustc_middle::middle::privacy::Level::Reachable) }) - && let parent_parent = cx.tcx.parent_module_from_def_id( - cx.tcx.parent_module_from_def_id(def_id.into()).into(), - ) + && let parent_parent = cx + .tcx + .parent_module_from_def_id(cx.tcx.parent_module_from_def_id(def_id).into()) && *restricted_did == parent_parent.to_local_def_id() && !restricted_did.to_def_id().is_crate_root() { @@ -1814,7 +1814,7 @@ declare_lint! { "detects edition keywords being used as an identifier", @future_incompatible = FutureIncompatibleInfo { reason: FutureIncompatibilityReason::EditionError(Edition::Edition2024), - reference: "issue #49716 ", + reference: "", }; } @@ -1829,7 +1829,7 @@ impl KeywordIdents { fn check_tokens(&mut self, cx: &EarlyContext<'_>, tokens: &TokenStream) { // Check if the preceding token is `$`, because we want to allow `$async`, etc. let mut prev_dollar = false; - for tt in tokens.trees() { + for tt in tokens.iter() { match tt { // Only report non-raw idents. TokenTree::Token(token, _) => { diff --git a/compiler/rustc_lint/src/context.rs b/compiler/rustc_lint/src/context.rs index d8d901698d611..50bb1fb942e98 100644 --- a/compiler/rustc_lint/src/context.rs +++ b/compiler/rustc_lint/src/context.rs @@ -23,9 +23,8 @@ use rustc_session::lint::{ FutureIncompatibleInfo, Level, Lint, LintBuffer, LintExpectationId, LintId, }; use rustc_session::{LintStoreMarker, Session}; -use rustc_span::Span; use rustc_span::edit_distance::find_best_match_for_names; -use rustc_span::symbol::{Ident, Symbol, sym}; +use rustc_span::{Ident, Span, Symbol, sym}; use tracing::debug; use {rustc_abi as abi, rustc_hir as hir}; diff --git a/compiler/rustc_lint/src/dangling.rs b/compiler/rustc_lint/src/dangling.rs index 10769b57a7666..98b717a307068 100644 --- a/compiler/rustc_lint/src/dangling.rs +++ b/compiler/rustc_lint/src/dangling.rs @@ -4,8 +4,7 @@ use rustc_hir::intravisit::{FnKind, Visitor, walk_expr}; use rustc_hir::{Block, Body, Expr, ExprKind, FnDecl, LangItem}; use rustc_middle::ty::{Ty, TyCtxt}; use rustc_session::{declare_lint, impl_lint_pass}; -use rustc_span::Span; -use rustc_span::symbol::sym; +use rustc_span::{Span, sym}; use crate::lints::DanglingPointersFromTemporaries; use crate::{LateContext, LateLintPass}; diff --git a/compiler/rustc_lint/src/default_could_be_derived.rs b/compiler/rustc_lint/src/default_could_be_derived.rs new file mode 100644 index 0000000000000..bae9defa68700 --- /dev/null +++ b/compiler/rustc_lint/src/default_could_be_derived.rs @@ -0,0 +1,203 @@ +use rustc_data_structures::fx::FxHashMap; +use rustc_errors::{Applicability, Diag}; +use rustc_hir as hir; +use rustc_middle::ty; +use rustc_middle::ty::TyCtxt; +use rustc_session::{declare_lint, impl_lint_pass}; +use rustc_span::Symbol; +use rustc_span::def_id::DefId; +use rustc_span::symbol::sym; + +use crate::{LateContext, LateLintPass}; + +declare_lint! { + /// The `default_overrides_default_fields` lint checks for manual `impl` blocks of the + /// `Default` trait of types with default field values. 
+ /// + /// ### Example + /// + /// ```rust,compile_fail + /// #![feature(default_field_values)] + /// struct Foo { + /// x: i32 = 101, + /// y: NonDefault, + /// } + /// + /// struct NonDefault; + /// + /// #[deny(default_overrides_default_fields)] + /// impl Default for Foo { + /// fn default() -> Foo { + /// Foo { x: 100, y: NonDefault } + /// } + /// } + /// ``` + /// + /// {{produces}} + /// + /// ### Explanation + /// + /// Manually writing a `Default` implementation for a type that has + /// default field values runs the risk of diverging behavior between + /// `Type { .. }` and `::default()`, which would be a + /// foot-gun for users of that type that would expect these to be + /// equivalent. If `Default` can't be derived due to some fields not + /// having a `Default` implementation, we encourage the use of `..` for + /// the fields that do have a default field value. + pub DEFAULT_OVERRIDES_DEFAULT_FIELDS, + Deny, + "detect `Default` impl that should use the type's default field values", + @feature_gate = default_field_values; +} + +#[derive(Default)] +pub(crate) struct DefaultCouldBeDerived; + +impl_lint_pass!(DefaultCouldBeDerived => [DEFAULT_OVERRIDES_DEFAULT_FIELDS]); + +impl<'tcx> LateLintPass<'tcx> for DefaultCouldBeDerived { + fn check_impl_item(&mut self, cx: &LateContext<'_>, impl_item: &hir::ImplItem<'_>) { + // Look for manual implementations of `Default`. + let Some(default_def_id) = cx.tcx.get_diagnostic_item(sym::Default) else { return }; + let hir::ImplItemKind::Fn(_sig, body_id) = impl_item.kind else { return }; + let assoc = cx.tcx.associated_item(impl_item.owner_id); + let parent = assoc.container_id(cx.tcx); + if cx.tcx.has_attr(parent, sym::automatically_derived) { + // We don't care about what `#[derive(Default)]` produces in this lint. + return; + } + let Some(trait_ref) = cx.tcx.impl_trait_ref(parent) else { return }; + let trait_ref = trait_ref.instantiate_identity(); + if trait_ref.def_id != default_def_id { + return; + } + let ty = trait_ref.self_ty(); + let ty::Adt(def, _) = ty.kind() else { return }; + + // We now know we have a manually written definition of a `::default()`. + + let hir = cx.tcx.hir(); + + let type_def_id = def.did(); + let body = hir.body(body_id); + + // FIXME: evaluate bodies with statements and evaluate bindings to see if they would be + // derivable. + let hir::ExprKind::Block(hir::Block { stmts: _, expr: Some(expr), .. }, None) = + body.value.kind + else { + return; + }; + + // Keep a mapping of field name to `hir::FieldDef` for every field in the type. We'll use + // these to check for things like checking whether it has a default or using its span for + // suggestions. + let orig_fields = match hir.get_if_local(type_def_id) { + Some(hir::Node::Item(hir::Item { + kind: + hir::ItemKind::Struct(hir::VariantData::Struct { fields, recovered: _ }, _generics), + .. + })) => fields.iter().map(|f| (f.ident.name, f)).collect::>(), + _ => return, + }; + + // We check `fn default()` body is a single ADT literal and get all the fields that are + // being set. + let hir::ExprKind::Struct(_qpath, fields, tail) = expr.kind else { return }; + + // We have a struct literal + // + // struct Foo { + // field: Type, + // } + // + // impl Default for Foo { + // fn default() -> Foo { + // Foo { + // field: val, + // } + // } + // } + // + // We would suggest `#[derive(Default)]` if `field` has a default value, regardless of what + // it is; we don't want to encourage divergent behavior between `Default::default()` and + // `..`. 
+ + if let hir::StructTailExpr::Base(_) = tail { + // This is *very* niche. We'd only get here if someone wrote + // impl Default for Ty { + // fn default() -> Ty { + // Ty { ..something() } + // } + // } + // where `something()` would have to be a call or path. + // We have nothing meaningful to do with this. + return; + } + + // At least one of the fields with a default value has been overridden in + // the `Default` implementation. We suggest removing it and relying on `..` + // instead. + let any_default_field_given = + fields.iter().any(|f| orig_fields.get(&f.ident.name).and_then(|f| f.default).is_some()); + + if !any_default_field_given { + // None of the default fields were actually provided explicitly, so the manual impl + // doesn't override them (the user used `..`), so there's no risk of divergent behavior. + return; + } + + let Some(local) = parent.as_local() else { return }; + let hir_id = cx.tcx.local_def_id_to_hir_id(local); + let hir::Node::Item(item) = cx.tcx.hir_node(hir_id) else { return }; + cx.tcx.node_span_lint(DEFAULT_OVERRIDES_DEFAULT_FIELDS, hir_id, item.span, |diag| { + mk_lint(cx.tcx, diag, type_def_id, parent, orig_fields, fields); + }); + } +} + +fn mk_lint( + tcx: TyCtxt<'_>, + diag: &mut Diag<'_, ()>, + type_def_id: DefId, + impl_def_id: DefId, + orig_fields: FxHashMap>, + fields: &[hir::ExprField<'_>], +) { + diag.primary_message("`Default` impl doesn't use the declared default field values"); + + // For each field in the struct expression + // - if the field in the type has a default value, it should be removed + // - elif the field is an expression that could be a default value, it should be used as the + // field's default value (FIXME: not done). + // - else, we wouldn't touch this field, it would remain in the manual impl + let mut removed_all_fields = true; + for field in fields { + if orig_fields.get(&field.ident.name).and_then(|f| f.default).is_some() { + diag.span_label(field.expr.span, "this field has a default value"); + } else { + removed_all_fields = false; + } + } + + if removed_all_fields { + let msg = "to avoid divergence in behavior between `Struct { .. }` and \ + `::default()`, derive the `Default`"; + if let Some(hir::Node::Item(impl_)) = tcx.hir().get_if_local(impl_def_id) { + diag.multipart_suggestion_verbose( + msg, + vec![ + (tcx.def_span(type_def_id).shrink_to_lo(), "#[derive(Default)] ".to_string()), + (impl_.span, String::new()), + ], + Applicability::MachineApplicable, + ); + } else { + diag.help(msg); + } + } else { + let msg = "use the default values in the `impl` with `Struct { mandatory_field, .. 
}` to \ + avoid them diverging over time"; + diag.help(msg); + } +} diff --git a/compiler/rustc_lint/src/early.rs b/compiler/rustc_lint/src/early.rs index a15aab32aee64..bc7cd3d118c5a 100644 --- a/compiler/rustc_lint/src/early.rs +++ b/compiler/rustc_lint/src/early.rs @@ -12,8 +12,7 @@ use rustc_feature::Features; use rustc_middle::ty::{RegisteredTools, TyCtxt}; use rustc_session::Session; use rustc_session::lint::{BufferedEarlyLint, LintBuffer, LintPass}; -use rustc_span::Span; -use rustc_span::symbol::Ident; +use rustc_span::{Ident, Span}; use tracing::debug; use crate::context::{EarlyContext, LintContext, LintStore}; diff --git a/compiler/rustc_lint/src/early/diagnostics.rs b/compiler/rustc_lint/src/early/diagnostics.rs index b0fb1e4af7667..6d73715562b2e 100644 --- a/compiler/rustc_lint/src/early/diagnostics.rs +++ b/compiler/rustc_lint/src/early/diagnostics.rs @@ -11,8 +11,7 @@ use rustc_middle::middle::stability; use rustc_middle::ty::TyCtxt; use rustc_session::Session; use rustc_session::lint::{BuiltinLintDiag, ElidedLifetimeResolution}; -use rustc_span::BytePos; -use rustc_span::symbol::kw; +use rustc_span::{BytePos, kw}; use tracing::debug; use crate::lints::{self, ElidedNamedLifetime}; diff --git a/compiler/rustc_lint/src/early/diagnostics/check_cfg.rs b/compiler/rustc_lint/src/early/diagnostics/check_cfg.rs index 033c0fa4c880b..eebb131599a2b 100644 --- a/compiler/rustc_lint/src/early/diagnostics/check_cfg.rs +++ b/compiler/rustc_lint/src/early/diagnostics/check_cfg.rs @@ -4,8 +4,7 @@ use rustc_middle::ty::TyCtxt; use rustc_session::Session; use rustc_session::config::ExpectedValues; use rustc_span::edit_distance::find_best_match_for_name; -use rustc_span::symbol::Ident; -use rustc_span::{ExpnKind, Span, Symbol, sym}; +use rustc_span::{ExpnKind, Ident, Span, Symbol, sym}; use crate::lints; diff --git a/compiler/rustc_lint/src/enum_intrinsics_non_enums.rs b/compiler/rustc_lint/src/enum_intrinsics_non_enums.rs index 8e22a9bdc45a7..6556a8d8f2d05 100644 --- a/compiler/rustc_lint/src/enum_intrinsics_non_enums.rs +++ b/compiler/rustc_lint/src/enum_intrinsics_non_enums.rs @@ -2,8 +2,7 @@ use rustc_hir as hir; use rustc_middle::ty::Ty; use rustc_middle::ty::visit::TypeVisitableExt; use rustc_session::{declare_lint, declare_lint_pass}; -use rustc_span::Span; -use rustc_span::symbol::sym; +use rustc_span::{Span, sym}; use crate::context::LintContext; use crate::lints::{EnumIntrinsicsMemDiscriminate, EnumIntrinsicsMemVariant}; diff --git a/compiler/rustc_lint/src/expect.rs b/compiler/rustc_lint/src/expect.rs index 289e2c9b72243..c0cfe15b972cd 100644 --- a/compiler/rustc_lint/src/expect.rs +++ b/compiler/rustc_lint/src/expect.rs @@ -38,7 +38,8 @@ fn check_expectations(tcx: TyCtxt<'_>, tool_filter: Option) { } LintExpectationId::Stable { hir_id, attr_index, lint_index: Some(lint_index) } => { // We are an `eval_always` query, so looking at the attribute's `AttrId` is ok. 
- let attr_id = tcx.hir().attrs(hir_id)[attr_index as usize].id; + let attr_id = tcx.hir().attrs(hir_id)[attr_index as usize].id(); + (attr_id, lint_index) } _ => panic!("fulfilled expectations must have a lint index"), diff --git a/compiler/rustc_lint/src/foreign_modules.rs b/compiler/rustc_lint/src/foreign_modules.rs index 45b188205d228..53a9f64e67269 100644 --- a/compiler/rustc_lint/src/foreign_modules.rs +++ b/compiler/rustc_lint/src/foreign_modules.rs @@ -182,7 +182,7 @@ fn name_of_extern_decl(tcx: TyCtxt<'_>, fi: hir::OwnerId) -> SymbolName { // information, we could have codegen_fn_attrs also give span information back for // where the attribute was defined. However, until this is found to be a // bottleneck, this does just fine. - (overridden_link_name, tcx.get_attr(fi, sym::link_name).unwrap().span) + (overridden_link_name, tcx.get_attr(fi, sym::link_name).unwrap().span()) }) { SymbolName::Link(overridden_link_name, overridden_link_name_span) diff --git a/compiler/rustc_lint/src/hidden_unicode_codepoints.rs b/compiler/rustc_lint/src/hidden_unicode_codepoints.rs index 28368e1ab462b..406aa1005dfba 100644 --- a/compiler/rustc_lint/src/hidden_unicode_codepoints.rs +++ b/compiler/rustc_lint/src/hidden_unicode_codepoints.rs @@ -82,7 +82,36 @@ impl HiddenUnicodeCodepoints { sub, }); } + + fn check_literal( + &mut self, + cx: &EarlyContext<'_>, + text: Symbol, + lit_kind: ast::token::LitKind, + span: Span, + label: &'static str, + ) { + if !contains_text_flow_control_chars(text.as_str()) { + return; + } + let (padding, point_at_inner_spans) = match lit_kind { + // account for `"` or `'` + ast::token::LitKind::Str | ast::token::LitKind::Char => (1, true), + // account for `c"` + ast::token::LitKind::CStr => (2, true), + // account for `r###"` + ast::token::LitKind::StrRaw(n) => (n as u32 + 2, true), + // account for `cr###"` + ast::token::LitKind::CStrRaw(n) => (n as u32 + 3, true), + // suppress bad literals. + ast::token::LitKind::Err(_) => return, + // Be conservative just in case new literals do support these. + _ => (0, false), + }; + self.lint_text_direction_codepoint(cx, text, span, padding, point_at_inner_spans, label); + } } + impl EarlyLintPass for HiddenUnicodeCodepoints { fn check_attribute(&mut self, cx: &EarlyContext<'_>, attr: &ast::Attribute) { if let ast::AttrKind::DocComment(_, comment) = attr.kind { @@ -97,18 +126,11 @@ impl EarlyLintPass for HiddenUnicodeCodepoints { // byte strings are already handled well enough by `EscapeError::NonAsciiCharInByteString` match &expr.kind { ast::ExprKind::Lit(token_lit) => { - let text = token_lit.symbol; - if !contains_text_flow_control_chars(text.as_str()) { - return; - } - let padding = match token_lit.kind { - // account for `"` or `'` - ast::token::LitKind::Str | ast::token::LitKind::Char => 1, - // account for `r###"` - ast::token::LitKind::StrRaw(n) => n as u32 + 2, - _ => return, - }; - self.lint_text_direction_codepoint(cx, text, expr.span, padding, true, "literal"); + self.check_literal(cx, token_lit.symbol, token_lit.kind, expr.span, "literal"); + } + ast::ExprKind::FormatArgs(args) => { + let (lit_kind, text) = args.uncooked_fmt_str; + self.check_literal(cx, text, lit_kind, args.span, "format string"); } _ => {} }; diff --git a/compiler/rustc_lint/src/if_let_rescope.rs b/compiler/rustc_lint/src/if_let_rescope.rs index 1402129195f69..259ea908fc60b 100644 --- a/compiler/rustc_lint/src/if_let_rescope.rs +++ b/compiler/rustc_lint/src/if_let_rescope.rs @@ -84,7 +84,7 @@ declare_lint! 
{ rewriting in `match` is an option to preserve the semantics up to Edition 2021", @future_incompatible = FutureIncompatibleInfo { reason: FutureIncompatibilityReason::EditionSemanticsChange(Edition::Edition2024), - reference: "issue #124085 ", + reference: "", }; } diff --git a/compiler/rustc_lint/src/impl_trait_overcaptures.rs b/compiler/rustc_lint/src/impl_trait_overcaptures.rs index 1aacdbd448ccb..44f8653552711 100644 --- a/compiler/rustc_lint/src/impl_trait_overcaptures.rs +++ b/compiler/rustc_lint/src/impl_trait_overcaptures.rs @@ -99,7 +99,7 @@ declare_lint! { /// To fix this, remove the `use<'a>`, since the lifetime is already captured /// since it is in scope. pub IMPL_TRAIT_REDUNDANT_CAPTURES, - Warn, + Allow, "redundant precise-capturing `use<...>` syntax on an `impl Trait`", } @@ -112,7 +112,7 @@ declare_lint_pass!( impl<'tcx> LateLintPass<'tcx> for ImplTraitOvercaptures { fn check_item(&mut self, cx: &LateContext<'tcx>, it: &'tcx hir::Item<'tcx>) { match &it.kind { - hir::ItemKind::Fn(..) => check_fn(cx.tcx, it.owner_id.def_id), + hir::ItemKind::Fn { .. } => check_fn(cx.tcx, it.owner_id.def_id), _ => {} } } @@ -177,7 +177,7 @@ fn check_fn(tcx: TyCtxt<'_>, parent_def_id: LocalDefId) { // Lazily compute these two, since they're likely a bit expensive. variances: LazyCell::new(|| { let mut functional_variances = FunctionalVariances { - tcx: tcx, + tcx, variances: FxHashMap::default(), ambient_variance: ty::Covariant, generics: tcx.generics_of(parent_def_id), @@ -325,7 +325,7 @@ where ParamKind::Free(def_id, name) => ty::Region::new_late_param( self.tcx, self.parent_def_id.to_def_id(), - ty::BoundRegionKind::Named(def_id, name), + ty::LateParamRegionKind::Named(def_id, name), ), // Totally ignore late bound args from binders. ParamKind::Late => return true, @@ -475,7 +475,7 @@ fn extract_def_id_from_arg<'tcx>( ) | ty::ReLateParam(ty::LateParamRegion { scope: _, - bound_region: ty::BoundRegionKind::Named(def_id, ..), + kind: ty::LateParamRegionKind::Named(def_id, ..), }) => def_id, _ => unreachable!(), }, @@ -544,7 +544,7 @@ impl<'tcx> TypeRelation> for FunctionalVariances<'tcx> { ) | ty::ReLateParam(ty::LateParamRegion { scope: _, - bound_region: ty::BoundRegionKind::Named(def_id, ..), + kind: ty::LateParamRegionKind::Named(def_id, ..), }) => def_id, _ => { return Ok(a); diff --git a/compiler/rustc_lint/src/internal.rs b/compiler/rustc_lint/src/internal.rs index 482650e04e85c..b31a4c74787fa 100644 --- a/compiler/rustc_lint/src/internal.rs +++ b/compiler/rustc_lint/src/internal.rs @@ -10,13 +10,12 @@ use rustc_hir::{ }; use rustc_middle::ty::{self, GenericArgsRef, Ty as MiddleTy}; use rustc_session::{declare_lint_pass, declare_tool_lint}; -use rustc_span::Span; use rustc_span::hygiene::{ExpnKind, MacroKind}; -use rustc_span::symbol::{Symbol, kw, sym}; +use rustc_span::{Span, sym}; use tracing::debug; use crate::lints::{ - BadOptAccessDiag, DefaultHashTypesDiag, DiagOutOfImpl, LintPassByHand, NonExistentDocKeyword, + BadOptAccessDiag, DefaultHashTypesDiag, DiagOutOfImpl, LintPassByHand, NonGlobImportTypeIrInherent, QueryInstability, QueryUntracked, SpanUseEqCtxtDiag, SymbolInternStringLiteralDiag, TyQualified, TykindDiag, TykindKind, TypeIrInherentUsage, UntranslatableDiag, @@ -171,27 +170,11 @@ impl<'tcx> LateLintPass<'tcx> for TyTyKind { | PatKind::TupleStruct(qpath, ..) | PatKind::Struct(qpath, ..), .. - }) => { - if let QPath::TypeRelative(qpath_ty, ..) 
= qpath - && qpath_ty.hir_id == ty.hir_id - { - Some(path.span) - } else { - None - } - } - Node::Expr(Expr { kind: ExprKind::Path(qpath), .. }) => { - if let QPath::TypeRelative(qpath_ty, ..) = qpath - && qpath_ty.hir_id == ty.hir_id - { - Some(path.span) - } else { - None - } - } - // Can't unify these two branches because qpath below is `&&` and above is `&` - // and `A | B` paths don't play well together with adjustments, apparently. - Node::Expr(Expr { kind: ExprKind::Struct(qpath, ..), .. }) => { + }) + | Node::Expr( + Expr { kind: ExprKind::Path(qpath), .. } + | &Expr { kind: ExprKind::Struct(qpath, ..), .. }, + ) => { if let QPath::TypeRelative(qpath_ty, ..) = qpath && qpath_ty.hir_id == ty.hir_id { @@ -375,46 +358,6 @@ impl EarlyLintPass for LintPassImpl { } } -declare_tool_lint! { - /// The `existing_doc_keyword` lint detects use `#[doc()]` keywords - /// that don't exist, e.g. `#[doc(keyword = "..")]`. - pub rustc::EXISTING_DOC_KEYWORD, - Allow, - "Check that documented keywords in std and core actually exist", - report_in_external_macro: true -} - -declare_lint_pass!(ExistingDocKeyword => [EXISTING_DOC_KEYWORD]); - -fn is_doc_keyword(s: Symbol) -> bool { - s <= kw::Union -} - -impl<'tcx> LateLintPass<'tcx> for ExistingDocKeyword { - fn check_item(&mut self, cx: &LateContext<'_>, item: &rustc_hir::Item<'_>) { - for attr in cx.tcx.hir().attrs(item.hir_id()) { - if !attr.has_name(sym::doc) { - continue; - } - if let Some(list) = attr.meta_item_list() { - for nested in list { - if nested.has_name(sym::keyword) { - let keyword = nested - .value_str() - .expect("#[doc(keyword = \"...\")] expected a value!"); - if is_doc_keyword(keyword) { - return; - } - cx.emit_span_lint(EXISTING_DOC_KEYWORD, attr.span, NonExistentDocKeyword { - keyword, - }); - } - } - } - } - } -} - declare_tool_lint! { /// The `untranslatable_diagnostic` lint detects messages passed to functions with `impl /// Into<{D,Subd}iagMessage` parameters without using translatable Fluent strings. 
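For context on the `impl_trait_redundant_captures` lint that the `impl_trait_overcaptures.rs` hunk above downgrades from `Warn` to `Allow`, here is a minimal sketch of the pattern its documentation describes. This example is editorial, not part of the diff, and assumes edition 2024 capture defaults:

```rust
// `'a` is the only generic in scope and is captured by default under the
// 2024 capture rules, so the explicit `use<'a>` below adds nothing.
fn captures<'a>(x: &'a u8) -> impl Sized + use<'a> {
    x
}

// The fix suggested by the lint documentation: drop the `use<'a>` entirely.
fn captures_fixed<'a>(x: &'a u8) -> impl Sized {
    x
}

fn main() {
    let v = 5u8;
    let _ = captures(&v);
    let _ = captures_fixed(&v);
}
```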
diff --git a/compiler/rustc_lint/src/levels.rs b/compiler/rustc_lint/src/levels.rs index 04099cd9001fd..5a4ed68440a3c 100644 --- a/compiler/rustc_lint/src/levels.rs +++ b/compiler/rustc_lint/src/levels.rs @@ -20,8 +20,7 @@ use rustc_session::lint::builtin::{ UNFULFILLED_LINT_EXPECTATIONS, UNKNOWN_LINTS, UNUSED_ATTRIBUTES, }; use rustc_session::lint::{Level, Lint, LintExpectationId, LintId}; -use rustc_span::symbol::{Symbol, sym}; -use rustc_span::{DUMMY_SP, Span}; +use rustc_span::{DUMMY_SP, Span, Symbol, sym}; use tracing::{debug, instrument}; use {rustc_ast as ast, rustc_hir as hir}; @@ -915,7 +914,7 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> { let src = LintLevelSource::Node { name, span: sp, reason }; for &id in ids { - if self.check_gated_lint(id, attr.span(), false) { + if self.check_gated_lint(id, sp, false) { self.insert_spec(id, (level, src)); } } diff --git a/compiler/rustc_lint/src/lib.rs b/compiler/rustc_lint/src/lib.rs index a99c94592b302..1465c2cff7bc1 100644 --- a/compiler/rustc_lint/src/lib.rs +++ b/compiler/rustc_lint/src/lib.rs @@ -41,6 +41,7 @@ mod async_fn_in_trait; pub mod builtin; mod context; mod dangling; +mod default_could_be_derived; mod deref_into_dyn_supertrait; mod drop_forget_useless; mod early; @@ -85,6 +86,7 @@ use async_closures::AsyncClosureUsage; use async_fn_in_trait::AsyncFnInTrait; use builtin::*; use dangling::*; +use default_could_be_derived::DefaultCouldBeDerived; use deref_into_dyn_supertrait::*; use drop_forget_useless::*; use enum_intrinsics_non_enums::EnumIntrinsicsNonEnums; @@ -189,6 +191,7 @@ late_lint_methods!( BuiltinCombinedModuleLateLintPass, [ ForLoopsOverFallibles: ForLoopsOverFallibles, + DefaultCouldBeDerived: DefaultCouldBeDerived::default(), DerefIntoDynSupertrait: DerefIntoDynSupertrait, DropForgetUseless: DropForgetUseless, ImproperCTypesDeclarations: ImproperCTypesDeclarations, @@ -600,8 +603,6 @@ fn register_internals(store: &mut LintStore) { store.register_late_mod_pass(|_| Box::new(DefaultHashTypes)); store.register_lints(&QueryStability::lint_vec()); store.register_late_mod_pass(|_| Box::new(QueryStability)); - store.register_lints(&ExistingDocKeyword::lint_vec()); - store.register_late_mod_pass(|_| Box::new(ExistingDocKeyword)); store.register_lints(&TyTyKind::lint_vec()); store.register_late_mod_pass(|_| Box::new(TyTyKind)); store.register_lints(&TypeIr::lint_vec()); @@ -629,7 +630,6 @@ fn register_internals(store: &mut LintStore) { LintId::of(LINT_PASS_IMPL_WITHOUT_MACRO), LintId::of(USAGE_OF_QUALIFIED_TY), LintId::of(NON_GLOB_IMPORT_OF_TYPE_IR_INHERENT), - LintId::of(EXISTING_DOC_KEYWORD), LintId::of(BAD_OPT_ACCESS), LintId::of(SPAN_USE_EQ_CTXT), ]); diff --git a/compiler/rustc_lint/src/lints.rs b/compiler/rustc_lint/src/lints.rs index df89fe6f7016d..ac995b59caff1 100644 --- a/compiler/rustc_lint/src/lints.rs +++ b/compiler/rustc_lint/src/lints.rs @@ -16,8 +16,7 @@ use rustc_middle::ty::{Clause, PolyExistentialTraitRef, Ty, TyCtxt}; use rustc_session::Session; use rustc_session::lint::AmbiguityErrorDiag; use rustc_span::edition::Edition; -use rustc_span::symbol::{Ident, MacroRulesNormalizedIdent}; -use rustc_span::{Span, Symbol, sym}; +use rustc_span::{Ident, MacroRulesNormalizedIdent, Span, Symbol, kw, sym}; use crate::builtin::{InitError, ShorthandAssocTyCollector, TypeAliasBounds}; use crate::errors::{OverruledAttributeSub, RequestedLevel}; @@ -950,13 +949,6 @@ pub(crate) struct NonGlobImportTypeIrInherent { #[help] pub(crate) struct LintPassByHand; -#[derive(LintDiagnostic)] 
-#[diag(lint_non_existent_doc_keyword)] -#[help] -pub(crate) struct NonExistentDocKeyword { - pub keyword: Symbol, -} - #[derive(LintDiagnostic)] #[diag(lint_diag_out_of_impl)] pub(crate) struct DiagOutOfImpl; @@ -2222,8 +2214,7 @@ pub(crate) struct UnexpectedCfgName { pub(crate) mod unexpected_cfg_name { use rustc_errors::DiagSymbolList; use rustc_macros::Subdiagnostic; - use rustc_span::symbol::Ident; - use rustc_span::{Span, Symbol}; + use rustc_span::{Ident, Span, Symbol}; #[derive(Subdiagnostic)] pub(crate) enum CodeSuggestion { @@ -2696,7 +2687,7 @@ impl LintDiagnostic<'_, G> for ElidedNamedLifetime { // but currently this lint's suggestions can conflict with those of `clippy::needless_lifetimes`: // https://github.com/rust-lang/rust/pull/129840#issuecomment-2323349119 // HACK: `'static` suggestions will never sonflict, emit only those for now. - if name != rustc_span::symbol::kw::StaticLifetime { + if name != kw::StaticLifetime { return; } match kind { diff --git a/compiler/rustc_lint/src/macro_expr_fragment_specifier_2024_migration.rs b/compiler/rustc_lint/src/macro_expr_fragment_specifier_2024_migration.rs index 23f4f7289067d..ce280fef8b623 100644 --- a/compiler/rustc_lint/src/macro_expr_fragment_specifier_2024_migration.rs +++ b/compiler/rustc_lint/src/macro_expr_fragment_specifier_2024_migration.rs @@ -84,7 +84,7 @@ impl Expr2024 { let mut prev_colon = false; let mut prev_identifier = false; let mut prev_dollar = false; - for tt in tokens.trees() { + for tt in tokens.iter() { debug!( "check_tokens: {:?} - colon {prev_dollar} - ident {prev_identifier} - colon {prev_colon}", tt diff --git a/compiler/rustc_lint/src/non_ascii_idents.rs b/compiler/rustc_lint/src/non_ascii_idents.rs index 9b495c19990c1..593c8616c1d7c 100644 --- a/compiler/rustc_lint/src/non_ascii_idents.rs +++ b/compiler/rustc_lint/src/non_ascii_idents.rs @@ -2,7 +2,7 @@ use rustc_ast as ast; use rustc_data_structures::fx::FxIndexMap; use rustc_data_structures::unord::UnordMap; use rustc_session::{declare_lint, declare_lint_pass}; -use rustc_span::symbol::Symbol; +use rustc_span::Symbol; use unicode_security::general_security_profile::IdentifierType; use crate::lints::{ @@ -205,7 +205,7 @@ impl EarlyLintPass for NonAsciiIdents { (IdentifierType::Not_NFKC, "Not_NFKC"), ] { let codepoints: Vec<_> = - chars.extract_if(|(_, ty)| *ty == Some(id_ty)).collect(); + chars.extract_if(.., |(_, ty)| *ty == Some(id_ty)).collect(); if codepoints.is_empty() { continue; } @@ -217,7 +217,7 @@ impl EarlyLintPass for NonAsciiIdents { } let remaining = chars - .extract_if(|(c, _)| !GeneralSecurityProfile::identifier_allowed(*c)) + .extract_if(.., |(c, _)| !GeneralSecurityProfile::identifier_allowed(*c)) .collect::>(); if !remaining.is_empty() { cx.emit_span_lint(UNCOMMON_CODEPOINTS, sp, IdentifierUncommonCodepoints { diff --git a/compiler/rustc_lint/src/non_fmt_panic.rs b/compiler/rustc_lint/src/non_fmt_panic.rs index 36b1ff59c6742..6f047b4da4954 100644 --- a/compiler/rustc_lint/src/non_fmt_panic.rs +++ b/compiler/rustc_lint/src/non_fmt_panic.rs @@ -8,8 +8,7 @@ use rustc_parse_format::{ParseMode, Parser, Piece}; use rustc_session::lint::FutureIncompatibilityReason; use rustc_session::{declare_lint, declare_lint_pass}; use rustc_span::edition::Edition; -use rustc_span::symbol::kw; -use rustc_span::{InnerSpan, Span, Symbol, hygiene, sym}; +use rustc_span::{InnerSpan, Span, Symbol, hygiene, kw, sym}; use rustc_trait_selection::infer::InferCtxtExt; use crate::lints::{NonFmtPanicBraces, NonFmtPanicUnused}; diff --git 
a/compiler/rustc_lint/src/non_local_def.rs b/compiler/rustc_lint/src/non_local_def.rs index 3c33b2dd4789c..a1cc3a85109ad 100644 --- a/compiler/rustc_lint/src/non_local_def.rs +++ b/compiler/rustc_lint/src/non_local_def.rs @@ -5,8 +5,7 @@ use rustc_hir::{Body, HirId, Item, ItemKind, Node, Path, TyKind}; use rustc_middle::ty::TyCtxt; use rustc_session::{declare_lint, impl_lint_pass}; use rustc_span::def_id::{DefId, LOCAL_CRATE}; -use rustc_span::symbol::kw; -use rustc_span::{ExpnKind, MacroKind, Span, sym}; +use rustc_span::{ExpnKind, MacroKind, Span, kw, sym}; use crate::lints::{NonLocalDefinitionsCargoUpdateNote, NonLocalDefinitionsDiag}; use crate::{LateContext, LateLintPass, LintContext, fluent_generated as fluent}; diff --git a/compiler/rustc_lint/src/nonstandard_style.rs b/compiler/rustc_lint/src/nonstandard_style.rs index e315307cd45f9..3211e0a811029 100644 --- a/compiler/rustc_lint/src/nonstandard_style.rs +++ b/compiler/rustc_lint/src/nonstandard_style.rs @@ -1,14 +1,14 @@ use rustc_abi::ExternAbi; +use rustc_attr_parsing::{AttributeKind, AttributeParser, ReprAttr}; use rustc_hir::def::{DefKind, Res}; use rustc_hir::intravisit::FnKind; -use rustc_hir::{AttrArgs, AttrItem, AttrKind, GenericParamKind, PatKind}; +use rustc_hir::{AttrArgs, AttrItem, Attribute, GenericParamKind, PatKind}; use rustc_middle::ty; use rustc_session::config::CrateType; use rustc_session::{declare_lint, declare_lint_pass}; use rustc_span::def_id::LocalDefId; -use rustc_span::symbol::{Ident, sym}; -use rustc_span::{BytePos, Span}; -use {rustc_ast as ast, rustc_attr_parsing as attr, rustc_hir as hir}; +use rustc_span::{BytePos, Ident, Span, sym}; +use {rustc_ast as ast, rustc_hir as hir}; use crate::lints::{ NonCamelCaseType, NonCamelCaseTypeSub, NonSnakeCaseDiag, NonSnakeCaseDiagSub, @@ -162,10 +162,10 @@ impl NonCamelCaseTypes { impl EarlyLintPass for NonCamelCaseTypes { fn check_item(&mut self, cx: &EarlyContext<'_>, it: &ast::Item) { - let has_repr_c = it - .attrs - .iter() - .any(|attr| attr::find_repr_attrs(cx.sess(), attr).contains(&attr::ReprC)); + let has_repr_c = matches!( + AttributeParser::parse_limited(cx.sess(), &it.attrs, sym::repr, it.span, true), + Some(Attribute::Parsed(AttributeKind::Repr(r))) if r.iter().any(|(r, _)| r == &ReprAttr::ReprC) + ); if has_repr_c { return; @@ -235,10 +235,10 @@ declare_lint! { declare_lint_pass!(NonSnakeCase => [NON_SNAKE_CASE]); impl NonSnakeCase { - fn to_snake_case(mut str: &str) -> String { + fn to_snake_case(mut name: &str) -> String { let mut words = vec![]; // Preserve leading underscores - str = str.trim_start_matches(|c: char| { + name = name.trim_start_matches(|c: char| { if c == '_' { words.push(String::new()); true @@ -246,7 +246,7 @@ impl NonSnakeCase { false } }); - for s in str.split('_') { + for s in name.split('_') { let mut last_upper = false; let mut buf = String::new(); if s.is_empty() { @@ -344,7 +344,7 @@ impl<'tcx> LateLintPass<'tcx> for NonSnakeCase { } else { ast::attr::find_by_name(cx.tcx.hir().attrs(hir::CRATE_HIR_ID), sym::crate_name) .and_then(|attr| { - if let AttrKind::Normal(n) = &attr.kind + if let Attribute::Unparsed(n) = attr && let AttrItem { args: AttrArgs::Eq { eq_span: _, expr: ref lit }, .. } = n.as_ref() && let ast::LitKind::Str(name, ..) 
= lit.kind diff --git a/compiler/rustc_lint/src/noop_method_call.rs b/compiler/rustc_lint/src/noop_method_call.rs index 76dc96ae00f46..fa519281be531 100644 --- a/compiler/rustc_lint/src/noop_method_call.rs +++ b/compiler/rustc_lint/src/noop_method_call.rs @@ -3,7 +3,7 @@ use rustc_hir::{Expr, ExprKind}; use rustc_middle::ty; use rustc_middle::ty::adjustment::Adjust; use rustc_session::{declare_lint, declare_lint_pass}; -use rustc_span::symbol::sym; +use rustc_span::sym; use crate::context::LintContext; use crate::lints::{ diff --git a/compiler/rustc_lint/src/opaque_hidden_inferred_bound.rs b/compiler/rustc_lint/src/opaque_hidden_inferred_bound.rs index 5de0d4bc870fd..53a411e2b8712 100644 --- a/compiler/rustc_lint/src/opaque_hidden_inferred_bound.rs +++ b/compiler/rustc_lint/src/opaque_hidden_inferred_bound.rs @@ -5,8 +5,7 @@ use rustc_middle::ty::fold::BottomUpFolder; use rustc_middle::ty::print::{PrintTraitPredicateExt as _, TraitPredPrintModifiersAndPath}; use rustc_middle::ty::{self, Ty, TypeFoldable}; use rustc_session::{declare_lint, declare_lint_pass}; -use rustc_span::Span; -use rustc_span::symbol::kw; +use rustc_span::{Span, kw}; use rustc_trait_selection::traits::{self, ObligationCtxt}; use crate::{LateContext, LateLintPass, LintContext}; diff --git a/compiler/rustc_lint/src/pass_by_value.rs b/compiler/rustc_lint/src/pass_by_value.rs index ec306f5f83472..3f264859d4835 100644 --- a/compiler/rustc_lint/src/pass_by_value.rs +++ b/compiler/rustc_lint/src/pass_by_value.rs @@ -3,7 +3,7 @@ use rustc_hir::def::Res; use rustc_hir::{GenericArg, PathSegment, QPath, TyKind}; use rustc_middle::ty; use rustc_session::{declare_lint_pass, declare_tool_lint}; -use rustc_span::symbol::sym; +use rustc_span::sym; use crate::lints::PassByValueDiag; use crate::{LateContext, LateLintPass, LintContext}; diff --git a/compiler/rustc_lint/src/passes.rs b/compiler/rustc_lint/src/passes.rs index 380cbe650f046..3a323298bee83 100644 --- a/compiler/rustc_lint/src/passes.rs +++ b/compiler/rustc_lint/src/passes.rs @@ -136,7 +136,7 @@ macro_rules! early_lint_methods { ($macro:path, $args:tt) => ( $macro!($args, [ fn check_param(a: &rustc_ast::Param); - fn check_ident(a: &rustc_span::symbol::Ident); + fn check_ident(a: &rustc_span::Ident); fn check_crate(a: &rustc_ast::Crate); fn check_crate_post(a: &rustc_ast::Crate); fn check_item(a: &rustc_ast::Item); diff --git a/compiler/rustc_lint/src/shadowed_into_iter.rs b/compiler/rustc_lint/src/shadowed_into_iter.rs index a73904cd7769b..f5ab44d74692a 100644 --- a/compiler/rustc_lint/src/shadowed_into_iter.rs +++ b/compiler/rustc_lint/src/shadowed_into_iter.rs @@ -61,6 +61,7 @@ declare_lint! 
{ "detects calling `into_iter` on boxed slices in Rust 2015, 2018, and 2021", @future_incompatible = FutureIncompatibleInfo { reason: FutureIncompatibilityReason::EditionSemanticsChange(Edition::Edition2024), + reference: "" }; } diff --git a/compiler/rustc_lint/src/tests.rs b/compiler/rustc_lint/src/tests.rs index 186dec5904b49..f49301b0215db 100644 --- a/compiler/rustc_lint/src/tests.rs +++ b/compiler/rustc_lint/src/tests.rs @@ -1,4 +1,4 @@ -#![cfg_attr(not(bootstrap), allow(rustc::symbol_intern_string_literal))] +#![allow(rustc::symbol_intern_string_literal)] use rustc_span::{Symbol, create_default_session_globals_then}; diff --git a/compiler/rustc_lint/src/traits.rs b/compiler/rustc_lint/src/traits.rs index b793ec6a4939e..a9797c3b32a0e 100644 --- a/compiler/rustc_lint/src/traits.rs +++ b/compiler/rustc_lint/src/traits.rs @@ -1,6 +1,6 @@ use rustc_hir::{self as hir, LangItem}; use rustc_session::{declare_lint, declare_lint_pass}; -use rustc_span::symbol::sym; +use rustc_span::sym; use crate::lints::{DropGlue, DropTraitConstraintsDiag}; use crate::{LateContext, LateLintPass, LintContext}; diff --git a/compiler/rustc_lint/src/types.rs b/compiler/rustc_lint/src/types.rs index 33d31d27c738f..3bd27a224e77c 100644 --- a/compiler/rustc_lint/src/types.rs +++ b/compiler/rustc_lint/src/types.rs @@ -4,7 +4,7 @@ use std::ops::ControlFlow; use rustc_abi::{BackendRepr, ExternAbi, TagEncoding, Variants, WrappingRange}; use rustc_data_structures::fx::FxHashSet; use rustc_errors::DiagMessage; -use rustc_hir::{Expr, ExprKind}; +use rustc_hir::{Expr, ExprKind, LangItem}; use rustc_middle::bug; use rustc_middle::ty::layout::{LayoutOf, SizeSkeleton}; use rustc_middle::ty::{ @@ -12,8 +12,7 @@ use rustc_middle::ty::{ }; use rustc_session::{declare_lint, declare_lint_pass, impl_lint_pass}; use rustc_span::def_id::LocalDefId; -use rustc_span::symbol::sym; -use rustc_span::{Span, Symbol, source_map}; +use rustc_span::{Span, Symbol, source_map, sym}; use tracing::debug; use {rustc_ast as ast, rustc_hir as hir}; @@ -445,7 +444,25 @@ fn lint_fn_pointer<'tcx>( let (l_ty, l_ty_refs) = peel_refs(l_ty); let (r_ty, r_ty_refs) = peel_refs(r_ty); - if !l_ty.is_fn() || !r_ty.is_fn() { + if l_ty.is_fn() && r_ty.is_fn() { + // both operands are function pointers, fallthrough + } else if let ty::Adt(l_def, l_args) = l_ty.kind() + && let ty::Adt(r_def, r_args) = r_ty.kind() + && cx.tcx.is_lang_item(l_def.did(), LangItem::Option) + && cx.tcx.is_lang_item(r_def.did(), LangItem::Option) + && let Some(l_some_arg) = l_args.get(0) + && let Some(r_some_arg) = r_args.get(0) + && l_some_arg.expect_ty().is_fn() + && r_some_arg.expect_ty().is_fn() + { + // both operands are `Option<{function ptr}>` + return cx.emit_span_lint( + UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS, + e.span, + UnpredictableFunctionPointerComparisons::Warn, + ); + } else { + // types are not function pointers, nothing to do return; } @@ -1267,6 +1284,8 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { FfiSafe } + ty::UnsafeBinder(_) => todo!("FIXME(unsafe_binder)"), + ty::Param(..) | ty::Alias(ty::Projection | ty::Inherent | ty::Weak, ..) | ty::Infer(..) @@ -1542,7 +1561,7 @@ impl<'tcx> LateLintPass<'tcx> for ImproperCTypesDefinitions { ); } // See `check_fn`.. - hir::ItemKind::Fn(..) => {} + hir::ItemKind::Fn { .. } => {} // See `check_field_def`.. hir::ItemKind::Union(..) | hir::ItemKind::Struct(..) | hir::ItemKind::Enum(..) => {} // Doesn't define something that can contain a external type to be checked. 
diff --git a/compiler/rustc_lint/src/types/literal.rs b/compiler/rustc_lint/src/types/literal.rs index 83942918e3b58..4b5163522f866 100644 --- a/compiler/rustc_lint/src/types/literal.rs +++ b/compiler/rustc_lint/src/types/literal.rs @@ -204,20 +204,35 @@ fn get_type_suggestion(t: Ty<'_>, val: u128, negative: bool) -> Option<&'static match t.kind() { ty::Uint(ty::UintTy::Usize) | ty::Int(ty::IntTy::Isize) => None, ty::Uint(_) => Some(Integer::fit_unsigned(val).uint_ty_str()), - ty::Int(_) if negative => Some(Integer::fit_signed(-(val as i128)).int_ty_str()), - ty::Int(int) => { - let signed = Integer::fit_signed(val as i128); - let unsigned = Integer::fit_unsigned(val); - Some(if Some(unsigned.size().bits()) == int.bit_width() { - unsigned.uint_ty_str() + ty::Int(_) => { + let signed = literal_to_i128(val, negative).map(Integer::fit_signed); + if negative { + signed.map(Integer::int_ty_str) } else { - signed.int_ty_str() - }) + let unsigned = Integer::fit_unsigned(val); + Some(if let Some(signed) = signed { + if unsigned.size() < signed.size() { + unsigned.uint_ty_str() + } else { + signed.int_ty_str() + } + } else { + unsigned.uint_ty_str() + }) + } } _ => None, } } +fn literal_to_i128(val: u128, negative: bool) -> Option { + if negative { + (val <= i128::MAX as u128 + 1).then(|| val.wrapping_neg() as i128) + } else { + val.try_into().ok() + } +} + fn lint_int_literal<'tcx>( cx: &LateContext<'tcx>, type_limits: &TypeLimits, diff --git a/compiler/rustc_lint/src/unqualified_local_imports.rs b/compiler/rustc_lint/src/unqualified_local_imports.rs index d4b39d91a7141..b27398a950c81 100644 --- a/compiler/rustc_lint/src/unqualified_local_imports.rs +++ b/compiler/rustc_lint/src/unqualified_local_imports.rs @@ -1,7 +1,7 @@ use rustc_hir::def::{DefKind, Res}; use rustc_hir::{self as hir}; use rustc_session::{declare_lint, declare_lint_pass}; -use rustc_span::symbol::kw; +use rustc_span::kw; use crate::{LateContext, LateLintPass, LintContext, lints}; @@ -12,6 +12,7 @@ declare_lint! { /// ### Example /// /// ```rust,edition2018 + /// #![feature(unqualified_local_imports)] /// #![warn(unqualified_local_imports)] /// /// mod localmod { diff --git a/compiler/rustc_lint/src/unused.rs b/compiler/rustc_lint/src/unused.rs index 9cad5d98562b6..3059adb3fda10 100644 --- a/compiler/rustc_lint/src/unused.rs +++ b/compiler/rustc_lint/src/unused.rs @@ -11,8 +11,7 @@ use rustc_hir::{self as hir, LangItem}; use rustc_infer::traits::util::elaborate; use rustc_middle::ty::{self, Ty, adjustment}; use rustc_session::{declare_lint, declare_lint_pass, impl_lint_pass}; -use rustc_span::symbol::{Symbol, kw, sym}; -use rustc_span::{BytePos, Span}; +use rustc_span::{BytePos, Span, Symbol, kw, sym}; use tracing::instrument; use crate::lints::{ @@ -1024,6 +1023,7 @@ declare_lint! { "`if`, `match`, `while` and `return` do not need parentheses" } +#[derive(Default)] pub(crate) struct UnusedParens { with_self_ty_parens: bool, /// `1 as (i32) < 2` parses to ExprKind::Lt @@ -1031,12 +1031,6 @@ pub(crate) struct UnusedParens { parens_in_cast_in_lt: Vec, } -impl Default for UnusedParens { - fn default() -> Self { - Self { with_self_ty_parens: false, parens_in_cast_in_lt: Vec::new() } - } -} - impl_lint_pass!(UnusedParens => [UNUSED_PARENS]); impl UnusedDelimLint for UnusedParens { @@ -1226,7 +1220,7 @@ impl EarlyLintPass for UnusedParens { // Do not lint on `(..)` as that will result in the other arms being useless. Paren(_) // The other cases do not contain sub-patterns. - | Wild | Never | Rest | Lit(..) | MacCall(..) | Range(..) 
| Ident(.., None) | Path(..) | Err(_) => {}, + | Wild | Never | Rest | Expr(..) | MacCall(..) | Range(..) | Ident(.., None) | Path(..) | Err(_) => {}, // These are list-like patterns; parens can always be removed. TupleStruct(_, _, ps) | Tuple(ps) | Slice(ps) | Or(ps) => for p in ps { self.check_unused_parens_pat(cx, p, false, false, keep_space); diff --git a/compiler/rustc_lint_defs/src/builtin.rs b/compiler/rustc_lint_defs/src/builtin.rs index 54e927df3c42b..9fc527a6a3ab3 100644 --- a/compiler/rustc_lint_defs/src/builtin.rs +++ b/compiler/rustc_lint_defs/src/builtin.rs @@ -1677,7 +1677,7 @@ declare_lint! { "detects patterns whose meaning will change in Rust 2024", @future_incompatible = FutureIncompatibleInfo { reason: FutureIncompatibilityReason::EditionSemanticsChange(Edition::Edition2024), - reference: "123076", + reference: "", }; } @@ -2606,7 +2606,7 @@ declare_lint! { "unsafe operations in unsafe functions without an explicit unsafe block are deprecated", @future_incompatible = FutureIncompatibleInfo { reason: FutureIncompatibilityReason::EditionSemanticsChange(Edition::Edition2024), - reference: "issue #71668 ", + reference: "", explain_reason: false }; @edition Edition2024 => Warn; @@ -2671,6 +2671,7 @@ declare_lint! { /// ### Example /// /// ```rust + /// #![feature(strict_provenance_lints)] /// #![warn(fuzzy_provenance_casts)] /// /// fn main() { @@ -2714,6 +2715,7 @@ declare_lint! { /// ### Example /// /// ```rust + /// #![feature(strict_provenance_lints)] /// #![warn(lossy_provenance_casts)] /// /// fn main() { @@ -3595,7 +3597,7 @@ declare_lint! { /// /// [Other ABIs]: https://doc.rust-lang.org/reference/items/external-blocks.html#abi pub MISSING_ABI, - Allow, + Warn, "No declared ABI for extern declaration" } @@ -4033,6 +4035,8 @@ declare_lint! { /// ### Example /// /// ```rust + /// // This lint is intentionally used to test the compiler's behavior + /// // when an unstable lint is enabled without the corresponding feature gate. /// #![allow(test_unstable_lint)] /// ``` /// @@ -4189,7 +4193,7 @@ declare_lint! { "never type fallback affecting unsafe function calls", @future_incompatible = FutureIncompatibleInfo { reason: FutureIncompatibilityReason::EditionAndFutureReleaseSemanticsChange(Edition::Edition2024), - reference: "issue #123748 ", + reference: "", }; @edition Edition2024 => Deny; report_in_external_macro @@ -4243,7 +4247,7 @@ declare_lint! { "never type fallback affecting unsafe function calls", @future_incompatible = FutureIncompatibleInfo { reason: FutureIncompatibilityReason::EditionAndFutureReleaseError(Edition::Edition2024), - reference: "issue #123748 ", + reference: "", }; report_in_external_macro } @@ -4362,7 +4366,7 @@ declare_lint! { /// ### Explanation /// /// It is often expected that if you can obtain an object of type `T`, then - /// you can name the type `T` as well, this lint attempts to enforce this rule. + /// you can name the type `T` as well; this lint attempts to enforce this rule. /// The recommended action is to either reexport the type properly to make it nameable, /// or document that users are not supposed to be able to name it for one reason or another. /// @@ -4790,7 +4794,7 @@ declare_lint! { "detects unsafe functions being used as safe functions", @future_incompatible = FutureIncompatibleInfo { reason: FutureIncompatibilityReason::EditionError(Edition::Edition2024), - reference: "issue #27970 ", + reference: "", }; } @@ -4826,7 +4830,7 @@ declare_lint! 
{ "detects missing unsafe keyword on extern declarations", @future_incompatible = FutureIncompatibleInfo { reason: FutureIncompatibilityReason::EditionError(Edition::Edition2024), - reference: "issue #123743 ", + reference: "", }; } @@ -4867,7 +4871,7 @@ declare_lint! { "detects unsafe attributes outside of unsafe", @future_incompatible = FutureIncompatibleInfo { reason: FutureIncompatibilityReason::EditionError(Edition::Edition2024), - reference: "issue #123757 ", + reference: "", }; } @@ -5069,7 +5073,7 @@ declare_lint! { "Detect and warn on significant change in drop order in tail expression location", @future_incompatible = FutureIncompatibleInfo { reason: FutureIncompatibilityReason::EditionSemanticsChange(Edition::Edition2024), - reference: "issue #123739 ", + reference: "", }; } @@ -5108,7 +5112,7 @@ declare_lint! { "will be parsed as a guarded string in Rust 2024", @future_incompatible = FutureIncompatibleInfo { reason: FutureIncompatibilityReason::EditionError(Edition::Edition2024), - reference: "issue #123735 ", + reference: "", }; crate_level_only } diff --git a/compiler/rustc_lint_defs/src/lib.rs b/compiler/rustc_lint_defs/src/lib.rs index 7edb1d2ffe885..7d77acbcc3348 100644 --- a/compiler/rustc_lint_defs/src/lib.rs +++ b/compiler/rustc_lint_defs/src/lib.rs @@ -15,8 +15,7 @@ use rustc_hir::def::Namespace; use rustc_hir::{HashStableContext, HirId, MissingLifetimeKind}; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; pub use rustc_span::edition::Edition; -use rustc_span::symbol::{Ident, MacroRulesNormalizedIdent}; -use rustc_span::{Span, Symbol, sym}; +use rustc_span::{Ident, MacroRulesNormalizedIdent, Span, Symbol, sym}; use serde::{Deserialize, Serialize}; pub use self::Level::*; @@ -223,19 +222,23 @@ impl Level { /// Converts an `Attribute` to a level. pub fn from_attr(attr: &impl AttributeExt) -> Option { - Self::from_symbol(attr.name_or_empty(), Some(attr.id())) + Self::from_symbol(attr.name_or_empty(), || Some(attr.id())) } /// Converts a `Symbol` to a level. - pub fn from_symbol(s: Symbol, id: Option) -> Option { - match (s, id) { - (sym::allow, _) => Some(Level::Allow), - (sym::expect, Some(attr_id)) => { - Some(Level::Expect(LintExpectationId::Unstable { attr_id, lint_index: None })) + pub fn from_symbol(s: Symbol, id: impl FnOnce() -> Option) -> Option { + match s { + sym::allow => Some(Level::Allow), + sym::expect => { + if let Some(attr_id) = id() { + Some(Level::Expect(LintExpectationId::Unstable { attr_id, lint_index: None })) + } else { + None + } } - (sym::warn, _) => Some(Level::Warn), - (sym::deny, _) => Some(Level::Deny), - (sym::forbid, _) => Some(Level::Forbid), + sym::warn => Some(Level::Warn), + sym::deny => Some(Level::Deny), + sym::forbid => Some(Level::Forbid), _ => None, } } @@ -932,7 +935,7 @@ macro_rules! declare_lint { desc: $desc, is_externally_loaded: false, $($v: true,)* - $(feature_gate: Some(rustc_span::symbol::sym::$gate),)? + $(feature_gate: Some(rustc_span::sym::$gate),)? $(future_incompatible: Some($crate::FutureIncompatibleInfo { reason: $reason, $($field: $val,)* @@ -977,7 +980,7 @@ macro_rules! declare_tool_lint { report_in_external_macro: $external, future_incompatible: None, is_externally_loaded: true, - $(feature_gate: Some(rustc_span::symbol::sym::$gate),)? + $(feature_gate: Some(rustc_span::sym::$gate),)? crate_level_only: false, $(eval_always: $eval_always,)? 
..$crate::Lint::default_fields_for_macro() diff --git a/compiler/rustc_llvm/Cargo.toml b/compiler/rustc_llvm/Cargo.toml index b29d6b79250f3..7c402310436df 100644 --- a/compiler/rustc_llvm/Cargo.toml +++ b/compiler/rustc_llvm/Cargo.toml @@ -10,5 +10,7 @@ libc = "0.2.73" [build-dependencies] # tidy-alphabetical-start -cc = "1.1.23" +# Pinned so `cargo update` bumps don't cause breakage. Please also update the +# pinned `cc` in `rustc_codegen_ssa` if you update `cc` here. +cc = "=1.2.7" # tidy-alphabetical-end diff --git a/compiler/rustc_llvm/build.rs b/compiler/rustc_llvm/build.rs index f092110a324ec..d9d28299413b1 100644 --- a/compiler/rustc_llvm/build.rs +++ b/compiler/rustc_llvm/build.rs @@ -220,7 +220,7 @@ fn main() { let mut cmd = Command::new(&llvm_config); cmd.arg(llvm_link_arg).arg("--libs"); - // Don't link system libs if cross-compiling unless targetting Windows. + // Don't link system libs if cross-compiling unless targeting Windows. // On Windows system DLLs aren't linked directly, instead import libraries are used. // These import libraries are independent of the host. if !is_crossed || target.contains("windows") { diff --git a/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp index 20859b167bc6b..6447a9362b3ab 100644 --- a/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp +++ b/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp @@ -308,6 +308,24 @@ static Reloc::Model fromRust(LLVMRustRelocModel RustReloc) { report_fatal_error("Bad RelocModel."); } +enum class LLVMRustFloatABI { + Default, + Soft, + Hard, +}; + +static FloatABI::ABIType fromRust(LLVMRustFloatABI RustFloatAbi) { + switch (RustFloatAbi) { + case LLVMRustFloatABI::Default: + return FloatABI::Default; + case LLVMRustFloatABI::Soft: + return FloatABI::Soft; + case LLVMRustFloatABI::Hard: + return FloatABI::Hard; + } + report_fatal_error("Bad FloatABI."); +} + /// getLongestEntryLength - Return the length of the longest entry in the table. 
template static size_t getLongestEntryLength(ArrayRef Table) { size_t MaxLen = 0; @@ -358,7 +376,7 @@ extern "C" const char *LLVMRustGetHostCPUName(size_t *OutLen) { extern "C" LLVMTargetMachineRef LLVMRustCreateTargetMachine( const char *TripleStr, const char *CPU, const char *Feature, const char *ABIStr, LLVMRustCodeModel RustCM, LLVMRustRelocModel RustReloc, - LLVMRustCodeGenOptLevel RustOptLevel, bool UseSoftFloat, + LLVMRustCodeGenOptLevel RustOptLevel, LLVMRustFloatABI RustFloatABIType, bool FunctionSections, bool DataSections, bool UniqueSectionNames, bool TrapUnreachable, bool Singlethread, bool VerboseAsm, bool EmitStackSizeSection, bool RelaxELFRelocations, bool UseInitArray, @@ -369,6 +387,7 @@ extern "C" LLVMTargetMachineRef LLVMRustCreateTargetMachine( auto OptLevel = fromRust(RustOptLevel); auto RM = fromRust(RustReloc); auto CM = fromRust(RustCM); + auto FloatABIType = fromRust(RustFloatABIType); std::string Error; auto Trip = Triple(Triple::normalize(TripleStr)); @@ -381,10 +400,7 @@ extern "C" LLVMTargetMachineRef LLVMRustCreateTargetMachine( TargetOptions Options = codegen::InitTargetOptionsFromCodeGenFlags(Trip); - Options.FloatABIType = FloatABI::Default; - if (UseSoftFloat) { - Options.FloatABIType = FloatABI::Soft; - } + Options.FloatABIType = FloatABIType; Options.DataSections = DataSections; Options.FunctionSections = FunctionSections; Options.UniqueSectionNames = UniqueSectionNames; @@ -1373,20 +1389,14 @@ static bool clearDSOLocalOnDeclarations(Module &Mod, TargetMachine &TM) { return ClearDSOLocalOnDeclarations; } -extern "C" bool LLVMRustPrepareThinLTORename(const LLVMRustThinLTOData *Data, +extern "C" void LLVMRustPrepareThinLTORename(const LLVMRustThinLTOData *Data, LLVMModuleRef M, LLVMTargetMachineRef TM) { Module &Mod = *unwrap(M); TargetMachine &Target = *unwrap(TM); bool ClearDSOLocal = clearDSOLocalOnDeclarations(Mod, Target); - bool error = renameModuleForThinLTO(Mod, Data->Index, ClearDSOLocal); - - if (error) { - LLVMRustSetLastError("renameModuleForThinLTO failed"); - return false; - } - return true; + renameModuleForThinLTO(Mod, Data->Index, ClearDSOLocal); } extern "C" bool diff --git a/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp index b79205ff946d3..dd72ea2497f7c 100644 --- a/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp +++ b/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp @@ -1,5 +1,6 @@ #include "LLVMWrapper.h" +#include "llvm-c/Analysis.h" #include "llvm-c/Core.h" #include "llvm/ADT/ArrayRef.h" #include "llvm/ADT/SmallVector.h" @@ -53,6 +54,10 @@ using namespace llvm; using namespace llvm::sys; using namespace llvm::object; +// This opcode is an LLVM detail that could hypothetically change (?), so +// verify that the hard-coded value in `dwarf_const.rs` still agrees with LLVM. +static_assert(dwarf::DW_OP_LLVM_fragment == 0x1000); + // LLVMAtomicOrdering is already an enum - don't create another // one. 
static AtomicOrdering fromRust(LLVMAtomicOrdering Ordering) { @@ -165,6 +170,30 @@ extern "C" LLVMValueRef LLVMRustGetNamedValue(LLVMModuleRef M, const char *Name, return wrap(unwrap(M)->getNamedValue(StringRef(Name, NameLen))); } +enum class LLVMRustVerifierFailureAction { + AbortProcessAction = 0, + PrintMessageAction = 1, + ReturnStatusAction = 2, +}; + +static LLVMVerifierFailureAction +fromRust(LLVMRustVerifierFailureAction Action) { + switch (Action) { + case LLVMRustVerifierFailureAction::AbortProcessAction: + return LLVMAbortProcessAction; + case LLVMRustVerifierFailureAction::PrintMessageAction: + return LLVMPrintMessageAction; + case LLVMRustVerifierFailureAction::ReturnStatusAction: + return LLVMReturnStatusAction; + } + report_fatal_error("Invalid LLVMVerifierFailureAction value!"); +} + +extern "C" LLVMBool +LLVMRustVerifyFunction(LLVMValueRef Fn, LLVMRustVerifierFailureAction Action) { + return LLVMVerifyFunction(Fn, fromRust(Action)); +} + enum class LLVMRustTailCallKind { None, Tail, @@ -388,6 +417,17 @@ extern "C" void LLVMRustAddCallSiteAttributes(LLVMValueRef Instr, AddAttributes(Call, Index, Attrs, AttrsLen); } +extern "C" LLVMValueRef LLVMRustGetTerminator(LLVMBasicBlockRef BB) { + Instruction *ret = unwrap(BB)->getTerminator(); + return wrap(ret); +} + +extern "C" void LLVMRustEraseInstFromParent(LLVMValueRef Instr) { + if (auto I = dyn_cast(unwrap(Instr))) { + I->eraseFromParent(); + } +} + extern "C" LLVMAttributeRef LLVMRustCreateAttrNoValue(LLVMContextRef C, LLVMRustAttributeKind RustAttr) { return wrap(Attribute::get(*unwrap(C), fromRust(RustAttr))); @@ -954,6 +994,47 @@ extern "C" void LLVMRustAddModuleFlagString( MDString::get(unwrap(M)->getContext(), StringRef(Value, ValueLen))); } +extern "C" LLVMValueRef LLVMRustGetLastInstruction(LLVMBasicBlockRef BB) { + auto Point = unwrap(BB)->rbegin(); + if (Point != unwrap(BB)->rend()) + return wrap(&*Point); + return nullptr; +} + +extern "C" void LLVMRustEraseInstBefore(LLVMBasicBlockRef bb, LLVMValueRef I) { + auto &BB = *unwrap(bb); + auto &Inst = *unwrap(I); + auto It = BB.begin(); + while (&*It != &Inst) + ++It; + // Make sure we found the Instruction. + assert(It != BB.end()); + // We don't want to erase the instruction itself. + It--; + // Delete in rev order to ensure no dangling references. 
+ while (It != BB.begin()) { + auto Prev = std::prev(It); + It->eraseFromParent(); + It = Prev; + } + It->eraseFromParent(); +} + +extern "C" bool LLVMRustHasMetadata(LLVMValueRef inst, unsigned kindID) { + if (auto *I = dyn_cast(unwrap(inst))) { + return I->hasMetadata(kindID); + } + return false; +} + +extern "C" LLVMMetadataRef LLVMRustDIGetInstMetadata(LLVMValueRef x) { + if (auto *I = dyn_cast(unwrap(x))) { + auto *MD = I->getDebugLoc().getAsMDNode(); + return wrap(MD); + } + return nullptr; +} + extern "C" void LLVMRustGlobalAddMetadata(LLVMValueRef Global, unsigned Kind, LLVMMetadataRef MD) { unwrap(Global)->addMetadata(Kind, *unwrap(MD)); @@ -1140,6 +1221,13 @@ extern "C" LLVMMetadataRef LLVMRustDIBuilderCreateStaticMemberType( unwrap(val), llvm::dwarf::DW_TAG_member, AlignInBits)); } +extern "C" LLVMMetadataRef +LLVMRustDIBuilderCreateQualifiedType(LLVMDIBuilderRef Builder, unsigned Tag, + LLVMMetadataRef Type) { + return wrap( + unwrap(Builder)->createQualifiedType(Tag, unwrapDI(Type))); +} + extern "C" LLVMMetadataRef LLVMRustDIBuilderCreateLexicalBlock(LLVMRustDIBuilderRef Builder, LLVMMetadataRef Scope, LLVMMetadataRef File, @@ -1313,18 +1401,6 @@ LLVMRustDILocationCloneWithBaseDiscriminator(LLVMMetadataRef Location, return wrap(NewLoc.has_value() ? NewLoc.value() : nullptr); } -extern "C" uint64_t LLVMRustDIBuilderCreateOpDeref() { - return dwarf::DW_OP_deref; -} - -extern "C" uint64_t LLVMRustDIBuilderCreateOpPlusUconst() { - return dwarf::DW_OP_plus_uconst; -} - -extern "C" uint64_t LLVMRustDIBuilderCreateOpLLVMFragment() { - return dwarf::DW_OP_LLVM_fragment; -} - extern "C" void LLVMRustWriteTypeToString(LLVMTypeRef Ty, RustStringRef Str) { auto OS = RawRustStringOstream(Str); unwrap(Ty)->print(OS); @@ -1796,56 +1872,6 @@ extern "C" LLVMValueRef LLVMRustBuildMaxNum(LLVMBuilderRef B, LLVMValueRef LHS, return wrap(unwrap(B)->CreateMaxNum(unwrap(LHS), unwrap(RHS))); } -// This struct contains all necessary info about a symbol exported from a DLL. -struct LLVMRustCOFFShortExport { - const char *name; - bool ordinal_present; - // The value of `ordinal` is only meaningful if `ordinal_present` is true. - uint16_t ordinal; -}; - -// Machine must be a COFF machine type, as defined in PE specs. -extern "C" LLVMRustResult -LLVMRustWriteImportLibrary(const char *ImportName, const char *Path, - const LLVMRustCOFFShortExport *Exports, - size_t NumExports, uint16_t Machine, bool MinGW) { - std::vector ConvertedExports; - ConvertedExports.reserve(NumExports); - - for (size_t i = 0; i < NumExports; ++i) { - bool ordinal_present = Exports[i].ordinal_present; - uint16_t ordinal = ordinal_present ? Exports[i].ordinal : 0; - ConvertedExports.push_back(llvm::object::COFFShortExport{ - Exports[i].name, // Name - std::string{}, // ExtName - std::string{}, // SymbolName - std::string{}, // AliasTarget -#if LLVM_VERSION_GE(19, 0) - std::string{}, // ExportAs -#endif - ordinal, // Ordinal - ordinal_present, // Noname - false, // Data - false, // Private - false // Constant - }); - } - - auto Error = llvm::object::writeImportLibrary( - ImportName, Path, ConvertedExports, - static_cast(Machine), MinGW); - if (Error) { - std::string errorString; - auto stream = llvm::raw_string_ostream(errorString); - stream << Error; - stream.flush(); - LLVMRustSetLastError(errorString.c_str()); - return LLVMRustResult::Failure; - } else { - return LLVMRustResult::Success; - } -} - // Transfers ownership of DiagnosticHandler unique_ptr to the caller. 
extern "C" DiagnosticHandler * LLVMRustContextGetDiagnosticHandler(LLVMContextRef C) { diff --git a/compiler/rustc_log/src/lib.rs b/compiler/rustc_log/src/lib.rs index a3890fc937e79..d0ef82f4a6ce2 100644 --- a/compiler/rustc_log/src/lib.rs +++ b/compiler/rustc_log/src/lib.rs @@ -130,11 +130,11 @@ pub fn init_logger(cfg: LoggerConfig) -> Result<(), Error> { let subscriber = tracing_subscriber::Registry::default().with(filter).with(layer); match cfg.backtrace { - Ok(str) => { + Ok(backtrace_target) => { let fmt_layer = tracing_subscriber::fmt::layer() .with_writer(io::stderr) .without_time() - .event_format(BacktraceFormatter { backtrace_target: str }); + .event_format(BacktraceFormatter { backtrace_target }); let subscriber = subscriber.with(fmt_layer); tracing::subscriber::set_global_default(subscriber).unwrap(); } diff --git a/compiler/rustc_macros/src/diagnostics/mod.rs b/compiler/rustc_macros/src/diagnostics/mod.rs index 0f7b89215097e..91398f1a9da9d 100644 --- a/compiler/rustc_macros/src/diagnostics/mod.rs +++ b/compiler/rustc_macros/src/diagnostics/mod.rs @@ -16,7 +16,7 @@ use synstructure::Structure; /// # extern crate rustc_errors; /// # use rustc_errors::Applicability; /// # extern crate rustc_span; -/// # use rustc_span::{symbol::Ident, Span}; +/// # use rustc_span::{Ident, Span}; /// # extern crate rust_middle; /// # use rustc_middle::ty::Ty; /// #[derive(Diagnostic)] diff --git a/compiler/rustc_macros/src/symbols.rs b/compiler/rustc_macros/src/symbols.rs index 2552c0a0cfc79..37200f62eb5a2 100644 --- a/compiler/rustc_macros/src/symbols.rs +++ b/compiler/rustc_macros/src/symbols.rs @@ -156,14 +156,14 @@ impl Entries { Entries { map: HashMap::with_capacity(capacity) } } - fn insert(&mut self, span: Span, str: &str, errors: &mut Errors) -> u32 { - if let Some(prev) = self.map.get(str) { - errors.error(span, format!("Symbol `{str}` is duplicated")); + fn insert(&mut self, span: Span, s: &str, errors: &mut Errors) -> u32 { + if let Some(prev) = self.map.get(s) { + errors.error(span, format!("Symbol `{s}` is duplicated")); errors.error(prev.span_of_name, "location of previous definition".to_string()); prev.idx } else { let idx = self.len(); - self.map.insert(str.to_string(), Preinterned { idx, span_of_name: span }); + self.map.insert(s.to_string(), Preinterned { idx, span_of_name: span }); idx } } @@ -192,14 +192,14 @@ fn symbols_with_errors(input: TokenStream) -> (TokenStream, Vec) { let mut entries = Entries::with_capacity(input.keywords.len() + input.symbols.len() + 10); let mut prev_key: Option<(Span, String)> = None; - let mut check_order = |span: Span, str: &str, errors: &mut Errors| { + let mut check_order = |span: Span, s: &str, errors: &mut Errors| { if let Some((prev_span, ref prev_str)) = prev_key { - if str < prev_str { - errors.error(span, format!("Symbol `{str}` must precede `{prev_str}`")); + if s < prev_str { + errors.error(span, format!("Symbol `{s}` must precede `{prev_str}`")); errors.error(prev_span, format!("location of previous symbol `{prev_str}`")); } } - prev_key = Some((span, str.to_string())); + prev_key = Some((span, s.to_string())); }; // Generate the listed keywords. 
diff --git a/compiler/rustc_metadata/src/creader.rs b/compiler/rustc_metadata/src/creader.rs index fd1535094c982..6512176cc4a90 100644 --- a/compiler/rustc_metadata/src/creader.rs +++ b/compiler/rustc_metadata/src/creader.rs @@ -29,8 +29,7 @@ use rustc_session::lint::{self, BuiltinLintDiag}; use rustc_session::output::validate_crate_name; use rustc_session::search_paths::PathKind; use rustc_span::edition::Edition; -use rustc_span::symbol::{Ident, Symbol, sym}; -use rustc_span::{DUMMY_SP, Span}; +use rustc_span::{DUMMY_SP, Ident, STDLIB_STABLE_CRATES, Span, Symbol, sym}; use rustc_target::spec::{PanicStrategy, Target, TargetTuple}; use tracing::{debug, info, trace}; @@ -391,19 +390,51 @@ impl<'a, 'tcx> CrateLoader<'a, 'tcx> { None } - // The `dependency` type is determined by the command line arguments(`--extern`) and - // `private_dep`. However, sometimes the directly dependent crate is not specified by - // `--extern`, in this case, `private-dep` is none during loading. This is equivalent to the - // scenario where the command parameter is set to `public-dependency` - fn is_private_dep(&self, name: &str, private_dep: Option) -> bool { - self.sess.opts.externs.get(name).map_or(private_dep.unwrap_or(false), |e| e.is_private_dep) - && private_dep.unwrap_or(true) + /// Determine whether a dependency should be considered private. + /// + /// Dependencies are private if they get extern option specified, e.g. `--extern priv:mycrate`. + /// This is stored in metadata, so `private_dep` can be correctly set during load. A `Some` + /// value for `private_dep` indicates that the crate is known to be private or public (note + /// that any `None` or `Some(false)` use of the same crate will make it public). + /// + /// Sometimes the directly dependent crate is not specified by `--extern`, in this case, + /// `private-dep` is none during loading. This is equivalent to the scenario where the + /// command parameter is set to `public-dependency` + fn is_private_dep( + &self, + name: Symbol, + private_dep: Option, + dep_root: Option<&CratePaths>, + ) -> bool { + // Standard library crates are never private. + if STDLIB_STABLE_CRATES.contains(&name) { + tracing::info!("returning false for {name} is private"); + return false; + } + + let extern_private = self.sess.opts.externs.get(name.as_str()).map(|e| e.is_private_dep); + + // Any descendants of `std` should be private. These crates are usually not marked + // private in metadata, so we ignore that field. + if extern_private.is_none() + && dep_root.map_or(false, |d| STDLIB_STABLE_CRATES.contains(&d.name)) + { + return true; + } + + match (extern_private, private_dep) { + // Explicit non-private via `--extern`, explicit non-private from metadata, or + // unspecified with default to public. + (Some(false), _) | (_, Some(false)) | (None, None) => false, + // Marked private via `--extern priv:mycrate` or in metadata. 
+ (Some(true) | None, Some(true) | None) => true, + } } fn register_crate( &mut self, host_lib: Option, - root: Option<&CratePaths>, + dep_root: Option<&CratePaths>, lib: Library, dep_kind: CrateDepKind, name: Symbol, @@ -415,7 +446,7 @@ impl<'a, 'tcx> CrateLoader<'a, 'tcx> { let Library { source, metadata } = lib; let crate_root = metadata.get_root(); let host_hash = host_lib.as_ref().map(|lib| lib.metadata.get_root().hash()); - let private_dep = self.is_private_dep(name.as_str(), private_dep); + let private_dep = self.is_private_dep(name, private_dep, dep_root); // Claim this crate number and cache it let feed = self.cstore.intern_stable_crate_id(&crate_root, self.tcx)?; @@ -431,14 +462,14 @@ impl<'a, 'tcx> CrateLoader<'a, 'tcx> { // Maintain a reference to the top most crate. // Stash paths for top-most crate locally if necessary. let crate_paths; - let root = if let Some(root) = root { - root + let dep_root = if let Some(dep_root) = dep_root { + dep_root } else { crate_paths = CratePaths::new(crate_root.name(), source.clone()); &crate_paths }; - let cnum_map = self.resolve_crate_deps(root, &crate_root, &metadata, cnum, dep_kind)?; + let cnum_map = self.resolve_crate_deps(dep_root, &crate_root, &metadata, cnum, dep_kind)?; let raw_proc_macros = if crate_root.is_proc_macro_crate() { let temp_root; @@ -560,23 +591,21 @@ impl<'a, 'tcx> CrateLoader<'a, 'tcx> { &'b mut self, name: Symbol, mut dep_kind: CrateDepKind, - dep: Option<(&'b CratePaths, &'b CrateDep)>, + dep_of: Option<(&'b CratePaths, &'b CrateDep)>, ) -> Result { info!("resolving crate `{}`", name); if !name.as_str().is_ascii() { return Err(CrateError::NonAsciiName(name)); } - let (root, hash, host_hash, extra_filename, path_kind, private_dep) = match dep { - Some((root, dep)) => ( - Some(root), - Some(dep.hash), - dep.host_hash, - Some(&dep.extra_filename[..]), - PathKind::Dependency, - Some(dep.is_private), - ), - None => (None, None, None, None, PathKind::Crate, None), - }; + + let dep_root = dep_of.map(|d| d.0); + let dep = dep_of.map(|d| d.1); + let hash = dep.map(|d| d.hash); + let host_hash = dep.map(|d| d.host_hash).flatten(); + let extra_filename = dep.map(|d| &d.extra_filename[..]); + let path_kind = if dep.is_some() { PathKind::Dependency } else { PathKind::Crate }; + let private_dep = dep.map(|d| d.is_private); + let result = if let Some(cnum) = self.existing_match(name, hash, path_kind) { (LoadResult::Previous(cnum), None) } else { @@ -600,7 +629,7 @@ impl<'a, 'tcx> CrateLoader<'a, 'tcx> { dep_kind = CrateDepKind::MacrosOnly; match self.load_proc_macro(&mut locator, path_kind, host_hash)? { Some(res) => res, - None => return Err(locator.into_error(root.cloned())), + None => return Err(locator.into_error(dep_root.cloned())), } } } @@ -613,7 +642,7 @@ impl<'a, 'tcx> CrateLoader<'a, 'tcx> { // not specified by `--extern` on command line parameters, it may be // `private-dependency` when `register_crate` is called for the first time. Then it must be updated to // `public-dependency` here. 
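For readers following the new `is_private_dep` above, here is a stripped-down sketch of how the `--extern` flag and the metadata flag combine. The free function and its parameter names are hypothetical, and the stdlib and `dep_root` special cases from the real method are omitted:

// Sketch of the decision table only; `cli` stands in for the `--extern priv:`
// option and `metadata` for the flag recorded in the dependent crate's metadata.
fn is_private(cli: Option<bool>, metadata: Option<bool>) -> bool {
    match (cli, metadata) {
        // Explicitly public on either side, or nothing specified at all.
        (Some(false), _) | (_, Some(false)) | (None, None) => false,
        // Marked private on the command line or in metadata.
        (Some(true) | None, Some(true) | None) => true,
    }
}

fn main() {
    assert!(!is_private(None, None)); // nothing specified: public by default
    assert!(is_private(Some(true), None)); // --extern priv:mycrate
    assert!(!is_private(Some(false), Some(true))); // explicit non-private `--extern` wins
}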
- let private_dep = self.is_private_dep(name.as_str(), private_dep); + let private_dep = self.is_private_dep(name, private_dep, dep_root); let data = self.cstore.get_crate_data_mut(cnum); if data.is_proc_macro_crate() { dep_kind = CrateDepKind::MacrosOnly; @@ -624,7 +653,7 @@ impl<'a, 'tcx> CrateLoader<'a, 'tcx> { } (LoadResult::Loaded(library), host_library) => { info!("register newly loaded library for `{}`", name); - self.register_crate(host_library, root, library, dep_kind, name, private_dep) + self.register_crate(host_library, dep_root, library, dep_kind, name, private_dep) } _ => panic!(), } @@ -664,16 +693,20 @@ impl<'a, 'tcx> CrateLoader<'a, 'tcx> { })) } - // Go through the crate metadata and load any crates that it references + /// Go through the crate metadata and load any crates that it references. fn resolve_crate_deps( &mut self, - root: &CratePaths, + dep_root: &CratePaths, crate_root: &CrateRoot, metadata: &MetadataBlob, krate: CrateNum, dep_kind: CrateDepKind, ) -> Result { - debug!("resolving deps of external crate"); + debug!( + "resolving deps of external crate `{}` with dep root `{}`", + crate_root.name(), + dep_root.name + ); if crate_root.is_proc_macro_crate() { return Ok(CrateNumMap::new()); } @@ -686,14 +719,17 @@ impl<'a, 'tcx> CrateLoader<'a, 'tcx> { crate_num_map.push(krate); for dep in deps { info!( - "resolving dep crate {} hash: `{}` extra filename: `{}`", - dep.name, dep.hash, dep.extra_filename + "resolving dep `{}`->`{}` hash: `{}` extra filename: `{}`", + crate_root.name(), + dep.name, + dep.hash, + dep.extra_filename ); let dep_kind = match dep_kind { CrateDepKind::MacrosOnly => CrateDepKind::MacrosOnly, _ => dep.kind, }; - let cnum = self.maybe_resolve_crate(dep.name, dep_kind, Some((root, &dep)))?; + let cnum = self.maybe_resolve_crate(dep.name, dep_kind, Some((dep_root, &dep)))?; crate_num_map.push(cnum); } @@ -868,7 +904,7 @@ impl<'a, 'tcx> CrateLoader<'a, 'tcx> { // First up we check for global allocators. Look at the crate graph here // and see what's a global allocator, including if we ourselves are a // global allocator. - #[cfg_attr(not(bootstrap), allow(rustc::symbol_intern_string_literal))] + #[allow(rustc::symbol_intern_string_literal)] let this_crate = Symbol::intern("this crate"); let mut global_allocator = self.cstore.has_global_allocator.then_some(this_crate); diff --git a/compiler/rustc_metadata/src/dependency_format.rs b/compiler/rustc_metadata/src/dependency_format.rs index e8de0acb7c9fe..582c2215d92e1 100644 --- a/compiler/rustc_metadata/src/dependency_format.rs +++ b/compiler/rustc_metadata/src/dependency_format.rs @@ -52,7 +52,8 @@ //! than finding a number of solutions (there are normally quite a few). use rustc_data_structures::fx::{FxHashMap, FxHashSet}; -use rustc_hir::def_id::CrateNum; +use rustc_hir::def_id::{CrateNum, LOCAL_CRATE}; +use rustc_index::IndexVec; use rustc_middle::bug; use rustc_middle::middle::dependency_format::{Dependencies, DependencyList, Linkage}; use rustc_middle::ty::TyCtxt; @@ -84,7 +85,7 @@ fn calculate_type(tcx: TyCtxt<'_>, ty: CrateType) -> DependencyList { let sess = &tcx.sess; if !sess.opts.output_types.should_codegen() { - return Vec::new(); + return IndexVec::new(); } let preferred_linkage = match ty { @@ -131,7 +132,7 @@ fn calculate_type(tcx: TyCtxt<'_>, ty: CrateType) -> DependencyList { match preferred_linkage { // If the crate is not linked, there are no link-time dependencies. 
- Linkage::NotLinked => return Vec::new(), + Linkage::NotLinked => return IndexVec::new(), Linkage::Static => { // Attempt static linkage first. For dylibs and executables, we may be // able to retry below with dynamic linkage. @@ -156,7 +157,7 @@ fn calculate_type(tcx: TyCtxt<'_>, ty: CrateType) -> DependencyList { } sess.dcx().emit_err(RlibRequired { crate_name: tcx.crate_name(cnum) }); } - return Vec::new(); + return IndexVec::new(); } } Linkage::Dynamic | Linkage::IncludedFromDylib => {} @@ -210,19 +211,32 @@ fn calculate_type(tcx: TyCtxt<'_>, ty: CrateType) -> DependencyList { // Collect what we've got so far in the return vector. let last_crate = tcx.crates(()).len(); - let mut ret = (1..last_crate + 1) - .map(|cnum| match formats.get(&CrateNum::new(cnum)) { - Some(&RequireDynamic) => Linkage::Dynamic, - Some(&RequireStatic) => Linkage::IncludedFromDylib, - None => Linkage::NotLinked, - }) - .collect::>(); + let mut ret = IndexVec::new(); + + // We need to fill in something for LOCAL_CRATE as IndexVec is a dense map. + // Linkage::Static semantically the most correct thing to use as the local + // crate is always statically linked into the linker output, even when + // linking a dylib. Using Linkage::Static also allow avoiding special cases + // for LOCAL_CRATE in some places. + assert_eq!(ret.push(Linkage::Static), LOCAL_CRATE); + + for cnum in 1..last_crate + 1 { + let cnum = CrateNum::new(cnum); + assert_eq!( + ret.push(match formats.get(&cnum) { + Some(&RequireDynamic) => Linkage::Dynamic, + Some(&RequireStatic) => Linkage::IncludedFromDylib, + None => Linkage::NotLinked, + }), + cnum + ); + } // Run through the dependency list again, and add any missing libraries as // static libraries. // // If the crate hasn't been included yet and it's not actually required - // (e.g., it's an allocator) then we skip it here as well. + // (e.g., it's a panic runtime) then we skip it here as well. for &cnum in tcx.crates(()).iter() { let src = tcx.used_crate_source(cnum); if src.dylib.is_none() @@ -232,7 +246,7 @@ fn calculate_type(tcx: TyCtxt<'_>, ty: CrateType) -> DependencyList { assert!(src.rlib.is_some() || src.rmeta.is_some()); info!("adding staticlib: {}", tcx.crate_name(cnum)); add_library(tcx, cnum, RequireStatic, &mut formats, &mut unavailable_as_static); - ret[cnum.as_usize() - 1] = Linkage::Static; + ret[cnum] = Linkage::Static; } } @@ -240,8 +254,7 @@ fn calculate_type(tcx: TyCtxt<'_>, ty: CrateType) -> DependencyList { // artifact which means that we may need to inject dependencies of some // form. // - // Things like allocators and panic runtimes may not have been activated - // quite yet, so do so here. + // Things like panic runtimes may not have been activated quite yet, so do so here. activate_injected_dep(CStore::from_tcx(tcx).injected_panic_runtime(), &mut ret, &|cnum| { tcx.is_panic_runtime(cnum) }); @@ -252,8 +265,10 @@ fn calculate_type(tcx: TyCtxt<'_>, ty: CrateType) -> DependencyList { // // For situations like this, we perform one last pass over the dependencies, // making sure that everything is available in the requested format. 
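The move above from `Vec<Linkage>` to an `IndexVec` indexed directly by `CrateNum` works because slot 0 is now reserved for `LOCAL_CRATE`. A plain-`Vec` sketch of the same idea (illustrative only; it does not use `rustc_index`):

#[derive(Clone, Copy, Debug, PartialEq)]
enum Linkage { NotLinked, Static, Dynamic }

fn main() {
    let mut ret = Vec::new();

    // Slot 0 stands in for LOCAL_CRATE: the local crate is always statically
    // linked into the output, so storing Static avoids special cases later.
    ret.push(Linkage::Static);

    // External crates (crate numbers 1, 2, 3, ...) start out not linked.
    for _cnum in 1..=3 {
        ret.push(Linkage::NotLinked);
    }

    // Crate numbers now index the list directly; the old layout needed the
    // `cnum.as_usize() - 1` offset because it omitted the local crate.
    ret[2] = Linkage::Dynamic;
    assert_eq!(ret[0], Linkage::Static);
    assert_eq!(ret[2], Linkage::Dynamic);
}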
- for (cnum, kind) in ret.iter().enumerate() { - let cnum = CrateNum::new(cnum + 1); + for (cnum, kind) in ret.iter_enumerated() { + if cnum == LOCAL_CRATE { + continue; + } let src = tcx.used_crate_source(cnum); match *kind { Linkage::NotLinked | Linkage::IncludedFromDylib => {} @@ -334,18 +349,21 @@ fn attempt_static(tcx: TyCtxt<'_>, unavailable: &mut Vec) -> Option Linkage::Static, - CrateDepKind::MacrosOnly | CrateDepKind::Implicit => Linkage::NotLinked, - }) - .collect::>(); + let mut ret = IndexVec::new(); + assert_eq!(ret.push(Linkage::Static), LOCAL_CRATE); + for &cnum in tcx.crates(()) { + assert_eq!( + ret.push(match tcx.dep_kind(cnum) { + CrateDepKind::Explicit => Linkage::Static, + CrateDepKind::MacrosOnly | CrateDepKind::Implicit => Linkage::NotLinked, + }), + cnum + ); + } - // Our allocator/panic runtime may not have been linked above if it wasn't - // explicitly linked, which is the case for any injected dependency. Handle - // that here and activate them. + // Our panic runtime may not have been linked above if it wasn't explicitly + // linked, which is the case for any injected dependency. Handle that here + // and activate it. activate_injected_dep(CStore::from_tcx(tcx).injected_panic_runtime(), &mut ret, &|cnum| { tcx.is_panic_runtime(cnum) }); @@ -367,8 +385,7 @@ fn activate_injected_dep( list: &mut DependencyList, replaces_injected: &dyn Fn(CrateNum) -> bool, ) { - for (i, slot) in list.iter().enumerate() { - let cnum = CrateNum::new(i + 1); + for (cnum, slot) in list.iter_enumerated() { if !replaces_injected(cnum) { continue; } @@ -377,25 +394,23 @@ fn activate_injected_dep( } } if let Some(injected) = injected { - let idx = injected.as_usize() - 1; - assert_eq!(list[idx], Linkage::NotLinked); - list[idx] = Linkage::Static; + assert_eq!(list[injected], Linkage::NotLinked); + list[injected] = Linkage::Static; } } -// After the linkage for a crate has been determined we need to verify that -// there's only going to be one allocator in the output. -fn verify_ok(tcx: TyCtxt<'_>, list: &[Linkage]) { +/// After the linkage for a crate has been determined we need to verify that +/// there's only going to be one panic runtime in the output. +fn verify_ok(tcx: TyCtxt<'_>, list: &DependencyList) { let sess = &tcx.sess; if list.is_empty() { return; } let mut panic_runtime = None; - for (i, linkage) in list.iter().enumerate() { + for (cnum, linkage) in list.iter_enumerated() { if let Linkage::NotLinked = *linkage { continue; } - let cnum = CrateNum::new(i + 1); if tcx.is_panic_runtime(cnum) { if let Some((prev, _)) = panic_runtime { @@ -431,11 +446,10 @@ fn verify_ok(tcx: TyCtxt<'_>, list: &[Linkage]) { // strategy. If the dep isn't linked, we ignore it, and if our strategy // is abort then it's compatible with everything. Otherwise all crates' // panic strategy must match our own. 
- for (i, linkage) in list.iter().enumerate() { + for (cnum, linkage) in list.iter_enumerated() { if let Linkage::NotLinked = *linkage { continue; } - let cnum = CrateNum::new(i + 1); if cnum == runtime_cnum || tcx.is_compiler_builtins(cnum) { continue; } @@ -450,13 +464,16 @@ fn verify_ok(tcx: TyCtxt<'_>, list: &[Linkage]) { }); } - let found_drop_strategy = tcx.panic_in_drop_strategy(cnum); - if tcx.sess.opts.unstable_opts.panic_in_drop != found_drop_strategy { - sess.dcx().emit_err(IncompatiblePanicInDropStrategy { - crate_name: tcx.crate_name(cnum), - found_strategy: found_drop_strategy, - desired_strategy: tcx.sess.opts.unstable_opts.panic_in_drop, - }); + // panic_in_drop_strategy isn't allowed for LOCAL_CRATE + if cnum != LOCAL_CRATE { + let found_drop_strategy = tcx.panic_in_drop_strategy(cnum); + if tcx.sess.opts.unstable_opts.panic_in_drop != found_drop_strategy { + sess.dcx().emit_err(IncompatiblePanicInDropStrategy { + crate_name: tcx.crate_name(cnum), + found_strategy: found_drop_strategy, + desired_strategy: tcx.sess.opts.unstable_opts.panic_in_drop, + }); + } } } } diff --git a/compiler/rustc_metadata/src/locator.rs b/compiler/rustc_metadata/src/locator.rs index d59ec7af6ecc0..2ddabeb49f70d 100644 --- a/compiler/rustc_metadata/src/locator.rs +++ b/compiler/rustc_metadata/src/locator.rs @@ -229,8 +229,7 @@ use rustc_session::cstore::CrateSource; use rustc_session::filesearch::FileSearch; use rustc_session::search_paths::PathKind; use rustc_session::utils::CanonicalizedPath; -use rustc_span::Span; -use rustc_span::symbol::Symbol; +use rustc_span::{Span, Symbol}; use rustc_target::spec::{Target, TargetTuple}; use tracing::{debug, info}; @@ -263,7 +262,7 @@ pub(crate) struct CrateLocator<'a> { #[derive(Clone)] pub(crate) struct CratePaths { - name: Symbol, + pub(crate) name: Symbol, source: CrateSource, } @@ -766,10 +765,10 @@ impl<'a> CrateLocator<'a> { self.extract_lib(rlibs, rmetas, dylibs).map(|opt| opt.map(|(_, lib)| lib)) } - pub(crate) fn into_error(self, root: Option) -> CrateError { + pub(crate) fn into_error(self, dep_root: Option) -> CrateError { CrateError::LocatorCombined(Box::new(CombinedLocatorError { crate_name: self.crate_name, - root, + dep_root, triple: self.tuple, dll_prefix: self.target.dll_prefix.to_string(), dll_suffix: self.target.dll_suffix.to_string(), @@ -915,7 +914,7 @@ struct CrateRejections { /// otherwise they are ignored. 
pub(crate) struct CombinedLocatorError { crate_name: Symbol, - root: Option, + dep_root: Option, triple: TargetTuple, dll_prefix: String, dll_suffix: String, @@ -988,7 +987,7 @@ impl CrateError { } CrateError::LocatorCombined(locator) => { let crate_name = locator.crate_name; - let add_info = match &locator.root { + let add_info = match &locator.dep_root { None => String::new(), Some(r) => format!(" which `{}` depends on", r.name), }; @@ -1013,7 +1012,7 @@ impl CrateError { path.display() )); } - if let Some(r) = locator.root { + if let Some(r) = locator.dep_root { for path in r.source.paths() { found_crates.push_str(&format!( "\ncrate `{}`: {}", diff --git a/compiler/rustc_metadata/src/native_libs.rs b/compiler/rustc_metadata/src/native_libs.rs index fe3bdb3a7b904..e2d043f47c0ef 100644 --- a/compiler/rustc_metadata/src/native_libs.rs +++ b/compiler/rustc_metadata/src/native_libs.rs @@ -16,7 +16,7 @@ use rustc_session::parse::feature_err; use rustc_session::search_paths::PathKind; use rustc_session::utils::NativeLibKind; use rustc_span::def_id::{DefId, LOCAL_CRATE}; -use rustc_span::symbol::{Symbol, sym}; +use rustc_span::{Symbol, sym}; use rustc_target::spec::LinkSelfContainedComponents; use crate::{errors, fluent_generated}; @@ -450,7 +450,7 @@ impl<'tcx> Collector<'tcx> { (name, kind) = (wasm_import_module, Some(NativeLibKind::WasmImportModule)); } let Some((name, name_span)) = name else { - sess.dcx().emit_err(errors::LinkRequiresName { span: m.span }); + sess.dcx().emit_err(errors::LinkRequiresName { span: m.span() }); continue; }; @@ -485,7 +485,7 @@ impl<'tcx> Collector<'tcx> { let link_ordinal_attr = self.tcx.get_attr(child_item, sym::link_ordinal).unwrap(); sess.dcx().emit_err(errors::LinkOrdinalRawDylib { - span: link_ordinal_attr.span, + span: link_ordinal_attr.span(), }); } } @@ -544,7 +544,7 @@ impl<'tcx> Collector<'tcx> { // can move them to the end of the list below. 
let mut existing = self .libs - .extract_if(|lib| { + .extract_if(.., |lib| { if lib.name.as_str() == passed_lib.name { // FIXME: This whole logic is questionable, whether modifiers are // involved or not, library reordering and kind overriding without diff --git a/compiler/rustc_metadata/src/rmeta/decoder.rs b/compiler/rustc_metadata/src/rmeta/decoder.rs index 6eae4f9a8d6ea..daf3bd13bbfbe 100644 --- a/compiler/rustc_metadata/src/rmeta/decoder.rs +++ b/compiler/rustc_metadata/src/rmeta/decoder.rs @@ -31,8 +31,7 @@ use rustc_serialize::{Decodable, Decoder}; use rustc_session::Session; use rustc_session::cstore::{CrateSource, ExternCrate}; use rustc_span::hygiene::HygieneDecodeContext; -use rustc_span::symbol::kw; -use rustc_span::{BytePos, DUMMY_SP, Pos, SpanData, SpanDecoder, SyntaxContext}; +use rustc_span::{BytePos, DUMMY_SP, Pos, SpanData, SpanDecoder, SyntaxContext, kw}; use tracing::debug; use crate::creader::CStore; @@ -872,9 +871,9 @@ impl MetadataBlob { let def_kind = root.tables.def_kind.get(blob, item).unwrap(); let def_key = root.tables.def_keys.get(blob, item).unwrap().decode(blob); - #[cfg_attr(not(bootstrap), allow(rustc::symbol_intern_string_literal))] + #[allow(rustc::symbol_intern_string_literal)] let def_name = if item == CRATE_DEF_INDEX { - rustc_span::symbol::kw::Crate + kw::Crate } else { def_key .disambiguated_data @@ -1061,7 +1060,6 @@ impl<'a> CrateMetadataRef<'a> { let attrs: Vec<_> = self.get_item_attrs(id, sess).collect(); SyntaxExtension::new( sess, - tcx.features(), kind, self.get_span(id, sess), helper_attrs, @@ -1475,7 +1473,7 @@ impl<'a> CrateMetadataRef<'a> { ) -> &'tcx [(CrateNum, LinkagePreference)] { tcx.arena.alloc_from_iter( self.root.dylib_dependency_formats.decode(self).enumerate().flat_map(|(i, link)| { - let cnum = CrateNum::new(i + 1); + let cnum = CrateNum::new(i + 1); // We skipped LOCAL_CRATE when encoding link.map(|link| (self.cnum_map[cnum], link)) }), ) diff --git a/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs b/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs index f8c743d4f278f..527f2f10205a6 100644 --- a/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs +++ b/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs @@ -17,9 +17,8 @@ use rustc_middle::ty::{self, TyCtxt}; use rustc_middle::util::Providers; use rustc_session::cstore::{CrateStore, ExternCrate}; use rustc_session::{Session, StableCrateId}; -use rustc_span::Span; use rustc_span::hygiene::ExpnId; -use rustc_span::symbol::{Symbol, kw}; +use rustc_span::{Span, Symbol, kw}; use super::{Decodable, DecodeContext, DecodeIterator}; use crate::creader::{CStore, LoadedMacro}; diff --git a/compiler/rustc_metadata/src/rmeta/encoder.rs b/compiler/rustc_metadata/src/rmeta/encoder.rs index 88906d71597c2..c538ab99fb54b 100644 --- a/compiler/rustc_metadata/src/rmeta/encoder.rs +++ b/compiler/rustc_metadata/src/rmeta/encoder.rs @@ -27,9 +27,9 @@ use rustc_middle::{bug, span_bug}; use rustc_serialize::{Decodable, Decoder, Encodable, Encoder, opaque}; use rustc_session::config::{CrateType, OptLevel}; use rustc_span::hygiene::HygieneEncodeContext; -use rustc_span::symbol::sym; use rustc_span::{ ExternalSource, FileName, SourceFile, SpanData, SpanEncoder, StableSourceFileId, SyntaxContext, + sym, }; use tracing::{debug, instrument, trace}; @@ -2165,12 +2165,14 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { empty_proc_macro!(self); let formats = self.tcx.dependency_formats(()); if let Some(arr) = formats.get(&CrateType::Dylib) { - return self.lazy_array(arr.iter().map(|slot| 
match *slot { - Linkage::NotLinked | Linkage::IncludedFromDylib => None, + return self.lazy_array(arr.iter().skip(1 /* skip LOCAL_CRATE */).map( + |slot| match *slot { + Linkage::NotLinked | Linkage::IncludedFromDylib => None, - Linkage::Dynamic => Some(LinkagePreference::RequireDynamic), - Linkage::Static => Some(LinkagePreference::RequireStatic), - })); + Linkage::Dynamic => Some(LinkagePreference::RequireDynamic), + Linkage::Static => Some(LinkagePreference::RequireStatic), + }, + )); } LazyArray::default() } diff --git a/compiler/rustc_metadata/src/rmeta/mod.rs b/compiler/rustc_metadata/src/rmeta/mod.rs index f1872807aef84..4f9cdc9a474bd 100644 --- a/compiler/rustc_metadata/src/rmeta/mod.rs +++ b/compiler/rustc_metadata/src/rmeta/mod.rs @@ -15,7 +15,7 @@ use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, DefIndex, DefPathHash, Stable use rustc_hir::definitions::DefKey; use rustc_hir::lang_items::LangItem; use rustc_index::IndexVec; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_macros::{ Decodable, Encodable, MetadataDecodable, MetadataEncodable, TyDecodable, TyEncodable, }; @@ -36,8 +36,7 @@ use rustc_session::config::SymbolManglingVersion; use rustc_session::cstore::{CrateDepKind, ForeignModule, LinkagePreference, NativeLib}; use rustc_span::edition::Edition; use rustc_span::hygiene::{ExpnIndex, MacroKind, SyntaxContextData}; -use rustc_span::symbol::{Ident, Symbol}; -use rustc_span::{self, ExpnData, ExpnHash, ExpnId, Span}; +use rustc_span::{self, ExpnData, ExpnHash, ExpnId, Ident, Span, Symbol}; use rustc_target::spec::{PanicStrategy, TargetTuple}; use table::TableBuilder; use {rustc_ast as ast, rustc_attr_parsing as attr, rustc_hir as hir}; @@ -304,9 +303,9 @@ pub(crate) struct RawDefId { index: u32, } -impl Into for DefId { - fn into(self) -> RawDefId { - RawDefId { krate: self.krate.as_u32(), index: self.index.as_u32() } +impl From for RawDefId { + fn from(val: DefId) -> Self { + RawDefId { krate: val.krate.as_u32(), index: val.index.as_u32() } } } @@ -451,7 +450,7 @@ define_tables! { trait_item_def_id: Table, expn_that_defined: Table>, default_fields: Table>, - params_in_repr: Table>>, + params_in_repr: Table>>, repr_options: Table>, // `def_keys` and `def_path_hashes` represent a lazy version of a // `DefPathTable`. This allows us to avoid deserializing an entire diff --git a/compiler/rustc_middle/Cargo.toml b/compiler/rustc_middle/Cargo.toml index e64500f812a1d..2c34df6ea61a3 100644 --- a/compiler/rustc_middle/Cargo.toml +++ b/compiler/rustc_middle/Cargo.toml @@ -6,7 +6,6 @@ edition = "2021" [dependencies] # tidy-alphabetical-start bitflags = "2.4.1" -derive-where = "1.2.7" either = "1.5.0" field-offset = "0.3.5" gsgdt = "0.1.2" diff --git a/compiler/rustc_middle/src/arena.rs b/compiler/rustc_middle/src/arena.rs index 52233d407f2e0..750531b638e4d 100644 --- a/compiler/rustc_middle/src/arena.rs +++ b/compiler/rustc_middle/src/arena.rs @@ -86,8 +86,8 @@ macro_rules! 
arena_types { [] dyn_compatibility_violations: rustc_middle::traits::DynCompatibilityViolation, [] codegen_unit: rustc_middle::mir::mono::CodegenUnit<'tcx>, [decode] attribute: rustc_hir::Attribute, - [] name_set: rustc_data_structures::unord::UnordSet, - [] ordered_name_set: rustc_data_structures::fx::FxIndexSet, + [] name_set: rustc_data_structures::unord::UnordSet, + [] ordered_name_set: rustc_data_structures::fx::FxIndexSet, [] pats: rustc_middle::ty::PatternKind<'tcx>, // Note that this deliberately duplicates items in the `rustc_hir::arena`, diff --git a/compiler/rustc_middle/src/dep_graph/dep_node.rs b/compiler/rustc_middle/src/dep_graph/dep_node.rs index 87bbeb178ee19..fcfc31575f8aa 100644 --- a/compiler/rustc_middle/src/dep_graph/dep_node.rs +++ b/compiler/rustc_middle/src/dep_graph/dep_node.rs @@ -64,7 +64,7 @@ pub use rustc_query_system::dep_graph::DepNode; use rustc_query_system::dep_graph::FingerprintStyle; pub use rustc_query_system::dep_graph::dep_node::DepKind; pub(crate) use rustc_query_system::dep_graph::{DepContext, DepNodeParams}; -use rustc_span::symbol::Symbol; +use rustc_span::Symbol; use crate::mir::mono::MonoItem; use crate::ty::TyCtxt; diff --git a/compiler/rustc_middle/src/hir/map/mod.rs b/compiler/rustc_middle/src/hir/map/mod.rs index fc3cbfd7b3e4f..5d78bed5cf80e 100644 --- a/compiler/rustc_middle/src/hir/map/mod.rs +++ b/compiler/rustc_middle/src/hir/map/mod.rs @@ -12,8 +12,7 @@ use rustc_hir::*; use rustc_hir_pretty as pprust_hir; use rustc_middle::hir::nested_filter; use rustc_span::def_id::StableCrateId; -use rustc_span::symbol::{Ident, Symbol, kw, sym}; -use rustc_span::{ErrorGuaranteed, Span}; +use rustc_span::{ErrorGuaranteed, Ident, Span, Symbol, kw, sym}; use crate::hir::ModuleItems; use crate::middle::debugger_visualizer::DebuggerVisualizerFile; @@ -635,7 +634,7 @@ impl<'hir> Map<'hir> { for (hir_id, node) in self.parent_iter(hir_id) { if let Node::Item(Item { kind: - ItemKind::Fn(..) + ItemKind::Fn { .. } | ItemKind::Const(..) | ItemKind::Static(..) | ItemKind::Mod(..) @@ -824,7 +823,7 @@ impl<'hir> Map<'hir> { let span = match self.tcx.hir_node(hir_id) { // Function-like. - Node::Item(Item { kind: ItemKind::Fn(sig, ..), span: outer_span, .. }) + Node::Item(Item { kind: ItemKind::Fn { sig, .. }, span: outer_span, .. }) | Node::TraitItem(TraitItem { kind: TraitItemKind::Fn(sig, ..), span: outer_span, @@ -939,6 +938,7 @@ impl<'hir> Map<'hir> { Node::OpaqueTy(op) => op.span, Node::Pat(pat) => pat.span, Node::PatField(field) => field.span, + Node::PatExpr(lit) => lit.span, Node::Arm(arm) => arm.span, Node::Block(block) => block.span, Node::Ctor(..) => self.span_with_body(self.tcx.parent_hir_id(hir_id)), @@ -1150,7 +1150,7 @@ fn hir_id_to_string(map: Map<'_>, id: HirId) -> String { ItemKind::Use(..) => "use", ItemKind::Static(..) => "static", ItemKind::Const(..) => "const", - ItemKind::Fn(..) => "fn", + ItemKind::Fn { .. } => "fn", ItemKind::Macro(..) => "macro", ItemKind::Mod(..) => "mod", ItemKind::ForeignMod { .. } => "foreign mod", @@ -1210,6 +1210,7 @@ fn hir_id_to_string(map: Map<'_>, id: HirId) -> String { Node::OpaqueTy(_) => node_str("opaque type"), Node::Pat(_) => node_str("pat"), Node::PatField(_) => node_str("pattern field"), + Node::PatExpr(_) => node_str("pattern literal"), Node::Param(_) => node_str("param"), Node::Arm(_) => node_str("arm"), Node::Block(_) => node_str("block"), @@ -1245,6 +1246,7 @@ pub(super) fn hir_module_items(tcx: TyCtxt<'_>, module_id: LocalModDefId) -> Mod foreign_items, body_owners, opaques, + nested_bodies, .. 
} = collector; ModuleItems { @@ -1255,6 +1257,7 @@ pub(super) fn hir_module_items(tcx: TyCtxt<'_>, module_id: LocalModDefId) -> Mod foreign_items: foreign_items.into_boxed_slice(), body_owners: body_owners.into_boxed_slice(), opaques: opaques.into_boxed_slice(), + nested_bodies: nested_bodies.into_boxed_slice(), } } @@ -1275,6 +1278,7 @@ pub(crate) fn hir_crate_items(tcx: TyCtxt<'_>, _: ()) -> ModuleItems { foreign_items, body_owners, opaques, + nested_bodies, .. } = collector; @@ -1286,6 +1290,7 @@ pub(crate) fn hir_crate_items(tcx: TyCtxt<'_>, _: ()) -> ModuleItems { foreign_items: foreign_items.into_boxed_slice(), body_owners: body_owners.into_boxed_slice(), opaques: opaques.into_boxed_slice(), + nested_bodies: nested_bodies.into_boxed_slice(), } } @@ -1301,6 +1306,7 @@ struct ItemCollector<'tcx> { foreign_items: Vec, body_owners: Vec, opaques: Vec, + nested_bodies: Vec, } impl<'tcx> ItemCollector<'tcx> { @@ -1315,6 +1321,7 @@ impl<'tcx> ItemCollector<'tcx> { foreign_items: Vec::default(), body_owners: Vec::default(), opaques: Vec::default(), + nested_bodies: Vec::default(), } } } @@ -1357,6 +1364,7 @@ impl<'hir> Visitor<'hir> for ItemCollector<'hir> { fn visit_inline_const(&mut self, c: &'hir ConstBlock) { self.body_owners.push(c.def_id); + self.nested_bodies.push(c.def_id); intravisit::walk_inline_const(self, c) } @@ -1368,6 +1376,7 @@ impl<'hir> Visitor<'hir> for ItemCollector<'hir> { fn visit_expr(&mut self, ex: &'hir Expr<'hir>) { if let ExprKind::Closure(closure) = ex.kind { self.body_owners.push(closure.def_id); + self.nested_bodies.push(closure.def_id); } intravisit::walk_expr(self, ex) } diff --git a/compiler/rustc_middle/src/hir/mod.rs b/compiler/rustc_middle/src/hir/mod.rs index ffefd81cd08e4..0d2acf96d08f1 100644 --- a/compiler/rustc_middle/src/hir/mod.rs +++ b/compiler/rustc_middle/src/hir/mod.rs @@ -30,6 +30,7 @@ pub struct ModuleItems { foreign_items: Box<[ForeignItemId]>, opaques: Box<[LocalDefId]>, body_owners: Box<[LocalDefId]>, + nested_bodies: Box<[LocalDefId]>, } impl ModuleItems { @@ -70,6 +71,10 @@ impl ModuleItems { self.opaques.iter().copied() } + pub fn nested_bodies(&self) -> impl Iterator + '_ { + self.nested_bodies.iter().copied() + } + pub fn definitions(&self) -> impl Iterator + '_ { self.owners().map(|id| id.def_id) } diff --git a/compiler/rustc_middle/src/infer/canonical.rs b/compiler/rustc_middle/src/infer/canonical.rs index ac55497f8b3d4..0f408375e05f3 100644 --- a/compiler/rustc_middle/src/infer/canonical.rs +++ b/compiler/rustc_middle/src/infer/canonical.rs @@ -30,7 +30,6 @@ pub use rustc_type_ir as ir; pub use rustc_type_ir::{CanonicalTyVarKind, CanonicalVarKind}; use smallvec::SmallVec; -use crate::infer::MemberConstraint; use crate::mir::ConstraintCategory; use crate::ty::{self, GenericArg, List, Ty, TyCtxt, TypeFlags, TypeVisitableExt}; @@ -91,14 +90,13 @@ pub struct QueryResponse<'tcx, R> { #[derive(HashStable, TypeFoldable, TypeVisitable)] pub struct QueryRegionConstraints<'tcx> { pub outlives: Vec>, - pub member_constraints: Vec>, } impl QueryRegionConstraints<'_> { /// Represents an empty (trivially true) set of region /// constraints. 
pub fn is_empty(&self) -> bool { - self.outlives.is_empty() && self.member_constraints.is_empty() + self.outlives.is_empty() } } diff --git a/compiler/rustc_middle/src/infer/mod.rs b/compiler/rustc_middle/src/infer/mod.rs index 19fe9e5a54f53..3dfcf90cb9356 100644 --- a/compiler/rustc_middle/src/infer/mod.rs +++ b/compiler/rustc_middle/src/infer/mod.rs @@ -1,34 +1,2 @@ pub mod canonical; pub mod unify_key; - -use rustc_data_structures::sync::Lrc; -use rustc_macros::{HashStable, TypeFoldable, TypeVisitable}; -use rustc_span::Span; - -use crate::ty::{OpaqueTypeKey, Region, Ty}; - -/// Requires that `region` must be equal to one of the regions in `choice_regions`. -/// We often denote this using the syntax: -/// -/// ```text -/// R0 member of [O1..On] -/// ``` -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -#[derive(HashStable, TypeFoldable, TypeVisitable)] -pub struct MemberConstraint<'tcx> { - /// The `DefId` and args of the opaque type causing this constraint. - /// Used for error reporting. - pub key: OpaqueTypeKey<'tcx>, - - /// The span where the hidden type was instantiated. - pub definition_span: Span, - - /// The hidden type in which `member_region` appears: used for error reporting. - pub hidden_ty: Ty<'tcx>, - - /// The region `R0`. - pub member_region: Region<'tcx>, - - /// The options `O1..On`. - pub choice_regions: Lrc>>, -} diff --git a/compiler/rustc_middle/src/lint.rs b/compiler/rustc_middle/src/lint.rs index 971d036fa69dd..620d9f1c35790 100644 --- a/compiler/rustc_middle/src/lint.rs +++ b/compiler/rustc_middle/src/lint.rs @@ -9,7 +9,7 @@ use rustc_session::Session; use rustc_session::lint::builtin::{self, FORBIDDEN_LINT_GROUPS}; use rustc_session::lint::{FutureIncompatibilityReason, Level, Lint, LintExpectationId, LintId}; use rustc_span::hygiene::{ExpnKind, MacroKind}; -use rustc_span::{DUMMY_SP, DesugaringKind, Span, Symbol, symbol}; +use rustc_span::{DUMMY_SP, DesugaringKind, Span, Symbol, kw}; use tracing::instrument; use crate::ty::TyCtxt; @@ -37,7 +37,7 @@ pub enum LintLevelSource { impl LintLevelSource { pub fn name(&self) -> Symbol { match *self { - LintLevelSource::Default => symbol::kw::Default, + LintLevelSource::Default => kw::Default, LintLevelSource::Node { name, .. } => name, LintLevelSource::CommandLine(name, _) => name, } diff --git a/compiler/rustc_middle/src/metadata.rs b/compiler/rustc_middle/src/metadata.rs index c3175c6bdf513..57c8960943b1c 100644 --- a/compiler/rustc_middle/src/metadata.rs +++ b/compiler/rustc_middle/src/metadata.rs @@ -1,7 +1,7 @@ use rustc_hir::def::Res; use rustc_macros::{HashStable, TyDecodable, TyEncodable}; +use rustc_span::Ident; use rustc_span::def_id::DefId; -use rustc_span::symbol::Ident; use smallvec::SmallVec; use crate::ty; diff --git a/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs b/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs index 509063c40d77f..e05f42af6fd24 100644 --- a/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs +++ b/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs @@ -1,7 +1,7 @@ use rustc_abi::Align; use rustc_attr_parsing::{InlineAttr, InstructionSetAttr, OptimizeAttr}; use rustc_macros::{HashStable, TyDecodable, TyEncodable}; -use rustc_span::symbol::Symbol; +use rustc_span::Symbol; use rustc_target::spec::SanitizerSet; use crate::mir::mono::Linkage; @@ -28,7 +28,10 @@ pub struct CodegenFnAttrs { pub link_ordinal: Option, /// The `#[target_feature(enable = "...")]` attribute and the enabled /// features (only enabled features are supported right now). 
+ /// Implied target features have already been applied. pub target_features: Vec, + /// Whether the function was declared safe, but has target features + pub safe_target_features: bool, /// The `#[linkage = "..."]` attribute on Rust-defined items and the value we found. pub linkage: Option, /// The `#[linkage = "..."]` attribute on foreign items and the value we found. @@ -149,6 +152,7 @@ impl CodegenFnAttrs { link_name: None, link_ordinal: None, target_features: vec![], + safe_target_features: false, linkage: None, import_linkage: None, link_section: None, diff --git a/compiler/rustc_middle/src/middle/dependency_format.rs b/compiler/rustc_middle/src/middle/dependency_format.rs index e3b40b6415784..4f613e97631d0 100644 --- a/compiler/rustc_middle/src/middle/dependency_format.rs +++ b/compiler/rustc_middle/src/middle/dependency_format.rs @@ -8,13 +8,13 @@ // this will introduce circular dependency between rustc_metadata and rustc_middle use rustc_data_structures::fx::FxIndexMap; +use rustc_hir::def_id::CrateNum; +use rustc_index::IndexVec; use rustc_macros::{Decodable, Encodable, HashStable}; use rustc_session::config::CrateType; /// A list of dependencies for a certain crate type. -/// -/// The length of this vector is the same as the number of external crates used. -pub type DependencyList = Vec; +pub type DependencyList = IndexVec; /// A mapping of all required dependencies for a particular flavor of output. /// diff --git a/compiler/rustc_middle/src/middle/limits.rs b/compiler/rustc_middle/src/middle/limits.rs index 3a3e84a87af5e..8a367a947d16b 100644 --- a/compiler/rustc_middle/src/middle/limits.rs +++ b/compiler/rustc_middle/src/middle/limits.rs @@ -12,7 +12,7 @@ use std::num::IntErrorKind; use rustc_ast::attr::AttributeExt; use rustc_session::{Limit, Limits, Session}; -use rustc_span::symbol::{Symbol, sym}; +use rustc_span::{Symbol, sym}; use crate::error::LimitInvalid; use crate::query::Providers; diff --git a/compiler/rustc_middle/src/middle/mod.rs b/compiler/rustc_middle/src/middle/mod.rs index 83873439bd927..692fe027c4903 100644 --- a/compiler/rustc_middle/src/middle/mod.rs +++ b/compiler/rustc_middle/src/middle/mod.rs @@ -6,8 +6,7 @@ pub mod lang_items; pub mod lib_features { use rustc_data_structures::unord::UnordMap; use rustc_macros::{HashStable, TyDecodable, TyEncodable}; - use rustc_span::Span; - use rustc_span::symbol::Symbol; + use rustc_span::{Span, Symbol}; #[derive(Copy, Clone, Debug, PartialEq, Eq)] #[derive(HashStable, TyEncodable, TyDecodable)] diff --git a/compiler/rustc_middle/src/middle/region.rs b/compiler/rustc_middle/src/middle/region.rs index 114211b27c179..66861519e17c8 100644 --- a/compiler/rustc_middle/src/middle/region.rs +++ b/compiler/rustc_middle/src/middle/region.rs @@ -84,23 +84,23 @@ use crate::ty::TyCtxt; #[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Copy, TyEncodable, TyDecodable)] #[derive(HashStable)] pub struct Scope { - pub id: hir::ItemLocalId, + pub local_id: hir::ItemLocalId, pub data: ScopeData, } impl fmt::Debug for Scope { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { match self.data { - ScopeData::Node => write!(fmt, "Node({:?})", self.id), - ScopeData::CallSite => write!(fmt, "CallSite({:?})", self.id), - ScopeData::Arguments => write!(fmt, "Arguments({:?})", self.id), - ScopeData::Destruction => write!(fmt, "Destruction({:?})", self.id), - ScopeData::IfThen => write!(fmt, "IfThen({:?})", self.id), - ScopeData::IfThenRescope => write!(fmt, "IfThen[edition2024]({:?})", self.id), + ScopeData::Node => write!(fmt, 
"Node({:?})", self.local_id), + ScopeData::CallSite => write!(fmt, "CallSite({:?})", self.local_id), + ScopeData::Arguments => write!(fmt, "Arguments({:?})", self.local_id), + ScopeData::Destruction => write!(fmt, "Destruction({:?})", self.local_id), + ScopeData::IfThen => write!(fmt, "IfThen({:?})", self.local_id), + ScopeData::IfThenRescope => write!(fmt, "IfThen[edition2024]({:?})", self.local_id), ScopeData::Remainder(fsi) => write!( fmt, "Remainder {{ block: {:?}, first_statement_index: {}}}", - self.id, + self.local_id, fsi.as_u32(), ), } @@ -164,18 +164,8 @@ rustc_index::newtype_index! { rustc_data_structures::static_assert_size!(ScopeData, 4); impl Scope { - /// Returns an item-local ID associated with this scope. - /// - /// N.B., likely to be replaced as API is refined; e.g., pnkfelix - /// anticipates `fn entry_node_id` and `fn each_exit_node_id`. - pub fn item_local_id(&self) -> hir::ItemLocalId { - self.id - } - pub fn hir_id(&self, scope_tree: &ScopeTree) -> Option { - scope_tree - .root_body - .map(|hir_id| HirId { owner: hir_id.owner, local_id: self.item_local_id() }) + scope_tree.root_body.map(|hir_id| HirId { owner: hir_id.owner, local_id: self.local_id }) } /// Returns the span of this `Scope`. Note that in general the @@ -350,7 +340,7 @@ impl ScopeTree { pub fn record_var_scope(&mut self, var: hir::ItemLocalId, lifetime: Scope) { debug!("record_var_scope(sub={:?}, sup={:?})", var, lifetime); - assert!(var != lifetime.item_local_id()); + assert!(var != lifetime.local_id); self.var_map.insert(var, lifetime); } @@ -359,7 +349,7 @@ impl ScopeTree { match &candidate_type { RvalueCandidateType::Borrow { lifetime: Some(lifetime), .. } | RvalueCandidateType::Pattern { lifetime: Some(lifetime), .. } => { - assert!(var.local_id != lifetime.item_local_id()) + assert!(var.local_id != lifetime.local_id) } _ => {} } diff --git a/compiler/rustc_middle/src/middle/stability.rs b/compiler/rustc_middle/src/middle/stability.rs index ece561c7493d4..77a7da2c74bc2 100644 --- a/compiler/rustc_middle/src/middle/stability.rs +++ b/compiler/rustc_middle/src/middle/stability.rs @@ -18,8 +18,7 @@ use rustc_session::Session; use rustc_session::lint::builtin::{DEPRECATED, DEPRECATED_IN_FUTURE, SOFT_UNSTABLE}; use rustc_session::lint::{BuiltinLintDiag, DeprecatedSinceKind, Level, Lint, LintBuffer}; use rustc_session::parse::feature_err_issue; -use rustc_span::Span; -use rustc_span::symbol::{Symbol, sym}; +use rustc_span::{Span, Symbol, sym}; use tracing::debug; pub use self::StabilityLevel::*; @@ -31,6 +30,14 @@ pub enum StabilityLevel { Stable, } +#[derive(Copy, Clone)] +pub enum UnstableKind { + /// Enforcing regular stability of an item + Regular, + /// Enforcing const stability of an item + Const(Span), +} + /// An entry in the `depr_map`. 
#[derive(Copy, Clone, HashStable, Debug, Encodable, Decodable)] pub struct DeprecationEntry { @@ -109,10 +116,16 @@ pub fn report_unstable( is_soft: bool, span: Span, soft_handler: impl FnOnce(&'static Lint, Span, String), + kind: UnstableKind, ) { + let qual = match kind { + UnstableKind::Regular => "", + UnstableKind::Const(_) => " const", + }; + let msg = match reason { - Some(r) => format!("use of unstable library feature `{feature}`: {r}"), - None => format!("use of unstable library feature `{feature}`"), + Some(r) => format!("use of unstable{qual} library feature `{feature}`: {r}"), + None => format!("use of unstable{qual} library feature `{feature}`"), }; if is_soft { @@ -122,6 +135,9 @@ pub fn report_unstable( if let Some((inner_types, msg, sugg, applicability)) = suggestion { err.span_suggestion(inner_types, msg, sugg, applicability); } + if let UnstableKind::Const(kw) = kind { + err.span_label(kw, "trait is not stable as const yet"); + } err.emit(); } } @@ -233,9 +249,18 @@ fn late_report_deprecation( return; } + let is_in_effect = depr.is_in_effect(); + let lint = deprecation_lint(is_in_effect); + + // Calculating message for lint involves calling `self.def_path_str`, + // which will by default invoke the expensive `visible_parent_map` query. + // Skip all that work if the lint is allowed anyway. + if tcx.lint_level_at_node(lint, hir_id).0 == Level::Allow { + return; + } + let def_path = with_no_trimmed_paths!(tcx.def_path_str(def_id)); let def_kind = tcx.def_descr(def_id); - let is_in_effect = depr.is_in_effect(); let method_span = method_span.unwrap_or(span); let suggestion = @@ -251,7 +276,7 @@ fn late_report_deprecation( note: depr.note, since_kind: deprecated_since_kind(is_in_effect, depr.since), }; - tcx.emit_node_span_lint(deprecation_lint(is_in_effect), hir_id, method_span, diag); + tcx.emit_node_span_lint(lint, hir_id, method_span, diag); } /// Result of `TyCtxt::eval_stability`. @@ -361,13 +386,7 @@ impl<'tcx> TyCtxt<'tcx> { // hierarchy. let depr_attr = &depr_entry.attr; if !skip || depr_attr.is_since_rustc_version() { - // Calculating message for lint involves calling `self.def_path_str`. - // Which by default to calculate visible path will invoke expensive `visible_parent_map` query. - // So we skip message calculation altogether, if lint is allowed. - let lint = deprecation_lint(depr_attr.is_in_effect()); - if self.lint_level_at_node(lint, id).0 != Level::Allow { - late_report_deprecation(self, depr_attr, span, method_span, id, def_id); - } + late_report_deprecation(self, depr_attr, span, method_span, id, def_id); } }; } @@ -588,6 +607,7 @@ impl<'tcx> TyCtxt<'tcx> { is_soft, span, soft_handler, + UnstableKind::Regular, ), EvalResult::Unmarked => unmarked(span, def_id), } @@ -595,6 +615,73 @@ impl<'tcx> TyCtxt<'tcx> { is_allowed } + /// This function is analogous to `check_optional_stability` but with the logic in + /// `eval_stability_allow_unstable` inlined, and which operating on const stability + /// instead of regular stability. + /// + /// This enforces *syntactical* const stability of const traits. In other words, + /// it enforces the ability to name `~const`/`const` traits in trait bounds in various + /// syntax positions in HIR (including in the trait of an impl header). 
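The new `UnstableKind` threaded into `report_unstable` above only changes the message text. A simplified sketch of that formatting follows; the real enum carries a `Span` payload and the real code goes through rustc's diagnostics machinery, while this standalone function does not:

enum UnstableKind { Regular, Const }

fn unstable_msg(feature: &str, reason: Option<&str>, kind: UnstableKind) -> String {
    // Mirrors the `qual` string computed in `report_unstable` above.
    let qual = match kind {
        UnstableKind::Regular => "",
        UnstableKind::Const => " const",
    };
    match reason {
        Some(r) => format!("use of unstable{qual} library feature `{feature}`: {r}"),
        None => format!("use of unstable{qual} library feature `{feature}`"),
    }
}

fn main() {
    assert_eq!(
        unstable_msg("const_trait_impl", None, UnstableKind::Const),
        "use of unstable const library feature `const_trait_impl`"
    );
    assert_eq!(
        unstable_msg("step_trait", Some("recently added"), UnstableKind::Regular),
        "use of unstable library feature `step_trait`: recently added"
    );
}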
+ pub fn check_const_stability(self, def_id: DefId, span: Span, const_kw_span: Span) { + let is_staged_api = self.lookup_stability(def_id.krate.as_def_id()).is_some(); + if !is_staged_api { + return; + } + + // Only the cross-crate scenario matters when checking unstable APIs + let cross_crate = !def_id.is_local(); + if !cross_crate { + return; + } + + let stability = self.lookup_const_stability(def_id); + debug!( + "stability: \ + inspecting def_id={:?} span={:?} of stability={:?}", + def_id, span, stability + ); + + match stability { + Some(ConstStability { + level: attr::StabilityLevel::Unstable { reason, issue, is_soft, implied_by, .. }, + feature, + .. + }) => { + assert!(!is_soft); + + if span.allows_unstable(feature) { + debug!("body stability: skipping span={:?} since it is internal", span); + return; + } + if self.features().enabled(feature) { + return; + } + + // If this item was previously part of a now-stabilized feature which is still + // enabled (i.e. the user hasn't removed the attribute for the stabilized feature + // yet) then allow use of this item. + if let Some(implied_by) = implied_by + && self.features().enabled(implied_by) + { + return; + } + + report_unstable( + self.sess, + feature, + reason.to_opt_reason(), + issue, + None, + false, + span, + |_, _, _| {}, + UnstableKind::Const(const_kw_span), + ); + } + Some(_) | None => {} + } + } + pub fn lookup_deprecation(self, id: DefId) -> Option { self.lookup_deprecation_entry(id).map(|depr| depr.attr) } diff --git a/compiler/rustc_middle/src/mir/consts.rs b/compiler/rustc_middle/src/mir/consts.rs index d8a3919192058..1231ea8856961 100644 --- a/compiler/rustc_middle/src/mir/consts.rs +++ b/compiler/rustc_middle/src/mir/consts.rs @@ -5,7 +5,7 @@ use rustc_hir::def_id::DefId; use rustc_macros::{HashStable, Lift, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable}; use rustc_session::RemapFileNameExt; use rustc_session::config::RemapPathScopeComponents; -use rustc_span::{DUMMY_SP, Span}; +use rustc_span::{DUMMY_SP, Span, Symbol}; use rustc_type_ir::visit::TypeVisitableExt; use super::interpret::ReportedErrorInfo; @@ -569,7 +569,7 @@ impl<'tcx> TyCtxt<'tcx> { let topmost = span.ctxt().outer_expn().expansion_cause().unwrap_or(span); let caller = self.sess.source_map().lookup_char_pos(topmost.lo()); self.const_caller_location( - rustc_span::symbol::Symbol::intern( + Symbol::intern( &caller .file .name diff --git a/compiler/rustc_middle/src/mir/coverage.rs b/compiler/rustc_middle/src/mir/coverage.rs index 962176290df32..cd6b2d65bf184 100644 --- a/compiler/rustc_middle/src/mir/coverage.rs +++ b/compiler/rustc_middle/src/mir/coverage.rs @@ -3,7 +3,7 @@ use std::fmt::{self, Debug, Formatter}; use rustc_index::IndexVec; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable}; use rustc_span::Span; @@ -154,22 +154,6 @@ impl Debug for CoverageKind { } } -#[derive(Clone, TyEncodable, TyDecodable, Hash, HashStable, PartialEq, Eq, PartialOrd, Ord)] -#[derive(TypeFoldable, TypeVisitable)] -pub struct SourceRegion { - pub start_line: u32, - pub start_col: u32, - pub end_line: u32, - pub end_col: u32, -} - -impl Debug for SourceRegion { - fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result { - let &Self { start_line, start_col, end_line, end_col } = self; - write!(fmt, "{start_line}:{start_col} - {end_line}:{end_col}") - } -} - #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, HashStable)] #[derive(TyEncodable, TyDecodable, 
TypeFoldable, TypeVisitable)] pub enum Op { @@ -231,7 +215,7 @@ impl MappingKind { #[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)] pub struct Mapping { pub kind: MappingKind, - pub source_region: SourceRegion, + pub span: Span, } /// Stores per-function coverage information attached to a `mir::Body`, @@ -319,8 +303,8 @@ pub struct MCDCDecisionSpan { /// Used by the `coverage_ids_info` query. #[derive(Clone, TyEncodable, TyDecodable, Debug, HashStable)] pub struct CoverageIdsInfo { - pub counters_seen: BitSet, - pub zero_expressions: BitSet, + pub counters_seen: DenseBitSet, + pub zero_expressions: DenseBitSet, } impl CoverageIdsInfo { diff --git a/compiler/rustc_middle/src/mir/interpret/error.rs b/compiler/rustc_middle/src/mir/interpret/error.rs index fbada6ec405f3..8c085fa310ab5 100644 --- a/compiler/rustc_middle/src/mir/interpret/error.rs +++ b/compiler/rustc_middle/src/mir/interpret/error.rs @@ -88,10 +88,10 @@ impl ReportedErrorInfo { } } -impl Into for ReportedErrorInfo { +impl From for ErrorGuaranteed { #[inline] - fn into(self) -> ErrorGuaranteed { - self.error + fn from(val: ReportedErrorInfo) -> Self { + val.error } } @@ -392,7 +392,7 @@ pub enum UndefinedBehaviorInfo<'tcx> { /// A discriminant of an uninhabited enum variant is written. UninhabitedEnumVariantWritten(VariantIdx), /// An uninhabited enum variant is projected. - UninhabitedEnumVariantRead(VariantIdx), + UninhabitedEnumVariantRead(Option), /// Trying to set discriminant to the niched variant, but the value does not match. InvalidNichedEnumVariantWritten { enum_ty: Ty<'tcx> }, /// ABI-incompatible argument types. diff --git a/compiler/rustc_middle/src/mir/interpret/mod.rs b/compiler/rustc_middle/src/mir/interpret/mod.rs index 8d73c9e76de1a..b88137544bca3 100644 --- a/compiler/rustc_middle/src/mir/interpret/mod.rs +++ b/compiler/rustc_middle/src/mir/interpret/mod.rs @@ -16,7 +16,6 @@ use rustc_abi::{AddressSpace, Align, Endian, HasDataLayout, Size}; use rustc_ast::{LitKind, Mutability}; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::sync::Lock; -use rustc_errors::ErrorGuaranteed; use rustc_hir::def::DefKind; use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable}; @@ -84,16 +83,6 @@ pub struct LitToConstInput<'tcx> { pub neg: bool, } -/// Error type for `tcx.lit_to_const`. -#[derive(Copy, Clone, Debug, Eq, PartialEq, HashStable)] -pub enum LitToConstError { - /// The literal's inferred type did not match the expected `ty` in the input. - /// This is used for graceful error handling (`span_delayed_bug`) in - /// type checking (`Const::from_anon_const`). - TypeError, - Reported(ErrorGuaranteed), -} - #[derive(Copy, Clone, Eq, Hash, Ord, PartialEq, PartialOrd)] pub struct AllocId(pub NonZero); diff --git a/compiler/rustc_middle/src/mir/interpret/queries.rs b/compiler/rustc_middle/src/mir/interpret/queries.rs index f7f38575bd035..4c47d9636d3f2 100644 --- a/compiler/rustc_middle/src/mir/interpret/queries.rs +++ b/compiler/rustc_middle/src/mir/interpret/queries.rs @@ -16,12 +16,12 @@ use crate::ty::{self, GenericArgs, TyCtxt}; impl<'tcx> TyCtxt<'tcx> { /// Evaluates a constant without providing any generic parameters. This is useful to evaluate consts /// that can't take any generic arguments like const items or enum discriminants. If a - /// generic parameter is used within the constant `ErrorHandled::ToGeneric` will be returned. 
+ /// generic parameter is used within the constant `ErrorHandled::TooGeneric` will be returned. #[instrument(skip(self), level = "debug")] pub fn const_eval_poly(self, def_id: DefId) -> EvalToConstValueResult<'tcx> { // In some situations def_id will have generic parameters within scope, but they aren't allowed // to be used. So we can't use `Instance::mono`, instead we feed unresolved generic parameters - // into `const_eval` which will return `ErrorHandled::ToGeneric` if any of them are + // into `const_eval` which will return `ErrorHandled::TooGeneric` if any of them are // encountered. let args = GenericArgs::identity_for_item(self, def_id); let instance = ty::Instance::new(def_id, args); @@ -32,12 +32,12 @@ impl<'tcx> TyCtxt<'tcx> { /// Evaluates a constant without providing any generic parameters. This is useful to evaluate consts /// that can't take any generic arguments like const items or enum discriminants. If a - /// generic parameter is used within the constant `ErrorHandled::ToGeneric` will be returned. + /// generic parameter is used within the constant `ErrorHandled::TooGeneric` will be returned. #[instrument(skip(self), level = "debug")] pub fn const_eval_poly_to_alloc(self, def_id: DefId) -> EvalToAllocationRawResult<'tcx> { // In some situations def_id will have generic parameters within scope, but they aren't allowed // to be used. So we can't use `Instance::mono`, instead we feed unresolved generic parameters - // into `const_eval` which will return `ErrorHandled::ToGeneric` if any of them are + // into `const_eval` which will return `ErrorHandled::TooGeneric` if any of them are // encountered. let args = GenericArgs::identity_for_item(self, def_id); let instance = ty::Instance::new(def_id, args); @@ -201,12 +201,12 @@ impl<'tcx> TyCtxt<'tcx> { impl<'tcx> TyCtxtEnsure<'tcx> { /// Evaluates a constant without providing any generic parameters. This is useful to evaluate consts /// that can't take any generic arguments like const items or enum discriminants. If a - /// generic parameter is used within the constant `ErrorHandled::ToGeneric` will be returned. + /// generic parameter is used within the constant `ErrorHandled::TooGeneric` will be returned. #[instrument(skip(self), level = "debug")] pub fn const_eval_poly(self, def_id: DefId) { // In some situations def_id will have generic parameters within scope, but they aren't allowed // to be used. So we can't use `Instance::mono`, instead we feed unresolved generic parameters - // into `const_eval` which will return `ErrorHandled::ToGeneric` if any of them are + // into `const_eval` which will return `ErrorHandled::TooGeneric` if any of them are // encountered. 
let args = GenericArgs::identity_for_item(self.tcx, def_id); let instance = ty::Instance::new(def_id, self.tcx.erase_regions(args)); diff --git a/compiler/rustc_middle/src/mir/mod.rs b/compiler/rustc_middle/src/mir/mod.rs index 56340ff009553..bbb8bdce4a0ca 100644 --- a/compiler/rustc_middle/src/mir/mod.rs +++ b/compiler/rustc_middle/src/mir/mod.rs @@ -21,13 +21,12 @@ use rustc_hir::def_id::{CRATE_DEF_ID, DefId}; use rustc_hir::{ self as hir, BindingMode, ByRef, CoroutineDesugaring, CoroutineKind, HirId, ImplicitSelfKind, }; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_index::{Idx, IndexSlice, IndexVec}; use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable}; use rustc_serialize::{Decodable, Encodable}; use rustc_span::source_map::Spanned; -use rustc_span::symbol::Symbol; -use rustc_span::{DUMMY_SP, Span}; +use rustc_span::{DUMMY_SP, Span, Symbol}; use tracing::trace; pub use self::query::*; @@ -1349,8 +1348,8 @@ pub struct BasicBlockData<'tcx> { } impl<'tcx> BasicBlockData<'tcx> { - pub fn new(terminator: Option>) -> BasicBlockData<'tcx> { - BasicBlockData { statements: vec![], terminator, is_cleanup: false } + pub fn new(terminator: Option>, is_cleanup: bool) -> BasicBlockData<'tcx> { + BasicBlockData { statements: vec![], terminator, is_cleanup } } /// Accessor for terminator. diff --git a/compiler/rustc_middle/src/mir/mono.rs b/compiler/rustc_middle/src/mir/mono.rs index dcb680c283c04..111c3b6956a24 100644 --- a/compiler/rustc_middle/src/mir/mono.rs +++ b/compiler/rustc_middle/src/mir/mono.rs @@ -13,8 +13,7 @@ use rustc_index::Idx; use rustc_macros::{HashStable, TyDecodable, TyEncodable}; use rustc_query_system::ich::StableHashingContext; use rustc_session::config::OptLevel; -use rustc_span::Span; -use rustc_span::symbol::Symbol; +use rustc_span::{Span, Symbol}; use rustc_target::spec::SymbolVisibility; use tracing::debug; @@ -133,9 +132,10 @@ impl<'tcx> MonoItem<'tcx> { // creating one copy of this `#[inline]` function which may // conflict with upstream crates as it could be an exported // symbol. - match tcx.codegen_fn_attrs(instance.def_id()).inline { - InlineAttr::Always => InstantiationMode::LocalCopy, - _ => InstantiationMode::GloballyShared { may_conflict: true }, + if tcx.codegen_fn_attrs(instance.def_id()).inline.always() { + InstantiationMode::LocalCopy + } else { + InstantiationMode::GloballyShared { may_conflict: true } } } MonoItem::Static(..) | MonoItem::GlobalAsm(..) => { diff --git a/compiler/rustc_middle/src/mir/pretty.rs b/compiler/rustc_middle/src/mir/pretty.rs index 2bfcd0a62274d..24d2478f77018 100644 --- a/compiler/rustc_middle/src/mir/pretty.rs +++ b/compiler/rustc_middle/src/mir/pretty.rs @@ -23,6 +23,7 @@ pub(crate) const ALIGN: usize = 40; /// An indication of where we are in the control flow graph. Used for printing /// extra information in `dump_mir` +#[derive(Clone)] pub enum PassWhere { /// We have not started dumping the control flow graph, but we are about to. 
BeforeCFG, @@ -603,8 +604,8 @@ fn write_function_coverage_info( for (id, expression) in expressions.iter_enumerated() { writeln!(w, "{INDENT}coverage {id:?} => {expression:?};")?; } - for coverage::Mapping { kind, source_region } in mappings { - writeln!(w, "{INDENT}coverage {kind:?} => {source_region:?};")?; + for coverage::Mapping { kind, span } in mappings { + writeln!(w, "{INDENT}coverage {kind:?} => {span:?};")?; } writeln!(w)?; @@ -1067,7 +1068,6 @@ impl<'tcx> Debug for Rvalue<'tcx> { pretty_print_const(b, fmt, false)?; write!(fmt, "]") } - Len(ref a) => write!(fmt, "Len({a:?})"), Cast(ref kind, ref place, ref ty) => { with_no_trimmed_paths!(write!(fmt, "{place:?} as {ty} ({kind:?})")) } @@ -1555,16 +1555,22 @@ pub fn write_allocations<'tcx>( write!(w, " (vtable: impl {dyn_ty} for {ty})")? } Some(GlobalAlloc::Static(did)) if !tcx.is_foreign_item(did) => { - match tcx.eval_static_initializer(did) { - Ok(alloc) => { - write!(w, " (static: {}, ", tcx.def_path_str(did))?; - write_allocation_track_relocs(w, alloc)?; + write!(w, " (static: {}", tcx.def_path_str(did))?; + if body.phase <= MirPhase::Runtime(RuntimePhase::PostCleanup) + && tcx.hir().body_const_context(body.source.def_id()).is_some() + { + // Statics may be cyclic and evaluating them too early + // in the MIR pipeline may cause cycle errors even though + // normal compilation is fine. + write!(w, ")")?; + } else { + match tcx.eval_static_initializer(did) { + Ok(alloc) => { + write!(w, ", ")?; + write_allocation_track_relocs(w, alloc)?; + } + Err(_) => write!(w, ", error during initializer evaluation)")?, } - Err(_) => write!( - w, - " (static: {}, error during initializer evaluation)", - tcx.def_path_str(did) - )?, } } Some(GlobalAlloc::Static(did)) => { diff --git a/compiler/rustc_middle/src/mir/query.rs b/compiler/rustc_middle/src/mir/query.rs index f690359e01215..db5da941f1e7b 100644 --- a/compiler/rustc_middle/src/mir/query.rs +++ b/compiler/rustc_middle/src/mir/query.rs @@ -3,7 +3,6 @@ use std::cell::Cell; use std::fmt::{self, Debug}; -use derive_where::derive_where; use rustc_abi::{FieldIdx, VariantIdx}; use rustc_data_structures::fx::FxIndexMap; use rustc_errors::ErrorGuaranteed; @@ -11,8 +10,7 @@ use rustc_hir::def_id::LocalDefId; use rustc_index::bit_set::BitMatrix; use rustc_index::{Idx, IndexVec}; use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable}; -use rustc_span::Span; -use rustc_span::symbol::Symbol; +use rustc_span::{Span, Symbol}; use smallvec::SmallVec; use super::{ConstValue, SourceInfo}; @@ -226,29 +224,22 @@ rustc_data_structures::static_assert_size!(ConstraintCategory<'_>, 16); /// See also `rustc_const_eval::borrow_check::constraints`. #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] #[derive(TyEncodable, TyDecodable, HashStable, TypeVisitable, TypeFoldable)] -#[derive_where(PartialOrd, Ord)] pub enum ConstraintCategory<'tcx> { Return(ReturnConstraint), Yield, UseAsConst, UseAsStatic, - TypeAnnotation, + TypeAnnotation(AnnotationSource), Cast { /// Whether this cast is a coercion that was automatically inserted by the compiler. is_implicit_coercion: bool, /// Whether this is an unsizing coercion and if yes, this contains the target type. /// Region variables are erased to ReErased. - #[derive_where(skip)] unsize_to: Option>, }, - /// A constraint that came from checking the body of a closure. - /// - /// We try to get the category that the closure used when reporting this. - ClosureBounds, - /// Contains the function type if available. 
- CallArgument(#[derive_where(skip)] Option>), + CallArgument(Option>), CopyBound, SizedBound, Assignment, @@ -277,13 +268,22 @@ pub enum ConstraintCategory<'tcx> { IllegalUniverse, } -#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash)] +#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] #[derive(TyEncodable, TyDecodable, HashStable, TypeVisitable, TypeFoldable)] pub enum ReturnConstraint { Normal, ClosureUpvar(FieldIdx), } +#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] +#[derive(TyEncodable, TyDecodable, HashStable, TypeVisitable, TypeFoldable)] +pub enum AnnotationSource { + Ascription, + Declaration, + OpaqueCast, + GenericArg, +} + /// The subject of a `ClosureOutlivesRequirement` -- that is, the thing /// that must outlive some region. #[derive(Copy, Clone, Debug, TyEncodable, TyDecodable, HashStable)] diff --git a/compiler/rustc_middle/src/mir/statement.rs b/compiler/rustc_middle/src/mir/statement.rs index 1ce735cec6313..470a247d794ec 100644 --- a/compiler/rustc_middle/src/mir/statement.rs +++ b/compiler/rustc_middle/src/mir/statement.rs @@ -424,7 +424,6 @@ impl<'tcx> Rvalue<'tcx> { | Rvalue::Ref(_, _, _) | Rvalue::ThreadLocalRef(_) | Rvalue::RawPtr(_, _) - | Rvalue::Len(_) | Rvalue::Cast( CastKind::IntToInt | CastKind::FloatToInt @@ -456,6 +455,8 @@ impl BorrowKind { } } + /// Returns whether borrows represented by this kind are allowed to be split into separate + /// Reservation and Activation phases. pub fn allows_two_phase_borrow(&self) -> bool { match *self { BorrowKind::Shared diff --git a/compiler/rustc_middle/src/mir/syntax.rs b/compiler/rustc_middle/src/mir/syntax.rs index fea940ea47c47..bbbaffc5a35cc 100644 --- a/compiler/rustc_middle/src/mir/syntax.rs +++ b/compiler/rustc_middle/src/mir/syntax.rs @@ -10,10 +10,9 @@ use rustc_hir::CoroutineKind; use rustc_hir::def_id::DefId; use rustc_index::IndexVec; use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable}; -use rustc_span::Span; use rustc_span::def_id::LocalDefId; use rustc_span::source_map::Spanned; -use rustc_span::symbol::Symbol; +use rustc_span::{Span, Symbol}; use rustc_target::asm::InlineAsmRegOrRegClass; use smallvec::SmallVec; @@ -822,6 +821,11 @@ pub enum TerminatorKind<'tcx> { /// continues at the `resume` basic block, with the second argument written to the `resume_arg` /// place. If the coroutine is dropped before then, the `drop` basic block is invoked. /// + /// Note that coroutines can be (unstably) cloned under certain conditions, which means that + /// this terminator can **return multiple times**! MIR optimizations that reorder code into + /// different basic blocks needs to be aware of that. + /// See . + /// /// Not permitted in bodies that are not coroutine bodies, or after coroutine lowering. /// /// **Needs clarification**: What about the evaluation order of the `resume_arg` and `value`? @@ -1347,16 +1351,6 @@ pub enum Rvalue<'tcx> { /// model. RawPtr(Mutability, Place<'tcx>), - /// Yields the length of the place, as a `usize`. - /// - /// If the type of the place is an array, this is the array length. For slices (`[T]`, not - /// `&[T]`) this accesses the place's metadata to determine the length. This rvalue is - /// ill-formed for places of other types. - /// - /// This cannot be a `UnOp(PtrMetadata, _)` because that expects a value, and we only - /// have a place, and `UnOp(PtrMetadata, RawPtr(place))` is not a thing. - Len(Place<'tcx>), - /// Performs essentially all of the casts that can be performed via `as`. 
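The new doc comment on `allows_two_phase_borrow` above mentions the reservation/activation split. A minimal surface-level example of the pattern two-phase borrows exist to accept (ordinary Rust, not compiler code):

```rust
fn main() {
    let mut v = vec![1, 2, 3];
    // `push` needs `&mut v`, while the argument `v.len()` needs `&v`.
    // The mutable borrow is only *reserved* before the argument is evaluated
    // and *activated* at the actual call, so this compiles.
    v.push(v.len());
    assert_eq!(v, [1, 2, 3, 3]);
}
```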
/// /// This allows for casts from/to a variety of types. diff --git a/compiler/rustc_middle/src/mir/tcx.rs b/compiler/rustc_middle/src/mir/tcx.rs index 476e352ed9227..cbb26b83c79cf 100644 --- a/compiler/rustc_middle/src/mir/tcx.rs +++ b/compiler/rustc_middle/src/mir/tcx.rs @@ -5,6 +5,7 @@ use rustc_hir as hir; use tracing::{debug, instrument}; +use ty::CoroutineArgsExt; use crate::mir::*; @@ -25,29 +26,63 @@ impl<'tcx> PlaceTy<'tcx> { PlaceTy { ty, variant_index: None } } - /// `place_ty.field_ty(tcx, f)` computes the type at a given field - /// of a record or enum-variant. (Most clients of `PlaceTy` can - /// instead just extract the relevant type directly from their - /// `PlaceElem`, but some instances of `ProjectionElem` do - /// not carry a `Ty` for `T`.) + /// `place_ty.field_ty(tcx, f)` computes the type of a given field. + /// + /// Most clients of `PlaceTy` can instead just extract the relevant type + /// directly from their `PlaceElem`, but some instances of `ProjectionElem` + /// do not carry a `Ty` for `T`. /// /// Note that the resulting type has not been normalized. #[instrument(level = "debug", skip(tcx), ret)] pub fn field_ty(self, tcx: TyCtxt<'tcx>, f: FieldIdx) -> Ty<'tcx> { - match self.ty.kind() { - ty::Adt(adt_def, args) => { - let variant_def = match self.variant_index { - None => adt_def.non_enum_variant(), - Some(variant_index) => { - assert!(adt_def.is_enum()); - adt_def.variant(variant_index) - } - }; - let field_def = &variant_def.fields[f]; - field_def.ty(tcx, args) + if let Some(variant_index) = self.variant_index { + match *self.ty.kind() { + ty::Adt(adt_def, args) if adt_def.is_enum() => { + adt_def.variant(variant_index).fields[f].ty(tcx, args) + } + ty::Coroutine(def_id, args) => { + let mut variants = args.as_coroutine().state_tys(def_id, tcx); + let Some(mut variant) = variants.nth(variant_index.into()) else { + bug!("variant {variant_index:?} of coroutine out of range: {self:?}"); + }; + + variant + .nth(f.index()) + .unwrap_or_else(|| bug!("field {f:?} out of range: {self:?}")) + } + _ => bug!("can't downcast non-adt non-coroutine type: {self:?}"), + } + } else { + match self.ty.kind() { + ty::Adt(adt_def, args) if !adt_def.is_enum() => { + adt_def.non_enum_variant().fields[f].ty(tcx, args) + } + ty::Closure(_, args) => args + .as_closure() + .upvar_tys() + .get(f.index()) + .copied() + .unwrap_or_else(|| bug!("field {f:?} out of range: {self:?}")), + ty::CoroutineClosure(_, args) => args + .as_coroutine_closure() + .upvar_tys() + .get(f.index()) + .copied() + .unwrap_or_else(|| bug!("field {f:?} out of range: {self:?}")), + // Only prefix fields (upvars and current state) are + // accessible without a variant index. + ty::Coroutine(_, args) => args + .as_coroutine() + .prefix_tys() + .get(f.index()) + .copied() + .unwrap_or_else(|| bug!("field {f:?} out of range: {self:?}")), + ty::Tuple(tys) => tys + .get(f.index()) + .copied() + .unwrap_or_else(|| bug!("field {f:?} out of range: {self:?}")), + _ => bug!("can't project out of {self:?}"), } - ty::Tuple(tys) => tys[f.index()], - _ => bug!("extracting field of non-tuple non-adt: {:?}", self), } } @@ -175,7 +210,6 @@ impl<'tcx> Rvalue<'tcx> { let place_ty = place.ty(local_decls, tcx).ty; Ty::new_ptr(tcx, place_ty, mutability) } - Rvalue::Len(..) 
=> tcx.types.usize, Rvalue::Cast(.., ty) => ty, Rvalue::BinaryOp(op, box (ref lhs, ref rhs)) => { let lhs_ty = lhs.ty(local_decls, tcx); diff --git a/compiler/rustc_middle/src/mir/terminator.rs b/compiler/rustc_middle/src/mir/terminator.rs index b919f5726db58..473b817aed076 100644 --- a/compiler/rustc_middle/src/mir/terminator.rs +++ b/compiler/rustc_middle/src/mir/terminator.rs @@ -67,6 +67,17 @@ impl SwitchTargets { &mut self.targets } + /// Returns a slice with all considered values (not including the fallback). + #[inline] + pub fn all_values(&self) -> &[Pu128] { + &self.values + } + + #[inline] + pub fn all_values_mut(&mut self) -> &mut [Pu128] { + &mut self.values + } + /// Finds the `BasicBlock` to which this `SwitchInt` will branch given the /// specific value. This cannot fail, as it'll return the `otherwise` /// branch if there's not a specific match for the value. diff --git a/compiler/rustc_middle/src/mir/traversal.rs b/compiler/rustc_middle/src/mir/traversal.rs index b8b74da401c9f..0e7dcc24dafa6 100644 --- a/compiler/rustc_middle/src/mir/traversal.rs +++ b/compiler/rustc_middle/src/mir/traversal.rs @@ -21,7 +21,7 @@ use super::*; #[derive(Clone)] pub struct Preorder<'a, 'tcx> { body: &'a Body<'tcx>, - visited: BitSet, + visited: DenseBitSet, worklist: Vec, root_is_start_block: bool, } @@ -32,7 +32,7 @@ impl<'a, 'tcx> Preorder<'a, 'tcx> { Preorder { body, - visited: BitSet::new_empty(body.basic_blocks.len()), + visited: DenseBitSet::new_empty(body.basic_blocks.len()), worklist, root_is_start_block: root == START_BLOCK, } @@ -106,7 +106,7 @@ impl<'a, 'tcx> Iterator for Preorder<'a, 'tcx> { /// A Postorder traversal of this graph is `D B C A` or `D C B A` pub struct Postorder<'a, 'tcx, C> { basic_blocks: &'a IndexSlice>, - visited: BitSet, + visited: DenseBitSet, visit_stack: Vec<(BasicBlock, Successors<'a>)>, root_is_start_block: bool, extra: C, @@ -123,7 +123,7 @@ where ) -> Postorder<'a, 'tcx, C> { let mut po = Postorder { basic_blocks, - visited: BitSet::new_empty(basic_blocks.len()), + visited: DenseBitSet::new_empty(basic_blocks.len()), visit_stack: Vec::new(), root_is_start_block: root == START_BLOCK, extra, @@ -285,8 +285,8 @@ pub fn reachable<'a, 'tcx>( preorder(body) } -/// Returns a `BitSet` containing all basic blocks reachable from the `START_BLOCK`. -pub fn reachable_as_bitset(body: &Body<'_>) -> BitSet { +/// Returns a `DenseBitSet` containing all basic blocks reachable from the `START_BLOCK`. +pub fn reachable_as_bitset(body: &Body<'_>) -> DenseBitSet { let mut iter = preorder(body); while let Some(_) = iter.next() {} iter.visited @@ -340,13 +340,13 @@ pub fn mono_reachable<'a, 'tcx>( MonoReachable::new(body, tcx, instance) } -/// [`MonoReachable`] internally accumulates a [`BitSet`] of visited blocks. This is just a +/// [`MonoReachable`] internally accumulates a [`DenseBitSet`] of visited blocks. This is just a /// convenience function to run that traversal then extract its set of reached blocks. pub fn mono_reachable_as_bitset<'a, 'tcx>( body: &'a Body<'tcx>, tcx: TyCtxt<'tcx>, instance: Instance<'tcx>, -) -> BitSet { +) -> DenseBitSet { let mut iter = mono_reachable(body, tcx, instance); while let Some(_) = iter.next() {} iter.visited @@ -356,11 +356,11 @@ pub struct MonoReachable<'a, 'tcx> { body: &'a Body<'tcx>, tcx: TyCtxt<'tcx>, instance: Instance<'tcx>, - visited: BitSet, + visited: DenseBitSet, // Other traversers track their worklist in a Vec. 
But we don't care about order, so we can - // store ours in a BitSet and thus save allocations because BitSet has a small size + // store ours in a DenseBitSet and thus save allocations because DenseBitSet has a small size // optimization. - worklist: BitSet, + worklist: DenseBitSet, } impl<'a, 'tcx> MonoReachable<'a, 'tcx> { @@ -369,13 +369,13 @@ impl<'a, 'tcx> MonoReachable<'a, 'tcx> { tcx: TyCtxt<'tcx>, instance: Instance<'tcx>, ) -> MonoReachable<'a, 'tcx> { - let mut worklist = BitSet::new_empty(body.basic_blocks.len()); + let mut worklist = DenseBitSet::new_empty(body.basic_blocks.len()); worklist.insert(START_BLOCK); MonoReachable { body, tcx, instance, - visited: BitSet::new_empty(body.basic_blocks.len()), + visited: DenseBitSet::new_empty(body.basic_blocks.len()), worklist, } } diff --git a/compiler/rustc_middle/src/mir/visit.rs b/compiler/rustc_middle/src/mir/visit.rs index 62c340d99e38e..12a024a219e8e 100644 --- a/compiler/rustc_middle/src/mir/visit.rs +++ b/compiler/rustc_middle/src/mir/visit.rs @@ -695,14 +695,6 @@ macro_rules! make_mir_visitor { self.visit_place(path, ctx, location); } - Rvalue::Len(path) => { - self.visit_place( - path, - PlaceContext::NonMutatingUse(NonMutatingUseContext::Inspect), - location - ); - } - Rvalue::Cast(_cast_kind, operand, ty) => { self.visit_operand(operand, location); self.visit_ty($(& $mutability)? *ty, TyContext::Location(location)); @@ -1369,12 +1361,12 @@ pub enum PlaceContext { impl PlaceContext { /// Returns `true` if this place context represents a drop. #[inline] - pub fn is_drop(&self) -> bool { + pub fn is_drop(self) -> bool { matches!(self, PlaceContext::MutatingUse(MutatingUseContext::Drop)) } /// Returns `true` if this place context represents a borrow. - pub fn is_borrow(&self) -> bool { + pub fn is_borrow(self) -> bool { matches!( self, PlaceContext::NonMutatingUse( @@ -1384,7 +1376,7 @@ impl PlaceContext { } /// Returns `true` if this place context represents an address-of. - pub fn is_address_of(&self) -> bool { + pub fn is_address_of(self) -> bool { matches!( self, PlaceContext::NonMutatingUse(NonMutatingUseContext::RawBorrow) @@ -1394,7 +1386,7 @@ impl PlaceContext { /// Returns `true` if this place context represents a storage live or storage dead marker. #[inline] - pub fn is_storage_marker(&self) -> bool { + pub fn is_storage_marker(self) -> bool { matches!( self, PlaceContext::NonUse(NonUseContext::StorageLive | NonUseContext::StorageDead) @@ -1403,18 +1395,18 @@ impl PlaceContext { /// Returns `true` if this place context represents a use that potentially changes the value. #[inline] - pub fn is_mutating_use(&self) -> bool { + pub fn is_mutating_use(self) -> bool { matches!(self, PlaceContext::MutatingUse(..)) } /// Returns `true` if this place context represents a use. #[inline] - pub fn is_use(&self) -> bool { + pub fn is_use(self) -> bool { !matches!(self, PlaceContext::NonUse(..)) } /// Returns `true` if this place context represents an assignment statement. - pub fn is_place_assignment(&self) -> bool { + pub fn is_place_assignment(self) -> bool { matches!( self, PlaceContext::MutatingUse( @@ -1424,4 +1416,19 @@ impl PlaceContext { ) ) } + + /// The variance of a place in the given context. 
+ pub fn ambient_variance(self) -> ty::Variance { + use NonMutatingUseContext::*; + use NonUseContext::*; + match self { + PlaceContext::MutatingUse(_) => ty::Invariant, + PlaceContext::NonUse(StorageDead | StorageLive | VarDebugInfo) => ty::Invariant, + PlaceContext::NonMutatingUse( + Inspect | Copy | Move | PlaceMention | SharedBorrow | FakeBorrow | RawBorrow + | Projection, + ) => ty::Covariant, + PlaceContext::NonUse(AscribeUserTy(variance)) => variance, + } + } } diff --git a/compiler/rustc_middle/src/query/erase.rs b/compiler/rustc_middle/src/query/erase.rs index 9afcf9466b6d0..1676afb4b6ec4 100644 --- a/compiler/rustc_middle/src/query/erase.rs +++ b/compiler/rustc_middle/src/query/erase.rs @@ -141,14 +141,6 @@ impl EraseType for Result>, &ty::layout::Layou >()]; } -impl EraseType for Result, mir::interpret::LitToConstError> { - type Result = [u8; size_of::, mir::interpret::LitToConstError>>()]; -} - -impl EraseType for Result, mir::interpret::LitToConstError> { - type Result = [u8; size_of::, mir::interpret::LitToConstError>>()]; -} - impl EraseType for Result, mir::interpret::ErrorHandled> { type Result = [u8; size_of::, mir::interpret::ErrorHandled>>()]; } @@ -296,7 +288,6 @@ trivial! { rustc_middle::mir::interpret::AllocId, rustc_middle::mir::interpret::CtfeProvenance, rustc_middle::mir::interpret::ErrorHandled, - rustc_middle::mir::interpret::LitToConstError, rustc_middle::thir::ExprId, rustc_middle::traits::CodegenObligationError, rustc_middle::traits::EvaluationResult, @@ -332,7 +323,7 @@ trivial! { rustc_span::ExpnId, rustc_span::Span, rustc_span::Symbol, - rustc_span::symbol::Ident, + rustc_span::Ident, rustc_target::spec::PanicStrategy, rustc_type_ir::Variance, u32, diff --git a/compiler/rustc_middle/src/query/keys.rs b/compiler/rustc_middle/src/query/keys.rs index 66fec2dd0f7e8..e243425c0b7ba 100644 --- a/compiler/rustc_middle/src/query/keys.rs +++ b/compiler/rustc_middle/src/query/keys.rs @@ -4,8 +4,7 @@ use rustc_hir::def_id::{CrateNum, DefId, LOCAL_CRATE, LocalDefId, LocalModDefId, use rustc_hir::hir_id::{HirId, OwnerId}; use rustc_query_system::dep_graph::DepNodeIndex; use rustc_query_system::query::{DefIdCache, DefaultCache, SingleCache, VecCache}; -use rustc_span::symbol::{Ident, Symbol}; -use rustc_span::{DUMMY_SP, Span}; +use rustc_span::{DUMMY_SP, Ident, Span, Symbol}; use crate::infer::canonical::CanonicalQueryInput; use crate::mir::mono::CollectionMode; diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs index 9475e6296832b..bfbcb0532c10e 100644 --- a/compiler/rustc_middle/src/query/mod.rs +++ b/compiler/rustc_middle/src/query/mod.rs @@ -41,8 +41,7 @@ use rustc_session::cstore::{ use rustc_session::lint::LintExpectationId; use rustc_span::def_id::LOCAL_CRATE; use rustc_span::source_map::Spanned; -use rustc_span::symbol::Symbol; -use rustc_span::{DUMMY_SP, Span}; +use rustc_span::{DUMMY_SP, Span, Symbol}; use rustc_target::spec::PanicStrategy; use {rustc_abi as abi, rustc_ast as ast, rustc_attr_parsing as attr, rustc_hir as hir}; @@ -58,7 +57,7 @@ use crate::middle::resolve_bound_vars::{ObjectLifetimeDefault, ResolveBoundVars, use crate::middle::stability::{self, DeprecationEntry}; use crate::mir::interpret::{ EvalStaticInitializerRawResult, EvalToAllocationRawResult, EvalToConstValueResult, - EvalToValTreeResult, GlobalId, LitToConstError, LitToConstInput, + EvalToValTreeResult, GlobalId, LitToConstInput, }; use crate::mir::mono::{CodegenUnit, CollectionMode, MonoItem}; use crate::query::erase::{Erase, erase, restore}; @@ 
-133,6 +132,7 @@ rustc_queries! { } /// Return the span for a definition. + /// /// Contrary to `def_span` below, this query returns the full absolute span of the definition. /// This span is meant for dep-tracking rather than diagnostics. It should not be used outside /// of rustc_middle::hir::source_map. @@ -143,6 +143,7 @@ rustc_queries! { } /// Represents crate as a whole (as distinct from the top-level crate module). + /// /// If you call `hir_crate` (e.g., indirectly by calling `tcx.hir().krate()`), /// we will have to assume that any change means that you need to be recompiled. /// This is because the `hir_crate` query gives you access to all other items. @@ -203,28 +204,40 @@ rustc_queries! { feedable } - /// Given the def_id of a const-generic parameter, computes the associated default const - /// parameter. e.g. `fn example` called on `N` would return `3`. + /// Returns the *default* of the const parameter given by `DefId`. + /// + /// E.g., given `struct Ty<const N: usize = 3>;` this returns `3` for `N`. query const_param_default(param: DefId) -> ty::EarlyBinder<'tcx, ty::Const<'tcx>> { - desc { |tcx| "computing const default for a given parameter `{}`", tcx.def_path_str(param) } + desc { |tcx| "computing the default for const parameter `{}`", tcx.def_path_str(param) } cache_on_disk_if { param.is_local() } separate_provide_extern } - /// Returns the [`Ty`][rustc_middle::ty::Ty] of the given [`DefId`]. If the [`DefId`] points - /// to an alias, it will "skip" this alias to return the aliased type. + /// Returns the *type* of the definition given by `DefId`. + /// + /// For type aliases (whether eager or lazy) and associated types, this returns + /// the underlying aliased type (not the corresponding [alias type]). + /// + /// For opaque types, this returns and thus reveals the hidden type! If you + /// want to detect cycle errors use `type_of_opaque` instead. + /// + /// To clarify, for type definitions, this does *not* return the "type of a type" + /// (aka *kind* or *sort*) in the type-theoretical sense! It merely returns + /// the type primarily *associated with* it. + /// + /// # Panics /// - /// [`DefId`]: rustc_hir::def_id::DefId + /// This query will panic if the given definition doesn't (and can't + /// conceptually) have an (underlying) type. + /// + /// [alias type]: rustc_middle::ty::AliasTy query type_of(key: DefId) -> ty::EarlyBinder<'tcx, Ty<'tcx>> { desc { |tcx| "{action} `{path}`", - action = { - use rustc_hir::def::DefKind; - match tcx.def_kind(key) { - DefKind::TyAlias => "expanding type alias", - DefKind::TraitAlias => "expanding trait alias", - _ => "computing type of", - } + action = match tcx.def_kind(key) { DefKind::TyAlias => "expanding type alias", DefKind::TraitAlias => "expanding trait alias", _ => "computing type of", }, path = tcx.def_path_str(key), } @@ -233,9 +246,14 @@ rustc_queries! { feedable } - /// Specialized instance of `type_of` that detects cycles that are due to - /// revealing opaque because of an auto trait bound. Unless `CyclePlaceholder` needs - /// to be handled separately, call `type_of` instead. + /// Returns the *hidden type* of the opaque type given by `DefId` unless a cycle occurred. + /// + /// This is a specialized instance of [`Self::type_of`] that detects query cycles. + /// Unless `CyclePlaceholder` needs to be handled separately, call [`Self::type_of`] instead. + /// + /// # Panics + /// + /// This query will panic if the given definition is not an opaque type.
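A small, self-contained illustration of what `const_param_default` describes (the struct name mirrors the doc example and is illustrative only):

```rust
// `const_param_default` for `N` yields the constant `3` here.
struct Ty<const N: usize = 3>([u8; N]);

// The default fills in when the parameter is omitted.
type Defaulted = Ty;   // N = 3
type Explicit = Ty<5>; // N = 5

fn main() {
    assert_eq!(std::mem::size_of::<Defaulted>(), 3);
    assert_eq!(std::mem::size_of::<Explicit>(), 5);
}
```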
query type_of_opaque(key: DefId) -> Result>, CyclePlaceholder> { desc { |tcx| "computing type of opaque `{path}`", @@ -244,9 +262,22 @@ rustc_queries! { cycle_stash } + /// Returns whether the type alias given by `DefId` is lazy. + /// + /// I.e., if the type alias expands / ought to expand to a [weak] [alias type] + /// instead of the underyling aliased type. + /// + /// Relevant for features `lazy_type_alias` and `type_alias_impl_trait`. + /// + /// # Panics + /// + /// This query *may* panic if the given definition is not a type alias. + /// + /// [weak]: rustc_middle::ty::Weak + /// [alias type]: rustc_middle::ty::AliasTy query type_alias_is_lazy(key: DefId) -> bool { desc { |tcx| - "computing whether `{path}` is a lazy type alias", + "computing whether the type alias `{path}` is lazy", path = tcx.def_path_str(key), } separate_provide_extern @@ -266,7 +297,7 @@ rustc_queries! { separate_provide_extern } - query unsizing_params_for_adt(key: DefId) -> &'tcx rustc_index::bit_set::BitSet + query unsizing_params_for_adt(key: DefId) -> &'tcx rustc_index::bit_set::DenseBitSet { arena_cache desc { |tcx| @@ -300,8 +331,7 @@ rustc_queries! { desc { "checking lint expectations (RFC 2383)" } } - /// Maps from the `DefId` of an item (trait/struct/enum/fn) to its - /// associated generics. + /// Returns the *generics* of the definition given by `DefId`. query generics_of(key: DefId) -> &'tcx ty::Generics { desc { |tcx| "computing generics of `{}`", tcx.def_path_str(key) } arena_cache @@ -310,10 +340,13 @@ rustc_queries! { feedable } - /// Maps from the `DefId` of an item (trait/struct/enum/fn) to the - /// predicates (where-clauses) that must be proven true in order - /// to reference it. This is almost always the "predicates query" - /// that you want. + /// Returns the (elaborated) *predicates* of the definition given by `DefId` + /// that must be proven true at usage sites (and which can be assumed at definition site). + /// + /// This is almost always *the* "predicates query" that you want. + /// + /// **Tip**: You can use `#[rustc_dump_predicates]` on an item to basically print + /// the result of this query for use in UI tests or for debugging purposes. query predicates_of(key: DefId) -> ty::GenericPredicates<'tcx> { desc { |tcx| "computing predicates of `{}`", tcx.def_path_str(key) } cache_on_disk_if { key.is_local() } @@ -329,25 +362,24 @@ rustc_queries! { } } - /// Returns the list of bounds that are required to be satisfied - /// by a implementation or definition. For associated types, these - /// must be satisfied for an implementation to be well-formed, - /// and for opaque types, these are required to be satisfied by - /// the hidden-type of the opaque. + /// Returns the explicitly user-written *bounds* on the associated or opaque type given by `DefId` + /// that must be proven true at definition site (and which can be assumed at usage sites). /// - /// Syntactially, these are the bounds written on the trait's type - /// definition, or those after the `impl` keyword for an opaque: + /// For associated types, these must be satisfied for an implementation + /// to be well-formed, and for opaque types, these are required to be + /// satisfied by the hidden type of the opaque. /// - /// ```ignore (incomplete) - /// type X: Bound + 'lt - /// // ^^^^^^^^^^^ - /// impl Debug + Display - /// // ^^^^^^^^^^^^^^^ - /// ``` + /// Bounds from the parent (e.g. with nested `impl Trait`) are not included. /// - /// `key` is the `DefId` of the associated type or opaque type. 
+ /// Syntactially, these are the bounds written on associated types in trait + /// definitions, or those after the `impl` keyword for an opaque: /// - /// Bounds from the parent (e.g. with nested impl trait) are not included. + /// ```ignore (illustrative) + /// trait Trait { type X: Bound + 'lt; } + /// // ^^^^^^^^^^^ + /// fn function() -> impl Debug + Display { /*...*/ } + /// // ^^^^^^^^^^^^^^^ + /// ``` query explicit_item_bounds(key: DefId) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> { desc { |tcx| "finding item bounds for `{}`", tcx.def_path_str(key) } cache_on_disk_if { key.is_local() } @@ -355,10 +387,12 @@ rustc_queries! { feedable } - /// The set of item bounds (see [`TyCtxt::explicit_item_bounds`]) that - /// share the `Self` type of the item. These are a subset of the bounds - /// that may explicitly be used for things like closure signature - /// deduction. + /// Returns the explicitly user-written *bounds* that share the `Self` type of the item. + /// + /// These are a subset of the [explicit item bounds] that may explicitly be used for things + /// like closure signature deduction. + /// + /// [explicit item bounds]: Self::explicit_item_bounds query explicit_item_super_predicates(key: DefId) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> { desc { |tcx| "finding item bounds for `{}`", tcx.def_path_str(key) } cache_on_disk_if { key.is_local() } @@ -366,26 +400,29 @@ rustc_queries! { feedable } - /// Elaborated version of the predicates from `explicit_item_bounds`. + /// Returns the (elaborated) *bounds* on the associated or opaque type given by `DefId` + /// that must be proven true at definition site (and which can be assumed at usage sites). + /// + /// Bounds from the parent (e.g. with nested `impl Trait`) are not included. + /// + /// **Tip**: You can use `#[rustc_dump_item_bounds]` on an item to basically print + /// the result of this query for use in UI tests or for debugging purposes. /// - /// For example: + /// # Examples /// /// ``` - /// trait MyTrait { - /// type MyAType: Eq + ?Sized; - /// } + /// trait Trait { type Assoc: Eq + ?Sized; } /// ``` /// - /// `explicit_item_bounds` returns `[::MyAType: Eq]`, - /// and `item_bounds` returns + /// While [`Self::explicit_item_bounds`] returns `[::Assoc: Eq]` + /// here, `item_bounds` returns: + /// /// ```text /// [ - /// ::MyAType: Eq, - /// ::MyAType: PartialEq<::MyAType> + /// ::Assoc: Eq, + /// ::Assoc: PartialEq<::Assoc> /// ] /// ``` - /// - /// Bounds from the parent (e.g. with nested impl trait) are not included. query item_bounds(key: DefId) -> ty::EarlyBinder<'tcx, ty::Clauses<'tcx>> { desc { |tcx| "elaborating item bounds for `{}`", tcx.def_path_str(key) } } @@ -457,7 +494,7 @@ rustc_queries! { } /// Set of param indexes for type params that are in the type's representation - query params_in_repr(key: DefId) -> &'tcx rustc_index::bit_set::BitSet { + query params_in_repr(key: DefId) -> &'tcx rustc_index::bit_set::DenseBitSet { desc { "finding type parameters in the representation" } arena_cache no_hash @@ -616,27 +653,35 @@ rustc_queries! { desc { "getting wasm import module map" } } - /// Returns everything that looks like a predicate written explicitly - /// by the user on a trait item. + /// Returns the explicitly user-written *predicates and bounds* of the trait given by `DefId`. 
/// /// Traits are unusual, because predicates on associated types are /// converted into bounds on that type for backwards compatibility: /// + /// ``` /// trait X where Self::U: Copy { type U; } + /// ``` /// /// becomes /// + /// ``` /// trait X { type U: Copy; } + /// ``` + /// + /// [`Self::explicit_predicates_of`] and [`Self::explicit_item_bounds`] will + /// then take the appropriate subsets of the predicates here. + /// + /// # Panics /// - /// `explicit_predicates_of` and `explicit_item_bounds` will then take - /// the appropriate subsets of the predicates here. + /// This query will panic if the given definition is not a trait. query trait_explicit_predicates_and_bounds(key: LocalDefId) -> ty::GenericPredicates<'tcx> { desc { |tcx| "computing explicit predicates of trait `{}`", tcx.def_path_str(key) } } - /// Returns the predicates written explicitly by the user. + /// Returns the explicitly user-written *predicates* of the definition given by `DefId` + /// that must be proven true at usage sites (and which can be assumed at definition site). /// - /// You should probably use `predicates_of` unless you're looking for + /// You should probably use [`Self::predicates_of`] unless you're looking for /// predicates with explicit spans for diagnostics purposes. query explicit_predicates_of(key: DefId) -> ty::GenericPredicates<'tcx> { desc { |tcx| "computing explicit predicates of `{}`", tcx.def_path_str(key) } @@ -645,18 +690,24 @@ rustc_queries! { feedable } - /// Returns the inferred outlives predicates (e.g., for `struct - /// Foo<'a, T> { x: &'a T }`, this would return `T: 'a`). + /// Returns the *inferred outlives-predicates* of the item given by `DefId`. + /// + /// E.g., for `struct Foo<'a, T> { x: &'a T }`, this would return `[T: 'a]`. + /// + /// **Tip**: You can use `#[rustc_outlives]` on an item to basically print the + /// result of this query for use in UI tests or for debugging purposes. query inferred_outlives_of(key: DefId) -> &'tcx [(ty::Clause<'tcx>, Span)] { - desc { |tcx| "computing inferred outlives predicates of `{}`", tcx.def_path_str(key) } + desc { |tcx| "computing inferred outlives-predicates of `{}`", tcx.def_path_str(key) } cache_on_disk_if { key.is_local() } separate_provide_extern feedable } - /// Maps from the `DefId` of a trait to the list of super-predicates of the trait, - /// *before* elaboration (so it doesn't contain transitive super-predicates). This - /// is a subset of the full list of predicates. We store these in a separate map + /// Returns the explicitly user-written *super-predicates* of the trait given by `DefId`. + /// + /// These predicates are unelaborated and consequently don't contain transitive super-predicates. + /// + /// This is a subset of the full list of predicates. We store these in a separate map /// because we must evaluate them even during type conversion, often before the full /// predicates are available (note that super-predicates must not be cyclic). query explicit_super_predicates_of(key: DefId) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> { @@ -665,8 +716,9 @@ rustc_queries! { separate_provide_extern } - /// The predicates of the trait that are implied during elaboration. This is a - /// superset of the super-predicates of the trait, but a subset of the predicates + /// The predicates of the trait that are implied during elaboration. + /// + /// This is a superset of the super-predicates of the trait, but a subset of the predicates /// of the trait. 
For regular traits, this includes all super-predicates and their /// associated type bounds. For trait aliases, currently, this includes all of the /// predicates of the trait alias. @@ -680,7 +732,7 @@ rustc_queries! { /// of supertraits that define the given associated type. This is used to avoid /// cycles in resolving type-dependent associated item paths like `T::Item`. query explicit_supertraits_containing_assoc_item( - key: (DefId, rustc_span::symbol::Ident) + key: (DefId, rustc_span::Ident) ) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> { desc { |tcx| "computing the super traits of `{}` with associated type name `{}`", tcx.def_path_str(key.0), @@ -709,7 +761,7 @@ rustc_queries! { /// To avoid cycles within the predicates of a single item we compute /// per-type-parameter predicates for resolving `T::AssocTy`. query type_param_predicates( - key: (LocalDefId, LocalDefId, rustc_span::symbol::Ident) + key: (LocalDefId, LocalDefId, rustc_span::Ident) ) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> { desc { |tcx| "computing the bounds for type parameter `{}`", tcx.hir().ty_param_name(key.1) } } @@ -746,14 +798,27 @@ rustc_queries! { desc { |tcx| "computing drop-check constraints for `{}`", tcx.def_path_str(key) } } - /// Returns the constness of function-like things (tuple struct/variant constructors, functions, - /// methods) + /// Returns the constness of the function-like[^1] definition given by `DefId`. + /// + /// Tuple struct/variant constructors are *always* const, foreign functions are + /// *never* const. The rest is const iff marked with keyword `const` (or rather + /// its parent in the case of associated functions). /// - /// Will ICE if used on things that are always const or never const. + ///
/// - /// **Do not call this function manually.** It is only meant to cache the base data for the + /// **Do not call this query** directly. It is only meant to cache the base data for the /// higher-level functions. Consider using `is_const_fn` or `is_const_trait_impl` instead. - /// Also note that neither of them takes into account feature gates and stability. + /// + /// Also note that neither of them takes into account feature gates, stability and + /// const predicates/conditions! + /// + ///
+ /// + /// # Panics + /// + /// This query will panic if the given definition is not function-like[^1]. + /// + /// [^1]: Tuple struct/variant constructors, closures and free, associated and foreign functions. query constness(key: DefId) -> hir::Constness { desc { |tcx| "checking if item is const: `{}`", tcx.def_path_str(key) } separate_provide_extern @@ -799,13 +864,25 @@ rustc_queries! { separate_provide_extern } - /// Gets a map with the variance of every item; use `variances_of` instead. + /// Gets a map with the variances of every item in the local crate. + /// + ///
+ /// + /// **Do not call this query** directly, use [`Self::variances_of`] instead. + /// + ///
query crate_variances(_: ()) -> &'tcx ty::CrateVariancesMap<'tcx> { arena_cache desc { "computing the variances for items in this crate" } } - /// Maps from the `DefId` of a type or region parameter to its (inferred) variance. + /// Returns the (inferred) variances of the item given by `DefId`. + /// + /// The list of variances corresponds to the list of (early-bound) generic + /// parameters of the item (including its parents). + /// + /// **Tip**: You can use `#[rustc_variance]` on an item to basically print the + /// result of this query for use in UI tests or for debugging purposes. query variances_of(def_id: DefId) -> &'tcx [ty::Variance] { desc { |tcx| "computing the variances of `{}`", tcx.def_path_str(def_id) } cache_on_disk_if { def_id.is_local() } @@ -813,10 +890,16 @@ rustc_queries! { cycle_delay_bug } - /// Maps from thee `DefId` of a type to its (inferred) outlives. + /// Gets a map with the inferred outlives-predicates of every item in the local crate. + /// + ///
+ /// + /// **Do not call this query** directly, use [`Self::inferred_outlives_of`] instead. + /// + ///
query inferred_outlives_crate(_: ()) -> &'tcx ty::CratePredicatesMap<'tcx> { arena_cache - desc { "computing the inferred outlives predicates for items in this crate" } + desc { "computing the inferred outlives-predicates for items in this crate" } } /// Maps from an impl/trait or struct/variant `DefId` @@ -1017,9 +1100,6 @@ rustc_queries! { desc { |tcx| "type-checking `{}`", tcx.def_path_str(key) } cache_on_disk_if(tcx) { !tcx.is_typeck_child(key.to_def_id()) } } - query diagnostic_only_typeck(key: LocalDefId) -> &'tcx ty::TypeckResults<'tcx> { - desc { |tcx| "type-checking `{}`", tcx.def_path_str(key) } - } query used_trait_imports(key: LocalDefId) -> &'tcx UnordSet { desc { |tcx| "finding used_trait_imports `{}`", tcx.def_path_str(key) } @@ -1039,20 +1119,35 @@ rustc_queries! { } /// Gets a complete map from all types to their inherent impls. - /// Not meant to be used directly outside of coherence. + /// + ///
+ /// + /// **Not meant to be used** directly outside of coherence. + /// + ///
query crate_inherent_impls(k: ()) -> (&'tcx CrateInherentImpls, Result<(), ErrorGuaranteed>) { desc { "finding all inherent impls defined in crate" } } /// Checks all types in the crate for overlap in their inherent impls. Reports errors. - /// Not meant to be used directly outside of coherence. + /// + ///
+ /// + /// **Not meant to be used** directly outside of coherence. + /// + ///
query crate_inherent_impls_validity_check(_: ()) -> Result<(), ErrorGuaranteed> { desc { "check for inherent impls that should not be defined in crate" } ensure_forwards_result_if_red } /// Checks all types in the crate for overlap in their inherent impls. Reports errors. - /// Not meant to be used directly outside of coherence. + /// + ///
+ /// + /// **Not meant to be used** directly outside of coherence. + /// + ///
query crate_inherent_impls_overlap_check(_: ()) -> Result<(), ErrorGuaranteed> { desc { "check for overlap between inherent impls defined in this crate" } ensure_forwards_result_if_red @@ -1090,8 +1185,12 @@ rustc_queries! { } /// Computes the tag (if any) for a given type and variant. + /// /// `None` means that the variant doesn't need a tag (because it is niched). - /// Will panic for uninhabited variants. + /// + /// # Panics + /// + /// This query will panic for uninhabited variants and if the passed type is not an enum. query tag_for_variant( key: (Ty<'tcx>, abi::VariantIdx) ) -> Option { @@ -1100,7 +1199,12 @@ rustc_queries! { /// Evaluates a constant and returns the computed allocation. /// - /// **Do not use this** directly, use the `eval_to_const_value` or `eval_to_valtree` instead. + ///
+ /// + /// **Do not call this query** directly, use [`Self::eval_to_const_value_raw`] or + /// [`Self::eval_to_valtree`] instead. + /// + ///
query eval_to_allocation_raw(key: ty::PseudoCanonicalInput<'tcx, GlobalId<'tcx>>) -> EvalToAllocationRawResult<'tcx> { desc { |tcx| @@ -1121,18 +1225,25 @@ rustc_queries! { feedable } - /// Evaluates const items or anonymous constants - /// (such as enum variant explicit discriminants or array lengths) - /// into a representation suitable for the type system and const generics. + /// Evaluates const items or anonymous constants[^1] into a representation + /// suitable for the type system and const generics. + /// + ///
/// - /// **Do not use this** directly, use one of the following wrappers: `tcx.const_eval_poly`, - /// `tcx.const_eval_resolve`, `tcx.const_eval_instance`, or `tcx.const_eval_global_id`. + /// **Do not call this** directly, use one of the following wrappers: + /// [`TyCtxt::const_eval_poly`], [`TyCtxt::const_eval_resolve`], + /// [`TyCtxt::const_eval_instance`], or [`TyCtxt::const_eval_global_id`]. + /// + ///
+ /// + /// [^1]: Such as enum variant explicit discriminants or array lengths. query eval_to_const_value_raw(key: ty::PseudoCanonicalInput<'tcx, GlobalId<'tcx>>) -> EvalToConstValueResult<'tcx> { desc { |tcx| "simplifying constant for the type system `{}`", key.value.display(tcx) } + depth_limit cache_on_disk_if { true } } @@ -1158,7 +1269,7 @@ rustc_queries! { // FIXME get rid of this with valtrees query lit_to_const( key: LitToConstInput<'tcx> - ) -> Result, LitToConstError> { + ) -> ty::Const<'tcx> { desc { "converting literal to const" } } @@ -1253,13 +1364,13 @@ rustc_queries! { separate_provide_extern } - /// Determines whether an item is annotated with `doc(hidden)`. + /// Determines whether an item is annotated with `#[doc(hidden)]`. query is_doc_hidden(def_id: DefId) -> bool { desc { |tcx| "checking whether `{}` is `doc(hidden)`", tcx.def_path_str(def_id) } separate_provide_extern } - /// Determines whether an item is annotated with `doc(notable_trait)`. + /// Determines whether an item is annotated with `#[doc(notable_trait)]`. query is_doc_notable_trait(def_id: DefId) -> bool { desc { |tcx| "checking whether `{}` is `doc(notable_trait)`", tcx.def_path_str(def_id) } } @@ -1284,7 +1395,7 @@ rustc_queries! { desc { |tcx| "computing target features for inline asm of `{}`", tcx.def_path_str(def_id) } } - query fn_arg_names(def_id: DefId) -> &'tcx [rustc_span::symbol::Ident] { + query fn_arg_names(def_id: DefId) -> &'tcx [rustc_span::Ident] { desc { |tcx| "looking up function parameter names for `{}`", tcx.def_path_str(def_id) } separate_provide_extern } @@ -1606,6 +1717,11 @@ rustc_queries! { ensure_forwards_result_if_red } + query enforce_impl_non_lifetime_params_are_constrained(key: LocalDefId) -> Result<(), ErrorGuaranteed> { + desc { |tcx| "checking that `{}`'s generics are constrained by the impl header", tcx.def_path_str(key) } + ensure_forwards_result_if_red + } + // The `DefId`s of all non-generic functions and statics in the given crate // that can be reached from outside the crate. // @@ -1797,13 +1913,22 @@ rustc_queries! { query is_late_bound_map(owner_id: hir::OwnerId) -> Option<&'tcx FxIndexSet> { desc { |tcx| "testing if a region is late bound inside `{}`", tcx.def_path_str(owner_id) } } - /// For a given item's generic parameter, gets the default lifetimes to be used - /// for each parameter if a trait object were to be passed for that parameter. - /// For example, for `T` in `struct Foo<'a, T>`, this would be `'static`. - /// For `T` in `struct Foo<'a, T: 'a>`, this would instead be `'a`. - /// This query will panic if passed something that is not a type parameter. + /// Returns the *default lifetime* to be used if a trait object type were to be passed for + /// the type parameter given by `DefId`. + /// + /// **Tip**: You can use `#[rustc_object_lifetime_default]` on an item to basically + /// print the result of this query for use in UI tests or for debugging purposes. + /// + /// # Examples + /// + /// - For `T` in `struct Foo<'a, T: 'a>(&'a T);`, this would be `Param('a)` + /// - For `T` in `struct Bar<'a, T>(&'a T);`, this would be `Empty` + /// + /// # Panics + /// + /// This query will panic if the given definition is not a type parameter. 
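A plain-Rust sketch of the two outcomes named in the `object_lifetime_default` examples above (`Param('a)` vs. `Empty`); the types are adapted with `?Sized` so a trait object can actually be substituted, and the names are illustrative:

```rust
trait Trait {}
impl Trait for () {}

// `T: 'a` bound: a trait object substituted for `T` defaults to `'a` (`Param('a)`).
struct Foo<'a, T: 'a + ?Sized>(&'a T);
type WithBound<'a> = Foo<'a, dyn Trait>; // means `Foo<'a, dyn Trait + 'a>`

// No explicit outlives bound on `T`: the "empty" default, which in this
// position falls back to `'static`.
struct Bar<T: ?Sized>(Box<T>);
type WithoutBound = Bar<dyn Trait>; // means `Bar<dyn Trait + 'static>`

fn main() {
    let unit = ();
    let _with: WithBound<'_> = Foo(&unit);
    let _without: WithoutBound = Bar(Box::new(()));
}
```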
query object_lifetime_default(def_id: DefId) -> ObjectLifetimeDefault { - desc { "looking up lifetime defaults for generic parameter `{}`", tcx.def_path_str(def_id) } + desc { "looking up lifetime defaults for type parameter `{}`", tcx.def_path_str(def_id) } separate_provide_extern } query late_bound_vars_map(owner_id: hir::OwnerId) @@ -2004,6 +2129,8 @@ rustc_queries! { eval_always desc { "calculating the stability index for the local crate" } } + /// All available crates in the graph, including those that should not be user-facing + /// (such as private crates). query crates(_: ()) -> &'tcx [CrateNum] { eval_always desc { "fetching all foreign CrateNum instances" } @@ -2233,7 +2360,7 @@ rustc_queries! { } /// Returns the Rust target features for the current target. These are not always the same as LLVM target features! - query rust_target_features(_: CrateNum) -> &'tcx UnordMap { + query rust_target_features(_: CrateNum) -> &'tcx UnordMap { arena_cache eval_always desc { "looking up Rust target features" } diff --git a/compiler/rustc_middle/src/query/on_disk_cache.rs b/compiler/rustc_middle/src/query/on_disk_cache.rs index 2c22f7b8f4928..4a144ebb89940 100644 --- a/compiler/rustc_middle/src/query/on_disk_cache.rs +++ b/compiler/rustc_middle/src/query/on_disk_cache.rs @@ -799,7 +799,7 @@ macro_rules! impl_ref_decoder { impl_ref_decoder! {<'tcx> Span, rustc_hir::Attribute, - rustc_span::symbol::Ident, + rustc_span::Ident, ty::Variance, rustc_span::def_id::DefId, rustc_span::def_id::LocalDefId, diff --git a/compiler/rustc_middle/src/query/plumbing.rs b/compiler/rustc_middle/src/query/plumbing.rs index 3337f7ceee7d7..2cb6f6d8c6e87 100644 --- a/compiler/rustc_middle/src/query/plumbing.rs +++ b/compiler/rustc_middle/src/query/plumbing.rs @@ -548,7 +548,6 @@ macro_rules! define_feedable { let dep_node_index = tcx.dep_graph.with_feed_task( dep_node, tcx, - key, &value, hash_result!([$($modifiers)*]), ); diff --git a/compiler/rustc_middle/src/traits/mod.rs b/compiler/rustc_middle/src/traits/mod.rs index 8c434265d2764..db2bb8a7248a6 100644 --- a/compiler/rustc_middle/src/traits/mod.rs +++ b/compiler/rustc_middle/src/traits/mod.rs @@ -20,8 +20,7 @@ use rustc_macros::{ Decodable, Encodable, HashStable, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable, }; use rustc_span::def_id::{CRATE_DEF_ID, LocalDefId}; -use rustc_span::symbol::Symbol; -use rustc_span::{DUMMY_SP, Span}; +use rustc_span::{DUMMY_SP, Span, Symbol}; // FIXME: Remove this import and import via `solve::` pub use rustc_type_ir::solve::BuiltinImplSource; use smallvec::{SmallVec, smallvec}; @@ -126,6 +125,15 @@ impl<'tcx> ObligationCause<'tcx> { self } + pub fn derived_host_cause( + mut self, + parent_host_pred: ty::Binder<'tcx, ty::HostEffectPredicate<'tcx>>, + variant: impl FnOnce(DerivedHostCause<'tcx>) -> ObligationCauseCode<'tcx>, + ) -> ObligationCause<'tcx> { + self.code = variant(DerivedHostCause { parent_host_pred, parent_code: self.code }).into(); + self + } + pub fn to_constraint_category(&self) -> ConstraintCategory<'tcx> { match self.code() { ObligationCauseCode::MatchImpl(cause, _) => cause.to_constraint_category(), @@ -248,11 +256,11 @@ pub enum ObligationCauseCode<'tcx> { /// If element is a `const fn` or const ctor we display a help message suggesting /// to move it to a new `const` item while saying that `T` doesn't implement `Copy`. is_constable: IsConstable, - elt_type: Ty<'tcx>, + + /// Span of the repeat element. + /// + /// This is used to suggest wrapping it in a `const { ... }` block. 
elt_span: Span, - /// Span of the statement/item in which the repeat expression occurs. We can use this to - /// place a `const` declaration before it - elt_stmt_span: Span, }, /// Types of fields (other than the last, except for packed structs) in a struct must be sized. @@ -279,6 +287,14 @@ pub enum ObligationCauseCode<'tcx> { /// Derived obligation for WF goals. WellFormedDerived(DerivedCause<'tcx>), + /// Derived obligation (i.e. `where` clause) on an user-provided impl + /// or a trait alias. + ImplDerivedHost(Box>), + + /// Derived obligation (i.e. `where` clause) on an user-provided impl + /// or a trait alias. + BuiltinDerivedHost(DerivedHostCause<'tcx>), + /// Derived obligation refined to point at a specific argument in /// a call or method expression. FunctionArg { @@ -438,36 +454,38 @@ pub enum WellFormedLoc { }, } -#[derive(Clone, Debug, PartialEq, Eq, HashStable, TyEncodable, TyDecodable)] -#[derive(TypeVisitable, TypeFoldable)] -pub struct ImplDerivedCause<'tcx> { - pub derived: DerivedCause<'tcx>, - /// The `DefId` of the `impl` that gave rise to the `derived` obligation. - /// If the `derived` obligation arose from a trait alias, which conceptually has a synthetic impl, - /// then this will be the `DefId` of that trait alias. Care should therefore be taken to handle - /// that exceptional case where appropriate. - pub impl_or_alias_def_id: DefId, - /// The index of the derived predicate in the parent impl's predicates. - pub impl_def_predicate_index: Option, - pub span: Span, -} - impl<'tcx> ObligationCauseCode<'tcx> { /// Returns the base obligation, ignoring derived obligations. pub fn peel_derives(&self) -> &Self { let mut base_cause = self; - while let Some((parent_code, _)) = base_cause.parent() { + while let Some(parent_code) = base_cause.parent() { base_cause = parent_code; } base_cause } + pub fn parent(&self) -> Option<&Self> { + match self { + ObligationCauseCode::FunctionArg { parent_code, .. } => Some(parent_code), + ObligationCauseCode::BuiltinDerived(derived) + | ObligationCauseCode::WellFormedDerived(derived) + | ObligationCauseCode::ImplDerived(box ImplDerivedCause { derived, .. }) => { + Some(&derived.parent_code) + } + ObligationCauseCode::BuiltinDerivedHost(derived) + | ObligationCauseCode::ImplDerivedHost(box ImplDerivedHostCause { derived, .. }) => { + Some(&derived.parent_code) + } + _ => None, + } + } + /// Returns the base obligation and the base trait predicate, if any, ignoring /// derived obligations. pub fn peel_derives_with_predicate(&self) -> (&Self, Option>) { let mut base_cause = self; let mut base_trait_pred = None; - while let Some((parent_code, parent_pred)) = base_cause.parent() { + while let Some((parent_code, parent_pred)) = base_cause.parent_with_predicate() { base_cause = parent_code; if let Some(parent_pred) = parent_pred { base_trait_pred = Some(parent_pred); @@ -477,7 +495,7 @@ impl<'tcx> ObligationCauseCode<'tcx> { (base_cause, base_trait_pred) } - pub fn parent(&self) -> Option<(&Self, Option>)> { + pub fn parent_with_predicate(&self) -> Option<(&Self, Option>)> { match self { ObligationCauseCode::FunctionArg { parent_code, .. 
} => Some((parent_code, None)), ObligationCauseCode::BuiltinDerived(derived) @@ -574,6 +592,42 @@ pub struct DerivedCause<'tcx> { pub parent_code: InternedObligationCauseCode<'tcx>, } +#[derive(Clone, Debug, PartialEq, Eq, HashStable, TyEncodable, TyDecodable)] +#[derive(TypeVisitable, TypeFoldable)] +pub struct ImplDerivedCause<'tcx> { + pub derived: DerivedCause<'tcx>, + /// The `DefId` of the `impl` that gave rise to the `derived` obligation. + /// If the `derived` obligation arose from a trait alias, which conceptually has a synthetic impl, + /// then this will be the `DefId` of that trait alias. Care should therefore be taken to handle + /// that exceptional case where appropriate. + pub impl_or_alias_def_id: DefId, + /// The index of the derived predicate in the parent impl's predicates. + pub impl_def_predicate_index: Option, + pub span: Span, +} + +#[derive(Clone, Debug, PartialEq, Eq, HashStable, TyEncodable, TyDecodable)] +#[derive(TypeVisitable, TypeFoldable)] +pub struct DerivedHostCause<'tcx> { + /// The trait predicate of the parent obligation that led to the + /// current obligation. Note that only trait obligations lead to + /// derived obligations, so we just store the trait predicate here + /// directly. + pub parent_host_pred: ty::Binder<'tcx, ty::HostEffectPredicate<'tcx>>, + + /// The parent trait had this cause. + pub parent_code: InternedObligationCauseCode<'tcx>, +} + +#[derive(Clone, Debug, PartialEq, Eq, HashStable, TyEncodable, TyDecodable)] +#[derive(TypeVisitable, TypeFoldable)] +pub struct ImplDerivedHostCause<'tcx> { + pub derived: DerivedHostCause<'tcx>, + /// The `DefId` of the `impl` that gave rise to the `derived` obligation. + pub impl_def_id: DefId, + pub span: Span, +} + #[derive(Clone, Debug, PartialEq, Eq, TypeVisitable)] pub enum SelectionError<'tcx> { /// The trait is not implemented. 
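To ground the `*Derived*`/`*DerivedHost*` cause variants above in surface Rust: when a bound on an impl fails, the reported obligation is *derived* from the impl's `where` clause rather than from anything written at the use site. A hedged, illustrative example (the failing call is commented out so the snippet compiles):

```rust
trait Print {
    fn print(&self);
}

struct Wrapper<T>(T);

// The `T: Clone` requirement is what later shows up as an impl-derived
// obligation when `Wrapper<SomeType>: Print` is needed.
impl<T: Clone> Print for Wrapper<T> {
    fn print(&self) {}
}

struct NotClone;

fn needs_print<P: Print>(_value: P) {}

fn main() {
    needs_print(Wrapper(0_i32)); // fine: `i32: Clone` holds

    // Uncommenting this yields "the trait bound `NotClone: Clone` is not
    // satisfied", with a note pointing at the impl header above; that note
    // is what the derived cause information drives.
    // needs_print(Wrapper(NotClone));
}
```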
diff --git a/compiler/rustc_middle/src/traits/specialization_graph.rs b/compiler/rustc_middle/src/traits/specialization_graph.rs index 515aabbe2fc8a..9ce5373b03118 100644 --- a/compiler/rustc_middle/src/traits/specialization_graph.rs +++ b/compiler/rustc_middle/src/traits/specialization_graph.rs @@ -2,7 +2,7 @@ use rustc_data_structures::fx::FxIndexMap; use rustc_errors::ErrorGuaranteed; use rustc_hir::def_id::{DefId, DefIdMap}; use rustc_macros::{HashStable, TyDecodable, TyEncodable}; -use rustc_span::symbol::sym; +use rustc_span::sym; use crate::error::StrictCoherenceNeedsNegativeCoherence; use crate::ty::fast_reject::SimplifiedType; @@ -75,7 +75,7 @@ impl OverlapMode { tcx.hir().attrs(tcx.local_def_id_to_hir_id(local_def_id)) }) .find(|attr| attr.has_name(sym::rustc_strict_coherence)) - .map(|attr| attr.span); + .map(|attr| attr.span()); tcx.dcx().emit_err(StrictCoherenceNeedsNegativeCoherence { span: tcx.def_span(trait_id), attr_span, diff --git a/compiler/rustc_middle/src/ty/adt.rs b/compiler/rustc_middle/src/ty/adt.rs index 447cbc8932ee5..e28fcc555dcd1 100644 --- a/compiler/rustc_middle/src/ty/adt.rs +++ b/compiler/rustc_middle/src/ty/adt.rs @@ -17,7 +17,7 @@ use rustc_index::{IndexSlice, IndexVec}; use rustc_macros::{HashStable, TyDecodable, TyEncodable}; use rustc_query_system::ich::StableHashingContext; use rustc_session::DataTypeKind; -use rustc_span::symbol::sym; +use rustc_span::sym; use rustc_type_ir::solve::AdtDestructorKind; use tracing::{debug, info, trace}; @@ -249,9 +249,9 @@ pub enum AdtKind { Enum, } -impl Into for AdtKind { - fn into(self) -> DataTypeKind { - match self { +impl From for DataTypeKind { + fn from(val: AdtKind) -> Self { + match val { AdtKind::Struct => DataTypeKind::Struct, AdtKind::Union => DataTypeKind::Union, AdtKind::Enum => DataTypeKind::Enum, diff --git a/compiler/rustc_middle/src/ty/assoc.rs b/compiler/rustc_middle/src/ty/assoc.rs index 62157d9bfe2cc..6309dd2e490ff 100644 --- a/compiler/rustc_middle/src/ty/assoc.rs +++ b/compiler/rustc_middle/src/ty/assoc.rs @@ -3,7 +3,7 @@ use rustc_hir as hir; use rustc_hir::def::{DefKind, Namespace}; use rustc_hir::def_id::DefId; use rustc_macros::{Decodable, Encodable, HashStable}; -use rustc_span::symbol::{Ident, Symbol}; +use rustc_span::{Ident, Symbol}; use super::{TyCtxt, Visibility}; use crate::ty; diff --git a/compiler/rustc_middle/src/ty/closure.rs b/compiler/rustc_middle/src/ty/closure.rs index 6ab4d76e5451a..33d39b137b6d9 100644 --- a/compiler/rustc_middle/src/ty/closure.rs +++ b/compiler/rustc_middle/src/ty/closure.rs @@ -7,8 +7,7 @@ use rustc_hir::HirId; use rustc_hir::def_id::LocalDefId; use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable}; use rustc_span::def_id::LocalDefIdMap; -use rustc_span::symbol::Ident; -use rustc_span::{Span, Symbol}; +use rustc_span::{Ident, Span, Symbol}; use super::TyCtxt; use crate::hir::place::{ diff --git a/compiler/rustc_middle/src/ty/codec.rs b/compiler/rustc_middle/src/ty/codec.rs index aba5719138c55..94bf1aa4f0356 100644 --- a/compiler/rustc_middle/src/ty/codec.rs +++ b/compiler/rustc_middle/src/ty/codec.rs @@ -548,7 +548,7 @@ macro_rules! impl_arena_copy_decoder { impl_arena_copy_decoder! 
{<'tcx> Span, - rustc_span::symbol::Ident, + rustc_span::Ident, ty::Variance, rustc_span::def_id::DefId, rustc_span::def_id::LocalDefId, diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs index f32656decd212..27b7e5d378817 100644 --- a/compiler/rustc_middle/src/ty/context.rs +++ b/compiler/rustc_middle/src/ty/context.rs @@ -10,6 +10,7 @@ use std::cmp::Ordering; use std::hash::{Hash, Hasher}; use std::marker::PhantomData; use std::ops::{Bound, Deref}; +use std::sync::OnceLock; use std::{fmt, iter, mem}; use rustc_abi::{ExternAbi, FieldIdx, Layout, LayoutData, TargetDataLayout, VariantIdx}; @@ -46,8 +47,7 @@ use rustc_session::cstore::{CrateStoreDyn, Untracked}; use rustc_session::lint::Lint; use rustc_session::{Limit, MetadataKind, Session}; use rustc_span::def_id::{CRATE_DEF_ID, DefPathHash, StableCrateId}; -use rustc_span::symbol::{Ident, Symbol, kw, sym}; -use rustc_span::{DUMMY_SP, Span}; +use rustc_span::{DUMMY_SP, Ident, Span, Symbol, kw, sym}; use rustc_type_ir::TyKind::*; use rustc_type_ir::fold::TypeFoldable; use rustc_type_ir::lang_items::TraitSolverLangItem; @@ -60,7 +60,7 @@ use crate::dep_graph::{DepGraph, DepKindStruct}; use crate::infer::canonical::{CanonicalParamEnvCache, CanonicalVarInfo, CanonicalVarInfos}; use crate::lint::lint_level; use crate::metadata::ModChild; -use crate::middle::codegen_fn_attrs::CodegenFnAttrs; +use crate::middle::codegen_fn_attrs::{CodegenFnAttrs, TargetFeature}; use crate::middle::{resolve_bound_vars, stability}; use crate::mir::interpret::{self, Allocation, ConstAllocation}; use crate::mir::{Body, Local, Place, PlaceElem, ProjectionKind, Promoted}; @@ -471,7 +471,8 @@ impl<'tcx> Interner for TyCtxt<'tcx> { | ty::CoroutineClosure(..) | ty::Coroutine(_, _) | ty::Never - | ty::Tuple(_) => { + | ty::Tuple(_) + | ty::UnsafeBinder(_) => { let simp = ty::fast_reject::simplify_type( tcx, self_ty, @@ -612,7 +613,7 @@ impl<'tcx> Interner for TyCtxt<'tcx> { self.coroutine_is_async_gen(coroutine_def_id) } - type UnsizingParams = &'tcx rustc_index::bit_set::BitSet; + type UnsizingParams = &'tcx rustc_index::bit_set::DenseBitSet; fn unsizing_params_for_adt(self, adt_def_id: DefId) -> Self::UnsizingParams { self.unsizing_params_for_adt(adt_def_id) } @@ -1118,10 +1119,10 @@ impl<'tcx> CommonConsts<'tcx> { /// either a `ReEarlyParam` or `ReLateParam`. #[derive(Debug)] pub struct FreeRegionInfo { - /// `LocalDefId` of the free region. - pub def_id: LocalDefId, - /// the bound region corresponding to free region. - pub bound_region: ty::BoundRegionKind, + /// `LocalDefId` of the scope. + pub scope: LocalDefId, + /// the `DefId` of the free region. + pub region_def_id: DefId, /// checks if bound region is in Impl Item pub is_impl_item: bool, } @@ -1346,46 +1347,6 @@ pub struct GlobalCtxt<'tcx> { /// Stores memory for globals (statics/consts). pub(crate) alloc_map: Lock>, - - current_gcx: CurrentGcx, -} - -impl<'tcx> GlobalCtxt<'tcx> { - /// Installs `self` in a `TyCtxt` and `ImplicitCtxt` for the duration of - /// `f`. - pub fn enter(&'tcx self, f: F) -> R - where - F: FnOnce(TyCtxt<'tcx>) -> R, - { - let icx = tls::ImplicitCtxt::new(self); - - // Reset `current_gcx` to `None` when we exit. - let _on_drop = defer(move || { - *self.current_gcx.value.write() = None; - }); - - // Set this `GlobalCtxt` as the current one. 
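// Aside (not part of this patch): the reworked `create_global_ctxt` further down follows a
// scoped-construction pattern -- the caller passes in the storage cell plus a closure,
// the context is created lazily with `OnceLock::get_or_init`, and it is only usable
// inside that closure. A minimal standalone sketch of the pattern, with made-up names:
//
//     use std::sync::OnceLock;
//
//     struct Ctxt {
//         crate_name: String,
//     }
//
//     fn create_ctxt<T>(cell: &OnceLock<Ctxt>, f: impl FnOnce(&Ctxt) -> T) -> T {
//         let cx = cell.get_or_init(|| Ctxt { crate_name: "demo".to_string() });
//         f(cx)
//     }
//
//     fn main() {
//         let cell = OnceLock::new();
//         let len = create_ctxt(&cell, |cx| cx.crate_name.len());
//         assert_eq!(len, 4);
//     }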
- { - let mut guard = self.current_gcx.value.write(); - assert!(guard.is_none(), "no `GlobalCtxt` is currently set"); - *guard = Some(self as *const _ as *const ()); - } - - tls::enter_context(&icx, || f(icx.tcx)) - } - - pub fn finish(&'tcx self) { - // We assume that no queries are run past here. If there are new queries - // after this point, they'll show up as "" in self-profiling data. - self.enter(|tcx| tcx.alloc_self_profile_query_strings()); - - self.enter(|tcx| tcx.save_dep_graph()); - self.enter(|tcx| tcx.query_key_hash_verify_all()); - - if let Err((path, error)) = self.dep_graph.finish_encoding() { - self.sess.dcx().emit_fatal(crate::error::FailedWritingFile { path: &path, error }); - } - } } /// This is used to get a reference to a `GlobalCtxt` if one is available. @@ -1507,7 +1468,7 @@ impl<'tcx> TyCtxt<'tcx> { Bound::Included(a.get()) } else { self.dcx().span_delayed_bug( - attr.span, + attr.span(), "invalid rustc_layout_scalar_valid_range attribute", ); Bound::Unbounded @@ -1529,7 +1490,8 @@ impl<'tcx> TyCtxt<'tcx> { /// By only providing the `TyCtxt` inside of the closure we enforce that the type /// context and any interned value (types, args, etc.) can only be used while `ty::tls` /// has a valid reference to the context, to allow formatting values that need it. - pub fn create_global_ctxt( + pub fn create_global_ctxt( + gcx_cell: &'tcx OnceLock>, s: &'tcx Session, crate_types: Vec, stable_crate_id: StableCrateId, @@ -1541,7 +1503,8 @@ impl<'tcx> TyCtxt<'tcx> { query_system: QuerySystem<'tcx>, hooks: crate::hooks::Providers, current_gcx: CurrentGcx, - ) -> GlobalCtxt<'tcx> { + f: impl FnOnce(TyCtxt<'tcx>) -> T, + ) -> T { let data_layout = s.target.parse_data_layout().unwrap_or_else(|err| { s.dcx().emit_fatal(err); }); @@ -1550,7 +1513,7 @@ impl<'tcx> TyCtxt<'tcx> { let common_lifetimes = CommonLifetimes::new(&interners); let common_consts = CommonConsts::new(&interners, &common_types, s, &untracked); - GlobalCtxt { + let gcx = gcx_cell.get_or_init(|| GlobalCtxt { sess: s, crate_types, stable_crate_id, @@ -1574,8 +1537,23 @@ impl<'tcx> TyCtxt<'tcx> { canonical_param_env_cache: Default::default(), data_layout, alloc_map: Lock::new(interpret::AllocMap::new()), - current_gcx, + }); + + let icx = tls::ImplicitCtxt::new(&gcx); + + // Reset `current_gcx` to `None` when we exit. + let _on_drop = defer(|| { + *current_gcx.value.write() = None; + }); + + // Set this `GlobalCtxt` as the current one. + { + let mut guard = current_gcx.value.write(); + assert!(guard.is_none(), "no `GlobalCtxt` is currently set"); + *guard = Some(&gcx as *const _ as *const ()); } + + tls::enter_context(&icx, || f(icx.tcx)) } /// Obtain all lang items of this crate and all dependencies (recursively) @@ -1798,6 +1776,37 @@ impl<'tcx> TyCtxt<'tcx> { pub fn dcx(self) -> DiagCtxtHandle<'tcx> { self.sess.dcx() } + + pub fn is_target_feature_call_safe( + self, + callee_features: &[TargetFeature], + body_features: &[TargetFeature], + ) -> bool { + // If the called function has target features the calling function hasn't, + // the call requires `unsafe`. Don't check this on wasm + // targets, though. For more information on wasm see the + // is_like_wasm check in hir_analysis/src/collect.rs + self.sess.target.options.is_like_wasm + || callee_features + .iter() + .all(|feature| body_features.iter().any(|f| f.name == feature.name)) + } + + /// Returns the safe version of the signature of the given function, if calling it + /// would be safe in the context of the given caller. 
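// For illustration only (not part of the patch): at the surface level, the rule that
// `is_target_feature_call_safe` implements looks like this on an x86_64 target (arch
// assumed here, and safe `#[target_feature]` functions per the `safe_target_features`
// support this patch refers to; on wasm-like targets the check is skipped, as noted above):
//
//     #[target_feature(enable = "avx2")]
//     fn kernel() {}
//
//     fn plain_caller() {
//         // `plain_caller` does not enable avx2, so the callee's features are not a
//         // subset of the caller's and the call needs an `unsafe` block.
//         unsafe { kernel() }
//     }
//
//     #[target_feature(enable = "avx2")]
//     fn avx2_caller() {
//         // Callee features are a subset of the caller's: the call is safe.
//         kernel();
//     }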
+ pub fn adjust_target_feature_sig( + self, + fun_def: DefId, + fun_sig: ty::Binder<'tcx, ty::FnSig<'tcx>>, + caller: DefId, + ) -> Option>> { + let fun_features = &self.codegen_fn_attrs(fun_def).target_features; + let callee_features = &self.codegen_fn_attrs(caller).target_features; + if self.is_target_feature_call_safe(&fun_features, &callee_features) { + return Some(fun_sig.map_bound(|sig| ty::FnSig { safety: hir::Safety::Safe, ..sig })); + } + None + } } impl<'tcx> TyCtxtAt<'tcx> { @@ -1982,7 +1991,7 @@ impl<'tcx> TyCtxt<'tcx> { generic_param_scope: LocalDefId, mut region: Region<'tcx>, ) -> Option { - let (suitable_region_binding_scope, bound_region) = loop { + let (suitable_region_binding_scope, region_def_id) = loop { let def_id = region.opt_param_def_id(self, generic_param_scope.to_def_id())?.as_local()?; let scope = self.local_parent(def_id); @@ -1992,10 +2001,7 @@ impl<'tcx> TyCtxt<'tcx> { region = self.map_opaque_lifetime_to_parent_lifetime(def_id); continue; } - break ( - scope, - ty::BoundRegionKind::Named(def_id.into(), self.item_name(def_id.into())), - ); + break (scope, def_id.into()); }; let is_impl_item = match self.hir_node_by_def_id(suitable_region_binding_scope) { @@ -2004,7 +2010,7 @@ impl<'tcx> TyCtxt<'tcx> { _ => false, }; - Some(FreeRegionInfo { def_id: suitable_region_binding_scope, bound_region, is_impl_item }) + Some(FreeRegionInfo { scope: suitable_region_binding_scope, region_def_id, is_impl_item }) } /// Given a `DefId` for an `fn`, return all the `dyn` and `impl` traits in its return type. @@ -2103,12 +2109,23 @@ impl<'tcx> TyCtxt<'tcx> { self.limits(()).move_size_limit } + /// All traits in the crate graph, including those not visible to the user. pub fn all_traits(self) -> impl Iterator + 'tcx { iter::once(LOCAL_CRATE) .chain(self.crates(()).iter().copied()) .flat_map(move |cnum| self.traits(cnum).iter().copied()) } + /// All traits that are visible within the crate graph (i.e. excluding private dependencies). + pub fn visible_traits(self) -> impl Iterator + 'tcx { + let visible_crates = + self.crates(()).iter().copied().filter(move |cnum| self.is_user_visible_dep(*cnum)); + + iter::once(LOCAL_CRATE) + .chain(visible_crates) + .flat_map(move |cnum| self.traits(cnum).iter().copied()) + } + #[inline] pub fn local_visibility(self, def_id: LocalDefId) -> Visibility { self.visibility(def_id).expect_local() @@ -2119,6 +2136,19 @@ impl<'tcx> TyCtxt<'tcx> { pub fn local_opaque_ty_origin(self, def_id: LocalDefId) -> hir::OpaqueTyOrigin { self.hir().expect_opaque_ty(def_id).origin } + + pub fn finish(self) { + // We assume that no queries are run past here. If there are new queries + // after this point, they'll show up as "" in self-profiling data. + self.alloc_self_profile_query_strings(); + + self.save_dep_graph(); + self.query_key_hash_verify_all(); + + if let Err((path, error)) = self.dep_graph.finish_encoding() { + self.sess.dcx().emit_fatal(crate::error::FailedWritingFile { path: &path, error }); + } + } } macro_rules! 
nop_lift { @@ -2308,6 +2338,7 @@ impl<'tcx> TyCtxt<'tcx> { Ref, FnDef, FnPtr, + UnsafeBinder, Placeholder, Coroutine, CoroutineWitness, @@ -3104,7 +3135,7 @@ impl<'tcx> TyCtxt<'tcx> { return ty::Region::new_late_param( self, new_parent.to_def_id(), - ty::BoundRegionKind::Named( + ty::LateParamRegionKind::Named( lbv.to_def_id(), self.item_name(lbv.to_def_id()), ), diff --git a/compiler/rustc_middle/src/ty/diagnostics.rs b/compiler/rustc_middle/src/ty/diagnostics.rs index 604f1da26c648..77745599afb92 100644 --- a/compiler/rustc_middle/src/ty/diagnostics.rs +++ b/compiler/rustc_middle/src/ty/diagnostics.rs @@ -69,10 +69,10 @@ impl<'tcx> Ty<'tcx> { /// description in error messages. This is used in the primary span label. Beyond what /// `is_simple_ty` includes, it also accepts ADTs with no type arguments and references to /// ADTs with no type arguments. - pub fn is_simple_text(self, tcx: TyCtxt<'tcx>) -> bool { + pub fn is_simple_text(self) -> bool { match self.kind() { Adt(_, args) => args.non_erasable_generics().next().is_none(), - Ref(_, ty, _) => ty.is_simple_text(tcx), + Ref(_, ty, _) => ty.is_simple_text(), _ => self.is_simple_ty(), } } @@ -309,7 +309,7 @@ pub fn suggest_constraining_type_params<'a>( let Some(param) = param else { return false }; { - let mut sized_constraints = constraints.extract_if(|(_, def_id, _)| { + let mut sized_constraints = constraints.extract_if(.., |(_, def_id, _)| { def_id.is_some_and(|def_id| tcx.is_lang_item(def_id, LangItem::Sized)) }); if let Some((_, def_id, _)) = sized_constraints.next() { diff --git a/compiler/rustc_middle/src/ty/error.rs b/compiler/rustc_middle/src/ty/error.rs index 4a82af3255950..35fbaa99569c9 100644 --- a/compiler/rustc_middle/src/ty/error.rs +++ b/compiler/rustc_middle/src/ty/error.rs @@ -109,6 +109,9 @@ impl<'tcx> TypeError<'tcx> { TypeError::ConstMismatch(ref values) => { format!("expected `{}`, found `{}`", values.expected, values.found).into() } + TypeError::ForceInlineCast => { + "cannot coerce functions which must be inlined to function pointers".into() + } TypeError::IntrinsicCast => "cannot coerce intrinsics to function pointers".into(), TypeError::TargetFeatureCast(_) => { "cannot coerce functions with `#[target_feature]` to safe function pointers".into() @@ -191,6 +194,7 @@ impl<'tcx> Ty<'tcx> { _ => "fn item".into(), }, ty::FnPtr(..) => "fn pointer".into(), + ty::UnsafeBinder(_) => "unsafe binder".into(), ty::Dynamic(..) => "trait object".into(), ty::Closure(..) | ty::CoroutineClosure(..) => "closure".into(), ty::Coroutine(def_id, ..) 
=> { diff --git a/compiler/rustc_middle/src/ty/flags.rs b/compiler/rustc_middle/src/ty/flags.rs index 04d03187541ce..0af57f636aaab 100644 --- a/compiler/rustc_middle/src/ty/flags.rs +++ b/compiler/rustc_middle/src/ty/flags.rs @@ -253,6 +253,12 @@ impl FlagComputation { &ty::FnPtr(sig_tys, _) => self.bound_computation(sig_tys, |computation, sig_tys| { computation.add_tys(sig_tys.inputs_and_output); }), + + &ty::UnsafeBinder(bound_ty) => { + self.bound_computation(bound_ty.into(), |computation, ty| { + computation.add_ty(ty); + }) + } } } diff --git a/compiler/rustc_middle/src/ty/fold.rs b/compiler/rustc_middle/src/ty/fold.rs index 1b073d3c466d3..067516917ef77 100644 --- a/compiler/rustc_middle/src/ty/fold.rs +++ b/compiler/rustc_middle/src/ty/fold.rs @@ -270,7 +270,8 @@ impl<'tcx> TyCtxt<'tcx> { T: TypeFoldable>, { self.instantiate_bound_regions_uncached(value, |br| { - ty::Region::new_late_param(self, all_outlive_scope, br.kind) + let kind = ty::LateParamRegionKind::from_bound(br.var, br.kind); + ty::Region::new_late_param(self, all_outlive_scope, kind) }) } diff --git a/compiler/rustc_middle/src/ty/generics.rs b/compiler/rustc_middle/src/ty/generics.rs index ce40ab182613c..85d9db7ee7473 100644 --- a/compiler/rustc_middle/src/ty/generics.rs +++ b/compiler/rustc_middle/src/ty/generics.rs @@ -2,8 +2,7 @@ use rustc_ast as ast; use rustc_data_structures::fx::FxHashMap; use rustc_hir::def_id::DefId; use rustc_macros::{HashStable, TyDecodable, TyEncodable}; -use rustc_span::Span; -use rustc_span::symbol::{Symbol, kw}; +use rustc_span::{Span, Symbol, kw}; use tracing::instrument; use super::{Clause, InstantiatedPredicates, ParamConst, ParamTy, Ty, TyCtxt}; diff --git a/compiler/rustc_middle/src/ty/instance.rs b/compiler/rustc_middle/src/ty/instance.rs index 49b5588e261af..e4ded2c30f539 100644 --- a/compiler/rustc_middle/src/ty/instance.rs +++ b/compiler/rustc_middle/src/ty/instance.rs @@ -5,7 +5,7 @@ use std::path::PathBuf; use rustc_data_structures::fx::FxHashMap; use rustc_errors::ErrorGuaranteed; use rustc_hir as hir; -use rustc_hir::def::Namespace; +use rustc_hir::def::{CtorKind, DefKind, Namespace}; use rustc_hir::def_id::{CrateNum, DefId}; use rustc_hir::lang_items::LangItem; use rustc_index::bit_set::FiniteBitSet; @@ -498,7 +498,8 @@ impl<'tcx> Instance<'tcx> { /// Resolves a `(def_id, args)` pair to an (optional) instance -- most commonly, /// this is used to find the precise code that will run for a trait method invocation, - /// if known. + /// if known. This should only be used for functions and consts. If you want to + /// resolve an associated type, use [`TyCtxt::try_normalize_erasing_regions`]. /// /// Returns `Ok(None)` if we cannot resolve `Instance` to a specific instance. /// For example, in a context like this, @@ -527,6 +528,23 @@ impl<'tcx> Instance<'tcx> { def_id: DefId, args: GenericArgsRef<'tcx>, ) -> Result>, ErrorGuaranteed> { + assert_matches!( + tcx.def_kind(def_id), + DefKind::Fn + | DefKind::AssocFn + | DefKind::Const + | DefKind::AssocConst + | DefKind::AnonConst + | DefKind::InlineConst + | DefKind::Static { .. } + | DefKind::Ctor(_, CtorKind::Fn) + | DefKind::Closure + | DefKind::SyntheticCoroutineBody, + "`Instance::try_resolve` should only be used to resolve instances of \ + functions, statics, and consts; to resolve associated types, use \ + `try_normalize_erasing_regions`." + ); + // Rust code can easily create exponentially-long types using only a // polynomial recursion depth. 
Even with the default recursion // depth, you can easily get cases that take >2^60 steps to run, diff --git a/compiler/rustc_middle/src/ty/layout.rs b/compiler/rustc_middle/src/ty/layout.rs index 07573a7926003..1e67cdfc32a9b 100644 --- a/compiler/rustc_middle/src/ty/layout.rs +++ b/compiler/rustc_middle/src/ty/layout.rs @@ -16,8 +16,7 @@ use rustc_hir::def_id::DefId; use rustc_index::IndexVec; use rustc_macros::{HashStable, TyDecodable, TyEncodable, extension}; use rustc_session::config::OptLevel; -use rustc_span::symbol::{Symbol, sym}; -use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span}; +use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, Symbol, sym}; use rustc_target::callconv::FnAbi; use rustc_target::spec::{ HasTargetSpec, HasWasmCAbiOpt, HasX86AbiOpt, PanicStrategy, Target, WasmCAbi, X86Abi, @@ -735,21 +734,22 @@ where let layout = match this.variants { Variants::Single { index } // If all variants but one are uninhabited, the variant layout is the enum layout. - if index == variant_index && - // Don't confuse variants of uninhabited enums with the enum itself. - // For more details see https://github.com/rust-lang/rust/issues/69763. - this.fields != FieldsShape::Primitive => + if index == variant_index => { this.layout } - Variants::Single { index } => { + Variants::Single { .. } | Variants::Empty => { + // Single-variant and no-variant enums *can* have other variants, but those are + // uninhabited. Produce a layout that has the right fields for that variant, so that + // the rest of the compiler can project fields etc as usual. + let tcx = cx.tcx(); let typing_env = cx.typing_env(); // Deny calling for_variant more than once for non-Single enums. if let Ok(original_layout) = tcx.layout_of(typing_env.as_query_input(this.ty)) { - assert_eq!(original_layout.variants, Variants::Single { index }); + assert_eq!(original_layout.variants, this.variants); } let fields = match this.ty.kind() { @@ -770,6 +770,7 @@ where size: Size::ZERO, max_repr_align: None, unadjusted_abi_align: tcx.data_layout.i8_align.abi, + randomization_seed: 0, }) } @@ -816,6 +817,11 @@ where bug!("TyAndLayout::field({:?}): not applicable", this) } + ty::UnsafeBinder(bound_ty) => { + let ty = tcx.instantiate_bound_regions_with_erased(bound_ty.into()); + field_ty_or_layout(TyAndLayout { ty, ..this }, cx, i) + } + // Potentially-wide pointers. ty::Ref(_, pointee, _) | ty::RawPtr(pointee, _) => { assert!(i < this.fields.count()); @@ -903,6 +909,7 @@ where ), ty::Coroutine(def_id, args) => match this.variants { + Variants::Empty => unreachable!(), Variants::Single { index } => TyMaybeWithLayout::Ty( args.as_coroutine() .state_tys(def_id, tcx) @@ -928,6 +935,7 @@ where let field = &def.variant(index).fields[FieldIdx::from_usize(i)]; TyMaybeWithLayout::Ty(field.ty(tcx, args)) } + Variants::Empty => panic!("there is no field in Variants::Empty types"), // Discriminant field for enums (where applicable). Variants::Multiple { tag, .. } => { @@ -1233,6 +1241,7 @@ pub fn fn_can_unwind(tcx: TyCtxt<'_>, fn_def_id: Option, abi: ExternAbi) PtxKernel | Msp430Interrupt | X86Interrupt + | GpuKernel | EfiApi | AvrInterrupt | AvrNonBlockingInterrupt diff --git a/compiler/rustc_middle/src/ty/list.rs b/compiler/rustc_middle/src/ty/list.rs index 30a5586f59ca4..6718493f6b32a 100644 --- a/compiler/rustc_middle/src/ty/list.rs +++ b/compiler/rustc_middle/src/ty/list.rs @@ -21,7 +21,7 @@ use crate::arena::Arena; /// pointer. /// - Because of this, you cannot get a `List` that is a sub-list of another /// `List`. 
You can get a sub-slice `&[T]`, however. -/// - `List` can be used with `CopyTaggedPtr`, which is useful within +/// - `List` can be used with `TaggedRef`, which is useful within /// structs whose size must be minimized. /// - Because of the uniqueness assumption, we can use the address of a /// `List` for faster equality comparisons and hashing. diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs index 061b4e806b2d8..43c67dbb43deb 100644 --- a/compiler/rustc_middle/src/ty/mod.rs +++ b/compiler/rustc_middle/src/ty/mod.rs @@ -28,11 +28,12 @@ use rustc_abi::{Align, FieldIdx, Integer, IntegerType, ReprFlags, ReprOptions, V use rustc_ast::expand::StrippedCfgItem; use rustc_ast::node_id::NodeMap; pub use rustc_ast_ir::{Movability, Mutability, try_visit}; +use rustc_attr_parsing::AttributeKind; use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet}; use rustc_data_structures::intern::Interned; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_data_structures::steal::Steal; -use rustc_errors::{Diag, ErrorGuaranteed, StashKey}; +use rustc_errors::{Diag, ErrorGuaranteed}; use rustc_hir::LangItem; use rustc_hir::def::{CtorKind, CtorOf, DefKind, DocLinkResMap, LifetimeRes, Res}; use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, LocalDefId, LocalDefIdMap}; @@ -46,8 +47,7 @@ use rustc_serialize::{Decodable, Encodable}; use rustc_session::lint::LintBuffer; pub use rustc_session::lint::RegisteredTools; use rustc_span::hygiene::MacroKind; -use rustc_span::symbol::{Ident, Symbol, kw, sym}; -use rustc_span::{ExpnId, ExpnKind, Span}; +use rustc_span::{ExpnId, ExpnKind, Ident, Span, Symbol, kw, sym}; pub use rustc_type_ir::relate::VarianceDiagInfo; pub use rustc_type_ir::*; use tracing::{debug, instrument}; @@ -84,7 +84,8 @@ pub use self::predicate::{ TypeOutlivesPredicate, }; pub use self::region::{ - BoundRegion, BoundRegionKind, EarlyParamRegion, LateParamRegion, Region, RegionKind, RegionVid, + BoundRegion, BoundRegionKind, EarlyParamRegion, LateParamRegion, LateParamRegionKind, Region, + RegionKind, RegionVid, }; pub use self::rvalue_scopes::RvalueScopes; pub use self::sty::{ @@ -222,6 +223,7 @@ pub struct DelegationFnSig { pub param_count: usize, pub has_self: bool, pub c_variadic: bool, + pub target_feature: bool, } #[derive(Clone, Copy, Debug)] @@ -782,18 +784,8 @@ impl<'tcx> OpaqueHiddenType<'tcx> { pub fn build_mismatch_error( &self, other: &Self, - opaque_def_id: LocalDefId, tcx: TyCtxt<'tcx>, ) -> Result, ErrorGuaranteed> { - // We used to cancel here for slightly better error messages, but - // cancelling stashed diagnostics is no longer allowed because it - // causes problems when tracking whether errors have actually - // occurred. - tcx.sess.dcx().try_steal_modify_and_emit_err( - tcx.def_span(opaque_def_id), - StashKey::OpaqueHiddenTypeMismatch, - |_err| {}, - ); (self.ty, other.ty).error_reported()?; // Found different concrete types for the opaque type. let sub_diag = if self.span == other.span { @@ -972,7 +964,7 @@ pub struct ParamEnv<'tcx> { } impl<'tcx> rustc_type_ir::inherent::ParamEnv> for ParamEnv<'tcx> { - fn caller_bounds(self) -> impl IntoIterator> { + fn caller_bounds(self) -> impl inherent::SliceLike> { self.caller_bounds() } } @@ -1417,8 +1409,8 @@ impl Hash for FieldDef { impl<'tcx> FieldDef { /// Returns the type of this field. The resulting type is not normalized. The `arg` is /// typically obtained via the second field of [`TyKind::Adt`]. 
- pub fn ty(&self, tcx: TyCtxt<'tcx>, arg: GenericArgsRef<'tcx>) -> Ty<'tcx> { - tcx.type_of(self.did).instantiate(tcx, arg) + pub fn ty(&self, tcx: TyCtxt<'tcx>, args: GenericArgsRef<'tcx>) -> Ty<'tcx> { + tcx.type_of(self.did).instantiate(tcx, args) } /// Computes the `Ident` of this variant by looking up the `Span` @@ -1506,9 +1498,10 @@ impl<'tcx> TyCtxt<'tcx> { field_shuffle_seed ^= user_seed; } - for attr in self.get_attrs(did, sym::repr) { - for r in attr::parse_repr_attr(self.sess, attr) { - flags.insert(match r { + if let Some(reprs) = attr::find_attr!(self.get_all_attrs(did), AttributeKind::Repr(r) => r) + { + for (r, _) in reprs { + flags.insert(match *r { attr::ReprRust => ReprFlags::empty(), attr::ReprC => ReprFlags::IS_C, attr::ReprPacked(pack) => { @@ -1546,6 +1539,10 @@ impl<'tcx> TyCtxt<'tcx> { max_align = max_align.max(Some(align)); ReprFlags::empty() } + attr::ReprEmpty => { + /* skip these, they're just for diagnostics */ + ReprFlags::empty() + } }); } } @@ -1758,14 +1755,20 @@ impl<'tcx> TyCtxt<'tcx> { self, did: impl Into, attr: Symbol, + ) -> impl Iterator { + self.get_all_attrs(did).filter(move |a: &&hir::Attribute| a.has_name(attr)) + } + + /// Gets all attributes. + pub fn get_all_attrs( + self, + did: impl Into, ) -> impl Iterator { let did: DefId = did.into(); - let filter_fn = move |a: &&hir::Attribute| a.has_name(attr); if let Some(did) = did.as_local() { - self.hir().attrs(self.local_def_id_to_hir_id(did)).iter().filter(filter_fn) + self.hir().attrs(self.local_def_id_to_hir_id(did)).iter() } else { - debug_assert!(rustc_feature::encode_cross_crate(attr)); - self.attrs_for_def(did).iter().filter(filter_fn) + self.attrs_for_def(did).iter() } } diff --git a/compiler/rustc_middle/src/ty/normalize_erasing_regions.rs b/compiler/rustc_middle/src/ty/normalize_erasing_regions.rs index f611b69905c9c..e86e01451fefb 100644 --- a/compiler/rustc_middle/src/ty/normalize_erasing_regions.rs +++ b/compiler/rustc_middle/src/ty/normalize_erasing_regions.rs @@ -165,10 +165,14 @@ impl<'tcx> NormalizeAfterErasingRegionsFolder<'tcx> { arg: ty::GenericArg<'tcx>, ) -> ty::GenericArg<'tcx> { let arg = self.typing_env.as_query_input(arg); - self.tcx.try_normalize_generic_arg_after_erasing_regions(arg).unwrap_or_else(|_| bug!( - "Failed to normalize {:?}, maybe try to call `try_normalize_erasing_regions` instead", - arg.value - )) + self.tcx.try_normalize_generic_arg_after_erasing_regions(arg).unwrap_or_else(|_| { + bug!( + "Failed to normalize {:?} in typing_env={:?}, \ + maybe try to call `try_normalize_erasing_regions` instead", + arg.value, + self.typing_env, + ) + }) } } diff --git a/compiler/rustc_middle/src/ty/parameterized.rs b/compiler/rustc_middle/src/ty/parameterized.rs index 59f2555be95c0..6b6c6f3c72feb 100644 --- a/compiler/rustc_middle/src/ty/parameterized.rs +++ b/compiler/rustc_middle/src/ty/parameterized.rs @@ -96,7 +96,7 @@ trivially_parameterized_over_tcx! { rustc_hir::def_id::DefIndex, rustc_hir::definitions::DefKey, rustc_hir::OpaqueTyOrigin, - rustc_index::bit_set::BitSet, + rustc_index::bit_set::DenseBitSet, rustc_index::bit_set::FiniteBitSet, rustc_session::cstore::ForeignModule, rustc_session::cstore::LinkagePreference, @@ -109,7 +109,7 @@ trivially_parameterized_over_tcx! 
{ rustc_span::Symbol, rustc_span::def_id::DefPathHash, rustc_span::hygiene::SyntaxContextData, - rustc_span::symbol::Ident, + rustc_span::Ident, rustc_type_ir::Variance, rustc_hir::Attribute, } diff --git a/compiler/rustc_middle/src/ty/predicate.rs b/compiler/rustc_middle/src/ty/predicate.rs index 3ecaa3e22d39c..32d6455e82557 100644 --- a/compiler/rustc_middle/src/ty/predicate.rs +++ b/compiler/rustc_middle/src/ty/predicate.rs @@ -634,6 +634,28 @@ impl<'tcx> UpcastFrom, PolyProjectionPredicate<'tcx>> for Clause<'t } } +impl<'tcx> UpcastFrom, ty::Binder<'tcx, ty::HostEffectPredicate<'tcx>>> + for Predicate<'tcx> +{ + fn upcast_from( + from: ty::Binder<'tcx, ty::HostEffectPredicate<'tcx>>, + tcx: TyCtxt<'tcx>, + ) -> Self { + from.map_bound(ty::ClauseKind::HostEffect).upcast(tcx) + } +} + +impl<'tcx> UpcastFrom, ty::Binder<'tcx, ty::HostEffectPredicate<'tcx>>> + for Clause<'tcx> +{ + fn upcast_from( + from: ty::Binder<'tcx, ty::HostEffectPredicate<'tcx>>, + tcx: TyCtxt<'tcx>, + ) -> Self { + from.map_bound(ty::ClauseKind::HostEffect).upcast(tcx) + } +} + impl<'tcx> UpcastFrom, NormalizesTo<'tcx>> for Predicate<'tcx> { fn upcast_from(from: NormalizesTo<'tcx>, tcx: TyCtxt<'tcx>) -> Self { PredicateKind::NormalizesTo(from).upcast(tcx) diff --git a/compiler/rustc_middle/src/ty/print/mod.rs b/compiler/rustc_middle/src/ty/print/mod.rs index cc7467467603a..72f353f06ff52 100644 --- a/compiler/rustc_middle/src/ty/print/mod.rs +++ b/compiler/rustc_middle/src/ty/print/mod.rs @@ -45,10 +45,25 @@ pub trait Printer<'tcx>: Sized { &mut self, impl_def_id: DefId, args: &'tcx [GenericArg<'tcx>], - self_ty: Ty<'tcx>, - trait_ref: Option>, ) -> Result<(), PrintError> { - self.default_print_impl_path(impl_def_id, args, self_ty, trait_ref) + let tcx = self.tcx(); + let self_ty = tcx.type_of(impl_def_id); + let impl_trait_ref = tcx.impl_trait_ref(impl_def_id); + let (self_ty, impl_trait_ref) = if tcx.generics_of(impl_def_id).count() <= args.len() { + ( + self_ty.instantiate(tcx, args), + impl_trait_ref.map(|impl_trait_ref| impl_trait_ref.instantiate(tcx, args)), + ) + } else { + // We are probably printing a nested item inside of an impl. + // Use the identity substitutions for the impl. 
+ ( + self_ty.instantiate_identity(), + impl_trait_ref.map(|impl_trait_ref| impl_trait_ref.instantiate_identity()), + ) + }; + + self.default_print_impl_path(impl_def_id, self_ty, impl_trait_ref) } fn print_region(&mut self, region: ty::Region<'tcx>) -> Result<(), PrintError>; @@ -107,23 +122,7 @@ pub trait Printer<'tcx>: Sized { self.path_crate(def_id.krate) } - DefPathData::Impl => { - let generics = self.tcx().generics_of(def_id); - let self_ty = self.tcx().type_of(def_id); - let impl_trait_ref = self.tcx().impl_trait_ref(def_id); - let (self_ty, impl_trait_ref) = if args.len() >= generics.count() { - ( - self_ty.instantiate(self.tcx(), args), - impl_trait_ref.map(|i| i.instantiate(self.tcx(), args)), - ) - } else { - ( - self_ty.instantiate_identity(), - impl_trait_ref.map(|i| i.instantiate_identity()), - ) - }; - self.print_impl_path(def_id, args, self_ty, impl_trait_ref) - } + DefPathData::Impl => self.print_impl_path(def_id, args), _ => { let parent_def_id = DefId { index: key.parent.unwrap(), ..def_id }; @@ -201,7 +200,6 @@ pub trait Printer<'tcx>: Sized { fn default_print_impl_path( &mut self, impl_def_id: DefId, - _args: &'tcx [GenericArg<'tcx>], self_ty: Ty<'tcx>, impl_trait_ref: Option>, ) -> Result<(), PrintError> { @@ -291,6 +289,7 @@ fn characteristic_def_id_of_type_cached<'a>( | ty::Uint(_) | ty::Str | ty::FnPtr(..) + | ty::UnsafeBinder(_) | ty::Alias(..) | ty::Placeholder(..) | ty::Param(_) diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs index 40e0fb0087f54..ac900edefe12f 100644 --- a/compiler/rustc_middle/src/ty/print/pretty.rs +++ b/compiler/rustc_middle/src/ty/print/pretty.rs @@ -16,8 +16,7 @@ use rustc_hir::definitions::{DefKey, DefPathDataName}; use rustc_macros::{Lift, extension}; use rustc_session::Limit; use rustc_session::cstore::{ExternCrate, ExternCrateSource}; -use rustc_span::symbol::{Ident, Symbol, kw}; -use rustc_span::{FileNameDisplayPreference, sym}; +use rustc_span::{FileNameDisplayPreference, Ident, Symbol, kw, sym}; use rustc_type_ir::{Upcast as _, elaborate}; use smallvec::SmallVec; @@ -691,11 +690,22 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { if with_reduced_queries() { p!(print_def_path(def_id, args)); } else { - let sig = self.tcx().fn_sig(def_id).instantiate(self.tcx(), args); + let mut sig = self.tcx().fn_sig(def_id).instantiate(self.tcx(), args); + if self.tcx().codegen_fn_attrs(def_id).safe_target_features { + p!("#[target_features] "); + sig = sig.map_bound(|mut sig| { + sig.safety = hir::Safety::Safe; + sig + }); + } p!(print(sig), " {{", print_value_path(def_id, args), "}}"); } } ty::FnPtr(ref sig_tys, hdr) => p!(print(sig_tys.with(hdr))), + ty::UnsafeBinder(ref bound_ty) => { + // FIXME(unsafe_binders): Make this print `unsafe<>` rather than `for<>`. + self.wrap_binder(bound_ty, |ty, cx| cx.pretty_print_type(*ty))?; + } ty::Infer(infer_ty) => { if self.should_print_verbose() { p!(write("{:?}", ty.kind())); @@ -838,6 +848,12 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { p!( " upvar_tys=", print(args.as_coroutine().tupled_upvars_ty()), + " resume_ty=", + print(args.as_coroutine().resume_ty()), + " yield_ty=", + print(args.as_coroutine().yield_ty()), + " return_ty=", + print(args.as_coroutine().return_ty()), " witness=", print(args.as_coroutine().witness()) ); @@ -2375,8 +2391,8 @@ impl<'tcx> PrettyPrinter<'tcx> for FmtPrinter<'_, 'tcx> { match *region { ty::ReEarlyParam(ref data) => data.has_name(), + ty::ReLateParam(ty::LateParamRegion { kind, .. 
}) => kind.is_named(), ty::ReBound(_, ty::BoundRegion { kind: br, .. }) - | ty::ReLateParam(ty::LateParamRegion { bound_region: br, .. }) | ty::RePlaceholder(ty::Placeholder { bound: ty::BoundRegion { kind: br, .. }, .. }) => { @@ -2449,8 +2465,13 @@ impl<'tcx> FmtPrinter<'_, 'tcx> { return Ok(()); } } + ty::ReLateParam(ty::LateParamRegion { kind, .. }) => { + if let Some(name) = kind.get_name() { + p!(write("{}", name)); + return Ok(()); + } + } ty::ReBound(_, ty::BoundRegion { kind: br, .. }) - | ty::ReLateParam(ty::LateParamRegion { bound_region: br, .. }) | ty::RePlaceholder(ty::Placeholder { bound: ty::BoundRegion { kind: br, .. }, .. }) => { diff --git a/compiler/rustc_middle/src/ty/region.rs b/compiler/rustc_middle/src/ty/region.rs index 60c2c322d4ff4..14a2e5befe696 100644 --- a/compiler/rustc_middle/src/ty/region.rs +++ b/compiler/rustc_middle/src/ty/region.rs @@ -4,8 +4,7 @@ use rustc_data_structures::intern::Interned; use rustc_errors::MultiSpan; use rustc_hir::def_id::DefId; use rustc_macros::{HashStable, TyDecodable, TyEncodable}; -use rustc_span::symbol::{Symbol, kw, sym}; -use rustc_span::{DUMMY_SP, ErrorGuaranteed}; +use rustc_span::{DUMMY_SP, ErrorGuaranteed, Symbol, kw, sym}; use rustc_type_ir::RegionKind as IrRegionKind; pub use rustc_type_ir::RegionVid; use tracing::debug; @@ -70,9 +69,10 @@ impl<'tcx> Region<'tcx> { pub fn new_late_param( tcx: TyCtxt<'tcx>, scope: DefId, - bound_region: ty::BoundRegionKind, + kind: LateParamRegionKind, ) -> Region<'tcx> { - tcx.intern_region(ty::ReLateParam(ty::LateParamRegion { scope, bound_region })) + let data = LateParamRegion { scope, kind }; + tcx.intern_region(ty::ReLateParam(data)) } #[inline] @@ -125,8 +125,8 @@ impl<'tcx> Region<'tcx> { match kind { ty::ReEarlyParam(region) => Region::new_early_param(tcx, region), ty::ReBound(debruijn, region) => Region::new_bound(tcx, debruijn, region), - ty::ReLateParam(ty::LateParamRegion { scope, bound_region }) => { - Region::new_late_param(tcx, scope, bound_region) + ty::ReLateParam(ty::LateParamRegion { scope, kind }) => { + Region::new_late_param(tcx, scope, kind) } ty::ReStatic => tcx.lifetimes.re_static, ty::ReVar(vid) => Region::new_var(tcx, vid), @@ -166,7 +166,7 @@ impl<'tcx> Region<'tcx> { match *self { ty::ReEarlyParam(ebr) => Some(ebr.name), ty::ReBound(_, br) => br.kind.get_name(), - ty::ReLateParam(fr) => fr.bound_region.get_name(), + ty::ReLateParam(fr) => fr.kind.get_name(), ty::ReStatic => Some(kw::StaticLifetime), ty::RePlaceholder(placeholder) => placeholder.bound.kind.get_name(), _ => None, @@ -188,7 +188,7 @@ impl<'tcx> Region<'tcx> { match *self { ty::ReEarlyParam(ebr) => ebr.has_name(), ty::ReBound(_, br) => br.kind.is_named(), - ty::ReLateParam(fr) => fr.bound_region.is_named(), + ty::ReLateParam(fr) => fr.kind.is_named(), ty::ReStatic => true, ty::ReVar(..) => false, ty::RePlaceholder(placeholder) => placeholder.bound.kind.is_named(), @@ -311,7 +311,7 @@ impl<'tcx> Region<'tcx> { Some(tcx.generics_of(binding_item).region_param(ebr, tcx).def_id) } ty::ReLateParam(ty::LateParamRegion { - bound_region: ty::BoundRegionKind::Named(def_id, _), + kind: ty::LateParamRegionKind::Named(def_id, _), .. }) => Some(def_id), _ => None, @@ -359,7 +359,71 @@ impl std::fmt::Debug for EarlyParamRegion { /// different parameters apart. 
pub struct LateParamRegion { pub scope: DefId, - pub bound_region: BoundRegionKind, + pub kind: LateParamRegionKind, +} + +/// When liberating bound regions, we map their [`BoundRegionKind`] +/// to this as we need to track the index of anonymous regions. We +/// otherwise end up liberating multiple bound regions to the same +/// late-bound region. +#[derive(Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable, Copy)] +#[derive(HashStable)] +pub enum LateParamRegionKind { + /// An anonymous region parameter for a given fn (&T) + /// + /// Unlike [`BoundRegionKind::Anon`], this tracks the index of the + /// liberated bound region. + /// + /// We should ideally never liberate anonymous regions, but do so for the + /// sake of diagnostics in `FnCtxt::sig_of_closure_with_expectation`. + Anon(u32), + + /// Named region parameters for functions (a in &'a T) + /// + /// The `DefId` is needed to distinguish free regions in + /// the event of shadowing. + Named(DefId, Symbol), + + /// Anonymous region for the implicit env pointer parameter + /// to a closure + ClosureEnv, +} + +impl LateParamRegionKind { + pub fn from_bound(var: BoundVar, br: BoundRegionKind) -> LateParamRegionKind { + match br { + BoundRegionKind::Anon => LateParamRegionKind::Anon(var.as_u32()), + BoundRegionKind::Named(def_id, name) => LateParamRegionKind::Named(def_id, name), + BoundRegionKind::ClosureEnv => LateParamRegionKind::ClosureEnv, + } + } + + pub fn is_named(&self) -> bool { + match *self { + LateParamRegionKind::Named(_, name) => { + name != kw::UnderscoreLifetime && name != kw::Empty + } + _ => false, + } + } + + pub fn get_name(&self) -> Option { + if self.is_named() { + match *self { + LateParamRegionKind::Named(_, name) => return Some(name), + _ => unreachable!(), + } + } + + None + } + + pub fn get_id(&self) -> Option { + match *self { + LateParamRegionKind::Named(id, _) => Some(id), + _ => None, + } + } } #[derive(Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable, Copy)] diff --git a/compiler/rustc_middle/src/ty/rvalue_scopes.rs b/compiler/rustc_middle/src/ty/rvalue_scopes.rs index 57c2d7623d2d7..b00c8169a36a7 100644 --- a/compiler/rustc_middle/src/ty/rvalue_scopes.rs +++ b/compiler/rustc_middle/src/ty/rvalue_scopes.rs @@ -35,7 +35,7 @@ impl RvalueScopes { // if there's one. Static items, for instance, won't // have an enclosing scope, hence no scope will be // returned. 
- let mut id = Scope { id: expr_id, data: ScopeData::Node }; + let mut id = Scope { local_id: expr_id, data: ScopeData::Node }; let mut backwards_incompatible = None; while let Some(&(p, _)) = region_scope_tree.parent_map.get(&id) { @@ -60,7 +60,7 @@ impl RvalueScopes { if backwards_incompatible.is_none() { backwards_incompatible = region_scope_tree .backwards_incompatible_scope - .get(&p.item_local_id()) + .get(&p.local_id) .copied(); } id = p @@ -76,7 +76,7 @@ impl RvalueScopes { pub fn record_rvalue_scope(&mut self, var: hir::ItemLocalId, lifetime: Option) { debug!("record_rvalue_scope(var={var:?}, lifetime={lifetime:?})"); if let Some(lifetime) = lifetime { - assert!(var != lifetime.item_local_id()); + assert!(var != lifetime.local_id); } self.map.insert(var, lifetime); } diff --git a/compiler/rustc_middle/src/ty/structural_impls.rs b/compiler/rustc_middle/src/ty/structural_impls.rs index 0af0a5f170d73..68cb56f35837d 100644 --- a/compiler/rustc_middle/src/ty/structural_impls.rs +++ b/compiler/rustc_middle/src/ty/structural_impls.rs @@ -77,7 +77,23 @@ impl fmt::Debug for ty::BoundRegionKind { impl fmt::Debug for ty::LateParamRegion { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "ReLateParam({:?}, {:?})", self.scope, self.bound_region) + write!(f, "ReLateParam({:?}, {:?})", self.scope, self.kind) + } +} + +impl fmt::Debug for ty::LateParamRegionKind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + ty::LateParamRegionKind::Anon(idx) => write!(f, "BrAnon({idx})"), + ty::LateParamRegionKind::Named(did, name) => { + if did.is_crate_root() { + write!(f, "BrNamed({name})") + } else { + write!(f, "BrNamed({did:?}, {name})") + } + } + ty::LateParamRegionKind::ClosureEnv => write!(f, "BrEnv"), + } } } @@ -224,7 +240,6 @@ TrivialTypeTraversalImpls! { ::rustc_ast::InlineAsmOptions, ::rustc_ast::InlineAsmTemplatePiece, ::rustc_ast::NodeId, - ::rustc_span::symbol::Symbol, ::rustc_hir::def::Res, ::rustc_hir::def_id::LocalDefId, ::rustc_hir::ByRef, @@ -252,8 +267,9 @@ TrivialTypeTraversalImpls! 
{ crate::ty::Placeholder, crate::ty::LateParamRegion, crate::ty::adjustment::PointerCoercion, + ::rustc_span::Ident, ::rustc_span::Span, - ::rustc_span::symbol::Ident, + ::rustc_span::Symbol, ty::BoundVar, ty::ValTree<'tcx>, } @@ -377,6 +393,7 @@ impl<'tcx> TypeSuperFoldable> for Ty<'tcx> { ty::Tuple(ts) => ty::Tuple(ts.try_fold_with(folder)?), ty::FnDef(def_id, args) => ty::FnDef(def_id, args.try_fold_with(folder)?), ty::FnPtr(sig_tys, hdr) => ty::FnPtr(sig_tys.try_fold_with(folder)?, hdr), + ty::UnsafeBinder(f) => ty::UnsafeBinder(f.try_fold_with(folder)?), ty::Ref(r, ty, mutbl) => { ty::Ref(r.try_fold_with(folder)?, ty.try_fold_with(folder)?, mutbl) } @@ -427,6 +444,7 @@ impl<'tcx> TypeSuperVisitable> for Ty<'tcx> { ty::Tuple(ts) => ts.visit_with(visitor), ty::FnDef(_, args) => args.visit_with(visitor), ty::FnPtr(ref sig_tys, _) => sig_tys.visit_with(visitor), + ty::UnsafeBinder(ref f) => f.visit_with(visitor), ty::Ref(r, ty, _) => { try_visit!(r.visit_with(visitor)); ty.visit_with(visitor) diff --git a/compiler/rustc_middle/src/ty/sty.rs b/compiler/rustc_middle/src/ty/sty.rs index 3fbc23924f59a..bf37ae05c8251 100644 --- a/compiler/rustc_middle/src/ty/sty.rs +++ b/compiler/rustc_middle/src/ty/sty.rs @@ -15,8 +15,7 @@ use rustc_hir as hir; use rustc_hir::LangItem; use rustc_hir::def_id::DefId; use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeFoldable, extension}; -use rustc_span::symbol::{Symbol, sym}; -use rustc_span::{DUMMY_SP, Span}; +use rustc_span::{DUMMY_SP, Span, Symbol, sym}; use rustc_type_ir::TyKind::*; use rustc_type_ir::visit::TypeVisitableExt; use rustc_type_ir::{self as ir, BoundVar, CollectAndApply, DynKind}; @@ -28,7 +27,7 @@ use crate::infer::canonical::Canonical; use crate::ty::InferTy::*; use crate::ty::{ self, AdtDef, BoundRegionKind, Discr, GenericArg, GenericArgs, GenericArgsRef, List, ParamEnv, - Region, Ty, TyCtxt, TypeFlags, TypeSuperVisitable, TypeVisitable, TypeVisitor, + Region, Ty, TyCtxt, TypeFlags, TypeSuperVisitable, TypeVisitable, TypeVisitor, UintTy, }; // Re-export and re-parameterize some `I = TyCtxt<'tcx>` types here @@ -402,7 +401,7 @@ impl<'tcx> Ty<'tcx> { /// The more specific methods will often optimize their creation. #[allow(rustc::usage_of_ty_tykind)] #[inline] - pub fn new(tcx: TyCtxt<'tcx>, st: TyKind<'tcx>) -> Ty<'tcx> { + fn new(tcx: TyCtxt<'tcx>, st: TyKind<'tcx>) -> Ty<'tcx> { tcx.mk_ty_from_kind(st) } @@ -614,6 +613,41 @@ impl<'tcx> Ty<'tcx> { #[inline] pub fn new_adt(tcx: TyCtxt<'tcx>, def: AdtDef<'tcx>, args: GenericArgsRef<'tcx>) -> Ty<'tcx> { tcx.debug_assert_args_compatible(def.did(), args); + if cfg!(debug_assertions) { + match tcx.def_kind(def.did()) { + DefKind::Struct | DefKind::Union | DefKind::Enum => {} + DefKind::Mod + | DefKind::Variant + | DefKind::Trait + | DefKind::TyAlias + | DefKind::ForeignTy + | DefKind::TraitAlias + | DefKind::AssocTy + | DefKind::TyParam + | DefKind::Fn + | DefKind::Const + | DefKind::ConstParam + | DefKind::Static { .. } + | DefKind::Ctor(..) + | DefKind::AssocFn + | DefKind::AssocConst + | DefKind::Macro(..) + | DefKind::ExternCrate + | DefKind::Use + | DefKind::ForeignMod + | DefKind::AnonConst + | DefKind::InlineConst + | DefKind::OpaqueTy + | DefKind::Field + | DefKind::LifetimeParam + | DefKind::GlobalAsm + | DefKind::Impl { .. 
} + | DefKind::Closure + | DefKind::SyntheticCoroutineBody => { + bug!("not an adt: {def:?} ({:?})", tcx.def_kind(def.did())) + } + } + } Ty::new(tcx, Adt(def, args)) } @@ -674,6 +708,11 @@ impl<'tcx> Ty<'tcx> { Ty::new(tcx, FnPtr(sig_tys, hdr)) } + #[inline] + pub fn new_unsafe_binder(tcx: TyCtxt<'tcx>, b: Binder<'tcx, Ty<'tcx>>) -> Ty<'tcx> { + Ty::new(tcx, UnsafeBinder(b.into())) + } + #[inline] pub fn new_dynamic( tcx: TyCtxt<'tcx>, @@ -768,7 +807,7 @@ impl<'tcx> Ty<'tcx> { } } }); - Ty::new(tcx, Adt(adt_def, args)) + Ty::new_adt(tcx, adt_def, args) } #[inline] @@ -963,6 +1002,10 @@ impl<'tcx> rustc_type_ir::inherent::Ty> for Ty<'tcx> { Ty::new_pat(interner, ty, pat) } + fn new_unsafe_binder(interner: TyCtxt<'tcx>, ty: ty::Binder<'tcx, Ty<'tcx>>) -> Self { + Ty::new_unsafe_binder(interner, ty) + } + fn new_unit(interner: TyCtxt<'tcx>) -> Self { interner.types.unit } @@ -1009,6 +1052,18 @@ impl<'tcx> Ty<'tcx> { } } + /// Check if type is an `usize`. + #[inline] + pub fn is_usize(self) -> bool { + matches!(self.kind(), Uint(UintTy::Usize)) + } + + /// Check if type is an `usize` or an integral type variable. + #[inline] + pub fn is_usize_like(self) -> bool { + matches!(self.kind(), Uint(UintTy::Usize) | Infer(IntVar(_))) + } + #[inline] pub fn is_never(self) -> bool { matches!(self.kind(), Never) @@ -1481,6 +1536,7 @@ impl<'tcx> Ty<'tcx> { | ty::CoroutineWitness(..) | ty::Never | ty::Tuple(_) + | ty::UnsafeBinder(_) | ty::Error(_) | ty::Infer(IntVar(_) | FloatVar(_)) => tcx.types.u8, @@ -1660,6 +1716,8 @@ impl<'tcx> Ty<'tcx> { // metadata of `tail`. ty::Param(_) | ty::Alias(..) => Err(tail), + | ty::UnsafeBinder(_) => todo!("FIXME(unsafe_binder)"), + ty::Infer(ty::TyVar(_)) | ty::Pat(..) | ty::Bound(..) @@ -1820,6 +1878,7 @@ impl<'tcx> Ty<'tcx> { | ty::Float(_) | ty::FnDef(..) | ty::FnPtr(..) + | ty::UnsafeBinder(_) | ty::RawPtr(..) | ty::Char | ty::Ref(..) @@ -1899,6 +1958,8 @@ impl<'tcx> Ty<'tcx> { // Might be, but not "trivial" so just giving the safe answer. ty::Adt(..) | ty::Closure(..) | ty::CoroutineClosure(..) => false, + ty::UnsafeBinder(_) => false, + // Needs normalization or revealing to determine, so no is the safe answer. ty::Alias(..) => false, @@ -1977,7 +2038,8 @@ impl<'tcx> Ty<'tcx> { | Coroutine(_, _) | CoroutineWitness(..) | Never - | Tuple(_) => true, + | Tuple(_) + | UnsafeBinder(_) => true, Error(_) | Infer(_) | Alias(_, _) | Param(_) | Bound(_, _) | Placeholder(_) => false, } } diff --git a/compiler/rustc_middle/src/ty/trait_def.rs b/compiler/rustc_middle/src/ty/trait_def.rs index 188107e5d5498..743ea33b20a27 100644 --- a/compiler/rustc_middle/src/ty/trait_def.rs +++ b/compiler/rustc_middle/src/ty/trait_def.rs @@ -70,12 +70,12 @@ pub struct TraitDef { /// Whether to add a builtin `dyn Trait: Trait` implementation. /// This is enabled for all traits except ones marked with - /// `#[rustc_deny_explicit_impl(implement_via_object = false)]`. + /// `#[rustc_do_not_implement_via_object]`. pub implement_via_object: bool, /// Whether a trait is fully built-in, and any implementation is disallowed. /// This only applies to built-in traits, and is marked via - /// `#[rustc_deny_explicit_impl(implement_via_object = ...)]`. + /// `#[rustc_deny_explicit_impl]`. 
pub deny_explicit_impl: bool, } diff --git a/compiler/rustc_middle/src/ty/typeck_results.rs b/compiler/rustc_middle/src/ty/typeck_results.rs index 551c113aa5922..f94f52e4f6180 100644 --- a/compiler/rustc_middle/src/ty/typeck_results.rs +++ b/compiler/rustc_middle/src/ty/typeck_results.rs @@ -74,9 +74,8 @@ pub struct TypeckResults<'tcx> { pat_binding_modes: ItemLocalMap, /// Top-level patterns whose match ergonomics need to be desugared by the Rust 2021 -> 2024 - /// migration lint. The boolean indicates whether the emitted diagnostic should be a hard error - /// (if any of the incompatible pattern elements are in edition 2024). - rust_2024_migration_desugared_pats: ItemLocalMap, + /// migration lint. Problematic subpatterns are stored in the `Vec` for the lint to highlight. + rust_2024_migration_desugared_pats: ItemLocalMap>, /// Stores the types which were implicitly dereferenced in pattern binding modes /// for later usage in THIR lowering. For example, @@ -419,14 +418,18 @@ impl<'tcx> TypeckResults<'tcx> { LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.pat_adjustments } } - pub fn rust_2024_migration_desugared_pats(&self) -> LocalTableInContext<'_, bool> { + pub fn rust_2024_migration_desugared_pats( + &self, + ) -> LocalTableInContext<'_, Vec<(Span, String)>> { LocalTableInContext { hir_owner: self.hir_owner, data: &self.rust_2024_migration_desugared_pats, } } - pub fn rust_2024_migration_desugared_pats_mut(&mut self) -> LocalTableInContextMut<'_, bool> { + pub fn rust_2024_migration_desugared_pats_mut( + &mut self, + ) -> LocalTableInContextMut<'_, Vec<(Span, String)>> { LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.rust_2024_migration_desugared_pats, diff --git a/compiler/rustc_middle/src/ty/util.rs b/compiler/rustc_middle/src/ty/util.rs index fc3530e3dde97..75893da0e5836 100644 --- a/compiler/rustc_middle/src/ty/util.rs +++ b/compiler/rustc_middle/src/ty/util.rs @@ -876,6 +876,11 @@ impl<'tcx> TyCtxt<'tcx> { /// [public]: TyCtxt::is_private_dep /// [direct]: rustc_session::cstore::ExternCrate::is_direct pub fn is_user_visible_dep(self, key: CrateNum) -> bool { + // `#![rustc_private]` overrides defaults to make private dependencies usable. + if self.features().enabled(sym::rustc_private) { + return true; + } + // | Private | Direct | Visible | | // |---------|--------|---------|--------------------| // | Yes | Yes | Yes | !true || true | @@ -1241,6 +1246,7 @@ impl<'tcx> Ty<'tcx> { | ty::Foreign(_) | ty::Coroutine(..) | ty::CoroutineWitness(..) + | ty::UnsafeBinder(_) | ty::Infer(_) | ty::Alias(..) | ty::Param(_) @@ -1281,6 +1287,7 @@ impl<'tcx> Ty<'tcx> { | ty::Foreign(_) | ty::Coroutine(..) | ty::CoroutineWitness(..) + | ty::UnsafeBinder(_) | ty::Infer(_) | ty::Alias(..) | ty::Param(_) @@ -1322,6 +1329,9 @@ impl<'tcx> Ty<'tcx> { | ty::Infer(ty::FreshIntTy(_)) | ty::Infer(ty::FreshFloatTy(_)) => AsyncDropGlueMorphology::Noop, + // FIXME(unsafe_binders): + ty::UnsafeBinder(_) => todo!(), + ty::Tuple(tys) if tys.is_empty() => AsyncDropGlueMorphology::Noop, ty::Adt(adt_def, _) if adt_def.is_manually_drop() => AsyncDropGlueMorphology::Noop, @@ -1522,7 +1532,7 @@ impl<'tcx> Ty<'tcx> { false } - ty::Foreign(_) | ty::CoroutineWitness(..) | ty::Error(_) => false, + ty::Foreign(_) | ty::CoroutineWitness(..) | ty::Error(_) | ty::UnsafeBinder(_) => false, } } @@ -1681,7 +1691,8 @@ pub fn needs_drop_components_with_async<'tcx>( | ty::Closure(..) | ty::CoroutineClosure(..) | ty::Coroutine(..) - | ty::CoroutineWitness(..) 
=> Ok(smallvec![ty]), + | ty::CoroutineWitness(..) + | ty::UnsafeBinder(_) => Ok(smallvec![ty]), } } @@ -1772,9 +1783,16 @@ pub fn intrinsic_raw(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Option { + !has_body + } + _ => true, + }; Some(ty::IntrinsicDef { name: tcx.item_name(def_id.into()), - must_be_overridden: tcx.has_attr(def_id, sym::rustc_intrinsic_must_be_overridden), + must_be_overridden, const_stable: tcx.has_attr(def_id, sym::rustc_intrinsic_const_stable_indirect), }) } else { diff --git a/compiler/rustc_middle/src/ty/walk.rs b/compiler/rustc_middle/src/ty/walk.rs index a93a146ec7c42..2dcba8c2f82c4 100644 --- a/compiler/rustc_middle/src/ty/walk.rs +++ b/compiler/rustc_middle/src/ty/walk.rs @@ -194,6 +194,9 @@ fn push_inner<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent: GenericArg<'tcx>) sig_tys.skip_binder().inputs_and_output.iter().rev().map(|ty| ty.into()), ); } + ty::UnsafeBinder(bound_ty) => { + stack.push(bound_ty.skip_binder().into()); + } }, GenericArgKind::Lifetime(_) => {} GenericArgKind::Const(parent_ct) => match parent_ct.kind() { diff --git a/compiler/rustc_middle/src/util/find_self_call.rs b/compiler/rustc_middle/src/util/find_self_call.rs index ec6051d0a771a..0fdd352073860 100644 --- a/compiler/rustc_middle/src/util/find_self_call.rs +++ b/compiler/rustc_middle/src/util/find_self_call.rs @@ -17,26 +17,29 @@ pub fn find_self_call<'tcx>( debug!("find_self_call(local={:?}): terminator={:?}", local, body[block].terminator); if let Some(Terminator { kind: TerminatorKind::Call { func, args, .. }, .. }) = &body[block].terminator + && let Operand::Constant(box ConstOperand { const_, .. }) = func + && let ty::FnDef(def_id, fn_args) = *const_.ty().kind() + && let Some(ty::AssocItem { fn_has_self_parameter: true, .. }) = + tcx.opt_associated_item(def_id) + && let [Spanned { node: Operand::Move(self_place) | Operand::Copy(self_place), .. }, ..] = + **args { - debug!("find_self_call: func={:?}", func); - if let Operand::Constant(box ConstOperand { const_, .. }) = func { - if let ty::FnDef(def_id, fn_args) = *const_.ty().kind() { - if let Some(ty::AssocItem { fn_has_self_parameter: true, .. }) = - tcx.opt_associated_item(def_id) - { - debug!("find_self_call: args={:?}", fn_args); - if let [ - Spanned { - node: Operand::Move(self_place) | Operand::Copy(self_place), .. - }, - .., - ] = **args - { - if self_place.as_local() == Some(local) { - return Some((def_id, fn_args)); - } - } - } + if self_place.as_local() == Some(local) { + return Some((def_id, fn_args)); + } + + // Handle the case where `self_place` gets reborrowed. + // This happens when the receiver is `&T`. 
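// Illustration (not part of the patch): the reborrow case corresponds to surface code
// where the receiver local is itself a `&T`, e.g. this hypothetical function:
//
//     struct Counter { n: u32 }
//     impl Counter {
//         fn get(&self) -> u32 { self.n }
//     }
//     fn read(c: &Counter) -> u32 {
//         // The autoref for the call reborrows `c`, so MIR passes roughly
//         //   _2 = &(*_1);                 // reborrow of the `&Counter` local
//         //   _0 = Counter::get(move _2);
//         // i.e. the call's receiver is `_2`, not the original local `_1`,
//         // which is what the loop below looks through.
//         c.get()
//     }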
+ for stmt in &body[block].statements { + if let StatementKind::Assign(box (place, rvalue)) = &stmt.kind + && let Some(reborrow_local) = place.as_local() + && self_place.as_local() == Some(reborrow_local) + && let Rvalue::Ref(_, _, deref_place) = rvalue + && let PlaceRef { local: deref_local, projection: [ProjectionElem::Deref] } = + deref_place.as_ref() + && deref_local == local + { + return Some((def_id, fn_args)); } } } diff --git a/compiler/rustc_middle/src/util/mod.rs b/compiler/rustc_middle/src/util/mod.rs index 8dafc42264485..097a868191c15 100644 --- a/compiler/rustc_middle/src/util/mod.rs +++ b/compiler/rustc_middle/src/util/mod.rs @@ -1,9 +1,7 @@ pub mod bug; -pub mod call_kind; pub mod common; pub mod find_self_call; -pub use call_kind::{CallDesugaringKind, CallKind, call_kind}; pub use find_self_call::find_self_call; #[derive(Default, Copy, Clone)] diff --git a/compiler/rustc_mir_build/Cargo.toml b/compiler/rustc_mir_build/Cargo.toml index 119047227431d..1f3689926bcf3 100644 --- a/compiler/rustc_mir_build/Cargo.toml +++ b/compiler/rustc_mir_build/Cargo.toml @@ -12,6 +12,7 @@ rustc_abi = { path = "../rustc_abi" } rustc_apfloat = "0.2.0" rustc_arena = { path = "../rustc_arena" } rustc_ast = { path = "../rustc_ast" } +rustc_attr_parsing = { path = "../rustc_attr_parsing" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } diff --git a/compiler/rustc_mir_build/messages.ftl b/compiler/rustc_mir_build/messages.ftl index f647486f62ade..5203c33c968fb 100644 --- a/compiler/rustc_mir_build/messages.ftl +++ b/compiler/rustc_mir_build/messages.ftl @@ -118,6 +118,12 @@ mir_build_extern_static_requires_unsafe_unsafe_op_in_unsafe_fn_allowed = .note = extern statics are not controlled by the Rust type system: invalid data, aliasing violations or data races will cause undefined behavior .label = use of extern static +mir_build_force_inline = + `{$callee}` is incompatible with `#[rustc_force_inline]` + .attr = annotation here + .callee = `{$callee}` defined here + .note = incompatible due to: {$reason} + mir_build_inform_irrefutable = `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant mir_build_initializing_type_with_requires_unsafe = @@ -285,12 +291,7 @@ mir_build_pointer_pattern = function pointers and raw pointers not derived from mir_build_privately_uninhabited = pattern `{$witness_1}` is currently uninhabited, but this variant contains private fields which may become inhabited in the future -mir_build_rust_2024_incompatible_pat = patterns are not allowed to reset the default binding mode in edition 2024 - -mir_build_rustc_box_attribute_error = `#[rustc_box]` attribute used incorrectly - .attributes = no other attributes may be applied - .not_box = `#[rustc_box]` may only be applied to a `Box::new()` call - .missing_box = `#[rustc_box]` requires the `owned_box` lang item +mir_build_rust_2024_incompatible_pat = this pattern relies on behavior which may change in edition 2024 mir_build_static_in_pattern = statics cannot be referenced in patterns .label = can't be used in patterns diff --git a/compiler/rustc_mir_build/src/build/block.rs b/compiler/rustc_mir_build/src/builder/block.rs similarity index 98% rename from compiler/rustc_mir_build/src/build/block.rs rename to compiler/rustc_mir_build/src/builder/block.rs index 89e64015bc439..ba63a97de89fb 100644 --- a/compiler/rustc_mir_build/src/build/block.rs +++ 
b/compiler/rustc_mir_build/src/builder/block.rs @@ -5,9 +5,9 @@ use rustc_middle::{span_bug, ty}; use rustc_span::Span; use tracing::debug; -use crate::build::ForGuard::OutsideGuard; -use crate::build::matches::{DeclareLetBindings, EmitStorageLive, ScheduleDrops}; -use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder}; +use crate::builder::ForGuard::OutsideGuard; +use crate::builder::matches::{DeclareLetBindings, EmitStorageLive, ScheduleDrops}; +use crate::builder::{BlockAnd, BlockAndExtension, BlockFrame, Builder}; impl<'a, 'tcx> Builder<'a, 'tcx> { pub(crate) fn ast_block( diff --git a/compiler/rustc_mir_build/src/build/cfg.rs b/compiler/rustc_mir_build/src/builder/cfg.rs similarity index 97% rename from compiler/rustc_mir_build/src/build/cfg.rs rename to compiler/rustc_mir_build/src/builder/cfg.rs index 9c5ee5b0996e5..42212f2518b0a 100644 --- a/compiler/rustc_mir_build/src/build/cfg.rs +++ b/compiler/rustc_mir_build/src/builder/cfg.rs @@ -4,7 +4,7 @@ use rustc_middle::mir::*; use rustc_middle::ty::TyCtxt; use tracing::debug; -use crate::build::CFG; +use crate::builder::CFG; impl<'tcx> CFG<'tcx> { pub(crate) fn block_data(&self, blk: BasicBlock) -> &BasicBlockData<'tcx> { @@ -19,7 +19,7 @@ impl<'tcx> CFG<'tcx> { // it as #[inline(never)] to keep rustc's stack use in check. #[inline(never)] pub(crate) fn start_new_block(&mut self) -> BasicBlock { - self.basic_blocks.push(BasicBlockData::new(None)) + self.basic_blocks.push(BasicBlockData::new(None, false)) } pub(crate) fn start_new_cleanup_block(&mut self) -> BasicBlock { diff --git a/compiler/rustc_mir_build/src/build/coverageinfo.rs b/compiler/rustc_mir_build/src/builder/coverageinfo.rs similarity index 99% rename from compiler/rustc_mir_build/src/build/coverageinfo.rs rename to compiler/rustc_mir_build/src/builder/coverageinfo.rs index 52a4a4b4b510d..a80bd4f3c8009 100644 --- a/compiler/rustc_mir_build/src/build/coverageinfo.rs +++ b/compiler/rustc_mir_build/src/builder/coverageinfo.rs @@ -8,8 +8,8 @@ use rustc_middle::thir::{ExprId, ExprKind, Pat, Thir}; use rustc_middle::ty::TyCtxt; use rustc_span::def_id::LocalDefId; -use crate::build::coverageinfo::mcdc::MCDCInfoBuilder; -use crate::build::{Builder, CFG}; +use crate::builder::coverageinfo::mcdc::MCDCInfoBuilder; +use crate::builder::{Builder, CFG}; mod mcdc; diff --git a/compiler/rustc_mir_build/src/build/coverageinfo/mcdc.rs b/compiler/rustc_mir_build/src/builder/coverageinfo/mcdc.rs similarity index 99% rename from compiler/rustc_mir_build/src/build/coverageinfo/mcdc.rs rename to compiler/rustc_mir_build/src/builder/coverageinfo/mcdc.rs index 343d400004315..6b4871dc1fcc8 100644 --- a/compiler/rustc_mir_build/src/build/coverageinfo/mcdc.rs +++ b/compiler/rustc_mir_build/src/builder/coverageinfo/mcdc.rs @@ -9,7 +9,7 @@ use rustc_middle::thir::LogicalOp; use rustc_middle::ty::TyCtxt; use rustc_span::Span; -use crate::build::Builder; +use crate::builder::Builder; use crate::errors::MCDCExceedsConditionLimit; /// LLVM uses `i16` to represent condition id. 
Hence `i16::MAX` is the hard limit for number of diff --git a/compiler/rustc_mir_build/src/build/custom/mod.rs b/compiler/rustc_mir_build/src/builder/custom/mod.rs similarity index 98% rename from compiler/rustc_mir_build/src/build/custom/mod.rs rename to compiler/rustc_mir_build/src/builder/custom/mod.rs index 34cdc288f0ba1..ca2e1dd7a1a81 100644 --- a/compiler/rustc_mir_build/src/build/custom/mod.rs +++ b/compiler/rustc_mir_build/src/builder/custom/mod.rs @@ -64,7 +64,7 @@ pub(super) fn build_custom_mir<'tcx>( }; body.local_decls.push(LocalDecl::new(return_ty, return_ty_span)); - body.basic_blocks_mut().push(BasicBlockData::new(None)); + body.basic_blocks_mut().push(BasicBlockData::new(None, false)); body.source_scopes.push(SourceScopeData { span, parent_scope: None, diff --git a/compiler/rustc_mir_build/src/build/custom/parse.rs b/compiler/rustc_mir_build/src/builder/custom/parse.rs similarity index 96% rename from compiler/rustc_mir_build/src/build/custom/parse.rs rename to compiler/rustc_mir_build/src/builder/custom/parse.rs index 538068e1fac8e..91e284604b68c 100644 --- a/compiler/rustc_mir_build/src/build/custom/parse.rs +++ b/compiler/rustc_mir_build/src/builder/custom/parse.rs @@ -199,10 +199,12 @@ impl<'a, 'tcx> ParseCtxt<'a, 'tcx> { match &self.thir[stmt].kind { StmtKind::Let { pattern, initializer: Some(initializer), .. } => { let (var, ..) = self.parse_var(pattern)?; - let mut data = BasicBlockData::new(None); - data.is_cleanup = parse_by_kind!(self, *initializer, _, "basic block declaration", - @variant(mir_basic_block, Normal) => false, - @variant(mir_basic_block, Cleanup) => true, + let data = BasicBlockData::new( + None, + parse_by_kind!(self, *initializer, _, "basic block declaration", + @variant(mir_basic_block, Normal) => false, + @variant(mir_basic_block, Cleanup) => true, + ), ); let block = self.body.basic_blocks_mut().push(data); self.block_map.insert(var, block); @@ -308,8 +310,7 @@ impl<'a, 'tcx> ParseCtxt<'a, 'tcx> { ExprKind::Block { block } => &self.thir[*block], ); - let mut data = BasicBlockData::new(None); - data.is_cleanup = is_cleanup; + let mut data = BasicBlockData::new(None, is_cleanup); for stmt_id in &*block.stmts { let stmt = self.statement_as_expr(*stmt_id)?; let span = self.thir[stmt].span; diff --git a/compiler/rustc_mir_build/src/build/custom/parse/instruction.rs b/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs similarity index 98% rename from compiler/rustc_mir_build/src/build/custom/parse/instruction.rs rename to compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs index 67114efdff52e..3dd5de0223081 100644 --- a/compiler/rustc_mir_build/src/build/custom/parse/instruction.rs +++ b/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs @@ -9,8 +9,8 @@ use rustc_span::Span; use rustc_span::source_map::Spanned; use super::{PResult, ParseCtxt, parse_by_kind}; -use crate::build::custom::ParseError; -use crate::build::expr::as_constant::as_constant_inner; +use crate::builder::custom::ParseError; +use crate::builder::expr::as_constant::as_constant_inner; impl<'a, 'tcx> ParseCtxt<'a, 'tcx> { pub(crate) fn parse_statement(&self, expr_id: ExprId) -> PResult> { @@ -246,7 +246,6 @@ impl<'a, 'tcx> ParseCtxt<'a, 'tcx> { let offset = self.parse_operand(args[1])?; Ok(Rvalue::BinaryOp(BinOp::Offset, Box::new((ptr, offset)))) }, - @call(mir_len, args) => Ok(Rvalue::Len(self.parse_place(args[0])?)), @call(mir_ptr_metadata, args) => Ok(Rvalue::UnaryOp(UnOp::PtrMetadata, self.parse_operand(args[0])?)), @call(mir_copy_for_deref, 
args) => Ok(Rvalue::CopyForDeref(self.parse_place(args[0])?)), ExprKind::Borrow { borrow_kind, arg } => Ok( diff --git a/compiler/rustc_mir_build/src/build/expr/as_constant.rs b/compiler/rustc_mir_build/src/builder/expr/as_constant.rs similarity index 80% rename from compiler/rustc_mir_build/src/build/expr/as_constant.rs rename to compiler/rustc_mir_build/src/builder/expr/as_constant.rs index 640408cb9c808..e4e452aff7553 100644 --- a/compiler/rustc_mir_build/src/build/expr/as_constant.rs +++ b/compiler/rustc_mir_build/src/builder/expr/as_constant.rs @@ -3,18 +3,17 @@ use rustc_abi::Size; use rustc_ast as ast; use rustc_hir::LangItem; -use rustc_middle::mir::interpret::{ - Allocation, CTFE_ALLOC_SALT, LitToConstError, LitToConstInput, Scalar, -}; +use rustc_middle::mir::interpret::{Allocation, CTFE_ALLOC_SALT, LitToConstInput, Scalar}; use rustc_middle::mir::*; use rustc_middle::thir::*; use rustc_middle::ty::{ - self, CanonicalUserType, CanonicalUserTypeAnnotation, Ty, TyCtxt, UserTypeAnnotationIndex, + self, CanonicalUserType, CanonicalUserTypeAnnotation, Ty, TyCtxt, TypeVisitableExt as _, + UserTypeAnnotationIndex, }; use rustc_middle::{bug, mir, span_bug}; use tracing::{instrument, trace}; -use crate::build::{Builder, parse_float_into_constval}; +use crate::builder::{Builder, parse_float_into_constval}; impl<'a, 'tcx> Builder<'a, 'tcx> { /// Compile `expr`, yielding a compile-time constant. Assumes that @@ -50,16 +49,7 @@ pub(crate) fn as_constant_inner<'tcx>( let Expr { ty, temp_lifetime: _, span, ref kind } = *expr; match *kind { ExprKind::Literal { lit, neg } => { - let const_ = match lit_to_mir_constant(tcx, LitToConstInput { lit: &lit.node, ty, neg }) - { - Ok(c) => c, - Err(LitToConstError::Reported(guar)) => { - Const::Ty(Ty::new_error(tcx, guar), ty::Const::new_error(tcx, guar)) - } - Err(LitToConstError::TypeError) => { - bug!("encountered type error in `lit_to_mir_constant`") - } - }; + let const_ = lit_to_mir_constant(tcx, LitToConstInput { lit: &lit.node, ty, neg }); ConstOperand { span, user_ty: None, const_ } } @@ -108,11 +98,13 @@ pub(crate) fn as_constant_inner<'tcx>( } #[instrument(skip(tcx, lit_input))] -fn lit_to_mir_constant<'tcx>( - tcx: TyCtxt<'tcx>, - lit_input: LitToConstInput<'tcx>, -) -> Result, LitToConstError> { +fn lit_to_mir_constant<'tcx>(tcx: TyCtxt<'tcx>, lit_input: LitToConstInput<'tcx>) -> Const<'tcx> { let LitToConstInput { lit, ty, neg } = lit_input; + + if let Err(guar) = ty.error_reported() { + return Const::Ty(Ty::new_error(tcx, guar), ty::Const::new_error(tcx, guar)); + } + let trunc = |n| { let width = match tcx.layout_of(ty::TypingEnv::fully_monomorphized().as_query_input(ty)) { Ok(layout) => layout.size, @@ -123,7 +115,7 @@ fn lit_to_mir_constant<'tcx>( trace!("trunc {} with size {} and shift {}", n, width.bits(), 128 - width.bits()); let result = width.truncate(n); trace!("trunc result: {}", result); - Ok(ConstValue::Scalar(Scalar::from_uint(result, width))) + ConstValue::Scalar(Scalar::from_uint(result, width)) }; let value = match (lit, ty.kind()) { @@ -154,20 +146,18 @@ fn lit_to_mir_constant<'tcx>( ConstValue::Scalar(Scalar::from_uint(*n, Size::from_bytes(1))) } (ast::LitKind::Int(n, _), ty::Uint(_)) | (ast::LitKind::Int(n, _), ty::Int(_)) => { - trunc(if neg { (n.get() as i128).overflowing_neg().0 as u128 } else { n.get() })? 
- } - (ast::LitKind::Float(n, _), ty::Float(fty)) => parse_float_into_constval(*n, *fty, neg) - .ok_or_else(|| { - LitToConstError::Reported( - tcx.dcx() - .delayed_bug(format!("couldn't parse float literal: {:?}", lit_input.lit)), - ) - })?, + trunc(if neg { (n.get() as i128).overflowing_neg().0 as u128 } else { n.get() }) + } + (ast::LitKind::Float(n, _), ty::Float(fty)) => { + parse_float_into_constval(*n, *fty, neg).unwrap() + } (ast::LitKind::Bool(b), ty::Bool) => ConstValue::Scalar(Scalar::from_bool(*b)), (ast::LitKind::Char(c), ty::Char) => ConstValue::Scalar(Scalar::from_char(*c)), - (ast::LitKind::Err(guar), _) => return Err(LitToConstError::Reported(*guar)), - _ => return Err(LitToConstError::TypeError), + (ast::LitKind::Err(guar), _) => { + return Const::Ty(Ty::new_error(tcx, *guar), ty::Const::new_error(tcx, *guar)); + } + _ => bug!("invalid lit/ty combination in `lit_to_mir_constant`: {lit:?}: {ty:?}"), }; - Ok(Const::Val(value, ty)) + Const::Val(value, ty) } diff --git a/compiler/rustc_mir_build/src/build/expr/as_operand.rs b/compiler/rustc_mir_build/src/builder/expr/as_operand.rs similarity index 98% rename from compiler/rustc_mir_build/src/build/expr/as_operand.rs rename to compiler/rustc_mir_build/src/builder/expr/as_operand.rs index 777ff9e68f029..63e9b1dc6cd5a 100644 --- a/compiler/rustc_mir_build/src/build/expr/as_operand.rs +++ b/compiler/rustc_mir_build/src/builder/expr/as_operand.rs @@ -4,8 +4,8 @@ use rustc_middle::mir::*; use rustc_middle::thir::*; use tracing::{debug, instrument}; -use crate::build::expr::category::Category; -use crate::build::{BlockAnd, BlockAndExtension, Builder, NeedsTemporary}; +use crate::builder::expr::category::Category; +use crate::builder::{BlockAnd, BlockAndExtension, Builder, NeedsTemporary}; impl<'a, 'tcx> Builder<'a, 'tcx> { /// Construct a temporary lifetime restricted to just the local scope diff --git a/compiler/rustc_mir_build/src/build/expr/as_place.rs b/compiler/rustc_mir_build/src/builder/expr/as_place.rs similarity index 94% rename from compiler/rustc_mir_build/src/build/expr/as_place.rs rename to compiler/rustc_mir_build/src/builder/expr/as_place.rs index 70a74910a68c6..89c7bb357ef51 100644 --- a/compiler/rustc_mir_build/src/build/expr/as_place.rs +++ b/compiler/rustc_mir_build/src/builder/expr/as_place.rs @@ -14,9 +14,9 @@ use rustc_middle::{bug, span_bug}; use rustc_span::{DesugaringKind, Span}; use tracing::{debug, instrument, trace}; -use crate::build::ForGuard::{OutsideGuard, RefWithinGuard}; -use crate::build::expr::category::Category; -use crate::build::{BlockAnd, BlockAndExtension, Builder, Capture, CaptureMap}; +use crate::builder::ForGuard::{OutsideGuard, RefWithinGuard}; +use crate::builder::expr::category::Category; +use crate::builder::{BlockAnd, BlockAndExtension, Builder, Capture, CaptureMap}; /// The "outermost" place that holds this value. #[derive(Copy, Clone, Debug, PartialEq)] @@ -68,7 +68,7 @@ pub(crate) enum PlaceBase { /// This is used internally when building a place for an expression like `a.b.c`. The fields `b` /// and `c` can be progressively pushed onto the place builder that is created when converting `a`. 
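The doc comment just above describes how a place such as `a.b.c` is assembled by starting from a base and pushing one projection per path component. A rough, self-contained sketch of that shape, using illustrative stand-in types rather than rustc's real `PlaceBuilder` API:

// Stand-in types (`DemoPlaceBuilder`, `DemoProj`) invented for illustration;
// only the "push projections one at a time" idea mirrors the code here.
#[derive(Clone, Debug, PartialEq)]
enum DemoProj {
    Field(usize),
}

#[derive(Clone, Debug, PartialEq)]
struct DemoPlaceBuilder {
    base_local: usize,
    projection: Vec<DemoProj>,
}

impl DemoPlaceBuilder {
    fn local(base_local: usize) -> Self {
        Self { base_local, projection: Vec::new() }
    }

    // Converting `a.b.c` starts from the local for `a`, then pushes one
    // field projection for `.b` and another for `.c`.
    fn field(mut self, idx: usize) -> Self {
        self.projection.push(DemoProj::Field(idx));
        self
    }
}

fn main() {
    let a_b_c = DemoPlaceBuilder::local(0).field(0).field(1);
    println!("{a_b_c:?}");
}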
#[derive(Clone, Debug, PartialEq)] -pub(in crate::build) struct PlaceBuilder<'tcx> { +pub(in crate::builder) struct PlaceBuilder<'tcx> { base: PlaceBase, projection: Vec>, } @@ -249,7 +249,7 @@ fn strip_prefix<'a, 'tcx>( } impl<'tcx> PlaceBuilder<'tcx> { - pub(in crate::build) fn to_place(&self, cx: &Builder<'_, 'tcx>) -> Place<'tcx> { + pub(in crate::builder) fn to_place(&self, cx: &Builder<'_, 'tcx>) -> Place<'tcx> { self.try_to_place(cx).unwrap_or_else(|| match self.base { PlaceBase::Local(local) => span_bug!( cx.local_decls[local].source_info.span, @@ -265,7 +265,7 @@ impl<'tcx> PlaceBuilder<'tcx> { } /// Creates a `Place` or returns `None` if an upvar cannot be resolved - pub(in crate::build) fn try_to_place(&self, cx: &Builder<'_, 'tcx>) -> Option> { + pub(in crate::builder) fn try_to_place(&self, cx: &Builder<'_, 'tcx>) -> Option> { let resolved = self.resolve_upvar(cx); let builder = resolved.as_ref().unwrap_or(self); let PlaceBase::Local(local) = builder.base else { return None }; @@ -283,7 +283,7 @@ impl<'tcx> PlaceBuilder<'tcx> { /// not captured. This can happen because the final mir that will be /// generated doesn't require a read for this place. Failures will only /// happen inside closures. - pub(in crate::build) fn resolve_upvar( + pub(in crate::builder) fn resolve_upvar( &self, cx: &Builder<'_, 'tcx>, ) -> Option> { @@ -635,7 +635,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// /// For arrays it'll be `Operand::Constant` with the actual length; /// For slices it'll be `Operand::Move` of a local using `PtrMetadata`. - fn len_of_slice_or_array( + pub(in crate::builder) fn len_of_slice_or_array( &mut self, block: BasicBlock, place: Place<'tcx>, @@ -647,13 +647,31 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { match place_ty.kind() { ty::Array(_elem_ty, len_const) => { - // We know how long an array is, so just use that as a constant - // directly -- no locals needed. We do need one statement so - // that borrow- and initialization-checking consider it used, - // though. FIXME: Do we really *need* to count this as a use? - // Could partial array tracking work off something else instead? - self.cfg.push_fake_read(block, source_info, FakeReadCause::ForIndex, place); - let const_ = Const::from_ty_const(*len_const, usize_ty, self.tcx); + let ty_const = if let Some((_, len_ty)) = len_const.try_to_valtree() + && len_ty != self.tcx.types.usize + { + // Bad const generics can give us a constant from the type that's + // not actually a `usize`, so in that case give an error instead. + // FIXME: It'd be nice if the type checker made sure this wasn't + // possible, instead. + let err = self.tcx.dcx().span_delayed_bug( + span, + format!( + "Array length should have already been a type error, as it's {len_ty:?}" + ), + ); + ty::Const::new_error(self.tcx, err) + } else { + // We know how long an array is, so just use that as a constant + // directly -- no locals needed. We do need one statement so + // that borrow- and initialization-checking consider it used, + // though. FIXME: Do we really *need* to count this as a use? + // Could partial array tracking work off something else instead? 
+ self.cfg.push_fake_read(block, source_info, FakeReadCause::ForIndex, place); + *len_const + }; + + let const_ = Const::from_ty_const(ty_const, usize_ty, self.tcx); Operand::Constant(Box::new(ConstOperand { span, user_ty: None, const_ })) } ty::Slice(_elem_ty) => { diff --git a/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs b/compiler/rustc_mir_build/src/builder/expr/as_rvalue.rs similarity index 99% rename from compiler/rustc_mir_build/src/build/expr/as_rvalue.rs rename to compiler/rustc_mir_build/src/builder/expr/as_rvalue.rs index c66af118453e5..9961c2488ef42 100644 --- a/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs +++ b/compiler/rustc_mir_build/src/builder/expr/as_rvalue.rs @@ -16,9 +16,9 @@ use rustc_span::source_map::Spanned; use rustc_span::{DUMMY_SP, Span}; use tracing::debug; -use crate::build::expr::as_place::PlaceBase; -use crate::build::expr::category::{Category, RvalueFunc}; -use crate::build::{BlockAnd, BlockAndExtension, Builder, NeedsTemporary}; +use crate::builder::expr::as_place::PlaceBase; +use crate::builder::expr::category::{Category, RvalueFunc}; +use crate::builder::{BlockAnd, BlockAndExtension, Builder, NeedsTemporary}; impl<'a, 'tcx> Builder<'a, 'tcx> { /// Returns an rvalue suitable for use until the end of the current diff --git a/compiler/rustc_mir_build/src/build/expr/as_temp.rs b/compiler/rustc_mir_build/src/builder/expr/as_temp.rs similarity index 97% rename from compiler/rustc_mir_build/src/build/expr/as_temp.rs rename to compiler/rustc_mir_build/src/builder/expr/as_temp.rs index 466f67b1ba4d8..2927f5b0c45d5 100644 --- a/compiler/rustc_mir_build/src/build/expr/as_temp.rs +++ b/compiler/rustc_mir_build/src/builder/expr/as_temp.rs @@ -7,8 +7,8 @@ use rustc_middle::mir::*; use rustc_middle::thir::*; use tracing::{debug, instrument}; -use crate::build::scope::DropKind; -use crate::build::{BlockAnd, BlockAndExtension, Builder}; +use crate::builder::scope::DropKind; +use crate::builder::{BlockAnd, BlockAndExtension, Builder}; impl<'a, 'tcx> Builder<'a, 'tcx> { /// Compile `expr` into a fresh temporary. 
This is used when building @@ -75,11 +75,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { LocalInfo::BlockTailTemp(tail_info) } - _ if let Some(Scope { data: ScopeData::IfThenRescope, id }) = + _ if let Some(Scope { data: ScopeData::IfThenRescope, local_id }) = temp_lifetime.temp_lifetime => { LocalInfo::IfThenRescopeTemp { - if_then: HirId { owner: this.hir_id.owner, local_id: id }, + if_then: HirId { owner: this.hir_id.owner, local_id }, } } diff --git a/compiler/rustc_mir_build/src/build/expr/category.rs b/compiler/rustc_mir_build/src/builder/expr/category.rs similarity index 100% rename from compiler/rustc_mir_build/src/build/expr/category.rs rename to compiler/rustc_mir_build/src/builder/expr/category.rs diff --git a/compiler/rustc_mir_build/src/build/expr/into.rs b/compiler/rustc_mir_build/src/builder/expr/into.rs similarity index 99% rename from compiler/rustc_mir_build/src/build/expr/into.rs rename to compiler/rustc_mir_build/src/builder/expr/into.rs index 0ac1ae56d59bb..88f63d4e22cbd 100644 --- a/compiler/rustc_mir_build/src/build/expr/into.rs +++ b/compiler/rustc_mir_build/src/builder/expr/into.rs @@ -11,9 +11,9 @@ use rustc_middle::ty::CanonicalUserTypeAnnotation; use rustc_span::source_map::Spanned; use tracing::{debug, instrument}; -use crate::build::expr::category::{Category, RvalueFunc}; -use crate::build::matches::DeclareLetBindings; -use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder, NeedsTemporary}; +use crate::builder::expr::category::{Category, RvalueFunc}; +use crate::builder::matches::DeclareLetBindings; +use crate::builder::{BlockAnd, BlockAndExtension, BlockFrame, Builder, NeedsTemporary}; impl<'a, 'tcx> Builder<'a, 'tcx> { /// Compile `expr`, storing the result into `destination`, which diff --git a/compiler/rustc_mir_build/src/build/expr/mod.rs b/compiler/rustc_mir_build/src/builder/expr/mod.rs similarity index 100% rename from compiler/rustc_mir_build/src/build/expr/mod.rs rename to compiler/rustc_mir_build/src/builder/expr/mod.rs diff --git a/compiler/rustc_mir_build/src/build/expr/stmt.rs b/compiler/rustc_mir_build/src/builder/expr/stmt.rs similarity index 98% rename from compiler/rustc_mir_build/src/build/expr/stmt.rs rename to compiler/rustc_mir_build/src/builder/expr/stmt.rs index 15ee6dd014ce9..4ae3536d9c24b 100644 --- a/compiler/rustc_mir_build/src/build/expr/stmt.rs +++ b/compiler/rustc_mir_build/src/builder/expr/stmt.rs @@ -5,8 +5,8 @@ use rustc_middle::thir::*; use rustc_span::source_map::Spanned; use tracing::debug; -use crate::build::scope::BreakableTarget; -use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder}; +use crate::builder::scope::BreakableTarget; +use crate::builder::{BlockAnd, BlockAndExtension, BlockFrame, Builder}; impl<'a, 'tcx> Builder<'a, 'tcx> { /// Builds a block of MIR statements to evaluate the THIR `expr`. 
diff --git a/compiler/rustc_mir_build/src/build/matches/match_pair.rs b/compiler/rustc_mir_build/src/builder/matches/match_pair.rs similarity index 98% rename from compiler/rustc_mir_build/src/build/matches/match_pair.rs rename to compiler/rustc_mir_build/src/builder/matches/match_pair.rs index 33fbd7b1a3f2c..9d59ffc88ba23 100644 --- a/compiler/rustc_mir_build/src/build/matches/match_pair.rs +++ b/compiler/rustc_mir_build/src/builder/matches/match_pair.rs @@ -2,9 +2,9 @@ use rustc_middle::mir::*; use rustc_middle::thir::{self, *}; use rustc_middle::ty::{self, Ty, TypeVisitableExt}; -use crate::build::Builder; -use crate::build::expr::as_place::{PlaceBase, PlaceBuilder}; -use crate::build::matches::{FlatPat, MatchPairTree, TestCase}; +use crate::builder::Builder; +use crate::builder::expr::as_place::{PlaceBase, PlaceBuilder}; +use crate::builder::matches::{FlatPat, MatchPairTree, TestCase}; impl<'a, 'tcx> Builder<'a, 'tcx> { /// Builds and returns [`MatchPairTree`] subtrees, one for each pattern in @@ -86,7 +86,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { impl<'pat, 'tcx> MatchPairTree<'pat, 'tcx> { /// Recursively builds a match pair tree for the given pattern and its /// subpatterns. - pub(in crate::build) fn for_pattern( + pub(in crate::builder) fn for_pattern( mut place_builder: PlaceBuilder<'tcx>, pattern: &'pat Pat<'tcx>, cx: &mut Builder<'_, 'tcx>, diff --git a/compiler/rustc_mir_build/src/build/matches/mod.rs b/compiler/rustc_mir_build/src/builder/matches/mod.rs similarity index 99% rename from compiler/rustc_mir_build/src/build/matches/mod.rs rename to compiler/rustc_mir_build/src/builder/matches/mod.rs index 5791460a6b1c8..b944d13fb0d24 100644 --- a/compiler/rustc_mir_build/src/build/matches/mod.rs +++ b/compiler/rustc_mir_build/src/builder/matches/mod.rs @@ -14,14 +14,13 @@ use rustc_middle::middle::region; use rustc_middle::mir::{self, *}; use rustc_middle::thir::{self, *}; use rustc_middle::ty::{self, CanonicalUserTypeAnnotation, Ty}; -use rustc_span::symbol::Symbol; -use rustc_span::{BytePos, Pos, Span}; +use rustc_span::{BytePos, Pos, Span, Symbol}; use tracing::{debug, instrument}; -use crate::build::ForGuard::{self, OutsideGuard, RefWithinGuard}; -use crate::build::expr::as_place::PlaceBuilder; -use crate::build::scope::DropKind; -use crate::build::{ +use crate::builder::ForGuard::{self, OutsideGuard, RefWithinGuard}; +use crate::builder::expr::as_place::PlaceBuilder; +use crate::builder::scope::DropKind; +use crate::builder::{ BlockAnd, BlockAndExtension, Builder, GuardFrame, GuardFrameLocal, LocalsForNode, }; diff --git a/compiler/rustc_mir_build/src/build/matches/simplify.rs b/compiler/rustc_mir_build/src/builder/matches/simplify.rs similarity index 96% rename from compiler/rustc_mir_build/src/build/matches/simplify.rs rename to compiler/rustc_mir_build/src/builder/matches/simplify.rs index 5b402604395af..ebaed1e431bbc 100644 --- a/compiler/rustc_mir_build/src/build/matches/simplify.rs +++ b/compiler/rustc_mir_build/src/builder/matches/simplify.rs @@ -16,8 +16,8 @@ use std::mem; use tracing::{debug, instrument}; -use crate::build::Builder; -use crate::build::matches::{MatchPairTree, PatternExtraData, TestCase}; +use crate::builder::Builder; +use crate::builder::matches::{MatchPairTree, PatternExtraData, TestCase}; impl<'a, 'tcx> Builder<'a, 'tcx> { /// Simplify a list of match pairs so they all require a test. 
Stores relevant bindings and diff --git a/compiler/rustc_mir_build/src/build/matches/test.rs b/compiler/rustc_mir_build/src/builder/matches/test.rs similarity index 98% rename from compiler/rustc_mir_build/src/build/matches/test.rs rename to compiler/rustc_mir_build/src/builder/matches/test.rs index 4f7bbc4ce3e69..0d36b7bb3ee7b 100644 --- a/compiler/rustc_mir_build/src/build/matches/test.rs +++ b/compiler/rustc_mir_build/src/builder/matches/test.rs @@ -16,12 +16,11 @@ use rustc_middle::ty::{self, GenericArg, Ty, TyCtxt}; use rustc_middle::{bug, span_bug}; use rustc_span::def_id::DefId; use rustc_span::source_map::Spanned; -use rustc_span::symbol::{Symbol, sym}; -use rustc_span::{DUMMY_SP, Span}; +use rustc_span::{DUMMY_SP, Span, Symbol, sym}; use tracing::{debug, instrument}; -use crate::build::Builder; -use crate::build::matches::{Candidate, MatchPairTree, Test, TestBranch, TestCase, TestKind}; +use crate::builder::Builder; +use crate::builder::matches::{Candidate, MatchPairTree, Test, TestBranch, TestCase, TestKind}; impl<'a, 'tcx> Builder<'a, 'tcx> { /// Identifies what test is needed to decide if `match_pair` is applicable. @@ -244,11 +243,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } TestKind::Len { len, op } => { - let usize_ty = self.tcx.types.usize; - let actual = self.temp(usize_ty, test.span); - // actual = len(place) - self.cfg.push_assign(block, source_info, actual, Rvalue::Len(place)); + let actual = self.len_of_slice_or_array(block, place, test.span, source_info); // expected = let expected = self.push_usize(block, source_info, len); @@ -263,7 +259,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { fail_block, source_info, op, - Operand::Move(actual), + actual, Operand::Move(expected), ); } diff --git a/compiler/rustc_mir_build/src/build/matches/util.rs b/compiler/rustc_mir_build/src/builder/matches/util.rs similarity index 98% rename from compiler/rustc_mir_build/src/build/matches/util.rs rename to compiler/rustc_mir_build/src/builder/matches/util.rs index 555684ded81a1..1bd399e511b39 100644 --- a/compiler/rustc_mir_build/src/build/matches/util.rs +++ b/compiler/rustc_mir_build/src/builder/matches/util.rs @@ -4,9 +4,9 @@ use rustc_middle::ty::Ty; use rustc_span::Span; use tracing::debug; -use crate::build::Builder; -use crate::build::expr::as_place::PlaceBase; -use crate::build::matches::{Binding, Candidate, FlatPat, MatchPairTree, TestCase}; +use crate::builder::Builder; +use crate::builder::expr::as_place::PlaceBase; +use crate::builder::matches::{Binding, Candidate, FlatPat, MatchPairTree, TestCase}; impl<'a, 'tcx> Builder<'a, 'tcx> { /// Creates a false edge to `imaginary_target` and a real edge to diff --git a/compiler/rustc_mir_build/src/build/misc.rs b/compiler/rustc_mir_build/src/builder/misc.rs similarity index 95% rename from compiler/rustc_mir_build/src/build/misc.rs rename to compiler/rustc_mir_build/src/builder/misc.rs index a14dcad6573ce..9ea56a9574fde 100644 --- a/compiler/rustc_mir_build/src/build/misc.rs +++ b/compiler/rustc_mir_build/src/builder/misc.rs @@ -7,7 +7,7 @@ use rustc_span::Span; use rustc_trait_selection::infer::InferCtxtExt; use tracing::debug; -use crate::build::Builder; +use crate::builder::Builder; impl<'a, 'tcx> Builder<'a, 'tcx> { /// Adds a new temporary value of type `ty` storing the result of @@ -56,10 +56,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { pub(crate) fn consume_by_copy_or_move(&self, place: Place<'tcx>) -> Operand<'tcx> { let tcx = self.tcx; let ty = place.ty(&self.local_decls, tcx).ty; - if 
!self.infcx.type_is_copy_modulo_regions(self.param_env, ty) { - Operand::Move(place) - } else { + if self.infcx.type_is_copy_modulo_regions(self.param_env, ty) { Operand::Copy(place) + } else { + Operand::Move(place) } } } diff --git a/compiler/rustc_mir_build/src/build/mod.rs b/compiler/rustc_mir_build/src/builder/mod.rs similarity index 97% rename from compiler/rustc_mir_build/src/build/mod.rs rename to compiler/rustc_mir_build/src/builder/mod.rs index f43c29d8f5d68..8b01ec0d06af1 100644 --- a/compiler/rustc_mir_build/src/build/mod.rs +++ b/compiler/rustc_mir_build/src/builder/mod.rs @@ -1,3 +1,8 @@ +//! This module used to be named `build`, but that was causing GitHub's +//! "Go to file" feature to silently ignore all files in the module, probably +//! because it assumes that "build" is a build-output directory. +//! See . + use itertools::Itertools; use rustc_abi::{ExternAbi, FieldIdx}; use rustc_apfloat::Float; @@ -19,12 +24,12 @@ use rustc_middle::query::TyCtxtAt; use rustc_middle::thir::{self, ExprId, LintLevel, LocalVarId, Param, ParamId, PatKind, Thir}; use rustc_middle::ty::{self, ScalarInt, Ty, TyCtxt, TypeVisitableExt, TypingMode}; use rustc_middle::{bug, span_bug}; -use rustc_span::symbol::sym; -use rustc_span::{Span, Symbol}; +use rustc_span::{Span, Symbol, sym}; use super::lints; -use crate::build::expr::as_place::PlaceBuilder; -use crate::build::scope::DropKind; +use crate::builder::expr::as_place::PlaceBuilder; +use crate::builder::scope::DropKind; +use crate::check_inline; pub(crate) fn closure_saved_names_of_captured_variables<'tcx>( tcx: TyCtxt<'tcx>, @@ -76,6 +81,7 @@ pub(crate) fn mir_build<'tcx>(tcx: TyCtxtAt<'tcx>, def: LocalDefId) -> Body<'tcx }; lints::check(tcx, &body); + check_inline::check_force_inline(tcx, &body); // The borrow checker will replace all the regions here with its own // inference variables. There's no point having non-erased regions here. @@ -527,9 +533,9 @@ fn construct_fn<'tcx>( ); let call_site_scope = - region::Scope { id: body.id().hir_id.local_id, data: region::ScopeData::CallSite }; + region::Scope { local_id: body.id().hir_id.local_id, data: region::ScopeData::CallSite }; let arg_scope = - region::Scope { id: body.id().hir_id.local_id, data: region::ScopeData::Arguments }; + region::Scope { local_id: body.id().hir_id.local_id, data: region::ScopeData::Arguments }; let source_info = builder.source_info(span); let call_site_s = (call_site_scope, source_info); let _: BlockAnd<()> = builder.in_scope(call_site_s, LintLevel::Inherited, |builder| { @@ -1001,11 +1007,25 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { if let Some(source_scope) = scope { self.source_scope = source_scope; } + if self.tcx.intrinsic(self.def_id).is_some_and(|i| i.must_be_overridden) { let source_info = self.source_info(rustc_span::DUMMY_SP); self.cfg.terminate(block, source_info, TerminatorKind::Unreachable); self.cfg.start_new_block().unit() } else { + // Ensure we don't silently codegen functions with fake bodies. + match self.tcx.hir_node(self.hir_id) { + hir::Node::Item(hir::Item { + kind: hir::ItemKind::Fn { has_body: false, .. }, + .. 
+ }) => { + self.tcx.dcx().span_delayed_bug( + expr_span, + format!("fn item without body has reached MIR building: {:?}", self.def_id), + ); + } + _ => {} + } self.expr_into_dest(Place::return_place(), block, expr_id) } } diff --git a/compiler/rustc_mir_build/src/build/scope.rs b/compiler/rustc_mir_build/src/builder/scope.rs similarity index 91% rename from compiler/rustc_mir_build/src/build/scope.rs rename to compiler/rustc_mir_build/src/builder/scope.rs index 636e47b7ad2f2..20441530a4790 100644 --- a/compiler/rustc_mir_build/src/build/scope.rs +++ b/compiler/rustc_mir_build/src/builder/scope.rs @@ -89,13 +89,13 @@ use rustc_index::{IndexSlice, IndexVec}; use rustc_middle::middle::region; use rustc_middle::mir::*; use rustc_middle::thir::{ExprId, LintLevel}; -use rustc_middle::{bug, span_bug, ty}; +use rustc_middle::{bug, span_bug}; use rustc_session::lint::Level; use rustc_span::source_map::Spanned; use rustc_span::{DUMMY_SP, Span}; use tracing::{debug, instrument}; -use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder, CFG}; +use crate::builder::{BlockAnd, BlockAndExtension, BlockFrame, Builder, CFG}; #[derive(Debug)] pub(crate) struct Scopes<'tcx> { @@ -151,15 +151,13 @@ struct DropData { /// Whether this is a value Drop or a StorageDead. kind: DropKind, - - /// Whether this is a backwards-incompatible drop lint - backwards_incompatible_lint: bool, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub(crate) enum DropKind { Value, Storage, + ForLint, } #[derive(Debug)] @@ -248,7 +246,7 @@ impl Scope { /// use of optimizations in the MIR coroutine transform. fn needs_cleanup(&self) -> bool { self.drops.iter().any(|drop| match drop.kind { - DropKind::Value => true, + DropKind::Value | DropKind::ForLint => true, DropKind::Storage => false, }) } @@ -277,12 +275,8 @@ impl DropTree { // represents the block in the tree that should be jumped to once all // of the required drops have been performed. let fake_source_info = SourceInfo::outermost(DUMMY_SP); - let fake_data = DropData { - source_info: fake_source_info, - local: Local::MAX, - kind: DropKind::Storage, - backwards_incompatible_lint: false, - }; + let fake_data = + DropData { source_info: fake_source_info, local: Local::MAX, kind: DropKind::Storage }; let drops = IndexVec::from_raw(vec![DropNode { data: fake_data, next: DropIdx::MAX }]); Self { drops, entry_points: Vec::new(), existing_drops_map: FxHashMap::default() } } @@ -411,6 +405,27 @@ impl DropTree { }; cfg.terminate(block, drop_node.data.source_info, terminator); } + DropKind::ForLint => { + let stmt = Statement { + source_info: drop_node.data.source_info, + kind: StatementKind::BackwardIncompatibleDropHint { + place: Box::new(drop_node.data.local.into()), + reason: BackwardIncompatibleDropReason::Edition2024, + }, + }; + cfg.push(block, stmt); + let target = blocks[drop_node.next].unwrap(); + if target != block { + // Diagnostics don't use this `Span` but debuginfo + // might. Since we don't want breakpoints to be placed + // here, especially when this is on an unwind path, we + // use `DUMMY_SP`. + let source_info = + SourceInfo { span: DUMMY_SP, ..drop_node.data.source_info }; + let terminator = TerminatorKind::Goto { target }; + cfg.terminate(block, source_info, terminator); + } + } // Root nodes don't correspond to a drop. 
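For intuition, the `DropKind::ForLint` entries handled above exist to flag values whose drop timing is edition-dependent. A small invented example of the kind of program involved (illustrative names only, with no claim here about which edition drops what first):

struct Noisy(&'static str);

impl Noisy {
    fn name(&self) -> &'static str {
        self.0
    }
}

impl Drop for Noisy {
    fn drop(&mut self) {
        println!("dropping {}", self.0);
    }
}

fn tail_temporary() -> &'static str {
    let _local = Noisy("local");
    // `Noisy("tail temp")` is a temporary created in the tail expression; how
    // its drop is ordered relative to `_local` is the kind of edition-sensitive
    // detail the BackwardIncompatibleDropHint statements record.
    Noisy("tail temp").name()
}

fn main() {
    println!("{}", tail_temporary());
}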
DropKind::Storage if drop_idx == ROOT_NODE => {} DropKind::Storage => { @@ -770,12 +785,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let local = place.as_local().unwrap_or_else(|| bug!("projection in tail call args")); - Some(DropData { - source_info, - local, - kind: DropKind::Value, - backwards_incompatible_lint: false, - }) + Some(DropData { source_info, local, kind: DropKind::Value }) } Operand::Constant(_) => None, }) @@ -822,6 +832,15 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }); block = next; } + DropKind::ForLint => { + self.cfg.push(block, Statement { + source_info, + kind: StatementKind::BackwardIncompatibleDropHint { + place: Box::new(local.into()), + reason: BackwardIncompatibleDropReason::Edition2024, + }, + }); + } DropKind::Storage => { // Only temps and vars need their storage dead. assert!(local.index() > self.arg_count); @@ -1021,7 +1040,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { drop_kind: DropKind, ) { let needs_drop = match drop_kind { - DropKind::Value => { + DropKind::Value | DropKind::ForLint => { if !self.local_decls[local].ty.needs_drop(self.tcx, self.typing_env()) { return; } @@ -1101,7 +1120,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { source_info: SourceInfo { span: scope_end, scope: scope.source_scope }, local, kind: drop_kind, - backwards_incompatible_lint: false, }); return; @@ -1113,18 +1131,15 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// Schedule emission of a backwards incompatible drop lint hint. /// Applicable only to temporary values for now. + #[instrument(level = "debug", skip(self))] pub(crate) fn schedule_backwards_incompatible_drop( &mut self, span: Span, region_scope: region::Scope, local: Local, ) { - if !self.local_decls[local].ty.has_significant_drop(self.tcx, ty::TypingEnv { - typing_mode: ty::TypingMode::non_body_analysis(), - param_env: self.param_env, - }) { - return; - } + // Note that we are *not* gating BIDs here on whether they have significant destructor. + // We need to know all of them so that we can capture potential borrow-checking errors. for scope in self.scopes.scopes.iter_mut().rev() { // Since we are inserting linting MIR statement, we have to invalidate the caches scope.invalidate_cache(); @@ -1135,8 +1150,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { scope.drops.push(DropData { source_info: SourceInfo { span: scope_end, scope: scope.source_scope }, local, - kind: DropKind::Value, - backwards_incompatible_lint: true, + kind: DropKind::ForLint, }); return; @@ -1379,12 +1393,23 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } /// Builds drops for `pop_scope` and `leave_top_scope`. 
+/// +/// # Parameters +/// +/// * `unwind_drops`, the drop tree data structure storing what needs to be cleaned up if unwind occurs +/// * `scope`, describes the drops that will occur on exiting the scope in regular execution +/// * `block`, the block to branch to once drops are complete (assuming no unwind occurs) +/// * `unwind_to`, describes the drops that would occur at this point in the code if a +/// panic occurred (a subset of the drops in `scope`, since we sometimes elide StorageDead and other +/// instructions on unwinding) +/// * `storage_dead_on_unwind`, if true, then we should emit `StorageDead` even when unwinding +/// * `arg_count`, number of MIR local variables corresponding to fn arguments (used to assert that we don't drop those) fn build_scope_drops<'tcx>( cfg: &mut CFG<'tcx>, unwind_drops: &mut DropTree, scope: &Scope, - mut block: BasicBlock, - mut unwind_to: DropIdx, + block: BasicBlock, + unwind_to: DropIdx, storage_dead_on_unwind: bool, arg_count: usize, ) -> BlockAnd<()> { @@ -1409,6 +1434,18 @@ fn build_scope_drops<'tcx>( // drops for the unwind path should have already been generated by // `diverge_cleanup_gen`. + // `unwind_to` indicates what needs to be dropped should unwinding occur. + // This is a subset of what needs to be dropped when exiting the scope. + // As we unwind the scope, we will also move `unwind_to` backwards to match, + // so that we can use it should a destructor panic. + let mut unwind_to = unwind_to; + + // The block that we should jump to after drops complete. We start by building the final drop (`drops[n]` + // in the diagram above) and then build the drops (e.g., `drop[1]`, `drop[0]`) that come before it. + // block begins as the successor of `drops[n]` and then becomes `drops[n]` so that `drops[n-1]` + // will branch to `drops[n]`. + let mut block = block; + for drop_data in scope.drops.iter().rev() { let source_info = drop_data.source_info; let local = drop_data.local; @@ -1418,6 +1455,9 @@ fn build_scope_drops<'tcx>( // `unwind_to` should drop the value that we're about to // schedule. If dropping this value panics, then we continue // with the *next* value on the unwind path. + // + // We adjust this BEFORE we create the drop (e.g., `drops[n]`) + // because `drops[n]` should unwind to `drops[n-1]`. 
debug_assert_eq!(unwind_drops.drops[unwind_to].data.local, drop_data.local); debug_assert_eq!(unwind_drops.drops[unwind_to].data.kind, drop_data.kind); unwind_to = unwind_drops.drops[unwind_to].next; @@ -1430,27 +1470,50 @@ fn build_scope_drops<'tcx>( continue; } - if drop_data.backwards_incompatible_lint { - cfg.push(block, Statement { - source_info, - kind: StatementKind::BackwardIncompatibleDropHint { - place: Box::new(local.into()), - reason: BackwardIncompatibleDropReason::Edition2024, - }, - }); - } else { - unwind_drops.add_entry_point(block, unwind_to); - let next = cfg.start_new_block(); - cfg.terminate(block, source_info, TerminatorKind::Drop { - place: local.into(), - target: next, - unwind: UnwindAction::Continue, - replace: false, - }); - block = next; + unwind_drops.add_entry_point(block, unwind_to); + let next = cfg.start_new_block(); + cfg.terminate(block, source_info, TerminatorKind::Drop { + place: local.into(), + target: next, + unwind: UnwindAction::Continue, + replace: false, + }); + block = next; + } + DropKind::ForLint => { + // As in the `DropKind::Storage` case below: + // normally lint-related drops are not emitted for unwind, + // so we can just leave `unwind_to` unmodified, but in some + // cases we emit things ALSO on the unwind path, so we need to adjust + // `unwind_to` in that case. + if storage_dead_on_unwind { + debug_assert_eq!(unwind_drops.drops[unwind_to].data.local, drop_data.local); + debug_assert_eq!(unwind_drops.drops[unwind_to].data.kind, drop_data.kind); + unwind_to = unwind_drops.drops[unwind_to].next; } + + // If the operand has been moved, and we are not on an unwind + // path, then don't generate the drop. (We only take this into + // account for non-unwind paths so as not to disturb the + // caching mechanism.) + if scope.moved_locals.iter().any(|&o| o == local) { + continue; + } + + cfg.push(block, Statement { + source_info, + kind: StatementKind::BackwardIncompatibleDropHint { + place: Box::new(local.into()), + reason: BackwardIncompatibleDropReason::Edition2024, + }, + }); } DropKind::Storage => { + // Ordinarily, storage-dead nodes are not emitted on unwind, so we don't + // need to adjust `unwind_to` on this path. However, in some specific cases + // we *do* emit storage-dead nodes on the unwind path, and in that case now that + // the storage-dead has completed, we need to adjust the `unwind_to` pointer + // so that any future drops we emit will not register storage-dead. 
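                    // For intuition, an illustrative trace of this bookkeeping: with the
                    // scope's drops scheduled as [Storage(a), Value(b), Value(c)], the
                    // reverse loop first emits Drop(c) then Drop(b), stepping `unwind_to`
                    // back past each value drop it visits; by the time this Storage(a)
                    // case runs, `unwind_to` is stepped past `a` only when storage-dead
                    // statements were also emitted on the unwind path
                    // (`storage_dead_on_unwind`), which is what the checks below assert.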
if storage_dead_on_unwind { debug_assert_eq!(unwind_drops.drops[unwind_to].data.local, drop_data.local); debug_assert_eq!(unwind_drops.drops[unwind_to].data.kind, drop_data.kind); @@ -1489,7 +1552,7 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { let mut unwind_indices = IndexVec::from_elem_n(unwind_target, 1); for (drop_idx, drop_node) in drops.drops.iter_enumerated().skip(1) { match drop_node.data.kind { - DropKind::Storage => { + DropKind::Storage | DropKind::ForLint => { if is_coroutine { let unwind_drop = self .scopes diff --git a/compiler/rustc_mir_build/src/check_inline.rs b/compiler/rustc_mir_build/src/check_inline.rs new file mode 100644 index 0000000000000..1af3b3e2c1353 --- /dev/null +++ b/compiler/rustc_mir_build/src/check_inline.rs @@ -0,0 +1,81 @@ +use rustc_attr_parsing::InlineAttr; +use rustc_hir::def_id::DefId; +use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; +use rustc_middle::mir::{Body, TerminatorKind}; +use rustc_middle::ty; +use rustc_middle::ty::TyCtxt; +use rustc_span::sym; + +/// Check that a body annotated with `#[rustc_force_inline]` will not fail to inline based on its +/// definition alone (irrespective of any specific caller). +pub(crate) fn check_force_inline<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) { + let def_id = body.source.def_id(); + if !tcx.hir().body_owner_kind(def_id).is_fn_or_closure() || !def_id.is_local() { + return; + } + let InlineAttr::Force { attr_span, .. } = tcx.codegen_fn_attrs(def_id).inline else { + return; + }; + + if let Err(reason) = + is_inline_valid_on_fn(tcx, def_id).and_then(|_| is_inline_valid_on_body(tcx, body)) + { + tcx.dcx().emit_err(crate::errors::InvalidForceInline { + attr_span, + callee_span: tcx.def_span(def_id), + callee: tcx.def_path_str(def_id), + reason, + }); + } +} + +pub fn is_inline_valid_on_fn<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> Result<(), &'static str> { + let codegen_attrs = tcx.codegen_fn_attrs(def_id); + if tcx.has_attr(def_id, sym::rustc_no_mir_inline) { + return Err("#[rustc_no_mir_inline]"); + } + + // FIXME(#127234): Coverage instrumentation currently doesn't handle inlined + // MIR correctly when Modified Condition/Decision Coverage is enabled. + if tcx.sess.instrument_coverage_mcdc() { + return Err("incompatible with MC/DC coverage"); + } + + let ty = tcx.type_of(def_id); + if match ty.instantiate_identity().kind() { + ty::FnDef(..) => tcx.fn_sig(def_id).instantiate_identity().c_variadic(), + ty::Closure(_, args) => args.as_closure().sig().c_variadic(), + _ => false, + } { + return Err("C variadic"); + } + + if codegen_attrs.flags.contains(CodegenFnAttrFlags::COLD) { + return Err("cold"); + } + + // Intrinsic fallback bodies are automatically made cross-crate inlineable, + // but at this stage we don't know whether codegen knows the intrinsic, + // so just conservatively don't inline it. This also ensures that we do not + // accidentally inline the body of an intrinsic that *must* be overridden. + if tcx.has_attr(def_id, sym::rustc_intrinsic) { + return Err("callee is an intrinsic"); + } + + Ok(()) +} + +pub fn is_inline_valid_on_body<'tcx>( + _: TyCtxt<'tcx>, + body: &Body<'tcx>, +) -> Result<(), &'static str> { + if body + .basic_blocks + .iter() + .any(|bb| matches!(bb.terminator().kind, TerminatorKind::TailCall { .. 
})) + { + return Err("can't inline functions with tail calls"); + } + + Ok(()) +} diff --git a/compiler/rustc_mir_build/src/check_tail_calls.rs b/compiler/rustc_mir_build/src/check_tail_calls.rs index b1f46d37d5062..0659e3ea314bb 100644 --- a/compiler/rustc_mir_build/src/check_tail_calls.rs +++ b/compiler/rustc_mir_build/src/check_tail_calls.rs @@ -117,7 +117,7 @@ impl<'tcx> TailCallCkVisitor<'_, 'tcx> { self.report_arguments_mismatch(expr.span, caller_sig, callee_sig); } - // FIXME(explicit_tail_calls): this currenly fails for cases where opaques are used. + // FIXME(explicit_tail_calls): this currently fails for cases where opaques are used. // e.g. // ``` // fn a() -> impl Sized { become b() } // ICE diff --git a/compiler/rustc_mir_build/src/check_unsafety.rs b/compiler/rustc_mir_build/src/check_unsafety.rs index 90be690e0345f..5eed9ef798d01 100644 --- a/compiler/rustc_mir_build/src/check_unsafety.rs +++ b/compiler/rustc_mir_build/src/check_unsafety.rs @@ -15,10 +15,9 @@ use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_session::lint::Level; use rustc_session::lint::builtin::{DEPRECATED_SAFE_2024, UNSAFE_OP_IN_UNSAFE_FN, UNUSED_UNSAFE}; use rustc_span::def_id::{DefId, LocalDefId}; -use rustc_span::symbol::Symbol; -use rustc_span::{Span, sym}; +use rustc_span::{Span, Symbol, sym}; -use crate::build::ExprCategory; +use crate::builder::ExprCategory; use crate::errors::*; struct UnsafetyVisitor<'a, 'tcx> { @@ -479,23 +478,26 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for UnsafetyVisitor<'a, 'tcx> { return; // don't visit the whole expression } ExprKind::Call { fun, ty: _, args: _, from_hir_call: _, fn_span: _ } => { - if self.thir[fun].ty.fn_sig(self.tcx).safety().is_unsafe() { - let func_id = if let ty::FnDef(func_id, _) = self.thir[fun].ty.kind() { + let fn_ty = self.thir[fun].ty; + let sig = fn_ty.fn_sig(self.tcx); + let (callee_features, safe_target_features): (&[_], _) = match fn_ty.kind() { + ty::FnDef(func_id, ..) => { + let cg_attrs = self.tcx.codegen_fn_attrs(func_id); + (&cg_attrs.target_features, cg_attrs.safe_target_features) + } + _ => (&[], false), + }; + if sig.safety().is_unsafe() && !safe_target_features { + let func_id = if let ty::FnDef(func_id, _) = fn_ty.kind() { Some(*func_id) } else { None }; self.requires_unsafe(expr.span, CallToUnsafeFunction(func_id)); - } else if let &ty::FnDef(func_did, _) = self.thir[fun].ty.kind() { - // If the called function has target features the calling function hasn't, - // the call requires `unsafe`. Don't check this on wasm - // targets, though. 
For more information on wasm see the - // is_like_wasm check in hir_analysis/src/collect.rs - let callee_features = &self.tcx.codegen_fn_attrs(func_did).target_features; - if !self.tcx.sess.target.options.is_like_wasm - && !callee_features.iter().all(|feature| { - self.body_target_features.iter().any(|f| f.name == feature.name) - }) + } else if let &ty::FnDef(func_did, _) = fn_ty.kind() { + if !self + .tcx + .is_target_feature_call_safe(callee_features, self.body_target_features) { let missing: Vec<_> = callee_features .iter() @@ -740,7 +742,10 @@ impl UnsafeOpKind { ) { let parent_id = tcx.hir().get_parent_item(hir_id); let parent_owner = tcx.hir_owner_node(parent_id); - let should_suggest = parent_owner.fn_sig().is_some_and(|sig| sig.header.is_unsafe()); + let should_suggest = parent_owner.fn_sig().is_some_and(|sig| { + // Do not suggest for safe target_feature functions + matches!(sig.header.safety, hir::HeaderSafety::Normal(hir::Safety::Unsafe)) + }); let unsafe_not_inherited_note = if should_suggest { suggest_unsafe_block.then(|| { let body_span = tcx.hir().body(parent_owner.body_id().unwrap()).value.span; @@ -903,7 +908,7 @@ impl UnsafeOpKind { { true } else if let Some(sig) = tcx.hir().fn_sig_by_hir_id(*id) - && sig.header.is_unsafe() + && matches!(sig.header.safety, hir::HeaderSafety::Normal(hir::Safety::Unsafe)) { true } else { @@ -1112,7 +1117,16 @@ pub(crate) fn check_unsafety(tcx: TyCtxt<'_>, def: LocalDefId) { let hir_id = tcx.local_def_id_to_hir_id(def); let safety_context = tcx.hir().fn_sig_by_hir_id(hir_id).map_or(SafetyContext::Safe, |fn_sig| { - if fn_sig.header.safety.is_unsafe() { SafetyContext::UnsafeFn } else { SafetyContext::Safe } + match fn_sig.header.safety { + // We typeck the body as safe, but otherwise treat it as unsafe everywhere else. + // Call sites to other SafeTargetFeatures functions are checked explicitly and don't need + // to care about safety of the body. 
+ hir::HeaderSafety::SafeTargetFeatures => SafetyContext::Safe, + hir::HeaderSafety::Normal(safety) => match safety { + hir::Safety::Unsafe => SafetyContext::UnsafeFn, + hir::Safety::Safe => SafetyContext::Safe, + }, + } }); let body_target_features = &tcx.body_codegen_attrs(def.to_def_id()).target_features; let mut warnings = Vec::new(); diff --git a/compiler/rustc_mir_build/src/errors.rs b/compiler/rustc_mir_build/src/errors.rs index 3632da943e18b..90c31a2caa326 100644 --- a/compiler/rustc_mir_build/src/errors.rs +++ b/compiler/rustc_mir_build/src/errors.rs @@ -1,14 +1,13 @@ use rustc_errors::codes::*; use rustc_errors::{ Applicability, Diag, DiagArgValue, DiagCtxtHandle, Diagnostic, EmissionGuarantee, Level, - MultiSpan, SubdiagMessageOp, Subdiagnostic, + MultiSpan, SubdiagMessageOp, Subdiagnostic, pluralize, }; use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic}; use rustc_middle::ty::{self, Ty}; use rustc_pattern_analysis::errors::Uncovered; use rustc_pattern_analysis::rustc::RustcPatCtxt; -use rustc_span::Span; -use rustc_span::symbol::Symbol; +use rustc_span::{Span, Symbol}; use crate::fluent_generated as fluent; @@ -1068,39 +1067,22 @@ pub(crate) enum MiscPatternSuggestion { }, } -#[derive(Diagnostic)] -#[diag(mir_build_rustc_box_attribute_error)] -pub(crate) struct RustcBoxAttributeError { - #[primary_span] - pub(crate) span: Span, - #[subdiagnostic] - pub(crate) reason: RustcBoxAttrReason, -} - -#[derive(Subdiagnostic)] -pub(crate) enum RustcBoxAttrReason { - #[note(mir_build_attributes)] - Attributes, - #[note(mir_build_not_box)] - NotBoxNew, - #[note(mir_build_missing_box)] - MissingBox, -} - #[derive(LintDiagnostic)] #[diag(mir_build_rust_2024_incompatible_pat)] -pub(crate) struct Rust2024IncompatiblePat { +pub(crate) struct Rust2024IncompatiblePat<'a> { #[subdiagnostic] - pub(crate) sugg: Rust2024IncompatiblePatSugg, + pub(crate) sugg: Rust2024IncompatiblePatSugg<'a>, } -pub(crate) struct Rust2024IncompatiblePatSugg { +pub(crate) struct Rust2024IncompatiblePatSugg<'a> { pub(crate) suggestion: Vec<(Span, String)>, - /// Whether the incompatibility is a hard error because a relevant span is in edition 2024. - pub(crate) is_hard_error: bool, + pub(crate) ref_pattern_count: usize, + pub(crate) binding_mode_count: usize, + /// Labeled spans for subpatterns invalid in Rust 2024. 
+ pub(crate) labels: &'a [(Span, String)], } -impl Subdiagnostic for Rust2024IncompatiblePatSugg { +impl<'a> Subdiagnostic for Rust2024IncompatiblePatSugg<'a> { fn add_to_diag_with>( self, diag: &mut Diag<'_, G>, @@ -1112,6 +1094,28 @@ impl Subdiagnostic for Rust2024IncompatiblePatSugg { } else { Applicability::MaybeIncorrect }; - diag.multipart_suggestion("desugar the match ergonomics", self.suggestion, applicability); + let plural_derefs = pluralize!(self.ref_pattern_count); + let and_modes = if self.binding_mode_count > 0 { + format!(" and variable binding mode{}", pluralize!(self.binding_mode_count)) + } else { + String::new() + }; + diag.multipart_suggestion_verbose( + format!("make the implied reference pattern{plural_derefs}{and_modes} explicit"), + self.suggestion, + applicability, + ); } } + +#[derive(Diagnostic)] +#[diag(mir_build_force_inline)] +#[note] +pub(crate) struct InvalidForceInline { + #[primary_span] + pub attr_span: Span, + #[label(mir_build_callee)] + pub callee_span: Span, + pub callee: String, + pub reason: &'static str, +} diff --git a/compiler/rustc_mir_build/src/lib.rs b/compiler/rustc_mir_build/src/lib.rs index 833e5019865e8..76a35355de72a 100644 --- a/compiler/rustc_mir_build/src/lib.rs +++ b/compiler/rustc_mir_build/src/lib.rs @@ -11,7 +11,11 @@ #![warn(unreachable_pub)] // tidy-alphabetical-end -mod build; +// The `builder` module used to be named `build`, but that was causing GitHub's +// "Go to file" feature to silently ignore all files in the module, probably +// because it assumes that "build" is a build-output directory. See #134365. +mod builder; +pub mod check_inline; mod check_tail_calls; mod check_unsafety; mod errors; @@ -25,9 +29,9 @@ rustc_fluent_macro::fluent_messages! { "../messages.ftl" } pub fn provide(providers: &mut Providers) { providers.check_match = thir::pattern::check_match; providers.lit_to_const = thir::constant::lit_to_const; - providers.hooks.build_mir = build::mir_build; + providers.hooks.build_mir = builder::mir_build; providers.closure_saved_names_of_captured_variables = - build::closure_saved_names_of_captured_variables; + builder::closure_saved_names_of_captured_variables; providers.check_unsafety = check_unsafety::check_unsafety; providers.check_tail_calls = check_tail_calls::check_tail_calls; providers.thir_body = thir::cx::thir_body; diff --git a/compiler/rustc_mir_build/src/thir/constant.rs b/compiler/rustc_mir_build/src/thir/constant.rs index 30b6718683b67..49db522cf0eed 100644 --- a/compiler/rustc_mir_build/src/thir/constant.rs +++ b/compiler/rustc_mir_build/src/thir/constant.rs @@ -1,20 +1,20 @@ use rustc_ast as ast; use rustc_hir::LangItem; use rustc_middle::bug; -use rustc_middle::mir::interpret::{LitToConstError, LitToConstInput}; +use rustc_middle::mir::interpret::LitToConstInput; use rustc_middle::ty::{self, ScalarInt, TyCtxt, TypeVisitableExt as _}; use tracing::trace; -use crate::build::parse_float_into_scalar; +use crate::builder::parse_float_into_scalar; pub(crate) fn lit_to_const<'tcx>( tcx: TyCtxt<'tcx>, lit_input: LitToConstInput<'tcx>, -) -> Result, LitToConstError> { +) -> ty::Const<'tcx> { let LitToConstInput { lit, ty, neg } = lit_input; if let Err(guar) = ty.error_reported() { - return Ok(ty::Const::new_error(tcx, guar)); + return ty::Const::new_error(tcx, guar); } let trunc = |n| { @@ -28,8 +28,8 @@ pub(crate) fn lit_to_const<'tcx>( let result = width.truncate(n); trace!("trunc result: {}", result); - Ok(ScalarInt::try_from_uint(result, width) - .unwrap_or_else(|| bug!("expected to create ScalarInt 
from uint {:?}", result))) + ScalarInt::try_from_uint(result, width) + .unwrap_or_else(|| bug!("expected to create ScalarInt from uint {:?}", result)) }; let valtree = match (lit, ty.kind()) { @@ -57,20 +57,20 @@ pub(crate) fn lit_to_const<'tcx>( } (ast::LitKind::Int(n, _), ty::Uint(_)) | (ast::LitKind::Int(n, _), ty::Int(_)) => { let scalar_int = - trunc(if neg { (n.get() as i128).overflowing_neg().0 as u128 } else { n.get() })?; + trunc(if neg { (n.get() as i128).overflowing_neg().0 as u128 } else { n.get() }); ty::ValTree::from_scalar_int(scalar_int) } (ast::LitKind::Bool(b), ty::Bool) => ty::ValTree::from_scalar_int((*b).into()), (ast::LitKind::Float(n, _), ty::Float(fty)) => { - let bits = parse_float_into_scalar(*n, *fty, neg).ok_or_else(|| { + let bits = parse_float_into_scalar(*n, *fty, neg).unwrap_or_else(|| { tcx.dcx().bug(format!("couldn't parse float literal: {:?}", lit_input.lit)) - })?; + }); ty::ValTree::from_scalar_int(bits) } (ast::LitKind::Char(c), ty::Char) => ty::ValTree::from_scalar_int((*c).into()), - (ast::LitKind::Err(guar), _) => return Err(LitToConstError::Reported(*guar)), - _ => return Err(LitToConstError::TypeError), + (ast::LitKind::Err(guar), _) => return ty::Const::new_error(tcx, *guar), + _ => return ty::Const::new_misc_error(tcx), }; - Ok(ty::Const::new_value(tcx, valtree, ty)) + ty::Const::new_value(tcx, valtree, ty) } diff --git a/compiler/rustc_mir_build/src/thir/cx/block.rs b/compiler/rustc_mir_build/src/thir/cx/block.rs index 069c2e7881ea6..c9df027687ab9 100644 --- a/compiler/rustc_mir_build/src/thir/cx/block.rs +++ b/compiler/rustc_mir_build/src/thir/cx/block.rs @@ -16,7 +16,7 @@ impl<'tcx> Cx<'tcx> { let block = Block { targeted_by_break: block.targeted_by_break, region_scope: region::Scope { - id: block.hir_id.local_id, + local_id: block.hir_id.local_id, data: region::ScopeData::Node, }, span: block.span, @@ -51,7 +51,7 @@ impl<'tcx> Cx<'tcx> { let stmt = Stmt { kind: StmtKind::Expr { scope: region::Scope { - id: hir_id.local_id, + local_id: hir_id.local_id, data: region::ScopeData::Node, }, expr: self.mirror_expr(expr), @@ -65,7 +65,7 @@ impl<'tcx> Cx<'tcx> { } hir::StmtKind::Let(local) => { let remainder_scope = region::Scope { - id: block_id, + local_id: block_id, data: region::ScopeData::Remainder(region::FirstStatementIndex::new( index, )), @@ -108,7 +108,7 @@ impl<'tcx> Cx<'tcx> { kind: StmtKind::Let { remainder_scope, init_scope: region::Scope { - id: hir_id.local_id, + local_id: hir_id.local_id, data: region::ScopeData::Node, }, pattern, diff --git a/compiler/rustc_mir_build/src/thir/cx/expr.rs b/compiler/rustc_mir_build/src/thir/cx/expr.rs index 6770e562d5010..9cdf08d749b01 100644 --- a/compiler/rustc_mir_build/src/thir/cx/expr.rs +++ b/compiler/rustc_mir_build/src/thir/cx/expr.rs @@ -20,11 +20,18 @@ use rustc_middle::{bug, span_bug}; use rustc_span::{Span, sym}; use tracing::{debug, info, instrument, trace}; -use crate::errors; use crate::thir::cx::Cx; use crate::thir::util::UserAnnotatedTyHelpers; impl<'tcx> Cx<'tcx> { + /// Create a THIR expression for the given HIR expression. This expands all + /// adjustments and directly adds the type information from the + /// `typeck_results`. See the [dev-guide] for more details. + /// + /// (The term "mirror" in this case does not refer to "flipped" or + /// "reversed".) + /// + /// [dev-guide]: https://rustc-dev-guide.rust-lang.org/thir.html pub(crate) fn mirror_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) -> ExprId { // `mirror_expr` is recursing very deep. 
Make sure the stack doesn't overflow. ensure_sufficient_stack(|| self.mirror_expr_inner(expr)) @@ -37,7 +44,7 @@ impl<'tcx> Cx<'tcx> { #[instrument(level = "trace", skip(self, hir_expr))] pub(super) fn mirror_expr_inner(&mut self, hir_expr: &'tcx hir::Expr<'tcx>) -> ExprId { let expr_scope = - region::Scope { id: hir_expr.hir_id.local_id, data: region::ScopeData::Node }; + region::Scope { local_id: hir_expr.hir_id.local_id, data: region::ScopeData::Node }; trace!(?hir_expr.hir_id, ?hir_expr.span); @@ -372,45 +379,25 @@ impl<'tcx> Cx<'tcx> { from_hir_call: true, fn_span: expr.span, } - } else { - let attrs = tcx.hir().attrs(expr.hir_id); - if attrs.iter().any(|a| a.name_or_empty() == sym::rustc_box) { - if attrs.len() != 1 { - tcx.dcx().emit_err(errors::RustcBoxAttributeError { - span: attrs[0].span, - reason: errors::RustcBoxAttrReason::Attributes, - }); - } else if let Some(box_item) = tcx.lang_items().owned_box() { - if let hir::ExprKind::Path(hir::QPath::TypeRelative(ty, fn_path)) = - fun.kind - && let hir::TyKind::Path(hir::QPath::Resolved(_, path)) = ty.kind - && path.res.opt_def_id().is_some_and(|did| did == box_item) - && fn_path.ident.name == sym::new - && let [value] = args - { - return Expr { - temp_lifetime: TempLifetime { - temp_lifetime, - backwards_incompatible, - }, - ty: expr_ty, - span: expr.span, - kind: ExprKind::Box { value: self.mirror_expr(value) }, - }; - } else { - tcx.dcx().emit_err(errors::RustcBoxAttributeError { - span: expr.span, - reason: errors::RustcBoxAttrReason::NotBoxNew, - }); - } - } else { - tcx.dcx().emit_err(errors::RustcBoxAttributeError { - span: attrs[0].span, - reason: errors::RustcBoxAttrReason::MissingBox, - }); - } + } else if let ty::FnDef(def_id, _) = self.typeck_results().expr_ty(fun).kind() + && let Some(intrinsic) = self.tcx.intrinsic(def_id) + && intrinsic.name == sym::box_new + { + // We don't actually evaluate `fun` here, so make sure that doesn't miss any side-effects. + if !matches!(fun.kind, hir::ExprKind::Path(_)) { + span_bug!( + expr.span, + "`box_new` intrinsic can only be called via path expression" + ); } - + let value = &args[0]; + return Expr { + temp_lifetime: TempLifetime { temp_lifetime, backwards_incompatible }, + ty: expr_ty, + span: expr.span, + kind: ExprKind::Box { value: self.mirror_expr(value) }, + }; + } else { // Tuple-like ADTs are represented as ExprKind::Call. We convert them here. 
let adt_data = if let hir::ExprKind::Path(ref qpath) = fun.kind && let Some(adt_def) = expr_ty.ty_adt_def() @@ -806,14 +793,20 @@ impl<'tcx> Cx<'tcx> { hir::ExprKind::Become(call) => ExprKind::Become { value: self.mirror_expr(call) }, hir::ExprKind::Break(dest, ref value) => match dest.target_id { Ok(target_id) => ExprKind::Break { - label: region::Scope { id: target_id.local_id, data: region::ScopeData::Node }, + label: region::Scope { + local_id: target_id.local_id, + data: region::ScopeData::Node, + }, value: value.map(|value| self.mirror_expr(value)), }, Err(err) => bug!("invalid loop id for break: {}", err), }, hir::ExprKind::Continue(dest) => match dest.target_id { Ok(loop_id) => ExprKind::Continue { - label: region::Scope { id: loop_id.local_id, data: region::ScopeData::Node }, + label: region::Scope { + local_id: loop_id.local_id, + data: region::ScopeData::Node, + }, }, Err(err) => bug!("invalid loop id for continue: {}", err), }, @@ -823,7 +816,7 @@ impl<'tcx> Cx<'tcx> { }, hir::ExprKind::If(cond, then, else_opt) => ExprKind::If { if_then_scope: region::Scope { - id: then.hir_id.local_id, + local_id: then.hir_id.local_id, data: { if expr.span.at_least_rust_2024() { region::ScopeData::IfThenRescope @@ -1013,7 +1006,7 @@ impl<'tcx> Cx<'tcx> { guard: arm.guard.as_ref().map(|g| self.mirror_expr(g)), body: self.mirror_expr(arm.body), lint_level: LintLevel::Explicit(arm.hir_id), - scope: region::Scope { id: arm.hir_id.local_id, data: region::ScopeData::Node }, + scope: region::Scope { local_id: arm.hir_id.local_id, data: region::ScopeData::Node }, span: arm.span, }; self.thir.arms.push(arm) @@ -1189,7 +1182,7 @@ impl<'tcx> Cx<'tcx> { .temporary_scope(self.region_scope_tree, closure_expr.hir_id.local_id); let var_ty = place.base_ty; - // The result of capture analysis in `rustc_hir_analysis/check/upvar.rs`represents a captured path + // The result of capture analysis in `rustc_hir_typeck/src/upvar.rs` represents a captured path // as it's seen for use within the closure and not at the time of closure creation. // // That is we see expect to see it start from a captured upvar and not something that is local diff --git a/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs b/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs index aed00aecefc99..3853b95f78b4c 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs @@ -1,3 +1,5 @@ +use core::ops::ControlFlow; + use rustc_abi::{FieldIdx, VariantIdx}; use rustc_apfloat::Float; use rustc_data_structures::fx::FxHashSet; @@ -8,7 +10,9 @@ use rustc_infer::infer::TyCtxtInferExt; use rustc_infer::traits::Obligation; use rustc_middle::mir::interpret::ErrorHandled; use rustc_middle::thir::{FieldPat, Pat, PatKind}; -use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt, TypeVisitor, ValTree}; +use rustc_middle::ty::{ + self, Ty, TyCtxt, TypeSuperVisitable, TypeVisitableExt, TypeVisitor, ValTree, +}; use rustc_middle::{mir, span_bug}; use rustc_span::def_id::DefId; use rustc_span::{Span, sym}; @@ -185,7 +189,7 @@ impl<'tcx> ConstToPat<'tcx> { if !inlined_const_as_pat.references_error() { // Always check for `PartialEq` if we had no other errors yet. 
- if !type_has_partial_eq_impl(self.tcx, typing_env, ty).0 { + if !type_has_partial_eq_impl(self.tcx, typing_env, ty).has_impl { let mut err = self.tcx.dcx().create_err(TypeNotPartialEq { span: self.span, ty }); extend_type_not_partial_eq(self.tcx, typing_env, ty, &mut err); return self.mk_err(err, ty); @@ -219,12 +223,13 @@ impl<'tcx> ConstToPat<'tcx> { // Extremely important check for all ADTs! Make sure they opted-in to be used in // patterns. debug!("adt_def {:?} has !type_marked_structural for cv.ty: {:?}", adt_def, ty); - let (_impls_partial_eq, derived, structural, impl_def_id) = - type_has_partial_eq_impl(self.tcx, self.typing_env, ty); + let PartialEqImplStatus { + is_derived, structural_partial_eq, non_blanket_impl, .. + } = type_has_partial_eq_impl(self.tcx, self.typing_env, ty); let (manual_partialeq_impl_span, manual_partialeq_impl_note) = - match (structural, impl_def_id) { + match (structural_partial_eq, non_blanket_impl) { (true, _) => (None, false), - (_, Some(def_id)) if def_id.is_local() && !derived => { + (_, Some(def_id)) if def_id.is_local() && !is_derived => { (Some(tcx.def_span(def_id)), false) } _ => (None, true), @@ -379,41 +384,50 @@ fn extend_type_not_partial_eq<'tcx>( adts_without_partialeq: FxHashSet, /// The user has written `impl PartialEq for Ty` which means it's non-structual, /// but we don't have a span to point at, so we'll just add them as a `note`. - manual: Vec>, + manual: FxHashSet>, /// The type has no `PartialEq` implementation, neither manual or derived, but /// we don't have a span to point at, so we'll just add them as a `note`. - without: Vec>, + without: FxHashSet>, } impl<'tcx> TypeVisitor> for UsedParamsNeedInstantiationVisitor<'tcx> { + type Result = ControlFlow<()>; fn visit_ty(&mut self, ty: Ty<'tcx>) -> Self::Result { - if let ty::Adt(def, _args) = ty.kind() { - let ty_def_id = def.did(); - let ty_def_span = self.tcx.def_span(ty_def_id); - let (impls_partial_eq, derived, structural, impl_def_id) = - type_has_partial_eq_impl(self.tcx, self.typing_env, ty); - match (impls_partial_eq, derived, structural, impl_def_id) { - (_, _, true, _) => {} - (true, false, _, Some(def_id)) if def_id.is_local() => { - self.adts_with_manual_partialeq.insert(self.tcx.def_span(def_id)); - } - (true, false, _, _) if ty_def_id.is_local() => { - self.adts_with_manual_partialeq.insert(ty_def_span); - } - (false, _, _, _) if ty_def_id.is_local() => { - self.adts_without_partialeq.insert(ty_def_span); - } - (true, false, _, _) => { - self.manual.push(ty); - } - (false, _, _, _) => { - self.without.push(ty); - } - _ => {} - }; + match ty.kind() { + ty::Dynamic(..) => return ControlFlow::Break(()), + ty::FnPtr(..) 
=> return ControlFlow::Continue(()), + ty::Adt(def, _args) => { + let ty_def_id = def.did(); + let ty_def_span = self.tcx.def_span(ty_def_id); + let PartialEqImplStatus { + has_impl, + is_derived, + structural_partial_eq, + non_blanket_impl, + } = type_has_partial_eq_impl(self.tcx, self.typing_env, ty); + match (has_impl, is_derived, structural_partial_eq, non_blanket_impl) { + (_, _, true, _) => {} + (true, false, _, Some(def_id)) if def_id.is_local() => { + self.adts_with_manual_partialeq.insert(self.tcx.def_span(def_id)); + } + (true, false, _, _) if ty_def_id.is_local() => { + self.adts_with_manual_partialeq.insert(ty_def_span); + } + (false, _, _, _) if ty_def_id.is_local() => { + self.adts_without_partialeq.insert(ty_def_span); + } + (true, false, _, _) => { + self.manual.insert(ty); + } + (false, _, _, _) => { + self.without.insert(ty); + } + _ => {} + }; + ty.super_visit_with(self) + } + _ => ty.super_visit_with(self), } - use rustc_middle::ty::TypeSuperVisitable; - ty.super_visit_with(self) } } let mut v = UsedParamsNeedInstantiationVisitor { @@ -421,10 +435,12 @@ fn extend_type_not_partial_eq<'tcx>( typing_env, adts_with_manual_partialeq: FxHashSet::default(), adts_without_partialeq: FxHashSet::default(), - manual: vec![], - without: vec![], + manual: FxHashSet::default(), + without: FxHashSet::default(), }; - v.visit_ty(ty); + if v.visit_ty(ty).is_break() { + return; + } #[allow(rustc::potential_query_instability)] // Span labels will be sorted by the rendering for span in v.adts_with_manual_partialeq { err.span_note(span, "the `PartialEq` trait must be derived, manual `impl`s are not sufficient; see https://doc.rust-lang.org/stable/std/marker/trait.StructuralPartialEq.html for details"); @@ -436,29 +452,38 @@ fn extend_type_not_partial_eq<'tcx>( "must be annotated with `#[derive(PartialEq)]` to be usable in patterns", ); } - for ty in v.manual { + #[allow(rustc::potential_query_instability)] + let mut manual: Vec<_> = v.manual.into_iter().map(|t| t.to_string()).collect(); + manual.sort(); + for ty in manual { err.note(format!( "`{ty}` must be annotated with `#[derive(PartialEq)]` to be usable in patterns, manual `impl`s are not sufficient; see https://doc.rust-lang.org/stable/std/marker/trait.StructuralPartialEq.html for details" )); } - for ty in v.without { + #[allow(rustc::potential_query_instability)] + let mut without: Vec<_> = v.without.into_iter().map(|t| t.to_string()).collect(); + without.sort(); + for ty in without { err.note(format!( "`{ty}` must be annotated with `#[derive(PartialEq)]` to be usable in patterns" )); } } +#[derive(Debug)] +struct PartialEqImplStatus { + has_impl: bool, + is_derived: bool, + structural_partial_eq: bool, + non_blanket_impl: Option, +} + #[instrument(level = "trace", skip(tcx), ret)] fn type_has_partial_eq_impl<'tcx>( tcx: TyCtxt<'tcx>, typing_env: ty::TypingEnv<'tcx>, ty: Ty<'tcx>, -) -> ( - /* has impl */ bool, - /* is derived */ bool, - /* structural partial eq */ bool, - /* non-blanket impl */ Option, -) { +) -> PartialEqImplStatus { let (infcx, param_env) = tcx.infer_ctxt().build_with_typing_env(typing_env); // double-check there even *is* a semantic `PartialEq` to dispatch to. // @@ -491,10 +516,14 @@ fn type_has_partial_eq_impl<'tcx>( // `PartialEq` for some lifetime but *not* for `'static`? If this ever becomes a problem // we'll need to leave some sort of trace of this requirement in the MIR so that borrowck // can ensure that the type really implements `PartialEq`. 
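The rewritten visitor above changes its `Result` type to `ControlFlow<()>` so the walk over a type can stop entirely as soon as a `dyn Trait` is reached, while `fn` pointers are merely skipped. As a rough illustration of that pattern, here is a standalone sketch using only the standard library (a toy type tree, not the compiler's `TypeVisitor` trait):

    use std::ops::ControlFlow;

    // A toy "type" tree standing in for rustc's `Ty<'tcx>` (simplified for illustration).
    enum Ty {
        Adt(&'static str, Vec<Ty>),
        Dynamic,          // like `ty::Dynamic(..)` above: abort the whole walk
        FnPtr,            // like `ty::FnPtr(..)`: skip, but keep visiting siblings
        Other(Vec<Ty>),
    }

    // Record every ADT name, bailing out of the traversal on the first `dyn` type.
    fn visit(ty: &Ty, found: &mut Vec<&'static str>) -> ControlFlow<()> {
        match ty {
            Ty::Dynamic => ControlFlow::Break(()),
            Ty::FnPtr => ControlFlow::Continue(()),
            Ty::Adt(name, args) => {
                found.push(name);
                for arg in args {
                    visit(arg, found)?; // `?` propagates `Break`, like `super_visit_with`
                }
                ControlFlow::Continue(())
            }
            Ty::Other(args) => {
                for arg in args {
                    visit(arg, found)?;
                }
                ControlFlow::Continue(())
            }
        }
    }

    fn main() {
        let ty = Ty::Adt("Foo", vec![Ty::FnPtr, Ty::Adt("Bar", vec![]), Ty::Dynamic]);
        let mut found = Vec::new();
        let outcome = visit(&ty, &mut found);
        assert_eq!(found, ["Foo", "Bar"]);
        assert!(outcome.is_break());
    }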
- ( - infcx.predicate_must_hold_modulo_regions(&partial_eq_obligation), - automatically_derived, - structural_peq, - impl_def_id, - ) + // We also do *not* require `const PartialEq`, not even in `const fn`. This violates the model + // that patterns can only do things that the code could also do without patterns, but it is + // needed for backwards compatibility. The actual pattern matching compares primitive values, + // `PartialEq::eq` never gets invoked, so there's no risk of us running non-const code. + PartialEqImplStatus { + has_impl: infcx.predicate_must_hold_modulo_regions(&partial_eq_obligation), + is_derived: automatically_derived, + structural_partial_eq: structural_peq, + non_blanket_impl: impl_def_id, + } } diff --git a/compiler/rustc_mir_build/src/thir/pattern/mod.rs b/compiler/rustc_mir_build/src/thir/pattern/mod.rs index 2dbc8b7b57385..44b038bb5faf1 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/mod.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/mod.rs @@ -6,13 +6,14 @@ mod const_to_pat; use std::cmp::Ordering; use rustc_abi::{FieldIdx, Integer}; +use rustc_errors::MultiSpan; use rustc_errors::codes::*; use rustc_hir::def::{CtorOf, DefKind, Res}; use rustc_hir::pat_util::EnumerateAndAdjustIterator; use rustc_hir::{self as hir, ByRef, Mutability, RangeEnd}; use rustc_index::Idx; use rustc_lint as lint; -use rustc_middle::mir::interpret::{LitToConstError, LitToConstInput}; +use rustc_middle::mir::interpret::LitToConstInput; use rustc_middle::thir::{ Ascription, FieldPat, LocalVarId, Pat, PatKind, PatRange, PatRangeBoundary, }; @@ -34,7 +35,7 @@ struct PatCtxt<'a, 'tcx> { typeck_results: &'a ty::TypeckResults<'tcx>, /// Used by the Rust 2024 migration lint. - rust_2024_migration_suggestion: Option, + rust_2024_migration_suggestion: Option>, } pub(super) fn pat_from_hir<'a, 'tcx>( @@ -50,24 +51,36 @@ pub(super) fn pat_from_hir<'a, 'tcx>( rust_2024_migration_suggestion: typeck_results .rust_2024_migration_desugared_pats() .get(pat.hir_id) - .map(|&is_hard_error| Rust2024IncompatiblePatSugg { + .map(|labels| Rust2024IncompatiblePatSugg { suggestion: Vec::new(), - is_hard_error, + ref_pattern_count: 0, + binding_mode_count: 0, + labels: labels.as_slice(), }), }; let result = pcx.lower_pattern(pat); debug!("pat_from_hir({:?}) = {:?}", pat, result); if let Some(sugg) = pcx.rust_2024_migration_suggestion { - if sugg.is_hard_error { + let mut spans = MultiSpan::from_spans(sugg.labels.iter().map(|(span, _)| *span).collect()); + for (span, label) in sugg.labels { + spans.push_span_label(*span, label.clone()); + } + // If a relevant span is from at least edition 2024, this is a hard error. 
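The return value replaced above used to be a four-element tuple whose components were only documented by inline comments; callers had to destructure it positionally. The new `PartialEqImplStatus` struct names each component. A minimal standalone sketch of the same refactoring shape (field names follow the diff, surrounding types are invented for illustration):

    // Before: positional results are easy to mix up at the call site.
    fn status_as_tuple() -> (bool, bool, bool, Option<u32>) {
        (true, false, true, None)
    }

    // After: a named struct documents what each flag means.
    #[derive(Debug)]
    struct PartialEqImplStatus {
        has_impl: bool,
        is_derived: bool,
        structural_partial_eq: bool,
        non_blanket_impl: Option<u32>, // stand-in for the impl's `DefId` in the real code
    }

    fn status_as_struct() -> PartialEqImplStatus {
        PartialEqImplStatus {
            has_impl: true,
            is_derived: false,
            structural_partial_eq: true,
            non_blanket_impl: None,
        }
    }

    fn main() {
        let (_has_impl, _derived, structural, _impl_id) = status_as_tuple();
        // With the struct, callers name only the fields they care about:
        let PartialEqImplStatus { structural_partial_eq, .. } = status_as_struct();
        assert_eq!(structural, structural_partial_eq);
    }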
+ let is_hard_error = spans.primary_spans().iter().any(|span| span.at_least_rust_2024()); + if is_hard_error { let mut err = - tcx.dcx().struct_span_err(pat.span, fluent::mir_build_rust_2024_incompatible_pat); + tcx.dcx().struct_span_err(spans, fluent::mir_build_rust_2024_incompatible_pat); + if let Some(info) = lint::builtin::RUST_2024_INCOMPATIBLE_PAT.future_incompatible { + // provide the same reference link as the lint + err.note(format!("for more information, see {}", info.reference)); + } err.subdiagnostic(sugg); err.emit(); } else { tcx.emit_node_span_lint( lint::builtin::RUST_2024_INCOMPATIBLE_PAT, pat.hir_id, - pat.span, + spans, Rust2024IncompatiblePat { sugg }, ); } @@ -133,6 +146,7 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> { }) .collect(); s.suggestion.push((pat.span.shrink_to_lo(), suggestion_str)); + s.ref_pattern_count += adjustments.len(); }; adjusted_pat @@ -140,7 +154,7 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> { fn lower_pattern_range_endpoint( &mut self, - expr: Option<&'tcx hir::Expr<'tcx>>, + expr: Option<&'tcx hir::PatExpr<'tcx>>, ) -> Result< (Option>, Option>, Option), ErrorGuaranteed, @@ -186,13 +200,12 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> { /// This is only called when the range is already known to be malformed. fn error_on_literal_overflow( &self, - expr: Option<&'tcx hir::Expr<'tcx>>, + expr: Option<&'tcx hir::PatExpr<'tcx>>, ty: Ty<'tcx>, ) -> Result<(), ErrorGuaranteed> { - use hir::{ExprKind, UnOp}; use rustc_ast::ast::LitKind; - let Some(mut expr) = expr else { + let Some(expr) = expr else { return Ok(()); }; let span = expr.span; @@ -200,12 +213,7 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> { // We need to inspect the original expression, because if we only inspect the output of // `eval_bits`, an overflowed value has already been wrapped around. // We mostly copy the logic from the `rustc_lint::OVERFLOWING_LITERALS` lint. 
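The next hunk switches this check over to `hir::PatExprKind::Lit { lit, negated }`, so the sign of a negative literal pattern travels with the literal itself instead of being peeled off a unary-negation expression, and `lit_to_const` folds the negation in before truncating. The reason the sign has to be carried alongside the literal is that a value like `-128_i8` is only representable once the negation is applied. A rough standalone sketch of that arithmetic (plain Rust, not the compiler's `ScalarInt` machinery):

    // Truncate a u128 to `bits` bits, mirroring what the `trunc` closure does.
    fn trunc(value: u128, bits: u32) -> u128 {
        if bits == 128 { value } else { value & ((1u128 << bits) - 1) }
    }

    // Evaluate an integer literal pattern, applying the `negated` flag before truncation.
    fn eval_i8_literal(lit: u128, negated: bool) -> i8 {
        let raw = if negated { (lit as i128).overflowing_neg().0 as u128 } else { lit };
        trunc(raw, 8) as u8 as i8
    }

    fn main() {
        // `128_i8` on its own would overflow, but `-128_i8` is fine once the
        // negation is folded into the literal.
        assert_eq!(eval_i8_literal(128, true), -128);
        assert_eq!(eval_i8_literal(127, false), 127);
    }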
- let mut negated = false; - if let ExprKind::Unary(UnOp::Neg, sub_expr) = expr.kind { - negated = true; - expr = sub_expr; - } - let ExprKind::Lit(lit) = expr.kind else { + let hir::PatExprKind::Lit { lit, negated } = expr.kind else { return Ok(()); }; let LitKind::Int(lit_val, _) = lit.node else { @@ -234,8 +242,8 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> { fn lower_pattern_range( &mut self, - lo_expr: Option<&'tcx hir::Expr<'tcx>>, - hi_expr: Option<&'tcx hir::Expr<'tcx>>, + lo_expr: Option<&'tcx hir::PatExpr<'tcx>>, + hi_expr: Option<&'tcx hir::PatExpr<'tcx>>, end: RangeEnd, ty: Ty<'tcx>, span: Span, @@ -316,7 +324,7 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> { hir::PatKind::Never => PatKind::Never, - hir::PatKind::Lit(value) => self.lower_lit(value), + hir::PatKind::Expr(value) => self.lower_lit(value), hir::PatKind::Range(ref lo_expr, ref hi_expr, end) => { let (lo_expr, hi_expr) = (lo_expr.as_deref(), hi_expr.as_deref()); @@ -371,7 +379,8 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> { s.suggestion.push(( pat.span.with_lo(ident.span.lo()).shrink_to_lo(), sugg_str.to_owned(), - )) + )); + s.binding_mode_count += 1; } // A ref x pattern is the same node used for x, and as such it has @@ -420,6 +429,9 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> { hir::PatKind::Or(pats) => PatKind::Or { pats: self.lower_patterns(pats) }, + // FIXME(guard_patterns): implement guard pattern lowering + hir::PatKind::Guard(pat, _) => self.lower_pattern(pat).kind, + hir::PatKind::Err(guar) => PatKind::Error(guar), }; @@ -644,31 +656,21 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> { /// The special case for negation exists to allow things like `-128_i8` /// which would overflow if we tried to evaluate `128_i8` and then negate /// afterwards. - fn lower_lit(&mut self, expr: &'tcx hir::Expr<'tcx>) -> PatKind<'tcx> { - let (lit, neg) = match expr.kind { - hir::ExprKind::Path(ref qpath) => { + fn lower_lit(&mut self, expr: &'tcx hir::PatExpr<'tcx>) -> PatKind<'tcx> { + let (lit, neg) = match &expr.kind { + hir::PatExprKind::Path(qpath) => { return self.lower_path(qpath, expr.hir_id, expr.span).kind; } - hir::ExprKind::ConstBlock(ref anon_const) => { + hir::PatExprKind::ConstBlock(anon_const) => { return self.lower_inline_const(anon_const, expr.hir_id, expr.span); } - hir::ExprKind::Lit(ref lit) => (lit, false), - hir::ExprKind::Unary(hir::UnOp::Neg, ref expr) => { - let hir::ExprKind::Lit(ref lit) = expr.kind else { - span_bug!(expr.span, "not a literal: {:?}", expr); - }; - (lit, true) - } - _ => span_bug!(expr.span, "not a literal: {:?}", expr), + hir::PatExprKind::Lit { lit, negated } => (lit, *negated), }; - let ct_ty = self.typeck_results.expr_ty(expr); + let ct_ty = self.typeck_results.node_type(expr.hir_id); let lit_input = LitToConstInput { lit: &lit.node, ty: ct_ty, neg }; - match self.tcx.at(expr.span).lit_to_const(lit_input) { - Ok(constant) => self.const_to_pat(constant, ct_ty, expr.hir_id, lit.span).kind, - Err(LitToConstError::Reported(e)) => PatKind::Error(e), - Err(LitToConstError::TypeError) => bug!("lower_lit: had type error"), - } + let constant = self.tcx.at(expr.span).lit_to_const(lit_input); + self.const_to_pat(constant, ct_ty, expr.hir_id, lit.span).kind } } diff --git a/compiler/rustc_mir_dataflow/src/debuginfo.rs b/compiler/rustc_mir_dataflow/src/debuginfo.rs index fd5e8cf295552..0d25ce91c9a9e 100644 --- a/compiler/rustc_mir_dataflow/src/debuginfo.rs +++ b/compiler/rustc_mir_dataflow/src/debuginfo.rs @@ -1,17 +1,17 @@ -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::mir::visit::*; use 
rustc_middle::mir::*; /// Return the set of locals that appear in debuginfo. -pub fn debuginfo_locals(body: &Body<'_>) -> BitSet { - let mut visitor = DebuginfoLocals(BitSet::new_empty(body.local_decls.len())); +pub fn debuginfo_locals(body: &Body<'_>) -> DenseBitSet { + let mut visitor = DebuginfoLocals(DenseBitSet::new_empty(body.local_decls.len())); for debuginfo in body.var_debug_info.iter() { visitor.visit_var_debug_info(debuginfo); } visitor.0 } -struct DebuginfoLocals(BitSet); +struct DebuginfoLocals(DenseBitSet); impl Visitor<'_> for DebuginfoLocals { fn visit_local(&mut self, local: Local, _: PlaceContext, _: Location) { diff --git a/compiler/rustc_mir_dataflow/src/elaborate_drops.rs b/compiler/rustc_mir_dataflow/src/elaborate_drops.rs index 494b7d54d8a12..f8a8467494753 100644 --- a/compiler/rustc_mir_dataflow/src/elaborate_drops.rs +++ b/compiler/rustc_mir_dataflow/src/elaborate_drops.rs @@ -1,4 +1,4 @@ -use std::{fmt, iter}; +use std::{fmt, iter, mem}; use rustc_abi::{FIRST_VARIANT, FieldIdx, VariantIdx}; use rustc_hir::lang_items::LangItem; @@ -6,6 +6,7 @@ use rustc_index::Idx; use rustc_middle::mir::patch::MirPatch; use rustc_middle::mir::*; use rustc_middle::span_bug; +use rustc_middle::ty::adjustment::PointerCoercion; use rustc_middle::ty::util::IntTypeExt; use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt}; use rustc_span::DUMMY_SP; @@ -738,8 +739,13 @@ where loop_block } - fn open_drop_for_array(&mut self, ety: Ty<'tcx>, opt_size: Option) -> BasicBlock { - debug!("open_drop_for_array({:?}, {:?})", ety, opt_size); + fn open_drop_for_array( + &mut self, + array_ty: Ty<'tcx>, + ety: Ty<'tcx>, + opt_size: Option, + ) -> BasicBlock { + debug!("open_drop_for_array({:?}, {:?}, {:?})", array_ty, ety, opt_size); let tcx = self.tcx(); if let Some(size) = opt_size { @@ -801,13 +807,50 @@ where } } - self.drop_loop_pair(ety) + let array_ptr_ty = Ty::new_mut_ptr(tcx, array_ty); + let array_ptr = self.new_temp(array_ptr_ty); + + let slice_ty = Ty::new_slice(tcx, ety); + let slice_ptr_ty = Ty::new_mut_ptr(tcx, slice_ty); + let slice_ptr = self.new_temp(slice_ptr_ty); + + let mut delegate_block = BasicBlockData { + statements: vec![ + self.assign(Place::from(array_ptr), Rvalue::RawPtr(Mutability::Mut, self.place)), + self.assign( + Place::from(slice_ptr), + Rvalue::Cast( + CastKind::PointerCoercion( + PointerCoercion::Unsize, + CoercionSource::Implicit, + ), + Operand::Move(Place::from(array_ptr)), + slice_ptr_ty, + ), + ), + ], + is_cleanup: self.unwind.is_cleanup(), + terminator: None, + }; + + let array_place = mem::replace( + &mut self.place, + Place::from(slice_ptr).project_deeper(&[PlaceElem::Deref], tcx), + ); + let slice_block = self.drop_loop_pair_for_slice(ety); + self.place = array_place; + + delegate_block.terminator = Some(Terminator { + source_info: self.source_info, + kind: TerminatorKind::Goto { target: slice_block }, + }); + self.elaborator.patch().new_block(delegate_block) } /// Creates a pair of drop-loops of `place`, which drops its contents, even /// in the case of 1 panic. 
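The reworked `open_drop_for_array` above takes a raw pointer to the whole array, unsizes it to a `*mut [T]` slice pointer, and then delegates to the slice drop loop; that loop (see `drop_loop_pair_for_slice` just below) recovers the length with `UnOp::PtrMetadata` instead of `Rvalue::Len`. A surface-level Rust sketch of the same pointer dance, assuming a recent toolchain where `len` on raw slice pointers is stable:

    fn slice_view_len<T, const N: usize>(array: &mut [T; N]) -> usize {
        // Take the address of the whole array, like the `Rvalue::RawPtr` statement above.
        let array_ptr: *mut [T; N] = array;
        // Unsizing coercion, the `PointerCoercion::Unsize` cast added by the drop shim.
        let slice_ptr: *mut [T] = array_ptr;
        // The slice pointer's metadata is its length; this is what `UnOp::PtrMetadata`
        // reads, replacing the old `Rvalue::Len` of the array place.
        slice_ptr.len()
    }

    fn main() {
        let mut data = [String::from("a"), String::from("b"), String::from("c")];
        assert_eq!(slice_view_len(&mut data), 3);
    }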
- fn drop_loop_pair(&mut self, ety: Ty<'tcx>) -> BasicBlock { - debug!("drop_loop_pair({:?})", ety); + fn drop_loop_pair_for_slice(&mut self, ety: Ty<'tcx>) -> BasicBlock { + debug!("drop_loop_pair_for_slice({:?})", ety); let tcx = self.tcx(); let len = self.new_temp(tcx.types.usize); let cur = self.new_temp(tcx.types.usize); @@ -817,10 +860,24 @@ where let loop_block = self.drop_loop(self.succ, cur, len, ety, unwind); + let [PlaceElem::Deref] = self.place.projection.as_slice() else { + span_bug!( + self.source_info.span, + "Expected place for slice drop shim to be *_n, but it's {:?}", + self.place, + ); + }; + let zero = self.constant_usize(0); let block = BasicBlockData { statements: vec![ - self.assign(len.into(), Rvalue::Len(self.place)), + self.assign( + len.into(), + Rvalue::UnaryOp( + UnOp::PtrMetadata, + Operand::Copy(Place::from(self.place.local)), + ), + ), self.assign(cur.into(), Rvalue::Use(zero)), ], is_cleanup: unwind.is_cleanup(), @@ -863,9 +920,9 @@ where ty::Dynamic(..) => self.complete_drop(self.succ, self.unwind), ty::Array(ety, size) => { let size = size.try_to_target_usize(self.tcx()); - self.open_drop_for_array(*ety, size) + self.open_drop_for_array(ty, *ety, size) } - ty::Slice(ety) => self.drop_loop_pair(*ety), + ty::Slice(ety) => self.drop_loop_pair_for_slice(*ety), _ => span_bug!(self.source_info.span, "open drop from non-ADT `{:?}`", ty), } diff --git a/compiler/rustc_mir_dataflow/src/framework/cursor.rs b/compiler/rustc_mir_dataflow/src/framework/cursor.rs index 89ff93d9943a4..c46ae9775cf68 100644 --- a/compiler/rustc_mir_dataflow/src/framework/cursor.rs +++ b/compiler/rustc_mir_dataflow/src/framework/cursor.rs @@ -4,7 +4,7 @@ use std::cmp::Ordering; use std::ops::{Deref, DerefMut}; #[cfg(debug_assertions)] -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::mir::{self, BasicBlock, Location}; use super::{Analysis, Direction, Effect, EffectIndex, Results}; @@ -71,7 +71,7 @@ where state_needs_reset: bool, #[cfg(debug_assertions)] - reachable_blocks: BitSet, + reachable_blocks: DenseBitSet, } impl<'mir, 'tcx, A> ResultsCursor<'mir, 'tcx, A> diff --git a/compiler/rustc_mir_dataflow/src/framework/direction.rs b/compiler/rustc_mir_dataflow/src/framework/direction.rs index 9d943ebe327bc..6427fd0fd295b 100644 --- a/compiler/rustc_mir_dataflow/src/framework/direction.rs +++ b/compiler/rustc_mir_dataflow/src/framework/direction.rs @@ -1,8 +1,6 @@ use std::ops::RangeInclusive; -use rustc_middle::mir::{ - self, BasicBlock, CallReturnPlaces, Location, SwitchTargets, TerminatorEdges, -}; +use rustc_middle::mir::{self, BasicBlock, CallReturnPlaces, Location, TerminatorEdges}; use super::visitor::ResultsVisitor; use super::{Analysis, Effect, EffectIndex, Results, SwitchIntTarget}; @@ -78,8 +76,6 @@ impl Direction for Backward { for pred in body.basic_blocks.predecessors()[block].iter().copied() { match body[pred].terminator().kind { // Apply terminator-specific edge effects. - // - // FIXME(ecstaticmorse): Avoid cloning the exit state unconditionally. mir::TerminatorKind::Call { destination, target: Some(dest), .. 
} if dest == block => { @@ -115,18 +111,18 @@ impl Direction for Backward { } mir::TerminatorKind::SwitchInt { targets: _, ref discr } => { - let mut applier = BackwardSwitchIntEdgeEffectsApplier { - body, - pred, - exit_state, - block, - propagate: &mut propagate, - effects_applied: false, - }; - - analysis.apply_switch_int_edge_effects(pred, discr, &mut applier); - - if !applier.effects_applied { + if let Some(mut data) = analysis.get_switch_int_data(block, discr) { + let values = &body.basic_blocks.switch_sources()[&(block, pred)]; + let targets = + values.iter().map(|&value| SwitchIntTarget { value, target: block }); + + let mut tmp = analysis.bottom_value(body); + for target in targets { + tmp.clone_from(&exit_state); + analysis.apply_switch_int_edge_effect(&mut data, &mut tmp, target); + propagate(pred, &tmp); + } + } else { propagate(pred, exit_state) } } @@ -245,37 +241,6 @@ impl Direction for Backward { } } -struct BackwardSwitchIntEdgeEffectsApplier<'mir, 'tcx, D, F> { - body: &'mir mir::Body<'tcx>, - pred: BasicBlock, - exit_state: &'mir mut D, - block: BasicBlock, - propagate: &'mir mut F, - effects_applied: bool, -} - -impl super::SwitchIntEdgeEffects for BackwardSwitchIntEdgeEffectsApplier<'_, '_, D, F> -where - D: Clone, - F: FnMut(BasicBlock, &D), -{ - fn apply(&mut self, mut apply_edge_effect: impl FnMut(&mut D, SwitchIntTarget)) { - assert!(!self.effects_applied); - - let values = &self.body.basic_blocks.switch_sources()[&(self.block, self.pred)]; - let targets = values.iter().map(|&value| SwitchIntTarget { value, target: self.block }); - - let mut tmp = None; - for target in targets { - let tmp = opt_clone_from_or_clone(&mut tmp, self.exit_state); - apply_edge_effect(tmp, target); - (self.propagate)(self.pred, tmp); - } - - self.effects_applied = true; - } -} - /// Dataflow that runs from the entry of a block (the first statement), to its exit (terminator). pub struct Forward; @@ -284,7 +249,7 @@ impl Direction for Forward { fn apply_effects_in_block<'mir, 'tcx, A>( analysis: &mut A, - _body: &mir::Body<'tcx>, + body: &mir::Body<'tcx>, state: &mut A::Domain, block: BasicBlock, block_data: &'mir mir::BasicBlockData<'tcx>, @@ -324,23 +289,28 @@ impl Direction for Forward { } } TerminatorEdges::SwitchInt { targets, discr } => { - let mut applier = ForwardSwitchIntEdgeEffectsApplier { - exit_state, - targets, - propagate, - effects_applied: false, - }; - - analysis.apply_switch_int_edge_effects(block, discr, &mut applier); - - let ForwardSwitchIntEdgeEffectsApplier { - exit_state, - mut propagate, - effects_applied, - .. - } = applier; - - if !effects_applied { + if let Some(mut data) = analysis.get_switch_int_data(block, discr) { + let mut tmp = analysis.bottom_value(body); + for (value, target) in targets.iter() { + tmp.clone_from(&exit_state); + analysis.apply_switch_int_edge_effect( + &mut data, + &mut tmp, + SwitchIntTarget { value: Some(value), target }, + ); + propagate(target, &tmp); + } + + // Once we get to the final, "otherwise" branch, there is no need to preserve + // `exit_state`, so pass it directly to `apply_switch_int_edge_effect` to save + // a clone of the dataflow state. 
+ let otherwise = targets.otherwise(); + analysis.apply_switch_int_edge_effect(&mut data, exit_state, SwitchIntTarget { + value: None, + target: otherwise, + }); + propagate(otherwise, exit_state); + } else { for target in targets.all_targets() { propagate(*target, exit_state); } @@ -454,54 +424,3 @@ impl Direction for Forward { vis.visit_block_end(state); } } - -struct ForwardSwitchIntEdgeEffectsApplier<'mir, D, F> { - exit_state: &'mir mut D, - targets: &'mir SwitchTargets, - propagate: F, - - effects_applied: bool, -} - -impl super::SwitchIntEdgeEffects for ForwardSwitchIntEdgeEffectsApplier<'_, D, F> -where - D: Clone, - F: FnMut(BasicBlock, &D), -{ - fn apply(&mut self, mut apply_edge_effect: impl FnMut(&mut D, SwitchIntTarget)) { - assert!(!self.effects_applied); - - let mut tmp = None; - for (value, target) in self.targets.iter() { - let tmp = opt_clone_from_or_clone(&mut tmp, self.exit_state); - apply_edge_effect(tmp, SwitchIntTarget { value: Some(value), target }); - (self.propagate)(target, tmp); - } - - // Once we get to the final, "otherwise" branch, there is no need to preserve `exit_state`, - // so pass it directly to `apply_edge_effect` to save a clone of the dataflow state. - let otherwise = self.targets.otherwise(); - apply_edge_effect(self.exit_state, SwitchIntTarget { value: None, target: otherwise }); - (self.propagate)(otherwise, self.exit_state); - - self.effects_applied = true; - } -} - -/// An analogue of `Option::get_or_insert_with` that stores a clone of `val` into `opt`, but uses -/// the more efficient `clone_from` if `opt` was `Some`. -/// -/// Returns a mutable reference to the new clone that resides in `opt`. -// -// FIXME: Figure out how to express this using `Option::clone_from`, or maybe lift it into the -// standard library? -fn opt_clone_from_or_clone<'a, T: Clone>(opt: &'a mut Option, val: &T) -> &'a mut T { - if opt.is_some() { - let ret = opt.as_mut().unwrap(); - ret.clone_from(val); - ret - } else { - *opt = Some(val.clone()); - opt.as_mut().unwrap() - } -} diff --git a/compiler/rustc_mir_dataflow/src/framework/fmt.rs b/compiler/rustc_mir_dataflow/src/framework/fmt.rs index faf2c411ddefe..38599cd094933 100644 --- a/compiler/rustc_mir_dataflow/src/framework/fmt.rs +++ b/compiler/rustc_mir_dataflow/src/framework/fmt.rs @@ -4,7 +4,7 @@ use std::fmt; use rustc_index::Idx; -use rustc_index::bit_set::{BitSet, ChunkedBitSet, MixedBitSet}; +use rustc_index::bit_set::{ChunkedBitSet, DenseBitSet, MixedBitSet}; use super::lattice::MaybeReachable; @@ -73,7 +73,7 @@ where // Impls -impl DebugWithContext for BitSet +impl DebugWithContext for DenseBitSet where T: Idx + DebugWithContext, { diff --git a/compiler/rustc_mir_dataflow/src/framework/graphviz.rs b/compiler/rustc_mir_dataflow/src/framework/graphviz.rs index 5b2b128e03574..e457b514936bf 100644 --- a/compiler/rustc_mir_dataflow/src/framework/graphviz.rs +++ b/compiler/rustc_mir_dataflow/src/framework/graphviz.rs @@ -9,14 +9,14 @@ use std::{io, ops, str}; use regex::Regex; use rustc_hir::def_id::DefId; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::mir::{ self, BasicBlock, Body, Location, create_dump_file, dump_enabled, graphviz_safe_def_name, traversal, }; use rustc_middle::ty::TyCtxt; use rustc_middle::ty::print::with_no_trimmed_paths; -use rustc_span::symbol::{Symbol, sym}; +use rustc_span::{Symbol, sym}; use tracing::debug; use {rustc_ast as ast, rustc_graphviz as dot}; @@ -205,7 +205,7 @@ where // the operations that involve the mutation, i.e. 
within the `borrow_mut`. cursor: RefCell>, style: OutputStyle, - reachable: BitSet, + reachable: DenseBitSet, } impl<'mir, 'tcx, A> Formatter<'mir, 'tcx, A> diff --git a/compiler/rustc_mir_dataflow/src/framework/lattice.rs b/compiler/rustc_mir_dataflow/src/framework/lattice.rs index cb8159ce37bf3..919e346bda747 100644 --- a/compiler/rustc_mir_dataflow/src/framework/lattice.rs +++ b/compiler/rustc_mir_dataflow/src/framework/lattice.rs @@ -39,7 +39,7 @@ //! [poset]: https://en.wikipedia.org/wiki/Partially_ordered_set use rustc_index::Idx; -use rustc_index::bit_set::{BitSet, MixedBitSet}; +use rustc_index::bit_set::{DenseBitSet, MixedBitSet}; use crate::framework::BitSetExt; @@ -68,10 +68,10 @@ pub trait HasTop { const TOP: Self; } -/// A `BitSet` represents the lattice formed by the powerset of all possible values of -/// the index type `T` ordered by inclusion. Equivalently, it is a tuple of "two-point" lattices, -/// one for each possible value of `T`. -impl JoinSemiLattice for BitSet { +/// A `DenseBitSet` represents the lattice formed by the powerset of all possible values of the +/// index type `T` ordered by inclusion. Equivalently, it is a tuple of "two-point" lattices, one +/// for each possible value of `T`. +impl JoinSemiLattice for DenseBitSet { fn join(&mut self, other: &Self) -> bool { self.union(other) } diff --git a/compiler/rustc_mir_dataflow/src/framework/mod.rs b/compiler/rustc_mir_dataflow/src/framework/mod.rs index 41df5fae0deaa..60c5cb0cae8ad 100644 --- a/compiler/rustc_mir_dataflow/src/framework/mod.rs +++ b/compiler/rustc_mir_dataflow/src/framework/mod.rs @@ -35,7 +35,7 @@ use std::cmp::Ordering; use rustc_data_structures::work_queue::WorkQueue; -use rustc_index::bit_set::{BitSet, MixedBitSet}; +use rustc_index::bit_set::{DenseBitSet, MixedBitSet}; use rustc_index::{Idx, IndexVec}; use rustc_middle::bug; use rustc_middle::mir::{self, BasicBlock, CallReturnPlaces, Location, TerminatorEdges, traversal}; @@ -65,7 +65,7 @@ pub trait BitSetExt { fn contains(&self, elem: T) -> bool; } -impl BitSetExt for BitSet { +impl BitSetExt for DenseBitSet { fn contains(&self, elem: T) -> bool { self.contains(elem) } @@ -103,6 +103,9 @@ pub trait Analysis<'tcx> { /// The direction of this analysis. Either `Forward` or `Backward`. type Direction: Direction = Forward; + /// Auxiliary data used for analyzing `SwitchInt` terminators, if necessary. + type SwitchIntData = !; + /// A descriptive name for this analysis. Used only for debugging. /// /// This name should be brief and contain no spaces, periods or other characters that are not @@ -190,25 +193,36 @@ pub trait Analysis<'tcx> { ) { } - /// Updates the current dataflow state with the effect of taking a particular branch in a - /// `SwitchInt` terminator. + /// Used to update the current dataflow state with the effect of taking a particular branch in + /// a `SwitchInt` terminator. /// /// Unlike the other edge-specific effects, which are allowed to mutate `Self::Domain` - /// directly, overriders of this method must pass a callback to - /// `SwitchIntEdgeEffects::apply`. The callback will be run once for each outgoing edge and - /// will have access to the dataflow state that will be propagated along that edge. + /// directly, overriders of this method must return a `Self::SwitchIntData` value (wrapped in + /// `Some`). 
The `apply_switch_int_edge_effect` method will then be called once for each + /// outgoing edge and will have access to the dataflow state that will be propagated along that + /// edge, and also the `Self::SwitchIntData` value. /// /// This interface is somewhat more complex than the other visitor-like "effect" methods. /// However, it is both more ergonomic—callers don't need to recompute or cache information /// about a given `SwitchInt` terminator for each one of its edges—and more efficient—the /// engine doesn't need to clone the exit state for a block unless - /// `SwitchIntEdgeEffects::apply` is actually called. - fn apply_switch_int_edge_effects( + /// `get_switch_int_data` is actually called. + fn get_switch_int_data( &mut self, - _block: BasicBlock, + _block: mir::BasicBlock, _discr: &mir::Operand<'tcx>, - _apply_edge_effects: &mut impl SwitchIntEdgeEffects, + ) -> Option { + None + } + + /// See comments on `get_switch_int_data`. + fn apply_switch_int_edge_effect( + &mut self, + _data: &mut Self::SwitchIntData, + _state: &mut Self::Domain, + _edge: SwitchIntTarget, ) { + unreachable!(); } /* Extension methods */ @@ -320,7 +334,7 @@ pub trait GenKill { } } -impl GenKill for BitSet { +impl GenKill for DenseBitSet { fn gen_(&mut self, elem: T) { self.insert(elem); } @@ -421,12 +435,5 @@ pub struct SwitchIntTarget { pub target: BasicBlock, } -/// A type that records the edge-specific effects for a `SwitchInt` terminator. -pub trait SwitchIntEdgeEffects { - /// Calls `apply_edge_effect` for each outgoing edge from a `SwitchInt` terminator and - /// records the results. - fn apply(&mut self, apply_edge_effect: impl FnMut(&mut D, SwitchIntTarget)); -} - #[cfg(test)] mod tests; diff --git a/compiler/rustc_mir_dataflow/src/framework/tests.rs b/compiler/rustc_mir_dataflow/src/framework/tests.rs index 8e7d4ab0fa33b..5b3a9ccba695d 100644 --- a/compiler/rustc_mir_dataflow/src/framework/tests.rs +++ b/compiler/rustc_mir_dataflow/src/framework/tests.rs @@ -84,13 +84,13 @@ impl MockAnalysis<'_, D> { /// The entry set for each `BasicBlock` is the ID of that block offset by a fixed amount to /// avoid colliding with the statement/terminator effects. - fn mock_entry_set(&self, bb: BasicBlock) -> BitSet { + fn mock_entry_set(&self, bb: BasicBlock) -> DenseBitSet { let mut ret = self.bottom_value(self.body); ret.insert(Self::BASIC_BLOCK_OFFSET + bb.index()); ret } - fn mock_entry_states(&self) -> IndexVec> { + fn mock_entry_states(&self) -> IndexVec> { let empty = self.bottom_value(self.body); let mut ret = IndexVec::from_elem(empty, &self.body.basic_blocks); @@ -121,7 +121,7 @@ impl MockAnalysis<'_, D> { /// For example, the expected state when calling /// `seek_before_primary_effect(Location { block: 2, statement_index: 2 })` /// would be `[102, 0, 1, 2, 3, 4]`. 
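The callback-based `SwitchIntEdgeEffects` trait removed above is replaced by a pair of methods: `get_switch_int_data`, which runs once per `SwitchInt` block, decides whether edge-specific effects apply at all, and precomputes what they need; and `apply_switch_int_edge_effect`, which the engine then calls per outgoing edge. (In the real trait, `SwitchIntData` defaults to `!`, which is why `#![feature(never_type)]` is added to `lib.rs` later in this diff.) A toy sketch of that two-phase shape outside the dataflow framework; all names below are invented for illustration:

    // A stand-in for a dataflow domain; `Clone` is what the engine needs per edge.
    type Domain = Vec<u32>;

    struct EdgeData {
        // whatever per-block information the analysis wants to reuse on every edge
        scale: u32,
    }

    trait ToySwitchAnalysis {
        // Phase 1: run once per `SwitchInt` block. `None` means "no edge-specific effect",
        // so the engine propagates the exit state unchanged without cloning it.
        fn get_switch_int_data(&mut self, block: usize) -> Option<EdgeData>;

        // Phase 2: run once per outgoing edge, mutating a clone of the exit state.
        fn apply_switch_int_edge_effect(&mut self, data: &mut EdgeData, state: &mut Domain, value: u32);
    }

    fn propagate_switch<A: ToySwitchAnalysis>(
        analysis: &mut A,
        block: usize,
        exit_state: &Domain,
        edge_values: &[u32],
        mut propagate: impl FnMut(u32, &Domain),
    ) {
        if let Some(mut data) = analysis.get_switch_int_data(block) {
            for &value in edge_values {
                let mut tmp = exit_state.clone(); // clone only when effects actually apply
                analysis.apply_switch_int_edge_effect(&mut data, &mut tmp, value);
                propagate(value, &tmp);
            }
        } else {
            for &value in edge_values {
                propagate(value, exit_state);
            }
        }
    }

    struct Doubler;
    impl ToySwitchAnalysis for Doubler {
        fn get_switch_int_data(&mut self, _block: usize) -> Option<EdgeData> {
            Some(EdgeData { scale: 2 })
        }
        fn apply_switch_int_edge_effect(&mut self, data: &mut EdgeData, state: &mut Domain, value: u32) {
            state.push(data.scale * value);
        }
    }

    fn main() {
        let mut out = Vec::new();
        propagate_switch(&mut Doubler, 0, &vec![1], &[10, 20], |v, d| out.push((v, d.clone())));
        assert_eq!(out, vec![(10, vec![1, 20]), (20, vec![1, 40])]);
    }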
- fn expected_state_at_target(&self, target: SeekTarget) -> BitSet { + fn expected_state_at_target(&self, target: SeekTarget) -> DenseBitSet { let block = target.block(); let mut ret = self.bottom_value(self.body); ret.insert(Self::BASIC_BLOCK_OFFSET + block.index()); @@ -155,13 +155,13 @@ impl MockAnalysis<'_, D> { } impl<'tcx, D: Direction> Analysis<'tcx> for MockAnalysis<'tcx, D> { - type Domain = BitSet; + type Domain = DenseBitSet; type Direction = D; const NAME: &'static str = "mock"; fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain { - BitSet::new_empty(Self::BASIC_BLOCK_OFFSET + body.basic_blocks.len()) + DenseBitSet::new_empty(Self::BASIC_BLOCK_OFFSET + body.basic_blocks.len()) } fn initialize_start_block(&self, _: &mir::Body<'tcx>, _: &mut Self::Domain) { diff --git a/compiler/rustc_mir_dataflow/src/framework/visitor.rs b/compiler/rustc_mir_dataflow/src/framework/visitor.rs index d18e9fa33f0c4..a03aecee7be12 100644 --- a/compiler/rustc_mir_dataflow/src/framework/visitor.rs +++ b/compiler/rustc_mir_dataflow/src/framework/visitor.rs @@ -35,7 +35,6 @@ where { fn visit_block_start(&mut self, _state: &A::Domain) {} - /// // njn: grep for "before", "primary", etc. /// Called after the "early" effect of the given statement is applied to `state`. fn visit_after_early_statement_effect( &mut self, diff --git a/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs b/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs index 568d8a5acaf95..f2ef5018c4953 100644 --- a/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs +++ b/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs @@ -1,4 +1,4 @@ -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::mir::visit::Visitor; use rustc_middle::mir::*; @@ -21,12 +21,12 @@ impl MaybeBorrowedLocals { } impl<'tcx> Analysis<'tcx> for MaybeBorrowedLocals { - type Domain = BitSet; + type Domain = DenseBitSet; const NAME: &'static str = "maybe_borrowed_locals"; fn bottom_value(&self, body: &Body<'tcx>) -> Self::Domain { // bottom = unborrowed - BitSet::new_empty(body.local_decls().len()) + DenseBitSet::new_empty(body.local_decls().len()) } fn initialize_start_block(&self, _: &Body<'tcx>, _: &mut Self::Domain) { @@ -91,7 +91,6 @@ where | Rvalue::Use(..) | Rvalue::ThreadLocalRef(..) | Rvalue::Repeat(..) - | Rvalue::Len(..) | Rvalue::BinaryOp(..) | Rvalue::NullaryOp(..) | Rvalue::UnaryOp(..) @@ -138,8 +137,8 @@ where } /// The set of locals that are borrowed at some point in the MIR body. 
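Most domains touched in these dataflow files are sets of `Local`s, which is why the renamed `DenseBitSet` (one bit per possible local) is the natural representation: `gen_`/`kill` become single bit writes and the lattice `join` is a bitwise OR over words. A self-contained sketch of that representation in plain Rust; the real type lives in `rustc_index::bit_set` and has a richer API:

    /// A minimal dense bit set over indices `0..domain_size`, one bit per element.
    struct ToyDenseBitSet {
        words: Vec<u64>,
    }

    impl ToyDenseBitSet {
        fn new_empty(domain_size: usize) -> Self {
            ToyDenseBitSet { words: vec![0; domain_size.div_ceil(64)] }
        }
        fn insert(&mut self, i: usize) {       // "gen" in dataflow terms
            self.words[i / 64] |= 1u64 << (i % 64);
        }
        fn remove(&mut self, i: usize) {       // "kill" in dataflow terms
            self.words[i / 64] &= !(1u64 << (i % 64));
        }
        fn contains(&self, i: usize) -> bool {
            self.words[i / 64] & (1u64 << (i % 64)) != 0
        }
        /// Join for the powerset lattice: bitwise union. Returns true if `self` changed.
        fn union(&mut self, other: &Self) -> bool {
            let mut changed = false;
            for (a, b) in self.words.iter_mut().zip(&other.words) {
                let old = *a;
                *a |= b;
                changed |= *a != old;
            }
            changed
        }
    }

    fn main() {
        let mut live_at_bb0 = ToyDenseBitSet::new_empty(100);
        let mut live_at_bb1 = ToyDenseBitSet::new_empty(100);
        live_at_bb0.insert(3);
        live_at_bb1.insert(3);
        live_at_bb1.insert(97);
        assert!(live_at_bb0.union(&live_at_bb1)); // local 97 becomes live, so the state changed
        assert!(live_at_bb0.contains(97));
        live_at_bb0.remove(3);
        assert!(!live_at_bb0.contains(3));
    }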
-pub fn borrowed_locals(body: &Body<'_>) -> BitSet { - struct Borrowed(BitSet); +pub fn borrowed_locals(body: &Body<'_>) -> DenseBitSet { + struct Borrowed(DenseBitSet); impl GenKill for Borrowed { #[inline] @@ -152,7 +151,7 @@ pub fn borrowed_locals(body: &Body<'_>) -> BitSet { } } - let mut borrowed = Borrowed(BitSet::new_empty(body.local_decls.len())); + let mut borrowed = Borrowed(DenseBitSet::new_empty(body.local_decls.len())); TransferFunction { trans: &mut borrowed }.visit_body(body); borrowed.0 } diff --git a/compiler/rustc_mir_dataflow/src/impls/initialized.rs b/compiler/rustc_mir_dataflow/src/impls/initialized.rs index fb02408e17dfa..760f94af52dd3 100644 --- a/compiler/rustc_mir_dataflow/src/impls/initialized.rs +++ b/compiler/rustc_mir_dataflow/src/impls/initialized.rs @@ -1,20 +1,96 @@ use std::assert_matches::assert_matches; +use rustc_abi::VariantIdx; use rustc_index::Idx; -use rustc_index::bit_set::{BitSet, MixedBitSet}; +use rustc_index::bit_set::{DenseBitSet, MixedBitSet}; use rustc_middle::bug; use rustc_middle::mir::{self, Body, CallReturnPlaces, Location, TerminatorEdges}; +use rustc_middle::ty::util::Discr; use rustc_middle::ty::{self, TyCtxt}; use tracing::{debug, instrument}; use crate::elaborate_drops::DropFlagState; -use crate::framework::SwitchIntEdgeEffects; +use crate::framework::SwitchIntTarget; use crate::move_paths::{HasMoveData, InitIndex, InitKind, LookupResult, MoveData, MovePathIndex}; use crate::{ Analysis, GenKill, MaybeReachable, drop_flag_effects, drop_flag_effects_for_function_entry, drop_flag_effects_for_location, on_all_children_bits, on_lookup_result_bits, }; +// Used by both `MaybeInitializedPlaces` and `MaybeUninitializedPlaces`. +pub struct MaybePlacesSwitchIntData<'tcx> { + enum_place: mir::Place<'tcx>, + discriminants: Vec<(VariantIdx, Discr<'tcx>)>, + index: usize, +} + +impl<'tcx> MaybePlacesSwitchIntData<'tcx> { + // The discriminant order in the `SwitchInt` targets should match the order yielded by + // `AdtDef::discriminants`. We rely on this to match each discriminant in the targets to its + // corresponding variant in linear time. + fn next_discr(&mut self, value: u128) -> VariantIdx { + // An out-of-bounds abort will occur if the discriminant ordering isn't as described above. + loop { + let (variant, discr) = self.discriminants[self.index]; + self.index += 1; + if discr.val == value { + return variant; + } + } + } +} + +impl<'tcx> MaybePlacesSwitchIntData<'tcx> { + fn new( + tcx: TyCtxt<'tcx>, + body: &Body<'tcx>, + block: mir::BasicBlock, + discr: &mir::Operand<'tcx>, + ) -> Option { + let Some(discr) = discr.place() else { return None }; + + // Inspect a `SwitchInt`-terminated basic block to see if the condition of that `SwitchInt` + // is an enum discriminant. + // + // We expect such blocks to have a call to `discriminant` as their last statement like so: + // ```text + // ... + // _42 = discriminant(_1) + // SwitchInt(_42, ..) + // ``` + // If the basic block matches this pattern, this function gathers the place corresponding + // to the enum (`_1` in the example above) as well as the discriminants. 
+ let block_data = &body[block]; + for statement in block_data.statements.iter().rev() { + match statement.kind { + mir::StatementKind::Assign(box (lhs, mir::Rvalue::Discriminant(enum_place))) + if lhs == discr => + { + match enum_place.ty(body, tcx).ty.kind() { + ty::Adt(enum_def, _) => { + return Some(MaybePlacesSwitchIntData { + enum_place, + discriminants: enum_def.discriminants(tcx).collect(), + index: 0, + }); + } + + // `Rvalue::Discriminant` is also used to get the active yield point for a + // coroutine, but we do not need edge-specific effects in that case. This + // may change in the future. + ty::Coroutine(..) => break, + + t => bug!("`discriminant` called on unexpected type {:?}", t), + } + } + mir::StatementKind::Coverage(_) => continue, + _ => break, + } + } + None + } +} + /// `MaybeInitializedPlaces` tracks all places that might be /// initialized upon reaching a particular point in the control flow /// for a function. @@ -131,7 +207,7 @@ pub struct MaybeUninitializedPlaces<'a, 'tcx> { move_data: &'a MoveData<'tcx>, mark_inactive_variants_as_uninit: bool, - skip_unreachable_unwind: BitSet, + skip_unreachable_unwind: DenseBitSet, } impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> { @@ -141,7 +217,7 @@ impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> { body, move_data, mark_inactive_variants_as_uninit: false, - skip_unreachable_unwind: BitSet::new_empty(body.basic_blocks.len()), + skip_unreachable_unwind: DenseBitSet::new_empty(body.basic_blocks.len()), } } @@ -157,7 +233,7 @@ impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> { pub fn skipping_unreachable_unwind( mut self, - unreachable_unwind: BitSet, + unreachable_unwind: DenseBitSet, ) -> Self { self.skip_unreachable_unwind = unreachable_unwind; self @@ -247,6 +323,8 @@ impl<'tcx> Analysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> { /// We use a mixed bitset to avoid paying too high a memory footprint. type Domain = MaybeReachable>; + type SwitchIntData = MaybePlacesSwitchIntData<'tcx>; + const NAME: &'static str = "maybe_init"; fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain { @@ -293,6 +371,8 @@ impl<'tcx> Analysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> { terminator: &'mir mir::Terminator<'tcx>, location: Location, ) -> TerminatorEdges<'mir, 'tcx> { + // Note: `edges` must be computed first because `drop_flag_effects_for_location` can change + // the result of `is_unwind_dead`. let mut edges = terminator.edges(); if self.skip_unreachable_unwind && let mir::TerminatorKind::Drop { target, unwind, place, replace: _ } = terminator.kind @@ -326,46 +406,34 @@ impl<'tcx> Analysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> { }); } - fn apply_switch_int_edge_effects( + fn get_switch_int_data( &mut self, block: mir::BasicBlock, discr: &mir::Operand<'tcx>, - edge_effects: &mut impl SwitchIntEdgeEffects, - ) { + ) -> Option { if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration { - return; + return None; } - let enum_ = discr.place().and_then(|discr| { - switch_on_enum_discriminant(self.tcx, self.body, &self.body[block], discr) - }); - - let Some((enum_place, enum_def)) = enum_ else { - return; - }; - - let mut discriminants = enum_def.discriminants(self.tcx); - edge_effects.apply(|state, edge| { - let Some(value) = edge.value else { - return; - }; - - // MIR building adds discriminants to the `values` array in the same order as they - // are yielded by `AdtDef::discriminants`. We rely on this to match each - // discriminant in `values` to its corresponding variant in linear time. 
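The `next_discr` helper above relies on the `SwitchInt` targets listing discriminant values in the same order that `AdtDef::discriminants` yields them, so a single cursor (`index`) matches each edge to its variant without re-searching the list per edge, keeping the overall work linear. A small standalone sketch of that matching strategy, with invented variant/value data; as in the real code, an out-of-bounds index aborts if the ordering assumption is violated:

    struct SwitchIntData {
        /// (variant index, discriminant value), in declaration order.
        discriminants: Vec<(usize, u128)>,
        index: usize,
    }

    impl SwitchIntData {
        /// Advance the cursor until the edge's value is found. Because edges are visited
        /// in the same order as the discriminants, each entry is inspected at most once
        /// across *all* calls.
        fn next_discr(&mut self, value: u128) -> usize {
            loop {
                let (variant, discr) = self.discriminants[self.index];
                self.index += 1;
                if discr == value {
                    return variant;
                }
            }
        }
    }

    fn main() {
        // An enum like `enum E { A = 1, B = 5, C = 9 }`, with `SwitchInt` edges on 1, 5, 9.
        let mut data = SwitchIntData { discriminants: vec![(0, 1), (1, 5), (2, 9)], index: 0 };
        assert_eq!(data.next_discr(1), 0);
        assert_eq!(data.next_discr(5), 1);
        assert_eq!(data.next_discr(9), 2);
    }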
- let (variant, _) = discriminants - .find(|&(_, discr)| discr.val == value) - .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`"); + MaybePlacesSwitchIntData::new(self.tcx, self.body, block, discr) + } + fn apply_switch_int_edge_effect( + &mut self, + data: &mut Self::SwitchIntData, + state: &mut Self::Domain, + edge: SwitchIntTarget, + ) { + if let Some(value) = edge.value { // Kill all move paths that correspond to variants we know to be inactive along this // particular outgoing edge of a `SwitchInt`. drop_flag_effects::on_all_inactive_variants( - self.move_data(), - enum_place, - variant, + self.move_data, + data.enum_place, + data.next_discr(value), |mpi| state.kill(mpi), ); - }); + } } } @@ -376,6 +444,8 @@ pub type MaybeUninitializedPlacesDomain = MixedBitSet; impl<'tcx> Analysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> { type Domain = MaybeUninitializedPlacesDomain; + type SwitchIntData = MaybePlacesSwitchIntData<'tcx>; + const NAME: &'static str = "maybe_uninit"; fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain { @@ -445,50 +515,38 @@ impl<'tcx> Analysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> { }); } - fn apply_switch_int_edge_effects( + fn get_switch_int_data( &mut self, block: mir::BasicBlock, discr: &mir::Operand<'tcx>, - edge_effects: &mut impl SwitchIntEdgeEffects, - ) { + ) -> Option { if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration { - return; + return None; } if !self.mark_inactive_variants_as_uninit { - return; + return None; } - let enum_ = discr.place().and_then(|discr| { - switch_on_enum_discriminant(self.tcx, self.body, &self.body[block], discr) - }); - - let Some((enum_place, enum_def)) = enum_ else { - return; - }; - - let mut discriminants = enum_def.discriminants(self.tcx); - edge_effects.apply(|state, edge| { - let Some(value) = edge.value else { - return; - }; - - // MIR building adds discriminants to the `values` array in the same order as they - // are yielded by `AdtDef::discriminants`. We rely on this to match each - // discriminant in `values` to its corresponding variant in linear time. - let (variant, _) = discriminants - .find(|&(_, discr)| discr.val == value) - .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`"); + MaybePlacesSwitchIntData::new(self.tcx, self.body, block, discr) + } + fn apply_switch_int_edge_effect( + &mut self, + data: &mut Self::SwitchIntData, + state: &mut Self::Domain, + edge: SwitchIntTarget, + ) { + if let Some(value) = edge.value { // Mark all move paths that correspond to variants other than this one as maybe // uninitialized (in reality, they are *definitely* uninitialized). drop_flag_effects::on_all_inactive_variants( - self.move_data(), - enum_place, - variant, + self.move_data, + data.enum_place, + data.next_discr(value), |mpi| state.gen_(mpi), ); - }); + } } } @@ -578,45 +636,3 @@ impl<'tcx> Analysis<'tcx> for EverInitializedPlaces<'_, 'tcx> { } } } - -/// Inspect a `SwitchInt`-terminated basic block to see if the condition of that `SwitchInt` is -/// an enum discriminant. -/// -/// We expect such blocks to have a call to `discriminant` as their last statement like so: -/// -/// ```text -/// ... -/// _42 = discriminant(_1) -/// SwitchInt(_42, ..) -/// ``` -/// -/// If the basic block matches this pattern, this function returns the place corresponding to the -/// enum (`_1` in the example above) as well as the `AdtDef` of that enum. 
-fn switch_on_enum_discriminant<'mir, 'tcx>( - tcx: TyCtxt<'tcx>, - body: &'mir mir::Body<'tcx>, - block: &'mir mir::BasicBlockData<'tcx>, - switch_on: mir::Place<'tcx>, -) -> Option<(mir::Place<'tcx>, ty::AdtDef<'tcx>)> { - for statement in block.statements.iter().rev() { - match &statement.kind { - mir::StatementKind::Assign(box (lhs, mir::Rvalue::Discriminant(discriminated))) - if *lhs == switch_on => - { - match discriminated.ty(body, tcx).ty.kind() { - ty::Adt(def, _) => return Some((*discriminated, *def)), - - // `Rvalue::Discriminant` is also used to get the active yield point for a - // coroutine, but we do not need edge-specific effects in that case. This may - // change in the future. - ty::Coroutine(..) => return None, - - t => bug!("`discriminant` called on unexpected type {:?}", t), - } - } - mir::StatementKind::Coverage(_) => continue, - _ => return None, - } - } - None -} diff --git a/compiler/rustc_mir_dataflow/src/impls/liveness.rs b/compiler/rustc_mir_dataflow/src/impls/liveness.rs index b2050a6adf987..6ec1b03a34e68 100644 --- a/compiler/rustc_mir_dataflow/src/impls/liveness.rs +++ b/compiler/rustc_mir_dataflow/src/impls/liveness.rs @@ -1,4 +1,4 @@ -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor}; use rustc_middle::mir::{ self, CallReturnPlaces, Local, Location, Place, StatementKind, TerminatorEdges, @@ -26,14 +26,14 @@ use crate::{Analysis, Backward, GenKill}; pub struct MaybeLiveLocals; impl<'tcx> Analysis<'tcx> for MaybeLiveLocals { - type Domain = BitSet; + type Domain = DenseBitSet; type Direction = Backward; const NAME: &'static str = "liveness"; fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain { // bottom = not live - BitSet::new_empty(body.local_decls.len()) + DenseBitSet::new_empty(body.local_decls.len()) } fn initialize_start_block(&self, _: &mir::Body<'tcx>, _: &mut Self::Domain) { @@ -81,7 +81,7 @@ impl<'tcx> Analysis<'tcx> for MaybeLiveLocals { } } -pub struct TransferFunction<'a>(pub &'a mut BitSet); +pub struct TransferFunction<'a>(pub &'a mut DenseBitSet); impl<'tcx> Visitor<'tcx> for TransferFunction<'_> { fn visit_place(&mut self, place: &mir::Place<'tcx>, context: PlaceContext, location: Location) { @@ -117,7 +117,7 @@ impl<'tcx> Visitor<'tcx> for TransferFunction<'_> { } } -struct YieldResumeEffect<'a>(&'a mut BitSet); +struct YieldResumeEffect<'a>(&'a mut DenseBitSet); impl<'tcx> Visitor<'tcx> for YieldResumeEffect<'_> { fn visit_place(&mut self, place: &mir::Place<'tcx>, context: PlaceContext, location: Location) { @@ -137,7 +137,7 @@ enum DefUse { } impl DefUse { - fn apply(state: &mut BitSet, place: Place<'_>, context: PlaceContext) { + fn apply(state: &mut DenseBitSet, place: Place<'_>, context: PlaceContext) { match DefUse::for_place(place, context) { Some(DefUse::Def) => state.kill(place.local), Some(DefUse::Use) => state.gen_(place.local), @@ -204,7 +204,7 @@ impl DefUse { /// /// All of the caveats of `MaybeLiveLocals` apply. pub struct MaybeTransitiveLiveLocals<'a> { - always_live: &'a BitSet, + always_live: &'a DenseBitSet, } impl<'a> MaybeTransitiveLiveLocals<'a> { @@ -212,20 +212,20 @@ impl<'a> MaybeTransitiveLiveLocals<'a> { /// considered live. /// /// This should include at least all locals that are ever borrowed. 
- pub fn new(always_live: &'a BitSet) -> Self { + pub fn new(always_live: &'a DenseBitSet) -> Self { MaybeTransitiveLiveLocals { always_live } } } impl<'a, 'tcx> Analysis<'tcx> for MaybeTransitiveLiveLocals<'a> { - type Domain = BitSet; + type Domain = DenseBitSet; type Direction = Backward; const NAME: &'static str = "transitive liveness"; fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain { // bottom = not live - BitSet::new_empty(body.local_decls.len()) + DenseBitSet::new_empty(body.local_decls.len()) } fn initialize_start_block(&self, _: &mir::Body<'tcx>, _: &mut Self::Domain) { diff --git a/compiler/rustc_mir_dataflow/src/impls/storage_liveness.rs b/compiler/rustc_mir_dataflow/src/impls/storage_liveness.rs index 65b480d3a5efc..e3aa8f5a62014 100644 --- a/compiler/rustc_mir_dataflow/src/impls/storage_liveness.rs +++ b/compiler/rustc_mir_dataflow/src/impls/storage_liveness.rs @@ -1,6 +1,6 @@ use std::borrow::Cow; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::mir::visit::{NonMutatingUseContext, PlaceContext, Visitor}; use rustc_middle::mir::*; @@ -10,8 +10,8 @@ use crate::{Analysis, GenKill, ResultsCursor}; /// The set of locals in a MIR body that do not have `StorageLive`/`StorageDead` annotations. /// /// These locals have fixed storage for the duration of the body. -pub fn always_storage_live_locals(body: &Body<'_>) -> BitSet { - let mut always_live_locals = BitSet::new_filled(body.local_decls.len()); +pub fn always_storage_live_locals(body: &Body<'_>) -> DenseBitSet { + let mut always_live_locals = DenseBitSet::new_filled(body.local_decls.len()); for block in &*body.basic_blocks { for statement in &block.statements { @@ -25,23 +25,23 @@ pub fn always_storage_live_locals(body: &Body<'_>) -> BitSet { } pub struct MaybeStorageLive<'a> { - always_live_locals: Cow<'a, BitSet>, + always_live_locals: Cow<'a, DenseBitSet>, } impl<'a> MaybeStorageLive<'a> { - pub fn new(always_live_locals: Cow<'a, BitSet>) -> Self { + pub fn new(always_live_locals: Cow<'a, DenseBitSet>) -> Self { MaybeStorageLive { always_live_locals } } } impl<'a, 'tcx> Analysis<'tcx> for MaybeStorageLive<'a> { - type Domain = BitSet; + type Domain = DenseBitSet; const NAME: &'static str = "maybe_storage_live"; fn bottom_value(&self, body: &Body<'tcx>) -> Self::Domain { // bottom = dead - BitSet::new_empty(body.local_decls.len()) + DenseBitSet::new_empty(body.local_decls.len()) } fn initialize_start_block(&self, body: &Body<'tcx>, state: &mut Self::Domain) { @@ -67,23 +67,23 @@ impl<'a, 'tcx> Analysis<'tcx> for MaybeStorageLive<'a> { } pub struct MaybeStorageDead<'a> { - always_live_locals: Cow<'a, BitSet>, + always_live_locals: Cow<'a, DenseBitSet>, } impl<'a> MaybeStorageDead<'a> { - pub fn new(always_live_locals: Cow<'a, BitSet>) -> Self { + pub fn new(always_live_locals: Cow<'a, DenseBitSet>) -> Self { MaybeStorageDead { always_live_locals } } } impl<'a, 'tcx> Analysis<'tcx> for MaybeStorageDead<'a> { - type Domain = BitSet; + type Domain = DenseBitSet; const NAME: &'static str = "maybe_storage_dead"; fn bottom_value(&self, body: &Body<'tcx>) -> Self::Domain { // bottom = live - BitSet::new_empty(body.local_decls.len()) + DenseBitSet::new_empty(body.local_decls.len()) } fn initialize_start_block(&self, body: &Body<'tcx>, state: &mut Self::Domain) { @@ -125,13 +125,13 @@ impl<'mir, 'tcx> MaybeRequiresStorage<'mir, 'tcx> { } impl<'tcx> Analysis<'tcx> for MaybeRequiresStorage<'_, 'tcx> { - type Domain = BitSet; + type Domain = DenseBitSet; const NAME: &'static str = 
"requires_storage"; fn bottom_value(&self, body: &Body<'tcx>) -> Self::Domain { // bottom = dead - BitSet::new_empty(body.local_decls.len()) + DenseBitSet::new_empty(body.local_decls.len()) } fn initialize_start_block(&self, body: &Body<'tcx>, state: &mut Self::Domain) { @@ -304,7 +304,7 @@ impl<'tcx> MaybeRequiresStorage<'_, 'tcx> { struct MoveVisitor<'a, 'mir, 'tcx> { borrowed_locals: &'a mut BorrowedLocalsResults<'mir, 'tcx>, - state: &'a mut BitSet, + state: &'a mut DenseBitSet, } impl<'tcx> Visitor<'tcx> for MoveVisitor<'_, '_, 'tcx> { diff --git a/compiler/rustc_mir_dataflow/src/lib.rs b/compiler/rustc_mir_dataflow/src/lib.rs index 85255db5d9a61..0cc79b0c93903 100644 --- a/compiler/rustc_mir_dataflow/src/lib.rs +++ b/compiler/rustc_mir_dataflow/src/lib.rs @@ -5,6 +5,7 @@ #![feature(exact_size_is_empty)] #![feature(file_buffered)] #![feature(let_chains)] +#![feature(never_type)] #![feature(try_blocks)] #![warn(unreachable_pub)] // tidy-alphabetical-end @@ -19,8 +20,7 @@ pub use self::drop_flag_effects::{ }; pub use self::framework::{ Analysis, Backward, Direction, EntryStates, Forward, GenKill, JoinSemiLattice, MaybeReachable, - Results, ResultsCursor, ResultsVisitor, SwitchIntEdgeEffects, fmt, graphviz, lattice, - visit_results, + Results, ResultsCursor, ResultsVisitor, fmt, graphviz, lattice, visit_results, }; use self::move_paths::MoveData; diff --git a/compiler/rustc_mir_dataflow/src/move_paths/builder.rs b/compiler/rustc_mir_dataflow/src/move_paths/builder.rs index 0880364bfca00..80875f32e4f66 100644 --- a/compiler/rustc_mir_dataflow/src/move_paths/builder.rs +++ b/compiler/rustc_mir_dataflow/src/move_paths/builder.rs @@ -161,6 +161,7 @@ impl<'a, 'tcx, F: Fn(Ty<'tcx>) -> bool> MoveDataBuilder<'a, 'tcx, F> { | ty::CoroutineWitness(..) | ty::Never | ty::Tuple(_) + | ty::UnsafeBinder(_) | ty::Alias(_, _) | ty::Param(_) | ty::Bound(_, _) @@ -200,6 +201,7 @@ impl<'a, 'tcx, F: Fn(Ty<'tcx>) -> bool> MoveDataBuilder<'a, 'tcx, F> { | ty::Dynamic(_, _, _) | ty::CoroutineWitness(..) | ty::Never + | ty::UnsafeBinder(_) | ty::Alias(_, _) | ty::Param(_) | ty::Bound(_, _) @@ -411,7 +413,6 @@ impl<'a, 'tcx, F: Fn(Ty<'tcx>) -> bool> MoveDataBuilder<'a, 'tcx, F> { Rvalue::Ref(..) | Rvalue::RawPtr(..) | Rvalue::Discriminant(..) - | Rvalue::Len(..) | Rvalue::NullaryOp( NullOp::SizeOf | NullOp::AlignOf | NullOp::OffsetOf(..) 
| NullOp::UbChecks, _, diff --git a/compiler/rustc_mir_dataflow/src/points.rs b/compiler/rustc_mir_dataflow/src/points.rs index 74209da876ab6..5d2a78acbf526 100644 --- a/compiler/rustc_mir_dataflow/src/points.rs +++ b/compiler/rustc_mir_dataflow/src/points.rs @@ -1,4 +1,4 @@ -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_index::interval::SparseIntervalMatrix; use rustc_index::{Idx, IndexVec}; use rustc_middle::mir::{self, BasicBlock, Body, Location}; @@ -102,7 +102,7 @@ pub fn save_as_intervals<'tcx, N, A>( ) -> SparseIntervalMatrix where N: Idx, - A: Analysis<'tcx, Domain = BitSet>, + A: Analysis<'tcx, Domain = DenseBitSet>, { let values = SparseIntervalMatrix::new(elements.num_points()); let mut visitor = Visitor { elements, values }; @@ -122,7 +122,7 @@ struct Visitor<'a, N: Idx> { impl<'mir, 'tcx, A, N> ResultsVisitor<'mir, 'tcx, A> for Visitor<'_, N> where - A: Analysis<'tcx, Domain = BitSet>, + A: Analysis<'tcx, Domain = DenseBitSet>, N: Idx, { fn visit_after_primary_statement_effect( diff --git a/compiler/rustc_mir_dataflow/src/rustc_peek.rs b/compiler/rustc_mir_dataflow/src/rustc_peek.rs index 85cf8ca2104ab..399141aa9212e 100644 --- a/compiler/rustc_mir_dataflow/src/rustc_peek.rs +++ b/compiler/rustc_mir_dataflow/src/rustc_peek.rs @@ -2,8 +2,7 @@ use rustc_ast::MetaItem; use rustc_hir::def_id::DefId; use rustc_middle::mir::{self, Body, Local, Location}; use rustc_middle::ty::{self, Ty, TyCtxt}; -use rustc_span::Span; -use rustc_span::symbol::{Symbol, sym}; +use rustc_span::{Span, Symbol, sym}; use tracing::{debug, info}; use crate::errors::{ diff --git a/compiler/rustc_mir_dataflow/src/value_analysis.rs b/compiler/rustc_mir_dataflow/src/value_analysis.rs index 9328870c7ae04..a51af8c40fd43 100644 --- a/compiler/rustc_mir_dataflow/src/value_analysis.rs +++ b/compiler/rustc_mir_dataflow/src/value_analysis.rs @@ -6,7 +6,7 @@ use rustc_data_structures::captures::Captures; use rustc_data_structures::fx::{FxHashMap, FxIndexSet, StdEntry}; use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_index::IndexVec; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::mir::tcx::PlaceTy; use rustc_middle::mir::visit::{MutatingUseContext, PlaceContext, Visitor}; use rustc_middle::mir::*; @@ -399,7 +399,7 @@ impl<'tcx> Map<'tcx> { &mut self, tcx: TyCtxt<'tcx>, body: &Body<'tcx>, - exclude: BitSet, + exclude: DenseBitSet, value_limit: Option, ) { // Start by constructing the places for each bare local. @@ -912,9 +912,9 @@ pub fn iter_fields<'tcx>( } /// Returns all locals with projections that have their reference or address taken. 
-pub fn excluded_locals(body: &Body<'_>) -> BitSet { +pub fn excluded_locals(body: &Body<'_>) -> DenseBitSet { struct Collector { - result: BitSet, + result: DenseBitSet, } impl<'tcx> Visitor<'tcx> for Collector { @@ -932,7 +932,7 @@ pub fn excluded_locals(body: &Body<'_>) -> BitSet { } } - let mut collector = Collector { result: BitSet::new_empty(body.local_decls.len()) }; + let mut collector = Collector { result: DenseBitSet::new_empty(body.local_decls.len()) }; collector.visit_body(body); collector.result } diff --git a/compiler/rustc_mir_transform/messages.ftl b/compiler/rustc_mir_transform/messages.ftl index d00bfc66a6a57..b0c023cca8242 100644 --- a/compiler/rustc_mir_transform/messages.ftl +++ b/compiler/rustc_mir_transform/messages.ftl @@ -19,6 +19,17 @@ mir_transform_ffi_unwind_call = call to {$foreign -> mir_transform_fn_item_ref = taking a reference to a function item does not give a function pointer .suggestion = cast `{$ident}` to obtain a function pointer +mir_transform_force_inline = + `{$callee}` could not be inlined into `{$caller}` but is required to be inlined + .call = ...`{$callee}` called here + .attr = inlining due to this annotation + .caller = within `{$caller}`... + .callee = `{$callee}` defined here + .note = could not be inlined due to: {$reason} + +mir_transform_force_inline_justification = + `{$callee}` is required to be inlined to: {$sym} + mir_transform_must_not_suspend = {$pre}`{$def_path}`{$post} held across a suspend point, but should not be .label = the value is held across this suspend point .note = {$reason} diff --git a/compiler/rustc_mir_transform/src/copy_prop.rs b/compiler/rustc_mir_transform/src/copy_prop.rs index 9b3443d3209e7..f149bf97cde65 100644 --- a/compiler/rustc_mir_transform/src/copy_prop.rs +++ b/compiler/rustc_mir_transform/src/copy_prop.rs @@ -1,5 +1,5 @@ use rustc_index::IndexSlice; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::mir::visit::*; use rustc_middle::mir::*; use rustc_middle::ty::TyCtxt; @@ -34,7 +34,7 @@ impl<'tcx> crate::MirPass<'tcx> for CopyProp { let fully_moved = fully_moved_locals(&ssa, body); debug!(?fully_moved); - let mut storage_to_remove = BitSet::new_empty(fully_moved.domain_size()); + let mut storage_to_remove = DenseBitSet::new_empty(fully_moved.domain_size()); for (local, &head) in ssa.copy_classes().iter_enumerated() { if local != head { storage_to_remove.insert(head); @@ -68,8 +68,8 @@ impl<'tcx> crate::MirPass<'tcx> for CopyProp { /// This means that replacing it by a copy of `_a` if ok, since this copy happens before `_c` is /// moved, and therefore that `_d` is moved. #[instrument(level = "trace", skip(ssa, body))] -fn fully_moved_locals(ssa: &SsaLocals, body: &Body<'_>) -> BitSet { - let mut fully_moved = BitSet::new_filled(body.local_decls.len()); +fn fully_moved_locals(ssa: &SsaLocals, body: &Body<'_>) -> DenseBitSet { + let mut fully_moved = DenseBitSet::new_filled(body.local_decls.len()); for (_, rvalue, _) in ssa.assignments(body) { let (Rvalue::Use(Operand::Copy(place) | Operand::Move(place)) @@ -96,9 +96,9 @@ fn fully_moved_locals(ssa: &SsaLocals, body: &Body<'_>) -> BitSet { /// Utility to help performing substitution of `*pattern` by `target`. 
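// A minimal, standalone sketch of the `always_storage_live_locals` idea used by
// the passes above: start with every local assumed to have storage for the whole
// body, then clear any local that carries an explicit StorageLive or StorageDead
// marker. Plain `usize` indices and a `Vec<bool>` stand in for the compiler's
// `Local` and `DenseBitSet` types; this is illustrative only, not the real code.
fn always_storage_live(num_locals: usize, locals_with_storage_markers: &[usize]) -> Vec<bool> {
    // Start "filled": assume every local has storage for the whole body.
    let mut always_live = vec![true; num_locals];
    // Any local mentioned by a StorageLive/StorageDead statement has its
    // storage managed explicitly, so it is not always-live.
    for &local in locals_with_storage_markers {
        always_live[local] = false;
    }
    always_live
}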
struct Replacer<'a, 'tcx> { tcx: TyCtxt<'tcx>, - fully_moved: BitSet, - storage_to_remove: BitSet, - borrowed_locals: &'a BitSet, + fully_moved: DenseBitSet, + storage_to_remove: DenseBitSet, + borrowed_locals: &'a DenseBitSet, copy_classes: &'a IndexSlice, } diff --git a/compiler/rustc_mir_transform/src/coroutine.rs b/compiler/rustc_mir_transform/src/coroutine.rs index 31d5245fb5ca7..a3715b5d48556 100644 --- a/compiler/rustc_mir_transform/src/coroutine.rs +++ b/compiler/rustc_mir_transform/src/coroutine.rs @@ -60,7 +60,7 @@ use rustc_errors::pluralize; use rustc_hir as hir; use rustc_hir::lang_items::LangItem; use rustc_hir::{CoroutineDesugaring, CoroutineKind}; -use rustc_index::bit_set::{BitMatrix, BitSet, GrowableBitSet}; +use rustc_index::bit_set::{BitMatrix, DenseBitSet, GrowableBitSet}; use rustc_index::{Idx, IndexVec}; use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor}; use rustc_middle::mir::*; @@ -73,9 +73,8 @@ use rustc_mir_dataflow::impls::{ always_storage_live_locals, }; use rustc_mir_dataflow::{Analysis, Results, ResultsVisitor}; -use rustc_span::Span; use rustc_span::def_id::{DefId, LocalDefId}; -use rustc_span::symbol::sym; +use rustc_span::{Span, sym}; use rustc_target::spec::PanicStrategy; use rustc_trait_selection::error_reporting::InferCtxtErrorExt; use rustc_trait_selection::infer::TyCtxtInferExt as _; @@ -186,13 +185,13 @@ struct TransformVisitor<'tcx> { remap: IndexVec, VariantIdx, FieldIdx)>>, // A map from a suspension point in a block to the locals which have live storage at that point - storage_liveness: IndexVec>>, + storage_liveness: IndexVec>>, // A list of suspension points, generated during the transform suspension_points: Vec>, // The set of locals that have no `StorageLive`/`StorageDead` annotations. - always_live_locals: BitSet, + always_live_locals: DenseBitSet, // The original RETURN_PLACE local old_ret_local: Local, @@ -634,7 +633,7 @@ struct LivenessInfo { saved_locals: CoroutineSavedLocals, /// The set of saved locals live at each suspension point. - live_locals_at_suspension_points: Vec>, + live_locals_at_suspension_points: Vec>, /// Parallel vec to the above with SourceInfo for each yield terminator. source_info_at_suspension_points: Vec, @@ -646,7 +645,7 @@ struct LivenessInfo { /// For every suspending block, the locals which are storage-live across /// that suspension point. - storage_liveness: IndexVec>>, + storage_liveness: IndexVec>>, } /// Computes which locals have to be stored in the state-machine for the @@ -660,7 +659,7 @@ struct LivenessInfo { fn locals_live_across_suspend_points<'tcx>( tcx: TyCtxt<'tcx>, body: &Body<'tcx>, - always_live_locals: &BitSet, + always_live_locals: &DenseBitSet, movable: bool, ) -> LivenessInfo { // Calculate when MIR locals have live storage. This gives us an upper bound of their @@ -689,7 +688,7 @@ fn locals_live_across_suspend_points<'tcx>( let mut storage_liveness_map = IndexVec::from_elem(None, &body.basic_blocks); let mut live_locals_at_suspension_points = Vec::new(); let mut source_info_at_suspension_points = Vec::new(); - let mut live_locals_at_any_suspension_point = BitSet::new_empty(body.local_decls.len()); + let mut live_locals_at_any_suspension_point = DenseBitSet::new_empty(body.local_decls.len()); for (block, data) in body.basic_blocks.iter_enumerated() { if let TerminatorKind::Yield { .. } = data.terminator().kind { @@ -769,7 +768,7 @@ fn locals_live_across_suspend_points<'tcx>( /// `CoroutineSavedLocal` is indexed in terms of the elements in this set; /// i.e. 
`CoroutineSavedLocal::new(1)` corresponds to the second local /// included in this set. -struct CoroutineSavedLocals(BitSet); +struct CoroutineSavedLocals(DenseBitSet); impl CoroutineSavedLocals { /// Returns an iterator over each `CoroutineSavedLocal` along with the `Local` it corresponds @@ -778,11 +777,11 @@ impl CoroutineSavedLocals { self.iter().enumerate().map(|(i, l)| (CoroutineSavedLocal::from(i), l)) } - /// Transforms a `BitSet` that contains only locals saved across yield points to the - /// equivalent `BitSet`. - fn renumber_bitset(&self, input: &BitSet) -> BitSet { + /// Transforms a `DenseBitSet` that contains only locals saved across yield points to the + /// equivalent `DenseBitSet`. + fn renumber_bitset(&self, input: &DenseBitSet) -> DenseBitSet { assert!(self.superset(input), "{:?} not a superset of {:?}", self.0, input); - let mut out = BitSet::new_empty(self.count()); + let mut out = DenseBitSet::new_empty(self.count()); for (saved_local, local) in self.iter_enumerated() { if input.contains(local) { out.insert(saved_local); @@ -802,7 +801,7 @@ impl CoroutineSavedLocals { } impl ops::Deref for CoroutineSavedLocals { - type Target = BitSet; + type Target = DenseBitSet; fn deref(&self) -> &Self::Target { &self.0 @@ -816,7 +815,7 @@ impl ops::Deref for CoroutineSavedLocals { fn compute_storage_conflicts<'mir, 'tcx>( body: &'mir Body<'tcx>, saved_locals: &'mir CoroutineSavedLocals, - always_live_locals: BitSet, + always_live_locals: DenseBitSet, mut requires_storage: Results<'tcx, MaybeRequiresStorage<'mir, 'tcx>>, ) -> BitMatrix { assert_eq!(body.local_decls.len(), saved_locals.domain_size()); @@ -834,7 +833,7 @@ fn compute_storage_conflicts<'mir, 'tcx>( body, saved_locals, local_conflicts: BitMatrix::from_row_n(&ineligible_locals, body.local_decls.len()), - eligible_storage_live: BitSet::new_empty(body.local_decls.len()), + eligible_storage_live: DenseBitSet::new_empty(body.local_decls.len()), }; requires_storage.visit_reachable_with(body, &mut visitor); @@ -872,7 +871,7 @@ struct StorageConflictVisitor<'a, 'tcx> { // benchmarks for coroutines. local_conflicts: BitMatrix, // We keep this bitset as a buffer to avoid reallocating memory. - eligible_storage_live: BitSet, + eligible_storage_live: DenseBitSet, } impl<'a, 'tcx> ResultsVisitor<'a, 'tcx, MaybeRequiresStorage<'a, 'tcx>> @@ -881,7 +880,7 @@ impl<'a, 'tcx> ResultsVisitor<'a, 'tcx, MaybeRequiresStorage<'a, 'tcx>> fn visit_after_early_statement_effect( &mut self, _results: &mut Results<'tcx, MaybeRequiresStorage<'a, 'tcx>>, - state: &BitSet, + state: &DenseBitSet, _statement: &'a Statement<'tcx>, loc: Location, ) { @@ -891,7 +890,7 @@ impl<'a, 'tcx> ResultsVisitor<'a, 'tcx, MaybeRequiresStorage<'a, 'tcx>> fn visit_after_early_terminator_effect( &mut self, _results: &mut Results<'tcx, MaybeRequiresStorage<'a, 'tcx>>, - state: &BitSet, + state: &DenseBitSet, _terminator: &'a Terminator<'tcx>, loc: Location, ) { @@ -900,7 +899,7 @@ impl<'a, 'tcx> ResultsVisitor<'a, 'tcx, MaybeRequiresStorage<'a, 'tcx>> } impl StorageConflictVisitor<'_, '_> { - fn apply_state(&mut self, state: &BitSet, loc: Location) { + fn apply_state(&mut self, state: &DenseBitSet, loc: Location) { // Ignore unreachable blocks. 
if let TerminatorKind::Unreachable = self.body.basic_blocks[loc.block].terminator().kind { return; @@ -925,7 +924,7 @@ fn compute_layout<'tcx>( ) -> ( IndexVec, VariantIdx, FieldIdx)>>, CoroutineLayout<'tcx>, - IndexVec>>, + IndexVec>>, ) { let LivenessInfo { saved_locals, @@ -1823,9 +1822,6 @@ impl<'tcx> Visitor<'tcx> for EnsureCoroutineFieldAssignmentsNeverAlias<'_> { fn check_suspend_tys<'tcx>(tcx: TyCtxt<'tcx>, layout: &CoroutineLayout<'tcx>, body: &Body<'tcx>) { let mut linted_tys = FxHashSet::default(); - // We want a user-facing param-env. - let param_env = tcx.param_env(body.source.def_id()); - for (variant, yield_source_info) in layout.variant_fields.iter().zip(&layout.variant_source_info) { @@ -1839,7 +1835,7 @@ fn check_suspend_tys<'tcx>(tcx: TyCtxt<'tcx>, layout: &CoroutineLayout<'tcx>, bo continue; }; - check_must_not_suspend_ty(tcx, decl.ty, hir_id, param_env, SuspendCheckData { + check_must_not_suspend_ty(tcx, decl.ty, hir_id, SuspendCheckData { source_span: decl.source_info.span, yield_span: yield_source_info.span, plural_len: 1, @@ -1869,7 +1865,6 @@ fn check_must_not_suspend_ty<'tcx>( tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, hir_id: hir::HirId, - param_env: ty::ParamEnv<'tcx>, data: SuspendCheckData<'_>, ) -> bool { if ty.is_unit() { @@ -1884,16 +1879,13 @@ fn check_must_not_suspend_ty<'tcx>( ty::Adt(_, args) if ty.is_box() => { let boxed_ty = args.type_at(0); let allocator_ty = args.type_at(1); - check_must_not_suspend_ty(tcx, boxed_ty, hir_id, param_env, SuspendCheckData { + check_must_not_suspend_ty(tcx, boxed_ty, hir_id, SuspendCheckData { descr_pre: &format!("{}boxed ", data.descr_pre), ..data - }) || check_must_not_suspend_ty( - tcx, - allocator_ty, - hir_id, - param_env, - SuspendCheckData { descr_pre: &format!("{}allocator ", data.descr_pre), ..data }, - ) + }) || check_must_not_suspend_ty(tcx, allocator_ty, hir_id, SuspendCheckData { + descr_pre: &format!("{}allocator ", data.descr_pre), + ..data + }) } ty::Adt(def, _) => check_must_not_suspend_def(tcx, def.did(), hir_id, data), // FIXME: support adding the attribute to TAITs @@ -1938,7 +1930,7 @@ fn check_must_not_suspend_ty<'tcx>( let mut has_emitted = false; for (i, ty) in fields.iter().enumerate() { let descr_post = &format!(" in tuple element {i}"); - if check_must_not_suspend_ty(tcx, ty, hir_id, param_env, SuspendCheckData { + if check_must_not_suspend_ty(tcx, ty, hir_id, SuspendCheckData { descr_post, ..data }) { @@ -1949,7 +1941,7 @@ fn check_must_not_suspend_ty<'tcx>( } ty::Array(ty, len) => { let descr_pre = &format!("{}array{} of ", data.descr_pre, plural_suffix); - check_must_not_suspend_ty(tcx, ty, hir_id, param_env, SuspendCheckData { + check_must_not_suspend_ty(tcx, ty, hir_id, SuspendCheckData { descr_pre, // FIXME(must_not_suspend): This is wrong. We should handle printing unevaluated consts. plural_len: len.try_to_target_usize(tcx).unwrap_or(0) as usize + 1, @@ -1960,10 +1952,7 @@ fn check_must_not_suspend_ty<'tcx>( // may not be considered live across the await point. 
ty::Ref(_region, ty, _mutability) => { let descr_pre = &format!("{}reference{} to ", data.descr_pre, plural_suffix); - check_must_not_suspend_ty(tcx, ty, hir_id, param_env, SuspendCheckData { - descr_pre, - ..data - }) + check_must_not_suspend_ty(tcx, ty, hir_id, SuspendCheckData { descr_pre, ..data }) } _ => false, } diff --git a/compiler/rustc_mir_transform/src/coroutine/by_move_body.rs b/compiler/rustc_mir_transform/src/coroutine/by_move_body.rs index ef61866e9025d..9f5bb015fa335 100644 --- a/compiler/rustc_mir_transform/src/coroutine/by_move_body.rs +++ b/compiler/rustc_mir_transform/src/coroutine/by_move_body.rs @@ -78,7 +78,7 @@ use rustc_middle::hir::place::{Projection, ProjectionKind}; use rustc_middle::mir::visit::MutVisitor; use rustc_middle::mir::{self, dump_mir}; use rustc_middle::ty::{self, InstanceKind, Ty, TyCtxt, TypeVisitableExt}; -use rustc_span::symbol::kw; +use rustc_span::kw; pub(crate) fn coroutine_by_move_body_def_id<'tcx>( tcx: TyCtxt<'tcx>, diff --git a/compiler/rustc_mir_transform/src/coverage/counters.rs b/compiler/rustc_mir_transform/src/coverage/counters.rs index 9e80f1f1c4ac2..1a9323329f61f 100644 --- a/compiler/rustc_mir_transform/src/coverage/counters.rs +++ b/compiler/rustc_mir_transform/src/coverage/counters.rs @@ -1,18 +1,24 @@ use std::cmp::Ordering; use std::fmt::{self, Debug}; +use either::Either; +use itertools::Itertools; use rustc_data_structures::captures::Captures; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::graph::DirectedGraph; use rustc_index::IndexVec; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::mir::coverage::{CounterId, CovTerm, Expression, ExpressionId, Op}; -use tracing::{debug, debug_span, instrument}; -use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph, ReadyFirstTraversal}; +use crate::coverage::counters::balanced_flow::BalancedFlowGraph; +use crate::coverage::counters::iter_nodes::IterNodes; +use crate::coverage::counters::node_flow::{CounterTerm, MergedNodeFlowGraph, NodeCounters}; +use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph}; -#[cfg(test)] -mod tests; +mod balanced_flow; +mod iter_nodes; +mod node_flow; +mod union_find; /// The coverage counter or counter expression associated with a particular /// BCB node or BCB edge. @@ -48,10 +54,12 @@ struct BcbExpression { } /// Enum representing either a node or an edge in the coverage graph. +/// +/// FIXME(#135481): This enum is no longer needed now that we only instrument +/// nodes and not edges. It can be removed in a subsequent PR. #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub(super) enum Site { Node { bcb: BasicCoverageBlock }, - Edge { from_bcb: BasicCoverageBlock, to_bcb: BasicCoverageBlock }, } /// Generates and stores coverage counter and coverage expression information @@ -77,12 +85,40 @@ impl CoverageCounters { /// counters or counter expressions for nodes and edges as required. pub(super) fn make_bcb_counters( graph: &CoverageGraph, - bcb_needs_counter: &BitSet, + bcb_needs_counter: &DenseBitSet, ) -> Self { - let mut builder = CountersBuilder::new(graph, bcb_needs_counter); - builder.make_bcb_counters(); + let balanced_graph = BalancedFlowGraph::for_graph(graph, |n| !graph[n].is_out_summable); + let merged_graph = MergedNodeFlowGraph::for_balanced_graph(&balanced_graph); + + // A "reloop" node has exactly one out-edge, which jumps back to the top + // of an enclosing loop. 
Reloop nodes are typically visited more times + // than loop-exit nodes, so try to avoid giving them physical counters. + let is_reloop_node = IndexVec::from_fn_n( + |node| match graph.successors[node].as_slice() { + &[succ] => graph.dominates(succ, node), + _ => false, + }, + graph.num_nodes(), + ); - builder.into_coverage_counters() + let mut nodes = balanced_graph.iter_nodes().rev().collect::>(); + // The first node is the sink, which must not get a physical counter. + assert_eq!(nodes[0], balanced_graph.sink); + // Sort the real nodes, such that earlier (lesser) nodes take priority + // in being given a counter expression instead of a physical counter. + nodes[1..].sort_by(|&a, &b| { + // Start with a dummy `Equal` to make the actual tests line up nicely. + Ordering::Equal + // Prefer a physical counter for return/yield nodes. + .then_with(|| Ord::cmp(&graph[a].is_out_summable, &graph[b].is_out_summable)) + // Prefer an expression for reloop nodes (see definition above). + .then_with(|| Ord::cmp(&is_reloop_node[a], &is_reloop_node[b]).reverse()) + // Otherwise, prefer a physical counter for dominating nodes. + .then_with(|| graph.cmp_in_dominator_order(a, b).reverse()) + }); + let node_counters = merged_graph.make_node_counters(&nodes); + + Transcriber::new(graph.num_nodes(), node_counters).transcribe_counters(bcb_needs_counter) } fn with_num_bcbs(num_bcbs: usize) -> Self { @@ -182,318 +218,51 @@ impl CoverageCounters { } } -/// Symbolic representation of the coverage counter to be used for a particular -/// node or edge in the coverage graph. The same site counter can be used for -/// multiple sites, if they have been determined to have the same count. -#[derive(Clone, Copy, Debug)] -enum SiteCounter { - /// A physical counter at some node/edge. - Phys { site: Site }, - /// A counter expression for a node that takes the sum of all its in-edge - /// counters. - NodeSumExpr { bcb: BasicCoverageBlock }, - /// A counter expression for an edge that takes the counter of its source - /// node, and subtracts the counters of all its sibling out-edges. - EdgeDiffExpr { from_bcb: BasicCoverageBlock, to_bcb: BasicCoverageBlock }, -} - -/// Yields the graph successors of `from_bcb` that aren't `to_bcb`. This is -/// used when creating a counter expression for [`SiteCounter::EdgeDiffExpr`]. -/// -/// For example, in this diagram the sibling out-edge targets of edge `AC` are -/// the nodes `B` and `D`. -/// -/// ```text -/// A -/// / | \ -/// B C D -/// ``` -fn sibling_out_edge_targets( - graph: &CoverageGraph, - from_bcb: BasicCoverageBlock, - to_bcb: BasicCoverageBlock, -) -> impl Iterator + Captures<'_> { - graph.successors[from_bcb].iter().copied().filter(move |&t| t != to_bcb) -} - -/// Helper struct that allows counter creation to inspect the BCB graph, and -/// the set of nodes that need counters. -struct CountersBuilder<'a> { - graph: &'a CoverageGraph, - bcb_needs_counter: &'a BitSet, - - site_counters: FxHashMap, -} - -impl<'a> CountersBuilder<'a> { - fn new(graph: &'a CoverageGraph, bcb_needs_counter: &'a BitSet) -> Self { - assert_eq!(graph.num_nodes(), bcb_needs_counter.domain_size()); - Self { graph, bcb_needs_counter, site_counters: FxHashMap::default() } - } - - fn make_bcb_counters(&mut self) { - debug!("make_bcb_counters(): adding a counter or expression to each BasicCoverageBlock"); - - // Traverse the coverage graph, ensuring that every node that needs a - // coverage counter has one. 
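// A standalone sketch of the "reloop node" test used above: a node is a reloop
// node if it has exactly one out-edge and that edge's target dominates it, i.e.
// the edge jumps back to the top of an enclosing loop. Here `successors` is a
// plain adjacency list and `dominates(a, b)` is supplied by the caller; both are
// simplified stand-ins for the compiler's graph types.
fn reloop_nodes(
    successors: &[Vec<usize>],
    dominates: impl Fn(usize, usize) -> bool,
) -> Vec<bool> {
    (0..successors.len())
        .map(|node| match successors[node].as_slice() {
            // Exactly one successor, and it dominates this node.
            &[succ] => dominates(succ, node),
            _ => false,
        })
        .collect()
}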
- for bcb in ReadyFirstTraversal::new(self.graph) { - let _span = debug_span!("traversal", ?bcb).entered(); - if self.bcb_needs_counter.contains(bcb) { - self.make_node_counter_and_out_edge_counters(bcb); - } - } - } - - /// Make sure the given node has a node counter, and then make sure each of - /// its out-edges has an edge counter (if appropriate). - #[instrument(level = "debug", skip(self))] - fn make_node_counter_and_out_edge_counters(&mut self, from_bcb: BasicCoverageBlock) { - // First, ensure that this node has a counter of some kind. - // We might also use that counter to compute one of the out-edge counters. - self.get_or_make_node_counter(from_bcb); - - // If this node's out-edges won't sum to the node's counter, - // then there's no reason to create edge counters here. - if !self.graph[from_bcb].is_out_summable { - return; - } - - // When choosing which out-edge should be given a counter expression, ignore edges that - // already have counters, or could use the existing counter of their target node. - let out_edge_has_counter = |to_bcb| { - if self.site_counters.contains_key(&Site::Edge { from_bcb, to_bcb }) { - return true; - } - self.graph.sole_predecessor(to_bcb) == Some(from_bcb) - && self.site_counters.contains_key(&Site::Node { bcb: to_bcb }) - }; - - // Determine the set of out-edges that could benefit from being given an expression. - let candidate_successors = self.graph.successors[from_bcb] - .iter() - .copied() - .filter(|&to_bcb| !out_edge_has_counter(to_bcb)) - .collect::>(); - debug!(?candidate_successors); - - // If there are out-edges without counters, choose one to be given an expression - // (computed from this node and the other out-edges) instead of a physical counter. - let Some(to_bcb) = self.choose_out_edge_for_expression(from_bcb, &candidate_successors) - else { - return; - }; - - // For each out-edge other than the one that was chosen to get an expression, - // ensure that it has a counter (existing counter/expression or a new counter). - for target in sibling_out_edge_targets(self.graph, from_bcb, to_bcb) { - self.get_or_make_edge_counter(from_bcb, target); - } - - // Now create an expression for the chosen edge, by taking the counter - // for its source node and subtracting the sum of its sibling out-edges. - let counter = SiteCounter::EdgeDiffExpr { from_bcb, to_bcb }; - self.site_counters.insert(Site::Edge { from_bcb, to_bcb }, counter); - } - - #[instrument(level = "debug", skip(self))] - fn get_or_make_node_counter(&mut self, bcb: BasicCoverageBlock) -> SiteCounter { - // If the BCB already has a counter, return it. - if let Some(&counter) = self.site_counters.get(&Site::Node { bcb }) { - debug!("{bcb:?} already has a counter: {counter:?}"); - return counter; - } - - let counter = self.make_node_counter_inner(bcb); - self.site_counters.insert(Site::Node { bcb }, counter); - counter - } - - fn make_node_counter_inner(&mut self, bcb: BasicCoverageBlock) -> SiteCounter { - // If the node's sole in-edge already has a counter, use that. - if let Some(sole_pred) = self.graph.sole_predecessor(bcb) - && let Some(&edge_counter) = - self.site_counters.get(&Site::Edge { from_bcb: sole_pred, to_bcb: bcb }) - { - return edge_counter; - } - - let predecessors = self.graph.predecessors[bcb].as_slice(); - - // Handle cases where we can't compute a node's count from its in-edges: - // - START_BCB has no in-edges, so taking the sum would panic (or be wrong). 
- // - For nodes with one in-edge, or that directly loop to themselves, - // trying to get the in-edge counts would require this node's counter, - // leading to infinite recursion. - if predecessors.len() <= 1 || predecessors.contains(&bcb) { - debug!(?bcb, ?predecessors, "node has <=1 predecessors or is its own predecessor"); - let counter = SiteCounter::Phys { site: Site::Node { bcb } }; - debug!(?bcb, ?counter, "node gets a physical counter"); - return counter; - } - - // A BCB with multiple incoming edges can compute its count by ensuring that counters - // exist for each of those edges, and then adding them up to get a total count. - for &from_bcb in predecessors { - self.get_or_make_edge_counter(from_bcb, bcb); - } - let sum_of_in_edges = SiteCounter::NodeSumExpr { bcb }; - - debug!("{bcb:?} gets a new counter (sum of predecessor counters): {sum_of_in_edges:?}"); - sum_of_in_edges - } - - #[instrument(level = "debug", skip(self))] - fn get_or_make_edge_counter( - &mut self, - from_bcb: BasicCoverageBlock, - to_bcb: BasicCoverageBlock, - ) -> SiteCounter { - // If the edge already has a counter, return it. - if let Some(&counter) = self.site_counters.get(&Site::Edge { from_bcb, to_bcb }) { - debug!("Edge {from_bcb:?}->{to_bcb:?} already has a counter: {counter:?}"); - return counter; - } - - let counter = self.make_edge_counter_inner(from_bcb, to_bcb); - self.site_counters.insert(Site::Edge { from_bcb, to_bcb }, counter); - counter - } - - fn make_edge_counter_inner( - &mut self, - from_bcb: BasicCoverageBlock, - to_bcb: BasicCoverageBlock, - ) -> SiteCounter { - // If the target node has exactly one in-edge (i.e. this one), then just - // use the node's counter, since it will have the same value. - if let Some(sole_pred) = self.graph.sole_predecessor(to_bcb) { - assert_eq!(sole_pred, from_bcb); - // This call must take care not to invoke `get_or_make_edge` for - // this edge, since that would result in infinite recursion! - return self.get_or_make_node_counter(to_bcb); - } - - // If the source node has exactly one out-edge (i.e. this one) and would have - // the same execution count as that edge, then just use the node's counter. - if let Some(simple_succ) = self.graph.simple_successor(from_bcb) { - assert_eq!(simple_succ, to_bcb); - return self.get_or_make_node_counter(from_bcb); - } - - // Make a new counter to count this edge. - let counter = SiteCounter::Phys { site: Site::Edge { from_bcb, to_bcb } }; - debug!(?from_bcb, ?to_bcb, ?counter, "edge gets a physical counter"); - counter - } - - /// Given a set of candidate out-edges (represented by their successor node), - /// choose one to be given a counter expression instead of a physical counter. - fn choose_out_edge_for_expression( - &self, - from_bcb: BasicCoverageBlock, - candidate_successors: &[BasicCoverageBlock], - ) -> Option { - // Try to find a candidate that leads back to the top of a loop, - // because reloop edges tend to be executed more times than loop-exit edges. - if let Some(reloop_target) = self.find_good_reloop_edge(from_bcb, &candidate_successors) { - debug!("Selecting reloop target {reloop_target:?} to get an expression"); - return Some(reloop_target); - } - - // We couldn't identify a "good" edge, so just choose an arbitrary one. 
- let arbitrary_target = candidate_successors.first().copied()?; - debug!(?arbitrary_target, "selecting arbitrary out-edge to get an expression"); - Some(arbitrary_target) - } - - /// Given a set of candidate out-edges (represented by their successor node), - /// tries to find one that leads back to the top of a loop. - /// - /// Reloop edges are good candidates for counter expressions, because they - /// will tend to be executed more times than a loop-exit edge, so it's nice - /// for them to be able to avoid a physical counter increment. - fn find_good_reloop_edge( - &self, - from_bcb: BasicCoverageBlock, - candidate_successors: &[BasicCoverageBlock], - ) -> Option { - // If there are no candidates, avoid iterating over the loop stack. - if candidate_successors.is_empty() { - return None; - } - - // Consider each loop on the current traversal context stack, top-down. - for loop_header_node in self.graph.loop_headers_containing(from_bcb) { - // Try to find a candidate edge that doesn't exit this loop. - for &target_bcb in candidate_successors { - // An edge is a reloop edge if its target dominates any BCB that has - // an edge back to the loop header. (Otherwise it's an exit edge.) - let is_reloop_edge = self - .graph - .reloop_predecessors(loop_header_node) - .any(|reloop_bcb| self.graph.dominates(target_bcb, reloop_bcb)); - if is_reloop_edge { - // We found a good out-edge to be given an expression. - return Some(target_bcb); - } - } - - // All of the candidate edges exit this loop, so keep looking - // for a good reloop edge for one of the outer loops. - } - - None - } - - fn into_coverage_counters(self) -> CoverageCounters { - Transcriber::new(&self).transcribe_counters() - } -} - -/// Helper struct for converting `CountersBuilder` into a final `CoverageCounters`. -struct Transcriber<'a> { - old: &'a CountersBuilder<'a>, +struct Transcriber { + old: NodeCounters, new: CoverageCounters, phys_counter_for_site: FxHashMap, } -impl<'a> Transcriber<'a> { - fn new(old: &'a CountersBuilder<'a>) -> Self { +impl Transcriber { + fn new(num_nodes: usize, old: NodeCounters) -> Self { Self { old, - new: CoverageCounters::with_num_bcbs(old.graph.num_nodes()), + new: CoverageCounters::with_num_bcbs(num_nodes), phys_counter_for_site: FxHashMap::default(), } } - fn transcribe_counters(mut self) -> CoverageCounters { - for bcb in self.old.bcb_needs_counter.iter() { + fn transcribe_counters( + mut self, + bcb_needs_counter: &DenseBitSet, + ) -> CoverageCounters { + for bcb in bcb_needs_counter.iter() { let site = Site::Node { bcb }; - let site_counter = self.site_counter(site); - - // Resolve the site counter into flat lists of nodes/edges whose - // physical counts contribute to the counter for this node. - // Distinguish between counts that will be added vs subtracted. - let mut pos = vec![]; - let mut neg = vec![]; - self.push_resolved_sites(site_counter, &mut pos, &mut neg); - - // Simplify by cancelling out sites that appear on both sides. - let (mut pos, mut neg) = sort_and_cancel(pos, neg); + let (mut pos, mut neg): (Vec<_>, Vec<_>) = + self.old.counter_expr(bcb).iter().partition_map( + |&CounterTerm { node, op }| match op { + Op::Add => Either::Left(node), + Op::Subtract => Either::Right(node), + }, + ); if pos.is_empty() { - // If we somehow end up with no positive terms after cancellation, - // fall back to creating a physical counter. There's no known way - // for this to happen, but it's hard to confidently rule it out. 
+ // If we somehow end up with no positive terms, fall back to + // creating a physical counter. There's no known way for this + // to happen, but we can avoid an ICE if it does. debug_assert!(false, "{site:?} has no positive counter terms"); - pos = vec![Some(site)]; + pos = vec![bcb]; neg = vec![]; } - let mut new_counters_for_sites = |sites: Vec>| { + pos.sort(); + neg.sort(); + + let mut new_counters_for_sites = |sites: Vec| { sites .into_iter() - .filter_map(|id| try { self.ensure_phys_counter(id?) }) + .map(|node| self.ensure_phys_counter(Site::Node { bcb: node })) .collect::>() }; let mut pos = new_counters_for_sites(pos); @@ -510,79 +279,7 @@ impl<'a> Transcriber<'a> { self.new } - fn site_counter(&self, site: Site) -> SiteCounter { - self.old.site_counters.get(&site).copied().unwrap_or_else(|| { - // We should have already created all necessary site counters. - // But if we somehow didn't, avoid crashing in release builds, - // and just use an extra physical counter instead. - debug_assert!(false, "{site:?} should have a counter"); - SiteCounter::Phys { site } - }) - } - fn ensure_phys_counter(&mut self, site: Site) -> BcbCounter { *self.phys_counter_for_site.entry(site).or_insert_with(|| self.new.make_phys_counter(site)) } - - /// Resolves the given counter into flat lists of nodes/edges, whose counters - /// will then be added and subtracted to form a counter expression. - fn push_resolved_sites(&self, counter: SiteCounter, pos: &mut Vec, neg: &mut Vec) { - match counter { - SiteCounter::Phys { site } => pos.push(site), - SiteCounter::NodeSumExpr { bcb } => { - for &from_bcb in &self.old.graph.predecessors[bcb] { - let edge_counter = self.site_counter(Site::Edge { from_bcb, to_bcb: bcb }); - self.push_resolved_sites(edge_counter, pos, neg); - } - } - SiteCounter::EdgeDiffExpr { from_bcb, to_bcb } => { - // First, add the count for `from_bcb`. - let node_counter = self.site_counter(Site::Node { bcb: from_bcb }); - self.push_resolved_sites(node_counter, pos, neg); - - // Then subtract the counts for the other out-edges. - for target in sibling_out_edge_targets(self.old.graph, from_bcb, to_bcb) { - let edge_counter = self.site_counter(Site::Edge { from_bcb, to_bcb: target }); - // Swap `neg` and `pos` so that the counter is subtracted. - self.push_resolved_sites(edge_counter, neg, pos); - } - } - } - } -} - -/// Given two lists: -/// - Sorts each list. -/// - Converts each list to `Vec>`. -/// - Scans for values that appear in both lists, and cancels them out by -/// replacing matching pairs of values with `None`. -fn sort_and_cancel(mut pos: Vec, mut neg: Vec) -> (Vec>, Vec>) { - pos.sort(); - neg.sort(); - - // Convert to `Vec>`. If `T` has a niche, this should be zero-cost. - let mut pos = pos.into_iter().map(Some).collect::>(); - let mut neg = neg.into_iter().map(Some).collect::>(); - - // Scan through the lists using two cursors. When either cursor reaches the - // end of its list, there can be no more equal pairs, so stop. - let mut p = 0; - let mut n = 0; - while p < pos.len() && n < neg.len() { - // If the values are equal, remove them and advance both cursors. - // Otherwise, advance whichever cursor points to the lesser value. - // (Choosing which cursor to advance relies on both lists being sorted.) 
- match pos[p].cmp(&neg[n]) { - Ordering::Less => p += 1, - Ordering::Equal => { - pos[p] = None; - neg[n] = None; - p += 1; - n += 1; - } - Ordering::Greater => n += 1, - } - } - - (pos, neg) } diff --git a/compiler/rustc_mir_transform/src/coverage/counters/balanced_flow.rs b/compiler/rustc_mir_transform/src/coverage/counters/balanced_flow.rs new file mode 100644 index 0000000000000..c108f96a564cf --- /dev/null +++ b/compiler/rustc_mir_transform/src/coverage/counters/balanced_flow.rs @@ -0,0 +1,133 @@ +//! A control-flow graph can be said to have “balanced flow” if the flow +//! (execution count) of each node is equal to the sum of its in-edge flows, +//! and also equal to the sum of its out-edge flows. +//! +//! Control-flow graphs typically have one or more nodes that don't satisfy the +//! balanced-flow property, e.g.: +//! - The start node has out-edges, but no in-edges. +//! - Return nodes have in-edges, but no out-edges. +//! - `Yield` nodes can have an out-flow that is less than their in-flow. +//! - Inescapable loops cause the in-flow/out-flow relationship to break down. +//! +//! Balanced-flow graphs are nevertheless useful for analysis, so this module +//! provides a wrapper type ([`BalancedFlowGraph`]) that imposes balanced flow +//! on an underlying graph. This is done by non-destructively adding synthetic +//! nodes and edges as necessary. + +use rustc_data_structures::graph; +use rustc_data_structures::graph::iterate::DepthFirstSearch; +use rustc_data_structures::graph::reversed::ReversedGraph; +use rustc_index::Idx; +use rustc_index::bit_set::DenseBitSet; + +use crate::coverage::counters::iter_nodes::IterNodes; + +/// A view of an underlying graph that has been augmented to have “balanced flow”. +/// This means that the flow (execution count) of each node is equal to the +/// sum of its in-edge flows, and also equal to the sum of its out-edge flows. +/// +/// To achieve this, a synthetic "sink" node is non-destructively added to the +/// graph, with synthetic in-edges from these nodes: +/// - Any node that has no out-edges. +/// - Any node that explicitly requires a sink edge, as indicated by a +/// caller-supplied `force_sink_edge` function. +/// - Any node that would otherwise be unable to reach the sink, because it is +/// part of an inescapable loop. +/// +/// To make the graph fully balanced, there is also a synthetic edge from the +/// sink node back to the start node. +/// +/// --- +/// The benefit of having a balanced-flow graph is that it can be subsequently +/// transformed in ways that are guaranteed to preserve balanced flow +/// (e.g. merging nodes together), which is useful for discovering relationships +/// between the node flows of different nodes in the graph. +pub(crate) struct BalancedFlowGraph { + graph: G, + sink_edge_nodes: DenseBitSet, + pub(crate) sink: G::Node, +} + +impl BalancedFlowGraph { + /// Creates a balanced view of an underlying graph, by adding a synthetic + /// sink node that has in-edges from nodes that need or request such an edge, + /// and a single out-edge to the start node. + /// + /// Assumes that all nodes in the underlying graph are reachable from the + /// start node. 
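// A standalone sketch of how the balanced-flow wrapper above chooses which nodes
// get a synthetic edge to the sink: every node the caller explicitly forces, plus
// every node with no out-edges. (The real code additionally adds nodes that still
// cannot reach the sink because they sit in an inescapable loop, found via a
// reverse depth-first search; that step is omitted here.) A plain adjacency list
// stands in for the compiler's graph traits.
fn sink_edge_nodes(
    successors: &[Vec<usize>],
    force_sink_edge: impl Fn(usize) -> bool,
) -> Vec<bool> {
    let mut needs_sink_edge = vec![false; successors.len()];
    for node in 0..successors.len() {
        if force_sink_edge(node) || successors[node].is_empty() {
            needs_sink_edge[node] = true;
        }
    }
    needs_sink_edge
}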
+ pub(crate) fn for_graph(graph: G, force_sink_edge: impl Fn(G::Node) -> bool) -> Self + where + G: graph::ControlFlowGraph, + { + let mut sink_edge_nodes = DenseBitSet::new_empty(graph.num_nodes()); + let mut dfs = DepthFirstSearch::new(ReversedGraph::new(&graph)); + + // First, determine the set of nodes that explicitly request or require + // an out-edge to the sink. + for node in graph.iter_nodes() { + if force_sink_edge(node) || graph.successors(node).next().is_none() { + sink_edge_nodes.insert(node); + dfs.push_start_node(node); + } + } + + // Next, find all nodes that are currently not reverse-reachable from + // `sink_edge_nodes`, and add them to the set as well. + dfs.complete_search(); + sink_edge_nodes.union_not(dfs.visited_set()); + + // The sink node is 1 higher than the highest real node. + let sink = G::Node::new(graph.num_nodes()); + + BalancedFlowGraph { graph, sink_edge_nodes, sink } + } +} + +impl graph::DirectedGraph for BalancedFlowGraph +where + G: graph::DirectedGraph, +{ + type Node = G::Node; + + /// Returns the number of nodes in this balanced-flow graph, which is 1 + /// more than the number of nodes in the underlying graph, to account for + /// the synthetic sink node. + fn num_nodes(&self) -> usize { + // The sink node's index is already the size of the underlying graph, + // so just add 1 to that instead. + self.sink.index() + 1 + } +} + +impl graph::StartNode for BalancedFlowGraph +where + G: graph::StartNode, +{ + fn start_node(&self) -> Self::Node { + self.graph.start_node() + } +} + +impl graph::Successors for BalancedFlowGraph +where + G: graph::StartNode + graph::Successors, +{ + fn successors(&self, node: Self::Node) -> impl Iterator { + let real_edges; + let sink_edge; + + if node == self.sink { + // The sink node has no real out-edges, and one synthetic out-edge + // to the start node. + real_edges = None; + sink_edge = Some(self.graph.start_node()); + } else { + // Real nodes have their real out-edges, and possibly one synthetic + // out-edge to the sink node. + real_edges = Some(self.graph.successors(node)); + sink_edge = self.sink_edge_nodes.contains(node).then_some(self.sink); + } + + real_edges.into_iter().flatten().chain(sink_edge) + } +} diff --git a/compiler/rustc_mir_transform/src/coverage/counters/iter_nodes.rs b/compiler/rustc_mir_transform/src/coverage/counters/iter_nodes.rs new file mode 100644 index 0000000000000..9d87f7af1b04e --- /dev/null +++ b/compiler/rustc_mir_transform/src/coverage/counters/iter_nodes.rs @@ -0,0 +1,16 @@ +use rustc_data_structures::graph; +use rustc_index::Idx; + +pub(crate) trait IterNodes: graph::DirectedGraph { + /// Iterates over all nodes of a graph in ascending numeric order. + /// Assumes that nodes are densely numbered, i.e. every index in + /// `0..num_nodes` is a valid node. + /// + /// FIXME: Can this just be part of [`graph::DirectedGraph`]? + fn iter_nodes( + &self, + ) -> impl Iterator + DoubleEndedIterator + ExactSizeIterator { + (0..self.num_nodes()).map(::new) + } +} +impl IterNodes for G {} diff --git a/compiler/rustc_mir_transform/src/coverage/counters/node_flow.rs b/compiler/rustc_mir_transform/src/coverage/counters/node_flow.rs new file mode 100644 index 0000000000000..610498c6c0ec7 --- /dev/null +++ b/compiler/rustc_mir_transform/src/coverage/counters/node_flow.rs @@ -0,0 +1,306 @@ +//! For each node in a control-flow graph, determines whether that node should +//! have a physical counter, or a counter expression that is derived from the +//! physical counters of other nodes. +//! +//! 
Based on the algorithm given in +//! "Optimal measurement points for program frequency counts" +//! (Knuth & Stevenson, 1973). + +use rustc_data_structures::graph; +use rustc_index::bit_set::DenseBitSet; +use rustc_index::{Idx, IndexVec}; +use rustc_middle::mir::coverage::Op; +use smallvec::SmallVec; + +use crate::coverage::counters::iter_nodes::IterNodes; +use crate::coverage::counters::union_find::{FrozenUnionFind, UnionFind}; + +#[cfg(test)] +mod tests; + +/// View of some underlying graph, in which each node's successors have been +/// merged into a single "supernode". +/// +/// The resulting supernodes have no obvious meaning on their own. +/// However, merging successor nodes means that a node's out-edges can all +/// be combined into a single out-edge, whose flow is the same as the flow +/// (execution count) of its corresponding node in the original graph. +/// +/// With all node flows now in the original graph now represented as edge flows +/// in the merged graph, it becomes possible to analyze the original node flows +/// using techniques for analyzing edge flows. +#[derive(Debug)] +pub(crate) struct MergedNodeFlowGraph { + /// Maps each node to the supernode that contains it, indicated by some + /// arbitrary "root" node that is part of that supernode. + supernodes: FrozenUnionFind, + /// For each node, stores the single supernode that all of its successors + /// have been merged into. + /// + /// (Note that each node in a supernode can potentially have a _different_ + /// successor supernode from its peers.) + succ_supernodes: IndexVec, +} + +impl MergedNodeFlowGraph { + /// Creates a "merged" view of an underlying graph. + /// + /// The given graph is assumed to have [“balanced flow”](balanced-flow), + /// though it does not necessarily have to be a `BalancedFlowGraph`. + /// + /// [balanced-flow]: `crate::coverage::counters::balanced_flow::BalancedFlowGraph`. + pub(crate) fn for_balanced_graph(graph: G) -> Self + where + G: graph::DirectedGraph + graph::Successors, + { + let mut supernodes = UnionFind::::new(graph.num_nodes()); + + // For each node, merge its successors into a single supernode, and + // arbitrarily choose one of those successors to represent all of them. + let successors = graph + .iter_nodes() + .map(|node| { + graph + .successors(node) + .reduce(|a, b| supernodes.unify(a, b)) + .expect("each node in a balanced graph must have at least one out-edge") + }) + .collect::>(); + + // Now that unification is complete, freeze the supernode forest, + // and resolve each arbitrarily-chosen successor to its canonical root. + // (This avoids having to explicitly resolve them later.) + let supernodes = supernodes.freeze(); + let succ_supernodes = successors.into_iter().map(|succ| supernodes.find(succ)).collect(); + + Self { supernodes, succ_supernodes } + } + + fn num_nodes(&self) -> usize { + self.succ_supernodes.len() + } + + fn is_supernode(&self, node: Node) -> bool { + self.supernodes.find(node) == node + } + + /// Using the information in this merged graph, together with a given + /// permutation of all nodes in the graph, to create physical counters and + /// counter expressions for each node in the underlying graph. + /// + /// The given list must contain exactly one copy of each node in the + /// underlying balanced-flow graph. The order of nodes is used as a hint to + /// influence counter allocation: + /// - Earlier nodes are more likely to receive counter expressions. + /// - Later nodes are more likely to receive physical counters. 
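// A standalone sketch of what a finished node counter expression means: a node's
// execution count is recovered by adding and subtracting the physical counters
// named by its terms. The `Op` and `CounterTerm` types below are simplified
// stand-ins for the compiler's, using plain `usize` node IDs.
#[derive(Clone, Copy)]
enum Op { Add, Subtract }

#[derive(Clone, Copy)]
struct CounterTerm { node: usize, op: Op }

fn evaluate(expr: &[CounterTerm], physical_counts: &[u64]) -> u64 {
    let mut total: i64 = 0;
    for term in expr {
        let count = physical_counts[term.node] as i64;
        match term.op {
            Op::Add => total += count,
            Op::Subtract => total -= count,
        }
    }
    // For a well-formed expression the final sum is the node's own count,
    // which is never negative; clamp defensively anyway.
    total.max(0) as u64
}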
+ pub(crate) fn make_node_counters(&self, all_nodes_permutation: &[Node]) -> NodeCounters { + let mut builder = SpantreeBuilder::new(self); + + for &node in all_nodes_permutation { + builder.visit_node(node); + } + + NodeCounters { counter_exprs: builder.finish() } + } +} + +/// End result of allocating physical counters and counter expressions for the +/// nodes of a graph. +#[derive(Debug)] +pub(crate) struct NodeCounters { + counter_exprs: IndexVec>, +} + +impl NodeCounters { + /// For the given node, returns the finished list of terms that represent + /// its physical counter or counter expression. Always non-empty. + /// + /// If a node was given a physical counter, its "expression" will contain + /// that counter as its sole element. + pub(crate) fn counter_expr(&self, this: Node) -> &[CounterTerm] { + self.counter_exprs[this].as_slice() + } +} + +#[derive(Debug)] +struct SpantreeEdge { + /// If true, this edge in the spantree has been reversed an odd number of + /// times, so all physical counters added to its node's counter expression + /// need to be negated. + is_reversed: bool, + /// Each spantree edge is "claimed" by the (regular) node that caused it to + /// be created. When a node with a physical counter traverses this edge, + /// that counter is added to the claiming node's counter expression. + claiming_node: Node, + /// Supernode at the other end of this spantree edge. Transitively points + /// to the "root" of this supernode's spantree component. + span_parent: Node, +} + +/// Part of a node's counter expression, which is a sum of counter terms. +#[derive(Debug)] +pub(crate) struct CounterTerm { + /// Whether to add or subtract the value of the node's physical counter. + pub(crate) op: Op, + /// The node whose physical counter is represented by this term. + pub(crate) node: Node, +} + +/// Stores the list of counter terms that make up a node's counter expression. +type CounterExprVec = SmallVec<[CounterTerm; 2]>; + +#[derive(Debug)] +struct SpantreeBuilder<'a, Node: Idx> { + graph: &'a MergedNodeFlowGraph, + is_unvisited: DenseBitSet, + /// Links supernodes to each other, gradually forming a spanning tree of + /// the merged-flow graph. + /// + /// A supernode without a span edge is the root of its component of the + /// spantree. Nodes that aren't supernodes cannot have a spantree edge. + span_edges: IndexVec>>, + /// Shared path buffer recycled by all calls to `yank_to_spantree_root`. + yank_buffer: Vec, + /// An in-progress counter expression for each node. Each expression is + /// initially empty, and will be filled in as relevant nodes are visited. + counter_exprs: IndexVec>, +} + +impl<'a, Node: Idx> SpantreeBuilder<'a, Node> { + fn new(graph: &'a MergedNodeFlowGraph) -> Self { + let num_nodes = graph.num_nodes(); + Self { + graph, + is_unvisited: DenseBitSet::new_filled(num_nodes), + span_edges: IndexVec::from_fn_n(|_| None, num_nodes), + yank_buffer: vec![], + counter_exprs: IndexVec::from_fn_n(|_| SmallVec::new(), num_nodes), + } + } + + /// Given a supernode, finds the supernode that is the "root" of its + /// spantree component. Two nodes that have the same spantree root are + /// connected in the spantree. + fn spantree_root(&self, this: Node) -> Node { + debug_assert!(self.graph.is_supernode(this)); + + match self.span_edges[this] { + None => this, + Some(SpantreeEdge { span_parent, .. }) => self.spantree_root(span_parent), + } + } + + /// Rotates edges in the spantree so that `this` is the root of its + /// spantree component. 
+ fn yank_to_spantree_root(&mut self, this: Node) { + debug_assert!(self.graph.is_supernode(this)); + + // The rotation is done iteratively, by first traversing from `this` to + // its root and storing the path in a buffer, and then traversing the + // path buffer backwards to reverse all the edges. + + // Recycle the same path buffer for all calls to this method. + let path_buf = &mut self.yank_buffer; + path_buf.clear(); + path_buf.push(this); + + // Traverse the spantree until we reach a supernode that has no + // span-parent, which must be the root. + let mut curr = this; + while let &Some(SpantreeEdge { span_parent, .. }) = &self.span_edges[curr] { + path_buf.push(span_parent); + curr = span_parent; + } + + // For each spantree edge `a -> b` in the path that was just traversed, + // reverse it to become `a <- b`, while preserving `claiming_node`. + for &[a, b] in path_buf.array_windows::<2>().rev() { + let SpantreeEdge { is_reversed, claiming_node, span_parent } = self.span_edges[a] + .take() + .expect("all nodes in the path (except the last) have a `span_parent`"); + debug_assert_eq!(span_parent, b); + debug_assert!(self.span_edges[b].is_none()); + self.span_edges[b] = + Some(SpantreeEdge { is_reversed: !is_reversed, claiming_node, span_parent: a }); + } + + // The result of the rotation is that `this` is now a spantree root. + debug_assert!(self.span_edges[this].is_none()); + } + + /// Must be called exactly once for each node in the balanced-flow graph. + fn visit_node(&mut self, this: Node) { + // Assert that this node was unvisited, and mark it visited. + assert!(self.is_unvisited.remove(this), "node has already been visited: {this:?}"); + + // Get the supernode containing `this`, and make it the root of its + // component of the spantree. + let this_supernode = self.graph.supernodes.find(this); + self.yank_to_spantree_root(this_supernode); + + // Get the supernode containing all of this's successors. + let succ_supernode = self.graph.succ_supernodes[this]; + debug_assert!(self.graph.is_supernode(succ_supernode)); + + // If two supernodes are already connected in the spantree, they will + // have the same spantree root. (Each supernode is connected to itself.) + if this_supernode != self.spantree_root(succ_supernode) { + // Adding this node's flow edge to the spantree would cause two + // previously-disconnected supernodes to become connected, so add + // it. That spantree-edge is now "claimed" by this node. + // + // Claiming a spantree-edge means that this node will get a counter + // expression instead of a physical counter. That expression is + // currently empty, but will be built incrementally as the other + // nodes are visited. + self.span_edges[this_supernode] = Some(SpantreeEdge { + is_reversed: false, + claiming_node: this, + span_parent: succ_supernode, + }); + } else { + // This node's flow edge would join two supernodes that are already + // connected in the spantree (or are the same supernode). That would + // create a cycle in the spantree, so don't add an edge. + // + // Instead, create a physical counter for this node, and add that + // counter to all expressions on the path from `succ_supernode` to + // `this_supernode`. + + // Instead of setting `this.measure = true` as in the original paper, + // we just add the node's ID to its own "expression". + self.counter_exprs[this].push(CounterTerm { node: this, op: Op::Add }); + + // Walk the spantree from `this.successor` back to `this`. 
For each + // spantree edge along the way, add this node's physical counter to + // the counter expression of the node that claimed the spantree edge. + let mut curr = succ_supernode; + while curr != this_supernode { + let &SpantreeEdge { is_reversed, claiming_node, span_parent } = + self.span_edges[curr].as_ref().unwrap(); + let op = if is_reversed { Op::Subtract } else { Op::Add }; + self.counter_exprs[claiming_node].push(CounterTerm { node: this, op }); + + curr = span_parent; + } + } + } + + /// Asserts that all nodes have been visited, and returns the computed + /// counter expressions (made up of physical counters) for each node. + fn finish(self) -> IndexVec> { + let Self { graph, is_unvisited, span_edges, yank_buffer: _, counter_exprs } = self; + assert!(is_unvisited.is_empty(), "some nodes were never visited: {is_unvisited:?}"); + debug_assert!( + span_edges + .iter_enumerated() + .all(|(node, span_edge)| { span_edge.is_some() <= graph.is_supernode(node) }), + "only supernodes can have a span edge", + ); + debug_assert!( + counter_exprs.iter().all(|expr| !expr.is_empty()), + "after visiting all nodes, every node should have a non-empty expression", + ); + counter_exprs + } +} diff --git a/compiler/rustc_mir_transform/src/coverage/counters/node_flow/tests.rs b/compiler/rustc_mir_transform/src/coverage/counters/node_flow/tests.rs new file mode 100644 index 0000000000000..9e7f754523d3d --- /dev/null +++ b/compiler/rustc_mir_transform/src/coverage/counters/node_flow/tests.rs @@ -0,0 +1,64 @@ +use itertools::Itertools; +use rustc_data_structures::graph; +use rustc_data_structures::graph::vec_graph::VecGraph; +use rustc_index::Idx; +use rustc_middle::mir::coverage::Op; + +use super::{CounterTerm, MergedNodeFlowGraph, NodeCounters}; + +fn merged_node_flow_graph(graph: G) -> MergedNodeFlowGraph { + MergedNodeFlowGraph::for_balanced_graph(graph) +} + +fn make_graph(num_nodes: usize, edge_pairs: Vec<(Node, Node)>) -> VecGraph { + VecGraph::new(num_nodes, edge_pairs) +} + +/// Example used in "Optimal Measurement Points for Program Frequency Counts" +/// (Knuth & Stevenson, 1973), but with 0-based node IDs. 
+#[test] +fn example_driver() { + let graph = make_graph::(5, vec![ + (0, 1), + (0, 3), + (1, 0), + (1, 2), + (2, 1), + (2, 4), + (3, 3), + (3, 4), + (4, 0), + ]); + + let merged = merged_node_flow_graph(&graph); + let counters = merged.make_node_counters(&[3, 1, 2, 0, 4]); + + assert_eq!(format_counter_expressions(&counters), &[ + // (comment to force vertical formatting for clarity) + "[0]: +c0", + "[1]: +c0 +c2 -c4", + "[2]: +c2", + "[3]: +c3", + "[4]: +c4", + ]); +} + +fn format_counter_expressions(counters: &NodeCounters) -> Vec { + let format_item = |&CounterTerm { node, op }| { + let op = match op { + Op::Subtract => '-', + Op::Add => '+', + }; + format!("{op}c{node:?}") + }; + + counters + .counter_exprs + .indices() + .map(|node| { + let mut expr = counters.counter_expr(node).iter().collect::>(); + expr.sort_by_key(|item| item.node.index()); + format!("[{node:?}]: {}", expr.into_iter().map(format_item).join(" ")) + }) + .collect() +} diff --git a/compiler/rustc_mir_transform/src/coverage/counters/tests.rs b/compiler/rustc_mir_transform/src/coverage/counters/tests.rs deleted file mode 100644 index 794d4358f8237..0000000000000 --- a/compiler/rustc_mir_transform/src/coverage/counters/tests.rs +++ /dev/null @@ -1,41 +0,0 @@ -use std::fmt::Debug; - -use super::sort_and_cancel; - -fn flatten(input: Vec>) -> Vec { - input.into_iter().flatten().collect() -} - -fn sort_and_cancel_and_flatten(pos: Vec, neg: Vec) -> (Vec, Vec) { - let (pos_actual, neg_actual) = sort_and_cancel(pos, neg); - (flatten(pos_actual), flatten(neg_actual)) -} - -#[track_caller] -fn check_test_case( - pos: Vec, - neg: Vec, - pos_expected: Vec, - neg_expected: Vec, -) { - eprintln!("pos = {pos:?}; neg = {neg:?}"); - let output = sort_and_cancel_and_flatten(pos, neg); - assert_eq!(output, (pos_expected, neg_expected)); -} - -#[test] -fn cancellation() { - let cases: &[(Vec, Vec, Vec, Vec)] = &[ - (vec![], vec![], vec![], vec![]), - (vec![4, 2, 1, 5, 3], vec![], vec![1, 2, 3, 4, 5], vec![]), - (vec![5, 5, 5, 5, 5], vec![5], vec![5, 5, 5, 5], vec![]), - (vec![1, 1, 2, 2, 3, 3], vec![1, 2, 3], vec![1, 2, 3], vec![]), - (vec![1, 1, 2, 2, 3, 3], vec![2, 4, 2], vec![1, 1, 3, 3], vec![4]), - ]; - - for (pos, neg, pos_expected, neg_expected) in cases { - check_test_case(pos.to_vec(), neg.to_vec(), pos_expected.to_vec(), neg_expected.to_vec()); - // Same test case, but with its inputs flipped and its outputs flipped. - check_test_case(neg.to_vec(), pos.to_vec(), neg_expected.to_vec(), pos_expected.to_vec()); - } -} diff --git a/compiler/rustc_mir_transform/src/coverage/counters/union_find.rs b/compiler/rustc_mir_transform/src/coverage/counters/union_find.rs new file mode 100644 index 0000000000000..2da4f5f5fce11 --- /dev/null +++ b/compiler/rustc_mir_transform/src/coverage/counters/union_find.rs @@ -0,0 +1,116 @@ +use std::cmp::Ordering; +use std::mem; + +use rustc_index::{Idx, IndexVec}; + +#[cfg(test)] +mod tests; + +/// Simple implementation of a union-find data structure, i.e. a disjoint-set +/// forest. +#[derive(Debug)] +pub(crate) struct UnionFind { + table: IndexVec>, +} + +#[derive(Debug)] +struct UnionFindEntry { + /// Transitively points towards the "root" of the set containing this key. + /// + /// Invariant: A root key is its own parent. + parent: Key, + /// When merging two "root" keys, their ranks determine which key becomes + /// the new root, to prevent the parent tree from becoming unnecessarily + /// tall. See [`UnionFind::unify`] for details. 
+ rank: u32, +} + +impl UnionFind { + /// Creates a new disjoint-set forest containing the keys `0..num_keys`. + /// Initially, every key is part of its own one-element set. + pub(crate) fn new(num_keys: usize) -> Self { + // Initially, every key is the root of its own set, so its parent is itself. + Self { table: IndexVec::from_fn_n(|key| UnionFindEntry { parent: key, rank: 0 }, num_keys) } + } + + /// Returns the "root" key of the disjoint-set containing the given key. + /// If two keys have the same root, they belong to the same set. + /// + /// Also updates internal data structures to make subsequent `find` + /// operations faster. + pub(crate) fn find(&mut self, key: Key) -> Key { + // Loop until we find a key that is its own parent. + let mut curr = key; + while let parent = self.table[curr].parent + && curr != parent + { + // Perform "path compression" by peeking one layer ahead, and + // setting the current key's parent to that value. + // (This works even when `parent` is the root of its set, because + // of the invariant that a root is its own parent.) + let parent_parent = self.table[parent].parent; + self.table[curr].parent = parent_parent; + + // Advance by one step and continue. + curr = parent; + } + curr + } + + /// Merges the set containing `a` and the set containing `b` into one set. + /// + /// Returns the common root of both keys, after the merge. + pub(crate) fn unify(&mut self, a: Key, b: Key) -> Key { + let mut a = self.find(a); + let mut b = self.find(b); + + // If both keys have the same root, they're already in the same set, + // so there's nothing more to do. + if a == b { + return a; + }; + + // Ensure that `a` has strictly greater rank, swapping if necessary. + // If both keys have the same rank, increment the rank of `a` so that + // future unifications will also prefer `a`, leading to flatter trees. + match Ord::cmp(&self.table[a].rank, &self.table[b].rank) { + Ordering::Less => mem::swap(&mut a, &mut b), + Ordering::Equal => self.table[a].rank += 1, + Ordering::Greater => {} + } + + debug_assert!(self.table[a].rank > self.table[b].rank); + debug_assert_eq!(self.table[b].parent, b); + + // Make `a` the parent of `b`. + self.table[b].parent = a; + + a + } + + /// Creates a snapshot of this disjoint-set forest that can no longer be + /// mutated, but can be queried without mutation. + pub(crate) fn freeze(&mut self) -> FrozenUnionFind { + // Just resolve each key to its actual root. + let roots = self.table.indices().map(|key| self.find(key)).collect(); + FrozenUnionFind { roots } + } +} + +/// Snapshot of a disjoint-set forest that can no longer be mutated, but can be +/// queried in O(1) time without mutation. +/// +/// This is really just a wrapper around a direct mapping from keys to roots, +/// but with a [`Self::find`] method that resembles [`UnionFind::find`]. +#[derive(Debug)] +pub(crate) struct FrozenUnionFind { + roots: IndexVec, +} + +impl FrozenUnionFind { + /// Returns the "root" key of the disjoint-set containing the given key. + /// If two keys have the same root, they belong to the same set. 
+ pub(crate) fn find(&self, key: Key) -> Key { + self.roots[key] + } +} diff --git a/compiler/rustc_mir_transform/src/coverage/counters/union_find/tests.rs b/compiler/rustc_mir_transform/src/coverage/counters/union_find/tests.rs new file mode 100644 index 0000000000000..34a4e4f8e6edc --- /dev/null +++ b/compiler/rustc_mir_transform/src/coverage/counters/union_find/tests.rs @@ -0,0 +1,32 @@ +use super::UnionFind; + +#[test] +fn empty() { + let mut sets = UnionFind::::new(10); + + for i in 1..10 { + assert_eq!(sets.find(i), i); + } +} + +#[test] +fn transitive() { + let mut sets = UnionFind::::new(10); + + sets.unify(3, 7); + sets.unify(4, 2); + + assert_eq!(sets.find(7), sets.find(3)); + assert_eq!(sets.find(2), sets.find(4)); + assert_ne!(sets.find(3), sets.find(4)); + + sets.unify(7, 4); + + assert_eq!(sets.find(7), sets.find(3)); + assert_eq!(sets.find(2), sets.find(4)); + assert_eq!(sets.find(3), sets.find(4)); + + for i in [0, 1, 5, 6, 8, 9] { + assert_eq!(sets.find(i), i); + } +} diff --git a/compiler/rustc_mir_transform/src/coverage/graph.rs b/compiler/rustc_mir_transform/src/coverage/graph.rs index ad6774fccd6f5..25dc7f31227c8 100644 --- a/compiler/rustc_mir_transform/src/coverage/graph.rs +++ b/compiler/rustc_mir_transform/src/coverage/graph.rs @@ -1,14 +1,13 @@ use std::cmp::Ordering; -use std::collections::VecDeque; use std::ops::{Index, IndexMut}; -use std::{iter, mem, slice}; +use std::{mem, slice}; use rustc_data_structures::captures::Captures; use rustc_data_structures::fx::FxHashSet; use rustc_data_structures::graph::dominators::Dominators; use rustc_data_structures::graph::{self, DirectedGraph, StartNode}; use rustc_index::IndexVec; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::mir::{self, BasicBlock, Terminator, TerminatorKind}; use tracing::debug; @@ -27,7 +26,7 @@ pub(crate) struct CoverageGraph { /// their relative order is consistent but arbitrary. dominator_order_rank: IndexVec, /// A loop header is a node that dominates one or more of its predecessors. - is_loop_header: BitSet, + is_loop_header: DenseBitSet, /// For each node, the loop header node of its nearest enclosing loop. /// This forms a linked list that can be traversed to find all enclosing loops. enclosing_loop_header: IndexVec>, @@ -72,7 +71,7 @@ impl CoverageGraph { predecessors, dominators: None, dominator_order_rank: IndexVec::from_elem_n(0, num_nodes), - is_loop_header: BitSet::new_empty(num_nodes), + is_loop_header: DenseBitSet::new_empty(num_nodes), enclosing_loop_header: IndexVec::from_elem_n(None, num_nodes), }; assert_eq!(num_nodes, this.num_nodes()); @@ -211,54 +210,6 @@ impl CoverageGraph { self.dominator_order_rank[a].cmp(&self.dominator_order_rank[b]) } - /// Returns the source of this node's sole in-edge, if it has exactly one. - /// That edge can be assumed to have the same execution count as the node - /// itself (in the absence of panics). - pub(crate) fn sole_predecessor( - &self, - to_bcb: BasicCoverageBlock, - ) -> Option { - // Unlike `simple_successor`, there is no need for extra checks here. - if let &[from_bcb] = self.predecessors[to_bcb].as_slice() { Some(from_bcb) } else { None } - } - - /// Returns the target of this node's sole out-edge, if it has exactly - /// one, but only if that edge can be assumed to have the same execution - /// count as the node itself (in the absence of panics). 
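The new union_find.rs module above is a textbook disjoint-set forest: `find` compresses paths, `unify` applies union by rank, and the accompanying tests check transitive unification. A minimal standalone sketch of the same technique, using plain `usize` keys instead of `rustc_index` key types and omitting the frozen snapshot:

```rust
/// Standalone sketch: path compression in `find`, union by rank in `unify`.
struct UnionFind {
    parent: Vec<usize>,
    rank: Vec<u32>,
}

impl UnionFind {
    fn new(num_keys: usize) -> Self {
        // Initially every key is the root of its own one-element set.
        Self { parent: (0..num_keys).collect(), rank: vec![0; num_keys] }
    }

    fn find(&mut self, key: usize) -> usize {
        let mut curr = key;
        while self.parent[curr] != curr {
            // Path halving: point the current key at its grandparent, then step up.
            self.parent[curr] = self.parent[self.parent[curr]];
            curr = self.parent[curr];
        }
        curr
    }

    fn unify(&mut self, a: usize, b: usize) -> usize {
        let (mut a, mut b) = (self.find(a), self.find(b));
        if a == b {
            return a;
        }
        // Union by rank: the root with greater rank becomes the new parent.
        if self.rank[a] < self.rank[b] {
            std::mem::swap(&mut a, &mut b);
        } else if self.rank[a] == self.rank[b] {
            self.rank[a] += 1;
        }
        self.parent[b] = a;
        a
    }
}

fn main() {
    let mut sets = UnionFind::new(10);
    sets.unify(3, 7);
    sets.unify(4, 2);
    sets.unify(7, 4);
    assert_eq!(sets.find(2), sets.find(3)); // 2, 3, 4, 7 now share one root
    assert_ne!(sets.find(0), sets.find(3)); // untouched keys stay in their own sets
}
```

The sketch uses the path-halving variant of compression (each visited key is pointed at its grandparent), which keeps the loop short while preserving the near-constant amortized cost.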
- pub(crate) fn simple_successor( - &self, - from_bcb: BasicCoverageBlock, - ) -> Option { - // If a node's count is the sum of its out-edges, and it has exactly - // one out-edge, then that edge has the same count as the node. - if self.bcbs[from_bcb].is_out_summable - && let &[to_bcb] = self.successors[from_bcb].as_slice() - { - Some(to_bcb) - } else { - None - } - } - - /// For each loop that contains the given node, yields the "loop header" - /// node representing that loop, from innermost to outermost. If the given - /// node is itself a loop header, it is yielded first. - pub(crate) fn loop_headers_containing( - &self, - bcb: BasicCoverageBlock, - ) -> impl Iterator + Captures<'_> { - let self_if_loop_header = self.is_loop_header.contains(bcb).then_some(bcb).into_iter(); - - let mut curr = Some(bcb); - let strictly_enclosing = iter::from_fn(move || { - let enclosing = self.enclosing_loop_header[curr?]; - curr = enclosing; - enclosing - }); - - self_if_loop_header.chain(strictly_enclosing) - } - /// For the given node, yields the subset of its predecessor nodes that /// it dominates. If that subset is non-empty, the node is a "loop header", /// and each of those predecessors represents an in-edge that jumps back to @@ -489,126 +440,3 @@ impl<'a, 'tcx> graph::Successors for CoverageRelevantSubgraph<'a, 'tcx> { self.coverage_successors(bb).into_iter() } } - -/// State of a node in the coverage graph during ready-first traversal. -#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] -enum ReadyState { - /// This node has not yet been added to the fallback queue or ready queue. - Unqueued, - /// This node is currently in the fallback queue. - InFallbackQueue, - /// This node's predecessors have all been visited, so it is in the ready queue. - /// (It might also have a stale entry in the fallback queue.) - InReadyQueue, - /// This node has been visited. - /// (It might also have a stale entry in the fallback queue.) - Visited, -} - -/// Iterator that visits nodes in the coverage graph, in an order that always -/// prefers "ready" nodes whose predecessors have already been visited. -pub(crate) struct ReadyFirstTraversal<'a> { - graph: &'a CoverageGraph, - - /// For each node, the number of its predecessor nodes that haven't been visited yet. - n_unvisited_preds: IndexVec, - /// Indicates whether a node has been visited, or which queue it is in. - state: IndexVec, - - /// Holds unvisited nodes whose predecessors have all been visited. - ready_queue: VecDeque, - /// Holds unvisited nodes with some unvisited predecessors. - /// Also contains stale entries for nodes that were upgraded to ready. - fallback_queue: VecDeque, -} - -impl<'a> ReadyFirstTraversal<'a> { - pub(crate) fn new(graph: &'a CoverageGraph) -> Self { - let num_nodes = graph.num_nodes(); - - let n_unvisited_preds = - IndexVec::from_fn_n(|node| graph.predecessors[node].len() as u32, num_nodes); - let mut state = IndexVec::from_elem_n(ReadyState::Unqueued, num_nodes); - - // We know from coverage graph construction that the start node is the - // only node with no predecessors. - debug_assert!( - n_unvisited_preds.iter_enumerated().all(|(node, &n)| (node == START_BCB) == (n == 0)) - ); - let ready_queue = VecDeque::from(vec![START_BCB]); - state[START_BCB] = ReadyState::InReadyQueue; - - Self { graph, state, n_unvisited_preds, ready_queue, fallback_queue: VecDeque::new() } - } - - /// Returns the next node from the ready queue, or else the next unvisited - /// node from the fallback queue. 
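The removed `next_inner` helper below implemented exactly this preference: take a node from the ready queue when one exists, and only otherwise fall back to a queued node that still has unvisited predecessors (which can only happen because of loops). A rough standalone sketch of that ready-first ordering over plain adjacency lists (function name made up; not the rustc types):

```rust
use std::collections::VecDeque;

/// Visit nodes reachable from `start`, preferring nodes whose predecessors
/// have all been visited; fall back only when loops leave nothing ready.
fn ready_first_order(successors: &[Vec<usize>], start: usize) -> Vec<usize> {
    let num_nodes = successors.len();
    let mut unvisited_preds = vec![0u32; num_nodes];
    for succs in successors {
        for &succ in succs {
            unvisited_preds[succ] += 1;
        }
    }

    let mut visited = vec![false; num_nodes];
    let mut ready = VecDeque::from([start]);
    let mut fallback: VecDeque<usize> = VecDeque::new();
    let mut order = Vec::new();

    while let Some(node) = ready.pop_front().or_else(|| fallback.pop_front()) {
        if visited[node] {
            continue; // stale fallback entry; the node was upgraded to ready earlier
        }
        visited[node] = true;
        order.push(node);

        for &succ in &successors[node] {
            if visited[succ] {
                continue;
            }
            unvisited_preds[succ] -= 1;
            if unvisited_preds[succ] == 0 {
                ready.push_back(succ); // all predecessors visited: ready
            } else {
                fallback.push_back(succ); // still blocked; used only if nothing is ready
            }
        }
    }
    order
}

fn main() {
    // 0 -> 1 -> 2 -> 1 (loop back), 2 -> 3
    let graph = vec![vec![1], vec![2], vec![1, 3], vec![]];
    assert_eq!(ready_first_order(&graph, 0), vec![0, 1, 2, 3]);
}
```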
- fn next_inner(&mut self) -> Option { - // Always prefer to yield a ready node if possible. - if let Some(node) = self.ready_queue.pop_front() { - assert_eq!(self.state[node], ReadyState::InReadyQueue); - return Some(node); - } - - while let Some(node) = self.fallback_queue.pop_front() { - match self.state[node] { - // This entry in the fallback queue is not stale, so yield it. - ReadyState::InFallbackQueue => return Some(node), - // This node was added to the fallback queue, but later became - // ready and was visited via the ready queue. Ignore it here. - ReadyState::Visited => {} - // Unqueued nodes can't be in the fallback queue, by definition. - // We know that the ready queue is empty at this point. - ReadyState::Unqueued | ReadyState::InReadyQueue => unreachable!( - "unexpected state for {node:?} in the fallback queue: {:?}", - self.state[node] - ), - } - } - - None - } - - fn mark_visited_and_enqueue_successors(&mut self, node: BasicCoverageBlock) { - assert!(self.state[node] < ReadyState::Visited); - self.state[node] = ReadyState::Visited; - - // For each of this node's successors, decrease the successor's - // "unvisited predecessors" count, and enqueue it if appropriate. - for &succ in &self.graph.successors[node] { - let is_unqueued = match self.state[succ] { - ReadyState::Unqueued => true, - ReadyState::InFallbackQueue => false, - ReadyState::InReadyQueue => { - unreachable!("nodes in the ready queue have no unvisited predecessors") - } - // The successor was already visited via one of its other predecessors. - ReadyState::Visited => continue, - }; - - self.n_unvisited_preds[succ] -= 1; - if self.n_unvisited_preds[succ] == 0 { - // This node's predecessors have all been visited, so add it to - // the ready queue. If it's already in the fallback queue, that - // fallback entry will be ignored later. - self.state[succ] = ReadyState::InReadyQueue; - self.ready_queue.push_back(succ); - } else if is_unqueued { - // This node has unvisited predecessors, so add it to the - // fallback queue in case we run out of ready nodes later. - self.state[succ] = ReadyState::InFallbackQueue; - self.fallback_queue.push_back(succ); - } - } - } -} - -impl<'a> Iterator for ReadyFirstTraversal<'a> { - type Item = BasicCoverageBlock; - - fn next(&mut self) -> Option { - let node = self.next_inner()?; - self.mark_visited_and_enqueue_successors(node); - Some(node) - } -} diff --git a/compiler/rustc_mir_transform/src/coverage/mappings.rs b/compiler/rustc_mir_transform/src/coverage/mappings.rs index 2db7c6cf1d6fd..8d0d92dc36772 100644 --- a/compiler/rustc_mir_transform/src/coverage/mappings.rs +++ b/compiler/rustc_mir_transform/src/coverage/mappings.rs @@ -3,7 +3,7 @@ use std::collections::BTreeSet; use rustc_data_structures::fx::FxIndexMap; use rustc_data_structures::graph::DirectedGraph; use rustc_index::IndexVec; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::mir::coverage::{ BlockMarkerId, BranchSpan, ConditionId, ConditionInfo, CoverageInfoHi, CoverageKind, }; @@ -80,7 +80,7 @@ pub(super) fn extract_all_mapping_info_from_mir<'tcx>( tcx: TyCtxt<'tcx>, mir_body: &mir::Body<'tcx>, hir_info: &ExtractedHirInfo, - basic_coverage_blocks: &CoverageGraph, + graph: &CoverageGraph, ) -> ExtractedMappings { let mut code_mappings = vec![]; let mut branch_pairs = vec![]; @@ -102,23 +102,23 @@ pub(super) fn extract_all_mapping_info_from_mir<'tcx>( } } else { // Extract coverage spans from MIR statements/terminators as normal. 
- extract_refined_covspans(mir_body, hir_info, basic_coverage_blocks, &mut code_mappings); + extract_refined_covspans(mir_body, hir_info, graph, &mut code_mappings); } - branch_pairs.extend(extract_branch_pairs(mir_body, hir_info, basic_coverage_blocks)); + branch_pairs.extend(extract_branch_pairs(mir_body, hir_info, graph)); extract_mcdc_mappings( mir_body, tcx, hir_info.body_span, - basic_coverage_blocks, + graph, &mut mcdc_bitmap_bits, &mut mcdc_degraded_branches, &mut mcdc_mappings, ); ExtractedMappings { - num_bcbs: basic_coverage_blocks.num_nodes(), + num_bcbs: graph.num_nodes(), code_mappings, branch_pairs, mcdc_bitmap_bits, @@ -128,7 +128,7 @@ pub(super) fn extract_all_mapping_info_from_mir<'tcx>( } impl ExtractedMappings { - pub(super) fn all_bcbs_with_counter_mappings(&self) -> BitSet { + pub(super) fn all_bcbs_with_counter_mappings(&self) -> DenseBitSet { // Fully destructure self to make sure we don't miss any fields that have mappings. let Self { num_bcbs, @@ -140,7 +140,7 @@ impl ExtractedMappings { } = self; // Identify which BCBs have one or more mappings. - let mut bcbs_with_counter_mappings = BitSet::new_empty(*num_bcbs); + let mut bcbs_with_counter_mappings = DenseBitSet::new_empty(*num_bcbs); let mut insert = |bcb| { bcbs_with_counter_mappings.insert(bcb); }; @@ -172,8 +172,8 @@ impl ExtractedMappings { } /// Returns the set of BCBs that have one or more `Code` mappings. - pub(super) fn bcbs_with_ordinary_code_mappings(&self) -> BitSet { - let mut bcbs = BitSet::new_empty(self.num_bcbs); + pub(super) fn bcbs_with_ordinary_code_mappings(&self) -> DenseBitSet { + let mut bcbs = DenseBitSet::new_empty(self.num_bcbs); for &CodeMapping { span: _, bcb } in &self.code_mappings { bcbs.insert(bcb); } @@ -211,7 +211,7 @@ fn resolve_block_markers( pub(super) fn extract_branch_pairs( mir_body: &mir::Body<'_>, hir_info: &ExtractedHirInfo, - basic_coverage_blocks: &CoverageGraph, + graph: &CoverageGraph, ) -> Vec { let Some(coverage_info_hi) = mir_body.coverage_info_hi.as_deref() else { return vec![] }; @@ -228,8 +228,7 @@ pub(super) fn extract_branch_pairs( } let span = unexpand_into_body_span(raw_span, hir_info.body_span)?; - let bcb_from_marker = - |marker: BlockMarkerId| basic_coverage_blocks.bcb_from_bb(block_markers[marker]?); + let bcb_from_marker = |marker: BlockMarkerId| graph.bcb_from_bb(block_markers[marker]?); let true_bcb = bcb_from_marker(true_marker)?; let false_bcb = bcb_from_marker(false_marker)?; @@ -243,7 +242,7 @@ pub(super) fn extract_mcdc_mappings( mir_body: &mir::Body<'_>, tcx: TyCtxt<'_>, body_span: Span, - basic_coverage_blocks: &CoverageGraph, + graph: &CoverageGraph, mcdc_bitmap_bits: &mut usize, mcdc_degraded_branches: &mut impl Extend, mcdc_mappings: &mut impl Extend<(MCDCDecision, Vec)>, @@ -252,8 +251,7 @@ pub(super) fn extract_mcdc_mappings( let block_markers = resolve_block_markers(coverage_info_hi, mir_body); - let bcb_from_marker = - |marker: BlockMarkerId| basic_coverage_blocks.bcb_from_bb(block_markers[marker]?); + let bcb_from_marker = |marker: BlockMarkerId| graph.bcb_from_bb(block_markers[marker]?); let check_branch_bcb = |raw_span: Span, true_marker: BlockMarkerId, false_marker: BlockMarkerId| { @@ -369,9 +367,8 @@ fn calc_test_vectors_index(conditions: &mut Vec) -> usize { }) .collect::>(); - let mut queue = std::collections::VecDeque::from_iter( - next_conditions.swap_remove(&ConditionId::START).into_iter(), - ); + let mut queue = + std::collections::VecDeque::from_iter(next_conditions.swap_remove(&ConditionId::START)); 
num_paths_stats[ConditionId::START] = 1; let mut decision_end_nodes = Vec::new(); while let Some(branch) = queue.pop_front() { diff --git a/compiler/rustc_mir_transform/src/coverage/mod.rs b/compiler/rustc_mir_transform/src/coverage/mod.rs index 241c3c79114ac..b1b609595b74a 100644 --- a/compiler/rustc_mir_transform/src/coverage/mod.rs +++ b/compiler/rustc_mir_transform/src/coverage/mod.rs @@ -13,16 +13,12 @@ use rustc_hir::intravisit::{Visitor, walk_expr}; use rustc_middle::hir::map::Map; use rustc_middle::hir::nested_filter; use rustc_middle::mir::coverage::{ - CoverageKind, DecisionInfo, FunctionCoverageInfo, Mapping, MappingKind, SourceRegion, -}; -use rustc_middle::mir::{ - self, BasicBlock, BasicBlockData, SourceInfo, Statement, StatementKind, Terminator, - TerminatorKind, + CoverageKind, DecisionInfo, FunctionCoverageInfo, Mapping, MappingKind, }; +use rustc_middle::mir::{self, BasicBlock, Statement, StatementKind, TerminatorKind}; use rustc_middle::ty::TyCtxt; +use rustc_span::Span; use rustc_span::def_id::LocalDefId; -use rustc_span::source_map::SourceMap; -use rustc_span::{BytePos, Pos, SourceFile, Span}; use tracing::{debug, debug_span, trace}; use crate::coverage::counters::{CoverageCounters, Site}; @@ -72,16 +68,15 @@ fn instrument_function_for_coverage<'tcx>(tcx: TyCtxt<'tcx>, mir_body: &mut mir: let _span = debug_span!("instrument_function_for_coverage", ?def_id).entered(); let hir_info = extract_hir_info(tcx, def_id.expect_local()); - let basic_coverage_blocks = CoverageGraph::from_mir(mir_body); + + // Build the coverage graph, which is a simplified view of the MIR control-flow + // graph that ignores some details not relevant to coverage instrumentation. + let graph = CoverageGraph::from_mir(mir_body); //////////////////////////////////////////////////// // Extract coverage spans and other mapping info from MIR. - let extracted_mappings = mappings::extract_all_mapping_info_from_mir( - tcx, - mir_body, - &hir_info, - &basic_coverage_blocks, - ); + let extracted_mappings = + mappings::extract_all_mapping_info_from_mir(tcx, mir_body, &hir_info, &graph); //////////////////////////////////////////////////// // Create an optimized mix of `Counter`s and `Expression`s for the `CoverageGraph`. Ensure @@ -95,23 +90,18 @@ fn instrument_function_for_coverage<'tcx>(tcx: TyCtxt<'tcx>, mir_body: &mut mir: } let coverage_counters = - CoverageCounters::make_bcb_counters(&basic_coverage_blocks, &bcbs_with_counter_mappings); + CoverageCounters::make_bcb_counters(&graph, &bcbs_with_counter_mappings); - let mappings = create_mappings(tcx, &hir_info, &extracted_mappings, &coverage_counters); + let mappings = create_mappings(&extracted_mappings, &coverage_counters); if mappings.is_empty() { // No spans could be converted into valid mappings, so skip this function. debug!("no spans could be converted into valid mappings; skipping"); return; } - inject_coverage_statements( - mir_body, - &basic_coverage_blocks, - &extracted_mappings, - &coverage_counters, - ); + inject_coverage_statements(mir_body, &graph, &extracted_mappings, &coverage_counters); - inject_mcdc_statements(mir_body, &basic_coverage_blocks, &extracted_mappings); + inject_mcdc_statements(mir_body, &graph, &extracted_mappings); let mcdc_num_condition_bitmaps = extracted_mappings .mcdc_mappings @@ -136,18 +126,12 @@ fn instrument_function_for_coverage<'tcx>(tcx: TyCtxt<'tcx>, mir_body: &mut mir: /// /// Precondition: All BCBs corresponding to those spans have been given /// coverage counters. 
-fn create_mappings<'tcx>( - tcx: TyCtxt<'tcx>, - hir_info: &ExtractedHirInfo, +fn create_mappings( extracted_mappings: &ExtractedMappings, coverage_counters: &CoverageCounters, ) -> Vec { - let source_map = tcx.sess.source_map(); - let file = source_map.lookup_source_file(hir_info.body_span.lo()); - let term_for_bcb = |bcb| coverage_counters.term_for_bcb(bcb).expect("all BCBs with spans were given counters"); - let region_for_span = |span: Span| make_source_region(source_map, hir_info, &file, span); // Fully destructure the mappings struct to make sure we don't miss any kinds. let ExtractedMappings { @@ -160,22 +144,20 @@ fn create_mappings<'tcx>( } = extracted_mappings; let mut mappings = Vec::new(); - mappings.extend(code_mappings.iter().filter_map( + mappings.extend(code_mappings.iter().map( // Ordinary code mappings are the simplest kind. |&mappings::CodeMapping { span, bcb }| { - let source_region = region_for_span(span)?; let kind = MappingKind::Code(term_for_bcb(bcb)); - Some(Mapping { kind, source_region }) + Mapping { kind, span } }, )); - mappings.extend(branch_pairs.iter().filter_map( + mappings.extend(branch_pairs.iter().map( |&mappings::BranchPair { span, true_bcb, false_bcb }| { let true_term = term_for_bcb(true_bcb); let false_term = term_for_bcb(false_bcb); let kind = MappingKind::Branch { true_term, false_term }; - let source_region = region_for_span(span)?; - Some(Mapping { kind, source_region }) + Mapping { kind, span } }, )); @@ -183,7 +165,7 @@ fn create_mappings<'tcx>( |bcb| coverage_counters.term_for_bcb(bcb).expect("all BCBs with spans were given counters"); // MCDC branch mappings are appended with their decisions in case decisions were ignored. - mappings.extend(mcdc_degraded_branches.iter().filter_map( + mappings.extend(mcdc_degraded_branches.iter().map( |&mappings::MCDCBranch { span, true_bcb, @@ -192,10 +174,9 @@ fn create_mappings<'tcx>( true_index: _, false_index: _, }| { - let source_region = region_for_span(span)?; let true_term = term_for_bcb(true_bcb); let false_term = term_for_bcb(false_bcb); - Some(Mapping { kind: MappingKind::Branch { true_term, false_term }, source_region }) + Mapping { kind: MappingKind::Branch { true_term, false_term }, span } }, )); @@ -203,7 +184,7 @@ fn create_mappings<'tcx>( let num_conditions = branches.len() as u16; let conditions = branches .into_iter() - .filter_map( + .map( |&mappings::MCDCBranch { span, true_bcb, @@ -212,32 +193,28 @@ fn create_mappings<'tcx>( true_index: _, false_index: _, }| { - let source_region = region_for_span(span)?; let true_term = term_for_bcb(true_bcb); let false_term = term_for_bcb(false_bcb); - Some(Mapping { + Mapping { kind: MappingKind::MCDCBranch { true_term, false_term, mcdc_params: condition_info, }, - source_region, - }) + span, + } }, ) .collect::>(); - if conditions.len() == num_conditions as usize - && let Some(source_region) = region_for_span(decision.span) - { + if conditions.len() == num_conditions as usize { // LLVM requires end index for counter mapping regions. 
let kind = MappingKind::MCDCDecision(DecisionInfo { bitmap_idx: (decision.bitmap_idx + decision.num_test_vectors) as u32, num_conditions, }); - mappings.extend( - std::iter::once(Mapping { kind, source_region }).chain(conditions.into_iter()), - ); + let span = decision.span; + mappings.extend(std::iter::once(Mapping { kind, span }).chain(conditions.into_iter())); } else { mappings.extend(conditions.into_iter().map(|mapping| { let MappingKind::MCDCBranch { true_term, false_term, mcdc_params: _ } = @@ -245,10 +222,7 @@ fn create_mappings<'tcx>( else { unreachable!("all mappings here are MCDCBranch as shown above"); }; - Mapping { - kind: MappingKind::Branch { true_term, false_term }, - source_region: mapping.source_region, - } + Mapping { kind: MappingKind::Branch { true_term, false_term }, span: mapping.span } })) } } @@ -260,7 +234,7 @@ fn create_mappings<'tcx>( /// inject any necessary coverage statements into MIR. fn inject_coverage_statements<'tcx>( mir_body: &mut mir::Body<'tcx>, - basic_coverage_blocks: &CoverageGraph, + graph: &CoverageGraph, extracted_mappings: &ExtractedMappings, coverage_counters: &CoverageCounters, ) { @@ -270,20 +244,7 @@ fn inject_coverage_statements<'tcx>( // For BCB nodes this is just their first block, but for edges we need // to create a new block between the two BCBs, and inject into that. let target_bb = match site { - Site::Node { bcb } => basic_coverage_blocks[bcb].leader_bb(), - Site::Edge { from_bcb, to_bcb } => { - // Create a new block between the last block of `from_bcb` and - // the first block of `to_bcb`. - let from_bb = basic_coverage_blocks[from_bcb].last_bb(); - let to_bb = basic_coverage_blocks[to_bcb].leader_bb(); - - let new_bb = inject_edge_counter_basic_block(mir_body, from_bb, to_bb); - debug!( - "Edge {from_bcb:?} (last {from_bb:?}) -> {to_bcb:?} (leader {to_bb:?}) \ - requires a new MIR BasicBlock {new_bb:?} for counter increment {id:?}", - ); - new_bb - } + Site::Node { bcb } => graph[bcb].leader_bb(), }; inject_statement(mir_body, CoverageKind::CounterIncrement { id }, target_bb); @@ -308,7 +269,7 @@ fn inject_coverage_statements<'tcx>( inject_statement( mir_body, CoverageKind::ExpressionUsed { id: expression_id }, - basic_coverage_blocks[bcb].leader_bb(), + graph[bcb].leader_bb(), ); } } @@ -317,13 +278,13 @@ fn inject_coverage_statements<'tcx>( /// For each decision inject statements to update test vector bitmap after it has been evaluated. fn inject_mcdc_statements<'tcx>( mir_body: &mut mir::Body<'tcx>, - basic_coverage_blocks: &CoverageGraph, + graph: &CoverageGraph, extracted_mappings: &ExtractedMappings, ) { for (decision, conditions) in &extracted_mappings.mcdc_mappings { // Inject test vector update first because `inject_statement` always insert new statement at head. for &end in &decision.end_bcbs { - let end_bb = basic_coverage_blocks[end].leader_bb(); + let end_bb = graph[end].leader_bb(); inject_statement( mir_body, CoverageKind::TestVectorBitmapUpdate { @@ -344,7 +305,7 @@ fn inject_mcdc_statements<'tcx>( } in conditions { for (index, bcb) in [(false_index, false_bcb), (true_index, true_bcb)] { - let bb = basic_coverage_blocks[bcb].leader_bb(); + let bb = graph[bcb].leader_bb(); inject_statement( mir_body, CoverageKind::CondBitmapUpdate { @@ -358,31 +319,6 @@ fn inject_mcdc_statements<'tcx>( } } -/// Given two basic blocks that have a control-flow edge between them, creates -/// and returns a new block that sits between those blocks. 
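The doc comment above belongs to the removed `inject_edge_counter_basic_block` helper that follows; with edge counters no longer injected, it has no callers. The underlying idea is ordinary edge splitting: to attach code to the edge `from -> to`, create a fresh block that jumps to `to` and redirect the edge at it. A minimal sketch over plain adjacency lists (illustrative only, names made up; the removed helper did the equivalent on MIR basic blocks, ending the new block in a `Goto` to the old target):

```rust
/// Split the edge `from -> to` by inserting a fresh node between them.
fn split_edge(successors: &mut Vec<Vec<usize>>, from: usize, to: usize) -> usize {
    let new_node = successors.len();
    // The new node simply falls through to the old edge target.
    successors.push(vec![to]);
    // Redirect the existing `from -> to` edge at the new node.
    let edge = successors[from]
        .iter_mut()
        .find(|succ| **succ == to)
        .expect("`from` should have an edge to `to`");
    *edge = new_node;
    new_node
}

fn main() {
    // 0 -> 1, 0 -> 2
    let mut successors: Vec<Vec<usize>> = vec![vec![1, 2], vec![], vec![]];
    let new_node = split_edge(&mut successors, 0, 2);
    assert_eq!(new_node, 3);
    assert_eq!(successors[0], vec![1, 3]); // the edge now goes through the new node
    assert_eq!(successors[3], vec![2]);
}
```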
-fn inject_edge_counter_basic_block( - mir_body: &mut mir::Body<'_>, - from_bb: BasicBlock, - to_bb: BasicBlock, -) -> BasicBlock { - let span = mir_body[from_bb].terminator().source_info.span.shrink_to_hi(); - let new_bb = mir_body.basic_blocks_mut().push(BasicBlockData { - statements: vec![], // counter will be injected here - terminator: Some(Terminator { - source_info: SourceInfo::outermost(span), - kind: TerminatorKind::Goto { target: to_bb }, - }), - is_cleanup: false, - }); - let edge_ref = mir_body[from_bb] - .terminator_mut() - .successors_mut() - .find(|successor| **successor == to_bb) - .expect("from_bb should have a successor for to_bb"); - *edge_ref = new_bb; - new_bb -} - fn inject_statement(mir_body: &mut mir::Body<'_>, counter_kind: CoverageKind, bb: BasicBlock) { debug!(" injecting statement {counter_kind:?} for {bb:?}"); let data = &mut mir_body[bb]; @@ -391,121 +327,6 @@ fn inject_statement(mir_body: &mut mir::Body<'_>, counter_kind: CoverageKind, bb data.statements.insert(0, statement); } -fn ensure_non_empty_span( - source_map: &SourceMap, - hir_info: &ExtractedHirInfo, - span: Span, -) -> Option { - if !span.is_empty() { - return Some(span); - } - - let lo = span.lo(); - let hi = span.hi(); - - // The span is empty, so try to expand it to cover an adjacent '{' or '}', - // but only within the bounds of the body span. - let try_next = hi < hir_info.body_span.hi(); - let try_prev = hir_info.body_span.lo() < lo; - if !(try_next || try_prev) { - return None; - } - - source_map - .span_to_source(span, |src, start, end| try { - // We're only checking for specific ASCII characters, so we don't - // have to worry about multi-byte code points. - if try_next && src.as_bytes()[end] == b'{' { - Some(span.with_hi(hi + BytePos(1))) - } else if try_prev && src.as_bytes()[start - 1] == b'}' { - Some(span.with_lo(lo - BytePos(1))) - } else { - None - } - }) - .ok()? -} - -/// Converts the span into its start line and column, and end line and column. -/// -/// Line numbers and column numbers are 1-based. Unlike most column numbers emitted by -/// the compiler, these column numbers are denoted in **bytes**, because that's what -/// LLVM's `llvm-cov` tool expects to see in coverage maps. -/// -/// Returns `None` if the conversion failed for some reason. This shouldn't happen, -/// but it's hard to rule out entirely (especially in the presence of complex macros -/// or other expansions), and if it does happen then skipping a span or function is -/// better than an ICE or `llvm-cov` failure that the user might have no way to avoid. -fn make_source_region( - source_map: &SourceMap, - hir_info: &ExtractedHirInfo, - file: &SourceFile, - span: Span, -) -> Option { - let span = ensure_non_empty_span(source_map, hir_info, span)?; - - let lo = span.lo(); - let hi = span.hi(); - - // Column numbers need to be in bytes, so we can't use the more convenient - // `SourceMap` methods for looking up file coordinates. - let line_and_byte_column = |pos: BytePos| -> Option<(usize, usize)> { - let rpos = file.relative_position(pos); - let line_index = file.lookup_line(rpos)?; - let line_start = file.lines()[line_index]; - // Line numbers and column numbers are 1-based, so add 1 to each. - Some((line_index + 1, (rpos - line_start).to_usize() + 1)) - }; - - let (mut start_line, start_col) = line_and_byte_column(lo)?; - let (mut end_line, end_col) = line_and_byte_column(hi)?; - - // Apply an offset so that code in doctests has correct line numbers. 
- // FIXME(#79417): Currently we have no way to offset doctest _columns_. - start_line = source_map.doctest_offset_line(&file.name, start_line); - end_line = source_map.doctest_offset_line(&file.name, end_line); - - check_source_region(SourceRegion { - start_line: start_line as u32, - start_col: start_col as u32, - end_line: end_line as u32, - end_col: end_col as u32, - }) -} - -/// If `llvm-cov` sees a source region that is improperly ordered (end < start), -/// it will immediately exit with a fatal error. To prevent that from happening, -/// discard regions that are improperly ordered, or might be interpreted in a -/// way that makes them improperly ordered. -fn check_source_region(source_region: SourceRegion) -> Option { - let SourceRegion { start_line, start_col, end_line, end_col } = source_region; - - // Line/column coordinates are supposed to be 1-based. If we ever emit - // coordinates of 0, `llvm-cov` might misinterpret them. - let all_nonzero = [start_line, start_col, end_line, end_col].into_iter().all(|x| x != 0); - // Coverage mappings use the high bit of `end_col` to indicate that a - // region is actually a "gap" region, so make sure it's unset. - let end_col_has_high_bit_unset = (end_col & (1 << 31)) == 0; - // If a region is improperly ordered (end < start), `llvm-cov` will exit - // with a fatal error, which is inconvenient for users and hard to debug. - let is_ordered = (start_line, start_col) <= (end_line, end_col); - - if all_nonzero && end_col_has_high_bit_unset && is_ordered { - Some(source_region) - } else { - debug!( - ?source_region, - ?all_nonzero, - ?end_col_has_high_bit_unset, - ?is_ordered, - "Skipping source region that would be misinterpreted or rejected by LLVM" - ); - // If this happens in a debug build, ICE to make it easier to notice. - debug_assert!(false, "Improper source region: {source_region:?}"); - None - } -} - /// Function information extracted from HIR by the coverage instrumentor. #[derive(Debug)] struct ExtractedHirInfo { diff --git a/compiler/rustc_mir_transform/src/coverage/query.rs b/compiler/rustc_mir_transform/src/coverage/query.rs index edaec3c796511..c56685699f6ad 100644 --- a/compiler/rustc_mir_transform/src/coverage/query.rs +++ b/compiler/rustc_mir_transform/src/coverage/query.rs @@ -1,5 +1,5 @@ use rustc_data_structures::captures::Captures; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; use rustc_middle::mir::coverage::{ CounterId, CovTerm, CoverageIdsInfo, CoverageKind, Expression, ExpressionId, @@ -68,7 +68,7 @@ fn coverage_attr_on(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool { Some(_) | None => { // Other possibilities should have been rejected by `rustc_parse::validate_attr`. // Use `span_delayed_bug` to avoid an ICE in failing builds (#127880). 
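Earlier in this hunk, the removed `make_source_region` and `check_source_region` converted spans into 1-based line and byte-column coordinates and rejected regions that `llvm-cov` would misinterpret; mappings now carry `Span`s, presumably leaving that conversion to a later stage. As a rough standalone sketch of that kind of byte-offset to line/column lookup (plain strings rather than `SourceMap`/`SourceFile`, and without the doctest offset or validity checks):

```rust
/// Convert a byte offset in source text to a 1-based (line, byte column) pair.
fn line_and_byte_column(src: &str, byte_pos: usize) -> (usize, usize) {
    // Byte offsets at which each line starts.
    let mut line_starts = vec![0usize];
    line_starts.extend(
        src.bytes().enumerate().filter(|&(_, b)| b == b'\n').map(|(i, _)| i + 1),
    );

    // The line containing `byte_pos` is the last line that starts at or before it.
    let line_index = line_starts.partition_point(|&start| start <= byte_pos) - 1;
    // Lines and columns are 1-based; columns are counted in bytes, which is
    // what `llvm-cov` expects.
    (line_index + 1, byte_pos - line_starts[line_index] + 1)
}

fn main() {
    let src = "fn main() {\n    println!(\"hi\");\n}\n";
    assert_eq!(line_and_byte_column(src, 0), (1, 1)); // 'f' of `fn`
    assert_eq!(line_and_byte_column(src, 16), (2, 5)); // 'p' of `println!`
}
```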
- tcx.dcx().span_delayed_bug(attr.span, "unexpected value of coverage attribute"); + tcx.dcx().span_delayed_bug(attr.span(), "unexpected value of coverage attribute"); } } } @@ -92,13 +92,13 @@ fn coverage_ids_info<'tcx>( let Some(fn_cov_info) = mir_body.function_coverage_info.as_deref() else { return CoverageIdsInfo { - counters_seen: BitSet::new_empty(0), - zero_expressions: BitSet::new_empty(0), + counters_seen: DenseBitSet::new_empty(0), + zero_expressions: DenseBitSet::new_empty(0), }; }; - let mut counters_seen = BitSet::new_empty(fn_cov_info.num_counters); - let mut expressions_seen = BitSet::new_filled(fn_cov_info.expressions.len()); + let mut counters_seen = DenseBitSet::new_empty(fn_cov_info.num_counters); + let mut expressions_seen = DenseBitSet::new_filled(fn_cov_info.expressions.len()); // For each expression ID that is directly used by one or more mappings, // mark it as not-yet-seen. This indicates that we expect to see a @@ -148,23 +148,23 @@ fn is_inlined(body: &Body<'_>, statement: &Statement<'_>) -> bool { scope_data.inlined.is_some() || scope_data.inlined_parent_scope.is_some() } -/// Identify expressions that will always have a value of zero, and note -/// their IDs in a `BitSet`. Mappings that refer to a zero expression -/// can instead become mappings to a constant zero value. +/// Identify expressions that will always have a value of zero, and note their +/// IDs in a `DenseBitSet`. Mappings that refer to a zero expression can instead +/// become mappings to a constant zero value. /// /// This function mainly exists to preserve the simplifications that were /// already being performed by the Rust-side expression renumbering, so that /// the resulting coverage mappings don't get worse. fn identify_zero_expressions( fn_cov_info: &FunctionCoverageInfo, - counters_seen: &BitSet, - expressions_seen: &BitSet, -) -> BitSet { + counters_seen: &DenseBitSet, + expressions_seen: &DenseBitSet, +) -> DenseBitSet { // The set of expressions that either were optimized out entirely, or // have zero as both of their operands, and will therefore always have // a value of zero. Other expressions that refer to these as operands // can have those operands replaced with `CovTerm::Zero`. - let mut zero_expressions = BitSet::new_empty(fn_cov_info.expressions.len()); + let mut zero_expressions = DenseBitSet::new_empty(fn_cov_info.expressions.len()); // Simplify a copy of each expression based on lower-numbered expressions, // and then update the set of always-zero expressions if necessary. @@ -228,8 +228,8 @@ fn identify_zero_expressions( /// into account knowledge of which counters are unused and which expressions /// are always zero. 
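This hunk also switches the `counters_seen` and `zero_expressions` sets from `BitSet` to the renamed `DenseBitSet`; `is_zero_term`, whose signature follows, then consults both sets. A dense bitset is just a fixed-capacity array of machine words with one bit per index. A minimal standalone sketch of the idea (names made up; not the `rustc_index` implementation):

```rust
/// Fixed-capacity dense bitset: one bit per index, packed into 64-bit words.
struct DenseBits {
    words: Vec<u64>,
    num_bits: usize,
}

impl DenseBits {
    fn new_empty(num_bits: usize) -> Self {
        Self { words: vec![0; (num_bits + 63) / 64], num_bits }
    }

    /// Sets the bit and reports whether it was newly inserted.
    fn insert(&mut self, bit: usize) -> bool {
        assert!(bit < self.num_bits);
        let (word, mask) = (bit / 64, 1u64 << (bit % 64));
        let was_set = self.words[word] & mask != 0;
        self.words[word] |= mask;
        !was_set
    }

    fn contains(&self, bit: usize) -> bool {
        assert!(bit < self.num_bits);
        self.words[bit / 64] & (1u64 << (bit % 64)) != 0
    }
}

fn main() {
    // E.g. "which counter IDs were actually seen in the MIR".
    let mut counters_seen = DenseBits::new_empty(100);
    assert!(counters_seen.insert(3));
    assert!(!counters_seen.insert(3)); // already present
    assert!(counters_seen.contains(3));
    assert!(!counters_seen.contains(4));
}
```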
fn is_zero_term( - counters_seen: &BitSet, - zero_expressions: &BitSet, + counters_seen: &DenseBitSet, + zero_expressions: &DenseBitSet, term: CovTerm, ) -> bool { match term { diff --git a/compiler/rustc_mir_transform/src/coverage/spans.rs b/compiler/rustc_mir_transform/src/coverage/spans.rs index 085c738f1f9fc..314a86ea52f0a 100644 --- a/compiler/rustc_mir_transform/src/coverage/spans.rs +++ b/compiler/rustc_mir_transform/src/coverage/spans.rs @@ -17,14 +17,13 @@ mod from_mir; pub(super) fn extract_refined_covspans( mir_body: &mir::Body<'_>, hir_info: &ExtractedHirInfo, - basic_coverage_blocks: &CoverageGraph, + graph: &CoverageGraph, code_mappings: &mut impl Extend, ) { - let ExtractedCovspans { mut covspans } = - extract_covspans_from_mir(mir_body, hir_info, basic_coverage_blocks); + let ExtractedCovspans { mut covspans } = extract_covspans_from_mir(mir_body, hir_info, graph); // First, perform the passes that need macro information. - covspans.sort_by(|a, b| basic_coverage_blocks.cmp_in_dominator_order(a.bcb, b.bcb)); + covspans.sort_by(|a, b| graph.cmp_in_dominator_order(a.bcb, b.bcb)); remove_unwanted_expansion_spans(&mut covspans); split_visible_macro_spans(&mut covspans); @@ -34,7 +33,7 @@ pub(super) fn extract_refined_covspans( let compare_covspans = |a: &Covspan, b: &Covspan| { compare_spans(a.span, b.span) // After deduplication, we want to keep only the most-dominated BCB. - .then_with(|| basic_coverage_blocks.cmp_in_dominator_order(a.bcb, b.bcb).reverse()) + .then_with(|| graph.cmp_in_dominator_order(a.bcb, b.bcb).reverse()) }; covspans.sort_by(compare_covspans); diff --git a/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs b/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs index 824d657e1fc2d..26ce743be3613 100644 --- a/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs +++ b/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs @@ -22,13 +22,13 @@ pub(crate) struct ExtractedCovspans { pub(crate) fn extract_covspans_from_mir( mir_body: &mir::Body<'_>, hir_info: &ExtractedHirInfo, - basic_coverage_blocks: &CoverageGraph, + graph: &CoverageGraph, ) -> ExtractedCovspans { let &ExtractedHirInfo { body_span, .. 
} = hir_info; let mut covspans = vec![]; - for (bcb, bcb_data) in basic_coverage_blocks.iter_enumerated() { + for (bcb, bcb_data) in graph.iter_enumerated() { bcb_to_initial_coverage_spans(mir_body, body_span, bcb, bcb_data, &mut covspans); } diff --git a/compiler/rustc_mir_transform/src/coverage/tests.rs b/compiler/rustc_mir_transform/src/coverage/tests.rs index 233ca9981c50c..b2ee50de50a23 100644 --- a/compiler/rustc_mir_transform/src/coverage/tests.rs +++ b/compiler/rustc_mir_transform/src/coverage/tests.rs @@ -223,16 +223,12 @@ fn print_mir_graphviz(name: &str, mir_body: &Body<'_>) { } } -fn print_coverage_graphviz( - name: &str, - mir_body: &Body<'_>, - basic_coverage_blocks: &graph::CoverageGraph, -) { +fn print_coverage_graphviz(name: &str, mir_body: &Body<'_>, graph: &graph::CoverageGraph) { if PRINT_GRAPHS { println!( "digraph {} {{\n{}\n}}", name, - basic_coverage_blocks + graph .iter_enumerated() .map(|(bcb, bcb_data)| { format!( @@ -240,7 +236,7 @@ fn print_coverage_graphviz( bcb, bcb, mir_body[bcb_data.last_bb()].terminator().kind.name(), - basic_coverage_blocks + graph .successors(bcb) .map(|successor| { format!(" {:?} -> {:?};", bcb, successor) }) .join("\n") @@ -300,11 +296,11 @@ fn goto_switchint<'a>() -> Body<'a> { #[track_caller] fn assert_successors( - basic_coverage_blocks: &graph::CoverageGraph, + graph: &graph::CoverageGraph, bcb: BasicCoverageBlock, expected_successors: &[BasicCoverageBlock], ) { - let mut successors = basic_coverage_blocks.successors[bcb].clone(); + let mut successors = graph.successors[bcb].clone(); successors.sort_unstable(); assert_eq!(successors, expected_successors); } @@ -315,8 +311,8 @@ fn test_covgraph_goto_switchint() { if false { eprintln!("basic_blocks = {}", debug_basic_blocks(&mir_body)); } - let basic_coverage_blocks = graph::CoverageGraph::from_mir(&mir_body); - print_coverage_graphviz("covgraph_goto_switchint ", &mir_body, &basic_coverage_blocks); + let graph = graph::CoverageGraph::from_mir(&mir_body); + print_coverage_graphviz("covgraph_goto_switchint ", &mir_body, &graph); /* ┌──────────────┐ ┌─────────────────┐ │ bcb2: Return │ ◀── │ bcb0: SwitchInt │ @@ -328,16 +324,11 @@ fn test_covgraph_goto_switchint() { │ bcb1: Return │ └─────────────────┘ */ - assert_eq!( - basic_coverage_blocks.num_nodes(), - 3, - "basic_coverage_blocks: {:?}", - basic_coverage_blocks.iter_enumerated().collect::>() - ); + assert_eq!(graph.num_nodes(), 3, "graph: {:?}", graph.iter_enumerated().collect::>()); - assert_successors(&basic_coverage_blocks, bcb(0), &[bcb(1), bcb(2)]); - assert_successors(&basic_coverage_blocks, bcb(1), &[]); - assert_successors(&basic_coverage_blocks, bcb(2), &[]); + assert_successors(&graph, bcb(0), &[bcb(1), bcb(2)]); + assert_successors(&graph, bcb(1), &[]); + assert_successors(&graph, bcb(2), &[]); } /// Create a mock `Body` with a loop. 
@@ -383,12 +374,8 @@ fn switchint_then_loop_else_return<'a>() -> Body<'a> { #[test] fn test_covgraph_switchint_then_loop_else_return() { let mir_body = switchint_then_loop_else_return(); - let basic_coverage_blocks = graph::CoverageGraph::from_mir(&mir_body); - print_coverage_graphviz( - "covgraph_switchint_then_loop_else_return", - &mir_body, - &basic_coverage_blocks, - ); + let graph = graph::CoverageGraph::from_mir(&mir_body); + print_coverage_graphviz("covgraph_switchint_then_loop_else_return", &mir_body, &graph); /* ┌─────────────────┐ │ bcb0: Call │ @@ -408,17 +395,12 @@ fn test_covgraph_switchint_then_loop_else_return() { │ │ └─────────────────────────────────────┘ */ - assert_eq!( - basic_coverage_blocks.num_nodes(), - 4, - "basic_coverage_blocks: {:?}", - basic_coverage_blocks.iter_enumerated().collect::>() - ); + assert_eq!(graph.num_nodes(), 4, "graph: {:?}", graph.iter_enumerated().collect::>()); - assert_successors(&basic_coverage_blocks, bcb(0), &[bcb(1)]); - assert_successors(&basic_coverage_blocks, bcb(1), &[bcb(2), bcb(3)]); - assert_successors(&basic_coverage_blocks, bcb(2), &[]); - assert_successors(&basic_coverage_blocks, bcb(3), &[bcb(1)]); + assert_successors(&graph, bcb(0), &[bcb(1)]); + assert_successors(&graph, bcb(1), &[bcb(2), bcb(3)]); + assert_successors(&graph, bcb(2), &[]); + assert_successors(&graph, bcb(3), &[bcb(1)]); } /// Create a mock `Body` with nested loops. @@ -494,11 +476,11 @@ fn switchint_loop_then_inner_loop_else_break<'a>() -> Body<'a> { #[test] fn test_covgraph_switchint_loop_then_inner_loop_else_break() { let mir_body = switchint_loop_then_inner_loop_else_break(); - let basic_coverage_blocks = graph::CoverageGraph::from_mir(&mir_body); + let graph = graph::CoverageGraph::from_mir(&mir_body); print_coverage_graphviz( "covgraph_switchint_loop_then_inner_loop_else_break", &mir_body, - &basic_coverage_blocks, + &graph, ); /* ┌─────────────────┐ @@ -531,18 +513,13 @@ fn test_covgraph_switchint_loop_then_inner_loop_else_break() { │ │ └────────────────────────────────────────────┘ */ - assert_eq!( - basic_coverage_blocks.num_nodes(), - 7, - "basic_coverage_blocks: {:?}", - basic_coverage_blocks.iter_enumerated().collect::>() - ); - - assert_successors(&basic_coverage_blocks, bcb(0), &[bcb(1)]); - assert_successors(&basic_coverage_blocks, bcb(1), &[bcb(2), bcb(3)]); - assert_successors(&basic_coverage_blocks, bcb(2), &[]); - assert_successors(&basic_coverage_blocks, bcb(3), &[bcb(4)]); - assert_successors(&basic_coverage_blocks, bcb(4), &[bcb(5), bcb(6)]); - assert_successors(&basic_coverage_blocks, bcb(5), &[bcb(1)]); - assert_successors(&basic_coverage_blocks, bcb(6), &[bcb(4)]); + assert_eq!(graph.num_nodes(), 7, "graph: {:?}", graph.iter_enumerated().collect::>()); + + assert_successors(&graph, bcb(0), &[bcb(1)]); + assert_successors(&graph, bcb(1), &[bcb(2), bcb(3)]); + assert_successors(&graph, bcb(2), &[]); + assert_successors(&graph, bcb(3), &[bcb(4)]); + assert_successors(&graph, bcb(4), &[bcb(5), bcb(6)]); + assert_successors(&graph, bcb(5), &[bcb(1)]); + assert_successors(&graph, bcb(6), &[bcb(4)]); } diff --git a/compiler/rustc_mir_transform/src/cross_crate_inline.rs b/compiler/rustc_mir_transform/src/cross_crate_inline.rs index e1f1dd83f0df4..8fce856687cb8 100644 --- a/compiler/rustc_mir_transform/src/cross_crate_inline.rs +++ b/compiler/rustc_mir_transform/src/cross_crate_inline.rs @@ -46,7 +46,7 @@ fn cross_crate_inlinable(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool { // #[inline(never)] to force code generation. 
match codegen_fn_attrs.inline { InlineAttr::Never => return false, - InlineAttr::Hint | InlineAttr::Always => return true, + InlineAttr::Hint | InlineAttr::Always | InlineAttr::Force { .. } => return true, _ => {} } @@ -69,8 +69,9 @@ fn cross_crate_inlinable(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool { // Don't do any inference if codegen optimizations are disabled and also MIR inlining is not // enabled. This ensures that we do inference even if someone only passes -Zinline-mir, // which is less confusing than having to also enable -Copt-level=1. - if matches!(tcx.sess.opts.optimize, OptLevel::No) && !pm::should_run_pass(tcx, &inline::Inline) - { + let inliner_will_run = pm::should_run_pass(tcx, &inline::Inline) + || inline::ForceInline::should_run_pass_for_callee(tcx, def_id.to_def_id()); + if matches!(tcx.sess.opts.optimize, OptLevel::No) && !inliner_will_run { return false; } diff --git a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs index 711cf2edc4627..cc44114782cf3 100644 --- a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs +++ b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs @@ -408,18 +408,6 @@ impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> { state: &mut State>, ) -> ValueOrPlace> { let val = match rvalue { - Rvalue::Len(place) => { - let place_ty = place.ty(self.local_decls, self.tcx); - if let ty::Array(_, len) = place_ty.ty.kind() { - Const::Ty(self.tcx.types.usize, *len) - .try_eval_scalar(self.tcx, self.typing_env) - .map_or(FlatSet::Top, FlatSet::Elem) - } else if let [ProjectionElem::Deref] = place.projection[..] { - state.get_len(place.local.into(), &self.map) - } else { - FlatSet::Top - } - } Rvalue::Cast(CastKind::IntToInt | CastKind::IntToFloat, operand, ty) => { let Ok(layout) = self.tcx.layout_of(self.typing_env.as_query_input(*ty)) else { return ValueOrPlace::Value(FlatSet::Top); @@ -944,7 +932,8 @@ fn try_write_constant<'tcx>( | ty::Closure(..) | ty::CoroutineClosure(..) | ty::Coroutine(..) - | ty::Dynamic(..) => throw_machine_stop_str!("unsupported type"), + | ty::Dynamic(..) + | ty::UnsafeBinder(_) => throw_machine_stop_str!("unsupported type"), ty::Error(_) | ty::Infer(..) | ty::CoroutineWitness(..) => bug!(), } diff --git a/compiler/rustc_mir_transform/src/dead_store_elimination.rs b/compiler/rustc_mir_transform/src/dead_store_elimination.rs index 0c75cdadc92d7..434e921d43929 100644 --- a/compiler/rustc_mir_transform/src/dead_store_elimination.rs +++ b/compiler/rustc_mir_transform/src/dead_store_elimination.rs @@ -26,8 +26,8 @@ use crate::util::is_within_packed; /// Performs the optimization on the body /// -/// The `borrowed` set must be a `BitSet` of all the locals that are ever borrowed in this body. It -/// can be generated via the [`borrowed_locals`] function. +/// The `borrowed` set must be a `DenseBitSet` of all the locals that are ever borrowed in this +/// body. It can be generated via the [`borrowed_locals`] function. fn eliminate<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { let borrowed_locals = borrowed_locals(body); diff --git a/compiler/rustc_mir_transform/src/deduce_param_attrs.rs b/compiler/rustc_mir_transform/src/deduce_param_attrs.rs index 67b215c7c9d63..049f13ce96d74 100644 --- a/compiler/rustc_mir_transform/src/deduce_param_attrs.rs +++ b/compiler/rustc_mir_transform/src/deduce_param_attrs.rs @@ -6,7 +6,7 @@ //! dependent crates can use them. 
use rustc_hir::def_id::LocalDefId; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::mir::visit::{NonMutatingUseContext, PlaceContext, Visitor}; use rustc_middle::mir::{Body, Location, Operand, Place, RETURN_PLACE, Terminator, TerminatorKind}; use rustc_middle::ty::{self, DeducedParamAttrs, Ty, TyCtxt}; @@ -18,13 +18,13 @@ struct DeduceReadOnly { /// Each bit is indexed by argument number, starting at zero (so 0 corresponds to local decl /// 1). The bit is true if the argument may have been mutated or false if we know it hasn't /// been up to the point we're at. - mutable_args: BitSet, + mutable_args: DenseBitSet, } impl DeduceReadOnly { /// Returns a new DeduceReadOnly instance. fn new(arg_count: usize) -> Self { - Self { mutable_args: BitSet::new_empty(arg_count) } + Self { mutable_args: DenseBitSet::new_empty(arg_count) } } } diff --git a/compiler/rustc_mir_transform/src/dest_prop.rs b/compiler/rustc_mir_transform/src/dest_prop.rs index 8f977d2979ec3..1ac4d835946f6 100644 --- a/compiler/rustc_mir_transform/src/dest_prop.rs +++ b/compiler/rustc_mir_transform/src/dest_prop.rs @@ -132,7 +132,7 @@ //! [attempt 3]: https://github.com/rust-lang/rust/pull/72632 use rustc_data_structures::fx::{FxIndexMap, IndexEntry, IndexOccupiedEntry}; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_index::interval::SparseIntervalMatrix; use rustc_middle::bug; use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor}; @@ -204,7 +204,8 @@ impl<'tcx> crate::MirPass<'tcx> for DestinationPropagation { // Because we only filter once per round, it is unsound to use a local for more than // one merge operation within a single round of optimizations. We store here which ones // we have already used. - let mut merged_locals: BitSet = BitSet::new_empty(body.local_decls.len()); + let mut merged_locals: DenseBitSet = + DenseBitSet::new_empty(body.local_decls.len()); // This is the set of merges we will apply this round. It is a subset of the candidates. let mut merges = FxIndexMap::default(); @@ -274,7 +275,7 @@ fn apply_merges<'tcx>( body: &mut Body<'tcx>, tcx: TyCtxt<'tcx>, merges: FxIndexMap, - merged_locals: BitSet, + merged_locals: DenseBitSet, ) { let mut merger = Merger { tcx, merges, merged_locals }; merger.visit_body_preserves_cfg(body); @@ -283,7 +284,7 @@ fn apply_merges<'tcx>( struct Merger<'tcx> { tcx: TyCtxt<'tcx>, merges: FxIndexMap, - merged_locals: BitSet, + merged_locals: DenseBitSet, } impl<'tcx> MutVisitor<'tcx> for Merger<'tcx> { @@ -351,7 +352,7 @@ impl Candidates { /// Collects the candidates for merging. /// /// This is responsible for enforcing the first and third bullet point. 
- fn reset_and_find<'tcx>(&mut self, body: &Body<'tcx>, borrowed: &BitSet) { + fn reset_and_find<'tcx>(&mut self, body: &Body<'tcx>, borrowed: &DenseBitSet) { self.c.clear(); self.reverse.clear(); let mut visitor = FindAssignments { body, candidates: &mut self.c, borrowed }; @@ -574,7 +575,6 @@ impl WriteInfo { | Rvalue::NullaryOp(_, _) | Rvalue::Ref(_, _, _) | Rvalue::RawPtr(_, _) - | Rvalue::Len(_) | Rvalue::Discriminant(_) | Rvalue::CopyForDeref(_) => {} } @@ -736,7 +736,7 @@ fn places_to_candidate_pair<'tcx>( struct FindAssignments<'a, 'tcx> { body: &'a Body<'tcx>, candidates: &'a mut FxIndexMap>, - borrowed: &'a BitSet, + borrowed: &'a DenseBitSet, } impl<'tcx> Visitor<'tcx> for FindAssignments<'_, 'tcx> { diff --git a/compiler/rustc_mir_transform/src/early_otherwise_branch.rs b/compiler/rustc_mir_transform/src/early_otherwise_branch.rs index 17c8348140a3e..91e1395e76415 100644 --- a/compiler/rustc_mir_transform/src/early_otherwise_branch.rs +++ b/compiler/rustc_mir_transform/src/early_otherwise_branch.rs @@ -129,18 +129,29 @@ impl<'tcx> crate::MirPass<'tcx> for EarlyOtherwiseBranch { let mut patch = MirPatch::new(body); - // create temp to store second discriminant in, `_s` in example above - let second_discriminant_temp = - patch.new_temp(opt_data.child_ty, opt_data.child_source.span); + let (second_discriminant_temp, second_operand) = if opt_data.need_hoist_discriminant { + // create temp to store second discriminant in, `_s` in example above + let second_discriminant_temp = + patch.new_temp(opt_data.child_ty, opt_data.child_source.span); - patch.add_statement(parent_end, StatementKind::StorageLive(second_discriminant_temp)); + patch.add_statement( + parent_end, + StatementKind::StorageLive(second_discriminant_temp), + ); - // create assignment of discriminant - patch.add_assign( - parent_end, - Place::from(second_discriminant_temp), - Rvalue::Discriminant(opt_data.child_place), - ); + // create assignment of discriminant + patch.add_assign( + parent_end, + Place::from(second_discriminant_temp), + Rvalue::Discriminant(opt_data.child_place), + ); + ( + Some(second_discriminant_temp), + Operand::Move(Place::from(second_discriminant_temp)), + ) + } else { + (None, Operand::Copy(opt_data.child_place)) + }; // create temp to store inequality comparison between the two discriminants, `_t` in // example above @@ -149,11 +160,9 @@ impl<'tcx> crate::MirPass<'tcx> for EarlyOtherwiseBranch { let comp_temp = patch.new_temp(comp_res_type, opt_data.child_source.span); patch.add_statement(parent_end, StatementKind::StorageLive(comp_temp)); - // create inequality comparison between the two discriminants - let comp_rvalue = Rvalue::BinaryOp( - nequal, - Box::new((parent_op.clone(), Operand::Move(Place::from(second_discriminant_temp)))), - ); + // create inequality comparison + let comp_rvalue = + Rvalue::BinaryOp(nequal, Box::new((parent_op.clone(), second_operand))); patch.add_statement( parent_end, StatementKind::Assign(Box::new((Place::from(comp_temp), comp_rvalue))), @@ -170,14 +179,17 @@ impl<'tcx> crate::MirPass<'tcx> for EarlyOtherwiseBranch { let eq_targets = SwitchTargets::new(eq_new_targets, parent_targets.otherwise()); // Create `bbEq` in example above - let eq_switch = BasicBlockData::new(Some(Terminator { - source_info: bbs[parent].terminator().source_info, - kind: TerminatorKind::SwitchInt { - // switch on the first discriminant, so we can mark the second one as dead - discr: parent_op, - targets: eq_targets, - }, - })); + let eq_switch = BasicBlockData::new( + Some(Terminator { + 
source_info: bbs[parent].terminator().source_info, + kind: TerminatorKind::SwitchInt { + // switch on the first discriminant, so we can mark the second one as dead + discr: parent_op, + targets: eq_targets, + }, + }), + bbs[parent].is_cleanup, + ); let eq_bb = patch.new_block(eq_switch); @@ -189,8 +201,13 @@ impl<'tcx> crate::MirPass<'tcx> for EarlyOtherwiseBranch { TerminatorKind::if_(Operand::Move(Place::from(comp_temp)), true_case, false_case), ); - // generate StorageDead for the second_discriminant_temp not in use anymore - patch.add_statement(parent_end, StatementKind::StorageDead(second_discriminant_temp)); + if let Some(second_discriminant_temp) = second_discriminant_temp { + // generate StorageDead for the second_discriminant_temp not in use anymore + patch.add_statement( + parent_end, + StatementKind::StorageDead(second_discriminant_temp), + ); + } // Generate a StorageDead for comp_temp in each of the targets, since we moved it into // the switch @@ -218,6 +235,7 @@ struct OptimizationData<'tcx> { child_place: Place<'tcx>, child_ty: Ty<'tcx>, child_source: SourceInfo, + need_hoist_discriminant: bool, } fn evaluate_candidate<'tcx>( @@ -226,49 +244,21 @@ fn evaluate_candidate<'tcx>( parent: BasicBlock, ) -> Option> { let bbs = &body.basic_blocks; + // NB: If this BB is a cleanup, we may need to figure out what else needs to be handled. + if bbs[parent].is_cleanup { + return None; + } let TerminatorKind::SwitchInt { targets, discr: parent_discr } = &bbs[parent].terminator().kind else { return None; }; let parent_ty = parent_discr.ty(body.local_decls(), tcx); - if !bbs[targets.otherwise()].is_empty_unreachable() { - // Someone could write code like this: - // ```rust - // let Q = val; - // if discriminant(P) == otherwise { - // let ptr = &mut Q as *mut _ as *mut u8; - // // It may be difficult for us to effectively determine whether values are valid. - // // Invalid values can come from all sorts of corners. - // unsafe { *ptr = 10; } - // } - // - // match P { - // A => match Q { - // A => { - // // code - // } - // _ => { - // // don't use Q - // } - // } - // _ => { - // // don't use Q - // } - // }; - // ``` - // - // Hoisting the `discriminant(Q)` out of the `A` arm causes us to compute the discriminant - // of an invalid value, which is UB. - // In order to fix this, **we would either need to show that the discriminant computation of - // `place` is computed in all branches**. - // FIXME(#95162) For the moment, we adopt a conservative approach and - // consider only the `otherwise` branch has no statements and an unreachable terminator. 
- return None; - } let (_, child) = targets.iter().next()?; - let child_terminator = &bbs[child].terminator(); - let TerminatorKind::SwitchInt { targets: child_targets, discr: child_discr } = - &child_terminator.kind + + let Terminator { + kind: TerminatorKind::SwitchInt { targets: child_targets, discr: child_discr }, + source_info, + } = bbs[child].terminator() else { return None; }; @@ -276,25 +266,115 @@ fn evaluate_candidate<'tcx>( if child_ty != parent_ty { return None; } - let Some(StatementKind::Assign(boxed)) = &bbs[child].statements.first().map(|x| &x.kind) else { + + // We only handle: + // ``` + // bb4: { + // _8 = discriminant((_3.1: Enum1)); + // switchInt(move _8) -> [2: bb7, otherwise: bb1]; + // } + // ``` + // and + // ``` + // bb2: { + // switchInt((_3.1: u64)) -> [1: bb5, otherwise: bb1]; + // } + // ``` + if bbs[child].statements.len() > 1 { return None; + } + + // When thie BB has exactly one statement, this statement should be discriminant. + let need_hoist_discriminant = bbs[child].statements.len() == 1; + let child_place = if need_hoist_discriminant { + if !bbs[targets.otherwise()].is_empty_unreachable() { + // Someone could write code like this: + // ```rust + // let Q = val; + // if discriminant(P) == otherwise { + // let ptr = &mut Q as *mut _ as *mut u8; + // // It may be difficult for us to effectively determine whether values are valid. + // // Invalid values can come from all sorts of corners. + // unsafe { *ptr = 10; } + // } + // + // match P { + // A => match Q { + // A => { + // // code + // } + // _ => { + // // don't use Q + // } + // } + // _ => { + // // don't use Q + // } + // }; + // ``` + // + // Hoisting the `discriminant(Q)` out of the `A` arm causes us to compute the discriminant of an + // invalid value, which is UB. + // In order to fix this, **we would either need to show that the discriminant computation of + // `place` is computed in all branches**. + // FIXME(#95162) For the moment, we adopt a conservative approach and + // consider only the `otherwise` branch has no statements and an unreachable terminator. + return None; + } + // Handle: + // ``` + // bb4: { + // _8 = discriminant((_3.1: Enum1)); + // switchInt(move _8) -> [2: bb7, otherwise: bb1]; + // } + // ``` + let [ + Statement { + kind: StatementKind::Assign(box (_, Rvalue::Discriminant(child_place))), + .. 
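Editor's note: the newly handled `need_hoist_discriminant == false` case corresponds to a child block that switches directly on an integer place, with no `discriminant(..)` assignment to hoist. A hedged, source-level illustration (not from this patch; the exact MIR depends on the build and optimization level):

```rust
// Illustrative only: both the parent and the child block are plain
// `switchInt`s on integer places of the same type, so there is no
// discriminant statement to hoist before comparing them.
fn both_one(pair: (u64, u64)) -> bool {
    match pair {
        (1, 1) => true,
        _ => false,
    }
}

fn main() {
    assert!(both_one((1, 1)));
    assert!(!both_one((1, 2)));
}
```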
+ }, + ] = bbs[child].statements.as_slice() + else { + return None; + }; + *child_place + } else { + // Handle: + // ``` + // bb2: { + // switchInt((_3.1: u64)) -> [1: bb5, otherwise: bb1]; + // } + // ``` + let Operand::Copy(child_place) = child_discr else { + return None; + }; + *child_place }; - let (_, Rvalue::Discriminant(child_place)) = &**boxed else { - return None; + let destination = if need_hoist_discriminant || bbs[targets.otherwise()].is_empty_unreachable() + { + child_targets.otherwise() + } else { + targets.otherwise() }; - let destination = child_targets.otherwise(); // Verify that the optimization is legal for each branch for (value, child) in targets.iter() { - if !verify_candidate_branch(&bbs[child], value, *child_place, destination) { + if !verify_candidate_branch( + &bbs[child], + value, + child_place, + destination, + need_hoist_discriminant, + ) { return None; } } Some(OptimizationData { destination, - child_place: *child_place, + child_place, child_ty, - child_source: child_terminator.source_info, + child_source: *source_info, + need_hoist_discriminant, }) } @@ -303,31 +383,48 @@ fn verify_candidate_branch<'tcx>( value: u128, place: Place<'tcx>, destination: BasicBlock, + need_hoist_discriminant: bool, ) -> bool { - // In order for the optimization to be correct, the branch must... - // ...have exactly one statement - if let [statement] = branch.statements.as_slice() - // ...assign the discriminant of `place` in that statement - && let StatementKind::Assign(boxed) = &statement.kind - && let (discr_place, Rvalue::Discriminant(from_place)) = &**boxed - && *from_place == place - // ...make that assignment to a local - && discr_place.projection.is_empty() - // ...terminate on a `SwitchInt` that invalidates that local - && let TerminatorKind::SwitchInt { discr: switch_op, targets, .. } = - &branch.terminator().kind - && *switch_op == Operand::Move(*discr_place) - // ...fall through to `destination` if the switch misses - && destination == targets.otherwise() - // ...have a branch for value `value` - && let mut iter = targets.iter() - && let Some((target_value, _)) = iter.next() - && target_value == value - // ...and have no more branches - && iter.next().is_none() - { - true + // In order for the optimization to be correct, the terminator must be a `SwitchInt`. + let TerminatorKind::SwitchInt { discr: switch_op, targets } = &branch.terminator().kind else { + return false; + }; + if need_hoist_discriminant { + // If we need hoist discriminant, the branch must have exactly one statement. + let [statement] = branch.statements.as_slice() else { + return false; + }; + // The statement must assign the discriminant of `place`. + let StatementKind::Assign(box (discr_place, Rvalue::Discriminant(from_place))) = + statement.kind + else { + return false; + }; + if from_place != place { + return false; + } + // The assignment must invalidate a local that terminate on a `SwitchInt`. + if !discr_place.projection.is_empty() || *switch_op != Operand::Move(discr_place) { + return false; + } } else { - false + // If we don't need hoist discriminant, the branch must not have any statements. + if !branch.statements.is_empty() { + return false; + } + // The place on `SwitchInt` must be the same. + if *switch_op != Operand::Copy(place) { + return false; + } } + // It must fall through to `destination` if the switch misses. + if destination != targets.otherwise() { + return false; + } + // It must have exactly one branch for value `value` and have no more branches. 
+ let mut iter = targets.iter(); + let (Some((target_value, _)), None) = (iter.next(), iter.next()) else { + return false; + }; + target_value == value } diff --git a/compiler/rustc_mir_transform/src/elaborate_box_derefs.rs b/compiler/rustc_mir_transform/src/elaborate_box_derefs.rs index b909dfa132052..d6ecadbfe29c1 100644 --- a/compiler/rustc_mir_transform/src/elaborate_box_derefs.rs +++ b/compiler/rustc_mir_transform/src/elaborate_box_derefs.rs @@ -29,13 +29,8 @@ fn build_ptr_tys<'tcx>( pub(super) fn build_projection<'tcx>( unique_ty: Ty<'tcx>, nonnull_ty: Ty<'tcx>, - ptr_ty: Ty<'tcx>, -) -> [PlaceElem<'tcx>; 3] { - [ - PlaceElem::Field(FieldIdx::ZERO, unique_ty), - PlaceElem::Field(FieldIdx::ZERO, nonnull_ty), - PlaceElem::Field(FieldIdx::ZERO, ptr_ty), - ] +) -> [PlaceElem<'tcx>; 2] { + [PlaceElem::Field(FieldIdx::ZERO, unique_ty), PlaceElem::Field(FieldIdx::ZERO, nonnull_ty)] } struct ElaborateBoxDerefVisitor<'a, 'tcx> { @@ -75,10 +70,14 @@ impl<'a, 'tcx> MutVisitor<'tcx> for ElaborateBoxDerefVisitor<'a, 'tcx> { self.patch.add_assign( location, Place::from(ptr_local), - Rvalue::Use(Operand::Copy( - Place::from(place.local) - .project_deeper(&build_projection(unique_ty, nonnull_ty, ptr_ty), tcx), - )), + Rvalue::Cast( + CastKind::Transmute, + Operand::Copy( + Place::from(place.local) + .project_deeper(&build_projection(unique_ty, nonnull_ty), tcx), + ), + ptr_ty, + ), ); place.local = ptr_local; @@ -133,8 +132,10 @@ impl<'tcx> crate::MirPass<'tcx> for ElaborateBoxDerefs { let (unique_ty, nonnull_ty, ptr_ty) = build_ptr_tys(tcx, boxed_ty, unique_did, nonnull_did); - new_projections - .extend_from_slice(&build_projection(unique_ty, nonnull_ty, ptr_ty)); + new_projections.extend_from_slice(&build_projection(unique_ty, nonnull_ty)); + // While we can't project into `NonNull<_>` in a basic block + // due to MCP#807, this is debug info where it's fine. + new_projections.push(PlaceElem::Field(FieldIdx::ZERO, ptr_ty)); new_projections.push(PlaceElem::Deref); } else if let Some(new_projections) = new_projections.as_mut() { // Keep building up our projections list once we've started it. diff --git a/compiler/rustc_mir_transform/src/elaborate_drops.rs b/compiler/rustc_mir_transform/src/elaborate_drops.rs index 3ebc911372507..988f1a25561e9 100644 --- a/compiler/rustc_mir_transform/src/elaborate_drops.rs +++ b/compiler/rustc_mir_transform/src/elaborate_drops.rs @@ -2,7 +2,7 @@ use std::fmt; use rustc_abi::{FieldIdx, VariantIdx}; use rustc_index::IndexVec; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::mir::patch::MirPatch; use rustc_middle::mir::*; use rustc_middle::ty::{self, TyCtxt}; @@ -96,10 +96,10 @@ impl<'tcx> crate::MirPass<'tcx> for ElaborateDrops { fn compute_dead_unwinds<'a, 'tcx>( body: &'a Body<'tcx>, flow_inits: &mut ResultsCursor<'a, 'tcx, MaybeInitializedPlaces<'a, 'tcx>>, -) -> BitSet { +) -> DenseBitSet { // We only need to do this pass once, because unwind edges can only // reach cleanup blocks, which can't have unwind edges themselves. - let mut dead_unwinds = BitSet::new_empty(body.basic_blocks.len()); + let mut dead_unwinds = DenseBitSet::new_empty(body.basic_blocks.len()); for (bb, bb_data) in body.basic_blocks.iter_enumerated() { let TerminatorKind::Drop { place, unwind: UnwindAction::Cleanup(_), .. 
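Editor's note: context for the `ElaborateBoxDerefs` change above. The pass now stops at the `NonNull<T>` field and transmutes to the raw pointer type instead of projecting into `NonNull`'s private field (per MCP#807). The transmute is sound because `NonNull<T>` is a `#[repr(transparent)]` wrapper with the same layout as a raw pointer; this standalone check (illustrative, not part of the patch) demonstrates the layout facts involved:

```rust
use std::mem::{align_of, size_of};
use std::ptr::NonNull;

// NonNull<T> is layout-compatible with a raw pointer, so the MIR pass can
// transmute the projected NonNull<u8> field to a raw pointer rather than
// projecting one level deeper.
fn main() {
    assert_eq!(size_of::<NonNull<u8>>(), size_of::<*const u8>());
    assert_eq!(align_of::<NonNull<u8>>(), align_of::<*const u8>());
    // The niche is the only difference: Option<NonNull<u8>> stays
    // pointer-sized, while Option<*const u8> does not.
    assert_eq!(size_of::<Option<NonNull<u8>>>(), size_of::<*const u8>());
    assert!(size_of::<Option<*const u8>>() > size_of::<*const u8>());
}
```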
} = bb_data.terminator().kind diff --git a/compiler/rustc_mir_transform/src/errors.rs b/compiler/rustc_mir_transform/src/errors.rs index 2d9eeddea2e22..015633d145f15 100644 --- a/compiler/rustc_mir_transform/src/errors.rs +++ b/compiler/rustc_mir_transform/src/errors.rs @@ -4,8 +4,8 @@ use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic}; use rustc_middle::mir::AssertKind; use rustc_middle::ty::TyCtxt; use rustc_session::lint::{self, Lint}; -use rustc_span::Span; use rustc_span::def_id::DefId; +use rustc_span::{Span, Symbol}; use crate::fluent_generated as fluent; @@ -142,3 +142,29 @@ pub(crate) struct MustNotSuspendReason { #[note(mir_transform_note2)] #[help] pub(crate) struct UndefinedTransmute; + +#[derive(Diagnostic)] +#[diag(mir_transform_force_inline)] +#[note] +pub(crate) struct ForceInlineFailure { + #[label(mir_transform_caller)] + pub caller_span: Span, + #[label(mir_transform_callee)] + pub callee_span: Span, + #[label(mir_transform_attr)] + pub attr_span: Span, + #[primary_span] + #[label(mir_transform_call)] + pub call_span: Span, + pub callee: String, + pub caller: String, + pub reason: &'static str, + #[subdiagnostic] + pub justification: Option, +} + +#[derive(Subdiagnostic)] +#[note(mir_transform_force_inline_justification)] +pub(crate) struct ForceInlineJustification { + pub sym: Symbol, +} diff --git a/compiler/rustc_mir_transform/src/function_item_references.rs b/compiler/rustc_mir_transform/src/function_item_references.rs index 2945fc6f3d564..fb21bf9977f15 100644 --- a/compiler/rustc_mir_transform/src/function_item_references.rs +++ b/compiler/rustc_mir_transform/src/function_item_references.rs @@ -5,9 +5,8 @@ use rustc_middle::mir::visit::Visitor; use rustc_middle::mir::*; use rustc_middle::ty::{self, EarlyBinder, GenericArgsRef, Ty, TyCtxt}; use rustc_session::lint::builtin::FUNCTION_ITEM_REFERENCES; -use rustc_span::Span; use rustc_span::source_map::Spanned; -use rustc_span::symbol::sym; +use rustc_span::{Span, sym}; use crate::errors; diff --git a/compiler/rustc_mir_transform/src/gvn.rs b/compiler/rustc_mir_transform/src/gvn.rs index d5a813ec8ec64..affc4cf0afc31 100644 --- a/compiler/rustc_mir_transform/src/gvn.rs +++ b/compiler/rustc_mir_transform/src/gvn.rs @@ -94,7 +94,7 @@ use rustc_const_eval::interpret::{ use rustc_data_structures::fx::FxIndexSet; use rustc_data_structures::graph::dominators::Dominators; use rustc_hir::def::DefKind; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_index::{IndexVec, newtype_index}; use rustc_middle::bug; use rustc_middle::mir::interpret::GlobalAlloc; @@ -223,8 +223,6 @@ enum Value<'tcx> { Projection(VnIndex, ProjectionElem>), /// Discriminant of the given value. Discriminant(VnIndex), - /// Length of an array or slice. - Len(VnIndex), // Operations. 
NullaryOp(NullOp<'tcx>, Ty<'tcx>), @@ -258,7 +256,7 @@ struct VnState<'body, 'tcx> { feature_unsized_locals: bool, ssa: &'body SsaLocals, dominators: Dominators, - reused_locals: BitSet, + reused_locals: DenseBitSet, } impl<'body, 'tcx> VnState<'body, 'tcx> { @@ -289,7 +287,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { feature_unsized_locals: tcx.features().unsized_locals(), ssa, dominators, - reused_locals: BitSet::new_empty(local_decls.len()), + reused_locals: DenseBitSet::new_empty(local_decls.len()), } } @@ -513,13 +511,6 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { self.ecx.discriminant_for_variant(base.layout.ty, variant).discard_err()?; discr_value.into() } - Len(slice) => { - let slice = self.evaluated[slice].as_ref()?; - let usize_layout = self.ecx.layout_of(self.tcx.types.usize).unwrap(); - let len = slice.len(&self.ecx).discard_err()?; - let imm = ImmTy::from_uint(len, usize_layout); - imm.into() - } NullaryOp(null_op, ty) => { let layout = self.ecx.layout_of(ty).ok()?; if let NullOp::SizeOf | NullOp::AlignOf = null_op @@ -863,7 +854,6 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { } // Operations. - Rvalue::Len(ref mut place) => return self.simplify_len(place, location), Rvalue::Cast(ref mut kind, ref mut value, to) => { return self.simplify_cast(kind, value, to, location); } @@ -1376,104 +1366,114 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { return self.new_opaque(); } - let mut was_updated = false; + let mut was_ever_updated = false; + loop { + let mut was_updated_this_iteration = false; + + // Transmuting between raw pointers is just a pointer cast so long as + // they have the same metadata type (like `*const i32` <=> `*mut u64` + // or `*mut [i32]` <=> `*const [u64]`), including the common special + // case of `*const T` <=> `*mut T`. + if let Transmute = kind + && from.is_unsafe_ptr() + && to.is_unsafe_ptr() + && self.pointers_have_same_metadata(from, to) + { + *kind = PtrToPtr; + was_updated_this_iteration = true; + } - // If that cast just casts away the metadata again, - if let PtrToPtr = kind - && let Value::Aggregate(AggregateTy::RawPtr { data_pointer_ty, .. }, _, fields) = - self.get(value) - && let ty::RawPtr(to_pointee, _) = to.kind() - && to_pointee.is_sized(self.tcx, self.typing_env()) - { - from = *data_pointer_ty; - value = fields[0]; - was_updated = true; - if *data_pointer_ty == to { - return Some(fields[0]); + // If a cast just casts away the metadata again, then we can get it by + // casting the original thin pointer passed to `from_raw_parts` + if let PtrToPtr = kind + && let Value::Aggregate(AggregateTy::RawPtr { data_pointer_ty, .. }, _, fields) = + self.get(value) + && let ty::RawPtr(to_pointee, _) = to.kind() + && to_pointee.is_sized(self.tcx, self.typing_env()) + { + from = *data_pointer_ty; + value = fields[0]; + was_updated_this_iteration = true; + if *data_pointer_ty == to { + return Some(fields[0]); + } } - } - // PtrToPtr-then-PtrToPtr can skip the intermediate step - if let PtrToPtr = kind - && let Value::Cast { kind: inner_kind, value: inner_value, from: inner_from, to: _ } = - *self.get(value) - && let PtrToPtr = inner_kind - { - from = inner_from; - value = inner_value; - was_updated = true; - if inner_from == to { - return Some(inner_value); + // Aggregate-then-Transmute can just transmute the original field value, + // so long as the bytes of a value from only from a single field. 
+ if let Transmute = kind + && let Value::Aggregate(_aggregate_ty, variant_idx, field_values) = self.get(value) + && let Some((field_idx, field_ty)) = + self.value_is_all_in_one_field(from, *variant_idx) + { + from = field_ty; + value = field_values[field_idx.as_usize()]; + was_updated_this_iteration = true; + if field_ty == to { + return Some(value); + } } - } - // PtrToPtr-then-Transmute can just transmute the original, so long as the - // PtrToPtr didn't change metadata (and thus the size of the pointer) - if let Transmute = kind - && let Value::Cast { - kind: PtrToPtr, + // Various cast-then-cast cases can be simplified. + if let Value::Cast { + kind: inner_kind, value: inner_value, from: inner_from, to: inner_to, } = *self.get(value) - && self.pointers_have_same_metadata(inner_from, inner_to) - { - from = inner_from; - value = inner_value; - was_updated = true; - if inner_from == to { - return Some(inner_value); + { + let new_kind = match (inner_kind, *kind) { + // Even if there's a narrowing cast in here that's fine, because + // things like `*mut [i32] -> *mut i32 -> *const i32` and + // `*mut [i32] -> *const [i32] -> *const i32` can skip the middle in MIR. + (PtrToPtr, PtrToPtr) => Some(PtrToPtr), + // PtrToPtr-then-Transmute is fine so long as the pointer cast is identity: + // `*const T -> *mut T -> NonNull` is fine, but we need to check for narrowing + // to skip things like `*const [i32] -> *const i32 -> NonNull`. + (PtrToPtr, Transmute) + if self.pointers_have_same_metadata(inner_from, inner_to) => + { + Some(Transmute) + } + // Similarly, for Transmute-then-PtrToPtr. Note that we need to check different + // variables for their metadata, and thus this can't merge with the previous arm. + (Transmute, PtrToPtr) if self.pointers_have_same_metadata(from, to) => { + Some(Transmute) + } + // If would be legal to always do this, but we don't want to hide information + // from the backend that it'd otherwise be able to use for optimizations. + (Transmute, Transmute) + if !self.type_may_have_niche_of_interest_to_backend(inner_to) => + { + Some(Transmute) + } + _ => None, + }; + if let Some(new_kind) = new_kind { + *kind = new_kind; + from = inner_from; + value = inner_value; + was_updated_this_iteration = true; + if inner_from == to { + return Some(inner_value); + } + } + } + + if was_updated_this_iteration { + was_ever_updated = true; + } else { + break; } } - if was_updated && let Some(op) = self.try_as_operand(value, location) { + if was_ever_updated && let Some(op) = self.try_as_operand(value, location) { *operand = op; } Some(self.insert(Value::Cast { kind: *kind, value, from, to })) } - fn simplify_len(&mut self, place: &mut Place<'tcx>, location: Location) -> Option { - // Trivial case: we are fetching a statically known length. - let place_ty = place.ty(self.local_decls, self.tcx).ty; - if let ty::Array(_, len) = place_ty.kind() { - return self.insert_constant(Const::from_ty_const( - *len, - self.tcx.types.usize, - self.tcx, - )); - } - - let mut inner = self.simplify_place_value(place, location)?; - - // The length information is stored in the wide pointer. - // Reborrowing copies length information from one pointer to the other. - while let Value::Address { place: borrowed, .. } = self.get(inner) - && let [PlaceElem::Deref] = borrowed.projection[..] - && let Some(borrowed) = self.locals[borrowed.local] - { - inner = borrowed; - } - - // We have an unsizing cast, which assigns the length to wide pointer metadata. - if let Value::Cast { kind, from, to, .. 
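Editor's note: the new fixpoint loop above lets GVN peel whole chains of casts in one pass. A hedged, source-level illustration of chains that become eligible (not from this patch; the exact MIR depends on the build):

```rust
// Illustrative only: chains like `*mut [i32] -> *const [i32] -> *const i32`
// can skip the intermediate cast, and `*const T <-> *mut T` is a pure
// pointer cast because the metadata type is unchanged.
fn thin_from_wide(p: *mut [i32]) -> *const i32 {
    p as *const [i32] as *const i32
}

fn const_to_mut(p: *const u8) -> *mut u8 {
    p as *mut u8
}

fn main() {
    let mut data = [1, 2, 3];
    let wide: *mut [i32] = &mut data[..];
    assert_eq!(unsafe { *thin_from_wide(wide) }, 1);

    let x = 7u8;
    assert_eq!(unsafe { *const_to_mut(&x) }, 7);
}
```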
} = self.get(inner) - && let CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _) = kind - && let Some(from) = from.builtin_deref(true) - && let ty::Array(_, len) = from.kind() - && let Some(to) = to.builtin_deref(true) - && let ty::Slice(..) = to.kind() - { - return self.insert_constant(Const::from_ty_const( - *len, - self.tcx.types.usize, - self.tcx, - )); - } - - // Fallback: a symbolic `Len`. - Some(self.insert(Value::Len(inner))) - } - fn pointers_have_same_metadata(&self, left_ptr_ty: Ty<'tcx>, right_ptr_ty: Ty<'tcx>) -> bool { let left_meta_ty = left_ptr_ty.pointee_metadata_ty_or_projection(self.tcx); let right_meta_ty = right_ptr_ty.pointee_metadata_ty_or_projection(self.tcx); @@ -1489,6 +1489,54 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { false } } + + /// Returns `false` if we know for sure that this type has no interesting niche, + /// and thus we can skip transmuting through it without worrying. + /// + /// The backend will emit `assume`s when transmuting between types with niches, + /// so we want to preserve `i32 -> char -> u32` so that that data is around, + /// but it's fine to skip whole-range-is-value steps like `A -> u32 -> B`. + fn type_may_have_niche_of_interest_to_backend(&self, ty: Ty<'tcx>) -> bool { + let Ok(layout) = self.ecx.layout_of(ty) else { + // If it's too generic or something, then assume it might be interesting later. + return true; + }; + + match layout.backend_repr { + BackendRepr::Uninhabited => true, + BackendRepr::Scalar(a) => !a.is_always_valid(&self.ecx), + BackendRepr::ScalarPair(a, b) => { + !a.is_always_valid(&self.ecx) || !b.is_always_valid(&self.ecx) + } + BackendRepr::Vector { .. } | BackendRepr::Memory { .. } => false, + } + } + + fn value_is_all_in_one_field( + &self, + ty: Ty<'tcx>, + variant: VariantIdx, + ) -> Option<(FieldIdx, Ty<'tcx>)> { + if let Ok(layout) = self.ecx.layout_of(ty) + && let abi::Variants::Single { index } = layout.variants + && index == variant + && let Some((field_idx, field_layout)) = layout.non_1zst_field(&self.ecx) + && layout.size == field_layout.size + { + // We needed to check the variant to avoid trying to read the tag + // field from an enum where no fields have variants, since that tag + // field isn't in the `Aggregate` from which we're getting values. + Some((FieldIdx::from_usize(field_idx), field_layout.ty)) + } else if let ty::Adt(adt, args) = ty.kind() + && adt.is_struct() + && adt.repr().transparent() + && let [single_field] = adt.non_enum_variant().fields.raw.as_slice() + { + Some((FieldIdx::ZERO, single_field.ty(self.tcx, args))) + } else { + None + } + } } fn op_to_prop_const<'tcx>( @@ -1666,7 +1714,7 @@ impl<'tcx> MutVisitor<'tcx> for VnState<'_, 'tcx> { struct StorageRemover<'tcx> { tcx: TyCtxt<'tcx>, - reused_locals: BitSet, + reused_locals: DenseBitSet, } impl<'tcx> MutVisitor<'tcx> for StorageRemover<'tcx> { diff --git a/compiler/rustc_mir_transform/src/impossible_predicates.rs b/compiler/rustc_mir_transform/src/impossible_predicates.rs new file mode 100644 index 0000000000000..ba8389bbe2ffb --- /dev/null +++ b/compiler/rustc_mir_transform/src/impossible_predicates.rs @@ -0,0 +1,56 @@ +//! Check if it's even possible to satisfy the 'where' clauses +//! for this item. +//! +//! It's possible to `#!feature(trivial_bounds)]` to write +//! a function with impossible to satisfy clauses, e.g.: +//! `fn foo() where String: Copy {}`. +//! +//! We don't usually need to worry about this kind of case, +//! since we would get a compilation error if the user tried +//! to call it. 
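Editor's note: on why `type_may_have_niche_of_interest_to_backend` keeps some intermediate transmutes around. A step through a type with a niche (such as `char`) tells the backend a range fact it can exploit, while a step through a full-range type (such as `u32`) carries no extra validity information and can be skipped. A small, safe illustration of the range gap (nothing here is rustc-internal code):

```rust
// Every char is a valid u32, but not every u32 is a valid char: the
// surrogate range 0xD800..=0xDFFF and values above 0x10FFFF are holes.
// That hole is exactly the kind of niche the backend can turn into an
// `assume`, which is why `i32 -> char -> u32` keeps its middle step.
fn main() {
    assert_eq!('A' as u32, 65);
    assert!(char::from_u32(0xD800).is_none()); // inside the surrogate hole
    assert!(char::from_u32(0x10FFFF).is_some()); // the last valid scalar value
    assert!(char::from_u32(0x110000).is_none()); // past the end of the range
}
```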
However, since we optimize even without any +//! calls to the function, we need to make sure that it even +//! makes sense to try to evaluate the body. +//! +//! If there are unsatisfiable where clauses, then all bets are +//! off, and we just give up. +//! +//! We manually filter the predicates, skipping anything that's not +//! "global". We are in a potentially generic context +//! (e.g. we are evaluating a function without instantiating generic +//! parameters, so this filtering serves two purposes: +//! +//! 1. We skip evaluating any predicates that we would +//! never be able prove are unsatisfiable (e.g. `` +//! 2. We avoid trying to normalize predicates involving generic +//! parameters (e.g. `::MyItem`). This can confuse +//! the normalization code (leading to cycle errors), since +//! it's usually never invoked in this way. + +use rustc_middle::mir::{Body, START_BLOCK, TerminatorKind}; +use rustc_middle::ty::{TyCtxt, TypeVisitableExt}; +use rustc_trait_selection::traits; +use tracing::trace; + +use crate::pass_manager::MirPass; + +pub(crate) struct ImpossiblePredicates; + +impl<'tcx> MirPass<'tcx> for ImpossiblePredicates { + fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { + let predicates = tcx + .predicates_of(body.source.def_id()) + .predicates + .iter() + .filter_map(|(p, _)| if p.is_global() { Some(*p) } else { None }); + if traits::impossible_predicates(tcx, traits::elaborate(tcx, predicates).collect()) { + trace!("found unsatisfiable predicates for {:?}", body.source); + // Clear the body to only contain a single `unreachable` statement. + let bbs = body.basic_blocks.as_mut(); + bbs.raw.truncate(1); + bbs[START_BLOCK].statements.clear(); + bbs[START_BLOCK].terminator_mut().kind = TerminatorKind::Unreachable; + body.var_debug_info.clear(); + body.local_decls.raw.truncate(body.arg_count + 1); + } + } +} diff --git a/compiler/rustc_mir_transform/src/inline.rs b/compiler/rustc_mir_transform/src/inline.rs index f0acbaf56b63e..470393c9ae1b7 100644 --- a/compiler/rustc_mir_transform/src/inline.rs +++ b/compiler/rustc_mir_transform/src/inline.rs @@ -8,15 +8,14 @@ use rustc_attr_parsing::InlineAttr; use rustc_hir::def::DefKind; use rustc_hir::def_id::DefId; use rustc_index::Idx; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::bug; -use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs}; +use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs; use rustc_middle::mir::visit::*; use rustc_middle::mir::*; use rustc_middle::ty::{self, Instance, InstanceKind, Ty, TyCtxt, TypeFlags, TypeVisitableExt}; use rustc_session::config::{DebugInfo, OptLevel}; use rustc_span::source_map::Spanned; -use rustc_span::sym; use tracing::{debug, instrument, trace, trace_span}; use crate::cost_checker::CostChecker; @@ -29,10 +28,6 @@ pub(crate) mod cycle; const TOP_DOWN_DEPTH_LIMIT: usize = 5; -// Made public so that `mir_drops_elaborated_and_const_checked` can be overridden -// by custom rustc drivers, running all the steps by themselves. See #114628. -pub struct Inline; - #[derive(Clone, Debug)] struct CallSite<'tcx> { callee: Instance<'tcx>, @@ -41,14 +36,12 @@ struct CallSite<'tcx> { source_info: SourceInfo, } +// Made public so that `mir_drops_elaborated_and_const_checked` can be overridden +// by custom rustc drivers, running all the steps by themselves. See #114628. 
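Editor's note: a minimal, self-contained illustration of the case the new `ImpossiblePredicates` pass guards against (illustrative only; requires the unstable `trivial_bounds` feature on a nightly compiler):

```rust
#![feature(trivial_bounds)]
#![allow(trivial_bounds, dead_code)]

// This item type-checks because `trivial_bounds` lets the bound be assumed
// inside the body, but the bound can never actually hold, so no call site
// can exist. The pass replaces such a body with a single `unreachable`
// terminator instead of trying to optimize it.
fn never_callable()
where
    String: Copy,
{
}

fn main() {
    // Calling `never_callable()` here would be rejected: `String: Copy` fails.
}
```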
+pub struct Inline; + impl<'tcx> crate::MirPass<'tcx> for Inline { fn is_enabled(&self, sess: &rustc_session::Session) -> bool { - // FIXME(#127234): Coverage instrumentation currently doesn't handle inlined - // MIR correctly when Modified Condition/Decision Coverage is enabled. - if sess.instrument_coverage_mcdc() { - return false; - } - if let Some(enabled) = sess.opts.unstable_opts.inline_mir { return enabled; } @@ -67,7 +60,7 @@ impl<'tcx> crate::MirPass<'tcx> for Inline { fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { let span = trace_span!("inline", body = %tcx.def_path_str(body.source.def_id())); let _guard = span.enter(); - if inline(tcx, body) { + if inline::>(tcx, body) { debug!("running simplify cfg on {:?}", body.source); simplify_cfg(body); deref_finder(tcx, body); @@ -75,47 +68,83 @@ impl<'tcx> crate::MirPass<'tcx> for Inline { } } -fn inline<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> bool { - let def_id = body.source.def_id().expect_local(); +pub struct ForceInline; - // Only do inlining into fn bodies. - if !tcx.hir().body_owner_kind(def_id).is_fn_or_closure() { - return false; +impl ForceInline { + pub fn should_run_pass_for_callee<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> bool { + matches!(tcx.codegen_fn_attrs(def_id).inline, InlineAttr::Force { .. }) } - if body.source.promoted.is_some() { - return false; +} + +impl<'tcx> crate::MirPass<'tcx> for ForceInline { + fn is_enabled(&self, _: &rustc_session::Session) -> bool { + true } - // Avoid inlining into coroutines, since their `optimized_mir` is used for layout computation, - // which can create a cycle, even when no attempt is made to inline the function in the other - // direction. - if body.coroutine.is_some() { - return false; + + fn can_be_overridden(&self) -> bool { + false + } + + fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { + let span = trace_span!("force_inline", body = %tcx.def_path_str(body.source.def_id())); + let _guard = span.enter(); + if inline::>(tcx, body) { + debug!("running simplify cfg on {:?}", body.source); + simplify_cfg(body); + deref_finder(tcx, body); + } } +} - let typing_env = body.typing_env(tcx); - let codegen_fn_attrs = tcx.codegen_fn_attrs(def_id); +trait Inliner<'tcx> { + fn new(tcx: TyCtxt<'tcx>, def_id: DefId, body: &Body<'tcx>) -> Self; - let mut this = Inliner { - tcx, - typing_env, - codegen_fn_attrs, - history: Vec::new(), - changed: false, - caller_is_inline_forwarder: matches!( - codegen_fn_attrs.inline, - InlineAttr::Hint | InlineAttr::Always - ) && body_is_forwarder(body), - }; - let blocks = START_BLOCK..body.basic_blocks.next_index(); - this.process_blocks(body, blocks); - this.changed + fn tcx(&self) -> TyCtxt<'tcx>; + fn typing_env(&self) -> ty::TypingEnv<'tcx>; + fn history(&self) -> &[DefId]; + fn caller_def_id(&self) -> DefId; + + /// Has the caller body been changed? + fn changed(self) -> bool; + + /// Should inlining happen for a given callee? + fn should_inline_for_callee(&self, def_id: DefId) -> bool; + + fn check_caller_mir_body(&self, body: &Body<'tcx>) -> bool; + + /// Returns inlining decision that is based on the examination of callee MIR body. + /// Assumes that codegen attributes have been checked for compatibility already. + fn check_callee_mir_body( + &self, + callsite: &CallSite<'tcx>, + callee_body: &Body<'tcx>, + callee_attrs: &CodegenFnAttrs, + ) -> Result<(), &'static str>; + + // How many callsites in a body are we allowed to inline? 
We need to limit this in order + // to prevent super-linear growth in MIR size. + fn inline_limit_for_block(&self) -> Option; + + /// Called when inlining succeeds. + fn on_inline_success( + &mut self, + callsite: &CallSite<'tcx>, + caller_body: &mut Body<'tcx>, + new_blocks: std::ops::Range, + ); + + /// Called when inlining failed or was not performed. + fn on_inline_failure(&self, callsite: &CallSite<'tcx>, reason: &'static str); + + /// Called when the inline limit for a body is reached. + fn on_inline_limit_reached(&self) -> bool; } -struct Inliner<'tcx> { +struct ForceInliner<'tcx> { tcx: TyCtxt<'tcx>, typing_env: ty::TypingEnv<'tcx>, - /// Caller codegen attributes. - codegen_fn_attrs: &'tcx CodegenFnAttrs, + /// `DefId` of caller. + def_id: DefId, /// Stack of inlined instances. /// We only check the `DefId` and not the args because we want to /// avoid inlining cases of polymorphic recursion. @@ -124,365 +153,203 @@ struct Inliner<'tcx> { history: Vec, /// Indicates that the caller body has been modified. changed: bool, - /// Indicates that the caller is #[inline] and just calls another function, - /// and thus we can inline less into it as it'll be inlined itself. - caller_is_inline_forwarder: bool, } -impl<'tcx> Inliner<'tcx> { - fn process_blocks(&mut self, caller_body: &mut Body<'tcx>, blocks: Range) { - // How many callsites in this body are we allowed to inline? We need to limit this in order - // to prevent super-linear growth in MIR size - let inline_limit = match self.history.len() { - 0 => usize::MAX, - 1..=TOP_DOWN_DEPTH_LIMIT => 1, - _ => return, - }; - let mut inlined_count = 0; - for bb in blocks { - let bb_data = &caller_body[bb]; - if bb_data.is_cleanup { - continue; - } - - let Some(callsite) = self.resolve_callsite(caller_body, bb, bb_data) else { - continue; - }; - - let span = trace_span!("process_blocks", %callsite.callee, ?bb); - let _guard = span.enter(); - - match self.try_inlining(caller_body, &callsite) { - Err(reason) => { - debug!("not-inlined {} [{}]", callsite.callee, reason); - } - Ok(new_blocks) => { - debug!("inlined {}", callsite.callee); - self.changed = true; - - self.history.push(callsite.callee.def_id()); - self.process_blocks(caller_body, new_blocks); - self.history.pop(); - - inlined_count += 1; - if inlined_count == inline_limit { - debug!("inline count reached"); - return; - } - } - } - } +impl<'tcx> Inliner<'tcx> for ForceInliner<'tcx> { + fn new(tcx: TyCtxt<'tcx>, def_id: DefId, body: &Body<'tcx>) -> Self { + Self { tcx, typing_env: body.typing_env(tcx), def_id, history: Vec::new(), changed: false } } - /// Attempts to inline a callsite into the caller body. When successful returns basic blocks - /// containing the inlined body. Otherwise returns an error describing why inlining didn't take - /// place. - fn try_inlining( - &self, - caller_body: &mut Body<'tcx>, - callsite: &CallSite<'tcx>, - ) -> Result, &'static str> { - self.check_mir_is_available(caller_body, callsite.callee)?; - - let callee_attrs = self.tcx.codegen_fn_attrs(callsite.callee.def_id()); - let cross_crate_inlinable = self.tcx.cross_crate_inlinable(callsite.callee.def_id()); - self.check_codegen_attributes(callsite, callee_attrs, cross_crate_inlinable)?; - - // Intrinsic fallback bodies are automatically made cross-crate inlineable, - // but at this stage we don't know whether codegen knows the intrinsic, - // so just conservatively don't inline it. 
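Editor's note: a stripped-down analogue of the refactor above (not rustc code; every name below is made up for illustration). The driver becomes generic over an `Inliner` strategy, and the force/normal modes differ only in their budgets and in how they report failures.

```rust
// Illustrative sketch of the strategy split, using plain strings in place of
// MIR bodies and DefIds.
trait Inliner {
    fn inline_limit(&self) -> Option<usize>;
    fn on_success(&mut self, callee: &str);
    fn on_failure(&self, callee: &str, reason: &'static str);
}

struct ForceInliner {
    inlined: Vec<String>,
}

struct NormalInliner {
    inlined: Vec<String>,
    budget: usize,
}

impl Inliner for ForceInliner {
    fn inline_limit(&self) -> Option<usize> {
        Some(usize::MAX) // forced inlining ignores the depth-based budget
    }
    fn on_success(&mut self, callee: &str) {
        self.inlined.push(callee.to_string());
    }
    fn on_failure(&self, callee: &str, reason: &'static str) {
        // The real pass emits a hard error diagnostic here.
        eprintln!("could not force-inline `{callee}`: {reason}");
    }
}

impl Inliner for NormalInliner {
    fn inline_limit(&self) -> Option<usize> {
        Some(self.budget)
    }
    fn on_success(&mut self, callee: &str) {
        self.inlined.push(callee.to_string());
    }
    fn on_failure(&self, _: &str, _: &'static str) {} // silently skip
}

fn process<I: Inliner>(inliner: &mut I, callees: &[(&str, bool)]) {
    let Some(limit) = inliner.inline_limit() else { return };
    let mut count = 0;
    for &(callee, eligible) in callees {
        if eligible {
            inliner.on_success(callee);
            count += 1;
            if count == limit {
                return;
            }
        } else {
            inliner.on_failure(callee, "not eligible");
        }
    }
}

fn main() {
    let mut forced = ForceInliner { inlined: vec![] };
    process(&mut forced, &[("a", true), ("b", false)]);
    assert_eq!(forced.inlined, ["a"]);

    let mut normal = NormalInliner { inlined: vec![], budget: 1 };
    process(&mut normal, &[("a", true), ("b", true)]);
    assert_eq!(normal.inlined, ["a"]); // stops once the budget is spent
}
```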
- if self.tcx.has_attr(callsite.callee.def_id(), sym::rustc_intrinsic) { - return Err("Callee is an intrinsic, do not inline fallback bodies"); - } - - let terminator = caller_body[callsite.block].terminator.as_ref().unwrap(); - let TerminatorKind::Call { args, destination, .. } = &terminator.kind else { bug!() }; - let destination_ty = destination.ty(&caller_body.local_decls, self.tcx).ty; - for arg in args { - if !arg.node.ty(&caller_body.local_decls, self.tcx).is_sized(self.tcx, self.typing_env) - { - // We do not allow inlining functions with unsized params. Inlining these functions - // could create unsized locals, which are unsound and being phased out. - return Err("Call has unsized argument"); - } - } - - let callee_body = try_instance_mir(self.tcx, callsite.callee.def)?; - self.check_mir_body(callsite, callee_body, callee_attrs, cross_crate_inlinable)?; - - let Ok(callee_body) = callsite.callee.try_instantiate_mir_and_normalize_erasing_regions( - self.tcx, - self.typing_env, - ty::EarlyBinder::bind(callee_body.clone()), - ) else { - return Err("failed to normalize callee body"); - }; - - // Normally, this shouldn't be required, but trait normalization failure can create a - // validation ICE. - if !validate_types(self.tcx, self.typing_env, &callee_body, &caller_body).is_empty() { - return Err("failed to validate callee body"); - } - - // Check call signature compatibility. - // Normally, this shouldn't be required, but trait normalization failure can create a - // validation ICE. - let output_type = callee_body.return_ty(); - if !util::sub_types(self.tcx, self.typing_env, output_type, destination_ty) { - trace!(?output_type, ?destination_ty); - return Err("failed to normalize return type"); - } - if callsite.fn_sig.abi() == ExternAbi::RustCall { - // FIXME: Don't inline user-written `extern "rust-call"` functions, - // since this is generally perf-negative on rustc, and we hope that - // LLVM will inline these functions instead. - if callee_body.spread_arg.is_some() { - return Err("do not inline user-written rust-call functions"); - } + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } - let (self_arg, arg_tuple) = match &args[..] 
{ - [arg_tuple] => (None, arg_tuple), - [self_arg, arg_tuple] => (Some(self_arg), arg_tuple), - _ => bug!("Expected `rust-call` to have 1 or 2 args"), - }; + fn typing_env(&self) -> ty::TypingEnv<'tcx> { + self.typing_env + } - let self_arg_ty = - self_arg.map(|self_arg| self_arg.node.ty(&caller_body.local_decls, self.tcx)); + fn history(&self) -> &[DefId] { + &self.history + } - let arg_tuple_ty = arg_tuple.node.ty(&caller_body.local_decls, self.tcx); - let ty::Tuple(arg_tuple_tys) = *arg_tuple_ty.kind() else { - bug!("Closure arguments are not passed as a tuple"); - }; + fn caller_def_id(&self) -> DefId { + self.def_id + } - for (arg_ty, input) in - self_arg_ty.into_iter().chain(arg_tuple_tys).zip(callee_body.args_iter()) - { - let input_type = callee_body.local_decls[input].ty; - if !util::sub_types(self.tcx, self.typing_env, input_type, arg_ty) { - trace!(?arg_ty, ?input_type); - return Err("failed to normalize tuple argument type"); - } - } - } else { - for (arg, input) in args.iter().zip(callee_body.args_iter()) { - let input_type = callee_body.local_decls[input].ty; - let arg_ty = arg.node.ty(&caller_body.local_decls, self.tcx); - if !util::sub_types(self.tcx, self.typing_env, input_type, arg_ty) { - trace!(?arg_ty, ?input_type); - return Err("failed to normalize argument type"); - } - } - } + fn changed(self) -> bool { + self.changed + } - let old_blocks = caller_body.basic_blocks.next_index(); - self.inline_call(caller_body, callsite, callee_body); - let new_blocks = old_blocks..caller_body.basic_blocks.next_index(); + fn should_inline_for_callee(&self, def_id: DefId) -> bool { + ForceInline::should_run_pass_for_callee(self.tcx(), def_id) + } - Ok(new_blocks) + fn check_caller_mir_body(&self, _: &Body<'tcx>) -> bool { + true } - fn check_mir_is_available( + #[instrument(level = "debug", skip(self, callee_body))] + fn check_callee_mir_body( &self, - caller_body: &Body<'tcx>, - callee: Instance<'tcx>, + _: &CallSite<'tcx>, + callee_body: &Body<'tcx>, + callee_attrs: &CodegenFnAttrs, ) -> Result<(), &'static str> { - let caller_def_id = caller_body.source.def_id(); - let callee_def_id = callee.def_id(); - if callee_def_id == caller_def_id { - return Err("self-recursion"); - } - - match callee.def { - InstanceKind::Item(_) => { - // If there is no MIR available (either because it was not in metadata or - // because it has no MIR because it's an extern function), then the inliner - // won't cause cycles on this. - if !self.tcx.is_mir_available(callee_def_id) { - return Err("item MIR unavailable"); - } - } - // These have no own callable MIR. - InstanceKind::Intrinsic(_) | InstanceKind::Virtual(..) => { - return Err("instance without MIR (intrinsic / virtual)"); - } - - // FIXME(#127030): `ConstParamHasTy` has bad interactions with - // the drop shim builder, which does not evaluate predicates in - // the correct param-env for types being dropped. Stall resolving - // the MIR for this instance until all of its const params are - // substituted. - InstanceKind::DropGlue(_, Some(ty)) if ty.has_type_flags(TypeFlags::HAS_CT_PARAM) => { - return Err("still needs substitution"); - } - - // This cannot result in an immediate cycle since the callee MIR is a shim, which does - // not get any optimizations run on it. Any subsequent inlining may cause cycles, but we - // do not need to catch this here, we can wait until the inliner decides to continue - // inlining a second time. - InstanceKind::VTableShim(_) - | InstanceKind::ReifyShim(..) - | InstanceKind::FnPtrShim(..) 
- | InstanceKind::ClosureOnceShim { .. } - | InstanceKind::ConstructCoroutineInClosureShim { .. } - | InstanceKind::DropGlue(..) - | InstanceKind::CloneShim(..) - | InstanceKind::ThreadLocalShim(..) - | InstanceKind::FnPtrAddrShim(..) - | InstanceKind::AsyncDropGlueCtorShim(..) => return Ok(()), - } - - if self.tcx.is_constructor(callee_def_id) { - trace!("constructors always have MIR"); - // Constructor functions cannot cause a query cycle. - return Ok(()); + if callee_body.tainted_by_errors.is_some() { + return Err("body has errors"); } - if callee_def_id.is_local() { - // If we know for sure that the function we're calling will itself try to - // call us, then we avoid inlining that function. - if self.tcx.mir_callgraph_reachable((callee, caller_def_id.expect_local())) { - return Err("caller might be reachable from callee (query cycle avoidance)"); - } - - Ok(()) + let caller_attrs = self.tcx().codegen_fn_attrs(self.caller_def_id()); + if callee_attrs.instruction_set != caller_attrs.instruction_set + && callee_body + .basic_blocks + .iter() + .any(|bb| matches!(bb.terminator().kind, TerminatorKind::InlineAsm { .. })) + { + // During the attribute checking stage we allow a callee with no + // instruction_set assigned to count as compatible with a function that does + // assign one. However, during this stage we require an exact match when any + // inline-asm is detected. LLVM will still possibly do an inline later on + // if the no-attribute function ends up with the same instruction set anyway. + Err("cannot move inline-asm across instruction sets") } else { - // This cannot result in an immediate cycle since the callee MIR is from another crate - // and is already optimized. Any subsequent inlining may cause cycles, but we do - // not need to catch this here, we can wait until the inliner decides to continue - // inlining a second time. - trace!("functions from other crates always have MIR"); Ok(()) } } - fn resolve_callsite( - &self, - caller_body: &Body<'tcx>, - bb: BasicBlock, - bb_data: &BasicBlockData<'tcx>, - ) -> Option> { - // Only consider direct calls to functions - let terminator = bb_data.terminator(); - - // FIXME(explicit_tail_calls): figure out if we can inline tail calls - if let TerminatorKind::Call { ref func, fn_span, .. } = terminator.kind { - let func_ty = func.ty(caller_body, self.tcx); - if let ty::FnDef(def_id, args) = *func_ty.kind() { - // To resolve an instance its args have to be fully normalized. - let args = self.tcx.try_normalize_erasing_regions(self.typing_env, args).ok()?; - let callee = Instance::try_resolve(self.tcx, self.typing_env, def_id, args) - .ok() - .flatten()?; - - if let InstanceKind::Virtual(..) | InstanceKind::Intrinsic(_) = callee.def { - return None; - } - - if self.history.contains(&callee.def_id()) { - return None; - } + fn inline_limit_for_block(&self) -> Option { + Some(usize::MAX) + } - let fn_sig = self.tcx.fn_sig(def_id).instantiate(self.tcx, args); + fn on_inline_success( + &mut self, + callsite: &CallSite<'tcx>, + caller_body: &mut Body<'tcx>, + new_blocks: std::ops::Range, + ) { + self.changed = true; - // Additionally, check that the body that we're inlining actually agrees - // with the ABI of the trait that the item comes from. 
- if let InstanceKind::Item(instance_def_id) = callee.def - && self.tcx.def_kind(instance_def_id) == DefKind::AssocFn - && let instance_fn_sig = self.tcx.fn_sig(instance_def_id).skip_binder() - && instance_fn_sig.abi() != fn_sig.abi() - { - return None; - } + self.history.push(callsite.callee.def_id()); + process_blocks(self, caller_body, new_blocks); + self.history.pop(); + } - let source_info = SourceInfo { span: fn_span, ..terminator.source_info }; + fn on_inline_failure(&self, callsite: &CallSite<'tcx>, reason: &'static str) { + let tcx = self.tcx(); + let InlineAttr::Force { attr_span, reason: justification } = + tcx.codegen_fn_attrs(callsite.callee.def_id()).inline + else { + bug!("called on item without required inlining"); + }; - return Some(CallSite { callee, fn_sig, block: bb, source_info }); - } - } + let call_span = callsite.source_info.span; + tcx.dcx().emit_err(crate::errors::ForceInlineFailure { + call_span, + attr_span, + caller_span: tcx.def_span(self.def_id), + caller: tcx.def_path_str(self.def_id), + callee_span: tcx.def_span(callsite.callee.def_id()), + callee: tcx.def_path_str(callsite.callee.def_id()), + reason, + justification: justification.map(|sym| crate::errors::ForceInlineJustification { sym }), + }); + } - None + fn on_inline_limit_reached(&self) -> bool { + false } +} - /// Returns an error if inlining is not possible based on codegen attributes alone. A success - /// indicates that inlining decision should be based on other criteria. - fn check_codegen_attributes( - &self, - callsite: &CallSite<'tcx>, - callee_attrs: &CodegenFnAttrs, - cross_crate_inlinable: bool, - ) -> Result<(), &'static str> { - if self.tcx.has_attr(callsite.callee.def_id(), sym::rustc_no_mir_inline) { - return Err("#[rustc_no_mir_inline]"); - } +struct NormalInliner<'tcx> { + tcx: TyCtxt<'tcx>, + typing_env: ty::TypingEnv<'tcx>, + /// `DefId` of caller. + def_id: DefId, + /// Stack of inlined instances. + /// We only check the `DefId` and not the args because we want to + /// avoid inlining cases of polymorphic recursion. + /// The number of `DefId`s is finite, so checking history is enough + /// to ensure that we do not loop endlessly while inlining. + history: Vec, + /// Indicates that the caller body has been modified. + changed: bool, + /// Indicates that the caller is #[inline] and just calls another function, + /// and thus we can inline less into it as it'll be inlined itself. + caller_is_inline_forwarder: bool, +} - if let InlineAttr::Never = callee_attrs.inline { - return Err("never inline hint"); +impl<'tcx> Inliner<'tcx> for NormalInliner<'tcx> { + fn new(tcx: TyCtxt<'tcx>, def_id: DefId, body: &Body<'tcx>) -> Self { + let typing_env = body.typing_env(tcx); + let codegen_fn_attrs = tcx.codegen_fn_attrs(def_id); + + Self { + tcx, + typing_env, + def_id, + history: Vec::new(), + changed: false, + caller_is_inline_forwarder: matches!( + codegen_fn_attrs.inline, + InlineAttr::Hint | InlineAttr::Always | InlineAttr::Force { .. } + ) && body_is_forwarder(body), } + } - // Reachability pass defines which functions are eligible for inlining. Generally inlining - // other functions is incorrect because they could reference symbols that aren't exported. 
- let is_generic = callsite.callee.args.non_erasable_generics().next().is_some(); - if !is_generic && !cross_crate_inlinable { - return Err("not exported"); - } + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } - if callsite.fn_sig.c_variadic() { - return Err("C variadic"); - } + fn caller_def_id(&self) -> DefId { + self.def_id + } - if callee_attrs.flags.contains(CodegenFnAttrFlags::COLD) { - return Err("cold"); - } + fn typing_env(&self) -> ty::TypingEnv<'tcx> { + self.typing_env + } - if callee_attrs.no_sanitize != self.codegen_fn_attrs.no_sanitize { - return Err("incompatible sanitizer set"); - } + fn history(&self) -> &[DefId] { + &self.history + } - // Two functions are compatible if the callee has no attribute (meaning - // that it's codegen agnostic), or sets an attribute that is identical - // to this function's attribute. - if callee_attrs.instruction_set.is_some() - && callee_attrs.instruction_set != self.codegen_fn_attrs.instruction_set - { - return Err("incompatible instruction set"); - } + fn changed(self) -> bool { + self.changed + } - let callee_feature_names = callee_attrs.target_features.iter().map(|f| f.name); - let this_feature_names = self.codegen_fn_attrs.target_features.iter().map(|f| f.name); - if callee_feature_names.ne(this_feature_names) { - // In general it is not correct to inline a callee with target features that are a - // subset of the caller. This is because the callee might contain calls, and the ABI of - // those calls depends on the target features of the surrounding function. By moving a - // `Call` terminator from one MIR body to another with more target features, we might - // change the ABI of that call! - return Err("incompatible target features"); + fn should_inline_for_callee(&self, _: DefId) -> bool { + true + } + + fn check_caller_mir_body(&self, body: &Body<'tcx>) -> bool { + // Avoid inlining into coroutines, since their `optimized_mir` is used for layout computation, + // which can create a cycle, even when no attempt is made to inline the function in the other + // direction. + if body.coroutine.is_some() { + return false; } - Ok(()) + true } - /// Returns inlining decision that is based on the examination of callee MIR body. - /// Assumes that codegen attributes have been checked for compatibility already. 
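Editor's note: an illustrative, source-level example (x86_64 only; not from this patch) of why the `incompatible target features` check exists: the callee's body is only sound to run once the feature has been verified, so its MIR must not migrate into a caller that lacks the same guarantee.

```rust
// Illustrative only. The #[target_feature] body must stay behind the runtime
// check; MIR-inlining it into `sum` as-is would let feature-dependent code
// run before that check.
#[cfg(target_arch = "x86_64")]
#[target_feature(enable = "avx2")]
unsafe fn sum_avx2(v: &[f32]) -> f32 {
    v.iter().sum()
}

fn sum(v: &[f32]) -> f32 {
    #[cfg(target_arch = "x86_64")]
    {
        if is_x86_feature_detected!("avx2") {
            // Sound only after the runtime feature check above.
            return unsafe { sum_avx2(v) };
        }
    }
    v.iter().sum()
}

fn main() {
    assert_eq!(sum(&[1.0, 2.0]), 3.0);
}
```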
#[instrument(level = "debug", skip(self, callee_body))] - fn check_mir_body( + fn check_callee_mir_body( &self, callsite: &CallSite<'tcx>, callee_body: &Body<'tcx>, callee_attrs: &CodegenFnAttrs, - cross_crate_inlinable: bool, ) -> Result<(), &'static str> { - let tcx = self.tcx; + let tcx = self.tcx(); if let Some(_) = callee_body.tainted_by_errors { - return Err("Body is tainted"); + return Err("body has errors"); } let mut threshold = if self.caller_is_inline_forwarder { - self.tcx.sess.opts.unstable_opts.inline_mir_forwarder_threshold.unwrap_or(30) - } else if cross_crate_inlinable { - self.tcx.sess.opts.unstable_opts.inline_mir_hint_threshold.unwrap_or(100) + tcx.sess.opts.unstable_opts.inline_mir_forwarder_threshold.unwrap_or(30) + } else if tcx.cross_crate_inlinable(callsite.callee.def_id()) { + tcx.sess.opts.unstable_opts.inline_mir_hint_threshold.unwrap_or(100) } else { - self.tcx.sess.opts.unstable_opts.inline_mir_threshold.unwrap_or(50) + tcx.sess.opts.unstable_opts.inline_mir_threshold.unwrap_or(50) }; // Give a bonus functions with a small number of blocks, @@ -496,13 +363,13 @@ impl<'tcx> Inliner<'tcx> { // FIXME: Give a bonus to functions with only a single caller let mut checker = - CostChecker::new(self.tcx, self.typing_env, Some(callsite.callee), callee_body); + CostChecker::new(tcx, self.typing_env(), Some(callsite.callee), callee_body); checker.add_function_level_costs(); // Traverse the MIR manually so we can account for the effects of inlining on the CFG. let mut work_list = vec![START_BLOCK]; - let mut visited = BitSet::new_empty(callee_body.basic_blocks.len()); + let mut visited = DenseBitSet::new_empty(callee_body.basic_blocks.len()); while let Some(bb) = work_list.pop() { if !visited.insert(bb.index()) { continue; @@ -512,20 +379,20 @@ impl<'tcx> Inliner<'tcx> { checker.visit_basic_block_data(bb, blk); let term = blk.terminator(); + let caller_attrs = tcx.codegen_fn_attrs(self.caller_def_id()); if let TerminatorKind::Drop { ref place, target, unwind, replace: _ } = term.kind { work_list.push(target); // If the place doesn't actually need dropping, treat it like a regular goto. - let ty = callsite.callee.instantiate_mir( - self.tcx, - ty::EarlyBinder::bind(&place.ty(callee_body, tcx).ty), - ); - if ty.needs_drop(tcx, self.typing_env) + let ty = callsite + .callee + .instantiate_mir(tcx, ty::EarlyBinder::bind(&place.ty(callee_body, tcx).ty)); + if ty.needs_drop(tcx, self.typing_env()) && let UnwindAction::Cleanup(unwind) = unwind { work_list.push(unwind); } - } else if callee_attrs.instruction_set != self.codegen_fn_attrs.instruction_set + } else if callee_attrs.instruction_set != caller_attrs.instruction_set && matches!(term.kind, TerminatorKind::InlineAsm { .. }) { // During the attribute checking stage we allow a callee with no @@ -533,7 +400,7 @@ impl<'tcx> Inliner<'tcx> { // assign one. However, during this stage we require an exact match when any // inline-asm is detected. LLVM will still possibly do an inline later on // if the no-attribute function ends up with the same instruction set anyway. - return Err("Cannot move inline-asm across instruction sets"); + return Err("cannot move inline-asm across instruction sets"); } else if let TerminatorKind::TailCall { .. 
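Editor's note: for context on the thresholds above, an "inline forwarder" is a caller whose whole body is essentially one call, so it gets a smaller MIR-inlining budget since it is expected to be inlined away itself. A hedged source-level example (names are illustrative; the defaults come from the `-Z inline-mir-*` options referenced in the code above):

```rust
// Illustrative only: `forwarder` is the kind of caller the
// `caller_is_inline_forwarder` flag describes, a thin #[inline] wrapper
// around a single call.
#[inline]
fn forwarder(v: &[u32]) -> u32 {
    sum(v)
}

fn sum(v: &[u32]) -> u32 {
    v.iter().copied().sum()
}

fn main() {
    assert_eq!(forwarder(&[1, 2, 3]), 6);
}
```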
} = term.kind { // FIXME(explicit_tail_calls): figure out how exactly functions containing tail // calls can be inlined (and if they even should) @@ -557,319 +424,688 @@ impl<'tcx> Inliner<'tcx> { } } - fn inline_call( - &self, - caller_body: &mut Body<'tcx>, + fn inline_limit_for_block(&self) -> Option { + match self.history.len() { + 0 => Some(usize::MAX), + 1..=TOP_DOWN_DEPTH_LIMIT => Some(1), + _ => None, + } + } + + fn on_inline_success( + &mut self, callsite: &CallSite<'tcx>, - mut callee_body: Body<'tcx>, + caller_body: &mut Body<'tcx>, + new_blocks: std::ops::Range, ) { - let terminator = caller_body[callsite.block].terminator.take().unwrap(); - let TerminatorKind::Call { func, args, destination, unwind, target, .. } = terminator.kind - else { - bug!("unexpected terminator kind {:?}", terminator.kind); - }; + self.changed = true; - let return_block = if let Some(block) = target { - // Prepare a new block for code that should execute when call returns. We don't use - // target block directly since it might have other predecessors. - let mut data = BasicBlockData::new(Some(Terminator { - source_info: terminator.source_info, - kind: TerminatorKind::Goto { target: block }, - })); - data.is_cleanup = caller_body[block].is_cleanup; - Some(caller_body.basic_blocks_mut().push(data)) - } else { - None + self.history.push(callsite.callee.def_id()); + process_blocks(self, caller_body, new_blocks); + self.history.pop(); + } + + fn on_inline_limit_reached(&self) -> bool { + true + } + + fn on_inline_failure(&self, _: &CallSite<'tcx>, _: &'static str) {} +} + +fn inline<'tcx, T: Inliner<'tcx>>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> bool { + let def_id = body.source.def_id(); + + // Only do inlining into fn bodies. + if !tcx.hir().body_owner_kind(def_id).is_fn_or_closure() { + return false; + } + + let mut inliner = T::new(tcx, def_id, body); + if !inliner.check_caller_mir_body(body) { + return false; + } + + let blocks = START_BLOCK..body.basic_blocks.next_index(); + process_blocks(&mut inliner, body, blocks); + inliner.changed() +} + +fn process_blocks<'tcx, I: Inliner<'tcx>>( + inliner: &mut I, + caller_body: &mut Body<'tcx>, + blocks: Range, +) { + let Some(inline_limit) = inliner.inline_limit_for_block() else { return }; + let mut inlined_count = 0; + for bb in blocks { + let bb_data = &caller_body[bb]; + if bb_data.is_cleanup { + continue; + } + + let Some(callsite) = resolve_callsite(inliner, caller_body, bb, bb_data) else { + continue; }; - // If the call is something like `a[*i] = f(i)`, where - // `i : &mut usize`, then just duplicating the `a[*i]` - // Place could result in two different locations if `f` - // writes to `i`. To prevent this we need to create a temporary - // borrow of the place and pass the destination as `*temp` instead. 
- fn dest_needs_borrow(place: Place<'_>) -> bool { - for elem in place.projection.iter() { - match elem { - ProjectionElem::Deref | ProjectionElem::Index(_) => return true, - _ => {} + let span = trace_span!("process_blocks", %callsite.callee, ?bb); + let _guard = span.enter(); + + match try_inlining(inliner, caller_body, &callsite) { + Err(reason) => { + debug!("not-inlined {} [{}]", callsite.callee, reason); + inliner.on_inline_failure(&callsite, reason); + } + Ok(new_blocks) => { + debug!("inlined {}", callsite.callee); + inliner.on_inline_success(&callsite, caller_body, new_blocks); + + inlined_count += 1; + if inlined_count == inline_limit { + if inliner.on_inline_limit_reached() { + return; + } } } + } + } +} + +fn resolve_callsite<'tcx, I: Inliner<'tcx>>( + inliner: &I, + caller_body: &Body<'tcx>, + bb: BasicBlock, + bb_data: &BasicBlockData<'tcx>, +) -> Option> { + let tcx = inliner.tcx(); + // Only consider direct calls to functions + let terminator = bb_data.terminator(); + + // FIXME(explicit_tail_calls): figure out if we can inline tail calls + if let TerminatorKind::Call { ref func, fn_span, .. } = terminator.kind { + let func_ty = func.ty(caller_body, tcx); + if let ty::FnDef(def_id, args) = *func_ty.kind() { + if !inliner.should_inline_for_callee(def_id) { + debug!("not enabled"); + return None; + } + + // To resolve an instance its args have to be fully normalized. + let args = tcx.try_normalize_erasing_regions(inliner.typing_env(), args).ok()?; + let callee = + Instance::try_resolve(tcx, inliner.typing_env(), def_id, args).ok().flatten()?; + + if let InstanceKind::Virtual(..) | InstanceKind::Intrinsic(_) = callee.def { + return None; + } + + if inliner.history().contains(&callee.def_id()) { + return None; + } - false + let fn_sig = tcx.fn_sig(def_id).instantiate(tcx, args); + + // Additionally, check that the body that we're inlining actually agrees + // with the ABI of the trait that the item comes from. + if let InstanceKind::Item(instance_def_id) = callee.def + && tcx.def_kind(instance_def_id) == DefKind::AssocFn + && let instance_fn_sig = tcx.fn_sig(instance_def_id).skip_binder() + && instance_fn_sig.abi() != fn_sig.abi() + { + return None; + } + + let source_info = SourceInfo { span: fn_span, ..terminator.source_info }; + + return Some(CallSite { callee, fn_sig, block: bb, source_info }); } + } - let dest = if dest_needs_borrow(destination) { - trace!("creating temp for return destination"); - let dest = Rvalue::Ref( - self.tcx.lifetimes.re_erased, - BorrowKind::Mut { kind: MutBorrowKind::Default }, - destination, - ); - let dest_ty = dest.ty(caller_body, self.tcx); - let temp = - Place::from(self.new_call_temp(caller_body, callsite, dest_ty, return_block)); - caller_body[callsite.block].statements.push(Statement { - source_info: callsite.source_info, - kind: StatementKind::Assign(Box::new((temp, dest))), - }); - self.tcx.mk_place_deref(temp) - } else { - destination - }; + None +} - // Always create a local to hold the destination, as `RETURN_PLACE` may appear - // where a full `Place` is not allowed. - let (remap_destination, destination_local) = if let Some(d) = dest.as_local() { - (false, d) - } else { - ( - true, - self.new_call_temp( - caller_body, - callsite, - destination.ty(caller_body, self.tcx).ty, - return_block, - ), - ) - }; +/// Attempts to inline a callsite into the caller body. When successful returns basic blocks +/// containing the inlined body. Otherwise returns an error describing why inlining didn't take +/// place. 
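Editor's note: on why `resolve_callsite` refuses callees already present in `history()`: without that check, mutually recursive functions could be inlined into each other indefinitely. A small, self-contained illustration (not from this patch):

```rust
// Illustrative only: inlining `is_odd` into `is_even` exposes a call back to
// `is_even`, and so on forever; the history check cuts the cycle.
fn is_even(n: u32) -> bool {
    if n == 0 { true } else { is_odd(n - 1) }
}

fn is_odd(n: u32) -> bool {
    if n == 0 { false } else { is_even(n - 1) }
}

fn main() {
    assert!(is_even(10));
    assert!(is_odd(7));
}
```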
+fn try_inlining<'tcx, I: Inliner<'tcx>>( + inliner: &I, + caller_body: &mut Body<'tcx>, + callsite: &CallSite<'tcx>, +) -> Result, &'static str> { + let tcx = inliner.tcx(); + check_mir_is_available(inliner, caller_body, callsite.callee)?; + + let callee_attrs = tcx.codegen_fn_attrs(callsite.callee.def_id()); + rustc_mir_build::check_inline::is_inline_valid_on_fn(tcx, callsite.callee.def_id())?; + check_codegen_attributes(inliner, callsite, callee_attrs)?; + + let terminator = caller_body[callsite.block].terminator.as_ref().unwrap(); + let TerminatorKind::Call { args, destination, .. } = &terminator.kind else { bug!() }; + let destination_ty = destination.ty(&caller_body.local_decls, tcx).ty; + for arg in args { + if !arg.node.ty(&caller_body.local_decls, tcx).is_sized(tcx, inliner.typing_env()) { + // We do not allow inlining functions with unsized params. Inlining these functions + // could create unsized locals, which are unsound and being phased out. + return Err("call has unsized argument"); + } + } - // Copy the arguments if needed. - let args = self.make_call_args(args, callsite, caller_body, &callee_body, return_block); + let callee_body = try_instance_mir(tcx, callsite.callee.def)?; + rustc_mir_build::check_inline::is_inline_valid_on_body(tcx, callee_body)?; + inliner.check_callee_mir_body(callsite, callee_body, callee_attrs)?; - let mut integrator = Integrator { - args: &args, - new_locals: Local::new(caller_body.local_decls.len()).., - new_scopes: SourceScope::new(caller_body.source_scopes.len()).., - new_blocks: BasicBlock::new(caller_body.basic_blocks.len()).., - destination: destination_local, - callsite_scope: caller_body.source_scopes[callsite.source_info.scope].clone(), - callsite, - cleanup_block: unwind, - in_cleanup_block: false, - return_block, - tcx: self.tcx, - always_live_locals: BitSet::new_filled(callee_body.local_decls.len()), + let Ok(callee_body) = callsite.callee.try_instantiate_mir_and_normalize_erasing_regions( + tcx, + inliner.typing_env(), + ty::EarlyBinder::bind(callee_body.clone()), + ) else { + debug!("failed to normalize callee body"); + return Err("implementation limitation"); + }; + + // Normally, this shouldn't be required, but trait normalization failure can create a + // validation ICE. + if !validate_types(tcx, inliner.typing_env(), &callee_body, &caller_body).is_empty() { + debug!("failed to validate callee body"); + return Err("implementation limitation"); + } + + // Check call signature compatibility. + // Normally, this shouldn't be required, but trait normalization failure can create a + // validation ICE. + let output_type = callee_body.return_ty(); + if !util::sub_types(tcx, inliner.typing_env(), output_type, destination_ty) { + trace!(?output_type, ?destination_ty); + debug!("failed to normalize return type"); + return Err("implementation limitation"); + } + if callsite.fn_sig.abi() == ExternAbi::RustCall { + // FIXME: Don't inline user-written `extern "rust-call"` functions, + // since this is generally perf-negative on rustc, and we hope that + // LLVM will inline these functions instead. + if callee_body.spread_arg.is_some() { + return Err("user-written rust-call functions"); + } + + let (self_arg, arg_tuple) = match &args[..] { + [arg_tuple] => (None, arg_tuple), + [self_arg, arg_tuple] => (Some(self_arg), arg_tuple), + _ => bug!("Expected `rust-call` to have 1 or 2 args"), }; - // Map all `Local`s, `SourceScope`s and `BasicBlock`s to new ones - // (or existing ones, in a few special cases) in the caller. 
- integrator.visit_body(&mut callee_body); + let self_arg_ty = self_arg.map(|self_arg| self_arg.node.ty(&caller_body.local_decls, tcx)); - // If there are any locals without storage markers, give them storage only for the - // duration of the call. - for local in callee_body.vars_and_temps_iter() { - if integrator.always_live_locals.contains(local) { - let new_local = integrator.map_local(local); - caller_body[callsite.block].statements.push(Statement { - source_info: callsite.source_info, - kind: StatementKind::StorageLive(new_local), - }); + let arg_tuple_ty = arg_tuple.node.ty(&caller_body.local_decls, tcx); + let ty::Tuple(arg_tuple_tys) = *arg_tuple_ty.kind() else { + bug!("Closure arguments are not passed as a tuple"); + }; + + for (arg_ty, input) in + self_arg_ty.into_iter().chain(arg_tuple_tys).zip(callee_body.args_iter()) + { + let input_type = callee_body.local_decls[input].ty; + if !util::sub_types(tcx, inliner.typing_env(), input_type, arg_ty) { + trace!(?arg_ty, ?input_type); + debug!("failed to normalize tuple argument type"); + return Err("implementation limitation"); } } - if let Some(block) = return_block { - // To avoid repeated O(n) insert, push any new statements to the end and rotate - // the slice once. - let mut n = 0; - if remap_destination { - caller_body[block].statements.push(Statement { - source_info: callsite.source_info, - kind: StatementKind::Assign(Box::new(( - dest, - Rvalue::Use(Operand::Move(destination_local.into())), - ))), - }); - n += 1; + } else { + for (arg, input) in args.iter().zip(callee_body.args_iter()) { + let input_type = callee_body.local_decls[input].ty; + let arg_ty = arg.node.ty(&caller_body.local_decls, tcx); + if !util::sub_types(tcx, inliner.typing_env(), input_type, arg_ty) { + trace!(?arg_ty, ?input_type); + debug!("failed to normalize argument type"); + return Err("implementation limitation"); } - for local in callee_body.vars_and_temps_iter().rev() { - if integrator.always_live_locals.contains(local) { - let new_local = integrator.map_local(local); - caller_body[block].statements.push(Statement { - source_info: callsite.source_info, - kind: StatementKind::StorageDead(new_local), - }); - n += 1; - } + } + } + + let old_blocks = caller_body.basic_blocks.next_index(); + inline_call(inliner, caller_body, callsite, callee_body); + let new_blocks = old_blocks..caller_body.basic_blocks.next_index(); + + Ok(new_blocks) +} + +fn check_mir_is_available<'tcx, I: Inliner<'tcx>>( + inliner: &I, + caller_body: &Body<'tcx>, + callee: Instance<'tcx>, +) -> Result<(), &'static str> { + let caller_def_id = caller_body.source.def_id(); + let callee_def_id = callee.def_id(); + if callee_def_id == caller_def_id { + return Err("self-recursion"); + } + + match callee.def { + InstanceKind::Item(_) => { + // If there is no MIR available (either because it was not in metadata or + // because it has no MIR because it's an extern function), then the inliner + // won't cause cycles on this. + if !inliner.tcx().is_mir_available(callee_def_id) { + debug!("item MIR unavailable"); + return Err("implementation limitation"); } - caller_body[block].statements.rotate_right(n); + } + // These have no own callable MIR. + InstanceKind::Intrinsic(_) | InstanceKind::Virtual(..) => { + debug!("instance without MIR (intrinsic / virtual)"); + return Err("implementation limitation"); } - // Insert all of the (mapped) parts of the callee body into the caller. 
- caller_body.local_decls.extend(callee_body.drain_vars_and_temps()); - caller_body.source_scopes.append(&mut callee_body.source_scopes); - if self - .tcx - .sess - .opts - .unstable_opts - .inline_mir_preserve_debug - .unwrap_or(self.tcx.sess.opts.debuginfo != DebugInfo::None) - { - // Note that we need to preserve these in the standard library so that - // people working on rust can build with or without debuginfo while - // still getting consistent results from the mir-opt tests. - caller_body.var_debug_info.append(&mut callee_body.var_debug_info); + // FIXME(#127030): `ConstParamHasTy` has bad interactions with + // the drop shim builder, which does not evaluate predicates in + // the correct param-env for types being dropped. Stall resolving + // the MIR for this instance until all of its const params are + // substituted. + InstanceKind::DropGlue(_, Some(ty)) if ty.has_type_flags(TypeFlags::HAS_CT_PARAM) => { + debug!("still needs substitution"); + return Err("implementation limitation"); } - caller_body.basic_blocks_mut().append(callee_body.basic_blocks_mut()); - caller_body[callsite.block].terminator = Some(Terminator { - source_info: callsite.source_info, - kind: TerminatorKind::Goto { target: integrator.map_block(START_BLOCK) }, - }); + // This cannot result in an immediate cycle since the callee MIR is a shim, which does + // not get any optimizations run on it. Any subsequent inlining may cause cycles, but we + // do not need to catch this here, we can wait until the inliner decides to continue + // inlining a second time. + InstanceKind::VTableShim(_) + | InstanceKind::ReifyShim(..) + | InstanceKind::FnPtrShim(..) + | InstanceKind::ClosureOnceShim { .. } + | InstanceKind::ConstructCoroutineInClosureShim { .. } + | InstanceKind::DropGlue(..) + | InstanceKind::CloneShim(..) + | InstanceKind::ThreadLocalShim(..) + | InstanceKind::FnPtrAddrShim(..) + | InstanceKind::AsyncDropGlueCtorShim(..) => return Ok(()), + } - // Copy required constants from the callee_body into the caller_body. Although we are only - // pushing unevaluated consts to `required_consts`, here they may have been evaluated - // because we are calling `instantiate_and_normalize_erasing_regions` -- so we filter again. - caller_body.required_consts.as_mut().unwrap().extend( - callee_body.required_consts().into_iter().filter(|ct| ct.const_.is_required_const()), - ); - // Now that we incorporated the callee's `required_consts`, we can remove the callee from - // `mentioned_items` -- but we have to take their `mentioned_items` in return. This does - // some extra work here to save the monomorphization collector work later. It helps a lot, - // since monomorphization can avoid a lot of work when the "mentioned items" are similar to - // the actually used items. By doing this we can entirely avoid visiting the callee! - // We need to reconstruct the `required_item` for the callee so that we can find and - // remove it. - let callee_item = MentionedItem::Fn(func.ty(caller_body, self.tcx)); - let caller_mentioned_items = caller_body.mentioned_items.as_mut().unwrap(); - if let Some(idx) = caller_mentioned_items.iter().position(|item| item.node == callee_item) { - // We found the callee, so remove it and add its items instead. - caller_mentioned_items.remove(idx); - caller_mentioned_items.extend(callee_body.mentioned_items()); - } else { - // If we can't find the callee, there's no point in adding its items. 
Probably it - // already got removed by being inlined elsewhere in the same function, so we already - // took its items. + if inliner.tcx().is_constructor(callee_def_id) { + trace!("constructors always have MIR"); + // Constructor functions cannot cause a query cycle. + return Ok(()); + } + + if callee_def_id.is_local() + && !inliner + .tcx() + .is_lang_item(inliner.tcx().parent(caller_def_id), rustc_hir::LangItem::FnOnce) + { + // If we know for sure that the function we're calling will itself try to + // call us, then we avoid inlining that function. + if inliner.tcx().mir_callgraph_reachable((callee, caller_def_id.expect_local())) { + debug!("query cycle avoidance"); + return Err("caller might be reachable from callee"); } + + Ok(()) + } else { + // This cannot result in an immediate cycle since the callee MIR is from another crate + // and is already optimized. Any subsequent inlining may cause cycles, but we do + // not need to catch this here, we can wait until the inliner decides to continue + // inlining a second time. + trace!("functions from other crates always have MIR"); + Ok(()) } +} - fn make_call_args( - &self, - args: Box<[Spanned>]>, - callsite: &CallSite<'tcx>, - caller_body: &mut Body<'tcx>, - callee_body: &Body<'tcx>, - return_block: Option, - ) -> Box<[Local]> { - let tcx = self.tcx; - - // There is a bit of a mismatch between the *caller* of a closure and the *callee*. - // The caller provides the arguments wrapped up in a tuple: - // - // tuple_tmp = (a, b, c) - // Fn::call(closure_ref, tuple_tmp) - // - // meanwhile the closure body expects the arguments (here, `a`, `b`, and `c`) - // as distinct arguments. (This is the "rust-call" ABI hack.) Normally, codegen has - // the job of unpacking this tuple. But here, we are codegen. =) So we want to create - // a vector like - // - // [closure_ref, tuple_tmp.0, tuple_tmp.1, tuple_tmp.2] - // - // Except for one tiny wrinkle: we don't actually want `tuple_tmp.0`. It's more convenient - // if we "spill" that into *another* temporary, so that we can map the argument - // variable in the callee MIR directly to an argument variable on our side. - // So we introduce temporaries like: - // - // tmp0 = tuple_tmp.0 - // tmp1 = tuple_tmp.1 - // tmp2 = tuple_tmp.2 - // - // and the vector is `[closure_ref, tmp0, tmp1, tmp2]`. - if callsite.fn_sig.abi() == ExternAbi::RustCall && callee_body.spread_arg.is_none() { - // FIXME(edition_2024): switch back to a normal method call. - let mut args = <_>::into_iter(args); - let self_ = self.create_temp_if_necessary( - args.next().unwrap().node, - callsite, - caller_body, - return_block, - ); - let tuple = self.create_temp_if_necessary( - args.next().unwrap().node, - callsite, - caller_body, - return_block, - ); - assert!(args.next().is_none()); - - let tuple = Place::from(tuple); - let ty::Tuple(tuple_tys) = tuple.ty(caller_body, tcx).ty.kind() else { - bug!("Closure arguments are not passed as a tuple"); - }; +/// Returns an error if inlining is not possible based on codegen attributes alone. A success +/// indicates that inlining decision should be based on other criteria. +fn check_codegen_attributes<'tcx, I: Inliner<'tcx>>( + inliner: &I, + callsite: &CallSite<'tcx>, + callee_attrs: &CodegenFnAttrs, +) -> Result<(), &'static str> { + let tcx = inliner.tcx(); + if let InlineAttr::Never = callee_attrs.inline { + return Err("never inline attribute"); + } - // The `closure_ref` in our example above. 
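
// A minimal standalone sketch of the query cycle that `check_mir_is_available`
// guards against (illustrative functions, not taken from this diff): inlining
// `pong` into `ping` while also inlining `ping` into `pong` would make each
// body's optimized MIR depend on the other's. The same check also rejects
// plain self-recursion outright.
fn ping(n: u32) -> u32 {
    if n == 0 { 0 } else { pong(n - 1) }
}

fn pong(n: u32) -> u32 {
    if n == 0 { 1 } else { ping(n - 1) }
}
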
- let closure_ref_arg = iter::once(self_); + // Reachability pass defines which functions are eligible for inlining. Generally inlining + // other functions is incorrect because they could reference symbols that aren't exported. + let is_generic = callsite.callee.args.non_erasable_generics().next().is_some(); + if !is_generic && !tcx.cross_crate_inlinable(callsite.callee.def_id()) { + return Err("not exported"); + } - // The `tmp0`, `tmp1`, and `tmp2` in our example above. - let tuple_tmp_args = tuple_tys.iter().enumerate().map(|(i, ty)| { - // This is e.g., `tuple_tmp.0` in our example above. - let tuple_field = Operand::Move(tcx.mk_place_field(tuple, FieldIdx::new(i), ty)); + let codegen_fn_attrs = tcx.codegen_fn_attrs(inliner.caller_def_id()); + if callee_attrs.no_sanitize != codegen_fn_attrs.no_sanitize { + return Err("incompatible sanitizer set"); + } - // Spill to a local to make e.g., `tmp0`. - self.create_temp_if_necessary(tuple_field, callsite, caller_body, return_block) - }); + // Two functions are compatible if the callee has no attribute (meaning + // that it's codegen agnostic), or sets an attribute that is identical + // to this function's attribute. + if callee_attrs.instruction_set.is_some() + && callee_attrs.instruction_set != codegen_fn_attrs.instruction_set + { + return Err("incompatible instruction set"); + } - closure_ref_arg.chain(tuple_tmp_args).collect() - } else { - // FIXME(edition_2024): switch back to a normal method call. - <_>::into_iter(args) - .map(|a| self.create_temp_if_necessary(a.node, callsite, caller_body, return_block)) - .collect() - } + let callee_feature_names = callee_attrs.target_features.iter().map(|f| f.name); + let this_feature_names = codegen_fn_attrs.target_features.iter().map(|f| f.name); + if callee_feature_names.ne(this_feature_names) { + // In general it is not correct to inline a callee with target features that are a + // subset of the caller. This is because the callee might contain calls, and the ABI of + // those calls depends on the target features of the surrounding function. By moving a + // `Call` terminator from one MIR body to another with more target features, we might + // change the ABI of that call! + return Err("incompatible target features"); } - /// If `arg` is already a temporary, returns it. Otherwise, introduces a fresh - /// temporary `T` and an instruction `T = arg`, and returns `T`. - fn create_temp_if_necessary( - &self, - arg: Operand<'tcx>, - callsite: &CallSite<'tcx>, - caller_body: &mut Body<'tcx>, - return_block: Option, - ) -> Local { - // Reuse the operand if it is a moved temporary. - if let Operand::Move(place) = &arg - && let Some(local) = place.as_local() - && caller_body.local_kind(local) == LocalKind::Temp - { - return local; + Ok(()) +} + +fn inline_call<'tcx, I: Inliner<'tcx>>( + inliner: &I, + caller_body: &mut Body<'tcx>, + callsite: &CallSite<'tcx>, + mut callee_body: Body<'tcx>, +) { + let tcx = inliner.tcx(); + let terminator = caller_body[callsite.block].terminator.take().unwrap(); + let TerminatorKind::Call { func, args, destination, unwind, target, .. } = terminator.kind + else { + bug!("unexpected terminator kind {:?}", terminator.kind); + }; + + let return_block = if let Some(block) = target { + // Prepare a new block for code that should execute when call returns. We don't use + // target block directly since it might have other predecessors. 
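
// A standalone, x86_64-specific sketch of the target-feature rule enforced in
// `check_codegen_attributes` above (names are illustrative): an `avx2` body is
// only valid where that feature is known to be enabled, so it stays behind the
// runtime check rather than being spliced into a caller compiled without
// `avx2`, where calls inside it could end up using the wrong ABI.
#[target_feature(enable = "avx2")]
unsafe fn sum_avx2(xs: &[f32]) -> f32 {
    xs.iter().sum()
}

fn sum(xs: &[f32]) -> f32 {
    if is_x86_feature_detected!("avx2") {
        // SAFETY: the runtime check above proves that `avx2` is available.
        unsafe { sum_avx2(xs) }
    } else {
        xs.iter().sum()
    }
}
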
+ let data = BasicBlockData::new( + Some(Terminator { + source_info: terminator.source_info, + kind: TerminatorKind::Goto { target: block }, + }), + caller_body[block].is_cleanup, + ); + Some(caller_body.basic_blocks_mut().push(data)) + } else { + None + }; + + // If the call is something like `a[*i] = f(i)`, where + // `i : &mut usize`, then just duplicating the `a[*i]` + // Place could result in two different locations if `f` + // writes to `i`. To prevent this we need to create a temporary + // borrow of the place and pass the destination as `*temp` instead. + fn dest_needs_borrow(place: Place<'_>) -> bool { + for elem in place.projection.iter() { + match elem { + ProjectionElem::Deref | ProjectionElem::Index(_) => return true, + _ => {} + } } - // Otherwise, create a temporary for the argument. - trace!("creating temp for argument {:?}", arg); - let arg_ty = arg.ty(caller_body, self.tcx); - let local = self.new_call_temp(caller_body, callsite, arg_ty, return_block); - caller_body[callsite.block].statements.push(Statement { - source_info: callsite.source_info, - kind: StatementKind::Assign(Box::new((Place::from(local), Rvalue::Use(arg)))), - }); - local + false } - /// Introduces a new temporary into the caller body that is live for the duration of the call. - fn new_call_temp( - &self, - caller_body: &mut Body<'tcx>, - callsite: &CallSite<'tcx>, - ty: Ty<'tcx>, - return_block: Option, - ) -> Local { - let local = caller_body.local_decls.push(LocalDecl::new(ty, callsite.source_info.span)); - + let dest = if dest_needs_borrow(destination) { + trace!("creating temp for return destination"); + let dest = Rvalue::Ref( + tcx.lifetimes.re_erased, + BorrowKind::Mut { kind: MutBorrowKind::Default }, + destination, + ); + let dest_ty = dest.ty(caller_body, tcx); + let temp = Place::from(new_call_temp(caller_body, callsite, dest_ty, return_block)); caller_body[callsite.block].statements.push(Statement { source_info: callsite.source_info, - kind: StatementKind::StorageLive(local), + kind: StatementKind::Assign(Box::new((temp, dest))), }); + tcx.mk_place_deref(temp) + } else { + destination + }; + + // Always create a local to hold the destination, as `RETURN_PLACE` may appear + // where a full `Place` is not allowed. + let (remap_destination, destination_local) = if let Some(d) = dest.as_local() { + (false, d) + } else { + ( + true, + new_call_temp(caller_body, callsite, destination.ty(caller_body, tcx).ty, return_block), + ) + }; + + // Copy the arguments if needed. + let args = make_call_args(inliner, args, callsite, caller_body, &callee_body, return_block); + + let mut integrator = Integrator { + args: &args, + new_locals: Local::new(caller_body.local_decls.len()).., + new_scopes: SourceScope::new(caller_body.source_scopes.len()).., + new_blocks: BasicBlock::new(caller_body.basic_blocks.len()).., + destination: destination_local, + callsite_scope: caller_body.source_scopes[callsite.source_info.scope].clone(), + callsite, + cleanup_block: unwind, + in_cleanup_block: false, + return_block, + tcx, + always_live_locals: DenseBitSet::new_filled(callee_body.local_decls.len()), + }; + + // Map all `Local`s, `SourceScope`s and `BasicBlock`s to new ones + // (or existing ones, in a few special cases) in the caller. + integrator.visit_body(&mut callee_body); - if let Some(block) = return_block { - caller_body[block].statements.insert(0, Statement { + // If there are any locals without storage markers, give them storage only for the + // duration of the call. 
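
// A minimal standalone sketch of the `a[*i] = f(i)` hazard that
// `dest_needs_borrow` above handles (illustrative names): borrowing the
// destination once pins down which element is written, even though the callee
// mutates the index. This is what routing the return value through `*temp`
// achieves for the inlined body.
fn f(i: &mut usize) -> u32 {
    *i += 1; // the callee writes to the index
    7
}

fn demo() {
    let mut a = [0u32; 4];
    let mut i = 1usize;
    let dest = &mut a[i]; // evaluate the destination place once, like the borrowed temp
    *dest = f(&mut i); // `f` changes `i`, but `dest` still refers to `a[1]`
    assert_eq!(a, [0, 7, 0, 0]);
    assert_eq!(i, 2);
}
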
+ for local in callee_body.vars_and_temps_iter() { + if integrator.always_live_locals.contains(local) { + let new_local = integrator.map_local(local); + caller_body[callsite.block].statements.push(Statement { source_info: callsite.source_info, - kind: StatementKind::StorageDead(local), + kind: StatementKind::StorageLive(new_local), }); } + } + if let Some(block) = return_block { + // To avoid repeated O(n) insert, push any new statements to the end and rotate + // the slice once. + let mut n = 0; + if remap_destination { + caller_body[block].statements.push(Statement { + source_info: callsite.source_info, + kind: StatementKind::Assign(Box::new(( + dest, + Rvalue::Use(Operand::Move(destination_local.into())), + ))), + }); + n += 1; + } + for local in callee_body.vars_and_temps_iter().rev() { + if integrator.always_live_locals.contains(local) { + let new_local = integrator.map_local(local); + caller_body[block].statements.push(Statement { + source_info: callsite.source_info, + kind: StatementKind::StorageDead(new_local), + }); + n += 1; + } + } + caller_body[block].statements.rotate_right(n); + } - local + // Insert all of the (mapped) parts of the callee body into the caller. + caller_body.local_decls.extend(callee_body.drain_vars_and_temps()); + caller_body.source_scopes.append(&mut callee_body.source_scopes); + if tcx + .sess + .opts + .unstable_opts + .inline_mir_preserve_debug + .unwrap_or(tcx.sess.opts.debuginfo != DebugInfo::None) + { + // Note that we need to preserve these in the standard library so that + // people working on rust can build with or without debuginfo while + // still getting consistent results from the mir-opt tests. + caller_body.var_debug_info.append(&mut callee_body.var_debug_info); } + caller_body.basic_blocks_mut().append(callee_body.basic_blocks_mut()); + + caller_body[callsite.block].terminator = Some(Terminator { + source_info: callsite.source_info, + kind: TerminatorKind::Goto { target: integrator.map_block(START_BLOCK) }, + }); + + // Copy required constants from the callee_body into the caller_body. Although we are only + // pushing unevaluated consts to `required_consts`, here they may have been evaluated + // because we are calling `instantiate_and_normalize_erasing_regions` -- so we filter again. + caller_body.required_consts.as_mut().unwrap().extend( + callee_body.required_consts().into_iter().filter(|ct| ct.const_.is_required_const()), + ); + // Now that we incorporated the callee's `required_consts`, we can remove the callee from + // `mentioned_items` -- but we have to take their `mentioned_items` in return. This does + // some extra work here to save the monomorphization collector work later. It helps a lot, + // since monomorphization can avoid a lot of work when the "mentioned items" are similar to + // the actually used items. By doing this we can entirely avoid visiting the callee! + // We need to reconstruct the `required_item` for the callee so that we can find and + // remove it. + let callee_item = MentionedItem::Fn(func.ty(caller_body, tcx)); + let caller_mentioned_items = caller_body.mentioned_items.as_mut().unwrap(); + if let Some(idx) = caller_mentioned_items.iter().position(|item| item.node == callee_item) { + // We found the callee, so remove it and add its items instead. + caller_mentioned_items.remove(idx); + caller_mentioned_items.extend(callee_body.mentioned_items()); + } else { + // If we can't find the callee, there's no point in adding its items. 
Probably it + // already got removed by being inlined elsewhere in the same function, so we already + // took its items. + } +} + +fn make_call_args<'tcx, I: Inliner<'tcx>>( + inliner: &I, + args: Box<[Spanned>]>, + callsite: &CallSite<'tcx>, + caller_body: &mut Body<'tcx>, + callee_body: &Body<'tcx>, + return_block: Option, +) -> Box<[Local]> { + let tcx = inliner.tcx(); + + // There is a bit of a mismatch between the *caller* of a closure and the *callee*. + // The caller provides the arguments wrapped up in a tuple: + // + // tuple_tmp = (a, b, c) + // Fn::call(closure_ref, tuple_tmp) + // + // meanwhile the closure body expects the arguments (here, `a`, `b`, and `c`) + // as distinct arguments. (This is the "rust-call" ABI hack.) Normally, codegen has + // the job of unpacking this tuple. But here, we are codegen. =) So we want to create + // a vector like + // + // [closure_ref, tuple_tmp.0, tuple_tmp.1, tuple_tmp.2] + // + // Except for one tiny wrinkle: we don't actually want `tuple_tmp.0`. It's more convenient + // if we "spill" that into *another* temporary, so that we can map the argument + // variable in the callee MIR directly to an argument variable on our side. + // So we introduce temporaries like: + // + // tmp0 = tuple_tmp.0 + // tmp1 = tuple_tmp.1 + // tmp2 = tuple_tmp.2 + // + // and the vector is `[closure_ref, tmp0, tmp1, tmp2]`. + if callsite.fn_sig.abi() == ExternAbi::RustCall && callee_body.spread_arg.is_none() { + // FIXME(edition_2024): switch back to a normal method call. + let mut args = <_>::into_iter(args); + let self_ = create_temp_if_necessary( + inliner, + args.next().unwrap().node, + callsite, + caller_body, + return_block, + ); + let tuple = create_temp_if_necessary( + inliner, + args.next().unwrap().node, + callsite, + caller_body, + return_block, + ); + assert!(args.next().is_none()); + + let tuple = Place::from(tuple); + let ty::Tuple(tuple_tys) = tuple.ty(caller_body, tcx).ty.kind() else { + bug!("Closure arguments are not passed as a tuple"); + }; + + // The `closure_ref` in our example above. + let closure_ref_arg = iter::once(self_); + + // The `tmp0`, `tmp1`, and `tmp2` in our example above. + let tuple_tmp_args = tuple_tys.iter().enumerate().map(|(i, ty)| { + // This is e.g., `tuple_tmp.0` in our example above. + let tuple_field = Operand::Move(tcx.mk_place_field(tuple, FieldIdx::new(i), ty)); + + // Spill to a local to make e.g., `tmp0`. + create_temp_if_necessary(inliner, tuple_field, callsite, caller_body, return_block) + }); + + closure_ref_arg.chain(tuple_tmp_args).collect() + } else { + // FIXME(edition_2024): switch back to a normal method call. + <_>::into_iter(args) + .map(|a| create_temp_if_necessary(inliner, a.node, callsite, caller_body, return_block)) + .collect() + } +} + +/// If `arg` is already a temporary, returns it. Otherwise, introduces a fresh temporary `T` and an +/// instruction `T = arg`, and returns `T`. +fn create_temp_if_necessary<'tcx, I: Inliner<'tcx>>( + inliner: &I, + arg: Operand<'tcx>, + callsite: &CallSite<'tcx>, + caller_body: &mut Body<'tcx>, + return_block: Option, +) -> Local { + // Reuse the operand if it is a moved temporary. + if let Operand::Move(place) = &arg + && let Some(local) = place.as_local() + && caller_body.local_kind(local) == LocalKind::Temp + { + return local; + } + + // Otherwise, create a temporary for the argument. 
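
// The "rust-call" ABI that `make_call_args` above is undoing, seen from
// surface Rust. A nightly-only sketch (requires `#![feature(fn_traits)]`;
// names are illustrative): the caller hands the closure a single tuple, while
// the closure body still sees three distinct parameters, which is the mismatch
// the `tmp0`/`tmp1`/`tmp2` spills paper over when inlining.
#![feature(fn_traits)]

fn main() {
    let add = |a: i32, b: i32, c: i32| a + b + c;
    // Sugar: `add(1, 2, 3)`. Desugared, the arguments travel as one tuple.
    let sum = std::ops::Fn::call(&add, (1, 2, 3));
    assert_eq!(sum, 6);
}
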
+ trace!("creating temp for argument {:?}", arg); + let arg_ty = arg.ty(caller_body, inliner.tcx()); + let local = new_call_temp(caller_body, callsite, arg_ty, return_block); + caller_body[callsite.block].statements.push(Statement { + source_info: callsite.source_info, + kind: StatementKind::Assign(Box::new((Place::from(local), Rvalue::Use(arg)))), + }); + local +} + +/// Introduces a new temporary into the caller body that is live for the duration of the call. +fn new_call_temp<'tcx>( + caller_body: &mut Body<'tcx>, + callsite: &CallSite<'tcx>, + ty: Ty<'tcx>, + return_block: Option, +) -> Local { + let local = caller_body.local_decls.push(LocalDecl::new(ty, callsite.source_info.span)); + + caller_body[callsite.block].statements.push(Statement { + source_info: callsite.source_info, + kind: StatementKind::StorageLive(local), + }); + + if let Some(block) = return_block { + caller_body[block].statements.insert(0, Statement { + source_info: callsite.source_info, + kind: StatementKind::StorageDead(local), + }); + } + + local } /** @@ -891,7 +1127,7 @@ struct Integrator<'a, 'tcx> { in_cleanup_block: bool, return_block: Option, tcx: TyCtxt<'tcx>, - always_live_locals: BitSet, + always_live_locals: DenseBitSet, } impl Integrator<'_, '_> { diff --git a/compiler/rustc_mir_transform/src/instsimplify.rs b/compiler/rustc_mir_transform/src/instsimplify.rs index adc3374df2ea7..20e2e3e8ba2c1 100644 --- a/compiler/rustc_mir_transform/src/instsimplify.rs +++ b/compiler/rustc_mir_transform/src/instsimplify.rs @@ -7,8 +7,7 @@ use rustc_middle::bug; use rustc_middle::mir::*; use rustc_middle::ty::layout::ValidityRequirement; use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt, layout}; -use rustc_span::symbol::Symbol; -use rustc_span::{DUMMY_SP, sym}; +use rustc_span::{DUMMY_SP, Symbol, sym}; use crate::simplify::simplify_duplicate_switch_targets; use crate::take_array; @@ -49,6 +48,8 @@ impl<'tcx> crate::MirPass<'tcx> for InstSimplify { ctx.simplify_ref_deref(rvalue); ctx.simplify_ptr_aggregate(rvalue); ctx.simplify_cast(rvalue); + ctx.simplify_repeated_aggregate(rvalue); + ctx.simplify_repeat_once(rvalue); } _ => {} } @@ -69,6 +70,35 @@ struct InstSimplifyContext<'a, 'tcx> { } impl<'tcx> InstSimplifyContext<'_, 'tcx> { + /// Transform aggregates like [0, 0, 0, 0, 0] into [0; 5]. + /// GVN can also do this optimization, but GVN is only run at mir-opt-level 2 so having this in + /// InstSimplify helps unoptimized builds. + fn simplify_repeated_aggregate(&self, rvalue: &mut Rvalue<'tcx>) { + let Rvalue::Aggregate(box AggregateKind::Array(_), fields) = rvalue else { + return; + }; + if fields.len() < 5 { + return; + } + let first = &fields[rustc_abi::FieldIdx::ZERO]; + let Operand::Constant(first) = first else { + return; + }; + let Ok(first_val) = first.const_.eval(self.tcx, self.typing_env, first.span) else { + return; + }; + if fields.iter().all(|field| { + let Operand::Constant(field) = field else { + return false; + }; + let field = field.const_.eval(self.tcx, self.typing_env, field.span); + field == Ok(first_val) + }) { + let len = ty::Const::from_target_usize(self.tcx, fields.len().try_into().unwrap()); + *rvalue = Rvalue::Repeat(Operand::Constant(first.clone()), len); + } + } + /// Transform boolean comparisons into logical operations. 
fn simplify_bool_cmp(&self, rvalue: &mut Rvalue<'tcx>) { match rvalue { @@ -174,33 +204,22 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> { *kind = CastKind::IntToInt; return; } - - // Transmuting a transparent struct/union to a field's type is a projection - if let ty::Adt(adt_def, args) = operand_ty.kind() - && adt_def.repr().transparent() - && (adt_def.is_struct() || adt_def.is_union()) - && let Some(place) = operand.place() - { - let variant = adt_def.non_enum_variant(); - for (i, field) in variant.fields.iter_enumerated() { - let field_ty = field.ty(self.tcx, args); - if field_ty == *cast_ty { - let place = place - .project_deeper(&[ProjectionElem::Field(i, *cast_ty)], self.tcx); - let operand = if operand.is_move() { - Operand::Move(place) - } else { - Operand::Copy(place) - }; - *rvalue = Rvalue::Use(operand); - return; - } - } - } } } } + /// Simplify `[x; 1]` to just `[x]`. + fn simplify_repeat_once(&self, rvalue: &mut Rvalue<'tcx>) { + if let Rvalue::Repeat(operand, count) = rvalue + && let Some(1) = count.try_to_target_usize(self.tcx) + { + *rvalue = Rvalue::Aggregate( + Box::new(AggregateKind::Array(operand.ty(self.local_decls, self.tcx))), + [operand.clone()].into(), + ); + } + } + fn simplify_primitive_clone( &self, terminator: &mut Terminator<'tcx>, diff --git a/compiler/rustc_mir_transform/src/jump_threading.rs b/compiler/rustc_mir_transform/src/jump_threading.rs index beed007589bdf..c73a03489c57f 100644 --- a/compiler/rustc_mir_transform/src/jump_threading.rs +++ b/compiler/rustc_mir_transform/src/jump_threading.rs @@ -35,13 +35,12 @@ //! Likewise, applying the optimisation can create a lot of new MIR, so we bound the instruction //! cost by `MAX_COST`. -use rustc_abi::{TagEncoding, Variants}; use rustc_arena::DroplessArena; use rustc_const_eval::const_eval::DummyMachine; use rustc_const_eval::interpret::{ImmTy, Immediate, InterpCx, OpTy, Projectable}; use rustc_data_structures::fx::FxHashSet; use rustc_index::IndexVec; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::bug; use rustc_middle::mir::interpret::Scalar; use rustc_middle::mir::visit::Visitor; @@ -122,7 +121,7 @@ struct TOFinder<'a, 'tcx> { ecx: InterpCx<'tcx, DummyMachine>, body: &'a Body<'tcx>, map: Map<'tcx>, - loop_headers: BitSet, + loop_headers: DenseBitSet, /// We use an arena to avoid cloning the slices when cloning `state`. arena: &'a DroplessArena, opportunities: Vec, @@ -565,31 +564,15 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> { StatementKind::SetDiscriminant { box place, variant_index } => { let Some(discr_target) = self.map.find_discr(place.as_ref()) else { return }; let enum_ty = place.ty(self.body, self.tcx).ty; - // `SetDiscriminant` may be a no-op if the assigned variant is the untagged variant - // of a niche encoding. If we cannot ensure that we write to the discriminant, do - // nothing. - let Ok(enum_layout) = self.ecx.layout_of(enum_ty) else { + // `SetDiscriminant` guarantees that the discriminant is now `variant_index`. + // Even if the discriminant write does nothing due to niches, it is UB to set the + // discriminant when the data does not encode the desired discriminant. + let Some(discr) = + self.ecx.discriminant_for_variant(enum_ty, *variant_index).discard_err() + else { return; }; - let writes_discriminant = match enum_layout.variants { - Variants::Single { index } => { - assert_eq!(index, *variant_index); - true - } - Variants::Multiple { tag_encoding: TagEncoding::Direct, .. 
} => true, - Variants::Multiple { - tag_encoding: TagEncoding::Niche { untagged_variant, .. }, - .. - } => *variant_index != untagged_variant, - }; - if writes_discriminant { - let Some(discr) = - self.ecx.discriminant_for_variant(enum_ty, *variant_index).discard_err() - else { - return; - }; - self.process_immediate(bb, discr_target, discr, state); - } + self.process_immediate(bb, discr_target, discr, state); } // If we expect `lhs ?= true`, we have an opportunity if we assume `lhs == true`. StatementKind::Intrinsic(box NonDivergingIntrinsic::Assume( @@ -849,8 +832,8 @@ enum Update { /// at least a predecessor which it dominates. This definition is only correct for reducible CFGs. /// But if the CFG is already irreducible, there is no point in trying much harder. /// is already irreducible. -fn loop_headers(body: &Body<'_>) -> BitSet { - let mut loop_headers = BitSet::new_empty(body.basic_blocks.len()); +fn loop_headers(body: &Body<'_>) -> DenseBitSet { + let mut loop_headers = DenseBitSet::new_empty(body.basic_blocks.len()); let dominators = body.basic_blocks.dominators(); // Only visit reachable blocks. for (bb, bbdata) in traversal::preorder(body) { diff --git a/compiler/rustc_mir_transform/src/known_panics_lint.rs b/compiler/rustc_mir_transform/src/known_panics_lint.rs index acf3eb2b62cea..9b3a0e6729538 100644 --- a/compiler/rustc_mir_transform/src/known_panics_lint.rs +++ b/compiler/rustc_mir_transform/src/known_panics_lint.rs @@ -13,7 +13,7 @@ use rustc_data_structures::fx::FxHashSet; use rustc_hir::HirId; use rustc_hir::def::DefKind; use rustc_index::IndexVec; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::bug; use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor}; use rustc_middle::mir::*; @@ -67,7 +67,7 @@ struct ConstPropagator<'mir, 'tcx> { tcx: TyCtxt<'tcx>, typing_env: ty::TypingEnv<'tcx>, worklist: Vec, - visited_blocks: BitSet, + visited_blocks: DenseBitSet, locals: IndexVec>, body: &'mir Body<'tcx>, written_only_inside_own_block_locals: FxHashSet, @@ -190,7 +190,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { tcx, typing_env, worklist: vec![START_BLOCK], - visited_blocks: BitSet::new_empty(body.basic_blocks.len()), + visited_blocks: DenseBitSet::new_empty(body.basic_blocks.len()), locals: IndexVec::from_elem_n(Value::Uninit, body.local_decls.len()), body, can_const_prop, @@ -440,7 +440,6 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { | Rvalue::Use(..) | Rvalue::CopyForDeref(..) | Rvalue::Repeat(..) - | Rvalue::Len(..) | Rvalue::Cast(..) | Rvalue::ShallowInitBox(..) | Rvalue::Discriminant(..) @@ -600,20 +599,6 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { return None; } - Len(place) => { - let len = if let ty::Array(_, n) = place.ty(self.local_decls(), self.tcx).ty.kind() - { - n.try_to_target_usize(self.tcx)? - } else { - match self.get_const(place)? { - Value::Immediate(src) => src.len(&self.ecx).discard_err()?, - Value::Aggregate { fields, .. } => fields.len() as u64, - Value::Uninit => return None, - } - }; - ImmTy::from_scalar(Scalar::from_target_usize(len, self), layout).into() - } - Ref(..) | RawPtr(..) => return None, NullaryOp(ref null_op, ty) => { @@ -867,7 +852,7 @@ enum ConstPropMode { struct CanConstProp { can_const_prop: IndexVec, // False at the beginning. Once set, no more assignments are allowed to that local. 
- found_assignment: BitSet, + found_assignment: DenseBitSet, } impl CanConstProp { @@ -879,7 +864,7 @@ impl CanConstProp { ) -> IndexVec { let mut cpv = CanConstProp { can_const_prop: IndexVec::from_elem(ConstPropMode::FullConstProp, &body.local_decls), - found_assignment: BitSet::new_empty(body.local_decls.len()), + found_assignment: DenseBitSet::new_empty(body.local_decls.len()), }; for (local, val) in cpv.can_const_prop.iter_enumerated_mut() { let ty = body.local_decls[local].ty; diff --git a/compiler/rustc_mir_transform/src/large_enums.rs b/compiler/rustc_mir_transform/src/large_enums.rs index 8be5a63d00878..490e7dd8f7e07 100644 --- a/compiler/rustc_mir_transform/src/large_enums.rs +++ b/compiler/rustc_mir_transform/src/large_enums.rs @@ -216,7 +216,7 @@ impl EnumSizeOpt { }; let layout = tcx.layout_of(typing_env.as_query_input(ty)).ok()?; let variants = match &layout.variants { - Variants::Single { .. } => return None, + Variants::Single { .. } | Variants::Empty => return None, Variants::Multiple { tag_encoding: TagEncoding::Niche { .. }, .. } => return None, Variants::Multiple { variants, .. } if variants.len() <= 1 => return None, diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs index 5c090bf7cad08..db999bea98692 100644 --- a/compiler/rustc_mir_transform/src/lib.rs +++ b/compiler/rustc_mir_transform/src/lib.rs @@ -1,4 +1,5 @@ // tidy-alphabetical-start +#![feature(array_windows)] #![feature(assert_matches)] #![feature(box_patterns)] #![feature(const_type_name)] @@ -34,8 +35,7 @@ use rustc_middle::util::Providers; use rustc_middle::{bug, query, span_bug}; use rustc_span::source_map::Spanned; use rustc_span::{DUMMY_SP, sym}; -use rustc_trait_selection::traits; -use tracing::{debug, trace}; +use tracing::debug; #[macro_use] mod pass_manager; @@ -141,7 +141,8 @@ declare_passes! { mod gvn : GVN; // Made public so that `mir_drops_elaborated_and_const_checked` can be overridden // by custom rustc drivers, running all the steps by themselves. See #114628. - pub mod inline : Inline; + pub mod inline : Inline, ForceInline; + mod impossible_predicates : ImpossiblePredicates; mod instsimplify : InstSimplify { BeforeInline, AfterSimplifyCfg }; mod jump_threading : JumpThreading; mod known_panics_lint : KnownPanicsLint; @@ -437,6 +438,8 @@ fn mir_promoted( Some(MirPhase::Analysis(AnalysisPhase::Initial)), ); + lint_tail_expr_drop_order::run_lint(tcx, def, &body); + let promoted = promote_pass.promoted_fragments.into_inner(); (tcx.alloc_steal_mir(body), tcx.alloc_steal_promoted(promoted)) } @@ -486,63 +489,20 @@ fn mir_drops_elaborated_and_const_checked(tcx: TyCtxt<'_>, def: LocalDefId) -> & let is_fn_like = tcx.def_kind(def).is_fn_like(); if is_fn_like { // Do not compute the mir call graph without said call graph actually being used. - if pm::should_run_pass(tcx, &inline::Inline) { + if pm::should_run_pass(tcx, &inline::Inline) + || inline::ForceInline::should_run_pass_for_callee(tcx, def.to_def_id()) + { tcx.ensure_with_value().mir_inliner_callees(ty::InstanceKind::Item(def.to_def_id())); } } let (body, _) = tcx.mir_promoted(def); - lint_tail_expr_drop_order::run_lint(tcx, def, &body.borrow()); let mut body = body.steal(); if let Some(error_reported) = tainted_by_errors { body.tainted_by_errors = Some(error_reported); } - // Check if it's even possible to satisfy the 'where' clauses - // for this item. - // - // This branch will never be taken for any normal function. 
- // However, it's possible to `#!feature(trivial_bounds)]` to write - // a function with impossible to satisfy clauses, e.g.: - // `fn foo() where String: Copy {}` - // - // We don't usually need to worry about this kind of case, - // since we would get a compilation error if the user tried - // to call it. However, since we optimize even without any - // calls to the function, we need to make sure that it even - // makes sense to try to evaluate the body. - // - // If there are unsatisfiable where clauses, then all bets are - // off, and we just give up. - // - // We manually filter the predicates, skipping anything that's not - // "global". We are in a potentially generic context - // (e.g. we are evaluating a function without instantiating generic - // parameters, so this filtering serves two purposes: - // - // 1. We skip evaluating any predicates that we would - // never be able prove are unsatisfiable (e.g. `` - // 2. We avoid trying to normalize predicates involving generic - // parameters (e.g. `::MyItem`). This can confuse - // the normalization code (leading to cycle errors), since - // it's usually never invoked in this way. - let predicates = tcx - .predicates_of(body.source.def_id()) - .predicates - .iter() - .filter_map(|(p, _)| if p.is_global() { Some(*p) } else { None }); - if traits::impossible_predicates(tcx, traits::elaborate(tcx, predicates).collect()) { - trace!("found unsatisfiable predicates for {:?}", body.source); - // Clear the body to only contain a single `unreachable` statement. - let bbs = body.basic_blocks.as_mut(); - bbs.raw.truncate(1); - bbs[START_BLOCK].statements.clear(); - bbs[START_BLOCK].terminator_mut().kind = TerminatorKind::Unreachable; - body.var_debug_info.clear(); - body.local_decls.raw.truncate(body.arg_count + 1); - } - run_analysis_to_runtime_passes(tcx, &mut body); // Now that drop elaboration has been performed, we can check for @@ -590,6 +550,7 @@ pub fn run_analysis_to_runtime_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<' /// After this series of passes, no lifetime analysis based on borrowing can be done. fn run_analysis_cleanup_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { let passes: &[&dyn MirPass<'tcx>] = &[ + &impossible_predicates::ImpossiblePredicates, &cleanup_post_borrowck::CleanupPostBorrowck, &remove_noop_landing_pads::RemoveNoopLandingPads, &simplify::SimplifyCfg::PostAnalysis, @@ -663,6 +624,8 @@ fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { // Perform instsimplify before inline to eliminate some trivial calls (like clone // shims). &instsimplify::InstSimplify::BeforeInline, + // Perform inlining of `#[rustc_force_inline]`-annotated callees. + &inline::ForceInline, // Perform inlining, which may add a lot of code. 
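
// The situation described by the comment removed above, which is now handled
// by the new `ImpossiblePredicates` pass. A nightly-only sketch built from the
// comment's own example: the function compiles, but no caller can ever prove
// `String: Copy`, so optimizing its body as if the bound held would be
// pointless, and all bets are off inside it.
#![feature(trivial_bounds)]
#![allow(trivial_bounds)]

fn foo()
where
    String: Copy,
{
}

fn main() {}
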
&inline::Inline, // Code from other crates may have storage markers, so this needs to happen after diff --git a/compiler/rustc_mir_transform/src/lint.rs b/compiler/rustc_mir_transform/src/lint.rs index 29e762af8de35..f472c7cb493d0 100644 --- a/compiler/rustc_mir_transform/src/lint.rs +++ b/compiler/rustc_mir_transform/src/lint.rs @@ -5,7 +5,7 @@ use std::borrow::Cow; use rustc_data_structures::fx::FxHashSet; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::mir::visit::{PlaceContext, Visitor}; use rustc_middle::mir::*; use rustc_middle::ty::TyCtxt; @@ -43,7 +43,7 @@ struct Lint<'a, 'tcx> { when: String, body: &'a Body<'tcx>, is_fn_like: bool, - always_live_locals: &'a BitSet, + always_live_locals: &'a DenseBitSet, maybe_storage_live: ResultsCursor<'a, 'tcx, MaybeStorageLive<'a>>, maybe_storage_dead: ResultsCursor<'a, 'tcx, MaybeStorageDead<'a>>, places: FxHashSet>, diff --git a/compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs b/compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs index 7fb421dea0c74..50d10883d2cad 100644 --- a/compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs +++ b/compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs @@ -285,7 +285,9 @@ fn ty_dtor_span<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Option { | ty::Placeholder(_) | ty::Infer(_) | ty::Slice(_) - | ty::Array(_, _) => None, + | ty::Array(_, _) + | ty::UnsafeBinder(_) => None, + ty::Adt(adt_def, _) => { let did = adt_def.did(); let try_local_did_span = |did: DefId| { @@ -349,6 +351,11 @@ pub(crate) fn run_lint<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId, body: &Body< { return; } + + // FIXME(typing_env): This should be able to reveal the opaques local to the + // body using the typeck results. + let typing_env = ty::TypingEnv::non_body_analysis(tcx, def_id); + // ## About BIDs in blocks ## // Track the set of blocks that contain a backwards-incompatible drop (BID) // and, for each block, the vector of locations. @@ -356,7 +363,7 @@ pub(crate) fn run_lint<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId, body: &Body< // We group them per-block because they tend to scheduled in the same drop ladder block. 
let mut bid_per_block = IndexMap::default(); let mut bid_places = UnordSet::new(); - let typing_env = ty::TypingEnv::post_analysis(tcx, def_id); + let mut ty_dropped_components = UnordMap::default(); for (block, data) in body.basic_blocks.iter_enumerated() { for (statement_index, stmt) in data.statements.iter().enumerate() { @@ -684,7 +691,7 @@ impl Subdiagnostic for LocalLabel<'_> { for dtor in self.destructors { dtor.add_to_diag_with(diag, f); } - let msg = f(diag, crate::fluent_generated::mir_transform_label_local_epilogue.into()); + let msg = f(diag, crate::fluent_generated::mir_transform_label_local_epilogue); diag.span_label(self.span, msg); } } diff --git a/compiler/rustc_mir_transform/src/lower_intrinsics.rs b/compiler/rustc_mir_transform/src/lower_intrinsics.rs index 6d635606687a9..942c6144ea69c 100644 --- a/compiler/rustc_mir_transform/src/lower_intrinsics.rs +++ b/compiler/rustc_mir_transform/src/lower_intrinsics.rs @@ -3,7 +3,7 @@ use rustc_middle::mir::*; use rustc_middle::ty::{self, TyCtxt}; use rustc_middle::{bug, span_bug}; -use rustc_span::symbol::sym; +use rustc_span::sym; use crate::take_array; diff --git a/compiler/rustc_mir_transform/src/match_branches.rs b/compiler/rustc_mir_transform/src/match_branches.rs index 20e2a65b311c5..534ba99178017 100644 --- a/compiler/rustc_mir_transform/src/match_branches.rs +++ b/compiler/rustc_mir_transform/src/match_branches.rs @@ -7,6 +7,7 @@ use rustc_middle::mir::*; use rustc_middle::ty::layout::{IntegerExt, TyAndLayout}; use rustc_middle::ty::{self, ScalarInt, Ty, TyCtxt}; use rustc_type_ir::TyKind::*; +use tracing::instrument; use super::simplify::simplify_cfg; @@ -51,7 +52,7 @@ impl<'tcx> crate::MirPass<'tcx> for MatchBranchSimplification { } trait SimplifyMatch<'tcx> { - /// Simplifies a match statement, returning true if the simplification succeeds, false + /// Simplifies a match statement, returning `Some` if the simplification succeeds, `None` /// otherwise. Generic code is written here, and we generally don't need a custom /// implementation. fn simplify( @@ -159,6 +160,7 @@ struct SimplifyToIf; /// } /// ``` impl<'tcx> SimplifyMatch<'tcx> for SimplifyToIf { + #[instrument(level = "debug", skip(self, tcx), ret)] fn can_simplify( &mut self, tcx: TyCtxt<'tcx>, @@ -167,12 +169,15 @@ impl<'tcx> SimplifyMatch<'tcx> for SimplifyToIf { bbs: &IndexSlice>, _discr_ty: Ty<'tcx>, ) -> Option<()> { - if targets.iter().len() != 1 { - return None; - } + let (first, second) = match targets.all_targets() { + &[first, otherwise] => (first, otherwise), + &[first, second, otherwise] if bbs[otherwise].is_empty_unreachable() => (first, second), + _ => { + return None; + } + }; + // We require that the possible target blocks all be distinct. - let (_, first) = targets.iter().next().unwrap(); - let second = targets.otherwise(); if first == second { return None; } @@ -221,8 +226,14 @@ impl<'tcx> SimplifyMatch<'tcx> for SimplifyToIf { discr_local: Local, discr_ty: Ty<'tcx>, ) { - let (val, first) = targets.iter().next().unwrap(); - let second = targets.otherwise(); + let ((val, first), second) = match (targets.all_targets(), targets.all_values()) { + (&[first, otherwise], &[val]) => ((val, first), otherwise), + (&[first, second, otherwise], &[val, _]) if bbs[otherwise].is_empty_unreachable() => { + ((val, first), second) + } + _ => unreachable!(), + }; + // We already checked that first and second are different blocks, // and bb_idx has a different terminator from both of them. 
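
// A standalone sketch of the shape `SimplifyToIf` collapses (illustrative
// function; the change above additionally accepts a trailing unreachable
// `otherwise` arm): a two-way switch whose arms only assign different
// constants can become a single comparison on the discriminant.
fn is_nonzero(x: u32) -> bool {
    match x {
        0 => false,
        _ => true,
    }
    // After MatchBranchSimplification this is roughly `x != 0`.
}
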
let first = &bbs[first]; @@ -297,7 +308,7 @@ struct SimplifyToExp { transform_kinds: Vec, } -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Debug)] enum ExpectedTransformKind<'a, 'tcx> { /// Identical statements. Same(&'a StatementKind<'tcx>), @@ -362,6 +373,7 @@ impl From> for TransformKind { /// } /// ``` impl<'tcx> SimplifyMatch<'tcx> for SimplifyToExp { + #[instrument(level = "debug", skip(self, tcx), ret)] fn can_simplify( &mut self, tcx: TyCtxt<'tcx>, diff --git a/compiler/rustc_mir_transform/src/multiple_return_terminators.rs b/compiler/rustc_mir_transform/src/multiple_return_terminators.rs index a9227524ce5e5..6dfa14d6b527d 100644 --- a/compiler/rustc_mir_transform/src/multiple_return_terminators.rs +++ b/compiler/rustc_mir_transform/src/multiple_return_terminators.rs @@ -1,7 +1,7 @@ //! This pass removes jumps to basic blocks containing only a return, and replaces them with a //! return instead. -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::mir::*; use rustc_middle::ty::TyCtxt; @@ -16,7 +16,7 @@ impl<'tcx> crate::MirPass<'tcx> for MultipleReturnTerminators { fn run_pass(&self, _: TyCtxt<'tcx>, body: &mut Body<'tcx>) { // find basic blocks with no statement and a return terminator - let mut bbs_simple_returns = BitSet::new_empty(body.basic_blocks.len()); + let mut bbs_simple_returns = DenseBitSet::new_empty(body.basic_blocks.len()); let bbs = body.basic_blocks_mut(); for idx in bbs.indices() { if bbs[idx].statements.is_empty() diff --git a/compiler/rustc_mir_transform/src/nrvo.rs b/compiler/rustc_mir_transform/src/nrvo.rs index cd026ed68069f..35872de385245 100644 --- a/compiler/rustc_mir_transform/src/nrvo.rs +++ b/compiler/rustc_mir_transform/src/nrvo.rs @@ -1,7 +1,7 @@ //! See the docs for [`RenameReturnPlace`]. use rustc_hir::Mutability; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::bug; use rustc_middle::mir::visit::{MutVisitor, NonUseContext, PlaceContext, Visitor}; use rustc_middle::mir::{self, BasicBlock, Local, Location}; @@ -116,7 +116,7 @@ fn local_eligible_for_nrvo(body: &mir::Body<'_>) -> Option { fn find_local_assigned_to_return_place(start: BasicBlock, body: &mir::Body<'_>) -> Option { let mut block = start; - let mut seen = BitSet::new_empty(body.basic_blocks.len()); + let mut seen = DenseBitSet::new_empty(body.basic_blocks.len()); // Iterate as long as `block` has exactly one predecessor that we have not yet visited. while seen.insert(block) { diff --git a/compiler/rustc_mir_transform/src/pass_manager.rs b/compiler/rustc_mir_transform/src/pass_manager.rs index 8a45ce0762d59..c3f0a989ce105 100644 --- a/compiler/rustc_mir_transform/src/pass_manager.rs +++ b/compiler/rustc_mir_transform/src/pass_manager.rs @@ -79,6 +79,12 @@ pub(super) trait MirPass<'tcx> { true } + /// Returns `true` if this pass can be overridden by `-Zenable-mir-passes`. This should be + /// true for basically every pass other than those that are necessary for correctness. 
+ fn can_be_overridden(&self) -> bool { + true + } + fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>); fn is_mir_dump_enabled(&self) -> bool { @@ -176,6 +182,10 @@ where { let name = pass.name(); + if !pass.can_be_overridden() { + return pass.is_enabled(tcx.sess); + } + let overridden_passes = &tcx.sess.opts.unstable_opts.mir_enable_passes; let overridden = overridden_passes.iter().rev().find(|(s, _)| s == &*name).map(|(_name, polarity)| { diff --git a/compiler/rustc_mir_transform/src/prettify.rs b/compiler/rustc_mir_transform/src/prettify.rs index 937c207776b35..51abd4da86eb1 100644 --- a/compiler/rustc_mir_transform/src/prettify.rs +++ b/compiler/rustc_mir_transform/src/prettify.rs @@ -4,7 +4,7 @@ //! (`-Zmir-enable-passes=+ReorderBasicBlocks,+ReorderLocals`) //! to make the MIR easier to read for humans. -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_index::{IndexSlice, IndexVec}; use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor}; use rustc_middle::mir::*; @@ -51,8 +51,10 @@ impl<'tcx> crate::MirPass<'tcx> for ReorderLocals { } fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { - let mut finder = - LocalFinder { map: IndexVec::new(), seen: BitSet::new_empty(body.local_decls.len()) }; + let mut finder = LocalFinder { + map: IndexVec::new(), + seen: DenseBitSet::new_empty(body.local_decls.len()), + }; // We can't reorder the return place or the arguments for local in (0..=body.arg_count).map(Local::from_usize) { @@ -113,7 +115,7 @@ impl<'tcx> MutVisitor<'tcx> for BasicBlockUpdater<'tcx> { struct LocalFinder { map: IndexVec, - seen: BitSet, + seen: DenseBitSet, } impl LocalFinder { diff --git a/compiler/rustc_mir_transform/src/promote_consts.rs b/compiler/rustc_mir_transform/src/promote_consts.rs index 6be95b1f0f1e6..7451f4193042b 100644 --- a/compiler/rustc_mir_transform/src/promote_consts.rs +++ b/compiler/rustc_mir_transform/src/promote_consts.rs @@ -430,9 +430,7 @@ impl<'tcx> Validator<'_, 'tcx> { self.validate_operand(op)? } - Rvalue::Discriminant(place) | Rvalue::Len(place) => { - self.validate_place(place.as_ref())? - } + Rvalue::Discriminant(place) => self.validate_place(place.as_ref())?, Rvalue::ThreadLocalRef(_) => return Err(Unpromotable), diff --git a/compiler/rustc_mir_transform/src/ref_prop.rs b/compiler/rustc_mir_transform/src/ref_prop.rs index 96bcdfa6fac47..95b05f94270a8 100644 --- a/compiler/rustc_mir_transform/src/ref_prop.rs +++ b/compiler/rustc_mir_transform/src/ref_prop.rs @@ -2,7 +2,7 @@ use std::borrow::Cow; use rustc_data_structures::fx::FxHashSet; use rustc_index::IndexVec; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::bug; use rustc_middle::mir::visit::*; use rustc_middle::mir::*; @@ -132,7 +132,7 @@ fn compute_replacement<'tcx>( let mut targets = IndexVec::from_elem(Value::Unknown, &body.local_decls); // Set of locals for which we will remove their storage statement. This is useful for // reborrowed references. - let mut storage_to_remove = BitSet::new_empty(body.local_decls.len()); + let mut storage_to_remove = DenseBitSet::new_empty(body.local_decls.len()); let fully_replacable_locals = fully_replacable_locals(ssa); @@ -324,8 +324,8 @@ fn compute_replacement<'tcx>( /// /// We consider a local to be replacable iff it's only used in a `Deref` projection `*_local` or /// non-use position (like storage statements and debuginfo). 
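
// A minimal sketch of the "fully replacable" locals described just above
// (illustrative function): a reference that is only ever used in a `*r`
// projection can be propagated away, turning uses of `*r` into uses of `x`.
fn demo(x: u32) -> u32 {
    let r = &x; // `r` only occurs in the `*r` projection below
    *r + 1 // reference propagation can rewrite this to `x + 1`
}
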
-fn fully_replacable_locals(ssa: &SsaLocals) -> BitSet { - let mut replacable = BitSet::new_empty(ssa.num_locals()); +fn fully_replacable_locals(ssa: &SsaLocals) -> DenseBitSet { + let mut replacable = DenseBitSet::new_empty(ssa.num_locals()); // First pass: for each local, whether its uses can be fully replaced. for local in ssa.locals() { @@ -344,7 +344,7 @@ fn fully_replacable_locals(ssa: &SsaLocals) -> BitSet { struct Replacer<'tcx> { tcx: TyCtxt<'tcx>, targets: IndexVec>, - storage_to_remove: BitSet, + storage_to_remove: DenseBitSet, allowed_replacements: FxHashSet<(Local, Location)>, any_replacement: bool, } diff --git a/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs b/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs index fd49e956f433b..76a3edfe0be4e 100644 --- a/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs +++ b/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs @@ -1,4 +1,4 @@ -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::mir::patch::MirPatch; use rustc_middle::mir::*; use rustc_middle::ty::TyCtxt; @@ -40,7 +40,7 @@ impl<'tcx> crate::MirPass<'tcx> for RemoveNoopLandingPads { let mut jumps_folded = 0; let mut landing_pads_removed = 0; - let mut nop_landing_pads = BitSet::new_empty(body.basic_blocks.len()); + let mut nop_landing_pads = DenseBitSet::new_empty(body.basic_blocks.len()); // This is a post-order traversal, so that if A post-dominates B // then A will be visited before B. @@ -81,7 +81,7 @@ impl RemoveNoopLandingPads { &self, bb: BasicBlock, body: &Body<'_>, - nop_landing_pads: &BitSet, + nop_landing_pads: &DenseBitSet, ) -> bool { for stmt in &body[bb].statements { match &stmt.kind { diff --git a/compiler/rustc_mir_transform/src/remove_zsts.rs b/compiler/rustc_mir_transform/src/remove_zsts.rs index 64e183bcbc010..55e5701bd0af3 100644 --- a/compiler/rustc_mir_transform/src/remove_zsts.rs +++ b/compiler/rustc_mir_transform/src/remove_zsts.rs @@ -36,31 +36,39 @@ struct Replacer<'a, 'tcx> { } /// A cheap, approximate check to avoid unnecessary `layout_of` calls. -fn maybe_zst(ty: Ty<'_>) -> bool { +/// +/// `Some(true)` is definitely ZST; `Some(false)` is definitely *not* ZST. +/// +/// `None` may or may not be, and must check `layout_of` to be sure. +fn trivially_zst<'tcx>(ty: Ty<'tcx>, tcx: TyCtxt<'tcx>) -> Option { match ty.kind() { - // maybe ZST (could be more precise) - ty::Adt(..) - | ty::Array(..) - | ty::Closure(..) - | ty::CoroutineClosure(..) - | ty::Tuple(..) - | ty::Alias(ty::Opaque, ..) => true, // definitely ZST - ty::FnDef(..) | ty::Never => true, - // unreachable or can't be ZST - _ => false, + ty::FnDef(..) | ty::Never => Some(true), + ty::Tuple(fields) if fields.is_empty() => Some(true), + ty::Array(_ty, len) if let Some(0) = len.try_to_target_usize(tcx) => Some(true), + // clearly not ZST + ty::Bool + | ty::Char + | ty::Int(..) + | ty::Uint(..) + | ty::Float(..) + | ty::RawPtr(..) + | ty::Ref(..) + | ty::FnPtr(..) 
=> Some(false), + // check `layout_of` to see (including unreachable things we won't actually see) + _ => None, } } impl<'tcx> Replacer<'_, 'tcx> { fn known_to_be_zst(&self, ty: Ty<'tcx>) -> bool { - if !maybe_zst(ty) { - return false; + if let Some(is_zst) = trivially_zst(ty, self.tcx) { + is_zst + } else { + self.tcx + .layout_of(self.typing_env.as_query_input(ty)) + .is_ok_and(|layout| layout.is_zst()) } - let Ok(layout) = self.tcx.layout_of(self.typing_env.as_query_input(ty)) else { - return false; - }; - layout.is_zst() } fn make_zst(&self, ty: Ty<'tcx>) -> ConstOperand<'tcx> { diff --git a/compiler/rustc_mir_transform/src/shim.rs b/compiler/rustc_mir_transform/src/shim.rs index 722da3c420dc9..4648ec33c9345 100644 --- a/compiler/rustc_mir_transform/src/shim.rs +++ b/compiler/rustc_mir_transform/src/shim.rs @@ -20,7 +20,7 @@ use rustc_span::{DUMMY_SP, Span}; use tracing::{debug, instrument}; use crate::{ - abort_unwinding_calls, add_call_guards, add_moves_for_packed_drops, deref_separator, + abort_unwinding_calls, add_call_guards, add_moves_for_packed_drops, deref_separator, inline, instsimplify, mentioned_items, pass_manager as pm, remove_noop_landing_pads, simplify, }; @@ -155,6 +155,8 @@ fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceKind<'tcx>) -> Body< &remove_noop_landing_pads::RemoveNoopLandingPads, &simplify::SimplifyCfg::MakeShim, &instsimplify::InstSimplify::BeforeInline, + // Perform inlining of `#[rustc_force_inline]`-annotated callees. + &inline::ForceInline, &abort_unwinding_calls::AbortUnwindingCalls, &add_call_guards::CriticalCallEdges, ], diff --git a/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs b/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs index 139b25be0ab2f..f01bab75c4a1d 100644 --- a/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs +++ b/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs @@ -96,7 +96,7 @@ impl<'tcx> AsyncDestructorCtorShimBuilder<'tcx> { typing_env, stack: Vec::with_capacity(Self::MAX_STACK_LEN), - last_bb: bbs.push(BasicBlockData::new(None)), + last_bb: bbs.push(BasicBlockData::new(None, false)), top_cleanup_bb: match tcx.sess.panic_strategy() { PanicStrategy::Unwind => { // Don't drop input arg because it's just a pointer diff --git a/compiler/rustc_mir_transform/src/single_use_consts.rs b/compiler/rustc_mir_transform/src/single_use_consts.rs index 277a33c031152..10b3c0ae94f24 100644 --- a/compiler/rustc_mir_transform/src/single_use_consts.rs +++ b/compiler/rustc_mir_transform/src/single_use_consts.rs @@ -1,5 +1,5 @@ use rustc_index::IndexVec; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_middle::bug; use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor}; use rustc_middle::mir::*; @@ -28,9 +28,9 @@ impl<'tcx> crate::MirPass<'tcx> for SingleUseConsts { fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { let mut finder = SingleUseConstsFinder { - ineligible_locals: BitSet::new_empty(body.local_decls.len()), + ineligible_locals: DenseBitSet::new_empty(body.local_decls.len()), locations: IndexVec::from_elem(LocationPair::new(), &body.local_decls), - locals_in_debug_info: BitSet::new_empty(body.local_decls.len()), + locals_in_debug_info: DenseBitSet::new_empty(body.local_decls.len()), }; finder.ineligible_locals.insert_range(..=Local::from_usize(body.arg_count)); @@ -96,9 +96,9 @@ impl LocationPair { } struct SingleUseConstsFinder { - ineligible_locals: BitSet, + ineligible_locals: DenseBitSet, locations: IndexVec, 
- locals_in_debug_info: BitSet, + locals_in_debug_info: DenseBitSet, } impl<'tcx> Visitor<'tcx> for SingleUseConstsFinder { diff --git a/compiler/rustc_mir_transform/src/sroa.rs b/compiler/rustc_mir_transform/src/sroa.rs index 52b9ec1e0a357..d54ea3feab6ee 100644 --- a/compiler/rustc_mir_transform/src/sroa.rs +++ b/compiler/rustc_mir_transform/src/sroa.rs @@ -2,7 +2,7 @@ use rustc_abi::{FIRST_VARIANT, FieldIdx}; use rustc_data_structures::flat_map_in_place::FlatMapInPlace; use rustc_hir::LangItem; use rustc_index::IndexVec; -use rustc_index::bit_set::{BitSet, GrowableBitSet}; +use rustc_index::bit_set::{DenseBitSet, GrowableBitSet}; use rustc_middle::bug; use rustc_middle::mir::patch::MirPatch; use rustc_middle::mir::visit::*; @@ -60,9 +60,9 @@ impl<'tcx> crate::MirPass<'tcx> for ScalarReplacementOfAggregates { fn escaping_locals<'tcx>( tcx: TyCtxt<'tcx>, typing_env: ty::TypingEnv<'tcx>, - excluded: &BitSet, + excluded: &DenseBitSet, body: &Body<'tcx>, -) -> BitSet { +) -> DenseBitSet { let is_excluded_ty = |ty: Ty<'tcx>| { if ty.is_union() || ty.is_enum() { return true; @@ -97,7 +97,7 @@ fn escaping_locals<'tcx>( false }; - let mut set = BitSet::new_empty(body.local_decls.len()); + let mut set = DenseBitSet::new_empty(body.local_decls.len()); set.insert_range(RETURN_PLACE..=Local::from_usize(body.arg_count)); for (local, decl) in body.local_decls().iter_enumerated() { if excluded.contains(local) || is_excluded_ty(decl.ty) { @@ -109,7 +109,7 @@ fn escaping_locals<'tcx>( return visitor.set; struct EscapeVisitor { - set: BitSet, + set: DenseBitSet, } impl<'tcx> Visitor<'tcx> for EscapeVisitor { @@ -198,7 +198,7 @@ fn compute_flattening<'tcx>( tcx: TyCtxt<'tcx>, typing_env: ty::TypingEnv<'tcx>, body: &mut Body<'tcx>, - escaping: BitSet, + escaping: DenseBitSet, ) -> ReplacementMap<'tcx> { let mut fragments = IndexVec::from_elem(None, &body.local_decls); @@ -226,8 +226,8 @@ fn replace_flattened_locals<'tcx>( tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>, replacements: ReplacementMap<'tcx>, -) -> BitSet { - let mut all_dead_locals = BitSet::new_empty(replacements.fragments.len()); +) -> DenseBitSet { + let mut all_dead_locals = DenseBitSet::new_empty(replacements.fragments.len()); for (local, replacements) in replacements.fragments.iter_enumerated() { if replacements.is_some() { all_dead_locals.insert(local); @@ -267,7 +267,7 @@ struct ReplacementVisitor<'tcx, 'll> { /// Work to do. replacements: &'ll ReplacementMap<'tcx>, /// This is used to check that we are not leaving references to replaced locals behind. - all_dead_locals: BitSet, + all_dead_locals: DenseBitSet, patch: MirPatch<'tcx>, } diff --git a/compiler/rustc_mir_transform/src/ssa.rs b/compiler/rustc_mir_transform/src/ssa.rs index 5653aef0aae06..a24b3b2e80ffe 100644 --- a/compiler/rustc_mir_transform/src/ssa.rs +++ b/compiler/rustc_mir_transform/src/ssa.rs @@ -7,7 +7,7 @@ //! of a `Freeze` local. Those can still be considered to be SSA. use rustc_data_structures::graph::dominators::Dominators; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_index::{IndexSlice, IndexVec}; use rustc_middle::bug; use rustc_middle::middle::resolve_bound_vars::Set1; @@ -29,7 +29,7 @@ pub(super) struct SsaLocals { /// We ignore non-uses (Storage statements, debuginfo). direct_uses: IndexVec, /// Set of SSA locals that are immutably borrowed. 
- borrowed_locals: BitSet, + borrowed_locals: DenseBitSet, } pub(super) enum AssignedValue<'a, 'tcx> { @@ -50,7 +50,7 @@ impl SsaLocals { let dominators = body.basic_blocks.dominators(); let direct_uses = IndexVec::from_elem(0, &body.local_decls); - let borrowed_locals = BitSet::new_empty(body.local_decls.len()); + let borrowed_locals = DenseBitSet::new_empty(body.local_decls.len()); let mut visitor = SsaVisitor { body, assignments, @@ -202,12 +202,12 @@ impl SsaLocals { } /// Set of SSA locals that are immutably borrowed. - pub(super) fn borrowed_locals(&self) -> &BitSet { + pub(super) fn borrowed_locals(&self) -> &DenseBitSet { &self.borrowed_locals } /// Make a property uniform on a copy equivalence class by removing elements. - pub(super) fn meet_copy_equivalence(&self, property: &mut BitSet) { + pub(super) fn meet_copy_equivalence(&self, property: &mut DenseBitSet) { // Consolidate to have a local iff all its copies are. // // `copy_classes` defines equivalence classes between locals. The `local`s that recursively @@ -241,7 +241,7 @@ struct SsaVisitor<'a, 'tcx> { assignment_order: Vec, direct_uses: IndexVec, // Track locals that are immutably borrowed, so we can check their type is `Freeze` later. - borrowed_locals: BitSet, + borrowed_locals: DenseBitSet, } impl SsaVisitor<'_, '_> { @@ -396,7 +396,7 @@ pub(crate) struct StorageLiveLocals { impl StorageLiveLocals { pub(crate) fn new( body: &Body<'_>, - always_storage_live_locals: &BitSet, + always_storage_live_locals: &DenseBitSet, ) -> StorageLiveLocals { let mut storage_live = IndexVec::from_elem(Set1::Empty, &body.local_decls); for local in always_storage_live_locals.iter() { diff --git a/compiler/rustc_mir_transform/src/unreachable_enum_branching.rs b/compiler/rustc_mir_transform/src/unreachable_enum_branching.rs index 57e255b7c32c6..55dcad0680a96 100644 --- a/compiler/rustc_mir_transform/src/unreachable_enum_branching.rs +++ b/compiler/rustc_mir_transform/src/unreachable_enum_branching.rs @@ -54,6 +54,10 @@ fn variant_discriminants<'tcx>( tcx: TyCtxt<'tcx>, ) -> FxHashSet { match &layout.variants { + Variants::Empty => { + // Uninhabited, no valid discriminant. + FxHashSet::default() + } Variants::Single { index } => { let mut res = FxHashSet::default(); res.insert( diff --git a/compiler/rustc_mir_transform/src/validate.rs b/compiler/rustc_mir_transform/src/validate.rs index bce015046e1da..414477d900402 100644 --- a/compiler/rustc_mir_transform/src/validate.rs +++ b/compiler/rustc_mir_transform/src/validate.rs @@ -1,10 +1,11 @@ //! Validates the MIR to ensure that invariants are upheld. use rustc_abi::{ExternAbi, FIRST_VARIANT, Size}; +use rustc_attr_parsing::InlineAttr; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_hir::LangItem; use rustc_index::IndexVec; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::DenseBitSet; use rustc_infer::infer::TyCtxtInferExt; use rustc_infer::traits::{Obligation, ObligationCause}; use rustc_middle::mir::coverage::CoverageKind; @@ -97,7 +98,7 @@ struct CfgChecker<'a, 'tcx> { body: &'a Body<'tcx>, tcx: TyCtxt<'tcx>, unwind_edge_count: usize, - reachable_blocks: BitSet, + reachable_blocks: DenseBitSet, value_cache: FxHashSet, // If `false`, then the MIR must not contain `UnwindAction::Continue` or // `TerminatorKind::Resume`. @@ -366,7 +367,8 @@ impl<'a, 'tcx> Visitor<'tcx> for CfgChecker<'a, 'tcx> { self.check_edge(location, *target, EdgeKind::Normal); self.check_unwind_edge(location, *unwind); } - TerminatorKind::Call { args, .. } | TerminatorKind::TailCall { args, .. 
} => { + TerminatorKind::Call { func, args, .. } + | TerminatorKind::TailCall { func, args, .. } => { // FIXME(explicit_tail_calls): refactor this & add tail-call specific checks if let TerminatorKind::Call { target, unwind, destination, .. } = terminator.kind { if let Some(target) = target { @@ -419,6 +421,13 @@ impl<'a, 'tcx> Visitor<'tcx> for CfgChecker<'a, 'tcx> { } } } + + if let ty::FnDef(did, ..) = func.ty(&self.body.local_decls, self.tcx).kind() + && self.body.phase >= MirPhase::Runtime(RuntimePhase::Optimized) + && matches!(self.tcx.codegen_fn_attrs(did).inline, InlineAttr::Force { .. }) + { + self.fail(location, "`#[rustc_force_inline]`-annotated function not inlined"); + } } TerminatorKind::Assert { target, unwind, .. } => { self.check_edge(location, *target, EdgeKind::Normal); @@ -1009,14 +1018,6 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { } } Rvalue::Ref(..) => {} - Rvalue::Len(p) => { - let pty = p.ty(&self.body.local_decls, self.tcx).ty; - check_kinds!( - pty, - "Cannot compute length of non-array type {:?}", - ty::Array(..) | ty::Slice(..) - ); - } Rvalue::BinaryOp(op, vals) => { use BinOp::*; let a = vals.0.ty(&self.body.local_decls, self.tcx); diff --git a/compiler/rustc_monomorphize/src/collector.rs b/compiler/rustc_monomorphize/src/collector.rs index 480d82c1a385b..bb603df112942 100644 --- a/compiler/rustc_monomorphize/src/collector.rs +++ b/compiler/rustc_monomorphize/src/collector.rs @@ -207,6 +207,7 @@ use std::path::PathBuf; +use rustc_attr_parsing::InlineAttr; use rustc_data_structures::fx::FxIndexMap; use rustc_data_structures::sync::{LRef, MTLock, par_for_each_in}; use rustc_data_structures::unord::{UnordMap, UnordSet}; @@ -224,15 +225,14 @@ use rustc_middle::ty::adjustment::{CustomCoerceUnsized, PointerCoercion}; use rustc_middle::ty::layout::ValidityRequirement; use rustc_middle::ty::print::{shrunk_instance_name, with_no_trimmed_paths}; use rustc_middle::ty::{ - self, GenericArgs, GenericParamDefKind, Instance, InstanceKind, Ty, TyCtxt, TypeFoldable, - TypeVisitableExt, VtblEntry, + self, GenericArgs, GenericParamDefKind, Instance, InstanceKind, Interner, Ty, TyCtxt, + TypeFoldable, TypeVisitableExt, VtblEntry, }; use rustc_middle::util::Providers; use rustc_middle::{bug, span_bug}; use rustc_session::Limit; use rustc_session::config::EntryFnType; use rustc_span::source_map::{Spanned, dummy_spanned, respan}; -use rustc_span::symbol::sym; use rustc_span::{DUMMY_SP, Span}; use tracing::{debug, instrument, trace}; @@ -895,9 +895,8 @@ fn visit_instance_use<'tcx>( if !tcx.should_codegen_locally(instance) { return; } - if let ty::InstanceKind::Intrinsic(def_id) = instance.def { - let name = tcx.item_name(def_id); - if let Some(_requirement) = ValidityRequirement::from_intrinsic(name) { + if let Some(intrinsic) = tcx.intrinsic(instance.def_id()) { + if let Some(_requirement) = ValidityRequirement::from_intrinsic(intrinsic.name) { // The intrinsics assert_inhabited, assert_zero_valid, and assert_mem_uninitialized_valid will // be lowered in codegen to nothing or a call to panic_nounwind. 
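// [Illustrative sketch, not part of the patch.] Toy model of the validator rule added
// above: once the MIR has reached its final post-optimization phase, any remaining call
// to a function marked "force inline" is reported, because such callees are expected to
// have been inlined and are never codegenned. Types here are simplified stand-ins.
#[derive(Clone, Copy, PartialEq)]
enum Phase { Built, RuntimeOptimized }

struct Callee { name: &'static str, force_inline: bool }

fn validate_calls(phase: Phase, calls: &[Callee]) -> Vec<String> {
    let mut errors = Vec::new();
    if phase == Phase::RuntimeOptimized {
        for callee in calls {
            if callee.force_inline {
                errors.push(format!("`{}` was not inlined despite being force-inline", callee.name));
            }
        }
    }
    errors
}

fn main() {
    let calls = [Callee { name: "always_inlined", force_inline: true }];
    assert!(validate_calls(Phase::Built, &calls).is_empty());
    assert_eq!(validate_calls(Phase::RuntimeOptimized, &calls).len(), 1);
}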
So if we encounter any // of those intrinsics, we need to include a mono item for panic_nounwind, else we may try to @@ -907,11 +906,12 @@ fn visit_instance_use<'tcx>( if tcx.should_codegen_locally(panic_instance) { output.push(create_fn_mono_item(tcx, panic_instance, source)); } - } else if tcx.has_attr(def_id, sym::rustc_intrinsic) - && !tcx.has_attr(def_id, sym::rustc_intrinsic_must_be_overridden) - { - // Codegen the fallback body of intrinsics with fallback bodies - let instance = ty::Instance::new(def_id, instance.args); + } else if !intrinsic.must_be_overridden { + // Codegen the fallback body of intrinsics with fallback bodies. + // We explicitly skip this otherwise to ensure we get a linker error + // if anyone tries to call this intrinsic and the codegen backend did not + // override the implementation. + let instance = ty::Instance::new(instance.def_id(), instance.args); if tcx.should_codegen_locally(instance) { output.push(create_fn_mono_item(tcx, instance, source)); } @@ -960,6 +960,14 @@ fn should_codegen_locally<'tcx>(tcx: TyCtxtAt<'tcx>, instance: Instance<'tcx>) - return false; } + if tcx.def_kind(def_id).has_codegen_attrs() + && matches!(tcx.codegen_fn_attrs(def_id).inline, InlineAttr::Force { .. }) + { + // `#[rustc_force_inline]` items should never be codegened. This should be caught by + // the MIR validator. + tcx.delay_bug("attempt to codegen `#[rustc_force_inline]` item"); + } + if def_id.is_local() { // Local items cannot be referred to locally without monomorphizing them locally. return true; @@ -1369,6 +1377,10 @@ fn collect_roots(tcx: TyCtxt<'_>, mode: MonoItemCollectionStrategy) -> Vec RootCollector<'_, 'v> { match self.tcx.def_kind(id.owner_id) { DefKind::Enum | DefKind::Struct | DefKind::Union => { if self.strategy == MonoItemCollectionStrategy::Eager - && self.tcx.generics_of(id.owner_id).is_empty() + && !self.tcx.generics_of(id.owner_id).requires_monomorphization(self.tcx) { debug!("RootCollector: ADT drop-glue for `{id:?}`",); + let id_args = + ty::GenericArgs::for_item(self.tcx, id.owner_id.to_def_id(), |param, _| { + match param.kind { + GenericParamDefKind::Lifetime => { + self.tcx.lifetimes.re_erased.into() + } + GenericParamDefKind::Type { .. } + | GenericParamDefKind::Const { .. } => { + unreachable!( + "`own_requires_monomorphization` check means that \ + we should have no type/const params" + ) + } + } + }); // This type is impossible to instantiate, so we should not try to // generate a `drop_in_place` instance for it. 
if self.tcx.instantiate_and_check_impossible_predicates(( id.owner_id.to_def_id(), - ty::List::empty(), + id_args, )) { return; } - let ty = self.tcx.type_of(id.owner_id.to_def_id()).no_bound_vars().unwrap(); + let ty = + self.tcx.type_of(id.owner_id.to_def_id()).instantiate(self.tcx, id_args); + assert!(!ty.has_non_region_param()); visit_drop_use(self.tcx, ty, true, DUMMY_SP, self.output); } } @@ -1451,10 +1480,39 @@ impl<'v> RootCollector<'_, 'v> { } } + fn process_nested_body(&mut self, def_id: LocalDefId) { + match self.tcx.def_kind(def_id) { + DefKind::Closure => { + if self.strategy == MonoItemCollectionStrategy::Eager + && !self + .tcx + .generics_of(self.tcx.typeck_root_def_id(def_id.to_def_id())) + .requires_monomorphization(self.tcx) + { + let instance = match *self.tcx.type_of(def_id).instantiate_identity().kind() { + ty::Closure(def_id, args) + | ty::Coroutine(def_id, args) + | ty::CoroutineClosure(def_id, args) => { + Instance::new(def_id, self.tcx.erase_regions(args)) + } + _ => unreachable!(), + }; + let mono_item = create_fn_mono_item(self.tcx, instance, DUMMY_SP); + if mono_item.node.is_instantiable(self.tcx) { + self.output.push(mono_item); + } + } + } + _ => {} + } + } + fn is_root(&self, def_id: LocalDefId) -> bool { !self.tcx.generics_of(def_id).requires_monomorphization(self.tcx) && match self.strategy { - MonoItemCollectionStrategy::Eager => true, + MonoItemCollectionStrategy::Eager => { + !matches!(self.tcx.codegen_fn_attrs(def_id).inline, InlineAttr::Force { .. }) + } MonoItemCollectionStrategy::Lazy => { self.entry_fn.and_then(|(id, _)| id.as_local()) == Some(def_id) || self.tcx.is_reachable_non_generic(def_id) diff --git a/compiler/rustc_monomorphize/src/mono_checks/move_check.rs b/compiler/rustc_monomorphize/src/mono_checks/move_check.rs index 438d49fd7fb4a..02b9397f9a995 100644 --- a/compiler/rustc_monomorphize/src/mono_checks/move_check.rs +++ b/compiler/rustc_monomorphize/src/mono_checks/move_check.rs @@ -6,9 +6,8 @@ use rustc_middle::mir::{self, Location, traversal}; use rustc_middle::ty::{self, AssocKind, Instance, Ty, TyCtxt, TypeFoldable}; use rustc_session::Limit; use rustc_session::lint::builtin::LARGE_ASSIGNMENTS; -use rustc_span::Span; use rustc_span::source_map::Spanned; -use rustc_span::symbol::{Ident, sym}; +use rustc_span::{Ident, Span, sym}; use tracing::{debug, trace}; use crate::errors::LargeAssignmentsLint; diff --git a/compiler/rustc_monomorphize/src/partitioning.rs b/compiler/rustc_monomorphize/src/partitioning.rs index 33e1065f051b2..7b17966343084 100644 --- a/compiler/rustc_monomorphize/src/partitioning.rs +++ b/compiler/rustc_monomorphize/src/partitioning.rs @@ -117,7 +117,7 @@ use rustc_middle::ty::{self, InstanceKind, TyCtxt}; use rustc_middle::util::Providers; use rustc_session::CodegenUnits; use rustc_session::config::{DumpMonoStatsFormat, SwitchWithOptPath}; -use rustc_span::symbol::Symbol; +use rustc_span::Symbol; use rustc_target::spec::SymbolVisibility; use tracing::debug; diff --git a/compiler/rustc_next_trait_solver/src/canonicalizer.rs b/compiler/rustc_next_trait_solver/src/canonicalizer.rs index 63608f9e85615..8a54a4ece983e 100644 --- a/compiler/rustc_next_trait_solver/src/canonicalizer.rs +++ b/compiler/rustc_next_trait_solver/src/canonicalizer.rs @@ -3,6 +3,7 @@ use std::cmp::Ordering; use rustc_type_ir::data_structures::HashMap; use rustc_type_ir::fold::{TypeFoldable, TypeFolder, TypeSuperFoldable}; use rustc_type_ir::inherent::*; +use rustc_type_ir::solve::{Goal, QueryInput}; use rustc_type_ir::visit::TypeVisitableExt; use 
rustc_type_ir::{ self as ty, Canonical, CanonicalTyVarKind, CanonicalVarInfo, CanonicalVarKind, InferCtxtLike, @@ -17,8 +18,11 @@ use crate::delegate::SolverDelegate; /// while canonicalizing the response happens in the context of the /// query. #[derive(Debug, Clone, Copy)] -pub enum CanonicalizeMode { - Input, +enum CanonicalizeMode { + /// When canonicalizing the `param_env`, we keep `'static` as merging + /// trait candidates relies on it when deciding whether a where-bound + /// is trivial. + Input { keep_static: bool }, /// FIXME: We currently return region constraints referring to /// placeholders and inference variables from a binder instantiated /// inside of the query. @@ -59,15 +63,15 @@ pub struct Canonicalizer<'a, D: SolverDelegate, I: Interner> { } impl<'a, D: SolverDelegate, I: Interner> Canonicalizer<'a, D, I> { - pub fn canonicalize>( + pub fn canonicalize_response>( delegate: &'a D, - canonicalize_mode: CanonicalizeMode, + max_input_universe: ty::UniverseIndex, variables: &'a mut Vec, value: T, ) -> ty::Canonical { let mut canonicalizer = Canonicalizer { delegate, - canonicalize_mode, + canonicalize_mode: CanonicalizeMode::Response { max_input_universe }, variables, variable_lookup_table: Default::default(), @@ -80,9 +84,67 @@ impl<'a, D: SolverDelegate, I: Interner> Canonicalizer<'a, D, I> { let value = value.fold_with(&mut canonicalizer); assert!(!value.has_infer(), "unexpected infer in {value:?}"); assert!(!value.has_placeholders(), "unexpected placeholders in {value:?}"); - let (max_universe, variables) = canonicalizer.finalize(); + Canonical { max_universe, variables, value } + } + + /// When canonicalizing query inputs, we keep `'static` in the `param_env` + /// but erase it everywhere else. We generally don't want to depend on region + /// identity, so while it should not matter whether `'static` is kept in the + /// value or opaque type storage as well, this prevents us from accidentally + /// relying on it in the future. + /// + /// We want to keep the option of canonicalizing `'static` to an existential + /// variable in the future by changing the way we detect global where-bounds. + pub fn canonicalize_input>( + delegate: &'a D, + variables: &'a mut Vec, + input: QueryInput, + ) -> ty::Canonical> { + // First canonicalize the `param_env` while keeping `'static` + let mut env_canonicalizer = Canonicalizer { + delegate, + canonicalize_mode: CanonicalizeMode::Input { keep_static: true }, + + variables, + variable_lookup_table: Default::default(), + primitive_var_infos: Vec::new(), + binder_index: ty::INNERMOST, + + cache: Default::default(), + }; + let param_env = input.goal.param_env.fold_with(&mut env_canonicalizer); + debug_assert_eq!(env_canonicalizer.binder_index, ty::INNERMOST); + // Then canonicalize the rest of the input without keeping `'static` + // while *mostly* reusing the canonicalizer from above. + let mut rest_canonicalizer = Canonicalizer { + delegate, + canonicalize_mode: CanonicalizeMode::Input { keep_static: false }, + + variables: env_canonicalizer.variables, + // We're able to reuse the `variable_lookup_table` as whether or not + // it already contains an entry for `'static` does not matter. + variable_lookup_table: env_canonicalizer.variable_lookup_table, + primitive_var_infos: env_canonicalizer.primitive_var_infos, + binder_index: ty::INNERMOST, + // We do not reuse the cache as it may contain entries whose canonicalized + // value contains `'static`. 
While we could alternatively handle this by + // checking for `'static` when using cached entries, this does not + // feel worth the effort. I do not expect that a `ParamEnv` will ever + // contain large enough types for caching to be necessary. + cache: Default::default(), + }; + + let predicate = input.goal.predicate.fold_with(&mut rest_canonicalizer); + let goal = Goal { param_env, predicate }; + let predefined_opaques_in_body = + input.predefined_opaques_in_body.fold_with(&mut rest_canonicalizer); + let value = QueryInput { goal, predefined_opaques_in_body }; + + assert!(!value.has_infer(), "unexpected infer in {value:?}"); + assert!(!value.has_placeholders(), "unexpected placeholders in {value:?}"); + let (max_universe, variables) = rest_canonicalizer.finalize(); Canonical { max_universe, variables, value } } @@ -126,7 +188,7 @@ impl<'a, D: SolverDelegate, I: Interner> Canonicalizer<'a, D, I> { // all information which should not matter for the solver. // // For this we compress universes as much as possible. - CanonicalizeMode::Input => {} + CanonicalizeMode::Input { .. } => {} // When canonicalizing a response we map a universes already entered // by the caller to the root universe and only return useful universe // information for placeholders and inference variables created inside @@ -290,17 +352,15 @@ impl<'a, D: SolverDelegate, I: Interner> Canonicalizer<'a, D, I> { } }, ty::Placeholder(placeholder) => match self.canonicalize_mode { - CanonicalizeMode::Input => CanonicalVarKind::PlaceholderTy(PlaceholderLike::new( - placeholder.universe(), - self.variables.len().into(), - )), + CanonicalizeMode::Input { .. } => CanonicalVarKind::PlaceholderTy( + PlaceholderLike::new(placeholder.universe(), self.variables.len().into()), + ), CanonicalizeMode::Response { .. } => CanonicalVarKind::PlaceholderTy(placeholder), }, ty::Param(_) => match self.canonicalize_mode { - CanonicalizeMode::Input => CanonicalVarKind::PlaceholderTy(PlaceholderLike::new( - ty::UniverseIndex::ROOT, - self.variables.len().into(), - )), + CanonicalizeMode::Input { .. } => CanonicalVarKind::PlaceholderTy( + PlaceholderLike::new(ty::UniverseIndex::ROOT, self.variables.len().into()), + ), CanonicalizeMode::Response { .. } => panic!("param ty in response: {t:?}"), }, ty::Bool @@ -318,6 +378,7 @@ impl<'a, D: SolverDelegate, I: Interner> Canonicalizer<'a, D, I> { | ty::Pat(_, _) | ty::FnDef(_, _) | ty::FnPtr(..) + | ty::UnsafeBinder(_) | ty::Dynamic(_, _, _) | ty::Closure(..) | ty::CoroutineClosure(..) @@ -357,21 +418,30 @@ impl, I: Interner> TypeFolder for Canonicaliz let kind = match r.kind() { ty::ReBound(..) => return r, - // We may encounter `ReStatic` in item signatures or the hidden type - // of an opaque. `ReErased` should only be encountered in the hidden + // We don't canonicalize `ReStatic` in the `param_env` as we use it + // when checking whether a `ParamEnv` candidate is global. + ty::ReStatic => match self.canonicalize_mode { + CanonicalizeMode::Input { keep_static: false } => { + CanonicalVarKind::Region(ty::UniverseIndex::ROOT) + } + CanonicalizeMode::Input { keep_static: true } + | CanonicalizeMode::Response { .. } => return r, + }, + + // `ReErased` should only be encountered in the hidden // type of an opaque for regions that are ignored for the purposes of // captures. // // FIXME: We should investigate the perf implications of not uniquifying // `ReErased`. We may be able to short-circuit registering region // obligations if we encounter a `ReErased` on one side, for example. 
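// [Illustrative sketch, not part of the patch.] Toy model of the two-pass input
// canonicalization above: the same variable lookup table is reused for the `param_env`
// pass (which keeps `'static`) and for the rest of the input (which replaces `'static`
// with an existential variable), so equal regions still map to the same canonical
// variable across both passes. All types below are toy stand-ins.
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum Region { Static, Named(u32) }

#[derive(Clone, Copy, PartialEq, Debug)]
enum Canonical { Static, Var(usize) }

struct Canonicalizer { keep_static: bool, lookup: HashMap<Region, usize> }

impl Canonicalizer {
    fn fold_region(&mut self, r: Region) -> Canonical {
        if self.keep_static && r == Region::Static {
            return Canonical::Static;
        }
        let next = self.lookup.len();
        Canonical::Var(*self.lookup.entry(r).or_insert(next))
    }
}

fn main() {
    // Pass 1: canonicalize the param_env, keeping `'static` as-is.
    let mut c = Canonicalizer { keep_static: true, lookup: HashMap::new() };
    assert_eq!(c.fold_region(Region::Static), Canonical::Static);
    assert_eq!(c.fold_region(Region::Named(0)), Canonical::Var(0));

    // Pass 2: canonicalize the rest of the input, reusing the lookup table
    // but now erasing `'static` into a fresh existential variable.
    c.keep_static = false;
    assert_eq!(c.fold_region(Region::Named(0)), Canonical::Var(0)); // same variable
    assert_eq!(c.fold_region(Region::Static), Canonical::Var(1));   // fresh variable
}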
- ty::ReStatic | ty::ReErased | ty::ReError(_) => match self.canonicalize_mode { - CanonicalizeMode::Input => CanonicalVarKind::Region(ty::UniverseIndex::ROOT), + ty::ReErased | ty::ReError(_) => match self.canonicalize_mode { + CanonicalizeMode::Input { .. } => CanonicalVarKind::Region(ty::UniverseIndex::ROOT), CanonicalizeMode::Response { .. } => return r, }, ty::ReEarlyParam(_) | ty::ReLateParam(_) => match self.canonicalize_mode { - CanonicalizeMode::Input => CanonicalVarKind::Region(ty::UniverseIndex::ROOT), + CanonicalizeMode::Input { .. } => CanonicalVarKind::Region(ty::UniverseIndex::ROOT), CanonicalizeMode::Response { .. } => { panic!("unexpected region in response: {r:?}") } @@ -379,7 +449,7 @@ impl, I: Interner> TypeFolder for Canonicaliz ty::RePlaceholder(placeholder) => match self.canonicalize_mode { // We canonicalize placeholder regions as existentials in query inputs. - CanonicalizeMode::Input => CanonicalVarKind::Region(ty::UniverseIndex::ROOT), + CanonicalizeMode::Input { .. } => CanonicalVarKind::Region(ty::UniverseIndex::ROOT), CanonicalizeMode::Response { max_input_universe } => { // If we have a placeholder region inside of a query, it must be from // a new universe. @@ -397,7 +467,9 @@ impl, I: Interner> TypeFolder for Canonicaliz "region vid should have been resolved fully before canonicalization" ); match self.canonicalize_mode { - CanonicalizeMode::Input => CanonicalVarKind::Region(ty::UniverseIndex::ROOT), + CanonicalizeMode::Input { keep_static: _ } => { + CanonicalVarKind::Region(ty::UniverseIndex::ROOT) + } CanonicalizeMode::Response { .. } => { CanonicalVarKind::Region(self.delegate.universe_of_lt(vid).unwrap()) } @@ -434,7 +506,7 @@ impl, I: Interner> TypeFolder for Canonicaliz ty::InferConst::Fresh(_) => todo!(), }, ty::ConstKind::Placeholder(placeholder) => match self.canonicalize_mode { - CanonicalizeMode::Input => CanonicalVarKind::PlaceholderConst( + CanonicalizeMode::Input { .. } => CanonicalVarKind::PlaceholderConst( PlaceholderLike::new(placeholder.universe(), self.variables.len().into()), ), CanonicalizeMode::Response { .. } => { @@ -442,7 +514,7 @@ impl, I: Interner> TypeFolder for Canonicaliz } }, ty::ConstKind::Param(_) => match self.canonicalize_mode { - CanonicalizeMode::Input => CanonicalVarKind::PlaceholderConst( + CanonicalizeMode::Input { .. } => CanonicalVarKind::PlaceholderConst( PlaceholderLike::new(ty::UniverseIndex::ROOT, self.variables.len().into()), ), CanonicalizeMode::Response { .. } => panic!("param ty in response: {c:?}"), diff --git a/compiler/rustc_next_trait_solver/src/coherence.rs b/compiler/rustc_next_trait_solver/src/coherence.rs index 2461ef0c0df57..408742747c2a1 100644 --- a/compiler/rustc_next_trait_solver/src/coherence.rs +++ b/compiler/rustc_next_trait_solver/src/coherence.rs @@ -339,7 +339,9 @@ where | ty::Slice(..) | ty::RawPtr(..) | ty::Never - | ty::Tuple(..) => self.found_non_local_ty(ty), + | ty::Tuple(..) + // FIXME(unsafe_binders): Non-local? + | ty::UnsafeBinder(_) => self.found_non_local_ty(ty), ty::Param(..) 
=> panic!("unexpected ty param"), diff --git a/compiler/rustc_next_trait_solver/src/delegate.rs b/compiler/rustc_next_trait_solver/src/delegate.rs index 4ba54a2e0bf23..2d2d50e62f9a1 100644 --- a/compiler/rustc_next_trait_solver/src/delegate.rs +++ b/compiler/rustc_next_trait_solver/src/delegate.rs @@ -95,7 +95,10 @@ pub trait SolverDelegate: Deref::Infcx> + Size goal_trait_ref: ty::TraitRef, trait_assoc_def_id: ::DefId, impl_def_id: ::DefId, - ) -> Result::DefId>, NoSolution>; + ) -> Result< + Option<::DefId>, + ::ErrorGuaranteed, + >; fn is_transmutable( &self, diff --git a/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs b/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs index 198ccb000f345..63432dc199b98 100644 --- a/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs +++ b/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs @@ -11,6 +11,7 @@ use rustc_type_ir::visit::TypeVisitableExt as _; use rustc_type_ir::{self as ty, Interner, TypingMode, Upcast as _, elaborate}; use tracing::{debug, instrument}; +use super::trait_goals::TraitGoalProvenVia; use crate::delegate::SolverDelegate; use crate::solve::inspect::ProbeKind; use crate::solve::{ @@ -337,15 +338,6 @@ where self.assemble_param_env_candidates(goal, &mut candidates); - match self.typing_mode() { - TypingMode::Coherence => {} - TypingMode::Analysis { .. } - | TypingMode::PostBorrowckAnalysis { .. } - | TypingMode::PostAnalysis => { - self.discard_impls_shadowed_by_env(goal, &mut candidates); - } - } - candidates } @@ -500,7 +492,7 @@ where goal: Goal, candidates: &mut Vec>, ) { - for (i, assumption) in goal.param_env.caller_bounds().into_iter().enumerate() { + for (i, assumption) in goal.param_env.caller_bounds().iter().enumerate() { candidates.extend(G::probe_and_consider_implied_clause( self, CandidateSource::ParamEnv(i), @@ -553,6 +545,7 @@ where | ty::Ref(_, _, _) | ty::FnDef(_, _) | ty::FnPtr(..) + | ty::UnsafeBinder(_) | ty::Dynamic(..) | ty::Closure(..) | ty::CoroutineClosure(..) @@ -642,6 +635,7 @@ where | ty::Ref(_, _, _) | ty::FnDef(_, _) | ty::FnPtr(..) + | ty::UnsafeBinder(_) | ty::Alias(..) | ty::Closure(..) | ty::CoroutineClosure(..) @@ -733,72 +727,64 @@ where }) } - /// If there's a where-bound for the current goal, do not use any impl candidates - /// to prove the current goal. Most importantly, if there is a where-bound which does - /// not specify any associated types, we do not allow normalizing the associated type - /// by using an impl, even if it would apply. + /// We sadly can't simply take all possible candidates for normalization goals + /// and check whether they result in the same constraints. We want to make sure + /// that trying to normalize an alias doesn't result in constraints which aren't + /// otherwise required. + /// + /// Most notably, when proving a trait goal by via a where-bound, we should not + /// normalize via impls which have stricter region constraints than the where-bound: + /// + /// ```rust + /// trait Trait<'a> { + /// type Assoc; + /// } + /// + /// impl<'a, T: 'a> Trait<'a> for T { + /// type Assoc = u32; + /// } + /// + /// fn with_bound<'a, T: Trait<'a>>(_value: T::Assoc) {} + /// ``` /// - /// - // FIXME(@lcnr): The current structure here makes me unhappy and feels ugly. idk how - // to improve this however. However, this should make it fairly straightforward to refine - // the filtering going forward, so it seems alright-ish for now. 
- #[instrument(level = "debug", skip(self, goal))] - fn discard_impls_shadowed_by_env>( + /// The where-bound of `with_bound` doesn't specify the associated type, so we would + /// only be able to normalize `>::Assoc` by using the impl. This impl + /// adds a `T: 'a` bound however, which would result in a region error. Given that the + /// user explicitly wrote that `T: Trait<'a>` holds, this is undesirable and we instead + /// treat the alias as rigid. + /// + /// See trait-system-refactor-initiative#124 for more details. + #[instrument(level = "debug", skip(self), ret)] + pub(super) fn merge_candidates( &mut self, - goal: Goal, - candidates: &mut Vec>, - ) { - let cx = self.cx(); - let trait_goal: Goal> = - goal.with(cx, goal.predicate.trait_ref(cx)); - - let mut trait_candidates_from_env = vec![]; - self.probe(|_| ProbeKind::ShadowedEnvProbing).enter(|ecx| { - ecx.assemble_param_env_candidates(trait_goal, &mut trait_candidates_from_env); - ecx.assemble_alias_bound_candidates(trait_goal, &mut trait_candidates_from_env); - }); + proven_via: Option, + candidates: Vec>, + ) -> QueryResult { + let Some(proven_via) = proven_via else { + // We don't care about overflow. If proving the trait goal overflowed, then + // it's enough to report an overflow error for that, we don't also have to + // overflow during normalization. + return Ok(self.make_ambiguous_response_no_constraints(MaybeCause::Ambiguity)); + }; - if !trait_candidates_from_env.is_empty() { - let trait_env_result = self.merge_candidates(trait_candidates_from_env); - match trait_env_result.unwrap().value.certainty { - // If proving the trait goal succeeds by using the env, - // we freely drop all impl candidates. - // - // FIXME(@lcnr): It feels like this could easily hide - // a forced ambiguity candidate added earlier. - // This feels dangerous. - Certainty::Yes => { - candidates.retain(|c| match c.source { - CandidateSource::Impl(_) | CandidateSource::BuiltinImpl(_) => { - debug!(?c, "discard impl candidate"); - false - } - CandidateSource::ParamEnv(_) | CandidateSource::AliasBound => true, - CandidateSource::CoherenceUnknowable => panic!("uh oh"), - }); - } - // If it is still ambiguous we instead just force the whole goal - // to be ambig and wait for inference constraints. See - // tests/ui/traits/next-solver/env-shadows-impls/ambig-env-no-shadow.rs - Certainty::Maybe(cause) => { - debug!(?cause, "force ambiguity"); - *candidates = self.forced_ambiguity(cause).into_iter().collect(); - } - } - } - } + let responses: Vec<_> = match proven_via { + // Even when a trait bound has been proven using a where-bound, we + // still need to consider alias-bounds for normalization, see + // tests/ui/next-solver/alias-bound-shadowed-by-env.rs. + // + // FIXME(const_trait_impl): should this behavior also be used by + // constness checking. Doing so is *at least theoretically* breaking, + // see github.com/rust-lang/rust/issues/133044#issuecomment-2500709754 + TraitGoalProvenVia::ParamEnv | TraitGoalProvenVia::AliasBound => candidates + .iter() + .filter(|c| { + matches!(c.source, CandidateSource::AliasBound | CandidateSource::ParamEnv(_)) + }) + .map(|c| c.result) + .collect(), + TraitGoalProvenVia::Misc => candidates.iter().map(|c| c.result).collect(), + }; - /// If there are multiple ways to prove a trait or projection goal, we have - /// to somehow try to merge the candidates into one. If that fails, we return - /// ambiguity. 
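// [Illustrative sketch, not part of the patch.] Toy model of the filtering performed by
// the new `merge_candidates` above: once the corresponding trait goal is known to have
// been proven via a where-bound or an alias-bound, normalization only merges candidates
// from those sources and drops impl candidates; otherwise every candidate stays in play.
// All types here are simplified stand-ins for the real solver types.
#[derive(Clone, Copy, Debug)]
enum Source { Impl, ParamEnv, AliasBound }

#[derive(Clone, Copy, Debug)]
enum ProvenVia { Misc, ParamEnv, AliasBound }

#[derive(Clone, Copy, Debug)]
struct Candidate { source: Source, result: u32 }

fn responses_to_merge(proven_via: ProvenVia, candidates: &[Candidate]) -> Vec<u32> {
    match proven_via {
        // The trait goal was proven by a where-bound or alias-bound: ignore impls.
        ProvenVia::ParamEnv | ProvenVia::AliasBound => candidates
            .iter()
            .filter(|c| matches!(c.source, Source::ParamEnv | Source::AliasBound))
            .map(|c| c.result)
            .collect(),
        // Otherwise consider every candidate.
        ProvenVia::Misc => candidates.iter().map(|c| c.result).collect(),
    }
}

fn main() {
    let candidates = [
        Candidate { source: Source::Impl, result: 0 },
        Candidate { source: Source::ParamEnv, result: 1 },
    ];
    assert_eq!(responses_to_merge(ProvenVia::ParamEnv, &candidates), vec![1]);
    assert_eq!(responses_to_merge(ProvenVia::Misc, &candidates), vec![0, 1]);
}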
- #[instrument(level = "debug", skip(self), ret)] - pub(super) fn merge_candidates(&mut self, candidates: Vec>) -> QueryResult { - // First try merging all candidates. This is complete and fully sound. - let responses = candidates.iter().map(|c| c.result).collect::>(); - if let Some(result) = self.try_merge_responses(&responses) { - return Ok(result); - } else { - self.flounder(&responses) - } + self.try_merge_responses(&responses).map_or_else(|| self.flounder(&responses), Ok) } } diff --git a/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs b/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs index 05ce61bc0678f..3c5d9b95e772d 100644 --- a/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs +++ b/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs @@ -83,6 +83,8 @@ where .map(|bty| bty.instantiate(cx, args)) .collect()), + ty::UnsafeBinder(bound_ty) => Ok(vec![bound_ty.into()]), + // For `PhantomData`, we pass `T`. ty::Adt(def, args) if def.is_phantom_data() => Ok(vec![ty::Binder::dummy(args.type_at(0))]), @@ -144,6 +146,8 @@ where panic!("unexpected type `{ty:?}`") } + ty::UnsafeBinder(bound_ty) => Ok(vec![bound_ty.into()]), + // impl Sized for () // impl Sized for (T1, T2, .., Tn) where Tn: Sized if n >= 1 ty::Tuple(tys) => Ok(tys.last().map_or_else(Vec::new, |ty| vec![ty::Binder::dummy(ty)])), @@ -239,6 +243,8 @@ where } }, + ty::UnsafeBinder(_) => Err(NoSolution), + // impl Copy/Clone for CoroutineWitness where T: Copy/Clone forall T in coroutine_hidden_types ty::CoroutineWitness(def_id, args) => Ok(ecx .cx() @@ -374,6 +380,7 @@ pub(in crate::solve) fn extract_tupled_inputs_and_output_from_callable( | ty::Param(_) | ty::Placeholder(..) | ty::Infer(ty::IntVar(_) | ty::FloatVar(_)) - | ty::Error(_) => return Err(NoSolution), + | ty::Error(_) + | ty::UnsafeBinder(_) => return Err(NoSolution), ty::Bound(..) | ty::Infer(ty::TyVar(_) | ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => { @@ -703,6 +712,8 @@ pub(in crate::solve) fn extract_fn_def_from_const_callable( } } +// NOTE: Keep this in sync with `evaluate_host_effect_for_destruct_goal` in +// the old solver, for as long as that exists. pub(in crate::solve) fn const_conditions_for_destruct( cx: I, self_ty: I::Ty, @@ -764,6 +775,10 @@ pub(in crate::solve) fn const_conditions_for_destruct( | ty::Coroutine(_, _) | ty::CoroutineWitness(_, _) => Err(NoSolution), + // FIXME(unsafe_binders): Unsafe binders could implement `~const Drop` + // if their inner type implements it. + ty::UnsafeBinder(_) => Err(NoSolution), + ty::Dynamic(..) | ty::Param(_) | ty::Alias(..) | ty::Placeholder(_) | ty::Foreign(_) => { Err(NoSolution) } diff --git a/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs b/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs index 81b5199002b23..7669a305d58d1 100644 --- a/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs +++ b/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs @@ -4,6 +4,7 @@ use rustc_type_ir::fast_reject::DeepRejectCtxt; use rustc_type_ir::inherent::*; use rustc_type_ir::lang_items::TraitSolverLangItem; +use rustc_type_ir::solve::inspect::ProbeKind; use rustc_type_ir::{self as ty, Interner, elaborate}; use tracing::instrument; @@ -102,7 +103,7 @@ where |ecx| { // Const conditions must hold for the implied const bound to hold. 
ecx.add_goals( - GoalSource::Misc, + GoalSource::AliasBoundConstCondition, cx.const_conditions(alias_ty.def_id) .iter_instantiated(cx, alias_ty.args) .map(|trait_ref| { @@ -352,7 +353,7 @@ where ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc).enter(|ecx| { ecx.add_goals( - GoalSource::Misc, + GoalSource::AliasBoundConstCondition, const_conditions.into_iter().map(|trait_ref| { goal.with( cx, @@ -391,6 +392,11 @@ where goal: Goal>, ) -> QueryResult { let candidates = self.assemble_and_evaluate_candidates(goal); - self.merge_candidates(candidates) + let (_, proven_via) = self.probe(|_| ProbeKind::ShadowedEnvProbing).enter(|ecx| { + let trait_goal: Goal> = + goal.with(ecx.cx(), goal.predicate.trait_ref); + ecx.compute_trait_goal(trait_goal) + })?; + self.merge_candidates(proven_via, candidates) } } diff --git a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs index a143af13688ba..e99cd3d272700 100644 --- a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs +++ b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs @@ -18,7 +18,7 @@ use rustc_type_ir::relate::solver_relating::RelateExt; use rustc_type_ir::{self as ty, Canonical, CanonicalVarValues, InferCtxtLike, Interner}; use tracing::{debug, instrument, trace}; -use crate::canonicalizer::{CanonicalizeMode, Canonicalizer}; +use crate::canonicalizer::Canonicalizer; use crate::delegate::SolverDelegate; use crate::resolve::EagerResolver; use crate::solve::eval_ctxt::NestedGoals; @@ -60,17 +60,13 @@ where (goal, opaque_types).fold_with(&mut EagerResolver::new(self.delegate)); let mut orig_values = Default::default(); - let canonical = Canonicalizer::canonicalize( - self.delegate, - CanonicalizeMode::Input, - &mut orig_values, - QueryInput { + let canonical = + Canonicalizer::canonicalize_input(self.delegate, &mut orig_values, QueryInput { goal, predefined_opaques_in_body: self .cx() .mk_predefined_opaques_in_body(PredefinedOpaquesData { opaque_types }), - }, - ); + }); let query_input = ty::CanonicalQueryInput { canonical, typing_mode: self.typing_mode() }; (orig_values, query_input) } @@ -148,9 +144,9 @@ where .region_constraints .retain(|outlives| outlives.0.as_region().map_or(true, |re| re != outlives.1)); - let canonical = Canonicalizer::canonicalize( + let canonical = Canonicalizer::canonicalize_response( self.delegate, - CanonicalizeMode::Response { max_input_universe: self.max_input_universe }, + self.max_input_universe, &mut Default::default(), Response { var_values, @@ -428,12 +424,7 @@ where let var_values = CanonicalVarValues { var_values: delegate.cx().mk_args(var_values) }; let state = inspect::State { var_values, data }; let state = state.fold_with(&mut EagerResolver::new(delegate)); - Canonicalizer::canonicalize( - delegate, - CanonicalizeMode::Response { max_input_universe }, - &mut vec![], - state, - ) + Canonicalizer::canonicalize_response(delegate, max_input_universe, &mut vec![], state) } // FIXME: needs to be pub to be accessed by downstream diff --git a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs index 70ceb22bfea58..91ad24bff6743 100644 --- a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs +++ b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs @@ -440,7 +440,7 @@ where if let Some(kind) = kind.no_bound_vars() { match kind { ty::PredicateKind::Clause(ty::ClauseKind::Trait(predicate)) => { - 
self.compute_trait_goal(Goal { param_env, predicate }) + self.compute_trait_goal(Goal { param_env, predicate }).map(|(r, _via)| r) } ty::PredicateKind::Clause(ty::ClauseKind::HostEffect(predicate)) => { self.compute_host_effect_goal(Goal { param_env, predicate }) @@ -950,7 +950,7 @@ where goal_trait_ref: ty::TraitRef, trait_assoc_def_id: I::DefId, impl_def_id: I::DefId, - ) -> Result, NoSolution> { + ) -> Result, I::ErrorGuaranteed> { self.delegate.fetch_eligible_assoc_item(goal_trait_ref, trait_assoc_def_id, impl_def_id) } diff --git a/compiler/rustc_next_trait_solver/src/solve/mod.rs b/compiler/rustc_next_trait_solver/src/solve/mod.rs index ebf1013db1ecb..37678bfd8805f 100644 --- a/compiler/rustc_next_trait_solver/src/solve/mod.rs +++ b/compiler/rustc_next_trait_solver/src/solve/mod.rs @@ -243,22 +243,27 @@ where .copied() } + fn bail_with_ambiguity(&mut self, responses: &[CanonicalResponse]) -> CanonicalResponse { + debug_assert!(!responses.is_empty()); + if let Certainty::Maybe(maybe_cause) = + responses.iter().fold(Certainty::AMBIGUOUS, |certainty, response| { + certainty.unify_with(response.value.certainty) + }) + { + self.make_ambiguous_response_no_constraints(maybe_cause) + } else { + panic!("expected flounder response to be ambiguous") + } + } + /// If we fail to merge responses we flounder and return overflow or ambiguity. #[instrument(level = "trace", skip(self), ret)] fn flounder(&mut self, responses: &[CanonicalResponse]) -> QueryResult { if responses.is_empty() { return Err(NoSolution); + } else { + Ok(self.bail_with_ambiguity(responses)) } - - let Certainty::Maybe(maybe_cause) = - responses.iter().fold(Certainty::AMBIGUOUS, |certainty, response| { - certainty.unify_with(response.value.certainty) - }) - else { - panic!("expected flounder response to be ambiguous") - }; - - Ok(self.make_ambiguous_response_no_constraints(maybe_cause)) } /// Normalize a type for when it is structurally matched on. diff --git a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs index 63dbee2640bff..76cbe5758b23a 100644 --- a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs +++ b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs @@ -88,10 +88,17 @@ where /// returns `NoSolution`. #[instrument(level = "trace", skip(self), ret)] fn normalize_at_least_one_step(&mut self, goal: Goal>) -> QueryResult { - match goal.predicate.alias.kind(self.cx()) { + let cx = self.cx(); + match goal.predicate.alias.kind(cx) { ty::AliasTermKind::ProjectionTy | ty::AliasTermKind::ProjectionConst => { let candidates = self.assemble_and_evaluate_candidates(goal); - self.merge_candidates(candidates) + let (_, proven_via) = + self.probe(|_| ProbeKind::ShadowedEnvProbing).enter(|ecx| { + let trait_goal: Goal> = + goal.with(cx, goal.predicate.alias.trait_ref(cx)); + ecx.compute_trait_goal(trait_goal) + })?; + self.merge_candidates(proven_via, candidates) } ty::AliasTermKind::InherentTy => self.normalize_inherent_associated_type(goal), ty::AliasTermKind::OpaqueTy => self.normalize_opaque_type(goal), @@ -237,20 +244,7 @@ where .map(|pred| goal.with(cx, pred)), ); - // In case the associated item is hidden due to specialization, we have to - // return ambiguity this would otherwise be incomplete, resulting in - // unsoundness during coherence (#105782). - let Some(target_item_def_id) = ecx.fetch_eligible_assoc_item( - goal_trait_ref, - goal.predicate.def_id(), - impl_def_id, - )? 
- else { - return ecx.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS); - }; - - let error_response = |ecx: &mut EvalCtxt<'_, D>, msg: &str| { - let guar = cx.delay_bug(msg); + let error_response = |ecx: &mut EvalCtxt<'_, D>, guar| { let error_term = match goal.predicate.alias.kind(cx) { ty::AliasTermKind::ProjectionTy => Ty::new_error(cx, guar).into(), ty::AliasTermKind::ProjectionConst => Const::new_error(cx, guar).into(), @@ -260,8 +254,24 @@ where ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) }; + // In case the associated item is hidden due to specialization, we have to + // return ambiguity this would otherwise be incomplete, resulting in + // unsoundness during coherence (#105782). + let target_item_def_id = match ecx.fetch_eligible_assoc_item( + goal_trait_ref, + goal.predicate.def_id(), + impl_def_id, + ) { + Ok(Some(target_item_def_id)) => target_item_def_id, + Ok(None) => { + return ecx + .evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS); + } + Err(guar) => return error_response(ecx, guar), + }; + if !cx.has_item_definition(target_item_def_id) { - return error_response(ecx, "missing item"); + return error_response(ecx, cx.delay_bug("missing item")); } let target_container_def_id = cx.parent(target_item_def_id); @@ -285,7 +295,10 @@ where )?; if !cx.check_args_compatible(target_item_def_id, target_args) { - return error_response(ecx, "associated item has mismatched arguments"); + return error_response( + ecx, + cx.delay_bug("associated item has mismatched arguments"), + ); } // Finally we construct the actual value of the associated type. @@ -612,6 +625,11 @@ where Some(tail_ty) => Ty::new_projection(cx, metadata_def_id, [tail_ty]), }, + ty::UnsafeBinder(_) => { + // FIXME(unsafe_binder): Figure out how to handle pointee for unsafe binders. + todo!() + } + ty::Infer( ty::TyVar(_) | ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_), ) @@ -815,6 +833,11 @@ where | ty::Tuple(_) | ty::Error(_) => self_ty.discriminant_ty(ecx.cx()), + ty::UnsafeBinder(_) => { + // FIXME(unsafe_binders): instantiate this with placeholders?? i guess?? + todo!("discr subgoal...") + } + // We do not call `Ty::discriminant_ty` on alias, param, or placeholder // types, which return `::Discriminant` // (or ICE in the case of placeholders). Projecting a type to itself @@ -862,6 +885,11 @@ where | ty::Tuple(_) | ty::Error(_) => self_ty.async_destructor_ty(ecx.cx()), + ty::UnsafeBinder(_) => { + // FIXME(unsafe_binders): Instantiate the binder with placeholders I guess. + todo!() + } + // We do not call `Ty::async_destructor_ty` on alias, param, or placeholder // types, which return `::AsyncDestructor` // (or ICE in the case of placeholders). 
Projecting a type to itself diff --git a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs index 12df35d30b8f3..f4d7c3ce76cf2 100644 --- a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs +++ b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs @@ -5,6 +5,7 @@ use rustc_type_ir::data_structures::IndexSet; use rustc_type_ir::fast_reject::DeepRejectCtxt; use rustc_type_ir::inherent::*; use rustc_type_ir::lang_items::TraitSolverLangItem; +use rustc_type_ir::solve::CanonicalResponse; use rustc_type_ir::visit::TypeVisitableExt as _; use rustc_type_ir::{self as ty, Interner, TraitPredicate, TypingMode, Upcast as _, elaborate}; use tracing::{instrument, trace}; @@ -740,12 +741,14 @@ where a_data.principal(), )); } else if let Some(a_principal) = a_data.principal() { - for new_a_principal in - elaborate::supertraits(self.cx(), a_principal.with_self_ty(cx, a_ty)).skip(1) + for (idx, new_a_principal) in + elaborate::supertraits(self.cx(), a_principal.with_self_ty(cx, a_ty)) + .enumerate() + .skip(1) { responses.extend(self.consider_builtin_upcast_to_principal( goal, - CandidateSource::BuiltinImpl(BuiltinImplSource::TraitUpcasting), + CandidateSource::BuiltinImpl(BuiltinImplSource::TraitUpcasting(idx)), a_data, a_region, b_data, @@ -1099,7 +1102,8 @@ where | ty::CoroutineWitness(..) | ty::Never | ty::Tuple(_) - | ty::Adt(_, _) => { + | ty::Adt(_, _) + | ty::UnsafeBinder(_) => { let mut disqualifying_impl = None; self.cx().for_each_relevant_impl( goal.predicate.def_id(), @@ -1147,13 +1151,101 @@ where ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) }) } +} + +/// How we've proven this trait goal. +/// +/// This is used by `NormalizesTo` goals to only normalize +/// by using the same 'kind of candidate' we've used to prove +/// its corresponding trait goal. Most notably, we do not +/// normalize by using an impl if the trait goal has been +/// proven via a `ParamEnv` candidate. +/// +/// This is necessary to avoid unnecessary region constraints, +/// see trait-system-refactor-initiative#125 for more details. +#[derive(Debug, Clone, Copy)] +pub(super) enum TraitGoalProvenVia { + /// We've proven the trait goal by something which is + /// is not a non-global where-bound or an alias-bound. + /// + /// This means we don't disable any candidates during + /// normalization. + Misc, + ParamEnv, + AliasBound, +} + +impl EvalCtxt<'_, D> +where + D: SolverDelegate, + I: Interner, +{ + pub(super) fn merge_trait_candidates( + &mut self, + goal: Goal>, + candidates: Vec>, + ) -> Result<(CanonicalResponse, Option), NoSolution> { + if let TypingMode::Coherence = self.typing_mode() { + let all_candidates: Vec<_> = candidates.into_iter().map(|c| c.result).collect(); + return if let Some(response) = self.try_merge_responses(&all_candidates) { + Ok((response, Some(TraitGoalProvenVia::Misc))) + } else { + self.flounder(&all_candidates).map(|r| (r, None)) + }; + } + + // FIXME: prefer trivial builtin impls + + // If there are non-global where-bounds, prefer where-bounds + // (including global ones) over everything else. 
+ let has_non_global_where_bounds = candidates.iter().any(|c| match c.source { + CandidateSource::ParamEnv(idx) => { + let where_bound = goal.param_env.caller_bounds().get(idx); + where_bound.has_bound_vars() || !where_bound.is_global() + } + _ => false, + }); + if has_non_global_where_bounds { + let where_bounds: Vec<_> = candidates + .iter() + .filter(|c| matches!(c.source, CandidateSource::ParamEnv(_))) + .map(|c| c.result) + .collect(); + + return if let Some(response) = self.try_merge_responses(&where_bounds) { + Ok((response, Some(TraitGoalProvenVia::ParamEnv))) + } else { + Ok((self.bail_with_ambiguity(&where_bounds), None)) + }; + } + + if candidates.iter().any(|c| matches!(c.source, CandidateSource::AliasBound)) { + let alias_bounds: Vec<_> = candidates + .iter() + .filter(|c| matches!(c.source, CandidateSource::AliasBound)) + .map(|c| c.result) + .collect(); + return if let Some(response) = self.try_merge_responses(&alias_bounds) { + Ok((response, Some(TraitGoalProvenVia::AliasBound))) + } else { + Ok((self.bail_with_ambiguity(&alias_bounds), None)) + }; + } + + let all_candidates: Vec<_> = candidates.into_iter().map(|c| c.result).collect(); + if let Some(response) = self.try_merge_responses(&all_candidates) { + Ok((response, Some(TraitGoalProvenVia::Misc))) + } else { + self.flounder(&all_candidates).map(|r| (r, None)) + } + } #[instrument(level = "trace", skip(self))] pub(super) fn compute_trait_goal( &mut self, goal: Goal>, - ) -> QueryResult { + ) -> Result<(CanonicalResponse, Option), NoSolution> { let candidates = self.assemble_and_evaluate_candidates(goal); - self.merge_candidates(candidates) + self.merge_trait_candidates(goal, candidates) } } diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs index 4c4e03cdfa394..f78d9dc2bfc28 100644 --- a/compiler/rustc_parse/src/errors.rs +++ b/compiler/rustc_parse/src/errors.rs @@ -13,8 +13,7 @@ use rustc_errors::{ use rustc_macros::{Diagnostic, Subdiagnostic}; use rustc_session::errors::ExprParenthesesNeeded; use rustc_span::edition::{Edition, LATEST_STABLE_EDITION}; -use rustc_span::symbol::Ident; -use rustc_span::{Span, Symbol}; +use rustc_span::{Ident, Span, Symbol}; use crate::fluent_generated as fluent; use crate::parser::{ForbiddenLetReason, TokenDescription}; diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs index 443ddfc94ec2b..792e2cc26ef1c 100644 --- a/compiler/rustc_parse/src/lexer/mod.rs +++ b/compiler/rustc_parse/src/lexer/mod.rs @@ -14,8 +14,7 @@ use rustc_session::lint::builtin::{ TEXT_DIRECTION_CODEPOINT_IN_COMMENT, }; use rustc_session::parse::ParseSess; -use rustc_span::symbol::Symbol; -use rustc_span::{BytePos, Pos, Span}; +use rustc_span::{BytePos, Pos, Span, Symbol}; use tracing::debug; use crate::lexer::diagnostics::TokenTreeDiagInfo; diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs index 42eef27803eb5..ef8d0f96b6102 100644 --- a/compiler/rustc_parse/src/lexer/unicode_chars.rs +++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs @@ -1,8 +1,7 @@ //! Characters and their corresponding confusables were collected from //! 
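// [Illustrative sketch, not part of the patch.] Toy model of the preference order in
// `merge_trait_candidates` above: if any applicable where-bound is non-global, only
// where-bound candidates (including global ones) are merged and the goal is recorded as
// proven via the param-env; otherwise alias-bound candidates are preferred; only if
// neither applies are all candidates merged together. Toy types only.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Source { Impl, ParamEnv { global: bool }, AliasBound }

#[derive(Clone, Copy, Debug, PartialEq)]
enum ProvenVia { Misc, ParamEnv, AliasBound }

fn merge_order(candidates: &[Source]) -> (Vec<Source>, ProvenVia) {
    if candidates.iter().any(|c| matches!(c, Source::ParamEnv { global: false })) {
        let bounds = candidates.iter().copied()
            .filter(|c| matches!(c, Source::ParamEnv { .. }))
            .collect();
        (bounds, ProvenVia::ParamEnv)
    } else if candidates.iter().any(|c| matches!(c, Source::AliasBound)) {
        let bounds = candidates.iter().copied()
            .filter(|c| matches!(c, Source::AliasBound))
            .collect();
        (bounds, ProvenVia::AliasBound)
    } else {
        (candidates.to_vec(), ProvenVia::Misc)
    }
}

fn main() {
    let candidates = [Source::Impl, Source::ParamEnv { global: false }];
    let (merged, via) = merge_order(&candidates);
    assert_eq!(via, ProvenVia::ParamEnv);
    assert_eq!(merged, vec![Source::ParamEnv { global: false }]);
}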
-use rustc_span::symbol::kw; -use rustc_span::{BytePos, Pos, Span}; +use rustc_span::{BytePos, Pos, Span, kw}; use super::Lexer; use crate::errors::TokenSubstitution; diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs index 8cf37671185a2..2691e6f56d68b 100644 --- a/compiler/rustc_parse/src/parser/attr.rs +++ b/compiler/rustc_parse/src/parser/attr.rs @@ -1,8 +1,6 @@ -use rustc_ast::token::{self, Delimiter}; -use rustc_ast::{self as ast, Attribute, attr}; +use rustc_ast::{self as ast, Attribute, attr, token}; use rustc_errors::codes::*; use rustc_errors::{Diag, PResult}; -use rustc_span::symbol::kw; use rustc_span::{BytePos, Span}; use thin_vec::ThinVec; use tracing::debug; @@ -11,7 +9,7 @@ use super::{ AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, ParserRange, PathStyle, Trailing, UsePreAttrPos, }; -use crate::{errors, fluent_generated as fluent, maybe_whole}; +use crate::{errors, exp, fluent_generated as fluent, maybe_whole}; // Public for rustfmt usage #[derive(Debug)] @@ -46,7 +44,7 @@ impl<'a> Parser<'a> { let mut just_parsed_doc_comment = false; let start_pos = self.num_bump_calls; loop { - let attr = if self.check(&token::Pound) { + let attr = if self.check(exp!(Pound)) { let prev_outer_attr_sp = outer_attrs.last().map(|attr: &Attribute| attr.span); let inner_error_reason = if just_parsed_doc_comment { @@ -127,14 +125,14 @@ impl<'a> Parser<'a> { let lo = self.token.span; // Attributes can't have attributes of their own [Editor's note: not with that attitude] self.collect_tokens_no_attrs(|this| { - assert!(this.eat(&token::Pound), "parse_attribute called in non-attribute position"); + assert!(this.eat(exp!(Pound)), "parse_attribute called in non-attribute position"); let style = - if this.eat(&token::Not) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer }; + if this.eat(exp!(Not)) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer }; - this.expect(&token::OpenDelim(Delimiter::Bracket))?; + this.expect(exp!(OpenBracket))?; let item = this.parse_attr_item(ForceCollect::No)?; - this.expect(&token::CloseDelim(Delimiter::Bracket))?; + this.expect(exp!(CloseBracket))?; let attr_sp = lo.to(this.prev_token.span); // Emit error if inner attribute is encountered and forbidden. @@ -275,10 +273,10 @@ impl<'a> Parser<'a> { // Attr items don't have attributes. self.collect_tokens(None, AttrWrapper::empty(), force_collect, |this, _empty_attrs| { - let is_unsafe = this.eat_keyword(kw::Unsafe); + let is_unsafe = this.eat_keyword(exp!(Unsafe)); let unsafety = if is_unsafe { let unsafe_span = this.prev_token.span; - this.expect(&token::OpenDelim(Delimiter::Parenthesis))?; + this.expect(exp!(OpenParen))?; ast::Safety::Unsafe(unsafe_span) } else { ast::Safety::Default @@ -287,7 +285,7 @@ impl<'a> Parser<'a> { let path = this.parse_path(PathStyle::Mod)?; let args = this.parse_attr_args()?; if is_unsafe { - this.expect(&token::CloseDelim(Delimiter::Parenthesis))?; + this.expect(exp!(CloseParen))?; } Ok(( ast::AttrItem { unsafety, path, args, tokens: None }, @@ -307,7 +305,7 @@ impl<'a> Parser<'a> { loop { let start_pos = self.num_bump_calls; // Only try to parse if it is an inner attribute (has `!`). - let attr = if self.check(&token::Pound) && self.look_ahead(1, |t| t == &token::Not) { + let attr = if self.check(exp!(Pound)) && self.look_ahead(1, |t| t == &token::Not) { Some(self.parse_attribute(InnerAttrPolicy::Permitted)?) 
} else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind { if attr_style == ast::AttrStyle::Inner { @@ -359,7 +357,7 @@ impl<'a> Parser<'a> { &mut self, ) -> PResult<'a, (ast::MetaItemInner, Vec<(ast::AttrItem, Span)>)> { let cfg_predicate = self.parse_meta_item_inner()?; - self.expect(&token::Comma)?; + self.expect(exp!(Comma))?; // Presumably, the majority of the time there will only be one attr. let mut expanded_attrs = Vec::with_capacity(1); @@ -367,7 +365,7 @@ impl<'a> Parser<'a> { let lo = self.token.span; let item = self.parse_attr_item(ForceCollect::Yes)?; expanded_attrs.push((item, lo.to(self.prev_token.span))); - if !self.eat(&token::Comma) { + if !self.eat(exp!(Comma)) { break; } } @@ -381,7 +379,7 @@ impl<'a> Parser<'a> { let mut nmis = ThinVec::with_capacity(1); while self.token != token::Eof { nmis.push(self.parse_meta_item_inner()?); - if !self.eat(&token::Comma) { + if !self.eat(exp!(Comma)) { break; } } @@ -414,13 +412,13 @@ impl<'a> Parser<'a> { let lo = self.token.span; let is_unsafe = if unsafe_allowed == AllowLeadingUnsafe::Yes { - self.eat_keyword(kw::Unsafe) + self.eat_keyword(exp!(Unsafe)) } else { false }; let unsafety = if is_unsafe { let unsafe_span = self.prev_token.span; - self.expect(&token::OpenDelim(Delimiter::Parenthesis))?; + self.expect(exp!(OpenParen))?; ast::Safety::Unsafe(unsafe_span) } else { @@ -430,7 +428,7 @@ impl<'a> Parser<'a> { let path = self.parse_path(PathStyle::Mod)?; let kind = self.parse_meta_item_kind()?; if is_unsafe { - self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; + self.expect(exp!(CloseParen))?; } let span = lo.to(self.prev_token.span); @@ -438,9 +436,9 @@ impl<'a> Parser<'a> { } pub(crate) fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> { - Ok(if self.eat(&token::Eq) { + Ok(if self.eat(exp!(Eq)) { ast::MetaItemKind::NameValue(self.parse_unsuffixed_meta_item_lit()?) 
- } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) { + } else if self.check(exp!(OpenParen)) { let (list, _) = self.parse_paren_comma_seq(|p| p.parse_meta_item_inner())?; ast::MetaItemKind::List(list) } else { diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index e5edf605d823e..9ee6c2fae1ac0 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -16,20 +16,21 @@ use rustc_ast_pretty::pprust; use rustc_data_structures::fx::FxHashSet; use rustc_data_structures::sync::Lrc; use rustc_errors::{ - Applicability, Diag, DiagCtxtHandle, ErrorGuaranteed, FatalError, PResult, Subdiagnostic, - Suggestions, pluralize, + Applicability, Diag, DiagCtxtHandle, ErrorGuaranteed, PResult, Subdiagnostic, Suggestions, + pluralize, }; use rustc_session::errors::ExprParenthesesNeeded; use rustc_span::edit_distance::find_best_match_for_name; use rustc_span::source_map::Spanned; -use rustc_span::symbol::{AllKeywords, Ident, kw, sym}; -use rustc_span::{BytePos, DUMMY_SP, Span, SpanSnippetError, Symbol}; +use rustc_span::symbol::used_keywords; +use rustc_span::{BytePos, DUMMY_SP, Ident, Span, SpanSnippetError, Symbol, kw, sym}; use thin_vec::{ThinVec, thin_vec}; use tracing::{debug, trace}; use super::pat::Expected; use super::{ - BlockMode, CommaRecoveryMode, Parser, PathStyle, Restrictions, SemiColonMode, SeqSep, TokenType, + BlockMode, CommaRecoveryMode, ExpTokenPair, Parser, PathStyle, Restrictions, SemiColonMode, + SeqSep, TokenType, }; use crate::errors::{ AddParen, AmbiguousPlus, AsyncMoveBlockIn2015, AttributeOnParamType, AwaitSuggestion, @@ -47,7 +48,7 @@ use crate::errors::{ UnexpectedConstParamDeclarationSugg, UnmatchedAngleBrackets, UseEqInstead, WrapType, }; use crate::parser::attr::InnerAttrPolicy; -use crate::{fluent_generated as fluent, parser}; +use crate::{exp, fluent_generated as fluent}; /// Creates a placeholder argument. pub(super) fn dummy_arg(ident: Ident, guar: ErrorGuaranteed) -> Param { @@ -462,8 +463,8 @@ impl<'a> Parser<'a> { pub(super) fn expected_one_of_not_found( &mut self, - edible: &[TokenKind], - inedible: &[TokenKind], + edible: &[ExpTokenPair<'_>], + inedible: &[ExpTokenPair<'_>], ) -> PResult<'a, ErrorGuaranteed> { debug!("expected_one_of_not_found(edible: {:?}, inedible: {:?})", edible, inedible); fn tokens_to_string(tokens: &[TokenType]) -> String { @@ -483,49 +484,17 @@ impl<'a> Parser<'a> { }) } - self.expected_tokens.extend(edible.iter().chain(inedible).cloned().map(TokenType::Token)); - let mut expected = self - .expected_tokens - .iter() - .filter(|token| { - // Filter out suggestions that suggest the same token which was found and deemed incorrect. - fn is_ident_eq_keyword(found: &TokenKind, expected: &TokenType) -> bool { - if let TokenKind::Ident(current_sym, _) = found - && let TokenType::Keyword(suggested_sym) = expected - { - return current_sym == suggested_sym; - } - false - } - - if **token != parser::TokenType::Token(self.token.kind.clone()) { - let eq = is_ident_eq_keyword(&self.token.kind, &token); - // If the suggestion is a keyword and the found token is an ident, - // the content of which are equal to the suggestion's content, - // we can remove that suggestion (see the `return false` below). - - // If this isn't the case however, and the suggestion is a token the - // content of which is the same as the found token's, we remove it as well. 
- if !eq { - if let TokenType::Token(kind) = token { - if self.token == *kind { - return false; - } - } - return true; - } - } - false - }) - .cloned() - .collect::>(); + for exp in edible.iter().chain(inedible.iter()) { + self.expected_token_types.insert(exp.token_type); + } + let mut expected: Vec<_> = self.expected_token_types.iter().collect(); expected.sort_by_cached_key(|x| x.to_string()); expected.dedup(); let sm = self.psess.source_map(); // Special-case "expected `;`" errors. - if expected.contains(&TokenType::Token(token::Semi)) { + if expected.contains(&TokenType::Semi) { // If the user is trying to write a ternary expression, recover it and // return an Err to prevent a cascade of irrelevant diagnostics. if self.prev_token == token::Question @@ -577,7 +546,7 @@ impl<'a> Parser<'a> { || (sm.is_multiline( self.prev_token.span.shrink_to_hi().until(self.token.span.shrink_to_lo()), ) && t == &token::Pound) - }) && !expected.contains(&TokenType::Token(token::Comma)) + }) && !expected.contains(&TokenType::Comma) { // Missing semicolon typo. This is triggered if the next token could either start a // new statement or is a block close. For example: @@ -597,7 +566,7 @@ impl<'a> Parser<'a> { if self.token == TokenKind::EqEq && self.prev_token.is_ident() - && expected.iter().any(|tok| matches!(tok, TokenType::Token(TokenKind::Eq))) + && expected.contains(&TokenType::Eq) { // Likely typo: `=` → `==` in let expr or enum item return Err(self.dcx().create_err(UseEqInstead { span: self.token.span })); @@ -636,15 +605,8 @@ impl<'a> Parser<'a> { // Look for usages of '=>' where '>=' was probably intended if self.token == token::FatArrow - && expected - .iter() - .any(|tok| matches!(tok, TokenType::Operator | TokenType::Token(TokenKind::Le))) - && !expected.iter().any(|tok| { - matches!( - tok, - TokenType::Token(TokenKind::FatArrow) | TokenType::Token(TokenKind::Comma) - ) - }) + && expected.iter().any(|tok| matches!(tok, TokenType::Operator | TokenType::Le)) + && !expected.iter().any(|tok| matches!(tok, TokenType::FatArrow | TokenType::Comma)) { err.span_suggestion( self.token.span, @@ -741,7 +703,7 @@ impl<'a> Parser<'a> { }; if self.check_too_many_raw_str_terminators(&mut err) { - if expected.contains(&TokenType::Token(token::Semi)) && self.eat(&token::Semi) { + if expected.contains(&TokenType::Semi) && self.eat(exp!(Semi)) { let guar = err.emit(); return Ok(guar); } else { @@ -785,17 +747,15 @@ impl<'a> Parser<'a> { let Some((curr_ident, _)) = self.token.ident() else { return; }; - let expected_tokens: &[TokenType] = + let expected_token_types: &[TokenType] = expected.len().checked_sub(10).map_or(&expected, |index| &expected[index..]); - let expected_keywords: Vec = expected_tokens - .iter() - .filter_map(|token| if let TokenType::Keyword(kw) = token { Some(*kw) } else { None }) - .collect(); - - // When there are a few keywords in the last ten elements of `self.expected_tokens` and the current - // token is an identifier, it's probably a misspelled keyword. - // This handles code like `async Move {}`, misspelled `if` in match guard, misspelled `else` in `if`-`else` - // and mispelled `where` in a where clause. + let expected_keywords: Vec = + expected_token_types.iter().filter_map(|token| token.is_keyword()).collect(); + + // When there are a few keywords in the last ten elements of `self.expected_token_types` + // and the current token is an identifier, it's probably a misspelled keyword. 
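The `diagnostics.rs` hunks above swap the old `expected_tokens` vector (which held cloned `TokenKind`s and needed the removed filtering pass) for an `expected_token_types` collection driven by `insert`, `contains`, and `iter`. A plausible minimal shape for such a set, assuming a small fieldless `TokenType` enum — the names and the bitset representation below are illustrative, not the actual rustc types:

#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
enum TokenType { Eq, Semi, Comma, Gt, FatArrow, OpenParen, CloseParen }

#[derive(Default)]
struct TokenTypeSet(u128);

impl TokenTypeSet {
    fn insert(&mut self, t: TokenType) {
        self.0 |= 1u128 << (t as u8);
    }
    fn contains(&self, t: TokenType) -> bool {
        self.0 & (1u128 << (t as u8)) != 0
    }
    // Iteration for building the "expected one of ..." message.
    fn iter(&self) -> impl Iterator<Item = TokenType> + '_ {
        [TokenType::Eq, TokenType::Semi, TokenType::Comma, TokenType::Gt,
         TokenType::FatArrow, TokenType::OpenParen, TokenType::CloseParen]
            .into_iter()
            .filter(|t| self.contains(*t))
    }
}

fn main() {
    let mut expected = TokenTypeSet::default();
    expected.insert(TokenType::Semi);
    expected.insert(TokenType::Comma);
    assert!(expected.contains(TokenType::Semi));
    assert!(!expected.contains(TokenType::Gt));
    let listed: Vec<_> = expected.iter().collect();
    assert_eq!(listed, vec![TokenType::Semi, TokenType::Comma]);
}

Because membership is a bit test, checks like `expected.contains(&TokenType::Semi)` in the updated error paths stay cheap even when many token types have been recorded.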
This handles + // code like `async Move {}`, misspelled `if` in match guard, misspelled `else` in + // `if`-`else` and misspelled `where` in a where clause. if !expected_keywords.is_empty() && !curr_ident.is_used_keyword() && let Some(misspelled_kw) = find_similar_kw(curr_ident, &expected_keywords) @@ -811,12 +771,12 @@ impl<'a> Parser<'a> { // so that it gets generated only when the diagnostic needs it. // Also, it is unlikely that this list is generated multiple times because the // parser halts after execution hits this path. - let all_keywords = AllKeywords::new().collect_used(|| prev_ident.span.edition()); + let all_keywords = used_keywords(|| prev_ident.span.edition()); // Otherwise, check the previous token with all the keywords as possible candidates. // This handles code like `Struct Human;` and `While a < b {}`. - // We check the previous token only when the current token is an identifier to avoid false - // positives like suggesting keyword `for` for `extern crate foo {}`. + // We check the previous token only when the current token is an identifier to avoid + // false positives like suggesting keyword `for` for `extern crate foo {}`. if let Some(misspelled_kw) = find_similar_kw(prev_ident, &all_keywords) { err.subdiagnostic(misspelled_kw); // We don't want other suggestions to be added as they are most likely meaningless @@ -1052,7 +1012,7 @@ impl<'a> Parser<'a> { (Err(snapshot_err), Err(err)) => { // We don't know what went wrong, emit the normal error. snapshot_err.cancel(); - self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes); + self.consume_block(exp!(OpenBrace), exp!(CloseBrace), ConsumeClosingDelim::Yes); Err(err) } (Ok(_), Ok(mut tail)) => { @@ -1089,7 +1049,7 @@ impl<'a> Parser<'a> { Applicability::MaybeIncorrect, ); let guar = err.emit(); - self.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]); + self.eat_to_tokens(&[exp!(CloseBrace)]); guar } token::OpenDelim(Delimiter::Parenthesis) @@ -1097,7 +1057,7 @@ impl<'a> Parser<'a> { { // We are within a function call or tuple, we can emit the error // and recover. - self.eat_to_tokens(&[&token::CloseDelim(Delimiter::Parenthesis), &token::Comma]); + self.eat_to_tokens(&[exp!(CloseParen), exp!(Comma)]); err.multipart_suggestion_verbose( "you might have meant to open the body of the closure", @@ -1124,11 +1084,11 @@ impl<'a> Parser<'a> { Ok(self.mk_expr_err(lo.to(self.token.span), guar)) } - /// Eats and discards tokens until one of `kets` is encountered. Respects token trees, + /// Eats and discards tokens until one of `closes` is encountered. Respects token trees, /// passes through any errors encountered. Used for error recovery. - pub(super) fn eat_to_tokens(&mut self, kets: &[&TokenKind]) { - if let Err(err) = - self.parse_seq_to_before_tokens(kets, &[], SeqSep::none(), |p| Ok(p.parse_token_tree())) + pub(super) fn eat_to_tokens(&mut self, closes: &[ExpTokenPair<'_>]) { + if let Err(err) = self + .parse_seq_to_before_tokens(closes, &[], SeqSep::none(), |p| Ok(p.parse_token_tree())) { err.cancel(); } @@ -1147,7 +1107,7 @@ impl<'a> Parser<'a> { pub(super) fn check_trailing_angle_brackets( &mut self, segment: &PathSegment, - end: &[&TokenKind], + end: &[ExpTokenPair<'_>], ) -> Option { if !self.may_recover() { return None; @@ -1230,7 +1190,7 @@ impl<'a> Parser<'a> { // second case. 
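The comments above describe the misspelled-keyword recovery (`async Move {}`, `While a < b {}`): when the current token is an identifier close to one of the expected keywords, suggest that keyword. A self-contained sketch of the idea, using a plain Levenshtein distance; `find_similar_kw` and the threshold are stand-ins, not the rustc helper's actual behavior:

fn levenshtein(a: &str, b: &str) -> usize {
    let (a, b): (Vec<char>, Vec<char>) = (a.chars().collect(), b.chars().collect());
    let mut prev: Vec<usize> = (0..=b.len()).collect();
    for (i, ca) in a.iter().enumerate() {
        let mut curr = vec![i + 1];
        for (j, cb) in b.iter().enumerate() {
            let cost = if ca == cb { 0 } else { 1 };
            curr.push((prev[j] + cost).min(prev[j + 1] + 1).min(curr[j] + 1));
        }
        prev = curr;
    }
    prev[b.len()]
}

// Returns the expected keyword closest to `ident`, if it is close enough
// to plausibly be a typo (illustrative threshold: a third of the length, at least 1).
fn find_similar_kw<'a>(ident: &str, expected: &[&'a str]) -> Option<&'a str> {
    let max_dist = (ident.len() / 3).max(1);
    expected
        .iter()
        .map(|kw| (*kw, levenshtein(&ident.to_lowercase(), kw)))
        .filter(|&(_, d)| d <= max_dist)
        .min_by_key(|&(_, d)| d)
        .map(|(kw, _)| kw)
}

fn main() {
    // `async Move {}`: `Move` lexes as an identifier, but `move` was expected.
    assert_eq!(find_similar_kw("Move", &["move", "if", "else"]), Some("move"));
    assert_eq!(find_similar_kw("banana", &["move", "if", "else"]), None);
}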
if self.look_ahead(position, |t| { trace!("check_trailing_angle_brackets: t={:?}", t); - end.contains(&&t.kind) + end.iter().any(|exp| exp.tok == &t.kind) }) { // Eat from where we started until the end token so that parsing can continue // as if we didn't have those extra angle brackets. @@ -1298,11 +1258,11 @@ impl<'a> Parser<'a> { ) -> PResult<'a, ErrorGuaranteed> { if let ExprKind::Binary(binop, _, _) = &expr.kind && let ast::BinOpKind::Lt = binop.node - && self.eat(&token::Comma) + && self.eat(exp!(Comma)) { let x = self.parse_seq_to_before_end( - &token::Gt, - SeqSep::trailing_allowed(token::Comma), + exp!(Gt), + SeqSep::trailing_allowed(exp!(Comma)), |p| match p.parse_generic_arg(None)? { Some(arg) => Ok(arg), // If we didn't eat a generic arg, then we should error. @@ -1311,7 +1271,7 @@ impl<'a> Parser<'a> { ); match x { Ok((_, _, Recovered::No)) => { - if self.eat(&token::Gt) { + if self.eat(exp!(Gt)) { // We made sense of it. Improve the error message. e.span_suggestion_verbose( binop.span.shrink_to_lo(), @@ -1376,7 +1336,7 @@ impl<'a> Parser<'a> { ) -> bool { if let ExprKind::Binary(op, l1, r1) = &inner_op.kind { if let ExprKind::Field(_, ident) = l1.kind - && ident.as_str().parse::().is_err() + && !ident.is_numeric() && !matches!(r1.kind, ExprKind::Lit(_)) { // The parser has encountered `foo.bar Parser<'a> { ty_span: Span, ty: P, ) -> PResult<'a, P> { - self.expect(&token::PathSep)?; + self.expect(exp!(PathSep))?; let mut path = ast::Path { segments: ThinVec::new(), span: DUMMY_SP, tokens: None }; self.parse_path_segments(&mut path.segments, T::PATH_STYLE, None)?; @@ -1956,10 +1916,10 @@ impl<'a> Parser<'a> { } pub(super) fn expect_semi(&mut self) -> PResult<'a, ()> { - if self.eat(&token::Semi) || self.recover_colon_as_semi() { + if self.eat(exp!(Semi)) || self.recover_colon_as_semi() { return Ok(()); } - self.expect(&token::Semi).map(drop) // Error unconditionally + self.expect(exp!(Semi)).map(drop) // Error unconditionally } pub(super) fn recover_colon_as_semi(&mut self) -> bool { @@ -2004,15 +1964,15 @@ impl<'a> Parser<'a> { } fn recover_await_macro(&mut self) -> PResult<'a, (Span, P, bool)> { - self.expect(&token::Not)?; - self.expect(&token::OpenDelim(Delimiter::Parenthesis))?; + self.expect(exp!(Not))?; + self.expect(exp!(OpenParen))?; let expr = self.parse_expr()?; - self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; + self.expect(exp!(CloseParen))?; Ok((self.prev_token.span, expr, false)) } fn recover_await_prefix(&mut self, await_sp: Span) -> PResult<'a, (Span, P, bool)> { - let is_question = self.eat(&token::Question); // Handle `await? `. + let is_question = self.eat(exp!(Question)); // Handle `await? `. let expr = if self.token == token::OpenDelim(Delimiter::Brace) { // Handle `await { }`. 
// This needs to be handled separately from the next arm to avoid @@ -2074,7 +2034,7 @@ impl<'a> Parser<'a> { let try_span = lo.to(self.token.span); //we take the try!( span self.bump(); //remove ( let is_empty = self.token == token::CloseDelim(Delimiter::Parenthesis); //check if the block is empty - self.consume_block(Delimiter::Parenthesis, ConsumeClosingDelim::No); //eat the block + self.consume_block(exp!(OpenParen), exp!(CloseParen), ConsumeClosingDelim::No); //eat the block let hi = self.token.span; self.bump(); //remove ) let mut err = self.dcx().struct_span_err(lo.to(hi), "use of deprecated `try` macro"); @@ -2130,13 +2090,14 @@ impl<'a> Parser<'a> { pub(super) fn recover_seq_parse_error( &mut self, - delim: Delimiter, + open: ExpTokenPair<'_>, + close: ExpTokenPair<'_>, lo: Span, err: Diag<'a>, ) -> P { let guar = err.emit(); // Recover from parse error, callers expect the closing delim to be consumed. - self.consume_block(delim, ConsumeClosingDelim::Yes); + self.consume_block(open, close, ConsumeClosingDelim::Yes); self.mk_expr(lo.to(self.prev_token.span), ExprKind::Err(guar)) } @@ -2225,7 +2186,7 @@ impl<'a> Parser<'a> { } pub(super) fn check_for_for_in_in_typo(&mut self, in_span: Span) { - if self.eat_keyword(kw::In) { + if self.eat_keyword(exp!(In)) { // a common typo: `for _ in in bar {}` self.dcx().emit_err(InInTypo { span: self.prev_token.span, @@ -2366,7 +2327,7 @@ impl<'a> Parser<'a> { pub(super) fn recover_arg_parse(&mut self) -> PResult<'a, (P, P)> { let pat = self.parse_pat_no_top_alt(Some(Expected::ArgumentName), None)?; - self.expect(&token::Colon)?; + self.expect(exp!(Colon))?; let ty = self.parse_ty()?; self.dcx().emit_err(PatternMethodParamWithoutBody { span: pat.span }); @@ -2384,12 +2345,17 @@ impl<'a> Parser<'a> { Ok(param) } - pub(super) fn consume_block(&mut self, delim: Delimiter, consume_close: ConsumeClosingDelim) { + pub(super) fn consume_block( + &mut self, + open: ExpTokenPair<'_>, + close: ExpTokenPair<'_>, + consume_close: ConsumeClosingDelim, + ) { let mut brace_depth = 0; loop { - if self.eat(&token::OpenDelim(delim)) { + if self.eat(open) { brace_depth += 1; - } else if self.check(&token::CloseDelim(delim)) { + } else if self.check(close) { if brace_depth == 0 { if let ConsumeClosingDelim::Yes = consume_close { // Some of the callers of this method expect to be able to parse the @@ -2545,7 +2511,7 @@ impl<'a> Parser<'a> { match self.recover_const_arg(arg.span(), err) { Ok(arg) => { args.push(AngleBracketedArg::Arg(arg)); - if self.eat(&token::Comma) { + if self.eat(exp!(Comma)) { return Ok(true); // Continue } } @@ -3016,7 +2982,7 @@ impl<'a> Parser<'a> { /// Check for exclusive ranges written as `..<` pub(crate) fn maybe_err_dotdotlt_syntax(&self, maybe_lt: Token, mut err: Diag<'a>) -> Diag<'a> { if maybe_lt == token::Lt - && (self.expected_tokens.contains(&TokenType::Token(token::Gt)) + && (self.expected_token_types.contains(TokenType::Gt) || matches!(self.token.kind, token::Literal(..))) { err.span_suggestion( @@ -3057,17 +3023,10 @@ impl<'a> Parser<'a> { } pub(super) fn recover_vcs_conflict_marker(&mut self) { - if let Err(err) = self.err_vcs_conflict_marker() { - err.emit(); - FatalError.raise(); - } - } - - pub(crate) fn err_vcs_conflict_marker(&mut self) -> PResult<'a, ()> { // <<<<<<< let Some(start) = self.conflict_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) else { - return Ok(()); + return; }; let mut spans = Vec::with_capacity(3); spans.push(start); @@ -3097,7 +3056,7 @@ impl<'a> Parser<'a> { self.bump(); } - let mut err = 
self.dcx().struct_span_err(spans, "encountered diff marker"); + let mut err = self.dcx().struct_span_fatal(spans, "encountered diff marker"); match middlediff3 { // We're using diff3 Some(middlediff3) => { @@ -3140,15 +3099,15 @@ impl<'a> Parser<'a> { visit ", ); - Err(err) + err.emit(); } /// Parse and throw away a parenthesized comma separated /// sequence of patterns until `)` is reached. fn skip_pat_list(&mut self) -> PResult<'a, ()> { - while !self.check(&token::CloseDelim(Delimiter::Parenthesis)) { + while !self.check(exp!(CloseParen)) { self.parse_pat_no_top_alt(None, None)?; - if !self.eat(&token::Comma) { + if !self.eat(exp!(Comma)) { return Ok(()); } } diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index a2136399b0c82..7533e75ffe261 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -8,6 +8,7 @@ use ast::token::IdentIsRaw; use ast::{CoroutineKind, ForLoopKind, GenBlockKind, MatchKind, Pat, Path, PathSegment, Recovered}; use rustc_ast::ptr::P; use rustc_ast::token::{self, Delimiter, Token, TokenKind}; +use rustc_ast::tokenstream::TokenTree; use rustc_ast::util::case::Case; use rustc_ast::util::classify; use rustc_ast::util::parser::{AssocOp, ExprPrecedence, Fixity, prec_let_scrutinee_needs_par}; @@ -26,8 +27,7 @@ use rustc_session::errors::{ExprParenthesesNeeded, report_lit_error}; use rustc_session::lint::BuiltinLintDiag; use rustc_session::lint::builtin::BREAK_WITH_LABEL_AND_LOOP; use rustc_span::source_map::{self, Spanned}; -use rustc_span::symbol::{Ident, Symbol, kw, sym}; -use rustc_span::{BytePos, ErrorGuaranteed, Pos, Span}; +use rustc_span::{BytePos, ErrorGuaranteed, Ident, Pos, Span, Symbol, kw, sym}; use thin_vec::{ThinVec, thin_vec}; use tracing::instrument; @@ -35,10 +35,10 @@ use super::diagnostics::SnapshotParser; use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma}; use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign}; use super::{ - AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions, - SemiColonMode, SeqSep, TokenType, Trailing, UsePreAttrPos, + AttrWrapper, BlockMode, ClosureSpans, ExpTokenPair, ForceCollect, Parser, PathStyle, + Restrictions, SemiColonMode, SeqSep, TokenType, Trailing, UsePreAttrPos, }; -use crate::{errors, maybe_recover_from_interpolated_ty_qpath}; +use crate::{errors, exp, maybe_recover_from_interpolated_ty_qpath}; #[derive(Debug)] pub(super) enum DestructuredFloat { @@ -153,7 +153,7 @@ impl<'a> Parser<'a> { return Ok((lhs, parsed_something)); } - self.expected_tokens.push(TokenType::Operator); + self.expected_token_types.insert(TokenType::Operator); while let Some(op) = self.check_assoc_op() { let lhs_span = self.interpolated_or_expr_span(&lhs); let cur_op_span = self.token.span; @@ -279,13 +279,9 @@ impl<'a> Parser<'a> { break; } - let fixity = op.fixity(); - let min_prec = match fixity { + let min_prec = match op.fixity() { Fixity::Right => Bound::Included(prec), - Fixity::Left => Bound::Excluded(prec), - // We currently have no non-associative operators that are not handled above by - // the special cases. The code is here only for future convenience. 
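Earlier in this file's diff, `consume_block` gains explicit `open`/`close` `ExpTokenPair` arguments in place of a single `Delimiter`. Stripped of parser plumbing, the depth-tracking recovery loop it implements looks roughly like this (toy token type and free function, illustrative only):

#[derive(Clone, Copy, PartialEq, Debug)]
enum Tok { OpenParen, CloseParen, Ident, Eof }

// Skips tokens until the matching `close` of an already-entered block,
// keeping nested `open`/`close` pairs balanced. Returns the new position.
fn consume_block(tokens: &[Tok], mut pos: usize, open: Tok, close: Tok, eat_close: bool) -> usize {
    let mut depth = 0usize;
    while pos < tokens.len() {
        let t = tokens[pos];
        if t == open {
            depth += 1;
        } else if t == close {
            if depth == 0 {
                if eat_close {
                    pos += 1; // caller asked for the closing delimiter to be consumed too
                }
                return pos;
            }
            depth -= 1;
        } else if t == Tok::Eof {
            break;
        }
        pos += 1;
    }
    pos
}

fn main() {
    // After the caller has already eaten the outer `(` of `try!( foo ( bar ) )`:
    let toks = [Tok::Ident, Tok::OpenParen, Tok::Ident, Tok::CloseParen, Tok::CloseParen, Tok::Eof];
    let end = consume_block(&toks, 0, Tok::OpenParen, Tok::CloseParen, true);
    assert_eq!(end, 5); // positioned just past the outer `)`
}

Passing the pair explicitly lets the same loop recover bracketed, braced, or parenthesized blocks while still recording the expected closing token for diagnostics.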
- Fixity::None => Bound::Excluded(prec), + Fixity::Left | Fixity::None => Bound::Excluded(prec), }; let (rhs, _) = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| { let attrs = this.parse_outer_attributes()?; @@ -337,10 +333,6 @@ impl<'a> Parser<'a> { self.dcx().span_bug(span, "AssocOp should have been handled by special case") } }; - - if let Fixity::None = fixity { - break; - } } Ok((lhs, parsed_something)) @@ -873,9 +865,9 @@ impl<'a> Parser<'a> { /// Parse `mut?` or `raw [ const | mut ]`. fn parse_borrow_modifiers(&mut self) -> (ast::BorrowKind, ast::Mutability) { - if self.check_keyword(kw::Raw) && self.look_ahead(1, Token::is_mutability) { + if self.check_keyword(exp!(Raw)) && self.look_ahead(1, Token::is_mutability) { // `raw [ const | mut ]`. - let found_raw = self.eat_keyword(kw::Raw); + let found_raw = self.eat_keyword(exp!(Raw)); assert!(found_raw); let mutability = self.parse_const_or_mut().unwrap(); (ast::BorrowKind::Raw, mutability) @@ -908,7 +900,7 @@ impl<'a> Parser<'a> { // a `return` which could be suggested otherwise. self.eat_noexpect(&token::Question) } else { - self.eat(&token::Question) + self.eat(exp!(Question)) }; if has_question { // `expr?` @@ -926,7 +918,7 @@ impl<'a> Parser<'a> { self.dcx().emit_err(errors::ExprRArrowCall { span }); true } else { - self.eat(&token::Dot) + self.eat(exp!(Dot)) }; if has_dot { // expr.f @@ -1251,7 +1243,7 @@ impl<'a> Parser<'a> { .map(|args| self.mk_expr(lo.to(self.prev_token.span), self.mk_call(fun, args))); match self.maybe_recover_struct_lit_bad_delims(lo, open_paren, seq, snapshot) { Ok(expr) => expr, - Err(err) => self.recover_seq_parse_error(Delimiter::Parenthesis, lo, err), + Err(err) => self.recover_seq_parse_error(exp!(OpenParen), exp!(CloseParen), lo, err), } } @@ -1268,10 +1260,8 @@ impl<'a> Parser<'a> { match (self.may_recover(), seq, snapshot) { (true, Err(err), Some((mut snapshot, ExprKind::Path(None, path)))) => { snapshot.bump(); // `(` - match snapshot.parse_struct_fields(path.clone(), false, Delimiter::Parenthesis) { - Ok((fields, ..)) - if snapshot.eat(&token::CloseDelim(Delimiter::Parenthesis)) => - { + match snapshot.parse_struct_fields(path.clone(), false, exp!(CloseParen)) { + Ok((fields, ..)) if snapshot.eat(exp!(CloseParen)) => { // We are certain we have `Enum::Foo(a: 3, b: 4)`, suggest // `Enum::Foo { a: 3, b: 4 }` or `Enum::Foo(3, 4)`. self.restore_snapshot(snapshot); @@ -1328,7 +1318,7 @@ impl<'a> Parser<'a> { self.bump(); // `[` let index = self.parse_expr()?; self.suggest_missing_semicolon_before_array(prev_span, open_delim_span)?; - self.expect(&token::CloseDelim(Delimiter::Bracket))?; + self.expect(exp!(CloseBracket))?; Ok(self.mk_expr( lo.to(self.prev_token.span), self.mk_index(base, index, open_delim_span.to(self.prev_token.span)), @@ -1337,12 +1327,12 @@ impl<'a> Parser<'a> { /// Assuming we have just parsed `.`, continue parsing into an expression. 
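The `Fixity` hunk above folds `Fixity::None` into the `Fixity::Left` arm: both use an exclusive minimum precedence for the right-hand operand, while `Fixity::Right` uses an inclusive one. A toy precedence-climbing parser (made-up operators, not rustc's `AssocOp`) shows why that bound choice yields the usual associativity:

#[derive(Clone, Copy, PartialEq, Debug)]
enum Tok { Num(i64), Plus, Caret }

fn prec(t: Tok) -> Option<(u8, bool)> {
    // (precedence, is_right_associative)
    match t {
        Tok::Plus => Some((1, false)),
        Tok::Caret => Some((2, true)),
        Tok::Num(_) => None,
    }
}

fn parse_expr(toks: &[Tok], pos: &mut usize, min_prec: u8) -> i64 {
    let Tok::Num(mut lhs) = toks[*pos] else { panic!("expected a number") };
    *pos += 1;
    while let Some(&op) = toks.get(*pos) {
        let Some((p, right_assoc)) = prec(op) else { break };
        if p < min_prec {
            break;
        }
        *pos += 1;
        // Right-assoc: allow the same precedence on the right (inclusive bound).
        // Left-assoc (and non-assoc): require strictly higher precedence (exclusive bound).
        let next_min = if right_assoc { p } else { p + 1 };
        let rhs = parse_expr(toks, pos, next_min);
        lhs = match op {
            Tok::Plus => lhs + rhs,
            Tok::Caret => lhs.pow(rhs as u32),
            Tok::Num(_) => unreachable!(),
        };
    }
    lhs
}

fn main() {
    // 2 ^ 3 ^ 2 groups as 2 ^ (3 ^ 2) = 512; 1 + 2 + 3 groups left to right.
    assert_eq!(parse_expr(&[Tok::Num(2), Tok::Caret, Tok::Num(3), Tok::Caret, Tok::Num(2)], &mut 0, 0), 512);
    assert_eq!(parse_expr(&[Tok::Num(1), Tok::Plus, Tok::Num(2), Tok::Plus, Tok::Num(3)], &mut 0, 0), 6);
}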
fn parse_dot_suffix(&mut self, self_arg: P, lo: Span) -> PResult<'a, P> { - if self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(kw::Await) { + if self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await)) { return Ok(self.mk_await_expr(self_arg, lo)); } // Post-fix match - if self.eat_keyword(kw::Match) { + if self.eat_keyword(exp!(Match)) { let match_span = self.prev_token.span; self.psess.gated_spans.gate(sym::postfix_match, match_span); return self.parse_match_block(lo, match_span, self_arg, MatchKind::Postfix); @@ -1350,10 +1340,10 @@ impl<'a> Parser<'a> { let fn_span_lo = self.token.span; let mut seg = self.parse_path_segment(PathStyle::Expr, None)?; - self.check_trailing_angle_brackets(&seg, &[&token::OpenDelim(Delimiter::Parenthesis)]); + self.check_trailing_angle_brackets(&seg, &[exp!(OpenParen)]); self.check_turbofish_missing_angle_brackets(&mut seg); - if self.check(&token::OpenDelim(Delimiter::Parenthesis)) { + if self.check(exp!(OpenParen)) { // Method call `expr.f()` let args = self.parse_expr_paren_seq()?; let fn_span = fn_span_lo.to(self.prev_token.span); @@ -1415,18 +1405,18 @@ impl<'a> Parser<'a> { let restrictions = self.restrictions; self.with_res(restrictions - Restrictions::ALLOW_LET, |this| { - // Note: when adding new syntax here, don't forget to adjust `TokenKind::can_begin_expr()`. + // Note: adding new syntax here? Don't forget to adjust `TokenKind::can_begin_expr()`. let lo = this.token.span; if let token::Literal(_) = this.token.kind { // This match arm is a special-case of the `_` match arm below and // could be removed without changing functionality, but it's faster // to have it here, especially for programs with large constants. this.parse_expr_lit() - } else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) { + } else if this.check(exp!(OpenParen)) { this.parse_expr_tuple_parens(restrictions) - } else if this.check(&token::OpenDelim(Delimiter::Brace)) { + } else if this.check(exp!(OpenBrace)) { this.parse_expr_block(None, lo, BlockCheckMode::Default) - } else if this.check(&token::BinOp(token::Or)) || this.check(&token::OrOr) { + } else if this.check(exp!(Or)) || this.check(exp!(OrOr)) { this.parse_expr_closure().map_err(|mut err| { // If the input is something like `if a { 1 } else { 2 } | if a { 3 } else { 4 }` // then suggest parens around the lhs. 
@@ -1435,41 +1425,41 @@ impl<'a> Parser<'a> { } err }) - } else if this.check(&token::OpenDelim(Delimiter::Bracket)) { - this.parse_expr_array_or_repeat(Delimiter::Bracket) + } else if this.check(exp!(OpenBracket)) { + this.parse_expr_array_or_repeat(exp!(CloseBracket)) } else if this.is_builtin() { this.parse_expr_builtin() } else if this.check_path() { this.parse_expr_path_start() - } else if this.check_keyword(kw::Move) - || this.check_keyword(kw::Static) + } else if this.check_keyword(exp!(Move)) + || this.check_keyword(exp!(Static)) || this.check_const_closure() { this.parse_expr_closure() - } else if this.eat_keyword(kw::If) { + } else if this.eat_keyword(exp!(If)) { this.parse_expr_if() - } else if this.check_keyword(kw::For) { + } else if this.check_keyword(exp!(For)) { if this.choose_generics_over_qpath(1) { this.parse_expr_closure() } else { - assert!(this.eat_keyword(kw::For)); + assert!(this.eat_keyword(exp!(For))); this.parse_expr_for(None, lo) } - } else if this.eat_keyword(kw::While) { + } else if this.eat_keyword(exp!(While)) { this.parse_expr_while(None, lo) } else if let Some(label) = this.eat_label() { this.parse_expr_labeled(label, true) - } else if this.eat_keyword(kw::Loop) { + } else if this.eat_keyword(exp!(Loop)) { this.parse_expr_loop(None, lo).map_err(|mut err| { err.span_label(lo, "while parsing this `loop` expression"); err }) - } else if this.eat_keyword(kw::Match) { + } else if this.eat_keyword(exp!(Match)) { this.parse_expr_match().map_err(|mut err| { err.span_label(lo, "while parsing this `match` expression"); err }) - } else if this.eat_keyword(kw::Unsafe) { + } else if this.eat_keyword(exp!(Unsafe)) { this.parse_expr_block(None, lo, BlockCheckMode::Unsafe(ast::UserProvided)).map_err( |mut err| { err.span_label(lo, "while parsing this `unsafe` expression"); @@ -1481,23 +1471,23 @@ impl<'a> Parser<'a> { } else if this.may_recover() && this.is_do_catch_block() { this.recover_do_catch() } else if this.is_try_block() { - this.expect_keyword(kw::Try)?; + this.expect_keyword(exp!(Try))?; this.parse_try_block(lo) - } else if this.eat_keyword(kw::Return) { + } else if this.eat_keyword(exp!(Return)) { this.parse_expr_return() - } else if this.eat_keyword(kw::Continue) { + } else if this.eat_keyword(exp!(Continue)) { this.parse_expr_continue(lo) - } else if this.eat_keyword(kw::Break) { + } else if this.eat_keyword(exp!(Break)) { this.parse_expr_break() - } else if this.eat_keyword(kw::Yield) { + } else if this.eat_keyword(exp!(Yield)) { this.parse_expr_yield() } else if this.is_do_yeet() { this.parse_expr_yeet() - } else if this.eat_keyword(kw::Become) { + } else if this.eat_keyword(exp!(Become)) { this.parse_expr_become() - } else if this.check_keyword(kw::Let) { + } else if this.check_keyword(exp!(Let)) { this.parse_expr_let(restrictions) - } else if this.eat_keyword(kw::Underscore) { + } else if this.eat_keyword(exp!(Underscore)) { Ok(this.mk_expr(this.prev_token.span, ExprKind::Underscore)) } else if this.token.uninterpolated_span().at_least_rust_2018() { // `Span::at_least_rust_2018()` is somewhat expensive; don't get it repeatedly. @@ -1505,11 +1495,11 @@ impl<'a> Parser<'a> { // check for `gen {}` and `gen move {}` // or `async gen {}` and `async gen move {}` && (this.is_gen_block(kw::Gen, 0) - || (this.check_keyword(kw::Async) && this.is_gen_block(kw::Gen, 1))) + || (this.check_keyword(exp!(Async)) && this.is_gen_block(kw::Gen, 1))) { // FIXME: (async) gen closures aren't yet parsed. 
this.parse_gen_block() - } else if this.check_keyword(kw::Async) { + } else if this.check_keyword(exp!(Async)) { // FIXME(gen_blocks): Parse `gen async` and suggest swap if this.is_gen_block(kw::Async, 0) { // Check for `async {` and `async move {`, @@ -1541,15 +1531,20 @@ impl<'a> Parser<'a> { fn parse_expr_tuple_parens(&mut self, restrictions: Restrictions) -> PResult<'a, P> { let lo = self.token.span; - self.expect(&token::OpenDelim(Delimiter::Parenthesis))?; + self.expect(exp!(OpenParen))?; let (es, trailing_comma) = match self.parse_seq_to_end( - &token::CloseDelim(Delimiter::Parenthesis), - SeqSep::trailing_allowed(token::Comma), + exp!(CloseParen), + SeqSep::trailing_allowed(exp!(Comma)), |p| p.parse_expr_catch_underscore(restrictions.intersection(Restrictions::ALLOW_LET)), ) { Ok(x) => x, Err(err) => { - return Ok(self.recover_seq_parse_error(Delimiter::Parenthesis, lo, err)); + return Ok(self.recover_seq_parse_error( + exp!(OpenParen), + exp!(CloseParen), + lo, + err, + )); } }; let kind = if es.len() == 1 && matches!(trailing_comma, Trailing::No) { @@ -1563,25 +1558,24 @@ impl<'a> Parser<'a> { self.maybe_recover_from_bad_qpath(expr) } - fn parse_expr_array_or_repeat(&mut self, close_delim: Delimiter) -> PResult<'a, P> { + fn parse_expr_array_or_repeat(&mut self, close: ExpTokenPair<'_>) -> PResult<'a, P> { let lo = self.token.span; self.bump(); // `[` or other open delim - let close = &token::CloseDelim(close_delim); let kind = if self.eat(close) { // Empty vector ExprKind::Array(ThinVec::new()) } else { // Non-empty vector let first_expr = self.parse_expr()?; - if self.eat(&token::Semi) { + if self.eat(exp!(Semi)) { // Repeating array syntax: `[ 0; 512 ]` let count = self.parse_expr_anon_const()?; self.expect(close)?; ExprKind::Repeat(first_expr, count) - } else if self.eat(&token::Comma) { + } else if self.eat(exp!(Comma)) { // Vector with two or more elements. - let sep = SeqSep::trailing_allowed(token::Comma); + let sep = SeqSep::trailing_allowed(exp!(Comma)); let (mut exprs, _) = self.parse_seq_to_end(close, sep, |p| p.parse_expr())?; exprs.insert(0, first_expr); ExprKind::Array(exprs) @@ -1615,7 +1609,7 @@ impl<'a> Parser<'a> { }; // `!`, as an operator, is prefix, so we know this isn't that. - let (span, kind) = if self.eat(&token::Not) { + let (span, kind) = if self.eat(exp!(Not)) { // MACRO INVOCATION expression if qself.is_some() { self.dcx().emit_err(errors::MacroInvocationWithQualifiedPath(path.span)); @@ -1623,7 +1617,7 @@ impl<'a> Parser<'a> { let lo = path.span; let mac = P(MacCall { path, args: self.parse_delim_args()? 
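Many of the expression hunks call helpers such as `parse_seq_to_end(exp!(CloseParen), SeqSep::trailing_allowed(exp!(Comma)), ...)`. A minimal stand-alone version of that shape — toy tokens, and a `SeqSep` that only models the separator and trailing-separator flag, not the rustc API:

#[derive(Clone, Copy, PartialEq, Debug)]
enum Tok { Num(i64), Comma, CloseParen }

struct SeqSep { sep: Option<Tok>, trailing_allowed: bool }

impl SeqSep {
    fn trailing_allowed(sep: Tok) -> Self {
        SeqSep { sep: Some(sep), trailing_allowed: true }
    }
}

// Parses items until `close`, separated by `sep.sep`, optionally tolerating a
// trailing separator right before the closing token.
fn parse_seq_to_end(tokens: &[Tok], close: Tok, sep: SeqSep) -> Result<Vec<i64>, String> {
    let mut items = Vec::new();
    let mut pos = 0;
    loop {
        match tokens.get(pos) {
            Some(&t) if t == close => return Ok(items),
            Some(&Tok::Num(n)) => {
                items.push(n);
                pos += 1;
                match (tokens.get(pos), sep.sep) {
                    (Some(&t), _) if t == close => return Ok(items),
                    (Some(&t), Some(s)) if t == s => {
                        pos += 1;
                        if !sep.trailing_allowed && tokens.get(pos) == Some(&close) {
                            return Err("trailing separator not allowed".into());
                        }
                    }
                    _ => return Err("expected separator or closing token".into()),
                }
            }
            other => return Err(format!("unexpected token: {other:?}")),
        }
    }
}

fn main() {
    let toks = [Tok::Num(1), Tok::Comma, Tok::Num(2), Tok::Comma, Tok::CloseParen];
    assert_eq!(parse_seq_to_end(&toks, Tok::CloseParen, SeqSep::trailing_allowed(Tok::Comma)), Ok(vec![1, 2]));
}

In the real parser the close token and separator arrive as `ExpTokenPair`s, so every position that fails to match also records the expected token type for later error reporting.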
}); (lo.to(self.prev_token.span), ExprKind::MacCall(mac)) - } else if self.check(&token::OpenDelim(Delimiter::Brace)) + } else if self.check(exp!(OpenBrace)) && let Some(expr) = self.maybe_parse_struct_expr(&qself, &path) { if qself.is_some() { @@ -1646,13 +1640,13 @@ impl<'a> Parser<'a> { ) -> PResult<'a, P> { let lo = label_.ident.span; let label = Some(label_); - let ate_colon = self.eat(&token::Colon); + let ate_colon = self.eat(exp!(Colon)); let tok_sp = self.token.span; - let expr = if self.eat_keyword(kw::While) { + let expr = if self.eat_keyword(exp!(While)) { self.parse_expr_while(label, lo) - } else if self.eat_keyword(kw::For) { + } else if self.eat_keyword(exp!(For)) { self.parse_expr_for(label, lo) - } else if self.eat_keyword(kw::Loop) { + } else if self.eat_keyword(exp!(Loop)) { self.parse_expr_loop(label, lo) } else if self.check_noexpect(&token::OpenDelim(Delimiter::Brace)) || self.token.is_whole_block() @@ -1958,7 +1952,7 @@ impl<'a> Parser<'a> { self.psess.gated_spans.gate(sym::builtin_syntax, ident.span); self.bump(); - self.expect(&TokenKind::OpenDelim(Delimiter::Parenthesis))?; + self.expect(exp!(OpenParen))?; let ret = if let Some(res) = parse(self, lo, ident)? { Ok(res) } else { @@ -1968,7 +1962,7 @@ impl<'a> Parser<'a> { }); return Err(err); }; - self.expect(&TokenKind::CloseDelim(Delimiter::Parenthesis))?; + self.expect(exp!(CloseParen))?; ret } @@ -1976,14 +1970,12 @@ impl<'a> Parser<'a> { /// Built-in macro for `offset_of!` expressions. pub(crate) fn parse_expr_offset_of(&mut self, lo: Span) -> PResult<'a, P> { let container = self.parse_ty()?; - self.expect(&TokenKind::Comma)?; + self.expect(exp!(Comma))?; let fields = self.parse_floating_field_access()?; let trailing_comma = self.eat_noexpect(&TokenKind::Comma); - if let Err(mut e) = - self.expect_one_of(&[], &[TokenKind::CloseDelim(Delimiter::Parenthesis)]) - { + if let Err(mut e) = self.expect_one_of(&[], &[exp!(CloseParen)]) { if trailing_comma { e.note("unexpected third argument to offset_of"); } else { @@ -2006,7 +1998,7 @@ impl<'a> Parser<'a> { /// Built-in macro for type ascription expressions. pub(crate) fn parse_expr_type_ascribe(&mut self, lo: Span) -> PResult<'a, P> { let expr = self.parse_expr()?; - self.expect(&token::Comma)?; + self.expect(exp!(Comma))?; let ty = self.parse_ty()?; let span = lo.to(self.token.span); Ok(self.mk_expr(span, ExprKind::Type(expr, ty))) @@ -2018,7 +2010,7 @@ impl<'a> Parser<'a> { kind: UnsafeBinderCastKind, ) -> PResult<'a, P> { let expr = self.parse_expr()?; - let ty = if self.eat(&TokenKind::Comma) { Some(self.parse_ty()?) } else { None }; + let ty = if self.eat(exp!(Comma)) { Some(self.parse_ty()?) } else { None }; let span = lo.to(self.token.span); Ok(self.mk_expr(span, ExprKind::UnsafeBinderCast(kind, expr, ty))) } @@ -2214,7 +2206,7 @@ impl<'a> Parser<'a> { } let lo = self.token.span; - let minus_present = self.eat(&token::BinOp(token::Minus)); + let minus_present = self.eat(exp!(Minus)); let (token_lit, span) = self.parse_token_lit()?; let expr = self.mk_expr(span, ExprKind::Lit(token_lit)); @@ -2236,7 +2228,7 @@ impl<'a> Parser<'a> { /// expression. 
fn maybe_suggest_brackets_instead_of_braces(&mut self, lo: Span) -> Option> { let mut snapshot = self.create_snapshot_for_diagnostic(); - match snapshot.parse_expr_array_or_repeat(Delimiter::Brace) { + match snapshot.parse_expr_array_or_repeat(exp!(CloseBrace)) { Ok(arr) => { let guar = self.dcx().emit_err(errors::ArrayBracketsInsteadOfSpaces { span: arr.span, @@ -2272,8 +2264,8 @@ impl<'a> Parser<'a> { let mut snapshot = self.create_snapshot_for_diagnostic(); snapshot.bump(); match snapshot.parse_seq_to_before_end( - &token::CloseDelim(Delimiter::Bracket), - SeqSep::trailing_allowed(token::Comma), + exp!(CloseBracket), + SeqSep::trailing_allowed(exp!(Comma)), |p| p.parse_expr(), ) { Ok(_) @@ -2337,7 +2329,7 @@ impl<'a> Parser<'a> { let lo = self.token.span; let before = self.prev_token.clone(); - let binder = if self.check_keyword(kw::For) { + let binder = if self.check_keyword(exp!(For)) { let lo = self.token.span; let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?; let span = lo.to(self.prev_token.span); @@ -2352,7 +2344,7 @@ impl<'a> Parser<'a> { let constness = self.parse_closure_constness(); let movability = - if self.eat_keyword(kw::Static) { Movability::Static } else { Movability::Movable }; + if self.eat_keyword(exp!(Static)) { Movability::Static } else { Movability::Movable }; let coroutine_kind = if self.token.uninterpolated_span().at_least_rust_2018() { self.parse_coroutine_kind(Case::Sensitive) @@ -2393,7 +2385,8 @@ impl<'a> Parser<'a> { } if self.token == TokenKind::Semi - && matches!(self.token_cursor.stack.last(), Some((.., Delimiter::Parenthesis))) + && let Some(last) = self.token_cursor.stack.last() + && let Some(TokenTree::Delimited(_, _, Delimiter::Parenthesis, _)) = last.curr() && self.may_recover() { // It is likely that the closure body is a block but where the @@ -2432,10 +2425,10 @@ impl<'a> Parser<'a> { /// Parses an optional `move` prefix to a closure-like construct. fn parse_capture_clause(&mut self) -> PResult<'a, CaptureBy> { - if self.eat_keyword(kw::Move) { + if self.eat_keyword(exp!(Move)) { let move_kw_span = self.prev_token.span; // Check for `move async` and recover - if self.check_keyword(kw::Async) { + if self.check_keyword(exp!(Async)) { let move_async_span = self.token.span.with_lo(self.prev_token.span.data().lo); Err(self .dcx() @@ -2452,15 +2445,15 @@ impl<'a> Parser<'a> { fn parse_fn_block_decl(&mut self) -> PResult<'a, (P, Span)> { let arg_start = self.token.span.lo(); - let inputs = if self.eat(&token::OrOr) { + let inputs = if self.eat(exp!(OrOr)) { ThinVec::new() } else { - self.expect(&token::BinOp(token::Or))?; + self.expect(exp!(Or))?; let args = self .parse_seq_to_before_tokens( - &[&token::BinOp(token::Or)], + &[exp!(Or)], &[&token::OrOr], - SeqSep::trailing_allowed(token::Comma), + SeqSep::trailing_allowed(exp!(Comma)), |p| p.parse_fn_block_param(), )? .0; @@ -2480,7 +2473,7 @@ impl<'a> Parser<'a> { let attrs = self.parse_outer_attributes()?; self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| { let pat = this.parse_pat_no_top_alt(Some(Expected::ParameterName), None)?; - let ty = if this.eat(&token::Colon) { + let ty = if this.eat(exp!(Colon)) { this.parse_ty()? } else { this.mk_ty(pat.span, TyKind::Infer) @@ -2565,7 +2558,7 @@ impl<'a> Parser<'a> { } else { let attrs = self.parse_outer_attributes()?; // For recovery. let maybe_fatarrow = self.token.clone(); - let block = if self.check(&token::OpenDelim(Delimiter::Brace)) { + let block = if self.check(exp!(OpenBrace)) { self.parse_block()? 
} else if let Some(block) = recover_block_from_condition(self) { block @@ -2608,7 +2601,7 @@ impl<'a> Parser<'a> { self.error_on_if_block_attrs(lo, false, block.span, attrs); block }; - let els = if self.eat_keyword(kw::Else) { Some(self.parse_expr_else()?) } else { None }; + let els = if self.eat_keyword(exp!(Else)) { Some(self.parse_expr_else()?) } else { None }; Ok(self.mk_expr(lo.to(self.prev_token.span), ExprKind::If(cond, thn, els))) } @@ -2661,7 +2654,7 @@ impl<'a> Parser<'a> { }); self.bump(); } else { - self.expect(&token::Eq)?; + self.expect(exp!(Eq))?; } let attrs = self.parse_outer_attributes()?; let (expr, _) = @@ -2674,9 +2667,9 @@ impl<'a> Parser<'a> { fn parse_expr_else(&mut self) -> PResult<'a, P> { let else_span = self.prev_token.span; // `else` let attrs = self.parse_outer_attributes()?; // For recovery. - let expr = if self.eat_keyword(kw::If) { + let expr = if self.eat_keyword(exp!(If)) { ensure_sufficient_stack(|| self.parse_expr_if())? - } else if self.check(&TokenKind::OpenDelim(Delimiter::Brace)) { + } else if self.check(exp!(OpenBrace)) { self.parse_simple_block()? } else { let snapshot = self.create_snapshot_for_diagnostic(); @@ -2718,7 +2711,7 @@ impl<'a> Parser<'a> { // while true {} // } // ^ - if self.check(&TokenKind::OpenDelim(Delimiter::Brace)) + if self.check(exp!(OpenBrace)) && (classify::expr_requires_semi_to_be_stmt(&cond) || matches!(cond.kind, ExprKind::MacCall(..))) => @@ -2804,7 +2797,7 @@ impl<'a> Parser<'a> { begin_paren, ) { (Ok(pat), _) => pat, // Happy path. - (Err(err), Some((start_span, left))) if self.eat_keyword(kw::In) => { + (Err(err), Some((start_span, left))) if self.eat_keyword(exp!(In)) => { // We know for sure we have seen `for ($SOMETHING in`. In the happy path this would // happen right before the return of this method. let attrs = self.parse_outer_attributes()?; @@ -2838,7 +2831,7 @@ impl<'a> Parser<'a> { } (Err(err), _) => return Err(err), // Some other error, bubble up. }; - if !self.eat_keyword(kw::In) { + if !self.eat_keyword(exp!(In)) { self.error_missing_in_for_loop(); } self.check_for_for_in_in_typo(self.prev_token.span); @@ -2850,7 +2843,7 @@ impl<'a> Parser<'a> { /// Parses `for await? in ` (`for` token already eaten). fn parse_expr_for(&mut self, opt_label: Option